diff --git a/.github/workflows/cdktf-documentation.yml b/.github/workflows/cdktf-documentation.yml
index 73066ea1b38..8b6c59a8d1d 100644
--- a/.github/workflows/cdktf-documentation.yml
+++ b/.github/workflows/cdktf-documentation.yml
@@ -9,14 +9,29 @@ permissions:
   pull-requests: write
 jobs:
+  generateToken:
+    runs-on: ubuntu-latest
+    outputs:
+      token: ${{ steps.generate_token.outputs.token }}
+    steps:
+      - name: Generate Token
+        id: generate_token
+        uses: tibdex/github-app-token@b62528385c34dbc9f38e5f4225ac829252d1ea92 # v1.8.0
+        with:
+          app_id: ${{ secrets.APP_ID }}
+          installation_id: ${{ secrets.INSTALLATION_ID }}
+          private_key: ${{ secrets.APP_PEM }}
   cdktfDocs:
+    needs:
+      - generateToken
     uses: hashicorp/terraform-cdk/.github/workflows/registry-docs-pr-based.yml@304e2507209c9657135dc7b4b7ee68030327468f
     secrets:
-      GH_PR_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      GH_PR_TOKEN: ${{ needs.generateToken.outputs.token }}
     with:
       providerFqn: "hashicorp/aws"
-      files: "*/ec2*.html.markdown,*/lambda*.html.markdown,*/eks*.html.markdown,*/vpc*.html.markdown,*/instance.html.markdown"
       languages: "typescript,python"
+      files: "d/*.html.markdown,r/*.html.markdown"
       parallelFileConversions: 1
       maxRunners: 20
       cdktfRegistryDocsVersion: "1.14.2"
diff --git a/website/docs/cdktf/python/d/acm_certificate.html.markdown b/website/docs/cdktf/python/d/acm_certificate.html.markdown
new file mode 100644
index 00000000000..c9812d3d336
--- /dev/null
+++ b/website/docs/cdktf/python/d/acm_certificate.html.markdown
@@ -0,0 +1,67 @@
---
subcategory: "ACM (Certificate Manager)"
layout: "aws"
page_title: "AWS: aws_acm_certificate"
description: |-
  Get information on an AWS Certificate Manager (ACM) Certificate
---

# Data Source: aws_acm_certificate

Use this data source to get the ARN of a certificate in AWS Certificate Manager (ACM). You can reference the certificate by domain without having to hard-code the ARN as an input.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_acm_certificate import DataAwsAcmCertificate
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsAcmCertificate(self, "amazon_issued",
            domain="tf.example.com",
            most_recent=True,
            types=["AMAZON_ISSUED"]
        )
        DataAwsAcmCertificate(self, "issued",
            domain="tf.example.com",
            statuses=["ISSUED"]
        )
        DataAwsAcmCertificate(self, "rsa_4096",
            domain="tf.example.com",
            key_types=["RSA_4096"]
        )
```

## Argument Reference

* `domain` - (Required) Domain of the certificate to look up. If no certificate is found with this name, an error will be returned.
* `key_types` - (Optional) List of key algorithms to filter certificates. By default, ACM does not return all certificate types when searching. See the [ACM API Reference](https://docs.aws.amazon.com/acm/latest/APIReference/API_CertificateDetail.html#ACM-Type-CertificateDetail-KeyAlgorithm) for supported key algorithms.
* `statuses` - (Optional) List of statuses on which to filter the returned list. Valid values are `PENDING_VALIDATION`, `ISSUED`, `INACTIVE`, `EXPIRED`, `VALIDATION_TIMED_OUT`, `REVOKED`, and `FAILED`. If no value is specified, only certificates in the `ISSUED` state are returned.
* `types` - (Optional) List of types on which to filter the returned list. Valid values are `AMAZON_ISSUED`, `PRIVATE`, and `IMPORTED`.
* `most_recent` - (Optional) If set to `true`, it sorts the certificates matched by previous criteria by the `NotBefore` field, returning only the most recent one. If set to `false`, it returns an error if more than one certificate is found. Defaults to `false`.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `arn` - ARN of the found certificate, suitable for referencing in other resources that support ACM certificates.
* `id` - ARN of the found certificate, suitable for referencing in other resources that support ACM certificates.
* `status` - Status of the found certificate.
* `certificate` - ACM-issued certificate.
* `certificate_chain` - Certificates forming the requested ACM-issued certificate's chain of trust. The chain consists of the certificate of the issuing CA and the intermediate certificates of any other subordinate CAs.
* `tags` - Mapping of tags for the resource.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/acmpca_certificate.html.markdown b/website/docs/cdktf/python/d/acmpca_certificate.html.markdown
new file mode 100644
index 00000000000..1ad1798ab8d
--- /dev/null
+++ b/website/docs/cdktf/python/d/acmpca_certificate.html.markdown
@@ -0,0 +1,49 @@
---
subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)"
layout: "aws"
page_title: "AWS: aws_acmpca_certificate"
description: |-
  Get information on a Certificate issued by an AWS Certificate Manager Private Certificate Authority
---

# Data Source: aws_acmpca_certificate

Get information on a Certificate issued by an AWS Certificate Manager Private Certificate Authority.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_acmpca_certificate import DataAwsAcmpcaCertificate
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsAcmpcaCertificate(self, "example",
            arn="arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012/certificate/1234b4a0d73e2056789bdbe77d5b1a23",
            certificate_authority_arn="arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012"
        )
```

## Argument Reference

This data source supports the following arguments:

* `arn` - (Required) ARN of the certificate issued by the private certificate authority.
* `certificate_authority_arn` - (Required) ARN of the certificate authority.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `certificate` - PEM-encoded certificate value.
* `certificate_chain` - PEM-encoded certificate chain that includes any intermediate certificates and chains up to the root CA.
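
As a usage sketch (illustrative only, not part of the upstream page; the stack and output names are ours), the PEM attributes can be surfaced as stack outputs with cdktf's `TerraformOutput`:

```python
# A minimal sketch: expose the retrieved PEM material as stack outputs.
from constructs import Construct
from cdktf import TerraformStack, TerraformOutput
from imports.aws.data_aws_acmpca_certificate import DataAwsAcmpcaCertificate
class CertificatePemOutputs(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        cert = DataAwsAcmpcaCertificate(self, "example",
            arn="arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012/certificate/1234b4a0d73e2056789bdbe77d5b1a23",
            certificate_authority_arn="arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012"
        )
        # Both attributes are PEM strings; outputs make them consumable via
        # `cdktf output` or from a parent stack.
        TerraformOutput(self, "certificate_pem", value=cert.certificate)
        TerraformOutput(self, "certificate_chain_pem", value=cert.certificate_chain)
```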

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/acmpca_certificate_authority.html.markdown b/website/docs/cdktf/python/d/acmpca_certificate_authority.html.markdown
new file mode 100644
index 00000000000..ff77eaabc5c
--- /dev/null
+++ b/website/docs/cdktf/python/d/acmpca_certificate_authority.html.markdown
@@ -0,0 +1,65 @@
---
subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)"
layout: "aws"
page_title: "AWS: aws_acmpca_certificate_authority"
description: |-
  Get information on an AWS Certificate Manager Private Certificate Authority
---

# Data Source: aws_acmpca_certificate_authority

Get information on an AWS Certificate Manager Private Certificate Authority (ACM PCA Certificate Authority).

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_acmpca_certificate_authority import DataAwsAcmpcaCertificateAuthority
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsAcmpcaCertificateAuthority(self, "example",
            arn="arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012"
        )
```

## Argument Reference

This data source supports the following arguments:

* `arn` - (Required) ARN of the certificate authority.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `id` - ARN of the certificate authority.
* `certificate` - Base64-encoded certificate authority (CA) certificate. Only available after the certificate authority certificate has been imported.
* `certificate_chain` - Base64-encoded certificate chain that includes any intermediate certificates and chains up to the root on-premises certificate that you used to sign your private CA certificate. The chain does not include your private CA certificate. Only available after the certificate authority certificate has been imported.
* `certificate_signing_request` - Base64 PEM-encoded certificate signing request (CSR) for your private CA certificate.
* `usage_mode` - Specifies whether the CA issues general-purpose certificates that typically require a revocation mechanism, or short-lived certificates that may optionally omit revocation because they expire quickly.
* `not_after` - Date and time after which the certificate authority is not valid. Only available after the certificate authority certificate has been imported.
* `not_before` - Date and time before which the certificate authority is not valid. Only available after the certificate authority certificate has been imported.
* `revocation_configuration` - Nested attribute containing revocation configuration.
    * `revocation_configuration.0.crl_configuration` - Nested attribute containing configuration of the certificate revocation list (CRL), if any, maintained by the certificate authority.
    * `revocation_configuration.0.crl_configuration.0.custom_cname` - Name inserted into the certificate CRL Distribution Points extension that enables the use of an alias for the CRL distribution point.
    * `revocation_configuration.0.crl_configuration.0.enabled` - Boolean value that specifies whether certificate revocation lists (CRLs) are enabled.
    * `revocation_configuration.0.crl_configuration.0.expiration_in_days` - Number of days until a certificate expires.
    * `revocation_configuration.0.crl_configuration.0.s3_bucket_name` - Name of the S3 bucket that contains the CRL.
    * `revocation_configuration.0.crl_configuration.0.s3_object_acl` - Whether the CRL is publicly readable or privately held in the CRL Amazon S3 bucket.
    * `revocation_configuration.0.ocsp_configuration.0.enabled` - Boolean value that specifies whether a custom OCSP responder is enabled.
    * `revocation_configuration.0.ocsp_configuration.0.ocsp_custom_cname` - A CNAME specifying a customized OCSP domain.
* `serial` - Serial number of the certificate authority. Only available after the certificate authority certificate has been imported.
* `status` - Status of the certificate authority.
* `tags` - Key-value map of user-defined tags that are attached to the certificate authority.
* `type` - Type of the certificate authority.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ami.html.markdown b/website/docs/cdktf/python/d/ami.html.markdown
new file mode 100644
index 00000000000..9ba019f413c
--- /dev/null
+++ b/website/docs/cdktf/python/d/ami.html.markdown
@@ -0,0 +1,142 @@
---
subcategory: "EC2 (Elastic Compute Cloud)"
layout: "aws"
page_title: "AWS: aws_ami"
description: |-
  Get information on an Amazon Machine Image (AMI).
---

# Data Source: aws_ami

Use this data source to get the ID of a registered AMI for use in other resources.

## Example Usage

```terraform
data "aws_ami" "example" {
  executable_users = ["self"]
  most_recent      = true
  name_regex       = "^myami-\\d{3}"
  owners           = ["self"]

  filter {
    name   = "name"
    values = ["myami-*"]
  }

  filter {
    name   = "root-device-type"
    values = ["ebs"]
  }

  filter {
    name   = "virtualization-type"
    values = ["hvm"]
  }
}
```

## Argument Reference

* `owners` - (Optional) List of AMI owners to limit search. Valid values: an AWS account ID, `self` (the current account), or an AWS owner alias (e.g., `amazon`, `aws-marketplace`, `microsoft`).

* `most_recent` - (Optional) If more than one result is returned, use the most recent AMI.

* `executable_users` - (Optional) Limit search to users with *explicit* launch permission on the image. Valid items are the numeric account ID or `self`.

* `include_deprecated` - (Optional) If true, all deprecated AMIs are included in the response. If false, no deprecated AMIs are included in the response. If no value is specified, the default value is `false`.

* `filter` - (Optional) One or more name/value pairs to filter on. There are several valid keys; for a full reference, check out [describe-images in the AWS CLI reference][1].

* `name_regex` - (Optional) Regex string to apply to the AMI list returned by AWS. This allows more advanced filtering not supported by the AWS API. This filtering is done locally on what AWS returns, and could have a performance impact if the result is large. Combine this with other options to narrow down the list AWS returns.

~> **NOTE:** If more or less than a single match is returned by the search, Terraform will fail. Ensure that your search is specific enough to return a single AMI ID only, or use `most_recent` to choose the most recent one. If you want to match multiple AMIs, use the `aws_ami_ids` data source instead.

## Attribute Reference

`id` is set to the ID of the found AMI.
In addition, the following attributes are exported:

~> **NOTE:** Some values are not always set and may not be available for interpolation.

* `arn` - ARN of the AMI.
* `architecture` - OS architecture of the AMI (ie: `i386` or `x86_64`).
* `boot_mode` - Boot mode of the image.
* `block_device_mappings` - Set of objects with block device mappings of the AMI.
    * `device_name` - Physical name of the device.
    * `ebs` - Map containing EBS information, if the device is EBS based. Unlike most object attributes, these are accessed directly (e.g., `ebs.volume_size` or `ebs["volume_size"]`) rather than accessed through the first element of a list (e.g., `ebs[0].volume_size`).
        * `delete_on_termination` - `true` if the EBS volume will be deleted on termination.
        * `encrypted` - `true` if the EBS volume is encrypted.
        * `iops` - `0` if the EBS volume is not a provisioned IOPS image, otherwise the supported IOPS count.
        * `snapshot_id` - The ID of the snapshot.
        * `volume_size` - The size of the volume, in GiB.
        * `throughput` - The throughput that the EBS volume supports, in MiB/s.
        * `volume_type` - The volume type.
    * `no_device` - Suppresses the specified device included in the block device mapping of the AMI.
    * `virtual_name` - Virtual device name (for instance stores).
* `creation_date` - Date and time the image was created.
* `deprecation_time` - Date and time when the image will be deprecated.
* `description` - Description of the AMI that was provided during image creation.
* `hypervisor` - Hypervisor type of the image.
* `image_id` - ID of the AMI. Should be the same as the resource `id`.
* `image_location` - Location of the AMI.
* `image_owner_alias` - AWS account alias (for example, `amazon`, `self`) or the AWS account ID of the AMI owner.
* `image_type` - Type of image.
* `imds_support` - Instance Metadata Service (IMDS) support mode for the image. Set to `v2.0` if instances launched from this image enforce IMDSv2.
* `kernel_id` - Kernel associated with the image, if any. Only applicable for machine images.
* `name` - Name of the AMI that was provided during image creation.
* `owner_id` - AWS account ID of the image owner.
* `platform` - Value is `windows` for Windows AMIs; otherwise blank.
* `product_codes` - Any product codes associated with the AMI.
    * `product_codes.#.product_code_id` - The product code.
    * `product_codes.#.product_code_type` - The type of product code.
* `public` - `true` if the image has public launch permissions.
* `ramdisk_id` - RAM disk associated with the image, if any. Only applicable for machine images.
* `root_device_name` - Device name of the root device.
* `root_device_type` - Type of root device (ie: `ebs` or `instance-store`).
* `root_snapshot_id` - Snapshot id associated with the root device, if any (only applies to `ebs` root devices).
* `sriov_net_support` - Whether enhanced networking is enabled.
* `state` - Current state of the AMI. If the state is `available`, the image is successfully registered and can be used to launch an instance.
* `state_reason` - Describes a state change. Fields are `UNSET` if not available.
    * `state_reason.code` - The reason code for the state change.
    * `state_reason.message` - The message for the state change.
* `tags` - Any tags assigned to the image.
    * `tags.#.key` - Key name of the tag.
    * `tags.#.value` - Value of the tag.
* `tpm_support` - If the image is configured for NitroTPM support, the value is `v2.0`.
* `virtualization_type` - Type of virtualization of the AMI (ie: `hvm` or `paravirtual`).
* `usage_operation` - Operation of the Amazon EC2 instance and the billing code that is associated with the AMI.
* `platform_details` - Platform details associated with the billing code of the AMI.
* `ena_support` - Whether enhanced networking with ENA is enabled.

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `read` - (Default `20m`)

[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ami_ids.html.markdown b/website/docs/cdktf/python/d/ami_ids.html.markdown
new file mode 100644
index 00000000000..3ab8eb9ac19
--- /dev/null
+++ b/website/docs/cdktf/python/d/ami_ids.html.markdown
@@ -0,0 +1,74 @@
---
subcategory: "EC2 (Elastic Compute Cloud)"
layout: "aws"
page_title: "AWS: aws_ami_ids"
description: |-
  Provides a list of AMI IDs.
---

# Data Source: aws_ami_ids

Use this data source to get a list of AMI IDs matching the specified criteria.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_ami_ids import DataAwsAmiIds, DataAwsAmiIdsFilter
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsAmiIds(self, "ubuntu",
            filter=[DataAwsAmiIdsFilter(
                name="name",
                values=["ubuntu/images/ubuntu-*-*-amd64-server-*"]
            )
            ],
            owners=["099720109477"]
        )
```

## Argument Reference

* `owners` - (Required) List of AMI owners to limit search. At least 1 value must be specified. Valid values: an AWS account ID, `self` (the current account), or an AWS owner alias (e.g., `amazon`, `aws-marketplace`, `microsoft`).

* `executable_users` - (Optional) Limit search to users with *explicit* launch permission on the image. Valid items are the numeric account ID or `self`.

* `filter` - (Optional) One or more name/value pairs to filter on. There are several valid keys; for a full reference, check out [describe-images in the AWS CLI reference][1].

* `name_regex` - (Optional) Regex string to apply to the AMI list returned by AWS. This allows more advanced filtering not supported by the AWS API. This filtering is done locally on what AWS returns, and could have a performance impact if the result is large. Combine this with other options to narrow down the list AWS returns.

* `sort_ascending` - (Optional) Used to sort AMIs by creation time. If no value is specified, the default value is `false`.

* `include_deprecated` - (Optional) If true, all deprecated AMIs are included in the response. If false, no deprecated AMIs are included in the response. If no value is specified, the default value is `false`.

## Attribute Reference

`ids` is set to the list of AMI IDs, sorted by creation time according to `sort_ascending`.
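
As an illustrative sketch (not from the upstream page): with the default `sort_ascending = false` the list is sorted newest-first, so the first element of `ids` is the most recently created match. The `Token`/`property_access` helpers below are the same ones the converted examples in these docs use.

```python
# A minimal sketch: output the most recently created matching AMI ID.
from constructs import Construct
from cdktf import TerraformStack, TerraformOutput, Token, property_access
from imports.aws.data_aws_ami_ids import DataAwsAmiIds, DataAwsAmiIdsFilter
class NewestAmiId(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        ubuntu = DataAwsAmiIds(self, "ubuntu",
            filter=[DataAwsAmiIdsFilter(
                name="name",
                values=["ubuntu/images/ubuntu-*-*-amd64-server-*"]
            )],
            owners=["099720109477"]
        )
        # `ids` is a Terraform-side list; index 0 is the newest AMI because
        # sort_ascending defaults to false (descending creation time).
        TerraformOutput(self, "newest_ami_id",
            value=Token.as_string(property_access(ubuntu.ids, ["0"]))
        )
```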
[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `read` - (Default `20m`)

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_api_key.html.markdown b/website/docs/cdktf/python/d/api_gateway_api_key.html.markdown
new file mode 100644
index 00000000000..99e3209db2e
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_api_key.html.markdown
@@ -0,0 +1,52 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_api_key"
description: |-
  Get information on an API Gateway REST API Key
---

# Data Source: aws_api_gateway_api_key

Use this data source to get the name and value of a pre-existing API Key, for example to supply credentials for a dependency microservice.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_api_gateway_api_key import DataAwsApiGatewayApiKey
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewayApiKey(self, "my_api_key",
            id="ru3mpjgse6"
        )
```

## Argument Reference

* `id` - (Required) ID of the API Key to look up.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `id` - Set to the ID of the API Key.
* `name` - Set to the name of the API Key.
* `value` - Set to the value of the API Key.
* `created_date` - Date and time when the API Key was created.
* `last_updated_date` - Date and time when the API Key was last updated.
* `description` - Description of the API Key.
* `enabled` - Whether the API Key is enabled.
* `tags` - Map of tags for the resource.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_authorizer.html.markdown b/website/docs/cdktf/python/d/api_gateway_authorizer.html.markdown
new file mode 100644
index 00000000000..16fc32ca739
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_authorizer.html.markdown
@@ -0,0 +1,57 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_authorizer"
description: |-
  Provides details about a specific API Gateway Authorizer.
---

# Data Source: aws_api_gateway_authorizer

Provides details about a specific API Gateway Authorizer.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import property_access, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
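# Note: `data_aws_api_gateway_authorizers_example` and
# `aws_api_gateway_rest_api_example` below refer to an
# aws_api_gateway_authorizers data source and an aws_api_gateway_rest_api
# resource assumed to be defined elsewhere in the stack; `cdktf convert`
# leaves such cross-references as plain identifiers.
# `property_access(..., ["0"])` reads the first element of the `ids` list.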
#
from imports.aws.data_aws_api_gateway_authorizer import DataAwsApiGatewayAuthorizer
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewayAuthorizer(self, "example",
            authorizer_id=Token.as_string(
                property_access(data_aws_api_gateway_authorizers_example.ids, ["0"])),
            rest_api_id=Token.as_string(aws_api_gateway_rest_api_example.id)
        )
```

## Argument Reference

The following arguments are required:

* `authorizer_id` - (Required) Authorizer identifier.
* `rest_api_id` - (Required) ID of the associated REST API.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `arn` - ARN of the API Gateway Authorizer.
* `authorizer_credentials` - Credentials required for the authorizer.
* `authorizer_result_ttl_in_seconds` - TTL of cached authorizer results in seconds.
* `authorizer_uri` - Authorizer's Uniform Resource Identifier (URI).
* `identity_source` - Source of the identity in an incoming request.
* `identity_validation_expression` - Validation expression for the incoming identity.
* `name` - Name of the authorizer.
* `provider_arns` - List of the Amazon Cognito user pool ARNs.
* `type` - Type of the authorizer.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_authorizers.html.markdown b/website/docs/cdktf/python/d/api_gateway_authorizers.html.markdown
new file mode 100644
index 00000000000..1593acf0a5d
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_authorizers.html.markdown
@@ -0,0 +1,46 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_authorizers"
description: |-
  Provides details about multiple API Gateway Authorizers.
---

# Data Source: aws_api_gateway_authorizers

Provides details about multiple API Gateway Authorizers.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_api_gateway_authorizers import DataAwsApiGatewayAuthorizers
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewayAuthorizers(self, "example",
            rest_api_id=Token.as_string(aws_api_gateway_rest_api_example.id)
        )
```

## Argument Reference

The following arguments are required:

* `rest_api_id` - (Required) ID of the associated REST API.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `ids` - List of Authorizer identifiers.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_domain_name.html.markdown b/website/docs/cdktf/python/d/api_gateway_domain_name.html.markdown
new file mode 100644
index 00000000000..afc6cf071f1
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_domain_name.html.markdown
@@ -0,0 +1,57 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_domain_name"
description: |-
  Get information on a custom domain name for use with AWS API Gateway.
---

# Data Source: aws_api_gateway_domain_name

Use this data source to get the custom domain name for use with AWS API Gateway.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_api_gateway_domain_name import DataAwsApiGatewayDomainName
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewayDomainName(self, "example",
            domain_name="api.example.com"
        )
```

## Argument Reference

* `domain_name` - (Required) Fully-qualified domain name to look up. If no domain name is found, an error will be returned.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `arn` - ARN of the found custom domain name.
* `certificate_arn` - ARN for an AWS-managed certificate that is used by edge-optimized endpoint for this domain name.
* `certificate_name` - Name of the certificate that is used by edge-optimized endpoint for this domain name.
* `certificate_upload_date` - Upload date associated with the domain certificate.
* `cloudfront_domain_name` - Hostname created by CloudFront to represent the distribution that implements this domain name mapping.
* `cloudfront_zone_id` - For convenience, the hosted zone ID (`Z2FDTNDATAQYW2`) that can be used to create a Route53 alias record for the distribution.
* `endpoint_configuration` - List of objects with the endpoint configuration of this domain name.
    * `types` - List of endpoint types.
* `regional_certificate_arn` - ARN for an AWS-managed certificate that is used for validating the regional domain name.
* `regional_certificate_name` - User-friendly name of the certificate that is used by regional endpoint for this domain name.
* `regional_domain_name` - Hostname for the custom domain's regional endpoint.
* `regional_zone_id` - Hosted zone ID that can be used to create a Route53 alias record for the regional endpoint.
* `security_policy` - Security policy for the domain name.
* `tags` - Key-value map of tags for the resource.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_export.html.markdown b/website/docs/cdktf/python/d/api_gateway_export.html.markdown
new file mode 100644
index 00000000000..a4fa20f6f51
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_export.html.markdown
@@ -0,0 +1,51 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_export"
description: |-
  Exports a definition of a REST API in a particular output format and specification
---

# Data Source: aws_api_gateway_export

Exports a definition of a REST API in a particular output format and specification.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_api_gateway_export import DataAwsApiGatewayExport
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewayExport(self, "example",
            export_type="oas30",
            rest_api_id=Token.as_string(aws_api_gateway_stage_example.rest_api_id),
            stage_name=Token.as_string(aws_api_gateway_stage_example.stage_name)
        )
```

## Argument Reference

* `export_type` - (Required) Type of export.
Acceptable values are `oas30` for OpenAPI 3.0.x and `swagger` for Swagger/OpenAPI 2.0.
* `rest_api_id` - (Required) Identifier of the associated REST API.
* `stage_name` - (Required) Name of the Stage that will be exported.
* `accepts` - (Optional) Content-type of the export. Valid values are `application/json` and `application/yaml`, both supported for `export_type` values of `oas30` and `swagger`.
* `parameters` - (Optional) Key-value map of query string parameters that specify properties of the export. The following parameters are supported: `extensions='integrations'` or `extensions='apigateway'` will export the API with x-amazon-apigateway-integration extensions. `extensions='authorizers'` will export the API with x-amazon-apigateway-authorizer extensions.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `id` - The `REST-API-ID:STAGE-NAME`.
* `body` - API Spec.
* `content_type` - Content-type header value in the HTTP response.
* `content_disposition` - Content-disposition header value in the HTTP response.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_resource.html.markdown b/website/docs/cdktf/python/d/api_gateway_resource.html.markdown
new file mode 100644
index 00000000000..3b7946aebdb
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_resource.html.markdown
@@ -0,0 +1,53 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_resource"
description: |-
  Get information on an API Gateway Resource
---

# Data Source: aws_api_gateway_resource

Use this data source to get the id of a Resource in API Gateway. To fetch the Resource, you must provide the REST API id as well as the full path.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_api_gateway_resource import DataAwsApiGatewayResource
from imports.aws.data_aws_api_gateway_rest_api import DataAwsApiGatewayRestApi
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        my_rest_api = DataAwsApiGatewayRestApi(self, "my_rest_api",
            name="my-rest-api"
        )
        DataAwsApiGatewayResource(self, "my_resource",
            path="/endpoint/path",
            rest_api_id=Token.as_string(my_rest_api.id)
        )
```

## Argument Reference

* `rest_api_id` - (Required) REST API id that owns the resource. If no REST API is found, an error will be returned.
* `path` - (Required) Full path of the resource. If no path is found, an error will be returned.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `id` - Set to the ID of the found Resource.
* `parent_id` - Set to the ID of the parent Resource.
* `path_part` - Set to the path relative to the parent Resource.
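
As a hedged sketch of downstream usage (the `ApiGatewayMethod` import path and the construct names follow standard `cdktf get` naming conventions and are assumptions, not taken from this page), the looked-up IDs can be wired into an `aws_api_gateway_method`:

```python
# A minimal sketch: attach a GET method to an existing API Gateway Resource.
from constructs import Construct
from cdktf import TerraformStack, Token
from imports.aws.data_aws_api_gateway_rest_api import DataAwsApiGatewayRestApi
from imports.aws.data_aws_api_gateway_resource import DataAwsApiGatewayResource
from imports.aws.api_gateway_method import ApiGatewayMethod  # assumed binding name
class MethodOnExistingResource(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        my_rest_api = DataAwsApiGatewayRestApi(self, "my_rest_api",
            name="my-rest-api"
        )
        my_resource = DataAwsApiGatewayResource(self, "my_resource",
            path="/endpoint/path",
            rest_api_id=Token.as_string(my_rest_api.id)
        )
        # The data source's `id` is the resource identifier the method needs.
        ApiGatewayMethod(self, "example_get",
            rest_api_id=Token.as_string(my_rest_api.id),
            resource_id=Token.as_string(my_resource.id),
            http_method="GET",
            authorization="NONE"
        )
```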

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_rest_api.html.markdown b/website/docs/cdktf/python/d/api_gateway_rest_api.html.markdown
new file mode 100644
index 00000000000..ece0ccedc19
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_rest_api.html.markdown
@@ -0,0 +1,57 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_rest_api"
description: |-
  Get information on an API Gateway REST API
---

# Data Source: aws_api_gateway_rest_api

Use this data source to get the id and root_resource_id of a REST API in API Gateway. To fetch the REST API, you must provide a name to match against. As there is no unique name constraint on REST APIs, this data source will error if there is more than one match.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_api_gateway_rest_api import DataAwsApiGatewayRestApi
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewayRestApi(self, "my_rest_api",
            name="my-rest-api"
        )
```

## Argument Reference

* `name` - (Required) Name of the REST API to look up. If no REST API is found with this name, an error will be returned. If multiple REST APIs are found with this name, an error will be returned.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `api_key_source` - Source of the API key for requests.
* `arn` - ARN of the REST API.
* `binary_media_types` - List of binary media types supported by the REST API.
* `description` - Description of the REST API.
* `endpoint_configuration` - The endpoint configuration of this RestApi showing the endpoint types of the API.
* `execution_arn` - Execution ARN part to be used in [`lambda_permission`](/docs/providers/aws/r/lambda_permission.html)'s `source_arn` when allowing API Gateway to invoke a Lambda function, e.g., `arn:aws:execute-api:eu-west-2:123456789012:z4675bid1j`, which can be concatenated with allowed stage, method and resource path.
* `id` - Set to the ID of the found REST API.
* `minimum_compression_size` - Minimum response size to compress for the REST API.
* `policy` - JSON formatted policy document that controls access to the API Gateway.
* `root_resource_id` - Set to the ID of the API Gateway Resource on the found REST API where the route matches '/'.
* `tags` - Key-value map of resource tags.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_sdk.html.markdown b/website/docs/cdktf/python/d/api_gateway_sdk.html.markdown
new file mode 100644
index 00000000000..69db9c33cc0
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_sdk.html.markdown
@@ -0,0 +1,56 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_sdk"
description: |-
  Gets an API Gateway client SDK
---

# Data Source: aws_api_gateway_sdk

Use this data source to generate a client SDK for an API Gateway REST API stage.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
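# Note: `aws_api_gateway_stage_example` below refers to an
# aws_api_gateway_stage resource assumed to be defined elsewhere in the
# stack; `cdktf convert` leaves such cross-references as plain identifiers.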
#
from imports.aws.data_aws_api_gateway_sdk import DataAwsApiGatewaySdk
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewaySdk(self, "example",
            parameters={
                "artifact_id": "example",
                "artifact_version": "example",
                "group_id": "example",
                "invoker_package": "example"
            },
            rest_api_id=Token.as_string(aws_api_gateway_stage_example.rest_api_id),
            sdk_type="android",
            stage_name=Token.as_string(aws_api_gateway_stage_example.stage_name)
        )
```

## Argument Reference

* `rest_api_id` - (Required) Identifier of the associated REST API.
* `stage_name` - (Required) Name of the Stage that will be exported.
* `sdk_type` - (Required) Language for the generated SDK. Currently `java`, `javascript`, `android`, `objectivec` (for iOS), `swift` (for iOS), and `ruby` are supported.
* `parameters` - (Optional) Key-value map of query string parameters that specify properties of the SDK, depending on the requested `sdk_type`. For SDK Type of `objectivec` or `swift`, a parameter named `classPrefix` is required. For SDK Type of `android`, parameters named `groupId`, `artifactId`, `artifactVersion`, and `invokerPackage` are required. For SDK Type of `java`, parameters named `serviceName` and `javaPackageName` are required.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `id` - The `REST-API-ID:STAGE-NAME`.
* `body` - SDK as a string.
* `content_type` - Content-type header value in the HTTP response.
* `content_disposition` - Content-disposition header value in the HTTP response.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/api_gateway_vpc_link.html.markdown b/website/docs/cdktf/python/d/api_gateway_vpc_link.html.markdown
new file mode 100644
index 00000000000..74a8506ce7a
--- /dev/null
+++ b/website/docs/cdktf/python/d/api_gateway_vpc_link.html.markdown
@@ -0,0 +1,53 @@
---
subcategory: "API Gateway"
layout: "aws"
page_title: "AWS: aws_api_gateway_vpc_link"
description: |-
  Get information on an API Gateway VPC Link
---

# Data Source: aws_api_gateway_vpc_link

Use this data source to get the id of a VPC Link in API Gateway. To fetch the VPC Link, you must provide a name to match against. As there is no unique name constraint on API Gateway VPC Links, this data source will error if there is more than one match.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_api_gateway_vpc_link import DataAwsApiGatewayVpcLink
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApiGatewayVpcLink(self, "my_api_gateway_vpc_link",
            name="my-vpc-link"
        )
```

## Argument Reference

* `name` - (Required) Name of the API Gateway VPC Link to look up. If no API Gateway VPC Link is found with this name, an error will be returned. If multiple API Gateway VPC Links are found with this name, an error will be returned.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `id` - Set to the ID of the found API Gateway VPC Link.
* `description` - Description of the VPC link.
* `status` - Status of the VPC link.
* `status_message` - Status message of the VPC link.
* `target_arns` - List of network load balancer ARNs in the VPC targeted by the VPC link. Currently, AWS only supports one target.
* `tags` - Key-value map of resource tags.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/apigatewayv2_api.html.markdown b/website/docs/cdktf/python/d/apigatewayv2_api.html.markdown
new file mode 100644
index 00000000000..43b41d670cf
--- /dev/null
+++ b/website/docs/cdktf/python/d/apigatewayv2_api.html.markdown
@@ -0,0 +1,73 @@
---
subcategory: "API Gateway V2"
layout: "aws"
page_title: "AWS: aws_apigatewayv2_api"
description: |-
  Provides details about a specific Amazon API Gateway Version 2 API.
---

# Data Source: aws_apigatewayv2_api

Provides details about a specific Amazon API Gateway Version 2 API.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_apigatewayv2_api import DataAwsApigatewayv2Api
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApigatewayv2Api(self, "example",
            api_id="aabbccddee"
        )
```

## Argument Reference

The arguments of this data source act as filters for querying the available APIs in the current region. The given filters must match exactly one API whose data will be exported as attributes.

This data source supports the following arguments:

* `api_id` - (Required) API identifier.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `api_endpoint` - URI of the API, of the form `https://{api-id}.execute-api.{region}.amazonaws.com` for HTTP APIs and `wss://{api-id}.execute-api.{region}.amazonaws.com` for WebSocket APIs.
* `api_key_selection_expression` - An [API key selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions). Applicable for WebSocket APIs.
* `arn` - ARN of the API.
* `cors_configuration` - Cross-origin resource sharing (CORS) [configuration](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html). Applicable for HTTP APIs.
* `description` - Description of the API.
* `disable_execute_api_endpoint` - Whether clients can invoke the API by using the default `execute-api` endpoint.
* `execution_arn` - ARN prefix to be used in an [`aws_lambda_permission`](/docs/providers/aws/r/lambda_permission.html)'s `source_arn` attribute or in an [`aws_iam_policy`](/docs/providers/aws/r/iam_policy.html) to authorize access to the [`@connections` API](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-how-to-call-websocket-api-connections.html). See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-control-access-iam.html) for details.
* `name` - Name of the API.
* `protocol_type` - API protocol.
* `route_selection_expression` - The [route selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-route-selection-expressions) for the API.
* `tags` - Map of resource tags.
* `version` - Version identifier for the API.

The `cors_configuration` object supports the following:

* `allow_credentials` - Whether credentials are included in the CORS request.
* `allow_headers` - Set of allowed HTTP headers.
* `allow_methods` - Set of allowed HTTP methods.
* `allow_origins` - Set of allowed origins.
* `expose_headers` - Set of exposed HTTP headers.
* `max_age` - Number of seconds that the browser should cache preflight request results.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/apigatewayv2_apis.html.markdown b/website/docs/cdktf/python/d/apigatewayv2_apis.html.markdown
new file mode 100644
index 00000000000..a5a811def30
--- /dev/null
+++ b/website/docs/cdktf/python/d/apigatewayv2_apis.html.markdown
@@ -0,0 +1,49 @@
---
subcategory: "API Gateway V2"
layout: "aws"
page_title: "AWS: aws_apigatewayv2_apis"
description: |-
  Provides details about multiple Amazon API Gateway Version 2 APIs.
---

# Data Source: aws_apigatewayv2_apis

Provides details about multiple Amazon API Gateway Version 2 APIs.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_apigatewayv2_apis import DataAwsApigatewayv2Apis
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApigatewayv2Apis(self, "example",
            protocol_type="HTTP"
        )
```

## Argument Reference

This data source supports the following arguments:

* `name` - (Optional) API name.
* `protocol_type` - (Optional) API protocol.
* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired APIs.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `ids` - Set of API identifiers.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/apigatewayv2_export.html.markdown b/website/docs/cdktf/python/d/apigatewayv2_export.html.markdown
new file mode 100644
index 00000000000..6472f592cec
--- /dev/null
+++ b/website/docs/cdktf/python/d/apigatewayv2_export.html.markdown
@@ -0,0 +1,54 @@
---
subcategory: "API Gateway V2"
layout: "aws"
page_title: "AWS: aws_apigatewayv2_export"
description: |-
  Exports a definition of an API in a particular output format and specification.
---

# Data Source: aws_apigatewayv2_export

Exports a definition of an API in a particular output format and specification.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_apigatewayv2_export import DataAwsApigatewayv2Export
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsApigatewayv2Export(self, "test",
            api_id=Token.as_string(aws_apigatewayv2_route_test.api_id),
            output_type="JSON",
            specification="OAS30"
        )
```

## Argument Reference

This data source supports the following arguments:

* `api_id` - (Required) API identifier.
* `specification` - (Required) Version of the API specification to use. `OAS30`, for OpenAPI 3.0, is the only supported value.
* `output_type` - (Required) Output type of the exported definition file. Valid values are `JSON` and `YAML`.
* `export_version` - (Optional) Version of the API Gateway export algorithm. API Gateway uses the latest version by default. Currently, the only supported version is `1.0`.
* `include_extensions` - (Optional) Whether to include API Gateway extensions in the exported API definition. API Gateway extensions are included by default.
* `stage_name` - (Optional) Name of the API stage to export. If you don't specify this property, a representation of the latest API configuration is exported.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `id` - API identifier.
* `body` - Exported API definition.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/appconfig_configuration_profile.html.markdown b/website/docs/cdktf/python/d/appconfig_configuration_profile.html.markdown
new file mode 100644
index 00000000000..4465d9f0efb
--- /dev/null
+++ b/website/docs/cdktf/python/d/appconfig_configuration_profile.html.markdown
@@ -0,0 +1,59 @@
---
subcategory: "AppConfig"
layout: "aws"
page_title: "AWS: aws_appconfig_configuration_profile"
description: |-
  Terraform data source for managing an AWS AppConfig Configuration Profile.
---

# Data Source: aws_appconfig_configuration_profile

Provides access to an AppConfig Configuration Profile.

## Example Usage

### Basic Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_appconfig_configuration_profile import DataAwsAppconfigConfigurationProfile
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsAppconfigConfigurationProfile(self, "example",
            application_id="b5d5gpj",
            configuration_profile_id="qrbb1c1"
        )
```

## Argument Reference

The following arguments are required:

* `application_id` - (Required) ID of the AppConfig application to which this configuration profile belongs.
* `configuration_profile_id` - (Required) ID of the Configuration Profile.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `arn` - ARN of the Configuration Profile.
* `description` - Description of the Configuration Profile.
* `id` - AppConfig Configuration Profile ID and Application ID separated by a colon (`:`).
* `location_uri` - Location URI of the Configuration Profile.
* `name` - Name of the Configuration Profile.
* `retrieval_role_arn` - ARN of an IAM role with permission to access the configuration at the specified `location_uri`.
* `tags` - Map of tags for the resource.
* `validator` - Nested list of methods for validating the configuration.
    * `content` - Either the JSON Schema content or the ARN of an AWS Lambda function.
    * `type` - Type of validator. Valid values: `JSON_SCHEMA` and `LAMBDA`.
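
As an illustrative sketch (construct and output names are ours, not from this page), the looked-up attributes can be re-exported from the stack:

```python
# A minimal sketch: re-export selected Configuration Profile attributes.
from constructs import Construct
from cdktf import TerraformStack, TerraformOutput
from imports.aws.data_aws_appconfig_configuration_profile import DataAwsAppconfigConfigurationProfile
class ProfileLookup(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        profile = DataAwsAppconfigConfigurationProfile(self, "example",
            application_id="b5d5gpj",
            configuration_profile_id="qrbb1c1"
        )
        # `location_uri` identifies where the configuration content itself lives.
        TerraformOutput(self, "profile_location_uri", value=profile.location_uri)
        TerraformOutput(self, "profile_retrieval_role_arn", value=profile.retrieval_role_arn)
```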

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/appconfig_configuration_profiles.html.markdown b/website/docs/cdktf/python/d/appconfig_configuration_profiles.html.markdown
new file mode 100644
index 00000000000..1abcc032749
--- /dev/null
+++ b/website/docs/cdktf/python/d/appconfig_configuration_profiles.html.markdown
@@ -0,0 +1,64 @@
---
subcategory: "AppConfig"
layout: "aws"
page_title: "AWS: aws_appconfig_configuration_profiles"
description: |-
  Terraform data source for managing AWS AppConfig Configuration Profiles.
---

# Data Source: aws_appconfig_configuration_profiles

Provides access to all Configuration Profiles for an AppConfig Application. This will allow you to pass Configuration Profile IDs to another resource.

## Example Usage

### Basic Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformIterator, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_appconfig_configuration_profile import DataAwsAppconfigConfigurationProfile
from imports.aws.data_aws_appconfig_configuration_profiles import DataAwsAppconfigConfigurationProfiles
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = DataAwsAppconfigConfigurationProfiles(self, "example",
            application_id="a1d3rpe"
        )
        # In most cases loops should be handled in the programming language context and
        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
        # you need to keep this like it is.
        example_for_each_iterator = TerraformIterator.from_list(
            Token.as_any(example.configuration_profile_ids))
        data_aws_appconfig_configuration_profile_example = DataAwsAppconfigConfigurationProfile(self, "example_1",
            application_id=Token.as_string(aws_appconfig_application_example.id),
            configuration_profile_id=Token.as_string(example_for_each_iterator.value),
            for_each=example_for_each_iterator
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_appconfig_configuration_profile_example.override_logical_id("example")
```

## Argument Reference

The following arguments are required:

* `application_id` - (Required) ID of the AppConfig Application.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `configuration_profile_ids` - Set of Configuration Profile IDs associated with the AppConfig Application.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/appconfig_environment.html.markdown b/website/docs/cdktf/python/d/appconfig_environment.html.markdown
new file mode 100644
index 00000000000..0af35d8a4a6
--- /dev/null
+++ b/website/docs/cdktf/python/d/appconfig_environment.html.markdown
@@ -0,0 +1,58 @@
---
subcategory: "AppConfig"
layout: "aws"
page_title: "AWS: aws_appconfig_environment"
description: |-
  Terraform data source for managing an AWS AppConfig Environment.
---

# Data Source: aws_appconfig_environment

Provides access to an AppConfig Environment.

## Example Usage

### Basic Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_appconfig_environment import DataAwsAppconfigEnvironment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsAppconfigEnvironment(self, "example",
            application_id="b5d5gpj",
            environment_id="qrbb1c1"
        )
```

## Argument Reference

The following arguments are required:

* `application_id` - (Required) ID of the AppConfig Application to which this Environment belongs.
* `environment_id` - (Required) ID of the AppConfig Environment.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `arn` - ARN of the environment.
* `name` - Name of the environment.
* `description` - Description of the environment.
* `monitor` - Set of Amazon CloudWatch alarms to monitor during the deployment process.
    * `alarm_arn` - ARN of the Amazon CloudWatch alarm.
    * `alarm_role_arn` - ARN of an IAM role for AWS AppConfig to monitor.
* `state` - State of the environment. Possible values are `READY_FOR_DEPLOYMENT`, `DEPLOYING`, `ROLLING_BACK`, or `ROLLED_BACK`.
* `tags` - Map of tags for the resource.

\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/appconfig_environments.html.markdown b/website/docs/cdktf/python/d/appconfig_environments.html.markdown
new file mode 100644
index 00000000000..6944f14c73b
--- /dev/null
+++ b/website/docs/cdktf/python/d/appconfig_environments.html.markdown
@@ -0,0 +1,49 @@
---
subcategory: "AppConfig"
layout: "aws"
page_title: "AWS: aws_appconfig_environments"
description: |-
  Terraform data source for managing AWS AppConfig Environments.
---

# Data Source: aws_appconfig_environments

Provides access to all Environments for an AppConfig Application. This will allow you to pass Environment IDs to another resource.

## Example Usage

### Basic Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_appconfig_environments import DataAwsAppconfigEnvironments
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsAppconfigEnvironments(self, "example",
            application_id="a1d3rpe"
        )
```

## Argument Reference

The following arguments are required:

* `application_id` - (Required) ID of the AppConfig Application.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `environment_ids` - Set of Environment IDs associated with this AppConfig Application.
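
Mirroring the `TerraformIterator` pattern shown above for Configuration Profiles, the returned IDs can drive a `for_each` lookup of every Environment (a sketch; the construct names are illustrative):

```python
# A minimal sketch: fan out over the returned Environment IDs.
from constructs import Construct
from cdktf import TerraformStack, TerraformIterator, Token
from imports.aws.data_aws_appconfig_environments import DataAwsAppconfigEnvironments
from imports.aws.data_aws_appconfig_environment import DataAwsAppconfigEnvironment
class EnvironmentLookups(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        envs = DataAwsAppconfigEnvironments(self, "example",
            application_id="a1d3rpe"
        )
        # Iterate the ID set on the Terraform side: one lookup per Environment.
        env_iterator = TerraformIterator.from_list(Token.as_any(envs.environment_ids))
        DataAwsAppconfigEnvironment(self, "each",
            application_id="a1d3rpe",
            environment_id=Token.as_string(env_iterator.value),
            for_each=env_iterator
        )
```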
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appintegrations_event_integration.html.markdown b/website/docs/cdktf/python/d/appintegrations_event_integration.html.markdown new file mode 100644 index 00000000000..ec775675be5 --- /dev/null +++ b/website/docs/cdktf/python/d/appintegrations_event_integration.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "AppIntegrations" +layout: "aws" +page_title: "AWS: aws_appintegrations_event_integration" +description: |- + Provides details about an Amazon AppIntegrations Event Integration +--- + + + +# Data Source: aws_appintegrations_event_integration + +Use this data source to get information on an existing AppIntegrations Event Integration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appintegrations_event_integration import DataAwsAppintegrationsEventIntegration +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAppintegrationsEventIntegration(self, "example", +            name="example" +        ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) The AppIntegrations Event Integration name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the AppIntegrations Event Integration. +* `description` - The description of the Event Integration. +* `eventbridge_bus` - The EventBridge bus. +* `event_filter` - A block that defines the configuration information for the event filter. The Event Filter block is documented below. +* `id` - The identifier of the Event Integration, which is the name of the Event Integration. +* `tags` - Metadata that you can assign to help organize the Event Integrations you create. + +### Event Filter Attributes + +`event_filter` has the following attributes: + +* `source` - The source of the events. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appmesh_gateway_route.html.markdown b/website/docs/cdktf/python/d/appmesh_gateway_route.html.markdown new file mode 100644 index 00000000000..a4ac47297a7 --- /dev/null +++ b/website/docs/cdktf/python/d/appmesh_gateway_route.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_gateway_route" +description: |- + Terraform data source for managing an AWS App Mesh Gateway Route. +--- + + + +# Data Source: aws_appmesh_gateway_route + +The App Mesh Gateway Route data source allows details of an App Mesh Gateway Route to be retrieved by its name, mesh_name, virtual_gateway_name, and optionally the mesh_owner. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_appmesh_gateway_route import DataAwsAppmeshGatewayRoute +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsAppmeshGatewayRoute(self, "test", + mesh_name="test-mesh", + name="test-route", + virtual_gateway_name="test-gateway" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the gateway route. +* `mesh_name` - (Required) Name of the service mesh in which the virtual gateway exists. +* `virtual_gateway_name` - (Required) Name of the virtual gateway in which the route exists. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the gateway route. +* `created_date` - Creation date of the gateway route. +* `last_updated_date` - Last update date of the gateway route. +* `resource_owner` - Resource owner's AWS account ID. +* `spec` - Gateway route specification. See the [`aws_appmesh_gateway_route`](/docs/providers/aws/r/appmesh_gateway_route.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appmesh_mesh.html.markdown b/website/docs/cdktf/python/d/appmesh_mesh.html.markdown new file mode 100644 index 00000000000..f2be9838717 --- /dev/null +++ b/website/docs/cdktf/python/d/appmesh_mesh.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_mesh" +description: |- + Terraform data source for managing an AWS App Mesh Mesh. +--- + + + +# Data Source: aws_appmesh_mesh + +The App Mesh Mesh data source allows details of an App Mesh Mesh to be retrieved by its name and optionally the mesh_owner. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appmesh_mesh import DataAwsAppmeshMesh +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsAppmeshMesh(self, "simple", + name="simpleapp" + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appmesh_mesh import DataAwsAppmeshMesh +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + DataAwsAppmeshMesh(self, "simple", + mesh_owner=Token.as_string(current.account_id), + name="simpleapp" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the service mesh. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the service mesh. +* `created_date` - Creation date of the service mesh. 
+* `last_updated_date` - Last update date of the service mesh. +* `resource_owner` - Resource owner's AWS account ID. +* `spec` - Service mesh specification. See the [`aws_appmesh_mesh`](/docs/providers/aws/r/appmesh_mesh.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appmesh_route.html.markdown b/website/docs/cdktf/python/d/appmesh_route.html.markdown new file mode 100644 index 00000000000..662e5716ba5 --- /dev/null +++ b/website/docs/cdktf/python/d/appmesh_route.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_route" +description: |- + Terraform data source for managing an AWS App Mesh Route. +--- + + + +# Data Source: aws_appmesh_route + +The App Mesh Route data source allows details of an App Mesh Route to be retrieved by its name, mesh_name, virtual_router_name, and optionally the mesh_owner. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appmesh_route import DataAwsAppmeshRoute +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAppmeshRoute(self, "test", +            mesh_name="test-mesh", +            name="test-route", +            virtual_router_name="test-router" +        ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the route. +* `mesh_name` - (Required) Name of the service mesh in which the virtual router exists. +* `virtual_router_name` - (Required) Name of the virtual router in which the route exists. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the route. +* `created_date` - Creation date of the route. +* `last_updated_date` - Last update date of the route. +* `resource_owner` - Resource owner's AWS account ID. +* `spec` - Route specification. See the [`aws_appmesh_route`](/docs/providers/aws/r/appmesh_route.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appmesh_virtual_gateway.html.markdown b/website/docs/cdktf/python/d/appmesh_virtual_gateway.html.markdown new file mode 100644 index 00000000000..f302ac973bf --- /dev/null +++ b/website/docs/cdktf/python/d/appmesh_virtual_gateway.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_gateway" +description: |- + Terraform data source for managing an AWS App Mesh Virtual Gateway. +--- + + + +# Data Source: aws_appmesh_virtual_gateway + +Terraform data source for managing an AWS App Mesh Virtual Gateway. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_appmesh_virtual_gateway import DataAwsAppmeshVirtualGateway +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAppmeshVirtualGateway(self, "example", +            mesh_name="mesh-gateway", +            name="example-mesh" +        ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appmesh_virtual_gateway import DataAwsAppmeshVirtualGateway +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        current = DataAwsCallerIdentity(self, "current") +        DataAwsAppmeshVirtualGateway(self, "test", +            mesh_name="example-mesh", +            mesh_owner=Token.as_string(current.account_id), +            name="example.mesh.local" +        ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the virtual gateway. +* `mesh_name` - (Required) Name of the service mesh in which the virtual gateway exists. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the virtual gateway. +* `created_date` - Creation date of the virtual gateway. +* `last_updated_date` - Last update date of the virtual gateway. +* `resource_owner` - Resource owner's AWS account ID. +* `spec` - Virtual gateway specification. See the [`aws_appmesh_virtual_gateway`](/docs/providers/aws/r/appmesh_virtual_gateway.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appmesh_virtual_node.html.markdown b/website/docs/cdktf/python/d/appmesh_virtual_node.html.markdown new file mode 100644 index 00000000000..47c2531a40f --- /dev/null +++ b/website/docs/cdktf/python/d/appmesh_virtual_node.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_node" +description: |- + Terraform data source for managing an AWS App Mesh Virtual Node. +--- + + + +# Data Source: aws_appmesh_virtual_node + +Terraform data source for managing an AWS App Mesh Virtual Node. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appmesh_virtual_node import DataAwsAppmeshVirtualNode +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAppmeshVirtualNode(self, "test", +            mesh_name="example-mesh", +            name="serviceBv1" +        ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the virtual node. +* `mesh_name` - (Required) Name of the service mesh in which the virtual node exists. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the virtual node.
+* `created_date` - Creation date of the virtual node. +* `last_updated_date` - Last update date of the virtual node. +* `resource_owner` - Resource owner's AWS account ID. +* `spec` - Virtual node specification. See the [`aws_appmesh_virtual_node`](/docs/providers/aws/r/appmesh_virtual_node.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appmesh_virtual_router.html.markdown b/website/docs/cdktf/python/d/appmesh_virtual_router.html.markdown new file mode 100644 index 00000000000..4ca1181ad70 --- /dev/null +++ b/website/docs/cdktf/python/d/appmesh_virtual_router.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_router" +description: |- + Terraform data source for managing an AWS App Mesh Virtual Router. +--- + + + +# Data Source: aws_appmesh_virtual_router + +The App Mesh Virtual Router data source allows details of an App Mesh Virtual Router to be retrieved by its name and mesh_name. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appmesh_virtual_router import DataAwsAppmeshVirtualRouter +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAppmeshVirtualRouter(self, "test", +            mesh_name="example-mesh-name", +            name="example-router-name" +        ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the virtual router. +* `mesh_name` - (Required) Name of the mesh in which the virtual router exists. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the virtual router. +* `created_date` - Creation date of the virtual router. +* `last_updated_date` - Last update date of the virtual router. +* `resource_owner` - Resource owner's AWS account ID. +* `spec` - Virtual router specification. See the [`aws_appmesh_virtual_router`](/docs/providers/aws/r/appmesh_virtual_router.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/appmesh_virtual_service.html.markdown b/website/docs/cdktf/python/d/appmesh_virtual_service.html.markdown new file mode 100644 index 00000000000..5d88ffc8066 --- /dev/null +++ b/website/docs/cdktf/python/d/appmesh_virtual_service.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_service" +description: |- + Terraform data source for managing an AWS App Mesh Virtual Service. +--- + + + +# Data Source: aws_appmesh_virtual_service + +The App Mesh Virtual Service data source allows details of an App Mesh Virtual Service to be retrieved by its name, mesh_name, and optionally the mesh_owner. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_appmesh_virtual_service import DataAwsAppmeshVirtualService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsAppmeshVirtualService(self, "test", + mesh_name="example-mesh", + name="example.mesh.local" + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_appmesh_virtual_service import DataAwsAppmeshVirtualService +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + DataAwsAppmeshVirtualService(self, "test", + mesh_name="example-mesh", + mesh_owner=Token.as_string(current.account_id), + name="example.mesh.local" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the virtual service. +* `mesh_name` - (Required) Name of the service mesh in which the virtual service exists. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the virtual service. +* `created_date` - Creation date of the virtual service. +* `last_updated_date` - Last update date of the virtual service. +* `resource_owner` - Resource owner's AWS account ID. +* `spec` - Virtual service specification. See the [`aws_appmesh_virtual_service`](/docs/providers/aws/r/appmesh_virtual_service.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/arn.html.markdown b/website/docs/cdktf/python/d/arn.html.markdown new file mode 100644 index 00000000000..b2f3bddcb86 --- /dev/null +++ b/website/docs/cdktf/python/d/arn.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_arn" +description: |- + Parses an ARN into its constituent parts. +--- + + + +# Data Source: aws_arn + +Parses an ARN into its constituent parts. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_arn import DataAwsArn +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsArn(self, "db_instance", + arn="arn:aws:rds:eu-west-1:123456789012:db:mysql-db" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Required) ARN to parse. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `partition` - Partition that the resource is in. + +* `service` - The [service namespace](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#genref-aws-service-namespaces) that identifies the AWS product. + +* `region` - Region the resource resides in. 
+Note that the ARNs for some resources do not require a region, so this component might be omitted. + +* `account` - The [ID](https://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html) of the AWS account that owns the resource, without the hyphens. + +* `resource` - Content of this part of the ARN varies by service. +It often includes an indicator of the type of resource, for example an IAM user or Amazon RDS database, followed by a slash (/) or a colon (:), followed by the resource name itself. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/auditmanager_control.html.markdown b/website/docs/cdktf/python/d/auditmanager_control.html.markdown new file mode 100644 index 00000000000..a291df27f8b --- /dev/null +++ b/website/docs/cdktf/python/d/auditmanager_control.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_control" +description: |- + Terraform data source for managing an AWS Audit Manager Control. +--- + + + +# Data Source: aws_auditmanager_control + +Terraform data source for managing an AWS Audit Manager Control. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_auditmanager_control import DataAwsAuditmanagerControl +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAuditmanagerControl(self, "example", +            name="1. Risk Management", +            type="Standard" +        ) +``` + +### With Framework Resource + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.auditmanager_framework import AuditmanagerFramework, AuditmanagerFrameworkControlSets, AuditmanagerFrameworkControlSetsControls +from imports.aws.data_aws_auditmanager_control import DataAwsAuditmanagerControl +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        example = DataAwsAuditmanagerControl(self, "example", +            name="1. Risk Management", +            type="Standard" +        ) +        example2 = DataAwsAuditmanagerControl(self, "example2", +            name="2. Personnel", +            type="Standard" +        ) +        aws_auditmanager_framework_example = AuditmanagerFramework(self, "example_2", +            control_sets=[AuditmanagerFrameworkControlSets( +                controls=[AuditmanagerFrameworkControlSetsControls( +                    id=Token.as_string(example.id) +                ) +                ], +                name="example" +            ), AuditmanagerFrameworkControlSets( +                controls=[AuditmanagerFrameworkControlSetsControls( +                    id=Token.as_string(example2.id) +                ) +                ], +                name="example2" +            ) +            ], +            name="example" +        ) +        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. +        aws_auditmanager_framework_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the control. +* `type` - (Required) Type of control. Valid values are `Custom` and `Standard`. + +## Attribute Reference + +See the [`aws_auditmanager_control` resource](/docs/providers/aws/r/auditmanager_control.html) for details on the returned attributes - they are identical.
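+The identifiers returned by this data source are what framework control sets consume, as the example above shows. A brief, hypothetical sketch that surfaces the looked-up control ID as a stack output (the class and output names are illustrative, not part of the provider docs): + +```python +# Hypothetical sketch - exposes the looked-up control ID as a stack output. +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +from imports.aws.data_aws_auditmanager_control import DataAwsAuditmanagerControl +class ControlLookupSketch(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        example = DataAwsAuditmanagerControl(self, "example", +            name="1. Risk Management", +            type="Standard" +        ) +        # The control ID is the value consumed by framework control sets. +        TerraformOutput(self, "control_id", value=example.id) +```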
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/auditmanager_framework.html.markdown b/website/docs/cdktf/python/d/auditmanager_framework.html.markdown new file mode 100644 index 00000000000..7d9bdde02dd --- /dev/null +++ b/website/docs/cdktf/python/d/auditmanager_framework.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_framework" +description: |- + Terraform data source for managing an AWS Audit Manager Framework. +--- + + + +# Data Source: aws_auditmanager_framework + +Terraform data source for managing an AWS Audit Manager Framework. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_auditmanager_framework import DataAwsAuditmanagerFramework +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAuditmanagerFramework(self, "example", +            framework_type="Standard", +            name="Essential Eight" +        ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the framework. +* `framework_type` - (Required) Type of framework. Valid values are `Custom` and `Standard`. + +## Attribute Reference + +See the [`aws_auditmanager_framework` resource](/docs/providers/aws/r/auditmanager_framework.html) for details on the returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/autoscaling_group.html.markdown b/website/docs/cdktf/python/d/autoscaling_group.html.markdown new file mode 100644 index 00000000000..d796e45d072 --- /dev/null +++ b/website/docs/cdktf/python/d/autoscaling_group.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_group" +description: |- + Get information on an Amazon EC2 Autoscaling Group. +--- + + + +# Data Source: aws_autoscaling_group + +Use this data source to get information on an existing autoscaling group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_autoscaling_group import DataAwsAutoscalingGroup +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAutoscalingGroup(self, "foo", +            name="foo" +        ) +``` + +## Argument Reference + +* `name` - Specify the exact name of the desired autoscaling group. + +## Attribute Reference + +~> **NOTE:** Some values are not always set and may not be available for +interpolation. + +* `arn` - ARN of the Auto Scaling group. +* `availability_zones` - One or more Availability Zones for the group. +* `default_cooldown` - Amount of time, in seconds, after a scaling activity completes before another scaling activity can start. +* `desired_capacity` - Desired size of the group. +* `desired_capacity_type` - The unit of measurement for the value returned for `desired_capacity`. +* `enabled_metrics` - List of metrics enabled for collection.
+* `health_check_grace_period` - The amount of time, in seconds, that Amazon EC2 Auto Scaling waits before checking the health status of an EC2 instance that has come into service. +* `health_check_type` - Service to use for the health checks. The valid values are EC2 and ELB. +* `id` - Name of the Auto Scaling Group. +* `launch_configuration` - The name of the associated launch configuration. +* `launch_template` - List of launch templates for the group. + * `id` - ID of the launch template. + * `name` - Name of the launch template. + * `version` - Template version. +* `load_balancers` - One or more load balancers associated with the group. +* `max_instance_lifetime` - Maximum amount of time, in seconds, that an instance can be in service. +* `max_size` - Maximum size of the group. +* `min_size` - Minimum size of the group. +* `mixed_instances_policy` - List of mixed instances policy objects for the group. + * `instances_distribution` - List of instances distribution objects. + * `on_demand_allocation_strategy` - Strategy used when launching on-demand instances. + * `on_demand_base_capacity` - Absolute minimum amount of desired capacity that must be fulfilled by on-demand instances. + * `spot_allocation_strategy` - Strategy used when launching Spot instances. + * `spot_instance_pools` - Number of Spot pools per availability zone to allocate capacity. + * `spot_max_price` - Maximum price per unit hour that the user is willing to pay for the Spot instances. + * `launch_template` - List of launch templates along with the overrides. + * `launch_template_specification` - List of launch template specification objects. + * `launch_template_id` - ID of the launch template. + * `launch_template_name` - Name of the launch template. + * `version` - Template version. + * `override` - List of properties overriding the same properties in the launch template. + * `instance_requirements` - List of instance requirements objects. + * `accelerator_count` - List of objects describing the minimum and maximum number of accelerators for an instance type. + * `min` - Minimum. + * `max` - Maximum. + * `accelerator_manufacturers` - List of accelerator manufacturer names. + * `accelerator_names` - List of accelerator names. + * `accelerator_total_memory_mib` - List of objects describing the minimum and maximum total memory of the accelerators. + * `accelerator_types` - List of accelerator types. + * `allowed_instance_types` - List of instance types to apply the specified attributes against. + * `bare_metal` - Indicates whether bare metal instances are included, excluded, or required. + * `baseline_ebs_bandwidth_mbps` - List of objects describing the minimum and maximum baseline EBS bandwidth (Mbps). + * `min` - Minimum. + * `max` - Maximum. + * `burstable_performance` - Indicates whether burstable performance instance types are included, excluded, or required. + * `cpu_manufacturers` - List of CPU manufacturer names. + * `excluded_instance_types` - List of excluded instance types. + * `instance_generations` - List of instance generation names. + * `local_storage` - Indicates whether instance types with instance store volumes are included, excluded, or required. + * `local_storage_types` - List of local storage type names. + * `memory_gib_per_vcpu` - List of objects describing the minimum and maximum amount of memory (GiB) per vCPU. + * `min` - Minimum. + * `max` - Maximum. + * `memory_mib` - List of objects describing the minimum and maximum amount of memory (MiB). + * `min` - Minimum. + * `max` - Maximum.
+ * `network_bandwidth_gbps` - List of objects describing the minimum and maximum amount of network bandwidth (Gbps). + * `min` - Minimum. + * `max` - Maximum. + * `network_interface_count` - List of objects describing the minimum and maximum amount of network interfaces. + * `min` - Minimum. + * `max` - Maximum. + * `on_demand_max_price_percentage_over_lowest_price` - Price protection threshold for On-Demand Instances. + * `require_hibernate_support` - Indicates whether instance types must support On-Demand Instance Hibernation. + * `spot_max_price_percentage_over_lowest_price` - Price protection threshold for Spot Instances. + * `total_local_storage_gb` - List of objects describing the minimum and maximum total storage (GB). + * `min` - Minimum. + * `max` - Maximum. + * `vcpu_count` - List of objects describing the minimum and maximum number of vCPUs. + * `min` - Minimum. + * `max` - Maximum. + * `instance_type` - Overriding instance type. + * `launch_template_specification` - List of overriding launch template specification objects. + * `launch_template_id` - ID of the launch template. + * `launch_template_name` - Name of the launch template. + * `version` - Template version. + * `weighted_capacity` - Number of capacity units, which gives the instance type a proportional weight to other instance types. +* `name` - Name of the Auto Scaling Group. +* `placement_group` - Name of the placement group into which to launch your instances, if any. For more information, see [Placement Groups](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/placement-groups.html) in the Amazon Elastic Compute Cloud User Guide. +* `predicted_capacity` - Predicted capacity of the group. +* `service_linked_role_arn` - ARN of the service-linked role that the Auto Scaling group uses to call other AWS services on your behalf. +* `status` - Current state of the group when DeleteAutoScalingGroup is in progress. +* `suspended_processes` - List of suspended processes for the Auto Scaling Group. +* `tag` - List of tags for the group. + * `key` - Key. + * `value` - Value. + * `propagate_at_launch` - Whether the tag is propagated to Amazon EC2 instances launched via this ASG. +* `target_group_arns` - ARNs of the target groups for your load balancer. +* `termination_policies` - The termination policies for the group. +* `traffic_source` - Traffic sources. + * `identifier` - Identifies the traffic source. For Application Load Balancers, Gateway Load Balancers, Network Load Balancers, and VPC Lattice, this will be the Amazon Resource Name (ARN) for a target group in this account and Region. For Classic Load Balancers, this will be the name of the Classic Load Balancer in this account and Region. + * `type` - Traffic source type. +* `vpc_zone_identifier` - Subnet IDs for the group, separated by commas. +* `warm_pool` - List of warm pool configuration objects. + * `instance_reuse_policy` - List of instance reuse policy objects. + * `reuse_on_scale_in` - Indicates whether instances in the Auto Scaling group can be returned to the warm pool on scale in. + * `max_group_prepared_policy` - Total maximum number of instances that are allowed to be in the warm pool or in any state except Terminated for the Auto Scaling group. + * `min_size` - Minimum number of instances to maintain in the warm pool. + * `pool_state` - Instance state to transition to after the lifecycle actions are complete. +* `warm_pool_size` - Current size of the warm pool.
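+Because the lookup is keyed only by the group name, a common pattern is to surface a few of these attributes as stack outputs. A minimal sketch, assuming the `foo` group from the example above (the class and output names are hypothetical): + +```python +# Hypothetical sketch - exposes selected attributes of the looked-up group. +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +from imports.aws.data_aws_autoscaling_group import DataAwsAutoscalingGroup +class AsgLookupSketch(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        foo = DataAwsAutoscalingGroup(self, "foo", +            name="foo" +        ) +        # Surface the group's ARN and desired capacity as outputs. +        TerraformOutput(self, "asg_arn", value=foo.arn) +        TerraformOutput(self, "asg_desired_capacity", value=foo.desired_capacity) +```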
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/autoscaling_groups.html.markdown b/website/docs/cdktf/python/d/autoscaling_groups.html.markdown new file mode 100644 index 00000000000..95345408ba5 --- /dev/null +++ b/website/docs/cdktf/python/d/autoscaling_groups.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_groups" +description: |- + Provides a list of Autoscaling Groups within a specific region. +--- + + + +# Data Source: aws_autoscaling_groups + +The Autoscaling Groups data source allows access to the list of AWS +ASGs within a specific region. This will allow you to pass a list of AutoScaling Groups to other resources. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_notification import AutoscalingNotification +from imports.aws.data_aws_autoscaling_groups import DataAwsAutoscalingGroups, DataAwsAutoscalingGroupsFilter +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        groups = DataAwsAutoscalingGroups(self, "groups", +            filter=[DataAwsAutoscalingGroupsFilter( +                name="tag:Team", +                values=["Pets"] +            ), DataAwsAutoscalingGroupsFilter( +                name="tag-key", +                values=["Environment"] +            ) +            ] +        ) +        AutoscalingNotification(self, "slack_notifications", +            group_names=Token.as_list(groups.names), +            notifications=["autoscaling:EC2_INSTANCE_LAUNCH", "autoscaling:EC2_INSTANCE_TERMINATE", "autoscaling:EC2_INSTANCE_LAUNCH_ERROR", "autoscaling:EC2_INSTANCE_TERMINATE_ERROR" +            ], +            topic_arn="TOPIC ARN" +        ) +``` + +## Argument Reference + +* `names` - (Optional) List of autoscaling group names. +* `filter` - (Optional) Filter used to scope the list e.g., by tags. See [related docs](http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_Filter.html). + * `name` - (Required) Name of the DescribeAutoScalingGroup filter. The recommended values are: `tag-key`, `tag-value`, and `tag:` + * `values` - (Required) Value of the filter. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - List of the Autoscaling Group ARNs in the current region. +* `id` - AWS Region. +* `names` - List of the Autoscaling Groups in the current region. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/availability_zone.html.markdown b/website/docs/cdktf/python/d/availability_zone.html.markdown new file mode 100644 index 00000000000..0d1450ead57 --- /dev/null +++ b/website/docs/cdktf/python/d/availability_zone.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_availability_zone" +description: |- + Provides details about a specific availability zone +--- + + + +# Data Source: aws_availability_zone + +`aws_availability_zone` provides details about a specific availability zone (AZ) +in the current region. + +This can be used both to validate an availability zone given in a variable +and to split the AZ name into its component parts of an AWS region and an +AZ identifier letter. The latter may be useful e.g., for implementing a +consistent subnet numbering scheme across several regions by mapping both +the region and the subnet letter to network numbers.
+ +This is different from the `aws_availability_zones` (plural) data source, +which provides a list of the available zones. + +## Example Usage + +The following example shows how this data source might be used to derive +VPC and subnet CIDR prefixes systematically for an availability zone. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, property_access, Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_availability_zone import DataAwsAvailabilityZone +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + az_number = TerraformVariable(self, "az_number", + default=[{ + "a": 1, + "b": 2, + "c": 3, + "d": 4, + "e": 5, + "f": 6 + } + ] + ) + region_number = TerraformVariable(self, "region_number", + default=[{ + "ap-northeast-1": 5, + "eu-central-1": 4, + "us-east-1": 1, + "us-west-1": 2, + "us-west-2": 3 + } + ] + ) + example = DataAwsAvailabilityZone(self, "example", + name="eu-central-1a" + ) + aws_vpc_example = Vpc(self, "example_3", + cidr_block=Token.as_string( + Fn.cidrsubnet("10.0.0.0/8", 4, + Token.as_number(property_access(region_number.value, [example.region])))) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_vpc_example.override_logical_id("example") + aws_subnet_example = Subnet(self, "example_4", + cidr_block=Token.as_string( + Fn.cidrsubnet( + Token.as_string(aws_vpc_example.cidr_block), 4, + Token.as_number(property_access(az_number.value, [example.name_suffix])))), + vpc_id=Token.as_string(aws_vpc_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_subnet_example.override_logical_id("example") +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +availability zones. The given filters must match exactly one availability +zone whose data will be exported as attributes. + +* `all_availability_zones` - (Optional) Set to `true` to include all Availability Zones and Local Zones regardless of your opt in status. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. +* `name` - (Optional) Full name of the availability zone to select. +* `state` - (Optional) Specific availability zone state to require. May be any of `"available"`, `"information"` or `"impaired"`. +* `zone_id` - (Optional) Zone ID of the availability zone to select. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeAvailabilityZones API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAvailabilityZones.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. 
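+Beyond the name-based lookup shown above, these arguments can also select a zone by its ID. A minimal sketch, assuming a hypothetical zone ID (this example is illustrative and not generated by `cdktf convert`): + +```python +# Hypothetical sketch - selects a single availability zone by zone ID and state. +from constructs import Construct +from cdktf import TerraformStack +from imports.aws.data_aws_availability_zone import DataAwsAvailabilityZone +class AzLookupSketch(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAvailabilityZone(self, "by_zone_id", +            state="available", +            zone_id="euc1-az2"  # hypothetical zone ID +        ) +```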
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `group_name` - For Availability Zones, this is the same value as the Region name. For Local Zones, the name of the associated group, for example `us-west-2-lax-1`. +* `name_suffix` - Part of the AZ name that appears after the region name, uniquely identifying the AZ within its region. +For Availability Zones this is usually a single letter, for example `a` for the `us-west-2a` zone. +For Local and Wavelength Zones this is a longer string, for example `wl1-sfo-wlz-1` for the `us-west-2-wl1-sfo-wlz-1` zone. +* `network_border_group` - The name of the location from which the address is advertised. +* `opt_in_status` - For Availability Zones, this always has the value of `opt-in-not-required`. For Local Zones, this is the opt in status. The possible values are `opted-in` and `not-opted-in`. +* `parent_zone_id` - ID of the zone that handles some of the Local Zone or Wavelength Zone control plane operations, such as API calls. +* `parent_zone_name` - Name of the zone that handles some of the Local Zone or Wavelength Zone control plane operations, such as API calls. +* `region` - Region where the selected availability zone resides. This is always the region selected on the provider, since this data source searches only within that region. +* `zone_type` - Type of zone. Values are `availability-zone`, `local-zone`, and `wavelength-zone`. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/availability_zones.html.markdown b/website/docs/cdktf/python/d/availability_zones.html.markdown new file mode 100644 index 00000000000..7b48c22a226 --- /dev/null +++ b/website/docs/cdktf/python/d/availability_zones.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_availability_zones" +description: |- + Provides a list of Availability Zones which can be used by an AWS account. +--- + + + +# Data Source: aws_availability_zones + +The Availability Zones data source allows access to the list of AWS +Availability Zones which can be accessed by an AWS account within the region +configured in the provider. + +This is different from the `aws_availability_zone` (singular) data source, +which provides some details about a specific availability zone. + +-> When [Local Zones](https://aws.amazon.com/about-aws/global-infrastructure/localzones/) are enabled in a region, by default the API and this data source include both Local Zones and Availability Zones. To return only Availability Zones, see the example section below. + +## Example Usage + +### By State + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.subnet import Subnet +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name, *, vpc_id, vpc_id1): +        super().__init__(scope, name) +        available = DataAwsAvailabilityZones(self, "available", +            state="available" +        ) +        Subnet(self, "primary", +            availability_zone=Token.as_string(property_access(available.names, ["0"])), +            vpc_id=vpc_id +        ) +        Subnet(self, "secondary", +            availability_zone=Token.as_string(property_access(available.names, ["1"])), +            vpc_id=vpc_id1 +        ) +``` + +### By Filter + +All Local Zones (regardless of opt-in status): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones, DataAwsAvailabilityZonesFilter +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAvailabilityZones(self, "example", +            all_availability_zones=True, +            filter=[DataAwsAvailabilityZonesFilter( +                name="opt-in-status", +                values=["not-opted-in", "opted-in"] +            ) +            ] +        ) +``` + +Only Availability Zones (no Local Zones): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones, DataAwsAvailabilityZonesFilter +class MyConvertedCode(TerraformStack): +    def __init__(self, scope, name): +        super().__init__(scope, name) +        DataAwsAvailabilityZones(self, "example", +            filter=[DataAwsAvailabilityZonesFilter( +                name="opt-in-status", +                values=["opt-in-not-required"] +            ) +            ] +        ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `all_availability_zones` - (Optional) Set to `true` to include all Availability Zones and Local Zones regardless of your opt in status. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. +* `exclude_names` - (Optional) List of Availability Zone names to exclude. +* `exclude_zone_ids` - (Optional) List of Availability Zone IDs to exclude. +* `state` - (Optional) Allows filtering the list of Availability Zones based on their +current state. Can be either `"available"`, `"information"`, `"impaired"` or +`"unavailable"`. By default the list includes a complete set of Availability Zones +to which the underlying AWS account has access, regardless of their state. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeAvailabilityZones API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAvailabilityZones.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `group_names` - A set of the Availability Zone Group names. For Availability Zones, this is the same value as the Region name.
For Local Zones, the name of the associated group, for example `us-west-2-lax-1`. +* `id` - Region of the Availability Zones. +* `names` - List of the Availability Zone names available to the account. +* `zone_ids` - List of the Availability Zone IDs available to the account. + +Note that the indexes of Availability Zone names and IDs correspond. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/backup_framework.html.markdown b/website/docs/cdktf/python/d/backup_framework.html.markdown new file mode 100644 index 00000000000..f5e12d9dc9e --- /dev/null +++ b/website/docs/cdktf/python/d/backup_framework.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_framework" +description: |- + Provides details about an AWS Backup Framework. +--- + + + +# Data Source: aws_backup_framework + +Use this data source to get information on an existing backup framework. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_backup_framework import DataAwsBackupFramework +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsBackupFramework(self, "example", + name="tf_example_backup_framework_name" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Backup framework name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the backup framework. +* `control` - One or more control blocks that make up the framework. Each control in the list has a name, input parameters, and scope. Detailed below. +* `creation_time` - Date and time that a framework is created, in Unix format and Coordinated Universal Time (UTC). +* `deployment_status` - Deployment status of a framework. The statuses are: `CREATE_IN_PROGRESS` | `UPDATE_IN_PROGRESS` | `DELETE_IN_PROGRESS` | `COMPLETED`| `FAILED`. +* `description` - Description of the framework. +* `id` - ID of the framework. +* `status` - Framework consists of one or more controls. Each control governs a resource, such as backup plans, backup selections, backup vaults, or recovery points. You can also turn AWS Config recording on or off for each resource. The statuses are: `ACTIVE`, `PARTIALLY_ACTIVE`, `INACTIVE`, `UNAVAILABLE`. For more information refer to the [AWS documentation for Framework Status](https://docs.aws.amazon.com/aws-backup/latest/devguide/API_DescribeFramework.html#Backup-DescribeFramework-response-FrameworkStatus) +* `tags` - Metadata that helps organize the frameworks you create. + +### Control Attributes + +`control` has the following attributes: + +* `input_parameter` - One or more input parameter blocks. An example of a control with two parameters is: "backup plan frequency is at least daily and the retention period is at least 1 year". The first parameter is daily. The second parameter is 1 year. Detailed below. +* `name` - Name of a control. +* `scope` - Scope of a control. The control scope defines what the control will evaluate. 
Three examples of control scopes are: a specific backup plan, all backup plans with a specific tag, or all backup plans. Detailed below. + +### Input Parameter Attributes + +`input_parameter` has the following attributes: + +* `name` - Name of a parameter, for example, BackupPlanFrequency. +* `value` - Value of parameter, for example, hourly. + +### Scope Attributes + +`scope` has the following attributes: + +* `compliance_resource_ids` - The ID of the only AWS resource that you want your control scope to contain. +* `compliance_resource_types` - Describes whether the control scope includes one or more types of resources, such as EFS or RDS. +* `tags` - Tag key-value pair applied to those AWS resources that you want to trigger an evaluation for a rule. A maximum of one key-value pair can be provided. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/backup_plan.html.markdown b/website/docs/cdktf/python/d/backup_plan.html.markdown new file mode 100644 index 00000000000..f0a33ca24f9 --- /dev/null +++ b/website/docs/cdktf/python/d/backup_plan.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_plan" +description: |- + Provides details about an AWS Backup plan. +--- + + + +# Data Source: aws_backup_plan + +Use this data source to get information on an existing backup plan. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_backup_plan import DataAwsBackupPlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsBackupPlan(self, "example", + plan_id="tf_example_backup_plan_id" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `plan_id` - (Required) Backup plan ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the backup plan. +* `name` - Display name of a backup plan. +* `tags` - Metadata that you can assign to help organize the plans you create. +* `version` - Unique, randomly generated, Unicode, UTF-8 encoded string that serves as the version ID of the backup plan. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/backup_report_plan.html.markdown b/website/docs/cdktf/python/d/backup_report_plan.html.markdown new file mode 100644 index 00000000000..2b34dd375b9 --- /dev/null +++ b/website/docs/cdktf/python/d/backup_report_plan.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_report_plan" +description: |- + Provides details about an AWS Backup Report Plan. +--- + + + +# Data Source: aws_backup_report_plan + +Use this data source to get information on an existing backup report plan. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_backup_report_plan import DataAwsBackupReportPlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsBackupReportPlan(self, "example", + name="tf_example_backup_report_plan_name" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Backup report plan name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the backup report plan. +* `creation_time` - Date and time that a report plan is created, in Unix format and Coordinated Universal Time (UTC). +* `deployment_status` - Deployment status of a report plan. The statuses are: `CREATE_IN_PROGRESS` | `UPDATE_IN_PROGRESS` | `DELETE_IN_PROGRESS` | `COMPLETED`. +* `description` - Description of the report plan. +* `id` - ID of the report plan. +* `report_delivery_channel` - An object that contains information about where and how to deliver your reports, specifically your Amazon S3 bucket name, S3 key prefix, and the formats of your reports. Detailed below. +* `report_setting` - An object that identifies the report template for the report. Reports are built using a report template. Detailed below. +* `tags` - Metadata that you can assign to help organize the report plans you create. + +### Report Delivery Channel Attributes + +`report_delivery_channel` has the following attributes: + +* `formats` - List of the format of your reports: CSV, JSON, or both. +* `s3_bucket_name` - Unique name of the S3 bucket that receives your reports. +* `s3_key_prefix` - Prefix for where Backup Audit Manager delivers your reports to Amazon S3. The prefix is this part of the following path: s3://your-bucket-name/prefix/Backup/us-west-2/year/month/day/report-name. + +### Report Setting Attributes + +`report_setting` has the following attributes: + +* `accounts` - (Optional) Specifies the list of accounts a report covers. +* `framework_arns` - ARNs of the frameworks a report covers. +* `number_of_frameworks` - Specifies the number of frameworks a report covers. +* `organization_units` - (Optional) Specifies the list of Organizational Units a report covers. +* `regions` - (Optional) Specifies the list of regions a report covers. +* `report_template` - Identifies the report template for the report. Reports are built using a report template. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/backup_selection.html.markdown b/website/docs/cdktf/python/d/backup_selection.html.markdown new file mode 100644 index 00000000000..b20dfef5348 --- /dev/null +++ b/website/docs/cdktf/python/d/backup_selection.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_selection" +description: |- + Provides details about an AWS Backup selection. +--- + + + +# Data Source: aws_backup_selection + +Use this data source to get information on an existing backup selection. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_backup_selection import DataAwsBackupSelection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsBackupSelection(self, "example",
+            plan_id=Token.as_string(data_aws_backup_plan_example.id),
+            selection_id="selection-id-example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `plan_id` - (Required) Backup plan ID associated with the selection of resources.
+* `selection_id` - (Required) Backup selection ID.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` - Display name of a resource selection document.
+* `iam_role_arn` - ARN of the IAM role that AWS Backup uses to authenticate when restoring and backing up the target resource. See the [AWS Backup Developer Guide](https://docs.aws.amazon.com/aws-backup/latest/devguide/access-control.html#managed-policies) for additional information about using AWS managed policies or creating custom policies attached to the IAM role.
+* `resources` - An array of strings that either contain Amazon Resource Names (ARNs) or match patterns of resources to assign to a backup plan.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/backup_vault.html.markdown
new file mode 100644
index 00000000000..50dc7ecc416
--- /dev/null
+++ b/website/docs/cdktf/python/d/backup_vault.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_vault"
+description: |-
+  Provides details about an AWS Backup vault.
+---
+
+
+
+# Data Source: aws_backup_vault
+
+Use this data source to get information on an existing backup vault.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_backup_vault import DataAwsBackupVault
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsBackupVault(self, "example",
+            name="example_backup_vault"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the backup vault.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the vault.
+* `kms_key_arn` - Server-side encryption key that is used to protect your backups.
+* `recovery_points` - Number of recovery points that are stored in a backup vault.
+* `tags` - Metadata that you can assign to help organize the resources that you create.
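+
+As a minimal sketch (the construct and output names are illustrative, and `TerraformOutput` is assumed to be imported from `cdktf`), the vault's computed attributes can be exported for use elsewhere:
+
+```python
+# Hypothetical continuation of the example above: export attributes of the
+# fetched vault so other configurations can consume them.
+vault = DataAwsBackupVault(self, "example_lookup",
+    name="example_backup_vault"
+)
+TerraformOutput(self, "vault_arn", value=vault.arn)
+TerraformOutput(self, "vault_recovery_points", value=vault.recovery_points)
+```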
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/batch_compute_environment.html.markdown
new file mode 100644
index 00000000000..e735798ccf8
--- /dev/null
+++ b/website/docs/cdktf/python/d/batch_compute_environment.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "Batch"
+layout: "aws"
+page_title: "AWS: aws_batch_compute_environment"
+description: |-
+  Provides details about a batch compute environment
+---
+
+
+
+# Data Source: aws_batch_compute_environment
+
+The Batch Compute Environment data source allows access to details of a specific
+compute environment within AWS Batch.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_batch_compute_environment import DataAwsBatchComputeEnvironment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsBatchComputeEnvironment(self, "batch-mongo",
+            compute_environment_name="batch-mongo-production"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `compute_environment_name` - (Required) Name of the Batch Compute Environment.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the compute environment.
+* `ecs_cluster_arn` - ARN of the underlying Amazon ECS cluster used by the compute environment.
+* `service_role` - ARN of the IAM role that allows AWS Batch to make calls to other AWS services on your behalf.
+* `type` - Type of the compute environment (for example, `MANAGED` or `UNMANAGED`).
+* `status` - Current status of the compute environment (for example, `CREATING` or `VALID`).
+* `status_reason` - Short, human-readable string to provide additional details about the current status of the compute environment.
+* `state` - State of the compute environment (for example, `ENABLED` or `DISABLED`). If the state is `ENABLED`, then the compute environment accepts jobs from a queue and can scale out automatically based on queues.
+* `tags` - Key-value map of resource tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/batch_job_queue.html.markdown
new file mode 100644
index 00000000000..64232369eef
--- /dev/null
+++ b/website/docs/cdktf/python/d/batch_job_queue.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "Batch"
+layout: "aws"
+page_title: "AWS: aws_batch_job_queue"
+description: |-
+  Provides details about a batch job queue
+---
+
+
+
+# Data Source: aws_batch_job_queue
+
+The Batch Job Queue data source allows access to details of a specific
+job queue within AWS Batch.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_batch_job_queue import DataAwsBatchJobQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsBatchJobQueue(self, "test-queue",
+            name="tf-test-batch-job-queue"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the job queue.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the job queue.
+* `scheduling_policy_arn` - The ARN of the fair share scheduling policy. If this attribute has a value, the job queue uses a fair share scheduling policy. If this attribute does not have a value, the job queue uses a first in, first out (FIFO) scheduling policy.
+* `status` - Current status of the job queue (for example, `CREATING` or `VALID`).
+* `status_reason` - Short, human-readable string to provide additional details about the current status
+  of the job queue.
+* `state` - Describes the ability of the queue to accept new jobs (for example, `ENABLED` or `DISABLED`).
+* `tags` - Key-value map of resource tags.
+* `priority` - Priority of the job queue. Job queues with a higher priority are evaluated first when
+  associated with the same compute environment.
+* `compute_environment_order` - The compute environments that are attached to the job queue and the order in
+  which job placement is preferred. Compute environments are selected for job placement in ascending order.
+    * `compute_environment_order.#.order` - The order of the compute environment.
+    * `compute_environment_order.#.compute_environment` - The ARN of the compute environment.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/batch_scheduling_policy.html.markdown
new file mode 100644
index 00000000000..048b5127144
--- /dev/null
+++ b/website/docs/cdktf/python/d/batch_scheduling_policy.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "Batch"
+layout: "aws"
+page_title: "AWS: aws_batch_scheduling_policy"
+description: |-
+  Provides details about a Batch Scheduling Policy
+---
+
+
+
+# Data Source: aws_batch_scheduling_policy
+
+The Batch Scheduling Policy data source allows access to details of a specific Scheduling Policy within AWS Batch.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_batch_scheduling_policy import DataAwsBatchSchedulingPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsBatchSchedulingPolicy(self, "test",
+            arn="arn:aws:batch:us-east-1:012345678910:scheduling-policy/example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Required) ARN of the scheduling policy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `fairshare_policy` - Fairshare policy block specifies the `compute_reservation`, `share_delay_seconds`, and `share_distribution` of the scheduling policy. The `fairshare_policy` block is documented below.
+* `name` - Name of the scheduling policy.
+* `tags` - Key-value map of resource tags.
+
+A `fairshare_policy` block supports the following arguments:
+
+* `compute_reservation` - Value used to reserve some of the available maximum vCPU for fair share identifiers that have not yet been used. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html).
+* `share_delay_seconds` - Time period to use to calculate a fair share percentage for each fair share identifier in use, in seconds. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html).
+* `share_distribution` - One or more share distribution blocks which define the weights for the fair share identifiers for the fair share policy. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html). The `share_distribution` block is documented below.
+
+A `share_distribution` block supports the following arguments:
+
+* `share_identifier` - Fair share identifier or fair share identifier prefix. For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html).
+* `weight_factor` - Weight factor for the fair share identifier. For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/billing_service_account.html.markdown
new file mode 100644
index 00000000000..04e9160f976
--- /dev/null
+++ b/website/docs/cdktf/python/d/billing_service_account.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Meta Data Sources"
+layout: "aws"
+page_title: "AWS: aws_billing_service_account"
+description: |-
+  Get AWS Billing Service Account
+---
+
+
+
+# Data Source: aws_billing_service_account
+
+Use this data source to get the Account ID of the [AWS Billing and Cost Management Service Account](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/billing-getting-started.html#step-2) for the purpose of permitting it in an S3 bucket policy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_billing_service_account import DataAwsBillingServiceAccount
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+from imports.aws.s3_bucket_policy import S3BucketPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        billing_logs = S3Bucket(self, "billing_logs",
+            bucket="my-billing-tf-test-bucket"
+        )
+        S3BucketAcl(self, "billing_logs_acl",
+            acl="private",
+            bucket=billing_logs.id
+        )
+        main = DataAwsBillingServiceAccount(self, "main")
+        allow_billing_logging = DataAwsIamPolicyDocument(self, "allow_billing_logging",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:GetBucketAcl", "s3:GetBucketPolicy"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(main.arn)],
+                    type="AWS"
+                )
+                ],
+                resources=[billing_logs.arn]
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["s3:PutObject"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(main.arn)],
+                    type="AWS"
+                )
+                ],
+                resources=["${" + billing_logs.arn + "}/*"]
+            )
+            ]
+        )
+        aws_s3_bucket_policy_allow_billing_logging = S3BucketPolicy(self, "allow_billing_logging_4",
+            bucket=billing_logs.id,
+            policy=Token.as_string(allow_billing_logging.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_policy_allow_billing_logging.override_logical_id("allow_billing_logging")
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the AWS billing service account.
+* `arn` - ARN of the AWS billing service account.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/budgets_budget.html.markdown
new file mode 100644
index 00000000000..9d65a9b8cff
--- /dev/null
+++ b/website/docs/cdktf/python/d/budgets_budget.html.markdown
@@ -0,0 +1,149 @@
+---
+subcategory: "Web Services Budgets"
+layout: "aws"
+page_title: "AWS: aws_budgets_budget"
+description: |-
+  Terraform data source for managing an AWS Budgets Budget.
+---
+
+
+
+# Data Source: aws_budgets_budget
+
+Terraform data source for managing an AWS Budgets Budget.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_budgets_budget import DataAwsBudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsBudgetsBudget(self, "test",
+            name=Token.as_string(aws_budgets_budget_test.name)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - The name of a budget. Unique within accounts.
+
+The following arguments are optional:
+
+* `account_id` - The ID of the target account for the budget. Will use current user's account_id by default if omitted.
+* `name_prefix` - The prefix of the name of a budget. Unique within accounts.
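+
+As a further sketch (the `monthly-cost-` prefix is hypothetical), a budget can also be located by `name_prefix`, and computed attributes such as `budget_exceeded` (documented below) can then be exported. This assumes the same stack as the example above, plus `TerraformOutput` imported from `cdktf`:
+
+```python
+# Hypothetical lookup by prefix rather than by exact name.
+by_prefix = DataAwsBudgetsBudget(self, "by_prefix",
+    name_prefix="monthly-cost-"
+)
+# `budget_exceeded` is one of the computed attributes documented below.
+TerraformOutput(self, "budget_exceeded",
+    value=by_prefix.budget_exceeded
+)
+```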
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `auto_adjust_data` - Object containing [Auto Adjust Data](#auto-adjust-data) which determines the budget amount for an auto-adjusting budget.
+* `budget_exceeded` - Boolean indicating whether this budget has been exceeded.
+* `budget_limit` - The total amount of cost, usage, RI utilization, RI coverage, Savings Plans utilization, or Savings Plans coverage that you want to track with your budget. Contains object [Spend](#spend).
+* `budget_type` - Whether this budget tracks monetary cost or usage.
+* `calculated_spend` - The spend objects that are associated with this budget. The [actualSpend](#actual-spend) tracks how much you've used in cost, usage, RI units, or Savings Plans units, and the [forecastedSpend](#forecasted-spend) tracks how much you're predicted to spend based on your historical usage profile.
+* `cost_filter` - A list of [CostFilter](#cost-filter) name/value pairs to apply to the budget.
+* `cost_types` - Object containing [CostTypes](#cost-types): the types of cost included in a budget, such as tax and subscriptions.
+* `notification` - Object containing [Budget Notifications](#budget-notification). Can be used multiple times to define more than one budget notification.
+* `planned_limit` - Object containing [Planned Budget Limits](#planned-budget-limits). Can be used multiple times to plan more than one budget limit. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation.
+* `time_period_end` - The end of the time period covered by the budget. There are no restrictions on the end date. Format: `2017-01-01_12:00`.
+* `time_period_start` - The start of the time period covered by the budget. If you don't specify a start date, AWS defaults to the start of your chosen time period. The start date must come before the end date. Format: `2017-01-01_12:00`.
+* `time_unit` - The length of time until a budget resets the actual and forecasted spend. Valid values: `MONTHLY`, `QUARTERLY`, `ANNUALLY`, and `DAILY`.
+
+### Actual Spend
+
+The amount of cost, usage, RI units, or Savings Plans units that you used. Type is [Spend](#spend).
+
+### Auto Adjust Data
+
+The parameters that determine the budget amount for an auto-adjusting budget.
+
+* `auto_adjust_type` (Required) - The string that defines whether your budget auto-adjusts based on historical or forecasted data. Valid values: `FORECAST`, `HISTORICAL`.
+* `historical_options` (Optional) - Configuration block of [Historical Options](#historical-options). Required for `auto_adjust_type` of `HISTORICAL`. Configuration block that defines the historical data that your auto-adjusting budget is based on.
+* `last_auto_adjust_time` (Optional) - The last time that your budget was auto-adjusted.
+
+### Budget Notification
+
+Valid keys for `notification` parameter.
+
+* `comparison_operator` - (Required) Comparison operator to use to evaluate the condition. Can be `LESS_THAN`, `EQUAL_TO` or `GREATER_THAN`.
+* `threshold` - (Required) Threshold when the notification should be sent.
+* `threshold_type` - (Required) What kind of threshold is defined. Can be `PERCENTAGE` or `ABSOLUTE_VALUE`.
+* `notification_type` - (Required) What kind of budget value to notify on. Can be `ACTUAL` or `FORECASTED`.
+* `subscriber_email_addresses` - (Optional) Email addresses to notify. Either this or `subscriber_sns_topic_arns` is required.
+* `subscriber_sns_topic_arns` - (Optional) SNS topics to notify. Either this or `subscriber_email_addresses` is required.
+
+### Cost Filter
+
+Based on your choice of budget type, you can choose one or more of the available budget filters.
+
+* `PurchaseType`
+* `UsageTypeGroup`
+* `Service`
+* `Operation`
+* `UsageType`
+* `BillingEntity`
+* `CostCategory`
+* `LinkedAccount`
+* `TagKeyValue`
+* `LegalEntityName`
+* `InvoicingEntity`
+* `AZ`
+* `Region`
+* `InstanceType`
+
+Refer to [AWS CostFilter documentation](https://docs.aws.amazon.com/cost-management/latest/userguide/budgets-create-filters.html) for further detail.
+
+### Cost Types
+
+Valid keys for `cost_types` parameter.
+
+* `include_credit` - Whether to include credits in the cost budget. Defaults to `true`.
+* `include_discount` - Whether a budget includes discounts. Defaults to `true`.
+* `include_other_subscription` - Whether to include other subscription costs in the cost budget. Defaults to `true`.
+* `include_recurring` - Whether to include recurring costs in the cost budget. Defaults to `true`.
+* `include_refund` - Whether to include refunds in the cost budget. Defaults to `true`.
+* `include_subscription` - Whether to include subscriptions in the cost budget. Defaults to `true`.
+* `include_support` - Whether to include support costs in the cost budget. Defaults to `true`.
+* `include_tax` - Whether to include tax in the cost budget. Defaults to `true`.
+* `include_upfront` - Whether to include upfront costs in the cost budget. Defaults to `true`.
+* `use_amortized` - Whether a budget uses the amortized rate. Defaults to `false`.
+* `use_blended` - Whether to use blended costs in the cost budget. Defaults to `false`.
+
+Refer to [AWS CostTypes documentation](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_CostTypes.html) for further detail.
+
+### Forecasted Spend
+
+The amount of cost, usage, RI units, or Savings Plans units that you're forecasted to use.
+Type is [Spend](#spend).
+
+### Historical Options
+
+* `budget_adjustment_period` (Required) - The number of budget periods included in the moving-average calculation that determines your auto-adjusted budget amount.
+* `lookback_available_periods` (Optional) - The integer that describes how many budget periods in your BudgetAdjustmentPeriod are included in the calculation of your current budget limit. If the first budget period in your BudgetAdjustmentPeriod has no cost data, then that budget period isn’t included in the average that determines your budget limit. You can’t set your own LookBackAvailablePeriods. The value is automatically calculated from the `budget_adjustment_period` and your historical cost data.
+
+### Planned Budget Limits
+
+Valid keys for `planned_limit` parameter.
+
+* `amount` - (Required) The amount of cost or usage being measured for a budget.
+* `start_time` - (Required) The start time of the budget limit. Format: `2017-01-01_12:00`. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation.
+* `unit` - (Required) The unit of measurement used for the budget forecast, actual spend, or budget threshold, such as dollars or GB. See [Spend](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/data-type-spend.html) documentation.
+
+### Spend
+
+* `amount` - The cost or usage amount that's associated with a budget forecast, actual spend, or budget threshold. Length Constraints: Minimum length of `1`. Maximum length of `2147483647`.
+* `unit` - The unit of measurement that's used for the budget forecast, actual spend, or budget threshold, such as USD or GBP. Length Constraints: Minimum length of `1`. Maximum length of `2147483647`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/caller_identity.html.markdown
new file mode 100644
index 00000000000..af29a5e7483
--- /dev/null
+++ b/website/docs/cdktf/python/d/caller_identity.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "STS (Security Token)"
+layout: "aws"
+page_title: "AWS: aws_caller_identity"
+description: |-
+  Get information about the identity of the caller for the provider
+  connection to AWS.
+---
+
+
+
+# Data Source: aws_caller_identity
+
+Use this data source to get access to the effective Account ID, User ID, and ARN in
+which Terraform is authorized.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        TerraformOutput(self, "account_id",
+            value=current.account_id
+        )
+        TerraformOutput(self, "caller_arn",
+            value=current.arn
+        )
+        TerraformOutput(self, "caller_user",
+            value=current.user_id
+        )
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `account_id` - AWS Account ID number of the account that owns or contains the calling entity.
+* `arn` - ARN associated with the calling entity.
+* `id` - Account ID number of the account that owns or contains the calling entity.
+* `user_id` - Unique identifier of the calling entity.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/canonical_user_id.html.markdown
new file mode 100644
index 00000000000..29ef4842508
--- /dev/null
+++ b/website/docs/cdktf/python/d/canonical_user_id.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_canonical_user_id"
+description: |-
+  Provides the canonical user ID for the AWS account associated with the provider
+  connection to AWS.
+---
+
+
+
+# Data Source: aws_canonical_user_id
+
+The Canonical User ID data source allows access to the [canonical user ID](http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html)
+for the effective account in which Terraform is working.
+
+~> **NOTE:** To use this data source, you must have the `s3:ListAllMyBuckets` permission.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_canonical_user_id import DataAwsCanonicalUserId
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCanonicalUserId(self, "current")
+        TerraformOutput(self, "canonical_user_id",
+            value=current.id
+        )
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Canonical user ID associated with the AWS account.
+
+* `display_name` - Human-friendly name linked to the canonical user ID. The bucket owner's display name. **NOTE:** [This value](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTServiceGET.html) is only included in the response in the US East (N. Virginia), US West (N. California), US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Sydney), Asia Pacific (Tokyo), EU (Ireland), and South America (São Paulo) regions.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ce_cost_category.html.markdown
new file mode 100644
index 00000000000..a89854b328e
--- /dev/null
+++ b/website/docs/cdktf/python/d/ce_cost_category.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_cost_category"
+description: |-
+  Provides details about a specific CostExplorer Cost Category Definition
+---
+
+
+
+# Data Source: aws_ce_cost_category
+
+Provides details about a specific CostExplorer Cost Category.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ce_cost_category import DataAwsCeCostCategory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCeCostCategory(self, "example",
+            cost_category_arn="costCategoryARN"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `cost_category_arn` - (Required) ARN of the Cost Category.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cost category.
+* `default_value` - Default value for the cost category.
+* `effective_end` - Effective end date of your Cost Category.
+* `effective_start` - Effective start date of your Cost Category.
+* `id` - Unique ID of the cost category.
+* `rule` - Configuration block for the Cost Category rules used to categorize costs. See below.
+* `rule_version` - Rule schema version in this particular Cost Category.
+* `split_charge_rule` - Configuration block for the split charge rules used to allocate your charges between your Cost Category values. See below.
+* `tags` - Resource tags.
+
+### `rule`
+
+* `inherited_value` - Configuration block for the value the line item is categorized as if the line item contains the matched dimension. See below.
+* `rule` - Configuration block for the `Expression` object used to categorize costs. See below.
+* `type` - You can define the CostCategoryRule rule type as either `REGULAR` or `INHERITED_VALUE`.
+* `value` - Default value for the cost category.
+
+### `inherited_value`
+
+* `dimension_key` - Key to extract cost category values.
+* `dimension_name` - Name of the dimension that's used to group costs. If you specify `LINKED_ACCOUNT_NAME`, the cost category value is based on account name. If you specify `TAG`, the cost category value will be based on the value of the specified tag key. Valid values are `LINKED_ACCOUNT_NAME` and `TAG`.
+
+### `rule`
+
+* `and` - Return results that match both `Dimension` objects.
+* `cost_category` - Configuration block for the filter that's based on `CostCategory` values. See below.
+* `dimension` - Configuration block for the specific `Dimension` to use for `Expression`. See below.
+* `not` - Return results that do not match the `Dimension` object.
+* `or` - Return results that match either `Dimension` object.
+* `tags` - Configuration block for the specific `Tag` to use for `Expression`. See below.
+
+### `cost_category`
+
+* `key` - Unique name of the Cost Category.
+* `match_options` - Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - Specific value of the Cost Category.
+
+### `dimension`
+
+* `key` - Unique name of the Cost Category.
+* `match_options` - Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - Specific value of the Cost Category.
+
+### `tags`
+
+* `key` - Key for the tag.
+* `match_options` - Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - Specific value of the Cost Category.
+
+### `split_charge_rule`
+
+* `method` - Method that's used to define how to split your source costs across your targets. Valid values are `FIXED`, `PROPORTIONAL`, `EVEN`.
+* `parameter` - Configuration block for the parameters for a split charge method. This is only required for the `FIXED` method. See below.
+* `source` - Cost Category value that you want to split.
+* `targets` - Cost Category values that you want to split costs across. These values can't be used as a source in other split charge rules.
+
+### `parameter`
+
+* `type` - Parameter type.
+* `values` - Parameter values.
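+
+As a brief sketch (output names are illustrative and `TerraformOutput` is assumed to be imported from `cdktf`), the flat computed attributes of the looked-up Cost Category can be exported directly:
+
+```python
+# Hypothetical continuation of the example above.
+example = DataAwsCeCostCategory(self, "example_lookup",
+    cost_category_arn="costCategoryARN"
+)
+TerraformOutput(self, "default_value", value=example.default_value)
+TerraformOutput(self, "rule_version", value=example.rule_version)
+```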
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ce_tags.html.markdown
new file mode 100644
index 00000000000..98452115f62
--- /dev/null
+++ b/website/docs/cdktf/python/d/ce_tags.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_tags"
+description: |-
+  Provides details about specific CE Tags
+---
+
+
+
+# Data Source: aws_ce_tags
+
+Provides details about specific CE Tags.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ce_tags import DataAwsCeTags, DataAwsCeTagsTimePeriod
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCeTags(self, "test",
+            time_period=DataAwsCeTagsTimePeriod(
+                end="2022-12-01",
+                start="2021-01-01"
+            )
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `time_period` - (Required) Configuration block for the start and end dates for retrieving the dimension values.
+
+The following arguments are optional:
+
+* `filter` - (Optional) Configuration block for the `Expression` object used to categorize costs. See below.
+* `search_string` - (Optional) Value that you want to search for.
+* `sort_by` - (Optional) Configuration block for the value by which you want to sort the data. See below.
+* `tag_key` - (Optional) Key of the tag that you want to return values for.
+
+### `time_period`
+
+* `start` - (Required) Beginning of the time period.
+* `end` - (Required) End of the time period.
+
+### `filter`
+
+* `and` - (Optional) Return results that match both `Dimension` objects.
+* `cost_category` - (Optional) Configuration block for the filter that's based on `CostCategory` values. See below.
+* `dimension` - (Optional) Configuration block for the specific `Dimension` to use for `Expression`. See below.
+* `not` - (Optional) Return results that do not match the `Dimension` object.
+* `or` - (Optional) Return results that match either `Dimension` object.
+* `tag` - (Optional) Configuration block for the specific `Tag` to use for `Expression`. See below.
+
+### `cost_category`
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `dimension`
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `tag`
+
+* `key` - (Optional) Key for the tag.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `sort_by`
+
+* `key` - (Required) Key that's used to sort the data. Valid values are: `BlendedCost`, `UnblendedCost`, `AmortizedCost`, `NetAmortizedCost`, `NetUnblendedCost`, `UsageQuantity`, `NormalizedUsageAmount`.
+* `sort_order` - (Optional) Order that's used to sort the data. Valid values are: `ASCENDING`, `DESCENDING`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the tag.
+* `tags` - Tags that match your request.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudcontrolapi_resource.html.markdown
new file mode 100644
index 00000000000..b2c2d3afbc4
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudcontrolapi_resource.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Cloud Control API"
+layout: "aws"
+page_title: "AWS: aws_cloudcontrolapi_resource"
+description: |-
+  Provides details for a Cloud Control API Resource.
+---
+
+
+
+# Data Source: aws_cloudcontrolapi_resource
+
+Provides details for a Cloud Control API Resource. The reading of these resources is proxied through Cloud Control API handlers to the backend service.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudcontrolapi_resource import DataAwsCloudcontrolapiResource
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudcontrolapiResource(self, "example",
+            identifier="example",
+            type_name="AWS::ECS::Cluster"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `identifier` - (Required) Identifier of the CloudFormation resource. For example, `vpc-12345678`.
+* `type_name` - (Required) CloudFormation resource type name. For example, `AWS::EC2::VPC`.
+
+The following arguments are optional:
+
+* `role_arn` - (Optional) ARN of the IAM Role to assume for operations.
+* `type_version_id` - (Optional) Identifier of the CloudFormation resource type version.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `properties` - JSON string matching the CloudFormation resource type schema with current configuration. Underlying attributes can be referenced via the [`jsondecode()` function](https://www.terraform.io/docs/language/functions/jsondecode.html), for example, `jsondecode(data.aws_cloudcontrolapi_resource.example.properties)["example"]`.
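+
+Since `properties` is a JSON string, a CDKTF Python equivalent of the `jsondecode()` pattern above can use the `Fn` helpers. The sketch below assumes the data source from the example above, with `Fn`, `Token`, and `TerraformOutput` imported from `cdktf`; `ClusterName` is an assumed key from the `AWS::ECS::Cluster` schema:
+
+```python
+# Sketch only: decode the JSON `properties` string and read a single key from it.
+cluster = DataAwsCloudcontrolapiResource(self, "cluster",
+    identifier="example",
+    type_name="AWS::ECS::Cluster"
+)
+TerraformOutput(self, "cluster_name",
+    value=Token.as_string(Fn.lookup(Fn.jsondecode(cluster.properties), "ClusterName", ""))
+)
+```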
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudformation_export.html.markdown
new file mode 100644
index 00000000000..28a81bcbbe6
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudformation_export.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_export"
+description: |-
+  Provides metadata of a CloudFormation Export (e.g., Cross Stack References)
+---
+
+
+
+# Data Source: aws_cloudformation_export
+
+The CloudFormation Export data source allows access to stack
+exports specified in the [Output](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/outputs-section-structure.html) section of the CloudFormation template using the optional `Export` property.
+
+-> Note: If you are trying to use a value from a CloudFormation stack in the same Terraform run, please use normal interpolation or CloudFormation outputs.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudformation_export import DataAwsCloudformationExport
+from imports.aws.instance import Instance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        subnet_id = DataAwsCloudformationExport(self, "subnet_id",
+            name="mySubnetIdExportName"
+        )
+        Instance(self, "web",
+            ami="ami-abb07bcb",
+            instance_type="t2.micro",
+            subnet_id=Token.as_string(subnet_id.value)
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the export as it appears in the console or from [list-exports](http://docs.aws.amazon.com/cli/latest/reference/cloudformation/list-exports.html).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `value` - Value from the CloudFormation export identified by the export name found from [list-exports](http://docs.aws.amazon.com/cli/latest/reference/cloudformation/list-exports.html).
+* `exporting_stack_id` - ARN of stack that contains the exported output name and value.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudformation_stack.html.markdown
new file mode 100644
index 00000000000..c9cd4cce89b
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudformation_stack.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_stack"
+description: |-
+  Provides metadata of a CloudFormation stack (e.g., outputs)
+---
+
+
+
+# Data Source: aws_cloudformation_stack
+
+The CloudFormation Stack data source allows access to stack
+outputs and other useful data including the template body.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudformation_stack import DataAwsCloudformationStack
+from imports.aws.instance import Instance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        network = DataAwsCloudformationStack(self, "network",
+            name="my-network-stack"
+        )
+        Instance(self, "web",
+            ami="ami-abb07bcb",
+            instance_type="t2.micro",
+            subnet_id=Token.as_string(property_access(network.outputs, ["\"SubnetId\""])),
+            tags={
+                "Name": "HelloWorld"
+            }
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the stack.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `capabilities` - List of capabilities.
+* `description` - Description of the stack.
+* `disable_rollback` - Whether the rollback of the stack is disabled when stack creation fails.
+* `notification_arns` - List of SNS topic ARNs to publish stack-related events.
+* `outputs` - Map of outputs from the stack.
+* `parameters` - Map of parameters that specify input parameters for the stack.
+* `tags` - Map of tags associated with this stack.
+* `template_body` - Structure containing the template body.
+* `iam_role_arn` - ARN of the IAM role used to create the stack.
+* `timeout_in_minutes` - Amount of time that can pass before the stack status becomes `CREATE_FAILED`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudformation_type.html.markdown
new file mode 100644
index 00000000000..7c96a43470c
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudformation_type.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_type"
+description: |-
+  Provides details about a CloudFormation Type.
+---
+
+
+
+# Data Source: aws_cloudformation_type
+
+Provides details about a CloudFormation Type.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudformation_type import DataAwsCloudformationType
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudformationType(self, "example",
+            type="RESOURCE",
+            type_name="AWS::Athena::WorkGroup"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Optional) ARN of the CloudFormation Type. For example, `arn:aws:cloudformation:us-west-2::type/resource/AWS-EC2-VPC`.
+* `type` - (Optional) CloudFormation Registry Type. For example, `RESOURCE`.
+* `type_name` - (Optional) CloudFormation Type name. For example, `AWS::EC2::VPC`.
+* `version_id` - (Optional) Identifier of the CloudFormation Type version.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `default_version_id` - Identifier of the CloudFormation Type default version.
+* `deprecated_status` - Deprecation status of the CloudFormation Type.
+* `description` - Description of the CloudFormation Type.
+* `documentation_url` - URL of the documentation for the CloudFormation Type.
+* `execution_role_arn` - ARN of the IAM Role used to register the CloudFormation Type. +* `is_default_version` - Whether the CloudFormation Type version is the default version. +* `logging_config` - List of objects containing logging configuration. + * `log_group_name` - Name of the CloudWatch Log Group where CloudFormation sends error logging information when invoking the type's handlers. + * `log_role_arn` - ARN of the IAM Role CloudFormation assumes when sending error logging information to CloudWatch Logs. +* `provisioning_type` - Provisioning behavior of the CloudFormation Type. +* `schema` - JSON document of the CloudFormation Type schema. +* `source_url` - URL of the source code for the CloudFormation Type. +* `visibility` - Scope of the CloudFormation Type. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudfront_cache_policy.html.markdown b/website/docs/cdktf/python/d/cloudfront_cache_policy.html.markdown new file mode 100644 index 00000000000..cd9aa86868e --- /dev/null +++ b/website/docs/cdktf/python/d/cloudfront_cache_policy.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "CloudFront" +layout: "aws" +page_title: "AWS: aws_cloudfront_cache_policy" +description: |- + Use this data source to retrieve information about a CloudFront cache policy. +--- + + + +# Data Source: aws_cloudfront_cache_policy + +Use this data source to retrieve information about a CloudFront cache policy. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_cloudfront_cache_policy import DataAwsCloudfrontCachePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCloudfrontCachePolicy(self, "example", + name="example-policy" + ) +``` + +### AWS-Managed Policies + +AWS managed cache policy names are prefixed with `Managed-`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_cloudfront_cache_policy import DataAwsCloudfrontCachePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCloudfrontCachePolicy(self, "example", + name="Managed-CachingOptimized" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Optional) Unique name to identify the cache policy. +* `id` - (Optional) Identifier for the cache policy. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `etag` - Current version of the cache policy. +* `min_ttl` - Minimum amount of time, in seconds, that you want objects to stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated. +* `max_ttl` - Maximum amount of time, in seconds, that objects stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated. 
+* `default_ttl` - Default amount of time, in seconds, that you want objects to stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated. +* `comment` - Comment to describe the cache policy. +* `parameters_in_cache_key_and_forwarded_to_origin` - The HTTP headers, cookies, and URL query strings to include in the cache key. See [Parameters In Cache Key And Forwarded To Origin](#parameters-in-cache-key-and-forwarded-to-origin) for more information. + +### Parameters In Cache Key And Forwarded To Origin + +* `cookies_config` - Object that determines whether any cookies in viewer requests (and if so, which cookies) are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information. +* `headers_config` - Object that determines whether any HTTP headers (and if so, which headers) are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information. +* `query_strings_config` - Object that determines whether any URL query strings in viewer requests (and if so, which query strings) are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Query String Config](#query-string-config) for more information. +* `enable_accept_encoding_brotli` - A flag that can affect whether the Accept-Encoding HTTP header is included in the cache key and included in requests that CloudFront sends to the origin. +* `enable_accept_encoding_gzip` - A flag that can affect whether the Accept-Encoding HTTP header is included in the cache key and included in requests that CloudFront sends to the origin. + +### Cookies Config + +* `cookie_behavior` - Determines whether any cookies in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allExcept`, `all`. +* `cookies` - Object that contains a list of cookie names. See [Items](#items) for more information. + +### Headers Config + +* `header_behavior` - Determines whether any HTTP headers are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`. +* `headers` - Object that contains a list of header names. See [Items](#items) for more information. + +### Query String Config + +* `query_string_behavior` - Determines whether any URL query strings in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allExcept`, `all`. +* `query_strings` - Object that contains a list of query string names. See [Items](#items) for more information. + +### Items + +* `items` - List of item names (`cookies`, `headers`, or `query_strings`). + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudfront_distribution.html.markdown b/website/docs/cdktf/python/d/cloudfront_distribution.html.markdown new file mode 100644 index 00000000000..658cf7f3a55 --- /dev/null +++ b/website/docs/cdktf/python/d/cloudfront_distribution.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "CloudFront" +layout: "aws" +page_title: "AWS: aws_cloudfront_distribution" +description: |- + Provides a CloudFront web distribution data source. 
+---
+
+
+
+# Data Source: aws_cloudfront_distribution
+
+Use this data source to retrieve information about a CloudFront distribution.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_distribution import DataAwsCloudfrontDistribution
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontDistribution(self, "test",
+            id="EDFDVBD632BHDS5"
+        )
+```
+
+## Argument Reference
+
+* `id` - Identifier for the distribution. For example: `EDFDVBD632BHDS5`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier for the distribution. For example: `EDFDVBD632BHDS5`.
+
+* `aliases` - List that contains information about CNAMEs (alternate domain names), if any, for this distribution.
+
+* `arn` - ARN (Amazon Resource Name) for the distribution. For example: `arn:aws:cloudfront::123456789012:distribution/EDFDVBD632BHDS5`, where `123456789012` is your AWS account ID.
+
+* `status` - Current status of the distribution. `Deployed` if the
+  distribution's information is fully propagated throughout the Amazon
+  CloudFront system.
+
+* `domain_name` - Domain name corresponding to the distribution. For
+  example: `d604721fxaaqy9.cloudfront.net`.
+
+* `last_modified_time` - Date and time the distribution was last modified.
+
+* `in_progress_validation_batches` - The number of invalidation batches
+  currently in progress.
+
+* `etag` - Current version of the distribution's information. For example:
+  `E2QWRUHAPOMQZL`.
+
+* `hosted_zone_id` - CloudFront Route 53 zone ID that can be used to
+  route an Alias Resource Record Set to. This attribute is simply an
+  alias for the zone ID `Z2FDTNDATAQYW2`.
+* `web_acl_id` - AWS WAF web ACL associated with this distribution.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudfront_function.html.markdown
new file mode 100644
index 00000000000..d9136a6714a
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudfront_function.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_function"
+description: |-
+  Provides a CloudFront Function data source.
+---
+
+
+
+# Data Source: aws_cloudfront_function
+
+Provides information about a CloudFront Function.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import VariableType, TerraformVariable, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_function import DataAwsCloudfrontFunction
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, stage):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        function_name = TerraformVariable(self, "function_name",
+            type=VariableType.STRING
+        )
+        DataAwsCloudfrontFunction(self, "existing",
+            name=function_name.string_value,
+            stage=stage
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:

+* `name` - (Required) Name of the CloudFront function.
+* `stage` - (Required) Function’s stage, either `DEVELOPMENT` or `LIVE`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN identifying your CloudFront Function.
+* `code` - Source code of the function.
+* `comment` - Comment.
+* `etag` - ETag hash of the function.
+* `last_modified_time` - When this resource was last modified.
+* `runtime` - Identifier of the function's runtime.
+* `status` - Status of the function. Can be `UNPUBLISHED`, `UNASSOCIATED` or `ASSOCIATED`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudfront_log_delivery_canonical_user_id.html.markdown
new file mode 100644
index 00000000000..1504f8fe38c
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudfront_log_delivery_canonical_user_id.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_log_delivery_canonical_user_id"
+description: |-
+  Provides the canonical user ID of the AWS `awslogsdelivery` account for CloudFront bucket logging.
+---
+
+
+
+# Data Source: aws_cloudfront_log_delivery_canonical_user_id
+
+The CloudFront Log Delivery Canonical User ID data source allows access to the [canonical user ID](http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html) of the AWS `awslogsdelivery` account for CloudFront bucket logging.
+See the [Amazon CloudFront Developer Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/AccessLogs.html) for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_log_delivery_canonical_user_id import DataAwsCloudfrontLogDeliveryCanonicalUserId
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl, S3BucketAclAccessControlPolicy, S3BucketAclAccessControlPolicyGrant, S3BucketAclAccessControlPolicyGrantGrantee
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, owner):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        data_aws_cloudfront_log_delivery_canonical_user_id_example = DataAwsCloudfrontLogDeliveryCanonicalUserId(self, "example_1")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_cloudfront_log_delivery_canonical_user_id_example.override_logical_id("example")
+        aws_s3_bucket_acl_example = S3BucketAcl(self, "example_2",
+            access_control_policy=S3BucketAclAccessControlPolicy(
+                grant=[S3BucketAclAccessControlPolicyGrant(
+                    grantee=S3BucketAclAccessControlPolicyGrantGrantee(
+                        id=Token.as_string(data_aws_cloudfront_log_delivery_canonical_user_id_example.id),
+                        type="CanonicalUser"
+                    ),
+                    permission="FULL_CONTROL"
+                )
+                ],
+                owner=owner
+            ),
+            bucket=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_acl_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `region` - (Optional) Region you'd like the canonical user ID for. By default, fetches the current region.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Canonical user ID for the AWS `awslogsdelivery` account in the region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudfront_origin_access_identities.html.markdown b/website/docs/cdktf/python/d/cloudfront_origin_access_identities.html.markdown
new file mode 100644
index 00000000000..74af5ae09f9
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudfront_origin_access_identities.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_identities"
+description: |-
+  Use this data source to retrieve information about a set of Amazon CloudFront origin access identities.
+---
+
+
+# Data Source: aws_cloudfront_origin_access_identities
+
+Use this data source to get the ARNs, IDs and S3 canonical user IDs of Amazon CloudFront origin access identities.
+
+## Example Usage
+
+### All origin access identities in the account
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_origin_access_identities import DataAwsCloudfrontOriginAccessIdentities
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontOriginAccessIdentities(self, "example")
+```
+
+### Origin access identities filtered by comment/name
+
+Origin access identities whose comments are `example-comment1` or `example-comment2`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_origin_access_identities import DataAwsCloudfrontOriginAccessIdentities
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontOriginAccessIdentities(self, "example",
+            comments=["example-comment1", "example-comment2"]
+        )
+```
+
+## Argument Reference
+
+* `comments` - (Optional) Filter origin access identities by comment.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `iam_arns` - Set of ARNs of the matched origin access identities.
+* `ids` - Set of IDs of the matched origin access identities.
+* `s3_canonical_user_ids` - Set of S3 canonical user IDs of the matched origin access identities.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudfront_origin_access_identity.html.markdown b/website/docs/cdktf/python/d/cloudfront_origin_access_identity.html.markdown
new file mode 100644
index 00000000000..00b582f147a
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudfront_origin_access_identity.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_identity"
+description: |-
+  Use this data source to retrieve information for an Amazon CloudFront origin access identity.
+---
+
+
+# Data Source: aws_cloudfront_origin_access_identity
+
+Use this data source to retrieve information for an Amazon CloudFront origin access identity.
+
+## Example Usage
+
+The following example retrieves information for a CloudFront origin access identity by its identifier.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_origin_access_identity import DataAwsCloudfrontOriginAccessIdentity
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontOriginAccessIdentity(self, "example",
+            id="EDFDVBD632BHDS5"
+        )
+```
+
+## Argument Reference
+
+* `id` - (Required) The identifier for the origin access identity. For example: `EDFDVBD632BHDS5`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `caller_reference` - Internal value used by CloudFront to allow future
+    updates to the origin access identity.
+* `cloudfront_access_identity_path` - A shortcut to the full path for the
+    origin access identity to use in CloudFront, see below.
+* `comment` - An optional comment for the origin access identity.
+* `etag` - Current version of the origin access identity's information.
+    For example: `E2QWRUHAPOMQZL`.
+* `iam_arn` - Pre-generated ARN for use in S3 bucket policies (see below).
+    Example: `arn:aws:iam::cloudfront:user/CloudFront Origin Access Identity
+    E2QWRUHAPOMQZL`.
+* `s3_canonical_user_id` - The Amazon S3 canonical user ID for the origin
+    access identity, which you use when giving the origin access identity read
+    permission to an object in Amazon S3.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudfront_origin_request_policy.html.markdown b/website/docs/cdktf/python/d/cloudfront_origin_request_policy.html.markdown
new file mode 100644
index 00000000000..4b9453e7528
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudfront_origin_request_policy.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_request_policy"
+description: |-
+  Determines the values that CloudFront includes in requests that it sends to the origin.
+---
+
+
+# Data Source: aws_cloudfront_origin_request_policy
+
+Use this data source to retrieve information about a CloudFront origin request policy.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_origin_request_policy import DataAwsCloudfrontOriginRequestPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontOriginRequestPolicy(self, "example",
+            name="example-policy"
+        )
+```
+
+### AWS-Managed Policies
+
+AWS managed origin request policy names are prefixed with `Managed-`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_origin_request_policy import DataAwsCloudfrontOriginRequestPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontOriginRequestPolicy(self, "ua_referer",
+            name="Managed-UserAgentRefererHeaders"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Optional) Unique name to identify the origin request policy.
+* `id` - (Optional) Identifier for the origin request policy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `comment` - Comment to describe the origin request policy.
+* `cookies_config` - Object that determines whether any cookies in viewer requests (and if so, which cookies) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information.
+* `etag` - Current version of the origin request policy.
+* `headers_config` - Object that determines whether any HTTP headers (and if so, which headers) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information.
+* `query_strings_config` - Object that determines whether any URL query strings in viewer requests (and if so, which query strings) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Query String Config](#query-string-config) for more information.
+
+### Cookies Config
+
+* `cookie_behavior` - Determines whether any cookies in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`.
+* `cookies` - Object that contains a list of cookie names. See [Items](#items) for more information.
+
+### Headers Config
+
+* `header_behavior` - Determines whether any HTTP headers are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allViewer`, `allViewerAndWhitelistCloudFront`.
+* `headers` - Object that contains a list of header names. See [Items](#items) for more information.
+
+### Query String Config
+
+* `query_string_behavior` - Determines whether any URL query strings in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`.
+* `query_strings` - Object that contains a list of query string names. See [Items](#items) for more information.
+
+### Items
+
+* `items` - List of item names (cookies, headers, or query strings).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudfront_realtime_log_config.html.markdown b/website/docs/cdktf/python/d/cloudfront_realtime_log_config.html.markdown
new file mode 100644
index 00000000000..d57138010f7
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudfront_realtime_log_config.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_realtime_log_config"
+description: |-
+  Provides details about a CloudFront real-time log configuration.
+---
+
+
+# Data Source: aws_cloudfront_realtime_log_config
+
+Provides details about a CloudFront real-time log configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_realtime_log_config import DataAwsCloudfrontRealtimeLogConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontRealtimeLogConfig(self, "example",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Unique name to identify this real-time log configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN (Amazon Resource Name) of the CloudFront real-time log configuration.
+* `endpoint` - Amazon Kinesis data streams where real-time log data is sent.
+* `fields` - Fields that are included in each real-time log record. See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-fields) for supported values.
+* `sampling_rate` - Sampling rate for this real-time log configuration. The sampling rate determines the percentage of viewer requests that are represented in the real-time log data. An integer between `1` and `100`, inclusive.
+
+The `endpoint` object supports the following:
+
+* `kinesis_stream_config` - Amazon Kinesis data stream configuration.
+* `stream_type` - Type of data stream where real-time log data is sent. The only valid value is `Kinesis`.
+
+The `kinesis_stream_config` object supports the following:
+
+* `role_arn` - ARN of an [IAM role](iam_role.html) that CloudFront can use to send real-time log data to the Kinesis data stream.
+See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-iam-role) for more information.
+* `stream_arn` - ARN of the [Kinesis data stream](kinesis_stream.html).
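+
+The configuration's ARN can then be consumed elsewhere in a stack. As a minimal sketch (not `cdktf convert` output; the stack class and output name are illustrative assumptions), the ARN can be surfaced as a stack output:
+
+```python
+# Minimal sketch: look up a real-time log configuration by name and export its ARN.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_cloudfront_realtime_log_config import DataAwsCloudfrontRealtimeLogConfig
+class RealtimeLogConfigLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsCloudfrontRealtimeLogConfig(self, "example",
+            name="example"
+        )
+        # "realtime_log_config_arn" is an arbitrary output name for this sketch.
+        TerraformOutput(self, "realtime_log_config_arn",
+            value=example.arn
+        )
+```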
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudfront_response_headers_policy.html.markdown b/website/docs/cdktf/python/d/cloudfront_response_headers_policy.html.markdown
new file mode 100644
index 00000000000..4d9f902d50b
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudfront_response_headers_policy.html.markdown
@@ -0,0 +1,142 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_response_headers_policy"
+description: |-
+  Use this data source to retrieve information about a CloudFront response headers policy.
+---
+
+
+# Data Source: aws_cloudfront_response_headers_policy
+
+Use this data source to retrieve information about a CloudFront response headers policy.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_response_headers_policy import DataAwsCloudfrontResponseHeadersPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontResponseHeadersPolicy(self, "example",
+            name="example-policy"
+        )
+```
+
+### AWS-Managed Policies
+
+AWS managed response headers policy names are prefixed with `Managed-`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudfront_response_headers_policy import DataAwsCloudfrontResponseHeadersPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudfrontResponseHeadersPolicy(self, "example",
+            name="Managed-SimpleCORS"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Optional) Unique name to identify the response headers policy.
+* `id` - (Optional) Identifier for the response headers policy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `comment` - Comment to describe the response headers policy. The comment cannot be longer than 128 characters.
+* `etag` - Current version of the response headers policy.
+* `cors_config` - Configuration for a set of HTTP response headers that are used for Cross-Origin Resource Sharing (CORS). See [Cors Config](#cors-config) for more information.
+* `custom_headers_config` - Object that contains an attribute `items` that contains a list of Custom Headers. See [Custom Header](#custom-header) for more information.
+* `remove_headers_config` - Object that contains an attribute `items` that contains a list of Remove Headers. See [Remove Header](#remove-header) for more information.
+* `security_headers_config` - A configuration for a set of security-related HTTP response headers. See [Security Headers Config](#security-headers-config) for more information.
+* `server_timing_headers_config` - Configuration for enabling the Server-Timing header in HTTP responses sent from CloudFront. See [Server Timing Headers Config](#server-timing-headers-config) for more information.
+
+### Cors Config
+
+* `access_control_allow_credentials` - A Boolean value that CloudFront uses as the value for the Access-Control-Allow-Credentials HTTP response header.
+* `access_control_allow_headers` - Object that contains an attribute `items` that contains a list of HTTP header names that CloudFront includes as values for the Access-Control-Allow-Headers HTTP response header.
+* `access_control_allow_methods` - Object that contains an attribute `items` that contains a list of HTTP methods that CloudFront includes as values for the Access-Control-Allow-Methods HTTP response header. Valid values: `GET` | `POST` | `OPTIONS` | `PUT` | `DELETE` | `HEAD` | `ALL`
+* `access_control_allow_origins` - Object that contains an attribute `items` that contains a list of origins that CloudFront can use as the value for the Access-Control-Allow-Origin HTTP response header.
+* `access_control_expose_headers` - Object that contains an attribute `items` that contains a list of HTTP headers that CloudFront includes as values for the Access-Control-Expose-Headers HTTP response header.
+* `access_control_max_age_sec` - A number that CloudFront uses as the value for the Access-Control-Max-Age HTTP response header.
+
+### Custom Header
+
+* `header` - HTTP response header name.
+* `override` - Whether CloudFront overrides a response header with the same name received from the origin with the header specified here.
+* `value` - Value for the HTTP response header.
+
+### Remove Header
+
+* `header` - The HTTP header name.
+
+### Security Headers Config
+
+* `content_security_policy` - The policy directives and their values that CloudFront includes as values for the Content-Security-Policy HTTP response header. See [Content Security Policy](#content-security-policy) for more information.
+* `content_type_options` - A setting that determines whether CloudFront includes the X-Content-Type-Options HTTP response header with its value set to nosniff. See [Content Type Options](#content-type-options) for more information.
+* `frame_options` - Setting that determines whether CloudFront includes the X-Frame-Options HTTP response header and the header’s value. See [Frame Options](#frame-options) for more information.
+* `referrer_policy` - Setting that determines whether CloudFront includes the Referrer-Policy HTTP response header and the header’s value. See [Referrer Policy](#referrer-policy) for more information.
+* `strict_transport_security` - Settings that determine whether CloudFront includes the Strict-Transport-Security HTTP response header and the header’s value. See [Strict Transport Security](#strict-transport-security) for more information.
+* `xss_protection` - Settings that determine whether CloudFront includes the X-XSS-Protection HTTP response header and the header’s value. See [XSS Protection](#xss-protection) for more information.
+
+### Content Security Policy
+
+* `content_security_policy` - The policy directives and their values that CloudFront includes as values for the Content-Security-Policy HTTP response header.
+* `override` - Whether CloudFront overrides the Content-Security-Policy HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Content Type Options
+
+* `override` - Whether CloudFront overrides the X-Content-Type-Options HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Frame Options
+
+* `frame_option` - Value of the X-Frame-Options HTTP response header.
Valid values: `DENY` | `SAMEORIGIN` +* `override` - Whether CloudFront overrides the X-Frame-Options HTTP response header received from the origin with the one specified in this response headers policy. + +### Referrer Policy + +* `referrer_policy` - Value of the Referrer-Policy HTTP response header. Valid Values: `no-referrer` | `no-referrer-when-downgrade` | `origin` | `origin-when-cross-origin` | `same-origin` | `strict-origin` | `strict-origin-when-cross-origin` | `unsafe-url` +* `override` - Whether CloudFront overrides the Referrer-Policy HTTP response header received from the origin with the one specified in this response headers policy. + +### Strict Transport Security + +* `access_control_max_age_sec` - A number that CloudFront uses as the value for the max-age directive in the Strict-Transport-Security HTTP response header. +* `include_subdomains` - Whether CloudFront includes the includeSubDomains directive in the Strict-Transport-Security HTTP response header. +* `override` - Whether CloudFront overrides the Strict-Transport-Security HTTP response header received from the origin with the one specified in this response headers policy. +* `preload` - Whether CloudFront includes the preload directive in the Strict-Transport-Security HTTP response header. + +### XSS Protection + +* `mode_block` - Whether CloudFront includes the mode=block directive in the X-XSS-Protection header. +* `override` - Whether CloudFront overrides the X-XSS-Protection HTTP response header received from the origin with the one specified in this response headers policy. +* `protection` - Boolean value that determines the value of the X-XSS-Protection HTTP response header. When this setting is true, the value of the X-XSS-Protection header is 1. When this setting is false, the value of the X-XSS-Protection header is 0. +* `report_uri` - Whether CloudFront sets a reporting URI in the X-XSS-Protection header. + +### Server Timing Headers Config + +* `enabled` - Whether CloudFront adds the `Server-Timing` header to HTTP responses that it sends in response to requests that match a cache behavior that's associated with this response headers policy. +* `sampling_rate` - Number 0–100 (inclusive) that specifies the percentage of responses that you want CloudFront to add the Server-Timing header to. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudhsm_v2_cluster.html.markdown b/website/docs/cdktf/python/d/cloudhsm_v2_cluster.html.markdown new file mode 100644 index 00000000000..4c97021f25b --- /dev/null +++ b/website/docs/cdktf/python/d/cloudhsm_v2_cluster.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "CloudHSM" +layout: "aws" +page_title: "AWS: aws_cloudhsm_v2_cluster" +description: |- + Get information on a CloudHSM v2 cluster. +--- + + + +# Data Source: aws_cloudhsm_v2_cluster + +Use this data source to get information about a CloudHSM v2 cluster + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_cloudhsm_v2_cluster import DataAwsCloudhsmV2Cluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCloudhsmV2Cluster(self, "cluster", + cluster_id="cluster-testclusterid" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `cluster_id` - (Required) ID of Cloud HSM v2 cluster. +* `cluster_state` - (Optional) State of the cluster to be found. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `vpc_id` - ID of the VPC that the CloudHSM cluster resides in. +* `security_group_id` - ID of the security group associated with the CloudHSM cluster. +* `subnet_ids` - IDs of subnets in which cluster operates. +* `cluster_certificates` - The list of cluster certificates. + * `cluster_certificates.0.cluster_certificate` - The cluster certificate issued (signed) by the issuing certificate authority (CA) of the cluster's owner. + * `cluster_certificates.0.cluster_csr` - The certificate signing request (CSR). Available only in UNINITIALIZED state. + * `cluster_certificates.0.aws_hardware_certificate` - The HSM hardware certificate issued (signed) by AWS CloudHSM. + * `cluster_certificates.0.hsm_certificate` - The HSM certificate issued (signed) by the HSM hardware. + * `cluster_certificates.0.manufacturer_hardware_certificate` - The HSM hardware certificate issued (signed) by the hardware manufacturer. +The number of available cluster certificates may vary depending on state of the cluster. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudtrail_service_account.html.markdown b/website/docs/cdktf/python/d/cloudtrail_service_account.html.markdown new file mode 100644 index 00000000000..28b42b91317 --- /dev/null +++ b/website/docs/cdktf/python/d/cloudtrail_service_account.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "CloudTrail" +layout: "aws" +page_title: "AWS: aws_cloudtrail_service_account" +description: |- + Get AWS CloudTrail Service Account ID for storing trail data in S3. +--- + + + +# Data Source: aws_cloudtrail_service_account + +Use this data source to get the Account ID of the [AWS CloudTrail Service Account](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-supported-regions.html) +in a given region for the purpose of allowing CloudTrail to store trail data in S3. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_cloudtrail_service_account import DataAwsCloudtrailServiceAccount
+from imports.aws.data_aws_iam_policy_document import (
+    DataAwsIamPolicyDocument,
+    DataAwsIamPolicyDocumentStatement,
+    DataAwsIamPolicyDocumentStatementPrincipals,
+)
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_policy import S3BucketPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bucket = S3Bucket(self, "bucket",
+            bucket="tf-cloudtrail-logging-test-bucket",
+            force_destroy=True
+        )
+        main = DataAwsCloudtrailServiceAccount(self, "main")
+        allow_cloudtrail_logging = DataAwsIamPolicyDocument(self, "allow_cloudtrail_logging",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:PutObject"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(main.arn)],
+                    type="AWS"
+                )
+                ],
+                resources=["${" + bucket.arn + "}/*"],
+                sid="Put bucket policy needed for trails"
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["s3:GetBucketAcl"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(main.arn)],
+                    type="AWS"
+                )
+                ],
+                resources=[bucket.arn],
+                sid="Get bucket policy needed for trails"
+            )
+            ]
+        )
+        aws_s3_bucket_policy_allow_cloudtrail_logging = S3BucketPolicy(self, "allow_cloudtrail_logging_3",
+            bucket=bucket.id,
+            policy=Token.as_string(allow_cloudtrail_logging.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_policy_allow_cloudtrail_logging.override_logical_id("allow_cloudtrail_logging")
+```
+
+## Argument Reference
+
+* `region` - (Optional) Name of the region whose AWS CloudTrail account ID is desired.
+Defaults to the region from the AWS provider configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the AWS CloudTrail service account in the selected region.
+* `arn` - ARN of the AWS CloudTrail service account in the selected region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cloudwatch_event_bus.html.markdown b/website/docs/cdktf/python/d/cloudwatch_event_bus.html.markdown
new file mode 100644
index 00000000000..627bc19997f
--- /dev/null
+++ b/website/docs/cdktf/python/d/cloudwatch_event_bus.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_bus"
+description: |-
+  Get information on an EventBridge (CloudWatch) Event Bus.
+---
+
+
+# Data Source: aws_cloudwatch_event_bus
+
+This data source can be used to fetch information about a specific
+EventBridge event bus. Use this data source to compute the ARN of
+an event bus, given the name of the bus.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cloudwatch_event_bus import DataAwsCloudwatchEventBus
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCloudwatchEventBus(self, "example",
+            name="example-bus-name"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly EventBridge event bus name.
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudwatch_event_connection.html.markdown b/website/docs/cdktf/python/d/cloudwatch_event_connection.html.markdown new file mode 100644 index 00000000000..d622376ec33 --- /dev/null +++ b/website/docs/cdktf/python/d/cloudwatch_event_connection.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_connection" +description: |- + Provides an EventBridge connection data source. +--- + + + +# Data Source: aws_cloudwatch_event_connection + +Use this data source to retrieve information about an EventBridge connection. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_cloudwatch_event_connection import DataAwsCloudwatchEventConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCloudwatchEventConnection(self, "test", + name="test" + ) +``` + +## Argument Reference + +* `name` - Name of the connection. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `name` - Name of the connection. + +* `arn` - ARN (Amazon Resource Name) for the connection. + +* `secret_arn` - ARN (Amazon Resource Name) for the secret created from the authorization parameters specified for the connection. + +* `authorization_type` - Type of authorization to use to connect. One of `API_KEY`,`BASIC`,`OAUTH_CLIENT_CREDENTIALS`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudwatch_event_source.html.markdown b/website/docs/cdktf/python/d/cloudwatch_event_source.html.markdown new file mode 100644 index 00000000000..a8c511ead26 --- /dev/null +++ b/website/docs/cdktf/python/d/cloudwatch_event_source.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_source" +description: |- + Get information on an EventBridge (Cloudwatch) Event Source. +--- + + + +# Data Source: aws_cloudwatch_event_source + +Use this data source to get information about an EventBridge Partner Event Source. This data source will only return one partner event source. An error will be returned if multiple sources match the same name prefix. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_cloudwatch_event_source import DataAwsCloudwatchEventSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCloudwatchEventSource(self, "examplepartner", + name_prefix="aws.partner/examplepartner.com" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name_prefix` - (Optional) Specifying this limits the results to only those partner event sources with names that start with the specified prefix + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the partner event source +* `created_by` - Name of the SaaS partner that created the event source +* `name` - Name of the event source +* `state` - State of the event source (`ACTIVE` or `PENDING`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudwatch_log_data_protection_policy_document.html.markdown b/website/docs/cdktf/python/d/cloudwatch_log_data_protection_policy_document.html.markdown new file mode 100644 index 00000000000..a1ab6156021 --- /dev/null +++ b/website/docs/cdktf/python/d/cloudwatch_log_data_protection_policy_document.html.markdown @@ -0,0 +1,133 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_data_protection_policy_document" +description: |- + Generates a CloudWatch Log Group Data Protection Policy document in JSON format +--- + + + +# Data Source: aws_cloudwatch_log_data_protection_policy_document + +Generates a CloudWatch Log Group Data Protection Policy document in JSON format for use with the `aws_cloudwatch_log_data_protection_policy` resource. + +-> For more information about data protection policies, see the [Help protect sensitive log data with masking](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/mask-sensitive-log-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.cloudwatch_log_data_protection_policy import CloudwatchLogDataProtectionPolicy
+from imports.aws.data_aws_cloudwatch_log_data_protection_policy_document import (
+    DataAwsCloudwatchLogDataProtectionPolicyDocument,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatement,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperation,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAudit,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestination,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestinationCloudwatchLogs,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestinationFirehose,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestinationS3,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationDeidentify,
+    DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationDeidentifyMaskConfig,
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsCloudwatchLogDataProtectionPolicyDocument(self, "example",
+            name="Example",
+            statement=[DataAwsCloudwatchLogDataProtectionPolicyDocumentStatement(
+                data_identifiers=["arn:aws:dataprotection::aws:data-identifier/EmailAddress", "arn:aws:dataprotection::aws:data-identifier/DriversLicense-US"
+                ],
+                operation=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperation(
+                    audit=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAudit(
+                        findings_destination=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestination(
+                            cloudwatch_logs=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestinationCloudwatchLogs(
+                                log_group=Token.as_string(aws_cloudwatch_log_group_audit.name)
+                            ),
+                            firehose=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestinationFirehose(
+                                delivery_stream=Token.as_string(aws_kinesis_firehose_delivery_stream_audit.name)
+                            ),
+                            s3=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationAuditFindingsDestinationS3(
+                                bucket=Token.as_string(aws_s3_bucket_audit.bucket)
+                            )
+                        )
+                    )
+                ),
+                sid="Audit"
+            ), DataAwsCloudwatchLogDataProtectionPolicyDocumentStatement(
+                data_identifiers=["arn:aws:dataprotection::aws:data-identifier/EmailAddress", "arn:aws:dataprotection::aws:data-identifier/DriversLicense-US"
+                ],
+                operation=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperation(
+                    deidentify=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationDeidentify(
+                        mask_config=DataAwsCloudwatchLogDataProtectionPolicyDocumentStatementOperationDeidentifyMaskConfig()
+                    )
+                ),
+                sid="Deidentify"
+            )
+            ]
+        )
+        aws_cloudwatch_log_data_protection_policy_example = CloudwatchLogDataProtectionPolicy(self, "example_1",
+            log_group_name=Token.as_string(aws_cloudwatch_log_group_example.name),
+            policy_document=Token.as_string(example.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_data_protection_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the data protection policy document.
+* `statement` - (Required) Configures the data protection policy.
+
+-> There must be exactly two statements: the first with an `audit` operation, and the second with a `deidentify` operation.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the data protection policy document.
+* `version` - (Optional) Version of the data protection policy document.
+
+### statement Configuration Block
+
+* `data_identifiers` - (Required) Set of at least one sensitive data identifier that you want to mask. Read more in [Types of data that you can protect](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/protect-sensitive-log-data-types.html).
+* `operation` - (Required) Configures the data protection operation applied by this statement.
+* `sid` - (Optional) Name of this statement.
+
+#### operation Configuration Block
+
+* `audit` - (Optional) Configures the detection of sensitive data.
+* `deidentify` - (Optional) Configures the masking of sensitive data.
+
+-> Every policy statement must specify exactly one operation.
+ +##### audit Configuration Block + +* `findings_destination` - (Required) Configures destinations to send audit findings to. + +##### findings_destination Configuration Block + +* `cloudwatch_logs` - (Optional) Configures CloudWatch Logs as a findings destination. +* `firehose` - (Optional) Configures Kinesis Firehose as a findings destination. +* `s3` - (Optional) Configures S3 as a findings destination. + +###### cloudwatch_logs Configuration Block + +* `log_group` - (Required) Name of the CloudWatch Log Group to send findings to. + +###### firehose Configuration Block + +* `delivery_stream` - (Required) Name of the Kinesis Firehose Delivery Stream to send findings to. + +###### s3 Configuration Block + +* `bucket` - (Required) Name of the S3 Bucket to send findings to. + +##### deidentify Configuration Block + +* `mask_config` - (Required) An empty object that configures masking. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `json` - Standard JSON policy document rendered based on the arguments above. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudwatch_log_group.html.markdown b/website/docs/cdktf/python/d/cloudwatch_log_group.html.markdown new file mode 100644 index 00000000000..29fe5603379 --- /dev/null +++ b/website/docs/cdktf/python/d/cloudwatch_log_group.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_group" +description: |- + Get information on a Cloudwatch Log Group. +--- + + + +# Data Source: aws_cloudwatch_log_group + +Use this data source to get information about an AWS Cloudwatch Log Group + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_cloudwatch_log_group import DataAwsCloudwatchLogGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCloudwatchLogGroup(self, "example", + name="MyImportantLogs" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the Cloudwatch log group + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Cloudwatch log group. Any `:*` suffix added by the API, denoting all CloudWatch Log Streams under the CloudWatch Log Group, is removed for greater compatibility with other AWS services that do not accept the suffix. +* `creation_time` - Creation time of the log group, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. +* `retention_in_days` - Number of days log events retained in the specified log group. +* `kms_key_id` - ARN of the KMS Key to use when encrypting log data. +* `tags` - Map of tags to assign to the resource. 
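+
+Because the exported `arn` has the `:*` suffix stripped, policies that should cover the group's log streams need the suffix re-appended. A minimal sketch (not `cdktf convert` output; the stack class, statement, and actions are illustrative assumptions) combining this data source with `aws_iam_policy_document`:
+
+```python
+# Minimal sketch: grant write access to the looked-up log group's streams by
+# re-appending the ':*' suffix that the data source strips from the ARN.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_cloudwatch_log_group import DataAwsCloudwatchLogGroup
+from imports.aws.data_aws_iam_policy_document import (
+    DataAwsIamPolicyDocument,
+    DataAwsIamPolicyDocumentStatement,
+)
+class LogGroupPolicySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsCloudwatchLogGroup(self, "example",
+            name="MyImportantLogs"
+        )
+        DataAwsIamPolicyDocument(self, "write_logs",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:CreateLogStream", "logs:PutLogEvents"],
+                resources=["${" + example.arn + "}:*"]
+            )
+            ]
+        )
+```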
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/cloudwatch_log_groups.html.markdown b/website/docs/cdktf/python/d/cloudwatch_log_groups.html.markdown new file mode 100644 index 00000000000..932ec5ca239 --- /dev/null +++ b/website/docs/cdktf/python/d/cloudwatch_log_groups.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_groups" +description: |- + Get list of Cloudwatch Log Groups. +--- + + + +# Data Source: aws_cloudwatch_log_groups + +Use this data source to get a list of AWS Cloudwatch Log Groups + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_cloudwatch_log_groups import DataAwsCloudwatchLogGroups +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCloudwatchLogGroups(self, "example", + log_group_name_prefix="/MyImportantLogs" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `log_group_name_prefix` - (Optional) Group prefix of the Cloudwatch log groups to list + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the Cloudwatch log groups +* `log_group_names` - Set of names of the Cloudwatch log groups + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/codeartifact_authorization_token.html.markdown b/website/docs/cdktf/python/d/codeartifact_authorization_token.html.markdown new file mode 100644 index 00000000000..cb1ca071f18 --- /dev/null +++ b/website/docs/cdktf/python/d/codeartifact_authorization_token.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "CodeArtifact" +layout: "aws" +page_title: "AWS: aws_codeartifact_authorization_token" +description: |- + Provides details about a CodeArtifact Authorization Token +--- + + + +# Data Source: aws_codeartifact_authorization_token + +The CodeArtifact Authorization Token data source generates a temporary authentication token for accessing repositories in a CodeArtifact domain. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_codeartifact_authorization_token import DataAwsCodeartifactAuthorizationToken +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCodeartifactAuthorizationToken(self, "test", + domain=Token.as_string(aws_codeartifact_domain_test.domain) + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `domain` - (Required) Name of the domain that is in scope for the generated authorization token. +* `domain_owner` - (Optional) Account number of the AWS account that owns the domain. +* `duration_seconds` - (Optional) Time, in seconds, that the generated authorization token is valid. Valid values are `0` and between `900` and `43200`. 
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `authorization_token` - Temporary authorization token. +* `expiration` - Time in UTC RFC3339 format when the authorization token expires. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/codeartifact_repository_endpoint.html.markdown b/website/docs/cdktf/python/d/codeartifact_repository_endpoint.html.markdown new file mode 100644 index 00000000000..aee9e871af6 --- /dev/null +++ b/website/docs/cdktf/python/d/codeartifact_repository_endpoint.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "CodeArtifact" +layout: "aws" +page_title: "AWS: aws_codeartifact_repository_endpoint" +description: |- + Provides details about a CodeArtifact Repository Endpoint +--- + + + +# Data Source: aws_codeartifact_repository_endpoint + +The CodeArtifact Repository Endpoint data source returns the endpoint of a repository for a specific package format. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_codeartifact_repository_endpoint import DataAwsCodeartifactRepositoryEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCodeartifactRepositoryEndpoint(self, "test", + domain=Token.as_string(aws_codeartifact_domain_test.domain), + format="npm", + repository=Token.as_string(aws_codeartifact_repository_test.repository) + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `domain` - (Required) Name of the domain that contains the repository. +* `repository` - (Required) Name of the repository. +* `format` - (Required) Which endpoint of a repository to return. A repository has one endpoint for each package format: `npm`, `pypi`, `maven`, and `nuget`. +* `domain_owner` - (Optional) Account number of the AWS account that owns the domain. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `repository_endpoint` - URL of the returned endpoint. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/codecommit_approval_rule_template.html.markdown b/website/docs/cdktf/python/d/codecommit_approval_rule_template.html.markdown new file mode 100644 index 00000000000..ab286652a20 --- /dev/null +++ b/website/docs/cdktf/python/d/codecommit_approval_rule_template.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "CodeCommit" +layout: "aws" +page_title: "AWS: aws_codecommit_approval_rule_template" +description: |- + Provides details about a specific CodeCommit Approval Rule Template. +--- + + + +# Data Source: aws_codecommit_approval_rule_template + +Provides details about a specific CodeCommit Approval Rule Template. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_codecommit_approval_rule_template import DataAwsCodecommitApprovalRuleTemplate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCodecommitApprovalRuleTemplate(self, "example",
+            name="MyExampleApprovalRuleTemplate"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name for the approval rule template. This needs to be less than 100 characters.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `approval_rule_template_id` - The ID of the approval rule template.
+* `content` - Content of the approval rule template.
+* `creation_date` - Date the approval rule template was created, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `description` - Description of the approval rule template.
+* `last_modified_date` - Date the approval rule template was most recently changed, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `last_modified_user` - ARN of the user who made the most recent changes to the approval rule template.
+* `rule_content_sha256` - SHA-256 hash signature for the content of the approval rule template.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/codecommit_repository.html.markdown b/website/docs/cdktf/python/d/codecommit_repository.html.markdown
new file mode 100644
index 00000000000..a1fb44ea6c0
--- /dev/null
+++ b/website/docs/cdktf/python/d/codecommit_repository.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "CodeCommit"
+layout: "aws"
+page_title: "AWS: aws_codecommit_repository"
+description: |-
+  Provides details about a CodeCommit Repository.
+---
+
+
+# Data Source: aws_codecommit_repository
+
+The CodeCommit Repository data source allows the ARN, repository ID, and the HTTPS and SSH clone URLs to be retrieved for a CodeCommit repository.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_codecommit_repository import DataAwsCodecommitRepository
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCodecommitRepository(self, "test",
+            repository_name="MyTestRepository"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `repository_name` - (Required) Name for the repository. This needs to be less than 100 characters.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `repository_id` - ID of the repository.
+* `arn` - ARN of the repository.
+* `clone_url_http` - URL to use for cloning the repository over HTTPS.
+* `clone_url_ssh` - URL to use for cloning the repository over SSH.
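+
+The clone URLs can then be handed to other tooling. A minimal sketch (not `cdktf convert` output; the stack class and output names are illustrative assumptions):
+
+```python
+# Minimal sketch: look up a repository and export its clone URLs as stack outputs.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_codecommit_repository import DataAwsCodecommitRepository
+class RepositoryUrlsSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = DataAwsCodecommitRepository(self, "test",
+            repository_name="MyTestRepository"
+        )
+        TerraformOutput(self, "clone_url_http", value=test.clone_url_http)
+        TerraformOutput(self, "clone_url_ssh", value=test.clone_url_ssh)
+```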
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/codestarconnections_connection.html.markdown b/website/docs/cdktf/python/d/codestarconnections_connection.html.markdown new file mode 100644 index 00000000000..a1dd4f68941 --- /dev/null +++ b/website/docs/cdktf/python/d/codestarconnections_connection.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "CodeStar Connections" +layout: "aws" +page_title: "AWS: aws_codestarconnections_connection" +description: |- + Provides details about CodeStar Connection +--- + + + +# Data Source: aws_codestarconnections_connection + +Provides details about CodeStar Connection. + +## Example Usage + +### By ARN + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_codestarconnections_connection import DataAwsCodestarconnectionsConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCodestarconnectionsConnection(self, "example", + arn=Token.as_string(aws_codestarconnections_connection_example.arn) + ) +``` + +### By Name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_codestarconnections_connection import DataAwsCodestarconnectionsConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsCodestarconnectionsConnection(self, "example", + name=Token.as_string(aws_codestarconnections_connection_example.name) + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) CodeStar Connection ARN. +* `name` - (Optional) CodeStar Connection name. + +~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `connection_status` - CodeStar Connection status. Possible values are `PENDING`, `AVAILABLE` and `ERROR`. +* `id` - CodeStar Connection ARN. +* `host_arn` - ARN of the host associated with the connection. +* `name` - Name of the CodeStar Connection. The name is unique in the calling AWS account. +* `provider_type` - Name of the external provider where your third-party code repository is configured. Possible values are `Bitbucket` and `GitHub`. For connections to a GitHub Enterprise Server instance, you must create an [aws_codestarconnections_host](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codestarconnections_host) resource and use `host_arn` instead. +* `tags` - Map of key-value resource tags to associate with the resource. 
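+
+The `connection_status` attribute is useful for checking whether a connection has completed the authorization handshake, since connections start out `PENDING`. A minimal sketch (not `cdktf convert` output; the stack class, connection name, and output name are illustrative assumptions):
+
+```python
+# Minimal sketch: look up a connection by name and export its status.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_codestarconnections_connection import DataAwsCodestarconnectionsConnection
+class ConnectionStatusSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsCodestarconnectionsConnection(self, "example",
+            name="example-connection"  # assumed connection name
+        )
+        TerraformOutput(self, "connection_status",
+            value=example.connection_status
+        )
+```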
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cognito_user_pool_client.markdown b/website/docs/cdktf/python/d/cognito_user_pool_client.markdown
new file mode 100644
index 00000000000..4bcfb6ccfbb
--- /dev/null
+++ b/website/docs/cdktf/python/d/cognito_user_pool_client.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_client"
+description: |-
+  Provides details about a Cognito User Pool Client
+---
+
+
+# Data Source: aws_cognito_user_pool_client
+
+Provides details about a Cognito User Pool Client.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cognito_user_pool_client import DataAwsCognitoUserPoolClient
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCognitoUserPoolClient(self, "client",
+            client_id="38fjsnc484p94kpqsnet7mpld0",
+            user_pool_id="us-west-2_aaaaaaaaa"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `client_id` - (Required) Client Id of the user pool.
+* `user_pool_id` - (Required) User pool the client belongs to.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `access_token_validity` - Time limit, between 5 minutes and 1 day, after which the access token is no longer valid and cannot be used. This value will be overridden if you have entered a value in `token_validity_units`.
+* `allowed_oauth_flows_user_pool_client` - Whether the client is allowed to follow the OAuth protocol when interacting with Cognito user pools.
+* `allowed_oauth_flows` - List of allowed OAuth flows (code, implicit, client_credentials).
+* `allowed_oauth_scopes` - List of allowed OAuth scopes (phone, email, openid, profile, and aws.cognito.signin.user.admin).
+* `analytics_configuration` - Configuration block for Amazon Pinpoint analytics for collecting metrics for this user pool. [Detailed below](#analytics_configuration).
+* `callback_urls` - List of allowed callback URLs for the identity providers.
+* `client_secret` - Client secret of the user pool client.
+* `default_redirect_uri` - Default redirect URI. Must be in the list of callback URLs.
+* `enable_token_revocation` - Whether token revocation is enabled.
+* `explicit_auth_flows` - List of authentication flows (ADMIN_NO_SRP_AUTH, CUSTOM_AUTH_FLOW_ONLY, USER_PASSWORD_AUTH, ALLOW_ADMIN_USER_PASSWORD_AUTH, ALLOW_CUSTOM_AUTH, ALLOW_USER_PASSWORD_AUTH, ALLOW_USER_SRP_AUTH, ALLOW_REFRESH_TOKEN_AUTH).
+* `generate_secret` - Whether an application secret was generated.
+* `id_token_validity` - Time limit, between 5 minutes and 1 day, after which the ID token is no longer valid and cannot be used. This value will be overridden if you have entered a value in `token_validity_units`.
+* `logout_urls` - List of allowed logout URLs for the identity providers.
+* `prevent_user_existence_errors` - Choose which errors and responses are returned by Cognito APIs during authentication, account confirmation, and password recovery when the user does not exist in the user pool.
When set to `ENABLED` and the user does not exist, authentication returns an error indicating either the username or password was incorrect, and account confirmation and password recovery return a response indicating a code was sent to a simulated destination. When set to `LEGACY`, those APIs will return a `UserNotFoundException` exception if the user does not exist in the user pool.
+* `read_attributes` - List of user pool attributes the application client can read from.
+* `refresh_token_validity` - Time limit, in days, that refresh tokens are valid for.
+* `supported_identity_providers` - List of provider names for the identity providers that are supported on this client. Uses the `provider_name` attribute of `aws_cognito_identity_provider` resource(s), or the equivalent string(s).
+* `token_validity_units` - Configuration block for the units in which the validity times are represented. [Detailed below](#token_validity_units).
+* `write_attributes` - List of user pool attributes the application client can write to.
+
+### analytics_configuration
+
+Either `application_arn` or `application_id` is required.
+
+* `application_arn` - Application ARN for an Amazon Pinpoint application. Conflicts with `external_id` and `role_arn`.
+* `application_id` - Application ID for an Amazon Pinpoint application.
+* `external_id` - ID for the Analytics Configuration. Conflicts with `application_arn`.
+* `role_arn` - ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics. Conflicts with `application_arn`.
+* `user_data_shared` - If set to `true`, Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
+
+### token_validity_units
+
+Valid values for the following arguments are: `seconds`, `minutes`, `hours` or `days`.
+
+* `access_token` - Time unit for the value in `access_token_validity`, defaults to `hours`.
+* `id_token` - Time unit for the value in `id_token_validity`, defaults to `hours`.
+* `refresh_token` - Time unit for the value in `refresh_token_validity`, defaults to `days`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cognito_user_pool_clients.markdown b/website/docs/cdktf/python/d/cognito_user_pool_clients.markdown
new file mode 100644
index 00000000000..cae15c11625
--- /dev/null
+++ b/website/docs/cdktf/python/d/cognito_user_pool_clients.markdown
@@ -0,0 +1,45 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_clients"
+description: |-
+  Get list of Cognito user pool clients connected to a user pool.
+---
+
+
+
+# Data Source: aws_cognito_user_pool_clients
+
+Use this data source to get a list of Cognito user pool clients for a Cognito IdP user pool.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cognito_user_pool_clients import DataAwsCognitoUserPoolClients
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCognitoUserPoolClients(self, "main",
+            user_pool_id=Token.as_string(aws_cognito_user_pool_main.id)
+        )
+```
+
+## Argument Reference
+
+* `user_pool_id` - (Required) Cognito user pool ID.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `client_ids` - List of Cognito user pool client IDs.
+* `client_names` - List of Cognito user pool client names.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cognito_user_pool_signing_certificate.markdown b/website/docs/cdktf/python/d/cognito_user_pool_signing_certificate.markdown
new file mode 100644
index 00000000000..bc1e3c99e82
--- /dev/null
+++ b/website/docs/cdktf/python/d/cognito_user_pool_signing_certificate.markdown
@@ -0,0 +1,44 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_signing_certificate"
+description: |-
+  Get signing certificate of user pool
+---
+
+
+
+# Data Source: aws_cognito_user_pool_signing_certificate
+
+Use this data source to get the signing certificate for a Cognito IdP user pool.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cognito_user_pool_signing_certificate import DataAwsCognitoUserPoolSigningCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCognitoUserPoolSigningCertificate(self, "sc",
+            user_pool_id=my_pool.id
+        )
+```
+
+## Argument Reference
+
+* `user_pool_id` - (Required) Cognito user pool ID.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `certificate` - Certificate string.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cognito_user_pools.markdown b/website/docs/cdktf/python/d/cognito_user_pools.markdown
new file mode 100644
index 00000000000..08a017ba528
--- /dev/null
+++ b/website/docs/cdktf/python/d/cognito_user_pools.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pools"
+description: |-
+  Get list of Cognito user pools.
+---
+
+
+
+# Data Source: aws_cognito_user_pools
+
+Use this data source to get a list of Cognito user pools.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_authorizer import ApiGatewayAuthorizer
+from imports.aws.data_aws_api_gateway_rest_api import DataAwsApiGatewayRestApi
+from imports.aws.data_aws_cognito_user_pools import DataAwsCognitoUserPools
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        selected = DataAwsApiGatewayRestApi(self, "selected",
+            name=api_gateway_name.string_value
+        )
+        data_aws_cognito_user_pools_selected = DataAwsCognitoUserPools(self, "selected_1",
+            name=cognito_user_pool_name.string_value
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_cognito_user_pools_selected.override_logical_id("selected")
+        ApiGatewayAuthorizer(self, "cognito",
+            name="cognito",
+            provider_arns=Token.as_list(data_aws_cognito_user_pools_selected.arns),
+            rest_api_id=Token.as_string(selected.id),
+            type="COGNITO_USER_POOLS"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the Cognito user pools. Name is not a unique attribute for Cognito user pools, so multiple pools might be returned with the given name. If the pool name is expected to be unique, you can reference the pool ID via `tolist(data.aws_cognito_user_pools.selected.ids)[0]`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - Set of Cognito user pool IDs.
+* `arns` - Set of Cognito user pool Amazon Resource Names (ARNs).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/connect_bot_association.markdown b/website/docs/cdktf/python/d/connect_bot_association.markdown
new file mode 100644
index 00000000000..80f9b35f2b8
--- /dev/null
+++ b/website/docs/cdktf/python/d/connect_bot_association.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_bot_association"
+description: |-
+  Provides details about a specific Lex (V1) Bot associated with an Amazon Connect instance
+---
+
+
+
+# Data Source: aws_connect_bot_association
+
+Provides details about a specific Lex (V1) Bot associated with an Amazon Connect instance.
+
+## Example Usage
+
+### By name
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_bot_association import DataAwsConnectBotAssociation, DataAwsConnectBotAssociationLexBot
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectBotAssociation(self, "example",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            lex_bot=DataAwsConnectBotAssociationLexBot(
+                name="Test"
+            )
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `instance_id` - (Required) Identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
+* `lex_bot` - (Required) Configuration information of an Amazon Lex (V1) bot. Detailed below.
+
+### lex_bot
+
+The `lex_bot` configuration block supports the following:
+
+* `name` - (Required) Name of the Amazon Lex (V1) bot.
+* `lex_region` - (Optional) Region that the Amazon Lex (V1) bot was created in.
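+
+As a further sketch (not generated by `cdktf convert`; the identifiers and region below are placeholders), `lex_region` can be set explicitly when the bot lives in a different region than the provider default:
+
+```python
+# A minimal sketch: looks up a Lex (V1) bot association for a bot created in
+# another region. All identifiers below are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_connect_bot_association import DataAwsConnectBotAssociation, DataAwsConnectBotAssociationLexBot
+class CrossRegionBotStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectBotAssociation(self, "example",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            lex_bot=DataAwsConnectBotAssociationLexBot(
+                name="Test",
+                lex_region="us-west-2"  # placeholder region
+            )
+        )
+```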
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_contact_flow.html.markdown b/website/docs/cdktf/python/d/connect_contact_flow.html.markdown new file mode 100644 index 00000000000..3a65e0d4448 --- /dev/null +++ b/website/docs/cdktf/python/d/connect_contact_flow.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_contact_flow" +description: |- + Provides details about a specific Amazon Connect Contact Flow. +--- + + + +# Data Source: aws_connect_contact_flow + +Provides details about a specific Amazon Connect Contact Flow. + +## Example Usage + +By name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_contact_flow import DataAwsConnectContactFlow +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectContactFlow(self, "test", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Test" + ) +``` + +By contact_flow_id + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_contact_flow import DataAwsConnectContactFlow +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectContactFlow(self, "test", + contact_flow_id="cccccccc-bbbb-cccc-dddd-111111111111", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +~> **NOTE:** `instance_id` and one of either `name` or `contact_flow_id` is required. + +This argument supports the following arguments: + +* `contact_flow_id` - (Optional) Returns information on a specific Contact Flow by contact flow id +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Optional) Returns information on a specific Contact Flow by name + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Contact Flow. +* `content` - Logic of the Contact Flow. +* `description` - Description of the Contact Flow. +* `tags` - Tags to assign to the Contact Flow. +* `type` - Type of Contact Flow. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_contact_flow_module.html.markdown b/website/docs/cdktf/python/d/connect_contact_flow_module.html.markdown new file mode 100644 index 00000000000..cc666bbf127 --- /dev/null +++ b/website/docs/cdktf/python/d/connect_contact_flow_module.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_contact_flow_module" +description: |- + Provides details about a specific Amazon Connect Contact Flow Module. +--- + + + +# Data Source: aws_connect_contact_flow_module + +Provides details about a specific Amazon Connect Contact Flow Module. 
+
+## Example Usage
+
+By `name`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_contact_flow_module import DataAwsConnectContactFlowModule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectContactFlowModule(self, "example",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="example"
+        )
+```
+
+By `contact_flow_module_id`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_contact_flow_module import DataAwsConnectContactFlowModule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectContactFlowModule(self, "example",
+            contact_flow_module_id="cccccccc-bbbb-cccc-dddd-111111111111",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111"
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** `instance_id` and one of either `name` or `contact_flow_module_id` is required.
+
+This argument supports the following arguments:
+
+* `contact_flow_module_id` - (Optional) Returns information on a specific Contact Flow Module by contact flow module id
+* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance
+* `name` - (Optional) Returns information on a specific Contact Flow Module by name
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Contact Flow Module.
+* `content` - Logic of the Contact Flow Module.
+* `description` - Description of the Contact Flow Module.
+* `tags` - Map of tags to assign to the Contact Flow Module.
+* `state` - State of the Contact Flow Module. Values are either `ACTIVE` or `ARCHIVED`.
+* `status` - Status of the Contact Flow Module. Values are either `PUBLISHED` or `SAVED`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/connect_hours_of_operation.html.markdown b/website/docs/cdktf/python/d/connect_hours_of_operation.html.markdown
new file mode 100644
index 00000000000..961823cd2ab
--- /dev/null
+++ b/website/docs/cdktf/python/d/connect_hours_of_operation.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_hours_of_operation"
+description: |-
+  Provides details about a specific Amazon Connect Hours of Operation.
+---
+
+
+
+# Data Source: aws_connect_hours_of_operation
+
+Provides details about a specific Amazon Connect Hours of Operation.
+
+## Example Usage
+
+By `name`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_hours_of_operation import DataAwsConnectHoursOfOperation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectHoursOfOperation(self, "test",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="Test"
+        )
+```
+
+By `hours_of_operation_id`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_hours_of_operation import DataAwsConnectHoursOfOperation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectHoursOfOperation(self, "test",
+            hours_of_operation_id="cccccccc-bbbb-cccc-dddd-111111111111",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111"
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** `instance_id` and one of either `name` or `hours_of_operation_id` is required.
+
+This argument supports the following arguments:
+
+* `hours_of_operation_id` - (Optional) Returns information on a specific Hours of Operation by hours of operation id
+* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance
+* `name` - (Optional) Returns information on a specific Hours of Operation by name
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Hours of Operation.
+* `config` - Configuration information for the hours of operation: day, start time, and end time. Config blocks are documented below.
+* `description` - Description of the Hours of Operation.
+* `hours_of_operation_id` - The identifier for the hours of operation.
+* `instance_id` - Identifier of the hosting Amazon Connect Instance.
+* `name` - Name of the Hours of Operation.
+* `tags` - Map of tags to assign to the Hours of Operation.
+* `time_zone` - Time zone of the Hours of Operation.
+
+A `config` block supports the following arguments:
+
+* `day` - Day that the hours of operation applies to.
+* `end_time` - End time block specifies the time that your contact center closes. The `end_time` is documented below.
+* `start_time` - Start time block specifies the time that your contact center opens. The `start_time` is documented below.
+
+An `end_time` block supports the following arguments:
+
+* `hours` - Hour of closing.
+* `minutes` - Minute of closing.
+
+A `start_time` block supports the following arguments:
+
+* `hours` - Hour of opening.
+* `minutes` - Minute of opening.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/connect_instance.html.markdown b/website/docs/cdktf/python/d/connect_instance.html.markdown
new file mode 100644
index 00000000000..eacb3a134c4
--- /dev/null
+++ b/website/docs/cdktf/python/d/connect_instance.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_instance"
+description: |-
+  Provides details about a specific Connect Instance.
+---
+
+
+
+# Data Source: aws_connect_instance
+
+Provides details about a specific Amazon Connect Instance.
+
+## Example Usage
+
+By instance_alias
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_instance import DataAwsConnectInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectInstance(self, "foo",
+            instance_alias="foo"
+        )
+```
+
+By instance_id
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_instance import DataAwsConnectInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectInstance(self, "foo",
+            instance_id="97afc98d-101a-ba98-ab97-ae114fc115ec"
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** One of either `instance_id` or `instance_alias` is required.
+
+This argument supports the following arguments:
+
+* `instance_id` - (Optional) Returns information on a specific connect instance by id
+* `instance_alias` - (Optional) Returns information on a specific connect instance by alias
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `created_time` - When the instance was created.
+* `arn` - ARN of the instance.
+* `identity_management_type` - Specifies the identity management type attached to the instance.
+* `inbound_calls_enabled` - Whether inbound calls are enabled.
+* `outbound_calls_enabled` - Whether outbound calls are enabled.
+* `early_media_enabled` - Whether early media for outbound calls is enabled.
+* `contact_flow_logs_enabled` - Whether contact flow logs are enabled.
+* `contact_lens_enabled` - Whether Contact Lens is enabled.
+* `auto_resolve_best_voices` - Whether auto-resolve best voices is enabled.
+* `multi_party_conference_enabled` - Whether multi-party calls/conference is enabled.
+* `use_custom_tts_voices` - Whether use of custom TTS voices is enabled.
+* `status` - State of the instance.
+* `service_role` - Service role of the instance.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/connect_instance_storage_config.html.markdown b/website/docs/cdktf/python/d/connect_instance_storage_config.html.markdown
new file mode 100644
index 00000000000..b27cd165ba7
--- /dev/null
+++ b/website/docs/cdktf/python/d/connect_instance_storage_config.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_instance_storage_config"
+description: |-
+  Provides details about a specific Amazon Connect Instance Storage Config.
+---
+
+
+
+# Data Source: aws_connect_instance_storage_config
+
+Provides details about a specific Amazon Connect Instance Storage Config.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_connect_instance_storage_config import DataAwsConnectInstanceStorageConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectInstanceStorageConfig(self, "example", + association_id="1234567890123456789012345678901234567890123456789012345678901234", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + resource_type="CONTACT_TRACE_RECORDS" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `association_id` - (Required) The existing association identifier that uniquely identifies the resource type and storage config for the given instance ID. +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `resource_type` - (Required) A valid resource type. Valid Values: `CHAT_TRANSCRIPTS` | `CALL_RECORDINGS` | `SCHEDULED_REPORTS` | `MEDIA_STREAMS` | `CONTACT_TRACE_RECORDS` | `AGENT_EVENTS` | `REAL_TIME_CONTACT_ANALYSIS_SEGMENTS`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - The identifier of the hosting Amazon Connect Instance, `association_id`, and `resource_type` separated by a colon (`:`). +* `storage_config` - Specifies the storage configuration options for the Connect Instance. [Documented below](#storage_config). + +### `storage_config` + +The `storage_config` configuration block supports the following arguments: + +* `kinesis_firehose_config` - A block that specifies the configuration of the Kinesis Firehose delivery stream. [Documented below](#kinesis_firehose_config). +* `kinesis_stream_config` - A block that specifies the configuration of the Kinesis data stream. [Documented below](#kinesis_stream_config). +* `kinesis_video_stream_config` - A block that specifies the configuration of the Kinesis video stream. [Documented below](#kinesis_video_stream_config). +* `s3_config` - A block that specifies the configuration of S3 Bucket. [Documented below](#s3_config). +* `storage_type` - A valid storage type. Valid Values: `S3` | `KINESIS_VIDEO_STREAM` | `KINESIS_STREAM` | `KINESIS_FIREHOSE`. + +#### `kinesis_firehose_config` + +The `kinesis_firehose_config` configuration block supports the following arguments: + +* `firehose_arn` - The Amazon Resource Name (ARN) of the delivery stream. + +#### `kinesis_stream_config` + +The `kinesis_stream_config` configuration block supports the following arguments: + +* `stream_arn` - The Amazon Resource Name (ARN) of the data stream. + +#### `kinesis_video_stream_config` + +The `kinesis_video_stream_config` configuration block supports the following arguments: + +* `encryption_config` - The encryption configuration. [Documented below](#encryption_config). +* `prefix` - The prefix of the video stream. Minimum length of `1`. Maximum length of `128`. When read from the state, the value returned is `-connect--contact-` since the API appends additional details to the `prefix`. +* `retention_period_hours` - The number of hours to retain the data in a data store associated with the stream. Minimum value of `0`. Maximum value of `87600`. A value of `0` indicates that the stream does not persist data. + +#### `s3_config` + +The `s3_config` configuration block supports the following arguments: + +* `bucket_name` - The S3 bucket name. +* `bucket_prefix` - The S3 bucket prefix. +* `encryption_config` - The encryption configuration. [Documented below](#encryption_config). 
+ +#### `encryption_config` + +The `encryption_config` configuration block supports the following arguments: + +* `encryption_type` - The type of encryption. Valid Values: `KMS`. +* `key_id` - The full ARN of the encryption key. Be sure to provide the full ARN of the encryption key, not just the ID. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_lambda_function_association.markdown b/website/docs/cdktf/python/d/connect_lambda_function_association.markdown new file mode 100644 index 00000000000..19e65efad58 --- /dev/null +++ b/website/docs/cdktf/python/d/connect_lambda_function_association.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_lambda_function_association" +description: |- + Provides details about a specific Connect Lambda Function Association. +--- + + + +# Data Source: aws_connect_lambda_function_association + +Provides details about a specific Connect Lambda Function Association. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_lambda_function_association import DataAwsConnectLambdaFunctionAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectLambdaFunctionAssociation(self, "example", + function_arn="arn:aws:lambda:us-west-2:123456789123:function:abcdefg", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `function_arn` - (Required) ARN of the Lambda Function, omitting any version or alias qualifier. +* `instance_id` - (Required) Identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_prompt.html.markdown b/website/docs/cdktf/python/d/connect_prompt.html.markdown new file mode 100644 index 00000000000..b113f0a056a --- /dev/null +++ b/website/docs/cdktf/python/d/connect_prompt.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_prompt" +description: |- + Provides details about a specific Amazon Connect Prompt. +--- + + + +# Data Source: aws_connect_prompt + +Provides details about a specific Amazon Connect Prompt. + +## Example Usage + +By `name` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_connect_prompt import DataAwsConnectPrompt +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectPrompt(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Beep.wav" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Required) Returns information on a specific Prompt by name + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Prompt. +* `prompt_id` - Identifier for the prompt. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_queue.markdown b/website/docs/cdktf/python/d/connect_queue.markdown new file mode 100644 index 00000000000..ebabb5e928e --- /dev/null +++ b/website/docs/cdktf/python/d/connect_queue.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_queue" +description: |- + Provides details about a specific Amazon Connect Queue. +--- + + + +# Data Source: aws_connect_queue + +Provides details about a specific Amazon Connect Queue. + +## Example Usage + +By `name` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_queue import DataAwsConnectQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectQueue(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example" + ) +``` + +By `queue_id` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_queue import DataAwsConnectQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectQueue(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + queue_id="cccccccc-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +~> **NOTE:** `instance_id` and one of either `name` or `queue_id` is required. + +This argument supports the following arguments: + +* `queue_id` - (Optional) Returns information on a specific Queue by Queue id +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Optional) Returns information on a specific Queue by name + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Queue. +* `description` - Description of the Queue. +* `hours_of_operation_id` - Specifies the identifier of the Hours of Operation. +* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Queue separated by a colon (`:`). +* `max_contacts` - Maximum number of contacts that can be in the queue before it is considered full. Minimum value of 0. +* `outbound_caller_config` - A block that defines the outbound caller ID name, number, and outbound whisper flow. 
The Outbound Caller Config block is documented below.
+* `queue_id` - Identifier for the Queue.
+* `status` - Status of the Queue. Values are `ENABLED` or `DISABLED`.
+* `tags` - Map of tags assigned to the Queue.
+
+An `outbound_caller_config` block supports the following arguments:
+
+* `outbound_caller_id_name` - Specifies the caller ID name.
+* `outbound_caller_id_number_id` - Specifies the caller ID number.
+* `outbound_flow_id` - Outbound whisper flow to be used during an outbound call.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/connect_quick_connect.markdown b/website/docs/cdktf/python/d/connect_quick_connect.markdown
new file mode 100644
index 00000000000..b72ebc7780d
--- /dev/null
+++ b/website/docs/cdktf/python/d/connect_quick_connect.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_quick_connect"
+description: |-
+  Provides details about a specific Amazon Connect Quick Connect.
+---
+
+
+
+# Data Source: aws_connect_quick_connect
+
+Provides details about a specific Amazon Connect Quick Connect.
+
+## Example Usage
+
+By `name`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_quick_connect import DataAwsConnectQuickConnect
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectQuickConnect(self, "example",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="Example"
+        )
+```
+
+By `quick_connect_id`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_quick_connect import DataAwsConnectQuickConnect
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectQuickConnect(self, "example",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            quick_connect_id="cccccccc-bbbb-cccc-dddd-111111111111"
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** `instance_id` and one of either `name` or `quick_connect_id` is required.
+
+This argument supports the following arguments:
+
+* `quick_connect_id` - (Optional) Returns information on a specific Quick Connect by Quick Connect id
+* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance
+* `name` - (Optional) Returns information on a specific Quick Connect by name
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Quick Connect.
+* `description` - Description of the Quick Connect.
+* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Quick Connect separated by a colon (`:`).
+* `quick_connect_config` - A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, or `user_config`. The Quick Connect Config block is documented below.
+* `quick_connect_id` - Identifier for the Quick Connect.
+* `tags` - Map of tags to assign to the Quick Connect.
+
+A `quick_connect_config` block contains the following arguments:
+
+* `quick_connect_type` - Configuration type of the Quick Connect. Valid values are `PHONE_NUMBER`, `QUEUE`, `USER`.
+* `phone_config` - Phone configuration of the Quick Connect. This is returned only if `quick_connect_type` is `PHONE_NUMBER`. The `phone_config` block is documented below.
+* `queue_config` - Queue configuration of the Quick Connect. This is returned only if `quick_connect_type` is `QUEUE`. The `queue_config` block is documented below.
+* `user_config` - User configuration of the Quick Connect. This is returned only if `quick_connect_type` is `USER`. The `user_config` block is documented below.
+
+A `phone_config` block contains the following arguments:
+
+* `phone_number` - Phone number in E.164 format.
+
+A `queue_config` block contains the following arguments:
+
+* `contact_flow_id` - Identifier of the contact flow.
+* `queue_id` - Identifier for the queue.
+
+A `user_config` block contains the following arguments:
+
+* `contact_flow_id` - Identifier of the contact flow.
+* `user_id` - Identifier for the user.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/connect_routing_profile.html.markdown b/website/docs/cdktf/python/d/connect_routing_profile.html.markdown
new file mode 100644
index 00000000000..baac85d18a8
--- /dev/null
+++ b/website/docs/cdktf/python/d/connect_routing_profile.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_routing_profile"
+description: |-
+  Provides details about a specific Amazon Connect Routing Profile.
+---
+
+
+
+# Data Source: aws_connect_routing_profile
+
+Provides details about a specific Amazon Connect Routing Profile.
+
+## Example Usage
+
+By `name`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_routing_profile import DataAwsConnectRoutingProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectRoutingProfile(self, "example",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="Example"
+        )
+```
+
+By `routing_profile_id`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_connect_routing_profile import DataAwsConnectRoutingProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsConnectRoutingProfile(self, "example",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            routing_profile_id="cccccccc-bbbb-cccc-dddd-111111111111"
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** `instance_id` and one of either `name` or `routing_profile_id` is required.
+
+This argument supports the following arguments:
+
+* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance
+* `name` - (Optional) Returns information on a specific Routing Profile by name
+* `routing_profile_id` - (Optional) Returns information on a specific Routing Profile by Routing Profile id
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Routing Profile.
+* `default_outbound_queue_id` - Specifies the default outbound queue for the Routing Profile.
+* `description` - Description of the Routing Profile.
+* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Routing Profile separated by a colon (`:`).
+* `media_concurrencies` - One or more `media_concurrencies` blocks that specify the channels that agents can handle in the Contact Control Panel (CCP) for this Routing Profile. The `media_concurrencies` block is documented below.
+* `queue_configs` - One or more `queue_configs` blocks that specify the inbound queues associated with the routing profile. If no queue is added, the agent can only make outbound calls. The `queue_configs` block is documented below.
+* `tags` - Map of tags to assign to the Routing Profile.
+
+A `media_concurrencies` block supports the following attributes:
+
+* `channel` - Channels that agents can handle in the Contact Control Panel (CCP). Valid values are `VOICE`, `CHAT`, `TASK`.
+* `concurrency` - Number of contacts an agent can have on a channel simultaneously. Valid Range for `VOICE`: Minimum value of 1. Maximum value of 1. Valid Range for `CHAT`: Minimum value of 1. Maximum value of 10. Valid Range for `TASK`: Minimum value of 1. Maximum value of 10.
+
+A `queue_configs` block supports the following attributes:
+
+* `channel` - Channels agents can handle in the Contact Control Panel (CCP) for this routing profile. Valid values are `VOICE`, `CHAT`, `TASK`.
+* `delay` - Delay, in seconds, that a contact should be in the queue before they are routed to an available agent.
+* `priority` - Order in which contacts are to be handled for the queue.
+* `queue_arn` - ARN for the queue.
+* `queue_id` - Identifier for the queue.
+* `queue_name` - Name for the queue.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/connect_security_profile.html.markdown b/website/docs/cdktf/python/d/connect_security_profile.html.markdown
new file mode 100644
index 00000000000..8b04db53a69
--- /dev/null
+++ b/website/docs/cdktf/python/d/connect_security_profile.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_security_profile"
+description: |-
+  Provides details about a specific Amazon Connect Security Profile.
+---
+
+
+
+# Data Source: aws_connect_security_profile
+
+Provides details about a specific Amazon Connect Security Profile.
+
+## Example Usage
+
+By `name`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_connect_security_profile import DataAwsConnectSecurityProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectSecurityProfile(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example" + ) +``` + +By `security_profile_id` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_security_profile import DataAwsConnectSecurityProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectSecurityProfile(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + security_profile_id="cccccccc-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +~> **NOTE:** `instance_id` and one of either `name` or `security_profile_id` is required. + +This argument supports the following arguments: + +* `security_profile_id` - (Optional) Returns information on a specific Security Profile by Security Profile id +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Optional) Returns information on a specific Security Profile by name + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Security Profile. +* `description` - Description of the Security Profile. +* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Security Profile separated by a colon (`:`). +* `organization_resource_id` - The organization resource identifier for the security profile. +* `permissions` - List of permissions assigned to the security profile. +* `tags` - Map of tags to assign to the Security Profile. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_user.html.markdown b/website/docs/cdktf/python/d/connect_user.html.markdown new file mode 100644 index 00000000000..bdcd591fbd4 --- /dev/null +++ b/website/docs/cdktf/python/d/connect_user.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user" +description: |- + Provides details about a specific Amazon Connect User. +--- + + + +# Data Source: aws_connect_user + +Provides details about a specific Amazon Connect User. + +## Example Usage + +By `name` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_user import DataAwsConnectUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectUser(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example" + ) +``` + +By `user_id` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_connect_user import DataAwsConnectUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectUser(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + user_id="cccccccc-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +~> **NOTE:** `instance_id` and one of either `name` or `user_id` is required. + +This argument supports the following arguments: + +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Optional) Returns information on a specific User by name +* `user_id` - (Optional) Returns information on a specific User by User id + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the User. +* `directory_user_id` - The identifier of the user account in the directory used for identity management. +* `hierarchy_group_id` - The identifier of the hierarchy group for the user. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the User separated by a colon (`:`). +* `identity_info` - A block that contains information about the identity of the user. [Documented below](#identity_info). +* `instance_id` - Specifies the identifier of the hosting Amazon Connect Instance. +* `phone_config` - A block that contains information about the phone settings for the user. [Documented below](#phone_config). +* `routing_profile_id` - The identifier of the routing profile for the user. +* `security_profile_ids` - A list of identifiers for the security profiles for the user. +* `tags` - A map of tags to assign to the User. + +### `identity_info` + +An `identity_info` block supports the following attributes: + +* `email` - The email address. +* `first_name` - The first name. +* `last_name` - The last name. + +### `phone_config` + +A `phone_config` block supports the following attributes: + +* `after_contact_work_time_limit` - The After Call Work (ACW) timeout setting, in seconds. +* `auto_accept` - When Auto-Accept Call is enabled for an available agent, the agent connects to contacts automatically. +* `desk_phone_number` - The phone number for the user's desk phone. +* `phone_type` - The phone type. Valid values are `DESK_PHONE` and `SOFT_PHONE`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_user_hierarchy_group.html.markdown b/website/docs/cdktf/python/d/connect_user_hierarchy_group.html.markdown new file mode 100644 index 00000000000..c1a6ccb89a3 --- /dev/null +++ b/website/docs/cdktf/python/d/connect_user_hierarchy_group.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user_hierarchy_group" +description: |- + Provides details about a specific Amazon Connect User Hierarchy Group. +--- + + + +# Data Source: aws_connect_user_hierarchy_group + +Provides details about a specific Amazon Connect User Hierarchy Group. + +## Example Usage + +By `name` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_connect_user_hierarchy_group import DataAwsConnectUserHierarchyGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectUserHierarchyGroup(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example" + ) +``` + +By `hierarchy_group_id` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_user_hierarchy_group import DataAwsConnectUserHierarchyGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectUserHierarchyGroup(self, "example", + hierarchy_group_id="cccccccc-bbbb-cccc-dddd-111111111111", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +~> **NOTE:** `instance_id` and one of either `name` or `hierarchy_group_id` is required. + +This argument supports the following arguments: + +* `hierarchy_group_id` - (Optional) Returns information on a specific hierarchy group by hierarchy group id +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Optional) Returns information on a specific hierarchy group by name + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the hierarchy group. +* `hierarchy_path` - Block that contains information about the levels in the hierarchy group. The `hierarchy_path` block is documented below. +* `level_id` - Identifier of the level in the hierarchy group. +* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the hierarchy group separated by a colon (`:`). +* `tags` - Map of tags to assign to the hierarchy group. + +A `hierarchy_path` block supports the following attributes: + +* `level_one` - Details of level one. See below. +* `level_two` - Details of level two. See below. +* `level_three` - Details of level three. See below. +* `level_four` - Details of level four. See below. +* `level_five` - Details of level five. See below. + +A level block supports the following attributes: + +* `arn` - ARN of the hierarchy group. +* `id` - The identifier of the hierarchy group. +* `name` - Name of the hierarchy group. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_user_hierarchy_structure.html.markdown b/website/docs/cdktf/python/d/connect_user_hierarchy_structure.html.markdown new file mode 100644 index 00000000000..80ff29adb44 --- /dev/null +++ b/website/docs/cdktf/python/d/connect_user_hierarchy_structure.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user_hierarchy_structure" +description: |- + Provides details about a specific Amazon Connect User Hierarchy Structure +--- + + + +# Data Source: aws_connect_user_hierarchy_structure + +Provides details about a specific Amazon Connect User Hierarchy Structure + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_connect_user_hierarchy_structure import DataAwsConnectUserHierarchyStructure +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectUserHierarchyStructure(self, "test", + instance_id=Token.as_string(aws_connect_instance_test.id) + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `hierarchy_structure` - Block that defines the hierarchy structure's levels. The `hierarchy_structure` block is documented below. + +A `hierarchy_structure` block supports the following attributes: + +* `level_one` - Details of level one. See below. +* `level_two` - Details of level two. See below. +* `level_three` - Details of level three. See below. +* `level_four` - Details of level four. See below. +* `level_five` - Details of level five. See below. + +Each level block supports the following attributes: + +* `arn` - ARN of the hierarchy level. +* `id` - The identifier of the hierarchy level. +* `name` - Name of the user hierarchy level. Must not be more than 50 characters. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/connect_vocabulary.html.markdown b/website/docs/cdktf/python/d/connect_vocabulary.html.markdown new file mode 100644 index 00000000000..02683c8a0f4 --- /dev/null +++ b/website/docs/cdktf/python/d/connect_vocabulary.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_vocabulary" +description: |- + Provides details about a specific Amazon Connect Vocabulary. +--- + + + +# Data Source: aws_connect_vocabulary + +Provides details about a specific Amazon Connect Vocabulary. + +## Example Usage + +By `name` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_vocabulary import DataAwsConnectVocabulary +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectVocabulary(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example" + ) +``` + +By `vocabulary_id` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_connect_vocabulary import DataAwsConnectVocabulary +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsConnectVocabulary(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + vocabulary_id="cccccccc-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +~> **NOTE:** `instance_id` and one of either `name` or `vocabulary_id` is required. 
+ +This argument supports the following arguments: + +* `instance_id` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Optional) Returns information on a specific Vocabulary by name +* `vocabulary_id` - (Optional) Returns information on a specific Vocabulary by Vocabulary id + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Vocabulary. +* `content` - The content of the custom vocabulary in plain-text format with a table of values. Each row in the table represents a word or a phrase, described with Phrase, IPA, SoundsLike, and DisplayAs fields. Separate the fields with TAB characters. For more information, see [Create a custom vocabulary using a table](https://docs.aws.amazon.com/transcribe/latest/dg/custom-vocabulary.html#create-vocabulary-table). +* `failure_reason` - The reason why the custom vocabulary was not created. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the vocabulary +separated by a colon (`:`). +* `language_code` - The language code of the vocabulary entries. For a list of languages and their corresponding language codes, see [What is Amazon Transcribe?](https://docs.aws.amazon.com/transcribe/latest/dg/transcribe-whatis.html). Valid Values are `ar-AE`, `de-CH`, `de-DE`, `en-AB`, `en-AU`, `en-GB`, `en-IE`, `en-IN`, `en-US`, `en-WL`, `es-ES`, `es-US`, `fr-CA`, `fr-FR`, `hi-IN`, `it-IT`, `ja-JP`, `ko-KR`, `pt-BR`, `pt-PT`, `zh-CN`. +* `last_modified_time` - The timestamp when the custom vocabulary was last modified. +* `state` - The current state of the custom vocabulary. Valid values are `CREATION_IN_PROGRESS`, `ACTIVE`, `CREATION_FAILED`, `DELETE_IN_PROGRESS`. +* `tags` - A map of tags to assign to the Vocabulary. +* `vocabulary_id` - The identifier of the custom vocabulary. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/controltower_controls.html.markdown b/website/docs/cdktf/python/d/controltower_controls.html.markdown new file mode 100644 index 00000000000..6f2a13d96b5 --- /dev/null +++ b/website/docs/cdktf/python/d/controltower_controls.html.markdown @@ -0,0 +1,58 @@ +--- +subcategory: "Control Tower" +layout: "aws" +page_title: "AWS: aws_controltower_controls" +description: |- + List of Control Tower controls applied to an OU. +--- + + + +# Data Source: aws_controltower_controls + +List of Control Tower controls applied to an OU. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_controltower_controls import DataAwsControltowerControls +from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization +from imports.aws.data_aws_organizations_organizational_units import DataAwsOrganizationsOrganizationalUnits +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + this_var = DataAwsOrganizationsOrganization(self, "this") + data_aws_organizations_organizational_units_this = + DataAwsOrganizationsOrganizationalUnits(self, "this_1", + parent_id=Token.as_string(property_access(this_var.roots, ["0", "id"])) + ) + # This allows the Terraform resource name to match the original name. 
+        data_aws_organizations_organizational_units_this.override_logical_id("this")
+        data_aws_controltower_controls_this = DataAwsControltowerControls(self, "this_2",
+            target_identifier=Token.as_string(
+                property_access("${[ for x in ${" + data_aws_organizations_organizational_units_this.children + "} : x.arn if x.name == \"Security\"]}", ["0"]))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_controltower_controls_this.override_logical_id("this")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `target_identifier` - (Required) The ARN of the organizational unit.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `enabled_controls` - List of all the ARNs for the controls applied to the `target_identifier`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/cur_report_definition.html.markdown b/website/docs/cdktf/python/d/cur_report_definition.html.markdown
new file mode 100644
index 00000000000..26a1cc682e3
--- /dev/null
+++ b/website/docs/cdktf/python/d/cur_report_definition.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "Cost and Usage Report"
+layout: "aws"
+page_title: "AWS: aws_cur_report_definition"
+description: |-
+  Get information on an AWS Cost and Usage Report Definition.
+---
+
+
+
+# Data Source: aws_cur_report_definition
+
+Use this data source to get information on an AWS Cost and Usage Report Definition.
+
+~> **NOTE:** The AWS Cost and Usage Report service is only available in `us-east-1` currently.
+
+~> **NOTE:** If AWS Organizations is enabled, only the master account can use this resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_cur_report_definition import DataAwsCurReportDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsCurReportDefinition(self, "report_definition",
+            report_name="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `report_name` - (Required) Name of the report definition to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `time_unit` - Frequency on which report data are measured and displayed.
+* `format` - Preferred format for report.
+* `compression` - Preferred compression format for report.
+* `additional_schema_elements` - A list of schema elements.
+* `s3_bucket` - Name of customer S3 bucket.
+* `s3_prefix` - Preferred report path prefix.
+* `s3_region` - Region of customer S3 bucket.
+* `additional_artifacts` - A list of additional artifacts.
+* `refresh_closed_reports` - If true, reports are updated after they have been finalized.
+* `report_versioning` - Whether reports overwrite the previous version of each report or are delivered in addition to the previous versions.
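+
+As a quick sketch of how these attributes can be consumed, the report's delivery location can be surfaced with a `TerraformOutput`. This is a minimal, illustrative example (the stack and output names are assumptions, and it reuses the `example` report definition from the usage above):
+
+```python
+# Hypothetical sketch: surface the report delivery location as a stack output.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_cur_report_definition import DataAwsCurReportDefinition
+class ReportLocationStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        report = DataAwsCurReportDefinition(self, "report_definition",
+            report_name="example"
+        )
+        # s3_bucket and s3_prefix are token-encoded strings, so plain
+        # concatenation resolves when the stack is synthesized.
+        TerraformOutput(self, "report_s3_location",
+            value="s3://" + report.s3_bucket + "/" + report.s3_prefix
+        )
+```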
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/customer_gateway.html.markdown b/website/docs/cdktf/python/d/customer_gateway.html.markdown
new file mode 100644
index 00000000000..13ebc0ad1a4
--- /dev/null
+++ b/website/docs/cdktf/python/d/customer_gateway.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "VPN (Site-to-Site)"
+layout: "aws"
+page_title: "AWS: aws_customer_gateway"
+description: |-
+  Get an existing AWS Customer Gateway.
+---
+
+
+
+# Data Source: aws_customer_gateway
+
+Get an existing AWS Customer Gateway.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_customer_gateway import DataAwsCustomerGateway, DataAwsCustomerGatewayFilter
+from imports.aws.vpn_connection import VpnConnection
+from imports.aws.vpn_gateway import VpnGateway
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = VpnGateway(self, "main",
+            amazon_side_asn=Token.as_string(7224),
+            vpc_id=Token.as_string(aws_vpc_main.id)
+        )
+        foo = DataAwsCustomerGateway(self, "foo",
+            filter=[DataAwsCustomerGatewayFilter(
+                name="tag:Name",
+                values=["foo-prod"]
+            )
+            ]
+        )
+        VpnConnection(self, "transit",
+            customer_gateway_id=Token.as_string(foo.id),
+            static_routes_only=False,
+            type=Token.as_string(foo.type),
+            vpn_gateway_id=main.id
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Optional) ID of the gateway.
+* `filter` - (Optional) One or more [name-value pairs][dcg-filters] to filter by.
+
+[dcg-filters]: https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeCustomerGateways.html
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the customer gateway.
+* `bgp_asn` - Gateway's Border Gateway Protocol (BGP) Autonomous System Number (ASN).
+* `certificate_arn` - ARN for the customer gateway certificate.
+* `device_name` - Name for the customer gateway device.
+* `ip_address` - IP address of the gateway's Internet-routable external interface.
+* `tags` - Map of key-value pairs assigned to the gateway.
+* `type` - Type of customer gateway. The only type AWS supports at this time is "ipsec.1".
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/datapipeline_pipeline.html.markdown b/website/docs/cdktf/python/d/datapipeline_pipeline.html.markdown
new file mode 100644
index 00000000000..b8778ade62f
--- /dev/null
+++ b/website/docs/cdktf/python/d/datapipeline_pipeline.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline"
+description: |-
+  Provides details about a specific DataPipeline.
+---
+
+
+
+# Data Source: aws_datapipeline_pipeline
+
+Provides details about a specific DataPipeline Pipeline.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_datapipeline_pipeline import DataAwsDatapipelinePipeline
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsDatapipelinePipeline(self, "example",
+            pipeline_id="pipelineID"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `pipeline_id` - (Required) ID of the pipeline.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` - Name of the pipeline.
+* `description` - Description of the pipeline.
+* `tags` - Map of tags assigned to the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/datapipeline_pipeline_definition.html.markdown b/website/docs/cdktf/python/d/datapipeline_pipeline_definition.html.markdown
new file mode 100644
index 00000000000..c81d28f64ff
--- /dev/null
+++ b/website/docs/cdktf/python/d/datapipeline_pipeline_definition.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline_definition"
+description: |-
+  Provides details about a specific DataPipeline Definition.
+---
+
+
+
+# Data Source: aws_datapipeline_pipeline_definition
+
+Provides details about a specific DataPipeline Pipeline Definition.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_datapipeline_pipeline_definition import DataAwsDatapipelinePipelineDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsDatapipelinePipelineDefinition(self, "example",
+            pipeline_id="pipelineID"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `pipeline_id` - (Required) ID of the pipeline.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `parameter_object` - Parameter objects used in the pipeline definition. See below.
+* `parameter_value` - Parameter values used in the pipeline definition. See below.
+* `pipeline_object` - Objects defined in the pipeline. See below.
+
+### `parameter_object`
+
+* `attributes` - Attributes of the parameter object. See below.
+* `id` - ID of the parameter object.
+
+### `attributes`
+
+* `key` - Field identifier.
+* `string_value` - Field value, expressed as a String.
+
+### `parameter_value`
+
+* `id` - ID of the parameter value.
+* `string_value` - Field value, expressed as a String.
+
+### `pipeline_object`
+
+* `field` - Key-value pairs that define the properties of the object. See below.
+* `id` - ID of the object.
+* `name` - Name of the object.
+
+### `field`
+
+* `key` - Field identifier.
+* `ref_value` - Field value, expressed as the identifier of another object.
+* `string_value` - Field value, expressed as a String.
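+
+The two Data Pipeline data sources compose naturally: a pipeline can be looked up first and its ID fed into the definition lookup. A minimal sketch under the same placeholder `pipelineID` used in the examples above (the stack and construct names are illustrative):
+
+```python
+# Hypothetical sketch: fetch a pipeline, then fetch its definition.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_datapipeline_pipeline import DataAwsDatapipelinePipeline
+from imports.aws.data_aws_datapipeline_pipeline_definition import DataAwsDatapipelinePipelineDefinition
+class PipelineDefinitionStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        pipeline = DataAwsDatapipelinePipeline(self, "pipeline",
+            pipeline_id="pipelineID"
+        )
+        # Reusing the looked-up ID keeps the two lookups in sync.
+        DataAwsDatapipelinePipelineDefinition(self, "definition",
+            pipeline_id=pipeline.pipeline_id
+        )
+```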
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/db_cluster_snapshot.html.markdown b/website/docs/cdktf/python/d/db_cluster_snapshot.html.markdown
new file mode 100644
index 00000000000..67819f9a3fb
--- /dev/null
+++ b/website/docs/cdktf/python/d/db_cluster_snapshot.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_cluster_snapshot"
+description: |-
+  Get information on a DB Cluster Snapshot.
+---
+
+
+
+# Data Source: aws_db_cluster_snapshot
+
+Use this data source to get information about a DB Cluster Snapshot for use when provisioning DB clusters.
+
+~> **NOTE:** This data source does not apply to snapshots created on DB Instances.
+See the [`aws_db_snapshot` data source](/docs/providers/aws/d/db_snapshot.html) for DB Instance snapshots.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_db_cluster_snapshot import DataAwsDbClusterSnapshot
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_cluster_instance import RdsClusterInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine, engine1):
+        super().__init__(scope, name)
+        development_final_snapshot = DataAwsDbClusterSnapshot(self, "development_final_snapshot",
+            db_cluster_identifier="development_cluster",
+            most_recent=True
+        )
+        aurora = RdsCluster(self, "aurora",
+            cluster_identifier="development_cluster",
+            db_subnet_group_name="my_db_subnet_group",
+            lifecycle=TerraformResourceLifecycle(
+                # Ignore drift on the snapshot the cluster was restored from.
+                ignore_changes=["snapshot_identifier"]
+            ),
+            snapshot_identifier=Token.as_string(development_final_snapshot.id),
+            engine=engine
+        )
+        aws_rds_cluster_instance_aurora = RdsClusterInstance(self, "aurora_2",
+            cluster_identifier=aurora.id,
+            db_subnet_group_name="my_db_subnet_group",
+            instance_class="db.t2.small",
+            engine=engine1
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_instance_aurora.override_logical_id("aurora")
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `most_recent` - (Optional) If more than one result is returned, use the most recent Snapshot.
+
+* `db_cluster_identifier` - (Optional) Returns the list of snapshots created by the specific db_cluster.
+
+* `db_cluster_snapshot_identifier` - (Optional) Returns information on a specific snapshot_id.
+
+* `snapshot_type` - (Optional) Type of snapshots to be returned. If you don't specify a SnapshotType
+value, then both automated and manual DB cluster snapshots are returned. Shared and public DB Cluster Snapshots are not
+included in the returned results by default. Possible values are `automated`, `manual`, `shared`, `public`, and `awsbackup`.
+
+* `include_shared` - (Optional) Set this value to true to include shared manual DB Cluster Snapshots from other
+AWS accounts that this AWS account has been given permission to copy or restore, otherwise set this value to false.
+The default is `false`.
+ +* `include_public` - (Optional) Set this value to true to include manual DB Cluster Snapshots that are public and can be +copied or restored by any AWS account, otherwise set this value to false. The default is `false`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `allocated_storage` - Allocated storage size in gigabytes (GB). +* `availability_zones` - List of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. +* `db_cluster_identifier` - Specifies the DB cluster identifier of the DB cluster that this DB cluster snapshot was created from. +* `db_cluster_snapshot_arn` - The ARN for the DB Cluster Snapshot. +* `engine_version` - Version of the database engine for this DB cluster snapshot. +* `engine` - Name of the database engine. +* `id` - Snapshot ID. +* `kms_key_id` - If storage_encrypted is true, the AWS KMS key identifier for the encrypted DB cluster snapshot. +* `license_model` - License model information for the restored DB cluster. +* `port` - Port that the DB cluster was listening on at the time of the snapshot. +* `snapshot_create_time` - Time when the snapshot was taken, in Universal Coordinated Time (UTC). +* `source_db_cluster_snapshot_identifier` - DB Cluster Snapshot ARN that the DB Cluster Snapshot was copied from. It only has value in case of cross customer or cross region copy. +* `status` - Status of this DB Cluster Snapshot. +* `storage_encrypted` - Whether the DB cluster snapshot is encrypted. +* `vpc_id` - VPC ID associated with the DB cluster snapshot. +* `tags` - Map of tags for the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/db_event_categories.html.markdown b/website/docs/cdktf/python/d/db_event_categories.html.markdown new file mode 100644 index 00000000000..f3b04c000b7 --- /dev/null +++ b/website/docs/cdktf/python/d/db_event_categories.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_event_categories" +description: |- + Provides a list of DB Event Categories which can be used to pass values into DB Event Subscription. +--- + + + +# Data Source: aws_db_event_categories + +## Example Usage + +List the event categories of all the RDS resources. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_db_event_categories import DataAwsDbEventCategories +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsDbEventCategories(self, "example") + cdktf_terraform_output_example = TerraformOutput(self, "example_1", + value=example.event_categories + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + cdktf_terraform_output_example.override_logical_id("example") +``` + +List the event categories specific to the RDS resource `db-snapshot`. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_db_event_categories import DataAwsDbEventCategories +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsDbEventCategories(self, "example", + source_type="db-snapshot" + ) + cdktf_terraform_output_example = TerraformOutput(self, "example_1", + value=example.event_categories + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + cdktf_terraform_output_example.override_logical_id("example") +``` + +## Argument Reference + +This data source supports the following arguments: + +* `source_type` - (Optional) Type of source that will be generating the events. Valid options are db-instance, db-security-group, db-parameter-group, db-snapshot, db-cluster or db-cluster-snapshot. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `event_categories` - List of the event categories. +* `id` - Region of the event categories. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/db_instance.html.markdown b/website/docs/cdktf/python/d/db_instance.html.markdown new file mode 100644 index 00000000000..e3ff950c876 --- /dev/null +++ b/website/docs/cdktf/python/d/db_instance.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_instance" +description: |- + Get information on an RDS Database Instance. +--- + + + +# Data Source: aws_db_instance + +Use this data source to get information about an RDS instance + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_db_instance import DataAwsDbInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDbInstance(self, "database", + db_instance_identifier="my-test-database" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `db_instance_identifier` - (Required) Name of the RDS instance + +## Attribute Reference + +~> **NOTE:** The `port` field may be empty while an Aurora cluster is still in the process of being created. This can occur if the cluster was initiated with the [AWS CLI `create-db-cluster`](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-cluster.html) command, but no DB instance has yet been added to it. + +This data source exports the following attributes in addition to the arguments above: + +* `address` - Hostname of the RDS instance. See also `endpoint` and `port`. +* `allocated_storage` - Allocated storage size specified in gigabytes. +* `auto_minor_version_upgrade` - Indicates that minor version patches are applied automatically. +* `availability_zone` - Name of the Availability Zone the DB instance is located in. +* `backup_retention_period` - Specifies the number of days for which automatic DB snapshots are retained. +* `db_cluster_identifier` - If the DB instance is a member of a DB cluster, contains the name of the DB cluster that the DB instance is a member of. +* `db_instance_arn` - ARN for the DB instance. +* `db_instance_class` - Contains the name of the compute and memory capacity class of the DB instance. 
+* `db_name` - Contains the name of the initial database of this instance that was provided at create time, if one was specified when the DB instance was created. This same name is returned for the life of the DB instance. +* `db_parameter_groups` - Provides the list of DB parameter groups applied to this DB instance. +* `db_subnet_group` - Name of the subnet group associated with the DB instance. +* `db_instance_port` - Port that the DB instance listens on. +* `enabled_cloudwatch_logs_exports` - List of log types to export to cloudwatch. +* `endpoint` - Connection endpoint in `address:port` format. +* `engine` - Provides the name of the database engine to be used for this DB instance. +* `engine_version` - Database engine version. +* `hosted_zone_id` - Canonical hosted zone ID of the DB instance (to be used in a Route 53 Alias record). +* `iops` - Provisioned IOPS (I/O operations per second) value. +* `kms_key_id` - If StorageEncrypted is true, the KMS key identifier for the encrypted DB instance. +* `license_model` - License model information for this DB instance. +* `master_username` - Contains the master username for the DB instance. +* `master_user_secret` - Provides the master user secret. Only available when `manage_master_user_password` is set to true. [Documented below](#master_user_secret). +* `max_allocated_storage` - The upper limit to which Amazon RDS can automatically scale the storage of the DB instance. +* `monitoring_interval` - Interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. +* `monitoring_role_arn` - ARN for the IAM role that permits RDS to send Enhanced Monitoring metrics to CloudWatch Logs. +* `multi_az` - If the DB instance is a Multi-AZ deployment. +* `network_type` - Network type of the DB instance. +* `option_group_memberships` - Provides the list of option group memberships for this DB instance. +* `port` - Database endpoint port, primarily used by an Aurora DB cluster. For a conventional RDS DB instance, the `db_instance_port` is typically the preferred choice. +* `preferred_backup_window` - Specifies the daily time range during which automated backups are created. +* `preferred_maintenance_window` - Specifies the weekly time range during which system maintenance can occur in UTC. +* `publicly_accessible` - Accessibility options for the DB instance. +* `resource_id` - RDS Resource ID of this instance. +* `storage_encrypted` - Whether the DB instance is encrypted. +* `storage_throughput` - Storage throughput value for the DB instance. +* `storage_type` - Storage type associated with DB instance. +* `timezone` - Time zone of the DB instance. +* `vpc_security_groups` - Provides a list of VPC security group elements that the DB instance belongs to. +* `replicate_source_db` - Identifier of the source DB that this is a replica of. +* `ca_cert_identifier` - Identifier of the CA certificate for the DB instance. + +### master_user_secret + +The `master_user_secret` configuration block supports the following attributes: + +* `kms_key_id` - The Amazon Web Services KMS key identifier that is used to encrypt the secret. +* `secret_arn` - The Amazon Resource Name (ARN) of the secret. +* `secret_status` - The status of the secret. Valid Values: `creating` | `active` | `rotating` | `impaired`. 
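+
+As a sketch of how this nested attribute can be consumed, the generated Python bindings expose `master_user_secret` as a list with an indexed accessor. The snippet below is illustrative (the stack and output names are assumptions, and it presumes the instance was created with `manage_master_user_password` enabled):
+
+```python
+# Hypothetical sketch: read the endpoint and master user secret ARN.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_db_instance import DataAwsDbInstance
+class DatabaseInfoStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        database = DataAwsDbInstance(self, "database",
+            db_instance_identifier="my-test-database"
+        )
+        TerraformOutput(self, "db_endpoint",
+            value=database.endpoint
+        )
+        # master_user_secret is a list attribute; element 0 carries the
+        # kms_key_id, secret_arn, and secret_status documented above.
+        TerraformOutput(self, "master_user_secret_arn",
+            value=database.master_user_secret.get(0).secret_arn
+        )
+```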
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/db_instances.html.markdown b/website/docs/cdktf/python/d/db_instances.html.markdown
new file mode 100644
index 00000000000..9e38e9c32db
--- /dev/null
+++ b/website/docs/cdktf/python/d/db_instances.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_instances"
+description: |-
+  Terraform data source for listing RDS Database Instances.
+---
+
+
+
+# Data Source: aws_db_instances
+
+Terraform data source for listing RDS Database Instances.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_db_instances import DataAwsDbInstances, DataAwsDbInstancesFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsDbInstances(self, "example",
+            filter=[DataAwsDbInstancesFilter(
+                name="db-instance-id",
+                values=["my-database-id"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [RDS DescribeDBInstances API Reference](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBInstances.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `instance_arns` - ARNs of the matched RDS instances.
+* `instance_identifiers` - Identifiers of the matched RDS instances.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/db_proxy.html.markdown b/website/docs/cdktf/python/d/db_proxy.html.markdown
new file mode 100644
index 00000000000..2e7dc747c2e
--- /dev/null
+++ b/website/docs/cdktf/python/d/db_proxy.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_proxy"
+description: |-
+  Get information on a DB Proxy.
+---
+
+
+
+# Data Source: aws_db_proxy
+
+Use this data source to get information about a DB Proxy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_db_proxy import DataAwsDbProxy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsDbProxy(self, "proxy",
+            name="my-test-db-proxy"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the DB proxy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the DB Proxy.
+* `auth` - Configuration(s) with authorization mechanisms to connect to the associated instance or cluster.
+* `debug_logging` - Whether the proxy includes detailed information about SQL statements in its logs.
+* `endpoint` - Endpoint that you can use to connect to the DB proxy.
+* `engine_family` - Kinds of databases that the proxy can connect to.
+* `idle_client_timeout` - Number of seconds a connection to the proxy can have no activity before the proxy drops the client connection.
+* `require_tls` - Whether Transport Layer Security (TLS) encryption is required for connections to the proxy.
+* `role_arn` - ARN for the IAM role that the proxy uses to access Amazon Secrets Manager.
+* `vpc_id` - Provides the VPC ID of the DB proxy.
+* `vpc_security_group_ids` - Provides a list of VPC security groups that the proxy belongs to.
+* `vpc_subnet_ids` - EC2 subnet IDs for the proxy.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/db_snapshot.html.markdown b/website/docs/cdktf/python/d/db_snapshot.html.markdown
new file mode 100644
index 00000000000..4c12f352404
--- /dev/null
+++ b/website/docs/cdktf/python/d/db_snapshot.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_snapshot"
+description: |-
+  Get information on a DB Snapshot.
+---
+
+
+
+# Data Source: aws_db_snapshot
+
+Use this data source to get information about a DB Snapshot for use when provisioning DB instances.
+
+~> **NOTE:** This data source does not apply to snapshots created on Aurora DB clusters.
+See the [`aws_db_cluster_snapshot` data source](/docs/providers/aws/d/db_cluster_snapshot.html) for DB Cluster snapshots.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_db_snapshot import DataAwsDbSnapshot
+from imports.aws.db_instance import DbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        prod = DbInstance(self, "prod",
+            allocated_storage=10,
+            db_name="mydb",
+            db_subnet_group_name="my_database_subnet_group",
+            engine="mysql",
+            engine_version="5.6.17",
+            instance_class="db.t2.micro",
+            parameter_group_name="default.mysql5.6",
+            password="bar",
+            username="foo"
+        )
+        latest_prod_snapshot = DataAwsDbSnapshot(self, "latest_prod_snapshot",
+            db_instance_identifier=prod.identifier,
+            most_recent=True
+        )
+        DbInstance(self, "dev",
+            db_name="mydbdev",
+            instance_class="db.t2.micro",
+            lifecycle=TerraformResourceLifecycle(
+                # Ignore drift on the snapshot the instance was restored from.
+                ignore_changes=["snapshot_identifier"]
+            ),
+            snapshot_identifier=Token.as_string(latest_prod_snapshot.id)
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** One of either `db_instance_identifier` or `db_snapshot_identifier` is required.
+
+This data source supports the following arguments:
+
+* `most_recent` - (Optional) If more than one result is returned, use the most
+recent Snapshot.
+
+* `db_instance_identifier` - (Optional) Returns the list of snapshots created by the specific db_instance.
+
+* `db_snapshot_identifier` - (Optional) Returns information on a specific snapshot_id.
+
+* `snapshot_type` - (Optional) Type of snapshots to be returned. If you don't specify a SnapshotType
+value, then both automated and manual snapshots are returned. Shared and public DB snapshots are not
+included in the returned results by default.
Possible values are `automated`, `manual`, `shared`, `public`, and `awsbackup`.
+
+* `include_shared` - (Optional) Set this value to true to include shared manual DB snapshots from other
+AWS accounts that this AWS account has been given permission to copy or restore, otherwise set this value to false.
+The default is `false`.
+
+* `include_public` - (Optional) Set this value to true to include manual DB snapshots that are public and can be
+copied or restored by any AWS account, otherwise set this value to false. The default is `false`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Snapshot ID.
+* `allocated_storage` - Allocated storage size in gigabytes (GB).
+* `availability_zone` - Name of the Availability Zone the DB instance was located in at the time of the DB snapshot.
+* `db_snapshot_arn` - ARN for the DB snapshot.
+* `encrypted` - Whether the DB snapshot is encrypted.
+* `engine` - Name of the database engine.
+* `engine_version` - Version of the database engine.
+* `iops` - Provisioned IOPS (I/O operations per second) value of the DB instance at the time of the snapshot.
+* `kms_key_id` - ARN for the KMS encryption key.
+* `license_model` - License model information for the restored DB instance.
+* `option_group_name` - Provides the option group name for the DB snapshot.
+* `source_db_snapshot_identifier` - DB snapshot ARN that the DB snapshot was copied from. It only has a value in the case of a cross-customer or cross-region copy.
+* `source_region` - Region that the DB snapshot was created in or copied from.
+* `status` - Status of this DB snapshot.
+* `storage_type` - Storage type associated with DB snapshot.
+* `vpc_id` - ID of the VPC associated with the DB snapshot.
+* `snapshot_create_time` - Provides the time when the snapshot was taken, in Universal Coordinated Time (UTC).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/db_subnet_group.html.markdown b/website/docs/cdktf/python/d/db_subnet_group.html.markdown
new file mode 100644
index 00000000000..c6e0be97ef0
--- /dev/null
+++ b/website/docs/cdktf/python/d/db_subnet_group.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_subnet_group"
+description: |-
+  Get information on an RDS Database Subnet Group.
+---
+
+
+
+# Data Source: aws_db_subnet_group
+
+Use this data source to get information about an RDS subnet group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_db_subnet_group import DataAwsDbSubnetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsDbSubnetGroup(self, "database",
+            name="my-test-database-subnet-group"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the RDS database subnet group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN for the DB subnet group.
+* `description` - Provides the description of the DB subnet group.
+* `status` - Provides the status of the DB subnet group.
+* `subnet_ids` - Contains a list of subnet identifiers.
+* `supported_network_types` - The network type of the DB subnet group. +* `vpc_id` - Provides the VPC ID of the DB subnet group. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/default_tags.markdown b/website/docs/cdktf/python/d/default_tags.markdown new file mode 100644 index 00000000000..74327eb3939 --- /dev/null +++ b/website/docs/cdktf/python/d/default_tags.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_default_tags" +description: |- + Access the default tags configured on the provider. +--- + + + +# Data Source: aws_default_tags + +Use this data source to get the default tags configured on the provider. + +With this data source, you can apply default tags to resources not _directly_ managed by a Terraform resource, such as the instances underneath an Auto Scaling group or the volumes created for an EC2 instance. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_default_tags import DataAwsDefaultTags +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDefaultTags(self, "example") +``` + +### Dynamically Apply Default Tags to Auto Scaling Group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformIterator, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.data_aws_default_tags import DataAwsDefaultTags +from imports.aws.provider import AwsProvider +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, maxSize, minSize): + super().__init__(scope, name) + AwsProvider(self, "aws", + default_tags=[AwsProviderDefaultTags( + tags={ + "Environment": "Test", + "Name": "Provider Tag" + } + ) + ] + ) + example = DataAwsDefaultTags(self, "example") + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_dynamic_iterator0 = TerraformIterator.from_list( + Token.as_any(example.tags)) + aws_autoscaling_group_example = AutoscalingGroup(self, "example_2", + tag=example_dynamic_iterator0.dynamic({ + "key": example_dynamic_iterator0.key, + "propagate_at_launch": True, + "value": example_dynamic_iterator0.value + }), + max_size=max_size, + min_size=min_size + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_group_example.override_logical_id("example") +``` + +## Argument Reference + +This data source has no arguments. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `tags` - Blocks of default tags set on the provider. See details below. 
+
+### tags
+
+* `key` - Key name of the tag (i.e., `tags.#.key`).
+* `value` - Value of the tag (i.e., `tags.#.value`).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/directory_service_directory.html.markdown b/website/docs/cdktf/python/d/directory_service_directory.html.markdown
new file mode 100644
index 00000000000..aa60f100b4d
--- /dev/null
+++ b/website/docs/cdktf/python/d/directory_service_directory.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_directory"
+description: |-
+  AWS Directory Service Directory
+---
+
+
+
+# Data Source: aws_directory_service_directory
+
+Get attributes of an AWS Directory Service directory (SimpleAD, Managed AD, AD Connector). It's especially useful for referring to an AWS Managed AD or an on-premises AD in an AD Connector configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_directory_service_directory import DataAwsDirectoryServiceDirectory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsDirectoryServiceDirectory(self, "example",
+            directory_id=main.id
+        )
+```
+
+## Argument Reference
+
+* `directory_id` - (Required) ID of the directory.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `type` - Directory type (`SimpleAD`, `ADConnector` or `MicrosoftAD`).
+* `edition` - (for `MicrosoftAD`) Microsoft AD edition (`Standard` or `Enterprise`).
+* `name` - Fully qualified name for the directory/connector.
+* `password` - Password for the directory administrator or connector user.
+* `size` - (for `SimpleAD` and `ADConnector`) Size of the directory/connector (`Small` or `Large`).
+* `alias` - Alias for the directory/connector, such as `d-991708b282.awsapps.com`.
+* `description` - Textual description for the directory/connector.
+* `short_name` - Short name of the directory/connector, such as `CORP`.
+* `enable_sso` - Directory/connector single sign-on status.
+* `access_url` - Access URL for the directory/connector, such as http://alias.awsapps.com.
+* `dns_ip_addresses` - List of IP addresses of the DNS servers for the directory/connector.
+* `security_group_id` - ID of the security group created by the directory/connector.
+* `tags` - A map of tags assigned to the directory/connector.
+
+`vpc_settings` (for `SimpleAD` and `MicrosoftAD`) is also exported with the following attributes:
+
+* `subnet_ids` - Identifiers of the subnets for the directory servers (2 subnets in 2 different AZs).
+* `vpc_id` - ID of the VPC that the directory is in.
+
+`connect_settings` (for `ADConnector`) is also exported with the following attributes:
+
+* `connect_ips` - IP addresses of the AD Connector servers.
+* `customer_username` - Username corresponding to the password provided.
+* `customer_dns_ips` - DNS IP addresses of the domain to connect to.
+* `subnet_ids` - Identifiers of the subnets for the connector servers (2 subnets in 2 different AZs).
+* `vpc_id` - ID of the VPC that the connector is in.
+
+`radius_settings` is also exported with the following attributes:
+
+* `authentication_protocol` - The protocol specified for your RADIUS endpoints.
+* `display_label` - Display label.
+* `radius_port` - Port that your RADIUS server is using for communications.
+* `radius_retries` - Maximum number of times that communication with the RADIUS server is attempted.
+* `radius_servers` - Set of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.
+* `radius_timeout` - Amount of time, in seconds, to wait for the RADIUS server to respond.
+* `use_same_username` - Not currently used.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/dms_certificate.html.markdown b/website/docs/cdktf/python/d/dms_certificate.html.markdown
new file mode 100644
index 00000000000..367fb526934
--- /dev/null
+++ b/website/docs/cdktf/python/d/dms_certificate.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "DMS (Database Migration)"
+layout: "aws"
+page_title: "AWS: aws_dms_certificate"
+description: |-
+  Terraform data source for managing an AWS DMS (Database Migration) Certificate.
+---
+
+
+
+# Data Source: aws_dms_certificate
+
+Terraform data source for managing an AWS DMS (Database Migration) Certificate.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_dms_certificate import DataAwsDmsCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsDmsCertificate(self, "example",
+            certificate_id=test.certificate_id
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `certificate_id` - (Required) A customer-assigned name for the certificate. Identifiers must begin with a letter and must contain only ASCII letters, digits, and hyphens. They can't end with a hyphen or contain two consecutive hyphens.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `certificate_creation_date` - The date that the certificate was created.
+* `certificate_pem` - The contents of a .pem file, which contains an X.509 certificate.
+* `certificate_owner` - The owner of the certificate.
+* `certificate_arn` - The Amazon Resource Name (ARN) for the certificate.
+* `certificate_wallet` - The location of an imported Oracle Wallet certificate for use with SSL.
+* `key_length` - The key length of the cryptographic algorithm being used.
+* `signing_algorithm` - The algorithm for the certificate.
+* `valid_from_date` - The beginning date that the certificate is valid.
+* `valid_to_date` - The final date that the certificate is valid.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/dms_endpoint.html.markdown b/website/docs/cdktf/python/d/dms_endpoint.html.markdown
new file mode 100644
index 00000000000..124185fa161
--- /dev/null
+++ b/website/docs/cdktf/python/d/dms_endpoint.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "DMS (Database Migration)"
+layout: "aws"
+page_title: "AWS: aws_dms_endpoint"
+description: |-
+  Terraform data source for managing an AWS DMS (Database Migration) Endpoint.
+---
+
+
+
+# Data Source: aws_dms_endpoint
+
+Terraform data source for managing an AWS DMS (Database Migration) Endpoint.
+ +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dms_endpoint import DataAwsDmsEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDmsEndpoint(self, "test", + endpoint_id="test_id" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `endpoint_id` - (Required) Database endpoint identifier. Identifiers must contain from 1 to 255 alphanumeric characters or hyphens, begin with a letter, contain only ASCII letters, digits, and hyphens, not end with a hyphen, and not contain two consecutive hyphens. + +## Attribute Reference + +See the [`aws_dms_endpoint` resource](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/dms_endpoint) for details on the returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dms_replication_instance.html.markdown b/website/docs/cdktf/python/d/dms_replication_instance.html.markdown new file mode 100644 index 00000000000..3ef2a1373dd --- /dev/null +++ b/website/docs/cdktf/python/d/dms_replication_instance.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_instance" +description: |- + Terraform data source for managing an AWS DMS (Database Migration) Replication Instance. +--- + + + +# Data Source: aws_dms_replication_instance + +Terraform data source for managing an AWS DMS (Database Migration) Replication Instance. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dms_replication_instance import DataAwsDmsReplicationInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDmsReplicationInstance(self, "test", + replication_instance_id=Token.as_string(aws_dms_replication_instance_test.replication_instance_id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `replication_instance_id` - (Required) The replication instance identifier. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `allocated_storage` - The amount of storage (in gigabytes) to be initially allocated for the replication instance. +* `auto_minor_version_upgrade` - Indicates that minor engine upgrades will be applied automatically to the replication instance during the maintenance window. +* `availability_zone` - The EC2 Availability Zone that the replication instance will be created in. +* `engine_version` - The engine version number of the replication instance. +* `kms_key_arn` - The Amazon Resource Name (ARN) for the KMS key used to encrypt the connection parameters. +* `multi_az` - Specifies if the replication instance is a multi-az deployment. +* `preferred_maintenance_window` - The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). 
+* `publicly_accessible` - Specifies the accessibility options for the replication instance. A value of true represents an instance with a public IP address. A value of false represents an instance with a private IP address. +* `replication_instance_arn` - The Amazon Resource Name (ARN) of the replication instance. +* `replication_instance_class` - The compute and memory capacity of the replication instance as specified by the replication instance class. See [AWS DMS User Guide](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_ReplicationInstance.Types.html) for information on instance classes. +* `replication_instance_private_ips` - A list of the private IP addresses of the replication instance. +* `replication_instance_public_ips` - A list of the public IP addresses of the replication instance. +* `replication_subnet_group_id` - A subnet group to associate with the replication instance. +* `vpc_security_group_ids` - A set of VPC security group IDs that are used with the replication instance. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dms_replication_subnet_group.html.markdown b/website/docs/cdktf/python/d/dms_replication_subnet_group.html.markdown new file mode 100644 index 00000000000..27e96c03e79 --- /dev/null +++ b/website/docs/cdktf/python/d/dms_replication_subnet_group.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_subnet_group" +description: |- + Terraform data source for managing an AWS DMS (Database Migration) Replication Subnet Group. +--- + + + +# Data Source: aws_dms_replication_subnet_group + +Terraform data source for managing an AWS DMS (Database Migration) Replication Subnet Group. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dms_replication_subnet_group import DataAwsDmsReplicationSubnetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDmsReplicationSubnetGroup(self, "test", + replication_subnet_group_id=Token.as_string(aws_dms_replication_subnet_group_test.replication_subnet_group_id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `replication_subnet_group_id` - (Required) Name for the replication subnet group. This value is stored as a lowercase string. It must contain no more than 255 alphanumeric characters, periods, spaces, underscores, or hyphens and cannot be `default`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `replication_subnet_group_description` - Description for the subnet group. +* `subnet_ids` - List of at least 2 EC2 subnet IDs for the subnet group. The subnets must cover at least 2 availability zones. +* `vpc_id` - The ID of the VPC the subnet group is in. 
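+
+As a sketch of how the exported attributes can be reused elsewhere in a stack, the subnet group's VPC can be surfaced as an output. The snippet below is illustrative (the subnet group identifier, stack, and output names are placeholders):
+
+```python
+# Hypothetical sketch: expose the VPC behind a replication subnet group.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_dms_replication_subnet_group import DataAwsDmsReplicationSubnetGroup
+class SubnetGroupInfoStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        group = DataAwsDmsReplicationSubnetGroup(self, "test",
+            replication_subnet_group_id="my-subnet-group"
+        )
+        TerraformOutput(self, "subnet_group_vpc_id",
+            value=group.vpc_id
+        )
+```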
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dms_replication_task.html.markdown b/website/docs/cdktf/python/d/dms_replication_task.html.markdown new file mode 100644 index 00000000000..3ca9ceec302 --- /dev/null +++ b/website/docs/cdktf/python/d/dms_replication_task.html.markdown @@ -0,0 +1,63 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_task" +description: |- + Terraform data source for managing an AWS DMS (Database Migration) Replication Task. +--- + + + +# Data Source: aws_dms_replication_task + +Terraform data source for managing an AWS DMS (Database Migration) Replication Task. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dms_replication_task import DataAwsDmsReplicationTask +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDmsReplicationTask(self, "test", + replication_task_id=Token.as_string(aws_dms_replication_task_test.replication_task_id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `replication_task_id` - (Required) The replication task identifier. + + - Must contain from 1 to 255 alphanumeric characters or hyphens. + - First character must be a letter. + - Cannot end with a hyphen. + - Cannot contain two consecutive hyphens. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `cdc_start_position` - (Conflicts with `cdc_start_time`) Indicates when you want a change data capture (CDC) operation to start. The value can be in date, checkpoint, or LSN/SCN format depending on the source engine. For more information, see [Determining a CDC native start point](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Task.CDC.html#CHAP_Task.CDC.StartPoint.Native). +* `cdc_start_time` - (Conflicts with `cdc_start_position`) The Unix timestamp integer for the start of the Change Data Capture (CDC) operation. +* `migration_type` - The migration type. Can be one of `full-load | cdc | full-load-and-cdc`. +* `replication_instance_arn` - The Amazon Resource Name (ARN) of the replication instance. +* `replication_task_settings` - An escaped JSON string that contains the task settings. For a complete list of task settings, see [Task Settings for AWS Database Migration Service Tasks](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TaskSettings.html). +* `source_endpoint_arn` - The Amazon Resource Name (ARN) string that uniquely identifies the source endpoint. +* `start_replication_task` - Whether to run or stop the replication task. +* `status` - Replication Task status. +* `table_mappings` - An escaped JSON string that contains the table mappings. For information on table mapping see [Using Table Mapping with an AWS Database Migration Service Task to Select and Filter Data](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TableMapping.html) +* `target_endpoint_arn` - The Amazon Resource Name (ARN) string that uniquely identifies the target endpoint. +* `replication_task_arn` - The Amazon Resource Name (ARN) for the replication task. 
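+
+For example, the task's `status` attribute can be exported so it is visible outside the stack. A minimal sketch (the task identifier, stack, and output names are placeholders):
+
+```python
+# Hypothetical sketch: surface the replication task status as an output.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_dms_replication_task import DataAwsDmsReplicationTask
+class TaskStatusStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        task = DataAwsDmsReplicationTask(self, "test",
+            replication_task_id="my-replication-task"
+        )
+        TerraformOutput(self, "replication_task_status",
+            value=task.status
+        )
+```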
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/docdb_engine_version.markdown b/website/docs/cdktf/python/d/docdb_engine_version.markdown new file mode 100644 index 00000000000..9da3cda6ae3 --- /dev/null +++ b/website/docs/cdktf/python/d/docdb_engine_version.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "DocumentDB" +layout: "aws" +page_title: "AWS: aws_docdb_engine_version" +description: |- + Information about a DocumentDB engine version. +--- + + + +# Data Source: aws_docdb_engine_version + +Information about a DocumentDB engine version. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_docdb_engine_version import DataAwsDocdbEngineVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDocdbEngineVersion(self, "test", + version="3.6.0" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `engine` - (Optional) DB engine. (Default: `docdb`) +* `parameter_group_family` - (Optional) Name of a specific DB parameter group family. An example parameter group family is `docdb3.6`. +* `preferred_versions` - (Optional) Ordered list of preferred engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. If both the `version` and `preferred_versions` arguments are not configured, the data source will return the default version for the engine. +* `version` - (Optional) Version of the DB engine. For example, `3.6.0`. If `version` and `preferred_versions` are not set, the data source will provide information for the AWS-defined default version. If both the `version` and `preferred_versions` arguments are not configured, the data source will return the default version for the engine. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `engine_description` - Description of the database engine. +* `exportable_log_types` - Set of log types that the database engine has available for export to CloudWatch Logs. +* `supports_log_exports_to_cloudwatch` - Indicates whether the engine version supports exporting the log types specified by `exportable_log_types` to CloudWatch Logs. +* `valid_upgrade_targets` - A set of engine versions that this database engine version can be upgraded to. +* `version_description` - Description of the database engine version. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/docdb_orderable_db_instance.markdown b/website/docs/cdktf/python/d/docdb_orderable_db_instance.markdown new file mode 100644 index 00000000000..9d40ccac9db --- /dev/null +++ b/website/docs/cdktf/python/d/docdb_orderable_db_instance.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "DocumentDB" +layout: "aws" +page_title: "AWS: aws_docdb_orderable_db_instance" +description: |- + Information about DocumentDB orderable DB instances. +--- + + + +# Data Source: aws_docdb_orderable_db_instance + +Information about DocumentDB orderable DB instances. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_docdb_orderable_db_instance import DataAwsDocdbOrderableDbInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDocdbOrderableDbInstance(self, "test", + engine="docdb", + engine_version="3.6.0", + license_model="na", + preferred_instance_classes=["db.r5.large", "db.r4.large", "db.t3.medium"] + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `engine` - (Optional) DB engine. Default: `docdb` +* `engine_version` - (Optional) Version of the DB engine. +* `instance_class` - (Optional) DB instance class. Examples of classes are `db.r5.12xlarge`, `db.r5.24xlarge`, `db.r5.2xlarge`, `db.r5.4xlarge`, `db.r5.large`, `db.r5.xlarge`, and `db.t3.medium`. (Conflicts with `preferred_instance_classes`.) +* `license_model` - (Optional) License model. Default: `na` +* `preferred_instance_classes` - (Optional) Ordered list of preferred DocumentDB DB instance classes. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. (Conflicts with `instance_class`.) +* `vpc` - (Optional) Enable to show only VPC. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `availability_zones` - Availability zones where the instance is available. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dx_connection.html.markdown b/website/docs/cdktf/python/d/dx_connection.html.markdown new file mode 100644 index 00000000000..f4804362369 --- /dev/null +++ b/website/docs/cdktf/python/d/dx_connection.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_connection" +description: |- + Retrieve information about a Direct Connect Connection. +--- + + + +# Data Source: aws_dx_connection + +Retrieve information about a Direct Connect Connection. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dx_connection import DataAwsDxConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDxConnection(self, "example", + name="tf-dx-connection" + ) +``` + +## Argument Reference + +* `name` - (Required) Name of the connection to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the connection. +* `aws_device` - Direct Connect endpoint on which the physical connection terminates. +* `bandwidth` - Bandwidth of the connection. +* `id` - ID of the connection. +* `location` - AWS Direct Connect location where the connection is located. +* `owner_account_id` - ID of the AWS account that owns the connection. +* `partner_name` - The name of the AWS Direct Connect service provider associated with the connection. 
+* `provider_name` - Name of the service provider associated with the connection. +* `tags` - Map of tags for the resource. +* `vlan_id` - The VLAN ID. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dx_gateway.html.markdown b/website/docs/cdktf/python/d/dx_gateway.html.markdown new file mode 100644 index 00000000000..3507e89ff0a --- /dev/null +++ b/website/docs/cdktf/python/d/dx_gateway.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_gateway" +description: |- + Retrieve information about a Direct Connect Gateway +--- + + + +# Data Source: aws_dx_gateway + +Retrieve information about a Direct Connect Gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dx_gateway import DataAwsDxGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDxGateway(self, "example", + name="example" + ) +``` + +## Argument Reference + +* `name` - (Required) Name of the gateway to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `amazon_side_asn` - ASN on the Amazon side of the connection. +* `id` - ID of the gateway. +* `owner_account_id` - AWS Account ID of the gateway. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dx_location.html.markdown b/website/docs/cdktf/python/d/dx_location.html.markdown new file mode 100644 index 00000000000..214059b5aa9 --- /dev/null +++ b/website/docs/cdktf/python/d/dx_location.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_location" +description: |- + Retrieve information about a specific AWS Direct Connect location in the current AWS Region. +--- + + + +# Data Source: aws_dx_location + +Retrieve information about a specific AWS Direct Connect location in the current AWS Region. +These are the locations that can be specified when configuring [`aws_dx_connection`](/docs/providers/aws/r/dx_connection.html) or [`aws_dx_lag`](/docs/providers/aws/r/dx_lag.html) resources. + +~> **Note:** This data source is different from the [`aws_dx_locations`](/docs/providers/aws/d/dx_locations.html) data source which retrieves information about all the AWS Direct Connect locations in the current AWS Region. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dx_location import DataAwsDxLocation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDxLocation(self, "example", + location_code="CS32A-24FL" + ) +``` + +## Argument Reference + +* `location_code` - (Required) Code for the location to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `available_macsec_port_speeds` - The available MAC Security (MACsec) port speeds for the location. 
+* `available_port_speeds` - The available port speeds for the location. +* `available_providers` - Names of the service providers for the location. +* `location_name` - Name of the location. This includes the name of the colocation partner and the physical site of the building. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dx_locations.html.markdown b/website/docs/cdktf/python/d/dx_locations.html.markdown new file mode 100644 index 00000000000..d5f87a7c4e7 --- /dev/null +++ b/website/docs/cdktf/python/d/dx_locations.html.markdown @@ -0,0 +1,45 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_locations" +description: |- + Retrieve information about the AWS Direct Connect locations in the current AWS Region. +--- + + + +# Data Source: aws_dx_locations + +Retrieve information about the AWS Direct Connect locations in the current AWS Region. +These are the locations that can be specified when configuring [`aws_dx_connection`](/docs/providers/aws/r/dx_connection.html) or [`aws_dx_lag`](/docs/providers/aws/r/dx_lag.html) resources. + +~> **Note:** This data source is different from the [`aws_dx_location`](/docs/providers/aws/d/dx_location.html) data source which retrieves information about a specific AWS Direct Connect location in the current AWS Region. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dx_locations import DataAwsDxLocations +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDxLocations(self, "available") +``` + +## Argument Reference + +There are no arguments available for this data source. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `location_codes` - Code for the locations. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dx_router_configuration.html.markdown b/website/docs/cdktf/python/d/dx_router_configuration.html.markdown new file mode 100644 index 00000000000..11084d7ab84 --- /dev/null +++ b/website/docs/cdktf/python/d/dx_router_configuration.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_router_configuration" +description: |- + Terraform data source for managing an AWS Direct Connect Router Configuration. +--- + + + +# Data Source: aws_dx_router_configuration + +Terraform data source for retrieving Router Configuration instructions for a given AWS Direct Connect Virtual Interface and Router Type. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_dx_router_configuration import DataAwsDxRouterConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDxRouterConfiguration(self, "example", + router_type_identifier="CiscoSystemsInc-2900SeriesRouters-IOS124", + virtual_interface_id="dxvif-abcde123" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `virtual_interface_id` - (Required) ID of the Direct Connect Virtual Interface +* `router_type_identifier` - (Required) ID of the Router Type. For example: `CiscoSystemsInc-2900SeriesRouters-IOS124` + +There is currently no AWS API to retrieve the full list of `router_type_identifier` values. Here is a list of known `RouterType` objects that can be used: + +```json +{ + "routerTypes": [ + {"platform":"2900 Series Routers","routerTypeIdentifier":"CiscoSystemsInc-2900SeriesRouters-IOS124","software":"IOS 12.4+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-router-cisco-generic.xslt","xsltTemplateNameForMacSec":""}, + {"platform":"3700 Series Routers","routerTypeIdentifier":"CiscoSystemsInc-3700SeriesRouters-IOS124","software":"IOS 12.4+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-router-cisco-generic.xslt","xsltTemplateNameForMacSec":""}, + {"platform":"7200 Series Routers","routerTypeIdentifier":"CiscoSystemsInc-7200SeriesRouters-IOS124","software":"IOS 12.4+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-router-cisco-generic.xslt","xsltTemplateNameForMacSec":""}, + {"platform":"Nexus 7000 Series Switches","routerTypeIdentifier":"CiscoSystemsInc-Nexus7000SeriesSwitches-NXOS51","software":"NX-OS 5.1+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-switch-cisco-nexus-generic.xslt","xsltTemplateNameForMacSec":""}, + {"platform":"Nexus 9K+ Series Switches","routerTypeIdentifier":"CiscoSystemsInc-Nexus9KSeriesSwitches-NXOS93","software":"NX-OS 9.3+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-switch-cisco-nexus-generic.xslt","xsltTemplateNameForMacSec":"customer-switch-cisco-nexus-generic-macsec.xslt"}, + {"platform":"M/MX Series Routers","routerTypeIdentifier":"JuniperNetworksInc-MMXSeriesRouters-JunOS95","software":"JunOS 9.5+","vendor":"Juniper Networks, Inc.","xsltTemplateName":"customer-router-juniper-generic.xslt","xsltTemplateNameForMacSec":"customer-router-juniper-generic-macsec.xslt"}, + {"platform":"SRX Series Routers","routerTypeIdentifier":"JuniperNetworksInc-SRXSeriesRouters-JunOS95","software":"JunOS 9.5+","vendor":"Juniper Networks, Inc.","xsltTemplateName":"customer-router-juniper-generic.xslt","xsltTemplateNameForMacSec":""}, + {"platform":"T Series Routers","routerTypeIdentifier":"JuniperNetworksInc-TSeriesRouters-JunOS95","software":"JunOS 9.5+","vendor":"Juniper Networks, Inc.","xsltTemplateName":"customer-router-juniper-generic.xslt","xsltTemplateNameForMacSec":""}, + {"platform":"PA-3000+ and 5000+ series","routerTypeIdentifier":"PaloAltoNetworks-PA3000and5000series-PANOS803","software":"PAN-OS 8.0.3+","vendor":"Palo Alto Networks","xsltTemplateName":"customer-router-palo-alto-generic.xslt","xsltTemplateNameForMacSec":""}] +} +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `customer_router_config` - Instructions for configuring your router +* `router` - Block of the router type details + +A `router` block supports the following attributes: + +* `platform` - Router platform +* `router_type_identifier` 
- Router type identifier +* `software` - Router operating system +* `vendor` - Router vendor +* `xslt_template_name` - Router XSLT Template Name +* `xslt_template_name_for_mac` - Router XSLT Template Name for MacSec + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dynamodb_table.html.markdown b/website/docs/cdktf/python/d/dynamodb_table.html.markdown new file mode 100644 index 00000000000..6b40147df41 --- /dev/null +++ b/website/docs/cdktf/python/d/dynamodb_table.html.markdown @@ -0,0 +1,45 @@ +--- +subcategory: "DynamoDB" +layout: "aws" +page_title: "AWS: aws_dynamodb_table" +description: |- + Provides a DynamoDB table data source. +--- + + + +# Data Source: aws_dynamodb_table + +Provides information about a DynamoDB table. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dynamodb_table import DataAwsDynamodbTable +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDynamodbTable(self, "tableName", + name="tableName" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the DynamoDB table. + +## Attribute Reference + +See the [DynamoDB Table Resource](/docs/providers/aws/r/dynamodb_table.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/dynamodb_table_item.html.markdown b/website/docs/cdktf/python/d/dynamodb_table_item.html.markdown new file mode 100644 index 00000000000..5efb1efadcf --- /dev/null +++ b/website/docs/cdktf/python/d/dynamodb_table_item.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "DynamoDB" +layout: "aws" +page_title: "AWS: aws_dynamodb_table_item" +description: |- + Terraform data source for retrieving a value from an AWS DynamoDB table. +--- + + + +# Data Source: aws_dynamodb_table_item + +Terraform data source for retrieving a value from an AWS DynamoDB table. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_dynamodb_table_item import DataAwsDynamodbTableItem +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsDynamodbTableItem(self, "test", + depends_on=[example], + expression_attribute_names={ + "#P": "Percentile" + }, + key="{\n\t\"hashKey\": {\"S\": \"example\"}\n}\n\n", + projection_expression="#P", + table_name=Token.as_string(aws_dynamodb_table_example.name) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `table_name` - (Required) The name of the table containing the requested item. +* `key` - (Required) A map of attribute names to AttributeValue objects, representing the primary key of the item to retrieve. + For the primary key, you must provide all of the attributes. For example, with a simple primary key, you only need to provide a value for the partition key. 
For a composite primary key, you must provide values for both the partition key and the sort key. + +The following arguments are optional: + +* `expression_attribute_names` - (Optional) One or more substitution tokens for attribute names in an expression. Use the `#` character in an expression to dereference an attribute name. +* `projection_expression` - (Optional) A string that identifies one or more attributes to retrieve from the table. These attributes can include scalars, sets, or elements of a JSON document. The attributes in the expression must be separated by commas. +If no attribute names are specified, then all attributes are returned. If any of the requested attributes are not found, they do not appear in the result. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `item` - JSON representation of a map of attribute names to [AttributeValue](https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html) objects, as specified by ProjectionExpression. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ebs_default_kms_key.html.markdown b/website/docs/cdktf/python/d/ebs_default_kms_key.html.markdown new file mode 100644 index 00000000000..2423b9b7a9f --- /dev/null +++ b/website/docs/cdktf/python/d/ebs_default_kms_key.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_default_kms_key" +description: |- + Provides metadata about the KMS key set for EBS default encryption +--- + + + +# Data Source: aws_ebs_default_kms_key + +Use this data source to get the default EBS encryption KMS key in the current region. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ebs_default_kms_key import DataAwsEbsDefaultKmsKey +from imports.aws.ebs_volume import EbsVolume +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsEbsDefaultKmsKey(self, "current") + EbsVolume(self, "example", + availability_zone="us-west-2a", + encrypted=True, + kms_key_id=Token.as_string(current.key_arn) + ) +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `key_arn` - ARN of the default KMS key used to encrypt an EBS volume in this region when no key is specified in an API call that creates the volume and encryption by default is enabled. +* `id` - Region of the default KMS key. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ebs_encryption_by_default.html.markdown b/website/docs/cdktf/python/d/ebs_encryption_by_default.html.markdown new file mode 100644 index 00000000000..126348e566a --- /dev/null +++ b/website/docs/cdktf/python/d/ebs_encryption_by_default.html.markdown @@ -0,0 +1,45 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_encryption_by_default" +description: |- + Checks whether default EBS encryption is enabled for your AWS account in the current AWS region.
+--- + + + +# Data Source: aws_ebs_encryption_by_default + +Provides a way to check whether default EBS encryption is enabled for your AWS account in the current AWS region. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ebs_encryption_by_default import DataAwsEbsEncryptionByDefault +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEbsEncryptionByDefault(self, "current") +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `enabled` - Whether or not default EBS encryption is enabled. Returns as `true` or `false`. +* `id` - Region of default EBS encryption. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ebs_snapshot.html.markdown b/website/docs/cdktf/python/d/ebs_snapshot.html.markdown new file mode 100644 index 00000000000..d8736c4e92d --- /dev/null +++ b/website/docs/cdktf/python/d/ebs_snapshot.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_snapshot" +description: |- + Get information on an EBS Snapshot. +--- + + + +# Data Source: aws_ebs_snapshot + +Use this data source to get information about an EBS Snapshot for use when provisioning EBS Volumes. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ebs_snapshot import DataAwsEbsSnapshot +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEbsSnapshot(self, "ebs_volume", + filter=[DataAwsEbsSnapshotFilter( + name="volume-size", + values=["40"] + ), DataAwsEbsSnapshotFilter( + name="tag:Name", + values=["Example"] + ) + ], + most_recent=True, + owners=["self"] + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `most_recent` - (Optional) If more than one result is returned, use the most recent snapshot. + +* `owners` - (Optional) Returns the snapshots owned by the specified owner ID. Multiple owners can be specified. + +* `snapshot_ids` - (Optional) Returns information on a specific snapshot_id. + +* `restorable_by_user_ids` - (Optional) One or more AWS account IDs that can create volumes from the snapshot. + +* `filter` - (Optional) One or more name/value pairs to filter off of. There are +several valid keys, for a full reference, check out +[describe-snapshots in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the EBS Snapshot. +* `id` - Snapshot ID (e.g., snap-59fcb34e). +* `snapshot_id` - Snapshot ID (e.g., snap-59fcb34e). +* `description` - Description for the snapshot. +* `owner_id` - AWS account ID of the EBS snapshot owner.
+* `owner_alias` - Value from an Amazon-maintained list (`amazon`, `aws-marketplace`, `microsoft`) of snapshot owners. +* `volume_id` - Volume ID (e.g., vol-59fcb34e). +* `encrypted` - Whether the snapshot is encrypted. +* `volume_size` - Size of the drive in GiBs. +* `kms_key_id` - ARN for the KMS encryption key. +* `data_encryption_key_id` - The data encryption key identifier for the snapshot. +* `state` - Snapshot state. +* `storage_tier` - Storage tier in which the snapshot is stored. +* `outpost_arn` - ARN of the Outpost on which the snapshot is stored. +* `tags` - Map of tags for the resource. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + +[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-snapshots.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ebs_snapshot_ids.html.markdown b/website/docs/cdktf/python/d/ebs_snapshot_ids.html.markdown new file mode 100644 index 00000000000..324d41da90f --- /dev/null +++ b/website/docs/cdktf/python/d/ebs_snapshot_ids.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_snapshot_ids" +description: |- + Provides a list of EBS snapshot IDs. +--- + + + +# Data Source: aws_ebs_snapshot_ids + +Use this data source to get a list of EBS Snapshot IDs matching the specified +criteria. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ebs_snapshot_ids import DataAwsEbsSnapshotIds +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEbsSnapshotIds(self, "ebs_volumes", + filter=[DataAwsEbsSnapshotIdsFilter( + name="volume-size", + values=["40"] + ), DataAwsEbsSnapshotIdsFilter( + name="tag:Name", + values=["Example"] + ) + ], + owners=["self"] + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `owners` - (Optional) Returns the snapshots owned by the specified owner ID. Multiple owners can be specified. + +* `restorable_by_user_ids` - (Optional) One or more AWS account IDs that can create volumes from the snapshot. + +* `filter` - (Optional) One or more name/value pairs to filter off of. There are +several valid keys, for a full reference, check out +[describe-snapshots in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `ids` - Set of EBS snapshot IDs, sorted by creation time in descending order.
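+Because `ids` is sorted newest-first, its first element is the most recent matching snapshot. A minimal sketch of selecting it with Terraform's `element` function via `Fn` (not `cdktf convert` output; the output name is illustrative):
+
+```python
+# A hedged sketch: picking the most recent snapshot ID from the sorted list.
+from constructs import Construct
+from cdktf import Fn, TerraformOutput, TerraformStack
+from imports.aws.data_aws_ebs_snapshot_ids import DataAwsEbsSnapshotIds
+class NewestSnapshotId(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        snapshots = DataAwsEbsSnapshotIds(self, "ebs_volumes",
+            owners=["self"]
+        )
+        # `ids` is sorted by creation time in descending order,
+        # so element 0 is the most recent snapshot.
+        TerraformOutput(self, "most_recent_snapshot_id",
+            value=Fn.element(snapshots.ids, 0)
+        )
+```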
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + +[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-snapshots.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ebs_volume.html.markdown b/website/docs/cdktf/python/d/ebs_volume.html.markdown new file mode 100644 index 00000000000..c528998bec1 --- /dev/null +++ b/website/docs/cdktf/python/d/ebs_volume.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_volume" +description: |- + Get information on an EBS volume. +--- + + + +# Data Source: aws_ebs_volume + +Use this data source to get information about an EBS volume for use in other +resources. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ebs_volume import DataAwsEbsVolume +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEbsVolume(self, "ebs_volume", + filter=[DataAwsEbsVolumeFilter( + name="volume-type", + values=["gp2"] + ), DataAwsEbsVolumeFilter( + name="tag:Name", + values=["Example"] + ) + ], + most_recent=True + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `most_recent` - (Optional) If more than one result is returned, use the most +recent volume. +* `filter` - (Optional) One or more name/value pairs to filter off of. There are +several valid keys, for a full reference, check out +[describe-volumes in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Volume ID (e.g., vol-59fcb34e). +* `volume_id` - Volume ID (e.g., vol-59fcb34e). +* `arn` - Volume ARN (e.g., arn:aws:ec2:us-east-1:0123456789012:volume/vol-59fcb34e). +* `availability_zone` - AZ where the EBS volume exists. +* `encrypted` - Whether the disk is encrypted. +* `iops` - Amount of IOPS for the disk. +* `multi_attach_enabled` - Whether Amazon EBS Multi-Attach is enabled. +* `size` - Size of the drive in GiBs. +* `snapshot_id` - Snapshot ID the EBS volume is based on. +* `outpost_arn` - ARN of the Outpost. +* `volume_type` - Type of EBS volume. +* `kms_key_id` - ARN for the KMS encryption key. +* `tags` - Map of tags for the resource. +* `throughput` - Throughput that the volume supports, in MiB/s.
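+As a usage sketch, the matched volume can feed other resources, for example an on-demand snapshot of whichever volume the lookup selected. This is not `cdktf convert` output; the construct names are illustrative, and it assumes the generated bindings export the filter struct alongside the data source class.
+
+```python
+# A hedged sketch: snapshotting the volume selected by the data source.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_ebs_volume import DataAwsEbsVolume, DataAwsEbsVolumeFilter
+from imports.aws.ebs_snapshot import EbsSnapshot
+class VolumeSnapshot(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        volume = DataAwsEbsVolume(self, "ebs_volume",
+            filter=[DataAwsEbsVolumeFilter(
+                name="tag:Name",
+                values=["Example"]
+            )
+            ],
+            most_recent=True
+        )
+        # `id` resolves to the matched volume's ID (e.g., vol-59fcb34e).
+        EbsSnapshot(self, "example",
+            volume_id=volume.id
+        )
+```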
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + +[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-volumes.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ebs_volumes.html.markdown b/website/docs/cdktf/python/d/ebs_volumes.html.markdown new file mode 100644 index 00000000000..d14be4c8423 --- /dev/null +++ b/website/docs/cdktf/python/d/ebs_volumes.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_volumes" +description: |- + Provides identifying information for EBS volumes matching given criteria +--- + + + +# Data Source: aws_ebs_volumes + +`aws_ebs_volumes` provides identifying information for EBS volumes matching given criteria. + +This data source can be useful for getting a list of volume IDs with (for example) matching tags. + +## Example Usage + +The following demonstrates obtaining a map of availability zone to EBS volume ID for volumes with a given tag value. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformIterator, TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ebs_volume import DataAwsEbsVolume +from imports.aws.data_aws_ebs_volumes import DataAwsEbsVolumes +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsEbsVolumes(self, "example", + tags={ + "VolumeSet": "TestVolumeSet" + } + ) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_for_each_iterator = TerraformIterator.from_list( + Token.as_any(example.ids)) + data_aws_ebs_volume_example = DataAwsEbsVolume(self, "example_1", + filter=[DataAwsEbsVolumeFilter( + name="volume-id", + values=[Token.as_string(example_for_each_iterator.value)] + ) + ], + for_each=example_for_each_iterator + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_ebs_volume_example.override_logical_id("example") + TerraformOutput(self, "availability_zone_to_volume_id", + value="${{ for s in ${" + data_aws_ebs_volume_example.fqn + "} : s.id => s.availability_zone}}" + ) +``` + +## Argument Reference + +* `filter` - (Optional) Custom filter block as described below. + +* `tags` - (Optional) Map of tags, each pair of which must exactly match + a pair on the desired volumes. + +More complex filters can be expressed using one or more `filter` sub-blocks, +which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVolumes.html). 
+ For example, if matching against the `size` filter, use: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ebs_volumes import DataAwsEbsVolumes +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEbsVolumes(self, "ten_or_twenty_gb_volumes", + filter=[DataAwsEbsVolumesFilter( + name="size", + values=["10", "20"] + ) + ] + ) +``` + +* `values` - (Required) Set of values that are accepted for the given field. + EBS Volume IDs will be selected if any one of the given values match. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `ids` - Set of all the EBS Volume IDs found. This data source will fail if + no volumes match the provided criteria. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_client_vpn_endpoint.html.markdown b/website/docs/cdktf/python/d/ec2_client_vpn_endpoint.html.markdown index f9b4bd2ffb6..43ccdfa9ac3 100644 --- a/website/docs/cdktf/python/d/ec2_client_vpn_endpoint.html.markdown +++ b/website/docs/cdktf/python/d/ec2_client_vpn_endpoint.html.markdown @@ -58,7 +58,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `client_vpn_endpoint_id` - (Optional) ID of the Client VPN endpoint. * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. @@ -75,7 +75,7 @@ The following arguments are required: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Client VPN endpoint. * `authentication_options` - Information about the authentication method used by the Client VPN endpoint. @@ -101,4 +101,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_coip_pool.html.markdown b/website/docs/cdktf/python/d/ec2_coip_pool.html.markdown index d80a5b69792..9c797a52803 100644 --- a/website/docs/cdktf/python/d/ec2_coip_pool.html.markdown +++ b/website/docs/cdktf/python/d/ec2_coip_pool.html.markdown @@ -62,14 +62,14 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A COIP Pool will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating any fields that are not included in the configuration with the data for the selected COIP Pool. 
-In addition, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the COIP pool * `pool_cidrs` - Set of CIDR blocks in pool @@ -80,4 +80,4 @@ In addition, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_coip_pools.html.markdown b/website/docs/cdktf/python/d/ec2_coip_pools.html.markdown index a648ee72362..7f2074f03cb 100644 --- a/website/docs/cdktf/python/d/ec2_coip_pools.html.markdown +++ b/website/docs/cdktf/python/d/ec2_coip_pools.html.markdown @@ -52,7 +52,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A COIP Pool will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `pool_ids` - Set of COIP Pool Identifiers @@ -63,4 +65,4 @@ which take the following arguments: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_host.html.markdown b/website/docs/cdktf/python/d/ec2_host.html.markdown index 8f7d844e501..26d183170e6 100644 --- a/website/docs/cdktf/python/d/ec2_host.html.markdown +++ b/website/docs/cdktf/python/d/ec2_host.html.markdown @@ -78,12 +78,13 @@ The following arguments are required: * `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeHosts.html). * `values` - (Required) Set of values that are accepted for the given field. A host will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference -In addition to the attributes above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - ID of the Dedicated Host. * `arn` - ARN of the Dedicated Host. +* `asset_id` - The ID of the Outpost hardware asset on which the Dedicated Host is allocated. * `auto_placement` - Whether auto-placement is on or off. * `availability_zone` - Availability Zone of the Dedicated Host. * `cores` - Number of cores on the Dedicated Host. @@ -101,4 +102,4 @@ In addition to the attributes above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_instance_type.html.markdown b/website/docs/cdktf/python/d/ec2_instance_type.html.markdown index 940938462f8..c5c840e2fca 100644 --- a/website/docs/cdktf/python/d/ec2_instance_type.html.markdown +++ b/website/docs/cdktf/python/d/ec2_instance_type.html.markdown @@ -34,13 +34,13 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following argument is supported: +This data source supports the following arguments: * `instance_type` - (Required) Instance ## Attribute Reference -In addition to the argument above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: ~> **NOTE:** Not all attributes are set for every instance type. 
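+Since attribute coverage varies by instance type, one pattern is to surface only the attributes a stack actually depends on. A minimal sketch (not `cdktf convert` output; it assumes the generated bindings expose the documented `default_vcpus` and `memory_size` attributes as properties):
+
+```python
+# A hedged sketch: exposing selected instance-type attributes as outputs.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_ec2_instance_type import DataAwsEc2InstanceType
+class InstanceTypeFacts(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsEc2InstanceType(self, "example",
+            instance_type="t2.micro"
+        )
+        TerraformOutput(self, "default_vcpus", value=example.default_vcpus)
+        # `memory_size` is reported in MiB.
+        TerraformOutput(self, "memory_size", value=example.memory_size)
+```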
@@ -110,4 +110,4 @@ In addition to the argument above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_instance_type_offering.html.markdown b/website/docs/cdktf/python/d/ec2_instance_type_offering.html.markdown index 74cc3d3398a..3a5a7d681bc 100644 --- a/website/docs/cdktf/python/d/ec2_instance_type_offering.html.markdown +++ b/website/docs/cdktf/python/d/ec2_instance_type_offering.html.markdown @@ -38,7 +38,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypeOfferings.html) for supported filters. Detailed below. * `location_type` - (Optional) Location type. Defaults to `region`. Valid values: `availability-zone`, `availability-zone-id`, and `region`. @@ -51,7 +51,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Instance Type. * `instance_type` - EC2 Instance Type. @@ -62,4 +62,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_instance_type_offerings.html.markdown b/website/docs/cdktf/python/d/ec2_instance_type_offerings.html.markdown index e79129aba60..53446ba4cb1 100644 --- a/website/docs/cdktf/python/d/ec2_instance_type_offerings.html.markdown +++ b/website/docs/cdktf/python/d/ec2_instance_type_offerings.html.markdown @@ -41,7 +41,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypeOfferings.html) for supported filters. Detailed below. * `location_type` - (Optional) Location type. Defaults to `region`. Valid values: `availability-zone`, `availability-zone-id`, and `region`. @@ -53,7 +53,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `instance_types` - List of EC2 Instance Types. @@ -68,4 +68,4 @@ Note that the indexes of Instance Type Offering instance types, locations and lo - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_instance_types.html.markdown b/website/docs/cdktf/python/d/ec2_instance_types.html.markdown index 559505c5d33..78034ce78a6 100644 --- a/website/docs/cdktf/python/d/ec2_instance_types.html.markdown +++ b/website/docs/cdktf/python/d/ec2_instance_types.html.markdown @@ -46,7 +46,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. 
See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypes.html) for supported filters. Detailed below. @@ -57,7 +57,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `instance_types` - List of EC2 Instance Types. @@ -68,4 +68,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_local_gateway.html.markdown b/website/docs/cdktf/python/d/ec2_local_gateway.html.markdown index 2b1e3b556fa..697451f3402 100644 --- a/website/docs/cdktf/python/d/ec2_local_gateway.html.markdown +++ b/website/docs/cdktf/python/d/ec2_local_gateway.html.markdown @@ -61,7 +61,7 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Local Gateway will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating @@ -80,4 +80,4 @@ The following attributes are additionally exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_local_gateway_route_tables.html.markdown b/website/docs/cdktf/python/d/ec2_local_gateway_route_tables.html.markdown index 15dd02d3608..1e483cf711d 100644 --- a/website/docs/cdktf/python/d/ec2_local_gateway_route_tables.html.markdown +++ b/website/docs/cdktf/python/d/ec2_local_gateway_route_tables.html.markdown @@ -52,7 +52,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Local Gateway Route Table will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of Local Gateway Route Table identifiers @@ -63,4 +65,4 @@ which take the following arguments: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface.html.markdown b/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface.html.markdown index 1176be49e82..d781282854e 100644 --- a/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface.html.markdown +++ b/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface.html.markdown @@ -55,7 +55,7 @@ The `filter` configuration block supports the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `local_address` - Local address. * `local_bgp_asn` - Border Gateway Protocol (BGP) Autonomous System Number (ASN) of the EC2 Local Gateway. 
@@ -70,4 +70,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_group.html.markdown b/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_group.html.markdown index 6a4d0eac810..509a434aa38 100644 --- a/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_group.html.markdown +++ b/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_group.html.markdown @@ -49,7 +49,7 @@ The `filter` configuration block supports the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `local_gateway_virtual_interface_ids` - Set of EC2 Local Gateway Virtual Interface identifiers. @@ -59,4 +59,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_groups.html.markdown b/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_groups.html.markdown index 3cda2dc3f96..c9e314e4bb5 100644 --- a/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_groups.html.markdown +++ b/website/docs/cdktf/python/d/ec2_local_gateway_virtual_interface_groups.html.markdown @@ -45,7 +45,7 @@ The `filter` configuration block supports the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of EC2 Local Gateway Virtual Interface Group identifiers. @@ -57,4 +57,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_local_gateways.html.markdown b/website/docs/cdktf/python/d/ec2_local_gateways.html.markdown index 468e6269394..5f0e38f2e1a 100644 --- a/website/docs/cdktf/python/d/ec2_local_gateways.html.markdown +++ b/website/docs/cdktf/python/d/ec2_local_gateways.html.markdown @@ -56,7 +56,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Local Gateway will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of all the Local Gateway identifiers @@ -67,4 +69,4 @@ which take the following arguments: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_managed_prefix_list.html.markdown b/website/docs/cdktf/python/d/ec2_managed_prefix_list.html.markdown index a3002ba8517..7134c8134c7 100644 --- a/website/docs/cdktf/python/d/ec2_managed_prefix_list.html.markdown +++ b/website/docs/cdktf/python/d/ec2_managed_prefix_list.html.markdown @@ -71,14 +71,14 @@ whose data will be exported as attributes. 
### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [DescribeManagedPrefixLists](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeManagedPrefixLists.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - ID of the selected prefix list. * `arn` - ARN of the selected prefix list. @@ -95,4 +95,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_managed_prefix_lists.html.markdown b/website/docs/cdktf/python/d/ec2_managed_prefix_lists.html.markdown index 0a89e73523b..88f1134260d 100644 --- a/website/docs/cdktf/python/d/ec2_managed_prefix_lists.html.markdown +++ b/website/docs/cdktf/python/d/ec2_managed_prefix_lists.html.markdown @@ -63,7 +63,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A managed prefix list will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - List of all the managed prefix list ids found. @@ -74,4 +76,4 @@ which take the following arguments: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_network_insights_analysis.html.markdown b/website/docs/cdktf/python/d/ec2_network_insights_analysis.html.markdown index 18bf2d49ab3..245373059ca 100644 --- a/website/docs/cdktf/python/d/ec2_network_insights_analysis.html.markdown +++ b/website/docs/cdktf/python/d/ec2_network_insights_analysis.html.markdown @@ -42,14 +42,14 @@ whose data will be exported as attributes. ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [`DescribeNetworkInsightsAnalyses`](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkInsightsAnalyses.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `alternate_path_hints` - Potential intermediate components of a feasible path. * `arn` - ARN of the selected Network Insights Analysis. @@ -64,4 +64,4 @@ In addition to all arguments above, the following attributes are exported: * `status_message` - Message to provide more context when the `status` is `failed`. * `warning_message` - Warning message. 
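+As a usage sketch, the analysis verdict can be read back once an analysis is selected by ID. This is not `cdktf convert` output; the analysis ID is illustrative, and it assumes the generated bindings expose the documented attributes as properties.
+
+```python
+# A hedged sketch: reading the outcome of an existing Network Insights Analysis.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_ec2_network_insights_analysis import DataAwsEc2NetworkInsightsAnalysis
+class AnalysisVerdict(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        analysis = DataAwsEc2NetworkInsightsAnalysis(self, "analysis",
+            network_insights_analysis_id="nia-0123456789abcdef0"  # illustrative
+        )
+        # `path_found` and `status_message` appear in the attribute list above.
+        TerraformOutput(self, "path_found", value=analysis.path_found)
+        TerraformOutput(self, "status_message", value=analysis.status_message)
+```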
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_network_insights_path.html.markdown b/website/docs/cdktf/python/d/ec2_network_insights_path.html.markdown index 3841f5937b5..591e48dfbc7 100644 --- a/website/docs/cdktf/python/d/ec2_network_insights_path.html.markdown +++ b/website/docs/cdktf/python/d/ec2_network_insights_path.html.markdown @@ -42,14 +42,14 @@ whose data will be exported as attributes. ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [`DescribeNetworkInsightsPaths`](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkInsightsPaths.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the selected Network Insights Path. * `destination` - AWS resource that is the destination of the path. @@ -60,4 +60,4 @@ In addition to all arguments above, the following attributes are exported: * `source_ip` - IP address of the AWS resource that is the source of the path. * `tags` - Map of tags assigned to the resource. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_public_ipv4_pool.html.markdown b/website/docs/cdktf/python/d/ec2_public_ipv4_pool.html.markdown index 281a7d8b275..322a6cea0aa 100644 --- a/website/docs/cdktf/python/d/ec2_public_ipv4_pool.html.markdown +++ b/website/docs/cdktf/python/d/ec2_public_ipv4_pool.html.markdown @@ -39,9 +39,9 @@ The following arguments are required: * `pool_id` - (Required) AWS resource IDs of a public IPv4 pool (as a string) for which this data source will fetch detailed information. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `description` - Description of the pool, if any. * `network_border_group` - Name of the location from which the address pool is advertised. @@ -54,4 +54,4 @@ In addition to all arguments above, the following attributes are exported: * `total_address_count` - Total number of addresses in the pool. * `total_available_address_count` - Total number of available addresses in the pool. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_public_ipv4_pools.html.markdown b/website/docs/cdktf/python/d/ec2_public_ipv4_pools.html.markdown index cd1d80c1e3e..6f3ce31fa1c 100644 --- a/website/docs/cdktf/python/d/ec2_public_ipv4_pools.html.markdown +++ b/website/docs/cdktf/python/d/ec2_public_ipv4_pools.html.markdown @@ -67,8 +67,10 @@ which take the following arguments: * `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribePublicIpv4Pools.html). * `values` - (Required) Set of values that are accepted for the given field. Pool IDs will be selected if any one of the given values match. 
-## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `pool_ids` - List of all the pool IDs found. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_serial_console_access.html.markdown b/website/docs/cdktf/python/d/ec2_serial_console_access.html.markdown index 56e9796a378..6d924ec364d 100644 --- a/website/docs/cdktf/python/d/ec2_serial_console_access.html.markdown +++ b/website/docs/cdktf/python/d/ec2_serial_console_access.html.markdown @@ -29,9 +29,9 @@ class MyConvertedCode(TerraformStack): DataAwsEc2SerialConsoleAccess(self, "current") ``` -## Attributes Reference +## Attribute Reference -The following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `enabled` - Whether or not serial console access is enabled. Returns as `true` or `false`. * `id` - Region of serial console access. @@ -42,4 +42,4 @@ The following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_spot_price.html.markdown b/website/docs/cdktf/python/d/ec2_spot_price.html.markdown index 3acc6c185ae..1743a1f0cf8 100644 --- a/website/docs/cdktf/python/d/ec2_spot_price.html.markdown +++ b/website/docs/cdktf/python/d/ec2_spot_price.html.markdown @@ -39,7 +39,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `instance_type` - (Optional) Type of instance for which to query Spot Price information. * `availability_zone` - (Optional) Availability zone in which to query Spot price information. @@ -52,7 +52,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `spot_price` - Most recent Spot Price value for the given instance type and AZ. @@ -64,4 +64,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway.html.markdown index de5735d1ff1..9a6ccf6ff5a 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway.html.markdown @@ -58,7 +58,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway. 
@@ -70,7 +70,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `amazon_side_asn` - Private Autonomous System Number (ASN) for the Amazon side of a BGP session * `arn` - EC2 Transit Gateway ARN @@ -94,4 +94,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_attachment.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_attachment.html.markdown index 4cbb7798af4..295e1716c40 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_attachment.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_attachment.html.markdown @@ -40,7 +40,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transit_gateway_attachment_id` - (Optional) ID of the attachment. @@ -52,7 +52,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the attachment. * `association_state` - The state of the association (see [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_TransitGatewayAttachmentAssociation.html) for valid values). @@ -65,4 +65,4 @@ In addition to all arguments above, the following attributes are exported: * `transit_gateway_id` - ID of the transit gateway. * `transit_gateway_owner_id` - The ID of the AWS account that owns the transit gateway. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_attachments.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_attachments.html.markdown index 8e61c55b7d8..426b019ffef 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_attachments.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_attachments.html.markdown @@ -53,7 +53,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. @@ -64,7 +64,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `ids` - A list of all attachment IDs matching the filter. You can retrieve more information about the attachment using the [aws_ec2_transit_gateway_attachment][2] data source, searching by identifier (see the sketch below).
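A hedged sketch of that follow-up lookup, fanning out over the returned identifiers with `TerraformIterator` from the cdktf core library; the generated binding names are assumptions following the `cdktf get` conventions above:

```python
# Hypothetical sketch; binding names assume `cdktf get` output.
from cdktf import TerraformStack, TerraformIterator, Token
from imports.aws.data_aws_ec2_transit_gateway_attachment import DataAwsEc2TransitGatewayAttachment
from imports.aws.data_aws_ec2_transit_gateway_attachments import DataAwsEc2TransitGatewayAttachments

class AttachmentDetails(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        attachments = DataAwsEc2TransitGatewayAttachments(self, "all")
        # One detail lookup per attachment ID returned by the plural data source.
        each = TerraformIterator.from_list(attachments.ids)
        DataAwsEc2TransitGatewayAttachment(self, "detail",
            for_each=each,
            transit_gateway_attachment_id=Token.as_string(each.value)
        )
```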
@@ -77,4 +77,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_connect.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_connect.html.markdown index 4a89010dc15..4a597fbf801 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_connect.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_connect.html.markdown @@ -58,7 +58,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transit_gateway_connect_id` - (Optional) Identifier of the EC2 Transit Gateway Connect. @@ -70,7 +70,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `protocol` - Tunnel protocol * `tags` - Key-value tags for the EC2 Transit Gateway Connect @@ -83,4 +83,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_connect_peer.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_connect_peer.html.markdown index c1dd47bd822..898e64a3318 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_connect_peer.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_connect_peer.html.markdown @@ -58,7 +58,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transit_gateway_connect_peer_id` - (Optional) Identifier of the EC2 Transit Gateway Connect Peer. @@ -70,7 +70,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Connect Peer ARN * `bgp_asn` - BGP ASN number assigned to the customer device @@ -88,4 +88,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown index 78f31a4c85d..043fdf8e462 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown @@ -36,7 +36,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `transit_gateway_id` - (Optional) Identifier of the EC2 Transit Gateway. * `dx_gateway_id` - (Optional) Identifier of the Direct Connect Gateway.
@@ -45,14 +45,14 @@ The following arguments are supported: ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html). * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tags` - Key-value tags for the EC2 Transit Gateway Attachment @@ -63,4 +63,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_multicast_domain.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_multicast_domain.html.markdown index 58653a90360..cc69fc43533 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_multicast_domain.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_multicast_domain.html.markdown @@ -58,7 +58,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transit_gateway_multicast_domain_id` - (Optional) Identifier of the EC2 Transit Gateway Multicast Domain. @@ -74,7 +74,7 @@ The following arguments are required: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Domain identifier. * `arn` - EC2 Transit Gateway Multicast Domain ARN. @@ -100,4 +100,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_peering_attachment.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_peering_attachment.html.markdown index 7253d1f3270..f8d4a2909f0 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_peering_attachment.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_peering_attachment.html.markdown @@ -58,7 +58,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway Peering Attachment. 
@@ -75,7 +75,7 @@ which take the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `peer_account_id` - Identifier of the peer AWS account * `peer_region` - Identifier of the peer AWS region @@ -88,4 +88,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_route_table.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_route_table.html.markdown index 9f61745de95..0fc241b5f98 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_route_table.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_route_table.html.markdown @@ -61,7 +61,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway Route Table. @@ -73,7 +73,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Route Table ARN. * `default_association_route_table` - Boolean whether this is the default association route table for the EC2 Transit Gateway @@ -88,4 +88,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_associations.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_associations.html.markdown index e6bc8adecd1..312a34c6b0f 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_associations.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_associations.html.markdown @@ -52,11 +52,11 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Transit Gateway Route Table will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of Transit Gateway Route Table Association identifiers. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_propagations.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_propagations.html.markdown index e03bead59fb..69e2fcdb592 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_propagations.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_route_table_propagations.html.markdown @@ -52,11 +52,11 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Transit Gateway Route Table will be selected if any one of the given values matches. 
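As an illustration, a minimal sketch of a propagation query with such a filter; it assumes the generated `DataAwsEc2TransitGatewayRouteTablePropagations*` bindings, that `transit_gateway_route_table_id` is this data source's required argument, and that `resource-type` is among the filter names accepted by the underlying `GetTransitGatewayRouteTablePropagations` API:

```python
# Hypothetical sketch; names and the required argument are assumptions.
from cdktf import TerraformStack
from imports.aws.data_aws_ec2_transit_gateway_route_table_propagations import (
    DataAwsEc2TransitGatewayRouteTablePropagations,
    DataAwsEc2TransitGatewayRouteTablePropagationsFilter,
)

class PropagationLookup(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DataAwsEc2TransitGatewayRouteTablePropagations(self, "example",
            transit_gateway_route_table_id="tgw-rtb-0123456789abcdef0",
            # Restrict results to propagations originating from VPC attachments.
            filter=[DataAwsEc2TransitGatewayRouteTablePropagationsFilter(
                name="resource-type",
                values=["vpc"]
            )]
        )
```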
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of Transit Gateway Route Table Propagation identifiers. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_route_tables.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_route_tables.html.markdown index cccd1a7c9a3..a9cfbc882af 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_route_tables.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_route_tables.html.markdown @@ -38,7 +38,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) Custom filter block as described below. @@ -54,9 +54,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Transit Gateway Route Table will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of Transit Gateway Route Table identifiers. @@ -67,4 +67,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachment.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachment.html.markdown index 16a7ee2d006..42c3451e98a 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachment.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachment.html.markdown @@ -58,7 +58,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway VPC Attachment. @@ -70,7 +70,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `appliance_mode_support` - Whether Appliance Mode support is enabled. * `dns_support` - Whether DNS support is enabled.
@@ -88,4 +88,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachments.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachments.html.markdown index e2b7992d540..64cf9a24f5a 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachments.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_vpc_attachments.html.markdown @@ -50,7 +50,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. @@ -61,7 +61,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `ids` - A list of all attachment IDs matching the filter. You can retrieve more information about the attachment using the [aws_ec2_transit_gateway_vpc_attachment][2] data source, searching by identifier. @@ -74,4 +74,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ec2_transit_gateway_vpn_attachment.html.markdown b/website/docs/cdktf/python/d/ec2_transit_gateway_vpn_attachment.html.markdown index 924d0c9e104..4d664053cde 100644 --- a/website/docs/cdktf/python/d/ec2_transit_gateway_vpn_attachment.html.markdown +++ b/website/docs/cdktf/python/d/ec2_transit_gateway_vpn_attachment.html.markdown @@ -61,7 +61,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `transit_gateway_id` - (Optional) Identifier of the EC2 Transit Gateway. * `vpn_connection_id` - (Optional) Identifier of the EC2 VPN Connection. @@ -70,14 +70,14 @@ The following arguments are supported: ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html). * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway VPN Attachment identifier * `tags` - Key-value tags for the EC2 Transit Gateway VPN Attachment @@ -88,4 +88,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecr_authorization_token.html.markdown b/website/docs/cdktf/python/d/ecr_authorization_token.html.markdown new file mode 100644 index 00000000000..47e0e85c0aa --- /dev/null +++ b/website/docs/cdktf/python/d/ecr_authorization_token.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_authorization_token" +description: |- + Provides details about an ECR Authorization Token +--- + + + +# Data Source: aws_ecr_authorization_token + +The ECR Authorization Token data source allows the authorization token, proxy endpoint, token expiration date, user name and password to be retrieved for an ECR repository. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ecr_authorization_token import DataAwsEcrAuthorizationToken +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcrAuthorizationToken(self, "token") +``` + +## Argument Reference + +This data source supports the following arguments: + +* `registry_id` - (Optional) AWS account ID of the ECR Repository. If not specified the default account is assumed. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `authorization_token` - Temporary IAM authentication credentials to access the ECR repository encoded in base64 in the form of `user_name:password`. +* `expires_at` - Time in UTC RFC3339 format when the authorization token expires. +* `id` - Region of the authorization token. +* `password` - Password decoded from the authorization token. +* `proxy_endpoint` - Registry URL to use in the docker login command. +* `user_name` - User name decoded from the authorization token. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecr_image.html.markdown b/website/docs/cdktf/python/d/ecr_image.html.markdown new file mode 100644 index 00000000000..50ebfb0f62c --- /dev/null +++ b/website/docs/cdktf/python/d/ecr_image.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_image" +description: |- + Provides details about an ECR Image +--- + + + +# Data Source: aws_ecr_image + +The ECR Image data source allows the details of an image with a particular tag or digest to be retrieved. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_ecr_image import DataAwsEcrImage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcrImage(self, "service_image", + image_tag="latest", + repository_name="my/service" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `registry_id` - (Optional) ID of the Registry where the repository resides. +* `repository_name` - (Required) Name of the ECR Repository. +* `image_digest` - (Optional) Sha256 digest of the image manifest. At least one of `image_digest`, `image_tag`, or `most_recent` must be specified. +* `image_tag` - (Optional) Tag associated with this image. At least one of `image_digest`, `image_tag`, or `most_recent` must be specified. +* `most_recent` - (Optional) Return the most recently pushed image. At least one of `image_digest`, `image_tag`, or `most_recent` must be specified. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - SHA256 digest of the image manifest. +* `image_pushed_at` - Date and time, expressed as a unix timestamp, at which the current image was pushed to the repository. +* `image_size_in_bytes` - Size, in bytes, of the image in the repository. +* `image_tags` - List of tags associated with this image. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecr_pull_through_cache_rule.html.markdown b/website/docs/cdktf/python/d/ecr_pull_through_cache_rule.html.markdown new file mode 100644 index 00000000000..aea90de6123 --- /dev/null +++ b/website/docs/cdktf/python/d/ecr_pull_through_cache_rule.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_pull_through_cache_rule" +description: |- + Provides details about an ECR Pull Through Cache Rule +--- + + + +# Data Source: aws_ecr_pull_through_cache_rule + +The ECR Pull Through Cache Rule data source allows the upstream registry URL and registry ID to be retrieved for a Pull Through Cache Rule. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ecr_pull_through_cache_rule import DataAwsEcrPullThroughCacheRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcrPullThroughCacheRule(self, "ecr_public", + ecr_repository_prefix="ecr-public" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +- `ecr_repository_prefix` - (Required) The repository name prefix to use when caching images from the source registry. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +- `id` - The repository name prefix. +- `upstream_registry_url` - The registry URL of the upstream public registry to use as the source. +- `registry_id` - The registry ID where the repository was created. 
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecr_repository.html.markdown b/website/docs/cdktf/python/d/ecr_repository.html.markdown new file mode 100644 index 00000000000..262472083f7 --- /dev/null +++ b/website/docs/cdktf/python/d/ecr_repository.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_repository" +description: |- + Provides details about an ECR Repository +--- + + + +# Data Source: aws_ecr_repository + +The ECR Repository data source allows the ARN, Repository URI and Registry ID to be retrieved for an ECR repository. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ecr_repository import DataAwsEcrRepository +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcrRepository(self, "service", + name="ecr-repository" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the ECR Repository. +* `registry_id` - (Optional) Registry ID where the repository was created. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Full ARN of the repository. +* `encryption_configuration` - Encryption configuration for the repository. See [Encryption Configuration](#encryption-configuration) below. +* `image_scanning_configuration` - Configuration block that defines image scanning configuration for the repository. See [Image Scanning Configuration](#image-scanning-configuration) below. +* `image_tag_mutability` - The tag mutability setting for the repository. +* `most_recent_image_tags` - List of image tags associated with the most recently pushed image in the repository. +* `repository_url` - URL of the repository (in the form `aws_account_id.dkr.ecr.region.amazonaws.com/repositoryName`). +* `tags` - Map of tags assigned to the resource. + +### Encryption Configuration + +* `encryption_type` - Encryption type to use for the repository, either `AES256` or `KMS`. +* `kms_key` - If `encryption_type` is `KMS`, the ARN of the KMS key used. + +### Image Scanning Configuration + +* `scan_on_push` - Whether images are scanned after being pushed to the repository. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecrpublic_authorization_token.html.markdown b/website/docs/cdktf/python/d/ecrpublic_authorization_token.html.markdown new file mode 100644 index 00000000000..edb31ef6309 --- /dev/null +++ b/website/docs/cdktf/python/d/ecrpublic_authorization_token.html.markdown @@ -0,0 +1,44 @@ +--- +subcategory: "ECR Public" +layout: "aws" +page_title: "AWS: aws_ecrpublic_authorization_token" +description: |- + Provides details about a Public ECR Authorization Token +--- + + + +# Data Source: aws_ecrpublic_authorization_token + +The Public ECR Authorization Token data source allows the authorization token, token expiration date, user name, and password to be retrieved for a Public ECR repository. + +~> **NOTE:** This data source can only be used in the `us-east-1` region. 
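Where the stack's default provider targets another region, one way to satisfy this constraint is an aliased provider pinned to `us-east-1`. A minimal sketch follows; the alias name and module paths are assumptions, following the `cdktf get` conventions used throughout these pages:

```python
# Hypothetical sketch; module paths assume bindings generated by `cdktf get`.
from cdktf import TerraformStack
from imports.aws.provider import AwsProvider
from imports.aws.data_aws_ecrpublic_authorization_token import DataAwsEcrpublicAuthorizationToken

class PublicEcrToken(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # Aliased provider pinned to us-east-1, the only region this data source supports.
        use1 = AwsProvider(self, "aws_use1",
            region="us-east-1",
            alias="use1"
        )
        DataAwsEcrpublicAuthorizationToken(self, "token",
            provider=use1
        )
```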
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ecrpublic_authorization_token import DataAwsEcrpublicAuthorizationToken +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcrpublicAuthorizationToken(self, "token") +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `authorization_token` - Temporary IAM authentication credentials to access the ECR repository encoded in base64 in the form of `user_name:password`. +* `expires_at` - Time in UTC RFC3339 format when the authorization token expires. +* `id` - Region of the authorization token. +* `password` - Password decoded from the authorization token. +* `user_name` - User name decoded from the authorization token. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecs_cluster.html.markdown b/website/docs/cdktf/python/d/ecs_cluster.html.markdown new file mode 100644 index 00000000000..0989d9ffbba --- /dev/null +++ b/website/docs/cdktf/python/d/ecs_cluster.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_cluster" +description: |- + Provides details about an ecs cluster +--- + + + +# Data Source: aws_ecs_cluster + +The ECS Cluster data source allows access to details of a specific +cluster within an AWS ECS service. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_ecs_cluster import DataAwsEcsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcsCluster(self, "ecs-mongo", + cluster_name="ecs-mongo-production" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `cluster_name` - (Required) Name of the ECS Cluster + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the ECS Cluster +* `status` - Status of the ECS Cluster +* `pending_tasks_count` - Number of pending tasks for the ECS Cluster +* `running_tasks_count` - Number of running tasks for the ECS Cluster +* `registered_container_instances_count` - The number of registered container instances for the ECS Cluster +* `service_connect_defaults` - The default Service Connect namespace +* `setting` - Settings associated with the ECS Cluster +* `tags` - Key-value map of resource tags + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecs_container_definition.html.markdown b/website/docs/cdktf/python/d/ecs_container_definition.html.markdown new file mode 100644 index 00000000000..fab0abc72d6 --- /dev/null +++ b/website/docs/cdktf/python/d/ecs_container_definition.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_container_definition" +description: |- + Provides details about a single container within an ecs task definition +--- + + + +# Data Source: aws_ecs_container_definition + +The ECS container definition data source allows access to details of +a specific container within an AWS ECS service. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ecs_container_definition import DataAwsEcsContainerDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcsContainerDefinition(self, "ecs-mongo", + container_name="mongodb", + task_definition=mongo.id + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `task_definition` - (Required) ARN of the task definition which contains the container +* `container_name` - (Required) Name of the container definition + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `image` - Docker image in use, including the digest +* `image_digest` - Digest of the docker image in use +* `cpu` - CPU limit for this container definition +* `memory` - Memory limit for this container definition +* `memory_reservation` - Soft limit (in MiB) of memory to reserve for the container. 
When system memory is under contention, Docker attempts to keep the container memory to this soft limit +* `environment` - Environment in use +* `disable_networking` - Indicator if networking is disabled +* `docker_labels` - Set docker labels + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecs_service.html.markdown b/website/docs/cdktf/python/d/ecs_service.html.markdown new file mode 100644 index 00000000000..198b25d81b4 --- /dev/null +++ b/website/docs/cdktf/python/d/ecs_service.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_service" +description: |- + Provides details about an ecs service +--- + + + +# Data Source: aws_ecs_service + +The ECS Service data source allows access to details of a specific +Service within an AWS ECS Cluster. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ecs_service import DataAwsEcsService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcsService(self, "example", + cluster_arn=Token.as_string(data_aws_ecs_cluster_example.arn), + service_name="example" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `service_name` - (Required) Name of the ECS Service +* `cluster_arn` - (Required) ARN of the ECS Cluster + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the ECS Service +* `desired_count` - Number of tasks for the ECS Service +* `launch_type` - Launch type for the ECS Service +* `scheduling_strategy` - Scheduling strategy for the ECS Service +* `task_definition` - Family for the latest ACTIVE revision or full ARN of the task definition. +* `tags` - Resource tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecs_task_definition.html.markdown b/website/docs/cdktf/python/d/ecs_task_definition.html.markdown new file mode 100644 index 00000000000..5247c4773f1 --- /dev/null +++ b/website/docs/cdktf/python/d/ecs_task_definition.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_task_definition" +description: |- + Provides details about an ecs task definition +--- + + + +# Data Source: aws_ecs_task_definition + +The ECS task definition data source allows access to details of +a specific AWS ECS task definition. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_ecs_task_definition import DataAwsEcsTaskDefinition +from imports.aws.ecs_cluster import EcsCluster +from imports.aws.ecs_service import EcsService +from imports.aws.ecs_task_definition import EcsTaskDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foo = EcsCluster(self, "foo", + name="foo" + ) + mongo = EcsTaskDefinition(self, "mongo", + container_definitions="[\n {\n \"cpu\": 128,\n \"environment\": [{\n \"name\": \"SECRET\",\n \"value\": \"KEY\"\n }],\n \"essential\": true,\n \"image\": \"mongo:latest\",\n \"memory\": 128,\n \"memoryReservation\": 64,\n \"name\": \"mongodb\"\n }\n]\n\n", + family="mongodb" + ) + data_aws_ecs_task_definition_mongo = DataAwsEcsTaskDefinition(self, "mongo_2", + task_definition=mongo.family + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_ecs_task_definition_mongo.override_logical_id("mongo") + aws_ecs_service_mongo = EcsService(self, "mongo_3", + cluster=foo.id, + desired_count=2, + name="mongo", + task_definition=Token.as_string(data_aws_ecs_task_definition_mongo.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ecs_service_mongo.override_logical_id("mongo") +``` + +## Argument Reference + +This data source supports the following arguments: + +* `task_definition` - (Required) Family for the latest ACTIVE revision, family and revision (family:revision) for a specific revision in the family, or full ARN of the task definition to access. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ARN of the task definition. +* `arn` - ARN of the task definition. +* `arn_without_revision` - ARN of the Task Definition with the trailing `revision` removed. This may be useful for situations where the latest task definition is always desired. If a revision isn't specified, the latest ACTIVE revision is used. See the [AWS documentation](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_StartTask.html#ECS-StartTask-request-taskDefinition) for details. +* `execution_role_arn` - ARN of the task execution role that the Amazon ECS container agent and the Docker daemon can assume. +* `family` - Family of this task definition. +* `network_mode` - Docker networking mode to use for the containers in this task. +* `revision` - Revision of this task definition. +* `status` - Status of this task definition. +* `task_role_arn` - ARN of the IAM role that containers in this task can assume. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ecs_task_execution.html.markdown b/website/docs/cdktf/python/d/ecs_task_execution.html.markdown new file mode 100644 index 00000000000..1b328003cdf --- /dev/null +++ b/website/docs/cdktf/python/d/ecs_task_execution.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_task_execution" +description: |- + Terraform data source for managing an AWS ECS (Elastic Container) Task Execution. +--- + + + +# Data Source: aws_ecs_task_execution + +Terraform data source for managing an AWS ECS (Elastic Container) Task Execution. This data source calls the [RunTask](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html) API, allowing execution of one-time tasks that don't fit a standard resource lifecycle.
See the [feature request issue](https://github.com/hashicorp/terraform-provider-aws/issues/1703) for additional context. + +~> **NOTE on plan operations:** This data source calls the `RunTask` API on every read operation, which means new task(s) may be created from a `terraform plan` command if all attributes are known. Placing this functionality behind a data source is an intentional trade off to enable use cases requiring a one-time task execution without relying on [provisioners](https://developer.hashicorp.com/terraform/language/resources/provisioners/syntax). Caution should be taken to ensure the data source is only executed once, or that the resulting tasks can safely run in parallel. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ecs_task_execution import DataAwsEcsTaskExecution +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEcsTaskExecution(self, "example", + cluster=Token.as_string(aws_ecs_cluster_example.id), + desired_count=1, + launch_type="FARGATE", + network_configuration=DataAwsEcsTaskExecutionNetworkConfiguration( + assign_public_ip=False, + security_groups=[Token.as_string(aws_security_group_example.id)], + subnets=Token.as_list(property_access(aws_subnet_example, ["*", "id"])) + ), + task_definition=Token.as_string(aws_ecs_task_definition_example.arn) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `cluster` - (Required) Short name or full Amazon Resource Name (ARN) of the cluster to run the task on. +* `task_definition` - (Required) The `family` and `revision` (`family:revision`) or full ARN of the task definition to run. If a revision isn't specified, the latest `ACTIVE` revision is used. + +The following arguments are optional: + +* `capacity_provider_strategy` - (Optional) Set of capacity provider strategies to use for the cluster. See below. +* `desired_count` - (Optional) Number of instantiations of the specified task to place on your cluster. You can specify up to 10 tasks for each call. +* `enable_ecs_managed_tags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the tasks within the service. +* `enable_execute_command` - (Optional) Specifies whether to enable Amazon ECS Exec for the tasks within the service. +* `group` - (Optional) Name of the task group to associate with the task. The default value is the family name of the task definition. +* `launch_type` - (Optional) Launch type on which to run your service. Valid values are `EC2`, `FARGATE`, and `EXTERNAL`. +* `network_configuration` - (Optional) Network configuration for the service. This parameter is required for task definitions that use the `awsvpc` network mode to receive their own Elastic Network Interface, and it is not supported for other network modes. See below. +* `overrides` - (Optional) A list of container overrides that specify the name of a container in the specified task definition and the overrides it should receive. +* `placement_constraints` - (Optional) An array of placement constraint objects to use for the task. You can specify up to 10 constraints for each task. See below. 
+* `placement_strategy` - (Optional) The placement strategy objects to use for the task. You can specify a maximum of 5 strategy rules for each task. See below. +* `platform_version` - (Optional) The platform version the task uses. A platform version is only specified for tasks hosted on Fargate. If one isn't specified, the `LATEST` platform version is used. +* `propagate_tags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags aren't propagated. An error will be received if you specify the `SERVICE` option when running a task. Valid values are `TASK_DEFINITION` or `NONE`. +* `reference_id` - (Optional) The reference ID to use for the task. +* `started_by` - (Optional) An optional tag specified when a task is started. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### capacity_provider_strategy + +* `capacity_provider` - (Required) Name of the capacity provider. +* `base` - (Optional) The number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Defaults to `0`. +* `weight` - (Optional) The relative percentage of the total number of launched tasks that should use the specified capacity provider. The `weight` value is taken into consideration after the `base` count of tasks has been satisfied. Defaults to `0`. + +### network_configuration + +* `subnets` - (Required) Subnets associated with the task or service. +* `security_groups` - (Optional) Security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used. +* `assign_public_ip` - (Optional) Assign a public IP address to the ENI (Fargate launch type only). Valid values are `true` or `false`. Default `false`. + +For more information, see the [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html) documentation. + +### overrides + +* `container_overrides` - (Optional) One or more container overrides that are sent to a task. See below. +* `cpu` - (Optional) The CPU override for the task. +* `execution_role_arn` - (Optional) Amazon Resource Name (ARN) of the task execution role override for the task. +* `inference_accelerator_overrides` - (Optional) Elastic Inference accelerator override for the task. See below. +* `memory` - (Optional) The memory override for the task. +* `task_role_arn` - (Optional) Amazon Resource Name (ARN) of the role that containers in this task can assume. + +### container_overrides + +* `command` - (Optional) The command to send to the container that overrides the default command from the Docker image or the task definition. +* `cpu` - (Optional) The number of cpu units reserved for the container, instead of the default value from the task definition. +* `environment` - (Optional) The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. See below. +* `memory` - (Optional) The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. 
If your container attempts to exceed the memory specified here, the container is killed. +* `memory_reservation` - (Optional) The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. +* `name` - (Optional) The name of the container that receives the override. This parameter is required if any override is specified. +* `resource_requirements` - (Optional) The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. See below. + +### environment + +* `key` - (Required) The name of the key-value pair. For environment variables, this is the name of the environment variable. +* `value` - (Required) The value of the key-value pair. For environment variables, this is the value of the environment variable. + +### resource_requirements + +* `type` - (Required) The type of resource to assign to a container. Valid values are `GPU` or `InferenceAccelerator`. +* `value` - (Required) The value for the specified resource type. If the `GPU` type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. If the `InferenceAccelerator` type is used, the value matches the `deviceName` for an InferenceAccelerator specified in a task definition. + +### inference_accelerator_overrides + +* `device_name` - (Optional) The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition. +* `device_type` - (Optional) The Elastic Inference accelerator type to use. + +### placement_constraints + +* `expression` - (Optional) A cluster query language expression to apply to the constraint. The expression can have a maximum length of 2000 characters. You can't specify an expression if the constraint type is `distinctInstance`. +* `type` - (Optional) The type of constraint. Valid values are `distinctInstance` or `memberOf`. Use `distinctInstance` to ensure that each task in a particular group is running on a different container instance. Use `memberOf` to restrict the selection to a group of valid candidates. + +### placement_strategy + +* `field` - (Optional) The field to apply the placement strategy against. +* `type` - (Optional) The type of placement strategy. Valid values are `random`, `spread`, and `binpack`. + +For more information, see the [Placement Strategy](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_PlacementStrategy.html) documentation. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `task_arns` - A list of the provisioned task ARNs. +* `id` - The unique identifier, which is a comma-delimited string joining the `cluster` and `task_definition` attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/efs_access_point.html.markdown b/website/docs/cdktf/python/d/efs_access_point.html.markdown new file mode 100644 index 00000000000..dd369791c2f --- /dev/null +++ b/website/docs/cdktf/python/d/efs_access_point.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_access_point" +description: |- + Provides an Elastic File System (EFS) Access Point data source. 
+--- + + + +# Data Source: aws_efs_access_point + +Provides information about an Elastic File System (EFS) Access Point. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_efs_access_point import DataAwsEfsAccessPoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEfsAccessPoint(self, "test", + access_point_id="fsap-12345678" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `access_point_id` - (Required) ID that identifies the access point. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the access point. +* `arn` - Amazon Resource Name of the access point. +* `file_system_arn` - Amazon Resource Name of the file system. +* `file_system_id` - ID of the file system for which the access point is intended. +* `posix_user` - Single element list containing operating system user and group applied to all file system requests made using the access point. + * `gid` - Group ID + * `secondary_gids` - Secondary group IDs + * `uid` - User ID +* `root_directory` - Single element list containing information on the directory on the Amazon EFS file system that the access point provides access to. + * `creation_info` - Single element list containing information on the creation permissions of the directory + * `owner_gid` - POSIX owner group ID + * `owner_uid` - POSIX owner user ID + * `permissions` - POSIX permissions mode + * `path` - Path exposed as the root directory +* `tags` - Key-value mapping of resource tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/efs_access_points.html.markdown b/website/docs/cdktf/python/d/efs_access_points.html.markdown new file mode 100644 index 00000000000..bd30d8a505c --- /dev/null +++ b/website/docs/cdktf/python/d/efs_access_points.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_access_points" +description: |- + Provides information about multiple Elastic File System (EFS) Access Points. +--- + + + +# Data Source: aws_efs_access_points + +Provides information about multiple Elastic File System (EFS) Access Points. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_efs_access_points import DataAwsEfsAccessPoints +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEfsAccessPoints(self, "test", + file_system_id="fs-12345678" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `file_system_id` - (Required) EFS File System identifier. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of Amazon Resource Names (ARNs). +* `id` - EFS File System identifier. +* `ids` - Set of identifiers.
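Where the identifiers are needed outside the stack, a small sketch exposing them as a Terraform output; `TerraformOutput` is part of the cdktf core library, and the rest mirrors the example above:

```python
from cdktf import TerraformStack, TerraformOutput
from imports.aws.data_aws_efs_access_points import DataAwsEfsAccessPoints

class AccessPointIds(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        aps = DataAwsEfsAccessPoints(self, "test",
            file_system_id="fs-12345678"
        )
        # Publish the discovered access point identifiers as a stack output.
        TerraformOutput(self, "access_point_ids", value=aps.ids)
```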
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/efs_file_system.html.markdown b/website/docs/cdktf/python/d/efs_file_system.html.markdown new file mode 100644 index 00000000000..fa9aec5df8b --- /dev/null +++ b/website/docs/cdktf/python/d/efs_file_system.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_file_system" +description: |- + Provides an Elastic File System (EFS) File System data source. +--- + + + +# Data Source: aws_efs_file_system + +Provides information about an Elastic File System (EFS) File System. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import VariableType, TerraformVariable, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_efs_file_system import DataAwsEfsFileSystem +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + file_system_id = TerraformVariable(self, "file_system_id", + default="", + type=VariableType.STRING + ) + DataAwsEfsFileSystem(self, "by_id", + file_system_id=file_system_id.string_value + ) + DataAwsEfsFileSystem(self, "by_tag", + tags={ + "Environment": "dev" + } + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `file_system_id` - (Optional) ID that identifies the file system (e.g., fs-ccfc0d65). +* `creation_token` - (Optional) Restricts the list to the file system with this creation token. +* `tags` - (Optional) Restricts the list to the file system with these tags. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name of the file system. +* `availability_zone_name` - The Availability Zone name in which the file system's One Zone storage classes exist. +* `availability_zone_id` - The identifier of the Availability Zone in which the file system's One Zone storage classes exist. +* `dns_name` - DNS name for the filesystem per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html). +* `encrypted` - Whether EFS is encrypted. +* `kms_key_id` - ARN for the KMS encryption key. +* `lifecycle_policy` - File system [lifecycle policy](https://docs.aws.amazon.com/efs/latest/ug/API_LifecyclePolicy.html) object. +* `performance_mode` - File system performance mode. +* `provisioned_throughput_in_mibps` - The throughput, measured in MiB/s, that you want to provision for the file system. +* `tags` - A map of tags assigned to the file system. +* `throughput_mode` - Throughput mode for the file system. +* `size_in_bytes` - Current byte count used by the file system.
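One common follow-up is feeding the looked-up file system into a mount target. A hedged sketch of that pattern follows; the `EfsMountTarget` resource binding and the subnet ID are assumptions, and the data source's `id` is used as the file system identifier:

```python
# Hypothetical sketch; binding names assume `cdktf get` output.
from cdktf import TerraformStack
from imports.aws.data_aws_efs_file_system import DataAwsEfsFileSystem
from imports.aws.efs_mount_target import EfsMountTarget

class MountLookedUpFileSystem(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        fs = DataAwsEfsFileSystem(self, "by_tag",
            tags={"Environment": "dev"}
        )
        # Create a mount target in a hypothetical subnet for the matched file system.
        EfsMountTarget(self, "example",
            file_system_id=fs.id,
            subnet_id="subnet-0123456789abcdef0"
        )
```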
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/efs_mount_target.html.markdown new file mode 100644 index 00000000000..ac57f0cc2f7 --- /dev/null +++ b/website/docs/cdktf/python/d/efs_mount_target.html.markdown @@ -0,0 +1,63 @@ +---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_mount_target"
+description: |-
+  Provides an Elastic File System (EFS) Mount Target data source.
+---
+
+
+
+# Data Source: aws_efs_mount_target
+
+Provides information about an Elastic File System (EFS) Mount Target.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import VariableType, TerraformVariable, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_efs_mount_target import DataAwsEfsMountTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        mount_target_id = TerraformVariable(self, "mount_target_id",
+            default="",
+            type=VariableType.STRING
+        )
+        DataAwsEfsMountTarget(self, "by_id",
+            mount_target_id=mount_target_id.string_value
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `access_point_id` - (Optional) ID or ARN of the access point whose mount target you want to find. It must be included if `file_system_id` and `mount_target_id` are not included.
+* `file_system_id` - (Optional) ID or ARN of the file system whose mount target you want to find. It must be included if `access_point_id` and `mount_target_id` are not included.
+* `mount_target_id` - (Optional) ID or ARN of the mount target that you want to find. It must be included in your request if `access_point_id` and `file_system_id` are not included.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `file_system_arn` - Amazon Resource Name of the file system for which the mount target is intended.
+* `subnet_id` - ID of the mount target's subnet.
+* `ip_address` - Address at which the file system may be mounted via the mount target.
+* `security_groups` - List of VPC security group IDs attached to the mount target.
+* `dns_name` - DNS name for the EFS file system.
+* `mount_target_dns_name` - The DNS name for the given subnet/AZ per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html).
+* `network_interface_id` - The ID of the network interface that Amazon EFS created when it created the mount target.
+* `availability_zone_name` - The name of the Availability Zone (AZ) that the mount target resides in.
+* `availability_zone_id` - The unique and consistent identifier of the Availability Zone (AZ) that the mount target resides in.
+* `owner_id` - AWS account ID that owns the resource.
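+
+Since the three arguments above are alternative lookup keys, here is a hedged sketch of resolving a mount target from its file system instead of a mount target ID (identifiers and names are illustrative):
+
+```python
+# A minimal sketch, not generated by 'cdktf convert'; identifiers are illustrative.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_efs_mount_target import DataAwsEfsMountTarget
+class EfsMountTargetSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mt = DataAwsEfsMountTarget(self, "by_file_system",
+            file_system_id="fs-12345678"
+        )
+        # Export the per-AZ DNS name used for mounting from that subnet.
+        TerraformOutput(self, "mount_target_dns", value=mt.mount_target_dns_name)
+```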
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eip.html.markdown b/website/docs/cdktf/python/d/eip.html.markdown new file mode 100644 index 00000000000..d44be1f8814 --- /dev/null +++ b/website/docs/cdktf/python/d/eip.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_eip" +description: |- + Provides details about a specific Elastic IP +--- + + + +# Data Source: aws_eip + +`aws_eip` provides details about a specific Elastic IP. + +## Example Usage + +### Search By Allocation ID (VPC only) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_eip import DataAwsEip +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEip(self, "by_allocation_id", + id="eipalloc-12345678" + ) +``` + +### Search By Filters (EC2-Classic or VPC) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_eip import DataAwsEip +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEip(self, "by_filter", + filter=[DataAwsEipFilter( + name="tag:Name", + values=["exampleNameTagValue"] + ) + ] + ) +``` + +### Search By Public IP (EC2-Classic or VPC) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_eip import DataAwsEip +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEip(self, "by_public_ip", + public_ip="1.2.3.4" + ) +``` + +### Search By Tags (EC2-Classic or VPC) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_eip import DataAwsEip +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEip(self, "by_tags", + tags={ + "Name": "exampleNameTagValue" + } + ) +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +Elastic IPs in the current region. The given filters must match exactly one +Elastic IP whose data will be exported as attributes. + +* `filter` - (Optional) One or more name/value pairs to use as filters. There are several valid keys, for a full reference, check out the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAddresses.html). +* `id` - (Optional) Allocation ID of the specific VPC EIP to retrieve. If a classic EIP is required, do NOT set `id`, only set `public_ip` +* `public_ip` - (Optional) Public IP of the specific EIP to retrieve. 
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Elastic IP.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `association_id` - ID representing the association of the address with an instance in a VPC.
+* `domain` - Whether the address is for use in EC2-Classic (`standard`) or in a VPC (`vpc`).
+* `id` - If VPC Elastic IP, the allocation identifier. If EC2-Classic Elastic IP, the public IP address.
+* `instance_id` - ID of the instance that the address is associated with (if any).
+* `network_interface_id` - The ID of the network interface.
+* `network_interface_owner_id` - The ID of the AWS account that owns the network interface.
+* `private_ip` - Private IP address associated with the Elastic IP address.
+* `private_dns` - Private DNS associated with the Elastic IP address.
+* `public_ip` - Public IP address of Elastic IP.
+* `public_dns` - Public DNS associated with the Elastic IP address.
+* `public_ipv4_pool` - ID of an address pool.
+* `carrier_ip` - Carrier IP address.
+* `customer_owned_ipv4_pool` - The ID of a Customer Owned IP Pool. For more on customer-owned IP addresses, see the [Customer-owned IP addresses guide](https://docs.aws.amazon.com/outposts/latest/userguide/outposts-networking-components.html#ip-addressing).
+* `customer_owned_ip` - Customer owned IP address.
+* `tags` - Key-value map of tags associated with Elastic IP.
+
+~> **Note:** The data source computes the `public_dns` and `private_dns` attributes according to the [VPC DNS Guide](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-dns.html#vpc-dns-hostnames) as they are not available with the EC2 API.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eips.html.markdown new file mode 100644 index 00000000000..e07825ed36d --- /dev/null +++ b/website/docs/cdktf/python/d/eips.html.markdown @@ -0,0 +1,69 @@ +---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_eips"
+description: |-
+  Provides a list of Elastic IPs in a region
+---
+
+
+
+# Data Source: aws_eips
+
+Provides a list of Elastic IPs in a region.
+
+## Example Usage
+
+The following example shows outputting all Elastic IPs with a specific tag value.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_eips import DataAwsEips
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsEips(self, "example",
+            tags={
+                "Env": "dev"
+            }
+        )
+        TerraformOutput(self, "allocation_ids",
+            value=example.allocation_ids
+        )
+        TerraformOutput(self, "public_ips",
+            value=example.public_ips
+        )
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Elastic IPs.
+ +More complex filters can be expressed using one or more `filter` sub-blocks, which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAddresses.html). +* `values` - (Required) Set of values that are accepted for the given field. An Elastic IP will be selected if any one of the given values matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `allocation_ids` - List of all the allocation IDs for address for use with EC2-VPC. +* `public_ips` - List of all the Elastic IP addresses. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eks_addon.html.markdown b/website/docs/cdktf/python/d/eks_addon.html.markdown index be87f241e8f..10e9eae7982 100644 --- a/website/docs/cdktf/python/d/eks_addon.html.markdown +++ b/website/docs/cdktf/python/d/eks_addon.html.markdown @@ -41,9 +41,9 @@ class MyConvertedCode(TerraformStack): the names returned by [list-addon](https://docs.aws.amazon.com/cli/latest/reference/eks/list-addons.html). * `cluster_name` – (Required) Name of the EKS Cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`). -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the EKS add-on. * `addon_version` - Version of EKS add-on. @@ -54,4 +54,4 @@ In addition to all arguments above, the following attributes are exported: * `created_at` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the EKS add-on was created. * `modified_at` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the EKS add-on was updated. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eks_addon_version.html.markdown b/website/docs/cdktf/python/d/eks_addon_version.html.markdown index 50e4fbe46d9..1f06981afd9 100644 --- a/website/docs/cdktf/python/d/eks_addon_version.html.markdown +++ b/website/docs/cdktf/python/d/eks_addon_version.html.markdown @@ -60,11 +60,11 @@ class MyConvertedCode(TerraformStack): * `kubernetes_version` – (Required) Version of the EKS Cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`). * `most_recent` - (Optional) Determines if the most recent or default version of the addon should be returned. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - Name of the add-on * `version` - Version of the EKS add-on. 
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eks_cluster.html.markdown b/website/docs/cdktf/python/d/eks_cluster.html.markdown index 5382b7c8c69..013444f09a9 100644 --- a/website/docs/cdktf/python/d/eks_cluster.html.markdown +++ b/website/docs/cdktf/python/d/eks_cluster.html.markdown @@ -44,7 +44,9 @@ class MyConvertedCode(TerraformStack): * `name` - (Required) Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`). -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - Name of the cluster * `arn` - ARN of the cluster. @@ -80,4 +82,4 @@ class MyConvertedCode(TerraformStack): * `subnet_ids` – List of subnet IDs * `vpc_id` – The VPC associated with your cluster. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eks_cluster_auth.html.markdown b/website/docs/cdktf/python/d/eks_cluster_auth.html.markdown index 0b83c066af4..4b307e50724 100644 --- a/website/docs/cdktf/python/d/eks_cluster_auth.html.markdown +++ b/website/docs/cdktf/python/d/eks_cluster_auth.html.markdown @@ -58,9 +58,11 @@ class MyConvertedCode(TerraformStack): * `name` - (Required) Name of the cluster -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - Name of the cluster. * `token` - Token to use to authenticate with the cluster. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eks_clusters.html.markdown b/website/docs/cdktf/python/d/eks_clusters.html.markdown index 2c503f46dc2..e3d8baea289 100644 --- a/website/docs/cdktf/python/d/eks_clusters.html.markdown +++ b/website/docs/cdktf/python/d/eks_clusters.html.markdown @@ -42,9 +42,11 @@ class MyConvertedCode(TerraformStack): data_aws_eks_cluster_example.override_logical_id("example") ``` -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `names` - Set of EKS clusters names - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eks_node_group.html.markdown b/website/docs/cdktf/python/d/eks_node_group.html.markdown index cbfbad2e9d3..3bdd077b2df 100644 --- a/website/docs/cdktf/python/d/eks_node_group.html.markdown +++ b/website/docs/cdktf/python/d/eks_node_group.html.markdown @@ -37,7 +37,9 @@ class MyConvertedCode(TerraformStack): * `cluster_name` - (Required) Name of the cluster. * `node_group_name` - (Required) Name of the node group. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - EKS Cluster name and EKS Node Group name separated by a colon (`:`). * `ami_type` - Type of Amazon Machine Image (AMI) associated with the EKS Node Group. @@ -72,4 +74,4 @@ class MyConvertedCode(TerraformStack): * `tags` - Key-value map of resource tags. * `version` – Kubernetes version. 
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/eks_node_groups.html.markdown index 036c6afbd59..e46a5f69527 100644 --- a/website/docs/cdktf/python/d/eks_node_groups.html.markdown +++ b/website/docs/cdktf/python/d/eks_node_groups.html.markdown @@ -49,9 +49,11 @@ class MyConvertedCode(TerraformStack):
 
 * `cluster_name` - (Required) Name of the cluster.
 
-## Attributes Reference
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
 
 * `id` - Cluster name.
 * `names` - Set of all node group names in an EKS Cluster.
 
- \ No newline at end of file
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elastic_beanstalk_application.html.markdown new file mode 100644 index 00000000000..76e0a89138e --- /dev/null +++ b/website/docs/cdktf/python/d/elastic_beanstalk_application.html.markdown @@ -0,0 +1,59 @@ +---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_application"
+description: |-
+  Retrieve information about an Elastic Beanstalk Application
+---
+
+
+
+# Data Source: aws_elastic_beanstalk_application
+
+Retrieve information about an Elastic Beanstalk Application.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_elastic_beanstalk_application import DataAwsElasticBeanstalkApplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsElasticBeanstalkApplication(self, "example",
+            name="example"
+        )
+        TerraformOutput(self, "arn",
+            value=example.arn
+        )
+        TerraformOutput(self, "description",
+            value=example.description
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the application.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the application.
+* `arn` - ARN of the application.
+* `description` - Short description of the application.
+
+The application version lifecycle (`appversion_lifecycle`) block contains the following nested attributes:
+
+* `service_role` - ARN of an IAM service role under which the application version is deleted. Elastic Beanstalk must have permission to assume this role.
+* `max_count` - Maximum number of application versions to retain.
+* `max_age_in_days` - Number of days to retain an application version.
+* `delete_source_from_s3` - Specifies whether to delete a version's source bundle from S3 when the application version is deleted.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elastic_beanstalk_hosted_zone.html.markdown new file mode 100644 index 00000000000..d7bfe118ac9 --- /dev/null +++ b/website/docs/cdktf/python/d/elastic_beanstalk_hosted_zone.html.markdown @@ -0,0 +1,44 @@ +---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_hosted_zone"
+description: |-
+  Get an Elastic Beanstalk hosted zone.
+--- + + + +# Data Source: aws_elastic_beanstalk_hosted_zone + +Use this data source to get the ID of an [elastic beanstalk hosted zone](http://docs.aws.amazon.com/general/latest/gr/rande.html#elasticbeanstalk_region). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elastic_beanstalk_hosted_zone import DataAwsElasticBeanstalkHostedZone +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsElasticBeanstalkHostedZone(self, "current") +``` + +## Argument Reference + +* `region` - (Optional) Region you'd like the zone for. By default, fetches the current region. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the hosted zone. + +* `region` - Region of the hosted zone. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elastic_beanstalk_solution_stack.html.markdown b/website/docs/cdktf/python/d/elastic_beanstalk_solution_stack.html.markdown new file mode 100644 index 00000000000..044f1f7d1d1 --- /dev/null +++ b/website/docs/cdktf/python/d/elastic_beanstalk_solution_stack.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Elastic Beanstalk" +layout: "aws" +page_title: "AWS: aws_elastic_beanstalk_solution_stack" +description: |- + Get an elastic beanstalk solution stack. +--- + + + +# Data Source: aws_elastic_beanstalk_solution_stack + +Use this data source to get the name of a elastic beanstalk solution stack. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elastic_beanstalk_solution_stack import DataAwsElasticBeanstalkSolutionStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsElasticBeanstalkSolutionStack(self, "multi_docker", + most_recent=True, + name_regex="^64bit Amazon Linux (.*) Multi-container Docker (.*)$" + ) +``` + +## Argument Reference + +* `most_recent` - (Optional) If more than one result is returned, use the most +recent solution stack. + +* `name_regex` - Regex string to apply to the solution stack list returned +by AWS. See [Elastic Beanstalk Supported Platforms][beanstalk-platforms] from +AWS documentation for reference solution stack names. + +~> **NOTE:** If more or less than a single match is returned by the search, +Terraform will fail. Ensure that your search is specific enough to return +a single solution stack, or use `most_recent` to choose the most recent one. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `name` - Name of the solution stack. 
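+
+As a hedged sketch of the common pattern (the application and environment names are hypothetical, and the `ElasticBeanstalkEnvironment` binding is assumed to be available from the generated provider bindings), the looked-up stack name can seed an environment:
+
+```python
+# A minimal sketch, not generated by 'cdktf convert'; names are illustrative.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_elastic_beanstalk_solution_stack import DataAwsElasticBeanstalkSolutionStack
+from imports.aws.elastic_beanstalk_environment import ElasticBeanstalkEnvironment
+class SolutionStackSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        stack = DataAwsElasticBeanstalkSolutionStack(self, "multi_docker",
+            most_recent=True,
+            name_regex="^64bit Amazon Linux (.*) Multi-container Docker (.*)$"
+        )
+        # Pin the environment to whatever stack the regex resolved to.
+        ElasticBeanstalkEnvironment(self, "example",
+            application="my-application",
+            name="my-environment",
+            solution_stack_name=stack.name
+        )
+```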
+ +[beanstalk-platforms]: http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html "AWS Elastic Beanstalk Supported Platforms documentation" + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elasticache_cluster.html.markdown b/website/docs/cdktf/python/d/elasticache_cluster.html.markdown new file mode 100644 index 00000000000..81046845ea8 --- /dev/null +++ b/website/docs/cdktf/python/d/elasticache_cluster.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_cluster" +description: |- + Get information on an ElastiCache Cluster resource. +--- + + + +# Data Source: aws_elasticache_cluster + +Use this data source to get information about an ElastiCache Cluster + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elasticache_cluster import DataAwsElasticacheCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsElasticacheCluster(self, "my_cluster", + cluster_id="my-cluster-id" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `cluster_id` – (Required) Group identifier. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `node_type` – The cluster node type. +* `num_cache_nodes` – The number of cache nodes that the cache cluster has. +* `engine` – Name of the cache engine. +* `engine_version` – Version number of the cache engine. +* `ip_discovery` - The IP version advertised in the discovery protocol. +* `network_type` - The IP versions for cache cluster connections. +* `subnet_group_name` – Name of the subnet group associated to the cache cluster. +* `security_group_ids` – List VPC security groups associated with the cache cluster. +* `parameter_group_name` – Name of the parameter group associated with this cache cluster. +* `replication_group_id` - The replication group to which this cache cluster belongs. +* `log_delivery_configuration` - Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) delivery settings. +* `maintenance_window` – Specifies the weekly time range for when maintenance +on the cache cluster is performed. +* `snapshot_window` - Daily time range (in UTC) during which ElastiCache will +begin taking a daily snapshot of the cache cluster. +* `snapshot_retention_limit` - The number of days for which ElastiCache will +retain automatic cache cluster snapshots before deleting them. +* `availability_zone` - Availability Zone for the cache cluster. +* `notification_topic_arn` – An ARN of an +SNS topic that ElastiCache notifications get sent to. +* `port` – The port number on which each of the cache nodes will +accept connections. +* `configuration_endpoint` - (Memcached only) Configuration endpoint to allow host discovery. +* `cluster_address` - (Memcached only) DNS name of the cache cluster without the port appended. +* `preferred_outpost_arn` - The outpost ARN in which the cache cluster was created if created in outpost. 
+* `cache_nodes` - List of node objects including `id`, `address`, `port`, `availability_zone` and `outpost_arn`. + Referenceable e.g., as `${data.aws_elasticache_cluster.bar.cache_nodes.0.address}` +* `tags` - Tags assigned to the resource + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elasticache_replication_group.html.markdown b/website/docs/cdktf/python/d/elasticache_replication_group.html.markdown new file mode 100644 index 00000000000..1414e945391 --- /dev/null +++ b/website/docs/cdktf/python/d/elasticache_replication_group.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_replication_group" +description: |- + Get information on an ElastiCache Replication Group resource. +--- + + + +# Data Source: aws_elasticache_replication_group + +Use this data source to get information about an ElastiCache Replication Group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elasticache_replication_group import DataAwsElasticacheReplicationGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsElasticacheReplicationGroup(self, "bar", + replication_group_id="example" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `replication_group_id` – (Required) Identifier for the replication group. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `description` - Description of the replication group. +* `arn` - ARN of the created ElastiCache Replication Group. +* `auth_token_enabled` - Whether an AuthToken (password) is enabled. +* `automatic_failover_enabled` - A flag whether a read-only replica will be automatically promoted to read/write primary if the existing primary fails. +* `node_type` – The cluster node type. +* `num_cache_clusters` – The number of cache clusters that the replication group has. +* `num_node_groups` - Number of node groups (shards) for the replication group. +* `member_clusters` - Identifiers of all the nodes that are part of this replication group. +* `multi_az_enabled` - Whether Multi-AZ Support is enabled for the replication group. +* `replicas_per_node_group` - Number of replica nodes in each node group. +* `log_delivery_configuration` - Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) delivery settings. +* `snapshot_window` - Daily time range (in UTC) during which ElastiCache begins taking a daily snapshot of your node group (shard). +* `snapshot_retention_limit` - The number of days for which ElastiCache retains automatic cache cluster snapshots before deleting them. +* `port` – The port number on which the configuration endpoint will accept connections. +* `configuration_endpoint_address` - The configuration endpoint address to allow host discovery. +* `primary_endpoint_address` - The endpoint of the primary node in this node group (shard). +* `reader_endpoint_address` - The endpoint of the reader node in this node group (shard). 
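+
+A brief, hedged sketch (construct and output names illustrative) of exporting the endpoints that callers most often need:
+
+```python
+# A minimal sketch, not generated by 'cdktf convert'; names are illustrative.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_elasticache_replication_group import DataAwsElasticacheReplicationGroup
+class ReplicationGroupSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        rg = DataAwsElasticacheReplicationGroup(self, "bar",
+            replication_group_id="example"
+        )
+        # Writers connect to the primary endpoint, readers to the reader endpoint.
+        TerraformOutput(self, "primary_endpoint", value=rg.primary_endpoint_address)
+        TerraformOutput(self, "reader_endpoint", value=rg.reader_endpoint_address)
+```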
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elasticache_subnet_group.html.markdown new file mode 100644 index 00000000000..3ae41b1ecda --- /dev/null +++ b/website/docs/cdktf/python/d/elasticache_subnet_group.html.markdown @@ -0,0 +1,50 @@ +---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_subnet_group"
+description: |-
+  Provides information about an ElastiCache Subnet Group.
+---
+
+
+
+# Data Source: aws_elasticache_subnet_group
+
+Provides information about an ElastiCache Subnet Group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_elasticache_subnet_group import DataAwsElasticacheSubnetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsElasticacheSubnetGroup(self, "example",
+            name="my-subnet-group"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the subnet group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the subnet group.
+* `arn` - ARN of the subnet group.
+* `description` - Description of the subnet group.
+* `subnet_ids` - Set of VPC subnet IDs of the subnet group.
+* `tags` - Map of tags assigned to the subnet group.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elasticache_user.html.markdown new file mode 100644 index 00000000000..6da8ada88d8 --- /dev/null +++ b/website/docs/cdktf/python/d/elasticache_user.html.markdown @@ -0,0 +1,48 @@ +---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_user"
+description: |-
+  Get information on an ElastiCache User resource.
+---
+
+
+
+# Data Source: aws_elasticache_user
+
+Use this data source to get information about an ElastiCache User.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_elasticache_user import DataAwsElasticacheUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsElasticacheUser(self, "bar",
+            user_id="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `user_id` – (Required) Identifier for the user.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `user_id` - Identifier for the user.
+* `user_name` - User name of the user.
+* `access_string` - String for what access a user possesses within the associated ElastiCache replication groups or clusters.
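+
+A short, hedged sketch (names illustrative) showing these attributes being read back out:
+
+```python
+# A minimal sketch, not generated by 'cdktf convert'; names are illustrative.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_elasticache_user import DataAwsElasticacheUser
+class ElasticacheUserSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        user = DataAwsElasticacheUser(self, "bar",
+            user_id="example"
+        )
+        # Inspect the ACL string granted to this user.
+        TerraformOutput(self, "user_access_string", value=user.access_string)
+```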
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elasticsearch_domain.html.markdown b/website/docs/cdktf/python/d/elasticsearch_domain.html.markdown new file mode 100644 index 00000000000..564da4da22d --- /dev/null +++ b/website/docs/cdktf/python/d/elasticsearch_domain.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "Elasticsearch" +layout: "aws" +page_title: "AWS: aws_elasticsearch_domain" +description: |- + Get information on an Elasticsearch Domain resource. +--- + + + +# Data Source: aws_elasticsearch_domain + +Use this data source to get information about an Elasticsearch Domain + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elasticsearch_domain import DataAwsElasticsearchDomain +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsElasticsearchDomain(self, "my_domain", + domain_name="my-domain-name" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `domain_name` – (Required) Name of the domain. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `access_policies` – The policy document attached to the domain. +* `advanced_options` - Key-value string pairs to specify advanced configuration options. +* `advanced_security_options` - Status of the Elasticsearch domain's advanced security options. The block consists of the following attributes: + * `enabled` - Whether advanced security is enabled. + * `internal_user_database_enabled` - Whether the internal user database is enabled. +* `arn` – The ARN of the domain. +* `auto_tune_options` - Configuration of the Auto-Tune options of the domain. + * `desired_state` - The Auto-Tune desired state for the domain. + * `maintenance_schedule` - A list of the nested configurations for the Auto-Tune maintenance windows of the domain. + * `start_at` - Date and time at which the Auto-Tune maintenance schedule starts in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). + * `duration` - Configuration block for the duration of the Auto-Tune maintenance window. + * `value` - Duration of an Auto-Tune maintenance window. + * `unit` - Unit of time. + * `cron_expression_for_recurrence` - Cron expression for an Auto-Tune maintenance schedule. + * `rollback_on_disable` - Whether the domain is set to roll back to default Auto-Tune settings when disabling Auto-Tune. +* `cluster_config` - Cluster configuration of the domain. + * `cold_storage_options` - Configuration block containing cold storage configuration. + * `enabled` - Indicates cold storage is enabled. + * `instance_type` - Instance type of data nodes in the cluster. + * `instance_count` - Number of instances in the cluster. + * `dedicated_master_enabled` - Indicates whether dedicated master nodes are enabled for the cluster. + * `dedicated_master_type` - Instance type of the dedicated master nodes in the cluster. + * `dedicated_master_count` - Number of dedicated master nodes in the cluster. + * `zone_awareness_enabled` - Indicates whether zone awareness is enabled. + * `zone_awareness_config` - Configuration block containing zone awareness settings. 
+        * `availability_zone_count` - Number of availability zones used.
+    * `warm_enabled` - Whether warm storage is enabled.
+    * `warm_count` - The number of warm nodes in the cluster.
+    * `warm_type` - The instance type for the Elasticsearch cluster's warm nodes.
+* `cognito_options` - Domain Amazon Cognito Authentication options for Kibana.
+    * `enabled` - Whether Amazon Cognito Authentication is enabled.
+    * `user_pool_id` - The Cognito User pool used by the domain.
+    * `identity_pool_id` - The Cognito Identity pool used by the domain.
+    * `role_arn` - The IAM Role with the AmazonESCognitoAccess policy attached.
+* `created` – Status of the creation of the domain.
+* `deleted` – Status of the deletion of the domain.
+* `domain_id` – Unique identifier for the domain.
+* `ebs_options` - EBS Options for the instances in the domain.
+    * `ebs_enabled` - Whether EBS volumes are attached to data nodes in the domain.
+    * `throughput` - The throughput (in MiB/s) of the EBS volumes attached to data nodes.
+    * `volume_type` - The type of EBS volumes attached to data nodes.
+    * `volume_size` - The size of EBS volumes attached to data nodes (in GB).
+    * `iops` - The baseline input/output (I/O) performance of EBS volumes attached to data nodes.
+* `elasticsearch_version` – Elasticsearch version for the domain.
+* `encryption_at_rest` - Domain encryption at rest related options.
+    * `enabled` - Whether encryption at rest is enabled in the domain.
+    * `kms_key_id` - The KMS key ID used to encrypt data at rest.
+* `endpoint` – Domain-specific endpoint used to submit index, search, and data upload requests.
+* `kibana_endpoint` - Domain-specific endpoint used to access the Kibana application.
+* `log_publishing_options` - Domain log publishing related options.
+    * `log_type` - The type of Elasticsearch log being published.
+    * `cloudwatch_log_group_arn` - The CloudWatch Log Group where the logs are published.
+    * `enabled` - Whether log publishing is enabled.
+* `node_to_node_encryption` - Domain in transit encryption related options.
+    * `enabled` - Whether node to node encryption is enabled.
+* `processing` – Status of a configuration change in the domain.
+* `snapshot_options` – Domain snapshot related options.
+    * `automated_snapshot_start_hour` - Hour during which the service takes an automated daily snapshot of the indices in the domain.
+* `tags` - Tags assigned to the domain.
+* `vpc_options` - VPC Options for private Elasticsearch domains.
+    * `availability_zones` - The availability zones used by the domain.
+    * `security_group_ids` - The security groups used by the domain.
+    * `subnet_ids` - The subnets used by the domain.
+    * `vpc_id` - The VPC used by the domain.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elb.html.markdown new file mode 100644 index 00000000000..e9fcf8ef018 --- /dev/null +++ b/website/docs/cdktf/python/d/elb.html.markdown @@ -0,0 +1,57 @@ +---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_elb"
+description: |-
+  Provides a classic Elastic Load Balancer data source.
+---
+
+
+
+# Data Source: aws_elb
+
+Provides information about a "classic" Elastic Load Balancer (ELB).
+See the [LB Data Source](/docs/providers/aws/d/lb.html) if you are looking for a "v2"
+Application Load Balancer (ALB) or Network Load Balancer (NLB).
+
+This data source can prove useful when a module accepts an LB as an input
+variable and needs to, for example, determine the security groups associated
+with it.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import VariableType, TerraformVariable, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elb import DataAwsElb +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + lb_name = TerraformVariable(self, "lb_name", + default="", + type=VariableType.STRING + ) + DataAwsElb(self, "test", + name=lb_name.string_value + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Unique name of the load balancer. + +## Attribute Reference + +See the [ELB Resource](/docs/providers/aws/r/elb.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elb_hosted_zone_id.html.markdown b/website/docs/cdktf/python/d/elb_hosted_zone_id.html.markdown new file mode 100644 index 00000000000..bfa21000085 --- /dev/null +++ b/website/docs/cdktf/python/d/elb_hosted_zone_id.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "ELB Classic" +layout: "aws" +page_title: "AWS: aws_elb_hosted_zone_id" +description: |- + Get AWS Elastic Load Balancing Hosted Zone Id +--- + + + +# Data Source: aws_elb_hosted_zone_id + +Use this data source to get the HostedZoneId of the AWS Elastic Load Balancing HostedZoneId +in a given region for the purpose of using in an AWS Route53 Alias. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elb_hosted_zone_id import DataAwsElbHostedZoneId +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + main = DataAwsElbHostedZoneId(self, "main") + Route53Record(self, "www", + alias=Route53RecordAlias( + evaluate_target_health=True, + name=Token.as_string(aws_elb_main.dns_name), + zone_id=Token.as_string(main.id) + ), + name="example.com", + type="A", + zone_id=primary.zone_id + ) +``` + +## Argument Reference + +* `region` - (Optional) Name of the region whose AWS ELB HostedZoneId is desired. + Defaults to the region from the AWS provider configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the AWS ELB HostedZoneId in the selected region. 
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/elb_service_account.html.markdown b/website/docs/cdktf/python/d/elb_service_account.html.markdown new file mode 100644 index 00000000000..e9acd811e5b --- /dev/null +++ b/website/docs/cdktf/python/d/elb_service_account.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "ELB Classic" +layout: "aws" +page_title: "AWS: aws_elb_service_account" +description: |- + Get AWS Elastic Load Balancing Service Account +--- + + + +# Data Source: aws_elb_service_account + +Use this data source to get the Account ID of the [AWS Elastic Load Balancing Service Account](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-access-logs.html#attach-bucket-policy) +in a given region for the purpose of permitting in S3 bucket policy. + +~> **Note:** For AWS Regions opened since Jakarta (`ap-southeast-3`) in December 2021, AWS [documents that](https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-access-logs.html#attach-bucket-policy) a [service principal name](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-services) should be used instead of an AWS account ID in any relevant IAM policy. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_elb_service_account import DataAwsElbServiceAccount +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.elb import Elb +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_policy import S3BucketPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + elb_logs = S3Bucket(self, "elb_logs", + bucket="my-elb-tf-test-bucket" + ) + S3BucketAcl(self, "elb_logs_acl", + acl="private", + bucket=elb_logs.id + ) + main = DataAwsElbServiceAccount(self, "main") + allow_elb_logging = DataAwsIamPolicyDocument(self, "allow_elb_logging", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:PutObject"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=[Token.as_string(main.arn)], + type="AWS" + ) + ], + resources=["${" + elb_logs.arn + "}/AWSLogs/*"] + ) + ] + ) + Elb(self, "bar", + access_logs=ElbAccessLogs( + bucket=elb_logs.id, + interval=5 + ), + availability_zones=["us-west-2a"], + listener=[ElbListener( + instance_port=8000, + instance_protocol="http", + lb_port=80, + lb_protocol="http" + ) + ], + name="my-foobar-terraform-elb" + ) + aws_s3_bucket_policy_allow_elb_logging = S3BucketPolicy(self, "allow_elb_logging_5", + bucket=elb_logs.id, + policy=Token.as_string(allow_elb_logging.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_policy_allow_elb_logging.override_logical_id("allow_elb_logging") +``` + +## Argument Reference + +* `region` - (Optional) Name of the region whose AWS ELB account ID is desired. + Defaults to the region from the AWS provider configuration. 
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the AWS ELB service account in the selected region. +* `arn` - ARN of the AWS ELB service account in the selected region. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/emr_release_labels.markdown b/website/docs/cdktf/python/d/emr_release_labels.markdown new file mode 100644 index 00000000000..86e85cbadc7 --- /dev/null +++ b/website/docs/cdktf/python/d/emr_release_labels.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_release_labels" +description: |- + Retrieve information about EMR Release Labels +--- + + + +# Data Source: aws_emr_release_labels + +Retrieve information about EMR Release Labels. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_emr_release_labels import DataAwsEmrReleaseLabels +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsEmrReleaseLabels(self, "example", + filters=DataAwsEmrReleaseLabelsFilters( + application="spark@2.1.0", + prefix="emr-5" + ) + ) +``` + +## Argument Reference + +* `filters` – (Optional) Filters the results of the request. Prefix specifies the prefix of release labels to return. Application specifies the application (with/without version) of release labels to return. See [Filters](#filters). + +### Filters + +* `application` - (Optional) Optional release label application filter. For example, `Spark@2.1.0` or `Spark`. +* `prefix` - (Optional) Optional release label version prefix filter. For example, `emr-5`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `release_labels` - Returned release labels. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/emrcontainers_virtual_cluster.markdown b/website/docs/cdktf/python/d/emrcontainers_virtual_cluster.markdown new file mode 100644 index 00000000000..16840778a9b --- /dev/null +++ b/website/docs/cdktf/python/d/emrcontainers_virtual_cluster.markdown @@ -0,0 +1,61 @@ +--- +subcategory: "EMR Containers" +layout: "aws" +page_title: "AWS: aws_emrcontainers_virtual_cluster" +description: |- + Retrieve information about an EMR Containers (EMR on EKS) Virtual Cluster +--- + + + +# Data Source: aws_emrcontainers_virtual_cluster + +Retrieve information about an EMR Containers (EMR on EKS) Virtual Cluster. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_emrcontainers_virtual_cluster import DataAwsEmrcontainersVirtualCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsEmrcontainersVirtualCluster(self, "example", + virtual_cluster_id="example id" + ) + TerraformOutput(self, "arn", + value=example.arn + ) + TerraformOutput(self, "name", + value=example.name + ) +``` + +## Argument Reference + +* `virtual_cluster_id` - (Required) ID of the cluster. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the cluster. +* `name` - Name of the cluster. +* `arn` - ARN of the cluster. +* `container_provider` - Nested attribute containing information about the underlying container provider (EKS cluster) for your EMR Containers cluster. + * `id` - The name of the container provider that is running your EMR Containers cluster + * `info` - Nested list containing information about the configuration of the container provider + * `eks_info` - Nested list containing EKS-specific information about the cluster where the EMR Containers cluster is running + * `namespace` - The namespace where the EMR Containers cluster is running + * `type` - The type of the container provider +* `created_at` - Unix epoch time stamp in seconds for when the cluster was created. +* `state` - Status of the EKS cluster. One of `RUNNING`, `TERMINATING`, `TERMINATED`, `ARRESTED`. +* `tags` - Key-value mapping of resource tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/fsx_openzfs_snapshot.html.markdown b/website/docs/cdktf/python/d/fsx_openzfs_snapshot.html.markdown new file mode 100644 index 00000000000..0b2ee674c64 --- /dev/null +++ b/website/docs/cdktf/python/d/fsx_openzfs_snapshot.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "FSx" +layout: "aws" +page_title: "AWS: aws_fsx_openzfs_snapshot" +description: |- + Get information on an Amazon FSx for OpenZFS snapshot. +--- + + + +# Data Source: aws_fsx_openzfs_snapshot + +Use this data source to get information about an Amazon FSx for OpenZFS Snapshot for use when provisioning new Volumes. + +## Example Usage + +### Root volume Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_fsx_openzfs_snapshot import DataAwsFsxOpenzfsSnapshot +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsFsxOpenzfsSnapshot(self, "example", + filter=[DataAwsFsxOpenzfsSnapshotFilter( + name="volume-id", + values=["fsvol-073a32b6098a73feb"] + ) + ], + most_recent=True + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `most_recent` - (Optional) If more than one result is returned, use the most recent snapshot. + +* `snapshot_ids` - (Optional) Returns information on a specific snapshot_id. + +* `filter` - (Optional) One or more name/value pairs to filter off of. The +supported names are file-system-id or volume-id. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name of the snapshot. +* `creation_time` - Time that the resource was created. 
+* `id` - Identifier of the snapshot, e.g., `fsvolsnap-12345678`.
+* `name` - Name of the snapshot.
+* `snapshot_id` - ID of the snapshot.
+* `tags` - List of Tag values, with a maximum of 50 elements.
+* `volume_id` - ID of the volume that the snapshot is of.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/fsx_windows_file_system.html.markdown new file mode 100644 index 00000000000..a6f06b9439e --- /dev/null +++ b/website/docs/cdktf/python/d/fsx_windows_file_system.html.markdown @@ -0,0 +1,68 @@ +---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_windows_file_system"
+description: |-
+  Retrieve information on an FSx Windows File System.
+---
+
+
+
+# Data Source: aws_fsx_windows_file_system
+
+Retrieve information on an FSx Windows File System.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_fsx_windows_file_system import DataAwsFsxWindowsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsFsxWindowsFileSystem(self, "example",
+            id="fs-12345678"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `id` - (Required) Identifier of the file system (e.g. `fs-12345678`).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `active_directory_id` - The ID of the Microsoft Active Directory instance that the file system is joined to.
+* `aliases` - An array of DNS alias names associated with the Amazon FSx file system.
+* `arn` - Amazon Resource Name of the file system.
+* `audit_log_configuration` - The configuration that Amazon FSx for Windows File Server uses to audit and log user accesses of files, folders, and file shares on the Amazon FSx for Windows File Server file system.
+* `automatic_backup_retention_days` - The number of days to retain automatic backups.
+* `copy_tags_to_backups` - A boolean flag indicating whether tags on the file system should be copied to backups.
+* `daily_automatic_backup_start_time` - The preferred time (in `HH:MM` format) to take daily automatic backups, in the UTC time zone.
+* `deployment_type` - The file system deployment type.
+* `dns_name` - DNS name for the file system (e.g. `fs-12345678.corp.example.com`).
+* `id` - Identifier of the file system (e.g. `fs-12345678`).
+* `kms_key_id` - ARN for the KMS Key to encrypt the file system at rest.
+* `owner_id` - AWS account identifier that created the file system.
+* `preferred_subnet_id` - Specifies the subnet in which you want the preferred file server to be located.
+* `preferred_file_server_ip` - The IP address of the primary, or preferred, file server.
+* `storage_capacity` - The storage capacity of the file system in gibibytes (GiB).
+* `storage_type` - The type of storage the file system is using. If set to `SSD`, the file system uses solid state drive storage. If set to `HDD`, the file system uses hard disk drive storage.
+* `subnet_ids` - Specifies the IDs of the subnets that the file system is accessible from.
+* `tags` - The tags associated with the file system.
+* `throughput_capacity` - Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `8` and maximum of `2048`. +* `vpc_id` - The ID of the primary virtual private cloud (VPC) for the file system. +* `weekly_maintenance_start_time` - The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/globalaccelerator_accelerator.html.markdown b/website/docs/cdktf/python/d/globalaccelerator_accelerator.html.markdown new file mode 100644 index 00000000000..dd2115111b6 --- /dev/null +++ b/website/docs/cdktf/python/d/globalaccelerator_accelerator.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_accelerator" +description: |- + Provides a Global Accelerator accelerator data source. +--- + + + +# Data Source: aws_globalaccelerator_accelerator + +Provides information about a Global Accelerator accelerator. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import VariableType, TerraformVariable, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_globalaccelerator_accelerator import DataAwsGlobalacceleratorAccelerator +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + accelerator_arn = TerraformVariable(self, "accelerator_arn", + default="", + type=VariableType.STRING + ) + accelerator_name = TerraformVariable(self, "accelerator_name", + default="", + type=VariableType.STRING + ) + DataAwsGlobalacceleratorAccelerator(self, "example", + arn=accelerator_arn.string_value, + name=accelerator_name.string_value + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) Full ARN of the Global Accelerator. +* `name` - (Optional) Unique name of the Global Accelerator. + +~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. + +## Attribute Reference + +See the [`aws_globalaccelerator_accelerator` resource](/docs/providers/aws/r/globalaccelerator_accelerator.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/globalaccelerator_custom_routing_accelerator.html.markdown b/website/docs/cdktf/python/d/globalaccelerator_custom_routing_accelerator.html.markdown new file mode 100644 index 00000000000..c3e77146be2 --- /dev/null +++ b/website/docs/cdktf/python/d/globalaccelerator_custom_routing_accelerator.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_custom_routing_accelerator" +description: |- + Provides a Global Accelerator custom routing accelerator data source. +--- + + + +# Data Source: aws_globalaccelerator_custom_routing_accelerator + +Provides information about a Global Accelerator custom routing accelerator. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import VariableType, TerraformVariable, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_globalaccelerator_custom_routing_accelerator import DataAwsGlobalacceleratorCustomRoutingAccelerator +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + accelerator_arn = TerraformVariable(self, "accelerator_arn", + default="", + type=VariableType.STRING + ) + accelerator_name = TerraformVariable(self, "accelerator_name", + default="", + type=VariableType.STRING + ) + DataAwsGlobalacceleratorCustomRoutingAccelerator(self, "example", + arn=accelerator_arn.string_value, + name=accelerator_name.string_value + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) Full ARN of the custom routing accelerator. +* `name` - (Optional) Unique name of the custom routing accelerator. + +~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. + +## Attribute Reference + +See the [`aws_globalaccelerator_custom_routing_accelerator` resource](/docs/providers/aws/r/globalaccelerator_custom_routing_accelerator.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/glue_catalog_table.html.markdown b/website/docs/cdktf/python/d/glue_catalog_table.html.markdown new file mode 100644 index 00000000000..34f724d311f --- /dev/null +++ b/website/docs/cdktf/python/d/glue_catalog_table.html.markdown @@ -0,0 +1,132 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_catalog_table" +description: |- + Get information on AWS Glue Data Catalog Table +--- + + + +# Data Source: aws_glue_catalog_table + +This data source can be used to fetch information about an AWS Glue Data Catalog Table. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_glue_catalog_table import DataAwsGlueCatalogTable +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsGlueCatalogTable(self, "example", + database_name="MyCatalogDatabase", + name="MyCatalogTable" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the table. +* `database_name` - (Required) Name of the metadata database where the table metadata resides. +* `catalog_id` - (Optional) ID of the Glue Catalog and database where the table metadata resides. If omitted, this defaults to the current AWS Account ID. +* `query_as_of_time`- (Optional) The time as of when to read the table contents. If not set, the most recent transaction commit time will be used. Cannot be specified along with `transaction_id`. Specified in RFC 3339 format, e.g. `2006-01-02T15:04:05Z07:00`. 
+* `transaction_id` - (Optional) The transaction ID at which to read the table contents.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Catalog ID, database name, and name of the table.
+* `arn` - The ARN of the Glue Table.
+* `description` - Description of the table.
+* `owner` - Owner of the table.
+* `parameters` - Properties associated with this table, as a list of key-value pairs.
+* `partition_index` - Configuration block for a maximum of 3 partition indexes. See [`partition_index`](#partition_index) below.
+* `partition_keys` - Configuration block of columns by which the table is partitioned. Only primitive types are supported as partition keys. See [`partition_keys`](#partition_keys) below.
+* `retention` - Retention time for this table.
+* `storage_descriptor` - Configuration block for information about the physical storage of this table. For more information, refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-tables.html#aws-glue-api-catalog-tables-StorageDescriptor). See [`storage_descriptor`](#storage_descriptor) below.
+* `table_type` - Type of this table (EXTERNAL_TABLE, VIRTUAL_VIEW, etc.). While optional, some Athena DDL queries such as `ALTER TABLE` and `SHOW CREATE TABLE` will fail if this argument is empty.
+* `target_table` - Configuration block of a target table for resource linking. See [`target_table`](#target_table) below.
+* `view_expanded_text` - If the table is a view, the expanded text of the view; otherwise null.
+* `view_original_text` - If the table is a view, the original text of the view; otherwise null.
+
+### partition_index
+
+* `index_name` - Name of the partition index.
+* `keys` - Keys for the partition index.
+
+### partition_keys
+
+* `comment` - Free-form text comment.
+* `name` - Name of the Partition Key.
+* `type` - Datatype of data in the Partition Key.
+
+### storage_descriptor
+
+* `bucket_columns` - List of reducer grouping columns, clustering columns, and bucketing columns in the table.
+* `columns` - Configuration block for columns in the table. See [`columns`](#columns) below.
+* `compressed` - Whether the data in the table is compressed.
+* `input_format` - Input format: SequenceFileInputFormat (binary), or TextInputFormat, or a custom format.
+* `location` - Physical location of the table. By default, this takes the form of the warehouse location, followed by the database location in the warehouse, followed by the table name.
+* `number_of_buckets` - Number of buckets. Must be specified if the table contains any dimension columns.
+* `output_format` - Output format: SequenceFileOutputFormat (binary), or IgnoreKeyTextOutputFormat, or a custom format.
+* `parameters` - User-supplied properties in key-value form.
+* `schema_reference` - Object that references a schema stored in the AWS Glue Schema Registry. See [`schema_reference`](#schema_reference) below.
+* `ser_de_info` - Configuration block for serialization and deserialization ("SerDe") information. See [`ser_de_info`](#ser_de_info) below.
+* `skewed_info` - Configuration block with information about values that appear very frequently in a column (skewed values). See [`skewed_info`](#skewed_info) below.
+* `sort_columns` - Configuration block for the sort order of each bucket in the table. See [`sort_columns`](#sort_columns) below.
+* `stored_as_sub_directories` - Whether the table data is stored in subdirectories.
+
+#### columns
+
+* `comment` - Free-form text comment.
+* `name` - Name of the Column.
+* `parameters` - Key-value pairs defining properties associated with the column.
+* `type` - Datatype of data in the Column.
+
+#### schema_reference
+
+* `schema_id` - Configuration block that contains schema identity fields. See [`schema_id`](#schema_id) below.
+* `schema_version_id` - Unique ID assigned to a version of the schema.
+* `schema_version_number` - Version number of the schema.
+
+##### schema_id
+
+* `registry_name` - Name of the schema registry that contains the schema.
+* `schema_arn` - ARN of the schema.
+* `schema_name` - Name of the schema.
+
+#### ser_de_info
+
+* `name` - Name of the SerDe.
+* `parameters` - Map of initialization parameters for the SerDe, in key-value form.
+* `serialization_library` - Usually the class that implements the SerDe. An example is `org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe`.
+
+#### sort_columns
+
+* `column` - Name of the column.
+* `sort_order` - Whether the column is sorted in ascending (`1`) or descending order (`0`).
+
+#### skewed_info
+
+* `skewed_column_names` - List of names of columns that contain skewed values.
+* `skewed_column_value_location_maps` - Map of skewed values to the columns that contain them.
+* `skewed_column_values` - List of values that appear so frequently as to be considered skewed.
+
+### target_table
+
+* `catalog_id` - ID of the Data Catalog in which the table resides.
+* `database_name` - Name of the catalog database that contains the target table.
+* `name` - Name of the target table.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/glue_connection.html.markdown b/website/docs/cdktf/python/d/glue_connection.html.markdown
new file mode 100644
index 00000000000..03434dfa1cf
--- /dev/null
+++ b/website/docs/cdktf/python/d/glue_connection.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_connection"
+description: |-
+  Get information on an AWS Glue Connection
+---
+
+
+
+# Data Source: aws_glue_connection
+
+This data source can be used to fetch information about a specific Glue Connection.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_glue_connection import DataAwsGlueConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsGlueConnection(self, "example",
+            id="123456789123:connection"
+        )
+```
+
+## Argument Reference
+
+* `id` - (Required) Concatenation of the catalog ID and connection name. For example, if your account ID is
+`123456789123` and the connection name is `conn`, then the ID is `123456789123:conn`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Glue Connection.
+* `catalog_id` - Catalog ID of the Glue Connection.
+* `connection_type` - Type of Glue Connection.
+* `description` - Description of the connection.
+* `match_criteria` - A list of criteria that can be used in selecting this connection.
+* `name` - Name of the Glue Connection.
+* `physical_connection_requirements` - A map of physical connection requirements, such as VPC and SecurityGroup.
+* `tags` - Tags assigned to the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/glue_data_catalog_encryption_settings.html.markdown b/website/docs/cdktf/python/d/glue_data_catalog_encryption_settings.html.markdown
new file mode 100644
index 00000000000..33358e3d1c2
--- /dev/null
+++ b/website/docs/cdktf/python/d/glue_data_catalog_encryption_settings.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_data_catalog_encryption_settings"
+description: |-
+  Get information on AWS Glue Data Catalog Encryption Settings
+---
+
+
+
+# Data Source: aws_glue_data_catalog_encryption_settings
+
+This data source can be used to fetch information about AWS Glue Data Catalog Encryption Settings.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_glue_data_catalog_encryption_settings import DataAwsGlueDataCatalogEncryptionSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsGlueDataCatalogEncryptionSettings(self, "example",
+            catalog_id="123456789123"
+        )
+```
+
+## Argument Reference
+
+* `catalog_id` - (Required) ID of the Data Catalog. This is typically the AWS account ID.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `data_catalog_encryption_settings` - The security configuration of the Data Catalog. See [Data Catalog Encryption Settings](#data_catalog_encryption_settings).
+* `id` - The ID of the Data Catalog whose security configuration was read.
+
+### data_catalog_encryption_settings
+
+* `connection_password_encryption` - When connection password protection is enabled, the Data Catalog uses a customer-provided key to encrypt the password as part of CreateConnection or UpdateConnection and store it in the ENCRYPTED_PASSWORD field in the connection properties. You can enable catalog encryption or only password encryption. See [Connection Password Encryption](#connection_password_encryption).
+* `encryption_at_rest` - Encryption-at-rest configuration for the Data Catalog. See [Encryption At Rest](#encryption_at_rest).
+
+### connection_password_encryption
+
+* `return_connection_password_encrypted` - When set to `true`, passwords remain encrypted in the responses of GetConnection and GetConnections. This encryption takes effect independently of the catalog encryption.
+* `aws_kms_key_id` - KMS key ARN that is used to encrypt the connection password.
+
+### encryption_at_rest
+
+* `catalog_encryption_mode` - The encryption-at-rest mode for encrypting Data Catalog data.
+* `sse_aws_kms_key_id` - ARN of the AWS KMS key to use for encryption at rest.
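+
+As a rough illustration, the nested attributes above can be wired into outputs or other resources. The sketch below is not generated by `cdktf convert`; it assumes the provider bindings expose the nested blocks through cdktf's usual generated `.get(index)` list accessors:
+
+```python
+# A minimal sketch (assumption: nested blocks are reachable via the
+# generated `.get(index)` accessors) that surfaces the at-rest KMS key.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_glue_data_catalog_encryption_settings import DataAwsGlueDataCatalogEncryptionSettings
+class EncryptionSettingsLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        settings = DataAwsGlueDataCatalogEncryptionSettings(self, "example",
+            catalog_id="123456789123"
+        )
+        # data_catalog_encryption_settings is a single-element nested block.
+        at_rest = settings.data_catalog_encryption_settings.get(0).encryption_at_rest.get(0)
+        TerraformOutput(self, "catalog_sse_kms_key",
+            value=at_rest.sse_aws_kms_key_id
+        )
+```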
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/glue_script.html.markdown b/website/docs/cdktf/python/d/glue_script.html.markdown new file mode 100644 index 00000000000..914886bffe8 --- /dev/null +++ b/website/docs/cdktf/python/d/glue_script.html.markdown @@ -0,0 +1,254 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_script" +description: |- + Generate Glue script from Directed Acyclic Graph +--- + + + +# Data Source: aws_glue_script + +Use this data source to generate a Glue script from a Directed Acyclic Graph (DAG). + +## Example Usage + +### Generate Python Script + +```terraform +data "aws_glue_script" "example" { + language = "PYTHON" + + dag_edge { + source = "datasource0" + target = "applymapping1" + } + + dag_edge { + source = "applymapping1" + target = "selectfields2" + } + + dag_edge { + source = "selectfields2" + target = "resolvechoice3" + } + + dag_edge { + source = "resolvechoice3" + target = "datasink4" + } + + dag_node { + id = "datasource0" + node_type = "DataSource" + + args { + name = "database" + value = "\"${aws_glue_catalog_database.source.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.source.name}\"" + } + } + + dag_node { + id = "applymapping1" + node_type = "ApplyMapping" + + args { + name = "mapping" + value = "[(\"column1\", \"string\", \"column1\", \"string\")]" + } + } + + dag_node { + id = "selectfields2" + node_type = "SelectFields" + + args { + name = "paths" + value = "[\"column1\"]" + } + } + + dag_node { + id = "resolvechoice3" + node_type = "ResolveChoice" + + args { + name = "choice" + value = "\"MATCH_CATALOG\"" + } + + args { + name = "database" + value = "\"${aws_glue_catalog_database.destination.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.destination.name}\"" + } + } + + dag_node { + id = "datasink4" + node_type = "DataSink" + + args { + name = "database" + value = "\"${aws_glue_catalog_database.destination.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.destination.name}\"" + } + } +} + +output "python_script" { + value = data.aws_glue_script.example.python_script +} +``` + +### Generate Scala Code + +```terraform +data "aws_glue_script" "example" { + language = "SCALA" + + dag_edge { + source = "datasource0" + target = "applymapping1" + } + + dag_edge { + source = "applymapping1" + target = "selectfields2" + } + + dag_edge { + source = "selectfields2" + target = "resolvechoice3" + } + + dag_edge { + source = "resolvechoice3" + target = "datasink4" + } + + dag_node { + id = "datasource0" + node_type = "DataSource" + + args { + name = "database" + value = "\"${aws_glue_catalog_database.source.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.source.name}\"" + } + } + + dag_node { + id = "applymapping1" + node_type = "ApplyMapping" + + args { + name = "mappings" + value = "[(\"column1\", \"string\", \"column1\", \"string\")]" + } + } + + dag_node { + id = "selectfields2" + node_type = "SelectFields" + + args { + name = "paths" + value = "[\"column1\"]" + } + } + + dag_node { + id = "resolvechoice3" + node_type = "ResolveChoice" + + args { + name = "choice" + value = "\"MATCH_CATALOG\"" + } + + args { + name = "database" + value = "\"${aws_glue_catalog_database.destination.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.destination.name}\"" + } + } + + dag_node { + id = "datasink4" + node_type = "DataSink" + + args { + 
name = "database"
+      value = "\"${aws_glue_catalog_database.destination.name}\""
+    }
+
+    args {
+      name = "table_name"
+      value = "\"${aws_glue_catalog_table.destination.name}\""
+    }
+  }
+}
+
+output "scala_code" {
+  value = data.aws_glue_script.example.scala_code
+}
+```
+
+## Argument Reference
+
+* `dag_edge` - (Required) List of the edges in the DAG. Defined below.
+* `dag_node` - (Required) List of the nodes in the DAG. Defined below.
+* `language` - (Optional) Programming language of the resulting code from the DAG. Defaults to `PYTHON`. Valid values are `PYTHON` and `SCALA`.
+
+### dag_edge Argument Reference
+
+* `source` - (Required) ID of the node at which the edge starts.
+* `target` - (Required) ID of the node at which the edge ends.
+* `target_parameter` - (Optional) Target of the edge.
+
+### dag_node Argument Reference
+
+* `args` - (Required) Nested configuration block for an argument or property of a node. Defined below.
+* `id` - (Required) Node identifier that is unique within the node's graph.
+* `node_type` - (Required) Type of the node.
+* `line_number` - (Optional) Line number of the node.
+
+#### args Argument Reference
+
+* `name` - (Required) Name of the argument or property.
+* `value` - (Required) Value of the argument or property.
+* `param` - (Optional) Boolean if the value is used as a parameter. Defaults to `false`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `python_script` - Python script generated from the DAG when the `language` argument is set to `PYTHON`.
+* `scala_code` - Scala code generated from the DAG when the `language` argument is set to `SCALA`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/grafana_workspace.html.markdown b/website/docs/cdktf/python/d/grafana_workspace.html.markdown
new file mode 100644
index 00000000000..49222ef134b
--- /dev/null
+++ b/website/docs/cdktf/python/d/grafana_workspace.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Managed Grafana"
+layout: "aws"
+page_title: "AWS: aws_grafana_workspace"
+description: |-
+  Gets information on an Amazon Managed Grafana workspace.
+---
+
+
+
+# Data Source: aws_grafana_workspace
+
+Provides an Amazon Managed Grafana workspace data source.
+
+## Example Usage
+
+### Basic configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_grafana_workspace import DataAwsGrafanaWorkspace
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsGrafanaWorkspace(self, "example",
+            workspace_id="g-2054c75a02"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `workspace_id` - (Required) Grafana workspace ID.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `account_access_type` - Type of account access for the workspace. Valid values are `CURRENT_ACCOUNT` and `ORGANIZATION`. If `ORGANIZATION` is specified, then `organizational_units` must also be present.
+* `authentication_providers` - Authentication providers for the workspace. Valid values are `AWS_SSO`, `SAML`, or both.
+* `arn` - ARN of the Grafana workspace.
+* `created_date` - Creation date of the Grafana workspace.
+* `data_sources` - Data sources for the workspace.
+* `description` - Workspace description.
+* `endpoint` - Endpoint of the Grafana workspace.
+* `grafana_version` - Version of Grafana running on the workspace.
+* `last_updated_date` - Last updated date of the Grafana workspace.
+* `name` - Grafana workspace name.
+* `notification_destinations` - The notification destinations.
+* `organization_role_name` - The role name that the workspace uses to access resources through Amazon Organizations.
+* `organizational_units` - The Amazon Organizations organizational units that the workspace is authorized to use data sources from.
+* `permission_type` - Permission type of the workspace.
+* `role_arn` - IAM role ARN that the workspace assumes.
+* `stack_set_name` - AWS CloudFormation stack set name that provisions IAM roles to be used by the workspace.
+* `status` - Status of the Grafana workspace.
+* `tags` - Tags assigned to the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/guardduty_detector.html.markdown b/website/docs/cdktf/python/d/guardduty_detector.html.markdown
new file mode 100644
index 00000000000..3c0989a2fd2
--- /dev/null
+++ b/website/docs/cdktf/python/d/guardduty_detector.html.markdown
@@ -0,0 +1,44 @@
+---
+subcategory: "GuardDuty"
+layout: "aws"
+page_title: "AWS: aws_guardduty_detector"
+description: |-
+  Retrieve information about a GuardDuty detector.
+---
+
+
+
+# Data Source: aws_guardduty_detector
+
+Retrieve information about a GuardDuty detector.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_guardduty_detector import DataAwsGuarddutyDetector
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsGuarddutyDetector(self, "example")
+```
+
+## Argument Reference
+
+* `id` - (Optional) ID of the detector.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `finding_publishing_frequency` - The frequency of notifications sent about subsequent finding occurrences.
+* `service_role_arn` - Service-linked role that grants GuardDuty access to the resources in the AWS account.
+* `status` - Current status of the detector.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/guardduty_finding_ids.html.markdown b/website/docs/cdktf/python/d/guardduty_finding_ids.html.markdown
new file mode 100644
index 00000000000..1f722df2b79
--- /dev/null
+++ b/website/docs/cdktf/python/d/guardduty_finding_ids.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "GuardDuty"
+layout: "aws"
+page_title: "AWS: aws_guardduty_finding_ids"
+description: |-
+  Terraform data source for retrieving AWS GuardDuty finding IDs.
+---
+
+
+
+# Data Source: aws_guardduty_finding_ids
+
+Terraform data source for retrieving AWS GuardDuty finding IDs.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_guardduty_finding_ids import DataAwsGuarddutyFindingIds +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsGuarddutyFindingIds(self, "example", + detector_id=Token.as_string(aws_guardduty_detector_example.id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `detector_id` - (Required) ID of the GuardDuty detector. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `has_findings` - Indicates whether findings are present for the specified detector. +* `finding_ids` - A list of finding IDs for the specified detector. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/iam_access_keys.html.markdown b/website/docs/cdktf/python/d/iam_access_keys.html.markdown new file mode 100644 index 00000000000..8cb6abac670 --- /dev/null +++ b/website/docs/cdktf/python/d/iam_access_keys.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_access_keys" +description: |- + Get information on IAM access keys associated with the specified IAM user. +--- + + + +# Data Source: aws_iam_access_keys + +This data source can be used to fetch information about IAM access keys of a +specific IAM user. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_access_keys import DataAwsIamAccessKeys +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamAccessKeys(self, "example", + user="an_example_user_name" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `user` - (Required) Name of the IAM user associated with the access keys. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `access_keys` - List of the IAM access keys associated with the specified user. See below. + +The elements of the `access_keys` are exported with the following attributes: + +* `access_key_id` - Access key ID. +* `create_date` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the access key was created. +* `status` - Access key status. Possible values are `Active` and `Inactive`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/iam_account_alias.html.markdown b/website/docs/cdktf/python/d/iam_account_alias.html.markdown new file mode 100644 index 00000000000..5b50424b6c6 --- /dev/null +++ b/website/docs/cdktf/python/d/iam_account_alias.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_account_alias" +description: |- + Provides the account alias for the AWS account associated with the provider + connection to AWS. +--- + + + +# Data Source: aws_iam_account_alias + +The IAM Account Alias data source allows access to the account alias +for the effective account in which Terraform is working. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_account_alias import DataAwsIamAccountAlias
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsIamAccountAlias(self, "current")
+        TerraformOutput(self, "account_alias",
+            value=current.account_alias
+        )
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `account_alias` - Alias associated with the AWS account.
+* `id` - Alias associated with the AWS account.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_group.html.markdown b/website/docs/cdktf/python/d/iam_group.html.markdown
new file mode 100644
index 00000000000..b48e0619e96
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_group.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_group"
+description: |-
+  Get information on an Amazon IAM group
+---
+
+
+
+# Data Source: aws_iam_group
+
+This data source can be used to fetch information about a specific
+IAM group. By using this data source, you can reference IAM group
+properties without having to hard code ARNs as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_group import DataAwsIamGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamGroup(self, "example",
+            group_name="an_example_group_name"
+        )
+```
+
+## Argument Reference
+
+* `group_name` - (Required) Friendly IAM group name to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Group ARN.
+* `group_id` - Stable and unique string identifying the group.
+* `path` - Path to the group.
+* `users` - List of objects containing group member information. See below.
+
+### `users`
+
+* `arn` - User ARN.
+* `path` - Path to the IAM user.
+* `user_id` - Stable and unique string identifying the IAM user.
+* `user_name` - Name of the IAM user.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_instance_profile.html.markdown b/website/docs/cdktf/python/d/iam_instance_profile.html.markdown
new file mode 100644
index 00000000000..b3f32c3f8cb
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_instance_profile.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_instance_profile"
+description: |-
+  Get information on an Amazon IAM Instance Profile
+---
+
+
+
+# Data Source: aws_iam_instance_profile
+
+This data source can be used to fetch information about a specific
+IAM instance profile. By using this data source, you can reference IAM
+instance profile properties without having to hard code ARNs as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_instance_profile import DataAwsIamInstanceProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamInstanceProfile(self, "example",
+            name="an_example_instance_profile_name"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly IAM instance profile name to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN.
+* `create_date` - String representation of the date the instance profile was created.
+* `path` - Path to the instance profile.
+* `role_arn` - Role ARN associated with this instance profile.
+* `role_id` - Role ID associated with this instance profile.
+* `role_name` - Role name associated with this instance profile.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_instance_profiles.html.markdown b/website/docs/cdktf/python/d/iam_instance_profiles.html.markdown
new file mode 100644
index 00000000000..a93d16a0bc0
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_instance_profiles.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_instance_profiles"
+description: |-
+  Get information on Amazon IAM Instance Profiles from an IAM role
+---
+
+
+
+# Data Source: aws_iam_instance_profiles
+
+This data source can be used to fetch information about all
+IAM instance profiles under a role. By using this data source, you can reference IAM
+instance profile properties without having to hard code ARNs as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_instance_profiles import DataAwsIamInstanceProfiles
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamInstanceProfiles(self, "example",
+            role_name="an_example_iam_role_name"
+        )
+```
+
+## Argument Reference
+
+* `role_name` - (Required) IAM role name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of ARNs of instance profiles.
+* `names` - Set of IAM instance profile names.
+* `paths` - Set of IAM instance profile paths.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_openid_connect_provider.html.markdown b/website/docs/cdktf/python/d/iam_openid_connect_provider.html.markdown
new file mode 100644
index 00000000000..02344e9efba
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_openid_connect_provider.html.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_openid_connect_provider"
+description: |-
+  Get information on an Amazon IAM OpenID Connect provider.
+---
+
+
+
+# Data Source: aws_iam_openid_connect_provider
+
+This data source can be used to fetch information about a specific
+IAM OpenID Connect provider. By using this data source, you can retrieve
+the resource information by either its `arn` or `url`.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_openid_connect_provider import DataAwsIamOpenidConnectProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamOpenidConnectProvider(self, "example",
+            arn="arn:aws:iam::123456789012:oidc-provider/accounts.google.com"
+        )
+```
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_openid_connect_provider import DataAwsIamOpenidConnectProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamOpenidConnectProvider(self, "example",
+            url="https://accounts.google.com"
+        )
+```
+
+## Argument Reference
+
+* `arn` - (Optional) ARN of the OpenID Connect provider.
+* `url` - (Optional) URL of the OpenID Connect provider.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `client_id_list` - List of client IDs (also known as audiences). When a mobile or web app registers with an OpenID Connect provider, they establish a value that identifies the application. (This is the value that's sent as the client_id parameter on OAuth requests.)
+* `thumbprint_list` - List of server certificate thumbprints for the OpenID Connect (OIDC) identity provider's server certificate(s).
+* `tags` - Map of resource tags for the IAM OIDC provider.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_policy.html.markdown b/website/docs/cdktf/python/d/iam_policy.html.markdown
new file mode 100644
index 00000000000..5e5b70ea602
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_policy.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_policy"
+description: |-
+  Get information on an Amazon IAM policy
+---
+
+
+
+# Data Source: aws_iam_policy
+
+This data source can be used to fetch information about a specific
+IAM policy.
+
+## Example Usage
+
+### By ARN
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_iam_policy import DataAwsIamPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamPolicy(self, "example", + arn="arn:aws:iam::123456789012:policy/UsersManageOwnCredentials" + ) +``` + +### By Name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy import DataAwsIamPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamPolicy(self, "example", + name="test_policy" + ) +``` + +## Argument Reference + +* `arn` - (Optional) ARN of the IAM policy. + Conflicts with `name` and `path_prefix`. +* `name` - (Optional) Name of the IAM policy. + Conflicts with `arn`. +* `path_prefix` - (Optional) Prefix of the path to the IAM policy. + Defaults to a slash (`/`). + Conflicts with `arn`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the policy. +* `path` - Path to the policy. +* `description` - Description of the policy. +* `policy` - Policy document of the policy. +* `policy_id` - Policy's ID. +* `tags` - Key-value mapping of tags for the IAM Policy. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/iam_policy_document.html.markdown b/website/docs/cdktf/python/d/iam_policy_document.html.markdown new file mode 100644 index 00000000000..c2e1b114a7d --- /dev/null +++ b/website/docs/cdktf/python/d/iam_policy_document.html.markdown @@ -0,0 +1,570 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_policy_document" +description: |- + Generates an IAM policy document in JSON format +--- + + + +# Data Source: aws_iam_policy_document + +Generates an IAM policy document in JSON format for use with resources that expect policy documents such as [`aws_iam_policy`](/docs/providers/aws/r/iam_policy.html). + +Using this data source to generate policy documents is *optional*. It is also valid to use literal JSON strings in your configuration or to use the `file` interpolation function to read a raw JSON policy document from a file. + +~> **NOTE:** AWS's IAM policy document syntax allows for replacement of [policy variables](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_variables.html) within a statement using `${...}`-style notation, which conflicts with Terraform's interpolation syntax. In order to use AWS policy variables with this data source, use `&{...}` notation for interpolations that should be processed by AWS rather than by Terraform. + +-> For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Example Usage + +### Basic Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_policy import IamPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsIamPolicyDocument(self, "example", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:ListAllMyBuckets", "s3:GetBucketLocation"], + resources=["arn:aws:s3:::*"], + sid="1" + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:ListBucket"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringLike", + values=["", "home/", "home/&{aws:username}/"], + variable="s3:prefix" + ) + ], + resources=["arn:aws:s3:::${" + s3_bucket_name.value + "}"] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + resources=["arn:aws:s3:::${" + s3_bucket_name.value + "}/home/&{aws:username}", "arn:aws:s3:::${" + s3_bucket_name.value + "}/home/&{aws:username}/*" + ] + ) + ] + ) + aws_iam_policy_example = IamPolicy(self, "example_1", + name="example_policy", + path="/", + policy=Token.as_string(example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_policy_example.override_logical_id("example") +``` + +### Example Multiple Condition Keys and Values + +You can specify a [condition with multiple keys and values](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_multi-value-conditions.html) by supplying multiple `condition` blocks with the same `test` value, but differing `variable` and `values` values. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamPolicyDocument(self, "example_multiple_condition_keys_and_values", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["kms:Decrypt", "kms:GenerateDataKey"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="ForAnyValue:StringEquals", + values=["pi"], + variable="kms:EncryptionContext:service" + ), DataAwsIamPolicyDocumentStatementCondition( + test="ForAnyValue:StringEquals", + values=["rds"], + variable="kms:EncryptionContext:aws:pi:service" + ), DataAwsIamPolicyDocumentStatementCondition( + test="ForAnyValue:StringEquals", + values=["db-AAAAABBBBBCCCCCDDDDDEEEEE", "db-EEEEEDDDDDCCCCCBBBBBAAAAA" + ], + variable="kms:EncryptionContext:aws:rds:db-id" + ) + ], + resources=["*"] + ) + ] + ) +``` + +`data.aws_iam_policy_document.example_multiple_condition_keys_and_values.json` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": [ + "kms:GenerateDataKey", + "kms:Decrypt" + ], + "Resource": "*", + "Condition": { + "ForAnyValue:StringEquals": { + "kms:EncryptionContext:aws:pi:service": "rds", + "kms:EncryptionContext:aws:rds:db-id": [ + "db-AAAAABBBBBCCCCCDDDDDEEEEE", + "db-EEEEEDDDDDCCCCCBBBBBAAAAA" + ], + "kms:EncryptionContext:service": "pi" + } + } + } + ] +} +``` + +### Example Assume-Role Policy with Multiple Principals + +You can specify multiple principal blocks with different types. 
You can also use this data source to generate an assume-role policy. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamPolicyDocument(self, "event_stream_bucket_role_assume_role_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["firehose.amazonaws.com"], + type="Service" + ), DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=[trusted_role_arn.string_value], + type="AWS" + ), DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["arn:aws:iam::${" + account_id.value + "}:saml-provider/${" + provider_name.value + "}", "cognito-identity.amazonaws.com" + ], + type="Federated" + ) + ] + ) + ] + ) +``` + +### Example Using A Source Document + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + source = DataAwsIamPolicyDocument(self, "source", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:*"], + resources=["*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + resources=["*"], + sid="SidToOverride" + ) + ] + ) + DataAwsIamPolicyDocument(self, "source_document_example", + source_policy_documents=[Token.as_string(source.json)], + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + resources=["arn:aws:s3:::somebucket", "arn:aws:s3:::somebucket/*"], + sid="SidToOverride" + ) + ] + ) +``` + +`data.aws_iam_policy_document.source_document_example.json` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*" + }, + { + "Sid": "SidToOverride", + "Effect": "Allow", + "Action": "s3:*", + "Resource": [ + "arn:aws:s3:::somebucket/*", + "arn:aws:s3:::somebucket" + ] + } + ] +} +``` + +### Example Using An Override Document + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + override = DataAwsIamPolicyDocument(self, "override", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + resources=["*"], + sid="SidToOverride" + ) + ] + ) + DataAwsIamPolicyDocument(self, "override_policy_document_example", + override_policy_documents=[Token.as_string(override.json)], + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:*"], + resources=["*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + resources=["arn:aws:s3:::somebucket", "arn:aws:s3:::somebucket/*"], + sid="SidToOverride" + ) + ] + ) +``` + +`data.aws_iam_policy_document.override_policy_document_example.json` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*" + }, + { + "Sid": "SidToOverride", + "Effect": "Allow", + "Action": "s3:*", + "Resource": "*" + } + ] +} +``` + +### Example with Both Source and Override Documents + +You can also combine `source_policy_documents` and `override_policy_documents` in the same document. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + override = DataAwsIamPolicyDocument(self, "override", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:GetObject"], + resources=["*"], + sid="OverridePlaceholder" + ) + ] + ) + source = DataAwsIamPolicyDocument(self, "source", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:DescribeAccountAttributes"], + resources=["*"], + sid="OverridePlaceholder" + ) + ] + ) + DataAwsIamPolicyDocument(self, "politik", + override_policy_documents=[Token.as_string(override.json)], + source_policy_documents=[Token.as_string(source.json)] + ) +``` + +`data.aws_iam_policy_document.politik.json` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "OverridePlaceholder", + "Effect": "Allow", + "Action": "s3:GetObject", + "Resource": "*" + } + ] +} +``` + +### Example of Merging Source Documents + +Multiple documents can be combined using the `source_policy_documents` or `override_policy_documents` attributes. `source_policy_documents` requires that all documents have unique Sids, while `override_policy_documents` will iteratively override matching Sids. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        source_one = DataAwsIamPolicyDocument(self, "source_one",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["ec2:*"],
+                resources=["*"]
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["s3:*"],
+                resources=["*"],
+                sid="UniqueSidOne"
+            )
+            ]
+        )
+        source_two = DataAwsIamPolicyDocument(self, "source_two",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["iam:*"],
+                resources=["*"],
+                sid="UniqueSidTwo"
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["lambda:*"],
+                resources=["*"]
+            )
+            ]
+        )
+        DataAwsIamPolicyDocument(self, "combined",
+            source_policy_documents=[
+                Token.as_string(source_one.json),
+                Token.as_string(source_two.json)
+            ]
+        )
+```
+
+`data.aws_iam_policy_document.combined.json` will evaluate to:
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "",
+      "Effect": "Allow",
+      "Action": "ec2:*",
+      "Resource": "*"
+    },
+    {
+      "Sid": "UniqueSidOne",
+      "Effect": "Allow",
+      "Action": "s3:*",
+      "Resource": "*"
    },
+    {
+      "Sid": "UniqueSidTwo",
+      "Effect": "Allow",
+      "Action": "iam:*",
+      "Resource": "*"
+    },
+    {
+      "Sid": "",
+      "Effect": "Allow",
+      "Action": "lambda:*",
+      "Resource": "*"
+    }
+  ]
+}
+```
+
+### Example of Merging Override Documents
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        policy_one = DataAwsIamPolicyDocument(self, "policy_one",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:*"],
+                effect="Allow",
+                resources=["*"],
+                sid="OverridePlaceHolderOne"
+            )
+            ]
+        )
+        policy_three = DataAwsIamPolicyDocument(self, "policy_three",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:*"],
+                effect="Deny",
+                resources=["*"],
+                sid="OverridePlaceHolderOne"
+            )
+            ]
+        )
+        policy_two = DataAwsIamPolicyDocument(self, "policy_two",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["ec2:*"],
+                effect="Allow",
+                resources=["*"]
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["iam:*"],
+                effect="Allow",
+                resources=["*"],
+                sid="OverridePlaceHolderTwo"
+            )
+            ]
+        )
+        DataAwsIamPolicyDocument(self, "combined",
+            override_policy_documents=[
+                Token.as_string(policy_one.json),
+                Token.as_string(policy_two.json),
+                Token.as_string(policy_three.json)
+            ],
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["*"],
+                effect="Deny",
+                resources=["*"],
+                sid="OverridePlaceHolderTwo"
+            )
+            ]
+        )
+```
+
+`data.aws_iam_policy_document.combined.json` will evaluate to:
+
+```json
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Sid": "OverridePlaceHolderTwo",
+      "Effect": "Allow",
+      "Action": "iam:*",
+      "Resource": "*"
+    },
+    {
+      "Sid": "OverridePlaceHolderOne",
+      "Effect": "Deny",
+      "Action": "logs:*",
+      "Resource": "*"
+    },
+    {
+      "Sid": "",
+      "Effect": "Allow",
+      "Action": "ec2:*",
+      "Resource": "*"
+    }
+  ]
+}
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+~> **NOTE:** Statements without a `sid` cannot be overridden.
In other words, a statement without a `sid` from `source_policy_documents` cannot be overridden by statements from `override_policy_documents`. + +* `override_policy_documents` (Optional) - List of IAM policy documents that are merged together into the exported document. In merging, statements with non-blank `sid`s will override statements with the same `sid` from earlier documents in the list. Statements with non-blank `sid`s will also override statements with the same `sid` from `source_policy_documents`. Non-overriding statements will be added to the exported document. +* `policy_id` (Optional) - ID for the policy document. +* `source_policy_documents` (Optional) - List of IAM policy documents that are merged together into the exported document. Statements defined in `source_policy_documents` must have unique `sid`s. Statements with the same `sid` from `override_policy_documents` will override source statements. +* `statement` (Optional) - Configuration block for a policy statement. Detailed below. +* `version` (Optional) - IAM policy document version. Valid values are `2008-10-17` and `2012-10-17`. Defaults to `2012-10-17`. For more information, see the [AWS IAM User Guide](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_version.html). + +### `statement` + +The following arguments are optional: + +* `actions` (Optional) - List of actions that this statement either allows or denies. For example, `["ec2:RunInstances", "s3:*"]`. +* `condition` (Optional) - Configuration block for a condition. Detailed below. +* `effect` (Optional) - Whether this statement allows or denies the given actions. Valid values are `Allow` and `Deny`. Defaults to `Allow`. +* `not_actions` (Optional) - List of actions that this statement does *not* apply to. Use to apply a policy statement to all actions *except* those listed. +* `not_principals` (Optional) - Like `principals` except these are principals that the statement does *not* apply to. +* `not_resources` (Optional) - List of resource ARNs that this statement does *not* apply to. Use to apply a policy statement to all resources *except* those listed. Conflicts with `resources`. +* `principals` (Optional) - Configuration block for principals. Detailed below. +* `resources` (Optional) - List of resource ARNs that this statement applies to. This is required by AWS if used for an IAM policy. Conflicts with `not_resources`. +* `sid` (Optional) - Sid (statement ID) is an identifier for a policy statement. + +### `condition` + +A `condition` constrains whether a statement applies in a particular situation. Conditions can be specific to an AWS service. When using multiple `condition` blocks, they must *all* evaluate to true for the policy statement to apply. In other words, AWS evaluates the conditions as though with an "AND" boolean operation. + +The following arguments are required: + +* `test` (Required) Name of the [IAM condition operator](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_condition_operators.html) to evaluate. +* `values` (Required) Values to evaluate the condition against. If multiple values are provided, the condition matches if at least one of them applies. That is, AWS evaluates multiple values as though using an "OR" boolean operation. +* `variable` (Required) Name of a [Context Variable](http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#AvailableKeys) to apply the condition to. 
Context variables may either be standard AWS variables starting with `aws:` or service-specific variables prefixed with the service name.
+
+### `principals` and `not_principals`
+
+The `principals` and `not_principals` arguments define to whom a statement applies or does not apply, respectively.
+
+~> **NOTE:** Even though the [IAM Documentation](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html) states that `"Principal": "*"` and `"Principal": {"AWS": "*"}` are equivalent, those principal elements have different behavior in some situations, e.g., IAM Role Trust Policy. To have Terraform render JSON containing `"Principal": "*"`, use `type = "*"` and `identifiers = ["*"]`. To have Terraform render JSON containing `"Principal": {"AWS": "*"}`, use `type = "AWS"` and `identifiers = ["*"]`.
+
+-> For more information about AWS principals, refer to the [AWS Identity and Access Management User Guide: AWS JSON policy elements: Principal](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html).
+
+The following arguments are required:
+
+* `identifiers` (Required) List of identifiers for principals. When `type` is `AWS`, these are IAM principal ARNs, e.g., `arn:aws:iam::123456789012:role/yak-role`. When `type` is `Service`, these are AWS service principals, e.g., `lambda.amazonaws.com`. When `type` is `Federated`, these are web identity users or SAML provider ARNs, e.g., `accounts.google.com` or `arn:aws:iam::123456789012:saml-provider/yak-saml-provider`. When `type` is `CanonicalUser`, these are [canonical user IDs](https://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html#FindingCanonicalId), e.g., `79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be`.
+* `type` (Required) Type of principal. Valid values include `AWS`, `Service`, `Federated`, `CanonicalUser` and `*`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `json` - Standard JSON policy document rendered based on the arguments above.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_principal_policy_simulation.html.markdown b/website/docs/cdktf/python/d/iam_principal_policy_simulation.html.markdown
new file mode 100644
index 00000000000..226abd3c1a9
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_principal_policy_simulation.html.markdown
@@ -0,0 +1,239 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_principal_policy_simulation"
+description: |-
+  Runs a simulation of the IAM policies of a particular principal against a given hypothetical request.
+---
+
+
+
+# Data Source: aws_iam_principal_policy_simulation
+
+Runs a simulation of the IAM policies of a particular principal against a given hypothetical request.
+
+You can use this data source in conjunction with
+[Preconditions and Postconditions](https://www.terraform.io/language/expressions/custom-conditions#preconditions-and-postconditions) so that your configuration can test either whether it has sufficient access to do its own work, or whether the policies it declares are sufficient for their intended use elsewhere.
+
+-> **Note:** Correctly using this data source requires familiarity with various details of AWS Identity and Access Management, and how various AWS services integrate with it.
For general information on the AWS IAM policy simulator, see [Testing IAM policies with the IAM policy simulator](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_testing-policies.html). This data source wraps the `iam:SimulatePrincipalPolicy` API action described on that page. + +## Example Usage + +### Self Access-checking Example + +The following example raises an error if the credentials passed to the AWS provider do not have access to perform the three actions `s3:GetObject`, `s3:PutObject`, and `s3:DeleteObject` on the S3 bucket with the given ARN. It combines `aws_iam_principal_policy_simulation` with the core Terraform postconditions feature. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle, Postcondition +from constructs import Construct +from cdktf import TerraformSelf, Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_iam_principal_policy_simulation import DataAwsIamPrincipalPolicySimulation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + DataAwsIamPrincipalPolicySimulation(self, "s3_object_access", + action_names=["s3:GetObject", "s3:PutObject", "s3:DeleteObject"], + lifecycle=TerraformResourceLifecycle( + postcondition=[Postcondition( + condition=TerraformSelf.get_any("all_allowed"), + error_message=" Given AWS credentials do not have sufficient access to manage " + + Token.as_string( + Fn.join(", ", + Token.as_list(TerraformSelf.get_any("resource_arns")))) + ".\n\n" + ) + ] + ), + policy_source_arn=Token.as_string(current.arn), + resource_arns=["arn:aws:s3:::my-test-bucket"] + ) +``` + +If you intend to use this data source to quickly raise an error when the given credentials are insufficient then you must use [`depends_on`](https://www.terraform.io/language/meta-arguments/depends_on) inside any resource which would require those credentials, to ensure that the policy check will run first: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_object import S3BucketObject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, key): + super().__init__(scope, name) + S3BucketObject(self, "example", + bucket="my-test-bucket", + depends_on=[s3_object_access], + key=key + ) +``` + +### Testing the Effect of a Declared Policy + +The following example declares an S3 bucket and a user that should have access to the bucket, and then uses `aws_iam_principal_policy_simulation` to verify that the user does indeed have access to perform needed operations against the bucket. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle, Postcondition +from constructs import Construct +from cdktf import Fn, Token, TerraformSelf, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_iam_principal_policy_simulation import DataAwsIamPrincipalPolicySimulation +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_policy import IamUserPolicy +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_policy import S3BucketPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = IamUser(self, "example", + name="example" + ) + aws_s3_bucket_example = S3Bucket(self, "example_1", + bucket="my-test-bucket" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_example.override_logical_id("example") + current = DataAwsCallerIdentity(self, "current") + s3_access = IamUserPolicy(self, "s3_access", + name="example_s3_access", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "s3:GetObject", + "Effect": "Allow", + "Resource": aws_s3_bucket_example.arn + } + ], + "Version": "2012-10-17" + })), + user=example.name + ) + account_access = S3BucketPolicy(self, "account_access", + bucket=Token.as_string(aws_s3_bucket_example.bucket), + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "s3:*", + "Effect": "Allow", + "Principal": { + "AWS": current.account_id + }, + "Resource": [aws_s3_bucket_example.arn, "${" + aws_s3_bucket_example.arn + "}/*" + ] + } + ], + "Version": "2012-10-17" + })) + ) + DataAwsIamPrincipalPolicySimulation(self, "s3_object_access", + action_names=["s3:GetObject"], + depends_on=[s3_access], + lifecycle=TerraformResourceLifecycle( + postcondition=[Postcondition( + condition=TerraformSelf.get_any("all_allowed"), + error_message=" " + + Token.as_string(TerraformSelf.get_any("policy_source_arn")) + " does not have the expected access to " + + Token.as_string( + Fn.join(", ", + Token.as_list(TerraformSelf.get_any("resource_arns")))) + ".\n\n" + ) + ] + ), + policy_source_arn=example.arn, + resource_arns=[Token.as_string(aws_s3_bucket_example.arn)], + resource_policy_json=account_access.policy + ) +``` + +When using `aws_iam_principal_policy_simulation` to test the effect of a policy declared elsewhere in the same configuration, it's important to use [`depends_on`](https://www.terraform.io/language/meta-arguments/depends_on) to make sure that the needed policy has been fully created or updated before running the simulation. + +## Argument Reference + +The following arguments are required for any principal policy simulation: + +* `action_names` (Required) - A set of IAM action names to run simulations for. Each entry in this set adds an additional hypothetical request to the simulation. + + Action names consist of a service prefix and an action verb separated by a colon, such as `s3:GetObject`. Refer to [Actions, resources, and condition keys for AWS services](https://docs.aws.amazon.com/service-authorization/latest/reference/reference_policies_actions-resources-contextkeys.html) to see the full set of possible IAM action names across all AWS services. + +* `policy_source_arn` (Required) - The [ARN](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html) of the IAM user, group, or role whose policies will be included in the simulation. + +You must closely match the form of the real service request you are simulating in order to achieve a realistic result. 
You can use the following additional arguments to specify other characteristics of the simulated requests:
+
+* `caller_arn` (Optional) - The ARN of a user that will appear as the "caller" of the simulated requests. If you do not specify `caller_arn` then the simulation will use the `policy_source_arn` instead, if it contains a user ARN.
+
+* `context` (Optional) - Each [`context` block](#context-block-arguments) defines an entry in the table of additional context keys in the simulated request.
+
+    IAM uses context keys both for custom conditions and for interpolating dynamic request-specific values into policy values. If you use policies that include those features then you will need to provide suitable example values for those keys to achieve a realistic simulation.
+
+* `additional_policies_json` (Optional) - A set of additional principal policy documents to include in the simulation. The simulator will behave as if each of these policies were associated with the object specified in `policy_source_arn`, allowing you to test the effect of hypothetical policies not yet created.
+
+* `permissions_boundary_policies_json` (Optional) - A set of [permissions boundary policy documents](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to include in the simulation.
+
+* `resource_arns` (Optional) - A set of ARNs of resources to include in the simulation.
+
+    This argument is important for actions that have either required or optional resource types listed in [Actions, resources, and condition keys for AWS services](https://docs.aws.amazon.com/service-authorization/latest/reference/reference_policies_actions-resources-contextkeys.html), and you must provide ARNs that identify AWS objects of the appropriate types for the chosen actions.
+
+    The policy simulator only automatically loads policies associated with the `policy_source_arn`, so if your given resources have their own resource-level policy then you'll also need to provide that explicitly using the `resource_policy_json` argument to achieve a realistic simulation.
+
+* `resource_handling_option` (Optional) - Specifies a special simulation type to run. Some EC2 actions require special simulation behaviors and a particular set of resource ARNs to achieve a realistic result.
+
+    For more details, see the `ResourceHandlingOption` request parameter for [the underlying `iam:SimulatePrincipalPolicy` action](https://docs.aws.amazon.com/IAM/latest/APIReference/API_SimulatePrincipalPolicy.html).
+
+* `resource_owner_account_id` (Optional) - An AWS account ID to use for any resource ARN in `resource_arns` that doesn't include its own AWS account ID. If unspecified, the simulator will use the account ID from the `caller_arn` argument as a placeholder.
+
+* `resource_policy_json` (Optional) - An IAM policy document representing the resource-level policy of all of the resources specified in `resource_arns`.
+
+    The policy simulator cannot automatically load policies that are associated with individual resources, as described in the documentation for `resource_arns` above.
+
+### `context` block arguments
+
+The following arguments are all required in each `context` block:
+
+* `key` (Required) - The context _condition key_ to set.
+
+    If you have policies containing `Condition` elements or using dynamic interpolations then you will need to provide suitable values for each condition key your policies use.
See [Actions, resources, and condition keys for AWS services](https://docs.aws.amazon.com/service-authorization/latest/reference/reference_policies_actions-resources-contextkeys.html) to find the various condition keys that are normally provided for real requests to each action of each AWS service.
+
+* `type` (Required) - An IAM value type that determines how the policy simulator will interpret the strings given in `values`.
+
+    For more information, see the `ContextKeyType` field of [`iam.ContextEntry`](https://docs.aws.amazon.com/IAM/latest/APIReference/API_ContextEntry.html) in the underlying API.
+
+* `values` (Required) - A set of one or more values for this context entry.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `all_allowed` - `true` if all of the simulation results have decision "allowed", or `false` otherwise.
+
+    This is a convenient shorthand for the common case of using a postcondition to require that all of the simulated requests passed. If you need to describe a more granular condition, use the `results` attribute instead.
+
+* `results` - A set of result objects, one for each of the simulated requests, with the following nested attributes:
+
+    * `action_name` - The name of the single IAM action used for this particular request.
+
+    * `decision` - The raw decision determined from all of the policies in scope; either "allowed", "explicitDeny", or "implicitDeny".
+
+    * `allowed` - `true` if `decision` is "allowed", and `false` otherwise.
+
+    * `decision_details` - A map of arbitrary metadata entries returned by the policy simulator for this request.
+
+    * `resource_arn` - ARN of the resource that was used for this particular request. When you specify multiple actions and multiple resource ARNs, that causes a separate policy request for each combination of unique action and resource.
+
+    * `matched_statements` - A nested set of objects describing which policies contained statements that were relevant to this simulation request. Each object has attributes `source_policy_id` and `source_policy_type` to identify one of the policies.
+
+    * `missing_context_keys` - A set of context keys (or condition keys) that were needed by some of the policies contributing to this result but not specified using a `context` block in the configuration. Missing or incorrect context keys will typically cause a simulated request to be disallowed.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_role.html.markdown b/website/docs/cdktf/python/d/iam_role.html.markdown
new file mode 100644
index 00000000000..a4786fc5110
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_role.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_role"
+description: |-
+  Get information on an Amazon IAM role
+---
+
+
+
+# Data Source: aws_iam_role
+
+This data source can be used to fetch information about a specific
+IAM role. By using this data source, you can reference IAM role
+properties without having to hard code ARNs as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_iam_role import DataAwsIamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamRole(self, "example", + name="an_example_role_name" + ) +``` + +## Argument Reference + +* `name` - (Required) Friendly IAM role name to match. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Friendly IAM role name to match. +* `arn` - ARN of the role. +* `assume_role_policy` - Policy document associated with the role. +* `create_date` - Creation date of the role in RFC 3339 format. +* `description` - Description for the role. +* `max_session_duration` - Maximum session duration. +* `path` - Path to the role. +* `permissions_boundary` - The ARN of the policy that is used to set the permissions boundary for the role. +* `role_last_used` - Contains information about the last time that an IAM role was used. See [`role_last_used`](#role_last_used) for details. +* `unique_id` - Stable and unique string identifying the role. +* `tags` - Tags attached to the role. + +### role_last_used + +* `region` - The name of the AWS Region in which the role was last used. +* `last_used_date` - The date and time, in RFC 3339 format, that the role was last used. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/iam_roles.html.markdown b/website/docs/cdktf/python/d/iam_roles.html.markdown new file mode 100644 index 00000000000..2cc562f4d25 --- /dev/null +++ b/website/docs/cdktf/python/d/iam_roles.html.markdown @@ -0,0 +1,155 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_roles" +description: |- + Get information about a set of IAM Roles. +--- + + + +# Data Source: aws_iam_roles + +Use this data source to get the ARNs and Names of IAM Roles. + +## Example Usage + +### All roles in an account + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_roles import DataAwsIamRoles +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamRoles(self, "roles") +``` + +### Roles filtered by name regex + +Roles whose role-name contains `project` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_roles import DataAwsIamRoles +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamRoles(self, "roles", + name_regex=".*project.*" + ) +``` + +### Roles filtered by path prefix + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_roles import DataAwsIamRoles +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamRoles(self, "roles", + path_prefix="/custom-path" + ) +``` + +### Roles provisioned by AWS SSO + +Roles in the account filtered by path prefix + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_roles import DataAwsIamRoles +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamRoles(self, "roles", + path_prefix="/aws-reserved/sso.amazonaws.com/" + ) +``` + +Specific role in the account filtered by name regex and path prefix + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_roles import DataAwsIamRoles +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamRoles(self, "roles", + name_regex="AWSReservedSSO_permission_set_name_.*", + path_prefix="/aws-reserved/sso.amazonaws.com/" + ) +``` + +### Role ARNs with paths removed + +For services like Amazon EKS that do not permit a path in the role ARN when used in a cluster's configuration map + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_roles import DataAwsIamRoles +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + roles = DataAwsIamRoles(self, "roles", + path_prefix="/aws-reserved/sso.amazonaws.com/" + ) + TerraformOutput(self, "arns", + value="${[ for parts in ${" + ("${[ for arn in ${" + roles.arns + "} : split(\"/\", arn)]}") + "} : format(\"%s/%s\", parts[0], element(parts, length(parts) - 1))]}" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name_regex` - (Optional) Regex string to apply to the IAM roles list returned by AWS. This allows more advanced filtering not supported from the AWS API. This filtering is done locally on what AWS returns, and could have a performance impact if the result is large. Combine this with other options to narrow down the list AWS returns. +* `path_prefix` - (Optional) Path prefix for filtering the results. For example, the prefix `/application_abc/component_xyz/` gets all roles whose path starts with `/application_abc/component_xyz/`. If it is not included, it defaults to a slash (`/`), listing all roles. For more details, check out [list-roles in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched IAM roles. +* `names` - Set of Names of the matched IAM roles. 
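+
+The returned sets can be consumed like any other Terraform collection. As a minimal sketch (not produced by `cdktf convert`; the stack and output names are illustrative), the number of matched roles could be surfaced with Terraform's `length` function via `Fn.length_of`:
+
+```python
+# A minimal sketch, assuming bindings generated by `cdktf get`.
+# The stack and output names are illustrative, not part of the data source.
+from constructs import Construct
+from cdktf import Fn, TerraformOutput, TerraformStack, Token
+from imports.aws.data_aws_iam_roles import DataAwsIamRoles
+
+class RoleCountStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        roles = DataAwsIamRoles(self, "roles",
+            path_prefix="/aws-reserved/sso.amazonaws.com/"
+        )
+        # Terraform's length() over the set of matched role names.
+        TerraformOutput(self, "matched_role_count",
+            value=Fn.length_of(Token.as_any(roles.names))
+        )
+```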
+
+[1]: https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/list-roles.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_saml_provider.html.markdown b/website/docs/cdktf/python/d/iam_saml_provider.html.markdown
new file mode 100644
index 00000000000..aa26d273b20
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_saml_provider.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_saml_provider"
+description: |-
+  Get information on an IAM SAML provider.
+---
+
+
+
+# Data Source: aws_iam_saml_provider
+
+This data source can be used to fetch information about a specific
+IAM SAML provider. This will allow you to easily retrieve the metadata
+document of an existing SAML provider.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_saml_provider import DataAwsIamSamlProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamSamlProvider(self, "example",
+            arn="arn:aws:iam::123456789012:saml-provider/myprovider"
+        )
+```
+
+## Argument Reference
+
+* `arn` - (Required) ARN assigned by AWS for the provider.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `create_date` - Creation date of the SAML provider in RFC1123 format, e.g. `Mon, 02 Jan 2006 15:04:05 MST`.
+* `name` - Name of the provider.
+* `saml_metadata_document` - The XML document generated by an identity provider that supports SAML 2.0.
+* `tags` - Tags attached to the SAML provider.
+* `valid_until` - Expiration date and time for the SAML provider in RFC1123 format, e.g. `Mon, 02 Jan 2007 15:04:05 MST`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_server_certificate.html.markdown b/website/docs/cdktf/python/d/iam_server_certificate.html.markdown
new file mode 100644
index 00000000000..73c5b72363e
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_server_certificate.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_server_certificate"
+description: |-
+  Get information about a server certificate
+---
+
+
+
+# Data Source: aws_iam_server_certificate
+
+Use this data source to look up information about IAM Server Certificates.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_server_certificate import DataAwsIamServerCertificate
+from imports.aws.elb import Elb
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        my_domain = DataAwsIamServerCertificate(self, "my-domain",
+            latest=True,
+            name_prefix="my-domain.org"
+        )
+        Elb(self, "elb",
+            listener=[ElbListener(
+                instance_port=8000,
+                instance_protocol="https",
+                lb_port=443,
+                lb_protocol="https",
+                ssl_certificate_id=Token.as_string(my_domain.arn)
+            )
+            ],
+            name="my-domain-elb"
+        )
+```
+
+## Argument Reference
+
+* `name_prefix` - Prefix of the certificate name to filter by.
+* `path_prefix` - Prefix of the certificate path to filter by.
+* `name` - Exact name of the certificate to look up.
+* `latest` - Sort results by expiration date, returning the certificate with the expiration date furthest in the future.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` is set to the unique id of the IAM Server Certificate
+* `arn` is set to the ARN of the IAM Server Certificate
+* `path` is set to the path of the IAM Server Certificate
+* `expiration_date` is set to the expiration date of the IAM Server Certificate
+* `upload_date` is the date when the server certificate was uploaded
+* `certificate_body` is the public key certificate (PEM-encoded). This is useful when [configuring back-end instance authentication](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-create-https-ssl-load-balancer.html) policy for a load balancer
+* `certificate_chain` is the public key certificate chain (PEM-encoded) if it exists, empty otherwise
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an IAM server certificate using `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an IAM server certificate using `name`. For example:
+
+```console
+% terraform import aws_iam_server_certificate.example example
+```
+
+Import will read in the certificate body, certificate chain (if it exists), ID, name, path, and ARN. It will not retrieve the private key, which is not available through the AWS API.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_session_context.markdown b/website/docs/cdktf/python/d/iam_session_context.markdown
new file mode 100644
index 00000000000..9648a6b2cab
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_session_context.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_session_context"
+description: |-
+  Get information on the IAM source role of an STS assumed role
+---
+
+
+
+# Data Source: aws_iam_session_context
+
+This data source provides information on the IAM source role of an STS assumed role. For non-role ARNs, this data source simply passes the ARN through in `issuer_arn`.
+
+For some AWS resources, multiple types of principals are allowed in the same argument (e.g., IAM users and IAM roles). However, these arguments often do not allow assumed-role (i.e., STS, temporary credential) principals. Given an STS ARN, this data source provides the ARN for the source IAM role.
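+
+For example, given the assumed-role ARN `arn:aws:sts::123456789012:assumed-role/ExampleRole/ExampleSession` (a hypothetical value), `issuer_arn` would be the source role's ARN, `arn:aws:iam::123456789012:role/ExampleRole`, assuming the role has the default path.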
+
+## Example Usage
+
+### Basic Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_session_context import DataAwsIamSessionContext
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamSessionContext(self, "example",
+            arn="arn:aws:sts::123456789012:assumed-role/Audien-Heaven/MatyNoyes"
+        )
+```
+
+### Find the Terraform Runner's Source Role
+
+Combined with `aws_caller_identity`, you can get the current user's source IAM role ARN (`issuer_arn`) if you're using an assumed role. If you're not using an assumed role, the caller's (e.g., an IAM user's) ARN will simply be passed through. In environments where both IAM users and individuals using assumed roles need to apply the same configurations, this data source lets them share the same configuration seamlessly.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_session_context import DataAwsIamSessionContext
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        DataAwsIamSessionContext(self, "example",
+            arn=Token.as_string(current.arn)
+        )
+```
+
+## Argument Reference
+
+* `arn` - (Required) ARN for an assumed role.
+
+~> If `arn` is a non-role ARN, Terraform gives no error and `issuer_arn` will be equal to the `arn` value. For STS assumed-role ARNs, Terraform gives an error if the identified IAM role does not exist.
+
+## Attribute Reference
+
+~> With the exception of `issuer_arn`, the attributes will not be populated unless the `arn` corresponds to an STS assumed role.
+
+* `issuer_arn` - IAM source role ARN if `arn` corresponds to an STS assumed role. Otherwise, `issuer_arn` is equal to `arn`.
+* `issuer_id` - Unique identifier of the IAM role that issues the STS assumed role.
+* `issuer_name` - Name of the source role. Only available if `arn` corresponds to an STS assumed role.
+* `session_name` - Name of the STS session. Only available if `arn` corresponds to an STS assumed role.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_user.html.markdown b/website/docs/cdktf/python/d/iam_user.html.markdown
new file mode 100644
index 00000000000..d614172e9d5
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_user.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user"
+description: |-
+  Get information on an Amazon IAM user
+---
+
+
+
+# Data Source: aws_iam_user
+
+This data source can be used to fetch information about a specific
+IAM user. By using this data source, you can reference IAM user
+properties without having to hard code ARNs or unique IDs as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_user import DataAwsIamUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamUser(self, "example",
+            user_name="an_example_user_name"
+        )
+```
+
+## Argument Reference
+
+* `user_name` - (Required) Friendly IAM user name to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN assigned by AWS for this user.
+* `path` - Path in which this user was created.
+* `permissions_boundary` - The ARN of the policy that is used to set the permissions boundary for the user.
+* `user_id` - Unique ID assigned by AWS for this user.
+* `user_name` - Name associated with this user.
+* `tags` - Map of key-value pairs associated with the user.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/iam_user_ssh_key.html.markdown b/website/docs/cdktf/python/d/iam_user_ssh_key.html.markdown
new file mode 100644
index 00000000000..998d8b28ecd
--- /dev/null
+++ b/website/docs/cdktf/python/d/iam_user_ssh_key.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user_ssh_key"
+description: |-
+  Get information on an SSH public key associated with the specified IAM user.
+---
+
+
+
+# Data Source: aws_iam_user_ssh_key
+
+Use this data source to get information about an SSH public key associated with the specified IAM user.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_user_ssh_key import DataAwsIamUserSshKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIamUserSshKey(self, "example",
+            encoding="SSH",
+            ssh_public_key_id="APKARUZ32GUTKIGARLXE",
+            username="test-user"
+        )
+```
+
+## Argument Reference
+
+* `encoding` - (Required) Specifies the public key encoding format to use in the response. To retrieve the public key in ssh-rsa format, use `SSH`. To retrieve the public key in PEM format, use `PEM`.
+* `ssh_public_key_id` - (Required) Unique identifier for the SSH public key.
+* `username` - (Required) Name of the IAM user associated with the SSH public key.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `fingerprint` - MD5 message digest of the SSH public key.
+* `public_key` - SSH public key.
+* `status` - Status of the SSH public key. Active means that the key can be used for authentication with a CodeCommit repository. Inactive means that the key cannot be used.
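+
+Because `public_key` is returned in whichever `encoding` was requested, it can be handed directly to consumers of ssh-rsa or PEM material. A minimal sketch (not produced by `cdktf convert`; the stack and output names are illustrative):
+
+```python
+# A minimal sketch, assuming bindings generated by `cdktf get`.
+# Stack and output names are illustrative.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_iam_user_ssh_key import DataAwsIamUserSshKey
+
+class SshKeyStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        key = DataAwsIamUserSshKey(self, "example",
+            encoding="SSH",
+            ssh_public_key_id="APKARUZ32GUTKIGARLXE",
+            username="test-user"
+        )
+        # Expose the retrieved key material for other configurations to consume.
+        TerraformOutput(self, "ssh_public_key",
+            value=key.public_key
+        )
+```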
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/iam_users.html.markdown b/website/docs/cdktf/python/d/iam_users.html.markdown new file mode 100644 index 00000000000..e361b4b7fd5 --- /dev/null +++ b/website/docs/cdktf/python/d/iam_users.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_users" +description: |- + Get information about a set of IAM Users. +--- + + + +# Data Source: aws_iam_users + +Use this data source to get the ARNs and Names of IAM Users. + +## Example Usage + +### All users in an account + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_users import DataAwsIamUsers +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamUsers(self, "users") +``` + +### Users filtered by name regex + +Users whose username contains `abc` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_users import DataAwsIamUsers +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamUsers(self, "users", + name_regex=".*abc.*" + ) +``` + +### Users filtered by path prefix + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_users import DataAwsIamUsers +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsIamUsers(self, "users", + path_prefix="/custom-path" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name_regex` - (Optional) Regex string to apply to the IAM users list returned by AWS. This allows more advanced filtering not supported from the AWS API. This filtering is done locally on what AWS returns, and could have a performance impact if the result is large. Combine this with other options to narrow down the list AWS returns. +* `path_prefix` - (Optional) Path prefix for filtering the results. For example, the prefix `/division_abc/subdivision_xyz/` gets all users whose path starts with `/division_abc/subdivision_xyz/`. If it is not included, it defaults to a slash (`/`), listing all users. For more details, check out [list-users in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched IAM users. +* `names` - Set of Names of the matched IAM users. 
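+
+Beyond outputs, the matched names can drive resource iteration. A minimal sketch (not produced by `cdktf convert`; the attached policy ARN and all names are illustrative) that attaches a managed policy to every matched user with `for_each`:
+
+```python
+# A minimal sketch, assuming bindings generated by `cdktf get`.
+# The policy ARN and all names here are illustrative assumptions.
+from constructs import Construct
+from cdktf import TerraformIterator, TerraformStack, Token
+from imports.aws.data_aws_iam_users import DataAwsIamUsers
+from imports.aws.iam_user_policy_attachment import IamUserPolicyAttachment
+
+class AttachPolicyStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        users = DataAwsIamUsers(self, "users",
+            path_prefix="/custom-path"
+        )
+        # Iterate over the set of matched user names.
+        each_user = TerraformIterator.from_list(Token.as_any(users.names))
+        IamUserPolicyAttachment(self, "readonly",
+            for_each=each_user,
+            user=Token.as_string(each_user.value),
+            policy_arn="arn:aws:iam::aws:policy/ReadOnlyAccess"
+        )
+```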
+ +[1]: https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/list-users.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/identitystore_group.html.markdown b/website/docs/cdktf/python/d/identitystore_group.html.markdown new file mode 100644 index 00000000000..a8edd2adf4e --- /dev/null +++ b/website/docs/cdktf/python/d/identitystore_group.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "SSO Identity Store" +layout: "aws" +page_title: "AWS: aws_identitystore_group" +description: |- + Get information on an Identity Store Group +--- + + + +# Data Source: aws_identitystore_group + +Use this data source to get an Identity Store Group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, property_access, Token, TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_identitystore_group import DataAwsIdentitystoreGroup +from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsSsoadminInstances(self, "example") + data_aws_identitystore_group_example = DataAwsIdentitystoreGroup(self, "example_1", + alternate_identifier=DataAwsIdentitystoreGroupAlternateIdentifier( + unique_attribute=DataAwsIdentitystoreGroupAlternateIdentifierUniqueAttribute( + attribute_path="DisplayName", + attribute_value="ExampleGroup" + ) + ), + identity_store_id=Token.as_string( + property_access(Fn.tolist(example.identity_store_ids), ["0"])) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_identitystore_group_example.override_logical_id("example") + TerraformOutput(self, "group_id", + value=data_aws_identitystore_group_example.group_id + ) +``` + +## Argument Reference + +The following arguments are required: + +* `identity_store_id` - (Required) Identity Store ID associated with the Single Sign-On Instance. + +The following arguments are optional: + +* `alternate_identifier` (Optional) A unique identifier for the group that is not the primary identifier. Conflicts with `group_id`. Detailed below. +* `group_id` - (Optional) The identifier for a group in the Identity Store. + +-> Exactly one of the above arguments must be provided. + +### `alternate_identifier` Configuration Block + +The `alternate_identifier` configuration block supports the following arguments: + +* `external_id` - (Optional) Configuration block for filtering by the identifier issued by an external identity provider. Detailed below. +* `unique_attribute` - (Optional) An entity attribute that's unique to a specific entity. Detailed below. + +-> Exactly one of the above arguments must be provided. + +### `external_id` Configuration Block + +The `external_id` configuration block supports the following arguments: + +* `id` - (Required) The identifier issued to this resource by an external identity provider. +* `issuer` - (Required) The issuer for an external identifier. + +### `unique_attribute` Configuration Block + +The `unique_attribute` configuration block supports the following arguments: + +* `attribute_path` - (Required) Attribute path that is used to specify which attribute name to search. For example: `DisplayName`. 
Refer to the [Group data type](https://docs.aws.amazon.com/singlesignon/latest/IdentityStoreAPIReference/API_Group.html). +* `attribute_value` - (Required) Value for an attribute. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the group in the Identity Store. +* `description` - Description of the specified group. +* `display_name` - Group's display name value. +* `external_ids` - List of identifiers issued to this resource by an external identity provider. + * `id` - The identifier issued to this resource by an external identity provider. + * `issuer` - The issuer for an external identifier. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/identitystore_user.html.markdown b/website/docs/cdktf/python/d/identitystore_user.html.markdown new file mode 100644 index 00000000000..472f749623c --- /dev/null +++ b/website/docs/cdktf/python/d/identitystore_user.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "SSO Identity Store" +layout: "aws" +page_title: "AWS: aws_identitystore_user" +description: |- + Get information on an Identity Store User +--- + + + +# Data Source: aws_identitystore_user + +Use this data source to get an Identity Store User. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, property_access, Token, TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_identitystore_user import DataAwsIdentitystoreUser +from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsSsoadminInstances(self, "example") + data_aws_identitystore_user_example = DataAwsIdentitystoreUser(self, "example_1", + alternate_identifier=DataAwsIdentitystoreUserAlternateIdentifier( + unique_attribute=DataAwsIdentitystoreUserAlternateIdentifierUniqueAttribute( + attribute_path="UserName", + attribute_value="ExampleUser" + ) + ), + identity_store_id=Token.as_string( + property_access(Fn.tolist(example.identity_store_ids), ["0"])) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_identitystore_user_example.override_logical_id("example") + TerraformOutput(self, "user_id", + value=data_aws_identitystore_user_example.user_id + ) +``` + +## Argument Reference + +The following arguments are required: + +* `identity_store_id` - (Required) Identity Store ID associated with the Single Sign-On Instance. + +The following arguments are optional: + +* `alternate_identifier` (Optional) A unique identifier for a user or group that is not the primary identifier. Conflicts with `user_id`. Detailed below. +* `user_id` - (Optional) The identifier for a user in the Identity Store. + +-> Exactly one of the above arguments must be provided. + +### `alternate_identifier` Configuration Block + +The `alternate_identifier` configuration block supports the following arguments: + +* `external_id` - (Optional) Configuration block for filtering by the identifier issued by an external identity provider. Detailed below. +* `unique_attribute` - (Optional) An entity attribute that's unique to a specific entity. Detailed below. 
+
+-> Exactly one of the above arguments must be provided.
+
+### `external_id` Configuration Block
+
+The `external_id` configuration block supports the following arguments:
+
+* `id` - (Required) The identifier issued to this resource by an external identity provider.
+* `issuer` - (Required) The issuer for an external identifier.
+
+### `unique_attribute` Configuration Block
+
+The `unique_attribute` configuration block supports the following arguments:
+
+* `attribute_path` - (Required) Attribute path that is used to specify which attribute name to search. For example: `UserName`. Refer to the [User data type](https://docs.aws.amazon.com/singlesignon/latest/IdentityStoreAPIReference/API_User.html).
+* `attribute_value` - (Required) Value for an attribute.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the user in the Identity Store.
+* `addresses` - List of details about the user's address.
+    * `country` - The country that this address is in.
+    * `formatted` - The name that is typically displayed when the address is shown for display.
+    * `locality` - The address locality.
+    * `postal_code` - The postal code of the address.
+    * `primary` - When `true`, this is the primary address associated with the user.
+    * `region` - The region of the address.
+    * `street_address` - The street of the address.
+    * `type` - The type of address.
+* `display_name` - The name that is typically displayed when the user is referenced.
+* `emails` - List of details about the user's email.
+    * `primary` - When `true`, this is the primary email associated with the user.
+    * `type` - The type of email.
+    * `value` - The email address. This value must be unique across the identity store.
+* `external_ids` - List of identifiers issued to this resource by an external identity provider.
+    * `id` - The identifier issued to this resource by an external identity provider.
+    * `issuer` - The issuer for an external identifier.
+* `locale` - The user's geographical region or location.
+* `name` - Details about the user's full name.
+    * `family_name` - The family name of the user.
+    * `formatted` - The name that is typically displayed when the name is shown for display.
+    * `given_name` - The given name of the user.
+    * `honorific_prefix` - The honorific prefix of the user.
+    * `honorific_suffix` - The honorific suffix of the user.
+    * `middle_name` - The middle name of the user.
+* `nickname` - An alternate name for the user.
+* `phone_numbers` - List of details about the user's phone number.
+    * `primary` - When `true`, this is the primary phone number associated with the user.
+    * `type` - The type of phone number.
+    * `value` - The user's phone number.
+* `preferred_language` - The preferred language of the user.
+* `profile_url` - A URL that may be associated with the user.
+* `timezone` - The user's time zone.
+* `title` - The user's title.
+* `user_name` - User's user name value.
+* `user_type` - The user type.
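+
+The looked-up `user_id` is also what membership resources expect as a member identifier. A minimal sketch (not produced by `cdktf convert`; the `user_id` and `group_id` values are hypothetical placeholders) that adds the user to an existing group:
+
+```python
+# A minimal sketch, assuming bindings generated by `cdktf get`.
+# The user_id and group_id values are hypothetical placeholders.
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack, property_access
+from imports.aws.data_aws_identitystore_user import DataAwsIdentitystoreUser
+from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
+from imports.aws.identitystore_group_membership import IdentitystoreGroupMembership
+
+class MembershipStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        instances = DataAwsSsoadminInstances(self, "instances")
+        store_id = Token.as_string(
+            property_access(Fn.tolist(instances.identity_store_ids), ["0"]))
+        user = DataAwsIdentitystoreUser(self, "user",
+            identity_store_id=store_id,
+            user_id="93672529-aa15-4a86-9c43-EXAMPLE"
+        )
+        # Membership references the same Identity Store and the looked-up user.
+        IdentitystoreGroupMembership(self, "membership",
+            identity_store_id=store_id,
+            group_id="b8a1d340-8011-7071-3e50-EXAMPLE",
+            member_id=Token.as_string(user.user_id)
+        )
+```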
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_component.html.markdown b/website/docs/cdktf/python/d/imagebuilder_component.html.markdown new file mode 100644 index 00000000000..042b5f99181 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_component.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_component" +description: |- + Provides details about an Image Builder Component +--- + + + +# Data Source: aws_imagebuilder_component + +Provides details about an Image Builder Component. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_component import DataAwsImagebuilderComponent +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderComponent(self, "example", + arn="arn:aws:imagebuilder:us-west-2:aws:component/amazon-cloudwatch-agent-linux/1.0.0" + ) +``` + +## Argument Reference + +* `arn` - (Required) ARN of the component. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `change_description` - Change description of the component. +* `data` - Data of the component. +* `date_created` - Date the component was created. +* `description` - Description of the component. +* `encrypted` - Encryption status of the component. +* `kms_key_id` - ARN of the Key Management Service (KMS) Key used to encrypt the component. +* `name` - Name of the component. +* `owner` - Owner of the component. +* `platform` - Platform of the component. +* `supported_os_versions` - Operating Systems (OSes) supported by the component. +* `tags` - Key-value map of resource tags for the component. +* `type` - Type of the component. +* `version` - Version of the component. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_components.html.markdown b/website/docs/cdktf/python/d/imagebuilder_components.html.markdown new file mode 100644 index 00000000000..c82cf9d4fba --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_components.html.markdown @@ -0,0 +1,58 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_components" +description: |- + Get information on Image Builder Components. +--- + + + +# Data Source: aws_imagebuilder_components + +Use this data source to get the ARNs and names of Image Builder Components matching the specified criteria. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_components import DataAwsImagebuilderComponents +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderComponents(self, "example", + filter=[DataAwsImagebuilderComponentsFilter( + name="platform", + values=["Linux"] + ) + ], + owner="Self" + ) +``` + +## Argument Reference + +* `owner` - (Optional) Owner of the image recipes. 
Valid values are `Self`, `Shared` and `Amazon`. Defaults to `Self`. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListComponents API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListComponents.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Components. +* `names` - Set of names of the matched Image Builder Components. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_container_recipe.html.markdown b/website/docs/cdktf/python/d/imagebuilder_container_recipe.html.markdown new file mode 100644 index 00000000000..0dd2a32ae22 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_container_recipe.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_container_recipe" +description: |- + Provides details about an Image Builder Container Recipe +--- + + + +# Data Source: aws_imagebuilder_container_recipe + +Provides details about an Image Builder Container Recipe. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_container_recipe import DataAwsImagebuilderContainerRecipe +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderContainerRecipe(self, "example", + arn="arn:aws:imagebuilder:us-east-1:aws:container-recipe/example/1.0.0" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `arn` - (Required) ARN of the container recipe. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `component` - List of objects with components for the container recipe. + * `component_arn` - ARN of the Image Builder Component. + * `parameter` - Set of parameters that are used to configure the component. + * `name` - Name of the component parameter. + * `value` - Value of the component parameter. +* `container_type` - Type of the container. +* `date_created` - Date the container recipe was created. +* `description` - Description of the container recipe. +* `dockerfile_template_data` - Dockerfile template used to build the image. +* `encrypted` - Flag that indicates if the target container is encrypted. +* `instance_configuration` - List of objects with instance configurations for building and testing container images. + * `block_device_mapping` - Set of objects with block device mappings for the instance configuration. + * `device_name` - Name of the device. For example, `/dev/sda` or `/dev/xvdb`. + * `ebs` - Single list of object with Elastic Block Storage (EBS) block device mapping settings. + * `delete_on_termination` - Whether to delete the volume on termination.
Defaults to unset, which is the value inherited from the parent image. + * `encrypted` - Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image. + * `iops` - Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume. + * `kms_key_id` - ARN of the Key Management Service (KMS) Key for encryption. + * `snapshot_id` - Identifier of the EC2 Volume Snapshot. + * `throughput` - For GP3 volumes only. The throughput in MiB/s that the volume supports. + * `volume_size` - Size of the volume, in GiB. + * `volume_type` - Type of the volume. For example, `gp2` or `io2`. + * `no_device` - Whether to remove a mapping from the parent image. + * `virtual_name` - Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0. + * `image` - AMI ID of the base image for container build and test instance. +* `kms_key_id` - KMS key used to encrypt the container image. +* `name` - Name of the container recipe. +* `owner` - Owner of the container recipe. +* `parent_image` - Base image for the container recipe. +* `platform` - Platform of the container recipe. +* `tags` - Key-value map of resource tags for the container recipe. +* `target_repository` - Destination repository for the container image. + * `repository_name` - Name of the container repository where the output container image is stored. The name is prefixed by the repository location. + * `service` - Service in which this image is registered. +* `version` - Version of the container recipe. +* `working_directory` - Working directory used during build and test workflows. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_container_recipes.html.markdown b/website/docs/cdktf/python/d/imagebuilder_container_recipes.html.markdown new file mode 100644 index 00000000000..1ac6cabd559 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_container_recipes.html.markdown @@ -0,0 +1,58 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_container_recipes" +description: |- + Get information on Image Builder Container Recipes. +--- + + + +# Data Source: aws_imagebuilder_container_recipes + +Use this data source to get the ARNs and names of Image Builder Container Recipes matching the specified criteria. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_container_recipes import DataAwsImagebuilderContainerRecipes +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderContainerRecipes(self, "example", + filter=[DataAwsImagebuilderContainerRecipesFilter( + name="platform", + values=["Linux"] + ) + ], + owner="Self" + ) +``` + +## Argument Reference + +* `owner` - (Optional) Owner of the container recipes. Valid values are `Self`, `Shared` and `Amazon`. Defaults to `Self`. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. 
Valid values can be found in the [Image Builder ListContainerRecipes API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListContainerRecipes.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Container Recipes. +* `names` - Set of names of the matched Image Builder Container Recipes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_distribution_configuration.html.markdown b/website/docs/cdktf/python/d/imagebuilder_distribution_configuration.html.markdown new file mode 100644 index 00000000000..6cb9713d208 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_distribution_configuration.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_distribution_configuration" +description: |- + Provides details about an Image Builder Distribution Configuration +--- + + + +# Data Source: aws_imagebuilder_distribution_configuration + +Provides details about an Image Builder Distribution Configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_distribution_configuration import DataAwsImagebuilderDistributionConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderDistributionConfiguration(self, "example", + arn="arn:aws:imagebuilder:us-west-2:aws:distribution-configuration/example" + ) +``` + +## Argument Reference + +* `arn` - (Required) ARN of the distribution configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `date_created` - Date the distribution configuration was created. +* `date_updated` - Date the distribution configuration was updated. +* `description` - Description of the distribution configuration. +* `distribution` - Set of distributions. + * `ami_distribution_configuration` - Nested list of AMI distribution configuration. + * `ami_tags` - Key-value map of tags to apply to distributed AMI. + * `description` - Description to apply to distributed AMI. + * `kms_key_id` - ARN of Key Management Service (KMS) Key to encrypt AMI. + * `launch_permission` - Nested list of EC2 launch permissions. + * `organization_arns` - Set of AWS Organization ARNs. + * `organizational_unit_arns` - Set of AWS Organizational Unit ARNs. + * `user_groups` - Set of EC2 launch permission user groups. + * `user_ids` - Set of AWS Account identifiers. + * `target_account_ids` - Set of target AWS Account identifiers. + * `container_distribution_configuration` - Nested list of container distribution configurations. + * `container_tags` - Set of tags that are attached to the container distribution configuration. + * `description` - Description of the container distribution configuration. + * `target_repository` - Set of destination repositories for the container distribution configuration. 
+ * `repository_name` - Name of the container repository where the output container image is stored. + * `service` - Service in which the image is registered. + * `fast_launch_configuration` - Nested list of Windows faster-launching configurations to use for AMI distribution. + * `account_id` - The owner account ID for the fast-launch enabled Windows AMI. + * `enabled` - A Boolean that represents the current state of faster launching for the Windows AMI. + * `launch_template` - Nested list of launch templates that the fast-launch enabled Windows AMI uses when it launches Windows instances to create pre-provisioned snapshots. + * `launch_template_id` - The ID of the launch template to use for faster launching for a Windows AMI. + * `launch_template_name` - The name of the launch template to use for faster launching for a Windows AMI. + * `launch_template_version` - The version of the launch template to use for faster launching for a Windows AMI. + * `max_parallel_launches` - The maximum number of parallel instances that are launched for creating resources. + * `snapshot_configuration` - Nested list of configurations for managing the number of snapshots that are created from pre-provisioned instances for the Windows AMI when faster launching is enabled. + * `target_resource_count` - The number of pre-provisioned snapshots to keep on hand for a fast-launch enabled Windows AMI. + * `launch_template_configuration` - Nested list of launch template configurations. + * `default` - Whether the specified Amazon EC2 launch template is set as the default launch template. + * `launch_template_id` - ID of the Amazon EC2 launch template. + * `account_id` - The account ID that this configuration applies to. + * `license_configuration_arns` - Set of Amazon Resource Names (ARNs) of License Manager License Configurations. + * `region` - AWS Region of distribution. +* `name` - Name of the distribution configuration. +* `tags` - Key-value map of resource tags for the distribution configuration. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_distribution_configurations.html.markdown b/website/docs/cdktf/python/d/imagebuilder_distribution_configurations.html.markdown new file mode 100644 index 00000000000..38729184800 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_distribution_configurations.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_distribution_configurations" +description: |- + Get information on Image Builder Distribution Configurations. +--- + + + +# Data Source: aws_imagebuilder_distribution_configurations + +Use this data source to get the ARNs and names of Image Builder Distribution Configurations matching the specified criteria. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_imagebuilder_distribution_configurations import DataAwsImagebuilderDistributionConfigurations +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderDistributionConfigurations(self, "example", + filter=[DataAwsImagebuilderDistributionConfigurationsFilter( + name="name", + values=["example"] + ) + ] + ) +``` + +## Argument Reference + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListDistributionConfigurations API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListDistributionConfigurations.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Distribution Configurations. +* `names` - Set of names of the matched Image Builder Distribution Configurations. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_image.html.markdown b/website/docs/cdktf/python/d/imagebuilder_image.html.markdown new file mode 100644 index 00000000000..65aaa806caa --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_image.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image" +description: |- + Provides details about an Image Builder Image +--- + + + +# Data Source: aws_imagebuilder_image + +Provides details about an Image Builder Image. + +## Example Usage + +### Latest + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_image import DataAwsImagebuilderImage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderImage(self, "example", + arn="arn:aws:imagebuilder:us-west-2:aws:image/amazon-linux-2-x86/x.x.x" + ) +``` + +## Argument Reference + +* `arn` - (Required) ARN of the image. The suffix can either be specified with wildcards (`x.x.x`) to fetch the latest build version or a full build version (e.g., `2020.11.26/1`) to fetch an exact version. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `build_version_arn` - Build version ARN of the image. This will always have the `#.#.#/#` suffix. +* `container_recipe_arn` - ARN of the container recipe. +* `date_created` - Date the image was created. +* `distribution_configuration_arn` - ARN of the Image Builder Distribution Configuration. +* `enhanced_image_metadata_enabled` - Whether additional information about the image being created is collected. +* `image_recipe_arn` - ARN of the image recipe. +* `image_tests_configuration` - List of an object with image tests configuration. + * `image_tests_enabled` - Whether image tests are enabled. + * `timeout_minutes` - Number of minutes before image tests time out.
+* `infrastructure_configuration_arn` - ARN of the Image Builder Infrastructure Configuration. +* `name` - Name of the image. +* `platform` - Platform of the image. +* `os_version` - Operating System version of the image. +* `output_resources` - List of objects with resources created by the image. + * `amis` - Set of objects with each Amazon Machine Image (AMI) created. + * `account_id` - Account identifier of the AMI. + * `description` - Description of the AMI. + * `image` - Identifier of the AMI. + * `name` - Name of the AMI. + * `region` - Region of the AMI. + * `containers` - Set of objects with each container image created and stored in the output repository. + * `image_uris` - Set of URIs for created containers. + * `region` - Region of the container image. +* `tags` - Key-value map of resource tags for the image. +* `version` - Version of the image. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_image_pipeline.html.markdown b/website/docs/cdktf/python/d/imagebuilder_image_pipeline.html.markdown new file mode 100644 index 00000000000..b51022d4b0f --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_image_pipeline.html.markdown @@ -0,0 +1,63 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_pipeline" +description: |- + Provides details about an Image Builder Image Pipeline +--- + + + +# Data Source: aws_imagebuilder_image_pipeline + +Provides details about an Image Builder Image Pipeline. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_image_pipeline import DataAwsImagebuilderImagePipeline +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderImagePipeline(self, "example", + arn="arn:aws:imagebuilder:us-west-2:aws:image-pipeline/example" + ) +``` + +## Argument Reference + +* `arn` - (Required) ARN of the image pipeline. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `container_recipe_arn` - ARN of the container recipe. +* `date_created` - Date the image pipeline was created. +* `date_last_run` - Date the image pipeline was last run. +* `date_next_run` - Date the image pipeline will run next. +* `date_updated` - Date the image pipeline was updated. +* `description` - Description of the image pipeline. +* `distribution_configuration_arn` - ARN of the Image Builder Distribution Configuration. +* `enhanced_image_metadata_enabled` - Whether additional information about the image being created is collected. +* `image_recipe_arn` - ARN of the image recipe. +* `image_tests_configuration` - List of an object with image tests configuration. + * `image_tests_enabled` - Whether image tests are enabled. + * `timeout_minutes` - Number of minutes before image tests time out. +* `infrastructure_configuration_arn` - ARN of the Image Builder Infrastructure Configuration. +* `name` - Name of the image pipeline. +* `platform` - Platform of the image pipeline. +* `schedule` - List of an object with schedule settings. + * `pipeline_execution_start_condition` - Condition when the pipeline should trigger a new image build. 
+ * `schedule_expression` - Cron expression of how often the pipeline start condition is evaluated. +* `status` - Status of the image pipeline. +* `tags` - Key-value map of resource tags for the image pipeline. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_image_pipelines.html.markdown b/website/docs/cdktf/python/d/imagebuilder_image_pipelines.html.markdown new file mode 100644 index 00000000000..66f4de22f59 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_image_pipelines.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_pipelines" +description: |- + Get information on Image Builder Image Pipelines. +--- + + + +# Data Source: aws_imagebuilder_image_pipelines + +Use this data source to get the ARNs and names of Image Builder Image Pipelines matching the specified criteria. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_image_pipelines import DataAwsImagebuilderImagePipelines +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderImagePipelines(self, "example", + filter=[DataAwsImagebuilderImagePipelinesFilter( + name="name", + values=["example"] + ) + ] + ) +``` + +## Argument Reference + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListImagePipelines API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListImagePipelines.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Image Pipelines. +* `names` - Set of names of the matched Image Builder Image Pipelines. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_image_recipe.html.markdown b/website/docs/cdktf/python/d/imagebuilder_image_recipe.html.markdown new file mode 100644 index 00000000000..8516112de34 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_image_recipe.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_recipe" +description: |- + Provides details about an Image Builder Image Recipe +--- + + + +# Data Source: aws_imagebuilder_image_recipe + +Provides details about an Image Builder Image Recipe. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_imagebuilder_image_recipe import DataAwsImagebuilderImageRecipe +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderImageRecipe(self, "example", + arn="arn:aws:imagebuilder:us-east-1:aws:image-recipe/example/1.0.0" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `arn` - (Required) ARN of the image recipe. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `block_device_mapping` - Set of objects with block device mappings for the image recipe. + * `device_name` - Name of the device. For example, `/dev/sda` or `/dev/xvdb`. + * `ebs` - Single list of object with Elastic Block Storage (EBS) block device mapping settings. + * `delete_on_termination` - Whether to delete the volume on termination. Defaults to unset, which is the value inherited from the parent image. + * `encrypted` - Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image. + * `iops` - Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume. + * `kms_key_id` - ARN of the Key Management Service (KMS) Key for encryption. + * `snapshot_id` - Identifier of the EC2 Volume Snapshot. + * `throughput` - For GP3 volumes only. The throughput in MiB/s that the volume supports. + * `volume_size` - Size of the volume, in GiB. + * `volume_type` - Type of the volume. For example, `gp2` or `io2`. + * `no_device` - Whether to remove a mapping from the parent image. + * `virtual_name` - Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0. +* `component` - List of objects with components for the image recipe. + * `component_arn` - ARN of the Image Builder Component. + * `parameter` - Set of parameters that are used to configure the component. + * `name` - Name of the component parameter. + * `value` - Value of the component parameter. +* `date_created` - Date the image recipe was created. +* `description` - Description of the image recipe. +* `name` - Name of the image recipe. +* `owner` - Owner of the image recipe. +* `parent_image` - Base image of the image recipe. +* `platform` - Platform of the image recipe. +* `tags` - Key-value map of resource tags for the image recipe. +* `user_data_base64` - Base64 encoded contents of user data. Commands or a command script to run when build instance is launched. +* `version` - Version of the image recipe. +* `working_directory` - Working directory used during build and test workflows. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_image_recipes.html.markdown b/website/docs/cdktf/python/d/imagebuilder_image_recipes.html.markdown new file mode 100644 index 00000000000..e5d7f8ba8ca --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_image_recipes.html.markdown @@ -0,0 +1,58 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_recipes" +description: |- + Get information on Image Builder Image Recipes. +--- + + + +# Data Source: aws_imagebuilder_image_recipes + +Use this data source to get the ARNs and names of Image Builder Image Recipes matching the specified criteria. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_image_recipes import DataAwsImagebuilderImageRecipes +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderImageRecipes(self, "example", + filter=[DataAwsImagebuilderImageRecipesFilter( + name="platform", + values=["Linux"] + ) + ], + owner="Self" + ) +``` + +## Argument Reference + +* `owner` - (Optional) Owner of the image recipes. Valid values are `Self`, `Shared` and `Amazon`. Defaults to `Self`. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListImageRecipes API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListImageRecipes.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Image Recipes. +* `names` - Set of names of the matched Image Builder Image Recipes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_infrastructure_configuration.html.markdown b/website/docs/cdktf/python/d/imagebuilder_infrastructure_configuration.html.markdown new file mode 100644 index 00000000000..17907971c95 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_infrastructure_configuration.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_infrastructure_configuration" +description: |- + Provides details about an Image Builder Infrastructure Configuration +--- + + + +# Data Source: aws_imagebuilder_infrastructure_configuration + +Provides details about an Image Builder Infrastructure Configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_infrastructure_configuration import DataAwsImagebuilderInfrastructureConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderInfrastructureConfiguration(self, "example", + arn="arn:aws:imagebuilder:us-west-2:aws:infrastructure-configuration/example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `arn` - (Required) ARN of the infrastructure configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `date_created` - Date the infrastructure configuration was created. +* `date_updated` - Date the infrastructure configuration was updated. +* `description` - Description of the infrastructure configuration.
+* `instance_metadata_options` - Nested list of instance metadata options for the HTTP requests that pipeline builds use to launch EC2 build and test instances. + * `http_put_response_hop_limit` - Number of hops that an instance can traverse to reach its destination. + * `http_tokens` - Whether a signed token is required for instance metadata retrieval requests. +* `instance_profile_name` - Name of the IAM Instance Profile associated with the configuration. +* `instance_types` - Set of EC2 Instance Types associated with the configuration. +* `key_pair` - Name of the EC2 Key Pair associated with the configuration. +* `logging` - Nested list of logging settings. + * `s3_logs` - Nested list of S3 logs settings. + * `s3_bucket_name` - Name of the S3 Bucket for logging. + * `s3_key_prefix` - Key prefix for S3 Bucket logging. +* `name` - Name of the infrastructure configuration. +* `resource_tags` - Key-value map of resource tags for the infrastructure created by the infrastructure configuration. +* `security_group_ids` - Set of EC2 Security Group identifiers associated with the configuration. +* `sns_topic_arn` - ARN of the SNS Topic associated with the configuration. +* `subnet_id` - Identifier of the EC2 Subnet associated with the configuration. +* `tags` - Key-value map of resource tags for the infrastructure configuration. +* `terminate_instance_on_failure` - Whether instances are terminated on failure. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/imagebuilder_infrastructure_configurations.html.markdown b/website/docs/cdktf/python/d/imagebuilder_infrastructure_configurations.html.markdown new file mode 100644 index 00000000000..8fa8f7b6803 --- /dev/null +++ b/website/docs/cdktf/python/d/imagebuilder_infrastructure_configurations.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_infrastructure_configurations" +description: |- + Get information on Image Builder Infrastructure Configurations. +--- + + + +# Data Source: aws_imagebuilder_infrastructure_configurations + +Use this data source to get the ARNs and names of Image Builder Infrastructure Configurations matching the specified criteria. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_imagebuilder_infrastructure_configurations import DataAwsImagebuilderInfrastructureConfigurations +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsImagebuilderInfrastructureConfigurations(self, "example", + filter=[DataAwsImagebuilderInfrastructureConfigurationsFilter( + name="name", + values=["example"] + ) + ] + ) +``` + +## Argument Reference + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListInfrastructureConfigurations API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListInfrastructureConfigurations.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
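
Because results are selected when any given value matches, a single `filter` block can match several configuration names at once, and the matched sets (see the attributes below) can be consumed directly. A minimal sketch, assuming provider bindings generated by `cdktf get`; the configuration names are placeholders:

```python
from constructs import Construct
from cdktf import TerraformOutput, TerraformStack
from imports.aws.data_aws_imagebuilder_infrastructure_configurations import DataAwsImagebuilderInfrastructureConfigurations

class InfraConfigLookupStack(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # One filter block with two values acts as an OR match on the name field.
        # Struct arguments may be passed as plain dicts instead of the generated
        # *Filter classes used in the example above.
        by_name = DataAwsImagebuilderInfrastructureConfigurations(self, "by_name",
            filter=[{
                "name": "name",
                "values": ["example", "example-arm64"]  # placeholder names
            }]
        )
        TerraformOutput(self, "matched_arns", value=by_name.arns)
        TerraformOutput(self, "matched_names", value=by_name.names)
```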
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Infrastructure Configurations. +* `names` - Set of names of the matched Image Builder Infrastructure Configurations. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/inspector_rules_packages.html.markdown b/website/docs/cdktf/python/d/inspector_rules_packages.html.markdown new file mode 100644 index 00000000000..909cdde9ad3 --- /dev/null +++ b/website/docs/cdktf/python/d/inspector_rules_packages.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Inspector Classic" +layout: "aws" +page_title: "AWS: aws_inspector_rules_packages" +description: |- + Provides a list of Amazon Inspector Classic Rules packages which can be used by Amazon Inspector Classic. +--- + + + +# Data Source: aws_inspector_rules_packages + +The Amazon Inspector Classic Rules Packages data source allows access to the list of AWS +Inspector Rules Packages which can be used by Amazon Inspector Classic within the region +configured in the provider. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_inspector_rules_packages import DataAwsInspectorRulesPackages +from imports.aws.inspector_assessment_target import InspectorAssessmentTarget +from imports.aws.inspector_assessment_template import InspectorAssessmentTemplate +from imports.aws.inspector_resource_group import InspectorResourceGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + group = InspectorResourceGroup(self, "group", + tags={ + "test": "test" + } + ) + rules = DataAwsInspectorRulesPackages(self, "rules") + assessment = InspectorAssessmentTarget(self, "assessment", + name="test", + resource_group_arn=group.arn + ) + aws_inspector_assessment_template_assessment = + InspectorAssessmentTemplate(self, "assessment_3", + duration=Token.as_number("60"), + name="Test", + rules_package_arns=Token.as_list(rules.arns), + target_arn=assessment.arn + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_inspector_assessment_template_assessment.override_logical_id("assessment") +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `arns` - List of the Amazon Inspector Classic Rules Packages arns available in the AWS region. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/instance.html.markdown b/website/docs/cdktf/python/d/instance.html.markdown new file mode 100644 index 00000000000..63fc27db071 --- /dev/null +++ b/website/docs/cdktf/python/d/instance.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_instance" +description: |- + Get information on an Amazon EC2 Instance. +--- + + + +# Data Source: aws_instance + +Use this data source to get the ID of an Amazon EC2 Instance for use in other resources. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_instance import DataAwsInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsInstance(self, "foo", + filter=[DataAwsInstanceFilter( + name="image-id", + values=["ami-xxxxxxxx"] + ), DataAwsInstanceFilter( + name="tag:Name", + values=["instance-name-tag"] + ) + ], + instance_id="i-instanceid" + ) +``` + +## Argument Reference + +* `instance_id` - (Optional) Specify the exact Instance ID with which to populate the data source. + +* `instance_tags` - (Optional) Map of tags, each pair of which must +exactly match a pair on the desired Instance. + +* `filter` - (Optional) One or more name/value pairs to use as filters. There are +several valid keys; for a full reference, check out +[describe-instances in the AWS CLI reference][1]. + +* `get_password_data` - (Optional) If true, wait for password data to become available and retrieve it. Useful for getting the administrator password for instances running Microsoft Windows. The password data is exported to the `password_data` attribute. See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information. + +* `get_user_data` - (Optional) Retrieve Base64 encoded User Data contents into the `user_data_base64` attribute. A SHA-1 hash of the User Data contents will always be present in the `user_data` attribute. Defaults to `false`. + +~> **NOTE:** At least one of `filter`, `instance_tags`, or `instance_id` must be specified. + +~> **NOTE:** If anything other than a single match is returned by the search, +Terraform will fail. Ensure that your search is specific enough to return +a single Instance ID only. + +## Attribute Reference + +`id` is set to the ID of the found Instance. In addition, the following attributes +are exported: + +~> **NOTE:** Some values are not always set and may not be available for +interpolation. + +* `ami` - ID of the AMI used to launch the instance. +* `arn` - ARN of the instance. +* `associate_public_ip_address` - Whether the Instance is associated with a public IP address (Boolean). +* `availability_zone` - Availability zone of the Instance. +* `credit_specification` - Credit specification of the Instance. +* `disable_api_stop` - Whether [EC2 Instance Stop Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Stop_Start.html#Using_StopProtection) is enabled (Boolean). +* `disable_api_termination` - Whether [EC2 Instance Termination Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingDisableAPITermination) is enabled (Boolean). +* `ebs_block_device` - EBS block device mappings of the Instance. + * `delete_on_termination` - If the EBS volume will be deleted on termination. + * `device_name` - Physical name of the device. + * `encrypted` - If the EBS volume is encrypted. + * `iops` - `0` if the EBS volume is not a provisioned IOPS volume; otherwise, the supported IOPS count. + * `kms_key_arn` - ARN of KMS Key, if EBS volume is encrypted. + * `snapshot_id` - ID of the snapshot. + * `throughput` - Throughput of the volume, in MiB/s. + * `volume_size` - Size of the volume, in GiB.
+ * `volume_type` - Volume type. +* `ebs_optimized` - Whether the Instance is EBS optimized or not (Boolean). +* `enclave_options` - Enclave options of the instance. + * `enabled` - Whether Nitro Enclaves are enabled. +* `ephemeral_block_device` - Ephemeral block device mappings of the Instance. + * `device_name` - Physical name of the device. + * `no_device` - Whether the specified device included in the device mapping was suppressed or not (Boolean). + * `virtual_name` - Virtual device name. +* `host_id` - ID of the dedicated host the instance will be assigned to. +* `host_resource_group_arn` - ARN of the host resource group the instance is associated with. +* `iam_instance_profile` - Name of the instance profile associated with the Instance. +* `instance_state` - State of the instance. One of: `pending`, `running`, `shutting-down`, `terminated`, `stopping`, `stopped`. See [Instance Lifecycle](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-lifecycle.html) for more information. +* `instance_type` - Type of the Instance. +* `ipv6_addresses` - IPv6 addresses associated to the Instance, if applicable. **NOTE**: Unlike the IPv4 address, this doesn't change if you attach an EIP to the instance. +* `key_name` - Key name of the Instance. +* `maintenance_options` - Maintenance and recovery options for the instance. + * `auto_recovery` - Automatic recovery behavior of the instance. +* `metadata_options` - Metadata options of the Instance. + * `http_endpoint` - State of the metadata service: `enabled`, `disabled`. + * `http_tokens` - If session tokens are required: `optional`, `required`. + * `http_put_response_hop_limit` - Desired HTTP PUT response hop limit for instance metadata requests. + * `instance_metadata_tags` - If access to instance tags is allowed from the metadata service: `enabled`, `disabled`. +* `monitoring` - Whether detailed monitoring is enabled or disabled for the Instance (Boolean). +* `network_interface_id` - ID of the network interface that was created with the Instance. +* `outpost_arn` - ARN of the Outpost. +* `password_data` - Base-64 encoded encrypted password data for the instance. Useful for getting the administrator password for instances running Microsoft Windows. This attribute is only exported if `get_password_data` is true. See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information. +* `placement_group` - Placement group of the Instance. +* `placement_partition_number` - Number of the partition the instance is in. +* `private_dns` - Private DNS name assigned to the Instance. Can only be used inside the Amazon EC2, and only available if you've enabled DNS hostnames for your VPC. +* `private_dns_name_options` - Options for the instance hostname. + * `enable_resource_name_dns_aaaa_record` - Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records. + * `enable_resource_name_dns_a_record` - Indicates whether to respond to DNS queries for instance hostnames with DNS A records. + * `hostname_type` - Type of hostname for EC2 instances. +* `private_ip` - Private IP address assigned to the Instance. +* `public_dns` - Public DNS name assigned to the Instance. For EC2-VPC, this is only available if you've enabled DNS hostnames for your VPC. +* `public_ip` - Public IP address assigned to the Instance, if applicable. 
**NOTE**: If you are using an [`aws_eip`](/docs/providers/aws/r/eip.html) with your instance, you should refer to the EIP's address directly and not use `public_ip`, as this field will change after the EIP is attached. +* `root_block_device` - Root block device mappings of the Instance. + * `device_name` - Physical name of the device. + * `delete_on_termination` - If the root block device will be deleted on termination. + * `encrypted` - If the EBS volume is encrypted. + * `iops` - `0` if the volume is not a provisioned IOPS volume; otherwise, the supported IOPS count. + * `kms_key_arn` - ARN of KMS Key, if EBS volume is encrypted. + * `throughput` - Throughput of the volume, in MiB/s. + * `volume_size` - Size of the volume, in GiB. + * `volume_type` - Type of the volume. +* `secondary_private_ips` - Secondary private IPv4 addresses assigned to the instance's primary network interface (eth0) in a VPC. +* `security_groups` - Associated security groups. +* `source_dest_check` - Whether the network interface performs source/destination checking (Boolean). +* `subnet_id` - VPC subnet ID. +* `tags` - Map of tags assigned to the Instance. +* `tenancy` - Tenancy of the instance: `dedicated`, `default`, `host`. +* `user_data` - SHA-1 hash of User Data supplied to the Instance. +* `user_data_base64` - Base64 encoded contents of User Data supplied to the Instance. Valid UTF-8 contents can be decoded with the [`base64decode` function](https://www.terraform.io/docs/configuration/functions/base64decode.html). This attribute is only exported if `get_user_data` is true. +* `vpc_security_group_ids` - Associated security groups in a non-default VPC. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + +[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/instances.html.markdown b/website/docs/cdktf/python/d/instances.html.markdown new file mode 100644 index 00000000000..d8e46c1cba6 --- /dev/null +++ b/website/docs/cdktf/python/d/instances.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_instances" +description: |- + Get information on Amazon EC2 instances. +--- + + + +# Data Source: aws_instances + +Use this data source to get IDs or IPs of Amazon EC2 instances to be referenced elsewhere, +e.g., to allow easier migration from another management solution +or to make it easier for an operator to connect through bastion host(s). + +-> **Note:** It's a best practice to expose instance details via [outputs](https://www.terraform.io/docs/configuration/outputs.html) +and [remote state](https://www.terraform.io/docs/state/remote.html) and +**use [`terraform_remote_state`](https://www.terraform.io/docs/providers/terraform/d/remote_state.html) +data source instead** if you manage referenced instances via Terraform. + +~> **Note:** It's strongly discouraged to use this data source for querying ephemeral +instances (e.g., managed via autoscaling group), as the output may change at any time +and you'd need to re-run `apply` every time an instance comes up or dies.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformCount, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_instances import DataAwsInstances +from imports.aws.eip import Eip +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = DataAwsInstances(self, "test", + filter=[DataAwsInstancesFilter( + name="instance.group-id", + values=["sg-12345678"] + ) + ], + instance_state_names=["running", "stopped"], + instance_tags={ + "Role": "HardWorker" + } + ) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + test_count = TerraformCount.of(Token.as_number(Fn.length_of(test.ids))) + aws_eip_test = Eip(self, "test_1", + instance=Token.as_string(property_access(test.ids, [test_count.index])), + count=test_count + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_eip_test.override_logical_id("test") +``` + +## Argument Reference + +* `instance_tags` - (Optional) Map of tags, each pair of which must +exactly match a pair on desired instances. + +* `instance_state_names` - (Optional) List of instance states that should be applicable to the desired instances. The permitted values are: `pending, running, shutting-down, stopped, stopping, terminated`. The default value is `running`. + +* `filter` - (Optional) One or more name/value pairs to use as filters. There are +several valid keys, for a full reference, check out +[describe-instances in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `ids` - IDs of instances found through the filter +* `private_ips` - Private IP addresses of instances found through the filter +* `public_ips` - Public IP addresses of instances found through the filter +* `ipv6_addresses` - IPv6 addresses of instances found through the filter + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + +[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/internet_gateway.html.markdown b/website/docs/cdktf/python/d/internet_gateway.html.markdown new file mode 100644 index 00000000000..4d240f7b708 --- /dev/null +++ b/website/docs/cdktf/python/d/internet_gateway.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_internet_gateway" +description: |- + Provides details about a specific Internet Gateway +--- + + + +# Data Source: aws_internet_gateway + +`aws_internet_gateway` provides details about a specific Internet Gateway. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_internet_gateway import DataAwsInternetGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + vpc_id = TerraformVariable(self, "vpc_id") + DataAwsInternetGateway(self, "default", + filter=[DataAwsInternetGatewayFilter( + name="attachment.vpc-id", + values=[vpc_id.string_value] + ) + ] + ) +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +Internet Gateway in the current region. The given filters must match exactly one +Internet Gateway whose data will be exported as attributes. + +* `internet_gateway_id` - (Optional) ID of the specific Internet Gateway to retrieve. + +* `tags` - (Optional) Map of tags, each pair of which must exactly match + a pair on the desired Internet Gateway. + +* `filter` - (Optional) Custom filter block as described below. + +More complex filters can be expressed using one or more `filter` sub-blocks, +which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInternetGateways.html). + +* `values` - (Required) Set of values that are accepted for the given field. + An Internet Gateway will be selected if any one of the given values matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Internet Gateway. + +All of the argument attributes except the `filter` block are also exported as +result attributes. This data source will complete the data by populating +any fields that are not included in the configuration with the data for +the selected Internet Gateway. + +`attachments` are also exported, when relevant attachments exist. +Each attachment supports the following: + +* `owner_id` - ID of the AWS account that owns the internet gateway. +* `state` - Current state of the attachment between the gateway and the VPC. Present only if a VPC is attached. +* `vpc_id` - ID of an attached VPC. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/iot_endpoint.html.markdown b/website/docs/cdktf/python/d/iot_endpoint.html.markdown new file mode 100644 index 00000000000..0fbf87191ad --- /dev/null +++ b/website/docs/cdktf/python/d/iot_endpoint.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_endpoint" +description: |- + Get the unique IoT endpoint +--- + + + +# Data Source: aws_iot_endpoint + +Returns a unique endpoint specific to the AWS account making the call.
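
In its simplest form the data source takes no arguments and returns the account's default data endpoint; passing `endpoint_type` selects a specific endpoint. A minimal sketch that surfaces the ATS data endpoint as a stack output, shown before the fuller Kubernetes example below (assumes provider bindings generated by `cdktf get`):

```python
from constructs import Construct
from cdktf import TerraformOutput, TerraformStack
from imports.aws.data_aws_iot_endpoint import DataAwsIotEndpoint

class IotEndpointStack(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # Request the ATS (Amazon Trust Services) data endpoint explicitly.
        example = DataAwsIotEndpoint(self, "example",
            endpoint_type="iot:Data-ATS"
        )
        TerraformOutput(self, "iot_endpoint_address", value=example.endpoint_address)
```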
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iot_endpoint import DataAwsIotEndpoint +from imports.kubernetes.pod import Pod +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # The following providers are missing schema information and might need manual adjustments to synthesize correctly: kubernetes. + # For a more precise conversion please use the --provider flag in convert. + example = DataAwsIotEndpoint(self, "example") + Pod(self, "agent", + metadata=[{ + "name": "my-device" + } + ], + spec=[{ + "container": [{ + "env": [{ + "name": "IOT_ENDPOINT", + "value": example.endpoint_address + } + ], + "image": "gcr.io/my-project/image-name", + "name": "image-name" + } + ] + } + ] + ) +``` + +## Argument Reference + +* `endpoint_type` - (Optional) Endpoint type. Valid values: `iot:CredentialProvider`, `iot:Data`, `iot:Data-ATS`, `iot:Jobs`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `endpoint_address` - Endpoint based on `endpoint_type`: + * No `endpoint_type`: Either `iot:Data` or `iot:Data-ATS` [depending on region](https://aws.amazon.com/blogs/iot/aws-iot-core-ats-endpoints/) + * `iot:CredentialProvider`: `IDENTIFIER.credentials.iot.REGION.amazonaws.com` + * `iot:Data`: `IDENTIFIER.iot.REGION.amazonaws.com` + * `iot:Data-ATS`: `IDENTIFIER-ats.iot.REGION.amazonaws.com` + * `iot:Jobs`: `IDENTIFIER.jobs.iot.REGION.amazonaws.com` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ip_ranges.html.markdown b/website/docs/cdktf/python/d/ip_ranges.html.markdown new file mode 100644 index 00000000000..34cc0b48e77 --- /dev/null +++ b/website/docs/cdktf/python/d/ip_ranges.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_ip_ranges" +description: |- + Get information on AWS IP ranges. +--- + + + +# Data Source: aws_ip_ranges + +Use this data source to get the IP ranges of various AWS products and services. For more information about the contents of this data source and required JSON syntax if referencing a custom URL, see the [AWS IP Address Ranges documentation][1]. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ip_ranges import DataAwsIpRanges
+from imports.aws.security_group import SecurityGroup, SecurityGroupIngress
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        european_ec2 = DataAwsIpRanges(self, "european_ec2",
+            regions=["eu-west-1", "eu-central-1"],
+            services=["ec2"]
+        )
+        SecurityGroup(self, "from_europe",
+            ingress=[SecurityGroupIngress(
+                cidr_blocks=Token.as_list(european_ec2.cidr_blocks),
+                from_port=Token.as_number("443"),
+                ipv6_cidr_blocks=Token.as_list(european_ec2.ipv6_cidr_blocks),
+                protocol="tcp",
+                to_port=Token.as_number("443")
+            )
+            ],
+            name="from_europe",
+            tags={
+                "CreateDate": Token.as_string(european_ec2.create_date),
+                "SyncToken": Token.as_string(european_ec2.sync_token)
+            }
+        )
+```
+
+## Argument Reference
+
+* `regions` - (Optional) Filter IP ranges by regions (or include all regions, if
+omitted). Valid items are `global` (for `cloudfront`) as well as all AWS regions
+(e.g., `eu-central-1`).
+
+* `services` - (Required) Filter IP ranges by services. Valid items are `amazon`
+(for amazon.com), `amazon_connect`, `api_gateway`, `cloud9`, `cloudfront`,
+`codebuild`, `dynamodb`, `ec2`, `ec2_instance_connect`, `globalaccelerator`,
+`route53`, `route53_healthchecks`, `s3` and `workspaces_gateways`. See the
+[`service` attribute][2] documentation for other possible values.
+
+~> **NOTE:** If the specified combination of regions and services does not yield any
+CIDR blocks, Terraform will fail.
+
+* `url` - (Optional) Custom URL for source JSON file. Syntax must match [AWS IP Address Ranges documentation][1]. Defaults to `https://ip-ranges.amazonaws.com/ip-ranges.json`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `cidr_blocks` - Lexically ordered list of CIDR blocks.
+* `ipv6_cidr_blocks` - Lexically ordered list of IPv6 CIDR blocks.
+* `create_date` - Publication time of the IP ranges (e.g., `2016-08-03-23-46-05`).
+* `sync_token` - Publication time of the IP ranges, in Unix epoch time format
+  (e.g., `1470267965`).
+
+[1]: https://docs.aws.amazon.com/general/latest/gr/aws-ip-ranges.html
+[2]: https://docs.aws.amazon.com/general/latest/gr/aws-ip-ranges.html#aws-ip-syntax
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ivs_stream_key.html.markdown b/website/docs/cdktf/python/d/ivs_stream_key.html.markdown
new file mode 100644
index 00000000000..69a96300e90
--- /dev/null
+++ b/website/docs/cdktf/python/d/ivs_stream_key.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "IVS (Interactive Video)"
+layout: "aws"
+page_title: "AWS: aws_ivs_stream_key"
+description: |-
+  Terraform data source for managing an AWS IVS (Interactive Video) Stream Key.
+---
+
+
+# Data Source: aws_ivs_stream_key
+
+Terraform data source for managing an AWS IVS (Interactive Video) Stream Key.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
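+# Fetch the stream key associated with the given IVS channel ARN.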
+#
+from imports.aws.data_aws_ivs_stream_key import DataAwsIvsStreamKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsIvsStreamKey(self, "example",
+            channel_arn="arn:aws:ivs:us-west-2:326937407773:channel/0Y1lcs4U7jk5"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `channel_arn` - (Required) ARN of the Channel.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Stream Key.
+* `tags` - Map of tags assigned to the resource.
+* `value` - Stream Key value.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kendra_experience.html.markdown b/website/docs/cdktf/python/d/kendra_experience.html.markdown
new file mode 100644
index 00000000000..310fe59b807
--- /dev/null
+++ b/website/docs/cdktf/python/d/kendra_experience.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_experience"
+description: |-
+  Provides details about a specific Amazon Kendra Experience.
+---
+
+
+# Data Source: aws_kendra_experience
+
+Provides details about a specific Amazon Kendra Experience.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kendra_experience import DataAwsKendraExperience
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKendraExperience(self, "example",
+            experience_id="87654321-1234-4321-4321-321987654321",
+            index_id="12345678-1234-1234-1234-123456789123"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `experience_id` - (Required) Identifier of the Experience.
+* `index_id` - (Required) Identifier of the index that contains the Experience.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Experience.
+* `configuration` - Block that specifies the configuration information for your Amazon Kendra Experience. This includes `content_source_configuration`, which specifies the data source IDs and/or FAQ IDs, and `user_identity_configuration`, which specifies the user or group information to grant access to your Amazon Kendra Experience. Documented below.
+* `created_at` - Unix datetime that the Experience was created.
+* `description` - Description of the Experience.
+* `endpoints` - Shows the endpoint URLs for your Amazon Kendra Experiences. The URLs are unique and fully hosted by AWS. Documented below.
+* `error_message` - Reason why your Amazon Kendra Experience could not be properly processed.
+* `id` - Unique identifiers of the Experience and index separated by a slash (`/`).
+* `name` - Name of the Experience.
+* `role_arn` - Shows the ARN of a role with permission to access the `Query` API, `QuerySuggestions` API, `SubmitFeedback` API, and AWS SSO that stores your user and group information.
+* `status` - Current processing status of your Amazon Kendra Experience. When the status is `ACTIVE`, your Amazon Kendra Experience is ready to use. When the status is `FAILED`, the `error_message` field contains the reason that this failed.
+* `updated_at` - Date and time that the Experience was last updated.
+
+The `configuration` block supports the following attributes:
+
+* `content_source_configuration` - The identifiers of your data sources and FAQs. This is the content you want to use for your Amazon Kendra Experience. Documented below.
+* `user_identity_configuration` - The AWS SSO field name that contains the identifiers of your users, such as their emails. Documented below.
+
+The `content_source_configuration` block supports the following attributes:
+
+* `data_source_ids` - Identifiers of the data sources you want to use for your Amazon Kendra Experience.
+* `direct_put_content` - Whether to use documents you indexed directly using the `BatchPutDocument` API.
+* `faq_ids` - Identifiers of the FAQs that you want to use for your Amazon Kendra Experience.
+
+The `user_identity_configuration` block supports the following attributes:
+
+* `identity_attribute_name` - The AWS SSO field name that contains the identifiers of your users, such as their emails.
+
+The `endpoints` block supports the following attributes:
+
+* `endpoint` - Endpoint of your Amazon Kendra Experience.
+* `endpoint_type` - Type of endpoint for your Amazon Kendra Experience.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kendra_faq.html.markdown b/website/docs/cdktf/python/d/kendra_faq.html.markdown
new file mode 100644
index 00000000000..14dc9063f29
--- /dev/null
+++ b/website/docs/cdktf/python/d/kendra_faq.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_faq"
+description: |-
+  Provides details about a specific Amazon Kendra Faq.
+---
+
+
+# Data Source: aws_kendra_faq
+
+Provides details about a specific Amazon Kendra Faq.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kendra_faq import DataAwsKendraFaq
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKendraFaq(self, "test",
+            faq_id="87654321-1234-4321-4321-321987654321",
+            index_id="12345678-1234-1234-1234-123456789123"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `faq_id` - (Required) Identifier of the FAQ.
+* `index_id` - (Required) Identifier of the index that contains the FAQ.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the FAQ.
+* `created_at` - Unix datetime that the FAQ was created.
+* `description` - Description of the FAQ.
+* `error_message` - When the `status` field value is `FAILED`, this contains a message that explains why.
+* `file_format` - File format used by the input files for the FAQ. Valid Values are `CSV`, `CSV_WITH_HEADER`, `JSON`.
+* `id` - Unique identifiers of the FAQ and index separated by a slash (`/`).
+* `language_code` - Code for a language. This shows a supported language for the FAQ document. For more information on supported languages, including their codes, see [Adding documents in languages other than English](https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html).
+* `name` - Name of the FAQ.
+* `role_arn` - ARN of a role with permission to access the S3 bucket that contains the FAQs. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). +* `s3_path` - S3 location of the FAQ input data. Detailed below. +* `status` - Status of the FAQ. It is ready to use when the status is ACTIVE. +* `updated_at` - Date and time that the FAQ was last updated. +* `tags` - Metadata that helps organize the FAQs you create. + +The `s3_path` configuration block supports the following attributes: + +* `bucket` - Name of the S3 bucket that contains the file. +* `key` - Name of the file. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/kendra_index.html.markdown b/website/docs/cdktf/python/d/kendra_index.html.markdown new file mode 100644 index 00000000000..3d0e998b528 --- /dev/null +++ b/website/docs/cdktf/python/d/kendra_index.html.markdown @@ -0,0 +1,132 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_index" +description: |- + Provides details about a specific Amazon Kendra Index. +--- + + + +# Data Source: aws_kendra_index + +Provides details about a specific Amazon Kendra Index. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_kendra_index import DataAwsKendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsKendraIndex(self, "example", + id="12345678-1234-1234-1234-123456789123" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `id` - (Required) Returns information on a specific Index by id. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Index. +* `capacity_units` - Block that sets the number of additional document storage and query capacity units that should be used by the index. Documented below. +* `created_at` - Unix datetime that the index was created. +* `description` - Description of the Index. +* `document_metadata_configuration_updates` - One or more blocks that specify the configuration settings for any metadata applied to the documents in the index. Documented below. +* `edition` - Amazon Kendra edition for the index. +* `error_message` - When the Status field value is `FAILED`, this contains a message that explains why. +* `id` - Identifier of the Index. +* `index_statistics` - Block that provides information about the number of FAQ questions and answers and the number of text documents indexed. Documented below. +* `name` - Name of the Index. +* `role_arn` - An AWS Identity and Access Management (IAM) role that gives Amazon Kendra permissions to access your Amazon CloudWatch logs and metrics. This is also the role you use when you call the `BatchPutDocument` API to index documents from an Amazon S3 bucket. +* `server_side_encryption_configuration` - A block that specifies the identifier of the AWS KMS customer managed key (CMK) that's used to encrypt data indexed by Amazon Kendra. Amazon Kendra doesn't support asymmetric CMKs. Documented below. +* `status` - Current status of the index. When the value is `ACTIVE`, the index is ready for use. 
If the Status field value is `FAILED`, the `error_message` field contains a message that explains why. +* `updated_at` - Unix datetime that the index was last updated. +* `user_context_policy` - User context policy. Valid values are `ATTRIBUTE_FILTER` or `USER_TOKEN`. For more information, refer to [UserContextPolicy](https://docs.aws.amazon.com/kendra/latest/APIReference/API_CreateIndex.html#kendra-CreateIndex-request-UserContextPolicy). +* `user_group_resolution_configuration` - A block that enables fetching access levels of groups and users from an AWS Single Sign-On identity source. Documented below. +* `user_token_configurations` - A block that specifies the user token configuration. Documented below. +* `tags` - Metadata that helps organize the Indices you create. + +A `capacity_units` block supports the following attributes: + +* `query_capacity_units` - The amount of extra query capacity for an index and GetQuerySuggestions capacity. For more information, refer to [QueryCapacityUnits](https://docs.aws.amazon.com/kendra/latest/APIReference/API_CapacityUnitsConfiguration.html#Kendra-Type-CapacityUnitsConfiguration-QueryCapacityUnits). +* `storage_capacity_units` - The amount of extra storage capacity for an index. A single capacity unit provides 30 GB of storage space or 100,000 documents, whichever is reached first. Minimum value of 0. + +A `document_metadata_configuration_updates` block supports the following attributes: + +* `name` - Name of the index field. Minimum length of 1. Maximum length of 30. +* `relevance` - Block that provides manual tuning parameters to determine how the field affects the search results. Documented below. +* `search` - Block that provides information about how the field is used during a search. Documented below. +* `type` - Data type of the index field. Valid values are `STRING_VALUE`, `STRING_LIST_VALUE`, `LONG_VALUE`, `DATE_VALUE`. + +A `relevance` block supports the following attributes: + +* `duration` - Time period that the boost applies to. For more information, refer to [Duration](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-Duration). +* `freshness` - How "fresh" a document is. For more information, refer to [Freshness](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-Freshness). +* `importance` - Relative importance of the field in the search. Larger numbers provide more of a boost than smaller numbers. Minimum value of 1. Maximum value of 10. +* `rank_order` - Determines how values should be interpreted. For more information, refer to [RankOrder](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-RankOrder). +* `values_importance_map` - A list of values that should be given a different boost when they appear in the result list. For more information, refer to [ValueImportanceMap](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-ValueImportanceMap). + +A `search` block supports the following attributes: + +* `displayable` - Determines whether the field is returned in the query response. The default is `true`. +* `facetable` - Whether the field can be used to create search facets, a count of results for each value in the field. The default is `false`. +* `searchable` - Determines whether the field is used in the search. If the Searchable field is true, you can use relevance tuning to manually tune how Amazon Kendra weights the field in the search. 
The default is `true` for `string` fields and `false` for `number` and `date` fields.
+* `sortable` - Determines whether the field can be used to sort the results of a query. If you specify sorting on a field that does not have Sortable set to true, Amazon Kendra returns an exception. The default is `false`.
+
+An `index_statistics` block supports the following attributes:
+
+* `faq_statistics` - Block that specifies the number of question and answer topics in the index. Documented below.
+* `text_document_statistics` - A block that specifies the number of text documents indexed.
+
+A `faq_statistics` block supports the following attributes:
+
+* `indexed_question_answers_count` - The total number of FAQ questions and answers contained in the index.
+
+A `text_document_statistics` block supports the following attributes:
+
+* `indexed_text_bytes` - Total size, in bytes, of the indexed documents.
+* `indexed_text_documents_count` - The number of text documents indexed.
+
+A `server_side_encryption_configuration` block supports the following attributes:
+
+* `kms_key_id` - Identifier of the AWS KMS customer master key (CMK). Amazon Kendra doesn't support asymmetric CMKs.
+
+A `user_group_resolution_configuration` block supports the following attributes:
+
+* `user_group_resolution_mode` - The identity store provider (mode) you want to use to fetch access levels of groups and users. AWS Single Sign-On is currently the only available mode. Your users and groups must exist in an AWS SSO identity source in order to use this mode. Valid Values are `AWS_SSO` or `NONE`.
+
+A `user_token_configurations` block supports the following attributes:
+
+* `json_token_type_configuration` - A block that specifies the information about the JSON token type configuration.
+* `jwt_token_type_configuration` - A block that specifies the information about the JWT token type configuration.
+
+A `json_token_type_configuration` block supports the following attributes:
+
+* `group_attribute_field` - The group attribute field.
+* `user_name_attribute_field` - The user name attribute field.
+
+A `jwt_token_type_configuration` block supports the following attributes:
+
+* `claim_regex` - Regular expression that identifies the claim.
+* `group_attribute_field` - The group attribute field.
+* `issuer` - Issuer of the token.
+* `key_location` - Location of the key. Valid values are `URL` or `SECRET_MANAGER`.
+* `secrets_manager_arn` - ARN of the secret.
+* `url` - Signing key URL.
+* `user_name_attribute_field` - The user name attribute field.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kendra_query_suggestions_block_list.html.markdown b/website/docs/cdktf/python/d/kendra_query_suggestions_block_list.html.markdown
new file mode 100644
index 00000000000..1426230e413
--- /dev/null
+++ b/website/docs/cdktf/python/d/kendra_query_suggestions_block_list.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_query_suggestions_block_list"
+description: |-
+  Provides details about a specific Amazon Kendra block list used for query suggestions for an index.
+---
+
+
+# Data Source: aws_kendra_query_suggestions_block_list
+
+Provides details about a specific Amazon Kendra block list used for query suggestions for an index.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_kendra_query_suggestions_block_list import DataAwsKendraQuerySuggestionsBlockList +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsKendraQuerySuggestionsBlockList(self, "example", + index_id="12345678-1234-1234-1234-123456789123", + query_suggestions_block_list_id="87654321-1234-4321-4321-321987654321" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `index_id` - (Required) Identifier of the index that contains the block list. +* `query_suggestions_block_list_id` - (Required) Identifier of the block list. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the block list. +* `created_at` - Date-time a block list was created. +* `description` - Description for the block list. +* `error_message` - Error message containing details if there are issues processing the block list. +* `file_size_bytes` - Current size of the block list text file in S3. +* `id` - Unique identifiers of the block list and index separated by a slash (`/`). +* `item_count` - Current number of valid, non-empty words or phrases in the block list text file. +* `name` - Name of the block list. +* `role_arn` - ARN of a role with permission to access the S3 bucket that contains the block list. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). +* `source_s3_path` - S3 location of the block list input data. Detailed below. +* `status` - Current status of the block list. When the value is `ACTIVE`, the block list is ready for use. +* `updated_at` - Date and time that the block list was last updated. +* `tags` - Metadata that helps organize the block list you create. + +The `source_s3_path` configuration block supports the following attributes: + +* `bucket` - Name of the S3 bucket that contains the file. +* `key` - Name of the file. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/kendra_thesaurus.html.markdown b/website/docs/cdktf/python/d/kendra_thesaurus.html.markdown new file mode 100644 index 00000000000..30a346de1ef --- /dev/null +++ b/website/docs/cdktf/python/d/kendra_thesaurus.html.markdown @@ -0,0 +1,66 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_thesaurus" +description: |- + Provides details about a specific Amazon Kendra Thesaurus. +--- + + + +# Data Source: aws_kendra_thesaurus + +Provides details about a specific Amazon Kendra Thesaurus. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
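+# Look up the thesaurus by its own identifier and the identifier of its index.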
+#
+from imports.aws.data_aws_kendra_thesaurus import DataAwsKendraThesaurus
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKendraThesaurus(self, "example",
+            index_id="12345678-1234-1234-1234-123456789123",
+            thesaurus_id="87654321-1234-4321-4321-321987654321"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `index_id` - (Required) Identifier of the index that contains the Thesaurus.
+* `thesaurus_id` - (Required) Identifier of the Thesaurus.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Thesaurus.
+* `created_at` - Unix datetime that the Thesaurus was created.
+* `description` - Description of the Thesaurus.
+* `error_message` - When the `status` field value is `FAILED`, this contains a message that explains why.
+* `file_size_bytes` - Size of the Thesaurus file in bytes.
+* `id` - Unique identifiers of the Thesaurus and index separated by a slash (`/`).
+* `name` - Name of the Thesaurus.
+* `role_arn` - ARN of a role with permission to access the S3 bucket that contains the Thesaurus. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html).
+* `source_s3_path` - S3 location of the Thesaurus input data. Detailed below.
+* `status` - Status of the Thesaurus. It is ready to use when the status is `ACTIVE`.
+* `synonym_rule_count` - Number of synonym rules in the Thesaurus file.
+* `term_count` - Number of unique terms in the Thesaurus file. For example, given the synonyms `a,b,c` and `a=>d`, the term count would be 4.
+* `updated_at` - Date and time that the Thesaurus was last updated.
+* `tags` - Metadata that helps organize the Thesaurus you create.
+
+The `source_s3_path` configuration block supports the following attributes:
+
+* `bucket` - Name of the S3 bucket that contains the file.
+* `key` - Name of the file.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/key_pair.html.markdown b/website/docs/cdktf/python/d/key_pair.html.markdown
new file mode 100644
index 00000000000..36e21d7343b
--- /dev/null
+++ b/website/docs/cdktf/python/d/key_pair.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_key_pair"
+description: |-
+  Provides details about a specific EC2 Key Pair.
+---
+
+
+# Data Source: aws_key_pair
+
+Use this data source to get information about a specific EC2 Key Pair.
+
+## Example Usage
+
+The following example shows how to get an EC2 Key Pair including the public key material from its name.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
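+# include_public_key=True asks EC2 to also return the public key material,
+# which is omitted by default.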
+#
+from imports.aws.data_aws_key_pair import DataAwsKeyPair, DataAwsKeyPairFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsKeyPair(self, "example",
+            filter=[DataAwsKeyPairFilter(
+                name="tag:Component",
+                values=["web"]
+            )
+            ],
+            include_public_key=True,
+            key_name="test"
+        )
+        TerraformOutput(self, "fingerprint",
+            value=example.fingerprint
+        )
+        TerraformOutput(self, "id",
+            value=example.id
+        )
+        TerraformOutput(self, "name",
+            value=example.key_name
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+Key Pairs. The given filters must match exactly one Key Pair
+whose data will be exported as attributes.
+
+* `key_pair_id` - (Optional) Key Pair ID.
+* `key_name` - (Optional) Key Pair name.
+* `include_public_key` - (Optional) Whether to include the public key material in the response.
+* `filter` - (Optional) Custom filter block as described below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeKeyPairs API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeKeyPairs.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the Key Pair.
+* `arn` - ARN of the Key Pair.
+* `create_time` - Timestamp for when the key pair was created in ISO 8601 format.
+* `fingerprint` - SHA-1 digest of the DER encoded private key.
+* `key_type` - Type of key pair.
+* `public_key` - Public key material.
+* `tags` - Any tags assigned to the Key Pair.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kinesis_firehose_delivery_stream.html.markdown b/website/docs/cdktf/python/d/kinesis_firehose_delivery_stream.html.markdown
new file mode 100644
index 00000000000..62695cd4904
--- /dev/null
+++ b/website/docs/cdktf/python/d/kinesis_firehose_delivery_stream.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "Kinesis Firehose"
+layout: "aws"
+page_title: "AWS: aws_kinesis_firehose_delivery_stream"
+description: |-
+  Provides an AWS Kinesis Firehose Delivery Stream data source.
+---
+
+
+# Data Source: aws_kinesis_firehose_delivery_stream
+
+Use this data source to get information about a Kinesis Firehose Delivery Stream for use in other resources.
+
+For more details, see the [Amazon Kinesis Firehose Documentation][1].
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kinesis_firehose_delivery_stream import DataAwsKinesisFirehoseDeliveryStream
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKinesisFirehoseDeliveryStream(self, "stream",
+            name="stream-name"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the Kinesis Firehose Delivery Stream.
+
+## Attribute Reference
+
+`id` is set to the ARN of the Kinesis Firehose Delivery Stream. In addition, the following attributes
+are exported:
+
+* `arn` - ARN of the Kinesis Firehose Delivery Stream (same as `id`).
+
+[1]: https://aws.amazon.com/documentation/firehose/
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kinesis_stream.html.markdown b/website/docs/cdktf/python/d/kinesis_stream.html.markdown
new file mode 100644
index 00000000000..e43a683793c
--- /dev/null
+++ b/website/docs/cdktf/python/d/kinesis_stream.html.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "Kinesis"
+layout: "aws"
+page_title: "AWS: aws_kinesis_stream"
+description: |-
+  Provides a Kinesis Stream data source.
+---
+
+
+# Data Source: aws_kinesis_stream
+
+Use this data source to get information about a Kinesis Stream for use in other
+resources.
+
+For more details, see the [Amazon Kinesis Documentation][1].
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kinesis_stream import DataAwsKinesisStream
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKinesisStream(self, "stream",
+            name="stream-name"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the Kinesis Stream.
+
+## Attribute Reference
+
+`id` is set to the ARN of the Kinesis Stream. In addition, the following attributes
+are exported:
+
+* `arn` - ARN of the Kinesis Stream (same as `id`).
+* `name` - Name of the Kinesis Stream.
+* `creation_timestamp` - Approximate UNIX timestamp that the stream was created.
+* `status` - Current status of the stream. The stream status is one of CREATING, DELETING, ACTIVE, or UPDATING.
+* `retention_period` - Length of time (in hours) data records are accessible after they are added to the stream.
+* `open_shards` - List of shard ids in the OPEN state. See [Shard State][2] for more.
+* `closed_shards` - List of shard ids in the CLOSED state. See [Shard State][2] for more.
+* `shard_level_metrics` - List of shard-level CloudWatch metrics which are enabled for the stream. See [Monitoring with CloudWatch][3] for more.
+* `stream_mode_details` - [Capacity mode][4] of the data stream. Detailed below.
+* `tags` - Map of tags assigned to the stream.
+
+### stream_mode_details Configuration Block
+
+* `stream_mode` - Capacity mode of the stream. Either `ON_DEMAND` or `PROVISIONED`.
+
+[1]: https://aws.amazon.com/documentation/kinesis/
+[2]: https://docs.aws.amazon.com/streams/latest/dev/kinesis-using-sdk-java-after-resharding.html#kinesis-using-sdk-java-resharding-data-routing
+[3]: https://docs.aws.amazon.com/streams/latest/dev/monitoring-with-cloudwatch.html
+[4]: https://docs.aws.amazon.com/streams/latest/dev/how-do-i-size-a-stream.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kinesis_stream_consumer.html.markdown b/website/docs/cdktf/python/d/kinesis_stream_consumer.html.markdown
new file mode 100644
index 00000000000..dc364ec0746
--- /dev/null
+++ b/website/docs/cdktf/python/d/kinesis_stream_consumer.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Kinesis"
+layout: "aws"
+page_title: "AWS: aws_kinesis_stream_consumer"
+description: |-
+  Provides details about a Kinesis Stream Consumer.
+---
+
+
+# Data Source: aws_kinesis_stream_consumer
+
+Provides details about a Kinesis Stream Consumer.
+
+For more details, see the [Amazon Kinesis Stream Consumer Documentation][1].
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kinesis_stream_consumer import DataAwsKinesisStreamConsumer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKinesisStreamConsumer(self, "example",
+            name="example-consumer",
+            stream_arn=Token.as_string(aws_kinesis_stream_example.arn)
+        )
+```
+
+## Argument Reference
+
+* `arn` - (Optional) ARN of the stream consumer.
+* `name` - (Optional) Name of the stream consumer.
+* `stream_arn` - (Required) ARN of the data stream the consumer is registered with.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `creation_timestamp` - Approximate timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of when the stream consumer was created.
+* `id` - ARN of the stream consumer.
+* `status` - Current status of the stream consumer.
+
+[1]: https://docs.aws.amazon.com/streams/latest/dev/amazon-kinesis-consumers.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kms_alias.html.markdown b/website/docs/cdktf/python/d/kms_alias.html.markdown
new file mode 100644
index 00000000000..e296e6f3cc9
--- /dev/null
+++ b/website/docs/cdktf/python/d/kms_alias.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_alias"
+description: |-
+  Get information on an AWS Key Management Service (KMS) Alias
+---
+
+
+# Data Source: aws_kms_alias
+
+Use this data source to get the ARN of a KMS key alias.
+By using this data source, you can reference a key alias
+without having to hard code the ARN as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kms_alias import DataAwsKmsAlias
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKmsAlias(self, "s3",
+            name="alias/aws/s3"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Display name of the alias. The name must start with the word "alias" followed by a forward slash (`alias/`).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the key alias.
+* `id` - Amazon Resource Name (ARN) of the key alias.
+* `target_key_id` - Key identifier pointed to by the alias.
+* `target_key_arn` - ARN pointed to by the alias.
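+
+For example, the key behind an alias can be handed to other configuration through `target_key_arn`. A minimal sketch (the stack and output names below are illustrative, not part of the provider API):
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_kms_alias import DataAwsKmsAlias
+class KmsAliasLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        s3 = DataAwsKmsAlias(self, "s3",
+            name="alias/aws/s3"
+        )
+        # ARN of the key the alias currently points to.
+        TerraformOutput(self, "target_key_arn",
+            value=s3.target_key_arn
+        )
+```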
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kms_ciphertext.html.markdown b/website/docs/cdktf/python/d/kms_ciphertext.html.markdown
new file mode 100644
index 00000000000..a9626117771
--- /dev/null
+++ b/website/docs/cdktf/python/d/kms_ciphertext.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_ciphertext"
+description: |-
+  Provides ciphertext encrypted using a KMS key
+---
+
+
+# Data Source: aws_kms_ciphertext
+
+The KMS ciphertext data source allows you to encrypt plaintext into ciphertext
+by using an AWS KMS customer master key. The value returned by this data source
+changes on every apply. For a stable ciphertext value, see the [`aws_kms_ciphertext`
+resource](/docs/providers/aws/r/kms_ciphertext.html).
+
+~> **Note:** All arguments, including the plaintext, will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kms_ciphertext import DataAwsKmsCiphertext
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        oauth_config = KmsKey(self, "oauth_config",
+            description="oauth config",
+            is_enabled=True
+        )
+        DataAwsKmsCiphertext(self, "oauth",
+            key_id=oauth_config.key_id,
+            plaintext="{\n \"client_id\": \"e587dbae22222f55da22\",\n \"client_secret\": \"8289575d00000ace55e1815ec13673955721b8a5\"\n}\n\n"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `plaintext` - (Required) Data to be encrypted. Note that this may show up in logs, and it will be stored in the state file.
+* `key_id` - (Required) Globally unique key ID for the customer master key.
+* `context` - (Optional) An optional mapping that makes up the encryption context.
+
+## Attribute Reference
+
+All of the argument attributes are also exported as result attributes.
+
+* `id` - Globally unique key ID for the customer master key.
+* `ciphertext_blob` - Base64 encoded ciphertext.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kms_custom_key_store.html.markdown b/website/docs/cdktf/python/d/kms_custom_key_store.html.markdown
new file mode 100644
index 00000000000..8c2cc825ae8
--- /dev/null
+++ b/website/docs/cdktf/python/d/kms_custom_key_store.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_custom_key_store"
+description: |-
+  Get information on an AWS Key Management Service (KMS) Custom Key Store
+---
+
+
+# Data Source: aws_kms_custom_key_store
+
+Use this data source to get metadata about a KMS custom key store.
+By using this data source, you can reference a KMS custom key store
+without having to hard code the ID as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
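+# Resolve the custom key store by its user-specified friendly name;
+# custom_key_store_id could be used instead.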
+#
+from imports.aws.data_aws_kms_custom_key_store import DataAwsKmsCustomKeyStore
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKmsCustomKeyStore(self, "keystore",
+            custom_key_store_name="my_cloudhsm"
+        )
+```
+
+## Argument Reference
+
+* `custom_key_store_id` - (Optional) The ID for the custom key store.
+* `custom_key_store_name` - (Optional) The user-specified friendly name for the custom key store.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The ID for the custom key store.
+* `cloudhsm_cluster_id` - ID for the CloudHSM cluster that is associated with the custom key store.
+* `connection_state` - Indicates whether the custom key store is connected to its CloudHSM cluster.
+* `creation_date` - The date and time when the custom key store was created.
+* `trust_anchor_certificate` - The trust anchor certificate of the associated CloudHSM cluster.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kms_key.html.markdown b/website/docs/cdktf/python/d/kms_key.html.markdown
new file mode 100644
index 00000000000..125c3e6c507
--- /dev/null
+++ b/website/docs/cdktf/python/d/kms_key.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_key"
+description: |-
+  Get information on an AWS Key Management Service (KMS) Key
+---
+
+
+# aws_kms_key
+
+Use this data source to get detailed information about
+the specified KMS Key with flexible key id input.
+This can be useful to reference a key alias
+without having to hard code the ARN as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kms_key import DataAwsKmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKmsKey(self, "by_alias",
+            key_id="alias/my-key"
+        )
+        DataAwsKmsKey(self, "by_alias_arn",
+            key_id="arn:aws:kms:us-east-1:111122223333:alias/my-key"
+        )
+        DataAwsKmsKey(self, "by_id",
+            key_id="1234abcd-12ab-34cd-56ef-1234567890ab"
+        )
+        DataAwsKmsKey(self, "by_key_arn",
+            key_id="arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab"
+        )
+```
+
+## Argument Reference
+
+* `key_id` - (Required) Key identifier which can be one of the following formats:
+    * Key ID. E.g.: `1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Key ARN. E.g.: `arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Alias name. E.g.: `alias/my-key`
+    * Alias ARN. E.g.: `arn:aws:kms:us-east-1:111122223333:alias/my-key`
+* `grant_tokens` - (Optional) List of grant tokens.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id`: The globally unique identifier for the key
+* `arn`: The ARN of the key
+* `aws_account_id`: The twelve-digit account ID of the AWS account that owns the key
+* `cloud_hsm_cluster_id`: The cluster ID of the AWS CloudHSM cluster that contains the key material for the KMS key.
+* `creation_date`: The date and time when the key was created
+* `custom_key_store_id`: A unique identifier for the custom key store that contains the KMS key.
+* `customer_master_key_spec`: Specifies whether the key contains a symmetric key or an asymmetric key pair and the encryption algorithms or signing algorithms that the key supports
+* `deletion_date`: The date and time after which AWS KMS deletes the key. This value is present only when `key_state` is `PendingDeletion`, otherwise this value is 0
+* `description`: The description of the key.
+* `enabled`: Specifies whether the key is enabled. When `key_state` is `Enabled` this value is true, otherwise it is false
+* `expiration_model`: Specifies whether the Key's key material expires. This value is present only when `origin` is `EXTERNAL`, otherwise this value is empty
+* `key_manager`: The key's manager
+* `key_spec`: Describes the type of key material in the KMS key.
+* `key_state`: The state of the key
+* `key_usage`: Specifies the intended use of the key
+* `multi_region`: Indicates whether the KMS key is a multi-Region (`true`) or regional (`false`) key.
+* `multi_region_configuration`: Lists the primary and replica keys in the same multi-Region key. Present only when the value of `multi_region` is `true`.
+* `origin`: When this value is `AWS_KMS`, AWS KMS created the key material. When this value is `EXTERNAL`, the key material was imported from your existing key management infrastructure or the CMK lacks key material
+* `pending_deletion_window_in_days`: The waiting period before the primary key in a multi-Region key is deleted.
+* `valid_to`: The time at which the imported key material expires. This value is present only when `origin` is `EXTERNAL` and `expiration_model` is `KEY_MATERIAL_EXPIRES`, otherwise this value is 0
+* `xks_key_configuration`: Information about the external key that is associated with a KMS key in an external key store.
+
+The `multi_region_configuration` object supports the following:
+
+* `multi_region_key_type`: Indicates whether the KMS key is a `PRIMARY` or `REPLICA` key.
+* `primary_key`: The key ARN and Region of the primary key. This is the current KMS key if it is the primary key.
+* `replica_keys`: The key ARNs and Regions of all replica keys. Includes the current KMS key if it is a replica key.
+
+The `primary_key` and `replica_keys` objects support the following:
+
+* `arn`: The key ARN of a primary or replica key of a multi-Region key.
+* `region`: The AWS Region of a primary or replica key in a multi-Region key.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kms_public_key.html.markdown b/website/docs/cdktf/python/d/kms_public_key.html.markdown
new file mode 100644
index 00000000000..74b6bbcb68b
--- /dev/null
+++ b/website/docs/cdktf/python/d/kms_public_key.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_public_key"
+description: |-
+  Get information on a KMS public key
+---
+
+
+# aws_kms_public_key
+
+Use this data source to get the public key of the specified KMS Key with flexible key id input. This can be useful to reference a key alias without having to hard code the ARN as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
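+# Each lookup below resolves the same public key through a different accepted
+# key identifier format: alias name, alias ARN, key ID, and key ARN.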
+#
+from imports.aws.data_aws_kms_public_key import DataAwsKmsPublicKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsKmsPublicKey(self, "by_alias",
+            key_id="alias/my-key"
+        )
+        DataAwsKmsPublicKey(self, "by_alias_arn",
+            key_id="arn:aws:kms:us-east-1:111122223333:alias/my-key"
+        )
+        DataAwsKmsPublicKey(self, "by_id",
+            key_id="1234abcd-12ab-34cd-56ef-1234567890ab"
+        )
+        DataAwsKmsPublicKey(self, "by_key_arn",
+            key_id="arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `key_id` - (Required) Key identifier which can be one of the following formats:
+    * Key ID. E.g.: `1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Key ARN. E.g.: `arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Alias name. E.g.: `alias/my-key`
+    * Alias ARN. E.g.: `arn:aws:kms:us-east-1:111122223333:alias/my-key`
+* `grant_tokens` - (Optional) List of grant tokens.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Key ARN of the asymmetric CMK from which the public key was downloaded.
+* `customer_master_key_spec` - Type of the public key that was downloaded.
+* `encryption_algorithms` - Encryption algorithms that AWS KMS supports for this key. Only set when the `key_usage` of the public key is `ENCRYPT_DECRYPT`.
+* `id` - Key ARN of the asymmetric CMK from which the public key was downloaded.
+* `key_usage` - Permitted use of the public key. Valid values are `ENCRYPT_DECRYPT` or `SIGN_VERIFY`.
+* `public_key` - Exported public key. The value is a DER-encoded X.509 public key, also known as SubjectPublicKeyInfo (SPKI), as defined in [RFC 5280](https://tools.ietf.org/html/rfc5280). The value is Base64-encoded.
+* `public_key_pem` - Exported public key. The value is Privacy Enhanced Mail (PEM) encoded.
+* `signing_algorithms` - Signing algorithms that AWS KMS supports for this key. Only set when the `key_usage` of the public key is `SIGN_VERIFY`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kms_secret.html.markdown b/website/docs/cdktf/python/d/kms_secret.html.markdown
new file mode 100644
index 00000000000..43f477862a1
--- /dev/null
+++ b/website/docs/cdktf/python/d/kms_secret.html.markdown
@@ -0,0 +1,15 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_secret"
+description: |-
+  Provides secret data encrypted with the KMS service
+---
+
+
+# Data Source: aws_kms_secret
+
+!> **WARNING:** This data source was removed in version 2.0.0 of the Terraform AWS Provider. You can migrate existing configurations to the [`aws_kms_secrets` data source](/docs/providers/aws/d/kms_secrets.html) following instructions available in the [Version 2 Upgrade Guide](/docs/providers/aws/guides/version-2-upgrade.html#data-source-aws_kms_secret).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/kms_secrets.html.markdown b/website/docs/cdktf/python/d/kms_secrets.html.markdown
new file mode 100644
index 00000000000..9473c1a00e3
--- /dev/null
+++ b/website/docs/cdktf/python/d/kms_secrets.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_secrets"
+description: |-
+  Decrypt multiple secrets from data encrypted with the AWS KMS service
+---
+
+
+# Data Source: aws_kms_secrets
+
+Decrypt multiple secrets from data encrypted with the AWS KMS service.
+
+~> **NOTE:** Using this data source will allow you to conceal secret data within your resource definitions, but it does not take care of protecting that data in all Terraform logging and state output. Please take care to secure your secret data beyond just the Terraform configuration.
+
+## Example Usage
+
+If you do not already have a `CiphertextBlob` from encrypting a KMS secret, you can use the below commands to obtain one using the [AWS CLI kms encrypt](https://docs.aws.amazon.com/cli/latest/reference/kms/encrypt.html) command. This requires you to have your AWS CLI setup correctly and replace the `--key-id` with your own. Alternatively you can use `--plaintext 'master-password'` (CLIv1) or `--plaintext fileb://<(echo -n 'master-password')` (CLIv2) instead of reading from a file.
+
+-> If you have a newline character at the end of your file, it will be decrypted with this newline character intact. For most use cases this is undesirable and leads to incorrect passwords or invalid values, as well as possible changes in the plan. Be sure to use `echo -n` if necessary.
+-> If you are using asymmetric keys, ensure you use the right encryption algorithm when you encrypt and decrypt, or else you will get an `IncorrectKeyException` during the decrypt phase.
+
+```console
+% echo -n 'master-password' > plaintext-password
+% aws kms encrypt --key-id ab123456-c012-4567-890a-deadbeef123 --plaintext fileb://plaintext-password --encryption-context foo=bar --output text --query CiphertextBlob
+AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ==
+% aws kms encrypt --key-id ab123456-c012-4567-890a-deadbeef123 --plaintext fileb://plaintext-password --encryption-algorithm RSAES_OAEP_SHA_256 --output text --query CiphertextBlob
+AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ==
+```
+
+That encrypted output can now be inserted into Terraform configurations without exposing the plaintext secret directly.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
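+# Decrypt the two ciphertext payloads defined below and feed the recovered
+# plaintext values into an RDS cluster's master credentials.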
+#
+from imports.aws.data_aws_kms_secrets import DataAwsKmsSecrets, DataAwsKmsSecretsSecret
+from imports.aws.rds_cluster import RdsCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine):
+        super().__init__(scope, name)
+        example = DataAwsKmsSecrets(self, "example",
+            secret=[DataAwsKmsSecretsSecret(
+                context={
+                    "foo": "bar"
+                },
+                name="master_password",
+                payload="AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ=="
+            ), DataAwsKmsSecretsSecret(
+                name="master_username",
+                payload="AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ=="
+            )
+            ]
+        )
+        aws_rds_cluster_example = RdsCluster(self, "example_1",
+            master_password=Token.as_string(
+                property_access(example.plaintext, ["\"master_password\""])),
+            master_username=Token.as_string(
+                property_access(example.plaintext, ["\"master_username\""])),
+            engine=engine
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `secret` - (Required) One or more encrypted payload definitions from the KMS service. See the Secret Definitions below.
+
+### Secret Definitions
+
+Each `secret` supports the following arguments:
+
+* `name` - (Required) Name to export this secret under in the attributes.
+* `payload` - (Required) Base64 encoded payload, as returned from a KMS encrypt operation.
+* `context` - (Optional) An optional mapping that makes up the Encryption Context for the secret.
+* `grant_tokens` - (Optional) An optional list of Grant Tokens for the secret.
+* `encryption_algorithm` - (Optional) The encryption algorithm that will be used to decrypt the ciphertext. This parameter is required only when the ciphertext was encrypted under an asymmetric KMS key. Valid Values: SYMMETRIC_DEFAULT | RSAES_OAEP_SHA_1 | RSAES_OAEP_SHA_256 | SM2PKE
+* `key_id` - (Optional) Specifies the KMS key that AWS KMS uses to decrypt the ciphertext. This parameter is required only when the ciphertext was encrypted under an asymmetric KMS key.
+
+For more information on `context` and `grant_tokens`, see [KMS
+Concepts](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `plaintext` - Map containing each `secret` `name` as the key with its decrypted plaintext value.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/lakeformation_data_lake_settings.html.markdown b/website/docs/cdktf/python/d/lakeformation_data_lake_settings.html.markdown
new file mode 100644
index 00000000000..607d1b74279
--- /dev/null
+++ b/website/docs/cdktf/python/d/lakeformation_data_lake_settings.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_data_lake_settings"
+description: |-
+  Get data lake administrators and default database and table permissions
+---
+
+
+# Data Source: aws_lakeformation_data_lake_settings
+
+Get Lake Formation principals designated as data lake administrators and lists of principal permission entries for default create database and default create table permissions.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_lakeformation_data_lake_settings import DataAwsLakeformationDataLakeSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLakeformationDataLakeSettings(self, "example",
+            catalog_id="14916253649"
+        )
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `catalog_id` – (Optional) Identifier for the Data Catalog. By default, the account ID.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `admins` – List of ARNs of AWS Lake Formation principals (IAM users or roles).
+* `create_database_default_permissions` - Up to three configuration blocks of principal permissions for default create database permissions. Detailed below.
+* `create_table_default_permissions` - Up to three configuration blocks of principal permissions for default create table permissions. Detailed below.
+* `trusted_resource_owners` – List of the resource-owning account IDs that the caller's account can use to share their user access details (user ARNs).
+* `allow_external_data_filtering` - Whether to allow Amazon EMR clusters to access data managed by Lake Formation.
+* `external_data_filtering_allow_list` - A list of the account IDs of Amazon Web Services accounts with Amazon EMR clusters that are to perform data filtering.
+* `authorized_session_tag_value_list` - Lake Formation relies on a privileged process secured by Amazon EMR or the third party integrator to tag the user's role while assuming it.
+
+### create_database_default_permissions
+
+* `permissions` - List of permissions granted to the principal.
+* `principal` - Principal who is granted permissions.
+
+### create_table_default_permissions
+
+* `permissions` - List of permissions granted to the principal.
+* `principal` - Principal who is granted permissions.
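+
+As a quick illustration, the administrator list can be surfaced as a Terraform output. A minimal sketch (the stack and output names are illustrative):
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_lakeformation_data_lake_settings import DataAwsLakeformationDataLakeSettings
+class DataLakeAdmins(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        settings = DataAwsLakeformationDataLakeSettings(self, "settings")
+        # ARNs of the principals designated as data lake administrators.
+        TerraformOutput(self, "admins",
+            value=settings.admins
+        )
+```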
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lakeformation_permissions.html.markdown b/website/docs/cdktf/python/d/lakeformation_permissions.html.markdown new file mode 100644 index 00000000000..5773cd53161 --- /dev/null +++ b/website/docs/cdktf/python/d/lakeformation_permissions.html.markdown @@ -0,0 +1,193 @@ +--- +subcategory: "Lake Formation" +layout: "aws" +page_title: "AWS: aws_lakeformation_permissions" +description: |- + Get permissions for a principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3. +--- + + + +# Data Source: aws_lakeformation_permissions + +Get permissions for a principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3. Permissions are granted to a principal, in a Data Catalog, relative to a Lake Formation resource, which includes the Data Catalog, databases, tables, LF-tags, and LF-tag policies. For more information, see [Security and Access Control to Metadata and Data in Lake Formation](https://docs.aws.amazon.com/lake-formation/latest/dg/security-data-access.html). + +~> **NOTE:** This data source deals with explicitly granted permissions. Lake Formation grants implicit permissions to data lake administrators, database creators, and table creators. For more information, see [Implicit Lake Formation Permissions](https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html). + +## Example Usage + +### Permissions For A Lake Formation S3 Resource + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lakeformation_permissions import DataAwsLakeformationPermissions +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLakeformationPermissions(self, "test", + data_location=DataAwsLakeformationPermissionsDataLocation( + arn=Token.as_string(aws_lakeformation_resource_test.arn) + ), + principal=workflow_role.arn + ) +``` + +### Permissions For A Glue Catalog Database + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lakeformation_permissions import DataAwsLakeformationPermissions +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLakeformationPermissions(self, "test", + database=DataAwsLakeformationPermissionsDatabase( + catalog_id="110376042874", + name=Token.as_string(aws_glue_catalog_database_test.name) + ), + principal=workflow_role.arn + ) +``` + +### Permissions For Tag-Based Access Control + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_lakeformation_permissions import DataAwsLakeformationPermissions +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLakeformationPermissions(self, "test", + lf_tag_policy=DataAwsLakeformationPermissionsLfTagPolicy( + expression=[DataAwsLakeformationPermissionsLfTagPolicyExpression( + key="Team", + values=["Sales"] + ), DataAwsLakeformationPermissionsLfTagPolicyExpression( + key="Environment", + values=["Dev", "Production"] + ) + ], + resource_type="DATABASE" + ), + principal=workflow_role.arn + ) +``` + +## Argument Reference + +The following arguments are required: + +* `principal` – (Required) Principal to be granted the permissions on the resource. Supported principals are IAM users or IAM roles. + +One of the following is required: + +* `catalog_resource` - Whether the permissions are to be granted for the Data Catalog. Defaults to `false`. +* `data_location` - Configuration block for a data location resource. Detailed below. +* `database` - Configuration block for a database resource. Detailed below. +* `lf_tag` - (Optional) Configuration block for an LF-tag resource. Detailed below. +* `lf_tag_policy` - (Optional) Configuration block for an LF-tag policy resource. Detailed below. +* `table` - Configuration block for a table resource. Detailed below. +* `table_with_columns` - Configuration block for a table with columns resource. Detailed below. + +The following arguments are optional: + +* `catalog_id` – (Optional) Identifier for the Data Catalog. By default, the account ID. The Data Catalog is the persistent metadata store. It contains database definitions, table definitions, and other control information to manage your Lake Formation environment. + +### data_location + +The following argument is required: + +* `arn` – (Required) ARN that uniquely identifies the data location resource. + +The following argument is optional: + +* `catalog_id` - (Optional) Identifier for the Data Catalog where the location is registered with Lake Formation. By default, it is the account ID of the caller. + +### database + +The following argument is required: + +* `name` – (Required) Name of the database resource. Unique to the Data Catalog. + +The following argument is optional: + +* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. + +### lf_tag + +The following arguments are required: + +* `key` – (Required) Key-name for the tag. +* `values` - (Required) List of possible values an attribute can take. + +The following argument is optional: + +* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. + +### lf_tag_policy + +The following arguments are required: + +* `resource_type` – (Required) Resource type for which the tag policy applies. Valid values are `DATABASE` and `TABLE`. +* `expression` - (Required) List of tag conditions that apply to the resource's tag policy. Configuration block for tag conditions that apply to the policy. See [`expression`](#expression) below. + +The following argument is optional: + +* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. + +#### expression + +* `key` – (Required) Key-name of an LF-Tag. +* `values` - (Required) List of possible values of an LF-Tag. + +### table + +The following argument is required: + +* `database_name` – (Required) Name of the database for the table. Unique to a Data Catalog. 
+ +The following arguments are optional: + +* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. +* `name` - (Optional) Name of the table. At least one of `name` or `wildcard` is required. +* `wildcard` - (Optional) Whether to use a wildcard representing every table under a database. At least one of `name` or `wildcard` is required. Defaults to `false`. + +### table_with_columns + +The following arguments are required: + +* `database_name` – (Required) Name of the database for the table with columns resource. Unique to the Data Catalog. +* `name` – (Required) Name of the table resource. + +The following arguments are optional: + +* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. +* `column_names` - (Optional) Set of column names for the table. At least one of `column_names` or `excluded_column_names` is required. +* `excluded_column_names` - (Optional) Set of column names for the table to exclude. At least one of `column_names` or `excluded_column_names` is required. + +## Attribute Reference + +In addition to the above arguments, the following attribute is exported: + +* `permissions` – List of permissions granted to the principal. For details on permissions, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html). +* `permissions_with_grant_option` - Subset of `permissions` which the principal can pass. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lakeformation_resource.html.markdown b/website/docs/cdktf/python/d/lakeformation_resource.html.markdown new file mode 100644 index 00000000000..3cd67a4e822 --- /dev/null +++ b/website/docs/cdktf/python/d/lakeformation_resource.html.markdown @@ -0,0 +1,45 @@ +--- +subcategory: "Lake Formation" +layout: "aws" +page_title: "AWS: aws_lakeformation_resource" +description: |- + Provides details about a Lake Formation resource. +--- + + + +# Data Source: aws_lakeformation_resource + +Provides details about a Lake Formation resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lakeformation_resource import DataAwsLakeformationResource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLakeformationResource(self, "example", + arn="arn:aws:s3:::tf-acc-test-9151654063908211878" + ) +``` + +## Argument Reference + +* `arn` – (Required) ARN of the resource, an S3 path. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `last_modified` - Date and time the resource was last modified in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `role_arn` – Role that the resource was registered with. 
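+
+A hedged variant of the lookup above (the bucket name is hypothetical) resolves the ARN from an S3 bucket data source instead of hard-coding it:
+
+```python
+# Hypothetical sketch: find the Lake Formation registration for a known bucket.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.data_aws_s3_bucket import DataAwsS3Bucket
+from imports.aws.data_aws_lakeformation_resource import DataAwsLakeformationResource
+class RegisteredBucketStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bucket = DataAwsS3Bucket(self, "bucket",
+            bucket="tf-example-data-lake"  # assumed bucket name
+        )
+        DataAwsLakeformationResource(self, "registered",
+            arn=Token.as_string(bucket.arn)
+        )
+```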
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lambda_alias.html.markdown b/website/docs/cdktf/python/d/lambda_alias.html.markdown index 36c53e2cab1..c3a3327719b 100644 --- a/website/docs/cdktf/python/d/lambda_alias.html.markdown +++ b/website/docs/cdktf/python/d/lambda_alias.html.markdown @@ -34,18 +34,18 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `function_name` - (Required) Name of the aliased Lambda function. * `name` - (Required) Name of the Lambda alias. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN identifying the Lambda function alias. * `description` - Description of alias. * `function_version` - Lambda function version which the alias uses. * `invoke_arn` - ARN to be used for invoking Lambda Function from API Gateway - to be used in aws_api_gateway_integration's `uri`. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lambda_code_signing_config.html.markdown b/website/docs/cdktf/python/d/lambda_code_signing_config.html.markdown index c49b9efabe3..2ef3e2eebcb 100644 --- a/website/docs/cdktf/python/d/lambda_code_signing_config.html.markdown +++ b/website/docs/cdktf/python/d/lambda_code_signing_config.html.markdown @@ -35,13 +35,13 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `arn` - (Required) ARN of the code signing configuration. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `allowed_publishers` - List of allowed publishers as signing profiles for this code signing configuration. * `config_id` - Unique identifier for the code signing configuration. @@ -59,4 +59,4 @@ In addition to all arguments above, the following attributes are exported: [1]: https://docs.aws.amazon.com/lambda/latest/dg/configuration-codesigning.html - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lambda_function.html.markdown b/website/docs/cdktf/python/d/lambda_function.html.markdown index 42bec68d9f2..65fa26a27ef 100644 --- a/website/docs/cdktf/python/d/lambda_function.html.markdown +++ b/website/docs/cdktf/python/d/lambda_function.html.markdown @@ -38,14 +38,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `function_name` - (Required) Name of the lambda function. * `qualifier` - (Optional) Alias name or version number of the lambda functionE.g., `$LATEST`, `my-alias`, or `1`. When not included: the data source resolves to the most recent published version; if no published version exists: it resolves to the most recent unpublished version. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `architectures` - Instruction set architecture for the Lambda function. 
 * `arn` - Unqualified (no `:QUALIFIER` or `:VERSION` suffix) ARN identifying your Lambda Function. See also `qualified_arn`.
@@ -76,4 +76,4 @@ In addition to all arguments above, the following attributes are exported:
 * `version` - The version of the Lambda function returned. If `qualifier` is not set, this will resolve to the most recent published version. If no published version of the function exists, `version` will resolve to `$LATEST`.
 * `vpc_config` - VPC configuration associated with your Lambda function.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/lambda_function_url.html.markdown b/website/docs/cdktf/python/d/lambda_function_url.html.markdown
index 89c999f7cbb..d8d5a865e93 100644
--- a/website/docs/cdktf/python/d/lambda_function_url.html.markdown
+++ b/website/docs/cdktf/python/d/lambda_function_url.html.markdown
@@ -38,14 +38,14 @@ class MyConvertedCode(TerraformStack):
 
 ## Argument Reference
 
-The following arguments are supported:
+This data source supports the following arguments:
 
 * `function_name` - (Required) The name (or ARN) of the Lambda function.
 * `qualifier` - (Optional) Alias name or `"$LATEST"`.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This data source exports the following attributes in addition to the arguments above:
 
 * `authorization_type` - Type of authentication that the function URL uses.
 * `cors` - The [cross-origin resource sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) settings for the function URL. See the [`aws_lambda_function_url` resource](/docs/providers/aws/r/lambda_function_url.html) documentation for more details.
@@ -56,4 +56,4 @@ In addition to all arguments above, the following attributes are exported:
 * `last_modified_time` - When the function URL configuration was last updated, in [ISO-8601 format](https://www.w3.org/TR/NOTE-datetime).
 * `url_id` - Generated ID for the endpoint.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/lambda_functions.html.markdown b/website/docs/cdktf/python/d/lambda_functions.html.markdown
index 4d95b564057..5e2b55516f4 100644
--- a/website/docs/cdktf/python/d/lambda_functions.html.markdown
+++ b/website/docs/cdktf/python/d/lambda_functions.html.markdown
@@ -33,11 +33,11 @@ class MyConvertedCode(TerraformStack):
 
 The data source does not support any arguments.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This data source exports the following attributes in addition to the arguments above:
 
 * `function_names` - A list of Lambda Function names.
 * `function_arns` - A list of Lambda Function ARNs.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/lambda_invocation.html.markdown b/website/docs/cdktf/python/d/lambda_invocation.html.markdown
index 791277322e1..0d30091e6c9 100644
--- a/website/docs/cdktf/python/d/lambda_invocation.html.markdown
+++ b/website/docs/cdktf/python/d/lambda_invocation.html.markdown
@@ -47,8 +47,8 @@ class MyConvertedCode(TerraformStack):
 
 * `qualifier` - (Optional) Qualifier (a.k.a. version) of the lambda function. Defaults to `$LATEST`.
 
-## Attributes Reference
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
 
 * `result` - String result of the lambda function invocation.
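+
+Because `result` is a JSON string, it is typically decoded before use. The sketch below is a minimal, hypothetical example (the function name, input, and result key are assumptions):
+
+```python
+# Hypothetical sketch: decode the JSON invocation result and expose one field.
+from constructs import Construct
+from cdktf import TerraformStack, TerraformOutput, Fn
+from imports.aws.data_aws_lambda_invocation import DataAwsLambdaInvocation
+class InvocationResultStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsLambdaInvocation(self, "example",
+            function_name="example_lambda_function",  # assumed function name
+            input="{\"key1\": \"value1\"}"
+        )
+        # jsondecode() turns the result string into a Terraform object;
+        # lookup() pulls out a single key with an empty-string fallback.
+        TerraformOutput(self, "result_key1",
+            value=Fn.lookup(Fn.jsondecode(example.result), "key1", "")
+        )
+```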
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lambda_layer_version.html.markdown b/website/docs/cdktf/python/d/lambda_layer_version.html.markdown index 35265281430..5badd67996d 100644 --- a/website/docs/cdktf/python/d/lambda_layer_version.html.markdown +++ b/website/docs/cdktf/python/d/lambda_layer_version.html.markdown @@ -38,16 +38,16 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `layer_name` - (Required) Name of the lambda layer. * `version` - (Optional) Specific layer version. Conflicts with `compatible_runtime` and `compatible_architecture`. If omitted, the latest available layer version will be used. * `compatible_runtime` (Optional) Specific runtime the layer version must support. Conflicts with `version`. If specified, the latest available layer version supporting the provided runtime will be used. * `compatible_architecture` (Optional) Specific architecture the layer version could support. Conflicts with `version`. If specified, the latest available layer version supporting the provided architecture will be used. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `description` - Description of the specific Lambda Layer version. * `license_info` - License info associated with the specific Lambda Layer version. @@ -65,4 +65,4 @@ In addition to all arguments above, the following attributes are exported: [1]: https://docs.aws.amazon.com/lambda/latest/dg/API_GetLayerVersion.html#SSS-GetLayerVersion-response-CompatibleRuntimes [2]: https://docs.aws.amazon.com/lambda/latest/dg/API_GetLayerVersion.html#SSS-GetLayerVersion-response-CompatibleArchitectures - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/launch_configuration.html.markdown b/website/docs/cdktf/python/d/launch_configuration.html.markdown new file mode 100644 index 00000000000..fb0fea691eb --- /dev/null +++ b/website/docs/cdktf/python/d/launch_configuration.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_launch_configuration" +description: |- + Provides a Launch Configuration data source. +--- + + + +# Data Source: aws_launch_configuration + +Provides information about a Launch Configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_launch_configuration import DataAwsLaunchConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLaunchConfiguration(self, "ubuntu", + name="test-launch-config" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the launch configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the launch configuration. +* `arn` - Amazon Resource Name of the launch configuration. +* `name` - Name of the launch configuration. 
+* `image_id` - EC2 Image ID of the instance. +* `instance_type` - Instance Type of the instance to launch. +* `iam_instance_profile` - The IAM Instance Profile to associate with launched instances. +* `key_name` - Key Name that should be used for the instance. +* `metadata_options` - Metadata options for the instance. + * `http_endpoint` - State of the metadata service: `enabled`, `disabled`. + * `http_tokens` - If session tokens are required: `optional`, `required`. + * `http_put_response_hop_limit` - The desired HTTP PUT response hop limit for instance metadata requests. +* `security_groups` - List of associated Security Group IDS. +* `associate_public_ip_address` - Whether a Public IP address is associated with the instance. +* `user_data` - User Data of the instance. +* `enable_monitoring` - Whether Detailed Monitoring is Enabled. +* `ebs_optimized` - Whether the launched EC2 instance will be EBS-optimized. +* `root_block_device` - Root Block Device of the instance. +* `ebs_block_device` - EBS Block Devices attached to the instance. +* `ephemeral_block_device` - The Ephemeral volumes on the instance. +* `spot_price` - Price to use for reserving Spot instances. +* `placement_tenancy` - Tenancy of the instance. + +`root_block_device` is exported with the following attributes: + +* `delete_on_termination` - Whether the EBS Volume will be deleted on instance termination. +* `encrypted` - Whether the volume is Encrypted. +* `iops` - Provisioned IOPs of the volume. +* `throughput` - Throughput of the volume. +* `volume_size` - Size of the volume. +* `volume_type` - Type of the volume. + +`ebs_block_device` is exported with the following attributes: + +* `delete_on_termination` - Whether the EBS Volume will be deleted on instance termination. +* `device_name` - Name of the device. +* `encrypted` - Whether the volume is Encrypted. +* `iops` - Provisioned IOPs of the volume. +* `no_device` - Whether the device in the block device mapping of the AMI is suppressed. +* `snapshot_id` - Snapshot ID of the mount. +* `throughput` - Throughput of the volume. +* `volume_size` - Size of the volume. +* `volume_type` - Type of the volume. + +`ephemeral_block_device` is exported with the following attributes: + +* `device_name` - Name of the device. +* `virtual_name` - Virtual Name of the device. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/launch_template.html.markdown b/website/docs/cdktf/python/d/launch_template.html.markdown new file mode 100644 index 00000000000..d79e3fc2835 --- /dev/null +++ b/website/docs/cdktf/python/d/launch_template.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_launch_template" +description: |- + Provides a Launch Template data source. +--- + + + +# Data Source: aws_launch_template + +Provides information about a Launch Template. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_launch_template import DataAwsLaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLaunchTemplate(self, "default", + name="my-launch-template" + ) +``` + +### Filter + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_launch_template import DataAwsLaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLaunchTemplate(self, "test", + filter=[DataAwsLaunchTemplateFilter( + name="launch-template-name", + values=["some-template"] + ) + ] + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. +* `id` - (Optional) ID of the specific launch template to retrieve. +* `name` - (Optional) Name of the launch template. +* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Launch Template. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeLaunchTemplates API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeLaunchTemplates.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the launch template. + +This resource also exports a full set of attributes corresponding to the arguments of the [`aws_launch_template`](/docs/providers/aws/r/launch_template.html) resource. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lb.html.markdown b/website/docs/cdktf/python/d/lb.html.markdown new file mode 100644 index 00000000000..e8a85b8da78 --- /dev/null +++ b/website/docs/cdktf/python/d/lb.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb" +description: |- + Provides a Load Balancer data source. +--- + + + +# Data Source: aws_lb + +~> **Note:** `aws_alb` is known as `aws_lb`. The functionality is identical. + +Provides information about a Load Balancer. + +This data source can prove useful when a module accepts an LB as an input +variable and needs to, for example, determine the security groups associated +with it, etc. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import VariableType, TerraformVariable, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_lb import DataAwsLb
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        lb_arn = TerraformVariable(self, "lb_arn",
+            default="",
+            type=VariableType.STRING
+        )
+        lb_name = TerraformVariable(self, "lb_name",
+            default="",
+            type=VariableType.STRING
+        )
+        DataAwsLb(self, "test",
+            arn=lb_arn.string_value,
+            name=lb_name.string_value
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Optional) Full ARN of the load balancer.
+* `name` - (Optional) Unique name of the load balancer.
+* `tags` - (Optional) Mapping of tags, each pair of which must exactly match a pair on the desired load balancer.
+
+~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. `tags` has the lowest precedence.
+
+## Attribute Reference
+
+See the [LB Resource](/docs/providers/aws/r/lb.html) for details on the
+returned attributes - they are identical.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/lb_hosted_zone_id.html.markdown b/website/docs/cdktf/python/d/lb_hosted_zone_id.html.markdown
new file mode 100644
index 00000000000..be9cd24268f
--- /dev/null
+++ b/website/docs/cdktf/python/d/lb_hosted_zone_id.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_hosted_zone_id"
+description: |-
+  Provides AWS Elastic Load Balancing Hosted Zone Id
+---
+
+
+
+# Data Source: aws_lb_hosted_zone_id
+
+Use this data source to get the HostedZoneId of the AWS Elastic Load Balancing (ELB) in a given region for use in an AWS Route53 Alias record. Specify the ELB type (`network` or `application`) to return the associated HostedZoneId. Ref: [ELB service endpoints](https://docs.aws.amazon.com/general/latest/gr/elb.html#elb_region)
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_lb_hosted_zone_id import DataAwsLbHostedZoneId
+from imports.aws.route53_record import Route53Record
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = DataAwsLbHostedZoneId(self, "main")
+        Route53Record(self, "www",
+            alias=Route53RecordAlias(
+                evaluate_target_health=True,
+                name=Token.as_string(aws_lb_main.dns_name),
+                zone_id=Token.as_string(main.id)
+            ),
+            name="example.com",
+            type="A",
+            zone_id=primary.zone_id
+        )
+```
+
+## Argument Reference
+
+* `region` - (Optional) Name of the region whose AWS ELB HostedZoneId is desired.
+  Defaults to the region from the AWS provider configuration.
+
+* `load_balancer_type` - (Optional) Type of load balancer. Possible values are `application` or `network`. The default value is `application`.
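+
+As a hedged variant of the example above (the record name, hosted zone ID, and NLB DNS name are placeholders), setting `load_balancer_type` selects the Network Load Balancer zone instead:
+
+```python
+# Hypothetical sketch: alias record pointing at a Network Load Balancer.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.data_aws_lb_hosted_zone_id import DataAwsLbHostedZoneId
+from imports.aws.route53_record import Route53Record, Route53RecordAlias
+class NlbAliasStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        nlb_zone = DataAwsLbHostedZoneId(self, "nlb",
+            load_balancer_type="network"
+        )
+        Route53Record(self, "api",
+            name="api.example.com",  # assumed record name
+            type="A",
+            zone_id="Z0123456789EXAMPLE",  # assumed Route53 hosted zone ID
+            alias=Route53RecordAlias(
+                evaluate_target_health=True,
+                # assumed NLB DNS name
+                name="example-nlb-0123456789abcdef.elb.us-east-1.amazonaws.com",
+                zone_id=Token.as_string(nlb_zone.id)
+            )
+        )
+```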
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the AWS ELB HostedZoneId in the selected region. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lb_listener.html.markdown b/website/docs/cdktf/python/d/lb_listener.html.markdown new file mode 100644 index 00000000000..ec55a73f87f --- /dev/null +++ b/website/docs/cdktf/python/d/lb_listener.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb_listener" +description: |- + Provides a Load Balancer Listener data source. +--- + + + +# Data Source: aws_lb_listener + +~> **Note:** `aws_alb_listener` is known as `aws_lb_listener`. The functionality is identical. + +Provides information about a Load Balancer Listener. + +This data source can prove useful when a module accepts an LB Listener as an input variable and needs to know the LB it is attached to, or other information specific to the listener in question. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import VariableType, TerraformVariable, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lb import DataAwsLb +from imports.aws.data_aws_lb_listener import DataAwsLbListener +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + listener_arn = TerraformVariable(self, "listener_arn", + type=VariableType.STRING + ) + selected = DataAwsLb(self, "selected", + name="default-public" + ) + DataAwsLbListener(self, "listener", + arn=listener_arn.string_value + ) + DataAwsLbListener(self, "selected443", + load_balancer_arn=Token.as_string(selected.arn), + port=443 + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) ARN of the listener. Required if `load_balancer_arn` and `port` is not set. +* `load_balancer_arn` - (Optional) ARN of the load balancer. Required if `arn` is not set. +* `port` - (Optional) Port of the listener. Required if `arn` is not set. + +## Attribute Reference + +See the [LB Listener Resource](/docs/providers/aws/r/lb_listener.html) for details on the returned attributes - they are identical. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lb_target_group.html.markdown b/website/docs/cdktf/python/d/lb_target_group.html.markdown new file mode 100644 index 00000000000..37eabbe1ed0 --- /dev/null +++ b/website/docs/cdktf/python/d/lb_target_group.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb_target_group" +description: |- + Provides a Load Balancer Target Group data source. +--- + + + +# Data Source: aws_lb_target_group + +~> **Note:** `aws_alb_target_group` is known as `aws_lb_target_group`. The functionality is identical. + +Provides information about a Load Balancer Target Group. 
+
+This data source can prove useful when a module accepts an LB Target Group as an
+input variable and needs to know its attributes. It can also be used to get the ARN of
+an LB Target Group for use in other resources, given the LB Target Group's name.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import VariableType, TerraformVariable, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_lb_target_group import DataAwsLbTargetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        lb_tg_arn = TerraformVariable(self, "lb_tg_arn",
+            default="",
+            type=VariableType.STRING
+        )
+        lb_tg_name = TerraformVariable(self, "lb_tg_name",
+            default="",
+            type=VariableType.STRING
+        )
+        DataAwsLbTargetGroup(self, "test",
+            arn=lb_tg_arn.string_value,
+            name=lb_tg_name.string_value
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Optional) Full ARN of the target group.
+* `name` - (Optional) Unique name of the target group.
+* `tags` - (Optional) Mapping of tags, each pair of which must exactly match a pair on the desired target group.
+
+~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. `tags` has the lowest precedence.
+
+## Attribute Reference
+
+See the [LB Target Group Resource](/docs/providers/aws/r/lb_target_group.html) for details
+on the returned attributes - they are identical.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/lbs.html.markdown b/website/docs/cdktf/python/d/lbs.html.markdown
new file mode 100644
index 00000000000..bd76bf6e6ca
--- /dev/null
+++ b/website/docs/cdktf/python/d/lbs.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lbs"
+description: |-
+  Data source for listing AWS ELB (Elastic Load Balancing) Load Balancers.
+---
+
+
+
+# Data Source: aws_lbs
+
+Use this data source to get a list of Load Balancer ARNs matching the specified criteria. Useful for passing to other
+resources.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_lbs import DataAwsLbs
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLbs(self, "example",
+            tags={
+                "elbv2.k8s.aws/cluster": "my-cluster"
+            }
+        )
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired Load Balancers.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of Load Balancer ARNs.
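+
+The returned set can be fanned out with a `TerraformIterator`. The sketch below (the tag value is an assumption) looks up the full details of every matched load balancer:
+
+```python
+# Illustrative sketch: resolve details for each load balancer in the set.
+from constructs import Construct
+from cdktf import Token, TerraformIterator, TerraformStack
+from imports.aws.data_aws_lbs import DataAwsLbs
+from imports.aws.data_aws_lb import DataAwsLb
+class ClusterLbsStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        cluster_lbs = DataAwsLbs(self, "cluster",
+            tags={
+                "elbv2.k8s.aws/cluster": "my-cluster"  # assumed tag value
+            }
+        )
+        # Iterate the set of ARNs and read each load balancer with for_each.
+        arns = TerraformIterator.from_list(Token.as_list(cluster_lbs.arns))
+        DataAwsLb(self, "details",
+            for_each=arns,
+            arn=Token.as_string(arns.value)
+        )
+```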
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lex_bot.html.markdown b/website/docs/cdktf/python/d/lex_bot.html.markdown new file mode 100644 index 00000000000..6290976c33a --- /dev/null +++ b/website/docs/cdktf/python/d/lex_bot.html.markdown @@ -0,0 +1,63 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_bot" +description: |- + Provides details about a specific Lex Bot +--- + + + +# Data Source: aws_lex_bot + +Provides details about a specific Amazon Lex Bot. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lex_bot import DataAwsLexBot +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLexBot(self, "order_flowers_bot", + name="OrderFlowers", + version="$LATEST" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the bot. The name is case sensitive. +* `version` - (Optional) Version or alias of the bot. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the bot. +* `checksum` - Checksum of the bot used to identify a specific revision of the bot's `$LATEST` version. +* `child_directed` - If this Amazon Lex Bot is related to a website, program, or other application that is directed or targeted, in whole or in part, to children under age 13 and subject to COPPA. +* `created_date` - Date that the bot was created. +* `description` - Description of the bot. +* `detect_sentiment` - When set to true user utterances are sent to Amazon Comprehend for sentiment analysis. +* `enable_model_improvements` - Set to true if natural language understanding improvements are enabled. +* `failure_reason` - If the `status` is `FAILED`, the reason why the bot failed to build. +* `idle_session_ttl_in_seconds` - The maximum time in seconds that Amazon Lex retains the data gathered in a conversation. +* `last_updated_date` - Date that the bot was updated. +* `locale` - Target locale for the bot. Any intent used in the bot must be compatible with the locale of the bot. +* `name` - Name of the bot, case sensitive. +* `nlu_intent_confidence_threshold` - The threshold where Amazon Lex will insert the AMAZON.FallbackIntent, AMAZON.KendraSearchIntent, or both when returning alternative intents in a PostContent or PostText response. AMAZON.FallbackIntent and AMAZON.KendraSearchIntent are only inserted if they are configured for the bot. +* `status` - Status of the bot. +* `version` - Version of the bot. For a new bot, the version is always `$LATEST`. +* `voice_id` - Amazon Polly voice ID that the Amazon Lex Bot uses for voice interactions with the user. 
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lex_bot_alias.html.markdown b/website/docs/cdktf/python/d/lex_bot_alias.html.markdown new file mode 100644 index 00000000000..78cef3e3532 --- /dev/null +++ b/website/docs/cdktf/python/d/lex_bot_alias.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_bot_alias" +description: |- + Provides details about a specific Lex Bot Alias +--- + + + +# Data Source: aws_lex_bot_alias + +Provides details about a specific Amazon Lex Bot Alias. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lex_bot_alias import DataAwsLexBotAlias +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLexBotAlias(self, "order_flowers_prod", + bot_name="OrderFlowers", + name="OrderFlowersProd" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bot_name` - (Required) Name of the bot. +* `name` - (Required) Name of the bot alias. The name is case sensitive. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the bot alias. +* `bot_name` - Name of the bot. +* `bot_version` - Version of the bot that the alias points to. +* `checksum` - Checksum of the bot alias. +* `created_date` - Date that the bot alias was created. +* `description` - Description of the alias. +* `last_updated_date` - Date that the bot alias was updated. When you create a resource, the creation date and the last updated date are the same. +* `name` - Name of the alias. The name is not case sensitive. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lex_intent.html.markdown b/website/docs/cdktf/python/d/lex_intent.html.markdown new file mode 100644 index 00000000000..eb1fdb8dde6 --- /dev/null +++ b/website/docs/cdktf/python/d/lex_intent.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_intent" +description: |- + Provides details about a specific Amazon Lex Intent +--- + + + +# Data Source: aws_lex_intent + +Provides details about a specific Amazon Lex Intent. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lex_intent import DataAwsLexIntent +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLexIntent(self, "order_flowers", + name="OrderFlowers", + version="$LATEST" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the intent. The name is case sensitive. +* `version` - (Optional) Version of the intent. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Lex intent. +* `checksum` - Checksum identifying the version of the intent that was created. 
The checksum is not +included as an argument because the resource will add it automatically when updating the intent. +* `created_date` - Date when the intent version was created. +* `description` - Description of the intent. +* `last_updated_date` - Date when the $LATEST version of this intent was updated. +* `name` - Name of the intent, not case sensitive. +* `parent_intent_signature` - A unique identifier for the built-in intent to base this +intent on. To find the signature for an intent, see +[Standard Built-in Intents](https://developer.amazon.com/public/solutions/alexa/alexa-skills-kit/docs/built-in-intent-ref/standard-intents) +in the Alexa Skills Kit. +* `version` - Version of the bot. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/lex_slot_type.html.markdown b/website/docs/cdktf/python/d/lex_slot_type.html.markdown new file mode 100644 index 00000000000..688f1380b12 --- /dev/null +++ b/website/docs/cdktf/python/d/lex_slot_type.html.markdown @@ -0,0 +1,61 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_slot_type" +description: |- + Provides details about a specific Amazon Lex Slot Type +--- + + + +# Data Source: aws_lex_slot_type + +Provides details about a specific Amazon Lex Slot Type. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_lex_slot_type import DataAwsLexSlotType +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLexSlotType(self, "flower_types", + name="FlowerTypes", + version="1" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the slot type. The name is case sensitive. +* `version` - (Optional) Version of the slot type. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `checksum` - Checksum identifying the version of the slot type that was created. The checksum is +not included as an argument because the resource will add it automatically when updating the slot type. +* `created_date` - Date when the slot type version was created. +* `description` - Description of the slot type. +* `enumeration_value` - Set of EnumerationValue objects that defines the values that +the slot type can take. Each value can have a set of synonyms, which are additional values that help +train the machine learning model about the values that it resolves for a slot. +* `last_updated_date` - Date when the $LATEST version of this slot type was updated. +* `name` - Name of the slot type. The name is not case sensitive. +* `value_selection_strategy` - Determines the slot resolution strategy that Amazon Lex +uses to return slot type values. `ORIGINAL_VALUE` returns the value entered by the user if the user +value is similar to the slot value. `TOP_RESOLUTION` returns the first value in the resolution list +if there is a resolution list for the slot, otherwise null is returned. +* `version` - Version of the slot type. 
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/licensemanager_grants.html.markdown b/website/docs/cdktf/python/d/licensemanager_grants.html.markdown
new file mode 100644
index 00000000000..78813b6bd1a
--- /dev/null
+++ b/website/docs/cdktf/python/d/licensemanager_grants.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_grants"
+description: |-
+  Get information about a set of License Manager license grants
+---
+
+
+
+# Data Source: aws_licensemanager_grants
+
+This data source can be used to get a set of license grant ARNs matching a filter.
+
+## Example Usage
+
+The following example retrieves all license grant ARNs granted to your account.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_licensemanager_grants import DataAwsLicensemanagerGrants
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        DataAwsLicensemanagerGrants(self, "test",
+            filter=[DataAwsLicensemanagerGrantsFilter(
+                name="GranteePrincipalARN",
+                values=["arn:aws:iam::${" + current.account_id + "}:root"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/license-manager/latest/APIReference/API_ListReceivedGrants.html#API_ListReceivedGrants_RequestSyntax).
+  For example, if filtering using `ProductSKU`, use:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_licensemanager_grants import DataAwsLicensemanagerGrants
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLicensemanagerGrants(self, "selected",
+            filter=[DataAwsLicensemanagerGrantsFilter(
+                name="ProductSKU",
+                values=[""]
+            )
+            ]
+        )
+```
+
+* `values` - (Required) Set of values that are accepted for the given field.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - List of all the license grant ARNs found.
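+
+A minimal, illustrative follow-up (the output name is arbitrary, and `Fn.length_of` is assumed to be the cdktf wrapper for Terraform's `length()`) exports how many grants matched:
+
+```python
+# Illustrative sketch: count the license grants visible to the account.
+from constructs import Construct
+from cdktf import Fn, TerraformOutput, TerraformStack
+from imports.aws.data_aws_licensemanager_grants import DataAwsLicensemanagerGrants
+class GrantCountStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        grants = DataAwsLicensemanagerGrants(self, "all")
+        # Fn.length_of wraps Terraform's length() function.
+        TerraformOutput(self, "grant_count",
+            value=Fn.length_of(grants.arns)
+        )
+```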
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/licensemanager_received_license.html.markdown b/website/docs/cdktf/python/d/licensemanager_received_license.html.markdown
new file mode 100644
index 00000000000..801e65596a4
--- /dev/null
+++ b/website/docs/cdktf/python/d/licensemanager_received_license.html.markdown
@@ -0,0 +1,121 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_received_license"
+description: |-
+  Get information on a License Manager received license
+---
+
+
+
+# Data Source: aws_licensemanager_received_license
+
+This data source can be used to get data on a received license using an ARN. This can be helpful for pulling in data on a license from the AWS Marketplace and sharing that license with another account.
+
+## Example Usage
+
+The following shows getting the received license data using an ARN.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_licensemanager_received_license import DataAwsLicensemanagerReceivedLicense
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLicensemanagerReceivedLicense(self, "test",
+            license_arn="arn:aws:license-manager::111111111111:license:l-ecbaa94eb71a4830b6d7e49268fecaa0"
+        )
+```
+
+## Argument Reference
+
+* `license_arn` - (Required) The ARN of the received license you want data for.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The received license ARN (Same as: `license_arn`).
+* `beneficiary` - Granted license beneficiary. This is in the form of the ARN of the root user of the account.
+* `consumption_configuration` - Configuration for consumption of the license. [Detailed below](#consumption_configuration)
+* `create_time` - Creation time of the granted license in RFC 3339 format.
+* `entitlements` - License entitlements. [Detailed below](#entitlements)
+* `home_region` - Home Region of the granted license.
+* `issuer` - Granted license issuer. [Detailed below](#issuer)
+* `license_arn` - Amazon Resource Name (ARN) of the license.
+* `license_metadata` - Granted license metadata. This is in the form of a set of all metadata. [Detailed below](#license_metadata)
+* `license_name` - License name.
+* `product_name` - Product name.
+* `product_sku` - Product SKU.
+* `received_metadata` - Granted license received metadata. [Detailed below](#received_metadata)
+* `status` - Granted license status.
+* `validity` - Date and time range during which the granted license is valid, in ISO8601-UTC format. [Detailed below](#validity)
+* `version` - Version of the granted license.
+
+### consumption_configuration
+
+* `borrow_configuration` - Details about a borrow configuration. [Detailed below](#borrow_configuration)
+* `provisional_configuration` - Details about a provisional configuration. [Detailed below](#provisional_configuration)
+* `renewal_frequency` - Renewal frequency.
+
+#### borrow_configuration
+
+A list with a single map.
+
+* `allow_early_check_in` - Indicates whether early check-ins are allowed.
+* `max_time_to_live_in_minutes` - Maximum time for the borrow configuration, in minutes.
+
+#### provisional_configuration
+
+A list with a single map.
+
+* `max_time_to_live_in_minutes` - Maximum time for the provisional configuration, in minutes.
+
+### entitlements
+
+A list with a single map.
+
+* `allow_check_in` - Indicates whether check-ins are allowed.
+* `max_count` - Maximum entitlement count. Use if the unit is not None.
+* `name` - Entitlement name.
+* `overage` - Indicates whether overages are allowed.
+* `unit` - Entitlement unit.
+* `value` - Entitlement resource. Use only if the unit is None.
+
+### issuer
+
+A list with a single map.
+
+* `key_fingerprint` - Issuer key fingerprint.
+* `name` - Issuer name.
+* `sign_key` - Asymmetric KMS key from AWS Key Management Service. The KMS key must have a key usage of sign and verify, and support the RSASSA-PSS SHA-256 signing algorithm.
+
+### license_metadata
+
+Each metadata item will have the following attributes.
+
+* `name` - The key name.
+* `value` - The value.
+
+### received_metadata
+
+A list with a single map.
+
+* `allowed_operations` - A list of allowed operations.
+* `received_status` - Received status.
+* `received_status_reason` - Received status reason.
+
+### validity
+
+A list with a single map.
+
+* `begin` - Start of the validity time range.
+* `end` - End of the validity time range.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/licensemanager_received_licenses.html.markdown b/website/docs/cdktf/python/d/licensemanager_received_licenses.html.markdown
new file mode 100644
index 00000000000..a09c9d22521
--- /dev/null
+++ b/website/docs/cdktf/python/d/licensemanager_received_licenses.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_received_licenses"
+description: |-
+  Get information about a set of License Manager received licenses
+---
+
+
+
+# Data Source: aws_licensemanager_received_licenses
+
+This data source can be used to get a set of license ARNs matching a filter.
+
+## Example Usage
+
+The following example retrieves all license ARNs issued from the AWS Marketplace. Providing no filter returns all license ARNs for the entire account.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_licensemanager_received_licenses import DataAwsLicensemanagerReceivedLicenses
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLicensemanagerReceivedLicenses(self, "test",
+            filter=[DataAwsLicensemanagerReceivedLicensesFilter(
+                name="IssuerName",
+                values=["AWS/Marketplace"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/license-manager/latest/APIReference/API_ListReceivedLicenses.html#API_ListReceivedLicenses_RequestSyntax).
+  For example, if filtering using `ProductSKU`, use:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_licensemanager_received_licenses import DataAwsLicensemanagerReceivedLicenses, DataAwsLicensemanagerReceivedLicensesFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLicensemanagerReceivedLicenses(self, "selected",
+            filter=[DataAwsLicensemanagerReceivedLicensesFilter(
+                name="ProductSKU",
+                values=[""]
+            )
+            ]
+        )
+```
+
+* `values` - (Required) Set of values that are accepted for the given field.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - List of all the license ARNs found.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/location_geofence_collection.html.markdown b/website/docs/cdktf/python/d/location_geofence_collection.html.markdown new file mode 100644 index 00000000000..43d2e16e709 --- /dev/null +++ b/website/docs/cdktf/python/d/location_geofence_collection.html.markdown @@ -0,0 +1,53 @@ +---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_geofence_collection"
+description: |-
+  Retrieve information about a Location Service Geofence Collection.
+---
+
+
+
+# Data Source: aws_location_geofence_collection
+
+Retrieve information about a Location Service Geofence Collection.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_location_geofence_collection import DataAwsLocationGeofenceCollection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLocationGeofenceCollection(self, "example",
+            collection_name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `collection_name` - (Required) Name of the geofence collection.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `collection_arn` - ARN for the geofence collection resource. Used when you need to specify a resource across all of AWS.
+* `create_time` - Timestamp for when the geofence collection resource was created in ISO 8601 format.
+* `description` - Optional description of the geofence collection resource.
+* `kms_key_id` - Key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource.
+* `tags` - Key-value map of resource tags for the geofence collection.
+* `update_time` - Timestamp for when the geofence collection resource was last updated in ISO 8601 format.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/location_map.html.markdown b/website/docs/cdktf/python/d/location_map.html.markdown new file mode 100644 index 00000000000..630c228e10d --- /dev/null +++ b/website/docs/cdktf/python/d/location_map.html.markdown @@ -0,0 +1,50 @@ +---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_map"
+description: |-
+  Retrieve information about a Location Service Map.
+---
+
+
+
+# Data Source: aws_location_map
+
+Retrieve information about a Location Service Map.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_location_map import DataAwsLocationMap +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLocationMap(self, "example", + map_name="example" + ) +``` + +## Argument Reference + +* `map_name` - (Required) Name of the map resource. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `configuration` - List of configurations that specify the map tile style selected from a partner data provider. + * `style` - The map style selected from an available data provider. +* `create_time` - Timestamp for when the map resource was created in ISO 8601 format. +* `description` - Optional description for the map resource. +* `map_arn` - ARN for the map resource. +* `tags` - Key-value map of resource tags for the map. +* `update_time` - Timestamp for when the map resource was last updated in ISO 8601 format. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/location_place_index.html.markdown b/website/docs/cdktf/python/d/location_place_index.html.markdown new file mode 100644 index 00000000000..37e71629868 --- /dev/null +++ b/website/docs/cdktf/python/d/location_place_index.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_place_index" +description: |- + Retrieve information about a Location Service Place Index. +--- + + + +# Data Source: aws_location_place_index + +Retrieve information about a Location Service Place Index. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_location_place_index import DataAwsLocationPlaceIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLocationPlaceIndex(self, "example", + index_name="example" + ) +``` + +## Argument Reference + +* `index_name` - (Required) Name of the place index resource. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `create_time` - Timestamp for when the place index resource was created in ISO 8601 format. +* `data_source` - Data provider of geospatial data. +* `data_source_configuration` - List of configurations that specify data storage option for requesting Places. +* `description` - Optional description for the place index resource. +* `index_arn` - ARN for the place index resource. +* `tags` - Key-value map of resource tags for the place index. +* `update_time` - Timestamp for when the place index resource was last updated in ISO 8601 format. 
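+
+As a quick consumption sketch (illustrative rather than `cdktf convert` output; the stack and output names here are hypothetical), the exported attributes above can be surfaced as stack outputs:
+
+```python
+# Sketch: expose attributes of the place index looked up above as outputs.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_location_place_index import DataAwsLocationPlaceIndex
+class PlaceIndexOutputs(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsLocationPlaceIndex(self, "example",
+            index_name="example"
+        )
+        # index_arn and data_source are attributes documented in the list above.
+        TerraformOutput(self, "index_arn", value=example.index_arn)
+        TerraformOutput(self, "data_source", value=example.data_source)
+```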
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/location_route_calculator.html.markdown b/website/docs/cdktf/python/d/location_route_calculator.html.markdown new file mode 100644 index 00000000000..9028ba087ef --- /dev/null +++ b/website/docs/cdktf/python/d/location_route_calculator.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_route_calculator" +description: |- + Retrieve information about a Location Service Route Calculator. +--- + + + +# Data Source: aws_location_route_calculator + +Retrieve information about a Location Service Route Calculator. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_location_route_calculator import DataAwsLocationRouteCalculator +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLocationRouteCalculator(self, "example", + calculator_name="example" + ) +``` + +## Argument Reference + +* `calculator_name` - (Required) Name of the route calculator resource. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `calculator_arn` - ARN for the Route calculator resource. Use the ARN when you specify a resource across AWS. +* `create_time` - Timestamp for when the route calculator resource was created in ISO 8601 format. +* `data_source` - Data provider of traffic and road network data. +* `description` - Optional description of the route calculator resource. +* `tags` - Key-value map of resource tags for the route calculator. +* `update_time` - Timestamp for when the route calculator resource was last updated in ISO 8601 format. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/location_tracker.html.markdown b/website/docs/cdktf/python/d/location_tracker.html.markdown new file mode 100644 index 00000000000..e7e76e3cc43 --- /dev/null +++ b/website/docs/cdktf/python/d/location_tracker.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_tracker" +description: |- + Retrieve information about a Location Service Tracker. +--- + + + +# Data Source: aws_location_tracker + +Retrieve information about a Location Service Tracker. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_location_tracker import DataAwsLocationTracker +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsLocationTracker(self, "example", + tracker_name="example" + ) +``` + +## Argument Reference + +* `tracker_name` - (Required) Name of the tracker resource. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `create_time` - Timestamp for when the tracker resource was created in ISO 8601 format. +* `description` - Optional description for the tracker resource. 
+* `kms_key_id` - Key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource.
+* `position_filtering` - Position filtering method of the tracker resource.
+* `tags` - Key-value map of resource tags for the tracker.
+* `tracker_arn` - ARN for the tracker resource. Used when you need to specify a resource across all of AWS.
+* `update_time` - Timestamp for when the tracker resource was last updated in ISO 8601 format.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/location_tracker_association.html.markdown b/website/docs/cdktf/python/d/location_tracker_association.html.markdown new file mode 100644 index 00000000000..809daeb05e8 --- /dev/null +++ b/website/docs/cdktf/python/d/location_tracker_association.html.markdown @@ -0,0 +1,48 @@ +---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_tracker_association"
+description: |-
+  Retrieve information about a Location Service Tracker Association.
+---
+
+
+
+# Data Source: aws_location_tracker_association
+
+Retrieve information about a Location Service Tracker Association.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_location_tracker_association import DataAwsLocationTrackerAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLocationTrackerAssociation(self, "example",
+            consumer_arn="arn:aws:geo:region:account-id:geofence-collection/ExampleGeofenceCollectionConsumer",
+            tracker_name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `consumer_arn` - (Required) ARN of the geofence collection associated with the tracker resource.
+* `tracker_name` - (Required) Name of the tracker resource associated with a geofence collection.
+
+## Attribute Reference
+
+This data source exports no additional attributes.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/location_tracker_associations.html.markdown b/website/docs/cdktf/python/d/location_tracker_associations.html.markdown new file mode 100644 index 00000000000..3b108f37603 --- /dev/null +++ b/website/docs/cdktf/python/d/location_tracker_associations.html.markdown @@ -0,0 +1,48 @@ +---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_tracker_associations"
+description: |-
+  Retrieve information about Location Service Tracker Associations.
+---
+
+
+
+# Data Source: aws_location_tracker_associations
+
+Retrieve information about Location Service Tracker Associations.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_location_tracker_associations import DataAwsLocationTrackerAssociations
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsLocationTrackerAssociations(self, "example",
+            tracker_name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `tracker_name` - (Required) Name of the tracker resource associated with a geofence collection.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `consumer_arns` - List of geofence collection ARNs associated with the tracker resource.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/memorydb_acl.html.markdown b/website/docs/cdktf/python/d/memorydb_acl.html.markdown new file mode 100644 index 00000000000..b6d0b950351 --- /dev/null +++ b/website/docs/cdktf/python/d/memorydb_acl.html.markdown @@ -0,0 +1,50 @@ +---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_acl"
+description: |-
+  Provides information about a MemoryDB ACL.
+---
+
+
+
+# Data Source: aws_memorydb_acl
+
+Provides information about a MemoryDB ACL.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_memorydb_acl import DataAwsMemorydbAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMemorydbAcl(self, "example",
+            name="my-acl"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the ACL.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the ACL.
+* `arn` - ARN of the ACL.
+* `minimum_engine_version` - The minimum engine version supported by the ACL.
+* `tags` - Map of tags assigned to the ACL.
+* `user_names` - Set of MemoryDB user names included in this ACL.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/memorydb_cluster.html.markdown b/website/docs/cdktf/python/d/memorydb_cluster.html.markdown new file mode 100644 index 00000000000..6a2d81250b7 --- /dev/null +++ b/website/docs/cdktf/python/d/memorydb_cluster.html.markdown @@ -0,0 +1,82 @@ +---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_cluster"
+description: |-
+  Provides information about a MemoryDB Cluster.
+---
+
+
+
+# Data Source: aws_memorydb_cluster
+
+Provides information about a MemoryDB Cluster.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_memorydb_cluster import DataAwsMemorydbCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMemorydbCluster(self, "example",
+            name="my-cluster"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the cluster.
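+
+As a usage sketch (illustrative names, not `cdktf convert` output; it assumes the generated list-attribute accessor `get(0)`, and the attributes it reads are documented in the next section), the looked-up cluster can be surfaced for clients:
+
+```python
+# Sketch: look up a cluster by name and expose its ARN and the address of
+# its configuration endpoint. Stack and output names are illustrative.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_memorydb_cluster import DataAwsMemorydbCluster
+class ClusterEndpointOutputs(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsMemorydbCluster(self, "example",
+            name="my-cluster"
+        )
+        TerraformOutput(self, "cluster_arn", value=example.arn)
+        # cluster_endpoint is a list attribute; element 0 carries the
+        # configuration endpoint's address and port (see the list below).
+        TerraformOutput(self, "endpoint_address",
+            value=example.cluster_endpoint.get(0).address
+        )
+```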
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Same as `name`.
+* `arn` - ARN of the cluster.
+* `acl_name` - Name of the Access Control List associated with the cluster.
+* `auto_minor_version_upgrade` - True when the cluster allows automatic minor version upgrades.
+* `cluster_endpoint`
+    * `address` - DNS hostname of the cluster configuration endpoint.
+    * `port` - Port number that the cluster configuration endpoint is listening on.
+* `data_tiering` - True when data tiering is enabled.
+* `description` - Description for the cluster.
+* `engine_patch_version` - Patch version number of the Redis engine used by the cluster.
+* `engine_version` - Version number of the Redis engine used by the cluster.
+* `final_snapshot_name` - Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
+* `kms_key_arn` - ARN of the KMS key used to encrypt the cluster at rest.
+* `maintenance_window` - Weekly time range during which maintenance on the cluster is performed. Specify as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). Example: `sun:23:00-mon:01:30`.
+* `node_type` - Compute and memory capacity of the nodes in the cluster.
+* `num_replicas_per_shard` - The number of replicas to apply to each shard.
+* `num_shards` - Number of shards in the cluster.
+* `parameter_group_name` - The name of the parameter group associated with the cluster.
+* `port` - Port number on which each of the nodes accepts connections.
+* `security_group_ids` - Set of VPC Security Group IDs associated with this cluster.
+* `shards` - Set of shards in this cluster.
+    * `name` - Name of this shard.
+    * `num_nodes` - Number of individual nodes in this shard.
+    * `slots` - Keyspace for this shard. Example: `0-16383`.
+    * `nodes` - Set of nodes in this shard.
+        * `availability_zone` - The Availability Zone in which the node resides.
+        * `create_time` - The date and time when the node was created. Example: `2022-01-01T21:00:00Z`.
+        * `name` - Name of this node.
+        * `endpoint`
+            * `address` - DNS hostname of the node.
+            * `port` - Port number that this node is listening on.
+* `snapshot_retention_limit` - The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled.
+* `snapshot_window` - Daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
+* `sns_topic_arn` - ARN of the SNS topic to which cluster notifications are sent.
+* `subnet_group_name` - The name of the subnet group used for the cluster.
+* `tls_enabled` - When true, in-transit encryption is enabled for the cluster.
+* `tags` - Map of tags assigned to the cluster.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/memorydb_parameter_group.html.markdown b/website/docs/cdktf/python/d/memorydb_parameter_group.html.markdown new file mode 100644 index 00000000000..ec586b993f2 --- /dev/null +++ b/website/docs/cdktf/python/d/memorydb_parameter_group.html.markdown @@ -0,0 +1,53 @@ +---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_parameter_group"
+description: |-
+  Provides information about a MemoryDB Parameter Group.
+---
+
+
+
+# Data Source: aws_memorydb_parameter_group
+
+Provides information about a MemoryDB Parameter Group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_memorydb_parameter_group import DataAwsMemorydbParameterGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMemorydbParameterGroup(self, "example",
+            name="my-parameter-group"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the parameter group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the parameter group.
+* `arn` - ARN of the parameter group.
+* `description` - Description of the parameter group.
+* `family` - Engine version that the parameter group can be used with.
+* `parameter` - Set of user-defined MemoryDB parameters applied by the parameter group.
+    * `name` - Name of the parameter.
+    * `value` - Value of the parameter.
+* `tags` - Map of tags assigned to the parameter group.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/memorydb_snapshot.html.markdown b/website/docs/cdktf/python/d/memorydb_snapshot.html.markdown new file mode 100644 index 00000000000..f3861201858 --- /dev/null +++ b/website/docs/cdktf/python/d/memorydb_snapshot.html.markdown @@ -0,0 +1,65 @@ +---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_snapshot"
+description: |-
+  Provides information about a MemoryDB Snapshot.
+---
+
+
+
+# Data Source: aws_memorydb_snapshot
+
+Provides information about a MemoryDB Snapshot.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_memorydb_snapshot import DataAwsMemorydbSnapshot
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMemorydbSnapshot(self, "example",
+            name="my-snapshot"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the snapshot.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the snapshot.
+* `arn` - ARN of the snapshot.
+* `cluster_configuration` - The configuration of the cluster from which the snapshot was taken.
+    * `description` - Description for the cluster.
+    * `engine_version` - Version number of the Redis engine used by the cluster.
+    * `maintenance_window` - The weekly time range during which maintenance on the cluster is performed.
+    * `name` - Name of the cluster.
+    * `node_type` - Compute and memory capacity of the nodes in the cluster.
+    * `num_shards` - Number of shards in the cluster.
+    * `parameter_group_name` - Name of the parameter group associated with the cluster.
+    * `port` - Port number on which the cluster accepts connections.
+    * `snapshot_retention_limit` - Number of days for which MemoryDB retains automatic snapshots before deleting them.
+    * `snapshot_window` - The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of the shard.
+    * `subnet_group_name` - Name of the subnet group used by the cluster.
+    * `topic_arn` - ARN of the SNS topic to which cluster notifications are sent.
+    * `vpc_id` - The VPC in which the cluster exists.
+* `cluster_name` - Name of the MemoryDB cluster that this snapshot was taken from.
+* `kms_key_arn` - ARN of the KMS key used to encrypt the snapshot at rest.
+* `source` - Whether the snapshot is from an automatic backup (`automated`) or was created manually (`manual`).
+* `tags` - Map of tags assigned to the snapshot.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/memorydb_subnet_group.html.markdown b/website/docs/cdktf/python/d/memorydb_subnet_group.html.markdown new file mode 100644 index 00000000000..678c769c428 --- /dev/null +++ b/website/docs/cdktf/python/d/memorydb_subnet_group.html.markdown @@ -0,0 +1,51 @@ +---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_subnet_group"
+description: |-
+  Provides information about a MemoryDB Subnet Group.
+---
+
+
+
+# Data Source: aws_memorydb_subnet_group
+
+Provides information about a MemoryDB Subnet Group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_memorydb_subnet_group import DataAwsMemorydbSubnetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMemorydbSubnetGroup(self, "example",
+            name="my-subnet-group"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the subnet group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the subnet group.
+* `arn` - ARN of the subnet group.
+* `description` - Description of the subnet group.
+* `subnet_ids` - Set of VPC subnet IDs of the subnet group.
+* `vpc_id` - VPC in which the subnet group exists.
+* `tags` - Map of tags assigned to the subnet group.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/memorydb_user.html.markdown b/website/docs/cdktf/python/d/memorydb_user.html.markdown new file mode 100644 index 00000000000..3b47b16415c --- /dev/null +++ b/website/docs/cdktf/python/d/memorydb_user.html.markdown @@ -0,0 +1,53 @@ +---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_user"
+description: |-
+  Provides information about a MemoryDB User.
+---
+
+
+
+# Data Source: aws_memorydb_user
+
+Provides information about a MemoryDB User.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_memorydb_user import DataAwsMemorydbUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMemorydbUser(self, "example",
+            user_name="my-user"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `user_name` - (Required) Name of the user.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the user.
+* `access_string` - Access permissions string used for this user.
+* `arn` - ARN of the user.
+* `authentication_mode` - Denotes the user's authentication properties.
+    * `password_count` - The number of passwords belonging to the user.
+    * `type` - Whether the user requires a password to authenticate.
+* `minimum_engine_version` - The minimum engine version supported for the user.
+* `tags` - Map of tags assigned to the user.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/mq_broker.html.markdown b/website/docs/cdktf/python/d/mq_broker.html.markdown new file mode 100644 index 00000000000..bcb3c3676e7 --- /dev/null +++ b/website/docs/cdktf/python/d/mq_broker.html.markdown @@ -0,0 +1,59 @@ +---
+subcategory: "MQ"
+layout: "aws"
+page_title: "AWS: aws_mq_broker"
+description: |-
+  Provides an MQ Broker data source.
+---
+
+
+
+# Data Source: aws_mq_broker
+
+Provides information about an MQ Broker.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import VariableType, TerraformVariable, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_mq_broker import DataAwsMqBroker
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        broker_id = TerraformVariable(self, "broker_id",
+            default="",
+            type=VariableType.STRING
+        )
+        broker_name = TerraformVariable(self, "broker_name",
+            default="",
+            type=VariableType.STRING
+        )
+        DataAwsMqBroker(self, "by_id",
+            broker_id=broker_id.string_value
+        )
+        DataAwsMqBroker(self, "by_name",
+            broker_name=broker_name.string_value
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `broker_id` - (Optional) Unique ID of the MQ broker.
+* `broker_name` - (Optional) Unique name of the MQ broker.
+
+## Attribute Reference
+
+See the [`aws_mq_broker` resource](/docs/providers/aws/r/mq_broker.html) for details on the returned attributes.
+They are identical except for the user password, which is not returned when describing the broker.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/mq_broker_instance_type_offerings.markdown b/website/docs/cdktf/python/d/mq_broker_instance_type_offerings.markdown new file mode 100644 index 00000000000..959c2ae2f4d --- /dev/null +++ b/website/docs/cdktf/python/d/mq_broker_instance_type_offerings.markdown @@ -0,0 +1,73 @@ +---
+subcategory: "MQ"
+layout: "aws"
+page_title: "AWS: aws_mq_broker_instance_type_offerings"
+description: |-
+  Provides an MQ Broker Instance Type Offerings data source.
+---
+
+
+
+# Data Source: aws_mq_broker_instance_type_offerings
+
+Provides information about MQ Broker Instance Type Offerings.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_mq_broker_instance_type_offerings import DataAwsMqBrokerInstanceTypeOfferings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMqBrokerInstanceTypeOfferings(self, "all",
+            engine_type="ACTIVEMQ",
+            host_instance_type="mq.m5.large",
+            storage_type="EBS"
+        )
+        DataAwsMqBrokerInstanceTypeOfferings(self, "empty")
+        DataAwsMqBrokerInstanceTypeOfferings(self, "engine",
+            engine_type="ACTIVEMQ"
+        )
+        DataAwsMqBrokerInstanceTypeOfferings(self, "instance",
+            host_instance_type="mq.m5.large"
+        )
+        DataAwsMqBrokerInstanceTypeOfferings(self, "storage",
+            storage_type="EBS"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine_type` - (Optional) Filter response by engine type.
+* `host_instance_type` - (Optional) Filter response by host instance type.
+* `storage_type` - (Optional) Filter response by storage type.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `broker_instance_options` - Option for host instance type. See Broker Instance Options below.
+
+### Broker Instance Options
+
+* `availability_zones` - List of available AZs. See Availability Zones below.
+* `engine_type` - Broker's engine type.
+* `host_instance_type` - Broker's instance type.
+* `storage_type` - Broker's storage type.
+* `supported_deployment_modes` - The list of supported deployment modes.
+* `supported_engine_versions` - The list of supported engine versions.
+
+### Availability Zones
+
+* `name` - Name of the Availability Zone.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/msk_broker_nodes.html.markdown b/website/docs/cdktf/python/d/msk_broker_nodes.html.markdown new file mode 100644 index 00000000000..8ac869686f8 --- /dev/null +++ b/website/docs/cdktf/python/d/msk_broker_nodes.html.markdown @@ -0,0 +1,55 @@ +---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_broker_nodes"
+description: |-
+  Get information on Amazon MSK Broker Nodes
+---
+
+
+
+# Data Source: aws_msk_broker_nodes
+
+Get information on Amazon MSK Broker Nodes.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_msk_broker_nodes import DataAwsMskBrokerNodes
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMskBrokerNodes(self, "example",
+            cluster_arn=Token.as_string(aws_msk_cluster_example.arn)
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `cluster_arn` - (Required) ARN of the cluster the nodes belong to.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* [`node_info_list`](#nodes) - List of MSK Broker Nodes, sorted by broker ID in ascending order.
+
+### Nodes
+
+* `attached_eni_id` - Attached elastic network interface of the broker.
+* `broker_id` - ID of the broker.
+* `client_subnet` - Client subnet to which this broker node belongs.
+* `client_vpc_ip_address` - The client virtual private cloud (VPC) IP address.
+* `endpoints` - Set of endpoints for accessing the broker. This does not include ports.
+* `node_arn` - ARN of the node.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/msk_cluster.html.markdown b/website/docs/cdktf/python/d/msk_cluster.html.markdown new file mode 100644 index 00000000000..9a9717a264e --- /dev/null +++ b/website/docs/cdktf/python/d/msk_cluster.html.markdown @@ -0,0 +1,60 @@ +---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_cluster"
+description: |-
+  Get information on an Amazon MSK Cluster
+---
+
+
+
+# Data Source: aws_msk_cluster
+
+Get information on an Amazon MSK Cluster.
+
+-> **Note:** This data source returns information on _provisioned_ clusters.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_msk_cluster import DataAwsMskCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMskCluster(self, "example",
+            cluster_name="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `cluster_name` - (Required) Name of the cluster.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the MSK cluster.
+* `bootstrap_brokers` - Comma separated list of one or more hostname:port pairs of Kafka brokers suitable to bootstrap connectivity to the Kafka cluster. Contains a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `PLAINTEXT` or `TLS_PLAINTEXT`. The resource sorts values alphabetically. AWS may not always return all endpoints so this value is not guaranteed to be stable across applies.
+* `bootstrap_brokers_public_sasl_iam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.iam` is set to `true` and `broker_node_group_info.0.connectivity_info.0.public_access.0.type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_public_sasl_scram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196`.
This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.scram` is set to `true` and `broker_node_group_info.0.connectivity_info.0.public_access.0.type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_public_tls` - One or more DNS names (or IP addresses) and TLS port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `broker_node_group_info.0.connectivity_info.0.public_access.0.type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_sasl_iam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.iam` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_sasl_scram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.scram` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_tls` - One or more DNS names (or IP addresses) and TLS port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `kafka_version` - Apache Kafka version.
+* `number_of_broker_nodes` - Number of broker nodes in the cluster.
+* `tags` - Map of key-value pairs assigned to the cluster.
+* `zookeeper_connect_string` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster. The returned values are sorted alphabetically.
The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies.
+* `zookeeper_connect_string_tls` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster via TLS. The returned values are sorted alphabetically. The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/msk_configuration.html.markdown b/website/docs/cdktf/python/d/msk_configuration.html.markdown new file mode 100644 index 00000000000..08ffafc1b13 --- /dev/null +++ b/website/docs/cdktf/python/d/msk_configuration.html.markdown @@ -0,0 +1,50 @@ +---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_configuration"
+description: |-
+  Get information on an Amazon MSK Configuration
+---
+
+
+
+# Data Source: aws_msk_configuration
+
+Get information on an Amazon MSK Configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_msk_configuration import DataAwsMskConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMskConfiguration(self, "example",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the configuration.
+* `latest_revision` - Latest revision of the configuration.
+* `description` - Description of the configuration.
+* `kafka_versions` - List of Apache Kafka versions which can use this configuration.
+* `server_properties` - Contents of the server.properties file.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/msk_kafka_version.html.markdown b/website/docs/cdktf/python/d/msk_kafka_version.html.markdown new file mode 100644 index 00000000000..632125e1603 --- /dev/null +++ b/website/docs/cdktf/python/d/msk_kafka_version.html.markdown @@ -0,0 +1,50 @@ +---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_kafka_version"
+description: |-
+  Get information on an Amazon MSK Kafka Version
+---
+
+
+
+# Data Source: aws_msk_kafka_version
+
+Get information on an Amazon MSK Kafka Version.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_msk_kafka_version import DataAwsMskKafkaVersion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMskKafkaVersion(self, "example",
+            version="2.8.0"
+        )
+        DataAwsMskKafkaVersion(self, "preferred",
+            preferred_versions=["2.4.1.1", "2.4.1", "2.2.1"]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `preferred_versions` - (Optional) Ordered list of preferred Kafka versions. The first match in this list will be returned.
Either `preferred_versions` or `version` must be set.
+* `version` - (Optional) Version of MSK Kafka. For example, `2.4.1.1` or `2.2.1`. Either `preferred_versions` or `version` must be set.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `status` - Status of the MSK Kafka version, e.g. `ACTIVE` or `DEPRECATED`.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/mskconnect_connector.html.markdown b/website/docs/cdktf/python/d/mskconnect_connector.html.markdown new file mode 100644 index 00000000000..713fc272de0 --- /dev/null +++ b/website/docs/cdktf/python/d/mskconnect_connector.html.markdown @@ -0,0 +1,48 @@ +---
+subcategory: "Managed Streaming for Kafka Connect"
+layout: "aws"
+page_title: "AWS: aws_mskconnect_connector"
+description: |-
+  Get information on an Amazon MSK Connect Connector.
+---
+
+
+
+# Data Source: aws_mskconnect_connector
+
+Get information on an Amazon MSK Connect Connector.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_mskconnect_connector import DataAwsMskconnectConnector
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMskconnectConnector(self, "example",
+            name="example-mskconnector"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the connector.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the connector.
+* `description` - Summary description of the connector.
+* `version` - Current version of the connector.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/mskconnect_custom_plugin.html.markdown b/website/docs/cdktf/python/d/mskconnect_custom_plugin.html.markdown new file mode 100644 index 00000000000..c6354102e99 --- /dev/null +++ b/website/docs/cdktf/python/d/mskconnect_custom_plugin.html.markdown @@ -0,0 +1,49 @@ +---
+subcategory: "Managed Streaming for Kafka Connect"
+layout: "aws"
+page_title: "AWS: aws_mskconnect_custom_plugin"
+description: |-
+  Get information on an Amazon MSK Connect custom plugin.
+---
+
+
+
+# Data Source: aws_mskconnect_custom_plugin
+
+Get information on an Amazon MSK Connect custom plugin.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_mskconnect_custom_plugin import DataAwsMskconnectCustomPlugin
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsMskconnectCustomPlugin(self, "example",
+            name="example-debezium-1"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the custom plugin.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - the ARN of the custom plugin.
+* `description` - a summary description of the custom plugin. +* `latest_revision` - an ID of the latest successfully created revision of the custom plugin. +* `state` - the state of the custom plugin. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/mskconnect_worker_configuration.html.markdown b/website/docs/cdktf/python/d/mskconnect_worker_configuration.html.markdown new file mode 100644 index 00000000000..0fc5b1e6de7 --- /dev/null +++ b/website/docs/cdktf/python/d/mskconnect_worker_configuration.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Managed Streaming for Kafka Connect" +layout: "aws" +page_title: "AWS: aws_mskconnect_worker_configuration" +description: |- + Get information on an Amazon MSK Connect worker configuration. +--- + + + +# Data Source: aws_mskconnect_worker_configuration + +Get information on an Amazon MSK Connect Worker Configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_mskconnect_worker_configuration import DataAwsMskconnectWorkerConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsMskconnectWorkerConfiguration(self, "example", + name="example" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the worker configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - the ARN of the worker configuration. +* `description` - a summary description of the worker configuration. +* `latest_revision` - an ID of the latest successfully created revision of the worker configuration. +* `properties_file_content` - contents of connect-distributed.properties file. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/nat_gateway.html.markdown b/website/docs/cdktf/python/d/nat_gateway.html.markdown new file mode 100644 index 00000000000..dd19fdad9ff --- /dev/null +++ b/website/docs/cdktf/python/d/nat_gateway.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_nat_gateway" +description: |- + Provides details about a specific VPC NAT Gateway. +--- + + + +# Data Source: aws_nat_gateway + +Provides details about a specific VPC NAT Gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_nat_gateway import DataAwsNatGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNatGateway(self, "default", + subnet_id=public.id + ) +``` + +### With tags + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_nat_gateway import DataAwsNatGateway
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNatGateway(self, "default",
+            subnet_id=public.id,
+            tags={
+                "Name": "gw NAT"
+            }
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+NAT Gateways in the current Region. The given filters must match exactly one
+NAT Gateway whose data will be exported as attributes.
+
+* `id` - (Optional) ID of the specific NAT Gateway to retrieve.
+* `subnet_id` - (Optional) ID of subnet that the NAT Gateway resides in.
+* `vpc_id` - (Optional) ID of the VPC that the NAT Gateway resides in.
+* `state` - (Optional) State of the NAT Gateway (pending | failed | available | deleting | deleted).
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired NAT Gateway.
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNatGateways.html).
+* `values` - (Required) Set of values that are accepted for the given field.
+  A NAT Gateway will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+All of the argument attributes except the `filter` block are also exported as
+result attributes. This data source will complete the data by populating
+any fields that are not included in the configuration with the data for
+the selected NAT Gateway.
+
+* `allocation_id` - ID of the EIP allocated to the selected NAT Gateway.
+* `association_id` - The association ID of the Elastic IP address that's associated with the NAT Gateway. Only available when `connectivity_type` is `public`.
+* `connectivity_type` - Connectivity type of the NAT Gateway.
+* `network_interface_id` - The ID of the ENI allocated to the selected NAT Gateway.
+* `private_ip` - Private IP address of the selected NAT Gateway.
+* `public_ip` - Public IP (EIP) address of the selected NAT Gateway.
+* `secondary_allocation_ids` - Secondary allocation EIP IDs for the selected NAT Gateway.
+* `secondary_private_ip_address_count` - The number of secondary private IPv4 addresses assigned to the selected NAT Gateway.
+* `secondary_private_ip_addresses` - Secondary private IPv4 addresses assigned to the selected NAT Gateway.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/nat_gateways.html.markdown b/website/docs/cdktf/python/d/nat_gateways.html.markdown new file mode 100644 index 00000000000..f3bc345a09d --- /dev/null +++ b/website/docs/cdktf/python/d/nat_gateways.html.markdown @@ -0,0 +1,79 @@ +---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_nat_gateways"
+description: |-
+  Get information on Amazon NAT Gateways.
+---
+
+
+
+# Data Source: aws_nat_gateways
+
+This data source can be useful for getting back a list of NAT gateway IDs to be referenced elsewhere.
+
+## Example Usage
+
+The following returns all NAT gateways in a specified VPC that are marked as available.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformCount, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_nat_gateway import DataAwsNatGateway
+from imports.aws.data_aws_nat_gateways import DataAwsNatGateways, DataAwsNatGatewaysFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ngws = DataAwsNatGateways(self, "ngws",
+            filter=[DataAwsNatGatewaysFilter(
+                name="state",
+                values=["available"]
+            )
+            ],
+            vpc_id=vpc_id.string_value
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        ngw_count = TerraformCount.of(Token.as_number(Fn.length_of(ngws.ids)))
+        DataAwsNatGateway(self, "ngw",
+            id=Token.as_string(property_access(Fn.tolist(ngws.ids), [ngw_count.index])),
+            count=ngw_count
+        )
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+* `vpc_id` - (Optional) VPC ID that you want to filter from.
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired NAT Gateways.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNatGateways.html).
+* `values` - (Required) Set of values that are accepted for the given field.
+  A NAT Gateway will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the NAT gateway IDs found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/neptune_engine_version.markdown b/website/docs/cdktf/python/d/neptune_engine_version.markdown new file mode 100644 index 00000000000..ced153fe4db --- /dev/null +++ b/website/docs/cdktf/python/d/neptune_engine_version.markdown @@ -0,0 +1,55 @@ +---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_engine_version"
+description: |-
+  Information about a Neptune engine version.
+---
+
+
+
+# Data Source: aws_neptune_engine_version
+
+Information about a Neptune engine version.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_neptune_engine_version import DataAwsNeptuneEngineVersion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNeptuneEngineVersion(self, "test",
+            preferred_versions=["1.0.3.0", "1.0.2.2", "1.0.2.1"]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine` - (Optional) DB engine. (Default: `neptune`)
+* `parameter_group_family` - (Optional) Name of a specific DB parameter group family. An example parameter group family is `neptune1`.
+* `preferred_versions` - (Optional) Ordered list of preferred engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. If neither the `version` nor the `preferred_versions` argument is configured, the data source will return the default version for the engine.
+* `version` - (Optional) Version of the DB engine. For example, `1.0.1.0`, `1.0.2.2`, and `1.0.3.0`. If neither the `version` nor the `preferred_versions` argument is configured, the data source will return the default version for the engine.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `engine_description` - Description of the database engine.
+* `exportable_log_types` - Set of log types that the database engine has available for export to CloudWatch Logs.
+* `supported_timezones` - Set of the time zones supported by this engine.
+* `supports_log_exports_to_cloudwatch` - Indicates whether the engine version supports exporting the log types specified by `exportable_log_types` to CloudWatch Logs.
+* `supports_read_replica` - Indicates whether the database engine version supports read replicas.
+* `valid_upgrade_targets` - Set of engine versions that this database engine version can be upgraded to.
+* `version_description` - Description of the database engine version.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/neptune_orderable_db_instance.markdown b/website/docs/cdktf/python/d/neptune_orderable_db_instance.markdown
new file mode 100644
index 00000000000..f678d7ac75d
--- /dev/null
+++ b/website/docs/cdktf/python/d/neptune_orderable_db_instance.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_orderable_db_instance"
+description: |-
+  Information about Neptune orderable DB instances.
+---
+
+
+
+# Data Source: aws_neptune_orderable_db_instance
+
+Information about Neptune orderable DB instances.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_neptune_orderable_db_instance import DataAwsNeptuneOrderableDbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNeptuneOrderableDbInstance(self, "test",
+            engine_version="1.0.3.0",
+            preferred_instance_classes=["db.r5.large", "db.r4.large", "db.t3.medium"]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine` - (Optional) DB engine. (Default: `neptune`)
+* `engine_version` - (Optional) Version of the DB engine. For example, `1.0.1.0`, `1.0.1.2`, `1.0.2.2`, and `1.0.3.0`.
+* `instance_class` - (Optional) DB instance class. Examples of classes are `db.r5.large`, `db.r5.xlarge`, `db.r4.large`, `db.r5.4xlarge`, `db.r5.12xlarge`, `db.r4.xlarge`, and `db.t3.medium`.
+* `license_model` - (Optional) License model. (Default: `amazon-license`)
+* `preferred_instance_classes` - (Optional) Ordered list of preferred Neptune DB instance classes. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+* `vpc` - (Optional) Enable to show only VPC offerings.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availability_zones` - Availability zones where the instance is available.
+* `max_iops_per_db_instance` - Maximum total provisioned IOPS for a DB instance.
+* `max_iops_per_gib` - Maximum provisioned IOPS per GiB for a DB instance.
+* `max_storage_size` - Maximum storage size for a DB instance.
+* `min_iops_per_db_instance` - Minimum total provisioned IOPS for a DB instance.
+* `min_iops_per_gib` - Minimum provisioned IOPS per GiB for a DB instance.
+* `min_storage_size` - Minimum storage size for a DB instance.
+* `multi_az_capable` - Whether a DB instance is Multi-AZ capable.
+* `read_replica_capable` - Whether a DB instance can have a read replica.
+* `storage_type` - Storage type for a DB instance.
+* `supports_enhanced_monitoring` - Whether a DB instance supports Enhanced Monitoring at intervals from 1 to 60 seconds.
+* `supports_iam_database_authentication` - Whether a DB instance supports IAM database authentication.
+* `supports_iops` - Whether a DB instance supports provisioned IOPS.
+* `supports_performance_insights` - Whether a DB instance supports Performance Insights.
+* `supports_storage_encryption` - Whether a DB instance supports encrypted storage.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/network_acls.html.markdown b/website/docs/cdktf/python/d/network_acls.html.markdown
new file mode 100644
index 00000000000..7ef31201d32
--- /dev/null
+++ b/website/docs/cdktf/python/d/network_acls.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_acls"
+description: |-
+  Provides a list of network ACL ids for a VPC
+---
+
+
+
+# Data Source: aws_network_acls
+
+## Example Usage
+
+The following shows outputting all network ACL ids in a VPC.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_acls import DataAwsNetworkAcls
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsNetworkAcls(self, "example",
+            vpc_id=vpc_id.string_value
+        )
+        cdktf_terraform_output_example = TerraformOutput(self, "example_1",
+            value=example.ids
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        cdktf_terraform_output_example.override_logical_id("example")
+```
+
+The following example retrieves a list of all network ACL ids in a VPC with a custom
+tag of `Tier` set to a value of "Private".
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_acls import DataAwsNetworkAcls
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkAcls(self, "example",
+            tags={
+                "Tier": "Private"
+            },
+            vpc_id=vpc_id.string_value
+        )
+```
+
+The following example retrieves a network ACL id in a VPC that is associated
+with a specific subnet.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_acls import DataAwsNetworkAcls
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkAcls(self, "example",
+            filter=[DataAwsNetworkAclsFilter(
+                name="association.subnet-id",
+                values=[test.id]
+            )
+            ],
+            vpc_id=vpc_id.string_value
+        )
+```
+
+## Argument Reference
+
+* `vpc_id` - (Optional) VPC ID that you want to filter from.
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired network ACLs.
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkAcls.html).
+
+* `values` - (Required) Set of values that are accepted for the given field.
+  A network ACL will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the network ACL ids found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/network_interface.html.markdown b/website/docs/cdktf/python/d/network_interface.html.markdown
new file mode 100644
index 00000000000..550555c1c3a
--- /dev/null
+++ b/website/docs/cdktf/python/d/network_interface.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_interface"
+description: |-
+  Get information on a Network Interface resource.
+---
+
+
+
+# aws_network_interface
+
+Use this data source to get information about a Network Interface.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_interface import DataAwsNetworkInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkInterface(self, "bar",
+            id="eni-01234567"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Optional) Identifier for the network interface.
+* `filter` - (Optional) One or more name/value pairs to filter on. There are several valid keys; for a full reference, check out [describe-network-interfaces](https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-network-interfaces.html) in the AWS CLI reference.
+
+## Attribute Reference
+
+See the [Network Interface](/docs/providers/aws/r/network_interface.html) documentation for details on the returned attributes.
+
+Additionally, the following attributes are exported:
+
+* `arn` - ARN of the network interface.
+* `association` - Association information for an Elastic IP address (IPv4) associated with the network interface. See supported fields below.
+* `availability_zone` - Availability Zone.
+* `description` - Description of the network interface.
+* `interface_type` - Type of interface.
+* `ipv6_addresses` - List of IPv6 addresses to assign to the ENI.
+* `mac_address` - MAC address.
+* `owner_id` - AWS account ID of the owner of the network interface.
+* `private_dns_name` - Private DNS name.
+* `private_ip` - Private IPv4 address of the network interface within the subnet.
+* `private_ips` - Private IPv4 addresses associated with the network interface.
+* `requester_id` - ID of the entity that launched the instance on your behalf.
+* `security_groups` - List of security groups for the network interface.
+* `subnet_id` - ID of the subnet.
+* `outpost_arn` - ARN of the Outpost.
+* `tags` - Any tags assigned to the network interface.
+* `vpc_id` - ID of the VPC.
+
+### `association`
+
+* `allocation_id` - Allocation ID.
+* `association_id` - Association ID.
+* `carrier_ip` - Carrier IP address associated with the network interface. This attribute is only set when the network interface is in a subnet which is associated with a Wavelength Zone.
+* `customer_owned_ip` - Customer-owned IP address.
+* `ip_owner_id` - ID of the Elastic IP address owner.
+* `public_dns_name` - Public DNS name.
+* `public_ip` - Address of the Elastic IP address bound to the network interface.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/network_interfaces.html.markdown b/website/docs/cdktf/python/d/network_interfaces.html.markdown
new file mode 100644
index 00000000000..a05cb1ff7d6
--- /dev/null
+++ b/website/docs/cdktf/python/d/network_interfaces.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_interfaces"
+description: |-
+  Provides a list of network interface ids
+---
+
+
+
+# Data Source: aws_network_interfaces
+
+## Example Usage
+
+The following shows outputting all network interface ids in a region.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_interfaces import DataAwsNetworkInterfaces
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsNetworkInterfaces(self, "example")
+        cdktf_terraform_output_example = TerraformOutput(self, "example_1",
+            value=example.ids
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        cdktf_terraform_output_example.override_logical_id("example")
+```
+
+The following example retrieves a list of all network interface ids with a custom tag of `Name` set to a value of `test`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_interfaces import DataAwsNetworkInterfaces
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsNetworkInterfaces(self, "example",
+            tags={
+                "Name": "test"
+            }
+        )
+        TerraformOutput(self, "example1",
+            value=example.ids
+        )
+```
+
+The following example retrieves a list of network interface ids that are associated
+with a specific subnet.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_interfaces import DataAwsNetworkInterfaces
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsNetworkInterfaces(self, "example",
+            filter=[DataAwsNetworkInterfacesFilter(
+                name="subnet-id",
+                values=[test.id]
+            )
+            ]
+        )
+        cdktf_terraform_output_example = TerraformOutput(self, "example_1",
+            value=example.ids
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        cdktf_terraform_output_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired network interfaces.
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkInterfaces.html).
+
+* `values` - (Required) Set of values that are accepted for the given field.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the network interface ids found.
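+
+The `ids` attribute can be fed back into per-interface lookups. The following
+is a sketch (not part of the original documentation) that mirrors the
+`TerraformCount` pattern shown for `aws_nat_gateways` above; the stack and
+construct names are illustrative, and the bindings are the same generated
+provider imports used elsewhere on this page:
+
+```python
+# Sketch: look up each interface returned by aws_network_interfaces,
+# reusing the count/property_access pattern from the aws_nat_gateways example.
+from constructs import Construct
+from cdktf import Fn, Token, TerraformCount, property_access, TerraformStack
+from imports.aws.data_aws_network_interface import DataAwsNetworkInterface
+from imports.aws.data_aws_network_interfaces import DataAwsNetworkInterfaces
+class InterfaceLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Find all interfaces carrying the example tag from above.
+        enis = DataAwsNetworkInterfaces(self, "enis",
+            tags={
+                "Name": "test"
+            }
+        )
+        # One data source instance per id found; this loop is only known to
+        # Terraform, so it stays in the Terraform context.
+        eni_count = TerraformCount.of(Token.as_number(Fn.length_of(enis.ids)))
+        DataAwsNetworkInterface(self, "eni",
+            id=Token.as_string(property_access(Fn.tolist(enis.ids), [eni_count.index])),
+            count=eni_count
+        )
+```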
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/networkfirewall_firewall.html.markdown b/website/docs/cdktf/python/d/networkfirewall_firewall.html.markdown
new file mode 100644
index 00000000000..f822a1cde7b
--- /dev/null
+++ b/website/docs/cdktf/python/d/networkfirewall_firewall.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "Network Firewall"
+layout: "aws"
+page_title: "AWS: aws_networkfirewall_firewall"
+description: |-
+  Retrieve information about an AWS Network Firewall Firewall resource.
+---
+
+
+
+# Data Source: aws_networkfirewall_firewall
+
+Retrieve information about a firewall.
+
+## Example Usage
+
+### Find firewall by ARN
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkfirewall_firewall import DataAwsNetworkfirewallFirewall
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkfirewallFirewall(self, "example",
+            arn=arn
+        )
+```
+
+### Find firewall by Name
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkfirewall_firewall import DataAwsNetworkfirewallFirewall
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkfirewallFirewall(self, "example",
+            name="Test"
+        )
+```
+
+### Find firewall by ARN and Name
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkfirewall_firewall import DataAwsNetworkfirewallFirewall
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkfirewallFirewall(self, "example",
+            arn=arn,
+            name="Test"
+        )
+```
+
+## Argument Reference
+
+One or more of the following arguments are required:
+
+* `arn` - ARN of the firewall.
+* `name` - Descriptive name of the firewall.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the firewall.
+* `delete_protection` - Boolean flag indicating whether it is possible to delete the firewall.
+* `description` - Description of the firewall.
+* `encryption_configuration` - AWS Key Management Service (AWS KMS) encryption settings for the firewall.
+    * `key_id` - The ID of the AWS Key Management Service (AWS KMS) customer managed key.
+    * `type` - The type of the AWS Key Management Service (AWS KMS) key used by the firewall.
+* `firewall_policy_arn` - ARN of the VPC Firewall policy.
+* `firewall_policy_change_protection` - A boolean flag indicating whether it is possible to change the associated firewall policy.
+* `firewall_status` - Nested list of information about the current status of the firewall. + * `sync_states` - Set of subnets configured for use by the firewall. + * `attachment` - Nested list describing the attachment status of the firewall's association with a single VPC subnet. + * `endpoint_id` - The identifier of the firewall endpoint that AWS Network Firewall has instantiated in the subnet. You use this to identify the firewall endpoint in the VPC route tables, when you redirect the VPC traffic through the endpoint. + * `subnet_id` - The unique identifier of the subnet that you've specified to be used for a firewall endpoint. + * `availability_zone` - The Availability Zone where the subnet is configured. + * `capacity_usage_summary` - Aggregated count of all resources used by reference sets in a firewall. + * `cidrs` - Capacity usage of CIDR blocks used by IP set references in a firewall. + * `available_cidr_count` - Available number of CIDR blocks available for use by the IP set references in a firewall. + * `ip_set_references` - The list of IP set references used by a firewall. + * `resolved_cidr_count` - Total number of CIDR blocks used by the IP set references in a firewall. + * `utilized_cidr_count` - Number of CIDR blocks used by the IP set references in a firewall. + * `configuration_sync_state_summary` - Summary of sync states for all availability zones in which the firewall is configured. +* `id` - ARN that identifies the firewall. +* `name` - Descriptive name of the firewall. +* `subnet_change_protection` - A boolean flag indicating whether it is possible to change the associated subnet(s). +* `subnet_mapping` - Set of configuration blocks describing the public subnets. Each subnet must belong to a different Availability Zone in the VPC. AWS Network Firewall creates a firewall endpoint in each subnet. + * `subnet_id` - The unique identifier for the subnet. +* `tags` - Map of resource tags to associate with the resource. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `update_token` - String token used when updating a firewall. +* `vpc_id` - Unique identifier of the VPC where AWS Network Firewall should create the firewall. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkfirewall_firewall_policy.html.markdown b/website/docs/cdktf/python/d/networkfirewall_firewall_policy.html.markdown new file mode 100644 index 00000000000..bbaf6590a16 --- /dev/null +++ b/website/docs/cdktf/python/d/networkfirewall_firewall_policy.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_firewall_policy" +description: |- + Retrieve information about a firewall policy. +--- + + + +# Data Source: aws_networkfirewall_firewall_policy + +Retrieve information about a firewall policy. + +## Example Usage + +### Find firewall policy by name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_networkfirewall_firewall_policy import DataAwsNetworkfirewallFirewallPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkfirewallFirewallPolicy(self, "example",
+            name=firewall_policy_name.string_value
+        )
+```
+
+### Find firewall policy by ARN
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkfirewall_firewall_policy import DataAwsNetworkfirewallFirewallPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkfirewallFirewallPolicy(self, "example",
+            arn=firewall_policy_arn.string_value
+        )
+```
+
+### Find firewall policy by name and ARN
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkfirewall_firewall_policy import DataAwsNetworkfirewallFirewallPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkfirewallFirewallPolicy(self, "example",
+            arn=firewall_policy_arn.string_value,
+            name=firewall_policy_name.string_value
+        )
+```
+
+AWS Network Firewall does not allow multiple firewall policies with the same name to be created in an account. It is possible, however, to have multiple firewall policies available in a single account with identical `name` values but distinct `arn` values, e.g. firewall policies shared via a [Resource Access Manager (RAM) share][1]. In that case specifying `arn`, or `name` and `arn`, is recommended.
+
+~> **Note:** If there are multiple firewall policies in an account with the same `name`, and `arn` is not specified, the default behavior will return the firewall policy with that `name` that was created in the account.
+
+## Argument Reference
+
+One or more of the following arguments are required:
+
+* `arn` - ARN of the firewall policy.
+* `name` - Descriptive name of the firewall policy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `description` - Description of the firewall policy.
+* `firewall_policy` - The [policy][2] for the specified firewall policy.
+* `tags` - Key-value tags for the firewall policy.
+* `update_token` - Token used for optimistic locking.
+ +[1]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/ram_resource_share +[2]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/networkfirewall_firewall_policy + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkfirewall_resource_policy.html.markdown b/website/docs/cdktf/python/d/networkfirewall_resource_policy.html.markdown new file mode 100644 index 00000000000..3f4d428d8c3 --- /dev/null +++ b/website/docs/cdktf/python/d/networkfirewall_resource_policy.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_resource_policy" +description: |- + Retrieve information about a Network Firewall resource policy. +--- + + + +# Data Source: aws_networkfirewall_resource_policy + +Retrieve information about a Network Firewall resource policy. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkfirewall_resource_policy import DataAwsNetworkfirewallResourcePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkfirewallResourcePolicy(self, "example", + resource_arn=resource_policy_arn.string_value + ) +``` + +## Argument Reference + +* `resource_arn` - (Required) The Amazon Resource Name (ARN) that identifies the resource policy. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) that identifies the resource policy. +* `policy` - The [policy][1] for the resource. + +[1]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/networkfirewall_resource_policy + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkmanager_connection.html.markdown b/website/docs/cdktf/python/d/networkmanager_connection.html.markdown new file mode 100644 index 00000000000..b26ecfb44c9 --- /dev/null +++ b/website/docs/cdktf/python/d/networkmanager_connection.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_connection" +description: |- + Retrieve information about a connection. +--- + + + +# Data Source: aws_networkmanager_connection + +Retrieve information about a connection. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_connection import DataAwsNetworkmanagerConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerConnection(self, "example", + connection_id=connection_id.string_value, + global_network_id=global_network_id.string_value + ) +``` + +## Argument Reference + +* `global_network_id` - (Required) ID of the Global Network of the connection to retrieve. +* `connection_id` - (Required) ID of the specific connection to retrieve. 
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the connection.
+* `connected_device_id` - ID of the second device in the connection.
+* `connected_link_id` - ID of the link for the second device.
+* `description` - Description of the connection.
+* `device_id` - ID of the first device in the connection.
+* `link_id` - ID of the link for the first device.
+* `tags` - Key-value tags for the connection.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/networkmanager_connections.html.markdown b/website/docs/cdktf/python/d/networkmanager_connections.html.markdown
new file mode 100644
index 00000000000..7c7248a6403
--- /dev/null
+++ b/website/docs/cdktf/python/d/networkmanager_connections.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_connections"
+description: |-
+  Retrieve information about connections.
+---
+
+
+
+# Data Source: aws_networkmanager_connections
+
+Retrieve information about connections.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkmanager_connections import DataAwsNetworkmanagerConnections
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkmanagerConnections(self, "example",
+            global_network_id=global_network_id.string_value,
+            tags={
+                "Env": "test"
+            }
+        )
+```
+
+## Argument Reference
+
+* `device_id` - (Optional) ID of the device of the connections to retrieve.
+* `global_network_id` - (Required) ID of the Global Network of the connections to retrieve.
+* `tags` - (Optional) Restricts the list to the connections with these tags.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - IDs of the connections.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/networkmanager_core_network_policy_document.html.markdown b/website/docs/cdktf/python/d/networkmanager_core_network_policy_document.html.markdown
new file mode 100644
index 00000000000..a01f749d3f4
--- /dev/null
+++ b/website/docs/cdktf/python/d/networkmanager_core_network_policy_document.html.markdown
@@ -0,0 +1,268 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_core_network_policy_document"
+description: |-
+  Generates a Core Network policy document in JSON format
+---
+
+
+
+# Data Source: aws_networkmanager_core_network_policy_document
+
+Generates a Core Network policy document in JSON format for use with resources that expect core network policy documents such as `awscc_networkmanager_core_network`. It follows the API definition from the [core-network-policy documentation](https://docs.aws.amazon.com/vpc/latest/cloudwan/cloudwan-policies-json.html).
+
+Using this data source to generate policy documents is *optional*. It is also valid to use literal JSON strings in your configuration or to use the `file` interpolation function to read a raw JSON policy document from a file.
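+
+As a sketch of the file-based alternative (not part of the original
+documentation), a policy kept as plain JSON could be read with Terraform's
+`file` function and handed to the consuming resource. Following the docs'
+convention of referencing variables defined elsewhere, and with the `awscc`
+import path, `policy_document` attribute name, and file name below all
+assumed for illustration:
+
+```python
+from constructs import Construct
+from cdktf import Fn, TerraformStack
+# Assumed awscc provider bindings, generated by `cdktf get`.
+from imports.awscc.networkmanager_core_network import NetworkmanagerCoreNetwork
+class PolicyFromFile(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Read a hypothetical core-network-policy.json instead of rendering
+        # the document with this data source.
+        NetworkmanagerCoreNetwork(self, "example",
+            global_network_id=global_network_id.string_value,
+            policy_document=Fn.file("core-network-policy.json")
+        )
+```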
+ +-> For more information about building AWS Core Network policy documents with Terraform, see the [Using AWS & AWSCC Provider Together Guide](/docs/providers/aws/guides/using-aws-with-awscc-provider.html) + +## Example Usage + +### Basic Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_core_network_policy_document import DataAwsNetworkmanagerCoreNetworkPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerCoreNetworkPolicyDocument(self, "test", + attachment_policies=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPolicies( + action=DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesAction( + association_method="constant", + segment="shared" + ), + condition_logic="or", + conditions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesConditions( + key="segment", + operator="equals", + type="tag-value", + value="shared" + ) + ], + rule_number=100 + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPolicies( + action=DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesAction( + association_method="constant", + segment="prod" + ), + condition_logic="or", + conditions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesConditions( + key="segment", + operator="equals", + type="tag-value", + value="prod" + ) + ], + rule_number=200 + ) + ], + core_network_configuration=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfiguration( + asn_ranges=["64512-64555"], + edge_locations=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + asn=Token.as_string(64512), + location="us-east-1" + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + asn=Token.as_string(64513), + location="eu-central-1" + ) + ], + vpn_ecmp_support=False + ) + ], + segment_actions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions( + action="share", + mode="attachment-route", + segment="shared", + share_with=["*"] + ) + ], + segments=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + description="Segment for shared services", + name="shared", + require_attachment_acceptance=True + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + description="Segment for prod services", + name="prod", + require_attachment_acceptance=True + ) + ] + ) +``` + +`data.aws_networkmanager_core_network_policy_document.test.json` will evaluate to: + +```json +{ + "version": "2021.12", + "core-network-configuration": { + "asn-ranges": [ + "64512-64555" + ], + "vpn-ecmp-support": false, + "edge-locations": [ + { + "location": "us-east-1", + "asn": 64512 + }, + { + "location": "eu-central-1", + "asn": 64513 + } + ] + }, + "segments": [ + { + "name": "shared", + "description": "Segment for shared services", + "require-attachment-acceptance": true + }, + { + "name": "prod", + "description": "Segment for prod services", + "require-attachment-acceptance": true + } + ], + "attachment-policies": [ + { + "rule-number": 100, + "action": { + "association-method": "constant", + "segment": "shared" + }, + "conditions": [ + { + "type": "tag-value", + "operator": "equals", + "key": "segment", + "value": "shared" + } 
+      ],
+      "condition-logic": "or"
+    },
+    {
+      "rule-number": 200,
+      "action": {
+        "association-method": "constant",
+        "segment": "prod"
+      },
+      "conditions": [
+        {
+          "type": "tag-value",
+          "operator": "equals",
+          "key": "segment",
+          "value": "prod"
+        }
+      ],
+      "condition-logic": "or"
+    }
+  ],
+  "segment-actions": [
+    {
+      "action": "share",
+      "mode": "attachment-route",
+      "segment": "shared",
+      "share-with": "*"
+    }
+  ]
+}
+```
+
+## Argument Reference
+
+The following arguments are available:
+
+* `attachment_policies` (Optional) - In a core network, all attachments use the `attachment_policies` block to map an attachment to a segment. Instead of manually associating a segment to each attachment, attachments use tags, and then the tags are used to associate the attachment to the specified segment. Detailed below.
+* `core_network_configuration` (Required) - The core network configuration section defines the Regions where a core network should operate. For AWS Regions that are defined in the policy, the core network creates a Core Network Edge where you can connect attachments. After it's created, each Core Network Edge is peered with every other defined Region and is configured with consistent segment and routing across all Regions. Regions cannot be removed until the associated attachments are deleted. Detailed below.
+* `segments` (Required) - Block argument that defines the different segments in the network. Here you can provide descriptions, change defaults, and provide explicit Regional operational and route filters. The names defined for each segment are used in the `segment_actions` and `attachment_policies` section. Each segment is created, and operates, as a completely separated routing domain. By default, attachments can only communicate with other attachments in the same segment. Detailed below.
+* `segment_actions` (Optional) - Block argument that defines how routing works between segments. By default, attachments can only communicate with other attachments in the same segment. Detailed below.
+
+### `attachment_policies`
+
+The following arguments are available:
+
+* `action` (Required) - Action to take when a condition is true. Detailed below.
+* `condition_logic` (Optional) - Valid values include `and` or `or`. This is a mandatory parameter only if you have more than one condition. The `condition_logic` applies to all of the conditions for a rule, which also means nested conditions of `and` or `or` are not supported. Use `or` if you want to associate the attachment with the segment by either the segment name or attachment tag value, or by the chosen conditions. Use `and` if you want to associate the attachment with the segment by either the segment name or attachment tag value and by the chosen conditions. Detailed below.
+* `conditions` (Required) - A block argument. Detailed below.
+* `description` (Optional) - A user-defined description that further helps identify the rule.
+* `rule_number` (Required) - An integer from `1` to `65535` indicating the rule's order number. Rules are processed in order from the lowest numbered rule to the highest. Rules stop processing when a rule is matched. It's important to make sure that you number your rules in the exact order that you want them processed.
+
+### `action`
+
+The following arguments are available:
+
+* `association_method` (Required) - Defines how a segment is mapped. Values can be `constant` or `tag`. `constant` statically defines the segment to associate the attachment to. `tag` uses the value of a tag to dynamically try to map to a segment.
+* `segment` (Optional) - Name of the `segment` to share as defined in the `segments` section. This is used only when the `association_method` is `constant`.
+* `tag_value_of_key` (Optional) - Maps the attachment to the value of a known key. This is used when the `association_method` is `tag`. For example, a `tag` of `stage = "test"` will map to a segment named `test`. The value must exactly match the name of a segment. This allows you to have many segments, but use only a single rule without having to define multiple nearly identical conditions. This prevents creating many similar conditions that all use the same keys to map to segments.
+* `require_acceptance` (Optional) - Determines if this mapping should override the segment value for `require_attachment_acceptance`. You can only set this to `true`, indicating that this setting applies only to segments that have `require_attachment_acceptance` set to `false`. If the segment already has the default `require_attachment_acceptance`, you can set this to inherit the segment's acceptance value.
+
+### `conditions`
+
+The `conditions` block has four arguments: `type`, `operator`, `key`, and `value`. Setting or omitting each argument requires a combination of logic based on the value set to `type`. For that reason, please refer to the [AWS documentation](https://docs.aws.amazon.com/vpc/latest/cloudwan/cloudwan-policies-json.html) for complete usage docs.
+
+The following arguments are available:
+
+* `type` (Required) - Valid values include: `account-id`, `any`, `tag-value`, `tag-exists`, `resource-id`, `region`, `attachment-type`.
+* `operator` (Optional) - Valid values include: `equals`, `not-equals`, `contains`, `begins-with`.
+* `key` (Optional) - String value.
+* `value` (Optional) - String value.
+
+### `core_network_configuration`
+
+The following arguments are available:
+
+* `asn_ranges` (Required) - List of strings containing Autonomous System Numbers (ASNs) to assign to Core Network Edges. By default, the core network automatically assigns an ASN for each Core Network Edge but you can optionally define the ASN in the edge-locations for each Region. The ASN uses an array of integer ranges only from `64512` to `65534` and `4200000000` to `4294967294` expressed as a string like `"64512-65534"`. No other ASN ranges can be used.
+* `inside_cidr_blocks` (Optional) - The Classless Inter-Domain Routing (CIDR) block range used to create tunnels for AWS Transit Gateway Connect. The format is standard AWS CIDR range (for example, `10.0.1.0/24`). You can optionally define the inside CIDR in the Core Network Edges section per Region. The minimum is a `/24` for IPv4 or `/64` for IPv6. You can provide multiple `/24` subnets or a larger CIDR range. If you define a larger CIDR range, new Core Network Edges will be automatically assigned `/24` and `/64` subnets from the larger CIDR. An inside CIDR block is required for attaching Connect attachments to a Core Network Edge.
+* `vpn_ecmp_support` (Optional) - Indicates whether the core network forwards traffic over multiple equal-cost routes using VPN. The value can be either `true` or `false`. The default is `true`.
+* `edge_locations` (Required) - A block value of AWS Region locations where you're creating Core Network Edges. Detailed below.
+
+### `edge_locations`
+
+The following arguments are available:
+
+* `locations` (Required) - An AWS Region code, such as `us-east-1`.
+* `asn` (Optional) - ASN of the Core Network Edge in an AWS Region. By default, the ASN will be a single integer automatically assigned from `asn_ranges`.
+* `inside_cidr_blocks` (Optional) - The local CIDR blocks for this Core Network Edge for AWS Transit Gateway Connect attachments. By default, this CIDR block will be one or more optional IPv4 and IPv6 CIDR prefixes auto-assigned from `inside_cidr_blocks`.
+
+### `segments`
+
+The following arguments are available:
+
+* `allow_filter` (Optional) - List of strings of segment names that explicitly allows only routes from the segments that are listed in the array. Use the `allow_filter` setting if a segment has a well-defined group of other segments that connectivity should be restricted to. It is applied after routes have been shared in `segment_actions`. If a segment is listed in `allow_filter`, attachments between the two segments will have routes if they are also shared in the segment-actions area. For example, you might have a segment named "video-producer" that should only ever share routes with a "video-distributor" segment, no matter how many other share statements are created.
+* `deny_filter` (Optional) - An array of segments that disallows routes from the segments listed in the array. It is applied only after routes have been shared in `segment_actions`. If a segment is listed in the `deny_filter`, attachments between the two segments will never have routes shared across them. For example, you might have a "financial" payment segment that should never share routes with a "development" segment, regardless of how many other share statements are created. Adding the payments segment to the deny-filter parameter prevents any shared routes from being created with other segments.
+* `description` (Optional) - A user-defined string describing the segment.
+* `edge_locations` (Optional) - A list of strings of AWS Region names. Allows you to define a more restrictive set of Regions for a segment. The edge location must be a subset of the locations that are defined for `edge_locations` in the `core_network_configuration`.
+* `isolate_attachments` (Optional) - This Boolean setting determines whether attachments on the same segment can communicate with each other. If set to `true`, the only routes available will be either shared routes through the share actions, which are attachments in other segments, or static routes. The default value is `false`. For example, you might have a segment dedicated to "development" that should never allow VPCs to talk to each other, even if they're on the same segment. In this example, you would keep the default parameter of `false`.
+* `name` (Required) - Unique name for a segment. The name is a string used in other parts of the policy document, as well as in the console for metrics and other reference points. Valid characters are a–z, and 0–9.
+* `require_attachment_acceptance` (Optional) - This Boolean setting determines whether attachment requests are automatically approved or require acceptance. The default is `true`, indicating that attachment requests require acceptance. For example, you might use this setting to allow a "sandbox" segment to allow any attachment request so that a core network or attachment administrator does not need to review and approve attachment requests. In this example, `require_attachment_acceptance` is set to `false`.
+
+### `segment_actions`
+
+`segment_actions` have different outcomes based on their `action` argument value.
There are two valid values for `action`: `create-route` and `share`. The behavior of the arguments below changes depending on the `action` you specify. For more details on their use, see the [AWS documentation](https://docs.aws.amazon.com/vpc/latest/cloudwan/cloudwan-policies-json.html#cloudwan-segment-actions-json).
+
+~> **NOTE:** `share_with` and `share_with_except` break from the AWS API specification. The API has one argument, `share-with`, which can accept three input types as valid (`"*"`, `[""]`, or `{ except: [""]}`). To emulate this behavior, `share_with` is always a list that can accept `["*"]` as valid for `"*"`, and `share_with_except` is a list that can accept `[""]` as valid for `{ except: [""]}`. You may only specify one of: `share_with` or `share_with_except`.
+
+The following arguments are available:
+
+* `action` (Required) - Action to take for the chosen segment. Valid values: `create-route` or `share`.
+* `description` (Optional) - A user-defined string describing the segment action.
+* `destination_cidr_blocks` (Optional) - List of strings containing CIDRs. You can define the IPv4 and IPv6 CIDR notation for each AWS Region. For example, `10.1.0.0/16` or `2001:db8::/56`. This is an array of CIDR notation strings.
+* `destinations` (Optional) - A list of strings. Valid values include `["blackhole"]` or a list of attachment ids.
+* `mode` (Optional) - String. This mode places the attachment and return routes in each of the `share_with` segments. Valid values include: `attachment-route`.
+* `segment` (Optional) - Name of the segment.
+* `share_with` (Optional) - A list of strings to share with. Valid values include: `["*"]` or a list of segment names (`[""]`).
+* `share_with_except` (Optional) - A set subtraction of segments to not share with.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `json` - Standard JSON policy document rendered based on the arguments above.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/networkmanager_device.html.markdown b/website/docs/cdktf/python/d/networkmanager_device.html.markdown
new file mode 100644
index 00000000000..e186728e8ce
--- /dev/null
+++ b/website/docs/cdktf/python/d/networkmanager_device.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_device"
+description: |-
+  Retrieve information about a device.
+---
+
+
+
+# Data Source: aws_networkmanager_device
+
+Retrieve information about a device.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkmanager_device import DataAwsNetworkmanagerDevice
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkmanagerDevice(self, "example",
+            device_id=device_id.string_value,
+            global_network_id=global_network_id.string_value
+        )
+```
+
+## Argument Reference
+
+* `device_id` - (Required) ID of the device.
+* `global_network_id` - (Required) ID of the global network.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the device.
+* `aws_location` - AWS location of the device. Documented below. +* `description` - Description of the device. +* `location` - Location of the device. Documented below. +* `model` - Model of device. +* `serial_number` - Serial number of the device. +* `site_id` - ID of the site. +* `tags` - Key-value tags for the device. +* `type` - Type of device. +* `vendor` - Vendor of the device. + +The `aws_location` object supports the following: + +* `subnet_arn` - ARN of the subnet that the device is located in. +* `zone` - Zone that the device is located in. + +The `location` object supports the following: + +* `address` - Physical address. +* `latitude` - Latitude. +* `longitude` - Longitude. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkmanager_devices.html.markdown b/website/docs/cdktf/python/d/networkmanager_devices.html.markdown new file mode 100644 index 00000000000..f7df61c8468 --- /dev/null +++ b/website/docs/cdktf/python/d/networkmanager_devices.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_devices" +description: |- + Retrieve information about devices. +--- + + + +# Data Source: aws_networkmanager_devices + +Retrieve information about devices. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_devices import DataAwsNetworkmanagerDevices +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerDevices(self, "example", + global_network_id=global_network_id.string_value, + tags={ + "Env": "test" + } + ) +``` + +## Argument Reference + +* `global_network_id` - (Required) ID of the Global Network of the devices to retrieve. +* `site_id` - (Optional) ID of the site of the devices to retrieve. +* `tags` - (Optional) Restricts the list to the devices with these tags. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - IDs of the devices. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkmanager_global_network.html.markdown b/website/docs/cdktf/python/d/networkmanager_global_network.html.markdown new file mode 100644 index 00000000000..60447b0ea99 --- /dev/null +++ b/website/docs/cdktf/python/d/networkmanager_global_network.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_global_network" +description: |- + Retrieve information about a global network. +--- + + + +# Data Source: aws_networkmanager_global_network + +Retrieve information about a global network. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_networkmanager_global_network import DataAwsNetworkmanagerGlobalNetwork +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerGlobalNetwork(self, "example", + global_network_id=global_network_id.string_value + ) +``` + +## Argument Reference + +* `global_network_id` - (Required) ID of the specific global network to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the global network. +* `description` - Description of the global network. +* `tags` - Map of resource tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkmanager_global_networks.html.markdown b/website/docs/cdktf/python/d/networkmanager_global_networks.html.markdown new file mode 100644 index 00000000000..2e51569354c --- /dev/null +++ b/website/docs/cdktf/python/d/networkmanager_global_networks.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_global_networks" +description: |- + Retrieve information about global networks. +--- + + + +# Data Source: aws_networkmanager_global_networks + +Retrieve information about global networks. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_global_networks import DataAwsNetworkmanagerGlobalNetworks +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerGlobalNetworks(self, "example", + tags={ + "Env": "test" + } + ) +``` + +## Argument Reference + +* `tags` - (Optional) Restricts the list to the global networks with these tags. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - IDs of the global networks. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkmanager_link.html.markdown b/website/docs/cdktf/python/d/networkmanager_link.html.markdown new file mode 100644 index 00000000000..69e6d54da24 --- /dev/null +++ b/website/docs/cdktf/python/d/networkmanager_link.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_link" +description: |- + Retrieve information about a link. +--- + + + +# Data Source: aws_networkmanager_link + +Retrieve information about a link. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_link import DataAwsNetworkmanagerLink +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerLink(self, "example", + global_network_id=global_network_id.string_value, + link_id=link_id.string_value + ) +``` + +## Argument Reference + +* `global_network_id` - (Required) ID of the Global Network of the link to retrieve. +* `link_id` - (Required) ID of the specific link to retrieve. 
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the link.
+* `bandwidth` - Upload speed and download speed of the link, as documented below.
+* `description` - Description of the link.
+* `provider_name` - Provider of the link.
+* `site_id` - ID of the site.
+* `tags` - Key-value tags for the link.
+* `type` - Type of the link.
+
+The `bandwidth` object supports the following:
+
+* `download_speed` - Download speed in Mbps.
+* `upload_speed` - Upload speed in Mbps.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/networkmanager_links.html.markdown b/website/docs/cdktf/python/d/networkmanager_links.html.markdown
new file mode 100644
index 00000000000..ace274eb728
--- /dev/null
+++ b/website/docs/cdktf/python/d/networkmanager_links.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_links"
+description: |-
+  Retrieve information about links.
+---
+
+
+# Data Source: aws_networkmanager_links
+
+Retrieve information about links.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_networkmanager_links import DataAwsNetworkmanagerLinks
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsNetworkmanagerLinks(self, "example",
+            global_network_id=global_network_id.string_value,
+            tags={
+                "Env": "test"
+            }
+        )
+```
+
+## Argument Reference
+
+* `global_network_id` - (Required) ID of the Global Network of the links to retrieve.
+* `provider_name` - (Optional) Link provider to retrieve.
+* `site_id` - (Optional) ID of the site of the links to retrieve.
+* `tags` - (Optional) Restricts the list to the links with these tags.
+* `type` - (Optional) Link type to retrieve.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - IDs of the links.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/networkmanager_site.html.markdown b/website/docs/cdktf/python/d/networkmanager_site.html.markdown
new file mode 100644
index 00000000000..10dda997707
--- /dev/null
+++ b/website/docs/cdktf/python/d/networkmanager_site.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_site"
+description: |-
+  Retrieve information about a site.
+---
+
+
+# Data Source: aws_networkmanager_site
+
+Retrieve information about a site.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_networkmanager_site import DataAwsNetworkmanagerSite +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerSite(self, "example", + global_network_id=global_network_id.string_value, + site_id=site_id.string_value + ) +``` + +## Argument Reference + +* `global_network_id` - (Required) ID of the Global Network of the site to retrieve. +* `site_id` - (Required) ID of the specific site to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the site. +* `description` - Description of the site. +* `location` - Site location as documented below. +* `tags` - Key-value tags for the Site. + +The `location` object supports the following: + +* `address` - Address of the location. +* `latitude` - Latitude of the location. +* `longitude` - Longitude of the location. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/networkmanager_sites.html.markdown b/website/docs/cdktf/python/d/networkmanager_sites.html.markdown new file mode 100644 index 00000000000..7608813a76c --- /dev/null +++ b/website/docs/cdktf/python/d/networkmanager_sites.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_sites" +description: |- + Retrieve information about sites. +--- + + + +# Data Source: aws_networkmanager_sites + +Retrieve information about sites. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_sites import DataAwsNetworkmanagerSites +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsNetworkmanagerSites(self, "example", + global_network_id=global_network_id.string_value, + tags={ + "Env": "test" + } + ) +``` + +## Argument Reference + +* `global_network_id` - (Required) ID of the Global Network of the sites to retrieve. +* `tags` - (Optional) Restricts the list to the sites with these tags. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - IDs of the sites. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/oam_link.html.markdown b/website/docs/cdktf/python/d/oam_link.html.markdown new file mode 100644 index 00000000000..ed6b23e939e --- /dev/null +++ b/website/docs/cdktf/python/d/oam_link.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_link" +description: |- + Terraform data source for managing an AWS CloudWatch Observability Access Manager Link. +--- + + + +# Data Source: aws_oam_link + +Terraform data source for managing an AWS CloudWatch Observability Access Manager Link. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_oam_link import DataAwsOamLink
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOamLink(self, "example",
+            link_identifier="arn:aws:oam:us-west-1:111111111111:link/abcd1234-a123-456a-a12b-a123b456c789"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `link_identifier` - (Required) ARN of the link.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the link.
+* `label` - Label that is assigned to this link.
+* `label_template` - Human-readable name used to identify this source account when you are viewing data from it in the monitoring account.
+* `link_id` - ID string that AWS generated as part of the link ARN.
+* `resource_types` - Types of data that the source account shares with the monitoring account.
+* `sink_arn` - ARN of the sink that is used for this link.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/oam_links.html.markdown b/website/docs/cdktf/python/d/oam_links.html.markdown
new file mode 100644
index 00000000000..252f05468d2
--- /dev/null
+++ b/website/docs/cdktf/python/d/oam_links.html.markdown
@@ -0,0 +1,40 @@
+---
+subcategory: "CloudWatch Observability Access Manager"
+layout: "aws"
+page_title: "AWS: aws_oam_links"
+description: |-
+  Terraform data source for managing AWS CloudWatch Observability Access Manager Links.
+---
+
+
+# Data Source: aws_oam_links
+
+Terraform data source for managing AWS CloudWatch Observability Access Manager Links.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_oam_links import DataAwsOamLinks
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOamLinks(self, "example")
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of ARNs of the Links.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/oam_sink.html.markdown b/website/docs/cdktf/python/d/oam_sink.html.markdown
new file mode 100644
index 00000000000..2d002ae7ef7
--- /dev/null
+++ b/website/docs/cdktf/python/d/oam_sink.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "CloudWatch Observability Access Manager"
+layout: "aws"
+page_title: "AWS: aws_oam_sink"
+description: |-
+  Terraform data source for managing an AWS CloudWatch Observability Access Manager Sink.
+---
+
+
+# Data Source: aws_oam_sink
+
+Terraform data source for managing an AWS CloudWatch Observability Access Manager Sink.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_oam_sink import DataAwsOamSink
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOamSink(self, "example",
+            sink_identifier="arn:aws:oam:us-west-1:111111111111:sink/abcd1234-a123-456a-a12b-a123b456c789"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `sink_identifier` - (Required) ARN of the sink.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the sink.
+* `name` - Name of the sink.
+* `sink_id` - Random ID string that AWS generated as part of the sink ARN.
+* `tags` - Tags assigned to the sink.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/oam_sinks.html.markdown b/website/docs/cdktf/python/d/oam_sinks.html.markdown
new file mode 100644
index 00000000000..18b674f8bf8
--- /dev/null
+++ b/website/docs/cdktf/python/d/oam_sinks.html.markdown
@@ -0,0 +1,40 @@
+---
+subcategory: "CloudWatch Observability Access Manager"
+layout: "aws"
+page_title: "AWS: aws_oam_sinks"
+description: |-
+  Terraform data source for managing AWS CloudWatch Observability Access Manager Sinks.
+---
+
+
+# Data Source: aws_oam_sinks
+
+Terraform data source for managing AWS CloudWatch Observability Access Manager Sinks.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_oam_sinks import DataAwsOamSinks
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOamSinks(self, "example")
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of ARNs of the Sinks.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/opensearch_domain.html.markdown b/website/docs/cdktf/python/d/opensearch_domain.html.markdown
new file mode 100644
index 00000000000..5d7904d6703
--- /dev/null
+++ b/website/docs/cdktf/python/d/opensearch_domain.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_domain"
+description: |-
+  Get information on an OpenSearch Domain resource.
+---
+
+
+# Data Source: aws_opensearch_domain
+
+Use this data source to get information about an OpenSearch Domain.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_opensearch_domain import DataAwsOpensearchDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOpensearchDomain(self, "my_domain",
+            domain_name="my-domain-name"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `domain_name` - (Required) Name of the domain.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `access_policies` - Policy document attached to the domain.
+* `advanced_options` - Key-value string pairs to specify advanced configuration options.
+* `advanced_security_options` - Status of the OpenSearch domain's advanced security options. The block consists of the following attributes:
+    * `enabled` - Whether advanced security is enabled.
+    * `internal_user_database_enabled` - Whether the internal user database is enabled.
+* `arn` - ARN of the domain.
+* `auto_tune_options` - Configuration of the Auto-Tune options of the domain.
+    * `desired_state` - Auto-Tune desired state for the domain.
+    * `maintenance_schedule` - A list of the nested configurations for the Auto-Tune maintenance windows of the domain.
+        * `start_at` - Date and time at which the Auto-Tune maintenance schedule starts in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+        * `duration` - Configuration block for the duration of the Auto-Tune maintenance window.
+            * `value` - Duration of an Auto-Tune maintenance window.
+            * `unit` - Unit of time.
+        * `cron_expression_for_recurrence` - Cron expression for an Auto-Tune maintenance schedule.
+    * `rollback_on_disable` - Whether the domain is set to roll back to default Auto-Tune settings when disabling Auto-Tune.
+* `cluster_config` - Cluster configuration of the domain.
+    * `cold_storage_options` - Configuration block containing cold storage configuration.
+        * `enabled` - Indicates whether cold storage is enabled.
+    * `instance_type` - Instance type of data nodes in the cluster.
+    * `instance_count` - Number of instances in the cluster.
+    * `dedicated_master_enabled` - Indicates whether dedicated master nodes are enabled for the cluster.
+    * `dedicated_master_type` - Instance type of the dedicated master nodes in the cluster.
+    * `dedicated_master_count` - Number of dedicated master nodes in the cluster.
+    * `zone_awareness_enabled` - Indicates whether zone awareness is enabled.
+    * `zone_awareness_config` - Configuration block containing zone awareness settings.
+        * `availability_zone_count` - Number of availability zones used.
+    * `warm_enabled` - Whether warm storage is enabled.
+    * `warm_count` - Number of warm nodes in the cluster.
+    * `warm_type` - Instance type for the OpenSearch cluster's warm nodes.
+* `cognito_options` - Domain Amazon Cognito Authentication options for Dashboard.
+    * `enabled` - Whether Amazon Cognito Authentication is enabled.
+    * `user_pool_id` - Cognito User pool used by the domain.
+    * `identity_pool_id` - Cognito Identity pool used by the domain.
+    * `role_arn` - IAM Role with the AmazonOpenSearchServiceCognitoAccess policy attached.
+* `created` - Status of the creation of the domain.
+* `deleted` - Status of the deletion of the domain.
+* `domain_id` - Unique identifier for the domain.
+* `ebs_options` - EBS Options for the instances in the domain.
+    * `ebs_enabled` - Whether EBS volumes are attached to data nodes in the domain.
+    * `throughput` - The throughput (in MiB/s) of the EBS volumes attached to data nodes.
+    * `volume_type` - Type of EBS volumes attached to data nodes.
+    * `volume_size` - Size of EBS volumes attached to data nodes (in GB).
+    * `iops` - Baseline input/output (I/O) performance of EBS volumes attached to data nodes.
+* `engine_version` - OpenSearch version for the domain.
+* `encryption_at_rest` - Domain encryption at rest related options.
+    * `enabled` - Whether encryption at rest is enabled in the domain.
+    * `kms_key_id` - KMS key ID used to encrypt data at rest.
+* `endpoint` - Domain-specific endpoint used to submit index, search, and data upload requests.
+* `dashboard_endpoint` - Domain-specific endpoint used to access the [Dashboard application](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/dashboards.html).
+* `kibana_endpoint` - (**Deprecated**) Domain-specific endpoint for Kibana without the https scheme. Use the `dashboard_endpoint` attribute instead.
+* `log_publishing_options` - Domain log publishing related options.
+    * `log_type` - Type of OpenSearch log being published.
+    * `cloudwatch_log_group_arn` - CloudWatch Log Group where the logs are published.
+    * `enabled` - Whether log publishing is enabled.
+* `node_to_node_encryption` - Domain in transit encryption related options.
+    * `enabled` - Whether node to node encryption is enabled.
+* `off_peak_window_options` - Off-peak update options.
+    * `enabled` - Whether the off-peak update window is enabled.
+    * `off_peak_window` - Configuration of the off-peak window.
+        * `window_start_time` - Start time of the 10-hour window for updates.
+            * `hours` - Starting hour of the 10-hour window for updates.
+            * `minutes` - Starting minute of the 10-hour window for updates.
+* `processing` - Status of a configuration change in the domain.
+* `snapshot_options` - Domain snapshot related options.
+    * `automated_snapshot_start_hour` - Hour during which the service takes an automated daily snapshot of the indices in the domain.
+* `tags` - Tags assigned to the domain.
+* `vpc_options` - VPC Options for private OpenSearch domains.
+    * `availability_zones` - Availability zones used by the domain.
+    * `security_group_ids` - Security groups used by the domain.
+    * `subnet_ids` - Subnets used by the domain.
+    * `vpc_id` - VPC used by the domain.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/opensearchserverless_access_policy.html.markdown b/website/docs/cdktf/python/d/opensearchserverless_access_policy.html.markdown
new file mode 100644
index 00000000000..9be77c2cad0
--- /dev/null
+++ b/website/docs/cdktf/python/d/opensearchserverless_access_policy.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_access_policy"
+description: |-
+  Terraform data source for managing an AWS OpenSearch Serverless Access Policy.
+---
+
+
+# Data Source: aws_opensearchserverless_access_policy
+
+Terraform data source for managing an AWS OpenSearch Serverless Access Policy.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_opensearchserverless_access_policy import DataAwsOpensearchserverlessAccessPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOpensearchserverlessAccessPolicy(self, "example",
+            name=Token.as_string(aws_opensearchserverless_access_policy_example.name),
+            type=Token.as_string(aws_opensearchserverless_access_policy_example.type)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the policy.
+* `type` - (Required) Type of access policy. Must be `data`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `description` - Description of the policy. Typically used to store information about the permissions defined in the policy.
+* `policy` - JSON policy document containing the content of the policy.
+* `policy_version` - Version of the policy.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/opensearchserverless_collection.html.markdown b/website/docs/cdktf/python/d/opensearchserverless_collection.html.markdown
new file mode 100644
index 00000000000..5aff6c26bac
--- /dev/null
+++ b/website/docs/cdktf/python/d/opensearchserverless_collection.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_collection"
+description: |-
+  Terraform data source for managing an AWS OpenSearch Serverless Collection.
+---
+
+
+# Data Source: aws_opensearchserverless_collection
+
+Terraform data source for managing an AWS OpenSearch Serverless Collection.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_opensearchserverless_collection import DataAwsOpensearchserverlessCollection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOpensearchserverlessCollection(self, "example",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are optional, but exactly one of them must be set:
+
+* `id` - (Optional) ID of the collection. Either `id` or `name` must be provided.
+* `name` - (Optional) Name of the collection. Either `name` or `id` must be provided.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the collection.
+* `collection_endpoint` - Collection-specific endpoint used to submit index, search, and data upload requests to an OpenSearch Serverless collection.
+* `created_date` - Date the Collection was created.
+* `dashboard_endpoint` - Collection-specific endpoint used to access OpenSearch Dashboards.
+* `description` - Description of the collection.
+* `kms_key_arn` - The ARN of the Amazon Web Services KMS key used to encrypt the collection.
+* `last_modified_date` - Date the Collection was last modified.
+* `tags` - Map of tags assigned to the collection.
+* `type` - Type of collection.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/opensearchserverless_security_config.html.markdown b/website/docs/cdktf/python/d/opensearchserverless_security_config.html.markdown
new file mode 100644
index 00000000000..bfd8faf6b02
--- /dev/null
+++ b/website/docs/cdktf/python/d/opensearchserverless_security_config.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_security_config"
+description: |-
+  Terraform data source for managing an AWS OpenSearch Serverless Security Config.
+---
+
+
+# Data Source: aws_opensearchserverless_security_config
+
+Terraform data source for managing an AWS OpenSearch Serverless Security Config.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_opensearchserverless_security_config import DataAwsOpensearchserverlessSecurityConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOpensearchserverlessSecurityConfig(self, "example",
+            id="saml/12345678912/example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `id` - (Required) The unique identifier of the security configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `config_version` - The version of the security configuration.
+* `created_date` - The date the configuration was created.
+* `description` - The description of the security configuration.
+* `last_modified_date` - The date the configuration was last modified.
+* `saml_options` - SAML options for the security configuration.
+* `type` - The type of security configuration.
+
+### saml_options
+
+SAML options for the security configuration.
+
+* `group_attribute` - Group attribute for this SAML integration.
+* `metadata` - The XML IdP metadata file generated from your identity provider.
+* `session_timeout` - Session timeout, in minutes. Minimum is 5 minutes and maximum is 720 minutes (12 hours). Default is 60 minutes.
+* `user_attribute` - User attribute for this SAML integration.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/opensearchserverless_security_policy.html.markdown b/website/docs/cdktf/python/d/opensearchserverless_security_policy.html.markdown
new file mode 100644
index 00000000000..f6b537fc114
--- /dev/null
+++ b/website/docs/cdktf/python/d/opensearchserverless_security_policy.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_security_policy"
+description: |-
+  Get information on an OpenSearch Serverless Security Policy.
+---
+
+
+# Data Source: aws_opensearchserverless_security_policy
+
+Use this data source to get information about an AWS OpenSearch Serverless Security Policy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_opensearchserverless_security_policy import DataAwsOpensearchserverlessSecurityPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOpensearchserverlessSecurityPolicy(self, "example",
+            name="example-security-policy",
+            type="encryption"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the policy.
+* `type` - (Required) Type of security policy. One of `encryption` or `network`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `created_date` - The date the security policy was created.
+* `description` - Description of the security policy.
+* `last_modified_date` - The date the security policy was last modified.
+* `policy` - The JSON policy document without any whitespace.
+* `policy_version` - Version of the policy.
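+
+As a small consumption sketch (construct names here are illustrative, not part of the provider docs), the retrieved policy document can be surfaced as a stack output:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_opensearchserverless_security_policy import DataAwsOpensearchserverlessSecurityPolicy
+
+class SecurityPolicyStack(TerraformStack):
+    def __init__(self, scope: Construct, name: str):
+        super().__init__(scope, name)
+        example = DataAwsOpensearchserverlessSecurityPolicy(self, "example",
+            name="example-security-policy",
+            type="encryption"
+        )
+        # Expose the raw policy JSON so it can be inspected after deployment.
+        TerraformOutput(self, "policy_document", value=example.policy)
+```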
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/opensearchserverless_vpc_endpoint.html.markdown b/website/docs/cdktf/python/d/opensearchserverless_vpc_endpoint.html.markdown new file mode 100644 index 00000000000..bf6afd2683c --- /dev/null +++ b/website/docs/cdktf/python/d/opensearchserverless_vpc_endpoint.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "OpenSearch Serverless" +layout: "aws" +page_title: "AWS: aws_opensearchserverless_vpc_endpoint" +description: |- + Terraform data source for managing an AWS OpenSearch Serverless VPC Endpoint. +--- + + + +# Data Source: aws_opensearchserverless_vpc_endpoint + +Terraform data source for managing an AWS OpenSearch Serverless VPC Endpoint. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_opensearchserverless_vpc_endpoint import DataAwsOpensearchserverlessVpcEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsOpensearchserverlessVpcEndpoint(self, "example", + vpc_endpoint_id="vpce-829a4487959e2a839" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `vpc_endpoint_id` - (Required) The unique identifier of the endpoint. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `created_date` - The date the endpoint was created. +* `name` - The name of the endpoint. +* `security_group_ids` - The IDs of the security groups that define the ports, protocols, and sources for inbound traffic that you are authorizing into your endpoint. +* `subnet_ids` - The IDs of the subnets from which you access OpenSearch Serverless. +* `vpc_id` - The ID of the VPC from which you access OpenSearch Serverless. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/organizations_delegated_administrators.html.markdown b/website/docs/cdktf/python/d/organizations_delegated_administrators.html.markdown new file mode 100644 index 00000000000..13b266a3c43 --- /dev/null +++ b/website/docs/cdktf/python/d/organizations_delegated_administrators.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_delegated_administrators" +description: |- + Get a list of AWS accounts that are designated as delegated administrators in this organization +--- + + + +# Data Source: aws_organizations_delegated_administrators + +Get a list of AWS accounts that are designated as delegated administrators in this organization + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_organizations_delegated_administrators import DataAwsOrganizationsDelegatedAdministrators +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsOrganizationsDelegatedAdministrators(self, "example", + service_principal="SERVICE PRINCIPAL" + ) +``` + +## Argument Reference + +* `service_principal` - (Optional) Specifies a service principal name. 
If specified, then the operation lists the delegated administrators only for the specified service. If you don't specify a service principal, the operation lists all delegated administrators for all services in your organization.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `delegated_administrators` - The list of delegated administrators in your organization, which have the following attributes:
+    * `arn` - The ARN of the delegated administrator's account.
+    * `delegation_enabled_date` - The date when the account was made a delegated administrator.
+    * `email` - The email address that is associated with the delegated administrator's AWS account.
+    * `id` - The unique identifier (ID) of the delegated administrator's account.
+    * `joined_method` - The method by which the delegated administrator's account joined the organization.
+    * `joined_timestamp` - The date when the delegated administrator's account became a part of the organization.
+    * `name` - The friendly name of the delegated administrator's account.
+    * `status` - The status of the delegated administrator's account in the organization.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_delegated_services.html.markdown b/website/docs/cdktf/python/d/organizations_delegated_services.html.markdown
new file mode 100644
index 00000000000..fa2fe5f936a
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_delegated_services.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_delegated_services"
+description: |-
+  Get a list of the AWS services for which the specified account is a delegated administrator
+---
+
+
+# Data Source: aws_organizations_delegated_services
+
+Get a list of the AWS services for which the specified account is a delegated administrator.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_delegated_services import DataAwsOrganizationsDelegatedServices
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOrganizationsDelegatedServices(self, "example",
+            account_id="AWS ACCOUNT ID"
+        )
+```
+
+## Argument Reference
+
+* `account_id` - (Required) Account ID number of a delegated administrator account in the organization.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `delegated_services` - Services for which the account is a delegated administrator, which have the following attributes:
+    * `delegation_enabled_date` - The date that the account became a delegated administrator for this service.
+    * `service_principal` - The name of an AWS service that can request an operation for the specified service.
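+
+A brief usage sketch (the account ID below is a placeholder, and the list accessor follows the usual pattern of the generated CDKTF bindings): the first delegated service's principal can be exported as a stack output.
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_organizations_delegated_services import DataAwsOrganizationsDelegatedServices
+
+class DelegatedServicesStack(TerraformStack):
+    def __init__(self, scope: Construct, name: str):
+        super().__init__(scope, name)
+        example = DataAwsOrganizationsDelegatedServices(self, "example",
+            account_id="123456789012"
+        )
+        # `delegated_services` is a computed list; elements are accessed with `.get()`.
+        TerraformOutput(self, "first_service_principal",
+            value=example.delegated_services.get(0).service_principal
+        )
+```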
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_organization.html.markdown b/website/docs/cdktf/python/d/organizations_organization.html.markdown
new file mode 100644
index 00000000000..88a7b70e722
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_organization.html.markdown
@@ -0,0 +1,126 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_organization"
+description: |-
+  Get information about the organization that the user's account belongs to
+---
+
+
+# Data Source: aws_organizations_organization
+
+Get information about the organization that the user's account belongs to.
+
+## Example Usage
+
+### List all account IDs for the organization
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsOrganizationsOrganization(self, "example")
+        TerraformOutput(self, "account_ids",
+            value=property_access(example.accounts, ["*", "id"])
+        )
+```
+
+### SNS topic that only the organization can interact with
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementCondition, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+from imports.aws.sns_topic import SnsTopic
+from imports.aws.sns_topic_policy import SnsTopicPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        sns_topic = SnsTopic(self, "sns_topic",
+            name="my-sns-topic"
+        )
+        example = DataAwsOrganizationsOrganization(self, "example")
+        sns_topic_policy = DataAwsIamPolicyDocument(self, "sns_topic_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["SNS:Subscribe", "SNS:Publish"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="StringEquals",
+                    values=[Token.as_string(example.id)],
+                    variable="aws:PrincipalOrgID"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="AWS"
+                )
+                ],
+                resources=[sns_topic.arn]
+            )
+            ]
+        )
+        aws_sns_topic_policy_sns_topic_policy = SnsTopicPolicy(self, "sns_topic_policy_3",
+            arn=sns_topic.arn,
+            policy=Token.as_string(sns_topic_policy.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sns_topic_policy_sns_topic_policy.override_logical_id("sns_topic_policy")
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the organization.
+* `feature_set` - FeatureSet of the organization.
+* `id` - ID of the organization.
+* `master_account_arn` - ARN of the account that is designated as the master account for the organization. +* `master_account_email` - The email address that is associated with the AWS account that is designated as the master account for the organization. +* `master_account_id` - Unique identifier (ID) of the master account of an organization. + +### Master Account or Delegated Administrator Attribute Reference + +If the account is the master account or a delegated administrator for the organization, the following attributes are also exported: + +* `accounts` - List of organization accounts including the master account. For a list excluding the master account, see the `non_master_accounts` attribute. All elements have these attributes: + * `arn` - ARN of the account + * `email` - Email of the account + * `id` - Identifier of the account + * `name` - Name of the account + * `status` - Status of the account +* `aws_service_access_principals` - A list of AWS service principal names that have integration enabled with your organization. Organization must have `feature_set` set to `ALL`. For additional information, see the [AWS Organizations User Guide](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrate_services.html). +* `enabled_policy_types` - A list of Organizations policy types that are enabled in the Organization Root. Organization must have `feature_set` set to `ALL`. For additional information about valid policy types (e.g., `SERVICE_CONTROL_POLICY`), see the [AWS Organizations API Reference](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnablePolicyType.html). +* `non_master_accounts` - List of organization accounts excluding the master account. For a list including the master account, see the `accounts` attribute. All elements have these attributes: + * `arn` - ARN of the account + * `email` - Email of the account + * `id` - Identifier of the account + * `name` - Name of the account + * `status` - Status of the account +* `roots` - List of organization roots. All elements have these attributes: + * `arn` - ARN of the root + * `id` - Identifier of the root + * `name` - Name of the root + * `policy_types` - List of policy types enabled for this root. All elements have these attributes: + * `name` - The name of the policy type + * `status` - The status of the policy type as it relates to the associated root + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/organizations_organizational_unit_child_accounts.html.markdown b/website/docs/cdktf/python/d/organizations_organizational_unit_child_accounts.html.markdown new file mode 100644 index 00000000000..2ef803bee1e --- /dev/null +++ b/website/docs/cdktf/python/d/organizations_organizational_unit_child_accounts.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_organizational_unit_child_accounts" +description: |- + Get all direct child accounts under a parent organizational unit. This only provides immediate children, not all children. +--- + + + +# Data Source: aws_organizations_organizational_unit_child_accounts + +Get all direct child accounts under a parent organizational unit. This only provides immediate children, not all children. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+from imports.aws.data_aws_organizations_organizational_unit_child_accounts import DataAwsOrganizationsOrganizationalUnitChildAccounts
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        org = DataAwsOrganizationsOrganization(self, "org")
+        DataAwsOrganizationsOrganizationalUnitChildAccounts(self, "accounts",
+            parent_id=Token.as_string(property_access(org.roots, ["0", "id"]))
+        )
+```
+
+## Argument Reference
+
+* `parent_id` - (Required) The parent ID of the accounts.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `accounts` - List of child accounts, which have the following attributes:
+    * `arn` - The Amazon Resource Name (ARN) of the account.
+    * `email` - The email address associated with the AWS account.
+    * `id` - The unique identifier (ID) of the account.
+    * `name` - The friendly name of the account.
+    * `status` - The status of the account in the organization.
+* `id` - Parent identifier of the organizational units.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_organizational_unit_descendant_accounts.html.markdown b/website/docs/cdktf/python/d/organizations_organizational_unit_descendant_accounts.html.markdown
new file mode 100644
index 00000000000..e0371a566ba
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_organizational_unit_descendant_accounts.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_organizational_unit_descendant_accounts"
+description: |-
+  Get all child accounts under a parent organizational unit. This provides all children.
+---
+
+
+# Data Source: aws_organizations_organizational_unit_descendant_accounts
+
+Get all child accounts under a parent organizational unit. This provides all children.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+from imports.aws.data_aws_organizations_organizational_unit_descendant_accounts import DataAwsOrganizationsOrganizationalUnitDescendantAccounts
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        org = DataAwsOrganizationsOrganization(self, "org")
+        DataAwsOrganizationsOrganizationalUnitDescendantAccounts(self, "accounts",
+            parent_id=Token.as_string(property_access(org.roots, ["0", "id"]))
+        )
+```
+
+## Argument Reference
+
+* `parent_id` - (Required) The parent ID of the accounts.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `accounts` - List of child accounts, which have the following attributes:
+    * `arn` - The Amazon Resource Name (ARN) of the account.
+    * `email` - The email address associated with the AWS account.
+    * `id` - The unique identifier (ID) of the account.
+    * `name` - The friendly name of the account.
+    * `status` - The status of the account in the organization.
+* `id` - Parent identifier of the organizational units.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_organizational_units.html.markdown b/website/docs/cdktf/python/d/organizations_organizational_units.html.markdown
new file mode 100644
index 00000000000..59b3a83c7e5
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_organizational_units.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_organizational_units"
+description: |-
+  Get all direct child organizational units under a parent organizational unit. This only provides immediate children, not all children
+---
+
+
+# Data Source: aws_organizations_organizational_units
+
+Get all direct child organizational units under a parent organizational unit. This only provides immediate children, not all children.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+from imports.aws.data_aws_organizations_organizational_units import DataAwsOrganizationsOrganizationalUnits
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        org = DataAwsOrganizationsOrganization(self, "org")
+        DataAwsOrganizationsOrganizationalUnits(self, "ou",
+            parent_id=Token.as_string(property_access(org.roots, ["0", "id"]))
+        )
+```
+
+## Argument Reference
+
+* `parent_id` - (Required) Parent ID of the organizational unit.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `children` - List of child organizational units, which have the following attributes:
+    * `arn` - ARN of the organizational unit.
+    * `name` - Name of the organizational unit.
+    * `id` - ID of the organizational unit.
+* `id` - Parent identifier of the organizational units.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_policies.html.markdown b/website/docs/cdktf/python/d/organizations_policies.html.markdown
new file mode 100644
index 00000000000..4d063339473
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_policies.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_policies"
+description: |-
+  Terraform data source for managing AWS Organizations Policies.
+---
+
+
+# Data Source: aws_organizations_policies
+
+Terraform data source for managing AWS Organizations Policies.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformIterator, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_policies import DataAwsOrganizationsPolicies
+from imports.aws.data_aws_organizations_policy import DataAwsOrganizationsPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsOrganizationsPolicies(self, "example",
+            filter="SERVICE_CONTROL_POLICY"
+        )
+        # In most cases loops should be handled in the programming language context and
+        #     not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        #     you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        #     you need to keep this like it is.
+        example_for_each_iterator = TerraformIterator.from_list(
+            Token.as_any(Fn.toset(example.ids)))
+        data_aws_organizations_policy_example = DataAwsOrganizationsPolicy(self, "example_1",
+            policy_id=Token.as_string(example_for_each_iterator.value),
+            for_each=example_for_each_iterator
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_organizations_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `filter` - (Required) The type of policies to be returned in the response. Valid values are `SERVICE_CONTROL_POLICY`, `TAG_POLICY`, `BACKUP_POLICY`, and `AISERVICES_OPT_OUT_POLICY`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - List of all the policy ids found.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_policies_for_target.html.markdown b/website/docs/cdktf/python/d/organizations_policies_for_target.html.markdown
new file mode 100644
index 00000000000..7cedaf7dfc5
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_policies_for_target.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_policies_for_target"
+description: |-
+  Terraform data source for managing AWS Organizations Policies For Target.
+---
+
+
+# Data Source: aws_organizations_policies_for_target
+
+Terraform data source for managing AWS Organizations Policies For Target.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, Fn, TerraformIterator, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+from imports.aws.data_aws_organizations_policies_for_target import DataAwsOrganizationsPoliciesForTarget
+from imports.aws.data_aws_organizations_policy import DataAwsOrganizationsPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsOrganizationsOrganization(self, "example")
+        data_aws_organizations_policies_for_target_example = DataAwsOrganizationsPoliciesForTarget(self, "example_1",
+            filter="SERVICE_CONTROL_POLICY",
+            target_id=Token.as_string(property_access(example.roots, ["0", "id"]))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_organizations_policies_for_target_example.override_logical_id("example")
+        # In most cases loops should be handled in the programming language context and
+        #     not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        #     you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        #     you need to keep this like it is.
+        example_for_each_iterator = TerraformIterator.from_list(
+            Token.as_any(Fn.toset(data_aws_organizations_policies_for_target_example.ids)))
+        data_aws_organizations_policy_example = DataAwsOrganizationsPolicy(self, "example_2",
+            policy_id=Token.as_string(example_for_each_iterator.value),
+            for_each=example_for_each_iterator
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_organizations_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `target_id` - (Required) The root (a string that begins with `r-` followed by 4-32 lowercase letters or digits), account (a 12-digit string), or organizational unit (a string that starts with `ou-` followed by 4-32 lowercase letters or digits, then a second `-` and 8-32 additional lowercase letters or digits) whose policies you want to list.
+* `filter` - (Required) Policy filter to apply to the target. Valid values are `SERVICE_CONTROL_POLICY`, `TAG_POLICY`, `BACKUP_POLICY`, and `AISERVICES_OPT_OUT_POLICY`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - List of all the policy ids found.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_policy.html.markdown b/website/docs/cdktf/python/d/organizations_policy.html.markdown
new file mode 100644
index 00000000000..03eabb399a3
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_policy.html.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_policy"
+description: |-
+  Terraform data source for managing an AWS Organizations Policy.
+---
+
+
+# Data Source: aws_organizations_policy
+
+Terraform data source for managing an AWS Organizations Policy.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_policy import DataAwsOrganizationsPolicy
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+from imports.aws.data_aws_organizations_policies_for_target import DataAwsOrganizationsPoliciesForTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        data_aws_organizations_organization_current = DataAwsOrganizationsOrganization(self, "current")
+        data_aws_organizations_policies_for_target_current = DataAwsOrganizationsPoliciesForTarget(self, "current_2",
+            filter="SERVICE_CONTROL_POLICY",
+            target_id=Token.as_string(
+                property_access(data_aws_organizations_organization_current.roots, ["0", "id"]))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_organizations_policies_for_target_current.override_logical_id("current")
+        DataAwsOrganizationsPolicy(self, "test",
+            policy_id=Token.as_string(
+                property_access(data_aws_organizations_policies_for_target_current.ids, ["0"]))
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `policy_id` - (Required) The unique identifier (ID) of the policy that you want more details on. A policy ID starts with `p-` followed by 8-28 lowercase or uppercase letters, digits, and underscores.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name of the policy.
+* `aws_managed` - Indicates if a policy is an AWS managed policy.
+* `content` - The text content of the policy.
+* `description` - The description of the policy.
+* `name` - The friendly name of the policy.
+* `type` - The type of the policy. Valid values are `SERVICE_CONTROL_POLICY`, `TAG_POLICY`, `BACKUP_POLICY`, and `AISERVICES_OPT_OUT_POLICY`.
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/d/organizations_resource_tags.html.markdown b/website/docs/cdktf/python/d/organizations_resource_tags.html.markdown
new file mode 100644
index 00000000000..256b726eb14
--- /dev/null
+++ b/website/docs/cdktf/python/d/organizations_resource_tags.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_resource_tags"
+description: |-
+  Get tags attached to the specified AWS Organizations resource.
+---
+
+
+# Data Source: aws_organizations_resource_tags
+
+Get tags attached to the specified AWS Organizations resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_organizations_resource_tags import DataAwsOrganizationsResourceTags
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOrganizationsResourceTags(self, "account",
+            resource_id="123456123846"
+        )
+```
+
+## Argument Reference
+
+* `resource_id` - (Required) ID of the resource with the tags to list. See details below.
+
+### resource_id
+
+You can specify any of the following taggable resources.
+
+* AWS account – specify the account ID number.
+* Organizational unit – specify the OU ID that begins with `ou-` and looks similar to: `ou-1a2b-34uvwxyz`
+* Root – specify the root ID that begins with `r-` and looks similar to: `r-1a2b`
+* Policy – specify the policy ID that begins with `p-` and looks similar to: `p-12abcdefg3`
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `tags` - Map of key=value pairs for each tag set on the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_asset.html.markdown b/website/docs/cdktf/python/d/outposts_asset.html.markdown
new file mode 100644
index 00000000000..ff01c7370a1
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_asset.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_asset"
+description: |-
+  Information about a specific hardware asset in an Outpost.
+---
+
+
+# Data Source: aws_outposts_asset
+
+Information about a specific hardware asset in an Outpost. For a direct lookup of a single known asset, see the sketch after the attribute list below.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformCount, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_asset import DataAwsOutpostsAsset
+from imports.aws.data_aws_outposts_assets import DataAwsOutpostsAssets
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsOutpostsAssets(self, "example",
+            arn=Token.as_string(data_aws_outposts_outpost_example.arn)
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        example_count = TerraformCount.of(
+            Token.as_number(Fn.length_of(example.asset_ids)))
+        data_aws_outposts_asset_example = DataAwsOutpostsAsset(self, "example_1",
+            arn=Token.as_string(data_aws_outposts_outpost_example.arn),
+            asset_id=Token.as_string(
+                Fn.element(example.asset_ids, Token.as_number(example_count.index))),
+            count=example_count
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_outposts_asset_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) Outpost ARN.
+* `asset_id` - (Required) ID of the asset.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `asset_type` - Type of the asset.
+* `host_id` - Host ID of the Dedicated Host on the asset, if a Dedicated Host is provisioned.
+* `rack_elevation` - Position of an asset in a rack measured in rack units.
+* `rack_id` - Rack ID of the asset.
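+
+If the asset ID is already known, no `count` loop is needed. The following is a minimal sketch, not `cdktf convert` output; the Outpost ARN and asset ID are placeholder values:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_outposts_asset import DataAwsOutpostsAsset
+class SingleAssetLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Placeholder ARN and asset ID; substitute values from your environment.
+        single = DataAwsOutpostsAsset(self, "single",
+            arn="arn:aws:outposts:us-west-2:123456789012:outpost/op-0123456789abcdef0",
+            asset_id="asset-123example"
+        )
+        # Surface the Dedicated Host ID, if one is provisioned on the asset.
+        TerraformOutput(self, "asset_host_id", value=single.host_id)
+```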
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_assets.html.markdown b/website/docs/cdktf/python/d/outposts_assets.html.markdown
new file mode 100644
index 00000000000..bd37ad0fa6f
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_assets.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_assets"
+description: |-
+  Information about hardware assets in an Outpost.
+---
+
+
+# Data Source: aws_outposts_assets
+
+Information about hardware assets in an Outpost.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_assets import DataAwsOutpostsAssets
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsAssets(self, "example",
+            arn=Token.as_string(data_aws_outposts_outpost_example.arn)
+        )
+```
+
+### With Host ID Filter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_assets import DataAwsOutpostsAssets
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsAssets(self, "example",
+            arn=Token.as_string(data_aws_outposts_outpost_example.arn),
+            host_id_filter=["h-x38g5n0yd2a0ueb61"]
+        )
+```
+
+### With Status ID Filter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_assets import DataAwsOutpostsAssets
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsAssets(self, "example",
+            arn=Token.as_string(data_aws_outposts_outpost_example.arn),
+            status_id_filter=["ACTIVE"]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Required) Outpost ARN.
+* `host_id_filter` - (Optional) Filters by a list of Dedicated Host IDs.
+* `status_id_filter` - (Optional) Filters by a list of asset states. Valid values: `ACTIVE`, `RETIRING`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `asset_ids` - List of all the asset IDs found. This data source will fail if none are found.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_outpost.html.markdown b/website/docs/cdktf/python/d/outposts_outpost.html.markdown
new file mode 100644
index 00000000000..00143dfbf07
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_outpost.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_outpost"
+description: |-
+  Provides details about an Outposts Outpost
+---
+
+
+# Data Source: aws_outposts_outpost
+
+Provides details about an Outposts Outpost.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_outpost import DataAwsOutpostsOutpost
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsOutpost(self, "example",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Optional) Identifier of the Outpost.
+* `name` - (Optional) Name of the Outpost.
+* `arn` - (Optional) ARN.
+* `owner_id` - (Optional) AWS Account identifier of the Outpost owner.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availability_zone` - Availability Zone name.
+* `availability_zone_id` - Availability Zone identifier.
+* `description` - The description of the Outpost.
+* `lifecycle_status` - The life cycle status.
+* `site_arn` - The Amazon Resource Name (ARN) of the site.
+* `site_id` - The ID of the site.
+* `supported_hardware_type` - The hardware type.
+* `tags` - The Outpost tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_outpost_instance_type.html.markdown b/website/docs/cdktf/python/d/outposts_outpost_instance_type.html.markdown
new file mode 100644
index 00000000000..52d01a1b42a
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_outpost_instance_type.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_outpost_instance_type"
+description: |-
+  Information about a single Outpost Instance Type.
+---
+
+
+# Data Source: aws_outposts_outpost_instance_type
+
+Information about a single Outpost Instance Type.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.instance import Instance
+from imports.aws.data_aws_outposts_outpost_instance_type import DataAwsOutpostsOutpostInstanceType
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsOutpostsOutpostInstanceType(self, "example",
+            arn=Token.as_string(data_aws_outposts_outpost_example.arn),
+            preferred_instance_types=["m5.large", "m5.4xlarge"]
+        )
+        aws_instance_example = Instance(self, "example_1",
+            instance_type=example.instance_type
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_instance_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) Outpost ARN.
+
+The following arguments are optional:
+
+* `instance_type` - (Optional) Desired instance type. Conflicts with `preferred_instance_types`.
+* `preferred_instance_types` - (Optional) Ordered list of preferred instance types. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. Conflicts with `instance_type`.
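+
+Because `instance_type` and `preferred_instance_types` conflict, an exact-match lookup sets only `instance_type`. A minimal sketch, not `cdktf convert` output; the ARN is a placeholder and `m5.large` is assumed to be offered on the Outpost:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_outposts_outpost_instance_type import DataAwsOutpostsOutpostInstanceType
+class ExactInstanceTypeLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The lookup fails if the exact type is not offered on the Outpost.
+        DataAwsOutpostsOutpostInstanceType(self, "exact",
+            arn="arn:aws:outposts:us-west-2:123456789012:outpost/op-0123456789abcdef0",
+            instance_type="m5.large"
+        )
+```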
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Outpost identifier.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_outpost_instance_types.html.markdown b/website/docs/cdktf/python/d/outposts_outpost_instance_types.html.markdown
new file mode 100644
index 00000000000..5d1e048bfd9
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_outpost_instance_types.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_outpost_instance_types"
+description: |-
+  Information about Outpost Instance Types.
+---
+
+
+# Data Source: aws_outposts_outpost_instance_types
+
+Information about Outpost Instance Types.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_outpost_instance_types import DataAwsOutpostsOutpostInstanceTypes
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsOutpostInstanceTypes(self, "example",
+            arn=Token.as_string(data_aws_outposts_outpost_example.arn)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) Outpost ARN.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `instance_types` - Set of instance types.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_outposts.html.markdown b/website/docs/cdktf/python/d/outposts_outposts.html.markdown
new file mode 100644
index 00000000000..0d3f2e64336
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_outposts.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_outposts"
+description: |-
+  Provides details about multiple Outposts
+---
+
+
+# Data Source: aws_outposts_outposts
+
+Provides details about multiple Outposts.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_outposts import DataAwsOutpostsOutposts
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsOutposts(self, "example",
+            site_id=Token.as_string(data_aws_outposts_site_example.id)
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `availability_zone` - (Optional) Availability Zone name.
+* `availability_zone_id` - (Optional) Availability Zone identifier.
+* `site_id` - (Optional) Site identifier.
+* `owner_id` - (Optional) AWS Account identifier of the Outpost owner.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of Amazon Resource Names (ARNs).
+* `id` - AWS Region.
+* `ids` - Set of identifiers.
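+
+The `ids` set can drive per-Outpost lookups with the same iterator pattern used elsewhere in these docs. A minimal sketch, not `cdktf convert` output:
+
+```python
+from constructs import Construct
+from cdktf import Fn, Token, TerraformIterator, TerraformStack
+from imports.aws.data_aws_outposts_outpost import DataAwsOutpostsOutpost
+from imports.aws.data_aws_outposts_outposts import DataAwsOutpostsOutposts
+class AllOutpostsLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        outposts = DataAwsOutpostsOutposts(self, "all")
+        # The set of IDs is only known to Terraform, so iterate in the Terraform context.
+        each = TerraformIterator.from_list(Token.as_any(Fn.toset(outposts.ids)))
+        DataAwsOutpostsOutpost(self, "each",
+            id=Token.as_string(each.value),
+            for_each=each
+        )
+```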
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_site.html.markdown b/website/docs/cdktf/python/d/outposts_site.html.markdown
new file mode 100644
index 00000000000..fe12de19a2c
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_site.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_site"
+description: |-
+  Provides details about an Outposts Site
+---
+
+
+# Data Source: aws_outposts_site
+
+Provides details about an Outposts Site.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_site import DataAwsOutpostsSite
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsSite(self, "example",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Optional) Identifier of the Site.
+* `name` - (Optional) Name of the Site.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `account_id` - AWS Account identifier.
+* `description` - Description.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/outposts_sites.html.markdown b/website/docs/cdktf/python/d/outposts_sites.html.markdown
new file mode 100644
index 00000000000..ccd55d17162
--- /dev/null
+++ b/website/docs/cdktf/python/d/outposts_sites.html.markdown
@@ -0,0 +1,43 @@
+---
+subcategory: "Outposts"
+layout: "aws"
+page_title: "AWS: aws_outposts_sites"
+description: |-
+  Provides details about multiple Outposts Sites.
+---
+
+
+# Data Source: aws_outposts_sites
+
+Provides details about multiple Outposts Sites.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_outposts_sites import DataAwsOutpostsSites
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsOutpostsSites(self, "all")
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - Set of Outposts Site identifiers.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/partition.html.markdown b/website/docs/cdktf/python/d/partition.html.markdown
new file mode 100644
index 00000000000..c46c4cf1740
--- /dev/null
+++ b/website/docs/cdktf/python/d/partition.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "Meta Data Sources"
+layout: "aws"
+page_title: "AWS: aws_partition"
+description: |-
+  Get AWS partition identifier
+---
+
+
+# Data Source: aws_partition
+
+Use this data source to look up information about the current AWS partition in
+which Terraform is working.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement
+from imports.aws.data_aws_partition import DataAwsPartition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsPartition(self, "current")
+        DataAwsIamPolicyDocument(self, "s3_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:ListBucket"],
+                resources=["arn:${" + current.partition + "}:s3:::my-bucket"],
+                sid="1"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `dns_suffix` - Base DNS domain name for the current partition (e.g., `amazonaws.com` in AWS Commercial, `amazonaws.com.cn` in AWS China).
+* `id` - Identifier of the current partition (e.g., `aws` in AWS Commercial, `aws-cn` in AWS China).
+* `partition` - Identifier of the current partition (e.g., `aws` in AWS Commercial, `aws-cn` in AWS China).
+* `reverse_dns_prefix` - Prefix of service names (e.g., `com.amazonaws` in AWS Commercial, `cn.com.amazonaws` in AWS China).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/prefix_list.html.markdown b/website/docs/cdktf/python/d/prefix_list.html.markdown
new file mode 100644
index 00000000000..5ee35ebae68
--- /dev/null
+++ b/website/docs/cdktf/python/d/prefix_list.html.markdown
@@ -0,0 +1,121 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_prefix_list"
+description: |-
+  Provides details about a specific prefix list
+---
+
+
+# Data Source: aws_prefix_list
+
+`aws_prefix_list` provides details about a specific AWS prefix list (PL)
+in the current region.
+
+This can be used both to validate a prefix list given in a variable
+and to obtain the CIDR blocks (IP address ranges) for the associated
+AWS service. The latter may be useful, e.g., for adding network ACL
+rules.
+
+The [aws_ec2_managed_prefix_list](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/ec2_managed_prefix_list) data source is normally more appropriate to use given it can return customer-managed prefix list info, as well as additional attributes.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_prefix_list import DataAwsPrefixList
+from imports.aws.network_acl import NetworkAcl
+from imports.aws.network_acl_rule import NetworkAclRule
+from imports.aws.vpc_endpoint import VpcEndpoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bar = NetworkAcl(self, "bar",
+            vpc_id=foo.id
+        )
+        private_s3 = VpcEndpoint(self, "private_s3",
+            service_name="com.amazonaws.us-west-2.s3",
+            vpc_id=foo.id
+        )
+        data_aws_prefix_list_private_s3 = DataAwsPrefixList(self, "private_s3_2",
+            prefix_list_id=private_s3.prefix_list_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_prefix_list_private_s3.override_logical_id("private_s3")
+        aws_network_acl_rule_private_s3 = NetworkAclRule(self, "private_s3_3",
+            cidr_block=Token.as_string(
+                property_access(data_aws_prefix_list_private_s3.cidr_blocks, ["0"])),
+            egress=False,
+            from_port=443,
+            network_acl_id=bar.id,
+            protocol="tcp",
+            rule_action="allow",
+            rule_number=200,
+            to_port=443
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_network_acl_rule_private_s3.override_logical_id("private_s3")
+```
+
+### Filter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_prefix_list import DataAwsPrefixList, DataAwsPrefixListFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsPrefixList(self, "test",
+            filter=[DataAwsPrefixListFilter(
+                name="prefix-list-id",
+                values=["pl-68a54001"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+prefix lists. The given filters must match exactly one prefix list
+whose data will be exported as attributes.
+
+* `prefix_list_id` - (Optional) ID of the prefix list to select.
+* `name` - (Optional) Name of the prefix list to select.
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribePrefixLists API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribePrefixLists.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the selected prefix list.
+* `name` - Name of the selected prefix list.
+* `cidr_blocks` - List of CIDR blocks for the AWS service associated with the prefix list.
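+
+Besides `cidr_blocks`, the exported `id` can be used anywhere AWS accepts a prefix list ID, such as a security group rule. A minimal sketch, not `cdktf convert` output; the security group ID is a placeholder, and the binding names follow the generated provider convention:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_prefix_list import DataAwsPrefixList
+from imports.aws.security_group_rule import SecurityGroupRule
+class PrefixListEgress(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        s3 = DataAwsPrefixList(self, "s3",
+            name="com.amazonaws.us-west-2.s3"
+        )
+        # Allow HTTPS egress to the S3 service ranges via the prefix list ID.
+        SecurityGroupRule(self, "allow_s3_egress",
+            type="egress",
+            from_port=443,
+            to_port=443,
+            protocol="tcp",
+            prefix_list_ids=[s3.id],
+            security_group_id="sg-0123456789abcdef0"
+        )
+```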
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/pricing_product.html.markdown b/website/docs/cdktf/python/d/pricing_product.html.markdown
new file mode 100644
index 00000000000..06026c0c467
--- /dev/null
+++ b/website/docs/cdktf/python/d/pricing_product.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Pricing Calculator"
+layout: "aws"
+page_title: "AWS: aws_pricing_product"
+description: |-
+  Get information regarding the pricing of an Amazon product
+---
+
+
+# Data Source: aws_pricing_product
+
+Use this data source to get pricing information for a single AWS product.
+This data source is only available in a us-east-1 or ap-south-1 provider.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_pricing_product import DataAwsPricingProduct, DataAwsPricingProductFilters
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsPricingProduct(self, "example",
+            filters=[DataAwsPricingProductFilters(
+                field="instanceType",
+                value="c5.xlarge"
+            ), DataAwsPricingProductFilters(
+                field="operatingSystem",
+                value="Linux"
+            ), DataAwsPricingProductFilters(
+                field="location",
+                value="US East (N. Virginia)"
+            ), DataAwsPricingProductFilters(
+                field="preInstalledSw",
+                value="NA"
+            ), DataAwsPricingProductFilters(
+                field="licenseModel",
+                value="No License required"
+            ), DataAwsPricingProductFilters(
+                field="tenancy",
+                value="Shared"
+            ), DataAwsPricingProductFilters(
+                field="capacitystatus",
+                value="Used"
+            )
+            ],
+            service_code="AmazonEC2"
+        )
+```
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_pricing_product import DataAwsPricingProduct, DataAwsPricingProductFilters
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsPricingProduct(self, "example",
+            filters=[DataAwsPricingProductFilters(
+                field="instanceType",
+                value="ds1.xlarge"
+            ), DataAwsPricingProductFilters(
+                field="location",
+                value="US East (N. Virginia)"
+            )
+            ],
+            service_code="AmazonRedshift"
+        )
+```
+
+## Argument Reference
+
+* `service_code` - (Required) Code of the service. Available service codes can be fetched using the DescribeServices pricing API call.
+* `filters` - (Required) List of filters. Passed directly to the API (see GetProducts API reference). These filters must describe a single product; the data source will fail if more than one product is returned by the API.
+
+### filters
+
+* `field` - (Required) Product attribute name that you want to filter on.
+* `value` - (Required) Product attribute value that you want to filter on.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `result` - Set to the product returned from the API.
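+
+Because `result` is a JSON string, individual fields can be extracted in the Terraform context with `Fn.jsondecode`. A minimal sketch, not `cdktf convert` output; the filters are trimmed for brevity (in practice they must select exactly one product, as in the examples above), and the `product.sku` path follows the pricing API response shape:
+
+```python
+from constructs import Construct
+from cdktf import Fn, TerraformOutput, TerraformStack, property_access
+from imports.aws.data_aws_pricing_product import DataAwsPricingProduct, DataAwsPricingProductFilters
+class PricingSku(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsPricingProduct(self, "example",
+            filters=[DataAwsPricingProductFilters(
+                field="instanceType",
+                value="c5.xlarge"
+            ), DataAwsPricingProductFilters(
+                field="location",
+                value="US East (N. Virginia)"
+            )
+            ],
+            service_code="AmazonEC2"
+        )
+        # Decode the JSON result and surface one field as a stack output.
+        TerraformOutput(self, "sku",
+            value=property_access(Fn.jsondecode(example.result), ["product", "sku"])
+        )
+```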
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/prometheus_workspace.html.markdown b/website/docs/cdktf/python/d/prometheus_workspace.html.markdown new file mode 100644 index 00000000000..a09d399a51b --- /dev/null +++ b/website/docs/cdktf/python/d/prometheus_workspace.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "AMP (Managed Prometheus)" +layout: "aws" +page_title: "AWS: aws_prometheus_workspace" +description: |- + Gets information on an Amazon Managed Prometheus workspace. +--- + + + +# Data Source: aws_prometheus_workspace + +Provides an Amazon Managed Prometheus workspace data source. + +## Example Usage + +### Basic configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_prometheus_workspace import DataAwsPrometheusWorkspace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsPrometheusWorkspace(self, "example", + workspace_id="ws-41det8a1-2c67-6a1a-9381-9b83d3d78ef7" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `workspace_id` - (Required) Prometheus workspace ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Prometheus workspace. +* `created_date` - Creation date of the Prometheus workspace. +* `prometheus_endpoint` - Endpoint of the Prometheus workspace. +* `alias` - Prometheus workspace alias. +* `status` - Status of the Prometheus workspace. +* `tags` - Tags assigned to the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/prometheus_workspaces.html.markdown b/website/docs/cdktf/python/d/prometheus_workspaces.html.markdown new file mode 100644 index 00000000000..58b99379830 --- /dev/null +++ b/website/docs/cdktf/python/d/prometheus_workspaces.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "AMP (Managed Prometheus)" +layout: "aws" +page_title: "AWS: aws_prometheus_workspaces" +description: |- + Gets the aliases, ARNs, and workspace IDs of Amazon Prometheus workspaces. +--- + + + +# Data Source: aws_prometheus_workspaces + +Provides the aliases, ARNs, and workspace IDs of Amazon Prometheus workspaces. + +## Example Usage + +The following example returns all of the workspaces in a region: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_prometheus_workspaces import DataAwsPrometheusWorkspaces +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsPrometheusWorkspaces(self, "example") +``` + +The following example filters the workspaces by alias. Only the workspaces with +aliases that begin with the value of `alias_prefix` will be returned: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_prometheus_workspaces import DataAwsPrometheusWorkspaces
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsPrometheusWorkspaces(self, "example",
+            alias_prefix="example"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `alias_prefix` - (Optional) Limits results to workspaces with aliases that begin with this value.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `aliases` - List of aliases of the matched Prometheus workspaces.
+* `arns` - List of ARNs of the matched Prometheus workspaces.
+* `workspace_ids` - List of workspace IDs of the matched Prometheus workspaces.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/qldb_ledger.html.markdown b/website/docs/cdktf/python/d/qldb_ledger.html.markdown
new file mode 100644
index 00000000000..6cd0e855321
--- /dev/null
+++ b/website/docs/cdktf/python/d/qldb_ledger.html.markdown
@@ -0,0 +1,43 @@
+---
+subcategory: "QLDB (Quantum Ledger Database)"
+layout: "aws"
+page_title: "AWS: aws_qldb_ledger"
+description: |-
+  Get information on an Amazon Quantum Ledger Database (QLDB)
+---
+
+
+# Data Source: aws_qldb_ledger
+
+Use this data source to fetch information about a Quantum Ledger Database.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_qldb_ledger import DataAwsQldbLedger
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsQldbLedger(self, "example",
+            name="an_example_ledger"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly name of the ledger to match.
+
+## Attribute Reference
+
+See the [QLDB Ledger Resource](/docs/providers/aws/r/qldb_ledger.html) for details on the
+returned attributes - they are identical.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/quicksight_data_set.html.markdown b/website/docs/cdktf/python/d/quicksight_data_set.html.markdown
new file mode 100644
index 00000000000..bf3b4bb7466
--- /dev/null
+++ b/website/docs/cdktf/python/d/quicksight_data_set.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_data_set"
+description: |-
+  Use this data source to fetch information about a QuickSight Data Set.
+---
+
+
+# Data Source: aws_quicksight_data_set
+
+Data source for managing a QuickSight Data Set.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_quicksight_data_set import DataAwsQuicksightDataSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsQuicksightDataSet(self, "example",
+            data_set_id="example-id"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `data_set_id` - (Required) Identifier for the data set.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional) AWS account ID.
+
+## Attribute Reference
+
+See the [Data Set Resource](/docs/providers/aws/r/quicksight_data_set.html) for details on the
+returned attributes - they are identical.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/quicksight_group.html.markdown b/website/docs/cdktf/python/d/quicksight_group.html.markdown
new file mode 100644
index 00000000000..91292936179
--- /dev/null
+++ b/website/docs/cdktf/python/d/quicksight_group.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_group"
+description: |-
+  Use this data source to fetch information about a QuickSight Group.
+---
+
+
+# Data Source: aws_quicksight_group
+
+This data source can be used to fetch information about a specific
+QuickSight group. By using this data source, you can reference QuickSight group
+properties without having to hard code ARNs or unique IDs as input.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_quicksight_group import DataAwsQuicksightGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsQuicksightGroup(self, "example",
+            group_name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `group_name` - (Required) The name of the group that you want to match.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional) AWS account ID.
+* `namespace` - (Optional) QuickSight namespace. Defaults to `default`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) for the group.
+* `description` - The group description.
+* `principal_id` - The principal ID of the group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/quicksight_theme.html.markdown b/website/docs/cdktf/python/d/quicksight_theme.html.markdown
new file mode 100644
index 00000000000..edccd779370
--- /dev/null
+++ b/website/docs/cdktf/python/d/quicksight_theme.html.markdown
@@ -0,0 +1,134 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_theme"
+description: |-
+  Use this data source to fetch information about a QuickSight Theme.
+---
+
+
+# Data Source: aws_quicksight_theme
+
+Terraform data source for managing an AWS QuickSight Theme.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_quicksight_theme import DataAwsQuicksightTheme
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsQuicksightTheme(self, "example",
+            theme_id="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `theme_id` - (Required) Identifier of the theme.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional) AWS account ID.
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the theme. +* `base_theme_id` - The ID of the theme that a custom theme will inherit from. All themes inherit from one of the starting themes defined by Amazon QuickSight. +* `configuration` - The theme configuration, which contains the theme display properties. See [configuration](#configuration). +* `created_time` - The time that the theme was created. +* `id` - A comma-delimited string joining AWS account ID and theme ID. +* `last_updated_time` - The time that the theme was last updated. +* `name` - Display name of the theme. +* `permissions` - A set of resource permissions on the theme. See [permissions](#permissions). +* `status` - The theme creation status. +* `tags` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). +* `version_description` - A description of the current theme version being created/updated. +* `version_number` - The version number of the theme version. + +### permissions + +* `actions` - List of IAM actions to grant or revoke permissions on. +* `principal` - ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values. + +### configuration + +* `data_color_palette` - Color properties that apply to chart data colors. See [data_color_palette](#data_color_palette). +* `sheet` - Display options related to sheets. See [sheet](#sheet). +* `typography` - Determines the typography options. See [typography](#typography). +* `ui_color_palette` - Color properties that apply to the UI and to charts, excluding the colors that apply to data. See [ui_color_palette](#ui_color_palette). + +### data_color_palette + +* `colors` - List of hexadecimal codes for the colors. Minimum of 8 items and maximum of 20 items. +* `empty_fill_color` - The hexadecimal code of a color that applies to charts where a lack of data is highlighted. +* `min_max_gradient` - The minimum and maximum hexadecimal codes that describe a color gradient. List of exactly 2 items. + +### sheet + +* `tile` - The display options for tiles. See [tile](#tile). +* `tile_layout` - The layout options for tiles. See [tile_layout](#tile_layout). + +### tile + +* `border` - The border around a tile. See [border](#border). + +### border + +* `show` - The option to enable display of borders for visuals. + +### tile_layout + +* `gutter` - The gutter settings that apply between tiles. See [gutter](#gutter). +* `margin` - The margin settings that apply around the outside edge of sheets. See [margin](#margin). + +### gutter + +* `show` - This Boolean value controls whether to display a gutter space between sheet tiles. + +### margin + +* `show` - This Boolean value controls whether to display sheet margins. + +### typography + +* `font_families` - Determines the list of font families. Maximum number of 5 items. See [font_families](#font_families). + +### font_families + +* `font_family` - Font family name. + +### ui_color_palette + +* `accent` - Color (hexadecimal) that applies to selected states and buttons. +* `accent_foreground` - Color (hexadecimal) that applies to any text or other elements that appear over the accent color. +* `danger` - Color (hexadecimal) that applies to error messages. 
+* `danger_foreground` - Color (hexadecimal) that applies to any text or other elements that appear over the error color.
+* `dimension` - Color (hexadecimal) that applies to the names of fields that are identified as dimensions.
+* `dimension_foreground` - Color (hexadecimal) that applies to any text or other elements that appear over the dimension color.
+* `measure` - Color (hexadecimal) that applies to the names of fields that are identified as measures.
+* `measure_foreground` - Color (hexadecimal) that applies to any text or other elements that appear over the measure color.
+* `primary_background` - Color (hexadecimal) that applies to visuals and other high emphasis UI.
+* `primary_foreground` - Color (hexadecimal) of text and other foreground elements that appear over the primary background regions, such as grid lines, borders, table banding, icons, and so on.
+* `secondary_background` - Color (hexadecimal) that applies to the sheet background and sheet controls.
+* `secondary_foreground` - Color (hexadecimal) that applies to any sheet title, sheet control text, or UI that appears over the secondary background.
+* `success` - Color (hexadecimal) that applies to success messages, for example the check mark for a successful download.
+* `success_foreground` - Color (hexadecimal) that applies to any text or other elements that appear over the success color.
+* `warning` - Color (hexadecimal) that applies to warning and informational messages.
+* `warning_foreground` - Color (hexadecimal) that applies to any text or other elements that appear over the warning color.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/quicksight_user.html.markdown b/website/docs/cdktf/python/d/quicksight_user.html.markdown
new file mode 100644
index 00000000000..abcf91d728a
--- /dev/null
+++ b/website/docs/cdktf/python/d/quicksight_user.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_user"
+description: |-
+  Use this data source to fetch information about a QuickSight User.
+---
+
+
+# Data Source: aws_quicksight_user
+
+This data source can be used to fetch information about a specific
+QuickSight user. By using this data source, you can reference QuickSight user
+properties without having to hard code ARNs or unique IDs as input.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_quicksight_user import DataAwsQuicksightUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsQuicksightUser(self, "example",
+            user_name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `user_name` - (Required) The name of the user that you want to match.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional) AWS account ID.
+* `namespace` - (Optional) QuickSight namespace. Defaults to `default`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `active` - The active status of the user. When you create an Amazon QuickSight user that’s not an IAM user or an Active Directory user, that user is inactive until they sign in and provide a password.
+* `arn` - The Amazon Resource Name (ARN) for the user.
+* `email` - The user's email address.
+* `identity_type` - The type of identity authentication used by the user.
+* `principal_id` - The principal ID of the user.
+* `user_role` - The Amazon QuickSight role for the user. The user role can be one of the following:
+    - `READER`: A user who has read-only access to dashboards.
+    - `AUTHOR`: A user who can create data sources, datasets, analyses, and dashboards.
+    - `ADMIN`: A user who is an author, who can also manage Amazon QuickSight settings.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ram_resource_share.html.markdown b/website/docs/cdktf/python/d/ram_resource_share.html.markdown
new file mode 100644
index 00000000000..b34504faf12
--- /dev/null
+++ b/website/docs/cdktf/python/d/ram_resource_share.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "RAM (Resource Access Manager)"
+layout: "aws"
+page_title: "AWS: aws_ram_resource_share"
+description: |-
+  Retrieve information about a RAM Resource Share
+---
+
+
+# Data Source: aws_ram_resource_share
+
+`aws_ram_resource_share` retrieves information about a RAM Resource Share.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ram_resource_share import DataAwsRamResourceShare
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRamResourceShare(self, "example",
+            name="example",
+            resource_owner="SELF"
+        )
+```
+
+## Search by filters
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ram_resource_share import DataAwsRamResourceShare, DataAwsRamResourceShareFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRamResourceShare(self, "tag_filter",
+            filter=[DataAwsRamResourceShareFilter(
+                name="NameOfTag",
+                values=["exampleNameTagValue"]
+            )
+            ],
+            name="MyResourceName",
+            resource_owner="SELF"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the resource share to retrieve.
+* `resource_owner` - (Required) Owner of the resource share. Valid values are `SELF` or `OTHER-ACCOUNTS`.
+* `resource_share_status` - (Optional) Specifies that you want to retrieve details of only those resource shares that have this status. Valid values are `PENDING`, `ACTIVE`, `FAILED`, `DELETING`, and `DELETED`.
+* `filter` - (Optional) Filter used to scope the list, e.g., by tags. See [related docs](https://docs.aws.amazon.com/ram/latest/APIReference/API_TagFilter.html).
+    * `name` - (Required) Name of the tag key to filter on.
+    * `values` - (Required) Value of the tag key.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the resource share.
+* `id` - ARN of the resource share.
+* `status` - Status of the RAM share.
+* `owning_account_id` - ID of the AWS account that owns the resource share.
+* `tags` - Tags attached to the RAM share.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/rds_certificate.html.markdown b/website/docs/cdktf/python/d/rds_certificate.html.markdown
new file mode 100644
index 00000000000..a9a8ed2128b
--- /dev/null
+++ b/website/docs/cdktf/python/d/rds_certificate.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_certificate"
+description: |-
+  Information about an RDS Certificate.
+---
+
+
+# Data Source: aws_rds_certificate
+
+Information about an RDS Certificate.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_certificate import DataAwsRdsCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsCertificate(self, "example",
+            latest_valid_till=True
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Optional) Certificate identifier. For example, `rds-ca-2019`.
+* `latest_valid_till` - (Optional) When enabled, returns the certificate with the latest `ValidTill`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the certificate.
+* `certificate_type` - Type of certificate. For example, `CA`.
+* `customer_override` - Boolean whether there is an override for the default certificate identifier.
+* `customer_override_valid_till` - If there is an override for the default certificate identifier, when the override expires.
+* `thumbprint` - Thumbprint of the certificate.
+* `valid_from` - [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of certificate starting validity date.
+* `valid_till` - [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of certificate ending validity date.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/rds_cluster.html.markdown b/website/docs/cdktf/python/d/rds_cluster.html.markdown
new file mode 100644
index 00000000000..d428f9b9d67
--- /dev/null
+++ b/website/docs/cdktf/python/d/rds_cluster.html.markdown
@@ -0,0 +1,47 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster"
+description: |-
+  Provides an RDS cluster data source.
+---
+
+
+# Data Source: aws_rds_cluster
+
+Provides information about an RDS cluster.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_cluster import DataAwsRdsCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsCluster(self, "clusterName",
+            cluster_identifier="clusterName"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `cluster_identifier` - (Required) Cluster identifier of the RDS cluster.
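+
+Since `cluster_identifier` is the only argument, a lookup plus output stays short. A minimal sketch, not `cdktf convert` output, surfacing the `tags` attribute described below:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_rds_cluster import DataAwsRdsCluster
+class ClusterTagsLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsRdsCluster(self, "example",
+            cluster_identifier="clusterName"
+        )
+        # Expose the cluster's tags as a stack output.
+        TerraformOutput(self, "cluster_tags", value=example.tags)
+```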
+
+## Attribute Reference
+
+See the [RDS Cluster Resource](/docs/providers/aws/r/rds_cluster.html) for details on the
+returned attributes - they are identical for all attributes except `tags_all`. If you need the tags for this resource, use the `tags` attribute described below.
+
+* `tags` - A map of tags assigned to the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/rds_clusters.html.markdown b/website/docs/cdktf/python/d/rds_clusters.html.markdown
new file mode 100644
index 00000000000..3ff017ec99a
--- /dev/null
+++ b/website/docs/cdktf/python/d/rds_clusters.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_clusters"
+description: |-
+  Terraform data source for managing AWS RDS (Relational Database) Clusters.
+---
+
+
+# Data Source: aws_rds_clusters
+
+Terraform data source for managing AWS RDS (Relational Database) Clusters.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_clusters import DataAwsRdsClusters, DataAwsRdsClustersFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsClusters(self, "example",
+            filter=[DataAwsRdsClustersFilter(
+                name="engine",
+                values=["aurora-postgresql"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [RDS DescribeDBClusters API Reference](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBClusters.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `cluster_arns` - Set of cluster ARNs of the matched RDS clusters.
+* `cluster_identifiers` - Set of cluster identifiers of the matched RDS clusters.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/rds_engine_version.markdown b/website/docs/cdktf/python/d/rds_engine_version.markdown
new file mode 100644
index 00000000000..7487fd68bec
--- /dev/null
+++ b/website/docs/cdktf/python/d/rds_engine_version.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_engine_version"
+description: |-
+  Information about an RDS engine version.
+---
+
+
+# Data Source: aws_rds_engine_version
+
+Information about an RDS engine version.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_engine_version import DataAwsRdsEngineVersion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsEngineVersion(self, "test",
+            engine="mysql",
+            preferred_versions=["8.0.27", "8.0.26"]
+        )
+```
+
+### With `filter`
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_engine_version import DataAwsRdsEngineVersion, DataAwsRdsEngineVersionFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsEngineVersion(self, "test",
+            engine="aurora-postgresql",
+            filter=[DataAwsRdsEngineVersionFilter(
+                name="engine-mode",
+                values=["serverless"]
+            )
+            ],
+            include_all=True,
+            version="10.14"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine` - (Required) DB engine. Engine values include `aurora`, `aurora-mysql`, `aurora-postgresql`, `docdb`, `mariadb`, `mysql`, `neptune`, `oracle-ee`, `oracle-se`, `oracle-se1`, `oracle-se2`, `postgres`, `sqlserver-ee`, `sqlserver-ex`, `sqlserver-se`, and `sqlserver-web`.
+* `default_only` - (Optional) When set to `true`, the default version for the specified `engine` or combination of `engine` and major `version` will be returned. Can be used to limit responses to a single version when they would otherwise fail for returning multiple versions.
+* `filter` - (Optional) One or more name/value pairs to filter off of. There are several valid keys; for a full reference, check out [describe-db-engine-versions in the AWS CLI reference](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/describe-db-engine-versions.html).
+* `include_all` - (Optional) When set to `true`, the specified `version` or member of `preferred_versions` will be returned even if it is `deprecated`. Otherwise, only `available` versions will be returned.
+* `parameter_group_family` - (Optional) Name of a specific DB parameter group family. Examples of parameter group families are `mysql8.0`, `mariadb10.4`, and `postgres12`.
+* `preferred_versions` - (Optional) Ordered list of preferred engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. If both the `version` and `preferred_versions` arguments are not configured, the data source will return the default version for the engine.
+* `version` - (Optional) Version of the DB engine. For example, `5.7.22`, `10.1.34`, and `12.3`. If both the `version` and `preferred_versions` arguments are not configured, the data source will return the default version for the engine.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `default_character_set` - The default character set for new instances of this engine version.
+* `engine_description` - Description of the database engine.
+* `exportable_log_types` - Set of log types that the database engine has available for export to CloudWatch Logs.
+* `status` - Status of the DB engine version, either `available` or `deprecated`.
+* `supported_character_sets` - Set of the character sets supported by this engine.
+* `supported_feature_names` - Set of features supported by the DB engine.
+* `supported_modes` - Set of the supported DB engine modes.
+* `supported_timezones` - Set of the time zones supported by this engine.
+* `supports_global_databases` - Indicates whether you can use Aurora global databases with a specific DB engine version.
+* `supports_log_exports_to_cloudwatch` - Indicates whether the engine version supports exporting the log types specified by `exportable_log_types` to CloudWatch Logs.
+* `supports_parallel_query` - Indicates whether you can use Aurora parallel query with a specific DB engine version.
+* `supports_read_replica` - Indicates whether the database engine version supports read replicas.
+* `valid_upgrade_targets` - Set of engine versions that this database engine version can be upgraded to.
+* `version_description` - Description of the database engine version.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/rds_orderable_db_instance.markdown b/website/docs/cdktf/python/d/rds_orderable_db_instance.markdown
new file mode 100644
index 00000000000..76ae8a01090
--- /dev/null
+++ b/website/docs/cdktf/python/d/rds_orderable_db_instance.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_orderable_db_instance"
+description: |-
+  Information about RDS orderable DB instances.
+---
+
+
+
+# Data Source: aws_rds_orderable_db_instance
+
+Information about RDS orderable DB instances and valid parameter combinations.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_orderable_db_instance import DataAwsRdsOrderableDbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsOrderableDbInstance(self, "test",
+            engine="mysql",
+            engine_version="5.7.22",
+            license_model="general-public-license",
+            preferred_instance_classes=["db.r6.xlarge", "db.m4.large", "db.t3.small"],
+            storage_type="standard"
+        )
+```
+
+Valid parameter combinations can also be found with `preferred_engine_versions` and/or `preferred_instance_classes`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_orderable_db_instance import DataAwsRdsOrderableDbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsOrderableDbInstance(self, "test",
+            engine="mysql",
+            license_model="general-public-license",
+            preferred_engine_versions=["5.6.35", "5.6.41", "5.6.44"],
+            preferred_instance_classes=["db.t2.small", "db.t3.medium", "db.t3.large"]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `availability_zone_group` - (Optional) Availability zone group.
+* `engine` - (Required) DB engine. Engine values include `aurora`, `aurora-mysql`, `aurora-postgresql`, `docdb`, `mariadb`, `mysql`, `neptune`, `oracle-ee`, `oracle-se`, `oracle-se1`, `oracle-se2`, `postgres`, `sqlserver-ee`, `sqlserver-ex`, `sqlserver-se`, and `sqlserver-web`.
+* `engine_version` - (Optional) Version of the DB engine. If none is provided, the AWS-defined default version will be used.
+* `instance_class` - (Optional) DB instance class. Examples of classes are `db.m3.2xlarge`, `db.t2.small`, and `db.m3.medium`.
+* `license_model` - (Optional) License model. Examples of license models are `general-public-license`, `bring-your-own-license`, and `amazon-license`.
+* `preferred_instance_classes` - (Optional) Ordered list of preferred RDS DB instance classes. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+* `preferred_engine_versions` - (Optional) Ordered list of preferred RDS DB instance engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+* `storage_type` - (Optional) Storage type. Examples of storage types are `standard`, `io1`, `gp2`, and `aurora`.
+* `supports_enhanced_monitoring` - (Optional) Enable this to ensure a DB instance supports Enhanced Monitoring at intervals from 1 to 60 seconds.
+* `supports_global_databases` - (Optional) Enable this to ensure a DB instance supports Aurora global databases with a specific combination of other DB engine attributes.
+* `supports_iam_database_authentication` - (Optional) Enable this to ensure a DB instance supports IAM database authentication.
+* `supports_iops` - (Optional) Enable this to ensure a DB instance supports provisioned IOPS.
+* `supports_kerberos_authentication` - (Optional) Enable this to ensure a DB instance supports Kerberos Authentication.
+* `supports_performance_insights` - (Optional) Enable this to ensure a DB instance supports Performance Insights.
+* `supports_storage_autoscaling` - (Optional) Enable this to ensure Amazon RDS can automatically scale storage for DB instances that use the specified DB instance class.
+* `supports_storage_encryption` - (Optional) Enable this to ensure a DB instance supports encrypted storage.
+* `vpc` - (Optional) Boolean that indicates whether to show only VPC or non-VPC offerings.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availability_zones` - Availability zones where the instance is available.
+* `max_iops_per_db_instance` - Maximum total provisioned IOPS for a DB instance.
+* `max_iops_per_gib` - Maximum provisioned IOPS per GiB for a DB instance.
+* `max_storage_size` - Maximum storage size for a DB instance.
+* `min_iops_per_db_instance` - Minimum total provisioned IOPS for a DB instance.
+* `min_iops_per_gib` - Minimum provisioned IOPS per GiB for a DB instance.
+* `min_storage_size` - Minimum storage size for a DB instance.
+* `multi_az_capable` - Whether a DB instance is Multi-AZ capable.
+* `outpost_capable` - Whether a DB instance supports RDS on Outposts.
+* `read_replica_capable` - Whether a DB instance can have a read replica.
+* `supported_engine_modes` - A list of the supported DB engine modes.
+* `supported_network_types` - The network types supported by the DB instance (`IPV4` or `DUAL`).
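+The attributes above can be fed straight into a DB instance so the instance class tracks whatever is currently orderable. The following is an illustrative sketch, not part of the generated provider docs: the `DbInstance` import follows the same generated-bindings convention as the examples above, and the engine versions, class names, and credentials are placeholders.
+
+```python
+# A minimal sketch (hypothetical values): pick an orderable combination and
+# reuse its engine, version, and instance class for an aws_db_instance.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.data_aws_rds_orderable_db_instance import DataAwsRdsOrderableDbInstance
+from imports.aws.db_instance import DbInstance
+class ExampleStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        orderable = DataAwsRdsOrderableDbInstance(self, "orderable",
+            engine="mysql",
+            preferred_engine_versions=["8.0.32", "8.0.28"],
+            preferred_instance_classes=["db.t3.micro", "db.t3.small"]
+        )
+        DbInstance(self, "example",
+            allocated_storage=20,
+            engine=Token.as_string(orderable.engine),
+            engine_version=Token.as_string(orderable.engine_version),
+            instance_class=Token.as_string(orderable.instance_class),
+            username="exampleuser",
+            password="Exampl3Pass",
+            skip_final_snapshot=True
+        )
+```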
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/rds_reserved_instance_offering.html.markdown b/website/docs/cdktf/python/d/rds_reserved_instance_offering.html.markdown
new file mode 100644
index 00000000000..ef2e03a9def
--- /dev/null
+++ b/website/docs/cdktf/python/d/rds_reserved_instance_offering.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_reserved_instance_offering"
+description: |-
+  Information about a single RDS Reserved Instance Offering.
+---
+
+
+
+# Data Source: aws_rds_reserved_instance_offering
+
+Information about a single RDS Reserved Instance Offering.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_reserved_instance_offering import DataAwsRdsReservedInstanceOffering
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRdsReservedInstanceOffering(self, "test",
+            db_instance_class="db.t2.micro",
+            duration=31536000,
+            multi_az=False,
+            offering_type="All Upfront",
+            product_description="mysql"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `db_instance_class` - (Required) DB instance class for the reserved DB instance.
+* `duration` - (Required) Duration of the reservation in years or seconds. Valid values are `1`, `3`, `31536000`, and `94608000`.
+* `multi_az` - (Required) Whether the reservation applies to Multi-AZ deployments.
+* `offering_type` - (Required) Offering type of this reserved DB instance. Valid values are `No Upfront`, `Partial Upfront`, and `All Upfront`.
+* `product_description` - (Required) Description of the reserved DB instance.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier for the reservation. Same as `offering_id`.
+* `currency_code` - Currency code for the reserved DB instance.
+* `fixed_price` - Fixed price charged for this reserved DB instance.
+* `offering_id` - Unique identifier for the reservation.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshift_cluster.html.markdown b/website/docs/cdktf/python/d/redshift_cluster.html.markdown
new file mode 100644
index 00000000000..725c16db7d9
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshift_cluster.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster"
+description: |-
+  Provides details about a specific Redshift cluster
+---
+
+
+
+# Data Source: aws_redshift_cluster
+
+Provides details about a specific Redshift cluster.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
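+#
+# Note: `firehose_role` and `bucket` below refer to an IAM role and an S3
+# bucket defined elsewhere in the converted configuration; only the cluster
+# lookup and the Firehose wiring are shown in this snippet.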
+#
+from imports.aws.data_aws_redshift_cluster import DataAwsRedshiftCluster
+from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream, KinesisFirehoseDeliveryStreamRedshiftConfiguration, KinesisFirehoseDeliveryStreamRedshiftConfigurationS3Configuration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsRedshiftCluster(self, "example",
+            cluster_identifier="example-cluster"
+        )
+        KinesisFirehoseDeliveryStream(self, "example_stream",
+            destination="redshift",
+            name="terraform-kinesis-firehose-example-stream",
+            redshift_configuration=KinesisFirehoseDeliveryStreamRedshiftConfiguration(
+                cluster_jdbcurl="jdbc:redshift://${" + example.endpoint + "}/${" + example.database_name + "}",
+                copy_options="delimiter '|'",
+                data_table_columns="example-col",
+                data_table_name="example-table",
+                password="Exampl3Pass",
+                role_arn=firehose_role.arn,
+                s3_configuration=KinesisFirehoseDeliveryStreamRedshiftConfigurationS3Configuration(
+                    bucket_arn=bucket.arn,
+                    buffer_interval=400,
+                    buffer_size=10,
+                    compression_format="GZIP",
+                    role_arn=firehose_role.arn
+                ),
+                username="exampleuser"
+            )
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `cluster_identifier` - (Required) Cluster identifier
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of cluster.
+* `allow_version_upgrade` - Whether major version upgrades can be applied during the maintenance period
+* `automated_snapshot_retention_period` - The backup retention period
+* `aqua_configuration_status` - The value represents how the cluster is configured to use AQUA.
+* `availability_zone` - Availability zone of the cluster
+* `availability_zone_relocation_enabled` - Indicates whether the cluster is able to be relocated to another availability zone.
+* `bucket_name` - Name of the S3 bucket where the log files are to be stored
+* `cluster_identifier` - Cluster identifier
+* `cluster_nodes` - Nodes in the cluster. Cluster node blocks are documented below
+* `cluster_parameter_group_name` - The name of the parameter group to be associated with this cluster
+* `cluster_public_key` - Public key for the cluster
+* `cluster_revision_number` - The cluster revision number
+* `cluster_subnet_group_name` - The name of a cluster subnet group to be associated with this cluster
+* `cluster_type` - Cluster type
+* `cluster_namespace_arn` - The namespace Amazon Resource Name (ARN) of the cluster
+* `database_name` - Name of the default database in the cluster
+* `default_iam_role_arn` - The ARN for the IAM role that was set as default for the cluster when the cluster was created.
+* `elastic_ip` - Elastic IP of the cluster
+* `enable_logging` - Whether cluster logging is enabled
+* `encrypted` - Whether the cluster data is encrypted
+* `endpoint` - Cluster endpoint
+* `enhanced_vpc_routing` - Whether enhanced VPC routing is enabled
+* `iam_roles` - IAM roles associated with the cluster
+* `kms_key_id` - KMS encryption key associated with the cluster
+* `master_username` - Username for the master DB user
+* `node_type` - Cluster node type
+* `number_of_nodes` - Number of nodes in the cluster
+* `maintenance_track_name` - The name of the maintenance track for the restored cluster.
+* `manual_snapshot_retention_period` - The default number of days to retain a manual snapshot.
+* `port` - Port the cluster responds on
+* `preferred_maintenance_window` - The maintenance window
+* `publicly_accessible` - Whether the cluster is publicly accessible
+* `s3_key_prefix` - Folder inside the S3 bucket where the log files are stored
+* `log_destination_type` - The log destination type.
+* `log_exports` - Collection of exported log types. Log types include the connection log, user log and user activity log.
+* `tags` - Tags associated with the cluster
+* `vpc_id` - VPC Id associated with the cluster
+* `vpc_security_group_ids` - The VPC security group Ids associated with the cluster
+
+Cluster nodes (for `cluster_nodes`) support the following attributes:
+
+* `node_role` - Whether the node is a leader node or a compute node
+* `private_ip_address` - Private IP address of a node within a cluster
+* `public_ip_address` - Public IP address of a node within a cluster
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshift_cluster_credentials.html.markdown b/website/docs/cdktf/python/d/redshift_cluster_credentials.html.markdown
new file mode 100644
index 00000000000..51c3330f05b
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshift_cluster_credentials.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster_credentials"
+description: |-
+  Provides Redshift cluster credentials
+---
+
+
+
+# Data Source: aws_redshift_cluster_credentials
+
+Provides Redshift cluster temporary credentials.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_redshift_cluster_credentials import DataAwsRedshiftClusterCredentials
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRedshiftClusterCredentials(self, "example",
+            cluster_identifier=Token.as_string(aws_redshift_cluster_example.cluster_identifier),
+            db_user=Token.as_string(aws_redshift_cluster_example.master_username)
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `auto_create` - (Optional) Create a database user with the name specified for the user named in `db_user` if one does not exist.
+* `cluster_identifier` - (Required) Unique identifier of the cluster that contains the database for which you are requesting credentials.
+* `db_name` - (Optional) Name of a database that DbUser is authorized to log on to. If `db_name` is not specified, `db_user` can log on to any existing database.
+* `db_user` - (Required) Name of a database user. If a user name matching `db_user` exists in the database, the temporary user credentials have the same permissions as the existing user. If `db_user` doesn't exist in the database and `auto_create` is `True`, a new user is created using the value for `db_user` with `PUBLIC` permissions. If a database user matching the value for `db_user` doesn't exist and `auto_create` is `False`, the command succeeds but the connection attempt will fail because the user doesn't exist in the database.
+* `db_groups` - (Optional) List of the names of existing database groups that the user named in `db_user` will join for the current session, in addition to any group memberships for an existing user. If not specified, a new user is added only to `PUBLIC`.
+* `duration_seconds` - (Optional) The number of seconds until the returned temporary password expires. Valid values are between `900` and `3600`. Default value is `900`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `db_password` - Temporary password that authorizes the user name returned by `db_user` to log on to the database `db_name`.
+* `expiration` - Date and time the password in `db_password` expires.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshift_orderable_cluster.html.markdown b/website/docs/cdktf/python/d/redshift_orderable_cluster.html.markdown
new file mode 100644
index 00000000000..c9309642eae
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshift_orderable_cluster.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_orderable_cluster"
+description: |-
+  Information about Redshift orderable clusters.
+---
+
+
+
+# Data Source: aws_redshift_orderable_cluster
+
+Information about Redshift Orderable Clusters and valid parameter combinations.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_redshift_orderable_cluster import DataAwsRedshiftOrderableCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRedshiftOrderableCluster(self, "test",
+            cluster_type="multi-node",
+            preferred_node_types=["dc2.large", "ds2.xlarge"]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `cluster_type` - (Optional) Redshift Cluster type. E.g., `multi-node` or `single-node`
+* `cluster_version` - (Optional) Redshift Cluster version. E.g., `1.0`
+* `node_type` - (Optional) Redshift Cluster node type. E.g., `dc2.8xlarge`
+* `preferred_node_types` - (Optional) Ordered list of preferred Redshift Cluster node types. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availability_zones` - List of Availability Zone names where the Redshift Cluster is available.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshift_service_account.html.markdown b/website/docs/cdktf/python/d/redshift_service_account.html.markdown
new file mode 100644
index 00000000000..316e8345147
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshift_service_account.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_service_account"
+description: |-
+  Get AWS Redshift Service Account for storing audit data in S3.
+---
+
+
+
+# Data Source: aws_redshift_service_account
+
+Use this data source to get the Account ID of the [AWS Redshift Service Account](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging)
+in a given region for the purpose of allowing Redshift to store audit data in S3.
+
+~> **Note:** AWS documentation [states that](https://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-bucket-permissions) a [service principal name](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-services) should be used instead of an AWS account ID in any relevant IAM policy.
+The `aws_redshift_service_account` data source has been deprecated and will be removed in a future version.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.data_aws_redshift_service_account import DataAwsRedshiftServiceAccount
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_policy import S3BucketPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bucket = S3Bucket(self, "bucket",
+            bucket="tf-redshift-logging-test-bucket",
+            force_destroy=True
+        )
+        main = DataAwsRedshiftServiceAccount(self, "main")
+        allow_audit_logging = DataAwsIamPolicyDocument(self, "allow_audit_logging",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:PutObject"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(main.arn)],
+                    type="AWS"
+                )
+                ],
+                resources=["${" + bucket.arn + "}/*"],
+                sid="Put bucket policy needed for audit logging"
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["s3:GetBucketAcl"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(main.arn)],
+                    type="AWS"
+                )
+                ],
+                resources=Token.as_list(data_aws_s3_bucket_bucket.arn),
+                sid="Get bucket policy needed for audit logging"
+            )
+            ]
+        )
+        aws_s3_bucket_policy_allow_audit_logging = S3BucketPolicy(self, "allow_audit_logging_3",
+            bucket=bucket.id,
+            policy=Token.as_string(allow_audit_logging.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_policy_allow_audit_logging.override_logical_id("allow_audit_logging")
+```
+
+## Argument Reference
+
+* `region` - (Optional) Name of the region whose AWS Redshift account ID is desired.
+Defaults to the region from the AWS provider configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the AWS Redshift service account in the selected region.
+* `arn` - ARN of the AWS Redshift service account in the selected region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshift_subnet_group.html.markdown b/website/docs/cdktf/python/d/redshift_subnet_group.html.markdown
new file mode 100644
index 00000000000..0afb2f7e9f3
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshift_subnet_group.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_subnet_group"
+description: |-
+  Provides details about a specific Redshift subnet group
+---
+
+
+
+# Data Source: aws_redshift_subnet_group
+
+Provides details about a specific Redshift subnet group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_redshift_subnet_group import DataAwsRedshiftSubnetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRedshiftSubnetGroup(self, "example",
+            name=Token.as_string(aws_redshift_subnet_group_example.name)
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the cluster subnet group for which information is requested.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Redshift Subnet Group name.
+* `description` - Description of the Redshift Subnet group.
+* `id` - Redshift Subnet group Name.
+* `subnet_ids` - An array of VPC subnet IDs.
+* `tags` - Tags associated with the Subnet Group
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshiftserverless_credentials.html.markdown b/website/docs/cdktf/python/d/redshiftserverless_credentials.html.markdown
new file mode 100644
index 00000000000..d4fefecd270
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshiftserverless_credentials.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_credentials"
+description: |-
+  Provides Redshift Serverless credentials
+---
+
+
+
+# Data Source: aws_redshiftserverless_credentials
+
+Provides Redshift Serverless temporary credentials for a workgroup.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_redshiftserverless_credentials import DataAwsRedshiftserverlessCredentials
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRedshiftserverlessCredentials(self, "example",
+            workgroup_name=Token.as_string(aws_redshiftserverless_workgroup_example.workgroup_name)
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `workgroup_name` - (Required) The name of the workgroup associated with the database.
+* `db_name` - (Optional) The name of the database to get temporary authorization to log on to.
+* `duration_seconds` - (Optional) The number of seconds until the returned temporary password expires. The minimum is 900 seconds, and the maximum is 3600 seconds.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `db_password` - Temporary password that authorizes the user name returned by `db_user` to log on to the database `db_name`.
+* `db_user` - A database user name that is authorized to log on to the database `db_name` using the password `db_password`. If the specified `db_user` exists in the database, the new user name has the same database privileges as the user named in `db_user`. By default, the user is added to `PUBLIC`.
+* `expiration` - Date and time the password in `db_password` expires.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshiftserverless_namespace.html.markdown b/website/docs/cdktf/python/d/redshiftserverless_namespace.html.markdown
new file mode 100644
index 00000000000..c98dd5e6f77
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshiftserverless_namespace.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_namespace"
+description: |-
+  Terraform data source for managing an AWS Redshift Serverless Namespace.
+---
+
+
+
+# Data Source: aws_redshiftserverless_namespace
+
+Terraform data source for managing an AWS Redshift Serverless Namespace.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_redshiftserverless_namespace import DataAwsRedshiftserverlessNamespace
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRedshiftserverlessNamespace(self, "example",
+            namespace_name="example-namespace"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `namespace_name` - (Required) The name of the namespace.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `admin_username` - The username of the administrator for the first database created in the namespace.
+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Namespace.
+* `db_name` - The name of the first database created in the namespace.
+* `default_iam_role_arn` - The Amazon Resource Name (ARN) of the IAM role to set as a default in the namespace. When specifying `default_iam_role_arn`, it also must be part of `iam_roles`.
+* `iam_roles` - A list of IAM roles to associate with the namespace.
+* `kms_key_id` - The ARN of the Amazon Web Services Key Management Service key used to encrypt your data.
+* `log_exports` - The types of logs the namespace can export. Available export types are `userlog`, `connectionlog`, and `useractivitylog`.
+* `namespace_id` - The Redshift Namespace ID.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/redshiftserverless_workgroup.html.markdown b/website/docs/cdktf/python/d/redshiftserverless_workgroup.html.markdown
new file mode 100644
index 00000000000..d529bb725d3
--- /dev/null
+++ b/website/docs/cdktf/python/d/redshiftserverless_workgroup.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_workgroup"
+description: |-
+  Terraform data source for managing an AWS Redshift Serverless Workgroup.
+---
+
+
+
+# Data Source: aws_redshiftserverless_workgroup
+
+Terraform data source for managing an AWS Redshift Serverless Workgroup.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
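+#
+# Note: `aws_redshiftserverless_workgroup_example` refers to a workgroup
+# resource defined elsewhere; the data source looks the workgroup up again
+# by its name.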
+#
+from imports.aws.data_aws_redshiftserverless_workgroup import DataAwsRedshiftserverlessWorkgroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRedshiftserverlessWorkgroup(self, "example",
+            workgroup_name=Token.as_string(aws_redshiftserverless_workgroup_example.workgroup_name)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `workgroup_name` - (Required) The name of the workgroup associated with the database.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Workgroup.
+* `id` - The Redshift Workgroup Name.
+* `endpoint` - The endpoint that is created from the workgroup. See `Endpoint` below.
+* `enhanced_vpc_routing` - The value that specifies whether to turn on enhanced virtual private cloud (VPC) routing, which forces Amazon Redshift Serverless to route traffic through your VPC instead of over the internet.
+* `publicly_accessible` - A value that specifies whether the workgroup can be accessed from a public network.
+* `security_group_ids` - An array of security group IDs to associate with the workgroup.
+* `subnet_ids` - An array of VPC subnet IDs to associate with the workgroup. When set, must contain at least three subnets spanning three Availability Zones. A minimum number of IP addresses is required and scales with the Base Capacity. For more information, see the following [AWS document](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-known-issues.html).
+* `workgroup_id` - The Redshift Workgroup ID.
+
+### Endpoint
+
+* `address` - The DNS address of the VPC endpoint.
+* `port` - The port that Amazon Redshift Serverless listens on.
+* `vpc_endpoint` - The VPC endpoint of the Redshift Serverless workgroup. See `VPC Endpoint` below.
+
+#### VPC Endpoint
+
+* `vpc_endpoint_id` - The unique identifier of the VPC endpoint.
+* `vpc_id` - The unique identifier of the VPC.
+* `network_interface` - The network interfaces of the endpoint. See `Network Interface` below.
+
+##### Network Interface
+
+* `availability_zone` - The availability Zone.
+* `network_interface_id` - The unique identifier of the network interface.
+* `private_ip_address` - The IPv4 address of the network interface within the subnet.
+* `subnet_id` - The unique identifier of the subnet.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/region.html.markdown b/website/docs/cdktf/python/d/region.html.markdown
new file mode 100644
index 00000000000..e200226c332
--- /dev/null
+++ b/website/docs/cdktf/python/d/region.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "Meta Data Sources"
+layout: "aws"
+page_title: "AWS: aws_region"
+description: |-
+  Provides details about a specific service region
+---
+
+
+
+# Data Source: aws_region
+
+`aws_region` provides details about a specific AWS region.
+
+As well as validating a given region name, this resource can be used to
+discover the name of the region configured within the provider. The latter
+can be useful in a child module which is inheriting an AWS provider
+configuration from its parent module.
+
+## Example Usage
+
+The following example shows how the resource might be used to obtain
+the name of the AWS region configured on the provider.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_region import DataAwsRegion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRegion(self, "current")
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+regions. The given filters must match exactly one region whose data will be
+exported as attributes.
+
+* `name` - (Optional) Full name of the region to select.
+
+* `endpoint` - (Optional) EC2 endpoint of the region to select.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` - Name of the selected region.
+
+* `endpoint` - EC2 endpoint for the selected region.
+
+* `description` - Region's description in this format: "Location (Region name)".
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/regions.html.markdown b/website/docs/cdktf/python/d/regions.html.markdown
new file mode 100644
index 00000000000..2c69c988246
--- /dev/null
+++ b/website/docs/cdktf/python/d/regions.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "Meta Data Sources"
+layout: "aws"
+page_title: "AWS: aws_regions"
+description: |-
+  Provides information about AWS Regions.
+---
+
+
+
+# Data Source: aws_regions
+
+Provides information about AWS Regions. Can be used to filter regions, e.g., by Opt-In status, or to list only regions enabled for the current account. To get details like endpoint and description of each region the data source can be combined with the [`aws_region` data source](/docs/providers/aws/d/region.html).
+
+## Example Usage
+
+Enabled AWS Regions:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_regions import DataAwsRegions
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRegions(self, "current")
+```
+
+All the regions, regardless of availability:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_regions import DataAwsRegions
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRegions(self, "current",
+            all_regions=True
+        )
+```
+
+To see regions that are filtered by `"not-opted-in"`, the `all_regions` argument needs to be set to `true` or no results will be returned.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
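+#
+# Note: without `all_regions=True` this filter would return no results,
+# since not-opted-in regions are excluded from the default query.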
+#
+from imports.aws.data_aws_regions import DataAwsRegions, DataAwsRegionsFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRegions(self, "current",
+            all_regions=True,
+            filter=[DataAwsRegionsFilter(
+                name="opt-in-status",
+                values=["not-opted-in"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `all_regions` - (Optional) If true, the data source will query all regions regardless of availability.
+
+* `filter` - (Optional) Configuration block(s) to use as filters. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [describe-regions AWS CLI Reference][1].
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the current partition (e.g., `aws` in AWS Commercial, `aws-cn` in AWS China).
+* `names` - Names of regions that meet the criteria.
+
+[1]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-regions.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/resourcegroupstaggingapi_resources.html.markdown b/website/docs/cdktf/python/d/resourcegroupstaggingapi_resources.html.markdown
new file mode 100644
index 00000000000..d8ea0938f1a
--- /dev/null
+++ b/website/docs/cdktf/python/d/resourcegroupstaggingapi_resources.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "Resource Groups Tagging"
+layout: "aws"
+page_title: "AWS: aws_resourcegroupstaggingapi_resources"
+description: |-
+  Provides details about resource tagging.
+---
+
+
+
+# Data Source: aws_resourcegroupstaggingapi_resources
+
+Provides details about resource tagging.
+
+## Example Usage
+
+### Get All Resource Tag Mappings
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_resourcegroupstaggingapi_resources import DataAwsResourcegroupstaggingapiResources
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsResourcegroupstaggingapiResources(self, "test")
+```
+
+### Filter By Tag Key and Value
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
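+#
+# Note: only resources carrying the `tag-key` tag with one of the listed
+# values are returned; everything else is filtered out.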
+#
+from imports.aws.data_aws_resourcegroupstaggingapi_resources import DataAwsResourcegroupstaggingapiResources, DataAwsResourcegroupstaggingapiResourcesTagFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsResourcegroupstaggingapiResources(self, "test",
+            tag_filter=[DataAwsResourcegroupstaggingapiResourcesTagFilter(
+                key="tag-key",
+                values=["tag-value-1", "tag-value-2"]
+            )
+            ]
+        )
+```
+
+### Filter By Resource Type
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_resourcegroupstaggingapi_resources import DataAwsResourcegroupstaggingapiResources
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsResourcegroupstaggingapiResources(self, "test",
+            resource_type_filters=["ec2:instance"]
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `exclude_compliant_resources` - (Optional) Specifies whether to exclude resources that are compliant with the tag policy. You can use this parameter only if the `include_compliance_details` argument is also set to `true`.
+* `include_compliance_details` - (Optional) Specifies whether to include details regarding the compliance with the effective tag policy.
+* `tag_filter` - (Optional) Specifies a list of Tag Filters (keys and values) to restrict the output to only those resources that have the specified tag and, if included, the specified value. See [Tag Filter](#tag-filter) below. Conflicts with `resource_arn_list`.
+* `resource_type_filters` - (Optional) Constraints on the resources that you want returned. The format of each resource type is `service:resourceType`. For example, specifying a resource type of `ec2` returns all Amazon EC2 resources (which includes EC2 instances). Specifying a resource type of `ec2:instance` returns only EC2 instances.
+* `resource_arn_list` - (Optional) Specifies a list of ARNs of resources for which you want to retrieve tag data. Conflicts with `tag_filter`.
+
+### Tag Filter
+
+If you do specify `tag_filter`, the response returns only those resources that are currently associated with the specified tag.
+If you don't specify a `tag_filter`, the response includes all resources that were ever associated with tags. Resources that currently don't have associated tags are shown with an empty tag set.
+
+A `tag_filter` block supports the following arguments:
+
+* `key` - (Required) One part of a key-value pair that makes up a tag.
+* `values` - (Optional) Optional part of a key-value pair that make up a tag.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `resource_tag_mapping_list` - List of objects matching the search criteria.
+    * `compliance_details` - List of objects with information that shows whether a resource is compliant with the effective tag policy, including details on any noncompliant tag keys.
+        * `compliance_status` - Whether the resource is compliant.
+        * `keys_with_noncompliant_values` - Set of tag keys with non-compliant tag values.
+        * `non_compliant_keys` - Set of non-compliant tag keys.
+    * `resource_arn` - ARN of the resource.
+    * `tags` - Map of tags assigned to the resource.
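+The nested `resource_tag_mapping_list` can be handed to a `TerraformOutput` to inspect what matched. A minimal, illustrative sketch, not part of the generated docs; it assumes the computed list can be passed to an output as-is:
+
+```python
+# A minimal sketch: expose the matched ARN/tag mappings as a stack output.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_resourcegroupstaggingapi_resources import DataAwsResourcegroupstaggingapiResources
+class ExampleStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        resources = DataAwsResourcegroupstaggingapiResources(self, "test",
+            resource_type_filters=["ec2:instance"]
+        )
+        # resource_tag_mapping_list is computed at apply time.
+        TerraformOutput(self, "tag_mappings",
+            value=resources.resource_tag_mapping_list
+        )
+```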
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route.html.markdown b/website/docs/cdktf/python/d/route.html.markdown
new file mode 100644
index 00000000000..3a65135bc60
--- /dev/null
+++ b/website/docs/cdktf/python/d/route.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route"
+description: |-
+  Provides details about a specific Route
+---
+
+
+
+# Data Source: aws_route
+
+`aws_route` provides details about a specific Route.
+
+This resource can prove useful when finding the resource associated with a CIDR. For example, finding the peering connection associated with a CIDR value.
+
+## Example Usage
+
+The following example shows how one might use a CIDR value to find a network interface ID and use this to create a data source of that network interface.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformVariable, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_network_interface import DataAwsNetworkInterface
+from imports.aws.data_aws_route import DataAwsRoute
+from imports.aws.data_aws_route_table import DataAwsRouteTable
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        subnet_id = TerraformVariable(self, "subnet_id")
+        route = DataAwsRoute(self, "route",
+            destination_cidr_block="10.0.1.0/24",
+            route_table_id=selected.id
+        )
+        DataAwsRouteTable(self, "selected",
+            subnet_id=subnet_id.string_value
+        )
+        DataAwsNetworkInterface(self, "interface",
+            id=Token.as_string(route.network_interface_id)
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available Route in the current region. The given filters must match exactly one Route whose data will be exported as attributes.
+
+The following arguments are required:
+
+* `route_table_id` - (Required) ID of the specific Route Table containing the Route entry.
+
+The following arguments are optional:
+
+* `carrier_gateway_id` - (Optional) EC2 Carrier Gateway ID of the Route belonging to the Route Table.
+* `core_network_arn` - (Optional) Core network ARN of the Route belonging to the Route Table.
+* `destination_cidr_block` - (Optional) CIDR block of the Route belonging to the Route Table.
+* `destination_ipv6_cidr_block` - (Optional) IPv6 CIDR block of the Route belonging to the Route Table.
+* `destination_prefix_list_id` - (Optional) ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the Route belonging to the Route Table.
+* `egress_only_gateway_id` - (Optional) Egress Only Gateway ID of the Route belonging to the Route Table.
+* `gateway_id` - (Optional) Gateway ID of the Route belonging to the Route Table.
+* `instance_id` - (Optional) Instance ID of the Route belonging to the Route Table.
+* `local_gateway_id` - (Optional) Local Gateway ID of the Route belonging to the Route Table.
+* `nat_gateway_id` - (Optional) NAT Gateway ID of the Route belonging to the Route Table.
+* `network_interface_id` - (Optional) Network Interface ID of the Route belonging to the Route Table.
+* `transit_gateway_id` - (Optional) EC2 Transit Gateway ID of the Route belonging to the Route Table.
+* `vpc_peering_connection_id` - (Optional) VPC Peering Connection ID of the Route belonging to the Route Table.
+
+## Attribute Reference
+
+All of the argument attributes are also exported as result attributes when there is data available. For example, the `vpc_peering_connection_id` field will be empty when the route is attached to a Network Interface.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_delegation_set.html.markdown b/website/docs/cdktf/python/d/route53_delegation_set.html.markdown
new file mode 100644
index 00000000000..c0dab10b324
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_delegation_set.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_delegation_set"
+description: |-
+  Provides details about a specific Route 53 Delegation Set
+---
+
+
+
+# Data Source: aws_route53_delegation_set
+
+`aws_route53_delegation_set` provides details about a specific Route 53 Delegation Set.
+
+This data source allows you to find the list of name servers associated with a specific delegation set.
+
+## Example Usage
+
+The following example shows how to get a delegation set from its ID.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_delegation_set import DataAwsRoute53DelegationSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53DelegationSet(self, "dset",
+            id="MQWGHCBFAKEID"
+        )
+```
+
+## Argument Reference
+
+* `id` - (Required) Delegation set ID.
+
+The following attributes are additionally exported:
+
+* `arn` - ARN of the Delegation Set.
+* `caller_reference` - Caller Reference of the delegation set.
+* `name_servers` - List of DNS name servers for the delegation set.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_endpoint.html.markdown b/website/docs/cdktf/python/d/route53_resolver_endpoint.html.markdown
new file mode 100644
index 00000000000..dbf156a2fa7
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_endpoint.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_endpoint"
+description: |-
+  Provides details about a specific Route 53 Resolver Endpoint
+---
+
+
+
+# Data Source: aws_route53_resolver_endpoint
+
+`aws_route53_resolver_endpoint` provides details about a specific Route53 Resolver Endpoint.
+
+This data source allows you to find the list of IP addresses associated with a specific Route53 Resolver Endpoint.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
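+#
+# Note: this first example looks the endpoint up directly by its ID; the
+# second example below does the same lookup with a NAME filter instead.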
+#
+from imports.aws.data_aws_route53_resolver_endpoint import DataAwsRoute53ResolverEndpoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverEndpoint(self, "example",
+            resolver_endpoint_id="rslvr-in-1abc2345ef678g91h"
+        )
+```
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_endpoint import DataAwsRoute53ResolverEndpoint, DataAwsRoute53ResolverEndpointFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverEndpoint(self, "example",
+            filter=[DataAwsRoute53ResolverEndpointFilter(
+                name="NAME",
+                values=["MyResolverExampleName"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+* `resolver_endpoint_id` - (Optional) ID of the Route53 Resolver Endpoint.
+* `filter` - (Optional) One or more name/value pairs to use as filters. There are
+several valid keys; for a full reference, check out
+[Route53resolver Filter value in the AWS API reference][1].
+
+In addition to all arguments above, the following attributes are exported:
+
+* `arn` - Computed ARN of the Route53 Resolver Endpoint.
+* `direction` - Direction of the queries to or from the Resolver Endpoint.
+* `ip_addresses` - List of IP addresses that have been associated with the Resolver Endpoint.
+* `status` - Current status of the Resolver Endpoint.
+* `vpc_id` - ID of the Host VPC that the Resolver Endpoint resides in.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_Filter.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_firewall_config.html.markdown b/website/docs/cdktf/python/d/route53_resolver_firewall_config.html.markdown
new file mode 100644
index 00000000000..bfdf66c7ef6
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_firewall_config.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_config"
+description: |-
+  Provides details about a specific Route 53 Resolver DNS Firewall config.
+---
+
+
+
+# Data Source: aws_route53_resolver_firewall_config
+
+`aws_route53_resolver_firewall_config` provides details about a specific Route 53 Resolver DNS Firewall config.
+
+This data source allows you to find details about a specific Route 53 Resolver DNS Firewall config.
+
+## Example Usage
+
+The following example shows how to get a firewall config using the VPC ID.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_firewall_config import DataAwsRoute53ResolverFirewallConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverFirewallConfig(self, "example",
+            resource_id="vpc-exampleid"
+        )
+```
+
+## Argument Reference
+
+* `resource_id` - (Required) The ID of the VPC from Amazon VPC that the configuration is for.
+
+The following attributes are additionally exported:
+
+* `firewall_fail_open` - Determines how DNS Firewall operates during failures, for example when all traffic that is sent to DNS Firewall fails to receive a reply.
+* `id` - The ID of the firewall configuration.
+* `owner_id` - The Amazon Web Services account ID of the owner of the VPC that this firewall configuration applies to.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_firewall_domain_list.html.markdown b/website/docs/cdktf/python/d/route53_resolver_firewall_domain_list.html.markdown
new file mode 100644
index 00000000000..617d0b5aafa
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_firewall_domain_list.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_domain_list"
+description: |-
+  Retrieves the specified firewall domain list.
+---
+
+
+
+# Data Source: aws_route53_resolver_firewall_domain_list
+
+`aws_route53_resolver_firewall_domain_list` retrieves the specified firewall domain list.
+
+This data source allows you to retrieve details about a specific Route 53 Resolver DNS Firewall domain list.
+
+## Example Usage
+
+The following example shows how to get a firewall domain list from its ID.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_firewall_domain_list import DataAwsRoute53ResolverFirewallDomainList
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverFirewallDomainList(self, "example",
+            firewall_domain_list_id="rslvr-fdl-example"
+        )
+```
+
+## Argument Reference
+
+* `firewall_domain_list_id` - (Required) The ID of the domain list.
+
+The following attributes are additionally exported:
+
+* `arn` - The Amazon Resource Name (ARN) of the firewall domain list.
+* `creation_time` - The date and time that the domain list was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creator_request_id` - A unique string defined by you to identify the request.
+* `domain_count` - The number of domain names that are specified in the domain list.
+* `name` - The name of the domain list.
+* `managed_owner_name` - The owner of the list, used only for lists that are not managed by you.
+* `modification_time` - The date and time that the domain list was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `status` - The status of the domain list.
+* `status_message` - Additional information about the status of the list, if available.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_firewall_rule_group.html.markdown b/website/docs/cdktf/python/d/route53_resolver_firewall_rule_group.html.markdown
new file mode 100644
index 00000000000..843e083f1cc
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_firewall_rule_group.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule_group"
+description: |-
+  Retrieves the specified firewall rule group.
+---
+
+
+# Data Source: aws_route53_resolver_firewall_rule_group
+
+`aws_route53_resolver_firewall_rule_group` retrieves the specified firewall rule group.
+
+This data source allows you to retrieve details about a specific Route 53 Resolver DNS Firewall rule group.
+
+## Example Usage
+
+The following example shows how to get a firewall rule group from its ID.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_firewall_rule_group import DataAwsRoute53ResolverFirewallRuleGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverFirewallRuleGroup(self, "example",
+            firewall_rule_group_id="rslvr-frg-example"
+        )
+```
+
+## Argument Reference
+
+* `firewall_rule_group_id` - (Required) The ID of the rule group.
+
+The following attributes are additionally exported:
+
+* `arn` - The ARN (Amazon Resource Name) of the rule group.
+* `creation_time` - The date and time that the rule group was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creator_request_id` - A unique string defined by you to identify the request.
+* `name` - The name of the rule group.
+* `modification_time` - The date and time that the rule group was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `owner_id` - The Amazon Web Services account ID for the account that created the rule group. When a rule group is shared with your account, this is the account that has shared the rule group with you.
+* `rule_count` - The number of rules in the rule group.
+* `share_status` - Whether the rule group is shared with other Amazon Web Services accounts, or was shared with the current account by another Amazon Web Services account.
+* `status` - The status of the rule group.
+* `status_message` - Additional information about the status of the rule group, if available.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_firewall_rule_group_association.html.markdown b/website/docs/cdktf/python/d/route53_resolver_firewall_rule_group_association.html.markdown
new file mode 100644
index 00000000000..f1764e844f6
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_firewall_rule_group_association.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule_group_association"
+description: |-
+  Retrieves the specified firewall rule group association.
+---
+
+
+# Data Source: aws_route53_resolver_firewall_rule_group_association
+
+`aws_route53_resolver_firewall_rule_group_association` retrieves the specified firewall rule group association.
+
+This data source allows you to retrieve details about a specific Route 53 Resolver DNS Firewall rule group association.
+
+## Example Usage
+
+The following example shows how to get a firewall rule group association from its ID.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_firewall_rule_group_association import DataAwsRoute53ResolverFirewallRuleGroupAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverFirewallRuleGroupAssociation(self, "example",
+            firewall_rule_group_association_id="rslvr-frgassoc-example"
+        )
+```
+
+## Argument Reference
+
+* `firewall_rule_group_association_id` - (Required) The identifier for the association.
+
+The following attributes are additionally exported:
+
+* `arn` - The Amazon Resource Name (ARN) of the firewall rule group association.
+* `creation_time` - The date and time that the association was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creator_request_id` - A unique string defined by you to identify the request.
+* `firewall_rule_group_id` - The unique identifier of the firewall rule group.
+* `managed_owner_name` - The owner of the association, used only for associations that are not managed by you.
+* `modification_time` - The date and time that the association was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `mutation_protection` - If enabled, this setting disallows modification or removal of the association, to help prevent accidentally altering DNS firewall protections.
+* `name` - The name of the association.
+* `priority` - The setting that determines the processing order of the rule group among the rule groups that are associated with a single VPC.
+* `status` - The current status of the association.
+* `status_message` - Additional information about the status of the response, if available.
+* `vpc_id` - The unique identifier of the VPC that is associated with the rule group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_firewall_rules.html.markdown b/website/docs/cdktf/python/d/route53_resolver_firewall_rules.html.markdown
new file mode 100644
index 00000000000..93a88b8b3ee
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_firewall_rules.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rules"
+description: |-
+  Provides details about rules in a specific Route53 Resolver Firewall rule group.
+---
+
+
+# Data Source: aws_route53_resolver_firewall_rules
+
+`aws_route53_resolver_firewall_rules` provides details about rules in a specific Route53 Resolver Firewall rule group.
+
+## Example Usage
+
+The following example shows how to get Route53 Resolver Firewall rules based on the associated firewall rule group ID.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_firewall_rules import DataAwsRoute53ResolverFirewallRules
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverFirewallRules(self, "example",
+            firewall_rule_group_id=Token.as_string(aws_route53_resolver_firewall_rule_group_example.id)
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the firewall rules in the given rule group.
+All rules that match the given filters are returned in the `firewall_rules` attribute.
+
+* `firewall_rule_group_id` - (Required) The unique identifier of the firewall rule group that you want to retrieve the rules for.
+* `action` - (Optional) The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list.
+* `priority` - (Optional) The setting that determines the processing order of the rules in a rule group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `firewall_rules` - List with information about the firewall rules. See details below.
+
+### firewall_rules
+
+* `block_override_dns_type` - The DNS record's type.
+* `block_override_domain` - The custom DNS record to send back in response to the query.
+* `block_override_ttl` - The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record.
+* `block_response` - The way that you want DNS Firewall to block the request.
+* `creation_time` - The date and time that the rule was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creator_request_id` - A unique string defined by you to identify the request.
+* `firewall_domain_list_id` - The ID of the domain list that's used in the rule.
+* `modification_time` - The date and time that the rule was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `name` - The name of the rule.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_query_log_config.html.markdown b/website/docs/cdktf/python/d/route53_resolver_query_log_config.html.markdown
new file mode 100644
index 00000000000..79586d66f2c
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_query_log_config.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_query_log_config"
+description: |-
+  Provides details about a specific Route53 Resolver Query Logging Configuration.
+---
+
+
+# Data Source: aws_route53_resolver_query_log_config
+
+`aws_route53_resolver_query_log_config` provides details about a specific Route53 Resolver Query Logging Configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_query_log_config import DataAwsRoute53ResolverQueryLogConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverQueryLogConfig(self, "example",
+            resolver_query_log_config_id="rqlc-1abc2345ef678g91h"
+        )
+```
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_query_log_config import DataAwsRoute53ResolverQueryLogConfig, DataAwsRoute53ResolverQueryLogConfigFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverQueryLogConfig(self, "example",
+            filter=[DataAwsRoute53ResolverQueryLogConfigFilter(
+                name="Name",
+                values=["shared-query-log-config"]
+            ), DataAwsRoute53ResolverQueryLogConfigFilter(
+                name="ShareStatus",
+                values=["SHARED_WITH_ME"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+* `resolver_query_log_config_id` - (Optional) ID of the Route53 Resolver Query Logging Configuration.
+* `filter` - (Optional) One or more name/value pairs to use as filters. There are several valid keys; for a full reference, see [Route53resolver Filter value in the AWS API reference][1].
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The ID for the query logging configuration.
+* `arn` - Computed ARN of the Route53 Resolver Query Logging Configuration.
+* `destination_arn` - The ARN of the resource that you want Resolver to send query logs: an Amazon S3 bucket, a CloudWatch Logs log group, or a Kinesis Data Firehose delivery stream.
+* `name` - The name of the query logging configuration.
+* `owner_id` - The AWS account ID for the account that created the query logging configuration.
+* `share_status` - An indication of whether the query logging configuration is shared with other AWS accounts or was shared with the current account by another AWS account.
+* `tags` - Map of tags assigned to the query logging configuration.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_Filter.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_rule.html.markdown b/website/docs/cdktf/python/d/route53_resolver_rule.html.markdown
new file mode 100644
index 00000000000..8315c738adb
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_rule.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_rule"
+description: |-
+  Provides details about a specific Route53 Resolver rule
+---
+
+
+# Data Source: aws_route53_resolver_rule
+
+`aws_route53_resolver_rule` provides details about a specific Route53 Resolver rule.
+
+## Example Usage
+
+The following example shows how to get a Route53 Resolver rule based on its associated domain name and rule type.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_rule import DataAwsRoute53ResolverRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverRule(self, "example",
+            domain_name="subdomain.example.com",
+            rule_type="SYSTEM"
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available resolver rules in the current region.
+The given filters must match exactly one resolver rule whose data will be exported as attributes.
+
+* `domain_name` - (Optional) Domain name the desired resolver rule forwards DNS queries for. Conflicts with `resolver_rule_id`.
+* `name` - (Optional) Friendly name of the desired resolver rule. Conflicts with `resolver_rule_id`.
+* `resolver_endpoint_id` - (Optional) ID of the outbound resolver endpoint of the desired resolver rule. Conflicts with `resolver_rule_id`.
+* `resolver_rule_id` - (Optional) ID of the desired resolver rule. Conflicts with `domain_name`, `name`, `resolver_endpoint_id` and `rule_type`.
+* `rule_type` - (Optional) Rule type of the desired resolver rule. Valid values are `FORWARD`, `SYSTEM` and `RECURSIVE`. Conflicts with `resolver_rule_id`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the resolver rule.
+* `arn` - ARN (Amazon Resource Name) for the resolver rule.
+* `owner_id` - When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
+* `share_status` - Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
+Values are `NOT_SHARED`, `SHARED_BY_ME` or `SHARED_WITH_ME`.
+* `tags` - Map of tags assigned to the resolver rule.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_resolver_rules.html.markdown b/website/docs/cdktf/python/d/route53_resolver_rules.html.markdown
new file mode 100644
index 00000000000..63137d9c44c
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_resolver_rules.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_rules"
+description: |-
+  Provides details about a set of Route53 Resolver rules
+---
+
+
+# Data Source: aws_route53_resolver_rules
+
+`aws_route53_resolver_rules` provides details about a set of Route53 Resolver rules.
+
+## Example Usage
+
+### Retrieving the default resolver rule
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_rules import DataAwsRoute53ResolverRules
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverRules(self, "example",
+            owner_id="Route 53 Resolver",
+            rule_type="RECURSIVE",
+            share_status="NOT_SHARED"
+        )
+```
+
+### Retrieving forward rules shared with me
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_rules import DataAwsRoute53ResolverRules
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverRules(self, "example",
+            rule_type="FORWARD",
+            share_status="SHARED_WITH_ME"
+        )
+```
+
+### Retrieving rules by name regex
+
+Resolver rules whose name contains `abc`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_resolver_rules import DataAwsRoute53ResolverRules
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsRoute53ResolverRules(self, "example",
+            name_regex=".*abc.*"
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available resolver rules in the current region.
+
+* `name_regex` - (Optional) Regex string to filter resolver rule names.
+  The filtering is done locally, so it could have a performance impact if the result is large.
+  This argument should be used along with other arguments to limit the number of results returned.
+* `owner_id` - (Optional) When the desired resolver rules are shared with another AWS account, the account ID of the account that the rules are shared with.
+* `resolver_endpoint_id` - (Optional) ID of the outbound resolver endpoint for the desired resolver rules.
+* `rule_type` - (Optional) Rule type of the desired resolver rules. Valid values are `FORWARD`, `SYSTEM` and `RECURSIVE`.
+* `share_status` - (Optional) Whether the desired resolver rules are shared and, if so, whether the current account is sharing the rules with another account, or another account is sharing the rules with the current account. Valid values are `NOT_SHARED`, `SHARED_BY_ME` or `SHARED_WITH_ME`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `resolver_rule_ids` - IDs of the matched resolver rules.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_traffic_policy_document.html.markdown b/website/docs/cdktf/python/d/route53_traffic_policy_document.html.markdown
new file mode 100644
index 00000000000..c4c339e2033
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_traffic_policy_document.html.markdown
@@ -0,0 +1,225 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_traffic_policy_document"
+description: |-
+  Generates a Route53 traffic policy document in JSON format
+---
+
+
+# Data Source: aws_route53_traffic_policy_document
+
+Generates a Route53 traffic policy document in JSON format for use with resources that expect policy documents such as [`aws_route53_traffic_policy`](/docs/providers/aws/r/route53_traffic_policy.html).
+
+## Example Usage
+
+### Basic Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.data_aws_route53_traffic_policy_document import (
+    DataAwsRoute53TrafficPolicyDocument,
+    DataAwsRoute53TrafficPolicyDocumentEndpoint,
+    DataAwsRoute53TrafficPolicyDocumentRule,
+    DataAwsRoute53TrafficPolicyDocumentRulePrimary,
+    DataAwsRoute53TrafficPolicyDocumentRuleSecondary
+)
+from imports.aws.route53_traffic_policy import Route53TrafficPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsRegion(self, "current")
+        example = DataAwsRoute53TrafficPolicyDocument(self, "example",
+            endpoint=[DataAwsRoute53TrafficPolicyDocumentEndpoint(
+                id="my_elb",
+                type="elastic-load-balancer",
+                value="elb-111111.${" + current.name + "}.elb.amazonaws.com"
+            ), DataAwsRoute53TrafficPolicyDocumentEndpoint(
+                id="site_down_banner",
+                region=Token.as_string(current.name),
+                type="s3-website",
+                value="www.example.com"
+            )
+            ],
+            record_type="A",
+            rule=[DataAwsRoute53TrafficPolicyDocumentRule(
+                id="site_switch",
+                primary=DataAwsRoute53TrafficPolicyDocumentRulePrimary(
+                    endpoint_reference="my_elb"
+                ),
+                secondary=DataAwsRoute53TrafficPolicyDocumentRuleSecondary(
+                    endpoint_reference="site_down_banner"
+                ),
+                type="failover"
+            )
+            ],
+            start_rule="site_switch"
+        )
+        aws_route53_traffic_policy_example = Route53TrafficPolicy(self, "example_2",
+            comment="example comment",
+            document=Token.as_string(example.json),
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_traffic_policy_example.override_logical_id("example")
+```
+
+### Complex Example
+
+The following example showcases the use of nested rules within the traffic policy document and introduces the `geoproximity` rule type.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_traffic_policy_document import (
+    DataAwsRoute53TrafficPolicyDocument,
+    DataAwsRoute53TrafficPolicyDocumentEndpoint,
+    DataAwsRoute53TrafficPolicyDocumentRule,
+    DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation,
+    DataAwsRoute53TrafficPolicyDocumentRulePrimary,
+    DataAwsRoute53TrafficPolicyDocumentRuleSecondary
+)
+from imports.aws.route53_traffic_policy import Route53TrafficPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsRoute53TrafficPolicyDocument(self, "example",
+            endpoint=[DataAwsRoute53TrafficPolicyDocumentEndpoint(
+                id="na_endpoint_a",
+                type="elastic-load-balancer",
+                value="elb-111111.us-west-1.elb.amazonaws.com"
+            ), DataAwsRoute53TrafficPolicyDocumentEndpoint(
+                id="na_endpoint_b",
+                type="elastic-load-balancer",
+                value="elb-222222.us-west-1.elb.amazonaws.com"
+            ), DataAwsRoute53TrafficPolicyDocumentEndpoint(
+                id="eu_endpoint",
+                type="elastic-load-balancer",
+                value="elb-333333.eu-west-1.elb.amazonaws.com"
+            ), DataAwsRoute53TrafficPolicyDocumentEndpoint(
+                id="ap_endpoint",
+                type="elastic-load-balancer",
+                value="elb-444444.ap-northeast-2.elb.amazonaws.com"
+            )
+            ],
+            record_type="A",
+            rule=[DataAwsRoute53TrafficPolicyDocumentRule(
+                id="na_rule",
+                primary=DataAwsRoute53TrafficPolicyDocumentRulePrimary(
+                    endpoint_reference="na_endpoint_a"
+                ),
+                secondary=DataAwsRoute53TrafficPolicyDocumentRuleSecondary(
+                    endpoint_reference="na_endpoint_b"
+                ),
+                type="failover"
+            ), DataAwsRoute53TrafficPolicyDocumentRule(
+                geo_proximity_location=[DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation(
+                    bias=Token.as_string(10),
+                    evaluate_target_health=True,
+                    region="aws:route53:us-west-1",
+                    rule_reference="na_rule"
+                ), DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation(
+                    bias=Token.as_string(10),
+                    endpoint_reference="eu_endpoint",
+                    evaluate_target_health=True,
+                    region="aws:route53:eu-west-1"
+                ), DataAwsRoute53TrafficPolicyDocumentRuleGeoProximityLocation(
+                    bias=Token.as_string(0),
+                    endpoint_reference="ap_endpoint",
+                    evaluate_target_health=True,
+                    region="aws:route53:ap-northeast-2"
+                )
+                ],
+                id="geoproximity_rule",
+                type="geoproximity"
+            )
+            ],
+            start_rule="geoproximity_rule"
+        )
+        aws_route53_traffic_policy_example = Route53TrafficPolicy(self, "example_1",
+            comment="example comment",
+            document=Token.as_string(example.json),
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_traffic_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `endpoint` (Optional) - Configuration block for the definitions of the endpoints that you want to use in this traffic policy. See below
+* `record_type` (Optional) - DNS type of all of the resource record sets that Amazon Route 53 will create based on this traffic policy.
+* `rule` (Optional) - Configuration block for definitions of the rules that you want to use in this traffic policy. See below
+* `start_endpoint` (Optional) - An endpoint to be used as the starting point for the traffic policy.
+* `start_rule` (Optional) - A rule to be used as the starting point for the traffic policy.
+* `version` (Optional) - Version of the traffic policy format.
+
+### `endpoint`
+
+* `id` - (Required) ID of an endpoint you want to assign.
+* `type` - (Optional) Type of the endpoint. Valid values are `value`, `cloudfront`, `elastic-load-balancer`, and `s3-website`.
+* `region` - (Optional) To route traffic to an Amazon S3 bucket that is configured as a website endpoint, specify the region in which you created the bucket.
+* `value` - (Optional) Value of the `type`.
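+
+Neither example above uses `start_endpoint`. A minimal sketch of a policy that resolves directly to a static `value` endpoint with no rules; the endpoint ID `static_ip` and the IP `192.0.2.4` are placeholders, not values from this page:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_route53_traffic_policy_document import (
+    DataAwsRoute53TrafficPolicyDocument,
+    DataAwsRoute53TrafficPolicyDocumentEndpoint
+)
+class StaticValuePolicy(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # With no `rule` blocks, the policy starts at an endpoint instead of a rule.
+        DataAwsRoute53TrafficPolicyDocument(self, "static",
+            record_type="A",
+            start_endpoint="static_ip",
+            endpoint=[DataAwsRoute53TrafficPolicyDocumentEndpoint(
+                id="static_ip",
+                type="value",
+                value="192.0.2.4"  # hypothetical static answer
+            )]
+        )
+```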
+
+### `rule`
+
+* `id` - (Required) ID of a rule you want to assign.
+* `type` - (Optional) Type of the rule.
+* `primary` - (Optional) Configuration block for the settings for the rule or endpoint that you want to route traffic to whenever the corresponding resources are available. Only valid for `failover` type. See below
+* `secondary` - (Optional) Configuration block for the rule or endpoint that you want to route traffic to whenever the primary resources are not available. Only valid for `failover` type. See below
+* `location` - (Optional) Configuration block for a geolocation rule, which routes traffic based on the geographic location of your users. Only valid for `geo` type. See below
+* `geo_proximity_location` - (Optional) Configuration block for a geoproximity rule, which routes traffic to your resources based on the geographic location of those resources. Only valid for `geoproximity` type. See below
+* `regions` - (Optional) Configuration block for a latency rule, which routes traffic based on the latency (the time delay) between your users and the AWS regions where you've created AWS resources such as ELB load balancers and Amazon S3 buckets. Only valid for `latency` type. See below
+* `items` - (Optional) Configuration block for a multivalue answer rule, which routes traffic approximately randomly to your healthy resources. Only valid for `multivalue` type. See below
+
+### `primary` and `secondary`
+
+* `endpoint_reference` - (Optional) Reference to an endpoint.
+* `evaluate_target_health` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints.
+* `health_check` - (Optional) Health check to associate with the endpoint or rule.
+* `rule_reference` - (Optional) Reference to a rule.
+
+### `location`
+
+* `continent` - (Optional) Value of a continent.
+* `country` - (Optional) Value of a country.
+* `endpoint_reference` - (Optional) Reference to an endpoint.
+* `evaluate_target_health` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints.
+* `health_check` - (Optional) Health check to associate with the endpoint or rule.
+* `is_default` - (Optional) Indicates whether this set of values represents the default location.
+* `rule_reference` - (Optional) Reference to a rule.
+* `subdivision` - (Optional) Value of a subdivision.
+
+### `geo_proximity_location`
+
+* `bias` - (Optional) Specify a value for `bias` if you want to route more traffic to an endpoint from nearby endpoints (positive values) or route less traffic to an endpoint (negative values).
+* `endpoint_reference` - (Optional) Reference to an endpoint.
+* `evaluate_target_health` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints.
+* `health_check` - (Optional) Health check to associate with the endpoint or rule.
+* `latitude` - (Optional) Represents the location south (negative) or north (positive) of the equator. Valid values are -90 degrees to 90 degrees.
+* `longitude` - (Optional) Represents the location west (negative) or east (positive) of the prime meridian. Valid values are -180 degrees to 180 degrees.
+* `region` - (Optional) If your endpoint is an AWS resource, specify the AWS Region that you created the resource in.
+* `rule_reference` - (Optional) Reference to a rule.
+
+### `region`
+
+* `endpoint_reference` - (Optional) Reference to an endpoint.
+* `evaluate_target_health` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints.
+* `health_check` - (Optional) Health check to associate with the endpoint or rule.
+* `region` - (Optional) Region code for the AWS Region that you created the resource in.
+* `rule_reference` - (Optional) Reference to a rule.
+
+### `item`
+
+* `endpoint_reference` - (Optional) Reference to an endpoint.
+* `health_check` - (Optional) Health check to associate with the endpoint or rule.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `json` - Standard JSON policy document rendered based on the arguments above.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route53_zone.html.markdown b/website/docs/cdktf/python/d/route53_zone.html.markdown
new file mode 100644
index 00000000000..4f8ebed1660
--- /dev/null
+++ b/website/docs/cdktf/python/d/route53_zone.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_zone"
+description: |-
+  Provides details about a specific Route 53 Hosted Zone
+---
+
+
+# Data Source: aws_route53_zone
+
+`aws_route53_zone` provides details about a specific Route 53 Hosted Zone.
+
+This data source allows you to find a Hosted Zone ID given a Hosted Zone name and certain search criteria.
+
+## Example Usage
+
+The following example shows how to get a Hosted Zone by name and how to use its data to create a Record Set.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_zone import DataAwsRoute53Zone
+from imports.aws.route53_record import Route53Record
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        selected = DataAwsRoute53Zone(self, "selected",
+            name="test.com.",
+            private_zone=True
+        )
+        Route53Record(self, "www",
+            name="www.${" + selected.name + "}",
+            records=["10.0.0.1"],
+            ttl=Token.as_number("300"),
+            type="A",
+            zone_id=Token.as_string(selected.zone_id)
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available Hosted Zones. Use either `zone_id` or `name`, not both. The given filter must match exactly one Hosted Zone. If you look up a private Hosted Zone by `name`, you must also set `private_zone` to `true`.
+
+* `zone_id` - (Optional) Hosted Zone ID of the desired Hosted Zone.
+* `name` - (Optional) Hosted Zone name of the desired Hosted Zone.
+* `private_zone` - (Optional) Used with `name` field to get a private Hosted Zone.
+* `vpc_id` - (Optional) Used with `name` field to get a private Hosted Zone associated with the `vpc_id` (in this case, `private_zone` is not mandatory).
+* `tags` - (Optional) Used with `name` field. A map of tags, each pair of which must exactly match a pair on the desired Hosted Zone.
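+
+For the `zone_id` path, a minimal sketch; the zone ID `Z1D633PJN98FT9` is a hypothetical placeholder:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_route53_zone import DataAwsRoute53Zone
+class ZoneById(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Look up the Hosted Zone by ID alone; `name` is omitted because
+        # `zone_id` and `name` are mutually exclusive.
+        DataAwsRoute53Zone(self, "selected",
+            zone_id="Z1D633PJN98FT9"
+        )
+```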
+
+## Attribute Reference
+
+All of the argument attributes are also exported as result attributes. This data source will complete the data by populating any fields that are not included in the configuration with the data for the selected Hosted Zone.
+
+The following attributes are additionally exported:
+
+* `arn` - ARN of the Hosted Zone.
+* `caller_reference` - Caller Reference of the Hosted Zone.
+* `comment` - Comment field of the Hosted Zone.
+* `name_servers` - List of DNS name servers for the Hosted Zone.
+* `primary_name_server` - The Route 53 name server that created the SOA record.
+* `resource_record_set_count` - The number of record sets in the Hosted Zone.
+* `linked_service_principal` - The service that created the Hosted Zone (e.g., `servicediscovery.amazonaws.com`).
+* `linked_service_description` - The description provided by the service that created the Hosted Zone (e.g., `arn:aws:servicediscovery:us-east-1:1234567890:namespace/ns-xxxxxxxxxxxxxxxx`).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route_table.html.markdown b/website/docs/cdktf/python/d/route_table.html.markdown
new file mode 100644
index 00000000000..9920ff15bd2
--- /dev/null
+++ b/website/docs/cdktf/python/d/route_table.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route_table"
+description: |-
+  Provides details about a specific Route Table
+---
+
+
+# Data Source: aws_route_table
+
+`aws_route_table` provides details about a specific Route Table.
+
+This resource can prove useful when a module accepts a Subnet ID as an input variable and needs to, for example, add a route in the Route Table.
+
+## Example Usage
+
+The following example shows how one might accept a Route Table ID as a variable and use this data source to obtain the data necessary to create a route.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformVariable, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route_table import DataAwsRouteTable
+from imports.aws.route import Route
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        subnet_id = TerraformVariable(self, "subnet_id")
+        selected = DataAwsRouteTable(self, "selected",
+            subnet_id=subnet_id.string_value
+        )
+        Route(self, "route",
+            destination_cidr_block="10.0.1.0/22",
+            route_table_id=Token.as_string(selected.id),
+            vpc_peering_connection_id="pcx-45ff3dc1"
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available Route Table in the current region. The given filters must match exactly one Route Table whose data will be exported as attributes.
+
+The following arguments are optional:
+
+* `filter` - (Optional) Configuration block. Detailed below.
+* `gateway_id` - (Optional) ID of an Internet Gateway or Virtual Private Gateway which is connected to the Route Table (not exported if not passed as a parameter).
+* `route_table_id` - (Optional) ID of the specific Route Table to retrieve.
+* `subnet_id` - (Optional) ID of a Subnet which is connected to the Route Table (not exported if not passed as a parameter).
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Route Table.
+* `vpc_id` - (Optional) ID of the VPC that the desired Route Table belongs to.
+
+### filter
+
+Complex filters can be expressed using one or more `filter` blocks.
+
+The following arguments are required:
+
+* `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeRouteTables.html).
+* `values` - (Required) Set of values that are accepted for the given field. A Route Table will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the route table.
+* `associations` - List of associations with attributes detailed below.
+* `owner_id` - ID of the AWS account that owns the route table.
+* `routes` - List of routes with attributes detailed below.
+
+### routes
+
+When relevant, routes are also exported with the following attributes:
+
+For destinations:
+
+* `cidr_block` - CIDR block of the route.
+* `destination_prefix_list_id` - The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the route.
+* `ipv6_cidr_block` - IPv6 CIDR block of the route.
+
+For targets:
+
+* `carrier_gateway_id` - ID of the Carrier Gateway.
+* `core_network_arn` - ARN of the core network.
+* `egress_only_gateway_id` - ID of the Egress Only Internet Gateway.
+* `gateway_id` - Internet Gateway ID.
+* `instance_id` - EC2 instance ID.
+* `local_gateway_id` - Local Gateway ID.
+* `nat_gateway_id` - NAT Gateway ID.
+* `network_interface_id` - ID of the elastic network interface (ENI) to use.
+* `transit_gateway_id` - EC2 Transit Gateway ID.
+* `vpc_endpoint_id` - VPC Endpoint ID.
+* `vpc_peering_connection_id` - VPC Peering ID.
+
+### associations
+
+Associations are also exported with the following attributes:
+
+* `gateway_id` - Gateway ID. Only set when associated with an Internet Gateway or Virtual Private Gateway.
+* `main` - Whether the association is due to the main route table.
+* `route_table_association_id` - Association ID.
+* `route_table_id` - Route Table ID.
+* `subnet_id` - Subnet ID. Only set when associated with a subnet.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/route_tables.html.markdown b/website/docs/cdktf/python/d/route_tables.html.markdown
new file mode 100644
index 00000000000..e2f65ab97e9
--- /dev/null
+++ b/website/docs/cdktf/python/d/route_tables.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route_tables"
+description: |-
+  Get information on Amazon route tables.
+---
+
+
+# Data Source: aws_route_tables
+
+This resource can be useful for getting back a list of route table IDs to be referenced elsewhere.
+
+## Example Usage
+
+The following example adds a route for a particular CIDR block to every (private kops) route table in a specified VPC, using a particular VPC peering connection.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformCount, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route_tables import DataAwsRouteTables, DataAwsRouteTablesFilter
+from imports.aws.route import Route
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        rts = DataAwsRouteTables(self, "rts",
+            filter=[DataAwsRouteTablesFilter(
+                name="tag:kubernetes.io/kops/role",
+                values=["private*"]
+            )
+            ],
+            vpc_id=vpc_id.string_value
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        r_count = TerraformCount.of(Token.as_number(Fn.length_of(rts.ids)))
+        Route(self, "r",
+            destination_cidr_block="10.0.0.0/22",
+            route_table_id=Token.as_string(
+                property_access(Fn.tolist(rts.ids), [r_count.index])),
+            vpc_peering_connection_id="pcx-0e9a7a9ecd137dc54",
+            count=r_count
+        )
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+
+* `vpc_id` - (Optional) VPC ID that you want to filter from.
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired route tables.
+
+More complex filters can be expressed using one or more `filter` sub-blocks, which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeRouteTables.html).
+
+* `values` - (Required) Set of values that are accepted for the given field. A Route Table will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the route table IDs found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/s3_account_public_access_block.html.markdown b/website/docs/cdktf/python/d/s3_account_public_access_block.html.markdown
new file mode 100644
index 00000000000..01477ab5b4d
--- /dev/null
+++ b/website/docs/cdktf/python/d/s3_account_public_access_block.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3_account_public_access_block"
+description: |-
+  Provides S3 account-level Public Access Block Configuration
+---
+
+
+# Data Source: aws_s3_account_public_access_block
+
+The S3 account public access block data source returns account-level public access block configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_s3_account_public_access_block import DataAwsS3AccountPublicAccessBlock
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsS3AccountPublicAccessBlock(self, "example")
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `account_id` - (Optional) AWS account ID to configure. Defaults to automatically determined account ID of the Terraform AWS provider.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS account ID.
+* `block_public_acls` - Whether Amazon S3 blocks public ACLs for buckets in this account. Returns `true` or `false`.
+* `block_public_policy` - Whether Amazon S3 blocks public bucket policies for buckets in this account. Returns `true` or `false`.
+* `ignore_public_acls` - Whether Amazon S3 ignores public ACLs for buckets in this account. Returns `true` or `false`.
+* `restrict_public_buckets` - Whether Amazon S3 restricts public bucket policies for buckets in this account. Returns `true` or `false`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/s3_bucket.html.markdown b/website/docs/cdktf/python/d/s3_bucket.html.markdown
new file mode 100644
index 00000000000..75cbc02f665
--- /dev/null
+++ b/website/docs/cdktf/python/d/s3_bucket.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket"
+description: |-
+  Provides details about a specific S3 bucket
+---
+
+
+# Data Source: aws_s3_bucket
+
+Provides details about a specific S3 bucket.
+
+This resource may prove useful when setting up a Route53 record, or an origin for a CloudFront Distribution.
+
+## Example Usage
+
+### Route53 Record
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_route53_zone import DataAwsRoute53Zone
+from imports.aws.data_aws_s3_bucket import DataAwsS3Bucket
+from imports.aws.route53_record import Route53Record, Route53RecordAlias
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, evaluateTargetHealth):
+        super().__init__(scope, name)
+        test_zone = DataAwsRoute53Zone(self, "test_zone",
+            name="test.com."
+        )
+        selected = DataAwsS3Bucket(self, "selected",
+            bucket="bucket.test.com"
+        )
+        Route53Record(self, "example",
+            alias=Route53RecordAlias(
+                name=Token.as_string(selected.website_domain),
+                zone_id=Token.as_string(selected.hosted_zone_id),
+                evaluate_target_health=evaluate_target_health
+            ),
+            name="bucket",
+            type="A",
+            zone_id=Token.as_string(test_zone.id)
+        )
+```
+
+### CloudFront Origin
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_distribution import CloudfrontDistribution, CloudfrontDistributionOrigin
+from imports.aws.data_aws_s3_bucket import DataAwsS3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, defaultCacheBehavior, enabled, restrictions, viewerCertificate):
+        super().__init__(scope, name)
+        selected = DataAwsS3Bucket(self, "selected",
+            bucket="a-test-bucket"
+        )
+        CloudfrontDistribution(self, "test",
+            origin=[CloudfrontDistributionOrigin(
+                domain_name=Token.as_string(selected.bucket_domain_name),
+                origin_id="s3-selected-bucket"
+            )
+            ],
+            default_cache_behavior=default_cache_behavior,
+            enabled=enabled,
+            restrictions=restrictions,
+            viewer_certificate=viewer_certificate
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the bucket.
+* `arn` - ARN of the bucket. Will be of format `arn:aws:s3:::bucketname`.
+* `bucket_domain_name` - Bucket domain name. Will be of format `bucketname.s3.amazonaws.com`.
+* `bucket_regional_domain_name` - The bucket region-specific domain name. The bucket domain name including the region name. Please refer to the [S3 endpoints reference](https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region) for format. Note: AWS CloudFront allows specifying an S3 region-specific endpoint when creating an S3 origin. This will prevent redirect issues from CloudFront to the S3 Origin URL. For more information, see the [Virtual Hosted-Style Requests for Other Regions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#deprecated-global-endpoint) section in the AWS S3 User Guide.
+* `hosted_zone_id` - The [Route 53 Hosted Zone ID](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_website_region_endpoints) for this bucket's region.
+* `region` - AWS region this bucket resides in.
+* `website_endpoint` - Website endpoint, if the bucket is configured with a website. If not, this will be an empty string.
+* `website_domain` - Domain of the website endpoint, if the bucket is configured with a website. If not, this will be an empty string. This is used to create Route 53 alias records.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/s3_bucket_object.html.markdown b/website/docs/cdktf/python/d/s3_bucket_object.html.markdown
new file mode 100644
index 00000000000..5069562080c
--- /dev/null
+++ b/website/docs/cdktf/python/d/s3_bucket_object.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object"
+description: |-
+  Provides metadata and optionally content of an S3 object
+---
+
+
+# Data Source: aws_s3_bucket_object
+
+~> **NOTE:** The `aws_s3_bucket_object` data source is DEPRECATED and will be removed in a future version! Use `aws_s3_object` instead, where new features and fixes will be added.
+
+The S3 object data source allows access to the metadata and _optionally_ (see below) content of an object stored inside an S3 bucket.
+
+~> **Note:** The content of an object (`body` field) is available only for objects which have a human-readable `Content-Type` (`text/*` and `application/json`). This is to prevent printing unsafe characters and potentially downloading large amounts of data which would be thrown away in favour of metadata.
+ +## Example Usage + +The following example retrieves a text object (which must have a `Content-Type` +value starting with `text/`) and uses it as the `user_data` for an EC2 instance: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_s3_bucket_object import DataAwsS3BucketObject +from imports.aws.instance import Instance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bootstrap_script = DataAwsS3BucketObject(self, "bootstrap_script", + bucket="ourcorp-deploy-config", + key="ec2-bootstrap-script.sh" + ) + Instance(self, "example", + ami="ami-2757f631", + instance_type="t2.micro", + user_data=Token.as_string(bootstrap_script.body) + ) +``` + +The following, more-complex example retrieves only the metadata for a zip +file stored in S3, which is then used to pass the most recent `version_id` +to AWS Lambda for use as a function implementation. More information about +Lambda functions is available in the documentation for +[`aws_lambda_function`](/docs/providers/aws/r/lambda_function.html). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_s3_bucket_object import DataAwsS3BucketObject +from imports.aws.lambda_function import LambdaFunction +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + lambda_ = DataAwsS3BucketObject(self, "lambda", + bucket="ourcorp-lambda-functions", + key="hello-world.zip" + ) + LambdaFunction(self, "test_lambda", + function_name="lambda_function_name", + handler="exports.test", + role=iam_for_lambda.arn, + s3_bucket=Token.as_string(lambda_.id), + s3_key=Token.as_string(lambda_.key), + s3_object_version=Token.as_string(lambda_.version_id) + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bucket` - (Required) Name of the bucket to read the object from. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified +* `key` - (Required) Full path to the object inside the bucket +* `version_id` - (Optional) Specific version ID of the object returned (defaults to latest version) + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `body` - Object data (see **limitations above** to understand cases in which this field is actually available) +* `bucket_key_enabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS. +* `cache_control` - Caching behavior along the request/reply chain. +* `content_disposition` - Presentational information for the object. +* `content_encoding` - What content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. +* `content_language` - Language the content is in. +* `content_length` - Size of the body in bytes. 
+* `content_type` - Standard MIME type describing the format of the object data. +* `etag` - [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) generated for the object (an MD5 sum of the object content in case it's not encrypted) +* `expiration` - If the object expiration is configured (see [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html)), the field includes this header. It includes the expiry-date and rule-id key value pairs providing object expiration information. The value of the rule-id is URL encoded. +* `expires` - Date and time at which the object is no longer cacheable. +* `last_modified` - Last modified date of the object in RFC1123 format (e.g., `Mon, 02 Jan 2006 15:04:05 MST`) +* `metadata` - Map of metadata stored with the object in S3 +* `object_lock_legal_hold_status` - Indicates whether this object has an active [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds). This field is only returned if you have permission to view an object's legal hold status. +* `object_lock_mode` - Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) currently in place for this object. +* `object_lock_retain_until_date` - The date and time when this object's object lock will expire. +* `server_side_encryption` - If the object is stored using server-side encryption (KMS or Amazon S3-managed encryption key), this field includes the chosen encryption and algorithm used. +* `sse_kms_key_id` - If present, specifies the ID of the Key Management Service (KMS) master encryption key that was used for the object. +* `storage_class` - [Storage class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html) information of the object. Available for all objects except for `Standard` storage class objects. +* `version_id` - Latest version ID of the object returned. +* `website_redirect_location` - If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata. +* `tags` - Map of tags assigned to the object. + +-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/s3_bucket_objects.html.markdown b/website/docs/cdktf/python/d/s3_bucket_objects.html.markdown new file mode 100644 index 00000000000..95e3309c534 --- /dev/null +++ b/website/docs/cdktf/python/d/s3_bucket_objects.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_objects" +description: |- + Returns keys and metadata of S3 objects +--- + + + +# Data Source: aws_s3_bucket_objects + +~> **NOTE:** The `aws_s3_bucket_objects` data source is DEPRECATED and will be removed in a future version! Use `aws_s3_objects` instead, where new features and fixes will be added. + +~> **NOTE on `max_keys`:** Retrieving very large numbers of keys can adversely affect Terraform's performance. + +The objects data source returns keys (i.e., file names) and other metadata about objects in an S3 bucket. 
+
+## Example Usage
+
+The following example retrieves a list of all object keys in an S3 bucket and creates corresponding Terraform object data sources:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformCount, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_s3_bucket_objects import DataAwsS3BucketObjects
+from imports.aws.data_aws_s3_object import DataAwsS3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        my_objects = DataAwsS3BucketObjects(self, "my_objects",
+            bucket="ourcorp"
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        object_info_count = TerraformCount.of(
+            Token.as_number(Fn.length_of(my_objects.keys)))
+        DataAwsS3Object(self, "object_info",
+            bucket=Token.as_string(my_objects.id),
+            key=Token.as_string(
+                Fn.element(my_objects.keys, Token.as_number(object_info_count.index))),
+            count=object_info_count
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `bucket` - (Required) Lists object keys in this S3 bucket. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified
+* `prefix` - (Optional) Limits results to object keys with this prefix (Default: none)
+* `delimiter` - (Optional) Character used to group keys (Default: none)
+* `encoding_type` - (Optional) Encodes keys using this method (Default: none; besides none, only "url" can be used)
+* `max_keys` - (Optional) Maximum object keys to return (Default: 1000)
+* `start_after` - (Optional) Returns key names lexicographically after a specific object key in your bucket (Default: none; S3 lists object keys in UTF-8 character encoding in lexicographical order)
+* `fetch_owner` - (Optional) Boolean specifying whether to populate the owner list (Default: false)
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `keys` - List of strings representing object keys
+* `common_prefixes` - List of any keys between `prefix` and the next occurrence of `delimiter` (i.e., similar to subdirectories of the `prefix` "directory"); the list is only returned when you specify `delimiter`
+* `id` - S3 Bucket.
+* `owners` - List of strings representing object owner IDs (see `fetch_owner` above)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/s3_bucket_policy.html.markdown b/website/docs/cdktf/python/d/s3_bucket_policy.html.markdown
new file mode 100644
index 00000000000..21c7d87d072
--- /dev/null
+++ b/website/docs/cdktf/python/d/s3_bucket_policy.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_policy"
+description: |-
+  Provides IAM policy of an S3 bucket
+---
+
+
+# Data Source: aws_s3_bucket_policy
+
+The bucket policy data source returns the IAM policy of an S3 bucket.
+
+## Example Usage
+
+The following example retrieves the IAM policy of a specified S3 bucket.
+ +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_s3_bucket_policy import DataAwsS3BucketPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsS3BucketPolicy(self, "example", + bucket="example-bucket-name" + ) + TerraformOutput(self, "foo", + value=example.policy + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bucket` - (Required) Bucket name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `policy` - IAM bucket policy. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/s3_object.html.markdown b/website/docs/cdktf/python/d/s3_object.html.markdown new file mode 100644 index 00000000000..e86e51e257a --- /dev/null +++ b/website/docs/cdktf/python/d/s3_object.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_object" +description: |- + Provides metadata and optionally content of an S3 object +--- + + + +# Data Source: aws_s3_object + +The S3 object data source allows access to the metadata and +_optionally_ (see below) content of an object stored inside S3 bucket. + +~> **Note:** The content of an object (`body` field) is available only for objects which have a human-readable `Content-Type` (`text/*` and `application/json`). This is to prevent printing unsafe characters and potentially downloading large amount of data which would be thrown away in favour of metadata. + +## Example Usage + +The following example retrieves a text object (which must have a `Content-Type` +value starting with `text/`) and uses it as the `user_data` for an EC2 instance: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_s3_object import DataAwsS3Object +from imports.aws.instance import Instance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bootstrap_script = DataAwsS3Object(self, "bootstrap_script", + bucket="ourcorp-deploy-config", + key="ec2-bootstrap-script.sh" + ) + Instance(self, "example", + ami="ami-2757f631", + instance_type="t2.micro", + user_data=Token.as_string(bootstrap_script.body) + ) +``` + +The following, more-complex example retrieves only the metadata for a zip +file stored in S3, which is then used to pass the most recent `version_id` +to AWS Lambda for use as a function implementation. More information about +Lambda functions is available in the documentation for +[`aws_lambda_function`](/docs/providers/aws/r/lambda_function.html). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
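+#
+# NOTE: `iam_for_lambda` referenced below is assumed to be an IAM role
+# defined elsewhere in this stack; it is not created in this example.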
+#
+from imports.aws.data_aws_s3_object import DataAwsS3Object
+from imports.aws.lambda_function import LambdaFunction
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        lambda_ = DataAwsS3Object(self, "lambda",
+            bucket="ourcorp-lambda-functions",
+            key="hello-world.zip"
+        )
+        LambdaFunction(self, "test_lambda",
+            function_name="lambda_function_name",
+            handler="exports.test",
+            role=iam_for_lambda.arn,
+            s3_bucket=Token.as_string(lambda_.bucket),
+            s3_key=Token.as_string(lambda_.key),
+            s3_object_version=Token.as_string(lambda_.version_id)
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket to read the object from. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified
+* `key` - (Required) Full path to the object inside the bucket
+* `version_id` - (Optional) Specific version ID of the object returned (defaults to latest version)
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `body` - Object data (see **limitations above** to understand cases in which this field is actually available)
+* `bucket_key_enabled` - Whether [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) are used for SSE-KMS.
+* `cache_control` - Caching behavior along the request/reply chain.
+* `content_disposition` - Presentational information for the object.
+* `content_encoding` - What content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.
+* `content_language` - Language the content is in.
+* `content_length` - Size of the body in bytes.
+* `content_type` - Standard MIME type describing the format of the object data.
+* `etag` - [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) generated for the object (an MD5 sum of the object content in case it's not encrypted)
+* `expiration` - If the object expiration is configured (see [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html)), the field includes this header. It includes the expiry-date and rule-id key value pairs providing object expiration information. The value of the rule-id is URL encoded.
+* `expires` - Date and time at which the object is no longer cacheable.
+* `last_modified` - Last modified date of the object in RFC1123 format (e.g., `Mon, 02 Jan 2006 15:04:05 MST`)
+* `metadata` - Map of metadata stored with the object in S3
+* `object_lock_legal_hold_status` - Indicates whether this object has an active [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds). This field is only returned if you have permission to view an object's legal hold status.
+* `object_lock_mode` - Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) currently in place for this object.
+* `object_lock_retain_until_date` - The date and time when this object's object lock will expire.
+* `server_side_encryption` - If the object is stored using server-side encryption (KMS or Amazon S3-managed encryption key), this field includes the chosen encryption and algorithm used.
+* `sse_kms_key_id` - If present, specifies the ID of the Key Management Service (KMS) master encryption key that was used for the object. +* `storage_class` - [Storage class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html) information of the object. Available for all objects except for `Standard` storage class objects. +* `version_id` - Latest version ID of the object returned. +* `website_redirect_location` - If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata. +* `tags` - Map of tags assigned to the object. + +-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/s3_objects.html.markdown b/website/docs/cdktf/python/d/s3_objects.html.markdown new file mode 100644 index 00000000000..e636ae48cf9 --- /dev/null +++ b/website/docs/cdktf/python/d/s3_objects.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_objects" +description: |- + Returns keys and metadata of S3 objects +--- + + + +# Data Source: aws_s3_objects + +~> **NOTE on `max_keys`:** Retrieving very large numbers of keys can adversely affect Terraform's performance. + +The objects data source returns keys (i.e., file names) and other metadata about objects in an S3 bucket. + +## Example Usage + +The following example retrieves a list of all object keys in an S3 bucket and creates corresponding Terraform object data sources: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformCount, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_s3_object import DataAwsS3Object +from imports.aws.data_aws_s3_objects import DataAwsS3Objects +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + my_objects = DataAwsS3Objects(self, "my_objects", + bucket="ourcorp" + ) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + object_info_count = TerraformCount.of( + Token.as_number(Fn.length_of(my_objects.keys))) + DataAwsS3Object(self, "object_info", + bucket=Token.as_string(my_objects.id), + key=Token.as_string( + Fn.element(my_objects.keys, Token.as_number(object_info_count.index))), + count=object_info_count + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bucket` - (Required) Lists object keys in this S3 bucket. 
Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified +* `prefix` - (Optional) Limits results to object keys with this prefix (Default: none) +* `delimiter` - (Optional) Character used to group keys (Default: none) +* `encoding_type` - (Optional) Encodes keys using this method (Default: none; besides none, only "url" can be used) +* `max_keys` - (Optional) Maximum object keys to return (Default: 1000) +* `start_after` - (Optional) Returns key names lexicographically after a specific object key in your bucket (Default: none; S3 lists object keys in UTF-8 character encoding in lexicographical order) +* `fetch_owner` - (Optional) Boolean specifying whether to populate the owner list (Default: false) + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `keys` - List of strings representing object keys +* `common_prefixes` - List of any keys between `prefix` and the next occurrence of `delimiter` (i.e., similar to subdirectories of the `prefix` "directory"); the list is only returned when you specify `delimiter` +* `id` - S3 Bucket. +* `owners` - List of strings representing object owner IDs (see `fetch_owner` above) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/s3control_multi_region_access_point.html.markdown b/website/docs/cdktf/python/d/s3control_multi_region_access_point.html.markdown new file mode 100644 index 00000000000..55b7413faf0 --- /dev/null +++ b/website/docs/cdktf/python/d/s3control_multi_region_access_point.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_multi_region_access_point" +description: |- + Provides details an S3 Multi-Region Access Point. +--- + + + +# Data Source: aws_s3control_multi_region_access_point + +Provides details on a specific S3 Multi-Region Access Point. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_s3_control_multi_region_access_point import DataAwsS3ControlMultiRegionAccessPoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsS3ControlMultiRegionAccessPoint(self, "example", + name="example" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `account_id` - (Optional) The AWS account ID of the S3 Multi-Region Access Point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `name` - (Required) The name of the Multi-Region Access Point. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `alias` - The alias for the Multi-Region Access Point. +* `arn` - Amazon Resource Name (ARN) of the Multi-Region Access Point. +* `created_at` - Timestamp when the resource has been created. +* `domain_name` - The DNS domain name of the S3 Multi-Region Access Point in the format _`alias`_.accesspoint.s3-global.amazonaws.com. For more information, see the documentation on [Multi-Region Access Point Requests](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPointRequests.html). 
+* `public_access_block` - Public Access Block of the Multi-Region Access Point. Detailed below.
+* `regions` - A collection of the regions and buckets associated with the Multi-Region Access Point.
+* `status` - The current status of the Multi-Region Access Point.
+
+### public_access_block
+
+* `block_public_acls` - Specifies whether Amazon S3 should block public access control lists (ACLs). When set to `true`, the following behavior applies:
+    * PUT Bucket acl and PUT Object acl calls fail if the specified ACL is public.
+    * PUT Object calls fail if the request includes a public ACL.
+    * PUT Bucket calls fail if the request includes a public ACL.
+* `block_public_policy` - Specifies whether Amazon S3 should block public bucket policies for buckets in this account. When set to `true`, Amazon S3 will:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignore_public_acls` - Specifies whether Amazon S3 should ignore public ACLs for buckets in this account. When set to `true`, Amazon S3 will:
+    * Ignore all public ACLs on buckets in this account and any objects that they contain.
+* `restrict_public_buckets` - Specifies whether Amazon S3 should restrict public bucket policies for buckets in this account. When set to `true`:
+    * Only the bucket owner and AWS Services can access buckets with public policies.
+
+### regions
+
+* `bucket` - The name of the bucket.
+* `region` - The name of the region.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/sagemaker_prebuilt_ecr_image.html.markdown b/website/docs/cdktf/python/d/sagemaker_prebuilt_ecr_image.html.markdown new file mode 100644 index 00000000000..8c839b8cbdf --- /dev/null +++ b/website/docs/cdktf/python/d/sagemaker_prebuilt_ecr_image.html.markdown @@ -0,0 +1,55 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_prebuilt_ecr_image"
+description: |-
+  Get information about prebuilt Amazon SageMaker Docker images.
+---
+
+
+
+# Data Source: aws_sagemaker_prebuilt_ecr_image
+
+Get information about prebuilt Amazon SageMaker Docker images.
+
+~> **NOTE:** The AWS provider creates a validly constructed `registry_path` but does not verify that the `registry_path` corresponds to an existing image. For example, using a `registry_path` containing an `image_tag` that does not correspond to a Docker image in the ECR repository will result in an error.
+
+## Example Usage
+
+Basic usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sagemaker_prebuilt_ecr_image import DataAwsSagemakerPrebuiltEcrImage
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSagemakerPrebuiltEcrImage(self, "test",
+            image_tag="2.2-1.0.11.0",
+            repository_name="sagemaker-scikit-learn"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `repository_name` - (Required) Name of the repository, which is generally the algorithm or library. Values include `blazingtext`, `factorization-machines`, `forecasting-deepar`, `image-classification`, `ipinsights`, `kmeans`, `knn`, `lda`, `linear-learner`, `mxnet-inference-eia`, `mxnet-inference`, `mxnet-training`, `ntm`, `object-detection`, `object2vec`, `pca`, `pytorch-inference-eia`, `pytorch-inference`, `pytorch-training`, `randomcutforest`, `sagemaker-scikit-learn`, `sagemaker-sparkml-serving`, `sagemaker-xgboost`, `semantic-segmentation`, `seq2seq`, `tensorflow-inference-eia`, `tensorflow-inference`, `tensorflow-training`, `huggingface-tensorflow-training`, `huggingface-tensorflow-inference`, `huggingface-pytorch-training`, and `huggingface-pytorch-inference`.
+* `dns_suffix` - (Optional) DNS suffix to use in the registry path. If not specified, the AWS provider sets it to the DNS suffix for the current region.
+* `image_tag` - (Optional) Image tag for the Docker image. If not specified, the AWS provider sets the value to `1`, which for many repositories indicates the latest version. Some repositories, such as XGBoost, do not support `1` or `latest`, and a specific version must be used.
+* `region` (Optional) - Region to use in the registry path. If not specified, the AWS provider sets it to the current region.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `registry_id` - Account ID containing the image. For example, `469771592824`.
+* `registry_path` - Docker image URL. For example, `341280168497.dkr.ecr.ca-central-1.amazonaws.com/sagemaker-sparkml-serving:2.4`.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/secretsmanager_random_password.html.markdown b/website/docs/cdktf/python/d/secretsmanager_random_password.html.markdown new file mode 100644 index 00000000000..c479d54a7e2 --- /dev/null +++ b/website/docs/cdktf/python/d/secretsmanager_random_password.html.markdown @@ -0,0 +1,52 @@
+---
+subcategory: "Secrets Manager"
+layout: "aws"
+page_title: "AWS: aws_secretsmanager_random_password"
+description: |-
+  Generate a random password
+---
+
+
+
+# Data Source: aws_secretsmanager_random_password
+
+Generate a random password.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_secretsmanager_random_password import DataAwsSecretsmanagerRandomPassword
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSecretsmanagerRandomPassword(self, "test",
+            exclude_numbers=True,
+            password_length=50
+        )
+```
+
+## Argument Reference
+
+* `exclude_characters` - (Optional) String of the characters that you don't want in the password.
+* `exclude_lowercase` - (Optional) Specifies whether to exclude lowercase letters from the password.
+* `exclude_numbers` - (Optional) Specifies whether to exclude numbers from the password.
+* `exclude_punctuation` - (Optional) Specifies whether to exclude the following punctuation characters from the password: ``! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~``.
+* `exclude_uppercase` - (Optional) Specifies whether to exclude uppercase letters from the password.
+* `include_space` - (Optional) Specifies whether to include the space character.
+* `password_length` - (Optional) Length of the password.
+* `require_each_included_type` - (Optional) Specifies whether to include at least one upper and lowercase letter, one number, and one punctuation. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `random_password` - Random password. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/secretsmanager_secret.html.markdown b/website/docs/cdktf/python/d/secretsmanager_secret.html.markdown new file mode 100644 index 00000000000..66ae937d5a9 --- /dev/null +++ b/website/docs/cdktf/python/d/secretsmanager_secret.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret" +description: |- + Retrieve metadata information about a Secrets Manager secret +--- + + + +# Data Source: aws_secretsmanager_secret + +Retrieve metadata information about a Secrets Manager secret. To retrieve a secret value, see the [`aws_secretsmanager_secret_version` data source](/docs/providers/aws/d/secretsmanager_secret_version.html). + +## Example Usage + +### ARN + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_secretsmanager_secret import DataAwsSecretsmanagerSecret +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSecretsmanagerSecret(self, "by-arn", + arn="arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456" + ) +``` + +### Name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_secretsmanager_secret import DataAwsSecretsmanagerSecret +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSecretsmanagerSecret(self, "by-name", + name="example" + ) +``` + +## Argument Reference + +* `arn` - (Optional) ARN of the secret to retrieve. +* `name` - (Optional) Name of the secret to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the secret. +* `description` - Description of the secret. +* `kms_key_id` - Key Management Service (KMS) Customer Master Key (CMK) associated with the secret. +* `id` - ARN of the secret. +* `tags` - Tags of the secret. +* `policy` - Resource-based policy document that's attached to the secret. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/secretsmanager_secret_rotation.html.markdown b/website/docs/cdktf/python/d/secretsmanager_secret_rotation.html.markdown new file mode 100644 index 00000000000..7196a0d3098 --- /dev/null +++ b/website/docs/cdktf/python/d/secretsmanager_secret_rotation.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret_rotation" +description: |- + Retrieve information about a Secrets Manager secret rotation configuration +--- + + + +# Data Source: aws_secretsmanager_secret_rotation + +Retrieve information about a Secrets Manager secret rotation. 
+To retrieve secret metadata, see the [`aws_secretsmanager_secret` data source](/docs/providers/aws/d/secretsmanager_secret.html). To retrieve a secret value, see the [`aws_secretsmanager_secret_version` data source](/docs/providers/aws/d/secretsmanager_secret_version.html).
+
+## Example Usage
+
+### Retrieve Secret Rotation Configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_secretsmanager_secret_rotation import DataAwsSecretsmanagerSecretRotation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSecretsmanagerSecretRotation(self, "example",
+            secret_id=Token.as_string(data_aws_secretsmanager_secret_example.id)
+        )
+```
+
+## Argument Reference
+
+* `secret_id` - (Required) Specifies the secret whose rotation configuration you want to retrieve. You can specify either the ARN or the friendly name of the secret.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `rotation_enabled` - Whether automatic rotation is enabled for this secret.
+* `rotation_lambda_arn` - ARN of the Lambda function used to rotate the secret.
+* `rotation_rules` - Rotation rules that control the rotation schedule of the secret.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/secretsmanager_secret_version.html.markdown b/website/docs/cdktf/python/d/secretsmanager_secret_version.html.markdown new file mode 100644 index 00000000000..8541359816b --- /dev/null +++ b/website/docs/cdktf/python/d/secretsmanager_secret_version.html.markdown @@ -0,0 +1,91 @@
+---
+subcategory: "Secrets Manager"
+layout: "aws"
+page_title: "AWS: aws_secretsmanager_secret_version"
+description: |-
+  Retrieve information about a Secrets Manager secret version including its secret value
+---
+
+
+
+# Data Source: aws_secretsmanager_secret_version
+
+Retrieve information about a Secrets Manager secret version, including its secret value. To retrieve secret metadata, see the [`aws_secretsmanager_secret` data source](/docs/providers/aws/d/secretsmanager_secret.html).
+
+## Example Usage
+
+### Retrieve Current Secret Version
+
+By default, this data source retrieves information based on the `AWSCURRENT` staging label.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_secretsmanager_secret_version import DataAwsSecretsmanagerSecretVersion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSecretsmanagerSecretVersion(self, "secret-version",
+            secret_id=Token.as_string(example.id)
+        )
+```
+
+### Retrieve Specific Secret Version
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
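+#
+# NOTE: `example` referenced below is assumed to be an
+# aws_secretsmanager_secret (resource or data source) defined elsewhere
+# in this stack; it is not created in this snippet.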
+# +from imports.aws.data_aws_secretsmanager_secret_version import DataAwsSecretsmanagerSecretVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSecretsmanagerSecretVersion(self, "by-version-stage", + secret_id=Token.as_string(example.id), + version_stage="example" + ) +``` + +### Handling Key-Value Secret Strings in JSON + +Reading key-value pairs from JSON back into a native Terraform map can be accomplished in Terraform 0.12 and later with the [`jsondecode()` function](https://www.terraform.io/docs/configuration/functions/jsondecode.html): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, Fn, Token, property_access, TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TerraformOutput(self, "example", + value=property_access( + Fn.jsondecode(Token.as_string(example.secret_string)), ["\"key1\""]) + ) +``` + +## Argument Reference + +* `secret_id` - (Required) Specifies the secret containing the version that you want to retrieve. You can specify either the ARN or the friendly name of the secret. +* `version_id` - (Optional) Specifies the unique identifier of the version of the secret that you want to retrieve. Overrides `version_stage`. +* `version_stage` - (Optional) Specifies the secret version that you want to retrieve by the staging label attached to the version. Defaults to `AWSCURRENT`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the secret. +* `id` - Unique identifier of this version of the secret. +* `secret_string` - Decrypted part of the protected secret information that was originally provided as a string. +* `secret_binary` - Decrypted part of the protected secret information that was originally provided as a binary. +* `version_id` - Unique identifier of this version of the secret. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/secretsmanager_secrets.html.markdown b/website/docs/cdktf/python/d/secretsmanager_secrets.html.markdown new file mode 100644 index 00000000000..b9a808ec41e --- /dev/null +++ b/website/docs/cdktf/python/d/secretsmanager_secrets.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secrets" +description: |- + Get information on Secrets Manager secrets. +--- + + + +# Data Source: aws_secretsmanager_secrets + +Use this data source to get the ARNs and names of Secrets Manager secrets matching the specified criteria. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_secretsmanager_secrets import DataAwsSecretsmanagerSecrets +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSecretsmanagerSecrets(self, "example", + filter=[DataAwsSecretsmanagerSecretsFilter( + name="name", + values=["example"] + ) + ] + ) +``` + +## Argument Reference + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. 
+ +## filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Secrets Manager ListSecrets API Reference](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_ListSecrets.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Secrets Manager secrets. +* `names` - Set of names of the matched Secrets Manager secrets. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/security_group.html.markdown b/website/docs/cdktf/python/d/security_group.html.markdown new file mode 100644 index 00000000000..a18788442c5 --- /dev/null +++ b/website/docs/cdktf/python/d/security_group.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_security_group" +description: |- + Provides details about a specific Security Group +--- + + + +# Data Source: aws_security_group + +`aws_security_group` provides details about a specific Security Group. + +This resource can prove useful when a module accepts a Security Group id as +an input variable and needs to, for example, determine the id of the +VPC that the security group belongs to. + +## Example Usage + +The following example shows how one might accept a Security Group id as a variable +and use this data source to obtain the data necessary to create a subnet. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_security_group import DataAwsSecurityGroup +from imports.aws.subnet import Subnet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + security_group_id = TerraformVariable(self, "security_group_id") + selected = DataAwsSecurityGroup(self, "selected", + id=security_group_id.string_value + ) + Subnet(self, "subnet", + cidr_block="10.0.1.0/24", + vpc_id=Token.as_string(selected.vpc_id) + ) +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +security group in the current region. The given filters must match exactly one +security group whose data will be exported as attributes. + +* `filter` - (Optional) Custom filter block as described below. + +* `id` - (Optional) Id of the specific security group to retrieve. + +* `name` - (Optional) Name that the desired security group must have. + +* `tags` - (Optional) Map of tags, each pair of which must exactly match + a pair on the desired security group. + +* `vpc_id` - (Optional) Id of the VPC that the desired security group belongs to. 
+ +More complex filters can be expressed using one or more `filter` sub-blocks, +which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroups.html). + +* `values` - (Required) Set of values that are accepted for the given field. + A Security Group will be selected if any one of the given values matches. + +## Attribute Reference + +All of the argument attributes except `filter` blocks are also exported as +result attributes. This data source will complete the data by populating +any fields that are not included in the configuration with the data for +the selected Security Group. + +The following fields are also exported: + +* `description` - Description of the security group. +* `arn` - Computed ARN of the security group. + +~> **Note:** The [default security group for a VPC](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_SecurityGroups.html#DefaultSecurityGroup) has the name `default`. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/security_groups.html.markdown b/website/docs/cdktf/python/d/security_groups.html.markdown new file mode 100644 index 00000000000..456c0e69fa0 --- /dev/null +++ b/website/docs/cdktf/python/d/security_groups.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_security_groups" +description: |- + Get information about a set of Security Groups. +--- + + + +# Data Source: aws_security_groups + +Use this data source to get IDs and VPC membership of Security Groups that are created outside of Terraform. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_security_groups import DataAwsSecurityGroups +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSecurityGroups(self, "test", + tags={ + "Application": "k8s", + "Environment": "dev" + } + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_security_groups import DataAwsSecurityGroups +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSecurityGroups(self, "test", + filter=[DataAwsSecurityGroupsFilter( + name="group-name", + values=["*nodes*"] + ), DataAwsSecurityGroupsFilter( + name="vpc-id", + values=[vpc_id.string_value] + ) + ] + ) +``` + +## Argument Reference + +* `tags` - (Optional) Map of tags, each pair of which must exactly match for desired security groups. +* `filter` - (Optional) One or more name/value pairs to use as filters. There are several valid keys, for a full reference, check out [describe-security-groups in the AWS CLI reference][1]. 
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - ARNs of the matched security groups.
+* `id` - AWS Region.
+* `ids` - IDs of the matched security groups.
+* `vpc_ids` - VPC IDs of the matched security groups. The data source's tag or filter *will span VPCs* unless the `vpc-id` filter is also used.
+
+[1]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-security-groups.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/serverlessapplicationrepository_application.html.markdown b/website/docs/cdktf/python/d/serverlessapplicationrepository_application.html.markdown new file mode 100644 index 00000000000..f3a3d542b55 --- /dev/null +++ b/website/docs/cdktf/python/d/serverlessapplicationrepository_application.html.markdown @@ -0,0 +1,59 @@
+---
+subcategory: "Serverless Application Repository"
+layout: "aws"
+page_title: "AWS: aws_serverlessapplicationrepository_application"
+description: |-
+  Get information on an AWS Serverless Application Repository application
+---
+
+
+
+# Data Source: aws_serverlessapplicationrepository_application
+
+Use this data source to get information about an AWS Serverless Application Repository application. For example, this can be used to determine the required `capabilities` for an application.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_serverlessapplicationrepository_application import DataAwsServerlessapplicationrepositoryApplication
+from imports.aws.serverlessapplicationrepository_cloudformation_stack import ServerlessapplicationrepositoryCloudformationStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsServerlessapplicationrepositoryApplication(self, "example",
+            application_id="arn:aws:serverlessrepo:us-east-1:123456789012:applications/ExampleApplication"
+        )
+        aws_serverlessapplicationrepository_cloudformation_stack_example = \
+            ServerlessapplicationrepositoryCloudformationStack(self, "example_1",
+                application_id=Token.as_string(example.application_id),
+                capabilities=Token.as_list(example.required_capabilities),
+                name="Example",
+                semantic_version=Token.as_string(example.semantic_version)
+            )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_serverlessapplicationrepository_cloudformation_stack_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+* `application_id` - (Required) ARN of the application.
+* `semantic_version` - (Optional) Requested version of the application. By default, retrieves the latest version.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `application_id` - ARN of the application.
+* `name` - Name of the application.
+* `required_capabilities` - A list of capabilities describing the permissions needed to deploy the application.
+* `source_code_url` - URL pointing to the source code of the application version.
+* `template_url` - URL pointing to the Cloud Formation template for the application version. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/service.html.markdown b/website/docs/cdktf/python/d/service.html.markdown new file mode 100644 index 00000000000..fc8c6294d19 --- /dev/null +++ b/website/docs/cdktf/python/d/service.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_service" +description: |- + Compose and decompose AWS service DNS names +--- + + + +# Data Source: aws_service + +Use this data source to compose and decompose AWS service DNS names. + +## Example Usage + +### Get Service DNS Name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.data_aws_service import DataAwsService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsRegion(self, "current") + DataAwsService(self, "test", + region=Token.as_string(current.name), + service_id="ec2" + ) +``` + +### Use Service Reverse DNS Name to Get Components + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_service import DataAwsService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsService(self, "s3", + reverse_dns_name="cn.com.amazonaws.cn-north-1.s3" + ) +``` + +### Determine Regional Support for a Service + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_service import DataAwsService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsService(self, "s3", + reverse_dns_name="com.amazonaws.us-gov-west-1.waf" + ) +``` + +## Argument Reference + +The following arguments are optional: + +* `dns_name` - (Optional) DNS name of the service (_e.g.,_ `rds.us-east-1.amazonaws.com`). One of `dns_name`, `reverse_dns_name`, or `service_id` is required. +* `partition` - (Optional) Partition corresponding to the region. +* `region` - (Optional) Region of the service (_e.g.,_ `us-west-2`, `ap-northeast-1`). +* `reverse_dns_name` - (Optional) Reverse DNS name of the service (_e.g.,_ `com.amazonaws.us-west-2.s3`). One of `dns_name`, `reverse_dns_name`, or `service_id` is required. +* `reverse_dns_prefix` - (Optional) Prefix of the service (_e.g.,_ `com.amazonaws` in AWS Commercial, `cn.com.amazonaws` in AWS China). +* `service_id` - (Optional) Service (_e.g.,_ `s3`, `rds`, `ec2`). One of `dns_name`, `reverse_dns_name`, or `service_id` is required. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `supported` - Whether the service is supported in the region's partition. 
New services may not be listed immediately as supported. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/service_discovery_dns_namespace.html.markdown b/website/docs/cdktf/python/d/service_discovery_dns_namespace.html.markdown new file mode 100644 index 00000000000..c545c4bb8d1 --- /dev/null +++ b/website/docs/cdktf/python/d/service_discovery_dns_namespace.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_dns_namespace" +description: |- + Retrieves information about a Service Discovery private or public DNS namespace. +--- + + + +# Data Source: aws_service_discovery_dns_namespace + +Retrieves information about a Service Discovery private or public DNS namespace. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_service_discovery_dns_namespace import DataAwsServiceDiscoveryDnsNamespace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServiceDiscoveryDnsNamespace(self, "test", + name="example.terraform.local", + type="DNS_PRIVATE" + ) +``` + +## Argument Reference + +* `name` - (Required) Name of the namespace. +* `type` - (Required) Type of the namespace. Allowed values are `DNS_PUBLIC` or `DNS_PRIVATE`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the namespace. +* `description` - Description of the namespace. +* `id` - Namespace ID. +* `hosted_zone` - ID for the hosted zone that Amazon Route 53 creates when you create a namespace. +* `tags` - Map of tags for the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/service_discovery_http_namespace.html.markdown b/website/docs/cdktf/python/d/service_discovery_http_namespace.html.markdown new file mode 100644 index 00000000000..9e00f340237 --- /dev/null +++ b/website/docs/cdktf/python/d/service_discovery_http_namespace.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_http_namespace" +description: |- + Retrieves information about a Service Discovery HTTP Namespace. +--- + + + +# Data Source: aws_service_discovery_http_namespace + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_service_discovery_http_namespace import DataAwsServiceDiscoveryHttpNamespace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServiceDiscoveryHttpNamespace(self, "example", + name="development" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the http namespace. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of a namespace. +* `arn` - ARN that Amazon Route 53 assigns to the namespace when you create it. 
+* `description` - Description that you specify for the namespace when you create it. +* `http_name` - Name of an HTTP namespace. +* `tags` - Map of tags for the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/service_discovery_service.html.markdown b/website/docs/cdktf/python/d/service_discovery_service.html.markdown new file mode 100644 index 00000000000..2f8d2a11a78 --- /dev/null +++ b/website/docs/cdktf/python/d/service_discovery_service.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_service" +description: |- + Retrieves information about a Service Discovery Service +--- + + + +# Data Source: aws_service_discovery_service + +Retrieves information about a Service Discovery Service. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_service_discovery_service import DataAwsServiceDiscoveryService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServiceDiscoveryService(self, "test", + name="example", + namespace_id="NAMESPACE_ID_VALUE" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the service. +* `namespace_id` - (Required) ID of the namespace that the service belongs to. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the service. +* `arn` - ARN of the service. +* `description` - Description of the service. +* `dns_config` - Complex type that contains information about the resource record sets that you want Amazon Route 53 to create when you register an instance. +* `health_check_config` - Complex type that contains settings for an optional health check. Only for Public DNS namespaces. +* `health_check_custom_config` - A complex type that contains settings for ECS managed health checks. +* `tags` - Map of tags to assign to the service. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tags_all` - (**Deprecated**) Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### dns_config + +This argument supports the following arguments: + +* `namespace_id` - ID of the namespace to use for DNS configuration. +* `dns_records` - An array that contains one DnsRecord object for each resource record set. +* `routing_policy` - Routing policy that you want to apply to all records that Route 53 creates when you register an instance and specify the service. Valid Values: MULTIVALUE, WEIGHTED + +#### dns_records + +This argument supports the following arguments: + +* `ttl` - Amount of time, in seconds, that you want DNS resolvers to cache the settings for this resource record set. +* `type` - Type of the resource, which indicates the value that Amazon Route 53 returns in response to DNS queries. 
Valid Values: A, AAAA, SRV, CNAME + +### health_check_config + +This argument supports the following arguments: + +* `failure_threshold` - Number of consecutive health checks. Maximum value of 10. +* `resource_path` - Path that you want Route 53 to request when performing health checks. Route 53 automatically adds the DNS name for the service. If you don't specify a value, the default value is /. +* `type` - The type of health check that you want to create, which indicates how Route 53 determines whether an endpoint is healthy. Valid Values: HTTP, HTTPS, TCP + +### health_check_custom_config + +This argument supports the following arguments: + +* `failure_threshold` - The number of 30-second intervals that you want service discovery to wait before it changes the health status of a service instance. Maximum value of 10. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicecatalog_constraint.html.markdown b/website/docs/cdktf/python/d/servicecatalog_constraint.html.markdown new file mode 100644 index 00000000000..8daec74f174 --- /dev/null +++ b/website/docs/cdktf/python/d/servicecatalog_constraint.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_constraint" +description: |- + Provides information on a Service Catalog Constraint +--- + + + +# Data Source: aws_servicecatalog_constraint + +Provides information on a Service Catalog Constraint. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_servicecatalog_constraint import DataAwsServicecatalogConstraint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServicecatalogConstraint(self, "example", + accept_language="en", + id="cons-hrvy0335" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `id` - Constraint identifier. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `description` - Description of the constraint. +* `owner` - Owner of the constraint. +* `parameters` - Constraint parameters in JSON format. +* `portfolio_id` - Portfolio identifier. +* `product_id` - Product identifier. +* `status` - Constraint status. +* `type` - Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `RESOURCE_UPDATE`, `STACKSET`, and `TEMPLATE`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicecatalog_launch_paths.html.markdown b/website/docs/cdktf/python/d/servicecatalog_launch_paths.html.markdown new file mode 100644 index 00000000000..9799124d87a --- /dev/null +++ b/website/docs/cdktf/python/d/servicecatalog_launch_paths.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_launch_paths" +description: |- + Provides information on Service Catalog Launch Paths +--- + + + +# Data Source: aws_servicecatalog_launch_paths + +Lists the paths to the specified product. 
A path is how the user has access to a specified product, and is necessary when provisioning a product. A path also determines the constraints put on the product. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_servicecatalog_launch_paths import DataAwsServicecatalogLaunchPaths +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServicecatalogLaunchPaths(self, "example", + product_id="prod-yakog5pdriver" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `product_id` - (Required) Product identifier. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `summaries` - Block with information about the launch path. See details below. + +### summaries + +* `constraint_summaries` - Block for constraints on the portfolio-product relationship. See details below. +* `path_id` - Identifier of the product path. +* `name` - Name of the portfolio to which the path was assigned. +* `tags` - Tags associated with this product path. + +### constraint_summaries + +* `description` - Description of the constraint. +* `type` - Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `STACKSET`, and `TEMPLATE`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicecatalog_portfolio.html.markdown b/website/docs/cdktf/python/d/servicecatalog_portfolio.html.markdown new file mode 100644 index 00000000000..c0eb25c8919 --- /dev/null +++ b/website/docs/cdktf/python/d/servicecatalog_portfolio.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_portfolio" +description: |- + Provides information for a Service Catalog Portfolio. +--- + + + +# Data Source: aws_servicecatalog_portfolio + +Provides information for a Service Catalog Portfolio. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_servicecatalog_portfolio import DataAwsServicecatalogPortfolio +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServicecatalogPortfolio(self, "portfolio", + id="port-07052002" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `id` - (Required) Portfolio identifier. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Portfolio ARN. +* `created_time` - Time the portfolio was created. +* `description` - Description of the portfolio +* `name` - Portfolio name. 
+* `provider_name` - Name of the person or organization who owns the portfolio. +* `tags` - Tags applied to the portfolio. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicecatalog_portfolio_constraints.html.markdown b/website/docs/cdktf/python/d/servicecatalog_portfolio_constraints.html.markdown new file mode 100644 index 00000000000..c1a1e546c48 --- /dev/null +++ b/website/docs/cdktf/python/d/servicecatalog_portfolio_constraints.html.markdown @@ -0,0 +1,61 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_portfolio_constraints" +description: |- + Provides information on Service Catalog Portfolio Constraints +--- + + + +# Data Source: aws_servicecatalog_portfolio_constraints + +Provides information on Service Catalog Portfolio Constraints. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_servicecatalog_portfolio_constraints import DataAwsServicecatalogPortfolioConstraints +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServicecatalogPortfolioConstraints(self, "example", + portfolio_id="port-3lli3b3an" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `portfolio_id` - (Required) Portfolio identifier. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. +* `product_id` - (Optional) Product identifier. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `details` - List of information about the constraints. See details below. + +### details + +* `constraint_id` - Identifier of the constraint. +* `description` - Description of the constraint. +* `portfolio_id` - Identifier of the portfolio the product resides in. The constraint applies only to the instance of the product that lives within this portfolio. +* `product_id` - Identifier of the product the constraint applies to. A constraint applies to a specific instance of a product within a certain portfolio. +* `type` - Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `STACKSET`, and `TEMPLATE`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicecatalog_product.html.markdown b/website/docs/cdktf/python/d/servicecatalog_product.html.markdown new file mode 100644 index 00000000000..d45b4c1cdc5 --- /dev/null +++ b/website/docs/cdktf/python/d/servicecatalog_product.html.markdown @@ -0,0 +1,66 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_product" +description: |- + This data source provides information about a Service Catalog product. +--- + + + +# Data Source: aws_servicecatalog_product + +Use this data source to retrieve information about a Service Catalog product. + +~> **NOTE:** A "provisioning artifact" is also known as a "version," and a "distributor" is also known as a "vendor." 
+ +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_servicecatalog_product import DataAwsServicecatalogProduct +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServicecatalogProduct(self, "example", + id="prod-dnigbtea24ste" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `id` - (Required) ID of the product. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values are `en` (English), `jp` (Japanese), `zh` (Chinese). The default value is `en`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the product. +* `created_time` - Time when the product was created. +* `description` - Description of the product. +* `distributor` - Vendor of the product. +* `has_default_path` - Whether the product has a default path. +* `name` - Name of the product. +* `owner` - Owner of the product. +* `status` - Status of the product. +* `support_description` - Field that provides support information about the product. +* `support_email` - Contact email for product support. +* `support_url` - Contact URL for product support. +* `tags` - Tags applied to the product. +* `type` - Type of product. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicecatalog_provisioning_artifacts.html.markdown b/website/docs/cdktf/python/d/servicecatalog_provisioning_artifacts.html.markdown new file mode 100644 index 00000000000..dc130e288da --- /dev/null +++ b/website/docs/cdktf/python/d/servicecatalog_provisioning_artifacts.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_provisioning_artifacts" +description: |- + Provides information on Service Catalog Provisioning Artifacts +--- + + + +# Data Source: aws_servicecatalog_provisioning_artifacts + +Lists the provisioning artifacts for the specified product. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_servicecatalog_provisioning_artifacts import DataAwsServicecatalogProvisioningArtifacts +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServicecatalogProvisioningArtifacts(self, "example", + product_id="prod-yakog5pdriver" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `product_id` - (Required) Product identifier. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `provisioning_artifact_details` - List with information about the provisioning artifacts. See details below. 
+ +### provisioning_artifact_details + +* `active` - Indicates whether the product version is active. +* `created_time` - The UTC time stamp of the creation time. +* `description` - The description of the provisioning artifact. +* `guidance` - Information set by the administrator to provide guidance to end users about which provisioning artifacts to use. +* `id` - The identifier of the provisioning artifact. +* `name` - The name of the provisioning artifact. +* `type` - The type of provisioning artifact. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicequotas_service.html.markdown b/website/docs/cdktf/python/d/servicequotas_service.html.markdown new file mode 100644 index 00000000000..47f98ec8ea3 --- /dev/null +++ b/website/docs/cdktf/python/d/servicequotas_service.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "Service Quotas" +layout: "aws" +page_title: "AWS: aws_servicequotas_service" +description: |- + Retrieve information about a Service Quotas Service +--- + + + +# Data Source: aws_servicequotas_service + +Retrieve information about a Service Quotas Service. + +~> **NOTE:** Global quotas apply to all AWS regions, but can only be accessed in `us-east-1` in the Commercial partition or `us-gov-west-1` in the GovCloud partition. In other regions, the AWS API will return the error `The request failed because the specified service does not exist.` + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_servicequotas_service import DataAwsServicequotasService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsServicequotasService(self, "example", + service_name="Amazon Virtual Private Cloud (Amazon VPC)" + ) +``` + +## Argument Reference + +* `service_name` - (Required) Service name to lookup within Service Quotas. Available values can be found with the [AWS CLI service-quotas list-services command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-services.html). + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Code of the service. +* `service_code` - Code of the service. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/servicequotas_service_quota.html.markdown b/website/docs/cdktf/python/d/servicequotas_service_quota.html.markdown new file mode 100644 index 00000000000..7302f56a118 --- /dev/null +++ b/website/docs/cdktf/python/d/servicequotas_service_quota.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "Service Quotas" +layout: "aws" +page_title: "AWS: aws_servicequotas_service_quota" +description: |- + Retrieve information about a Service Quota +--- + + + +# Data Source: aws_servicequotas_service_quota + +Retrieve information about a Service Quota. + +~> **NOTE:** Global quotas apply to all AWS regions, but can only be accessed in `us-east-1` in the Commercial partition or `us-gov-west-1` in the GovCloud partition. 
In other regions, the AWS API will return the error `The request failed because the specified service does not exist.`
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_servicequotas_service_quota import DataAwsServicequotasServiceQuota
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsServicequotasServiceQuota(self, "by_quota_code",
+            quota_code="L-F678F1CE",
+            service_code="vpc"
+        )
+        DataAwsServicequotasServiceQuota(self, "by_quota_name",
+            quota_name="VPCs per Region",
+            service_code="vpc"
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** Either `quota_code` or `quota_name` must be configured.
+
+* `service_code` - (Required) Service code for the quota. Available values can be found with the [`aws_servicequotas_service` data source](/docs/providers/aws/d/servicequotas_service.html) or [AWS CLI service-quotas list-services command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-services.html).
+* `quota_code` - (Optional) Quota code within the service. When configured, the data source directly looks up the service quota. Available values can be found with the [AWS CLI service-quotas list-service-quotas command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-service-quotas.html). One of `quota_code` or `quota_name` must be specified.
+* `quota_name` - (Optional) Quota name within the service. When configured, the data source searches through all service quotas to find the matching quota name. Available values can be found with the [AWS CLI service-quotas list-service-quotas command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-service-quotas.html). One of `quota_name` or `quota_code` must be specified.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `adjustable` - Whether the service quota is adjustable.
+* `arn` - ARN of the service quota.
+* `default_value` - Default value of the service quota.
+* `global_quota` - Whether the service quota is global for the AWS account.
+* `id` - ARN of the service quota.
+* `service_name` - Name of the service.
+* `usage_metric` - Information about the measurement.
+    * `metric_dimensions` - The metric dimensions.
+        * `class`
+        * `resource`
+        * `service`
+        * `type`
+    * `metric_name` - The name of the metric.
+    * `metric_namespace` - The namespace of the metric.
+    * `metric_statistic_recommendation` - The metric statistic that AWS recommends you use when determining quota usage.
+* `value` - Current value of the service quota.
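+
+As a short, hedged usage sketch (the output name is illustrative, and the data source is assumed to be assigned to a variable, unlike the unassigned calls above), the looked-up quota value can be exported from the stack:
+
+```python
+# Hypothetical sketch: expose the current quota value as a stack output.
+from cdktf import TerraformOutput
+
+by_quota_code = DataAwsServicequotasServiceQuota(self, "by_quota_code",
+    quota_code="L-F678F1CE",
+    service_code="vpc"
+)
+TerraformOutput(self, "vpcs_per_region_quota",
+    value=by_quota_code.value
+)
+```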
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ses_active_receipt_rule_set.html.markdown b/website/docs/cdktf/python/d/ses_active_receipt_rule_set.html.markdown new file mode 100644 index 00000000000..0f54a4f67cb --- /dev/null +++ b/website/docs/cdktf/python/d/ses_active_receipt_rule_set.html.markdown @@ -0,0 +1,39 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_active_receipt_rule_set" +description: |- + Retrieve the active SES receipt rule set +--- + + + +# Data Source: aws_ses_active_receipt_rule_set + +Retrieve the active SES receipt rule set + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ses_active_receipt_rule_set import DataAwsSesActiveReceiptRuleSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSesActiveReceiptRuleSet(self, "main") +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - SES receipt rule set ARN. +* `rule_set_name` - Name of the rule set + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ses_domain_identity.markdown b/website/docs/cdktf/python/d/ses_domain_identity.markdown new file mode 100644 index 00000000000..e76d57124b6 --- /dev/null +++ b/website/docs/cdktf/python/d/ses_domain_identity.markdown @@ -0,0 +1,42 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_identity" +description: |- + Retrieve the SES domain identity +--- + + + +# Data Source: aws_ses_domain_identity + +Retrieve the SES domain identity + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ses_domain_identity import DataAwsSesDomainIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSesDomainIdentity(self, "example", + domain="example.com" + ) +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the domain identity. +* `domain` - Name of the domain +* `verification_token` - Code which when added to the domain as a TXT record will signal to SES that the owner of the domain has authorized SES to act on their behalf. 
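+
+As a hedged follow-up sketch, the verification token is typically published as a TXT record named `_amazonses.<domain>`; the `route53_record` binding and the hosted zone ID below are illustrative assumptions, and `example` refers to the data source instance above:
+
+```python
+# Hypothetical sketch: publish the SES verification token in Route 53.
+from imports.aws.route53_record import Route53Record
+
+Route53Record(self, "ses_verification",
+    zone_id="Z1234567890ABC",  # placeholder hosted zone ID, not a real value
+    name="_amazonses.example.com",
+    type="TXT",
+    ttl=600,
+    records=[example.verification_token]
+)
+```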
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ses_email_identity.markdown b/website/docs/cdktf/python/d/ses_email_identity.markdown new file mode 100644 index 00000000000..4130b29f70e --- /dev/null +++ b/website/docs/cdktf/python/d/ses_email_identity.markdown @@ -0,0 +1,41 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_email_identity" +description: |- + Retrieve the active SES email identity +--- + + + +# Data Source: aws_ses_email_identity + +Retrieve the active SES email identity + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ses_email_identity import DataAwsSesEmailIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSesEmailIdentity(self, "example", + email="awesome@example.com" + ) +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the email identity. +* `email` - Email identity. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/sesv2_configuration_set.html.markdown b/website/docs/cdktf/python/d/sesv2_configuration_set.html.markdown new file mode 100644 index 00000000000..e01af3fda5d --- /dev/null +++ b/website/docs/cdktf/python/d/sesv2_configuration_set.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_configuration_set" +description: |- + Terraform data source for managing an AWS SESv2 (Simple Email V2) Configuration Set. +--- + + + +# Data Source: aws_sesv2_configuration_set + +Terraform data source for managing an AWS SESv2 (Simple Email V2) Configuration Set. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_sesv2_configuration_set import DataAwsSesv2ConfigurationSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSesv2ConfigurationSet(self, "example", + configuration_set_name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `configuration_set_name` - (Required) The name of the configuration set. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `delivery_options` - An object that defines the dedicated IP pool that is used to send emails that you send using the configuration set. + * `sending_pool_name` - The name of the dedicated IP pool to associate with the configuration set. + * `tls_policy` - Specifies whether messages that use the configuration set are required to use Transport Layer Security (TLS). +* `reputation_options` - An object that defines whether or not Amazon SES collects reputation metrics for the emails that you send that use the configuration set. + * `last_fresh_start` - The date and time (in Unix time) when the reputation metrics were last given a fresh start. 
+    * `reputation_metrics_enabled` - Specifies whether tracking of reputation metrics is enabled.
+* `sending_options` - An object that defines whether or not Amazon SES can send email that you send using the configuration set.
+    * `sending_enabled` - Specifies whether email sending is enabled.
+* `suppression_options` - An object that contains information about the suppression list preferences for your account.
+    * `suppressed_reasons` - A list that contains the reasons that email addresses are automatically added to the suppression list for your account.
+* `tags` - Key-value map of resource tags for the configuration set.
+* `tracking_options` - An object that defines the open and click tracking options for emails that you send using the configuration set.
+    * `custom_redirect_domain` - The domain to use for tracking open and click events.
+* `vdm_options` - An object that contains information about the VDM preferences for your configuration set.
+    * `dashboard_options` - Specifies additional settings for your VDM configuration as applicable to the Dashboard.
+        * `engagement_metrics` - Specifies the status of your VDM engagement metrics collection.
+    * `guardian_options` - Specifies additional settings for your VDM configuration as applicable to the Guardian.
+        * `optimized_shared_delivery` - Specifies the status of your VDM optimized shared delivery.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sesv2_dedicated_ip_pool.html.markdown b/website/docs/cdktf/python/d/sesv2_dedicated_ip_pool.html.markdown
new file mode 100644
index 00000000000..6761670e612
--- /dev/null
+++ b/website/docs/cdktf/python/d/sesv2_dedicated_ip_pool.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_dedicated_ip_pool"
+description: |-
+  Terraform data source for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+---
+
+
+
+# Data Source: aws_sesv2_dedicated_ip_pool
+
+Terraform data source for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sesv2_dedicated_ip_pool import DataAwsSesv2DedicatedIpPool
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSesv2DedicatedIpPool(self, "example",
+            pool_name="my-pool"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `pool_name` - (Required) Name of the dedicated IP pool.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Dedicated IP Pool.
+* `dedicated_ips` - A list of objects describing the pool's dedicated IPs. See [`dedicated_ips`](#dedicated_ips).
+* `scaling_mode` - IP pool scaling mode. Valid values: `STANDARD`, `MANAGED`.
+* `tags` - A map of tags attached to the pool.
+
+### dedicated_ips
+
+* `ip` - IPv4 address.
+* `warmup_percentage` - Indicates how complete the dedicated IP warm-up process is. When this value equals `1`, the address has completed the warm-up process and is ready for use.
+* `warmup_status` - The warm-up status of a dedicated IP address. Valid values: `IN_PROGRESS`, `DONE`.
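+
+As a brief, hedged usage sketch (the output name is illustrative, and the data source above is assumed to be assigned to a variable `example`), the pool's addresses can be surfaced as a stack output:
+
+```python
+# Hypothetical sketch: expose the pool's dedicated IPs as a stack output.
+from cdktf import TerraformOutput
+
+TerraformOutput(self, "dedicated_ip_addresses",
+    value=example.dedicated_ips
+)
+```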
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sesv2_email_identity.html.markdown b/website/docs/cdktf/python/d/sesv2_email_identity.html.markdown
new file mode 100644
index 00000000000..41913ed0aa3
--- /dev/null
+++ b/website/docs/cdktf/python/d/sesv2_email_identity.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_email_identity"
+description: |-
+  Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity.
+---
+
+
+
+# Data Source: aws_sesv2_email_identity
+
+Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sesv2_email_identity import DataAwsSesv2EmailIdentity
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSesv2EmailIdentity(self, "example",
+            email_identity="example.com"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `email_identity` - (Required) The name of the email identity.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Email Identity.
+* `dkim_signing_attributes` - A list of objects containing, at most, one element with information about the private key and selector to use to configure DKIM for the identity (Bring Your Own DKIM, or BYODKIM), or the key length to be used for Easy DKIM.
+    * `current_signing_key_length` - [Easy DKIM] The key length of the DKIM key pair in use.
+    * `last_key_generation_timestamp` - [Easy DKIM] The last time a key pair was generated for this identity.
+    * `next_signing_key_length` - [Easy DKIM] The key length of the future DKIM key pair to be generated. This can be changed at most once per day.
+    * `signing_attributes_origin` - A string that indicates how DKIM was configured for the identity. `AWS_SES` indicates that DKIM was configured for the identity by using Easy DKIM. `EXTERNAL` indicates that DKIM was configured for the identity by using Bring Your Own DKIM (BYODKIM).
+    * `status` - Describes whether or not Amazon SES has successfully located the DKIM records in the DNS records for the domain. See the [AWS SES API v2 Reference](https://docs.aws.amazon.com/ses/latest/APIReference-V2/API_DkimAttributes.html#SES-Type-DkimAttributes-Status) for supported statuses.
+    * `tokens` - If you used Easy DKIM to configure DKIM authentication for the domain, then this object contains a set of unique strings that you use to create a set of CNAME records that you add to the DNS configuration for your domain. When Amazon SES detects these records in the DNS configuration for your domain, the DKIM authentication process is complete. If you configured DKIM authentication for the domain by providing your own public-private key pair, then this object contains the selector for the public key.
+* `identity_type` - The email identity type. Valid values: `EMAIL_ADDRESS`, `DOMAIN`.
+* `tags` - Key-value mapping of resource tags.
+* `verified_for_sending_status` - Specifies whether or not the identity is verified.
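+
+As a hedged illustration (the `.get(0)` accessor on the generated attribute list and the output name are assumptions, and `example` is the data source instance above), the Easy DKIM tokens could be surfaced for DNS automation:
+
+```python
+# Hypothetical sketch: expose the Easy DKIM CNAME tokens as a stack output.
+from cdktf import TerraformOutput
+
+TerraformOutput(self, "dkim_tokens",
+    value=example.dkim_signing_attributes.get(0).tokens  # assumed list accessor
+)
+```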
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sesv2_email_identity_mail_from_attributes.html.markdown b/website/docs/cdktf/python/d/sesv2_email_identity_mail_from_attributes.html.markdown
new file mode 100644
index 00000000000..47c077a9ebd
--- /dev/null
+++ b/website/docs/cdktf/python/d/sesv2_email_identity_mail_from_attributes.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_email_identity_mail_from_attributes"
+description: |-
+  Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes.
+---
+
+
+
+# Data Source: aws_sesv2_email_identity_mail_from_attributes
+
+Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sesv2_email_identity import DataAwsSesv2EmailIdentity
+from imports.aws.data_aws_sesv2_email_identity_mail_from_attributes import DataAwsSesv2EmailIdentityMailFromAttributes
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsSesv2EmailIdentity(self, "example",
+            email_identity="example.com"
+        )
+        data_aws_sesv2_email_identity_mail_from_attributes_example = DataAwsSesv2EmailIdentityMailFromAttributes(self, "example_1",
+            email_identity=Token.as_string(example.email_identity)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_sesv2_email_identity_mail_from_attributes_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `email_identity` - (Required) The name of the email identity.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `behavior_on_mx_failure` - The action to take if the required MX record isn't found when you send an email. Valid values: `USE_DEFAULT_VALUE`, `REJECT_MESSAGE`.
+* `mail_from_domain` - The custom MAIL FROM domain that you want the verified identity to use.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sfn_activity.html.markdown b/website/docs/cdktf/python/d/sfn_activity.html.markdown
new file mode 100644
index 00000000000..fb0b2d4261b
--- /dev/null
+++ b/website/docs/cdktf/python/d/sfn_activity.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "SFN (Step Functions)"
+layout: "aws"
+page_title: "AWS: aws_sfn_activity"
+description: |-
+  Use this data source to get information about a Step Functions Activity.
+---
+
+
+
+# Data Source: aws_sfn_activity
+
+Provides a Step Functions Activity data source.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_sfn_activity import DataAwsSfnActivity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSfnActivity(self, "sfn_activity", + name="my-activity" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Optional) Name that identifies the activity. +* `arn` - (Optional) ARN that identifies the activity. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ARN that identifies the activity. +* `creation_date` - Date the activity was created. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/sfn_alias.html.markdown b/website/docs/cdktf/python/d/sfn_alias.html.markdown new file mode 100644 index 00000000000..9e681a04991 --- /dev/null +++ b/website/docs/cdktf/python/d/sfn_alias.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "SFN (Step Functions)" +layout: "aws" +page_title: "AWS: aws_sfn_alias" +description: |- + Terraform data source for managing an AWS SFN (Step Functions) State Machine Alias. +--- + + + +# Data Source: aws_sfn_alias + +Terraform data source for managing an AWS SFN (Step Functions) State Machine Alias. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_sfn_alias import DataAwsSfnAlias +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSfnAlias(self, "example", + name="my_sfn_alias", + statemachine_arn=sfn_test.arn + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the State Machine alias. +* `statemachine_arn` - (Required) ARN of the State Machine. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN identifying the State Machine alias. +* `creation_date` - Date the state machine Alias was created. +* `description` - Description of state machine alias. +* `routing_configuration` - Routing Configuration of state machine alias + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/sfn_state_machine.html.markdown b/website/docs/cdktf/python/d/sfn_state_machine.html.markdown new file mode 100644 index 00000000000..0e97652a318 --- /dev/null +++ b/website/docs/cdktf/python/d/sfn_state_machine.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "SFN (Step Functions)" +layout: "aws" +page_title: "AWS: aws_sfn_state_machine" +description: |- + Get information on an Amazon Step Function State Machine +--- + + + +# Data Source: aws_sfn_state_machine + +Use this data source to get the ARN of a State Machine in AWS Step +Function (SFN). By using this data source, you can reference a +state machine without having to hard code the ARNs as input. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_sfn_state_machine import DataAwsSfnStateMachine
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSfnStateMachine(self, "example",
+            name="an_example_sfn_name"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly name of the state machine to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Set to the ARN of the found state machine, suitable for referencing in other resources that support State Machines.
+* `arn` - Set to the ARN of the state machine.
+* `role_arn` - Set to the `role_arn` used by the state machine.
+* `definition` - Set to the state machine definition.
+* `creation_date` - Date the state machine was created.
+* `revision_id` - The revision identifier for the state machine.
+* `status` - Set to the current status of the state machine.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sfn_state_machine_versions.html.markdown b/website/docs/cdktf/python/d/sfn_state_machine_versions.html.markdown
new file mode 100644
index 00000000000..0a5c5817168
--- /dev/null
+++ b/website/docs/cdktf/python/d/sfn_state_machine_versions.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "SFN (Step Functions)"
+layout: "aws"
+page_title: "AWS: aws_sfn_state_machine_versions"
+description: |-
+  Terraform data source for managing AWS SFN (Step Functions) State Machine Versions.
+---
+
+
+
+# Data Source: aws_sfn_state_machine_versions
+
+Terraform data source for managing AWS SFN (Step Functions) State Machine Versions.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sfn_state_machine_versions import DataAwsSfnStateMachineVersions
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSfnStateMachineVersions(self, "test",
+            statemachine_arn=Token.as_string(aws_sfn_state_machine_test.arn)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `statemachine_arn` - (Required) ARN of the State Machine.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `statemachine_versions` - List of ARNs identifying the state machine versions.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/signer_signing_job.html.markdown b/website/docs/cdktf/python/d/signer_signing_job.html.markdown
new file mode 100644
index 00000000000..d0b1964e442
--- /dev/null
+++ b/website/docs/cdktf/python/d/signer_signing_job.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "Signer"
+layout: "aws"
+page_title: "AWS: aws_signer_signing_job"
+description: |-
+  Provides a Signer Signing Job data source.
+---
+
+
+
+# Data Source: aws_signer_signing_job
+
+Provides information about a Signer Signing Job.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_signer_signing_job import DataAwsSignerSigningJob +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSignerSigningJob(self, "build_signing_job", + job_id="9ed7e5c3-b8d4-4da0-8459-44e0b068f7ee" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `job_id` - (Required) ID of the signing job on output. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `completed_at` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was completed. +* `created_at` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was created. +* `job_invoker` - IAM entity that initiated the signing job. +* `job_owner` - AWS account ID of the job owner. +* `platform_display_name` - A human-readable name for the signing platform associated with the signing job. +* `platform_id` - Platform to which your signed code image will be distributed. +* `profile_name` - Name of the profile that initiated the signing operation. +* `profile_version` - Version of the signing profile used to initiate the signing job. +* `requested_by` - IAM principal that requested the signing job. +* `revocation_record` - Revocation record if the signature generated by the signing job has been revoked. Contains a timestamp and the ID of the IAM entity that revoked the signature. +* `signature_expires_at` - The time when the signature of a signing job expires. +* `signed_object` - Name of the S3 bucket where the signed code image is saved by code signing. +* `source` - Object that contains the name of your S3 bucket or your raw code. +* `status` - Status of the signing job. +* `status_reason` - String value that contains the status reason. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/signer_signing_profile.html.markdown b/website/docs/cdktf/python/d/signer_signing_profile.html.markdown new file mode 100644 index 00000000000..bd244c17508 --- /dev/null +++ b/website/docs/cdktf/python/d/signer_signing_profile.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Signer" +layout: "aws" +page_title: "AWS: aws_signer_signing_profile" +description: |- + Provides a Signer Signing Profile data source. +--- + + + +# Data Source: aws_signer_signing_profile + +Provides information about a Signer Signing Profile. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_signer_signing_profile import DataAwsSignerSigningProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSignerSigningProfile(self, "production_signing_profile", + name="prod_profile_DdW3Mk1foYL88fajut4mTVFGpuwfd4ACO6ANL0D1uIj7lrn8adK" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the target signing profile. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the signing profile. 
+* `platform_display_name` - A human-readable name for the signing platform associated with the signing profile.
+* `platform_id` - ID of the platform that is used by the target signing profile.
+* `revocation_record` - Revocation information for a signing profile.
+* `signature_validity_period` - The validity period for a signing job.
+* `status` - Status of the target signing profile.
+* `tags` - List of tags associated with the signing profile.
+* `version` - Current version of the signing profile.
+* `version_arn` - Signing profile ARN, including the profile version.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sns_topic.html.markdown b/website/docs/cdktf/python/d/sns_topic.html.markdown
new file mode 100644
index 00000000000..bc39e51d751
--- /dev/null
+++ b/website/docs/cdktf/python/d/sns_topic.html.markdown
@@ -0,0 +1,47 @@
+---
+subcategory: "SNS (Simple Notification)"
+layout: "aws"
+page_title: "AWS: aws_sns_topic"
+description: |-
+  Get information on an Amazon Simple Notification Service (SNS) Topic
+---
+
+
+
+# Data Source: aws_sns_topic
+
+Use this data source to get the ARN of a topic in AWS Simple Notification
+Service (SNS). By using this data source, you can reference SNS topics
+without having to hard code the ARNs as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sns_topic import DataAwsSnsTopic
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSnsTopic(self, "example",
+            name="an_example_topic"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly name of the topic to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the found topic, suitable for referencing in other resources that support SNS topics.
+* `id` - ARN of the found topic, suitable for referencing in other resources that support SNS topics.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sqs_queue.html.markdown b/website/docs/cdktf/python/d/sqs_queue.html.markdown
new file mode 100644
index 00000000000..43b7d267b96
--- /dev/null
+++ b/website/docs/cdktf/python/d/sqs_queue.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queue"
+description: |-
+  Get information on an Amazon Simple Queue Service (SQS) Queue
+---
+
+
+
+# Data Source: aws_sqs_queue
+
+Use this data source to get the ARN and URL of a queue in AWS Simple Queue Service (SQS).
+By using this data source, you can reference SQS queues without having to hardcode
+the ARNs as input.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sqs_queue import DataAwsSqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSqsQueue(self, "example",
+            name="queue"
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the queue to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the queue.
+* `url` - URL of the queue.
+* `tags` - Map of tags for the resource.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/sqs_queues.html.markdown b/website/docs/cdktf/python/d/sqs_queues.html.markdown
new file mode 100644
index 00000000000..8ac9c594aea
--- /dev/null
+++ b/website/docs/cdktf/python/d/sqs_queues.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queues"
+description: |-
+  Terraform data source for managing AWS SQS (Simple Queue) Queues.
+---
+
+
+
+# Data Source: aws_sqs_queues
+
+Terraform data source for managing AWS SQS (Simple Queue) Queues.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_sqs_queues import DataAwsSqsQueues
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSqsQueues(self, "example",
+            queue_name_prefix="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `queue_name_prefix` - (Optional) A string to use for filtering the list results. Only those queues whose name begins with the specified string are returned. Queue URLs and names are case-sensitive.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `queue_urls` - A list of queue URLs.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ssm_document.html.markdown b/website/docs/cdktf/python/d/ssm_document.html.markdown
new file mode 100644
index 00000000000..ea12c438d57
--- /dev/null
+++ b/website/docs/cdktf/python/d/ssm_document.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_document"
+description: |-
+  Provides an SSM Document data source
+---
+
+
+
+# Data Source: aws_ssm_document
+
+Gets the contents of the specified Systems Manager document.
+
+## Example Usage
+
+To get the contents of a document owned by AWS:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_document import DataAwsSsmDocument
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foo = DataAwsSsmDocument(self, "foo",
+            document_format="YAML",
+            name="AWS-GatherSoftwareInventory"
+        )
+        TerraformOutput(self, "content",
+            value=foo.content
+        )
+```
+
+To get the contents of a custom document:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_document import DataAwsSsmDocument
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmDocument(self, "test",
+            document_format="JSON",
+            name=Token.as_string(aws_ssm_document_test.name)
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the Systems Manager document.
+* `document_format` - (Optional) Returns the document in the specified format. The document format can be `JSON`, `YAML`, or `TEXT`. The default format is `JSON`.
+* `document_version` - (Optional) Document version for which you want information.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the document. If the document is an AWS managed document, this value will be set to the name of the document instead.
+* `content` - Contents of the document.
+* `document_type` - Type of the document.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ssm_instances.html.markdown b/website/docs/cdktf/python/d/ssm_instances.html.markdown
new file mode 100644
index 00000000000..2e930e94190
--- /dev/null
+++ b/website/docs/cdktf/python/d/ssm_instances.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_instances"
+description: |-
+  Get information on SSM managed instances.
+---
+
+
+
+# Data Source: aws_ssm_instances
+
+Use this data source to get the instance IDs of SSM managed instances.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_instances import DataAwsSsmInstances, DataAwsSsmInstancesFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmInstances(self, "example",
+            filter=[DataAwsSsmInstancesFilter(
+                name="PlatformTypes",
+                values=["Linux"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [SSM InstanceInformationStringFilter API Reference](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_InstanceInformationStringFilter.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - Set of instance IDs of the matched SSM managed instances.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ssm_maintenance_windows.html.markdown b/website/docs/cdktf/python/d/ssm_maintenance_windows.html.markdown
new file mode 100644
index 00000000000..49be087f7c0
--- /dev/null
+++ b/website/docs/cdktf/python/d/ssm_maintenance_windows.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_maintenance_windows"
+description: |-
+  Get information on SSM maintenance windows.
+---
+
+
+
+# Data Source: aws_ssm_maintenance_windows
+
+Use this data source to get the window IDs of SSM maintenance windows.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_maintenance_windows import DataAwsSsmMaintenanceWindows, DataAwsSsmMaintenanceWindowsFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmMaintenanceWindows(self, "example",
+            filter=[DataAwsSsmMaintenanceWindowsFilter(
+                name="Enabled",
+                values=["true"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [SSM DescribeMaintenanceWindows API Reference](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_DescribeMaintenanceWindows.html#API_DescribeMaintenanceWindows_RequestSyntax).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - List of window IDs of the matched SSM maintenance windows.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ssm_parameter.html.markdown b/website/docs/cdktf/python/d/ssm_parameter.html.markdown
new file mode 100644
index 00000000000..1043f931ed2
--- /dev/null
+++ b/website/docs/cdktf/python/d/ssm_parameter.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_parameter"
+description: |-
+  Provides an SSM Parameter Data Source
+---
+
+
+
+# Data Source: aws_ssm_parameter
+
+Provides an SSM Parameter data source.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_parameter import DataAwsSsmParameter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmParameter(self, "foo",
+            name="foo"
+        )
+```
+
+~> **Note:** The unencrypted value of a SecureString will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **Note:** The data source is currently following the behavior of the [SSM API](https://docs.aws.amazon.com/sdk-for-go/api/service/ssm/#Parameter) to return a string value, regardless of parameter type. For type `StringList`, we can use the built-in [split()](https://www.terraform.io/docs/configuration/functions/split.html) function to get values in a list. Example: `split(",", data.aws_ssm_parameter.subnets.value)`
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the parameter.
+* `with_decryption` - (Optional) Whether to return decrypted `SecureString` value. Defaults to `true`.
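+
+Because `StringList` values come back as a single comma-separated string, here is a hedged sketch of the split pattern mentioned in the note above (assuming the data source is assigned to the variable `foo` as in the example, and that it holds a comma-separated list such as subnet IDs; the output name is illustrative):
+
+```python
+# Hypothetical sketch: turn a StringList parameter into a Terraform list.
+from cdktf import Fn, TerraformOutput
+
+TerraformOutput(self, "subnet_ids",
+    value=Fn.split(",", foo.value),
+    sensitive=True  # the parameter's value attribute is marked sensitive
+)
+```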
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the parameter.
+* `name` - Name of the parameter.
+* `type` - Type of the parameter. Valid types are `String`, `StringList` and `SecureString`.
+* `value` - Value of the parameter. This value is always marked as sensitive in the Terraform plan output, regardless of `type`. In Terraform CLI version 0.15 and later, this may require additional configuration handling for certain scenarios. For more information, see the [Terraform v0.15 Upgrade Guide](https://www.terraform.io/upgrade-guides/0-15.html#sensitive-output-values).
+* `insecure_value` - Value of the parameter. **Use caution:** This value is never marked as sensitive.
+* `version` - Version of the parameter.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/ssm_parameters_by_path.html.markdown b/website/docs/cdktf/python/d/ssm_parameters_by_path.html.markdown
new file mode 100644
index 00000000000..cf11809c438
--- /dev/null
+++ b/website/docs/cdktf/python/d/ssm_parameters_by_path.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_parameters_by_path"
+description: |-
+  Provides SSM Parameters by path
+---
+
+
+
+# Data Source: aws_ssm_parameters_by_path
+
+Provides SSM Parameters by path.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_parameters_by_path import DataAwsSsmParametersByPath
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmParametersByPath(self, "foo",
+            path="/foo"
+        )
+```
+
+~> **Note:** The unencrypted value of a SecureString will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](/docs/state/sensitive-data.html).
+
+~> **Note:** The data source is currently following the behavior of the [SSM API](https://docs.aws.amazon.com/sdk-for-go/api/service/ssm/#Parameter) to return a string value, regardless of parameter type. For type `StringList`, we can use the built-in [split()](https://www.terraform.io/docs/configuration/functions/split.html) function to get values in a list. Example: `split(",", data.aws_ssm_parameter.subnets.value)`
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `path` - (Required) Prefix path of the parameter.
+* `with_decryption` - (Optional) Whether to return decrypted `SecureString` value. Defaults to `true`.
+* `recursive` - (Optional) Whether to recursively return parameters under `path`. Defaults to `false`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - ARNs of the parameters.
+* `names` - Names of the parameters.
+* `types` - Types of the parameters.
+* `values` - Values of the parameters.
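+
+A brief, hedged usage sketch (the output name is illustrative, and `foo` is the data source instance from the example above) of listing the discovered parameter names:
+
+```python
+# Hypothetical sketch: expose the names of all parameters found under the path.
+from cdktf import TerraformOutput
+
+TerraformOutput(self, "parameter_names",
+    value=foo.names
+)
+```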
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssm_patch_baseline.html.markdown new file mode 100644 index 00000000000..9f5231c977a --- /dev/null +++ b/website/docs/cdktf/python/d/ssm_patch_baseline.html.markdown @@ -0,0 +1,97 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_patch_baseline"
+description: |-
+  Provides an SSM Patch Baseline data source
+---
+
+
+
+# Data Source: aws_ssm_patch_baseline
+
+Provides an SSM Patch Baseline data source. Useful if you wish to reuse the default baselines provided by AWS.
+
+## Example Usage
+
+To retrieve a baseline provided by AWS:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_patch_baseline import DataAwsSsmPatchBaseline
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmPatchBaseline(self, "centos",
+            name_prefix="AWS-",
+            operating_system="CENTOS",
+            owner="AWS"
+        )
+```
+
+To retrieve a baseline on your account:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssm_patch_baseline import DataAwsSsmPatchBaseline
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmPatchBaseline(self, "default_custom",
+            default_baseline=True,
+            name_prefix="MyCustomBaseline",
+            operating_system="WINDOWS",
+            owner="Self"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `owner` - (Required) Owner of the baseline. Valid values: `All`, `AWS`, `Self` (the current account).
+* `name_prefix` - (Optional) Filter results by the baseline name prefix.
+* `default_baseline` - (Optional) Filters the results against the baseline's `default_baseline` field.
+* `operating_system` - (Optional) Specified OS for the baseline. Valid values: `AMAZON_LINUX`, `AMAZON_LINUX_2`, `UBUNTU`, `REDHAT_ENTERPRISE_LINUX`, `SUSE`, `CENTOS`, `ORACLE_LINUX`, `DEBIAN`, `MACOS`, `RASPBIAN` and `ROCKY_LINUX`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `approved_patches` - List of explicitly approved patches for the baseline.
+* `approved_patches_compliance_level` - The compliance level for approved patches.
+* `approved_patches_enable_non_security` - Indicates whether the list of approved patches includes non-security updates that should be applied to the instances.
+* `approval_rule` - List of rules used to include patches in the baseline.
+    * `approve_after_days` - The number of days after the release date of each patch matched by the rule that the patch is marked as approved in the patch baseline.
+    * `approve_until_date` - The cutoff date for auto approval of released patches. Any patches released on or before this date are installed automatically. Date is formatted as `YYYY-MM-DD`. Conflicts with `approve_after_days`.
+    * `compliance_level` - The compliance level for patches approved by this rule.
+    * `enable_non_security` - Boolean enabling the application of non-security updates.
+    * `patch_filter` - The patch filter group that defines the criteria for the rule.
+        * `key` - The key for the filter.
+        * `values` - The values for the filter.
+* `global_filter` - Set of global filters used to exclude patches from the baseline.
+    * `key` - The key for the filter.
+    * `values` - The values for the filter.
+* `id` - ID of the baseline.
+* `name` - Name of the baseline.
+* `description` - Description of the baseline.
+* `rejected_patches` - List of rejected patches.
+* `rejected_patches_action` - The action specified to take on patches included in the `rejected_patches` list.
+* `source` - Information about the patches to use to update the managed nodes, including target operating systems and source repositories.
+    * `configuration` - The value of the yum repo configuration.
+    * `name` - The name specified to identify the patch source.
+    * `products` - The specific operating system versions a patch repository applies to.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssmcontacts_contact.html.markdown new file mode 100644 index 00000000000..d48838cc35e --- /dev/null +++ b/website/docs/cdktf/python/d/ssmcontacts_contact.html.markdown @@ -0,0 +1,51 @@
+---
+subcategory: "SSM Contacts"
+layout: "aws"
+page_title: "AWS: aws_ssmcontacts_contact"
+description: |-
+  Terraform data source for managing an AWS SSM Contact.
+---
+
+
+
+# Data Source: aws_ssmcontacts_contact
+
+Terraform data source for managing an AWS SSM Contact.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssmcontacts_contact import DataAwsSsmcontactsContact
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmcontactsContact(self, "example",
+            arn="arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the contact or escalation plan.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `alias` - A unique and identifiable alias of the contact or escalation plan.
+* `type` - The type of contact engaged. A single contact is type `PERSONAL` and an escalation plan is type `ESCALATION`.
+* `display_name` - Full friendly name of the contact or escalation plan.
+* `tags` - Map of tags assigned to the resource.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssmcontacts_contact_channel.html.markdown new file mode 100644 index 00000000000..a252abd8843 --- /dev/null +++ b/website/docs/cdktf/python/d/ssmcontacts_contact_channel.html.markdown @@ -0,0 +1,56 @@
+---
+subcategory: "SSM Contacts"
+layout: "aws"
+page_title: "AWS: aws_ssmcontacts_contact_channel"
+description: |-
+  Terraform data source for managing an AWS SSM Contacts Contact Channel.
+---
+
+
+
+# Data Source: aws_ssmcontacts_contact_channel
+
+Terraform data source for managing an AWS SSM Contacts Contact Channel.
+ +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ssmcontacts_contact_channel import DataAwsSsmcontactsContactChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSsmcontactsContactChannel(self, "example", + arn="arn:aws:ssm-contacts:us-west-2:123456789012:contact-channel/example" + ) +``` + +## Argument Reference + +The following arguments are required: + +- `arn` - Amazon Resource Name (ARN) of the contact channel. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +- `activation_status` - Whether the contact channel is activated. + +- `contact_id` - Amazon Resource Name (ARN) of the AWS SSM Contact that the contact channel belongs to. + +- `delivery_address` - Details used to engage the contact channel. + +- `name` - Name of the contact channel. + +- `type` - Type of the contact channel. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssmcontacts_plan.html.markdown b/website/docs/cdktf/python/d/ssmcontacts_plan.html.markdown new file mode 100644 index 00000000000..aa35b7b8de6 --- /dev/null +++ b/website/docs/cdktf/python/d/ssmcontacts_plan.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_plan" +description: |- + Terraform data source for managing an AWS SSM Contact Plan. +--- + + + +# Data Source: aws_ssmcontacts_plan + +Terraform data source for managing a Plan of an AWS SSM Contact. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ssmcontacts_plan import DataAwsSsmcontactsPlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSsmcontactsPlan(self, "test", + contact_id="arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `contact_id` - (Required) The Amazon Resource Name (ARN) of the contact or escalation plan. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `stage` - List of stages. A contact has an engagement plan with stages that contact specified contact channels. An escalation plan uses stages that contact specified contacts. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssmincidents_replication_set.html.markdown b/website/docs/cdktf/python/d/ssmincidents_replication_set.html.markdown new file mode 100644 index 00000000000..29b568995ac --- /dev/null +++ b/website/docs/cdktf/python/d/ssmincidents_replication_set.html.markdown @@ -0,0 +1,61 @@ +--- +subcategory: "SSM Incident Manager Incidents" +layout: "aws" +page_title: "AWS: aws_ssmincidents_replication_set" +description: |- + Terraform data source for managing an incident replication set in AWS Systems Manager Incident Manager. 
+---
+
+
+
+
+# Data Source: aws_ssmincidents_replication_set
+
+~> **NOTE:** The AWS Region specified by a Terraform provider must always be one of the Regions specified for the replication set.
+
+Use this Terraform data source to manage a replication set in AWS Systems Manager Incident Manager.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssmincidents_replication_set import DataAwsSsmincidentsReplicationSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmincidentsReplicationSet(self, "example")
+```
+
+## Argument Reference
+
+No arguments are required.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the replication set.
+* `tags` - All tags applied to the replication set.
+* `created_by` - The ARN of the user who created the replication set.
+* `deletion_protected` - If `true`, the last remaining Region in a replication set can’t be deleted.
+* `last_modified_by` - The ARN of the user who last modified the replication set.
+* `status` - The overall status of a replication set.
+    * Valid Values: `ACTIVE` | `CREATING` | `UPDATING` | `DELETING` | `FAILED`
+
+The `region` configuration block exports the following attributes for each Region:
+
+* `name` - The name of the Region.
+* `kms_key_arn` - The ARN of the AWS Key Management Service (AWS KMS) encryption key.
+* `status` - The current status of the Region.
+    * Valid Values: `ACTIVE` | `CREATING` | `UPDATING` | `DELETING` | `FAILED`
+* `status_message` - More information about the status of a Region.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssmincidents_response_plan.html.markdown new file mode 100644 index 00000000000..141b5d2064a --- /dev/null +++ b/website/docs/cdktf/python/d/ssmincidents_response_plan.html.markdown @@ -0,0 +1,90 @@
+---
+subcategory: "SSM Incident Manager Incidents"
+layout: "aws"
+page_title: "AWS: aws_ssmincidents_response_plan"
+description: |-
+  Terraform data source for managing a response plan in AWS Systems Manager Incident Manager.
+---
+
+
+
+# Data Source: aws_ssmincidents_response_plan
+
+Use this Terraform data source to manage a response plan in AWS Systems Manager Incident Manager.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssmincidents_response_plan import DataAwsSsmincidentsResponsePlan
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsSsmincidentsResponsePlan(self, "example",
+            arn="exampleARN"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the response plan.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` - The name of the response plan.
+* `tags` - The tags applied to the response plan.
+* `display_name` - The long format of the response plan name. This field can contain spaces.
+* `chat_channel` - The Chatbot chat channel used for collaboration during an incident.
+* `engagements` - The Amazon Resource Names (ARNs) of the contacts and escalation plans that the response plan engages during an incident.
+
+The `incident_template` configuration block exports the following attributes:
+
+* `title` - The title of a generated incident.
+* `impact` - The impact value of a generated incident. The following values are supported:
+    * `1` - Severe Impact
+    * `2` - High Impact
+    * `3` - Medium Impact
+    * `4` - Low Impact
+    * `5` - No Impact
+* `dedupe_string` - A string used to stop Incident Manager from creating multiple incident records for the same incident.
+* `incident_tags` - The tags assigned to an incident template. When an incident starts, Incident Manager assigns the tags specified in the template to the incident.
+* `summary` - The summary of an incident.
+* `notification_target` - The Amazon Simple Notification Service (Amazon SNS) targets that are notified when the incident is updated. The `notification_target` configuration block supports the following argument:
+    * `sns_topic_arn` - The ARN of the Amazon SNS topic.
+
+The `action` configuration block exports the following attributes:
+
+* `action` - The actions that the response plan starts at the beginning of an incident.
+    * `ssm_automation` - The Systems Manager automation document to start as the runbook at the beginning of the incident. The following values are supported:
+        * `document_name` - The automation document's name.
+        * `role_arn` - The Amazon Resource Name (ARN) of the role that the automation document assumes when it runs commands.
+        * `document_version` - The version of the automation document to use at runtime.
+        * `target_account` - The account that runs the automation document. This can be in either the management account or an application account.
+        * `parameter` - The key-value pair parameters used when the automation document runs. The following values are supported:
+            * `name` - The name of the parameter.
+            * `values` - The values for the associated parameter name.
+        * `dynamic_parameters` - The key-value pair used to resolve dynamic parameter values when processing a Systems Manager Automation runbook.
+
+The `integration` configuration block exports the following attributes:
+
+* `integration` - Information about third-party services integrated into the response plan. The following values are supported:
+    * `pagerduty` - Details about the PagerDuty configuration for a response plan. The following values are supported:
+        * `name` - The name of the PagerDuty configuration.
+        * `service_id` - The ID of the PagerDuty service that the response plan associates with an incident when it launches.
+        * `secret_id` - The ID of the AWS Secrets Manager secret that stores your PagerDuty key (either a General Access REST API Key or User Token REST API Key) and other user credentials.
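+
+As a minimal hand-written sketch (not `cdktf convert` output) of consuming these attributes, the following looks up a response plan and exports two of its top-level attributes; the ARN is a placeholder:
+
+```python
+# Hand-written sketch: exporting top-level response plan attributes.
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_ssmincidents_response_plan import DataAwsSsmincidentsResponsePlan
+class ResponsePlanOutputsStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Placeholder ARN; substitute the ARN of your own response plan.
+        example = DataAwsSsmincidentsResponsePlan(self, "example",
+            arn="arn:aws:ssm-incidents::123456789012:response-plan/example-plan"
+        )
+        TerraformOutput(self, "response_plan_name", value=example.name)
+        TerraformOutput(self, "response_plan_display_name", value=example.display_name)
+```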
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssoadmin_instances.html.markdown new file mode 100644 index 00000000000..bc3b9658825 --- /dev/null +++ b/website/docs/cdktf/python/d/ssoadmin_instances.html.markdown @@ -0,0 +1,50 @@
+---
+subcategory: "SSO Admin"
+layout: "aws"
+page_title: "AWS: aws_ssoadmin_instances"
+description: |-
+  Get information on SSO Instances.
+---
+
+
+
+# Data Source: aws_ssoadmin_instances
+
+Use this data source to get ARNs and Identity Store IDs of Single Sign-On (SSO) Instances.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, Fn, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsSsoadminInstances(self, "example")
+        TerraformOutput(self, "arn",
+            value=property_access(Fn.tolist(example.arns), ["0"])
+        )
+        TerraformOutput(self, "identity_store_id",
+            value=property_access(Fn.tolist(example.identity_store_ids), ["0"])
+        )
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of Amazon Resource Names (ARNs) of the SSO Instances.
+* `id` - AWS Region.
+* `identity_store_ids` - Set of identifiers of the identity stores connected to the SSO Instances.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/ssoadmin_permission_set.html.markdown new file mode 100644 index 00000000000..a5dd2138cc9 --- /dev/null +++ b/website/docs/cdktf/python/d/ssoadmin_permission_set.html.markdown @@ -0,0 +1,64 @@
+---
+subcategory: "SSO Admin"
+layout: "aws"
+page_title: "AWS: aws_ssoadmin_permission_set"
+description: |-
+  Get information on a Single Sign-On (SSO) Permission Set.
+---
+
+
+
+# Data Source: aws_ssoadmin_permission_set
+
+Use this data source to get a Single Sign-On (SSO) Permission Set.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, property_access, Token, TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
+from imports.aws.data_aws_ssoadmin_permission_set import DataAwsSsoadminPermissionSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsSsoadminInstances(self, "example")
+        data_aws_ssoadmin_permission_set_example = DataAwsSsoadminPermissionSet(self, "example_1",
+            instance_arn=Token.as_string(
+                property_access(Fn.tolist(example.arns), ["0"])),
+            name="Example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_ssoadmin_permission_set_example.override_logical_id("example")
+        TerraformOutput(self, "arn",
+            value=data_aws_ssoadmin_permission_set_example.arn
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+~> **NOTE:** Either `arn` or `name` must be configured.
+
+* `arn` - (Optional) ARN of the permission set.
+* `instance_arn` - (Required) ARN of the SSO Instance associated with the permission set.
+* `name` - (Optional) Name of the SSO Permission Set.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ARN of the Permission Set.
+* `description` - Description of the Permission Set.
+* `relay_state` - Relay state URL used to redirect users within the application during the federation authentication process.
+* `session_duration` - Length of time that the application user sessions are valid in the ISO-8601 standard.
+* `tags` - Key-value map of resource tags.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/storagegateway_local_disk.html.markdown new file mode 100644 index 00000000000..71aa90a25d2 --- /dev/null +++ b/website/docs/cdktf/python/d/storagegateway_local_disk.html.markdown @@ -0,0 +1,48 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_local_disk"
+description: |-
+  Retrieve information about a Storage Gateway local disk
+---
+
+
+
+# Data Source: aws_storagegateway_local_disk
+
+Retrieve information about a Storage Gateway local disk. The disk identifier is useful for adding the disk as a cache or upload buffer to a gateway.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_storagegateway_local_disk import DataAwsStoragegatewayLocalDisk
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsStoragegatewayLocalDisk(self, "test",
+            disk_path=Token.as_string(aws_volume_attachment_test.device_name),
+            gateway_arn=Token.as_string(aws_storagegateway_gateway_test.arn)
+        )
+```
+
+## Argument Reference
+
+* `gateway_arn` - (Required) ARN of the gateway.
+* `disk_node` - (Optional) Device node of the local disk to retrieve. For example, `/dev/sdb`.
+* `disk_path` - (Optional) Device path of the local disk to retrieve. For example, `/dev/xvdb` or `/dev/nvme1n1`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `disk_id` - Disk identifier, e.g., `pci-0000:03:00.0-scsi-0:0:0:0`.
+* `id` - Disk identifier, e.g., `pci-0000:03:00.0-scsi-0:0:0:0`.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/d/subnet.html.markdown new file mode 100644 index 00000000000..2233c4d2a6e --- /dev/null +++ b/website/docs/cdktf/python/d/subnet.html.markdown @@ -0,0 +1,129 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_subnet"
+description: |-
+  Provides details about a specific VPC subnet
+---
+
+
+
+# Data Source: aws_subnet
+
+`aws_subnet` provides details about a specific VPC subnet.
+ +This resource can prove useful when a module accepts a subnet ID as an input variable and needs to, for example, determine the ID of the VPC that the subnet belongs to. + +## Example Usage + +The following example shows how one might accept a subnet ID as a variable and use this data source to obtain the data necessary to create a security group that allows connections from hosts in that subnet. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_subnet import DataAwsSubnet +from imports.aws.security_group import SecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + subnet_id = TerraformVariable(self, "subnet_id") + selected = DataAwsSubnet(self, "selected", + id=subnet_id.string_value + ) + SecurityGroup(self, "subnet", + ingress=[SecurityGroupIngress( + cidr_blocks=[Token.as_string(selected.cidr_block)], + from_port=80, + protocol="tcp", + to_port=80 + ) + ], + vpc_id=Token.as_string(selected.vpc_id) + ) +``` + +### Filter Example + +If you want to match against tag `Name`, use: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_subnet import DataAwsSubnet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSubnet(self, "selected", + filter=[DataAwsSubnetFilter( + name="tag:Name", + values=["yakdriver"] + ) + ] + ) +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available subnets in the current region. The given filters must match exactly one subnet whose data will be exported as attributes. + +The following arguments are optional: + +* `availability_zone` - (Optional) Availability zone where the subnet must reside. +* `availability_zone_id` - (Optional) ID of the Availability Zone for the subnet. This argument is not supported in all regions or partitions. If necessary, use `availability_zone` instead. +* `cidr_block` - (Optional) CIDR block of the desired subnet. +* `default_for_az` - (Optional) Whether the desired subnet must be the default subnet for its associated availability zone. +* `filter` - (Optional) Configuration block. Detailed below. +* `id` - (Optional) ID of the specific subnet to retrieve. +* `ipv6_cidr_block` - (Optional) IPv6 CIDR block of the desired subnet. +* `state` - (Optional) State that the desired subnet must have. +* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired subnet. +* `vpc_id` - (Optional) ID of the VPC that the desired subnet belongs to. + +### filter + +This block allows for complex filters. You can use one or more `filter` blocks. 
+ +The following arguments are required: + +* `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html). +* `values` - (Required) Set of values that are accepted for the given field. A subnet will be selected if any one of the given values matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the subnet. +* `assign_ipv6_address_on_creation` - Whether an IPv6 address is assigned on creation. +* `available_ip_address_count` - Available IP addresses of the subnet. +* `customer_owned_ipv4_pool` - Identifier of customer owned IPv4 address pool. +* `enable_dns64` - Whether DNS queries made to the Amazon-provided DNS Resolver in this subnet return synthetic IPv6 addresses for IPv4-only destinations. +* `enable_lni_at_device_index` - Indicates the device position for local network interfaces in this subnet. For example, 1 indicates local network interfaces in this subnet are the secondary network interface (eth1). A local network interface cannot be the primary network interface (eth0). +* `enable_resource_name_dns_aaaa_record_on_launch` - Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records. +* `enable_resource_name_dns_a_record_on_launch` - Indicates whether to respond to DNS queries for instance hostnames with DNS A records. +* `ipv6_cidr_block_association_id` - Association ID of the IPv6 CIDR block. +* `ipv6_native` - Whether this is an IPv6-only subnet. +* `map_customer_owned_ip_on_launch` - Whether customer owned IP addresses are assigned on network interface creation. +* `map_public_ip_on_launch` - Whether public IP addresses are assigned on instance launch. +* `outpost_arn` - ARN of the Outpost. +* `owner_id` - ID of the AWS account that owns the subnet. +* `private_dns_hostname_type_on_launch` - The type of hostnames assigned to instances in the subnet at launch. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/subnets.html.markdown b/website/docs/cdktf/python/d/subnets.html.markdown new file mode 100644 index 00000000000..da619c645d1 --- /dev/null +++ b/website/docs/cdktf/python/d/subnets.html.markdown @@ -0,0 +1,146 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_subnets" +description: |- + Get information about a set of subnets. +--- + + + +# Data Source: aws_subnets + +This resource can be useful for getting back a set of subnet IDs. + +## Example Usage + +The following shows outputting all CIDR blocks for every subnet ID in a VPC. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformIterator, TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_subnet import DataAwsSubnet +from imports.aws.data_aws_subnets import DataAwsSubnets +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsSubnets(self, "example", + filter=[DataAwsSubnetsFilter( + name="vpc-id", + values=[vpc_id.string_value] + ) + ] + ) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_for_each_iterator = TerraformIterator.from_list( + Token.as_any(Fn.toset(example.ids))) + data_aws_subnet_example = DataAwsSubnet(self, "example_1", + id=Token.as_string(example_for_each_iterator.value), + for_each=example_for_each_iterator + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_subnet_example.override_logical_id("example") + TerraformOutput(self, "subnet_cidr_blocks", + value="${[ for s in ${" + data_aws_subnet_example.fqn + "} : s.cidr_block]}" + ) +``` + +The following example retrieves a set of all subnets in a VPC with a custom +tag of `Tier` set to a value of "Private" so that the `aws_instance` resource +can loop through the subnets, putting instances across availability zones. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformIterator, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_subnets import DataAwsSubnets +from imports.aws.instance import Instance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + private = DataAwsSubnets(self, "private", + filter=[DataAwsSubnetsFilter( + name="vpc-id", + values=[vpc_id.string_value] + ) + ], + tags={ + "Tier": "Private" + } + ) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + app_for_each_iterator = TerraformIterator.from_list( + Token.as_any(Fn.toset(private.ids))) + Instance(self, "app", + ami=ami.string_value, + instance_type="t2.micro", + subnet_id=Token.as_string(app_for_each_iterator.value), + for_each=app_for_each_iterator + ) +``` + +## Argument Reference + +* `filter` - (Optional) Custom filter block as described below. +* `tags` - (Optional) Map of tags, each pair of which must exactly match + a pair on the desired subnets. + +More complex filters can be expressed using one or more `filter` sub-blocks, +which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html). 
+ For example, if matching against tag `Name`, use: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_subnets import DataAwsSubnets +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsSubnets(self, "selected", + filter=[DataAwsSubnetsFilter( + name="tag:Name", + values=[""] + ) + ] + ) +``` + +* `values` - (Required) Set of values that are accepted for the given field. + Subnet IDs will be selected if any one of the given values match. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - List of all the subnet ids found. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/transfer_server.html.markdown b/website/docs/cdktf/python/d/transfer_server.html.markdown new file mode 100644 index 00000000000..aeba8ef291f --- /dev/null +++ b/website/docs/cdktf/python/d/transfer_server.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Transfer Family" +layout: "aws" +page_title: "AWS: aws_transfer_server" +description: |- + Get information on an AWS Transfer Server resource +--- + + + +# Data Source: aws_transfer_server + +Use this data source to get the ARN of an AWS Transfer Server for use in other +resources. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_transfer_server import DataAwsTransferServer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsTransferServer(self, "example", + server_id="s-1234567" + ) +``` + +## Argument Reference + +* `server_id` - (Required) ID for an SFTP server. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of Transfer Server. +* `certificate` - ARN of any certificate. +* `domain` - The domain of the storage system that is used for file transfers. +* `endpoint` - Endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`). +* `endpoint_type` - Type of endpoint that the server is connected to. +* `identity_provider_type` - The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. `API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. +* `invocation_role` - ARN of the IAM role used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`. +* `logging_role` - ARN of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes. 
+* `protocols` - File transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. +* `security_policy_name` - The name of the security policy that is attached to the server. +* `structured_logging_destinations` - A set of ARNs of destinations that will receive structured logs from the transfer server such as CloudWatch Log Group ARNs. +* `url` - URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc.html.markdown b/website/docs/cdktf/python/d/vpc.html.markdown index 1cd65241831..25ad4a0cc4d 100644 --- a/website/docs/cdktf/python/d/vpc.html.markdown +++ b/website/docs/cdktf/python/d/vpc.html.markdown @@ -81,7 +81,7 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating @@ -113,4 +113,4 @@ The following attribute is additionally exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_dhcp_options.html.markdown b/website/docs/cdktf/python/d/vpc_dhcp_options.html.markdown index 76e151f3eae..ce9594c157c 100644 --- a/website/docs/cdktf/python/d/vpc_dhcp_options.html.markdown +++ b/website/docs/cdktf/python/d/vpc_dhcp_options.html.markdown @@ -71,7 +71,9 @@ For more information about filtering, see the [EC2 API documentation](https://do * `name` - (Required) Name of the field to filter. * `values` - (Required) Set of values for filtering. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the DHCP Options Set. * `dhcp_options_id` - EC2 DHCP Options ID @@ -90,4 +92,4 @@ For more information about filtering, see the [EC2 API documentation](https://do - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_endpoint.html.markdown b/website/docs/cdktf/python/d/vpc_endpoint.html.markdown index 2907de937d7..7791a8cd6dc 100644 --- a/website/docs/cdktf/python/d/vpc_endpoint.html.markdown +++ b/website/docs/cdktf/python/d/vpc_endpoint.html.markdown @@ -59,13 +59,14 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC Endpoint will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference In addition to all arguments above except `filter`, the following attributes are exported: * `arn` - ARN of the VPC endpoint. * `cidr_blocks` - List of CIDR blocks for the exposed AWS service. Applicable for endpoints of type `Gateway`. -* `dns_entry` - DNS entries for the VPC Endpoint. Applicable for endpoints of type `Interface`. DNS blocks are documented below. +* `dns_entry` - DNS entries for the VPC Endpoint. Applicable for endpoints of type `Interface`. [DNS entry blocks are documented below](#dns_entry-block). +* `dns_options` - DNS options for the VPC Endpoint. [DNS options blocks are documented below](#dns_options-block). * `network_interface_ids` - One or more network interfaces for the VPC Endpoint. Applicable for endpoints of type `Interface`. 
* `owner_id` - ID of the AWS account that owns the VPC endpoint. * `policy` - Policy document associated with the VPC Endpoint. Applicable for endpoints of type `Gateway`. @@ -77,15 +78,24 @@ In addition to all arguments above except `filter`, the following attributes are * `subnet_ids` - One or more subnets in which the VPC Endpoint is located. Applicable for endpoints of type `Interface`. * `vpc_endpoint_type` - VPC Endpoint type, `Gateway` or `Interface`. +### `dns_entry` Block + DNS blocks (for `dns_entry`) support the following attributes: * `dns_name` - DNS name. * `hosted_zone_id` - ID of the private hosted zone. +### `dns_options` Block + +DNS options (for `dns_options`) support the following attributes: + +* `dns_record_ip_type` - The DNS records created for the endpoint. +* `private_dns_only_for_inbound_resolver_endpoint` - Indicates whether to enable private DNS only for inbound endpoints. + ## Timeouts [Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_endpoint_service.html.markdown b/website/docs/cdktf/python/d/vpc_endpoint_service.html.markdown index 98160c5f78b..c3c2db12754 100644 --- a/website/docs/cdktf/python/d/vpc_endpoint_service.html.markdown +++ b/website/docs/cdktf/python/d/vpc_endpoint_service.html.markdown @@ -101,14 +101,14 @@ The given filters must match exactly one VPC endpoint service whose data will be ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeVpcEndpointServices API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpcEndpointServices.html). * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `acceptance_required` - Whether or not VPC endpoint connection requests to the service must be accepted by the service owner - `true` or `false`. * `arn` - ARN of the VPC endpoint service. @@ -128,4 +128,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_ipam_pool.html.markdown b/website/docs/cdktf/python/d/vpc_ipam_pool.html.markdown index 4f91a3477f0..308f262e272 100644 --- a/website/docs/cdktf/python/d/vpc_ipam_pool.html.markdown +++ b/website/docs/cdktf/python/d/vpc_ipam_pool.html.markdown @@ -67,7 +67,7 @@ VPC whose data will be exported as attributes. * `name` - (Required) The name of the filter. Filter names are case-sensitive. * `values` - (Required) The filter values. Filter values are case-sensitive. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. 
This data source will complete the data by populating @@ -98,4 +98,4 @@ The following attribute is additionally exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_ipam_pool_cidrs.html.markdown b/website/docs/cdktf/python/d/vpc_ipam_pool_cidrs.html.markdown index 20cb8276773..940466ce762 100644 --- a/website/docs/cdktf/python/d/vpc_ipam_pool_cidrs.html.markdown +++ b/website/docs/cdktf/python/d/vpc_ipam_pool_cidrs.html.markdown @@ -96,7 +96,7 @@ VPC whose data will be exported as attributes. * `ipam_pool_id` - ID of the IPAM pool you would like the list of provisioned CIDRs. * `filter` - Custom filter block as described below. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating @@ -118,4 +118,4 @@ The following attribute is additionally exported: - `read` - (Default `1m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_ipam_pools.html.markdown b/website/docs/cdktf/python/d/vpc_ipam_pools.html.markdown index dd31e8588f9..cb73af3a759 100644 --- a/website/docs/cdktf/python/d/vpc_ipam_pools.html.markdown +++ b/website/docs/cdktf/python/d/vpc_ipam_pools.html.markdown @@ -54,9 +54,9 @@ IPAM Pools in the current region. * `name` - (Required) The name of the filter. Filter names are case-sensitive. * `values` - (Required) The filter values. Filter values are case-sensitive. -## Attributes Reference +## Attribute Reference -In addition to all of the arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `ipam_pools` - List of IPAM pools and their attributes. See below for details @@ -80,4 +80,4 @@ The following attributes are available on each pool entry found. * `source_ipam_pool_id` - ID of the source IPAM pool. * `tags` - Map of tags to assigned to the resource. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_ipam_preview_next_cidr.html.markdown b/website/docs/cdktf/python/d/vpc_ipam_preview_next_cidr.html.markdown index d053476aa9f..751f865e844 100644 --- a/website/docs/cdktf/python/d/vpc_ipam_preview_next_cidr.html.markdown +++ b/website/docs/cdktf/python/d/vpc_ipam_preview_next_cidr.html.markdown @@ -50,15 +50,15 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `disallowed_cidrs` - (Optional) Exclude a particular CIDR range from being returned by the pool. * `ipam_pool_id` - (Required) ID of the pool to which you want to assign a CIDR. * `netmask_length` - (Optional) Netmask length of the CIDR you would like to preview from the IPAM pool. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `cidr` - Previewed CIDR from the pool. * `id` - ID of the preview. 
@@ -69,4 +69,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_peering_connection.html.markdown b/website/docs/cdktf/python/d/vpc_peering_connection.html.markdown index 0edc99cd039..66b93c09fcc 100644 --- a/website/docs/cdktf/python/d/vpc_peering_connection.html.markdown +++ b/website/docs/cdktf/python/d/vpc_peering_connection.html.markdown @@ -82,7 +82,7 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC Peering Connection will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` are also exported as result attributes. @@ -96,12 +96,12 @@ All of the argument attributes except `filter` are also exported as result attri * `requester` - Configuration block that describes [VPC Peering Connection] (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC. -#### Accepter and Requester Attributes Reference +#### Accepter and Requester Attribute Reference * `allow_remote_vpc_dns_resolution` - Indicates whether a local VPC can resolve public DNS hostnames to private IP addresses when queried from instances in a peer VPC. -#### CIDR block set Attributes Reference +#### CIDR block set Attribute Reference * `cidr_block` - CIDR block associated to the VPC of the specific VPC Peering Connection. @@ -111,4 +111,4 @@ private IP addresses when queried from instances in a peer VPC. - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_peering_connections.html.markdown b/website/docs/cdktf/python/d/vpc_peering_connections.html.markdown index cf1ede99bad..92b80c4cdb1 100644 --- a/website/docs/cdktf/python/d/vpc_peering_connections.html.markdown +++ b/website/docs/cdktf/python/d/vpc_peering_connections.html.markdown @@ -67,7 +67,7 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC Peering Connection will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` are also exported as result attributes. @@ -80,4 +80,4 @@ All of the argument attributes except `filter` are also exported as result attri - `read` - (Default `20m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_security_group_rule.html.markdown b/website/docs/cdktf/python/d/vpc_security_group_rule.html.markdown index f3146c01ec9..eead28cd30e 100644 --- a/website/docs/cdktf/python/d/vpc_security_group_rule.html.markdown +++ b/website/docs/cdktf/python/d/vpc_security_group_rule.html.markdown @@ -42,14 +42,14 @@ whose data will be exported as attributes. ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [`DescribeSecurityGroupRules`](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroupRules.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the security group rule. * `cidr_ipv4` - The destination IPv4 CIDR range. @@ -64,4 +64,4 @@ In addition to all arguments above, the following attributes are exported: * `tags` - A map of tags assigned to the resource. * `to_port` - (Optional) The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpc_security_group_rules.html.markdown b/website/docs/cdktf/python/d/vpc_security_group_rules.html.markdown index 89375f4b3ef..2e26786d404 100644 --- a/website/docs/cdktf/python/d/vpc_security_group_rules.html.markdown +++ b/website/docs/cdktf/python/d/vpc_security_group_rules.html.markdown @@ -49,8 +49,10 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. Security group rule IDs will be selected if any one of the given values match. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `ids` - List of all the security group rule IDs found. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpclattice_auth_policy.html.markdown b/website/docs/cdktf/python/d/vpclattice_auth_policy.html.markdown index 91ed217d504..230a06781b2 100644 --- a/website/docs/cdktf/python/d/vpclattice_auth_policy.html.markdown +++ b/website/docs/cdktf/python/d/vpclattice_auth_policy.html.markdown @@ -39,11 +39,11 @@ The following arguments are required: * `resource_identifier` - (Required) The ID or Amazon Resource Name (ARN) of the service network or service for which the policy is created. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `policy` - The auth policy. The policy string in JSON must not contain newlines or blank lines. * `state` - The state of the auth policy. The auth policy is only active when the auth type is set to AWS_IAM. If you provide a policy, then authentication and authorization decisions are made based on this policy and the client's IAM policy. If the Auth type is NONE, then, any auth policy you provide will remain inactive. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpclattice_listener.html.markdown b/website/docs/cdktf/python/d/vpclattice_listener.html.markdown index 8842d7a8c26..e4b03fe2ef7 100644 --- a/website/docs/cdktf/python/d/vpclattice_listener.html.markdown +++ b/website/docs/cdktf/python/d/vpclattice_listener.html.markdown @@ -41,9 +41,9 @@ The following arguments are required: * `service_identifier` - (Required) ID or Amazon Resource Name (ARN) of the service network * `listener_identifier` - (Required) ID or Amazon Resource Name (ARN) of the listener -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the listener. * `created_at` - The date and time that the listener was created. 
@@ -57,4 +57,4 @@ In addition to all arguments above, the following attributes are exported: * `service_id` - The ID of the service. * `tags` - List of tags associated with the listener. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpclattice_resource_policy.html.markdown b/website/docs/cdktf/python/d/vpclattice_resource_policy.html.markdown index 4ab4db691ab..192008cacb7 100644 --- a/website/docs/cdktf/python/d/vpclattice_resource_policy.html.markdown +++ b/website/docs/cdktf/python/d/vpclattice_resource_policy.html.markdown @@ -39,10 +39,10 @@ The following arguments are required: * `resource_arn` - (Required) Resource ARN of the resource for which a policy is retrieved. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `policy` - JSON-encoded string representation of the applied resource policy. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpclattice_service.html.markdown b/website/docs/cdktf/python/d/vpclattice_service.html.markdown index 5d1600fd2d2..f4e3551d29a 100644 --- a/website/docs/cdktf/python/d/vpclattice_service.html.markdown +++ b/website/docs/cdktf/python/d/vpclattice_service.html.markdown @@ -39,9 +39,9 @@ The following arguments are required: * `service_identifier` - (Required) ID or Amazon Resource Name (ARN) of the service network -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the service. * `auth_type` - Type of IAM policy. Either `NONE` or `AWS_IAM`. @@ -52,4 +52,4 @@ In addition to all arguments above, the following attributes are exported: * `status` - Status of the service. * `tags` - List of tags associated with the service. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/vpclattice_service_network.html.markdown b/website/docs/cdktf/python/d/vpclattice_service_network.html.markdown index e2d0fddd33a..76f883ce686 100644 --- a/website/docs/cdktf/python/d/vpclattice_service_network.html.markdown +++ b/website/docs/cdktf/python/d/vpclattice_service_network.html.markdown @@ -39,9 +39,9 @@ The following arguments are required: * `service_network_identifier` - (Required) Identifier of the network service. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the Service Network. * `auth_type` - Authentication type for the service network. Either `NONE` or `AWS_IAM`. @@ -52,4 +52,4 @@ In addition to all arguments above, the following attributes are exported: * `number_of_associated_services` - Number of services associated with this service network. * `number_of_associated_vpcs` - Number of VPCs associated with this service network. 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/vpcs.html.markdown b/website/docs/cdktf/python/d/vpcs.html.markdown
index 57fa4b09d42..20cbd4760ce 100644
--- a/website/docs/cdktf/python/d/vpcs.html.markdown
+++ b/website/docs/cdktf/python/d/vpcs.html.markdown
@@ -105,7 +105,9 @@ which take the following arguments:
 * `values` - (Required) Set of values that are accepted for the given field.
   A VPC will be selected if any one of the given values matches.
 
-## Attributes Reference
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
 
 * `id` - AWS Region.
 * `ids` - List of all the VPC Ids found.
@@ -116,4 +118,4 @@ which take the following arguments:
 
 - `read` - (Default `20m`)
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/vpn_gateway.html.markdown b/website/docs/cdktf/python/d/vpn_gateway.html.markdown
new file mode 100644
index 00000000000..18efed521d4
--- /dev/null
+++ b/website/docs/cdktf/python/d/vpn_gateway.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "VPN (Site-to-Site)"
+layout: "aws"
+page_title: "AWS: aws_vpn_gateway"
+description: |-
+  Provides details about a specific VPN gateway.
+---
+
+
+
+# Data Source: aws_vpn_gateway
+
+The VPN Gateway data source provides details about
+a specific VPN gateway.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_vpn_gateway import DataAwsVpnGateway, DataAwsVpnGatewayFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        selected = DataAwsVpnGateway(self, "selected",
+            filter=[DataAwsVpnGatewayFilter(
+                name="tag:Name",
+                values=["vpn-gw"]
+            )
+            ]
+        )
+        TerraformOutput(self, "vpn_gateway_id",
+            value=selected.id
+        )
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available VPN gateways.
+The given filters must match exactly one VPN gateway whose data will be exported as attributes.
+
+* `id` - (Optional) ID of the specific VPN Gateway to retrieve.
+
+* `state` - (Optional) State of the specific VPN Gateway to retrieve.
+
+* `availability_zone` - (Optional) Availability Zone of the specific VPN Gateway to retrieve.
+
+* `attached_vpc_id` - (Optional) ID of a VPC attached to the specific VPN Gateway to retrieve.
+
+* `filter` - (Optional) Custom filter block as described below.
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired VPN Gateway.
+
+* `amazon_side_asn` - (Optional) Autonomous System Number (ASN) for the Amazon side of the specific VPN Gateway to retrieve.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpnGateways.html).
+
+* `values` - (Required) Set of values that are accepted for the given field.
+  A VPN Gateway will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+All of the argument attributes are also exported as result attributes.
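+
+Since every argument is also exported, an attribute such as `state` can be read off a gateway found by any single argument above, for example `attached_vpc_id`. A minimal sketch, not `cdktf convert` output; the VPC ID is a placeholder:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_vpn_gateway import DataAwsVpnGateway
+class VpnGatewayByVpc(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Look up the VPN gateway by the VPC it is attached to
+        # (placeholder VPC ID; replace with a real one).
+        by_vpc = DataAwsVpnGateway(self, "by_vpc",
+            attached_vpc_id="vpc-12345678"
+        )
+        TerraformOutput(self, "vpn_gateway_state",
+            value=by_vpc.state
+        )
+```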
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/waf_ipset.html.markdown b/website/docs/cdktf/python/d/waf_ipset.html.markdown new file mode 100644 index 00000000000..656f2e9faf4 --- /dev/null +++ b/website/docs/cdktf/python/d/waf_ipset.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_ipset" +description: |- + Retrieves an AWS WAF IP set id. +--- + + + +# Data Source: aws_waf_ipset + +`aws_waf_ipset` Retrieves a WAF IP Set Resource Id. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_waf_ipset import DataAwsWafIpset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafIpset(self, "example", + name="tfWAFIPSet" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF IP set. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF IP set. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/waf_rate_based_rule.html.markdown b/website/docs/cdktf/python/d/waf_rate_based_rule.html.markdown new file mode 100644 index 00000000000..7ac29e81476 --- /dev/null +++ b/website/docs/cdktf/python/d/waf_rate_based_rule.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rate_based_rule" +description: |- + Retrieves an AWS WAF rate based rule id. +--- + + + +# Data Source: aws_waf_rate_based_rule + +`aws_waf_rate_based_rule` Retrieves a WAF Rate Based Rule Resource Id. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_waf_rate_based_rule import DataAwsWafRateBasedRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafRateBasedRule(self, "example", + name="tfWAFRateBasedRule" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF rate based rule. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF rate based rule. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/waf_rule.html.markdown b/website/docs/cdktf/python/d/waf_rule.html.markdown new file mode 100644 index 00000000000..4da3e6847f9 --- /dev/null +++ b/website/docs/cdktf/python/d/waf_rule.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rule" +description: |- + Retrieves an AWS WAF rule id. +--- + + + +# Data Source: aws_waf_rule + +`aws_waf_rule` Retrieves a WAF Rule Resource Id. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_waf_rule import DataAwsWafRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsWafRule(self, "example",
+            name="tfWAFRule"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the WAF rule.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the WAF rule.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/waf_subscribed_rule_group.html.markdown b/website/docs/cdktf/python/d/waf_subscribed_rule_group.html.markdown
new file mode 100644
index 00000000000..e88210eddc2
--- /dev/null
+++ b/website/docs/cdktf/python/d/waf_subscribed_rule_group.html.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_subscribed_rule_group"
+description: |-
+  Retrieves information about a Managed WAF Rule Group from AWS Marketplace.
+---
+
+
+
+# Data Source: aws_waf_subscribed_rule_group
+
+`aws_waf_subscribed_rule_group` retrieves information about a Managed WAF Rule Group from AWS Marketplace (needs to be subscribed to first).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_waf_subscribed_rule_group import DataAwsWafSubscribedRuleGroup
+from imports.aws.waf_web_acl import WafWebAcl, WafWebAclRules
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, default_action, metric_name, acl_name):
+        super().__init__(scope, name)
+        by_metric_name = DataAwsWafSubscribedRuleGroup(self, "by_metric_name",
+            metric_name="F5BotDetectionSignatures"
+        )
+        by_name = DataAwsWafSubscribedRuleGroup(self, "by_name",
+            name="F5 Bot Detection Signatures For AWS WAF"
+        )
+        WafWebAcl(self, "acl",
+            rules=[WafWebAclRules(
+                priority=1,
+                rule_id=Token.as_string(by_name.id),
+                type="GROUP"
+            ), WafWebAclRules(
+                priority=2,
+                rule_id=Token.as_string(by_metric_name.id),
+                type="GROUP"
+            )
+            ],
+            default_action=default_action,
+            metric_name=metric_name,
+            name=acl_name
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments: (at least one needs to be specified)
+
+* `name` - (Optional) Name of the WAF rule group.
+* `metric_name` - (Optional) Metric name of the WAF rule group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the WAF rule group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/waf_web_acl.html.markdown b/website/docs/cdktf/python/d/waf_web_acl.html.markdown
new file mode 100644
index 00000000000..a78c3f5302c
--- /dev/null
+++ b/website/docs/cdktf/python/d/waf_web_acl.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_web_acl"
+description: |-
+  Retrieves a WAF Web ACL id.
+--- + + + +# Data Source: aws_waf_web_acl + +`aws_waf_web_acl` Retrieves a WAF Web ACL Resource Id. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_waf_web_acl import DataAwsWafWebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafWebAcl(self, "example", + name="tfWAFWebACL" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF Web ACL. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF Web ACL. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/wafregional_ipset.html.markdown b/website/docs/cdktf/python/d/wafregional_ipset.html.markdown new file mode 100644 index 00000000000..d19a38670d9 --- /dev/null +++ b/website/docs/cdktf/python/d/wafregional_ipset.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_ipset" +description: |- + Retrieves an AWS WAF Regional IP set id. +--- + + + +# Data Source: aws_wafregional_ipset + +`aws_wafregional_ipset` Retrieves a WAF Regional IP Set Resource Id. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_wafregional_ipset import DataAwsWafregionalIpset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafregionalIpset(self, "example", + name="tfWAFRegionalIPSet" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF Regional IP set. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF Regional IP set. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/wafregional_rate_based_rule.html.markdown b/website/docs/cdktf/python/d/wafregional_rate_based_rule.html.markdown new file mode 100644 index 00000000000..40e937d97af --- /dev/null +++ b/website/docs/cdktf/python/d/wafregional_rate_based_rule.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_rate_based_rule" +description: |- + Retrieves an AWS WAF Regional rate based rule id. +--- + + + +# Data Source: aws_wafregional_rate_based_rule + +`aws_wafregional_rate_based_rule` Retrieves a WAF Regional Rate Based Rule Resource Id. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_wafregional_rate_based_rule import DataAwsWafregionalRateBasedRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsWafregionalRateBasedRule(self, "example",
+            name="tfWAFRegionalRateBasedRule"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the WAF Regional rate based rule.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the WAF Regional rate based rule.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/wafregional_rule.html.markdown b/website/docs/cdktf/python/d/wafregional_rule.html.markdown
new file mode 100644
index 00000000000..b37b6c49f20
--- /dev/null
+++ b/website/docs/cdktf/python/d/wafregional_rule.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_rule"
+description: |-
+  Retrieves an AWS WAF Regional rule id.
+---
+
+
+
+# Data Source: aws_wafregional_rule
+
+`aws_wafregional_rule` Retrieves a WAF Regional Rule Resource Id.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_wafregional_rule import DataAwsWafregionalRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsWafregionalRule(self, "example",
+            name="tfWAFRegionalRule"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the WAF Regional rule.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the WAF Regional rule.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/wafregional_subscribed_rule_group.html.markdown b/website/docs/cdktf/python/d/wafregional_subscribed_rule_group.html.markdown
new file mode 100644
index 00000000000..5fbbc6b9651
--- /dev/null
+++ b/website/docs/cdktf/python/d/wafregional_subscribed_rule_group.html.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_subscribed_rule_group"
+description: |-
+  Retrieves information about a Managed WAF Rule Group from AWS Marketplace for use in WAF Regional.
+---
+
+
+
+# Data Source: aws_wafregional_subscribed_rule_group
+
+`aws_wafregional_subscribed_rule_group` retrieves information about a Managed WAF Rule Group from AWS Marketplace for use in WAF Regional (needs to be subscribed to first).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_wafregional_subscribed_rule_group import DataAwsWafregionalSubscribedRuleGroup
+from imports.aws.wafregional_web_acl import WafregionalWebAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, default_action, metric_name, acl_name):
+        super().__init__(scope, name)
+        by_metric_name = DataAwsWafregionalSubscribedRuleGroup(self, "by_metric_name",
+            metric_name="F5BotDetectionSignatures"
+        )
+        by_name = DataAwsWafregionalSubscribedRuleGroup(self, "by_name",
+            name="F5 Bot Detection Signatures For AWS WAF"
+        )
+        WafregionalWebAcl(self, "acl",
+            rules=[{
+                "priority": 1,
+                "rule_id": by_name.id,
+                "type": "GROUP"
+            }, {
+                "priority": 2,
+                "rule_id": by_metric_name.id,
+                "type": "GROUP"
+            }
+            ],
+            default_action=default_action,
+            metric_name=metric_name,
+            name=acl_name
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments: (at least one needs to be specified)
+
+* `name` - (Optional) Name of the WAF rule group.
+* `metric_name` - (Optional) Metric name of the WAF rule group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the WAF rule group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/wafregional_web_acl.html.markdown b/website/docs/cdktf/python/d/wafregional_web_acl.html.markdown
new file mode 100644
index 00000000000..f11ec7ce90e
--- /dev/null
+++ b/website/docs/cdktf/python/d/wafregional_web_acl.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_web_acl"
+description: |-
+  Retrieves a WAF Regional Web ACL id.
+---
+
+
+
+# Data Source: aws_wafregional_web_acl
+
+`aws_wafregional_web_acl` Retrieves a WAF Regional Web ACL Resource Id.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_wafregional_web_acl import DataAwsWafregionalWebAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsWafregionalWebAcl(self, "example",
+            name="tfWAFRegionalWebACL"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the WAF Regional Web ACL.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the WAF Regional Web ACL.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/wafv2_ip_set.html.markdown b/website/docs/cdktf/python/d/wafv2_ip_set.html.markdown
new file mode 100644
index 00000000000..7dc4da49690
--- /dev/null
+++ b/website/docs/cdktf/python/d/wafv2_ip_set.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_ip_set"
+description: |-
+  Retrieves the summary of a WAFv2 IP Set.
+---
+
+
+
+# Data Source: aws_wafv2_ip_set
+
+Retrieves the summary of a WAFv2 IP Set.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_wafv2_ip_set import DataAwsWafv2IpSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafv2IpSet(self, "example", + name="some-ip-set", + scope="REGIONAL" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 IP Set. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `addresses` - An array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. +* `arn` - ARN of the entity. +* `description` - Description of the set that helps with identification. +* `id` - Unique identifier for the set. +* `ip_address_version` - IP address version of the set. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/wafv2_regex_pattern_set.html.markdown b/website/docs/cdktf/python/d/wafv2_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..7ae29ca4403 --- /dev/null +++ b/website/docs/cdktf/python/d/wafv2_regex_pattern_set.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_regex_pattern_set" +description: |- + Retrieves the summary of a WAFv2 Regex Pattern Set. +--- + + + +# Data Source: aws_wafv2_regex_pattern_set + +Retrieves the summary of a WAFv2 Regex Pattern Set. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_wafv2_regex_pattern_set import DataAwsWafv2RegexPatternSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafv2RegexPatternSet(self, "example", + name="some-regex-pattern-set", + scope="REGIONAL" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 Regex Pattern Set. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the entity. +* `description` - Description of the set that helps with identification. +* `id` - Unique identifier for the set. +* `regular_expression` - One or more blocks of regular expression patterns that AWS WAF is searching for. See [Regular Expression](#regular-expression) below for details. + +### Regular Expression + +Each `regular_expression` supports the following argument: + +* `regex_string` - (Required) String representing the regular expression, see the AWS WAF [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-regex-pattern-set-creating.html) for more information. 
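+
+As an illustration of reading these blocks, and assuming the generated bindings expose computed lists through the usual CDKTF indexed `get()` accessor, the first pattern in the set could be surfaced as an output. A minimal sketch, not `cdktf convert` output:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_wafv2_regex_pattern_set import DataAwsWafv2RegexPatternSet
+class RegexPatternSetLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsWafv2RegexPatternSet(self, "example",
+            name="some-regex-pattern-set",
+            scope="REGIONAL"
+        )
+        # Computed list attributes are read through indexed accessors
+        # (assumed codegen pattern; verify against the generated bindings).
+        TerraformOutput(self, "first_regex_string",
+            value=example.regular_expression.get(0).regex_string
+        )
+```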
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/wafv2_rule_group.html.markdown b/website/docs/cdktf/python/d/wafv2_rule_group.html.markdown new file mode 100644 index 00000000000..8b5becfb8be --- /dev/null +++ b/website/docs/cdktf/python/d/wafv2_rule_group.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_rule_group" +description: |- + Retrieves the summary of a WAFv2 Rule Group. +--- + + + +# Data Source: aws_wafv2_rule_group + +Retrieves the summary of a WAFv2 Rule Group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_wafv2_rule_group import DataAwsWafv2RuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafv2RuleGroup(self, "example", + name="some-rule-group", + scope="REGIONAL" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 Rule Group. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the entity. +* `description` - Description of the rule group that helps with identification. +* `id` - Unique identifier of the rule group. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/wafv2_web_acl.html.markdown b/website/docs/cdktf/python/d/wafv2_web_acl.html.markdown new file mode 100644 index 00000000000..4838c169ca3 --- /dev/null +++ b/website/docs/cdktf/python/d/wafv2_web_acl.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_web_acl" +description: |- + Retrieves the summary of a WAFv2 Web ACL. +--- + + + +# Data Source: aws_wafv2_web_acl + +Retrieves the summary of a WAFv2 Web ACL. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_wafv2_web_acl import DataAwsWafv2WebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWafv2WebAcl(self, "example", + name="some-web-acl", + scope="REGIONAL" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 Web ACL. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the entity. +* `description` - Description of the WebACL that helps with identification. 
+* `id` - Unique identifier of the WebACL. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/workspaces_bundle.html.markdown b/website/docs/cdktf/python/d/workspaces_bundle.html.markdown new file mode 100644 index 00000000000..ae30ea96fc4 --- /dev/null +++ b/website/docs/cdktf/python/d/workspaces_bundle.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_bundle" +description: |- + Retrieve information about an AWS WorkSpaces bundle. +--- + + + +# Data Source: aws_workspaces_bundle + +Retrieve information about an AWS WorkSpaces bundle. + +## Example Usage + +### By ID + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_workspaces_bundle import DataAwsWorkspacesBundle +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWorkspacesBundle(self, "example", + bundle_id="wsb-b0s22j3d7" + ) +``` + +### By Owner & Name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_workspaces_bundle import DataAwsWorkspacesBundle +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWorkspacesBundle(self, "example", + name="Value with Windows 10 and Office 2016", + owner="AMAZON" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bundle_id` – (Optional) ID of the bundle. +* `owner` – (Optional) Owner of the bundles. You have to leave it blank for own bundles. You cannot combine this parameter with `bundle_id`. +* `name` – (Optional) Name of the bundle. You cannot combine this parameter with `bundle_id`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `description` – The description of the bundle. +* `bundle_id` – The ID of the bundle. +* `name` – The name of the bundle. +* `owner` – The owner of the bundle. +* `compute_type` – The compute type. See supported fields below. +* `root_storage` – The root volume. See supported fields below. +* `user_storage` – The user storage. See supported fields below. + +### `compute_type` + +* `name` - Name of the compute type. + +### `root_storage` + +* `capacity` - Size of the root volume. + +### `user_storage` + +* `capacity` - Size of the user storage. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/d/workspaces_directory.html.markdown b/website/docs/cdktf/python/d/workspaces_directory.html.markdown new file mode 100644 index 00000000000..8ee6633d1e3 --- /dev/null +++ b/website/docs/cdktf/python/d/workspaces_directory.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_directory" +description: |- + Retrieve information about an AWS WorkSpaces directory. +--- + + + +# Data Source: aws_workspaces_directory + +Retrieve information about an AWS WorkSpaces directory. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_workspaces_directory import DataAwsWorkspacesDirectory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsWorkspacesDirectory(self, "example",
+            directory_id="d-9067783251"
+        )
+```
+
+## Argument Reference
+
+* `directory_id` - (Required) Directory identifier for registration in WorkSpaces service.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - WorkSpaces directory identifier.
+* `alias` - Directory alias.
+* `customer_user_name` - User name for the service account.
+* `directory_name` - Name of the directory.
+* `directory_type` - Directory type.
+* `dns_ip_addresses` - IP addresses of the DNS servers for the directory.
+* `iam_role_id` - Identifier of the IAM role. This is the role that allows Amazon WorkSpaces to make calls to other services, such as Amazon EC2, on your behalf.
+* `ip_group_ids` - Identifiers of the IP access control groups associated with the directory.
+* `registration_code` - Registration code for the directory. This is the code that users enter in their Amazon WorkSpaces client application to connect to the directory.
+* `self_service_permissions` – The permissions to enable or disable self-service capabilities.
+* `subnet_ids` - Identifiers of the subnets where the directory resides.
+* `tags` – A map of tags assigned to the WorkSpaces directory.
+* `workspace_creation_properties` – The default properties that are used for creating WorkSpaces. Defined below.
+* `workspace_access_properties` – Specifies which devices and operating systems users can use to access their WorkSpaces. Defined below.
+* `workspace_security_group_id` - The identifier of the security group that is assigned to new WorkSpaces.
+
+### self_service_permissions
+
+* `change_compute_type` – Whether WorkSpaces directory users can change the compute type (bundle) for their workspace.
+* `increase_volume_size` – Whether WorkSpaces directory users can increase the volume size of the drives on their workspace.
+* `rebuild_workspace` – Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state.
+* `restart_workspace` – Whether WorkSpaces directory users can restart their workspace.
+* `switch_running_mode` – Whether WorkSpaces directory users can switch the running mode of their workspace.
+
+### workspace_access_properties
+
+* `device_type_android` – Indicates whether users can use Android devices to access their WorkSpaces.
+* `device_type_chromeos` – Indicates whether users can use Chromebooks to access their WorkSpaces.
+* `device_type_ios` – Indicates whether users can use iOS devices to access their WorkSpaces.
+* `device_type_linux` – Indicates whether users can use Linux clients to access their WorkSpaces.
+* `device_type_osx` – Indicates whether users can use macOS clients to access their WorkSpaces.
+* `device_type_web` – Indicates whether users can access their WorkSpaces through a web browser.
+* `device_type_windows` – Indicates whether users can use Windows clients to access their WorkSpaces.
+* `device_type_zeroclient` – Indicates whether users can use zero client devices to access their WorkSpaces.
+
+### workspace_creation_properties
+
+* `custom_security_group_id` – The identifier of your custom security group. It must belong to the same VPC in which the WorkSpaces reside.
+* `default_ou` – The default organizational unit (OU) for your WorkSpace directories.
+* `enable_internet_access` – Indicates whether internet access is enabled for your WorkSpaces.
+* `enable_maintenance_mode` – Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html).
+* `user_enabled_as_local_administrator` – Indicates whether users are local administrators of their WorkSpaces.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/workspaces_image.html.markdown b/website/docs/cdktf/python/d/workspaces_image.html.markdown
new file mode 100644
index 00000000000..7422a796c5d
--- /dev/null
+++ b/website/docs/cdktf/python/d/workspaces_image.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_image"
+description: |-
+  Get information about a WorkSpaces image.
+---
+
+
+
+# Data Source: aws_workspaces_image
+
+Use this data source to get information about a WorkSpaces image.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_workspaces_image import DataAwsWorkspacesImage
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataAwsWorkspacesImage(self, "example",
+            image_id="wsi-ten5h0y19"
+        )
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `image_id` – (Required) ID of the image.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` – The name of the image.
+* `description` – The description of the image.
+* `os` – The operating system that the image is running.
+* `required_tenancy` – Specifies whether the image is running on dedicated hardware. When Bring Your Own License (BYOL) is enabled, this value is set to DEDICATED. For more information, see [Bring Your Own Windows Desktop Images](https://docs.aws.amazon.com/workspaces/latest/adminguide/byol-windows-images.html).
+* `state` – The status of the image.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/d/workspaces_workspace.html.markdown b/website/docs/cdktf/python/d/workspaces_workspace.html.markdown
new file mode 100644
index 00000000000..18a06c58133
--- /dev/null
+++ b/website/docs/cdktf/python/d/workspaces_workspace.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_workspace"
+description: |-
+  Get information about a WorkSpace in AWS Workspaces Service.
+---
+
+
+
+# Data Source: aws_workspaces_workspace
+
+Use this data source to get information about a workspace in [AWS Workspaces](https://docs.aws.amazon.com/workspaces/latest/adminguide/amazon-workspaces.html) Service.
+ +## Example Usage + +### Filter By Workspace ID + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_workspaces_workspace import DataAwsWorkspacesWorkspace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWorkspacesWorkspace(self, "example", + workspace_id="ws-cj5xcxsz5" + ) +``` + +### Filter By Directory ID & User Name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_workspaces_workspace import DataAwsWorkspacesWorkspace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataAwsWorkspacesWorkspace(self, "example", + directory_id="d-9967252f57", + user_name="Example" + ) +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bundle_id` - (Optional) ID of the bundle for the WorkSpace. +* `directory_id` - (Optional) ID of the directory for the WorkSpace. You have to specify `user_name` along with `directory_id`. You cannot combine this parameter with `workspace_id`. +* `root_volume_encryption_enabled` - (Optional) Indicates whether the data stored on the root volume is encrypted. +* `tags` - (Optional) Tags for the WorkSpace. +* `user_name` – (Optional) User name of the user for the WorkSpace. This user name must exist in the directory for the WorkSpace. You cannot combine this parameter with `workspace_id`. +* `user_volume_encryption_enabled` – (Optional) Indicates whether the data stored on the user volume is encrypted. +* `volume_encryption_key` – (Optional) Symmetric AWS KMS customer master key (CMK) used to encrypt data stored on your WorkSpace. Amazon WorkSpaces does not support asymmetric CMKs. +* `workspace_id` - (Optional) ID of the WorkSpace. You cannot combine this parameter with `directory_id`. +* `workspace_properties` – (Optional) WorkSpace properties. + +`workspace_properties` supports the following: + +* `compute_type_name` – (Optional) Compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `VALUE`, `STANDARD`, `PERFORMANCE`, `POWER`, `GRAPHICS`, `POWERPRO` and `GRAPHICSPRO`. +* `root_volume_size_gib` – (Optional) Size of the root volume. +* `running_mode` – (Optional) Running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `AUTO_STOP` and `ALWAYS_ON`. +* `running_mode_auto_stop_timeout_in_minutes` – (Optional) Time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals. +* `user_volume_size_gib` – (Optional) Size of the user storage. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Workspaces ID. +* `ip_address` - IP address of the WorkSpace. +* `computer_name` - Name of the WorkSpace, as seen by the operating system. +* `state` - Operational state of the WorkSpace. 
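+
+Combining the two, connection details for a workspace found by directory and user can be published as outputs. A minimal sketch, not `cdktf convert` output, reusing the placeholder directory ID and user name from the example above:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.data_aws_workspaces_workspace import DataAwsWorkspacesWorkspace
+class WorkspaceLookup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsWorkspacesWorkspace(self, "example",
+            directory_id="d-9967252f57",
+            user_name="Example"
+        )
+        # ip_address and state are read-only attributes of the data source.
+        TerraformOutput(self, "workspace_ip_address",
+            value=example.ip_address
+        )
+        TerraformOutput(self, "workspace_state",
+            value=example.state
+        )
+```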
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/index.html.markdown b/website/docs/cdktf/python/index.html.markdown new file mode 100644 index 00000000000..680c63d09db --- /dev/null +++ b/website/docs/cdktf/python/index.html.markdown @@ -0,0 +1,781 @@ +--- +layout: "aws" +page_title: "Provider: AWS" +description: |- + Use the Amazon Web Services (AWS) provider to interact with the many resources supported by AWS. You must configure the provider with the proper credentials before you can use it. +--- + + + +# AWS Provider + +Use the Amazon Web Services (AWS) provider to interact with the +many resources supported by AWS. You must configure the provider +with the proper credentials before you can use it. + +Use the navigation to the left to read about the available resources. There are currently 1236 resources and 510 data sources available in the provider. + +To learn the basics of Terraform using this provider, follow the +hands-on [get started tutorials](https://learn.hashicorp.com/tutorials/terraform/infrastructure-as-code?in=terraform/aws-get-started&utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS). Interact with AWS services, +including Lambda, RDS, and IAM by following the [AWS services +tutorials](https://learn.hashicorp.com/collections/terraform/aws?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS). + +## Example Usage + +Terraform 0.13 and later: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.provider import AwsProvider +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AwsProvider(self, "aws", + region="us-east-1" + ) + Vpc(self, "example", + cidr_block="10.0.0.0/16" + ) +``` + +Terraform 0.12 and earlier: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.provider import AwsProvider +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AwsProvider(self, "aws", + region="us-east-1" + ) + Vpc(self, "example", + cidr_block="10.0.0.0/16" + ) +``` + +## Authentication and Configuration + +Configuration for the AWS Provider can be derived from several sources, +which are applied in the following order: + +1. Parameters in the provider configuration +1. Environment variables +1. Shared credentials files +1. Shared configuration files +1. Container credentials +1. Instance profile credentials and region + +This order matches the precedence used by the +[AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence) +and the [AWS SDKs](https://aws.amazon.com/tools/). + +The AWS Provider supports assuming an IAM role, either in +the provider configuration block parameter `assume_role` +or in [a named profile](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html). 
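+
+For instance, a role-assuming named profile is selected with the ordinary `profile` parameter. A minimal sketch, not `cdktf convert` output; the profile name is a placeholder, and the role itself would be defined in the shared configuration file with keys such as `role_arn` and `source_profile`:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.provider import AwsProvider
+class AssumeRoleViaProfile(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The role assumption itself (role_arn, source_profile) is
+        # configured in ~/.aws/config under [profile assume-role-profile].
+        AwsProvider(self, "aws",
+            profile="assume-role-profile"
+        )
+```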
+
+The AWS Provider supports assuming an IAM role using [web identity federation and OpenID Connect (OIDC)](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html#cli-configure-role-oidc).
+This can be configured either using environment variables or in a named profile.
+
+When using a named profile, the AWS Provider also supports [sourcing credentials from an external process](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sourcing-external.html).
+
+### Provider Configuration
+
+!> **Warning:** Hard-coded credentials are not recommended in any Terraform
+configuration and risk secret leakage should this file ever be committed to a
+public version control system.
+
+Credentials can be provided by adding an `access_key`, `secret_key`, and optionally `token` to the `aws` provider block.
+
+Usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            access_key="my-access-key",
+            region="us-west-2",
+            secret_key="my-secret-key"
+        )
+```
+
+Other settings related to authorization can be configured, such as:
+
+* `profile`
+* `shared_config_files`
+* `shared_credentials_files`
+
+### Environment Variables
+
+Credentials can be provided by using the `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and optionally `AWS_SESSION_TOKEN` environment variables.
+The region can be set using the `AWS_REGION` or `AWS_DEFAULT_REGION` environment variables.
+
+For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws")
+```
+
+```console
+% export AWS_ACCESS_KEY_ID="anaccesskey"
+% export AWS_SECRET_ACCESS_KEY="asecretkey"
+% export AWS_REGION="us-west-2"
+% terraform plan
+```
+
+Other environment variables related to authorization are:
+
+* `AWS_PROFILE`
+* `AWS_CONFIG_FILE`
+* `AWS_SHARED_CREDENTIALS_FILE`
+
+### Shared Configuration and Credentials Files
+
+The AWS Provider can source credentials and other settings from the [shared configuration and credentials files](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html).
+By default, these files are located at `$HOME/.aws/config` and `$HOME/.aws/credentials` on Linux and macOS,
+and `"%USERPROFILE%\.aws\config"` and `"%USERPROFILE%\.aws\credentials"` on Windows.
+
+If no named profile is specified, the `default` profile is used.
+Use the `profile` parameter or `AWS_PROFILE` environment variable to specify a named profile.
+
+The locations of the shared configuration and credentials files can be configured using either
+the parameters `shared_config_files` and `shared_credentials_files`
+or the environment variables `AWS_CONFIG_FILE` and `AWS_SHARED_CREDENTIALS_FILE`.
+
+For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            profile="customprofile",
+            shared_config_files=["/Users/tf_user/.aws/conf"],
+            shared_credentials_files=["/Users/tf_user/.aws/creds"]
+        )
+```
+
+### Container Credentials
+
+If you're running Terraform on CodeBuild or ECS and have configured an [IAM Task Role](http://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html), Terraform can use the container's Task Role. This support is based on the underlying `AWS_CONTAINER_CREDENTIALS_RELATIVE_URI` and `AWS_CONTAINER_CREDENTIALS_FULL_URI` environment variables being automatically set by those services or manually for advanced usage.
+
+If you're running Terraform on EKS and have configured [IAM Roles for Service Accounts (IRSA)](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html), Terraform can use the pod's role. This support is based on the underlying `AWS_ROLE_ARN` and `AWS_WEB_IDENTITY_TOKEN_FILE` environment variables being automatically set by Kubernetes or manually for advanced usage.
+
+### Instance profile credentials and region
+
+When the AWS Provider is running on an EC2 instance with an IAM Instance Profile set,
+the provider can source credentials from the [EC2 Instance Metadata Service](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html#instance-metadata-security-credentials).
+Both IMDS v1 and IMDS v2 are supported.
+
+A custom endpoint for the metadata service can be provided using the `ec2_metadata_service_endpoint` parameter or the `AWS_EC2_METADATA_SERVICE_ENDPOINT` environment variable.
+
+### Assuming an IAM Role
+
+If provided with a role ARN, the AWS Provider will attempt to assume this role
+using the supplied credentials.
+
+Usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider, AwsProviderAssumeRole
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            assume_role=[AwsProviderAssumeRole(
+                external_id="EXTERNAL_ID",
+                role_arn="arn:aws:iam::123456789012:role/ROLE_NAME",
+                session_name="SESSION_NAME"
+            )
+            ]
+        )
+```
+
+> **Hands-on:** Try the [Use AssumeRole to Provision AWS Resources Across Accounts](https://learn.hashicorp.com/tutorials/terraform/aws-assumerole) tutorial.
+
+### Assuming an IAM Role Using A Web Identity
+
+If provided with a role ARN and a token from a web identity provider,
+the AWS Provider will attempt to assume this role using the supplied credentials.
+
+Usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider, AwsProviderAssumeRoleWithWebIdentity
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            assume_role_with_web_identity=[AwsProviderAssumeRoleWithWebIdentity(
+                role_arn="arn:aws:iam::123456789012:role/ROLE_NAME",
+                session_name="SESSION_NAME",
+                web_identity_token_file="/Users/tf_user/secrets/web-identity-token"
+            )
+            ]
+        )
+```
+
+### Using an External Credentials Process
+
+To use an [external process to source credentials](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sourcing-external.html),
+the process must be configured in a named profile, including the `default` profile.
+The profile is configured in a shared configuration file.
+
+For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            profile="customprofile"
+        )
+```
+
+```ini
+[profile customprofile]
+credential_process = custom-process --username jdoe
+```
+
+## AWS Configuration Reference
+
+|Setting|Provider|[Environment Variable][envvars]|[Shared Config][config]|
+|-------|--------|-------------------------------|-----------------------|
+|Access Key ID|`access_key`|`AWS_ACCESS_KEY_ID`|`aws_access_key_id`|
+|Secret Access Key|`secret_key`|`AWS_SECRET_ACCESS_KEY`|`aws_secret_access_key`|
+|Session Token|`token`|`AWS_SESSION_TOKEN`|`aws_session_token`|
+|Region|`region`|`AWS_REGION` or `AWS_DEFAULT_REGION`|`region`|
+|Custom CA Bundle|`custom_ca_bundle`|`AWS_CA_BUNDLE`|`ca_bundle`|
+|EC2 IMDS Endpoint|`ec2_metadata_service_endpoint`|`AWS_EC2_METADATA_SERVICE_ENDPOINT`|N/A|
+|EC2 IMDS Endpoint Mode|`ec2_metadata_service_endpoint_mode`|`AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE`|N/A|
+|Disable EC2 IMDS|`skip_metadata_api_check`|`AWS_EC2_METADATA_DISABLED`|N/A|
+|HTTP Proxy|`http_proxy`|`HTTP_PROXY` or `HTTPS_PROXY`|N/A|
+|Max Retries|`max_retries`|`AWS_MAX_ATTEMPTS`|`max_attempts`|
+|Profile|`profile`|`AWS_PROFILE` or `AWS_DEFAULT_PROFILE`|N/A|
+|Retry Mode|`retry_mode`|`AWS_RETRY_MODE`|`retry_mode`|
+|Shared Config Files|`shared_config_files`|`AWS_CONFIG_FILE`|N/A|
+|Shared Credentials Files|`shared_credentials_files`|`AWS_SHARED_CREDENTIALS_FILE`|N/A|
+|Use DualStack Endpoints|`use_dualstack_endpoint`|`AWS_USE_DUALSTACK_ENDPOINT`|`use_dualstack_endpoint`|
+|Use FIPS Endpoints|`use_fips_endpoint`|`AWS_USE_FIPS_ENDPOINT`|`use_fips_endpoint`|
+
+### Assume Role Configuration Reference
+
+Configuration for assuming an IAM role can be done using provider configuration or a named profile in shared configuration files.
+In the provider, all parameters for assuming an IAM role are set in the `assume_role` block.
+
+See the [assume role documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html) for more information.
+
+|Setting|Provider|[Environment Variable][envvars]|[Shared Config][config]|
+|-------|--------|--------|-----------------------|
+|Role ARN|`role_arn`|`AWS_ROLE_ARN`|`role_arn`|
+|Duration|`duration`|N/A|`duration_seconds`|
+|External ID|`external_id`|N/A|`external_id`|
+|Policy|`policy`|N/A|N/A|
+|Policy ARNs|`policy_arns`|N/A|N/A|
+|Session Name|`session_name`|`AWS_ROLE_SESSION_NAME`|`role_session_name`|
+|Source Identity|`source_identity`|N/A|N/A|
+|Tags|`tags`|N/A|N/A|
+|Transitive Tag Keys|`transitive_tag_keys`|N/A|N/A|
+
+[envvars]: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html
+[config]: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-settings
+
+### Assume Role with Web Identity Configuration Reference
+
+Configuration for assuming an IAM role using web identity federation can be done using provider configuration, environment variables, or a named profile in shared configuration files.
+In the provider, all parameters for assuming an IAM role are set in the `assume_role_with_web_identity` block.
+
+See the assume role documentation [section on web identities](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html#cli-configure-role-oidc) for more information.
+
+|Setting|Provider|[Environment Variable][envvars]|[Shared Config][config]|
+|-------|--------|--------|-----------------------|
+|Role ARN|`role_arn`|`AWS_ROLE_ARN`|`role_arn`|
+|Web Identity Token|`web_identity_token`|N/A|N/A|
+|Web Identity Token File|`web_identity_token_file`|`AWS_WEB_IDENTITY_TOKEN_FILE`|`web_identity_token_file`|
+|Duration|`duration`|N/A|`duration_seconds`|
+|Policy|`policy`|N/A|`policy`|
+|Policy ARNs|`policy_arns`|N/A|`policy_arns`|
+|Session Name|`session_name`|`AWS_ROLE_SESSION_NAME`|`role_session_name`|
+
+## Custom User-Agent Information
+
+By default, the underlying AWS client used by the Terraform AWS Provider creates requests with User-Agent headers including information about Terraform and AWS SDK for Go versions. To provide additional information in the User-Agent headers, the `TF_APPEND_USER_AGENT` environment variable can be set and its value will be directly added to HTTP requests. E.g.,
+
+```console
+% export TF_APPEND_USER_AGENT="JenkinsAgent/i-12345678 BuildID/1234 (Optional Extra Information)"
+```
+
+## Argument Reference
+
+In addition to [generic `provider` arguments](https://www.terraform.io/docs/configuration/providers.html)
+(e.g., `alias` and `version`), the following arguments are supported in the AWS
+`provider` block:
+
+* `access_key` - (Optional) AWS access key. Can also be set with the `AWS_ACCESS_KEY_ID` environment variable, or via a shared credentials file if `profile` is specified. See also `secret_key`.
+* `allowed_account_ids` - (Optional) List of allowed AWS account IDs to prevent you from mistakenly using an incorrect one (and potentially end up destroying a live environment). Conflicts with `forbidden_account_ids`.
+* `assume_role` - (Optional) Configuration block for assuming an IAM role. See the [`assume_role` Configuration Block](#assume_role-configuration-block) section below. Only one `assume_role` block may be in the configuration.
+* `assume_role_with_web_identity` - (Optional) Configuration block for assuming an IAM role using a web identity. See the [`assume_role_with_web_identity` Configuration Block](#assume_role_with_web_identity-configuration-block) section below. Only one `assume_role_with_web_identity` block may be in the configuration.
+* `custom_ca_bundle` - (Optional) File containing custom root and intermediate certificates.
+  Can also be set using the `AWS_CA_BUNDLE` environment variable.
+  Setting `ca_bundle` in the shared config file is not supported.
+* `default_tags` - (Optional) Configuration block with resource tag settings to apply across all resources handled by this provider (see the [Terraform multiple provider instances documentation](/docs/configuration/providers.html#alias-multiple-provider-instances) for more information about additional provider configurations). This is designed to replace redundant per-resource `tags` configurations. Provider tags can be overridden with new values, but not excluded from specific resources. To override provider tag values, use the `tags` argument within a resource to configure new tag values for matching keys. See the [`default_tags`](#default_tags-configuration-block) Configuration Block section below for example usage and available arguments. This functionality is supported in all resources that implement `tags`, with the exception of the `aws_autoscaling_group` resource.
+* `ec2_metadata_service_endpoint` - (Optional) Address of the EC2 metadata service (IMDS) endpoint to use. Can also be set with the `AWS_EC2_METADATA_SERVICE_ENDPOINT` environment variable.
+* `ec2_metadata_service_endpoint_mode` - (Optional) Mode to use in communicating with the metadata service. Valid values are `IPv4` and `IPv6`. Can also be set with the `AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE` environment variable.
+* `endpoints` - (Optional) Configuration block for customizing service endpoints. See the [Custom Service Endpoints Guide](/docs/providers/aws/guides/custom-service-endpoints.html) for more information about connecting to alternate AWS endpoints or AWS compatible solutions. See also `use_fips_endpoint`.
+* `forbidden_account_ids` - (Optional) List of forbidden AWS account IDs to prevent you from mistakenly using the wrong one (and potentially destroying a live environment). Conflicts with `allowed_account_ids`.
+* `http_proxy` - (Optional) Address of an HTTP proxy to use when accessing the AWS API. Can also be set using the `HTTP_PROXY` or `HTTPS_PROXY` environment variables.
+* `ignore_tags` - (Optional) Configuration block with resource tag settings to ignore across all resources handled by this provider (except any individual service tag resources such as `aws_ec2_tag`) for situations where external systems are managing certain resource tags. Arguments to the configuration block are described below in the `ignore_tags` Configuration Block section. See the [Terraform multiple provider instances documentation](https://www.terraform.io/docs/configuration/providers.html#alias-multiple-provider-configurations) for more information about additional provider configurations.
+* `insecure` - (Optional) Whether to explicitly allow the provider to perform "insecure" SSL requests. If omitted, the default value is `false`.
+* `max_retries` - (Optional) Maximum number of times an API call is retried when AWS throttles requests or you experience transient failures.
+  The delay between the subsequent API calls increases exponentially.
+  If omitted, the default value is `25`.
+  Can also be set using the environment variable `AWS_MAX_ATTEMPTS`
+  and the shared configuration parameter `max_attempts`.
+* `profile` - (Optional) AWS profile name as set in the shared configuration and credentials files.
+ Can also be set using either the environment variables `AWS_PROFILE` or `AWS_DEFAULT_PROFILE`. +* `region` - (Optional) AWS region where the provider will operate. The region must be set. + Can also be set with either the `AWS_REGION` or `AWS_DEFAULT_REGION` environment variables, + or via a shared config file parameter `region` if `profile` is used. + If credentials are retrieved from the EC2 Instance Metadata Service, the region can also be retrieved from the metadata. +* `retry_mode` - (Optional) Specifies how retries are attempted. + Valid values are `standard` and `adaptive`. + Can also be configured using the `AWS_RETRY_MODE` environment variable or the shared config file parameter `retry_mode`. +* `s3_use_path_style` - (Optional) Whether to enable the request to use path-style addressing, i.e., `https://s3.amazonaws.com/BUCKET/KEY`. By default, the S3 client will use virtual hosted bucket addressing, `https://BUCKET.s3.amazonaws.com/KEY`, when possible. Specific to the Amazon S3 service. +* `secret_key` - (Optional) AWS secret key. Can also be set with the `AWS_SECRET_ACCESS_KEY` environment variable, or via a shared configuration and credentials files if `profile` is used. See also `access_key`. +* `shared_config_files` - (Optional) List of paths to AWS shared config files. If not set, the default is `[~/.aws/config]`. A single value can also be set with the `AWS_CONFIG_FILE` environment variable. +* `shared_credentials_files` - (Optional) List of paths to the shared credentials file. If not set and a profile is used, the default value is `[~/.aws/credentials]`. A single value can also be set with the `AWS_SHARED_CREDENTIALS_FILE` environment variable. +* `skip_credentials_validation` - (Optional) Whether to skip credentials validation via the STS API. This can be useful for testing and for AWS API implementations that do not have STS available. +* `skip_metadata_api_check` - (Optional) Whether to skip the AWS Metadata API check. Useful for AWS API implementations that do not have a metadata API endpoint. Setting to `true` prevents Terraform from authenticating via the Metadata API. You may need to use other authentication methods like static credentials, configuration variables, or environment variables. +* `skip_region_validation` - (Optional) Whether to skip validating the region. Useful for AWS-like implementations that use their own region names or to bypass the validation for regions that aren't publicly available yet. +* `skip_requesting_account_id` - (Optional) Whether to skip requesting the account ID. Useful for AWS API implementations that do not have the IAM, STS API, or metadata API. 
When set to `true` and not determined previously, returns an empty account ID when manually constructing ARN attributes with the following: + - [`aws_api_gateway_deployment` resource](/docs/providers/aws/r/api_gateway_deployment.html) + - [`aws_api_gateway_rest_api` resource](/docs/providers/aws/r/api_gateway_rest_api.html) + - [`aws_api_gateway_stage` resource](/docs/providers/aws/r/api_gateway_stage.html) + - [`aws_apigatewayv2_api` data source](/docs/providers/aws/d/apigatewayv2_api.html) + - [`aws_apigatewayv2_api` resource](/docs/providers/aws/r/apigatewayv2_api.html) + - [`aws_apigatewayv2_stage` resource](/docs/providers/aws/r/apigatewayv2_stage.html) + - [`aws_appconfig_application` resource](/docs/providers/aws/r/appconfig_application.html) + - [`aws_appconfig_configuration_profile` resource](/docs/providers/aws/r/appconfig_configuration_profile.html) + - [`aws_appconfig_deployment` resource](/docs/providers/aws/r/appconfig_deployment.html) + - [`aws_appconfig_deployment_strategy` resource](/docs/providers/aws/r/appconfig_deployment_strategy.html) + - [`aws_appconfig_environment` resource](/docs/providers/aws/r/appconfig_environment.html) + - [`aws_appconfig_hosted_configuration_version` resource](/docs/providers/aws/r/appconfig_hosted_configuration_version.html) + - [`aws_athena_workgroup` resource](/docs/providers/aws/r/athena_workgroup.html) + - [`aws_budgets_budget` resource](/docs/providers/aws/r/budgets_budget.html) + - [`aws_codedeploy_app` resource](/docs/providers/aws/r/codedeploy_app.html) + - [`aws_codedeploy_deployment_group` resource](/docs/providers/aws/r/codedeploy_deployment_group.html) + - [`aws_cognito_identity_pool` resource](/docs/providers/aws/r/cognito_identity_pool.html) + - [`aws_cognito_user_pools` data source](/docs/providers/aws/d/cognito_user_pools.html) + - [`aws_default_vpc_dhcp_options`](/docs/providers/aws/r/default_vpc_dhcp_options.html) + - [`aws_dms_event_subscription` resource](/docs/providers/aws/r/dms_event_subscription.html) + - [`aws_dms_replication_subnet_group` resource](/docs/providers/aws/r/dms_replication_subnet_group.html) + - [`aws_dx_connection` resource](/docs/providers/aws/r/dx_connection.html) + - [`aws_dx_hosted_private_virtual_interface_accepter` resource](/docs/providers/aws/r/dx_hosted_private_virtual_interface_accepter.html) + - [`aws_dx_hosted_private_virtual_interface` resource](/docs/providers/aws/r/dx_hosted_private_virtual_interface.html) + - [`aws_dx_hosted_public_virtual_interface_accepter` resource](/docs/providers/aws/r/dx_hosted_public_virtual_interface_accepter.html) + - [`aws_dx_hosted_public_virtual_interface` resource](/docs/providers/aws/r/dx_hosted_public_virtual_interface.html) + - [`aws_dx_hosted_transit_virtual_interface_accepter` resource](/docs/providers/aws/r/dx_hosted_transit_virtual_interface_accepter.html) + - [`aws_dx_hosted_transit_virtual_interface` resource](/docs/providers/aws/r/dx_hosted_transit_virtual_interface.html) + - [`aws_dx_lag` resource](/docs/providers/aws/r/dx_lag.html) + - [`aws_dx_private_virtual_interface` resource](/docs/providers/aws/r/dx_private_virtual_interface.html) + - [`aws_dx_public_virtual_interface` resource](/docs/providers/aws/r/dx_public_virtual_interface.html) + - [`aws_dx_transit_virtual_interface` resource](/docs/providers/aws/r/dx_transit_virtual_interface.html) + - [`aws_ebs_volume` data source](/docs/providers/aws/d/ebs_volume.html) + - [`aws_ec2_client_vpn_endpoint` resource](/docs/providers/aws/r/ec2_client_vpn_endpoint.html) + - 
[`aws_ec2_traffic_mirror_filter` resource](/docs/providers/aws/r/ec2_traffic_mirror_filter.html) + - [`aws_ec2_traffic_mirror_filter_rule` resource](/docs/providers/aws/r/ec2_traffic_mirror_filter_rule.html) + - [`aws_ec2_traffic_mirror_session` resource](/docs/providers/aws/r/ec2_traffic_mirror_session.html) + - [`aws_ec2_traffic_mirror_target` resource](/docs/providers/aws/r/ec2_traffic_mirror_target.html) + - [`aws_ec2_transit_gateway_route_table` data source](/docs/providers/aws/d/ec2_transit_gateway_route_table.html) + - [`aws_ec2_transit_gateway_route_table` resource](/docs/providers/aws/r/ec2_transit_gateway_route_table.html) + - [`aws_ecs_capacity_provider` resource (import)](/docs/providers/aws/r/ecs_capacity_provider.html) + - [`aws_ecs_cluster` resource (import)](/docs/providers/aws/r/ecs_cluster.html) + - [`aws_ecs_service` resource (import)](/docs/providers/aws/r/ecs_service.html) + - [`aws_customer_gateway` data source](/docs/providers/aws/d/customer_gateway.html) + - [`aws_customer_gateway` resource](/docs/providers/aws/r/customer_gateway.html) + - [`aws_efs_access_point` data source](/docs/providers/aws/d/efs_access_point.html) + - [`aws_efs_access_point` resource](/docs/providers/aws/r/efs_access_point.html) + - [`aws_efs_file_system` data source](/docs/providers/aws/d/efs_file_system.html) + - [`aws_efs_file_system` resource](/docs/providers/aws/r/efs_file_system.html) + - [`aws_efs_mount_target` data source](/docs/providers/aws/d/efs_mount_target.html) + - [`aws_efs_mount_target` resource](/docs/providers/aws/r/efs_mount_target.html) + - [`aws_elasticache_cluster` data source](/docs/providers/aws/d/elasticache_cluster.html) + - [`aws_elasticache_cluster` resource](/docs/providers/aws/r/elasticache_cluster.html) + - [`aws_elb` data source](/docs/providers/aws/d/elb.html) + - [`aws_elb` resource](/docs/providers/aws/r/elb.html) + - [`aws_flow_log` resource](/docs/providers/aws/r/flow_log.html) + - [`aws_glue_catalog_database` resource](/docs/providers/aws/r/glue_catalog_database.html) + - [`aws_glue_catalog_table` resource](/docs/providers/aws/r/glue_catalog_table.html) + - [`aws_glue_connection` resource](/docs/providers/aws/r/glue_connection.html) + - [`aws_glue_crawler` resource](/docs/providers/aws/r/glue_crawler.html) + - [`aws_glue_job` resource](/docs/providers/aws/r/glue_job.html) + - [`aws_glue_ml_transform` resource](/docs/providers/aws/r/glue_ml_transform.html) + - [`aws_glue_trigger` resource](/docs/providers/aws/r/glue_trigger.html) + - [`aws_glue_user_defined_function` resource](/docs/providers/aws/r/glue_user_defined_function.html) + - [`aws_glue_workflow` resource](/docs/providers/aws/r/glue_workflow.html) + - [`aws_guardduty_detector` resource](/docs/providers/aws/r/guardduty_detector.html) + - [`aws_guardduty_ipset` resource](/docs/providers/aws/r/guardduty_ipset.html) + - [`aws_guardduty_threatintelset` resource](/docs/providers/aws/r/guardduty_threatintelset.html) + - [`aws_instance` data source](/docs/providers/aws/d/instance.html) + - [`aws_instance` resource](/docs/providers/aws/r/instance.html) + - [`aws_key_pair` resource](/docs/providers/aws/r/key_pair.html) + - [`aws_launch_template` data source](/docs/providers/aws/d/launch_template.html) + - [`aws_launch_template` resource](/docs/providers/aws/r/launch_template.html) + - [`aws_placement_group` resource](/docs/providers/aws/r/placement_group.html) + - [`aws_redshift_cluster` resource](/docs/providers/aws/r/redshift_cluster.html) + - [`aws_redshift_event_subscription` 
resource](/docs/providers/aws/r/redshift_event_subscription.html) + - [`aws_redshift_parameter_group` resource](/docs/providers/aws/r/redshift_parameter_group.html) + - [`aws_redshift_snapshot_copy_grant` resource](/docs/providers/aws/r/redshift_snapshot_copy_grant.html) + - [`aws_redshift_snapshot_schedule` resource](/docs/providers/aws/r/redshift_snapshot_schedule.html) + - [`aws_redshift_subnet_group` resource](/docs/providers/aws/r/redshift_subnet_group.html) + - [`aws_s3_account_public_access_block` resource](/docs/providers/aws/r/s3_account_public_access_block.html) + - [`aws_ses_active_receipt_rule_set` resource](/docs/providers/aws/r/ses_active_receipt_rule_set.html) + - [`aws_ses_configuration_set` resource](/docs/providers/aws/r/ses_configuration_set.html) + - [`aws_ses_domain_identity_verification` resource](/docs/providers/aws/r/ses_domain_identity_verification.html) + - [`aws_ses_domain_identity` resource](/docs/providers/aws/r/ses_domain_identity.html) + - [`aws_ses_email_identity` resource](/docs/providers/aws/r/ses_email_identity.html) + - [`aws_ses_event_destination` resource](/docs/providers/aws/r/ses_event_destination.html) + - [`aws_ses_receipt_filter` resource](/docs/providers/aws/r/ses_receipt_filter.html) + - [`aws_ses_receipt_rule` resource](/docs/providers/aws/r/ses_receipt_rule.html) + - [`aws_ses_template` resource](/docs/providers/aws/r/ses_template.html) + - [`aws_ssm_document` data source](/docs/providers/aws/d/ssm_document.html) + - [`aws_ssm_document` resource](/docs/providers/aws/r/ssm_document.html) + - [`aws_ssm_parameter` data source](/docs/providers/aws/d/ssm_parameter.html) + - [`aws_ssm_parameter` resource](/docs/providers/aws/r/ssm_parameter.html) + - [`aws_synthetics_canary` resource](/docs/providers/aws/r/synthetics_canary.html) + - [`aws_vpc_endpoint_service` data source](/docs/providers/aws/d/vpc_endpoint_service.html) + - [`aws_vpc_endpoint_service` resource](/docs/providers/aws/r/vpc_endpoint_service.html) + - [`aws_vpn_connection` resource](/docs/providers/aws/r/vpn_connection.html) + - [`aws_vpn_gateway` data source](/docs/providers/aws/d/vpn_gateway.html) + - [`aws_vpn_gateway` resource](/docs/providers/aws/r/vpn_gateway.html) + - [`aws_waf_geo_match_set` resource](/docs/providers/aws/r/waf_geo_match_set.html) + - [`aws_waf_ipset` resource](/docs/providers/aws/r/waf_ipset.html) + - [`aws_waf_rate_based_rule` resource](/docs/providers/aws/r/waf_rate_based_rule.html) + - [`aws_waf_regex_match_set` resource](/docs/providers/aws/r/waf_regex_match_set.html) + - [`aws_waf_regex_pattern_set` resource](/docs/providers/aws/r/waf_regex_pattern_set.html) + - [`aws_wafregional_ipset` resource](/docs/providers/aws/r/wafregional_ipset.html) + - [`aws_wafregional_rate_based_rule` resource](/docs/providers/aws/r/wafregional_rate_based_rule.html) + - [`aws_wafregional_rule` resource](/docs/providers/aws/r/wafregional_rule.html) + - [`aws_wafregional_rule_group` resource](/docs/providers/aws/r/wafregional_rule_group.html) + - [`aws_wafregional_web_acl` resource](/docs/providers/aws/r/wafregional_web_acl.html) + - [`aws_waf_rule` resource](/docs/providers/aws/r/waf_rule.html) + - [`aws_waf_rule_group` resource](/docs/providers/aws/r/waf_rule_group.html) + - [`aws_waf_size_constraint_set` resource](/docs/providers/aws/r/waf_size_constraint_set.html) + - [`aws_waf_web_acl` resource](/docs/providers/aws/r/waf_web_acl.html) + - [`aws_waf_xss_match_set` resource](/docs/providers/aws/r/waf_xss_match_set.html) +* `sts_region` - (Optional) AWS region for STS. 
If unset, AWS will use the same region for STS as other non-STS operations.
+* `token` - (Optional) Session token for validating temporary credentials. Typically provided after successful identity federation or Multi-Factor Authentication (MFA) login. With MFA login, this is the session token provided afterward, not the 6-digit MFA code used to get temporary credentials. Can also be set with the `AWS_SESSION_TOKEN` environment variable.
+* `use_dualstack_endpoint` - (Optional) Force the provider to resolve endpoints with DualStack capability. Can also be set with the `AWS_USE_DUALSTACK_ENDPOINT` environment variable or in a shared config file (`use_dualstack_endpoint`).
+* `use_fips_endpoint` - (Optional) Force the provider to resolve endpoints with FIPS capability. Can also be set with the `AWS_USE_FIPS_ENDPOINT` environment variable or in a shared config file (`use_fips_endpoint`).
+
+### assume_role Configuration Block
+
+The `assume_role` configuration block supports the following arguments:
+
+* `duration` - (Optional) Duration of the assume role session. You can provide a value from 15 minutes up to the maximum session duration setting for the role. Represented by a string such as `1h`, `2h45m`, or `30m15s`.
+* `external_id` - (Optional) External identifier to use when assuming the role.
+* `policy` - (Optional) IAM policy in JSON format that further restricts the permissions for the IAM role being assumed.
+* `policy_arns` - (Optional) Set of Amazon Resource Names (ARNs) of IAM policies that further restrict the permissions for the IAM role being assumed.
+* `role_arn` - (Required) ARN of the IAM Role to assume.
+* `session_name` - (Optional) Session name to use when assuming the role.
+* `source_identity` - (Optional) Source identity specified by the principal assuming the role.
+* `tags` - (Optional) Map of assume role session tags.
+* `transitive_tag_keys` - (Optional) Set of assume role session tag keys to pass to any subsequent sessions.
+
+### assume_role_with_web_identity Configuration Block
+
+The `assume_role_with_web_identity` configuration block supports the following arguments:
+
+* `duration` - (Optional) Duration of the assume role session.
+  You can provide a value from 15 minutes up to the maximum session duration setting for the role.
+  Represented by a string such as `1h`, `2h45m`, or `30m15s`.
+* `policy` - (Optional) IAM policy in JSON format that further restricts the permissions for the IAM role being assumed.
+* `policy_arns` - (Optional) Set of Amazon Resource Names (ARNs) of IAM policies that further restrict the permissions for the IAM role being assumed.
+* `role_arn` - (Required) ARN of the IAM Role to assume.
+  Can also be set with the `AWS_ROLE_ARN` environment variable.
+* `session_name` - (Optional) Session name to use when assuming the role.
+  Can also be set with the `AWS_ROLE_SESSION_NAME` environment variable.
+* `web_identity_token` - (Optional) Value of a web identity token from an OpenID Connect (OIDC) or OAuth provider.
+  One of `web_identity_token` or `web_identity_token_file` is required.
+* `web_identity_token_file` - (Optional) File containing a web identity token from an OpenID Connect (OIDC) or OAuth provider.
+  One of `web_identity_token_file` or `web_identity_token` is required.
+  Can also be set with the `AWS_WEB_IDENTITY_TOKEN_FILE` environment variable.
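+
+As an illustration of the two blocks above, here is a minimal CDKTF sketch of the `assume_role` block. It assumes the generated provider bindings export an `AwsProviderAssumeRole` struct alongside `AwsProvider`, mirroring the `AwsProviderAssumeRoleWithWebIdentity` class used earlier; adjust the names to match your generated bindings.
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+# AwsProviderAssumeRole is assumed to be exported by the generated bindings.
+from imports.aws.provider import AwsProvider, AwsProviderAssumeRole
+class AssumeRoleStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # All role-assumption parameters live in the assume_role block.
+        AwsProvider(self, "aws",
+            assume_role=[AwsProviderAssumeRole(
+                role_arn="arn:aws:iam::123456789012:role/ROLE_NAME",
+                session_name="SESSION_NAME",
+                external_id="EXTERNAL_ID"
+            )]
+        )
+```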
+
+### default_tags Configuration Block
+
+> **Hands-on:** Try the [Configure Default Tags for AWS Resources](https://learn.hashicorp.com/tutorials/terraform/aws-default-tags?in=terraform/aws) tutorial.
+
+Example: Resource with provider default tags
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider, AwsProviderDefaultTags
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            default_tags=[AwsProviderDefaultTags(
+                tags={
+                    "Environment": "Test",
+                    "Name": "Provider Tag"
+                }
+            )
+            ]
+        )
+        example = Vpc(self, "example")
+        TerraformOutput(self, "vpc_all_tags",
+            value=example.tags_all
+        )
+        TerraformOutput(self, "vpc_resource_level_tags",
+            value=example.tags
+        )
+```
+
+Outputs:
+
+```console
+$ terraform apply
+...
+Outputs:
+
+vpc_all_tags = tomap({
+  "Environment" = "Test"
+  "Name" = "Provider Tag"
+})
+```
+
+Example: Resource with tags and provider default tags
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider, AwsProviderDefaultTags
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            default_tags=[AwsProviderDefaultTags(
+                tags={
+                    "Environment": "Test",
+                    "Name": "Provider Tag"
+                }
+            )
+            ]
+        )
+        example = Vpc(self, "example",
+            tags={
+                "Owner": "example"
+            }
+        )
+        TerraformOutput(self, "vpc_all_tags",
+            value=example.tags_all
+        )
+        TerraformOutput(self, "vpc_resource_level_tags",
+            value=example.tags
+        )
+```
+
+Outputs:
+
+```console
+$ terraform apply
+...
+Outputs:
+
+vpc_all_tags = tomap({
+  "Environment" = "Test"
+  "Name" = "Provider Tag"
+  "Owner" = "example"
+})
+vpc_resource_level_tags = tomap({
+  "Owner" = "example"
+})
+```
+
+Example: Resource overriding provider default tags
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider, AwsProviderDefaultTags
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            default_tags=[AwsProviderDefaultTags(
+                tags={
+                    "Environment": "Test",
+                    "Name": "Provider Tag"
+                }
+            )
+            ]
+        )
+        example = Vpc(self, "example",
+            tags={
+                "Environment": "Production"
+            }
+        )
+        TerraformOutput(self, "vpc_all_tags",
+            value=example.tags_all
+        )
+        TerraformOutput(self, "vpc_resource_level_tags",
+            value=example.tags
+        )
+```
+
+Outputs:
+
+```console
+$ terraform apply
+...
+
+Outputs:
+
+vpc_all_tags = tomap({
+  "Environment" = "Production"
+  "Name" = "Provider Tag"
+})
+vpc_resource_level_tags = tomap({
+  "Environment" = "Production"
+})
+```
+
+The `default_tags` configuration block supports the following argument:
+
+* `tags` - (Optional) Key-value map of tags to apply to all resources.
+
+### ignore_tags Configuration Block
+
+Example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.provider import AwsProvider, AwsProviderIgnoreTags
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            ignore_tags=[AwsProviderIgnoreTags(
+                keys=["TagKey1"]
+            )
+            ]
+        )
+```
+
+The `ignore_tags` configuration block supports the following arguments:
+
+* `keys` - (Optional) List of exact resource tag keys to ignore across all resources handled by this provider. This configuration prevents Terraform from returning the tag in any `tags` attributes and displaying any configuration difference for the tag value. If any resource configuration still has this tag key configured in the `tags` argument, it will display a perpetual difference until the tag is removed from the argument or [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) is also used.
+* `key_prefixes` - (Optional) List of resource tag key prefixes to ignore across all resources handled by this provider. This configuration prevents Terraform from returning any tag key matching the prefixes in any `tags` attributes and displaying any configuration difference for those tag values. If any resource configuration still has a tag matching one of the prefixes configured in the `tags` argument, it will display a perpetual difference until the tag is removed from the argument or [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) is also used.
+
+## Getting the Account ID
+
+If you use either `allowed_account_ids` or
+`forbidden_account_ids`,
+Terraform uses several approaches to get the actual account ID
+in order to compare it with allowed or forbidden IDs.
+
+The approaches differ by authentication provider:
+
+* EC2 instance w/ IAM Instance Profile - [Metadata API](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html)
+  is always used. Introduced in Terraform `0.6.16`.
+* All other providers (environment variable, shared credentials file, ...)
+  will try three approaches in the following order:
+  * `iam:GetUser` - Typically useful for IAM Users. It also means
+    that each user needs to be privileged to call `iam:GetUser` for themselves.
+  * `sts:GetCallerIdentity` - _Should_ work for both IAM Users and federated IAM Roles,
+    introduced in Terraform `0.6.16`.
+  * `iam:ListRoles` - This is specifically useful for IdP-federated profiles
+    which cannot use `iam:GetUser`. It also means that each federated user
+    needs to be _assuming_ an IAM role which allows `iam:ListRoles`.
+    Used in Terraform `0.6.16+`.
+    Until `sts:GetCallerIdentity` was introduced, there was no better way
+    to get the account ID from the API for a federated account.
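+
+For instance, a minimal sketch of guarding a configuration with `allowed_account_ids` (the account ID and stack name are illustrative):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.provider import AwsProvider
+class GuardedStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform resolves the caller's account ID using the approaches above
+        # and refuses to plan or apply if it is not in this list.
+        AwsProvider(self, "aws",
+            allowed_account_ids=["123456789012"]
+        )
+```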
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/accessanalyzer_analyzer.html.markdown b/website/docs/cdktf/python/r/accessanalyzer_analyzer.html.markdown new file mode 100644 index 00000000000..a0d1f22bafe --- /dev/null +++ b/website/docs/cdktf/python/r/accessanalyzer_analyzer.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "IAM Access Analyzer" +layout: "aws" +page_title: "AWS: aws_accessanalyzer_analyzer" +description: |- + Manages an Access Analyzer Analyzer +--- + + + +# Resource: aws_accessanalyzer_analyzer + +Manages an Access Analyzer Analyzer. More information can be found in the [Access Analyzer User Guide](https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html). + +## Example Usage + +### Account Analyzer + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.accessanalyzer_analyzer import AccessanalyzerAnalyzer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AccessanalyzerAnalyzer(self, "example", + analyzer_name="example" + ) +``` + +### Organization Analyzer + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.accessanalyzer_analyzer import AccessanalyzerAnalyzer +from imports.aws.organizations_organization import OrganizationsOrganization +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = OrganizationsOrganization(self, "example", + aws_service_access_principals=["access-analyzer.amazonaws.com"] + ) + aws_accessanalyzer_analyzer_example = AccessanalyzerAnalyzer(self, "example_1", + analyzer_name="example", + depends_on=[example], + type="ORGANIZATION" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_accessanalyzer_analyzer_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `analyzer_name` - (Required) Name of the Analyzer. + +The following arguments are optional: + +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) Type of Analyzer. Valid values are `ACCOUNT` or `ORGANIZATION`. Defaults to `ACCOUNT`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Analyzer. +* `id` - Analyzer name. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
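+
+The `tags` argument above takes a plain key-value map; a minimal sketch of a tagged account analyzer (tag values are illustrative):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.accessanalyzer_analyzer import AccessanalyzerAnalyzer
+class TaggedAnalyzerStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AccessanalyzerAnalyzer(self, "example",
+            analyzer_name="example",
+            # Merged with any provider-level default_tags; matching keys defined
+            # here overwrite the provider-level values.
+            tags={"Environment": "test"}
+        )
+```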
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Access Analyzer Analyzers using the `analyzer_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Access Analyzer Analyzers using the `analyzer_name`. For example:
+
+```console
+% terraform import aws_accessanalyzer_analyzer.example example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/accessanalyzer_archive_rule.html.markdown b/website/docs/cdktf/python/r/accessanalyzer_archive_rule.html.markdown
new file mode 100644
index 00000000000..4cb7ba14803
--- /dev/null
+++ b/website/docs/cdktf/python/r/accessanalyzer_archive_rule.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "IAM Access Analyzer"
+layout: "aws"
+page_title: "AWS: aws_accessanalyzer_archive_rule"
+description: |-
+  Terraform resource for managing an AWS AccessAnalyzer Archive Rule.
+---
+
+
+
+# Resource: aws_accessanalyzer_archive_rule
+
+Terraform resource for managing an AWS AccessAnalyzer Archive Rule.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.accessanalyzer_archive_rule import AccessanalyzerArchiveRule, AccessanalyzerArchiveRuleFilter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AccessanalyzerArchiveRule(self, "example",
+            analyzer_name="example-analyzer",
+            filter=[AccessanalyzerArchiveRuleFilter(
+                criteria="condition.aws:UserId",
+                eq=["userid"]
+            ), AccessanalyzerArchiveRuleFilter(
+                criteria="error",
+                exists=Token.as_string(True)
+            ), AccessanalyzerArchiveRuleFilter(
+                criteria="isPublic",
+                eq=["false"]
+            )
+            ],
+            rule_name="example-rule"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `analyzer_name` - (Required) Analyzer name.
+* `filter` - (Required) Filter criteria for the archive rule. See [Filter](#filter) for more details.
+* `rule_name` - (Required) Rule name.
+
+### Filter
+
+~> **Note:** One comparator must be included with each filter.
+
+* `criteria` - (Required) Filter criteria.
+* `contains` - (Optional) Contains comparator.
+* `eq` - (Optional) Equals comparator.
+* `exists` - (Optional) Boolean comparator.
+* `neq` - (Optional) Not Equals comparator.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Resource ID in the format: `analyzer_name/rule_name`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AccessAnalyzer ArchiveRule using the `analyzer_name/rule_name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AccessAnalyzer ArchiveRule using the `analyzer_name/rule_name`. For example: + +```console +% terraform import aws_accessanalyzer_archive_rule.example example-analyzer/example-rule +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/account_alternate_contact.html.markdown b/website/docs/cdktf/python/r/account_alternate_contact.html.markdown new file mode 100644 index 00000000000..c955e5b9d8c --- /dev/null +++ b/website/docs/cdktf/python/r/account_alternate_contact.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Account Management" +layout: "aws" +page_title: "AWS: aws_account_alternate_contact" +description: |- + Manages the specified alternate contact attached to an AWS Account. +--- + + + +# Resource: aws_account_alternate_contact + +Manages the specified alternate contact attached to an AWS Account. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.account_alternate_contact import AccountAlternateContact +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AccountAlternateContact(self, "operations", + alternate_contact_type="OPERATIONS", + email_address="test@example.com", + name="Example", + phone_number="+1234567890", + title="Example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Optional) ID of the target account when managing member accounts. Will manage current user's account by default if omitted. +* `alternate_contact_type` - (Required) Type of the alternate contact. Allowed values are: `BILLING`, `OPERATIONS`, `SECURITY`. +* `email_address` - (Required) An email address for the alternate contact. +* `name` - (Required) Name of the alternate contact. +* `phone_number` - (Required) Phone number for the alternate contact. +* `title` - (Required) Title for the alternate contact. + +## Attribute Reference + +This resource exports no additional attributes. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5m`) +- `update` - (Default `5m`) +- `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Alternate Contact for the current or another account using the `alternate_contact_type`. 
For example: + +Import the Alternate Contact for the current account: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import the Alternate Contact for another account using the `account_id` and `alternate_contact_type` separated by a forward slash (`/`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** the Alternate Contact for the current or another account using the `alternate_contact_type`. For example: + +Import the Alternate Contact for the current account: + +```console +% terraform import aws_account_alternate_contact.operations OPERATIONS +``` + +Import the Alternate Contact for another account using the `account_id` and `alternate_contact_type` separated by a forward slash (`/`): + +```console +% terraform import aws_account_alternate_contact.operations 1234567890/OPERATIONS +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/account_primary_contact.html.markdown b/website/docs/cdktf/python/r/account_primary_contact.html.markdown new file mode 100644 index 00000000000..9006bfbf22b --- /dev/null +++ b/website/docs/cdktf/python/r/account_primary_contact.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "Account Management" +layout: "aws" +page_title: "AWS: aws_account_primary_contact" +description: |- + Manages the specified primary contact information associated with an AWS Account. +--- + + + +# Resource: aws_account_primary_contact + +Manages the specified primary contact information associated with an AWS Account. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.account_primary_contact import AccountPrimaryContact +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AccountPrimaryContact(self, "test", + address_line1="123 Any Street", + city="Seattle", + company_name="Example Corp, Inc.", + country_code="US", + district_or_county="King", + full_name="My Name", + phone_number="+64211111111", + postal_code="98101", + state_or_region="WA", + website_url="https://www.examplecorp.com" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Optional) The ID of the target account when managing member accounts. Will manage current user's account by default if omitted. +* `address_line_1` - (Required) The first line of the primary contact address. +* `address_line_2` - (Optional) The second line of the primary contact address, if any. +* `address_line_3` - (Optional) The third line of the primary contact address, if any. +* `city` - (Required) The city of the primary contact address. +* `company_name` - (Optional) The name of the company associated with the primary contact information, if any. +* `country_code` - (Required) The ISO-3166 two-letter country code for the primary contact address. 
+* `district_or_county` - (Optional) The district or county of the primary contact address, if any.
+* `full_name` - (Required) The full name of the primary contact address.
+* `phone_number` - (Required) The phone number of the primary contact information. The number will be validated and, in some countries, checked for activation.
+* `postal_code` - (Required) The postal code of the primary contact address.
+* `state_or_region` - (Optional) The state or region of the primary contact address. This field is required in selected countries.
+* `website_url` - (Optional) The URL of the website associated with the primary contact information, if any.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Primary Contact using the `account_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the Primary Contact using the `account_id`. For example:
+
+```console
+% terraform import aws_account_primary_contact.test 1234567890
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/acm_certificate.html.markdown b/website/docs/cdktf/python/r/acm_certificate.html.markdown
new file mode 100644
index 00000000000..ac4ace2fc8b
--- /dev/null
+++ b/website/docs/cdktf/python/r/acm_certificate.html.markdown
@@ -0,0 +1,280 @@
+---
+subcategory: "ACM (Certificate Manager)"
+layout: "aws"
+page_title: "AWS: aws_acm_certificate"
+description: |-
+  Requests and manages a certificate from Amazon Certificate Manager (ACM).
+---
+
+
+
+# Resource: aws_acm_certificate
+
+The ACM certificate resource allows requesting and managing certificates
+from Amazon Certificate Manager.
+
+ACM certificates can be created in three ways:
+Amazon-issued, where AWS provides the certificate authority and automatically manages renewal;
+imported certificates, issued by another certificate authority;
+and private certificates, issued using an ACM Private Certificate Authority.
+
+## Amazon-Issued Certificates
+
+For Amazon-issued certificates, this resource deals with requesting certificates and managing their attributes and lifecycle.
+This resource does not deal with validation of a certificate but can provide inputs
+for other resources implementing the validation.
+It does not wait for a certificate to be issued.
+Use an [`aws_acm_certificate_validation`](acm_certificate_validation.html) resource for this.
+
+Most commonly, this resource is used together with [`aws_route53_record`](route53_record.html) and
+[`aws_acm_certificate_validation`](acm_certificate_validation.html) to request a DNS validated certificate,
+deploy the required validation records and wait for validation to complete.
+
+Domain validation through email is also supported but should be avoided as it requires a manual step outside of Terraform.
+
+It's recommended to specify `create_before_destroy = true` in a [lifecycle][1] block to replace a certificate
+which is currently in use (e.g., by [`aws_lb_listener`](lb_listener.html)).
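+
+A minimal sketch of that recommendation (the Create Certificate example below shows it in full context):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack, TerraformResourceLifecycle
+from imports.aws.acm_certificate import AcmCertificate
+class CertStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # A replacement certificate is issued before the old one is destroyed,
+        # so consumers such as aws_lb_listener never point at a deleted ARN.
+        AcmCertificate(self, "cert",
+            domain_name="example.com",
+            validation_method="DNS",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            )
+        )
+```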
+
+## Certificates Imported from Other Certificate Authorities
+
+Imported certificates can be used to make certificates created with an external certificate authority available for AWS services.
+
+As they are not managed by AWS, imported certificates are not eligible for automatic renewal.
+New certificate materials can be supplied to an existing imported certificate to update it in place.
+
+## Private Certificates
+
+Private certificates are issued by an ACM Private Certificate Authority, which can be created using the resource type [`aws_acmpca_certificate_authority`](acmpca_certificate_authority.html).
+
+Private certificates created using this resource are eligible for managed renewal if they have been exported or associated with another AWS service.
+See [managed renewal documentation](https://docs.aws.amazon.com/acm/latest/userguide/managed-renewal.html) for more information.
+By default, a certificate is valid for 395 days and the managed renewal process will start 60 days before expiration.
+To renew the certificate earlier than 60 days before expiration, configure `early_renewal_duration`.
+
+## Example Usage
+
+### Create Certificate
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.acm_certificate import AcmCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AcmCertificate(self, "cert",
+            domain_name="example.com",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            tags={
+                "Environment": "test"
+            },
+            validation_method="DNS"
+        )
+```
+
+### Custom Domain Validation Options
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.acm_certificate import AcmCertificate, AcmCertificateValidationOption
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AcmCertificate(self, "cert",
+            domain_name="testing.example.com",
+            validation_method="EMAIL",
+            validation_option=[AcmCertificateValidationOption(
+                domain_name="testing.example.com",
+                validation_domain="example.com"
+            )
+            ]
+        )
+```
+
+### Existing Certificate Body Import
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.acm_certificate import AcmCertificate
+from imports.tls.private_key import PrivateKey
+from imports.tls.self_signed_cert import SelfSignedCert
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The following providers are missing schema information and might need manual adjustments to synthesize correctly: tls.
+        # For a more precise conversion please use the --provider flag in convert.
+ example = PrivateKey(self, "example", + algorithm="RSA" + ) + tls_self_signed_cert_example = SelfSignedCert(self, "example_1", + allowed_uses=["key_encipherment", "digital_signature", "server_auth"], + key_algorithm="RSA", + private_key_pem=example.private_key_pem, + subject=[{ + "common_name": "example.com", + "organization": "ACME Examples, Inc" + } + ], + validity_period_hours=12 + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + tls_self_signed_cert_example.override_logical_id("example") + AcmCertificate(self, "cert", + certificate_body=Token.as_string(tls_self_signed_cert_example.cert_pem), + private_key=Token.as_string(example.private_key_pem) + ) +``` + +### Referencing domain_validation_options With for_each Based Resources + +See the [`aws_acm_certificate_validation` resource](acm_certificate_validation.html) for a full example of performing DNS validation. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformIterator, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_for_each_iterator = TerraformIterator.from_list( + Token.as_any("${{ for dvo in ${" + aws_acm_certificate_example.domain_validation_options + "} : dvo.domain_name => {\n name = dvo.resource_record_name\n record = dvo.resource_record_value\n type = dvo.resource_record_type\n }}}")) + Route53Record(self, "example", + allow_overwrite=True, + name=Token.as_string( + property_access(example_for_each_iterator.value, ["name"])), + records=[ + Token.as_string( + property_access(example_for_each_iterator.value, ["record"])) + ], + ttl=60, + type=Token.as_string( + property_access(example_for_each_iterator.value, ["type"])), + zone_id=Token.as_string(aws_route53_zone_example.zone_id), + for_each=example_for_each_iterator + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* Creating an Amazon issued certificate + * `domain_name` - (Required) Domain name for which the certificate should be issued + * `subject_alternative_names` - (Optional) Set of domains that should be SANs in the issued certificate. To remove all elements of a previously configured list, set this value equal to an empty list (`[]`) or use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html) to trigger recreation. + * `validation_method` - (Optional) Which method to use for validation. `DNS` or `EMAIL` are valid. This parameter must not be set for certificates that were imported into ACM and then into Terraform. + * `key_algorithm` - (Optional) Specifies the algorithm of the public and private key pair that your Amazon issued certificate uses to encrypt data. 
See [ACM Certificate characteristics](https://docs.aws.amazon.com/acm/latest/userguide/acm-certificate.html#algorithms) for more details. + * `options` - (Optional) Configuration block used to set certificate options. Detailed below. + * `validation_option` - (Optional) Configuration block used to specify information about the initial validation of each domain name. Detailed below. +* Importing an existing certificate + * `private_key` - (Required) Certificate's PEM-formatted private key + * `certificate_body` - (Required) Certificate's PEM-formatted public key + * `certificate_chain` - (Optional) Certificate's PEM-formatted chain +* Creating a private CA issued certificate + * `certificate_authority_arn` - (Required) ARN of an ACM PCA + * `domain_name` - (Required) Domain name for which the certificate should be issued. + * `early_renewal_duration` - (Optional) Amount of time to start automatic renewal process before expiration. + Has no effect if less than 60 days. + Represented by either + a subset of [RFC 3339 duration](https://www.rfc-editor.org/rfc/rfc3339) supporting years, months, and days (e.g., `P90D`), + or a string such as `2160h`. +* `subject_alternative_names` - (Optional) Set of domains that should be SANs in the issued certificate. + To remove all elements of a previously configured list, set this value equal to an empty list (`[]`) + or use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html) to trigger recreation. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## options Configuration Block + +Supported nested arguments for the `options` configuration block: + +* `certificate_transparency_logging_preference` - (Optional) Whether certificate details should be added to a certificate transparency log. Valid values are `ENABLED` or `DISABLED`. See https://docs.aws.amazon.com/acm/latest/userguide/acm-concepts.html#concept-transparency for more details. + +## validation_option Configuration Block + +Supported nested arguments for the `validation_option` configuration block: + +* `domain_name` - (Required) Fully qualified domain name (FQDN) in the certificate. +* `validation_domain` - (Required) Domain name that you want ACM to use to send you validation emails. This domain name is the suffix of the email addresses that you want ACM to use. This must be the same as the `domain_name` value or a superdomain of the `domain_name` value. For example, if you request a certificate for `"testing.example.com"`, you can specify `"example.com"` for this value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ARN of the certificate +* `arn` - ARN of the certificate +* `domain_name` - Domain name for which the certificate is issued +* `domain_validation_options` - Set of domain validation objects which can be used to complete certificate validation. + Can have more than one element, e.g., if SANs are defined. + Only set if `DNS`-validation was used. +* `not_after` - Expiration date and time of the certificate. +* `not_before` - Start of the validity period of the certificate. +* `pending_renewal` - `true` if a Private certificate eligible for managed renewal is within the `early_renewal_duration` period. 
+* `renewal_eligibility` - Whether the certificate is eligible for managed renewal. +* `renewal_summary` - Contains information about the status of ACM's [managed renewal](https://docs.aws.amazon.com/acm/latest/userguide/acm-renewal.html) for the certificate. +* `status` - Status of the certificate. +* `type` - Source of the certificate. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `validation_emails` - List of addresses that received a validation email. Only set if `EMAIL` validation was used. + +Domain validation objects export the following attributes: + +* `domain_name` - Domain to be validated +* `resource_record_name` - The name of the DNS record to create to validate the certificate +* `resource_record_type` - The type of DNS record to create +* `resource_record_value` - The value the DNS record needs to have + +Renewal summary objects export the following attributes: + +* `renewal_status` - The status of ACM's managed renewal of the certificate +* `renewal_status_reason` - The reason that a renewal request was unsuccessful or is pending + +[1]: https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import certificates using their ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import certificates using their ARN. For example: + +```console +% terraform import aws_acm_certificate.cert arn:aws:acm:eu-central-1:123456789012:certificate/7e7a28d2-163f-4b8f-b9cd-822f96c08d6a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/acm_certificate_validation.html.markdown b/website/docs/cdktf/python/r/acm_certificate_validation.html.markdown new file mode 100644 index 00000000000..95d99a618a1 --- /dev/null +++ b/website/docs/cdktf/python/r/acm_certificate_validation.html.markdown @@ -0,0 +1,205 @@ +--- +subcategory: "ACM (Certificate Manager)" +layout: "aws" +page_title: "AWS: aws_acm_certificate_validation" +description: |- + Waits for and checks successful validation of an ACM certificate. +--- + + + +# Resource: aws_acm_certificate_validation + +This resource represents a successful validation of an ACM certificate in concert +with other resources. + +Most commonly, this resource is used together with [`aws_route53_record`](route53_record.html) and +[`aws_acm_certificate`](acm_certificate.html) to request a DNS validated certificate, +deploy the required validation records and wait for validation to complete. + +~> **WARNING:** This resource implements a part of the validation workflow. It does not represent a real-world entity in AWS, therefore changing or deleting this resource on its own has no immediate effect. + +## Example Usage + +### DNS Validation with Route 53 + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformIterator, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.acm_certificate import AcmCertificate
+from imports.aws.acm_certificate_validation import AcmCertificateValidation
+from imports.aws.data_aws_route53_zone import DataAwsRoute53Zone
+from imports.aws.lb_listener import LbListener
+from imports.aws.route53_record import Route53Record
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, default_action, load_balancer_arn):
+        super().__init__(scope, name)
+        example = AcmCertificate(self, "example",
+            domain_name="example.com",
+            validation_method="DNS"
+        )
+        data_aws_route53_zone_example = DataAwsRoute53Zone(self, "example_1",
+            name="example.com",
+            private_zone=False
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_route53_zone_example.override_logical_id("example")
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        example_for_each_iterator = TerraformIterator.from_list(
+            Token.as_any("${{ for dvo in ${" + example.domain_validation_options + "} : dvo.domain_name => {\n name = dvo.resource_record_name\n record = dvo.resource_record_value\n type = dvo.resource_record_type\n }}}"))
+        aws_route53_record_example = Route53Record(self, "example_2",
+            allow_overwrite=True,
+            name=Token.as_string(
+                property_access(example_for_each_iterator.value, ["name"])),
+            records=[
+                Token.as_string(
+                    property_access(example_for_each_iterator.value, ["record"]))
+            ],
+            ttl=60,
+            type=Token.as_string(
+                property_access(example_for_each_iterator.value, ["type"])),
+            zone_id=Token.as_string(data_aws_route53_zone_example.zone_id),
+            for_each=example_for_each_iterator
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_record_example.override_logical_id("example")
+        aws_acm_certificate_validation_example = AcmCertificateValidation(self, "example_3",
+            certificate_arn=example.arn,
+            validation_record_fqdns=Token.as_list("${[ for record in ${" + aws_route53_record_example.fqn + "} : record.fqdn]}")
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acm_certificate_validation_example.override_logical_id("example")
+        aws_lb_listener_example = LbListener(self, "example_4",
+            certificate_arn=Token.as_string(aws_acm_certificate_validation_example.certificate_arn),
+            default_action=default_action,
+            load_balancer_arn=load_balancer_arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_listener_example.override_logical_id("example")
+```
+
+### Alternative Domains DNS Validation with Route 53
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformIterator, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
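+# NOTE: as in the previous example, the `Token.as_any("${{ for dvo in ... }}")`
+# string below is a conversion escape hatch: the original HCL `for` expression
+# is passed through to Terraform verbatim rather than re-expressed in Python.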
+#
+from imports.aws.acm_certificate import AcmCertificate
+from imports.aws.acm_certificate_validation import AcmCertificateValidation
+from imports.aws.data_aws_route53_zone import DataAwsRoute53Zone
+from imports.aws.lb_listener import LbListener
+from imports.aws.route53_record import Route53Record
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, default_action, load_balancer_arn):
+        super().__init__(scope, name)
+        example = AcmCertificate(self, "example",
+            domain_name="example.com",
+            subject_alternative_names=["www.example.com", "example.org"],
+            validation_method="DNS"
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        example_for_each_iterator = TerraformIterator.from_list(
+            Token.as_any("${{ for dvo in ${" + example.domain_validation_options + "} : dvo.domain_name => {\n name = dvo.resource_record_name\n record = dvo.resource_record_value\n type = dvo.resource_record_type\n zone_id = dvo.domain_name == \"example.org\" ? data.aws_route53_zone.example_org.zone_id : data.aws_route53_zone.example_com.zone_id\n }}}"))
+        aws_route53_record_example = Route53Record(self, "example_1",
+            allow_overwrite=True,
+            name=Token.as_string(
+                property_access(example_for_each_iterator.value, ["name"])),
+            records=[
+                Token.as_string(
+                    property_access(example_for_each_iterator.value, ["record"]))
+            ],
+            ttl=60,
+            type=Token.as_string(
+                property_access(example_for_each_iterator.value, ["type"])),
+            zone_id=Token.as_string(
+                property_access(example_for_each_iterator.value, ["zone_id"])),
+            for_each=example_for_each_iterator
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_record_example.override_logical_id("example")
+        DataAwsRoute53Zone(self, "example_com",
+            name="example.com",
+            private_zone=False
+        )
+        DataAwsRoute53Zone(self, "example_org",
+            name="example.org",
+            private_zone=False
+        )
+        aws_acm_certificate_validation_example = AcmCertificateValidation(self, "example_4",
+            certificate_arn=example.arn,
+            validation_record_fqdns=Token.as_list("${[ for record in ${" + aws_route53_record_example.fqn + "} : record.fqdn]}")
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acm_certificate_validation_example.override_logical_id("example")
+        aws_lb_listener_example = LbListener(self, "example_5",
+            certificate_arn=Token.as_string(aws_acm_certificate_validation_example.certificate_arn),
+            default_action=default_action,
+            load_balancer_arn=load_balancer_arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_listener_example.override_logical_id("example")
+```
+
+### Email Validation
+
+In this situation, the resource is simply a waiter for manual email approval of ACM certificates.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.acm_certificate import AcmCertificate +from imports.aws.acm_certificate_validation import AcmCertificateValidation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AcmCertificate(self, "example", + domain_name="example.com", + validation_method="EMAIL" + ) + aws_acm_certificate_validation_example = AcmCertificateValidation(self, "example_1", + certificate_arn=example.arn + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_acm_certificate_validation_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificate_arn` - (Required) ARN of the certificate that is being validated. +* `validation_record_fqdns` - (Optional) List of FQDNs that implement the validation. Only valid for DNS validation method ACM certificates. If this is set, the resource can implement additional sanity checks and has an explicit dependency on the resource that is implementing the validation + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Time at which the certificate was issued + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `75m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/acmpca_certificate.html.markdown b/website/docs/cdktf/python/r/acmpca_certificate.html.markdown new file mode 100644 index 00000000000..6e956333b99 --- /dev/null +++ b/website/docs/cdktf/python/r/acmpca_certificate.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)" +layout: "aws" +page_title: "AWS: aws_acmpca_certificate" +description: |- + Provides a resource to issue a certificate using AWS Certificate Manager Private Certificate Authority (ACM PCA) +--- + + + +# Resource: aws_acmpca_certificate + +Provides a resource to issue a certificate using AWS Certificate Manager Private Certificate Authority (ACM PCA). + +Certificates created using `aws_acmpca_certificate` are not eligible for automatic renewal, +and must be replaced instead. +To issue a renewable certificate using an ACM PCA, create a [`aws_acm_certificate`](acm_certificate.html) +with the parameter `certificate_authority_arn`. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.acmpca_certificate import AcmpcaCertificate +from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority +from imports.tls.cert_request import CertRequest +from imports.tls.private_key import PrivateKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, certificateAuthorityConfiguration): + super().__init__(scope, name) + # The following providers are missing schema information and might need manual adjustments to synthesize correctly: tls. + # For a more precise conversion please use the --provider flag in convert. 
+ example = AcmpcaCertificateAuthority(self, "example", + permanent_deletion_time_in_days=7, + private_certificate_configuration=[{ + "key_algorithm": "RSA_4096", + "signing_algorithm": "SHA512WITHRSA", + "subject": [{ + "common_name": "example.com" + } + ] + } + ], + certificate_authority_configuration=certificate_authority_configuration + ) + key = PrivateKey(self, "key", + algorithm="RSA" + ) + csr = CertRequest(self, "csr", + key_algorithm="RSA", + private_key_pem=key.private_key_pem, + subject=[{ + "common_name": "example" + } + ] + ) + aws_acmpca_certificate_example = AcmpcaCertificate(self, "example_3", + certificate_authority_arn=example.arn, + certificate_signing_request=Token.as_string(csr.cert_request_pem), + signing_algorithm="SHA256WITHRSA", + validity=AcmpcaCertificateValidity( + type="YEARS", + value=Token.as_string(1) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_acmpca_certificate_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificate_authority_arn` - (Required) ARN of the certificate authority. +* `certificate_signing_request` - (Required) Certificate Signing Request in PEM format. +* `signing_algorithm` - (Required) Algorithm to use to sign certificate requests. Valid values: `SHA256WITHRSA`, `SHA256WITHECDSA`, `SHA384WITHRSA`, `SHA384WITHECDSA`, `SHA512WITHRSA`, `SHA512WITHECDSA`. +* `validity` - (Required) Configures end of the validity period for the certificate. See [validity block](#validity-block) below. +* `template_arn` - (Optional) Template to use when issuing a certificate. + See [ACM PCA Documentation](https://docs.aws.amazon.com/privateca/latest/userguide/UsingTemplates.html) for more information. +* `api_passthrough` - (Optional) Specifies X.509 certificate information to be included in the issued certificate. To use with API Passthrough templates + +### validity block + +* `type` - (Required) Determines how `value` is interpreted. Valid values: `DAYS`, `MONTHS`, `YEARS`, `ABSOLUTE`, `END_DATE`. +* `value` - (Required) If `type` is `DAYS`, `MONTHS`, or `YEARS`, the relative time until the certificate expires. If `type` is `ABSOLUTE`, the date in seconds since the Unix epoch. If `type` is `END_DATE`, the date in RFC 3339 format. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the certificate. +* `certificate` - PEM-encoded certificate value. +* `certificate_chain` - PEM-encoded certificate chain that includes any intermediate certificates and chains up to root CA. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ACM PCA Certificates using their ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ACM PCA Certificates using their ARN. 
For example: + +```console +% terraform import aws_acmpca_certificate.cert arn:aws:acm-pca:eu-west-1:675225743824:certificate-authority/08319ede-83g9-1400-8f21-c7d12b2b6edb/certificate/a4e9c2aa4bcfab625g1b9136464cd3a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/acmpca_certificate_authority.html.markdown b/website/docs/cdktf/python/r/acmpca_certificate_authority.html.markdown new file mode 100644 index 00000000000..61c8bd4b30e --- /dev/null +++ b/website/docs/cdktf/python/r/acmpca_certificate_authority.html.markdown @@ -0,0 +1,227 @@ +--- +subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)" +layout: "aws" +page_title: "AWS: aws_acmpca_certificate_authority" +description: |- + Provides a resource to manage AWS Certificate Manager Private Certificate Authorities +--- + + + +# Resource: aws_acmpca_certificate_authority + +Provides a resource to manage AWS Certificate Manager Private Certificate Authorities (ACM PCA Certificate Authorities). + +~> **NOTE:** Creating this resource will leave the certificate authority in a `PENDING_CERTIFICATE` status, which means it cannot yet issue certificates. To complete this setup, you must fully sign the certificate authority CSR available in the `certificate_signing_request` attribute. The [`aws_acmpca_certificate_authority_certificate`](/docs/providers/aws/r/acmpca_certificate_authority_certificate.html) resource can be used for this purpose. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AcmpcaCertificateAuthority(self, "example", + certificate_authority_configuration=AcmpcaCertificateAuthorityCertificateAuthorityConfiguration( + key_algorithm="RSA_4096", + signing_algorithm="SHA512WITHRSA", + subject=AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject( + common_name="example.com" + ) + ), + permanent_deletion_time_in_days=7 + ) +``` + +### Short-lived certificate + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AcmpcaCertificateAuthority(self, "example", + certificate_authority_configuration=AcmpcaCertificateAuthorityCertificateAuthorityConfiguration( + key_algorithm="RSA_4096", + signing_algorithm="SHA512WITHRSA", + subject=AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject( + common_name="example.com" + ) + ), + usage_mode="SHORT_LIVED_CERTIFICATE" + ) +``` + +### Enable Certificate Revocation List + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
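+# NOTE: the bucket policy below grants the acm-pca.amazonaws.com service
+# principal access to the CRL bucket, and the certificate authority declares
+# depends_on on that policy so the grant exists before the CA is created.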
+# +from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_policy import S3BucketPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example" + ) + acmpca_bucket_access = DataAwsIamPolicyDocument(self, "acmpca_bucket_access", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:GetBucketAcl", "s3:GetBucketLocation", "s3:PutObject", "s3:PutObjectAcl" + ], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["acm-pca.amazonaws.com"], + type="Service" + ) + ], + resources=[example.arn, "${" + example.arn + "}/*"] + ) + ] + ) + aws_s3_bucket_policy_example = S3BucketPolicy(self, "example_2", + bucket=example.id, + policy=Token.as_string(acmpca_bucket_access.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_policy_example.override_logical_id("example") + aws_acmpca_certificate_authority_example = AcmpcaCertificateAuthority(self, "example_3", + certificate_authority_configuration=AcmpcaCertificateAuthorityCertificateAuthorityConfiguration( + key_algorithm="RSA_4096", + signing_algorithm="SHA512WITHRSA", + subject=AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject( + common_name="example.com" + ) + ), + depends_on=[aws_s3_bucket_policy_example], + revocation_configuration=AcmpcaCertificateAuthorityRevocationConfiguration( + crl_configuration=AcmpcaCertificateAuthorityRevocationConfigurationCrlConfiguration( + custom_cname="crl.example.com", + enabled=True, + expiration_in_days=7, + s3_bucket_name=example.id + ) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_acmpca_certificate_authority_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificate_authority_configuration` - (Required) Nested argument containing algorithms and certificate subject information. Defined below. +* `enabled` - (Optional) Whether the certificate authority is enabled or disabled. Defaults to `true`. Can only be disabled if the CA is in an `ACTIVE` state. +* `revocation_configuration` - (Optional) Nested argument containing revocation configuration. Defined below. +* `usage_mode` - (Optional) Specifies whether the CA issues general-purpose certificates that typically require a revocation mechanism, or short-lived certificates that may optionally omit revocation because they expire quickly. Short-lived certificate validity is limited to seven days. Defaults to `GENERAL_PURPOSE`. Valid values: `GENERAL_PURPOSE` and `SHORT_LIVED_CERTIFICATE`. +* `tags` - (Optional) Key-value map of user-defined tags that are attached to the certificate authority. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) Type of the certificate authority. Defaults to `SUBORDINATE`. Valid values: `ROOT` and `SUBORDINATE`. 
+* `key_storage_security_standard` - (Optional) Cryptographic key management compliance standard used for handling CA keys. Defaults to `FIPS_140_2_LEVEL_3_OR_HIGHER`. Valid values: `FIPS_140_2_LEVEL_3_OR_HIGHER` and `FIPS_140_2_LEVEL_2_OR_HIGHER`. The standards supported in each region can be found in the [Storage and security compliance of AWS Private CA private keys documentation](https://docs.aws.amazon.com/privateca/latest/userguide/data-protection.html#private-keys).
+* `permanent_deletion_time_in_days` - (Optional) Number of days to make a CA restorable after it has been deleted. Must be between 7 and 30 days. Defaults to 30 days.
+
+### certificate_authority_configuration
+
+* `key_algorithm` - (Required) Type of the public key algorithm and size, in bits, of the key pair that your CA creates when it issues a certificate. Valid values can be found in the [ACM PCA Documentation](https://docs.aws.amazon.com/privateca/latest/APIReference/API_CertificateAuthorityConfiguration.html).
+* `signing_algorithm` - (Required) Name of the algorithm your private CA uses to sign certificate requests. Valid values can be found in the [ACM PCA Documentation](https://docs.aws.amazon.com/privateca/latest/APIReference/API_CertificateAuthorityConfiguration.html).
+* `subject` - (Required) Nested argument that contains X.500 distinguished name information. At least one nested attribute must be specified.
+
+#### subject
+
+Contains information about the certificate subject. Identifies the entity that owns or controls the public key in the certificate. The entity can be a user, computer, device, or service.
+
+* `common_name` - (Optional) Fully qualified domain name (FQDN) associated with the certificate subject. Must be less than or equal to 64 characters in length.
+* `country` - (Optional) Two-digit code that specifies the country in which the certificate subject is located. Must be less than or equal to 2 characters in length.
+* `distinguished_name_qualifier` - (Optional) Disambiguating information for the certificate subject. Must be less than or equal to 64 characters in length.
+* `generation_qualifier` - (Optional) Typically a qualifier appended to the name of an individual. Examples include Jr. for junior, Sr. for senior, and III for third. Must be less than or equal to 3 characters in length.
+* `given_name` - (Optional) First name. Must be less than or equal to 16 characters in length.
+* `initials` - (Optional) Concatenation that typically contains the first letter of the `given_name`, the first letter of the middle name if one exists, and the first letter of the `surname`. Must be less than or equal to 5 characters in length.
+* `locality` - (Optional) Locality (such as a city or town) in which the certificate subject is located. Must be less than or equal to 128 characters in length.
+* `organization` - (Optional) Legal name of the organization with which the certificate subject is affiliated. Must be less than or equal to 64 characters in length.
+* `organizational_unit` - (Optional) Subdivision or unit of the organization (such as sales or finance) with which the certificate subject is affiliated. Must be less than or equal to 64 characters in length.
+* `pseudonym` - (Optional) Typically a shortened version of a longer `given_name`. For example, Jonathan is often shortened to John. Elizabeth is often shortened to Beth, Liz, or Eliza. Must be less than or equal to 128 characters in length.
+* `state` - (Optional) State in which the subject of the certificate is located. Must be less than or equal to 128 characters in length.
+* `surname` - (Optional) Family name. In the US and the UK for example, the surname of an individual is ordered last. In Asian cultures the surname is typically ordered first. Must be less than or equal to 40 characters in length.
+* `title` - (Optional) Title such as Mr. or Ms. which is prepended to the name to refer formally to the certificate subject. Must be less than or equal to 64 characters in length.
+
+### revocation_configuration
+
+* `crl_configuration` - (Optional) Nested argument containing configuration of the certificate revocation list (CRL), if any, maintained by the certificate authority. Defined below.
+* `ocsp_configuration` - (Optional) Nested argument containing configuration of
+the custom OCSP responder endpoint. Defined below.
+
+#### crl_configuration
+
+* `custom_cname` - (Optional) Name inserted into the certificate CRL Distribution Points extension that enables the use of an alias for the CRL distribution point. Use this value if you don't want the name of your S3 bucket to be public. Must be less than or equal to 253 characters in length.
+* `enabled` - (Optional) Boolean value that specifies whether certificate revocation lists (CRLs) are enabled. Defaults to `false`.
+* `expiration_in_days` - (Optional, Required if `enabled` is `true`) Number of days until a certificate expires. Must be between 1 and 5000.
+* `s3_bucket_name` - (Optional, Required if `enabled` is `true`) Name of the S3 bucket that contains the CRL. If you do not provide a value for the `custom_cname` argument, the name of your S3 bucket is placed into the CRL Distribution Points extension of the issued certificate. You must specify a bucket policy that allows ACM PCA to write the CRL to your bucket. Must be between 3 and 255 characters in length.
+* `s3_object_acl` - (Optional) Determines whether the CRL will be publicly readable or privately held in the CRL Amazon S3 bucket. Defaults to `PUBLIC_READ`.
+
+#### ocsp_configuration
+
+* `enabled` - (Required) Boolean value that specifies whether a custom OCSP responder is enabled.
+* `ocsp_custom_cname` - (Optional) CNAME specifying a customized OCSP domain. Note: The value of the CNAME must not include a protocol prefix such as "http://" or "https://".
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ARN of the certificate authority.
+* `arn` - ARN of the certificate authority.
+* `certificate` - Base64-encoded certificate authority (CA) certificate. Only available after the certificate authority certificate has been imported.
+* `certificate_chain` - Base64-encoded certificate chain that includes any intermediate certificates and chains up to the root on-premises certificate that you used to sign your private CA certificate. The chain does not include your private CA certificate. Only available after the certificate authority certificate has been imported.
+* `certificate_signing_request` - The base64 PEM-encoded certificate signing request (CSR) for your private CA certificate.
+* `not_after` - Date and time after which the certificate authority is not valid. Only available after the certificate authority certificate has been imported.
+* `not_before` - Date and time before which the certificate authority is not valid. Only available after the certificate authority certificate has been imported.
+* `serial` - Serial number of the certificate authority. 
Only available after the certificate authority certificate has been imported. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_acmpca_certificate_authority` using the certificate authority ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_acmpca_certificate_authority` using the certificate authority ARN. For example: + +```console +% terraform import aws_acmpca_certificate_authority.example arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/acmpca_certificate_authority_certificate.html.markdown b/website/docs/cdktf/python/r/acmpca_certificate_authority_certificate.html.markdown new file mode 100644 index 00000000000..3cdc594603f --- /dev/null +++ b/website/docs/cdktf/python/r/acmpca_certificate_authority_certificate.html.markdown @@ -0,0 +1,149 @@ +--- +subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)" +layout: "aws" +page_title: "AWS: aws_acmpca_certificate_authority_certificate" +description: |- + Associates a certificate with an AWS Certificate Manager Private Certificate Authority +--- + + + +# Resource: aws_acmpca_certificate_authority_certificate + +Associates a certificate with an AWS Certificate Manager Private Certificate Authority (ACM PCA Certificate Authority). An ACM PCA Certificate Authority is unable to issue certificates until it has a certificate associated with it. A root level ACM PCA Certificate Authority is able to self-sign its own root certificate. + +## Example Usage + +### Self-Signed Root Certificate Authority Certificate + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
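+# NOTE: the flow below creates a ROOT certificate authority, issues the CA's
+# own root certificate from its CSR using the RootCACertificate template, and
+# then associates that certificate with the CA.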
+#
+from imports.aws.acmpca_certificate import AcmpcaCertificate, AcmpcaCertificateValidity
+from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority, AcmpcaCertificateAuthorityCertificateAuthorityConfiguration, AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject
+from imports.aws.acmpca_certificate_authority_certificate import AcmpcaCertificateAuthorityCertificate
+from imports.aws.data_aws_partition import DataAwsPartition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AcmpcaCertificateAuthority(self, "example",
+            certificate_authority_configuration=AcmpcaCertificateAuthorityCertificateAuthorityConfiguration(
+                key_algorithm="RSA_4096",
+                signing_algorithm="SHA512WITHRSA",
+                subject=AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject(
+                    common_name="example.com"
+                )
+            ),
+            type="ROOT"
+        )
+        current = DataAwsPartition(self, "current")
+        aws_acmpca_certificate_example = AcmpcaCertificate(self, "example_2",
+            certificate_authority_arn=example.arn,
+            certificate_signing_request=example.certificate_signing_request,
+            signing_algorithm="SHA512WITHRSA",
+            template_arn="arn:${" + current.partition + "}:acm-pca:::template/RootCACertificate/V1",
+            validity=AcmpcaCertificateValidity(
+                type="YEARS",
+                value=Token.as_string(1)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_certificate_example.override_logical_id("example")
+        aws_acmpca_certificate_authority_certificate_example = AcmpcaCertificateAuthorityCertificate(self, "example_3",
+            certificate=Token.as_string(aws_acmpca_certificate_example.certificate),
+            certificate_authority_arn=example.arn,
+            certificate_chain=Token.as_string(aws_acmpca_certificate_example.certificate_chain)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_certificate_authority_certificate_example.override_logical_id("example")
+```
+
+### Certificate for Subordinate Certificate Authority
+
+Note that the certificate for the subordinate certificate authority must be issued by the root certificate authority using a signing request from the subordinate certificate authority.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.acmpca_certificate import AcmpcaCertificate, AcmpcaCertificateValidity
+from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority, AcmpcaCertificateAuthorityCertificateAuthorityConfiguration, AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject
+from imports.aws.acmpca_certificate_authority_certificate import AcmpcaCertificateAuthorityCertificate
+from imports.aws.data_aws_partition import DataAwsPartition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, certificate_authority_arn, certificate_signing_request, signing_algorithm, validity, certificate_authority_configuration, certificate, certificate_authority_arn1):
+        super().__init__(scope, name)
+        AcmpcaCertificate(self, "root",
+            certificate_authority_arn=certificate_authority_arn,
+            certificate_signing_request=certificate_signing_request,
+            signing_algorithm=signing_algorithm,
+            validity=validity
+        )
+        aws_acmpca_certificate_authority_root = AcmpcaCertificateAuthority(self, "root_1",
+            certificate_authority_configuration=certificate_authority_configuration
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_certificate_authority_root.override_logical_id("root")
+        subordinate = AcmpcaCertificateAuthority(self, "subordinate",
+            certificate_authority_configuration=AcmpcaCertificateAuthorityCertificateAuthorityConfiguration(
+                key_algorithm="RSA_2048",
+                signing_algorithm="SHA512WITHRSA",
+                subject=AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject(
+                    common_name="sub.example.com"
+                )
+            ),
+            type="SUBORDINATE"
+        )
+        aws_acmpca_certificate_authority_certificate_root = AcmpcaCertificateAuthorityCertificate(self, "root_3",
+            certificate=certificate,
+            certificate_authority_arn=certificate_authority_arn1
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_certificate_authority_certificate_root.override_logical_id("root")
+        current = DataAwsPartition(self, "current")
+        aws_acmpca_certificate_subordinate = AcmpcaCertificate(self, "subordinate_5",
+            certificate_authority_arn=Token.as_string(aws_acmpca_certificate_authority_root.arn),
+            certificate_signing_request=subordinate.certificate_signing_request,
+            signing_algorithm="SHA512WITHRSA",
+            template_arn="arn:${" + current.partition + "}:acm-pca:::template/SubordinateCACertificate_PathLen0/V1",
+            validity=AcmpcaCertificateValidity(
+                type="YEARS",
+                value=Token.as_string(1)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_certificate_subordinate.override_logical_id("subordinate")
+        aws_acmpca_certificate_authority_certificate_subordinate = AcmpcaCertificateAuthorityCertificate(self, "subordinate_6",
+            certificate=Token.as_string(aws_acmpca_certificate_subordinate.certificate),
+            certificate_authority_arn=subordinate.arn,
+            certificate_chain=Token.as_string(aws_acmpca_certificate_subordinate.certificate_chain)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_certificate_authority_certificate_subordinate.override_logical_id("subordinate")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate` - (Required) PEM-encoded certificate for the Certificate Authority.
+* `certificate_authority_arn` - (Required) ARN of the Certificate Authority.
+* `certificate_chain` - (Optional) PEM-encoded certificate chain that includes any intermediate certificates and chains up to root CA. Required for subordinate Certificate Authorities. Not allowed for root Certificate Authorities.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/acmpca_permission.html.markdown b/website/docs/cdktf/python/r/acmpca_permission.html.markdown
new file mode 100644
index 00000000000..42e3ff4442f
--- /dev/null
+++ b/website/docs/cdktf/python/r/acmpca_permission.html.markdown
@@ -0,0 +1,64 @@
+---
+subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)"
+layout: "aws"
+page_title: "AWS: aws_acmpca_permission"
+description: |-
+  Provides a resource to manage an AWS Certificate Manager Private Certificate Authorities Permission
+---
+
+
+
+# Resource: aws_acmpca_permission
+
+Provides a resource to manage an AWS Certificate Manager Private Certificate Authorities Permission.
+Currently, this is only required to allow the ACM service to automatically renew certificates issued by a PCA.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority, AcmpcaCertificateAuthorityCertificateAuthorityConfiguration, AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject
+from imports.aws.acmpca_permission import AcmpcaPermission
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AcmpcaCertificateAuthority(self, "example",
+            certificate_authority_configuration=AcmpcaCertificateAuthorityCertificateAuthorityConfiguration(
+                key_algorithm="RSA_4096",
+                signing_algorithm="SHA512WITHRSA",
+                subject=AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject(
+                    common_name="example.com"
+                )
+            )
+        )
+        aws_acmpca_permission_example = AcmpcaPermission(self, "example_1",
+            actions=["IssueCertificate", "GetCertificate", "ListPermissions"],
+            certificate_authority_arn=example.arn,
+            principal="acm.amazonaws.com"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_permission_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate_authority_arn` - (Required) ARN of the CA that grants the permissions.
+* `actions` - (Required) Actions that the specified AWS service principal can use. These include `IssueCertificate`, `GetCertificate`, and `ListPermissions`. Note that in order for ACM to automatically renew certificates issued by a PCA, it must be granted permission on all three actions, as per the example above.
+* `principal` - (Required) AWS service or identity that receives the permission. At this time, the only valid principal is `acm.amazonaws.com`.
+* `source_account` - (Optional) ID of the calling account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `policy` - IAM policy that is associated with the permission.
+
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/acmpca_policy.html.markdown b/website/docs/cdktf/python/r/acmpca_policy.html.markdown
new file mode 100644
index 00000000000..bc65d4df32a
--- /dev/null
+++ b/website/docs/cdktf/python/r/acmpca_policy.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)"
+layout: "aws"
+page_title: "AWS: aws_acmpca_policy"
+description: |-
+  Attaches a resource-based policy to an AWS Certificate Manager Private Certificate Authority (ACM PCA)
+---
+
+
+
+# Resource: aws_acmpca_policy
+
+Attaches a resource-based policy to a private CA.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
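+# NOTE: `current` (an AWS caller-identity data source, assumed) and
+# `aws_acmpca_certificate_authority_example` are referenced below but defined
+# elsewhere; the converted snippet does not declare them.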
+#
+from imports.aws.acmpca_policy import AcmpcaPolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementCondition, DataAwsIamPolicyDocumentStatementPrincipals
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsIamPolicyDocument(self, "example",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["acm-pca:DescribeCertificateAuthority", "acm-pca:GetCertificate", "acm-pca:GetCertificateAuthorityCertificate", "acm-pca:ListPermissions", "acm-pca:ListTags"
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(current.account_id)],
+                    type="AWS"
+                )
+                ],
+                resources=[Token.as_string(aws_acmpca_certificate_authority_example.arn)],
+                sid="1"
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["acm-pca:IssueCertificate"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="StringEquals",
+                    values=["arn:aws:acm-pca:::template/EndEntityCertificate/V1"],
+                    variable="acm-pca:TemplateArn"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(current.account_id)],
+                    type="AWS"
+                )
+                ],
+                resources=[Token.as_string(aws_acmpca_certificate_authority_example.arn)],
+                sid="2"
+            )
+            ]
+        )
+        aws_acmpca_policy_example = AcmpcaPolicy(self, "example_1",
+            policy=Token.as_string(example.json),
+            resource_arn=Token.as_string(aws_acmpca_certificate_authority_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required) ARN of the private CA to associate with the policy.
+* `policy` - (Required) JSON-formatted IAM policy to attach to the specified private CA resource.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_acmpca_policy` using the `resource_arn` value. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_acmpca_policy` using the `resource_arn` value. For example:
+
+```console
+% terraform import aws_acmpca_policy.example arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012
+```
+
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ami.html.markdown b/website/docs/cdktf/python/r/ami.html.markdown
new file mode 100644
index 00000000000..3109bb06d63
--- /dev/null
+++ b/website/docs/cdktf/python/r/ami.html.markdown
@@ -0,0 +1,157 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_ami"
+description: |-
+  Creates and manages a custom Amazon Machine Image (AMI).
+---
+
+
+
+# Resource: aws_ami
+
+The AMI resource allows the creation and management of a completely custom
+*Amazon Machine Image* (AMI).
+
+If you just want to duplicate an existing AMI, possibly copying it to another
+region, it's better to use `aws_ami_copy` instead.
+ +If you just want to share an existing AMI with another AWS account, +it's better to use `aws_ami_launch_permission` instead. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ami import Ami +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Ami(self, "example", + ebs_block_device=[AmiEbsBlockDevice( + device_name="/dev/xvda", + snapshot_id="snap-xxxxxxxx", + volume_size=8 + ) + ], + imds_support="v2.0", + name="terraform-example", + root_device_name="/dev/xvda", + virtualization_type="hvm" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Region-unique name for the AMI. +* `boot_mode` - (Optional) Boot mode of the AMI. For more information, see [Boot modes](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ami-boot.html) in the Amazon Elastic Compute Cloud User Guide. +* `deprecation_time` - (Optional) Date and time to deprecate the AMI. If you specified a value for seconds, Amazon EC2 rounds the seconds to the nearest minute. Valid values: [RFC3339 time string](https://tools.ietf.org/html/rfc3339#section-5.8) (`YYYY-MM-DDTHH:MM:SSZ`) +* `description` - (Optional) Longer, human-readable description for the AMI. +* `ena_support` - (Optional) Whether enhanced networking with ENA is enabled. Defaults to `false`. +* `root_device_name` - (Optional) Name of the root device (for example, `/dev/sda1`, or `/dev/xvda`). +* `virtualization_type` - (Optional) Keyword to choose what virtualization mode created instances + will use. Can be either "paravirtual" (the default) or "hvm". The choice of virtualization type + changes the set of further arguments that are required, as described below. +* `architecture` - (Optional) Machine architecture for created instances. Defaults to "x86_64". +* `ebs_block_device` - (Optional) Nested block describing an EBS block device that should be + attached to created instances. The structure of this block is described below. +* `ephemeral_block_device` - (Optional) Nested block describing an ephemeral block device that + should be attached to created instances. The structure of this block is described below. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tpm_support` - (Optional) If the image is configured for NitroTPM support, the value is `v2.0`. For more information, see [NitroTPM](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/nitrotpm.html) in the Amazon Elastic Compute Cloud User Guide. +* `imds_support` - (Optional) If EC2 instances started from this image should require the use of the Instance Metadata Service V2 (IMDSv2), set this argument to `v2.0`. For more information, see [Configure instance metadata options for new instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-IMDS-new-instances.html#configure-IMDS-new-instances-ami-configuration). 
+ +When `virtualization_type` is "paravirtual" the following additional arguments apply: + +* `image_location` - (Required) Path to an S3 object containing an image manifest, e.g., created + by the `ec2-upload-bundle` command in the EC2 command line tools. +* `kernel_id` - (Required) ID of the kernel image (AKI) that will be used as the paravirtual + kernel in created instances. +* `ramdisk_id` - (Optional) ID of an initrd image (ARI) that will be used when booting the + created instances. + +When `virtualization_type` is "hvm" the following additional arguments apply: + +* `sriov_net_support` - (Optional) When set to "simple" (the default), enables enhanced networking + for created instances. No other value is supported at this time. + +Nested `ebs_block_device` blocks have the following structure: + +* `device_name` - (Required) Path at which the device is exposed to created instances. +* `delete_on_termination` - (Optional) Boolean controlling whether the EBS volumes created to + support each created instance will be deleted once that instance is terminated. +* `encrypted` - (Optional) Boolean controlling whether the created EBS volumes will be encrypted. Can't be used with `snapshot_id`. +* `iops` - (Required only when `volume_type` is `io1` or `io2`) Number of I/O operations per second the + created volumes will support. +* `snapshot_id` - (Optional) ID of an EBS snapshot that will be used to initialize the created + EBS volumes. If set, the `volume_size` attribute must be at least as large as the referenced + snapshot. +* `throughput` - (Optional) Throughput that the EBS volume supports, in MiB/s. Only valid for `volume_type` of `gp3`. +* `volume_size` - (Required unless `snapshot_id` is set) Size of created volumes in GiB. + If `snapshot_id` is set and `volume_size` is omitted then the volume will have the same size + as the selected snapshot. +* `volume_type` - (Optional) Type of EBS volume to create. Can be `standard`, `gp2`, `gp3`, `io1`, `io2`, `sc1` or `st1` (Default: `standard`). +* `outpost_arn` - (Optional) ARN of the Outpost on which the snapshot is stored. + +~> **Note:** You can specify `encrypted` or `snapshot_id` but not both. + +Nested `ephemeral_block_device` blocks have the following structure: + +* `device_name` - (Required) Path at which the device is exposed to created instances. +* `virtual_name` - (Required) Name for the ephemeral device, of the form "ephemeralN" where + *N* is a volume number starting from zero. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AMI. +* `id` - ID of the created AMI. +* `owner_id` - AWS account ID of the image owner. +* `root_snapshot_id` - Snapshot ID for the root volume (for EBS-backed AMIs) +* `usage_operation` - Operation of the Amazon EC2 instance and the billing code that is associated with the AMI. +* `platform_details` - Platform details associated with the billing code of the AMI. +* `image_owner_alias` - AWS account alias (for example, amazon, self) or the AWS account ID of the AMI owner. +* `image_type` - Type of image. +* `hypervisor` - Hypervisor type of the image. +* `owner_id` - AWS account ID of the image owner. +* `platform` - This value is set to windows for Windows AMIs; otherwise, it is blank. +* `public` - Whether the image has public launch permissions. 
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `40m`) +* `update` - (Default `40m`) +* `delete` - (Default `90m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ami` using the ID of the AMI. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_ami` using the ID of the AMI. For example: + +```console +% terraform import aws_ami.example ami-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ami_copy.html.markdown b/website/docs/cdktf/python/r/ami_copy.html.markdown new file mode 100644 index 00000000000..b937c05015a --- /dev/null +++ b/website/docs/cdktf/python/r/ami_copy.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_ami_copy" +description: |- + Duplicates an existing Amazon Machine Image (AMI) +--- + + + +# Resource: aws_ami_copy + +The "AMI copy" resource allows duplication of an Amazon Machine Image (AMI), +including cross-region copies. + +If the source AMI has associated EBS snapshots, those will also be duplicated +along with the AMI. + +This is useful for taking a single AMI provisioned in one region and making +it available in another for a multi-region deployment. + +Copying an AMI can take several minutes. The creation of this resource will +block until the new AMI is available for use on new instances. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ami_copy import AmiCopy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AmiCopy(self, "example", + description="A copy of ami-xxxxxxxx", + name="terraform-example", + source_ami_id="ami-xxxxxxxx", + source_ami_region="us-west-1", + tags={ + "Name": "HelloWorld" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Region-unique name for the AMI. +* `source_ami_id` - (Required) Id of the AMI to copy. This id must be valid in the region + given by `source_ami_region`. +* `source_ami_region` - (Required) Region from which the AMI will be copied. This may be the + same as the AWS provider region in order to create a copy within the same region. +* `destination_outpost_arn` - (Optional) ARN of the Outpost to which to copy the AMI. + Only specify this parameter when copying an AMI from an AWS Region to an Outpost. The AMI must be in the Region of the destination Outpost. +* `encrypted` - (Optional) Whether the destination snapshots of the copied image should be encrypted. 
Defaults to `false` +* `kms_key_id` - (Optional) Full ARN of the KMS Key to use when encrypting the snapshots of an image during a copy operation. If not specified, then the default AWS KMS Key will be used +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +This resource also exposes the full set of arguments from the [`aws_ami`](ami.html) resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AMI. +* `id` - ID of the created AMI. + +This resource also exports a full set of attributes corresponding to the arguments of the +[`aws_ami`](/docs/providers/aws/r/ami.html) resource, allowing the properties of the created AMI to be used elsewhere in the +configuration. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `40m`) +* `update` - (Default `40m`) +* `delete` - (Default `90m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ami_from_instance.html.markdown b/website/docs/cdktf/python/r/ami_from_instance.html.markdown new file mode 100644 index 00000000000..ebead693cb2 --- /dev/null +++ b/website/docs/cdktf/python/r/ami_from_instance.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_ami_from_instance" +description: |- + Creates an Amazon Machine Image (AMI) from an EBS-backed EC2 instance +--- + + + +# Resource: aws_ami_from_instance + +The "AMI from instance" resource allows the creation of an Amazon Machine +Image (AMI) modelled after an existing EBS-backed EC2 instance. + +The created AMI will refer to implicitly-created snapshots of the instance's +EBS volumes and mimick its assigned block device configuration at the time +the resource is created. + +This resource is best applied to an instance that is stopped when this instance +is created, so that the contents of the created image are predictable. When +applied to an instance that is running, *the instance will be stopped before taking +the snapshots and then started back up again*, resulting in a period of +downtime. + +Note that the source instance is inspected only at the initial creation of this +resource. Ongoing updates to the referenced instance will not be propagated into +the generated AMI. Users may taint or otherwise recreate the resource in order +to produce a fresh snapshot. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ami_from_instance import AmiFromInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AmiFromInstance(self, "example", + name="terraform-example", + source_instance_id="i-xxxxxxxx" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Region-unique name for the AMI. +* `source_instance_id` - (Required) ID of the instance to use as the basis of the AMI. 
+* `snapshot_without_reboot` - (Optional) Boolean that overrides the behavior of stopping + the instance before snapshotting. This is risky since it may cause a snapshot of an + inconsistent filesystem state, but can be used to avoid downtime if the user otherwise + guarantees that no filesystem writes will be underway at the time of snapshot. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `40m`) +* `update` - (Default `40m`) +* `delete` - (Default `90m`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AMI. +* `id` - ID of the created AMI. + +This resource also exports a full set of attributes corresponding to the arguments of the +[`aws_ami`](/docs/providers/aws/r/ami.html) resource, allowing the properties of the created AMI to be used elsewhere in the +configuration. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ami_launch_permission.html.markdown b/website/docs/cdktf/python/r/ami_launch_permission.html.markdown new file mode 100644 index 00000000000..27464415717 --- /dev/null +++ b/website/docs/cdktf/python/r/ami_launch_permission.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_ami_launch_permission" +description: |- + Adds a launch permission to an Amazon Machine Image (AMI). +--- + + + +# Resource: aws_ami_launch_permission + +Adds a launch permission to an Amazon Machine Image (AMI). + +## Example Usage + +### AWS Account ID + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ami_launch_permission import AmiLaunchPermission +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AmiLaunchPermission(self, "example", + account_id="123456789012", + image_id="ami-12345678" + ) +``` + +### Public Access + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ami_launch_permission import AmiLaunchPermission +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AmiLaunchPermission(self, "example", + group="all", + image_id="ami-12345678" + ) +``` + +### Organization Access + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.ami_launch_permission import AmiLaunchPermission +from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsOrganizationsOrganization(self, "current") + AmiLaunchPermission(self, "example", + image_id="ami-12345678", + organization_arn=Token.as_string(current.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Optional) AWS account ID for the launch permission. +* `group` - (Optional) Name of the group for the launch permission. Valid values: `"all"`. +* `image_id` - (Required) ID of the AMI. +* `organization_arn` - (Optional) ARN of an organization for the launch permission. +* `organizational_unit_arn` - (Optional) ARN of an organizational unit for the launch permission. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Launch permission ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AMI Launch Permissions using `[ACCOUNT-ID|GROUP-NAME|ORGANIZATION-ARN|ORGANIZATIONAL-UNIT-ARN]/IMAGE-ID`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AMI Launch Permissions using `[ACCOUNT-ID|GROUP-NAME|ORGANIZATION-ARN|ORGANIZATIONAL-UNIT-ARN]/IMAGE-ID`. For example: + +```console +% terraform import aws_ami_launch_permission.example 123456789012/ami-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/amplify_app.html.markdown b/website/docs/cdktf/python/r/amplify_app.html.markdown new file mode 100644 index 00000000000..7c1b09b3892 --- /dev/null +++ b/website/docs/cdktf/python/r/amplify_app.html.markdown @@ -0,0 +1,245 @@ +--- +subcategory: "Amplify" +layout: "aws" +page_title: "AWS: aws_amplify_app" +description: |- + Provides an Amplify App resource. +--- + + + +# Resource: aws_amplify_app + +Provides an Amplify App resource, a fullstack serverless app hosted on the [AWS Amplify Console](https://docs.aws.amazon.com/amplify/latest/userguide/welcome.html). + +~> **Note:** When you create/update an Amplify App from Terraform, you may end up with the error "BadRequestException: You should at least provide one valid token" because of authentication issues. See the section "Repository with Tokens" below. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.amplify_app import AmplifyApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AmplifyApp(self, "example", + build_spec="version: 0.1\nfrontend:\n phases:\n preBuild:\n commands:\n - yarn install\n build:\n commands:\n - yarn run build\n artifacts:\n baseDirectory: build\n files:\n - '**/*'\n cache:\n paths:\n - node_modules/**/*\n\n", + custom_rule=[AmplifyAppCustomRule( + source="/<*>", + status="404", + target="/index.html" + ) + ], + environment_variables={ + "ENV": "test" + }, + name="example", + repository="https://github.com/example/app" + ) +``` + +### Repository with Tokens + +If you create a new Amplify App with the `repository` argument, you also need to set `oauth_token` or `access_token` for authentication. For GitHub, get a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) and set `access_token` as follows: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.amplify_app import AmplifyApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AmplifyApp(self, "example", + access_token="...", + name="example", + repository="https://github.com/example/app" + ) +``` + +You can omit `access_token` if you import an existing Amplify App created by the Amplify Console (using OAuth for authentication). + +### Auto Branch Creation + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.amplify_app import AmplifyApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AmplifyApp(self, "example", + auto_branch_creation_config=AmplifyAppAutoBranchCreationConfig( + enable_auto_build=True + ), + auto_branch_creation_patterns=["*", "*/**"], + enable_auto_branch_creation=True, + name="example" + ) +``` + +### Basic Authorization + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.amplify_app import AmplifyApp
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AmplifyApp(self, "example",
+            basic_auth_credentials=Token.as_string(
+                Fn.base64encode("username1:password1")),
+            enable_basic_auth=True,
+            name="example"
+        )
+```
+
+### Rewrites and Redirects
+
+```terraform
+resource "aws_amplify_app" "example" {
+  name = "example"
+
+  # Reverse Proxy Rewrite for API requests
+  # https://docs.aws.amazon.com/amplify/latest/userguide/redirects.html#reverse-proxy-rewrite
+  custom_rule {
+    source = "/api/<*>"
+    status = "200"
+    target = "https://api.example.com/api/<*>"
+  }
+
+  # Redirects for Single Page Web Apps (SPA)
+  # https://docs.aws.amazon.com/amplify/latest/userguide/redirects.html#redirects-for-single-page-web-apps-spa
+  custom_rule {
+    source = ""
+    status = "200"
+    target = "/index.html"
+  }
+}
+```
+
+### Custom Image
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.amplify_app import AmplifyApp
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AmplifyApp(self, "example",
+            environment_variables={
+                "_CUSTOM_IMAGE": "node:16"
+            },
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name for an Amplify app.
+* `access_token` - (Optional) Personal access token for a third-party source control system for an Amplify app. The personal access token is used to create a webhook and a read-only deploy key. The token is not stored.
+* `auto_branch_creation_config` - (Optional) Automated branch creation configuration for an Amplify app. An `auto_branch_creation_config` block is documented below.
+* `auto_branch_creation_patterns` - (Optional) Automated branch creation glob patterns for an Amplify app.
+* `basic_auth_credentials` - (Optional) Credentials for basic authorization for an Amplify app.
+* `build_spec` - (Optional) The [build specification](https://docs.aws.amazon.com/amplify/latest/userguide/build-settings.html) (build spec) for an Amplify app.
+* `custom_rule` - (Optional) Custom rewrite and redirect rules for an Amplify app. A `custom_rule` block is documented below.
+* `description` - (Optional) Description for an Amplify app.
+* `enable_auto_branch_creation` - (Optional) Enables automated branch creation for an Amplify app.
+* `enable_basic_auth` - (Optional) Enables basic authorization for an Amplify app. This will apply to all branches that are part of this app.
+* `enable_branch_auto_build` - (Optional) Enables auto-building of branches for the Amplify App.
+* `enable_branch_auto_deletion` - (Optional) Automatically disconnects a branch in the Amplify Console when you delete a branch from your Git repository.
+* `environment_variables` - (Optional) Environment variables map for an Amplify app.
+* `iam_service_role_arn` - (Optional) AWS Identity and Access Management (IAM) service role for an Amplify app.
+* `oauth_token` - (Optional) OAuth token for a third-party source control system for an Amplify app. The OAuth token is used to create a webhook and a read-only deploy key. The OAuth token is not stored.
+* `platform` - (Optional) Platform or framework for an Amplify app. 
Valid values: `WEB`, `WEB_COMPUTE`. Default value: `WEB`. +* `repository` - (Optional) Repository for an Amplify app. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +An `auto_branch_creation_config` block supports the following arguments: + +* `basic_auth_credentials` - (Optional) Basic authorization credentials for the autocreated branch. +* `build_spec` - (Optional) Build specification (build spec) for the autocreated branch. +* `enable_auto_build` - (Optional) Enables auto building for the autocreated branch. +* `enable_basic_auth` - (Optional) Enables basic authorization for the autocreated branch. +* `enable_performance_mode` - (Optional) Enables performance mode for the branch. +* `enable_pull_request_preview` - (Optional) Enables pull request previews for the autocreated branch. +* `environment_variables` - (Optional) Environment variables for the autocreated branch. +* `framework` - (Optional) Framework for the autocreated branch. +* `pull_request_environment_name` - (Optional) Amplify environment name for the pull request. +* `stage` - (Optional) Describes the current stage for the autocreated branch. Valid values: `PRODUCTION`, `BETA`, `DEVELOPMENT`, `EXPERIMENTAL`, `PULL_REQUEST`. + +A `custom_rule` block supports the following arguments: + +* `condition` - (Optional) Condition for a URL rewrite or redirect rule, such as a country code. +* `source` - (Required) Source pattern for a URL rewrite or redirect rule. +* `status` - (Optional) Status code for a URL rewrite or redirect rule. Valid values: `200`, `301`, `302`, `404`, `404-200`. +* `target` - (Required) Target pattern for a URL rewrite or redirect rule. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Amplify app. +* `default_domain` - Default domain for the Amplify app. +* `id` - Unique ID of the Amplify app. +* `production_branch` - Describes the information about a production branch for an Amplify app. A `production_branch` block is documented below. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +A `production_branch` block supports the following attributes: + +* `branch_name` - Branch name for the production branch. +* `last_deploy_time` - Last deploy time of the production branch. +* `status` - Status of the production branch. +* `thumbnail_url` - Thumbnail URL for the production branch. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify App using Amplify App ID (appId). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amplify App using Amplify App ID (appId). 
For example: + +```console +% terraform import aws_amplify_app.example d2ypk4k47z8u6 +``` + +App ID can be obtained from App ARN (e.g., `arn:aws:amplify:us-east-1:12345678:apps/d2ypk4k47z8u6`). + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/amplify_backend_environment.html.markdown b/website/docs/cdktf/python/r/amplify_backend_environment.html.markdown new file mode 100644 index 00000000000..29b4f4ddfee --- /dev/null +++ b/website/docs/cdktf/python/r/amplify_backend_environment.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "Amplify" +layout: "aws" +page_title: "AWS: aws_amplify_backend_environment" +description: |- + Provides an Amplify Backend Environment resource. +--- + + + +# Resource: aws_amplify_backend_environment + +Provides an Amplify Backend Environment resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.amplify_app import AmplifyApp +from imports.aws.amplify_backend_environment import AmplifyBackendEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AmplifyApp(self, "example", + name="example" + ) + aws_amplify_backend_environment_example = AmplifyBackendEnvironment(self, "example_1", + app_id=example.id, + deployment_artifacts="app-example-deployment", + environment_name="example", + stack_name="amplify-app-example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_amplify_backend_environment_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `app_id` - (Required) Unique ID for an Amplify app. +* `environment_name` - (Required) Name for the backend environment. +* `deployment_artifacts` - (Optional) Name of deployment artifacts. +* `stack_name` - (Optional) AWS CloudFormation stack name of a backend environment. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN for a backend environment that is part of an Amplify app. +* `id` - Unique ID of the Amplify backend environment. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify backend environment using `app_id` and `environment_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amplify backend environment using `app_id` and `environment_name`. 
For example:

```console
% terraform import aws_amplify_backend_environment.example d2ypk4k47z8u6/example
```

+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/amplify_branch.html.markdown b/website/docs/cdktf/python/r/amplify_branch.html.markdown
new file mode 100644
index 00000000000..3a130a6d822
--- /dev/null
+++ b/website/docs/cdktf/python/r/amplify_branch.html.markdown
@@ -0,0 +1,220 @@
+---
+subcategory: "Amplify"
+layout: "aws"
+page_title: "AWS: aws_amplify_branch"
+description: |-
+  Provides an Amplify Branch resource.
+---
+
+
+
+# Resource: aws_amplify_branch
+
+Provides an Amplify Branch resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.amplify_app import AmplifyApp
+from imports.aws.amplify_branch import AmplifyBranch
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AmplifyApp(self, "example",
+            name="app"
+        )
+        AmplifyBranch(self, "master",
+            app_id=example.id,
+            branch_name="master",
+            environment_variables={
+                "REACT_APP_API_SERVER": "https://api.example.com"
+            },
+            framework="React",
+            stage="PRODUCTION"
+        )
+```
+
+### Basic Authentication
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.amplify_app import AmplifyApp
+from imports.aws.amplify_branch import AmplifyBranch
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AmplifyApp(self, "example",
+            name="app"
+        )
+        AmplifyBranch(self, "master",
+            app_id=example.id,
+            basic_auth_credentials=Token.as_string(
+                Fn.base64encode("username:password")),
+            branch_name="master",
+            enable_basic_auth=True
+        )
+```
+
+### Notifications
+
+Amplify Console uses EventBridge (formerly known as CloudWatch Events) and SNS for email notifications. To implement the same functionality, you need to set `enable_notification` in an `aws_amplify_branch` resource, as well as create an EventBridge rule, an SNS topic, and SNS subscriptions.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.amplify_app import AmplifyApp
+from imports.aws.amplify_branch import AmplifyBranch
+from imports.aws.cloudwatch_event_rule import CloudwatchEventRule
+from imports.aws.cloudwatch_event_target import CloudwatchEventTarget
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.sns_topic import SnsTopic
+from imports.aws.sns_topic_policy import SnsTopicPolicy
+from imports.aws.sns_topic_subscription import SnsTopicSubscription
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AmplifyApp(self, "example",
+            name="app"
+        )
+        master = AmplifyBranch(self, "master",
+            app_id=example.id,
+            branch_name="master",
+            enable_notification=True
+        )
+        amplify_app_master = CloudwatchEventRule(self, "amplify_app_master",
+            description="AWS Amplify build notifications for App: ${" + example.id + "} Branch: ${" + master.branch_name + "}",
+            event_pattern=Token.as_string(
+                Fn.jsonencode({
+                    "detail": {
+                        "app_id": [example.id],
+                        "branch_name": [master.branch_name],
+                        "job_status": ["SUCCEED", "FAILED", "STARTED"]
+                    },
+                    "detail-type": ["Amplify Deployment Status Change"],
+                    "source": ["aws.amplify"]
+                })),
+            name="amplify-${" + example.id + "}-${" + master.branch_name + "}-branch-notification"
+        )
+        aws_sns_topic_amplify_app_master = SnsTopic(self, "amplify_app_master_3",
+            name="amplify-${" + example.id + "}_${" + master.branch_name + "}"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sns_topic_amplify_app_master.override_logical_id("amplify_app_master")
+        SnsTopicSubscription(self, "this",
+            endpoint="user@acme.com",
+            protocol="email",
+            topic_arn=Token.as_string(aws_sns_topic_amplify_app_master.arn)
+        )
+        data_aws_iam_policy_document_amplify_app_master = DataAwsIamPolicyDocument(self, "amplify_app_master_5",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["SNS:Publish"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["events.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=[Token.as_string(aws_sns_topic_amplify_app_master.arn)],
+                sid="Allow_Publish_Events ${" + master.arn + "}"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_amplify_app_master.override_logical_id("amplify_app_master")
+        aws_cloudwatch_event_target_amplify_app_master = CloudwatchEventTarget(self, "amplify_app_master_6",
+            arn=Token.as_string(aws_sns_topic_amplify_app_master.arn),
+            input_transformer=CloudwatchEventTargetInputTransformer(
+                input_paths={
+                    "app_id": "$.detail.appId",
+                    "branch": "$.detail.branchName",
+                    "job_id": "$.detail.jobId",
+                    "region": "$.region",
+                    "status": "$.detail.jobStatus"
+                },
+                input_template="\\\"Build notification from the AWS Amplify Console for app: https://<branch>.<app_id>.amplifyapp.com/. Your build status is <status>. Go to https://console.aws.amazon.com/amplify/home?region=<region>#/<app_id>/<branch>/<job_id> to view details on your build. \\\""
+            ),
+            rule=amplify_app_master.name,
+            target_id=master.branch_name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_cloudwatch_event_target_amplify_app_master.override_logical_id("amplify_app_master") + aws_sns_topic_policy_amplify_app_master = SnsTopicPolicy(self, "amplify_app_master_7", + arn=Token.as_string(aws_sns_topic_amplify_app_master.arn), + policy=Token.as_string(data_aws_iam_policy_document_amplify_app_master.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sns_topic_policy_amplify_app_master.override_logical_id("amplify_app_master") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `app_id` - (Required) Unique ID for an Amplify app. +* `branch_name` - (Required) Name for the branch. +* `backend_environment_arn` - (Optional) ARN for a backend environment that is part of an Amplify app. +* `basic_auth_credentials` - (Optional) Basic authorization credentials for the branch. +* `description` - (Optional) Description for the branch. +* `display_name` - (Optional) Display name for a branch. This is used as the default domain prefix. +* `enable_auto_build` - (Optional) Enables auto building for the branch. +* `enable_basic_auth` - (Optional) Enables basic authorization for the branch. +* `enable_notification` - (Optional) Enables notifications for the branch. +* `enable_performance_mode` - (Optional) Enables performance mode for the branch. +* `enable_pull_request_preview` - (Optional) Enables pull request previews for this branch. +* `environment_variables` - (Optional) Environment variables for the branch. +* `framework` - (Optional) Framework for the branch. +* `pull_request_environment_name` - (Optional) Amplify environment name for the pull request. +* `stage` - (Optional) Describes the current stage for the branch. Valid values: `PRODUCTION`, `BETA`, `DEVELOPMENT`, `EXPERIMENTAL`, `PULL_REQUEST`. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `ttl` - (Optional) Content Time To Live (TTL) for the website in seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the branch. +* `associated_resources` - A list of custom resources that are linked to this branch. +* `custom_domains` - Custom domains for the branch. +* `destination_branch` - Destination branch if the branch is a pull request branch. +* `source_branch` - Source branch if the branch is a pull request branch. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify branch using `app_id` and `branch_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amplify branch using `app_id` and `branch_name`. 
For example: + +```console +% terraform import aws_amplify_branch.master d2ypk4k47z8u6/master +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/amplify_domain_association.html.markdown b/website/docs/cdktf/python/r/amplify_domain_association.html.markdown new file mode 100644 index 00000000000..3c22c01bc81 --- /dev/null +++ b/website/docs/cdktf/python/r/amplify_domain_association.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "Amplify" +layout: "aws" +page_title: "AWS: aws_amplify_domain_association" +description: |- + Provides an Amplify Domain Association resource. +--- + + + +# Resource: aws_amplify_domain_association + +Provides an Amplify Domain Association resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.amplify_app import AmplifyApp +from imports.aws.amplify_branch import AmplifyBranch +from imports.aws.amplify_domain_association import AmplifyDomainAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AmplifyApp(self, "example", + custom_rule=[AmplifyAppCustomRule( + source="https://example.com", + status="302", + target="https://www.example.com" + ) + ], + name="app" + ) + master = AmplifyBranch(self, "master", + app_id=example.id, + branch_name="master" + ) + aws_amplify_domain_association_example = AmplifyDomainAssociation(self, "example_2", + app_id=example.id, + domain_name="example.com", + sub_domain=[AmplifyDomainAssociationSubDomain( + branch_name=master.branch_name, + prefix="" + ), AmplifyDomainAssociationSubDomain( + branch_name=master.branch_name, + prefix="www" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_amplify_domain_association_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `app_id` - (Required) Unique ID for an Amplify app. +* `domain_name` - (Required) Domain name for the domain association. +* `enable_auto_sub_domain` - (Optional) Enables the automated creation of subdomains for branches. +* `sub_domain` - (Required) Setting for the subdomain. Documented below. +* `wait_for_verification` - (Optional) If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`. + +The `sub_domain` configuration block supports the following arguments: + +* `branch_name` - (Required) Branch name setting for the subdomain. +* `prefix` - (Required) Prefix setting for the subdomain. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the domain association. +* `certificate_verification_dns_record` - The DNS record for certificate verification. + +The `sub_domain` configuration block exports the following attributes: + +* `dns_record` - DNS record for the subdomain. +* `verified` - Verified status of the subdomain. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify domain association using `app_id` and `domain_name`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amplify domain association using `app_id` and `domain_name`. For example: + +```console +% terraform import aws_amplify_domain_association.app d2ypk4k47z8u6/example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/amplify_webhook.html.markdown b/website/docs/cdktf/python/r/amplify_webhook.html.markdown new file mode 100644 index 00000000000..27a86284b1d --- /dev/null +++ b/website/docs/cdktf/python/r/amplify_webhook.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Amplify" +layout: "aws" +page_title: "AWS: aws_amplify_webhook" +description: |- + Provides an Amplify Webhook resource. +--- + + + +# Resource: aws_amplify_webhook + +Provides an Amplify Webhook resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.amplify_app import AmplifyApp +from imports.aws.amplify_branch import AmplifyBranch +from imports.aws.amplify_webhook import AmplifyWebhook +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AmplifyApp(self, "example", + name="app" + ) + master = AmplifyBranch(self, "master", + app_id=example.id, + branch_name="master" + ) + aws_amplify_webhook_master = AmplifyWebhook(self, "master_2", + app_id=example.id, + branch_name=master.branch_name, + description="triggermaster" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_amplify_webhook_master.override_logical_id("master") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `app_id` - (Required) Unique ID for an Amplify app. +* `branch_name` - (Required) Name for a branch that is part of the Amplify app. +* `description` - (Optional) Description for a webhook. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the webhook. +* `url` - URL of the webhook. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify webhook using a webhook ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amplify webhook using a webhook ID. 
For example:

```console
% terraform import aws_amplify_webhook.master a26b22a0-748b-4b57-b9a0-ae7e601fe4b1
```

+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/api_gateway_account.html.markdown b/website/docs/cdktf/python/r/api_gateway_account.html.markdown
new file mode 100644
index 00000000000..362d276b767
--- /dev/null
+++ b/website/docs/cdktf/python/r/api_gateway_account.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_account"
+description: |-
+  Provides settings for an API Gateway Account.
+---
+
+
+
+# Resource: aws_api_gateway_account
+
+Provides settings for an API Gateway Account. Settings are applied region-wide per `provider` block.
+
+-> **Note:** As there is no API method for deleting account settings or resetting them to defaults, destroying this resource will keep your account settings intact.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_account import ApiGatewayAccount
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["apigateway.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        cloudwatch = DataAwsIamPolicyDocument(self, "cloudwatch",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:CreateLogGroup", "logs:CreateLogStream", "logs:DescribeLogGroups", "logs:DescribeLogStreams", "logs:PutLogEvents", "logs:GetLogEvents", "logs:FilterLogEvents"
+                ],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        aws_iam_role_cloudwatch = IamRole(self, "cloudwatch_2",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="api_gateway_cloudwatch_global"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_cloudwatch.override_logical_id("cloudwatch")
+        aws_iam_role_policy_cloudwatch = IamRolePolicy(self, "cloudwatch_3",
+            name="default",
+            policy=Token.as_string(cloudwatch.json),
+            role=Token.as_string(aws_iam_role_cloudwatch.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_cloudwatch.override_logical_id("cloudwatch")
+        ApiGatewayAccount(self, "demo",
+            cloudwatch_role_arn=Token.as_string(aws_iam_role_cloudwatch.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cloudwatch_role_arn` - (Optional) ARN of an IAM role for CloudWatch (to allow logging & monitoring). See more [in AWS Docs](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-stage-settings.html#how-to-stage-settings-console). Logging & monitoring can be enabled/disabled and otherwise tuned on the API Gateway Stage level. 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `throttle_settings` - Account-Level throttle settings. See exported fields below. + +`throttle_settings` block exports the following: + +* `burst_limit` - Absolute maximum number of times API Gateway allows the API to be called per second (RPS). +* `rate_limit` - Number of times API Gateway allows the API to be called per second on average (RPS). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway Accounts using the word `api-gateway-account`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import API Gateway Accounts using the word `api-gateway-account`. For example: + +```console +% terraform import aws_api_gateway_account.demo api-gateway-account +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_api_key.html.markdown b/website/docs/cdktf/python/r/api_gateway_api_key.html.markdown new file mode 100644 index 00000000000..cd112eee22d --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_api_key.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_api_key" +description: |- + Provides an API Gateway API Key. +--- + + + +# Resource: aws_api_gateway_api_key + +Provides an API Gateway API Key. + +~> **NOTE:** Since the API Gateway usage plans feature was launched on August 11, 2016, usage plans are now **required** to associate an API key with an API stage. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_api_key import ApiGatewayApiKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ApiGatewayApiKey(self, "MyDemoApiKey", + name="demo" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the API key. +* `description` - (Optional) API key description. Defaults to "Managed by Terraform". +* `enabled` - (Optional) Whether the API key can be used by callers. Defaults to `true`. +* `value` - (Optional) Value of the API key. If specified, the value must be an alphanumeric string between 20 and 128 characters. If not specified, it will be automatically generated by AWS on creation. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
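+
+~> **Note:** As mentioned above, an API key by itself cannot call an API stage; it must be attached to a usage plan. The following hand-written sketch (not `cdktf convert` output; resource names and the stage-less usage plan are illustrative) shows the association via the `aws_api_gateway_usage_plan_key` resource:
+
+```python
+# Hand-written sketch, not generated by 'cdktf convert'.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.api_gateway_api_key import ApiGatewayApiKey
+from imports.aws.api_gateway_usage_plan import ApiGatewayUsagePlan
+from imports.aws.api_gateway_usage_plan_key import ApiGatewayUsagePlanKey
+class ApiKeyUsagePlanSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        key = ApiGatewayApiKey(self, "key", name="demo")
+        # In practice the usage plan would also reference one or more API stages.
+        plan = ApiGatewayUsagePlan(self, "plan", name="demo-usage-plan")
+        # The association that lets the key call the plan's stages.
+        ApiGatewayUsagePlanKey(self, "plan_key",
+            key_id=key.id,
+            key_type="API_KEY",
+            usage_plan_id=plan.id
+        )
+```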
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the API key +* `created_date` - Creation date of the API key +* `last_updated_date` - Last update date of the API key +* `value` - Value of the API key +* `arn` - ARN +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway Keys using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import API Gateway Keys using the `id`. For example: + +```console +% terraform import aws_api_gateway_api_key.my_demo_key 8bklk8bl1k3sB38D9B3l0enyWT8c09B30lkq0blk +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_authorizer.html.markdown b/website/docs/cdktf/python/r/api_gateway_authorizer.html.markdown new file mode 100644 index 00000000000..7f03627d257 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_authorizer.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_authorizer" +description: |- + Provides an API Gateway Authorizer. +--- + + + +# Resource: aws_api_gateway_authorizer + +Provides an API Gateway Authorizer. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.api_gateway_authorizer import ApiGatewayAuthorizer +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.lambda_function import LambdaFunction +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + demo = ApiGatewayRestApi(self, "demo", + name="auth-demo" + ) + invocation_assume_role = DataAwsIamPolicyDocument(self, "invocation_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["apigateway.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + lambda_assume_role = DataAwsIamPolicyDocument(self, "lambda_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["lambda.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + invocation_role = IamRole(self, "invocation_role", + assume_role_policy=Token.as_string(invocation_assume_role.json), + name="api_gateway_auth_invocation", + path="/" + ) + lambda_ = IamRole(self, "lambda", + assume_role_policy=Token.as_string(lambda_assume_role.json), + name="demo-lambda" + ) + authorizer = LambdaFunction(self, "authorizer", + filename="lambda-function.zip", + function_name="api_gateway_authorizer", + handler="exports.example", + role=lambda_.arn, + source_code_hash=Token.as_string( + Fn.filebase64sha256("lambda-function.zip")) + ) + invocation_policy = DataAwsIamPolicyDocument(self, "invocation_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["lambda:InvokeFunction"], + effect="Allow", + resources=[authorizer.arn] + ) + ] + ) + aws_api_gateway_authorizer_demo = ApiGatewayAuthorizer(self, "demo_7", + authorizer_credentials=invocation_role.arn, + authorizer_uri=authorizer.invoke_arn, + name="demo", + rest_api_id=demo.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_authorizer_demo.override_logical_id("demo") + aws_iam_role_policy_invocation_policy = IamRolePolicy(self, "invocation_policy_8", + name="default", + policy=Token.as_string(invocation_policy.json), + role=invocation_role.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_invocation_policy.override_logical_id("invocation_policy") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `authorizer_uri` - (Optional, required for type `TOKEN`/`REQUEST`) Authorizer's Uniform Resource Identifier (URI). This must be a well-formed Lambda function URI in the form of `arn:aws:apigateway:{region}:lambda:path/{service_api}`, + e.g., `arn:aws:apigateway:us-west-2:lambda:path/2015-03-31/functions/arn:aws:lambda:us-west-2:012345678912:function:my-function/invocations` +* `name` - (Required) Name of the authorizer +* `rest_api_id` - (Required) ID of the associated REST API +* `identity_source` - (Optional) Source of the identity in an incoming request. Defaults to `method.request.header.Authorization`. 
For `REQUEST` type, this may be a comma-separated list of values, including headers, query string parameters and stage variables - e.g., `"method.request.header.SomeHeaderName,method.request.querystring.SomeQueryStringName,stageVariables.SomeStageVariableName"` +* `type` - (Optional) Type of the authorizer. Possible values are `TOKEN` for a Lambda function using a single authorization token submitted in a custom header, `REQUEST` for a Lambda function using incoming request parameters, or `COGNITO_USER_POOLS` for using an Amazon Cognito user pool. Defaults to `TOKEN`. +* `authorizer_credentials` - (Optional) Credentials required for the authorizer. To specify an IAM Role for API Gateway to assume, use the IAM Role ARN. +* `authorizer_result_ttl_in_seconds` - (Optional) TTL of cached authorizer results in seconds. Defaults to `300`. +* `identity_validation_expression` - (Optional) Validation expression for the incoming identity. For `TOKEN` type, this value should be a regular expression. The incoming token from the client is matched against this expression, and will proceed if the token matches. If the token doesn't match, the client receives a 401 Unauthorized response. +* `provider_arns` - (Optional, required for type `COGNITO_USER_POOLS`) List of the Amazon Cognito user pool ARNs. Each element is of this format: `arn:aws:cognito-idp:{region}:{account_id}:userpool/{user_pool_id}`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the API Gateway Authorizer +* `id` - Authorizer identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS API Gateway Authorizer using the `REST-API-ID/AUTHORIZER-ID`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS API Gateway Authorizer using the `REST-API-ID/AUTHORIZER-ID`. For example: + +```console +% terraform import aws_api_gateway_authorizer.authorizer 12345abcde/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_base_path_mapping.html.markdown b/website/docs/cdktf/python/r/api_gateway_base_path_mapping.html.markdown new file mode 100644 index 00000000000..98cd528e570 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_base_path_mapping.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_base_path_mapping" +description: |- + Connects a custom domain with a deployed API +--- + + + +# Resource: aws_api_gateway_base_path_mapping + +Connects a custom domain name registered via `aws_api_gateway_domain_name` +with a deployed API so that its methods can be called via the +custom domain name. + +## Example Usage + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/api-gateway-rest-api-openapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). 
+ +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_base_path_mapping import ApiGatewayBasePathMapping +from imports.aws.api_gateway_domain_name import ApiGatewayDomainName +from imports.aws.api_gateway_stage import ApiGatewayStage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApiGatewayDomainName(self, "example", + certificate_body=Token.as_string( + Fn.file("${path.module}/example.com/example.crt")), + certificate_chain=Token.as_string( + Fn.file("${path.module}/example.com/ca.crt")), + certificate_name="example-api", + certificate_private_key=Token.as_string( + Fn.file("${path.module}/example.com/example.key")), + domain_name="example.com" + ) + aws_api_gateway_stage_example = ApiGatewayStage(self, "example_1", + deployment_id=Token.as_string(aws_api_gateway_deployment_example.id), + rest_api_id=Token.as_string(aws_api_gateway_rest_api_example.id), + stage_name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_stage_example.override_logical_id("example") + aws_api_gateway_base_path_mapping_example = ApiGatewayBasePathMapping(self, "example_2", + api_id=Token.as_string(aws_api_gateway_rest_api_example.id), + domain_name=example.domain_name, + stage_name=Token.as_string(aws_api_gateway_stage_example.stage_name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_base_path_mapping_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain_name` - (Required) Already-registered domain name to connect the API to. +* `api_id` - (Required) ID of the API to connect. +* `stage_name` - (Optional) Name of a specific deployment stage to expose at the given path. If omitted, callers may select any stage by including its name as a path element after the base path. +* `base_path` - (Optional) Path segment that must be prepended to the path when accessing the API via this mapping. If omitted, the API is exposed at the root of the given domain. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_base_path_mapping` using the domain name and base path. For example: + +For an empty `base_path` or, in other words, a root path (`/`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +For a non-root `base_path`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_base_path_mapping` using the domain name and base path. 
For example: + +For an empty `base_path` or, in other words, a root path (`/`): + +```console +% terraform import aws_api_gateway_base_path_mapping.example example.com/ +``` + +For a non-root `base_path`: + +```console +% terraform import aws_api_gateway_base_path_mapping.example example.com/base-path +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_client_certificate.html.markdown b/website/docs/cdktf/python/r/api_gateway_client_certificate.html.markdown new file mode 100644 index 00000000000..7ac52043309 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_client_certificate.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_client_certificate" +description: |- + Provides an API Gateway Client Certificate. +--- + + + +# Resource: aws_api_gateway_client_certificate + +Provides an API Gateway Client Certificate. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_client_certificate import ApiGatewayClientCertificate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ApiGatewayClientCertificate(self, "demo", + description="My client certificate" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the client certificate. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the client certificate. +* `created_date` - Date when the client certificate was created. +* `expiration_date` - Date when the client certificate will expire. +* `pem_encoded_certificate` - The PEM-encoded public key of the client certificate. +* `arn` - ARN +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway Client Certificates using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import API Gateway Client Certificates using the id. 
For example:
+
+```console
+% terraform import aws_api_gateway_client_certificate.demo ab1cqe
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_deployment.html.markdown b/website/docs/cdktf/python/r/api_gateway_deployment.html.markdown new file mode 100644 index 00000000000..1e7dbdf4ea2 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_deployment.html.markdown @@ -0,0 +1,190 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_deployment"
+description: |-
+  Manages an API Gateway REST Deployment.
+---
+
+
+
+# Resource: aws_api_gateway_deployment
+
+Manages an API Gateway REST Deployment. A deployment is a snapshot of the REST API configuration. The deployment can then be published to callable endpoints via the [`aws_api_gateway_stage` resource](api_gateway_stage.html) and optionally managed further with the [`aws_api_gateway_base_path_mapping` resource](api_gateway_base_path_mapping.html), [`aws_api_gateway_domain_name` resource](api_gateway_domain_name.html), and [`aws_api_gateway_method_settings` resource](api_gateway_method_settings.html). For more information, see the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-deploy-api.html).
+
+To properly capture all REST API configuration in a deployment, this resource must have dependencies on all prior Terraform resources that manage resources/paths, methods, integrations, etc.
+
+* For REST APIs that are configured via OpenAPI specification ([`aws_api_gateway_rest_api` resource](api_gateway_rest_api.html) `body` argument), no special dependency setup is needed beyond referencing the `id` attribute of that resource unless additional Terraform resources have further customized the REST API.
+* When the REST API configuration involves other Terraform resources ([`aws_api_gateway_integration` resource](api_gateway_integration.html), etc.), the dependency setup can be done with implicit resource references in the `triggers` argument or explicit resource references using the [resource `depends_on` meta-argument](https://www.terraform.io/docs/configuration/meta-arguments/depends_on.html). The `triggers` argument should be preferred over `depends_on`, since `depends_on` can only capture dependency ordering and will not cause the resource to recreate (redeploy the REST API) with upstream configuration changes.
+
+!> **WARNING:** We recommend using the [`aws_api_gateway_stage` resource](api_gateway_stage.html) instead of managing an API Gateway Stage via the `stage_name` argument of this resource. When this resource is recreated (REST API redeployment) with the `stage_name` configured, the stage is deleted and recreated. This will cause a temporary service interruption, increase Terraform plan differences, and can require a second Terraform apply to recreate any downstream stage configuration such as associated `aws_api_gateway_method_settings` resources.
+
+~> **NOTE:** Enable the [resource `lifecycle` configuration block `create_before_destroy` argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy) in this resource configuration to properly order redeployments in Terraform. Without enabling `create_before_destroy`, API Gateway can return errors such as `BadRequestException: Active stages pointing to this deployment must be moved or deleted` on recreation.
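+
+A minimal sketch of the `lifecycle` setting described in the note above, as it appears in CDKTF for Python (the `rest_api_id` reference is illustrative; complete examples follow below):
+
+```python
+from cdktf import TerraformResourceLifecycle
+from imports.aws.api_gateway_deployment import ApiGatewayDeployment
+
+# Create the replacement deployment before destroying the old one, so API
+# Gateway never sees an active stage pointing at a deleted deployment.
+ApiGatewayDeployment(self, "example",
+    rest_api_id=example.id,
+    lifecycle=TerraformResourceLifecycle(
+        create_before_destroy=True
+    )
+)
+```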
+ +## Example Usage + +### OpenAPI Specification + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/api-gateway-rest-api-openapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). + +```terraform +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} +``` + +### Terraform Resources + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_deployment import ApiGatewayDeployment +from imports.aws.api_gateway_integration import ApiGatewayIntegration +from imports.aws.api_gateway_method import ApiGatewayMethod +from imports.aws.api_gateway_resource import ApiGatewayResource +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +from imports.aws.api_gateway_stage import ApiGatewayStage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApiGatewayRestApi(self, "example", + name="example" + ) + aws_api_gateway_resource_example = ApiGatewayResource(self, "example_1", + parent_id=example.root_resource_id, + path_part="example", + rest_api_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_resource_example.override_logical_id("example") + aws_api_gateway_method_example = ApiGatewayMethod(self, "example_2", + authorization="NONE", + http_method="GET", + resource_id=Token.as_string(aws_api_gateway_resource_example.id), + rest_api_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_method_example.override_logical_id("example") + aws_api_gateway_integration_example = ApiGatewayIntegration(self, "example_3", + http_method=Token.as_string(aws_api_gateway_method_example.http_method), + resource_id=Token.as_string(aws_api_gateway_resource_example.id), + rest_api_id=example.id, + type="MOCK" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_api_gateway_integration_example.override_logical_id("example") + aws_api_gateway_deployment_example = ApiGatewayDeployment(self, "example_4", + lifecycle=TerraformResourceLifecycle( + create_before_destroy=True + ), + rest_api_id=example.id, + triggers={ + "redeployment": Token.as_string( + Fn.sha1( + Token.as_string( + Fn.jsonencode([aws_api_gateway_resource_example.id, aws_api_gateway_method_example.id, aws_api_gateway_integration_example.id + ])))) + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_deployment_example.override_logical_id("example") + aws_api_gateway_stage_example = ApiGatewayStage(self, "example_5", + deployment_id=Token.as_string(aws_api_gateway_deployment_example.id), + rest_api_id=example.id, + stage_name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_stage_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rest_api_id` - (Required) REST API identifier. +* `description` - (Optional) Description of the deployment +* `stage_name` - (Optional) Name of the stage to create with this deployment. If the specified stage already exists, it will be updated to point to the new deployment. We recommend using the [`aws_api_gateway_stage` resource](api_gateway_stage.html) instead to manage stages. +* `stage_description` - (Optional) Description to set on the stage managed by the `stage_name` argument. +* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a redeployment. To force a redeployment without changing these keys/values, use the [`-replace` option](https://developer.hashicorp.com/terraform/cli/commands/plan#replace-address) with `terraform plan` or `terraform apply`. +* `variables` - (Optional) Map to set on the stage managed by the `stage_name` argument. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the deployment +* `invoke_url` - URL to invoke the API pointing to the stage, + e.g., `https://z4675bid1j.execute-api.eu-west-2.amazonaws.com/prod` +* `execution_arn` - Execution ARN to be used in [`lambda_permission`](/docs/providers/aws/r/lambda_permission.html)'s `source_arn` + when allowing API Gateway to invoke a Lambda function, + e.g., `arn:aws:execute-api:eu-west-2:123456789012:z4675bid1j/prod` +* `created_date` - Creation date of the deployment + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_deployment` using `REST-API-ID/DEPLOYMENT-ID`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_deployment` using `REST-API-ID/DEPLOYMENT-ID`. For example: + +```console +% terraform import aws_api_gateway_deployment.example aabbccddee/1122334 +``` + +The `stage_name`, `stage_description`, and `variables` arguments cannot be imported. Use the [`aws_api_gateway_stage` resource](api_gateway_stage.html) to import and manage stages. + +The `triggers` argument cannot be imported. 
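+
+As noted for the `triggers` argument above, a redeployment can be forced without changing any trigger values by passing the `-replace` option, for example:
+
+```console
+% terraform plan -replace=aws_api_gateway_deployment.example
+```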
+
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_documentation_part.html.markdown b/website/docs/cdktf/python/r/api_gateway_documentation_part.html.markdown new file mode 100644 index 00000000000..0f0bd0485a8 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_documentation_part.html.markdown @@ -0,0 +1,92 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_documentation_part"
+description: |-
+  Provides an API Gateway Documentation Part.
+---
+
+
+
+# Resource: aws_api_gateway_documentation_part
+
+Provides an API Gateway Documentation Part.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_documentation_part import ApiGatewayDocumentationPart, ApiGatewayDocumentationPartLocation
+from imports.aws.api_gateway_rest_api import ApiGatewayRestApi
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = ApiGatewayRestApi(self, "example",
+            name="example_api"
+        )
+        aws_api_gateway_documentation_part_example = ApiGatewayDocumentationPart(self, "example_1",
+            location=ApiGatewayDocumentationPartLocation(
+                method="GET",
+                path="/example",
+                type="METHOD"
+            ),
+            properties="{\"description\":\"Example description\"}",
+            rest_api_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_api_gateway_documentation_part_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `location` - (Required) Location of the targeted API entity of the to-be-created documentation part. See below.
+* `properties` - (Required) Content map of API-specific key-value pairs describing the targeted API entity. The map must be encoded as a JSON string, e.g., "{ \"description\": \"The API does ...\" }". Only Swagger-compliant key-value pairs can be exported and, hence, published.
+* `rest_api_id` - (Required) ID of the associated Rest API
+
+### Nested fields
+
+#### `location`
+
+See supported entity types for each field in the [official docs](https://docs.aws.amazon.com/apigateway/api-reference/resource/documentation-part/).
+
+* `method` - (Optional) HTTP verb of a method. The default value is `*` for any method.
+* `name` - (Optional) Name of the targeted API entity.
+* `path` - (Optional) URL path of the target. The default value is `/` for the root resource.
+* `status_code` - (Optional) HTTP status code of a response. The default value is `*` for any status code.
+* `type` - (Required) Type of API entity to which the documentation content applies, e.g., `API`, `METHOD` or `REQUEST_BODY`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the Documentation Part
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway documentation parts using `REST-API-ID/DOC-PART-ID`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import API Gateway documentation parts using `REST-API-ID/DOC-PART-ID`. For example:
+
+```console
+% terraform import aws_api_gateway_documentation_part.example 5i4e1ko720/3oyy3t
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_documentation_version.html.markdown b/website/docs/cdktf/python/r/api_gateway_documentation_version.html.markdown new file mode 100644 index 00000000000..5411afb7f61 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_documentation_version.html.markdown @@ -0,0 +1,86 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_documentation_version"
+description: |-
+  Provides a resource to manage an API Gateway Documentation Version.
+---
+
+
+
+# Resource: aws_api_gateway_documentation_version
+
+Provides a resource to manage an API Gateway Documentation Version.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_documentation_part import ApiGatewayDocumentationPart, ApiGatewayDocumentationPartLocation
+from imports.aws.api_gateway_documentation_version import ApiGatewayDocumentationVersion
+from imports.aws.api_gateway_rest_api import ApiGatewayRestApi
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = ApiGatewayRestApi(self, "example",
+            name="example_api"
+        )
+        aws_api_gateway_documentation_part_example = ApiGatewayDocumentationPart(self, "example_1",
+            location=ApiGatewayDocumentationPartLocation(
+                type="API"
+            ),
+            properties="{\"description\":\"Example\"}",
+            rest_api_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_api_gateway_documentation_part_example.override_logical_id("example")
+        aws_api_gateway_documentation_version_example = ApiGatewayDocumentationVersion(self, "example_2",
+            depends_on=[aws_api_gateway_documentation_part_example],
+            description="Example description",
+            rest_api_id=example.id,
+            version="example_version"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_api_gateway_documentation_version_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `version` - (Required) Version identifier of the API documentation snapshot.
+* `rest_api_id` - (Required) ID of the associated Rest API
+* `description` - (Optional) Description of the API documentation version.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway documentation versions using `REST-API-ID/VERSION`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import API Gateway documentation versions using `REST-API-ID/VERSION`. For example:
+
+```console
+% terraform import aws_api_gateway_documentation_version.example 5i4e1ko720/example-version
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_domain_name.html.markdown b/website/docs/cdktf/python/r/api_gateway_domain_name.html.markdown new file mode 100644 index 00000000000..b0063888390 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_domain_name.html.markdown @@ -0,0 +1,262 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_domain_name"
+description: |-
+  Registers a custom domain name for use with AWS API Gateway.
+---
+
+
+
+# Resource: aws_api_gateway_domain_name
+
+Registers a custom domain name for use with AWS API Gateway. Additional information about this functionality can be found in the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-custom-domains.html).
+
+This resource just establishes ownership of and the TLS settings for a particular domain name. An API can be attached to a particular path under the registered domain name using [the `aws_api_gateway_base_path_mapping` resource](api_gateway_base_path_mapping.html).
+
+API Gateway domains can be defined as either 'edge-optimized' or 'regional'. In an edge-optimized configuration, API Gateway internally creates and manages a CloudFront distribution to route requests on the given hostname. In addition to this resource it's necessary to create a DNS record corresponding to the given domain name which is an alias (either Route53 alias or traditional CNAME) to the CloudFront domain name exported in the `cloudfront_domain_name` attribute.
+
+In a regional configuration, API Gateway does not create a CloudFront distribution to route requests to the API, though a distribution can be created if needed. In either case, it is necessary to create a DNS record corresponding to the given domain name which is an alias (either Route53 alias or traditional CNAME) to the regional domain name exported in the `regional_domain_name` attribute.
+
+~> **Note:** API Gateway requires the use of AWS Certificate Manager (ACM) certificates instead of Identity and Access Management (IAM) certificates in regions that support ACM. Regions that support ACM can be found in the [Regions and Endpoints Documentation](https://docs.aws.amazon.com/general/latest/gr/rande.html#acm_region). To import an existing private key and certificate into ACM or request an ACM certificate, see the [`aws_acm_certificate` resource](/docs/providers/aws/r/acm_certificate.html).
+
+~> **Note:** The `aws_api_gateway_domain_name` resource expects a dependency on the `aws_acm_certificate_validation` resource, since only validated certificates can be used. You can declare this dependency either explicitly, with `depends_on = [aws_acm_certificate_validation.cert]`, or implicitly, by referencing the certificate ARN from the validation resource, where it becomes available after resource creation: `regional_certificate_arn = aws_acm_certificate_validation.cert.certificate_arn`.
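+
+A minimal CDKTF (Python) sketch of the implicit dependency just described, assuming an `aws_acm_certificate_validation` resource held in a variable named `cert_validation` (the variable name is illustrative):
+
+```python
+from cdktf import Token
+from imports.aws.api_gateway_domain_name import ApiGatewayDomainName
+
+# Referencing certificate_arn from the validation resource makes Terraform
+# create the domain name only after the certificate has been validated.
+ApiGatewayDomainName(self, "example",
+    domain_name="api.example.com",
+    regional_certificate_arn=Token.as_string(cert_validation.certificate_arn),
+    endpoint_configuration=ApiGatewayDomainNameEndpointConfiguration(
+        types=["REGIONAL"]
+    )
+)
+```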
+ +~> **Note:** All arguments including the private key will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/api-gateway-rest-api-openapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). + +### Edge Optimized (ACM Certificate) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_domain_name import ApiGatewayDomainName +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApiGatewayDomainName(self, "example", + certificate_arn=Token.as_string(aws_acm_certificate_validation_example.certificate_arn), + domain_name="api.example.com" + ) + aws_route53_record_example = Route53Record(self, "example_1", + alias=Route53RecordAlias( + evaluate_target_health=True, + name=example.cloudfront_domain_name, + zone_id=example.cloudfront_zone_id + ), + name=example.domain_name, + type="A", + zone_id=Token.as_string(aws_route53_zone_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_record_example.override_logical_id("example") +``` + +### Edge Optimized (IAM Certificate) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_domain_name import ApiGatewayDomainName +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApiGatewayDomainName(self, "example", + certificate_body=Token.as_string( + Fn.file("${path.module}/example.com/example.crt")), + certificate_chain=Token.as_string( + Fn.file("${path.module}/example.com/ca.crt")), + certificate_name="example-api", + certificate_private_key=Token.as_string( + Fn.file("${path.module}/example.com/example.key")), + domain_name="api.example.com" + ) + aws_route53_record_example = Route53Record(self, "example_1", + alias=Route53RecordAlias( + evaluate_target_health=True, + name=example.cloudfront_domain_name, + zone_id=example.cloudfront_zone_id + ), + name=example.domain_name, + type="A", + zone_id=Token.as_string(aws_route53_zone_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_record_example.override_logical_id("example") +``` + +### Regional (ACM Certificate) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.api_gateway_domain_name import ApiGatewayDomainName +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApiGatewayDomainName(self, "example", + domain_name="api.example.com", + endpoint_configuration=ApiGatewayDomainNameEndpointConfiguration( + types=["REGIONAL"] + ), + regional_certificate_arn=Token.as_string(aws_acm_certificate_validation_example.certificate_arn) + ) + aws_route53_record_example = Route53Record(self, "example_1", + alias=Route53RecordAlias( + evaluate_target_health=True, + name=example.regional_domain_name, + zone_id=example.regional_zone_id + ), + name=example.domain_name, + type="A", + zone_id=Token.as_string(aws_route53_zone_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_record_example.override_logical_id("example") +``` + +### Regional (IAM Certificate) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_domain_name import ApiGatewayDomainName +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApiGatewayDomainName(self, "example", + certificate_body=Token.as_string( + Fn.file("${path.module}/example.com/example.crt")), + certificate_chain=Token.as_string( + Fn.file("${path.module}/example.com/ca.crt")), + certificate_private_key=Token.as_string( + Fn.file("${path.module}/example.com/example.key")), + domain_name="api.example.com", + endpoint_configuration=ApiGatewayDomainNameEndpointConfiguration( + types=["REGIONAL"] + ), + regional_certificate_name="example-api" + ) + aws_route53_record_example = Route53Record(self, "example_1", + alias=Route53RecordAlias( + evaluate_target_health=True, + name=example.regional_domain_name, + zone_id=example.regional_zone_id + ), + name=example.domain_name, + type="A", + zone_id=Token.as_string(aws_route53_zone_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_record_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain_name` - (Required) Fully-qualified domain name to register. +* `endpoint_configuration` - (Optional) Configuration block defining API endpoint information including type. See below. +* `mutual_tls_authentication` - (Optional) Mutual TLS authentication configuration for the domain name. See below. +* `ownership_verification_certificate_arn` - (Optional) ARN of the AWS-issued certificate used to validate custom domain ownership (when `certificate_arn` is issued via an ACM Private CA or `mutual_tls_authentication` is configured with an ACM-imported certificate.) +* `security_policy` - (Optional) Transport Layer Security (TLS) version + cipher suite for this DomainName. Valid values are `TLS_1_0` and `TLS_1_2`. Must be configured to perform drift detection. +* `tags` - (Optional) Key-value map of resource tags. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +When referencing an AWS-managed certificate, the following arguments are supported: + +* `certificate_arn` - (Optional) ARN for an AWS-managed certificate. AWS Certificate Manager is the only supported source. Used when an edge-optimized domain name is desired. Conflicts with `certificate_name`, `certificate_body`, `certificate_chain`, `certificate_private_key`, `regional_certificate_arn`, and `regional_certificate_name`. +* `regional_certificate_arn` - (Optional) ARN for an AWS-managed certificate. AWS Certificate Manager is the only supported source. Used when a regional domain name is desired. Conflicts with `certificate_arn`, `certificate_name`, `certificate_body`, `certificate_chain`, and `certificate_private_key`. + +When uploading a certificate, the following arguments are supported: + +* `certificate_body` - (Optional) Certificate issued for the domain name being registered, in PEM format. Only valid for `EDGE` endpoint configuration type. Conflicts with `certificate_arn`, `regional_certificate_arn`, and `regional_certificate_name`. +* `certificate_chain` - (Optional) Certificate for the CA that issued the certificate, along with any intermediate CA certificates required to create an unbroken chain to a certificate trusted by the intended API clients. Only valid for `EDGE` endpoint configuration type. Conflicts with `certificate_arn`, `regional_certificate_arn`, and `regional_certificate_name`. +* `certificate_name` - (Optional) Unique name to use when registering this certificate as an IAM server certificate. Conflicts with `certificate_arn`, `regional_certificate_arn`, and `regional_certificate_name`. Required if `certificate_arn` is not set. +* `certificate_private_key` - (Optional) Private key associated with the domain certificate given in `certificate_body`. Only valid for `EDGE` endpoint configuration type. Conflicts with `certificate_arn`, `regional_certificate_arn`, and `regional_certificate_name`. +* `regional_certificate_name` - (Optional) User-friendly name of the certificate that will be used by regional endpoint for this domain name. Conflicts with `certificate_arn`, `certificate_name`, `certificate_body`, `certificate_chain`, and `certificate_private_key`. + +### endpoint_configuration + +* `types` - (Required) List of endpoint types. This resource currently only supports managing a single value. Valid values: `EDGE` or `REGIONAL`. If unspecified, defaults to `EDGE`. Must be declared as `REGIONAL` in non-Commercial partitions. Refer to the [documentation](https://docs.aws.amazon.com/apigateway/latest/developerguide/create-regional-api.html) for more information on the difference between edge-optimized and regional APIs. + +### mutual_tls_authentication + +* `truststore_uri` - (Required) Amazon S3 URL that specifies the truststore for mutual TLS authentication, for example, `s3://bucket-name/key-name`. The truststore can contain certificates from public or private certificate authorities. To update the truststore, upload a new version to S3, and then update your custom domain name to use the new version. +* `truststore_version` - (Optional) Version of the S3 object that contains the truststore. To specify a version, you must have versioning enabled for the S3 bucket. 
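+
+A sketch of the `mutual_tls_authentication` block described above, assuming the struct name follows the same generated-binding convention as `ApiGatewayDomainNameEndpointConfiguration`; the certificate reference and S3 URI are placeholders:
+
+```python
+from cdktf import Token
+from imports.aws.api_gateway_domain_name import ApiGatewayDomainName
+
+ApiGatewayDomainName(self, "example",
+    domain_name="api.example.com",
+    regional_certificate_arn=Token.as_string(cert_validation.certificate_arn),
+    endpoint_configuration=ApiGatewayDomainNameEndpointConfiguration(
+        types=["REGIONAL"]
+    ),
+    # Assumed struct name per the binding convention; truststore object is a placeholder.
+    mutual_tls_authentication=ApiGatewayDomainNameMutualTlsAuthentication(
+        truststore_uri="s3://bucket-name/key-name"
+    )
+)
+```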
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of domain name. +* `certificate_upload_date` - Upload date associated with the domain certificate. +* `cloudfront_domain_name` - Hostname created by Cloudfront to represent the distribution that implements this domain name mapping. +* `cloudfront_zone_id` - For convenience, the hosted zone ID (`Z2FDTNDATAQYW2`) that can be used to create a Route53 alias record for the distribution. +* `id` - Internal identifier assigned to this domain name by API Gateway. +* `regional_domain_name` - Hostname for the custom domain's regional endpoint. +* `regional_zone_id` - Hosted zone ID that can be used to create a Route53 alias record for the regional endpoint. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway domain names using their `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import API Gateway domain names using their `name`. For example: + +```console +% terraform import aws_api_gateway_domain_name.example dev.example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_gateway_response.markdown b/website/docs/cdktf/python/r/api_gateway_gateway_response.markdown new file mode 100644 index 00000000000..103379396d9 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_gateway_response.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_gateway_response" +description: |- + Provides an API Gateway Gateway Response for a REST API Gateway. +--- + + + +# Resource: aws_api_gateway_gateway_response + +Provides an API Gateway Gateway Response for a REST API Gateway. + +## Example Usage + +```terraform +resource "aws_api_gateway_rest_api" "main" { + name = "MyDemoAPI" +} + +resource "aws_api_gateway_gateway_response" "test" { + rest_api_id = aws_api_gateway_rest_api.main.id + status_code = "401" + response_type = "UNAUTHORIZED" + + response_templates = { + "application/json" = "{\"message\":$context.error.messageString}" + } + + response_parameters = { + "gatewayresponse.header.Authorization" = "'Basic'" + } +} +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rest_api_id` - (Required) String identifier of the associated REST API. +* `response_type` - (Required) Response type of the associated GatewayResponse. +* `status_code` - (Optional) HTTP status code of the Gateway Response. +* `response_templates` - (Optional) Map of templates used to transform the response body. +* `response_parameters` - (Optional) Map of parameters (paths, query strings and headers) of the Gateway Response. + +## Attribute Reference + +This resource exports no additional attributes. 
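+
+For reference, a CDKTF (Python) sketch equivalent to the HCL example above, assuming provider bindings generated by `cdktf get` (the `ApiGatewayGatewayResponse` class name follows the usual binding convention):
+
+```python
+from imports.aws.api_gateway_gateway_response import ApiGatewayGatewayResponse
+from imports.aws.api_gateway_rest_api import ApiGatewayRestApi
+
+main = ApiGatewayRestApi(self, "main", name="MyDemoAPI")
+ApiGatewayGatewayResponse(self, "test",
+    rest_api_id=main.id,
+    response_type="UNAUTHORIZED",
+    status_code="401",
+    response_templates={
+        "application/json": "{\"message\":$context.error.messageString}"
+    },
+    response_parameters={
+        "gatewayresponse.header.Authorization": "'Basic'"
+    }
+)
+```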
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_gateway_response` using `REST-API-ID/RESPONSE-TYPE`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_api_gateway_gateway_response` using `REST-API-ID/RESPONSE-TYPE`. For example:
+
+```console
+% terraform import aws_api_gateway_gateway_response.example 12345abcde/UNAUTHORIZED
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_integration.html.markdown b/website/docs/cdktf/python/r/api_gateway_integration.html.markdown new file mode 100644 index 00000000000..f279877abe2 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_integration.html.markdown @@ -0,0 +1,264 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_integration"
+description: |-
+  Provides an HTTP Method Integration for an API Gateway Resource.
+---
+
+
+
+# Resource: aws_api_gateway_integration
+
+Provides an HTTP Method Integration for an API Gateway Resource.
+
+## Example Usage
+
+```terraform
+resource "aws_api_gateway_rest_api" "MyDemoAPI" {
+  name        = "MyDemoAPI"
+  description = "This is my API for demonstration purposes"
+}
+
+resource "aws_api_gateway_resource" "MyDemoResource" {
+  rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id
+  parent_id   = aws_api_gateway_rest_api.MyDemoAPI.root_resource_id
+  path_part   = "mydemoresource"
+}
+
+resource "aws_api_gateway_method" "MyDemoMethod" {
+  rest_api_id   = aws_api_gateway_rest_api.MyDemoAPI.id
+  resource_id   = aws_api_gateway_resource.MyDemoResource.id
+  http_method   = "GET"
+  authorization = "NONE"
+}
+
+resource "aws_api_gateway_integration" "MyDemoIntegration" {
+  rest_api_id          = aws_api_gateway_rest_api.MyDemoAPI.id
+  resource_id          = aws_api_gateway_resource.MyDemoResource.id
+  http_method          = aws_api_gateway_method.MyDemoMethod.http_method
+  type                 = "MOCK"
+  cache_key_parameters = ["method.request.path.param"]
+  cache_namespace      = "foobar"
+  timeout_milliseconds = 29000

+  request_parameters = {
+    "integration.request.header.X-Authorization" = "'static'"
+  }
+
+  # Transforms the incoming XML request to JSON
+  request_templates = {
+    "application/xml" = <<EOF
+{
+   "body" : $input.json('$')
+}
+EOF
+  }
+}
+```
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_integration_response.html.markdown b/website/docs/cdktf/python/r/api_gateway_integration_response.html.markdown new file mode 100644 index 00000000000..1b37d150b1c --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_integration_response.html.markdown @@ -0,0 +1,114 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_integration_response"
+description: |-
+  Provides an HTTP Method Integration Response for an API Gateway Resource.
+---
+
+
+
+# Resource: aws_api_gateway_integration_response
+
+Provides an HTTP Method Integration Response for an API Gateway Resource.
+
+-> **Note:** Depends on having `aws_api_gateway_integration` inside your REST API. To ensure this, you might need to add an explicit `depends_on` for clean runs.
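+
+A minimal sketch of the explicit `depends_on` mentioned in the note; `integration` is an illustrative handle on the corresponding `ApiGatewayIntegration` construct, and the other references reuse names from the example below:
+
+```python
+ApiGatewayIntegrationResponse(self, "MyDemoIntegrationResponse",
+    depends_on=[integration],  # force ordering behind the integration
+    http_method=my_demo_method.http_method,
+    resource_id=my_demo_resource.id,
+    rest_api_id=my_demo_api.id,
+    status_code="200"
+)
+```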
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_integration import ApiGatewayIntegration
+from imports.aws.api_gateway_integration_response import ApiGatewayIntegrationResponse
+from imports.aws.api_gateway_method import ApiGatewayMethod
+from imports.aws.api_gateway_method_response import ApiGatewayMethodResponse
+from imports.aws.api_gateway_resource import ApiGatewayResource
+from imports.aws.api_gateway_rest_api import ApiGatewayRestApi
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        my_demo_api = ApiGatewayRestApi(self, "MyDemoAPI",
+            description="This is my API for demonstration purposes",
+            name="MyDemoAPI"
+        )
+        my_demo_resource = ApiGatewayResource(self, "MyDemoResource",
+            parent_id=my_demo_api.root_resource_id,
+            path_part="mydemoresource",
+            rest_api_id=my_demo_api.id
+        )
+        my_demo_method = ApiGatewayMethod(self, "MyDemoMethod",
+            authorization="NONE",
+            http_method="GET",
+            resource_id=my_demo_resource.id,
+            rest_api_id=my_demo_api.id
+        )
+        response200 = ApiGatewayMethodResponse(self, "response_200",
+            http_method=my_demo_method.http_method,
+            resource_id=my_demo_resource.id,
+            rest_api_id=my_demo_api.id,
+            status_code="200"
+        )
+        ApiGatewayIntegration(self, "MyDemoIntegration",
+            http_method=my_demo_method.http_method,
+            resource_id=my_demo_resource.id,
+            rest_api_id=my_demo_api.id,
+            type="MOCK"
+        )
+        ApiGatewayIntegrationResponse(self, "MyDemoIntegrationResponse",
+            http_method=my_demo_method.http_method,
+            resource_id=my_demo_resource.id,
+            response_templates={
+                "application/xml": "#set($inputRoot = $input.path('$'))\n<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<message>\n    $inputRoot.body\n</message>\n"
+            },
+            rest_api_id=my_demo_api.id,
+            status_code=response200.status_code
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `http_method` - (Required) HTTP method (`GET`, `POST`, `PUT`, `DELETE`, `HEAD`, `OPTIONS`, `ANY`).
+* `resource_id` - (Required) API resource ID.
+* `rest_api_id` - (Required) ID of the associated REST API.
+* `status_code` - (Required) HTTP status code.
+
+The following arguments are optional:
+
+* `content_handling` - (Optional) How to handle response payload content type conversions. Supported values are `CONVERT_TO_BINARY` and `CONVERT_TO_TEXT`. If this property is not defined, the response payload will be passed through from the integration response to the method response without modification.
+* `response_parameters` - (Optional) Map of response parameters that can be read from the backend response. For example: `response_parameters = { "method.response.header.X-Some-Header" = "integration.response.header.X-Some-Other-Header" }`.
+* `response_templates` - (Optional) Map of templates used to transform the integration response body.
+* `selection_pattern` - (Optional) Regular expression pattern used to choose an integration response based on the response from the backend. Omit configuring this to make the integration response the default one. If the backend is an `AWS` Lambda function, the AWS Lambda function error header is matched. For all other `HTTP` and `AWS` backends, the HTTP status code is matched.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
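+
+As a sketch of `selection_pattern` against an HTTP backend (resource references reuse the example above; the pattern and status code are illustrative), matching any `5xx` backend status and mapping it to a `500` method response:
+
+```python
+ApiGatewayIntegrationResponse(self, "error_5xx",
+    http_method=my_demo_method.http_method,
+    resource_id=my_demo_resource.id,
+    rest_api_id=my_demo_api.id,
+    status_code="500",
+    selection_pattern="5\\d{2}"  # regex matched against the backend HTTP status code
+)
+```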
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_integration_response` using `REST-API-ID/RESOURCE-ID/HTTP-METHOD/STATUS-CODE`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_integration_response` using `REST-API-ID/RESOURCE-ID/HTTP-METHOD/STATUS-CODE`. For example: + +```console +% terraform import aws_api_gateway_integration_response.example 12345abcde/67890fghij/GET/200 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_method.html.markdown b/website/docs/cdktf/python/r/api_gateway_method.html.markdown new file mode 100644 index 00000000000..7450e4174c2 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_method.html.markdown @@ -0,0 +1,146 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_method" +description: |- + Provides a HTTP Method for an API Gateway Resource. +--- + + + +# Resource: aws_api_gateway_method + +Provides a HTTP Method for an API Gateway Resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_method import ApiGatewayMethod +from imports.aws.api_gateway_resource import ApiGatewayResource +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + my_demo_api = ApiGatewayRestApi(self, "MyDemoAPI", + description="This is my API for demonstration purposes", + name="MyDemoAPI" + ) + my_demo_resource = ApiGatewayResource(self, "MyDemoResource", + parent_id=my_demo_api.root_resource_id, + path_part="mydemoresource", + rest_api_id=my_demo_api.id + ) + ApiGatewayMethod(self, "MyDemoMethod", + authorization="NONE", + http_method="GET", + resource_id=my_demo_resource.id, + rest_api_id=my_demo_api.id + ) +``` + +## Usage with Cognito User Pool Authorizer + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_authorizer import ApiGatewayAuthorizer +from imports.aws.api_gateway_method import ApiGatewayMethod +from imports.aws.api_gateway_resource import ApiGatewayResource +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +from imports.aws.data_aws_cognito_user_pools import DataAwsCognitoUserPools +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. 
+ # You can read more about this at https://cdk.tf/variables + cognito_user_pool_name = TerraformVariable(self, "cognito_user_pool_name") + this_var = ApiGatewayRestApi(self, "this", + name="with-authorizer" + ) + data_aws_cognito_user_pools_this = DataAwsCognitoUserPools(self, "this_2", + name=cognito_user_pool_name.string_value + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_cognito_user_pools_this.override_logical_id("this") + aws_api_gateway_authorizer_this = ApiGatewayAuthorizer(self, "this_3", + name="CognitoUserPoolAuthorizer", + provider_arns=Token.as_list(data_aws_cognito_user_pools_this.arns), + rest_api_id=this_var.id, + type="COGNITO_USER_POOLS" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_authorizer_this.override_logical_id("this") + aws_api_gateway_resource_this = ApiGatewayResource(self, "this_4", + parent_id=this_var.root_resource_id, + path_part="{proxy+}", + rest_api_id=this_var.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_resource_this.override_logical_id("this") + ApiGatewayMethod(self, "any", + authorization="COGNITO_USER_POOLS", + authorizer_id=Token.as_string(aws_api_gateway_authorizer_this.id), + http_method="ANY", + request_parameters={ + "method.request.path.proxy": True + }, + resource_id=Token.as_string(aws_api_gateway_resource_this.id), + rest_api_id=this_var.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rest_api_id` - (Required) ID of the associated REST API +* `resource_id` - (Required) API resource ID +* `http_method` - (Required) HTTP Method (`GET`, `POST`, `PUT`, `DELETE`, `HEAD`, `OPTIONS`, `ANY`) +* `authorization` - (Required) Type of authorization used for the method (`NONE`, `CUSTOM`, `AWS_IAM`, `COGNITO_USER_POOLS`) +* `authorizer_id` - (Optional) Authorizer id to be used when the authorization is `CUSTOM` or `COGNITO_USER_POOLS` +* `authorization_scopes` - (Optional) Authorization scopes used when the authorization is `COGNITO_USER_POOLS` +* `api_key_required` - (Optional) Specify if the method requires an API key +* `operation_name` - (Optional) Function name that will be given to the method when generating an SDK through API Gateway. If omitted, API Gateway will generate a function name based on the resource path and HTTP verb. +* `request_models` - (Optional) Map of the API models used for the request's content type + where key is the content type (e.g., `application/json`) + and value is either `Error`, `Empty` (built-in models) or `aws_api_gateway_model`'s `name`. +* `request_validator_id` - (Optional) ID of a `aws_api_gateway_request_validator` +* `request_parameters` - (Optional) Map of request parameters (from the path, query string and headers) that should be passed to the integration. The boolean value indicates whether the parameter is required (`true`) or optional (`false`). + For example: `request_parameters = {"method.request.header.X-Some-Header" = true "method.request.querystring.some-query-param" = true}` would define that the header `X-Some-Header` and the query string `some-query-param` must be provided in the request. + +## Attribute Reference + +This resource exports no additional attributes. 
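+
+A short sketch of `request_models`, mapping the request content type to a model name; `user_model` is an illustrative handle on an `aws_api_gateway_model` such as the `user` model documented for that resource:
+
+```python
+ApiGatewayMethod(self, "post_user",
+    rest_api_id=my_demo_api.id,
+    resource_id=my_demo_resource.id,
+    http_method="POST",
+    authorization="NONE",
+    request_models={
+        "application/json": user_model.name  # or the built-in "Empty"/"Error" models
+    }
+)
+```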
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_method` using `REST-API-ID/RESOURCE-ID/HTTP-METHOD`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_method` using `REST-API-ID/RESOURCE-ID/HTTP-METHOD`. For example: + +```console +% terraform import aws_api_gateway_method.example 12345abcde/67890fghij/GET +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_method_response.html.markdown b/website/docs/cdktf/python/r/api_gateway_method_response.html.markdown new file mode 100644 index 00000000000..db634c96b48 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_method_response.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_method_response" +description: |- + Provides an HTTP Method Response for an API Gateway Resource. +--- + + + +# Resource: aws_api_gateway_method_response + +Provides an HTTP Method Response for an API Gateway Resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_integration import ApiGatewayIntegration +from imports.aws.api_gateway_method import ApiGatewayMethod +from imports.aws.api_gateway_method_response import ApiGatewayMethodResponse +from imports.aws.api_gateway_resource import ApiGatewayResource +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + my_demo_api = ApiGatewayRestApi(self, "MyDemoAPI", + description="This is my API for demonstration purposes", + name="MyDemoAPI" + ) + my_demo_resource = ApiGatewayResource(self, "MyDemoResource", + parent_id=my_demo_api.root_resource_id, + path_part="mydemoresource", + rest_api_id=my_demo_api.id + ) + my_demo_method = ApiGatewayMethod(self, "MyDemoMethod", + authorization="NONE", + http_method="GET", + resource_id=my_demo_resource.id, + rest_api_id=my_demo_api.id + ) + ApiGatewayMethodResponse(self, "response_200", + http_method=my_demo_method.http_method, + resource_id=my_demo_resource.id, + rest_api_id=my_demo_api.id, + status_code="200" + ) + ApiGatewayIntegration(self, "MyDemoIntegration", + http_method=my_demo_method.http_method, + resource_id=my_demo_resource.id, + rest_api_id=my_demo_api.id, + type="MOCK" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rest_api_id` - (Required) ID of the associated REST API +* `resource_id` - (Required) API resource ID +* `http_method` - (Required) HTTP Method (`GET`, `POST`, `PUT`, `DELETE`, `HEAD`, `OPTIONS`, `ANY`) +* `status_code` - (Required) HTTP status code +* `response_models` - (Optional) Map of the API models used for the response's content type +* `response_parameters` - (Optional) Map of response parameters that can be sent to the caller. 
+ For example: `response_parameters = { "method.response.header.X-Some-Header" = true }` + would define that the header `X-Some-Header` can be provided on the response. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_method_response` using `REST-API-ID/RESOURCE-ID/HTTP-METHOD/STATUS-CODE`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_method_response` using `REST-API-ID/RESOURCE-ID/HTTP-METHOD/STATUS-CODE`. For example: + +```console +% terraform import aws_api_gateway_method_response.example 12345abcde/67890fghij/GET/200 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_method_settings.html.markdown b/website/docs/cdktf/python/r/api_gateway_method_settings.html.markdown new file mode 100644 index 00000000000..2bc44d701fc --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_method_settings.html.markdown @@ -0,0 +1,242 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_method_settings" +description: |- + Manages API Gateway Stage Method Settings +--- + + + +# Resource: aws_api_gateway_method_settings + +Manages API Gateway Stage Method Settings. For example, CloudWatch logging and metrics. + +~> **NOTE:** We recommend using this resource in conjunction with the [`aws_api_gateway_stage` resource](api_gateway_stage.html) instead of a stage managed by the [`aws_api_gateway_deployment` resource](api_gateway_deployment.html) optional `stage_name` argument. Stages managed by the `aws_api_gateway_deployment` resource are recreated on redeployment and this resource will require a second apply to recreate the method settings. + +## Example Usage + +### End-to-end + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/api-gateway-rest-api-openapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). 
+ +### Basic Usage + +```terraform +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} + +resource "aws_api_gateway_method_settings" "all" { + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = aws_api_gateway_stage.example.stage_name + method_path = "*/*" + + settings { + metrics_enabled = true + logging_level = "ERROR" + } +} + +resource "aws_api_gateway_method_settings" "path_specific" { + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = aws_api_gateway_stage.example.stage_name + method_path = "path1/GET" + + settings { + metrics_enabled = true + logging_level = "INFO" + } +} +``` + +### CloudWatch Logging and Tracing + +The AWS Console API Gateway Editor displays multiple options for CloudWatch Logs that don't directly map to the options in the AWS API and Terraform. These examples show the `settings` blocks that are equivalent to the options the AWS Console gives for CloudWatch Logs. + +#### Off + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_method_settings import ApiGatewayMethodSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ApiGatewayMethodSettings(self, "path_specific", + method_path="path1/GET", + rest_api_id=example.id, + settings=ApiGatewayMethodSettingsSettings( + logging_level="OFF" + ), + stage_name=Token.as_string(aws_api_gateway_stage_example.stage_name) + ) +``` + +#### Errors Only + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_method_settings import ApiGatewayMethodSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ApiGatewayMethodSettings(self, "path_specific", + method_path="path1/GET", + rest_api_id=example.id, + settings=ApiGatewayMethodSettingsSettings( + data_trace_enabled=False, + logging_level="ERROR", + metrics_enabled=True + ), + stage_name=Token.as_string(aws_api_gateway_stage_example.stage_name) + ) +``` + +#### Errors and Info Logs + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_method_settings import ApiGatewayMethodSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApiGatewayMethodSettings(self, "path_specific",
+            method_path="path1/GET",
+            rest_api_id=example.id,
+            settings=ApiGatewayMethodSettingsSettings(
+                data_trace_enabled=False,
+                logging_level="INFO",
+                metrics_enabled=True
+            ),
+            stage_name=Token.as_string(aws_api_gateway_stage_example.stage_name)
+        )
+```
+
+#### Full Request and Response Logs
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_method_settings import ApiGatewayMethodSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApiGatewayMethodSettings(self, "path_specific",
+            method_path="path1/GET",
+            rest_api_id=example.id,
+            settings=ApiGatewayMethodSettingsSettings(
+                data_trace_enabled=True,
+                logging_level="INFO",
+                metrics_enabled=True
+            ),
+            stage_name=Token.as_string(aws_api_gateway_stage_example.stage_name)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `rest_api_id` - (Required) ID of the REST API
+* `stage_name` - (Required) Name of the stage
+* `method_path` - (Required) Method path defined as `{resource_path}/{http_method}` for an individual method override, or `*/*` for overriding all methods in the stage. Be sure to trim any leading forward slashes in the path (e.g., `trimprefix(aws_api_gateway_resource.example.path, "/")`).
+* `settings` - (Required) Settings block, see below.
+
+### `settings`
+
+* `metrics_enabled` - (Optional) Whether Amazon CloudWatch metrics are enabled for this method.
+* `logging_level` - (Optional) Logging level for this method, which affects the log entries pushed to Amazon CloudWatch Logs. The available levels are `OFF`, `ERROR`, and `INFO`.
+* `data_trace_enabled` - (Optional) Whether data trace logging is enabled for this method, which affects the log entries pushed to Amazon CloudWatch Logs.
+* `throttling_burst_limit` - (Optional) Throttling burst limit. Default: `-1` (throttling disabled).
+* `throttling_rate_limit` - (Optional) Throttling rate limit. Default: `-1` (throttling disabled).
+* `caching_enabled` - (Optional) Whether responses should be cached and returned for requests. A cache cluster must be enabled on the stage for responses to be cached.
+* `cache_ttl_in_seconds` - (Optional) Time to live (TTL), in seconds, for cached responses. The higher the TTL, the longer the response will be cached.
+* `cache_data_encrypted` - (Optional) Whether the cached responses are encrypted.
+* `require_authorization_for_cache_control` - (Optional) Whether authorization is required for a cache invalidation request.
+* `unauthorized_cache_control_header_strategy` - (Optional) How to handle unauthorized requests for cache invalidation. The available values are `FAIL_WITH_403`, `SUCCEED_WITH_RESPONSE_HEADER`, `SUCCEED_WITHOUT_RESPONSE_HEADER`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_method_settings` using `REST-API-ID/STAGE-NAME/METHOD-PATH`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_method_settings` using `REST-API-ID/STAGE-NAME/METHOD-PATH`. For example: + +```console +% terraform import aws_api_gateway_method_settings.example 12345abcde/example/test/GET +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_model.html.markdown b/website/docs/cdktf/python/r/api_gateway_model.html.markdown new file mode 100644 index 00000000000..4e1d73a54b7 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_model.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_model" +description: |- + Provides a Model for a REST API Gateway. +--- + + + +# Resource: aws_api_gateway_model + +Provides a Model for a REST API Gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_model import ApiGatewayModel +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + my_demo_api = ApiGatewayRestApi(self, "MyDemoAPI", + description="This is my API for demonstration purposes", + name="MyDemoAPI" + ) + ApiGatewayModel(self, "MyDemoModel", + content_type="application/json", + description="a JSON schema", + name="user", + rest_api_id=my_demo_api.id, + schema=Token.as_string( + Fn.jsonencode({ + "type": "object" + })) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rest_api_id` - (Required) ID of the associated REST API +* `name` - (Required) Name of the model +* `description` - (Optional) Description of the model +* `content_type` - (Required) Content type of the model +* `schema` - (Required) Schema of the model in a JSON form + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the model + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_model` using `REST-API-ID/NAME`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_model` using `REST-API-ID/NAME`. 
For example: + +```console +% terraform import aws_api_gateway_model.example 12345abcde/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_request_validator.html.markdown b/website/docs/cdktf/python/r/api_gateway_request_validator.html.markdown new file mode 100644 index 00000000000..75a9dea1f64 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_request_validator.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_request_validator" +description: |- + Manages an API Gateway Request Validator. +--- + + + +# Resource: aws_api_gateway_request_validator + +Manages an API Gateway Request Validator. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_request_validator import ApiGatewayRequestValidator +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ApiGatewayRequestValidator(self, "example", + name="example", + rest_api_id=Token.as_string(aws_api_gateway_rest_api_example.id), + validate_request_body=True, + validate_request_parameters=True + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the request validator +* `rest_api_id` - (Required) ID of the associated Rest API +* `validate_request_body` - (Optional) Boolean whether to validate request body. Defaults to `false`. +* `validate_request_parameters` - (Optional) Boolean whether to validate request parameters. Defaults to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique ID of the request validator + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_request_validator` using `REST-API-ID/REQUEST-VALIDATOR-ID`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_request_validator` using `REST-API-ID/REQUEST-VALIDATOR-ID`. For example: + +```console +% terraform import aws_api_gateway_request_validator.example 12345abcde/67890fghij +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_resource.html.markdown b/website/docs/cdktf/python/r/api_gateway_resource.html.markdown new file mode 100644 index 00000000000..7e97f03d411 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_resource.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_resource" +description: |- + Provides an API Gateway Resource. +--- + + + +# Resource: aws_api_gateway_resource + +Provides an API Gateway Resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_resource import ApiGatewayResource +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + my_demo_api = ApiGatewayRestApi(self, "MyDemoAPI", + description="This is my API for demonstration purposes", + name="MyDemoAPI" + ) + ApiGatewayResource(self, "MyDemoResource", + parent_id=my_demo_api.root_resource_id, + path_part="mydemoresource", + rest_api_id=my_demo_api.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rest_api_id` - (Required) ID of the associated REST API +* `parent_id` - (Required) ID of the parent API resource +* `path_part` - (Required) Last path segment of this API resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Resource's identifier. +* `path` - Complete path for this API resource, including all parent paths. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_resource` using `REST-API-ID/RESOURCE-ID`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_resource` using `REST-API-ID/RESOURCE-ID`. For example: + +```console +% terraform import aws_api_gateway_resource.example 12345abcde/67890fghij +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_rest_api.html.markdown b/website/docs/cdktf/python/r/api_gateway_rest_api.html.markdown new file mode 100644 index 00000000000..399201c43bc --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_rest_api.html.markdown @@ -0,0 +1,303 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_rest_api" +description: |- + Manages an API Gateway REST API. +--- + + + +# Resource: aws_api_gateway_rest_api + +Manages an API Gateway REST API. The REST API can be configured via [importing an OpenAPI specification](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-import-api.html) in the `body` argument (with other arguments serving as overrides) or via other Terraform resources to manage the resources ([`aws_api_gateway_resource` resource](api_gateway_resource.html)), methods ([`aws_api_gateway_method` resource](api_gateway_method.html)), integrations ([`aws_api_gateway_integration` resource](api_gateway_integration.html)), etc. of the REST API. Once the REST API is configured, the [`aws_api_gateway_deployment` resource](api_gateway_deployment.html) can be used along with the [`aws_api_gateway_stage` resource](api_gateway_stage.html) to publish the REST API. + +-> **Note:** Amazon API Gateway Version 1 resources are used for creating and deploying REST APIs. To create and deploy WebSocket and HTTP APIs, use Amazon API Gateway Version 2 [resources](/docs/providers/aws/r/apigatewayv2_api.html). + +!> **WARN:** When importing Open API Specifications with the `body` argument, by default the API Gateway REST API will be replaced with the Open API Specification thus removing any existing methods, resources, integrations, or endpoints. 
Endpoint mutations are asynchronous operations, and race conditions with DNS are possible. To overcome this limitation, use the `put_rest_api_mode` attribute and set it to `merge`.
+
+## Example Usage
+
+### OpenAPI Specification
+
+An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/api-gateway-rest-api-openapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi).
+
+```terraform
+resource "aws_api_gateway_rest_api" "example" {
+  body = jsonencode({
+    openapi = "3.0.1"
+    info = {
+      title   = "example"
+      version = "1.0"
+    }
+    paths = {
+      "/path1" = {
+        get = {
+          x-amazon-apigateway-integration = {
+            httpMethod           = "GET"
+            payloadFormatVersion = "1.0"
+            type                 = "HTTP_PROXY"
+            uri                  = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+          }
+        }
+      }
+    }
+  })
+
+  name = "example"
+
+  endpoint_configuration {
+    types = ["REGIONAL"]
+  }
+}
+
+resource "aws_api_gateway_deployment" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+
+  triggers = {
+    redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body))
+  }
+
+  lifecycle {
+    create_before_destroy = true
+  }
+}
+
+resource "aws_api_gateway_stage" "example" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "example"
+}
+```
+
+### OpenAPI Specification with Private Endpoints
+
+Using `put_rest_api_mode` = `merge` when importing the OpenAPI Specification, the AWS control plane will not delete all existing literal properties that are not explicitly set in the OpenAPI definition. Impacted API Gateway properties: ApiKeySourceType, BinaryMediaTypes, Description, EndpointConfiguration, MinimumCompressionSize, Name, and Policy.
+ +```terraform +data "aws_availability_zones" "available" { + state = "available" + + filter { + name = "opt-in-status" + values = ["opt-in-not-required"] + } +} + +data "aws_region" "current" {} + +resource "aws_vpc" "example" { + cidr_block = "10.0.0.0/16" + enable_dns_support = true + enable_dns_hostnames = true +} + +resource "aws_default_security_group" "example" { + vpc_id = aws_vpc.example.id +} + +resource "aws_subnet" "example" { + availability_zone = data.aws_availability_zones.available.names[0] + cidr_block = cidrsubnet(aws_vpc.example.cidr_block, 8, 0) + vpc_id = aws_vpc.example.id +} + +resource "aws_vpc_endpoint" "example" { + count = 3 + + private_dns_enabled = false + security_group_ids = [aws_default_security_group.example.id] + service_name = "com.amazonaws.${data.aws_region.current.name}.execute-api" + subnet_ids = [aws_subnet.example.id] + vpc_endpoint_type = "Interface" + vpc_id = aws_vpc.example.id +} + +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" + put_rest_api_mode = "merge" + + endpoint_configuration { + types = ["PRIVATE"] + vpc_endpoint_ids = [aws_vpc_endpoint.example[0].id, aws_vpc_endpoint.example[1].id, aws_vpc_endpoint.example[2].id] + } +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} +``` + +### Terraform Resources + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_deployment import ApiGatewayDeployment +from imports.aws.api_gateway_integration import ApiGatewayIntegration +from imports.aws.api_gateway_method import ApiGatewayMethod +from imports.aws.api_gateway_resource import ApiGatewayResource +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +from imports.aws.api_gateway_stage import ApiGatewayStage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApiGatewayRestApi(self, "example", + name="example" + ) + aws_api_gateway_resource_example = ApiGatewayResource(self, "example_1", + parent_id=example.root_resource_id, + path_part="example", + rest_api_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_api_gateway_resource_example.override_logical_id("example") + aws_api_gateway_method_example = ApiGatewayMethod(self, "example_2", + authorization="NONE", + http_method="GET", + resource_id=Token.as_string(aws_api_gateway_resource_example.id), + rest_api_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_method_example.override_logical_id("example") + aws_api_gateway_integration_example = ApiGatewayIntegration(self, "example_3", + http_method=Token.as_string(aws_api_gateway_method_example.http_method), + resource_id=Token.as_string(aws_api_gateway_resource_example.id), + rest_api_id=example.id, + type="MOCK" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_integration_example.override_logical_id("example") + aws_api_gateway_deployment_example = ApiGatewayDeployment(self, "example_4", + lifecycle=TerraformResourceLifecycle( + create_before_destroy=True + ), + rest_api_id=example.id, + triggers={ + "redeployment": Token.as_string( + Fn.sha1( + Token.as_string( + Fn.jsonencode([aws_api_gateway_resource_example.id, aws_api_gateway_method_example.id, aws_api_gateway_integration_example.id + ])))) + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_deployment_example.override_logical_id("example") + aws_api_gateway_stage_example = ApiGatewayStage(self, "example_5", + deployment_id=Token.as_string(aws_api_gateway_deployment_example.id), + rest_api_id=example.id, + stage_name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_stage_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_key_source` - (Optional) Source of the API key for requests. Valid values are `HEADER` (default) and `AUTHORIZER`. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`x-amazon-apigateway-api-key-source` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-api-key-source.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value. +* `binary_media_types` - (Optional) List of binary media types supported by the REST API. By default, the REST API supports only UTF-8-encoded text payloads. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`x-amazon-apigateway-binary-media-types` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-binary-media-types.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value. +* `body` - (Optional) OpenAPI specification that defines the set of routes and integrations to create as part of the REST API. This configuration, and any updates to it, will replace all REST API configuration except values overridden in this resource configuration and other resource updates applied after this resource but before any `aws_api_gateway_deployment` creation. 
More information about REST API OpenAPI support can be found in the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-import-api.html).
+* `description` - (Optional) Description of the REST API. If importing an OpenAPI specification via the `body` argument, this corresponds to the `info.description` field. If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `disable_execute_api_endpoint` - (Optional) Whether clients can invoke your API by using the default execute-api endpoint. By default, clients can invoke your API with the default `https://{api_id}.execute-api.{region}.amazonaws.com` endpoint. To require that clients use a custom domain name to invoke your API, disable the default endpoint. Defaults to `false`. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`x-amazon-apigateway-endpoint-configuration` extension `disableExecuteApiEndpoint` property](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-endpoint-configuration.html). If the argument value is `true` and is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `endpoint_configuration` - (Optional) Configuration block defining API endpoint configuration including endpoint type. Defined below.
+* `minimum_compression_size` - (Optional) Minimum response size to compress for the REST API. String containing an integer value between `-1` and `10485760` (10MB). `-1` will disable an existing compression configuration, and all other values will enable compression with the configured size. New resources can simply omit this argument to disable compression, rather than setting the value to `-1`. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`x-amazon-apigateway-minimum-compression-size` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-openapi-minimum-compression-size.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `name` - (Required) Name of the REST API. If importing an OpenAPI specification via the `body` argument, this corresponds to the `info.title` field. If the argument value is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `fail_on_warnings` - (Optional) Whether warnings encountered while API Gateway is creating or updating the resource should return an error. Defaults to `false`.
+* `parameters` - (Optional) Map of customizations for importing the specification in the `body` argument. For example, to exclude DocumentationParts from an imported API, set `ignore` equal to `documentation`. Additional documentation, including other parameters such as `basepath`, can be found in the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-import-api.html).
+* `policy` - (Optional) JSON formatted policy document that controls access to the API Gateway. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). Terraform will only perform drift detection of its value when present in a configuration. We recommend using the [`aws_api_gateway_rest_api_policy` resource](/docs/providers/aws/r/api_gateway_rest_api_policy.html) instead. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`x-amazon-apigateway-policy` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/openapi-extensions-policy.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `put_rest_api_mode` - (Optional) Mode of the PutRestApi operation when importing an OpenAPI specification via the `body` argument (create or update operation). Valid values are `merge` and `overwrite`. If unspecified, defaults to `overwrite` (for backwards compatibility). This corresponds to the [`x-amazon-apigateway-put-integration-method` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-put-integration-method.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+**Note**: If the `body` argument is provided, the OpenAPI specification will be used to configure the resources, methods and integrations for the Rest API. If this argument is provided, the following resources should not be managed as separate ones, as updates may cause manual resource updates to be overwritten:
+
+* `aws_api_gateway_resource`
+* `aws_api_gateway_method`
+* `aws_api_gateway_method_response`
+* `aws_api_gateway_method_settings`
+* `aws_api_gateway_integration`
+* `aws_api_gateway_integration_response`
+* `aws_api_gateway_gateway_response`
+* `aws_api_gateway_model`
+
+### endpoint_configuration
+
+* `types` - (Required) List of endpoint types. This resource currently only supports managing a single value. Valid values: `EDGE`, `REGIONAL` or `PRIVATE`. If unspecified, defaults to `EDGE`. If set to `PRIVATE`, it is recommended to also set `put_rest_api_mode` to `merge` so that the endpoints and associated Route53 records are not deleted. Refer to the [documentation](https://docs.aws.amazon.com/apigateway/latest/developerguide/create-regional-api.html) for more information on the difference between edge-optimized and regional APIs.
+* `vpc_endpoint_ids` - (Optional) Set of VPC Endpoint identifiers. It is only supported for `PRIVATE` endpoint type. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`x-amazon-apigateway-endpoint-configuration` extension `vpcEndpointIds` property](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-endpoint-configuration.html). If the argument value is provided and is different than the OpenAPI value, **the argument value will override the OpenAPI value**.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN
+* `created_date` - Creation date of the REST API
+* `execution_arn` - Execution ARN part to be used in [`lambda_permission`](/docs/providers/aws/r/lambda_permission.html)'s `source_arn`
+  when allowing API Gateway to invoke a Lambda function,
+  e.g., `arn:aws:execute-api:eu-west-2:123456789012:z4675bid1j`, which can be concatenated with allowed stage, method and resource path.
+* `id` - ID of the REST API +* `root_resource_id` - Resource ID of the REST API's root +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_rest_api` using the REST API ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_rest_api` using the REST API ID. For example: + +```console +% terraform import aws_api_gateway_rest_api.example 12345abcde +``` + +~> **NOTE:** Resource import does not currently support the `body` attribute. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_rest_api_policy.html.markdown b/website/docs/cdktf/python/r/api_gateway_rest_api_policy.html.markdown new file mode 100644 index 00000000000..a19fedafe06 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_rest_api_policy.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_rest_api_policy" +description: |- + Provides an API Gateway REST API Policy. +--- + + + +# Resource: aws_api_gateway_rest_api_policy + +Provides an API Gateway REST API Policy. + +-> **Note:** Amazon API Gateway Version 1 resources are used for creating and deploying REST APIs. To create and deploy WebSocket and HTTP APIs, use Amazon API Gateway Version 2 [resources](/docs/providers/aws/r/apigatewayv2_api.html). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_rest_api import ApiGatewayRestApi +from imports.aws.api_gateway_rest_api_policy import ApiGatewayRestApiPolicy +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = ApiGatewayRestApi(self, "test", + name="example-rest-api" + ) + data_aws_iam_policy_document_test = DataAwsIamPolicyDocument(self, "test_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["execute-api:Invoke"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="IpAddress", + values=["123.123.123.123/32"], + variable="aws:SourceIp" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=[test.execution_arn] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_test.override_logical_id("test") + aws_api_gateway_rest_api_policy_test = ApiGatewayRestApiPolicy(self, "test_2", + policy=Token.as_string(data_aws_iam_policy_document_test.json), + rest_api_id=test.id + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        aws_api_gateway_rest_api_policy_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `rest_api_id` - (Required) ID of the REST API.
+* `policy` - (Required) JSON formatted policy document that controls access to the API Gateway. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the REST API
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_rest_api_policy` using the REST API ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_api_gateway_rest_api_policy` using the REST API ID. For example:
+
+```console
+% terraform import aws_api_gateway_rest_api_policy.example 12345abcde
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/api_gateway_stage.html.markdown b/website/docs/cdktf/python/r/api_gateway_stage.html.markdown
new file mode 100644
index 00000000000..30c1fddb15f
--- /dev/null
+++ b/website/docs/cdktf/python/r/api_gateway_stage.html.markdown
@@ -0,0 +1,182 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_stage"
+description: |-
+  Manages an API Gateway Stage.
+---
+
+
+# Resource: aws_api_gateway_stage
+
+Manages an API Gateway Stage. A stage is a named reference to a deployment, which can be done via the [`aws_api_gateway_deployment` resource](api_gateway_deployment.html). Stages can be optionally managed further with the [`aws_api_gateway_base_path_mapping` resource](api_gateway_base_path_mapping.html), [`aws_api_gateway_domain_name` resource](api_gateway_domain_name.html), and [`aws_api_gateway_method_settings` resource](api_gateway_method_settings.html). For more information, see the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-stages.html).
+
+## Example Usage
+
+An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/api-gateway-rest-api-openapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi).
+
+```terraform
+resource "aws_api_gateway_rest_api" "example" {
+  body = jsonencode({
+    openapi = "3.0.1"
+    info = {
+      title   = "example"
+      version = "1.0"
+    }
+    paths = {
+      "/path1" = {
+        get = {
+          x-amazon-apigateway-integration = {
+            httpMethod           = "GET"
+            payloadFormatVersion = "1.0"
+            type                 = "HTTP_PROXY"
+            uri                  = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+          }
+        }
+      }
+    }
+  })
+
+  name = "example"
+}
+
+resource "aws_api_gateway_deployment" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+
+  triggers = {
+    redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body))
+  }
+
+  lifecycle {
+    create_before_destroy = true
+  }
+}
+
+resource "aws_api_gateway_stage" "example" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "example"
+}
+
+resource "aws_api_gateway_method_settings" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+  stage_name  = aws_api_gateway_stage.example.stage_name
+  method_path = "*/*"
+
+  settings {
+    metrics_enabled = true
+    logging_level   = "INFO"
+  }
+}
+```
+
+### Managing the API Logging CloudWatch Log Group
+
+API Gateway provides the ability to [enable CloudWatch API logging](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html). To manage the CloudWatch Log Group when this feature is enabled, the [`aws_cloudwatch_log_group` resource](/docs/providers/aws/r/cloudwatch_log_group.html) can be used where the name matches the API Gateway naming convention. If the CloudWatch Log Group previously exists, import the [`aws_cloudwatch_log_group` resource into Terraform](/docs/providers/aws/r/cloudwatch_log_group.html#import) as a one-time operation. You can recreate the environment without import.
+
+-> The below configuration uses [`depends_on`](https://www.terraform.io/language/meta-arguments/depends_on) to prevent ordering issues with API Gateway automatically creating the log group first, and a variable for naming consistency. Other ordering and naming methodologies may be more appropriate for your environment.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import VariableType, TerraformVariable, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_rest_api import ApiGatewayRestApi
+from imports.aws.api_gateway_stage import ApiGatewayStage
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+class MyConvertedCode(TerraformStack):
+    # Stack inputs: the REST API name, plus the deployment and REST API IDs
+    # consumed by the stage below.
+    def __init__(self, scope, name, *, rest_api_name, deployment_id, rest_api_id):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        stage_name = TerraformVariable(self, "stage_name",
+            default="example",
+            type=VariableType.STRING
+        )
+        example = ApiGatewayRestApi(self, "example",
+            name=rest_api_name
+        )
+        aws_cloudwatch_log_group_example = CloudwatchLogGroup(self, "example_2",
+            name="API-Gateway-Execution-Logs_${" + example.id + "}/${" + stage_name.value + "}",
+            retention_in_days=7
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_group_example.override_logical_id("example")
+        aws_api_gateway_stage_example = ApiGatewayStage(self, "example_3",
+            depends_on=[aws_cloudwatch_log_group_example],
+            stage_name=stage_name.string_value,
+            deployment_id=deployment_id,
+            rest_api_id=rest_api_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_api_gateway_stage_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `rest_api_id` - (Required) ID of the associated REST API
+* `stage_name` - (Required) Name of the stage
+* `deployment_id` - (Required) ID of the deployment that the stage points to
+* `access_log_settings` - (Optional) Enables access logs for the API stage. See [Access Log Settings](#access-log-settings) below.
+* `cache_cluster_enabled` - (Optional) Whether a cache cluster is enabled for the stage
+* `cache_cluster_size` - (Optional) Size of the cache cluster for the stage, if enabled. Allowed values include `0.5`, `1.6`, `6.1`, `13.5`, `28.4`, `58.2`, `118` and `237`.
+* `canary_settings` - (Optional) Configuration settings of a canary deployment. See [Canary Settings](#canary-settings) below.
+* `client_certificate_id` - (Optional) Identifier of a client certificate for the stage.
+* `description` - (Optional) Description of the stage.
+* `documentation_version` - (Optional) Version of the associated API documentation
+* `variables` - (Optional) Map that defines the stage variables
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `xray_tracing_enabled` - (Optional) Whether active tracing with X-Ray is enabled. Defaults to `false`.
+
+### Access Log Settings
+
+* `destination_arn` - (Required) ARN of the CloudWatch Logs log group or Kinesis Data Firehose delivery stream to receive access logs. If you specify a Kinesis Data Firehose delivery stream, the stream name must begin with `amazon-apigateway-`. Automatically removes trailing `:*` if present.
+* `format` - (Required) Formatting and values recorded in the logs.
+For more information on configuring the log format rules, visit the AWS [documentation](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html)
+
+### Canary Settings
+
+* `percent_traffic` - (Optional) Percent `0.0` - `100.0` of traffic to divert to the canary deployment.
+* `stage_variable_overrides` - (Optional) Map of overridden stage `variables` (including new variables) for the canary deployment.
+* `use_stage_cache` - (Optional) Whether the canary deployment uses the stage cache. Defaults to `false`. A combined sketch of both settings blocks follows this list.
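+
+The following hand-written sketch (not `cdktf convert` output) shows how the two settings blocks above can be attached to a stage. The struct names `ApiGatewayStageAccessLogSettings` and `ApiGatewayStageCanarySettings`, as well as the `rest_api`, `deployment`, and `log_group` references, are assumptions that follow the naming conventions of the generated bindings used elsewhere on this page.
+
+```python
+# Hand-written sketch, not generated by 'cdktf convert'.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.api_gateway_stage import (ApiGatewayStage,
+    ApiGatewayStageAccessLogSettings, ApiGatewayStageCanarySettings)
+class StageSettingsSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApiGatewayStage(self, "example",
+            rest_api_id=rest_api.id,  # assumed ApiGatewayRestApi defined elsewhere
+            deployment_id=deployment.id,  # assumed ApiGatewayDeployment defined elsewhere
+            stage_name="example",
+            # Access log settings: JSON access-log lines built from $context
+            # variables, delivered to an existing CloudWatch Logs group.
+            access_log_settings=ApiGatewayStageAccessLogSettings(
+                destination_arn=log_group.arn,  # assumed CloudwatchLogGroup defined elsewhere
+                format='{"requestId":"$context.requestId","status":"$context.status"}'
+            ),
+            # Canary settings: divert 10% of traffic to the canary deployment and
+            # override one stage variable for canary requests only.
+            canary_settings=ApiGatewayStageCanarySettings(
+                percent_traffic=10,
+                stage_variable_overrides={"feature_flag": "canary"},
+                use_stage_cache=False
+            )
+        )
+```
+
+Any of the `$context` variables described in the AWS logging documentation linked above can be used in the `format` string.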
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN +* `id` - ID of the stage +* `invoke_url` - URL to invoke the API pointing to the stage, + e.g., `https://z4675bid1j.execute-api.eu-west-2.amazonaws.com/prod` +* `execution_arn` - Execution ARN to be used in [`lambda_permission`](/docs/providers/aws/r/lambda_permission.html)'s `source_arn` + when allowing API Gateway to invoke a Lambda function, + e.g., `arn:aws:execute-api:eu-west-2:123456789012:z4675bid1j/prod` +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `web_acl_arn` - ARN of the WebAcl associated with the Stage. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_api_gateway_stage` using `REST-API-ID/STAGE-NAME`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_api_gateway_stage` using `REST-API-ID/STAGE-NAME`. For example: + +```console +% terraform import aws_api_gateway_stage.example 12345abcde/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/api_gateway_usage_plan.html.markdown b/website/docs/cdktf/python/r/api_gateway_usage_plan.html.markdown new file mode 100644 index 00000000000..539a8e62388 --- /dev/null +++ b/website/docs/cdktf/python/r/api_gateway_usage_plan.html.markdown @@ -0,0 +1,164 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_usage_plan" +description: |- + Provides an API Gateway Usage Plan. +--- + + + +# Resource: aws_api_gateway_usage_plan + +Provides an API Gateway Usage Plan. 
+
+## Example Usage
+
+```terraform
+resource "aws_api_gateway_rest_api" "example" {
+  body = jsonencode({
+    openapi = "3.0.1"
+    info = {
+      title   = "example"
+      version = "1.0"
+    }
+    paths = {
+      "/path1" = {
+        get = {
+          x-amazon-apigateway-integration = {
+            httpMethod           = "GET"
+            payloadFormatVersion = "1.0"
+            type                 = "HTTP_PROXY"
+            uri                  = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+          }
+        }
+      }
+    }
+  })
+
+  name = "example"
+}
+
+resource "aws_api_gateway_deployment" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+
+  triggers = {
+    redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body))
+  }
+
+  lifecycle {
+    create_before_destroy = true
+  }
+}
+
+resource "aws_api_gateway_stage" "development" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "development"
+}
+
+resource "aws_api_gateway_stage" "production" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "production"
+}
+
+resource "aws_api_gateway_usage_plan" "example" {
+  name         = "my-usage-plan"
+  description  = "my description"
+  product_code = "MYCODE"
+
+  api_stages {
+    api_id = aws_api_gateway_rest_api.example.id
+    stage  = aws_api_gateway_stage.development.stage_name
+  }
+
+  api_stages {
+    api_id = aws_api_gateway_rest_api.example.id
+    stage  = aws_api_gateway_stage.production.stage_name
+  }
+
+  quota_settings {
+    limit  = 20
+    offset = 2
+    period = "WEEK"
+  }
+
+  throttle_settings {
+    burst_limit = 5
+    rate_limit  = 10
+  }
+}
+```
+
+## Argument Reference
+
+The API Gateway Usage Plan argument layout is a structure composed of several sub-resources; these resources are laid out below.
+
+### Top-Level Arguments
+
+* `name` - (Required) Name of the usage plan.
+* `description` - (Optional) Description of a usage plan.
+* `api_stages` - (Optional) Associated [API stages](#api-stages-arguments) of the usage plan.
+* `quota_settings` - (Optional) The [quota settings](#quota-settings-arguments) of the usage plan.
+* `throttle_settings` - (Optional) The [throttling limits](#throttling-settings-arguments) of the usage plan.
+* `product_code` - (Optional) AWS Marketplace product identifier to associate with the usage plan as a SaaS product on AWS Marketplace.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Api Stages arguments
+
+* `api_id` (Required) - API Id of the associated API stage in a usage plan.
+* `stage` (Required) - API stage name of the associated API stage in a usage plan.
+* `throttle` - (Optional) The [throttling limits](#throttle) of the usage plan.
+
+##### Throttle
+
+* `path` (Required) - Method to apply the throttle settings for. Specify the path and method, for example `/test/GET`.
+* `burst_limit` (Optional) - The API request burst limit, the maximum rate limit over a time ranging from one to a few seconds, depending upon whether the underlying token bucket is at its full capacity.
+* `rate_limit` (Optional) - The API request steady-state rate limit.
+
+#### Quota Settings Arguments
+
+* `limit` (Optional) - Maximum number of requests that can be made in a given time period.
+* `offset` (Optional) - Number of requests subtracted from the given limit in the initial time period.
+* `period` (Optional) - Time period in which the limit applies. Valid values are "DAY", "WEEK" or "MONTH".
+
+#### Throttling Settings Arguments
+
+* `burst_limit` (Optional) - The API request burst limit, the maximum rate limit over a time ranging from one to a few seconds, depending upon whether the underlying token bucket is at its full capacity.
+* `rate_limit` (Optional) - The API request steady-state rate limit.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the API resource
+* `name` - Name of the usage plan.
+* `description` - Description of a usage plan.
+* `api_stages` - Associated API stages of the usage plan.
+* `quota_settings` - Quota of the usage plan.
+* `throttle_settings` - Throttling limits of the usage plan.
+* `product_code` - AWS Marketplace product identifier to associate with the usage plan as a SaaS product on AWS Marketplace.
+* `arn` - ARN
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS API Gateway Usage Plan using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AWS API Gateway Usage Plan using the `id`. For example:
+
+```console
+% terraform import aws_api_gateway_usage_plan.myusageplan <usage_plan_id>
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/api_gateway_usage_plan_key.html.markdown b/website/docs/cdktf/python/r/api_gateway_usage_plan_key.html.markdown
new file mode 100644
index 00000000000..4d749fa4d6a
--- /dev/null
+++ b/website/docs/cdktf/python/r/api_gateway_usage_plan_key.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_usage_plan_key"
+description: |-
+  Provides an API Gateway Usage Plan Key.
+---
+
+
+# Resource: aws_api_gateway_usage_plan_key
+
+Provides an API Gateway Usage Plan Key.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_api_key import ApiGatewayApiKey
+from imports.aws.api_gateway_rest_api import ApiGatewayRestApi
+from imports.aws.api_gateway_usage_plan import ApiGatewayUsagePlan
+from imports.aws.api_gateway_usage_plan_key import ApiGatewayUsagePlanKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mykey = ApiGatewayApiKey(self, "mykey",
+            name="my_key"
+        )
+        test = ApiGatewayRestApi(self, "test",
+            name="MyDemoAPI"
+        )
+        myusageplan = ApiGatewayUsagePlan(self, "myusageplan",
+            api_stages=[ApiGatewayUsagePlanApiStages(
+                api_id=test.id,
+                stage=foo.stage_name  # "foo" refers to an API Gateway stage defined elsewhere
+            )
+            ],
+            name="my_usage_plan"
+        )
+        ApiGatewayUsagePlanKey(self, "main",
+            key_id=mykey.id,
+            key_type="API_KEY",
+            usage_plan_id=myusageplan.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `key_id` - (Required) Identifier of the API key resource.
+* `key_type` - (Required) Type of the API key resource. Currently, the valid key type is `API_KEY`.
+* `usage_plan_id` - (Required) ID of the usage plan resource to associate the key with.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of a usage plan key.
+* `key_id` - Identifier of the API gateway key resource.
+* `key_type` - Type of a usage plan key. Currently, the valid key type is `API_KEY`.
+* `usage_plan_id` - ID of the API resource
+* `name` - Name of a usage plan key.
+* `value` - Value of a usage plan key.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS API Gateway Usage Plan Key using the `USAGE-PLAN-ID/USAGE-PLAN-KEY-ID`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AWS API Gateway Usage Plan Key using the `USAGE-PLAN-ID/USAGE-PLAN-KEY-ID`. For example:
+
+```console
+% terraform import aws_api_gateway_usage_plan_key.key 12345abcde/zzz
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/api_gateway_vpc_link.html.markdown b/website/docs/cdktf/python/r/api_gateway_vpc_link.html.markdown
new file mode 100644
index 00000000000..310c62c93ff
--- /dev/null
+++ b/website/docs/cdktf/python/r/api_gateway_vpc_link.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_vpc_link"
+description: |-
+  Provides an API Gateway VPC Link.
+---
+
+
+# Resource: aws_api_gateway_vpc_link
+
+Provides an API Gateway VPC Link.
+
+-> **Note:** Amazon API Gateway Version 1 VPC Links enable private integrations that connect REST APIs to private resources in a VPC.
+To enable private integration for HTTP APIs, use the Amazon API Gateway Version 2 VPC Link [resource](/docs/providers/aws/r/apigatewayv2_vpc_link.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.api_gateway_vpc_link import ApiGatewayVpcLink
+from imports.aws.lb import Lb
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Lb(self, "example",
+            internal=True,
+            load_balancer_type="network",
+            name="example",
+            subnet_mapping=[LbSubnetMapping(
+                subnet_id="12345"
+            )
+            ]
+        )
+        aws_api_gateway_vpc_link_example = ApiGatewayVpcLink(self, "example_1",
+            description="example description",
+            name="example",
+            target_arns=[example.arn]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_api_gateway_vpc_link_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name used to label and identify the VPC link.
+* `description` - (Optional) Description of the VPC link.
+* `target_arns` - (Required, ForceNew) List of Network Load Balancer ARNs in the VPC targeted by the VPC link. Currently AWS only supports one target.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the VpcLink.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway VPC Link using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import API Gateway VPC Link using the `id`. For example:
+
+```console
+% terraform import aws_api_gateway_vpc_link.example 12345abcde
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apigatewayv2_api.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_api.html.markdown
new file mode 100644
index 00000000000..1f55e232e5e
--- /dev/null
+++ b/website/docs/cdktf/python/r/apigatewayv2_api.html.markdown
@@ -0,0 +1,133 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_api"
+description: |-
+  Manages an Amazon API Gateway Version 2 API.
+---
+
+
+# Resource: aws_apigatewayv2_api
+
+Manages an Amazon API Gateway Version 2 API.
+
+-> **Note:** Amazon API Gateway Version 2 resources are used for creating and deploying WebSocket and HTTP APIs. To create and deploy REST APIs, use Amazon API Gateway Version 1 [resources](/docs/providers/aws/r/api_gateway_rest_api.html).
+
+## Example Usage
+
+### Basic WebSocket API
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apigatewayv2_api import Apigatewayv2Api
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Apigatewayv2Api(self, "example",
+            name="example-websocket-api",
+            protocol_type="WEBSOCKET",
+            route_selection_expression="$request.body.action"
+        )
+```
+
+### Basic HTTP API
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apigatewayv2_api import Apigatewayv2Api
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Apigatewayv2Api(self, "example",
+            name="example-http-api",
+            protocol_type="HTTP"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the API. Must be less than or equal to 128 characters in length.
+* `protocol_type` - (Required) API protocol. Valid values: `HTTP`, `WEBSOCKET`.
+* `api_key_selection_expression` - (Optional) An [API key selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions).
+Valid values: `$context.authorizer.usageIdentifierKey`, `$request.header.x-api-key`. Defaults to `$request.header.x-api-key`.
+Applicable for WebSocket APIs.
+* `cors_configuration` - (Optional) Cross-origin resource sharing (CORS) [configuration](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html). Applicable for HTTP APIs.
+* `credentials_arn` - (Optional) Part of _quick create_. Specifies any credentials required for the integration. Applicable for HTTP APIs.
+* `description` - (Optional) Description of the API. Must be less than or equal to 1024 characters in length.
+* `disable_execute_api_endpoint` - (Optional) Whether clients can invoke the API by using the default `execute-api` endpoint.
+By default, clients can invoke the API with the default `{api_id}.execute-api.{region}.amazonaws.com` endpoint.
+To require that clients use a custom domain name to invoke the API, disable the default endpoint.
+* `route_key` - (Optional) Part of _quick create_. Specifies any [route key](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-routes.html). Applicable for HTTP APIs.
+* `route_selection_expression` - (Optional) The [route selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-route-selection-expressions) for the API.
+Defaults to `$request.method $request.path`.
+* `tags` - (Optional) Map of tags to assign to the API. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `target` - (Optional) Part of _quick create_. Quick create produces an API with an integration, a default catch-all route, and a default stage which is configured to automatically deploy changes.
+For HTTP integrations, specify a fully qualified URL. For Lambda integrations, specify a function ARN.
+The type of the integration will be `HTTP_PROXY` or `AWS_PROXY`, respectively. 
Applicable for HTTP APIs.
+* `body` - (Optional) An OpenAPI specification that defines the set of routes and integrations to create as part of the HTTP API. Supported only for HTTP APIs.
+* `version` - (Optional) Version identifier for the API. Must be between 1 and 64 characters in length.
+* `fail_on_warnings` - (Optional) Whether warnings should return an error while API Gateway is creating or updating the resource using an OpenAPI specification. Defaults to `false`. Applicable for HTTP APIs.
+
+-> **Note:** If the `body` argument is provided, the OpenAPI specification will be used to configure the integrations and routes for the HTTP API. If this argument is provided, the following resources should not be managed as separate ones, as updates may cause manual resource updates to be overwritten:
+
+* `aws_apigatewayv2_integration`
+* `aws_apigatewayv2_route`
+
+Furthermore, the `name`, `description`, `cors_configuration`, `tags`, and `version` fields should be specified in the Terraform configuration, and their values will override any values specified in the OpenAPI document.
+
+The `cors_configuration` object supports the following:
+
+* `allow_credentials` - (Optional) Whether credentials are included in the CORS request.
+* `allow_headers` - (Optional) Set of allowed HTTP headers.
+* `allow_methods` - (Optional) Set of allowed HTTP methods.
+* `allow_origins` - (Optional) Set of allowed origins.
+* `expose_headers` - (Optional) Set of exposed HTTP headers.
+* `max_age` - (Optional) Number of seconds that the browser should cache preflight request results.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - API identifier.
+* `api_endpoint` - URI of the API, of the form `https://{api-id}.execute-api.{region}.amazonaws.com` for HTTP APIs and `wss://{api-id}.execute-api.{region}.amazonaws.com` for WebSocket APIs.
+* `arn` - ARN of the API.
+* `execution_arn` - ARN prefix to be used in an [`aws_lambda_permission`](/docs/providers/aws/r/lambda_permission.html)'s `source_arn` attribute
+or in an [`aws_iam_policy`](/docs/providers/aws/r/iam_policy.html) to authorize access to the [`@connections` API](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-how-to-call-websocket-api-connections.html).
+See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-control-access-iam.html) for details.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_api` using the API identifier. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_apigatewayv2_api` using the API identifier. 
For example: + +```console +% terraform import aws_apigatewayv2_api.example aabbccddee +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_api_mapping.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_api_mapping.html.markdown new file mode 100644 index 00000000000..84bf8da1763 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_api_mapping.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_api_mapping" +description: |- + Manages an Amazon API Gateway Version 2 API mapping. +--- + + + +# Resource: aws_apigatewayv2_api_mapping + +Manages an Amazon API Gateway Version 2 API mapping. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-custom-domains.html). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_api_mapping import Apigatewayv2ApiMapping +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2ApiMapping(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + domain_name=Token.as_string(aws_apigatewayv2_domain_name_example.id), + stage=Token.as_string(aws_apigatewayv2_stage_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API identifier. +* `domain_name` - (Required) Domain name. Use the [`aws_apigatewayv2_domain_name`](/docs/providers/aws/r/apigatewayv2_domain_name.html) resource to configure a domain name. +* `stage` - (Required) API stage. Use the [`aws_apigatewayv2_stage`](/docs/providers/aws/r/apigatewayv2_stage.html) resource to configure an API stage. +* `api_mapping_key` - (Optional) The API mapping key. Refer to [REST API](https://docs.aws.amazon.com/apigateway/latest/developerguide/rest-api-mappings.html), [HTTP API](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-mappings.html) or [WebSocket API](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-mappings.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - API mapping identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_api_mapping` using the API mapping identifier and domain name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_api_mapping` using the API mapping identifier and domain name. 
For example: + +```console +% terraform import aws_apigatewayv2_api_mapping.example 1122334/ws-api.example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_authorizer.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_authorizer.html.markdown new file mode 100644 index 00000000000..67146675d65 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_authorizer.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_authorizer" +description: |- + Manages an Amazon API Gateway Version 2 authorizer. +--- + + + +# Resource: aws_apigatewayv2_authorizer + +Manages an Amazon API Gateway Version 2 authorizer. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +## Example Usage + +### Basic WebSocket API + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_authorizer import Apigatewayv2Authorizer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2Authorizer(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + authorizer_type="REQUEST", + authorizer_uri=Token.as_string(aws_lambda_function_example.invoke_arn), + identity_sources=["route.request.header.Auth"], + name="example-authorizer" + ) +``` + +### Basic HTTP API + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_authorizer import Apigatewayv2Authorizer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2Authorizer(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + authorizer_payload_format_version="2.0", + authorizer_type="REQUEST", + authorizer_uri=Token.as_string(aws_lambda_function_example.invoke_arn), + identity_sources=["$request.header.Authorization"], + name="example-authorizer" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API identifier. +* `authorizer_type` - (Required) Authorizer type. Valid values: `JWT`, `REQUEST`. +Specify `REQUEST` for a Lambda function using incoming request parameters. +For HTTP APIs, specify `JWT` to use JSON Web Tokens. +* `name` - (Required) Name of the authorizer. Must be between 1 and 128 characters in length. +* `authorizer_credentials_arn` - (Optional) Required credentials as an IAM role for API Gateway to invoke the authorizer. +Supported only for `REQUEST` authorizers. +* `authorizer_payload_format_version` - (Optional) Format of the payload sent to an HTTP API Lambda authorizer. Required for HTTP API Lambda authorizers. +Valid values: `1.0`, `2.0`. +* `authorizer_result_ttl_in_seconds` - (Optional) Time to live (TTL) for cached authorizer results, in seconds. If it equals 0, authorization caching is disabled. +If it is greater than 0, API Gateway caches authorizer responses. 
The maximum value is 3600, or 1 hour. Defaults to `300`. +Supported only for HTTP API Lambda authorizers. +* `authorizer_uri` - (Optional) Authorizer's Uniform Resource Identifier (URI). +For `REQUEST` authorizers this must be a well-formed Lambda function URI, such as the `invoke_arn` attribute of the [`aws_lambda_function`](/docs/providers/aws/r/lambda_function.html) resource. +Supported only for `REQUEST` authorizers. Must be between 1 and 2048 characters in length. +* `enable_simple_responses` - (Optional) Whether a Lambda authorizer returns a response in a simple format. If enabled, the Lambda authorizer can return a boolean value instead of an IAM policy. +Supported only for HTTP APIs. +* `identity_sources` - (Optional) Identity sources for which authorization is requested. +For `REQUEST` authorizers the value is a list of one or more mapping expressions of the specified request parameters. +For `JWT` authorizers the single entry specifies where to extract the JSON Web Token (JWT) from inbound requests. +* `jwt_configuration` - (Optional) Configuration of a JWT authorizer. Required for the `JWT` authorizer type. +Supported only for HTTP APIs. + +The `jwt_configuration` object supports the following: + +* `audience` - (Optional) List of the intended recipients of the JWT. A valid JWT must provide an aud that matches at least one entry in this list. +* `issuer` - (Optional) Base domain of the identity provider that issues JSON Web Tokens, such as the `endpoint` attribute of the [`aws_cognito_user_pool`](/docs/providers/aws/r/cognito_user_pool.html) resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Authorizer identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_authorizer` using the API identifier and authorizer identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_authorizer` using the API identifier and authorizer identifier. For example: + +```console +% terraform import aws_apigatewayv2_authorizer.example aabbccddee/1122334 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_deployment.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_deployment.html.markdown new file mode 100644 index 00000000000..a943a58c03c --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_deployment.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_deployment" +description: |- + Manages an Amazon API Gateway Version 2 deployment. +--- + + + +# Resource: aws_apigatewayv2_deployment + +Manages an Amazon API Gateway Version 2 deployment. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +-> **Note:** Creating a deployment for an API requires at least one `aws_apigatewayv2_route` resource associated with that API. 
To avoid race conditions when all resources are being created together, you need to add implicit resource references via the `triggers` argument or explicit resource references using the [resource `depends_on` meta-argument](https://www.terraform.io/docs/configuration/meta-arguments/depends_on.html).
+
+-> Enable the [resource `lifecycle` configuration block `create_before_destroy` argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy) in this resource configuration to properly order redeployments in Terraform.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apigatewayv2_deployment import Apigatewayv2Deployment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Apigatewayv2Deployment(self, "example",
+            api_id=Token.as_string(aws_apigatewayv2_api_example.id),
+            description="Example deployment",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            )
+        )
+```
+
+### Redeployment Triggers
+
+-> **NOTE:** This is an optional, advanced configuration for Terraform 0.12 and later that calculates a hash of the API's Terraform resources to determine changes that should trigger a new deployment. This value will change after the first Terraform apply of new resources, triggering an immediate redeployment; afterwards it will stabilize except when those resources change. The `triggers` map can also be configured in other, more complex ways to fit the environment, avoiding the immediate redeployment issue.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apigatewayv2_deployment import Apigatewayv2Deployment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Apigatewayv2Deployment(self, "example",
+            api_id=Token.as_string(aws_apigatewayv2_api_example.id),
+            description="Example deployment",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            triggers={
+                "redeployment": Token.as_string(
+                    Fn.sha1(
+                        Token.as_string(
+                            Fn.join(",",
+                                Token.as_list(
+                                    Fn.tolist([
+                                        Fn.jsonencode(aws_apigatewayv2_integration_example),
+                                        Fn.jsonencode(aws_apigatewayv2_route_example)
+                                    ]))))))
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `api_id` - (Required) API identifier.
+* `description` - (Optional) Description for the deployment resource. Must be less than or equal to 1024 characters in length.
+* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a redeployment. To force a redeployment without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Deployment identifier. 
+* `auto_deployed` - Whether the deployment was automatically released. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_deployment` using the API identifier and deployment identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_deployment` using the API identifier and deployment identifier. For example: + +```console +% terraform import aws_apigatewayv2_deployment.example aabbccddee/1122334 +``` + +The `triggers` argument cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_domain_name.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_domain_name.html.markdown new file mode 100644 index 00000000000..9bf340a6788 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_domain_name.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_domain_name" +description: |- + Manages an Amazon API Gateway Version 2 domain name. +--- + + + +# Resource: aws_apigatewayv2_domain_name + +Manages an Amazon API Gateway Version 2 domain name. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-custom-domains.html). + +-> **Note:** This resource establishes ownership of and the TLS settings for +a particular domain name. An API stage can be associated with the domain name using the `aws_apigatewayv2_api_mapping` resource. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_domain_name import Apigatewayv2DomainName +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2DomainName(self, "example", + domain_name="ws-api.example.com", + domain_name_configuration=Apigatewayv2DomainNameDomainNameConfiguration( + certificate_arn=Token.as_string(aws_acm_certificate_example.arn), + endpoint_type="REGIONAL", + security_policy="TLS_1_2" + ) + ) +``` + +### Associated Route 53 Resource Record + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
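+# The Route 53 alias record below points the custom domain at the regional API
+# endpoint, using the domain name's computed target_domain_name and
+# hosted_zone_id attributes.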
+#
+from imports.aws.apigatewayv2_domain_name import Apigatewayv2DomainName
+from imports.aws.route53_record import Route53Record
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Apigatewayv2DomainName(self, "example",
+            domain_name="http-api.example.com",
+            domain_name_configuration=Apigatewayv2DomainNameDomainNameConfiguration(
+                certificate_arn=Token.as_string(aws_acm_certificate_example.arn),
+                endpoint_type="REGIONAL",
+                security_policy="TLS_1_2"
+            )
+        )
+        aws_route53_record_example = Route53Record(self, "example_1",
+            alias=Route53RecordAlias(
+                evaluate_target_health=False,
+                name=Token.as_string(
+                    property_access(example.domain_name_configuration, ["0", "target_domain_name"
+                    ])),
+                zone_id=Token.as_string(
+                    property_access(example.domain_name_configuration, ["0", "hosted_zone_id"
+                    ]))
+            ),
+            name=example.domain_name,
+            type="A",
+            zone_id=Token.as_string(aws_route53_zone_example.zone_id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_record_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain_name` - (Required) Domain name. Must be between 1 and 512 characters in length.
+* `domain_name_configuration` - (Required) Domain name configuration. See below.
+* `mutual_tls_authentication` - (Optional) Mutual TLS authentication configuration for the domain name. See below; a sketch follows the argument lists.
+* `tags` - (Optional) Map of tags to assign to the domain name. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `domain_name_configuration`
+
+* `certificate_arn` - (Required) ARN of an AWS-managed certificate that will be used by the endpoint for the domain name. AWS Certificate Manager is the only supported source. Use the [`aws_acm_certificate`](/docs/providers/aws/r/acm_certificate.html) resource to configure an ACM certificate.
+* `endpoint_type` - (Required) Endpoint type. Valid values: `REGIONAL`.
+* `hosted_zone_id` - (Computed) Amazon Route 53 Hosted Zone ID of the endpoint.
+* `ownership_verification_certificate_arn` - (Optional) ARN of the AWS-issued certificate used to validate custom domain ownership (when `certificate_arn` is issued via an ACM Private CA or `mutual_tls_authentication` is configured with an ACM-imported certificate).
+* `security_policy` - (Required) Transport Layer Security (TLS) version of the [security policy](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-custom-domain-tls-version.html) for the domain name. Valid values: `TLS_1_2`.
+* `target_domain_name` - (Computed) Target domain name.
+
+### `mutual_tls_authentication`
+
+* `truststore_uri` - (Required) Amazon S3 URL that specifies the truststore for mutual TLS authentication, for example, `s3://bucket-name/key-name`. The truststore can contain certificates from public or private certificate authorities. To update the truststore, upload a new version to S3, and then update your custom domain name to use the new version.
+* `truststore_version` - (Optional) Version of the S3 object that contains the truststore. To specify a version, you must have versioning enabled for the S3 bucket. 
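+
+Where client certificates are required, add a `mutual_tls_authentication` block pointing at an S3 truststore, as in the following minimal sketch. This is not generated output: the bucket and object key are hypothetical, and the `Apigatewayv2DomainNameMutualTlsAuthentication` struct name is assumed from the generated bindings' usual naming convention.
+
+```python
+# A hedged sketch of mutual TLS; the truststore bucket/key are hypothetical and
+# the struct name is assumed to follow the generated bindings' naming scheme.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.apigatewayv2_domain_name import Apigatewayv2DomainName
+class MutualTlsSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Apigatewayv2DomainName(self, "mtls",
+            domain_name="mtls-api.example.com",
+            domain_name_configuration=Apigatewayv2DomainNameDomainNameConfiguration(
+                certificate_arn=Token.as_string(aws_acm_certificate_example.arn),
+                endpoint_type="REGIONAL",
+                security_policy="TLS_1_2"
+            ),
+            # Clients must present a certificate that chains to this truststore.
+            mutual_tls_authentication=Apigatewayv2DomainNameMutualTlsAuthentication(
+                truststore_uri="s3://example-truststore-bucket/truststore.pem"
+            )
+        )
+```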
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `api_mapping_selection_expression` - [API mapping selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-mapping-selection-expressions) for the domain name. +* `arn` - ARN of the domain name. +* `id` - Domain name identifier. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `update` - (Default `60m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_domain_name` using the domain name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_domain_name` using the domain name. For example: + +```console +% terraform import aws_apigatewayv2_domain_name.example ws-api.example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_integration.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_integration.html.markdown new file mode 100644 index 00000000000..3782e6b4d35 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_integration.html.markdown @@ -0,0 +1,215 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_integration" +description: |- + Manages an Amazon API Gateway Version 2 integration. +--- + + + +# Resource: aws_apigatewayv2_integration + +Manages an Amazon API Gateway Version 2 integration. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_integration import Apigatewayv2Integration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2Integration(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + integration_type="MOCK" + ) +``` + +### Lambda Integration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
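+# This AWS_PROXY integration passes the Lambda function's invoke_arn (not its
+# plain arn) as integration_uri, as described under `integration_uri` below.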
+# +from imports.aws.apigatewayv2_integration import Apigatewayv2Integration +from imports.aws.lambda_function import LambdaFunction +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = LambdaFunction(self, "example", + filename="example.zip", + function_name="Example", + handler="index.handler", + role=Token.as_string(aws_iam_role_example.arn), + runtime="nodejs16.x" + ) + aws_apigatewayv2_integration_example = Apigatewayv2Integration(self, "example_1", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + connection_type="INTERNET", + content_handling_strategy="CONVERT_TO_TEXT", + description="Lambda example", + integration_method="POST", + integration_type="AWS_PROXY", + integration_uri=example.invoke_arn, + passthrough_behavior="WHEN_NO_MATCH" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_apigatewayv2_integration_example.override_logical_id("example") +``` + +### AWS Service Integration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_integration import Apigatewayv2Integration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2Integration(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + credentials_arn=Token.as_string(aws_iam_role_example.arn), + description="SQS example", + integration_subtype="SQS-SendMessage", + integration_type="AWS_PROXY", + request_parameters={ + "MessageBody": "$request.body.message", + "QueueUrl": "$request.header.queueUrl" + } + ) +``` + +### Private Integration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
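+# This private integration reaches an internal load balancer listener through
+# the VPC link named by connection_id; response_parameters rewrite selected
+# response headers and status codes per matched status_code.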
+# +from imports.aws.apigatewayv2_integration import Apigatewayv2Integration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2Integration(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + connection_id=Token.as_string(aws_apigatewayv2_vpc_link_example.id), + connection_type="VPC_LINK", + credentials_arn=Token.as_string(aws_iam_role_example.arn), + description="Example with a load balancer", + integration_method="ANY", + integration_type="HTTP_PROXY", + integration_uri=Token.as_string(aws_lb_listener_example.arn), + request_parameters={ + "append:header.authforintegration": "$context.authorizer.authorizerResponse", + "overwrite:path": "staticValueForIntegration" + }, + response_parameters=[Apigatewayv2IntegrationResponseParameters( + mappings={ + "append:header.auth": "$context.authorizer.authorizerResponse" + }, + status_code=Token.as_string(403) + ), Apigatewayv2IntegrationResponseParameters( + mappings={ + "overwrite:statuscode": "204" + }, + status_code=Token.as_string(200) + ) + ], + tls_config=Apigatewayv2IntegrationTlsConfig( + server_name_to_verify="example.com" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API identifier. +* `integration_type` - (Required) Integration type of an integration. +Valid values: `AWS` (supported only for WebSocket APIs), `AWS_PROXY`, `HTTP` (supported only for WebSocket APIs), `HTTP_PROXY`, `MOCK` (supported only for WebSocket APIs). For an HTTP API private integration, use `HTTP_PROXY`. +* `connection_id` - (Optional) ID of the [VPC link](apigatewayv2_vpc_link.html) for a private integration. Supported only for HTTP APIs. Must be between 1 and 1024 characters in length. +* `connection_type` - (Optional) Type of the network connection to the integration endpoint. Valid values: `INTERNET`, `VPC_LINK`. Default is `INTERNET`. +* `content_handling_strategy` - (Optional) How to handle response payload content type conversions. Valid values: `CONVERT_TO_BINARY`, `CONVERT_TO_TEXT`. Supported only for WebSocket APIs. +* `credentials_arn` - (Optional) Credentials required for the integration, if any. +* `description` - (Optional) Description of the integration. +* `integration_method` - (Optional) Integration's HTTP method. Must be specified if `integration_type` is not `MOCK`. +* `integration_subtype` - (Optional) AWS service action to invoke. Supported only for HTTP APIs when `integration_type` is `AWS_PROXY`. See the [AWS service integration reference](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-aws-services-reference.html) documentation for supported values. Must be between 1 and 128 characters in length. +* `integration_uri` - (Optional) URI of the Lambda function for a Lambda proxy integration, when `integration_type` is `AWS_PROXY`. +For an `HTTP` integration, specify a fully-qualified URL. For an HTTP API private integration, specify the ARN of an Application Load Balancer listener, Network Load Balancer listener, or AWS Cloud Map service. +* `passthrough_behavior` - (Optional) Pass-through behavior for incoming requests based on the Content-Type header in the request, and the available mapping templates specified as the `request_templates` attribute. +Valid values: `WHEN_NO_MATCH`, `WHEN_NO_TEMPLATES`, `NEVER`. Default is `WHEN_NO_MATCH`. Supported only for WebSocket APIs. 
+* `payload_format_version` - (Optional) The [format of the payload](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.proxy-format) sent to an integration. Valid values: `1.0`, `2.0`. Default is `1.0`. +* `request_parameters` - (Optional) For WebSocket APIs, a key-value map specifying request parameters that are passed from the method request to the backend. +For HTTP APIs with a specified `integration_subtype`, a key-value map specifying parameters that are passed to `AWS_PROXY` integrations. +For HTTP APIs without a specified `integration_subtype`, a key-value map specifying how to transform HTTP requests before sending them to the backend. +See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-parameter-mapping.html) for details. +* `request_templates` - (Optional) Map of [Velocity](https://velocity.apache.org/) templates that are applied on the request payload based on the value of the Content-Type header sent by the client. Supported only for WebSocket APIs. +* `response_parameters` - (Optional) Mappings to transform the HTTP response from a backend integration before returning the response to clients. Supported only for HTTP APIs. +* `template_selection_expression` - (Optional) The [template selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-template-selection-expressions) for the integration. +* `timeout_milliseconds` - (Optional) Custom timeout between 50 and 29,000 milliseconds for WebSocket APIs and between 50 and 30,000 milliseconds for HTTP APIs. +The default timeout is 29 seconds for WebSocket APIs and 30 seconds for HTTP APIs. +Terraform will only perform drift detection of its value when present in a configuration. +* `tls_config` - (Optional) TLS configuration for a private integration. Supported only for HTTP APIs. + +The `response_parameters` object supports the following: + +* `status_code` - (Required) HTTP status code in the range 200-599. +* `mappings` - (Required) Key-value map. The key of this map identifies the location of the request parameter to change, and how to change it. The corresponding value specifies the new data for the parameter. +See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-parameter-mapping.html) for details. + +The `tls_config` object supports the following: + +* `server_name_to_verify` - (Optional) If you specify a server name, API Gateway uses it to verify the hostname on the integration's certificate. The server name is also included in the TLS handshake to support Server Name Indication (SNI) or virtual hosting. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Integration identifier. +* `integration_response_selection_expression` - The [integration response selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-integration-response-selection-expressions) for the integration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_integration` using the API identifier and integration identifier. 
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_apigatewayv2_integration` using the API identifier and integration identifier. For example:

```console
% terraform import aws_apigatewayv2_integration.example aabbccddee/1122334
```

-> **Note:** The API Gateway managed integration created as part of [_quick_create_](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-basic-concept.html#apigateway-definition-quick-create) cannot be imported.

 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apigatewayv2_integration_response.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_integration_response.html.markdown
new file mode 100644
index 00000000000..66d693b5b57
--- /dev/null
+++ b/website/docs/cdktf/python/r/apigatewayv2_integration_response.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_integration_response"
+description: |-
+  Manages an Amazon API Gateway Version 2 integration response.
+---
+
+
+
+# Resource: aws_apigatewayv2_integration_response
+
+Manages an Amazon API Gateway Version 2 integration response.
+More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html).
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apigatewayv2_integration_response import Apigatewayv2IntegrationResponse
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Apigatewayv2IntegrationResponse(self, "example",
+            api_id=Token.as_string(aws_apigatewayv2_api_example.id),
+            integration_id=Token.as_string(aws_apigatewayv2_integration_example.id),
+            integration_response_key="/200/"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `api_id` - (Required) API identifier.
+* `integration_id` - (Required) Identifier of the [`aws_apigatewayv2_integration`](/docs/providers/aws/r/apigatewayv2_integration.html).
+* `integration_response_key` - (Required) Integration response key.
+* `content_handling_strategy` - (Optional) How to handle response payload content type conversions. Valid values: `CONVERT_TO_BINARY`, `CONVERT_TO_TEXT`.
+* `response_templates` - (Optional) Map of Velocity templates that are applied to the response payload based on the value of the Content-Type header. See the sketch below.
+* `template_selection_expression` - (Optional) The [template selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-template-selection-expressions) for the integration response.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Integration response identifier. 
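+
+For WebSocket APIs, `response_templates` and `template_selection_expression` work together: API Gateway evaluates the expression (here, to the backend status code) and applies the matching template to the payload. The following is a hedged sketch extending the Basic example above; the template body and selection expression are illustrative assumptions, and the snippet belongs inside the same stack:
+
+```python
+# A hedged sketch, assuming the selection expression resolves to the "200" key.
+Apigatewayv2IntegrationResponse(self, "transformed",
+    api_id=Token.as_string(aws_apigatewayv2_api_example.id),
+    integration_id=Token.as_string(aws_apigatewayv2_integration_example.id),
+    integration_response_key="/200/",
+    # Wrap the raw backend payload in an envelope before it reaches the client.
+    response_templates={
+        "200": "{\"data\": $input.body}"
+    },
+    template_selection_expression="$integration.response.statuscode"
+)
+```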
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_integration_response` using the API identifier, integration identifier and integration response identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_integration_response` using the API identifier, integration identifier and integration response identifier. For example: + +```console +% terraform import aws_apigatewayv2_integration_response.example aabbccddee/1122334/998877 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_model.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_model.html.markdown new file mode 100644 index 00000000000..d8eee771ae4 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_model.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_model" +description: |- + Manages an Amazon API Gateway Version 2 model. +--- + + + +# Resource: aws_apigatewayv2_model + +Manages an Amazon API Gateway Version 2 [model](https://docs.aws.amazon.com/apigateway/latest/developerguide/models-mappings.html#models-mappings-models). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_model import Apigatewayv2Model +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2Model(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + content_type="application/json", + name="example", + schema=Token.as_string( + Fn.jsonencode({ + "$schema": "http://json-schema.org/draft-04/schema#", + "properties": { + "id": { + "type": "string" + } + }, + "title": "ExampleModel", + "type": "object" + })) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API identifier. +* `content_type` - (Required) The content-type for the model, for example, `application/json`. Must be between 1 and 256 characters in length. +* `name` - (Required) Name of the model. Must be alphanumeric. Must be between 1 and 128 characters in length. +* `schema` - (Required) Schema for the model. This should be a [JSON schema draft 4](https://tools.ietf.org/html/draft-zyp-json-schema-04) model. Must be less than or equal to 32768 characters in length. +* `description` - (Optional) Description of the model. Must be between 1 and 128 characters in length. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Model identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_model` using the API identifier and model identifier. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_model` using the API identifier and model identifier. For example: + +```console +% terraform import aws_apigatewayv2_model.example aabbccddee/1122334 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_route.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_route.html.markdown new file mode 100644 index 00000000000..b786ca77362 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_route.html.markdown @@ -0,0 +1,135 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_route" +description: |- + Manages an Amazon API Gateway Version 2 route. +--- + + + +# Resource: aws_apigatewayv2_route + +Manages an Amazon API Gateway Version 2 route. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/welcome.html) for [WebSocket](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-develop-routes.html) and [HTTP](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-routes.html) APIs. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_api import Apigatewayv2Api +from imports.aws.apigatewayv2_route import Apigatewayv2Route +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Apigatewayv2Api(self, "example", + name="example-websocket-api", + protocol_type="WEBSOCKET", + route_selection_expression="$request.body.action" + ) + aws_apigatewayv2_route_example = Apigatewayv2Route(self, "example_1", + api_id=example.id, + route_key="$default" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_apigatewayv2_route_example.override_logical_id("example") +``` + +### HTTP Proxy Integration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_api import Apigatewayv2Api +from imports.aws.apigatewayv2_integration import Apigatewayv2Integration +from imports.aws.apigatewayv2_route import Apigatewayv2Route +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Apigatewayv2Api(self, "example", + name="example-http-api", + protocol_type="HTTP" + ) + aws_apigatewayv2_integration_example = Apigatewayv2Integration(self, "example_1", + api_id=example.id, + integration_method="ANY", + integration_type="HTTP_PROXY", + integration_uri="https://example.com/{proxy}" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_apigatewayv2_integration_example.override_logical_id("example") + aws_apigatewayv2_route_example = Apigatewayv2Route(self, "example_2", + api_id=example.id, + route_key="ANY /example/{proxy+}", + target="integrations/${" + aws_apigatewayv2_integration_example.id + "}" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_apigatewayv2_route_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API identifier. +* `route_key` - (Required) Route key for the route. For HTTP APIs, the route key can be either `$default`, or a combination of an HTTP method and resource path, for example, `GET /pets`. +* `api_key_required` - (Optional) Boolean whether an API key is required for the route. Defaults to `false`. Supported only for WebSocket APIs. +* `authorization_scopes` - (Optional) Authorization scopes supported by this route. The scopes are used with a JWT authorizer to authorize the method invocation. +* `authorization_type` - (Optional) Authorization type for the route. +For WebSocket APIs, valid values are `NONE` for open access, `AWS_IAM` for using AWS IAM permissions, and `CUSTOM` for using a Lambda authorizer. +For HTTP APIs, valid values are `NONE` for open access, `JWT` for using JSON Web Tokens, `AWS_IAM` for using AWS IAM permissions, and `CUSTOM` for using a Lambda authorizer. +Defaults to `NONE`. +* `authorizer_id` - (Optional) Identifier of the [`aws_apigatewayv2_authorizer`](apigatewayv2_authorizer.html) resource to be associated with this route. +* `model_selection_expression` - (Optional) The [model selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-model-selection-expressions) for the route. Supported only for WebSocket APIs. +* `operation_name` - (Optional) Operation name for the route. Must be between 1 and 64 characters in length. +* `request_models` - (Optional) Request models for the route. Supported only for WebSocket APIs. +* `request_parameter` - (Optional) Request parameters for the route. Supported only for WebSocket APIs. +* `route_response_selection_expression` - (Optional) The [route response selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-route-response-selection-expressions) for the route. Supported only for WebSocket APIs. +* `target` - (Optional) Target for the route, of the form `integrations/`*`IntegrationID`*, where *`IntegrationID`* is the identifier of an [`aws_apigatewayv2_integration`](apigatewayv2_integration.html) resource. + +The `request_parameter` object supports the following: + +* `request_parameter_key` - (Required) Request parameter key. This is a [request data mapping parameter](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-data-mapping.html#websocket-mapping-request-parameters). +* `required` - (Required) Boolean whether or not the parameter is required. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Route identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_route` using the API identifier and route identifier. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_route` using the API identifier and route identifier. For example: + +```console +% terraform import aws_apigatewayv2_route.example aabbccddee/1122334 +``` + +-> **Note:** The API Gateway managed route created as part of [_quick_create_](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-basic-concept.html#apigateway-definition-quick-create) cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_route_response.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_route_response.html.markdown new file mode 100644 index 00000000000..134563eec89 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_route_response.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_route_response" +description: |- + Manages an Amazon API Gateway Version 2 route response. +--- + + + +# Resource: aws_apigatewayv2_route_response + +Manages an Amazon API Gateway Version 2 route response. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_route_response import Apigatewayv2RouteResponse +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2RouteResponse(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + route_id=Token.as_string(aws_apigatewayv2_route_example.id), + route_response_key="$default" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API identifier. +* `route_id` - (Required) Identifier of the [`aws_apigatewayv2_route`](/docs/providers/aws/r/apigatewayv2_route.html). +* `route_response_key` - (Required) Route response key. +* `model_selection_expression` - (Optional) The [model selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-model-selection-expressions) for the route response. +* `response_models` - (Optional) Response models for the route response. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Route response identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_route_response` using the API identifier, route identifier and route response identifier. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_route_response` using the API identifier, route identifier and route response identifier. For example: + +```console +% terraform import aws_apigatewayv2_route_response.example aabbccddee/1122334/998877 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_stage.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_stage.html.markdown new file mode 100644 index 00000000000..dab0ea15930 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_stage.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_stage" +description: |- + Manages an Amazon API Gateway Version 2 stage. +--- + + + +# Resource: aws_apigatewayv2_stage + +Manages an Amazon API Gateway Version 2 stage. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_stage import Apigatewayv2Stage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2Stage(self, "example", + api_id=Token.as_string(aws_apigatewayv2_api_example.id), + name="example-stage" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `api_id` - (Required) API identifier. +* `name` - (Required) Name of the stage. Must be between 1 and 128 characters in length. + +The following arguments are optional: + +* `access_log_settings` - (Optional) Settings for logging access in this stage. +Use the [`aws_api_gateway_account`](/docs/providers/aws/r/api_gateway_account.html) resource to configure [permissions for CloudWatch Logging](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html#set-up-access-logging-permissions). +* `auto_deploy` - (Optional) Whether updates to an API automatically trigger a new deployment. Defaults to `false`. Applicable for HTTP APIs. +* `client_certificate_id` - (Optional) Identifier of a client certificate for the stage. Use the [`aws_api_gateway_client_certificate`](/docs/providers/aws/r/api_gateway_client_certificate.html) resource to configure a client certificate. +Supported only for WebSocket APIs. +* `default_route_settings` - (Optional) Default route settings for the stage. +* `deployment_id` - (Optional) Deployment identifier of the stage. Use the [`aws_apigatewayv2_deployment`](/docs/providers/aws/r/apigatewayv2_deployment.html) resource to configure a deployment. +* `description` - (Optional) Description for the stage. Must be less than or equal to 1024 characters in length. +* `route_settings` - (Optional) Route settings for the stage. +* `stage_variables` - (Optional) Map that defines the stage variables for the stage. +* `tags` - (Optional) Map of tags to assign to the stage. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### access_log_settings
+
+* `destination_arn` - (Required) ARN of the CloudWatch Logs log group to receive access logs. Any trailing `:*` is trimmed from the ARN.
+* `format` - (Required) Single-line [format](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html#apigateway-cloudwatch-log-formats) of the access log data. Refer to log settings for [HTTP](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-logging-variables.html) or [WebSocket](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-logging.html).
+
+### default_route_settings
+
+* `data_trace_enabled` - (Optional) Whether data trace logging is enabled for the default route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Defaults to `false`. Supported only for WebSocket APIs.
+* `detailed_metrics_enabled` - (Optional) Whether detailed metrics are enabled for the default route. Defaults to `false`.
+* `logging_level` - (Optional) Logging level for the default route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Valid values: `ERROR`, `INFO`, `OFF`. Defaults to `OFF`. Supported only for WebSocket APIs. Terraform will only perform drift detection of its value when present in a configuration.
+* `throttling_burst_limit` - (Optional) Throttling burst limit for the default route.
+* `throttling_rate_limit` - (Optional) Throttling rate limit for the default route.
+
+### route_settings
+
+* `route_key` - (Required) Route key.
+* `data_trace_enabled` - (Optional) Whether data trace logging is enabled for the route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Defaults to `false`. Supported only for WebSocket APIs.
+* `detailed_metrics_enabled` - (Optional) Whether detailed metrics are enabled for the route. Defaults to `false`.
+* `logging_level` - (Optional) Logging level for the route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Valid values: `ERROR`, `INFO`, `OFF`. Defaults to `OFF`. Supported only for WebSocket APIs. Terraform will only perform drift detection of its value when present in a configuration.
+* `throttling_burst_limit` - (Optional) Throttling burst limit for the route.
+* `throttling_rate_limit` - (Optional) Throttling rate limit for the route.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Stage identifier.
+* `arn` - ARN of the stage.
+* `execution_arn` - ARN prefix to be used in an [`aws_lambda_permission`](/docs/providers/aws/r/lambda_permission.html)'s `source_arn` attribute.
+For WebSocket APIs this attribute can additionally be used in an [`aws_iam_policy`](/docs/providers/aws/r/iam_policy.html) to authorize access to the [`@connections` API](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-how-to-call-websocket-api-connections.html).
+See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-control-access-iam.html) for details.
+* `invoke_url` - URL to invoke the API pointing to the stage, + e.g., `wss://z4675bid1j.execute-api.eu-west-2.amazonaws.com/example-stage`, or `https://z4675bid1j.execute-api.eu-west-2.amazonaws.com/` +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_stage` using the API identifier and stage name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_stage` using the API identifier and stage name. For example: + +```console +% terraform import aws_apigatewayv2_stage.example aabbccddee/example-stage +``` + +-> **Note:** The API Gateway managed stage created as part of [_quick_create_](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-basic-concept.html#apigateway-definition-quick-create) cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apigatewayv2_vpc_link.html.markdown b/website/docs/cdktf/python/r/apigatewayv2_vpc_link.html.markdown new file mode 100644 index 00000000000..787b1f13613 --- /dev/null +++ b/website/docs/cdktf/python/r/apigatewayv2_vpc_link.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_vpc_link" +description: |- + Manages an Amazon API Gateway Version 2 VPC Link. +--- + + + +# Resource: aws_apigatewayv2_vpc_link + +Manages an Amazon API Gateway Version 2 VPC Link. + +-> **Note:** Amazon API Gateway Version 2 VPC Links enable private integrations that connect HTTP APIs to private resources in a VPC. +To enable private integration for REST APIs, use the Amazon API Gateway Version 1 VPC Link [resource](/docs/providers/aws/r/api_gateway_vpc_link.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apigatewayv2_vpc_link import Apigatewayv2VpcLink +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Apigatewayv2VpcLink(self, "example", + name="example", + security_group_ids=[Token.as_string(data_aws_security_group_example.id)], + subnet_ids=Token.as_list(data_aws_subnets_example.ids), + tags={ + "Usage": "example" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the VPC Link. Must be between 1 and 128 characters in length. +* `security_group_ids` - (Required) Security group IDs for the VPC Link. +* `subnet_ids` - (Required) Subnet IDs for the VPC Link. +* `tags` - (Optional) Map of tags to assign to the VPC Link. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - VPC Link identifier. +* `arn` - VPC Link ARN. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_apigatewayv2_vpc_link` using the VPC Link identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_apigatewayv2_vpc_link` using the VPC Link identifier. For example: + +```console +% terraform import aws_apigatewayv2_vpc_link.example aabbccddee +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/app_cookie_stickiness_policy.html.markdown b/website/docs/cdktf/python/r/app_cookie_stickiness_policy.html.markdown new file mode 100644 index 00000000000..6943ca41901 --- /dev/null +++ b/website/docs/cdktf/python/r/app_cookie_stickiness_policy.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "ELB Classic" +layout: "aws" +page_title: "AWS: aws_app_cookie_stickiness_policy" +description: |- + Provides an application cookie stickiness policy, which allows an ELB to wed its stickiness cookie to a cookie generated by your application. +--- + + + +# Resource: aws_app_cookie_stickiness_policy + +Provides an application cookie stickiness policy, which allows an ELB to wed its sticky cookie's expiration to a cookie generated by your application. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.app_cookie_stickiness_policy import AppCookieStickinessPolicy +from imports.aws.elb import Elb +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + lb = Elb(self, "lb", + availability_zones=["us-east-1a"], + listener=[ElbListener( + instance_port=8000, + instance_protocol="http", + lb_port=80, + lb_protocol="http" + ) + ], + name="test-lb" + ) + AppCookieStickinessPolicy(self, "foo", + cookie_name="MyAppCookie", + lb_port=80, + load_balancer=lb.name, + name="foo_policy" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the stickiness policy. +* `load_balancer` - (Required) Name of load balancer to which the policy + should be attached. +* `lb_port` - (Required) Load balancer port to which the policy + should be applied. This must be an active listener on the load +balancer. +* `cookie_name` - (Required) Application cookie whose lifetime the ELB's cookie should follow. 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the policy. +* `name` - Name of the stickiness policy. +* `load_balancer` - Name of load balancer to which the policy is attached. +* `lb_port` - Load balancer port to which the policy is applied. +* `cookie_name` - Application cookie whose lifetime the ELB's cookie should follow. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import application cookie stickiness policies using the ELB name, port, and policy name separated by colons (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import application cookie stickiness policies using the ELB name, port, and policy name separated by colons (`:`). For example: + +```console +% terraform import aws_app_cookie_stickiness_policy.example my-elb:80:my-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appautoscaling_policy.html.markdown b/website/docs/cdktf/python/r/appautoscaling_policy.html.markdown new file mode 100644 index 00000000000..ab1ec544773 --- /dev/null +++ b/website/docs/cdktf/python/r/appautoscaling_policy.html.markdown @@ -0,0 +1,432 @@ +--- +subcategory: "Application Auto Scaling" +layout: "aws" +page_title: "AWS: aws_appautoscaling_policy" +description: |- + Provides an Application AutoScaling Policy resource. +--- + + + +# Resource: aws_appautoscaling_policy + +Provides an Application AutoScaling Policy resource. + +## Example Usage + +### DynamoDB Table Autoscaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
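+# This example registers the table's read capacity (5-100 units) as a
+# scalable target and attaches a target-tracking policy that holds
+# DynamoDBReadCapacityUtilization near 70%.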
+# +from imports.aws.appautoscaling_policy import AppautoscalingPolicy +from imports.aws.appautoscaling_target import AppautoscalingTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + dynamodb_table_read_target = AppautoscalingTarget(self, "dynamodb_table_read_target", + max_capacity=100, + min_capacity=5, + resource_id="table/tableName", + scalable_dimension="dynamodb:table:ReadCapacityUnits", + service_namespace="dynamodb" + ) + AppautoscalingPolicy(self, "dynamodb_table_read_policy", + name="DynamoDBReadCapacityUtilization:${" + dynamodb_table_read_target.resource_id + "}", + policy_type="TargetTrackingScaling", + resource_id=dynamodb_table_read_target.resource_id, + scalable_dimension=dynamodb_table_read_target.scalable_dimension, + service_namespace=dynamodb_table_read_target.service_namespace, + target_tracking_scaling_policy_configuration=AppautoscalingPolicyTargetTrackingScalingPolicyConfiguration( + predefined_metric_specification=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecification( + predefined_metric_type="DynamoDBReadCapacityUtilization" + ), + target_value=70 + ) + ) +``` + +### ECS Service Autoscaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Op, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appautoscaling_policy import AppautoscalingPolicy +from imports.aws.appautoscaling_target import AppautoscalingTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ecs_target = AppautoscalingTarget(self, "ecs_target", + max_capacity=4, + min_capacity=1, + resource_id="service/clusterName/serviceName", + scalable_dimension="ecs:service:DesiredCount", + service_namespace="ecs" + ) + AppautoscalingPolicy(self, "ecs_policy", + name="scale-down", + policy_type="StepScaling", + resource_id=ecs_target.resource_id, + scalable_dimension=ecs_target.scalable_dimension, + service_namespace=ecs_target.service_namespace, + step_scaling_policy_configuration=AppautoscalingPolicyStepScalingPolicyConfiguration( + adjustment_type="ChangeInCapacity", + cooldown=60, + metric_aggregation_type="Maximum", + step_adjustment=[AppautoscalingPolicyStepScalingPolicyConfigurationStepAdjustment( + metric_interval_upper_bound=Token.as_string(0), + scaling_adjustment=Token.as_number(Op.negate(1)) + ) + ] + ) + ) +``` + +### Preserve desired count when updating an autoscaled ECS Service + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
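+# Ignoring changes to desired_count below keeps Terraform from undoing
+# capacity adjustments made by Application Auto Scaling at runtime.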
+#
+from imports.aws.ecs_service import EcsService
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsService(self, "ecs_service",
+            cluster="clusterName",
+            desired_count=2,
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["desired_count"]
+            ),
+            name="serviceName",
+            task_definition="taskDefinitionFamily:1"
+        )
+```
+
+### Aurora Read Replica Autoscaling
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appautoscaling_policy import AppautoscalingPolicy
+from imports.aws.appautoscaling_target import AppautoscalingTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        replicas = AppautoscalingTarget(self, "replicas",
+            max_capacity=15,
+            min_capacity=1,
+            resource_id="cluster:${" + example.id + "}",
+            scalable_dimension="rds:cluster:ReadReplicaCount",
+            service_namespace="rds"
+        )
+        aws_appautoscaling_policy_replicas = AppautoscalingPolicy(self, "replicas_1",
+            name="cpu-auto-scaling",
+            policy_type="TargetTrackingScaling",
+            resource_id=replicas.resource_id,
+            scalable_dimension=replicas.scalable_dimension,
+            service_namespace=replicas.service_namespace,
+            target_tracking_scaling_policy_configuration=AppautoscalingPolicyTargetTrackingScalingPolicyConfiguration(
+                predefined_metric_specification=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecification(
+                    predefined_metric_type="RDSReaderAverageCPUUtilization"
+                ),
+                scale_in_cooldown=300,
+                scale_out_cooldown=300,
+                target_value=75
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appautoscaling_policy_replicas.override_logical_id("replicas")
+```
+
+### Create target tracking scaling policy using metric math
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
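+# The metric math below computes SQS backlog per ECS task: m1 (queue depth)
+# divided by m2 (running task count); only the expression e1 returns data
+# to the scaling policy.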
+#
+from imports.aws.appautoscaling_policy import AppautoscalingPolicy
+from imports.aws.appautoscaling_target import AppautoscalingTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ecs_target = AppautoscalingTarget(self, "ecs_target",
+            max_capacity=4,
+            min_capacity=1,
+            resource_id="service/clusterName/serviceName",
+            scalable_dimension="ecs:service:DesiredCount",
+            service_namespace="ecs"
+        )
+        AppautoscalingPolicy(self, "example",
+            name="foo",
+            policy_type="TargetTrackingScaling",
+            resource_id=ecs_target.resource_id,
+            scalable_dimension=ecs_target.scalable_dimension,
+            service_namespace=ecs_target.service_namespace,
+            target_tracking_scaling_policy_configuration=AppautoscalingPolicyTargetTrackingScalingPolicyConfiguration(
+                customized_metric_specification=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecification(
+                    metrics=[AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetrics(
+                        id="m1",
+                        label="Get the queue size (the number of messages waiting to be processed)",
+                        metric_stat=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStat(
+                            metric=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStatMetric(
+                                dimensions=[AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStatMetricDimensions(
+                                    name="QueueName",
+                                    value="my-queue"
+                                )
+                                ],
+                                metric_name="ApproximateNumberOfMessagesVisible",
+                                namespace="AWS/SQS"
+                            ),
+                            stat="Sum"
+                        ),
+                        return_data=False
+                    ), AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetrics(
+                        id="m2",
+                        label="Get the ECS running task count (the number of currently running tasks)",
+                        metric_stat=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStat(
+                            metric=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStatMetric(
+                                dimensions=[AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStatMetricDimensions(
+                                    name="ClusterName",
+                                    value="default"
+                                ), AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStatMetricDimensions(
+                                    name="ServiceName",
+                                    value="web-app"
+                                )
+                                ],
+                                metric_name="RunningTaskCount",
+                                namespace="ECS/ContainerInsights"
+                            ),
+                            stat="Average"
+                        ),
+                        return_data=False
+                    ), AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetrics(
+                        expression="m1 / m2",
+                        id="e1",
+                        label="Calculate the backlog per instance",
+                        return_data=True
+                    )
+                    ]
+                ),
+                target_value=100
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the policy. Must be between 1 and 255 characters in length.
+* `policy_type` - (Optional) Policy type. Valid values are `StepScaling` and `TargetTrackingScaling`. Defaults to `StepScaling`. Certain services support only one policy type. For more information see the [Target Tracking Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-target-tracking.html) and [Step Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-step-scaling-policies.html) documentation.
+* `resource_id` - (Required) Resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html)
+* `scalable_dimension` - (Required) Scalable dimension of the scalable target. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html)
+* `service_namespace` - (Required) AWS service namespace of the scalable target. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html)
+* `step_scaling_policy_configuration` - (Optional) Step scaling policy configuration, requires `policy_type = "StepScaling"` (default). See supported fields below.
+* `target_tracking_scaling_policy_configuration` - (Optional) Target tracking policy, requires `policy_type = "TargetTrackingScaling"`. See supported fields below.
+
+### step_scaling_policy_configuration
+
+The `step_scaling_policy_configuration` configuration block supports the following arguments:
+
+* `adjustment_type` - (Required) Whether the adjustment is an absolute number or a percentage of the current capacity. Valid values are `ChangeInCapacity`, `ExactCapacity`, and `PercentChangeInCapacity`.
+* `cooldown` - (Required) Amount of time, in seconds, after a scaling activity completes and before the next scaling activity can start.
+* `metric_aggregation_type` - (Optional) Aggregation type for the policy's metrics. Valid values are `Minimum`, `Maximum`, and `Average`. Without a value, AWS will treat the aggregation type as `Average`.
+* `min_adjustment_magnitude` - (Optional) Minimum number to adjust your scalable dimension as a result of a scaling activity. If the adjustment type is `PercentChangeInCapacity`, the scaling policy changes the scalable dimension of the scalable target by this amount.
+* `step_adjustment` - (Optional) Set of adjustments that manage scaling. These have the following structure:
+
+  ```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Op, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
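+# Interval bounds are offsets from the alarm threshold: the first step
+# removes one unit while the metric is 1-2 above the threshold, the second
+# adds one unit while it is 2-3 above.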
+#
+from imports.aws.appautoscaling_policy import AppautoscalingPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, policy_name, resource_id, scalable_dimension, service_namespace):
+        super().__init__(scope, name)
+        AppautoscalingPolicy(self, "ecs_policy",
+            step_scaling_policy_configuration=AppautoscalingPolicyStepScalingPolicyConfiguration(
+                step_adjustment=[AppautoscalingPolicyStepScalingPolicyConfigurationStepAdjustment(
+                    metric_interval_lower_bound=Token.as_string(1),
+                    metric_interval_upper_bound=Token.as_string(2),
+                    scaling_adjustment=Token.as_number(Op.negate(1))
+                ), AppautoscalingPolicyStepScalingPolicyConfigurationStepAdjustment(
+                    metric_interval_lower_bound=Token.as_string(2),
+                    metric_interval_upper_bound=Token.as_string(3),
+                    scaling_adjustment=1
+                )
+                ]
+            ),
+            name=policy_name,
+            resource_id=resource_id,
+            scalable_dimension=scalable_dimension,
+            service_namespace=service_namespace
+        )
+```
+
+* `metric_interval_lower_bound` - (Optional) Lower bound for the difference between the alarm threshold and the CloudWatch metric. Without a value, AWS will treat this bound as negative infinity.
+* `metric_interval_upper_bound` - (Optional) Upper bound for the difference between the alarm threshold and the CloudWatch metric. Without a value, AWS will treat this bound as infinity. The upper bound must be greater than the lower bound.
+* `scaling_adjustment` - (Required) Number of members by which to scale, when the adjustment bounds are breached. A positive value scales up. A negative value scales down.
+
+### target_tracking_scaling_policy_configuration
+
+The `target_tracking_scaling_policy_configuration` configuration block supports the following arguments:
+
+* `target_value` - (Required) Target value for the metric.
+* `disable_scale_in` - (Optional) Whether scale in by the target tracking policy is disabled. If the value is `true`, scale in is disabled and the target tracking policy won't remove capacity from the scalable resource. Otherwise, scale in is enabled and the target tracking policy can remove capacity from the scalable resource. The default value is `false`.
+* `scale_in_cooldown` - (Optional) Amount of time, in seconds, after a scale in activity completes before another scale in activity can start.
+* `scale_out_cooldown` - (Optional) Amount of time, in seconds, after a scale out activity completes before another scale out activity can start.
+* `customized_metric_specification` - (Optional) Custom CloudWatch metric. Documentation can be found at: [AWS Customized Metric Specification](https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_CustomizedMetricSpecification.html). See supported fields below.
+* `predefined_metric_specification` - (Optional) Predefined metric. See supported fields below.
+
+### target_tracking_scaling_policy_configuration customized_metric_specification
+
+Example usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
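+# Tracks the custom CloudWatch metric MyNamespace/MyUtilizationMetric and
+# scales to hold its average near the target value of 40 (Percent).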
+#
+from imports.aws.appautoscaling_policy import AppautoscalingPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, policy_name, resource_id, scalable_dimension, service_namespace):
+        super().__init__(scope, name)
+        AppautoscalingPolicy(self, "example",
+            policy_type="TargetTrackingScaling",
+            target_tracking_scaling_policy_configuration=AppautoscalingPolicyTargetTrackingScalingPolicyConfiguration(
+                customized_metric_specification=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecification(
+                    dimensions=[AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationDimensions(
+                        name="MyOptionalMetricDimensionName",
+                        value="MyOptionalMetricDimensionValue"
+                    )
+                    ],
+                    metric_name="MyUtilizationMetric",
+                    namespace="MyNamespace",
+                    statistic="Average",
+                    unit="Percent"
+                ),
+                target_value=40
+            ),
+            name=policy_name,
+            resource_id=resource_id,
+            scalable_dimension=scalable_dimension,
+            service_namespace=service_namespace
+        )
+```
+
+The `target_tracking_scaling_policy_configuration` `customized_metric_specification` configuration block supports the following arguments:
+
+* `dimensions` - (Optional) Configuration block(s) with the dimensions of the metric if the metric was published with dimensions. Detailed below.
+* `metric_name` - (Optional) Name of the metric.
+* `namespace` - (Optional) Namespace of the metric.
+* `statistic` - (Optional) Statistic of the metric. Valid values: `Average`, `Minimum`, `Maximum`, `SampleCount`, and `Sum`.
+* `unit` - (Optional) Unit of the metric.
+* `metrics` - (Optional) Metrics to include, as a metric data query.
+
+### target_tracking_scaling_policy_configuration customized_metric_specification dimensions
+
+The `target_tracking_scaling_policy_configuration` `customized_metric_specification` `dimensions` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the dimension.
+* `value` - (Required) Value of the dimension.
+
+### target_tracking_scaling_policy_configuration customized_metric_specification metrics
+
+The `target_tracking_scaling_policy_configuration` `customized_metric_specification` `metrics` configuration block supports the following arguments:
+
+* `expression` - (Optional) Math expression used on the returned metric. You must specify either `expression` or `metric_stat`, but not both.
+* `id` - (Required) Short name for the metric used in the target tracking scaling policy.
+* `label` - (Optional) Human-readable label for this metric or expression.
+* `metric_stat` - (Optional) Structure that defines the CloudWatch metric to be used in the target tracking scaling policy. You must specify either `expression` or `metric_stat`, but not both.
+* `return_data` - (Optional) Boolean that indicates whether to return the timestamps and raw data values of this metric. Defaults to `true`.
+
+### target_tracking_scaling_policy_configuration customized_metric_specification metrics metric_stat
+
+The `target_tracking_scaling_policy_configuration` `customized_metric_specification` `metrics` `metric_stat` configuration block supports the following arguments:
+
+* `metric` - (Required) Structure that defines the CloudWatch metric to return, including the metric name, namespace, and dimensions.
+* `stat` - (Required) Statistic of the metrics to return.
+* `unit` - (Optional) Unit of the metrics to return.
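+
+When only a single query is needed, `metrics` can hold one `metric_stat` entry that returns data directly, without an `expression`. A minimal sketch, reusing the SQS queue-depth metric from the metric math example above (the queue name and target value are illustrative):
+
+```python
+# A minimal sketch: one metric_stat query with return_data=True and no math
+# expression. The queue name and target value are illustrative placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appautoscaling_policy import AppautoscalingPolicy
+from imports.aws.appautoscaling_target import AppautoscalingTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ecs_target = AppautoscalingTarget(self, "ecs_target",
+            max_capacity=4,
+            min_capacity=1,
+            resource_id="service/clusterName/serviceName",
+            scalable_dimension="ecs:service:DesiredCount",
+            service_namespace="ecs"
+        )
+        AppautoscalingPolicy(self, "single_query",
+            name="single-query",
+            policy_type="TargetTrackingScaling",
+            resource_id=ecs_target.resource_id,
+            scalable_dimension=ecs_target.scalable_dimension,
+            service_namespace=ecs_target.service_namespace,
+            target_tracking_scaling_policy_configuration=AppautoscalingPolicyTargetTrackingScalingPolicyConfiguration(
+                customized_metric_specification=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecification(
+                    metrics=[AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetrics(
+                        id="m1",
+                        metric_stat=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStat(
+                            metric=AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStatMetric(
+                                dimensions=[AppautoscalingPolicyTargetTrackingScalingPolicyConfigurationCustomizedMetricSpecificationMetricsMetricStatMetricDimensions(
+                                    name="QueueName",
+                                    value="my-queue"
+                                )
+                                ],
+                                metric_name="ApproximateNumberOfMessagesVisible",
+                                namespace="AWS/SQS"
+                            ),
+                            stat="Sum"
+                        ),
+                        # The single query must return data to the policy.
+                        return_data=True
+                    )
+                    ]
+                ),
+                target_value=100
+            )
+        )
+```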
+ +### target_tracking_scaling_policy_configuration customized_metric_specification metrics metric + +The `target_tracking_scaling_policy_configuration` `customized_metric_specification` `metrics` `metric` configuration block supports the following arguments: + +* `dimensions` - (Optional) Dimensions of the metric. +* `metric_name` - (Required) Name of the metric. +* `namespace` - (Required) Namespace of the metric. + +### target_tracking_scaling_policy_configuration customized_metric_specification metrics dimensions + +The `target_tracking_scaling_policy_configuration` `customized_metric_specification` `metrics` `dimensions` configuration block supports the following arguments: + +* `name` - (Required) Name of the dimension. +* `value` - (Required) Value of the dimension. + +### target_tracking_scaling_policy_configuration predefined_metric_specification + +The `target_tracking_scaling_policy_configuration` `predefined_metric_specification` configuration block supports the following arguments: + +* `predefined_metric_type` - (Required) Metric type. +* `resource_label` - (Optional) Reserved for future use if the `predefined_metric_type` is not `ALBRequestCountPerTarget`. If the `predefined_metric_type` is `ALBRequestCountPerTarget`, you must specify this argument. Documentation can be found at: [AWS Predefined Scaling Metric Specification](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_PredefinedScalingMetricSpecification.html). Must be less than or equal to 1023 characters in length. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `alarm_arns` - List of CloudWatch alarm ARNs associated with the scaling policy. +* `arn` - ARN assigned by AWS to the scaling policy. +* `name` - Scaling policy's name. +* `policy_type` - Scaling policy's type. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Application AutoScaling Policy using the `service-namespace` , `resource-id`, `scalable-dimension` and `policy-name` separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Application AutoScaling Policy using the `service-namespace` , `resource-id`, `scalable-dimension` and `policy-name` separated by `/`. For example: + +```console +% terraform import aws_appautoscaling_policy.test-policy service-namespace/resource-id/scalable-dimension/policy-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appautoscaling_scheduled_action.html.markdown b/website/docs/cdktf/python/r/appautoscaling_scheduled_action.html.markdown new file mode 100644 index 00000000000..b6ec9b532ea --- /dev/null +++ b/website/docs/cdktf/python/r/appautoscaling_scheduled_action.html.markdown @@ -0,0 +1,118 @@ +--- +subcategory: "Application Auto Scaling" +layout: "aws" +page_title: "AWS: aws_appautoscaling_scheduled_action" +description: |- + Provides an Application AutoScaling ScheduledAction resource. +--- + + + +# Resource: aws_appautoscaling_scheduled_action + +Provides an Application AutoScaling ScheduledAction resource. 
+
+## Example Usage
+
+### DynamoDB Table Autoscaling
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appautoscaling_scheduled_action import AppautoscalingScheduledAction
+from imports.aws.appautoscaling_target import AppautoscalingTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        dynamodb = AppautoscalingTarget(self, "dynamodb",
+            max_capacity=100,
+            min_capacity=5,
+            resource_id="table/tableName",
+            scalable_dimension="dynamodb:table:ReadCapacityUnits",
+            service_namespace="dynamodb"
+        )
+        aws_appautoscaling_scheduled_action_dynamodb = AppautoscalingScheduledAction(self, "dynamodb_1",
+            name="dynamodb",
+            resource_id=dynamodb.resource_id,
+            scalable_dimension=dynamodb.scalable_dimension,
+            scalable_target_action=AppautoscalingScheduledActionScalableTargetAction(
+                max_capacity=Token.as_string(200),
+                min_capacity=Token.as_string(1)
+            ),
+            schedule="at(2006-01-02T15:04:05)",
+            service_namespace=dynamodb.service_namespace
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appautoscaling_scheduled_action_dynamodb.override_logical_id("dynamodb")
+```
+
+### ECS Service Autoscaling
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appautoscaling_scheduled_action import AppautoscalingScheduledAction
+from imports.aws.appautoscaling_target import AppautoscalingTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ecs = AppautoscalingTarget(self, "ecs",
+            max_capacity=4,
+            min_capacity=1,
+            resource_id="service/clusterName/serviceName",
+            scalable_dimension="ecs:service:DesiredCount",
+            service_namespace="ecs"
+        )
+        aws_appautoscaling_scheduled_action_ecs = AppautoscalingScheduledAction(self, "ecs_1",
+            name="ecs",
+            resource_id=ecs.resource_id,
+            scalable_dimension=ecs.scalable_dimension,
+            scalable_target_action=AppautoscalingScheduledActionScalableTargetAction(
+                max_capacity=Token.as_string(10),
+                min_capacity=Token.as_string(1)
+            ),
+            schedule="at(2006-01-02T15:04:05)",
+            service_namespace=ecs.service_namespace
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appautoscaling_scheduled_action_ecs.override_logical_id("ecs")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the scheduled action.
+* `service_namespace` - (Required) Namespace of the AWS service. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html) Example: `ecs`.
+* `resource_id` - (Required) Identifier of the resource associated with the scheduled action.
Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html)
+* `scalable_dimension` - (Required) Scalable dimension. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html) Example: `ecs:service:DesiredCount`.
+* `scalable_target_action` - (Required) New minimum and maximum capacity. You can set both values or just one. See [below](#scalable-target-action-arguments).
+* `schedule` - (Required) Schedule for this action. The following formats are supported: At expressions - `at(yyyy-mm-ddThh:mm:ss)`, Rate expressions - `rate(value unit)`, Cron expressions - `cron(fields)`. Times for at expressions and cron expressions are evaluated using the time zone configured in `timezone`. Documentation can be found in the `Timezone` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html)
+* `start_time` - (Optional) Date and time for the scheduled action to start in RFC 3339 format. The timezone is not affected by the setting of `timezone`.
+* `end_time` - (Optional) Date and time for the scheduled action to end in RFC 3339 format. The timezone is not affected by the setting of `timezone`.
+* `timezone` - (Optional) Time zone used when setting a scheduled action by using an at or cron expression. Does not affect timezone for `start_time` and `end_time`. Valid values are the [canonical names of the IANA time zones supported by Joda-Time](https://www.joda.org/joda-time/timezones.html), such as `Etc/GMT+9` or `Pacific/Tahiti`. Default is `UTC`.
+
+### Scalable Target Action Arguments
+
+* `max_capacity` - (Optional) Maximum capacity. At least one of `max_capacity` or `min_capacity` must be set.
+* `min_capacity` - (Optional) Minimum capacity. At least one of `min_capacity` or `max_capacity` must be set.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the scheduled action.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appautoscaling_target.html.markdown b/website/docs/cdktf/python/r/appautoscaling_target.html.markdown
new file mode 100644
index 00000000000..63df01049ef
--- /dev/null
+++ b/website/docs/cdktf/python/r/appautoscaling_target.html.markdown
@@ -0,0 +1,178 @@
+---
+subcategory: "Application Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_appautoscaling_target"
+description: |-
+  Provides an Application AutoScaling ScalableTarget resource.
+---
+
+
+
+# Resource: aws_appautoscaling_target
+
+Provides an Application AutoScaling ScalableTarget resource. To manage policies which get attached to the target, see the [`aws_appautoscaling_policy` resource](/docs/providers/aws/r/appautoscaling_policy.html).
+
+~> **NOTE:** Scalable targets created before 2023-03-20 may not have an assigned `arn`. These resources cannot use `tags` or participate in `default_tags`. To prevent `terraform plan` showing differences that can never be reconciled, use the [`lifecycle.ignore_changes`](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes) meta-argument. See the example below.
+ +~> **NOTE:** The [Application Auto Scaling service automatically attempts to manage IAM Service-Linked Roles](https://docs.aws.amazon.com/autoscaling/application/userguide/security_iam_service-with-iam.html#security_iam_service-with-iam-roles) when registering certain service namespaces for the first time. To manually manage this role, see the [`aws_iam_service_linked_role` resource](/docs/providers/aws/r/iam_service_linked_role.html). + +## Example Usage + +### DynamoDB Table Autoscaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appautoscaling_target import AppautoscalingTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppautoscalingTarget(self, "dynamodb_table_read_target", + max_capacity=100, + min_capacity=5, + resource_id="table/${" + example.name + "}", + scalable_dimension="dynamodb:table:ReadCapacityUnits", + service_namespace="dynamodb" + ) +``` + +### DynamoDB Index Autoscaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appautoscaling_target import AppautoscalingTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppautoscalingTarget(self, "dynamodb_index_read_target", + max_capacity=100, + min_capacity=5, + resource_id="table/${" + example.name + "}/index/${" + index_name.value + "}", + scalable_dimension="dynamodb:index:ReadCapacityUnits", + service_namespace="dynamodb" + ) +``` + +### ECS Service Autoscaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appautoscaling_target import AppautoscalingTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppautoscalingTarget(self, "ecs_target", + max_capacity=4, + min_capacity=1, + resource_id="service/${" + example.name + "}/${" + aws_ecs_service_example.name + "}", + scalable_dimension="ecs:service:DesiredCount", + service_namespace="ecs" + ) +``` + +### Aurora Read Replica Autoscaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
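+# Registers the Aurora cluster's read replica count (1-15 replicas);
+# `example.id` refers to an RDS cluster defined elsewhere.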
+#
+from imports.aws.appautoscaling_target import AppautoscalingTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppautoscalingTarget(self, "replicas",
+            max_capacity=15,
+            min_capacity=1,
+            resource_id="cluster:${" + example.id + "}",
+            scalable_dimension="rds:cluster:ReadReplicaCount",
+            service_namespace="rds"
+        )
+```
+
+### Suppressing `tags_all` Differences For Older Resources
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appautoscaling_target import AppautoscalingTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppautoscalingTarget(self, "ecs_target",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["tags_all"]
+            ),
+            max_capacity=4,
+            min_capacity=1,
+            resource_id="service/${" + example.name + "}/${" + aws_ecs_service_example.name + "}",
+            scalable_dimension="ecs:service:DesiredCount",
+            service_namespace="ecs"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `max_capacity` - (Required) Max capacity of the scalable target.
+* `min_capacity` - (Required) Min capacity of the scalable target.
+* `resource_id` - (Required) Resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
+* `role_arn` - (Optional) ARN of the IAM role that allows Application AutoScaling to modify your scalable target on your behalf. This defaults to an IAM Service-Linked Role for most services, and custom IAM Roles are ignored by the API for those namespaces. See the [AWS Application Auto Scaling documentation](https://docs.aws.amazon.com/autoscaling/application/userguide/security_iam_service-with-iam.html#security_iam_service-with-iam-roles) for more information about how this service interacts with IAM.
+* `scalable_dimension` - (Required) Scalable dimension of the scalable target. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
+* `service_namespace` - (Required) AWS service namespace of the scalable target. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
+* `tags` - (Optional) Map of tags to assign to the scalable target. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the scalable target.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Application AutoScaling Target using the `service-namespace` , `resource-id` and `scalable-dimension` separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Application AutoScaling Target using the `service-namespace` , `resource-id` and `scalable-dimension` separated by `/`. For example: + +```console +% terraform import aws_appautoscaling_target.test-target service-namespace/resource-id/scalable-dimension +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appconfig_application.html.markdown b/website/docs/cdktf/python/r/appconfig_application.html.markdown new file mode 100644 index 00000000000..27b34dca55e --- /dev/null +++ b/website/docs/cdktf/python/r/appconfig_application.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_application" +description: |- + Provides an AppConfig Application resource. +--- + + + +# Resource: aws_appconfig_application + +Provides an AppConfig Application resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appconfig_application import AppconfigApplication +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppconfigApplication(self, "example", + description="Example AppConfig Application", + name="example-application-tf", + tags={ + "Type": "AppConfig Application" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name for the application. Must be between 1 and 64 characters in length. +* `description` - (Optional) Description of the application. Can be at most 1024 characters. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig Application. +* `id` - AppConfig application ID. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Applications using their application ID. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AppConfig Applications using their application ID. For example: + +```console +% terraform import aws_appconfig_application.example 71rxuzt +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appconfig_configuration_profile.html.markdown b/website/docs/cdktf/python/r/appconfig_configuration_profile.html.markdown new file mode 100644 index 00000000000..8ae9eb916e2 --- /dev/null +++ b/website/docs/cdktf/python/r/appconfig_configuration_profile.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_configuration_profile" +description: |- + Provides an AppConfig Configuration Profile resource. +--- + + + +# Resource: aws_appconfig_configuration_profile + +Provides an AppConfig Configuration Profile resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appconfig_configuration_profile import AppconfigConfigurationProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppconfigConfigurationProfile(self, "example", + application_id=Token.as_string(aws_appconfig_application_example.id), + description="Example Configuration Profile", + location_uri="hosted", + name="example-configuration-profile-tf", + tags={ + "Type": "AppConfig Configuration Profile" + }, + validator=[AppconfigConfigurationProfileValidator( + content=Token.as_string(aws_lambda_function_example.arn), + type="LAMBDA" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required, Forces new resource) Application ID. Must be between 4 and 7 characters in length. +* `location_uri` - (Required, Forces new resource) URI to locate the configuration. You can specify the AWS AppConfig hosted configuration store, Systems Manager (SSM) document, an SSM Parameter Store parameter, or an Amazon S3 object. For the hosted configuration store, specify `hosted`. For an SSM document, specify either the document name in the format `ssm-document://` or the ARN. For a parameter, specify either the parameter name in the format `ssm-parameter://` or the ARN. For an Amazon S3 object, specify the URI in the following format: `s3:///`. +* `name` - (Required) Name for the configuration profile. Must be between 1 and 64 characters in length. +* `description` - (Optional) Description of the configuration profile. Can be at most 1024 characters. +* `retrieval_role_arn` - (Optional) ARN of an IAM role with permission to access the configuration at the specified `location_uri`. A retrieval role ARN is not required for configurations stored in the AWS AppConfig `hosted` configuration store. It is required for all other sources that store your configuration. +* `tags` - (Optional) Map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) Type of configurations contained in the profile. Valid values: `AWS.AppConfig.FeatureFlags` and `AWS.Freeform`. Default: `AWS.Freeform`. +* `validator` - (Optional) Set of methods for validating the configuration. Maximum of 2. See [Validator](#validator) below for more details. + +### Validator + +The `validator` block supports the following: + +* `content` - (Optional, Required when `type` is `LAMBDA`) Either the JSON Schema content or the ARN of an AWS Lambda function. +* `type` - (Optional) Type of validator. Valid values: `JSON_SCHEMA` and `LAMBDA`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig Configuration Profile. +* `configuration_profile_id` - The configuration profile ID. +* `id` - AppConfig configuration profile ID and application ID separated by a colon (`:`). +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Configuration Profiles using the configuration profile ID and application ID separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AppConfig Configuration Profiles using the configuration profile ID and application ID separated by a colon (`:`). For example: + +```console +% terraform import aws_appconfig_configuration_profile.example 71abcde:11xxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appconfig_deployment.html.markdown b/website/docs/cdktf/python/r/appconfig_deployment.html.markdown new file mode 100644 index 00000000000..2eb5c1e421d --- /dev/null +++ b/website/docs/cdktf/python/r/appconfig_deployment.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_deployment" +description: |- + Provides an AppConfig Deployment resource. +--- + + + +# Resource: aws_appconfig_deployment + +Provides an AppConfig Deployment resource for an [`aws_appconfig_application` resource](appconfig_application.html.markdown). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
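+# Each referenced ID (application, configuration profile, hosted
+# configuration version, deployment strategy, environment) comes from the
+# matching aws_appconfig_* resource defined elsewhere.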
+# +from imports.aws.appconfig_deployment import AppconfigDeployment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppconfigDeployment(self, "example", + application_id=Token.as_string(aws_appconfig_application_example.id), + configuration_profile_id=Token.as_string(aws_appconfig_configuration_profile_example.configuration_profile_id), + configuration_version=Token.as_string(aws_appconfig_hosted_configuration_version_example.version_number), + deployment_strategy_id=Token.as_string(aws_appconfig_deployment_strategy_example.id), + description="My example deployment", + environment_id=Token.as_string(aws_appconfig_environment_example.environment_id), + tags={ + "Type": "AppConfig Deployment" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required, Forces new resource) Application ID. Must be between 4 and 7 characters in length. +* `configuration_profile_id` - (Required, Forces new resource) Configuration profile ID. Must be between 4 and 7 characters in length. +* `configuration_version` - (Required, Forces new resource) Configuration version to deploy. Can be at most 1024 characters. +* `deployment_strategy_id` - (Required, Forces new resource) Deployment strategy ID or name of a predefined deployment strategy. See [Predefined Deployment Strategies](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-deployment-strategy.html#appconfig-creating-deployment-strategy-predefined) for more details. +* `description` - (Optional, Forces new resource) Description of the deployment. Can be at most 1024 characters. +* `environment_id` - (Required, Forces new resource) Environment ID. Must be between 4 and 7 characters in length. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AppConfig application ID, environment ID, and deployment number separated by a slash (`/`). +* `arn` - ARN of the AppConfig Deployment. +* `deployment_number` - Deployment number. +* `state` - State of the deployment. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Deployments using the application ID, environment ID, and deployment number separated by a slash (`/`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AppConfig Deployments using the application ID, environment ID, and deployment number separated by a slash (`/`). 
For example:
+
+```console
+% terraform import aws_appconfig_deployment.example 71abcde/11xxxxx/1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appconfig_deployment_strategy.html.markdown b/website/docs/cdktf/python/r/appconfig_deployment_strategy.html.markdown
new file mode 100644
index 00000000000..a5db7bca426
--- /dev/null
+++ b/website/docs/cdktf/python/r/appconfig_deployment_strategy.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_deployment_strategy"
+description: |-
+  Provides an AppConfig Deployment Strategy resource.
+---
+
+
+
+# Resource: aws_appconfig_deployment_strategy
+
+Provides an AppConfig Deployment Strategy resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appconfig_deployment_strategy import AppconfigDeploymentStrategy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppconfigDeploymentStrategy(self, "example",
+            deployment_duration_in_minutes=3,
+            description="Example Deployment Strategy",
+            final_bake_time_in_minutes=4,
+            growth_factor=10,
+            growth_type="LINEAR",
+            name="example-deployment-strategy-tf",
+            replicate_to="NONE",
+            tags={
+                "Type": "AppConfig Deployment Strategy"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deployment_duration_in_minutes` - (Required) Total amount of time for a deployment to last. Minimum value of 0, maximum value of 1440.
+* `growth_factor` - (Required) Percentage of targets to receive a deployed configuration during each interval. Minimum value of 1.0, maximum value of 100.0.
+* `name` - (Required, Forces new resource) Name for the deployment strategy. Must be between 1 and 64 characters in length.
+* `replicate_to` - (Required, Forces new resource) Where to save the deployment strategy. Valid values: `NONE` and `SSM_DOCUMENT`.
+* `description` - (Optional) Description of the deployment strategy. Can be at most 1024 characters.
+* `final_bake_time_in_minutes` - (Optional) Amount of time AWS AppConfig monitors for alarms before considering the deployment to be complete and no longer eligible for automatic rollback. Minimum value of 0, maximum value of 1440.
+* `growth_type` - (Optional) Algorithm used to define how the percentage grows over time. Valid values: `LINEAR` and `EXPONENTIAL`. Defaults to `LINEAR`.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - AppConfig deployment strategy ID.
+* `arn` - ARN of the AppConfig Deployment Strategy.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
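+
+For comparison with the `LINEAR` example above, AWS AppConfig grows an `EXPONENTIAL` deployment as `G*(2^N)`, where `G` is the `growth_factor` and `N` is the step number, so a `growth_factor` of 2 reaches 2%, 4%, 8%, ... of targets. The following is a minimal hand-written sketch (not `cdktf convert` output); the stack and resource names are illustrative:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appconfig_deployment_strategy import AppconfigDeploymentStrategy
+class ExponentialStrategyStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Targets receive the configuration in exponentially growing steps until 100%.
+        AppconfigDeploymentStrategy(self, "exponential",
+            deployment_duration_in_minutes=60,
+            final_bake_time_in_minutes=30,
+            growth_factor=2,
+            growth_type="EXPONENTIAL",
+            name="example-exponential-strategy-tf",
+            replicate_to="NONE"
+        )
+```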
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Deployment Strategies using their deployment strategy ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AppConfig Deployment Strategies using their deployment strategy ID. For example: + +```console +% terraform import aws_appconfig_deployment_strategy.example 11xxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appconfig_environment.html.markdown b/website/docs/cdktf/python/r/appconfig_environment.html.markdown new file mode 100644 index 00000000000..d79cd1a9385 --- /dev/null +++ b/website/docs/cdktf/python/r/appconfig_environment.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_environment" +description: |- + Provides an AppConfig Environment resource. +--- + + + +# Resource: aws_appconfig_environment + +Provides an AppConfig Environment resource for an [`aws_appconfig_application` resource](appconfig_application.html.markdown). One or more environments can be defined for an application. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appconfig_application import AppconfigApplication +from imports.aws.appconfig_environment import AppconfigEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AppconfigApplication(self, "example", + description="Example AppConfig Application", + name="example-application-tf", + tags={ + "Type": "AppConfig Application" + } + ) + aws_appconfig_environment_example = AppconfigEnvironment(self, "example_1", + application_id=example.id, + description="Example AppConfig Environment", + monitor=[AppconfigEnvironmentMonitor( + alarm_arn=Token.as_string(aws_cloudwatch_metric_alarm_example.arn), + alarm_role_arn=Token.as_string(aws_iam_role_example.arn) + ) + ], + name="example-environment-tf", + tags={ + "Type": "AppConfig Environment" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appconfig_environment_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required, Forces new resource) AppConfig application ID. Must be between 4 and 7 characters in length. +* `name` - (Required) Name for the environment. Must be between 1 and 64 characters in length. +* `description` - (Optional) Description of the environment. Can be at most 1024 characters. +* `monitor` - (Optional) Set of Amazon CloudWatch alarms to monitor during the deployment process. Maximum of 5. See [Monitor](#monitor) below for more details. +* `tags` - (Optional) Map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Monitor + +The `monitor` block supports the following: + +* `alarm_arn` - (Required) ARN of the Amazon CloudWatch alarm. +* `alarm_role_arn` - (Optional) ARN of an IAM role for AWS AppConfig to monitor `alarm_arn`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig Environment. +* `id` - (**Deprecated**) AppConfig environment ID and application ID separated by a colon (`:`). +* `environment_id` - AppConfig environment ID. +* `state` - State of the environment. Possible values are `READY_FOR_DEPLOYMENT`, `DEPLOYING`, `ROLLING_BACK` + or `ROLLED_BACK`. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Environments using the environment ID and application ID separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AppConfig Environments using the environment ID and application ID separated by a colon (`:`). For example: + +```console +% terraform import aws_appconfig_environment.example 71abcde:11xxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appconfig_extension.html.markdown b/website/docs/cdktf/python/r/appconfig_extension.html.markdown new file mode 100644 index 00000000000..95e37504e85 --- /dev/null +++ b/website/docs/cdktf/python/r/appconfig_extension.html.markdown @@ -0,0 +1,136 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_extension" +description: |- + Provides an AppConfig Extension resource. +--- + + + +# Resource: aws_appconfig_extension + +Provides an AppConfig Extension resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appconfig_extension import AppconfigExtension +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = SnsTopic(self, "test", + name="test" + ) + data_aws_iam_policy_document_test = DataAwsIamPolicyDocument(self, "test_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["appconfig.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_test.override_logical_id("test")
+        aws_iam_role_test = IamRole(self, "test_2",
+            assume_role_policy=Token.as_string(data_aws_iam_policy_document_test.json),
+            name="test"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_test.override_logical_id("test")
+        aws_appconfig_extension_test = AppconfigExtension(self, "test_3",
+            action_point=[AppconfigExtensionActionPoint(
+                action=[AppconfigExtensionActionPointAction(
+                    name="test",
+                    role_arn=Token.as_string(aws_iam_role_test.arn),
+                    uri=test.arn
+                )
+                ],
+                point="ON_DEPLOYMENT_COMPLETE"
+            )
+            ],
+            description="test description",
+            name="test",
+            tags={
+                "Type": "AppConfig Extension"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appconfig_extension_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name for the extension. Each extension name in your account must be unique. Extension versions use the same name.
+* `description` - (Optional) Information about the extension.
+* `action_point` - (Required) The action points defined in the extension. [Detailed below](#action_point).
+* `parameter` - (Optional) The parameters accepted by the extension. You specify parameter values when you associate the extension to an AppConfig resource by using the CreateExtensionAssociation API action. For Lambda extension actions, these parameters are included in the Lambda request object. [Detailed below](#parameter).
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `action_point`
+
+Defines the actions the extension performs during the AppConfig workflow and at which point those actions are performed. The `action_point` configuration block supports the following arguments:
+
+* `point` - (Required) The point at which to perform the defined actions. Valid points are `PRE_CREATE_HOSTED_CONFIGURATION_VERSION`, `PRE_START_DEPLOYMENT`, `ON_DEPLOYMENT_START`, `ON_DEPLOYMENT_STEP`, `ON_DEPLOYMENT_BAKING`, `ON_DEPLOYMENT_COMPLETE`, `ON_DEPLOYMENT_ROLLED_BACK`.
+* `action` - (Required) An action defines the tasks the extension performs during the AppConfig workflow. [Detailed below](#action).
+
+#### `action`
+
+The `action` configuration block supports configuring any number of the following arguments:
+
+* `name` - (Required) The action name.
+* `role_arn` - (Required) An Amazon Resource Name (ARN) for an Identity and Access Management assume role.
+* `uri` - (Required) The extension URI associated with the action point in the extension definition. The URI can be an Amazon Resource Name (ARN) for one of the following: a Lambda function, an Amazon Simple Queue Service queue, an Amazon Simple Notification Service topic, or the Amazon EventBridge default event bus.
+* `description` - (Optional) Information about the action.
+
+#### `parameter`
+
+The `parameter` configuration block supports configuring any number of the following arguments:
+
+* `name` - (Required) The parameter name.
+* `required` - (Required) Determines if a parameter value must be specified in the extension association. +* `description` - (Optional) Information about the parameter. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig Extension. +* `id` - AppConfig Extension ID. +* `version` - The version number for the extension. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Extensions using their extension ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AppConfig Extensions using their extension ID. For example: + +```console +% terraform import aws_appconfig_extension.example 71rxuzt +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appconfig_extension_association.html.markdown b/website/docs/cdktf/python/r/appconfig_extension_association.html.markdown new file mode 100644 index 00000000000..2b03c6ffadd --- /dev/null +++ b/website/docs/cdktf/python/r/appconfig_extension_association.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_extension_association" +description: |- + Associates an AppConfig Extension with a Resource. +--- + + + +# Resource: aws_appconfig_extension_association + +Associates an AppConfig Extension with a Resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appconfig_application import AppconfigApplication +from imports.aws.appconfig_extension import AppconfigExtension +from imports.aws.appconfig_extension_association import AppconfigExtensionAssociation +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = AppconfigApplication(self, "test", + name="test" + ) + aws_sns_topic_test = SnsTopic(self, "test_1", + name="test" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sns_topic_test.override_logical_id("test") + data_aws_iam_policy_document_test = DataAwsIamPolicyDocument(self, "test_2", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["appconfig.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_test.override_logical_id("test") + aws_iam_role_test = IamRole(self, "test_3", + assume_role_policy=Token.as_string(data_aws_iam_policy_document_test.json), + name="test" + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        aws_iam_role_test.override_logical_id("test")
+        aws_appconfig_extension_test = AppconfigExtension(self, "test_4",
+            action_point=[AppconfigExtensionActionPoint(
+                action=[AppconfigExtensionActionPointAction(
+                    name="test",
+                    role_arn=Token.as_string(aws_iam_role_test.arn),
+                    uri=Token.as_string(aws_sns_topic_test.arn)
+                )
+                ],
+                point="ON_DEPLOYMENT_COMPLETE"
+            )
+            ],
+            description="test description",
+            name="test",
+            tags={
+                "Type": "AppConfig Extension"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appconfig_extension_test.override_logical_id("test")
+        aws_appconfig_extension_association_test = AppconfigExtensionAssociation(self, "test_5",
+            extension_arn=Token.as_string(aws_appconfig_extension_test.arn),
+            resource_arn=test.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appconfig_extension_association_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `extension_arn` - (Required) The ARN of the extension defined in the association.
+* `resource_arn` - (Required) The ARN of the application, configuration profile, or environment to associate with the extension.
+* `parameters` - (Optional) The parameter names and values defined for the association.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the AppConfig Extension Association.
+* `id` - AppConfig Extension Association ID.
+* `extension_version` - The version number for the extension defined in the association.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Extension Associations using their extension association ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AppConfig Extension Associations using their extension association ID. For example:
+
+```console
+% terraform import aws_appconfig_extension_association.example 71rxuzt
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appconfig_hosted_configuration_version.html.markdown b/website/docs/cdktf/python/r/appconfig_hosted_configuration_version.html.markdown
new file mode 100644
index 00000000000..ef10c717b0c
--- /dev/null
+++ b/website/docs/cdktf/python/r/appconfig_hosted_configuration_version.html.markdown
@@ -0,0 +1,143 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_hosted_configuration_version"
+description: |-
+  Provides an AppConfig Hosted Configuration Version resource.
+---
+
+
+
+# Resource: aws_appconfig_hosted_configuration_version
+
+Provides an AppConfig Hosted Configuration Version resource.
+
+## Example Usage
+
+### Freeform
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
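+# Note: Fn.jsonencode emits a Terraform jsonencode() expression in the synthesized
+# configuration; for fully static content like the map below, serializing with Python's
+# json.dumps would also produce valid JSON for `content`.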
+# +from imports.aws.appconfig_hosted_configuration_version import AppconfigHostedConfigurationVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppconfigHostedConfigurationVersion(self, "example", + application_id=Token.as_string(aws_appconfig_application_example.id), + configuration_profile_id=Token.as_string(aws_appconfig_configuration_profile_example.configuration_profile_id), + content=Token.as_string( + Fn.jsonencode({ + "foo": "bar", + "fruit": ["apple", "pear", "orange"], + "is_thing_enabled": True + })), + content_type="application/json", + description="Example Freeform Hosted Configuration Version" + ) +``` + +### Feature Flags + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appconfig_hosted_configuration_version import AppconfigHostedConfigurationVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppconfigHostedConfigurationVersion(self, "example", + application_id=Token.as_string(aws_appconfig_application_example.id), + configuration_profile_id=Token.as_string(aws_appconfig_configuration_profile_example.configuration_profile_id), + content=Token.as_string( + Fn.jsonencode({ + "flags": { + "bar": { + "attributes": { + "some_attribute": { + "constraints": { + "required": True, + "type": "string" + } + }, + "some_other_attribute": { + "constraints": { + "required": True, + "type": "number" + } + } + }, + "name": "bar" + }, + "foo": { + "_deprecation": { + "status": "planned" + }, + "name": "foo" + } + }, + "values": { + "bar": { + "enabled": "true", + "some_attribute": "Hello World", + "some_other_attribute": 123 + }, + "foo": { + "enabled": "true" + } + }, + "version": "1" + })), + content_type="application/json", + description="Example Feature Flag Configuration Version" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required, Forces new resource) Application ID. +* `configuration_profile_id` - (Required, Forces new resource) Configuration profile ID. +* `content` - (Required, Forces new resource) Content of the configuration or the configuration data. +* `content_type` - (Required, Forces new resource) Standard MIME type describing the format of the configuration content. For more information, see [Content-Type](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.17). +* `description` - (Optional, Forces new resource) Description of the configuration. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig hosted configuration version. +* `id` - AppConfig application ID, configuration profile ID, and version number separated by a slash (`/`). +* `version_number` - Version number of the hosted configuration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Hosted Configuration Versions using the application ID, configuration profile ID, and version number separated by a slash (`/`). 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AppConfig Hosted Configuration Versions using the application ID, configuration profile ID, and version number separated by a slash (`/`). For example: + +```console +% terraform import aws_appconfig_hosted_configuration_version.example 71abcde/11xxxxx/2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appflow_connector_profile.html.markdown b/website/docs/cdktf/python/r/appflow_connector_profile.html.markdown new file mode 100644 index 00000000000..d1ea50ec22e --- /dev/null +++ b/website/docs/cdktf/python/r/appflow_connector_profile.html.markdown @@ -0,0 +1,359 @@ +--- +subcategory: "AppFlow" +layout: "aws" +page_title: "AWS: aws_appflow_connector_profile" +description: |- + Provides an AppFlow Connector Profile resource. +--- + + + +# Resource: aws_appflow_connector_profile + +Provides an AppFlow connector profile resource. + +For information about AppFlow flows, see the [Amazon AppFlow API Reference][1]. +For specific information about creating an AppFlow connector profile, see the +[CreateConnectorProfile][2] page in the Amazon AppFlow API Reference. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appflow_connector_profile import AppflowConnectorProfile +from imports.aws.data_aws_iam_policy import DataAwsIamPolicy +from imports.aws.iam_role import IamRole +from imports.aws.redshift_cluster import RedshiftCluster +from imports.aws.s3_bucket import S3Bucket +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = IamRole(self, "example", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "ec2.amazonaws.com" + }, + "Sid": "" + } + ], + "Version": "2012-10-17" + })), + managed_policy_arns=[Token.as_string(test.arn)], + name="example_role" + ) + aws_redshift_cluster_example = RedshiftCluster(self, "example_1", + cluster_identifier="example_cluster", + cluster_type="single-node", + database_name="example_db", + master_password="examplePassword123!", + master_username="exampleuser", + node_type="dc1.large" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_redshift_cluster_example.override_logical_id("example") + aws_s3_bucket_example = S3Bucket(self, "example_2", + bucket="example_bucket" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_example.override_logical_id("example") + data_aws_iam_policy_example = DataAwsIamPolicy(self, "example_3", + name="AmazonRedshiftAllCommandsFullAccess" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        data_aws_iam_policy_example.override_logical_id("example")
+        aws_appflow_connector_profile_example = AppflowConnectorProfile(self, "example_4",
+            connection_mode="Public",
+            connector_profile_config=AppflowConnectorProfileConnectorProfileConfig(
+                connector_profile_credentials=AppflowConnectorProfileConnectorProfileConfigConnectorProfileCredentials(
+                    redshift=AppflowConnectorProfileConnectorProfileConfigConnectorProfileCredentialsRedshift(
+                        password=Token.as_string(aws_redshift_cluster_example.master_password),
+                        username=Token.as_string(aws_redshift_cluster_example.master_username)
+                    )
+                ),
+                connector_profile_properties=AppflowConnectorProfileConnectorProfileConfigConnectorProfileProperties(
+                    redshift=AppflowConnectorProfileConnectorProfileConfigConnectorProfilePropertiesRedshift(
+                        bucket_name=Token.as_string(aws_s3_bucket_example.name),
+                        database_url="jdbc:redshift://${" + aws_redshift_cluster_example.endpoint + "}/${" + aws_redshift_cluster_example.database_name + "}",
+                        role_arn=example.arn
+                    )
+                )
+            ),
+            connector_type="Redshift",
+            name="example_profile"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appflow_connector_profile_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The AppFlow connector profile argument layout is a complex structure. The following top-level arguments are supported:
+
+* `name` (Required) - Name of the connector profile. The name is unique for each `ConnectorProfile` in your AWS account.
+* `connection_mode` (Required) - Indicates the connection mode and specifies whether it is public or private. Private flows use AWS PrivateLink to route data over AWS infrastructure without exposing it to the public internet. One of: `Public`, `Private`.
+* `connector_label` (Optional) - The label of the connector. The label is unique for each ConnectorRegistration in your AWS account. Only needed if calling for `CustomConnector` connector type.
+* `connector_profile_config` (Required) - Defines the connector-specific configuration and credentials. See [Connector Profile Config](#connector-profile-config) for more details.
+* `connector_type` (Required) - The type of connector. One of: `Amplitude`, `CustomConnector`, `CustomerProfiles`, `Datadog`, `Dynatrace`, `EventBridge`, `Googleanalytics`, `Honeycode`, `Infornexus`, `LookoutMetrics`, `Marketo`, `Redshift`, `S3`, `Salesforce`, `SAPOData`, `Servicenow`, `Singular`, `Slack`, `Snowflake`, `Trendmicro`, `Upsolver`, `Veeva`, `Zendesk`.
+* `kms_arn` (Optional) - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
+
+### Connector Profile Config
+
+* `connector_profile_credentials` (Required) - The connector-specific credentials required by each connector. See [Connector Profile Credentials](#connector-profile-credentials) for more details.
+* `connector_profile_properties` (Required) - The connector-specific properties of the profile configuration. See [Connector Profile Properties](#connector-profile-properties) for more details.
+
+### Connector Profile Credentials
+
+* `amplitude` (Optional) - The connector-specific credentials required when using Amplitude. See [Amplitude Connector Profile Credentials](#amplitude-connector-profile-credentials) for more details.
+* `custom_connector` (Optional) - The connector-specific profile credentials required when using the custom connector. See [Custom Connector Profile Credentials](#custom-connector-profile-credentials) for more details.
+* `datadog` (Optional) - Connector-specific credentials required when using Datadog. See [Datadog Connector Profile Credentials](#datadog-connector-profile-credentials) for more details.
+* `dynatrace` (Optional) - The connector-specific credentials required when using Dynatrace. See [Dynatrace Connector Profile Credentials](#dynatrace-connector-profile-credentials) for more details.
+* `google_analytics` (Optional) - The connector-specific credentials required when using Google Analytics. See [Google Analytics Connector Profile Credentials](#google-analytics-connector-profile-credentials) for more details.
+* `honeycode` (Optional) - The connector-specific credentials required when using Amazon Honeycode. See [Honeycode Connector Profile Credentials](#honeycode-connector-profile-credentials) for more details.
+* `infor_nexus` (Optional) - The connector-specific credentials required when using Infor Nexus. See [Infor Nexus Connector Profile Credentials](#infor-nexus-connector-profile-credentials) for more details.
+* `marketo` (Optional) - Connector-specific credentials required when using Marketo. See [Marketo Connector Profile Credentials](#marketo-connector-profile-credentials) for more details.
+* `redshift` (Optional) - Connector-specific credentials required when using Amazon Redshift. See [Redshift Connector Profile Credentials](#redshift-connector-profile-credentials) for more details.
+* `salesforce` (Optional) - The connector-specific credentials required when using Salesforce. See [Salesforce Connector Profile Credentials](#salesforce-connector-profile-credentials) for more details.
+* `sapo_data` (Optional) - The connector-specific credentials required when using SAPOData. See [SAPOData Connector Profile Credentials](#sapodata-connector-profile-credentials) for more details.
+* `service_now` (Optional) - The connector-specific credentials required when using ServiceNow. See [ServiceNow Connector Profile Credentials](#servicenow-connector-profile-credentials) for more details.
+* `singular` (Optional) - Connector-specific credentials required when using Singular. See [Singular Connector Profile Credentials](#singular-connector-profile-credentials) for more details.
+* `slack` (Optional) - Connector-specific credentials required when using Slack. See [Slack Connector Profile Credentials](#slack-connector-profile-credentials) for more details.
+* `snowflake` (Optional) - The connector-specific credentials required when using Snowflake. See [Snowflake Connector Profile Credentials](#snowflake-connector-profile-credentials) for more details.
+* `trendmicro` (Optional) - The connector-specific credentials required when using Trend Micro. See [Trend Micro Connector Profile Credentials](#trendmicro-connector-profile-credentials) for more details.
+* `veeva` (Optional) - Connector-specific credentials required when using Veeva. See [Veeva Connector Profile Credentials](#veeva-connector-profile-credentials) for more details.
+* `zendesk` (Optional) - Connector-specific credentials required when using Zendesk. See [Zendesk Connector Profile Credentials](#zendesk-connector-profile-credentials) for more details.
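+
+Each credentials block pairs with a block of the same connector type under [Connector Profile Properties](#connector-profile-properties) below. As a minimal hand-written sketch (not `cdktf convert` output), a Datadog profile combines `api_key`/`application_key` credentials with an `instance_url` property. The struct names follow the generated naming pattern of the Redshift example above, and the key values are placeholders:
+
+```python
+from imports.aws.appflow_connector_profile import AppflowConnectorProfile
+# Inside a TerraformStack __init__, like the example above:
+AppflowConnectorProfile(self, "datadog",
+    connection_mode="Public",
+    connector_type="Datadog",
+    connector_profile_config=AppflowConnectorProfileConnectorProfileConfig(
+        connector_profile_credentials=AppflowConnectorProfileConnectorProfileConfigConnectorProfileCredentials(
+            datadog=AppflowConnectorProfileConnectorProfileConfigConnectorProfileCredentialsDatadog(
+                api_key="replace-with-datadog-api-key",
+                application_key="replace-with-datadog-application-key"
+            )
+        ),
+        connector_profile_properties=AppflowConnectorProfileConnectorProfileConfigConnectorProfileProperties(
+            datadog=AppflowConnectorProfileConnectorProfileConfigConnectorProfilePropertiesDatadog(
+                instance_url="https://api.datadoghq.com"
+            )
+        )
+    ),
+    name="datadog_profile"
+)
+```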
+ +#### Amplitude Connector Profile Credentials + +* `api_key` (Required) - Unique alphanumeric identifier used to authenticate a user, developer, or calling program to your API. +* `secret_key` (Required) - The Secret Access Key portion of the credentials. + +#### Custom Connector Profile Credentials + +* `api_key` (Optional) - API keys required for the authentication of the user. + * `api_key` (Required) - The API key required for API key authentication. + * `api_secret_key` (Optional) - The API secret key required for API key authentication. +* `authentication_type` (Required) - The authentication type that the custom connector uses for authenticating while creating a connector profile. One of: `APIKEY`, `BASIC`, `CUSTOM`, `OAUTH2`. +* `basic` (Optional) - Basic credentials that are required for the authentication of the user. + * `password` (Required) - The password to use to connect to a resource. + * `username` (Required) - The username to use to connect to a resource. +* `custom` (Optional) - If the connector uses the custom authentication mechanism, this holds the required credentials. + * `credentials_map` (Optional) - A map that holds custom authentication credentials. + * `custom_authentication_type` (Required) - The custom authentication type that the connector uses. +* `oauth2` (Optional) - OAuth 2.0 credentials required for the authentication of the user. + * `access_token` (Optional) - The access token used to access the connector on your behalf. + * `client_id` (Optional) - The identifier for the desired client. + * `client_secret` (Optional) - The client secret used by the OAuth client to authenticate to the authorization server. + * `oauth_request` (Optional) - Used by select connectors for which the OAuth workflow is supported. See [OAuth Request](#oauth-request) for more details. + * `refresh_token` (Optional) - The refresh token used to refresh an expired access token. + +#### Datadog Connector Profile Credentials + +* `api_key` (Required) - Unique alphanumeric identifier used to authenticate a user, developer, or calling program to your API. +* `application_key` (Required) - Application keys, in conjunction with your API key, give you full access to Datadog’s programmatic API. Application keys are associated with the user account that created them. The application key is used to log all requests made to the API. + +#### Dynatrace Connector Profile Credentials + +* `api_token` (Required) - The API tokens used by Dynatrace API to authenticate various API calls. + +#### Google Analytics Connector Profile Credentials + +* `access_token` (Optional) - The credentials used to access protected Google Analytics resources. +* `client_id` (Required) - The identifier for the desired client. +* `client_secret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server. +* `oauth_request` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. +* `refresh_token` (Optional) - The credentials used to acquire new access tokens. This is required only for OAuth2 access tokens, and is not required for OAuth1 access tokens. + +#### Honeycode Connector Profile Credentials + +* `access_token` (Optional) - The credentials used to access protected Amazon Honeycode resources. +* `oauth_request` (Optional) - Used by select connectors for which the OAuth workflow is supported, such as Salesforce, Google Analytics, Marketo, Zendesk, and Slack. 
See [OAuth Request](#oauth-request) for more details.
+* `refresh_token` (Optional) - The credentials used to acquire new access tokens.
+
+#### Infor Nexus Connector Profile Credentials
+
+* `access_key_id` (Required) - The Access Key portion of the credentials.
+* `datakey` (Required) - Encryption keys used to encrypt data.
+* `secret_access_key` (Required) - The secret key used to sign requests.
+* `user_id` (Required) - Identifier for the user.
+
+#### Marketo Connector Profile Credentials
+
+* `access_token` (Optional) - The credentials used to access protected Marketo resources.
+* `client_id` (Required) - The identifier for the desired client.
+* `client_secret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server.
+* `oauth_request` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details.
+
+#### Redshift Connector Profile Credentials
+
+* `password` (Required) - Password that corresponds to the user name.
+* `username` (Required) - Name of the user.
+
+#### Salesforce Connector Profile Credentials
+
+* `access_token` (Optional) - The credentials used to access protected Salesforce resources.
+* `client_credentials_arn` (Optional) - The secret manager ARN, which contains the client ID and client secret of the connected app.
+* `oauth_request` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details.
+* `refresh_token` (Optional) - The credentials used to acquire new access tokens.
+
+#### SAPOData Connector Profile Credentials
+
+* `basic_auth_credentials` (Optional) - The SAPOData basic authentication credentials.
+    * `password` (Required) - The password to use to connect to a resource.
+    * `username` (Required) - The username to use to connect to a resource.
+* `oauth_credentials` (Optional) - The SAPOData OAuth type authentication credentials.
+    * `access_token` (Optional) - The access token used to access protected SAPOData resources.
+    * `client_id` (Required) - The identifier for the desired client.
+    * `client_secret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server.
+    * `oauth_request` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details.
+    * `refresh_token` (Optional) - The refresh token used to refresh an expired access token.
+
+#### ServiceNow Connector Profile Credentials
+
+* `password` (Required) - Password that corresponds to the user name.
+* `username` (Required) - Name of the user.
+
+#### Singular Connector Profile Credentials
+
+* `api_key` (Required) - Unique alphanumeric identifier used to authenticate a user, developer, or calling program to your API.
+
+#### Slack Connector Profile Credentials
+
+* `access_token` (Optional) - The credentials used to access protected Slack resources.
+* `client_id` (Required) - The identifier for the client.
+* `client_secret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server.
+* `oauth_request` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details.
+
+#### Snowflake Connector Profile Credentials
+
+* `password` (Required) - Password that corresponds to the user name.
+* `username` (Required) - Name of the user. + +#### Trendmicro Connector Profile Credentials + +* `api_secret_key` (Required) - The Secret Access Key portion of the credentials. + +#### Veeva Connector Profile Credentials + +* `password` (Required) - Password that corresponds to the user name. +* `username` (Required) - Name of the user. + +#### Zendesk Connector Profile Credentials + +* `access_token` (Optional) - The credentials used to access protected Zendesk resources. +* `client_id` (Required) - The identifier for the desired client. +* `client_secret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server. +* `oauth_request` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. + +##### OAuth Request + +* `auth_code` (Optional) - The code provided by the connector when it has been authenticated via the connected app. +* `redirect_uri` (Optional) - The URL to which the authentication server redirects the browser after authorization has been granted. + +### Connector Profile Properties + +* `custom_connector` (Optional) - The connector-specific profile properties required when using the custom connector. See [Custom Connector Profile Properties](#custom-connector-profile-properties) for more details. +* `datadog` (Optional) - Connector-specific properties required when using Datadog. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `dynatrace` (Optional) - The connector-specific properties required when using Dynatrace. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `infor_nexus` (Optional) - The connector-specific properties required when using Infor Nexus. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `marketo` (Optional) - Connector-specific properties required when using Marketo. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `redshift` (Optional) - Connector-specific properties required when using Amazon Redshift. See [Redshift Connector Profile Properties](#redshift-connector-profile-properties) for more details. +* `salesforce` (Optional) - The connector-specific properties required when using Salesforce. See [Salesforce Connector Profile Properties](#salesforce-connector-profile-properties) for more details. +* `sapo_data` (Optional) - The connector-specific properties required when using SAPOData. See [SAPOData Connector Profile Properties](#sapodata-connector-profile-properties) for more details. +* `service_now` (Optional) - The connector-specific properties required when using ServiceNow. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `slack` (Optional) - Connector-specific properties required when using Slack. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `snowflake` (Optional) - The connector-specific properties required when using Snowflake. See [Snowflake Connector Profile Properties](#snowflake-connector-profile-properties) for more details. +* `veeva` (Optional) - Connector-specific properties required when using Veeva. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. 
+* `zendesk` (Optional) - Connector-specific properties required when using Zendesk. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details.
+
+#### Custom Connector Profile Properties
+
+* `oauth2_properties` (Optional) - The OAuth 2.0 properties required for OAuth 2.0 authentication.
+    * `oauth2_grant_type` (Required) - The OAuth 2.0 grant type used by the connector for OAuth 2.0 authentication. One of: `AUTHORIZATION_CODE`, `CLIENT_CREDENTIALS`.
+    * `token_url` (Required) - The token URL required for OAuth 2.0 authentication.
+    * `token_url_custom_properties` (Optional) - Associates your token URL with a map of properties that you define. Use this parameter to provide any additional details that the connector requires to authenticate your request.
+* `profile_properties` (Optional) - A map of properties that are required to create a profile for the custom connector.
+
+#### Generic Connector Profile Properties
+
+Datadog, Dynatrace, Infor Nexus, Marketo, ServiceNow, Slack, Veeva, and Zendesk all support the following attributes:
+
+* `instance_url` (Required) - The location of the connector resource (its instance URL).
+
+#### Redshift Connector Profile Properties
+
+* `bucket_name` (Required) - A name for the associated Amazon S3 bucket.
+* `bucket_prefix` (Optional) - The object key for the destination bucket in which Amazon AppFlow places the files.
+* `cluster_identifier` (Optional) - The unique ID that's assigned to an Amazon Redshift cluster.
+* `database_name` (Optional) - The name of an Amazon Redshift database.
+* `database_url` (Required) - The JDBC URL of the Amazon Redshift cluster.
+* `data_api_role_arn` (Optional) - ARN of the IAM role that permits AppFlow to access the database through the Data API.
+* `role_arn` (Required) - ARN of the IAM role.
+
+#### Salesforce Connector Profile Properties
+
+* `instance_url` (Optional) - The location of the Salesforce resource.
+* `is_sandbox_environment` (Optional) - Indicates whether the connector profile applies to a sandbox or production environment.
+
+#### SAPOData Connector Profile Properties
+
+* `application_host_url` (Required) - The location of the SAPOData resource.
+* `application_service_path` (Required) - The application path to the catalog service.
+* `client_number` (Required) - The client number for the client creating the connection.
+* `logon_language` (Optional) - The logon language of the SAPOData instance.
+* `oauth_properties` (Optional) - The SAPOData OAuth properties required for OAuth type authentication.
+    * `auth_code_url` (Required) - The authorization code URL required to redirect to the SAP Login Page to fetch the authorization code for OAuth type authentication.
+    * `oauth_scopes` (Required) - The OAuth scopes required for OAuth type authentication.
+    * `token_url` (Required) - The token URL required to fetch access/refresh tokens using the authorization code, and to refresh an expired access token using the refresh token.
+* `port_number` (Required) - The port number of the SAPOData instance.
+* `private_link_service_name` (Optional) - The SAPOData Private Link service name to be used for private data transfers.
+
+#### Snowflake Connector Profile Properties
+
+* `account_name` (Optional) - The name of the account.
+* `bucket_name` (Required) - The name of the Amazon S3 bucket associated with Snowflake.
+* `bucket_prefix` (Optional) - The bucket path that refers to the Amazon S3 bucket associated with Snowflake.
+* `private_link_service_name` (Optional) - The Snowflake Private Link service name to be used for private data transfers.
+* `region` (Optional) - AWS Region of the Snowflake account.
+* `stage` (Required) - Name of the Amazon S3 stage that was created while setting up an Amazon S3 stage in the Snowflake account. This is written in the following format: `<Database>.<Schema>.<Stage Name>`.
+* `warehouse` (Required) - The name of the Snowflake warehouse.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the connector profile.
+* `credentials_arn` - ARN of the connector profile credentials.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppFlow Connector Profiles using the connector profile `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AppFlow Connector Profiles using the connector profile `arn`. For example:
+
+```console
+% terraform import aws_appflow_connector_profile.profile arn:aws:appflow:us-west-2:123456789012:connectorprofile/example-profile
+```
+
+[1]: https://docs.aws.amazon.com/appflow/1.0/APIReference/Welcome.html
+[2]: https://docs.aws.amazon.com/appflow/1.0/APIReference/API_CreateConnectorProfile.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appflow_flow.html.markdown b/website/docs/cdktf/python/r/appflow_flow.html.markdown
new file mode 100644
index 00000000000..a8b549d497d
--- /dev/null
+++ b/website/docs/cdktf/python/r/appflow_flow.html.markdown
@@ -0,0 +1,437 @@
+---
+subcategory: "AppFlow"
+layout: "aws"
+page_title: "AWS: aws_appflow_flow"
+description: |-
+  Provides an AppFlow Flow resource.
+---
+
+
+
+# Resource: aws_appflow_flow
+
+Provides an AppFlow flow resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
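+# Overview of this example: two S3 buckets are created; bucket policies grant the
+# appflow.amazonaws.com service principal read access on the source bucket and write
+# access on the destination bucket; the on-demand flow then copies the uploaded CSV
+# from the source bucket to the destination bucket.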
+#
+from imports.aws.appflow_flow import AppflowFlow
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_policy import S3BucketPolicy
+from imports.aws.s3_object import S3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example_destination = S3Bucket(self, "example_destination",
+            bucket="example-destination"
+        )
+        example_source = S3Bucket(self, "example_source",
+            bucket="example-source"
+        )
+        S3Object(self, "example",
+            bucket=example_source.id,
+            key="example_source.csv",
+            source="example_source.csv"
+        )
+        data_aws_iam_policy_document_example_destination = DataAwsIamPolicyDocument(self, "example_destination_3",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:PutObject", "s3:AbortMultipartUpload", "s3:ListMultipartUploadParts", "s3:ListBucketMultipartUploads", "s3:GetBucketAcl", "s3:PutObjectAcl"
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["appflow.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=["arn:aws:s3:::example_destination", "arn:aws:s3:::example_destination/*"
+                ],
+                sid="AllowAppFlowDestinationActions"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example_destination.override_logical_id("example_destination")
+        data_aws_iam_policy_document_example_source = DataAwsIamPolicyDocument(self, "example_source_4",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:ListBucket", "s3:GetObject"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["appflow.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=["arn:aws:s3:::example_source", "arn:aws:s3:::example_source/*"
+                ],
+                sid="AllowAppFlowSourceActions"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example_source.override_logical_id("example_source")
+        aws_s3_bucket_policy_example_destination = S3BucketPolicy(self, "example_destination_5",
+            bucket=example_destination.id,
+            policy=Token.as_string(data_aws_iam_policy_document_example_destination.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_policy_example_destination.override_logical_id("example_destination")
+        aws_s3_bucket_policy_example_source = S3BucketPolicy(self, "example_source_6",
+            bucket=example_source.id,
+            policy=Token.as_string(data_aws_iam_policy_document_example_source.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_policy_example_source.override_logical_id("example_source")
+        aws_appflow_flow_example = AppflowFlow(self, "example_7",
+            destination_flow_config=[AppflowFlowDestinationFlowConfig(
+                connector_type="S3",
+                destination_connector_properties=AppflowFlowDestinationFlowConfigDestinationConnectorProperties(
+                    s3=AppflowFlowDestinationFlowConfigDestinationConnectorPropertiesS3(
+                        bucket_name=Token.as_string(aws_s3_bucket_policy_example_destination.bucket),
+                        s3_output_format_config=AppflowFlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig(
+                            prefix_config=AppflowFlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig(
+                                prefix_type="PATH"
+                            )
+                        )
+                    )
+                )
+            )
+            ],
+            name="example",
+            source_flow_config=AppflowFlowSourceFlowConfig(
+                connector_type="S3",
+                source_connector_properties=AppflowFlowSourceFlowConfigSourceConnectorProperties(
+                    s3=AppflowFlowSourceFlowConfigSourceConnectorPropertiesS3(
+                        bucket_name=Token.as_string(aws_s3_bucket_policy_example_source.bucket),
+                        bucket_prefix="example"
+                    )
+                )
+            ),
+            task=[AppflowFlowTask(
+                connector_operator=[AppflowFlowTaskConnectorOperator(
+                    s3="NO_OP"
+                )
+                ],
+                destination_field="exampleField",
+                source_fields=["exampleField"],
+                task_type="Map"
+            )
+            ],
+            trigger_config=AppflowFlowTriggerConfig(
+                trigger_type="OnDemand"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appflow_flow_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the flow.
+* `destination_flow_config` - (Required) A [Destination Flow Config](#destination-flow-config) that controls how Amazon AppFlow places data in the destination connector.
+* `source_flow_config` - (Required) The [Source Flow Config](#source-flow-config) that controls how Amazon AppFlow retrieves data from the source connector.
+* `task` - (Required) A [Task](#task) that Amazon AppFlow performs while transferring the data in the flow run.
+* `trigger_config` - (Required) A [Trigger](#trigger-config) that determines how and when the flow runs.
+* `description` - (Optional) Description of the flow you want to create.
+* `kms_arn` - (Optional) ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### Destination Flow Config
+
+* `connector_type` - (Required) Type of connector, such as Salesforce, Amplitude, and so on.
Valid values are `Salesforce`, `Singular`, `Slack`, `Redshift`, `S3`, `Marketo`, `Googleanalytics`, `Zendesk`, `Servicenow`, `Datadog`, `Trendmicro`, `Snowflake`, `Dynatrace`, `Infornexus`, `Amplitude`, `Veeva`, `EventBridge`, `LookoutMetrics`, `Upsolver`, `Honeycode`, `CustomerProfiles`, `SAPOData`, and `CustomConnector`.
+* `destination_connector_properties` - (Required) This stores the information that is required to query a particular connector. See [Destination Connector Properties](#destination-connector-properties) for more information.
+* `api_version` - (Optional) API version that the destination connector uses.
+* `connector_profile_name` - (Optional) Name of the connector profile. This name must be unique for each connector profile in the AWS account.
+
+#### Destination Connector Properties
+
+* `custom_connector` - (Optional) Properties that are required to query the custom connector. See [Custom Connector Destination Properties](#custom-connector-destination-properties) for more details.
+* `customer_profiles` - (Optional) Properties that are required to query Amazon Connect Customer Profiles. See [Customer Profiles Destination Properties](#customer-profiles-destination-properties) for more details.
+* `event_bridge` - (Optional) Properties that are required to query Amazon EventBridge. See [Generic Destination Properties](#generic-destination-properties) for more details.
+* `honeycode` - (Optional) Properties that are required to query Amazon Honeycode. See [Generic Destination Properties](#generic-destination-properties) for more details.
+* `marketo` - (Optional) Properties that are required to query Marketo. See [Generic Destination Properties](#generic-destination-properties) for more details.
+* `redshift` - (Optional) Properties that are required to query Amazon Redshift. See [Redshift Destination Properties](#redshift-destination-properties) for more details.
+* `s3` - (Optional) Properties that are required to query Amazon S3. See [S3 Destination Properties](#s3-destination-properties) for more details.
+* `salesforce` - (Optional) Properties that are required to query Salesforce. See [Salesforce Destination Properties](#salesforce-destination-properties) for more details.
+* `sapo_data` - (Optional) Properties that are required to query SAPOData. See [SAPOData Destination Properties](#sapodata-destination-properties) for more details.
+* `snowflake` - (Optional) Properties that are required to query Snowflake. See [Snowflake Destination Properties](#snowflake-destination-properties) for more details.
+* `upsolver` - (Optional) Properties that are required to query Upsolver. See [Upsolver Destination Properties](#upsolver-destination-properties) for more details.
+* `zendesk` - (Optional) Properties that are required to query Zendesk. See [Zendesk Destination Properties](#zendesk-destination-properties) for more details.
+
+##### Generic Destination Properties
+
+EventBridge, Honeycode, and Marketo destination properties all support the following attributes:
+
+* `object` - (Required) Object specified in the flow destination.
+* `error_handling_config` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details.
+
+##### Custom Connector Destination Properties
+
+* `entity_name` - (Required) Entity specified in the custom connector as a destination in the flow.
+* `custom_properties` - (Optional) Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
+* `error_handling_config` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as destination. See [Error Handling Config](#error-handling-config) for more details.
+* `id_field_names` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
+* `write_operation_type` - (Optional) Type of write operation to be performed in the custom connector when it's used as destination. Valid values are `INSERT`, `UPSERT`, `UPDATE`, and `DELETE`.
+
+##### Customer Profiles Destination Properties
+
+* `domain_name` - (Required) Unique name of the Amazon Connect Customer Profiles domain.
+* `object_type_name` - (Optional) Object specified in the Amazon Connect Customer Profiles flow destination.
+
+##### Redshift Destination Properties
+
+* `intermediate_bucket_name` - (Required) Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
+* `object` - (Required) Object specified in the Amazon Redshift flow destination.
+* `bucket_prefix` - (Optional) Object key for the bucket in which Amazon AppFlow places the destination files.
+* `error_handling_config` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details.
+
+##### S3 Destination Properties
+
+* `bucket_name` - (Required) Amazon S3 bucket name in which Amazon AppFlow places the transferred data.
+* `bucket_prefix` - (Optional) Object key for the bucket in which Amazon AppFlow places the destination files.
+* `s3_output_format_config` - (Optional) Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See [S3 Output Format Config](#s3-output-format-config) for more details.
+
+###### S3 Output Format Config
+
+* `aggregation_config` - (Optional) Aggregation settings that you can use to customize the output format of your flow data. See [Aggregation Config](#aggregation-config) for more details.
+* `file_type` - (Optional) File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are `CSV`, `JSON`, and `PARQUET`.
+* `prefix_config` - (Optional) Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See [Prefix Config](#prefix-config) for more details.
+* `preserve_source_data_typing` - (Optional, Boolean) Whether the data types from the source system need to be preserved. Only valid for the `PARQUET` file type.
+
+##### Salesforce Destination Properties
+
+* `object` - (Required) Object specified in the flow destination.
+* `error_handling_config` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details.
+* `id_field_names` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update or delete.
+* `write_operation_type` - (Optional) This specifies the type of write operation to be performed in Salesforce. When the value is `UPSERT`, then `id_field_names` is required. Valid values are `INSERT`, `UPSERT`, `UPDATE`, and `DELETE`. A configuration sketch follows this list.
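+
+As a sketch of how these options fit together (hand-written, not `cdktf convert` output): the `AppflowFlowDestinationFlowConfig` and `...DestinationConnectorProperties` classes appear in the example above, while the `...DestinationConnectorPropertiesSalesforce` class name is assumed to follow the same generated naming pattern as its S3 counterpart.
+
+```python
+# Hypothetical sketch: a Salesforce destination entry for the destination_flow_config
+# list of an AppflowFlow, upserting Account records. Because write_operation_type is
+# UPSERT, id_field_names must be set.
+AppflowFlowDestinationFlowConfig(
+    connector_type="Salesforce",
+    connector_profile_name="example-profile",  # hypothetical connector profile name
+    destination_connector_properties=AppflowFlowDestinationFlowConfigDestinationConnectorProperties(
+        salesforce=AppflowFlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce(
+            object="Account",
+            id_field_names=["Id"],
+            write_operation_type="UPSERT"
+        )
+    )
+)
+```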
+
+##### SAPOData Destination Properties
+
+* `object_path` - (Required) Object path specified in the SAPOData flow destination.
+* `error_handling_config` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details.
+* `id_field_names` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update or delete.
+* `success_response_handling_config` - (Optional) Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See [Success Response Handling Config](#success-response-handling-config) for more details.
+* `write_operation` - (Optional) Possible write operations in the destination connector. When this value is not provided, this defaults to the `INSERT` operation. Valid values are `INSERT`, `UPSERT`, `UPDATE`, and `DELETE`.
+
+###### Success Response Handling Config
+
+* `bucket_name` - (Optional) Name of the Amazon S3 bucket.
+* `bucket_prefix` - (Optional) Amazon S3 bucket prefix.
+
+##### Snowflake Destination Properties
+
+* `intermediate_bucket_name` - (Required) Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
+* `object` - (Required) Object specified in the Snowflake flow destination.
+* `bucket_prefix` - (Optional) Object key for the bucket in which Amazon AppFlow places the destination files.
+* `error_handling_config` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details.
+
+##### Upsolver Destination Properties
+
+* `bucket_name` - (Required) Upsolver Amazon S3 bucket name in which Amazon AppFlow places the transferred data. This must begin with `upsolver-appflow`.
+* `bucket_prefix` - (Optional) Object key for the Upsolver Amazon S3 Bucket in which Amazon AppFlow places the destination files.
+* `s3_output_format_config` - (Optional) Configuration that determines how Amazon AppFlow should format the flow output data when Upsolver is used as the destination. See [Upsolver S3 Output Format Config](#upsolver-s3-output-format-config) for more details.
+
+###### Upsolver S3 Output Format Config
+
+* `aggregation_config` - (Optional) Aggregation settings that you can use to customize the output format of your flow data. See [Aggregation Config](#aggregation-config) for more details.
+* `file_type` - (Optional) File type that Amazon AppFlow places in the Upsolver Amazon S3 bucket. Valid values are `CSV`, `JSON`, and `PARQUET`.
+* `prefix_config` - (Optional) Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See [Prefix Config](#prefix-config) for more details.
+
+###### Aggregation Config
+
+* `aggregation_type` - (Optional) Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are `None` and `SingleFile`.
+
+###### Prefix Config
+
+* `prefix_format` - (Optional) Determines the level of granularity that's included in the prefix. Valid values are `YEAR`, `MONTH`, `DAY`, `HOUR`, and `MINUTE`.
+* `prefix_type` - (Optional) Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are `FILENAME`, `PATH`, and `PATH_AND_FILENAME`. See the sketch below for how these options combine.
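+
+The S3 output format options above combine as in the following sketch. The `...S3S3OutputFormatConfig` and `...PrefixConfig` classes appear in the example at the top of this page; the `...AggregationConfig` class name is an assumption based on the same generated naming pattern.
+
+```python
+# Sketch: Parquet output aggregated into a single file, with daily
+# path-and-filename prefixes. preserve_source_data_typing only applies to PARQUET.
+AppflowFlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig(
+    file_type="PARQUET",
+    preserve_source_data_typing=True,
+    aggregation_config=AppflowFlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig(
+        aggregation_type="SingleFile"
+    ),
+    prefix_config=AppflowFlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig(
+        prefix_type="PATH_AND_FILENAME",
+        prefix_format="DAY"
+    )
+)
+```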
+
+##### Zendesk Destination Properties
+
+* `object` - (Required) Object specified in the flow destination.
+* `error_handling_config` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details.
+* `id_field_names` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update or delete.
+* `write_operation_type` - (Optional) This specifies the type of write operation to be performed in Zendesk. When the value is `UPSERT`, then `id_field_names` is required. Valid values are `INSERT`, `UPSERT`, `UPDATE`, and `DELETE`.
+
+###### Error Handling Config
+
+* `bucket_name` - (Optional) Name of the Amazon S3 bucket.
+* `bucket_prefix` - (Optional) Amazon S3 bucket prefix.
+* `fail_on_first_destination_error` - (Optional, boolean) Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
+
+### Source Flow Config
+
+* `connector_type` - (Required) Type of connector, such as Salesforce, Amplitude, and so on. Valid values are `Salesforce`, `Singular`, `Slack`, `Redshift`, `S3`, `Marketo`, `Googleanalytics`, `Zendesk`, `Servicenow`, `Datadog`, `Trendmicro`, `Snowflake`, `Dynatrace`, `Infornexus`, `Amplitude`, `Veeva`, `EventBridge`, `LookoutMetrics`, `Upsolver`, `Honeycode`, `CustomerProfiles`, `SAPOData`, and `CustomConnector`.
+* `source_connector_properties` - (Required) Information that is required to query a particular source connector. See [Source Connector Properties](#source-connector-properties) for details.
+* `api_version` - (Optional) API version that the source connector uses.
+* `connector_profile_name` - (Optional) Name of the connector profile. This name must be unique for each connector profile in the AWS account.
+* `incremental_pull_config` - (Optional) Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See [Incremental Pull Config](#incremental-pull-config) for more details.
+
+#### Source Connector Properties
+
+* `amplitude` - (Optional) Information that is required for querying Amplitude. See [Generic Source Properties](#generic-source-properties) for more details.
+* `custom_connector` - (Optional) Properties that are applied when the custom connector is being used as a source. See [Custom Connector Source Properties](#custom-connector-source-properties).
+* `datadog` - (Optional) Information that is required for querying Datadog. See [Generic Source Properties](#generic-source-properties) for more details.
+* `dynatrace` - (Optional) Information that is required for querying Dynatrace. See [Generic Source Properties](#generic-source-properties) for more details.
+* `infor_nexus` - (Optional) Information that is required for querying Infor Nexus. See [Generic Source Properties](#generic-source-properties) for more details.
+* `marketo` - (Optional) Information that is required for querying Marketo. See [Generic Source Properties](#generic-source-properties) for more details.
+* `s3` - (Optional) Information that is required for querying Amazon S3. See [S3 Source Properties](#s3-source-properties) for more details.
+* `salesforce` - (Optional) Information that is required for querying Salesforce. See [Salesforce Source Properties](#salesforce-source-properties) for more details.
+* `sapo_data` - (Optional) Information that is required for querying SAPOData as a flow source. See [SAPOData Source Properties](#sapodata-source-properties) for more details.
+* `service_now` - (Optional) Information that is required for querying ServiceNow. See [Generic Source Properties](#generic-source-properties) for more details.
+* `singular` - (Optional) Information that is required for querying Singular. See [Generic Source Properties](#generic-source-properties) for more details.
+* `slack` - (Optional) Information that is required for querying Slack. See [Generic Source Properties](#generic-source-properties) for more details.
+* `trend_micro` - (Optional) Information that is required for querying Trend Micro. See [Generic Source Properties](#generic-source-properties) for more details.
+* `veeva` - (Optional) Information that is required for querying Veeva. See [Veeva Source Properties](#veeva-source-properties) for more details.
+* `zendesk` - (Optional) Information that is required for querying Zendesk. See [Generic Source Properties](#generic-source-properties) for more details.
+
+##### Generic Source Properties
+
+Amplitude, Datadog, Dynatrace, Google Analytics, Infor Nexus, Marketo, ServiceNow, Singular, Slack, Trend Micro, and Zendesk source properties all support the following attributes:
+
+* `object` - (Required) Object specified in the flow source.
+
+##### Custom Connector Source Properties
+
+* `entity_name` - (Required) Entity specified in the custom connector as a source in the flow.
+* `custom_properties` - (Optional) Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
+
+##### S3 Source Properties
+
+* `bucket_name` - (Required) Amazon S3 bucket name where the source files are stored.
+* `bucket_prefix` - (Optional) Object key for the Amazon S3 bucket in which the source files are stored.
+* `s3_input_format_config` - (Optional) When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See [S3 Input Format Config](#s3-input-format-config) for details.
+
+###### S3 Input Format Config
+
+* `s3_input_file_type` - (Optional) File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are `CSV` and `JSON`.
+
+##### Salesforce Source Properties
+
+* `object` - (Required) Object specified in the Salesforce flow source.
+* `enable_dynamic_field_update` - (Optional, boolean) Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
+* `include_deleted_records` - (Optional, boolean) Whether Amazon AppFlow includes deleted files in the flow run. A combined source sketch appears after the Veeva list below.
+
+##### SAPOData Source Properties
+
+* `object_path` - (Required) Object path specified in the SAPOData flow source.
+
+##### Veeva Source Properties
+
+* `object` - (Required) Object specified in the Veeva flow source.
+* `document_type` - (Optional) Document type specified in the Veeva document extract flow.
+* `include_all_versions` - (Optional, boolean) Boolean value to include all versions of files in the Veeva document extract flow.
+* `include_renditions` - (Optional, boolean) Boolean value to include file renditions in the Veeva document extract flow.
+* `include_source_files` - (Optional, boolean) Boolean value to include source files in the Veeva document extract flow.
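+
+As a sketch (hand-written, not `cdktf convert` output), a Salesforce source block slots into `source_flow_config` in the same way as the S3 source in the main example; the `...SourceConnectorPropertiesSalesforce` class name is assumed from the same generated naming pattern.
+
+```python
+# Hypothetical sketch: a Salesforce source that picks up newly added fields
+# and skips deleted records.
+AppflowFlowSourceFlowConfig(
+    connector_type="Salesforce",
+    connector_profile_name="example-profile",  # hypothetical connector profile name
+    source_connector_properties=AppflowFlowSourceFlowConfigSourceConnectorProperties(
+        salesforce=AppflowFlowSourceFlowConfigSourceConnectorPropertiesSalesforce(
+            object="Account",
+            enable_dynamic_field_update=True,
+            include_deleted_records=False
+        )
+    )
+)
+```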
+ +#### Incremental Pull Config + +* `datetime_type_field_name` - (Optional) Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source. + +### Task + +* `source_fields` - (Required) Source fields to which a particular task is applied. +* `task_type` - (Required) Particular task implementation that Amazon AppFlow performs. Valid values are `Arithmetic`, `Filter`, `Map`, `Map_all`, `Mask`, `Merge`, `Passthrough`, `Truncate`, and `Validate`. +* `connector_operator` - (Optional) Operation to be performed on the provided source fields. See [Connector Operator](#connector-operator) for details. +* `destination_field` - (Optional) Field in a destination connector, or a field value against which Amazon AppFlow validates a source field. +* `task_properties` - (Optional) Map used to store task-related information. The execution service looks for particular information based on the `TaskType`. Valid keys are `VALUE`, `VALUES`, `DATA_TYPE`, `UPPER_BOUND`, `LOWER_BOUND`, `SOURCE_DATA_TYPE`, `DESTINATION_DATA_TYPE`, `VALIDATION_ACTION`, `MASK_VALUE`, `MASK_LENGTH`, `TRUNCATE_LENGTH`, `MATH_OPERATION_FIELDS_ORDER`, `CONCAT_FORMAT`, `SUBFIELD_CATEGORY_MAP`, and `EXCLUDE_SOURCE_FIELDS_LIST`. + +#### Connector Operator + +* `amplitude` - (Optional) Operation to be performed on the provided Amplitude source fields. The only valid value is `BETWEEN`. +* `custom_connector` - (Optional) Operators supported by the custom connector. Valid values are `PROJECTION`, `LESS_THAN`, `GREATER_THAN`, `CONTAINS`, `BETWEEN`, `LESS_THAN_OR_EQUAL_TO`, `GREATER_THAN_OR_EQUAL_TO`, `EQUAL_TO`, `NOT_EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `datadog` - (Optional) Operation to be performed on the provided Datadog source fields. Valid values are `PROJECTION`, `BETWEEN`, `EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `dynatrace` - (Optional) Operation to be performed on the provided Dynatrace source fields. Valid values are `PROJECTION`, `BETWEEN`, `EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `google_analytics` - (Optional) Operation to be performed on the provided Google Analytics source fields. Valid values are `PROJECTION` and `BETWEEN`. +* `infor_nexus` - (Optional) Operation to be performed on the provided Infor Nexus source fields. Valid values are `PROJECTION`, `BETWEEN`, `EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `marketo` - (Optional) Operation to be performed on the provided Marketo source fields. Valid values are `PROJECTION`, `BETWEEN`, `EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `s3` - (Optional) Operation to be performed on the provided Amazon S3 source fields. 
Valid values are `PROJECTION`, `LESS_THAN`, `GREATER_THAN`, `BETWEEN`, `LESS_THAN_OR_EQUAL_TO`, `GREATER_THAN_OR_EQUAL_TO`, `EQUAL_TO`, `NOT_EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `salesforce` - (Optional) Operation to be performed on the provided Salesforce source fields. Valid values are `PROJECTION`, `LESS_THAN`, `GREATER_THAN`, `CONTAINS`, `BETWEEN`, `LESS_THAN_OR_EQUAL_TO`, `GREATER_THAN_OR_EQUAL_TO`, `EQUAL_TO`, `NOT_EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `sapo_data` - (Optional) Operation to be performed on the provided SAPOData source fields. Valid values are `PROJECTION`, `LESS_THAN`, `GREATER_THAN`, `CONTAINS`, `BETWEEN`, `LESS_THAN_OR_EQUAL_TO`, `GREATER_THAN_OR_EQUAL_TO`, `EQUAL_TO`, `NOT_EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `service_now` - (Optional) Operation to be performed on the provided ServiceNow source fields. Valid values are `PROJECTION`, `LESS_THAN`, `GREATER_THAN`, `CONTAINS`, `BETWEEN`, `LESS_THAN_OR_EQUAL_TO`, `GREATER_THAN_OR_EQUAL_TO`, `EQUAL_TO`, `NOT_EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `singular` - (Optional) Operation to be performed on the provided Singular source fields. Valid values are `PROJECTION`, `EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `slack` - (Optional) Operation to be performed on the provided Slack source fields. Valid values are `PROJECTION`, `LESS_THAN`, `GREATER_THAN`, `BETWEEN`, `LESS_THAN_OR_EQUAL_TO`, `GREATER_THAN_OR_EQUAL_TO`, `EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `trendmicro` - (Optional) Operation to be performed on the provided Trend Micro source fields. Valid values are `PROJECTION`, `EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `veeva` - (Optional) Operation to be performed on the provided Veeva source fields. Valid values are `PROJECTION`, `LESS_THAN`, `GREATER_THAN`, `CONTAINS`, `BETWEEN`, `LESS_THAN_OR_EQUAL_TO`, `GREATER_THAN_OR_EQUAL_TO`, `EQUAL_TO`, `NOT_EQUAL_TO`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`. +* `zendesk` - (Optional) Operation to be performed on the provided Zendesk source fields. 
Valid values are `PROJECTION`, `GREATER_THAN`, `ADDITION`, `MULTIPLICATION`, `DIVISION`, `SUBTRACTION`, `MASK_ALL`, `MASK_FIRST_N`, `MASK_LAST_N`, `VALIDATE_NON_NULL`, `VALIDATE_NON_ZERO`, `VALIDATE_NON_NEGATIVE`, `VALIDATE_NUMERIC`, and `NO_OP`.
+
+### Trigger Config
+
+* `trigger_type` - (Required) Type of flow trigger. Valid values are `Scheduled`, `Event`, and `OnDemand`.
+* `trigger_properties` - (Optional) Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the `Scheduled` trigger type. See [Scheduled Trigger Properties](#scheduled-trigger-properties) for details.
+
+#### Scheduled Trigger Properties
+
+The `trigger_properties` block only supports one attribute: `scheduled`, a block which in turn supports the following:
+
+* `schedule_expression` - (Required) Scheduling expression that determines the rate at which the schedule will run, for example `rate(5minutes)`.
+* `data_pull_mode` - (Optional) Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are `Incremental` and `Complete`.
+* `first_execution_from` - (Optional) Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
+* `schedule_end_time` - (Optional) Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
+* `schedule_offset` - (Optional) Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
+* `schedule_start_time` - (Optional) Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
+* `timezone` - (Optional) Time zone used when referring to the date and time of a schedule-triggered flow, such as `America/New_York`.
+
+For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appflow_flow import AppflowFlow
+class MyConvertedCode(TerraformStack):
+    # flow_name holds the flow's name argument; it is named differently here so
+    # it does not collide with the stack's construct name parameter.
+    def __init__(self, scope, name, *, trigger_type, destination_flow_config, flow_name, source_flow_config, task):
+        super().__init__(scope, name)
+        AppflowFlow(self, "example",
+            trigger_config=AppflowFlowTriggerConfig(
+                scheduled=[{
+                    "schedule_expression": "rate(1minutes)"
+                }
+                ],
+                trigger_type=trigger_type
+            ),
+            destination_flow_config=destination_flow_config,
+            name=flow_name,
+            source_flow_config=source_flow_config,
+            task=task
+        )
+```
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Flow's ARN.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppFlow flows using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appflow_flow import AppflowFlow
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch: generate_config_for_import is exposed on generated resource
+        # classes in recent cdktf releases and emits the import block equivalent.
+        AppflowFlow.generate_config_for_import(self, "example", "arn:aws:appflow:us-west-2:123456789012:flow/example-flow")
+```
+
+Using `terraform import`, import AppFlow flows using the `arn`.
For example:
+
+```console
+% terraform import aws_appflow_flow.example arn:aws:appflow:us-west-2:123456789012:flow/example-flow
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appintegrations_data_integration.html.markdown b/website/docs/cdktf/python/r/appintegrations_data_integration.html.markdown
new file mode 100644
index 00000000000..aa2e04e10b2
--- /dev/null
+++ b/website/docs/cdktf/python/r/appintegrations_data_integration.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "AppIntegrations"
+layout: "aws"
+page_title: "AWS: aws_appintegrations_data_integration"
+description: |-
+  Provides details about a specific Amazon AppIntegrations Data Integration
+---
+
+
+
+# Resource: aws_appintegrations_data_integration
+
+Provides an Amazon AppIntegrations Data Integration resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appintegrations_data_integration import AppintegrationsDataIntegration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppintegrationsDataIntegration(self, "example",
+            description="example",
+            kms_key=test.arn,
+            name="example",
+            schedule_config=AppintegrationsDataIntegrationScheduleConfig(
+                first_execution_from="1439788442681",
+                object="Account",
+                schedule_expression="rate(1 hour)"
+            ),
+            source_uri="Salesforce://AppFlow/example",
+            tags={
+                "Key1": "Value1"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) Specifies the description of the Data Integration.
+* `kms_key` - (Required) Specifies the KMS key Amazon Resource Name (ARN) for the Data Integration.
+* `name` - (Required) Specifies the name of the Data Integration.
+* `schedule_config` - (Required) A block that defines the name of the data and how often it should be pulled from the source. The Schedule Config block is documented below.
+* `source_uri` - (Required) Specifies the URI of the data source. Create an [AppFlow Connector Profile](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/appflow_connector_profile) and reference the name of the profile in the URL. An example of this value for Salesforce is `Salesforce://AppFlow/example` where `example` is the name of the AppFlow Connector Profile.
+* `tags` - (Optional) Tags to apply to the Data Integration. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+A `schedule_config` block supports the following arguments:
+
+* `first_execution_from` - (Required) The start date for objects to import in the first flow run as a Unix/epoch timestamp in milliseconds or in ISO-8601 format. This needs to be a time in the past, meaning that the data created or updated before this given date will not be downloaded. A sketch for computing this value follows this list.
+* `object` - (Required) The name of the object to pull from the data source. Examples of objects in Salesforce include `Case`, `Account`, or `Lead`.
+* `schedule_expression` - (Required) How often the data should be pulled from the data source. Examples include `rate(1 hour)`, `rate(3 hours)`, `rate(1 day)`.
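+
+Since `first_execution_from` accepts a Unix epoch timestamp in milliseconds, one way to derive it (a plain-Python sketch, independent of the provider bindings) is:
+
+```python
+# Sketch: start the first pull 30 days in the past, expressed as epoch milliseconds.
+from datetime import datetime, timedelta, timezone
+
+thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)
+first_execution_from = str(int(thirty_days_ago.timestamp() * 1000))
+```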
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Data Integration.
+* `id` - The identifier of the Data Integration.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon AppIntegrations Data Integrations using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appintegrations_data_integration import AppintegrationsDataIntegration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch: generate_config_for_import is exposed on generated resource
+        # classes in recent cdktf releases and emits the import block equivalent.
+        AppintegrationsDataIntegration.generate_config_for_import(self, "example", "12345678-1234-1234-1234-123456789123")
+```
+
+Using `terraform import`, import Amazon AppIntegrations Data Integrations using the `id`. For example:
+
+```console
+% terraform import aws_appintegrations_data_integration.example 12345678-1234-1234-1234-123456789123
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appintegrations_event_integration.html.markdown b/website/docs/cdktf/python/r/appintegrations_event_integration.html.markdown
new file mode 100644
index 00000000000..62cea686e96
--- /dev/null
+++ b/website/docs/cdktf/python/r/appintegrations_event_integration.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "AppIntegrations"
+layout: "aws"
+page_title: "AWS: aws_appintegrations_event_integration"
+description: |-
+  Provides details about a specific Amazon AppIntegrations Event Integration
+---
+
+
+
+# Resource: aws_appintegrations_event_integration
+
+Provides an Amazon AppIntegrations Event Integration resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appintegrations_event_integration import AppintegrationsEventIntegration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppintegrationsEventIntegration(self, "example",
+            description="Example Description",
+            event_filter=AppintegrationsEventIntegrationEventFilter(
+                source="aws.partner/examplepartner.com"
+            ),
+            eventbridge_bus="default",
+            name="example-name",
+            tags={
+                "Name": "Example Event Integration"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) Description of the Event Integration.
+* `eventbridge_bus` - (Required) EventBridge bus.
+* `event_filter` - (Required) Block that defines the configuration information for the event filter. The Event Filter block is documented below.
+* `name` - (Required) Name of the Event Integration.
+* `tags` - (Optional) Tags to apply to the Event Integration. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+An `event_filter` block supports the following arguments:
+
+* `source` - (Required) Source of the events.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Event Integration.
+* `id` - Identifier of the Event Integration which is the name of the Event Integration.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon AppIntegrations Event Integrations using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appintegrations_event_integration import AppintegrationsEventIntegration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch: generate_config_for_import is exposed on generated resource
+        # classes in recent cdktf releases and emits the import block equivalent.
+        AppintegrationsEventIntegration.generate_config_for_import(self, "example", "example-name")
+```
+
+Using `terraform import`, import Amazon AppIntegrations Event Integrations using the `name`. For example:
+
+```console
+% terraform import aws_appintegrations_event_integration.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/applicationinsights_application.html.markdown b/website/docs/cdktf/python/r/applicationinsights_application.html.markdown
new file mode 100644
index 00000000000..ea7213adabd
--- /dev/null
+++ b/website/docs/cdktf/python/r/applicationinsights_application.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "CloudWatch Application Insights"
+layout: "aws"
+page_title: "AWS: aws_applicationinsights_application"
+description: |-
+  Provides a CloudWatch Application Insights Application resource
+---
+
+
+
+# Resource: aws_applicationinsights_application
+
+Provides an ApplicationInsights Application resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.applicationinsights_application import ApplicationinsightsApplication
+from imports.aws.resourcegroups_group import ResourcegroupsGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = ResourcegroupsGroup(self, "example",
+            name="example",
+            resource_query=ResourcegroupsGroupResourceQuery(
+                query=Token.as_string(
+                    Fn.jsonencode({
+                        "ResourceTypeFilters": ["AWS::EC2::Instance"],
+                        "TagFilters": [{
+                            "Key": "Stage",
+                            "Values": ["Test"]
+                        }
+                        ]
+                    }))
+            )
+        )
+        aws_applicationinsights_application_example = ApplicationinsightsApplication(self, "example_1",
+            resource_group_name=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_applicationinsights_application_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `resource_group_name` - (Required) Name of the resource group.
+
+The following arguments are optional:
+
+* `auto_config_enabled` - (Optional) Indicates whether Application Insights automatically configures unmonitored resources in the resource group.
+* `auto_create` - (Optional) Configures all of the resources in the resource group by applying the recommended configurations.
+* `cwe_monitor_enabled` - (Optional) Indicates whether Application Insights can listen to CloudWatch events for the application resources, such as instance terminated, failed deployment, and others.
+* `grouping_type` - (Optional) Application Insights can create applications based on a resource group or on an account. To create an account-based application using all of the resources in the account, set this parameter to `ACCOUNT_BASED`.
+* `ops_center_enabled` - (Optional) When set to `true`, creates opsItems for any problems detected on an application.
+* `ops_item_sns_topic_arn` - (Optional) SNS topic provided to Application Insights that is associated to the created opsItem. Allows you to receive notifications for updates to the opsItem.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Application.
+* `id` - Name of the resource group.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ApplicationInsights Applications using the `resource_group_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.applicationinsights_application import ApplicationinsightsApplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch: generate_config_for_import is exposed on generated resource
+        # classes in recent cdktf releases and emits the import block equivalent.
+        ApplicationinsightsApplication.generate_config_for_import(self, "some", "some-application")
+```
+
+Using `terraform import`, import ApplicationInsights Applications using the `resource_group_name`. For example:
+
+```console
+% terraform import aws_applicationinsights_application.some some-application
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appmesh_gateway_route.html.markdown b/website/docs/cdktf/python/r/appmesh_gateway_route.html.markdown
new file mode 100644
index 00000000000..7dc48bd8f41
--- /dev/null
+++ b/website/docs/cdktf/python/r/appmesh_gateway_route.html.markdown
@@ -0,0 +1,197 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_gateway_route"
+description: |-
+  Provides an AWS App Mesh gateway route resource.
+---
+
+
+
+# Resource: aws_appmesh_gateway_route
+
+Provides an AWS App Mesh gateway route resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appmesh_gateway_route import AppmeshGatewayRoute
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppmeshGatewayRoute(self, "example",
+            mesh_name="example-service-mesh",
+            name="example-gateway-route",
+            spec=AppmeshGatewayRouteSpec(
+                http_route=AppmeshGatewayRouteSpecHttpRoute(
+                    action=AppmeshGatewayRouteSpecHttpRouteAction(
+                        target=AppmeshGatewayRouteSpecHttpRouteActionTarget(
+                            virtual_service=AppmeshGatewayRouteSpecHttpRouteActionTargetVirtualService(
+                                virtual_service_name=Token.as_string(aws_appmesh_virtual_service_example.name)
+                            )
+                        )
+                    ),
+                    match=AppmeshGatewayRouteSpecHttpRouteMatch(
+                        prefix="/"
+                    )
+                )
+            ),
+            tags={
+                "Environment": "test"
+            },
+            virtual_gateway_name=Token.as_string(aws_appmesh_virtual_gateway_example.name)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the gateway route. Must be between 1 and 255 characters in length.
+* `mesh_name` - (Required) Name of the service mesh in which to create the gateway route. Must be between 1 and 255 characters in length.
+* `virtual_gateway_name` - (Required) Name of the [virtual gateway](/docs/providers/aws/r/appmesh_virtual_gateway.html) to associate the gateway route with. Must be between 1 and 255 characters in length.
+* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to.
+* `spec` - (Required) Gateway route specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `grpc_route` - (Optional) Specification of a gRPC gateway route.
+* `http_route` - (Optional) Specification of an HTTP gateway route.
+* `http2_route` - (Optional) Specification of an HTTP/2 gateway route.
+* `priority` - (Optional) Priority for the gateway route, between `0` and `1000`.
+
+The `grpc_route`, `http_route` and `http2_route` objects support the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `match` - (Required) Criteria for determining a request match.
+
+The `grpc_route`, `http_route` and `http2_route`'s `action` object supports the following:
+
+* `target` - (Required) Target that traffic is routed to when a request matches the gateway route.
+
+The `target` object supports the following:
+
+* `port` - (Optional) The port number that corresponds to the target for Virtual Service provider port. This is required when the provider (router or node) of the Virtual Service has multiple listeners.
+* `virtual_service` - (Required) Virtual service gateway route target.
+
+The `virtual_service` object supports the following:
+
+* `virtual_service_name` - (Required) Name of the virtual service that traffic is routed to. Must be between 1 and 255 characters in length.
+
+The `http_route` and `http2_route`'s `action` object additionally supports the following:
+
+* `rewrite` - (Optional) Gateway route action to rewrite.
+
+The `rewrite` object supports the following:
+
+* `hostname` - (Optional) Host name to rewrite.
+* `path` - (Optional) Exact path to rewrite.
+* `prefix` - (Optional) Specified beginning characters to rewrite. A rewrite sketch follows this list.
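+
+As a sketch of the rewrite action (hand-written, not `cdktf convert` output): the `...HttpRouteAction` and `...HttpRouteMatch` classes appear in the example above, while the `...ActionRewrite` and `...ActionRewritePrefix` class names are assumed from the same generated naming pattern.
+
+```python
+# Hypothetical sketch: match requests under /api/ and strip the prefix
+# before forwarding them to the target virtual service.
+AppmeshGatewayRouteSpecHttpRoute(
+    match=AppmeshGatewayRouteSpecHttpRouteMatch(
+        prefix="/api/"
+    ),
+    action=AppmeshGatewayRouteSpecHttpRouteAction(
+        rewrite=AppmeshGatewayRouteSpecHttpRouteActionRewrite(
+            prefix=AppmeshGatewayRouteSpecHttpRouteActionRewritePrefix(
+                value="/"
+            )
+        ),
+        target=AppmeshGatewayRouteSpecHttpRouteActionTarget(
+            virtual_service=AppmeshGatewayRouteSpecHttpRouteActionTargetVirtualService(
+                virtual_service_name="servicea.simpleapp.local"  # hypothetical name
+            )
+        )
+    )
+)
+```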
+
+The `hostname` object supports the following:
+
+* `default_target_hostname` - (Required) Default target host name to write to. Valid values: `ENABLED`, `DISABLED`.
+
+The `path` object supports the following:
+
+* `exact` - (Required) Value used to replace matched path.
+
+The `prefix` object supports the following:
+
+* `default_prefix` - (Optional) Default prefix used to replace the incoming route prefix when rewritten. Valid values: `ENABLED`, `DISABLED`.
+* `value` - (Optional) Value used to replace the incoming route prefix when rewritten.
+
+The `grpc_route`'s `match` object supports the following:
+
+* `service_name` - (Required) Fully qualified domain name for the service to match from the request.
+* `port` - (Optional) The port number to match from the request.
+
+The `http_route` and `http2_route`'s `match` object supports the following:
+
+* `header` - (Optional) Client request headers to match on.
+* `hostname` - (Optional) Host name to match on.
+* `path` - (Optional) Client request path to match on.
+* `port` - (Optional) The port number to match from the request.
+* `prefix` - (Optional) Path to match requests with. This parameter must always start with `/`, which by itself matches all requests to the virtual service name.
+* `query_parameter` - (Optional) Client request query parameters to match on.
+
+The `header` object supports the following:
+
+* `name` - (Required) Name for the HTTP header in the client request that will be matched on.
+* `invert` - (Optional) If `true`, the match is on the opposite of the `match` method and value. Default is `false`.
+* `match` - (Optional) Method and value to match the header value sent with a request. Specify one match method.
+
+The `header`'s `match` object supports the following:
+
+* `exact` - (Optional) Header value sent by the client must match the specified value exactly.
+* `prefix` - (Optional) Header value sent by the client must begin with the specified characters.
+* `port` - (Optional) The port number to match from the request.
+* `range` - (Optional) Object that specifies the range of numbers that the header value sent by the client must be included in.
+* `regex` - (Optional) Header value sent by the client must include the specified characters.
+* `suffix` - (Optional) Header value sent by the client must end with the specified characters.
+
+The `range` object supports the following:
+
+* `end` - (Required) End of the range.
+* `start` - (Required) Start of the range.
+
+The `hostname` object supports the following:
+
+* `exact` - (Optional) Exact host name to match on.
+* `suffix` - (Optional) Specified ending characters of the host name to match on.
+
+The `path` object supports the following:
+
+* `exact` - (Optional) The exact path to match on.
+* `regex` - (Optional) The regex used to match the path.
+
+The `query_parameter` object supports the following:
+
+* `name` - (Required) Name for the query parameter that will be matched on.
+* `match` - (Optional) The query parameter to match on.
+
+The `query_parameter`'s `match` object supports the following:
+
+* `exact` - (Optional) The exact query parameter to match on.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the gateway route.
+* `arn` - ARN of the gateway route.
+* `created_date` - Creation date of the gateway route.
+* `last_updated_date` - Last update date of the gateway route.
+* `resource_owner` - Resource owner's AWS account ID.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh gateway routes using `mesh_name` and `virtual_gateway_name` together with the gateway route's `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appmesh_gateway_route import AppmeshGatewayRoute
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch: generate_config_for_import is exposed on generated resource
+        # classes in recent cdktf releases and emits the import block equivalent.
+        AppmeshGatewayRoute.generate_config_for_import(self, "example", "mesh/gw1/example-gateway-route")
+```
+
+Using `terraform import`, import App Mesh gateway routes using `mesh_name` and `virtual_gateway_name` together with the gateway route's `name`. For example:
+
+```console
+% terraform import aws_appmesh_gateway_route.example mesh/gw1/example-gateway-route
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appmesh_mesh.html.markdown b/website/docs/cdktf/python/r/appmesh_mesh.html.markdown
new file mode 100644
index 00000000000..8e9b61aef24
--- /dev/null
+++ b/website/docs/cdktf/python/r/appmesh_mesh.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_mesh"
+description: |-
+  Provides an AWS App Mesh service mesh resource.
+---
+
+
+
+# Resource: aws_appmesh_mesh
+
+Provides an AWS App Mesh service mesh resource.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appmesh_mesh import AppmeshMesh
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppmeshMesh(self, "simple",
+            name="simpleapp"
+        )
+```
+
+### Egress Filter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appmesh_mesh import AppmeshMesh
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppmeshMesh(self, "simple",
+            name="simpleapp",
+            spec=AppmeshMeshSpec(
+                egress_filter=AppmeshMeshSpecEgressFilter(
+                    type="ALLOW_ALL"
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the service mesh. Must be between 1 and 255 characters in length.
+* `spec` - (Optional) Service mesh specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `egress_filter` - (Optional) Egress filter rules for the service mesh.
+
+The `egress_filter` object supports the following:
+
+* `type` - (Optional) Egress filter type.
By default, the type is `DROP_ALL`.
+Valid values are `ALLOW_ALL` and `DROP_ALL`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the service mesh.
+* `arn` - ARN of the service mesh.
+* `created_date` - Creation date of the service mesh.
+* `last_updated_date` - Last update date of the service mesh.
+* `mesh_owner` - AWS account ID of the service mesh's owner.
+* `resource_owner` - Resource owner's AWS account ID.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh service meshes using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appmesh_mesh import AppmeshMesh
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch: generate_config_for_import is exposed on generated resource
+        # classes in recent cdktf releases and emits the import block equivalent.
+        AppmeshMesh.generate_config_for_import(self, "simple", "simpleapp")
+```
+
+Using `terraform import`, import App Mesh service meshes using the `name`. For example:
+
+```console
+% terraform import aws_appmesh_mesh.simple simpleapp
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appmesh_route.html.markdown b/website/docs/cdktf/python/r/appmesh_route.html.markdown
new file mode 100644
index 00000000000..11abbc6ca60
--- /dev/null
+++ b/website/docs/cdktf/python/r/appmesh_route.html.markdown
@@ -0,0 +1,377 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_route"
+description: |-
+  Provides an AWS App Mesh route resource.
+---
+
+
+
+# Resource: aws_appmesh_route
+
+Provides an AWS App Mesh route resource.
+
+## Example Usage
+
+### HTTP Routing
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appmesh_route import AppmeshRoute
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppmeshRoute(self, "serviceb",
+            mesh_name=simple.id,
+            name="serviceB-route",
+            spec=AppmeshRouteSpec(
+                http_route=AppmeshRouteSpecHttpRoute(
+                    action=AppmeshRouteSpecHttpRouteAction(
+                        weighted_target=[AppmeshRouteSpecHttpRouteActionWeightedTarget(
+                            virtual_node=serviceb1.name,
+                            weight=90
+                        ), AppmeshRouteSpecHttpRouteActionWeightedTarget(
+                            virtual_node=serviceb2.name,
+                            weight=10
+                        )
+                        ]
+                    ),
+                    match=AppmeshRouteSpecHttpRouteMatch(
+                        prefix="/"
+                    )
+                )
+            ),
+            virtual_router_name=Token.as_string(aws_appmesh_virtual_router_serviceb.name)
+        )
+```
+
+### HTTP Header Routing
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.appmesh_route import AppmeshRoute +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshRoute(self, "serviceb", + mesh_name=simple.id, + name="serviceB-route", + spec=AppmeshRouteSpec( + http_route=AppmeshRouteSpecHttpRoute( + action=AppmeshRouteSpecHttpRouteAction( + weighted_target=[AppmeshRouteSpecHttpRouteActionWeightedTarget( + virtual_node=Token.as_string(aws_appmesh_virtual_node_serviceb.name), + weight=100 + ) + ] + ), + match=AppmeshRouteSpecHttpRouteMatch( + header=[AppmeshRouteSpecHttpRouteMatchHeader( + match=AppmeshRouteSpecHttpRouteMatchHeaderMatch( + prefix="123" + ), + name="clientRequestId" + ) + ], + method="POST", + prefix="/", + scheme="https" + ) + ) + ), + virtual_router_name=Token.as_string(aws_appmesh_virtual_router_serviceb.name) + ) +``` + +### Retry Policy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appmesh_route import AppmeshRoute +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshRoute(self, "serviceb", + mesh_name=simple.id, + name="serviceB-route", + spec=AppmeshRouteSpec( + http_route=AppmeshRouteSpecHttpRoute( + action=AppmeshRouteSpecHttpRouteAction( + weighted_target=[AppmeshRouteSpecHttpRouteActionWeightedTarget( + virtual_node=Token.as_string(aws_appmesh_virtual_node_serviceb.name), + weight=100 + ) + ] + ), + match=AppmeshRouteSpecHttpRouteMatch( + prefix="/" + ), + retry_policy=AppmeshRouteSpecHttpRouteRetryPolicy( + http_retry_events=["server-error"], + max_retries=1, + per_retry_timeout=AppmeshRouteSpecHttpRouteRetryPolicyPerRetryTimeout( + unit="s", + value=15 + ) + ) + ) + ), + virtual_router_name=Token.as_string(aws_appmesh_virtual_router_serviceb.name) + ) +``` + +### TCP Routing + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appmesh_route import AppmeshRoute +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshRoute(self, "serviceb", + mesh_name=simple.id, + name="serviceB-route", + spec=AppmeshRouteSpec( + tcp_route=AppmeshRouteSpecTcpRoute( + action=AppmeshRouteSpecTcpRouteAction( + weighted_target=[AppmeshRouteSpecTcpRouteActionWeightedTarget( + virtual_node=serviceb1.name, + weight=100 + ) + ] + ) + ) + ), + virtual_router_name=Token.as_string(aws_appmesh_virtual_router_serviceb.name) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name to use for the route. Must be between 1 and 255 characters in length. +* `mesh_name` - (Required) Name of the service mesh in which to create the route. Must be between 1 and 255 characters in length. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to. +* `virtual_router_name` - (Required) Name of the virtual router in which to create the route. Must be between 1 and 255 characters in length. 
+* `spec` - (Required) Route specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `grpc_route` - (Optional) GRPC routing information for the route.
+* `http2_route` - (Optional) HTTP/2 routing information for the route.
+* `http_route` - (Optional) HTTP routing information for the route.
+* `priority` - (Optional) Priority for the route, between `0` and `1000`.
+Routes are matched based on the specified value, where `0` is the highest priority.
+* `tcp_route` - (Optional) TCP routing information for the route.
+
+The `grpc_route` object supports the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `match` - (Required) Criteria for determining a gRPC request match.
+* `retry_policy` - (Optional) Retry policy.
+* `timeout` - (Optional) Types of timeouts.
+
+The `http2_route` and `http_route` objects support the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `match` - (Required) Criteria for determining an HTTP request match.
+* `retry_policy` - (Optional) Retry policy.
+* `timeout` - (Optional) Types of timeouts.
+
+The `tcp_route` object supports the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `timeout` - (Optional) Types of timeouts.
+
+The `action` object supports the following:
+
+* `weighted_target` - (Required) Targets that traffic is routed to when a request matches the route.
+You can specify one or more targets and their relative weights with which to distribute traffic.
+
+The `timeout` object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+
+The `idle` object supports the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `grpc_route`'s `match` object supports the following:
+
+* `metadata` - (Optional) Data to match from the gRPC request.
+* `method_name` - (Optional) Method name to match from the request. If you specify a name, you must also specify a `service_name`.
+* `service_name` - (Optional) Fully qualified domain name for the service to match from the request.
+* `port` - (Optional) The port number to match from the request.
+
+The `metadata` object supports the following:
+
+* `name` - (Required) Name of the route. Must be between 1 and 50 characters in length.
+* `invert` - (Optional) If `true`, the match is on the opposite of the `match` criteria. Default is `false`.
+* `match` - (Optional) Data to match from the request.
+
+The `metadata`'s `match` object supports the following:
+
+* `exact` - (Optional) Value sent by the client must match the specified value exactly. Must be between 1 and 255 characters in length.
+* `prefix` - (Optional) Value sent by the client must begin with the specified characters. Must be between 1 and 255 characters in length.
+* `port` - (Optional) The port number to match from the request.
+* `range` - (Optional) Object that specifies the range of numbers that the value sent by the client must be included in.
+* `regex` - (Optional) Value sent by the client must include the specified characters. Must be between 1 and 255 characters in length.
+* `suffix` - (Optional) Value sent by the client must end with the specified characters. Must be between 1 and 255 characters in length.
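+
+To make the gRPC matching arguments above concrete, the following hand-written sketch (not `cdktf convert` output) routes calls to one gRPC method when a metadata entry matches a prefix. It reuses `simple`, `serviceb1`, and `aws_appmesh_virtual_router_serviceb` from the examples above; the nested `AppmeshRouteSpecGrpcRoute*` class names are assumed to follow the generated-binding naming convention seen there, so verify them against your own bindings:
+
+```python
+from cdktf import Token
+from imports.aws.appmesh_route import (AppmeshRoute, AppmeshRouteSpec,
+    AppmeshRouteSpecGrpcRoute, AppmeshRouteSpecGrpcRouteAction,
+    AppmeshRouteSpecGrpcRouteActionWeightedTarget,
+    AppmeshRouteSpecGrpcRouteMatch, AppmeshRouteSpecGrpcRouteMatchMetadata,
+    AppmeshRouteSpecGrpcRouteMatchMetadataMatch)
+
+# Route calls to myapp.MyService/GetItem whose "tenant" metadata entry
+# starts with "beta-". Service and metadata names are illustrative.
+AppmeshRoute(self, "grpc_example",
+    mesh_name=simple.id,
+    name="serviceB-grpc-route",
+    virtual_router_name=Token.as_string(aws_appmesh_virtual_router_serviceb.name),
+    spec=AppmeshRouteSpec(
+        grpc_route=AppmeshRouteSpecGrpcRoute(
+            action=AppmeshRouteSpecGrpcRouteAction(
+                weighted_target=[AppmeshRouteSpecGrpcRouteActionWeightedTarget(
+                    virtual_node=serviceb1.name,
+                    weight=100
+                )]
+            ),
+            match=AppmeshRouteSpecGrpcRouteMatch(
+                service_name="myapp.MyService",
+                method_name="GetItem",  # requires service_name, per above
+                metadata=[AppmeshRouteSpecGrpcRouteMatchMetadata(
+                    name="tenant",
+                    match=AppmeshRouteSpecGrpcRouteMatchMetadataMatch(
+                        prefix="beta-"
+                    )
+                )]
+            )
+        )
+    )
+)
+```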
+
+The `grpc_route`'s `retry_policy` object supports the following:
+
+* `grpc_retry_events` - (Optional) List of gRPC retry events.
+Valid values: `cancelled`, `deadline-exceeded`, `internal`, `resource-exhausted`, `unavailable`.
+* `http_retry_events` - (Optional) List of HTTP retry events.
+Valid values: `client-error` (HTTP status code 409), `gateway-error` (HTTP status codes 502, 503, and 504), `server-error` (HTTP status codes 500, 501, 502, 503, 504, 505, 506, 507, 508, 510, and 511), `stream-error` (retry on refused stream).
+* `max_retries` - (Required) Maximum number of retries.
+* `per_retry_timeout` - (Required) Per-retry timeout.
+* `tcp_retry_events` - (Optional) List of TCP retry events. The only valid value is `connection-error`.
+
+The `grpc_route`'s `timeout` object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `per_request` - (Optional) Per request timeout.
+
+The `idle` and `per_request` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `http2_route` and `http_route`'s `match` object supports the following:
+
+* `prefix` - (Optional) Path with which to match requests.
+This parameter must always start with `/`, which by itself matches all requests to the virtual router service name.
+* `port` - (Optional) The port number to match from the request.
+* `header` - (Optional) Client request headers to match on.
+* `method` - (Optional) Client request header method to match on. Valid values: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`.
+* `path` - (Optional) Client request path to match on.
+* `query_parameter` - (Optional) Client request query parameters to match on.
+* `scheme` - (Optional) Client request header scheme to match on. Valid values: `http`, `https`.
+
+The `match`'s `path` object supports the following:
+
+* `exact` - (Optional) The exact path to match on.
+* `regex` - (Optional) The regex used to match the path.
+
+The `match`'s `query_parameter` object supports the following:
+
+* `name` - (Required) Name for the query parameter that will be matched on.
+* `match` - (Optional) The query parameter to match on.
+
+The `query_parameter`'s `match` object supports the following:
+
+* `exact` - (Optional) The exact query parameter to match on.
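+
+The `path` and `query_parameter` matchers have no example above, so here is a hand-written sketch (same assumptions and cross-references as the gRPC sketch earlier) that pins a route to `GET /status?verbose=true` at a high priority:
+
+```python
+from cdktf import Token
+from imports.aws.appmesh_route import (AppmeshRoute, AppmeshRouteSpec,
+    AppmeshRouteSpecHttpRoute, AppmeshRouteSpecHttpRouteAction,
+    AppmeshRouteSpecHttpRouteActionWeightedTarget,
+    AppmeshRouteSpecHttpRouteMatch, AppmeshRouteSpecHttpRouteMatchPath,
+    AppmeshRouteSpecHttpRouteMatchQueryParameter,
+    AppmeshRouteSpecHttpRouteMatchQueryParameterMatch)
+
+# Match GET /status?verbose=true exactly; other requests fall through
+# to lower-priority routes on the same virtual router.
+AppmeshRoute(self, "status_route",
+    mesh_name=simple.id,
+    name="serviceB-status-route",
+    virtual_router_name=Token.as_string(aws_appmesh_virtual_router_serviceb.name),
+    spec=AppmeshRouteSpec(
+        priority=10,
+        http_route=AppmeshRouteSpecHttpRoute(
+            action=AppmeshRouteSpecHttpRouteAction(
+                weighted_target=[AppmeshRouteSpecHttpRouteActionWeightedTarget(
+                    virtual_node=serviceb1.name,
+                    weight=100
+                )]
+            ),
+            match=AppmeshRouteSpecHttpRouteMatch(
+                method="GET",
+                path=AppmeshRouteSpecHttpRouteMatchPath(exact="/status"),
+                query_parameter=[AppmeshRouteSpecHttpRouteMatchQueryParameter(
+                    name="verbose",
+                    match=AppmeshRouteSpecHttpRouteMatchQueryParameterMatch(
+                        exact="true"
+                    )
+                )]
+            )
+        )
+    )
+)
+```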
+
+The `http2_route` and `http_route`'s `retry_policy` object supports the following:
+
+* `http_retry_events` - (Optional) List of HTTP retry events.
+Valid values: `client-error` (HTTP status code 409), `gateway-error` (HTTP status codes 502, 503, and 504), `server-error` (HTTP status codes 500, 501, 502, 503, 504, 505, 506, 507, 508, 510, and 511), `stream-error` (retry on refused stream).
+* `max_retries` - (Required) Maximum number of retries.
+* `per_retry_timeout` - (Required) Per-retry timeout.
+* `tcp_retry_events` - (Optional) List of TCP retry events. The only valid value is `connection-error`.
+
+You must specify at least one value for `http_retry_events`, or at least one value for `tcp_retry_events`.
+
+The `http2_route` and `http_route`'s `timeout` object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `per_request` - (Optional) Per request timeout.
+
+The `idle` and `per_request` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `per_retry_timeout` object supports the following:
+
+* `unit` - (Required) Retry unit. Valid values: `ms`, `s`.
+* `value` - (Required) Retry value.
+
+The `weighted_target` object supports the following:
+
+* `virtual_node` - (Required) Virtual node to associate with the weighted target. Must be between 1 and 255 characters in length.
+* `weight` - (Required) Relative weight of the weighted target. An integer between 0 and 100.
+* `port` - (Optional) The targeted port of the weighted object.
+
+The `header` object supports the following:
+
+* `name` - (Required) Name for the HTTP header in the client request that will be matched on.
+* `invert` - (Optional) If `true`, the match is on the opposite of the `match` method and value. Default is `false`.
+* `match` - (Optional) Method and value to match the header value sent with a request. Specify one match method.
+
+The `header`'s `match` object supports the following:
+
+* `exact` - (Optional) Header value sent by the client must match the specified value exactly.
+* `prefix` - (Optional) Header value sent by the client must begin with the specified characters.
+* `port` - (Optional) The port number to match from the request.
+* `range` - (Optional) Object that specifies the range of numbers that the header value sent by the client must be included in.
+* `regex` - (Optional) Header value sent by the client must include the specified characters.
+* `suffix` - (Optional) Header value sent by the client must end with the specified characters.
+
+The `range` object supports the following:
+
+* `end` - (Required) End of the range.
+* `start` - (Required) Start of the range.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the route.
+* `arn` - ARN of the route.
+* `created_date` - Creation date of the route.
+* `last_updated_date` - Last update date of the route.
+* `resource_owner` - Resource owner's AWS account ID.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual routes using `mesh_name` and `virtual_router_name` together with the route's `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Mesh virtual routes using `mesh_name` and `virtual_router_name` together with the route's `name`.
For example: + +```console +% terraform import aws_appmesh_route.serviceb simpleapp/serviceB/serviceB-route +``` + +[1]: /docs/providers/aws/index.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appmesh_virtual_gateway.html.markdown b/website/docs/cdktf/python/r/appmesh_virtual_gateway.html.markdown new file mode 100644 index 00000000000..85539a68d4a --- /dev/null +++ b/website/docs/cdktf/python/r/appmesh_virtual_gateway.html.markdown @@ -0,0 +1,316 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_gateway" +description: |- + Provides an AWS App Mesh virtual gateway resource. +--- + + + +# Resource: aws_appmesh_virtual_gateway + +Provides an AWS App Mesh virtual gateway resource. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appmesh_virtual_gateway import AppmeshVirtualGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshVirtualGateway(self, "example", + mesh_name="example-service-mesh", + name="example-virtual-gateway", + spec=AppmeshVirtualGatewaySpec( + listener=[AppmeshVirtualGatewaySpecListener( + port_mapping=AppmeshVirtualGatewaySpecListenerPortMapping( + port=8080, + protocol="http" + ) + ) + ] + ), + tags={ + "Environment": "test" + } + ) +``` + +### Access Logs and TLS + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appmesh_virtual_gateway import AppmeshVirtualGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshVirtualGateway(self, "example", + mesh_name="example-service-mesh", + name="example-virtual-gateway", + spec=AppmeshVirtualGatewaySpec( + listener=[AppmeshVirtualGatewaySpecListener( + port_mapping=AppmeshVirtualGatewaySpecListenerPortMapping( + port=8080, + protocol="http" + ), + tls=AppmeshVirtualGatewaySpecListenerTls( + certificate=AppmeshVirtualGatewaySpecListenerTlsCertificate( + acm=AppmeshVirtualGatewaySpecListenerTlsCertificateAcm( + certificate_arn=Token.as_string(aws_acm_certificate_example.arn) + ) + ), + mode="STRICT" + ) + ) + ], + logging=AppmeshVirtualGatewaySpecLogging( + access_log=AppmeshVirtualGatewaySpecLoggingAccessLog( + file=AppmeshVirtualGatewaySpecLoggingAccessLogFile( + path="/var/log/access.log" + ) + ) + ) + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name to use for the virtual gateway. Must be between 1 and 255 characters in length. +* `mesh_name` - (Required) Name of the service mesh in which to create the virtual gateway. Must be between 1 and 255 characters in length. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to. +* `spec` - (Required) Virtual gateway specification to apply. +* `tags` - (Optional) Map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `listener` - (Required) Listeners that the mesh endpoint is expected to receive inbound traffic from. You can specify one listener.
+* `backend_defaults` - (Optional) Defaults for backends.
+* `logging` - (Optional) Inbound and outbound access logging information for the virtual gateway.
+
+The `backend_defaults` object supports the following:
+
+* `client_policy` - (Optional) Default client policy for virtual gateway backends.
+
+The `client_policy` object supports the following:
+
+* `tls` - (Optional) Transport Layer Security (TLS) client policy.
+
+The `tls` object supports the following:
+
+* `certificate` - (Optional) Virtual gateway's client's Transport Layer Security (TLS) certificate.
+* `enforce` - (Optional) Whether the policy is enforced. Default is `true`.
+* `ports` - (Optional) One or more ports that the policy is enforced for.
+* `validation` - (Required) TLS validation context.
+
+The `certificate` object supports the following:
+
+* `file` - (Optional) Local file certificate.
+* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate chain for the certificate.
+* `private_key` - (Required) Private key for a certificate stored on the file system of the mesh endpoint that the proxy is running on.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subject_alternative_names` - (Optional) SANs for a virtual gateway's listener's Transport Layer Security (TLS) validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subject_alternative_names` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `acm` - (Optional) TLS validation context trust for an AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificate_authority_arns` - (Required) One or more ACM ARNs.
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate trust chain for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret for a virtual gateway's Transport Layer Security (TLS) Secret Discovery Service validation context trust.
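+
+As a hand-written illustration of this chain of nested objects (not `cdktf convert` output), the following fragment enforces TLS toward all backends of a virtual gateway, trusting an ACM Private CA. It would be passed as the `backend_defaults` argument of `AppmeshVirtualGatewaySpec`; the `AppmeshVirtualGatewaySpecBackendDefaults*` class names are assumed from the generated-binding naming convention used in the examples above, and the CA ARN is a placeholder:
+
+```python
+from imports.aws.appmesh_virtual_gateway import (
+    AppmeshVirtualGatewaySpecBackendDefaults,
+    AppmeshVirtualGatewaySpecBackendDefaultsClientPolicy,
+    AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTls,
+    AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTlsValidation,
+    AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTlsValidationTrust,
+    AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTlsValidationTrustAcm)
+
+# Every backend this gateway talks to must present a certificate
+# chained to the given private CA (placeholder ARN).
+backend_defaults = AppmeshVirtualGatewaySpecBackendDefaults(
+    client_policy=AppmeshVirtualGatewaySpecBackendDefaultsClientPolicy(
+        tls=AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTls(
+            enforce=True,
+            validation=AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTlsValidation(
+                trust=AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTlsValidationTrust(
+                    acm=AppmeshVirtualGatewaySpecBackendDefaultsClientPolicyTlsValidationTrustAcm(
+                        certificate_authority_arns=[
+                            "arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/example"
+                        ]
+                    )
+                )
+            )
+        )
+    )
+)
+```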
+
+The `listener` object supports the following:
+
+* `port_mapping` - (Required) Port mapping information for the listener.
+* `connection_pool` - (Optional) Connection pool information for the listener.
+* `health_check` - (Optional) Health check information for the listener.
+* `tls` - (Optional) Transport Layer Security (TLS) properties for the listener.
+
+The `logging` object supports the following:
+
+* `access_log` - (Optional) Access log configuration for a virtual gateway.
+
+The `access_log` object supports the following:
+
+* `file` - (Optional) File object to send virtual gateway access logs to.
+
+The `file` object supports the following:
+
+* `format` - (Optional) The specified format for the logs.
+* `path` - (Required) File path to write access logs to. You can use `/dev/stdout` to send access logs to standard out. Must be between 1 and 255 characters in length.
+
+The `format` object supports the following:
+
+* `json` - (Optional) The logging format for JSON.
+* `text` - (Optional) The logging format for text. Must be between 1 and 1000 characters in length.
+
+The `json` object supports the following:
+
+* `key` - (Required) The specified key for the JSON. Must be between 1 and 100 characters in length.
+* `value` - (Required) The specified value for the JSON. Must be between 1 and 100 characters in length.
+
+The `port_mapping` object supports the following:
+
+* `port` - (Required) Port used for the port mapping.
+* `protocol` - (Required) Protocol used for the port mapping. Valid values are `http`, `http2`, `tcp` and `grpc`.
+
+The `connection_pool` object supports the following:
+
+* `grpc` - (Optional) Connection pool information for gRPC listeners.
+* `http` - (Optional) Connection pool information for HTTP listeners.
+* `http2` - (Optional) Connection pool information for HTTP2 listeners.
+
+The `grpc` connection pool object supports the following:
+
+* `max_requests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `http` connection pool object supports the following:
+
+* `max_connections` - (Required) Maximum number of outbound TCP connections Envoy can establish concurrently with all hosts in upstream cluster. Minimum value of `1`.
+* `max_pending_requests` - (Optional) Number of overflowing requests after `max_connections` Envoy will queue to upstream cluster. Minimum value of `1`.
+
+The `http2` connection pool object supports the following:
+
+* `max_requests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `health_check` object supports the following:
+
+* `healthy_threshold` - (Required) Number of consecutive successful health checks that must occur before declaring listener healthy.
+* `interval_millis` - (Required) Time period in milliseconds between each health check execution.
+* `protocol` - (Required) Protocol for the health check request. Valid values are `http`, `http2`, and `grpc`.
+* `timeout_millis` - (Required) Amount of time to wait when receiving a response from the health check, in milliseconds.
+* `unhealthy_threshold` - (Required) Number of consecutive failed health checks that must occur before declaring a virtual gateway unhealthy.
+* `path` - (Optional) Destination path for the health check request. This is only required if the specified protocol is `http` or `http2`.
+* `port` - (Optional) Destination port for the health check request. This port must match the port defined in the `port_mapping` for the listener.
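+
+A hand-written fragment sketching a listener health check; it would go in the `listener` list of `AppmeshVirtualGatewaySpec`. The class names mirror the virtual node health check example elsewhere in this set and are assumed to exist in your generated bindings:
+
+```python
+from imports.aws.appmesh_virtual_gateway import (
+    AppmeshVirtualGatewaySpecListener,
+    AppmeshVirtualGatewaySpecListenerHealthCheck,
+    AppmeshVirtualGatewaySpecListenerPortMapping)
+
+# Probe /healthz every 5s; two consecutive failures mark the gateway
+# unhealthy, two consecutive successes mark it healthy again.
+listener = AppmeshVirtualGatewaySpecListener(
+    port_mapping=AppmeshVirtualGatewaySpecListenerPortMapping(
+        port=8080,
+        protocol="http"
+    ),
+    health_check=AppmeshVirtualGatewaySpecListenerHealthCheck(
+        healthy_threshold=2,
+        unhealthy_threshold=2,
+        interval_millis=5000,
+        timeout_millis=2000,
+        protocol="http",
+        path="/healthz",
+        port=8080  # must match the listener's port_mapping
+    )
+)
+```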
+
+The `tls` object supports the following:
+
+* `certificate` - (Required) Listener's TLS certificate.
+* `mode` - (Required) Listener's TLS mode. Valid values: `DISABLED`, `PERMISSIVE`, `STRICT`.
+* `validation` - (Optional) Listener's Transport Layer Security (TLS) validation context.
+
+The `certificate` object supports the following:
+
+* `acm` - (Optional) An AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) Local file certificate.
+* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificate_arn` - (Required) ARN for the certificate.
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate chain for the certificate. Must be between 1 and 255 characters in length.
+* `private_key` - (Required) Private key for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subject_alternative_names` - (Optional) SANs for a virtual gateway's listener's Transport Layer Security (TLS) validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subject_alternative_names` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate trust chain for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret for a virtual gateway's Transport Layer Security (TLS) Secret Discovery Service validation context trust.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the virtual gateway.
+* `arn` - ARN of the virtual gateway.
+* `created_date` - Creation date of the virtual gateway.
+* `last_updated_date` - Last update date of the virtual gateway.
+* `resource_owner` - Resource owner's AWS account ID.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual gateway using `mesh_name` together with the virtual gateway's `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import App Mesh virtual gateway using `mesh_name` together with the virtual gateway's `name`. For example: + +```console +% terraform import aws_appmesh_virtual_gateway.example mesh/gw1 +``` + +[1]: /docs/providers/aws/index.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appmesh_virtual_node.html.markdown b/website/docs/cdktf/python/r/appmesh_virtual_node.html.markdown new file mode 100644 index 00000000000..1c01db76041 --- /dev/null +++ b/website/docs/cdktf/python/r/appmesh_virtual_node.html.markdown @@ -0,0 +1,519 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_node" +description: |- + Provides an AWS App Mesh virtual node resource. +--- + + + +# Resource: aws_appmesh_virtual_node + +Provides an AWS App Mesh virtual node resource. + +## Breaking Changes + +Because of backward incompatible API changes (read [here](https://github.com/awslabs/aws-app-mesh-examples/issues/92)), `aws_appmesh_virtual_node` resource definitions created with provider versions earlier than v2.3.0 will need to be modified: + +* Rename the `service_name` attribute of the `dns` object to `hostname`. + +* Replace the `backends` attribute of the `spec` object with one or more `backend` configuration blocks, +setting `virtual_service_name` to the name of the service. + +The Terraform state associated with existing resources will automatically be migrated. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appmesh_virtual_node import AppmeshVirtualNode +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshVirtualNode(self, "serviceb1", + mesh_name=simple.id, + name="serviceBv1", + spec=AppmeshVirtualNodeSpec( + backend=[AppmeshVirtualNodeSpecBackend( + virtual_service=AppmeshVirtualNodeSpecBackendVirtualService( + virtual_service_name="servicea.simpleapp.local" + ) + ) + ], + listener=[AppmeshVirtualNodeSpecListener( + port_mapping=AppmeshVirtualNodeSpecListenerPortMapping( + port=8080, + protocol="http" + ) + ) + ], + service_discovery=AppmeshVirtualNodeSpecServiceDiscovery( + dns=AppmeshVirtualNodeSpecServiceDiscoveryDns( + hostname="serviceb.simpleapp.local" + ) + ) + ) + ) +``` + +### AWS Cloud Map Service Discovery + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.appmesh_virtual_node import AppmeshVirtualNode +from imports.aws.service_discovery_http_namespace import ServiceDiscoveryHttpNamespace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ServiceDiscoveryHttpNamespace(self, "example", + name="example-ns" + ) + AppmeshVirtualNode(self, "serviceb1", + mesh_name=simple.id, + name="serviceBv1", + spec=AppmeshVirtualNodeSpec( + backend=[AppmeshVirtualNodeSpecBackend( + virtual_service=AppmeshVirtualNodeSpecBackendVirtualService( + virtual_service_name="servicea.simpleapp.local" + ) + ) + ], + listener=[AppmeshVirtualNodeSpecListener( + port_mapping=AppmeshVirtualNodeSpecListenerPortMapping( + port=8080, + protocol="http" + ) + ) + ], + service_discovery=AppmeshVirtualNodeSpecServiceDiscovery( + aws_cloud_map=AppmeshVirtualNodeSpecServiceDiscoveryAwsCloudMap( + attributes={ + "stack": "blue" + }, + namespace_name=example.name, + service_name="serviceb1" + ) + ) + ) + ) +``` + +### Listener Health Check + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appmesh_virtual_node import AppmeshVirtualNode +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshVirtualNode(self, "serviceb1", + mesh_name=simple.id, + name="serviceBv1", + spec=AppmeshVirtualNodeSpec( + backend=[AppmeshVirtualNodeSpecBackend( + virtual_service=AppmeshVirtualNodeSpecBackendVirtualService( + virtual_service_name="servicea.simpleapp.local" + ) + ) + ], + listener=[AppmeshVirtualNodeSpecListener( + health_check=AppmeshVirtualNodeSpecListenerHealthCheck( + healthy_threshold=2, + interval_millis=5000, + path="/ping", + protocol="http", + timeout_millis=2000, + unhealthy_threshold=2 + ), + port_mapping=AppmeshVirtualNodeSpecListenerPortMapping( + port=8080, + protocol="http" + ) + ) + ], + service_discovery=AppmeshVirtualNodeSpecServiceDiscovery( + dns=AppmeshVirtualNodeSpecServiceDiscoveryDns( + hostname="serviceb.simpleapp.local" + ) + ) + ) + ) +``` + +### Logging + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.appmesh_virtual_node import AppmeshVirtualNode +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshVirtualNode(self, "serviceb1", + mesh_name=simple.id, + name="serviceBv1", + spec=AppmeshVirtualNodeSpec( + backend=[AppmeshVirtualNodeSpecBackend( + virtual_service=AppmeshVirtualNodeSpecBackendVirtualService( + virtual_service_name="servicea.simpleapp.local" + ) + ) + ], + listener=[AppmeshVirtualNodeSpecListener( + port_mapping=AppmeshVirtualNodeSpecListenerPortMapping( + port=8080, + protocol="http" + ) + ) + ], + logging=AppmeshVirtualNodeSpecLogging( + access_log=AppmeshVirtualNodeSpecLoggingAccessLog( + file=AppmeshVirtualNodeSpecLoggingAccessLogFile( + path="/dev/stdout" + ) + ) + ), + service_discovery=AppmeshVirtualNodeSpecServiceDiscovery( + dns=AppmeshVirtualNodeSpecServiceDiscoveryDns( + hostname="serviceb.simpleapp.local" + ) + ) + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name to use for the virtual node. Must be between 1 and 255 characters in length. +* `mesh_name` - (Required) Name of the service mesh in which to create the virtual node. Must be between 1 and 255 characters in length. +* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to. +* `spec` - (Required) Virtual node specification to apply. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `spec` object supports the following: + +* `backend` - (Optional) Backends to which the virtual node is expected to send outbound traffic. +* `backend_defaults` - (Optional) Defaults for backends. +* `listener` - (Optional) Listeners from which the virtual node is expected to receive inbound traffic. +* `logging` - (Optional) Inbound and outbound access logging information for the virtual node. +* `service_discovery` - (Optional) Service discovery information for the virtual node. + +The `backend` object supports the following: + +* `virtual_service` - (Required) Virtual service to use as a backend for a virtual node. + +The `virtual_service` object supports the following: + +* `client_policy` - (Optional) Client policy for the backend. +* `virtual_service_name` - (Required) Name of the virtual service that is acting as a virtual node backend. Must be between 1 and 255 characters in length. + +The `client_policy` object supports the following: + +* `tls` - (Optional) Transport Layer Security (TLS) client policy. + +The `tls` object supports the following: + +* `certificate` (Optional) Virtual node's client's Transport Layer Security (TLS) certificate. +* `enforce` - (Optional) Whether the policy is enforced. Default is `true`. +* `ports` - (Optional) One or more ports that the policy is enforced for. +* `validation` - (Required) TLS validation context. + +The `certificate` object supports the following: + +* `file` - (Optional) Local file certificate. +* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate. 
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate chain for the certificate.
+* `private_key` - (Required) Private key for a certificate stored on the file system of the mesh endpoint that the proxy is running on.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subject_alternative_names` - (Optional) SANs for a TLS validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subject_alternative_names` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `acm` - (Optional) TLS validation context trust for an AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificate_authority_arns` - (Required) One or more ACM ARNs.
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate trust chain for a certificate stored on the file system of the virtual node that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `backend_defaults` object supports the following:
+
+* `client_policy` - (Optional) Default client policy for virtual service backends. See above for details.
+
+The `listener` object supports the following:
+
+* `port_mapping` - (Required) Port mapping information for the listener.
+* `connection_pool` - (Optional) Connection pool information for the listener.
+* `health_check` - (Optional) Health check information for the listener.
+* `outlier_detection` - (Optional) Outlier detection information for the listener.
+* `timeout` - (Optional) Timeouts for different protocols.
+* `tls` - (Optional) Transport Layer Security (TLS) properties for the listener.
+
+The `logging` object supports the following:
+
+* `access_log` - (Optional) Access log configuration for a virtual node.
+
+The `access_log` object supports the following:
+
+* `file` - (Optional) File object to send virtual node access logs to.
+
+The `file` object supports the following:
+
+* `format` - (Optional) The specified format for the logs.
+* `path` - (Required) File path to write access logs to. You can use `/dev/stdout` to send access logs to standard out. Must be between 1 and 255 characters in length.
+
+The `format` object supports the following:
+
+* `json` - (Optional) The logging format for JSON.
+* `text` - (Optional) The logging format for text. Must be between 1 and 1000 characters in length.
+
+The `json` object supports the following:
+
+* `key` - (Required) The specified key for the JSON. Must be between 1 and 100 characters in length.
+* `value` - (Required) The specified value for the JSON. Must be between 1 and 100 characters in length.
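+
+There is no access log `format` example above, so the following hand-written fragment sketches structured JSON access logs; it would be passed as the `logging` argument of `AppmeshVirtualNodeSpec`. The `...AccessLogFileFormat*` class names are an assumption based on the binding naming convention, and `%START_TIME%`/`%REQ(:METHOD)%` are standard Envoy access log command operators:
+
+```python
+from imports.aws.appmesh_virtual_node import (
+    AppmeshVirtualNodeSpecLogging,
+    AppmeshVirtualNodeSpecLoggingAccessLog,
+    AppmeshVirtualNodeSpecLoggingAccessLogFile,
+    AppmeshVirtualNodeSpecLoggingAccessLogFileFormat,
+    AppmeshVirtualNodeSpecLoggingAccessLogFileFormatJson)
+
+# Structured access logs on stdout with two JSON fields; Envoy
+# substitutes the command operators per request.
+logging = AppmeshVirtualNodeSpecLogging(
+    access_log=AppmeshVirtualNodeSpecLoggingAccessLog(
+        file=AppmeshVirtualNodeSpecLoggingAccessLogFile(
+            path="/dev/stdout",
+            format=AppmeshVirtualNodeSpecLoggingAccessLogFileFormat(
+                json=[
+                    AppmeshVirtualNodeSpecLoggingAccessLogFileFormatJson(
+                        key="start_time", value="%START_TIME%"),
+                    AppmeshVirtualNodeSpecLoggingAccessLogFileFormatJson(
+                        key="method", value="%REQ(:METHOD)%")
+                ]
+            )
+        )
+    )
+)
+```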
+
+The `service_discovery` object supports the following:
+
+* `aws_cloud_map` - (Optional) Any AWS Cloud Map information for the virtual node.
+* `dns` - (Optional) DNS service name for the virtual node.
+
+The `aws_cloud_map` object supports the following:
+
+* `attributes` - (Optional) String map that contains attributes with values that you can use to filter instances by any custom attribute that you specified when you registered the instance. Only instances that match all of the specified key/value pairs will be returned.
+* `namespace_name` - (Required) Name of the AWS Cloud Map namespace to use.
+Use the [`aws_service_discovery_http_namespace`](/docs/providers/aws/r/service_discovery_http_namespace.html) resource to configure a Cloud Map namespace. Must be between 1 and 1024 characters in length.
+* `service_name` - (Required) Name of the AWS Cloud Map service to use. Use the [`aws_service_discovery_service`](/docs/providers/aws/r/service_discovery_service.html) resource to configure a Cloud Map service. Must be between 1 and 1024 characters in length.
+
+The `dns` object supports the following:
+
+* `hostname` - (Required) DNS host name for your virtual node.
+* `ip_preference` - (Optional) The preferred IP version that this virtual node uses. Valid values: `IPv6_PREFERRED`, `IPv4_PREFERRED`, `IPv4_ONLY`, `IPv6_ONLY`.
+* `response_type` - (Optional) The DNS response type for the virtual node. Valid values: `LOADBALANCER`, `ENDPOINTS`.
+
+The `port_mapping` object supports the following:
+
+* `port` - (Required) Port used for the port mapping.
+* `protocol` - (Required) Protocol used for the port mapping. Valid values are `http`, `http2`, `tcp` and `grpc`.
+
+The `connection_pool` object supports the following:
+
+* `grpc` - (Optional) Connection pool information for gRPC listeners.
+* `http` - (Optional) Connection pool information for HTTP listeners.
+* `http2` - (Optional) Connection pool information for HTTP2 listeners.
+* `tcp` - (Optional) Connection pool information for TCP listeners.
+
+The `grpc` connection pool object supports the following:
+
+* `max_requests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `http` connection pool object supports the following:
+
+* `max_connections` - (Required) Maximum number of outbound TCP connections Envoy can establish concurrently with all hosts in upstream cluster. Minimum value of `1`.
+* `max_pending_requests` - (Optional) Number of overflowing requests after `max_connections` Envoy will queue to upstream cluster. Minimum value of `1`.
+
+The `http2` connection pool object supports the following:
+
+* `max_requests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `tcp` connection pool object supports the following:
+
+* `max_connections` - (Required) Maximum number of outbound TCP connections Envoy can establish concurrently with all hosts in upstream cluster. Minimum value of `1`.
+
+The `health_check` object supports the following:
+
+* `healthy_threshold` - (Required) Number of consecutive successful health checks that must occur before declaring listener healthy.
+* `interval_millis` - (Required) Time period in milliseconds between each health check execution.
+* `protocol` - (Required) Protocol for the health check request. Valid values are `http`, `http2`, `tcp` and `grpc`.
+* `timeout_millis` - (Required) Amount of time to wait when receiving a response from the health check, in milliseconds.
+* `unhealthy_threshold` - (Required) Number of consecutive failed health checks that must occur before declaring a virtual node unhealthy.
+* `path` - (Optional) Destination path for the health check request. This is only required if the specified protocol is `http` or `http2`.
+* `port` - (Optional) Destination port for the health check request. This port must match the port defined in the `port_mapping` for the listener.
+
+The `outlier_detection` object supports the following:
+
+* `base_ejection_duration` - (Required) Base amount of time for which a host is ejected.
+* `interval` - (Required) Time interval between ejection sweep analysis.
+* `max_ejection_percent` - (Required) Maximum percentage of hosts in load balancing pool for upstream service that can be ejected. Will eject at least one host regardless of the value.
+Minimum value of `0`. Maximum value of `100`.
+* `max_server_errors` - (Required) Number of consecutive `5xx` errors required for ejection. Minimum value of `1`.
+
+The `base_ejection_duration` and `interval` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `timeout` object supports the following:
+
+* `grpc` - (Optional) Timeouts for gRPC listeners.
+* `http` - (Optional) Timeouts for HTTP listeners.
+* `http2` - (Optional) Timeouts for HTTP2 listeners.
+* `tcp` - (Optional) Timeouts for TCP listeners.
+
+The `grpc` timeout object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `per_request` - (Optional) Per request timeout.
+
+The `idle` and `per_request` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `http` and `http2` timeout objects support the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `per_request` - (Optional) Per request timeout.
+
+The `idle` and `per_request` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `tcp` timeout object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+
+The `idle` object supports the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `tls` object supports the following:
+
+* `certificate` - (Required) Listener's TLS certificate.
+* `mode` - (Required) Listener's TLS mode. Valid values: `DISABLED`, `PERMISSIVE`, `STRICT`.
+* `validation` - (Optional) Listener's Transport Layer Security (TLS) validation context.
+
+The `certificate` object supports the following:
+
+* `acm` - (Optional) An AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) Local file certificate.
+* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificate_arn` - (Required) ARN for the certificate.
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate chain for the certificate. Must be between 1 and 255 characters in length.
+* `private_key` - (Required) Private key for a certificate stored on the file system of the virtual node that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subject_alternative_names` - (Optional) SANs for a TLS validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subject_alternative_names` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `file` object supports the following:
+
+* `certificate_chain` - (Required) Certificate trust chain for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secret_name` - (Required) Name of the secret for a virtual node's Transport Layer Security (TLS) Secret Discovery Service validation context trust.
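+
+A hand-written fragment sketching listener TLS termination from a certificate on the proxy's file system; it would go in the `listener` list of `AppmeshVirtualNodeSpec`. Class names are assumed from the binding naming convention used in the examples above, and the paths are placeholders:
+
+```python
+from imports.aws.appmesh_virtual_node import (
+    AppmeshVirtualNodeSpecListener,
+    AppmeshVirtualNodeSpecListenerPortMapping,
+    AppmeshVirtualNodeSpecListenerTls,
+    AppmeshVirtualNodeSpecListenerTlsCertificate,
+    AppmeshVirtualNodeSpecListenerTlsCertificateFile)
+
+# Terminate TLS in STRICT mode using a certificate chain and key that
+# live on the Envoy proxy's file system (placeholder paths).
+listener = AppmeshVirtualNodeSpecListener(
+    port_mapping=AppmeshVirtualNodeSpecListenerPortMapping(
+        port=8443,
+        protocol="http"
+    ),
+    tls=AppmeshVirtualNodeSpecListenerTls(
+        mode="STRICT",
+        certificate=AppmeshVirtualNodeSpecListenerTlsCertificate(
+            file=AppmeshVirtualNodeSpecListenerTlsCertificateFile(
+                certificate_chain="/etc/ssl/certs/cert_chain.pem",
+                private_key="/etc/ssl/private/key.pem"
+            )
+        )
+    )
+)
+```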
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the virtual node.
+* `arn` - ARN of the virtual node.
+* `created_date` - Creation date of the virtual node.
+* `last_updated_date` - Last update date of the virtual node.
+* `resource_owner` - Resource owner's AWS account ID.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual nodes using `mesh_name` together with the virtual node's `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Mesh virtual nodes using `mesh_name` together with the virtual node's `name`. For example:
+
+```console
+% terraform import aws_appmesh_virtual_node.serviceb1 simpleapp/serviceBv1
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appmesh_virtual_router.html.markdown b/website/docs/cdktf/python/r/appmesh_virtual_router.html.markdown
new file mode 100644
index 00000000000..335080ef42c
--- /dev/null
+++ b/website/docs/cdktf/python/r/appmesh_virtual_router.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_router"
+description: |-
+  Provides an AWS App Mesh virtual router resource.
+---
+
+
+
+# Resource: aws_appmesh_virtual_router
+
+Provides an AWS App Mesh virtual router resource.
+
+## Breaking Changes
+
+Because of backward incompatible API changes (read [here](https://github.com/awslabs/aws-app-mesh-examples/issues/92) and [here](https://github.com/awslabs/aws-app-mesh-examples/issues/94)), `aws_appmesh_virtual_router` resource definitions created with provider versions earlier than v2.3.0 will need to be modified:
+
+* Remove the `service_names` attribute from the `spec` argument. AWS has created an `aws_appmesh_virtual_service` resource for each service name. Import these resources using `terraform import`.
+
+* Add a `listener` configuration block to the `spec` argument.
+
+The Terraform state associated with existing resources will automatically be migrated.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appmesh_virtual_router import AppmeshVirtualRouter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppmeshVirtualRouter(self, "serviceb",
+            mesh_name=simple.id,
+            name="serviceB",
+            spec=AppmeshVirtualRouterSpec(
+                listener=[AppmeshVirtualRouterSpecListener(
+                    port_mapping=AppmeshVirtualRouterSpecListenerPortMapping(
+                        port=8080,
+                        protocol="http"
+                    )
+                )
+                ]
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the virtual router. Must be between 1 and 255 characters in length.
+* `mesh_name` - (Required) Name of the service mesh in which to create the virtual router. Must be between 1 and 255 characters in length.
+* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to.
+* `spec` - (Required) Virtual router specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `listener` - (Optional) Listeners that the virtual router is expected to receive inbound traffic from.
+Currently only one listener is supported per virtual router.
+
+The `listener` object supports the following:
+
+* `port_mapping` - (Required) Port mapping information for the listener.
+
+The `port_mapping` object supports the following:
+
+* `port` - (Required) Port used for the port mapping.
+* `protocol` - (Required) Protocol used for the port mapping. Valid values are `http`,`http2`, `tcp` and `grpc`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the virtual router. +* `arn` - ARN of the virtual router. +* `created_date` - Creation date of the virtual router. +* `last_updated_date` - Last update date of the virtual router. +* `resource_owner` - Resource owner's AWS account ID. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual routers using `mesh_name` together with the virtual router's `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import App Mesh virtual routers using `mesh_name` together with the virtual router's `name`. For example: + +```console +% terraform import aws_appmesh_virtual_router.serviceb simpleapp/serviceB +``` + +[1]: /docs/providers/aws/index.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appmesh_virtual_service.html.markdown b/website/docs/cdktf/python/r/appmesh_virtual_service.html.markdown new file mode 100644 index 00000000000..b33095de017 --- /dev/null +++ b/website/docs/cdktf/python/r/appmesh_virtual_service.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_service" +description: |- + Provides an AWS App Mesh virtual service resource. +--- + + + +# Resource: aws_appmesh_virtual_service + +Provides an AWS App Mesh virtual service resource. + +## Example Usage + +### Virtual Node Provider + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appmesh_virtual_service import AppmeshVirtualService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppmeshVirtualService(self, "servicea", + mesh_name=simple.id, + name="servicea.simpleapp.local", + spec=AppmeshVirtualServiceSpec( + provider=AppmeshVirtualServiceSpecProvider( + virtual_node=AppmeshVirtualServiceSpecProviderVirtualNode( + virtual_node_name=serviceb1.name + ) + ) + ) + ) +``` + +### Virtual Router Provider + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.appmesh_virtual_service import AppmeshVirtualService
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppmeshVirtualService(self, "servicea",
+            mesh_name=simple.id,
+            name="servicea.simpleapp.local",
+            spec=AppmeshVirtualServiceSpec(
+                provider=AppmeshVirtualServiceSpecProvider(
+                    virtual_router=AppmeshVirtualServiceSpecProviderVirtualRouter(
+                        virtual_router_name=serviceb.name
+                    )
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the virtual service. Must be between 1 and 255 characters in length.
+* `mesh_name` - (Required) Name of the service mesh in which to create the virtual service. Must be between 1 and 255 characters in length.
+* `mesh_owner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to.
+* `spec` - (Required) Virtual service specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `provider` - (Optional) App Mesh object that is acting as the provider for a virtual service. You can specify a single virtual node or virtual router.
+
+The `provider` object supports the following:
+
+* `virtual_node` - (Optional) Virtual node associated with a virtual service.
+* `virtual_router` - (Optional) Virtual router associated with a virtual service.
+
+The `virtual_node` object supports the following:
+
+* `virtual_node_name` - (Required) Name of the virtual node that is acting as a service provider. Must be between 1 and 255 characters in length.
+
+The `virtual_router` object supports the following:
+
+* `virtual_router_name` - (Required) Name of the virtual router that is acting as a service provider. Must be between 1 and 255 characters in length.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the virtual service.
+* `arn` - ARN of the virtual service.
+* `created_date` - Creation date of the virtual service.
+* `last_updated_date` - Last update date of the virtual service.
+* `resource_owner` - Resource owner's AWS account ID.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual services using `mesh_name` together with the virtual service's `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Mesh virtual services using `mesh_name` together with the virtual service's `name`.
For example:
+
+```console
+% terraform import aws_appmesh_virtual_service.servicea simpleapp/servicea.simpleapp.local
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apprunner_auto_scaling_configuration_version.html.markdown b/website/docs/cdktf/python/r/apprunner_auto_scaling_configuration_version.html.markdown
new file mode 100644
index 00000000000..bbcc843e2ed
--- /dev/null
+++ b/website/docs/cdktf/python/r/apprunner_auto_scaling_configuration_version.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_auto_scaling_configuration_version"
+description: |-
+  Manages an App Runner AutoScaling Configuration Version.
+---
+
+
+
+# Resource: aws_apprunner_auto_scaling_configuration_version
+
+Manages an App Runner AutoScaling Configuration Version.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apprunner_auto_scaling_configuration_version import ApprunnerAutoScalingConfigurationVersion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerAutoScalingConfigurationVersion(self, "example",
+            auto_scaling_configuration_name="example",
+            max_concurrency=50,
+            max_size=10,
+            min_size=2,
+            tags={
+                "Name": "example-apprunner-autoscaling"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `auto_scaling_configuration_name` - (Required, Forces new resource) Name of the auto scaling configuration.
+* `max_concurrency` - (Optional, Forces new resource) Maximum number of concurrent requests that you want an instance to process. When the number of concurrent requests exceeds this limit, App Runner scales up your service.
+* `max_size` - (Optional, Forces new resource) Maximum number of instances that App Runner provisions for your service.
+* `min_size` - (Optional, Forces new resource) Minimum number of instances that App Runner provisions for your service.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of this auto scaling configuration version.
+* `auto_scaling_configuration_revision` - The revision of this auto scaling configuration.
+* `latest` - Whether the auto scaling configuration has the highest `auto_scaling_configuration_revision` among all configurations that share the same `auto_scaling_configuration_name`.
+* `status` - Current state of the auto scaling configuration. An INACTIVE configuration revision has been deleted and can't be used. It is permanently removed some time after deletion.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
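+
+As a usage note, an App Runner service adopts this configuration through its `auto_scaling_configuration_arn` argument. A minimal hedged sketch, following the import style of the generated examples (`example` is assumed to hold the configuration defined above, and the image-based source block is illustrative only):
+
+```python
+# Hedged sketch: attach the auto scaling configuration to a service.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.apprunner_service import ApprunnerService
+class MyHypotheticalStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerService(self, "example_service",
+            service_name="example",
+            # ARN exported by the aws_apprunner_auto_scaling_configuration_version above
+            auto_scaling_configuration_arn=example.arn,
+            source_configuration=ApprunnerServiceSourceConfiguration(
+                image_repository=ApprunnerServiceSourceConfigurationImageRepository(
+                    image_configuration=ApprunnerServiceSourceConfigurationImageRepositoryImageConfiguration(
+                        port="8000"
+                    ),
+                    image_identifier="public.ecr.aws/aws-containers/hello-app-runner:latest",
+                    image_repository_type="ECR_PUBLIC"
+                )
+            )
+        )
+```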
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner AutoScaling Configuration Versions using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Runner AutoScaling Configuration Versions using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_auto_scaling_configuration_version.example "arn:aws:apprunner:us-east-1:1234567890:autoscalingconfiguration/example/1/69bdfe0115224b0db49398b7beb68e0f"
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apprunner_connection.html.markdown b/website/docs/cdktf/python/r/apprunner_connection.html.markdown
new file mode 100644
index 00000000000..735adccc5ab
--- /dev/null
+++ b/website/docs/cdktf/python/r/apprunner_connection.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_connection"
+description: |-
+  Manages an App Runner Connection.
+---
+
+
+
+# Resource: aws_apprunner_connection
+
+Manages an App Runner Connection.
+
+~> **NOTE:** After creation, you must complete the authentication handshake using the App Runner console.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apprunner_connection import ApprunnerConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerConnection(self, "example",
+            connection_name="example",
+            provider_type="GITHUB",
+            tags={
+                "Name": "example-apprunner-connection"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `connection_name` - (Required) Name of the connection.
+* `provider_type` - (Required) Source repository provider. Valid values: `GITHUB`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the connection.
+* `status` - Current state of the App Runner connection. When the state is `AVAILABLE`, you can use the connection to create an [`aws_apprunner_service` resource](apprunner_service.html).
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Connections using the `connection_name`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Runner Connections using the `connection_name`. For example:
+
+```console
+% terraform import aws_apprunner_connection.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apprunner_custom_domain_association.html.markdown b/website/docs/cdktf/python/r/apprunner_custom_domain_association.html.markdown
new file mode 100644
index 00000000000..800c39c1b93
--- /dev/null
+++ b/website/docs/cdktf/python/r/apprunner_custom_domain_association.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_custom_domain_association"
+description: |-
+  Manages an App Runner Custom Domain association.
+---
+
+
+
+# Resource: aws_apprunner_custom_domain_association
+
+Manages an App Runner Custom Domain association.
+
+~> **NOTE:** After creation, you must use the information in the `certificate_validation_records` attribute to add CNAME records to your Domain Name System (DNS). For each mapped domain name, add a mapping to the target App Runner subdomain (found in the `dns_target` attribute) and one or more certificate validation records. App Runner then performs DNS validation to verify that you own or control the domain name you associated. App Runner tracks domain validity in a certificate stored in AWS Certificate Manager (ACM).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apprunner_custom_domain_association import ApprunnerCustomDomainAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerCustomDomainAssociation(self, "example",
+            domain_name="example.com",
+            service_arn=Token.as_string(aws_apprunner_service_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain_name` - (Required) Custom domain endpoint to associate. Specify a base domain e.g., `example.com` or a subdomain e.g., `subdomain.example.com`.
+* `enable_www_subdomain` - (Optional) Whether to associate the `www` subdomain with the App Runner service in addition to the base domain. Defaults to `true`.
+* `service_arn` - (Required) ARN of the App Runner service.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `domain_name` and `service_arn` separated by a comma (`,`).
+* `certificate_validation_records` - A set of certificate CNAME records used for this domain name. See [Certificate Validation Records](#certificate-validation-records) below for more details.
+* `dns_target` - App Runner subdomain of the App Runner service. The custom domain name is mapped to this target name. Attribute only available if resource created (not imported) with Terraform.
+
+### Certificate Validation Records
+
+The configuration block consists of the following arguments:
+
+* `name` - Certificate CNAME record name.
+* `status` - Current state of the certificate CNAME record validation. It should change to `SUCCESS` after App Runner completes validation with your DNS.
+* `type` - Record type, always `CNAME`.
+* `value` - Certificate CNAME record value.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Custom Domain Associations using the `domain_name` and `service_arn` separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Runner Custom Domain Associations using the `domain_name` and `service_arn` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_apprunner_custom_domain_association.example example.com,arn:aws:apprunner:us-east-1:123456789012:service/example-app/8fe1e10304f84fd2b0df550fe98a71fa
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apprunner_observability_configuration.html.markdown b/website/docs/cdktf/python/r/apprunner_observability_configuration.html.markdown
new file mode 100644
index 00000000000..197e59a211b
--- /dev/null
+++ b/website/docs/cdktf/python/r/apprunner_observability_configuration.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_observability_configuration"
+description: |-
+  Manages an App Runner Observability Configuration.
+---
+
+
+
+# Resource: aws_apprunner_observability_configuration
+
+Manages an App Runner Observability Configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apprunner_observability_configuration import ApprunnerObservabilityConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerObservabilityConfiguration(self, "example",
+            observability_configuration_name="example",
+            tags={
+                "Name": "example-apprunner-observability-configuration"
+            },
+            trace_configuration=ApprunnerObservabilityConfigurationTraceConfiguration(
+                vendor="AWSXRAY"
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `observability_configuration_name` - (Required, Forces new resource) Name of the observability configuration.
+* `trace_configuration` - (Optional) Configuration of the tracing feature within this observability configuration. If you don't specify it, App Runner doesn't enable tracing. See [Trace Configuration](#trace-configuration) below for more details.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Trace Configuration
+
+The `trace_configuration` block supports the following argument:
+
+* `vendor` - (Required) Implementation provider chosen for tracing App Runner services. Valid values: `AWSXRAY`.
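+
+For context on how a service consumes this resource, the configuration's `arn` feeds a service's `observability_configuration` block, as the `aws_apprunner_service` documentation also shows. A hedged sketch (`example` is assumed to hold the configuration defined above; the source block is illustrative only):
+
+```python
+# Hedged sketch: enable tracing on a service via the observability configuration.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.apprunner_service import ApprunnerService
+class MyHypotheticalStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerService(self, "example_service",
+            service_name="example",
+            observability_configuration=ApprunnerServiceObservabilityConfiguration(
+                observability_configuration_arn=example.arn,  # configuration defined above
+                observability_enabled=True
+            ),
+            source_configuration=ApprunnerServiceSourceConfiguration(
+                image_repository=ApprunnerServiceSourceConfigurationImageRepository(
+                    image_configuration=ApprunnerServiceSourceConfigurationImageRepositoryImageConfiguration(
+                        port="8000"
+                    ),
+                    image_identifier="public.ecr.aws/aws-containers/hello-app-runner:latest",
+                    image_repository_type="ECR_PUBLIC"
+                )
+            )
+        )
+```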
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of this observability configuration. +* `observability_configuration_revision` - The revision of this observability configuration. +* `latest` - Whether the observability configuration has the highest `observability_configuration_revision` among all configurations that share the same `observability_configuration_name`. +* `status` - Current state of the observability configuration. An INACTIVE configuration revision has been deleted and can't be used. It is permanently removed some time after deletion. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Observability Configuration using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import App Runner Observability Configuration using the `arn`. For example: + +```console +% terraform import aws_apprunner_observability_configuration.example arn:aws:apprunner:us-east-1:1234567890:observabilityconfiguration/example/1/d75bc7ea55b71e724fe5c23452fe22a1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/apprunner_service.html.markdown b/website/docs/cdktf/python/r/apprunner_service.html.markdown new file mode 100644 index 00000000000..e4cde838a95 --- /dev/null +++ b/website/docs/cdktf/python/r/apprunner_service.html.markdown @@ -0,0 +1,316 @@ +--- +subcategory: "App Runner" +layout: "aws" +page_title: "AWS: aws_apprunner_service" +description: |- + Manages an App Runner Service. +--- + + + +# Resource: aws_apprunner_service + +Manages an App Runner Service. + +## Example Usage + +### Service with a Code Repository Source + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.apprunner_service import ApprunnerService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ApprunnerService(self, "example", + network_configuration=ApprunnerServiceNetworkConfiguration( + egress_configuration=ApprunnerServiceNetworkConfigurationEgressConfiguration( + egress_type="VPC", + vpc_connector_arn=connector.arn + ) + ), + service_name="example", + source_configuration=ApprunnerServiceSourceConfiguration( + authentication_configuration=ApprunnerServiceSourceConfigurationAuthenticationConfiguration( + connection_arn=Token.as_string(aws_apprunner_connection_example.arn) + ), + code_repository=ApprunnerServiceSourceConfigurationCodeRepository( + code_configuration=ApprunnerServiceSourceConfigurationCodeRepositoryCodeConfiguration( + code_configuration_values=ApprunnerServiceSourceConfigurationCodeRepositoryCodeConfigurationCodeConfigurationValues( + build_command="python setup.py develop", + port="8000", + runtime="PYTHON_3", + start_command="python runapp.py" + ), + configuration_source="API" + ), + repository_url="https://github.com/example/my-example-python-app", + source_code_version=ApprunnerServiceSourceConfigurationCodeRepositorySourceCodeVersion( + type="BRANCH", + value="main" + ) + ) + ), + tags={ + "Name": "example-apprunner-service" + } + ) +``` + +### Service with an Image Repository Source + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.apprunner_service import ApprunnerService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ApprunnerService(self, "example", + service_name="example", + source_configuration=ApprunnerServiceSourceConfiguration( + auto_deployments_enabled=False, + image_repository=ApprunnerServiceSourceConfigurationImageRepository( + image_configuration=ApprunnerServiceSourceConfigurationImageRepositoryImageConfiguration( + port="8000" + ), + image_identifier="public.ecr.aws/aws-containers/hello-app-runner:latest", + image_repository_type="ECR_PUBLIC" + ) + ), + tags={ + "Name": "example-apprunner-service" + } + ) +``` + +### Service with Observability Configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.apprunner_observability_configuration import ApprunnerObservabilityConfiguration +from imports.aws.apprunner_service import ApprunnerService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ApprunnerObservabilityConfiguration(self, "example", + observability_configuration_name="example", + trace_configuration=ApprunnerObservabilityConfigurationTraceConfiguration( + vendor="AWSXRAY" + ) + ) + aws_apprunner_service_example = ApprunnerService(self, "example_1", + observability_configuration=ApprunnerServiceObservabilityConfiguration( + observability_configuration_arn=example.arn, + observability_enabled=True + ), + service_name="example", + source_configuration=ApprunnerServiceSourceConfiguration( + auto_deployments_enabled=False, + image_repository=ApprunnerServiceSourceConfigurationImageRepository( + image_configuration=ApprunnerServiceSourceConfigurationImageRepositoryImageConfiguration( + port="8000" + ), + image_identifier="public.ecr.aws/aws-containers/hello-app-runner:latest", + image_repository_type="ECR_PUBLIC" + ) + ), + tags={ + "Name": "example-apprunner-service" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_apprunner_service_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `service_name` - (Forces new resource) Name of the service. +* `source_configuration` - The source to deploy to the App Runner service. Can be a code or an image repository. See [Source Configuration](#source-configuration) below for more details. + +The following arguments are optional: + +* `auto_scaling_configuration_arn` - ARN of an App Runner automatic scaling configuration resource that you want to associate with your service. If not provided, App Runner associates the latest revision of a default auto scaling configuration. +* `encryption_configuration` - (Forces new resource) An optional custom encryption key that App Runner uses to encrypt the copy of your source repository that it maintains and your service logs. By default, App Runner uses an AWS managed CMK. See [Encryption Configuration](#encryption-configuration) below for more details. +* `health_check_configuration` - (Forces new resource) Settings of the health check that AWS App Runner performs to monitor the health of your service. See [Health Check Configuration](#health-check-configuration) below for more details. +* `instance_configuration` - The runtime configuration of instances (scaling units) of the App Runner service. See [Instance Configuration](#instance-configuration) below for more details. +* `network_configuration` - Configuration settings related to network traffic of the web application that the App Runner service runs. See [Network Configuration](#network-configuration) below for more details. +* `observability_configuration` - The observability configuration of your service. See [Observability Configuration](#observability-configuration) below for more details. +* `tags` - Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
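+
+To make the optional runtime blocks concrete before they are detailed below, here is a hedged sketch combining `health_check_configuration` and `instance_configuration` with the image-based source from the examples above (the block class names follow the generated naming convention; all values are illustrative only):
+
+```python
+# Hedged sketch: tune health checks and instance sizing on a service.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.apprunner_service import ApprunnerService
+class MyHypotheticalStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerService(self, "example_tuned",
+            service_name="example",
+            health_check_configuration=ApprunnerServiceHealthCheckConfiguration(
+                protocol="HTTP",
+                path="/",               # HTTP checks are sent to this path
+                interval=10,            # seconds between checks (1-20)
+                timeout=5,              # seconds to wait for a response (1-20)
+                healthy_threshold=1,
+                unhealthy_threshold=5
+            ),
+            instance_configuration=ApprunnerServiceInstanceConfiguration(
+                cpu="1024",             # 1 vCPU, expressed as a string
+                memory="2048"           # 2 GB, expressed in MB as a string
+            ),
+            source_configuration=ApprunnerServiceSourceConfiguration(
+                image_repository=ApprunnerServiceSourceConfigurationImageRepository(
+                    image_configuration=ApprunnerServiceSourceConfigurationImageRepositoryImageConfiguration(
+                        port="8000"
+                    ),
+                    image_identifier="public.ecr.aws/aws-containers/hello-app-runner:latest",
+                    image_repository_type="ECR_PUBLIC"
+                )
+            )
+        )
+```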
+
+### Encryption Configuration
+
+The `encryption_configuration` block supports the following argument:
+
+* `kms_key` - (Required) ARN of the KMS key used for encryption.
+
+### Health Check Configuration
+
+The `health_check_configuration` block supports the following arguments:
+
+* `healthy_threshold` - (Optional) Number of consecutive checks that must succeed before App Runner decides that the service is healthy. Defaults to 1. Minimum value of 1. Maximum value of 20.
+* `interval` - (Optional) Time interval, in seconds, between health checks. Defaults to 5. Minimum value of 1. Maximum value of 20.
+* `path` - (Optional) URL to send requests to for health checks. Defaults to `/`. Minimum length of 0. Maximum length of 51200.
+* `protocol` - (Optional) IP protocol that App Runner uses to perform health checks for your service. Valid values: `TCP`, `HTTP`. Defaults to `TCP`. If you set protocol to `HTTP`, App Runner sends health check requests to the HTTP path specified by `path`.
+* `timeout` - (Optional) Time, in seconds, to wait for a health check response before deciding it failed. Defaults to 2. Minimum value of 1. Maximum value of 20.
+* `unhealthy_threshold` - (Optional) Number of consecutive checks that must fail before App Runner decides that the service is unhealthy. Defaults to 5. Minimum value of 1. Maximum value of 20.
+
+### Instance Configuration
+
+The `instance_configuration` block supports the following arguments:
+
+* `cpu` - (Optional) Number of CPU units reserved for each instance of your App Runner service represented as a String. Defaults to `1024`. Valid values: `256|512|1024|2048|4096|(0.25|0.5|1|2|4) vCPU`.
+* `instance_role_arn` - (Optional) ARN of an IAM role that provides permissions to your App Runner service. These are permissions that your code needs when it calls any AWS APIs.
+* `memory` - (Optional) Amount of memory, in MB or GB, reserved for each instance of your App Runner service. Defaults to `2048`. Valid values: `512|1024|2048|3072|4096|6144|8192|10240|12288|(0.5|1|2|3|4|6|8|10|12) GB`.
+
+### Source Configuration
+
+The `source_configuration` block supports the following arguments:
+
+~> **Note:** Either `code_repository` or `image_repository` must be specified (but not both).
+
+* `authentication_configuration` - (Optional) Describes resources needed to authenticate access to some source repositories. See [Authentication Configuration](#authentication-configuration) below for more details.
+* `auto_deployments_enabled` - (Optional) Whether continuous integration from the source repository is enabled for the App Runner service. If set to `true`, each repository change (source code commit or new image version) starts a deployment. Defaults to `true`.
+* `code_repository` - (Optional) Description of a source code repository. See [Code Repository](#code-repository) below for more details.
+* `image_repository` - (Optional) Description of a source image repository. See [Image Repository](#image-repository) below for more details.
+
+### Authentication Configuration
+
+The `authentication_configuration` block supports the following arguments:
+
+* `access_role_arn` - (Optional) ARN of the IAM role that grants the App Runner service access to a source repository. Required for ECR image repositories (but not for ECR Public).
+* `connection_arn` - (Optional) ARN of the App Runner connection that enables the App Runner service to connect to a source repository. Required for GitHub code repositories.
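+
+As a hedged sketch of the private-ECR case described above (the role ARN and image identifier are placeholders):
+
+```python
+# Hedged sketch: a private ECR image authenticated via `access_role_arn`.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.apprunner_service import ApprunnerService
+class MyHypotheticalStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerService(self, "example_private_ecr",
+            service_name="example",
+            source_configuration=ApprunnerServiceSourceConfiguration(
+                authentication_configuration=ApprunnerServiceSourceConfigurationAuthenticationConfiguration(
+                    # Placeholder role; it must allow App Runner to pull from the repository
+                    access_role_arn="arn:aws:iam::123456789012:role/apprunner-ecr-access"
+                ),
+                image_repository=ApprunnerServiceSourceConfigurationImageRepository(
+                    image_configuration=ApprunnerServiceSourceConfigurationImageRepositoryImageConfiguration(
+                        port="8000"
+                    ),
+                    # Placeholder private ECR image; `ECR` (unlike `ECR_PUBLIC`) requires the role above
+                    image_identifier="123456789012.dkr.ecr.us-east-1.amazonaws.com/example:latest",
+                    image_repository_type="ECR"
+                )
+            )
+        )
+```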
+
+### Network Configuration
+
+The `network_configuration` block supports the following arguments:
+
+* `ingress_configuration` - (Optional) Network configuration settings for inbound network traffic. See [Ingress Configuration](#ingress-configuration) below for more details.
+* `egress_configuration` - (Optional) Network configuration settings for outbound message traffic. See [Egress Configuration](#egress-configuration) below for more details.
+
+### Ingress Configuration
+
+The `ingress_configuration` block supports the following argument:
+
+* `is_publicly_accessible` - (Required) Specifies whether your App Runner service is publicly accessible. Set it to `true` to make the service publicly accessible, or to `false` to make it accessible only from within an Amazon VPC.
+
+### Egress Configuration
+
+The `egress_configuration` block supports the following arguments:
+
+* `egress_type` - The type of egress configuration. Valid values are: `DEFAULT` and `VPC`. Set to `DEFAULT` for access to resources hosted on public networks; set to `VPC` to associate your service with a custom VPC through a VPC connector.
+* `vpc_connector_arn` - The Amazon Resource Name (ARN) of the App Runner VPC connector that you want to associate with your App Runner service. Only valid when `egress_type` is `VPC`.
+
+### Observability Configuration
+
+The `observability_configuration` block supports the following arguments:
+
+* `observability_enabled` - (Required) When `true`, an observability configuration resource is associated with the service.
+* `observability_configuration_arn` - (Optional) ARN of the observability configuration that is associated with the service. Specified only when `observability_enabled` is `true`.
+
+### Code Repository
+
+The `code_repository` block supports the following arguments:
+
+* `code_configuration` - (Optional) Configuration for building and running the service from a source code repository. See [Code Configuration](#code-configuration) below for more details.
+* `repository_url` - (Required) Location of the repository that contains the source code.
+* `source_code_version` - (Required) Version that should be used within the source code repository. See [Source Code Version](#source-code-version) below for more details.
+
+### Image Repository
+
+The `image_repository` block supports the following arguments:
+
+* `image_configuration` - (Optional) Configuration for running the identified image. See [Image Configuration](#image-configuration) below for more details.
+* `image_identifier` - (Required) Identifier of an image. For an image in Amazon Elastic Container Registry (Amazon ECR), this is an image name. For the image name format, see Pulling an image in the Amazon ECR User Guide.
+* `image_repository_type` - (Required) Type of the image repository. This reflects the repository provider and whether the repository is private or public. Valid values: `ECR`, `ECR_PUBLIC`.
+
+### Code Configuration
+
+The `code_configuration` block supports the following arguments:
+
+* `code_configuration_values` - (Optional) Basic configuration for building and running the App Runner service. Use this parameter to quickly launch an App Runner service without providing an apprunner.yaml file in the source code repository (or ignoring the file if it exists).
See [Code Configuration Values](#code-configuration-values) below for more details.
+* `configuration_source` - (Required) Source of the App Runner configuration. Valid values: `REPOSITORY`, `API`. Values are interpreted as follows:
+    * `REPOSITORY` - App Runner reads configuration values from the apprunner.yaml file in the source code repository and ignores the CodeConfigurationValues parameter.
+    * `API` - App Runner uses configuration values provided in the CodeConfigurationValues parameter and ignores the apprunner.yaml file in the source code repository.
+
+### Code Configuration Values
+
+The `code_configuration_values` block supports the following arguments:
+
+* `build_command` - (Optional) Command App Runner runs to build your application.
+* `port` - (Optional) Port that your application listens to in the container. Defaults to `"8080"`.
+* `runtime` - (Required) Runtime environment type for building and running an App Runner service. Represents a programming language runtime. Valid values: `PYTHON_3`, `NODEJS_12`, `NODEJS_14`, `NODEJS_16`, `CORRETTO_8`, `CORRETTO_11`, `GO_1`, `DOTNET_6`, `PHP_81`, `RUBY_31`.
+* `runtime_environment_secrets` - (Optional) Secrets and parameters available to your service as environment variables. A map of key/value pairs, where the key is the desired name of the Secret in the environment (i.e. it does not have to match the name of the secret in Secrets Manager or SSM Parameter Store), and the value is the ARN of the secret from AWS Secrets Manager or the ARN of the parameter in AWS SSM Parameter Store.
+* `runtime_environment_variables` - (Optional) Environment variables available to your running App Runner service. A map of key/value pairs. Keys with a prefix of `AWSAPPRUNNER` are reserved for system use and aren't valid.
+* `start_command` - (Optional) Command App Runner runs to start your application.
+
+### Image Configuration
+
+The `image_configuration` block supports the following arguments:
+
+* `port` - (Optional) Port that your application listens to in the container. Defaults to `"8080"`.
+* `runtime_environment_secrets` - (Optional) Secrets and parameters available to your service as environment variables. A map of key/value pairs, where the key is the desired name of the Secret in the environment (i.e. it does not have to match the name of the secret in Secrets Manager or SSM Parameter Store), and the value is the ARN of the secret from AWS Secrets Manager or the ARN of the parameter in AWS SSM Parameter Store.
+* `runtime_environment_variables` - (Optional) Environment variables available to your running App Runner service. A map of key/value pairs. Keys with a prefix of `AWSAPPRUNNER` are reserved for system use and aren't valid.
+* `start_command` - (Optional) Command App Runner runs to start the application in the source image. If specified, this command overrides the Docker image's default start command.
+
+### Source Code Version
+
+The `source_code_version` block supports the following arguments:
+
+* `type` - (Required) Type of version identifier. For a git-based repository, branches represent versions. Valid values: `BRANCH`.
+* `value` - (Required) Source code version. For a git-based repository, a branch name maps to a specific version. App Runner uses the most recent commit to the branch.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the App Runner service.
+* `service_id` - An alphanumeric ID that App Runner generated for this service. Unique within the AWS Region.
+* `service_url` - Subdomain URL that App Runner generated for this service. You can use this URL to access your service web application.
+* `status` - Current state of the App Runner service.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Services using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Runner Services using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_service.example arn:aws:apprunner:us-east-1:1234567890:service/example/0a03292a89764e5882c41d8f991c82fe
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apprunner_vpc_connector.html.markdown b/website/docs/cdktf/python/r/apprunner_vpc_connector.html.markdown
new file mode 100644
index 00000000000..493c9358d6c
--- /dev/null
+++ b/website/docs/cdktf/python/r/apprunner_vpc_connector.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_vpc_connector"
+description: |-
+  Manages an App Runner VPC Connector.
+---
+
+
+
+# Resource: aws_apprunner_vpc_connector
+
+Manages an App Runner VPC Connector.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apprunner_vpc_connector import ApprunnerVpcConnector
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerVpcConnector(self, "connector",
+            security_groups=["sg1", "sg2"],
+            subnets=["subnet1", "subnet2"],
+            vpc_connector_name="name"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `vpc_connector_name` - (Required) Name for the VPC connector.
+* `subnets` - (Required) List of IDs of subnets that App Runner should use when it associates your service with a custom Amazon VPC. Specify IDs of subnets of a single Amazon VPC. App Runner determines the Amazon VPC from the subnets you specify.
+* `security_groups` - List of IDs of security groups that App Runner should use for access to AWS resources under the specified subnets. If not specified, App Runner uses the default security group of the Amazon VPC. The default security group allows all outbound traffic.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
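+
+As a usage note, the connector's `arn` is consumed by a service's egress configuration, mirroring the VPC example in the `aws_apprunner_service` documentation. A hedged sketch (`connector` is assumed to hold the resource defined above; the source block is illustrative only):
+
+```python
+# Hedged sketch: route a service's outbound traffic through the VPC connector.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.apprunner_service import ApprunnerService
+class MyHypotheticalStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerService(self, "example_service",
+            service_name="example",
+            network_configuration=ApprunnerServiceNetworkConfiguration(
+                egress_configuration=ApprunnerServiceNetworkConfigurationEgressConfiguration(
+                    egress_type="VPC",
+                    vpc_connector_arn=connector.arn  # connector defined above
+                )
+            ),
+            source_configuration=ApprunnerServiceSourceConfiguration(
+                image_repository=ApprunnerServiceSourceConfigurationImageRepository(
+                    image_configuration=ApprunnerServiceSourceConfigurationImageRepositoryImageConfiguration(
+                        port="8000"
+                    ),
+                    image_identifier="public.ecr.aws/aws-containers/hello-app-runner:latest",
+                    image_repository_type="ECR_PUBLIC"
+                )
+            )
+        )
+```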
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the VPC connector.
+* `status` - Current state of the VPC connector. If the status of a connector revision is `INACTIVE`, it was deleted and can't be used. Inactive connector revisions are permanently removed some time after they are deleted.
+* `vpc_connector_revision` - The revision of the VPC connector. It's unique among all the active connectors (`"Status": "ACTIVE"`) that share the same name.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner VPC connectors using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Runner VPC connectors using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_vpc_connector.example arn:aws:apprunner:us-east-1:1234567890:vpcconnector/example/1/0a03292a89764e5882c41d8f991c82fe
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/apprunner_vpc_ingress_connection.html.markdown b/website/docs/cdktf/python/r/apprunner_vpc_ingress_connection.html.markdown
new file mode 100644
index 00000000000..f32b5ecbee9
--- /dev/null
+++ b/website/docs/cdktf/python/r/apprunner_vpc_ingress_connection.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_vpc_ingress_connection"
+description: |-
+  Manages an App Runner VPC Ingress Connection.
+---
+
+
+
+# Resource: aws_apprunner_vpc_ingress_connection
+
+Manages an App Runner VPC Ingress Connection.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.apprunner_vpc_ingress_connection import ApprunnerVpcIngressConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ApprunnerVpcIngressConnection(self, "example",
+            ingress_vpc_configuration=ApprunnerVpcIngressConnectionIngressVpcConfiguration(
+                vpc_endpoint_id=apprunner.id,
+                vpc_id=default_var.id
+            ),
+            name="example",
+            service_arn=Token.as_string(aws_apprunner_service_example.arn),
+            tags={
+                "foo": "bar"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name for the VPC Ingress Connection resource. It must be unique across all the active VPC Ingress Connections in your AWS account in the AWS Region.
+* `service_arn` - (Required) The Amazon Resource Name (ARN) for this App Runner service that is used to create the VPC Ingress Connection resource.
+* `ingress_vpc_configuration` - (Required) Specifications for the customer's Amazon VPC and the related AWS PrivateLink VPC endpoint that are used to create the VPC Ingress Connection resource.
See [Ingress VPC Configuration](#ingress-vpc-configuration) below for more details.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Ingress VPC Configuration
+
+The `ingress_vpc_configuration` block supports the following arguments:
+
+* `vpc_id` - (Required) The ID of the VPC that is used for the VPC endpoint.
+* `vpc_endpoint_id` - (Required) The ID of the VPC endpoint that your App Runner service connects to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the VPC Ingress Connection.
+* `domain_name` - The domain name associated with the VPC Ingress Connection resource.
+* `status` - The current status of the VPC Ingress Connection.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner VPC Ingress Connection using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import App Runner VPC Ingress Connection using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_vpc_ingress_connection.example "arn:aws:apprunner:us-west-2:837424938642:vpcingressconnection/example/b379f86381d74825832c2e82080342fa"
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appstream_directory_config.html.markdown b/website/docs/cdktf/python/r/appstream_directory_config.html.markdown
new file mode 100644
index 00000000000..f12d1313280
--- /dev/null
+++ b/website/docs/cdktf/python/r/appstream_directory_config.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_directory_config"
+description: |-
+  Provides an AppStream Directory Config
+---
+
+
+
+# Resource: aws_appstream_directory_config
+
+Provides an AppStream Directory Config.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.appstream_directory_config import AppstreamDirectoryConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppstreamDirectoryConfig(self, "example", + directory_name="NAME OF DIRECTORY", + organizational_unit_distinguished_names=["DISTINGUISHED NAME"], + service_account_credentials=AppstreamDirectoryConfigServiceAccountCredentials( + account_name="NAME OF ACCOUNT", + account_password="PASSWORD OF ACCOUNT" + ) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `directory_name` - (Required) Fully qualified name of the directory. +* `organizational_unit_distinguished_names` - (Required) Distinguished names of the organizational units for computer accounts. +* `service_account_credentials` - (Required) Configuration block for the name of the directory and organizational unit (OU) to use to join the directory config to a Microsoft Active Directory domain. See [`service_account_credentials`](#service_account_credentials) below. + +### `service_account_credentials` + +* `account_name` - (Required) User name of the account. This account must have the following privileges: create computer objects, join computers to the domain, and change/reset the password on descendant computer objects for the organizational units specified. +* `account_password` - (Required) Password for the account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier (ID) of the appstream directory config. +* `created_time` - Date and time, in UTC and extended RFC 3339 format, when the directory config was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appstream_directory_config` using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_appstream_directory_config` using the id. For example: + +```console +% terraform import aws_appstream_directory_config.example directoryNameExample +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appstream_fleet.html.markdown b/website/docs/cdktf/python/r/appstream_fleet.html.markdown new file mode 100644 index 00000000000..c1077ddd5de --- /dev/null +++ b/website/docs/cdktf/python/r/appstream_fleet.html.markdown @@ -0,0 +1,125 @@ +--- +subcategory: "AppStream 2.0" +layout: "aws" +page_title: "AWS: aws_appstream_fleet" +description: |- + Provides an AppStream fleet +--- + + + +# Resource: aws_appstream_fleet + +Provides an AppStream fleet. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.appstream_fleet import AppstreamFleet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppstreamFleet(self, "test_fleet", + compute_capacity=AppstreamFleetComputeCapacity( + desired_instances=1 + ), + description="test fleet", + display_name="test-fleet", + enable_default_internet_access=False, + fleet_type="ON_DEMAND", + idle_disconnect_timeout_in_seconds=60, + image_name="Amazon-AppStream2-Sample-Image-02-04-2019", + instance_type="stream.standard.large", + max_user_duration_in_seconds=600, + name="test-fleet", + tags={ + "TagName": "tag-value" + }, + vpc_config=AppstreamFleetVpcConfig( + subnet_ids=["subnet-06e9b13400c225127"] + ) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `compute_capacity` - (Required) Configuration block for the desired capacity of the fleet. See below. +* `instance_type` - (Required) Instance type to use when launching fleet instances. +* `name` - (Required) Unique name for the fleet. + +The following arguments are optional: + +* `description` - (Optional) Description to display. +* `disconnect_timeout_in_seconds` - (Optional) Amount of time that a streaming session remains active after users disconnect. +* `display_name` - (Optional) Human-readable friendly name for the AppStream fleet. +* `domain_join_info` - (Optional) Configuration block for the name of the directory and organizational unit (OU) to use to join the fleet to a Microsoft Active Directory domain. See below. +* `enable_default_internet_access` - (Optional) Enables or disables default internet access for the fleet. +* `fleet_type` - (Optional) Fleet type. Valid values are: `ON_DEMAND`, `ALWAYS_ON` +* `iam_role_arn` - (Optional) ARN of the IAM role to apply to the fleet. +* `idle_disconnect_timeout_in_seconds` - (Optional) Amount of time that users can be idle (inactive) before they are disconnected from their streaming session and the `disconnect_timeout_in_seconds` time interval begins. +* `image_name` - (Optional) Name of the image used to create the fleet. +* `image_arn` - (Optional) ARN of the public, private, or shared image to use. +* `stream_view` - (Optional) AppStream 2.0 view that is displayed to your users when they stream from the fleet. When `APP` is specified, only the windows of applications opened by users display. When `DESKTOP` is specified, the standard desktop that is provided by the operating system displays. If not specified, defaults to `APP`. +* `max_user_duration_in_seconds` - (Optional) Maximum amount of time that a streaming session can remain active, in seconds. +* `vpc_config` - (Optional) Configuration block for the VPC configuration for the image builder. See below. +* `tags` - (Optional) Map of tags to attach to AppStream instances. + +### `compute_capacity` + +* `desired_instances` - (Required) Desired number of streaming instances. + +### `domain_join_info` + +* `directory_name` - (Optional) Fully qualified name of the directory (for example, corp.example.com). +* `organizational_unit_distinguished_name` - (Optional) Distinguished name of the organizational unit for computer accounts. + +### `vpc_config` + +* `security_group_ids` - Identifiers of the security groups for the fleet or image builder. +* `subnet_ids` - Identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance. 
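+
+As a hedged illustration of the `domain_join_info` block described above (the directory name and OU are placeholders, the remaining arguments are trimmed to the required ones, and the block class name follows the generated naming convention):
+
+```python
+# Hedged sketch: join fleet instances to a Microsoft Active Directory domain.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appstream_fleet import AppstreamFleet
+class MyHypotheticalStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppstreamFleet(self, "domain_joined_fleet",
+            compute_capacity=AppstreamFleetComputeCapacity(
+                desired_instances=1
+            ),
+            domain_join_info=AppstreamFleetDomainJoinInfo(
+                directory_name="corp.example.com",  # placeholder directory
+                organizational_unit_distinguished_name="OU=AppStream,DC=corp,DC=example,DC=com"  # placeholder OU
+            ),
+            image_name="Amazon-AppStream2-Sample-Image-02-04-2019",
+            instance_type="stream.standard.large",
+            name="domain-joined-fleet"
+        )
+```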
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier (ID) of the appstream fleet. +* `arn` - ARN of the appstream fleet. +* `state` - State of the fleet. Can be `STARTING`, `RUNNING`, `STOPPING` or `STOPPED` +* `created_time` - Date and time, in UTC and extended RFC 3339 format, when the fleet was created. +* `compute_capacity` - Describes the capacity status for a fleet. + +### `compute_capacity` + +* `available` - Number of currently available instances that can be used to stream sessions. +* `in_use` - Number of instances in use for streaming. +* `running` - Total number of simultaneous streaming instances that are running. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appstream_fleet` using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_appstream_fleet` using the id. For example: + +```console +% terraform import aws_appstream_fleet.example fleetNameExample +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appstream_fleet_stack_association.html.markdown b/website/docs/cdktf/python/r/appstream_fleet_stack_association.html.markdown new file mode 100644 index 00000000000..cc5cc3d3130 --- /dev/null +++ b/website/docs/cdktf/python/r/appstream_fleet_stack_association.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "AppStream 2.0" +layout: "aws" +page_title: "AWS: aws_appstream_fleet_stack_association" +description: |- + Manages an AppStream Fleet Stack association. +--- + + + +# Resource: aws_appstream_fleet_stack_association + +Manages an AppStream Fleet Stack association. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appstream_fleet import AppstreamFleet +from imports.aws.appstream_fleet_stack_association import AppstreamFleetStackAssociation +from imports.aws.appstream_stack import AppstreamStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AppstreamFleet(self, "example", + compute_capacity=AppstreamFleetComputeCapacity( + desired_instances=1 + ), + image_name="Amazon-AppStream2-Sample-Image-02-04-2019", + instance_type="stream.standard.small", + name="NAME" + ) + aws_appstream_stack_example = AppstreamStack(self, "example_1", + name="STACK NAME" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appstream_stack_example.override_logical_id("example") + aws_appstream_fleet_stack_association_example = + AppstreamFleetStackAssociation(self, "example_2", + fleet_name=example.name, + stack_name=Token.as_string(aws_appstream_stack_example.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_appstream_fleet_stack_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `fleet_name` - (Required) Name of the fleet.
+* `stack_name` - (Required) Name of the stack.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the appstream stack fleet association, composed of the `fleet_name` and `stack_name` separated by a slash (`/`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppStream Stack Fleet Association using the `fleet_name` and `stack_name` separated by a slash (`/`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AppStream Stack Fleet Association using the `fleet_name` and `stack_name` separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_appstream_fleet_stack_association.example fleetName/stackName
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appstream_image_builder.html.markdown b/website/docs/cdktf/python/r/appstream_image_builder.html.markdown
new file mode 100644
index 00000000000..94e898a7a63
--- /dev/null
+++ b/website/docs/cdktf/python/r/appstream_image_builder.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_image_builder"
+description: |-
+  Provides an AppStream image builder
+---
+
+
+
+# Resource: aws_appstream_image_builder
+
+Provides an AppStream image builder.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appstream_image_builder import AppstreamImageBuilder
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppstreamImageBuilder(self, "test_fleet",
+            description="Description of a ImageBuilder",
+            display_name="Display name of a ImageBuilder",
+            enable_default_internet_access=False,
+            image_name="AppStream-WinServer2019-10-05-2022",
+            instance_type="stream.standard.large",
+            name="Name",
+            tags={
+                "Name": "Example Image Builder"
+            },
+            vpc_config=AppstreamImageBuilderVpcConfig(
+                subnet_ids=[example.id]
+            )
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `instance_type` - (Required) Instance type to use when launching the image builder.
+* `name` - (Required) Unique name for the image builder.
+
+The following arguments are optional:
+
+* `access_endpoint` - (Optional) Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below.
+* `appstream_agent_version` - (Optional) Version of the AppStream 2.0 agent to use for this image builder.
+* `description` - (Optional) Description to display.
+* `display_name` - (Optional) Human-readable friendly name for the AppStream image builder.
+* `domain_join_info` - (Optional) Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below.
+* `enable_default_internet_access` - (Optional) Enables or disables default internet access for the image builder.
+* `iam_role_arn` - (Optional) ARN of the IAM role to apply to the image builder.
+* `image_arn` - (Optional, Required if `image_name` not provided) ARN of the public, private, or shared image to use.
+* `image_name` - (Optional, Required if `image_arn` not provided) Name of the image used to create the image builder.
+* `vpc_config` - (Optional) Configuration block for the VPC configuration for the image builder. See below.
+* `tags` - (Optional) Map of tags to assign to the instance. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `access_endpoint`
+
+The `access_endpoint` block supports the following arguments:
+
+* `endpoint_type` - (Required) Type of interface endpoint.
+* `vpce_id` - (Optional) Identifier (ID) of the VPC in which the interface endpoint is used.
+
+### `domain_join_info`
+
+The `domain_join_info` block supports the following arguments:
+
+* `directory_name` - (Optional) Fully qualified name of the directory (for example, corp.example.com).
+* `organizational_unit_distinguished_name` - (Optional) Distinguished name of the organizational unit for computer accounts.
+
+### `vpc_config`
+
+The `vpc_config` block supports the following arguments:
+
+* `security_group_ids` - (Optional) Identifiers of the security groups for the image builder.
+* `subnet_ids` - (Optional) Identifiers of the subnets to which a network interface is attached from the image builder instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the appstream image builder.
+* `created_time` - Date and time, in UTC and extended RFC 3339 format, when the image builder was created.
+* `id` - Name of the image builder.
+* `state` - State of the image builder. Can be: `PENDING`, `UPDATING_AGENT`, `RUNNING`, `STOPPING`, `STOPPED`, `REBOOTING`, `SNAPSHOTTING`, `DELETING`, `FAILED`, `UPDATING`, `PENDING_QUALIFICATION`
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appstream_image_builder` using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_appstream_image_builder` using the `name`.
For example:
+
+```console
+% terraform import aws_appstream_image_builder.example imageBuilderExample
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appstream_stack.html.markdown b/website/docs/cdktf/python/r/appstream_stack.html.markdown
new file mode 100644
index 00000000000..4a2ed4812da
--- /dev/null
+++ b/website/docs/cdktf/python/r/appstream_stack.html.markdown
@@ -0,0 +1,156 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_stack"
+description: |-
+  Provides an AppStream stack
+---
+
+
+
+# Resource: aws_appstream_stack
+
+Provides an AppStream stack.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appstream_stack import AppstreamStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppstreamStack(self, "example",
+            application_settings=AppstreamStackApplicationSettings(
+                enabled=True,
+                settings_group="SettingsGroup"
+            ),
+            description="stack description",
+            display_name="stack display name",
+            feedback_url="http://your-domain/feedback",
+            name="stack name",
+            redirect_url="http://your-domain/redirect",
+            storage_connectors=[AppstreamStackStorageConnectors(
+                connector_type="HOMEFOLDERS"
+            )
+            ],
+            tags={
+                "TagName": "TagValue"
+            },
+            user_settings=[AppstreamStackUserSettings(
+                action="CLIPBOARD_COPY_FROM_LOCAL_DEVICE",
+                permission="ENABLED"
+            ), AppstreamStackUserSettings(
+                action="CLIPBOARD_COPY_TO_LOCAL_DEVICE",
+                permission="ENABLED"
+            ), AppstreamStackUserSettings(
+                action="DOMAIN_PASSWORD_SIGNIN",
+                permission="ENABLED"
+            ), AppstreamStackUserSettings(
+                action="DOMAIN_SMART_CARD_SIGNIN",
+                permission="DISABLED"
+            ), AppstreamStackUserSettings(
+                action="FILE_DOWNLOAD",
+                permission="ENABLED"
+            ), AppstreamStackUserSettings(
+                action="FILE_UPLOAD",
+                permission="ENABLED"
+            ), AppstreamStackUserSettings(
+                action="PRINTING_TO_LOCAL_DEVICE",
+                permission="ENABLED"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Unique name for the AppStream stack.
+
+The following arguments are optional:
+
+* `access_endpoints` - (Optional) Set of configuration blocks defining the interface VPC endpoints. Users of the stack can connect to AppStream 2.0 only through the specified endpoints.
+  See [`access_endpoints`](#access_endpoints) below.
+* `application_settings` - (Optional) Settings for application settings persistence.
+  See [`application_settings`](#application_settings) below.
+* `description` - (Optional) Description for the AppStream stack.
+* `display_name` - (Optional) Stack name to display.
+* `embed_host_domains` - (Optional) Domains where AppStream 2.0 streaming sessions can be embedded in an iframe. You must approve the domains that you want to host embedded AppStream 2.0 streaming sessions.
+* `feedback_url` - (Optional) URL that users are redirected to after they click the Send Feedback link. If no URL is specified, no Send Feedback link is displayed.
+* `redirect_url` - (Optional) URL that users are redirected to after their streaming session ends.
+* `storage_connectors` - (Optional) Configuration block for the storage connectors to enable.
+  See [`storage_connectors`](#storage_connectors) below.
+* `user_settings` - (Optional) Configuration block for the actions that are enabled or disabled for users during their streaming sessions. If not provided, these settings are configured automatically by AWS. If provided, the Terraform configuration should include a block for each configurable action.
+  See [`user_settings`](#user_settings) below.
+* `streaming_experience_settings` - (Optional) The streaming protocol that you want your stack to prefer. This can be `UDP` or `TCP`. Currently, `UDP` is only supported in the Windows native client.
+  See [`streaming_experience_settings`](#streaming_experience_settings) below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `access_endpoints`
+
+* `endpoint_type` - (Required) Type of the interface endpoint.
+  See the [`AccessEndpoint` AWS API documentation](https://docs.aws.amazon.com/appstream2/latest/APIReference/API_AccessEndpoint.html) for valid values.
+* `vpce_id` - (Optional) ID of the VPC in which the interface endpoint is used.
+
+### `application_settings`
+
+* `enabled` - (Required) Whether application settings should be persisted.
+* `settings_group` - (Optional) Name of the settings group.
+  Required when `enabled` is `true`.
+  Can be up to 100 characters.
+
+### `storage_connectors`
+
+* `connector_type` - (Required) Type of storage connector.
+  Valid values are `HOMEFOLDERS`, `GOOGLE_DRIVE`, or `ONE_DRIVE`.
+* `domains` - (Optional) Names of the domains for the account.
+* `resource_identifier` - (Optional) ARN of the storage connector.
+
+### `user_settings`
+
+* `action` - (Required) Action that is enabled or disabled.
+  Valid values are `CLIPBOARD_COPY_FROM_LOCAL_DEVICE`, `CLIPBOARD_COPY_TO_LOCAL_DEVICE`, `FILE_UPLOAD`, `FILE_DOWNLOAD`, `PRINTING_TO_LOCAL_DEVICE`, `DOMAIN_PASSWORD_SIGNIN`, or `DOMAIN_SMART_CARD_SIGNIN`.
+* `permission` - (Required) Whether the action is enabled or disabled.
+  Valid values are `ENABLED` or `DISABLED`.
+
+### `streaming_experience_settings`
+
+* `preferred_protocol` - (Optional) The preferred protocol that you want to use while streaming your application.
+  Valid values are `TCP` and `UDP`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the appstream stack.
+* `created_time` - Date and time, in UTC and extended RFC 3339 format, when the stack was created.
+* `id` - Unique ID of the appstream stack.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appstream_stack` using the id. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_appstream_stack` using the id.
For example:
+
+```console
+% terraform import aws_appstream_stack.example stackID
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appstream_user.html.markdown b/website/docs/cdktf/python/r/appstream_user.html.markdown
new file mode 100644
index 00000000000..ca1ce526c4a
--- /dev/null
+++ b/website/docs/cdktf/python/r/appstream_user.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_user"
+description: |-
+  Provides an AppStream user
+---
+
+
+
+# Resource: aws_appstream_user
+
+Provides an AppStream user.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appstream_user import AppstreamUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppstreamUser(self, "example",
+            authentication_type="USERPOOL",
+            first_name="FIRST NAME",
+            last_name="LAST NAME",
+            user_name="EMAIL"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `authentication_type` - (Required) Authentication type for the user. Although `API`, `SAML`, and `USERPOOL` are listed as valid values, you must specify `USERPOOL`.
+* `user_name` - (Required) Email address of the user.
+
+The following arguments are optional:
+
+* `enabled` - (Optional) Whether the user in the user pool is enabled.
+* `first_name` - (Optional) First name, or given name, of the user.
+* `last_name` - (Optional) Last name, or surname, of the user.
+* `send_email_notification` - (Optional) Whether to send an email notification.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the appstream user.
+* `created_time` - Date and time, in UTC and extended RFC 3339 format, when the user was created.
+* `id` - Unique ID of the appstream user.
+* `status` - Status of the user in the user pool.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appstream_user` using the `user_name` and `authentication_type` separated by a slash (`/`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_appstream_user` using the `user_name` and `authentication_type` separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_appstream_user.example UserName/AuthenticationType
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appstream_user_stack_association.html.markdown b/website/docs/cdktf/python/r/appstream_user_stack_association.html.markdown
new file mode 100644
index 00000000000..942266ff426
--- /dev/null
+++ b/website/docs/cdktf/python/r/appstream_user_stack_association.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_user_stack_association"
+description: |-
+  Manages an AppStream User Stack association.
+---
+
+
+
+# Resource: aws_appstream_user_stack_association
+
+Manages an AppStream User Stack association.
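+
+Because each association ties exactly one user to one stack, giving a user access to several stacks takes one association resource per stack, as in the hypothetical sketch below (the stack names and user are illustrative assumptions, not part of the provider documentation):
+
+```python
+# Hypothetical sketch: one association resource per stack for the same user.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appstream_user_stack_association import AppstreamUserStackAssociation
+class UserAssociationsSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The stacks are assumed to exist already; adjust to your configuration.
+        for i, stack_name in enumerate(["stack-a", "stack-b"]):
+            AppstreamUserStackAssociation(self, "assoc_{}".format(i),
+                authentication_type="USERPOOL",
+                stack_name=stack_name,
+                user_name="user@example.com"
+            )
+```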
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appstream_stack import AppstreamStack
+from imports.aws.appstream_user import AppstreamUser
+from imports.aws.appstream_user_stack_association import AppstreamUserStackAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = AppstreamStack(self, "test",
+            name="STACK NAME"
+        )
+        aws_appstream_user_test = AppstreamUser(self, "test_1",
+            authentication_type="USERPOOL",
+            user_name="EMAIL"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appstream_user_test.override_logical_id("test")
+        aws_appstream_user_stack_association_test = AppstreamUserStackAssociation(self, "test_2",
+            authentication_type=Token.as_string(aws_appstream_user_test.authentication_type),
+            stack_name=test.name,
+            user_name=Token.as_string(aws_appstream_user_test.user_name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appstream_user_stack_association_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `authentication_type` - (Required) Authentication type for the user.
+* `stack_name` - (Required) Name of the stack that is associated with the user.
+* `user_name` - (Required) Email address of the user who is associated with the stack.
+
+The following arguments are optional:
+
+* `send_email_notification` - (Optional) Whether a welcome email is sent to a user after the user is created in the user pool.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the appstream User Stack association.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppStream User Stack Association using the `user_name`, `authentication_type`, and `stack_name`, separated by a slash (`/`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AppStream User Stack Association using the `user_name`, `authentication_type`, and `stack_name`, separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_appstream_user_stack_association.example userName/authenticationType/stackName
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appsync_api_cache.html.markdown b/website/docs/cdktf/python/r/appsync_api_cache.html.markdown
new file mode 100644
index 00000000000..664aecd4ad3
--- /dev/null
+++ b/website/docs/cdktf/python/r/appsync_api_cache.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "AppSync"
+layout: "aws"
+page_title: "AWS: aws_appsync_api_cache"
+description: |-
+  Provides an AppSync API Cache.
+---
+
+
+
+# Resource: aws_appsync_api_cache
+
+Provides an AppSync API Cache.
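+
+When `api_caching_behavior` is `PER_RESOLVER_CACHING`, only resolvers that declare their own `caching_config` are served from the cache. The following is a minimal sketch of that pairing; the data source name `example_ds`, the field, and the templates are illustrative assumptions, not from the provider documentation:
+
+```python
+# Hypothetical sketch: per-resolver caching; "example_ds" is an assumed,
+# pre-existing data source name.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appsync_api_cache import AppsyncApiCache
+from imports.aws.appsync_graphql_api import AppsyncGraphqlApi
+from imports.aws.appsync_resolver import AppsyncResolver, AppsyncResolverCachingConfig
+class PerResolverCachingSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        api = AppsyncGraphqlApi(self, "example",
+            authentication_type="API_KEY",
+            name="example"
+        )
+        AppsyncApiCache(self, "cache",
+            api_id=api.id,
+            api_caching_behavior="PER_RESOLVER_CACHING",
+            ttl=900,
+            type="SMALL"
+        )
+        # Only this resolver opts into caching via caching_config.
+        AppsyncResolver(self, "cached",
+            api_id=api.id,
+            type="Query",
+            field="singlePost",
+            data_source="example_ds",
+            caching_config=AppsyncResolverCachingConfig(
+                caching_keys=["$context.arguments.id"],
+                ttl=900
+            ),
+            request_template="{}",
+            response_template="$util.toJson($ctx.result)"
+        )
+```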
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_api_cache import AppsyncApiCache +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AppsyncGraphqlApi(self, "example", + authentication_type="API_KEY", + name="example" + ) + aws_appsync_api_cache_example = AppsyncApiCache(self, "example_1", + api_caching_behavior="FULL_REQUEST_CACHING", + api_id=example.id, + ttl=900, + type="LARGE" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_api_cache_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) GraphQL API ID. +* `api_caching_behavior` - (Required) Caching behavior. Valid values are `FULL_REQUEST_CACHING` and `PER_RESOLVER_CACHING`. +* `type` - (Required) Cache instance type. Valid values are `SMALL`, `MEDIUM`, `LARGE`, `XLARGE`, `LARGE_2X`, `LARGE_4X`, `LARGE_8X`, `LARGE_12X`, `T2_SMALL`, `T2_MEDIUM`, `R4_LARGE`, `R4_XLARGE`, `R4_2XLARGE`, `R4_4XLARGE`, `R4_8XLARGE`. +* `ttl` - (Required) TTL in seconds for cache entries. +* `at_rest_encryption_enabled` - (Optional) At-rest encryption flag for cache. You cannot update this setting after creation. +* `transit_encryption_enabled` - (Optional) Transit encryption flag when connecting to cache. You cannot update this setting after creation. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AppSync API ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appsync_api_cache` using the AppSync API ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_appsync_api_cache` using the AppSync API ID. For example: + +```console +% terraform import aws_appsync_api_cache.example xxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appsync_api_key.html.markdown b/website/docs/cdktf/python/r/appsync_api_key.html.markdown new file mode 100644 index 00000000000..05e18f80076 --- /dev/null +++ b/website/docs/cdktf/python/r/appsync_api_key.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_api_key" +description: |- + Provides an AppSync API Key. +--- + + + +# Resource: aws_appsync_api_key + +Provides an AppSync API Key. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.appsync_api_key import AppsyncApiKey +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AppsyncGraphqlApi(self, "example", + authentication_type="API_KEY", + name="example" + ) + aws_appsync_api_key_example = AppsyncApiKey(self, "example_1", + api_id=example.id, + expires="2018-05-03T04:00:00Z" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_api_key_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) ID of the associated AppSync API +* `description` - (Optional) API key description. Defaults to "Managed by Terraform". +* `expires` - (Optional) RFC3339 string representation of the expiry date. Rounded down to nearest hour. By default, it is 7 days from the date of creation. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - API Key ID (Formatted as ApiId:Key) +* `key` - API key + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appsync_api_key` using the AppSync API ID and key separated by `:`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_appsync_api_key` using the AppSync API ID and key separated by `:`. For example: + +```console +% terraform import aws_appsync_api_key.example xxxxx:yyyyy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appsync_datasource.html.markdown b/website/docs/cdktf/python/r/appsync_datasource.html.markdown new file mode 100644 index 00000000000..3db6722a8db --- /dev/null +++ b/website/docs/cdktf/python/r/appsync_datasource.html.markdown @@ -0,0 +1,221 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_datasource" +description: |- + Provides an AppSync Data Source. +--- + + + +# Resource: aws_appsync_datasource + +Provides an AppSync Data Source. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.appsync_datasource import AppsyncDatasource +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.dynamodb_table import DynamodbTable +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AppsyncGraphqlApi(self, "example", + authentication_type="API_KEY", + name="tf_appsync_example" + ) + aws_dynamodb_table_example = DynamodbTable(self, "example_1", + attribute=[DynamodbTableAttribute( + name="UserId", + type="S" + ) + ], + hash_key="UserId", + name="example", + read_capacity=1, + write_capacity=1 + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dynamodb_table_example.override_logical_id("example") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["appsync.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_3", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["dynamodb:*"], + effect="Allow", + resources=[Token.as_string(aws_dynamodb_table_example.arn)] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_iam_role_example = IamRole(self, "example_4", + assume_role_policy=Token.as_string(assume_role.json), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_iam_role_policy_example = IamRolePolicy(self, "example_5", + name="example", + policy=Token.as_string(data_aws_iam_policy_document_example.json), + role=Token.as_string(aws_iam_role_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_example.override_logical_id("example") + aws_appsync_datasource_example = AppsyncDatasource(self, "example_6", + api_id=example.id, + dynamodb_config=AppsyncDatasourceDynamodbConfig( + table_name=Token.as_string(aws_dynamodb_table_example.name) + ), + name="tf_appsync_example", + service_role_arn=Token.as_string(aws_iam_role_example.arn), + type="AMAZON_DYNAMODB" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_datasource_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API ID for the GraphQL API for the data source. +* `name` - (Required) User-supplied name for the data source. +* `type` - (Required) Type of the Data Source. Valid values: `AWS_LAMBDA`, `AMAZON_DYNAMODB`, `AMAZON_ELASTICSEARCH`, `HTTP`, `NONE`, `RELATIONAL_DATABASE`, `AMAZON_EVENTBRIDGE`, `AMAZON_OPENSEARCH_SERVICE`. +* `description` - (Optional) Description of the data source. +* `dynamodb_config` - (Optional) DynamoDB settings. 
See [DynamoDB Config](#dynamodb-config)
+* `elasticsearch_config` - (Optional) Amazon Elasticsearch settings. See [ElasticSearch Config](#elasticsearch-config)
+* `event_bridge_config` - (Optional) AWS EventBridge settings. See [Event Bridge Config](#event-bridge-config)
+* `http_config` - (Optional) HTTP settings. See [HTTP Config](#http-config)
+* `lambda_config` - (Optional) AWS Lambda settings. See [Lambda Config](#lambda-config)
+* `opensearchservice_config` - (Optional) Amazon OpenSearch Service settings. See [OpenSearch Service Config](#opensearch-service-config)
+* `relational_database_config` - (Optional) AWS RDS settings. See [Relational Database Config](#relational-database-config)
+* `service_role_arn` - (Optional) IAM service role ARN for the data source.
+
+### DynamoDB Config
+
+This argument supports the following arguments:
+
+* `table_name` - (Required) Name of the DynamoDB table.
+* `region` - (Optional) AWS region of the DynamoDB table. Defaults to current region.
+* `use_caller_credentials` - (Optional) Set to `true` to use Amazon Cognito credentials with this data source.
+* `delta_sync_config` - (Optional) The DeltaSyncConfig for a versioned data source. See [Delta Sync Config](#delta-sync-config)
+* `versioned` - (Optional) Whether to use Conflict Detection and Resolution with this data source.
+
+### Delta Sync Config
+
+* `base_table_ttl` - (Optional) The number of minutes that an Item is stored in the data source.
+* `delta_sync_table_name` - (Required) The table name.
+* `delta_sync_table_ttl` - (Optional) The number of minutes that a Delta Sync log entry is stored in the Delta Sync table.
+
+### ElasticSearch Config
+
+This argument supports the following arguments:
+
+* `endpoint` - (Required) HTTP endpoint of the Elasticsearch domain.
+* `region` - (Optional) AWS region of Elasticsearch domain. Defaults to current region.
+
+### Event Bridge Config
+
+This argument supports the following arguments:
+
+* `event_bus_arn` - (Required) ARN for the EventBridge bus.
+
+### HTTP Config
+
+This argument supports the following arguments:
+
+* `endpoint` - (Required) HTTP URL.
+* `authorization_config` - (Optional) Authorization configuration in case the HTTP endpoint requires authorization. See [Authorization Config](#authorization-config).
+
+#### Authorization Config
+
+This argument supports the following arguments:
+
+* `authorization_type` - (Optional) Authorization type that the HTTP endpoint requires. Default value is `AWS_IAM`.
+* `aws_iam_config` - (Optional) Identity and Access Management (IAM) settings. See [AWS IAM Config](#aws-iam-config).
+
+##### AWS IAM Config
+
+This argument supports the following arguments:
+
+* `signing_region` - (Optional) Signing Amazon Web Services Region for IAM authorization.
+* `signing_service_name`- (Optional) Signing service name for IAM authorization.
+
+### Lambda Config
+
+This argument supports the following arguments:
+
+* `function_arn` - (Required) ARN for the Lambda function.
+
+### OpenSearch Service Config
+
+This argument supports the following arguments:
+
+* `endpoint` - (Required) HTTP endpoint of the OpenSearch domain.
+* `region` - (Optional) AWS region of the OpenSearch domain. Defaults to current region.
+
+### Relational Database Config
+
+This argument supports the following arguments:
+
+* `http_endpoint_config` - (Required) Amazon RDS HTTP endpoint configuration. See [HTTP Endpoint Config](#http-endpoint-config).
+* `source_type` - (Optional) Source type for the relational database.
Valid values: `RDS_HTTP_ENDPOINT`.
+
+#### HTTP Endpoint Config
+
+This argument supports the following arguments:
+
+* `db_cluster_identifier` - (Required) Amazon RDS cluster identifier.
+* `aws_secret_store_arn` - (Required) AWS secret store ARN for database credentials.
+* `database_name` - (Optional) Logical database name.
+* `region` - (Optional) AWS Region for RDS HTTP endpoint. Defaults to current region.
+* `schema` - (Optional) Logical schema name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appsync_datasource` using the `api_id`, a hyphen, and `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_appsync_datasource` using the `api_id`, a hyphen, and `name`. For example:
+
+```console
+% terraform import aws_appsync_datasource.example abcdef123456-example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appsync_domain_name.html.markdown b/website/docs/cdktf/python/r/appsync_domain_name.html.markdown
new file mode 100644
index 00000000000..98dcdaa2a8b
--- /dev/null
+++ b/website/docs/cdktf/python/r/appsync_domain_name.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "AppSync"
+layout: "aws"
+page_title: "AWS: aws_appsync_domain_name"
+description: |-
+  Provides an AppSync Domain Name.
+---
+
+
+
+# Resource: aws_appsync_domain_name
+
+Provides an AppSync Domain Name.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appsync_domain_name import AppsyncDomainName
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppsyncDomainName(self, "example",
+            certificate_arn=Token.as_string(aws_acm_certificate_example.arn),
+            domain_name="api.example.com"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate_arn` - (Required) ARN of the certificate. This can be a Certificate Manager (ACM) certificate or an Identity and Access Management (IAM) server certificate. The certificate must reside in us-east-1.
+* `description` - (Optional) A description of the Domain Name.
+* `domain_name` - (Required) Domain name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Appsync Domain Name.
+* `appsync_domain_name` - Domain name that AppSync provides.
+* `hosted_zone_id` - ID of your Amazon Route 53 hosted zone.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appsync_domain_name` using the AppSync domain name.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_appsync_domain_name` using the AppSync domain name. For example: + +```console +% terraform import aws_appsync_domain_name.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appsync_domain_name_api_association.html.markdown b/website/docs/cdktf/python/r/appsync_domain_name_api_association.html.markdown new file mode 100644 index 00000000000..a611a0afce7 --- /dev/null +++ b/website/docs/cdktf/python/r/appsync_domain_name_api_association.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_domain_name_api_association" +description: |- + Provides an AppSync API Association. +--- + + + +# Resource: aws_appsync_domain_name_api_association + +Provides an AppSync API Association. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_domain_name_api_association import AppsyncDomainNameApiAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppsyncDomainNameApiAssociation(self, "example", + api_id=Token.as_string(aws_appsync_graphql_api_example.id), + domain_name=Token.as_string(aws_appsync_domain_name_example.domain_name) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API ID. +* `domain_name` - (Required) Appsync domain name. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Appsync domain name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appsync_domain_name_api_association` using the AppSync domain name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_appsync_domain_name_api_association` using the AppSync domain name. For example: + +```console +% terraform import aws_appsync_domain_name_api_association.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appsync_function.html.markdown b/website/docs/cdktf/python/r/appsync_function.html.markdown new file mode 100644 index 00000000000..608f89c311b --- /dev/null +++ b/website/docs/cdktf/python/r/appsync_function.html.markdown @@ -0,0 +1,147 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_function" +description: |- + Provides an AppSync Function. +--- + + + +# Resource: aws_appsync_function + +Provides an AppSync Function. 
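+
+A function runs only as a step of a pipeline resolver, so a common follow-up is wiring its `function_id` into an `aws_appsync_resolver` with `kind = "PIPELINE"`. A minimal, hypothetical sketch follows; the API ID, function ID, field, and templates are assumptions passed in from elsewhere in your configuration:
+
+```python
+# Hypothetical sketch: execute an existing function from a pipeline resolver.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.appsync_resolver import AppsyncResolver, AppsyncResolverPipelineConfig
+class PipelineResolverSketch(TerraformStack):
+    def __init__(self, scope, name, *, api_id, function_id):
+        super().__init__(scope, name)
+        AppsyncResolver(self, "pipeline",
+            api_id=api_id,
+            type="Query",
+            field="singlePost",
+            kind="PIPELINE",
+            pipeline_config=AppsyncResolverPipelineConfig(
+                # The list order defines the execution order of the functions.
+                functions=[function_id]
+            ),
+            request_template="{}",
+            response_template="$util.toJson($ctx.result)"
+        )
+```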
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_datasource import AppsyncDatasource +from imports.aws.appsync_function import AppsyncFunction +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AppsyncGraphqlApi(self, "example", + authentication_type="API_KEY", + name="example", + schema="type Mutation {\n putPost(id: ID!, title: String!): Post\n}\n\ntype Post {\n id: ID!\n title: String!\n}\n\ntype Query {\n singlePost(id: ID!): Post\n}\n\nschema {\n query: Query\n mutation: Mutation\n}\n\n" + ) + aws_appsync_datasource_example = AppsyncDatasource(self, "example_1", + api_id=example.id, + http_config=AppsyncDatasourceHttpConfig( + endpoint="http://example.com" + ), + name="example", + type="HTTP" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_datasource_example.override_logical_id("example") + aws_appsync_function_example = AppsyncFunction(self, "example_2", + api_id=example.id, + data_source=Token.as_string(aws_appsync_datasource_example.name), + name="example", + request_mapping_template="{\n \"version\": \"2018-05-29\",\n \"method\": \"GET\",\n \"resourcePath\": \"/\",\n \"params\":{\n \"headers\": $utils.http.copyheaders($ctx.request.headers)\n }\n}\n\n", + response_mapping_template="#if($ctx.result.statusCode == 200)\n $ctx.result.body\n#else\n $utils.appendError($ctx.result.body, $ctx.result.statusCode)\n#end\n\n" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_function_example.override_logical_id("example") +``` + +## Example Usage With Code + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_function import AppsyncFunction +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppsyncFunction(self, "example", + api_id=Token.as_string(aws_appsync_graphql_api_example.id), + code=Token.as_string(Fn.file("some-code-dir")), + data_source=Token.as_string(aws_appsync_datasource_example.name), + name="example", + runtime=AppsyncFunctionRuntime( + name="APPSYNC_JS", + runtime_version="1.0.0" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) ID of the associated AppSync API. +* `code` - (Optional) The function code that contains the request and response functions. When code is used, the runtime is required. The runtime value must be APPSYNC_JS. +* `data_source` - (Required) Function data source name. +* `max_batch_size` - (Optional) Maximum batching size for a resolver. Valid values are between `0` and `2000`. +* `name` - (Required) Function name. The function name does not have to be unique. +* `request_mapping_template` - (Optional) Function request mapping template. 
Functions support only the 2018-05-29 version of the request mapping template. +* `response_mapping_template` - (Optional) Function response mapping template. +* `description` - (Optional) Function description. +* `runtime` - (Optional) Describes a runtime used by an AWS AppSync pipeline resolver or AWS AppSync function. Specifies the name and version of the runtime to use. Note that if a runtime is specified, code must also be specified. See [Runtime](#runtime). +* `sync_config` - (Optional) Describes a Sync configuration for a resolver. See [Sync Config](#sync-config). +* `function_version` - (Optional) Version of the request mapping template. Currently the supported value is `2018-05-29`. Does not apply when specifying `code`. + +### Runtime + +This argument supports the following arguments: + +* `name` - (Optional) The name of the runtime to use. Currently, the only allowed value is `APPSYNC_JS`. +* `runtime_version` - (Optional) The version of the runtime to use. Currently, the only allowed version is `1.0.0`. + +### Sync Config + +This argument supports the following arguments: + +* `conflict_detection` - (Optional) Conflict Detection strategy to use. Valid values are `NONE` and `VERSION`. +* `conflict_handler` - (Optional) Conflict Resolution strategy to perform in the event of a conflict. Valid values are `NONE`, `OPTIMISTIC_CONCURRENCY`, `AUTOMERGE`, and `LAMBDA`. +* `lambda_conflict_handler_config` - (Optional) Lambda Conflict Handler Config when configuring `LAMBDA` as the Conflict Handler. See [Lambda Conflict Handler Config](#lambda-conflict-handler-config). + +#### Lambda Conflict Handler Config + +This argument supports the following arguments: + +* `lambda_conflict_handler_arn` - (Optional) ARN for the Lambda function to use as the Conflict Handler. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - API Function ID (Formatted as ApiId-FunctionId) +* `arn` - ARN of the Function object. +* `function_id` - Unique ID representing the Function object. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appsync_function` using the AppSync API ID and Function ID separated by `-`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_appsync_function` using the AppSync API ID and Function ID separated by `-`. For example: + +```console +% terraform import aws_appsync_function.example xxxxx-yyyyy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appsync_graphql_api.html.markdown b/website/docs/cdktf/python/r/appsync_graphql_api.html.markdown new file mode 100644 index 00000000000..79fe8392c4a --- /dev/null +++ b/website/docs/cdktf/python/r/appsync_graphql_api.html.markdown @@ -0,0 +1,380 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_graphql_api" +description: |- + Provides an AppSync GraphQL API. +--- + + + +# Resource: aws_appsync_graphql_api + +Provides an AppSync GraphQL API. 
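+
+Once the API exists, its generated endpoints are exposed through the computed `uris` map (see the Attribute Reference below). A minimal sketch, assuming `Fn.lookup` to read the `GRAPHQL` entry and surface it as a stack output:
+
+```python
+# Hypothetical sketch: publish the GraphQL endpoint as a Terraform output.
+from constructs import Construct
+from cdktf import Fn, Token, TerraformOutput, TerraformStack
+from imports.aws.appsync_graphql_api import AppsyncGraphqlApi
+class EndpointOutputSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        api = AppsyncGraphqlApi(self, "example",
+            authentication_type="API_KEY",
+            name="example"
+        )
+        TerraformOutput(self, "graphql_endpoint",
+            value=Token.as_string(Fn.lookup(api.uris, "GRAPHQL", ""))
+        )
+```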
+ +## Example Usage + +### API Key Authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppsyncGraphqlApi(self, "example", + authentication_type="API_KEY", + name="example" + ) +``` + +### AWS IAM Authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppsyncGraphqlApi(self, "example", + authentication_type="AWS_IAM", + name="example" + ) +``` + +### AWS Cognito User Pool Authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppsyncGraphqlApi(self, "example", + authentication_type="AMAZON_COGNITO_USER_POOLS", + name="example", + user_pool_config=AppsyncGraphqlApiUserPoolConfig( + aws_region=Token.as_string(current.name), + default_action="DENY", + user_pool_id=Token.as_string(aws_cognito_user_pool_example.id) + ) + ) +``` + +### OpenID Connect Authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppsyncGraphqlApi(self, "example", + authentication_type="OPENID_CONNECT", + name="example", + openid_connect_config=AppsyncGraphqlApiOpenidConnectConfig( + issuer="https://example.com" + ) + ) +``` + +### AWS Lambda Authorizer Authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.appsync_graphql_api import AppsyncGraphqlApi
+from imports.aws.lambda_permission import LambdaPermission
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AppsyncGraphqlApi(self, "example",
+            authentication_type="AWS_LAMBDA",
+            lambda_authorizer_config=AppsyncGraphqlApiLambdaAuthorizerConfig(
+                authorizer_uri="arn:aws:lambda:us-east-1:123456789012:function:custom_lambda_authorizer"
+            ),
+            name="example"
+        )
+        LambdaPermission(self, "appsync_lambda_authorizer",
+            action="lambda:InvokeFunction",
+            function_name="custom_lambda_authorizer",
+            principal="appsync.amazonaws.com",
+            source_arn=example.arn,
+            statement_id="appsync_lambda_authorizer"
+        )
+```
+
+### With Multiple Authentication Providers
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appsync_graphql_api import AppsyncGraphqlApi
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppsyncGraphqlApi(self, "example",
+            additional_authentication_provider=[AppsyncGraphqlApiAdditionalAuthenticationProvider(
+                authentication_type="AWS_IAM"
+            )
+            ],
+            authentication_type="API_KEY",
+            name="example"
+        )
+```
+
+### With Schema
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appsync_graphql_api import AppsyncGraphqlApi
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AppsyncGraphqlApi(self, "example",
+            authentication_type="AWS_IAM",
+            name="example",
+            schema="schema {\n\tquery: Query\n}\ntype Query {\n test: Int\n}\n\n"
+        )
+```
+
+### Enabling Logging
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appsync_graphql_api import AppsyncGraphqlApi
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+class MyConvertedCode(TerraformStack):
+    # The API's authentication type and name are passed in as parameters;
+    # "api_name" avoids clashing with the construct's own "name" argument.
+    def __init__(self, scope, name, *, authentication_type, api_name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["appsync.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        example = IamRole(self, "example",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="example"
+        )
+        aws_iam_role_policy_attachment_example = IamRolePolicyAttachment(self, "example_2",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AWSAppSyncPushToCloudWatchLogs",
+            role=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_example.override_logical_id("example")
+        aws_appsync_graphql_api_example = AppsyncGraphqlApi(self, "example_3",
+            log_config=AppsyncGraphqlApiLogConfig(
+                cloudwatch_logs_role_arn=example.arn,
+                field_log_level="ERROR"
+            ),
+            authentication_type=authentication_type,
+            name=api_name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_appsync_graphql_api_example.override_logical_id("example")
+```
+
+### Associate Web ACL (v2)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.appsync_graphql_api import AppsyncGraphqlApi
+from imports.aws.wafv2_web_acl import Wafv2WebAcl
+from imports.aws.wafv2_web_acl_association import Wafv2WebAclAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AppsyncGraphqlApi(self, "example",
+            authentication_type="API_KEY",
+            name="example"
+        )
+        aws_wafv2_web_acl_example = Wafv2WebAcl(self, "example_1",
+            default_action=Wafv2WebAclDefaultAction(
+                allow=Wafv2WebAclDefaultActionAllow()
+            ),
+            description="Example of a managed rule.",
+            name="managed-rule-example",
+            rule=[Wafv2WebAclRule(
+                name="rule-1",
+                override_action=Wafv2WebAclRuleOverrideAction(
+                    block=[{}]
+                ),
+                priority=1,
+                statement=Wafv2WebAclRuleStatement(
+                    managed_rule_group_statement=Wafv2WebAclRuleStatementManagedRuleGroupStatement(
+                        name="AWSManagedRulesCommonRuleSet",
+                        vendor_name="AWS"
+                    )
+                ),
+                visibility_config=Wafv2WebAclRuleVisibilityConfig(
+                    cloudwatch_metrics_enabled=False,
+                    metric_name="friendly-rule-metric-name",
+                    sampled_requests_enabled=False
+                )
+            )
+            ],
+            scope="REGIONAL",
+            visibility_config=Wafv2WebAclVisibilityConfig(
+                cloudwatch_metrics_enabled=False,
+                metric_name="friendly-metric-name",
+                sampled_requests_enabled=False
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_wafv2_web_acl_example.override_logical_id("example")
+        aws_wafv2_web_acl_association_example = Wafv2WebAclAssociation(self, "example_2",
+            resource_arn=example.arn,
+            web_acl_arn=Token.as_string(aws_wafv2_web_acl_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_wafv2_web_acl_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `authentication_type` - (Required) Authentication type. Valid values: `API_KEY`, `AWS_IAM`, `AMAZON_COGNITO_USER_POOLS`, `OPENID_CONNECT`, `AWS_LAMBDA`
+* `name` - (Required) User-supplied name for the GraphqlApi.
+* `log_config` - (Optional) Nested argument containing logging configuration. Defined below.
+* `openid_connect_config` - (Optional) Nested argument containing OpenID Connect configuration. Defined below.
+* `user_pool_config` - (Optional) Amazon Cognito User Pool configuration. Defined below.
+* `lambda_authorizer_config` - (Optional) Nested argument containing Lambda authorizer configuration. Defined below.
+* `schema` - (Optional) Schema definition, in GraphQL schema language format.
Terraform cannot perform drift detection of this configuration. +* `additional_authentication_provider` - (Optional) One or more additional authentication providers for the GraphqlApi. Defined below. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `xray_enabled` - (Optional) Whether tracing with X-ray is enabled. Defaults to false. +* `visibility` - (Optional) Sets the value of the GraphQL API to public (`GLOBAL`) or private (`PRIVATE`). If no value is provided, the visibility will be set to `GLOBAL` by default. This value cannot be changed once the API has been created. + +### log_config + +This argument supports the following arguments: + +* `cloudwatch_logs_role_arn` - (Required) Amazon Resource Name of the service role that AWS AppSync will assume to publish to Amazon CloudWatch logs in your account. +* `field_log_level` - (Required) Field logging level. Valid values: `ALL`, `ERROR`, `NONE`. +* `exclude_verbose_content` - (Optional) Set to TRUE to exclude sections that contain information such as headers, context, and evaluated mapping templates, regardless of logging level. Valid values: `true`, `false`. Default value: `false` + +### additional_authentication_provider + +This argument supports the following arguments: + +* `authentication_type` - (Required) Authentication type. Valid values: `API_KEY`, `AWS_IAM`, `AMAZON_COGNITO_USER_POOLS`, `OPENID_CONNECT`, `AWS_LAMBDA` +* `openid_connect_config` - (Optional) Nested argument containing OpenID Connect configuration. Defined below. +* `user_pool_config` - (Optional) Amazon Cognito User Pool configuration. Defined below. + +### openid_connect_config + +This argument supports the following arguments: + +* `issuer` - (Required) Issuer for the OpenID Connect configuration. The issuer returned by discovery MUST exactly match the value of iss in the ID Token. +* `auth_ttl` - (Optional) Number of milliseconds a token is valid after being authenticated. +* `client_id` - (Optional) Client identifier of the Relying party at the OpenID identity provider. This identifier is typically obtained when the Relying party is registered with the OpenID identity provider. You can specify a regular expression so the AWS AppSync can validate against multiple client identifiers at a time. +* `iat_ttl` - (Optional) Number of milliseconds a token is valid after being issued to a user. + +### user_pool_config + +This argument supports the following arguments: + +* `default_action` - (Required only if Cognito is used as the default auth provider) Action that you want your GraphQL API to take when a request that uses Amazon Cognito User Pool authentication doesn't match the Amazon Cognito User Pool configuration. Valid: `ALLOW` and `DENY` +* `user_pool_id` - (Required) User pool ID. +* `app_id_client_regex` - (Optional) Regular expression for validating the incoming Amazon Cognito User Pool app client ID. +* `aws_region` - (Optional) AWS region in which the user pool was created. + +### lambda_authorizer_config + +This argument supports the following arguments: + +* `authorizer_uri` - (Required) ARN of the Lambda function to be called for authorization. 
Note: This Lambda function must have a resource-based policy assigned to it, to allow `lambda:InvokeFunction` from service principal `appsync.amazonaws.com`.
+* `authorizer_result_ttl_in_seconds` - (Optional) Number of seconds a response should be cached for. The default is 5 minutes (300 seconds). The Lambda function can override this by returning a `ttlOverride` key in its response. A value of 0 disables caching of responses. Minimum value of 0. Maximum value of 3600.
+* `identity_validation_expression` - (Optional) Regular expression for validation of tokens before the Lambda function is called.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - API ID
+* `arn` - ARN
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uris` - Map of URIs associated with the API. For example, `uris["GRAPHQL"] = https://ID.appsync-api.REGION.amazonaws.com/graphql`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppSync GraphQL API using the GraphQL API ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AppSync GraphQL API using the GraphQL API ID. For example:
+
+```console
+% terraform import aws_appsync_graphql_api.example 0123456789
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/appsync_resolver.html.markdown b/website/docs/cdktf/python/r/appsync_resolver.html.markdown
new file mode 100644
index 00000000000..6b5dfdfc9d3
--- /dev/null
+++ b/website/docs/cdktf/python/r/appsync_resolver.html.markdown
@@ -0,0 +1,170 @@
+---
+subcategory: "AppSync"
+layout: "aws"
+page_title: "AWS: aws_appsync_resolver"
+description: |-
+  Provides an AppSync Resolver.
+---
+
+
+
+# Resource: aws_appsync_resolver
+
+Provides an AppSync Resolver.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.appsync_datasource import AppsyncDatasource +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +from imports.aws.appsync_resolver import AppsyncResolver +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = AppsyncGraphqlApi(self, "test", + authentication_type="API_KEY", + name="tf-example", + schema="type Mutation {\n\tputPost(id: ID!, title: String!): Post\n}\n\ntype Post {\n\tid: ID!\n\ttitle: String!\n}\n\ntype Query {\n\tsinglePost(id: ID!): Post\n}\n\nschema {\n\tquery: Query\n\tmutation: Mutation\n}\n\n" + ) + AppsyncResolver(self, "Mutation_pipelineTest", + api_id=test.id, + field="pipelineTest", + kind="PIPELINE", + pipeline_config=AppsyncResolverPipelineConfig( + functions=[test1.function_id, test2.function_id, test3.function_id] + ), + request_template="{}", + response_template="$util.toJson($ctx.result)", + type="Mutation" + ) + aws_appsync_datasource_test = AppsyncDatasource(self, "test_2", + api_id=test.id, + http_config=AppsyncDatasourceHttpConfig( + endpoint="http://example.com" + ), + name="tf_example", + type="HTTP" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_datasource_test.override_logical_id("test") + aws_appsync_resolver_test = AppsyncResolver(self, "test_3", + api_id=test.id, + caching_config=AppsyncResolverCachingConfig( + caching_keys=["$context.identity.sub", "$context.arguments.id"], + ttl=60 + ), + data_source=Token.as_string(aws_appsync_datasource_test.name), + field="singlePost", + request_template="{\n \"version\": \"2018-05-29\",\n \"method\": \"GET\",\n \"resourcePath\": \"/\",\n \"params\":{\n \"headers\": $utils.http.copyheaders($ctx.request.headers)\n }\n}\n\n", + response_template="#if($ctx.result.statusCode == 200)\n $ctx.result.body\n#else\n $utils.appendError($ctx.result.body, $ctx.result.statusCode)\n#end\n\n", + type="Query" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_resolver_test.override_logical_id("test") +``` + +## Example Usage JS + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_resolver import AppsyncResolver +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AppsyncResolver(self, "example", + api_id=test.id, + code=Token.as_string(Fn.file("some-code-dir")), + field="pipelineTest", + kind="PIPELINE", + pipeline_config=AppsyncResolverPipelineConfig( + functions=[Token.as_string(aws_appsync_function_test.function_id)] + ), + runtime=AppsyncResolverRuntime( + name="APPSYNC_JS", + runtime_version="1.0.0" + ), + type="Query" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) API ID for the GraphQL API. +* `code` - (Optional) The function code that contains the request and response functions. When code is used, the runtime is required. The runtime value must be APPSYNC_JS. +* `type` - (Required) Type name from the schema defined in the GraphQL API. +* `field` - (Required) Field name from the schema defined in the GraphQL API. 
+* `request_template` - (Optional) Request mapping template for UNIT resolver or 'before mapping template' for PIPELINE resolver. Required for non-Lambda resolvers.
+* `response_template` - (Optional) Response mapping template for UNIT resolver or 'after mapping template' for PIPELINE resolver. Required for non-Lambda resolvers.
+* `data_source` - (Optional) Data source name.
+* `max_batch_size` - (Optional) Maximum batching size for a resolver. Valid values are between `0` and `2000`.
+* `kind` - (Optional) Resolver type. Valid values are `UNIT` and `PIPELINE`.
+* `sync_config` - (Optional) Describes a Sync configuration for a resolver. See [Sync Config](#sync-config).
+* `pipeline_config` - (Optional) The pipeline configuration for the resolver. See [Pipeline Config](#pipeline-config).
+* `caching_config` - (Optional) The Caching Config. See [Caching Config](#caching-config).
+* `runtime` - (Optional) Describes a runtime used by an AWS AppSync pipeline resolver or AWS AppSync function. Specifies the name and version of the runtime to use. Note that if a runtime is specified, `code` must also be specified. See [Runtime](#runtime).
+
+### Caching Config
+
+* `caching_keys` - (Optional) The caching keys for a resolver that has caching activated. Valid values are entries from the `$context.arguments`, `$context.source`, and `$context.identity` maps.
+* `ttl` - (Optional) The TTL in seconds for a resolver that has caching activated. Valid values are between `1` and `3600` seconds.
+
+### Pipeline Config
+
+* `functions` - (Optional) A list of Function objects.
+
+### Sync Config
+
+* `conflict_detection` - (Optional) Conflict Detection strategy to use. Valid values are `NONE` and `VERSION`.
+* `conflict_handler` - (Optional) Conflict Resolution strategy to perform in the event of a conflict. Valid values are `NONE`, `OPTIMISTIC_CONCURRENCY`, `AUTOMERGE`, and `LAMBDA`.
+* `lambda_conflict_handler_config` - (Optional) Lambda Conflict Handler Config when configuring `LAMBDA` as the Conflict Handler. See [Lambda Conflict Handler Config](#lambda-conflict-handler-config).
+
+#### Lambda Conflict Handler Config
+
+* `lambda_conflict_handler_arn` - (Optional) ARN for the Lambda function to use as the Conflict Handler.
+
+### Runtime
+
+* `name` - (Optional) The name of the runtime to use. Currently, the only allowed value is `APPSYNC_JS`.
+* `runtime_version` - (Optional) The version of the runtime to use. Currently, the only allowed version is `1.0.0`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_appsync_resolver` using the `api_id`, a hyphen, `type`, a hyphen, and `field`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_appsync_resolver` using the `api_id`, a hyphen, `type`, a hyphen, and `field`.
For example: + +```console +% terraform import aws_appsync_resolver.example abcdef123456-exampleType-exampleField +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/appsync_type.html.markdown b/website/docs/cdktf/python/r/appsync_type.html.markdown new file mode 100644 index 00000000000..c4f8ad12909 --- /dev/null +++ b/website/docs/cdktf/python/r/appsync_type.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_type" +description: |- + Provides an AppSync Type. +--- + + + +# Resource: aws_appsync_type + +Provides an AppSync Type. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.appsync_graphql_api import AppsyncGraphqlApi +from imports.aws.appsync_type import AppsyncType +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = AppsyncGraphqlApi(self, "example", + authentication_type="API_KEY", + name="example" + ) + aws_appsync_type_example = AppsyncType(self, "example_1", + api_id=example.id, + definition="type Mutation\n\n{\nputPost(id: ID!,title: String! ): Post\n\n}\n\n", + format="SDL" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_appsync_type_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `api_id` - (Required) GraphQL API ID. +* `format` - (Required) The type format: `SDL` or `JSON`. +* `definition` - (Required) The type definition. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the type. +* `description` - The type description. +* `id` - The ID is constructed from `api-id:format:name`. +* `name` - The type name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Appsync Types using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Appsync Types using the `id`. For example: + +```console +% terraform import aws_appsync_type.example api-id:format:name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/athena_data_catalog.html.markdown b/website/docs/cdktf/python/r/athena_data_catalog.html.markdown new file mode 100644 index 00000000000..dea5b38aeb4 --- /dev/null +++ b/website/docs/cdktf/python/r/athena_data_catalog.html.markdown @@ -0,0 +1,156 @@ +--- +subcategory: "Athena" +layout: "aws" +page_title: "AWS: aws_athena_data_catalog" +description: |- + Provides an Athena data catalog. +--- + + + +# Resource: aws_athena_data_catalog + +Provides an Athena data catalog. + +More information about Athena and Athena data catalogs can be found in the [Athena User Guide](https://docs.aws.amazon.com/athena/latest/ug/what-is.html). 
+
+-> **Tip:** For a more detailed explanation of the usage of `parameters`, see the [DataCatalog API documentation](https://docs.aws.amazon.com/athena/latest/APIReference/API_DataCatalog.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_data_catalog import AthenaDataCatalog
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AthenaDataCatalog(self, "example",
+            description="Example Athena data catalog",
+            name="athena-data-catalog",
+            parameters={
+                "function": "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function"
+            },
+            tags={
+                "Name": "example-athena-data-catalog"
+            },
+            type="LAMBDA"
+        )
+```
+
+### Hive based Data Catalog
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_data_catalog import AthenaDataCatalog
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AthenaDataCatalog(self, "example",
+            description="Hive based Data Catalog",
+            name="hive-data-catalog",
+            parameters={
+                "metadata-function": "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function"
+            },
+            type="HIVE"
+        )
+```
+
+### Glue based Data Catalog
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_data_catalog import AthenaDataCatalog
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AthenaDataCatalog(self, "example",
+            description="Glue based Data Catalog",
+            name="glue-data-catalog",
+            parameters={
+                "catalog-id": "123456789012"
+            },
+            type="GLUE"
+        )
+```
+
+### Lambda based Data Catalog
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_data_catalog import AthenaDataCatalog
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AthenaDataCatalog(self, "example",
+            description="Lambda based Data Catalog",
+            name="lambda-data-catalog",
+            parameters={
+                "metadata-function": "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function-1",
+                "record-function": "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function-2"
+            },
+            type="LAMBDA"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `name` - (Required) Name of the data catalog. The catalog name must be unique for the AWS account and can use a maximum of 128 alphanumeric, underscore, at sign, or hyphen characters.
+- `type` - (Required) Type of data catalog: `LAMBDA` for a federated catalog, `GLUE` for AWS Glue Catalog, or `HIVE` for an external Hive metastore.
+- `parameters` - (Required) Key-value pairs that specify the Lambda function or functions to use for the data catalog. The mapping used depends on the catalog type.
+- `description` - (Required) Description of the data catalog.
+- `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `id` - Name of the data catalog.
+- `arn` - ARN of the data catalog.
+- `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import data catalogs using their `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import data catalogs using their `name`. For example:
+
+```console
+% terraform import aws_athena_data_catalog.example example-data-catalog
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/athena_database.html.markdown b/website/docs/cdktf/python/r/athena_database.html.markdown
new file mode 100644
index 00000000000..0578b17df35
--- /dev/null
+++ b/website/docs/cdktf/python/r/athena_database.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "Athena"
+layout: "aws"
+page_title: "AWS: aws_athena_database"
+description: |-
+  Provides an Athena database.
+---
+
+
+
+# Resource: aws_athena_database
+
+Provides an Athena database.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_database import AthenaDatabase
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_athena_database_example = AthenaDatabase(self, "example_1",
+            bucket=example.id,
+            name="database_name"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_athena_database_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of S3 bucket to save the results of the query execution.
+* `name` - (Required) Name of the database to create.
+* `acl_configuration` - (Optional) Indicates that an Amazon S3 canned ACL should be set to control ownership of stored query results. See [ACL Configuration](#acl-configuration) below.
+* `comment` - (Optional) Description of the database.
+* `encryption_configuration` - (Optional) Encryption key block AWS Athena uses to decrypt the data in S3, such as an AWS Key Management Service (AWS KMS) key. See [Encryption Configuration](#encryption-configuration) below.
+* `expected_bucket_owner` - (Optional) AWS account ID that you expect to be the owner of the Amazon S3 bucket.
+* `force_destroy` - (Optional, Default: false) Boolean that indicates all tables should be deleted from the database so that the database can be destroyed without error. The tables are *not* recoverable.
+* `properties` - (Optional) Key-value map of custom metadata properties for the database definition.
+
+### ACL Configuration
+
+* `s3_acl_option` - (Required) Amazon S3 canned ACL that Athena should specify when storing query results. Valid value is `BUCKET_OWNER_FULL_CONTROL`.
+
+~> **NOTE:** When Athena queries are executed, result files may be created in the specified bucket. Consider using `force_destroy` on the bucket too in order to avoid any problems when destroying the bucket.
+
+### Encryption Configuration
+
+* `encryption_option` - (Required) Type of key; one of `SSE_S3`, `SSE_KMS`, `CSE_KMS`.
+* `kms_key` - (Optional) KMS key ARN or ID; required for key types `SSE_KMS` and `CSE_KMS`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Database name
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Athena Databases using their name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Athena Databases using their name. For example:
+
+```console
+% terraform import aws_athena_database.example example
+```
+
+Certain resource arguments, like `encryption_configuration` and `bucket`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_database import AthenaDatabase
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AthenaDatabase(self, "example",
+            bucket=Token.as_string(aws_s3_bucket_example.id),
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=[bucket]
+            ),
+            name="database_name"
+        )
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/athena_named_query.html.markdown b/website/docs/cdktf/python/r/athena_named_query.html.markdown
new file mode 100644
index 00000000000..85f55c7a22f
--- /dev/null
+++ b/website/docs/cdktf/python/r/athena_named_query.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "Athena"
+layout: "aws"
+page_title: "AWS: aws_athena_named_query"
+description: |-
+  Provides an Athena Named Query resource.
+---
+
+
+
+# Resource: aws_athena_named_query
+
+Provides an Athena Named Query resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_database import AthenaDatabase
+from imports.aws.athena_named_query import AthenaNamedQuery
+from imports.aws.athena_workgroup import AthenaWorkgroup
+from imports.aws.kms_key import KmsKey
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = KmsKey(self, "test",
+            deletion_window_in_days=7,
+            description="Athena KMS Key"
+        )
+        hoge = S3Bucket(self, "hoge",
+            bucket="tf-test"
+        )
+        aws_athena_database_hoge = AthenaDatabase(self, "hoge_2",
+            bucket=hoge.id,
+            name="users"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_athena_database_hoge.override_logical_id("hoge")
+        aws_athena_workgroup_test = AthenaWorkgroup(self, "test_3",
+            configuration=AthenaWorkgroupConfiguration(
+                result_configuration=AthenaWorkgroupConfigurationResultConfiguration(
+                    encryption_configuration=AthenaWorkgroupConfigurationResultConfigurationEncryptionConfiguration(
+                        encryption_option="SSE_KMS",
+                        kms_key_arn=test.arn
+                    )
+                )
+            ),
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_athena_workgroup_test.override_logical_id("test")
+        AthenaNamedQuery(self, "foo",
+            database=Token.as_string(aws_athena_database_hoge.name),
+            name="bar",
+            query="SELECT * FROM ${" + aws_athena_database_hoge.name + "} limit 10;",
+            workgroup=Token.as_string(aws_athena_workgroup_test.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Plain language name for the query. Maximum length of 128.
+* `workgroup` - (Optional) Workgroup to which the query belongs. Defaults to `primary`. A minimal sketch that relies on this default follows the attribute list below.
+* `database` - (Required) Database to which the query belongs.
+* `query` - (Required) Text of the query itself. In other words, all query statements. Maximum length of 262144.
+* `description` - (Optional) Brief explanation of the query. Maximum length of 1024.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:

+* `id` - Unique ID of the query.
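+
+-> **Note:** The following sketch is hand-written rather than produced by `cdktf convert`; the database name, query name, and query text are illustrative assumptions. It shows the minimal case that relies on the default `primary` workgroup:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_named_query import AthenaNamedQuery
+class MinimalNamedQuery(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Omitting `workgroup` places the query in the default `primary` workgroup.
+        AthenaNamedQuery(self, "example",
+            database="users",
+            name="bar",
+            query="SELECT * FROM users limit 10;"
+        )
+```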
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Athena Named Query using the query ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Athena Named Query using the query ID. For example: + +```console +% terraform import aws_athena_named_query.example 0123456789 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/athena_workgroup.html.markdown b/website/docs/cdktf/python/r/athena_workgroup.html.markdown new file mode 100644 index 00000000000..511a73a3091 --- /dev/null +++ b/website/docs/cdktf/python/r/athena_workgroup.html.markdown @@ -0,0 +1,116 @@ +--- +subcategory: "Athena" +layout: "aws" +page_title: "AWS: aws_athena_workgroup" +description: |- + Manages an Athena Workgroup. +--- + + + +# Resource: aws_athena_workgroup + +Provides an Athena Workgroup. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.athena_workgroup import AthenaWorkgroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AthenaWorkgroup(self, "example", + configuration=AthenaWorkgroupConfiguration( + enforce_workgroup_configuration=True, + publish_cloudwatch_metrics_enabled=True, + result_configuration=AthenaWorkgroupConfigurationResultConfiguration( + encryption_configuration=AthenaWorkgroupConfigurationResultConfigurationEncryptionConfiguration( + encryption_option="SSE_KMS", + kms_key_arn=Token.as_string(aws_kms_key_example.arn) + ), + output_location="s3://${" + aws_s3_bucket_example.bucket + "}/output/" + ) + ), + name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the workgroup. +* `configuration` - (Optional) Configuration block with various settings for the workgroup. Documented below. +* `description` - (Optional) Description of the workgroup. +* `state` - (Optional) State of the workgroup. Valid values are `DISABLED` or `ENABLED`. Defaults to `ENABLED`. +* `tags` - (Optional) Key-value map of resource tags for the workgroup. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `force_destroy` - (Optional) Option to delete the workgroup and its contents even if the workgroup contains any named queries. + +### Configuration + +* `bytes_scanned_cutoff_per_query` - (Optional) Integer for the upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan. Must be at least `10485760`. +* `enforce_workgroup_configuration` - (Optional) Boolean whether the settings for the workgroup override client-side settings. For more information, see [Workgroup Settings Override Client-Side Settings](https://docs.aws.amazon.com/athena/latest/ug/workgroups-settings-override.html). 
Defaults to `true`.
+* `engine_version` - (Optional) Configuration block for the Athena Engine Versioning. For more information, see [Athena Engine Versioning](https://docs.aws.amazon.com/athena/latest/ug/engine-versions.html). See [Engine Version](#engine-version) below.
+* `execution_role` - (Optional) Role used in a notebook session for accessing the user's resources.
+* `publish_cloudwatch_metrics_enabled` - (Optional) Boolean whether Amazon CloudWatch metrics are enabled for the workgroup. Defaults to `true`.
+* `result_configuration` - (Optional) Configuration block with result settings. See [Result Configuration](#result-configuration) below.
+* `requester_pays_enabled` - (Optional) If set to `true`, allows members assigned to a workgroup to reference Amazon S3 Requester Pays buckets in queries. If set to `false`, workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is `false`. For more information about Requester Pays buckets, see [Requester Pays Buckets](https://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html) in the Amazon Simple Storage Service Developer Guide.
+
+#### Engine Version
+
+* `selected_engine_version` - (Optional) Requested engine version. Defaults to `AUTO`.
+
+#### Result Configuration
+
+* `encryption_configuration` - (Optional) Configuration block with encryption settings. See [Encryption Configuration](#encryption-configuration) below.
+* `acl_configuration` - (Optional) Indicates that an Amazon S3 canned ACL should be set to control ownership of stored query results. See [ACL Configuration](#acl-configuration) below.
+* `expected_bucket_owner` - (Optional) AWS account ID that you expect to be the owner of the Amazon S3 bucket.
+* `output_location` - (Optional) Location in Amazon S3 where your query results are stored, such as `s3://path/to/query/bucket/`. For more information, see [Queries and Query Result Files](https://docs.aws.amazon.com/athena/latest/ug/querying.html).
+
+##### ACL Configuration
+
+* `s3_acl_option` - (Required) Amazon S3 canned ACL that Athena should specify when storing query results. Valid value is `BUCKET_OWNER_FULL_CONTROL`.
+
+##### Encryption Configuration
+
+* `encryption_option` - (Required) Whether Amazon S3 server-side encryption with Amazon S3-managed keys (`SSE_S3`), server-side encryption with KMS-managed keys (`SSE_KMS`), or client-side encryption with KMS-managed keys (`CSE_KMS`) is used. If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup. A minimal `SSE_S3` sketch follows the attribute list below.
+* `kms_key_arn` - (Optional) For `SSE_KMS` and `CSE_KMS`, this is the KMS key ARN.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the workgroup
+* `configuration` - Configuration block with various settings for the workgroup
+    * `engine_version` - Configuration block for the Athena Engine Versioning
+        * `effective_engine_version` - The engine version on which the query runs. If `selected_engine_version` is set to `AUTO`, the effective engine version is chosen by Athena.
+* `id` - Workgroup name
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
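+
+-> **Note:** The following sketch is hand-written rather than produced by `cdktf convert`; the workgroup name and output location are illustrative assumptions. It shows the `SSE_S3` case referenced above, which needs no `kms_key_arn`:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.athena_workgroup import AthenaWorkgroup, AthenaWorkgroupConfiguration, AthenaWorkgroupConfigurationResultConfiguration, AthenaWorkgroupConfigurationResultConfigurationEncryptionConfiguration
+class SseS3Workgroup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AthenaWorkgroup(self, "example",
+            configuration=AthenaWorkgroupConfiguration(
+                result_configuration=AthenaWorkgroupConfigurationResultConfiguration(
+                    # Amazon S3-managed keys: no `kms_key_arn` is required for SSE_S3.
+                    encryption_configuration=AthenaWorkgroupConfigurationResultConfigurationEncryptionConfiguration(
+                        encryption_option="SSE_S3"
+                    ),
+                    output_location="s3://example-query-results/output/"
+                )
+            ),
+            name="example-sse-s3"
+        )
+```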
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Athena Workgroups using their name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Athena Workgroups using their name. For example:
+
+```console
+% terraform import aws_athena_workgroup.example example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/auditmanager_account_registration.html.markdown b/website/docs/cdktf/python/r/auditmanager_account_registration.html.markdown
new file mode 100644
index 00000000000..f1ba02ab666
--- /dev/null
+++ b/website/docs/cdktf/python/r/auditmanager_account_registration.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "Audit Manager"
+layout: "aws"
+page_title: "AWS: aws_auditmanager_account_registration"
+description: |-
+  Terraform resource for managing AWS Audit Manager Account Registration.
+---
+
+
+
+# Resource: aws_auditmanager_account_registration
+
+Terraform resource for managing AWS Audit Manager Account Registration.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.auditmanager_account_registration import AuditmanagerAccountRegistration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AuditmanagerAccountRegistration(self, "example")
+```
+
+### Deregister On Destroy
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.auditmanager_account_registration import AuditmanagerAccountRegistration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AuditmanagerAccountRegistration(self, "example",
+            deregister_on_destroy=True
+        )
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `delegated_admin_account` - (Optional) Identifier for the delegated administrator account.
+* `deregister_on_destroy` - (Optional) Flag to deregister Audit Manager in the account upon destruction. Defaults to `false` (i.e., Audit Manager will remain active in the account even if this resource is removed).
+* `kms_key` - (Optional) KMS key identifier.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier for the account registration. Since registration is applied per AWS region, this will be the active region name (e.g., `us-east-1`).
+* `status` - Status of the account registration request.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Account Registration resources using the `id`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Audit Manager Account Registration resources using the `id`. For example:
+
+```console
+% terraform import aws_auditmanager_account_registration.example us-east-1
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/auditmanager_assessment.html.markdown b/website/docs/cdktf/python/r/auditmanager_assessment.html.markdown
new file mode 100644
index 00000000000..9cd7941b617
--- /dev/null
+++ b/website/docs/cdktf/python/r/auditmanager_assessment.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "Audit Manager"
+layout: "aws"
+page_title: "AWS: aws_auditmanager_assessment"
+description: |-
+  Terraform resource for managing an AWS Audit Manager Assessment.
+---
+
+
+
+# Resource: aws_auditmanager_assessment
+
+Terraform resource for managing an AWS Audit Manager Assessment.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.auditmanager_assessment import AuditmanagerAssessment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AuditmanagerAssessment(self, "test",
+            assessment_reports_destination=[AuditmanagerAssessmentAssessmentReportsDestination(
+                destination="s3://${" + aws_s3_bucket_test.id + "}",
+                destination_type="S3"
+            )
+            ],
+            framework_id=Token.as_string(aws_auditmanager_framework_test.id),
+            name="example",
+            roles=[AuditmanagerAssessmentRoles(
+                role_arn=Token.as_string(aws_iam_role_test.arn),
+                role_type="PROCESS_OWNER"
+            )
+            ],
+            scope=[AuditmanagerAssessmentScope(
+                aws_accounts=[AuditmanagerAssessmentScopeAwsAccounts(
+                    id=Token.as_string(current.account_id)
+                )
+                ],
+                aws_services=[AuditmanagerAssessmentScopeAwsServices(
+                    service_name="S3"
+                )
+                ]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the assessment.
+* `assessment_reports_destination` - (Required) Assessment report storage destination configuration. See [`assessment_reports_destination`](#assessment_reports_destination) below.
+* `framework_id` - (Required) Unique identifier of the framework the assessment will be created from.
+* `roles` - (Required) List of roles for the assessment. See [`roles`](#roles) below.
+* `scope` - (Required) Amazon Web Services accounts and services that are in scope for the assessment. See [`scope`](#scope) below.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the assessment.
+* `tags` - (Optional) A map of tags to assign to the assessment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### assessment_reports_destination
+
+* `destination` - (Required) Destination of the assessment report. This value must be in the form `s3://{bucket_name}`.
+* `destination_type` - (Required) Destination type. Currently, `S3` is the only valid value. + +### roles + +* `role_arn` - (Required) Amazon Resource Name (ARN) of the IAM role. +* `role_type` - (Required) Type of customer persona. For assessment creation, type must always be `PROCESS_OWNER`. + +### scope + +* `aws_accounts` - Amazon Web Services accounts that are in scope for the assessment. See [`aws_accounts`](#aws_accounts) below. +* `aws_services` - Amazon Web Services services that are included in the scope of the assessment. See [`aws_services`](#aws_services) below. + +### aws_accounts + +* `id` - (Required) Identifier for the Amazon Web Services account. + +### aws_services + +* `service_name` - (Required) Name of the Amazon Web Service. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the assessment. +* `id` - Unique identifier for the assessment. +* `roles_all` - Complete list of all roles with access to the assessment. This includes both roles explicitly configured via the `roles` block, and any roles which have access to all Audit Manager assessments by default. +* `status` - Status of the assessment. Valid values are `ACTIVE` and `INACTIVE`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Assessments using the assessment `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Audit Manager Assessments using the assessment `id`. For example: + +```console +% terraform import aws_auditmanager_assessment.example abc123-de45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/auditmanager_assessment_delegation.html.markdown b/website/docs/cdktf/python/r/auditmanager_assessment_delegation.html.markdown new file mode 100644 index 00000000000..f19135e1334 --- /dev/null +++ b/website/docs/cdktf/python/r/auditmanager_assessment_delegation.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_assessment_delegation" +description: |- + Terraform resource for managing an AWS Audit Manager Assessment Delegation. +--- + + + +# Resource: aws_auditmanager_assessment_delegation + +Terraform resource for managing an AWS Audit Manager Assessment Delegation. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.auditmanager_assessment_delegation import AuditmanagerAssessmentDelegation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AuditmanagerAssessmentDelegation(self, "example", + assessment_id=Token.as_string(aws_auditmanager_assessment_example.id), + control_set_id="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + role_type="RESOURCE_OWNER" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `assessment_id` - (Required) Identifier for the assessment. +* `control_set_id` - (Required) Assessment control set name. This value is the control set name used during assessment creation (not the AWS-generated ID). The `_id` suffix on this attribute has been preserved to be consistent with the underlying AWS API. +* `role_arn` - (Required) Amazon Resource Name (ARN) of the IAM role. +* `role_type` - (Required) Type of customer persona. For assessment delegation, type must always be `RESOURCE_OWNER`. + +The following arguments are optional: + +* `comment` - (Optional) Comment describing the delegation request. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `delegation_id` - Unique identifier for the delegation. +* `id` - Unique identifier for the resource. This is a comma-separated string containing `assessment_id`, `role_arn`, and `control_set_id`. +* `status` - Status of the delegation. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Assessment Delegation using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Audit Manager Assessment Delegation using the `id`. For example: + +```console +% terraform import aws_auditmanager_assessment_delegation.example abcdef-123456,arn:aws:iam::012345678901:role/example,example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/auditmanager_assessment_report.html.markdown b/website/docs/cdktf/python/r/auditmanager_assessment_report.html.markdown new file mode 100644 index 00000000000..2dbd12dda97 --- /dev/null +++ b/website/docs/cdktf/python/r/auditmanager_assessment_report.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_assessment_report" +description: |- + Terraform resource for managing an AWS Audit Manager Assessment Report. +--- + + + +# Resource: aws_auditmanager_assessment_report + +Terraform resource for managing an AWS Audit Manager Assessment Report. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.auditmanager_assessment_report import AuditmanagerAssessmentReport +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AuditmanagerAssessmentReport(self, "test", + assessment_id=Token.as_string(aws_auditmanager_assessment_test.id), + name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the assessment report. +* `assessment_id` - (Required) Unique identifier of the assessment to create the report from. + +The following arguments are optional: + +* `description` - (Optional) Description of the assessment report. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `author` - Name of the user who created the assessment report. +* `id` - Unique identifier for the assessment report. +* `status` - Current status of the specified assessment report. Valid values are `COMPLETE`, `IN_PROGRESS`, and `FAILED`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Assessment Reports using the assessment report `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Audit Manager Assessment Reports using the assessment report `id`. For example: + +```console +% terraform import aws_auditmanager_assessment_report.example abc123-de45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/auditmanager_control.html.markdown b/website/docs/cdktf/python/r/auditmanager_control.html.markdown new file mode 100644 index 00000000000..a8fe472b092 --- /dev/null +++ b/website/docs/cdktf/python/r/auditmanager_control.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_control" +description: |- + Terraform resource for managing an AWS Audit Manager Control. +--- + + + +# Resource: aws_auditmanager_control + +Terraform resource for managing an AWS Audit Manager Control. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.auditmanager_control import AuditmanagerControl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AuditmanagerControl(self, "example", + control_mapping_sources=[AuditmanagerControlControlMappingSources( + source_name="example", + source_set_up_option="Procedural_Controls_Mapping", + source_type="MANUAL" + ) + ], + name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the control. +* `control_mapping_sources` - (Required) Data mapping sources. See [`control_mapping_sources`](#control_mapping_sources) below. + +The following arguments are optional: + +* `action_plan_instructions` - (Optional) Recommended actions to carry out if the control isn't fulfilled. 
+* `action_plan_title` - (Optional) Title of the action plan for remediating the control.
+* `description` - (Optional) Description of the control.
+* `tags` - (Optional) A map of tags to assign to the control. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `testing_information` - (Optional) Steps to follow to determine whether the control is satisfied.
+
+### control_mapping_sources
+
+The following arguments are required:
+
+* `source_name` - (Required) Name of the source.
+* `source_set_up_option` - (Required) The setup option for the data source. This option reflects whether the evidence collection is automated or manual. Valid values are `System_Controls_Mapping` (automated) and `Procedural_Controls_Mapping` (manual).
+* `source_type` - (Required) Type of data source for evidence collection. If `source_set_up_option` is manual, the only valid value is `MANUAL`. If `source_set_up_option` is automated, valid values are `AWS_Cloudtrail`, `AWS_Config`, `AWS_Security_Hub`, or `AWS_API_Call`.
+
+The following arguments are optional:
+
+* `source_description` - (Optional) Description of the source.
+* `source_frequency` - (Optional) Frequency of evidence collection. Valid values are `DAILY`, `WEEKLY`, or `MONTHLY`.
+* `source_keyword` - (Optional) The keyword to search for in CloudTrail logs, Config rules, Security Hub checks, and Amazon Web Services API names. See [`source_keyword`](#source_keyword) below.
+* `troubleshooting_text` - (Optional) Instructions for troubleshooting the control.
+
+### source_keyword
+
+The following arguments are required:
+
+* `keyword_input_type` - (Required) Input method for the keyword. The only valid value is `SELECT_FROM_LIST`.
+* `keyword_value` - (Required) The value of the keyword that's used when mapping a control data source. For example, this can be a CloudTrail event name, a rule name for Config, a Security Hub control, or the name of an Amazon Web Services API call. See the [Audit Manager supported control data sources documentation](https://docs.aws.amazon.com/audit-manager/latest/userguide/control-data-sources.html) for more information.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the control.
+* `control_mapping_sources.*.source_id` - Unique identifier for the source.
+* `id` - Unique identifier for the control.
+* `type` - Type of control, such as a custom control or a standard control.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Audit Manager Control using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an Audit Manager Control using the `id`.
For example: + +```console +% terraform import aws_auditmanager_control.example abc123-de45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/auditmanager_framework.html.markdown b/website/docs/cdktf/python/r/auditmanager_framework.html.markdown new file mode 100644 index 00000000000..3cbdfae11f3 --- /dev/null +++ b/website/docs/cdktf/python/r/auditmanager_framework.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_framework" +description: |- + Terraform resource for managing an AWS Audit Manager Framework. +--- + + + +# Resource: aws_auditmanager_framework + +Terraform resource for managing an AWS Audit Manager Framework. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.auditmanager_framework import AuditmanagerFramework +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AuditmanagerFramework(self, "test", + control_sets=[AuditmanagerFrameworkControlSets( + controls=[AuditmanagerFrameworkControlSetsControls( + id=Token.as_string(aws_auditmanager_control_test.id) + ) + ], + name="example" + ) + ], + name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the framework. +* `control_sets` - (Required) Control sets that are associated with the framework. See [`control_sets`](#control_sets) below. + +The following arguments are optional: + +* `compliance_type` - (Optional) Compliance type that the new custom framework supports, such as `CIS` or `HIPAA`. +* `description` - (Optional) Description of the framework. +* `tags` - (Optional) A map of tags to assign to the framework. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### control_sets + +* `name` - (Required) Name of the control set. +* `controls` - (Required) List of controls within the control set. See [`controls`](#controls) below. + +### controls + +* `id` - (Required) Unique identifier of the control. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the framework. +* `control_sets[*].id` - Unique identifier for the framework control set. +* `id` - Unique identifier for the framework. +* `framework_type` - Framework type, such as a custom framework or a standard framework. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Framework using the framework `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Audit Manager Framework using the framework `id`. 
For example: + +```console +% terraform import aws_auditmanager_framework.example abc123-de45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/auditmanager_framework_share.html.markdown b/website/docs/cdktf/python/r/auditmanager_framework_share.html.markdown new file mode 100644 index 00000000000..a46ac5a43ad --- /dev/null +++ b/website/docs/cdktf/python/r/auditmanager_framework_share.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_framework_share" +description: |- + Terraform resource for managing an AWS Audit Manager Framework Share. +--- + + + +# Resource: aws_auditmanager_framework_share + +Terraform resource for managing an AWS Audit Manager Framework Share. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.auditmanager_framework_share import AuditmanagerFrameworkShare +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AuditmanagerFrameworkShare(self, "example", + destination_account="012345678901", + destination_region="us-east-1", + framework_id=Token.as_string(aws_auditmanager_framework_example.id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `destination_account` - (Required) Amazon Web Services account of the recipient. +* `destination_region` - (Required) Amazon Web Services region of the recipient. +* `framework_id` - (Required) Unique identifier for the shared custom framework. + +The following arguments are optional: + +* `comment` - (Optional) Comment from the sender about the share request. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier for the share request. +* `status` - Status of the share request. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Framework Share using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Audit Manager Framework Share using the `id`. For example: + +```console +% terraform import aws_auditmanager_framework_share.example abcdef-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/auditmanager_organization_admin_account_registration.html.markdown b/website/docs/cdktf/python/r/auditmanager_organization_admin_account_registration.html.markdown new file mode 100644 index 00000000000..1fcb2250043 --- /dev/null +++ b/website/docs/cdktf/python/r/auditmanager_organization_admin_account_registration.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_organization_admin_account_registration" +description: |- + Terraform resource for managing AWS Audit Manager Organization Admin Account Registration. 
+--- + + + +# Resource: aws_auditmanager_organization_admin_account_registration + +Terraform resource for managing AWS Audit Manager Organization Admin Account Registration. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.auditmanager_organization_admin_account_registration import AuditmanagerOrganizationAdminAccountRegistration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AuditmanagerOrganizationAdminAccountRegistration(self, "example", + admin_account_id="012345678901" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `admin_account_id` - (Required) Identifier for the organization administrator account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier for the organization administrator account. +* `organization_id` - Identifier for the organization. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Organization Admin Account Registration using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Audit Manager Organization Admin Account Registration using the `id`. For example: + +```console +% terraform import aws_auditmanager_organization_admin_account_registration.example 012345678901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/autoscaling_attachment.html.markdown b/website/docs/cdktf/python/r/autoscaling_attachment.html.markdown new file mode 100644 index 00000000000..6e8140d8971 --- /dev/null +++ b/website/docs/cdktf/python/r/autoscaling_attachment.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_attachment" +description: |- + Terraform resource for managing an AWS Auto Scaling Attachment. +--- + + + +# Resource: aws_autoscaling_attachment + +Attaches a load balancer to an Auto Scaling group. + +~> **NOTE on Auto Scaling Groups, Attachments and Traffic Source Attachments:** Terraform provides standalone Attachment (for attaching Classic Load Balancers and Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target groups) and [Traffic Source Attachment](autoscaling_traffic_source_attachment.html) (for attaching Load Balancers and VPC Lattice target groups) resources and an [Auto Scaling Group](autoscaling_group.html) resource with `load_balancers`, `target_group_arns` and `traffic_source` attributes. Do not use the same traffic source in more than one of these resources. Doing so will cause a conflict of attachments. A [`lifecycle` configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) can be used to suppress differences if necessary. 
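+
+For illustration, a minimal sketch (not generated by `cdktf convert`; the launch configuration name and Availability Zone are placeholder assumptions) of suppressing such differences on a group whose attachments are managed by the standalone resources:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack, TerraformResourceLifecycle
+from imports.aws.autoscaling_group import AutoscalingGroup
+class MyAttachmentLifecycleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AutoscalingGroup(self, "example",
+            availability_zones=["us-east-1a"],
+            launch_configuration="example-lc",  # placeholder; assumed to exist
+            max_size=2,
+            min_size=1,
+            # Ignore the attachment-managed attributes so the standalone
+            # attachment resources remain the single source of truth.
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["load_balancers", "target_group_arns"]
+            )
+        )
+```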
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_attachment import AutoscalingAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AutoscalingAttachment(self, "example", + autoscaling_group_name=Token.as_string(aws_autoscaling_group_example.id), + elb=Token.as_string(aws_elb_example.id) + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_attachment import AutoscalingAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AutoscalingAttachment(self, "example", + autoscaling_group_name=Token.as_string(aws_autoscaling_group_example.id), + lb_target_group_arn=Token.as_string(aws_lb_target_group_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `autoscaling_group_name` - (Required) Name of ASG to associate with the ELB. +* `elb` - (Optional) Name of the ELB. +* `lb_target_group_arn` - (Optional) ARN of a load balancer target group. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/autoscaling_group.html.markdown b/website/docs/cdktf/python/r/autoscaling_group.html.markdown new file mode 100644 index 00000000000..a318c9512dd --- /dev/null +++ b/website/docs/cdktf/python/r/autoscaling_group.html.markdown @@ -0,0 +1,911 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_group" +description: |- + Provides an Auto Scaling Group resource. +--- + + + +# Resource: aws_autoscaling_group + +Provides an Auto Scaling Group resource. + +-> **Note:** You must specify either `launch_configuration`, `launch_template`, or `mixed_instances_policy`. + +~> **NOTE on Auto Scaling Groups, Attachments and Traffic Source Attachments:** Terraform provides standalone [Attachment](autoscaling_attachment.html) (for attaching Classic Load Balancers and Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target groups) and [Traffic Source Attachment](autoscaling_traffic_source_attachment.html) (for attaching Load Balancers and VPC Lattice target groups) resources and an Auto Scaling Group resource with `load_balancers`, `target_group_arns` and `traffic_source` attributes. Do not use the same traffic source in more than one of these resources. Doing so will cause a conflict of attachments. A [`lifecycle` configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) can be used to suppress differences if necessary. + +> **Hands-on:** Try the [Manage AWS Auto Scaling Groups](https://learn.hashicorp.com/tutorials/terraform/aws-asg?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.placement_group import PlacementGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = PlacementGroup(self, "test", + name="test", + strategy="cluster" + ) + AutoscalingGroup(self, "bar", + desired_capacity=4, + force_delete=True, + health_check_grace_period=300, + health_check_type="ELB", + initial_lifecycle_hook=[AutoscalingGroupInitialLifecycleHook( + default_result="CONTINUE", + heartbeat_timeout=2000, + lifecycle_transition="autoscaling:EC2_INSTANCE_LAUNCHING", + name="foobar", + notification_metadata=Token.as_string( + Fn.jsonencode({ + "foo": "bar" + })), + notification_target_arn="arn:aws:sqs:us-east-1:444455556666:queue1*", + role_arn="arn:aws:iam::123456789012:role/S3Access" + ) + ], + launch_configuration=foobar.name, + max_size=5, + min_size=2, + name="foobar3-terraform-test", + placement_group=test.id, + tag=[AutoscalingGroupTag( + key="foo", + propagate_at_launch=True, + value="bar" + ), AutoscalingGroupTag( + key="lorem", + propagate_at_launch=False, + value="ipsum" + ) + ], + timeouts=[{ + "delete": "15m" + } + ], + vpc_zone_identifier=[example1.id, example2.id] + ) +``` + +### With Latest Version Of Launch Template + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.launch_template import LaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foobar = LaunchTemplate(self, "foobar", + image_id="ami-1a2b3c", + instance_type="t2.micro", + name_prefix="foobar" + ) + AutoscalingGroup(self, "bar", + availability_zones=["us-east-1a"], + desired_capacity=1, + launch_template=AutoscalingGroupLaunchTemplate( + id=foobar.id, + version="$Latest" + ), + max_size=1, + min_size=1 + ) +``` + +### Mixed Instances Policy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.launch_template import LaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = LaunchTemplate(self, "example", + image_id=Token.as_string(data_aws_ami_example.id), + instance_type="c5.large", + name_prefix="example" + ) + aws_autoscaling_group_example = AutoscalingGroup(self, "example_1", + availability_zones=["us-east-1a"], + desired_capacity=1, + max_size=1, + min_size=1, + mixed_instances_policy=AutoscalingGroupMixedInstancesPolicy( + launch_template=AutoscalingGroupMixedInstancesPolicyLaunchTemplate( + launch_template_specification=AutoscalingGroupMixedInstancesPolicyLaunchTemplateLaunchTemplateSpecification( + launch_template_id=example.id + ), + override=[AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverride( + instance_type="c4.large", + weighted_capacity="3" + ), AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverride( + instance_type="c3.large", + weighted_capacity="2" + ) + ] + ) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_group_example.override_logical_id("example") +``` + +### Mixed Instances Policy with Spot Instances and Capacity Rebalance + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.launch_template import LaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = LaunchTemplate(self, "example", + image_id=Token.as_string(data_aws_ami_example.id), + instance_type="c5.large", + name_prefix="example" + ) + aws_autoscaling_group_example = AutoscalingGroup(self, "example_1", + capacity_rebalance=True, + desired_capacity=12, + max_size=15, + min_size=12, + mixed_instances_policy=AutoscalingGroupMixedInstancesPolicy( + instances_distribution=AutoscalingGroupMixedInstancesPolicyInstancesDistribution( + on_demand_base_capacity=0, + on_demand_percentage_above_base_capacity=25, + spot_allocation_strategy="capacity-optimized" + ), + launch_template=AutoscalingGroupMixedInstancesPolicyLaunchTemplate( + launch_template_specification=AutoscalingGroupMixedInstancesPolicyLaunchTemplateLaunchTemplateSpecification( + launch_template_id=example.id + ), + override=[AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverride( + instance_type="c4.large", + weighted_capacity="3" + ), AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverride( + instance_type="c3.large", + weighted_capacity="2" + ) + ] + ) + ), + vpc_zone_identifier=[example1.id, example2.id] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_group_example.override_logical_id("example") +``` + +### Mixed Instances Policy with Instance level LaunchTemplateSpecification Overrides + +When using a diverse instance set, some instance types might require a launch template with configuration values unique to that instance type such as a different AMI (Graviton2), architecture specific user data script, different EBS configuration, or different networking configuration. 
+ +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.launch_template import LaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = LaunchTemplate(self, "example", + image_id=Token.as_string(data_aws_ami_example.id), + instance_type="c5.large", + name_prefix="example" + ) + example2 = LaunchTemplate(self, "example2", + image_id=Token.as_string(data_aws_ami_example2.id), + name_prefix="example2" + ) + aws_autoscaling_group_example = AutoscalingGroup(self, "example_2", + availability_zones=["us-east-1a"], + desired_capacity=1, + max_size=1, + min_size=1, + mixed_instances_policy=AutoscalingGroupMixedInstancesPolicy( + launch_template=AutoscalingGroupMixedInstancesPolicyLaunchTemplate( + launch_template_specification=AutoscalingGroupMixedInstancesPolicyLaunchTemplateLaunchTemplateSpecification( + launch_template_id=example.id + ), + override=[AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverride( + instance_type="c4.large", + weighted_capacity="3" + ), AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverride( + instance_type="c6g.large", + launch_template_specification=AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverrideLaunchTemplateSpecification( + launch_template_id=example2.id + ), + weighted_capacity="2" + ) + ] + ) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_group_example.override_logical_id("example") +``` + +### Mixed Instances Policy with Attribute-based Instance Type Selection + +As an alternative to manually choosing instance types when creating a mixed instances group, you can specify a set of instance attributes that describe your compute requirements. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.launch_template import LaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = LaunchTemplate(self, "example", + image_id=Token.as_string(data_aws_ami_example.id), + instance_type="c5.large", + name_prefix="example" + ) + aws_autoscaling_group_example = AutoscalingGroup(self, "example_1", + availability_zones=["us-east-1a"], + desired_capacity=1, + max_size=1, + min_size=1, + mixed_instances_policy=AutoscalingGroupMixedInstancesPolicy( + launch_template=AutoscalingGroupMixedInstancesPolicyLaunchTemplate( + launch_template_specification=AutoscalingGroupMixedInstancesPolicyLaunchTemplateLaunchTemplateSpecification( + launch_template_id=example.id + ), + override=[AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverride( + instance_requirements=AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverrideInstanceRequirements( + memory_mib=AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverrideInstanceRequirementsMemoryMib( + min=1000 + ), + vcpu_count=AutoscalingGroupMixedInstancesPolicyLaunchTemplateOverrideInstanceRequirementsVcpuCount( + min=4 + ) + ) + ) + ] + ) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_group_example.override_logical_id("example") +``` + +### Dynamic tagging + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, Token, TerraformIterator, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + extra_tags = TerraformVariable(self, "extra_tags", + default=[{ + "key": "Foo", + "propagate_at_launch": True, + "value": "Bar" + }, { + "key": "Baz", + "propagate_at_launch": True, + "value": "Bam" + } + ] + ) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. 
+ test_dynamic_iterator0 = TerraformIterator.from_list( + Token.as_any(extra_tags.value)) + AutoscalingGroup(self, "test", + launch_configuration=foobar.name, + max_size=5, + min_size=2, + name="foobar3-terraform-test", + tag=test_dynamic_iterator0.dynamic({ + "key": property_access(test_dynamic_iterator0.value, ["key"]), + "propagate_at_launch": property_access(test_dynamic_iterator0.value, ["propagate_at_launch" + ]), + "value": property_access(test_dynamic_iterator0.value, ["value"]) + }), + vpc_zone_identifier=[example1.id, example2.id] + ) +``` + +### Automatically refresh all instances after the group is updated + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.data_aws_ami import DataAwsAmi +from imports.aws.launch_template import LaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsAmi(self, "example", + filter=[DataAwsAmiFilter( + name="name", + values=["amzn-ami-hvm-*-x86_64-gp2"] + ) + ], + most_recent=True, + owners=["amazon"] + ) + aws_launch_template_example = LaunchTemplate(self, "example_1", + image_id=Token.as_string(example.id), + instance_type="t3.nano" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_launch_template_example.override_logical_id("example") + aws_autoscaling_group_example = AutoscalingGroup(self, "example_2", + availability_zones=["us-east-1a"], + desired_capacity=1, + instance_refresh=AutoscalingGroupInstanceRefresh( + preferences=AutoscalingGroupInstanceRefreshPreferences( + min_healthy_percentage=50 + ), + strategy="Rolling", + triggers=["tag"] + ), + launch_template=AutoscalingGroupLaunchTemplate( + id=Token.as_string(aws_launch_template_example.id), + version=Token.as_string(aws_launch_template_example.latest_version) + ), + max_size=2, + min_size=1, + tag=[AutoscalingGroupTag( + key="Key", + propagate_at_launch=True, + value="Value" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_group_example.override_logical_id("example") +``` + +### Auto Scaling group with Warm Pool + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.launch_template import LaunchTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AutoscalingGroup(self, "example", + availability_zones=["us-east-1a"], + desired_capacity=1, + max_size=5, + min_size=1, + warm_pool=AutoscalingGroupWarmPool( + instance_reuse_policy=AutoscalingGroupWarmPoolInstanceReusePolicy( + reuse_on_scale_in=True + ), + max_group_prepared_capacity=10, + min_size=1, + pool_state="Hibernated" + ) + ) + aws_launch_template_example = LaunchTemplate(self, "example_1", + image_id=Token.as_string(data_aws_ami_example.id), + instance_type="c5.large", + name_prefix="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_launch_template_example.override_logical_id("example") +``` + +### Auto Scaling group with Traffic Sources + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformIterator, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + test_dynamic_iterator0 = TerraformIterator.from_list( + Token.as_any(property_access(aws_vpclattice_target_group_test, ["*"]))) + AutoscalingGroup(self, "test", + force_delete=True, + max_size=1, + min_size=1, + vpc_zone_identifier=Token.as_list(aws_subnet_test.id), + traffic_source=test_dynamic_iterator0.dynamic({ + "identifier": property_access(test_dynamic_iterator0.value, ["arn"]), + "type": "vpc-lattice" + }) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +- `name` - (Optional) Name of the Auto Scaling Group. By default generated by Terraform. Conflicts with `name_prefix`. +- `name_prefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +- `max_size` - (Required) Maximum size of the Auto Scaling Group. +- `min_size` - (Required) Minimum size of the Auto Scaling Group. + (See also [Waiting for Capacity](#waiting-for-capacity) below.) +- `availability_zones` - (Optional) A list of Availability Zones where instances in the Auto Scaling group can be created. Used for launching into the default VPC subnet in each Availability Zone when not using the `vpc_zone_identifier` attribute, or for attaching a network interface when an existing network interface ID is specified in a launch template. Conflicts with `vpc_zone_identifier`. +- `capacity_rebalance` - (Optional) Whether capacity rebalance is enabled. Otherwise, capacity rebalance is disabled. +- `context` - (Optional) Reserved. +- `default_cooldown` - (Optional) Amount of time, in seconds, after a scaling activity completes before another scaling activity can start. 
+- `default_instance_warmup` - (Optional) Amount of time, in seconds, until a newly launched instance can contribute to the Amazon CloudWatch metrics. This delay lets an instance finish initializing before Amazon EC2 Auto Scaling aggregates instance metrics, resulting in more reliable usage data. Set this value equal to the amount of time that it takes for resource consumption to become stable after an instance reaches the InService state. (See [Set the default instance warmup for an Auto Scaling group](https://docs.aws.amazon.com/autoscaling/ec2/userguide/ec2-auto-scaling-default-instance-warmup.html))
+- `launch_configuration` - (Optional) Name of the launch configuration to use.
+- `launch_template` - (Optional) Nested argument with Launch template specification to use to launch instances. See [Launch Template](#launch_template) below for more details.
+- `mixed_instances_policy` (Optional) Configuration block containing settings to define launch targets for Auto Scaling groups. See [Mixed Instances Policy](#mixed_instances_policy) below for more details.
+- `initial_lifecycle_hook` - (Optional) One or more [Lifecycle Hooks](http://docs.aws.amazon.com/autoscaling/latest/userguide/lifecycle-hooks.html) to attach to the Auto Scaling Group **before** instances are launched. The syntax is exactly the same as the separate [`aws_autoscaling_lifecycle_hook`](/docs/providers/aws/r/autoscaling_lifecycle_hook.html) resource, without the `autoscaling_group_name` attribute. Please note that this will only work when creating a new Auto Scaling Group. For all other use cases, please use the `aws_autoscaling_lifecycle_hook` resource.
+- `health_check_grace_period` - (Optional, Default: 300) Time (in seconds) after instance comes into service before checking health.
+- `health_check_type` - (Optional) "EC2" or "ELB". Controls how health checking is done.
+- `desired_capacity` - (Optional) Number of Amazon EC2 instances that should be running in the group. (See also [Waiting for Capacity](#waiting-for-capacity) below.)
+- `desired_capacity_type` - (Optional) The unit of measurement for the value specified for `desired_capacity`. Supported for attribute-based instance type selection only. Valid values: `"units"`, `"vcpu"`, `"memory-mib"`.
+- `force_delete` - (Optional) Allows deleting the Auto Scaling Group without waiting for all instances in the pool to terminate. You can force an Auto Scaling Group to delete even if it's in the process of scaling a resource. Normally, Terraform drains all the instances before deleting the group. This bypasses that behavior and potentially leaves resources dangling.
+- `load_balancers` (Optional) List of Elastic Load Balancer names to attach to the Auto Scaling Group. Only valid for Classic Load Balancers. For ALBs, use `target_group_arns` instead. To remove all load balancer attachments, specify an empty list.
+- `traffic_source` (Optional) Attaches one or more traffic sources to the specified Auto Scaling group.
+- `vpc_zone_identifier` (Optional) List of subnet IDs to launch resources in. The subnets automatically determine which availability zones the group will reside in. Conflicts with `availability_zones`.
+- `target_group_arns` (Optional) Set of `aws_alb_target_group` ARNs, for use with Application or Network Load Balancing. To remove all target group attachments, specify an empty list.
+- `termination_policies` (Optional) List of policies to decide how the instances in the Auto Scaling Group should be terminated.
The allowed values are `OldestInstance`, `NewestInstance`, `OldestLaunchConfiguration`, `ClosestToNextInstanceHour`, `OldestLaunchTemplate`, `AllocationStrategy`, `Default`. Additionally, the ARN of a Lambda function can be specified for custom termination policies.
+- `suspended_processes` - (Optional) List of processes to suspend for the Auto Scaling Group. The allowed values are `Launch`, `Terminate`, `HealthCheck`, `ReplaceUnhealthy`, `AZRebalance`, `AlarmNotification`, `ScheduledActions`, `AddToLoadBalancer`, `InstanceRefresh`. Note that if you suspend either the `Launch` or `Terminate` process types, it can prevent your Auto Scaling Group from functioning properly.
+- `tag` (Optional) Configuration block(s) containing resource tags. See [Tag](#tag) below for more details.
+- `placement_group` (Optional) Name of the placement group into which you'll launch your instances, if any.
+- `metrics_granularity` - (Optional) Granularity to associate with the metrics to collect. The only valid value is `1Minute`. Default is `1Minute`.
+- `enabled_metrics` - (Optional) List of metrics to collect. The allowed values are defined by the [underlying AWS API](https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_EnableMetricsCollection.html).
+- `wait_for_capacity_timeout` (Default: "10m") Maximum [duration](https://golang.org/pkg/time/#ParseDuration) that Terraform should wait for ASG instances to be healthy before timing out. (See also [Waiting for Capacity](#waiting-for-capacity) below.) Setting this to "0" causes Terraform to skip all Capacity Waiting behavior.
+- `min_elb_capacity` - (Optional) Setting this causes Terraform to wait for this number of instances from this Auto Scaling Group to show up healthy in the ELB only on creation. Updates will not wait on ELB instance number changes. (See also [Waiting for Capacity](#waiting-for-capacity) below.)
+- `wait_for_elb_capacity` - (Optional) Setting this will cause Terraform to wait for exactly this number of healthy instances from this Auto Scaling Group in all attached load balancers on both create and update operations. (Takes precedence over `min_elb_capacity` behavior.) (See also [Waiting for Capacity](#waiting-for-capacity) below.)
+- `protect_from_scale_in` (Optional) Whether newly launched instances are automatically protected from termination by Amazon EC2 Auto Scaling when scaling in. For more information about preventing instances from terminating on scale in, see [Using instance scale-in protection](https://docs.aws.amazon.com/autoscaling/ec2/userguide/ec2-auto-scaling-instance-protection.html) in the Amazon EC2 Auto Scaling User Guide.
+- `service_linked_role_arn` (Optional) ARN of the service-linked role that the ASG will use to call other AWS services.
+- `max_instance_lifetime` (Optional) Maximum amount of time, in seconds, that an instance can be in service. Values must be either equal to 0 or between 86400 and 31536000 seconds.
+- `instance_refresh` - (Optional) If this block is configured, start an [Instance Refresh](https://docs.aws.amazon.com/autoscaling/ec2/userguide/asg-instance-refresh.html) when this Auto Scaling Group is updated. Defined [below](#instance_refresh).
+- `warm_pool` - (Optional) If this block is configured, add a [Warm Pool](https://docs.aws.amazon.com/autoscaling/ec2/userguide/ec2-auto-scaling-warm-pools.html) to the specified Auto Scaling group. Defined [below](#warm_pool).
+
+### launch_template
+
+~> **NOTE:** Either `id` or `name` must be specified.
+
+The top-level `launch_template` block supports the following:
+
+- `id` - (Optional) ID of the launch template. Conflicts with `name`.
+- `name` - (Optional) Name of the launch template. Conflicts with `id`.
+- `version` - (Optional) Template version. Can be a version number, `$Latest`, or `$Default`. (Default: `$Default`).
+
+### mixed_instances_policy
+
+- `instances_distribution` - (Optional) Nested argument containing settings on how to mix on-demand and Spot instances in the Auto Scaling group. Defined below.
+- `launch_template` - (Required) Nested argument containing launch template settings along with the overrides to specify multiple instance types and weights. Defined below.
+
+#### mixed_instances_policy instances_distribution
+
+This configuration block supports the following:
+
+- `on_demand_allocation_strategy` - (Optional) Strategy to use when launching on-demand instances. Valid values: `prioritized`, `lowest-price`. Default: `prioritized`.
+- `on_demand_base_capacity` - (Optional) Absolute minimum amount of desired capacity that must be fulfilled by on-demand instances. Default: `0`.
+- `on_demand_percentage_above_base_capacity` - (Optional) Percentage split between on-demand and Spot instances above the base on-demand capacity. Default: `100`.
+- `spot_allocation_strategy` - (Optional) How to allocate capacity across the Spot pools. Valid values: `lowest-price`, `capacity-optimized`, `capacity-optimized-prioritized`, and `price-capacity-optimized`. Default: `lowest-price`.
+- `spot_instance_pools` - (Optional) Number of Spot pools per availability zone to allocate capacity. EC2 Auto Scaling selects the cheapest Spot pools and evenly allocates Spot capacity across the number of Spot pools that you specify. Only available with `spot_allocation_strategy` set to `lowest-price`; otherwise it must be set to `0`, if it has been defined before. Default: `2`.
+- `spot_max_price` - (Optional) Maximum price per unit hour that the user is willing to pay for the Spot instances. Default: an empty string, which means the on-demand price.
+
+#### mixed_instances_policy launch_template
+
+This configuration block supports the following:
+
+- `launch_template_specification` - (Required) Nested argument that defines the Launch Template. Defined below.
+- `override` - (Optional) List of nested arguments that provide the ability to specify multiple instance types. This will override the same parameter in the launch template. For on-demand instances, Auto Scaling considers the order of preference of instance types to launch based on the order specified in the overrides list. Defined below.
+
+##### mixed_instances_policy launch_template launch_template_specification
+
+~> **NOTE:** Either `launch_template_id` or `launch_template_name` must be specified.
+
+This configuration block supports the following:
+
+- `launch_template_id` - (Optional) ID of the launch template. Conflicts with `launch_template_name`.
+- `launch_template_name` - (Optional) Name of the launch template. Conflicts with `launch_template_id`.
+- `version` - (Optional) Template version. Can be a version number, `$Latest`, or `$Default`. (Default: `$Default`).
+
+##### mixed_instances_policy launch_template override
+
+This configuration block supports the following:
+
+- `instance_type` - (Optional) Override the instance type in the Launch Template.
+- `instance_requirements` - (Optional) Override the instance type in the Launch Template with instance types that satisfy the requirements.
+- `launch_template_specification` - (Optional) Override the instance launch template specification in the Launch Template.
+- `weighted_capacity` - (Optional) Number of capacity units, which gives the instance type a proportional weight to other instance types.
+
+###### mixed_instances_policy launch_template override instance_requirements
+
+This configuration block supports the following:
+
+~> **NOTE:** Both `memory_mib.min` and `vcpu_count.min` must be specified.
+
+- `accelerator_count` - (Optional) Block describing the minimum and maximum number of accelerators (GPUs, FPGAs, or AWS Inferentia chips). Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum. Set to `0` to exclude instance types with accelerators.
+- `accelerator_manufacturers` - (Optional) List of accelerator manufacturer names. Default is any manufacturer.
+
+    ```
+    Valid names:
+    * amazon-web-services
+    * amd
+    * nvidia
+    * xilinx
+    ```
+
+- `accelerator_names` - (Optional) List of accelerator names. Default is any accelerator.
+
+    ```
+    Valid names:
+    * a100 - NVIDIA A100 GPUs
+    * v100 - NVIDIA V100 GPUs
+    * k80 - NVIDIA K80 GPUs
+    * t4 - NVIDIA T4 GPUs
+    * m60 - NVIDIA M60 GPUs
+    * radeon-pro-v520 - AMD Radeon Pro V520 GPUs
+    * vu9p - Xilinx VU9P FPGAs
+    ```
+
+- `accelerator_total_memory_mib` - (Optional) Block describing the minimum and maximum total memory of the accelerators. Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+- `accelerator_types` - (Optional) List of accelerator types. Default is any accelerator type.
+
+    ```
+    Valid types:
+    * fpga
+    * gpu
+    * inference
+    ```
+
+- `allowed_instance_types` - (Optional) List of instance types to apply your specified attributes against. All other instance types are ignored, even if they match your specified attributes. You can use strings with one or more wild cards, represented by an asterisk (\*), to allow an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are allowing the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are allowing all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is all instance types.
+
+    ~> **NOTE:** If you specify `allowed_instance_types`, you can't specify `excluded_instance_types`.
+
+- `bare_metal` - (Optional) Indicate whether bare metal instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+- `baseline_ebs_bandwidth_mbps` - (Optional) Block describing the minimum and maximum baseline EBS bandwidth, in Mbps. Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+- `burstable_performance` - (Optional) Indicate whether burstable performance instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+- `cpu_manufacturers` (Optional) List of CPU manufacturer names. Default is any manufacturer.
+
+    ~> **NOTE:** Don't confuse the CPU hardware manufacturer with the CPU hardware architecture. Instances will be launched with a compatible CPU architecture based on the Amazon Machine Image (AMI) that you specify in your launch template.
+
+    ```
+    Valid names:
+    * amazon-web-services
+    * amd
+    * intel
+    ```
+
+- `excluded_instance_types` - (Optional) List of instance types to exclude.
You can use strings with one or more wild cards, represented by an asterisk (\*), to exclude an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are excluding the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are excluding all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is no excluded instance types.
+
+    ~> **NOTE:** If you specify `excluded_instance_types`, you can't specify `allowed_instance_types`.
+
+- `instance_generations` - (Optional) List of instance generation names. Default is any generation.
+
+    ```
+    Valid names:
+    * current - Recommended for best performance.
+    * previous - For existing applications optimized for older instance types.
+    ```
+
+- `local_storage` - (Optional) Indicate whether instance types with local storage volumes are `included`, `excluded`, or `required`. Default is `included`.
+- `local_storage_types` - (Optional) List of local storage type names. Default is any storage type.
+
+    ```
+    Valid names:
+    * hdd - hard disk drive
+    * ssd - solid state drive
+    ```
+
+- `memory_gib_per_vcpu` - (Optional) Block describing the minimum and maximum amount of memory (GiB) per vCPU. Default is no minimum or maximum.
+    - `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    - `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+- `memory_mib` - (Required) Block describing the minimum and maximum amount of memory (MiB). Default is no maximum.
+    - `min` - (Required) Minimum.
+    - `max` - (Optional) Maximum.
+- `network_bandwidth_gbps` - (Optional) Block describing the minimum and maximum amount of network bandwidth, in gigabits per second (Gbps). Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+- `network_interface_count` - (Optional) Block describing the minimum and maximum number of network interfaces. Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+- `on_demand_max_price_percentage_over_lowest_price` - (Optional) Price protection threshold for On-Demand Instances. This is the maximum you’ll pay for an On-Demand Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, it will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 20.
+
+    If you set `desired_capacity_type` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+
+- `require_hibernate_support` - (Optional) Indicate whether instance types must support On-Demand Instance Hibernation, either `true` or `false`. Default is `false`.
+- `spot_max_price_percentage_over_lowest_price` - (Optional) Price protection threshold for Spot Instances. This is the maximum you’ll pay for a Spot Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, it will exclude instance types whose price is higher than your threshold.
The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 100.
+
+    If you set `desired_capacity_type` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+
+- `total_local_storage_gb` - (Optional) Block describing the minimum and maximum total local storage (GB). Default is no minimum or maximum.
+    - `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    - `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+- `vcpu_count` - (Required) Block describing the minimum and maximum number of vCPUs. Default is no maximum.
+    - `min` - (Required) Minimum.
+    - `max` - (Optional) Maximum.
+
+### tag
+
+The `tag` attribute accepts exactly one tag declaration with the following fields:
+
+- `key` - (Required) Tag key.
+- `value` - (Required) Tag value.
+- `propagate_at_launch` - (Required) Enables propagation of the tag to Amazon EC2 instances launched via this ASG.
+
+To declare multiple tags, additional `tag` blocks can be specified.
+
+~> **NOTE:** Other AWS APIs may automatically add special tags to their associated Auto Scaling Group for management purposes, such as ECS Capacity Providers adding the `AmazonECSManaged` tag. These should generally be included in the configuration so that Terraform does not attempt to remove them and so that, if `min_size` was greater than zero on creation, these tags are applied to any initial EC2 instances in the Auto Scaling Group. If these tags were missing from the Auto Scaling Group configuration on creation, affected EC2 instances may require manually adding the tags to ensure they work properly with the other AWS service.
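+
+For illustration, a minimal sketch (not generated by `cdktf convert`; the launch configuration name and Availability Zone are placeholder assumptions, and the tag value should mirror whatever the service actually applied) of keeping such a service-managed tag in the configuration:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.autoscaling_group import AutoscalingGroup, AutoscalingGroupTag
+class MyEcsManagedTagSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AutoscalingGroup(self, "example",
+            availability_zones=["us-east-1a"],
+            launch_configuration="example-lc",  # placeholder; assumed to exist
+            max_size=2,
+            min_size=1,
+            # Mirror the tag added by the ECS Capacity Provider so Terraform
+            # does not try to remove it and initial instances receive it.
+            tag=[AutoscalingGroupTag(
+                key="AmazonECSManaged",
+                value="true",  # assumption: use the value the service applied
+                propagate_at_launch=True
+            )
+            ]
+        )
+```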
+
+### instance_refresh
+
+This configuration block supports the following:
+
+- `strategy` - (Required) Strategy to use for instance refresh. The only allowed value is `Rolling`. See [StartInstanceRefresh Action](https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_StartInstanceRefresh.html#API_StartInstanceRefresh_RequestParameters) for more information.
+- `preferences` - (Optional) Override default parameters for Instance Refresh.
+    - `checkpoint_delay` - (Optional) Number of seconds to wait after a checkpoint. Defaults to `3600`.
+    - `checkpoint_percentages` - (Optional) List of percentages for each checkpoint. Values must be unique and in ascending order. To replace all instances, the final number must be `100`.
+    - `instance_warmup` - (Optional) Number of seconds until a newly launched instance is configured and ready to use. Default behavior is to use the Auto Scaling Group's health check grace period.
+    - `min_healthy_percentage` - (Optional) Amount of capacity in the Auto Scaling group that must remain healthy during an instance refresh to allow the operation to continue, as a percentage of the desired capacity of the Auto Scaling group. Defaults to `90`.
+    - `skip_matching` - (Optional) If `true`, Amazon EC2 Auto Scaling skips replacing instances that already match the desired configuration. Defaults to `false`.
+    - `auto_rollback` - (Optional) Automatically roll back if the instance refresh fails. Defaults to `false`. This option may only be set to `true` when specifying a `launch_template` or `mixed_instances_policy`.
+- `triggers` - (Optional) Set of additional property names that will trigger an Instance Refresh. A refresh will always be triggered by a change in any of `launch_configuration`, `launch_template`, or `mixed_instances_policy`.
+
+~> **NOTE:** A refresh is started when any of the following Auto Scaling Group properties change: `launch_configuration`, `launch_template`, `mixed_instances_policy`. Additional properties can be specified in the `triggers` property of `instance_refresh`.
+
+~> **NOTE:** A refresh will not start when `version = "$Latest"` is configured in the `launch_template` block. To trigger the instance refresh when a launch template is changed, configure `version` to use the `latest_version` attribute of the `aws_launch_template` resource.
+
+~> **NOTE:** Auto Scaling Groups support up to one active instance refresh at a time. When this resource is updated, any existing refresh is cancelled.
+
+~> **NOTE:** Depending on health check settings and group size, an instance refresh may take a long time or fail. This resource does not wait for the instance refresh to complete.
+
+### warm_pool
+
+This configuration block supports the following:
+
+- `instance_reuse_policy` - (Optional) Whether instances in the Auto Scaling group can be returned to the warm pool on scale in. The default is to terminate instances in the Auto Scaling group when the group scales in. See [`instance_reuse_policy`](#instance_reuse_policy) below.
+- `max_group_prepared_capacity` - (Optional) Total maximum number of instances that are allowed to be in the warm pool or in any state except Terminated for the Auto Scaling group.
+- `min_size` - (Optional) Minimum number of instances to maintain in the warm pool. This helps you to ensure that there is always a certain number of warmed instances available to handle traffic spikes. Defaults to `0` if not specified.
+- `pool_state` - (Optional) Sets the instance state to transition to after the lifecycle hooks finish. Valid values are: `Stopped` (default), `Running`, or `Hibernated`.
+
+### traffic_source
+
+- `identifier` - Identifies the traffic source. For Application Load Balancers, Gateway Load Balancers, Network Load Balancers, and VPC Lattice, this will be the Amazon Resource Name (ARN) for a target group in this account and Region. For Classic Load Balancers, this will be the name of the Classic Load Balancer in this account and Region.
+- `type` - Provides additional context for the value of `identifier`. Valid values:
+    - `elb` if `identifier` is the name of a Classic Load Balancer.
+    - `elbv2` if `identifier` is the ARN of an Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target group.
+    - `vpc-lattice` if `identifier` is the ARN of a VPC Lattice target group.
+
+##### instance_reuse_policy
+
+This configuration block supports the following:
+
+- `reuse_on_scale_in` - (Optional) Whether instances in the Auto Scaling group can be returned to the warm pool on scale in.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `id` - Auto Scaling Group ID.
+- `arn` - ARN for this Auto Scaling Group.
+- `availability_zones` - Availability zones of the Auto Scaling Group.
+- `min_size` - Minimum size of the Auto Scaling Group.
+- `max_size` - Maximum size of the Auto Scaling Group.
+- `default_cooldown` - Time between a scaling activity and the succeeding scaling activity.
+- `default_instance_warmup` - The duration of the default instance warmup, in seconds.
+- `name` - Name of the Auto Scaling Group.
+- `health_check_grace_period` - Time after instance comes into service before checking health.
+- `health_check_type` - "EC2" or "ELB". Controls how health checking is done.
+- `desired_capacity` - The number of Amazon EC2 instances that should be running in the group.
+- `launch_configuration` - The launch configuration of the Auto Scaling Group.
+- `predicted_capacity` - Predicted capacity of the group.
+- `vpc_zone_identifier` - The VPC zone identifier.
+- `warm_pool_size` - Current size of the warm pool.
+
+~> **NOTE:** When using `ELB` as the `health_check_type`, `health_check_grace_period` is required.
+
+~> **NOTE:** Terraform has two ways you can add lifecycle hooks - via the `initial_lifecycle_hook` attribute from this resource, or via the separate [`aws_autoscaling_lifecycle_hook`](/docs/providers/aws/r/autoscaling_lifecycle_hook.html) resource. `initial_lifecycle_hook` exists here because any lifecycle hooks added with `aws_autoscaling_lifecycle_hook` will not be added until the Auto Scaling Group has been created, and depending on your [capacity](#waiting-for-capacity) settings, after the initial instances have been launched, creating unintended behavior. If you need hooks to run on all instances, add them with `initial_lifecycle_hook` here, but take care to not duplicate these hooks in `aws_autoscaling_lifecycle_hook`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `10m`)
+
+## Waiting for Capacity
+
+A newly created ASG is initially empty and begins to scale to `min_size` (or `desired_capacity`, if specified) by launching instances using the provided Launch Configuration. These instances take time to launch and boot.
+
+On ASG Update, changes to these values also take time to result in the target number of instances providing service.
+
+Terraform provides two mechanisms to help consistently manage ASG scale-up time across dependent resources.
+
+#### Waiting for ASG Capacity
+
+The first is default behavior. Terraform waits after ASG creation for `min_size` (or `desired_capacity`, if specified) healthy instances to show up in the ASG before continuing.
+
+If `min_size` or `desired_capacity` are changed in a subsequent update, Terraform will also wait for the correct number of healthy instances before continuing.
+
+Terraform considers an instance "healthy" when the ASG reports `HealthStatus: "Healthy"` and `LifecycleState: "InService"`. See the [AWS AutoScaling Docs](https://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/AutoScalingGroupLifecycle.html) for more information on an ASG's lifecycle.
+
+Terraform will wait for healthy instances for up to `wait_for_capacity_timeout`. If ASG creation is taking more than a few minutes, it's worth investigating for scaling activity errors, which can be caused by problems with the selected Launch Configuration.
+
+Setting `wait_for_capacity_timeout` to `"0"` disables ASG Capacity waiting.
+
+#### Waiting for ELB Capacity
+
+The second mechanism is optional, and affects ASGs with attached ELBs specified via the `load_balancers` attribute or with ALBs specified with `target_group_arns`.
+
+The `min_elb_capacity` parameter causes Terraform to wait for at least the requested number of instances to show up `"InService"` in all attached ELBs during ASG creation. It has no effect on ASG updates.
+
+If `wait_for_elb_capacity` is set, Terraform will wait for exactly that number of instances to be `"InService"` in all attached ELBs on both creation and updates.
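+
+For illustration, a minimal sketch (not generated by `cdktf convert`; the launch configuration and Classic ELB names are placeholder assumptions) of a group that waits for two healthy instances in its attached load balancer:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.autoscaling_group import AutoscalingGroup
+class MyCapacityWaitSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AutoscalingGroup(self, "example",
+            availability_zones=["us-east-1a"],
+            launch_configuration="example-lc",  # placeholder; assumed to exist
+            load_balancers=["example-elb"],     # placeholder; assumed to exist
+            max_size=4,
+            min_size=2,
+            # Wait for exactly 2 instances to be InService in the ELB on both
+            # create and update (takes precedence over min_elb_capacity).
+            wait_for_elb_capacity=2,
+            # Stop waiting for healthy capacity after 12 minutes.
+            wait_for_capacity_timeout="12m"
+        )
+```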
+ +These parameters can be used to ensure that service is being provided before +Terraform moves on. If new instances don't pass the ELB's health checks for any +reason, the Terraform apply will time out, and the ASG will be marked as +tainted (i.e., marked to be destroyed in a follow up run). + +As with ASG Capacity, Terraform will wait for up to `wait_for_capacity_timeout` +for the proper number of instances to be healthy. + +#### Troubleshooting Capacity Waiting Timeouts + +If ASG creation takes more than a few minutes, this could indicate one of a +number of configuration problems. See the [AWS Docs on Load Balancer +Troubleshooting](https://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/elb-troubleshooting.html) +for more information. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Auto Scaling Groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Auto Scaling Groups using the `name`. For example: + +```console +% terraform import aws_autoscaling_group.web web-asg +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/autoscaling_group_tag.html.markdown b/website/docs/cdktf/python/r/autoscaling_group_tag.html.markdown new file mode 100644 index 00000000000..aee3b7a8964 --- /dev/null +++ b/website/docs/cdktf/python/r/autoscaling_group_tag.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_group_tag" +description: |- + Manages an individual Autoscaling Group tag +--- + + + +# Resource: aws_autoscaling_group_tag + +Manages an individual Autoscaling Group (ASG) tag. This resource should only be used in cases where ASGs are created outside Terraform (e.g., ASGs implicitly created by EKS Node Groups). + +~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `aws_autoscaling_group` and `aws_autoscaling_group_tag` to manage tags of the same ASG will cause a perpetual difference where the `aws_autoscaling_group` resource will try to remove the tag being added by the `aws_autoscaling_group_tag` resource. + +~> **NOTE:** This tagging resource does not use the [provider `ignore_tags` configuration](/docs/providers/aws/index.html#ignore_tags). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformIterator, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group_tag import AutoscalingGroupTagA +from imports.aws.eks_node_group import EksNodeGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, nodeRoleArn, scalingConfig, subnetIds): + super().__init__(scope, name) + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. 
If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_for_each_iterator = TerraformIterator.from_list( + Token.as_any( + Fn.toset("${[ for asg in ${" + + Fn.flatten("${[ for resources in ${" + aws_eks_node_group_example.resources + "} : resources.autoscaling_groups]}") + "} : asg.name]}"))) + AutoscalingGroupTagA(self, "example", + autoscaling_group_name=Token.as_string(example_for_each_iterator.value), + tag=AutoscalingGroupTagTag( + key="k8s.io/cluster-autoscaler/node-template/label/eks.amazonaws.com/capacityType", + propagate_at_launch=False, + value="SPOT" + ), + for_each=example_for_each_iterator + ) + aws_eks_node_group_example = EksNodeGroup(self, "example_1", + cluster_name="example", + node_group_name="example", + node_role_arn=node_role_arn, + scaling_config=scaling_config, + subnet_ids=subnet_ids + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_eks_node_group_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `autoscaling_group_name` - (Required) Name of the Autoscaling Group to apply the tag to. +* `tag` - (Required) Tag to create. The `tag` block is documented below. + +The `tag` block supports the following arguments: + +* `key` - (Required) Tag name. +* `value` - (Required) Tag value. +* `propagate_at_launch` - (Required) Whether to propagate the tags to instances launched by the ASG. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ASG name and key, separated by a comma (`,`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_autoscaling_group_tag` using the ASG name and key, separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_autoscaling_group_tag` using the ASG name and key, separated by a comma (`,`). For example: + +```console +% terraform import aws_autoscaling_group_tag.example asg-example,k8s.io/cluster-autoscaler/node-template/label/eks.amazonaws.com/capacityType +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/autoscaling_lifecycle_hook.html.markdown b/website/docs/cdktf/python/r/autoscaling_lifecycle_hook.html.markdown new file mode 100644 index 00000000000..64ac27c4ebc --- /dev/null +++ b/website/docs/cdktf/python/r/autoscaling_lifecycle_hook.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_lifecycle_hook" +description: |- + Provides an AutoScaling Lifecycle Hook resource. +--- + + + +# Resource: aws_autoscaling_lifecycle_hook + +Provides an AutoScaling Lifecycle Hook resource. + +~> **NOTE:** Terraform has two types of ways you can add lifecycle hooks - via +the `initial_lifecycle_hook` attribute from the +[`aws_autoscaling_group`](/docs/providers/aws/r/autoscaling_group.html) +resource, or via this one. 
Hooks added via this resource will not be added +until the autoscaling group has been created, and depending on your +[capacity](/docs/providers/aws/r/autoscaling_group.html#waiting-for-capacity) +settings, after the initial instances have been launched, creating unintended +behavior. If you need hooks to run on all instances, add them with +`initial_lifecycle_hook` in +[`aws_autoscaling_group`](/docs/providers/aws/r/autoscaling_group.html), +but take care to not duplicate those hooks with this resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.autoscaling_lifecycle_hook import AutoscalingLifecycleHook +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, maxSize, minSize): + super().__init__(scope, name) + foobar = AutoscalingGroup(self, "foobar", + availability_zones=["us-west-2a"], + health_check_type="EC2", + name="terraform-test-foobar5", + tag=[AutoscalingGroupTag( + key="Foo", + propagate_at_launch=True, + value="foo-bar" + ) + ], + termination_policies=["OldestInstance"], + max_size=max_size, + min_size=min_size + ) + aws_autoscaling_lifecycle_hook_foobar = AutoscalingLifecycleHook(self, "foobar_1", + autoscaling_group_name=foobar.name, + default_result="CONTINUE", + heartbeat_timeout=2000, + lifecycle_transition="autoscaling:EC2_INSTANCE_LAUNCHING", + name="foobar", + notification_metadata=Token.as_string( + Fn.jsonencode({ + "foo": "bar" + })), + notification_target_arn="arn:aws:sqs:us-east-1:444455556666:queue1*", + role_arn="arn:aws:iam::123456789012:role/S3Access" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_lifecycle_hook_foobar.override_logical_id("foobar") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the lifecycle hook. +* `autoscaling_group_name` - (Required) Name of the Auto Scaling group to which you want to assign the lifecycle hook +* `default_result` - (Optional) Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses or if an unexpected failure occurs. The value for this parameter can be either CONTINUE or ABANDON. The default value for this parameter is ABANDON. +* `heartbeat_timeout` - (Optional) Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the DefaultResult parameter +* `lifecycle_transition` - (Required) Instance state to which you want to attach the lifecycle hook. For a list of lifecycle hook types, see [describe-lifecycle-hook-types](https://docs.aws.amazon.com/cli/latest/reference/autoscaling/describe-lifecycle-hook-types.html#examples) +* `notification_metadata` - (Optional) Contains additional information that you want to include any time Auto Scaling sends a message to the notification target. +* `notification_target_arn` - (Optional) ARN of the notification target that Auto Scaling will use to notify you when an instance is in the transition state for the lifecycle hook. This ARN target can be either an SQS queue or an SNS topic. 
+* `role_arn` - (Optional) ARN of the IAM role that allows the Auto Scaling group to publish to the specified notification target. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AutoScaling Lifecycle Hooks using the role autoscaling_group_name and name separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AutoScaling Lifecycle Hooks using the role autoscaling_group_name and name separated by `/`. For example: + +```console +% terraform import aws_autoscaling_lifecycle_hook.test-lifecycle-hook asg-name/lifecycle-hook-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/autoscaling_notification.html.markdown b/website/docs/cdktf/python/r/autoscaling_notification.html.markdown new file mode 100644 index 00000000000..e02d29c2cca --- /dev/null +++ b/website/docs/cdktf/python/r/autoscaling_notification.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_notification" +description: |- + Provides an AutoScaling Group with Notification support +--- + + + +# Resource: aws_autoscaling_notification + +Provides an AutoScaling Group with Notification support, via SNS Topics. Each of +the `notifications` map to a [Notification Configuration][2] inside Amazon Web +Services, and are applied to each AutoScaling Group you supply. + +## Example Usage + +Basic usage: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.autoscaling_notification import AutoscalingNotification +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, maxSize, minSize, maxSize1, minSize1): + super().__init__(scope, name) + bar = AutoscalingGroup(self, "bar", + name="foobar1-terraform-test", + max_size=max_size, + min_size=min_size + ) + foo = AutoscalingGroup(self, "foo", + name="barfoo-terraform-test", + max_size=max_size1, + min_size=min_size1 + ) + example = SnsTopic(self, "example", + name="example-topic" + ) + AutoscalingNotification(self, "example_notifications", + group_names=[bar.name, foo.name], + notifications=["autoscaling:EC2_INSTANCE_LAUNCH", "autoscaling:EC2_INSTANCE_TERMINATE", "autoscaling:EC2_INSTANCE_LAUNCH_ERROR", "autoscaling:EC2_INSTANCE_TERMINATE_ERROR" + ], + topic_arn=example.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `group_names` - (Required) List of AutoScaling Group Names +* `notifications` - (Required) List of Notification Types that trigger +notifications. 
Acceptable values are documented [in the AWS documentation here][1]
+* `topic_arn` - (Required) Topic ARN for notifications to be sent through
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `group_names`
+* `notifications`
+* `topic_arn`
+
+[1]: https://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_NotificationConfiguration.html
+[2]: https://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_DescribeNotificationConfigurations.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/autoscaling_policy.html.markdown b/website/docs/cdktf/python/r/autoscaling_policy.html.markdown
new file mode 100644
index 00000000000..c1f31116576
--- /dev/null
+++ b/website/docs/cdktf/python/r/autoscaling_policy.html.markdown
@@ -0,0 +1,508 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_autoscaling_policy"
+description: |-
+  Provides an AutoScaling Scaling Policy resource.
+---
+
+
+
+# Resource: aws_autoscaling_policy
+
+Provides an AutoScaling Scaling Policy resource.
+
+~> **NOTE:** You may want to omit `desired_capacity` attribute from attached `aws_autoscaling_group`
+when using autoscaling policies. It's good practice to pick either
+[manual](https://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-manual-scaling.html)
+or [dynamic](https://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html)
+(policy-based) scaling.
+
+> **Hands-on:** Try the [Manage AWS Auto Scaling Groups](https://learn.hashicorp.com/tutorials/terraform/aws-asg?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.autoscaling_group import AutoscalingGroup
+from imports.aws.autoscaling_policy import AutoscalingPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bar = AutoscalingGroup(self, "bar",
+            availability_zones=["us-east-1a"],
+            force_delete=True,
+            health_check_grace_period=300,
+            health_check_type="ELB",
+            # `foo` refers to a launch configuration defined elsewhere in the configuration.
+            launch_configuration=foo.name,
+            max_size=5,
+            min_size=2,
+            name="foobar3-terraform-test"
+        )
+        AutoscalingPolicy(self, "bat",
+            adjustment_type="ChangeInCapacity",
+            autoscaling_group_name=bar.name,
+            cooldown=300,
+            name="foobar3-terraform-test",
+            scaling_adjustment=4
+        )
+```
+
+### Create target tracking scaling policy using metric math
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
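+# The metric math below computes an SQS backlog per instance: m1 (visible
+# messages in the queue) divided by m2 (InService instances) feeds expression
+# e1, and the policy scales the group to hold e1 at the target_value of 100.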
+# +from imports.aws.autoscaling_policy import AutoscalingPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AutoscalingPolicy(self, "example", + autoscaling_group_name="my-test-asg", + name="foo", + policy_type="TargetTrackingScaling", + target_tracking_configuration=AutoscalingPolicyTargetTrackingConfiguration( + customized_metric_specification=AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecification( + metrics=[AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetrics( + id="m1", + label="Get the queue size (the number of messages waiting to be processed)", + metric_stat=AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetricsMetricStat( + metric=AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetricsMetricStatMetric( + dimensions=[AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetricsMetricStatMetricDimensions( + name="QueueName", + value="my-queue" + ) + ], + metric_name="ApproximateNumberOfMessagesVisible", + namespace="AWS/SQS" + ), + stat="Sum" + ), + return_data=False + ), AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetrics( + id="m2", + label="Get the group size (the number of InService instances)", + metric_stat=AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetricsMetricStat( + metric=AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetricsMetricStatMetric( + dimensions=[AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetricsMetricStatMetricDimensions( + name="AutoScalingGroupName", + value="my-asg" + ) + ], + metric_name="GroupInServiceInstances", + namespace="AWS/AutoScaling" + ), + stat="Average" + ), + return_data=False + ), AutoscalingPolicyTargetTrackingConfigurationCustomizedMetricSpecificationMetrics( + expression="m1 / m2", + id="e1", + label="Calculate the backlog per instance", + return_data=True + ) + ] + ), + target_value=100 + ) + ) +``` + +### Create predictive scaling policy using customized metrics + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
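+# This policy forecasts from custom CloudWatch queries: load_sum (hourly total
+# group CPU), capacity_sum (5-minute in-service instance count), and a derived
+# weighted_average scaling metric built from the two, targeting a value of 10.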
+
+#
+from imports.aws.autoscaling_policy import AutoscalingPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AutoscalingPolicy(self, "example",
+            autoscaling_group_name="my-test-asg",
+            name="foo",
+            policy_type="PredictiveScaling",
+            predictive_scaling_configuration=AutoscalingPolicyPredictiveScalingConfiguration(
+                metric_specification=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecification(
+                    customized_capacity_metric_specification=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedCapacityMetricSpecification(
+                        metric_data_queries=[AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedCapacityMetricSpecificationMetricDataQueries(
+                            expression="SUM(SEARCH('{AWS/AutoScaling,AutoScalingGroupName} MetricName=\\\"GroupInServiceInstances\\\" my-test-asg', 'Average', 300))",
+                            id="capacity_sum"
+                        )
+                        ]
+                    ),
+                    customized_load_metric_specification=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedLoadMetricSpecification(
+                        metric_data_queries=[AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedLoadMetricSpecificationMetricDataQueries(
+                            expression="SUM(SEARCH('{AWS/EC2,AutoScalingGroupName} MetricName=\\\"CPUUtilization\\\" my-test-asg', 'Sum', 3600))",
+                            id="load_sum"
+                        )
+                        ]
+                    ),
+                    customized_scaling_metric_specification=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecification(
+                        metric_data_queries=[AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecificationMetricDataQueries(
+                            expression="SUM(SEARCH('{AWS/AutoScaling,AutoScalingGroupName} MetricName=\\\"GroupInServiceInstances\\\" my-test-asg', 'Average', 300))",
+                            id="capacity_sum",
+                            return_data=False
+                        ), AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecificationMetricDataQueries(
+                            expression="SUM(SEARCH('{AWS/EC2,AutoScalingGroupName} MetricName=\\\"CPUUtilization\\\" my-test-asg', 'Sum', 300))",
+                            id="load_sum",
+                            return_data=False
+                        ), AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecificationMetricDataQueries(
+                            expression="load_sum / (capacity_sum * PERIOD(capacity_sum) / 60)",
+                            id="weighted_average"
+                        )
+                        ]
+                    ),
+                    target_value=10
+                )
+            )
+        )
+```
+
+### Create predictive scaling policy using customized scaling and predefined load metric
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
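+# Here only the scaling metric is customized (average CPUUtilization of the
+# group); the load metric uses the predefined ASGTotalCPUUtilization type.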
+# +from imports.aws.autoscaling_policy import AutoscalingPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AutoscalingPolicy(self, "example", + autoscaling_group_name="my-test-asg", + name="foo", + policy_type="PredictiveScaling", + predictive_scaling_configuration=AutoscalingPolicyPredictiveScalingConfiguration( + metric_specification=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecification( + customized_scaling_metric_specification=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecification( + metric_data_queries=[AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecificationMetricDataQueries( + id="scaling", + metric_stat=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecificationMetricDataQueriesMetricStat( + metric=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecificationMetricDataQueriesMetricStatMetric( + dimensions=[AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationCustomizedScalingMetricSpecificationMetricDataQueriesMetricStatMetricDimensions( + name="AutoScalingGroupName", + value="my-test-asg" + ) + ], + metric_name="CPUUtilization", + namespace="AWS/EC2" + ), + stat="Average" + ) + ) + ] + ), + predefined_load_metric_specification=AutoscalingPolicyPredictiveScalingConfigurationMetricSpecificationPredefinedLoadMetricSpecification( + predefined_metric_type="ASGTotalCPUUtilization", + resource_label="testLabel" + ), + target_value=10 + ) + ) + ) +``` + +## Argument Reference + +* `name` - (Required) Name of the policy. +* `autoscaling_group_name` - (Required) Name of the autoscaling group. +* `adjustment_type` - (Optional) Whether the adjustment is an absolute number or a percentage of the current capacity. Valid values are `ChangeInCapacity`, `ExactCapacity`, and `PercentChangeInCapacity`. +* `policy_type` - (Optional) Policy type, either "SimpleScaling", "StepScaling", "TargetTrackingScaling", or "PredictiveScaling". If this value isn't provided, AWS will default to "SimpleScaling." +* `predictive_scaling_configuration` - (Optional) Predictive scaling policy configuration to use with Amazon EC2 Auto Scaling. +* `estimated_instance_warmup` - (Optional) Estimated time, in seconds, until a newly launched instance will contribute CloudWatch metrics. Without a value, AWS will default to the group's specified cooldown period. +* `enabled` - (Optional) Whether the scaling policy is enabled or disabled. Default: `true`. + +The following argument is only available to "SimpleScaling" and "StepScaling" type policies: + +* `min_adjustment_magnitude` - (Optional) Minimum value to scale by when `adjustment_type` is set to `PercentChangeInCapacity`. + +The following arguments are only available to "SimpleScaling" type policies: + +* `cooldown` - (Optional) Amount of time, in seconds, after a scaling activity completes and before the next scaling activity can start. +* `scaling_adjustment` - (Optional) Number of instances by which to scale. `adjustment_type` determines the interpretation of this number (e.g., as an absolute number or as a percentage of the existing Auto Scaling group size). A positive increment adds to the current capacity and a negative value removes from the current capacity. 
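+
+Simple and step scaling policies only take effect when something invokes them,
+typically a CloudWatch alarm that lists the policy ARN as an alarm action. A
+minimal sketch of that wiring (assuming the generated `CloudwatchMetricAlarm`
+binding; the names and thresholds are illustrative, not part of the examples
+above):
+
+```python
+# A sketch, not generated output - provider bindings come from `cdktf get`.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.autoscaling_policy import AutoscalingPolicy
+from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
+class AlarmWiringSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bat = AutoscalingPolicy(self, "bat",
+            adjustment_type="ChangeInCapacity",
+            autoscaling_group_name="foobar3-terraform-test",
+            cooldown=300,
+            name="foobar3-terraform-test",
+            scaling_adjustment=4
+        )
+        # Invoke the policy when average group CPU stays above 70% for two periods.
+        CloudwatchMetricAlarm(self, "scale_out",
+            alarm_name="foobar3-high-cpu",
+            namespace="AWS/EC2",
+            metric_name="CPUUtilization",
+            statistic="Average",
+            comparison_operator="GreaterThanThreshold",
+            threshold=70,
+            period=120,
+            evaluation_periods=2,
+            dimensions={
+                "AutoScalingGroupName": "foobar3-terraform-test"
+            },
+            alarm_actions=[bat.arn]
+        )
+```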
+ +The following arguments are only available to "StepScaling" type policies: + +* `metric_aggregation_type` - (Optional) Aggregation type for the policy's metrics. Valid values are "Minimum", "Maximum", and "Average". Without a value, AWS will treat the aggregation type as "Average". +* `step_adjustment` - (Optional) Set of adjustments that manage +group scaling. These have the following structure: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Op, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_policy import AutoscalingPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, autoscalingGroupName, name): + super().__init__(scope, name) + AutoscalingPolicy(self, "example", + step_adjustment=[AutoscalingPolicyStepAdjustment( + metric_interval_lower_bound=Token.as_string(1), + metric_interval_upper_bound=Token.as_string(2), + scaling_adjustment=Token.as_number(Op.negate(1)) + ), AutoscalingPolicyStepAdjustment( + metric_interval_lower_bound=Token.as_string(2), + metric_interval_upper_bound=Token.as_string(3), + scaling_adjustment=1 + ) + ], + autoscaling_group_name=autoscaling_group_name, + name=name + ) +``` + +The following fields are available in step adjustments: + +* `scaling_adjustment` - (Required) Number of members by which to +scale, when the adjustment bounds are breached. A positive value scales +up. A negative value scales down. +* `metric_interval_lower_bound` - (Optional) Lower bound for the +difference between the alarm threshold and the CloudWatch metric. +Without a value, AWS will treat this bound as negative infinity. +* `metric_interval_upper_bound` - (Optional) Upper bound for the +difference between the alarm threshold and the CloudWatch metric. +Without a value, AWS will treat this bound as positive infinity. The upper bound +must be greater than the lower bound. + +Notice the bounds are **relative** to the alarm threshold, meaning that the starting point is not 0%, but the alarm threshold. Check the official [docs](https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-scaling-simple-step.html#as-scaling-steps) for a detailed example. + +The following arguments are only available to "TargetTrackingScaling" type policies: + +* `target_tracking_configuration` - (Optional) Target tracking policy. These have the following structure: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.autoscaling_policy import AutoscalingPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, autoscalingGroupName, name): + super().__init__(scope, name) + AutoscalingPolicy(self, "example", + target_tracking_configuration=AutoscalingPolicyTargetTrackingConfiguration( + predefined_metric_specification=AutoscalingPolicyTargetTrackingConfigurationPredefinedMetricSpecification( + predefined_metric_type="ASGAverageCPUUtilization" + ), + target_value=40 + ), + autoscaling_group_name=autoscaling_group_name, + name=name + ) +``` + +The following fields are available in target tracking configuration: + +* `predefined_metric_specification` - (Optional) Predefined metric. 
Conflicts with `customized_metric_specification`. +* `customized_metric_specification` - (Optional) Customized metric. Conflicts with `predefined_metric_specification`. +* `target_value` - (Required) Target value for the metric. +* `disable_scale_in` - (Optional, Default: false) Whether scale in by the target tracking policy is disabled. + +### predefined_metric_specification + +This argument supports the following arguments: + +* `predefined_metric_type` - (Required) Metric type. +* `resource_label` - (Optional) Identifies the resource associated with the metric type. + +### customized_metric_specification + +This argument supports the following arguments: + +* `metric_dimension` - (Optional) Dimensions of the metric. +* `metric_name` - (Optional) Name of the metric. +* `namespace` - (Optional) Namespace of the metric. +* `statistic` - (Optional) Statistic of the metric. +* `unit` - (Optional) Unit of the metric. +* `metrics` - (Optional) Metrics to include, as a metric data query. + +#### metric_dimension + +This argument supports the following arguments: + +* `name` - (Required) Name of the dimension. +* `value` - (Required) Value of the dimension. + +#### metrics + +This argument supports the following arguments: + +* `expression` - (Optional) Math expression used on the returned metric. You must specify either `expression` or `metric_stat`, but not both. +* `id` - (Required) Short name for the metric used in target tracking scaling policy. +* `label` - (Optional) Human-readable label for this metric or expression. +* `metric_stat` - (Optional) Structure that defines CloudWatch metric to be used in target tracking scaling policy. You must specify either `expression` or `metric_stat`, but not both. +* `return_data` - (Optional) Boolean that indicates whether to return the timestamps and raw data values of this metric, the default is true + +##### metric_stat + +This argument supports the following arguments: + +* `metric` - (Required) Structure that defines the CloudWatch metric to return, including the metric name, namespace, and dimensions. +* `stat` - (Required) Statistic of the metrics to return. +* `unit` - (Optional) Unit of the metrics to return. + +##### metric + +This argument supports the following arguments: + +* `dimensions` - (Optional) Dimensions of the metric. +* `metric_name` - (Required) Name of the metric. +* `namespace` - (Required) Namespace of the metric. + +###### dimensions + +This argument supports the following arguments: + +* `name` - (Required) Name of the dimension. +* `value` - (Required) Value of the dimension. + +### predictive_scaling_configuration + +This argument supports the following arguments: + +* `max_capacity_breach_behavior` - (Optional) Defines the behavior that should be applied if the forecast capacity approaches or exceeds the maximum capacity of the Auto Scaling group. Valid values are `HonorMaxCapacity` or `IncreaseMaxCapacity`. Default is `HonorMaxCapacity`. +* `max_capacity_buffer` - (Optional) Size of the capacity buffer to use when the forecast capacity is close to or exceeds the maximum capacity. Valid range is `0` to `100`. If set to `0`, Amazon EC2 Auto Scaling may scale capacity higher than the maximum capacity to equal but not exceed forecast capacity. +* `metric_specification` - (Required) This structure includes the metrics and target utilization to use for predictive scaling. +* `mode` - (Optional) Predictive scaling mode. Valid values are `ForecastAndScale` and `ForecastOnly`. Default is `ForecastOnly`. 
+* `scheduling_buffer_time` - (Optional) Amount of time, in seconds, by which the instance launch time can be advanced. Minimum is `0`. + +#### metric_specification + +This argument supports the following arguments: + +* `customized_capacity_metric_specification` - (Optional) Customized capacity metric specification. The field is only valid when you use `customized_load_metric_specification` +* `customized_load_metric_specification` - (Optional) Customized load metric specification. +* `customized_scaling_metric_specification` - (Optional) Customized scaling metric specification. +* `predefined_load_metric_specification` - (Optional) Predefined load metric specification. +* `predefined_metric_pair_specification` - (Optional) Metric pair specification from which Amazon EC2 Auto Scaling determines the appropriate scaling metric and load metric to use. +* `predefined_scaling_metric_specification` - (Optional) Predefined scaling metric specification. + +##### predefined_load_metric_specification + +This argument supports the following arguments: + +* `predefined_metric_type` - (Required) Metric type. Valid values are `ASGTotalCPUUtilization`, `ASGTotalNetworkIn`, `ASGTotalNetworkOut`, or `ALBTargetGroupRequestCount`. +* `resource_label` - (Required) Label that uniquely identifies a specific Application Load Balancer target group from which to determine the request count served by your Auto Scaling group. + +##### predefined_metric_pair_specification + +This argument supports the following arguments: + +* `predefined_metric_type` - (Required) Which metrics to use. There are two different types of metrics for each metric type: one is a load metric and one is a scaling metric. For example, if the metric type is `ASGCPUUtilization`, the Auto Scaling group's total CPU metric is used as the load metric, and the average CPU metric is used for the scaling metric. Valid values are `ASGCPUUtilization`, `ASGNetworkIn`, `ASGNetworkOut`, or `ALBRequestCount`. +* `resource_label` - (Required) Label that uniquely identifies a specific Application Load Balancer target group from which to determine the request count served by your Auto Scaling group. + +##### predefined_scaling_metric_specification + +This argument supports the following arguments: + +* `predefined_metric_type` - (Required) Describes a scaling metric for a predictive scaling policy. Valid values are `ASGAverageCPUUtilization`, `ASGAverageNetworkIn`, `ASGAverageNetworkOut`, or `ALBRequestCountPerTarget`. +* `resource_label` - (Required) Label that uniquely identifies a specific Application Load Balancer target group from which to determine the request count served by your Auto Scaling group. + +##### customized_scaling_metric_specification + +This argument supports the following arguments: + +* `metric_data_queries` - (Required) List of up to 10 structures that defines custom scaling metric in predictive scaling policy + +##### customized_load_metric_specification + +This argument supports the following arguments: + +* `metric_data_queries` - (Required) List of up to 10 structures that defines custom load metric in predictive scaling policy + +##### customized_capacity_metric_specification + +This argument supports the following arguments: + +* `metric_data_queries` - (Required) List of up to 10 structures that defines custom capacity metric in predictive scaling policy + +##### metric_data_queries + +This argument supports the following arguments: + +* `expression` - (Optional) Math expression used on the returned metric. 
You must specify either `expression` or `metric_stat`, but not both. +* `id` - (Required) Short name for the metric used in predictive scaling policy. +* `label` - (Optional) Human-readable label for this metric or expression. +* `metric_stat` - (Optional) Structure that defines CloudWatch metric to be used in predictive scaling policy. You must specify either `expression` or `metric_stat`, but not both. +* `return_data` - (Optional) Boolean that indicates whether to return the timestamps and raw data values of this metric, the default is true + +##### metric_stat + +This argument supports the following arguments: + +* `metric` - (Required) Structure that defines the CloudWatch metric to return, including the metric name, namespace, and dimensions. +* `stat` - (Required) Statistic of the metrics to return. +* `unit` - (Optional) Unit of the metrics to return. + +##### metric + +This argument supports the following arguments: + +* `dimensions` - (Optional) Dimensions of the metric. +* `metric_name` - (Required) Name of the metric. +* `namespace` - (Required) Namespace of the metric. + +##### dimensions + +This argument supports the following arguments: + +* `name` - (Required) Name of the dimension. +* `value` - (Required) Value of the dimension. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN assigned by AWS to the scaling policy. +* `name` - Scaling policy's name. +* `autoscaling_group_name` - The scaling policy's assigned autoscaling group. +* `adjustment_type` - Scaling policy's adjustment type. +* `policy_type` - Scaling policy's type. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AutoScaling scaling policy using the role autoscaling_group_name and name separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AutoScaling scaling policy using the role autoscaling_group_name and name separated by `/`. For example: + +```console +% terraform import aws_autoscaling_policy.test-policy asg-name/policy-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/autoscaling_schedule.html.markdown b/website/docs/cdktf/python/r/autoscaling_schedule.html.markdown new file mode 100644 index 00000000000..35b5491bbea --- /dev/null +++ b/website/docs/cdktf/python/r/autoscaling_schedule.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_schedule" +description: |- + Provides an AutoScaling Schedule resource. +--- + + + +# Resource: aws_autoscaling_schedule + +Provides an AutoScaling Schedule resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
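+# The scheduled action below sets the group to min 0 / max 1 / desired 0 for
+# the window between start_time and end_time (both UTC timestamps).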
+# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.autoscaling_schedule import AutoscalingSchedule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foobar = AutoscalingGroup(self, "foobar", + availability_zones=["us-west-2a"], + force_delete=True, + health_check_grace_period=300, + health_check_type="ELB", + max_size=1, + min_size=1, + name="terraform-test-foobar5", + termination_policies=["OldestInstance"] + ) + aws_autoscaling_schedule_foobar = AutoscalingSchedule(self, "foobar_1", + autoscaling_group_name=foobar.name, + desired_capacity=0, + end_time="2016-12-12T06:00:00Z", + max_size=1, + min_size=0, + scheduled_action_name="foobar", + start_time="2016-12-11T18:00:00Z" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscaling_schedule_foobar.override_logical_id("foobar") +``` + +## Argument Reference + +The following arguments are required: + +* `autoscaling_group_name` - (Required) The name of the Auto Scaling group. +* `scheduled_action_name` - (Required) The name of this scaling action. + +The following arguments are optional: + +* `desired_capacity` - (Optional) The initial capacity of the Auto Scaling group after the scheduled action runs and the capacity it attempts to maintain. Set to `-1` if you don't want to change the desired capacity at the scheduled time. Defaults to `0`. +* `end_time` - (Optional) The date and time for the recurring schedule to end, in UTC with the format `"YYYY-MM-DDThh:mm:ssZ"` (e.g. `"2021-06-01T00:00:00Z"`). +* `max_size` - (Optional) The maximum size of the Auto Scaling group. Set to `-1` if you don't want to change the maximum size at the scheduled time. Defaults to `0`. +* `min_size` - (Optional) The minimum size of the Auto Scaling group. Set to `-1` if you don't want to change the minimum size at the scheduled time. Defaults to `0`. +* `recurrence` - (Optional) The recurring schedule for this action specified using the Unix cron syntax format. +* `start_time` - (Optional) The date and time for the recurring schedule to start, in UTC with the format `"YYYY-MM-DDThh:mm:ssZ"` (e.g. `"2021-06-01T00:00:00Z"`). +* `time_zone` - (Optional) Specifies the time zone for a cron expression. Valid values are the canonical names of the IANA time zones (such as `Etc/GMT+9` or `Pacific/Tahiti`). + +~> **NOTE:** When `start_time` and `end_time` are specified with `recurrence` , they form the boundaries of when the recurring action will start and stop. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN assigned by AWS to the autoscaling schedule. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AutoScaling ScheduledAction using the `auto-scaling-group-name` and `scheduled-action-name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AutoScaling ScheduledAction using the `auto-scaling-group-name` and `scheduled-action-name`. 
For example:
+
+```console
+% terraform import aws_autoscaling_schedule.resource-name auto-scaling-group-name/scheduled-action-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/autoscaling_traffic_source_attachment.html.markdown b/website/docs/cdktf/python/r/autoscaling_traffic_source_attachment.html.markdown
new file mode 100644
index 00000000000..96e2a760a33
--- /dev/null
+++ b/website/docs/cdktf/python/r/autoscaling_traffic_source_attachment.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_autoscaling_traffic_source_attachment"
+description: |-
+  Terraform resource for managing an AWS Auto Scaling Traffic Source Attachment.
+---
+
+
+
+# Resource: aws_autoscaling_traffic_source_attachment
+
+Attaches a traffic source to an Auto Scaling group.
+
+~> **NOTE on Auto Scaling Groups, Attachments and Traffic Source Attachments:** Terraform provides standalone [Attachment](autoscaling_attachment.html) (for attaching Classic Load Balancers and Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target groups) and Traffic Source Attachment (for attaching Load Balancers and VPC Lattice target groups) resources and an [Auto Scaling Group](autoscaling_group.html) resource with `load_balancers`, `target_group_arns` and `traffic_source` attributes. Do not use the same traffic source in more than one of these resources. Doing so will cause a conflict of attachments. A [`lifecycle` configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) can be used to suppress differences if necessary.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.autoscaling_traffic_source_attachment import AutoscalingTrafficSourceAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AutoscalingTrafficSourceAttachment(self, "example",
+            autoscaling_group_name=Token.as_string(aws_autoscaling_group_example.id),
+            traffic_source=AutoscalingTrafficSourceAttachmentTrafficSource(
+                identifier=Token.as_string(aws_lb_target_group_example.arn),
+                type="elbv2"
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `autoscaling_group_name` - (Required) The name of the Auto Scaling group.
+- `traffic_source` - (Required) The unique identifier of the traffic source to attach.
+
+`traffic_source` supports the following:
+
+- `identifier` - (Required) Identifies the traffic source. For Application Load Balancers, Gateway Load Balancers, Network Load Balancers, and VPC Lattice, this will be the Amazon Resource Name (ARN) for a target group in this account and Region. For Classic Load Balancers, this will be the name of the Classic Load Balancer in this account and Region.
+- `type` - (Required) Provides additional context for the value of `identifier`. The following lists the valid values:
+    - `elb` if `identifier` is the name of a Classic Load Balancer.
+    - `elbv2` if `identifier` is the ARN of an Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target group.
+    - `vpc-lattice` if `identifier` is the ARN of a VPC Lattice target group.
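+
+As a further illustration of the `type` values, a minimal sketch of a VPC
+Lattice attachment (the group name and target group ARN are hypothetical
+placeholders):
+
+```python
+# A sketch, not generated output - provider bindings come from `cdktf get`.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.autoscaling_traffic_source_attachment import AutoscalingTrafficSourceAttachment, AutoscalingTrafficSourceAttachmentTrafficSource
+class LatticeAttachmentSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Attach a VPC Lattice target group to an existing Auto Scaling group.
+        AutoscalingTrafficSourceAttachment(self, "lattice",
+            autoscaling_group_name="example-asg",
+            traffic_source=AutoscalingTrafficSourceAttachmentTrafficSource(
+                identifier="arn:aws:vpc-lattice:us-east-1:123456789012:targetgroup/tg-0123456789abcdef0",
+                type="vpc-lattice"
+            )
+        )
+```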
+ +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/autoscalingplans_scaling_plan.html.markdown b/website/docs/cdktf/python/r/autoscalingplans_scaling_plan.html.markdown new file mode 100644 index 00000000000..9c6ef346286 --- /dev/null +++ b/website/docs/cdktf/python/r/autoscalingplans_scaling_plan.html.markdown @@ -0,0 +1,260 @@ +--- +subcategory: "Auto Scaling Plans" +layout: "aws" +page_title: "AWS: aws_autoscalingplans_scaling_plan" +description: |- + Manages an AWS Auto Scaling scaling plan. +--- + + + +# Resource: aws_autoscalingplans_scaling_plan + +Manages an AWS Auto Scaling scaling plan. +More information can be found in the [AWS Auto Scaling User Guide](https://docs.aws.amazon.com/autoscaling/plans/userguide/what-is-aws-auto-scaling.html). + +~> **NOTE:** The AWS Auto Scaling service uses an AWS IAM service-linked role to manage predictive scaling of Amazon EC2 Auto Scaling groups. The service attempts to automatically create this role the first time a scaling plan with predictive scaling enabled is created. +An [`aws_iam_service_linked_role`](/docs/providers/aws/r/iam_service_linked_role.html) resource can be used to manually manage this role. +See the [AWS documentation](https://docs.aws.amazon.com/autoscaling/plans/userguide/aws-auto-scaling-service-linked-roles.html#create-service-linked-role-manual) for more details. + +## Example Usage + +### Basic Dynamic Scaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
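+# This plan targets every Auto Scaling group tagged application=example and
+# holds average CPU utilization at 70% through target tracking.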
+# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.autoscalingplans_scaling_plan import AutoscalingplansScalingPlan +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + available = DataAwsAvailabilityZones(self, "available") + example = AutoscalingGroup(self, "example", + availability_zones=[ + Token.as_string(property_access(available.names, ["0"])) + ], + launch_configuration=Token.as_string(aws_launch_configuration_example.name), + max_size=3, + min_size=0, + name_prefix="example", + tags=[{ + "key": "application", + "propagate_at_launch": True, + "value": "example" + } + ] + ) + aws_autoscalingplans_scaling_plan_example = + AutoscalingplansScalingPlan(self, "example_2", + application_source=AutoscalingplansScalingPlanApplicationSource( + tag_filter=[AutoscalingplansScalingPlanApplicationSourceTagFilter( + key="application", + values=["example"] + ) + ] + ), + name="example-dynamic-cost-optimization", + scaling_instruction=[AutoscalingplansScalingPlanScalingInstruction( + max_capacity=3, + min_capacity=0, + resource_id=Token.as_string( + Fn.format("autoScalingGroup/%s", [example.name])), + scalable_dimension="autoscaling:autoScalingGroup:DesiredCapacity", + service_namespace="autoscaling", + target_tracking_configuration=[AutoscalingplansScalingPlanScalingInstructionTargetTrackingConfiguration( + predefined_scaling_metric_specification=AutoscalingplansScalingPlanScalingInstructionTargetTrackingConfigurationPredefinedScalingMetricSpecification( + predefined_scaling_metric_type="ASGAverageCPUUtilization" + ), + target_value=70 + ) + ] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscalingplans_scaling_plan_example.override_logical_id("example") +``` + +### Basic Predictive Scaling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
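+# Same tag-based application source, but dynamic scaling is disabled and
+# predictive scaling runs in ForecastAndScale mode against total group CPU.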
+# +from imports.aws.autoscaling_group import AutoscalingGroup +from imports.aws.autoscalingplans_scaling_plan import AutoscalingplansScalingPlan +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + available = DataAwsAvailabilityZones(self, "available") + example = AutoscalingGroup(self, "example", + availability_zones=[ + Token.as_string(property_access(available.names, ["0"])) + ], + launch_configuration=Token.as_string(aws_launch_configuration_example.name), + max_size=3, + min_size=0, + name_prefix="example", + tags=[{ + "key": "application", + "propagate_at_launch": True, + "value": "example" + } + ] + ) + aws_autoscalingplans_scaling_plan_example = + AutoscalingplansScalingPlan(self, "example_2", + application_source=AutoscalingplansScalingPlanApplicationSource( + tag_filter=[AutoscalingplansScalingPlanApplicationSourceTagFilter( + key="application", + values=["example"] + ) + ] + ), + name="example-predictive-cost-optimization", + scaling_instruction=[AutoscalingplansScalingPlanScalingInstruction( + disable_dynamic_scaling=True, + max_capacity=3, + min_capacity=0, + predefined_load_metric_specification=AutoscalingplansScalingPlanScalingInstructionPredefinedLoadMetricSpecification( + predefined_load_metric_type="ASGTotalCPUUtilization" + ), + predictive_scaling_max_capacity_behavior="SetForecastCapacityToMaxCapacity", + predictive_scaling_mode="ForecastAndScale", + resource_id=Token.as_string( + Fn.format("autoScalingGroup/%s", [example.name])), + scalable_dimension="autoscaling:autoScalingGroup:DesiredCapacity", + service_namespace="autoscaling", + target_tracking_configuration=[AutoscalingplansScalingPlanScalingInstructionTargetTrackingConfiguration( + predefined_scaling_metric_specification=AutoscalingplansScalingPlanScalingInstructionTargetTrackingConfigurationPredefinedScalingMetricSpecification( + predefined_scaling_metric_type="ASGAverageCPUUtilization" + ), + target_value=70 + ) + ] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_autoscalingplans_scaling_plan_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the scaling plan. Names cannot contain vertical bars, colons, or forward slashes. +* `application_source` - (Required) CloudFormation stack or set of tags. You can create one scaling plan per application source. +* `scaling_instruction` - (Required) Scaling instructions. More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_ScalingInstruction.html). + +The `application_source` object supports the following: + +* `cloudformation_stack_arn` - (Optional) ARN of a AWS CloudFormation stack. +* `tag_filter` - (Optional) Set of tags. + +The `tag_filter` object supports the following: + +* `key` - (Required) Tag key. +* `values` - (Optional) Tag values. + +The `scaling_instruction` object supports the following: + +* `max_capacity` - (Required) Maximum capacity of the resource. The exception to this upper limit is if you specify a non-default setting for `predictive_scaling_max_capacity_behavior`. +* `min_capacity` - (Required) Minimum capacity of the resource. +* `resource_id` - (Required) ID of the resource. This string consists of the resource type and unique identifier. 
+* `scalable_dimension` - (Required) Scalable dimension associated with the resource. Valid values: `autoscaling:autoScalingGroup:DesiredCapacity`, `dynamodb:index:ReadCapacityUnits`, `dynamodb:index:WriteCapacityUnits`, `dynamodb:table:ReadCapacityUnits`, `dynamodb:table:WriteCapacityUnits`, `ecs:service:DesiredCount`, `ec2:spot-fleet-request:TargetCapacity`, `rds:cluster:ReadReplicaCount`. +* `service_namespace` - (Required) Namespace of the AWS service. Valid values: `autoscaling`, `dynamodb`, `ecs`, `ec2`, `rds`. +* `target_tracking_configuration` - (Required) Structure that defines new target tracking configurations. Each of these structures includes a specific scaling metric and a target value for the metric, along with various parameters to use with dynamic scaling. +More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_TargetTrackingConfiguration.html). +* `customized_load_metric_specification` - (Optional) Customized load metric to use for predictive scaling. You must specify either `customized_load_metric_specification` or `predefined_load_metric_specification` when configuring predictive scaling. +More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_CustomizedLoadMetricSpecification.html). +* `disable_dynamic_scaling` - (Optional) Boolean controlling whether dynamic scaling by AWS Auto Scaling is disabled. Defaults to `false`. +* `predefined_load_metric_specification` - (Optional) Predefined load metric to use for predictive scaling. You must specify either `predefined_load_metric_specification` or `customized_load_metric_specification` when configuring predictive scaling. +More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_PredefinedLoadMetricSpecification.html). +* `predictive_scaling_max_capacity_behavior`- (Optional) Defines the behavior that should be applied if the forecast capacity approaches or exceeds the maximum capacity specified for the resource. +Valid values: `SetForecastCapacityToMaxCapacity`, `SetMaxCapacityAboveForecastCapacity`, `SetMaxCapacityToForecastCapacity`. +* `predictive_scaling_max_capacity_buffer` - (Optional) Size of the capacity buffer to use when the forecast capacity is close to or exceeds the maximum capacity. +* `predictive_scaling_mode` - (Optional) Predictive scaling mode. Valid values: `ForecastAndScale`, `ForecastOnly`. +* `scaling_policy_update_behavior` - (Optional) Controls whether a resource's externally created scaling policies are kept or replaced. Valid values: `KeepExternalPolicies`, `ReplaceExternalPolicies`. Defaults to `KeepExternalPolicies`. +* `scheduled_action_buffer_time` - (Optional) Amount of time, in seconds, to buffer the run time of scheduled scaling actions when scaling out. + +The `customized_load_metric_specification` object supports the following: + +* `metric_name` - (Required) Name of the metric. +* `namespace` - (Required) Namespace of the metric. +* `statistic` - (Required) Statistic of the metric. Currently, the value must always be `Sum`. +* `dimensions` - (Optional) Dimensions of the metric. +* `unit` - (Optional) Unit of the metric. + +The `predefined_load_metric_specification` object supports the following: + +* `predefined_load_metric_type` - (Required) Metric type. Valid values: `ALBTargetGroupRequestCount`, `ASGTotalCPUUtilization`, `ASGTotalNetworkIn`, `ASGTotalNetworkOut`. 
+* `resource_label` - (Optional) Identifies the resource associated with the metric type. + +The `target_tracking_configuration` object supports the following: + +* `target_value` - (Required) Target value for the metric. +* `customized_scaling_metric_specification` - (Optional) Customized metric. You can specify either `customized_scaling_metric_specification` or `predefined_scaling_metric_specification`. +More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_CustomizedScalingMetricSpecification.html). +* `disable_scale_in` - (Optional) Boolean indicating whether scale in by the target tracking scaling policy is disabled. Defaults to `false`. +* `predefined_scaling_metric_specification` - (Optional) Predefined metric. You can specify either `predefined_scaling_metric_specification` or `customized_scaling_metric_specification`. +More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_PredefinedScalingMetricSpecification.html). +* `estimated_instance_warmup` - (Optional) Estimated time, in seconds, until a newly launched instance can contribute to the CloudWatch metrics. +This value is used only if the resource is an Auto Scaling group. +* `scale_in_cooldown` - (Optional) Amount of time, in seconds, after a scale in activity completes before another scale in activity can start. +This value is not used if the scalable resource is an Auto Scaling group. +* `scale_out_cooldown` - (Optional) Amount of time, in seconds, after a scale-out activity completes before another scale-out activity can start. +This value is not used if the scalable resource is an Auto Scaling group. + +The `customized_scaling_metric_specification` object supports the following: + +* `metric_name` - (Required) Name of the metric. +* `namespace` - (Required) Namespace of the metric. +* `statistic` - (Required) Statistic of the metric. Valid values: `Average`, `Maximum`, `Minimum`, `SampleCount`, `Sum`. +* `dimensions` - (Optional) Dimensions of the metric. +* `unit` - (Optional) Unit of the metric. + +The `predefined_scaling_metric_specification` object supports the following: + +* `predefined_scaling_metric_type` - (Required) Metric type. Valid values: `ALBRequestCountPerTarget`, `ASGAverageCPUUtilization`, `ASGAverageNetworkIn`, `ASGAverageNetworkOut`, `DynamoDBReadCapacityUtilization`, `DynamoDBWriteCapacityUtilization`, `ECSServiceAverageCPUUtilization`, `ECSServiceAverageMemoryUtilization`, `EC2SpotFleetRequestAverageCPUUtilization`, `EC2SpotFleetRequestAverageNetworkIn`, `EC2SpotFleetRequestAverageNetworkOut`, `RDSReaderAverageCPUUtilization`, `RDSReaderAverageDatabaseConnections`. +* `resource_label` - (Optional) Identifies the resource associated with the metric type. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Scaling plan identifier. +* `scaling_plan_version` - The version number of the scaling plan. This value is always 1. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Auto Scaling scaling plans using the `name`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Auto Scaling scaling plans using the `name`. For example: + +```console +% terraform import aws_autoscalingplans_scaling_plan.example MyScale1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/backup_framework.html.markdown b/website/docs/cdktf/python/r/backup_framework.html.markdown new file mode 100644 index 00000000000..ac82a87e726 --- /dev/null +++ b/website/docs/cdktf/python/r/backup_framework.html.markdown @@ -0,0 +1,167 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_framework" +description: |- + Provides an AWS Backup Framework resource. +--- + + + +# Resource: aws_backup_framework + +Provides an AWS Backup Framework resource. + +~> **Note:** For the Deployment Status of the Framework to be successful, please turn on resource tracking to enable AWS Config recording to track configuration changes of your backup resources. This can be done from the AWS Console. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.backup_framework import BackupFramework +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BackupFramework(self, "Example", + control=[BackupFrameworkControl( + input_parameter=[BackupFrameworkControlInputParameter( + name="requiredRetentionDays", + value="35" + ) + ], + name="BACKUP_RECOVERY_POINT_MINIMUM_RETENTION_CHECK" + ), BackupFrameworkControl( + input_parameter=[BackupFrameworkControlInputParameter( + name="requiredFrequencyUnit", + value="hours" + ), BackupFrameworkControlInputParameter( + name="requiredRetentionDays", + value="35" + ), BackupFrameworkControlInputParameter( + name="requiredFrequencyValue", + value="1" + ) + ], + name="BACKUP_PLAN_MIN_FREQUENCY_AND_MIN_RETENTION_CHECK" + ), BackupFrameworkControl( + name="BACKUP_RECOVERY_POINT_ENCRYPTED" + ), BackupFrameworkControl( + name="BACKUP_RESOURCES_PROTECTED_BY_BACKUP_PLAN", + scope=BackupFrameworkControlScope( + compliance_resource_types=["EBS"] + ) + ), BackupFrameworkControl( + name="BACKUP_RECOVERY_POINT_MANUAL_DELETION_DISABLED" + ), BackupFrameworkControl( + input_parameter=[BackupFrameworkControlInputParameter( + name="maxRetentionDays", + value="100" + ), BackupFrameworkControlInputParameter( + name="minRetentionDays", + value="1" + ) + ], + name="BACKUP_RESOURCES_PROTECTED_BY_BACKUP_VAULT_LOCK", + scope=BackupFrameworkControlScope( + compliance_resource_types=["EBS"] + ) + ), BackupFrameworkControl( + input_parameter=[BackupFrameworkControlInputParameter( + name="recoveryPointAgeUnit", + value="days" + ), BackupFrameworkControlInputParameter( + name="recoveryPointAgeValue", + value="1" + ) + ], + name="BACKUP_LAST_RECOVERY_POINT_CREATED", + scope=BackupFrameworkControlScope( + compliance_resource_types=["EBS"] + ) + ) + ], + description="this is an example framework", + name="exampleFramework", + tags={ + "Name": "Example Framework" + } + ) +``` + +## Argument Reference + +This resource supports the 
following arguments:
+
+* `control` - (Required) One or more control blocks that make up the framework. Each control in the list has a name, input parameters, and scope. Detailed below.
+* `description` - (Optional) The description of the framework with a maximum of 1,024 characters.
+* `name` - (Required) The unique name of the framework. The name must be between 1 and 256 characters, starting with a letter, and consisting of letters, numbers, and underscores.
+* `tags` - (Optional) Metadata that you can assign to help organize the frameworks you create. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Control Arguments
+
+`control` has the following attributes:
+
+* `input_parameter` - (Optional) One or more input parameter blocks. An example of a control with two parameters is: "backup plan frequency is at least daily and the retention period is at least 1 year". The first parameter is daily. The second parameter is 1 year. Detailed below.
+* `name` - (Required) The name of a control. This name is between 1 and 256 characters.
+* `scope` - (Optional) The scope of a control. The control scope defines what the control will evaluate. Three examples of control scopes are: a specific backup plan, all backup plans with a specific tag, or all backup plans. Detailed below.
+
+### Input Parameter Arguments
+
+`input_parameter` has the following attributes:
+
+* `name` - (Optional) The name of a parameter, for example, `BackupPlanFrequency`.
+* `value` - (Optional) The value of a parameter, for example, `hourly`.
+
+### Scope Arguments
+
+`scope` has the following attributes:
+
+* `compliance_resource_ids` - (Optional) The ID of the only AWS resource that you want your control scope to contain. Minimum number of 1 item. Maximum number of 100 items.
+* `compliance_resource_types` - (Optional) Describes whether the control scope includes one or more types of resources, such as EFS or RDS.
+* `tags` - (Optional) The tag key-value pair applied to those AWS resources that you want to trigger an evaluation for a rule. A maximum of one key-value pair can be provided. A tag-scoped sketch follows the attribute list below.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the backup framework.
+* `creation_time` - The date and time that a framework is created, in Unix format and Coordinated Universal Time (UTC).
+* `deployment_status` - The deployment status of a framework. The statuses are: `CREATE_IN_PROGRESS` | `UPDATE_IN_PROGRESS` | `DELETE_IN_PROGRESS` | `COMPLETED` | `FAILED`.
+* `id` - The id of the backup framework.
+* `status` - A framework consists of one or more controls. Each control governs a resource, such as backup plans, backup selections, backup vaults, or recovery points. You can also turn AWS Config recording on or off for each resource. For more information, refer to the [AWS documentation for Framework Status](https://docs.aws.amazon.com/aws-backup/latest/devguide/API_DescribeFramework.html#Backup-DescribeFramework-response-FrameworkStatus).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
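+
+A control can also be scoped by resource tag rather than by resource type. The following hand-written minimal sketch illustrates that shape; the stack class, framework name, and tag values are illustrative (not part of the generated example above), and it assumes the nested structs used in the example above are importable from the same generated module:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.backup_framework import (BackupFramework, BackupFrameworkControl,
+    BackupFrameworkControlScope)
+class TagScopedFramework(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupFramework(self, "tag_scoped",
+            control=[BackupFrameworkControl(
+                name="BACKUP_RESOURCES_PROTECTED_BY_BACKUP_PLAN",
+                # Evaluate only resources carrying the tag Environment=prod
+                scope=BackupFrameworkControlScope(
+                    tags={
+                        "Environment": "prod"
+                    }
+                )
+            )
+            ],
+            name="tagScopedFramework"
+        )
+```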
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `2m`)
+* `update` - (Default `2m`)
+* `delete` - (Default `2m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Framework using the `id` which corresponds to the name of the Backup Framework. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Backup Framework using the `id` which corresponds to the name of the Backup Framework. For example:
+
+```console
+% terraform import aws_backup_framework.test exampleFramework
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/backup_global_settings.html.markdown b/website/docs/cdktf/python/r/backup_global_settings.html.markdown
new file mode 100644
index 00000000000..214e187b3dd
--- /dev/null
+++ b/website/docs/cdktf/python/r/backup_global_settings.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_global_settings"
+description: |-
+  Provides an AWS Backup Global Settings resource.
+---
+
+
+
+# Resource: aws_backup_global_settings
+
+Provides an AWS Backup Global Settings resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.backup_global_settings import BackupGlobalSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupGlobalSettings(self, "test",
+            global_settings={
+                "is_cross_account_backup_enabled": "true"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `global_settings` - (Required) A map of services along with the opt-in preferences for the account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS Account ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Global Settings using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Backup Global Settings using the `id`. For example:
+
+```console
+% terraform import aws_backup_global_settings.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/backup_plan.html.markdown b/website/docs/cdktf/python/r/backup_plan.html.markdown
new file mode 100644
index 00000000000..e802ecc3bea
--- /dev/null
+++ b/website/docs/cdktf/python/r/backup_plan.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_plan"
+description: |-
+  Provides an AWS Backup plan resource.
+---
+
+
+
+# Resource: aws_backup_plan
+
+Provides an AWS Backup plan resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.backup_plan import BackupPlan
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupPlan(self, "example",
+            advanced_backup_setting=[BackupPlanAdvancedBackupSetting(
+                backup_options={
+                    "WindowsVSS": "enabled"
+                },
+                resource_type="EC2"
+            )
+            ],
+            name="tf_example_backup_plan",
+            rule=[BackupPlanRule(
+                lifecycle=BackupPlanRuleLifecycle(
+                    delete_after=14
+                ),
+                rule_name="tf_example_backup_rule",
+                schedule="cron(0 12 * * ? *)",
+                target_vault_name=test.name
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The display name of a backup plan.
+* `rule` - (Required) A rule object that specifies a scheduled task that is used to back up a selection of resources.
+* `advanced_backup_setting` - (Optional) An object that specifies backup options for each resource type.
+* `tags` - (Optional) Metadata that you can assign to help organize the plans you create. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Rule Arguments
+
+`rule` supports the following attributes:
+
+* `rule_name` - (Required) A display name for a backup rule.
+* `target_vault_name` - (Required) The name of a logical container where backups are stored.
+* `schedule` - (Optional) A CRON expression specifying when AWS Backup initiates a backup job.
+* `enable_continuous_backup` - (Optional) Enable continuous backups for supported resources.
+* `start_window` - (Optional) The amount of time in minutes before beginning a backup.
+* `completion_window` - (Optional) The amount of time in minutes AWS Backup attempts a backup before canceling the job and returning an error.
+* `lifecycle` - (Optional) The lifecycle defines when a protected resource is transitioned to cold storage and when it expires. Fields documented below.
+* `recovery_point_tags` - (Optional) Metadata that you can assign to help organize the resources that you create.
+* `copy_action` - (Optional) Configuration block(s) with copy operation settings. Detailed below.
+
+### Lifecycle Arguments
+
+`lifecycle` supports the following attributes:
+
+* `cold_storage_after` - (Optional) Specifies the number of days after creation that a recovery point is moved to cold storage.
+* `delete_after` - (Optional) Specifies the number of days after creation that a recovery point is deleted. Must be 90 days greater than `cold_storage_after`. A sketch combining both fields follows the copy action arguments below.
+
+### Copy Action Arguments
+
+`copy_action` supports the following attributes:
+
+* `lifecycle` - (Optional) The lifecycle defines when a protected resource is copied over to a backup vault and when it expires. Fields documented above.
+* `destination_vault_arn` - (Required) An Amazon Resource Name (ARN) that uniquely identifies the destination backup vault for the copied backup.
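+
+As a worked example of the lifecycle constraint above, the following hand-written minimal sketch keeps recovery points warm for 30 days, holds them in cold storage until deletion at 120 days (120 is at least 30 + 90, so the rule is valid), and copies each recovery point to a second vault. The vault name, destination ARN, and the `BackupPlanRuleCopyAction` struct name are assumptions following the generated-binding conventions used in the example above:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.backup_plan import (BackupPlan, BackupPlanRule,
+    BackupPlanRuleLifecycle, BackupPlanRuleCopyAction)
+class LifecyclePlan(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupPlan(self, "lifecycle_example",
+            name="tf_example_lifecycle_plan",
+            rule=[BackupPlanRule(
+                rule_name="tf_example_lifecycle_rule",
+                target_vault_name="example_backup_vault",
+                schedule="cron(0 12 * * ? *)",
+                # delete_after must be 90 days greater than cold_storage_after: 120 >= 30 + 90
+                lifecycle=BackupPlanRuleLifecycle(
+                    cold_storage_after=30,
+                    delete_after=120
+                ),
+                copy_action=[BackupPlanRuleCopyAction(
+                    destination_vault_arn="arn:aws:backup:us-east-1:123456789012:backup-vault:secondary"
+                )
+                ]
+            )
+            ]
+        )
+```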
+
+### Advanced Backup Setting Arguments
+
+`advanced_backup_setting` supports the following arguments:
+
+* `backup_options` - (Required) Specifies the backup option for a selected resource. This option is only available for Windows VSS backup jobs. Set to `{ WindowsVSS = "enabled" }` to enable the Windows VSS backup option and create a VSS Windows backup.
+* `resource_type` - (Required) The type of AWS resource to be backed up. For VSS Windows backups, the only supported resource type is Amazon EC2. Valid values: `EC2`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the backup plan.
+* `arn` - The ARN of the backup plan.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `version` - Unique, randomly generated, Unicode, UTF-8 encoded string that serves as the version ID of the backup plan.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Plan using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Backup Plan using the `id`. For example:
+
+```console
+% terraform import aws_backup_plan.test <id>
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/backup_region_settings.html.markdown b/website/docs/cdktf/python/r/backup_region_settings.html.markdown
new file mode 100644
index 00000000000..6df39c59d21
--- /dev/null
+++ b/website/docs/cdktf/python/r/backup_region_settings.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_region_settings"
+description: |-
+  Provides an AWS Backup Region Settings resource.
+---
+
+
+
+# Resource: aws_backup_region_settings
+
+Provides an AWS Backup Region Settings resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.backup_region_settings import BackupRegionSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupRegionSettings(self, "test",
+            resource_type_management_preference={
+                "DynamoDB": True,
+                "EFS": True
+            },
+            resource_type_opt_in_preference={
+                "Aurora": True,
+                "DocumentDB": True,
+                "DynamoDB": True,
+                "EBS": True,
+                "EC2": True,
+                "EFS": True,
+                "FSx": True,
+                "Neptune": True,
+                "RDS": True,
+                "Storage Gateway": True,
+                "VirtualMachine": True
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_type_opt_in_preference` - (Required) A map of services along with the opt-in preferences for the Region.
+* `resource_type_management_preference` - (Optional) A map of services along with the management preferences for the Region.
For more information, see the [AWS Documentation](https://docs.aws.amazon.com/aws-backup/latest/devguide/API_UpdateRegionSettings.html#API_UpdateRegionSettings_RequestSyntax). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The AWS region. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Region Settings using the `region`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Backup Region Settings using the `region`. For example: + +```console +% terraform import aws_backup_region_settings.test us-west-2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/backup_report_plan.html.markdown b/website/docs/cdktf/python/r/backup_report_plan.html.markdown new file mode 100644 index 00000000000..0ea37a83449 --- /dev/null +++ b/website/docs/cdktf/python/r/backup_report_plan.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_report_plan" +description: |- + Provides an AWS Backup Report Plan resource. +--- + + + +# Resource: aws_backup_report_plan + +Provides an AWS Backup Report Plan resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.backup_report_plan import BackupReportPlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BackupReportPlan(self, "example", + description="example description", + name="example_name", + report_delivery_channel=BackupReportPlanReportDeliveryChannel( + formats=["CSV", "JSON"], + s3_bucket_name="example-bucket-name" + ), + report_setting=BackupReportPlanReportSetting( + report_template="RESTORE_JOB_REPORT" + ), + tags={ + "Name": "Example Report Plan" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) The description of the report plan with a maximum of 1,024 characters +* `name` - (Required) The unique name of the report plan. The name must be between 1 and 256 characters, starting with a letter, and consisting of letters, numbers, and underscores. +* `report_delivery_channel` - (Required) An object that contains information about where and how to deliver your reports, specifically your Amazon S3 bucket name, S3 key prefix, and the formats of your reports. Detailed below. +* `report_setting` - (Required) An object that identifies the report template for the report. Reports are built using a report template. Detailed below. +* `tags` - (Optional) Metadata that you can assign to help organize the report plans you create. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
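+
+The example above delivers a restore-job report; compliance-style templates additionally name the frameworks they cover, using the sub-block arguments detailed below. The following hand-written minimal sketch assumes a backup framework is defined elsewhere in the stack (as in the other converted snippets, the `aws_backup_framework_example` reference, bucket, and names are illustrative):
+
+```python
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.backup_report_plan import (BackupReportPlan,
+    BackupReportPlanReportDeliveryChannel, BackupReportPlanReportSetting)
+class ComplianceReport(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupReportPlan(self, "compliance",
+            name="compliance_report_plan",
+            report_delivery_channel=BackupReportPlanReportDeliveryChannel(
+                formats=["JSON"],
+                s3_bucket_name="example-bucket-name",
+                # Reports are delivered under s3://example-bucket-name/compliance/...
+                s3_key_prefix="compliance"
+            ),
+            report_setting=BackupReportPlanReportSetting(
+                # Control compliance reports cover one or more frameworks
+                framework_arns=[Token.as_string(aws_backup_framework_example.arn)],
+                report_template="CONTROL_COMPLIANCE_REPORT"
+            )
+        )
+```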
+
+### Report Delivery Channel Arguments
+
+`report_delivery_channel` supports the following arguments:
+
+* `formats` - (Optional) A list of the formats of your reports: CSV, JSON, or both. If not specified, the default format is CSV.
+* `s3_bucket_name` - (Required) The unique name of the S3 bucket that receives your reports.
+* `s3_key_prefix` - (Optional) The prefix for where Backup Audit Manager delivers your reports to Amazon S3. The prefix is this part of the following path: `s3://your-bucket-name/prefix/Backup/us-west-2/year/month/day/report-name`. If not specified, there is no prefix.
+
+### Report Setting Arguments
+
+`report_setting` supports the following arguments:
+
+* `accounts` - (Optional) Specifies the list of accounts a report covers.
+* `framework_arns` - (Optional) Specifies the Amazon Resource Names (ARNs) of the frameworks a report covers.
+* `number_of_frameworks` - (Optional) Specifies the number of frameworks a report covers.
+* `organization_units` - (Optional) Specifies the list of Organizational Units a report covers.
+* `regions` - (Optional) Specifies the list of regions a report covers.
+* `report_template` - (Required) Identifies the report template for the report. Reports are built using a report template. The report templates are: `RESOURCE_COMPLIANCE_REPORT` | `CONTROL_COMPLIANCE_REPORT` | `BACKUP_JOB_REPORT` | `COPY_JOB_REPORT` | `RESTORE_JOB_REPORT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the backup report plan.
+* `creation_time` - The date and time that a report plan is created, in Unix format and Coordinated Universal Time (UTC).
+* `deployment_status` - The deployment status of a report plan. The statuses are: `CREATE_IN_PROGRESS` | `UPDATE_IN_PROGRESS` | `DELETE_IN_PROGRESS` | `COMPLETED`.
+* `id` - The id of the backup report plan.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Report Plan using the `id` which corresponds to the name of the Backup Report Plan. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Backup Report Plan using the `id` which corresponds to the name of the Backup Report Plan. For example:
+
+```console
+% terraform import aws_backup_report_plan.test example_name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/backup_selection.html.markdown b/website/docs/cdktf/python/r/backup_selection.html.markdown
new file mode 100644
index 00000000000..d809a488bd1
--- /dev/null
+++ b/website/docs/cdktf/python/r/backup_selection.html.markdown
@@ -0,0 +1,236 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_selection"
+description: |-
+  Manages selection conditions for AWS Backup plan resources.
+---
+
+
+
+# Resource: aws_backup_selection
+
+Manages selection conditions for AWS Backup plan resources.
+
+## Example Usage
+
+### IAM Role
+
+-> For more information about creating and managing IAM Roles for backups and restores, see the [AWS Backup Developer Guide](https://docs.aws.amazon.com/aws-backup/latest/devguide/iam-service-roles.html).
+
+The example below creates an IAM role with the default managed IAM policy for allowing AWS Backup to create backups.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.backup_selection import BackupSelection
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+class MyConvertedCode(TerraformStack):
+    # The selection name and backup plan ID are supplied by the caller as keyword-only arguments.
+    def __init__(self, scope, id, *, name, plan_id):
+        super().__init__(scope, id)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["backup.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        example = IamRole(self, "example",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="example"
+        )
+        aws_iam_role_policy_attachment_example = IamRolePolicyAttachment(self, "example_2",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AWSBackupServiceRolePolicyForBackup",
+            role=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_example.override_logical_id("example")
+        aws_backup_selection_example = BackupSelection(self, "example_3",
+            iam_role_arn=example.arn,
+            name=name,
+            plan_id=plan_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_backup_selection_example.override_logical_id("example")
+```
+
+### Selecting Backups By Tag
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.backup_selection import BackupSelection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupSelection(self, "example",
+            iam_role_arn=Token.as_string(aws_iam_role_example.arn),
+            name="tf_example_backup_selection",
+            plan_id=Token.as_string(aws_backup_plan_example.id),
+            selection_tag=[BackupSelectionSelectionTag(
+                key="foo",
+                type="STRINGEQUALS",
+                value="bar"
+            )
+            ]
+        )
+```
+
+### Selecting Backups By Conditions
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.backup_selection import BackupSelection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BackupSelection(self, "example", + condition=[BackupSelectionCondition( + string_equals=[BackupSelectionConditionStringEquals( + key="aws:ResourceTag/Component", + value="rds" + ) + ], + string_like=[BackupSelectionConditionStringLike( + key="aws:ResourceTag/Application", + value="app*" + ) + ], + string_not_equals=[BackupSelectionConditionStringNotEquals( + key="aws:ResourceTag/Backup", + value="false" + ) + ], + string_not_like=[BackupSelectionConditionStringNotLike( + key="aws:ResourceTag/Environment", + value="test*" + ) + ] + ) + ], + iam_role_arn=Token.as_string(aws_iam_role_example.arn), + name="tf_example_backup_selection", + plan_id=Token.as_string(aws_backup_plan_example.id), + resources=["*"] + ) +``` + +### Selecting Backups By Resource + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.backup_selection import BackupSelection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BackupSelection(self, "example", + iam_role_arn=Token.as_string(aws_iam_role_example.arn), + name="tf_example_backup_selection", + plan_id=Token.as_string(aws_backup_plan_example.id), + resources=[ + Token.as_string(aws_db_instance_example.arn), + Token.as_string(aws_ebs_volume_example.arn), + Token.as_string(aws_efs_file_system_example.arn) + ] + ) +``` + +### Selecting Backups By Not Resource + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.backup_selection import BackupSelection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BackupSelection(self, "example", + iam_role_arn=Token.as_string(aws_iam_role_example.arn), + name="tf_example_backup_selection", + not_resources=[ + Token.as_string(aws_db_instance_example.arn), + Token.as_string(aws_ebs_volume_example.arn), + Token.as_string(aws_efs_file_system_example.arn) + ], + plan_id=Token.as_string(aws_backup_plan_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The display name of a resource selection document. +* `plan_id` - (Required) The backup plan ID to be associated with the selection of resources. +* `iam_role_arn` - (Required) The ARN of the IAM role that AWS Backup uses to authenticate when restoring and backing up the target resource. See the [AWS Backup Developer Guide](https://docs.aws.amazon.com/aws-backup/latest/devguide/access-control.html#managed-policies) for additional information about using AWS managed policies or creating custom policies attached to the IAM role. +* `selection_tag` - (Optional) Tag-based conditions used to specify a set of resources to assign to a backup plan. +* `condition` - (Optional) A list of conditions that you define to assign resources to your backup plans using tags. 
+* `resources` - (Optional) An array of strings that either contain Amazon Resource Names (ARNs) or match patterns of resources to assign to a backup plan.
+* `not_resources` - (Optional) An array of strings that either contain Amazon Resource Names (ARNs) or match patterns of resources to exclude from a backup plan.
+
+Tag conditions (`selection_tag`) support the following:
+
+* `type` - (Required) An operation, such as `StringEquals`, that is applied to a key-value pair used to filter resources in a selection.
+* `key` - (Required) The key in a key-value pair.
+* `value` - (Required) The value in a key-value pair.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Backup Selection identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup selection using the `plan_id` and `id` separated by `|`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Backup selection using the `plan_id` and `id` separated by `|`. For example:
+
+```console
+% terraform import aws_backup_selection.example plan-id|selection-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/backup_vault.html.markdown b/website/docs/cdktf/python/r/backup_vault.html.markdown
new file mode 100644
index 00000000000..59e95c83e7d
--- /dev/null
+++ b/website/docs/cdktf/python/r/backup_vault.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_vault"
+description: |-
+  Provides an AWS Backup vault resource.
+---
+
+
+
+# Resource: aws_backup_vault
+
+Provides an AWS Backup vault resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.backup_vault import BackupVault
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupVault(self, "example",
+            kms_key_arn=Token.as_string(aws_kms_key_example.arn),
+            name="example_backup_vault"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `force_destroy` - (Optional, Default: `false`) A boolean that indicates that all recovery points stored in the vault are deleted so that the vault can be destroyed without error.
+* `kms_key_arn` - (Optional) The server-side encryption key that is used to protect your backups.
+* `name` - (Required) Name of the backup vault to create.
+* `tags` - (Optional) Metadata that you can assign to help organize the resources that you create. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the vault.
+* `arn` - The ARN of the vault.
+* `recovery_points` - The number of recovery points that are stored in a backup vault.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Backup vault using the `name`. For example:
+
+```console
+% terraform import aws_backup_vault.test-vault TestVault
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/backup_vault_lock_configuration.html.markdown b/website/docs/cdktf/python/r/backup_vault_lock_configuration.html.markdown
new file mode 100644
index 00000000000..0a3c06e19c2
--- /dev/null
+++ b/website/docs/cdktf/python/r/backup_vault_lock_configuration.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_vault_lock_configuration"
+description: |-
+  Provides an AWS Backup vault lock configuration resource.
+---
+
+
+
+# Resource: aws_backup_vault_lock_configuration
+
+Provides an AWS Backup vault lock configuration resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.backup_vault_lock_configuration import BackupVaultLockConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BackupVaultLockConfiguration(self, "test",
+            backup_vault_name="example_backup_vault",
+            changeable_for_days=3,
+            max_retention_days=1200,
+            min_retention_days=7
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `backup_vault_name` - (Required) Name of the backup vault to add a lock configuration for.
+* `changeable_for_days` - (Optional) The number of days before the lock date. If omitted, the vault lock is created in `governance` mode; otherwise, it is created in `compliance` mode.
+* `max_retention_days` - (Optional) The maximum retention period that the vault retains its recovery points.
+* `min_retention_days` - (Optional) The minimum retention period that the vault retains its recovery points.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `backup_vault_name` - The name of the vault.
+* `backup_vault_arn` - The ARN of the vault.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault lock configuration using the `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Backup vault lock configuration using the `name`. For example: + +```console +% terraform import aws_backup_vault_lock_configuration.test TestVault +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/backup_vault_notifications.html.markdown b/website/docs/cdktf/python/r/backup_vault_notifications.html.markdown new file mode 100644 index 00000000000..bd87c4ace20 --- /dev/null +++ b/website/docs/cdktf/python/r/backup_vault_notifications.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_vault_notifications" +description: |- + Provides an AWS Backup vault notifications resource. +--- + + + +# Resource: aws_backup_vault_notifications + +Provides an AWS Backup vault notifications resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.backup_vault_notifications import BackupVaultNotifications +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.sns_topic import SnsTopic +from imports.aws.sns_topic_policy import SnsTopicPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = SnsTopic(self, "test", + name="backup-vault-events" + ) + data_aws_iam_policy_document_test = DataAwsIamPolicyDocument(self, "test_1", + policy_id="__default_policy_ID", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["SNS:Publish"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["backup.amazonaws.com"], + type="Service" + ) + ], + resources=[test.arn], + sid="__default_statement_ID" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_test.override_logical_id("test") + aws_backup_vault_notifications_test = BackupVaultNotifications(self, "test_2", + backup_vault_events=["BACKUP_JOB_STARTED", "RESTORE_JOB_COMPLETED"], + backup_vault_name="example_backup_vault", + sns_topic_arn=test.arn + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_backup_vault_notifications_test.override_logical_id("test") + aws_sns_topic_policy_test = SnsTopicPolicy(self, "test_3", + arn=test.arn, + policy=Token.as_string(data_aws_iam_policy_document_test.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sns_topic_policy_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `backup_vault_name` - (Required) Name of the backup vault to add notifications for. 
+* `sns_topic_arn` - (Required) The Amazon Resource Name (ARN) that specifies the topic for a backup vault’s events +* `backup_vault_events` - (Required) An array of events that indicate the status of jobs to back up resources to the backup vault. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the vault. +* `backup_vault_arn` - The ARN of the vault. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault notifications using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Backup vault notifications using the `name`. For example: + +```console +% terraform import aws_backup_vault_notifications.test TestVault +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/backup_vault_policy.html.markdown b/website/docs/cdktf/python/r/backup_vault_policy.html.markdown new file mode 100644 index 00000000000..5444c0ad3f8 --- /dev/null +++ b/website/docs/cdktf/python/r/backup_vault_policy.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_vault_policy" +description: |- + Provides an AWS Backup vault policy resource. +--- + + + +# Resource: aws_backup_vault_policy + +Provides an AWS Backup vault policy resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.backup_vault import BackupVault +from imports.aws.backup_vault_policy import BackupVaultPolicy +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = BackupVault(self, "example", + name="example" + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["backup:DescribeBackupVault", "backup:DeleteBackupVault", "backup:PutBackupVaultAccessPolicy", "backup:DeleteBackupVaultAccessPolicy", "backup:GetBackupVaultAccessPolicy", "backup:StartBackupJob", "backup:GetBackupVaultNotifications", "backup:PutBackupVaultNotifications" + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=[example.arn] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_backup_vault_policy_example = BackupVaultPolicy(self, "example_2", + backup_vault_name=example.name, + policy=Token.as_string(data_aws_iam_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_backup_vault_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `backup_vault_name` - (Required) Name of the backup vault to add policy for.
+* `policy` - (Required) The backup vault access policy document in JSON format.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the vault.
+* `backup_vault_arn` - The ARN of the vault.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault policy using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Backup vault policy using the `name`. For example:
+
+```console
+% terraform import aws_backup_vault_policy.test TestVault
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/batch_compute_environment.html.markdown b/website/docs/cdktf/python/r/batch_compute_environment.html.markdown
new file mode 100644
index 00000000000..d557a6b87d5
--- /dev/null
+++ b/website/docs/cdktf/python/r/batch_compute_environment.html.markdown
@@ -0,0 +1,258 @@
+---
+subcategory: "Batch"
+layout: "aws"
+page_title: "AWS: aws_batch_compute_environment"
+description: |-
+  Creates an AWS Batch compute environment.
+---
+
+
+
+# Resource: aws_batch_compute_environment
+
+Creates an AWS Batch compute environment. Compute environments contain the Amazon ECS container instances that are used to run containerized batch jobs.
+
+For information about AWS Batch, see [What is AWS Batch?][1].
+For information about compute environments, see [Compute Environments][2].
+
+~> **Note:** To prevent a race condition during environment deletion, make sure to set `depends_on` to the related `aws_iam_role_policy_attachment`;
+otherwise, the policy may be destroyed too soon and the compute environment will then get stuck in the `DELETING` state, see [Troubleshooting AWS Batch][3].
+
+## Example Usage
+
+### EC2 Type
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.batch_compute_environment import BatchComputeEnvironment
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_instance_profile import IamInstanceProfile
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+from imports.aws.placement_group import PlacementGroup
+from imports.aws.security_group import SecurityGroup
+from imports.aws.subnet import Subnet
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        sample = PlacementGroup(self, "sample",
+            name="sample",
+            strategy="cluster"
+        )
+        aws_security_group_sample = SecurityGroup(self, "sample_1",
+            egress=[SecurityGroupEgress(
+                cidr_blocks=["0.0.0.0/0"],
+                from_port=0,
+                protocol="-1",
+                to_port=0
+            )
+            ],
+            name="aws_batch_compute_environment_security_group"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_security_group_sample.override_logical_id("sample")
+        aws_vpc_sample = Vpc(self, "sample_2",
+            cidr_block="10.1.0.0/16"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_vpc_sample.override_logical_id("sample")
+        batch_assume_role = DataAwsIamPolicyDocument(self, "batch_assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["batch.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        ec2_assume_role = DataAwsIamPolicyDocument(self, "ec2_assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["ec2.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        aws_batch_service_role = IamRole(self, "aws_batch_service_role",
+            assume_role_policy=Token.as_string(batch_assume_role.json),
+            name="aws_batch_service_role"
+        )
+        ecs_instance_role = IamRole(self, "ecs_instance_role",
+            assume_role_policy=Token.as_string(ec2_assume_role.json),
+            name="ecs_instance_role"
+        )
+        aws_iam_role_policy_attachment_aws_batch_service_role = IamRolePolicyAttachment(self, "aws_batch_service_role_7",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AWSBatchServiceRole",
+            role=aws_batch_service_role.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_aws_batch_service_role.override_logical_id("aws_batch_service_role")
+        aws_iam_role_policy_attachment_ecs_instance_role = IamRolePolicyAttachment(self, "ecs_instance_role_8",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceforEC2Role",
+            role=ecs_instance_role.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_ecs_instance_role.override_logical_id("ecs_instance_role")
+        aws_subnet_sample = Subnet(self, "sample_9",
+            cidr_block="10.1.1.0/24",
+            vpc_id=Token.as_string(aws_vpc_sample.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_subnet_sample.override_logical_id("sample") + aws_iam_instance_profile_ecs_instance_role = IamInstanceProfile(self, "ecs_instance_role_10", + name="ecs_instance_role", + role=ecs_instance_role.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_instance_profile_ecs_instance_role.override_logical_id("ecs_instance_role") + aws_batch_compute_environment_sample = BatchComputeEnvironment(self, "sample_11", + compute_environment_name="sample", + compute_resources=BatchComputeEnvironmentComputeResources( + instance_role=Token.as_string(aws_iam_instance_profile_ecs_instance_role.arn), + instance_type=["c4.large"], + max_vcpus=16, + min_vcpus=0, + placement_group=sample.name, + security_group_ids=[Token.as_string(aws_security_group_sample.id)], + subnets=[Token.as_string(aws_subnet_sample.id)], + type="EC2" + ), + depends_on=[aws_iam_role_policy_attachment_aws_batch_service_role], + service_role=aws_batch_service_role.arn, + type="MANAGED" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_batch_compute_environment_sample.override_logical_id("sample") +``` + +### Fargate Type + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.batch_compute_environment import BatchComputeEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BatchComputeEnvironment(self, "sample", + compute_environment_name="sample", + compute_resources=BatchComputeEnvironmentComputeResources( + max_vcpus=16, + security_group_ids=[Token.as_string(aws_security_group_sample.id)], + subnets=[Token.as_string(aws_subnet_sample.id)], + type="FARGATE" + ), + depends_on=[aws_batch_service_role], + service_role=Token.as_string(aws_iam_role_aws_batch_service_role.arn), + type="MANAGED" + ) +``` + +## Argument Reference + +* `compute_environment_name` - (Optional, Forces new resource) The name for your compute environment. Up to 128 letters (uppercase and lowercase), numbers, and underscores are allowed. If omitted, Terraform will assign a random, unique name. +* `compute_environment_name_prefix` - (Optional, Forces new resource) Creates a unique compute environment name beginning with the specified prefix. Conflicts with `compute_environment_name`. +* `compute_resources` - (Optional) Details of the compute resources managed by the compute environment. This parameter is required for managed compute environments. See details below. +* `eks_configuration` - (Optional) Details for the Amazon EKS cluster that supports the compute environment. See details below. +* `service_role` - (Required) The full Amazon Resource Name (ARN) of the IAM role that allows AWS Batch to make calls to other AWS services on your behalf. +* `state` - (Optional) The state of the compute environment. If the state is `ENABLED`, then the compute environment accepts jobs from a queue and can scale out automatically based on queues. Valid items are `ENABLED` or `DISABLED`. Defaults to `ENABLED`. +* `tags` - (Optional) Key-value map of resource tags. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Required) The type of the compute environment. Valid items are `MANAGED` or `UNMANAGED`. + +### compute_resources + +* `allocation_strategy` - (Optional) The allocation strategy to use for the compute resource in case not enough instances of the best fitting instance type can be allocated. Valid items are `BEST_FIT_PROGRESSIVE`, `SPOT_CAPACITY_OPTIMIZED` or `BEST_FIT`. Defaults to `BEST_FIT`. See [AWS docs](https://docs.aws.amazon.com/batch/latest/userguide/allocation-strategies.html) for details. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `bid_percentage` - (Optional) Integer of maximum percentage that a Spot Instance price can be when compared with the On-Demand price for that instance type before instances are launched. For example, if your bid percentage is 20% (`20`), then the Spot price must be below 20% of the current On-Demand price for that EC2 instance. If you leave this field empty, the default value is 100% of the On-Demand price. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `desired_vcpus` - (Optional) The desired number of EC2 vCPUS in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `ec2_configuration` - (Optional) Provides information used to select Amazon Machine Images (AMIs) for EC2 instances in the compute environment. If Ec2Configuration isn't specified, the default is ECS_AL2. This parameter isn't applicable to jobs that are running on Fargate resources, and shouldn't be specified. +* `ec2_key_pair` - (Optional) The EC2 key pair that is used for instances launched in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `image_id` - (Optional) The Amazon Machine Image (AMI) ID used for instances launched in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. (Deprecated, use [`ec2_configuration`](#ec2_configuration) `image_id_override` instead) +* `instance_role` - (Optional) The Amazon ECS instance role applied to Amazon EC2 instances in a compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `instance_type` - (Optional) A list of instance types that may be launched. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `launch_template` - (Optional) The launch template to use for your compute resources. See details below. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `max_vcpus` - (Required) The maximum number of EC2 vCPUs that an environment can reach. +* `min_vcpus` - (Optional) The minimum number of EC2 vCPUs that an environment should maintain. For `EC2` or `SPOT` compute environments, if the parameter is not explicitly defined, a `0` default value will be set. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. +* `placement_group` - (Optional) The Amazon EC2 placement group to associate with your compute resources. 
+* `security_group_ids` - (Optional) A list of EC2 security groups that are associated with instances launched in the compute environment. This parameter is required for Fargate compute environments.
+* `spot_iam_fleet_role` - (Optional) The Amazon Resource Name (ARN) of the Amazon EC2 Spot Fleet IAM role applied to a SPOT compute environment. This parameter is required for SPOT compute environments. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `subnets` - (Required) A list of VPC subnets into which the compute resources are launched.
+* `tags` - (Optional) Key-value pair tags to be applied to resources that are launched in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `type` - (Required) The type of compute environment. Valid items are `EC2`, `SPOT`, `FARGATE` or `FARGATE_SPOT`.
+
+### ec2_configuration
+
+`ec2_configuration` supports the following:
+
+* `image_id_override` - (Optional) The AMI ID used for instances launched in the compute environment that match the image type. This setting overrides the `image_id` argument in the [`compute_resources`](#compute_resources) block.
+* `image_type` - (Optional) The image type to match with the instance type to select an AMI. If the `image_id_override` parameter isn't specified, then a recent [Amazon ECS-optimized Amazon Linux 2 AMI](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-optimized_AMI.html#al2ami) (`ECS_AL2`) is used.
+
+### launch_template
+
+`launch_template` supports the following:
+
+* `launch_template_id` - (Optional) ID of the launch template. You must specify either the launch template ID or launch template name in the request, but not both.
+* `launch_template_name` - (Optional) Name of the launch template.
+* `version` - (Optional) The version number of the launch template. Default: The default version of the launch template.
+
+### eks_configuration
+
+`eks_configuration` supports the following:
+
+* `eks_cluster_arn` - (Required) The Amazon Resource Name (ARN) of the Amazon EKS cluster.
+* `kubernetes_namespace` - (Required) The namespace of the Amazon EKS cluster. AWS Batch manages pods in this namespace.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the compute environment.
+* `ecs_cluster_arn` - The Amazon Resource Name (ARN) of the underlying Amazon ECS cluster used by the compute environment.
+* `status` - The current status of the compute environment (for example, `CREATING` or `VALID`).
+* `status_reason` - A short, human-readable string to provide additional details about the current status of the compute environment.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Batch compute using the `compute_environment_name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS Batch compute using the `compute_environment_name`. For example: + +```console +% terraform import aws_batch_compute_environment.sample sample +``` + +[1]: http://docs.aws.amazon.com/batch/latest/userguide/what-is-batch.html +[2]: http://docs.aws.amazon.com/batch/latest/userguide/compute_environments.html +[3]: http://docs.aws.amazon.com/batch/latest/userguide/troubleshooting.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/batch_job_definition.html.markdown b/website/docs/cdktf/python/r/batch_job_definition.html.markdown new file mode 100644 index 00000000000..6da99311f35 --- /dev/null +++ b/website/docs/cdktf/python/r/batch_job_definition.html.markdown @@ -0,0 +1,195 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_job_definition" +description: |- + Provides a Batch Job Definition resource. +--- + + + +# Resource: aws_batch_job_definition + +Provides a Batch Job Definition resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.batch_job_definition import BatchJobDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BatchJobDefinition(self, "test", + container_properties=Token.as_string( + Fn.jsonencode({ + "command": ["ls", "-la"], + "environment": [{ + "name": "VARNAME", + "value": "VARVAL" + } + ], + "image": "busybox", + "mount_points": [{ + "container_path": "/tmp", + "read_only": False, + "source_volume": "tmp" + } + ], + "resource_requirements": [{ + "type": "VCPU", + "value": "0.25" + }, { + "type": "MEMORY", + "value": "512" + } + ], + "ulimits": [{ + "hard_limit": 1024, + "name": "nofile", + "soft_limit": 1024 + } + ], + "volumes": [{ + "host": { + "source_path": "/tmp" + }, + "name": "tmp" + } + ] + })), + name="tf_test_batch_job_definition", + type="container" + ) +``` + +### Fargate Platform Capability + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
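+#
+# What this example does: creates an IAM role that ECS tasks can assume, attaches
+# the managed AmazonECSTaskExecutionRolePolicy to it, and references the role from
+# a Fargate job definition requesting 0.25 vCPU and 512 MiB of memory.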
+# +from imports.aws.batch_job_definition import BatchJobDefinition +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role_policy = DataAwsIamPolicyDocument(self, "assume_role_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ecs-tasks.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + ecs_task_execution_role = IamRole(self, "ecs_task_execution_role", + assume_role_policy=Token.as_string(assume_role_policy.json), + name="tf_test_batch_exec_role" + ) + IamRolePolicyAttachment(self, "ecs_task_execution_role_policy", + policy_arn="arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy", + role=ecs_task_execution_role.name + ) + BatchJobDefinition(self, "test", + container_properties=Token.as_string( + Fn.jsonencode({ + "command": ["echo", "test"], + "execution_role_arn": ecs_task_execution_role.arn, + "fargate_platform_configuration": { + "platform_version": "LATEST" + }, + "image": "busybox", + "job_role_arn": "arn:aws:iam::123456789012:role/AWSBatchS3ReadOnly", + "resource_requirements": [{ + "type": "VCPU", + "value": "0.25" + }, { + "type": "MEMORY", + "value": "512" + } + ] + })), + name="tf_test_batch_job_definition", + platform_capabilities=["FARGATE"], + type="container" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Specifies the name of the job definition. +* `type` - (Required) The type of job definition. Must be `container`. + +The following arguments are optional: + +* `container_properties` - (Optional) A valid [container properties](http://docs.aws.amazon.com/batch/latest/APIReference/API_RegisterJobDefinition.html) + provided as a single valid JSON document. This parameter is required if the `type` parameter is `container`. +* `parameters` - (Optional) Specifies the parameter substitution placeholders to set in the job definition. +* `platform_capabilities` - (Optional) The platform capabilities required by the job definition. If no value is specified, it defaults to `EC2`. To run the job on Fargate resources, specify `FARGATE`. +* `propagate_tags` - (Optional) Specifies whether to propagate the tags from the job definition to the corresponding Amazon ECS task. Default is `false`. +* `retry_strategy` - (Optional) Specifies the retry strategy to use for failed jobs that are submitted with this job definition. + Maximum number of `retry_strategy` is `1`. Defined below. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `timeout` - (Optional) Specifies the timeout for jobs so that if a job runs longer, AWS Batch terminates the job. Maximum number of `timeout` is `1`. Defined below. + +### retry_strategy + +* `attempts` - (Optional) The number of times to move a job to the `RUNNABLE` status. You may specify between `1` and `10` attempts. +* `evaluate_on_exit` - (Optional) The [evaluate on exit](#evaluate_on_exit) conditions under which the job should be retried or failed. 
If this parameter is specified, then the `attempts` parameter must also be specified. You may specify up to 5 configuration blocks. + +#### evaluate_on_exit + +* `action` - (Required) Specifies the action to take if all of the specified conditions are met. The values are not case sensitive. Valid values: `RETRY`, `EXIT`. +* `on_exit_code` - (Optional) A glob pattern to match against the decimal representation of the exit code returned for a job. +* `on_reason` - (Optional) A glob pattern to match against the reason returned for a job. +* `on_status_reason` - (Optional) A glob pattern to match against the status reason returned for a job. + +### timeout + +* `attempt_duration_seconds` - (Optional) The time duration in seconds after which AWS Batch terminates your jobs if they have not finished. The minimum value for the timeout is `60` seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of the job definition. +* `revision` - The revision of the job definition. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Batch Job Definition using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Batch Job Definition using the `arn`. For example: + +```console +% terraform import aws_batch_job_definition.test arn:aws:batch:us-east-1:123456789012:job-definition/sample +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/batch_job_queue.html.markdown b/website/docs/cdktf/python/r/batch_job_queue.html.markdown new file mode 100644 index 00000000000..e9bc25e0a87 --- /dev/null +++ b/website/docs/cdktf/python/r/batch_job_queue.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_job_queue" +description: |- + Provides a Batch Job Queue resource. +--- + + + +# Resource: aws_batch_job_queue + +Provides a Batch Job Queue resource. + +## Example Usage + +### Basic Job Queue + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.batch_job_queue import BatchJobQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BatchJobQueue(self, "test_queue", + compute_environments=[test_environment1.arn, test_environment2.arn], + name="tf-test-batch-job-queue", + priority=1, + state="ENABLED" + ) +``` + +### Job Queue with a fair share scheduling policy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
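+#
+# What this example does: defines a fair share scheduling policy (reserving compute
+# for unused share identifiers and weighting the "A1*" prefix) and attaches it to
+# the job queue through scheduling_policy_arn.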
+# +from imports.aws.batch_job_queue import BatchJobQueue +from imports.aws.batch_scheduling_policy import BatchSchedulingPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = BatchSchedulingPolicy(self, "example", + fair_share_policy=BatchSchedulingPolicyFairSharePolicy( + compute_reservation=1, + share_decay_seconds=3600, + share_distribution=[BatchSchedulingPolicyFairSharePolicyShareDistribution( + share_identifier="A1*", + weight_factor=0.1 + ) + ] + ), + name="example" + ) + aws_batch_job_queue_example = BatchJobQueue(self, "example_1", + compute_environments=[test_environment1.arn, test_environment2.arn], + name="tf-test-batch-job-queue", + priority=1, + scheduling_policy_arn=example.arn, + state="ENABLED" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_batch_job_queue_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Specifies the name of the job queue. +* `compute_environments` - (Required) Specifies the set of compute environments + mapped to a job queue and their order. The position of the compute environments + in the list will dictate the order. +* `priority` - (Required) The priority of the job queue. Job queues with a higher priority + are evaluated first when associated with the same compute environment. +* `scheduling_policy_arn` - (Optional) The ARN of the fair share scheduling policy. If this parameter is specified, the job queue uses a fair share scheduling policy. If this parameter isn't specified, the job queue uses a first in, first out (FIFO) scheduling policy. After a job queue is created, you can replace but can't remove the fair share scheduling policy. +* `state` - (Required) The state of the job queue. Must be one of: `ENABLED` or `DISABLED` +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of the job queue. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Batch Job Queue using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Batch Job Queue using the `arn`. 
For example:
+
+```console
+% terraform import aws_batch_job_queue.test_queue arn:aws:batch:us-east-1:123456789012:job-queue/sample
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/batch_scheduling_policy.html.markdown b/website/docs/cdktf/python/r/batch_scheduling_policy.html.markdown
new file mode 100644
index 00000000000..91082e30e40
--- /dev/null
+++ b/website/docs/cdktf/python/r/batch_scheduling_policy.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "Batch"
+layout: "aws"
+page_title: "AWS: aws_batch_scheduling_policy"
+description: |-
+  Provides a Batch Scheduling Policy resource.
+---
+
+
+
+# Resource: aws_batch_scheduling_policy
+
+Provides a Batch Scheduling Policy resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.batch_scheduling_policy import BatchSchedulingPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        BatchSchedulingPolicy(self, "example",
+            fair_share_policy=BatchSchedulingPolicyFairSharePolicy(
+                compute_reservation=1,
+                share_decay_seconds=3600,
+                share_distribution=[BatchSchedulingPolicyFairSharePolicyShareDistribution(
+                    share_identifier="A1*",
+                    weight_factor=0.1
+                ), BatchSchedulingPolicyFairSharePolicyShareDistribution(
+                    share_identifier="A2",
+                    weight_factor=0.2
+                )
+                ]
+            ),
+            name="example",
+            tags={
+                "Name": "Example Batch Scheduling Policy"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `fair_share_policy` - (Optional) A fair share policy block specifies the `compute_reservation`, `share_decay_seconds`, and `share_distribution` of the scheduling policy. The `fair_share_policy` block is documented below.
+* `name` - (Required) Specifies the name of the scheduling policy.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+A `fair_share_policy` block supports the following arguments:
+
+* `compute_reservation` - (Optional) A value used to reserve some of the available maximum vCPU for fair share identifiers that have not yet been used. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html).
+* `share_decay_seconds` - (Optional) The time period to use to calculate a fair share percentage for each fair share identifier in use, in seconds. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html).
+* `share_distribution` - (Optional) One or more share distribution blocks which define the weights for the fair share identifiers for the fair share policy. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html). The `share_distribution` block is documented below.
+
+A `share_distribution` block supports the following arguments:
+
+* `share_identifier` - (Required) A fair share identifier or fair share identifier prefix.
For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html). +* `weight_factor` - (Optional) The weight factor for the fair share identifier. For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of the scheduling policy. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Batch Scheduling Policy using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Batch Scheduling Policy using the `arn`. For example: + +```console +% terraform import aws_batch_scheduling_policy.test_policy arn:aws:batch:us-east-1:123456789012:scheduling-policy/sample +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/budgets_budget.html.markdown b/website/docs/cdktf/python/r/budgets_budget.html.markdown new file mode 100644 index 00000000000..007bc406d23 --- /dev/null +++ b/website/docs/cdktf/python/r/budgets_budget.html.markdown @@ -0,0 +1,369 @@ +--- +subcategory: "Web Services Budgets" +layout: "aws" +page_title: "AWS: aws_budgets_budget" +description: |- + Provides a budgets budget resource. +--- + + + +# Resource: aws_budgets_budget + +Provides a budgets budget resource. Budgets use the cost visualisation provided by Cost Explorer to show you the status of your budgets, to provide forecasts of your estimated costs, and to track your AWS usage, including your free tier usage. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.budgets_budget import BudgetsBudget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + BudgetsBudget(self, "ec2", + budget_type="COST", + cost_filter=[BudgetsBudgetCostFilter( + name="Service", + values=["Amazon Elastic Compute Cloud - Compute"] + ) + ], + limit_amount="1200", + limit_unit="USD", + name="budget-ec2-monthly", + notification=[BudgetsBudgetNotification( + comparison_operator="GREATER_THAN", + notification_type="FORECASTED", + subscriber_email_addresses=["test@example.com"], + threshold=100, + threshold_type="PERCENTAGE" + ) + ], + time_period_end="2087-06-15_00:00", + time_period_start="2017-07-01_00:00", + time_unit="MONTHLY" + ) +``` + +Create a budget for *$100*. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
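+#
+# Note: 'cdktf convert' lifted the unresolved time_unit reference into a stack
+# parameter; pass a valid value such as "MONTHLY" when instantiating the stack.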
+#
+from imports.aws.budgets_budget import BudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, time_unit):
+        super().__init__(scope, name)
+        BudgetsBudget(self, "cost",
+            budget_type="COST",
+            limit_amount="100",
+            limit_unit="USD",
+            time_unit=time_unit
+        )
+```
+
+Create a budget with planned budget limits.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.budgets_budget import BudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, budget_type, time_unit):
+        super().__init__(scope, name)
+        BudgetsBudget(self, "cost",
+            planned_limit=[BudgetsBudgetPlannedLimit(
+                amount="100",
+                start_time="2017-07-01_00:00",
+                unit="USD"
+            ), BudgetsBudgetPlannedLimit(
+                amount="200",
+                start_time="2017-08-01_00:00",
+                unit="USD"
+            )
+            ],
+            budget_type=budget_type,
+            time_unit=time_unit
+        )
+```
+
+Create a budget for S3 with a limit of *3 GB* of storage.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.budgets_budget import BudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, time_unit):
+        super().__init__(scope, name)
+        BudgetsBudget(self, "s3",
+            budget_type="USAGE",
+            limit_amount="3",
+            limit_unit="GB",
+            time_unit=time_unit
+        )
+```
+
+Create a Savings Plans Utilization Budget.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.budgets_budget import BudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, time_unit):
+        super().__init__(scope, name)
+        BudgetsBudget(self, "savings_plan_utilization",
+            budget_type="SAVINGS_PLANS_UTILIZATION",
+            cost_types=BudgetsBudgetCostTypes(
+                include_credit=False,
+                include_discount=False,
+                include_other_subscription=False,
+                include_recurring=False,
+                include_refund=False,
+                include_subscription=True,
+                include_support=False,
+                include_tax=False,
+                include_upfront=False,
+                use_blended=False
+            ),
+            limit_amount="100.0",
+            limit_unit="PERCENTAGE",
+            time_unit=time_unit
+        )
+```
+
+Create an RI Utilization Budget.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
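+#
+# Utilization budgets are expressed as a percentage rather than a currency amount,
+# so limit_unit is PERCENTAGE and the cost_filter scopes the budget to a single
+# service (Amazon RDS here).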
+#
+from imports.aws.budgets_budget import BudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, time_unit):
+        super().__init__(scope, name)
+        BudgetsBudget(self, "ri_utilization",
+            budget_type="RI_UTILIZATION",
+            cost_filter=[BudgetsBudgetCostFilter(
+                name="Service",
+                values=["Amazon Relational Database Service"]
+            )
+            ],
+            cost_types=BudgetsBudgetCostTypes(
+                include_credit=False,
+                include_discount=False,
+                include_other_subscription=False,
+                include_recurring=False,
+                include_refund=False,
+                include_subscription=True,
+                include_support=False,
+                include_tax=False,
+                include_upfront=False,
+                use_blended=False
+            ),
+            limit_amount="100.0",
+            limit_unit="PERCENTAGE",
+            time_unit=time_unit
+        )
+```
+
+Create a Cost Filter using Resource Tags.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.budgets_budget import BudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, budget_type, time_unit):
+        super().__init__(scope, name)
+        BudgetsBudget(self, "cost",
+            cost_filter=[BudgetsBudgetCostFilter(
+                name="TagKeyValue",
+                values=["TagKey$TagValue"]
+            )
+            ],
+            budget_type=budget_type,
+            time_unit=time_unit
+        )
+```
+
+Create a `cost_filter` using resource tags, obtaining the tag value from a Terraform variable.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.budgets_budget import BudgetsBudget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, budget_type, time_unit):
+        super().__init__(scope, name)
+        BudgetsBudget(self, "cost",
+            cost_filter=[BudgetsBudgetCostFilter(
+                name="TagKeyValue",
+                values=["TagKey$${var.TagValue}"]
+            )
+            ],
+            budget_type=budget_type,
+            time_unit=time_unit
+        )
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to the [AWS official
+documentation](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/data-type-budget.html).
+
+This resource supports the following arguments:
+
+* `account_id` - (Optional) The ID of the target account for the budget. Defaults to the current user's account ID if omitted.
+* `auto_adjust_data` - (Optional) Object containing [Auto Adjust Data](#auto-adjust-data) which determines the budget amount for an auto-adjusting budget.
+* `name` - (Optional) The name of a budget. Unique within accounts.
+* `name_prefix` - (Optional) The prefix of the name of a budget. Unique within accounts.
+* `budget_type` - (Required) Whether this budget tracks monetary cost or usage.
+* `cost_filter` - (Optional) A list of [CostFilter](#cost-filter) name/value pairs to apply to the budget.
+* `cost_types` - (Optional) Object containing [CostTypes](#cost-types), the types of cost included in a budget, such as tax and subscriptions.
+* `limit_amount` - (Required) The amount of cost or usage being measured for a budget.
+* `limit_unit` - (Required) The unit of measurement used for the budget forecast, actual spend, or budget threshold, such as dollars or GB.
See [Spend](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/data-type-spend.html) documentation.
+* `time_period_end` - (Optional) The end of the time period covered by the budget. There are no restrictions on the end date. Format: `2017-01-01_12:00`.
+* `time_period_start` - (Optional) The start of the time period covered by the budget. If you don't specify a start date, AWS defaults to the start of your chosen time period. The start date must come before the end date. Format: `2017-01-01_12:00`.
+* `time_unit` - (Required) The length of time until a budget resets the actual and forecasted spend. Valid values: `MONTHLY`, `QUARTERLY`, `ANNUALLY`, and `DAILY`.
+* `notification` - (Optional) Object containing [Budget Notifications](#budget-notification). Can be used multiple times to define more than one budget notification.
+* `planned_limit` - (Optional) Object containing [Planned Budget Limits](#planned-budget-limits). Can be used multiple times to plan more than one budget limit. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the resource.
+* `arn` - The ARN of the budget.
+
+### Auto Adjust Data
+
+The parameters that determine the budget amount for an auto-adjusting budget.
+
+* `auto_adjust_type` - (Required) The string that defines whether your budget auto-adjusts based on historical or forecasted data. Valid values: `FORECAST`, `HISTORICAL`.
+* `historical_options` - (Optional) Configuration block of [Historical Options](#historical-options) that defines the historical data your auto-adjusting budget is based on. Required for an `auto_adjust_type` of `HISTORICAL`.
+* `last_auto_adjust_time` - (Optional) The last time that your budget was auto-adjusted.
+
+### Historical Options
+
+* `budget_adjustment_period` - (Required) The number of budget periods included in the moving-average calculation that determines your auto-adjusted budget amount.
+* `lookback_available_periods` - (Optional) The integer that describes how many budget periods in your BudgetAdjustmentPeriod are included in the calculation of your current budget limit. If the first budget period in your BudgetAdjustmentPeriod has no cost data, then that budget period isn’t included in the average that determines your budget limit. You can’t set your own LookBackAvailablePeriods. The value is automatically calculated from the `budget_adjustment_period` and your historical cost data.
+
+### Cost Types
+
+Valid keys for the `cost_types` parameter.
+
+* `include_credit` - Whether to include credits in the cost budget. Defaults to `true`.
+* `include_discount` - Whether a budget includes discounts. Defaults to `true`.
+* `include_other_subscription` - Whether to include other subscription costs in the cost budget. Defaults to `true`.
+* `include_recurring` - Whether to include recurring costs in the cost budget. Defaults to `true`.
+* `include_refund` - Whether to include refunds in the cost budget. Defaults to `true`.
+* `include_subscription` - Whether to include subscriptions in the cost budget. Defaults to `true`.
+* `include_support` - Whether to include support costs in the cost budget.
Defaults to `true`.
+* `include_tax` - Whether to include tax in the cost budget. Defaults to `true`.
+* `include_upfront` - Whether to include upfront costs in the cost budget. Defaults to `true`.
+* `use_amortized` - Whether a budget uses the amortized rate. Defaults to `false`.
+* `use_blended` - Whether to use blended costs in the cost budget. Defaults to `false`.
+
+Refer to [AWS CostTypes documentation](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_CostTypes.html) for further detail.
+
+### Cost Filter
+
+Based on your choice of budget type, you can choose one or more of the available budget filters.
+
+* `PurchaseType`
+* `UsageTypeGroup`
+* `Service`
+* `Operation`
+* `UsageType`
+* `BillingEntity`
+* `CostCategory`
+* `LinkedAccount`
+* `TagKeyValue`
+* `LegalEntityName`
+* `InvoicingEntity`
+* `AZ`
+* `Region`
+* `InstanceType`
+
+Refer to [AWS CostFilter documentation](https://docs.aws.amazon.com/cost-management/latest/userguide/budgets-create-filters.html) for further detail.
+
+### Budget Notification
+
+Valid keys for the `notification` parameter.
+
+* `comparison_operator` - (Required) Comparison operator to use to evaluate the condition. Can be `LESS_THAN`, `EQUAL_TO` or `GREATER_THAN`.
+* `threshold` - (Required) Threshold when the notification should be sent.
+* `threshold_type` - (Required) What kind of threshold is defined. Can be `PERCENTAGE` or `ABSOLUTE_VALUE`.
+* `notification_type` - (Required) What kind of budget value to notify on. Can be `ACTUAL` or `FORECASTED`.
+* `subscriber_email_addresses` - (Optional) Email addresses to notify. Either this or `subscriber_sns_topic_arns` is required.
+* `subscriber_sns_topic_arns` - (Optional) SNS topics to notify. Either this or `subscriber_email_addresses` is required.
+
+### Planned Budget Limits
+
+Valid keys for the `planned_limit` parameter.
+
+* `start_time` - (Required) The start time of the budget limit. Format: `2017-01-01_12:00`. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation.
+* `amount` - (Required) The amount of cost or usage being measured for a budget.
+* `unit` - (Required) The unit of measurement used for the budget forecast, actual spend, or budget threshold, such as dollars or GB. See [Spend](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/data-type-spend.html) documentation.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import budgets using `AccountID:BudgetName`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import budgets using `AccountID:BudgetName`.
For example: + +```console +% terraform import aws_budgets_budget.myBudget 123456789012:myBudget +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/budgets_budget_action.html.markdown b/website/docs/cdktf/python/r/budgets_budget_action.html.markdown new file mode 100644 index 00000000000..1a08572c80c --- /dev/null +++ b/website/docs/cdktf/python/r/budgets_budget_action.html.markdown @@ -0,0 +1,187 @@ +--- +subcategory: "Web Services Budgets" +layout: "aws" +page_title: "AWS: aws_budgets_budget_action" +description: |- + Provides a budget action resource. +--- + + + +# Resource: aws_budgets_budget_action + +Provides a budget action resource. Budget actions are cost savings controls that run either automatically on your behalf or by using a workflow approval process. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.budgets_budget import BudgetsBudget +from imports.aws.budgets_budget_action import BudgetsBudgetAction +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.data_aws_partition import DataAwsPartition +from imports.aws.iam_policy import IamPolicy +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = BudgetsBudget(self, "example", + budget_type="USAGE", + limit_amount="10.0", + limit_unit="dollars", + name="example", + time_period_start="2006-01-02_15:04", + time_unit="MONTHLY" + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:Describe*"], + effect="Allow", + resources=["*"] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + current = DataAwsPartition(self, "current") + aws_iam_policy_example = IamPolicy(self, "example_3", + description="My example policy", + name="example", + policy=Token.as_string(data_aws_iam_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_policy_example.override_logical_id("example") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["budgets.${" + current.dns_suffix + "}"], + type="Service" + ) + ] + ) + ] + ) + aws_iam_role_example = IamRole(self, "example_5", + assume_role_policy=Token.as_string(assume_role.json), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_iam_role_example.override_logical_id("example") + aws_budgets_budget_action_example = BudgetsBudgetAction(self, "example_6", + action_threshold=BudgetsBudgetActionActionThreshold( + action_threshold_type="ABSOLUTE_VALUE", + action_threshold_value=100 + ), + action_type="APPLY_IAM_POLICY", + approval_model="AUTOMATIC", + budget_name=example.name, + definition=BudgetsBudgetActionDefinition( + iam_action_definition=BudgetsBudgetActionDefinitionIamActionDefinition( + policy_arn=Token.as_string(aws_iam_policy_example.arn), + roles=[Token.as_string(aws_iam_role_example.name)] + ) + ), + execution_role_arn=Token.as_string(aws_iam_role_example.arn), + notification_type="ACTUAL", + subscriber=[BudgetsBudgetActionSubscriber( + address="example@example.example", + subscription_type="EMAIL" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_budgets_budget_action_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Optional) The ID of the target account for budget. Will use current user's account_id by default if omitted. +* `budget_name` - (Required) The name of a budget. +* `action_threshold` - (Required) The trigger threshold of the action. See [Action Threshold](#action-threshold). +* `action_type` - (Required) The type of action. This defines the type of tasks that can be carried out by this action. This field also determines the format for definition. Valid values are `APPLY_IAM_POLICY`, `APPLY_SCP_POLICY`, and `RUN_SSM_DOCUMENTS`. +* `approval_model` - (Required) This specifies if the action needs manual or automatic approval. Valid values are `AUTOMATIC` and `MANUAL`. +* `definition` - (Required) Specifies all of the type-specific parameters. See [Definition](#definition). +* `execution_role_arn` - (Required) The role passed for action execution and reversion. Roles and actions must be in the same account. +* `notification_type` - (Required) The type of a notification. Valid values are `ACTUAL` or `FORECASTED`. +* `subscriber` - (Required) A list of subscribers. See [Subscriber](#subscriber). + +### Action Threshold + +* `action_threshold_type` - (Required) The type of threshold for a notification. Valid values are `PERCENTAGE` or `ABSOLUTE_VALUE`. +* `action_threshold_value` - (Required) The threshold of a notification. + +### Subscriber + +* `address` - (Required) The address that AWS sends budget notifications to, either an SNS topic or an email. +* `subscription_type` - (Required) The type of notification that AWS sends to a subscriber. Valid values are `SNS` or `EMAIL`. + +### Definition + +* `iam_action_definition` - (Optional) The AWS Identity and Access Management (IAM) action definition details. See [IAM Action Definition](#iam-action-definition). +* `ssm_action_definition` - (Optional) The AWS Systems Manager (SSM) action definition details. See [SSM Action Definition](#ssm-action-definition). +* `scp_action_definition` - (Optional) The service control policies (SCPs) action definition details. See [SCP Action Definition](#scp-action-definition). + +#### IAM Action Definition + +* `policy_arn` - (Required) The Amazon Resource Name (ARN) of the policy to be attached. +* `groups` - (Optional) A list of groups to be attached. There must be at least one group. +* `roles` - (Optional) A list of roles to be attached. There must be at least one role. +* `users` - (Optional) A list of users to be attached. 
There must be at least one user. + +#### SCP Action Definition + +* `policy_id` - (Required) The policy ID attached. +* `target_ids` - (Optional) A list of target IDs. + +#### SSM Action Definition + +* `action_sub_type` - (Required) The action subType. Valid values are `STOP_EC2_INSTANCES` or `STOP_RDS_INSTANCES`. +* `instance_ids` - (Required) The EC2 and RDS instance IDs. +* `region` - (Required) The Region to run the SSM document. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `action_id` - The id of the budget action. +* `id` - ID of resource. +* `arn` - The ARN of the budget action. +* `status` - The status of the budget action. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `update` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import budget actions using `AccountID:ActionID:BudgetName`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import budget actions using `AccountID:ActionID:BudgetName`. For example: + +```console +% terraform import aws_budgets_budget_action.myBudget 123456789012:some-id:myBudget +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ce_anomaly_monitor.html.markdown b/website/docs/cdktf/python/r/ce_anomaly_monitor.html.markdown new file mode 100644 index 00000000000..946abc343a1 --- /dev/null +++ b/website/docs/cdktf/python/r/ce_anomaly_monitor.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "CE (Cost Explorer)" +layout: "aws" +page_title: "AWS: aws_ce_anomaly_monitor" +description: |- + Provides a CE Cost Anomaly Monitor +--- + + + +# Resource: aws_ce_anomaly_monitor + +Provides a CE Anomaly Monitor. + +## Example Usage + +There are two main types of a Cost Anomaly Monitor: `DIMENSIONAL` and `CUSTOM`. + +### Dimensional Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ce_anomaly_monitor import CeAnomalyMonitor +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CeAnomalyMonitor(self, "service_monitor", + monitor_dimension="SERVICE", + monitor_type="DIMENSIONAL", + name="AWSServiceMonitor" + ) +``` + +### Custom Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
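+#
+# The CUSTOM monitor below scopes anomaly detection with a raw Cost Explorer
+# Expression: it matches costs carrying the CostCenter tag value "10000".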
+#
+from imports.aws.ce_anomaly_monitor import CeAnomalyMonitor
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CeAnomalyMonitor(self, "test",
+            monitor_specification=Token.as_string(
+                Fn.jsonencode({
+                    "And": "null",
+                    "CostCategories": "null",
+                    "Dimensions": "null",
+                    "Not": "null",
+                    "Or": "null",
+                    "Tags": {
+                        "Key": "CostCenter",
+                        "MatchOptions": "null",
+                        "Values": ["10000"]
+                    }
+                })),
+            monitor_type="CUSTOM",
+            name="AWSCustomAnomalyMonitor"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the monitor.
+* `monitor_type` - (Required) The type of the monitor. Valid values: `DIMENSIONAL` | `CUSTOM`.
+* `monitor_dimension` - (Required, if `monitor_type` is `DIMENSIONAL`) The dimensions to evaluate. Valid values: `SERVICE`.
+* `monitor_specification` - (Required, if `monitor_type` is `CUSTOM`) A valid JSON representation for the [Expression](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html) object.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the anomaly monitor.
+* `id` - Unique ID of the anomaly monitor. Same as `arn`.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ce_anomaly_monitor` using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_ce_anomaly_monitor` using the `id`. For example:
+
+```console
+% terraform import aws_ce_anomaly_monitor.example costAnomalyMonitorARN
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ce_anomaly_subscription.html.markdown b/website/docs/cdktf/python/r/ce_anomaly_subscription.html.markdown
new file mode 100644
index 00000000000..cee923150dd
--- /dev/null
+++ b/website/docs/cdktf/python/r/ce_anomaly_subscription.html.markdown
@@ -0,0 +1,230 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_anomaly_subscription"
+description: |-
+  Provides a CE Cost Anomaly Subscription
+---
+
+
+
+# Resource: aws_ce_anomaly_subscription
+
+Provides a CE Anomaly Subscription.
+
+## Example Usage
+
+### Basic Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
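+#
+# The DAILY subscription below emails an anomaly report for the referenced
+# monitor once an anomaly's total impact exceeds the 100 (USD) threshold.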
+# +from imports.aws.ce_anomaly_monitor import CeAnomalyMonitor +from imports.aws.ce_anomaly_subscription import CeAnomalySubscription +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = CeAnomalyMonitor(self, "test", + monitor_dimension="SERVICE", + monitor_type="DIMENSIONAL", + name="AWSServiceMonitor" + ) + aws_ce_anomaly_subscription_test = CeAnomalySubscription(self, "test_1", + frequency="DAILY", + monitor_arn_list=[test.arn], + name="DAILYSUBSCRIPTION", + subscriber=[CeAnomalySubscriptionSubscriber( + address="abc@example.com", + type="EMAIL" + ) + ], + threshold=100 + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ce_anomaly_subscription_test.override_logical_id("test") +``` + +### Threshold Expression + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ce_anomaly_subscription import CeAnomalySubscription +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CeAnomalySubscription(self, "test", + frequency="DAILY", + monitor_arn_list=[Token.as_string(aws_ce_anomaly_monitor_test.arn)], + name="AWSServiceMonitor", + subscriber=[CeAnomalySubscriptionSubscriber( + address="abc@example.com", + type="EMAIL" + ) + ], + threshold_expression=CeAnomalySubscriptionThresholdExpression( + dimension=CeAnomalySubscriptionThresholdExpressionDimension( + key="ANOMALY_TOTAL_IMPACT_ABSOLUTE", + match_options=["GREATER_THAN_OR_EQUAL"], + values=["100.0"] + ) + ) + ) +``` + +### SNS Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
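+#
+# The SNS topic policy below must grant costalerts.amazonaws.com publish access
+# before the subscription is created, hence the explicit depends_on.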
+#
+from imports.aws.ce_anomaly_monitor import CeAnomalyMonitor
+from imports.aws.ce_anomaly_subscription import CeAnomalySubscription
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.sns_topic import SnsTopic
+from imports.aws.sns_topic_policy import SnsTopicPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        anomaly_monitor = CeAnomalyMonitor(self, "anomaly_monitor",
+            monitor_dimension="SERVICE",
+            monitor_type="DIMENSIONAL",
+            name="AWSServiceMonitor"
+        )
+        cost_anomaly_updates = SnsTopic(self, "cost_anomaly_updates",
+            name="CostAnomalyUpdates"
+        )
+        sns_topic_policy = DataAwsIamPolicyDocument(self, "sns_topic_policy",
+            policy_id="__default_policy_ID",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["SNS:Publish"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["costalerts.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=[cost_anomaly_updates.arn],
+                sid="AWSAnomalyDetectionSNSPublishingPermissions"
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["SNS:Subscribe", "SNS:SetTopicAttributes", "SNS:RemovePermission", "SNS:Receive", "SNS:Publish", "SNS:ListSubscriptionsByTopic", "SNS:GetTopicAttributes", "SNS:DeleteTopic", "SNS:AddPermission"
+                ],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="StringEquals",
+                    values=[account_id.string_value],
+                    variable="AWS:SourceOwner"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="AWS"
+                )
+                ],
+                resources=[cost_anomaly_updates.arn],
+                sid="__default_statement_ID"
+            )
+            ]
+        )
+        default_var = SnsTopicPolicy(self, "default",
+            arn=cost_anomaly_updates.arn,
+            policy=Token.as_string(sns_topic_policy.json)
+        )
+        CeAnomalySubscription(self, "realtime_subscription",
+            depends_on=[default_var],
+            frequency="IMMEDIATE",
+            monitor_arn_list=[anomaly_monitor.arn],
+            name="RealtimeAnomalySubscription",
+            subscriber=[CeAnomalySubscriptionSubscriber(
+                address=cost_anomaly_updates.arn,
+                type="SNS"
+            )
+            ],
+            threshold=0
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Optional) The unique identifier for the AWS account in which the anomaly subscription ought to be created.
+* `frequency` - (Required) The frequency at which anomaly reports are sent. Valid values: `DAILY` | `IMMEDIATE` | `WEEKLY`.
+* `monitor_arn_list` - (Required) A list of cost anomaly monitors.
+* `name` - (Required) The name for the subscription.
+* `subscriber` - (Required) A subscriber configuration. Multiple subscribers can be defined.
+    * `type` - (Required) The type of subscription. Valid values: `SNS` | `EMAIL`.
+    * `address` - (Required) The address of the subscriber. If type is `SNS`, this will be the ARN of the SNS topic. If type is `EMAIL`, this will be the destination email address.
+* `threshold` - (Optional) The dollar value that triggers a notification if the threshold is exceeded. Deprecated, use `threshold_expression` instead.
+* `threshold_expression` - (Optional) An Expression object used to specify the anomalies that you want to generate alerts for. See [Threshold Expression](#threshold-expression).
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Threshold Expression
+
+* `and` - (Optional) Return results that match both [Dimension](#dimension) objects.
+* `cost_category` - (Optional) Configuration block for the filter that's based on Cost Category values. See [Cost Category](#cost-category) below.
+* `dimension` - (Optional) Configuration block for the specific [Dimension](#dimension) to use.
+* `not` - (Optional) Return results that do not match the [Dimension](#dimension) object.
+* `or` - (Optional) Return results that match either [Dimension](#dimension) object.
+* `tags` - (Optional) Configuration block for the specific Tag to use. See [Tags](#tags) below.
+
+### Cost Category
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions is `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### Dimension
+
+* `key` - (Optional) Name of the dimension to filter on.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions is `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the dimension.
+
+### Tags
+
+* `key` - (Optional) Key for the tag.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions is `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the tag.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the anomaly subscription.
+* `id` - Unique ID of the anomaly subscription. Same as `arn`.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ce_anomaly_subscription` using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_ce_anomaly_subscription` using the `id`.
For example: + +```console +% terraform import aws_ce_anomaly_subscription.example AnomalySubscriptionARN +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ce_cost_allocation_tag.html.markdown b/website/docs/cdktf/python/r/ce_cost_allocation_tag.html.markdown new file mode 100644 index 00000000000..d4873139d7e --- /dev/null +++ b/website/docs/cdktf/python/r/ce_cost_allocation_tag.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "CE (Cost Explorer)" +layout: "aws" +page_title: "AWS: aws_ce_cost_allocation_tag" +description: |- + Provides a CE Cost Allocation Tag +--- + + + +# Resource: aws_ce_cost_allocation_tag + +Provides a CE Cost Allocation Tag. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ce_cost_allocation_tag import CeCostAllocationTag +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CeCostAllocationTag(self, "example", + status="Active", + tag_key="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `tag_key` - (Required) The key for the cost allocation tag. +* `status` - (Required) The status of a cost allocation tag. Valid values are `Active` and `Inactive`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The key for the cost allocation tag. +* `type` - The type of cost allocation tag. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ce_cost_allocation_tag` using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_ce_cost_allocation_tag` using the `id`. For example: + +```console +% terraform import aws_ce_cost_allocation_tag.example key +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ce_cost_category.html.markdown b/website/docs/cdktf/python/r/ce_cost_category.html.markdown new file mode 100644 index 00000000000..cfd94e72245 --- /dev/null +++ b/website/docs/cdktf/python/r/ce_cost_category.html.markdown @@ -0,0 +1,158 @@ +--- +subcategory: "CE (Cost Explorer)" +layout: "aws" +page_title: "AWS: aws_ce_cost_category" +description: |- + Provides a CE Cost Category Definition +--- + + + +# Resource: aws_ce_cost_category + +Provides a CE Cost Category. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
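+#
+# The three rules below bucket linked accounts into "production", "staging" and
+# "testing" Cost Category values by matching account-name suffixes.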
+#
+from imports.aws.ce_cost_category import CeCostCategory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CeCostCategory(self, "test",
+            name="NAME",
+            rule=[CeCostCategoryRule(
+                rule=CeCostCategoryRuleRule(
+                    dimension=CeCostCategoryRuleRuleDimension(
+                        key="LINKED_ACCOUNT_NAME",
+                        match_options=["ENDS_WITH"],
+                        values=["-prod"]
+                    )
+                ),
+                value="production"
+            ), CeCostCategoryRule(
+                rule=CeCostCategoryRuleRule(
+                    dimension=CeCostCategoryRuleRuleDimension(
+                        key="LINKED_ACCOUNT_NAME",
+                        match_options=["ENDS_WITH"],
+                        values=["-stg"]
+                    )
+                ),
+                value="staging"
+            ), CeCostCategoryRule(
+                rule=CeCostCategoryRuleRule(
+                    dimension=CeCostCategoryRuleRuleDimension(
+                        key="LINKED_ACCOUNT_NAME",
+                        match_options=["ENDS_WITH"],
+                        values=["-dev"]
+                    )
+                ),
+                value="testing"
+            )
+            ],
+            rule_version="CostCategoryExpression.v1"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Unique name for the Cost Category.
+* `rule` - (Required) Configuration block for the Cost Category rules used to categorize costs. See below.
+* `rule_version` - (Required) Rule schema version in this particular Cost Category.
+
+The following arguments are optional:
+
+* `default_value` - (Optional) Default value for the cost category.
+* `effective_start` - (Optional) The Cost Category's effective start date. It can only be a billing start date (first day of the month). If the date isn't provided, it's the first day of the current month. Dates can't be before the previous twelve months, or in the future. For example `2022-11-01T00:00:00Z`.
+* `split_charge_rule` - (Optional) Configuration block for the split charge rules used to allocate your charges between your Cost Category values. See below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `rule`
+
+* `inherited_value` - (Optional) Configuration block for the value the line item is categorized as if the line item contains the matched dimension. See below.
+* `rule` - (Optional) Configuration block for the `Expression` object used to categorize costs. See below.
+* `type` - (Optional) You can define the CostCategoryRule rule type as either `REGULAR` or `INHERITED_VALUE`. An `INHERITED_VALUE` rule is sketched below.
+* `value` - (Optional) Default value for the cost category.
+
+### `inherited_value`
+
+* `dimension_key` - (Optional) Key to extract cost category values.
+* `dimension_name` - (Optional) Name of the dimension that's used to group costs. If you specify `LINKED_ACCOUNT_NAME`, the cost category value is based on account name. If you specify `TAG`, the cost category value will be based on the value of the specified tag key. Valid values are `LINKED_ACCOUNT_NAME`, `TAG`.
+
+### `rule`
+
+* `and` - (Optional) Return results that match both `Dimension` objects.
+* `cost_category` - (Optional) Configuration block for the filter that's based on `CostCategory` values. See below.
+* `dimension` - (Optional) Configuration block for the specific `Dimension` to use for `Expression`. See below.
+* `not` - (Optional) Return results that do not match the `Dimension` object.
+* `or` - (Optional) Return results that match either `Dimension` object.
+* `tags` - (Optional) Configuration block for the specific `Tag` to use for `Expression`. See below.
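+
+The `INHERITED_VALUE` rule type described above is not covered by the example. As a minimal sketch, assuming the generated struct names `CeCostCategoryRule` and `CeCostCategoryRuleInheritedValue` follow the same pattern as the structs in the example above, and using an illustrative cost allocation tag key `Team`, such a rule might look like:
+
+```python
+# A hedged sketch, not 'cdktf convert' output: the struct name
+# CeCostCategoryRuleInheritedValue and the tag key "Team" are assumptions.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ce_cost_category import CeCostCategory, CeCostCategoryRule, CeCostCategoryRuleInheritedValue
+class MyInheritedValueSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CeCostCategory(self, "by_team",
+            name="ByTeam",
+            rule=[CeCostCategoryRule(
+                type="INHERITED_VALUE",
+                # Derive the cost category value from each line item's
+                # value of the "Team" cost allocation tag.
+                inherited_value=CeCostCategoryRuleInheritedValue(
+                    dimension_key="Team",
+                    dimension_name="TAG"
+                )
+            )
+            ],
+            rule_version="CostCategoryExpression.v1"
+        )
+```
+
+Because the rule carries no fixed `value`, the cost category value is taken from the matched tag, as described for `inherited_value` above.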
+
+### `cost_category`
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `dimension`
+
+* `key` - (Optional) Unique name of the dimension.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the dimension.
+
+### `tags`
+
+* `key` - (Optional) Key for the tag.
+* `match_options` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the tag.
+
+### `split_charge_rule`
+
+* `method` - (Required) Method that's used to define how to split your source costs across your targets. Valid values are `FIXED`, `PROPORTIONAL`, `EVEN`.
+* `parameter` - (Optional) Configuration block for the parameters for a split charge method. This is only required for the `FIXED` method. See below.
+* `source` - (Required) Cost Category value that you want to split.
+* `targets` - (Required) Cost Category values that you want to split costs across. These values can't be used as a source in other split charge rules.
+
+### `parameter`
+
+* `type` - (Optional) Parameter type.
+* `values` - (Optional) Parameter values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cost category.
+* `effective_end` - Effective end date of your Cost Category.
+* `id` - Unique ID of the cost category.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ce_cost_category` using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_ce_cost_category` using the `id`.
For example: + +```console +% terraform import aws_ce_cost_category.example costCategoryARN +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chime_voice_connector.html.markdown b/website/docs/cdktf/python/r/chime_voice_connector.html.markdown new file mode 100644 index 00000000000..aebe598c68a --- /dev/null +++ b/website/docs/cdktf/python/r/chime_voice_connector.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector" +description: |- + Enables you to connect your phone system to the telephone network at a substantial cost savings by using SIP trunking. +--- + + + +# Resource: aws_chime_voice_connector + +Enables you to connect your phone system to the telephone network at a substantial cost savings by using SIP trunking. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimeVoiceConnector(self, "test", + aws_region="us-east-1", + name="connector-test-1", + require_encryption=True + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) The name of the Amazon Chime Voice Connector. +* `require_encryption` - (Required) When enabled, requires encryption for the Amazon Chime Voice Connector. + +The following arguments are optional: + +* `aws_region` - (Optional) The AWS Region in which the Amazon Chime Voice Connector is created. Default value: `us-east-1` +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN (Amazon Resource Name) of the Amazon Chime Voice Connector. +* `outbound_host_name` - The outbound host name for the Amazon Chime Voice Connector. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Recorder using the name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Configuration Recorder using the name. 
For example: + +```console +% terraform import aws_chime_voice_connector.test example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chime_voice_connector_group.html.markdown b/website/docs/cdktf/python/r/chime_voice_connector_group.html.markdown new file mode 100644 index 00000000000..b5d408c5e3a --- /dev/null +++ b/website/docs/cdktf/python/r/chime_voice_connector_group.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_group" +description: |- + Creates an Amazon Chime Voice Connector group under the administrator's AWS account. +--- + + + +# Resource: aws_chime_voice_connector_group + +Creates an Amazon Chime Voice Connector group under the administrator's AWS account. You can associate Amazon Chime Voice Connectors with the Amazon Chime Voice Connector group by including VoiceConnectorItems in the request. + +You can include Amazon Chime Voice Connectors from different AWS Regions in your group. This creates a fault tolerant mechanism for fallback in case of availability events. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +from imports.aws.chime_voice_connector_group import ChimeVoiceConnectorGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + vc1 = ChimeVoiceConnector(self, "vc1", + aws_region="us-east-1", + name="connector-test-1", + require_encryption=True + ) + vc2 = ChimeVoiceConnector(self, "vc2", + aws_region="us-west-2", + name="connector-test-2", + require_encryption=True + ) + ChimeVoiceConnectorGroup(self, "group", + connector=[ChimeVoiceConnectorGroupConnector( + priority=1, + voice_connector_id=vc1.id + ), ChimeVoiceConnectorGroupConnector( + priority=3, + voice_connector_id=vc2.id + ) + ], + name="test-group" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the Amazon Chime Voice Connector group. +* `connector` - (Optional) The Amazon Chime Voice Connectors to route inbound calls to. + +### `connector` + +For Amazon Chime Voice Connector groups, the Amazon Chime Voice Connectors to which to route inbound calls. Includes priority configuration settings. Limit: 3 VoiceConnectorItems per Amazon Chime Voice Connector group. + +* `voice_connector_id` - (Required) The Amazon Chime Voice Connector ID. +* `priority` - (Required) The priority associated with the Amazon Chime Voice Connector, with 1 being the highest priority. Higher priority Amazon Chime Voice Connectors are attempted first. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Chime Voice Connector group ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Recorder using the name. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Configuration Recorder using the name. For example: + +```console +% terraform import aws_chime_voice_connector_group.default example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chime_voice_connector_logging.html.markdown b/website/docs/cdktf/python/r/chime_voice_connector_logging.html.markdown new file mode 100644 index 00000000000..6bd18fa09a9 --- /dev/null +++ b/website/docs/cdktf/python/r/chime_voice_connector_logging.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_logging" +description: |- + Adds a logging configuration for the specified Amazon Chime Voice Connector. The logging configuration specifies whether SIP message logs are enabled for sending to Amazon CloudWatch Logs. +--- + + + +# Resource: aws_chime_voice_connector_logging + +Adds a logging configuration for the specified Amazon Chime Voice Connector. The logging configuration specifies whether SIP message logs are enabled for sending to Amazon CloudWatch Logs. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +from imports.aws.chime_voice_connector_logging import ChimeVoiceConnectorLogging +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = ChimeVoiceConnector(self, "default", + name="vc-name-test", + require_encryption=True + ) + aws_chime_voice_connector_logging_default = ChimeVoiceConnectorLogging(self, "default_1", + enable_media_metric_logs=True, + enable_sip_logs=True, + voice_connector_id=default_var.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_chime_voice_connector_logging_default.override_logical_id("default") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voice_connector_id` - (Required) The Amazon Chime Voice Connector ID. +* `enable_sip_logs` - (Optional) When true, enables SIP message logs for sending to Amazon CloudWatch Logs. +* `enable_media_metric_logs` - (Optional) When true, enables logging of detailed media metrics for Voice Connectors to Amazon CloudWatch logs. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Logging using the `voice_connector_id`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Chime Voice Connector Logging using the `voice_connector_id`. For example: + +```console +% terraform import aws_chime_voice_connector_logging.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chime_voice_connector_origination.html.markdown b/website/docs/cdktf/python/r/chime_voice_connector_origination.html.markdown new file mode 100644 index 00000000000..6b7a50c3b53 --- /dev/null +++ b/website/docs/cdktf/python/r/chime_voice_connector_origination.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_origination" +description: |- + Enable origination settings to control inbound calling to your SIP infrastructure. +--- + + + +# Resource: aws_chime_voice_connector_origination + +Enable origination settings to control inbound calling to your SIP infrastructure. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +from imports.aws.chime_voice_connector_origination import ChimeVoiceConnectorOrigination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = ChimeVoiceConnector(self, "default", + name="test", + require_encryption=True + ) + aws_chime_voice_connector_origination_default = + ChimeVoiceConnectorOrigination(self, "default_1", + disabled=False, + route=[ChimeVoiceConnectorOriginationRoute( + host="127.0.0.1", + port=8081, + priority=1, + protocol="TCP", + weight=1 + ), ChimeVoiceConnectorOriginationRoute( + host="127.0.0.2", + port=8082, + priority=2, + protocol="TCP", + weight=10 + ) + ], + voice_connector_id=default_var.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_chime_voice_connector_origination_default.override_logical_id("default") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voice_connector_id` - (Required) The Amazon Chime Voice Connector ID. +* `route` - (Required) Set of call distribution properties defined for your SIP hosts. See [route](#route) below for more details. Minimum of 1. Maximum of 20. +* `disabled` - (Optional) When origination settings are disabled, inbound calls are not enabled for your Amazon Chime Voice Connector. + +### `route` + +Origination routes define call distribution properties for your SIP hosts to receive inbound calls using your Amazon Chime Voice Connector. Limit: Ten origination routes for each Amazon Chime Voice Connector. + +* `host` - (Required) The FQDN or IP address to contact for origination traffic. +* `port` - (Required) The designated origination route port. Defaults to `5060`. +* `priority` - (Required) The priority associated with the host, with 1 being the highest priority. Higher priority hosts are attempted first. 
+* `protocol` - (Required) The protocol to use for the origination route. Encryption-enabled Amazon Chime Voice Connectors use TCP protocol by default. +* `weight` - (Required) The weight associated with the host. If hosts are equal in priority, calls are redistributed among them based on their relative weight. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Origination using the `voice_connector_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Chime Voice Connector Origination using the `voice_connector_id`. For example: + +```console +% terraform import aws_chime_voice_connector_origination.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chime_voice_connector_streaming.html.markdown b/website/docs/cdktf/python/r/chime_voice_connector_streaming.html.markdown new file mode 100644 index 00000000000..0b034d54d36 --- /dev/null +++ b/website/docs/cdktf/python/r/chime_voice_connector_streaming.html.markdown @@ -0,0 +1,165 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_streaming" +description: |- + The streaming configuration associated with an Amazon Chime Voice Connector. Specifies whether media streaming is enabled for sending to Amazon Kinesis, and shows the retention period for the Amazon Kinesis data, in hours. +--- + + + +# Resource: aws_chime_voice_connector_streaming + +Adds a streaming configuration for the specified Amazon Chime Voice Connector. The streaming configuration specifies whether media streaming is enabled for sending to Amazon Kinesis. +It also sets the retention period, in hours, for the Amazon Kinesis data. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +from imports.aws.chime_voice_connector_streaming import ChimeVoiceConnectorStreaming +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = ChimeVoiceConnector(self, "default", + name="vc-name-test", + require_encryption=True + ) + aws_chime_voice_connector_streaming_default = + ChimeVoiceConnectorStreaming(self, "default_1", + data_retention=7, + disabled=False, + streaming_notification_targets=["SQS"], + voice_connector_id=default_var.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_chime_voice_connector_streaming_default.override_logical_id("default") +``` + +### Example Usage With Media Insights + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +from imports.aws.chime_voice_connector_streaming import ChimeVoiceConnectorStreaming +from imports.aws.chimesdkmediapipelines_media_insights_pipeline_configuration import ChimesdkmediapipelinesMediaInsightsPipelineConfiguration +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.kinesis_stream import KinesisStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = ChimeVoiceConnector(self, "default", + name="vc-name-test", + require_encryption=True + ) + example = KinesisStream(self, "example", + name="ExampleStream", + shard_count=2 + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["mediapipelines.chime.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + aws_iam_role_example = IamRole(self, "example_3", + assume_role_policy=Token.as_string(assume_role.json), + name="ExampleResourceAccessRole" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_chimesdkmediapipelines_media_insights_pipeline_configuration_example = + ChimesdkmediapipelinesMediaInsightsPipelineConfiguration(self, "example_4", + elements=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + amazon_transcribe_call_analytics_processor_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsAmazonTranscribeCallAnalyticsProcessorConfiguration( + language_code="en-US" + ), + type="AmazonTranscribeCallAnalyticsProcessor" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + kinesis_data_stream_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsKinesisDataStreamSinkConfiguration( + insights_target=example.arn + ), + type="KinesisDataStreamSink" + ) + ], + name="ExampleConfig", + resource_access_role_arn=Token.as_string(aws_iam_role_example.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_chimesdkmediapipelines_media_insights_pipeline_configuration_example.override_logical_id("example") + aws_chime_voice_connector_streaming_default = + ChimeVoiceConnectorStreaming(self, "default_5", + data_retention=7, + disabled=False, + media_insights_configuration=ChimeVoiceConnectorStreamingMediaInsightsConfiguration( + configuration_arn=Token.as_string(aws_chimesdkmediapipelines_media_insights_pipeline_configuration_example.arn), + disabled=False + ), + streaming_notification_targets=["SQS"], + voice_connector_id=default_var.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_chime_voice_connector_streaming_default.override_logical_id("default") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voice_connector_id` - (Required) The Amazon Chime Voice Connector ID. +* `data_retention` - (Required) The retention period, in hours, for the Amazon Kinesis data. +* `disabled` - (Optional) When true, media streaming to Amazon Kinesis is turned off. Default: `false` +* `streaming_notification_targets` - (Optional) The streaming notification targets. Valid Values: `EventBridge | SNS | SQS` +* `media_insights_configuration` - (Optional) The media insights configuration. See [`media_insights_configuration`](#media_insights_configuration). + +### media_insights_configuration + +* `disabled` - (Optional) When `true`, the media insights configuration is not enabled. Defaults to `false`. +* `configuration_arn` - (Optional) The media insights configuration that will be invoked by the Voice Connector. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Streaming using the `voice_connector_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Chime Voice Connector Streaming using the `voice_connector_id`. For example: + +```console +% terraform import aws_chime_voice_connector_streaming.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chime_voice_connector_termination.html.markdown b/website/docs/cdktf/python/r/chime_voice_connector_termination.html.markdown new file mode 100644 index 00000000000..50a5b3be7c9 --- /dev/null +++ b/website/docs/cdktf/python/r/chime_voice_connector_termination.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_termination" +description: |- + Enable Termination settings to control outbound calling from your SIP infrastructure. +--- + + + +# Resource: aws_chime_voice_connector_termination + +Enable Termination settings to control outbound calling from your SIP infrastructure. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +from imports.aws.chime_voice_connector_termination import ChimeVoiceConnectorTermination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = ChimeVoiceConnector(self, "default", + name="vc-name-test", + require_encryption=True + ) + aws_chime_voice_connector_termination_default = + ChimeVoiceConnectorTermination(self, "default_1", + calling_regions=["US", "CA"], + cidr_allow_list=["50.35.78.96/31"], + cps_limit=1, + disabled=False, + voice_connector_id=default_var.id + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match. + aws_chime_voice_connector_termination_default.override_logical_id("default") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voice_connector_id` - (Required) The Amazon Chime Voice Connector ID. +* `cidr_allow_list` - (Required) The IP addresses allowed to make calls, in CIDR format. +* `calling_regions` - (Required) The countries to which calls are allowed, in ISO 3166-1 alpha-2 format. +* `disabled` - (Optional) When termination settings are disabled, outbound calls can not be made. +* `default_phone_number` - (Optional) The default caller ID phone number. +* `cps_limit` - (Optional) The limit on calls per second. Max value based on account service quota. Default value of `1`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Termination using the `voice_connector_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Chime Voice Connector Termination using the `voice_connector_id`. For example: + +```console +% terraform import aws_chime_voice_connector_termination.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chime_voice_connector_termination_credentials.html.markdown b/website/docs/cdktf/python/r/chime_voice_connector_termination_credentials.html.markdown new file mode 100644 index 00000000000..b1b02f54000 --- /dev/null +++ b/website/docs/cdktf/python/r/chime_voice_connector_termination_credentials.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_termination_credentials" +description: |- + Adds termination SIP credentials for the specified Amazon Chime Voice Connector. +--- + + + +# Resource: aws_chime_voice_connector_termination_credentials + +Adds termination SIP credentials for the specified Amazon Chime Voice Connector. + +~> **Note:** Voice Connector Termination Credentials requires a [Voice Connector Termination](/docs/providers/aws/r/chime_voice_connector_termination.html) to be present. Use of `depends_on` (as shown below) is recommended to avoid race conditions. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.chime_voice_connector import ChimeVoiceConnector +from imports.aws.chime_voice_connector_termination import ChimeVoiceConnectorTermination +from imports.aws.chime_voice_connector_termination_credentials import ChimeVoiceConnectorTerminationCredentials +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = ChimeVoiceConnector(self, "default", + name="test", + require_encryption=True + ) + aws_chime_voice_connector_termination_default = + ChimeVoiceConnectorTermination(self, "default_1", + calling_regions=["US", "CA"], + cidr_allow_list=["50.35.78.96/31"], + cps_limit=1, + disabled=True, + voice_connector_id=default_var.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_chime_voice_connector_termination_default.override_logical_id("default") + aws_chime_voice_connector_termination_credentials_default = + ChimeVoiceConnectorTerminationCredentials(self, "default_2", + credentials=[ChimeVoiceConnectorTerminationCredentialsCredentials( + password="test!", + username="test" + ) + ], + depends_on=[aws_chime_voice_connector_termination_default], + voice_connector_id=default_var.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_chime_voice_connector_termination_credentials_default.override_logical_id("default") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voice_connector_id` - (Required) Amazon Chime Voice Connector ID. +* `credentials` - (Required) List of termination SIP credentials. + +### `credentials` + +The SIP credentials used to authenticate requests to your Amazon Chime Voice Connector. + +* `username` - (Required) RFC2617 compliant username associated with the SIP credentials. +* `password` - (Required) RFC2617 compliant password associated with the SIP credentials. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Termination Credentials using the `voice_connector_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Chime Voice Connector Termination Credentials using the `voice_connector_id`. 
For example: + +```console +% terraform import aws_chime_voice_connector_termination_credentials.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chimesdkmediapipelines_media_insights_pipeline_configuration.html.markdown b/website/docs/cdktf/python/r/chimesdkmediapipelines_media_insights_pipeline_configuration.html.markdown new file mode 100644 index 00000000000..7aba26625fc --- /dev/null +++ b/website/docs/cdktf/python/r/chimesdkmediapipelines_media_insights_pipeline_configuration.html.markdown @@ -0,0 +1,426 @@ +--- +subcategory: "Chime SDK Media Pipelines" +layout: "aws" +page_title: "AWS: aws_chimesdkmediapipelines_media_insights_pipeline_configuration" +description: |- + Terraform resource for managing an AWS Chime SDK Media Pipelines Media Insights Pipeline Configuration. +--- + + + +# Resource: aws_chimesdkmediapipelines_media_insights_pipeline_configuration + +Terraform resource for managing an AWS Chime SDK Media Pipelines Media Insights Pipeline Configuration. +Consult the [Call analytics developer guide](https://docs.aws.amazon.com/chime-sdk/latest/dg/call-analytics.html) for more detailed information about usage. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chimesdkmediapipelines_media_insights_pipeline_configuration import ChimesdkmediapipelinesMediaInsightsPipelineConfiguration +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.kinesis_stream import KinesisStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = KinesisStream(self, "example", + name="example", + shard_count=2 + ) + media_pipelines_assume_role = DataAwsIamPolicyDocument(self, "media_pipelines_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["mediapipelines.chime.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + call_analytics_role = IamRole(self, "call_analytics_role", + assume_role_policy=Token.as_string(media_pipelines_assume_role.json), + name="CallAnalyticsRole" + ) + ChimesdkmediapipelinesMediaInsightsPipelineConfiguration(self, "my_configuration", + elements=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + amazon_transcribe_call_analytics_processor_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsAmazonTranscribeCallAnalyticsProcessorConfiguration( + language_code="en-US" + ), + type="AmazonTranscribeCallAnalyticsProcessor" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + kinesis_data_stream_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsKinesisDataStreamSinkConfiguration( + insights_target=example.arn + ), + type="KinesisDataStreamSink" + ) + ], + name="MyBasicConfiguration", + resource_access_role_arn=call_analytics_role.arn, + tags={ + "Key1": "Value1", + "Key2": "Value2" + } + ) +``` + +- The required policies on `call_analytics_role` will vary based on the selected processors. 
See [Call analytics resource access role](https://docs.aws.amazon.com/chime-sdk/latest/dg/ca-resource-access-role.html) for directions on choosing appropriate policies. + +### Transcribe Call Analytics processor usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chimesdkmediapipelines_media_insights_pipeline_configuration import ChimesdkmediapipelinesMediaInsightsPipelineConfiguration +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + transcribe_assume_role = DataAwsIamPolicyDocument(self, "transcribe_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["transcribe.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + post_call_role = IamRole(self, "post_call_role", + assume_role_policy=Token.as_string(transcribe_assume_role.json), + name="PostCallAccessRole" + ) + ChimesdkmediapipelinesMediaInsightsPipelineConfiguration(self, "my_configuration", + elements=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + amazon_transcribe_call_analytics_processor_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsAmazonTranscribeCallAnalyticsProcessorConfiguration( + call_analytics_stream_categories=["category_1", "category_2"], + content_redaction_type="PII", + enable_partial_results_stabilization=True, + filter_partial_results=True, + language_code="en-US", + language_model_name="MyLanguageModel", + partial_results_stability="high", + pii_entity_types="ADDRESS,BANK_ACCOUNT_NUMBER", + post_call_analytics_settings=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsAmazonTranscribeCallAnalyticsProcessorConfigurationPostCallAnalyticsSettings( + content_redaction_output="redacted", + data_access_role_arn=post_call_role.arn, + output_encryption_kms_key_id="MyKmsKeyId", + output_location="s3://MyBucket" + ), + vocabulary_filter_method="mask", + vocabulary_filter_name="MyVocabularyFilter", + vocabulary_name="MyVocabulary" + ), + type="AmazonTranscribeCallAnalyticsProcessor" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + kinesis_data_stream_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsKinesisDataStreamSinkConfiguration( + insights_target=example.arn + ), + type="KinesisDataStreamSink" + ) + ], + name="MyCallAnalyticsConfiguration", + resource_access_role_arn=Token.as_string(aws_iam_role_example.arn) + ) +``` + +### Real time alerts usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.chimesdkmediapipelines_media_insights_pipeline_configuration import ChimesdkmediapipelinesMediaInsightsPipelineConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimesdkmediapipelinesMediaInsightsPipelineConfiguration(self, "my_configuration", + elements=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + amazon_transcribe_call_analytics_processor_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsAmazonTranscribeCallAnalyticsProcessorConfiguration( + language_code="en-US" + ), + type="AmazonTranscribeCallAnalyticsProcessor" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + kinesis_data_stream_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsKinesisDataStreamSinkConfiguration( + insights_target=example.arn + ), + type="KinesisDataStreamSink" + ) + ], + name="MyRealTimeAlertConfiguration", + real_time_alert_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationRealTimeAlertConfiguration( + disabled=False, + rules=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationRealTimeAlertConfigurationRules( + issue_detection_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationRealTimeAlertConfigurationRulesIssueDetectionConfiguration( + rule_name="MyIssueDetectionRule" + ), + type="IssueDetection" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationRealTimeAlertConfigurationRules( + keyword_match_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationRealTimeAlertConfigurationRulesKeywordMatchConfiguration( + keywords=["keyword1", "keyword2"], + negate=False, + rule_name="MyKeywordMatchRule" + ), + type="KeywordMatch" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationRealTimeAlertConfigurationRules( + sentiment_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationRealTimeAlertConfigurationRulesSentimentConfiguration( + rule_name="MySentimentRule", + sentiment_type="NEGATIVE", + time_period=60 + ), + type="Sentiment" + ) + ] + ), + resource_access_role_arn=call_analytics_role.arn + ) +``` + +### Transcribe processor usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.chimesdkmediapipelines_media_insights_pipeline_configuration import ChimesdkmediapipelinesMediaInsightsPipelineConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimesdkmediapipelinesMediaInsightsPipelineConfiguration(self, "my_configuration", + elements=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + amazon_transcribe_processor_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsAmazonTranscribeProcessorConfiguration( + content_identification_type="PII", + enable_partial_results_stabilization=True, + filter_partial_results=True, + language_code="en-US", + language_model_name="MyLanguageModel", + partial_results_stability="high", + pii_entity_types="ADDRESS,BANK_ACCOUNT_NUMBER", + show_speaker_label=True, + vocabulary_filter_method="mask", + vocabulary_filter_name="MyVocabularyFilter", + vocabulary_name="MyVocabulary" + ), + type="AmazonTranscribeProcessor" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + kinesis_data_stream_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsKinesisDataStreamSinkConfiguration( + insights_target=example.arn + ), + type="KinesisDataStreamSink" + ) + ], + name="MyTranscribeConfiguration", + resource_access_role_arn=Token.as_string(aws_iam_role_example.arn) + ) +``` + +### Voice analytics processor usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chimesdkmediapipelines_media_insights_pipeline_configuration import ChimesdkmediapipelinesMediaInsightsPipelineConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimesdkmediapipelinesMediaInsightsPipelineConfiguration(self, "my_configuration", + elements=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + type="VoiceAnalyticsProcessor", + voice_analytics_processor_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsVoiceAnalyticsProcessorConfiguration( + speaker_search_status="Enabled", + voice_tone_analysis_status="Enabled" + ) + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + lambda_function_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsLambdaFunctionSinkConfiguration( + insights_target="arn:aws:lambda:us-west-2:1111111111:function:MyFunction" + ), + type="LambdaFunctionSink" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + sns_topic_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsSnsTopicSinkConfiguration( + insights_target="arn:aws:sns:us-west-2:1111111111:topic/MyTopic" + ), + type="SnsTopicSink" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + sqs_queue_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsSqsQueueSinkConfiguration( + insights_target="arn:aws:sqs:us-west-2:1111111111:queue/MyQueue" + ), + type="SqsQueueSink" + ), ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + kinesis_data_stream_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsKinesisDataStreamSinkConfiguration( + insights_target=test.arn + ), + 
type="KinesisDataStreamSink" + ) + ], + name="MyVoiceAnalyticsConfiguration", + resource_access_role_arn=example.arn + ) +``` + +### S3 Recording sink usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chimesdkmediapipelines_media_insights_pipeline_configuration import ChimesdkmediapipelinesMediaInsightsPipelineConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimesdkmediapipelinesMediaInsightsPipelineConfiguration(self, "my_configuration", + elements=[ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElements( + s3_recording_sink_configuration=ChimesdkmediapipelinesMediaInsightsPipelineConfigurationElementsS3RecordingSinkConfiguration( + destination="arn:aws:s3:::MyBucket" + ), + type="S3RecordingSink" + ) + ], + name="MyS3RecordingConfiguration", + resource_access_role_arn=example.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Configuration name. +* `resource_access_role_arn` - (Required) ARN of IAM Role used by service to invoke processors and sinks specified by configuration elements. +* `elements` - (Required) Collection of processors and sinks to transform media and deliver data. +* `real_time_alert_configuration` - (Optional) Configuration for real-time alert rules to send EventBridge notifications when certain conditions are met. +* `tags` - (Optional) Key-value map of tags for the resource. + +### Elements + +* `type` - (Required) Element type. +* `amazon_transcribe_call_analytics_processor_configuration` - (Optional) Configuration for Amazon Transcribe Call Analytics processor. + * `call_analytics_stream_categories` - (Optional) Filter for category events to be delivered to insights target. + * `content_identification_type` - (Optional) Labels all personally identifiable information (PII) identified in Utterance events. + * `content_redaction_type` - (Optional) Redacts all personally identifiable information (PII) identified in Utterance events. + * `enable_partial_results_stabilization` - (Optional) Enables partial result stabilization in Utterance events. + * `filter_partial_results` - (Optional) Filters partial Utterance events from delivery to the insights target. + * `language_code` - (Required) Language code for the transcription model. + * `language_model_name` - (Optional) Name of custom language model for transcription. + * `partial_results_stability` - (Optional) Level of stability to use when partial results stabilization is enabled. + * `pii_entity_types` - (Optional) Types of personally identifiable information (PII) to redact from an Utterance event. + * `post_call_analytics_settings` - (Optional) Settings for post call analytics. + * `content_redaction_output` - (Optional) Should output be redacted. + * `data_access_role_arn` - (Required) ARN of the role used by AWS Transcribe to upload your post call analysis. + * `output_encryption_kms_key_id` - (Optional) ID of the KMS key used to encrypt the output. + * `output_location` - (Required) The Amazon S3 location where you want your Call Analytics post-call transcription output stored. + * `vocabulary_filter_method` - (Optional) Method for applying a vocabulary filter to Utterance events. 
+ * `vocabulary_filter_name` - (Optional) Name of the custom vocabulary filter to use when processing Utterance events. + * `vocabulary_name` - (Optional) Name of the custom vocabulary to use when processing Utterance events. +* `amazon_transcribe_processor_configuration` - (Optional) Configuration for Amazon Transcribe processor. + * `content_identification_type` - (Optional) Labels all personally identifiable information (PII) identified in Transcript events. + * `content_redaction_type` - (Optional) Redacts all personally identifiable information (PII) identified in Transcript events. + * `enable_partial_results_stabilization` - (Optional) Enables partial result stabilization in Transcript events. + * `filter_partial_results` - (Optional) Filters partial Utterance events from delivery to the insights target. + * `language_code` - (Required) Language code for the transcription model. + * `language_model_name` - (Optional) Name of custom language model for transcription. + * `partial_results_stability` - (Optional) Level of stability to use when partial results stabilization is enabled. + * `pii_entity_types` - (Optional) Types of personally identifiable information (PII) to redact from a Transcript event. + * `show_speaker_label` - (Optional) Enables speaker partitioning (diarization) in your Transcript events. + * `vocabulary_filter_method` - (Optional) Method for applying a vocabulary filter to Transcript events. + * `vocabulary_filter_name` - (Optional) Name of the custom vocabulary filter to use when processing Transcript events. + * `vocabulary_name` - (Optional) Name of the custom vocabulary to use when processing Transcript events. +* `kinesis_data_stream_sink_configuration` - (Optional) Configuration for Kinesis Data Stream sink. + * `insights_target` - (Required) Kinesis Data Stream to deliver results. +* `lambda_function_sink_configuration` - (Optional) Configuration for Lambda Function sink. + * `insights_target` - (Required) Lambda Function to deliver results. +* `sns_topic_sink_configuration` - (Optional) Configuration for SNS Topic sink. + * `insights_target` - (Required) SNS topic to deliver results. +* `sqs_queue_sink_configuration` - (Optional) Configuration for SQS Queue sink. + * `insights_target` - (Required) SQS queue to deliver results. +* `s3_recording_sink_configuration` - (Optional) Configuration for S3 recording sink. + * `destination` - (Required) S3 URI to deliver recordings. +* `voice_analytics_processor_configuration` - (Optional) Configuration for Voice analytics processor. + * `speaker_search_status` - (Required) Enable speaker search. + * `voice_tone_analysis_status` - (Required) Enable voice tone analysis. + +### Real time alert configuration + +* `rules` - (Required) Collection of real time alert rules + * `type` - (Required) Rule type. + * `issue_detection_configuration` - (Optional) Configuration for an issue detection rule. + * `rule_name` - (Required) Rule name. + * `keyword_match_configuration` - (Optional) Configuration for a keyword match rule. + * `rule_name` - (Required) Rule name. + * `keywords` - (Required) Collection of keywords to match. + * `negate` - (Optional) Negate the rule. + * `sentiment_configuration` - (Optional) Configuration for a sentiment rule. + * `rule_name` - (Required) Rule name. + * `sentiment_type` - (Required) Sentiment type to match. + * `time_period` - (Optional) Analysis interval. +* `disabled` - (Optional) Disables real time alert rules. 
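+
+The service assumes `resource_access_role_arn` to invoke the processors and sinks listed above, so that role needs permissions matching the configured elements. As a hedged sketch (the exact actions required vary by element; consult the [Call analytics resource access role](https://docs.aws.amazon.com/chime-sdk/latest/dg/ca-resource-access-role.html) guide), granting the `call_analytics_role` from the basic usage example access to its Kinesis Data Stream sink might look like:
+
+```python
+# A hedged sketch, not 'cdktf convert' output: the Kinesis actions below are
+# assumptions for a Kinesis Data Stream sink, and "example" and
+# "call_analytics_role" refer to the stream and role from the basic usage
+# example above.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role_policy import IamRolePolicy
+class MySinkPolicySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        sink_access = DataAwsIamPolicyDocument(self, "sink_access",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["kinesis:DescribeStream", "kinesis:PutRecord"],
+                effect="Allow",
+                resources=[example.arn]
+            )
+            ]
+        )
+        IamRolePolicy(self, "call_analytics_sink_access",
+            name="KinesisSinkAccess",
+            policy=Token.as_string(sink_access.json),
+            role=call_analytics_role.id
+        )
+```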
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Media Insights Pipeline Configuration. +* `id` - Unique ID of the Media Insights Pipeline Configuration. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `3m`) +* `update` - (Default `3m`) +* `delete` - (Default `30s`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime SDK Media Pipelines Media Insights Pipeline Configuration using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Chime SDK Media Pipelines Media Insights Pipeline Configuration using the `id`. For example: + +```console +% terraform import aws_chimesdkmediapipelines_media_insights_pipeline_configuration.example abcdef123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chimesdkvoice_global_settings.html.markdown b/website/docs/cdktf/python/r/chimesdkvoice_global_settings.html.markdown new file mode 100644 index 00000000000..ebe7270d51b --- /dev/null +++ b/website/docs/cdktf/python/r/chimesdkvoice_global_settings.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Chime SDK Voice" +layout: "aws" +page_title: "AWS: aws_chimesdkvoice_global_settings" +description: |- + Terraform resource for managing Amazon Chime SDK Voice Global Settings. +--- + + + +# Resource: aws_chimesdkvoice_global_settings + +Terraform resource for managing Amazon Chime SDK Voice Global Settings. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chimesdkvoice_global_settings import ChimesdkvoiceGlobalSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimesdkvoiceGlobalSettings(self, "example", + voice_connector=ChimesdkvoiceGlobalSettingsVoiceConnector( + cdr_bucket="example-bucket-name" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voice_connector` - (Required) The Voice Connector settings. See [voice_connector](#voice_connector). + +### `voice_connector` + +The Amazon Chime SDK Voice Connector settings. Includes any Amazon S3 buckets designated for storing call detail records. + +* `cdr_bucket` - (Optional) The S3 bucket that stores the Voice Connector's call detail records. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AWS account ID for which the settings are applied. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Chime SDK Voice Global Settings using the `id` (AWS account ID). 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS Chime SDK Voice Global Settings using the `id` (AWS account ID). For example: + +```console +% terraform import aws_chimesdkvoice_global_settings.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chimesdkvoice_sip_media_application.html.markdown b/website/docs/cdktf/python/r/chimesdkvoice_sip_media_application.html.markdown new file mode 100644 index 00000000000..f9bbdfbc5c5 --- /dev/null +++ b/website/docs/cdktf/python/r/chimesdkvoice_sip_media_application.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Chime SDK Voice" +layout: "aws" +page_title: "AWS: aws_chimesdkvoice_sip_media_application" +description: |- + A ChimeSDKVoice SIP Media Application is a managed object that passes values from a SIP rule to a target AWS Lambda function. +--- + + + +# Resource: aws_chimesdkvoice_sip_media_application + +A ChimeSDKVoice SIP Media Application is a managed object that passes values from a SIP rule to a target AWS Lambda function. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chimesdkvoice_sip_media_application import ChimesdkvoiceSipMediaApplication +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimesdkvoiceSipMediaApplication(self, "example", + aws_region="us-east-1", + endpoints=ChimesdkvoiceSipMediaApplicationEndpoints( + lambda_arn=test.arn + ), + name="example-sip-media-application" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `aws_region` - (Required) The AWS Region in which the AWS Chime SDK Voice Sip Media Application is created. +* `endpoints` - (Required) List of endpoints (Lambda Amazon Resource Names) specified for the SIP media application. Currently, only one endpoint is supported. See [`endpoints`](#endpoints). +* `name` - (Required) The name of the AWS Chime SDK Voice Sip Media Application. + +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### `endpoints` + +The endpoint assigned to the SIP media application. + +* `lambda_arn` - (Required) Valid Amazon Resource Name (ARN) of the Lambda function, version, or alias. The function must be created in the same AWS Region as the SIP media application. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN (Amazon Resource Name) of the AWS Chime SDK Voice Sip Media Application +* `id` - The SIP media application ID. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). 
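+
+Note that the SIP media application must be able to invoke the Lambda endpoint. A hedged sketch of granting that permission follows; the `voiceconnector.chime.amazonaws.com` service principal and the `test`/`example` references (the Lambda function and the SIP media application from the example above) are assumptions to adapt to your configuration:
+
+```python
+# A sketch only: grants the Chime SDK Voice service permission to invoke the
+# Lambda endpoint. The principal value and resource references are assumptions.
+from imports.aws.lambda_permission import LambdaPermission
+
+# (inside a TerraformStack __init__, alongside the example above)
+LambdaPermission(self, "allow_sip_media_application",
+    action="lambda:InvokeFunction",
+    function_name=test.function_name,  # the Lambda behind `lambda_arn`
+    principal="voiceconnector.chime.amazonaws.com",
+    source_arn=example.arn  # the aws_chimesdkvoice_sip_media_application
+)
+```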
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a ChimeSDKVoice SIP Media Application using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a ChimeSDKVoice SIP Media Application using the `id`. For example: + +```console +% terraform import aws_chimesdkvoice_sip_media_application.example abcdef123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/chimesdkvoice_sip_rule.html.markdown b/website/docs/cdktf/python/r/chimesdkvoice_sip_rule.html.markdown new file mode 100644 index 00000000000..5cfa42fdc69 --- /dev/null +++ b/website/docs/cdktf/python/r/chimesdkvoice_sip_rule.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Chime SDK Voice" +layout: "aws" +page_title: "AWS: aws_chimesdkvoice_sip_rule" +description: |- + A SIP rule associates your SIP media application with a phone number or a Request URI hostname. You can associate a SIP rule with more than one SIP media application. Each application then runs only that rule. +--- + + +# Resource: aws_chimesdkvoice_sip_rule + +A SIP rule associates your SIP media application with a phone number or a Request URI hostname. You can associate a SIP rule with more than one SIP media application. Each application then runs only that rule. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.chimesdkvoice_sip_rule import ChimesdkvoiceSipRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ChimesdkvoiceSipRule(self, "example", + name="example-sip-rule", + target_applications=[ChimesdkvoiceSipRuleTargetApplications( + aws_region="us-east-1", + priority=1, + sip_media_application_id=example_sma.id + ) + ], + trigger_type="RequestUriHostname", + trigger_value=example_voice_connector.outbound_host_name + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) The name of the SIP rule. +* `target_applications` - (Required) List of SIP media applications with priority and AWS Region. Only one SIP application per AWS Region can be used. See [`target_applications`](#target_applications). +* `trigger_type` - (Required) The type of trigger assigned to the SIP rule in `trigger_value`. Valid values are `RequestUriHostname` or `ToPhoneNumber`. +* `trigger_value` - (Required) If `trigger_type` is `RequestUriHostname`, the value can be the outbound host name of an Amazon Chime Voice Connector. If `trigger_type` is `ToPhoneNumber`, the value can be a customer-owned phone number in the E164 format. The Sip Media Application specified in the Sip Rule is triggered if the request URI in an incoming SIP request matches the `RequestUriHostname`, or if the "To" header in the incoming SIP request matches the `ToPhoneNumber` value. + +The following arguments are optional: + +* `disabled` - (Optional) Enables or disables a rule. You must disable rules before you can delete them. 
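+
+For example, a rule keyed to a phone number rather than a Voice Connector hostname might look like the following sketch (the `example_sma` reference and the E.164 number are placeholders):
+
+```python
+# A sketch only: triggers the SIP media application when the "To" header of an
+# incoming SIP request matches the given E.164 phone number.
+ChimesdkvoiceSipRule(self, "phone_number_rule",
+    name="example-phone-number-rule",
+    target_applications=[ChimesdkvoiceSipRuleTargetApplications(
+        aws_region="us-east-1",
+        priority=1,
+        sip_media_application_id=example_sma.id
+    )
+    ],
+    trigger_type="ToPhoneNumber",
+    trigger_value="+12065550100"
+)
+```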
+
+### `target_applications`
+
+List of SIP media applications with priority and AWS Region. Only one SIP application per AWS Region can be used.
+
+* `aws_region` - (Required) The AWS Region of the target application.
+* `priority` - (Required) Priority of the SIP media application in the target list.
+* `sip_media_application_id` - (Required) The SIP media application ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The SIP rule ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a ChimeSDKVoice SIP Rule using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a ChimeSDKVoice SIP Rule using the `id`. For example:
+
+```console
+% terraform import aws_chimesdkvoice_sip_rule.example abcdef123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/chimesdkvoice_voice_profile_domain.html.markdown b/website/docs/cdktf/python/r/chimesdkvoice_voice_profile_domain.html.markdown
new file mode 100644
index 00000000000..5e08a8df885
--- /dev/null
+++ b/website/docs/cdktf/python/r/chimesdkvoice_voice_profile_domain.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "Chime SDK Voice"
+layout: "aws"
+page_title: "AWS: aws_chimesdkvoice_voice_profile_domain"
+description: |-
+  Terraform resource for managing an AWS Chime SDK Voice Profile Domain.
+---
+
+
+
+# Resource: aws_chimesdkvoice_voice_profile_domain
+
+Terraform resource for managing an AWS Chime SDK Voice Profile Domain.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.chimesdkvoice_voice_profile_domain import ChimesdkvoiceVoiceProfileDomain
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            deletion_window_in_days=7,
+            description="KMS Key for Voice Profile Domain"
+        )
+        aws_chimesdkvoice_voice_profile_domain_example = ChimesdkvoiceVoiceProfileDomain(self, "example_1",
+            description="My Voice Profile Domain",
+            name="ExampleVoiceProfileDomain",
+            server_side_encryption_configuration=ChimesdkvoiceVoiceProfileDomainServerSideEncryptionConfiguration(
+                kms_key_arn=example.arn
+            ),
+            tags={
+                "key1": "value1"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_chimesdkvoice_voice_profile_domain_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of Voice Profile Domain.
+* `server_side_encryption_configuration` - (Required) Configuration for server side encryption.
+    * `kms_key_arn` - (Required) ARN for KMS Key.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of Voice Profile Domain.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Voice Profile Domain.
+* `id` - ID of the Voice Profile Domain.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30s`)
+* `update` - (Default `30s`)
+* `delete` - (Default `30s`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Chime SDK Voice Profile Domain using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AWS Chime SDK Voice Profile Domain using the `id`. For example:
+
+```console
+% terraform import aws_chimesdkvoice_voice_profile_domain.example abcdef123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cleanrooms_collaboration.html.markdown b/website/docs/cdktf/python/r/cleanrooms_collaboration.html.markdown
new file mode 100644
index 00000000000..f14762e003f
--- /dev/null
+++ b/website/docs/cdktf/python/r/cleanrooms_collaboration.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "Clean Rooms"
+layout: "aws"
+page_title: "AWS: aws_cleanrooms_collaboration"
+description: |-
+  Provides a Clean Rooms Collaboration.
+---
+
+
+
+# Resource: aws_cleanrooms_collaboration
+
+Provides an AWS Clean Rooms collaboration. All members included in the definition will be invited to
+join the collaboration and can create memberships.
+
+## Example Usage
+
+### Collaboration with tags
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cleanrooms_collaboration import CleanroomsCollaboration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, memberAbilities):
+        super().__init__(scope, name)
+        CleanroomsCollaboration(self, "test_collaboration",
+            creator_display_name="Creator ",
+            creator_member_abilities=["CAN_QUERY", "CAN_RECEIVE_RESULTS"],
+            data_encryption_metadata=CleanroomsCollaborationDataEncryptionMetadata(
+                allow_clear_text=True,
+                allow_duplicates=True,
+                allow_joins_on_columns_with_different_names=True,
+                preserve_nulls=False
+            ),
+            description="I made this collaboration with terraform!",
+            member=[CleanroomsCollaborationMember(
+                account_id=Token.as_string(123456789012),
+                display_name="Other member",
+                member_abilities=member_abilities
+            )
+            ],
+            name="terraform-example-collaboration",
+            query_log_status="DISABLED",
+            tags={
+                "Project": "Terraform"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) - The name of the collaboration. Collaboration names do not need to be unique.
+* `description` - (Required) - A description for a collaboration.
+* `creator_member_abilities` - (Required - Forces new resource) - The list of member abilities for the creator of the collaboration.
Valid values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_CreateCollaboration.html#API-CreateCollaboration-request-creatorMemberAbilities).
+* `creator_display_name` - (Required - Forces new resource) - The name for the member record for the collaboration creator.
+* `query_log_status` - (Required - Forces new resource) - Determines if members of the collaboration can enable query logs within their own memberships. Valid values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_CreateCollaboration.html#API-CreateCollaboration-request-queryLogStatus).
+* `data_encryption_metadata` - (Required - Forces new resource) - A collection of settings which determine how the [c3r client](https://docs.aws.amazon.com/clean-rooms/latest/userguide/crypto-computing.html) will encrypt data for use within this collaboration.
+* `data_encryption_metadata.allow_clear_text` - (Required - Forces new resource) - Indicates whether encrypted tables can contain cleartext data. This is a boolean field.
+* `data_encryption_metadata.allow_duplicates` - (Required - Forces new resource) - Indicates whether Fingerprint columns can contain duplicate entries. This is a boolean field.
+* `data_encryption_metadata.allow_joins_on_columns_with_different_names` - (Required - Forces new resource) - Indicates whether Fingerprint columns can be joined on any other Fingerprint column with a different name. This is a boolean field.
+* `data_encryption_metadata.preserve_nulls` - (Required - Forces new resource) - Indicates whether NULL values are to be copied as NULL to encrypted tables (true) or cryptographically processed (false).
+* `member` - (Optional - Forces new resource) - Additional members of the collaboration which will be invited to join the collaboration.
+* `member.account_id` - (Required - Forces new resource) - The account ID for the invited member.
+* `member.display_name` - (Required - Forces new resource) - The display name for the invited member.
+* `member.member_abilities` - (Required - Forces new resource) - The list of abilities for the invited member. Valid values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_CreateCollaboration.html#API-CreateCollaboration-request-creatorMemberAbilities).
+* `tags` - (Optional) - Key value pairs which tag the collaboration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the collaboration.
+* `id` - The ID of the collaboration.
+* `create_time` - The date and time the collaboration was created.
+* `member status` - For each member included in the collaboration an additional computed attribute of status is added.
These values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_MemberSummary.html#API-Type-MemberSummary-status).
+* `updated_time` - The date and time the collaboration was last updated.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `1m`)
+- `update` - (Default `1m`)
+- `delete` - (Default `1m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloud9_environment_ec2.html.markdown b/website/docs/cdktf/python/r/cloud9_environment_ec2.html.markdown
new file mode 100644
index 00000000000..1e6daf8a492
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloud9_environment_ec2.html.markdown
@@ -0,0 +1,133 @@
+---
+subcategory: "Cloud9"
+layout: "aws"
+page_title: "AWS: aws_cloud9_environment_ec2"
+description: |-
+  Provides a Cloud9 EC2 Development Environment.
+---
+
+
+
+# Resource: aws_cloud9_environment_ec2
+
+Provides a Cloud9 EC2 Development Environment.
+
+## Example Usage
+
+Basic usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloud9_environment_ec2 import Cloud9EnvironmentEc2
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Cloud9EnvironmentEc2(self, "example",
+            instance_type="t2.micro",
+            name="example-env"
+        )
+```
+
+Get the URL of the Cloud9 environment after creation:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloud9_environment_ec2 import Cloud9EnvironmentEc2
+from imports.aws.data_aws_instance import DataAwsInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, name):
+        super().__init__(scope, name)
+        example = Cloud9EnvironmentEc2(self, "example",
+            instance_type="t2.micro",
+            name=name
+        )
+        DataAwsInstance(self, "cloud9_instance",
+            filter=[DataAwsInstanceFilter(
+                name="tag:aws:cloud9:environment",
+                values=[example.id]
+            )
+            ]
+        )
+        TerraformOutput(self, "cloud9_url",
+            value="https://${" + region.value + "}.console.aws.amazon.com/cloud9/ide/${" + example.id + "}"
+        )
+```
+
+Allocate a static IP to the Cloud9 environment:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformOutput, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloud9_environment_ec2 import Cloud9EnvironmentEc2
+from imports.aws.data_aws_instance import DataAwsInstance
+from imports.aws.eip import Eip
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, name):
+        super().__init__(scope, name)
+        example = Cloud9EnvironmentEc2(self, "example",
+            instance_type="t2.micro",
+            name=name
+        )
+        cloud9_instance = DataAwsInstance(self, "cloud9_instance",
+            filter=[DataAwsInstanceFilter(
+                name="tag:aws:cloud9:environment",
+                values=[example.id]
+            )
+            ]
+        )
+        cloud9_eip = Eip(self, "cloud9_eip",
+            domain="vpc",
+            instance=Token.as_string(cloud9_instance.id)
+        )
+        TerraformOutput(self, "cloud9_public_ip",
+            value=cloud9_eip.public_ip
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the environment.
+* `instance_type` - (Required) The type of instance to connect to the environment, e.g., `t2.micro`.
+* `automatic_stop_time_minutes` - (Optional) The number of minutes until the running instance is shut down after the environment has last been used.
+* `connection_type` - (Optional) The connection type used for connecting to an Amazon EC2 environment. Valid values are `CONNECT_SSH` and `CONNECT_SSM`. For more information, please refer to the [AWS documentation for Cloud9](https://docs.aws.amazon.com/cloud9/latest/user-guide/ec2-ssm.html).
+* `description` - (Optional) The description of the environment.
+* `image_id` - (Optional) The identifier for the Amazon Machine Image (AMI) that's used to create the EC2 instance. Valid values are
+    * `amazonlinux-1-x86_64`
+    * `amazonlinux-2-x86_64`
+    * `ubuntu-18.04-x86_64`
+    * `resolve:ssm:/aws/service/cloud9/amis/amazonlinux-1-x86_64`
+    * `resolve:ssm:/aws/service/cloud9/amis/amazonlinux-2-x86_64`
+    * `resolve:ssm:/aws/service/cloud9/amis/ubuntu-18.04-x86_64`
+* `owner_arn` - (Optional) The ARN of the environment owner. This can be the ARN of any AWS IAM principal. Defaults to the environment's creator.
+* `subnet_id` - (Optional) The ID of the subnet in Amazon VPC that AWS Cloud9 will use to communicate with the Amazon EC2 instance.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the environment.
+* `arn` - The ARN of the environment.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `type` - The type of the environment (e.g., `ssh` or `ec2`).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloud9_environment_membership.markdown b/website/docs/cdktf/python/r/cloud9_environment_membership.markdown
new file mode 100644
index 00000000000..cd6997105c3
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloud9_environment_membership.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Cloud9"
+layout: "aws"
+page_title: "AWS: aws_cloud9_environment_membership"
+description: |-
+  Provides an environment member to an AWS Cloud9 development environment.
+---
+
+
+
+# Resource: aws_cloud9_environment_membership
+
+Provides an environment member to an AWS Cloud9 development environment.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloud9_environment_ec2 import Cloud9EnvironmentEc2
+from imports.aws.cloud9_environment_membership import Cloud9EnvironmentMembership
+from imports.aws.iam_user import IamUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = Cloud9EnvironmentEc2(self, "test",
+            instance_type="t2.micro",
+            name="some-env"
+        )
+        aws_iam_user_test = IamUser(self, "test_1",
+            name="some-user"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_user_test.override_logical_id("test")
+        aws_cloud9_environment_membership_test = Cloud9EnvironmentMembership(self, "test_2",
+            environment_id=test.id,
+            permissions="read-only",
+            user_arn=Token.as_string(aws_iam_user_test.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloud9_environment_membership_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `environment_id` - (Required) The ID of the environment that contains the environment member you want to add.
+* `permissions` - (Required) The type of environment member permissions you want to associate with this environment member. Allowed values are `read-only` and `read-write`.
+* `user_arn` - (Required) The Amazon Resource Name (ARN) of the environment member you want to add.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the environment membership.
+* `user_id` - The user ID in AWS Identity and Access Management (AWS IAM) of the environment member.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloud9 environment membership using the `environment-id#user-arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cloud9 environment membership using the `environment-id#user-arn`. For example:
+
+```console
+% terraform import aws_cloud9_environment_membership.test environment-id#user-arn
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudcontrolapi_resource.html.markdown b/website/docs/cdktf/python/r/cloudcontrolapi_resource.html.markdown
new file mode 100644
index 00000000000..96bd0a010d1
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudcontrolapi_resource.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "Cloud Control API"
+layout: "aws"
+page_title: "AWS: aws_cloudcontrolapi_resource"
+description: |-
+  Manages a Cloud Control API Resource.
+---
+
+
+
+# Resource: aws_cloudcontrolapi_resource
+
+Manages a Cloud Control API Resource.
The configuration and lifecycle handling of these resources is proxied through Cloud Control API handlers to the backend service. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudcontrolapi_resource import CloudcontrolapiResource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudcontrolapiResource(self, "example", + desired_state=Token.as_string( + Fn.jsonencode({ + "ClusterName": "example", + "Tags": [{ + "Key": "CostCenter", + "Value": "IT" + } + ] + })), + type_name="AWS::ECS::Cluster" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `desired_state` - (Required) JSON string matching the CloudFormation resource type schema with desired configuration. Terraform configuration expressions can be converted into JSON using the [`jsonencode()` function](https://www.terraform.io/docs/language/functions/jsonencode.html). +* `type_name` - (Required) CloudFormation resource type name. For example, `AWS::EC2::VPC`. + +The following arguments are optional: + +* `role_arn` - (Optional) Amazon Resource Name (ARN) of the IAM Role to assume for operations. +* `schema` - (Optional) JSON string of the CloudFormation resource type schema which is used for plan time validation where possible. Automatically fetched if not provided. In large scale environments with multiple resources using the same `type_name`, it is recommended to fetch the schema once via the [`aws_cloudformation_type` data source](/docs/providers/aws/d/cloudformation_type.html) and use this argument to reduce `DescribeType` API operation throttling. This value is marked sensitive only to prevent large plan differences from showing. +* `type_version_id` - (Optional) Identifier of the CloudFormation resource type version. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `properties` - JSON string matching the CloudFormation resource type schema with current configuration. Underlying attributes can be referenced via the [`jsondecode()` function](https://www.terraform.io/docs/language/functions/jsondecode.html), for example, `jsondecode(data.aws_cloudcontrolapi_resource.example.properties)["example"]`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudformation_stack.html.markdown b/website/docs/cdktf/python/r/cloudformation_stack.html.markdown new file mode 100644 index 00000000000..280d8364019 --- /dev/null +++ b/website/docs/cdktf/python/r/cloudformation_stack.html.markdown @@ -0,0 +1,120 @@ +--- +subcategory: "CloudFormation" +layout: "aws" +page_title: "AWS: aws_cloudformation_stack" +description: |- + Provides a CloudFormation Stack resource. +--- + + + +# Resource: aws_cloudformation_stack + +Provides a CloudFormation Stack resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.cloudformation_stack import CloudformationStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudformationStack(self, "network", + name="networking-stack", + parameters={ + "VPCCidr": "10.0.0.0/16" + }, + template_body=Token.as_string( + Fn.jsonencode({ + "Parameters": { + "VPCCidr": { + "Default": "10.0.0.0/16", + "Description": "Enter the CIDR block for the VPC. Default is 10.0.0.0/16.", + "Type": "String" + } + }, + "Resources": { + "my_vpc": { + "Properties": { + "CidrBlock": { + "Ref": "VPCCidr" + }, + "Tags": [{ + "Key": "Name", + "Value": "Primary_CF_VPC" + } + ] + }, + "Type": "AWS::EC2::VPC" + } + } + })) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Stack name. +* `template_body` - (Optional) Structure containing the template body (max size: 51,200 bytes). +* `template_url` - (Optional) Location of a file containing the template body (max size: 460,800 bytes). +* `capabilities` - (Optional) A list of capabilities. + Valid values: `CAPABILITY_IAM`, `CAPABILITY_NAMED_IAM`, or `CAPABILITY_AUTO_EXPAND` +* `disable_rollback` - (Optional) Set to true to disable rollback of the stack if stack creation failed. + Conflicts with `on_failure`. +* `notification_arns` - (Optional) A list of SNS topic ARNs to publish stack related events. +* `on_failure` - (Optional) Action to be taken if stack creation fails. This must be + one of: `DO_NOTHING`, `ROLLBACK`, or `DELETE`. Conflicts with `disable_rollback`. +* `parameters` - (Optional) A map of Parameter structures that specify input parameters for the stack. +* `policy_body` - (Optional) Structure containing the stack policy body. + Conflicts w/ `policy_url`. +* `policy_url` - (Optional) Location of a file containing the stack policy. + Conflicts w/ `policy_body`. +* `tags` - (Optional) Map of resource tags to associate with this stack. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `iam_role_arn` - (Optional) The ARN of an IAM role that AWS CloudFormation assumes to create the stack. If you don't specify a value, AWS CloudFormation uses the role that was previously associated with the stack. If no role is available, AWS CloudFormation uses a temporary session that is generated from your user credentials. +* `timeout_in_minutes` - (Optional) The amount of time that can pass before the stack status becomes `CREATE_FAILED`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A unique identifier of the stack. +* `outputs` - A map of outputs from the stack. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30m`) +- `update` - (Default `30m`) +- `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudformation Stacks using the `name`. 
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Cloudformation Stacks using the `name`. For example:

```console
% terraform import aws_cloudformation_stack.stack networking-stack
```

 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudformation_stack_set.html.markdown b/website/docs/cdktf/python/r/cloudformation_stack_set.html.markdown
new file mode 100644
index 00000000000..edca4529319
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudformation_stack_set.html.markdown
@@ -0,0 +1,172 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_stack_set"
+description: |-
+  Manages a CloudFormation StackSet.
+---
+
+
+
+# Resource: aws_cloudformation_stack_set
+
+Manages a CloudFormation StackSet. StackSets allow CloudFormation templates to be easily deployed across multiple accounts and regions via StackSet Instances ([`aws_cloudformation_stack_set_instance` resource](/docs/providers/aws/r/cloudformation_stack_set_instance.html)). Additional information about StackSets can be found in the [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/what-is-cfnstacksets.html).
+
+~> **NOTE:** All template parameters, including those with a `Default`, must be configured or ignored with the `lifecycle` configuration block `ignore_changes` argument.
+
+~> **NOTE:** All `NoEcho` template parameters must be ignored with the `lifecycle` configuration block `ignore_changes` argument.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudformation_stack_set import CloudformationStackSet
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        aws_cloud_formation_stack_set_administration_role_assume_role_policy = DataAwsIamPolicyDocument(self, "AWSCloudFormationStackSetAdministrationRole_assume_role_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["cloudformation.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        aws_cloud_formation_stack_set_administration_role = IamRole(self, "AWSCloudFormationStackSetAdministrationRole",
+            assume_role_policy=Token.as_string(aws_cloud_formation_stack_set_administration_role_assume_role_policy.json),
+            name="AWSCloudFormationStackSetAdministrationRole"
+        )
+        example = CloudformationStackSet(self, "example",
+            administration_role_arn=aws_cloud_formation_stack_set_administration_role.arn,
+            name="example",
+            parameters={
+                "VPCCidr": "10.0.0.0/16"
+            },
+            template_body=Token.as_string(
+                Fn.jsonencode({
+                    "Parameters": {
+                        "VPCCidr": {
+                            "Default": "10.0.0.0/16",
+                            "Description": "Enter the CIDR block for the VPC.
Default is 10.0.0.0/16.",
+                            "Type": "String"
+                        }
+                    },
+                    "Resources": {
+                        "my_vpc": {
+                            "Properties": {
+                                "CidrBlock": {
+                                    "Ref": "VPCCidr"
+                                },
+                                "Tags": [{
+                                    "Key": "Name",
+                                    "Value": "Primary_CF_VPC"
+                                }
+                                ]
+                            },
+                            "Type": "AWS::EC2::VPC"
+                        }
+                    }
+                }))
+        )
+        aws_cloud_formation_stack_set_administration_role_execution_policy = DataAwsIamPolicyDocument(self, "AWSCloudFormationStackSetAdministrationRole_ExecutionPolicy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                resources=["arn:aws:iam::*:role/${" + example.execution_role_name + "}"
+                ]
+            )
+            ]
+        )
+        aws_iam_role_policy_aws_cloud_formation_stack_set_administration_role_execution_policy = IamRolePolicy(self, "AWSCloudFormationStackSetAdministrationRole_ExecutionPolicy_4",
+            name="ExecutionPolicy",
+            policy=Token.as_string(aws_cloud_formation_stack_set_administration_role_execution_policy.json),
+            role=aws_cloud_formation_stack_set_administration_role.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_aws_cloud_formation_stack_set_administration_role_execution_policy.override_logical_id("AWSCloudFormationStackSetAdministrationRole_ExecutionPolicy")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `administration_role_arn` - (Optional) Amazon Resource Name (ARN) of the IAM Role in the administrator account. This must be defined when using the `SELF_MANAGED` permission model.
+* `auto_deployment` - (Optional) Configuration block containing the auto-deployment model for your StackSet. This can only be defined when using the `SERVICE_MANAGED` permission model.
+    * `enabled` - (Optional) Whether or not auto-deployment is enabled.
+    * `retain_stacks_on_account_removal` - (Optional) Whether or not to retain stacks when the account is removed.
+* `name` - (Required) Name of the StackSet. The name must be unique in the region where you create your StackSet. The name can contain only alphanumeric characters (case-sensitive) and hyphens. It must start with an alphabetic character and cannot be longer than 128 characters.
+* `capabilities` - (Optional) A list of capabilities. Valid values: `CAPABILITY_IAM`, `CAPABILITY_NAMED_IAM`, `CAPABILITY_AUTO_EXPAND`.
+* `operation_preferences` - (Optional) Preferences for how AWS CloudFormation performs a stack set update.
+* `description` - (Optional) Description of the StackSet.
+* `execution_role_name` - (Optional) Name of the IAM Role in all target accounts for StackSet operations. Defaults to `AWSCloudFormationStackSetExecutionRole` when using the `SELF_MANAGED` permission model. This should not be defined when using the `SERVICE_MANAGED` permission model.
+* `managed_execution` - (Optional) Configuration block to allow StackSets to perform non-conflicting operations concurrently and queues conflicting operations.
+    * `active` - (Optional) When set to true, StackSets performs non-conflicting operations concurrently and queues conflicting operations. After conflicting operations finish, StackSets starts queued operations in request order. Default is false.
+* `parameters` - (Optional) Key-value map of input parameters for the StackSet template. All template parameters, including those with a `Default`, must be configured or ignored with `lifecycle` configuration block `ignore_changes` argument.
All `NoEcho` template parameters must be ignored with the `lifecycle` configuration block `ignore_changes` argument.
+* `permission_model` - (Optional) Describes how the IAM roles required for your StackSet are created. Valid values: `SELF_MANAGED` (default), `SERVICE_MANAGED`.
+* `call_as` - (Optional) Specifies whether you are acting as an account administrator in the organization's management account or as a delegated administrator in a member account. Valid values: `SELF` (default), `DELEGATED_ADMIN`.
+* `tags` - (Optional) Key-value map of tags to associate with this StackSet and the Stacks created from it. AWS CloudFormation also propagates these tags to supported resources that are created in the Stacks. A maximum number of 50 tags can be specified. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `template_body` - (Optional) String containing the CloudFormation template body. Maximum size: 51,200 bytes. Conflicts with `template_url`.
+* `template_url` - (Optional) String containing the location of a file containing the CloudFormation template body. The URL must point to a template that is located in an Amazon S3 bucket. Maximum location file size: 460,800 bytes. Conflicts with `template_body`.
+
+### `operation_preferences` Argument Reference
+
+The `operation_preferences` configuration block supports the following arguments:
+
+* `failure_tolerance_count` - (Optional) The number of accounts, per Region, for which this operation can fail before AWS CloudFormation stops the operation in that Region.
+* `failure_tolerance_percentage` - (Optional) The percentage of accounts, per Region, for which this stack operation can fail before AWS CloudFormation stops the operation in that Region.
+* `max_concurrent_count` - (Optional) The maximum number of accounts in which to perform this operation at one time.
+* `max_concurrent_percentage` - (Optional) The maximum percentage of accounts in which to perform this operation at one time.
+* `region_concurrency_type` - (Optional) The concurrency type for deploying StackSets operations in Regions, either in parallel or one Region at a time.
+* `region_order` - (Optional) The order of the Regions where you want to perform the stack operation.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the StackSet.
+* `id` - Name of the StackSet.
+* `stack_set_id` - Unique identifier of the StackSet.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFormation StackSets using the `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudFormation StackSets using the `name`. For example: + +```console +% terraform import aws_cloudformation_stack_set.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudformation_stack_set_instance.html.markdown b/website/docs/cdktf/python/r/cloudformation_stack_set_instance.html.markdown new file mode 100644 index 00000000000..f4835c396b7 --- /dev/null +++ b/website/docs/cdktf/python/r/cloudformation_stack_set_instance.html.markdown @@ -0,0 +1,211 @@ +--- +subcategory: "CloudFormation" +layout: "aws" +page_title: "AWS: aws_cloudformation_stack_set_instance" +description: |- + Manages a CloudFormation StackSet Instance. +--- + + + +# Resource: aws_cloudformation_stack_set_instance + +Manages a CloudFormation StackSet Instance. Instances are managed in the account and region of the StackSet after the target account permissions have been configured. Additional information about StackSets can be found in the [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/what-is-cfnstacksets.html). + +~> **NOTE:** All target accounts must have an IAM Role created that matches the name of the execution role configured in the StackSet (the `execution_role_name` argument in the `aws_cloudformation_stack_set` resource) in a trust relationship with the administrative account or administration IAM Role. The execution role must have appropriate permissions to manage resources defined in the template along with those required for StackSets to operate. See the [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-prereqs.html) for more details. + +~> **NOTE:** To retain the Stack during Terraform resource destroy, ensure `retain_stack = true` has been successfully applied into the Terraform state first. This must be completed _before_ an apply that would destroy the resource. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudformation_stack_set_instance import CloudformationStackSetInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudformationStackSetInstance(self, "example", + account_id="123456789012", + region="us-east-1", + stack_set_name=Token.as_string(aws_cloudformation_stack_set_example.name) + ) +``` + +### Example IAM Setup in Target Account + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        aws_cloud_formation_stack_set_execution_role_minimum_execution_policy = DataAwsIamPolicyDocument(self, "AWSCloudFormationStackSetExecutionRole_MinimumExecutionPolicy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["cloudformation:*", "s3:*", "sns:*"],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        aws_cloud_formation_stack_set_execution_role_assume_role_policy = DataAwsIamPolicyDocument(self, "AWSCloudFormationStackSetExecutionRole_assume_role_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[aws_cloud_formation_stack_set_administration_role.arn
+                    ],
+                    type="AWS"
+                )
+                ]
+            )
+            ]
+        )
+        aws_cloud_formation_stack_set_execution_role = IamRole(self, "AWSCloudFormationStackSetExecutionRole",
+            assume_role_policy=Token.as_string(aws_cloud_formation_stack_set_execution_role_assume_role_policy.json),
+            name="AWSCloudFormationStackSetExecutionRole"
+        )
+        aws_iam_role_policy_aws_cloud_formation_stack_set_execution_role_minimum_execution_policy = IamRolePolicy(self, "AWSCloudFormationStackSetExecutionRole_MinimumExecutionPolicy_3",
+            name="MinimumExecutionPolicy",
+            policy=Token.as_string(aws_cloud_formation_stack_set_execution_role_minimum_execution_policy.json),
+            role=aws_cloud_formation_stack_set_execution_role.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_aws_cloud_formation_stack_set_execution_role_minimum_execution_policy.override_logical_id("AWSCloudFormationStackSetExecutionRole_MinimumExecutionPolicy")
+```
+
+### Example Deployment across Organizations Accounts
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudformation_stack_set_instance import CloudformationStackSetInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudformationStackSetInstance(self, "example",
+            deployment_targets=CloudformationStackSetInstanceDeploymentTargets(
+                organizational_unit_ids=[
+                    Token.as_string(
+                        property_access(aws_organizations_organization_example.roots, ["0", "id"
+                        ]))
+                ]
+            ),
+            region="us-east-1",
+            stack_set_name=Token.as_string(aws_cloudformation_stack_set_example.name)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stack_set_name` - (Required) Name of the StackSet.
+* `account_id` - (Optional) Target AWS Account ID to create a Stack based on the StackSet. Defaults to current account.
+* `deployment_targets` - (Optional) The AWS Organizations accounts to which StackSets deploys. StackSets doesn't deploy stack instances to the organization management account, even if the organization management account is in your organization or in an OU in your organization. Drift detection is not possible for this argument.
See [deployment_targets](#deployment_targets-argument-reference) below.
+* `parameter_overrides` - (Optional) Key-value map of input parameters to override from the StackSet for this Instance.
+* `region` - (Optional) Target AWS Region to create a Stack based on the StackSet. Defaults to current region.
+* `retain_stack` - (Optional) During Terraform resource destroy, remove Instance from StackSet while keeping the Stack and its associated resources. Must be enabled in Terraform state _before_ destroy operation to take effect. You cannot reassociate a retained Stack or add an existing, saved Stack to a new StackSet. Defaults to `false`.
+* `call_as` - (Optional) Specifies whether you are acting as an account administrator in the organization's management account or as a delegated administrator in a member account. Valid values: `SELF` (default), `DELEGATED_ADMIN`.
+* `operation_preferences` - (Optional) Preferences for how AWS CloudFormation performs a stack set operation.
+
+### `deployment_targets` Argument Reference
+
+The `deployment_targets` configuration block supports the following arguments:
+
+* `organizational_unit_ids` - (Optional) The organization root ID or organizational unit (OU) IDs to which StackSets deploys.
+
+### `operation_preferences` Argument Reference
+
+The `operation_preferences` configuration block supports the following arguments:
+
+* `failure_tolerance_count` - (Optional) The number of accounts, per Region, for which this operation can fail before AWS CloudFormation stops the operation in that Region.
+* `failure_tolerance_percentage` - (Optional) The percentage of accounts, per Region, for which this stack operation can fail before AWS CloudFormation stops the operation in that Region.
+* `max_concurrent_count` - (Optional) The maximum number of accounts in which to perform this operation at one time.
+* `max_concurrent_percentage` - (Optional) The maximum percentage of accounts in which to perform this operation at one time.
+* `region_concurrency_type` - (Optional) The concurrency type for deploying StackSets operations in Regions, either in parallel or one Region at a time. Valid values are `SEQUENTIAL` and `PARALLEL`.
+* `region_order` - (Optional) The order of the Regions where you want to perform the stack operation.
+
+A short sketch of configuring these preferences is shown after the attribute reference below.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier for the resource. If `deployment_targets` is set, this is a comma-delimited string combining stack set name, organizational unit IDs (`/`-delimited), and region (e.g., `mystack,ou-123/ou-456,us-east-1`). Otherwise, this is a comma-delimited string combining stack set name, AWS account ID, and region (e.g., `mystack,123456789012,us-east-1`).
+* `organizational_unit_id` - The organization root ID or organizational unit (OU) ID in which the stack is deployed.
+* `stack_id` - Stack identifier.
+* `stack_instance_summaries` - List of stack instances created from an organizational unit deployment target. This will only be populated when `deployment_targets` is set. See [`stack_instance_summaries`](#stack_instance_summaries-attribute-reference).
+
+### `stack_instance_summaries` Attribute Reference
+
+* `account_id` - AWS account ID in which the stack is deployed.
+* `organizational_unit_id` - Organizational unit ID in which the stack is deployed.
+* `stack_id` - Stack identifier.
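+
+As a hedged sketch of the `operation_preferences` block described above (the `CloudformationStackSetInstanceOperationPreferences` class name assumes the standard generated bindings, and the stack set reference is carried over from the earlier examples):
+
+```python
+# A sketch only: deploys to Regions in parallel, tolerating one failed account
+# per Region. Assumes the stack set and imports from the examples above.
+CloudformationStackSetInstance(self, "example_with_preferences",
+    region="us-east-1",
+    stack_set_name=Token.as_string(aws_cloudformation_stack_set_example.name),
+    operation_preferences=CloudformationStackSetInstanceOperationPreferences(
+        failure_tolerance_count=1,
+        max_concurrent_count=10,
+        region_concurrency_type="PARALLEL"
+    )
+)
+```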
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFormation StackSet Instances that target an AWS Account ID using the StackSet name, target AWS account ID, and target AWS region separated by commas (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import CloudFormation StackSet Instances that target AWS Organizational Units using the StackSet name, a slash (`/`) separated list of organizational unit IDs, and target AWS region separated by commas (`,`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** CloudFormation StackSet Instances that target an AWS Account ID using the StackSet name, target AWS account ID, and target AWS region separated by commas (`,`). For example: + +```console +% terraform import aws_cloudformation_stack_set_instance.example example,123456789012,us-east-1 +``` + +Import CloudFormation StackSet Instances that target AWS Organizational Units using the StackSet name, a slash (`/`) separated list of organizational unit IDs, and target AWS region separated by commas (`,`): + +```console +% terraform import aws_cloudformation_stack_set_instance.example example,ou-sdas-123123123/ou-sdas-789789789,us-east-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudformation_type.html.markdown b/website/docs/cdktf/python/r/cloudformation_type.html.markdown new file mode 100644 index 00000000000..991a5a1b1c4 --- /dev/null +++ b/website/docs/cdktf/python/r/cloudformation_type.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "CloudFormation" +layout: "aws" +page_title: "AWS: aws_cloudformation_type" +description: |- + Manages a version of a CloudFormation Type. +--- + + + +# Resource: aws_cloudformation_type + +Manages a version of a CloudFormation Type. + +~> **NOTE:** The destroy operation of this resource marks the version as deprecated. If this was the only `LIVE` version, the type is marked as deprecated. Enable the [resource `lifecycle` configuration block `create_before_destroy` argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy) in this resource configuration to properly order redeployments in Terraform. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+# The nested configuration class used below is exported by the same generated module.
+from imports.aws.cloudformation_type import CloudformationType, CloudformationTypeLoggingConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudformationType(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            logging_config=CloudformationTypeLoggingConfig(
+                log_group_name=Token.as_string(aws_cloudwatch_log_group_example.name),
+                log_role_arn=Token.as_string(aws_iam_role_example.arn)
+            ),
+            schema_handler_package="s3://${" + aws_s3_object_example.bucket + "}/${" + aws_s3_object_example.key + "}",
+            type="RESOURCE",
+            type_name="ExampleCompany::ExampleService::ExampleResource"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `execution_role_arn` - (Optional) Amazon Resource Name (ARN) of the IAM Role for CloudFormation to assume when invoking the extension. If your extension calls AWS APIs in any of its handlers, you must create an IAM execution role that includes the necessary permissions to call those AWS APIs, and provision that execution role in your account. When CloudFormation needs to invoke the extension handler, CloudFormation assumes this execution role to create a temporary session token, which it then passes to the extension handler, thereby supplying your extension with the appropriate credentials.
+* `logging_config` - (Optional) Configuration block containing logging configuration.
+* `schema_handler_package` - (Required) URL to the S3 bucket containing the extension project package that contains the necessary files for the extension you want to register. Must begin with `s3://` or `https://`. For example, `s3://example-bucket/example-object`.
+* `type` - (Optional) CloudFormation Registry Type. For example, `RESOURCE` or `MODULE`.
+* `type_name` - (Optional) CloudFormation Type name. For example, `ExampleCompany::ExampleService::ExampleResource`.
+
+### logging_config
+
+The `logging_config` configuration block supports the following arguments:
+
+* `log_group_name` - (Required) Name of the CloudWatch Log Group where CloudFormation sends error logging information when invoking the type's handlers.
+* `log_role_arn` - (Required) Amazon Resource Name (ARN) of the IAM Role CloudFormation assumes when sending error logging information to CloudWatch Logs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the CloudFormation Type version. See also `type_arn`.
+* `default_version_id` - Identifier of the CloudFormation Type default version.
+* `deprecated_status` - Deprecation status of the version.
+* `description` - Description of the version.
+* `documentation_url` - URL of the documentation for the CloudFormation Type.
+* `is_default_version` - Whether the CloudFormation Type version is the default version.
+* `provisioning_type` - Provisioning behavior of the CloudFormation Type.
+* `schema` - JSON document of the CloudFormation Type schema.
+* `source_url` - URL of the source code for the CloudFormation Type.
+* `type_arn` - Amazon Resource Name (ARN) of the CloudFormation Type. See also `arn`.
+* `version_id` - Identifier of the CloudFormation Type version.
+* `visibility` - Scope of the CloudFormation Type.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_cloudformation_type` using the type version Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_cloudformation_type` using the type version Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_cloudformation_type.example arn:aws:cloudformation:us-east-1:123456789012:type/resource/ExampleCompany-ExampleService-ExampleType/1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_cache_policy.html.markdown b/website/docs/cdktf/python/r/cloudfront_cache_policy.html.markdown
new file mode 100644
index 00000000000..6b241cebcd2
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_cache_policy.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_cache_policy"
+description: |-
+  Use the `aws_cloudfront_cache_policy` resource to manage cache policies for CloudFront distributions. This resource allows you to attach cache policies to cache behaviors, which determine the values included in the cache key, such as HTTP headers, cookies, and URL query strings. CloudFront uses the cache key to locate cached objects and return them to viewers. Additionally, the cache policy sets the default, minimum, and maximum time to live (TTL) values for objects in the CloudFront cache.
+---
+
+
+
+# Resource: aws_cloudfront_cache_policy
+
+## Example Usage
+
+Use the `aws_cloudfront_cache_policy` resource to create a cache policy for CloudFront.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_cache_policy import (
+    CloudfrontCachePolicy,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOrigin,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginCookiesConfig,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginCookiesConfigCookies,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginHeadersConfig,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginHeadersConfigHeaders,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginQueryStringsConfig,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginQueryStringsConfigQueryStrings
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontCachePolicy(self, "example",
+            comment="test comment",
+            default_ttl=50,
+            max_ttl=100,
+            min_ttl=1,
+            name="example-policy",
+            parameters_in_cache_key_and_forwarded_to_origin=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOrigin(
+                cookies_config=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginCookiesConfig(
+                    cookie_behavior="whitelist",
+                    cookies=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginCookiesConfigCookies(
+                        items=["example"]
+                    )
+                ),
+                headers_config=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginHeadersConfig(
+                    header_behavior="whitelist",
+                    headers=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginHeadersConfigHeaders(
+                        items=["example"]
+                    )
+                ),
+                query_strings_config=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginQueryStringsConfig(
+                    query_string_behavior="whitelist",
+                    query_strings=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginQueryStringsConfigQueryStrings(
+                        items=["example"]
+                    )
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Unique name used to identify the cache policy.
+* `min_ttl` - (Required) Minimum amount of time, in seconds, that objects should remain in the CloudFront cache before a new request is sent to the origin to check for updates.
+* `max_ttl` - (Optional) Maximum amount of time, in seconds, that objects stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated.
+* `default_ttl` - (Optional) Amount of time, in seconds, that objects are allowed to remain in the CloudFront cache before CloudFront sends a new request to the origin server to check if the object has been updated.
+* `comment` - (Optional) Description for the cache policy.
+* `parameters_in_cache_key_and_forwarded_to_origin` - (Required) Configuration for including HTTP headers, cookies, and URL query strings in the cache key. For more information, refer to the [Parameters In Cache Key And Forwarded To Origin](#parameters-in-cache-key-and-forwarded-to-origin) section.
+
+### Parameters In Cache Key And Forwarded To Origin
+
+* `cookies_config` - (Required) Whether any cookies in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information.
+* `headers_config` - (Required) Whether any HTTP headers are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information.
+* `query_strings_config` - (Required) Whether any URL query strings in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Query String Config](#query-string-config) for more information.
+* `enable_accept_encoding_brotli` - (Optional) Whether the `Accept-Encoding` HTTP header is included in the cache key and in requests that CloudFront sends to the origin.
+* `enable_accept_encoding_gzip` - (Optional) Whether the `Accept-Encoding` HTTP header is included in the cache key and in requests sent to the origin by CloudFront.
+
+### Cookies Config
+
+* `cookie_behavior` - (Required) Whether any cookies in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values for `cookie_behavior` are `none`, `whitelist`, `allExcept`, and `all`.
+* `cookies` - (Optional) Object that contains a list of cookie names. See [Items](#items) for more information.
+
+### Headers Config
+
+* `header_behavior` - (Required) Whether any HTTP headers are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values for `header_behavior` are `none` and `whitelist`.
+* `headers` - (Optional) Object that contains a list of header names. See [Items](#items) for more information.
+
+### Query String Config
+
+* `query_string_behavior` - (Required) Whether URL query strings in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values for `query_string_behavior` are `none`, `whitelist`, `allExcept`, and `all`.
+* `query_strings` - (Optional) Object that contains a list of query string names. See [Items](#items) for more information.
+
+### Items
+
+* `items` - (Required) List of item names, such as cookies, headers, or query strings.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - Current version of the cache policy.
+* `id` - Identifier for the cache policy. A sketch of how this is typically consumed is shown below.
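+
+As a hedged illustration of how the exported `id` is typically consumed (this is not one of the provider's generated examples; the domain name, origin settings, and resource names are placeholders), a distribution can reference the policy from its default cache behavior:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.cloudfront_cache_policy import (
+    CloudfrontCachePolicy,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOrigin,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginCookiesConfig,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginHeadersConfig,
+    CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginQueryStringsConfig
+)
+from imports.aws.cloudfront_distribution import (
+    CloudfrontDistribution,
+    CloudfrontDistributionDefaultCacheBehavior,
+    CloudfrontDistributionOrigin,
+    CloudfrontDistributionOriginCustomOriginConfig,
+    CloudfrontDistributionRestrictions,
+    CloudfrontDistributionRestrictionsGeoRestriction,
+    CloudfrontDistributionViewerCertificate
+)
+class CachePolicySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Cache on the URL path only: no cookies, headers, or query strings.
+        policy = CloudfrontCachePolicy(self, "policy",
+            name="example-policy",
+            min_ttl=0,
+            parameters_in_cache_key_and_forwarded_to_origin=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOrigin(
+                cookies_config=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginCookiesConfig(
+                    cookie_behavior="none"
+                ),
+                headers_config=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginHeadersConfig(
+                    header_behavior="none"
+                ),
+                query_strings_config=CloudfrontCachePolicyParametersInCacheKeyAndForwardedToOriginQueryStringsConfig(
+                    query_string_behavior="none"
+                )
+            )
+        )
+        CloudfrontDistribution(self, "distribution",
+            enabled=True,
+            origin=[CloudfrontDistributionOrigin(
+                domain_name="origin.example.com",  # placeholder origin
+                origin_id="exampleOrigin",
+                custom_origin_config=CloudfrontDistributionOriginCustomOriginConfig(
+                    http_port=80,
+                    https_port=443,
+                    origin_protocol_policy="https-only",
+                    origin_ssl_protocols=["TLSv1.2"]
+                )
+            )
+            ],
+            default_cache_behavior=CloudfrontDistributionDefaultCacheBehavior(
+                allowed_methods=["GET", "HEAD"],
+                cached_methods=["GET", "HEAD"],
+                target_origin_id="exampleOrigin",
+                viewer_protocol_policy="redirect-to-https",
+                cache_policy_id=policy.id  # the `id` attribute documented above
+            ),
+            restrictions=CloudfrontDistributionRestrictions(
+                geo_restriction=CloudfrontDistributionRestrictionsGeoRestriction(
+                    locations=[],
+                    restriction_type="none"
+                )
+            ),
+            viewer_certificate=CloudfrontDistributionViewerCertificate(
+                cloudfront_default_certificate=True
+            )
+        )
+```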
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront cache policies using the `id` of the cache policy. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudFront cache policies using the `id` of the cache policy. For example:
+
+```console
+% terraform import aws_cloudfront_cache_policy.policy 658327ea-f89d-4fab-a63d-7e88639e58f6
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_distribution.html.markdown b/website/docs/cdktf/python/r/cloudfront_distribution.html.markdown
new file mode 100644
index 00000000000..7d62cf23203
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_distribution.html.markdown
@@ -0,0 +1,523 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_distribution"
+description: |-
+  Provides a CloudFront web distribution resource.
+---
+
+
+
+# Resource: aws_cloudfront_distribution
+
+Creates an Amazon CloudFront web distribution.
+
+For information about CloudFront distributions, see the [Amazon CloudFront Developer Guide][1]. For specific information about creating CloudFront web distributions, see the [POST Distribution][2] page in the Amazon CloudFront API Reference.
+
+~> **NOTE:** CloudFront distributions take about 15 minutes to reach a deployed state after creation or modification. During this time, deletes of the resource will be blocked. If you need to delete a distribution that is enabled and you do not want to wait, you need to use the `retain_on_delete` flag.
+
+## Example Usage
+
+The following example creates a CloudFront distribution with an S3 origin.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_distribution import (
+    CloudfrontDistribution,
+    CloudfrontDistributionDefaultCacheBehavior,
+    CloudfrontDistributionDefaultCacheBehaviorForwardedValues,
+    CloudfrontDistributionDefaultCacheBehaviorForwardedValuesCookies,
+    CloudfrontDistributionLoggingConfig,
+    CloudfrontDistributionOrderedCacheBehavior,
+    CloudfrontDistributionOrderedCacheBehaviorForwardedValues,
+    CloudfrontDistributionOrderedCacheBehaviorForwardedValuesCookies,
+    CloudfrontDistributionOrigin,
+    CloudfrontDistributionRestrictions,
+    CloudfrontDistributionRestrictionsGeoRestriction,
+    CloudfrontDistributionViewerCertificate
+)
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        s3_origin_id = "myS3Origin"
+        b = S3Bucket(self, "b",
+            bucket="mybucket",
+            tags={
+                "Name": "My bucket"
+            }
+        )
+        S3BucketAcl(self, "b_acl",
+            acl="private",
+            bucket=b.id
+        )
+        CloudfrontDistribution(self, "s3_distribution",
+            aliases=["mysite.example.com", "yoursite.example.com"],
+            comment="Some comment",
+            default_cache_behavior=CloudfrontDistributionDefaultCacheBehavior(
+                allowed_methods=["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"],
+                cached_methods=["GET", "HEAD"],
+                default_ttl=3600,
+                forwarded_values=CloudfrontDistributionDefaultCacheBehaviorForwardedValues(
+                    cookies=CloudfrontDistributionDefaultCacheBehaviorForwardedValuesCookies(
+                        forward="none"
+                    ),
+                    query_string=False
+                ),
+                max_ttl=86400,
+                min_ttl=0,
+                target_origin_id=s3_origin_id,
+                viewer_protocol_policy="allow-all"
+            ),
+            default_root_object="index.html",
+            enabled=True,
+            is_ipv6_enabled=True,
+            logging_config=CloudfrontDistributionLoggingConfig(
+                bucket="mylogs.s3.amazonaws.com",
+                include_cookies=False,
+                prefix="myprefix"
+            ),
+            ordered_cache_behavior=[CloudfrontDistributionOrderedCacheBehavior(
+                allowed_methods=["GET", "HEAD", "OPTIONS"],
+                cached_methods=["GET", "HEAD", "OPTIONS"],
+                compress=True,
+                default_ttl=86400,
+                forwarded_values=CloudfrontDistributionOrderedCacheBehaviorForwardedValues(
+                    cookies=CloudfrontDistributionOrderedCacheBehaviorForwardedValuesCookies(
+                        forward="none"
+                    ),
+                    headers=["Origin"],
+                    query_string=False
+                ),
+                max_ttl=31536000,
+                min_ttl=0,
+                path_pattern="/content/immutable/*",
+                target_origin_id=s3_origin_id,
+                viewer_protocol_policy="redirect-to-https"
+            ), CloudfrontDistributionOrderedCacheBehavior(
+                allowed_methods=["GET", "HEAD", "OPTIONS"],
+                cached_methods=["GET", "HEAD"],
+                compress=True,
+                default_ttl=3600,
+                forwarded_values=CloudfrontDistributionOrderedCacheBehaviorForwardedValues(
+                    cookies=CloudfrontDistributionOrderedCacheBehaviorForwardedValuesCookies(
+                        forward="none"
+                    ),
+                    query_string=False
+                ),
+                max_ttl=86400,
+                min_ttl=0,
+                path_pattern="/content/*",
+                target_origin_id=s3_origin_id,
+                viewer_protocol_policy="redirect-to-https"
+            )
+            ],
+            # `default_var` is assumed to refer to an aws_cloudfront_origin_access_control defined elsewhere.
+            origin=[CloudfrontDistributionOrigin(
+                domain_name=b.bucket_regional_domain_name,
+                origin_access_control_id=default_var.id,
+                origin_id=s3_origin_id
+            )
+            ],
+            price_class="PriceClass_200",
+            restrictions=CloudfrontDistributionRestrictions(
+                geo_restriction=CloudfrontDistributionRestrictionsGeoRestriction(
+                    locations=["US", "CA", "GB", "DE"],
+                    restriction_type="whitelist"
+                )
+            ),
+            tags={
+                "Environment": "production"
+            },
+            viewer_certificate=CloudfrontDistributionViewerCertificate(
+                cloudfront_default_certificate=True
+            )
+        )
+```
+
+The example below creates a CloudFront distribution with an origin group for failover routing:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_distribution import (
+    CloudfrontDistribution,
+    CloudfrontDistributionDefaultCacheBehavior,
+    CloudfrontDistributionOrigin,
+    CloudfrontDistributionOriginGroup,
+    CloudfrontDistributionOriginGroupFailoverCriteria,
+    CloudfrontDistributionOriginGroupMember,
+    CloudfrontDistributionOriginS3OriginConfig
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, allowed_methods, cached_methods, viewer_protocol_policy, enabled, restrictions, viewer_certificate):
+        super().__init__(scope, name)
+        # `primary`, `failover`, and `default_var` refer to resources defined elsewhere.
+        CloudfrontDistribution(self, "s3_distribution",
+            default_cache_behavior=CloudfrontDistributionDefaultCacheBehavior(
+                target_origin_id="groupS3",
+                allowed_methods=allowed_methods,
+                cached_methods=cached_methods,
+                viewer_protocol_policy=viewer_protocol_policy
+            ),
+            origin=[CloudfrontDistributionOrigin(
+                domain_name=primary.bucket_regional_domain_name,
+                origin_id="primaryS3",
+                s3_origin_config=CloudfrontDistributionOriginS3OriginConfig(
+                    origin_access_identity=default_var.cloudfront_access_identity_path
+                )
+            ), CloudfrontDistributionOrigin(
+                domain_name=failover.bucket_regional_domain_name,
+                origin_id="failoverS3",
+                s3_origin_config=CloudfrontDistributionOriginS3OriginConfig(
+                    origin_access_identity=default_var.cloudfront_access_identity_path
+                )
+            )
+            ],
+            origin_group=[CloudfrontDistributionOriginGroup(
+                failover_criteria=CloudfrontDistributionOriginGroupFailoverCriteria(
+                    status_codes=[403, 404, 500, 502]
+                ),
+                member=[CloudfrontDistributionOriginGroupMember(
+                    origin_id="primaryS3"
+                ), CloudfrontDistributionOriginGroupMember(
+                    origin_id="failoverS3"
+                )
+                ],
+                origin_id="groupS3"
+            )
+            ],
+            enabled=enabled,
+            restrictions=restrictions,
+            viewer_certificate=viewer_certificate
+        )
+```
+
+CloudFront distribution using [managed policies](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/using-managed-cache-policies.html) (for example, `CachingDisabled`):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_distribution import (
+    CloudfrontDistribution,
+    CloudfrontDistributionDefaultCacheBehavior,
+    CloudfrontDistributionOrigin,
+    CloudfrontDistributionOriginS3OriginConfig,
+    CloudfrontDistributionRestrictions,
+    CloudfrontDistributionRestrictionsGeoRestriction,
+    CloudfrontDistributionViewerCertificate
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, cached_methods, viewer_protocol_policy):
+        super().__init__(scope, name)
+        s3_origin_id = "myS3Origin"
+        # `primary` and `default_var` refer to resources defined elsewhere.
+        CloudfrontDistribution(self, "s3_distribution",
+            comment="Some comment",
+            default_cache_behavior=CloudfrontDistributionDefaultCacheBehavior(
+                allowed_methods=["GET", "HEAD", "OPTIONS"],
+                cache_policy_id="4135ea2d-6df8-44a3-9df3-4b5a84be39ad",
+                target_origin_id=s3_origin_id,
+                cached_methods=cached_methods,
+                viewer_protocol_policy=viewer_protocol_policy
+            ),
+            default_root_object="index.html",
+            enabled=True,
+            is_ipv6_enabled=True,
+            origin=[CloudfrontDistributionOrigin(
+                domain_name=primary.bucket_regional_domain_name,
+                origin_id="myS3Origin",
+                s3_origin_config=CloudfrontDistributionOriginS3OriginConfig(
+                    origin_access_identity=default_var.cloudfront_access_identity_path
+                )
+            )
+            ],
+            restrictions=CloudfrontDistributionRestrictions(
+                geo_restriction=CloudfrontDistributionRestrictionsGeoRestriction(
+                    locations=["US", "CA", "GB", "DE"],
+                    restriction_type="whitelist"
+                )
+            ),
+            viewer_certificate=CloudfrontDistributionViewerCertificate(
+                cloudfront_default_certificate=True
+            )
+        )
+```
+
+## Argument Reference
+
+The CloudFront distribution argument layout is a complex structure composed of several sub-resources - these resources are laid out below.
+
+### Top-Level Arguments
+
+* `aliases` (Optional) - Extra CNAMEs (alternate domain names), if any, for this distribution.
+* `comment` (Optional) - Any comments you want to include about the distribution.
+* `custom_error_response` (Optional) - One or more [custom error response](#custom-error-response-arguments) elements (multiples allowed).
+* `default_cache_behavior` (Required) - [Default cache behavior](#default-cache-behavior-arguments) for this distribution (maximum one). Requires either `cache_policy_id` (preferred) or `forwarded_values` (deprecated) be set.
+* `default_root_object` (Optional) - Object that you want CloudFront to return (for example, index.html) when an end user requests the root URL.
+* `enabled` (Required) - Whether the distribution is enabled to accept end user requests for content.
+* `is_ipv6_enabled` (Optional) - Whether IPv6 is enabled for the distribution.
+* `http_version` (Optional) - Maximum HTTP version to support on the distribution. Allowed values are `http1.1`, `http2`, `http2and3` and `http3`. The default is `http2`.
+* `logging_config` (Optional) - The [logging configuration](#logging-config-arguments) that controls how logs are written to your distribution (maximum one).
+* `ordered_cache_behavior` (Optional) - Ordered list of [cache behavior](#cache-behavior-arguments) configuration blocks for this distribution. List from top to bottom in order of precedence. The topmost cache behavior will have precedence 0.
+* `origin` (Required) - One or more [origins](#origin-arguments) for this distribution (multiples allowed).
+* `origin_group` (Optional) - One or more [origin_group](#origin-group-arguments) for this distribution (multiples allowed).
+* `price_class` (Optional) - Price class for this distribution. One of `PriceClass_All`, `PriceClass_200`, `PriceClass_100`.
+* `restrictions` (Required) - The [restriction configuration](#restrictions-arguments) for this distribution (maximum one).
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `viewer_certificate` (Required) - The [SSL configuration](#viewer-certificate-arguments) for this distribution (maximum one).
+* `web_acl_id` (Optional) - Unique identifier that specifies the AWS WAF web ACL, if any, to associate with this distribution. To specify a web ACL created using the latest version of AWS WAF (WAFv2), use the ACL ARN, for example `aws_wafv2_web_acl.example.arn`. To specify a web ACL created using AWS WAF Classic, use the ACL ID, for example `aws_waf_web_acl.example.id`. The WAF Web ACL must exist in the WAF Global (CloudFront) region and the credentials configuring this argument must have `waf:GetWebACL` permissions assigned.
+* `retain_on_delete` (Optional) - Disables the distribution instead of deleting it when destroying the resource through Terraform. If this is set, the distribution needs to be deleted manually afterwards. Default: `false`.
+* `wait_for_deployment` (Optional) - If enabled, the resource will wait for the distribution status to change from `InProgress` to `Deployed`. Setting this to `false` will skip the process. Default: `true`.
+
+#### Cache Behavior Arguments
+
+~> **NOTE:** To achieve the setting of 'Use origin cache headers' without a linked cache policy, use the following TTL values: `min_ttl` = 0, `max_ttl` = 31536000, `default_ttl` = 86400. See [this issue](https://github.com/hashicorp/terraform-provider-aws/issues/19382) for additional context.
+
+* `allowed_methods` (Required) - Controls which HTTP methods CloudFront processes and forwards to your Amazon S3 bucket or your custom origin.
+* `cached_methods` (Required) - Controls whether CloudFront caches the response to requests using the specified HTTP methods.
+* `cache_policy_id` (Optional) - Unique identifier of the cache policy that is attached to the cache behavior. If configuring the `default_cache_behavior`, either `cache_policy_id` or `forwarded_values` must be set.
+* `compress` (Optional) - Whether you want CloudFront to automatically compress content for web requests that include `Accept-Encoding: gzip` in the request header (default: `false`).
+* `default_ttl` (Optional) - Default amount of time (in seconds) that an object is in a CloudFront cache before CloudFront forwards another request in the absence of a `Cache-Control max-age` or `Expires` header.
+* `field_level_encryption_id` (Optional) - Field level encryption configuration ID.
+* `forwarded_values` (Optional, **Deprecated** use `cache_policy_id` or `origin_request_policy_id` instead) - The [forwarded values configuration](#forwarded-values-arguments) that specifies how CloudFront handles query strings, cookies, and headers (maximum one).
+* `lambda_function_association` (Optional) - A [config block](#lambda-function-association) that triggers a Lambda function with specific actions (maximum 4).
+* `function_association` (Optional) - A [config block](#function-association) that triggers a CloudFront function with specific actions (maximum 2).
+* `max_ttl` (Optional) - Maximum amount of time (in seconds) that an object is in a CloudFront cache before CloudFront forwards another request to your origin to determine whether the object has been updated. Only effective in the presence of `Cache-Control max-age`, `Cache-Control s-maxage`, and `Expires` headers.
+* `min_ttl` (Optional) - Minimum amount of time that you want objects to stay in CloudFront caches before CloudFront queries your origin to see whether the object has been updated. Defaults to 0 seconds.
+* `origin_request_policy_id` (Optional) - Unique identifier of the origin request policy that is attached to the behavior.
+* `path_pattern` (Required) - Pattern (for example, `images/*.jpg`) that specifies which requests you want this cache behavior to apply to.
+* `realtime_log_config_arn` (Optional) - ARN of the [real-time log configuration](cloudfront_realtime_log_config.html) that is attached to this cache behavior.
+* `response_headers_policy_id` (Optional) - Identifier for a response headers policy.
+* `smooth_streaming` (Optional) - Indicates whether you want to distribute media files in Microsoft Smooth Streaming format using the origin that is associated with this cache behavior.
+* `target_origin_id` (Required) - Value of ID for the origin that you want CloudFront to route requests to when a request matches the path pattern either for a cache behavior or for the default cache behavior.
+* `trusted_key_groups` (Optional) - List of key group IDs that CloudFront can use to validate signed URLs or signed cookies. See the [CloudFront User Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-trusted-signers.html) for more information about this feature.
+* `trusted_signers` (Optional) - List of AWS account IDs (or `self`) that you want to allow to create signed URLs for private content. See the [CloudFront User Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-trusted-signers.html) for more information about this feature.
+* `viewer_protocol_policy` (Required) - Protocol that viewers can use to access the files in the origin specified by `target_origin_id` when a request matches the path pattern in `path_pattern`. One of `allow-all`, `https-only`, or `redirect-to-https`.
+
+##### Forwarded Values Arguments
+
+* `cookies` (Required) - The [forwarded values cookies](#cookies-arguments) configuration block that specifies how CloudFront handles cookies (maximum one).
+* `headers` (Optional) - Headers, if any, that you want CloudFront to vary upon for this cache behavior. Specify `*` to include all headers.
+* `query_string` (Required) - Indicates whether you want CloudFront to forward query strings to the origin that is associated with this cache behavior.
+* `query_string_cache_keys` (Optional) - When specified, along with a value of `true` for `query_string`, all query strings are forwarded; however, only the query string keys listed in this argument are cached. When omitted with a value of `true` for `query_string`, all query string keys are cached.
+
+##### Lambda Function Association
+
+Lambda@Edge allows you to associate an AWS Lambda Function with a predefined event. You can associate a single function per event type. See [What is Lambda@Edge](http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/what-is-lambda-at-edge.html) for more information.
+
+Example configuration:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_distribution import (
+    CloudfrontDistribution,
+    CloudfrontDistributionOrderedCacheBehavior,
+    CloudfrontDistributionOrderedCacheBehaviorLambdaFunctionAssociation
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, allowed_methods, cached_methods, path_pattern, target_origin_id, viewer_protocol_policy, default_cache_behavior, enabled, origin, restrictions, viewer_certificate):
+        super().__init__(scope, name)
+        CloudfrontDistribution(self, "example",
+            ordered_cache_behavior=[CloudfrontDistributionOrderedCacheBehavior(
+                lambda_function_association=[CloudfrontDistributionOrderedCacheBehaviorLambdaFunctionAssociation(
+                    event_type="viewer-request",
+                    include_body=False,
+                    lambda_arn=Token.as_string(aws_lambda_function_example.qualified_arn)
+                )
+                ],
+                allowed_methods=allowed_methods,
+                cached_methods=cached_methods,
+                path_pattern=path_pattern,
+                target_origin_id=target_origin_id,
+                viewer_protocol_policy=viewer_protocol_policy
+            )
+            ],
+            default_cache_behavior=default_cache_behavior,
+            enabled=enabled,
+            origin=origin,
+            restrictions=restrictions,
+            viewer_certificate=viewer_certificate
+        )
+```
+
+* `event_type` (Required) - Specific event to trigger this function. Valid values: `viewer-request`, `origin-request`, `viewer-response`, `origin-response`.
+* `lambda_arn` (Required) - ARN of the Lambda function.
+* `include_body` (Optional) - When set to `true`, it exposes the request body to the Lambda function. Defaults to `false`. Valid values: `true`, `false`.
+
+##### Function Association
+
+With CloudFront Functions in Amazon CloudFront, you can write lightweight functions in JavaScript for high-scale, latency-sensitive CDN customizations. You can associate a single function per event type. See [CloudFront Functions](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/cloudfront-functions.html) for more information.
+
+Example configuration:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_distribution import (
+    CloudfrontDistribution,
+    CloudfrontDistributionOrderedCacheBehavior,
+    CloudfrontDistributionOrderedCacheBehaviorFunctionAssociation
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, allowed_methods, cached_methods, path_pattern, target_origin_id, viewer_protocol_policy, default_cache_behavior, enabled, origin, restrictions, viewer_certificate):
+        super().__init__(scope, name)
+        CloudfrontDistribution(self, "example",
+            ordered_cache_behavior=[CloudfrontDistributionOrderedCacheBehavior(
+                function_association=[CloudfrontDistributionOrderedCacheBehaviorFunctionAssociation(
+                    event_type="viewer-request",
+                    function_arn=Token.as_string(aws_cloudfront_function_example.arn)
+                )
+                ],
+                allowed_methods=allowed_methods,
+                cached_methods=cached_methods,
+                path_pattern=path_pattern,
+                target_origin_id=target_origin_id,
+                viewer_protocol_policy=viewer_protocol_policy
+            )
+            ],
+            default_cache_behavior=default_cache_behavior,
+            enabled=enabled,
+            origin=origin,
+            restrictions=restrictions,
+            viewer_certificate=viewer_certificate
+        )
+```
+
+* `event_type` (Required) - Specific event to trigger this function. Valid values: `viewer-request` or `viewer-response`.
+* `function_arn` (Required) - ARN of the CloudFront function.
+
+##### Cookies Arguments
+
+* `forward` (Required) - Whether you want CloudFront to forward cookies to the origin that is associated with this cache behavior. You can specify `all`, `none` or `whitelist`. If `whitelist`, you must include the subsequent `whitelisted_names`.
+* `whitelisted_names` (Optional) - If you have set `forward` to `whitelist`, the whitelisted cookies that you want CloudFront to forward to your origin.
+
+#### Custom Error Response Arguments
+
+* `error_caching_min_ttl` (Optional) - Minimum amount of time you want HTTP error codes to stay in CloudFront caches before CloudFront queries your origin to see whether the object has been updated.
+* `error_code` (Required) - 4xx or 5xx HTTP status code that you want to customize.
+* `response_code` (Optional) - HTTP status code that you want CloudFront to return with the custom error page to the viewer.
+* `response_page_path` (Optional) - Path of the custom error page (for example, `/custom_404.html`).
+
+#### Default Cache Behavior Arguments
+
+The arguments for `default_cache_behavior` are the same as for
+[`ordered_cache_behavior`](#cache-behavior-arguments), except that the `path_pattern`
+argument must not be specified.
+
+#### Logging Config Arguments
+
+* `bucket` (Required) - Amazon S3 bucket to store the access logs in, for example, `myawslogbucket.s3.amazonaws.com`.
+* `include_cookies` (Optional) - Whether to include cookies in access logs (default: `false`).
+* `prefix` (Optional) - Prefix to the access log filenames for this distribution, for example, `myprefix/`.
+
+#### Origin Arguments
+
+* `connection_attempts` (Optional) - Number of times that CloudFront attempts to connect to the origin. Must be between 1 and 3. Defaults to 3.
+* `connection_timeout` (Optional) - Number of seconds that CloudFront waits when trying to establish a connection to the origin. Must be between 1 and 10.
Defaults to 10. +* `custom_origin_config` - The [CloudFront custom origin](#custom-origin-config-arguments) configuration information. If an S3 origin is required, use `origin_access_control_id` or `s3_origin_config` instead. +* `domain_name` (Required) - DNS domain name of either the S3 bucket, or web site of your custom origin. +* `custom_header` (Optional) - One or more sub-resources with `name` and `value` parameters that specify header data that will be sent to the origin (multiples allowed). +* `origin_access_control_id` (Optional) - Unique identifier of a [CloudFront origin access control][8] for this origin. +* `origin_id` (Required) - Unique identifier for the origin. +* `origin_path` (Optional) - Optional element that causes CloudFront to request your content from a directory in your Amazon S3 bucket or your custom origin. +* `origin_shield` - The [CloudFront Origin Shield](#origin-shield-arguments) configuration information. Using Origin Shield can help reduce the load on your origin. For more information, see [Using Origin Shield](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/origin-shield.html) in the Amazon CloudFront Developer Guide. +* `s3_origin_config` - The [CloudFront S3 origin](#s3-origin-config-arguments) configuration information. If a custom origin is required, use `custom_origin_config` instead. + +##### Custom Origin Config Arguments + +* `http_port` (Required) - HTTP port the custom origin listens on. +* `https_port` (Required) - HTTPS port the custom origin listens on. +* `origin_protocol_policy` (Required) - Origin protocol policy to apply to your origin. One of `http-only`, `https-only`, or `match-viewer`. +* `origin_ssl_protocols` (Required) - SSL/TLS protocols that you want CloudFront to use when communicating with your origin over HTTPS. A list of one or more of `SSLv3`, `TLSv1`, `TLSv1.1`, and `TLSv1.2`. +* `origin_keepalive_timeout` - (Optional) The Custom KeepAlive timeout, in seconds. By default, AWS enforces an upper limit of `60`. But you can request an [increase](http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/RequestAndResponseBehaviorCustomOrigin.html#request-custom-request-timeout). Defaults to `5`. +* `origin_read_timeout` - (Optional) The Custom Read timeout, in seconds. By default, AWS enforces an upper limit of `60`. But you can request an [increase](http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/RequestAndResponseBehaviorCustomOrigin.html#request-custom-request-timeout). Defaults to `30`. + +##### Origin Shield Arguments + +* `enabled` (Required) - Whether Origin Shield is enabled. +* `origin_shield_region` (Required) - AWS Region for Origin Shield. To specify a region, use the region code, not the region name. For example, specify the US East (Ohio) region as us-east-2. + +##### S3 Origin Config Arguments + +* `origin_access_identity` (Required) - The [CloudFront origin access identity][5] to associate with the origin. + +#### Origin Group Arguments + +* `origin_id` (Required) - Unique identifier for the origin group. +* `failover_criteria` (Required) - The [failover criteria](#failover-criteria-arguments) for when to failover to the secondary origin. +* `member` (Required) - Ordered [member](#member-arguments) configuration blocks assigned to the origin group, where the first member is the primary origin. You must specify two members. + +##### Failover Criteria Arguments + +* `status_codes` (Required) - List of HTTP status codes for the origin group. 
+ +##### Member Arguments + +* `origin_id` (Required) - Unique identifier of the member origin. + +#### Restrictions Arguments + +The `restrictions` sub-resource takes another single sub-resource named `geo_restriction` (see the example for usage). + +The arguments of `geo_restriction` are: + +* `locations` (Required) - [ISO 3166-1-alpha-2 codes][4] for which you want CloudFront either to distribute your content (`whitelist`) or not distribute your content (`blacklist`). If the type is specified as `none` an empty array can be used. +* `restriction_type` (Required) - Method that you want to use to restrict distribution of your content by country: `none`, `whitelist`, or `blacklist`. + +#### Viewer Certificate Arguments + +* `acm_certificate_arn` - ARN of the [AWS Certificate Manager][6] certificate that you wish to use with this distribution. Specify this, `cloudfront_default_certificate`, or `iam_certificate_id`. The ACM certificate must be in US-EAST-1. +* `cloudfront_default_certificate` - `true` if you want viewers to use HTTPS to request your objects and you're using the CloudFront domain name for your distribution. Specify this, `acm_certificate_arn`, or `iam_certificate_id`. +* `iam_certificate_id` - IAM certificate identifier of the custom viewer certificate for this distribution if you are using a custom domain. Specify this, `acm_certificate_arn`, or `cloudfront_default_certificate`. +* `minimum_protocol_version` - Minimum version of the SSL protocol that you want CloudFront to use for HTTPS connections. Can only be set if `cloudfront_default_certificate = false`. See all possible values in [this](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/secure-connections-supported-viewer-protocols-ciphers.html) table under "Security policy." Some examples include: `TLSv1.2_2019` and `TLSv1.2_2021`. Default: `TLSv1`. **NOTE**: If you are using a custom certificate (specified with `acm_certificate_arn` or `iam_certificate_id`), and have specified `sni-only` in `ssl_support_method`, `TLSv1` or later must be specified. If you have specified `vip` in `ssl_support_method`, only `SSLv3` or `TLSv1` can be specified. If you have specified `cloudfront_default_certificate`, `TLSv1` must be specified. +* `ssl_support_method` - How you want CloudFront to serve HTTPS requests. One of `vip` or `sni-only`. Required if you specify `acm_certificate_arn` or `iam_certificate_id`. **NOTE:** `vip` causes CloudFront to use a dedicated IP address and may incur extra charges. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier for the distribution. For example: `EDFDVBD632BHDS5`. +* `arn` - ARN for the distribution. For example: `arn:aws:cloudfront::123456789012:distribution/EDFDVBD632BHDS5`, where `123456789012` is your AWS account ID. +* `caller_reference` - Internal value used by CloudFront to allow future updates to the distribution configuration. +* `status` - Current status of the distribution. `Deployed` if the distribution's information is fully propagated throughout the Amazon CloudFront system. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `trusted_key_groups` - List of nested attributes for active trusted key groups, if the distribution is set up to serve private content with signed URLs. 
+ * `enabled` - `true` if any of the key groups have public keys that CloudFront can use to verify the signatures of signed URLs and signed cookies. + * `items` - List of nested attributes for each key group. + * `key_group_id` - ID of the key group that contains the public keys. + * `key_pair_ids` - Set of CloudFront key pair IDs. +* `trusted_signers` - List of nested attributes for active trusted signers, if the distribution is set up to serve private content with signed URLs. + * `enabled` - `true` if any of the AWS accounts listed as trusted signers have active CloudFront key pairs + * `items` - List of nested attributes for each trusted signer + * `aws_account_number` - AWS account ID or `self` + * `key_pair_ids` - Set of active CloudFront key pairs associated with the signer account +* `domain_name` - Domain name corresponding to the distribution. For example: `d604721fxaaqy9.cloudfront.net`. +* `last_modified_time` - Date and time the distribution was last modified. +* `in_progress_validation_batches` - Number of invalidation batches currently in progress. +* `etag` - Current version of the distribution's information. For example: `E2QWRUHAPOMQZL`. +* `hosted_zone_id` - CloudFront Route 53 zone ID that can be used to route an [Alias Resource Record Set][7] to. This attribute is simply an alias for the zone ID `Z2FDTNDATAQYW2`. + +[1]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Introduction.html +[2]: https://docs.aws.amazon.com/cloudfront/latest/APIReference/API_CreateDistribution.html +[3]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-restricting-access-to-s3.html +[4]: http://www.iso.org/iso/country_codes/iso_3166_code_lists/country_names_and_code_elements.htm +[5]: /docs/providers/aws/r/cloudfront_origin_access_identity.html +[6]: https://aws.amazon.com/certificate-manager/ +[7]: http://docs.aws.amazon.com/Route53/latest/APIReference/CreateAliasRRSAPI.html +[8]: /docs/providers/aws/r/cloudfront_origin_access_control.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Distributions using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudFront Distributions using the `id`. For example: + +```console +% terraform import aws_cloudfront_distribution.distribution E74FTE3EXAMPLE +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudfront_field_level_encryption_config.html.markdown b/website/docs/cdktf/python/r/cloudfront_field_level_encryption_config.html.markdown new file mode 100644 index 00000000000..ef8614fc27a --- /dev/null +++ b/website/docs/cdktf/python/r/cloudfront_field_level_encryption_config.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "CloudFront" +layout: "aws" +page_title: "AWS: aws_cloudfront_field_level_encryption_config" +description: |- + Provides a CloudFront Field-level Encryption Config resource. +--- + + + +# Resource: aws_cloudfront_field_level_encryption_config + +Provides a CloudFront Field-level Encryption Config resource. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_field_level_encryption_config import (
+    CloudfrontFieldLevelEncryptionConfig,
+    CloudfrontFieldLevelEncryptionConfigContentTypeProfileConfig,
+    CloudfrontFieldLevelEncryptionConfigContentTypeProfileConfigContentTypeProfiles,
+    CloudfrontFieldLevelEncryptionConfigContentTypeProfileConfigContentTypeProfilesItems,
+    CloudfrontFieldLevelEncryptionConfigQueryArgProfileConfig,
+    CloudfrontFieldLevelEncryptionConfigQueryArgProfileConfigQueryArgProfiles,
+    CloudfrontFieldLevelEncryptionConfigQueryArgProfileConfigQueryArgProfilesItems
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontFieldLevelEncryptionConfig(self, "test",
+            comment="test comment",
+            content_type_profile_config=CloudfrontFieldLevelEncryptionConfigContentTypeProfileConfig(
+                content_type_profiles=CloudfrontFieldLevelEncryptionConfigContentTypeProfileConfigContentTypeProfiles(
+                    items=[CloudfrontFieldLevelEncryptionConfigContentTypeProfileConfigContentTypeProfilesItems(
+                        content_type="application/x-www-form-urlencoded",
+                        format="URLEncoded"
+                    )
+                    ]
+                ),
+                forward_when_content_type_is_unknown=True
+            ),
+            query_arg_profile_config=CloudfrontFieldLevelEncryptionConfigQueryArgProfileConfig(
+                forward_when_query_arg_profile_is_unknown=True,
+                query_arg_profiles=CloudfrontFieldLevelEncryptionConfigQueryArgProfileConfigQueryArgProfiles(
+                    items=[CloudfrontFieldLevelEncryptionConfigQueryArgProfileConfigQueryArgProfilesItems(
+                        profile_id=Token.as_string(aws_cloudfront_field_level_encryption_profile_test.id),
+                        query_arg="Arg1"
+                    )
+                    ]
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `comment` - (Optional) An optional comment about the Field Level Encryption Config.
+* `content_type_profile_config` - (Required) [Content Type Profile Config](#content-type-profile-config) that specifies when to forward content if a content type isn't recognized, and the profile to use by default when a query argument doesn't specify a profile to use.
+* `query_arg_profile_config` - (Required) [Query Arg Profile Config](#query-arg-profile-config) that specifies when to forward content if a profile isn't found and the profile that can be provided as a query argument in a request.
+
+### Content Type Profile Config
+
+* `forward_when_content_type_is_unknown` - (Required) Specifies what to do when an unknown content type is provided for the profile. If `true`, content is forwarded without being encrypted when the content type is unknown. If `false` (the default), an error is returned when the content type is unknown.
+* `content_type_profiles` - (Required) Object that contains an attribute `items` that contains the list of configurations for a field-level encryption content type-profile. See [Content Type Profile](#content-type-profile).
+
+### Content Type Profile
+
+* `content_type` - (Required) The content type for a field-level encryption content type-profile mapping. Valid value is `application/x-www-form-urlencoded`.
+* `format` - (Required) The format for a field-level encryption content type-profile mapping. Valid value is `URLEncoded`.
+* `profile_id` - (Optional) The profile ID for a field-level encryption content type-profile mapping.
+
+### Query Arg Profile Config
+
+* `forward_when_query_arg_profile_is_unknown` - (Required) Flag to set if you want a request to be forwarded to the origin even if the profile specified by the field-level encryption query argument, `fle-profile`, is unknown.
+* `query_arg_profiles` - (Optional) Object that contains an attribute `items` that contains the list of profiles specified for query argument-profile mapping for field-level encryption. See [Query Arg Profile](#query-arg-profile).
+
+### Query Arg Profile
+
+* `profile_id` - (Required) ID of the profile to use for field-level encryption query argument-profile mapping.
+* `query_arg` - (Required) Query argument for field-level encryption query argument-profile mapping.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `caller_reference` - Internal value used by CloudFront to allow future updates to the Field Level Encryption Config.
+* `etag` - The current version of the Field Level Encryption Config. For example: `E2QWRUHAPOMQZL`.
+* `id` - The identifier for the Field Level Encryption Config. For example: `K3D5EWEUDCCXON`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Field Level Encryption Config using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cloudfront Field Level Encryption Config using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_field_level_encryption_config.config E74FTE3AEXAMPLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_field_level_encryption_profile.html.markdown b/website/docs/cdktf/python/r/cloudfront_field_level_encryption_profile.html.markdown
new file mode 100644
index 00000000000..c3161a9a2f6
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_field_level_encryption_profile.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_field_level_encryption_profile"
+description: |-
+  Provides a CloudFront Field-level Encryption Profile resource.
+---
+
+
+
+# Resource: aws_cloudfront_field_level_encryption_profile
+
+Provides a CloudFront Field-level Encryption Profile resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The nested configuration classes used below are exported by the same generated module.
+from imports.aws.cloudfront_field_level_encryption_profile import (
+    CloudfrontFieldLevelEncryptionProfile,
+    CloudfrontFieldLevelEncryptionProfileEncryptionEntities,
+    CloudfrontFieldLevelEncryptionProfileEncryptionEntitiesItems,
+    CloudfrontFieldLevelEncryptionProfileEncryptionEntitiesItemsFieldPatterns
+)
+from imports.aws.cloudfront_public_key import CloudfrontPublicKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudfrontPublicKey(self, "example",
+            comment="test public key",
+            encoded_key=Token.as_string(Fn.file("public_key.pem")),
+            name="test_key"
+        )
+        CloudfrontFieldLevelEncryptionProfile(self, "test",
+            comment="test comment",
+            encryption_entities=CloudfrontFieldLevelEncryptionProfileEncryptionEntities(
+                items=[CloudfrontFieldLevelEncryptionProfileEncryptionEntitiesItems(
+                    field_patterns=CloudfrontFieldLevelEncryptionProfileEncryptionEntitiesItemsFieldPatterns(
+                        items=["DateOfBirth"]
+                    ),
+                    provider_id="test provider",
+                    public_key_id=example.id
+                )
+                ]
+            ),
+            name="test profile"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Field Level Encryption Profile.
+* `comment` - (Optional) An optional comment about the Field Level Encryption Profile.
+* `encryption_entities` - (Required) The [encryption entities](#encryption-entities) config block for field-level encryption profiles that contains an attribute `items` which includes the encryption key and field pattern specifications.
+
+### Encryption Entities
+
+* `public_key_id` - (Required) The public key associated with a set of field-level encryption patterns, to be used when encrypting the fields that match the patterns.
+* `provider_id` - (Required) The provider associated with the public key being used for encryption.
+* `field_patterns` - (Required) Object that contains an attribute `items` that contains the list of field patterns in a field-level encryption content type profile that specify the fields that you want to be encrypted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `caller_reference` - Internal value used by CloudFront to allow future updates to the Field Level Encryption Profile.
+* `etag` - The current version of the Field Level Encryption Profile. For example: `E2QWRUHAPOMQZL`.
+* `id` - The identifier for the Field Level Encryption Profile. For example: `K3D5EWEUDCCXON`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Field Level Encryption Profile using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cloudfront Field Level Encryption Profile using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_field_level_encryption_profile.profile K3D5EWEUDCCXON
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_function.html.markdown b/website/docs/cdktf/python/r/cloudfront_function.html.markdown
new file mode 100644
index 00000000000..ceba31a3c88
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_function.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_function"
+description: |-
+  Provides a CloudFront Function resource.
With CloudFront Functions in Amazon CloudFront, you can write lightweight functions in JavaScript for high-scale, latency-sensitive CDN customizations. +--- + + + +# Resource: aws_cloudfront_function + +Provides a CloudFront Function resource. With CloudFront Functions in Amazon CloudFront, you can write lightweight functions in JavaScript for high-scale, latency-sensitive CDN customizations. + +See [CloudFront Functions](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/cloudfront-functions.html) + +~> **NOTE:** You cannot delete a function if it’s associated with a cache behavior. First, update your distributions to remove the function association from all cache behaviors, then delete the function. + +## Example Usage + +### Basic Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudfront_function import CloudfrontFunction +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudfrontFunction(self, "test", + code=Token.as_string(Fn.file("${path.module}/function.js")), + comment="my function", + name="test", + publish=True, + runtime="cloudfront-js-1.0" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Unique name for your CloudFront Function. +* `code` - (Required) Source code of the function +* `runtime` - (Required) Identifier of the function's runtime. Currently only `cloudfront-js-1.0` is valid. + +The following arguments are optional: + +* `comment` - (Optional) Comment. +* `publish` - (Optional) Whether to publish creation/change as Live CloudFront Function Version. Defaults to `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) identifying your CloudFront Function. +* `etag` - ETag hash of the function. This is the value for the `DEVELOPMENT` stage of the function. +* `live_stage_etag` - ETag hash of any `LIVE` stage of the function. +* `status` - Status of the function. Can be `UNPUBLISHED`, `UNASSOCIATED` or `ASSOCIATED`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Functions using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudFront Functions using the `name`. For example: + +```console +% terraform import aws_cloudfront_function.test my_test_function +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudfront_key_group.html.markdown b/website/docs/cdktf/python/r/cloudfront_key_group.html.markdown new file mode 100644 index 00000000000..168a4937f2b --- /dev/null +++ b/website/docs/cdktf/python/r/cloudfront_key_group.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "CloudFront" +layout: "aws" +page_title: "AWS: aws_cloudfront_key_group" +description: |- + Provides a CloudFront key group. 
+---
+
+
+# Resource: aws_cloudfront_key_group
+
+Provides a CloudFront key group.
+
+## Example Usage
+
+The following example creates a CloudFront key group.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_key_group import CloudfrontKeyGroup
+from imports.aws.cloudfront_public_key import CloudfrontPublicKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudfrontPublicKey(self, "example",
+            comment="example public key",
+            encoded_key=Token.as_string(Fn.file("public_key.pem")),
+            name="example-key"
+        )
+        aws_cloudfront_key_group_example = CloudfrontKeyGroup(self, "example_1",
+            comment="example key group",
+            items=[example.id],
+            name="example-key-group"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudfront_key_group_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `comment` - (Optional) A comment to describe the key group.
+* `items` - (Required) A list of the identifiers of the public keys in the key group.
+* `name` - (Required) A name to identify the key group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - The identifier for this version of the key group.
+* `id` - The identifier for the key group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Key Group using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudFront Key Group using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_key_group.example 4b4f2r1c-315d-5c2e-f093-216t50jed10f
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_monitoring_subscription.html.markdown b/website/docs/cdktf/python/r/cloudfront_monitoring_subscription.html.markdown
new file mode 100644
index 00000000000..4a4a68ee851
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_monitoring_subscription.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_monitoring_subscription"
+description: |-
+  Provides a CloudFront monitoring subscription resource.
+---
+
+
+# Resource: aws_cloudfront_monitoring_subscription
+
+Provides a CloudFront monitoring subscription resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
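+# NOTE: "aws_cloudfront_distribution_example" referenced below is assumed to be an
+# aws_cloudfront_distribution resource defined elsewhere in this stack; it is not
+# part of this snippet.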
+#
+from imports.aws.cloudfront_monitoring_subscription import CloudfrontMonitoringSubscription
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontMonitoringSubscription(self, "example",
+            distribution_id=Token.as_string(aws_cloudfront_distribution_example.id),
+            monitoring_subscription=CloudfrontMonitoringSubscriptionMonitoringSubscription(
+                realtime_metrics_subscription_config=CloudfrontMonitoringSubscriptionMonitoringSubscriptionRealtimeMetricsSubscriptionConfig(
+                    realtime_metrics_subscription_status="Enabled"
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `distribution_id` - (Required) The ID of the distribution that you are enabling metrics for.
+* `monitoring_subscription` - (Required) A monitoring subscription. This structure contains information about whether additional CloudWatch metrics are enabled for a given CloudFront distribution. See below.
+
+### monitoring_subscription
+
+* `realtime_metrics_subscription_config` - (Required) A subscription configuration for additional CloudWatch metrics. See below.
+
+### realtime_metrics_subscription_config
+
+* `realtime_metrics_subscription_status` - (Required) A flag that indicates whether additional CloudWatch metrics are enabled for a given CloudFront distribution. Valid values are `Enabled` and `Disabled`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the CloudFront monitoring subscription, which corresponds to the `distribution_id`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront monitoring subscription using the id. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudFront monitoring subscription using the id. For example:
+
+```console
+% terraform import aws_cloudfront_monitoring_subscription.example E3QYSUHO4VYRGB
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_origin_access_control.html.markdown b/website/docs/cdktf/python/r/cloudfront_origin_access_control.html.markdown
new file mode 100644
index 00000000000..0c8b30d09e2
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_origin_access_control.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_control"
+description: |-
+  Terraform resource for managing an AWS CloudFront Origin Access Control.
+---
+
+
+# Resource: aws_cloudfront_origin_access_control
+
+Manages an AWS CloudFront Origin Access Control, which is used by CloudFront Distributions with an Amazon S3 bucket as the origin.
+
+Read more about Origin Access Control in the [CloudFront Developer Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-restricting-access-to-s3.html).
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
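+# (The origin access control created below is for an S3 origin and signs all
+# requests using SigV4.)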
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_origin_access_control import CloudfrontOriginAccessControl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontOriginAccessControl(self, "example",
+            description="Example Policy",
+            name="example",
+            origin_access_control_origin_type="s3",
+            signing_behavior="always",
+            signing_protocol="sigv4"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that identifies the Origin Access Control.
+* `description` - (Optional) The description of the Origin Access Control. Defaults to "Managed by Terraform" if omitted.
+* `origin_access_control_origin_type` - (Required) The type of origin that this Origin Access Control is for. Valid values are `s3` and `mediastore`.
+* `signing_behavior` - (Required) Specifies which requests CloudFront signs. Specify `always` for the most common use case. Allowed values: `always`, `never`, and `no-override`.
+* `signing_protocol` - (Required) Determines how CloudFront signs (authenticates) requests. The only valid value is `sigv4`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier of this Origin Access Control.
+* `etag` - The current version of this Origin Access Control.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Origin Access Control using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudFront Origin Access Control using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_origin_access_control.example E327GJI25M56DG
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_origin_access_identity.html.markdown b/website/docs/cdktf/python/r/cloudfront_origin_access_identity.html.markdown
new file mode 100644
index 00000000000..b0b18ff1c28
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_origin_access_identity.html.markdown
@@ -0,0 +1,161 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_identity"
+description: |-
+  Provides a CloudFront origin access identity.
+---
+
+
+# Resource: aws_cloudfront_origin_access_identity
+
+Creates an Amazon CloudFront origin access identity.
+
+For information about CloudFront distributions, see the
+[Amazon CloudFront Developer Guide][1]. For more information on generating
+origin access identities, see
+[Using an Origin Access Identity to Restrict Access to Your Amazon S3 Content][2].
+
+## Example Usage
+
+The following example creates a CloudFront origin access identity.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
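+# The identity created below is typically referenced from a distribution's
+# s3_origin_config via its cloudfront_access_identity_path attribute (see
+# "Using With CloudFront" below).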
+#
+from imports.aws.cloudfront_origin_access_identity import CloudfrontOriginAccessIdentity
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontOriginAccessIdentity(self, "example",
+            comment="Some comment"
+        )
+```
+
+## Argument Reference
+
+* `comment` - (Optional) A comment for the origin access identity.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier for the distribution. For example: `EDFDVBD632BHDS5`.
+* `caller_reference` - Internal value used by CloudFront to allow future
+  updates to the origin access identity.
+* `cloudfront_access_identity_path` - A shortcut to the full path for the
+  origin access identity to use in CloudFront, see below.
+* `etag` - The current version of the origin access identity's information.
+  For example: `E2QWRUHAPOMQZL`.
+* `iam_arn` - A pre-generated ARN for use in S3 bucket policies (see below).
+  Example: `arn:aws:iam::cloudfront:user/CloudFront Origin Access Identity
+  E2QWRUHAPOMQZL`.
+* `s3_canonical_user_id` - The Amazon S3 canonical user ID for the origin
+  access identity, which you use when giving the origin access identity read
+  permission to an object in Amazon S3.
+
+## Using With CloudFront
+
+Normally, when referencing an origin access identity in CloudFront, you need to
+prefix the ID with the `origin-access-identity/cloudfront/` special path.
+The `cloudfront_access_identity_path` allows this to be circumvented.
+The below snippet demonstrates use with the `s3_origin_config` structure for the
+[`aws_cloudfront_distribution`][3] resource:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_distribution import CloudfrontDistribution
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, domainName, originId, defaultCacheBehavior, enabled, restrictions, viewerCertificate):
+        super().__init__(scope, name)
+        CloudfrontDistribution(self, "example",
+            origin=[CloudfrontDistributionOrigin(
+                s3_origin_config=CloudfrontDistributionOriginS3OriginConfig(
+                    origin_access_identity=Token.as_string(aws_cloudfront_origin_access_identity_example.cloudfront_access_identity_path)
+                ),
+                domain_name=domain_name,
+                origin_id=origin_id
+            )
+            ],
+            default_cache_behavior=default_cache_behavior,
+            enabled=enabled,
+            restrictions=restrictions,
+            viewer_certificate=viewer_certificate
+        )
+```
+
+### Updating your bucket policy
+
+Note that the AWS API may translate the `s3_canonical_user_id` `CanonicalUser`
+principal into an `AWS` IAM ARN principal when supplied in an
+[`aws_s3_bucket`][4] bucket policy, causing spurious diffs in Terraform. If
+you see this behaviour, use the `iam_arn` instead:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
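+# NOTE: "example" (the origin access identity) and "aws_s3_bucket_example" used
+# below are assumed to be resources defined elsewhere in this stack.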
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.s3_bucket_policy import S3BucketPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        s3_policy = DataAwsIamPolicyDocument(self, "s3_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:GetObject"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[example.iam_arn],
+                    type="AWS"
+                )
+                ],
+                resources=["${" + aws_s3_bucket_example.arn + "}/*"]
+            )
+            ]
+        )
+        S3BucketPolicy(self, "example",
+            bucket=Token.as_string(aws_s3_bucket_example.id),
+            policy=Token.as_string(s3_policy.json)
+        )
+```
+
+[1]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Introduction.html
+[2]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-restricting-access-to-s3.html
+[3]: /docs/providers/aws/r/cloudfront_distribution.html
+[4]: /docs/providers/aws/r/s3_bucket.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Origin Access Identities using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cloudfront Origin Access Identities using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_origin_access_identity.origin_access E74FTE3AEXAMPLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_origin_request_policy.html.markdown b/website/docs/cdktf/python/r/cloudfront_origin_request_policy.html.markdown
new file mode 100644
index 00000000000..8e0c9673993
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_origin_request_policy.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_request_policy"
+description: |-
+  Determines the values that CloudFront includes in requests that it sends to the origin.
+---
+
+
+# Resource: aws_cloudfront_origin_request_policy
+
+Provides a CloudFront origin request policy resource, which determines the values that CloudFront includes in requests that it sends to the origin.
+
+## Example Usage
+
+The following example creates a CloudFront origin request policy.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
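+# The policy below forwards only the whitelisted cookie, header, and query
+# string named "example" to the origin.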
+#
+from imports.aws.cloudfront_origin_request_policy import CloudfrontOriginRequestPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontOriginRequestPolicy(self, "example",
+            comment="example comment",
+            cookies_config=CloudfrontOriginRequestPolicyCookiesConfig(
+                cookie_behavior="whitelist",
+                cookies=CloudfrontOriginRequestPolicyCookiesConfigCookies(
+                    items=["example"]
+                )
+            ),
+            headers_config=CloudfrontOriginRequestPolicyHeadersConfig(
+                header_behavior="whitelist",
+                headers=CloudfrontOriginRequestPolicyHeadersConfigHeaders(
+                    items=["example"]
+                )
+            ),
+            name="example-policy",
+            query_strings_config=CloudfrontOriginRequestPolicyQueryStringsConfig(
+                query_string_behavior="whitelist",
+                query_strings=CloudfrontOriginRequestPolicyQueryStringsConfigQueryStrings(
+                    items=["example"]
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Unique name to identify the origin request policy.
+* `comment` - (Optional) Comment to describe the origin request policy.
+* `cookies_config` - (Required) Object that determines whether any cookies in viewer requests (and if so, which cookies) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information.
+* `headers_config` - (Required) Object that determines whether any HTTP headers (and if so, which headers) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information.
+* `query_strings_config` - (Required) Object that determines whether any URL query strings in viewer requests (and if so, which query strings) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Query String Config](#query-string-config) for more information.
+
+### Cookies Config
+
+* `cookie_behavior` - (Required) Determines whether any cookies in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`, `allExcept`.
+* `cookies` - (Optional) Object that contains a list of cookie names. See [Items](#items) for more information.
+
+### Headers Config
+
+* `header_behavior` - (Required) Determines whether any HTTP headers are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allViewer`, `allViewerAndWhitelistCloudFront`, `allExcept`.
+* `headers` - (Optional) Object that contains a list of header names. See [Items](#items) for more information.
+
+### Query String Config
+
+* `query_string_behavior` - (Required) Determines whether any URL query strings in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`, `allExcept`.
+* `query_strings` - (Optional) Object that contains a list of query string names. See [Items](#items) for more information.
+
+### Items
+
+* `items` - (Required) List of item names (cookies, headers, or query strings).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - The current version of the origin request policy.
+* `id` - The identifier for the origin request policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Origin Request Policies using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cloudfront Origin Request Policies using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_origin_request_policy.policy ccca32ef-dce3-4df3-80df-1bd3000bc4d3
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_public_key.html.markdown b/website/docs/cdktf/python/r/cloudfront_public_key.html.markdown
new file mode 100644
index 00000000000..de573d961b1
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_public_key.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_public_key"
+description: |-
+  Provides a CloudFront Public Key which you add to CloudFront to use with features like field-level encryption.
+---
+
+
+# Resource: aws_cloudfront_public_key
+
+Provides a CloudFront Public Key which you add to CloudFront to use with features like field-level encryption.
+
+## Example Usage
+
+The following example creates a CloudFront public key.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_public_key import CloudfrontPublicKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontPublicKey(self, "example",
+            comment="test public key",
+            encoded_key=Token.as_string(Fn.file("public_key.pem")),
+            name="test_key"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `comment` - (Optional) A comment about the public key.
+* `encoded_key` - (Required) The encoded public key that you want to add to CloudFront to use with features like field-level encryption.
+* `name` - (Optional) The name for the public key. By default generated by Terraform.
+* `name_prefix` - (Optional) A name prefix for the public key; Terraform generates a unique name beginning with this prefix. Conflicts with `name`.
+
+**NOTE:** When setting the `encoded_key` value, the string must end with a newline. Otherwise, repeated Terraform runs will attempt to recreate the `aws_cloudfront_public_key` resource.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `caller_reference` - Internal value used by CloudFront to allow future updates to the public key configuration.
+* `etag` - The current version of the public key. For example: `E2QWRUHAPOMQZL`.
+* `id` - The identifier for the public key. For example: `K3D5EWEUDCCXON`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Public Key using the `id`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudFront Public Key using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_public_key.example K3D5EWEUDCCXON
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_realtime_log_config.html.markdown b/website/docs/cdktf/python/r/cloudfront_realtime_log_config.html.markdown
new file mode 100644
index 00000000000..539fd0fdd8f
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_realtime_log_config.html.markdown
@@ -0,0 +1,130 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_realtime_log_config"
+description: |-
+  Provides a CloudFront real-time log configuration resource.
+---
+
+
+# Resource: aws_cloudfront_realtime_log_config
+
+Provides a CloudFront real-time log configuration resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_realtime_log_config import CloudfrontRealtimeLogConfig
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["cloudfront.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        example = DataAwsIamPolicyDocument(self, "example",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["kinesis:DescribeStreamSummary", "kinesis:DescribeStream", "kinesis:PutRecord", "kinesis:PutRecords"
+                ],
+                effect="Allow",
+                resources=[Token.as_string(aws_kinesis_stream_example.arn)]
+            )
+            ]
+        )
+        aws_iam_role_example = IamRole(self, "example_2",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="cloudfront-realtime-log-config-example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_example.override_logical_id("example")
+        aws_iam_role_policy_example = IamRolePolicy(self, "example_3",
+            name="cloudfront-realtime-log-config-example",
+            policy=Token.as_string(example.json),
+            role=Token.as_string(aws_iam_role_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
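+        # (The realtime log config defined next uses depends_on to ensure the IAM
+        # policy is attached before CloudFront validates the role.)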
+        aws_iam_role_policy_example.override_logical_id("example")
+        aws_cloudfront_realtime_log_config_example = CloudfrontRealtimeLogConfig(self, "example_4",
+            depends_on=[aws_iam_role_policy_example],
+            endpoint=CloudfrontRealtimeLogConfigEndpoint(
+                kinesis_stream_config=CloudfrontRealtimeLogConfigEndpointKinesisStreamConfig(
+                    role_arn=Token.as_string(aws_iam_role_example.arn),
+                    stream_arn=Token.as_string(aws_kinesis_stream_example.arn)
+                ),
+                stream_type="Kinesis"
+            ),
+            fields=["timestamp", "c-ip"],
+            name="example",
+            sampling_rate=75
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudfront_realtime_log_config_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `endpoint` - (Required) The Amazon Kinesis data streams where real-time log data is sent.
+* `fields` - (Required) The fields that are included in each real-time log record. See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-fields) for supported values.
+* `name` - (Required) The unique name to identify this real-time log configuration.
+* `sampling_rate` - (Required) The sampling rate for this real-time log configuration. The sampling rate determines the percentage of viewer requests that are represented in the real-time log data. An integer between `1` and `100`, inclusive.
+
+The `endpoint` object supports the following:
+
+* `kinesis_stream_config` - (Required) The Amazon Kinesis data stream configuration.
+* `stream_type` - (Required) The type of data stream where real-time log data is sent. The only valid value is `Kinesis`.
+
+The `kinesis_stream_config` object supports the following:
+
+* `role_arn` - (Required) The ARN of an [IAM role](iam_role.html) that CloudFront can use to send real-time log data to the Kinesis data stream.
+See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-iam-role) for more information.
+* `stream_arn` - (Required) The ARN of the [Kinesis data stream](kinesis_stream.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the CloudFront real-time log configuration.
+* `arn` - The ARN (Amazon Resource Name) of the CloudFront real-time log configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront real-time log configurations using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudFront real-time log configurations using the ARN.
For example:
+
+```console
+% terraform import aws_cloudfront_realtime_log_config.example arn:aws:cloudfront::111122223333:realtime-log-config/ExampleNameForRealtimeLogConfig
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudfront_response_headers_policy.html.markdown b/website/docs/cdktf/python/r/cloudfront_response_headers_policy.html.markdown
new file mode 100644
index 00000000000..d4602e96dc0
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudfront_response_headers_policy.html.markdown
@@ -0,0 +1,220 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_response_headers_policy"
+description: |-
+  Provides a CloudFront response headers policy resource.
+---
+
+
+# Resource: aws_cloudfront_response_headers_policy
+
+Provides a CloudFront response headers policy resource.
+A response headers policy contains information about a set of HTTP response headers and their values.
+After you create a response headers policy, you can use its ID to attach it to one or more cache behaviors in a CloudFront distribution.
+When it’s attached to a cache behavior, CloudFront adds the headers in the policy to every response that it sends for requests that match the cache behavior.
+
+## Example Usage
+
+The example below creates a CloudFront response headers policy.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_response_headers_policy import CloudfrontResponseHeadersPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontResponseHeadersPolicy(self, "example",
+            comment="test comment",
+            cors_config=CloudfrontResponseHeadersPolicyCorsConfig(
+                access_control_allow_credentials=True,
+                access_control_allow_headers=CloudfrontResponseHeadersPolicyCorsConfigAccessControlAllowHeaders(
+                    items=["test"]
+                ),
+                access_control_allow_methods=CloudfrontResponseHeadersPolicyCorsConfigAccessControlAllowMethods(
+                    items=["GET"]
+                ),
+                access_control_allow_origins=CloudfrontResponseHeadersPolicyCorsConfigAccessControlAllowOrigins(
+                    items=["test.example.comtest"]
+                ),
+                origin_override=True
+            ),
+            name="example-policy"
+        )
+```
+
+The example below creates a CloudFront response headers policy with a custom headers config.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
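+# The policy below only sets custom response headers; the CORS and security
+# headers configs are omitted in this variant.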
+#
+from imports.aws.cloudfront_response_headers_policy import CloudfrontResponseHeadersPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontResponseHeadersPolicy(self, "example",
+            custom_headers_config=CloudfrontResponseHeadersPolicyCustomHeadersConfig(
+                items=[CloudfrontResponseHeadersPolicyCustomHeadersConfigItems(
+                    header="X-Permitted-Cross-Domain-Policies",
+                    override=True,
+                    value="none"
+                ), CloudfrontResponseHeadersPolicyCustomHeadersConfigItems(
+                    header="X-Test",
+                    override=True,
+                    value="none"
+                )
+                ]
+            ),
+            name="example-headers-policy"
+        )
+```
+
+The example below creates a CloudFront response headers policy with a custom headers config and server timing headers config.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudfront_response_headers_policy import CloudfrontResponseHeadersPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudfrontResponseHeadersPolicy(self, "example",
+            custom_headers_config=CloudfrontResponseHeadersPolicyCustomHeadersConfig(
+                items=[CloudfrontResponseHeadersPolicyCustomHeadersConfigItems(
+                    header="X-Permitted-Cross-Domain-Policies",
+                    override=True,
+                    value="none"
+                )
+                ]
+            ),
+            name="example-headers-policy",
+            server_timing_headers_config=CloudfrontResponseHeadersPolicyServerTimingHeadersConfig(
+                enabled=True,
+                sampling_rate=50
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A unique name to identify the response headers policy.
+* `comment` - (Optional) A comment to describe the response headers policy. The comment cannot be longer than 128 characters.
+* `cors_config` - (Optional) A configuration for a set of HTTP response headers that are used for Cross-Origin Resource Sharing (CORS). See [Cors Config](#cors-config) for more information.
+* `custom_headers_config` - (Optional) Object that contains an attribute `items` that contains a list of custom headers. See [Custom Header](#custom-header) for more information.
+* `remove_headers_config` - (Optional) A configuration for a set of HTTP headers to remove from the HTTP response. Object that contains an attribute `items` that contains a list of headers. See [Remove Header](#remove-header) for more information.
+* `security_headers_config` - (Optional) A configuration for a set of security-related HTTP response headers. See [Security Headers Config](#security-headers-config) for more information.
+* `server_timing_headers_config` - (Optional) A configuration for enabling the Server-Timing header in HTTP responses sent from CloudFront. See [Server Timing Headers Config](#server-timing-headers-config) for more information.
+
+### Cors Config
+
+* `access_control_allow_credentials` - (Required) A Boolean value that CloudFront uses as the value for the `Access-Control-Allow-Credentials` HTTP response header.
+* `access_control_allow_headers` - (Required) Object that contains an attribute `items` that contains a list of HTTP header names that CloudFront includes as values for the `Access-Control-Allow-Headers` HTTP response header.
+* `access_control_allow_methods` - (Required) Object that contains an attribute `items` that contains a list of HTTP methods that CloudFront includes as values for the `Access-Control-Allow-Methods` HTTP response header. Valid values: `GET` | `POST` | `OPTIONS` | `PUT` | `DELETE` | `HEAD` | `ALL`
+* `access_control_allow_origins` - (Required) Object that contains an attribute `items` that contains a list of origins that CloudFront can use as the value for the `Access-Control-Allow-Origin` HTTP response header.
+* `access_control_expose_headers` - (Optional) Object that contains an attribute `items` that contains a list of HTTP headers that CloudFront includes as values for the `Access-Control-Expose-Headers` HTTP response header.
+* `access_control_max_age_sec` - (Optional) A number that CloudFront uses as the value for the `Access-Control-Max-Age` HTTP response header.
+* `origin_override` - (Required) A Boolean value that determines whether CloudFront overrides HTTP response headers received from the origin with the headers specified in this response headers policy.
+
+### Custom Header
+
+* `header` - (Required) The HTTP response header name.
+* `override` - (Required) Whether CloudFront overrides a response header with the same name received from the origin with the header specified here.
+* `value` - (Required) The value for the HTTP response header.
+
+### Remove Header
+
+* `header` - (Required) The HTTP header name.
+
+### Security Headers Config
+
+* `content_security_policy` - (Optional) The policy directives and their values that CloudFront includes as values for the `Content-Security-Policy` HTTP response header. See [Content Security Policy](#content-security-policy) for more information.
+* `content_type_options` - (Optional) Determines whether CloudFront includes the `X-Content-Type-Options` HTTP response header with its value set to `nosniff`. See [Content Type Options](#content-type-options) for more information.
+* `frame_options` - (Optional) Determines whether CloudFront includes the `X-Frame-Options` HTTP response header and the header’s value. See [Frame Options](#frame-options) for more information.
+* `referrer_policy` - (Optional) Determines whether CloudFront includes the `Referrer-Policy` HTTP response header and the header’s value. See [Referrer Policy](#referrer-policy) for more information.
+* `strict_transport_security` - (Optional) Determines whether CloudFront includes the `Strict-Transport-Security` HTTP response header and the header’s value. See [Strict Transport Security](#strict-transport-security) for more information.
+* `xss_protection` - (Optional) Determines whether CloudFront includes the `X-XSS-Protection` HTTP response header and the header’s value. See [XSS Protection](#xss-protection) for more information.
+
+### Content Security Policy
+
+* `content_security_policy` - (Required) The policy directives and their values that CloudFront includes as values for the `Content-Security-Policy` HTTP response header.
+* `override` - (Required) Whether CloudFront overrides the `Content-Security-Policy` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Content Type Options
+
+* `override` - (Required) Whether CloudFront overrides the `X-Content-Type-Options` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Frame Options
+
+* `frame_option` - (Required) The value of the `X-Frame-Options` HTTP response header.
Valid values: `DENY` | `SAMEORIGIN`
+* `override` - (Required) Whether CloudFront overrides the `X-Frame-Options` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Referrer Policy
+
+* `referrer_policy` - (Required) The value of the `Referrer-Policy` HTTP response header. Valid values: `no-referrer` | `no-referrer-when-downgrade` | `origin` | `origin-when-cross-origin` | `same-origin` | `strict-origin` | `strict-origin-when-cross-origin` | `unsafe-url`
+* `override` - (Required) Whether CloudFront overrides the `Referrer-Policy` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Strict Transport Security
+
+* `access_control_max_age_sec` - (Required) A number that CloudFront uses as the value for the `max-age` directive in the `Strict-Transport-Security` HTTP response header.
+* `include_subdomains` - (Optional) Whether CloudFront includes the `includeSubDomains` directive in the `Strict-Transport-Security` HTTP response header.
+* `override` - (Required) Whether CloudFront overrides the `Strict-Transport-Security` HTTP response header received from the origin with the one specified in this response headers policy.
+* `preload` - (Optional) Whether CloudFront includes the `preload` directive in the `Strict-Transport-Security` HTTP response header.
+
+### XSS Protection
+
+* `mode_block` - (Optional) Whether CloudFront includes the `mode=block` directive in the `X-XSS-Protection` header.
+* `override` - (Required) Whether CloudFront overrides the `X-XSS-Protection` HTTP response header received from the origin with the one specified in this response headers policy.
+* `protection` - (Required) A Boolean value that determines the value of the `X-XSS-Protection` HTTP response header. When this setting is `true`, the value of the `X-XSS-Protection` header is `1`. When this setting is `false`, the value of the `X-XSS-Protection` header is `0`.
+* `report_uri` - (Optional) A reporting URI, which CloudFront uses as the value of the report directive in the `X-XSS-Protection` header. You cannot specify a `report_uri` when `mode_block` is `true`.
+
+### Server Timing Headers Config
+
+* `enabled` - (Required) Whether CloudFront adds the `Server-Timing` header to HTTP responses that it sends in response to requests that match a cache behavior that's associated with this response headers policy.
+* `sampling_rate` - (Required) A number 0–100 (inclusive) that specifies the percentage of responses to which you want CloudFront to add the `Server-Timing` header. Valid range: 0.0 to 100.0.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - The current version of the response headers policy.
+* `id` - The identifier for the response headers policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Response Headers Policies using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cloudfront Response Headers Policies using the `id`.
For example:
+
+```console
+% terraform import aws_cloudfront_response_headers_policy.policy 658327ea-f89d-4fab-a63d-7e88639e58f9
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudhsm_v2_cluster.html.markdown b/website/docs/cdktf/python/r/cloudhsm_v2_cluster.html.markdown
new file mode 100644
index 00000000000..11c0bf0adfb
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudhsm_v2_cluster.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "CloudHSM"
+layout: "aws"
+page_title: "AWS: aws_cloudhsm_v2_cluster"
+description: |-
+  Provides a CloudHSM v2 resource.
+---
+
+
+# Resource: aws_cloudhsm_v2_cluster
+
+Creates an Amazon CloudHSM v2 cluster.
+
+For information about CloudHSM v2, see the
+[AWS CloudHSM User Guide][1] and the [Amazon
+CloudHSM API Reference][2].
+
+~> **NOTE:** A CloudHSM Cluster can take several minutes to set up.
+No attribute other than `tags` can practically be updated in place.
+If you need to delete a cluster, remove its HSM modules first.
+To initialize the cluster, add an HSM instance to it, then sign the CSR and upload it.
+
+## Example Usage
+
+The following example creates a CloudHSM cluster.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformCount, Fn, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudhsm_v2_cluster import CloudhsmV2Cluster
+from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones
+from imports.aws.provider import AwsProvider
+from imports.aws.subnet import Subnet
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            region=aws_region.string_value
+        )
+        cloudhsm_v2_vpc = Vpc(self, "cloudhsm_v2_vpc",
+            cidr_block="10.0.0.0/16",
+            tags={
+                "Name": "example-aws_cloudhsm_v2_cluster"
+            }
+        )
+        available = DataAwsAvailabilityZones(self, "available")
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        cloudhsm_v2_subnets_count = TerraformCount.of(Token.as_number("2"))
+        cloudhsm_v2_subnets = Subnet(self, "cloudhsm_v2_subnets",
+            availability_zone=Token.as_string(
+                Fn.element(available.names,
+                    Token.as_number(cloudhsm_v2_subnets_count.index))),
+            cidr_block=Token.as_string(
+                Fn.element(subnets.value, Token.as_number(cloudhsm_v2_subnets_count.index))),
+            map_public_ip_on_launch=False,
+            tags={
+                "Name": "example-aws_cloudhsm_v2_cluster"
+            },
+            vpc_id=cloudhsm_v2_vpc.id,
+            count=cloudhsm_v2_subnets_count
+        )
+        CloudhsmV2Cluster(self, "cloudhsm_v2_cluster",
+            hsm_type="hsm1.medium",
+            subnet_ids=Token.as_list(property_access(cloudhsm_v2_subnets, ["*", "id"])),
+            tags={
+                "Name": "example-aws_cloudhsm_v2_cluster"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `source_backup_identifier` - (Optional) ID of Cloud HSM v2 cluster backup to be restored.
+* `hsm_type` - (Required) The type of HSM module in the cluster. Currently, only `hsm1.medium` is supported.
+* `subnet_ids` - (Required) The IDs of subnets in which the cluster will operate.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `cluster_id` - The id of the CloudHSM cluster.
+* `cluster_state` - The state of the CloudHSM cluster.
+* `vpc_id` - The id of the VPC that the CloudHSM cluster resides in.
+* `security_group_id` - The ID of the security group associated with the CloudHSM cluster.
+* `cluster_certificates` - The list of cluster certificates.
+    * `cluster_certificates.0.cluster_certificate` - The cluster certificate issued (signed) by the issuing certificate authority (CA) of the cluster's owner.
+    * `cluster_certificates.0.cluster_csr` - The certificate signing request (CSR). Available only in `UNINITIALIZED` state after an HSM instance is added to the cluster.
+    * `cluster_certificates.0.aws_hardware_certificate` - The HSM hardware certificate issued (signed) by AWS CloudHSM.
+    * `cluster_certificates.0.hsm_certificate` - The HSM certificate issued (signed) by the HSM hardware.
+    * `cluster_certificates.0.manufacturer_hardware_certificate` - The HSM hardware certificate issued (signed) by the hardware manufacturer.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+[1]: https://docs.aws.amazon.com/cloudhsm/latest/userguide/introduction.html
+[2]: https://docs.aws.amazon.com/cloudhsm/latest/APIReference/Welcome.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudHSM v2 Clusters using the cluster `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudHSM v2 Clusters using the cluster `id`. For example:
+
+```console
+% terraform import aws_cloudhsm_v2_cluster.test_cluster cluster-aeb282a201
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudhsm_v2_hsm.html.markdown b/website/docs/cdktf/python/r/cloudhsm_v2_hsm.html.markdown
new file mode 100644
index 00000000000..35e655b0662
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudhsm_v2_hsm.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "CloudHSM"
+layout: "aws"
+page_title: "AWS: aws_cloudhsm_v2_hsm"
+description: |-
+  Provides a CloudHSM v2 HSM module resource.
+---
+
+
+# Resource: aws_cloudhsm_v2_hsm
+
+Creates an HSM module in an Amazon CloudHSM v2 cluster.
+
+## Example Usage
+
+The following example creates an HSM module in a CloudHSM cluster.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
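+# NOTE: "cloudhsm_cluster_id" below is assumed to be a Terraform variable (or
+# similar input) holding the ID of an existing CloudHSM v2 cluster.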
+#
+from imports.aws.cloudhsm_v2_hsm import CloudhsmV2Hsm
+from imports.aws.data_aws_cloudhsm_v2_cluster import DataAwsCloudhsmV2Cluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        cluster = DataAwsCloudhsmV2Cluster(self, "cluster",
+            cluster_id=cloudhsm_cluster_id.string_value
+        )
+        CloudhsmV2Hsm(self, "cloudhsm_v2_hsm",
+            cluster_id=Token.as_string(cluster.cluster_id),
+            subnet_id=Token.as_string(property_access(cluster.subnet_ids, ["0"]))
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+~> **NOTE:** Either `subnet_id` or `availability_zone` must be specified.
+
+* `cluster_id` - (Required) The ID of the CloudHSM v2 cluster to which the HSM will be added.
+* `subnet_id` - (Optional) The ID of the subnet in which the HSM module will be located. Conflicts with `availability_zone`.
+* `availability_zone` - (Optional) The ID of the Availability Zone in which the HSM module will be located. Conflicts with `subnet_id`.
+* `ip_address` - (Optional) The IP address of the HSM module. Must be within the CIDR of the selected subnet.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `hsm_id` - The id of the HSM module.
+* `hsm_state` - The state of the HSM module.
+* `hsm_eni_id` - The id of the elastic network interface (ENI) allocated for the HSM module.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import HSM modules using their HSM ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import HSM modules using their HSM ID. For example:
+
+```console
+% terraform import aws_cloudhsm_v2_hsm.bar hsm-quo8dahtaca
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudsearch_domain.html.markdown b/website/docs/cdktf/python/r/cloudsearch_domain.html.markdown
new file mode 100644
index 00000000000..f811b51e505
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudsearch_domain.html.markdown
@@ -0,0 +1,133 @@
+---
+subcategory: "CloudSearch"
+layout: "aws"
+page_title: "AWS: aws_cloudsearch_domain"
+description: |-
+  Provides a CloudSearch domain resource.
+---
+
+
+# Resource: aws_cloudsearch_domain
+
+Provides a CloudSearch domain resource.
+
+Terraform waits for the domain to become `Active` when applying a configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
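+# The domain below defines two index fields: a searchable, sortable text field
+# ("headline") and a facetable double field ("price") populated from "headline"
+# via source_fields.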
+#
+from imports.aws.cloudsearch_domain import CloudsearchDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudsearchDomain(self, "example",
+            index_field=[CloudsearchDomainIndexField(
+                analysis_scheme="_en_default_",
+                highlight=False,
+                name="headline",
+                return=True,
+                search=True,
+                sort=True,
+                type="text"
+            ), CloudsearchDomainIndexField(
+                facet=True,
+                name="price",
+                return=True,
+                search=True,
+                sort=True,
+                source_fields="headline",
+                type="double"
+            )
+            ],
+            name="example-domain",
+            scaling_parameters=CloudsearchDomainScalingParameters(
+                desired_instance_type="search.medium"
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `endpoint_options` - (Optional) Domain endpoint options. Documented below.
+* `index_field` - (Optional) The index fields for documents added to the domain. Documented below.
+* `multi_az` - (Optional) Whether or not to maintain extra instances for the domain in a second Availability Zone to ensure high availability.
+* `name` - (Required) The name of the CloudSearch domain.
+* `scaling_parameters` - (Optional) Domain scaling parameters. Documented below.
+
+### endpoint_options
+
+This configuration block supports the following attributes:
+
+* `enforce_https` - (Optional) Enables or disables the requirement that all requests to the domain arrive over HTTPS.
+* `tls_security_policy` - (Optional) The minimum required TLS version. See the [AWS documentation](https://docs.aws.amazon.com/cloudsearch/latest/developerguide/API_DomainEndpointOptions.html) for valid values.
+
+### scaling_parameters
+
+This configuration block supports the following attributes:
+
+* `desired_instance_type` - (Optional) The instance type that you want to preconfigure for your domain. See the [AWS documentation](https://docs.aws.amazon.com/cloudsearch/latest/developerguide/API_ScalingParameters.html) for valid values.
+* `desired_partition_count` - (Optional) The number of partitions you want to preconfigure for your domain. Only valid when you select `search.2xlarge` as the instance type.
+* `desired_replication_count` - (Optional) The number of replicas you want to preconfigure for each index partition.
+
+### index_field
+
+This configuration block supports the following attributes:
+
+* `name` - (Required) A unique name for the field. Field names must begin with a letter and be at least 3 and no more than 64 characters long. The allowed characters are: `a`-`z` (lower-case letters), `0`-`9`, and `_` (underscore). The name `score` is reserved and cannot be used as a field name.
+* `type` - (Required) The field type. Valid values: `date`, `date-array`, `double`, `double-array`, `int`, `int-array`, `literal`, `literal-array`, `text`, `text-array`.
+* `analysis_scheme` - (Optional) The analysis scheme you want to use for a `text` field. The analysis scheme specifies the language-specific text processing options that are used during indexing.
+* `default_value` - (Optional) The default value for the field. This value is used when no value is specified for the field in the document data.
+* `facet` - (Optional) Whether facet information can be returned for the field.
+* `highlight` - (Optional) Whether highlights can be returned for the field.
+* `return` - (Optional) Whether the contents of the field can be returned in the search results.
+* `search` - (Optional) Whether the contents of the field are searchable.
+* `sort` - (Optional) Whether the field can be used to sort the search results.
+* `source_fields` - (Optional) A comma-separated list of source fields to map to the field. Specifying a source field copies data from one field to another, enabling you to use the same source data in different ways by configuring different options for the fields.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The domain's ARN.
+* `document_service_endpoint` - The service endpoint for updating documents in a search domain.
+* `domain_id` - An internally generated unique identifier for the domain.
+* `search_service_endpoint` - The service endpoint for requesting search results from a search domain.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudSearch Domains using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudSearch Domains using the `name`. For example:
+
+```console
+% terraform import aws_cloudsearch_domain.example example-domain
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudsearch_domain_service_access_policy.html.markdown b/website/docs/cdktf/python/r/cloudsearch_domain_service_access_policy.html.markdown
new file mode 100644
index 00000000000..b8a90a8254d
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudsearch_domain_service_access_policy.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "CloudSearch"
+layout: "aws"
+page_title: "AWS: aws_cloudsearch_domain_service_access_policy"
+description: |-
+  Provides a CloudSearch domain service access policy resource.
+---
+
+
+# Resource: aws_cloudsearch_domain_service_access_policy
+
+Provides a CloudSearch domain service access policy resource.
+
+Terraform waits for the domain service access policy to become `Active` when applying a configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
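+# The policy document below grants search and document access, restricted by
+# source IP to 192.0.2.0/32.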
+#
+from imports.aws.cloudsearch_domain import CloudsearchDomain
+from imports.aws.cloudsearch_domain_service_access_policy import CloudsearchDomainServiceAccessPolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudsearchDomain(self, "example",
+            name="example-domain"
+        )
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_1",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["cloudsearch:search", "cloudsearch:document"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="IpAddress",
+                    values=["192.0.2.0/32"],
+                    variable="aws:SourceIp"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="*"
+                )
+                ],
+                sid="search_only"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_cloudsearch_domain_service_access_policy_example = CloudsearchDomainServiceAccessPolicy(self, "example_2",
+            access_policy=Token.as_string(data_aws_iam_policy_document_example.json),
+            domain_name=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudsearch_domain_service_access_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `access_policy` - (Required) The access rules you want to configure. These rules replace any existing rules. See the [AWS documentation](https://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-access.html) for details.
+* `domain_name` - (Required) The CloudSearch domain name the policy applies to.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `20m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudSearch domain service access policies using the domain name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudSearch domain service access policies using the domain name. For example:
+
+```console
+% terraform import aws_cloudsearch_domain_service_access_policy.example example-domain
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudtrail.html.markdown b/website/docs/cdktf/python/r/cloudtrail.html.markdown
new file mode 100644
index 00000000000..4d762c86fb2
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudtrail.html.markdown
@@ -0,0 +1,448 @@
+---
+subcategory: "CloudTrail"
+layout: "aws"
+page_title: "AWS: aws_cloudtrail"
+description: |-
+  Provides a CloudTrail resource.
+---
+
+
+
+# Resource: aws_cloudtrail
+
+Provides a CloudTrail resource.
+
+-> **Tip:** For a multi-region trail, this resource must be in the home region of the trail.
+
+-> **Tip:** For an organization trail, this resource must be in the master account of the organization.
+
+## Example Usage
+
+### Basic
+
+Enable CloudTrail to capture all compatible management events in the region.
+For capturing events from services like IAM, `include_global_service_events` must be enabled.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail import Cloudtrail
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.data_aws_partition import DataAwsPartition
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_policy import S3BucketPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="tf-test-trail",
+            force_destroy=True
+        )
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_partition_current = DataAwsPartition(self, "current_2")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_partition_current.override_logical_id("current")
+        data_aws_region_current = DataAwsRegion(self, "current_3")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_region_current.override_logical_id("current")
+        aws_cloudtrail_example = Cloudtrail(self, "example_4",
+            include_global_service_events=False,
+            name="example",
+            s3_bucket_name=example.id,
+            s3_key_prefix="prefix"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_cloudtrail_example.override_logical_id("example") + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_5", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:GetBucketAcl"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=["arn:${" + data_aws_partition_current.partition + "}:cloudtrail:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:trail/example" + ], + variable="aws:SourceArn" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["cloudtrail.amazonaws.com"], + type="Service" + ) + ], + resources=[example.arn], + sid="AWSCloudTrailAclCheck" + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:PutObject"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=["bucket-owner-full-control"], + variable="s3:x-amz-acl" + ), DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=["arn:${" + data_aws_partition_current.partition + "}:cloudtrail:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:trail/example" + ], + variable="aws:SourceArn" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["cloudtrail.amazonaws.com"], + type="Service" + ) + ], + resources=["${" + example.arn + "}/prefix/AWSLogs/${" + current.account_id + "}/*" + ], + sid="AWSCloudTrailWrite" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_s3_bucket_policy_example = S3BucketPolicy(self, "example_6", + bucket=example.id, + policy=Token.as_string(data_aws_iam_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_policy_example.override_logical_id("example") +``` + +### Data Event Logging + +CloudTrail can log [Data Events](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html) for certain services such as S3 objects and Lambda function invocations. Additional information about data event configuration can be found in the following links: + +* [CloudTrail API DataResource documentation](https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_DataResource.html) (for basic event selector). +* [CloudTrail API AdvancedFieldSelector documentation](https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_AdvancedFieldSelector.html) (for advanced event selector). + +#### Logging All Lambda Function Invocations By Using Basic Event Selectors + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.cloudtrail import Cloudtrail
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, s3_bucket_name):
+        super().__init__(scope, id)
+        Cloudtrail(self, "example",
+            event_selector=[CloudtrailEventSelector(
+                data_resource=[CloudtrailEventSelectorDataResource(
+                    type="AWS::Lambda::Function",
+                    values=["arn:aws:lambda"]
+                )
+                ],
+                include_management_events=True,
+                read_write_type="All"
+            )
+            ],
+            name=name,
+            s3_bucket_name=s3_bucket_name
+        )
+```
+
+#### Logging All S3 Object Events By Using Basic Event Selectors
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail import Cloudtrail
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, s3_bucket_name):
+        super().__init__(scope, id)
+        Cloudtrail(self, "example",
+            event_selector=[CloudtrailEventSelector(
+                data_resource=[CloudtrailEventSelectorDataResource(
+                    type="AWS::S3::Object",
+                    values=["arn:aws:s3"]
+                )
+                ],
+                include_management_events=True,
+                read_write_type="All"
+            )
+            ],
+            name=name,
+            s3_bucket_name=s3_bucket_name
+        )
+```
+
+#### Logging Individual S3 Bucket Events By Using Basic Event Selectors
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail import Cloudtrail
+from imports.aws.data_aws_s3_bucket import DataAwsS3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, s3_bucket_name):
+        super().__init__(scope, id)
+        important_bucket = DataAwsS3Bucket(self, "important-bucket",
+            bucket="important-bucket"
+        )
+        Cloudtrail(self, "example",
+            event_selector=[CloudtrailEventSelector(
+                data_resource=[CloudtrailEventSelectorDataResource(
+                    type="AWS::S3::Object",
+                    values=["${" + important_bucket.arn + "}/"]
+                )
+                ],
+                include_management_events=True,
+                read_write_type="All"
+            )
+            ],
+            name=name,
+            s3_bucket_name=s3_bucket_name
+        )
+```
+
+#### Logging All S3 Object Events Except For Two S3 Buckets By Using Advanced Event Selectors
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail import Cloudtrail
+from imports.aws.data_aws_s3_bucket import DataAwsS3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, s3_bucket_name):
+        super().__init__(scope, id)
+        not_important_bucket1 = DataAwsS3Bucket(self, "not-important-bucket-1",
+            bucket="not-important-bucket-1"
+        )
+        not_important_bucket2 = DataAwsS3Bucket(self, "not-important-bucket-2",
+            bucket="not-important-bucket-2"
+        )
+        Cloudtrail(self, "example",
+            advanced_event_selector=[CloudtrailAdvancedEventSelector(
+                field_selector=[CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["Data"],
+                    field="eventCategory"
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    field="resources.ARN",
+                    not_starts_with=["${" + not_important_bucket1.arn + "}/", "${" + not_important_bucket2.arn + "}/"
+                    ]
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["AWS::S3::Object"],
+                    field="resources.type"
+                )
+                ],
+                name="Log all S3 objects events except for two S3 buckets"
+            ), CloudtrailAdvancedEventSelector(
+                field_selector=[CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["Management"],
+                    field="eventCategory"
+                )
+                ],
+                name="Log readOnly and writeOnly management events"
+            )
+            ],
+            name=name,
+            s3_bucket_name=s3_bucket_name
+        )
+```
+
+#### Logging Individual S3 Buckets And Specific Event Names By Using Advanced Event Selectors
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail import Cloudtrail
+from imports.aws.data_aws_s3_bucket import DataAwsS3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, s3_bucket_name):
+        super().__init__(scope, id)
+        important_bucket1 = DataAwsS3Bucket(self, "important-bucket-1",
+            bucket="important-bucket-1"
+        )
+        important_bucket2 = DataAwsS3Bucket(self, "important-bucket-2",
+            bucket="important-bucket-2"
+        )
+        important_bucket3 = DataAwsS3Bucket(self, "important-bucket-3",
+            bucket="important-bucket-3"
+        )
+        Cloudtrail(self, "example",
+            advanced_event_selector=[CloudtrailAdvancedEventSelector(
+                field_selector=[CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["Data"],
+                    field="eventCategory"
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["PutObject", "DeleteObject"],
+                    field="eventName"
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    field="resources.ARN",
+                    starts_with=["${" + important_bucket1.arn + "}/", "${" + important_bucket2.arn + "}/"
+                    ]
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["false"],
+                    field="readOnly"
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["AWS::S3::Object"],
+                    field="resources.type"
+                )
+                ],
+                name="Log PutObject and DeleteObject events for two S3 buckets"
+            ), CloudtrailAdvancedEventSelector(
+                field_selector=[CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["Data"],
+                    field="eventCategory"
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    field="eventName",
+                    starts_with=["Delete"]
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["${" + important_bucket3.arn + "}/important-prefix"],
+                    field="resources.ARN"
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["false"],
+                    field="readOnly"
+                ), CloudtrailAdvancedEventSelectorFieldSelector(
+                    equal_to=["AWS::S3::Object"],
+                    field="resources.type"
+                )
+                ],
+                name="Log Delete* events for one S3 bucket"
+            )
+            ],
+            name=name,
+            s3_bucket_name=s3_bucket_name
+        )
+```
+
+#### Sending Events to CloudWatch Logs
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail import Cloudtrail
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, s3_bucket_name):
+        super().__init__(scope, id)
+        example = CloudwatchLogGroup(self, "example",
+            name="Example"
+        )
+        aws_cloudtrail_example = Cloudtrail(self, "example_1",
+            cloud_watch_logs_group_arn="${" + example.arn + "}:*",
+            name=name,
+            s3_bucket_name=s3_bucket_name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudtrail_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the trail.
+* `s3_bucket_name` - (Required) Name of the S3 bucket designated for publishing log files.
+
+The following arguments are optional:
+
+* `advanced_event_selector` - (Optional) Specifies an advanced event selector for enabling data event logging. Fields documented below. Conflicts with `event_selector`.
+* `cloud_watch_logs_group_arn` - (Optional) Log group name using an ARN that represents the log group to which CloudTrail logs will be delivered. Note that CloudTrail requires the Log Stream wildcard.
+* `cloud_watch_logs_role_arn` - (Optional) Role for the CloudWatch Logs endpoint to assume to write to a user’s log group.
+* `enable_log_file_validation` - (Optional) Whether log file integrity validation is enabled. Defaults to `false`.
+* `enable_logging` - (Optional) Enables logging for the trail. Defaults to `true`. Setting this to `false` will pause logging.
+* `event_selector` - (Optional) Specifies an event selector for enabling data event logging. Fields documented below. Please note the [CloudTrail limits](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/WhatIsCloudTrail-Limits.html) when configuring these. Conflicts with `advanced_event_selector`.
+* `include_global_service_events` - (Optional) Whether the trail is publishing events from global services such as IAM to the log files. Defaults to `true`.
+* `insight_selector` - (Optional) Configuration block for identifying unusual operational activity. See details below.
+* `is_multi_region_trail` - (Optional) Whether the trail is created in the current region or in all regions. Defaults to `false`.
+* `is_organization_trail` - (Optional) Whether the trail is an AWS Organizations trail. Organization trails log events for the master account and all member accounts. Can only be created in the organization master account. Defaults to `false`.
+* `kms_key_id` - (Optional) KMS key ARN to use to encrypt the logs delivered by CloudTrail.
+* `s3_key_prefix` - (Optional) S3 key prefix that follows the name of the bucket you have designated for log file delivery.
+* `sns_topic_name` - (Optional) Name of the Amazon SNS topic defined for notification of log file delivery.
+* `tags` - (Optional) Map of tags to assign to the trail. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### event_selector
+
+* `data_resource` - (Optional) Configuration block for data events. See details below.
+* `exclude_management_event_sources` (Optional) - A set of event sources to exclude. Valid values include: `kms.amazonaws.com` and `rdsdata.amazonaws.com`. `include_management_events` must be set to `true` to allow this.
+* `include_management_events` - (Optional) Whether to include management events for your trail. Defaults to `true`.
+* `read_write_type` - (Optional) Type of events to log. Valid values are `ReadOnly`, `WriteOnly`, `All`. Default value is `All`.
+
+#### data_resource
+
+* `type` - (Required) Resource type in which you want to log data events. You can specify only the following values: "AWS::S3::Object", "AWS::Lambda::Function" and "AWS::DynamoDB::Table".
+* `values` - (Required) List of ARN strings or partial ARN strings to specify selectors for data audit events over data resources. ARN list is specific to single-valued `type`. For example, `arn:aws:s3:::<bucket_name>/` for all objects in a bucket, `arn:aws:s3:::<bucket_name>/key` for specific objects, `arn:aws:lambda` for all Lambda events within an account, `arn:aws:lambda:<region>:<account_number>:function:<function_name>` for a specific Lambda function, `arn:aws:dynamodb` for all DynamoDB events for all tables within an account, or `arn:aws:dynamodb:<region>:<account_number>:table/<table_name>` for a specific DynamoDB table.
+
+### insight_selector
+
+* `insight_type` - (Optional) Type of insights to log on a trail. Valid values are: `ApiCallRateInsight` and `ApiErrorRateInsight`.
+
+### Advanced Event Selector Arguments
+
+* `field_selector` (Required) - Specifies the selector statements in an advanced event selector. Fields documented below.
+* `name` (Optional) - Name of the advanced event selector.
+
+#### Field Selector Arguments
+
+* `field` (Required) - Field in an event record on which to filter events to be logged. You can specify only the following values: `readOnly`, `eventSource`, `eventName`, `eventCategory`, `resources.type`, `resources.ARN`.
+* `ends_with` (Optional) - A list of values that includes events that match the last few characters of the event record field specified as the value of `field`.
+* `equals` (Optional) - A list of values that includes events that match the exact value of the event record field specified as the value of `field`. This is the only valid operator that you can use with the `readOnly`, `eventCategory`, and `resources.type` fields.
+* `not_ends_with` (Optional) - A list of values that excludes events that match the last few characters of the event record field specified as the value of `field`.
+* `not_equals` (Optional) - A list of values that excludes events that match the exact value of the event record field specified as the value of `field`.
+* `not_starts_with` (Optional) - A list of values that excludes events that match the first few characters of the event record field specified as the value of `field`.
+* `starts_with` (Optional) - A list of values that includes events that match the first few characters of the event record field specified as the value of `field`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the trail.
+* `home_region` - Region in which the trail was created.
+* `id` - Name of the trail.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudTrail trails using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudTrail trails using the `name`. For example:
+
+```console
+% terraform import aws_cloudtrail.sample my-sample-trail
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudtrail_event_data_store.html.markdown b/website/docs/cdktf/python/r/cloudtrail_event_data_store.html.markdown
new file mode 100644
index 00000000000..35ca62bc73e
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudtrail_event_data_store.html.markdown
@@ -0,0 +1,148 @@
+---
+subcategory: "CloudTrail"
+layout: "aws"
+page_title: "AWS: aws_cloudtrail_event_data_store"
+description: |-
+  Provides a CloudTrail Event Data Store resource.
+---
+
+
+
+# Resource: aws_cloudtrail_event_data_store
+
+Provides a CloudTrail Event Data Store.
+
+More information about event data stores can be found in the [Event Data Store User Guide](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/query-event-data-store.html).
+
+-> **Tip:** For an organization event data store, you must create this resource in the management account.
+
+## Example Usage
+
+### Basic
+
+The simplest event data store configuration requires only the `name` attribute. The event data store will automatically capture all management events. To capture management events from all regions, `multi_region_enabled` must be `true`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail_event_data_store import CloudtrailEventDataStore
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudtrailEventDataStore(self, "example",
+            name="example-event-data-store"
+        )
+```
+
+### Data Event Logging
+
+CloudTrail can log [Data Events](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html) for certain services such as S3 bucket objects and Lambda function invocations. Additional information about data event configuration can be found in the following links:
+
+- [CloudTrail API AdvancedFieldSelector documentation](https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_AdvancedFieldSelector.html)
+
+#### Log all DynamoDB PutEvent actions for a specific DynamoDB table
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudtrail_event_data_store import CloudtrailEventDataStore
+from imports.aws.data_aws_dynamodb_table import DataAwsDynamodbTable
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name):
+        super().__init__(scope, id)
+        table = DataAwsDynamodbTable(self, "table",
+            name="not-important-dynamodb-table"
+        )
+        CloudtrailEventDataStore(self, "example",
+            advanced_event_selector=[CloudtrailEventDataStoreAdvancedEventSelector(
+                field_selector=[CloudtrailEventDataStoreAdvancedEventSelectorFieldSelector(
+                    equal_to=["Data"],
+                    field="eventCategory"
+                ), CloudtrailEventDataStoreAdvancedEventSelectorFieldSelector(
+                    equal_to=["AWS::DynamoDB::Table"],
+                    field="resources.type"
+                ), CloudtrailEventDataStoreAdvancedEventSelectorFieldSelector(
+                    equal_to=["PutItem"],
+                    field="eventName"
+                ), CloudtrailEventDataStoreAdvancedEventSelectorFieldSelector(
+                    equal_to=[Token.as_string(table.arn)],
+                    field="resources.ARN"
+                )
+                ],
+                name="Log all DynamoDB PutEvent actions for a specific DynamoDB table"
+            )
+            ],
+            name=name
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `name` - (Required) The name of the event data store.
+- `advanced_event_selector` - (Optional) The advanced event selectors to use to select the events for the data store. For more information about how to use advanced event selectors, see [Log events by using advanced event selectors](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html#creating-data-event-selectors-advanced) in the CloudTrail User Guide.
+- `multi_region_enabled` - (Optional) Specifies whether the event data store includes events from all regions, or only from the region in which the event data store is created. Default: `true`.
+- `organization_enabled` - (Optional) Specifies whether an event data store collects events logged for an organization in AWS Organizations. Default: `false`.
+- `retention_period` - (Optional) The retention period of the event data store, in days. You can set a retention period of up to 2555 days, the equivalent of seven years. Default: `2555`.
+- `kms_key_id` - (Optional) Specifies the AWS KMS key ID to use to encrypt the events delivered by CloudTrail. The value can be an alias name prefixed by `alias/`, a fully specified ARN to an alias, a fully specified ARN to a key, or a globally unique identifier.
+- `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+- `termination_protection_enabled` - (Optional) Specifies whether termination protection is enabled for the event data store. If termination protection is enabled, you cannot delete the event data store until termination protection is disabled. Default: `true`.
+
+### Advanced Event Selector Arguments
+
+`advanced_event_selector` supports the following arguments:
+
+- `name` (Optional) - Specifies the name of the advanced event selector.
+- `field_selector` (Required) - Specifies the selector statements in an advanced event selector. Fields documented below.
+
+#### Field Selector Arguments
+
+`field_selector` supports the following arguments:
+
+- `field` (Required) - Specifies a field in an event record on which to filter events to be logged.
You can specify only the following values: `readOnly`, `eventSource`, `eventName`, `eventCategory`, `resources.type`, `resources.ARN`. +- `equals` (Optional) - A list of values that includes events that match the exact value of the event record field specified as the value of `field`. This is the only valid operator that you can use with the `readOnly`, `eventCategory`, and `resources.type` fields. +- `not_equals` (Optional) - A list of values that excludes events that match the exact value of the event record field specified as the value of `field`. +- `starts_with` (Optional) - A list of values that includes events that match the first few characters of the event record field specified as the value of `field`. +- `not_starts_with` (Optional) - A list of values that excludes events that match the first few characters of the event record field specified as the value of `field`. +- `ends_with` (Optional) - A list of values that includes events that match the last few characters of the event record field specified as the value of `field`. +- `not_ends_with` (Optional) - A list of values that excludes events that match the last few characters of the event record field specified as the value of `field`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +- `arn` - ARN of the event data store. +- `id` - Name of the event data store. +- `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import event data stores using their `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import event data stores using their `arn`. For example: + +```console +% terraform import aws_cloudtrail_event_data_store.example arn:aws:cloudtrail:us-east-1:123456789123:eventdatastore/22333815-4414-412c-b155-dd254033gfhf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudwatch_composite_alarm.html.markdown b/website/docs/cdktf/python/r/cloudwatch_composite_alarm.html.markdown new file mode 100644 index 00000000000..7cf1edba786 --- /dev/null +++ b/website/docs/cdktf/python/r/cloudwatch_composite_alarm.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "CloudWatch" +layout: "aws" +page_title: "AWS: aws_cloudwatch_composite_alarm" +description: |- + Provides a CloudWatch Composite Alarm resource. +--- + + + +# Resource: aws_cloudwatch_composite_alarm + +Provides a CloudWatch Composite Alarm resource. + +~> **NOTE:** An alarm (composite or metric) cannot be destroyed when there are other composite alarms depending on it. This can lead to a cyclical dependency on update, as Terraform will unsuccessfully attempt to destroy alarms before updating the rule. Consider using `depends_on`, references to alarm names, and two-stage updates. 
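+
+A minimal sketch of the `depends_on` approach mentioned above, assuming two hypothetical metric alarms named `alpha` and `bravo` (this sketch is not generated by `cdktf convert`):
+
+```python
+# Hypothetical sketch: make the composite alarm depend on its member alarms
+# so Terraform destroys the composite alarm before attempting to destroy the members.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
+from imports.aws.cloudwatch_composite_alarm import CloudwatchCompositeAlarm
+class CompositeAlarmSketch(TerraformStack):
+    def __init__(self, scope, id):
+        super().__init__(scope, id)
+        alpha = CloudwatchMetricAlarm(self, "alpha",
+            alarm_name="alpha",
+            comparison_operator="GreaterThanThreshold",
+            evaluation_periods=1,
+            metric_name="CPUUtilization",
+            namespace="AWS/EC2",
+            period=300,
+            statistic="Average",
+            threshold=80
+        )
+        bravo = CloudwatchMetricAlarm(self, "bravo",
+            alarm_name="bravo",
+            comparison_operator="GreaterThanThreshold",
+            evaluation_periods=1,
+            metric_name="StatusCheckFailed",
+            namespace="AWS/EC2",
+            period=300,
+            statistic="Maximum",
+            threshold=0
+        )
+        CloudwatchCompositeAlarm(self, "composite",
+            alarm_name="example-composite-alarm",
+            alarm_rule="ALARM(" + alpha.alarm_name + ") OR ALARM(" + bravo.alarm_name + ")",
+            # Explicit dependency: the composite alarm is destroyed first on teardown.
+            depends_on=[alpha, bravo]
+        )
+```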
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_composite_alarm import CloudwatchCompositeAlarm +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudwatchCompositeAlarm(self, "example", + alarm_actions=Token.as_list(aws_sns_topic_example.arn), + alarm_description="This is a composite alarm!", + alarm_name="example-composite-alarm", + alarm_rule="ALARM(${" + alpha.alarm_name + "}) OR\nALARM(${" + bravo.alarm_name + "})\n\n", + ok_actions=Token.as_list(aws_sns_topic_example.arn) + ) +``` + +## Argument Reference + +* `actions_enabled` - (Optional, Forces new resource) Indicates whether actions should be executed during any changes to the alarm state of the composite alarm. Defaults to `true`. +* `alarm_actions` - (Optional) The set of actions to execute when this alarm transitions to the `ALARM` state from any other state. Each action is specified as an ARN. Up to 5 actions are allowed. +* `alarm_description` - (Optional) The description for the composite alarm. +* `alarm_name` - (Required) The name for the composite alarm. This name must be unique within the region. +* `alarm_rule` - (Required) An expression that specifies which other alarms are to be evaluated to determine this composite alarm's state. For syntax, see [Creating a Composite Alarm](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Create_Composite_Alarm.html). The maximum length is 10240 characters. +* `insufficient_data_actions` - (Optional) The set of actions to execute when this alarm transitions to the `INSUFFICIENT_DATA` state from any other state. Each action is specified as an ARN. Up to 5 actions are allowed. +* `ok_actions` - (Optional) The set of actions to execute when this alarm transitions to an `OK` state from any other state. Each action is specified as an ARN. Up to 5 actions are allowed. +* `tags` - (Optional) A map of tags to associate with the alarm. Up to 50 tags are allowed. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the composite alarm. +* `id` - The ID of the composite alarm resource, which is equivalent to its `alarm_name`. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a CloudWatch Composite Alarm using the `alarm_name`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a CloudWatch Composite Alarm using the `alarm_name`. For example: + +```console +% terraform import aws_cloudwatch_composite_alarm.test my-alarm +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudwatch_dashboard.html.markdown b/website/docs/cdktf/python/r/cloudwatch_dashboard.html.markdown new file mode 100644 index 00000000000..b2ce353275c --- /dev/null +++ b/website/docs/cdktf/python/r/cloudwatch_dashboard.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "CloudWatch" +layout: "aws" +page_title: "AWS: aws_cloudwatch_dashboard" +description: |- + Provides a CloudWatch Dashboard resource. +--- + + + +# Resource: aws_cloudwatch_dashboard + +Provides a CloudWatch Dashboard resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_dashboard import CloudwatchDashboard +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudwatchDashboard(self, "main", + dashboard_body=Token.as_string( + Fn.jsonencode({ + "widgets": [{ + "height": 6, + "properties": { + "metrics": [["AWS/EC2", "CPUUtilization", "InstanceId", "i-012345"] + ], + "period": 300, + "region": "us-east-1", + "stat": "Average", + "title": "EC2 Instance CPU" + }, + "type": "metric", + "width": 12, + "x": 0, + "y": 0 + }, { + "height": 3, + "properties": { + "markdown": "Hello world" + }, + "type": "text", + "width": 3, + "x": 0, + "y": 7 + } + ] + })), + dashboard_name="my-dashboard" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dashboard_name` - (Required) The name of the dashboard. +* `dashboard_body` - (Required) The detailed information about the dashboard, including what widgets are included and their location on the dashboard. You can read more about the body structure in the [documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/CloudWatch-Dashboard-Body-Structure.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `dashboard_arn` - The Amazon Resource Name (ARN) of the dashboard. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch dashboards using the `dashboard_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch dashboards using the `dashboard_name`. 
For example:
+
+```console
+% terraform import aws_cloudwatch_dashboard.sample dashboard_name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_event_api_destination.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_api_destination.html.markdown
new file mode 100644
index 00000000000..1686087004c
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_event_api_destination.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_api_destination"
+description: |-
+  Provides an EventBridge event API Destination resource.
+---
+
+
+
+# Resource: aws_cloudwatch_event_api_destination
+
+Provides an EventBridge event API Destination resource.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_api_destination import CloudwatchEventApiDestination
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudwatchEventApiDestination(self, "test",
+            connection_arn=Token.as_string(aws_cloudwatch_event_connection_test.arn),
+            description="An API Destination",
+            http_method="POST",
+            invocation_endpoint="https://api.destination.com/endpoint",
+            invocation_rate_limit_per_second=20,
+            name="api-destination"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the new API Destination. The name must be unique for your account. Maximum of 64 characters consisting of numbers, lower/upper case letters, `.`, `-`, `_`.
+* `description` - (Optional) The description of the new API Destination. Maximum of 512 characters.
+* `invocation_endpoint` - (Required) URL endpoint to invoke as a target. This could be a valid endpoint generated by a partner service. You can include `*` as a path parameter wildcard to be set from the Target HttpParameters.
+* `http_method` - (Required) The HTTP method to use for the invocation endpoint, such as `GET`, `POST`, or `PUT`.
+* `invocation_rate_limit_per_second` - (Optional) The maximum number of invocations per second to allow for this destination. Must be greater than 0. Defaults to `300`.
+* `connection_arn` - (Required) ARN of the EventBridge Connection to use for the API Destination.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the event API Destination.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge API Destinations using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EventBridge API Destinations using the `name`.
For example: + +```console +% terraform import aws_cloudwatch_event_api_destination.test api-destination +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudwatch_event_archive.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_archive.html.markdown new file mode 100644 index 00000000000..e6f81f4ed4e --- /dev/null +++ b/website/docs/cdktf/python/r/cloudwatch_event_archive.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_archive" +description: |- + Provides an EventBridge event archive resource. +--- + + + +# Resource: aws_cloudwatch_event_archive + +Provides an EventBridge event archive resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_archive import CloudwatchEventArchive +from imports.aws.cloudwatch_event_bus import CloudwatchEventBus +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + order = CloudwatchEventBus(self, "order", + name="orders" + ) + aws_cloudwatch_event_archive_order = CloudwatchEventArchive(self, "order_1", + event_source_arn=order.arn, + name="order-archive" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_archive_order.override_logical_id("order") +``` + +## Example all optional arguments + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_archive import CloudwatchEventArchive +from imports.aws.cloudwatch_event_bus import CloudwatchEventBus +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + order = CloudwatchEventBus(self, "order", + name="orders" + ) + aws_cloudwatch_event_archive_order = CloudwatchEventArchive(self, "order_1", + description="Archived events from order service", + event_pattern=Token.as_string( + Fn.jsonencode({ + "source": ["company.team.order"] + })), + event_source_arn=order.arn, + name="order-archive", + retention_days=7 + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_archive_order.override_logical_id("order") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the new event archive. The archive name cannot exceed 48 characters. +* `event_source_arn` - (Required) Event bus source ARN from where these events should be archived. +* `description` - (Optional) The description of the new event archive. +* `event_pattern` - (Optional) Instructs the new event archive to only capture events matched by this pattern. By default, it attempts to archive every event received in the `event_source_arn`. 
+* `retention_days` - (Optional) The maximum number of days to retain events in the new event archive. By default, it archives indefinitely. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the event archive. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an EventBridge archive using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an EventBridge archive using the `name`. For example: + +```console +% terraform import aws_cloudwatch_event_archive.imported_event_archive order-archive +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudwatch_event_bus.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_bus.html.markdown new file mode 100644 index 00000000000..cbac18850eb --- /dev/null +++ b/website/docs/cdktf/python/r/cloudwatch_event_bus.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_bus" +description: |- + Provides an EventBridge event bus resource. +--- + + + +# Resource: aws_cloudwatch_event_bus + +Provides an EventBridge event bus resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_bus import CloudwatchEventBus +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudwatchEventBus(self, "messenger", + name="chat-messages" + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_bus import CloudwatchEventBus +from imports.aws.data_aws_cloudwatch_event_source import DataAwsCloudwatchEventSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplepartner = DataAwsCloudwatchEventSource(self, "examplepartner", + name_prefix="aws.partner/examplepartner.com" + ) + aws_cloudwatch_event_bus_examplepartner = CloudwatchEventBus(self, "examplepartner_1", + event_source_name=Token.as_string(examplepartner.name), + name=Token.as_string(examplepartner.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_bus_examplepartner.override_logical_id("examplepartner") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the new event bus. The names of custom event buses can't contain the / character. 
To create a partner event bus, ensure the `name` matches the `event_source_name`. +* `event_source_name` (Optional) The partner event source that the new event bus will be matched with. Must match `name`. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the event bus. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge event buses using the `name` (which can also be a partner event source name). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EventBridge event buses using the `name` (which can also be a partner event source name). For example: + +```console +% terraform import aws_cloudwatch_event_bus.messenger chat-messages +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudwatch_event_bus_policy.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_bus_policy.html.markdown new file mode 100644 index 00000000000..aa3c4496578 --- /dev/null +++ b/website/docs/cdktf/python/r/cloudwatch_event_bus_policy.html.markdown @@ -0,0 +1,193 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_bus_policy" +description: |- + Provides a resource to create an EventBridge policy to support cross-account events. +--- + + + +# Resource: aws_cloudwatch_event_bus_policy + +Provides a resource to create an EventBridge resource policy to support cross-account events. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +~> **Note:** The EventBridge bus policy resource (`aws_cloudwatch_event_bus_policy`) is incompatible with the EventBridge permission resource (`aws_cloudwatch_event_permission`) and will overwrite permissions. + +## Example Usage + +### Account Access + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.cloudwatch_event_bus_policy import CloudwatchEventBusPolicy +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = DataAwsIamPolicyDocument(self, "test", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["events:PutEvents"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["123456789012"], + type="AWS" + ) + ], + resources=["arn:aws:events:eu-west-1:123456789012:event-bus/default" + ], + sid="DevAccountAccess" + ) + ] + ) + aws_cloudwatch_event_bus_policy_test = CloudwatchEventBusPolicy(self, "test_1", + event_bus_name=Token.as_string(aws_cloudwatch_event_bus_test.name), + policy=Token.as_string(test.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_bus_policy_test.override_logical_id("test") +``` + +### Organization Access + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_bus_policy import CloudwatchEventBusPolicy +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = DataAwsIamPolicyDocument(self, "test", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["events:DescribeRule", "events:ListRules", "events:ListTargetsByRule", "events:ListTagsForResource" + ], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=[example.id], + variable="aws:PrincipalOrgID" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=["arn:aws:events:eu-west-1:123456789012:rule/*", "arn:aws:events:eu-west-1:123456789012:event-bus/default" + ], + sid="OrganizationAccess" + ) + ] + ) + aws_cloudwatch_event_bus_policy_test = CloudwatchEventBusPolicy(self, "test_1", + event_bus_name=Token.as_string(aws_cloudwatch_event_bus_test.name), + policy=Token.as_string(test.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_bus_policy_test.override_logical_id("test") +``` + +### Multiple Statements + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.cloudwatch_event_bus_policy import CloudwatchEventBusPolicy +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = DataAwsIamPolicyDocument(self, "test", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["events:PutEvents"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["123456789012"], + type="AWS" + ) + ], + resources=["arn:aws:events:eu-west-1:123456789012:event-bus/default" + ], + sid="DevAccountAccess" + ), DataAwsIamPolicyDocumentStatement( + actions=["events:DescribeRule", "events:ListRules", "events:ListTargetsByRule", "events:ListTagsForResource" + ], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=[example.id], + variable="aws:PrincipalOrgID" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=["arn:aws:events:eu-west-1:123456789012:rule/*", "arn:aws:events:eu-west-1:123456789012:event-bus/default" + ], + sid="OrganizationAccess" + ) + ] + ) + aws_cloudwatch_event_bus_policy_test = CloudwatchEventBusPolicy(self, "test_1", + event_bus_name=Token.as_string(aws_cloudwatch_event_bus_test.name), + policy=Token.as_string(test.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_bus_policy_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The text of the policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `event_bus_name` - (Optional) The name of the event bus to set the permissions on. + If you omit this, the permissions are set on the `default` event bus. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the EventBridge event bus. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an EventBridge policy using the `event_bus_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an EventBridge policy using the `event_bus_name`. For example: + +```console +% terraform import aws_cloudwatch_event_bus_policy.DevAccountAccess example-event-bus +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudwatch_event_connection.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_connection.html.markdown new file mode 100644 index 00000000000..5b209a053c6 --- /dev/null +++ b/website/docs/cdktf/python/r/cloudwatch_event_connection.html.markdown @@ -0,0 +1,251 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_connection" +description: |- + Provides an EventBridge connection resource. +--- + + + +# Resource: aws_cloudwatch_event_connection + +Provides an EventBridge connection resource. 
+ +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_connection import CloudwatchEventConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudwatchEventConnection(self, "test", + auth_parameters=CloudwatchEventConnectionAuthParameters( + api_key=CloudwatchEventConnectionAuthParametersApiKey( + key="x-signature", + value="1234" + ) + ), + authorization_type="API_KEY", + description="A connection description", + name="ngrok-connection" + ) +``` + +## Example Usage Basic Authorization + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_connection import CloudwatchEventConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudwatchEventConnection(self, "test", + auth_parameters=CloudwatchEventConnectionAuthParameters( + basic=CloudwatchEventConnectionAuthParametersBasic( + password="Pass1234!", + username="user" + ) + ), + authorization_type="BASIC", + description="A connection description", + name="ngrok-connection" + ) +``` + +## Example Usage OAuth Authorization + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_connection import CloudwatchEventConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CloudwatchEventConnection(self, "test", + auth_parameters=CloudwatchEventConnectionAuthParameters( + oauth=CloudwatchEventConnectionAuthParametersOauth( + authorization_endpoint="https://auth.url.com/endpoint", + client_parameters=CloudwatchEventConnectionAuthParametersOauthClientParameters( + client_id="1234567890", + client_secret="Pass1234!" 
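+                        # Both client values are created and stored in AWS Secrets Manager.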
+                    ),
+                    http_method="GET",
+                    oauth_http_parameters=CloudwatchEventConnectionAuthParametersOauthOauthHttpParameters(
+                        body=[CloudwatchEventConnectionAuthParametersOauthOauthHttpParametersBody(
+                            is_value_secret=False,
+                            key="body-parameter-key",
+                            value="body-parameter-value"
+                        )
+                        ],
+                        header=[CloudwatchEventConnectionAuthParametersOauthOauthHttpParametersHeader(
+                            is_value_secret=False,
+                            key="header-parameter-key",
+                            value="header-parameter-value"
+                        )
+                        ],
+                        query_string=[CloudwatchEventConnectionAuthParametersOauthOauthHttpParametersQueryString(
+                            is_value_secret=False,
+                            key="query-string-parameter-key",
+                            value="query-string-parameter-value"
+                        )
+                        ]
+                    )
+                )
+            ),
+            authorization_type="OAUTH_CLIENT_CREDENTIALS",
+            description="A connection description",
+            name="ngrok-connection"
+        )
+```
+
+## Example Usage Invocation Http Parameters
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_connection import CloudwatchEventConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudwatchEventConnection(self, "test",
+            auth_parameters=CloudwatchEventConnectionAuthParameters(
+                basic=CloudwatchEventConnectionAuthParametersBasic(
+                    password="Pass1234!",
+                    username="user"
+                ),
+                invocation_http_parameters=CloudwatchEventConnectionAuthParametersInvocationHttpParameters(
+                    body=[CloudwatchEventConnectionAuthParametersInvocationHttpParametersBody(
+                        is_value_secret=False,
+                        key="body-parameter-key",
+                        value="body-parameter-value"
+                    ), CloudwatchEventConnectionAuthParametersInvocationHttpParametersBody(
+                        is_value_secret=True,
+                        key="body-parameter-key2",
+                        value="body-parameter-value2"
+                    )
+                    ],
+                    header=[CloudwatchEventConnectionAuthParametersInvocationHttpParametersHeader(
+                        is_value_secret=False,
+                        key="header-parameter-key",
+                        value="header-parameter-value"
+                    )
+                    ],
+                    query_string=[CloudwatchEventConnectionAuthParametersInvocationHttpParametersQueryString(
+                        is_value_secret=False,
+                        key="query-string-parameter-key",
+                        value="query-string-parameter-value"
+                    )
+                    ]
+                )
+            ),
+            authorization_type="BASIC",
+            description="A connection description",
+            name="ngrok-connection"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the new connection. Maximum of 64 characters consisting of numbers, lower/upper case letters, and the characters `.`, `-`, `_`.
+* `description` - (Optional) A description of the connection. Maximum of 512 characters.
+* `authorization_type` - (Required) The type of authorization to use for the connection. One of `API_KEY`, `BASIC`, or `OAUTH_CLIENT_CREDENTIALS`.
+* `auth_parameters` - (Required) Parameters used for authorization. A maximum of 1 is allowed. Documented below.
+* `invocation_http_parameters` - (Optional) Invocation HTTP parameters are additional credentials used to sign each invocation of the API destination created from this connection. If the API destination rule target has additional HTTP parameters, the values are merged, with the connection's invocation HTTP parameters taking precedence. Secret values are stored and managed by AWS Secrets Manager. A maximum of 1 is allowed. Documented below.
+
+`auth_parameters` supports the following:
+
+* `api_key` - (Optional) Parameters used for API_KEY authorization. An API key to include in the header for each authentication request. A maximum of 1 is allowed. Conflicts with `basic` and `oauth`. Documented below.
+* `basic` - (Optional) Parameters used for BASIC authorization. A maximum of 1 is allowed. Conflicts with `api_key` and `oauth`. Documented below.
+* `oauth` - (Optional) Parameters used for OAUTH_CLIENT_CREDENTIALS authorization. A maximum of 1 is allowed. Conflicts with `basic` and `api_key`. Documented below.
+
+`api_key` supports the following:
+
+* `key` - (Required) Header name.
+* `value` - (Required) Header value. Created and stored in AWS Secrets Manager.
+
+`basic` supports the following:
+
+* `username` - (Required) A username for the authorization.
+* `password` - (Required) A password for the authorization. Created and stored in AWS Secrets Manager.
+
+`oauth` supports the following:
+
+* `authorization_endpoint` - (Required) The URL to the authorization endpoint.
+* `http_method` - (Required) The HTTP method to use for the authorization request. Valid values are `GET`, `POST`, and `PUT`.
+* `client_parameters` - (Required) Contains the client parameters for OAuth authorization. Contains the following two parameters.
+    * `client_id` - (Required) The client ID for the credentials to use for authorization. Created and stored in AWS Secrets Manager.
+    * `client_secret` - (Required) The client secret for the credentials to use for authorization. Created and stored in AWS Secrets Manager.
+* `oauth_http_parameters` - (Required) OAuth HTTP parameters are additional credentials used to sign the request to the authorization endpoint, exchanging the OAuth client information for an access token. Secret values are stored and managed by AWS Secrets Manager. A maximum of 1 is allowed. Documented below.
+
+`invocation_http_parameters` and `oauth_http_parameters` support the following:
+
+* `body` - (Optional) Contains additional body string parameters for the connection. You can include up to 100 additional body string parameters per request. Each additional parameter counts towards the event payload size, which cannot exceed 64 KB. Each parameter can contain the following:
+    * `key` - (Required) The key for the parameter.
+    * `value` - (Required) The value associated with the key. Created and stored in AWS Secrets Manager if the value is secret.
+    * `is_value_secret` - (Optional) Specifies whether the value is secret.
+
+* `header` - (Optional) Contains additional header parameters for the connection. You can include up to 100 additional header parameters per request. Each additional parameter counts towards the event payload size, which cannot exceed 64 KB. Each parameter can contain the following:
+    * `key` - (Required) The key for the parameter.
+    * `value` - (Required) The value associated with the key. Created and stored in AWS Secrets Manager if the value is secret.
+    * `is_value_secret` - (Optional) Specifies whether the value is secret.
+
+* `query_string` - (Optional) Contains additional query string parameters for the connection. You can include up to 100 additional query string parameters per request. Each additional parameter counts towards the event payload size, which cannot exceed 64 KB. Each parameter can contain the following:
+    * `key` - (Required) The key for the parameter.
+    * `value` - (Required) The value associated with the key. Created and stored in AWS Secrets Manager if the value is secret.
+    * `is_value_secret` - (Optional) Specifies whether the value is secret.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the connection.
+* `secret_arn` - The Amazon Resource Name (ARN) of the secret created from the authorization parameters specified for the connection.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an EventBridge connection using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an EventBridge connection using the `name`. For example:
+
+```console
+% terraform import aws_cloudwatch_event_connection.test ngrok-connection
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_event_endpoint.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_endpoint.html.markdown
new file mode 100644
index 00000000000..f10e1c55d05
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_event_endpoint.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_endpoint"
+description: |-
+  Provides a resource to create an EventBridge Global Endpoint.
+---
+
+ 
+
+# Resource: aws_cloudwatch_event_endpoint
+
+Provides a resource to create an EventBridge Global Endpoint.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_endpoint import CloudwatchEventEndpoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudwatchEventEndpoint(self, "this",
+            event_bus=[CloudwatchEventEndpointEventBus(
+                event_bus_arn=primary.arn
+            ), CloudwatchEventEndpointEventBus(
+                event_bus_arn=secondary.arn
+            )
+            ],
+            name="global-endpoint",
+            replication_config=CloudwatchEventEndpointReplicationConfig(
+                state="DISABLED"
+            ),
+            role_arn=replication.arn,
+            routing_config=CloudwatchEventEndpointRoutingConfig(
+                failover_config=CloudwatchEventEndpointRoutingConfigFailoverConfig(
+                    primary=CloudwatchEventEndpointRoutingConfigFailoverConfigPrimary(
+                        health_check=Token.as_string(aws_route53_health_check_primary.arn)
+                    ),
+                    secondary=CloudwatchEventEndpointRoutingConfigFailoverConfigSecondary(
+                        route="us-east-2"
+                    )
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) A description of the global endpoint.
+* `event_bus` - (Required) The event buses to use. The names of the event buses must be identical in each Region. Exactly two event buses are required. Documented below.
+* `name` - (Required) The name of the global endpoint.
+* `replication_config` - (Optional) Parameters used for replication. Documented below.
+* `role_arn` - (Optional) The ARN of the IAM role used for replication between event buses.
+
+* `routing_config` - (Required) Parameters used for routing, including the health check and secondary Region. Documented below.
+
+`event_bus` supports the following:
+
+* `event_bus_arn` - (Required) The ARN of the event bus the endpoint is associated with.
+
+`replication_config` supports the following:
+
+* `state` - (Optional) The state of event replication. Valid values: `ENABLED`, `DISABLED`. The default state is `ENABLED`, which means you must supply a `role_arn`. If you don't have a `role_arn` or you don't want event replication enabled, set `state` to `DISABLED`.
+
+`routing_config` supports the following:
+
+* `failover_config` - (Required) Parameters used for failover. This includes what triggers failover and what happens when it's triggered. Documented below.
+
+`failover_config` supports the following:
+
+* `primary` - (Required) Parameters used for the primary Region. Documented below.
+* `secondary` - (Required) Parameters used for the secondary Region, the Region that events are routed to when failover is triggered or event replication is enabled. Documented below.
+
+`primary` supports the following:
+
+* `health_check` - (Required) The ARN of the health check used by the endpoint to determine whether failover is triggered.
+
+`secondary` supports the following:
+
+* `route` - (Required) The name of the secondary Region.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the endpoint that was created.
+* `endpoint_url` - The URL of the endpoint that was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Global Endpoints using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EventBridge Global Endpoints using the `name`. For example:
+
+```console
+% terraform import aws_cloudwatch_event_endpoint.imported_endpoint example-endpoint
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_event_permission.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_permission.html.markdown
new file mode 100644
index 00000000000..e7ef2fdff16
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_event_permission.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_permission"
+description: |-
+  Provides a resource to create an EventBridge permission to support cross-account events in the current account default event bus.
+---
+
+ 
+
+# Resource: aws_cloudwatch_event_permission
+
+Provides a resource to create an EventBridge permission to support cross-account events in the current account default event bus.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+~> **Note:** The EventBridge bus policy resource (`aws_cloudwatch_event_bus_policy`) is incompatible with the EventBridge permission resource (`aws_cloudwatch_event_permission`) and will overwrite permissions.
+
+## Example Usage
+
+### Account Access
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_permission import CloudwatchEventPermission
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudwatchEventPermission(self, "DevAccountAccess",
+            principal="123456789012",
+            statement_id="DevAccountAccess"
+        )
+```
+
+### Organization Access
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_permission import CloudwatchEventPermission
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CloudwatchEventPermission(self, "OrganizationAccess",
+            condition=CloudwatchEventPermissionCondition(
+                key="aws:PrincipalOrgID",
+                type="StringEquals",
+                value=example.id
+            ),
+            principal="*",
+            statement_id="OrganizationAccess"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `principal` - (Required) The 12-digit AWS account ID that you are permitting to put events to your default event bus. Specify `*` to permit any account to put events to your default event bus, optionally limited by `condition`.
+* `statement_id` - (Required) An identifier string for the external account that you are granting permissions to.
+* `action` - (Optional) The action that you are enabling the other account to perform. Defaults to `events:PutEvents`.
+* `condition` - (Optional) Configuration block to limit the event bus permissions you are granting to only accounts that fulfill the condition. Specified below.
+* `event_bus_name` - (Optional) The name of the event bus to set the permissions on.
+  If you omit this, the permissions are set on the `default` event bus.
+
+### condition
+
+* `key` - (Required) Key for the condition. Valid values: `aws:PrincipalOrgID`.
+* `type` - (Required) Type of condition. Valid values: `StringEquals`.
+* `value` - (Required) Value for the key.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The statement ID of the EventBridge permission.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge permissions using the `event_bus_name/statement_id` (if you omit `event_bus_name`, the `default` event bus will be used). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EventBridge permissions using the `event_bus_name/statement_id` (if you omit `event_bus_name`, the `default` event bus will be used). 
For example:

+```console
+% terraform import aws_cloudwatch_event_permission.DevAccountAccess example-event-bus/DevAccountAccess
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_event_rule.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_rule.html.markdown
new file mode 100644
index 00000000000..b1b01edab13
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_event_rule.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_rule"
+description: |-
+  Provides an EventBridge Rule resource.
+---
+
+ 
+
+# Resource: aws_cloudwatch_event_rule
+
+Provides an EventBridge Rule resource.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```terraform
+resource "aws_cloudwatch_event_rule" "console" {
+  name        = "capture-aws-sign-in"
+  description = "Capture each AWS Console Sign In"
+
+  event_pattern = jsonencode({
+    detail-type = [
+      "AWS Console Sign In via CloudTrail"
+    ]
+  })
+}
+
+resource "aws_cloudwatch_event_target" "sns" {
+  rule      = aws_cloudwatch_event_rule.console.name
+  target_id = "SendToSNS"
+  arn       = aws_sns_topic.aws_logins.arn
+}
+
+resource "aws_sns_topic" "aws_logins" {
+  name = "aws-console-logins"
+}
+
+resource "aws_sns_topic_policy" "default" {
+  arn    = aws_sns_topic.aws_logins.arn
+  policy = data.aws_iam_policy_document.sns_topic_policy.json
+}
+
+data "aws_iam_policy_document" "sns_topic_policy" {
+  statement {
+    effect  = "Allow"
+    actions = ["SNS:Publish"]
+
+    principals {
+      type        = "Service"
+      identifiers = ["events.amazonaws.com"]
+    }
+
+    resources = [aws_sns_topic.aws_logins.arn]
+  }
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the rule. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`.
+* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `schedule_expression` - (Optional) The scheduling expression. For example, `cron(0 20 * * ? *)` or `rate(5 minutes)`. At least one of `schedule_expression` or `event_pattern` is required. Can only be used on the default event bus. For more information, refer to the AWS documentation [Schedule Expressions for Rules](https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html). See the sketch after this list.
+* `event_bus_name` - (Optional) The name or ARN of the event bus to associate with this rule.
+  If you omit this, the `default` event bus is used.
+* `event_pattern` - (Optional) The event pattern, described as a JSON object. At least one of `schedule_expression` or `event_pattern` is required. See the full documentation of [Events and Event Patterns in EventBridge](https://docs.aws.amazon.com/eventbridge/latest/userguide/eventbridge-and-event-patterns.html) for details.
+* `description` - (Optional) The description of the rule.
+* `role_arn` - (Optional) The Amazon Resource Name (ARN) associated with the role that is used for target invocation.
+* `is_enabled` - (Optional) Whether the rule should be enabled (defaults to `true`).
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
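+
+As a minimal sketch of `schedule_expression` (the resource and rule names here are hypothetical, not part of the provider docs), a rule that fires nightly on the default event bus might look like:
+
+```terraform
+resource "aws_cloudwatch_event_rule" "nightly" {
+  name                = "run-nightly"
+  description         = "Fires every day at 20:00 UTC"
+  schedule_expression = "cron(0 20 * * ? *)"
+}
+```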
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the rule. +* `arn` - The Amazon Resource Name (ARN) of the rule. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Rules using the `event_bus_name/rule_name` (if you omit `event_bus_name`, the `default` event bus will be used). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EventBridge Rules using the `event_bus_name/rule_name` (if you omit `event_bus_name`, the `default` event bus will be used). For example: + +```console +% terraform import aws_cloudwatch_event_rule.console example-event-bus/capture-console-sign-in +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cloudwatch_event_target.html.markdown b/website/docs/cdktf/python/r/cloudwatch_event_target.html.markdown new file mode 100644 index 00000000000..53bcb6963ec --- /dev/null +++ b/website/docs/cdktf/python/r/cloudwatch_event_target.html.markdown @@ -0,0 +1,687 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_target" +description: |- + Provides an EventBridge Target resource. +--- + + + +# Resource: aws_cloudwatch_event_target + +Provides an EventBridge Target resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +### Kinesis Usage + +```terraform +resource "aws_cloudwatch_event_target" "yada" { + target_id = "Yada" + rule = aws_cloudwatch_event_rule.console.name + arn = aws_kinesis_stream.test_stream.arn + + run_command_targets { + key = "tag:Name" + values = ["FooBar"] + } + + run_command_targets { + key = "InstanceIds" + values = ["i-162058cd308bffec2"] + } +} + +resource "aws_cloudwatch_event_rule" "console" { + name = "capture-ec2-scaling-events" + description = "Capture all EC2 scaling events" + + event_pattern = jsonencode({ + source = [ + "aws.autoscaling" + ] + + detail-type = [ + "EC2 Instance Launch Successful", + "EC2 Instance Terminate Successful", + "EC2 Instance Launch Unsuccessful", + "EC2 Instance Terminate Unsuccessful" + ] + }) +} + +resource "aws_kinesis_stream" "test_stream" { + name = "terraform-kinesis-test" + shard_count = 1 +} +``` + +### SSM Document Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
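+# This example wires together an SSM document that halts an instance, an IAM
+# role that EventBridge assumes to call ssm:SendCommand on instances tagged
+# Terminate=midnight, and a nightly scheduled rule target that runs the document.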
+# +from imports.aws.cloudwatch_event_rule import CloudwatchEventRule +from imports.aws.cloudwatch_event_target import CloudwatchEventTarget +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_policy import IamPolicy +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +from imports.aws.ssm_document import SsmDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + stop_instances = CloudwatchEventRule(self, "stop_instances", + description="Stop instances nightly", + name="StopInstance", + schedule_expression="cron(0 0 * * ? *)" + ) + stop_instance = SsmDocument(self, "stop_instance", + content=Token.as_string( + Fn.jsonencode({ + "description": "Stop an instance", + "parameters": {}, + "runtime_config": { + "aws:run_shell_script": { + "properties": [{ + "id": "0.aws:runShellScript", + "run_command": ["halt"] + } + ] + } + }, + "schema_version": "1.2" + })), + document_type="Command", + name="stop_instance" + ) + ssm_lifecycle = DataAwsIamPolicyDocument(self, "ssm_lifecycle", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ssm:SendCommand"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=["*"], + variable="ec2:ResourceTag/Terminate" + ) + ], + effect="Allow", + resources=["arn:aws:ec2:eu-west-1:1234567890:instance/*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["ssm:SendCommand"], + effect="Allow", + resources=[stop_instance.arn] + ) + ] + ) + ssm_lifecycle_trust = DataAwsIamPolicyDocument(self, "ssm_lifecycle_trust", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["events.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + aws_iam_policy_ssm_lifecycle = IamPolicy(self, "ssm_lifecycle_4", + name="SSMLifecycle", + policy=Token.as_string(ssm_lifecycle.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_policy_ssm_lifecycle.override_logical_id("ssm_lifecycle") + aws_iam_role_ssm_lifecycle = IamRole(self, "ssm_lifecycle_5", + assume_role_policy=Token.as_string(ssm_lifecycle_trust.json), + name="SSMLifecycle" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_ssm_lifecycle.override_logical_id("ssm_lifecycle") + aws_iam_role_policy_attachment_ssm_lifecycle = IamRolePolicyAttachment(self, "ssm_lifecycle_6", + policy_arn=Token.as_string(aws_iam_policy_ssm_lifecycle.arn), + role=Token.as_string(aws_iam_role_ssm_lifecycle.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_attachment_ssm_lifecycle.override_logical_id("ssm_lifecycle") + aws_cloudwatch_event_target_stop_instances = CloudwatchEventTarget(self, "stop_instances_7", + arn=stop_instance.arn, + role_arn=Token.as_string(aws_iam_role_ssm_lifecycle.arn), + rule=stop_instances.name, + run_command_targets=[CloudwatchEventTargetRunCommandTargets( + key="tag:Terminate", + values=["midnight"] + ) + ], + target_id="StopInstance" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_cloudwatch_event_target_stop_instances.override_logical_id("stop_instances") +``` + +### RunCommand Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_rule import CloudwatchEventRule +from imports.aws.cloudwatch_event_target import CloudwatchEventTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + stop_instances = CloudwatchEventRule(self, "stop_instances", + description="Stop instances nightly", + name="StopInstance", + schedule_expression="cron(0 0 * * ? *)" + ) + aws_cloudwatch_event_target_stop_instances = CloudwatchEventTarget(self, "stop_instances_1", + arn="arn:aws:ssm:${" + aws_region.value + "}::document/AWS-RunShellScript", + input="{\\\"commands\\\":[\\\"halt\\\"]}", + role_arn=ssm_lifecycle.arn, + rule=stop_instances.name, + run_command_targets=[CloudwatchEventTargetRunCommandTargets( + key="tag:Terminate", + values=["midnight"] + ) + ], + target_id="StopInstance" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_target_stop_instances.override_logical_id("stop_instances") +``` + +### ECS Run Task with Role and Task Override Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_event_target import CloudwatchEventTarget +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["events.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + ecs_events_run_task_with_any_role = DataAwsIamPolicyDocument(self, "ecs_events_run_task_with_any_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["iam:PassRole"], + effect="Allow", + resources=["*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["ecs:RunTask"], + effect="Allow", + resources=[ + Token.as_string(Fn.replace(task_name.arn, "/:\\\\d+$/", ":*")) + ] + ) + ] + ) + ecs_events = IamRole(self, "ecs_events", + assume_role_policy=Token.as_string(assume_role.json), + name="ecs_events" + ) + aws_iam_role_policy_ecs_events_run_task_with_any_role = IamRolePolicy(self, "ecs_events_run_task_with_any_role_3", + name="ecs_events_run_task_with_any_role", + policy=Token.as_string(ecs_events_run_task_with_any_role.json), + role=ecs_events.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_iam_role_policy_ecs_events_run_task_with_any_role.override_logical_id("ecs_events_run_task_with_any_role") + CloudwatchEventTarget(self, "ecs_scheduled_task", + arn=cluster_name.arn, + ecs_target=CloudwatchEventTargetEcsTarget( + task_count=1, + task_definition_arn=task_name.arn + ), + input=Token.as_string( + Fn.jsonencode({ + "container_overrides": [{ + "command": ["bin/console", "scheduled-task"], + "name": "name-of-container-to-override" + } + ] + })), + role_arn=ecs_events.arn, + rule=every_hour.name, + target_id="run-scheduled-task-every-hour" + ) +``` + +### API Gateway target + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.api_gateway_deployment import ApiGatewayDeployment +from imports.aws.api_gateway_stage import ApiGatewayStage +from imports.aws.cloudwatch_event_rule import CloudwatchEventRule +from imports.aws.cloudwatch_event_target import CloudwatchEventTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, stageName): + super().__init__(scope, name) + example = ApiGatewayDeployment(self, "example", + rest_api_id=Token.as_string(aws_api_gateway_rest_api_example.id) + ) + aws_api_gateway_stage_example = ApiGatewayStage(self, "example_1", + deployment_id=example.id, + rest_api_id=Token.as_string(aws_api_gateway_rest_api_example.id), + stage_name=stage_name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_api_gateway_stage_example.override_logical_id("example") + aws_cloudwatch_event_rule_example = CloudwatchEventRule(self, "example_2") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_rule_example.override_logical_id("example") + aws_cloudwatch_event_target_example = CloudwatchEventTarget(self, "example_3", + arn="${" + aws_api_gateway_stage_example.execution_arn + "}/GET", + http_target=CloudwatchEventTargetHttpTarget( + header_parameters={ + "Env": "Test" + }, + query_string_parameters={ + "Body": "$.detail.body" + } + ), + rule=Token.as_string(aws_cloudwatch_event_rule_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_event_target_example.override_logical_id("example") +``` + +### Cross-Account Event Bus target + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
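+# This example assumes the remote bus
+# arn:aws:events:eu-west-1:1234567890:event-bus/My-Event-Bus already exists in
+# the other account; the IAM role below lets EventBridge put events onto it.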
+#
+from imports.aws.cloudwatch_event_rule import CloudwatchEventRule
+from imports.aws.cloudwatch_event_target import CloudwatchEventTarget
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_policy import IamPolicy
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        stop_instances = CloudwatchEventRule(self, "stop_instances",
+            description="Stop instances nightly",
+            name="StopInstance",
+            schedule_expression="cron(0 0 * * ? *)"
+        )
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["events.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        event_bus_invoke_remote_event_bus = DataAwsIamPolicyDocument(self, "event_bus_invoke_remote_event_bus",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["events:PutEvents"],
+                effect="Allow",
+                resources=["arn:aws:events:eu-west-1:1234567890:event-bus/My-Event-Bus"
+                ]
+            )
+            ]
+        )
+        aws_iam_policy_event_bus_invoke_remote_event_bus = IamPolicy(self, "event_bus_invoke_remote_event_bus_3",
+            name="event_bus_invoke_remote_event_bus",
+            policy=Token.as_string(event_bus_invoke_remote_event_bus.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_policy_event_bus_invoke_remote_event_bus.override_logical_id("event_bus_invoke_remote_event_bus")
+        aws_iam_role_event_bus_invoke_remote_event_bus = IamRole(self, "event_bus_invoke_remote_event_bus_4",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="event-bus-invoke-remote-event-bus"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_event_bus_invoke_remote_event_bus.override_logical_id("event_bus_invoke_remote_event_bus")
+        aws_iam_role_policy_attachment_event_bus_invoke_remote_event_bus = IamRolePolicyAttachment(self, "event_bus_invoke_remote_event_bus_5",
+            policy_arn=Token.as_string(aws_iam_policy_event_bus_invoke_remote_event_bus.arn),
+            role=Token.as_string(aws_iam_role_event_bus_invoke_remote_event_bus.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_event_bus_invoke_remote_event_bus.override_logical_id("event_bus_invoke_remote_event_bus")
+        aws_cloudwatch_event_target_stop_instances = CloudwatchEventTarget(self, "stop_instances_6",
+            arn="arn:aws:events:eu-west-1:1234567890:event-bus/My-Event-Bus",
+            role_arn=Token.as_string(aws_iam_role_event_bus_invoke_remote_event_bus.arn),
+            rule=stop_instances.name,
+            target_id="StopInstance"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_event_target_stop_instances.override_logical_id("stop_instances")
+```
+
+### Input Transformer Usage - JSON Object
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_rule import CloudwatchEventRule
+from imports.aws.cloudwatch_event_target import CloudwatchEventTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudwatchEventRule(self, "example")
+        aws_cloudwatch_event_target_example = CloudwatchEventTarget(self, "example_1",
+            arn=Token.as_string(aws_lambda_function_example.arn),
+            input_transformer=CloudwatchEventTargetInputTransformer(
+                input_paths={
+                    "instance": "$.detail.instance",
+                    "status": "$.detail.status"
+                },
+                input_template="{\n \"instance_id\": <instance>,\n \"instance_status\": <status>\n}\n\n"
+            ),
+            rule=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_event_target_example.override_logical_id("example")
+```
+
+### Input Transformer Usage - Simple String
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_rule import CloudwatchEventRule
+from imports.aws.cloudwatch_event_target import CloudwatchEventTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudwatchEventRule(self, "example")
+        aws_cloudwatch_event_target_example = CloudwatchEventTarget(self, "example_1",
+            arn=Token.as_string(aws_lambda_function_example.arn),
+            input_transformer=CloudwatchEventTargetInputTransformer(
+                input_paths={
+                    "instance": "$.detail.instance",
+                    "status": "$.detail.status"
+                },
+                input_template="\\\"<instance>\\\" is in state \\\"<status>\\\""
+            ),
+            rule=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_event_target_example.override_logical_id("example")
+```
+
+### Cloudwatch Log Group Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_event_rule import CloudwatchEventRule
+from imports.aws.cloudwatch_event_target import CloudwatchEventTarget
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudwatchEventRule(self, "example",
+            description="GuardDuty Findings",
+            event_pattern=Token.as_string(
+                Fn.jsonencode({
+                    "source": ["aws.guardduty"]
+                })),
+            name="guard-duty_event_rule",
+            tags={
+                "Environment": "example"
+            }
+        )
+        aws_cloudwatch_log_group_example = CloudwatchLogGroup(self, "example_1",
+            name="/aws/events/guardduty/logs",
+            retention_in_days=1
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_group_example.override_logical_id("example")
+        example_log_policy = DataAwsIamPolicyDocument(self, "example_log_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:CreateLogStream"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["events.amazonaws.com", "delivery.logs.amazonaws.com"
+                    ],
+                    type="Service"
+                )
+                ],
+                resources=["${" + aws_cloudwatch_log_group_example.arn + "}:*"]
+            ), DataAwsIamPolicyDocumentStatement(
+                actions=["logs:PutLogEvents"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="ArnEquals",
+                    values=[example.arn],
+                    variable="aws:SourceArn"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["events.amazonaws.com", "delivery.logs.amazonaws.com"
+                    ],
+                    type="Service"
+                )
+                ],
+                resources=["${" + aws_cloudwatch_log_group_example.arn + "}:*:*"]
+            )
+            ]
+        )
+        aws_cloudwatch_event_target_example = CloudwatchEventTarget(self, "example_3",
+            arn=Token.as_string(aws_cloudwatch_log_group_example.arn),
+            rule=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_event_target_example.override_logical_id("example")
+        aws_cloudwatch_log_resource_policy_example = CloudwatchLogResourcePolicy(self, "example_4",
+            policy_document=Token.as_string(example_log_policy.json),
+            policy_name="guardduty-log-publishing-policy"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_resource_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+-> **Note:** To have your AWS Lambda function or SNS topic invoked by an EventBridge rule, you must set up the right permissions using [`aws_lambda_permission`](/docs/providers/aws/r/lambda_permission.html) or [`aws_sns_topic.policy`](/docs/providers/aws/r/sns_topic.html#policy). For more information, see the [EventBridge resource-based policy documentation](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-use-resource-based.html).
+
+The following arguments are required:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the target.
+* `rule` - (Required) The name of the rule you want to add targets to.
+
+The following arguments are optional:
+
+* `batch_target` - (Optional) Parameters used when you are using the rule to invoke an AWS Batch job. Documented below. A maximum of 1 is allowed.
+* `dead_letter_config` - (Optional) Parameters used when you are providing a dead letter config. Documented below. A maximum of 1 is allowed.
+* `ecs_target` - (Optional) Parameters used when you are using the rule to invoke an Amazon ECS task. Documented below. A maximum of 1 is allowed.
+* `event_bus_name` - (Optional) The name or ARN of the event bus to associate with the rule.
+  If you omit this, the `default` event bus is used.
+* `http_target` - (Optional) Parameters used when you are using the rule to invoke an API Gateway REST endpoint. Documented below. A maximum of 1 is allowed.
+* `input` - (Optional) Valid JSON text passed to the target. Conflicts with `input_path` and `input_transformer`.
+* `input_path` - (Optional) The value of the [JSONPath](http://goessner.net/articles/JsonPath/) that is used for extracting part of the matched event when passing it to the target. Conflicts with `input` and `input_transformer`.
+
+* `input_transformer` - (Optional) Parameters used when you are providing a custom input to a target based on certain event data. Conflicts with `input` and `input_path`.
+* `kinesis_target` - (Optional) Parameters used when you are using the rule to invoke an Amazon Kinesis stream. Documented below. A maximum of 1 is allowed.
+* `role_arn` - (Optional) The Amazon Resource Name (ARN) of the IAM role to be used for this target when the rule is triggered. Required if `ecs_target` is used, or if the target in `arn` is an EC2 instance, Kinesis data stream, Step Functions state machine, or event bus in a different account or Region.
+* `run_command_targets` - (Optional) Parameters used when you are using the rule to invoke Amazon EC2 Run Command. Documented below. A maximum of 5 are allowed.
+* `redshift_target` - (Optional) Parameters used when you are using the rule to invoke an Amazon Redshift statement. Documented below. A maximum of 1 is allowed.
+* `retry_policy` - (Optional) Parameters used when you are providing retry policies. Documented below. A maximum of 1 is allowed.
+* `sqs_target` - (Optional) Parameters used when you are using the rule to invoke an Amazon SQS queue. Documented below. A maximum of 1 is allowed.
+* `target_id` - (Optional) The unique target assignment ID. If missing, a random, unique ID is generated.
+
+### batch_target
+
+* `job_definition` - (Required) The ARN or name of the job definition to use if the event target is an AWS Batch job. This job definition must already exist.
+* `job_name` - (Required) The name to use for this execution of the job, if the target is an AWS Batch job.
+* `array_size` - (Optional) The size of the array, if this is an array batch job. Valid values are integers between 2 and 10,000.
+* `job_attempts` - (Optional) The number of times to attempt to retry, if the job fails. Valid values are 1 to 10.
+
+### capacity_provider_strategy
+
+* `capacity_provider` - (Required) Short name of the capacity provider.
+* `weight` - (Required) The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
+* `base` - (Optional) The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Defaults to `0`.
+
+### dead_letter_config
+
+* `arn` - (Optional) ARN of the SQS queue specified as the target for the dead-letter queue.
+
+### ecs_target
+
+* `task_definition_arn` - (Required) The ARN of the task definition to use if the event target is an Amazon ECS cluster.
+* `capacity_provider_strategy` - (Optional) The capacity provider strategy to use for the task. If a `capacity_provider_strategy` is specified, the `launch_type` parameter must be omitted. If no `capacity_provider_strategy` or `launch_type` is specified, the default capacity provider strategy for the cluster is used. Can be one or more. See below.
+* `enable_ecs_managed_tags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the task.
+* `enable_execute_command` - (Optional) Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
+* `group` - (Optional) Specifies an ECS task group for the task. The maximum length is 255 characters.
+* `launch_type` - (Optional) Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch types (compatibilities) of the target task. Valid values include: `EC2`, `EXTERNAL`, or `FARGATE`.
+* `network_configuration` - (Optional) Use this if the ECS task uses the awsvpc network mode. This specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. Required if `launch_type` is `FARGATE` because the awsvpc mode is required for Fargate tasks. A sketch appears after the `network_configuration` section below.
+* `ordered_placement_strategy` - (Optional) An array of placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
+* `placement_constraint` - (Optional) An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). See below.
+* `platform_version` - (Optional) Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as `1.1.0`. This is used only if `launch_type` is `FARGATE`. For more information about valid platform versions, see [AWS Fargate Platform Versions](http://docs.aws.amazon.com/AmazonECS/latest/developerguide/platform_versions.html).
+* `propagate_tags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. The only valid value is: `TASK_DEFINITION`.
+* `task_count` - (Optional) The number of tasks to create based on the TaskDefinition. Defaults to `1`.
+* `tags` - (Optional) A map of tags to assign to ECS resources.
+
+### http_target
+
+* `header_parameters` - (Optional) Enables you to specify HTTP headers to add to the request.
+* `path_parameter_values` - (Optional) The list of values that correspond sequentially to any path variables in your endpoint ARN (for example `arn:aws:execute-api:us-east-1:123456:myapi/*/POST/pets/*`).
+* `query_string_parameters` - (Optional) Represents keys/values of query string parameters that are appended to the invoked endpoint.
+
+### input_transformer
+
+* `input_template` - (Required) Template to customize data sent to the target. Must be valid JSON. To send a string value, the string value must include double quotes. Values must be escaped for both JSON and Terraform, e.g., `"\"Your string goes here.\\nA new line.\""`
+* `input_paths` - (Optional) Key value pairs specified in the form of JSONPath (for example, time = $.time)
+    * You can have as many as 100 key-value pairs.
+    * You must use JSON dot notation, not bracket notation.
+    * The keys can't start with "AWS".
+
+### kinesis_target
+
+* `partition_key_path` - (Optional) The JSON path to be extracted from the event and used as the partition key.
+
+### network_configuration
+
+* `subnets` - (Required) The subnets associated with the task or service.
+* `security_groups` - (Optional) The security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used.
+* `assign_public_ip` - (Optional) Assign a public IP address to the ENI (Fargate launch type only). Valid values are `true` or `false`. Defaults to `false`.
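+
+As a minimal sketch of a Fargate target with `network_configuration` (all resource names here are hypothetical), the block combines `launch_type` and the awsvpc settings described above:
+
+```terraform
+resource "aws_cloudwatch_event_target" "ecs_fargate" {
+  rule     = aws_cloudwatch_event_rule.example.name
+  arn      = aws_ecs_cluster.example.arn
+  role_arn = aws_iam_role.ecs_events.arn
+
+  ecs_target {
+    task_definition_arn = aws_ecs_task_definition.example.arn
+    task_count          = 1
+    launch_type         = "FARGATE"
+
+    network_configuration {
+      subnets          = [aws_subnet.example.id]
+      assign_public_ip = false
+    }
+  }
+}
+```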
+
+For more information, see [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html).
+
+### ordered_placement_strategy
+
+* `type` - (Required) Type of placement strategy. The only valid values at this time are `binpack`, `random`, and `spread`.
+* `field` - (Optional) The field to apply the placement strategy against. For the `spread` placement strategy, valid values are `instanceId` (or `host`, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as `attribute:ecs.availability-zone`. For the `binpack` placement strategy, valid values are `cpu` and `memory`. For the `random` placement strategy, this field is not used. For more information, see [Amazon ECS task placement strategies](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-placement-strategies.html).
+
+### placement_constraint
+
+* `type` - (Required) Type of constraint. The only valid values at this time are `memberOf` and `distinctInstance`.
+* `expression` - (Optional) Cluster Query Language expression to apply to the constraint. Does not need to be specified for the `distinctInstance` type. For more information, see [Cluster Query Language in the Amazon EC2 Container Service Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html).
+
+### redshift_target
+
+* `database` - (Required) The name of the database.
+* `db_user` - (Optional) The database user name.
+* `secrets_manager_arn` - (Optional) The name or ARN of the secret that enables access to the database.
+* `sql` - (Optional) The SQL statement text to run.
+* `statement_name` - (Optional) The name of the SQL statement.
+* `with_event` - (Optional) Indicates whether to send an event back to EventBridge after the SQL statement runs.
+
+### retry_policy
+
+* `maximum_event_age_in_seconds` - (Optional) The age in seconds to continue to make retry attempts.
+* `maximum_retry_attempts` - (Optional) The maximum number of retry attempts to make before the request fails.
+
+### run_command_targets
+
+* `key` - (Required) Can be either `tag:tag-key` or `InstanceIds`.
+* `values` - (Required) If Key is `tag:tag-key`, Values is a list of tag values. If Key is `InstanceIds`, Values is a list of Amazon EC2 instance IDs.
+
+### sqs_target
+
+* `message_group_id` - (Optional) The FIFO message group ID to use as the target.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Targets using `event_bus_name/rule-name/target-id` (if you omit `event_bus_name`, the `default` event bus will be used). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EventBridge Targets using `event_bus_name/rule-name/target-id` (if you omit `event_bus_name`, the `default` event bus will be used). 
For example:

+```console
+% terraform import aws_cloudwatch_event_target.test-event-target rule-name/target-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_data_protection_policy.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_data_protection_policy.html.markdown
new file mode 100644
index 00000000000..17e3db2fda8
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_data_protection_policy.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "CloudWatch Logs"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_log_data_protection_policy"
+description: |-
+  Provides a CloudWatch Log Data Protection Policy resource.
+---
+
+ 
+
+# Resource: aws_cloudwatch_log_data_protection_policy
+
+Provides a CloudWatch Log Data Protection Policy resource.
+
+Read more about protecting sensitive user data in the [User Guide](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/mask-sensitive-log-data.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_log_data_protection_policy import CloudwatchLogDataProtectionPolicy
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudwatchLogGroup(self, "example",
+            name="example"
+        )
+        aws_s3_bucket_example = S3Bucket(self, "example_1",
+            bucket="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_example.override_logical_id("example")
+        aws_cloudwatch_log_data_protection_policy_example = CloudwatchLogDataProtectionPolicy(self, "example_2",
+            log_group_name=example.name,
+            policy_document=Token.as_string(
+                Fn.jsonencode({
+                    "Name": "Example",
+                    "Statement": [{
+                        "DataIdentifier": ["arn:aws:dataprotection::aws:data-identifier/EmailAddress"
+                        ],
+                        "Operation": {
+                            "Audit": {
+                                "FindingsDestination": {
+                                    "S3": {
+                                        "Bucket": aws_s3_bucket_example.bucket
+                                    }
+                                }
+                            }
+                        },
+                        "Sid": "Audit"
+                    }, {
+                        "DataIdentifier": ["arn:aws:dataprotection::aws:data-identifier/EmailAddress"
+                        ],
+                        "Operation": {
+                            "Deidentify": {
+                                "MaskConfig": {}
+                            }
+                        },
+                        "Sid": "Redact"
+                    }
+                    ],
+                    "Version": "2021-06-01"
+                }))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_data_protection_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `log_group_name` - (Required) The name of the log group to attach the data protection policy to.
+* `policy_document` - (Required) Specifies the data protection policy in JSON. Read more at [Data protection policy syntax](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/mask-sensitive-log-data-start.html#mask-sensitive-log-data-policysyntax).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import this resource using the `log_group_name`. 
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import this resource using the `log_group_name`. For example:

```console
% terraform import aws_cloudwatch_log_data_protection_policy.example my-log-group
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_destination.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_destination.html.markdown
new file mode 100644
index 00000000000..4e002c62a50
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_destination.html.markdown
@@ -0,0 +1,71 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_log_destination"
description: |-
  Provides a CloudWatch Logs destination.
---

# Resource: aws_cloudwatch_log_destination

Provides a CloudWatch Logs destination resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_destination import CloudwatchLogDestination
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchLogDestination(self, "test_destination",
            name="test_destination",
            role_arn=iam_for_cloudwatch.arn,
            target_arn=kinesis_for_cloudwatch.arn
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) A name for the log destination.
* `role_arn` - (Required) The ARN of an IAM role that grants Amazon CloudWatch Logs permissions to put data into the target.
* `target_arn` - (Required) The ARN of the target Amazon Kinesis stream resource for the destination.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Amazon Resource Name (ARN) specifying the log destination.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Logs destinations using the `name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch Logs destinations using the `name`.
For example:

```console
% terraform import aws_cloudwatch_log_destination.test_destination test_destination
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_destination_policy.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_destination_policy.html.markdown
new file mode 100644
index 00000000000..6f8dedc1e30
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_destination_policy.html.markdown
@@ -0,0 +1,89 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_log_destination_policy"
description: |-
  Provides a CloudWatch Logs destination policy.
---

# Resource: aws_cloudwatch_log_destination_policy

Provides a CloudWatch Logs destination policy resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_destination import CloudwatchLogDestination
from imports.aws.cloudwatch_log_destination_policy import CloudwatchLogDestinationPolicy
from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        test_destination = CloudwatchLogDestination(self, "test_destination",
            name="test_destination",
            role_arn=iam_for_cloudwatch.arn,
            target_arn=kinesis_for_cloudwatch.arn
        )
        test_destination_policy = DataAwsIamPolicyDocument(self, "test_destination_policy",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["logs:PutSubscriptionFilter"],
                effect="Allow",
                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
                    identifiers=["123456789012"],
                    type="AWS"
                )
                ],
                resources=[test_destination.arn]
            )
            ]
        )
        aws_cloudwatch_log_destination_policy_test_destination_policy = CloudwatchLogDestinationPolicy(self, "test_destination_policy_2",
            access_policy=Token.as_string(test_destination_policy.json),
            destination_name=test_destination.name
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_cloudwatch_log_destination_policy_test_destination_policy.override_logical_id("test_destination_policy")
```

## Argument Reference

This resource supports the following arguments:

* `destination_name` - (Required) A name for the log destination.
* `access_policy` - (Required) The policy document. This is a JSON formatted string.
* `force_update` - (Optional) Specify true if you are updating an existing destination policy to grant permission to an organization ID instead of granting permission to individual AWS accounts.
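
To make `force_update` concrete, here is a minimal, hand-written sketch (not `cdktf convert` output) that switches the destination policy above to an AWS Organizations principal. The `DataAwsIamPolicyDocumentStatementCondition` class name follows cdktf's generated naming convention, `o-abcde12345` is a placeholder organization ID, and the statements are assumed to live inside the stack's `__init__` from the example above.

```python
# Hand-written sketch: grant every account in the organization permission
# to create subscription filters against the destination.
org_destination_policy = DataAwsIamPolicyDocument(self, "org_destination_policy",
    statement=[DataAwsIamPolicyDocumentStatement(
        actions=["logs:PutSubscriptionFilter"],
        effect="Allow",
        principals=[DataAwsIamPolicyDocumentStatementPrincipals(
            identifiers=["*"],
            type="AWS"
        )
        ],
        condition=[DataAwsIamPolicyDocumentStatementCondition(
            test="StringEquals",
            values=["o-abcde12345"],
            variable="aws:PrincipalOrgID"
        )
        ],
        resources=[test_destination.arn]
    )
    ]
)
CloudwatchLogDestinationPolicy(self, "org_policy",
    access_policy=Token.as_string(org_destination_policy.json),
    destination_name=test_destination.name,
    # force_update is needed when changing an existing policy so that it
    # grants an organization ID instead of individual AWS accounts.
    force_update=True
)
```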

## Attribute Reference

This resource exports no additional attributes.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Logs destination policies using the `destination_name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch Logs destination policies using the `destination_name`. For example:

```console
% terraform import aws_cloudwatch_log_destination_policy.test_destination_policy test_destination
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_group.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_group.html.markdown
new file mode 100644
index 00000000000..6da2918ac6d
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_group.html.markdown
@@ -0,0 +1,79 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_log_group"
description: |-
  Provides a CloudWatch Log Group resource.
---

# Resource: aws_cloudwatch_log_group

Provides a CloudWatch Log Group resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchLogGroup(self, "yada",
            name="Yada",
            tags={
                "Application": "serviceA",
                "Environment": "production"
            }
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Optional, Forces new resource) The name of the log group. If omitted, Terraform will assign a random, unique name.
* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
* `skip_destroy` - (Optional) Set to true if you do not wish the log group (and any logs it may contain) to be deleted at destroy time, and instead just remove the log group from the Terraform state.
* `retention_in_days` - (Optional) Specifies the number of days you want to retain log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1096, 1827, 2192, 2557, 2922, 3288, 3653, and 0. If you select 0, the events in the log group are always retained and never expire.
* `kms_key_id` - (Optional) The ARN of the KMS Key to use when encrypting log data (see the sketch below). Please note, after the AWS KMS CMK is disassociated from the log group, AWS CloudWatch Logs stops encrypting newly ingested data for the log group. All previously ingested data remains encrypted, and AWS CloudWatch Logs requires permissions for the CMK whenever the encrypted data is requested.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
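
As a sketch of `kms_key_id` (hand-written, not `cdktf convert` output), the example below encrypts a log group with a customer managed key. Note that the key policy must also grant the CloudWatch Logs service principal permission to use the key, which this sketch omits for brevity.

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
from imports.aws.kms_key import KmsKey

class EncryptedLogGroupSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # The key policy must additionally allow the CloudWatch Logs service
        # principal to use the key; that statement is omitted in this sketch.
        example_key = KmsKey(self, "example_key",
            description="Key for encrypting the example log group",
            enable_key_rotation=True
        )
        CloudwatchLogGroup(self, "encrypted",
            kms_key_id=example_key.arn,
            name="encrypted-example",
            retention_in_days=30
        )
```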

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Amazon Resource Name (ARN) specifying the log group. Any `:*` suffix added by the API, denoting all CloudWatch Log Streams under the CloudWatch Log Group, is removed for greater compatibility with other AWS services that do not accept the suffix.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudwatch Log Groups using the `name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Cloudwatch Log Groups using the `name`. For example:

```console
% terraform import aws_cloudwatch_log_group.test_group yada
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_metric_filter.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_metric_filter.html.markdown
new file mode 100644
index 00000000000..4453037a7c9
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_metric_filter.html.markdown
@@ -0,0 +1,89 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_log_metric_filter"
description: |-
  Provides a CloudWatch Log Metric Filter resource.
---

# Resource: aws_cloudwatch_log_metric_filter

Provides a CloudWatch Log Metric Filter resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
from imports.aws.cloudwatch_log_metric_filter import CloudwatchLogMetricFilter
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        dada = CloudwatchLogGroup(self, "dada",
            name="MyApp/access.log"
        )
        CloudwatchLogMetricFilter(self, "yada",
            log_group_name=dada.name,
            metric_transformation=CloudwatchLogMetricFilterMetricTransformation(
                name="EventCount",
                namespace="YourNamespace",
                value="1"
            ),
            name="MyAppAccessCount",
            pattern=""
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) A name for the metric filter.
* `pattern` - (Required) A valid [CloudWatch Logs filter pattern](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/FilterAndPatternSyntax.html) for extracting metric data out of ingested log events.
* `log_group_name` - (Required) The name of the log group to associate the metric filter with.
* `metric_transformation` - (Required) A block defining the collection of information needed to define how metric data gets emitted. See below.

The `metric_transformation` block supports the following arguments:

* `name` - (Required) The name of the CloudWatch metric to which the monitored log information should be published (e.g., `ErrorCount`).
* `namespace` - (Required) The destination namespace of the CloudWatch metric.
* `value` - (Required) What to publish to the metric. For example, if you're counting the occurrences of a particular term like "Error", the value will be "1" for each occurrence. If you're counting the bytes transferred, the published value will be the value in the log event.
* `default_value` - (Optional) The value to emit when a filter pattern does not match a log event. Conflicts with `dimensions`.
* `dimensions` - (Optional) Map of fields to use as dimensions for the metric. Up to 3 dimensions are allowed. Conflicts with `default_value`. See the sketch below.
* `unit` - (Optional) The unit to assign to the metric. If you omit this, the unit is set as `None`.
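
To make `dimensions` concrete, here is a hand-written sketch (not `cdktf convert` output) that counts JSON log events per distinct `errorCode` value; `dada` is the log group from the example above, and the JSON filter pattern is an assumption about the log format. `default_value` is omitted because it conflicts with `dimensions`.

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.cloudwatch_log_metric_filter import CloudwatchLogMetricFilter

class DimensionsSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchLogMetricFilter(self, "errors_by_code",
            log_group_name=dada.name,
            # JSON pattern: match events that carry any errorCode field
            pattern="{ $.errorCode = \"*\" }",
            metric_transformation=CloudwatchLogMetricFilterMetricTransformation(
                # each distinct errorCode value becomes its own metric dimension
                dimensions={
                    "ErrorCode": "$.errorCode"
                },
                name="ErrorCount",
                namespace="MyApp",
                value="1"
            ),
            name="MyAppErrorsByCode"
        )
```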

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name of the metric filter.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Log Metric Filter using the `log_group_name:name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch Log Metric Filter using the `log_group_name:name`. For example:

```console
% terraform import aws_cloudwatch_log_metric_filter.test /aws/lambda/function:test
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_resource_policy.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_resource_policy.html.markdown
new file mode 100644
index 00000000000..b7def7b8f75
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_resource_policy.html.markdown
@@ -0,0 +1,122 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_log_resource_policy"
description: |-
  Provides a resource to manage a CloudWatch log resource policy
---

# Resource: aws_cloudwatch_log_resource_policy

Provides a resource to manage a CloudWatch log resource policy.

## Example Usage

### Elasticsearch Log Publishing

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        elasticsearch_log_publishing_policy = DataAwsIamPolicyDocument(self, "elasticsearch-log-publishing-policy",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["logs:CreateLogStream", "logs:PutLogEvents", "logs:PutLogEventsBatch"
                ],
                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
                    identifiers=["es.amazonaws.com"],
                    type="Service"
                )
                ],
                resources=["arn:aws:logs:*"]
            )
            ]
        )
        aws_cloudwatch_log_resource_policy_elasticsearch_log_publishing_policy = CloudwatchLogResourcePolicy(self, "elasticsearch-log-publishing-policy_1",
            policy_document=Token.as_string(elasticsearch_log_publishing_policy.json),
            policy_name="elasticsearch-log-publishing-policy"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_cloudwatch_log_resource_policy_elasticsearch_log_publishing_policy.override_logical_id("elasticsearch-log-publishing-policy")
```

### Route53 Query Logging

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        route53_query_logging_policy = DataAwsIamPolicyDocument(self, "route53-query-logging-policy",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["logs:CreateLogStream", "logs:PutLogEvents"],
                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
                    identifiers=["route53.amazonaws.com"],
                    type="Service"
                )
                ],
                resources=["arn:aws:logs:*:*:log-group:/aws/route53/*"]
            )
            ]
        )
        aws_cloudwatch_log_resource_policy_route53_query_logging_policy = CloudwatchLogResourcePolicy(self, "route53-query-logging-policy_1",
            policy_document=Token.as_string(route53_query_logging_policy.json),
            policy_name="route53-query-logging-policy"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_cloudwatch_log_resource_policy_route53_query_logging_policy.override_logical_id("route53-query-logging-policy")
```

## Argument Reference

This resource supports the following arguments:

* `policy_document` - (Required) Details of the resource policy, including the identity of the principal that is enabled to put logs to this account. This is formatted as a JSON string. Maximum length of 5120 characters.
* `policy_name` - (Required) Name of the resource policy.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name of the CloudWatch log resource policy.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch log resource policies using the policy name. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch log resource policies using the policy name. For example:

```console
% terraform import aws_cloudwatch_log_resource_policy.MyPolicy MyPolicy
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_stream.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_stream.html.markdown
new file mode 100644
index 00000000000..6143286ab00
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_stream.html.markdown
@@ -0,0 +1,71 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_log_stream"
description: |-
  Provides a CloudWatch Log Stream resource.
---

# Resource: aws_cloudwatch_log_stream

Provides a CloudWatch Log Stream resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
from imports.aws.cloudwatch_log_stream import CloudwatchLogStream
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        yada = CloudwatchLogGroup(self, "yada",
            name="Yada"
        )
        CloudwatchLogStream(self, "foo",
            log_group_name=yada.name,
            name="SampleLogStream1234"
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name of the log stream. Must not be longer than 512 characters and must not contain `:`.
* `log_group_name` - (Required) The name of the log group under which the log stream is to be created.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Amazon Resource Name (ARN) specifying the log stream.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudwatch Log Stream using the stream's `log_group_name` and `name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Cloudwatch Log Stream using the stream's `log_group_name` and `name`. For example:

```console
% terraform import aws_cloudwatch_log_stream.foo Yada:SampleLogStream1234
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_log_subscription_filter.html.markdown b/website/docs/cdktf/python/r/cloudwatch_log_subscription_filter.html.markdown
new file mode 100644
index 00000000000..eec64173d24
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_log_subscription_filter.html.markdown
@@ -0,0 +1,73 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_log_subscription_filter"
description: |-
  Provides a CloudWatch Logs subscription filter.
---

# Resource: aws_cloudwatch_log_subscription_filter

Provides a CloudWatch Logs subscription filter resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_log_subscription_filter import CloudwatchLogSubscriptionFilter
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchLogSubscriptionFilter(self, "test_lambdafunction_logfilter",
            destination_arn=test_logstream.arn,
            distribution="Random",
            filter_pattern="logtype test",
            log_group_name="/aws/lambda/example_lambda_name",
            name="test_lambdafunction_logfilter",
            role_arn=iam_for_lambda.arn
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) A name for the subscription filter.
* `destination_arn` - (Required) The ARN of the destination to deliver matching log events to. Kinesis stream or Lambda function ARN.
* `filter_pattern` - (Required) A valid CloudWatch Logs filter pattern for subscribing to a filtered stream of log events. Use empty string `""` to match everything. For more information, see the [Amazon CloudWatch Logs User Guide](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html).
* `log_group_name` - (Required) The name of the log group to associate the subscription filter with.
* `role_arn` - (Optional) The ARN of an IAM role that grants Amazon CloudWatch Logs permissions to deliver ingested log events to the destination. If you use Lambda as a destination, you should skip this argument and use the `aws_lambda_permission` resource for granting access from CloudWatch logs to the destination Lambda function.
* `distribution` - (Optional) The method used to distribute log data to the destination. By default log data is grouped by log stream, but the grouping can be set to random for a more even distribution. This property is only applicable when the destination is an Amazon Kinesis stream. Valid values are "Random" and "ByLogStream".

## Attribute Reference

This resource exports no additional attributes.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Logs subscription filter using the log group name and subscription filter name separated by `|`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch Logs subscription filter using the log group name and subscription filter name separated by `|`. For example:

```console
% terraform import aws_cloudwatch_log_subscription_filter.test_lambdafunction_logfilter /aws/lambda/example_lambda_name|test_lambdafunction_logfilter
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_metric_alarm.html.markdown b/website/docs/cdktf/python/r/cloudwatch_metric_alarm.html.markdown
new file mode 100644
index 00000000000..6aacc8b650c
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_metric_alarm.html.markdown
@@ -0,0 +1,314 @@
---
subcategory: "CloudWatch"
layout: "aws"
page_title: "AWS: aws_cloudwatch_metric_alarm"
description: |-
  Provides a CloudWatch Metric Alarm resource.
---

# Resource: aws_cloudwatch_metric_alarm

Provides a CloudWatch Metric Alarm resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchMetricAlarm(self, "foobar",
            alarm_description="This metric monitors ec2 cpu utilization",
            alarm_name="terraform-test-foobar5",
            comparison_operator="GreaterThanOrEqualToThreshold",
            evaluation_periods=2,
            insufficient_data_actions=[],
            metric_name="CPUUtilization",
            namespace="AWS/EC2",
            period=120,
            statistic="Average",
            threshold=80
        )
```

## Example in Conjunction with Scaling Policies

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.autoscaling_policy import AutoscalingPolicy
from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        bat = AutoscalingPolicy(self, "bat",
            adjustment_type="ChangeInCapacity",
            autoscaling_group_name=bar.name,
            cooldown=300,
            name="foobar3-terraform-test",
            scaling_adjustment=4
        )
        aws_cloudwatch_metric_alarm_bat = CloudwatchMetricAlarm(self, "bat_1",
            alarm_actions=[bat.arn],
            alarm_description="This metric monitors ec2 cpu utilization",
            alarm_name="terraform-test-foobar5",
            comparison_operator="GreaterThanOrEqualToThreshold",
            dimensions={
                "AutoScalingGroupName": bar.name
            },
            evaluation_periods=2,
            metric_name="CPUUtilization",
            namespace="AWS/EC2",
            period=120,
            statistic="Average",
            threshold=80
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_cloudwatch_metric_alarm_bat.override_logical_id("bat")
```

## Example with an Expression

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchMetricAlarm(self, "foobar",
            alarm_description="Request error rate has exceeded 10%",
            alarm_name="terraform-test-foobar",
            comparison_operator="GreaterThanOrEqualToThreshold",
            evaluation_periods=2,
            insufficient_data_actions=[],
            metric_query=[CloudwatchMetricAlarmMetricQuery(
                expression="m2/m1*100",
                id="e1",
                label="Error Rate",
                return_data=Token.as_boolean("true")
            ), CloudwatchMetricAlarmMetricQuery(
                id="m1",
                metric=CloudwatchMetricAlarmMetricQueryMetric(
                    dimensions={
                        "LoadBalancer": "app/web"
                    },
                    metric_name="RequestCount",
                    namespace="AWS/ApplicationELB",
                    period=120,
                    stat="Sum",
                    unit="Count"
                )
            ), CloudwatchMetricAlarmMetricQuery(
                id="m2",
                metric=CloudwatchMetricAlarmMetricQueryMetric(
                    dimensions={
                        "LoadBalancer": "app/web"
                    },
                    metric_name="HTTPCode_ELB_5XX_Count",
                    namespace="AWS/ApplicationELB",
                    period=120,
                    stat="Sum",
                    unit="Count"
                )
            )
            ],
            threshold=10
        )
```

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchMetricAlarm(self, "xx_anomaly_detection",
            alarm_description="This metric monitors ec2 cpu utilization",
            alarm_name="terraform-test-foobar",
            comparison_operator="GreaterThanUpperThreshold",
            evaluation_periods=2,
            insufficient_data_actions=[],
            metric_query=[CloudwatchMetricAlarmMetricQuery(
                expression="ANOMALY_DETECTION_BAND(m1)",
                id="e1",
                label="CPUUtilization (Expected)",
                return_data=Token.as_boolean("true")
            ), CloudwatchMetricAlarmMetricQuery(
                id="m1",
                metric=CloudwatchMetricAlarmMetricQueryMetric(
                    dimensions={
                        "InstanceId": "i-abc123"
                    },
                    metric_name="CPUUtilization",
                    namespace="AWS/EC2",
                    period=120,
                    stat="Average",
                    unit="Count"
                ),
                return_data=Token.as_boolean("true")
            )
            ],
            threshold_metric_id="e1"
        )
```

## Example of monitoring Healthy Hosts on NLB using Target Group and NLB

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchMetricAlarm(self, "nlb_healthyhosts",
            actions_enabled=Token.as_boolean("true"),
            alarm_actions=[sns.arn],
            alarm_description="Number of healthy nodes in Target Group",
            alarm_name="alarmname",
            comparison_operator="LessThanThreshold",
            dimensions={
                "LoadBalancer": lb.arn_suffix,
                "TargetGroup": lb_tg.arn_suffix
            },
            evaluation_periods=1,
            metric_name="HealthyHostCount",
            namespace="AWS/NetworkELB",
            ok_actions=[sns.arn],
            period=60,
            statistic="Average",
            threshold=logstash_servers_count.number_value
        )
```

~> **NOTE:** You cannot create a metric alarm consisting of both `statistic` and `extended_statistic` parameters. You must choose one or the other.
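
As a hand-written sketch of the `extended_statistic` alternative (not `cdktf convert` output), the alarm below watches p99 target response time; `alb` is a hypothetical `aws_lb` reference, and `datapoints_to_alarm` makes this a 2-out-of-3 alarm.

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm

class PercentileAlarmSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchMetricAlarm(self, "p99_latency",
            alarm_description="p99 target response time is above 500ms",
            alarm_name="p99-target-response-time",
            comparison_operator="GreaterThanThreshold",
            datapoints_to_alarm=2,  # 2 of the 3 evaluated periods must breach
            dimensions={
                "LoadBalancer": alb.arn_suffix  # hypothetical aws_lb reference
            },
            evaluation_periods=3,
            # extended_statistic replaces statistic; the two are mutually exclusive
            extended_statistic="p99",
            metric_name="TargetResponseTime",
            namespace="AWS/ApplicationELB",
            period=60,
            threshold=0.5
        )
```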

## Argument Reference

See [related part of AWS Docs](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_PutMetricAlarm.html) for details about valid values.

This resource supports the following arguments:

* `alarm_name` - (Required) The descriptive name for the alarm. This name must be unique within the user's AWS account.
* `comparison_operator` - (Required) The arithmetic operation to use when comparing the specified Statistic and Threshold. The specified Statistic value is used as the first operand. Any of the following is supported: `GreaterThanOrEqualToThreshold`, `GreaterThanThreshold`, `LessThanThreshold`, `LessThanOrEqualToThreshold`. Additionally, the values `LessThanLowerOrGreaterThanUpperThreshold`, `LessThanLowerThreshold`, and `GreaterThanUpperThreshold` are used only for alarms based on anomaly detection models.
* `evaluation_periods` - (Required) The number of periods over which data is compared to the specified threshold.
* `metric_name` - (Optional) The name for the alarm's associated metric.
  See docs for [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
* `namespace` - (Optional) The namespace for the alarm's associated metric. See docs for the [list of namespaces](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/aws-namespaces.html).
  See docs for [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
* `period` - (Optional) The period in seconds over which the specified `statistic` is applied.
  Valid values are `10`, `30`, or any multiple of `60`.
* `statistic` - (Optional) The statistic to apply to the alarm's associated metric.
  Any of the following is supported: `SampleCount`, `Average`, `Sum`, `Minimum`, `Maximum`.
* `threshold` - (Optional) The value against which the specified statistic is compared. This parameter is required for alarms based on static thresholds, but should not be used for alarms based on anomaly detection models.
* `threshold_metric_id` - (Optional) If this is an alarm based on an anomaly detection model, make this value match the ID of the ANOMALY_DETECTION_BAND function.
* `actions_enabled` - (Optional) Indicates whether or not actions should be executed during any changes to the alarm's state. Defaults to `true`.
* `alarm_actions` - (Optional) The list of actions to execute when this alarm transitions into an ALARM state from any other state. Each action is specified as an Amazon Resource Name (ARN).
* `alarm_description` - (Optional) The description for the alarm.
* `datapoints_to_alarm` - (Optional) The number of datapoints that must be breaching to trigger the alarm.
* `dimensions` - (Optional) The dimensions for the alarm's associated metric. For the list of available dimensions see the AWS documentation [here](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
* `insufficient_data_actions` - (Optional) The list of actions to execute when this alarm transitions into an INSUFFICIENT_DATA state from any other state. Each action is specified as an Amazon Resource Name (ARN).
* `ok_actions` - (Optional) The list of actions to execute when this alarm transitions into an OK state from any other state. Each action is specified as an Amazon Resource Name (ARN).
* `unit` - (Optional) The unit for the alarm's associated metric.
* `extended_statistic` - (Optional) The percentile statistic for the metric associated with the alarm. Specify a value between p0.0 and p100.
* `treat_missing_data` - (Optional) Sets how this alarm is to handle missing data points. The following values are supported: `missing`, `ignore`, `breaching` and `notBreaching`. Defaults to `missing`.
* `evaluate_low_sample_count_percentiles` - (Optional) Used only for alarms based on percentiles.
  If you specify `ignore`, the alarm state will not change during periods with too few data points to be statistically significant.
  If you specify `evaluate` or omit this parameter, the alarm will always be evaluated and possibly change state no matter how many data points are available.
  The following values are supported: `ignore` and `evaluate`.
* `metric_query` - (Optional) Enables you to create an alarm based on a metric math expression. You may specify at most 20.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

~> **NOTE:** If you specify at least one `metric_query`, you may not specify a `metric_name`, `namespace`, `period` or `statistic`. If you do not specify a `metric_query`, you must specify each of these (although you may use `extended_statistic` instead of `statistic`).

### Nested fields

#### `metric_query`

* `id` - (Required) A short name used to tie this object to the results in the response. If you are performing math expressions on this set of data, this name represents that data and can serve as a variable in the mathematical expression. The valid characters are letters, numbers, and underscore. The first character must be a lowercase letter.
* `account_id` - (Optional) The ID of the account where the metrics are located, if this is a cross-account alarm.
* `expression` - (Optional) The math expression to be performed on the returned data, if this object is performing a math expression. This expression can use the id of the other metrics to refer to those metrics, and can also use the id of other expressions to use the result of those expressions. For more information about metric math expressions, see Metric Math Syntax and Functions in the [Amazon CloudWatch User Guide](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/using-metric-math.html#metric-math-syntax).
* `label` - (Optional) A human-readable label for this metric or expression. This is especially useful if this is an expression, so that you know what the value represents.
* `metric` - (Optional) The metric to be returned, along with statistics, period, and units. Use this parameter only if this object is retrieving a metric and not performing a math expression on returned data.
* `period` - (Optional) Granularity in seconds of returned data points.
  For metrics with regular resolution, valid values are any multiple of `60`.
  For high-resolution metrics, valid values are `1`, `5`, `10`, `30`, or any multiple of `60`.
* `return_data` - (Optional) Specify exactly one `metric_query` to be `true` to use that `metric_query` result as the alarm.

~> **NOTE:** You must specify either `metric` or `expression`. Not both.

#### `metric`

* `dimensions` - (Optional) The dimensions for this metric. For the list of available dimensions see the AWS documentation [here](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
* `metric_name` - (Required) The name for this metric.
  See docs for [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
* `namespace` - (Required) The namespace for this metric. See docs for the [list of namespaces](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/aws-namespaces.html).
  See docs for [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
* `period` - (Required) Granularity in seconds of returned data points.
  For metrics with regular resolution, valid values are any multiple of `60`.
  For high-resolution metrics, valid values are `1`, `5`, `10`, `30`, or any multiple of `60`.
* `stat` - (Required) The statistic to apply to this metric.
  See docs for [supported statistics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html).
* `unit` - (Optional) The unit for this metric.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the CloudWatch Metric Alarm.
* `id` - The ID of the CloudWatch Metric Alarm.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Metric Alarm using the `alarm_name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch Metric Alarm using the `alarm_name`. For example:

```console
% terraform import aws_cloudwatch_metric_alarm.test alarm-12345
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_metric_stream.html.markdown b/website/docs/cdktf/python/r/cloudwatch_metric_stream.html.markdown
new file mode 100644
index 00000000000..1222d9250c9
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_metric_stream.html.markdown
@@ -0,0 +1,242 @@
---
subcategory: "CloudWatch"
layout: "aws"
page_title: "AWS: aws_cloudwatch_metric_stream"
description: |-
  Provides a CloudWatch Metric Stream resource.
---

# Resource: aws_cloudwatch_metric_stream

Provides a CloudWatch Metric Stream resource.

## Example Usage

### Filters

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_metric_stream import CloudwatchMetricStream
from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
from imports.aws.iam_role import IamRole
from imports.aws.iam_role_policy import IamRolePolicy
from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream
from imports.aws.s3_bucket import S3Bucket
from imports.aws.s3_bucket_acl import S3BucketAcl
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        bucket = S3Bucket(self, "bucket",
            bucket="metric-stream-test-bucket"
        )
        S3BucketAcl(self, "bucket_acl",
            acl="private",
            bucket=bucket.id
        )
        firehose_assume_role = DataAwsIamPolicyDocument(self, "firehose_assume_role",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["sts:AssumeRole"],
                effect="Allow",
                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
                    identifiers=["firehose.amazonaws.com"],
                    type="Service"
                )
                ]
            )
            ]
        )
        firehose_to_s3 = DataAwsIamPolicyDocument(self, "firehose_to_s3",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["s3:AbortMultipartUpload", "s3:GetBucketLocation", "s3:GetObject", "s3:ListBucket", "s3:ListBucketMultipartUploads", "s3:PutObject"
                ],
                effect="Allow",
                resources=[bucket.arn, bucket.arn + "/*"]
            )
            ]
        )
        streams_assume_role = DataAwsIamPolicyDocument(self, "streams_assume_role",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["sts:AssumeRole"],
                effect="Allow",
                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
                    identifiers=["streams.metrics.cloudwatch.amazonaws.com"],
                    type="Service"
                )
                ]
            )
            ]
        )
        aws_iam_role_firehose_to_s3 = IamRole(self, "firehose_to_s3_5",
            assume_role_policy=Token.as_string(firehose_assume_role.json)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_iam_role_firehose_to_s3.override_logical_id("firehose_to_s3")
        metric_stream_to_firehose = IamRole(self, "metric_stream_to_firehose",
            assume_role_policy=Token.as_string(streams_assume_role.json),
            name="metric_stream_to_firehose_role"
        )
        aws_iam_role_policy_firehose_to_s3 = IamRolePolicy(self, "firehose_to_s3_7",
            name="default",
            policy=Token.as_string(firehose_to_s3.json),
            role=Token.as_string(aws_iam_role_firehose_to_s3.id)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_iam_role_policy_firehose_to_s3.override_logical_id("firehose_to_s3")
        s3_stream = KinesisFirehoseDeliveryStream(self, "s3_stream",
            destination="extended_s3",
            extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration(
                bucket_arn=bucket.arn,
                role_arn=Token.as_string(aws_iam_role_firehose_to_s3.arn)
            ),
            name="metric-stream-test-stream"
        )
        data_aws_iam_policy_document_metric_stream_to_firehose = DataAwsIamPolicyDocument(self, "metric_stream_to_firehose_9",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["firehose:PutRecord", "firehose:PutRecordBatch"],
                effect="Allow",
                resources=[s3_stream.arn]
            )
            ]
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_iam_policy_document_metric_stream_to_firehose.override_logical_id("metric_stream_to_firehose")
        CloudwatchMetricStream(self, "main",
            firehose_arn=s3_stream.arn,
            include_filter=[CloudwatchMetricStreamIncludeFilter(
                metric_names=["CPUUtilization", "NetworkOut"],
                namespace="AWS/EC2"
            ), CloudwatchMetricStreamIncludeFilter(
                metric_names=[],
                namespace="AWS/EBS"
            )
            ],
            name="my-metric-stream",
            output_format="json",
            role_arn=metric_stream_to_firehose.arn
        )
        aws_iam_role_policy_metric_stream_to_firehose = IamRolePolicy(self, "metric_stream_to_firehose_11",
            name="default",
            policy=Token.as_string(data_aws_iam_policy_document_metric_stream_to_firehose.json),
            role=metric_stream_to_firehose.id
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_iam_role_policy_metric_stream_to_firehose.override_logical_id("metric_stream_to_firehose")
```

### Additional Statistics

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_metric_stream import CloudwatchMetricStream
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchMetricStream(self, "main",
            firehose_arn=s3_stream.arn,
            name="my-metric-stream",
            output_format="json",
            role_arn=metric_stream_to_firehose.arn,
            statistics_configuration=[CloudwatchMetricStreamStatisticsConfiguration(
                additional_statistics=["p1", "tm99"],
                include_metric=[CloudwatchMetricStreamStatisticsConfigurationIncludeMetric(
                    metric_name="CPUUtilization",
                    namespace="AWS/EC2"
                )
                ]
            ), CloudwatchMetricStreamStatisticsConfiguration(
                additional_statistics=["TS(50.5:)"],
                include_metric=[CloudwatchMetricStreamStatisticsConfigurationIncludeMetric(
                    metric_name="CPUUtilization",
                    namespace="AWS/EC2"
                )
                ]
            )
            ]
        )
```

## Argument Reference

The following arguments are required:

* `firehose_arn` - (Required) ARN of the Amazon Kinesis Firehose delivery stream to use for this metric stream.
* `role_arn` - (Required) ARN of the IAM role that this metric stream will use to access Amazon Kinesis Firehose resources. For more information about role permissions, see [Trust between CloudWatch and Kinesis Data Firehose](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-metric-streams-trustpolicy.html).
* `output_format` - (Required) Output format for the stream. Possible values are `json` and `opentelemetry0.7`. For more information about output formats, see [Metric streams output formats](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-metric-streams-formats.html).

The following arguments are optional:

* `exclude_filter` - (Optional) List of exclusive metric filters. If you specify this parameter, the stream sends metrics from all metric namespaces except for the namespaces and the conditional metric names that you specify here. If you don't specify metric names, or provide empty metric names, the whole metric namespace is excluded. Conflicts with `include_filter`. A sketch follows the filter fields below.
* `include_filter` - (Optional) List of inclusive metric filters. If you specify this parameter, the stream sends only the conditional metric names from the metric namespaces that you specify here. If you don't specify metric names, or provide empty metric names, the whole metric namespace is included. Conflicts with `exclude_filter`.
* `name` - (Optional, Forces new resource) Friendly name of the metric stream. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`.
* `name_prefix` - (Optional, Forces new resource) Creates a unique friendly name beginning with the specified prefix. Conflicts with `name`.
* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `statistics_configuration` - (Optional) For each entry in this array, you specify one or more metrics and the list of additional statistics to stream for those metrics. The additional statistics that you can stream depend on the stream's `output_format`. If the OutputFormat is `json`, you can stream any additional statistic that is supported by CloudWatch, listed in [CloudWatch statistics definitions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html). If the OutputFormat is `opentelemetry0.7`, you can stream percentile statistics (p99 etc.). See details below.
* `include_linked_accounts_metrics` - (Optional) If you are creating a metric stream in a monitoring account, specify `true` to include metrics from source accounts that are linked to this monitoring account, in the metric stream. The default is `false`. For more information about linking accounts, see [CloudWatch cross-account observability](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Unified-Cross-Account.html).

### Nested Fields

#### `exclude_filter`

* `namespace` - (Required) Name of the metric namespace in the filter.
* `metric_names` - (Optional) An array that defines the metrics you want to exclude for this metric namespace.

#### `include_filter`

* `namespace` - (Required) Name of the metric namespace in the filter.
* `metric_names` - (Optional) An array that defines the metrics you want to include for this metric namespace.
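
As the sketch promised above (hand-written, not `cdktf convert` output): an `exclude_filter` stream that forwards every namespace except EC2 and EBS metrics. `s3_stream` and `metric_stream_to_firehose` are the resources from the Filters example, and the `CloudwatchMetricStreamExcludeFilter` class name follows cdktf's generated naming convention.

```python
from imports.aws.cloudwatch_metric_stream import CloudwatchMetricStream

# Hand-written sketch, assumed to live inside a stack's __init__ alongside
# the Filters example above.
CloudwatchMetricStream(self, "all_but_ec2_and_ebs",
    # empty metric_names lists exclude each whole namespace
    exclude_filter=[CloudwatchMetricStreamExcludeFilter(
        metric_names=[],
        namespace="AWS/EC2"
    ), CloudwatchMetricStreamExcludeFilter(
        metric_names=[],
        namespace="AWS/EBS"
    )
    ],
    firehose_arn=s3_stream.arn,
    name="my-exclusion-stream",
    output_format="json",
    role_arn=metric_stream_to_firehose.arn
)
```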

#### `statistics_configuration`

* `additional_statistics` - (Required) The additional statistics to stream for the metrics listed in `include_metric`.
* `include_metric` - (Required) An array that defines the metrics that are to have additional statistics streamed. See details below.

#### `include_metric`

* `metric_name` - (Required) The name of the metric.
* `namespace` - (Required) The namespace of the metric.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - ARN of the metric stream.
* `creation_date` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the metric stream was created.
* `last_update_date` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the metric stream was last updated.
* `state` - State of the metric stream. Possible values are `running` and `stopped`.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch metric streams using the `name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch metric streams using the `name`. For example:

```console
% terraform import aws_cloudwatch_metric_stream.sample sample-stream-name
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cloudwatch_query_definition.html.markdown b/website/docs/cdktf/python/r/cloudwatch_query_definition.html.markdown
new file mode 100644
index 00000000000..1377049ab88
--- /dev/null
+++ b/website/docs/cdktf/python/r/cloudwatch_query_definition.html.markdown
@@ -0,0 +1,69 @@
---
subcategory: "CloudWatch Logs"
layout: "aws"
page_title: "AWS: aws_cloudwatch_query_definition"
description: |-
  Provides a CloudWatch Logs query definition resource.
---

# Resource: aws_cloudwatch_query_definition

Provides a CloudWatch Logs query definition resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.cloudwatch_query_definition import CloudwatchQueryDefinition
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CloudwatchQueryDefinition(self, "example",
            log_group_names=["/aws/logGroup1", "/aws/logGroup2"],
            name="custom_query",
            query_string="fields @timestamp, @message\n| sort @timestamp desc\n| limit 25\n\n"
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name of the query.
* `query_string` - (Required) The query to save. You can read more about CloudWatch Logs Query Syntax in the [documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CWL_QuerySyntax.html).
* `log_group_names` - (Optional) Specific log groups to use with the query.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `query_definition_id` - The query definition ID.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch query definitions using the query definition ARN.
The ARN can be found on the "Edit Query" page for the query in the AWS Console. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import CloudWatch query definitions using the query definition ARN. The ARN can be found on the "Edit Query" page for the query in the AWS Console. For example:

```console
% terraform import aws_cloudwatch_query_definition.example arn:aws:logs:us-west-2:123456789012:query-definition:269951d7-6f75-496d-9d7b-6b7a5486bdbd
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codeartifact_domain.html.markdown b/website/docs/cdktf/python/r/codeartifact_domain.html.markdown
new file mode 100644
index 00000000000..08af90f378e
--- /dev/null
+++ b/website/docs/cdktf/python/r/codeartifact_domain.html.markdown
@@ -0,0 +1,73 @@
---
subcategory: "CodeArtifact"
layout: "aws"
page_title: "AWS: aws_codeartifact_domain"
description: |-
  Provides a CodeArtifact Domain resource.
---

# Resource: aws_codeartifact_domain

Provides a CodeArtifact Domain Resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.codeartifact_domain import CodeartifactDomain
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        CodeartifactDomain(self, "example",
            domain="example"
        )
```

## Argument Reference

This resource supports the following arguments:

* `domain` - (Required) The name of the domain to create. All domain names in an AWS Region that are in the same AWS account must be unique. The domain name is used as the prefix in DNS hostnames. Do not use sensitive information in a domain name because it is publicly discoverable.
* `encryption_key` - (Optional) The encryption key for the domain. This is used to encrypt content stored in a domain. The KMS Key Amazon Resource Name (ARN). The default `aws/codeartifact` AWS KMS master key is used if this element is absent.
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ARN of the Domain.
* `arn` - The ARN of the Domain.
* `owner` - The AWS account ID that owns the domain.
* `repository_count` - The number of repositories in the domain.
* `created_time` - A timestamp that represents the date and time the domain was created in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
* `asset_size_bytes` - The total size of all assets in the domain.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Domain using the CodeArtifact Domain ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeArtifact Domain using the CodeArtifact Domain ARN. For example:
+
+```console
+% terraform import aws_codeartifact_domain.example arn:aws:codeartifact:us-west-2:012345678912:domain/tf-acc-test-8593714120730241305
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codeartifact_domain_permissions_policy.html.markdown b/website/docs/cdktf/python/r/codeartifact_domain_permissions_policy.html.markdown
new file mode 100644
index 00000000000..74a9e9d8a20
--- /dev/null
+++ b/website/docs/cdktf/python/r/codeartifact_domain_permissions_policy.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "CodeArtifact"
+layout: "aws"
+page_title: "AWS: aws_codeartifact_domain_permissions_policy"
+description: |-
+  Provides a CodeArtifact Domain Permissions Policy resource.
+---
+
+
+
+# Resource: aws_codeartifact_domain_permissions_policy
+
+Provides a CodeArtifact Domain Permissions Policy Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codeartifact_domain import CodeartifactDomain
+from imports.aws.codeartifact_domain_permissions_policy import CodeartifactDomainPermissionsPolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            description="domain key"
+        )
+        aws_codeartifact_domain_example = CodeartifactDomain(self, "example_1",
+            domain="example",
+            encryption_key=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codeartifact_domain_example.override_logical_id("example")
+        test = DataAwsIamPolicyDocument(self, "test",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["codeartifact:CreateRepository"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="*"
+                )
+                ],
+                resources=[Token.as_string(aws_codeartifact_domain_example.arn)]
+            )
+            ]
+        )
+        aws_codeartifact_domain_permissions_policy_test = CodeartifactDomainPermissionsPolicy(self, "test_3",
+            domain=Token.as_string(aws_codeartifact_domain_example.domain),
+            policy_document=Token.as_string(test.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codeartifact_domain_permissions_policy_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain` - (Required) The name of the domain on which to set the resource policy.
+
+* `policy_document` - (Required) A JSON policy string to be set as the access control resource policy on the provided domain.
+* `domain_owner` - (Optional) The account number of the AWS account that owns the domain.
+* `policy_revision` - (Optional) The current revision of the resource policy to be set. This revision is used for optimistic locking, which prevents others from overwriting your changes to the domain's resource policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the domain.
+* `resource_arn` - The ARN of the resource associated with the resource policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Domain Permissions Policies using the CodeArtifact Domain ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeArtifact Domain Permissions Policies using the CodeArtifact Domain ARN. For example:
+
+```console
+% terraform import aws_codeartifact_domain_permissions_policy.example arn:aws:codeartifact:us-west-2:012345678912:domain/tf-acc-test-1928056699409417367
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codeartifact_repository.html.markdown b/website/docs/cdktf/python/r/codeartifact_repository.html.markdown
new file mode 100644
index 00000000000..9cd648275e4
--- /dev/null
+++ b/website/docs/cdktf/python/r/codeartifact_repository.html.markdown
@@ -0,0 +1,149 @@
+---
+subcategory: "CodeArtifact"
+layout: "aws"
+page_title: "AWS: aws_codeartifact_repository"
+description: |-
+  Provides a CodeArtifact Repository resource.
+---
+
+
+
+# Resource: aws_codeartifact_repository
+
+Provides a CodeArtifact Repository Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codeartifact_domain import CodeartifactDomain
+from imports.aws.codeartifact_repository import CodeartifactRepository
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            description="domain key"
+        )
+        aws_codeartifact_domain_example = CodeartifactDomain(self, "example_1",
+            domain="example",
+            encryption_key=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codeartifact_domain_example.override_logical_id("example")
+        CodeartifactRepository(self, "test",
+            domain=Token.as_string(aws_codeartifact_domain_example.domain),
+            repository="example"
+        )
+```
+
+## Example Usage with upstream repository
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.codeartifact_repository import CodeartifactRepository +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + upstream = CodeartifactRepository(self, "upstream", + domain=test.domain, + repository="upstream" + ) + CodeartifactRepository(self, "test", + domain=example.domain, + repository="example", + upstream=[CodeartifactRepositoryUpstream( + repository_name=upstream.repository + ) + ] + ) +``` + +## Example Usage with external connection + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codeartifact_repository import CodeartifactRepository +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodeartifactRepository(self, "test", + domain=example.domain, + external_connections=CodeartifactRepositoryExternalConnections( + external_connection_name="public:npmjs" + ), + repository="example" + ) + CodeartifactRepository(self, "upstream", + domain=Token.as_string(aws_codeartifact_domain_test.domain), + repository="upstream" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) The domain that contains the created repository. +* `repository` - (Required) The name of the repository to create. +* `domain_owner` - (Optional) The account number of the AWS account that owns the domain. +* `description` - (Optional) The description of the repository. +* `upstream` - (Optional) A list of upstream repositories to associate with the repository. The order of the upstream repositories in the list determines their priority order when AWS CodeArtifact looks for a requested package version. see [Upstream](#upstream) +* `external_connections` - An array of external connections associated with the repository. Only one external connection can be set per repository. see [External Connections](#external-connections). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Upstream + +* `repository_name` - (Required) The name of an upstream repository. + +### External Connections + +* `external_connection_name` - (Required) The name of the external connection associated with a repository. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the repository. +* `arn` - The ARN of the repository. +* `administrator_account` - The account number of the AWS account that manages the repository. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Repository using the CodeArtifact Repository ARN. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeArtifact Repository using the CodeArtifact Repository ARN. For example:
+
+```console
+% terraform import aws_codeartifact_repository.example arn:aws:codeartifact:us-west-2:012345678912:repository/tf-acc-test-6968272603913957763/tf-acc-test-6968272603913957763
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codeartifact_repository_permissions_policy.html.markdown b/website/docs/cdktf/python/r/codeartifact_repository_permissions_policy.html.markdown
new file mode 100644
index 00000000000..dba9edd930b
--- /dev/null
+++ b/website/docs/cdktf/python/r/codeartifact_repository_permissions_policy.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "CodeArtifact"
+layout: "aws"
+page_title: "AWS: aws_codeartifact_repository_permissions_policy"
+description: |-
+  Provides a CodeArtifact Repository Permissions Policy resource.
+---
+
+
+
+# Resource: aws_codeartifact_repository_permissions_policy
+
+Provides a CodeArtifact Repository Permissions Policy Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codeartifact_domain import CodeartifactDomain
+from imports.aws.codeartifact_repository import CodeartifactRepository
+from imports.aws.codeartifact_repository_permissions_policy import CodeartifactRepositoryPermissionsPolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            description="domain key"
+        )
+        aws_codeartifact_domain_example = CodeartifactDomain(self, "example_1",
+            domain="example",
+            encryption_key=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codeartifact_domain_example.override_logical_id("example")
+        aws_codeartifact_repository_example = CodeartifactRepository(self, "example_2",
+            domain=Token.as_string(aws_codeartifact_domain_example.domain),
+            repository="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codeartifact_repository_example.override_logical_id("example")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_3",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["codeartifact:ReadFromRepository"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="*"
+                )
+                ],
+                resources=[Token.as_string(aws_codeartifact_repository_example.arn)]
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_codeartifact_repository_permissions_policy_example = CodeartifactRepositoryPermissionsPolicy(self, "example_4",
+            domain=Token.as_string(aws_codeartifact_domain_example.domain),
+            policy_document=Token.as_string(data_aws_iam_policy_document_example.json),
+            repository=Token.as_string(aws_codeartifact_repository_example.repository)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codeartifact_repository_permissions_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `repository` - (Required) The name of the repository to set the resource policy on.
+* `domain` - (Required) The name of the domain on which to set the resource policy.
+* `policy_document` - (Required) A JSON policy string to be set as the access control resource policy on the provided domain.
+* `domain_owner` - (Optional) The account number of the AWS account that owns the domain.
+* `policy_revision` - (Optional) The current revision of the resource policy to be set. This revision is used for optimistic locking, which prevents others from overwriting your changes to the domain's resource policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the resource associated with the resource policy.
+* `resource_arn` - The ARN of the resource associated with the resource policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Repository Permissions Policies using the CodeArtifact Repository ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeArtifact Repository Permissions Policies using the CodeArtifact Repository ARN. For example:
+
+```console
+% terraform import aws_codeartifact_repository_permissions_policy.example arn:aws:codeartifact:us-west-2:012345678912:repository/tf-acc-test-6968272603913957763/tf-acc-test-6968272603913957763
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codebuild_project.html.markdown b/website/docs/cdktf/python/r/codebuild_project.html.markdown
new file mode 100644
index 00000000000..82ca41f7d40
--- /dev/null
+++ b/website/docs/cdktf/python/r/codebuild_project.html.markdown
@@ -0,0 +1,409 @@
+---
+subcategory: "CodeBuild"
+layout: "aws"
+page_title: "AWS: aws_codebuild_project"
+description: |-
+  Provides a CodeBuild Project resource.
+---
+
+
+
+# Resource: aws_codebuild_project
+
+Provides a CodeBuild Project resource. See also the [`aws_codebuild_webhook` resource](/docs/providers/aws/r/codebuild_webhook.html), which manages the webhook to the source (e.g., the "rebuild every time a code change is pushed" option in the CodeBuild web console).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.codebuild_project import CodebuildProject +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example" + ) + aws_s3_bucket_acl_example = S3BucketAcl(self, "example_1", + acl="private", + bucket=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_acl_example.override_logical_id("example") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["codebuild.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_3", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents" + ], + effect="Allow", + resources=["*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["ec2:CreateNetworkInterface", "ec2:DescribeDhcpOptions", "ec2:DescribeNetworkInterfaces", "ec2:DeleteNetworkInterface", "ec2:DescribeSubnets", "ec2:DescribeSecurityGroups", "ec2:DescribeVpcs" + ], + effect="Allow", + resources=["*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["ec2:CreateNetworkInterfacePermission"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=[example1.arn, example2.arn], + variable="ec2:Subnet" + ), DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=["codebuild.amazonaws.com"], + variable="ec2:AuthorizedService" + ) + ], + effect="Allow", + resources=["arn:aws:ec2:us-east-1:123456789012:network-interface/*" + ] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + effect="Allow", + resources=[example.arn, "${" + example.arn + "}/*"] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_iam_role_example = IamRole(self, "example_4", + assume_role_policy=Token.as_string(assume_role.json), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_iam_role_policy_example = IamRolePolicy(self, "example_5", + policy=Token.as_string(data_aws_iam_policy_document_example.json), + role=Token.as_string(aws_iam_role_example.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_iam_role_policy_example.override_logical_id("example")
+        aws_codebuild_project_example = CodebuildProject(self, "example_6",
+            artifacts=CodebuildProjectArtifacts(
+                type="NO_ARTIFACTS"
+            ),
+            build_timeout=Token.as_number("5"),
+            cache=CodebuildProjectCache(
+                location=example.bucket,
+                type="S3"
+            ),
+            description="test_codebuild_project",
+            environment=CodebuildProjectEnvironment(
+                compute_type="BUILD_GENERAL1_SMALL",
+                environment_variable=[CodebuildProjectEnvironmentEnvironmentVariable(
+                    name="SOME_KEY1",
+                    value="SOME_VALUE1"
+                ), CodebuildProjectEnvironmentEnvironmentVariable(
+                    name="SOME_KEY2",
+                    type="PARAMETER_STORE",
+                    value="SOME_VALUE2"
+                )
+                ],
+                image="aws/codebuild/amazonlinux2-x86_64-standard:4.0",
+                image_pull_credentials_type="CODEBUILD",
+                type="LINUX_CONTAINER"
+            ),
+            logs_config=CodebuildProjectLogsConfig(
+                cloudwatch_logs=CodebuildProjectLogsConfigCloudwatchLogs(
+                    group_name="log-group",
+                    stream_name="log-stream"
+                ),
+                s3_logs=CodebuildProjectLogsConfigS3Logs(
+                    location="${" + example.id + "}/build-log",
+                    status="ENABLED"
+                )
+            ),
+            name="test-project",
+            service_role=Token.as_string(aws_iam_role_example.arn),
+            source=CodebuildProjectSource(
+                git_clone_depth=1,
+                git_submodules_config=CodebuildProjectSourceGitSubmodulesConfig(
+                    fetch_submodules=True
+                ),
+                location="https://github.com/mitchellh/packer.git",
+                type="GITHUB"
+            ),
+            source_version="master",
+            tags={
+                "Environment": "Test"
+            },
+            vpc_config=CodebuildProjectVpcConfig(
+                security_group_ids=[
+                    Token.as_string(aws_security_group_example1.id),
+                    Token.as_string(aws_security_group_example2.id)
+                ],
+                subnets=[example1.id, example2.id],
+                vpc_id=Token.as_string(aws_vpc_example.id)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codebuild_project_example.override_logical_id("example")
+        CodebuildProject(self, "project-with-cache",
+            artifacts=CodebuildProjectArtifacts(
+                type="NO_ARTIFACTS"
+            ),
+            build_timeout=Token.as_number("5"),
+            cache=CodebuildProjectCache(
+                modes=["LOCAL_DOCKER_LAYER_CACHE", "LOCAL_SOURCE_CACHE"],
+                type="LOCAL"
+            ),
+            description="test_codebuild_project_cache",
+            environment=CodebuildProjectEnvironment(
+                compute_type="BUILD_GENERAL1_SMALL",
+                environment_variable=[CodebuildProjectEnvironmentEnvironmentVariable(
+                    name="SOME_KEY1",
+                    value="SOME_VALUE1"
+                )
+                ],
+                image="aws/codebuild/amazonlinux2-x86_64-standard:4.0",
+                image_pull_credentials_type="CODEBUILD",
+                type="LINUX_CONTAINER"
+            ),
+            name="test-project-cache",
+            queued_timeout=Token.as_number("5"),
+            service_role=Token.as_string(aws_iam_role_example.arn),
+            source=CodebuildProjectSource(
+                git_clone_depth=1,
+                location="https://github.com/mitchellh/packer.git",
+                type="GITHUB"
+            ),
+            tags={
+                "Environment": "Test"
+            }
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `artifacts` - (Required) Configuration block. Detailed below.
+* `environment` - (Required) Configuration block. Detailed below.
+* `name` - (Required) Project's name.
+* `service_role` - (Required) Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.
+* `source` - (Required) Configuration block. Detailed below.
+
+The following arguments are optional:
+
+* `badge_enabled` - (Optional) Generates a publicly-accessible URL for the project's build badge.
Available as `badge_url` attribute when enabled.
+* `build_batch_config` - (Optional) Defines the batch build options for the project.
+* `build_timeout` - (Optional) Number of minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait until timing out any related build that does not get marked as completed. The default is 60 minutes.
+* `cache` - (Optional) Configuration block. Detailed below.
+* `concurrent_build_limit` - (Optional) Specify a maximum number of concurrent builds for the project. The value specified must be greater than 0 and less than the account concurrent running builds limit.
+* `description` - (Optional) Short description of the project.
+* `file_system_locations` - (Optional) A set of file system locations to mount inside the build. File system locations are documented below.
+* `encryption_key` - (Optional) AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build project's build output artifacts.
+* `logs_config` - (Optional) Configuration block. Detailed below.
+* `project_visibility` - (Optional) Specifies the visibility of the project's builds. Possible values are: `PUBLIC_READ` and `PRIVATE`. Default value is `PRIVATE`.
+* `resource_access_role` - (Optional) The ARN of the IAM role that enables CodeBuild to access the CloudWatch Logs and Amazon S3 artifacts for the project's builds.
+* `queued_timeout` - (Optional) Number of minutes, from 5 to 480 (8 hours), a build is allowed to be queued before it times out. The default is 8 hours.
+* `secondary_artifacts` - (Optional) Configuration block. Detailed below.
+* `secondary_sources` - (Optional) Configuration block. Detailed below.
+* `secondary_source_version` - (Optional) Configuration block. Detailed below.
+* `source_version` - (Optional) Version of the build input to be built for this project. If not specified, the latest version is used.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_config` - (Optional) Configuration block. Detailed below.
+
+### artifacts
+
+* `artifact_identifier` - (Optional) Artifact identifier. Must be the same as specified inside the AWS CodeBuild build specification.
+* `bucket_owner_access` - (Optional) Specifies the bucket owner's access for objects that another account uploads to their Amazon S3 bucket. By default, only the account that uploads the objects to the bucket has access to these objects. This property allows you to give the bucket owner access to these objects. Valid values are `NONE`, `READ_ONLY`, and `FULL`. Your CodeBuild service role must have the `s3:PutBucketAcl` permission. This permission allows CodeBuild to modify the access control list for the bucket.
+* `encryption_disabled` - (Optional) Whether to disable encrypting output artifacts. If `type` is set to `NO_ARTIFACTS`, this value is ignored. Defaults to `false`.
+* `location` - (Optional) Information about the build output artifact location. If `type` is set to `CODEPIPELINE` or `NO_ARTIFACTS`, this value is ignored. If `type` is set to `S3`, this is the name of the output bucket.
+* `name` - (Optional) Name of the project. If `type` is set to `S3`, this is the name of the output artifact object.
+* `namespace_type` - (Optional) Namespace to use in storing build artifacts.
If `type` is set to `S3`, then valid values are `BUILD_ID`, `NONE`.
+* `override_artifact_name` - (Optional) Whether a name specified in the build specification overrides the artifact name.
+* `packaging` - (Optional) Type of build output artifact to create. If `type` is set to `S3`, valid values are `NONE`, `ZIP`.
+* `path` - (Optional) If `type` is set to `S3`, this is the path to the output artifact.
+* `type` - (Required) Build output artifact's type. Valid values: `CODEPIPELINE`, `NO_ARTIFACTS`, `S3`.
+
+### build_batch_config
+
+* `combine_artifacts` - (Optional) Specifies if the build artifacts for the batch build should be combined into a single artifact location.
+* `restrictions` - (Optional) Configuration block specifying the restrictions for the batch build. Detailed below.
+* `service_role` - (Required) Specifies the service role ARN for the batch build project.
+* `timeout_in_mins` - (Optional) Specifies the maximum amount of time, in minutes, that the batch build must be completed in.
+
+#### build_batch_config: restrictions
+
+* `compute_types_allowed` - (Optional) An array of strings that specify the compute types that are allowed for the batch build. See [Build environment compute types](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html) in the AWS CodeBuild User Guide for these values.
+* `maximum_builds_allowed` - (Optional) Specifies the maximum number of builds allowed.
+
+### cache
+
+* `location` - (Required when cache type is `S3`) Location where the AWS CodeBuild project stores cached resources. For type `S3`, the value must be a valid S3 bucket name/prefix.
+* `modes` - (Required when cache type is `LOCAL`) Specifies settings that AWS CodeBuild uses to store and reuse build dependencies. Valid values: `LOCAL_SOURCE_CACHE`, `LOCAL_DOCKER_LAYER_CACHE`, `LOCAL_CUSTOM_CACHE`.
+* `type` - (Optional) Type of storage that will be used for the AWS CodeBuild project cache. Valid values: `NO_CACHE`, `LOCAL`, `S3`. Defaults to `NO_CACHE`.
+
+### environment
+
+* `certificate` - (Optional) ARN of the S3 bucket, path prefix and object key that contains the PEM-encoded certificate.
+* `compute_type` - (Required) Information about the compute resources the build project will use. Valid values: `BUILD_GENERAL1_SMALL`, `BUILD_GENERAL1_MEDIUM`, `BUILD_GENERAL1_LARGE`, `BUILD_GENERAL1_2XLARGE`. `BUILD_GENERAL1_SMALL` is only valid if `type` is set to `LINUX_CONTAINER`. When `type` is set to `LINUX_GPU_CONTAINER`, `compute_type` must be `BUILD_GENERAL1_LARGE`.
+* `environment_variable` - (Optional) Configuration block. Detailed below.
+* `image_pull_credentials_type` - (Optional) Type of credentials AWS CodeBuild uses to pull images in your build. Valid values: `CODEBUILD`, `SERVICE_ROLE`. When you use a cross-account or private registry image, you must use SERVICE_ROLE credentials. When you use an AWS CodeBuild curated image, you must use CodeBuild credentials. Defaults to `CODEBUILD`.
+* `image` - (Required) Docker image to use for this build project. Valid values include [Docker images provided by CodeBuild](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-available.html) (e.g., `aws/codebuild/amazonlinux2-x86_64-standard:4.0`), [Docker Hub images](https://hub.docker.com/) (e.g., `hashicorp/terraform:latest`), and full Docker repository URIs such as those for ECR (e.g., `137112412989.dkr.ecr.us-west-2.amazonaws.com/amazonlinux:latest`).
+
+* `privileged_mode` - (Optional) Whether to enable running the Docker daemon inside a Docker container. Defaults to `false`.
+* `registry_credential` - (Optional) Configuration block. Detailed below.
+* `type` - (Required) Type of build environment to use for related builds. Valid values: `LINUX_CONTAINER`, `LINUX_GPU_CONTAINER`, `WINDOWS_CONTAINER` (deprecated), `WINDOWS_SERVER_2019_CONTAINER`, `ARM_CONTAINER`. For additional information, see the [CodeBuild User Guide](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html).
+
+#### environment: environment_variable
+
+* `name` - (Required) Environment variable's name or key.
+* `type` - (Optional) Type of environment variable. Valid values: `PARAMETER_STORE`, `PLAINTEXT`, `SECRETS_MANAGER`.
+* `value` - (Required) Environment variable's value.
+
+#### environment: registry_credential
+
+Credentials for access to a private Docker registry.
+
+* `credential` - (Required) ARN or name of credentials created using AWS Secrets Manager.
+* `credential_provider` - (Required) Service that created the credentials to access a private Docker registry. Valid value: `SECRETS_MANAGER` (AWS Secrets Manager).
+
+### file_system_locations
+
+See [ProjectFileSystemLocation](https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ProjectFileSystemLocation.html) for more details of the fields.
+
+* `identifier` - (Optional) The name used to access a file system created by Amazon EFS. CodeBuild creates an environment variable by appending the identifier in all capital letters to CODEBUILD\_. For example, if you specify my-efs for identifier, a new environment variable is created named CODEBUILD_MY-EFS.
+* `location` - (Optional) A string that specifies the location of the file system created by Amazon EFS. Its format is `efs-dns-name:/directory-path`.
+* `mount_options` - (Optional) The mount options for a file system created by AWS EFS.
+* `mount_point` - (Optional) The location in the container where you mount the file system.
+* `type` - (Optional) The type of the file system. The one supported type is `EFS`.
+
+### logs_config
+
+* `cloudwatch_logs` - (Optional) Configuration block. Detailed below.
+* `s3_logs` - (Optional) Configuration block. Detailed below.
+
+#### logs_config: cloudwatch_logs
+
+* `group_name` - (Optional) Group name of the logs in CloudWatch Logs.
+* `status` - (Optional) Current status of logs in CloudWatch Logs for a build project. Valid values: `ENABLED`, `DISABLED`. Defaults to `ENABLED`.
+* `stream_name` - (Optional) Stream name of the logs in CloudWatch Logs.
+
+#### logs_config: s3_logs
+
+* `encryption_disabled` - (Optional) Whether to disable encrypting S3 logs. Defaults to `false`.
+* `location` - (Optional) Name of the S3 bucket and the path prefix for S3 logs. Must be set if status is `ENABLED`, otherwise it must be empty.
+* `status` - (Optional) Current status of logs in S3 for a build project. Valid values: `ENABLED`, `DISABLED`. Defaults to `DISABLED`.
+* `bucket_owner_access` - (Optional) Specifies the bucket owner's access for objects that another account uploads to their Amazon S3 bucket. By default, only the account that uploads the objects to the bucket has access to these objects. This property allows you to give the bucket owner access to these objects. Valid values are `NONE`, `READ_ONLY`, and `FULL`. Your CodeBuild service role must have the `s3:PutBucketAcl` permission. This permission allows CodeBuild to modify the access control list for the bucket.
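+
+Because the examples above only use CodeBuild-curated images, here is a hedged, hand-written sketch (not `cdktf convert` output) of the `environment` and `registry_credential` blocks documented above for a private Docker registry; the registry URI, role ARN, and Secrets Manager ARN are placeholders, and the struct class names follow the binding convention used in the examples above:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.codebuild_project import (CodebuildProject,
+    CodebuildProjectArtifacts, CodebuildProjectEnvironment,
+    CodebuildProjectEnvironmentRegistryCredential, CodebuildProjectSource)
+class PrivateRegistrySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CodebuildProject(self, "private_registry",
+            name="private-registry-project",
+            service_role="arn:aws:iam::123456789012:role/example",  # placeholder
+            artifacts=CodebuildProjectArtifacts(type="NO_ARTIFACTS"),
+            source=CodebuildProjectSource(
+                type="NO_SOURCE",
+                # With NO_SOURCE, a buildspec must be provided inline.
+                buildspec="version: 0.2\nphases:\n  build:\n    commands:\n      - make"
+            ),
+            environment=CodebuildProjectEnvironment(
+                compute_type="BUILD_GENERAL1_SMALL",
+                type="LINUX_CONTAINER",
+                image="registry.example.com/builder:latest",  # placeholder private image
+                # Private registry images require SERVICE_ROLE pull credentials.
+                image_pull_credentials_type="SERVICE_ROLE",
+                registry_credential=CodebuildProjectEnvironmentRegistryCredential(
+                    credential="arn:aws:secretsmanager:us-west-2:123456789012:secret:docker-login",  # placeholder
+                    credential_provider="SECRETS_MANAGER"
+                )
+            )
+        )
+```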
+
+### secondary_artifacts
+
+* `artifact_identifier` - (Required) Artifact identifier. Must be the same as specified inside the AWS CodeBuild build specification.
+* `bucket_owner_access` - (Optional) Specifies the bucket owner's access for objects that another account uploads to their Amazon S3 bucket. By default, only the account that uploads the objects to the bucket has access to these objects. This property allows you to give the bucket owner access to these objects. Valid values are `NONE`, `READ_ONLY`, and `FULL`. The CodeBuild service role must have the `s3:PutBucketAcl` permission. This permission allows CodeBuild to modify the access control list for the bucket.
+* `encryption_disabled` - (Optional) Whether to disable encrypting output artifacts. If `type` is set to `NO_ARTIFACTS`, this value is ignored. Defaults to `false`.
+* `location` - (Optional) Information about the build output artifact location. If `type` is set to `CODEPIPELINE` or `NO_ARTIFACTS`, this value is ignored if specified. If `type` is set to `S3`, this is the name of the output bucket. If `path` is not specified, `location` can specify the path of the output artifact in the output bucket.
+* `name` - (Optional) Name of the project. If `type` is set to `CODEPIPELINE` or `NO_ARTIFACTS`, this value is ignored if specified. If `type` is set to `S3`, this is the name of the output artifact object.
+* `namespace_type` - (Optional) Namespace to use in storing build artifacts. If `type` is set to `CODEPIPELINE` or `NO_ARTIFACTS`, this value is ignored if specified. If `type` is set to `S3`, valid values are `BUILD_ID` or `NONE`.
+* `override_artifact_name` - (Optional) Whether a name specified in the build specification overrides the artifact name.
+* `packaging` - (Optional) Type of build output artifact to create. If `type` is set to `CODEPIPELINE` or `NO_ARTIFACTS`, this value is ignored if specified. If `type` is set to `S3`, valid values are `NONE` or `ZIP`.
+* `path` - (Optional) Along with `namespace_type` and `name`, the pattern that AWS CodeBuild uses to name and store the output artifact. If `type` is set to `CODEPIPELINE` or `NO_ARTIFACTS`, this value is ignored if specified. If `type` is set to `S3`, this is the path to the output artifact.
+* `type` - (Required) Build output artifact's type. Valid values: `CODEPIPELINE`, `NO_ARTIFACTS`, and `S3`.
+
+### secondary_sources
+
+* `buildspec` - (Optional) The build spec declaration to use for this build project's related builds. This must be set when `type` is `NO_SOURCE`. It can either be a path to a file residing in the repository to be built or a local file path leveraging the `file()` built-in.
+* `git_clone_depth` - (Optional) Truncate git history to this many commits. Use `0` for a full checkout, which is required to run commands like `git branch --show-current`. See [AWS CodePipeline User Guide: Tutorial: Use full clone with a GitHub pipeline source](https://docs.aws.amazon.com/codepipeline/latest/userguide/tutorials-github-gitclone.html) for details.
+* `git_submodules_config` - (Optional) Configuration block. Detailed below.
+* `insecure_ssl` - (Optional) Ignore SSL warnings when connecting to source control.
+* `location` - (Optional) Location of the source code from Git or S3.
+* `report_build_status` - (Optional) Whether to report the status of a build's start and finish to your source provider. This option is only valid when your source provider is `GITHUB`, `BITBUCKET`, or `GITHUB_ENTERPRISE`.
+
+* `build_status_config` - (Optional) Configuration block that contains information that defines how the build project reports the build status to the source provider. This option is only used when the source provider is `GITHUB`, `GITHUB_ENTERPRISE`, or `BITBUCKET`. `build_status_config` blocks are documented below.
+* `source_identifier` - (Required) An identifier for this project source. The identifier can only contain alphanumeric characters and underscores, and must be less than 128 characters in length.
+* `type` - (Required) Type of repository that contains the source code to be built. Valid values: `CODECOMMIT`, `CODEPIPELINE`, `GITHUB`, `GITHUB_ENTERPRISE`, `BITBUCKET` or `S3`.
+
+#### secondary_sources: git_submodules_config
+
+This block is only valid when the `type` is `CODECOMMIT`, `GITHUB` or `GITHUB_ENTERPRISE`.
+
+* `fetch_submodules` - (Required) Whether to fetch Git submodules for the AWS CodeBuild build project.
+
+#### secondary_sources: build_status_config
+
+* `context` - (Optional) Specifies the context of the build status CodeBuild sends to the source provider. The usage of this parameter depends on the source provider.
+* `target_url` - (Optional) Specifies the target URL of the build status CodeBuild sends to the source provider. The usage of this parameter depends on the source provider.
+
+### secondary_source_version
+
+* `source_identifier` - (Required) An identifier for a source in the build project.
+* `source_version` - (Required) The source version for the corresponding source identifier. See [AWS docs](https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ProjectSourceVersion.html#CodeBuild-Type-ProjectSourceVersion-sourceVersion) for more details.
+
+### source
+
+* `buildspec` - (Optional) Build specification to use for this build project's related builds. This must be set when `type` is `NO_SOURCE`.
+* `git_clone_depth` - (Optional) Truncate git history to this many commits. Use `0` for a full checkout, which is required to run commands like `git branch --show-current`. See [AWS CodePipeline User Guide: Tutorial: Use full clone with a GitHub pipeline source](https://docs.aws.amazon.com/codepipeline/latest/userguide/tutorials-github-gitclone.html) for details.
+* `git_submodules_config` - (Optional) Configuration block. Detailed below.
+* `insecure_ssl` - (Optional) Ignore SSL warnings when connecting to source control.
+* `location` - (Optional) Location of the source code from Git or S3.
+* `report_build_status` - (Optional) Whether to report the status of a build's start and finish to your source provider. This option is only valid when the `type` is `BITBUCKET` or `GITHUB`.
+* `build_status_config` - (Optional) Configuration block that contains information that defines how the build project reports the build status to the source provider. This option is only used when the source provider is `GITHUB`, `GITHUB_ENTERPRISE`, or `BITBUCKET`. `build_status_config` blocks are documented below.
+* `type` - (Required) Type of repository that contains the source code to be built. Valid values: `CODECOMMIT`, `CODEPIPELINE`, `GITHUB`, `GITHUB_ENTERPRISE`, `BITBUCKET`, `S3`, `NO_SOURCE`.
+
+#### source: git_submodules_config
+
+This block is only valid when the `type` is `CODECOMMIT`, `GITHUB` or `GITHUB_ENTERPRISE`.
+
+* `fetch_submodules` - (Required) Whether to fetch Git submodules for the AWS CodeBuild build project.
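+
+To make the `git_clone_depth` and `git_submodules_config` settings above concrete, here is a hedged, hand-written fragment (not `cdktf convert` output; the repository URL is a placeholder and the struct names follow the examples above) configuring a full-history GitHub clone with submodules fetched:
+
+```python
+from imports.aws.codebuild_project import (CodebuildProjectSource,
+    CodebuildProjectSourceGitSubmodulesConfig)
+# A depth of 0 keeps the complete git history, which commands such as
+# `git branch --show-current` need; submodules are fetched as well.
+source = CodebuildProjectSource(
+    type="GITHUB",
+    location="https://github.com/example/example.git",  # placeholder
+    git_clone_depth=0,
+    git_submodules_config=CodebuildProjectSourceGitSubmodulesConfig(
+        fetch_submodules=True
+    ),
+    report_build_status=True
+)
+```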
+
+#### source: build_status_config
+
+* `context` - (Optional) Specifies the context of the build status CodeBuild sends to the source provider. The usage of this parameter depends on the source provider.
+* `target_url` - (Optional) Specifies the target URL of the build status CodeBuild sends to the source provider. The usage of this parameter depends on the source provider.
+
+### vpc_config
+
+* `security_group_ids` - (Required) Security group IDs to assign to running builds.
+* `subnets` - (Required) Subnet IDs within which to run builds.
+* `vpc_id` - (Required) ID of the VPC within which to run builds.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the CodeBuild project.
+* `badge_url` - URL of the build badge when `badge_enabled` is enabled.
+* `id` - Name (if imported via `name`) or ARN (if created via Terraform or imported via ARN) of the CodeBuild project.
+* `public_project_alias` - The project identifier used with the public build APIs.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Project using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeBuild Project using the `name`. For example:
+
+```console
+% terraform import aws_codebuild_project.name project-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codebuild_report_group.html.markdown b/website/docs/cdktf/python/r/codebuild_report_group.html.markdown
new file mode 100644
index 00000000000..4184758b11d
--- /dev/null
+++ b/website/docs/cdktf/python/r/codebuild_report_group.html.markdown
@@ -0,0 +1,130 @@
+---
+subcategory: "CodeBuild"
+layout: "aws"
+page_title: "AWS: aws_codebuild_report_group"
+description: |-
+  Provides a CodeBuild Report Group resource.
+---
+
+
+
+# Resource: aws_codebuild_report_group
+
+Provides a CodeBuild Report Group Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codebuild_report_group import CodebuildReportGroup
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.kms_key import KmsKey
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="my-test"
+        )
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_2",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["kms:*"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["arn:aws:iam::${" + current.account_id + "}:root"],
+                    type="AWS"
+                )
+                ],
+                resources=["*"],
+                sid="Enable IAM User Permissions"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_kms_key_example = KmsKey(self, "example_3",
+            deletion_window_in_days=7,
+            description="my test kms key",
+            policy=Token.as_string(data_aws_iam_policy_document_example.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kms_key_example.override_logical_id("example")
+        aws_codebuild_report_group_example = CodebuildReportGroup(self, "example_4",
+            export_config=CodebuildReportGroupExportConfig(
+                s3_destination=CodebuildReportGroupExportConfigS3Destination(
+                    bucket=example.id,
+                    encryption_disabled=False,
+                    encryption_key=Token.as_string(aws_kms_key_example.arn),
+                    packaging="NONE",
+                    path="/some"
+                ),
+                type="S3"
+            ),
+            name="my test report group",
+            type="TEST"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codebuild_report_group_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of a Report Group.
+* `type` - (Required) The type of the Report Group. Valid values are `TEST` and `CODE_COVERAGE`.
+* `export_config` - (Required) Information about the destination where the raw data of this Report Group is exported. See [Export Config](#export-config) documented below.
+* `delete_reports` - (Optional) If `true`, deletes any reports that belong to a report group before deleting the report group. If `false`, you must delete any reports in the report group before deleting it. Default value is `false`.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Export Config
+
+* `type` - (Required) The export configuration type. Valid values are `S3` and `NO_EXPORT`.
+* `s3_destination` - (Required) Contains information about the S3 bucket where the run of a report is exported. See [S3 Destination](#s3-destination) documented below.
+
+#### S3 Destination
+
+* `bucket` - (Required) The name of the S3 bucket where the raw data of a report are exported.
+* `encryption_key` - (Required) The encryption key for the report's encrypted raw data, specified as a KMS key ARN.
+* `encryption_disabled`- (Optional) A boolean value that specifies if the results of a report are encrypted. + **Note: the API does not currently allow setting encryption as disabled** +* `packaging` - (Optional) The type of build output artifact to create. Valid values are: `NONE` (default) and `ZIP`. +* `path` - (Optional) The path to the exported report's raw data results. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of Report Group. +* `arn` - The ARN of Report Group. +* `created` - The date and time this Report Group was created. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Report Group using the CodeBuild Report Group arn. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeBuild Report Group using the CodeBuild Report Group arn. For example: + +```console +% terraform import aws_codebuild_report_group.example arn:aws:codebuild:us-west-2:123456789:report-group/report-group-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codebuild_resource_policy.html.markdown b/website/docs/cdktf/python/r/codebuild_resource_policy.html.markdown new file mode 100644 index 00000000000..5c7c5b231e8 --- /dev/null +++ b/website/docs/cdktf/python/r/codebuild_resource_policy.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "CodeBuild" +layout: "aws" +page_title: "AWS: aws_codebuild_resource_policy" +description: |- + Provides a CodeBuild Resource Policy resource. +--- + + + +# Resource: aws_codebuild_resource_policy + +Provides a CodeBuild Resource Policy Resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codebuild_report_group import CodebuildReportGroup +from imports.aws.codebuild_resource_policy import CodebuildResourcePolicy +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_partition import DataAwsPartition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CodebuildReportGroup(self, "example", + export_config=CodebuildReportGroupExportConfig( + type="NO_EXPORT" + ), + name="example", + type="TEST" + ) + current = DataAwsCallerIdentity(self, "current") + data_aws_partition_current = DataAwsPartition(self, "current_2") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        data_aws_partition_current.override_logical_id("current")
+        aws_codebuild_resource_policy_example = CodebuildResourcePolicy(self, "example_3",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Id": "default",
+                    "Statement": [{
+                        "Action": ["codebuild:BatchGetReportGroups", "codebuild:BatchGetReports", "codebuild:ListReportsForReportGroup", "codebuild:DescribeTestCases"
+                        ],
+                        "Effect": "Allow",
+                        "Principal": {
+                            "AWS": "arn:${" + data_aws_partition_current.partition + "}:iam::${" + current.account_id + "}:root"
+                        },
+                        "Resource": example.arn,
+                        "Sid": "default"
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            resource_arn=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codebuild_resource_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required) The ARN of the Project or ReportGroup resource you want to associate with a resource policy.
+* `policy` - (Required) A JSON-formatted resource policy. For more information, see [Sharing a Project](https://docs.aws.amazon.com/codebuild/latest/userguide/project-sharing.html#project-sharing-share) and [Sharing a Report Group](https://docs.aws.amazon.com/codebuild/latest/userguide/report-groups-sharing.html#report-groups-sharing-share).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the resource.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Resource Policy using the CodeBuild Resource Policy ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeBuild Resource Policy using the CodeBuild Resource Policy ARN. For example:
+
+```console
+% terraform import aws_codebuild_resource_policy.example arn:aws:codebuild:us-west-2:123456789:report-group/report-group-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codebuild_source_credential.html.markdown b/website/docs/cdktf/python/r/codebuild_source_credential.html.markdown
new file mode 100644
index 00000000000..1148b191e85
--- /dev/null
+++ b/website/docs/cdktf/python/r/codebuild_source_credential.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "CodeBuild"
+layout: "aws"
+page_title: "AWS: aws_codebuild_source_credential"
+description: |-
+  Provides a CodeBuild Source Credential resource.
+---
+
+
+
+# Resource: aws_codebuild_source_credential
+
+Provides a CodeBuild Source Credentials Resource.
+
+~> **NOTE:**
+[Codebuild only allows a single credential per given server type in a given region](https://docs.aws.amazon.com/cdk/api/v2/docs/aws-cdk-lib.aws_codebuild.GitHubSourceCredentials.html). Therefore, when you define `aws_codebuild_source_credential`, any [`aws_codebuild_project` resource](/docs/providers/aws/r/codebuild_project.html) defined in the same module will use it.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codebuild_source_credential import CodebuildSourceCredential +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodebuildSourceCredential(self, "example", + auth_type="PERSONAL_ACCESS_TOKEN", + server_type="GITHUB", + token="example" + ) +``` + +### Bitbucket Server Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codebuild_source_credential import CodebuildSourceCredential +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodebuildSourceCredential(self, "example", + auth_type="BASIC_AUTH", + server_type="BITBUCKET", + token="example", + user_name="test-user" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `auth_type` - (Required) The type of authentication used to connect to a GitHub, GitHub Enterprise, or Bitbucket repository. An OAUTH connection is not supported by the API. +* `server_type` - (Required) The source provider used for this project. +* `token` - (Required) For `GitHub` or `GitHub Enterprise`, this is the personal access token. For `Bitbucket`, this is the app password. +* `user_name` - (Optional) The Bitbucket username when the authType is `BASIC_AUTH`. This parameter is not valid for other types of source providers or connections. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of Source Credential. +* `arn` - The ARN of Source Credential. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Source Credential using the CodeBuild Source Credential arn. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeBuild Source Credential using the CodeBuild Source Credential arn. For example: + +```console +% terraform import aws_codebuild_source_credential.example arn:aws:codebuild:us-west-2:123456789:token:github +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codebuild_webhook.html.markdown b/website/docs/cdktf/python/r/codebuild_webhook.html.markdown new file mode 100644 index 00000000000..89cae439dc6 --- /dev/null +++ b/website/docs/cdktf/python/r/codebuild_webhook.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "CodeBuild" +layout: "aws" +page_title: "AWS: aws_codebuild_webhook" +description: |- + Provides a CodeBuild Webhook resource. +--- + + + +# Resource: aws_codebuild_webhook + +Manages a CodeBuild webhook, which is an endpoint accepted by the CodeBuild service to trigger builds from source code repositories. Depending on the source type of the CodeBuild project, the CodeBuild service may also automatically create and delete the actual repository webhook as well. 
+
+## Example Usage
+
+### Bitbucket and GitHub
+
+When working with [Bitbucket](https://bitbucket.org) and [GitHub](https://github.com) source CodeBuild webhooks, the CodeBuild service will automatically create (on `aws_codebuild_webhook` resource creation) and delete (on `aws_codebuild_webhook` resource deletion) the Bitbucket/GitHub repository webhook using its granted OAuth permissions. This behavior cannot be controlled by Terraform.
+
+~> **Note:** The AWS account that Terraform uses to create this resource *must* have authorized CodeBuild to access Bitbucket/GitHub's OAuth API in each applicable region. This is a manual step that must be done *before* creating webhooks with this resource. If OAuth is not configured, AWS will return an error similar to `ResourceNotFoundException: Could not find access token for server type github`. More information can be found in the CodeBuild User Guide for [Bitbucket](https://docs.aws.amazon.com/codebuild/latest/userguide/sample-bitbucket-pull-request.html) and [GitHub](https://docs.aws.amazon.com/codebuild/latest/userguide/sample-github-pull-request.html).
+
+~> **Note:** Further managing the automatically created Bitbucket/GitHub webhook with the `bitbucket_hook`/`github_repository_webhook` resource is only possible by importing that resource after creation of the `aws_codebuild_webhook` resource. The CodeBuild API never provides the `secret` attribute for the `aws_codebuild_webhook` resource in this scenario.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codebuild_webhook import CodebuildWebhook
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+ CodebuildWebhook(self, "example",
+ build_type="BUILD",
+ filter_group=[CodebuildWebhookFilterGroup(
+ filter=[CodebuildWebhookFilterGroupFilter(
+ pattern="PUSH",
+ type="EVENT"
+ ), CodebuildWebhookFilterGroupFilter(
+ pattern="master",
+ type="BASE_REF"
+ )
+ ]
+ )
+ ],
+ project_name=Token.as_string(aws_codebuild_project_example.name)
+ )
+```
+
+### GitHub Enterprise
+
+When working with [GitHub Enterprise](https://enterprise.github.com/) source CodeBuild webhooks, the GHE repository webhook must be separately managed (e.g., manually or with the `github_repository_webhook` resource).
+
+More information about creating webhooks with GitHub Enterprise can be found in the [CodeBuild User Guide](https://docs.aws.amazon.com/codebuild/latest/userguide/sample-github-enterprise.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codebuild_webhook import CodebuildWebhook
+from imports.github.repository_webhook import RepositoryWebhook
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+ # The following providers are missing schema information and might need manual adjustments to synthesize correctly: github.
+ # For a more precise conversion please use the --provider flag in convert. 
+ example = CodebuildWebhook(self, "example", + project_name=Token.as_string(aws_codebuild_project_example.name) + ) + github_repository_webhook_example = RepositoryWebhook(self, "example_1", + active=True, + configuration=[{ + "content_type": "json", + "insecure_ssl": False, + "secret": example.secret, + "url": example.payload_url + } + ], + events=["push"], + name="example", + repository=github_repository_example.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + github_repository_webhook_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `project_name` - (Required) The name of the build project. +* `build_type` - (Optional) The type of build this webhook will trigger. Valid values for this parameter are: `BUILD`, `BUILD_BATCH`. +* `branch_filter` - (Optional) A regular expression used to determine which branches get built. Default is all branches are built. We recommend using `filter_group` over `branch_filter`. +* `filter_group` - (Optional) Information about the webhook's trigger. Filter group blocks are documented below. + +`filter_group` supports the following: + +* `filter` - (Required) A webhook filter for the group. Filter blocks are documented below. + +`filter` supports the following: + +* `type` - (Required) The webhook filter group's type. Valid values for this parameter are: `EVENT`, `BASE_REF`, `HEAD_REF`, `ACTOR_ACCOUNT_ID`, `FILE_PATH`, `COMMIT_MESSAGE`. At least one filter group must specify `EVENT` as its type. +* `pattern` - (Required) For a filter that uses `EVENT` type, a comma-separated string that specifies one event: `PUSH`, `PULL_REQUEST_CREATED`, `PULL_REQUEST_UPDATED`, `PULL_REQUEST_REOPENED`. `PULL_REQUEST_MERGED` works with GitHub & GitHub Enterprise only. For a filter that uses any of the other filter types, a regular expression. +* `exclude_matched_pattern` - (Optional) If set to `true`, the specified filter does *not* trigger a build. Defaults to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the build project. +* `payload_url` - The CodeBuild endpoint where webhook events are sent. +* `secret` - The secret token of the associated repository. Not returned by the CodeBuild API for all source types. +* `url` - The URL to the webhook. + +~> **Note:** The `secret` attribute is only set on resource creation, so if the secret is manually rotated, terraform will not pick up the change on subsequent runs. In that case, the webhook resource should be tainted and re-created to get the secret back in sync. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Webhooks using the CodeBuild Project name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeBuild Webhooks using the CodeBuild Project name. 
For example: + +```console +% terraform import aws_codebuild_webhook.example MyProjectName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codecommit_approval_rule_template.html.markdown b/website/docs/cdktf/python/r/codecommit_approval_rule_template.html.markdown new file mode 100644 index 00000000000..7b4bfdd9068 --- /dev/null +++ b/website/docs/cdktf/python/r/codecommit_approval_rule_template.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "CodeCommit" +layout: "aws" +page_title: "AWS: aws_codecommit_approval_rule_template" +description: |- + Provides a CodeCommit Approval Rule Template Resource. +--- + + + +# Resource: aws_codecommit_approval_rule_template + +Provides a CodeCommit Approval Rule Template Resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codecommit_approval_rule_template import CodecommitApprovalRuleTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodecommitApprovalRuleTemplate(self, "example", + content=Token.as_string( + Fn.jsonencode({ + "DestinationReferences": ["refs/heads/master"], + "Statements": [{ + "ApprovalPoolMembers": ["arn:aws:sts::123456789012:assumed-role/CodeCommitReview/*" + ], + "NumberOfApprovalsNeeded": 2, + "Type": "Approvers" + } + ], + "Version": "2018-11-08" + })), + description="This is an example approval rule template", + name="MyExampleApprovalRuleTemplate" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Required) The content of the approval rule template. Maximum of 3000 characters. +* `name` - (Required) The name for the approval rule template. Maximum of 100 characters. +* `description` - (Optional) The description of the approval rule template. Maximum of 1000 characters. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `approval_rule_template_id` - The ID of the approval rule template +* `creation_date` - The date the approval rule template was created, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `last_modified_date` - The date the approval rule template was most recently changed, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `last_modified_user` - The Amazon Resource Name (ARN) of the user who made the most recent changes to the approval rule template. +* `rule_content_sha256` - The SHA-256 hash signature for the content of the approval rule template. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeCommit approval rule templates using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeCommit approval rule templates using the `name`. 
For example: + +```console +% terraform import aws_codecommit_approval_rule_template.imported ExistingApprovalRuleTemplateName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codecommit_approval_rule_template_association.html.markdown b/website/docs/cdktf/python/r/codecommit_approval_rule_template_association.html.markdown new file mode 100644 index 00000000000..06bdbd4ba38 --- /dev/null +++ b/website/docs/cdktf/python/r/codecommit_approval_rule_template_association.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "CodeCommit" +layout: "aws" +page_title: "AWS: aws_codecommit_approval_rule_template_association" +description: |- + Associates a CodeCommit Approval Rule Template with a Repository. +--- + + + +# Resource: aws_codecommit_approval_rule_template_association + +Associates a CodeCommit Approval Rule Template with a Repository. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codecommit_approval_rule_template_association import CodecommitApprovalRuleTemplateAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodecommitApprovalRuleTemplateAssociation(self, "example", + approval_rule_template_name=Token.as_string(aws_codecommit_approval_rule_template_example.name), + repository_name=Token.as_string(aws_codecommit_repository_example.repository_name) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `approval_rule_template_name` - (Required) The name for the approval rule template. +* `repository_name` - (Required) The name of the repository that you want to associate with the template. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the approval rule template and name of the repository, separated by a comma (`,`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeCommit approval rule template associations using the `approval_rule_template_name` and `repository_name` separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeCommit approval rule template associations using the `approval_rule_template_name` and `repository_name` separated by a comma (`,`). For example: + +```console +% terraform import aws_codecommit_approval_rule_template_association.example approver-rule-for-example,MyExampleRepo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codecommit_repository.html.markdown b/website/docs/cdktf/python/r/codecommit_repository.html.markdown new file mode 100644 index 00000000000..b11396f49eb --- /dev/null +++ b/website/docs/cdktf/python/r/codecommit_repository.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "CodeCommit" +layout: "aws" +page_title: "AWS: aws_codecommit_repository" +description: |- + Provides a CodeCommit Repository Resource. 
+---
+
+
+
+# Resource: aws_codecommit_repository
+
+Provides a CodeCommit Repository Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codecommit_repository import CodecommitRepository
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+ CodecommitRepository(self, "test",
+ description="This is the Sample App Repository",
+ repository_name="MyTestRepository"
+ )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `repository_name` - (Required) The name for the repository. This needs to be less than 100 characters.
+* `description` - (Optional) The description of the repository. This needs to be less than 1000 characters.
+* `default_branch` - (Optional) The default branch of the repository. The branch specified here needs to exist.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `repository_id` - The ID of the repository.
+* `arn` - The ARN of the repository.
+* `clone_url_http` - The URL to use for cloning the repository over HTTPS.
+* `clone_url_ssh` - The URL to use for cloning the repository over SSH.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a CodeCommit repository using the repository name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+```
+
+Using `terraform import`, import a CodeCommit repository using the repository name. For example:
+
+```console
+% terraform import aws_codecommit_repository.imported ExistingRepo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codecommit_trigger.html.markdown b/website/docs/cdktf/python/r/codecommit_trigger.html.markdown
new file mode 100644
index 00000000000..1386ef5d64b
--- /dev/null
+++ b/website/docs/cdktf/python/r/codecommit_trigger.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "CodeCommit"
+layout: "aws"
+page_title: "AWS: aws_codecommit_trigger"
+description: |-
+  Provides a CodeCommit Trigger Resource.
+---
+
+
+
+# Resource: aws_codecommit_trigger
+
+Provides a CodeCommit Trigger Resource.
+
+~> **NOTE:** Terraform can currently create only one trigger per repository, even if multiple `aws_codecommit_trigger` resources are defined. Moreover, creating triggers with Terraform will delete all other triggers in the repository (including manually created triggers). 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codecommit_repository import CodecommitRepository +from imports.aws.codecommit_trigger import CodecommitTrigger +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = CodecommitRepository(self, "test", + repository_name="test" + ) + aws_codecommit_trigger_test = CodecommitTrigger(self, "test_1", + repository_name=test.repository_name, + trigger=[CodecommitTriggerTrigger( + destination_arn=Token.as_string(aws_sns_topic_test.arn), + events=["all"], + name="all" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_codecommit_trigger_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repository_name` - (Required) The name for the repository. This needs to be less than 100 characters. +* `name` - (Required) The name of the trigger. +* `destination_arn` - (Required) The ARN of the resource that is the target for a trigger. For example, the ARN of a topic in Amazon Simple Notification Service (SNS). +* `custom_data` - (Optional) Any custom data associated with the trigger that will be included in the information sent to the target of the trigger. +* `branches` - (Optional) The branches that will be included in the trigger configuration. If no branches are specified, the trigger will apply to all branches. +* `events` - (Required) The repository events that will cause the trigger to run actions in another service, such as sending a notification through Amazon Simple Notification Service (SNS). If no events are specified, the trigger will run for all repository events. Event types include: `all`, `updateReference`, `createReference`, `deleteReference`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `configuration_id` - System-generated unique identifier. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codedeploy_app.html.markdown b/website/docs/cdktf/python/r/codedeploy_app.html.markdown new file mode 100644 index 00000000000..9b86d3fb907 --- /dev/null +++ b/website/docs/cdktf/python/r/codedeploy_app.html.markdown @@ -0,0 +1,116 @@ +--- +subcategory: "CodeDeploy" +layout: "aws" +page_title: "AWS: aws_codedeploy_app" +description: |- + Provides a CodeDeploy application. +--- + + + +# Resource: aws_codedeploy_app + +Provides a CodeDeploy application to be used as a basis for deployments + +## Example Usage + +### ECS Application + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.codedeploy_app import CodedeployApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodedeployApp(self, "example", + compute_platform="ECS", + name="example" + ) +``` + +### Lambda Application + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codedeploy_app import CodedeployApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodedeployApp(self, "example", + compute_platform="Lambda", + name="example" + ) +``` + +### Server Application + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codedeploy_app import CodedeployApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodedeployApp(self, "example", + compute_platform="Server", + name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the application. +* `compute_platform` - (Optional) The compute platform can either be `ECS`, `Lambda`, or `Server`. Default is `Server`. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the CodeDeploy application. +* `application_id` - The application ID. +* `id` - Amazon's assigned ID for the application. +* `name` - The application's name. +* `github_account_name` - The name for a connection to a GitHub account. +* `linked_to_github` - Whether the user has authenticated with GitHub for the specified application. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeDeploy Applications using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeDeploy Applications using the `name`. 
For example: + +```console +% terraform import aws_codedeploy_app.example my-application +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codedeploy_deployment_config.html.markdown b/website/docs/cdktf/python/r/codedeploy_deployment_config.html.markdown new file mode 100644 index 00000000000..47ba3f0dfeb --- /dev/null +++ b/website/docs/cdktf/python/r/codedeploy_deployment_config.html.markdown @@ -0,0 +1,172 @@ +--- +subcategory: "CodeDeploy" +layout: "aws" +page_title: "AWS: aws_codedeploy_deployment_config" +description: |- + Provides a CodeDeploy deployment config. +--- + + + +# Resource: aws_codedeploy_deployment_config + +Provides a CodeDeploy deployment config for an application + +## Example Usage + +### Server Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codedeploy_deployment_config import CodedeployDeploymentConfig +from imports.aws.codedeploy_deployment_group import CodedeployDeploymentGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foo = CodedeployDeploymentConfig(self, "foo", + deployment_config_name="test-deployment-config", + minimum_healthy_hosts=CodedeployDeploymentConfigMinimumHealthyHosts( + type="HOST_COUNT", + value=2 + ) + ) + aws_codedeploy_deployment_group_foo = CodedeployDeploymentGroup(self, "foo_1", + alarm_configuration=CodedeployDeploymentGroupAlarmConfiguration( + alarms=["my-alarm-name"], + enabled=True + ), + app_name=foo_app.name, + auto_rollback_configuration=CodedeployDeploymentGroupAutoRollbackConfiguration( + enabled=True, + events=["DEPLOYMENT_FAILURE"] + ), + deployment_config_name=foo.id, + deployment_group_name="bar", + ec2_tag_filter=[CodedeployDeploymentGroupEc2TagFilter( + key="filterkey", + type="KEY_AND_VALUE", + value="filtervalue" + ) + ], + service_role_arn=foo_role.arn, + trigger_configuration=[CodedeployDeploymentGroupTriggerConfiguration( + trigger_events=["DeploymentFailure"], + trigger_name="foo-trigger", + trigger_target_arn="foo-topic-arn" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_codedeploy_deployment_group_foo.override_logical_id("foo") +``` + +### Lambda Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.codedeploy_deployment_config import CodedeployDeploymentConfig
+from imports.aws.codedeploy_deployment_group import CodedeployDeploymentGroup
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+ foo = CodedeployDeploymentConfig(self, "foo",
+ compute_platform="Lambda",
+ deployment_config_name="test-deployment-config",
+ traffic_routing_config=CodedeployDeploymentConfigTrafficRoutingConfig(
+ time_based_linear=CodedeployDeploymentConfigTrafficRoutingConfigTimeBasedLinear(
+ interval=10,
+ percentage=10
+ ),
+ type="TimeBasedLinear"
+ )
+ )
+ aws_codedeploy_deployment_group_foo = CodedeployDeploymentGroup(self, "foo_1",
+ alarm_configuration=CodedeployDeploymentGroupAlarmConfiguration(
+ alarms=["my-alarm-name"],
+ enabled=True
+ ),
+ app_name=foo_app.name,
+ auto_rollback_configuration=CodedeployDeploymentGroupAutoRollbackConfiguration(
+ enabled=True,
+ events=["DEPLOYMENT_STOP_ON_ALARM"]
+ ),
+ deployment_config_name=foo.id,
+ deployment_group_name="bar",
+ service_role_arn=foo_role.arn
+ )
+ # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_codedeploy_deployment_group_foo.override_logical_id("foo")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deployment_config_name` - (Required) The name of the deployment config.
+* `compute_platform` - (Optional) The compute platform can be `Server`, `Lambda`, or `ECS`. Default is `Server`.
+* `minimum_healthy_hosts` - (Optional) A `minimum_healthy_hosts` block, documented below. Required for the `Server` compute platform.
+* `traffic_routing_config` - (Optional) A `traffic_routing_config` block, documented below.
+
+The `minimum_healthy_hosts` block supports the following:
+
+* `type` - (Required) The type can either be `FLEET_PERCENT` or `HOST_COUNT`.
+* `value` - (Required) When the type is `FLEET_PERCENT`, the value represents the minimum number of healthy instances as a percentage of the total number of instances in the deployment. If you specify `FLEET_PERCENT`, at the start of the deployment AWS CodeDeploy converts the percentage to the equivalent number of instances and rounds up fractional instances; for example, with nine instances and a value of `50`, at least five healthy instances (4.5 rounded up) are required. When the type is `HOST_COUNT`, the value represents the minimum number of healthy instances as an absolute value.
+
+The `traffic_routing_config` block supports the following:
+
+* `type` - (Optional) Type of traffic routing config. One of `TimeBasedCanary`, `TimeBasedLinear`, `AllAtOnce`.
+* `time_based_canary` - (Optional) The time based canary configuration information. If `type` is `TimeBasedLinear`, use `time_based_linear` instead.
+* `time_based_linear` - (Optional) The time based linear configuration information. If `type` is `TimeBasedCanary`, use `time_based_canary` instead.
+
+The `time_based_canary` block supports the following:
+
+* `interval` - (Optional) The number of minutes between the first and second traffic shifts of a `TimeBasedCanary` deployment.
+* `percentage` - (Optional) The percentage of traffic to shift in the first increment of a `TimeBasedCanary` deployment.
+
+The `time_based_linear` block supports the following:
+
+* `interval` - (Optional) The number of minutes between each incremental traffic shift of a `TimeBasedLinear` deployment.
+* `percentage` - (Optional) The percentage of traffic that is shifted at the start of each increment of a `TimeBasedLinear` deployment. 
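+
+The examples above demonstrate only `TimeBasedLinear` routing. For comparison, here is a minimal sketch of a `TimeBasedCanary` config, assuming the generated provider bindings used elsewhere on this page; the config name and values are illustrative, not part of the original documentation.
+
+```python
+# Hypothetical canary config: shift 25% of traffic first, then the
+# remaining 75% fifteen minutes later.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.codedeploy_deployment_config import (
+    CodedeployDeploymentConfig,
+    CodedeployDeploymentConfigTrafficRoutingConfig,
+    CodedeployDeploymentConfigTrafficRoutingConfigTimeBasedCanary,
+)
+class CanaryConfigSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CodedeployDeploymentConfig(self, "canary",
+            compute_platform="Lambda",
+            deployment_config_name="example-canary-deployment-config",
+            traffic_routing_config=CodedeployDeploymentConfigTrafficRoutingConfig(
+                time_based_canary=CodedeployDeploymentConfigTrafficRoutingConfigTimeBasedCanary(
+                    interval=15,
+                    percentage=25
+                ),
+                type="TimeBasedCanary"
+            )
+        )
+```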
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The deployment group's config name.
+* `deployment_config_id` - The AWS-assigned deployment config ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeDeploy Deployment Configurations using the `deployment_config_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeDeploy Deployment Configurations using the `deployment_config_name`. For example:
+
+```console
+% terraform import aws_codedeploy_deployment_config.example my-deployment-config
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codedeploy_deployment_group.html.markdown b/website/docs/cdktf/python/r/codedeploy_deployment_group.html.markdown
new file mode 100644
index 00000000000..9c71a5b90f1
--- /dev/null
+++ b/website/docs/cdktf/python/r/codedeploy_deployment_group.html.markdown
@@ -0,0 +1,409 @@
+---
+subcategory: "CodeDeploy"
+layout: "aws"
+page_title: "AWS: aws_codedeploy_deployment_group"
+description: |-
+  Provides a CodeDeploy deployment group.
+---
+
+
+
+# Resource: aws_codedeploy_deployment_group
+
+Provides a CodeDeploy Deployment Group for a CodeDeploy Application.
+
+~> **NOTE on blue/green deployments:** When using `green_fleet_provisioning_option` with the `COPY_AUTO_SCALING_GROUP` action, CodeDeploy will create a new ASG with a different name. This ASG is _not_ managed by Terraform and will conflict with existing configuration and state. You may want to use a different approach to managing deployments that involve multiple ASGs, such as `DISCOVER_EXISTING` with separate blue and green ASGs.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codedeploy_app import CodedeployApp
+from imports.aws.codedeploy_deployment_group import CodedeployDeploymentGroup
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+from imports.aws.sns_topic import SnsTopic
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+ example = CodedeployApp(self, "example",
+ name="example-app"
+ )
+ aws_sns_topic_example = SnsTopic(self, "example_1",
+ name="example-topic"
+ )
+ # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_sns_topic_example.override_logical_id("example") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["codedeploy.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + aws_iam_role_example = IamRole(self, "example_3", + assume_role_policy=Token.as_string(assume_role.json), + name="example-role" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + IamRolePolicyAttachment(self, "AWSCodeDeployRole", + policy_arn="arn:aws:iam::aws:policy/service-role/AWSCodeDeployRole", + role=Token.as_string(aws_iam_role_example.name) + ) + aws_codedeploy_deployment_group_example = CodedeployDeploymentGroup(self, "example_5", + alarm_configuration=CodedeployDeploymentGroupAlarmConfiguration( + alarms=["my-alarm-name"], + enabled=True + ), + app_name=example.name, + auto_rollback_configuration=CodedeployDeploymentGroupAutoRollbackConfiguration( + enabled=True, + events=["DEPLOYMENT_FAILURE"] + ), + deployment_group_name="example-group", + ec2_tag_set=[CodedeployDeploymentGroupEc2TagSet( + ec2_tag_filter=[CodedeployDeploymentGroupEc2TagSetEc2TagFilter( + key="filterkey1", + type="KEY_AND_VALUE", + value="filtervalue" + ), CodedeployDeploymentGroupEc2TagSetEc2TagFilter( + key="filterkey2", + type="KEY_AND_VALUE", + value="filtervalue" + ) + ] + ) + ], + service_role_arn=Token.as_string(aws_iam_role_example.arn), + trigger_configuration=[CodedeployDeploymentGroupTriggerConfiguration( + trigger_events=["DeploymentFailure"], + trigger_name="example-trigger", + trigger_target_arn=Token.as_string(aws_sns_topic_example.arn) + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_codedeploy_deployment_group_example.override_logical_id("example") +``` + +### Blue Green Deployments with ECS + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.codedeploy_app import CodedeployApp +from imports.aws.codedeploy_deployment_group import CodedeployDeploymentGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CodedeployApp(self, "example", + compute_platform="ECS", + name="example" + ) + aws_codedeploy_deployment_group_example = CodedeployDeploymentGroup(self, "example_1", + app_name=example.name, + auto_rollback_configuration=CodedeployDeploymentGroupAutoRollbackConfiguration( + enabled=True, + events=["DEPLOYMENT_FAILURE"] + ), + blue_green_deployment_config=CodedeployDeploymentGroupBlueGreenDeploymentConfig( + deployment_ready_option=CodedeployDeploymentGroupBlueGreenDeploymentConfigDeploymentReadyOption( + action_on_timeout="CONTINUE_DEPLOYMENT" + ), + terminate_blue_instances_on_deployment_success=CodedeployDeploymentGroupBlueGreenDeploymentConfigTerminateBlueInstancesOnDeploymentSuccess( + action="TERMINATE", + termination_wait_time_in_minutes=5 + ) + ), + deployment_config_name="CodeDeployDefault.ECSAllAtOnce", + deployment_group_name="example", + deployment_style=CodedeployDeploymentGroupDeploymentStyle( + deployment_option="WITH_TRAFFIC_CONTROL", + deployment_type="BLUE_GREEN" + ), + ecs_service=CodedeployDeploymentGroupEcsService( + cluster_name=Token.as_string(aws_ecs_cluster_example.name), + service_name=Token.as_string(aws_ecs_service_example.name) + ), + load_balancer_info=CodedeployDeploymentGroupLoadBalancerInfo( + target_group_pair_info=CodedeployDeploymentGroupLoadBalancerInfoTargetGroupPairInfo( + prod_traffic_route=CodedeployDeploymentGroupLoadBalancerInfoTargetGroupPairInfoProdTrafficRoute( + listener_arns=[Token.as_string(aws_lb_listener_example.arn)] + ), + target_group=[CodedeployDeploymentGroupLoadBalancerInfoTargetGroupPairInfoTargetGroup( + name=blue.name + ), CodedeployDeploymentGroupLoadBalancerInfoTargetGroupPairInfoTargetGroup( + name=green.name + ) + ] + ) + ), + service_role_arn=Token.as_string(aws_iam_role_example.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_codedeploy_deployment_group_example.override_logical_id("example") +``` + +### Blue Green Deployments with Servers and Classic ELB + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.codedeploy_app import CodedeployApp +from imports.aws.codedeploy_deployment_group import CodedeployDeploymentGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CodedeployApp(self, "example", + name="example-app" + ) + aws_codedeploy_deployment_group_example = CodedeployDeploymentGroup(self, "example_1", + app_name=example.name, + blue_green_deployment_config=CodedeployDeploymentGroupBlueGreenDeploymentConfig( + deployment_ready_option=CodedeployDeploymentGroupBlueGreenDeploymentConfigDeploymentReadyOption( + action_on_timeout="STOP_DEPLOYMENT", + wait_time_in_minutes=60 + ), + green_fleet_provisioning_option=CodedeployDeploymentGroupBlueGreenDeploymentConfigGreenFleetProvisioningOption( + action="DISCOVER_EXISTING" + ), + terminate_blue_instances_on_deployment_success=CodedeployDeploymentGroupBlueGreenDeploymentConfigTerminateBlueInstancesOnDeploymentSuccess( + action="KEEP_ALIVE" + ) + ), + deployment_group_name="example-group", + deployment_style=CodedeployDeploymentGroupDeploymentStyle( + deployment_option="WITH_TRAFFIC_CONTROL", + deployment_type="BLUE_GREEN" + ), + load_balancer_info=CodedeployDeploymentGroupLoadBalancerInfo( + elb_info=[CodedeployDeploymentGroupLoadBalancerInfoElbInfo( + name=Token.as_string(aws_elb_example.name) + ) + ] + ), + service_role_arn=Token.as_string(aws_iam_role_example.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_codedeploy_deployment_group_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `app_name` - (Required) The name of the application. +* `deployment_group_name` - (Required) The name of the deployment group. +* `service_role_arn` - (Required) The service role ARN that allows deployments. +* `alarm_configuration` - (Optional) Configuration block of alarms associated with the deployment group (documented below). +* `auto_rollback_configuration` - (Optional) Configuration block of the automatic rollback configuration associated with the deployment group (documented below). +* `autoscaling_groups` - (Optional) Autoscaling groups associated with the deployment group. +* `blue_green_deployment_config` - (Optional) Configuration block of the blue/green deployment options for a deployment group (documented below). +* `deployment_config_name` - (Optional) The name of the group's deployment config. The default is "CodeDeployDefault.OneAtATime". +* `deployment_style` - (Optional) Configuration block of the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer (documented below). +* `ec2_tag_filter` - (Optional) Tag filters associated with the deployment group. See the AWS docs for details. +* `ec2_tag_set` - (Optional) Configuration block(s) of Tag filters associated with the deployment group, which are also referred to as tag groups (documented below). See the AWS docs for details. +* `ecs_service` - (Optional) Configuration block(s) of the ECS services for a deployment group (documented below). +* `load_balancer_info` - (Optional) Single configuration block of the load balancer to use in a blue/green deployment (documented below). +* `on_premises_instance_tag_filter` - (Optional) On premise tag filters associated with the group. See the AWS docs for details. 
+* `trigger_configuration` - (Optional) Configuration block(s) of the triggers for the deployment group (documented below). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### alarm_configuration Argument Reference + +You can configure a deployment to stop when a **CloudWatch** alarm detects that a metric has fallen below or exceeded a defined threshold. `alarm_configuration` supports the following: + +* `alarms` - (Optional) A list of alarms configured for the deployment group. _A maximum of 10 alarms can be added to a deployment group_. +* `enabled` - (Optional) Indicates whether the alarm configuration is enabled. This option is useful when you want to temporarily deactivate alarm monitoring for a deployment group without having to add the same alarms again later. +* `ignore_poll_alarm_failure` - (Optional) Indicates whether a deployment should continue if information about the current state of alarms cannot be retrieved from CloudWatch. The default value is `false`. + * `true`: The deployment will proceed even if alarm status information can't be retrieved. + * `false`: The deployment will stop if alarm status information can't be retrieved. + +_Only one `alarm_configuration` is allowed_. + +### auto_rollback_configuration Argument Reference + +You can configure a deployment group to automatically rollback when a deployment fails or when a monitoring threshold you specify is met. In this case, the last known good version of an application revision is deployed. `auto_rollback_configuration` supports the following: + +* `enabled` - (Optional) Indicates whether a defined automatic rollback configuration is currently enabled for this Deployment Group. If you enable automatic rollback, you must specify at least one event type. +* `events` - (Optional) The event type or types that trigger a rollback. Supported types are `DEPLOYMENT_FAILURE` and `DEPLOYMENT_STOP_ON_ALARM`. + +_Only one `auto_rollback_configuration` is allowed_. + +### blue_green_deployment_config Argument Reference + +You can configure options for a blue/green deployment. `blue_green_deployment_config` supports the following: + +* `deployment_ready_option` - (Optional) Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment (documented below). +* `green_fleet_provisioning_option` - (Optional) Information about how instances are provisioned for a replacement environment in a blue/green deployment (documented below). +* `terminate_blue_instances_on_deployment_success` - (Optional) Information about whether to terminate instances in the original fleet during a blue/green deployment (documented below). + +_Only one `blue_green_deployment_config` is allowed_. + +You can configure how traffic is rerouted to instances in a replacement environment in a blue/green deployment. `deployment_ready_option` supports the following: + +* `action_on_timeout` - (Optional) When to reroute traffic from an original environment to a replacement environment in a blue/green deployment. + * `CONTINUE_DEPLOYMENT`: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment. 
+ * `STOP_DEPLOYMENT`: Do not register new instances with the load balancer unless traffic is rerouted manually. If traffic is not rerouted manually before the end of the specified wait period, the deployment status is changed to Stopped.
+* `wait_time_in_minutes` - (Optional) The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the `STOP_DEPLOYMENT` option for `action_on_timeout`.
+
+You can configure how instances will be added to the replacement environment in a blue/green deployment. `green_fleet_provisioning_option` supports the following:
+
+* `action` - (Optional) The method used to add instances to a replacement environment.
+ * `DISCOVER_EXISTING`: Use instances that already exist or will be created manually.
+ * `COPY_AUTO_SCALING_GROUP`: Use settings from a specified **Auto Scaling** group to define and create instances in a new Auto Scaling group. _Exactly one Auto Scaling group must be specified_ when selecting `COPY_AUTO_SCALING_GROUP`. Use `autoscaling_groups` to specify the Auto Scaling group.
+
+You can configure how instances in the original environment are terminated when a blue/green deployment is successful. `terminate_blue_instances_on_deployment_success` supports the following:
+
+* `action` - (Optional) The action to take on instances in the original environment after a successful blue/green deployment.
+ * `TERMINATE`: Instances are terminated after a specified wait time.
+ * `KEEP_ALIVE`: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
+* `termination_wait_time_in_minutes` - (Optional) The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment.
+
+### deployment_style Argument Reference
+
+You can configure the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer. `deployment_style` supports the following:
+
+* `deployment_option` - (Optional) Indicates whether to route deployment traffic behind a load balancer. Valid values are `WITH_TRAFFIC_CONTROL` or `WITHOUT_TRAFFIC_CONTROL`. Default is `WITHOUT_TRAFFIC_CONTROL`.
+* `deployment_type` - (Optional) Indicates whether to run an in-place deployment or a blue/green deployment. Valid values are `IN_PLACE` or `BLUE_GREEN`. Default is `IN_PLACE`.
+
+_Only one `deployment_style` is allowed_.
+
+### ec2_tag_filter Argument Reference
+
+The `ec2_tag_filter` configuration block supports the following:
+
+* `key` - (Optional) The key of the tag filter.
+* `type` - (Optional) The type of the tag filter, either `KEY_ONLY`, `VALUE_ONLY`, or `KEY_AND_VALUE`.
+* `value` - (Optional) The value of the tag filter.
+
+Multiple occurrences of `ec2_tag_filter` are allowed, where any instance that matches to at least one of the tag filters is selected.
+
+### ec2_tag_set Argument Reference
+
+You can form a tag group by putting a set of tag filters into `ec2_tag_set`. If multiple tag groups are specified, any instance that matches to at least one tag filter of every tag group is selected. A sketch of a multi-group configuration follows the `ecs_service` section below.
+
+### ecs_service Argument Reference
+
+Each `ecs_service` configuration block supports the following:
+
+* `cluster_name` - (Required) The name of the ECS cluster.
+* `service_name` - (Required) The name of the ECS service.
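+
+As referenced above, here is a minimal sketch of the AND-of-ORs semantics of `ec2_tag_set`: an instance is selected only when it matches at least one filter in every tag group. The tag keys and values are illustrative assumptions; the class names follow the generated provider bindings used in the examples above.
+
+```python
+# Hypothetical two-group tag set: an instance must carry
+# Environment=production AND (Role=web OR Role=api) to be targeted.
+from imports.aws.codedeploy_deployment_group import (
+    CodedeployDeploymentGroupEc2TagSet,
+    CodedeployDeploymentGroupEc2TagSetEc2TagFilter,
+)
+ec2_tag_set = [
+    CodedeployDeploymentGroupEc2TagSet(ec2_tag_filter=[
+        CodedeployDeploymentGroupEc2TagSetEc2TagFilter(
+            key="Environment", type="KEY_AND_VALUE", value="production")
+    ]),
+    CodedeployDeploymentGroupEc2TagSet(ec2_tag_filter=[
+        CodedeployDeploymentGroupEc2TagSetEc2TagFilter(
+            key="Role", type="KEY_AND_VALUE", value="web"),
+        CodedeployDeploymentGroupEc2TagSetEc2TagFilter(
+            key="Role", type="KEY_AND_VALUE", value="api"),
+    ]),
+]
+```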
+
+### load_balancer_info Argument Reference
+
+You can configure the **Load Balancer** to use in a deployment. `load_balancer_info` supports the following:
+
+* `elb_info` - (Optional) The Classic Elastic Load Balancer to use in a deployment. Conflicts with `target_group_info` and `target_group_pair_info`.
+* `target_group_info` - (Optional) The (Application/Network Load Balancer) target group to use in a deployment. Conflicts with `elb_info` and `target_group_pair_info`.
+* `target_group_pair_info` - (Optional) The (Application/Network Load Balancer) target group pair to use in a deployment. Conflicts with `elb_info` and `target_group_info`.
+
+#### load_balancer_info elb_info Argument Reference
+
+The `elb_info` configuration block supports the following:
+
+* `name` - (Optional) The name of the load balancer that will be used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment completes.
+
+#### load_balancer_info target_group_info Argument Reference
+
+The `target_group_info` configuration block supports the following:
+
+* `name` - (Optional) The name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment completes.
+
+#### load_balancer_info target_group_pair_info Argument Reference
+
+The `target_group_pair_info` configuration block supports the following:
+
+* `prod_traffic_route` - (Required) Configuration block for the production traffic route (documented below).
+* `target_group` - (Required) Configuration blocks for a target group within a target group pair (documented below).
+* `test_traffic_route` - (Optional) Configuration block for the test traffic route (documented below).
+
+##### load_balancer_info target_group_pair_info prod_traffic_route Argument Reference
+
+The `prod_traffic_route` configuration block supports the following:
+
+* `listener_arns` - (Required) List of Amazon Resource Names (ARNs) of the load balancer listeners. Must contain exactly one listener ARN.
+
+##### load_balancer_info target_group_pair_info target_group Argument Reference
+
+The `target_group` configuration block supports the following:
+
+* `name` - (Required) Name of the target group.
+
+##### load_balancer_info target_group_pair_info test_traffic_route Argument Reference
+
+The `test_traffic_route` configuration block supports the following:
+
+* `listener_arns` - (Required) List of Amazon Resource Names (ARNs) of the load balancer listeners.
+
+### on_premises_instance_tag_filter Argument Reference
+
+The `on_premises_instance_tag_filter` configuration block supports the following:
+
+* `key` - (Optional) The key of the tag filter.
+* `type` - (Optional) The type of the tag filter, either `KEY_ONLY`, `VALUE_ONLY`, or `KEY_AND_VALUE`.
+* `value` - (Optional) The value of the tag filter.
+
+### trigger_configuration Argument Reference
+
+Add triggers to a Deployment Group to receive notifications about events related to deployments or instances in the group. Notifications are sent to subscribers of the **SNS** topic associated with the trigger. _CodeDeploy must have permission to publish to the topic from this deployment group_. 
`trigger_configuration` supports the following: + +* `trigger_events` - (Required) The event type or types for which notifications are triggered. Some values that are supported: `DeploymentStart`, `DeploymentSuccess`, `DeploymentFailure`, `DeploymentStop`, `DeploymentRollback`, `InstanceStart`, `InstanceSuccess`, `InstanceFailure`. See [the CodeDeploy documentation][1] for all possible values. +* `trigger_name` - (Required) The name of the notification trigger. +* `trigger_target_arn` - (Required) The ARN of the SNS topic through which notifications are sent. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the CodeDeploy deployment group. +* `id` - Application name and deployment group name. +* `compute_platform` - The destination platform type for the deployment. +* `deployment_group_id` - The ID of the CodeDeploy deployment group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeDeploy Deployment Groups using `app_name`, a colon, and `deployment_group_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeDeploy Deployment Groups using `app_name`, a colon, and `deployment_group_name`. For example: + +```console +% terraform import aws_codedeploy_deployment_group.example my-application:my-deployment-group +``` + +[1]: http://docs.aws.amazon.com/codedeploy/latest/userguide/monitoring-sns-event-notifications-create-trigger.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codegurureviewer_repository_association.html.markdown b/website/docs/cdktf/python/r/codegurureviewer_repository_association.html.markdown new file mode 100644 index 00000000000..0c0d67feee8 --- /dev/null +++ b/website/docs/cdktf/python/r/codegurureviewer_repository_association.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "CodeGuru Reviewer" +layout: "aws" +page_title: "AWS: aws_codegurureviewer_repository_association" +description: |- + Terraform resource for managing an AWS CodeGuru Reviewer Repository Association. +--- + + + +# Resource: aws_codegurureviewer_repository_association + +Terraform resource for managing an AWS CodeGuru Reviewer Repository Association. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.codecommit_repository import CodecommitRepository
+from imports.aws.codegurureviewer_repository_association import CodegurureviewerRepositoryAssociation
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+ example = CodecommitRepository(self, "example",
+ lifecycle=TerraformResourceLifecycle(
+ ignore_changes=["codeguruReviewer"]
+ ),
+ repository_name="example-repo"
+ )
+ aws_kms_key_example = KmsKey(self, "example_1")
+ # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_kms_key_example.override_logical_id("example")
+ aws_codegurureviewer_repository_association_example =
+ CodegurureviewerRepositoryAssociation(self, "example_2",
+ kms_key_details=CodegurureviewerRepositoryAssociationKmsKeyDetails(
+ encryption_option="CUSTOMER_MANAGED_CMK",
+ kms_key_id=Token.as_string(aws_kms_key_example.key_id)
+ ),
+ repository=CodegurureviewerRepositoryAssociationRepository(
+ codecommit=CodegurureviewerRepositoryAssociationRepositoryCodecommit(
+ name=example.repository_name
+ )
+ )
+ )
+ # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_codegurureviewer_repository_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `repository` - (Required) An object describing the repository to associate. Valid values: `bitbucket`, `codecommit`, `github_enterprise_server`, or `s3_bucket`. Block is documented below. Note: for repositories that leverage CodeStar connections (ex. `bitbucket`, `github_enterprise_server`) the connection must be in `Available` status prior to creating this resource.
+
+The following arguments are optional:
+
+* `kms_key_details` - (Optional) An object describing the KMS key to associate. Block is documented below.
+
+## repository
+
+This configuration block supports the following:
+
+### bitbucket
+
+* `connection_arn` - (Required) The Amazon Resource Name (ARN) of an AWS CodeStar Connections connection.
+* `name` - (Required) The name of the third party source repository.
+* `owner` - (Required) The username for the account that owns the repository.
+
+### codecommit
+
+* `name` - (Required) The name of the AWS CodeCommit repository.
+
+### github_enterprise_server
+
+* `connection_arn` - (Required) The Amazon Resource Name (ARN) of an AWS CodeStar Connections connection.
+* `name` - (Required) The name of the third party source repository.
+* `owner` - (Required) The username for the account that owns the repository.
+
+### s3_bucket
+
+* `bucket_name` - (Required) The name of the S3 bucket used for associating a new S3 repository. Note: The name must begin with `codeguru-reviewer-`.
+* `name` - (Required) The name of the repository in the S3 bucket.
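+
+The `codecommit` variant is shown in the example above; for illustration, here is a minimal sketch of an S3-based association. The bucket and repository names are assumptions, and the nested class names follow the naming pattern of the generated provider bindings used on this page.
+
+```python
+# Hypothetical S3 repository association; the bucket name must
+# begin with "codeguru-reviewer-".
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.codegurureviewer_repository_association import (
+    CodegurureviewerRepositoryAssociation,
+    CodegurureviewerRepositoryAssociationRepository,
+    CodegurureviewerRepositoryAssociationRepositoryS3Bucket,
+)
+class S3AssociationSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CodegurureviewerRepositoryAssociation(self, "s3_example",
+            repository=CodegurureviewerRepositoryAssociationRepository(
+                s3_bucket=CodegurureviewerRepositoryAssociationRepositoryS3Bucket(
+                    bucket_name="codeguru-reviewer-example-bucket",
+                    name="example-repo"
+                )
+            )
+        )
+```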
+* `association_id` - The ID of the repository association. +* `connection_arn` - The Amazon Resource Name (ARN) of an AWS CodeStar Connections connection. +* `id` - The Amazon Resource Name (ARN) identifying the repository association. +* `name` - The name of the repository. +* `owner` - The owner of the repository. +* `provider_type` - The provider type of the repository association. +* `state` - The state of the repository association. +* `state_reason` - A description of why the repository association is in the current state. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60m`) +* `update` - (Default `180m`) +* `delete` - (Default `90m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codepipeline.markdown b/website/docs/cdktf/python/r/codepipeline.markdown new file mode 100644 index 00000000000..7ef84316629 --- /dev/null +++ b/website/docs/cdktf/python/r/codepipeline.markdown @@ -0,0 +1,228 @@ +--- +subcategory: "CodePipeline" +layout: "aws" +page_title: "AWS: aws_codepipeline" +description: |- + Provides a CodePipeline +--- + + + +# Resource: aws_codepipeline + +Provides a CodePipeline. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codepipeline import Codepipeline +from imports.aws.codestarconnections_connection import CodestarconnectionsConnection +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.data_aws_kms_alias import DataAwsKmsAlias +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CodestarconnectionsConnection(self, "example", + name="example-connection", + provider_type="GitHub" + ) + codepipeline_bucket = S3Bucket(self, "codepipeline_bucket", + bucket="test-bucket" + ) + S3BucketAcl(self, "codepipeline_bucket_acl", + acl="private", + bucket=codepipeline_bucket.id + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["codepipeline.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + codepipeline_policy = DataAwsIamPolicyDocument(self, "codepipeline_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:GetObject", "s3:GetObjectVersion", "s3:GetBucketVersioning", "s3:PutObjectAcl", "s3:PutObject" + ], + effect="Allow", + resources=[codepipeline_bucket.arn, "${" + codepipeline_bucket.arn + "}/*" + ] + ), DataAwsIamPolicyDocumentStatement( + actions=["codestar-connections:UseConnection"], + effect="Allow", + resources=[example.arn] + ), DataAwsIamPolicyDocumentStatement( + actions=["codebuild:BatchGetBuilds", "codebuild:StartBuild"], + effect="Allow", + resources=["*"] + ) + ] + ) + s3_kmskey = DataAwsKmsAlias(self, "s3kmskey", + name="alias/myKmsKey" + ) + codepipeline_role = IamRole(self, "codepipeline_role", + assume_role_policy=Token.as_string(assume_role.json), 
+            name="test-role"
+        )
+        aws_iam_role_policy_codepipeline_policy = IamRolePolicy(self, "codepipeline_policy_7",
+            name="codepipeline_policy",
+            policy=Token.as_string(codepipeline_policy.json),
+            role=codepipeline_role.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_codepipeline_policy.override_logical_id("codepipeline_policy")
+        Codepipeline(self, "codepipeline",
+            artifact_store=[CodepipelineArtifactStore(
+                encryption_key=CodepipelineArtifactStoreEncryptionKey(
+                    id=Token.as_string(s3_kmskey.arn),
+                    type="KMS"
+                ),
+                location=codepipeline_bucket.bucket,
+                type="S3"
+            )
+            ],
+            name="tf-test-pipeline",
+            role_arn=codepipeline_role.arn,
+            stage=[CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category="Source",
+                    configuration={
+                        "BranchName": "main",
+                        "ConnectionArn": example.arn,
+                        "FullRepositoryId": "my-organization/example"
+                    },
+                    name="Source",
+                    output_artifacts=["source_output"],
+                    owner="AWS",
+                    provider="CodeStarSourceConnection",
+                    version="1"
+                )
+                ],
+                name="Source"
+            ), CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category="Build",
+                    configuration={
+                        "ProjectName": "test"
+                    },
+                    input_artifacts=["source_output"],
+                    name="Build",
+                    output_artifacts=["build_output"],
+                    owner="AWS",
+                    provider="CodeBuild",
+                    version="1"
+                )
+                ],
+                name="Build"
+            ), CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category="Deploy",
+                    configuration={
+                        "ActionMode": "REPLACE_ON_FAILURE",
+                        "Capabilities": "CAPABILITY_AUTO_EXPAND,CAPABILITY_IAM",
+                        "OutputFileName": "CreateStackOutput.json",
+                        "StackName": "MyStack",
+                        "TemplatePath": "build_output::sam-templated.yaml"
+                    },
+                    input_artifacts=["build_output"],
+                    name="Deploy",
+                    owner="AWS",
+                    provider="CloudFormation",
+                    version="1"
+                )
+                ],
+                name="Deploy"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the pipeline.
+* `role_arn` - (Required) A service role Amazon Resource Name (ARN) that grants AWS CodePipeline permission to make calls to AWS services on your behalf.
+* `artifact_store` - (Required) One or more `artifact_store` blocks. Artifact stores are documented below.
+* `stage` - (Required) A `stage` block; at least two `stage` blocks are required. Stages are documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+An `artifact_store` block supports the following arguments:
+
+* `location` - (Required) The location where AWS CodePipeline stores artifacts for a pipeline; currently only `S3` is supported.
+* `type` - (Required) The type of the artifact store, such as Amazon S3.
+* `encryption_key` - (Optional) The encryption key block AWS CodePipeline uses to encrypt the data in the artifact store, such as an AWS Key Management Service (AWS KMS) key. If you don't specify a key, AWS CodePipeline uses the default key for Amazon Simple Storage Service (Amazon S3). An `encryption_key` block is documented below.
+* `region` - (Optional) The region where the artifact store is located. Required for a cross-region CodePipeline, do not provide for a single-region CodePipeline. A cross-region sketch is shown after this list.
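+
+As a rough sketch of the cross-region case (not from the provider docs: bucket names, regions, and the role ARN below are placeholders, and a real pipeline would still need at least two `stage` blocks), each region used by the pipeline gets its own `artifact_store`:
+
+```python
+# Hypothetical cross-region artifact stores; every region referenced by a
+# stage action needs a matching artifact_store entry.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.codepipeline import Codepipeline, CodepipelineArtifactStore
+class CrossRegionPipelineSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Codepipeline(self, "cross_region",
+            name="tf-cross-region-pipeline",
+            role_arn="arn:aws:iam::123456789012:role/codepipeline-role",  # placeholder
+            artifact_store=[CodepipelineArtifactStore(
+                location="codepipeline-artifacts-us-east-1",  # placeholder bucket
+                type="S3",
+                region="us-east-1"
+            ), CodepipelineArtifactStore(
+                location="codepipeline-artifacts-us-west-2",  # placeholder bucket
+                type="S3",
+                region="us-west-2"
+            )
+            ],
+            stage=[]  # stages omitted for brevity; see the full example above
+        )
+```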
+
+An `encryption_key` block supports the following arguments:
+
+* `id` - (Required) The KMS key ARN or ID.
+* `type` - (Required) The type of key; currently only `KMS` is supported.
+
+A `stage` block supports the following arguments:
+
+* `name` - (Required) The name of the stage.
+* `action` - (Required) The action(s) to include in the stage. Defined as an `action` block below.
+
+An `action` block supports the following arguments:
+
+* `category` - (Required) A category defines what kind of action can be taken in the stage, and constrains the provider type for the action. Possible values are `Approval`, `Build`, `Deploy`, `Invoke`, `Source` and `Test`.
+* `owner` - (Required) The creator of the action being called. Possible values are `AWS`, `Custom` and `ThirdParty`.
+* `name` - (Required) The action declaration's name.
+* `provider` - (Required) The provider of the service being called by the action. Valid providers are determined by the action category. Provider names are listed in the [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
+* `version` - (Required) A string that identifies the action type.
+* `configuration` - (Optional) A map of the action declaration's configuration. Configuration options for action types and providers can be found in the [Pipeline Structure Reference](http://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#action-requirements) and [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
+* `input_artifacts` - (Optional) A list of artifact names to be worked on.
+* `output_artifacts` - (Optional) A list of artifact names to output. Output artifact names must be unique within a pipeline.
+* `role_arn` - (Optional) The ARN of the IAM service role that will perform the declared action. This is assumed through the roleArn for the pipeline.
+* `run_order` - (Optional) The order in which actions are run.
+* `region` - (Optional) The region in which to run the action.
+* `namespace` - (Optional) The namespace all output variables will be accessed from.
+
+~> **Note:** The input artifact of an action must exactly match the output artifact declared in a preceding action, but the consuming action does not have to follow the producing action in strict sequence. Actions in parallel can declare different output artifacts, which are in turn consumed by different following actions.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CodePipeline ID.
+* `arn` - The CodePipeline ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodePipelines using the name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodePipelines using the name.
For example:
+
+```console
+% terraform import aws_codepipeline.foo example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codepipeline_custom_action_type.html.markdown b/website/docs/cdktf/python/r/codepipeline_custom_action_type.html.markdown
new file mode 100644
index 00000000000..975b36b531d
--- /dev/null
+++ b/website/docs/cdktf/python/r/codepipeline_custom_action_type.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "CodePipeline"
+layout: "aws"
+page_title: "AWS: aws_codepipeline_custom_action_type"
+description: |-
+  Provides a CodePipeline CustomActionType.
+---
+
+
+# Resource: aws_codepipeline_custom_action_type
+
+Provides a CodePipeline CustomActionType.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.codepipeline_custom_action_type import CodepipelineCustomActionType
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CodepipelineCustomActionType(self, "example",
+            category="Build",
+            input_artifact_details=CodepipelineCustomActionTypeInputArtifactDetails(
+                maximum_count=1,
+                minimum_count=0
+            ),
+            output_artifact_details=CodepipelineCustomActionTypeOutputArtifactDetails(
+                maximum_count=1,
+                minimum_count=0
+            ),
+            provider_name="example",
+            version="1"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `category` - (Required) The category of the custom action. Valid values: `Source`, `Build`, `Deploy`, `Test`, `Invoke`, `Approval`
+* `configuration_property` - (Optional) The configuration properties for the custom action. Max 10 items.
+
+The `configuration_property` object supports the following:
+
+* `description` - (Optional) The description of the action configuration property.
+* `key` - (Required) Whether the configuration property is a key.
+* `name` - (Required) The name of the action configuration property.
+* `queryable` - (Optional) Indicates that the property will be used in conjunction with `PollForJobs`.
+* `required` - (Required) Whether the configuration property is a required value.
+* `secret` - (Required) Whether the configuration property is secret.
+* `type` - (Optional) The type of the configuration property. Valid values: `String`, `Number`, `Boolean`
+
+* `input_artifact_details` - (Required) The details of the input artifact for the action.
+
+The `input_artifact_details` object supports the following:
+
+* `maximum_count` - (Required) The maximum number of artifacts allowed for the action type. Min: 0, Max: 5
+* `minimum_count` - (Required) The minimum number of artifacts allowed for the action type. Min: 0, Max: 5
+
+* `output_artifact_details` - (Required) The details of the output artifact of the action.
+
+The `output_artifact_details` object supports the following:
+
+* `maximum_count` - (Required) The maximum number of artifacts allowed for the action type. Min: 0, Max: 5
+* `minimum_count` - (Required) The minimum number of artifacts allowed for the action type. Min: 0, Max: 5
+
+* `provider_name` - (Required) The provider of the service used in the custom action.
+* `settings` - (Optional) The settings for an action type.
+
+The `settings` object supports the following:
+
+* `entity_url_template` - (Optional) The URL returned to the AWS CodePipeline console that provides a deep link to the resources of the external system.
+* `execution_url_template` - (Optional) The URL returned to the AWS CodePipeline console that contains a link to the top-level landing page for the external system.
+* `revision_url_template` - (Optional) The URL returned to the AWS CodePipeline console that contains a link to the page where customers can update or change the configuration of the external action.
+* `third_party_configuration_url` - (Optional) The URL of a sign-up page where users can sign up for an external service and perform initial configuration of the action provided by that service.
+
+* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `version` - (Required) The version identifier of the custom action.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Composed of category, provider and version. For example, `Build:terraform:1`.
+* `arn` - The action ARN.
+* `owner` - The creator of the action being called.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodePipeline CustomActionType using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodePipeline CustomActionType using the `id`. For example:
+
+```console
+% terraform import aws_codepipeline_custom_action_type.example Build:terraform:1
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codepipeline_webhook.markdown b/website/docs/cdktf/python/r/codepipeline_webhook.markdown
new file mode 100644
index 00000000000..2ad7ee2398e
--- /dev/null
+++ b/website/docs/cdktf/python/r/codepipeline_webhook.markdown
@@ -0,0 +1,161 @@
+---
+subcategory: "CodePipeline"
+layout: "aws"
+page_title: "AWS: aws_codepipeline_webhook"
+description: |-
+  Provides a CodePipeline Webhook
+---
+
+
+# Resource: aws_codepipeline_webhook
+
+Provides a CodePipeline Webhook.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
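+#
+# Note: this converted example references s3_kmskey, aws_s3_bucket_bar,
+# aws_iam_role_bar and repo without defining them; they are assumed to be
+# created elsewhere in the stack (a KMS alias data source, an S3 bucket, an
+# IAM role and a github_repository, respectively).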
+#
+from imports.aws.codepipeline import Codepipeline
+from imports.aws.codepipeline_webhook import CodepipelineWebhook
+from imports.github.repository_webhook import RepositoryWebhook
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The following providers are missing schema information and might need manual adjustments to synthesize correctly: github.
+        # For a more precise conversion please use the --provider flag in convert.
+        webhook_secret = "super-secret"
+        bar = Codepipeline(self, "bar",
+            artifact_store=[CodepipelineArtifactStore(
+                encryption_key=CodepipelineArtifactStoreEncryptionKey(
+                    id=Token.as_string(s3_kmskey.arn),
+                    type="KMS"
+                ),
+                location=Token.as_string(aws_s3_bucket_bar.bucket),
+                type="S3"
+            )
+            ],
+            name="tf-test-pipeline",
+            role_arn=Token.as_string(aws_iam_role_bar.arn),
+            stage=[CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category="Source",
+                    configuration={
+                        "Branch": "master",
+                        "Owner": "my-organization",
+                        "Repo": "test"
+                    },
+                    name="Source",
+                    output_artifacts=["test"],
+                    owner="ThirdParty",
+                    provider="GitHub",
+                    version="1"
+                )
+                ],
+                name="Source"
+            ), CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category="Build",
+                    configuration={
+                        "ProjectName": "test"
+                    },
+                    input_artifacts=["test"],
+                    name="Build",
+                    owner="AWS",
+                    provider="CodeBuild",
+                    version="1"
+                )
+                ],
+                name="Build"
+            )
+            ]
+        )
+        aws_codepipeline_webhook_bar = CodepipelineWebhook(self, "bar_1",
+            authentication="GITHUB_HMAC",
+            authentication_configuration=CodepipelineWebhookAuthenticationConfiguration(
+                secret_token=webhook_secret
+            ),
+            filter=[CodepipelineWebhookFilter(
+                json_path="$.ref",
+                match_equals="refs/heads/{Branch}"
+            )
+            ],
+            name="test-webhook-github-bar",
+            target_action="Source",
+            target_pipeline=bar.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codepipeline_webhook_bar.override_logical_id("bar")
+        github_repository_webhook_bar = RepositoryWebhook(self, "bar_2",
+            configuration=[{
+                "content_type": "json",
+                "insecure_ssl": True,
+                "secret": webhook_secret,
+                "url": aws_codepipeline_webhook_bar.url
+            }
+            ],
+            events=["push"],
+            name="web",
+            repository=repo.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        github_repository_webhook_bar.override_logical_id("bar")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the webhook.
+* `authentication` - (Required) The type of authentication to use. One of `IP`, `GITHUB_HMAC`, or `UNAUTHENTICATED`. A sketch of the `IP` mode is shown after this list.
+* `authentication_configuration` - (Optional) An `authentication_configuration` block. Required for `IP` and `GITHUB_HMAC`. The block is documented below.
+* `filter` - (Required) One or more `filter` blocks. Filter blocks are documented below.
+* `target_action` - (Required) The name of the action in a pipeline you want to connect to the webhook. The action must be from the source (first) stage of the pipeline.
+* `target_pipeline` - (Required) The name of the pipeline.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
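+
+As a sketch of the `IP` authentication mode referenced above (resource names, the CIDR range, and the pipeline name below are placeholders, not from the provider docs):
+
+```python
+# Hypothetical IP-authenticated webhook: only callers inside allowed_ip_range
+# may POST to the webhook URL.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.codepipeline_webhook import (CodepipelineWebhook,
+    CodepipelineWebhookAuthenticationConfiguration, CodepipelineWebhookFilter)
+class IpWebhookSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CodepipelineWebhook(self, "ip_webhook",
+            authentication="IP",
+            authentication_configuration=CodepipelineWebhookAuthenticationConfiguration(
+                allowed_ip_range="10.0.0.0/24"  # placeholder CIDR
+            ),
+            filter=[CodepipelineWebhookFilter(
+                json_path="$.ref",
+                match_equals="refs/heads/{Branch}"
+            )
+            ],
+            name="test-webhook-ip",
+            target_action="Source",
+            target_pipeline="tf-test-pipeline"  # name of an existing pipeline
+        )
+```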
+
+An `authentication_configuration` block supports the following arguments:
+
+* `secret_token` - (Optional) The shared secret for the GitHub repository webhook. Set this as `secret` in your `github_repository_webhook`'s `configuration` block. Required for `GITHUB_HMAC`.
+* `allowed_ip_range` - (Optional) A valid CIDR block for `IP` filtering. Required for `IP`.
+
+A `filter` block supports the following arguments:
+
+* `json_path` - (Required) The [JSON path](https://github.com/json-path/JsonPath) to filter on.
+* `match_equals` - (Required) The value to match on (e.g., `refs/heads/{Branch}`). See [AWS docs](https://docs.aws.amazon.com/codepipeline/latest/APIReference/API_WebhookFilterRule.html) for details.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The CodePipeline webhook's ARN.
+* `id` - The CodePipeline webhook's ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `url` - The CodePipeline webhook's URL. POST events to this endpoint to trigger the target.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodePipeline Webhooks using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodePipeline Webhooks using their ARN. For example:
+
+```console
+% terraform import aws_codepipeline_webhook.example arn:aws:codepipeline:us-west-2:123456789012:webhook:example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/codestarconnections_connection.markdown b/website/docs/cdktf/python/r/codestarconnections_connection.markdown
new file mode 100644
index 00000000000..0e65e73446c
--- /dev/null
+++ b/website/docs/cdktf/python/r/codestarconnections_connection.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "CodeStar Connections"
+layout: "aws"
+page_title: "AWS: aws_codestarconnections_connection"
+description: |-
+  Provides a CodeStar Connection
+---
+
+
+# Resource: aws_codestarconnections_connection
+
+Provides a CodeStar Connection.
+
+~> **NOTE:** The `aws_codestarconnections_connection` resource is created in the state `PENDING`. Authentication with the connection provider must be completed in the AWS Console. See the [AWS documentation](https://docs.aws.amazon.com/dtconsole/latest/userguide/connections-update.html) for details.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
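+#
+# Note: the keyword-only parameters in the converted example below (location,
+# type, category, name0, owner, provider, version, ...) and the
+# codepipeline_role reference are placeholders produced by conversion;
+# supply real values when instantiating the stack.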
+#
+from imports.aws.codepipeline import Codepipeline
+from imports.aws.codestarconnections_connection import CodestarconnectionsConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, location, type, category, name0, owner, provider, version, category1, name1, owner1, provider1, version1):
+        super().__init__(scope, name)
+        example = CodestarconnectionsConnection(self, "example",
+            name="example-connection",
+            provider_type="Bitbucket"
+        )
+        aws_codepipeline_example = Codepipeline(self, "example_1",
+            artifact_store=[CodepipelineArtifactStore(
+                location=location,
+                type=type
+            )
+            ],
+            name="tf-test-pipeline",
+            role_arn=codepipeline_role.arn,
+            stage=[CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category="Source",
+                    configuration={
+                        "BranchName": "main",
+                        "ConnectionArn": example.arn,
+                        "FullRepositoryId": "my-organization/test"
+                    },
+                    name="Source",
+                    output_artifacts=["source_output"],
+                    owner="AWS",
+                    provider="CodeStarSourceConnection",
+                    version="1"
+                )
+                ],
+                name="Source"
+            ), CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category=category,
+                    name=name0,
+                    owner=owner,
+                    provider=provider,
+                    version=version
+                )
+                ],
+                name="Build"
+            ), CodepipelineStage(
+                action=[CodepipelineStageAction(
+                    category=category1,
+                    name=name1,
+                    owner=owner1,
+                    provider=provider1,
+                    version=version1
+                )
+                ],
+                name="Deploy"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_codepipeline_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the connection to be created. The name must be unique in the calling AWS account. Changing `name` will create a new resource.
+* `provider_type` - (Optional) The name of the external provider where your third-party code repository is configured. Valid values are `Bitbucket`, `GitHub` or `GitHubEnterpriseServer`. Changing `provider_type` will create a new resource. Conflicts with `host_arn`.
+* `host_arn` - (Optional) The Amazon Resource Name (ARN) of the host associated with the connection. Conflicts with `provider_type`.
+* `tags` - (Optional) Map of key-value resource tags to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CodeStar connection ARN.
+* `arn` - The CodeStar connection ARN.
+* `connection_status` - The CodeStar connection status. Possible values are `PENDING`, `AVAILABLE` and `ERROR`.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeStar connections using the ARN.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeStar connections using the ARN. For example: + +```console +% terraform import aws_codestarconnections_connection.test-connection arn:aws:codestar-connections:us-west-1:0123456789:connection/79d4d357-a2ee-41e4-b350-2fe39ae59448 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codestarconnections_host.markdown b/website/docs/cdktf/python/r/codestarconnections_host.markdown new file mode 100644 index 00000000000..8dfe7b6be90 --- /dev/null +++ b/website/docs/cdktf/python/r/codestarconnections_host.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "CodeStar Connections" +layout: "aws" +page_title: "AWS: aws_codestarconnections_host" +description: |- + Provides a CodeStar Host +--- + + + +# Resource: aws_codestarconnections_host + +Provides a CodeStar Host. + +~> **NOTE:** The `aws_codestarconnections_host` resource is created in the state `PENDING`. Authentication with the host provider must be completed in the AWS Console. For more information visit [Set up a pending host](https://docs.aws.amazon.com/dtconsole/latest/userguide/connections-host-setup.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codestarconnections_host import CodestarconnectionsHost +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CodestarconnectionsHost(self, "example", + name="example-host", + provider_endpoint="https://example.com", + provider_type="GitHubEnterpriseServer" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the host to be created. The name must be unique in the calling AWS account. +* `provider_endpoint` - (Required) The endpoint of the infrastructure to be represented by the host after it is created. +* `provider_type` - (Required) The name of the external provider where your third-party code repository is configured. +* `vpc_configuration` - (Optional) The VPC configuration to be provisioned for the host. A VPC must be configured, and the infrastructure to be represented by the host must already be connected to the VPC. + +A `vpc_configuration` block supports the following arguments: + +* `security_group_ids` - (Required) ID of the security group or security groups associated with the Amazon VPC connected to the infrastructure where your provider type is installed. +* `subnet_ids` - (Required) The ID of the subnet or subnets associated with the Amazon VPC connected to the infrastructure where your provider type is installed. +* `tls_certificate` - (Optional) The value of the Transport Layer Security (TLS) certificate associated with the infrastructure where your provider type is installed. +* `vpc_id` - (Required) The ID of the Amazon VPC connected to the infrastructure where your provider type is installed. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The CodeStar Host ARN. 
+* `arn` - The CodeStar Host ARN. +* `status` - The CodeStar Host status. Possible values are `PENDING`, `AVAILABLE`, `VPC_CONFIG_DELETING`, `VPC_CONFIG_INITIALIZING`, and `VPC_CONFIG_FAILED_INITIALIZATION`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeStar Host using the ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CodeStar Host using the ARN. For example: + +```console +% terraform import aws_codestarconnections_host.example-host arn:aws:codestar-connections:us-west-1:0123456789:host/79d4d357-a2ee-41e4-b350-2fe39ae59448 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/codestarnotifications_notification_rule.markdown b/website/docs/cdktf/python/r/codestarnotifications_notification_rule.markdown new file mode 100644 index 00000000000..fb8b07e3791 --- /dev/null +++ b/website/docs/cdktf/python/r/codestarnotifications_notification_rule.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "CodeStar Notifications" +layout: "aws" +page_title: "AWS: aws_codestarnotifications_notification_rule" +description: |- + Provides a CodeStar Notifications Rule +--- + + + +# Resource: aws_codestarnotifications_notification_rule + +Provides a CodeStar Notifications Rule. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.codecommit_repository import CodecommitRepository +from imports.aws.codestarnotifications_notification_rule import CodestarnotificationsNotificationRule +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.sns_topic import SnsTopic +from imports.aws.sns_topic_policy import SnsTopicPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + code = CodecommitRepository(self, "code", + repository_name="example-code-repo" + ) + notif = SnsTopic(self, "notif", + name="notification" + ) + notif_access = DataAwsIamPolicyDocument(self, "notif_access", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sns:Publish"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["codestar-notifications.amazonaws.com"], + type="Service" + ) + ], + resources=[notif.arn] + ) + ] + ) + CodestarnotificationsNotificationRule(self, "commits", + detail_type="BASIC", + event_type_ids=["codecommit-repository-comments-on-commits"], + name="example-code-repo-commits", + resource=code.arn, + target=[CodestarnotificationsNotificationRuleTarget( + address=notif.arn + ) + ] + ) + SnsTopicPolicy(self, "default", + arn=notif.arn, + policy=Token.as_string(notif_access.json) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `detail_type` - (Required) The level of detail to include in the notifications for this resource. Possible values are `BASIC` and `FULL`. +* `event_type_ids` - (Required) A list of event types associated with this notification rule. 
+  For list of allowed events see [here](https://docs.aws.amazon.com/codestar-notifications/latest/userguide/concepts.html#concepts-api).
+* `name` - (Required) The name of the notification rule.
+* `resource` - (Required) The ARN of the resource to associate with the notification rule.
+* `status` - (Optional) The status of the notification rule. Possible values are `ENABLED` and `DISABLED`, default is `ENABLED`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `target` - (Optional) Configuration blocks containing notification target information. Can be specified multiple times. At least one target must be specified on creation.
+
+A `target` block supports the following arguments:
+
+* `address` - (Required) The ARN of the notification rule target. For example, an SNS topic ARN.
+* `type` - (Optional) The type of the notification target. Default value is `SNS`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CodeStar notification rule ARN.
+* `arn` - The CodeStar notification rule ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeStar notification rule using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CodeStar notification rule using the ARN. For example:
+
+```console
+% terraform import aws_codestarnotifications_notification_rule.foo arn:aws:codestar-notifications:us-west-1:0123456789:notificationrule/2cdc68a3-8f7c-4893-b6a5-45b362bd4f2b
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cognito_identity_pool.markdown b/website/docs/cdktf/python/r/cognito_identity_pool.markdown
new file mode 100644
index 00000000000..38b23829bff
--- /dev/null
+++ b/website/docs/cdktf/python/r/cognito_identity_pool.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "Cognito Identity"
+layout: "aws"
+page_title: "AWS: aws_cognito_identity_pool"
+description: |-
+  Provides an AWS Cognito Identity Pool.
+---
+
+
+# Resource: aws_cognito_identity_pool
+
+Provides an AWS Cognito Identity Pool.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
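+#
+# Note: Fn.file renders Terraform's file() function, so saml-metadata.xml is
+# read by Terraform at plan/apply time, relative to the synthesized stack
+# directory; the file is assumed to exist there.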
+#
+from imports.aws.cognito_identity_pool import CognitoIdentityPool
+from imports.aws.iam_saml_provider import IamSamlProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        default_var = IamSamlProvider(self, "default",
+            name="my-saml-provider",
+            saml_metadata_document=Token.as_string(Fn.file("saml-metadata.xml"))
+        )
+        CognitoIdentityPool(self, "main",
+            allow_classic_flow=False,
+            allow_unauthenticated_identities=False,
+            cognito_identity_providers=[CognitoIdentityPoolCognitoIdentityProviders(
+                client_id="6lhlkkfbfb4q5kpp90urffae",
+                provider_name="cognito-idp.us-east-1.amazonaws.com/us-east-1_Tv0493apJ",
+                server_side_token_check=False
+            ), CognitoIdentityPoolCognitoIdentityProviders(
+                client_id="7kodkvfqfb4qfkp39eurffae",
+                provider_name="cognito-idp.us-east-1.amazonaws.com/eu-west-1_Zr231apJu",
+                server_side_token_check=False
+            )
+            ],
+            identity_pool_name="identity pool",
+            openid_connect_provider_arns=["arn:aws:iam::123456789012:oidc-provider/id.example.com"
+            ],
+            saml_provider_arns=[default_var.arn],
+            supported_login_providers={
+                "accounts.google.com": "123456789012.apps.googleusercontent.com",
+                "graph.facebook.com": "7346241598935552"
+            }
+        )
+```
+
+## Argument Reference
+
+The Cognito Identity Pool argument layout is a structure composed of several sub-resources - these resources are laid out below.
+
+* `identity_pool_name` (Required) - The Cognito Identity Pool name.
+* `allow_unauthenticated_identities` (Required) - Whether the identity pool supports unauthenticated logins or not.
+* `allow_classic_flow` (Optional) - Enables or disables the classic / basic authentication flow. Default is `false`.
+* `developer_provider_name` (Optional) - The "domain" by which Cognito will refer to your users. This name acts as a placeholder that allows your backend and the Cognito service to communicate about the developer provider.
+* `cognito_identity_providers` (Optional) - An array of [Amazon Cognito Identity user pools](#cognito-identity-providers) and their client IDs.
+* `openid_connect_provider_arns` (Optional) - Set of OpenID Connect provider ARNs.
+* `saml_provider_arns` (Optional) - An array of Amazon Resource Names (ARNs) of the SAML provider for your identity.
+* `supported_login_providers` (Optional) - Key-Value pairs mapping provider names to provider app IDs.
+* `tags` - (Optional) A map of tags to assign to the Identity Pool. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Cognito Identity Providers
+
+* `client_id` (Optional) - The client ID for the Amazon Cognito Identity User Pool.
+* `provider_name` (Optional) - The provider name for an Amazon Cognito Identity User Pool.
+* `server_side_token_check` (Optional) - Whether server-side token validation is enabled for the identity provider’s token or not.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - An identity pool ID, e.g. `us-west-2:1a234567-8901-234b-5cde-f6789g01h2i3`.
+* `arn` - The ARN of the identity pool.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Identity Pool using its ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Cognito Identity Pool using its ID. For example: + +```console +% terraform import aws_cognito_identity_pool.mypool us-west-2:1a234567-8901-234b-5cde-f6789g01h2i3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_identity_pool_provider_principal_tag.markdown b/website/docs/cdktf/python/r/cognito_identity_pool_provider_principal_tag.markdown new file mode 100644 index 00000000000..7385bf96fdc --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_identity_pool_provider_principal_tag.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Cognito Identity" +layout: "aws" +page_title: "AWS: aws_cognito_identity_pool_provider_principal_tag" +description: |- + Provides an AWS Cognito Identity Principal Mapping. +--- + + + +# Resource: aws_cognito_identity_pool_provider_principal_tag + +Provides an AWS Cognito Identity Principal Mapping. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_identity_pool import CognitoIdentityPool +from imports.aws.cognito_identity_pool_provider_principal_tag import CognitoIdentityPoolProviderPrincipalTag +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_client import CognitoUserPoolClient +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CognitoUserPool(self, "example", + auto_verified_attributes=["email"], + name="user pool" + ) + aws_cognito_user_pool_client_example = CognitoUserPoolClient(self, "example_1", + name="client", + supported_identity_providers=Token.as_list( + Fn.compact(Token.as_list(["COGNITO"]))), + user_pool_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cognito_user_pool_client_example.override_logical_id("example") + aws_cognito_identity_pool_example = CognitoIdentityPool(self, "example_2", + allow_unauthenticated_identities=False, + cognito_identity_providers=[CognitoIdentityPoolCognitoIdentityProviders( + client_id=Token.as_string(aws_cognito_user_pool_client_example.id), + provider_name=example.endpoint, + server_side_token_check=False + ) + ], + identity_pool_name="identity pool" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_cognito_identity_pool_example.override_logical_id("example")
+        aws_cognito_identity_pool_provider_principal_tag_example = CognitoIdentityPoolProviderPrincipalTag(self, "example_3",
+            identity_pool_id=Token.as_string(aws_cognito_identity_pool_example.id),
+            identity_provider_name=example.endpoint,
+            principal_tags={
+                "test": "value"
+            },
+            use_defaults=False
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_identity_pool_provider_principal_tag_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `identity_pool_id` (Required) - An identity pool ID.
+* `identity_provider_name` (Required) - The name of the identity provider.
+* `principal_tags` - (Optional) String-to-string map of principal tags to attribute names. Defaults to an empty map.
+* `use_defaults` - (Optional) Use the default (`username` and `clientID`) attribute mappings. Defaults to `true`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Identity Pool Provider Principal Tags using the Identity Pool ID and provider name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cognito Identity Pool Provider Principal Tags using the Identity Pool ID and provider name. For example:
+
+```console
+% terraform import aws_cognito_identity_pool_provider_principal_tag.example us-west-2_abc123:CorpAD
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cognito_identity_pool_roles_attachment.markdown b/website/docs/cdktf/python/r/cognito_identity_pool_roles_attachment.markdown
new file mode 100644
index 00000000000..34ffe190363
--- /dev/null
+++ b/website/docs/cdktf/python/r/cognito_identity_pool_roles_attachment.markdown
@@ -0,0 +1,155 @@
+---
+subcategory: "Cognito Identity"
+layout: "aws"
+page_title: "AWS: aws_cognito_identity_pool_roles_attachment"
+description: |-
+  Provides an AWS Cognito Identity Pool Roles Attachment.
+---
+
+
+# Resource: aws_cognito_identity_pool_roles_attachment
+
+Provides an AWS Cognito Identity Pool Roles Attachment.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
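+#
+# Note: only the authenticated role is defined below; pools that allow guest
+# access would also create an unauthenticated role and add it to the roles
+# map under the "unauthenticated" key.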
+#
+from imports.aws.cognito_identity_pool import CognitoIdentityPool
+from imports.aws.cognito_identity_pool_roles_attachment import CognitoIdentityPoolRolesAttachment
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = CognitoIdentityPool(self, "main",
+            allow_unauthenticated_identities=False,
+            identity_pool_name="identity pool",
+            supported_login_providers={
+                "graph.facebook.com": "7346241598935555"
+            }
+        )
+        authenticated = DataAwsIamPolicyDocument(self, "authenticated",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRoleWithWebIdentity"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="StringEquals",
+                    values=[main.id],
+                    variable="cognito-identity.amazonaws.com:aud"
+                ), DataAwsIamPolicyDocumentStatementCondition(
+                    test="ForAnyValue:StringLike",
+                    values=["authenticated"],
+                    variable="cognito-identity.amazonaws.com:amr"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["cognito-identity.amazonaws.com"],
+                    type="Federated"
+                )
+                ]
+            )
+            ]
+        )
+        authenticated_role_policy = DataAwsIamPolicyDocument(self, "authenticated_role_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["mobileanalytics:PutEvents", "cognito-sync:*", "cognito-identity:*"
+                ],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        aws_iam_role_authenticated = IamRole(self, "authenticated_3",
+            assume_role_policy=Token.as_string(authenticated.json),
+            name="cognito_authenticated"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_authenticated.override_logical_id("authenticated")
+        aws_iam_role_policy_authenticated = IamRolePolicy(self, "authenticated_4",
+            name="authenticated_policy",
+            policy=Token.as_string(authenticated_role_policy.json),
+            role=Token.as_string(aws_iam_role_authenticated.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_authenticated.override_logical_id("authenticated")
+        aws_cognito_identity_pool_roles_attachment_main = CognitoIdentityPoolRolesAttachment(self, "main_5",
+            identity_pool_id=main.id,
+            role_mapping=[CognitoIdentityPoolRolesAttachmentRoleMapping(
+                ambiguous_role_resolution="AuthenticatedRole",
+                identity_provider="graph.facebook.com",
+                mapping_rule=[CognitoIdentityPoolRolesAttachmentRoleMappingMappingRule(
+                    claim="isAdmin",
+                    match_type="Equals",
+                    role_arn=Token.as_string(aws_iam_role_authenticated.arn),
+                    value="paid"
+                )
+                ],
+                type="Rules"
+            )
+            ],
+            roles={
+                "authenticated": Token.as_string(aws_iam_role_authenticated.arn)
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_identity_pool_roles_attachment_main.override_logical_id("main")
+```
+
+## Argument Reference
+
+The Cognito Identity Pool Roles Attachment argument layout is a structure composed of several sub-resources - these resources are laid out below.
+
+* `identity_pool_id` (Required) - An identity pool ID in the format `REGION_GUID`.
+* `role_mapping` (Optional) - A list of [Role Mapping](#role-mappings).
+* `roles` (Required) - The map of roles associated with this pool.
For a given role, the key will be either "authenticated" or "unauthenticated" and the value will be the Role ARN. + +#### Role Mappings + +* `identity_provider` (Required) - A string identifying the identity provider, for example, "graph.facebook.com" or "cognito-idp.us-east-1.amazonaws.com/us-east-1_abcdefghi:app_client_id". Depends on `cognito_identity_providers` set on `aws_cognito_identity_pool` resource or a `aws_cognito_identity_provider` resource. +* `ambiguous_role_resolution` (Optional) - Specifies the action to be taken if either no rules match the claim value for the Rules type, or there is no cognito:preferred_role claim and there are multiple cognito:roles matches for the Token type. `Required` if you specify Token or Rules as the Type. +* `mapping_rule` (Optional) - The [Rules Configuration](#rules-configuration) to be used for mapping users to roles. You can specify up to 25 rules per identity provider. Rules are evaluated in order. The first one to match specifies the role. +* `type` (Required) - The role mapping type. + +#### Rules Configuration + +* `claim` (Required) - The claim name that must be present in the token, for example, "isAdmin" or "paid". +* `match_type` (Required) - The match condition that specifies how closely the claim value in the IdP token must match Value. +* `role_arn` (Required) - The role ARN. +* `value` (Required) - A brief string that the claim must match, for example, "paid" or "yes". + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The identity pool ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Identity Pool Roles Attachment using the Identity Pool ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Cognito Identity Pool Roles Attachment using the Identity Pool ID. For example: + +```console +% terraform import aws_cognito_identity_pool_roles_attachment.example us-west-2:b64805ad-cb56-40ba-9ffc-f5d8207e6d42 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_identity_provider.html.markdown b/website/docs/cdktf/python/r/cognito_identity_provider.html.markdown new file mode 100644 index 00000000000..c3880d5ab7d --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_identity_provider.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_identity_provider" +side_bar_current: "docs-aws-resource-cognito-identity-provider" +description: |- + Provides a Cognito User Identity Provider resource. +--- + + + +# Resource: aws_cognito_identity_provider + +Provides a Cognito User Identity Provider resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.cognito_identity_provider import CognitoIdentityProvider
+from imports.aws.cognito_user_pool import CognitoUserPool
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CognitoUserPool(self, "example",
+            auto_verified_attributes=["email"],
+            name="example-pool"
+        )
+        CognitoIdentityProvider(self, "example_provider",
+            attribute_mapping={
+                "email": "email",
+                "username": "sub"
+            },
+            provider_details={
+                "authorize_scopes": "email",
+                "client_id": "your client_id",
+                "client_secret": "your client_secret"
+            },
+            provider_name="Google",
+            provider_type="Google",
+            user_pool_id=example.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `user_pool_id` (Required) - The user pool ID.
+* `provider_name` (Required) - The provider name.
+* `provider_type` (Required) - The provider type. [See AWS API for valid values](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_CreateIdentityProvider.html#CognitoUserPools-CreateIdentityProvider-request-ProviderType)
+* `attribute_mapping` (Optional) - The map of attribute mapping of user pool attributes. [AttributeMapping in AWS API documentation](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_CreateIdentityProvider.html#CognitoUserPools-CreateIdentityProvider-request-AttributeMapping)
+* `idp_identifiers` (Optional) - The list of identity providers.
+* `provider_details` (Optional) - The map of identity details, such as access tokens.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_cognito_identity_provider` resources using their User Pool ID and Provider Name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_cognito_identity_provider` resources using their User Pool ID and Provider Name. For example:
+
+```console
+% terraform import aws_cognito_identity_provider.example us-west-2_abc123:CorpAD
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cognito_managed_user_pool_client.html.markdown b/website/docs/cdktf/python/r/cognito_managed_user_pool_client.html.markdown
new file mode 100644
index 00000000000..2b7fd14cac4
--- /dev/null
+++ b/website/docs/cdktf/python/r/cognito_managed_user_pool_client.html.markdown
@@ -0,0 +1,186 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_managed_user_pool_client"
+description: |-
+  Use the `aws_cognito_managed_user_pool_client` resource to manage a Cognito User Pool Client that is created by another service.
+---
+
+
+# Resource: aws_cognito_managed_user_pool_client
+
+Use the `aws_cognito_managed_user_pool_client` resource to manage a Cognito User Pool Client that is created by another AWS service.
+
+**This resource is advanced** and has special caveats to consider before use. Please read this document completely before using the resource.
+
+Use the `aws_cognito_managed_user_pool_client` resource to manage a Cognito User Pool Client that is automatically created by an AWS service.
For instance, when [configuring an OpenSearch Domain to use Cognito authentication](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/cognito-auth.html), the OpenSearch service creates the User Pool Client during setup and removes it when it is no longer required. As a result, the `aws_cognito_managed_user_pool_client` resource does not create or delete this resource, but instead assumes management of it.
+
+Use the [`aws_cognito_user_pool_client`](cognito_user_pool_client.html) resource to manage Cognito User Pool Clients for normal use cases.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_identity_pool import CognitoIdentityPool
+from imports.aws.cognito_managed_user_pool_client import CognitoManagedUserPoolClient
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.data_aws_partition import DataAwsPartition
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+from imports.aws.opensearch_domain import OpensearchDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CognitoIdentityPool(self, "example",
+            identity_pool_name="example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["cognito_identity_providers"]
+            )
+        )
+        aws_cognito_user_pool_example = CognitoUserPool(self, "example_1",
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_example.override_logical_id("example")
+        # The user pool domain referenced by the OpenSearch domain's depends_on below; the domain name is illustrative.
+        aws_cognito_user_pool_domain_example = CognitoUserPoolDomain(self, "example_2",
+            domain="example-domain",
+            user_pool_id=Token.as_string(aws_cognito_user_pool_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_domain_example.override_logical_id("example")
+        current = DataAwsPartition(self, "current")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_3",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["es.${" + current.dns_suffix + "}"],
+                    type="Service"
+                )
+                ],
+                sid=""
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_iam_role_example = IamRole(self, "example_4",
+            assume_role_policy=Token.as_string(data_aws_iam_policy_document_example.json),
+            name="example-role",
+            path="/service-role/"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_example.override_logical_id("example")
+        aws_iam_role_policy_attachment_example = IamRolePolicyAttachment(self, "example_5",
+            policy_arn="arn:${" + current.partition + "}:iam::aws:policy/AmazonESCognitoAccess",
+            role=Token.as_string(aws_iam_role_example.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_example.override_logical_id("example")
+        aws_opensearch_domain_example = OpensearchDomain(self, "example_6",
+            cognito_options=OpensearchDomainCognitoOptions(
+                enabled=True,
+                identity_pool_id=example.id,
+                role_arn=Token.as_string(aws_iam_role_example.arn),
+                user_pool_id=Token.as_string(aws_cognito_user_pool_example.id)
+            ),
+            depends_on=[aws_cognito_user_pool_domain_example, aws_iam_role_policy_attachment_example
+            ],
+            domain_name="example",
+            ebs_options=OpensearchDomainEbsOptions(
+                ebs_enabled=True,
+                volume_size=10
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_opensearch_domain_example.override_logical_id("example")
+        aws_cognito_managed_user_pool_client_example = CognitoManagedUserPoolClient(self, "example_7",
+            depends_on=[aws_opensearch_domain_example],
+            name_prefix="AmazonOpenSearchService-example",
+            user_pool_id=Token.as_string(aws_cognito_user_pool_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_managed_user_pool_client_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `user_pool_id` - (Required) User pool that the client belongs to.
+* `name_pattern` - (Required, one of `name_pattern` or `name_prefix`) Regular expression that matches the name of the desired User Pool Client. It must match only one User Pool Client.
+* `name_prefix` - (Required, one of `name_prefix` or `name_pattern`) String that matches the beginning of the name of the desired User Pool Client. It must match only one User Pool Client.
+
+The following arguments are optional:
+
+* `access_token_validity` - (Optional) Time limit, between 5 minutes and 1 day, after which the access token is no longer valid and cannot be used. By default, the unit is hours. The unit can be overridden by a value in `token_validity_units.access_token`.
+* `allowed_oauth_flows_user_pool_client` - (Optional) Whether the client is allowed to use the OAuth protocol when interacting with Cognito user pools.
+* `allowed_oauth_flows` - (Optional) List of allowed OAuth flows, including code, implicit, and client_credentials.
+* `allowed_oauth_scopes` - (Optional) List of allowed OAuth scopes, including phone, email, openid, profile, and aws.cognito.signin.user.admin.
+* `analytics_configuration` - (Optional) Configuration block for Amazon Pinpoint analytics that collects metrics for this user pool. See [details below](#analytics_configuration).
+* `auth_session_validity` - (Optional) Duration, in minutes, of the session token created by Amazon Cognito for each API request in an authentication flow. The session token must be responded to by the native user of the user pool before it expires. Valid values for `auth_session_validity` are between `3` and `15`, with a default value of `3`.
+* `callback_urls` - (Optional) List of allowed callback URLs for the identity providers.
+* `default_redirect_uri` - (Optional) Default redirect URI. Must be included in the list of callback URLs.
+* `enable_token_revocation` - (Optional) Enables or disables token revocation.
+* `enable_propagate_additional_user_context_data` - (Optional) Enables the propagation of additional user context data.
+* `explicit_auth_flows` - (Optional) List of authentication flows.
The available options include ADMIN_NO_SRP_AUTH, CUSTOM_AUTH_FLOW_ONLY, USER_PASSWORD_AUTH, ALLOW_ADMIN_USER_PASSWORD_AUTH, ALLOW_CUSTOM_AUTH, ALLOW_USER_PASSWORD_AUTH, ALLOW_USER_SRP_AUTH, and ALLOW_REFRESH_TOKEN_AUTH.
+* `generate_secret` - (Optional) Boolean flag indicating whether an application secret should be generated.
+* `id_token_validity` - (Optional) Time limit, between 5 minutes and 1 day, after which the ID token is no longer valid and cannot be used. By default, the unit is hours. The unit can be overridden by a value in `token_validity_units.id_token`.
+* `logout_urls` - (Optional) List of allowed logout URLs for the identity providers.
+* `prevent_user_existence_errors` - (Optional) Determines the errors and responses returned by Cognito APIs when a user does not exist in the user pool during authentication, account confirmation, and password recovery.
+* `read_attributes` - (Optional) List of user pool attributes that the application client can read from.
+* `refresh_token_validity` - (Optional) Time limit, between 60 minutes and 10 years, after which the refresh token is no longer valid and cannot be used. By default, the unit is days. The unit can be overridden by a value in `token_validity_units.refresh_token`.
+* `supported_identity_providers` - (Optional) List of provider names for the identity providers that are supported on this client. It uses the `provider_name` attribute of the `aws_cognito_identity_provider` resource(s), or the equivalent string(s).
+* `token_validity_units` - (Optional) Configuration block for units in which to represent the validity times. [Detailed below](#token_validity_units).
+* `write_attributes` - (Optional) List of user pool attributes that the application client can write to.
+
+### analytics_configuration
+
+Either `application_arn` or `application_id` is required for this configuration block.
+
+* `application_arn` - (Optional) Application ARN for an Amazon Pinpoint application. Conflicts with `external_id` and `role_arn`.
+* `application_id` - (Optional) Unique identifier for an Amazon Pinpoint application.
+* `external_id` - (Optional) ID for the Analytics Configuration. Conflicts with `application_arn`.
+* `role_arn` - (Optional) ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics. Conflicts with `application_arn`.
+* `user_data_shared` - (Optional) If `user_data_shared` is set to `true`, Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
+
+### token_validity_units
+
+Valid values for the following arguments are: `seconds`, `minutes`, `hours`, or `days`.
+
+* `access_token` - (Optional) Time unit for the value in `access_token_validity`. Defaults to `hours`.
+* `id_token` - (Optional) Time unit for the value in `id_token_validity`. Defaults to `hours`.
+* `refresh_token` - (Optional) Time unit for the value in `refresh_token_validity`. Defaults to `days`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `client_secret` - Client secret of the user pool client.
+* `id` - Unique identifier for the user pool client.
+* `name` - Name of the user pool client.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool Clients using the `id` of the Cognito User Pool and the `id` of the Cognito User Pool Client.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Cognito User Pool Clients using the `id` of the Cognito User Pool and the `id` of the Cognito User Pool Client. For example: + +```console +% terraform import aws_cognito_managed_user_pool_client.client us-west-2_abc123/3ho4ek12345678909nh3fmhpko +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_resource_server.html.markdown b/website/docs/cdktf/python/r/cognito_resource_server.html.markdown new file mode 100644 index 00000000000..32cee9473ef --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_resource_server.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_resource_server" +side_bar_current: "docs-aws-resource-cognito-resource-server" +description: |- + Provides a Cognito Resource Server. +--- + + + +# Resource: aws_cognito_resource_server + +Provides a Cognito Resource Server. + +## Example Usage + +### Create a basic resource server + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_resource_server import CognitoResourceServer +from imports.aws.cognito_user_pool import CognitoUserPool +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + pool = CognitoUserPool(self, "pool", + name="pool" + ) + CognitoResourceServer(self, "resource", + identifier="https://example.com", + name="example", + user_pool_id=pool.id + ) +``` + +### Create a resource server with sample-scope + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_resource_server import CognitoResourceServer +from imports.aws.cognito_user_pool import CognitoUserPool +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + pool = CognitoUserPool(self, "pool", + name="pool" + ) + CognitoResourceServer(self, "resource", + identifier="https://example.com", + name="example", + scope=[CognitoResourceServerScope( + scope_description="a Sample Scope Description", + scope_name="sample-scope" + ) + ], + user_pool_id=pool.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `identifier` - (Required) An identifier for the resource server. +* `name` - (Required) A name for the resource server. +* `scope` - (Optional) A list of [Authorization Scope](#authorization-scope). + +### Authorization Scope + +* `scope_name` - (Required) The scope name. +* `scope_description` - (Required) The scope description. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `scope_identifiers` - A list of all scopes configured for this resource server in the format identifier/scope_name. 
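+
+As an illustration of the `identifier/scope_name` format, the hand-written sketch below (not `cdktf convert` output) grants a user pool client the custom scope published by a resource server like the one above. The construct names and the `client_credentials` flow are illustrative assumptions, not part of the generated documentation.
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.cognito_resource_server import CognitoResourceServer, CognitoResourceServerScope
+from imports.aws.cognito_user_pool_client import CognitoUserPoolClient
+class ScopedClientSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        pool = CognitoUserPool(self, "pool",
+            name="pool"
+        )
+        resource = CognitoResourceServer(self, "resource",
+            identifier="https://example.com",
+            name="example",
+            scope=[CognitoResourceServerScope(
+                scope_description="a Sample Scope Description",
+                scope_name="sample-scope"
+            )
+            ],
+            user_pool_id=pool.id
+        )
+        CognitoUserPoolClient(self, "client",
+            allowed_oauth_flows=["client_credentials"],
+            allowed_oauth_flows_user_pool_client=True,
+            # Scope strings take the form "<identifier>/<scope_name>".
+            allowed_oauth_scopes=["https://example.com/sample-scope"],
+            # The client_credentials flow requires a confidential client (secret).
+            generate_secret=True,
+            # depends_on ensures the scope exists before the client references it.
+            depends_on=[resource],
+            name="client",
+            user_pool_id=pool.id
+        )
+```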
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_cognito_resource_server` using their User Pool ID and Identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_cognito_resource_server` using their User Pool ID and Identifier. For example: + +```console +% terraform import aws_cognito_resource_server.example "us-west-2_abc123|https://example.com" +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_risk_configuration.html.markdown b/website/docs/cdktf/python/r/cognito_risk_configuration.html.markdown new file mode 100644 index 00000000000..675b0258908 --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_risk_configuration.html.markdown @@ -0,0 +1,142 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_risk_configuration" +description: |- + Provides a Cognito Risk Configuration resource. +--- + + + +# Resource: aws_cognito_risk_configuration + +Provides a Cognito Risk Configuration resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_risk_configuration import CognitoRiskConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CognitoRiskConfiguration(self, "example", + risk_exception_configuration=CognitoRiskConfigurationRiskExceptionConfiguration( + blocked_ip_range_list=["10.10.10.10/32"] + ), + user_pool_id=Token.as_string(aws_cognito_user_pool_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user_pool_id` - (Required) The user pool ID. +* `client_id` - (Optional) The app client ID. When the client ID is not provided, the same risk configuration is applied to all the clients in the User Pool. +* `account_takeover_risk_configuration` - (Optional) The account takeover risk configuration. See details below. +* `compromised_credentials_risk_configuration` - (Optional) The compromised credentials risk configuration. See details below. +* `risk_exception_configuration` - (Optional) The configuration to override the risk decision. See details below. + +### account_takeover_risk_configuration + +* `notify_configuration` - (Required) The notify configuration used to construct email notifications. See details below. +* `actions` - (Required) Account takeover risk configuration actions. See details below. + +#### notify_configuration + +* `block_email` - (Optional) Email template used when a detected risk event is blocked. See notify email type below. +* `mfa_email` - (Optional) The multi-factor authentication (MFA) email template used when MFA is challenged as part of a detected risk. See notify email type below. +* `no_action_email` - (Optional) The email template used when a detected risk event is allowed. See notify email type below. +* `from` - (Optional) The email address that is sending the email. 
The address must be either individually verified with Amazon Simple Email Service, or from a domain that has been verified with Amazon SES.
+* `reply_to` - (Optional) The destination to which the receiver of the email should reply.
+* `source_arn` - (Required) The Amazon Resource Name (ARN) of the identity that is associated with the sending authorization policy. This identity permits Amazon Cognito to send email for the address specified in the `from` parameter.
+
+##### notify email type
+
+* `html_body` - (Required) The email HTML body.
+* `subject` - (Required) The email subject.
+* `text_body` - (Required) The email text body.
+
+#### actions
+
+* `high_action` - (Optional) Action to take for a high risk. See action block below.
+* `low_action` - (Optional) Action to take for a low risk. See action block below.
+* `medium_action` - (Optional) Action to take for a medium risk. See action block below.
+
+##### action
+
+* `event_action` - (Required) The action to take in response to the account takeover action. Valid values are `BLOCK`, `MFA_IF_CONFIGURED`, `MFA_REQUIRED` and `NO_ACTION`.
+* `notify` - (Required) Whether to send a notification.
+
+### compromised_credentials_risk_configuration
+
+* `event_filter` - (Optional) Perform the action for these events. The default is to perform all events if no event filter is specified. Valid values are `SIGN_IN`, `PASSWORD_CHANGE`, and `SIGN_UP`.
+* `actions` - (Required) The compromised credentials risk configuration actions. See details below.
+
+#### actions
+
+* `event_action` - (Optional) The event action. Valid values are `BLOCK` or `NO_ACTION`.
+
+### risk_exception_configuration
+
+* `blocked_ip_range_list` - (Optional) Overrides the risk decision to always block the pre-authentication requests.
+  The IP range is in CIDR notation, a compact representation of an IP address and its routing prefix.
+  Can contain a maximum of 200 items.
+* `skipped_ip_range_list` - (Optional) Risk detection isn't performed on the IP addresses in this range list.
+  The IP range is in CIDR notation.
+  Can contain a maximum of 200 items.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The user pool ID or the user pool ID and Client ID separated by a `:` if the configuration is client specific.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Risk Configurations using the user pool ID or the user pool ID and Client ID separated by a `:`. For example:
+
+Import using the user pool ID:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Import using the user pool ID and Client ID separated by a `:`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cognito Risk Configurations using the user pool ID or the user pool ID and Client ID separated by a `:`.
For example: + +Import using the user pool ID: + +```console +% terraform import aws_cognito_risk_configuration.main example +``` + +Import using the user pool ID and Client ID separated by a `:`: + +```console +% terraform import aws_cognito_risk_configuration.main example:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_user.html.markdown b/website/docs/cdktf/python/r/cognito_user.html.markdown new file mode 100644 index 00000000000..abd90798ad2 --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_user.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user" +description: |- + Provides a Cognito User resource. +--- + + + +# Resource: aws_cognito_user + +Provides a Cognito User Resource. + +## Example Usage + +### Basic configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user import CognitoUser +from imports.aws.cognito_user_pool import CognitoUserPool +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CognitoUserPool(self, "example", + name="MyExamplePool" + ) + aws_cognito_user_example = CognitoUser(self, "example_1", + user_pool_id=example.id, + username="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cognito_user_example.override_logical_id("example") +``` + +### Setting user attributes + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user import CognitoUser +from imports.aws.cognito_user_pool import CognitoUserPool +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CognitoUserPool(self, "example", + name="mypool", + schema=[CognitoUserPoolSchema( + attribute_data_type="Boolean", + developer_only_attribute=False, + mutable=False, + name="terraform", + required=False + ), CognitoUserPoolSchema( + attribute_data_type="String", + developer_only_attribute=False, + mutable=False, + name="foo", + required=False, + string_attribute_constraints=CognitoUserPoolSchemaStringAttributeConstraints() + ) + ] + ) + aws_cognito_user_example = CognitoUser(self, "example_1", + attributes={ + "email": "no-reply@hashicorp.com", + "email_verified": Token.as_string(True), + "foo": "bar", + "terraform": Token.as_string(True) + }, + user_pool_id=example.id, + username="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cognito_user_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `user_pool_id` - (Required) The user pool ID for the user pool where the user will be created. +* `username` - (Required) The username for the user. Must be unique within the user pool. Must be a UTF-8 string between 1 and 128 characters. 
After the user is created, the username cannot be changed.
+
+The following arguments are optional:
+
+* `attributes` - (Optional) A map that contains user attributes and attribute values to be set for the user.
+* `client_metadata` - (Optional) A map of custom key-value pairs that you can provide as input for any custom workflows that user creation triggers. Amazon Cognito does not store the `client_metadata` value. This data is available only to Lambda triggers that are assigned to a user pool to support custom workflows. If your user pool configuration does not include triggers, the ClientMetadata parameter serves no purpose. For more information, see [Customizing User Pool Workflows with Lambda Triggers](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html).
+* `desired_delivery_mediums` - (Optional) A list of mediums through which the welcome message will be sent. Allowed values are `EMAIL` and `SMS`. If provided, make sure you have also specified the `email` attribute for the `EMAIL` medium and `phone_number` for `SMS`. More than one value can be specified. Amazon Cognito does not store the `desired_delivery_mediums` value. Defaults to `["SMS"]`.
+* `enabled` - (Optional) Specifies whether the user should be enabled after creation. The welcome message will be sent regardless of the `enabled` value. The behavior can be changed with the `message_action` argument. Defaults to `true`.
+* `force_alias_creation` - (Optional) If this parameter is set to `true` and the `phone_number` or `email` address specified in the `attributes` parameter already exists as an alias with a different user, Amazon Cognito will migrate the alias from the previous user to the newly created user. The previous user will no longer be able to log in using that alias. Amazon Cognito does not store the `force_alias_creation` value. Defaults to `false`.
+* `message_action` - (Optional) Set to `RESEND` to resend the invitation message to a user that already exists and reset the expiration limit on the user's account. Set to `SUPPRESS` to suppress sending the message. Only one value can be specified. Amazon Cognito does not store the `message_action` value.
+* `password` - (Optional) The user's permanent password. This password must conform to the password policy specified by the user pool the user belongs to. The welcome message always contains only the `temporary_password` value. You can suppress sending the welcome message with the `message_action` argument. Amazon Cognito does not store the `password` value. Conflicts with `temporary_password`.
+* `temporary_password` - (Optional) The user's temporary password. Conflicts with `password`.
+* `validation_data` - (Optional) The user's validation data. This is an array of name-value pairs that contain user attributes and attribute values that you can use for custom validation, such as restricting the types of user accounts that can be registered. Amazon Cognito does not store the `validation_data` value. For more information, see [Customizing User Pool Workflows with Lambda Triggers](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html).
+
+~> **NOTE:** Clearing `password` or `temporary_password` does not reset the user's password in Cognito.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `status` - Current user status.
+* `sub` - Unique user ID that is never reassignable to another user.
+* `mfa_preference` - User's MFA settings and preferences.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User using the `user_pool_id`/`name` attributes concatenated. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cognito User using the `user_pool_id`/`name` attributes concatenated. For example:
+
+```console
+% terraform import aws_cognito_user.user us-east-1_vG78M4goG/user
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cognito_user_group.html.markdown b/website/docs/cdktf/python/r/cognito_user_group.html.markdown
new file mode 100644
index 00000000000..60ab532534a
--- /dev/null
+++ b/website/docs/cdktf/python/r/cognito_user_group.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_group"
+description: |-
+  Provides a Cognito User Group resource.
+---
+
+
+
+# Resource: aws_cognito_user_group
+
+Provides a Cognito User Group resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_group import CognitoUserGroup
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = CognitoUserPool(self, "main",
+            name="identity pool"
+        )
+        group_role = DataAwsIamPolicyDocument(self, "group_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRoleWithWebIdentity"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="StringEquals",
+                    values=["us-east-1:12345678-dead-beef-cafe-123456790ab"],
+                    variable="cognito-identity.amazonaws.com:aud"
+                ), DataAwsIamPolicyDocumentStatementCondition(
+                    test="ForAnyValue:StringLike",
+                    values=["authenticated"],
+                    variable="cognito-identity.amazonaws.com:amr"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["cognito-identity.amazonaws.com"],
+                    type="Federated"
+                )
+                ]
+            )
+            ]
+        )
+        aws_iam_role_group_role = IamRole(self, "group_role_2",
+            assume_role_policy=Token.as_string(group_role.json),
+            name="user-group-role"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_group_role.override_logical_id("group_role")
+        aws_cognito_user_group_main = CognitoUserGroup(self, "main_3",
+            description="Managed by Terraform",
+            name="user-group",
+            precedence=42,
+            role_arn=Token.as_string(aws_iam_role_group_role.arn),
+            user_pool_id=main.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_cognito_user_group_main.override_logical_id("main") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the user group. +* `user_pool_id` - (Required) The user pool ID. +* `description` - (Optional) The description of the user group. +* `precedence` - (Optional) The precedence of the user group. +* `role_arn` - (Optional) The ARN of the IAM role to be associated with the user group. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Groups using the `user_pool_id`/`name` attributes concatenated. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Cognito User Groups using the `user_pool_id`/`name` attributes concatenated. For example: + +```console +% terraform import aws_cognito_user_group.group us-east-1_vG78M4goG/user-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_user_in_group.html.markdown b/website/docs/cdktf/python/r/cognito_user_in_group.html.markdown new file mode 100644 index 00000000000..fc42851af5c --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_user_in_group.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_in_group" +description: |- + Adds the specified user to the specified group. +--- + + + +# Resource: aws_cognito_user_in_group + +Adds the specified user to the specified group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user import CognitoUser +from imports.aws.cognito_user_group import CognitoUserGroup +from imports.aws.cognito_user_in_group import CognitoUserInGroup +from imports.aws.cognito_user_pool import CognitoUserPool +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CognitoUserPool(self, "example", + name="example", + password_policy=CognitoUserPoolPasswordPolicy( + minimum_length=6, + require_numbers=False, + require_symbols=False, + require_uppercase=False, + temporary_password_validity_days=7 + ) + ) + aws_cognito_user_example = CognitoUser(self, "example_1", + user_pool_id=example.id, + username="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cognito_user_example.override_logical_id("example") + aws_cognito_user_group_example = CognitoUserGroup(self, "example_2", + name="example", + user_pool_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_cognito_user_group_example.override_logical_id("example")
+        aws_cognito_user_in_group_example = CognitoUserInGroup(self, "example_3",
+            group_name=Token.as_string(aws_cognito_user_group_example.name),
+            user_pool_id=example.id,
+            username=Token.as_string(aws_cognito_user_example.username)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_in_group_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `user_pool_id` - (Required) The user pool ID of the user and group.
+* `group_name` - (Required) The name of the group to which the user is to be added.
+* `username` - (Required) The username of the user to be added to the group.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cognito_user_pool.markdown b/website/docs/cdktf/python/r/cognito_user_pool.markdown
new file mode 100644
index 00000000000..5373dd12d95
--- /dev/null
+++ b/website/docs/cdktf/python/r/cognito_user_pool.markdown
@@ -0,0 +1,312 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool"
+description: |-
+  Provides a Cognito User Pool resource.
+---
+
+
+
+# Resource: aws_cognito_user_pool
+
+Provides a Cognito User Pool resource.
+
+## Example Usage
+
+### Basic configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CognitoUserPool(self, "pool",
+            name="mypool"
+        )
+```
+
+### Enabling SMS and Software Token Multi-Factor Authentication
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+class MyConvertedCode(TerraformStack):
+    # 'pool_name' and 'sns_caller_role_arn' are inputs supplied by the caller;
+    # the role must grant Amazon Cognito permission to publish SMS messages via SNS.
+    def __init__(self, scope, name, *, pool_name, sns_caller_role_arn):
+        super().__init__(scope, name)
+        CognitoUserPool(self, "example",
+            mfa_configuration="ON",
+            sms_authentication_message="Your code is {####}",
+            sms_configuration=CognitoUserPoolSmsConfiguration(
+                external_id="example",
+                sns_caller_arn=sns_caller_role_arn,
+                sns_region="us-east-1"
+            ),
+            software_token_mfa_configuration=CognitoUserPoolSoftwareTokenMfaConfiguration(
+                enabled=True
+            ),
+            name=pool_name
+        )
+```
+
+### Using Account Recovery Setting
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
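+# Note: priority 1 is the highest, so with the settings below Cognito offers
+# email-based recovery before falling back to the verified phone number.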
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CognitoUserPool(self, "test",
+            account_recovery_setting=CognitoUserPoolAccountRecoverySetting(
+                recovery_mechanism=[CognitoUserPoolAccountRecoverySettingRecoveryMechanism(
+                    name="verified_email",
+                    priority=1
+                ), CognitoUserPoolAccountRecoverySettingRecoveryMechanism(
+                    name="verified_phone_number",
+                    priority=2
+                )
+                ]
+            ),
+            name="mypool"
+        )
+```
+
+## Argument Reference
+
+The following argument is required:
+
+* `name` - (Required) Name of the user pool.
+
+The following arguments are optional:
+
+* `account_recovery_setting` - (Optional) Configuration block to define which verified available method a user can use to recover their forgotten password. [Detailed below](#account_recovery_setting).
+* `admin_create_user_config` - (Optional) Configuration block for creating a new user profile. [Detailed below](#admin_create_user_config).
+* `alias_attributes` - (Optional) Attributes supported as an alias for this user pool. Valid values: `phone_number`, `email`, or `preferred_username`. Conflicts with `username_attributes`.
+* `auto_verified_attributes` - (Optional) Attributes to be auto-verified. Valid values: `email`, `phone_number`.
+* `deletion_protection` - (Optional) When active, DeletionProtection prevents accidental deletion of your user pool. Before you can delete a user pool that you have protected against deletion, you must deactivate this feature. Valid values are `ACTIVE` and `INACTIVE`. Default value is `INACTIVE`.
+* `device_configuration` - (Optional) Configuration block for the user pool's device tracking. [Detailed below](#device_configuration).
+* `email_configuration` - (Optional) Configuration block for configuring email. [Detailed below](#email_configuration).
+* `email_verification_message` - (Optional) String representing the email verification message. Conflicts with `verification_message_template` configuration block `email_message` argument.
+* `email_verification_subject` - (Optional) String representing the email verification subject. Conflicts with `verification_message_template` configuration block `email_subject` argument.
+* `lambda_config` - (Optional) Configuration block for the AWS Lambda triggers associated with the user pool. [Detailed below](#lambda_config).
+* `mfa_configuration` - (Optional) Multi-Factor Authentication (MFA) configuration for the User Pool. Defaults to `OFF`. Valid values are `OFF` (MFA tokens are not required), `ON` (MFA is required for all users to sign in; requires at least one of `sms_configuration` or `software_token_mfa_configuration` to be configured), or `OPTIONAL` (MFA will be required only for individual users who have MFA enabled; requires at least one of `sms_configuration` or `software_token_mfa_configuration` to be configured).
+* `password_policy` - (Optional) Configuration block for information about the user pool password policy. [Detailed below](#password_policy).
+* `schema` - (Optional) Configuration block for the schema attributes of a user pool. [Detailed below](#schema). Schema attributes from the [standard attribute set](https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-settings-attributes.html#cognito-user-pools-standard-attributes) only need to be specified if they are different from the default configuration. Attributes can be added, but not modified or removed. Maximum of 50 attributes.
+* `sms_authentication_message` - (Optional) String representing the SMS authentication message. The message must contain the `{####}` placeholder, which will be replaced with the code.
+* `sms_configuration` - (Optional) Configuration block for Short Message Service (SMS) settings. [Detailed below](#sms_configuration). These settings apply to SMS user verification and SMS Multi-Factor Authentication (MFA). Due to Cognito API restrictions, the SMS configuration cannot be removed without recreating the Cognito User Pool. For user data safety, this resource will ignore the removal of this configuration by disabling drift detection. To force resource recreation after this configuration has been applied, see the [`taint` command](https://www.terraform.io/docs/commands/taint.html).
+* `sms_verification_message` - (Optional) String representing the SMS verification message. Conflicts with `verification_message_template` configuration block `sms_message` argument.
+* `software_token_mfa_configuration` - (Optional) Configuration block for software token Multi-Factor Authentication (MFA) settings. [Detailed below](#software_token_mfa_configuration).
+* `tags` - (Optional) Map of tags to assign to the User Pool. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `user_attribute_update_settings` - (Optional) Configuration block for user attribute update settings. [Detailed below](#user_attribute_update_settings).
+* `user_pool_add_ons` - (Optional) Configuration block for user pool add-ons to enable user pool advanced security mode features. [Detailed below](#user_pool_add_ons).
+* `username_attributes` - (Optional) Whether email addresses or phone numbers can be specified as usernames when a user signs up. Conflicts with `alias_attributes`.
+* `username_configuration` - (Optional) Configuration block for username configuration. [Detailed below](#username_configuration).
+* `verification_message_template` - (Optional) Configuration block for verification message templates. [Detailed below](#verification_message_template).
+
+### account_recovery_setting
+
+* `recovery_mechanism` - (Optional) List of Account Recovery Options of the following structure:
+    * `name` - (Required) Recovery method for a user. Can be one of the following: `verified_email`, `verified_phone_number`, and `admin_only`.
+    * `priority` - (Required) Positive integer specifying priority of a method with 1 being the highest priority.
+
+### admin_create_user_config
+
+* `allow_admin_create_user_only` - (Optional) Set to `true` if only the administrator is allowed to create user profiles. Set to `false` if users can sign themselves up via an app.
+* `invite_message_template` - (Optional) Invite message template structure. [Detailed below](#invite_message_template).
+
+#### invite_message_template
+
+* `email_message` - (Optional) Message template for email messages. Must contain `{username}` and `{####}` placeholders, for username and temporary password, respectively.
+* `email_subject` - (Optional) Subject line for email messages.
+* `sms_message` - (Optional) Message template for SMS messages. Must contain `{username}` and `{####}` placeholders, for username and temporary password, respectively.
+
+### device_configuration
+
+* `challenge_required_on_new_device` - (Optional) Whether a challenge is required on a new device.
Only applicable to a new device.
+* `device_only_remembered_on_user_prompt` - (Optional) Whether a device is only remembered on user prompt. `false` equates to "Always" remember, `true` is "User Opt In," and not using a `device_configuration` block is "No."
+
+### email_configuration
+
+* `configuration_set` - (Optional) Email configuration set name from SES.
+* `email_sending_account` - (Optional) Email delivery method to use. `COGNITO_DEFAULT` for the default email functionality built into Cognito or `DEVELOPER` to use your Amazon SES configuration. Required to be `DEVELOPER` if `from_email_address` is set.
+* `from_email_address` - (Optional) Sender’s email address or sender’s display name with their email address (e.g., `john@example.com`, `John Smith <john@example.com>` or `\"John Smith Ph.D.\" <john@example.com>`). Escaped double quotes are required around display names that contain certain characters as specified in [RFC 5322](https://tools.ietf.org/html/rfc5322).
+* `reply_to_email_address` - (Optional) REPLY-TO email address.
+* `source_arn` - (Optional) ARN of the SES verified email identity to use. Required if `email_sending_account` is set to `DEVELOPER`.
+
+### lambda_config
+
+* `create_auth_challenge` - (Optional) ARN of the lambda creating an authentication challenge.
+* `custom_message` - (Optional) Custom Message AWS Lambda trigger.
+* `define_auth_challenge` - (Optional) Defines the authentication challenge.
+* `post_authentication` - (Optional) Post-authentication AWS Lambda trigger.
+* `post_confirmation` - (Optional) Post-confirmation AWS Lambda trigger.
+* `pre_authentication` - (Optional) Pre-authentication AWS Lambda trigger.
+* `pre_sign_up` - (Optional) Pre-registration AWS Lambda trigger.
+* `pre_token_generation` - (Optional) Allows customizing identity token claims before token generation.
+* `user_migration` - (Optional) User migration Lambda config type.
+* `verify_auth_challenge_response` - (Optional) Verifies the authentication challenge response.
+* `kms_key_id` - (Optional) The Amazon Resource Name (ARN) of a Key Management Service (KMS) customer master key. Amazon Cognito uses the key to encrypt codes and temporary passwords sent to CustomEmailSender and CustomSMSSender.
+* `custom_email_sender` - (Optional) A custom email sender AWS Lambda trigger. See [custom_email_sender](#custom_email_sender) below.
+* `custom_sms_sender` - (Optional) A custom SMS sender AWS Lambda trigger. See [custom_sms_sender](#custom_sms_sender) below.
+
+#### custom_email_sender
+
+* `lambda_arn` - (Required) The Lambda Amazon Resource Name of the Lambda function that Amazon Cognito triggers to send email notifications to users.
+* `lambda_version` - (Required) The Lambda version represents the signature of the "request" attribute in the "event" information Amazon Cognito passes to your custom email Lambda function. The only supported value is `V1_0`.
+
+#### custom_sms_sender
+
+* `lambda_arn` - (Required) The Lambda Amazon Resource Name of the Lambda function that Amazon Cognito triggers to send SMS notifications to users.
+* `lambda_version` - (Required) The Lambda version represents the signature of the "request" attribute in the "event" information Amazon Cognito passes to your custom SMS Lambda function. The only supported value is `V1_0`.
+
+### password_policy
+
+* `minimum_length` - (Optional) Minimum length of the password policy that you have set.
+* `require_lowercase` - (Optional) Whether you have required users to use at least one lowercase letter in their password.
+* `require_numbers` - (Optional) Whether you have required users to use at least one number in their password.
+* `require_symbols` - (Optional) Whether you have required users to use at least one symbol in their password.
+* `require_uppercase` - (Optional) Whether you have required users to use at least one uppercase letter in their password.
+* `temporary_password_validity_days` - (Optional) In the password policy you have set, refers to the number of days a temporary password is valid. If the user does not sign in during this time, their password will need to be reset by an administrator.
+
+### schema
+
+~> **NOTE:** When defining an `attribute_data_type` of `String` or `Number`, the respective attribute constraints configuration block (e.g., `string_attribute_constraints` or `number_attribute_constraints`) is **required** to prevent recreation of the Terraform resource. This requirement is true for both standard (e.g., name, email) and custom schema attributes. A hand-written sketch illustrating this follows the constraint sections below.
+
+* `attribute_data_type` - (Required) Attribute data type. Must be one of `Boolean`, `Number`, `String`, `DateTime`.
+* `developer_only_attribute` - (Optional) Whether the attribute type is developer only.
+* `mutable` - (Optional) Whether the attribute can be changed once it has been created.
+* `name` - (Required) Name of the attribute.
+* `number_attribute_constraints` - (Required when `attribute_data_type` is `Number`) Configuration block for the constraints for an attribute of the number type. [Detailed below](#number_attribute_constraints).
+* `required` - (Optional) Whether a user pool attribute is required. If the attribute is required and the user does not provide a value, registration or sign-in will fail.
+* `string_attribute_constraints` - (Required when `attribute_data_type` is `String`) Constraints for an attribute of the string type. [Detailed below](#string_attribute_constraints).
+
+#### schema: Defaults for Standard Attributes
+
+The [standard attributes](https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-settings-attributes.html#cognito-user-pools-standard-attributes) have the following defaults. Note that attributes which match the default values are not stored in Terraform state when importing.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+class MyConvertedCode(TerraformStack):
+    # 'pool_name' is an input supplied by the caller.
+    def __init__(self, scope, name, *, pool_name):
+        super().__init__(scope, name)
+        CognitoUserPool(self, "example",
+            schema=[CognitoUserPoolSchema(
+                # The empty strings are placeholders for the attribute's data type and name.
+                attribute_data_type="",
+                developer_only_attribute=False,
+                mutable=True,
+                name="",
+                required=False,
+                string_attribute_constraints=CognitoUserPoolSchemaStringAttributeConstraints(
+                    max_length=Token.as_string(2048),
+                    min_length=Token.as_string(0)
+                )
+            )
+            ],
+            name=pool_name
+        )
+```
+
+#### number_attribute_constraints
+
+* `max_value` - (Optional) Maximum value of an attribute that is of the number data type.
+* `min_value` - (Optional) Minimum value of an attribute that is of the number data type.
+
+#### string_attribute_constraints
+
+* `max_length` - (Optional) Maximum length of an attribute value of the string type.
+* `min_length` - (Optional) Minimum length of an attribute value of the string type.
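+
+As a concrete illustration of the schema note above, the following hand-written sketch (not `cdktf convert` output) declares a custom `String` attribute with explicit `string_attribute_constraints` so Terraform does not plan recreation of the pool. The pool and attribute names are illustrative assumptions.
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.cognito_user_pool import CognitoUserPool
+class CustomAttributeSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CognitoUserPool(self, "example",
+            name="example-pool",
+            schema=[CognitoUserPoolSchema(
+                attribute_data_type="String",
+                developer_only_attribute=False,
+                mutable=True,
+                name="department",
+                required=False,
+                # Explicit constraints keep the plan stable; custom attributes
+                # surface to applications as "custom:department".
+                string_attribute_constraints=CognitoUserPoolSchemaStringAttributeConstraints(
+                    max_length="64",
+                    min_length="1"
+                )
+            )
+            ]
+        )
+```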
+
+### sms_configuration
+
+* `external_id` - (Required) External ID used in IAM role trust relationships. For more information about using external IDs, see [How to Use an External ID When Granting Access to Your AWS Resources to a Third Party](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html).
+* `sns_caller_arn` - (Required) ARN of the Amazon SNS caller. This is usually the IAM role that you've given Cognito permission to assume.
+* `sns_region` - (Optional) The AWS Region to use with Amazon SNS integration. You can choose the same Region as your user pool, or a supported Legacy Amazon SNS alternate Region. Amazon Cognito resources in the Asia Pacific (Seoul) AWS Region must use your Amazon SNS configuration in the Asia Pacific (Tokyo) Region. For more information, see [SMS message settings for Amazon Cognito user pools](https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-sms-settings.html).
+
+### software_token_mfa_configuration
+
+The following arguments are required in the `software_token_mfa_configuration` configuration block:
+
+* `enabled` - (Required) Boolean whether to enable software token multi-factor authentication (MFA) tokens, such as Time-based One-Time Password (TOTP). To disable software token MFA when `sms_configuration` is not present, the `mfa_configuration` argument must be set to `OFF` and the `software_token_mfa_configuration` configuration block must be fully removed.
+
+### user_attribute_update_settings
+
+* `attributes_require_verification_before_update` - (Required) A list of attributes requiring verification before update. If set, the provided value(s) must also be set in `auto_verified_attributes`. Valid values: `email`, `phone_number`.
+
+### user_pool_add_ons
+
+* `advanced_security_mode` - (Required) Mode for advanced security, must be one of `OFF`, `AUDIT` or `ENFORCED`.
+
+### username_configuration
+
+* `case_sensitive` - (Required) Whether username case sensitivity will be applied for all users in the user pool through Cognito APIs.
+
+### verification_message_template
+
+* `default_email_option` - (Optional) Default email option. Must be either `CONFIRM_WITH_CODE` or `CONFIRM_WITH_LINK`. Defaults to `CONFIRM_WITH_CODE`.
+* `email_message` - (Optional) Email message template. Must contain the `{####}` placeholder. Conflicts with `email_verification_message` argument.
+* `email_message_by_link` - (Optional) Email message template for sending a confirmation link to the user. It must contain the `{##Click Here##}` placeholder.
+* `email_subject` - (Optional) Subject line for the email message template. Conflicts with `email_verification_subject` argument.
+* `email_subject_by_link` - (Optional) Subject line for the email message template for sending a confirmation link to the user.
+* `sms_message` - (Optional) SMS message template. Must contain the `{####}` placeholder. Conflicts with `sms_verification_message` argument.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the user pool.
+* `creation_date` - Date the user pool was created.
+* `custom_domain` - A custom domain name that you provide to Amazon Cognito. This parameter applies only if you use a custom domain to host the sign-up and sign-in pages for your application. For example: `auth.example.com`.
+* `domain` - Holds the domain prefix if the user pool has a domain associated with it.
+* `endpoint` - Endpoint name of the user pool.
Example format: `cognito-idp.REGION.amazonaws.com/xxxx_yyyyy` +* `estimated_number_of_users` - A number estimating the size of the user pool. +* `id` - ID of the user pool. +* `last_modified_date` - Date the user pool was last modified. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pools using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Cognito User Pools using the `id`. For example: + +```console +% terraform import aws_cognito_user_pool.pool us-west-2_abc123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_user_pool_client.html.markdown b/website/docs/cdktf/python/r/cognito_user_pool_client.html.markdown new file mode 100644 index 00000000000..9e731d4b751 --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_user_pool_client.html.markdown @@ -0,0 +1,260 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_pool_client" +description: |- + Provides a Cognito User Pool Client resource. +--- + + + +# Resource: aws_cognito_user_pool_client + +Provides a Cognito User Pool Client resource. + +To manage a User Pool Client created by another service, such as when [configuring an OpenSearch Domain to use Cognito authentication](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/cognito-auth.html), +use the [`aws_cognito_managed_user_pool_client` resource](cognito_managed_user_pool_client.html) instead. + +## Example Usage + +### Create a basic user pool client + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_client import CognitoUserPoolClient +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + pool = CognitoUserPool(self, "pool", + name="pool" + ) + CognitoUserPoolClient(self, "client", + name="client", + user_pool_id=pool.id + ) +``` + +### Create a user pool client with no SRP authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
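+# Note: ADMIN_NO_SRP_AUTH in the client below enables server-side sign-in with
+# a plain username/password via AdminInitiateAuth (no SRP handshake), and
+# generate_secret=True means API calls must also send a SECRET_HASH.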
+# +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_client import CognitoUserPoolClient +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + pool = CognitoUserPool(self, "pool", + name="pool" + ) + CognitoUserPoolClient(self, "client", + explicit_auth_flows=["ADMIN_NO_SRP_AUTH"], + generate_secret=True, + name="client", + user_pool_id=pool.id + ) +``` + +### Create a user pool client with pinpoint analytics + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_client import CognitoUserPoolClient +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.pinpoint_app import PinpointApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = CognitoUserPool(self, "test", + name="pool" + ) + aws_pinpoint_app_test = PinpointApp(self, "test_1", + name="pinpoint" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_pinpoint_app_test.override_logical_id("test") + current = DataAwsCallerIdentity(self, "current") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["cognito-idp.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + data_aws_iam_policy_document_test = DataAwsIamPolicyDocument(self, "test_4", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["mobiletargeting:UpdateEndpoint", "mobiletargeting:PutEvents" + ], + effect="Allow", + resources=["arn:aws:mobiletargeting:*:${" + current.account_id + "}:apps/${" + aws_pinpoint_app_test.application_id + "}*" + ] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_test.override_logical_id("test") + aws_iam_role_test = IamRole(self, "test_5", + assume_role_policy=Token.as_string(assume_role.json), + name="role" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_test.override_logical_id("test") + aws_iam_role_policy_test = IamRolePolicy(self, "test_6", + name="role_policy", + policy=Token.as_string(data_aws_iam_policy_document_test.json), + role=Token.as_string(aws_iam_role_test.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_iam_role_policy_test.override_logical_id("test") + aws_cognito_user_pool_client_test = CognitoUserPoolClient(self, "test_7", + analytics_configuration=[CognitoUserPoolClientAnalyticsConfiguration( + application_id=Token.as_string(aws_pinpoint_app_test.application_id), + external_id="some_id", + role_arn=Token.as_string(aws_iam_role_test.arn), + user_data_shared=True + ) + ], + name="pool_client", + user_pool_id=test.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cognito_user_pool_client_test.override_logical_id("test") +``` + +### Create a user pool client with Cognito as the identity provider + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_client import CognitoUserPoolClient +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + pool = CognitoUserPool(self, "pool", + name="pool" + ) + CognitoUserPoolClient(self, "userpool_client", + allowed_oauth_flows=["code", "implicit"], + allowed_oauth_flows_user_pool_client=True, + allowed_oauth_scopes=["email", "openid"], + callback_urls=["https://example.com"], + name="client", + supported_identity_providers=["COGNITO"], + user_pool_id=pool.id + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the application client. +* `user_pool_id` - (Required) User pool the client belongs to. + +The following arguments are optional: + +* `access_token_validity` - (Optional) Time limit, between 5 minutes and 1 day, after which the access token is no longer valid and cannot be used. + By default, the unit is hours. + The unit can be overridden by a value in `token_validity_units.access_token`. +* `allowed_oauth_flows_user_pool_client` - (Optional) Whether the client is allowed to follow the OAuth protocol when interacting with Cognito user pools. +* `allowed_oauth_flows` - (Optional) List of allowed OAuth flows (code, implicit, client_credentials). +* `allowed_oauth_scopes` - (Optional) List of allowed OAuth scopes (phone, email, openid, profile, and aws.cognito.signin.user.admin). +* `analytics_configuration` - (Optional) Configuration block for Amazon Pinpoint analytics for collecting metrics for this user pool. [Detailed below](#analytics_configuration). +* `auth_session_validity` - (Optional) Amazon Cognito creates a session token for each API request in an authentication flow. AuthSessionValidity is the duration, in minutes, of that session token. Your user pool native user must respond to each authentication challenge before the session expires. Valid values between `3` and `15`. Default value is `3`. +* `callback_urls` - (Optional) List of allowed callback URLs for the identity providers. +* `default_redirect_uri` - (Optional) Default redirect URI. Must be in the list of callback URLs. +* `enable_token_revocation` - (Optional) Enables or disables token revocation. +* `enable_propagate_additional_user_context_data` - (Optional) Activates the propagation of additional user context data. 
+* `explicit_auth_flows` - (Optional) List of authentication flows (ADMIN_NO_SRP_AUTH, CUSTOM_AUTH_FLOW_ONLY, USER_PASSWORD_AUTH, ALLOW_ADMIN_USER_PASSWORD_AUTH, ALLOW_CUSTOM_AUTH, ALLOW_USER_PASSWORD_AUTH, ALLOW_USER_SRP_AUTH, ALLOW_REFRESH_TOKEN_AUTH).
+* `generate_secret` - (Optional) Should an application secret be generated.
+* `id_token_validity` - (Optional) Time limit, between 5 minutes and 1 day, after which the ID token is no longer valid and cannot be used.
+  By default, the unit is hours.
+  The unit can be overridden by a value in `token_validity_units.id_token`.
+* `logout_urls` - (Optional) List of allowed logout URLs for the identity providers.
+* `prevent_user_existence_errors` - (Optional) Choose which errors and responses are returned by Cognito APIs during authentication, account confirmation, and password recovery when the user does not exist in the user pool. When set to `ENABLED` and the user does not exist, authentication returns an error indicating either the username or password was incorrect, and account confirmation and password recovery return a response indicating a code was sent to a simulated destination. When set to `LEGACY`, those APIs will return a `UserNotFoundException` exception if the user does not exist in the user pool.
+* `read_attributes` - (Optional) List of user pool attributes the application client can read from.
+* `refresh_token_validity` - (Optional) Time limit, between 60 minutes and 10 years, after which the refresh token is no longer valid and cannot be used.
+  By default, the unit is days.
+  The unit can be overridden by a value in `token_validity_units.refresh_token`.
+* `supported_identity_providers` - (Optional) List of provider names for the identity providers that are supported on this client. Uses the `provider_name` attribute of `aws_cognito_identity_provider` resource(s), or the equivalent string(s).
+* `token_validity_units` - (Optional) Configuration block for the units in which the validity times are represented. [Detailed below](#token_validity_units).
+* `write_attributes` - (Optional) List of user pool attributes the application client can write to.
+
+### analytics_configuration
+
+Either `application_arn` or `application_id` is required.
+
+* `application_arn` - (Optional) Application ARN for an Amazon Pinpoint application. Conflicts with `external_id` and `role_arn`.
+* `application_id` - (Optional) Application ID for an Amazon Pinpoint application.
+* `external_id` - (Optional) ID for the Analytics Configuration. Conflicts with `application_arn`.
+* `role_arn` - (Optional) ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics. Conflicts with `application_arn`.
+* `user_data_shared` - (Optional) If set to `true`, Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
+
+### token_validity_units
+
+Valid values for the following arguments are: `seconds`, `minutes`, `hours` or `days`.
+
+* `access_token` - (Optional) Time unit for the value in `access_token_validity`. Defaults to `hours`.
+* `id_token` - (Optional) Time unit for the value in `id_token_validity`. Defaults to `hours`.
+* `refresh_token` - (Optional) Time unit for the value in `refresh_token_validity`. Defaults to `days`.
+
+A hand-written sketch pairing these units with the validity arguments appears after the attribute list below.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `client_secret` - Client secret of the user pool client.
+* `id` - ID of the user pool client.
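+
+To make the relationship between the validity arguments and `token_validity_units` concrete, here is a hand-written sketch (not `cdktf convert` output); the names are illustrative, and a plain dict stands in for the generated struct class:
+
+```python
+# Hypothetical sketch: token lifetimes expressed in explicit units.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.cognito_user_pool_client import CognitoUserPoolClient
+class TokenValiditySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        pool = CognitoUserPool(self, "pool",
+            name="pool"
+        )
+        CognitoUserPoolClient(self, "client",
+            name="client",
+            user_pool_id=pool.id,
+            access_token_validity=30,   # 30 minutes, per the units below
+            id_token_validity=30,       # 30 minutes, per the units below
+            refresh_token_validity=30,  # 30 days, per the units below
+            token_validity_units={
+                "access_token": "minutes",
+                "id_token": "minutes",
+                "refresh_token": "days"
+            }
+        )
+```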
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool Clients using the `id` of the Cognito User Pool, and the `id` of the Cognito User Pool Client. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Cognito User Pool Clients using the `id` of the Cognito User Pool, and the `id` of the Cognito User Pool Client. For example: + +```console +% terraform import aws_cognito_user_pool_client.client us-west-2_abc123/3ho4ek12345678909nh3fmhpko +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/cognito_user_pool_domain.html.markdown b/website/docs/cdktf/python/r/cognito_user_pool_domain.html.markdown new file mode 100644 index 00000000000..002365bc295 --- /dev/null +++ b/website/docs/cdktf/python/r/cognito_user_pool_domain.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_pool_domain" +description: |- + Provides a Cognito User Pool Domain resource. +--- + + + +# Resource: aws_cognito_user_pool_domain + +Provides a Cognito User Pool Domain resource. + +## Example Usage + +### Amazon Cognito domain + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CognitoUserPool(self, "example", + name="example-pool" + ) + CognitoUserPoolDomain(self, "main", + domain="example-domain", + user_pool_id=example.id + ) +``` + +### Custom Cognito domain + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain +from imports.aws.data_aws_route53_zone import DataAwsRoute53Zone +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CognitoUserPool(self, "example", + name="example-pool" + ) + main = CognitoUserPoolDomain(self, "main", + certificate_arn=cert.arn, + domain="example-domain", + user_pool_id=example.id + ) + data_aws_route53_zone_example = DataAwsRoute53Zone(self, "example_2", + name="example.com" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        data_aws_route53_zone_example.override_logical_id("example")
+        Route53Record(self, "auth-cognito-A",
+            alias=Route53RecordAlias(
+                evaluate_target_health=False,
+                name=main.cloudfront_distribution,
+                zone_id=main.cloudfront_distribution_zone_id
+            ),
+            name=main.domain,
+            type="A",
+            zone_id=Token.as_string(data_aws_route53_zone_example.zone_id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain` - (Required) For custom domains, this is the fully-qualified domain name, such as `auth.example.com`. For Amazon Cognito prefix domains, this is the prefix alone, such as `auth`.
+* `user_pool_id` - (Required) The user pool ID.
+* `certificate_arn` - (Optional) The ARN of an ISSUED ACM certificate in us-east-1 for a custom domain.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `aws_account_id` - The AWS account ID for the user pool owner.
+* `cloudfront_distribution` - The Amazon CloudFront endpoint (e.g. `dpp0gtxikpq3y.cloudfront.net`) that you use as the target of the alias that you set up with your Domain Name Service (DNS) provider.
+* `cloudfront_distribution_arn` - The ARN of the CloudFront distribution. This is required to generate the ALIAS `aws_route53_record`.
+* `cloudfront_distribution_zone_id` - The Route 53 hosted zone ID of the CloudFront distribution.
+* `s3_bucket` - The S3 bucket where the static files for this domain are stored.
+* `version` - The app version.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool Domains using the `domain`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cognito User Pool Domains using the `domain`. For example:
+
+```console
+% terraform import aws_cognito_user_pool_domain.main auth.example.org
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cognito_user_pool_ui_customization.html.markdown b/website/docs/cdktf/python/r/cognito_user_pool_ui_customization.html.markdown
new file mode 100644
index 00000000000..7d727cd35a3
--- /dev/null
+++ b/website/docs/cdktf/python/r/cognito_user_pool_ui_customization.html.markdown
@@ -0,0 +1,134 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_ui_customization"
+description: |-
+  Provides a Cognito User Pool UI Customization resource.
+---
+
+
+
+# Resource: aws_cognito_user_pool_ui_customization
+
+Provides a Cognito User Pool UI Customization resource.
+
+~> **Note:** To use this resource, the user pool must have a domain associated with it. For more information, see the Amazon Cognito Developer Guide on [Customizing the Built-in Sign-In and Sign-up Webpages](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-app-ui-customization.html).
+
+## Example Usage
+
+### UI customization settings for a single client
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.cognito_user_pool_client import CognitoUserPoolClient
+from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain
+from imports.aws.cognito_user_pool_ui_customization import CognitoUserPoolUiCustomization
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CognitoUserPool(self, "example",
+            name="example"
+        )
+        aws_cognito_user_pool_client_example = CognitoUserPoolClient(self, "example_1",
+            name="example",
+            user_pool_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_client_example.override_logical_id("example")
+        aws_cognito_user_pool_domain_example = CognitoUserPoolDomain(self, "example_2",
+            domain="example",
+            user_pool_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_domain_example.override_logical_id("example")
+        aws_cognito_user_pool_ui_customization_example = CognitoUserPoolUiCustomization(self, "example_3",
+            client_id=Token.as_string(aws_cognito_user_pool_client_example.id),
+            css=".label-customizable {font-weight: 400;}",
+            image_file=Token.as_string(Fn.filebase64("logo.png")),
+            user_pool_id=Token.as_string(aws_cognito_user_pool_domain_example.user_pool_id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_ui_customization_example.override_logical_id("example")
+```
+
+### UI customization settings for all clients
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain
+from imports.aws.cognito_user_pool_ui_customization import CognitoUserPoolUiCustomization
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CognitoUserPool(self, "example",
+            name="example"
+        )
+        aws_cognito_user_pool_domain_example = CognitoUserPoolDomain(self, "example_1",
+            domain="example",
+            user_pool_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_domain_example.override_logical_id("example")
+        aws_cognito_user_pool_ui_customization_example = CognitoUserPoolUiCustomization(self, "example_2",
+            css=".label-customizable {font-weight: 400;}",
+            image_file=Token.as_string(Fn.filebase64("logo.png")),
+            user_pool_id=Token.as_string(aws_cognito_user_pool_domain_example.user_pool_id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_ui_customization_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `client_id` - (Optional) The client ID for the client app. Defaults to `ALL`. If `ALL` is specified, the `css` and/or `image_file` settings will be used for every client that has no previously set UI customization.
+* `css` - (Optional) The CSS values in the UI customization, provided as a String. At least one of `css` or `image_file` is required.
+* `image_file` - (Optional) The uploaded logo image for the UI customization, provided as a base64-encoded String. Drift detection is not possible for this argument. At least one of `css` or `image_file` is required.
+* `user_pool_id` - (Required) The user pool ID for the user pool.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `creation_date` - The creation date in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) for the UI customization.
+* `css_version` - The CSS version number.
+* `image_url` - The logo image URL for the UI customization.
+* `last_modified_date` - The last-modified date in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) for the UI customization.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool UI Customizations using the `user_pool_id` and `client_id` separated by `,`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Cognito User Pool UI Customizations using the `user_pool_id` and `client_id` separated by `,`. For example:
+
+```console
+% terraform import aws_cognito_user_pool_ui_customization.example us-west-2_ZCTarbt5C,12bu4fuk3mlgqa2rtrujgp6egq
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/comprehend_document_classifier.html.markdown b/website/docs/cdktf/python/r/comprehend_document_classifier.html.markdown
new file mode 100644
index 00000000000..23bbfa03706
--- /dev/null
+++ b/website/docs/cdktf/python/r/comprehend_document_classifier.html.markdown
@@ -0,0 +1,162 @@
+---
+subcategory: "Comprehend"
+layout: "aws"
+page_title: "AWS: aws_comprehend_document_classifier"
+description: |-
+  Terraform resource for managing an AWS Comprehend Document Classifier.
+---
+
+
+
+# Resource: aws_comprehend_document_classifier
+
+Terraform resource for managing an AWS Comprehend Document Classifier.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.comprehend_document_classifier import ComprehendDocumentClassifier
+from imports.aws.s3_object import S3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, bucket, key, bucket1, key1):
+        super().__init__(scope, name)
+        documents = S3Object(self, "documents",
+            bucket=bucket,
+            key=key
+        )
+        S3Object(self, "entities",
+            bucket=bucket1,
+            key=key1
+        )
+        ComprehendDocumentClassifier(self, "example",
+            data_access_role_arn=Token.as_string(aws_iam_role_example.arn),
+            depends_on=[aws_iam_role_policy_example],
+            input_data_config=ComprehendDocumentClassifierInputDataConfig(
+                s3_uri="s3://${" + test.bucket + "}/${" + documents.id + "}"
+            ),
+            language_code="en",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `data_access_role_arn` - (Required) The ARN for an IAM Role which allows Comprehend to read the training and testing data.
+* `input_data_config` - (Required) Configuration for the training and testing data.
+  See the [`input_data_config` Configuration Block](#input_data_config-configuration-block) section below.
+* `language_code` - (Required) Two-letter language code for the language.
+  One of `en`, `es`, `fr`, `it`, `de`, or `pt`.
+* `name` - (Required) Name for the Document Classifier.
+  Has a maximum length of 63 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+
+The following arguments are optional:
+
+* `mode` - (Optional, Default: `MULTI_CLASS`) The document classification mode.
+  One of `MULTI_CLASS` or `MULTI_LABEL`.
+  `MULTI_CLASS` is also known as "Single Label" in the AWS Console.
+* `model_kms_key_id` - (Optional) KMS Key used to encrypt trained Document Classifiers.
+  Can be a KMS Key ID or a KMS Key ARN.
+* `output_data_config` - (Optional) Configuration for the output results of training.
+  See the [`output_data_config` Configuration Block](#output_data_config-configuration-block) section below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` Configuration Block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `version_name` - (Optional) Name for the version of the Document Classifier.
+  Each version must have a unique name within the Document Classifier.
+  If omitted, Terraform will assign a random, unique version name.
+  If explicitly set to `""`, no version name will be set.
+  Has a maximum length of 63 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `version_name_prefix`.
+* `version_name_prefix` - (Optional) Creates a unique version name beginning with the specified prefix.
+  Has a maximum length of 37 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `version_name`.
+* `volume_kms_key_id` - (Optional) KMS Key used to encrypt storage volumes during job processing.
+  Can be a KMS Key ID or a KMS Key ARN.
+* `vpc_config` - (Optional) Configuration parameters for VPC to contain Document Classifier resources.
+  See the [`vpc_config` Configuration Block](#vpc_config-configuration-block) section below.
+
+### `input_data_config` Configuration Block
+
+* `augmented_manifests` - (Optional) List of training datasets produced by Amazon SageMaker Ground Truth.
+  Used if `data_format` is `AUGMENTED_MANIFEST`.
+  See the [`augmented_manifests` Configuration Block](#augmented_manifests-configuration-block) section below.
+* `data_format` - (Optional, Default: `COMPREHEND_CSV`) The format for the training data.
+  One of `COMPREHEND_CSV` or `AUGMENTED_MANIFEST`.
+* `label_delimiter` - (Optional) Delimiter between labels when training a multi-label classifier.
+  Valid values are `|`, `~`, `!`, `@`, `#`, `$`, `%`, `^`, `*`, `-`, `_`, `+`, `=`, `\`, `:`, `;`, `>`, `?`, `/`, a space, and a tab.
+  Default is `|`.
+* `s3_uri` - (Optional) Location of training documents.
+  Used if `data_format` is `COMPREHEND_CSV`.
+* `test_s3uri` - (Optional) Location of test documents.
+
+### `augmented_manifests` Configuration Block
+
+* `annotation_data_s3_uri` - (Optional) Location of annotation files.
+* `attribute_names` - (Required) The JSON attribute that contains the annotations for the training documents.
+* `document_type` - (Optional, Default: `PLAIN_TEXT_DOCUMENT`) Type of augmented manifest.
+  One of `PLAIN_TEXT_DOCUMENT` or `SEMI_STRUCTURED_DOCUMENT`.
+* `s3_uri` - (Required) Location of augmented manifest file.
+* `source_documents_s3_uri` - (Optional) Location of source PDF files.
+* `split` - (Optional, Default: `TRAIN`) Purpose of data in augmented manifest.
+  One of `TRAIN` or `TEST`.
+
+### `output_data_config` Configuration Block
+
+* `kms_key_id` - (Optional) KMS Key used to encrypt the output documents.
+  Can be a KMS Key ID, a KMS Key ARN, a KMS Alias name, or a KMS Alias ARN.
+* `output_s3_uri` - (Computed) Full path for the output documents.
+* `s3_uri` - (Required) Destination path for the output documents.
+  The full path to the output file will be returned in `output_s3_uri`.
+
+### `vpc_config` Configuration Block
+
+* `security_group_ids` - (Required) List of security group IDs.
+* `subnets` - (Required) List of VPC subnets.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Document Classifier version.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+`aws_comprehend_document_classifier` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+* `create` - (Optional, Default: `60m`)
+* `update` - (Optional, Default: `60m`)
+* `delete` - (Optional, Default: `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Comprehend Document Classifier using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Comprehend Document Classifier using the ARN. For example:
+
+```console
+% terraform import aws_comprehend_document_classifier.example arn:aws:comprehend:us-west-2:123456789012:document_classifier/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/comprehend_entity_recognizer.html.markdown b/website/docs/cdktf/python/r/comprehend_entity_recognizer.html.markdown
new file mode 100644
index 00000000000..c1f845b5631
--- /dev/null
+++ b/website/docs/cdktf/python/r/comprehend_entity_recognizer.html.markdown
@@ -0,0 +1,185 @@
+---
+subcategory: "Comprehend"
+layout: "aws"
+page_title: "AWS: aws_comprehend_entity_recognizer"
+description: |-
+  Terraform resource for managing an AWS Comprehend Entity Recognizer.
+---
+
+
+
+# Resource: aws_comprehend_entity_recognizer
+
+Terraform resource for managing an AWS Comprehend Entity Recognizer.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.comprehend_entity_recognizer import ComprehendEntityRecognizer
+from imports.aws.s3_object import S3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, bucket, key, bucket1, key1):
+        super().__init__(scope, name)
+        documents = S3Object(self, "documents",
+            bucket=bucket,
+            key=key
+        )
+        entities = S3Object(self, "entities",
+            bucket=bucket1,
+            key=key1
+        )
+        ComprehendEntityRecognizer(self, "example",
+            data_access_role_arn=Token.as_string(aws_iam_role_example.arn),
+            depends_on=[aws_iam_role_policy_example],
+            input_data_config=ComprehendEntityRecognizerInputDataConfig(
+                documents=ComprehendEntityRecognizerInputDataConfigDocuments(
+                    s3_uri="s3://${" + aws_s3_bucket_documents.bucket + "}/${" + documents.id + "}"
+                ),
+                entity_list=ComprehendEntityRecognizerInputDataConfigEntityListStruct(
+                    s3_uri="s3://${" + aws_s3_bucket_entities.bucket + "}/${" + entities.id + "}"
+                ),
+                entity_types=[ComprehendEntityRecognizerInputDataConfigEntityTypes(
+                    type="ENTITY_1"
+                ), ComprehendEntityRecognizerInputDataConfigEntityTypes(
+                    type="ENTITY_2"
+                )
+                ]
+            ),
+            language_code="en",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `data_access_role_arn` - (Required) The ARN for an IAM Role which allows Comprehend to read the training and testing data.
+* `input_data_config` - (Required) Configuration for the training and testing data.
+  See the [`input_data_config` Configuration Block](#input_data_config-configuration-block) section below.
+* `language_code` - (Required) Two-letter language code for the language.
+  One of `en`, `es`, `fr`, `it`, `de`, or `pt`.
+* `name` - (Required) Name for the Entity Recognizer.
+  Has a maximum length of 63 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+
+The following arguments are optional:
+
+* `model_kms_key_id` - (Optional) The ID or ARN of a KMS Key used to encrypt trained Entity Recognizers.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` Configuration Block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `version_name` - (Optional) Name for the version of the Entity Recognizer.
+  Each version must have a unique name within the Entity Recognizer.
+  If omitted, Terraform will assign a random, unique version name.
+  If explicitly set to `""`, no version name will be set.
+  Has a maximum length of 63 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `version_name_prefix`.
+* `version_name_prefix` - (Optional) Creates a unique version name beginning with the specified prefix.
+  Has a maximum length of 37 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `version_name`.
+* `volume_kms_key_id` - (Optional) ID or ARN of a KMS Key used to encrypt storage volumes during job processing.
+* `vpc_config` - (Optional) Configuration parameters for VPC to contain Entity Recognizer resources.
+  See the [`vpc_config` Configuration Block](#vpc_config-configuration-block) section below.
+
+### `input_data_config` Configuration Block
+
+* `annotations` - (Optional) Specifies location of the document annotation data.
+  See the [`annotations` Configuration Block](#annotations-configuration-block) section below.
+  One of `annotations` or `entity_list` is required.
+* `augmented_manifests` - (Optional) List of training datasets produced by Amazon SageMaker Ground Truth.
+  Used if `data_format` is `AUGMENTED_MANIFEST`.
+  See the [`augmented_manifests` Configuration Block](#augmented_manifests-configuration-block) section below; a short usage sketch follows the `entity_types` block.
+* `data_format` - (Optional, Default: `COMPREHEND_CSV`) The format for the training data.
+  One of `COMPREHEND_CSV` or `AUGMENTED_MANIFEST`.
+* `documents` - (Optional) Specifies a collection of training documents.
+  Used if `data_format` is `COMPREHEND_CSV`.
+  See the [`documents` Configuration Block](#documents-configuration-block) section below.
+* `entity_list` - (Optional) Specifies location of the entity list data.
+  See the [`entity_list` Configuration Block](#entity_list-configuration-block) section below.
+  One of `entity_list` or `annotations` is required.
+* `entity_types` - (Required) Set of entity types to be recognized.
+  Has a maximum of 25 items.
+  See the [`entity_types` Configuration Block](#entity_types-configuration-block) section below.
+
+### `annotations` Configuration Block
+
+* `s3_uri` - (Required) Location of training annotations.
+* `test_s3uri` - (Optional) Location of test annotations.
+
+### `augmented_manifests` Configuration Block
+
+* `annotation_data_s3_uri` - (Optional) Location of annotation files.
+* `attribute_names` - (Required) The JSON attribute that contains the annotations for the training documents.
+* `document_type` - (Optional, Default: `PLAIN_TEXT_DOCUMENT`) Type of augmented manifest.
+  One of `PLAIN_TEXT_DOCUMENT` or `SEMI_STRUCTURED_DOCUMENT`.
+* `s3_uri` - (Required) Location of augmented manifest file.
+* `source_documents_s3_uri` - (Optional) Location of source PDF files.
+* `split` - (Optional, Default: `TRAIN`) Purpose of data in augmented manifest.
+  One of `TRAIN` or `TEST`.
+
+### `documents` Configuration Block
+
+* `input_format` - (Optional, Default: `ONE_DOC_PER_LINE`) Specifies how the input files should be processed.
+  One of `ONE_DOC_PER_LINE` or `ONE_DOC_PER_FILE`.
+* `s3_uri` - (Required) Location of training documents.
+* `test_s3uri` - (Optional) Location of test documents.
+
+### `entity_list` Configuration Block
+
+* `s3_uri` - (Required) Location of entity list.
+
+### `entity_types` Configuration Block
+
+* `type` - (Required) An entity type to be matched by the Entity Recognizer.
+  Cannot contain a newline (`\n`), carriage return (`\r`), or tab (`\t`).
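+
+Tying the blocks above together, the following hand-written sketch (not `cdktf convert` output) shows an `AUGMENTED_MANIFEST` input configuration. The role ARN and S3 locations are illustrative, and plain dicts stand in for the generated struct classes:
+
+```python
+# Hypothetical sketch: training from a SageMaker Ground Truth manifest.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.comprehend_entity_recognizer import ComprehendEntityRecognizer
+class AugmentedManifestSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ComprehendEntityRecognizer(self, "augmented",
+            name="augmented-example",
+            language_code="en",
+            # Illustrative role; it must allow Comprehend to read the manifest.
+            data_access_role_arn="arn:aws:iam::123456789012:role/comprehend-data-access",
+            input_data_config={
+                "data_format": "AUGMENTED_MANIFEST",
+                "entity_types": [{"type": "ENTITY_1"}, {"type": "ENTITY_2"}],
+                "augmented_manifests": [{
+                    "s3_uri": "s3://example-bucket/output.manifest",
+                    "attribute_names": ["entity-labeling-job"]
+                }]
+            }
+        )
+```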
+
+### `vpc_config` Configuration Block
+
+* `security_group_ids` - (Required) List of security group IDs.
+* `subnets` - (Required) List of VPC subnets.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Entity Recognizer version.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+`aws_comprehend_entity_recognizer` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+* `create` - (Optional, Default: `60m`)
+* `update` - (Optional, Default: `60m`)
+* `delete` - (Optional, Default: `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Comprehend Entity Recognizer using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Comprehend Entity Recognizer using the ARN. For example:
+
+```console
+% terraform import aws_comprehend_entity_recognizer.example arn:aws:comprehend:us-west-2:123456789012:entity-recognizer/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/config_aggregate_authorization.markdown b/website/docs/cdktf/python/r/config_aggregate_authorization.markdown
new file mode 100644
index 00000000000..157ee5f27be
--- /dev/null
+++ b/website/docs/cdktf/python/r/config_aggregate_authorization.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Config"
+layout: "aws"
+page_title: "AWS: aws_config_aggregate_authorization"
+description: |-
+  Manages an AWS Config Aggregate Authorization.
+---
+
+
+
+# Resource: aws_config_aggregate_authorization
+
+Manages an AWS Config Aggregate Authorization.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.config_aggregate_authorization import ConfigAggregateAuthorization
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConfigAggregateAuthorization(self, "example",
+            account_id="123456789012",
+            region="eu-west-2"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Required) Account ID of the account authorized to aggregate data.
+* `region` - (Required) Region authorized to collect aggregated data.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
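+
+As a complement to the example above, a hand-written sketch (not `cdktf convert` output) that also tags the authorization; the account, region, and tag values are illustrative:
+
+```python
+# Hypothetical sketch: authorize an aggregator account/region pair, with tags.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.config_aggregate_authorization import ConfigAggregateAuthorization
+class AggregateAuthorizationSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConfigAggregateAuthorization(self, "example",
+            account_id="123456789012",  # aggregator account allowed to collect data
+            region="us-east-1",         # region of the aggregator
+            tags={
+                "Team": "security"
+            }
+        )
+```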
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the authorization +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config aggregate authorizations using `account_id:region`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Config aggregate authorizations using `account_id:region`. For example: + +```console +% terraform import aws_config_aggregate_authorization.example 123456789012:us-east-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_config_rule.html.markdown b/website/docs/cdktf/python/r/config_config_rule.html.markdown new file mode 100644 index 00000000000..3ae6705b79f --- /dev/null +++ b/website/docs/cdktf/python/r/config_config_rule.html.markdown @@ -0,0 +1,238 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_config_rule" +description: |- + Provides an AWS Config Rule. +--- + + + +# Resource: aws_config_config_rule + +Provides an AWS Config Rule. + +~> **Note:** Config Rule requires an existing [Configuration Recorder](/docs/providers/aws/r/config_configuration_recorder.html) to be present. Use of `depends_on` is recommended (as shown below) to avoid race conditions. + +## Example Usage + +### AWS Managed Rules + +AWS managed rules can be used by setting the source owner to `AWS` and the source identifier to the name of the managed rule. More information about AWS managed rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.config_config_rule import ConfigConfigRule
+from imports.aws.config_configuration_recorder import ConfigConfigurationRecorder
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["config.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        p = DataAwsIamPolicyDocument(self, "p",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["config:Put*"],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        r = IamRole(self, "r",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="my-awsconfig-role"
+        )
+        aws_iam_role_policy_p = IamRolePolicy(self, "p_3",
+            name="my-awsconfig-policy",
+            policy=Token.as_string(p.json),
+            role=r.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_p.override_logical_id("p")
+        foo = ConfigConfigurationRecorder(self, "foo",
+            name="example",
+            role_arn=r.arn
+        )
+        aws_config_config_rule_r = ConfigConfigRule(self, "r_5",
+            depends_on=[foo],
+            name="example",
+            source=ConfigConfigRuleSource(
+                owner="AWS",
+                source_identifier="S3_BUCKET_VERSIONING_ENABLED"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_config_config_rule_r.override_logical_id("r")
+```
+
+### Custom Rules
+
+Custom rules can be used by setting the source owner to `CUSTOM_LAMBDA` and the source identifier to the Amazon Resource Name (ARN) of the Lambda Function. The AWS Config service must have permissions to invoke the Lambda Function, e.g., via the [`aws_lambda_permission` resource](/docs/providers/aws/r/lambda_permission.html). More information about custom rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_develop-rules.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.config_config_rule import ConfigConfigRule
+from imports.aws.config_configuration_recorder import ConfigConfigurationRecorder
+from imports.aws.lambda_function import LambdaFunction
+from imports.aws.lambda_permission import LambdaPermission
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, role_arn, function_name, role, rule_name):
+        super().__init__(scope, name)
+        example = ConfigConfigurationRecorder(self, "example",
+            role_arn=role_arn
+        )
+        aws_lambda_function_example = LambdaFunction(self, "example_1",
+            function_name=function_name,
+            role=role
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lambda_function_example.override_logical_id("example")
+        aws_lambda_permission_example = LambdaPermission(self, "example_2",
+            action="lambda:InvokeFunction",
+            function_name=Token.as_string(aws_lambda_function_example.arn),
+            principal="config.amazonaws.com",
+            statement_id="AllowExecutionFromConfig"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lambda_permission_example.override_logical_id("example")
+        aws_config_config_rule_example = ConfigConfigRule(self, "example_3",
+            depends_on=[example, aws_lambda_permission_example],
+            source=ConfigConfigRuleSource(
+                owner="CUSTOM_LAMBDA",
+                source_identifier=Token.as_string(aws_lambda_function_example.arn)
+            ),
+            name=rule_name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_config_config_rule_example.override_logical_id("example")
+```
+
+### Custom Policies
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.config_config_rule import ConfigConfigRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConfigConfigRule(self, "example",
+            name="example",
+            source=ConfigConfigRuleSource(
+                custom_policy_details=ConfigConfigRuleSourceCustomPolicyDetails(
+                    policy_runtime="guard-2.x.x",
+                    policy_text="\t rule tableisactive when\n\t\t resourceType == \"AWS::DynamoDB::Table\" {\n\t\t configuration.tableStatus == ['ACTIVE']\n\t }\n\t \n\t rule checkcompliance when\n\t\t resourceType == \"AWS::DynamoDB::Table\"\n\t\t tableisactive {\n\t\t\t supplementaryConfiguration.ContinuousBackupsDescription.pointInTimeRecoveryDescription.pointInTimeRecoveryStatus == \"ENABLED\"\n\t }\n\n"
+                ),
+                owner="CUSTOM_POLICY",
+                source_detail=[ConfigConfigRuleSourceSourceDetail(
+                    message_type="ConfigurationItemChangeNotification"
+                )
+                ]
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the rule.
+* `description` - (Optional) Description of the rule.
+* `input_parameters` - (Optional) A string in JSON format that is passed to the AWS Config rule Lambda function.
+* `maximum_execution_frequency` - (Optional) The maximum frequency with which AWS Config runs evaluations for a rule.
+* `scope` - (Optional) Scope defines which resources can trigger an evaluation for the rule. See [Scope](#scope) below.
+* `source` - (Required) Source specifies the rule owner, the rule identifier, and the notifications that cause the function to evaluate your AWS resources. See [Source](#source) below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Scope
+
+Defines which resources can trigger an evaluation for the rule.
+If you do not specify a scope, evaluations are triggered when any resource in the recording group changes.
+
+* `compliance_resource_id` - (Optional) The ID of the only AWS resource that you want to trigger an evaluation for the rule. If you specify a resource ID, you must specify one resource type for `compliance_resource_types`.
+* `compliance_resource_types` - (Optional) A list of resource types of only those AWS resources that you want to trigger an evaluation for the rule, e.g., `AWS::EC2::Instance`. You can only specify one type if you also specify a resource ID for `compliance_resource_id`. See [relevant part of AWS Docs](http://docs.aws.amazon.com/config/latest/APIReference/API_ResourceIdentifier.html#config-Type-ResourceIdentifier-resourceType) for available types.
+* `tag_key` - (Optional, Required if `tag_value` is specified) The tag key that is applied to only those AWS resources that you want to trigger an evaluation for the rule.
+* `tag_value` - (Optional) The tag value applied to only those AWS resources that you want to trigger an evaluation for the rule.
+
+### Source
+
+Provides the rule owner (AWS or customer), the rule identifier, and the notifications that cause the function to evaluate your AWS resources.
+
+* `owner` - (Required) Indicates whether AWS or the customer owns and manages the AWS Config rule. Valid values are `AWS`, `CUSTOM_LAMBDA` or `CUSTOM_POLICY`. For more information about managed rules, see the [AWS Config Managed Rules documentation](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html). For more information about custom rules, see the [AWS Config Custom Rules documentation](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_develop-rules.html). Custom Lambda Functions require permissions to allow the AWS Config service to invoke them, e.g., via the [`aws_lambda_permission` resource](/docs/providers/aws/r/lambda_permission.html).
+* `source_identifier` - (Optional) For AWS Config managed rules, a predefined identifier, e.g., `IAM_PASSWORD_POLICY`. For custom Lambda rules, the identifier is the ARN of the Lambda Function, such as `arn:aws:lambda:us-east-1:123456789012:function:custom_rule_name` or the [`arn` attribute of the `aws_lambda_function` resource](/docs/providers/aws/r/lambda_function.html#arn).
+* `source_detail` - (Optional) Provides the source and type of the event that causes AWS Config to evaluate your AWS resources. Only valid if `owner` is `CUSTOM_LAMBDA` or `CUSTOM_POLICY`. See [Source Detail](#source-detail) below; a periodic-evaluation sketch follows that section.
+* `custom_policy_details` - (Optional) Provides the runtime system, policy definition, and whether debug logging is enabled. Required when owner is set to `CUSTOM_POLICY`. See [Custom Policy Details](#custom-policy-details) below.
+
+#### Source Detail
+
+* `event_source` - (Optional) The source of the event, such as an AWS service, that triggers AWS Config to evaluate your AWS resources. This defaults to `aws.config` and is the only valid value.
+* `maximum_execution_frequency` - (Optional) The frequency that you want AWS Config to run evaluations for a rule that is triggered periodically. If specified, requires `message_type` to be `ScheduledNotification`.
+* `message_type` - (Optional) The type of notification that triggers AWS Config to run an evaluation for a rule. You can specify the following notification types:
+    * `ConfigurationItemChangeNotification` - Triggers an evaluation when AWS Config delivers a configuration item as a result of a resource change.
+    * `OversizedConfigurationItemChangeNotification` - Triggers an evaluation when AWS Config delivers an oversized configuration item. AWS Config may generate this notification type when a resource changes and the notification exceeds the maximum size allowed by Amazon SNS.
+    * `ScheduledNotification` - Triggers a periodic evaluation at the frequency specified for `maximum_execution_frequency`.
+    * `ConfigurationSnapshotDeliveryCompleted` - Triggers a periodic evaluation when AWS Config delivers a configuration snapshot.
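+
+A hand-written sketch (not `cdktf convert` output) of the periodic pattern just described: a custom Lambda rule evaluated on a schedule. The Lambda ARN is illustrative, and plain dicts stand in for the generated struct classes:
+
+```python
+# Hypothetical sketch: a custom rule evaluated every 24 hours.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.config_config_rule import ConfigConfigRule
+class PeriodicRuleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConfigConfigRule(self, "periodic",
+            name="periodic-example",
+            source={
+                "owner": "CUSTOM_LAMBDA",
+                "source_identifier": "arn:aws:lambda:us-east-1:123456789012:function:example",
+                "source_detail": [{
+                    # ScheduledNotification is required when setting a frequency
+                    "message_type": "ScheduledNotification",
+                    "maximum_execution_frequency": "TwentyFour_Hours"
+                }]
+            }
+        )
+```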
+
+#### Custom Policy Details
+
+* `enable_debug_log_delivery` - (Optional) Whether to enable debug logging for your Config Custom Policy rule. The default value is `false`.
+* `policy_runtime` - (Required) The runtime system for your Config Custom Policy rule. Guard is a policy-as-code language that allows you to write policies that are enforced by Config Custom Policy rules. For more information about Guard, see the [Guard GitHub Repository](https://github.com/aws-cloudformation/cloudformation-guard).
+* `policy_text` - (Required) The policy definition containing the logic for your Config Custom Policy rule.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the config rule.
+* `rule_id` - The ID of the config rule.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Rule using the name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Config Rule using the name. For example:
+
+```console
+% terraform import aws_config_config_rule.foo example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/config_configuration_aggregator.html.markdown b/website/docs/cdktf/python/r/config_configuration_aggregator.html.markdown
new file mode 100644
index 00000000000..6682b1914b7
--- /dev/null
+++ b/website/docs/cdktf/python/r/config_configuration_aggregator.html.markdown
@@ -0,0 +1,147 @@
+---
+subcategory: "Config"
+layout: "aws"
+page_title: "AWS: aws_config_configuration_aggregator"
+description: |-
+  Manages an AWS Config Configuration Aggregator.
+---
+
+
+
+# Resource: aws_config_configuration_aggregator
+
+Manages an AWS Config Configuration Aggregator.
+
+## Example Usage
+
+### Account Based Aggregation
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.config_configuration_aggregator import ConfigConfigurationAggregator
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConfigConfigurationAggregator(self, "account",
+            account_aggregation_source=ConfigConfigurationAggregatorAccountAggregationSource(
+                account_ids=["123456789012"],
+                regions=["us-west-2"]
+            ),
+            name="example"
+        )
+```
+
+### Organization Based Aggregation
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.config_configuration_aggregator import ConfigConfigurationAggregator
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["config.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        organization = IamRole(self, "organization",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="example"
+        )
+        aws_iam_role_policy_attachment_organization = IamRolePolicyAttachment(self, "organization_2",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AWSConfigRoleForOrganizations",
+            role=organization.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_organization.override_logical_id("organization")
+        aws_config_configuration_aggregator_organization = ConfigConfigurationAggregator(self, "organization_3",
+            depends_on=[aws_iam_role_policy_attachment_organization],
+            name="example",
+            organization_aggregation_source=ConfigConfigurationAggregatorOrganizationAggregationSource(
+                all_regions=True,
+                role_arn=organization.arn
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_config_configuration_aggregator_organization.override_logical_id("organization")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the configuration aggregator.
+* `account_aggregation_source` - (Optional) The account(s) to aggregate config data from as documented below.
+* `organization_aggregation_source` - (Optional) The organization to aggregate config data from as documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Either `account_aggregation_source` or `organization_aggregation_source` must be specified.
+
+### `account_aggregation_source`
+
+* `account_ids` - (Required) List of 12-digit account IDs of the account(s) being aggregated.
+* `all_regions` - (Optional) If true, aggregate existing AWS Config regions and future regions.
+* `regions` - (Optional) List of source regions being aggregated. + +Either `regions` or `all_regions` (as true) must be specified. + +### `organization_aggregation_source` + +~> **Note:** If your source type is an organization, you must be signed in to the master account and all features must be enabled in your organization. AWS Config calls EnableAwsServiceAccess API to enable integration between AWS Config and AWS Organizations. + +* `all_regions` - (Optional) If true, aggregate existing AWS Config regions and future regions. +* `regions` - (Optional) List of source regions being aggregated. +* `role_arn` - (Required) ARN of the IAM role used to retrieve AWS Organization details associated with the aggregator account. + +Either `regions` or `all_regions` (as true) must be specified. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the aggregator +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Aggregators using the name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Configuration Aggregators using the name. For example: + +```console +% terraform import aws_config_configuration_aggregator.example foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_configuration_recorder.html.markdown b/website/docs/cdktf/python/r/config_configuration_recorder.html.markdown new file mode 100644 index 00000000000..95b72ec55b7 --- /dev/null +++ b/website/docs/cdktf/python/r/config_configuration_recorder.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_configuration_recorder" +description: |- + Provides an AWS Config Configuration Recorder. +--- + + + +# Resource: aws_config_configuration_recorder + +Provides an AWS Config Configuration Recorder. Please note that this resource **does not start** the created recorder automatically. + +~> **Note:** _Starting_ the Configuration Recorder requires a [delivery channel](/docs/providers/aws/r/config_delivery_channel.html) (while delivery channel creation requires Configuration Recorder). This is why [`aws_config_configuration_recorder_status`](/docs/providers/aws/r/config_configuration_recorder_status.html) is a separate resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
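+# The IAM role below trusts config.amazonaws.com via sts:AssumeRole; the
+# recorder is created in a stopped state and is started separately with
+# the aws_config_configuration_recorder_status resource (see note above).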
+#
+from imports.aws.config_configuration_recorder import ConfigConfigurationRecorder
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["config.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        r = IamRole(self, "r",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="awsconfig-example"
+        )
+        ConfigConfigurationRecorder(self, "foo",
+            name="example",
+            role_arn=r.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the recorder. Defaults to `default`. Changing it recreates the resource.
+* `role_arn` - (Required) Amazon Resource Name (ARN) of the IAM role. Used to make read or write requests to the delivery channel and to describe the AWS resources associated with the account. See [AWS Docs](http://docs.aws.amazon.com/config/latest/developerguide/iamrole-permissions.html) for more details.
+* `recording_group` - (Optional) Recording group - see below.
+
+### `recording_group`
+
+* `all_supported` - (Optional) Specifies whether AWS Config records configuration changes for every supported type of regional resource (which includes any new type that will become supported in the future). Conflicts with `resource_types`. Defaults to `true`.
+* `exclusion_by_resource_types` - (Optional) An object that specifies how AWS Config excludes resource types from being recorded by the configuration recorder. To use this option, you must set the `use_only` field of `recording_strategy` to `EXCLUSION_BY_RESOURCE_TYPES`. Requires `all_supported = false`. Conflicts with `resource_types`.
+* `include_global_resource_types` - (Optional) Specifies whether AWS Config includes all supported types of _global resources_ with the resources that it records. Requires `all_supported = true`. Conflicts with `resource_types`.
+* `recording_strategy` - (Optional) Recording Strategy - see below.
+* `resource_types` - (Optional) A list that specifies the types of AWS resources for which AWS Config records configuration changes (for example, `AWS::EC2::Instance` or `AWS::CloudTrail::Trail`). See [relevant part of AWS Docs](http://docs.aws.amazon.com/config/latest/APIReference/API_ResourceIdentifier.html#config-Type-ResourceIdentifier-resourceType) for available types. In order to use this attribute, `all_supported` must be set to false.
+
+#### `recording_strategy`
+
+* `use_only` - (Optional) The recording strategy for the configuration recorder. See [relevant part of AWS Docs](https://docs.aws.amazon.com/config/latest/APIReference/API_RecordingStrategy.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the recorder
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Recorder using the name.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Configuration Recorder using the name. For example: + +```console +% terraform import aws_config_configuration_recorder.foo example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_configuration_recorder_status.html.markdown b/website/docs/cdktf/python/r/config_configuration_recorder_status.html.markdown new file mode 100644 index 00000000000..bf94863b0af --- /dev/null +++ b/website/docs/cdktf/python/r/config_configuration_recorder_status.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_configuration_recorder_status" +description: |- + Manages status of an AWS Config Configuration Recorder. +--- + + + +# Resource: aws_config_configuration_recorder_status + +Manages status (recording / stopped) of an AWS Config Configuration Recorder. + +~> **Note:** Starting Configuration Recorder requires a [Delivery Channel](/docs/providers/aws/r/config_delivery_channel.html) to be present. Use of `depends_on` (as shown below) is recommended to avoid race conditions. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.config_configuration_recorder import ConfigConfigurationRecorder +from imports.aws.config_configuration_recorder_status import ConfigConfigurationRecorderStatus +from imports.aws.config_delivery_channel import ConfigDeliveryChannel +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +from imports.aws.s3_bucket import S3Bucket +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + b = S3Bucket(self, "b", + bucket="awsconfig-example" + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["config.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + p = DataAwsIamPolicyDocument(self, "p", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + effect="Allow", + resources=[b.arn, "${" + b.arn + "}/*"] + ) + ] + ) + foo = ConfigDeliveryChannel(self, "foo", + name="example", + s3_bucket_name=b.bucket + ) + r = IamRole(self, "r", + assume_role_policy=Token.as_string(assume_role.json), + name="example-awsconfig" + ) + aws_iam_role_policy_p = IamRolePolicy(self, "p_5", + name="awsconfig-example", + policy=Token.as_string(p.json), + role=r.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_iam_role_policy_p.override_logical_id("p") + IamRolePolicyAttachment(self, "a", + policy_arn="arn:aws:iam::aws:policy/service-role/AWS_ConfigRole", + role=r.name + ) + aws_config_configuration_recorder_foo = ConfigConfigurationRecorder(self, "foo_7", + name="example", + role_arn=r.arn + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_config_configuration_recorder_foo.override_logical_id("foo") + aws_config_configuration_recorder_status_foo = + ConfigConfigurationRecorderStatus(self, "foo_8", + depends_on=[foo], + is_enabled=True, + name=Token.as_string(aws_config_configuration_recorder_foo.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_config_configuration_recorder_status_foo.override_logical_id("foo") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the recorder +* `is_enabled` - (Required) Whether the configuration recorder should be enabled or disabled. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Recorder Status using the name of the Configuration Recorder. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Configuration Recorder Status using the name of the Configuration Recorder. For example: + +```console +% terraform import aws_config_configuration_recorder_status.foo example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_conformance_pack.html.markdown b/website/docs/cdktf/python/r/config_conformance_pack.html.markdown new file mode 100644 index 00000000000..bb8277e9554 --- /dev/null +++ b/website/docs/cdktf/python/r/config_conformance_pack.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_conformance_pack" +description: |- + Manages a Config Conformance Pack +--- + + + +# Resource: aws_config_conformance_pack + +Manages a Config Conformance Pack. More information about this collection of Config rules and remediation actions can be found in the +[Conformance Packs](https://docs.aws.amazon.com/config/latest/developerguide/conformance-packs.html) documentation. +Sample Conformance Pack templates may be found in the +[AWS Config Rules Repository](https://github.com/awslabs/aws-config-rules/tree/master/aws-config-conformance-packs). + +~> **NOTE:** The account must have a Configuration Recorder with proper IAM permissions before the Conformance Pack will +successfully create or update. See also the +[`aws_config_configuration_recorder` resource](/docs/providers/aws/r/config_configuration_recorder.html). + +## Example Usage + +### Template Body + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
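+# The inline template_body below declares the
+# AccessKeysRotatedParameterMaxAccessKeyAge parameter, whose value is
+# supplied through the matching input_parameter block.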
+#
+from imports.aws.config_conformance_pack import ConfigConformancePack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConfigConformancePack(self, "example",
+            depends_on=[aws_config_configuration_recorder_example],
+            input_parameter=[ConfigConformancePackInputParameter(
+                parameter_name="AccessKeysRotatedParameterMaxAccessKeyAge",
+                parameter_value="90"
+            )
+            ],
+            name="example",
+            template_body="Parameters:\n  AccessKeysRotatedParameterMaxAccessKeyAge:\n    Type: String\nResources:\n  IAMPasswordPolicy:\n    Properties:\n      ConfigRuleName: IAMPasswordPolicy\n      Source:\n        Owner: AWS\n        SourceIdentifier: IAM_PASSWORD_POLICY\n    Type: AWS::Config::ConfigRule\n\n"
+        )
+```
+
+### Template S3 URI
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.config_conformance_pack import ConfigConformancePack
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_object import S3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_s3_object_example = S3Object(self, "example_1",
+            bucket=example.id,
+            content="Resources:\n  IAMPasswordPolicy:\n    Properties:\n      ConfigRuleName: IAMPasswordPolicy\n      Source:\n        Owner: AWS\n        SourceIdentifier: IAM_PASSWORD_POLICY\n    Type: AWS::Config::ConfigRule\n\n",
+            key="example-key"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_object_example.override_logical_id("example")
+        aws_config_conformance_pack_example = ConfigConformancePack(self, "example_2",
+            depends_on=[aws_config_configuration_recorder_example],
+            name="example",
+            template_s3_uri="s3://${" + example.bucket + "}/${" + aws_s3_object_example.key + "}"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_config_conformance_pack_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+~> **Note:** If both `template_body` and `template_s3_uri` are specified, AWS Config uses the `template_s3_uri` and ignores the `template_body`.
+
+This resource supports the following arguments:
+
+* `name` - (Required, Forces new resource) The name of the conformance pack. Must begin with a letter and contain from 1 to 256 alphanumeric characters and hyphens.
+* `delivery_s3_bucket` - (Optional) Amazon S3 bucket where AWS Config stores conformance pack templates. Maximum length of 63.
+* `delivery_s3_key_prefix` - (Optional) The prefix for the Amazon S3 bucket. Maximum length of 1024.
+* `input_parameter` - (Optional) Set of configuration blocks describing input parameters passed to the conformance pack template. Documented below. When configured, the parameters must also be included in the `template_body` or in the template stored in Amazon S3 if using `template_s3_uri`.
+* `template_body` - (Optional, required if `template_s3_uri` is not provided) A string containing full conformance pack template body. Maximum length of 51200. Drift detection is not possible with this argument.
+* `template_s3_uri` - (Optional, required if `template_body` is not provided) Location of file, e.g., `s3://bucketname/prefix`, containing the template body. The uri must point to the conformance pack template that is located in an Amazon S3 bucket in the same region as the conformance pack. Maximum length of 1024. Drift detection is not possible with this argument. + +### input_parameter Argument Reference + +The `input_parameter` configuration block supports the following arguments: + +* `parameter_name` - (Required) The input key. +* `parameter_value` - (Required) The input value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the conformance pack. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Conformance Packs using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Config Conformance Packs using the `name`. For example: + +```console +% terraform import aws_config_conformance_pack.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_delivery_channel.html.markdown b/website/docs/cdktf/python/r/config_delivery_channel.html.markdown new file mode 100644 index 00000000000..b00b1d4f6b9 --- /dev/null +++ b/website/docs/cdktf/python/r/config_delivery_channel.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_delivery_channel" +description: |- + Provides an AWS Config Delivery Channel. +--- + + + +# Resource: aws_config_delivery_channel + +Provides an AWS Config Delivery Channel. + +~> **Note:** Delivery Channel requires a [Configuration Recorder](/docs/providers/aws/r/config_configuration_recorder.html) to be present. Use of `depends_on` (as shown below) is recommended to avoid race conditions. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
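+# The delivery channel below writes to the S3 bucket "b" and declares
+# depends_on on the configuration recorder, avoiding the race condition
+# described in the note above.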
+#
+from imports.aws.config_configuration_recorder import ConfigConfigurationRecorder
+from imports.aws.config_delivery_channel import ConfigDeliveryChannel
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        b = S3Bucket(self, "b",
+            bucket="example-awsconfig",
+            force_destroy=True
+        )
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["config.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        p = DataAwsIamPolicyDocument(self, "p",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:*"],
+                effect="Allow",
+                resources=[b.arn, "${" + b.arn + "}/*"]
+            )
+            ]
+        )
+        r = IamRole(self, "r",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="awsconfig-example"
+        )
+        aws_iam_role_policy_p = IamRolePolicy(self, "p_4",
+            name="awsconfig-example",
+            policy=Token.as_string(p.json),
+            role=r.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_p.override_logical_id("p")
+        foo = ConfigConfigurationRecorder(self, "foo",
+            name="example",
+            role_arn=r.arn
+        )
+        aws_config_delivery_channel_foo = ConfigDeliveryChannel(self, "foo_6",
+            depends_on=[foo],
+            name="example",
+            s3_bucket_name=b.bucket
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_config_delivery_channel_foo.override_logical_id("foo")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the delivery channel. Defaults to `default`. Changing it recreates the resource.
+* `s3_bucket_name` - (Required) The name of the S3 bucket used to store the configuration history.
+* `s3_key_prefix` - (Optional) The prefix for the specified S3 bucket.
+* `s3_kms_key_arn` - (Optional) The ARN of the AWS KMS key used to encrypt objects delivered by AWS Config. Must belong to the same Region as the destination S3 bucket.
+* `sns_topic_arn` - (Optional) The ARN of the SNS topic that AWS Config delivers notifications to.
+* `snapshot_delivery_properties` - (Optional) Options for how AWS Config delivers configuration snapshots. See below.
+
+### `snapshot_delivery_properties`
+
+* `delivery_frequency` - (Optional) The frequency with which AWS Config delivers configuration snapshots, e.g., `One_Hour` or `Three_Hours`. Valid values are listed [here](https://docs.aws.amazon.com/config/latest/APIReference/API_ConfigSnapshotDeliveryProperties.html#API_ConfigSnapshotDeliveryProperties_Contents).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the delivery channel.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Delivery Channel using the name.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Delivery Channel using the name. For example: + +```console +% terraform import aws_config_delivery_channel.foo example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_organization_conformance_pack.html.markdown b/website/docs/cdktf/python/r/config_organization_conformance_pack.html.markdown new file mode 100644 index 00000000000..2e40aa5b8c2 --- /dev/null +++ b/website/docs/cdktf/python/r/config_organization_conformance_pack.html.markdown @@ -0,0 +1,149 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_organization_conformance_pack" +description: |- + Manages a Config Organization Conformance Pack +--- + + + +# Resource: aws_config_organization_conformance_pack + +Manages a Config Organization Conformance Pack. More information can be found in the [Managing Conformance Packs Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/conformance-pack-organization-apis.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. Example conformance pack templates may be found in the [AWS Config Rules Repository](https://github.com/awslabs/aws-config-rules/tree/master/aws-config-conformance-packs). + +~> **NOTE:** This resource must be created in the Organization master account or a delegated administrator account, and the Organization must have all features enabled. Every Organization account except those configured in the `excluded_accounts` argument must have a Configuration Recorder with proper IAM permissions before the Organization Conformance Pack will successfully create or update. See also the [`aws_config_configuration_recorder` resource](/docs/providers/aws/r/config_configuration_recorder.html). + +## Example Usage + +### Using Template Body + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
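+# feature_set "ALL" satisfies the all-features requirement from the note
+# above, and the config-multiaccountsetup.amazonaws.com principal grants
+# AWS Config the Organizations service access it needs for multi-account setup.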
+# +from imports.aws.config_organization_conformance_pack import ConfigOrganizationConformancePack +from imports.aws.organizations_organization import OrganizationsOrganization +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = OrganizationsOrganization(self, "example", + aws_service_access_principals=["config-multiaccountsetup.amazonaws.com"], + feature_set="ALL" + ) + aws_config_organization_conformance_pack_example = + ConfigOrganizationConformancePack(self, "example_1", + depends_on=[aws_config_configuration_recorder_example, example], + input_parameter=[ConfigOrganizationConformancePackInputParameter( + parameter_name="AccessKeysRotatedParameterMaxAccessKeyAge", + parameter_value="90" + ) + ], + name="example", + template_body="Parameters:\n AccessKeysRotatedParameterMaxAccessKeyAge:\n Type: String\nResources:\n IAMPasswordPolicy:\n Properties:\n ConfigRuleName: IAMPasswordPolicy\n Source:\n Owner: AWS\n SourceIdentifier: IAM_PASSWORD_POLICY\n Type: AWS::Config::ConfigRule\n\n" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_config_organization_conformance_pack_example.override_logical_id("example") +``` + +### Using Template S3 URI + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.config_organization_conformance_pack import ConfigOrganizationConformancePack +from imports.aws.organizations_organization import OrganizationsOrganization +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = OrganizationsOrganization(self, "example", + aws_service_access_principals=["config-multiaccountsetup.amazonaws.com"], + feature_set="ALL" + ) + aws_s3_bucket_example = S3Bucket(self, "example_1", + bucket="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_example.override_logical_id("example") + aws_s3_object_example = S3Object(self, "example_2", + bucket=Token.as_string(aws_s3_bucket_example.id), + content="Resources:\n IAMPasswordPolicy:\n Properties:\n ConfigRuleName: IAMPasswordPolicy\n Source:\n Owner: AWS\n SourceIdentifier: IAM_PASSWORD_POLICY\n Type: AWS::Config::ConfigRule\n\n", + key="example-key" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_object_example.override_logical_id("example") + aws_config_organization_conformance_pack_example = + ConfigOrganizationConformancePack(self, "example_3", + depends_on=[aws_config_configuration_recorder_example, example], + name="example", + template_s3_uri="s3://${" + aws_s3_bucket_example.bucket + "}/${" + aws_s3_object_example.key + "}" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_config_organization_conformance_pack_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required, Forces new resource) The name of the organization conformance pack. Must begin with a letter and contain from 1 to 128 alphanumeric characters and hyphens. +* `delivery_s3_bucket` - (Optional) Amazon S3 bucket where AWS Config stores conformance pack templates. Delivery bucket must begin with `awsconfigconforms` prefix. Maximum length of 63. +* `delivery_s3_key_prefix` - (Optional) The prefix for the Amazon S3 bucket. Maximum length of 1024. +* `excluded_accounts` - (Optional) Set of AWS accounts to be excluded from an organization conformance pack while deploying a conformance pack. Maximum of 1000 accounts. +* `input_parameter` - (Optional) Set of configuration blocks describing input parameters passed to the conformance pack template. Documented below. When configured, the parameters must also be included in the `template_body` or in the template stored in Amazon S3 if using `template_s3_uri`. +* `template_body` - (Optional, Conflicts with `template_s3_uri`) A string containing full conformance pack template body. Maximum length of 51200. Drift detection is not possible with this argument. +* `template_s3_uri` - (Optional, Conflicts with `template_body`) Location of file, e.g., `s3://bucketname/prefix`, containing the template body. The uri must point to the conformance pack template that is located in an Amazon S3 bucket in the same region as the conformance pack. Maximum length of 1024. Drift detection is not possible with this argument. + +### input_parameter Argument Reference + +The `input_parameter` configuration block supports the following arguments: + +* `parameter_name` - (Required) The input key. +* `parameter_value` - (Required) The input value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the organization conformance pack. +* `id` - The name of the organization conformance pack. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `update` - (Default `10m`) +- `delete` - (Default `20m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Organization Conformance Packs using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Config Organization Conformance Packs using the `name`. 
For example:
+
+```console
+% terraform import aws_config_organization_conformance_pack.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/config_organization_custom_policy_rule.html.markdown b/website/docs/cdktf/python/r/config_organization_custom_policy_rule.html.markdown
new file mode 100644
index 00000000000..45f24a666e8
--- /dev/null
+++ b/website/docs/cdktf/python/r/config_organization_custom_policy_rule.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "Config"
+layout: "aws"
+page_title: "AWS: aws_config_organization_custom_policy_rule"
+description: |-
+  Terraform resource for managing an AWS Config Organization Custom Policy.
+---
+
+
+
+# Resource: aws_config_organization_custom_policy_rule
+
+Manages a Config Organization Custom Policy Rule. More information about these rules can be found in the [Enabling AWS Config Rules Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/config-rule-multi-account-deployment.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. For working with Organization Managed Rules (those invoking an AWS managed rule), see the [`aws_config_organization_managed_rule` resource](/docs/providers/aws/r/config_organization_managed_rule.html).
+
+~> **NOTE:** This resource must be created in the Organization master account and rules will include the master account unless its ID is added to the `excluded_accounts` argument.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.config_organization_custom_policy_rule import ConfigOrganizationCustomPolicyRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, trigger_types):
+        super().__init__(scope, name)
+        ConfigOrganizationCustomPolicyRule(self, "example",
+            name="example_rule_name",
+            policy_runtime="guard-2.x.x",
+            policy_text="let status = ['ACTIVE']\n\nrule tableisactive when\n    resourceType == \"AWS::DynamoDB::Table\" {\n    configuration.tableStatus == %status\n}\n\nrule checkcompliance when\n    resourceType == \"AWS::DynamoDB::Table\"\n    tableisactive {\n        let pitr = supplementaryConfiguration.ContinuousBackupsDescription.pointInTimeRecoveryDescription.pointInTimeRecoveryStatus\n        %pitr == \"ENABLED\"\n    }\n\n",
+            resource_types_scope=["AWS::DynamoDB::Table"],
+            trigger_types=trigger_types
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the rule
+* `policy_text` - (Required) Policy definition containing the logic for your organization AWS Config Custom Policy rule
+* `policy_runtime` - (Required) Runtime system for your organization AWS Config Custom Policy rules
+* `trigger_types` - (Required) List of notification types that trigger AWS Config to run an evaluation for the rule.
Valid values: `ConfigurationItemChangeNotification`, `OversizedConfigurationItemChangeNotification`
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the rule
+* `debug_log_delivery_accounts` - (Optional) List of AWS account identifiers for which to enable debug log delivery
+* `excluded_accounts` - (Optional) List of AWS account identifiers to exclude from the rule
+* `input_parameters` - (Optional) A string in JSON format that is passed to the AWS Config Rule Lambda Function
+* `maximum_execution_frequency` - (Optional) Maximum frequency with which AWS Config runs evaluations for a rule, if the rule is triggered at a periodic frequency. Defaults to `TwentyFour_Hours` for periodic frequency triggered rules. Valid values: `One_Hour`, `Three_Hours`, `Six_Hours`, `Twelve_Hours`, or `TwentyFour_Hours`.
+* `resource_id_scope` - (Optional) Identifier of the AWS resource to evaluate
+* `resource_types_scope` - (Optional) List of types of AWS resources to evaluate
+* `tag_key_scope` - (Optional, Required if `tag_value_scope` is configured) Tag key of AWS resources to evaluate
+* `tag_value_scope` - (Optional) Tag value of AWS resources to evaluate
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the rule
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `20m`)
+* `update` - (Default `20m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Config Organization Custom Policy Rule using the `name` argument. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a Config Organization Custom Policy Rule using the `name` argument. For example:
+
+```console
+% terraform import aws_config_organization_custom_policy_rule.example example_rule_name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/config_organization_custom_rule.html.markdown b/website/docs/cdktf/python/r/config_organization_custom_rule.html.markdown
new file mode 100644
index 00000000000..a1e1fe50bf9
--- /dev/null
+++ b/website/docs/cdktf/python/r/config_organization_custom_rule.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Config"
+layout: "aws"
+page_title: "AWS: aws_config_organization_custom_rule"
+description: |-
+  Manages a Config Organization Custom Rule
+---
+
+
+
+# Resource: aws_config_organization_custom_rule
+
+Manages a Config Organization Custom Rule. More information about these rules can be found in the [Enabling AWS Config Rules Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/config-rule-multi-account-deployment.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. For working with Organization Managed Rules (those invoking an AWS managed rule), see the [`aws_config_organization_managed_rule` resource](/docs/providers/aws/r/config_organization_managed_rule.html).
+ +~> **NOTE:** This resource must be created in the Organization master account and rules will include the master account unless its ID is added to the `excluded_accounts` argument. + +~> **NOTE:** The proper Lambda permission to allow the AWS Config service invoke the Lambda Function must be in place before the rule will successfully create or update. See also the [`aws_lambda_permission` resource](/docs/providers/aws/r/lambda_permission.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.config_organization_custom_rule import ConfigOrganizationCustomRule +from imports.aws.lambda_permission import LambdaPermission +from imports.aws.organizations_organization import OrganizationsOrganization +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = LambdaPermission(self, "example", + action="lambda:InvokeFunction", + function_name=Token.as_string(aws_lambda_function_example.arn), + principal="config.amazonaws.com", + statement_id="AllowExecutionFromConfig" + ) + aws_organizations_organization_example = OrganizationsOrganization(self, "example_1", + aws_service_access_principals=["config-multiaccountsetup.amazonaws.com"], + feature_set="ALL" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_organizations_organization_example.override_logical_id("example") + aws_config_organization_custom_rule_example = + ConfigOrganizationCustomRule(self, "example_2", + depends_on=[example, aws_organizations_organization_example], + lambda_function_arn=Token.as_string(aws_lambda_function_example.arn), + name="example", + trigger_types=["ConfigurationItemChangeNotification"] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_config_organization_custom_rule_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `lambda_function_arn` - (Required) Amazon Resource Name (ARN) of the rule Lambda Function +* `name` - (Required) The name of the rule +* `trigger_types` - (Required) List of notification types that trigger AWS Config to run an evaluation for the rule. Valid values: `ConfigurationItemChangeNotification`, `OversizedConfigurationItemChangeNotification`, and `ScheduledNotification` +* `description` - (Optional) Description of the rule +* `excluded_accounts` - (Optional) List of AWS account identifiers to exclude from the rule +* `input_parameters` - (Optional) A string in JSON format that is passed to the AWS Config Rule Lambda Function +* `maximum_execution_frequency` - (Optional) The maximum frequency with which AWS Config runs evaluations for a rule, if the rule is triggered at a periodic frequency. Defaults to `TwentyFour_Hours` for periodic frequency triggered rules. Valid values: `One_Hour`, `Three_Hours`, `Six_Hours`, `Twelve_Hours`, or `TwentyFour_Hours`. 
+* `resource_id_scope` - (Optional) Identifier of the AWS resource to evaluate +* `resource_types_scope` - (Optional) List of types of AWS resources to evaluate +* `tag_key_scope` - (Optional, Required if `tag_value_scope` is configured) Tag key of AWS resources to evaluate +* `tag_value_scope` - (Optional) Tag value of AWS resources to evaluate + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the rule + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `delete` - (Default `5m`) +* `update` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Organization Custom Rules using the name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Config Organization Custom Rules using the name. For example: + +```console +% terraform import aws_config_organization_custom_rule.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_organization_managed_rule.html.markdown b/website/docs/cdktf/python/r/config_organization_managed_rule.html.markdown new file mode 100644 index 00000000000..15668ecb2d1 --- /dev/null +++ b/website/docs/cdktf/python/r/config_organization_managed_rule.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_organization_managed_rule" +description: |- + Manages a Config Organization Managed Rule +--- + + + +# Resource: aws_config_organization_managed_rule + +Manages a Config Organization Managed Rule. More information about these rules can be found in the [Enabling AWS Config Rules Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/config-rule-multi-account-deployment.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. For working with Organization Custom Rules (those invoking a custom Lambda Function), see the [`aws_config_organization_custom_rule` resource](/docs/providers/aws/r/config_organization_custom_rule.html). + +~> **NOTE:** This resource must be created in the Organization master account and rules will include the master account unless its ID is added to the `excluded_accounts` argument. + +~> **NOTE:** Every Organization account except those configured in the `excluded_accounts` argument must have a Configuration Recorder with proper IAM permissions before the rule will successfully create or update. See also the [`aws_config_configuration_recorder` resource](/docs/providers/aws/r/config_configuration_recorder.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
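+# IAM_PASSWORD_POLICY below is an AWS managed rule identifier; other valid
+# values are listed in the AWS Config Managed Rules documentation linked
+# under rule_identifier.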
+# +from imports.aws.config_organization_managed_rule import ConfigOrganizationManagedRule +from imports.aws.organizations_organization import OrganizationsOrganization +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = OrganizationsOrganization(self, "example", + aws_service_access_principals=["config-multiaccountsetup.amazonaws.com"], + feature_set="ALL" + ) + aws_config_organization_managed_rule_example = + ConfigOrganizationManagedRule(self, "example_1", + depends_on=[example], + name="example", + rule_identifier="IAM_PASSWORD_POLICY" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_config_organization_managed_rule_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the rule +* `rule_identifier` - (Required) Identifier of an available AWS Config Managed Rule to call. For available values, see the [List of AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html) documentation +* `description` - (Optional) Description of the rule +* `excluded_accounts` - (Optional) List of AWS account identifiers to exclude from the rule +* `input_parameters` - (Optional) A string in JSON format that is passed to the AWS Config Rule Lambda Function +* `maximum_execution_frequency` - (Optional) The maximum frequency with which AWS Config runs evaluations for a rule, if the rule is triggered at a periodic frequency. Defaults to `TwentyFour_Hours` for periodic frequency triggered rules. Valid values: `One_Hour`, `Three_Hours`, `Six_Hours`, `Twelve_Hours`, or `TwentyFour_Hours`. +* `resource_id_scope` - (Optional) Identifier of the AWS resource to evaluate +* `resource_types_scope` - (Optional) List of types of AWS resources to evaluate +* `tag_key_scope` - (Optional, Required if `tag_value_scope` is configured) Tag key of AWS resources to evaluate +* `tag_value_scope` - (Optional) Tag value of AWS resources to evaluate + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the rule + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `delete` - (Default `5m`) +* `update` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Organization Managed Rules using the name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Config Organization Managed Rules using the name. 
For example: + +```console +% terraform import aws_config_organization_managed_rule.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/config_remediation_configuration.html.markdown b/website/docs/cdktf/python/r/config_remediation_configuration.html.markdown new file mode 100644 index 00000000000..81fb2eeecdb --- /dev/null +++ b/website/docs/cdktf/python/r/config_remediation_configuration.html.markdown @@ -0,0 +1,136 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_remediation_configuration" +description: |- + Provides an AWS Config Remediation Configuration. +--- + + + +# Resource: aws_config_remediation_configuration + +Provides an AWS Config Remediation Configuration. + +~> **Note:** Config Remediation Configuration requires an existing [Config Rule](/docs/providers/aws/r/config_config_rule.html) to be present. + +## Example Usage + +AWS managed rules can be used by setting the source owner to `AWS` and the source identifier to the name of the managed rule. More information about AWS managed rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.config_config_rule import ConfigConfigRule +from imports.aws.config_remediation_configuration import ConfigRemediationConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + this_var = ConfigConfigRule(self, "this", + name="example", + source=ConfigConfigRuleSource( + owner="AWS", + source_identifier="S3_BUCKET_VERSIONING_ENABLED" + ) + ) + aws_config_remediation_configuration_this = + ConfigRemediationConfiguration(self, "this_1", + automatic=True, + config_rule_name=this_var.name, + execution_controls=ConfigRemediationConfigurationExecutionControls( + ssm_controls=ConfigRemediationConfigurationExecutionControlsSsmControls( + concurrent_execution_rate_percentage=25, + error_percentage=20 + ) + ), + maximum_automatic_attempts=10, + parameter=[ConfigRemediationConfigurationParameter( + name="AutomationAssumeRole", + static_value="arn:aws:iam::875924563244:role/security_config" + ), ConfigRemediationConfigurationParameter( + name="BucketName", + resource_value="RESOURCE_ID" + ), ConfigRemediationConfigurationParameter( + name="SSEAlgorithm", + static_value="AES256" + ) + ], + resource_type="AWS::S3::Bucket", + retry_attempt_seconds=600, + target_id="AWS-EnableS3BucketEncryption", + target_type="SSM_DOCUMENT", + target_version="1" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_config_remediation_configuration_this.override_logical_id("this") +``` + +## Argument Reference + +The following arguments are required: + +* `config_rule_name` - (Required) Name of the AWS Config rule. +* `target_id` - (Required) Target ID is the name of the public document. +* `target_type` - (Required) Type of the target. Target executes remediation. For example, SSM document. + +The following arguments are optional: + +* `automatic` - (Optional) Remediation is triggered automatically if `true`. 
+* `execution_controls` - (Optional) Configuration block for execution controls. See below.
+* `maximum_automatic_attempts` - (Optional) Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
+* `parameter` - (Optional) Can be specified multiple times for each parameter. Each parameter block supports arguments below.
+* `resource_type` - (Optional) Type of resource.
+* `retry_attempt_seconds` - (Optional) Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
+* `target_version` - (Optional) Version of the target. For example, version of the SSM document.
+
+### `execution_controls`
+
+* `ssm_controls` - (Required) Configuration block for SSM controls. See below.
+
+#### `ssm_controls`
+
+One or both of these values are required.
+
+* `concurrent_execution_rate_percentage` - (Optional) Maximum percentage of remediation actions allowed to run in parallel on the non-compliant resources for that specific rule. The default value is 10%.
+* `error_percentage` - (Optional) Percentage of errors that are allowed before SSM stops running automations on non-compliant resources for that specific rule. The default is 50%.
+
+### `parameter`
+
+The value is either a dynamic (resource) value or a static value; you must specify one or the other.
+
+* `name` - (Required) Name of the attribute.
+* `resource_value` - (Optional) Value is dynamic and changes at run-time.
+* `static_value` - (Optional) Value is static and does not change at run-time.
+* `static_values` - (Optional) List of static values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Config Remediation Configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Remediation Configurations using the `config_rule_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Remediation Configurations using the `config_rule_name`. For example:
+
+```console
+% terraform import aws_config_remediation_configuration.this example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/connect_bot_association.markdown b/website/docs/cdktf/python/r/connect_bot_association.markdown
new file mode 100644
index 00000000000..23e9359fdc5
--- /dev/null
+++ b/website/docs/cdktf/python/r/connect_bot_association.markdown
@@ -0,0 +1,146 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_bot_association"
+description: |-
+  Associates an Amazon Connect instance with an Amazon Lex (V1) bot
+---
+
+
+
+# Resource: aws_connect_bot_association
+
+Allows the specified Amazon Connect instance to access the specified Amazon Lex (V1) bot. For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) and [Add an Amazon Lex bot](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-lex.html).
+
+~> **NOTE:** This resource currently only supports Amazon Lex (V1) Associations.
+ +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_bot_association import ConnectBotAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectBotAssociation(self, "example", + instance_id=Token.as_string(aws_connect_instance_example.id), + lex_bot=ConnectBotAssociationLexBot( + lex_region="us-west-2", + name="Test" + ) + ) +``` + +### Including a sample Lex bot + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_bot_association import ConnectBotAssociation +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.lex_bot import LexBot +from imports.aws.lex_intent import LexIntent +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = LexIntent(self, "example", + create_version=True, + fulfillment_activity=LexIntentFulfillmentActivity( + type="ReturnIntent" + ), + name="connect_lex_intent", + sample_utterances=["I would like to pick up flowers."] + ) + current = DataAwsRegion(self, "current") + aws_lex_bot_example = LexBot(self, "example_2", + abort_statement=LexBotAbortStatement( + message=[LexBotAbortStatementMessage( + content="Sorry, I am not able to assist at this time.", + content_type="PlainText" + ) + ] + ), + child_directed=False, + clarification_prompt=LexBotClarificationPrompt( + max_attempts=2, + message=[LexBotClarificationPromptMessage( + content="I didn't understand you, what would you like to do?", + content_type="PlainText" + ) + ] + ), + intent=[LexBotIntent( + intent_name=example.name, + intent_version="1" + ) + ], + name="connect_lex_bot", + process_behavior="BUILD" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lex_bot_example.override_logical_id("example") + aws_connect_bot_association_example = ConnectBotAssociation(self, "example_3", + instance_id=Token.as_string(aws_connect_instance_example.id), + lex_bot=ConnectBotAssociationLexBot( + lex_region=Token.as_string(current.name), + name=Token.as_string(aws_lex_bot_example.name) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_connect_bot_association_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instance_id` - (Required) The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance. +* `lex_bot` - (Required) Configuration information of an Amazon Lex (V1) bot. Detailed below. + +### lex_bot + +The `lex_bot` configuration block supports the following: + +* `name` - (Required) The name of the Amazon Lex (V1) bot. +* `lex_region` - (Optional) The Region that the Amazon Lex (V1) bot was created in. Defaults to current region. 
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Connect instance ID, Lex (V1) bot name, and Lex (V1) bot region separated by colons (`:`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_connect_bot_association` using the Amazon Connect instance ID, Lex (V1) bot name, and Lex (V1) bot region separated by colons (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_connect_bot_association` using the Amazon Connect instance ID, Lex (V1) bot name, and Lex (V1) bot region separated by colons (`:`). For example:
+
+```console
+% terraform import aws_connect_bot_association.example aaaaaaaa-bbbb-cccc-dddd-111111111111:Example:us-west-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/connect_contact_flow.html.markdown b/website/docs/cdktf/python/r/connect_contact_flow.html.markdown
new file mode 100644
index 00000000000..3f4afa1b03c
--- /dev/null
+++ b/website/docs/cdktf/python/r/connect_contact_flow.html.markdown
@@ -0,0 +1,152 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_contact_flow"
+description: |-
+  Provides details about a specific Amazon Connect Contact Flow.
+---
+
+
+
+# Resource: aws_connect_contact_flow
+
+Provides an Amazon Connect Contact Flow resource. For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
+
+This resource embeds or references Contact Flows specified in Amazon Connect Contact Flow Language. For more information see
+[Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html)
+
+!> **WARN:** Contact Flows exported from the Console (see [Contact Flow import/export](https://docs.aws.amazon.com/connect/latest/adminguide/contact-flow-import-export.html)) are not in the Amazon Connect Contact Flow Language and cannot be used with this resource. Instead, export the flow content with the AWS CLI [`describe-contact-flow`](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/connect/describe-contact-flow.html) command.
+See the [example](#with-external-content) below, which uses `jq` to extract the `Content` attribute and save it to a local file.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.connect_contact_flow import ConnectContactFlow
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectContactFlow(self, "test",
+            content=Token.as_string(
+                Fn.jsonencode({
+                    "Actions": [{
+                        "Identifier": "12345678-1234-1234-1234-123456789012",
+                        "Parameters": {
+                            "Text": "Thanks for calling the sample flow!"
+ }, + "Transitions": { + "Conditions": [], + "Errors": [], + "NextAction": "abcdef-abcd-abcd-abcd-abcdefghijkl" + }, + "Type": "MessageParticipant" + }, { + "Identifier": "abcdef-abcd-abcd-abcd-abcdefghijkl", + "Parameters": {}, + "Transitions": {}, + "Type": "DisconnectParticipant" + } + ], + "StartAction": "12345678-1234-1234-1234-123456789012", + "Version": "2019-10-30" + })), + description="Test Contact Flow Description", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Test", + tags={ + "Application": "Terraform", + "Method": "Create", + "Name": "Test Contact Flow" + }, + type="CONTACT_FLOW" + ) +``` + +### With External Content + +Use the AWS CLI to extract Contact Flow Content: + +```console +% aws connect describe-contact-flow --instance-id 1b3c5d8-1b3c-1b3c-1b3c-1b3c5d81b3c5 --contact-flow-id c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 --region us-west-2 | jq '.ContactFlow.Content | fromjson' > contact_flow.json +``` + +Use the generated file as input: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_contact_flow import ConnectContactFlow +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectContactFlow(self, "test", + content_hash=Token.as_string(Fn.filebase64sha256("contact_flow.json")), + description="Test Contact Flow Description", + filename="contact_flow.json", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Test", + tags={ + "Application": "Terraform", + "Method": "Create", + "Name": "Test Contact Flow" + }, + type="CONTACT_FLOW" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Optional) Specifies the content of the Contact Flow, provided as a JSON string, written in Amazon Connect Contact Flow Language. If defined, the `filename` argument cannot be used. +* `content_hash` - (Optional) Used to trigger updates. Must be set to a base64-encoded SHA256 hash of the Contact Flow source specified with `filename`. The usual way to set this is filebase64sha256("mycontact_flow.json") (Terraform 0.11.12 and later) or base64sha256(file("mycontact_flow.json")) (Terraform 0.11.11 and earlier), where "mycontact_flow.json" is the local filename of the Contact Flow source. +* `description` - (Optional) Specifies the description of the Contact Flow. +* `filename` - (Optional) The path to the Contact Flow source within the local filesystem. Conflicts with `content`. +* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) Specifies the name of the Contact Flow. +* `tags` - (Optional) Tags to apply to the Contact Flow. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional, Forces new resource) Specifies the type of the Contact Flow. Defaults to `CONTACT_FLOW`. Allowed Values are: `CONTACT_FLOW`, `CUSTOMER_QUEUE`, `CUSTOMER_HOLD`, `CUSTOMER_WHISPER`, `AGENT_HOLD`, `AGENT_WHISPER`, `OUTBOUND_WHISPER`, `AGENT_TRANSFER`, `QUEUE_TRANSFER`. 
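+
+As an alternative to the `filename`/`content_hash` pair shown above, the exported flow can be read directly into `content` with Terraform's `file()` function, exposed in CDKTF as `Fn.file`. A minimal sketch (not generated by `cdktf convert`), assuming the `contact_flow.json` file produced by the CLI step above sits next to the stack:
+
+```python
+from constructs import Construct
+from cdktf import Fn, TerraformStack
+from imports.aws.connect_contact_flow import ConnectContactFlow
+class ContactFlowFromFile(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Reading the file into `content` means any change to
+        # contact_flow.json is picked up on the next plan without
+        # maintaining a separate content_hash argument.
+        ConnectContactFlow(self, "from_file",
+            content=Fn.file("contact_flow.json"),
+            description="Test Contact Flow Description",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="Test",
+            type="CONTACT_FLOW"
+        )
+```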
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Contact Flow.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Contact Flow separated by a colon (`:`).
+* `contact_flow_id` - The identifier of the Contact Flow.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Contact Flows using the `instance_id` and `contact_flow_id` separated by a colon (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Amazon Connect Contact Flows using the `instance_id` and `contact_flow_id` separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_connect_contact_flow.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/connect_contact_flow_module.html.markdown b/website/docs/cdktf/python/r/connect_contact_flow_module.html.markdown
new file mode 100644
index 00000000000..e9c14103599
--- /dev/null
+++ b/website/docs/cdktf/python/r/connect_contact_flow_module.html.markdown
@@ -0,0 +1,164 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_contact_flow_module"
+description: |-
+  Provides details about a specific Amazon Connect Contact Flow Module.
+---
+
+
+
+# Resource: aws_connect_contact_flow_module
+
+Provides an Amazon Connect Contact Flow Module resource. For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
+
+This resource embeds or references Contact Flow Modules specified in Amazon Connect Contact Flow Language. For more information see
+[Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html)
+
+!> **WARN:** Contact Flow Modules exported from the Console (see [Contact Flow import/export](https://docs.aws.amazon.com/connect/latest/adminguide/contact-flow-import-export.html), which applies equally to Contact Flow Modules) are not in the Amazon Connect Contact Flow Language and cannot be used with this resource. Instead, export the module content with the AWS CLI [`describe-contact-flow-module`](https://docs.aws.amazon.com/cli/latest/reference/connect/describe-contact-flow-module.html) command.
+See the [example](#with-external-content) below, which uses `jq` to extract the `Content` attribute and save it to a local file.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.connect_contact_flow_module import ConnectContactFlowModule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectContactFlowModule(self, "example", + content=Token.as_string( + Fn.jsonencode({ + "Actions": [{ + "Identifier": "12345678-1234-1234-1234-123456789012", + "Parameters": { + "Text": "Hello contact flow module" + }, + "Transitions": { + "Conditions": [], + "Errors": [], + "NextAction": "abcdef-abcd-abcd-abcd-abcdefghijkl" + }, + "Type": "MessageParticipant" + }, { + "Identifier": "abcdef-abcd-abcd-abcd-abcdefghijkl", + "Parameters": {}, + "Transitions": {}, + "Type": "DisconnectParticipant" + } + ], + "Settings": { + "InputParameters": [], + "OutputParameters": [], + "Transitions": [{ + "Description": "", + "DisplayName": "Success", + "ReferenceName": "Success" + }, { + "Description": "", + "DisplayName": "Error", + "ReferenceName": "Error" + } + ] + }, + "StartAction": "12345678-1234-1234-1234-123456789012", + "Version": "2019-10-30" + })), + description="Example Contact Flow Module Description", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example", + tags={ + "Application": "Terraform", + "Method": "Create", + "Name": "Example Contact Flow Module" + } + ) +``` + +### With External Content + +Use the AWS CLI to extract Contact Flow Content: + +```console +% aws connect describe-contact-flow-module --instance-id 1b3c5d8-1b3c-1b3c-1b3c-1b3c5d81b3c5 --contact-flow-module-id c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 --region us-west-2 | jq '.ContactFlowModule.Content | fromjson' > contact_flow_module.json +``` + +Use the generated file as input: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_contact_flow_module import ConnectContactFlowModule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectContactFlowModule(self, "example", + content_hash=Token.as_string( + Fn.filebase64sha256("contact_flow_module.json")), + description="Example Contact Flow Module Description", + filename="contact_flow_module.json", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example", + tags={ + "Application": "Terraform", + "Method": "Create", + "Name": "Example Contact Flow Module" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Optional) Specifies the content of the Contact Flow Module, provided as a JSON string, written in Amazon Connect Contact Flow Language. If defined, the `filename` argument cannot be used. +* `content_hash` - (Optional) Used to trigger updates. Must be set to a base64-encoded SHA256 hash of the Contact Flow Module source specified with `filename`. The usual way to set this is filebase64sha256("contact_flow_module.json") (Terraform 0.11.12 and later) or base64sha256(file("contact_flow_module.json")) (Terraform 0.11.11 and earlier), where "contact_flow_module.json" is the local filename of the Contact Flow Module source. +* `description` - (Optional) Specifies the description of the Contact Flow Module. +* `filename` - (Optional) The path to the Contact Flow Module source within the local filesystem. Conflicts with `content`. 
+* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) Specifies the name of the Contact Flow Module. +* `tags` - (Optional) Tags to apply to the Contact Flow Module. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Contact Flow Module. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Contact Flow Module separated by a colon (`:`). +* `contact_flow_module_id` - The identifier of the Contact Flow Module. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Contact Flow Modules using the `instance_id` and `contact_flow_module_id` separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Contact Flow Modules using the `instance_id` and `contact_flow_module_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_contact_flow_module.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_hours_of_operation.html.markdown b/website/docs/cdktf/python/r/connect_hours_of_operation.html.markdown new file mode 100644 index 00000000000..150eb357b86 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_hours_of_operation.html.markdown @@ -0,0 +1,118 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_hours_of_operation" +description: |- + Provides details about a specific Amazon Connect Hours of Operation. +--- + + + +# Resource: aws_connect_hours_of_operation + +Provides an Amazon Connect Hours of Operation resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.connect_hours_of_operation import ConnectHoursOfOperation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectHoursOfOperation(self, "test",
+            config=[ConnectHoursOfOperationConfigA(
+                day="MONDAY",
+                end_time=ConnectHoursOfOperationConfigEndTime(
+                    hours=23,
+                    minutes=8
+                ),
+                start_time=ConnectHoursOfOperationConfigStartTime(
+                    hours=8,
+                    minutes=0
+                )
+            ), ConnectHoursOfOperationConfigA(
+                day="TUESDAY",
+                end_time=ConnectHoursOfOperationConfigEndTime(
+                    hours=21,
+                    minutes=0
+                ),
+                start_time=ConnectHoursOfOperationConfigStartTime(
+                    hours=9,
+                    minutes=0
+                )
+            )
+            ],
+            description="Monday office hours",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="Office Hours",
+            tags={
+                "Name": "Example Hours of Operation"
+            },
+            time_zone="EST"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `config` - (Required) One or more config blocks that define the configuration information for the hours of operation: day, start time, and end time. Config blocks are documented below.
+* `description` - (Optional) Specifies the description of the Hours of Operation.
+* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `name` - (Required) Specifies the name of the Hours of Operation.
+* `tags` - (Optional) Tags to apply to the Hours of Operation. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `time_zone` - (Required) Specifies the time zone of the Hours of Operation.
+
+A `config` block supports the following arguments:
+
+* `day` - (Required) Specifies the day that the hours of operation apply to.
+* `end_time` - (Required) An end time block specifies the time that your contact center closes. The `end_time` block is documented below.
+* `start_time` - (Required) A start time block specifies the time that your contact center opens. The `start_time` block is documented below.
+
+An `end_time` block supports the following arguments:
+
+* `hours` - (Required) Specifies the hour of closing.
+* `minutes` - (Required) Specifies the minute of closing.
+
+A `start_time` block supports the following arguments:
+
+* `hours` - (Required) Specifies the hour of opening.
+* `minutes` - (Required) Specifies the minute of opening.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Hours of Operation.
+* `hours_of_operation_id` - The identifier for the hours of operation.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Hours of Operation separated by a colon (`:`).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Hours of Operations using the `instance_id` and `hours_of_operation_id` separated by a colon (`:`).
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Hours of Operations using the `instance_id` and `hours_of_operation_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_hours_of_operation.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_instance.html.markdown b/website/docs/cdktf/python/r/connect_instance.html.markdown new file mode 100644 index 00000000000..e4b49111b76 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_instance.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_instance" +description: |- + Provides details about a specific Connect Instance. +--- + + + +# Resource: aws_connect_instance + +Provides an Amazon Connect instance resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +!> **WARN:** Amazon Connect enforces a limit of [100 combined instance creation and deletions every 30 days](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html#feature-limits). For example, if you create 80 instances and delete 20 of them, you must wait 30 days to create or delete another instance. Use care when creating or deleting instances. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_instance import ConnectInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectInstance(self, "test", + identity_management_type="CONNECT_MANAGED", + inbound_calls_enabled=True, + instance_alias="friendly-name-connect", + outbound_calls_enabled=True + ) +``` + +## Example Usage with Existing Active Directory + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_instance import ConnectInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectInstance(self, "test", + directory_id=Token.as_string(aws_directory_service_directory_test.id), + identity_management_type="EXISTING_DIRECTORY", + inbound_calls_enabled=True, + instance_alias="friendly-name-connect", + outbound_calls_enabled=True + ) +``` + +## Example Usage with SAML + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.connect_instance import ConnectInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectInstance(self, "test",
+            identity_management_type="SAML",
+            inbound_calls_enabled=True,
+            instance_alias="friendly-name-connect",
+            outbound_calls_enabled=True
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `auto_resolve_best_voices_enabled` - (Optional) Specifies whether auto resolve best voices is enabled. Defaults to `true`.
+* `contact_flow_logs_enabled` - (Optional) Specifies whether contact flow logs are enabled. Defaults to `false`.
+* `contact_lens_enabled` - (Optional) Specifies whether contact lens is enabled. Defaults to `true`.
+* `directory_id` - (Optional) The identifier for the directory if identity_management_type is `EXISTING_DIRECTORY`.
+* `early_media_enabled` - (Optional) Specifies whether early media for outbound calls is enabled. Defaults to `true` if outbound calls are enabled.
+* `identity_management_type` - (Required) Specifies the identity management type attached to the instance. Allowed Values are: `SAML`, `CONNECT_MANAGED`, `EXISTING_DIRECTORY`.
+* `inbound_calls_enabled` - (Required) Specifies whether inbound calls are enabled.
+* `instance_alias` - (Optional) Specifies the name of the instance. Required if `directory_id` not specified.
+* `multi_party_conference_enabled` - (Optional) Specifies whether multi-party calls/conferences are enabled. Defaults to `false`.
+* `outbound_calls_enabled` - (Required) Specifies whether outbound calls are enabled.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the instance.
+* `arn` - Amazon Resource Name (ARN) of the instance.
+* `created_time` - When the instance was created.
+* `service_role` - The service role of the instance.
+* `status` - The state of the instance.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Connect instances using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Connect instances using the `id`. For example:
+
+```console
+% terraform import aws_connect_instance.example f1288a1f-6193-445a-b47e-af739b2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/connect_instance_storage_config.html.markdown b/website/docs/cdktf/python/r/connect_instance_storage_config.html.markdown
new file mode 100644
index 00000000000..4d132f35f3f
--- /dev/null
+++ b/website/docs/cdktf/python/r/connect_instance_storage_config.html.markdown
@@ -0,0 +1,238 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_instance_storage_config"
+description: |-
+  Provides details about a specific Amazon Connect Instance Storage Config.
+---
+
+
+
+# Resource: aws_connect_instance_storage_config
+
+Provides an Amazon Connect Instance Storage Config resource.
For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Storage Config Kinesis Firehose Config + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_instance_storage_config import ConnectInstanceStorageConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectInstanceStorageConfig(self, "example", + instance_id=Token.as_string(aws_connect_instance_example.id), + resource_type="CONTACT_TRACE_RECORDS", + storage_config=ConnectInstanceStorageConfigStorageConfig( + kinesis_firehose_config=ConnectInstanceStorageConfigStorageConfigKinesisFirehoseConfig( + firehose_arn=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn) + ), + storage_type="KINESIS_FIREHOSE" + ) + ) +``` + +### Storage Config Kinesis Stream Config + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_instance_storage_config import ConnectInstanceStorageConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectInstanceStorageConfig(self, "example", + instance_id=Token.as_string(aws_connect_instance_example.id), + resource_type="CONTACT_TRACE_RECORDS", + storage_config=ConnectInstanceStorageConfigStorageConfig( + kinesis_stream_config=ConnectInstanceStorageConfigStorageConfigKinesisStreamConfig( + stream_arn=Token.as_string(aws_kinesis_stream_example.arn) + ), + storage_type="KINESIS_STREAM" + ) + ) +``` + +### Storage Config Kinesis Video Stream Config + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_instance_storage_config import ConnectInstanceStorageConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectInstanceStorageConfig(self, "example", + instance_id=Token.as_string(aws_connect_instance_example.id), + resource_type="MEDIA_STREAMS", + storage_config=ConnectInstanceStorageConfigStorageConfig( + kinesis_video_stream_config=ConnectInstanceStorageConfigStorageConfigKinesisVideoStreamConfig( + encryption_config=ConnectInstanceStorageConfigStorageConfigKinesisVideoStreamConfigEncryptionConfig( + encryption_type="KMS", + key_id=Token.as_string(aws_kms_key_example.arn) + ), + prefix="example", + retention_period_hours=3 + ), + storage_type="KINESIS_VIDEO_STREAM" + ) + ) +``` + +### Storage Config S3 Config + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.connect_instance_storage_config import ConnectInstanceStorageConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectInstanceStorageConfig(self, "example", + instance_id=Token.as_string(aws_connect_instance_example.id), + resource_type="CHAT_TRANSCRIPTS", + storage_config=ConnectInstanceStorageConfigStorageConfig( + s3_config=ConnectInstanceStorageConfigStorageConfigS3Config( + bucket_name=Token.as_string(aws_s3_bucket_example.id), + bucket_prefix="example" + ), + storage_type="S3" + ) + ) +``` + +### Storage Config S3 Config with Encryption Config + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_instance_storage_config import ConnectInstanceStorageConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectInstanceStorageConfig(self, "example", + instance_id=Token.as_string(aws_connect_instance_example.id), + resource_type="CHAT_TRANSCRIPTS", + storage_config=ConnectInstanceStorageConfigStorageConfig( + s3_config=ConnectInstanceStorageConfigStorageConfigS3Config( + bucket_name=Token.as_string(aws_s3_bucket_example.id), + bucket_prefix="example", + encryption_config=ConnectInstanceStorageConfigStorageConfigS3ConfigEncryptionConfig( + encryption_type="KMS", + key_id=Token.as_string(aws_kms_key_example.arn) + ) + ), + storage_type="S3" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `resource_type` - (Required) A valid resource type. Valid Values: `AGENT_EVENTS` | `ATTACHMENTS` | `CALL_RECORDINGS` | `CHAT_TRANSCRIPTS` | `CONTACT_EVALUATIONS` | `CONTACT_TRACE_RECORDS` | `MEDIA_STREAMS` | `REAL_TIME_CONTACT_ANALYSIS_SEGMENTS` | `SCHEDULED_REPORTS`. +* `storage_config` - (Required) Specifies the storage configuration options for the Connect Instance. [Documented below](#storage_config). + +### `storage_config` + +The `storage_config` configuration block supports the following arguments: + +* `kinesis_firehose_config` - (Required if `type` is set to `KINESIS_FIREHOSE`) A block that specifies the configuration of the Kinesis Firehose delivery stream. [Documented below](#kinesis_firehose_config). +* `kinesis_stream_config` - (Required if `type` is set to `KINESIS_STREAM`) A block that specifies the configuration of the Kinesis data stream. [Documented below](#kinesis_stream_config). +* `kinesis_video_stream_config` - (Required if `type` is set to `KINESIS_VIDEO_STREAM`) A block that specifies the configuration of the Kinesis video stream. [Documented below](#kinesis_video_stream_config). +* `s3_config` - (Required if `type` is set to `S3`) A block that specifies the configuration of S3 Bucket. [Documented below](#s3_config). +* `storage_type` - (Required) A valid storage type. Valid Values: `S3` | `KINESIS_VIDEO_STREAM` | `KINESIS_STREAM` | `KINESIS_FIREHOSE`. + +#### `kinesis_firehose_config` + +The `kinesis_firehose_config` configuration block supports the following arguments: + +* `firehose_arn` - (Required) The Amazon Resource Name (ARN) of the delivery stream. 
+ +#### `kinesis_stream_config` + +The `kinesis_stream_config` configuration block supports the following arguments: + +* `stream_arn` - (Required) The Amazon Resource Name (ARN) of the data stream. + +#### `kinesis_video_stream_config` + +The `kinesis_video_stream_config` configuration block supports the following arguments: + +* `encryption_config` - (Required) The encryption configuration. [Documented below](#encryption_config). +* `prefix` - (Required) The prefix of the video stream. Minimum length of `1`. Maximum length of `128`. When read from the state, the value returned is `-connect--contact-` since the API appends additional details to the `prefix`. +* `retention_period_hours` - (Required) The number of hours data is retained in the stream. Kinesis Video Streams retains the data in a data store that is associated with the stream. Minimum value of `0`. Maximum value of `87600`. A value of `0`, indicates that the stream does not persist data. + +#### `s3_config` + +The `s3_config` configuration block supports the following arguments: + +* `bucket_name` - (Required) The S3 bucket name. +* `bucket_prefix` - (Required) The S3 bucket prefix. +* `encryption_config` - (Optional) The encryption configuration. [Documented below](#encryption_config). + +#### `encryption_config` + +The `encryption_config` configuration block supports the following arguments: + +* `encryption_type` - (Required) The type of encryption. Valid Values: `KMS`. +* `key_id` - (Required) The full ARN of the encryption key. Be sure to provide the full ARN of the encryption key, not just the ID. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `association_id` - The existing association identifier that uniquely identifies the resource type and storage config for the given instance ID. +* `id` - The identifier of the hosting Amazon Connect Instance, `association_id`, and `resource_type` separated by a colon (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Instance Storage Configs using the `instance_id`, `association_id`, and `resource_type` separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Instance Storage Configs using the `instance_id`, `association_id`, and `resource_type` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_instance_storage_config.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5:CHAT_TRANSCRIPTS +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_lambda_function_association.markdown b/website/docs/cdktf/python/r/connect_lambda_function_association.markdown new file mode 100644 index 00000000000..9af1f57f20c --- /dev/null +++ b/website/docs/cdktf/python/r/connect_lambda_function_association.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_lambda_function_association" +description: |- + Provides details about a specific Connect Lambda Function Association. 
+--- + + + +# Resource: aws_connect_lambda_function_association + +Provides an Amazon Connect Lambda Function Association. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) and [Invoke AWS Lambda functions](https://docs.aws.amazon.com/connect/latest/adminguide/connect-lambda-functions.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_lambda_function_association import ConnectLambdaFunctionAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectLambdaFunctionAssociation(self, "example", + function_arn=Token.as_string(aws_lambda_function_example.arn), + instance_id=Token.as_string(aws_connect_instance_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `function_arn` - (Required) Amazon Resource Name (ARN) of the Lambda Function, omitting any version or alias qualifier. +* `instance_id` - (Required) The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Connect instance ID and Lambda Function ARN separated by a comma (`,`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_connect_lambda_function_association` using the `instance_id` and `function_arn` separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_connect_lambda_function_association` using the `instance_id` and `function_arn` separated by a comma (`,`). For example: + +```console +% terraform import aws_connect_lambda_function_association.example aaaaaaaa-bbbb-cccc-dddd-111111111111,arn:aws:lambda:us-west-2:123456789123:function:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_phone_number.html.markdown b/website/docs/cdktf/python/r/connect_phone_number.html.markdown new file mode 100644 index 00000000000..e7f59332c80 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_phone_number.html.markdown @@ -0,0 +1,141 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_phone_number" +description: |- + Provides details about a specific Amazon Connect Phone Number. +--- + + + +# Resource: aws_connect_phone_number + +Provides an Amazon Connect Phone Number resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_phone_number import ConnectPhoneNumber +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectPhoneNumber(self, "example", + country_code="US", + tags={ + "hello": "world" + }, + target_arn=Token.as_string(aws_connect_instance_example.arn), + type="DID" + ) +``` + +### Description + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_phone_number import ConnectPhoneNumber +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectPhoneNumber(self, "example", + country_code="US", + description="example description", + target_arn=Token.as_string(aws_connect_instance_example.arn), + type="DID" + ) +``` + +### Prefix to filter phone numbers + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_phone_number import ConnectPhoneNumber +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectPhoneNumber(self, "example", + country_code="US", + prefix="+18005", + target_arn=Token.as_string(aws_connect_instance_example.arn), + type="DID" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `country_code` - (Required, Forces new resource) The ISO country code. For a list of Valid values, refer to [PhoneNumberCountryCode](https://docs.aws.amazon.com/connect/latest/APIReference/API_SearchAvailablePhoneNumbers.html#connect-SearchAvailablePhoneNumbers-request-PhoneNumberCountryCode). +* `description` - (Optional, Forces new resource) The description of the phone number. +* `prefix` - (Optional, Forces new resource) The prefix of the phone number that is used to filter available phone numbers. If provided, it must contain `+` as part of the country code. Do not specify this argument when importing the resource. +* `tags` - (Optional) Tags to apply to the Phone Number. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `target_arn` - (Required) The Amazon Resource Name (ARN) for Amazon Connect instances that phone numbers are claimed to. +* `type` - (Required, Forces new resource) The type of phone number. Valid Values: `TOLL_FREE` | `DID`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the phone number. +* `phone_number` - The phone number. Phone numbers are formatted `[+] [country code] [subscriber number including area code]`. +* `id` - The identifier of the phone number. +* `status` - A block that specifies status of the phone number. [Documented below](#status). 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### `status` + +The `status` configuration block supports the following attributes: + +* `message` - The status message. +* `status` - The status of the phone number. Valid Values: `CLAIMED` | `IN_PROGRESS` | `FAILED`. + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `2m`) +* `update` - (Default `2m`) +* `delete` - (Default `2m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Phone Numbers using its `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Phone Numbers using its `id`. For example: + +```console +% terraform import aws_connect_phone_number.example 12345678-abcd-1234-efgh-9876543210ab +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_queue.html.markdown b/website/docs/cdktf/python/r/connect_queue.html.markdown new file mode 100644 index 00000000000..ac44e1ef3bc --- /dev/null +++ b/website/docs/cdktf/python/r/connect_queue.html.markdown @@ -0,0 +1,147 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_queue" +description: |- + Provides details about a specific Amazon Connect Queue +--- + + + +# Resource: aws_connect_queue + +Provides an Amazon Connect Queue resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_queue import ConnectQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectQueue(self, "test", + description="Example Description", + hours_of_operation_id="12345678-1234-1234-1234-123456789012", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example Name", + tags={ + "Name": "Example Queue" + } + ) +``` + +### With Quick Connect IDs + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.connect_queue import ConnectQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectQueue(self, "test",
+            description="Example Description",
+            hours_of_operation_id="12345678-1234-1234-1234-123456789012",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="Example Name",
+            quick_connect_ids=["12345678-abcd-1234-abcd-123456789012"],
+            tags={
+                "Name": "Example Queue with Quick Connect IDs"
+            }
+        )
+```
+
+### With Outbound Caller Config
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.connect_queue import ConnectQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectQueue(self, "test",
+            description="Example Description",
+            hours_of_operation_id="12345678-1234-1234-1234-123456789012",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            name="Example Name",
+            outbound_caller_config=ConnectQueueOutboundCallerConfig(
+                outbound_caller_id_name="example",
+                outbound_caller_id_number_id="12345678-abcd-1234-abcd-123456789012",
+                outbound_flow_id="87654321-defg-1234-defg-987654321234"
+            ),
+            tags={
+                "Name": "Example Queue with Outbound Caller Config"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) Specifies the description of the Queue.
+* `hours_of_operation_id` - (Required) Specifies the identifier of the Hours of Operation.
+* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `max_contacts` - (Optional) Specifies the maximum number of contacts that can be in the queue before it is considered full. Minimum value of 0.
+* `name` - (Required) Specifies the name of the Queue.
+* `outbound_caller_config` - (Optional) A block that defines the outbound caller ID name, number, and outbound whisper flow. The Outbound Caller Config block is documented below.
+* `quick_connect_ids` - (Optional) Specifies a list of quick connect IDs that determine the quick connects available to agents who are working the queue.
+* `status` - (Optional) Specifies the status of the Queue. Valid values are `ENABLED`, `DISABLED`.
+* `tags` - (Optional) Tags to apply to the Queue. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+An `outbound_caller_config` block supports the following arguments:
+
+* `outbound_caller_id_name` - (Optional) Specifies the caller ID name.
+* `outbound_caller_id_number_id` - (Optional) Specifies the caller ID number.
+* `outbound_flow_id` - (Optional) Specifies the outbound whisper flow to be used during an outbound call.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Queue.
+* `queue_id` - The identifier for the Queue.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Queue separated by a colon (`:`).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Queues using the `instance_id` and `queue_id` separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Queues using the `instance_id` and `queue_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_queue.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_quick_connect.html.markdown b/website/docs/cdktf/python/r/connect_quick_connect.html.markdown new file mode 100644 index 00000000000..48733d2ac35 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_quick_connect.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_quick_connect" +description: |- + Provides details about a specific Amazon Quick Connect +--- + + + +# Resource: aws_connect_quick_connect + +Provides an Amazon Connect Quick Connect resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_quick_connect import ConnectQuickConnect +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectQuickConnect(self, "test", + description="quick connect phone number", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="Example Name", + quick_connect_config=ConnectQuickConnectQuickConnectConfig( + phone_config=[ConnectQuickConnectQuickConnectConfigPhoneConfig( + phone_number="+12345678912" + ) + ], + quick_connect_type="PHONE_NUMBER" + ), + tags={ + "Name": "Example Quick Connect" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Specifies the description of the Quick Connect. +* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) Specifies the name of the Quick Connect. +* `quick_connect_config` - (Required) A block that defines the configuration information for the Quick Connect: `quick_connect_type` and one of `phone_config`, `queue_config`, `user_config` . The Quick Connect Config block is documented below. +* `tags` - (Optional) Tags to apply to the Quick Connect. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+
+A `quick_connect_config` block supports the following arguments:
+
+* `quick_connect_type` - (Required) Specifies the configuration type of the quick connect. Valid values are `PHONE_NUMBER`, `QUEUE`, `USER`.
+* `phone_config` - (Optional) Specifies the phone configuration of the Quick Connect. This is required only if `quick_connect_type` is `PHONE_NUMBER`. The `phone_config` block is documented below.
+* `queue_config` - (Optional) Specifies the queue configuration of the Quick Connect. This is required only if `quick_connect_type` is `QUEUE`. The `queue_config` block is documented below.
+* `user_config` - (Optional) Specifies the user configuration of the Quick Connect. This is required only if `quick_connect_type` is `USER`. The `user_config` block is documented below.
+
+A `phone_config` block supports the following arguments:
+
+* `phone_number` - (Required) Specifies the phone number in E.164 format.
+
+A `queue_config` block supports the following arguments:
+
+* `contact_flow_id` - (Required) Specifies the identifier of the contact flow.
+* `queue_id` - (Required) Specifies the identifier for the queue.
+
+A `user_config` block supports the following arguments:
+
+* `contact_flow_id` - (Required) Specifies the identifier of the contact flow.
+* `user_id` - (Required) Specifies the identifier for the user.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Quick Connect.
+* `quick_connect_id` - The identifier for the Quick Connect.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Quick Connect separated by a colon (`:`).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Quick Connects using the `instance_id` and `quick_connect_id` separated by a colon (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Amazon Connect Quick Connects using the `instance_id` and `quick_connect_id` separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_connect_quick_connect.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/connect_routing_profile.html.markdown b/website/docs/cdktf/python/r/connect_routing_profile.html.markdown
new file mode 100644
index 00000000000..c85760ce0d5
--- /dev/null
+++ b/website/docs/cdktf/python/r/connect_routing_profile.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_routing_profile"
+description: |-
+  Provides details about a specific Amazon Connect Routing Profile.
+---
+
+
+
+# Resource: aws_connect_routing_profile
+
+Provides an Amazon Connect Routing Profile resource.
For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.connect_routing_profile import ConnectRoutingProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectRoutingProfile(self, "example",
+            default_outbound_queue_id="12345678-1234-1234-1234-123456789012",
+            description="example description",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            media_concurrencies=[ConnectRoutingProfileMediaConcurrencies(
+                channel="VOICE",
+                concurrency=1
+            )
+            ],
+            name="example",
+            queue_configs=[ConnectRoutingProfileQueueConfigs(
+                channel="VOICE",
+                delay=2,
+                priority=1,
+                queue_id="12345678-1234-1234-1234-123456789012"
+            )
+            ],
+            tags={
+                "Name": "Example Routing Profile"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `default_outbound_queue_id` - (Required) Specifies the default outbound queue for the Routing Profile.
+* `description` - (Required) Specifies the description of the Routing Profile.
+* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `media_concurrencies` - (Required) One or more `media_concurrencies` blocks that specify the channels that agents can handle in the Contact Control Panel (CCP) for this Routing Profile. The `media_concurrencies` block is documented below.
+* `name` - (Required) Specifies the name of the Routing Profile.
+* `queue_configs` - (Optional) One or more `queue_configs` blocks that specify the inbound queues associated with the routing profile. If no queue is added, the agent can only make outbound calls. The `queue_configs` block is documented below.
+* `tags` - (Optional) Tags to apply to the Routing Profile. If configured with a provider
+[`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+A `media_concurrencies` block supports the following arguments:
+
+* `channel` - (Required) Specifies the channels that agents can handle in the Contact Control Panel (CCP). Valid values are `VOICE`, `CHAT`, `TASK`.
+* `concurrency` - (Required) Specifies the number of contacts an agent can have on a channel simultaneously. Valid Range for `VOICE`: Minimum value of 1. Maximum value of 1. Valid Range for `CHAT`: Minimum value of 1. Maximum value of 10. Valid Range for `TASK`: Minimum value of 1. Maximum value of 10. See the sketch following the `queue_configs` arguments below for a configuration that uses all three channels.
+
+A `queue_configs` block supports the following arguments:
+
+* `channel` - (Required) Specifies the channels agents can handle in the Contact Control Panel (CCP) for this routing profile. Valid values are `VOICE`, `CHAT`, `TASK`.
+* `delay` - (Required) Specifies the delay, in seconds, that a contact should be in the queue before they are routed to an available agent.
+* `priority` - (Required) Specifies the order in which contacts are to be handled for the queue.
+* `queue_id` - (Required) Specifies the identifier for the queue.
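+
+As referenced above, the following is a minimal, hypothetical sketch (not generated by `cdktf convert`) of a Routing Profile whose agents handle all three channels, with concurrency values chosen within the documented ranges; all IDs are placeholders:
+
+```python
+# Hypothetical sketch: one media_concurrencies block per channel, staying
+# within the documented limits (VOICE max 1, CHAT and TASK max 10).
+# Instance and queue IDs are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.connect_routing_profile import (ConnectRoutingProfile,
+    ConnectRoutingProfileMediaConcurrencies)
+class MultiChannelRoutingProfileSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectRoutingProfile(self, "multi_channel",
+            default_outbound_queue_id="12345678-1234-1234-1234-123456789012",
+            description="agents handle voice, chat, and tasks",
+            instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111",
+            media_concurrencies=[
+                ConnectRoutingProfileMediaConcurrencies(channel="VOICE", concurrency=1),
+                ConnectRoutingProfileMediaConcurrencies(channel="CHAT", concurrency=5),
+                ConnectRoutingProfileMediaConcurrencies(channel="TASK", concurrency=10)
+            ],
+            name="multi-channel-example"
+        )
+```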
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Routing Profile. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Routing Profile separated by a colon (`:`). +* `queue_configs` - In addition to the arguments used in the `queue_configs` argument block, there are additional attributes exported within the `queue_configs` block. These additional attributes are documented below. +* `routing_profile_id` - The identifier for the Routing Profile. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +A `queue_configs` block supports the following attributes in addition to the arguments defined earlier: + +* `queue_arn` - ARN for the queue. +* `queue_name` - Name for the queue. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Routing Profiles using the `instance_id` and `routing_profile_id` separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Routing Profiles using the `instance_id` and `routing_profile_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_routing_profile.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_security_profile.html.markdown b/website/docs/cdktf/python/r/connect_security_profile.html.markdown new file mode 100644 index 00000000000..203abf41529 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_security_profile.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_security_profile" +description: |- + Provides details about a specific Amazon Connect Security Profile. +--- + + + +# Resource: aws_connect_security_profile + +Provides an Amazon Connect Security Profile resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_security_profile import ConnectSecurityProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectSecurityProfile(self, "example", + description="example description", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="example", + permissions=["BasicAgentAccess", "OutboundCallAccess"], + tags={ + "Name": "Example Security Profile" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Specifies the description of the Security Profile. 
+* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) Specifies the name of the Security Profile. +* `permissions` - (Optional) Specifies a list of permissions assigned to the security profile. +* `tags` - (Optional) Tags to apply to the Security Profile. If configured with a provider +[`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Security Profile. +* `organization_resource_id` - The organization resource identifier for the security profile. +* `security_profile_id` - The identifier for the Security Profile. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Security Profile separated by a colon (`:`). +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Security Profiles using the `instance_id` and `security_profile_id` separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Security Profiles using the `instance_id` and `security_profile_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_security_profile.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_user.html.markdown b/website/docs/cdktf/python/r/connect_user.html.markdown new file mode 100644 index 00000000000..1a33fe177fd --- /dev/null +++ b/website/docs/cdktf/python/r/connect_user.html.markdown @@ -0,0 +1,237 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user" +description: |- + Provides details about a specific Amazon Connect User +--- + + + +# Resource: aws_connect_user + +Provides an Amazon Connect User resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.connect_user import ConnectUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectUser(self, "example", + identity_info=ConnectUserIdentityInfo( + first_name="example", + last_name="example2" + ), + instance_id=Token.as_string(aws_connect_instance_example.id), + name="example", + password="Password123", + phone_config=ConnectUserPhoneConfig( + after_contact_work_time_limit=0, + phone_type="SOFT_PHONE" + ), + routing_profile_id=Token.as_string(aws_connect_routing_profile_example.routing_profile_id), + security_profile_ids=[ + Token.as_string(aws_connect_security_profile_example.security_profile_id) + ] + ) +``` + +### With hierarchy_group_id + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_user import ConnectUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectUser(self, "example", + hierarchy_group_id=Token.as_string(aws_connect_user_hierarchy_group_example.hierarchy_group_id), + identity_info=ConnectUserIdentityInfo( + first_name="example", + last_name="example2" + ), + instance_id=Token.as_string(aws_connect_instance_example.id), + name="example", + password="Password123", + phone_config=ConnectUserPhoneConfig( + after_contact_work_time_limit=0, + phone_type="SOFT_PHONE" + ), + routing_profile_id=Token.as_string(aws_connect_routing_profile_example.routing_profile_id), + security_profile_ids=[ + Token.as_string(aws_connect_security_profile_example.security_profile_id) + ] + ) +``` + +### With identity_info filled + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_user import ConnectUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectUser(self, "example", + identity_info=ConnectUserIdentityInfo( + email="example@example.com", + first_name="example", + last_name="example2" + ), + instance_id=Token.as_string(aws_connect_instance_example.id), + name="example", + password="Password123", + phone_config=ConnectUserPhoneConfig( + after_contact_work_time_limit=0, + phone_type="SOFT_PHONE" + ), + routing_profile_id=Token.as_string(aws_connect_routing_profile_example.routing_profile_id), + security_profile_ids=[ + Token.as_string(aws_connect_security_profile_example.security_profile_id) + ] + ) +``` + +### With phone_config phone type as desk phone + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.connect_user import ConnectUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectUser(self, "example",
+            instance_id=Token.as_string(aws_connect_instance_example.id),
+            name="example",
+            password="Password123",
+            phone_config=ConnectUserPhoneConfig(
+                after_contact_work_time_limit=0,
+                auto_accept=False,
+                desk_phone_number="+112345678912",
+                phone_type="DESK_PHONE"
+            ),
+            routing_profile_id=Token.as_string(aws_connect_routing_profile_example.routing_profile_id),
+            security_profile_ids=[
+                Token.as_string(aws_connect_security_profile_example.security_profile_id)
+            ]
+        )
+```
+
+### With multiple Security profile ids specified in security_profile_ids
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.connect_user import ConnectUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ConnectUser(self, "example",
+            instance_id=Token.as_string(aws_connect_instance_example.id),
+            name="example",
+            password="Password123",
+            phone_config=ConnectUserPhoneConfig(
+                after_contact_work_time_limit=0,
+                phone_type="SOFT_PHONE"
+            ),
+            routing_profile_id=Token.as_string(aws_connect_routing_profile_example.routing_profile_id),
+            security_profile_ids=[
+                Token.as_string(aws_connect_security_profile_example.security_profile_id), example2.security_profile_id
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `directory_user_id` - (Optional) The identifier of the user account in the directory used for identity management. If Amazon Connect cannot access the directory, you can specify this identifier to authenticate users. If you include the identifier, we assume that Amazon Connect cannot access the directory. Otherwise, the identity information is used to authenticate users from your directory. This parameter is required if you are using an existing directory for identity management in Amazon Connect when Amazon Connect cannot access your directory to authenticate users. If you are using SAML for identity management and include this parameter, an error is returned.
+* `hierarchy_group_id` - (Optional) The identifier of the hierarchy group for the user.
+* `identity_info` - (Optional) A block that contains information about the identity of the user. Documented below.
+* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `name` - (Required) The user name for the account. For instances not using SAML for identity management, the user name can include up to 20 characters. If you are using SAML for identity management, the user name can include up to 64 characters from `[a-zA-Z0-9_-.\@]+`.
+* `password` - (Optional) The password for the user account. A password is required if you are using Amazon Connect for identity management. Otherwise, it is an error to include a password.
+* `phone_config` - (Required) A block that contains information about the phone settings for the user. Documented below.
+* `routing_profile_id` - (Required) The identifier of the routing profile for the user.
+* `security_profile_ids` - (Required) A list of identifiers for the security profiles for the user.
Specify a minimum of 1 and maximum of 10 security profile IDs. For more information, see [Best Practices for Security Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/security-profile-best-practices.html) in the Amazon Connect Administrator Guide.
+* `tags` - (Optional) Tags to apply to the user. If configured with a provider
+[`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+An `identity_info` block supports the following arguments:
+
+* `email` - (Optional) The email address. If you are using SAML for identity management and include this parameter, an error is returned. Note that updates to `email` are supported. The [UpdateUserIdentityInfo API documentation](https://docs.aws.amazon.com/connect/latest/APIReference/API_UpdateUserIdentityInfo.html) strongly recommends limiting who has the ability to invoke `UpdateUserIdentityInfo`: someone with that ability can change the login credentials of other users by changing a user's email address to an attacker's email address and then resetting the password through email, which poses a security risk to your organization. For more information, see [Best Practices for Security Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/security-profile-best-practices.html) in the Amazon Connect Administrator Guide.
+* `first_name` - (Optional) The first name. This is required if you are using Amazon Connect or SAML for identity management. Minimum length of 1. Maximum length of 100.
+* `last_name` - (Optional) The last name. This is required if you are using Amazon Connect or SAML for identity management. Minimum length of 1. Maximum length of 100.
+
+A `phone_config` block supports the following arguments:
+
+* `after_contact_work_time_limit` - (Optional) The After Call Work (ACW) timeout setting, in seconds. Minimum value of 0.
+* `auto_accept` - (Optional) When Auto-Accept Call is enabled for an available agent, the agent connects to contacts automatically.
+* `desk_phone_number` - (Optional) The phone number for the user's desk phone. Required if `phone_type` is set as `DESK_PHONE`.
+* `phone_type` - (Required) The phone type. Valid values are `DESK_PHONE` and `SOFT_PHONE`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the user.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the user
+separated by a colon (`:`).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `user_id` - The identifier for the user.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Users using the `instance_id` and `user_id` separated by a colon (`:`).
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Users using the `instance_id` and `user_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_user.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_user_hierarchy_group.html.markdown b/website/docs/cdktf/python/r/connect_user_hierarchy_group.html.markdown new file mode 100644 index 00000000000..9cbfae22591 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_user_hierarchy_group.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user_hierarchy_group" +description: |- + Provides details about a specific Amazon Connect User Hierarchy Group +--- + + + +# Resource: aws_connect_user_hierarchy_group + +Provides an Amazon Connect User Hierarchy Group resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +~> **NOTE:** The User Hierarchy Structure must be created before creating a User Hierarchy Group. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_user_hierarchy_group import ConnectUserHierarchyGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectUserHierarchyGroup(self, "example", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="example", + tags={ + "Name": "Example User Hierarchy Group" + } + ) +``` + +### With a parent group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_user_hierarchy_group import ConnectUserHierarchyGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + parent = ConnectUserHierarchyGroup(self, "parent", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="parent", + tags={ + "Name": "Example User Hierarchy Group Parent" + } + ) + ConnectUserHierarchyGroup(self, "child", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + name="child", + parent_group_id=parent.hierarchy_group_id, + tags={ + "Name": "Example User Hierarchy Group Child" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) The name of the user hierarchy group. Must not be more than 100 characters. +* `parent_group_id` - (Optional) The identifier for the parent hierarchy group. The user hierarchy is created at level one if the parent group ID is null. +* `tags` - (Optional) Tags to apply to the hierarchy group. 
If configured with a provider +[`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the hierarchy group. +* `hierarchy_group_id` - The identifier for the hierarchy group. +* `hierarchy_path` - A block that contains information about the levels in the hierarchy group. The `hierarchy_path` block is documented below. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the hierarchy group +separated by a colon (`:`). +* `level_id` - The identifier of the level in the hierarchy group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +A `hierarchy_path` block supports the following attributes: + +* `level_one` - A block that defines the details of level one. The level block is documented below. +* `level_two` - A block that defines the details of level two. The level block is documented below. +* `level_three` - A block that defines the details of level three. The level block is documented below. +* `level_four` - A block that defines the details of level four. The level block is documented below. +* `level_five` - A block that defines the details of level five. The level block is documented below. + +A level block supports the following attributes: + +* `arn` - The Amazon Resource Name (ARN) of the hierarchy group. +* `id` - The identifier of the hierarchy group. +* `name` - The name of the hierarchy group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect User Hierarchy Groups using the `instance_id` and `hierarchy_group_id` separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect User Hierarchy Groups using the `instance_id` and `hierarchy_group_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_user_hierarchy_group.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_user_hierarchy_structure.html.markdown b/website/docs/cdktf/python/r/connect_user_hierarchy_structure.html.markdown new file mode 100644 index 00000000000..b701e71d201 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_user_hierarchy_structure.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user_hierarchy_structure" +description: |- + Provides details about a specific Amazon Connect User Hierarchy Structure +--- + + + +# Resource: aws_connect_user_hierarchy_structure + +Provides an Amazon Connect User Hierarchy Structure resource. 
For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_user_hierarchy_structure import ConnectUserHierarchyStructure +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectUserHierarchyStructure(self, "example", + hierarchy_structure=ConnectUserHierarchyStructureHierarchyStructure( + level_one=ConnectUserHierarchyStructureHierarchyStructureLevelOne( + name="levelone" + ) + ), + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111" + ) +``` + +### With Five Levels + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_user_hierarchy_structure import ConnectUserHierarchyStructure +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectUserHierarchyStructure(self, "example", + hierarchy_structure=ConnectUserHierarchyStructureHierarchyStructure( + level_five=ConnectUserHierarchyStructureHierarchyStructureLevelFive( + name="levelfive" + ), + level_four=ConnectUserHierarchyStructureHierarchyStructureLevelFour( + name="levelfour" + ), + level_one=ConnectUserHierarchyStructureHierarchyStructureLevelOne( + name="levelone" + ), + level_three=ConnectUserHierarchyStructureHierarchyStructureLevelThree( + name="levelthree" + ), + level_two=ConnectUserHierarchyStructureHierarchyStructureLevelTwo( + name="leveltwo" + ) + ), + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `hierarchy_structure` - (Required) A block that defines the hierarchy structure's levels. The `hierarchy_structure` block is documented below. +* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. + +A `hierarchy_structure` block supports the following arguments: + +* `level_one` - (Optional) A block that defines the details of level one. The level block is documented below. +* `level_two` - (Optional) A block that defines the details of level two. The level block is documented below. +* `level_three` - (Optional) A block that defines the details of level three. The level block is documented below. +* `level_four` - (Optional) A block that defines the details of level four. The level block is documented below. +* `level_five` - (Optional) A block that defines the details of level five. The level block is documented below. + +Each level block supports the following arguments: + +* `name` - (Required) The name of the user hierarchy level. Must not be more than 50 characters. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `hierarchy_structure` - In addition to the arguments defined initially, there are attributes added to the levels created. These additional attributes are documented below. 
+* `id` - The identifier of the hosting Amazon Connect Instance. + +A level block supports the following additional attributes: + +* `arn` - The Amazon Resource Name (ARN) of the hierarchy level. +* `id` - The identifier of the hierarchy level. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect User Hierarchy Structures using the `instance_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect User Hierarchy Structures using the `instance_id`. For example: + +```console +% terraform import aws_connect_user_hierarchy_structure.example f1288a1f-6193-445a-b47e-af739b2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/connect_vocabulary.html.markdown b/website/docs/cdktf/python/r/connect_vocabulary.html.markdown new file mode 100644 index 00000000000..948b6be66d0 --- /dev/null +++ b/website/docs/cdktf/python/r/connect_vocabulary.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_vocabulary" +description: |- + Provides details about a specific Amazon Connect Vocabulary +--- + + + +# Resource: aws_connect_vocabulary + +Provides an Amazon Connect Vocabulary resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.connect_vocabulary import ConnectVocabulary +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ConnectVocabulary(self, "example", + content="Phrase\tIPA\tSoundsLike\tDisplayAs\nLos-Angeles\t\t\tLos Angeles\nF.B.I.\tɛ f b i aɪ\t\tFBI\nEtienne\t\teh-tee-en\t\n", + instance_id="aaaaaaaa-bbbb-cccc-dddd-111111111111", + language_code="en-US", + name="example", + tags={ + "Key1": "Value1" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Required) The content of the custom vocabulary in plain-text format with a table of values. Each row in the table represents a word or a phrase, described with Phrase, IPA, SoundsLike, and DisplayAs fields. Separate the fields with TAB characters. For more information, see [Create a custom vocabulary using a table](https://docs.aws.amazon.com/transcribe/latest/dg/custom-vocabulary.html#create-vocabulary-table). Minimum length of `1`. Maximum length of `60000`. +* `instance_id` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `language_code` - (Required) The language code of the vocabulary entries. For a list of languages and their corresponding language codes, see [What is Amazon Transcribe?](https://docs.aws.amazon.com/transcribe/latest/dg/transcribe-whatis.html). 
Valid Values are `ar-AE`, `de-CH`, `de-DE`, `en-AB`, `en-AU`, `en-GB`, `en-IE`, `en-IN`, `en-US`, `en-WL`, `es-ES`, `es-US`, `fr-CA`, `fr-FR`, `hi-IN`, `it-IT`, `ja-JP`, `ko-KR`, `pt-BR`, `pt-PT`, `zh-CN`. +* `name` - (Required) A unique name of the custom vocabulary. Must not be more than 140 characters. +* `tags` - (Optional) Tags to apply to the vocabulary. If configured with a provider +[`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `delete` - (Default `100m`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the vocabulary. +* `failure_reason` - The reason why the custom vocabulary was not created. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the vocabulary +separated by a colon (`:`). +* `last_modified_time` - The timestamp when the custom vocabulary was last modified. +* `state` - The current state of the custom vocabulary. Valid values are `CREATION_IN_PROGRESS`, `ACTIVE`, `CREATION_FAILED`, `DELETE_IN_PROGRESS`. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vocabulary_id` - The identifier of the custom vocabulary. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Vocabularies using the `instance_id` and `vocabulary_id` separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Amazon Connect Vocabularies using the `instance_id` and `vocabulary_id` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_vocabulary.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/controltower_control.html.markdown b/website/docs/cdktf/python/r/controltower_control.html.markdown new file mode 100644 index 00000000000..2abb1d53c17 --- /dev/null +++ b/website/docs/cdktf/python/r/controltower_control.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Control Tower" +layout: "aws" +page_title: "AWS: aws_controltower_control" +description: |- + Allows the application of pre-defined controls to organizational units. +--- + + + +# Resource: aws_controltower_control + +Allows the application of pre-defined controls to organizational units. For more information on usage, please see the +[AWS Control Tower User Guide](https://docs.aws.amazon.com/controltower/latest/userguide/enable-guardrails.html). 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.controltower_control import ControltowerControl
+from imports.aws.data_aws_organizations_organization import DataAwsOrganizationsOrganization
+from imports.aws.data_aws_organizations_organizational_units import DataAwsOrganizationsOrganizationalUnits
+from imports.aws.data_aws_region import DataAwsRegion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsOrganizationsOrganization(self, "example")
+        data_aws_organizations_organizational_units_example = DataAwsOrganizationsOrganizationalUnits(self, "example_1",
+            parent_id=Token.as_string(property_access(example.roots, ["0", "id"]))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_organizations_organizational_units_example.override_logical_id("example")
+        current = DataAwsRegion(self, "current")
+        aws_controltower_control_example = ControltowerControl(self, "example_3",
+            control_identifier="arn:aws:controltower:${" + current.name + "}::control/AWS-GR_EC2_VOLUME_INUSE_CHECK",
+            target_identifier=Token.as_string(
+                property_access("${[ for x in ${" + data_aws_organizations_organizational_units_example.children + "} : x.arn if x.name == \"Infrastructure\"]}", ["0"]))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_controltower_control_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `control_identifier` - (Required) The ARN of the control. Only Strongly recommended and Elective controls are permitted, with the exception of the Region deny guardrail.
+* `target_identifier` - (Required) The ARN of the organizational unit.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the organizational unit.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Control Tower Controls using the `organizational_unit_arn` and `control_identifier` separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Control Tower Controls using the `organizational_unit_arn` and `control_identifier` separated by a comma (`,`).
For example:
+
+```console
+% terraform import aws_controltower_control.example arn:aws:organizations::123456789101:ou/o-qqaejywet/ou-qg5o-ufbhdtv3,arn:aws:controltower:us-east-1::control/WTDSMKDKDNLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/cur_report_definition.html.markdown b/website/docs/cdktf/python/r/cur_report_definition.html.markdown
new file mode 100644
index 00000000000..55aacbd81d5
--- /dev/null
+++ b/website/docs/cdktf/python/r/cur_report_definition.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Cost and Usage Report"
+layout: "aws"
+page_title: "AWS: aws_cur_report_definition"
+description: |-
+  Provides a Cost and Usage Report Definition.
+---
+
+
+
+# Resource: aws_cur_report_definition
+
+Manages Cost and Usage Report Definitions.
+
+~> **NOTE:** The AWS Cost and Usage Report service is only available in `us-east-1` currently.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cur_report_definition import CurReportDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CurReportDefinition(self, "example_cur_report_definition",
+            additional_artifacts=["REDSHIFT", "QUICKSIGHT"],
+            additional_schema_elements=["RESOURCES", "SPLIT_COST_ALLOCATION_DATA"],
+            compression="GZIP",
+            format="textORcsv",
+            report_name="example-cur-report-definition",
+            s3_bucket="example-bucket-name",
+            s3_region="us-east-1",
+            time_unit="HOURLY"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `report_name` - (Required) Unique name for the report. Must start with a number/letter and is case sensitive. Limited to 256 characters.
+* `time_unit` - (Required) The frequency at which report data are measured and displayed. Valid values are: `DAILY`, `HOURLY`, `MONTHLY`.
+* `format` - (Required) Format for report. Valid values are: `textORcsv`, `Parquet`. If `Parquet` is used, then `compression` must also be `Parquet`.
+* `compression` - (Required) Compression format for report. Valid values are: `GZIP`, `ZIP`, `Parquet`. If `Parquet` is used, then `format` must also be `Parquet`.
+* `additional_schema_elements` - (Required) A list of schema elements. Valid values are: `RESOURCES`, `SPLIT_COST_ALLOCATION_DATA`.
+* `s3_bucket` - (Required) Name of the existing S3 bucket to hold generated reports.
+* `s3_prefix` - (Optional) Report path prefix. Limited to 256 characters.
+* `s3_region` - (Required) Region of the existing S3 bucket to hold generated reports.
+* `additional_artifacts` - (Required) A list of additional artifacts. Valid values are: `REDSHIFT`, `QUICKSIGHT`, `ATHENA`. When `ATHENA` exists within `additional_artifacts`, no other artifact type can be declared and `report_versioning` must be `OVERWRITE_REPORT` (see the sketch following this list).
+* `refresh_closed_reports` - (Optional) Set to `true` to update your reports after they have been finalized if AWS detects charges related to previous months.
+* `report_versioning` - (Optional) Whether to overwrite the previous version of each report or to deliver the report in addition to the previous versions. Valid values are: `CREATE_NEW_REPORT` and `OVERWRITE_REPORT`.
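+
+As referenced above, the following is a minimal, hypothetical sketch (not generated by `cdktf convert`) of an Athena-compatible report definition that satisfies the documented constraints: `ATHENA` as the only additional artifact, `Parquet` format and compression, and `OVERWRITE_REPORT` versioning. The report name, bucket, and prefix are placeholders:
+
+```python
+# Hypothetical sketch: an Athena-compatible Cost and Usage Report definition.
+# Per the constraints above, ATHENA must be the sole additional artifact,
+# format and compression must be Parquet, and versioning must be
+# OVERWRITE_REPORT. Report name, bucket, and prefix are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.cur_report_definition import CurReportDefinition
+class AthenaCurReportSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        CurReportDefinition(self, "athena_cur_report",
+            additional_artifacts=["ATHENA"],
+            additional_schema_elements=["RESOURCES"],
+            compression="Parquet",
+            format="Parquet",
+            report_name="athena-cur-report-definition",
+            report_versioning="OVERWRITE_REPORT",
+            s3_bucket="example-bucket-name",
+            s3_prefix="cur",
+            s3_region="us-east-1",
+            time_unit="HOURLY"
+        )
+```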
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) specifying the cur report. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Report Definitions using the `report_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Report Definitions using the `report_name`. For example: + +```console +% terraform import aws_cur_report_definition.example_cur_report_definition example-cur-report-definition +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/customer_gateway.html.markdown b/website/docs/cdktf/python/r/customer_gateway.html.markdown new file mode 100644 index 00000000000..3de5ae4f22b --- /dev/null +++ b/website/docs/cdktf/python/r/customer_gateway.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_customer_gateway" +description: |- + Provides a customer gateway inside a VPC. These objects can be + connected to VPN gateways via VPN connections, and allow you to + establish tunnels between your network and the VPC. +--- + + + +# Resource: aws_customer_gateway + +Provides a customer gateway inside a VPC. These objects can be connected to VPN gateways via VPN connections, and allow you to establish tunnels between your network and the VPC. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.customer_gateway import CustomerGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + CustomerGateway(self, "main", + bgp_asn=Token.as_string(65000), + ip_address="172.83.124.10", + tags={ + "Name": "main-customer-gateway" + }, + type="ipsec.1" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bgp_asn` - (Required) The gateway's Border Gateway Protocol (BGP) Autonomous System Number (ASN). +* `certificate_arn` - (Optional) The Amazon Resource Name (ARN) for the customer gateway certificate. +* `device_name` - (Optional) A name for the customer gateway device. +* `ip_address` - (Optional) The IPv4 address for the customer gateway device's outside interface. +* `type` - (Required) The type of customer gateway. The only type AWS + supports at this time is "ipsec.1". +* `tags` - (Optional) Tags to apply to the gateway. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The amazon-assigned ID of the gateway. +* `arn` - The ARN of the customer gateway. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Customer Gateways using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Customer Gateways using the `id`. For example: + +```console +% terraform import aws_customer_gateway.main cgw-b4dc3961 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dataexchange_data_set.html.markdown b/website/docs/cdktf/python/r/dataexchange_data_set.html.markdown new file mode 100644 index 00000000000..a5f76d6ebc6 --- /dev/null +++ b/website/docs/cdktf/python/r/dataexchange_data_set.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "Data Exchange" +layout: "aws" +page_title: "AWS: aws_dataexchange_data_set" +description: |- + Provides a DataExchange DataSet +--- + + + +# Resource: aws_dataexchange_data_set + +Provides a resource to manage AWS Data Exchange DataSets. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dataexchange_data_set import DataexchangeDataSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DataexchangeDataSet(self, "example", + asset_type="S3_SNAPSHOT", + description="example", + name="example" + ) +``` + +## Argument Reference + +* `asset_type` - (Required) The type of asset that is added to a data set. Valid values are: `S3_SNAPSHOT`, `REDSHIFT_DATA_SHARE`, and `API_GATEWAY_API`. +* `description` - (Required) A description for the data set. +* `name` - (Required) The name of the data set. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Id of the data set. +* `arn` - The Amazon Resource Name of this data set. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DataExchange DataSets using their ARN. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DataExchange DataSets using their ARN. For example:
+
+```console
+% terraform import aws_dataexchange_data_set.example arn:aws:dataexchange:us-west-2:123456789012:data-sets/4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dataexchange_revision.html.markdown b/website/docs/cdktf/python/r/dataexchange_revision.html.markdown
new file mode 100644
index 00000000000..8895ac34afa
--- /dev/null
+++ b/website/docs/cdktf/python/r/dataexchange_revision.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Data Exchange"
+layout: "aws"
+page_title: "AWS: aws_dataexchange_revision"
+description: |-
+  Provides a DataExchange Revision
+---
+
+
+
+# Resource: aws_dataexchange_revision
+
+Provides a resource to manage AWS Data Exchange Revisions.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dataexchange_revision import DataexchangeRevision
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DataexchangeRevision(self, "example",
+            data_set_id=Token.as_string(aws_dataexchange_data_set_example.id)
+        )
+```
+
+## Argument Reference
+
+* `data_set_id` - (Required) The dataset ID.
+* `comment` - (Optional) A comment about the revision.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the data set and revision, separated by a colon (`:`).
+* `revision_id` - The ID of the revision.
+* `arn` - The Amazon Resource Name of this revision.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DataExchange Revisions using their `data-set-id:revision-id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DataExchange Revisions using their `data-set-id:revision-id`.
For example:
+
+```console
+% terraform import aws_dataexchange_revision.example 4fa784c7-ccb4-4dbf-ba4f-02198320daa1:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datapipeline_pipeline.html.markdown b/website/docs/cdktf/python/r/datapipeline_pipeline.html.markdown
new file mode 100644
index 00000000000..d4fde6fab63
--- /dev/null
+++ b/website/docs/cdktf/python/r/datapipeline_pipeline.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline"
+description: |-
+  Provides an AWS DataPipeline Pipeline.
+---
+
+
+
+# Resource: aws_datapipeline_pipeline
+
+Provides a DataPipeline Pipeline resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datapipeline_pipeline import DatapipelinePipeline
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatapipelinePipeline(self, "default",
+            name="tf-pipeline-default"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the pipeline.
+* `description` - (Optional) The description of the pipeline.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the pipeline.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datapipeline_pipeline` using the id (Pipeline ID). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datapipeline_pipeline` using the id (Pipeline ID). For example:
+
+```console
+% terraform import aws_datapipeline_pipeline.default df-1234567890
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datapipeline_pipeline_definition.html.markdown b/website/docs/cdktf/python/r/datapipeline_pipeline_definition.html.markdown
new file mode 100644
index 00000000000..38196465f66
--- /dev/null
+++ b/website/docs/cdktf/python/r/datapipeline_pipeline_definition.html.markdown
@@ -0,0 +1,146 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline_definition"
+description: |-
+  Provides a DataPipeline Definition.
+---
+
+
+
+# Resource: aws_datapipeline_pipeline_definition
+
+Provides a DataPipeline Pipeline Definition resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datapipeline_pipeline import DatapipelinePipeline
+from imports.aws.datapipeline_pipeline_definition import (
+    DatapipelinePipelineDefinition,
+    DatapipelinePipelineDefinitionPipelineObject,
+    DatapipelinePipelineDefinitionPipelineObjectField,
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        default_var = DatapipelinePipeline(self, "default",
+            name="tf-pipeline-default"
+        )
+        DatapipelinePipelineDefinition(self, "example",
+            pipeline_id=default_var.id,
+            pipeline_object=[DatapipelinePipelineDefinitionPipelineObject(
+                field=[DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="workerGroup",
+                    string_value="workerGroup"
+                )
+                ],
+                id="Default",
+                name="Default"
+            ), DatapipelinePipelineDefinitionPipelineObject(
+                field=[DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="startDateTime",
+                    string_value="2012-12-12T00:00:00"
+                ), DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="type",
+                    string_value="Schedule"
+                ), DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="period",
+                    string_value="1 hour"
+                ), DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="endDateTime",
+                    string_value="2012-12-21T18:00:00"
+                )
+                ],
+                id="Schedule",
+                name="Schedule"
+            ), DatapipelinePipelineDefinitionPipelineObject(
+                field=[DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="type",
+                    string_value="ShellCommandActivity"
+                ), DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="command",
+                    string_value="echo hello"
+                ), DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="parent",
+                    string_value="Default"
+                ), DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="schedule",
+                    string_value="Schedule"
+                )
+                ],
+                id="SayHello",
+                name="SayHello"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `pipeline_id` - (Required) ID of the pipeline.
+* `pipeline_object` - (Required) Configuration block for the objects that define the pipeline. See below.
+
+The following arguments are optional:
+
+* `parameter_object` - (Optional) Configuration block for the parameter objects used in the pipeline definition. See below.
+* `parameter_value` - (Optional) Configuration block for the parameter values used in the pipeline definition. See below, and the sketch after these sections.
+
+### `pipeline_object`
+
+* `field` - (Required) Configuration block for key-value pairs that define the properties of the object. See below.
+* `id` - (Required) ID of the object.
+* `name` - (Required) Name of the object.
+
+### `field`
+
+* `key` - (Required) Field identifier.
+* `ref_value` - (Optional) Field value, expressed as the identifier of another object.
+* `string_value` - (Optional) Field value, expressed as a String.
+
+### `parameter_object`
+
+* `attribute` - (Required) Configuration block for attributes of the parameter object. See below.
+* `id` - (Required) ID of the parameter object.
+
+### `attribute`
+
+* `key` - (Required) Field identifier.
+* `string_value` - (Required) Field value, expressed as a String.
+
+### `parameter_value`
+
+* `id` - (Required) ID of the parameter value.
+* `string_value` - (Required) Field value, expressed as a String.
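+
+As a sketch of how the optional parameter blocks compose, the following hypothetical example declares a `myWorkerGroup` parameter via `parameter_object` and binds a value to it via `parameter_value`. The parameter id, its value, and the struct names are assumptions that follow the naming pattern of the generated bindings shown above; they are not part of the original example:
+
+```python
+# Hypothetical sketch - struct names assume the same cdktf-generated naming
+# pattern as DatapipelinePipelineDefinitionPipelineObject above.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.datapipeline_pipeline import DatapipelinePipeline
+from imports.aws.datapipeline_pipeline_definition import (
+    DatapipelinePipelineDefinition,
+    DatapipelinePipelineDefinitionPipelineObject,
+    DatapipelinePipelineDefinitionPipelineObjectField,
+    DatapipelinePipelineDefinitionParameterObject,
+    DatapipelinePipelineDefinitionParameterObjectAttribute,
+    DatapipelinePipelineDefinitionParameterValue,
+)
+class ParameterizedDefinition(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        pipeline = DatapipelinePipeline(self, "default",
+            name="tf-pipeline-default"
+        )
+        DatapipelinePipelineDefinition(self, "example",
+            pipeline_id=pipeline.id,
+            pipeline_object=[DatapipelinePipelineDefinitionPipelineObject(
+                id="Default",
+                name="Default",
+                field=[DatapipelinePipelineDefinitionPipelineObjectField(
+                    key="workerGroup",
+                    # Reference the declared parameter by its id.
+                    string_value="#{myWorkerGroup}"
+                )]
+            )],
+            # Declare the parameter and its type...
+            parameter_object=[DatapipelinePipelineDefinitionParameterObject(
+                id="myWorkerGroup",
+                attribute=[DatapipelinePipelineDefinitionParameterObjectAttribute(
+                    key="type",
+                    string_value="String"
+                )]
+            )],
+            # ...and supply a concrete value for it.
+            parameter_value=[DatapipelinePipelineDefinitionParameterValue(
+                id="myWorkerGroup",
+                string_value="workerGroup"
+            )]
+        )
+```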
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique ID of the datapipeline definition. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datapipeline_pipeline_definition` using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_datapipeline_pipeline_definition` using the id. For example: + +```console +% terraform import aws_datapipeline_pipeline_definition.example df-1234567890 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/datasync_agent.html.markdown b/website/docs/cdktf/python/r/datasync_agent.html.markdown new file mode 100644 index 00000000000..9048c00bfcc --- /dev/null +++ b/website/docs/cdktf/python/r/datasync_agent.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_agent" +description: |- + Manages an AWS DataSync Agent in the provider region +--- + + + +# Resource: aws_datasync_agent + +Manages an AWS DataSync Agent deployed on premises. + +~> **NOTE:** One of `activation_key` or `ip_address` must be provided for resource creation (agent activation). Neither is required for resource import. If using `ip_address`, Terraform must be able to make an HTTP (port 80) GET request to the specified IP address from where it is running. The agent will turn off that HTTP server after activation. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.datasync_agent import DatasyncAgent +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DatasyncAgent(self, "example", + ip_address="1.2.3.4", + name="example" + ) +``` + +## Example Usage with VPC Endpoints + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_network_interface import DataAwsNetworkInterface +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.datasync_agent import DatasyncAgent +from imports.aws.vpc_endpoint import VpcEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsRegion(self, "current") + example = VpcEndpoint(self, "example", + security_group_ids=[Token.as_string(aws_security_group_example.id)], + service_name="com.amazonaws.${" + current.name + "}.datasync", + subnet_ids=[Token.as_string(aws_subnet_example.id)], + vpc_endpoint_type="Interface", + vpc_id=Token.as_string(aws_vpc_example.id) + ) + data_aws_network_interface_example = DataAwsNetworkInterface(self, "example_2", + id=Token.as_string( + property_access(Fn.tolist(example.network_interface_ids), ["0"])) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_network_interface_example.override_logical_id("example") + aws_datasync_agent_example = DatasyncAgent(self, "example_3", + ip_address="1.2.3.4", + name="example", + private_link_endpoint=Token.as_string(data_aws_network_interface_example.private_ip), + security_group_arns=[Token.as_string(aws_security_group_example.arn)], + subnet_arns=[Token.as_string(aws_subnet_example.arn)], + vpc_endpoint_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_datasync_agent_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the DataSync Agent. +* `activation_key` - (Optional) DataSync Agent activation key during resource creation. Conflicts with `ip_address`. If an `ip_address` is provided instead, Terraform will retrieve the `activation_key` as part of the resource creation. +* `ip_address` - (Optional) DataSync Agent IP address to retrieve activation key during resource creation. Conflicts with `activation_key`. DataSync Agent must be accessible on port 80 from where Terraform is running. +* `private_link_endpoint` - (Optional) The IP address of the VPC endpoint the agent should connect to when retrieving an activation key during resource creation. Conflicts with `activation_key`. +* `security_group_arns` - (Optional) The ARNs of the security groups used to protect your data transfer task subnets. +* `subnet_arns` - (Optional) The Amazon Resource Names (ARNs) of the subnets in which DataSync will create elastic network interfaces for each data transfer task. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Agent. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpc_endpoint_id` - (Optional) The ID of the VPC (virtual private cloud) endpoint that the agent has access to. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the DataSync Agent. +* `arn` - Amazon Resource Name (ARN) of the DataSync Agent. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_agent` using the DataSync Agent Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_datasync_agent` using the DataSync Agent Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_datasync_agent.example arn:aws:datasync:us-east-1:123456789012:agent/agent-12345678901234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/datasync_location_efs.html.markdown b/website/docs/cdktf/python/r/datasync_location_efs.html.markdown new file mode 100644 index 00000000000..23c732ac6e2 --- /dev/null +++ b/website/docs/cdktf/python/r/datasync_location_efs.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_efs" +description: |- + Manages an EFS Location within AWS DataSync. +--- + + + +# Resource: aws_datasync_location_efs + +Manages an AWS DataSync EFS Location. + +~> **NOTE:** The EFS File System must have a mounted EFS Mount Target before creating this resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.datasync_location_efs import DatasyncLocationEfs +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DatasyncLocationEfs(self, "example", + ec2_config=DatasyncLocationEfsEc2Config( + security_group_arns=[Token.as_string(aws_security_group_example.arn)], + subnet_arn=Token.as_string(aws_subnet_example.arn) + ), + efs_file_system_arn=Token.as_string(aws_efs_mount_target_example.file_system_arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `access_point_arn` - (Optional) Specifies the Amazon Resource Name (ARN) of the access point that DataSync uses to access the Amazon EFS file system. +* `ec2_config` - (Required) Configuration block containing EC2 configurations for connecting to the EFS File System. +* `efs_file_system_arn` - (Required) Amazon Resource Name (ARN) of EFS File System. +* `file_system_access_role_arn` - (Optional) Specifies an Identity and Access Management (IAM) role that DataSync assumes when mounting the Amazon EFS file system. +* `in_transit_encryption` - (Optional) Specifies whether you want DataSync to use TLS encryption when transferring data to or from your Amazon EFS file system. Valid values are `NONE` and `TLS1_2`. +* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination. Default `/`. 
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### ec2_config Argument Reference
+
+The `ec2_config` configuration block supports the following arguments:
+
+* `security_group_arns` - (Required) List of Amazon Resource Names (ARNs) of the EC2 Security Groups that are associated with the EFS Mount Target.
+* `subnet_arn` - (Required) Amazon Resource Name (ARN) of the EC2 Subnet that is associated with the EFS Mount Target.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the DataSync Location.
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_efs` using the DataSync Location Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datasync_location_efs` using the DataSync Location Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_datasync_location_efs.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datasync_location_fsx_lustre_file_system.html.markdown b/website/docs/cdktf/python/r/datasync_location_fsx_lustre_file_system.html.markdown
new file mode 100644
index 00000000000..8e24d6c500d
--- /dev/null
+++ b/website/docs/cdktf/python/r/datasync_location_fsx_lustre_file_system.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_fsx_lustre_file_system"
+description: |-
+  Manages an FSx Lustre Location within AWS DataSync.
+---
+
+
+
+# Resource: aws_datasync_location_fsx_lustre_file_system
+
+Manages an AWS DataSync FSx Lustre Location.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datasync_location_fsx_lustre_file_system import DatasyncLocationFsxLustreFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncLocationFsxLustreFileSystem(self, "example",
+            fsx_filesystem_arn=Token.as_string(aws_fsx_lustre_file_system_example.arn),
+            security_group_arns=[Token.as_string(aws_security_group_example.arn)]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `fsx_filesystem_arn` - (Required) The Amazon Resource Name (ARN) for the FSx for Lustre file system.
+* `security_group_arns` - (Optional) The Amazon Resource Names (ARNs) of the security groups that you want to use to configure the FSx for Lustre file system.
+* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the DataSync Location.
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uri` - The URL of the FSx for Lustre location that was described.
+* `creation_time` - The time that the FSx for Lustre location was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_fsx_lustre_file_system` using the `DataSync-ARN#FSx-Lustre-ARN`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datasync_location_fsx_lustre_file_system` using the `DataSync-ARN#FSx-Lustre-ARN`. For example:
+
+```console
+% terraform import aws_datasync_location_fsx_lustre_file_system.example arn:aws:datasync:us-west-2:123456789012:location/loc-12345678901234567#arn:aws:fsx:us-west-2:476956259333:file-system/fs-08e04cd442c1bb94a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datasync_location_fsx_openzfs_file_system.html.markdown b/website/docs/cdktf/python/r/datasync_location_fsx_openzfs_file_system.html.markdown
new file mode 100644
index 00000000000..d620181d35d
--- /dev/null
+++ b/website/docs/cdktf/python/r/datasync_location_fsx_openzfs_file_system.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_fsx_openzfs_file_system"
+description: |-
+  Manages an FSx for OpenZFS Location within AWS DataSync.
+---
+
+
+
+# Resource: aws_datasync_location_fsx_openzfs_file_system
+
+Manages an AWS DataSync FSx for OpenZFS Location.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datasync_location_fsx_openzfs_file_system import (
+    DatasyncLocationFsxOpenzfsFileSystem,
+    DatasyncLocationFsxOpenzfsFileSystemProtocol,
+    DatasyncLocationFsxOpenzfsFileSystemProtocolNfs,
+    DatasyncLocationFsxOpenzfsFileSystemProtocolNfsMountOptions,
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncLocationFsxOpenzfsFileSystem(self, "example",
+            fsx_filesystem_arn=Token.as_string(aws_fsx_openzfs_file_system_example.arn),
+            protocol=DatasyncLocationFsxOpenzfsFileSystemProtocol(
+                nfs=DatasyncLocationFsxOpenzfsFileSystemProtocolNfs(
+                    mount_options=DatasyncLocationFsxOpenzfsFileSystemProtocolNfsMountOptions(
+                        version="AUTOMATIC"
+                    )
+                )
+            ),
+            security_group_arns=[Token.as_string(aws_security_group_example.arn)]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `fsx_filesystem_arn` - (Required) The Amazon Resource Name (ARN) for the FSx for OpenZFS file system.
+* `protocol` - (Required) The type of protocol that DataSync uses to access your file system. See below.
+* `security_group_arns` - (Optional) The Amazon Resource Names (ARNs) of the security groups that you want to use to configure the FSx for OpenZFS file system.
+* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination. Must start with `/fsx`.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### protocol
+
+* `nfs` - (Required) Represents the Network File System (NFS) protocol that DataSync uses to access your FSx for OpenZFS file system. See below.
+
+### nfs
+
+* `mount_options` - (Required) Represents the mount options that are available for DataSync to access an NFS location. See below.
+
+### mount_options
+
+* `version` - (Optional) The specific NFS version that you want DataSync to use for mounting your NFS share. Valid values: `AUTOMATIC`, `NFS3`, `NFS4_0` and `NFS4_1`. Default: `AUTOMATIC`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the DataSync Location.
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uri` - The URL of the FSx for OpenZFS location that was described.
+* `creation_time` - The time that the FSx for OpenZFS location was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_fsx_openzfs_file_system` using the `DataSync-ARN#FSx-openzfs-ARN`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datasync_location_fsx_openzfs_file_system` using the `DataSync-ARN#FSx-openzfs-ARN`. For example:
+
+```console
+% terraform import aws_datasync_location_fsx_openzfs_file_system.example arn:aws:datasync:us-west-2:123456789012:location/loc-12345678901234567#arn:aws:fsx:us-west-2:123456789012:file-system/fs-08e04cd442c1bb94a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datasync_location_fsx_windows_file_system.html.markdown b/website/docs/cdktf/python/r/datasync_location_fsx_windows_file_system.html.markdown
new file mode 100644
index 00000000000..8db3034fee2
--- /dev/null
+++ b/website/docs/cdktf/python/r/datasync_location_fsx_windows_file_system.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_fsx_windows_file_system"
+description: |-
+  Manages an FSx Windows Location within AWS DataSync.
+---
+
+
+
+# Resource: aws_datasync_location_fsx_windows_file_system
+
+Manages an AWS DataSync FSx Windows Location.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datasync_location_fsx_windows_file_system import DatasyncLocationFsxWindowsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncLocationFsxWindowsFileSystem(self, "example",
+            fsx_filesystem_arn=Token.as_string(aws_fsx_windows_file_system_example.arn),
+            password="SuperSecretPassw0rd",
+            security_group_arns=[Token.as_string(aws_security_group_example.arn)],
+            user="SomeUser"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `fsx_filesystem_arn` - (Required) The Amazon Resource Name (ARN) for the FSx for Windows file system.
+* `password` - (Required) The password of the user who has the permissions to access files and folders in the FSx for Windows file system.
+* `user` - (Required) The user who has the permissions to access files and folders in the FSx for Windows file system.
+* `domain` - (Optional) The name of the Windows domain that the FSx for Windows server belongs to.
+* `security_group_arns` - (Optional) The Amazon Resource Names (ARNs) of the security groups that you want to use to configure the FSx for Windows file system.
+* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the DataSync Location.
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `uri` - The URL of the FSx for Windows location that was described. +* `creation_time` - The time that the FSx for Windows location was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_fsx_windows_file_system` using the `DataSync-ARN#FSx-Windows-ARN`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_datasync_location_fsx_windows_file_system` using the `DataSync-ARN#FSx-Windows-ARN`. For example: + +```console +% terraform import aws_datasync_location_fsx_windows_file_system.example arn:aws:datasync:us-west-2:123456789012:location/loc-12345678901234567#arn:aws:fsx:us-west-2:476956259333:file-system/fs-08e04cd442c1bb94a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/datasync_location_hdfs.html.markdown b/website/docs/cdktf/python/r/datasync_location_hdfs.html.markdown new file mode 100644 index 00000000000..78684f59868 --- /dev/null +++ b/website/docs/cdktf/python/r/datasync_location_hdfs.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_hdfs" +description: |- + Manages an AWS DataSync HDFS Location +--- + + + +# Resource: aws_datasync_location_hdfs + +Manages an HDFS Location within AWS DataSync. + +~> **NOTE:** The DataSync Agents must be available before creating this resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.datasync_location_hdfs import DatasyncLocationHdfs +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DatasyncLocationHdfs(self, "example", + agent_arns=[Token.as_string(aws_datasync_agent_example.arn)], + authentication_type="SIMPLE", + name_node=[DatasyncLocationHdfsNameNode( + hostname=Token.as_string(aws_instance_example.private_dns), + port=80 + ) + ], + simple_user="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `agent_arns` - (Required) A list of DataSync Agent ARNs with which this location will be associated. +* `authentication_type` - (Required) The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`. +* `name_node` - (Required) The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below. +* `simple_user` - (Optional) The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required. 
+* `block_size` - (Optional) The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
+* `replication_factor` - (Optional) The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
+* `kerberos_keytab` - (Optional) The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. If `KERBEROS` is specified for `authentication_type`, this parameter is required (see the sketch after the attribute reference below).
+* `kerberos_krb5_conf` - (Optional) The krb5.conf file that contains the Kerberos configuration information. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
+* `kerberos_principal` - (Optional) The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
+* `kms_key_provider_uri` - (Optional) The URI of the HDFS cluster's Key Management Server (KMS).
+* `qop_configuration` - (Optional) The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set `RpcProtection` or `DataTransferProtection`, the other parameter assumes the same value. See configuration below.
+* `subdirectory` - (Optional) A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### name_node Argument Reference
+
+* `hostname` - (Required) The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
+* `port` - (Required) The port that the NameNode uses to listen to client requests.
+
+### qop_configuration Argument Reference
+
+* `data_transfer_protection` - (Optional) The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your `dfs.data.transfer.protection` setting in the `hdfs-site.xml` file on your Hadoop cluster. Valid values are `DISABLED`, `AUTHENTICATION`, `INTEGRITY` and `PRIVACY`.
+* `rpc_protection` - (Optional) The RPC protection setting configured on the HDFS cluster. This setting corresponds to your `hadoop.rpc.protection` setting in your `core-site.xml` file on your Hadoop cluster. Valid values are `DISABLED`, `AUTHENTICATION`, `INTEGRITY` and `PRIVACY`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
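+
+For contrast with the `SIMPLE` example above, the following sketch shows what a `KERBEROS`-authenticated location might look like. The hostname, principal, keytab/krb5.conf paths, and the `DatasyncLocationHdfsQopConfiguration` struct name are illustrative assumptions following the generated-binding naming pattern, not part of the original example:
+
+```python
+# Hypothetical sketch - the principal, hostname, and file paths are
+# placeholders; struct names assume the generated binding pattern.
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+from imports.aws.datasync_location_hdfs import (
+    DatasyncLocationHdfs,
+    DatasyncLocationHdfsNameNode,
+    DatasyncLocationHdfsQopConfiguration,
+)
+class KerberosHdfsLocation(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncLocationHdfs(self, "example",
+            agent_arns=[Token.as_string(aws_datasync_agent_example.arn)],
+            authentication_type="KERBEROS",
+            name_node=[DatasyncLocationHdfsNameNode(
+                hostname="namenode.example.com",
+                port=8020
+            )],
+            # All three kerberos_* arguments are required with KERBEROS auth.
+            kerberos_principal="user@EXAMPLE.COM",
+            kerberos_keytab=Fn.file("user.keytab"),
+            kerberos_krb5_conf=Fn.file("krb5.conf"),
+            qop_configuration=DatasyncLocationHdfsQopConfiguration(
+                data_transfer_protection="PRIVACY",
+                rpc_protection="PRIVACY"
+            )
+        )
+```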
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_hdfs` using the Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_datasync_location_hdfs` using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_datasync_location_hdfs.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/datasync_location_nfs.html.markdown b/website/docs/cdktf/python/r/datasync_location_nfs.html.markdown new file mode 100644 index 00000000000..e056ffc84f3 --- /dev/null +++ b/website/docs/cdktf/python/r/datasync_location_nfs.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_nfs" +description: |- + Manages an AWS DataSync NFS Location +--- + + + +# Resource: aws_datasync_location_nfs + +Manages an NFS Location within AWS DataSync. + +~> **NOTE:** The DataSync Agents must be available before creating this resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.datasync_location_nfs import DatasyncLocationNfs +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DatasyncLocationNfs(self, "example", + on_prem_config=DatasyncLocationNfsOnPremConfig( + agent_arns=[Token.as_string(aws_datasync_agent_example.arn)] + ), + server_hostname="nfs.example.com", + subdirectory="/exported/path" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `mount_options` - (Optional) Configuration block containing mount options used by DataSync to access the NFS Server. +* `on_prem_config` - (Required) Configuration block containing information for connecting to the NFS File System. +* `server_hostname` - (Required) Specifies the IP address or DNS name of the NFS server. The DataSync Agent(s) use this to mount the NFS server. +* `subdirectory` - (Required) Subdirectory to perform actions as source or destination. Should be exported by the NFS server. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### mount_options Argument Reference + +The `mount_options` configuration block supports the following arguments: + +* `version` - (Optional) The specific NFS version that you want DataSync to use for mounting your NFS share. Valid values: `AUTOMATIC`, `NFS3`, `NFS4_0` and `NFS4_1`. 
Default: `AUTOMATIC`
+
+### on_prem_config Argument Reference
+
+The `on_prem_config` configuration block supports the following arguments:
+
+* `agent_arns` - (Required) List of Amazon Resource Names (ARNs) of the DataSync Agents used to connect to the NFS server.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the DataSync Location.
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_nfs` using the DataSync Location Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datasync_location_nfs` using the DataSync Location Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_datasync_location_nfs.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datasync_location_object_storage.html.markdown b/website/docs/cdktf/python/r/datasync_location_object_storage.html.markdown
new file mode 100644
index 00000000000..2f108de200a
--- /dev/null
+++ b/website/docs/cdktf/python/r/datasync_location_object_storage.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_object_storage"
+description: |-
+  Manages an AWS DataSync Object Storage Location
+---
+
+
+
+# Resource: aws_datasync_location_object_storage
+
+Manages an Object Storage Location within AWS DataSync.
+
+~> **NOTE:** The DataSync Agents must be available before creating this resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datasync_location_object_storage import DatasyncLocationObjectStorage
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncLocationObjectStorage(self, "example",
+            agent_arns=[Token.as_string(aws_datasync_agent_example.arn)],
+            bucket_name="example",
+            server_hostname="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `agent_arns` - (Required) A list of DataSync Agent ARNs with which this location will be associated.
+* `access_key` - (Optional) The access key is used if credentials are required to access the self-managed object storage server. If your object storage requires a user name and password to authenticate, use `access_key` and `secret_key` to provide the user name and password, respectively.
+* `bucket_name` - (Required) The bucket on the self-managed object storage server that is used to read data from.
+* `secret_key` - (Optional) The secret key is used if credentials are required to access the self-managed object storage server. If your object storage requires a user name and password to authenticate, use `access_key` and `secret_key` to provide the user name and password, respectively.
+* `server_certificate` - (Optional) Specifies a certificate to authenticate with an object storage system that uses a private or self-signed certificate authority (CA). You must specify a Base64-encoded .pem string. The certificate can be up to 32768 bytes (before Base64 encoding).
+* `server_hostname` - (Required) The name of the self-managed object storage server. This value is the IP address or Domain Name Service (DNS) name of the object storage server. An agent uses this host name to mount the object storage server in a network.
+* `server_protocol` - (Optional) The protocol that the object storage server uses to communicate. Valid values are `HTTP` or `HTTPS`.
+* `server_port` - (Optional) The port that your self-managed object storage server accepts inbound network traffic on. The server port is set by default to TCP 80 (`HTTP`) or TCP 443 (`HTTPS`). You can specify a custom port if your self-managed object storage server requires one.
+* `subdirectory` - (Optional) A subdirectory on the self-managed object storage server. This subdirectory is used to read data from or write data to the object storage server. If the subdirectory isn't specified, it will default to `/`.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uri` - The URL of the Object Storage location that was described.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_object_storage` using the Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datasync_location_object_storage` using the Amazon Resource Name (ARN).
For example:
+
+```console
+% terraform import aws_datasync_location_object_storage.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datasync_location_s3.html.markdown b/website/docs/cdktf/python/r/datasync_location_s3.html.markdown
new file mode 100644
index 00000000000..f21a47981f5
--- /dev/null
+++ b/website/docs/cdktf/python/r/datasync_location_s3.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_s3"
+description: |-
+  Manages an AWS DataSync S3 Location
+---
+
+
+
+# Resource: aws_datasync_location_s3
+
+Manages an S3 Location within AWS DataSync.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datasync_location_s3 import DatasyncLocationS3, DatasyncLocationS3S3Config
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncLocationS3(self, "example",
+            s3_bucket_arn=Token.as_string(aws_s3_bucket_example.arn),
+            s3_config=DatasyncLocationS3S3Config(
+                bucket_access_role_arn=Token.as_string(aws_iam_role_example.arn)
+            ),
+            subdirectory="/example/prefix"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `agent_arns` - (Optional) A list of DataSync Agent ARNs with which this location will be associated.
+* `s3_bucket_arn` - (Required) Amazon Resource Name (ARN) of the S3 Bucket.
+* `s3_config` - (Required) Configuration block containing information for connecting to S3.
+* `s3_storage_class` - (Optional) The Amazon S3 storage class that you want to store your files in when this location is used as a task destination. [Valid values](https://docs.aws.amazon.com/datasync/latest/userguide/create-s3-location.html#using-storage-classes)
+* `subdirectory` - (Required) Prefix to perform actions as source or destination.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### s3_config Argument Reference
+
+The `s3_config` configuration block supports the following arguments:
+
+* `bucket_access_role_arn` - (Required) ARN of the IAM Role used to connect to the S3 Bucket.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the DataSync Location.
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_s3` using the DataSync Location Amazon Resource Name (ARN).
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datasync_location_s3` using the DataSync Location Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_datasync_location_s3.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/datasync_location_smb.html.markdown b/website/docs/cdktf/python/r/datasync_location_smb.html.markdown
new file mode 100644
index 00000000000..ebef2e1fab8
--- /dev/null
+++ b/website/docs/cdktf/python/r/datasync_location_smb.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_smb"
+description: |-
+  Manages an AWS DataSync SMB Location
+---
+
+
+
+# Resource: aws_datasync_location_smb
+
+Manages an SMB Location within AWS DataSync.
+
+~> **NOTE:** The DataSync Agents must be available before creating this resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datasync_location_smb import DatasyncLocationSmb
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncLocationSmb(self, "example",
+            agent_arns=[Token.as_string(aws_datasync_agent_example.arn)],
+            password="ANotGreatPassword",
+            server_hostname="smb.example.com",
+            subdirectory="/exported/path",
+            user="Guest"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `agent_arns` - (Required) A list of DataSync Agent ARNs with which this location will be associated.
+* `domain` - (Optional) The name of the Windows domain the SMB server belongs to.
+* `mount_options` - (Optional) Configuration block containing mount options used by DataSync to access the SMB Server. The SMB version can be `AUTOMATIC`, `SMB2`, or `SMB3`. See below.
+* `password` - (Required) The password of the user who can mount the share and has file permissions in the SMB share.
+* `server_hostname` - (Required) Specifies the IP address or DNS name of the SMB server. The DataSync Agent(s) use this to mount the SMB share.
+* `subdirectory` - (Required) Subdirectory to perform actions as source or destination. Should be shared by the SMB server.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `user` - (Required) The user who can mount the share and has file and folder permissions in the SMB share.
+
+### mount_options Argument Reference
+
+The `mount_options` configuration block supports the following arguments:
+
+* `version` - (Optional) The specific SMB version that you want DataSync to use for mounting your SMB share. Valid values: `AUTOMATIC`, `SMB2`, and `SMB3`.
Default: `AUTOMATIC` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the DataSync Location. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_location_smb` using the Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_datasync_location_smb` using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_datasync_location_smb.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/datasync_task.html.markdown b/website/docs/cdktf/python/r/datasync_task.html.markdown new file mode 100644 index 00000000000..fec0458c5f7 --- /dev/null +++ b/website/docs/cdktf/python/r/datasync_task.html.markdown @@ -0,0 +1,175 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_task" +description: |- + Manages an AWS DataSync Task +--- + + + +# Resource: aws_datasync_task + +Manages an AWS DataSync Task, which represents a configuration for synchronization. Starting an execution of these DataSync Tasks (actually synchronizing files) is performed outside of this Terraform resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Op, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.datasync_task import DatasyncTask +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DatasyncTask(self, "example", + destination_location_arn=destination.arn, + name="example", + options=DatasyncTaskOptions( + bytes_per_second=Token.as_number(Op.negate(1)) + ), + source_location_arn=source.arn + ) +``` + +## Example Usage with Scheduling + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.datasync_task import DatasyncTask +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DatasyncTask(self, "example", + destination_location_arn=destination.arn, + name="example", + schedule=DatasyncTaskSchedule( + schedule_expression="cron(0 12 ? * SUN,WED *)" + ), + source_location_arn=source.arn + ) +``` + +## Example Usage with Filtering + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.datasync_task import DatasyncTask, DatasyncTaskExcludes, DatasyncTaskIncludes
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DatasyncTask(self, "example",
+            destination_location_arn=destination.arn,
+            excludes=DatasyncTaskExcludes(
+                filter_type="SIMPLE_PATTERN",
+                value="/folder1|/folder2"
+            ),
+            includes=DatasyncTaskIncludes(
+                filter_type="SIMPLE_PATTERN",
+                value="/folder1|/folder2"
+            ),
+            name="example",
+            source_location_arn=source.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `destination_location_arn` - (Required) Amazon Resource Name (ARN) of destination DataSync Location.
+* `source_location_arn` - (Required) Amazon Resource Name (ARN) of source DataSync Location.
+* `cloudwatch_log_group_arn` - (Optional) Amazon Resource Name (ARN) of the CloudWatch Log Group that is used to monitor and log events in the sync task.
+* `excludes` - (Optional) Filter rules that determine which files to exclude from a task.
+* `includes` - (Optional) Filter rules that determine which files to include in a task.
+* `name` - (Optional) Name of the DataSync Task.
+* `options` - (Optional) Configuration block containing options that control the default behavior when you start an execution of this DataSync Task. For each individual task execution, you can override these options by specifying an overriding configuration in those executions.
+* `schedule` - (Optional) Specifies a schedule used to periodically transfer files from a source to a destination location.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Task. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### options Argument Reference
+
+~> **NOTE:** If `atime` is set to `BEST_EFFORT`, `mtime` must be set to `PRESERVE`. If `atime` is set to `NONE`, `mtime` must be set to `NONE`.
+
+The `options` configuration block supports the following arguments:
+
+* `atime` - (Optional) A file metadata that shows the last time a file was accessed (that is, when the file was read or written to). If set to `BEST_EFFORT`, the DataSync Task attempts to preserve the original (that is, the version before sync `PREPARING` phase) `atime` attribute on all source files. Valid values: `BEST_EFFORT`, `NONE`. Default: `BEST_EFFORT`.
+* `bytes_per_second` - (Optional) Limits the bandwidth utilized. For example, to set a maximum of 1 MB, set this value to `1048576`. Valid values: `-1` or greater. Default: `-1` (unlimited).
+* `gid` - (Optional) Group identifier of the file's owners. Valid values: `BOTH`, `INT_VALUE`, `NAME`, `NONE`. Default: `INT_VALUE` (preserve integer value of the ID).
+* `log_level` - (Optional) Determines the type of logs that DataSync publishes to a log stream in the Amazon CloudWatch log group that you provide. Valid values: `OFF`, `BASIC`, `TRANSFER`. Default: `OFF`.
+* `mtime` - (Optional) A file metadata that indicates the last time a file was modified (written to) before the sync `PREPARING` phase. Valid values: `NONE`, `PRESERVE`. Default: `PRESERVE`.
+* `object_tags` - (Optional) Specifies whether object tags are maintained when transferring between object storage systems. If you want your DataSync task to ignore object tags, specify the `NONE` value.
Valid values: `PRESERVE`, `NONE`. Default value: `PRESERVE`. +* `overwrite_mode` - (Optional) Determines whether files at the destination should be overwritten or preserved when copying files. Valid values: `ALWAYS`, `NEVER`. Default: `ALWAYS`. +* `posix_permissions` - (Optional) Determines which users or groups can access a file for a specific purpose such as reading, writing, or execution of the file. Valid values: `NONE`, `PRESERVE`. Default: `PRESERVE`. +* `preserve_deleted_files` - (Optional) Whether files deleted in the source should be removed or preserved in the destination file system. Valid values: `PRESERVE`, `REMOVE`. Default: `PRESERVE`. +* `preserve_devices` - (Optional) Whether the DataSync Task should preserve the metadata of block and character devices in the source files system, and recreate the files with that device name and metadata on the destination. The DataSync Task can’t sync the actual contents of such devices, because many of the devices are non-terminal and don’t return an end of file (EOF) marker. Valid values: `NONE`, `PRESERVE`. Default: `NONE` (ignore special devices). +* `security_descriptor_copy_flags` - (Optional) Determines which components of the SMB security descriptor are copied from source to destination objects. This value is only used for transfers between SMB and Amazon FSx for Windows File Server locations, or between two Amazon FSx for Windows File Server locations. Valid values: `NONE`, `OWNER_DACL`, `OWNER_DACL_SACL`. Default: `OWNER_DACL`. +* `task_queueing` - (Optional) Determines whether tasks should be queued before executing the tasks. Valid values: `ENABLED`, `DISABLED`. Default `ENABLED`. +* `transfer_mode` - (Optional) Determines whether DataSync transfers only the data and metadata that differ between the source and the destination location, or whether DataSync transfers all the content from the source, without comparing to the destination location. Valid values: `CHANGED`, `ALL`. Default: `CHANGED` +* `uid` - (Optional) User identifier of the file's owners. Valid values: `BOTH`, `INT_VALUE`, `NAME`, `NONE`. Default: `INT_VALUE` (preserve integer value of the ID). +* `verify_mode` - (Optional) Whether a data integrity verification should be performed at the end of a task execution after all data and metadata have been transferred. Valid values: `NONE`, `POINT_IN_TIME_CONSISTENT`, `ONLY_FILES_TRANSFERRED`. Default: `POINT_IN_TIME_CONSISTENT`. + +### Schedule + +* `schedule_expression` - (Required) Specifies the schedule you want your task to use for repeated executions. For more information, see [Schedule Expressions for Rules](https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html). + +### excludes Argument Reference + +* `filter_type` - (Optional) The type of filter rule to apply. Valid values: `SIMPLE_PATTERN`. +* `value` - (Optional) A single filter string that consists of the patterns to exclude. The patterns are delimited by "|" (that is, a pipe), for example: `/folder1|/folder2` + +### includes Argument Reference + +* `filter_type` - (Optional) The type of filter rule to apply. Valid values: `SIMPLE_PATTERN`. +* `value` - (Optional) A single filter string that consists of the patterns to include. The patterns are delimited by "|" (that is, a pipe), for example: `/folder1|/folder2` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the DataSync Task. 
+* `arn` - Amazon Resource Name (ARN) of the DataSync Task.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_datasync_task` using the DataSync Task Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_datasync_task` using the DataSync Task Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_datasync_task.example arn:aws:datasync:us-east-1:123456789012:task/task-12345678901234567
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dax_cluster.html.markdown b/website/docs/cdktf/python/r/dax_cluster.html.markdown
new file mode 100644
index 00000000000..7c0c706e126
--- /dev/null
+++ b/website/docs/cdktf/python/r/dax_cluster.html.markdown
@@ -0,0 +1,138 @@
+---
+subcategory: "DynamoDB Accelerator (DAX)"
+layout: "aws"
+page_title: "AWS: aws_dax_cluster"
+description: |-
+  Provides a DAX Cluster resource.
+---
+
+
+
+# Resource: aws_dax_cluster
+
+Provides a DAX Cluster resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dax_cluster import DaxCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DaxCluster(self, "bar",
+            cluster_name="cluster-example",
+            iam_role_arn=Token.as_string(example.arn),
+            node_type="dax.r4.large",
+            replication_factor=1
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cluster_endpoint_encryption_type` – (Optional) The type of encryption the
+cluster's endpoint should support. Valid values are: `NONE` and `TLS`.
+Default value is `NONE`.
+
+* `cluster_name` – (Required) Group identifier. DAX converts this name to
+lowercase
+
+* `iam_role_arn` - (Required) A valid Amazon Resource Name (ARN) that identifies
+an IAM role. At runtime, DAX will assume this role and use the role's
+permissions to access DynamoDB on your behalf
+
+* `node_type` – (Required) The compute and memory capacity of the nodes. See
+[Nodes][1] for supported node types
+
+* `replication_factor` – (Required) The number of nodes in the DAX cluster. A
+replication factor of 1 will create a single-node cluster, without any read
+replicas
+
+* `availability_zones` - (Optional) List of Availability Zones in which the
+nodes will be created
+
+* `description` – (Optional) Description for the cluster
+
+* `notification_topic_arn` – (Optional) An Amazon Resource Name (ARN) of an
+SNS topic to send DAX notifications to. Example:
+`arn:aws:sns:us-east-1:012345678999:my_sns_topic`
+
+* `parameter_group_name` – (Optional) Name of the parameter group to associate
+with this DAX cluster
+
+* `maintenance_window` – (Optional) Specifies the weekly time range for when
+maintenance on the cluster is performed. The format is `ddd:hh24:mi-ddd:hh24:mi`
+(24H Clock UTC). The minimum maintenance window is a 60 minute period. Example:
+`sun:05:00-sun:09:00`
+
+* `security_group_ids` – (Optional) One or more VPC security groups associated
+with the cluster
+
+* `server_side_encryption` - (Optional) Encrypt at rest options
+
+* `subnet_group_name` – (Optional) Name of the subnet group to be used for the
+cluster
+
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `server_side_encryption` object supports the following (a usage sketch follows):
+
+* `enabled` - (Optional) Whether to enable encryption at rest. Defaults to `false`.
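+
+As an illustration (not on the original page), a hedged sketch that enables encryption at rest; it assumes the block maps to a generated `DaxClusterServerSideEncryption` struct and reuses the IAM role from the example above.
+
+```python
+# A hedged sketch, not from the original page. DaxClusterServerSideEncryption
+# is an assumed generated struct name; example.arn is the IAM role referenced
+# in the Example Usage section above.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.dax_cluster import DaxCluster, DaxClusterServerSideEncryption
+class EncryptedClusterSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DaxCluster(self, "encrypted",
+            cluster_name="cluster-encrypted",
+            iam_role_arn=Token.as_string(example.arn),
+            node_type="dax.r4.large",
+            replication_factor=1,
+            server_side_encryption=DaxClusterServerSideEncryption(
+                enabled=True  # encrypt data at rest; defaults to false
+            )
+        )
+```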
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the DAX cluster
+
+* `nodes` - List of node objects including `id`, `address`, `port` and
+`availability_zone`. Referenceable e.g., as
+`${aws_dax_cluster.test.nodes.0.address}`
+
+* `configuration_endpoint` - The configuration endpoint for this DAX cluster,
+consisting of a DNS name and a port number
+
+* `cluster_address` - The DNS name of the DAX cluster without the port appended
+
+* `port` - The port used by the configuration endpoint
+
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `45m`)
+- `update` - (Default `45m`)
+- `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DAX Clusters using the `cluster_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DAX Clusters using the `cluster_name`. For example:
+
+```console
+% terraform import aws_dax_cluster.my_cluster my_cluster
+```
+
+[1]: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DAX.concepts.cluster.html#DAX.concepts.nodes
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dax_parameter_group.html.markdown b/website/docs/cdktf/python/r/dax_parameter_group.html.markdown
new file mode 100644
index 00000000000..b5e54e071ac
--- /dev/null
+++ b/website/docs/cdktf/python/r/dax_parameter_group.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "DynamoDB Accelerator (DAX)"
+layout: "aws"
+page_title: "AWS: aws_dax_parameter_group"
+description: |-
+  Provides a DAX Parameter Group resource.
+---
+
+
+
+# Resource: aws_dax_parameter_group
+
+Provides a DAX Parameter Group resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dax_parameter_group import DaxParameterGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DaxParameterGroup(self, "example",
+            name="example",
+            parameters=[DaxParameterGroupParameters(
+                name="query-ttl-millis",
+                value="100000"
+            ), DaxParameterGroupParameters(
+                name="record-ttl-millis",
+                value="100000"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` – (Required) The name of the parameter group.
+
+* `description` - (Optional, ForceNew) A description of the parameter group.
+
+* `parameters` – (Optional) The parameters of the parameter group.
+
+## parameters
+
+`parameters` supports the following:
+
+* `name` - (Required) The name of the parameter.
+* `value` - (Required) The value for the parameter.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the parameter group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DAX Parameter Group using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DAX Parameter Group using the `name`. For example:
+
+```console
+% terraform import aws_dax_parameter_group.example my_dax_pg
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dax_subnet_group.html.markdown b/website/docs/cdktf/python/r/dax_subnet_group.html.markdown
new file mode 100644
index 00000000000..c7275bb24b0
--- /dev/null
+++ b/website/docs/cdktf/python/r/dax_subnet_group.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "DynamoDB Accelerator (DAX)"
+layout: "aws"
+page_title: "AWS: aws_dax_subnet_group"
+description: |-
+  Provides a DAX Subnet Group resource.
+---
+
+
+
+# Resource: aws_dax_subnet_group
+
+Provides a DAX Subnet Group resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dax_subnet_group import DaxSubnetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DaxSubnetGroup(self, "example",
+            name="example",
+            subnet_ids=[example1.id, example2.id]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` – (Required) The name of the subnet group.
+* `description` - (Optional) A description of the subnet group.
+* `subnet_ids` – (Required) A list of VPC subnet IDs for the subnet group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the subnet group.
+* `vpc_id` – VPC ID of the subnet group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DAX Subnet Group using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DAX Subnet Group using the `name`. For example:
+
+```console
+% terraform import aws_dax_subnet_group.example my_dax_sg
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/db_cluster_snapshot.html.markdown b/website/docs/cdktf/python/r/db_cluster_snapshot.html.markdown
new file mode 100644
index 00000000000..ab0eedb9079
--- /dev/null
+++ b/website/docs/cdktf/python/r/db_cluster_snapshot.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_cluster_snapshot"
+description: |-
+  Manages an RDS database cluster snapshot.
+---
+
+
+
+# Resource: aws_db_cluster_snapshot
+
+Manages an RDS database cluster snapshot for Aurora clusters. For managing RDS database instance snapshots, see the [`aws_db_snapshot` resource](/docs/providers/aws/r/db_snapshot.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_cluster_snapshot import DbClusterSnapshot
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbClusterSnapshot(self, "example",
+            db_cluster_identifier=Token.as_string(aws_rds_cluster_example.id),
+            db_cluster_snapshot_identifier="resourcetestsnapshot1234"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `db_cluster_identifier` - (Required) The DB Cluster Identifier from which to take the snapshot.
+* `db_cluster_snapshot_identifier` - (Required) The Identifier for the snapshot.
+* `tags` - (Optional) A map of tags to assign to the DB cluster. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `allocated_storage` - Allocated storage size in gigabytes (GB).
+* `availability_zones` - List of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in.
+* `db_cluster_snapshot_arn` - The Amazon Resource Name (ARN) for the DB Cluster Snapshot.
+* `engine` - Name of the database engine.
+* `engine_version` - Version of the database engine for this DB cluster snapshot.
+* `kms_key_id` - If storage_encrypted is true, the AWS KMS key identifier for the encrypted DB cluster snapshot.
+* `license_model` - License model information for the restored DB cluster.
+* `port` - Port that the DB cluster was listening on at the time of the snapshot.
+* `source_db_cluster_snapshot_identifier` - DB Cluster Snapshot ARN that the DB Cluster Snapshot was copied from. It only has a value in the case of a cross-customer or cross-region copy.
+* `storage_encrypted` - Whether the DB cluster snapshot is encrypted. +* `status` - The status of this DB Cluster Snapshot. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - The VPC ID associated with the DB cluster snapshot. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `20m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_db_cluster_snapshot` using the cluster snapshot identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_db_cluster_snapshot` using the cluster snapshot identifier. For example: + +```console +% terraform import aws_db_cluster_snapshot.example my-cluster-snapshot +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_event_subscription.html.markdown b/website/docs/cdktf/python/r/db_event_subscription.html.markdown new file mode 100644 index 00000000000..97155e30584 --- /dev/null +++ b/website/docs/cdktf/python/r/db_event_subscription.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_event_subscription" +description: |- + Provides a DB event subscription resource. +--- + + + +# Resource: aws_db_event_subscription + +Provides a DB event subscription resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.db_event_subscription import DbEventSubscription +from imports.aws.db_instance import DbInstance +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = DbInstance(self, "default", + allocated_storage=10, + db_name="mydb", + db_subnet_group_name="my_database_subnet_group", + engine="mysql", + engine_version="5.6.17", + instance_class="db.t2.micro", + parameter_group_name="default.mysql5.6", + password="bar", + username="foo" + ) + aws_sns_topic_default = SnsTopic(self, "default_1", + name="rds-events" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sns_topic_default.override_logical_id("default") + aws_db_event_subscription_default = DbEventSubscription(self, "default_2", + event_categories=["availability", "deletion", "failover", "failure", "low storage", "maintenance", "notification", "read replica", "recovery", "restoration" + ], + name="rds-event-sub", + sns_topic=Token.as_string(aws_sns_topic_default.arn), + source_ids=[default_var.identifier], + source_type="db-instance" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_db_event_subscription_default.override_logical_id("default") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the DB event subscription. By default generated by Terraform. +* `name_prefix` - (Optional) The name of the DB event subscription. Conflicts with `name`. +* `sns_topic` - (Required) The SNS topic to send events to. +* `source_ids` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a source_type must also be specified. +* `source_type` - (Optional) The type of source that will be generating the events. Valid options are `db-instance`, `db-security-group`, `db-parameter-group`, `db-snapshot`, `db-cluster`, `db-cluster-snapshot`, or `db-proxy`. If not set, all sources will be subscribed to. +* `event_categories` - (Optional) A list of event categories for a SourceType that you want to subscribe to. See http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html or run `aws rds describe-event-categories`. +* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to true. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the RDS event notification subscription +* `arn` - The Amazon Resource Name of the RDS event notification subscription +* `customer_aws_id` - The AWS customer account associated with the RDS event notification subscription +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `40m`) +- `delete` - (Default `40m`) +- `update` - (Default `40m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Event Subscriptions using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DB Event Subscriptions using the `name`. For example: + +```console +% terraform import aws_db_event_subscription.default rds-event-sub +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_instance.html.markdown b/website/docs/cdktf/python/r/db_instance.html.markdown new file mode 100644 index 00000000000..d73e361f733 --- /dev/null +++ b/website/docs/cdktf/python/r/db_instance.html.markdown @@ -0,0 +1,527 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_instance" +description: |- + Provides an RDS instance resource. +--- + + + +# Resource: aws_db_instance + +Provides an RDS instance resource. 
A DB instance is an isolated database
+environment in the cloud. A DB instance can contain multiple user-created
+databases.
+
+Changes to a DB instance can occur when you manually change a parameter, such as
+`allocated_storage`, and are reflected in the next maintenance window. Because
+of this, Terraform may report a difference in its planning phase because a
+modification has not yet taken place. You can use the `apply_immediately` flag
+to instruct the service to apply the change immediately (see documentation
+below).
+
+When upgrading the major version of an engine, `allow_major_version_upgrade` must be set to `true`.
+
+~> **Note:** using `apply_immediately` can result in a brief downtime as the server reboots.
+See the AWS Docs on [RDS Instance Maintenance][instance-maintenance] for more information.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+-> **Hands-on:** Try the [Manage AWS RDS Instances](https://learn.hashicorp.com/tutorials/terraform/aws-rds) tutorial on HashiCorp Learn.
+
+## RDS Instance Class Types
+
+Amazon RDS supports three types of instance classes: Standard, Memory Optimized, and Burstable Performance.
+For more information, please read the AWS RDS documentation about [DB Instance Class Types](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html).
+
+## Low-Downtime Updates
+
+By default, RDS applies updates to DB Instances in-place, which can lead to service interruptions.
+Low-downtime updates minimize service interruptions by performing the updates with an [RDS Blue/Green deployment][blue-green] and switching over the instances when complete.
+
+Low-downtime updates are only available for DB Instances using MySQL and MariaDB,
+as other engines are not supported by RDS Blue/Green deployments.
+
+Backups must be enabled to use low-downtime updates.
+
+Enable low-downtime updates by setting `blue_green_update.enabled` to `true`; a configuration sketch appears under [blue_green_update](#blue_green_update) below.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbInstance(self, "default",
+            allocated_storage=10,
+            db_name="mydb",
+            engine="mysql",
+            engine_version="5.7",
+            instance_class="db.t3.micro",
+            parameter_group_name="default.mysql5.7",
+            password="foobarbaz",
+            skip_final_snapshot=True,
+            username="foo"
+        )
+```
+
+### RDS Custom for Oracle Usage with Replica
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_kms_key import DataAwsKmsKey
+from imports.aws.data_aws_rds_orderable_db_instance import DataAwsRdsOrderableDbInstance
+from imports.aws.db_instance import DbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        by_id = DataAwsKmsKey(self, "by_id",
+            key_id="example-ef278353ceba4a5a97de6784565b9f78"
+        )
+        custom_oracle = DataAwsRdsOrderableDbInstance(self, "custom-oracle",
+            engine="custom-oracle-ee",
+            engine_version="19.c.ee.002",
+            license_model="bring-your-own-license",
+            preferred_instance_classes=["db.r5.24xlarge", "db.r5.16xlarge", "db.r5.12xlarge"
+            ],
+            storage_type="gp3"
+        )
+        default_var = DbInstance(self, "default",
+            allocated_storage=50,
+            auto_minor_version_upgrade=False,
+            backup_retention_period=7,
+            custom_iam_instance_profile="AWSRDSCustomInstanceProfile",
+            db_subnet_group_name=db_subnet_group_name,
+            engine=Token.as_string(custom_oracle.engine),
+            engine_version=Token.as_string(custom_oracle.engine_version),
+            identifier="ee-instance-demo",
+            instance_class=Token.as_string(custom_oracle.instance_class),
+            kms_key_id=Token.as_string(by_id.arn),
+            license_model=Token.as_string(custom_oracle.license_model),
+            multi_az=False,
+            password="avoid-plaintext-passwords",
+            storage_encrypted=True,
+            timeouts=[{
+                "create": "3h",
+                "delete": "3h",
+                "update": "3h"
+            }
+            ],
+            username="test"
+        )
+        DbInstance(self, "test-replica",
+            auto_minor_version_upgrade=False,
+            backup_retention_period=7,
+            custom_iam_instance_profile="AWSRDSCustomInstanceProfile",
+            identifier="ee-instance-replica",
+            instance_class=Token.as_string(custom_oracle.instance_class),
+            kms_key_id=Token.as_string(by_id.arn),
+            multi_az=False,
+            replica_mode="mounted",
+            replicate_source_db=default_var.identifier,
+            skip_final_snapshot=True,
+            storage_encrypted=True,
+            timeouts=[{
+                "create": "3h",
+                "delete": "3h",
+                "update": "3h"
+            }
+            ]
+        )
+```
+
+### Storage Autoscaling
+
+To enable Storage Autoscaling with instances that support the feature, define the `max_allocated_storage` argument higher than the `allocated_storage` argument. Terraform will automatically hide differences with the `allocated_storage` argument value if autoscaling occurs.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, instanceClass):
+        super().__init__(scope, name)
+        DbInstance(self, "example",
+            allocated_storage=50,
+            max_allocated_storage=100,
+            instance_class=instance_class
+        )
+```
+
+### Managed Master Passwords via Secrets Manager, default KMS Key
+
+-> More information about how RDS/Aurora integrates with Secrets Manager to manage master user passwords for your DB instances and clusters can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+You can specify the `manage_master_user_password` attribute to enable managing the master password with Secrets Manager.
You can also update an existing instance to use Secrets Manager by specifying the `manage_master_user_password` attribute and removing the `password` attribute (removal is required).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbInstance(self, "default",
+            allocated_storage=10,
+            db_name="mydb",
+            engine="mysql",
+            engine_version="5.7",
+            instance_class="db.t3.micro",
+            manage_master_user_password=True,
+            parameter_group_name="default.mysql5.7",
+            username="foo"
+        )
+```
+
+### Managed Master Passwords via Secrets Manager, specific KMS Key
+
+-> More information about how RDS/Aurora integrates with Secrets Manager to manage master user passwords for your DB instances and clusters can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+You can specify the `master_user_secret_kms_key_id` attribute to use a specific KMS Key.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            description="Example KMS Key"
+        )
+        DbInstance(self, "default",
+            allocated_storage=10,
+            db_name="mydb",
+            engine="mysql",
+            engine_version="5.7",
+            instance_class="db.t3.micro",
+            manage_master_user_password=True,
+            master_user_secret_kms_key_id=example.key_id,
+            parameter_group_name="default.mysql5.7",
+            username="foo"
+        )
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to the [AWS official
+documentation](http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html).
+
+This resource supports the following arguments:
+
+* `allocated_storage` - (Required unless a `snapshot_identifier` or `replicate_source_db` is provided) The allocated storage in gibibytes. If `max_allocated_storage` is configured, this argument represents the initial storage allocation and differences from the configuration will be ignored automatically when Storage Autoscaling occurs. If `replicate_source_db` is set, the value is ignored during the creation of the instance.
+* `allow_major_version_upgrade` - (Optional) Indicates that major version
+upgrades are allowed. Changing this parameter does not result in an outage and
+the change is asynchronously applied as soon as possible.
+* `apply_immediately` - (Optional) Specifies whether any database modifications
+are applied immediately, or during the next maintenance window. Default is
+`false`.
See [Amazon RDS Documentation for more
+information.](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.DBInstance.Modifying.html)
+* `auto_minor_version_upgrade` - (Optional) Indicates that minor engine upgrades
+will be applied automatically to the DB instance during the maintenance window.
+Defaults to `true`.
+* `availability_zone` - (Optional) The AZ for the RDS instance.
+* `backup_retention_period` - (Optional) The days to retain backups for.
+  Must be between `0` and `35`.
+  Default is `0`.
+  Must be greater than `0` if the database is used as a source for a [Read Replica][instance-replication],
+  uses [low-downtime updates](#low-downtime-updates),
+  or will use [RDS Blue/Green deployments][blue-green].
+* `backup_target` - (Optional, Forces new resource) Specifies where automated backups and manual snapshots are stored. Possible values are `region` (default) and `outposts`. See [Working with Amazon RDS on AWS Outposts](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-on-outposts.html) for more information.
+* `backup_window` - (Optional) The daily time range (in UTC) during which automated backups are created if they are enabled.
+  Example: "09:46-10:16". Must not overlap with `maintenance_window`.
+* `blue_green_update` - (Optional) Enables low-downtime updates using [RDS Blue/Green deployments][blue-green].
+  See [blue_green_update](#blue_green_update) below.
+* `ca_cert_identifier` - (Optional) The identifier of the CA certificate for the DB instance.
+* `character_set_name` - (Optional) The character set name to use for DB
+encoding in Oracle and Microsoft SQL instances (collation). This can't be changed. See [Oracle Character Sets
+Supported in Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.OracleCharacterSets.html)
+or [Server-Level Collation for Microsoft SQL Server](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.SQLServer.CommonDBATasks.Collation.html) for more information.
+* `copy_tags_to_snapshot` – (Optional, boolean) Copy all Instance `tags` to snapshots. Default is `false`.
+* `custom_iam_instance_profile` - (Optional) The instance profile associated with the underlying Amazon EC2 instance of an RDS Custom DB instance.
+* `db_name` - (Optional) The name of the database to create when the DB instance is created. If this parameter is not specified, no database is created in the DB instance. Note that this does not apply for Oracle or SQL Server engines. See the [AWS documentation](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/create-db-instance.html) for more details on what applies for those engines. If you are providing an Oracle db name, it needs to be in all upper case. Cannot be specified for a replica.
+* `db_subnet_group_name` - (Optional) Name of [DB subnet group](/docs/providers/aws/r/db_subnet_group.html). DB instance will
+be created in the VPC associated with the DB subnet group. If unspecified, will
+be created in the `default` VPC, or in EC2 Classic, if available. When working
+with read replicas, it should be specified only if the source database
+specifies an instance in another AWS Region. See [DBSubnetGroupName in API
+action CreateDBInstanceReadReplica](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstanceReadReplica.html)
+for additional read replica constraints.
+* `delete_automated_backups` - (Optional) Specifies whether to remove automated backups immediately after the DB instance is deleted. Default is `true`.
+* `deletion_protection` - (Optional) If the DB instance should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `domain` - (Optional) The ID of the Directory Service Active Directory domain to create the instance in.
+* `domain_iam_role_name` - (Optional, but required if domain is provided) The name of the IAM role to be used when making API calls to the Directory Service.
+* `enabled_cloudwatch_logs_exports` - (Optional) Set of log types to enable for exporting to CloudWatch logs. If omitted, no logs will be exported. Valid values (depending on `engine`). MySQL and MariaDB: `audit`, `error`, `general`, `slowquery`. PostgreSQL: `postgresql`, `upgrade`. MSSQL: `agent`, `error`. Oracle: `alert`, `audit`, `listener`, `trace`.
+* `engine` - (Required unless a `snapshot_identifier` or `replicate_source_db` is provided) The database engine to use. For supported values, see the Engine parameter in [API action CreateDBInstance](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html). Note that for Amazon Aurora instances the engine must match the [DB cluster](/docs/providers/aws/r/rds_cluster.html)'s engine. For information on the difference between the available Aurora MySQL engines see [Comparison between Aurora MySQL 1 and Aurora MySQL 2](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Updates.20180206.html) in the Amazon RDS User Guide.
+* `engine_version` - (Optional) The engine version to use. If `auto_minor_version_upgrade` is enabled, you can provide a prefix of the version such as `5.7` (for `5.7.10`). The actual engine version used is returned in the attribute `engine_version_actual`, see [Attribute Reference](#attribute-reference) below. For supported values, see the EngineVersion parameter in [API action CreateDBInstance](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html). Note that for Amazon Aurora instances the engine version must match the [DB cluster](/docs/providers/aws/r/rds_cluster.html)'s engine version.
+* `final_snapshot_identifier` - (Optional) The name of your final DB snapshot
+when this DB instance is deleted. Must be provided if `skip_final_snapshot` is
+set to `false`. The value must begin with a letter, only contain alphanumeric characters and hyphens, and not end with a hyphen or contain two consecutive hyphens. Must not be provided when deleting a read replica.
+* `iam_database_authentication_enabled` - (Optional) Specifies whether mappings of AWS Identity and Access Management (IAM) accounts to database
+accounts are enabled.
+* `identifier` - (Optional) The name of the RDS instance; if omitted, Terraform will assign a random, unique identifier. Required if `restore_to_point_in_time` is specified.
+* `identifier_prefix` - (Optional) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `instance_class` - (Required) The instance type of the RDS instance.
+* `iops` - (Optional) The amount of provisioned IOPS. Setting this implies a
+storage_type of "io1". Can only be set when `storage_type` is `"io1"` or `"gp3"`.
+Cannot be specified for gp3 storage if the `allocated_storage` value is below a per-`engine` threshold.
+See the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#gp3-storage) for details.
+* `kms_key_id` - (Optional) The ARN for the KMS encryption key. If creating an
+encrypted replica, set this to the destination KMS ARN.
+* `license_model` - (Optional, but required for some DB engines, i.e., Oracle
+SE1) License model information for this DB instance.
+* `maintenance_window` - (Optional) The window to perform maintenance in.
+Syntax: "ddd:hh24:mi-ddd:hh24:mi". E.g., "Mon:00:00-Mon:03:00". See [RDS
+Maintenance Window
+docs](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_UpgradeDBInstance.Maintenance.html#AdjustingTheMaintenanceWindow)
+for more information.
+* `manage_master_user_password` - (Optional) Set to true to allow RDS to manage the master user password in Secrets Manager. Cannot be set if `password` is provided.
+* `master_user_secret_kms_key_id` - (Optional) The Amazon Web Services KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. To use a KMS key in a different Amazon Web Services account, specify the key ARN or alias ARN. If not specified, the default KMS key for your Amazon Web Services account is used.
+* `max_allocated_storage` - (Optional) When configured, the upper limit to which Amazon RDS can automatically scale the storage of the DB instance. Configuring this will automatically ignore differences to `allocated_storage`. Must be greater than or equal to `allocated_storage` or `0` to disable Storage Autoscaling.
+* `monitoring_interval` - (Optional) The interval, in seconds, between points
+when Enhanced Monitoring metrics are collected for the DB instance. To disable
+collecting Enhanced Monitoring metrics, specify 0. The default is 0. Valid
+Values: 0, 1, 5, 10, 15, 30, 60.
+* `monitoring_role_arn` - (Optional) The ARN for the IAM role that permits RDS
+to send enhanced monitoring metrics to CloudWatch Logs. See the [AWS
+Documentation](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Monitoring.html)
+for the IAM permissions needed to allow Enhanced Monitoring for RDS Instances.
+* `multi_az` - (Optional) Specifies if the RDS instance is multi-AZ
+* `nchar_character_set_name` - (Optional, Forces new resource) The national character set is used in the NCHAR, NVARCHAR2, and NCLOB data types for Oracle instances. This can't be changed. See [Oracle Character Sets
+Supported in Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.OracleCharacterSets.html).
+* `network_type` - (Optional) The network type of the DB instance. Valid values: `IPV4`, `DUAL`.
+* `option_group_name` - (Optional) Name of the DB option group to associate.
+* `parameter_group_name` - (Optional) Name of the DB parameter group to
+associate.
+* `password` - (Required unless `manage_master_user_password` is set to `true` or unless a `snapshot_identifier` or `replicate_source_db`
+is provided) Password for the master DB user. Note that this may show up in
+logs, and it will be stored in the state file. Cannot be set if `manage_master_user_password` is set to `true`.
+* `performance_insights_enabled` - (Optional) Specifies whether Performance Insights are enabled. Defaults to false.
+* `performance_insights_kms_key_id` - (Optional) The ARN for the KMS key to encrypt Performance Insights data. When specifying `performance_insights_kms_key_id`, `performance_insights_enabled` needs to be set to true. Once KMS key is set, it can never be changed.
+* `performance_insights_retention_period` - (Optional) Amount of time in days to retain Performance Insights data.
Valid values are `7`, `731` (2 years) or a multiple of `31`. When specifying `performance_insights_retention_period`, `performance_insights_enabled` needs to be set to true. Defaults to '7'. +* `port` - (Optional) The port on which the DB accepts connections. +* `publicly_accessible` - (Optional) Bool to control if instance is publicly +accessible. Default is `false`. +* `replica_mode` - (Optional) Specifies whether the replica is in either `mounted` or `open-read-only` mode. This attribute +is only supported by Oracle instances. Oracle replicas operate in `open-read-only` mode unless otherwise specified. See [Working with Oracle Read Replicas](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/oracle-read-replicas.html) for more information. +* `replicate_source_db` - (Optional) Specifies that this resource is a Replicate +database, and to use this value as the source database. This correlates to the +`identifier` of another Amazon RDS Database to replicate (if replicating within +a single region) or ARN of the Amazon RDS Database to replicate (if replicating +cross-region). Note that if you are +creating a cross-region replica of an encrypted database you will also need to +specify a `kms_key_id`. See [DB Instance Replication][instance-replication] and [Working with +PostgreSQL and MySQL Read Replicas](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ReadRepl.html) +for more information on using Replication. +* `restore_to_point_in_time` - (Optional, Forces new resource) A configuration block for restoring a DB instance to an arbitrary point in time. Requires the `identifier` argument to be set with the name of the new DB instance to be created. See [Restore To Point In Time](#restore-to-point-in-time) below for details. +* `s3_import` - (Optional) Restore from a Percona Xtrabackup in S3. See [Importing Data into an Amazon RDS MySQL DB Instance](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/MySQL.Procedural.Importing.html) +* `skip_final_snapshot` - (Optional) Determines whether a final DB snapshot is +created before the DB instance is deleted. If true is specified, no DBSnapshot +is created. If false is specified, a DB snapshot is created before the DB +instance is deleted, using the value from `final_snapshot_identifier`. Default +is `false`. +* `snapshot_identifier` - (Optional) Specifies whether or not to create this +database from a snapshot. This correlates to the snapshot ID you'd find in the +RDS console, e.g: rds:production-2015-06-26-06-05. +* `storage_encrypted` - (Optional) Specifies whether the DB instance is +encrypted. Note that if you are creating a cross-region read replica this field +is ignored and you should instead declare `kms_key_id` with a valid ARN. The +default is `false` if not specified. +* `storage_type` - (Optional) One of "standard" (magnetic), "gp2" (general +purpose SSD), "gp3" (general purpose SSD that needs `iops` independently) +or "io1" (provisioned IOPS SSD). The default is "io1" if `iops` is specified, +"gp2" if not. +* `storage_throughput` - (Optional) The storage throughput value for the DB instance. Can only be set when `storage_type` is `"gp3"`. Cannot be specified if the `allocated_storage` value is below a per-`engine` threshold. See the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#gp3-storage) for details. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `timezone` - (Optional) Time zone of the DB instance. `timezone` is currently
+only supported by Microsoft SQL Server. The `timezone` can only be set on
+creation. See [MSSQL User
+Guide](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_SQLServer.html#SQLServer.Concepts.General.TimeZone)
+for more information.
+* `username` - (Required unless a `snapshot_identifier` or `replicate_source_db`
+is provided) Username for the master DB user. Cannot be specified for a replica.
+* `vpc_security_group_ids` - (Optional) List of VPC security groups to
+associate.
+* `customer_owned_ip_enabled` - (Optional) Indicates whether to enable a customer-owned IP address (CoIP) for an RDS on Outposts DB instance. See [CoIP for RDS on Outposts](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-on-outposts.html#rds-on-outposts.coip) for more information.
+
+~> **NOTE:** Removing the `replicate_source_db` attribute from an existing RDS
+Replicate database managed by Terraform will promote the database to a fully
+standalone database.
+
+### Restore To Point In Time
+
+-> **Note:** You can restore to any point in time before the source DB instance's `latest_restorable_time` or a point up to the number of days specified in the source DB instance's `backup_retention_period`.
+For more information, please refer to the [Developer Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PIT.html).
+This setting does not apply to `aurora-mysql` or `aurora-postgresql` DB engines. For Aurora, refer to the [`aws_rds_cluster` resource documentation](/docs/providers/aws/r/rds_cluster.html#restore_in_time).
+
+The `restore_to_point_in_time` block supports the following arguments (a usage sketch follows the list):
+
+* `restore_time` - (Optional) The date and time to restore from. Value must be a time in Universal Coordinated Time (UTC) format and must be before the latest restorable time for the DB instance. Cannot be specified with `use_latest_restorable_time`.
+* `source_db_instance_identifier` - (Optional) The identifier of the source DB instance from which to restore. Must match the identifier of an existing DB instance. Required if `source_db_instance_automated_backups_arn` or `source_dbi_resource_id` is not specified.
+* `source_db_instance_automated_backups_arn` - (Optional) The ARN of the automated backup from which to restore. Required if `source_db_instance_identifier` or `source_dbi_resource_id` is not specified.
+* `source_dbi_resource_id` - (Optional) The resource ID of the source DB instance from which to restore. Required if `source_db_instance_identifier` or `source_db_instance_automated_backups_arn` is not specified.
+* `use_latest_restorable_time` - (Optional) A boolean value that indicates whether the DB instance is restored from the latest backup time. Defaults to `false`. Cannot be specified with `restore_time`.
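+
+As an illustration (not on the original page), a hedged sketch that restores a new instance from a source instance's latest restorable time; it assumes the block maps to a generated `DbInstanceRestoreToPointInTime` struct and that a source instance identified as `mydb` exists.
+
+```python
+# A hedged sketch, not from the original page. DbInstanceRestoreToPointInTime
+# is an assumed generated struct name; "mydb" is a hypothetical source instance.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.db_instance import DbInstance, DbInstanceRestoreToPointInTime
+class RestoreSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbInstance(self, "restored",
+            identifier="mydb-restored",  # identifier is required when restoring
+            instance_class="db.t3.micro",
+            restore_to_point_in_time=DbInstanceRestoreToPointInTime(
+                source_db_instance_identifier="mydb",
+                use_latest_restorable_time=True  # cannot be combined with restore_time
+            ),
+            skip_final_snapshot=True
+        )
+```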
+
+### S3 Import Options
+
+Full details on the core parameters and impacts are in the API Docs: [RestoreDBInstanceFromS3](http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_RestoreDBInstanceFromS3.html). Sample:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, instanceClass):
+        super().__init__(scope, name)
+        DbInstance(self, "db",
+            s3_import=DbInstanceS3Import(
+                bucket_name="mybucket",
+                bucket_prefix="backups",
+                ingestion_role="arn:aws:iam::1234567890:role/role-xtrabackup-rds-restore",
+                source_engine="mysql",
+                source_engine_version="5.6"
+            ),
+            instance_class=instance_class
+        )
+```
+
+* `bucket_name` - (Required) The bucket name where your backup is stored
+* `bucket_prefix` - (Optional) Can be blank, but is the path to your backup
+* `ingestion_role` - (Required) Role applied to load the data.
+* `source_engine` - (Required, as of Feb 2018 only 'mysql' supported) Source engine for the backup
+* `source_engine_version` - (Required, as of Feb 2018 only '5.6' supported) Version of the source engine used to make the backup
+
+This will not recreate the resource if the S3 object changes in some way. It's only used to initialize the database.
+
+## blue_green_update
+
+* `enabled` - (Optional) Enables [low-downtime updates](#low-downtime-updates) when `true`.
+  Default is `false`. A configuration sketch follows.
+
+[instance-replication]:
+https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.Replication.html
+[instance-maintenance]:
+https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_UpgradeDBInstance.Maintenance.html
+[blue-green]:
+https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/blue-green-deployments.html
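+
+As an illustration (not on the original page), a hedged sketch that opts a MySQL instance into low-downtime updates; it assumes the block maps to a generated `DbInstanceBlueGreenUpdate` struct.
+
+```python
+# A hedged sketch, not from the original page. DbInstanceBlueGreenUpdate is an
+# assumed generated struct name for the blue_green_update block.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.db_instance import DbInstance, DbInstanceBlueGreenUpdate
+class BlueGreenSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbInstance(self, "bg",
+            allocated_storage=10,
+            engine="mysql",
+            instance_class="db.t3.micro",
+            username="foo",
+            manage_master_user_password=True,
+            backup_retention_period=7,  # backups must be enabled for Blue/Green updates
+            blue_green_update=DbInstanceBlueGreenUpdate(
+                enabled=True
+            ),
+            skip_final_snapshot=True
+        )
+```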
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `address` - The hostname of the RDS instance. See also `endpoint` and `port`.
+* `arn` - The ARN of the RDS instance.
+* `allocated_storage` - The amount of allocated storage.
+* `availability_zone` - The availability zone of the instance.
+* `backup_retention_period` - The backup retention period.
+* `backup_window` - The backup window.
+* `ca_cert_identifier` - Identifier of the CA certificate for the
+DB instance.
+* `db_name` - The database name.
+* `domain` - The ID of the Directory Service Active Directory domain the instance is joined to
+* `domain_iam_role_name` - The name of the IAM role to be used when making API calls to the Directory Service.
+* `endpoint` - The connection endpoint in `address:port` format.
+* `engine` - The database engine.
+* `engine_version_actual` - The running version of the database.
+* `hosted_zone_id` - The canonical hosted zone ID of the DB instance (to be used
+in a Route 53 Alias record).
+* `id` - RDS DBI resource ID.
+* `instance_class` - The RDS instance class.
+* `latest_restorable_time` - The latest time, in UTC [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), to which a database can be restored with point-in-time restore.
+* `listener_endpoint` - Specifies the listener connection endpoint for SQL Server Always On. See [endpoint](#endpoint) below.
+* `maintenance_window` - The instance maintenance window.
+* `master_user_secret` - A block that specifies the master user secret. Only available when `manage_master_user_password` is set to true. [Documented below](#master_user_secret).
+* `multi_az` - If the RDS instance is multi AZ enabled.
+* `port` - The database port.
+* `resource_id` - The RDS Resource ID of this instance.
+* `status` - The RDS instance status.
+* `storage_encrypted` - Whether the DB instance is encrypted.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `username` - The master username for the database.
+
+On Oracle and Microsoft SQL instances, the following attributes are additionally exported:
+
+* `character_set_name` - The character set (collation) used on Oracle and Microsoft SQL instances.
+
+### Endpoint
+
+* `address` - Specifies the DNS address of the DB instance.
+* `hosted_zone_id` - Specifies the ID that Amazon Route 53 assigns when you create a hosted zone.
+* `port` - Specifies the port that the database engine is listening on.
+
+### master_user_secret
+
+The `master_user_secret` configuration block supports the following attributes:
+
+* `kms_key_id` - The Amazon Web Services KMS key identifier that is used to encrypt the secret.
+* `secret_arn` - The Amazon Resource Name (ARN) of the secret.
+* `secret_status` - The status of the secret. Valid Values: `creating` | `active` | `rotating` | `impaired`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `40m`)
+- `update` - (Default `80m`)
+- `delete` - (Default `60m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Instances using the `identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DB Instances using the `identifier`. For example:
+
+```console
+% terraform import aws_db_instance.default mydb-rds-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/db_instance_automated_backups_replication.markdown b/website/docs/cdktf/python/r/db_instance_automated_backups_replication.markdown
new file mode 100644
index 00000000000..278b8571c00
--- /dev/null
+++ b/website/docs/cdktf/python/r/db_instance_automated_backups_replication.markdown
@@ -0,0 +1,153 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_instance_automated_backups_replication"
+description: |-
+  Enables replication of automated backups to a different AWS Region.
+---
+
+
+
+# Resource: aws_db_instance_automated_backups_replication
+
+Manages cross-region replication of automated backups to a different AWS Region. Documentation for cross-region automated backup replication can be found at:
+
+* [Replicating automated backups to another AWS Region](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ReplicateBackups.html)
+
+-> **Note:** This resource has to be created in the destination region.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance_automated_backups_replication import DbInstanceAutomatedBackupsReplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbInstanceAutomatedBackupsReplication(self, "default",
+            retention_period=14,
+            source_db_instance_arn="arn:aws:rds:us-west-2:123456789012:db:mydatabase"
+        )
+```
+
+## Encrypting the automated backup with KMS
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance_automated_backups_replication import DbInstanceAutomatedBackupsReplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbInstanceAutomatedBackupsReplication(self, "default",
+            kms_key_id="arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012",
+            source_db_instance_arn="arn:aws:rds:us-west-2:123456789012:db:mydatabase"
+        )
+```
+
+## Example including an RDS DB instance
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+from imports.aws.db_instance_automated_backups_replication import DbInstanceAutomatedBackupsReplication
+from imports.aws.kms_key import KmsKey
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            region="us-east-1"
+        )
+        replica = AwsProvider(self, "aws_1",
+            alias="replica",
+            region="us-west-2"
+        )
+        default_var = DbInstance(self, "default",
+            allocated_storage=10,
+            backup_retention_period=7,
+            db_name="mydb",
+            engine="postgres",
+            engine_version="13.4",
+            identifier="mydb",
+            instance_class="db.t3.micro",
+            password="mustbeeightcharacters",
+            skip_final_snapshot=True,
+            storage_encrypted=True,
+            username="masterusername"
+        )
+        aws_kms_key_default = KmsKey(self, "default_3",
+            description="Encryption key for automated backups",
+            provider=replica
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kms_key_default.override_logical_id("default")
+        aws_db_instance_automated_backups_replication_default = DbInstanceAutomatedBackupsReplication(self, "default_4",
+            kms_key_id=Token.as_string(aws_kms_key_default.arn),
+            provider=replica,
+            source_db_instance_arn=default_var.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_db_instance_automated_backups_replication_default.override_logical_id("default")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `kms_key_id` - (Optional, Forces new resource) The AWS KMS key identifier for encryption of the replicated automated backups. The KMS key ID is the Amazon Resource Name (ARN) for the KMS encryption key in the destination AWS Region, for example, `arn:aws:kms:us-east-1:123456789012:key/AKIAIOSFODNN7EXAMPLE`.
+* `pre_signed_url` - (Optional, Forces new resource) A URL that contains a [Signature Version 4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html) signed request for the [`StartDBInstanceAutomatedBackupsReplication`](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_StartDBInstanceAutomatedBackupsReplication.html) action to be called in the AWS Region of the source DB instance. +* `retention_period` - (Optional, Forces new resource) The retention period for the replicated automated backups, defaults to `7`. +* `source_db_instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the source DB instance for the replicated automated backups, for example, `arn:aws:rds:us-west-2:123456789012:db:mydatabase`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the replicated automated backups. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `75m`) +- `delete` - (Default `75m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS instance automated backups replication using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import RDS instance automated backups replication using the `arn`. For example: + +```console +% terraform import aws_db_instance_automated_backups_replication.default arn:aws:rds:us-east-1:123456789012:auto-backup:ab-faaa2mgdj1vmp4xflr7yhsrmtbtob7ltrzzz2my +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_instance_role_association.html.markdown b/website/docs/cdktf/python/r/db_instance_role_association.html.markdown new file mode 100644 index 00000000000..8148934b60e --- /dev/null +++ b/website/docs/cdktf/python/r/db_instance_role_association.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_instance_role_association" +description: |- + Manages an RDS DB Instance association with an IAM Role. +--- + + + +# Resource: aws_db_instance_role_association + +Manages an RDS DB Instance association with an IAM Role. Example use cases: + +* [Amazon RDS Oracle integration with Amazon S3](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/oracle-s3-integration.html) +* [Importing Amazon S3 Data into an RDS PostgreSQL DB Instance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PostgreSQL.S3Import.html) + +-> To manage the RDS DB Instance IAM Role for [Enhanced Monitoring](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Monitoring.OS.html), see the `aws_db_instance` resource `monitoring_role_arn` argument instead. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.db_instance_role_association import DbInstanceRoleAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DbInstanceRoleAssociation(self, "example", + db_instance_identifier=Token.as_string(aws_db_instance_example.identifier), + feature_name="S3_INTEGRATION", + role_arn=Token.as_string(aws_iam_role_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `db_instance_identifier` - (Required) DB Instance Identifier to associate with the IAM Role. +* `feature_name` - (Required) Name of the feature for association. This can be found in the AWS documentation relevant to the integration or a full list is available in the `SupportedFeatureNames` list returned by [AWS CLI rds describe-db-engine-versions](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-engine-versions.html). +* `role_arn` - (Required) Amazon Resource Name (ARN) of the IAM Role to associate with the DB Instance. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - DB Instance Identifier and IAM Role ARN separated by a comma (`,`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_db_instance_role_association` using the DB Instance Identifier and IAM Role ARN separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_db_instance_role_association` using the DB Instance Identifier and IAM Role ARN separated by a comma (`,`). For example: + +```console +% terraform import aws_db_instance_role_association.example my-db-instance,arn:aws:iam::123456789012:role/my-role +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_option_group.html.markdown b/website/docs/cdktf/python/r/db_option_group.html.markdown new file mode 100644 index 00000000000..931b298f56d --- /dev/null +++ b/website/docs/cdktf/python/r/db_option_group.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_option_group" +description: |- + Provides an RDS DB option group resource. +--- + + + +# Resource: aws_db_option_group + +Provides an RDS DB option group resource. Documentation of the available options for various RDS engines can be found at: + +* [MariaDB Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.MariaDB.Options.html) +* [Microsoft SQL Server Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.SQLServer.Options.html) +* [MySQL Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.MySQL.Options.html) +* [Oracle Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.Oracle.Options.html) + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.db_option_group import DbOptionGroup, DbOptionGroupOption, DbOptionGroupOptionOptionSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbOptionGroup(self, "example",
+            engine_name="sqlserver-ee",
+            major_engine_version="11.00",
+            name="option-group-test-terraform",
+            option=[DbOptionGroupOption(
+                option_name="Timezone",
+                option_settings=[DbOptionGroupOptionOptionSettings(
+                    name="TIME_ZONE",
+                    value="UTC"
+                )
+                ]
+            ), DbOptionGroupOption(
+                option_name="SQLSERVER_BACKUP_RESTORE",
+                option_settings=[DbOptionGroupOptionOptionSettings(
+                    name="IAM_ROLE_ARN",
+                    value=Token.as_string(aws_iam_role_example.arn)
+                )
+                ]
+            ), DbOptionGroupOption(
+                option_name="TDE"
+            )
+            ],
+            option_group_description="Terraform Option Group"
+        )
+```
+
+~> **Note:** Any modifications to the `aws_db_option_group` are applied immediately, as the provider defaults to immediate application.
+
+~> **WARNING:** You can perform a destroy on an `aws_db_option_group`, as long as it is not associated with any Amazon RDS resource. An option group can be associated with a DB instance, a manual DB snapshot, or an automated DB snapshot.
+
+If you try to delete an option group that is associated with an Amazon RDS resource, an error similar to the following is returned:
+
+> An error occurred (InvalidOptionGroupStateFault) when calling the DeleteOptionGroup operation: The option group 'optionGroupName' cannot be deleted because it is in use.
+
+More information about this can be found [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_WorkingWithOptionGroups.html#USER_WorkingWithOptionGroups.Delete).
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the option group. If omitted, Terraform will assign a random, unique name. Must be lowercase, to match as it is stored in AWS.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. Must be lowercase, to match as it is stored in AWS.
+* `option_group_description` - (Optional) The description of the option group. Defaults to "Managed by Terraform".
+* `engine_name` - (Required) Specifies the name of the engine that this option group should be associated with.
+* `major_engine_version` - (Required) Specifies the major version of the engine that this option group should be associated with.
+* `option` - (Optional) A list of Options to apply.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Option blocks support the following:
+
+* `option_name` - (Required) The Name of the Option (e.g., MEMCACHED).
+* `option_settings` - (Optional) A list of option settings to apply.
+* `port` - (Optional) The Port number when connecting to the Option (e.g., 11211).
+* `version` - (Optional) The version of the option (e.g., 13.1.0.0).
+* `db_security_group_memberships` - (Optional) A list of DB Security Groups for which the option is enabled.
+* `vpc_security_group_memberships` - (Optional) A list of VPC Security Groups for which the option is enabled.
+
+Option Settings blocks support the following:
+
+* `name` - (Optional) The Name of the setting.
+* `value` - (Optional) The Value of the setting.
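+
+-> **Note:** The following block is a hand-written sketch rather than `cdktf convert` output; the engine version and the security group ID are placeholder assumptions. It illustrates how the per-option `port` and `vpc_security_group_memberships` arguments described above might be used for a MEMCACHED option:
+
+```python
+# Hypothetical sketch: a MEMCACHED option with an explicit port and a VPC
+# security group membership. The security group ID is a placeholder.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.db_option_group import DbOptionGroup, DbOptionGroupOption
+class MemcachedOptionSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbOptionGroup(self, "memcached",
+            engine_name="mysql",
+            major_engine_version="5.6",
+            name_prefix="memcached-",
+            option=[DbOptionGroupOption(
+                option_name="MEMCACHED",
+                port=11211,
+                vpc_security_group_memberships=["sg-0123456789abcdef0"]
+            )
+            ]
+        )
+```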
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The db option group name. +* `arn` - The ARN of the db option group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `delete` - (Default `15m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Option groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DB Option groups using the `name`. For example: + +```console +% terraform import aws_db_option_group.example mysql-option-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_parameter_group.html.markdown b/website/docs/cdktf/python/r/db_parameter_group.html.markdown new file mode 100644 index 00000000000..503394a2c7f --- /dev/null +++ b/website/docs/cdktf/python/r/db_parameter_group.html.markdown @@ -0,0 +1,148 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_parameter_group" +description: |- + Provides an RDS DB parameter group resource. +--- + + + +# Resource: aws_db_parameter_group + +Provides an RDS DB parameter group resource. Documentation of the available parameters for various RDS engines can be found at: + +* [Aurora MySQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Reference.html) +* [Aurora PostgreSQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraPostgreSQL.Reference.html) +* [MariaDB Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.MariaDB.Parameters.html) +* [Oracle Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ModifyInstance.Oracle.html#USER_ModifyInstance.Oracle.sqlnet) +* [PostgreSQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.PostgreSQL.CommonDBATasks.html#Appendix.PostgreSQL.CommonDBATasks.Parameters) + +> **Hands-on:** For an example of the `aws_db_parameter_group` in use, follow the [Manage AWS RDS Instances](https://learn.hashicorp.com/tutorials/terraform/aws-rds?in=terraform/aws&utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn. + +~> **NOTE:** After applying your changes, you may encounter a perpetual diff in your Terraform plan +output for a `parameter` whose `value` remains unchanged but whose `apply_method` is changing +(e.g., from `immediate` to `pending-reboot`, or `pending-reboot` to `immediate`). If only the +apply method of a parameter is changing, the AWS API will not register this change. To change +the `apply_method` of a parameter, its value must also change. 
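+
+-> **Note:** The following block is a hand-written sketch rather than `cdktf convert` output; the `name_prefix` value is an assumption, and `innodb_buffer_pool_size` is used only as an example of a static MySQL parameter. It shows a parameter whose `apply_method` changes together with its value, per the note above:
+
+```python
+# Hypothetical sketch: a static engine parameter that cannot be applied
+# without a reboot, so apply_method is set to "pending-reboot".
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.db_parameter_group import DbParameterGroup, DbParameterGroupParameter
+class ApplyMethodSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbParameterGroup(self, "example",
+            family="mysql5.6",
+            name_prefix="apply-method-",
+            parameter=[DbParameterGroupParameter(
+                apply_method="pending-reboot",
+                name="innodb_buffer_pool_size",
+                value="134217728"
+            )
+            ]
+        )
+```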
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_parameter_group import DbParameterGroup, DbParameterGroupParameter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbParameterGroup(self, "default",
+            family="mysql5.6",
+            name="rds-pg",
+            parameter=[DbParameterGroupParameter(
+                name="character_set_server",
+                value="utf8"
+            ), DbParameterGroupParameter(
+                name="character_set_client",
+                value="utf8"
+            )
+            ]
+        )
+```
+
+### `create_before_destroy` Lifecycle Configuration
+
+The [`create_before_destroy`](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#create_before_destroy)
+lifecycle configuration is necessary for modifications that force re-creation of an existing,
+in-use parameter group. This includes common situations like changing the group `name` or
+bumping the `family` version during a major version upgrade. This configuration will prevent destruction
+of the deposed parameter group while it is still in use by the database during the upgrade.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+from imports.aws.db_parameter_group import DbParameterGroup, DbParameterGroupParameter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, instance_class):
+        super().__init__(scope, name)
+        example = DbParameterGroup(self, "example",
+            family="postgres13",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            name="my-pg",
+            parameter=[DbParameterGroupParameter(
+                name="log_connections",
+                value="1"
+            )
+            ]
+        )
+        aws_db_instance_example = DbInstance(self, "example_1",
+            apply_immediately=True,
+            parameter_group_name=example.name,
+            instance_class=instance_class
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_db_instance_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the DB parameter group. If omitted, Terraform will assign a random, unique name.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `family` - (Required, Forces new resource) The family of the DB parameter group.
+* `description` - (Optional, Forces new resource) The description of the DB parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of DB parameters to apply. Note that parameters may differ from one family to another. The full list of parameters can be discovered via [`aws rds describe-db-parameters`](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-parameters.html) after initial creation of the group.
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the DB parameter.
+* `value` - (Required) The value of the DB parameter.
+* `apply_method` - (Optional) "immediate" (default), or "pending-reboot". Some
+  engines can't apply some parameters without a reboot, and you will need to
+  specify "pending-reboot" here.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The db parameter group name.
+* `arn` - The ARN of the db parameter group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Parameter groups using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DB Parameter groups using the `name`. For example:
+
+```console
+% terraform import aws_db_parameter_group.rds_pg rds-pg
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/db_proxy.html.markdown b/website/docs/cdktf/python/r/db_proxy.html.markdown
new file mode 100644
index 00000000000..28c75ad2aa8
--- /dev/null
+++ b/website/docs/cdktf/python/r/db_proxy.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_proxy"
+description: |-
+  Provides an RDS DB proxy resource.
+---
+
+
+
+# Resource: aws_db_proxy
+
+Provides an RDS DB proxy resource. For additional information, see the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_proxy import DbProxy, DbProxyAuth
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbProxy(self, "example",
+            auth=[DbProxyAuth(
+                auth_scheme="SECRETS",
+                description="example",
+                iam_auth="DISABLED",
+                secret_arn=Token.as_string(aws_secretsmanager_secret_example.arn)
+            )
+            ],
+            debug_logging=False,
+            engine_family="MYSQL",
+            idle_client_timeout=1800,
+            name="example",
+            require_tls=True,
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            tags={
+                "Key": "value",
+                "Name": "example"
+            },
+            vpc_security_group_ids=[Token.as_string(aws_security_group_example.id)],
+            vpc_subnet_ids=[Token.as_string(aws_subnet_example.id)]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The identifier for the proxy. This name must be unique for all proxies owned by your AWS account in the specified AWS Region.
An identifier must begin with a letter and must contain only ASCII letters, digits, and hyphens; it can't end with a hyphen or contain two consecutive hyphens. +* `auth` - (Required) Configuration block(s) with authorization mechanisms to connect to the associated instances or clusters. Described below. +* `debug_logging` - (Optional) Whether the proxy includes detailed information about SQL statements in its logs. This information helps you to debug issues involving SQL behavior or the performance and scalability of the proxy connections. The debug information includes the text of SQL statements that you submit through the proxy. Thus, only enable this setting when needed for debugging, and only when you have security measures in place to safeguard any sensitive information that appears in the logs. +* `engine_family` - (Required, Forces new resource) The kinds of databases that the proxy can connect to. This value determines which database network protocol the proxy recognizes when it interprets network traffic to and from the database. For Aurora MySQL, RDS for MariaDB, and RDS for MySQL databases, specify `MYSQL`. For Aurora PostgreSQL and RDS for PostgreSQL databases, specify `POSTGRESQL`. For RDS for Microsoft SQL Server, specify `SQLSERVER`. Valid values are `MYSQL`, `POSTGRESQL`, and `SQLSERVER`. +* `idle_client_timeout` - (Optional) The number of seconds that a connection to the proxy can be inactive before the proxy disconnects it. You can set this value higher or lower than the connection timeout limit for the associated database. +* `require_tls` - (Optional) A Boolean parameter that specifies whether Transport Layer Security (TLS) encryption is required for connections to the proxy. By enabling this setting, you can enforce encrypted TLS connections to the proxy. +* `role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM role that the proxy uses to access secrets in AWS Secrets Manager. +* `vpc_security_group_ids` - (Optional) One or more VPC security group IDs to associate with the new proxy. +* `vpc_subnet_ids` - (Required) One or more VPC subnet IDs to associate with the new proxy. +* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +`auth` blocks support the following: + +* `auth_scheme` - (Optional) The type of authentication that the proxy uses for connections from the proxy to the underlying database. One of `SECRETS`. +* `client_password_auth_type` - (Optional) The type of authentication the proxy uses for connections from clients. Valid values are `MYSQL_NATIVE_PASSWORD`, `POSTGRES_SCRAM_SHA_256`, `POSTGRES_MD5`, and `SQL_SERVER_AUTHENTICATION`. +* `description` - (Optional) A user-specified description about the authentication used by a proxy to log in as a specific database user. +* `iam_auth` - (Optional) Whether to require or disallow AWS Identity and Access Management (IAM) authentication for connections to the proxy. One of `DISABLED`, `REQUIRED`. +* `secret_arn` - (Optional) The Amazon Resource Name (ARN) representing the secret that the proxy uses to authenticate to the RDS DB instance or Aurora DB cluster. These secrets are stored within Amazon Secrets Manager. +* `username` - (Optional) The name of the database user to which the proxy connects. 
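+
+-> **Note:** The following block is a hand-written sketch rather than `cdktf convert` output; the secret ARN is a placeholder assumption. It shows an `auth` block that requires IAM authentication for client connections:
+
+```python
+# Hypothetical sketch: an auth block enforcing IAM authentication.
+# The secret ARN is a placeholder.
+from imports.aws.db_proxy import DbProxyAuth
+
+iam_auth_example = DbProxyAuth(
+    auth_scheme="SECRETS",
+    description="IAM-authenticated client connections",
+    iam_auth="REQUIRED",
+    secret_arn="arn:aws:secretsmanager:us-east-1:123456789012:secret:example-abc123"
+)
+```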
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) for the proxy.
+* `arn` - The Amazon Resource Name (ARN) for the proxy.
+* `endpoint` - The endpoint that you can use to connect to the proxy. You include the endpoint value in the connection string for a database client application.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `update` - (Default `30m`)
+- `delete` - (Default `60m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB proxies using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DB proxies using the `name`. For example:
+
+```console
+% terraform import aws_db_proxy.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/db_proxy_default_target_group.html.markdown b/website/docs/cdktf/python/r/db_proxy_default_target_group.html.markdown
new file mode 100644
index 00000000000..4d242a5f7ef
--- /dev/null
+++ b/website/docs/cdktf/python/r/db_proxy_default_target_group.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_proxy_default_target_group"
+description: |-
+  Manage an RDS DB proxy default target group resource.
+---
+
+
+
+# Resource: aws_db_proxy_default_target_group
+
+Provides a resource to manage an RDS DB proxy default target group.
+
+The `aws_db_proxy_default_target_group` behaves differently from normal resources, in that Terraform does not _create_ or _destroy_ this resource, since it implicitly exists as part of an RDS DB Proxy. On Terraform resource creation it is automatically imported, and on resource destruction, Terraform performs no actions in RDS.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_proxy import DbProxy, DbProxyAuth
+from imports.aws.db_proxy_default_target_group import DbProxyDefaultTargetGroup, DbProxyDefaultTargetGroupConnectionPoolConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DbProxy(self, "example",
+            auth=[DbProxyAuth(
+                auth_scheme="SECRETS",
+                description="example",
+                iam_auth="DISABLED",
+                secret_arn=Token.as_string(aws_secretsmanager_secret_example.arn)
+            )
+            ],
+            debug_logging=False,
+            engine_family="MYSQL",
+            idle_client_timeout=1800,
+            name="example",
+            require_tls=True,
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            tags={
+                "Key": "value",
+                "Name": "example"
+            },
+            vpc_security_group_ids=[Token.as_string(aws_security_group_example.id)],
+            vpc_subnet_ids=[Token.as_string(aws_subnet_example.id)]
+        )
+        aws_db_proxy_default_target_group_example = DbProxyDefaultTargetGroup(self, "example_1",
+            connection_pool_config=DbProxyDefaultTargetGroupConnectionPoolConfig(
+                connection_borrow_timeout=120,
+                init_query="SET x=1, y=2",
+                max_connections_percent=100,
+                max_idle_connections_percent=50,
+                session_pinning_filters=["EXCLUDE_VARIABLE_SETS"]
+            ),
+            db_proxy_name=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_db_proxy_default_target_group_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `db_proxy_name` - (Required) Name of the RDS DB Proxy.
+* `connection_pool_config` - (Optional) The settings that determine the size and behavior of the connection pool for the target group.
+
+`connection_pool_config` blocks support the following:
+
+* `connection_borrow_timeout` - (Optional) The number of seconds for a proxy to wait for a connection to become available in the connection pool. Only applies when the proxy has opened its maximum number of connections and all connections are busy with client sessions.
+* `init_query` - (Optional) One or more SQL statements for the proxy to run when opening each new database connection. Typically used with `SET` statements to make sure that each connection has identical settings such as time zone and character set. This setting is empty by default. For multiple statements, use semicolons as the separator. You can also include multiple variables in a single `SET` statement, such as `SET x=1, y=2`.
+* `max_connections_percent` - (Optional) The maximum size of the connection pool for each target in a target group. For Aurora MySQL, it is expressed as a percentage of the max_connections setting for the RDS DB instance or Aurora DB cluster used by the target group.
+* `max_idle_connections_percent` - (Optional) Controls how actively the proxy closes idle database connections in the connection pool. A high value enables the proxy to leave a high percentage of idle connections open. A low value causes the proxy to close idle client connections and return the underlying database connections to the connection pool. For Aurora MySQL, it is expressed as a percentage of the max_connections setting for the RDS DB instance or Aurora DB cluster used by the target group.
+* `session_pinning_filters` - (Optional) Each item in the list represents a class of SQL operations that normally cause all later statements in a session using a proxy to be pinned to the same underlying database connection. Including an item in the list exempts that class of SQL operations from the pinning behavior.
Currently, the only allowed value is `EXCLUDE_VARIABLE_SETS`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Name of the RDS DB Proxy. +* `arn` - The Amazon Resource Name (ARN) representing the target group. +* `name` - The name of the default target group. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30m`) +- `update` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB proxy default target groups using the `db_proxy_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DB proxy default target groups using the `db_proxy_name`. For example: + +```console +% terraform import aws_db_proxy_default_target_group.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_proxy_endpoint.html.markdown b/website/docs/cdktf/python/r/db_proxy_endpoint.html.markdown new file mode 100644 index 00000000000..51ad588893f --- /dev/null +++ b/website/docs/cdktf/python/r/db_proxy_endpoint.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_proxy_endpoint" +description: |- + Provides an RDS DB proxy endpoint resource. +--- + + + +# Resource: aws_db_proxy_endpoint + +Provides an RDS DB proxy endpoint resource. For additional information, see the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy-endpoints.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.db_proxy_endpoint import DbProxyEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DbProxyEndpoint(self, "example", + db_proxy_endpoint_name="example", + db_proxy_name=test.name, + target_role="READ_ONLY", + vpc_subnet_ids=Token.as_list(property_access(aws_subnet_test, ["*", "id"])) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `db_proxy_endpoint_name` - (Required) The identifier for the proxy endpoint. An identifier must begin with a letter and must contain only ASCII letters, digits, and hyphens; it can't end with a hyphen or contain two consecutive hyphens. +* `db_proxy_name` - (Required) The name of the DB proxy associated with the DB proxy endpoint that you create. +* `vpc_subnet_ids` - (Required) One or more VPC subnet IDs to associate with the new proxy. +* `vpc_security_group_ids` - (Optional) One or more VPC security group IDs to associate with the new proxy. +* `target_role` - (Optional) Indicates whether the DB proxy endpoint can be used for read/write or read-only operations. The default is `READ_WRITE`. Valid values are `READ_WRITE` and `READ_ONLY`. +* `tags` - (Optional) A mapping of tags to assign to the resource. 
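+
+-> **Note:** The following block is a hand-written sketch rather than `cdktf convert` output; the proxy name and subnet IDs are placeholder assumptions, and `target_role` is shown explicitly even though `READ_WRITE` is the default. It illustrates adding a read/write endpoint in dedicated subnets:
+
+```python
+# Hypothetical sketch: an additional read/write endpoint for an existing
+# proxy. The proxy name and subnet IDs are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.db_proxy_endpoint import DbProxyEndpoint
+class ReadWriteEndpointSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DbProxyEndpoint(self, "read_write",
+            db_proxy_endpoint_name="example-rw",
+            db_proxy_name="example",
+            target_role="READ_WRITE",
+            vpc_subnet_ids=["subnet-0123456789abcdef0", "subnet-0123456789abcdef1"]
+        )
+```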
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the proxy and proxy endpoint separated by `/`, `DB-PROXY-NAME/DB-PROXY-ENDPOINT-NAME`. +* `arn` - The Amazon Resource Name (ARN) for the proxy endpoint. +* `endpoint` - The endpoint that you can use to connect to the proxy. You include the endpoint value in the connection string for a database client application. +* `is_default` - Indicates whether this endpoint is the default endpoint for the associated DB proxy. +* `vpc_id` - The VPC ID of the DB proxy endpoint. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30m`) +- `update` - (Default `30m`) +- `delete` - (Default `60m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB proxy endpoints using the `DB-PROXY-NAME/DB-PROXY-ENDPOINT-NAME`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DB proxy endpoints using the `DB-PROXY-NAME/DB-PROXY-ENDPOINT-NAME`. For example: + +```console +% terraform import aws_db_proxy_endpoint.example example/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_proxy_target.html.markdown b/website/docs/cdktf/python/r/db_proxy_target.html.markdown new file mode 100644 index 00000000000..be33bac481f --- /dev/null +++ b/website/docs/cdktf/python/r/db_proxy_target.html.markdown @@ -0,0 +1,136 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_proxy_target" +description: |- + Provides an RDS DB proxy target resource. +--- + + + +# Resource: aws_db_proxy_target + +Provides an RDS DB proxy target resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.db_proxy import DbProxy, DbProxyAuth
+from imports.aws.db_proxy_default_target_group import DbProxyDefaultTargetGroup, DbProxyDefaultTargetGroupConnectionPoolConfig
+from imports.aws.db_proxy_target import DbProxyTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DbProxy(self, "example",
+            auth=[DbProxyAuth(
+                auth_scheme="SECRETS",
+                description="example",
+                iam_auth="DISABLED",
+                secret_arn=Token.as_string(aws_secretsmanager_secret_example.arn)
+            )
+            ],
+            debug_logging=False,
+            engine_family="MYSQL",
+            idle_client_timeout=1800,
+            name="example",
+            require_tls=True,
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            tags={
+                "Key": "value",
+                "Name": "example"
+            },
+            vpc_security_group_ids=[Token.as_string(aws_security_group_example.id)],
+            vpc_subnet_ids=[Token.as_string(aws_subnet_example.id)]
+        )
+        aws_db_proxy_default_target_group_example = DbProxyDefaultTargetGroup(self, "example_1",
+            connection_pool_config=DbProxyDefaultTargetGroupConnectionPoolConfig(
+                connection_borrow_timeout=120,
+                init_query="SET x=1, y=2",
+                max_connections_percent=100,
+                max_idle_connections_percent=50,
+                session_pinning_filters=["EXCLUDE_VARIABLE_SETS"]
+            ),
+            db_proxy_name=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_db_proxy_default_target_group_example.override_logical_id("example")
+        aws_db_proxy_target_example = DbProxyTarget(self, "example_2",
+            db_instance_identifier=Token.as_string(aws_db_instance_example.identifier),
+            db_proxy_name=example.name,
+            target_group_name=Token.as_string(aws_db_proxy_default_target_group_example.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_db_proxy_target_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `db_proxy_name` - (Required, Forces new resource) The name of the DB proxy.
+* `target_group_name` - (Required, Forces new resource) The name of the target group.
+* `db_instance_identifier` - (Optional, Forces new resource) DB instance identifier.
+* `db_cluster_identifier` - (Optional, Forces new resource) DB cluster identifier.
+
+**NOTE:** Exactly one of `db_instance_identifier` or `db_cluster_identifier` must be specified; they cannot both be specified together.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `endpoint` - Hostname for the target RDS DB Instance. Only returned for `RDS_INSTANCE` type.
+* `id` - Identifier of `db_proxy_name`, `target_group_name`, target type (e.g., `RDS_INSTANCE` or `TRACKED_CLUSTER`), and resource identifier separated by forward slashes (`/`).
+* `port` - Port for the target RDS DB Instance or Aurora DB Cluster.
+* `rds_resource_id` - Identifier representing the DB Instance or DB Cluster target.
+* `target_arn` - Amazon Resource Name (ARN) for the DB instance or DB cluster. Currently not returned by the RDS API.
+* `tracked_cluster_id` - DB Cluster identifier for the DB Instance target. Not returned unless manually importing an `RDS_INSTANCE` target that is part of a DB Cluster.
+* `type` - Type of target, e.g., `RDS_INSTANCE` or `TRACKED_CLUSTER`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS DB Proxy Targets using the `db_proxy_name`, `target_group_name`, target type (such as `RDS_INSTANCE` or `TRACKED_CLUSTER`), and resource identifier separated by forward slashes (`/`). For example:
+
+Instances:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Provisioned Clusters:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import RDS DB Proxy Targets using the `db_proxy_name`, `target_group_name`, target type (such as `RDS_INSTANCE` or `TRACKED_CLUSTER`), and resource identifier separated by forward slashes (`/`). For example:
+
+Instances:
+
+```console
+% terraform import aws_db_proxy_target.example example-proxy/default/RDS_INSTANCE/example-instance
+```
+
+Provisioned Clusters:
+
+```console
+% terraform import aws_db_proxy_target.example example-proxy/default/TRACKED_CLUSTER/example-cluster
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/db_snapshot.html.markdown b/website/docs/cdktf/python/r/db_snapshot.html.markdown
new file mode 100644
index 00000000000..c8adaf54319
--- /dev/null
+++ b/website/docs/cdktf/python/r/db_snapshot.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_snapshot"
+description: |-
+  Manages an RDS database instance snapshot.
+---
+
+
+
+# Resource: aws_db_snapshot
+
+Manages an RDS database instance snapshot. For managing RDS database cluster snapshots, see the [`aws_db_cluster_snapshot` resource](/docs/providers/aws/r/db_cluster_snapshot.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+from imports.aws.db_snapshot import DbSnapshot
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bar = DbInstance(self, "bar",
+            allocated_storage=10,
+            backup_retention_period=0,
+            db_name="baz",
+            engine="mysql",
+            engine_version="5.6.21",
+            instance_class="db.t2.micro",
+            maintenance_window="Fri:09:00-Fri:09:30",
+            parameter_group_name="default.mysql5.6",
+            password="barbarbarbar",
+            username="foo"
+        )
+        DbSnapshot(self, "test",
+            db_instance_identifier=bar.identifier,
+            db_snapshot_identifier="testsnapshot1234"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `db_instance_identifier` - (Required) The DB Instance Identifier from which to take the snapshot.
+* `db_snapshot_identifier` - (Required) The Identifier for the snapshot.
+* `shared_accounts` - (Optional) List of AWS Account IDs to share the snapshot with. Use `all` to make the snapshot public.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `allocated_storage` - Specifies the allocated storage size in gigabytes (GB). +* `availability_zone` - Specifies the name of the Availability Zone the DB instance was located in at the time of the DB snapshot. +* `db_snapshot_arn` - The Amazon Resource Name (ARN) for the DB snapshot. +* `encrypted` - Specifies whether the DB snapshot is encrypted. +* `engine` - Specifies the name of the database engine. +* `engine_version` - Specifies the version of the database engine. +* `iops` - Specifies the Provisioned IOPS (I/O operations per second) value of the DB instance at the time of the snapshot. +* `kms_key_id` - The ARN for the KMS encryption key. +* `license_model` - License model information for the restored DB instance. +* `option_group_name` - Provides the option group name for the DB snapshot. +* `source_db_snapshot_identifier` - The DB snapshot Arn that the DB snapshot was copied from. It only has value in case of cross customer or cross region copy. +* `source_region` - The region that the DB snapshot was created in or copied from. +* `status` - Specifies the status of this DB snapshot. +* `storage_type` - Specifies the storage type associated with DB snapshot. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - Provides the VPC ID associated with the DB snapshot. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `20m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_db_snapshot` using the snapshot identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_db_snapshot` using the snapshot identifier. For example: + +```console +% terraform import aws_db_snapshot.example my-snapshot +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_snapshot_copy.html.markdown b/website/docs/cdktf/python/r/db_snapshot_copy.html.markdown new file mode 100644 index 00000000000..df5a7c4df93 --- /dev/null +++ b/website/docs/cdktf/python/r/db_snapshot_copy.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_snapshot_copy" +description: |- + Manages an RDS database instance snapshot copy. +--- + + + +# Resource: aws_db_snapshot_copy + +Manages an RDS database instance snapshot copy. For managing RDS database cluster snapshots, see the [`aws_db_cluster_snapshot` resource](/docs/providers/aws/r/db_cluster_snapshot.html). 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.db_instance import DbInstance
+from imports.aws.db_snapshot import DbSnapshot
+from imports.aws.db_snapshot_copy import DbSnapshotCopy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DbInstance(self, "example",
+            allocated_storage=10,
+            backup_retention_period=0,
+            db_name="baz",
+            engine="mysql",
+            engine_version="5.6.21",
+            instance_class="db.t2.micro",
+            maintenance_window="Fri:09:00-Fri:09:30",
+            parameter_group_name="default.mysql5.6",
+            password="barbarbarbar",
+            username="foo"
+        )
+        aws_db_snapshot_example = DbSnapshot(self, "example_1",
+            db_instance_identifier=example.identifier,
+            db_snapshot_identifier="testsnapshot1234"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_db_snapshot_example.override_logical_id("example")
+        aws_db_snapshot_copy_example = DbSnapshotCopy(self, "example_2",
+            source_db_snapshot_identifier=Token.as_string(aws_db_snapshot_example.db_snapshot_arn),
+            target_db_snapshot_identifier="testsnapshot1234-copy"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_db_snapshot_copy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `copy_tags` - (Optional) Whether to copy existing tags. Defaults to `false`.
+* `destination_region` - (Optional) The destination region to place the snapshot copy.
+* `kms_key_id` - (Optional) KMS key ID.
+* `option_group_name` - (Optional) The name of an option group to associate with the copy of the snapshot.
+* `presigned_url` - (Optional) The URL that contains a Signature Version 4 signed request.
+* `source_db_snapshot_identifier` - (Required) Snapshot identifier of the source snapshot.
+* `target_custom_availability_zone` - (Optional) The external custom Availability Zone.
+* `target_db_snapshot_identifier` - (Required) The Identifier for the snapshot.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Snapshot Identifier.
+* `allocated_storage` - Specifies the allocated storage size in gigabytes (GB).
+* `availability_zone` - Specifies the name of the Availability Zone the DB instance was located in at the time of the DB snapshot.
+* `db_snapshot_arn` - The Amazon Resource Name (ARN) for the DB snapshot.
+* `encrypted` - Specifies whether the DB snapshot is encrypted.
+* `engine` - Specifies the name of the database engine.
+* `engine_version` - Specifies the version of the database engine.
+* `iops` - Specifies the Provisioned IOPS (I/O operations per second) value of the DB instance at the time of the snapshot.
+* `kms_key_id` - The ARN for the KMS encryption key.
+* `license_model` - License model information for the restored DB instance. +* `option_group_name` - Provides the option group name for the DB snapshot. +* `source_db_snapshot_identifier` - The DB snapshot Arn that the DB snapshot was copied from. It only has value in case of cross customer or cross region copy. +* `source_region` - The region that the DB snapshot was created in or copied from. +* `storage_type` - Specifies the storage type associated with DB snapshot. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - Provides the VPC ID associated with the DB snapshot. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `20m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_db_snapshot_copy` using the snapshot identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_db_snapshot_copy` using the snapshot identifier. For example: + +```console +% terraform import aws_db_snapshot_copy.example my-snapshot +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/db_subnet_group.html.markdown b/website/docs/cdktf/python/r/db_subnet_group.html.markdown new file mode 100644 index 00000000000..16623d130a0 --- /dev/null +++ b/website/docs/cdktf/python/r/db_subnet_group.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_subnet_group" +description: |- + Provides an RDS DB subnet group resource. +--- + + + +# Resource: aws_db_subnet_group + +Provides an RDS DB subnet group resource. + +> **Hands-on:** For an example of the `aws_db_subnet_group` in use, follow the [Manage AWS RDS Instances](https://learn.hashicorp.com/tutorials/terraform/aws-rds?in=terraform/aws&utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.db_subnet_group import DbSubnetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DbSubnetGroup(self, "default", + name="main", + subnet_ids=[frontend.id, backend.id], + tags={ + "Name": "My DB subnet group" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional, Forces new resource) The name of the DB subnet group. If omitted, Terraform will assign a random, unique name. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `description` - (Optional) The description of the DB subnet group. Defaults to "Managed by Terraform". 
+* `subnet_ids` - (Required) A list of VPC subnet IDs. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The db subnet group name. +* `arn` - The ARN of the db subnet group. +* `supported_network_types` - The network type of the db subnet group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - Provides the VPC ID of the DB subnet group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Subnet groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DB Subnet groups using the `name`. For example: + +```console +% terraform import aws_db_subnet_group.default production-subnet-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/default_network_acl.html.markdown b/website/docs/cdktf/python/r/default_network_acl.html.markdown new file mode 100644 index 00000000000..11e3227ef8e --- /dev/null +++ b/website/docs/cdktf/python/r/default_network_acl.html.markdown @@ -0,0 +1,227 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_network_acl" +description: |- + Manage a default network ACL. +--- + + + +# Resource: aws_default_network_acl + +Provides a resource to manage a VPC's default network ACL. This resource can manage the default network ACL of the default or a non-default VPC. + +~> **NOTE:** This is an advanced resource with special caveats. Please read this document in its entirety before using this resource. The `aws_default_network_acl` behaves differently from normal resources. Terraform does not _create_ this resource but instead attempts to "adopt" it into management. + +Every VPC has a default network ACL that can be managed but not destroyed. When Terraform first adopts the Default Network ACL, it **immediately removes all rules in the ACL**. It then proceeds to create any rules specified in the configuration. This step is required so that only the rules specified in the configuration are created. + +This resource treats its inline rules as absolute; only the rules defined inline are created, and any additions/removals external to this resource will result in diffs being shown. For these reasons, this resource is incompatible with the `aws_network_acl_rule` resource. + +For more information about Network ACLs, see the AWS Documentation on [Network ACLs][aws-network-acls]. + +## Example Usage + +### Basic Example + +The following config gives the Default Network ACL the same rules that AWS includes but pulls the resource under management by Terraform. This means that any ACL rules added or changed will be detected as drift. 
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Op, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_network_acl import DefaultNetworkAcl, DefaultNetworkAclEgress, DefaultNetworkAclIngress
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mainvpc = Vpc(self, "mainvpc",
+            cidr_block="10.1.0.0/16"
+        )
+        DefaultNetworkAcl(self, "default",
+            default_network_acl_id=mainvpc.default_network_acl_id,
+            egress=[DefaultNetworkAclEgress(
+                action="allow",
+                cidr_block="0.0.0.0/0",
+                from_port=0,
+                # Op.negate(1) renders the protocol as -1, i.e. "all protocols"
+                protocol=Token.as_string(Op.negate(1)),
+                rule_no=100,
+                to_port=0
+            )
+            ],
+            ingress=[DefaultNetworkAclIngress(
+                action="allow",
+                cidr_block="0.0.0.0/0",
+                from_port=0,
+                protocol=Token.as_string(Op.negate(1)),
+                rule_no=100,
+                to_port=0
+            )
+            ]
+        )
+```
+
+### Example: Deny All Egress Traffic, Allow Ingress
+
+The following denies all Egress traffic by omitting any `egress` rules, while including the default `ingress` rule to allow all traffic.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Op, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_network_acl import DefaultNetworkAcl, DefaultNetworkAclIngress
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mainvpc = Vpc(self, "mainvpc",
+            cidr_block="10.1.0.0/16"
+        )
+        DefaultNetworkAcl(self, "default",
+            default_network_acl_id=mainvpc.default_network_acl_id,
+            ingress=[DefaultNetworkAclIngress(
+                action="allow",
+                # allow ingress only from the VPC's own CIDR block
+                cidr_block=mainvpc.cidr_block,
+                from_port=0,
+                protocol=Token.as_string(Op.negate(1)),
+                rule_no=100,
+                to_port=0
+            )
+            ]
+        )
+```
+
+### Example: Deny All Traffic To Any Subnet In The Default Network ACL
+
+This config denies all traffic in the Default ACL. This can be useful if you want to lock down the VPC to force all resources to assign a non-default ACL.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_network_acl import DefaultNetworkAcl
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mainvpc = Vpc(self, "mainvpc",
+            cidr_block="10.1.0.0/16"
+        )
+        DefaultNetworkAcl(self, "default",
+            default_network_acl_id=mainvpc.default_network_acl_id
+        )
+```
+
+### Managing Subnets In A Default Network ACL
+
+Within a VPC, all Subnets must be associated with a Network ACL. In order to "delete" the association between a Subnet and a non-default Network ACL, the association is destroyed by replacing it with an association between the Subnet and the Default ACL instead.
+
+When managing the Default Network ACL, you cannot "remove" Subnets. Instead, they must be reassigned to another Network ACL, or the Subnet itself must be destroyed.
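+
+For example, a minimal sketch of explicitly adopting two existing Subnets into the Default Network ACL (assuming `mainvpc`, `subnet1`, and `subnet2` are constructs defined elsewhere in the stack) might look like the following:
+
+```python
+# Hypothetical sketch: listing the subnets explicitly keeps their association
+# with the Default Network ACL from appearing as drift in future plans.
+DefaultNetworkAcl(self, "default",
+    default_network_acl_id=mainvpc.default_network_acl_id,
+    subnet_ids=[subnet1.id, subnet2.id]
+)
+```
+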
+Because of these requirements, removing the `subnet_ids` attribute from the configuration of an `aws_default_network_acl` resource may result in a recurring plan until the Subnets are reassigned to another Network ACL or are destroyed.
+
+Because Subnets are by default associated with the Default Network ACL, any non-explicit association will show up as a plan to remove the Subnet. For example: if you have a custom `aws_network_acl` with two subnets attached, and you remove the `aws_network_acl` resource, then after successfully destroying this resource, future plans will show a diff on the managed `aws_default_network_acl`, as those two Subnets have been orphaned by the now-destroyed Network ACL and thus adopted by the Default Network ACL. To avoid a recurring plan, they will need to be reassigned, destroyed, or added to the `subnet_ids` attribute of the `aws_default_network_acl` entry.
+
+As an alternative to the above, you can also specify the following lifecycle configuration in your `aws_default_network_acl` resource:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_network_acl import DefaultNetworkAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, default_network_acl_id):
+        super().__init__(scope, name)
+        DefaultNetworkAcl(self, "default",
+            lifecycle=TerraformResourceLifecycle(
+                # ignore_changes takes attribute names as strings
+                ignore_changes=["subnet_ids"]
+            ),
+            default_network_acl_id=default_network_acl_id
+        )
+```
+
+### Removing `aws_default_network_acl` From Your Configuration
+
+Each AWS VPC comes with a Default Network ACL that cannot be deleted. The `aws_default_network_acl` allows you to manage this Network ACL, but Terraform cannot destroy it. Removing this resource from your configuration will remove it from your statefile and management, **but will not destroy the Network ACL.** All Subnet associations and ingress or egress rules will be left as they are at the time of removal. You can resume managing them via the AWS Console.
+
+## Argument Reference
+
+The following arguments are required:
+
+* `default_network_acl_id` - (Required) Network ACL ID to manage. This attribute is exported from `aws_vpc`, or manually found via the AWS Console.
+
+The following arguments are optional:
+
+* `egress` - (Optional) Configuration block for an egress rule. Detailed below.
+* `ingress` - (Optional) Configuration block for an ingress rule. Detailed below.
+* `subnet_ids` - (Optional) List of Subnet IDs to apply the ACL to. See the notes above on Managing Subnets in the Default Network ACL.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### egress and ingress
+
+Both the `egress` and `ingress` configuration blocks have the same arguments.
+
+The following arguments are required:
+
+* `action` - (Required) The action to take.
+* `from_port` - (Required) The from port to match.
+* `protocol` - (Required) The protocol to match. If using the -1 'all' protocol, you must specify a from and to port of 0.
+* `rule_no` - (Required) The rule number. Used for ordering. +* `to_port` - (Required) The to port to match. + +The following arguments are optional: + +* `cidr_block` - (Optional) The CIDR block to match. This must be a valid network mask. +* `icmp_code` - (Optional) The ICMP type code to be used. Default 0. +* `icmp_type` - (Optional) The ICMP type to be used. Default 0. +* `ipv6_cidr_block` - (Optional) The IPv6 CIDR block. + +-> For more information on ICMP types and codes, see [Internet Control Message Protocol (ICMP) Parameters](https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Default Network ACL +* `id` - ID of the Default Network ACL +* `owner_id` - ID of the AWS account that owns the Default Network ACL +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - ID of the associated VPC + +[aws-network-acls]: http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_ACLs.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Default Network ACLs using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Default Network ACLs using the `id`. For example: + +```console +% terraform import aws_default_network_acl.sample acl-7aaabd18 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/default_route_table.html.markdown b/website/docs/cdktf/python/r/default_route_table.html.markdown new file mode 100644 index 00000000000..890eadbe988 --- /dev/null +++ b/website/docs/cdktf/python/r/default_route_table.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_route_table" +description: |- + Provides a resource to manage a default route table of a VPC. +--- + + + +# Resource: aws_default_route_table + +Provides a resource to manage a default route table of a VPC. This resource can manage the default route table of the default or a non-default VPC. + +~> **NOTE:** This is an advanced resource with special caveats. Please read this document in its entirety before using this resource. The `aws_default_route_table` resource behaves differently from normal resources. Terraform does not _create_ this resource but instead attempts to "adopt" it into management. **Do not** use both `aws_default_route_table` to manage a default route table **and** `aws_main_route_table_association` with the same VPC due to possible route conflicts. See [aws_main_route_table_association][tf-main-route-table-association] documentation for more details. + +Every VPC has a default route table that can be managed but not destroyed. When Terraform first adopts a default route table, it **immediately removes all defined routes**. It then proceeds to create any routes specified in the configuration. This step is required so that only the routes specified in the configuration exist in the default route table. 
+
+For more information, see the Amazon VPC User Guide on [Route Tables](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html). For information about managing normal route tables in Terraform, see [`aws_route_table`](/docs/providers/aws/r/route_table.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_route_table import DefaultRouteTable, DefaultRouteTableRoute
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # aws_vpc_example, aws_internet_gateway_example, and
+        # aws_egress_only_internet_gateway_example are escaped references to
+        # resources defined elsewhere in the stack.
+        DefaultRouteTable(self, "example",
+            default_route_table_id=Token.as_string(aws_vpc_example.default_route_table_id),
+            route=[DefaultRouteTableRoute(
+                cidr_block="10.0.1.0/24",
+                gateway_id=Token.as_string(aws_internet_gateway_example.id)
+            ), DefaultRouteTableRoute(
+                egress_only_gateway_id=Token.as_string(aws_egress_only_internet_gateway_example.id),
+                ipv6_cidr_block="::/0"
+            )
+            ],
+            tags={
+                "Name": "example"
+            }
+        )
+```
+
+To subsequently remove all managed routes:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_route_table import DefaultRouteTable
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DefaultRouteTable(self, "example",
+            default_route_table_id=Token.as_string(aws_vpc_example.default_route_table_id),
+            route=[],
+            tags={
+                "Name": "example"
+            }
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `default_route_table_id` - (Required) ID of the default route table.
+
+The following arguments are optional:
+
+* `propagating_vgws` - (Optional) List of virtual gateways for propagation.
+* `route` - (Optional) Configuration block of routes. Detailed below. This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). This means that omitting this argument is interpreted as ignoring any existing routes. To remove all managed routes, an empty list should be specified. See the example above.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### route
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+One of the following destination arguments must be supplied:
+
+* `cidr_block` - (Required) The CIDR block of the route.
+* `ipv6_cidr_block` - (Optional) The IPv6 CIDR block of the route.
+* `destination_prefix_list_id` - (Optional) The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the route.
+
+One of the following target arguments must be supplied:
+
+* `core_network_arn` - (Optional) The Amazon Resource Name (ARN) of a core network.
+* `egress_only_gateway_id` - (Optional) Identifier of a VPC Egress Only Internet Gateway.
+* `gateway_id` - (Optional) Identifier of a VPC internet gateway or a virtual private gateway. +* `instance_id` - (Optional) Identifier of an EC2 instance. +* `nat_gateway_id` - (Optional) Identifier of a VPC NAT gateway. +* `network_interface_id` - (Optional) Identifier of an EC2 network interface. +* `transit_gateway_id` - (Optional) Identifier of an EC2 Transit Gateway. +* `vpc_endpoint_id` - (Optional) Identifier of a VPC Endpoint. This route must be removed prior to VPC Endpoint deletion. +* `vpc_peering_connection_id` - (Optional) Identifier of a VPC peering connection. + +Note that the default route, mapping the VPC's CIDR block to "local", is created implicitly and cannot be specified. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the route table. +* `arn` - The ARN of the route table. +* `owner_id` - ID of the AWS account that owns the route table. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - ID of the VPC. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `2m`) +- `update` - (Default `2m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Default VPC route tables using the `vpc_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Default VPC route tables using the `vpc_id`. For example: + +```console +% terraform import aws_default_route_table.example vpc-33cc44dd +``` + +[aws-route-tables]: http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Route_Tables.html#Route_Replacing_Main_Table +[tf-route-tables]: /docs/providers/aws/r/route_table.html +[tf-main-route-table-association]: /docs/providers/aws/r/main_route_table_association.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/default_security_group.html.markdown b/website/docs/cdktf/python/r/default_security_group.html.markdown new file mode 100644 index 00000000000..00963822504 --- /dev/null +++ b/website/docs/cdktf/python/r/default_security_group.html.markdown @@ -0,0 +1,155 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_security_group" +description: |- + Manage a default security group resource. +--- + + + +# Resource: aws_default_security_group + +Provides a resource to manage a default security group. This resource can manage the default security group of the default or a non-default VPC. + +~> **NOTE:** This is an advanced resource with special caveats. Please read this document in its entirety before using this resource. The `aws_default_security_group` resource behaves differently from normal resources. Terraform does not _create_ this resource but instead attempts to "adopt" it into management. + +When Terraform first begins managing the default security group, it **immediately removes all ingress and egress rules in the Security Group**. 
It then creates any rules specified in the configuration. This ensures that only the rules specified in the configuration are created.
+
+This resource treats its inline rules as absolute; only the rules defined inline are created, and any additions/removals external to this resource will result in diffs being shown. For these reasons, this resource is incompatible with the `aws_security_group_rule` resource.
+
+For more information about default security groups, see the AWS documentation on [Default Security Groups][aws-default-security-groups]. To manage normal security groups, see the [`aws_security_group`](/docs/providers/aws/r/security_group.html) resource.
+
+## Example Usage
+
+The following config gives the default security group the same rules that AWS provides by default but under management by Terraform. This means that any ingress or egress rules added or changed will be detected as drift.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Op, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_security_group import DefaultSecurityGroup, DefaultSecurityGroupEgress, DefaultSecurityGroupIngress
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mainvpc = Vpc(self, "mainvpc",
+            cidr_block="10.1.0.0/16"
+        )
+        DefaultSecurityGroup(self, "default",
+            egress=[DefaultSecurityGroupEgress(
+                cidr_blocks=["0.0.0.0/0"],
+                from_port=0,
+                protocol="-1",
+                to_port=0
+            )
+            ],
+            ingress=[DefaultSecurityGroupIngress(
+                from_port=0,
+                protocol=Token.as_string(Op.negate(1)),
+                # self_attribute maps to the Terraform "self" argument
+                self_attribute=True,
+                to_port=0
+            )
+            ],
+            vpc_id=mainvpc.id
+        )
+```
+
+### Example Config To Deny All Egress Traffic, Allowing Ingress
+
+The following denies all Egress traffic by omitting any `egress` rules, while including the default `ingress` rule to allow all traffic.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Op, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.default_security_group import DefaultSecurityGroup, DefaultSecurityGroupIngress
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mainvpc = Vpc(self, "mainvpc",
+            cidr_block="10.1.0.0/16"
+        )
+        DefaultSecurityGroup(self, "default",
+            ingress=[DefaultSecurityGroupIngress(
+                from_port=0,
+                protocol=Token.as_string(Op.negate(1)),
+                self_attribute=True,
+                to_port=0
+            )
+            ],
+            vpc_id=mainvpc.id
+        )
+```
+
+### Removing `aws_default_security_group` From Your Configuration
+
+Removing this resource from your configuration will remove it from your statefile and management, but will not destroy the Security Group. All ingress or egress rules will be left as they are at the time of removal. You can resume managing them via the AWS Console.
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `egress` - (Optional, VPC only) Configuration block. Detailed below.
+* `ingress` - (Optional) Configuration block. Detailed below.
+* `tags` - (Optional) Map of tags to assign to the resource.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpc_id` - (Optional, Forces new resource) VPC ID. **Note that changing the `vpc_id` will _not_ restore any default security group rules that were modified, added, or removed.** It will be left in its current state. + +### egress and ingress + +Both arguments are processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). + +Both `egress` and `ingress` objects have the same arguments. + +* `cidr_blocks` - (Optional) List of CIDR blocks. +* `description` - (Optional) Description of this rule. +* `from_port` - (Required) Start port (or ICMP type number if protocol is `icmp`) +* `ipv6_cidr_blocks` - (Optional) List of IPv6 CIDR blocks. +* `prefix_list_ids` - (Optional) List of prefix list IDs (for allowing access to VPC endpoints) +* `protocol` - (Required) Protocol. If you select a protocol of "-1" (semantically equivalent to `all`, which is not a valid value here), you must specify a `from_port` and `to_port` equal to `0`. If not `icmp`, `tcp`, `udp`, or `-1` use the [protocol number](https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml). +* `security_groups` - (Optional) List of security groups. A group name can be used relative to the default VPC. Otherwise, group ID. +* `self` - (Optional) Whether the security group itself will be added as a source to this egress rule. +* `to_port` - (Required) End range port (or ICMP code if protocol is `icmp`). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the security group. +* `description` - Description of the security group. +* `id` - ID of the security group. +* `name` - Name of the security group. +* `owner_id` - Owner ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +[aws-default-security-groups]: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-network-security.html#default-security-group + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Groups using the security group `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Security Groups using the security group `id`. For example: + +```console +% terraform import aws_default_security_group.default_sg sg-903004f8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/default_subnet.html.markdown b/website/docs/cdktf/python/r/default_subnet.html.markdown new file mode 100644 index 00000000000..f9d82dae7b8 --- /dev/null +++ b/website/docs/cdktf/python/r/default_subnet.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_subnet" +description: |- + Manage a default subnet resource. 
+--- + + + +# Resource: aws_default_subnet + +Provides a resource to manage a [default subnet](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/default-vpc.html#default-vpc-basics) in the current region. + +**This is an advanced resource** and has special caveats to be aware of when using it. Please read this document in its entirety before using this resource. + +The `aws_default_subnet` resource behaves differently from normal resources in that if a default subnet exists in the specified Availability Zone, Terraform does not _create_ this resource, but instead "adopts" it into management. +If no default subnet exists, Terraform creates a new default subnet. +By default, `terraform destroy` does not delete the default subnet but does remove the resource from Terraform state. +Set the `force_destroy` argument to `true` to delete the default subnet. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.default_subnet import DefaultSubnet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DefaultSubnet(self, "default_az1", + availability_zone="us-west-2a", + tags={ + "Name": "Default subnet for us-west-2a" + } + ) +``` + +## Argument Reference + +The arguments of an `aws_default_subnet` differ slightly from those of [`aws_subnet`](subnet.html): + +* `availability_zone` is required +* The `availability_zone_id`, `cidr_block` and `vpc_id` arguments become computed attributes +* The default value for `map_public_ip_on_launch` is `true` + +This resource supports the following additional arguments: + +* `force_destroy` - (Optional) Whether destroying the resource deletes the default subnet. Default: `false` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `availability_zone_id` - The AZ ID of the subnet +* `cidr_block` - The IPv4 CIDR block assigned to the subnet +* `vpc_id` - The ID of the VPC the subnet is in + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import subnets using the subnet `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import subnets using the subnet `id`. For example: + +```console +% terraform import aws_default_subnet.public_subnet subnet-9d4a7b6c +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/default_vpc.html.markdown b/website/docs/cdktf/python/r/default_vpc.html.markdown new file mode 100644 index 00000000000..ce1521dc241 --- /dev/null +++ b/website/docs/cdktf/python/r/default_vpc.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_vpc" +description: |- + Manage a default VPC resource. +--- + + + +# Resource: aws_default_vpc + +Provides a resource to manage the [default AWS VPC](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/default-vpc.html) +in the current AWS Region. 
+ +If you created your AWS account after 2013-12-04 you have a default VPC in each AWS Region. + +**This is an advanced resource** and has special caveats to be aware of when using it. Please read this document in its entirety before using this resource. + +The `aws_default_vpc` resource behaves differently from normal resources in that if a default VPC exists, Terraform does not _create_ this resource, but instead "adopts" it into management. +If no default VPC exists, Terraform creates a new default VPC, which leads to the implicit creation of [other resources](https://docs.aws.amazon.com/vpc/latest/userguide/default-vpc.html#default-vpc-components). +By default, `terraform destroy` does not delete the default VPC but does remove the resource from Terraform state. +Set the `force_destroy` argument to `true` to delete the default VPC. + +## Example Usage + +Basic usage with tags: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.default_vpc import DefaultVpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DefaultVpc(self, "default", + tags={ + "Name": "Default VPC" + } + ) +``` + +## Argument Reference + +The arguments of an `aws_default_vpc` differ slightly from those of [`aws_vpc`](vpc.html): + +* The `cidr_block` and `instance_tenancy` arguments become computed attributes +* The default value for `enable_dns_hostnames` is `true` + +This resource supports the following additional arguments: + +* `force_destroy` - (Optional) Whether destroying the resource deletes the default VPC. Default: `false` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `cidr_block` - The primary IPv4 CIDR block for the VPC +* `instance_tenancy` - The allowed tenancy of instances launched into the VPC + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Default VPCs using the VPC `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Default VPCs using the VPC `id`. For example: + +```console +% terraform import aws_default_vpc.default vpc-a01106c2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/default_vpc_dhcp_options.html.markdown b/website/docs/cdktf/python/r/default_vpc_dhcp_options.html.markdown new file mode 100644 index 00000000000..df8eca1e908 --- /dev/null +++ b/website/docs/cdktf/python/r/default_vpc_dhcp_options.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_vpc_dhcp_options" +description: |- + Manage the default VPC DHCP Options resource. +--- + + + +# Resource: aws_default_vpc_dhcp_options + +Provides a resource to manage the [default AWS DHCP Options Set](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html#AmazonDNS) +in the current region. + +Each AWS region comes with a default set of DHCP options. 
+**This is an advanced resource**, and has special caveats to be aware of when +using it. Please read this document in its entirety before using this resource. + +The `aws_default_vpc_dhcp_options` behaves differently from normal resources, in that +Terraform does not _create_ this resource, but instead "adopts" it +into management. + +## Example Usage + +Basic usage with tags: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.default_vpc_dhcp_options import DefaultVpcDhcpOptions +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DefaultVpcDhcpOptions(self, "default", + tags={ + "Name": "Default DHCP Option Set" + } + ) +``` + +## Argument Reference + +The arguments of an `aws_default_vpc_dhcp_options` differ slightly from `aws_vpc_dhcp_options` resources. +Namely, the `domain_name`, `domain_name_servers` and `ntp_servers` arguments are computed. +The following arguments are still supported: + +* `netbios_name_servers` - (Optional) List of NETBIOS name servers. +* `netbios_node_type` - (Optional) The NetBIOS node type (1, 2, 4, or 8). AWS recommends to specify 2 since broadcast and multicast are not supported in their network. For more information about these node types, see [RFC 2132](http://www.ietf.org/rfc/rfc2132.txt). +* `owner_id` - The ID of the AWS account that owns the DHCP options set. +* `tags` - (Optional) A map of tags to assign to the resource. + +### Removing `aws_default_vpc_dhcp_options` from your configuration + +The `aws_default_vpc_dhcp_options` resource allows you to manage a region's default DHCP Options Set, +but Terraform cannot destroy it. Removing this resource from your configuration +will remove it from your statefile and management, but will not destroy the DHCP Options Set. +You can resume managing the DHCP Options Set via the AWS Console. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the DHCP Options Set. +* `arn` - The ARN of the DHCP Options Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC DHCP Options using the DHCP Options `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import VPC DHCP Options using the DHCP Options `id`. For example: + +```console +% terraform import aws_default_vpc_dhcp_options.default_options dopt-d9070ebb +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/detective_graph.html.markdown b/website/docs/cdktf/python/r/detective_graph.html.markdown new file mode 100644 index 00000000000..80aedb09b04 --- /dev/null +++ b/website/docs/cdktf/python/r/detective_graph.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Detective" +layout: "aws" +page_title: "AWS: aws_detective_graph" +description: |- + Provides a resource to manage an Amazon Detective graph. 
+--- + + + +# Resource: aws_detective_graph + +Provides a resource to manage an [AWS Detective Graph](https://docs.aws.amazon.com/detective/latest/APIReference/API_CreateGraph.html). As an AWS account may own only one Detective graph per region, provisioning multiple Detective graphs requires a separate provider configuration for each graph. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.detective_graph import DetectiveGraph +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DetectiveGraph(self, "example", + tags={ + "Name": "example-detective-graph" + } + ) +``` + +## Argument Reference + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the instance. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ARN of the Detective Graph. +* `graph_arn` - ARN of the Detective Graph. +* `created_time` - Date and time, in UTC and extended RFC 3339 format, when the Amazon Detective Graph was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_detective_graph` using the ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_detective_graph` using the ARN. For example: + +```console +% terraform import aws_detective_graph.example arn:aws:detective:us-east-1:123456789101:graph:231684d34gh74g4bae1dbc7bd807d02d +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/detective_invitation_accepter.html.markdown b/website/docs/cdktf/python/r/detective_invitation_accepter.html.markdown new file mode 100644 index 00000000000..2a283f1b4c0 --- /dev/null +++ b/website/docs/cdktf/python/r/detective_invitation_accepter.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "Detective" +layout: "aws" +page_title: "AWS: aws_detective_invitation_accepter" +description: |- + Provides a resource to manage an Amazon Detective member invitation accepter. +--- + + + +# Resource: aws_detective_invitation_accepter + +Provides a resource to manage an [Amazon Detective Invitation Accepter](https://docs.aws.amazon.com/detective/latest/APIReference/API_AcceptInvitation.html). Ensure that the accepter is configured to use the AWS account you wish to _accept_ the invitation from the primary graph owner account. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
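+#
+# NOTE: the DetectiveInvitationAccepter below sets provider="awsalternate",
+# which assumes a second, aliased AWS provider configured for the invited
+# member account; the default provider acts as the primary graph owner.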
+# +from imports.aws.detective_graph import DetectiveGraph +from imports.aws.detective_invitation_accepter import DetectiveInvitationAccepter +from imports.aws.detective_member import DetectiveMember +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + primary = DetectiveGraph(self, "primary") + aws_detective_member_primary = DetectiveMember(self, "primary_1", + account_id="ACCOUNT ID", + email_address="EMAIL", + graph_arn=primary.id, + message="Message of the invite" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_detective_member_primary.override_logical_id("primary") + DetectiveInvitationAccepter(self, "member", + depends_on=[aws_detective_member_primary], + graph_arn=primary.graph_arn, + provider="awsalternate" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `graph_arn` - (Required) ARN of the behavior graph that the member account is accepting the invitation for. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier (ID) of the Detective invitation accepter. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_detective_invitation_accepter` using the graph ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_detective_invitation_accepter` using the graph ARN. For example: + +```console +% terraform import aws_detective_invitation_accepter.example arn:aws:detective:us-east-1:123456789101:graph:231684d34gh74g4bae1dbc7bd807d02d +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/detective_member.html.markdown b/website/docs/cdktf/python/r/detective_member.html.markdown new file mode 100644 index 00000000000..b0bde2e3de2 --- /dev/null +++ b/website/docs/cdktf/python/r/detective_member.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Detective" +layout: "aws" +page_title: "AWS: aws_detective_member" +description: |- + Provides a resource to manage an Amazon Detective member. +--- + + + +# Resource: aws_detective_member + +Provides a resource to manage an [Amazon Detective Member](https://docs.aws.amazon.com/detective/latest/APIReference/API_CreateMembers.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.detective_graph import DetectiveGraph +from imports.aws.detective_member import DetectiveMember +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DetectiveGraph(self, "example") + aws_detective_member_example = DetectiveMember(self, "example_1", + account_id="AWS ACCOUNT ID", + disable_email_notification=True, + email_address="EMAIL", + graph_arn=example.id, + message="Message of the invitation" + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match. + aws_detective_member_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Required) AWS account ID for the account. +* `email_address` - (Required) Email address for the account. +* `graph_arn` - (Required) ARN of the behavior graph to invite the member accounts to contribute their data to. +* `message` - (Optional) A custom message to include in the invitation. Amazon Detective adds this message to the standard content that it sends for an invitation. +* `disable_email_notification` - (Optional) If set to true, then the root user of the invited account will _not_ receive an email notification. This notification is in addition to an alert that the root user receives in AWS Personal Health Dashboard. By default, this is set to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier (ID) of the Detective. +* `status` - Current membership status of the member account. +* `administrator_id` - AWS account ID for the administrator account. +* `volume_usage_in_bytes` - Data volume in bytes per day for the member account. +* `invited_time` - Date and time, in UTC and extended RFC 3339 format, when an Amazon Detective membership invitation was last sent to the account. +* `updated_time` - Date and time, in UTC and extended RFC 3339 format, of the most recent change to the member account's status. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_detective_member` using the ARN of the graph followed by the account ID of the member account. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_detective_member` using the ARN of the graph followed by the account ID of the member account. For example: + +```console +% terraform import aws_detective_member.example arn:aws:detective:us-east-1:123456789101:graph:231684d34gh74g4bae1dbc7bd807d02d/123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/devicefarm_device_pool.html.markdown b/website/docs/cdktf/python/r/devicefarm_device_pool.html.markdown new file mode 100644 index 00000000000..557efa6394d --- /dev/null +++ b/website/docs/cdktf/python/r/devicefarm_device_pool.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_device_pool" +description: |- + Provides a Devicefarm device_pool +--- + + + +# Resource: aws_devicefarm_device_pool + +Provides a resource to manage AWS Device Farm Device Pools. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
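+#
+# NOTE: "aws_devicefarm_project_example" below is an escaped reference
+# produced by 'cdktf convert'; it stands for an aws_devicefarm_project
+# (DevicefarmProject) resource defined elsewhere in the stack.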
+#
+from imports.aws.devicefarm_device_pool import DevicefarmDevicePool, DevicefarmDevicePoolRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DevicefarmDevicePool(self, "example",
+            name="example",
+            project_arn=Token.as_string(aws_devicefarm_project_example.arn),
+            rule=[DevicefarmDevicePoolRule(
+                attribute="OS_VERSION",
+                operator="EQUALS",
+                value="\\\"AVAILABLE\\\""
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+* `name` - (Required) The name of the Device Pool.
+* `project_arn` - (Required) The ARN of the project for the device pool.
+* `rule` - (Required) The device pool's rules. See [Rule](#rule).
+* `description` - (Optional) The device pool's description.
+* `max_devices` - (Optional) The number of devices that Device Farm can add to your device pool.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Rule
+
+* `attribute` - (Optional) The rule's stringified attribute. Valid values are: `APPIUM_VERSION`, `ARN`, `AVAILABILITY`, `FLEET_TYPE`, `FORM_FACTOR`, `INSTANCE_ARN`, `INSTANCE_LABELS`, `MANUFACTURER`, `MODEL`, `OS_VERSION`, `PLATFORM`, `REMOTE_ACCESS_ENABLED`, `REMOTE_DEBUG_ENABLED`.
+* `operator` - (Optional) Specifies how Device Farm compares the rule's attribute to the value. The supported operators vary by attribute. Valid values are: `EQUALS`, `NOT_IN`, `IN`, `GREATER_THAN`, `GREATER_THAN_OR_EQUALS`, `LESS_THAN`, `LESS_THAN_OR_EQUALS`, `CONTAINS`.
+* `value` - (Optional) The rule's value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name of this Device Pool.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Device Pools using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DeviceFarm Device Pools using their ARN. For example:
+
+```console
+% terraform import aws_devicefarm_device_pool.example arn:aws:devicefarm:us-west-2:123456789012:devicepool:4fa784c7-ccb4-4dbf-ba4f-02198320daa1/4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/devicefarm_instance_profile.html.markdown b/website/docs/cdktf/python/r/devicefarm_instance_profile.html.markdown
new file mode 100644
index 00000000000..5102619a94d
--- /dev/null
+++ b/website/docs/cdktf/python/r/devicefarm_instance_profile.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "Device Farm"
+layout: "aws"
+page_title: "AWS: aws_devicefarm_instance_profile"
+description: |-
+  Provides a Devicefarm instance profile
+---
+
+
+
+# Resource: aws_devicefarm_instance_profile
+
+Provides a resource to manage AWS Device Farm Instance Profiles.
+
+~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.devicefarm_instance_profile import DevicefarmInstanceProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DevicefarmInstanceProfile(self, "example",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+* `description` - (Optional) The description of the instance profile.
+* `exclude_app_packages_from_cleanup` - (Optional) An array of strings that specifies the list of app packages that should not be cleaned up from the device after a test run.
+* `name` - (Required) The name for the instance profile.
+* `package_cleanup` - (Optional) When set to `true`, Device Farm removes app packages after a test run. The default value is `false` for private devices.
+* `reboot_after_use` - (Optional) When set to `true`, Device Farm reboots the instance after a test run. The default value is `true`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name of this instance profile.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Instance Profiles using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DeviceFarm Instance Profiles using their ARN. For example:
+
+```console
+% terraform import aws_devicefarm_instance_profile.example arn:aws:devicefarm:us-west-2:123456789012:instanceprofile:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/devicefarm_network_profile.html.markdown b/website/docs/cdktf/python/r/devicefarm_network_profile.html.markdown
new file mode 100644
index 00000000000..78e94b136c0
--- /dev/null
+++ b/website/docs/cdktf/python/r/devicefarm_network_profile.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Device Farm"
+layout: "aws"
+page_title: "AWS: aws_devicefarm_network_profile"
+description: |-
+  Provides a Devicefarm network profile
+---
+
+
+
+# Resource: aws_devicefarm_network_profile
+
+Provides a resource to manage AWS Device Farm Network Profiles.
+
+~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.devicefarm_network_profile import DevicefarmNetworkProfile
+from imports.aws.devicefarm_project import DevicefarmProject
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DevicefarmProject(self, "example",
+            name="example"
+        )
+        aws_devicefarm_network_profile_example = DevicefarmNetworkProfile(self, "example_1",
+            name="example",
+            project_arn=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_devicefarm_network_profile_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+* `description` - (Optional) The description of the network profile.
+* `downlink_bandwidth_bits` - (Optional) The data throughput rate in bits per second, as an integer from `0` to `104857600`. Default value is `104857600`.
+* `downlink_delay_ms` - (Optional) Delay time for all packets to destination in milliseconds as an integer from `0` to `2000`.
+* `downlink_jitter_ms` - (Optional) Time variation in the delay of received packets in milliseconds as an integer from `0` to `2000`.
+* `downlink_loss_percent` - (Optional) Proportion of received packets that fail to arrive, from `0` to `100` percent.
+* `name` - (Required) The name for the network profile.
+* `uplink_bandwidth_bits` - (Optional) The data throughput rate in bits per second, as an integer from `0` to `104857600`. Default value is `104857600`.
+* `uplink_delay_ms` - (Optional) Delay time for all packets to destination in milliseconds as an integer from `0` to `2000`.
+* `uplink_jitter_ms` - (Optional) Time variation in the delay of received packets in milliseconds as an integer from `0` to `2000`.
+* `uplink_loss_percent` - (Optional) Proportion of received packets that fail to arrive, from `0` to `100` percent.
+* `project_arn` - (Required) The ARN of the project for the network profile.
+* `type` - (Optional) The type of network profile to create. Valid values are `PRIVATE` and `CURATED`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name of this network profile.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Network Profiles using their ARN.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DeviceFarm Network Profiles using their ARN. For example: + +```console +% terraform import aws_devicefarm_network_profile.example arn:aws:devicefarm:us-west-2:123456789012:networkprofile:4fa784c7-ccb4-4dbf-ba4f-02198320daa1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/devicefarm_project.html.markdown b/website/docs/cdktf/python/r/devicefarm_project.html.markdown new file mode 100644 index 00000000000..6236276a59a --- /dev/null +++ b/website/docs/cdktf/python/r/devicefarm_project.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_project" +description: |- + Provides a Devicefarm project +--- + + + +# Resource: aws_devicefarm_project + +Provides a resource to manage AWS Device Farm Projects. + +For more information about Device Farm Projects, see the AWS Documentation on +[Device Farm Projects][aws-get-project]. + +~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.devicefarm_project import DevicefarmProject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DevicefarmProject(self, "awesome_devices", + name="my-device-farm" + ) +``` + +## Argument Reference + +* `name` - (Required) The name of the project +* `default_job_timeout_minutes` - (Optional) Sets the execution timeout value (in minutes) for a project. All test runs in this project use the specified execution timeout value unless overridden when scheduling a run. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of this project +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +[aws-get-project]: http://docs.aws.amazon.com/devicefarm/latest/APIReference/API_GetProject.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Projects using their ARN. 
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.devicefarm_project import DevicefarmProject
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing project identified by its ARN.
+        DevicefarmProject.generate_config_for_import(self, "example", "arn:aws:devicefarm:us-west-2:123456789012:project:4fa784c7-ccb4-4dbf-ba4f-02198320daa1")
+```
+
+Using `terraform import`, import DeviceFarm Projects using their ARN. For example:
+
+```console
+% terraform import aws_devicefarm_project.example arn:aws:devicefarm:us-west-2:123456789012:project:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/devicefarm_test_grid_project.html.markdown b/website/docs/cdktf/python/r/devicefarm_test_grid_project.html.markdown
new file mode 100644
index 00000000000..c9ccb023d19
--- /dev/null
+++ b/website/docs/cdktf/python/r/devicefarm_test_grid_project.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Device Farm"
+layout: "aws"
+page_title: "AWS: aws_devicefarm_test_grid_project"
+description: |-
+  Provides a Device Farm test grid project
+---
+
+
+
+# Resource: aws_devicefarm_test_grid_project
+
+Provides a resource to manage AWS Device Farm Test Grid Projects.
+
+~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.devicefarm_test_grid_project import DevicefarmTestGridProject
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DevicefarmTestGridProject(self, "example",
+            name="example",
+            vpc_config=DevicefarmTestGridProjectVpcConfig(
+                security_group_ids=Token.as_list(
+                    property_access(aws_security_group_example, ["*", "id"])),
+                subnet_ids=Token.as_list(property_access(aws_subnet_example, ["*", "id"])),
+                vpc_id=Token.as_string(aws_vpc_example.id)
+            )
+        )
+```
+
+-> The converted example above references security group, subnet, and VPC resources defined elsewhere in the original configuration; a self-contained sketch of this wiring follows the attribute reference below.
+
+## Argument Reference
+
+* `name` - (Required) The name of the Selenium testing project.
+* `description` - (Optional) Human-readable description of the project.
+* `vpc_config` - (Required) The VPC security groups and subnets that are attached to a project. See [VPC Config](#vpc-config) below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### VPC Config
+
+* `security_group_ids` - (Required) A list of VPC security group IDs in your Amazon VPC.
+* `subnet_ids` - (Required) A list of VPC subnet IDs in your Amazon VPC.
+* `vpc_id` - (Required) The ID of the Amazon VPC.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name of this Test Grid Project.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
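+
+As a minimal, self-contained sketch of the same `vpc_config` wiring, with the external references replaced by in-stack resources (resource names and CIDR ranges below are illustrative assumptions, not part of the provider documentation):
+
+```python
+# A hypothetical, self-contained wiring of vpc_config; names and CIDR ranges
+# are illustrative assumptions.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.vpc import Vpc
+from imports.aws.subnet import Subnet
+from imports.aws.security_group import SecurityGroup
+from imports.aws.devicefarm_test_grid_project import (
+    DevicefarmTestGridProject,
+    DevicefarmTestGridProjectVpcConfig,
+)
+class TestGridSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Network resources the test grid project will run inside.
+        vpc = Vpc(self, "vpc", cidr_block="10.0.0.0/16")
+        subnet = Subnet(self, "subnet", cidr_block="10.0.1.0/24", vpc_id=vpc.id)
+        sg = SecurityGroup(self, "sg", vpc_id=vpc.id)
+        DevicefarmTestGridProject(self, "example",
+            name="example",
+            vpc_config=DevicefarmTestGridProjectVpcConfig(
+                security_group_ids=[sg.id],
+                subnet_ids=[subnet.id],
+                vpc_id=vpc.id
+            )
+        )
+```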
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Test Grid Projects using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.devicefarm_test_grid_project import DevicefarmTestGridProject
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing test grid project identified by its ARN.
+        DevicefarmTestGridProject.generate_config_for_import(self, "example", "arn:aws:devicefarm:us-west-2:123456789012:testgrid-project:4fa784c7-ccb4-4dbf-ba4f-02198320daa1")
+```
+
+Using `terraform import`, import DeviceFarm Test Grid Projects using their ARN. For example:
+
+```console
+% terraform import aws_devicefarm_test_grid_project.example arn:aws:devicefarm:us-west-2:123456789012:testgrid-project:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/devicefarm_upload.html.markdown b/website/docs/cdktf/python/r/devicefarm_upload.html.markdown
new file mode 100644
index 00000000000..fc752224728
--- /dev/null
+++ b/website/docs/cdktf/python/r/devicefarm_upload.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Device Farm"
+layout: "aws"
+page_title: "AWS: aws_devicefarm_upload"
+description: |-
+  Provides a Device Farm upload
+---
+
+
+
+# Resource: aws_devicefarm_upload
+
+Provides a resource to manage AWS Device Farm Uploads.
+
+~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.devicefarm_project import DevicefarmProject
+from imports.aws.devicefarm_upload import DevicefarmUpload
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DevicefarmProject(self, "example",
+            name="example"
+        )
+        aws_devicefarm_upload_example = DevicefarmUpload(self, "example_1",
+            name="example",
+            project_arn=example.arn,
+            type="APPIUM_JAVA_TESTNG_TEST_SPEC"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_devicefarm_upload_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+* `content_type` - (Optional) The upload's content type (for example, `application/octet-stream`).
+* `name` - (Required) The upload's file name. The name should not contain any forward slashes (`/`). If you are uploading an iOS app, the file name must end with the `.ipa` extension. If you are uploading an Android app, the file name must end with the `.apk` extension. For all others, the file name must end with the `.zip` file extension.
+* `project_arn` - (Required) The ARN of the project for the upload.
+* `type` - (Required) The upload's type. See the [AWS Docs](https://docs.aws.amazon.com/devicefarm/latest/APIReference/API_CreateUpload.html#API_CreateUpload_RequestSyntax) for a list of valid values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name of this upload.
+* `url` - The presigned Amazon S3 URL that was used to store a file using a PUT request.
+* `category` - The upload's category.
+
+* `metadata` - The upload's metadata. For example, for Android, this contains information that is parsed from the manifest and is displayed in the AWS Device Farm console after the associated app is uploaded.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Uploads using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.devicefarm_upload import DevicefarmUpload
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing upload identified by its ARN.
+        DevicefarmUpload.generate_config_for_import(self, "example", "arn:aws:devicefarm:us-west-2:123456789012:upload:4fa784c7-ccb4-4dbf-ba4f-02198320daa1")
+```
+
+Using `terraform import`, import DeviceFarm Uploads using their ARN. For example:
+
+```console
+% terraform import aws_devicefarm_upload.example arn:aws:devicefarm:us-west-2:123456789012:upload:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/directory_service_conditional_forwarder.html.markdown b/website/docs/cdktf/python/r/directory_service_conditional_forwarder.html.markdown
new file mode 100644
index 00000000000..d0af2e8c9f2
--- /dev/null
+++ b/website/docs/cdktf/python/r/directory_service_conditional_forwarder.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_conditional_forwarder"
+description: |-
+  Provides a conditional forwarder for managed Microsoft AD in AWS Directory Service.
+---
+
+
+
+# Resource: aws_directory_service_conditional_forwarder
+
+Provides a conditional forwarder for managed Microsoft AD in AWS Directory Service.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.directory_service_conditional_forwarder import DirectoryServiceConditionalForwarder
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DirectoryServiceConditionalForwarder(self, "example",
+            directory_id=ad.id,
+            dns_ips=["8.8.8.8", "8.8.4.4"],
+            remote_domain_name="example.com"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `directory_id` - (Required) ID of the directory.
+* `dns_ips` - (Required) A list of forwarder IP addresses.
+* `remote_domain_name` - (Required) The fully qualified domain name of the remote domain for which forwarders will be used.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import conditional forwarders using the directory ID and remote domain name, separated by a colon (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_conditional_forwarder import DirectoryServiceConditionalForwarder
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing conditional forwarder by "directory_id:remote_domain_name".
+        DirectoryServiceConditionalForwarder.generate_config_for_import(self, "example", "d-1234567890:example.com")
+```
+
+Using `terraform import`, import conditional forwarders using the directory ID and remote domain name, separated by a colon (`:`).
For example: + +```console +% terraform import aws_directory_service_conditional_forwarder.example d-1234567890:example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/directory_service_directory.html.markdown b/website/docs/cdktf/python/r/directory_service_directory.html.markdown new file mode 100644 index 00000000000..166ad55e32b --- /dev/null +++ b/website/docs/cdktf/python/r/directory_service_directory.html.markdown @@ -0,0 +1,225 @@ +--- +subcategory: "Directory Service" +layout: "aws" +page_title: "AWS: aws_directory_service_directory" +description: |- + Provides a directory in AWS Directory Service. +--- + + + +# Resource: aws_directory_service_directory + +Provides a Simple or Managed Microsoft directory in AWS Directory Service. + +~> **Note:** All arguments including the password and customer username will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +### SimpleAD + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.directory_service_directory import DirectoryServiceDirectory +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + main = Vpc(self, "main", + cidr_block="10.0.0.0/16" + ) + bar = Subnet(self, "bar", + availability_zone="us-west-2b", + cidr_block="10.0.2.0/24", + vpc_id=main.id + ) + foo = Subnet(self, "foo", + availability_zone="us-west-2a", + cidr_block="10.0.1.0/24", + vpc_id=main.id + ) + aws_directory_service_directory_bar = DirectoryServiceDirectory(self, "bar_3", + name="corp.notexample.com", + password="SuperSecretPassw0rd", + size="Small", + tags={ + "Project": "foo" + }, + vpc_settings=DirectoryServiceDirectoryVpcSettings( + subnet_ids=[foo.id, bar.id], + vpc_id=main.id + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_directory_service_directory_bar.override_logical_id("bar") +``` + +### Microsoft Active Directory (MicrosoftAD) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.directory_service_directory import DirectoryServiceDirectory
+from imports.aws.subnet import Subnet
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = Vpc(self, "main",
+            cidr_block="10.0.0.0/16"
+        )
+        bar = Subnet(self, "bar",
+            availability_zone="us-west-2b",
+            cidr_block="10.0.2.0/24",
+            vpc_id=main.id
+        )
+        foo = Subnet(self, "foo",
+            availability_zone="us-west-2a",
+            cidr_block="10.0.1.0/24",
+            vpc_id=main.id
+        )
+        aws_directory_service_directory_bar = DirectoryServiceDirectory(self, "bar_3",
+            edition="Standard",
+            name="corp.notexample.com",
+            password="SuperSecretPassw0rd",
+            tags={
+                "Project": "foo"
+            },
+            type="MicrosoftAD",
+            vpc_settings=DirectoryServiceDirectoryVpcSettings(
+                subnet_ids=[foo.id, bar.id],
+                vpc_id=main.id
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_directory_service_directory_bar.override_logical_id("bar")
+```
+
+### Microsoft Active Directory Connector (ADConnector)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.directory_service_directory import DirectoryServiceDirectory
+from imports.aws.subnet import Subnet
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = Vpc(self, "main",
+            cidr_block="10.0.0.0/16"
+        )
+        bar = Subnet(self, "bar",
+            availability_zone="us-west-2b",
+            cidr_block="10.0.2.0/24",
+            vpc_id=main.id
+        )
+        foo = Subnet(self, "foo",
+            availability_zone="us-west-2a",
+            cidr_block="10.0.1.0/24",
+            vpc_id=main.id
+        )
+        DirectoryServiceDirectory(self, "connector",
+            connect_settings=DirectoryServiceDirectoryConnectSettings(
+                customer_dns_ips=["A.B.C.D"],
+                customer_username="Admin",
+                subnet_ids=[foo.id, bar.id],
+                vpc_id=main.id
+            ),
+            name="corp.notexample.com",
+            password="SuperSecretPassw0rd",
+            size="Small",
+            type="ADConnector"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The fully qualified name for the directory, such as `corp.example.com`.
+* `password` - (Required) The password for the directory administrator or connector user.
+* `size` - (Optional) (For `SimpleAD` and `ADConnector` types) The size of the directory (`Small` or `Large` are accepted values). `Large` by default.
+* `vpc_settings` - (Required for `SimpleAD` and `MicrosoftAD`) VPC related information about the directory. Fields documented below.
+* `connect_settings` - (Required for `ADConnector`) Connector related information about the directory. Fields documented below.
+* `alias` - (Optional) The alias for the directory (must be unique amongst all aliases in AWS). Required for `enable_sso`.
+* `description` - (Optional) A textual description for the directory.
+* `desired_number_of_domain_controllers` - (Optional) The number of domain controllers desired in the directory. Minimum value of `2`. Scaling of domain controllers is only supported for `MicrosoftAD` directories.
+* `short_name` - (Optional) The short name of the directory, such as `CORP`.
+* `enable_sso` - (Optional) Whether to enable single sign-on for the directory. Requires `alias`. Defaults to `false`.
+* `type` - (Optional) The directory type (`SimpleAD`, `ADConnector` or `MicrosoftAD` are accepted values). Defaults to `SimpleAD`.
+* `edition` - (Optional, for type `MicrosoftAD` only) The MicrosoftAD edition (`Standard` or `Enterprise`). Defaults to `Enterprise`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+**vpc_settings** supports the following:
+
+* `subnet_ids` - (Required) The identifiers of the subnets for the directory servers (2 subnets in 2 different AZs).
+* `vpc_id` - (Required) The identifier of the VPC that the directory is in.
+
+**connect_settings** supports the following:
+
+* `customer_username` - (Required) The username corresponding to the password provided.
+* `customer_dns_ips` - (Required) The DNS IP addresses of the domain to connect to.
+* `subnet_ids` - (Required) The identifiers of the subnets for the directory servers (2 subnets in 2 different AZs).
+* `vpc_id` - (Required) The identifier of the VPC that the directory is in.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The directory identifier.
+* `access_url` - The access URL for the directory, such as `http://alias.awsapps.com`.
+* `dns_ip_addresses` - A list of IP addresses of the DNS servers for the directory or connector.
+* `security_group_id` - The ID of the security group created by the directory.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+`connect_settings` (for `ADConnector`) is also exported with the following attributes:
+
+* `connect_ips` - The IP addresses of the AD Connector servers.
+
+## Timeouts
+
+`aws_directory_service_directory` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `create` - (Default `60 minutes`) Used for directory creation
+- `update` - (Default `60 minutes`) Used for directory update
+- `delete` - (Default `60 minutes`) Used for directory deletion
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DirectoryService directories using the directory `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_directory import DirectoryServiceDirectory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing directory by its ID.
+        DirectoryServiceDirectory.generate_config_for_import(self, "sample", "d-926724cf57")
+```
+
+Using `terraform import`, import DirectoryService directories using the directory `id`. For example:
+
+```console
+% terraform import aws_directory_service_directory.sample d-926724cf57
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/directory_service_log_subscription.html.markdown b/website/docs/cdktf/python/r/directory_service_log_subscription.html.markdown
new file mode 100644
index 00000000000..781f5a0bcbf
--- /dev/null
+++ b/website/docs/cdktf/python/r/directory_service_log_subscription.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_log_subscription"
+description: |-
+  Provides a Log subscription for AWS Directory Service that pushes logs to CloudWatch.
+---
+
+
+
+# Resource: aws_directory_service_log_subscription
+
+Provides a Log subscription for AWS Directory Service that pushes logs to CloudWatch.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.directory_service_log_subscription import DirectoryServiceLogSubscription
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudwatchLogGroup(self, "example",
+            name="/aws/directoryservice/${" + aws_directory_service_directory_example.id + "}",
+            retention_in_days=14
+        )
+        aws_directory_service_log_subscription_example = DirectoryServiceLogSubscription(self, "example_1",
+            directory_id=Token.as_string(aws_directory_service_directory_example.id),
+            log_group_name=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_directory_service_log_subscription_example.override_logical_id("example")
+        ad_log_policy = DataAwsIamPolicyDocument(self, "ad-log-policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:CreateLogStream", "logs:PutLogEvents"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["ds.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=["${" + example.arn + "}:*"]
+            )
+            ]
+        )
+        aws_cloudwatch_log_resource_policy_ad_log_policy = CloudwatchLogResourcePolicy(self, "ad-log-policy_3",
+            policy_document=Token.as_string(ad_log_policy.json),
+            policy_name="ad-log-policy"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_resource_policy_ad_log_policy.override_logical_id("ad-log-policy")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `directory_id` - (Required) ID of the directory.
+* `log_group_name` - (Required) Name of the CloudWatch log group to which the logs should be published. The log group should be already created and the directory service principal should be provided with required permission to create stream and publish logs. Changing this value would delete the current subscription and create a new one. A directory can only have one log subscription at a time.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Directory Service Log Subscriptions using the directory ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_log_subscription import DirectoryServiceLogSubscription
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing log subscription by its directory ID.
+        DirectoryServiceLogSubscription.generate_config_for_import(self, "msad", "d-1234567890")
+```
+
+Using `terraform import`, import Directory Service Log Subscriptions using the directory ID. For example:
+
+```console
+% terraform import aws_directory_service_log_subscription.msad d-1234567890
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/directory_service_radius_settings.html.markdown b/website/docs/cdktf/python/r/directory_service_radius_settings.html.markdown
new file mode 100644
index 00000000000..b692069a451
--- /dev/null
+++ b/website/docs/cdktf/python/r/directory_service_radius_settings.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_radius_settings"
+description: |-
+  Manages a directory's multi-factor authentication (MFA) using a Remote Authentication Dial In User Service (RADIUS) server.
+---
+
+
+
+# Resource: aws_directory_service_radius_settings
+
+Manages a directory's multi-factor authentication (MFA) using a Remote Authentication Dial In User Service (RADIUS) server.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.directory_service_radius_settings import DirectoryServiceRadiusSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DirectoryServiceRadiusSettings(self, "example",
+            authentication_protocol="PAP",
+            directory_id=Token.as_string(aws_directory_service_directory_example.id),
+            display_label="example",
+            radius_port=1812,
+            radius_retries=4,
+            radius_servers=["10.0.1.5"],
+            radius_timeout=1,
+            shared_secret="12345678"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `authentication_protocol` - (Optional) The protocol specified for your RADIUS endpoints. Valid values: `PAP`, `CHAP`, `MS-CHAPv1`, `MS-CHAPv2`.
+* `directory_id` - (Required) The identifier of the directory for which you want to manage RADIUS settings.
+* `display_label` - (Required) Display label.
+* `radius_port` - (Required) The port that your RADIUS server is using for communications. Your self-managed network must allow inbound traffic over this port from the AWS Directory Service servers.
+* `radius_retries` - (Required) The maximum number of times that communication with the RADIUS server is attempted. Minimum value of `0`. Maximum value of `10`.
+* `radius_servers` - (Required) An array of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.
+* `radius_timeout` - (Required) The amount of time, in seconds, to wait for the RADIUS server to respond. Minimum value of `1`. Maximum value of `50`.
+* `shared_secret` - (Required) Required for enabling RADIUS on the directory.
+
+* `use_same_username` - (Optional) Not currently used.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The directory identifier.
+
+## Timeouts
+
+`aws_directory_service_radius_settings` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `create` - (Default `30 minutes`) Used for RADIUS settings creation
+- `update` - (Default `30 minutes`) Used for RADIUS settings update
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RADIUS settings using the directory ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_radius_settings import DirectoryServiceRadiusSettings
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing RADIUS settings by their directory ID.
+        DirectoryServiceRadiusSettings.generate_config_for_import(self, "example", "d-926724cf57")
+```
+
+Using `terraform import`, import RADIUS settings using the directory ID. For example:
+
+```console
+% terraform import aws_directory_service_radius_settings.example d-926724cf57
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/directory_service_region.html.markdown b/website/docs/cdktf/python/r/directory_service_region.html.markdown
new file mode 100644
index 00000000000..16500efab0b
--- /dev/null
+++ b/website/docs/cdktf/python/r/directory_service_region.html.markdown
@@ -0,0 +1,192 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_region"
+description: |-
+  Manages a replicated Region and directory for Multi-Region replication.
+---
+
+
+
+# Resource: aws_directory_service_region
+
+Manages a replicated Region and directory for Multi-Region replication.
+Multi-Region replication is only supported for the Enterprise Edition of AWS Managed Microsoft AD.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformCount, property_access, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.directory_service_directory import DirectoryServiceDirectory +from imports.aws.directory_service_region import DirectoryServiceRegion +from imports.aws.provider import AwsProvider +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AwsProvider(self, "aws", + region="us-west-2" + ) + secondary = AwsProvider(self, "aws_1", + alias="secondary", + region="us-east-2" + ) + example = Vpc(self, "example", + cidr_block="10.0.0.0/16", + tags={ + "Name": "Primary" + } + ) + example_secondary = Vpc(self, "example-secondary", + cidr_block="10.1.0.0/16", + provider=secondary, + tags={ + "Name": "Secondary" + } + ) + available = DataAwsAvailabilityZones(self, "available", + filter=[DataAwsAvailabilityZonesFilter( + name="opt-in-status", + values=["opt-in-not-required"] + ) + ], + state="available" + ) + available_secondary = DataAwsAvailabilityZones(self, "available-secondary", + filter=[DataAwsAvailabilityZonesFilter( + name="opt-in-status", + values=["opt-in-not-required"] + ) + ], + provider=secondary, + state="available" + ) + data_aws_region_example = DataAwsRegion(self, "example_6", + provider=secondary + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_region_example.override_logical_id("example") + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_count = TerraformCount.of(Token.as_number("2")) + aws_subnet_example = Subnet(self, "example_7", + availability_zone=Token.as_string( + property_access(available.names, [example_count.index])), + cidr_block=Token.as_string( + Fn.cidrsubnet(example.cidr_block, 8, Token.as_number(example_count.index))), + tags={ + "Name": "Primary" + }, + vpc_id=example.id, + count=example_count + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_subnet_example.override_logical_id("example") + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_secondary_count = TerraformCount.of(Token.as_number("2")) + aws_subnet_example_secondary = Subnet(self, "example-secondary_8", + availability_zone=Token.as_string( + property_access(available_secondary.names, [example_secondary_count.index])), + cidr_block=Token.as_string( + Fn.cidrsubnet(example_secondary.cidr_block, 8, + Token.as_number(example_secondary_count.index))), + provider=secondary, + tags={ + "Name": "Secondary" + }, + vpc_id=example_secondary.id, + count=example_secondary_count + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_subnet_example_secondary.override_logical_id("example-secondary")
+        aws_directory_service_directory_example = DirectoryServiceDirectory(self, "example_9",
+            name="example.com",
+            password="SuperSecretPassw0rd",
+            type="MicrosoftAD",
+            vpc_settings=DirectoryServiceDirectoryVpcSettings(
+                subnet_ids=Token.as_list(
+                    property_access(aws_subnet_example, ["*", "id"])),
+                vpc_id=example.id
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_directory_service_directory_example.override_logical_id("example")
+        aws_directory_service_region_example = DirectoryServiceRegion(self, "example_10",
+            directory_id=Token.as_string(aws_directory_service_directory_example.id),
+            region_name=Token.as_string(data_aws_region_example.name),
+            tags={
+                "Name": "Secondary"
+            },
+            vpc_settings=DirectoryServiceRegionVpcSettings(
+                subnet_ids=Token.as_list(
+                    property_access(aws_subnet_example_secondary, ["*", "id"])),
+                vpc_id=example_secondary.id
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_directory_service_region_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `desired_number_of_domain_controllers` - (Optional) The number of domain controllers desired in the replicated directory. Minimum value of `2`.
+* `directory_id` - (Required) The identifier of the directory to which you want to add Region replication.
+* `region_name` - (Required) The name of the Region where you want to add domain controllers for replication.
+* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_settings` - (Required) VPC information in the replicated Region. Detailed below.
+
+### `vpc_settings`
+
+* `subnet_ids` - (Required) The identifiers of the subnets for the directory servers.
+* `vpc_id` - (Optional) The identifier of the VPC in which to create the directory.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+`aws_directory_service_region` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `create` - (Default `180 minutes`) Used for Region addition
+- `update` - (Default `90 minutes`) Used for replicated directory update
+- `delete` - (Default `90 minutes`) Used for Region removal
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Replicated Regions using the directory ID and Region name, separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_region import DirectoryServiceRegion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing replicated Region by "directory_id,region_name".
+        DirectoryServiceRegion.generate_config_for_import(self, "example", "d-9267651497,us-east-2")
+```
+
+Using `terraform import`, import Replicated Regions using the directory ID and Region name, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_directory_service_region.example d-9267651497,us-east-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/directory_service_shared_directory.html.markdown b/website/docs/cdktf/python/r/directory_service_shared_directory.html.markdown
new file mode 100644
index 00000000000..0a4c2a20d30
--- /dev/null
+++ b/website/docs/cdktf/python/r/directory_service_shared_directory.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_shared_directory"
+description: |-
+  Manages a directory in your account (directory owner) shared with another account (directory consumer).
+---
+
+
+
+# Resource: aws_directory_service_shared_directory
+
+Manages a directory in your account (directory owner) shared with another account (directory consumer).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.directory_service_directory import DirectoryServiceDirectory
+from imports.aws.directory_service_shared_directory import DirectoryServiceSharedDirectory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DirectoryServiceDirectory(self, "example",
+            edition="Standard",
+            name="tf-example",
+            password="SuperSecretPassw0rd",
+            type="MicrosoftAD",
+            vpc_settings=DirectoryServiceDirectoryVpcSettings(
+                subnet_ids=Token.as_list(property_access(aws_subnet_example, ["*", "id"])),
+                vpc_id=Token.as_string(aws_vpc_example.id)
+            )
+        )
+        aws_directory_service_shared_directory_example = DirectoryServiceSharedDirectory(self, "example_1",
+            directory_id=example.id,
+            notes="You wanna have a catch?",
+            target=DirectoryServiceSharedDirectoryTarget(
+                id=Token.as_string(receiver.account_id)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_directory_service_shared_directory_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `directory_id` - (Required) Identifier of the Managed Microsoft AD directory that you want to share with other accounts.
+* `target` - (Required) Identifier for the directory consumer account with whom the directory is to be shared. See below.
+
+The following arguments are optional:
+
+* `method` - (Optional) Method used when sharing a directory. Valid values are `ORGANIZATIONS` and `HANDSHAKE`. Default is `HANDSHAKE`.
+* `notes` - (Optional, Sensitive) Message sent by the directory owner to the directory consumer to help the directory consumer administrator determine whether to approve or reject the share invitation.
+
+### `target`
+
+* `id` - (Required) Identifier of the directory consumer account.
+* `type` - (Optional) Type of identifier to be used in the `id` field. Valid value is `ACCOUNT`. Default is `ACCOUNT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the shared directory.
+* `shared_directory_id` - Identifier of the directory that is stored in the directory consumer account that corresponds to the shared directory in the owner account.
+
+## Timeouts
+
+`aws_directory_service_shared_directory` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `delete` - (Default `60 minutes`) Used for shared directory deletion
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Directory Service Shared Directories using the owner directory ID/shared directory ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_shared_directory import DirectoryServiceSharedDirectory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing shared directory by "owner_directory_id/shared_directory_id".
+        DirectoryServiceSharedDirectory.generate_config_for_import(self, "example", "d-1234567890/d-9267633ece")
+```
+
+Using `terraform import`, import Directory Service Shared Directories using the owner directory ID/shared directory ID. For example:
+
+```console
+% terraform import aws_directory_service_shared_directory.example d-1234567890/d-9267633ece
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/directory_service_shared_directory_accepter.html.markdown b/website/docs/cdktf/python/r/directory_service_shared_directory_accepter.html.markdown
new file mode 100644
index 00000000000..0b83c12cd61
--- /dev/null
+++ b/website/docs/cdktf/python/r/directory_service_shared_directory_accepter.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_shared_directory_accepter"
+description: |-
+  Accepts a shared directory in a consumer account.
+---
+
+
+
+# Resource: aws_directory_service_shared_directory_accepter
+
+Accepts a shared directory in a consumer account.
+
+~> **NOTE:** Destroying this resource removes the shared directory from the consumer account only.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.directory_service_shared_directory import DirectoryServiceSharedDirectory
+from imports.aws.directory_service_shared_directory_accepter import DirectoryServiceSharedDirectoryAccepter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DirectoryServiceSharedDirectory(self, "example",
+            directory_id=Token.as_string(aws_directory_service_directory_example.id),
+            notes="example",
+            target=DirectoryServiceSharedDirectoryTarget(
+                id=Token.as_string(receiver.account_id)
+            )
+        )
+        aws_directory_service_shared_directory_accepter_example = DirectoryServiceSharedDirectoryAccepter(self, "example_1",
+            provider="awsalternate",
+            shared_directory_id=example.shared_directory_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_directory_service_shared_directory_accepter_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `shared_directory_id` - (Required) Identifier of the directory that is stored in the directory consumer account that corresponds to the shared directory in the owner account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the shared directory.
+* `method` - Method used when sharing a directory (i.e., `ORGANIZATIONS` or `HANDSHAKE`).
+* `notes` - Message sent by the directory owner to the directory consumer to help the directory consumer administrator determine whether to approve or reject the share invitation.
+* `owner_account_id` - Account identifier of the directory owner.
+* `owner_directory_id` - Identifier of the Managed Microsoft AD directory from the perspective of the directory owner.
+
+## Timeouts
+
+`aws_directory_service_shared_directory_accepter` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:

+- `create` - (Default `60 minutes`) Used for directory creation
+- `delete` - (Default `60 minutes`) Used for directory deletion
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Directory Service Shared Directories using the shared directory ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_shared_directory_accepter import DirectoryServiceSharedDirectoryAccepter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the accepted shared directory by its shared directory ID.
+        DirectoryServiceSharedDirectoryAccepter.generate_config_for_import(self, "example", "d-9267633ece")
+```
+
+Using `terraform import`, import Directory Service Shared Directories using the shared directory ID. For example:
+
+```console
+% terraform import aws_directory_service_shared_directory_accepter.example d-9267633ece
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/directory_service_trust.html.markdown b/website/docs/cdktf/python/r/directory_service_trust.html.markdown
new file mode 100644
index 00000000000..4ebb1b9a881
--- /dev/null
+++ b/website/docs/cdktf/python/r/directory_service_trust.html.markdown
@@ -0,0 +1,166 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_trust"
+description: |-
+  Manages a trust relationship between two Active Directory Directories.
+---
+
+
+
+# Resource: aws_directory_service_trust
+
+Manages a trust relationship between two Active Directory Directories.
+
+The directories may either be both AWS Managed Microsoft AD domains or an AWS Managed Microsoft AD domain and a self-managed Active Directory Domain.
+
+The Trust relationship must be configured on both sides of the relationship.
+If a Trust has only been created on one side, it will be in the state `VerifyFailed`.
+Once the second Trust is created, the first will update to the correct state.
+
+## Example Usage
+
+### Two-Way Trust
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.directory_service_directory import DirectoryServiceDirectory +from imports.aws.directory_service_trust import DirectoryServiceTrust +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, password, password1): + super().__init__(scope, name) + one = DirectoryServiceDirectory(self, "one", + name="one.example.com", + type="MicrosoftAD", + password=password + ) + two = DirectoryServiceDirectory(self, "two", + name="two.example.com", + type="MicrosoftAD", + password=password1 + ) + aws_directory_service_trust_one = DirectoryServiceTrust(self, "one_2", + conditional_forwarder_ip_addrs=two.dns_ip_addresses, + directory_id=one.id, + remote_domain_name=two.name, + trust_direction="Two-Way", + trust_password="Some0therPassword" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_directory_service_trust_one.override_logical_id("one") + aws_directory_service_trust_two = DirectoryServiceTrust(self, "two_3", + conditional_forwarder_ip_addrs=one.dns_ip_addresses, + directory_id=two.id, + remote_domain_name=one.name, + trust_direction="Two-Way", + trust_password="Some0therPassword" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_directory_service_trust_two.override_logical_id("two") +``` + +### One-Way Trust + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.directory_service_directory import DirectoryServiceDirectory +from imports.aws.directory_service_trust import DirectoryServiceTrust +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, password, password1): + super().__init__(scope, name) + one = DirectoryServiceDirectory(self, "one", + name="one.example.com", + type="MicrosoftAD", + password=password + ) + two = DirectoryServiceDirectory(self, "two", + name="two.example.com", + type="MicrosoftAD", + password=password1 + ) + aws_directory_service_trust_one = DirectoryServiceTrust(self, "one_2", + conditional_forwarder_ip_addrs=two.dns_ip_addresses, + directory_id=one.id, + remote_domain_name=two.name, + trust_direction="One-Way: Incoming", + trust_password="Some0therPassword" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_directory_service_trust_one.override_logical_id("one") + aws_directory_service_trust_two = DirectoryServiceTrust(self, "two_3", + conditional_forwarder_ip_addrs=one.dns_ip_addresses, + directory_id=two.id, + remote_domain_name=one.name, + trust_direction="One-Way: Outgoing", + trust_password="Some0therPassword" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_directory_service_trust_two.override_logical_id("two") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `conditional_forwarder_ip_addrs` - (Optional) Set of IPv4 addresses for the DNS server associated with the remote Directory. + Can contain between 1 and 4 values. +* `delete_associated_conditional_forwarder` - (Optional) Whether to delete the conditional forwarder when deleting the Trust relationship. 
+* `directory_id` - (Required) ID of the Directory.
+* `remote_domain_name` - (Required) Fully qualified domain name of the remote Directory.
+* `selective_auth` - (Optional) Whether to enable selective authentication.
+  Valid values are `Enabled` and `Disabled`.
+  Default value is `Disabled`.
+* `trust_direction` - (Required) The direction of the Trust relationship.
+  Valid values are `One-Way: Outgoing`, `One-Way: Incoming`, and `Two-Way`.
+* `trust_password` - (Required) Password for the Trust.
+  Does not need to match the passwords for either Directory.
+  Can contain upper- and lower-case letters, numbers, and punctuation characters.
+  May be up to 128 characters long.
+* `trust_type` - (Optional) Type of the Trust relationship.
+  Valid values are `Forest` and `External`.
+  Default value is `Forest`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `created_date_time` - Date and time when the Trust was created.
+* `id` - The Trust identifier.
+* `last_updated_date_time` - Date and time when the Trust was last updated.
+* `state_last_updated_date_time` - Date and time when the Trust state in `trust_state` was last updated.
+* `trust_state` - State of the Trust relationship.
+  One of `Created`, `VerifyFailed`, `Verified`, `UpdateFailed`, `Updated`, `Deleted`, or `Failed`.
+* `trust_state_reason` - Reason for the Trust state set in `trust_state`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Trust relationship using the directory ID and remote domain name, separated by a `/`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.directory_service_trust import DirectoryServiceTrust
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # generate_config_for_import is cdktf's equivalent of the `import` block:
+        # it imports the existing Trust by "directory_id/remote_domain_name".
+        DirectoryServiceTrust.generate_config_for_import(self, "example", "d-926724cf57/directory.example.com")
+```
+
+Using `terraform import`, import the Trust relationship using the directory ID and remote domain name, separated by a `/`. For example:
+
+```console
+% terraform import aws_directory_service_trust.example d-926724cf57/directory.example.com
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dlm_lifecycle_policy.markdown b/website/docs/cdktf/python/r/dlm_lifecycle_policy.markdown
new file mode 100644
index 00000000000..15036523cbc
--- /dev/null
+++ b/website/docs/cdktf/python/r/dlm_lifecycle_policy.markdown
@@ -0,0 +1,369 @@
+---
+subcategory: "DLM (Data Lifecycle Manager)"
+layout: "aws"
+page_title: "AWS: aws_dlm_lifecycle_policy"
+description: |-
+  Provides a Data Lifecycle Manager (DLM) lifecycle policy for managing snapshots.
+---
+
+
+
+# Resource: aws_dlm_lifecycle_policy
+
+Provides a [Data Lifecycle Manager (DLM) lifecycle policy](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/snapshot-lifecycle.html) for managing snapshots.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.dlm_lifecycle_policy import DlmLifecyclePolicy +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["dlm.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + dlm_lifecycle = DataAwsIamPolicyDocument(self, "dlm_lifecycle", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:CreateSnapshot", "ec2:CreateSnapshots", "ec2:DeleteSnapshot", "ec2:DescribeInstances", "ec2:DescribeVolumes", "ec2:DescribeSnapshots" + ], + effect="Allow", + resources=["*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["ec2:CreateTags"], + effect="Allow", + resources=["arn:aws:ec2:*::snapshot/*"] + ) + ] + ) + dlm_lifecycle_role = IamRole(self, "dlm_lifecycle_role", + assume_role_policy=Token.as_string(assume_role.json), + name="dlm-lifecycle-role" + ) + aws_iam_role_policy_dlm_lifecycle = IamRolePolicy(self, "dlm_lifecycle_3", + name="dlm-lifecycle-policy", + policy=Token.as_string(dlm_lifecycle.json), + role=dlm_lifecycle_role.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_dlm_lifecycle.override_logical_id("dlm_lifecycle") + DlmLifecyclePolicy(self, "example", + description="example DLM lifecycle policy", + execution_role_arn=dlm_lifecycle_role.arn, + policy_details=DlmLifecyclePolicyPolicyDetails( + resource_types=["VOLUME"], + schedule=[DlmLifecyclePolicyPolicyDetailsSchedule( + copy_tags=False, + create_rule=DlmLifecyclePolicyPolicyDetailsScheduleCreateRule( + interval=24, + interval_unit="HOURS", + times=["23:45"] + ), + name="2 weeks of daily snapshots", + retain_rule=DlmLifecyclePolicyPolicyDetailsScheduleRetainRule( + count=14 + ), + tags_to_add={ + "snapshot_creator": "DLM" + } + ) + ], + target_tags={ + "snapshot": "true" + } + ), + state="ENABLED" + ) +``` + +### Example Cross-Region Snapshot Copy Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.dlm_lifecycle_policy import DlmLifecyclePolicy
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        key = DataAwsIamPolicyDocument(self, "key",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["kms:*"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["arn:aws:iam::${" + current.account_id + "}:root"],
+                    type="AWS"
+                )
+                ],
+                resources=["*"],
+                sid="Enable IAM User Permissions"
+            )
+            ]
+        )
+        dlm_cross_region_copy_cmk = KmsKey(self, "dlm_cross_region_copy_cmk",
+            description="Example Alternate Region KMS Key",
+            policy=Token.as_string(key.json),
+            provider=alternate
+        )
+        DlmLifecyclePolicy(self, "example",
+            description="example DLM lifecycle policy",
+            execution_role_arn=dlm_lifecycle_role.arn,
+            policy_details=DlmLifecyclePolicyPolicyDetails(
+                resource_types=["VOLUME"],
+                schedule=[DlmLifecyclePolicyPolicyDetailsSchedule(
+                    copy_tags=False,
+                    create_rule=DlmLifecyclePolicyPolicyDetailsScheduleCreateRule(
+                        interval=24,
+                        interval_unit="HOURS",
+                        times=["23:45"]
+                    ),
+                    cross_region_copy_rule=[DlmLifecyclePolicyPolicyDetailsScheduleCrossRegionCopyRule(
+                        cmk_arn=dlm_cross_region_copy_cmk.arn,
+                        copy_tags=True,
+                        encrypted=True,
+                        retain_rule=DlmLifecyclePolicyPolicyDetailsScheduleCrossRegionCopyRuleRetainRule(
+                            interval=30,
+                            interval_unit="DAYS"
+                        ),
+                        target="us-west-2"
+                    )
+                    ],
+                    name="2 weeks of daily snapshots",
+                    retain_rule=DlmLifecyclePolicyPolicyDetailsScheduleRetainRule(
+                        count=14
+                    ),
+                    tags_to_add={
+                        "snapshot_creator": "DLM"
+                    }
+                )
+                ],
+                target_tags={
+                    "snapshot": "true"
+                }
+            ),
+            state="ENABLED"
+        )
+```
+
+### Example Event Based Policy Usage
+
+```
+data "aws_caller_identity" "current" {}
+
+resource "aws_dlm_lifecycle_policy" "example" {
+  description        = "tf-acc-basic"
+  execution_role_arn = aws_iam_role.example.arn
+
+  policy_details {
+    policy_type = "EVENT_BASED_POLICY"
+
+    action {
+      name = "tf-acc-basic"
+      cross_region_copy {
+        encryption_configuration {}
+        retain_rule {
+          interval      = 15
+          interval_unit = "MONTHS"
+        }
+
+        target = "us-west-2" # the target Region for the snapshot copies
+      }
+    }
+
+    event_source {
+      type = "MANAGED_CWE"
+      parameters {
+        description_regex = "^.*Created for policy: policy-1234567890abcdef0.*$"
+        event_type        = "shareSnapshot"
+        snapshot_owner    = [data.aws_caller_identity.current.account_id]
+      }
+    }
+  }
+}
+
+data "aws_iam_policy" "example" {
+  name = "AWSDataLifecycleManagerServiceRole"
+}
+
+resource "aws_iam_role_policy_attachment" "example" {
+  role       = aws_iam_role.example.id
+  policy_arn = data.aws_iam_policy.example.arn
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Required) A description for the DLM lifecycle policy.
+* `execution_role_arn` - (Required) The ARN of an IAM role that is able to be assumed by the DLM service.
+* `policy_details` - (Required) See the [`policy_details` configuration](#policy-details-arguments) block. Max of 1.
+* `state` - (Optional) Whether the lifecycle policy should be enabled or disabled. `ENABLED` or `DISABLED` are valid values. Defaults to `ENABLED`.
+* `tags` - (Optional) Key-value map of resource tags.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Required) A description for the DLM lifecycle policy.
+* `execution_role_arn` - (Required) The ARN of an IAM role that is able to be assumed by the DLM service.
+* `policy_details` - (Required) See the [`policy_details` configuration](#policy-details-arguments) block. Max of 1.
+* `state` - (Optional) Whether the lifecycle policy should be enabled or disabled. `ENABLED` or `DISABLED` are valid values. Defaults to `ENABLED`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Policy Details arguments
+
+* `action` - (Optional) The actions to be performed when the event-based policy is triggered. You can specify only one action per policy. This parameter is required for event-based policies only. If you are creating a snapshot or AMI policy, omit this parameter. See the [`action` configuration](#action-arguments) block.
+* `event_source` - (Optional) The event that triggers the event-based policy. This parameter is required for event-based policies only. If you are creating a snapshot or AMI policy, omit this parameter. See the [`event_source` configuration](#event-source-arguments) block.
+* `resource_types` - (Optional) A list of resource types that should be targeted by the lifecycle policy. Valid values are `VOLUME` and `INSTANCE`.
+* `resource_locations` - (Optional) The location of the resources to backup. If the source resources are located in an AWS Region, specify `CLOUD`. If the source resources are located on an Outpost in your account, specify `OUTPOST`. If you specify `OUTPOST`, Amazon Data Lifecycle Manager backs up all resources of the specified type with matching target tags across all of the Outposts in your account. Valid values are `CLOUD` and `OUTPOST`.
+* `policy_type` - (Optional) The valid target resource types and actions a policy can manage. Specify `EBS_SNAPSHOT_MANAGEMENT` to create a lifecycle policy that manages the lifecycle of Amazon EBS snapshots. Specify `IMAGE_MANAGEMENT` to create a lifecycle policy that manages the lifecycle of EBS-backed AMIs. Specify `EVENT_BASED_POLICY` to create an event-based policy that performs specific actions when a defined event occurs in your AWS account. Default value is `EBS_SNAPSHOT_MANAGEMENT`.
+* `parameters` - (Optional) A set of optional parameters for snapshot and AMI lifecycle policies. See the [`parameters` configuration](#parameters-arguments) block.
+* `schedule` - (Optional) See the [`schedule` configuration](#schedule-arguments) block.
+* `target_tags` - (Optional) A map of tag keys and their values. Any resources that match the `resource_types` and are tagged with _any_ of these tags will be targeted.
+
+~> **Note:** You cannot have overlapping lifecycle policies that share the same `target_tags`. Terraform is unable to detect this at plan time, but it will fail during apply.
+
+#### Action arguments
+
+* `cross_region_copy` - (Optional) The rule for copying shared snapshots across Regions. See the [`cross_region_copy` configuration](#action-cross-region-copy-rule-arguments) block.
+* `name` - (Optional) A descriptive name for the action.
+
+##### Action Cross Region Copy Rule arguments
+
+* `encryption_configuration` - (Required) The encryption settings for the copied snapshot. See the [`encryption_configuration`](#encryption-configuration-arguments) block. Max of 1 per action.
+* `retain_rule` - (Required) Specifies the retention rule for cross-Region snapshot copies. See the [`retain_rule`](#cross-region-copy-rule-retain-rule-arguments) block. Max of 1 per action.
+* `target` - (Required) The target Region or the Amazon Resource Name (ARN) of the target Outpost for the snapshot copies.
+
+###### Encryption Configuration arguments
+
+* `cmk_arn` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS key to use for EBS encryption.
If this parameter is not specified, the default KMS key for the account is used.
+* `encrypted` - (Required) To encrypt a copy of an unencrypted snapshot when encryption by default is not enabled, enable encryption using this parameter. Copies of encrypted snapshots are encrypted, even if this parameter is false or when encryption by default is not enabled.
+
+#### Event Source arguments
+
+* `parameters` - (Required) Information about the event. See the [`parameters` configuration](#event-source-parameters-arguments) block.
+* `type` - (Required) The source of the event. Currently only managed CloudWatch Events rules are supported. Valid values are `MANAGED_CWE`.
+
+##### Event Source Parameters arguments
+
+* `description_regex` - (Required) The snapshot description that can trigger the policy. The description pattern is specified using a regular expression. The policy runs only if a snapshot with a description that matches the specified pattern is shared with your account.
+* `event_type` - (Required) The type of event. Currently, only `shareSnapshot` events are supported.
+* `snapshot_owner` - (Required) The IDs of the AWS accounts that can trigger the policy by sharing snapshots with your account. The policy only runs if one of the specified AWS accounts shares a snapshot with your account.
+
+#### Parameters arguments
+
+* `exclude_boot_volume` - (Optional) Indicates whether to exclude the root volume from snapshots created using CreateSnapshots. The default is `false`.
+* `no_reboot` - (Optional) Applies to AMI lifecycle policies only. Indicates whether targeted instances are rebooted when the lifecycle policy runs. `true` indicates that targeted instances are not rebooted when the policy runs. `false` indicates that target instances are rebooted when the policy runs. The default is `true` (instances are not rebooted).
+
+#### Schedule arguments
+
+* `copy_tags` - (Optional) Copy all user-defined tags on a source volume to snapshots of the volume created by this policy.
+* `create_rule` - (Required) See the [`create_rule`](#create-rule-arguments) block. Max of 1 per schedule.
+* `cross_region_copy_rule` - (Optional) See the [`cross_region_copy_rule`](#cross-region-copy-rule-arguments) block. Max of 3 per schedule.
+* `name` - (Required) A name for the schedule.
+* `deprecate_rule` - (Optional) See the [`deprecate_rule`](#deprecate-rule-arguments) block. Max of 1 per schedule.
+* `fast_restore_rule` - (Optional) See the [`fast_restore_rule`](#fast-restore-rule-arguments) block. Max of 1 per schedule.
+* `retain_rule` - (Required) See the [`retain_rule`](#retain-rule-arguments) block. Max of 1 per schedule.
+* `share_rule` - (Optional) See the [`share_rule`](#share-rule-arguments) block. Max of 1 per schedule.
+* `tags_to_add` - (Optional) A map of tag keys and their values. DLM lifecycle policies will already tag the snapshot with the tags on the volume. This configuration adds extra tags on top of these.
+* `variable_tags` - (Optional) A map of tag keys and variable values, where the values are determined when the policy is executed. Only `$(instance-id)` or `$(timestamp)` are valid values. Can only be used when `resource_types` is `INSTANCE`.
+
+#### Create Rule arguments
+
+* `cron_expression` - (Optional) The schedule, as a Cron expression. The schedule interval must be between 1 hour and 1 year. A sketch using `cron_expression` follows this list.
+* `interval` - (Optional) How often this lifecycle policy should be evaluated. `1`, `2`, `3`, `4`, `6`, `8`, `12` or `24` are valid values.
+* `interval_unit` - (Optional) The unit for how often the lifecycle policy should be evaluated. `HOURS` is currently the only allowed value and also the default value.
+* `location` - (Optional) Specifies the destination for snapshots created by the policy. To create snapshots in the same Region as the source resource, specify `CLOUD`. To create snapshots on the same Outpost as the source resource, specify `OUTPOST_LOCAL`. If you omit this parameter, `CLOUD` is used by default. If the policy targets resources in an AWS Region, then you must create snapshots in the same Region as the source resource. If the policy targets resources on an Outpost, then you can create snapshots on the same Outpost as the source resource, or in the Region of that Outpost. Valid values are `CLOUD` and `OUTPOST_LOCAL`.
+* `times` - (Optional) A list of times in 24 hour clock format that sets when the lifecycle policy should be evaluated. Max of 1.
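+
+Where the examples above use `interval`-based scheduling, a schedule can instead be driven by `cron_expression`. The following is a minimal, hand-written sketch (not `cdktf convert` output) that reuses the struct classes shown in the examples above; the execution role ARN is a placeholder.
+
+```python
+# Hand-written sketch of a cron-driven schedule; the execution role ARN below
+# is a placeholder, not a real role.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dlm_lifecycle_policy import (
+    DlmLifecyclePolicy,
+    DlmLifecyclePolicyPolicyDetails,
+    DlmLifecyclePolicyPolicyDetailsSchedule,
+    DlmLifecyclePolicyPolicyDetailsScheduleCreateRule,
+    DlmLifecyclePolicyPolicyDetailsScheduleRetainRule,
+)
+class CronScheduleStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DlmLifecyclePolicy(self, "example",
+            description="cron-based DLM lifecycle policy",
+            execution_role_arn="arn:aws:iam::123456789012:role/dlm-lifecycle-role",
+            policy_details=DlmLifecyclePolicyPolicyDetails(
+                resource_types=["VOLUME"],
+                schedule=[DlmLifecyclePolicyPolicyDetailsSchedule(
+                    name="daily snapshots at noon UTC",
+                    # With cron_expression set, interval, interval_unit, and
+                    # times are omitted.
+                    create_rule=DlmLifecyclePolicyPolicyDetailsScheduleCreateRule(
+                        cron_expression="cron(0 12 * * ? *)"
+                    ),
+                    retain_rule=DlmLifecyclePolicyPolicyDetailsScheduleRetainRule(
+                        count=14
+                    )
+                )
+                ],
+                target_tags={
+                    "snapshot": "true"
+                }
+            ),
+            state="ENABLED"
+        )
+```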
+
+#### Deprecate Rule arguments
+
+* `count` - (Optional) Specifies the number of oldest AMIs to deprecate. Must be an integer between `1` and `1000`.
+* `interval` - (Optional) Specifies the period after which to deprecate AMIs created by the schedule. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days.
+* `interval_unit` - (Optional) The unit of time for time-based retention. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Fast Restore Rule arguments
+
+* `availability_zones` - (Required) The Availability Zones in which to enable fast snapshot restore.
+* `count` - (Optional) The number of snapshots to be enabled with fast snapshot restore. Must be an integer between `1` and `1000`.
+* `interval` - (Optional) The amount of time to enable fast snapshot restore. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days.
+* `interval_unit` - (Optional) The unit of time for enabling fast snapshot restore. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Retain Rule arguments
+
+* `count` - (Optional) How many snapshots to keep. Must be an integer between `1` and `1000`.
+* `interval` - (Optional) The amount of time to retain each snapshot. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days.
+* `interval_unit` - (Optional) The unit of time for time-based retention. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Share Rule arguments
+
+* `target_accounts` - (Required) The IDs of the AWS accounts with which to share the snapshots.
+* `interval` - (Optional) The period after which snapshots that are shared with other AWS accounts are automatically unshared.
+* `interval_unit` - (Optional) The unit of time for the automatic unsharing interval. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Cross Region Copy Rule arguments
+
+* `cmk_arn` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS customer master key (CMK) to use for EBS encryption. If this argument is not specified, the default KMS key for the account is used.
+* `copy_tags` - (Optional) Whether to copy all user-defined tags from the source snapshot to the cross-region snapshot copy.
+* `deprecate_rule` - (Optional) The AMI deprecation rule for cross-Region AMI copies created by the rule. See the [`deprecate_rule`](#cross-region-copy-rule-deprecate-rule-arguments) block.
+* `encrypted` - (Required) To encrypt a copy of an unencrypted snapshot if encryption by default is not enabled, enable encryption using this parameter.
Copies of encrypted snapshots are encrypted, even if this parameter is false or if encryption by default is not enabled. +* `retain_rule` - (Required) The retention rule that indicates how long snapshot copies are to be retained in the destination Region. See the [`retain_rule`](#cross-region-copy-rule-retain-rule-arguments) block. Max of 1 per schedule. +* `target` - (Required) The target Region or the Amazon Resource Name (ARN) of the target Outpost for the snapshot copies. + +#### Cross Region Copy Rule Deprecate Rule arguments + +* `interval` - (Required) The period after which to deprecate the cross-Region AMI copies. The period must be less than or equal to the cross-Region AMI copy retention period, and it can't be greater than 10 years. This is equivalent to 120 months, 520 weeks, or 3650 days. +* `interval_unit` - (Required) The unit of time in which to measure the `interval`. Valid values: `DAYS`, `WEEKS`, `MONTHS`, or `YEARS`. + +#### Cross Region Copy Rule Retain Rule arguments + +* `interval` - (Required) The amount of time to retain each snapshot. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days. +* `interval_unit` - (Required) The unit of time for time-based retention. Valid values: `DAYS`, `WEEKS`, `MONTHS`, or `YEARS`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the DLM Lifecycle Policy. +* `id` - Identifier of the DLM Lifecycle Policy. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DLM lifecycle policies using their policy ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DLM lifecycle policies using their policy ID. For example: + +```console +% terraform import aws_dlm_lifecycle_policy.example policy-abcdef12345678901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dms_certificate.html.markdown b/website/docs/cdktf/python/r/dms_certificate.html.markdown new file mode 100644 index 00000000000..7f4fa4480cb --- /dev/null +++ b/website/docs/cdktf/python/r/dms_certificate.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_certificate" +description: |- + Provides a DMS (Data Migration Service) certificate resource. +--- + + + +# Resource: aws_dms_certificate + +Provides a DMS (Data Migration Service) certificate resource. DMS certificates can be created, deleted, and imported. + +~> **Note:** All arguments including the PEM encoded certificate will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dms_certificate import DmsCertificate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DmsCertificate(self, "test", + certificate_id="test-dms-certificate-tf", + certificate_pem="...", + tags={ + "Name": "test" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificate_id` - (Required) The certificate identifier. + + - Must contain from 1 to 255 alphanumeric characters and hyphens. + +* `certificate_pem` - (Optional) The contents of the .pem X.509 certificate file for the certificate. Either `certificate_pem` or `certificate_wallet` must be set. +* `certificate_wallet` - (Optional) The contents of the Oracle Wallet certificate for use with SSL, provided as a base64-encoded String. Either `certificate_pem` or `certificate_wallet` must be set. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `certificate_arn` - The Amazon Resource Name (ARN) for the certificate. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import certificates using the `certificate_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import certificates using the `certificate_id`. For example: + +```console +% terraform import aws_dms_certificate.test test-dms-certificate-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dms_endpoint.html.markdown b/website/docs/cdktf/python/r/dms_endpoint.html.markdown new file mode 100644 index 00000000000..302bced7441 --- /dev/null +++ b/website/docs/cdktf/python/r/dms_endpoint.html.markdown @@ -0,0 +1,238 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_endpoint" +description: |- + Provides a DMS (Data Migration Service) endpoint resource. +--- + + + +# Resource: aws_dms_endpoint + +Provides a DMS (Data Migration Service) endpoint resource. DMS endpoints can be created, updated, deleted, and imported. + +~> **Note:** All arguments including the password will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +~> **Note:** The `s3_settings` argument is deprecated, may not be maintained, and will be removed in a future version. Use the [`aws_dms_s3_endpoint`](/docs/providers/aws/r/dms_s3_endpoint.html) resource instead. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dms_endpoint import DmsEndpoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DmsEndpoint(self, "test",
+            certificate_arn="arn:aws:acm:us-east-1:123456789012:certificate/12345678-1234-1234-1234-123456789012",
+            database_name="test",
+            endpoint_id="test-dms-endpoint-tf",
+            endpoint_type="source",
+            engine_name="aurora",
+            extra_connection_attributes="",
+            kms_key_arn="arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012",
+            password="test",
+            port=3306,
+            server_name="test",
+            ssl_mode="none",
+            tags={
+                "Name": "test"
+            },
+            username="test"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `endpoint_id` - (Required) Database endpoint identifier. Identifiers must contain from 1 to 255 alphanumeric characters or hyphens, begin with a letter, contain only ASCII letters, digits, and hyphens, not end with a hyphen, and not contain two consecutive hyphens.
+* `endpoint_type` - (Required) Type of endpoint. Valid values are `source`, `target`.
+* `engine_name` - (Required) Type of engine for the endpoint. Valid values are `aurora`, `aurora-postgresql`, `azuredb`, `azure-sql-managed-instance`, `db2`, `db2-zos`, `docdb`, `dynamodb`, `elasticsearch`, `kafka`, `kinesis`, `mariadb`, `mongodb`, `mysql`, `opensearch`, `oracle`, `postgres`, `redshift`, `s3`, `sqlserver`, `sybase`. Please note that some of these engine names are available only for the `target` endpoint type (e.g. `redshift`).
+* `kms_key_arn` - (Required when `engine_name` is `mongodb`, cannot be set when `engine_name` is `s3`, optional otherwise) ARN for the KMS key that will be used to encrypt the connection parameters. If you do not specify a value for `kms_key_arn`, then AWS DMS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region. To encrypt an S3 target with a KMS Key, use the parameter `s3_settings.server_side_encryption_kms_key_id`. When `engine_name` is `redshift`, `kms_key_arn` is the KMS Key for the Redshift target and the parameter `redshift_settings.server_side_encryption_kms_key_id` encrypts the S3 intermediate storage.
+
+The following arguments are optional:
+
+* `certificate_arn` - (Optional, Default: empty string) ARN for the certificate.
+* `database_name` - (Optional) Name of the endpoint database.
+* `elasticsearch_settings` - (Optional) Configuration block for OpenSearch settings. See below.
+* `extra_connection_attributes` - (Optional) Additional attributes associated with the connection. For available attributes for a `source` Endpoint, see [Sources for data migration](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.html). For available attributes for a `target` Endpoint, see [Targets for data migration](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.html).
+* `kafka_settings` - (Optional) Configuration block for Kafka settings. See below.
+* `kinesis_settings` - (Optional) Configuration block for Kinesis settings. See below.
+* `mongodb_settings` - (Optional) Configuration block for MongoDB settings. See below.
+* `password` - (Optional) Password to be used to login to the endpoint database.
+* `port` - (Optional) Port used by the endpoint database.
+* `redshift_settings` - (Optional) Configuration block for Redshift settings. See below.
+* `s3_settings` - (Optional) (**Deprecated**, use the [`aws_dms_s3_endpoint`](/docs/providers/aws/r/dms_s3_endpoint.html) resource instead) Configuration block for S3 settings. See below.
+* `secrets_manager_access_role_arn` - (Optional) ARN of the IAM role that specifies AWS DMS as the trusted entity and has the required permissions to access the value in SecretsManagerSecret.
+* `secrets_manager_arn` - (Optional) Full ARN, partial ARN, or friendly name of the SecretsManagerSecret that contains the endpoint connection details. Supported only when `engine_name` is `aurora`, `aurora-postgresql`, `mariadb`, `mongodb`, `mysql`, `oracle`, `postgres`, `redshift`, or `sqlserver`.
+* `server_name` - (Optional) Host name of the server.
+* `service_access_role` - (Optional) ARN used by the service access IAM role for DynamoDB endpoints.
+* `ssl_mode` - (Optional, Default: `none`) SSL mode to use for the connection. Valid values are `none`, `require`, `verify-ca`, `verify-full`.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `username` - (Optional) User name to be used to login to the endpoint database.
+
+### elasticsearch_settings
+
+-> Additional information can be found in the [Using Amazon OpenSearch Service as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Elasticsearch.html).
+
+* `endpoint_uri` - (Required) Endpoint for the OpenSearch cluster.
+* `error_retry_duration` - (Optional) Maximum number of seconds for which DMS retries failed API requests to the OpenSearch cluster. Default is `300`.
+* `full_load_error_percentage` - (Optional) Maximum percentage of records that can fail to be written before a full load operation stops. Default is `10`.
+* `service_access_role_arn` - (Required) ARN of the IAM Role with permissions to write to the OpenSearch cluster.
+
+### kafka_settings
+
+-> Additional information can be found in the [Using Apache Kafka as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Kafka.html). A short configuration sketch follows the argument list below.
+
+* `broker` - (Required) Kafka broker location. Specify in the form broker-hostname-or-ip:port.
+* `include_control_details` - (Optional) Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
+* `include_null_and_empty` - (Optional) Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
+* `include_partition_value` - (Optional) Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. Default is `false`.
+* `include_table_alter_operations` - (Optional) Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. Default is `false`.
+* `include_transaction_details` - (Optional) Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). Default is `false`.
+* `message_format` - (Optional) Output format for the records created on the endpoint. Message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
+* `message_max_bytes` - (Optional) Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
+* `no_hex_prefix` - (Optional) Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the `no_hex_prefix` endpoint setting to enable migration of RAW data type columns without adding the `'0x'` prefix.
+* `partition_include_schema_table` - (Optional) Prefixes schema and table names to partition values, when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
+* `sasl_password` - (Optional) Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
+* `sasl_username` - (Optional) Secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
+* `security_protocol` - (Optional) Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `sasl_username` and `sasl_password`.
+* `ssl_ca_certificate_arn` - (Optional) ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
+* `ssl_client_certificate_arn` - (Optional) ARN of the client certificate used to securely connect to a Kafka target endpoint.
+* `ssl_client_key_arn` - (Optional) ARN for the client private key used to securely connect to a Kafka target endpoint.
+* `ssl_client_key_password` - (Optional) Password for the client private key used to securely connect to a Kafka target endpoint.
+* `topic` - (Optional) Kafka topic for migration. Default is `kafka-default-topic`.
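+
+A minimal Kafka target endpoint only needs `broker` (and usually `topic`). The following hand-written sketch is not `cdktf convert` output; the `DmsEndpointKafkaSettings` struct name is assumed from the provider's usual struct-naming convention, and the broker address and topic are placeholders.
+
+```python
+# Hand-written sketch of a Kafka target endpoint. DmsEndpointKafkaSettings is
+# assumed to follow the generated struct-naming convention; the broker address
+# and topic are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dms_endpoint import DmsEndpoint, DmsEndpointKafkaSettings
+class KafkaEndpointStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DmsEndpoint(self, "kafka_target",
+            endpoint_id="example-kafka-target",
+            endpoint_type="target",
+            engine_name="kafka",
+            kafka_settings=DmsEndpointKafkaSettings(
+                broker="kafka-broker.example.com:9092",
+                topic="dms-migration-topic"
+            )
+        )
+```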
+
+### kinesis_settings
+
+-> Additional information can be found in the [Using Amazon Kinesis Data Streams as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Kinesis.html).
+
+* `include_control_details` - (Optional) Shows detailed control information for table definition, column definition, and table and column changes in the Kinesis message output. Default is `false`.
+* `include_null_and_empty` - (Optional) Include NULL and empty columns in the target. Default is `false`.
+* `include_partition_value` - (Optional) Shows the partition value within the Kinesis message output, unless the partition type is schema-table-type. Default is `false`.
+* `include_table_alter_operations` - (Optional) Includes any data definition language (DDL) operations that change the table in the control data. Default is `false`.
+* `include_transaction_details` - (Optional) Provides detailed transaction information from the source database. Default is `false`.
+* `message_format` - (Optional) Output format for the records created. Default is `json`. Valid values are `json` and `json-unformatted` (a single line with no tab).
+* `partition_include_schema_table` - (Optional) Prefixes schema and table names to partition values, when the partition type is primary-key-type. Default is `false`.
+* `service_access_role_arn` - (Optional) ARN of the IAM Role with permissions to write to the Kinesis data stream.
+* `stream_arn` - (Optional) ARN of the Kinesis data stream.
+
+### mongodb_settings
+
+-> Additional information can be found in the [Using MongoDB as a Source for AWS DMS documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.MongoDB.html).
+
+* `auth_mechanism` - (Optional) Authentication mechanism to access the MongoDB source endpoint. Default is `default`.
+* `auth_source` - (Optional) Authentication database name. Not used when `auth_type` is `no`. Default is `admin`.
+* `auth_type` - (Optional) Authentication type to access the MongoDB source endpoint. Default is `password`.
+* `docs_to_investigate` - (Optional) Number of documents to preview to determine the document organization. Use this setting when `nesting_level` is set to `one`. Default is `1000`.
+* `extract_doc_id` - (Optional) Document ID. Use this setting when `nesting_level` is set to `none`. Default is `false`.
+* `nesting_level` - (Optional) Specifies either document or table mode. Default is `none`. Valid values are `one` (table mode) and `none` (document mode).
+
+### redis_settings
+
+-> Additional information can be found in the [Using Redis as a target for AWS Database Migration Service](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Redis.html). A short configuration sketch follows this list.
+
+* `auth_password` - (Optional) The password provided with the auth-role and auth-token options of the AuthType setting for a Redis target endpoint.
+* `auth_type` - (Required) The type of authentication to perform when connecting to a Redis target. Options include `none`, `auth-token`, and `auth-role`. The `auth-token` option requires an `auth_password` value to be provided. The `auth-role` option requires `auth_user_name` and `auth_password` values to be provided.
+* `auth_user_name` - (Optional) The username provided with the `auth-role` option of the AuthType setting for a Redis target endpoint.
+* `server_name` - (Required) Fully qualified domain name of the endpoint.
+* `port` - (Required) Transmission Control Protocol (TCP) port for the endpoint.
+* `ssl_ca_certificate_arn` - (Optional) The Amazon Resource Name (ARN) for the certificate authority (CA) that DMS uses to connect to your Redis target endpoint.
+* `ssl_security_protocol` - (Optional) The connection security protocol. The `plaintext` option doesn't provide Transport Layer Security (TLS) encryption for traffic between the endpoint and the database. Options include `plaintext` and `ssl-encryption`. The default is `ssl-encryption`.
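+
+The following hand-written sketch shows a Redis target endpoint using token authentication. It is not `cdktf convert` output; the `DmsEndpointRedisSettings` struct name is assumed from the provider's usual struct-naming convention, and the host, port, and token are placeholders.
+
+```python
+# Hand-written sketch of a Redis target endpoint. DmsEndpointRedisSettings is
+# assumed to follow the generated struct-naming convention; host, port, and
+# the auth token are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dms_endpoint import DmsEndpoint, DmsEndpointRedisSettings
+class RedisEndpointStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DmsEndpoint(self, "redis_target",
+            endpoint_id="example-redis-target",
+            endpoint_type="target",
+            engine_name="redis",
+            redis_settings=DmsEndpointRedisSettings(
+                auth_type="auth-token",
+                auth_password="replace-with-a-real-token",
+                server_name="redis.example.com",
+                port=6379,
+                ssl_security_protocol="ssl-encryption"
+            )
+        )
+```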
+
+### redshift_settings
+
+-> Additional information can be found in the [Using Amazon Redshift as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Redshift.html).
+
+* `bucket_folder` - (Optional) Custom S3 Bucket Object prefix for intermediate storage.
+* `bucket_name` - (Optional) Custom S3 Bucket name for intermediate storage.
+* `encryption_mode` - (Optional) The server-side encryption mode that you want to use to encrypt your intermediate .csv object files copied to S3. Defaults to `SSE_S3`. Valid values are `SSE_S3` and `SSE_KMS`.
+* `server_side_encryption_kms_key_id` - (Required when `encryption_mode` is `SSE_KMS`, must not be set otherwise) ARN or ID of KMS Key to use when `encryption_mode` is `SSE_KMS`.
+* `service_access_role_arn` - (Optional) Amazon Resource Name (ARN) of the IAM Role with permissions to read from or write to the S3 Bucket for intermediate storage.
+
+### s3_settings
+
+~> **Deprecated:** This argument is deprecated, may not be maintained, and will be removed in a future version. Use the [`aws_dms_s3_endpoint`](/docs/providers/aws/r/dms_s3_endpoint.html) resource instead.
+
+-> Additional information can be found in the [Using Amazon S3 as a Source for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.S3.html) and [Using Amazon S3 as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html).
+
+* `add_column_name` - (Optional) Whether to add column name information to the .csv output file. Default is `false`.
+* `bucket_folder` - (Optional) S3 object prefix.
+* `bucket_name` - (Optional) S3 bucket name.
+* `canned_acl_for_objects` - (Optional) Predefined (canned) access control list for objects created in an S3 bucket. Valid values include `none`, `private`, `public-read`, `public-read-write`, `authenticated-read`, `aws-exec-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Default is `none`.
+* `cdc_inserts_and_updates` - (Optional) Whether to write insert and update operations to .csv or .parquet output files. Default is `false`.
+* `cdc_inserts_only` - (Optional) Whether to write insert operations to .csv or .parquet output files. Default is `false`.
+* `cdc_max_batch_interval` - (Optional) Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. Default is `60`.
+* `cdc_min_file_size` - (Optional) Minimum file size condition as defined in kilobytes to output a file to Amazon S3. Default is `32000`. **NOTE:** Previously, this setting was measured in megabytes but now represents kilobytes. Update configurations accordingly.
+* `cdc_path` - (Optional) Folder path of CDC files. For an S3 source, this setting is required if a task captures change data; otherwise, it's optional. If `cdc_path` is set, AWS DMS reads CDC files from this path and replicates the data changes to the target endpoint. Supported in AWS DMS versions 3.4.2 and later.
+* `compression_type` - (Optional) Set to compress target files. Default is `NONE`. Valid values are `GZIP` and `NONE`.
+* `csv_delimiter` - (Optional) Delimiter used to separate columns in the source files. Default is `,`.
+* `csv_no_sup_value` - (Optional) String to use for all columns not included in the supplemental log.
+* `csv_null_value` - (Optional) String to use as null when writing to the target.
+* `csv_row_delimiter` - (Optional) Delimiter used to separate rows in the source files. Default is `\n`.
+* `data_format` - (Optional) Output format for the files that AWS DMS uses to create S3 objects. Valid values are `csv` and `parquet`. Default is `csv`.
+* `data_page_size` - (Optional) Size of one data page in bytes. Default is `1048576` (1 MiB).
+* `date_partition_delimiter` - (Optional) Date separating delimiter to use during folder partitioning. Valid values are `SLASH`, `UNDERSCORE`, `DASH`, and `NONE`. Default is `SLASH`.
+* `date_partition_enabled` - (Optional) Partition S3 bucket folders based on transaction commit dates. Default is `false`.
+* `date_partition_sequence` - (Optional) Date format to use during folder partitioning. Use this parameter when `date_partition_enabled` is set to true. Valid values are `YYYYMMDD`, `YYYYMMDDHH`, `YYYYMM`, `MMYYYYDD`, and `DDMMYYYY`. Default is `YYYYMMDD`.
+* `dict_page_size_limit` - (Optional) Maximum size in bytes of an encoded dictionary page of a column. Default is `1048576` (1 MiB).
+* `enable_statistics` - (Optional) Whether to enable statistics for Parquet pages and row groups. Default is `true`.
+* `encoding_type` - (Optional) Type of encoding to use. Valid values are `rle_dictionary`, `plain`, and `plain_dictionary`. Default is `rle_dictionary`.
+* `encryption_mode` - (Optional) Server-side encryption mode that you want to use to encrypt your .csv or .parquet object files copied to S3. Valid values are `SSE_S3` and `SSE_KMS`. Default is `SSE_S3`.
+* `external_table_definition` - (Optional) JSON document that describes how AWS DMS should interpret the data.
+* `ignore_header_rows` - (Optional) When this value is set to `1`, DMS ignores the first row header in a .csv file. Default is `0`.
+* `include_op_for_full_load` - (Optional) Whether to enable a full load to write INSERT operations to the .csv output files only to indicate how the rows were added to the source database. Default is `false`.
+* `max_file_size` - (Optional) Maximum size (in KB) of any .csv file to be created while migrating to an S3 target during full load. Valid values are from `1` to `1048576`. Default is `1048576` (1 GB).
+* `parquet_timestamp_in_millisecond` - (Optional) Specifies the precision of any TIMESTAMP column values written to an S3 object file in .parquet format. Default is `false`.
+* `parquet_version` - (Optional) Version of the .parquet file format. Default is `parquet-1-0`. Valid values are `parquet-1-0` and `parquet-2-0`.
+* `preserve_transactions` - (Optional) Whether DMS saves the transaction order for a CDC load on the S3 target specified by `cdc_path`. Default is `false`.
+* `rfc_4180` - (Optional) For an S3 source, whether each leading double quotation mark has to be followed by an ending double quotation mark. Default is `true`.
+* `row_group_length` - (Optional) Number of rows in a row group. Default is `10000`.
+* `server_side_encryption_kms_key_id` - (Required when `encryption_mode` is `SSE_KMS`, must not be set otherwise) ARN or ID of KMS Key to use when `encryption_mode` is `SSE_KMS`.
+* `service_access_role_arn` - (Optional) ARN of the IAM Role with permissions to read from or write to the S3 Bucket.
+* `timestamp_column_name` - (Optional) Column to add with timestamp information to the endpoint data for an Amazon S3 target.
+* `use_csv_no_sup_value` - (Optional) Whether to use `csv_no_sup_value` for columns not included in the supplemental log.
+* `use_task_start_time_for_full_load_timestamp` - (Optional) When set to true, uses the task start time as the timestamp column value instead of the time data is written to target. For full load, when set to true, each row of the timestamp column contains the task start time. For CDC loads, each row of the timestamp column contains the transaction commit time.
When set to false, the full load timestamp in the timestamp column increments with the time data arrives at the target. Default is `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `endpoint_arn` - ARN for the endpoint.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoints using the `endpoint_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import endpoints using the `endpoint_id`. For example:
+
+```console
+% terraform import aws_dms_endpoint.test test-dms-endpoint-tf
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dms_event_subscription.html.markdown b/website/docs/cdktf/python/r/dms_event_subscription.html.markdown
new file mode 100644
index 00000000000..488163dcb36
--- /dev/null
+++ b/website/docs/cdktf/python/r/dms_event_subscription.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "DMS (Database Migration)"
+layout: "aws"
+page_title: "AWS: aws_dms_event_subscription"
+description: |-
+  Provides a DMS (Data Migration Service) event subscription resource.
+---
+
+
+
+# Resource: aws_dms_event_subscription
+
+Provides a DMS (Data Migration Service) event subscription resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dms_event_subscription import DmsEventSubscription
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DmsEventSubscription(self, "example",
+            enabled=True,
+            event_categories=["creation", "failure"],
+            name="my-favorite-event-subscription",
+            sns_topic_arn=Token.as_string(aws_sns_topic_example.arn),
+            source_ids=[
+                Token.as_string(aws_dms_replication_task_example.replication_task_id)
+            ],
+            source_type="replication-task",
+            tags={
+                "Name": "example"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of event subscription.
+* `enabled` - (Optional, Default: true) Whether the event subscription should be enabled.
+* `event_categories` - (Optional) List of event categories to listen for, see `DescribeEventCategories` for a canonical list.
+* `source_type` - (Optional, Default: all events) Type of source for events. Valid values: `replication-instance` or `replication-task`.
+* `source_ids` - (Required) IDs of sources to listen to.
+* `sns_topic_arn` - (Required) ARN of the SNS topic to send events to.
+* `tags` - (Optional) Map of resource tags to assign to the resource.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the DMS Event Subscription. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `update` - (Default `10m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import event subscriptions using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import event subscriptions using the `name`. For example: + +```console +% terraform import aws_dms_event_subscription.test my-awesome-event-subscription +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dms_replication_instance.html.markdown b/website/docs/cdktf/python/r/dms_replication_instance.html.markdown new file mode 100644 index 00000000000..b48243a1792 --- /dev/null +++ b/website/docs/cdktf/python/r/dms_replication_instance.html.markdown @@ -0,0 +1,161 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_instance" +description: |- + Provides a DMS (Data Migration Service) replication instance resource. +--- + + + +# Resource: aws_dms_replication_instance + +Provides a DMS (Data Migration Service) replication instance resource. DMS replication instances can be created, updated, deleted, and imported. + +## Example Usage + +Create required roles and then create a DMS instance, setting the depends_on to the required role policy attachments. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.dms_replication_instance import DmsReplicationInstance
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        dms_assume_role = DataAwsIamPolicyDocument(self, "dms_assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["dms.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        dms_access_for_endpoint = IamRole(self, "dms-access-for-endpoint",
+            assume_role_policy=Token.as_string(dms_assume_role.json),
+            name="dms-access-for-endpoint"
+        )
+        dms_cloudwatch_logs_role = IamRole(self, "dms-cloudwatch-logs-role",
+            assume_role_policy=Token.as_string(dms_assume_role.json),
+            name="dms-cloudwatch-logs-role"
+        )
+        dms_vpc_role = IamRole(self, "dms-vpc-role",
+            assume_role_policy=Token.as_string(dms_assume_role.json),
+            name="dms-vpc-role"
+        )
+        dms_access_for_endpoint_amazon_dms_redshift_s3_role = IamRolePolicyAttachment(self, "dms-access-for-endpoint-AmazonDMSRedshiftS3Role",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AmazonDMSRedshiftS3Role",
+            role=dms_access_for_endpoint.name
+        )
+        dms_cloudwatch_logs_role_amazon_dms_cloud_watch_logs_role = IamRolePolicyAttachment(self, "dms-cloudwatch-logs-role-AmazonDMSCloudWatchLogsRole",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AmazonDMSCloudWatchLogsRole",
+            role=dms_cloudwatch_logs_role.name
+        )
+        dms_vpc_role_amazon_dmsvpc_management_role = IamRolePolicyAttachment(self, "dms-vpc-role-AmazonDMSVPCManagementRole",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AmazonDMSVPCManagementRole",
+            role=dms_vpc_role.name
+        )
+        DmsReplicationInstance(self, "test",
+            allocated_storage=20,
+            apply_immediately=True,
+            auto_minor_version_upgrade=True,
+            availability_zone="us-west-2c",
+            depends_on=[dms_access_for_endpoint_amazon_dms_redshift_s3_role, dms_cloudwatch_logs_role_amazon_dms_cloud_watch_logs_role, dms_vpc_role_amazon_dmsvpc_management_role
+            ],
+            engine_version="3.1.4",
+            kms_key_arn="arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012",
+            multi_az=False,
+            preferred_maintenance_window="sun:10:30-sun:14:30",
+            publicly_accessible=True,
+            replication_instance_class="dms.t2.micro",
+            replication_instance_id="test-dms-replication-instance-tf",
+            replication_subnet_group_id=test_dms_replication_subnet_group_tf.id,
+            tags={
+                "Name": "test"
+            },
+            vpc_security_group_ids=["sg-12345678"]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allocated_storage` - (Optional, Default: 50, Min: 5, Max: 6144) The amount of storage (in gigabytes) to be initially allocated for the replication instance.
+* `allow_major_version_upgrade` - (Optional, Default: false) Indicates that major version upgrades are allowed.
+* `apply_immediately` - (Optional, Default: false) Indicates whether the changes should be applied immediately or during the next maintenance window. Only used when updating an existing resource.
+* `auto_minor_version_upgrade` - (Optional, Default: false) Indicates that minor engine upgrades will be applied automatically to the replication instance during the maintenance window.
+* `availability_zone` - (Optional) The EC2 Availability Zone that the replication instance will be created in.
+* `engine_version` - (Optional) The engine version number of the replication instance. +* `kms_key_arn` - (Optional) The Amazon Resource Name (ARN) for the KMS key that will be used to encrypt the connection parameters. If you do not specify a value for `kms_key_arn`, then AWS DMS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region. +* `multi_az` - (Optional) Specifies if the replication instance is a multi-az deployment. You cannot set the `availability_zone` parameter if the `multi_az` parameter is set to `true`. +* `preferred_maintenance_window` - (Optional) The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). + + - Default: A 30-minute window selected at random from an 8-hour block of time per region, occurring on a random day of the week. + - Format: `ddd:hh24:mi-ddd:hh24:mi` + - Valid Days: `mon, tue, wed, thu, fri, sat, sun` + - Constraints: Minimum 30-minute window. + +* `publicly_accessible` - (Optional, Default: false) Specifies the accessibility options for the replication instance. A value of true represents an instance with a public IP address. A value of false represents an instance with a private IP address. +* `replication_instance_class` - (Required) The compute and memory capacity of the replication instance as specified by the replication instance class. See [AWS DMS User Guide](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_ReplicationInstance.Types.html) for available instance sizes and advice on which one to choose. +* `replication_instance_id` - (Required) The replication instance identifier. This parameter is stored as a lowercase string. + + - Must contain from 1 to 63 alphanumeric characters or hyphens. + - First character must be a letter. + - Cannot end with a hyphen + - Cannot contain two consecutive hyphens. + +* `replication_subnet_group_id` - (Optional) A subnet group to associate with the replication instance. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpc_security_group_ids` - (Optional) A list of VPC security group IDs to be used with the replication instance. The VPC security groups must work with the VPC containing the replication instance. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `replication_instance_arn` - The Amazon Resource Name (ARN) of the replication instance. +* `replication_instance_private_ips` - A list of the private IP addresses of the replication instance. +* `replication_instance_public_ips` - A list of the public IP addresses of the replication instance. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `40m`) +- `update` - (Default `30m`) +- `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import replication instances using the `replication_instance_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import replication instances using the `replication_instance_id`. For example: + +```console +% terraform import aws_dms_replication_instance.test test-dms-replication-instance-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dms_replication_subnet_group.html.markdown b/website/docs/cdktf/python/r/dms_replication_subnet_group.html.markdown new file mode 100644 index 00000000000..f9937e669f8 --- /dev/null +++ b/website/docs/cdktf/python/r/dms_replication_subnet_group.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_subnet_group" +description: |- + Provides a DMS (Data Migration Service) subnet group resource. +--- + + + +# Resource: aws_dms_replication_subnet_group + +Provides a DMS (Data Migration Service) replication subnet group resource. DMS replication subnet groups can be created, updated, deleted, and imported. + +~> **Note:** AWS requires a special IAM role called `dms-vpc-role` when using this resource. See the example below to create it as part of your configuration. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dms_replication_subnet_group import DmsReplicationSubnetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DmsReplicationSubnetGroup(self, "example", + replication_subnet_group_description="Example replication subnet group", + replication_subnet_group_id="example-dms-replication-subnet-group-tf", + subnet_ids=["subnet-12345678", "subnet-12345679"], + tags={ + "Name": "example" + } + ) +``` + +### Creating special IAM role + +If your account does not already include the `dms-vpc-role` IAM role, you will need to create it to allow DMS to manage subnets in the VPC. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.dms_replication_subnet_group import DmsReplicationSubnetGroup +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + dms_vpc_role = IamRole(self, "dms-vpc-role", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "dms.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + })), + description="Allows DMS to manage VPC", + name="dms-vpc-role" + ) + example = IamRolePolicyAttachment(self, "example", + policy_arn="arn:aws:iam::aws:policy/service-role/AmazonDMSVPCManagementRole", + role=dms_vpc_role.name + ) + aws_dms_replication_subnet_group_example = DmsReplicationSubnetGroup(self, "example_2", + depends_on=[example], + replication_subnet_group_description="Example", + replication_subnet_group_id="example-id", + subnet_ids=["subnet-12345678", "subnet-12345679"], + tags={ + "Name": "example-id" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dms_replication_subnet_group_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `replication_subnet_group_description` - (Required) Description for the subnet group. +* `replication_subnet_group_id` - (Required) Name for the replication subnet group. This value is stored as a lowercase string. It must contain no more than 255 alphanumeric characters, periods, spaces, underscores, or hyphens and cannot be `default`. +* `subnet_ids` - (Required) List of at least 2 EC2 subnet IDs for the subnet group. The subnets must cover at least 2 availability zones. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - The ID of the VPC the subnet group is in. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `15m`) +- `update` - (Default `15m`) +- `delete` - (Default `15m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import replication subnet groups using the `replication_subnet_group_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import replication subnet groups using the `replication_subnet_group_id`. 
For example: + +```console +% terraform import aws_dms_replication_subnet_group.test test-dms-replication-subnet-group-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dms_replication_task.html.markdown b/website/docs/cdktf/python/r/dms_replication_task.html.markdown new file mode 100644 index 00000000000..b297dd7cbd1 --- /dev/null +++ b/website/docs/cdktf/python/r/dms_replication_task.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_task" +description: |- + Provides a DMS (Data Migration Service) replication task resource. +--- + + + +# Resource: aws_dms_replication_task + +Provides a DMS (Data Migration Service) replication task resource. DMS replication tasks can be created, updated, deleted, and imported. + +~> **NOTE:** Changing most arguments will stop the task if it is running. You can set `start_replication_task` to resume the task afterwards. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dms_replication_task import DmsReplicationTask +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DmsReplicationTask(self, "test", + cdc_start_time=Token.as_string(1484346880), + migration_type="full-load", + replication_instance_arn=test_dms_replication_instance_tf.replication_instance_arn, + replication_task_id="test-dms-replication-task-tf", + replication_task_settings="...", + source_endpoint_arn=test_dms_source_endpoint_tf.endpoint_arn, + table_mappings="{\\\"rules\\\":[{\\\"rule-type\\\":\\\"selection\\\",\\\"rule-id\\\":\\\"1\\\",\\\"rule-name\\\":\\\"1\\\",\\\"object-locator\\\":{\\\"schema-name\\\":\\\"%\\\",\\\"table-name\\\":\\\"%\\\"},\\\"rule-action\\\":\\\"include\\\"}]}", + tags={ + "Name": "test" + }, + target_endpoint_arn=test_dms_target_endpoint_tf.endpoint_arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cdc_start_position` - (Optional, Conflicts with `cdc_start_time`) Indicates when you want a change data capture (CDC) operation to start. The value can be in date, checkpoint, or LSN/SCN format depending on the source engine. For more information, see [Determining a CDC native start point](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Task.CDC.html#CHAP_Task.CDC.StartPoint.Native). +* `cdc_start_time` - (Optional, Conflicts with `cdc_start_position`) The Unix timestamp integer for the start of the Change Data Capture (CDC) operation. +* `migration_type` - (Required) The migration type. Can be one of `full-load | cdc | full-load-and-cdc`. +* `replication_instance_arn` - (Required) The Amazon Resource Name (ARN) of the replication instance. +* `replication_task_id` - (Required) The replication task identifier. + + - Must contain from 1 to 255 alphanumeric characters or hyphens. + - First character must be a letter. + - Cannot end with a hyphen. + - Cannot contain two consecutive hyphens. + +* `replication_task_settings` - (Optional) An escaped JSON string that contains the task settings. For a complete list of task settings, see [Task Settings for AWS Database Migration Service Tasks](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TaskSettings.html). 
+* `source_endpoint_arn` - (Required) The Amazon Resource Name (ARN) string that uniquely identifies the source endpoint. +* `start_replication_task` - (Optional) Whether to run or stop the replication task. +* `table_mappings` - (Required) An escaped JSON string that contains the table mappings. For information on table mapping see [Using Table Mapping with an AWS Database Migration Service Task to Select and Filter Data](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TableMapping.html) +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `target_endpoint_arn` - (Required) The Amazon Resource Name (ARN) string that uniquely identifies the target endpoint. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `replication_task_arn` - The Amazon Resource Name (ARN) for the replication task. +* `status` - Replication Task status. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import replication tasks using the `replication_task_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import replication tasks using the `replication_task_id`. For example: + +```console +% terraform import aws_dms_replication_task.test test-dms-replication-task-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dms_s3_endpoint.html.markdown b/website/docs/cdktf/python/r/dms_s3_endpoint.html.markdown new file mode 100644 index 00000000000..ab48e88911c --- /dev/null +++ b/website/docs/cdktf/python/r/dms_s3_endpoint.html.markdown @@ -0,0 +1,204 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_s3_endpoint" +description: |- + Provides a DMS (Data Migration Service) S3 endpoint resource. +--- + + + +# Resource: aws_dms_s3_endpoint + +Provides a DMS (Data Migration Service) S3 endpoint resource. DMS S3 endpoints can be created, updated, deleted, and imported. + +~> **Note:** AWS is deprecating `extra_connection_attributes`, such as used with `aws_dms_endpoint`. This resource is an alternative to `aws_dms_endpoint` and does not use `extra_connection_attributes`. (AWS currently includes `extra_connection_attributes` in the raw responses to the AWS Provider requests and so they may be visible in Terraform logs.) + +~> **Note:** Some of this resource's arguments have default values that come from the AWS Provider. Other default values are provided by AWS and subject to change without notice. When relying on AWS defaults, the Terraform state will often have a zero value. For example, the AWS Provider does not provide a default for `cdc_max_batch_interval` but the AWS default is `60` (seconds). 
However, the Terraform state will show `0` since this is the value returned by AWS when no value is present. Below, we aim to flag the defaults that come from AWS (_e.g._, "AWS default...").

## Example Usage

### Minimal Configuration

This is the minimal configuration for an `aws_dms_s3_endpoint`. This endpoint will rely on the AWS Provider and AWS defaults.

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.dms_s3_endpoint import DmsS3Endpoint
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DmsS3Endpoint(self, "example",
            bucket_name="bucket_name",
            depends_on=[aws_iam_role_policy_example],
            endpoint_id="donnedtipi",
            endpoint_type="target",
            service_access_role_arn=Token.as_string(aws_iam_role_example.arn)
        )
```

### Complete Configuration

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.dms_s3_endpoint import DmsS3Endpoint
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DmsS3Endpoint(self, "example",
            add_column_name=True,
            add_trailing_padding_character=False,
            bucket_folder="folder",
            bucket_name="bucket_name",
            canned_acl_for_objects="private",
            cdc_inserts_and_updates=True,
            cdc_inserts_only=False,
            cdc_max_batch_interval=100,
            cdc_min_file_size=16,
            cdc_path="cdc/path",
            compression_type="GZIP",
            csv_delimiter=";",
            csv_no_sup_value="x",
            csv_null_value="?",
            csv_row_delimiter="\\r\\n",
            data_format="parquet",
            data_page_size=1100000,
            date_partition_delimiter="UNDERSCORE",
            date_partition_enabled=True,
            date_partition_sequence="yyyymmddhh",
            date_partition_timezone="Asia/Seoul",
            depends_on=[aws_iam_role_policy_example],
            dict_page_size_limit=1000000,
            enable_statistics=False,
            encoding_type="plain",
            encryption_mode="SSE_S3",
            endpoint_id="donnedtipi",
            endpoint_type="target",
            expected_bucket_owner=Token.as_string(current.account_id),
            external_table_definition="etd",
            ignore_header_rows=1,
            include_op_for_full_load=True,
            max_file_size=1000000,
            parquet_timestamp_in_millisecond=True,
            parquet_version="parquet-2-0",
            preserve_transactions=False,
            rfc4180=False,
            row_group_length=11000,
            server_side_encryption_kms_key_id=Token.as_string(aws_kms_key_example.arn),
            service_access_role_arn=Token.as_string(aws_iam_role_example.arn),
            ssl_mode="none",
            tags={
                "Name": "donnedtipi",
                "Remove": "to-remove",
                "Update": "to-update"
            },
            timestamp_column_name="tx_commit_time",
            use_csv_no_sup_value=False,
            use_task_start_time_for_full_load_timestamp=True
        )
```

## Argument Reference

The following arguments are required:

* `bucket_name` - (Required) S3 bucket name.
* `cdc_path` - (Required for CDC; otherwise, Optional) Folder path of CDC files. If `cdc_path` is set, AWS DMS reads CDC files from this path and replicates the data changes to the target endpoint. Supported in AWS DMS versions 3.4.2 and later.
* `endpoint_id` - (Required) Database endpoint identifier.
Identifiers must contain from 1 to 255 alphanumeric characters or hyphens, begin with a letter, contain only ASCII letters, digits, and hyphens, not end with a hyphen, and not contain two consecutive hyphens.
* `endpoint_type` - (Required) Type of endpoint. Valid values are `source`, `target`.
* `external_table_definition` - (Required for `source` endpoints; otherwise, Optional) JSON document that describes how AWS DMS should interpret the data.
* `service_access_role_arn` - (Required) ARN of the IAM role with permissions to the S3 Bucket.

The following arguments are optional:

* `add_column_name` - (Optional) Whether to add column name information to the .csv output file. Default is `false`.
* `add_trailing_padding_character` - (Optional) Whether to add padding. Default is `false`. (Ignored for source endpoints.)
* `bucket_folder` - (Optional) S3 object prefix.
* `canned_acl_for_objects` - (Optional) Predefined (canned) access control list for objects created in an S3 bucket. Valid values include `none`, `private`, `public-read`, `public-read-write`, `authenticated-read`, `aws-exec-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Default is `none`.
* `cdc_inserts_and_updates` - (Optional) Whether to write insert and update operations to .csv or .parquet output files. Default is `false`.
* `cdc_inserts_only` - (Optional) Whether to write insert operations to .csv or .parquet output files. Default is `false`.
* `cdc_max_batch_interval` - (Optional) Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. (AWS default is `60`.)
* `cdc_min_file_size` - (Optional) Minimum file size condition as defined in kilobytes to output a file to Amazon S3. (AWS default is 32000 KB.)
* `certificate_arn` - (Optional, Default: empty string) ARN for the certificate.
* `compression_type` - (Optional) Set to compress target files. Valid values are `GZIP` and `NONE`. Default is `NONE`. (Ignored for source endpoints.)
* `csv_delimiter` - (Optional) Delimiter used to separate columns in the source files. Default is `,`.
* `csv_no_sup_value` - (Optional) Only applies if output files for a CDC load are written in .csv format. If `use_csv_no_sup_value` is set to `true`, string to use for all columns not included in the supplemental log. If you do not specify a string value, DMS uses the null value for these columns regardless of `use_csv_no_sup_value`. (Ignored for source endpoints.)
* `csv_null_value` - (Optional) String to use as null when writing to the target. (AWS default is `NULL`.)
* `csv_row_delimiter` - (Optional) Delimiter used to separate rows in the source files. Default is newline (_i.e._, `\n`).
* `data_format` - (Optional) Output format for the files that AWS DMS uses to create S3 objects. Valid values are `csv` and `parquet`. (Ignored for source endpoints -- only `csv` is valid.)
* `data_page_size` - (Optional) Size of one data page in bytes. (AWS default is 1 MiB, _i.e._, `1048576`.)
* `date_partition_delimiter` - (Optional) Date separating delimiter to use during folder partitioning. Valid values are `SLASH`, `UNDERSCORE`, `DASH`, and `NONE`. (AWS default is `SLASH`.) (Ignored for source endpoints.)
* `date_partition_enabled` - (Optional) Partition S3 bucket folders based on transaction commit dates. Default is `false`. (Ignored for source endpoints.)
* `date_partition_sequence` - (Optional) Date format to use during folder partitioning. Use this parameter when `date_partition_enabled` is set to `true`.
Valid values are `YYYYMMDD`, `YYYYMMDDHH`, `YYYYMM`, `MMYYYYDD`, and `DDMMYYYY`. (AWS default is `YYYYMMDD`.) (Ignored for source endpoints.)
* `date_partition_timezone` - (Optional) Convert the current UTC time to a timezone. The conversion occurs when a date partition folder is created and a CDC filename is generated. The timezone format is Area/Location (_e.g._, `Europe/Paris`). Use this when `date_partition_enabled` is `true`. (Ignored for source endpoints.)
* `detach_target_on_lob_lookup_failure_parquet` - (Optional) Undocumented argument for use as directed by AWS Support.
* `dict_page_size_limit` - (Optional) Maximum size in bytes of an encoded dictionary page of a column. (AWS default is 1 MiB, _i.e._, `1048576`.)
* `enable_statistics` - (Optional) Whether to enable statistics for Parquet pages and row groups. Default is `true`.
* `encoding_type` - (Optional) Type of encoding to use. Valid values are `rle_dictionary`, `plain`, and `plain_dictionary`. (AWS default is `rle_dictionary`.)
* `encryption_mode` - (Optional) Server-side encryption mode used to encrypt your .csv or .parquet object files copied to S3. Valid values are `SSE_S3` and `SSE_KMS`. (AWS default is `SSE_S3`.) (Ignored for source endpoints -- only `SSE_S3` is valid.)
* `expected_bucket_owner` - (Optional) Bucket owner to prevent sniping. Value is an AWS account ID.
* `ignore_header_rows` - (Optional, Force New) When this value is set to `1`, DMS ignores the first row header in a .csv file. (AWS default is `0`.)
* `include_op_for_full_load` - (Optional) Whether to enable a full load to write INSERT operations to the .csv output files only to indicate how the rows were added to the source database. Default is `false`.
* `kms_key_arn` - (Optional) ARN for the KMS key that will be used to encrypt the connection parameters. If you do not specify a value for `kms_key_arn`, then AWS DMS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region.
* `max_file_size` - (Optional) Maximum size (in KB) of any .csv file to be created while migrating to an S3 target during full load. Valid values are from `1` to `1048576`. (AWS default is 1 GB, _i.e._, `1048576`.)
* `parquet_timestamp_in_millisecond` - (Optional) Specifies the precision of any TIMESTAMP column values written to an S3 object file in .parquet format. Default is `false`. (Ignored for source endpoints.)
* `parquet_version` - (Optional) Version of the .parquet file format. Valid values are `parquet-1-0` and `parquet-2-0`. (AWS default is `parquet-1-0`.) (Ignored for source endpoints.)
* `preserve_transactions` - (Optional) Whether DMS saves the transaction order for a CDC load on the S3 target specified by `cdc_path`. Default is `false`. (Ignored for source endpoints.)
* `rfc_4180` - (Optional) For an S3 source, whether each leading double quotation mark has to be followed by an ending double quotation mark. Default is `true`.
* `row_group_length` - (Optional) Number of rows in a row group. (AWS default is `10000`.)
* `server_side_encryption_kms_key_id` - (Optional) When `encryption_mode` is `SSE_KMS`, ARN for the AWS KMS key. (Ignored for source endpoints -- only `SSE_S3` `encryption_mode` is valid.)
* `ssl_mode` - (Optional) SSL mode to use for the connection. Valid values are `none`, `require`, `verify-ca`, `verify-full`. (AWS default is `none`.)
* `tags` - (Optional) Map of tags to assign to the resource.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `timestamp_column_name` - (Optional) Column to add with timestamp information to the endpoint data for an Amazon S3 target.
* `use_csv_no_sup_value` - (Optional) Whether to use `csv_no_sup_value` for columns not included in the supplemental log. (Ignored for source endpoints.)
* `use_task_start_time_for_full_load_timestamp` - (Optional) When set to `true`, uses the task start time as the timestamp column value instead of the time data is written to the target. For full load, when set to `true`, each row of the timestamp column contains the task start time. For CDC loads, each row of the timestamp column contains the transaction commit time. When set to `false`, the full load timestamp in the timestamp column increments with the time data arrives at the target. Default is `false`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `endpoint_arn` - ARN for the endpoint.
* `engine_display_name` - Expanded name for the engine name.
* `external_id` - Can be used for cross-account validation. Use it in another account with `aws_dms_s3_endpoint` to create the endpoint cross-account.
* `status` - Status of the endpoint.
* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `5m`)
- `delete` - (Default `5m`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoints using the `endpoint_id`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import endpoints using the `endpoint_id`. For example:

```console
% terraform import aws_dms_s3_endpoint.example example-dms-endpoint-tf
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/docdb_cluster.html.markdown b/website/docs/cdktf/python/r/docdb_cluster.html.markdown
new file mode 100644
index 00000000000..515a67dbcb9
--- /dev/null
+++ b/website/docs/cdktf/python/r/docdb_cluster.html.markdown
@@ -0,0 +1,133 @@
---
subcategory: "DocumentDB"
layout: "aws"
page_title: "AWS: aws_docdb_cluster"
description: |-
  Manages a DocumentDB Cluster
---

# Resource: aws_docdb_cluster

Manages a DocumentDB Cluster.

Changes to a DocumentDB Cluster can occur when you manually change a
parameter, such as `port`, and are reflected in the next maintenance
window. Because of this, Terraform may report a difference in its planning
phase because a modification has not yet taken place. You can use the
`apply_immediately` flag to instruct the service to apply the change immediately
(see documentation below).

~> **Note:** Using `apply_immediately` can result in a brief downtime as the server reboots.
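As a minimal hand-written sketch (not `cdktf convert` output; the `port` value and identifiers are illustrative only), opting into immediate application of modifications might look like this:

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.docdb_cluster import DocdbCluster

class ApplyImmediatelyStack(TerraformStack):
    def __init__(self, scope: Construct, name: str):
        super().__init__(scope, name)
        # With apply_immediately=True, a change such as a new `port` is applied
        # right away instead of waiting for the next maintenance window.
        DocdbCluster(self, "docdb",
            apply_immediately=True,
            cluster_identifier="my-docdb-cluster",
            engine="docdb",
            master_username="foo",
            master_password="mustbeeightchars",
            port=27017,
            skip_final_snapshot=True
        )
```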
~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.docdb_cluster import DocdbCluster
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DocdbCluster(self, "docdb",
            backup_retention_period=5,
            cluster_identifier="my-docdb-cluster",
            engine="docdb",
            master_password="mustbeeightchars",
            master_username="foo",
            preferred_backup_window="07:00-09:00",
            skip_final_snapshot=True
        )
```

## Argument Reference

For more detailed documentation about each argument, refer to
the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/docdb/create-db-cluster.html).

This resource supports the following arguments:

* `apply_immediately` - (Optional) Specifies whether any cluster modifications
  are applied immediately, or during the next maintenance window. Default is
  `false`.
* `availability_zones` - (Optional) A list of EC2 Availability Zones that
  instances in the DB cluster can be created in.
* `backup_retention_period` - (Optional) The days to retain backups for. Default `1`.
* `cluster_identifier_prefix` - (Optional, Forces new resource) Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `cluster_identifier`.
* `cluster_identifier` - (Optional, Forces new resources) The cluster identifier. If omitted, Terraform will assign a random, unique identifier.
* `db_subnet_group_name` - (Optional) A DB subnet group to associate with this DB instance.
* `db_cluster_parameter_group_name` - (Optional) A cluster parameter group to associate with the cluster.
* `deletion_protection` - (Optional) A value that indicates whether the DB cluster has deletion protection enabled. The database can't be deleted when deletion protection is enabled. By default, deletion protection is disabled.
* `enabled_cloudwatch_logs_exports` - (Optional) List of log types to export to CloudWatch. If omitted, no logs will be exported.
  The following log types are supported: `audit`, `profiler`.
* `engine_version` - (Optional) The database engine version. Updating this argument results in an outage.
* `engine` - (Optional) The name of the database engine to be used for this DB cluster. Defaults to `docdb`. Valid Values: `docdb`
* `final_snapshot_identifier` - (Optional) The name of your final DB snapshot
  when this DB cluster is deleted. If omitted, no final snapshot will be
  made.
* `global_cluster_identifier` - (Optional) The global cluster identifier specified on [`aws_docdb_global_cluster`](/docs/providers/aws/r/docdb_global_cluster.html).
* `kms_key_id` - (Optional) The ARN for the KMS encryption key. When specifying `kms_key_id`, `storage_encrypted` needs to be set to `true`.
* `master_password` - (Required unless a `snapshot_identifier` or a `global_cluster_identifier` is provided when the cluster is the "secondary" cluster of a global database) Password for the master DB user. Note that this may
  show up in logs, and it will be stored in the state file.
Please refer to the DocumentDB Naming Constraints.
* `master_username` - (Required unless a `snapshot_identifier` or a `global_cluster_identifier` is provided when the cluster is the "secondary" cluster of a global database) Username for the master DB user.
* `port` - (Optional) The port on which the DB accepts connections.
* `preferred_backup_window` - (Optional) The daily time range during which automated backups are created if automated backups are enabled, using the `backup_retention_period` parameter. Time in UTC. Default: a 30-minute window selected at random from an 8-hour block of time per region, e.g., `04:00-09:00`.
* `preferred_maintenance_window` - (Optional) The weekly time range during which system maintenance can occur, in UTC, e.g., `wed:04:00-wed:04:30`.
* `skip_final_snapshot` - (Optional) Determines whether a final DB snapshot is created before the DB cluster is deleted. If true is specified, no DB snapshot is created. If false is specified, a DB snapshot is created before the DB cluster is deleted, using the value from `final_snapshot_identifier`. Default is `false`.
* `snapshot_identifier` - (Optional) Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a DB cluster snapshot, or the ARN when specifying a DB snapshot. Automated snapshots **should not** be used for this attribute, unless from a different cluster. Automated snapshots are deleted as part of cluster destruction when the resource is replaced.
* `storage_encrypted` - (Optional) Specifies whether the DB cluster is encrypted. The default is `false`.
* `tags` - (Optional) A map of tags to assign to the DB cluster. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `vpc_security_group_ids` - (Optional) List of VPC security groups to associate
  with the Cluster.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of cluster
* `cluster_members` – List of DocumentDB Instances that are a part of this cluster
* `cluster_resource_id` - The DocumentDB Cluster Resource ID
* `endpoint` - The DNS address of the DocumentDB instance
* `hosted_zone_id` - The Route53 Hosted Zone ID of the endpoint
* `id` - The DocumentDB Cluster Identifier
* `reader_endpoint` - A read-only endpoint for the DocumentDB cluster, automatically load-balanced across replicas
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `120m`)
- `update` - (Default `120m`)
- `delete` - (Default `120m`) - includes any cleanup task during the destroying process.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Clusters using the `cluster_identifier`.
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import DocumentDB Clusters using the `cluster_identifier`. For example:

```console
% terraform import aws_docdb_cluster.docdb_cluster docdb-prod-cluster
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/docdb_cluster_instance.html.markdown b/website/docs/cdktf/python/r/docdb_cluster_instance.html.markdown
new file mode 100644
index 00000000000..c440cbde944
--- /dev/null
+++ b/website/docs/cdktf/python/r/docdb_cluster_instance.html.markdown
@@ -0,0 +1,145 @@
---
subcategory: "DocumentDB"
layout: "aws"
page_title: "AWS: aws_docdb_cluster_instance"
description: |-
  Provides a DocumentDB Cluster Resource Instance
---

# Resource: aws_docdb_cluster_instance

Provides a DocumentDB Cluster Resource Instance. A Cluster Instance Resource defines
attributes that are specific to a single instance in a [DocumentDB Cluster][1].

You do not designate a primary and subsequent replicas. Instead, you simply add DocumentDB
Instances and DocumentDB manages the replication. You can use the [count][3]
meta-parameter to make multiple instances and join them all to the same DocumentDB
Cluster, or you may specify different Cluster Instance resources with various
`instance_class` sizes.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformCount, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.docdb_cluster import DocdbCluster
from imports.aws.docdb_cluster_instance import DocdbClusterInstance
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        default_var = DocdbCluster(self, "default",
            availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"],
            cluster_identifier="docdb-cluster-demo",
            master_password="barbut8chars",
            master_username="foo"
        )
        # In most cases loops should be handled in the programming language context and
        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
        # you need to keep this like it is.
        cluster_instances_count = TerraformCount.of(Token.as_number("2"))
        DocdbClusterInstance(self, "cluster_instances",
            cluster_identifier=default_var.id,
            identifier="docdb-cluster-demo-${" + cluster_instances_count.index + "}",
            instance_class="db.r5.large",
            count=cluster_instances_count
        )
```

## Argument Reference

For more detailed documentation about each argument, refer to
the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/docdb/create-db-instance.html).

This resource supports the following arguments:

* `apply_immediately` - (Optional) Specifies whether any database modifications
  are applied immediately, or during the next maintenance window. Default is `false`.
* `auto_minor_version_upgrade` - (Optional) This parameter does not apply to Amazon DocumentDB.
Amazon DocumentDB does not perform minor version upgrades regardless of the value set (see [docs](https://docs.aws.amazon.com/documentdb/latest/developerguide/API_DBInstance.html)). Default `true`.
* `availability_zone` - (Optional, Computed) The EC2 Availability Zone that the DB instance is created in. See [docs](https://docs.aws.amazon.com/documentdb/latest/developerguide/API_CreateDBInstance.html) for details.
* `cluster_identifier` - (Required) The identifier of the [`aws_docdb_cluster`](/docs/providers/aws/r/docdb_cluster.html) in which to launch this instance.
* `enable_performance_insights` - (Optional) A value that indicates whether to enable Performance Insights for the DB Instance. Default `false`. See [docs](https://docs.aws.amazon.com/documentdb/latest/developerguide/performance-insights.html) for details.
* `engine` - (Optional) The name of the database engine to be used for the DocumentDB instance. Defaults to `docdb`. Valid Values: `docdb`.
* `identifier` - (Optional, Forces new resource) The identifier for the DocumentDB instance, if omitted, Terraform will assign a random, unique identifier.
* `identifier_prefix` - (Optional, Forces new resource) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
* `instance_class` - (Required) The instance class to use. For details on CPU and memory, see [Scaling for DocumentDB Instances][2].
  DocumentDB currently supports the below instance classes.
  Please see [AWS Documentation][4] for complete details.
  - db.r5.large
  - db.r5.xlarge
  - db.r5.2xlarge
  - db.r5.4xlarge
  - db.r5.12xlarge
  - db.r5.24xlarge
  - db.r4.large
  - db.r4.xlarge
  - db.r4.2xlarge
  - db.r4.4xlarge
  - db.r4.8xlarge
  - db.r4.16xlarge
  - db.t3.medium
* `performance_insights_kms_key_id` - (Optional) The KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. If you do not specify a value for PerformanceInsightsKMSKeyId, then Amazon DocumentDB uses your default KMS key.
* `preferred_maintenance_window` - (Optional) The window to perform maintenance in.
  Syntax: "ddd:hh24:mi-ddd:hh24:mi". E.g., "Mon:00:00-Mon:03:00".
* `promotion_tier` - (Optional) Default `0`. Failover priority setting on instance level. A reader with a lower tier has a higher priority to get promoted to writer.
* `tags` - (Optional) A map of tags to assign to the instance. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of cluster instance
* `db_subnet_group_name` - The DB subnet group to associate with this DB instance.
* `dbi_resource_id` - The region-unique, immutable identifier for the DB instance.
* `endpoint` - The DNS address for this instance. May not be writable.
* `engine_version` - The database engine version
* `kms_key_id` - The ARN for the KMS encryption key if one is set to the cluster.
* `port` - The database port
* `preferred_backup_window` - The daily time range during which automated backups are created if automated backups are enabled.
* `storage_encrypted` - Specifies whether the DB cluster is encrypted.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
* `writer` – Boolean indicating if this instance is writable. `False` indicates this instance is a read replica.
* `ca_cert_identifier` - (Optional) The identifier of the CA certificate for the DB instance.

[1]: /docs/providers/aws/r/docdb_cluster.html
[2]: https://docs.aws.amazon.com/documentdb/latest/developerguide/db-cluster-manage-performance.html#db-cluster-manage-scaling-instance
[3]: https://www.terraform.io/docs/configuration/meta-arguments/count.html
[4]: https://docs.aws.amazon.com/documentdb/latest/developerguide/db-instance-classes.html#db-instance-class-specs

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `90m`) - includes the time needed when restoring from snapshots
- `update` - (Default `90m`)
- `delete` - (Default `90m`) - includes the time required to take snapshots

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Cluster Instances using the `identifier`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import DocumentDB Cluster Instances using the `identifier`. For example:

```console
% terraform import aws_docdb_cluster_instance.prod_instance_1 aurora-cluster-instance-1
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/docdb_cluster_parameter_group.html.markdown b/website/docs/cdktf/python/r/docdb_cluster_parameter_group.html.markdown
new file mode 100644
index 00000000000..4a950414bc6
--- /dev/null
+++ b/website/docs/cdktf/python/r/docdb_cluster_parameter_group.html.markdown
@@ -0,0 +1,87 @@
---
subcategory: "DocumentDB"
layout: "aws"
page_title: "AWS: aws_docdb_cluster_parameter_group"
description: |-
  Manages a DocumentDB Cluster Parameter Group
---

# Resource: aws_docdb_cluster_parameter_group

Manages a DocumentDB Cluster Parameter Group

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.docdb_cluster_parameter_group import DocdbClusterParameterGroup, DocdbClusterParameterGroupParameter
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DocdbClusterParameterGroup(self, "example",
            description="docdb cluster parameter group",
            family="docdb3.6",
            name="example",
            parameter=[DocdbClusterParameterGroupParameter(
                name="tls",
                value="enabled"
            )
            ]
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Optional, Forces new resource) The name of the DocumentDB cluster parameter group. If omitted, Terraform will assign a random, unique name.
* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `family` - (Required, Forces new resource) The family of the DocumentDB cluster parameter group. +* `description` - (Optional, Forces new resource) The description of the DocumentDB cluster parameter group. Defaults to "Managed by Terraform". +* `parameter` - (Optional) A list of DocumentDB parameters to apply. Setting parameters to system default values may show a difference on imported resources. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +Parameter blocks support the following: + +~> **NOTE:** These arguments take a `string` representation of their values. + +* `name` - (Required) The name of the DocumentDB parameter. +* `value` - (Required) The value of the DocumentDB parameter. +* `apply_method` - (Optional) Valid values are `immediate` and `pending-reboot`. Defaults to `pending-reboot`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The DocumentDB cluster parameter group name. +* `arn` - The ARN of the DocumentDB cluster parameter group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Cluster Parameter Groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DocumentDB Cluster Parameter Groups using the `name`. For example: + +```console +% terraform import aws_docdb_cluster_parameter_group.cluster_pg production-pg-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/docdb_cluster_snapshot.html.markdown b/website/docs/cdktf/python/r/docdb_cluster_snapshot.html.markdown new file mode 100644 index 00000000000..237510e35de --- /dev/null +++ b/website/docs/cdktf/python/r/docdb_cluster_snapshot.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "DocumentDB" +layout: "aws" +page_title: "AWS: aws_docdb_cluster_snapshot" +description: |- + Manages a DocumentDB database cluster snapshot. +--- + + + +# Resource: aws_docdb_cluster_snapshot + +Manages a DocumentDB database cluster snapshot for DocumentDB clusters. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
#
from imports.aws.docdb_cluster_snapshot import DocdbClusterSnapshot
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        DocdbClusterSnapshot(self, "example",
            db_cluster_identifier=Token.as_string(aws_docdb_cluster_example.id),
            db_cluster_snapshot_identifier="resourcetestsnapshot1234"
        )
```

## Argument Reference

This resource supports the following arguments:

* `db_cluster_identifier` - (Required) The DocumentDB Cluster Identifier from which to take the snapshot.
* `db_cluster_snapshot_identifier` - (Required) The Identifier for the snapshot.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `availability_zones` - List of EC2 Availability Zones that instances in the DocumentDB cluster snapshot can be restored in.
* `db_cluster_snapshot_arn` - The Amazon Resource Name (ARN) for the DocumentDB Cluster Snapshot.
* `engine` - Specifies the name of the database engine.
* `engine_version` - Version of the database engine for this DocumentDB cluster snapshot.
* `kms_key_id` - If `storage_encrypted` is true, the AWS KMS key identifier for the encrypted DocumentDB cluster snapshot.
* `port` - Port that the DocumentDB cluster was listening on at the time of the snapshot.
* `source_db_cluster_snapshot_identifier` - The DocumentDB Cluster Snapshot ARN that the DocumentDB Cluster Snapshot was copied from. It only has a value in the case of a cross-customer or cross-region copy.
* `storage_encrypted` - Specifies whether the DocumentDB cluster snapshot is encrypted.
* `status` - The status of this DocumentDB Cluster Snapshot.
* `vpc_id` - The VPC ID associated with the DocumentDB cluster snapshot.

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

* `create` - (Default `20m`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_docdb_cluster_snapshot` using the cluster snapshot identifier. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_docdb_cluster_snapshot` using the cluster snapshot identifier. For example:

```console
% terraform import aws_docdb_cluster_snapshot.example my-cluster-snapshot
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/docdb_event_subscription.html.markdown b/website/docs/cdktf/python/r/docdb_event_subscription.html.markdown
new file mode 100644
index 00000000000..dd53b5b6c97
--- /dev/null
+++ b/website/docs/cdktf/python/r/docdb_event_subscription.html.markdown
@@ -0,0 +1,108 @@
---
subcategory: "DocumentDB"
layout: "aws"
page_title: "AWS: aws_docdb_event_subscription"
description: |-
  Provides a DocumentDB event subscription resource.
---

# Resource: aws_docdb_event_subscription

Provides a DocumentDB event subscription resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import property_access, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.docdb_cluster import DocdbCluster
from imports.aws.docdb_event_subscription import DocdbEventSubscription
from imports.aws.sns_topic import SnsTopic
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = DocdbCluster(self, "example",
            availability_zones=[
                Token.as_string(property_access(available.names, ["0"])),
                Token.as_string(property_access(available.names, ["1"])),
                Token.as_string(property_access(available.names, ["2"]))
            ],
            cluster_identifier="example",
            master_password="mustbeeightcharaters",
            master_username="foo",
            skip_final_snapshot=True
        )
        aws_sns_topic_example = SnsTopic(self, "example_1",
            name="example-events"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_sns_topic_example.override_logical_id("example")
        aws_docdb_event_subscription_example = DocdbEventSubscription(self, "example_2",
            enabled=True,
            event_categories=["creation", "failure"],
            name="example",
            sns_topic_arn=Token.as_string(aws_sns_topic_example.arn),
            source_ids=[example.id],
            source_type="db-cluster"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_docdb_event_subscription_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Optional) The name of the DocumentDB event subscription. By default generated by Terraform.
* `name_prefix` - (Optional) The name of the DocumentDB event subscription. Conflicts with `name`.
* `sns_topic_arn` - (Required) The SNS topic to send events to.
* `source_ids` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a `source_type` must also be specified.
* `source_type` - (Optional) The type of source that will be generating the events. Valid options are `db-instance`, `db-cluster`, `db-parameter-group`, `db-security-group`, `db-cluster-snapshot`. If not set, all sources will be subscribed to.
* `event_categories` - (Optional) A list of event categories for a SourceType that you want to subscribe to. See https://docs.aws.amazon.com/documentdb/latest/developerguide/API_Event.html or run `aws docdb describe-event-categories` (see the sketch after the attribute list below).
* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to `true`.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name of the DocumentDB event notification subscription
* `arn` - The Amazon Resource Name of the DocumentDB event notification subscription
* `customer_aws_id` - The AWS customer account associated with the DocumentDB event notification subscription
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
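The event categories supported for a given source type can be listed with the AWS CLI command referenced under `event_categories` above; a usage sketch (the `db-cluster` filter is illustrative):

```console
% aws docdb describe-event-categories --source-type db-cluster
```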
## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `40m`)
- `delete` - (Default `40m`)
- `update` - (Default `40m`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Event Subscriptions using the `name`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import DocumentDB Event Subscriptions using the `name`. For example:

```console
% terraform import aws_docdb_event_subscription.example event-sub
```

\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/docdb_global_cluster.html.markdown b/website/docs/cdktf/python/r/docdb_global_cluster.html.markdown
new file mode 100644
index 00000000000..217af31aa0f
--- /dev/null
+++ b/website/docs/cdktf/python/r/docdb_global_cluster.html.markdown
@@ -0,0 +1,191 @@
---
subcategory: "DocumentDB"
layout: "aws"
page_title: "AWS: aws_docdb_global_cluster"
description: |-
  Manages a DocumentDB Global Cluster
---

# Resource: aws_docdb_global_cluster

Manages a DocumentDB Global Cluster. A global cluster consists of one primary region and up to five read-only secondary regions. You issue write operations directly to the primary cluster in the primary region and Amazon DocumentDB automatically replicates the data to the secondary regions using dedicated infrastructure.

More information about DocumentDB Global Clusters can be found in the [DocumentDB Developer Guide](https://docs.aws.amazon.com/documentdb/latest/developerguide/global-clusters.html).

## Example Usage

### New DocumentDB Global Cluster

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.docdb_cluster import DocdbCluster
from imports.aws.docdb_cluster_instance import DocdbClusterInstance
from imports.aws.docdb_global_cluster import DocdbGlobalCluster
from imports.aws.provider import AwsProvider
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        primary = AwsProvider(self, "aws",
            alias="primary",
            region="us-east-2"
        )
        secondary = AwsProvider(self, "aws_1",
            alias="secondary",
            region="us-east-1"
        )
        example = DocdbGlobalCluster(self, "example",
            engine="docdb",
            engine_version="4.0.0",
            global_cluster_identifier="global-test"
        )
        aws_docdb_cluster_primary = DocdbCluster(self, "primary",
            cluster_identifier="test-primary-cluster",
            db_subnet_group_name="default",
            engine=example.engine,
            engine_version=example.engine_version,
            global_cluster_identifier=example.id,
            master_password="somepass123",
            master_username="username",
            provider=primary
        )
        aws_docdb_cluster_secondary = DocdbCluster(self, "secondary",
            cluster_identifier="test-secondary-cluster",
            db_subnet_group_name="default",
            engine=example.engine,
            engine_version=example.engine_version,
            global_cluster_identifier=example.id,
            provider=secondary
        )
        aws_docdb_cluster_instance_primary = DocdbClusterInstance(self, "primary_5",
            cluster_identifier=Token.as_string(aws_docdb_cluster_primary.id),
            engine=example.engine,
            identifier="test-primary-cluster-instance",
            instance_class="db.r5.large",
            provider=primary
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_docdb_cluster_instance_primary.override_logical_id("primary")
        aws_docdb_cluster_instance_secondary = DocdbClusterInstance(self, "secondary_6",
            cluster_identifier=Token.as_string(aws_docdb_cluster_secondary.id),
            depends_on=[aws_docdb_cluster_instance_primary],
            engine=example.engine,
            identifier="test-secondary-cluster-instance",
            instance_class="db.r5.large",
            provider=secondary
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_docdb_cluster_instance_secondary.override_logical_id("secondary")
```

### New Global Cluster From Existing DB Cluster

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from cdktf import TerraformResourceLifecycle
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.docdb_cluster import DocdbCluster
from imports.aws.docdb_global_cluster import DocdbGlobalCluster
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = DocdbCluster(self, "example",
            lifecycle=TerraformResourceLifecycle(
                ignore_changes=["global_cluster_identifier"]
            )
        )
        aws_docdb_global_cluster_example = DocdbGlobalCluster(self, "example_1",
            global_cluster_identifier="example",
            source_db_cluster_identifier=example.arn
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_docdb_global_cluster_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `global_cluster_identifier` - (Required, Forces new resources) The global cluster identifier.
* `database_name` - (Optional, Forces new resources) Name for an automatically created database on cluster creation.
* `deletion_protection` - (Optional) If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
* `engine` - (Optional, Forces new resources) Name of the database engine to be used for this DB cluster. Terraform will only perform drift detection if a configuration value is provided. Current valid values: `docdb`. Defaults to `docdb`. Conflicts with `source_db_cluster_identifier`.
* `engine_version` - (Optional) Engine version of the global database. Upgrading the engine version will result in all cluster members being immediately updated.
  * **NOTE:** Upgrading major versions is not supported.
* `source_db_cluster_identifier` - (Optional) Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. Terraform cannot perform drift detection of this value.
* `storage_encrypted` - (Optional, Forces new resources) Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. Terraform will only perform drift detection if a configuration value is provided.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Global Cluster Amazon Resource Name (ARN)
* `global_cluster_members` - Set of objects containing Global Cluster members.
    * `db_cluster_arn` - Amazon Resource Name (ARN) of member DB Cluster.
    * `is_writer` - Whether the member is the primary DB Cluster.
* `global_cluster_resource_id` - AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed.
* `id` - DocumentDB Global Cluster ID.

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

* `create` - (Default `5m`)
* `update` - (Default `5m`)
* `delete` - (Default `5m`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_docdb_global_cluster` using the Global Cluster identifier. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_docdb_global_cluster` using the Global Cluster identifier. For example:

```console
% terraform import aws_docdb_global_cluster.example example
```

Certain resource arguments, like `source_db_cluster_identifier`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.docdb_global_cluster import DocdbGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, global_cluster_identifier):
+        super().__init__(scope, name)
+        DocdbGlobalCluster(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["source_db_cluster_identifier"]
+            ),
+            global_cluster_identifier=global_cluster_identifier
+        )
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/docdb_subnet_group.html.markdown b/website/docs/cdktf/python/r/docdb_subnet_group.html.markdown
new file mode 100644
index 00000000000..b162e6af58c
--- /dev/null
+++ b/website/docs/cdktf/python/r/docdb_subnet_group.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_subnet_group"
+description: |-
+  Provides a DocumentDB subnet group resource.
+---
+
+
+
+# Resource: aws_docdb_subnet_group
+
+Provides a DocumentDB subnet group resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.docdb_subnet_group import DocdbSubnetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # "frontend" and "backend" refer to subnets defined elsewhere in the stack.
+        DocdbSubnetGroup(self, "default",
+            name="main",
+            subnet_ids=[frontend.id, backend.id],
+            tags={
+                "Name": "My docdb subnet group"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the DocumentDB subnet group. If omitted, Terraform will assign a random, unique name.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) The description of the DocumentDB subnet group. Defaults to "Managed by Terraform".
+* `subnet_ids` - (Required) A list of VPC subnet IDs.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The DocumentDB subnet group name.
+* `arn` - The ARN of the DocumentDB subnet group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB subnet groups using the `name`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DocumentDB subnet groups using the `name`. For example:
+
+```console
+% terraform import aws_docdb_subnet_group.default production-subnet-group
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_bgp_peer.html.markdown b/website/docs/cdktf/python/r/dx_bgp_peer.html.markdown
new file mode 100644
index 00000000000..bda9e7724e8
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_bgp_peer.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_bgp_peer"
+description: |-
+  Provides a Direct Connect BGP peer resource.
+---
+
+
+
+# Resource: aws_dx_bgp_peer
+
+Provides a Direct Connect BGP peer resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_bgp_peer import DxBgpPeer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # "foo" refers to a virtual interface defined elsewhere in the stack.
+        DxBgpPeer(self, "peer",
+            address_family="ipv6",
+            bgp_asn=65351,
+            virtual_interface_id=foo.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `virtual_interface_id` - (Required) The ID of the Direct Connect virtual interface on which to create the BGP peer.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon.
+Required for IPv4 BGP peers on public virtual interfaces.
+* `bgp_auth_key` - (Optional) The authentication key for BGP configuration.
+* `customer_address` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic.
+Required for IPv4 BGP peers on public virtual interfaces.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the BGP peer resource.
+* `bgp_status` - The Up/Down state of the BGP peer.
+* `bgp_peer_id` - The ID of the BGP peer.
+* `aws_device` - The Direct Connect endpoint on which the BGP peer terminates.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_connection.html.markdown b/website/docs/cdktf/python/r/dx_connection.html.markdown
new file mode 100644
index 00000000000..fe9ccaa2316
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_connection.html.markdown
@@ -0,0 +1,136 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_connection"
+description: |-
+  Provides a Direct Connect connection resource.
+---
+
+
+
+# Resource: aws_dx_connection
+
+Provides a Direct Connect connection resource.
+ +## Example Usage + +### Create a connection + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_connection import DxConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DxConnection(self, "hoge", + bandwidth="1Gbps", + location="EqDC2", + name="tf-dx-connection" + ) +``` + +### Request a MACsec-capable connection + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_connection import DxConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DxConnection(self, "example", + bandwidth="10Gbps", + location="EqDA2", + name="tf-dx-connection", + request_macsec=True + ) +``` + +### Configure encryption mode for MACsec-capable connections + +-> **NOTE:** You can only specify the `encryption_mode` argument once the connection is in an `Available` state. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_connection import DxConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DxConnection(self, "example", + bandwidth="10Gbps", + encryption_mode="must_encrypt", + location="EqDC2", + name="tf-dx-connection", + request_macsec=True + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bandwidth` - (Required) The bandwidth of the connection. Valid values for dedicated connections: 1Gbps, 10Gbps. Valid values for hosted connections: 50Mbps, 100Mbps, 200Mbps, 300Mbps, 400Mbps, 500Mbps, 1Gbps, 2Gbps, 5Gbps, 10Gbps and 100Gbps. Case sensitive. +* `encryption_mode` - (Optional) The connection MAC Security (MACsec) encryption mode. MAC Security (MACsec) is only available on dedicated connections. Valid values are `no_encrypt`, `should_encrypt`, and `must_encrypt`. +* `location` - (Required) The AWS Direct Connect location where the connection is located. See [DescribeLocations](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeLocations.html) for the list of AWS Direct Connect locations. Use `locationCode`. +* `name` - (Required) The name of the connection. +* `provider_name` - (Optional) The name of the service provider associated with the connection. +* `request_macsec` - (Optional) Boolean value indicating whether you want the connection to support MAC Security (MACsec). MAC Security (MACsec) is only available on dedicated connections. See [MACsec prerequisites](https://docs.aws.amazon.com/directconnect/latest/UserGuide/direct-connect-mac-sec-getting-started.html#mac-sec-prerequisites) for more information about MAC Security (MACsec) prerequisites. Default value: `false`. + +~> **NOTE:** Changing the value of `request_macsec` will cause the resource to be destroyed and re-created. 
+ +* `skip_destroy` - (Optional) Set to true if you do not wish the connection to be deleted at destroy time, and instead just removed from the Terraform state. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the connection. +* `aws_device` - The Direct Connect endpoint on which the physical connection terminates. +* `has_logical_redundancy` - Indicates whether the connection supports a secondary BGP peer in the same address family (IPv4/IPv6). +* `id` - The ID of the connection. +* `jumbo_frame_capable` - Boolean value representing if jumbo frames have been enabled for this connection. +* `macsec_capable` - Boolean value indicating whether the connection supports MAC Security (MACsec). +* `owner_account_id` - The ID of the AWS account that owns the connection. +* `partner_name` - The name of the AWS Direct Connect service provider associated with the connection. +* `port_encryption_status` - The MAC Security (MACsec) port link status of the connection. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vlan_id` - The VLAN ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect connections using the connection `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Direct Connect connections using the connection `id`. For example: + +```console +% terraform import aws_dx_connection.test_connection dxcon-ffre0ec3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dx_connection_association.html.markdown b/website/docs/cdktf/python/r/dx_connection_association.html.markdown new file mode 100644 index 00000000000..afa3c425530 --- /dev/null +++ b/website/docs/cdktf/python/r/dx_connection_association.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_connection_association" +description: |- + Associates a Direct Connect Connection with a LAG. +--- + + + +# Resource: aws_dx_connection_association + +Associates a Direct Connect Connection with a LAG. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.dx_connection import DxConnection +from imports.aws.dx_connection_association import DxConnectionAssociation +from imports.aws.dx_lag import DxLag +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DxConnection(self, "example", + bandwidth="1Gbps", + location="EqSe2-EQ", + name="example" + ) + aws_dx_lag_example = DxLag(self, "example_1", + connections_bandwidth="1Gbps", + location="EqSe2-EQ", + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dx_lag_example.override_logical_id("example") + aws_dx_connection_association_example = DxConnectionAssociation(self, "example_2", + connection_id=example.id, + lag_id=Token.as_string(aws_dx_lag_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dx_connection_association_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `connection_id` - (Required) The ID of the connection. +* `lag_id` - (Required) The ID of the LAG with which to associate the connection. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dx_connection_confirmation.html.markdown b/website/docs/cdktf/python/r/dx_connection_confirmation.html.markdown new file mode 100644 index 00000000000..8bcebfb37d6 --- /dev/null +++ b/website/docs/cdktf/python/r/dx_connection_confirmation.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_connection_confirmation" +description: |- + Provides a confirmation of the creation of the specified hosted connection on an interconnect. +--- + + + +# Resource: aws_dx_connection_confirmation + +Provides a confirmation of the creation of the specified hosted connection on an interconnect. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_connection_confirmation import DxConnectionConfirmation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DxConnectionConfirmation(self, "confirmation", + connection_id="dxcon-ffabc123" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `connection_id` - (Required) The ID of the hosted connection. + +### Removing `aws_dx_connection_confirmation` from your configuration + +Removing an `aws_dx_connection_confirmation` resource from your configuration will remove it +from your statefile and management, **but will not destroy the Hosted Connection.** + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the connection. 
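+
+For context, a hypothetical end-to-end sketch of how this resource is typically paired with [`aws_dx_hosted_connection`](dx_hosted_connection.html): the partner account allocates the hosted connection and the customer account, configured here as an aliased provider, confirms it. The account IDs, the `customer` alias, and the connection values below are placeholders, not values from the original examples.
+
+```python
+# A sketch, assuming the partner account is the default provider and the
+# customer account is available under a "customer" alias. All IDs are
+# placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dx_connection_confirmation import DxConnectionConfirmation
+from imports.aws.dx_hosted_connection import DxHostedConnection
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws")  # partner account: allocates the hosted connection
+        customer = AwsProvider(self, "aws_1",
+            alias="customer"  # customer account: confirms the hosted connection
+        )
+        hosted = DxHostedConnection(self, "hosted",
+            bandwidth="100Mbps",
+            connection_id="dxcon-ffabc123",  # placeholder interconnect or LAG ID
+            name="tf-dx-hosted-connection",
+            owner_account_id="123456789012",  # placeholder customer account ID
+            vlan=1
+        )
+        DxConnectionConfirmation(self, "confirmation",
+            connection_id=hosted.id,
+            provider=customer
+        )
+```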
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_gateway.html.markdown b/website/docs/cdktf/python/r/dx_gateway.html.markdown
new file mode 100644
index 00000000000..ad64608b0b9
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_gateway.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_gateway"
+description: |-
+  Provides a Direct Connect Gateway.
+---
+
+
+
+# Resource: aws_dx_gateway
+
+Provides a Direct Connect Gateway.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_gateway import DxGateway
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DxGateway(self, "example",
+            amazon_side_asn="64512",
+            name="tf-dxg-example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the gateway.
+* `amazon_side_asn` - (Required) The ASN to be configured on the Amazon side of the gateway. The ASN must be in the private range of 64,512 to 65,534 or 4,200,000,000 to 4,294,967,294.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the gateway.
+* `owner_account_id` - AWS Account ID of the gateway.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect Gateways using the gateway `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect Gateways using the gateway `id`. For example:
+
+```console
+% terraform import aws_dx_gateway.test abcd1234-dcba-5678-be23-cdef9876ab45
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_gateway_association.html.markdown b/website/docs/cdktf/python/r/dx_gateway_association.html.markdown
new file mode 100644
index 00000000000..30266043ba3
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_gateway_association.html.markdown
@@ -0,0 +1,185 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_gateway_association"
+description: |-
+  Associates a Direct Connect Gateway with a VGW or transit gateway.
+---
+
+
+
+# Resource: aws_dx_gateway_association
+
+Associates a Direct Connect Gateway with a VGW or transit gateway.
+
+To create a cross-account association, create an [`aws_dx_gateway_association_proposal` resource](/docs/providers/aws/r/dx_gateway_association_proposal.html)
+in the AWS account that owns the VGW or transit gateway and then accept the proposal in the AWS account that owns the Direct Connect Gateway
+by creating an `aws_dx_gateway_association` resource with the `proposal_id` and `associated_gateway_owner_account_id` attributes set.
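+
+A minimal sketch of that accepting side (all IDs below are placeholders, not values from the original examples; the proposal and gateways are assumed to exist already):
+
+```python
+# A sketch of accepting a cross-account association proposal in the account
+# that owns the Direct Connect Gateway. All IDs are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dx_gateway_association import DxGatewayAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DxGatewayAssociation(self, "example",
+            proposal_id="ac90e981-b718-4364-872d-65478c84fafe",
+            dx_gateway_id="abcd1234-dcba-5678-be23-cdef9876ab45",
+            associated_gateway_owner_account_id="123456789012"
+        )
+```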
+ +## Example Usage + +### VPN Gateway Association + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_gateway import DxGateway +from imports.aws.dx_gateway_association import DxGatewayAssociation +from imports.aws.vpc import Vpc +from imports.aws.vpn_gateway import VpnGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DxGateway(self, "example", + amazon_side_asn="64512", + name="example" + ) + aws_vpc_example = Vpc(self, "example_1", + cidr_block="10.255.255.0/28" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_vpc_example.override_logical_id("example") + aws_vpn_gateway_example = VpnGateway(self, "example_2", + vpc_id=Token.as_string(aws_vpc_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_vpn_gateway_example.override_logical_id("example") + aws_dx_gateway_association_example = DxGatewayAssociation(self, "example_3", + associated_gateway_id=Token.as_string(aws_vpn_gateway_example.id), + dx_gateway_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dx_gateway_association_example.override_logical_id("example") +``` + +### Transit Gateway Association + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_gateway import DxGateway +from imports.aws.dx_gateway_association import DxGatewayAssociation +from imports.aws.ec2_transit_gateway import Ec2TransitGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DxGateway(self, "example", + amazon_side_asn="64512", + name="example" + ) + aws_ec2_transit_gateway_example = Ec2TransitGateway(self, "example_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ec2_transit_gateway_example.override_logical_id("example") + aws_dx_gateway_association_example = DxGatewayAssociation(self, "example_2", + allowed_prefixes=["10.255.255.0/30", "10.255.255.8/30"], + associated_gateway_id=Token.as_string(aws_ec2_transit_gateway_example.id), + dx_gateway_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dx_gateway_association_example.override_logical_id("example") +``` + +### Allowed Prefixes + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.dx_gateway import DxGateway
+from imports.aws.dx_gateway_association import DxGatewayAssociation
+from imports.aws.vpc import Vpc
+from imports.aws.vpn_gateway import VpnGateway
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DxGateway(self, "example",
+            amazon_side_asn="64512",
+            name="example"
+        )
+        aws_vpc_example = Vpc(self, "example_1",
+            cidr_block="10.255.255.0/28"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_vpc_example.override_logical_id("example")
+        aws_vpn_gateway_example = VpnGateway(self, "example_2",
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_vpn_gateway_example.override_logical_id("example")
+        aws_dx_gateway_association_example = DxGatewayAssociation(self, "example_3",
+            allowed_prefixes=["210.52.109.0/24", "175.45.176.0/22"],
+            associated_gateway_id=Token.as_string(aws_vpn_gateway_example.id),
+            dx_gateway_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dx_gateway_association_example.override_logical_id("example")
+```
+
+A full example of how to create a VPN Gateway in one AWS account, create a Direct Connect Gateway in a second AWS account, and associate the VPN Gateway with the Direct Connect Gateway via the `aws_dx_gateway_association_proposal` and `aws_dx_gateway_association` resources can be found in [the `./examples/dx-gateway-cross-account-vgw-association` directory within the Github Repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/dx-gateway-cross-account-vgw-association).
+
+## Argument Reference
+
+~> **NOTE:** `dx_gateway_id` and `associated_gateway_id` must be specified for single account Direct Connect gateway associations.
+
+This resource supports the following arguments:
+
+* `dx_gateway_id` - (Required) The ID of the Direct Connect gateway.
+* `associated_gateway_id` - (Optional) The ID of the VGW or transit gateway with which to associate the Direct Connect gateway.
+Used for single account Direct Connect gateway associations.
+* `associated_gateway_owner_account_id` - (Optional) The ID of the AWS account that owns the VGW or transit gateway with which to associate the Direct Connect gateway.
+Used for cross-account Direct Connect gateway associations.
+* `proposal_id` - (Optional) The ID of the Direct Connect gateway association proposal.
+Used for cross-account Direct Connect gateway associations.
+* `allowed_prefixes` - (Optional) VPC prefixes (CIDRs) to advertise to the Direct Connect gateway. Defaults to the CIDR block of the VPC associated with the Virtual Gateway. Must be configured to enable drift detection.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Direct Connect gateway association resource.
+* `associated_gateway_type` - The type of the associated gateway, `transitGateway` or `virtualPrivateGateway`.
+* `dx_gateway_association_id` - The ID of the Direct Connect gateway association.
+* `dx_gateway_owner_account_id` - The ID of the AWS account that owns the Direct Connect gateway.
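+
+Gateway associations can take tens of minutes to provision and delete; the default operation timeouts listed in the next section can be raised via the resource's `timeouts` argument. A minimal sketch, assuming the generated provider bindings expose the usual `DxGatewayAssociationTimeouts` struct alongside the resource class (that name follows the common cdktf binding pattern and is an assumption, not taken from the original examples):
+
+```python
+# A sketch of overriding the default 30m timeouts. The DxGatewayAssociationTimeouts
+# struct name is assumed from the usual cdktf binding pattern; all IDs are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dx_gateway_association import DxGatewayAssociation, DxGatewayAssociationTimeouts
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DxGatewayAssociation(self, "example",
+            associated_gateway_id="vgw-98765432",  # placeholder
+            dx_gateway_id="345508c3-7215-4aef-9832-07c125d5bd0f",  # placeholder
+            timeouts=DxGatewayAssociationTimeouts(
+                create="45m",
+                update="45m",
+                delete="45m"
+            )
+        )
+```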
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30m`) +- `update` - (Default `30m`) +- `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect gateway associations using `dx_gateway_id` together with `associated_gateway_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Direct Connect gateway associations using `dx_gateway_id` together with `associated_gateway_id`. For example: + +```console +% terraform import aws_dx_gateway_association.example 345508c3-7215-4aef-9832-07c125d5bd0f/vgw-98765432 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dx_gateway_association_proposal.html.markdown b/website/docs/cdktf/python/r/dx_gateway_association_proposal.html.markdown new file mode 100644 index 00000000000..616ca8eeb70 --- /dev/null +++ b/website/docs/cdktf/python/r/dx_gateway_association_proposal.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_gateway_association_proposal" +description: |- + Manages a Direct Connect Gateway Association Proposal. +--- + + + +# Resource: aws_dx_gateway_association_proposal + +Manages a Direct Connect Gateway Association Proposal, typically for enabling cross-account associations. For single account associations, see the [`aws_dx_gateway_association` resource](/docs/providers/aws/r/dx_gateway_association.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_gateway_association_proposal import DxGatewayAssociationProposal +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DxGatewayAssociationProposal(self, "example", + associated_gateway_id=Token.as_string(aws_vpn_gateway_example.id), + dx_gateway_id=Token.as_string(aws_dx_gateway_example.id), + dx_gateway_owner_account_id=Token.as_string(aws_dx_gateway_example.owner_account_id) + ) +``` + +A full example of how to create a VPN Gateway in one AWS account, create a Direct Connect Gateway in a second AWS account, and associate the VPN Gateway with the Direct Connect Gateway via the `aws_dx_gateway_association_proposal` and `aws_dx_gateway_association` resources can be found in [the `./examples/dx-gateway-cross-account-vgw-association` directory within the Github Repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/dx-gateway-cross-account-vgw-association). + +## Argument Reference + +This resource supports the following arguments: + +* `associated_gateway_id` - (Required) The ID of the VGW or transit gateway with which to associate the Direct Connect gateway. +* `dx_gateway_id` - (Required) Direct Connect Gateway identifier. +* `dx_gateway_owner_account_id` - (Required) AWS Account identifier of the Direct Connect Gateway's owner. 
+* `allowed_prefixes` - (Optional) VPC prefixes (CIDRs) to advertise to the Direct Connect gateway. Defaults to the CIDR block of the VPC associated with the Virtual Gateway. Must be configured to enable drift detection.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Direct Connect Gateway Association Proposal identifier.
+* `associated_gateway_owner_account_id` - The ID of the AWS account that owns the VGW or transit gateway with which to associate the Direct Connect gateway.
+* `associated_gateway_type` - The type of the associated gateway, `transitGateway` or `virtualPrivateGateway`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect Gateway Association Proposals using either a proposal ID alone, or a proposal ID, Direct Connect Gateway ID, and associated gateway ID separated by `/`. For example:
+
+Using a proposal ID:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using a proposal ID, Direct Connect Gateway ID, and associated gateway ID separated by `/`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect Gateway Association Proposals using either a proposal ID alone, or a proposal ID, Direct Connect Gateway ID, and associated gateway ID separated by `/`. For example:
+
+Using a proposal ID:
+
+```console
+% terraform import aws_dx_gateway_association_proposal.example ac90e981-b718-4364-872d-65478c84fafe
+```
+
+Using a proposal ID, Direct Connect Gateway ID, and associated gateway ID separated by `/`:
+
+```console
+% terraform import aws_dx_gateway_association_proposal.example ac90e981-b718-4364-872d-65478c84fafe/abcd1234-dcba-5678-be23-cdef9876ab45/vgw-12345678
+```
+
+The latter case is useful when a previous proposal has been accepted and deleted by AWS.
+The `aws_dx_gateway_association_proposal` resource will then represent a pseudo-proposal for the same Direct Connect Gateway and associated gateway. If no previous proposal is available, use a tool like [`uuidgen`](http://manpages.ubuntu.com/manpages/bionic/man1/uuidgen.1.html) to generate a new random pseudo-proposal ID.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_hosted_connection.html.markdown b/website/docs/cdktf/python/r/dx_hosted_connection.html.markdown
new file mode 100644
index 00000000000..3817c052057
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_hosted_connection.html.markdown
@@ -0,0 +1,64 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_hosted_connection"
+description: |-
+  Provides a hosted connection on the specified interconnect or a link aggregation group (LAG) of interconnects. Intended for use by AWS Direct Connect Partners only.
+---
+
+
+
+# Resource: aws_dx_hosted_connection
+
+Provides a hosted connection on the specified interconnect or a link aggregation group (LAG) of interconnects. Intended for use by AWS Direct Connect Partners only.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_hosted_connection import DxHostedConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DxHostedConnection(self, "hosted", + bandwidth="100Mbps", + connection_id="dxcon-ffabc123", + name="tf-dx-hosted-connection", + owner_account_id="123456789012", + vlan=1 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the connection. +* `bandwidth` - (Required) The bandwidth of the connection. Valid values for dedicated connections: 1Gbps, 10Gbps. Valid values for hosted connections: 50Mbps, 100Mbps, 200Mbps, 300Mbps, 400Mbps, 500Mbps, 1Gbps, 2Gbps, 5Gbps and 10Gbps. Case sensitive. +* `connection_id` - (Required) The ID of the interconnect or LAG. +* `owner_account_id` - (Required) The ID of the AWS account of the customer for the connection. +* `vlan` - (Required) The dedicated VLAN provisioned to the hosted connection. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the connection. +* `jumbo_frame_capable` - Boolean value representing if jumbo frames have been enabled for this connection. +* `has_logical_redundancy` - Indicates whether the connection supports a secondary BGP peer in the same address family (IPv4/IPv6). +* `aws_device` - The Direct Connect endpoint on which the physical connection terminates. +* `state` - The state of the connection. Possible values include: ordering, requested, pending, available, down, deleting, deleted, rejected, unknown. See [AllocateHostedConnection](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_AllocateHostedConnection.html) for a description of each connection state. +* `lag_id` - The ID of the LAG. +* `loa_issue_time` - The time of the most recent call to [DescribeLoa](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeLoa.html) for this connection. +* `location` - The location of the connection. +* `partner_name` - The name of the AWS Direct Connect service provider associated with the connection. +* `provider_name` - The name of the service provider associated with the connection. +* `region` - The AWS Region where the connection is located. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dx_hosted_private_virtual_interface.html.markdown b/website/docs/cdktf/python/r/dx_hosted_private_virtual_interface.html.markdown new file mode 100644 index 00000000000..1fa77bdf93f --- /dev/null +++ b/website/docs/cdktf/python/r/dx_hosted_private_virtual_interface.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_private_virtual_interface" +description: |- + Provides a Direct Connect hosted private virtual interface resource. +--- + + + +# Resource: aws_dx_hosted_private_virtual_interface + +Provides a Direct Connect hosted private virtual interface resource. This resource represents the allocator's side of the hosted virtual interface. +A hosted virtual interface is a virtual interface that is owned by another AWS account. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_hosted_private_virtual_interface import DxHostedPrivateVirtualInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, owner_account_id):
+        super().__init__(scope, name)
+        DxHostedPrivateVirtualInterface(self, "foo",
+            address_family="ipv4",
+            bgp_asn=65352,
+            connection_id="dxcon-zzzzzzzz",
+            name="vif-foo",
+            vlan=4094,
+            owner_account_id=owner_account_id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connection_id` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `owner_account_id` - (Required) The AWS account that will own the new virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection. The MTU of a virtual private interface can be either `1500` or `9001` (jumbo frames). Default is `1500`.
+* `bgp_auth_key` - (Optional) The authentication key for BGP configuration.
+* `customer_address` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `jumbo_frame_capable` - Indicates whether jumbo frames (9001 MTU) are supported.
+* `aws_device` - The Direct Connect endpoint on which the virtual interface terminates.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted private virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect hosted private virtual interfaces using the VIF `id`.
For example:
+
+```console
+% terraform import aws_dx_hosted_private_virtual_interface.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_hosted_private_virtual_interface_accepter.html.markdown b/website/docs/cdktf/python/r/dx_hosted_private_virtual_interface_accepter.html.markdown
new file mode 100644
index 00000000000..1c4cd7ecbd1
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_hosted_private_virtual_interface_accepter.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_hosted_private_virtual_interface_accepter"
+description: |-
+  Provides a resource to manage the accepter's side of a Direct Connect hosted private virtual interface.
+---
+
+
+
+# Resource: aws_dx_hosted_private_virtual_interface_accepter
+
+Provides a resource to manage the accepter's side of a Direct Connect hosted private virtual interface.
+This resource accepts ownership of a private virtual interface created by another AWS account.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.dx_hosted_private_virtual_interface import DxHostedPrivateVirtualInterface
+from imports.aws.dx_hosted_private_virtual_interface_accepter import DxHostedPrivateVirtualInterfaceAccepter
+from imports.aws.provider import AwsProvider
+from imports.aws.vpn_gateway import VpnGateway
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws")
+        accepter = AwsProvider(self, "aws_1",
+            alias="accepter"
+        )
+        vpn_gw = VpnGateway(self, "vpn_gw",
+            provider=accepter
+        )
+        data_aws_caller_identity_accepter = DataAwsCallerIdentity(self, "accepter",
+            provider=accepter
+        )
+        creator = DxHostedPrivateVirtualInterface(self, "creator",
+            address_family="ipv4",
+            bgp_asn=65352,
+            connection_id="dxcon-zzzzzzzz",
+            depends_on=[vpn_gw],
+            name="vif-foo",
+            owner_account_id=Token.as_string(data_aws_caller_identity_accepter.account_id),
+            vlan=4094
+        )
+        aws_dx_hosted_private_virtual_interface_accepter_accepter = DxHostedPrivateVirtualInterfaceAccepter(self, "accepter_5",
+            provider=accepter,
+            tags={
+                "Side": "Accepter"
+            },
+            virtual_interface_id=creator.id,
+            vpn_gateway_id=vpn_gw.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dx_hosted_private_virtual_interface_accepter_accepter.override_logical_id("accepter")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `virtual_interface_id` - (Required) The ID of the Direct Connect virtual interface to accept.
+* `dx_gateway_id` - (Optional) The ID of the Direct Connect gateway to which to connect the virtual interface.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpn_gateway_id` - (Optional) The ID of the [virtual private gateway](vpn_gateway.html) to which to connect the virtual interface.
+
+### Removing `aws_dx_hosted_private_virtual_interface_accepter` from your configuration
+
+AWS allows a Direct Connect hosted private virtual interface to be deleted from either the allocator's or accepter's side.
+However, Terraform only allows the Direct Connect hosted private virtual interface to be deleted from the allocator's side
+by removing the corresponding `aws_dx_hosted_private_virtual_interface` resource from your configuration.
+Removing an `aws_dx_hosted_private_virtual_interface_accepter` resource from your configuration will remove it
+from your statefile and management, **but will not delete the Direct Connect virtual interface.**
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted private virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect hosted private virtual interfaces using the VIF `id`. For example:
+
+```console
+% terraform import aws_dx_hosted_private_virtual_interface_accepter.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_hosted_public_virtual_interface.html.markdown b/website/docs/cdktf/python/r/dx_hosted_public_virtual_interface.html.markdown
new file mode 100644
index 00000000000..98411735de9
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_hosted_public_virtual_interface.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_hosted_public_virtual_interface"
+description: |-
+  Provides a Direct Connect hosted public virtual interface resource.
+---
+
+
+
+# Resource: aws_dx_hosted_public_virtual_interface
+
+Provides a Direct Connect hosted public virtual interface resource. This resource represents the allocator's side of the hosted virtual interface.
+A hosted virtual interface is a virtual interface that is owned by another AWS account.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_hosted_public_virtual_interface import DxHostedPublicVirtualInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, owner_account_id):
+        super().__init__(scope, name)
+        DxHostedPublicVirtualInterface(self, "foo",
+            address_family="ipv4",
+            amazon_address="175.45.176.2/30",
+            bgp_asn=65352,
+            connection_id="dxcon-zzzzzzzz",
+            customer_address="175.45.176.1/30",
+            name="vif-foo",
+            route_filter_prefixes=["210.52.109.0/24", "175.45.176.0/22"],
+            vlan=4094,
+            owner_account_id=owner_account_id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connection_id` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `owner_account_id` - (Required) The AWS account that will own the new virtual interface.
+* `route_filter_prefixes` - (Required) A list of routes to be advertised to the AWS network in this region.
+* `vlan` - (Required) The VLAN ID.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `bgp_auth_key` - (Optional) The authentication key for BGP configuration.
+* `customer_address` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `aws_device` - The Direct Connect endpoint on which the virtual interface terminates.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted public virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect hosted public virtual interfaces using the VIF `id`. For example:
+
+```console
+% terraform import aws_dx_hosted_public_virtual_interface.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_hosted_public_virtual_interface_accepter.html.markdown b/website/docs/cdktf/python/r/dx_hosted_public_virtual_interface_accepter.html.markdown
new file mode 100644
index 00000000000..e3646054106
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_hosted_public_virtual_interface_accepter.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_hosted_public_virtual_interface_accepter"
+description: |-
+  Provides a resource to manage the accepter's side of a Direct Connect hosted public virtual interface.
+---
+
+
+
+# Resource: aws_dx_hosted_public_virtual_interface_accepter
+
+Provides a resource to manage the accepter's side of a Direct Connect hosted public virtual interface.
+This resource accepts ownership of a public virtual interface created by another AWS account.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.dx_hosted_public_virtual_interface import DxHostedPublicVirtualInterface
+from imports.aws.dx_hosted_public_virtual_interface_accepter import DxHostedPublicVirtualInterfaceAccepter
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws")
+        accepter = AwsProvider(self, "aws_1",
+            alias="accepter"
+        )
+        data_aws_caller_identity_accepter = DataAwsCallerIdentity(self, "accepter",
+            provider=accepter
+        )
+        creator = DxHostedPublicVirtualInterface(self, "creator",
+            address_family="ipv4",
+            amazon_address="175.45.176.2/30",
+            bgp_asn=65352,
+            connection_id="dxcon-zzzzzzzz",
+            customer_address="175.45.176.1/30",
+            name="vif-foo",
+            owner_account_id=Token.as_string(data_aws_caller_identity_accepter.account_id),
+            route_filter_prefixes=["210.52.109.0/24", "175.45.176.0/22"],
+            vlan=4094
+        )
+        aws_dx_hosted_public_virtual_interface_accepter_accepter = DxHostedPublicVirtualInterfaceAccepter(self, "accepter_4",
+            provider=accepter,
+            tags={
+                "Side": "Accepter"
+            },
+            virtual_interface_id=creator.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dx_hosted_public_virtual_interface_accepter_accepter.override_logical_id("accepter")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `virtual_interface_id` - (Required) The ID of the Direct Connect virtual interface to accept.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Removing `aws_dx_hosted_public_virtual_interface_accepter` from your configuration
+
+AWS allows a Direct Connect hosted public virtual interface to be deleted from either the allocator's or accepter's side.
+However, Terraform only allows the Direct Connect hosted public virtual interface to be deleted from the allocator's side
+by removing the corresponding `aws_dx_hosted_public_virtual_interface` resource from your configuration.
+Removing an `aws_dx_hosted_public_virtual_interface_accepter` resource from your configuration will remove it
+from your statefile and management, **but will not delete the Direct Connect virtual interface.**
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted public virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect hosted public virtual interfaces using the VIF `id`. For example:
+
+```console
+% terraform import aws_dx_hosted_public_virtual_interface_accepter.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface.html.markdown b/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface.html.markdown
new file mode 100644
index 00000000000..81158e06d97
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_hosted_transit_virtual_interface"
+description: |-
+  Provides a Direct Connect hosted transit virtual interface resource.
+---
+
+
+
+# Resource: aws_dx_hosted_transit_virtual_interface
+
+Provides a Direct Connect hosted transit virtual interface resource.
+This resource represents the allocator's side of the hosted virtual interface.
+A hosted virtual interface is a virtual interface that is owned by another AWS account.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_hosted_transit_virtual_interface import DxHostedTransitVirtualInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, owner_account_id):
+        super().__init__(scope, name)
+        # "aws_dx_connection_example" refers to a connection defined elsewhere in the stack.
+        DxHostedTransitVirtualInterface(self, "example",
+            address_family="ipv4",
+            bgp_asn=65352,
+            connection_id=Token.as_string(aws_dx_connection_example.id),
+            name="tf-transit-vif-example",
+            vlan=4094,
+            owner_account_id=owner_account_id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connection_id` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `owner_account_id` - (Required) The AWS account that will own the new virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+
+Using `terraform import`, import Direct Connect hosted public virtual interfaces using the VIF `id`. For example:
+
+```console
+% terraform import aws_dx_hosted_public_virtual_interface_accepter.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface.html.markdown b/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface.html.markdown
new file mode 100644
index 00000000000..81158e06d97
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_hosted_transit_virtual_interface"
+description: |-
+  Provides a Direct Connect hosted transit virtual interface resource.
+---
+
+
+
+# Resource: aws_dx_hosted_transit_virtual_interface
+
+Provides a Direct Connect hosted transit virtual interface resource.
+This resource represents the allocator's side of the hosted virtual interface.
+A hosted virtual interface is a virtual interface that is owned by another AWS account.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_hosted_transit_virtual_interface import DxHostedTransitVirtualInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, owner_account_id):
+        super().__init__(scope, name)
+        DxHostedTransitVirtualInterface(self, "example",
+            address_family="ipv4",
+            bgp_asn=65352,
+            connection_id=Token.as_string(aws_dx_connection_example.id),
+            name="tf-transit-vif-example",
+            vlan=4094,
+            owner_account_id=owner_account_id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connection_id` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `owner_account_id` - (Required) The AWS account that will own the new virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `bgp_auth_key` - (Optional) The authentication key for BGP configuration.
+* `customer_address` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection. The MTU of a virtual transit interface can be either `1500` or `8500` (jumbo frames). Default is `1500`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `aws_device` - The Direct Connect endpoint on which the virtual interface terminates.
+* `jumbo_frame_capable` - Indicates whether jumbo frames (8500 MTU) are supported.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted transit virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect hosted transit virtual interfaces using the VIF `id`. For example:
+
+```console
+% terraform import aws_dx_hosted_transit_virtual_interface.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface_accepter.html.markdown b/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface_accepter.html.markdown
new file mode 100644
index 00000000000..2526ced2b6b
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_hosted_transit_virtual_interface_accepter.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_hosted_transit_virtual_interface_accepter"
+description: |-
+  Provides a resource to manage the accepter's side of a Direct Connect hosted transit virtual interface.
+---
+
+
+
+# Resource: aws_dx_hosted_transit_virtual_interface_accepter
+
+Provides a resource to manage the accepter's side of a Direct Connect hosted transit virtual interface.
+This resource accepts ownership of a transit virtual interface created by another AWS account.
+
+-> **NOTE:** AWS allows a Direct Connect hosted transit virtual interface to be deleted from either the allocator's or accepter's side. However, Terraform only allows the Direct Connect hosted transit virtual interface to be deleted from the allocator's side by removing the corresponding `aws_dx_hosted_transit_virtual_interface` resource from your configuration. Removing an `aws_dx_hosted_transit_virtual_interface_accepter` resource from your configuration will remove it from your state file and management, **but will not delete the Direct Connect virtual interface.**
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.dx_gateway import DxGateway
+from imports.aws.dx_hosted_transit_virtual_interface import DxHostedTransitVirtualInterface
+from imports.aws.dx_hosted_transit_virtual_interface_accepter import DxHostedTransitVirtualInterfaceAccepter
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws")
+        accepter = AwsProvider(self, "aws_1",
+            alias="accepter"
+        )
+        example = DxGateway(self, "example",
+            amazon_side_asn=Token.as_string(64512),
+            name="tf-dxg-example",
+            provider=accepter
+        )
+        data_aws_caller_identity_accepter = DataAwsCallerIdentity(self, "accepter",
+            provider=accepter
+        )
+        creator = DxHostedTransitVirtualInterface(self, "creator",
+            address_family="ipv4",
+            bgp_asn=65352,
+            connection_id="dxcon-zzzzzzzz",
+            depends_on=[example],
+            name="tf-transit-vif-example",
+            owner_account_id=Token.as_string(data_aws_caller_identity_accepter.account_id),
+            vlan=4094
+        )
+        aws_dx_hosted_transit_virtual_interface_accepter_accepter = DxHostedTransitVirtualInterfaceAccepter(self, "accepter_5",
+            dx_gateway_id=example.id,
+            provider=accepter,
+            tags={
+                "Side": "Accepter"
+            },
+            virtual_interface_id=creator.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dx_hosted_transit_virtual_interface_accepter_accepter.override_logical_id("accepter")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `dx_gateway_id` - (Required) The ID of the [Direct Connect gateway](dx_gateway.html) to which to connect the virtual interface.
+* `virtual_interface_id` - (Required) The ID of the Direct Connect virtual interface to accept.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted transit virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect hosted transit virtual interfaces using the VIF `id`.
For example:
+
+```console
+% terraform import aws_dx_hosted_transit_virtual_interface_accepter.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_lag.html.markdown b/website/docs/cdktf/python/r/dx_lag.html.markdown
new file mode 100644
index 00000000000..9397e349268
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_lag.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_lag"
+description: |-
+  Provides a Direct Connect LAG.
+---
+
+
+
+# Resource: aws_dx_lag
+
+Provides a Direct Connect LAG. Connections can be added to the LAG via the [`aws_dx_connection`](/docs/providers/aws/r/dx_connection.html) and [`aws_dx_connection_association`](/docs/providers/aws/r/dx_connection_association.html) resources.
+
+~> *NOTE:* When creating a LAG, if no existing connection is specified, Direct Connect will create a connection and Terraform will remove this unmanaged connection during resource creation.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_lag import DxLag
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DxLag(self, "hoge",
+            connections_bandwidth="1Gbps",
+            force_destroy=True,
+            location="EqDC2",
+            name="tf-dx-lag"
+        )
+```
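+
+The optional `connection_id` argument (documented below) migrates an existing dedicated connection into the LAG instead of letting Direct Connect create a new one. A hedged sketch only (the connection ID is a placeholder, and the LAG's `connections_bandwidth` is assumed to match the existing connection's bandwidth):
+
+```python
+# Hypothetical sketch, not 'cdktf convert' output: "dxcon-zzzzzzzz" is a
+# placeholder ID for an existing dedicated connection to fold into the LAG.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dx_lag import DxLag
+class MyMigrationSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DxLag(self, "migrated",
+            connection_id="dxcon-zzzzzzzz",
+            connections_bandwidth="1Gbps",
+            location="EqDC2",
+            name="tf-dx-lag-migrated"
+        )
+```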
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the LAG.
+* `connections_bandwidth` - (Required) The bandwidth of the individual physical connections bundled by the LAG. Valid values: 50Mbps, 100Mbps, 200Mbps, 300Mbps, 400Mbps, 500Mbps, 1Gbps, 2Gbps, 5Gbps, 10Gbps and 100Gbps. Case sensitive.
+* `location` - (Required) The AWS Direct Connect location in which the LAG should be allocated. See [DescribeLocations](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeLocations.html) for the list of AWS Direct Connect locations. Use `locationCode`.
+* `connection_id` - (Optional) The ID of an existing dedicated connection to migrate to the LAG.
+* `force_destroy` - (Optional, Default: `false`) A boolean that indicates all connections associated with the LAG should be deleted so that the LAG can be destroyed without error. These objects are *not* recoverable.
+* `provider_name` - (Optional) The name of the service provider associated with the LAG.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the LAG.
+* `has_logical_redundancy` - Indicates whether the LAG supports a secondary BGP peer in the same address family (IPv4/IPv6).
+* `id` - The ID of the LAG.
+* `jumbo_frame_capable` - Indicates whether jumbo frames (9001 MTU) are supported.
+* `owner_account_id` - The ID of the AWS account that owns the LAG.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect LAGs using the LAG `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect LAGs using the LAG `id`. For example:
+
+```console
+% terraform import aws_dx_lag.test_lag dxlag-fgnsp5rq
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_macsec_key_association.html.markdown b/website/docs/cdktf/python/r/dx_macsec_key_association.html.markdown
new file mode 100644
index 00000000000..a8fbcb08a01
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_macsec_key_association.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_macsec_key_association"
+description: |-
+  Provides a MAC Security (MACSec) secret key resource for use with Direct Connect.
+---
+
+
+
+# Resource: aws_dx_macsec_key_association
+
+Provides a MAC Security (MACSec) secret key resource for use with Direct Connect. See [MACsec prerequisites](https://docs.aws.amazon.com/directconnect/latest/UserGuide/direct-connect-mac-sec-getting-started.html#mac-sec-prerequisites) for information about MAC Security (MACsec) prerequisites.
+
+Creating this resource will also create a resource of type [`aws_secretsmanager_secret`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/secretsmanager_secret) which is managed by Direct Connect. While you can import this resource into your Terraform state, because this secret is managed by Direct Connect, you will not be able to make any modifications to it. See [How AWS Direct Connect uses AWS Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/integrating_how-services-use-secrets_directconnect.html) for details.
+
+~> **Note:** All arguments including `ckn` and `cak` will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **Note:** The `secret_arn` argument can only be used to reference a previously created MACSec key. You cannot associate a Secrets Manager secret created outside of the `aws_dx_macsec_key_association` resource.
+
+## Example Usage
+
+### Create MACSec key with CKN and CAK
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_dx_connection import DataAwsDxConnection
+from imports.aws.dx_macsec_key_association import DxMacsecKeyAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsDxConnection(self, "example",
+            name="tf-dx-connection"
+        )
+        DxMacsecKeyAssociation(self, "test",
+            cak="abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789",
+            ckn="0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
+            connection_id=Token.as_string(example.id)
+        )
+```
+
+### Create MACSec key with existing Secrets Manager secret
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_dx_connection import DataAwsDxConnection
+from imports.aws.data_aws_secretsmanager_secret import DataAwsSecretsmanagerSecret
+from imports.aws.dx_macsec_key_association import DxMacsecKeyAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsDxConnection(self, "example",
+            name="tf-dx-connection"
+        )
+        data_aws_secretsmanager_secret_example = DataAwsSecretsmanagerSecret(self, "example_1",
+            name="directconnect!prod/us-east-1/directconnect/0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_secretsmanager_secret_example.override_logical_id("example")
+        DxMacsecKeyAssociation(self, "test",
+            connection_id=Token.as_string(example.id),
+            secret_arn=Token.as_string(data_aws_secretsmanager_secret_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cak` - (Optional) The MAC Security (MACsec) CAK to associate with the dedicated connection. The valid values are 64 hexadecimal characters (0-9, A-F). Required if using `ckn`.
+* `ckn` - (Optional) The MAC Security (MACsec) CKN to associate with the dedicated connection. The valid values are 64 hexadecimal characters (0-9, A-F). Required if using `cak`.
+* `connection_id` - (Required) The ID of the dedicated Direct Connect connection. The connection must be a dedicated connection in the `AVAILABLE` state.
+* `secret_arn` - (Optional) The Amazon Resource Name (ARN) of the MAC Security (MACsec) secret key to associate with the dedicated connection.
+
+~> **Note:** The `ckn`/`cak` pair and `secret_arn` are mutually exclusive: provide either `ckn` and `cak` to create a new key, or `secret_arn` to reference an existing MAC Security (MACsec) secret key, but not both.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the MAC Security (MACSec) secret key resource.
+* `start_on` - The date in UTC format that the MAC Security (MACsec) secret key takes effect.
+* `state` - The state of the MAC Security (MACsec) secret key. The possible values are: associating, associated, disassociating, disassociated. See [MacSecKey](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_MacSecKey.html#DX-Type-MacSecKey-state) for descriptions of each state.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_private_virtual_interface.html.markdown b/website/docs/cdktf/python/r/dx_private_virtual_interface.html.markdown
new file mode 100644
index 00000000000..b754a7489cb
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_private_virtual_interface.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_private_virtual_interface"
+description: |-
+  Provides a Direct Connect private virtual interface resource.
+---
+
+
+
+# Resource: aws_dx_private_virtual_interface
+
+Provides a Direct Connect private virtual interface resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_private_virtual_interface import DxPrivateVirtualInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DxPrivateVirtualInterface(self, "foo",
+            address_family="ipv4",
+            bgp_asn=65352,
+            connection_id="dxcon-zzzzzzzz",
+            name="vif-foo",
+            vlan=4094
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connection_id` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `bgp_auth_key` - (Optional) The authentication key for BGP configuration.
+* `customer_address` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+* `dx_gateway_id` - (Optional) The ID of the Direct Connect gateway to which to connect the virtual interface.
+* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection.
+The MTU of a virtual private interface can be either `1500` or `9001` (jumbo frames). Default is `1500`.
+* `sitelink_enabled` - (Optional) Indicates whether to enable or disable SiteLink.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpn_gateway_id` - (Optional) The ID of the [virtual private gateway](vpn_gateway.html) to which to connect the virtual interface.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `aws_device` - The Direct Connect endpoint on which the virtual interface terminates.
+* `jumbo_frame_capable` - Indicates whether jumbo frames (9001 MTU) are supported.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect private virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect private virtual interfaces using the VIF `id`. For example:
+
+```console
+% terraform import aws_dx_private_virtual_interface.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dx_public_virtual_interface.html.markdown b/website/docs/cdktf/python/r/dx_public_virtual_interface.html.markdown
new file mode 100644
index 00000000000..0fcc9baee23
--- /dev/null
+++ b/website/docs/cdktf/python/r/dx_public_virtual_interface.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_public_virtual_interface"
+description: |-
+  Provides a Direct Connect public virtual interface resource.
+---
+
+
+
+# Resource: aws_dx_public_virtual_interface
+
+Provides a Direct Connect public virtual interface resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dx_public_virtual_interface import DxPublicVirtualInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DxPublicVirtualInterface(self, "foo",
+            address_family="ipv4",
+            amazon_address="175.45.176.2/30",
+            bgp_asn=65352,
+            connection_id="dxcon-zzzzzzzz",
+            customer_address="175.45.176.1/30",
+            name="vif-foo",
+            route_filter_prefixes=["210.52.109.0/24", "175.45.176.0/22"],
+            vlan=4094
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connection_id` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `bgp_auth_key` - (Optional) The authentication key for BGP configuration.
+* `customer_address` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+* `route_filter_prefixes` - (Required) A list of routes to be advertised to the AWS network in this region.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the virtual interface. +* `arn` - The ARN of the virtual interface. +* `aws_device` - The Direct Connect endpoint on which the virtual interface terminates. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect public virtual interfaces using the VIF `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Direct Connect public virtual interfaces using the VIF `id`. For example: + +```console +% terraform import aws_dx_public_virtual_interface.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/dx_transit_virtual_interface.html.markdown b/website/docs/cdktf/python/r/dx_transit_virtual_interface.html.markdown new file mode 100644 index 00000000000..006dbb73b41 --- /dev/null +++ b/website/docs/cdktf/python/r/dx_transit_virtual_interface.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_transit_virtual_interface" +description: |- + Provides a Direct Connect transit virtual interface resource. +--- + + + +# Resource: aws_dx_transit_virtual_interface + +Provides a Direct Connect transit virtual interface resource. +A transit virtual interface is a VLAN that transports traffic from a [Direct Connect gateway](dx_gateway.html) to one or more [transit gateways](ec2_transit_gateway.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.dx_gateway import DxGateway +from imports.aws.dx_transit_virtual_interface import DxTransitVirtualInterface +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DxGateway(self, "example", + amazon_side_asn=Token.as_string(64512), + name="tf-dxg-example" + ) + aws_dx_transit_virtual_interface_example = DxTransitVirtualInterface(self, "example_1", + address_family="ipv4", + bgp_asn=65352, + connection_id=Token.as_string(aws_dx_connection_example.id), + dx_gateway_id=example.id, + name="tf-transit-vif-example", + vlan=4094 + ) + # This allows the Terraform resource name to match the original name. 
        # You can remove the call if you don't need them to match.
        aws_dx_transit_virtual_interface_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address_family` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgp_asn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connection_id` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `dx_gateway_id` - (Required) The ID of the Direct Connect gateway to which to connect the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazon_address` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `bgp_auth_key` - (Optional) The authentication key for BGP configuration.
+* `customer_address` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection.
+The MTU of a virtual transit interface can be either `1500` or `8500` (jumbo frames). Default is `1500`.
+* `sitelink_enabled` - (Optional) Indicates whether to enable or disable SiteLink.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `aws_device` - The Direct Connect endpoint on which the virtual interface terminates.
+* `jumbo_frame_capable` - Indicates whether jumbo frames (8500 MTU) are supported.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect transit virtual interfaces using the VIF `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Direct Connect transit virtual interfaces using the VIF `id`.
For example:
+
+```console
+% terraform import aws_dx_transit_virtual_interface.test dxvif-33cc44dd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_contributor_insights.html.markdown b/website/docs/cdktf/python/r/dynamodb_contributor_insights.html.markdown
new file mode 100644
index 00000000000..6660ce22085
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_contributor_insights.html.markdown
@@ -0,0 +1,64 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_contributor_insights"
+description: |-
+  Provides a DynamoDB contributor insights resource
+---
+
+
+
+# Resource: aws_dynamodb_contributor_insights
+
+Provides a DynamoDB contributor insights resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dynamodb_contributor_insights import DynamodbContributorInsights
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DynamodbContributorInsights(self, "test",
+            table_name="ExampleTableName"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `table_name` - (Required) The name of the table to enable contributor insights on.
+* `index_name` - (Optional) The global secondary index name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_dynamodb_contributor_insights` using the format `name:table_name/index:index_name`, followed by the account number. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_dynamodb_contributor_insights` using the format `name:table_name/index:index_name`, followed by the account number. For example:
+
+```console
+% terraform import aws_dynamodb_contributor_insights.test name:ExampleTableName/index:ExampleIndexName/123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_global_table.html.markdown b/website/docs/cdktf/python/r/dynamodb_global_table.html.markdown
new file mode 100644
index 00000000000..040197fa95d
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_global_table.html.markdown
@@ -0,0 +1,123 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_global_table"
+description: |-
+  Manages DynamoDB Global Tables V1 (version 2017.11.29)
+---
+
+
+
+# Resource: aws_dynamodb_global_table
+
+Manages [DynamoDB Global Tables V1 (version 2017.11.29)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V1.html). These are layered on top of existing DynamoDB Tables.
+
+~> **NOTE:** To instead manage [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html), use the [`aws_dynamodb_table` resource](/docs/providers/aws/r/dynamodb_table.html) `replica` configuration block.
+
+~> Note: There are many restrictions before you can properly create DynamoDB Global Tables in multiple regions. See the [AWS DynamoDB Global Table Requirements](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables_reqs_bestpractices.html) for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dynamodb_global_table import DynamodbGlobalTable
+from imports.aws.dynamodb_table import DynamodbTable
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        us_east1 = AwsProvider(self, "aws",
+            alias="us-east-1",
+            region="us-east-1"
+        )
+        us_west2 = AwsProvider(self, "aws_1",
+            alias="us-west-2",
+            region="us-west-2"
+        )
+        aws_dynamodb_table_us_east1 = DynamodbTable(self, "us-east-1",
+            attribute=[DynamodbTableAttribute(
+                name="myAttribute",
+                type="S"
+            )
+            ],
+            hash_key="myAttribute",
+            name="myTable",
+            provider=us_east1,
+            read_capacity=1,
+            stream_enabled=True,
+            stream_view_type="NEW_AND_OLD_IMAGES",
+            write_capacity=1
+        )
+        aws_dynamodb_table_us_west2 = DynamodbTable(self, "us-west-2",
+            attribute=[DynamodbTableAttribute(
+                name="myAttribute",
+                type="S"
+            )
+            ],
+            hash_key="myAttribute",
+            name="myTable",
+            provider=us_west2,
+            read_capacity=1,
+            stream_enabled=True,
+            stream_view_type="NEW_AND_OLD_IMAGES",
+            write_capacity=1
+        )
+        DynamodbGlobalTable(self, "myTable",
+            depends_on=[aws_dynamodb_table_us_east1, aws_dynamodb_table_us_west2],
+            name="myTable",
+            provider=us_east1,
+            replica=[DynamodbGlobalTableReplica(
+                region_name="us-east-1"
+            ), DynamodbGlobalTableReplica(
+                region_name="us-west-2"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the global table. Must match underlying DynamoDB Table names in all regions.
+* `replica` - (Required) Underlying DynamoDB Table. At least 1 replica must be defined. See below.
+
+### Nested Fields
+
+#### `replica`
+
+* `region_name` - (Required) AWS region name of the replica DynamoDB Table. E.g., `us-east-1`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the DynamoDB Global Table.
+* `arn` - The ARN of the DynamoDB Global Table.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB Global Tables using the global table name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DynamoDB Global Tables using the global table name.
For example:
+
+```console
+% terraform import aws_dynamodb_global_table.MyTable MyTable
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_kinesis_streaming_destination.html.markdown b/website/docs/cdktf/python/r/dynamodb_kinesis_streaming_destination.html.markdown
new file mode 100644
index 00000000000..0aa49a36d2a
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_kinesis_streaming_destination.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_kinesis_streaming_destination"
+description: |-
+  Enables a Kinesis streaming destination for a DynamoDB table
+---
+
+
+
+# Resource: aws_dynamodb_kinesis_streaming_destination
+
+Enables a [Kinesis streaming destination](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/kds.html) for data replication of a DynamoDB table.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dynamodb_kinesis_streaming_destination import DynamodbKinesisStreamingDestination
+from imports.aws.dynamodb_table import DynamodbTable
+from imports.aws.kinesis_stream import KinesisStream
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DynamodbTable(self, "example",
+            attribute=[DynamodbTableAttribute(
+                name="id",
+                type="S"
+            )
+            ],
+            hash_key="id",
+            name="orders"
+        )
+        aws_kinesis_stream_example = KinesisStream(self, "example_1",
+            name="order_item_changes",
+            shard_count=1
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kinesis_stream_example.override_logical_id("example")
+        aws_dynamodb_kinesis_streaming_destination_example = DynamodbKinesisStreamingDestination(self, "example_2",
+            stream_arn=Token.as_string(aws_kinesis_stream_example.arn),
+            table_name=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dynamodb_kinesis_streaming_destination_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stream_arn` - (Required) The ARN for a Kinesis data stream. This must exist in the same account and region as the DynamoDB table.
+* `table_name` - (Required) The name of the DynamoDB table. There can only be one Kinesis streaming destination for a given DynamoDB table.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `table_name` and `stream_arn` separated by a comma (`,`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB Kinesis Streaming Destinations using the `table_name` and `stream_arn` separated by `,`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DynamoDB Kinesis Streaming Destinations using the `table_name` and `stream_arn` separated by `,`. For example:
+
+```console
+% terraform import aws_dynamodb_kinesis_streaming_destination.example example,arn:aws:kinesis:us-east-1:111122223333:exampleStreamName
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_table.html.markdown b/website/docs/cdktf/python/r/dynamodb_table.html.markdown
new file mode 100644
index 00000000000..9b71053f10a
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_table.html.markdown
@@ -0,0 +1,313 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table"
+description: |-
+  Provides a DynamoDB table resource
+---
+
+
+
+# Resource: aws_dynamodb_table
+
+Provides a DynamoDB table resource.
+
+~> **Note:** We recommend using `lifecycle` [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) for `read_capacity` and/or `write_capacity` if there's an [autoscaling policy](/docs/providers/aws/r/appautoscaling_policy.html) attached to the table (see the sketch below).
+
+~> **Note:** When using [aws_dynamodb_table_replica](/docs/providers/aws/r/dynamodb_table_replica.html) with this resource, use `lifecycle` [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) for `replica`, _e.g._, `lifecycle { ignore_changes = [replica] }`.
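+
+As a hedged illustration of that first note (not part of the converted examples on this page), ignoring externally managed capacity in cdktf might look like:
+
+```python
+# Hypothetical sketch, not 'cdktf convert' output: keep Terraform from
+# fighting an Application Auto Scaling policy by ignoring capacity drift.
+# In cdktf, `ignore_changes` takes attribute name strings.
+from constructs import Construct
+from cdktf import TerraformStack, TerraformResourceLifecycle
+from imports.aws.dynamodb_table import DynamodbTable, DynamodbTableAttribute
+class MyAutoscalingSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DynamodbTable(self, "autoscaled",
+            attribute=[DynamodbTableAttribute(
+                name="UserId",
+                type="S"
+            )
+            ],
+            billing_mode="PROVISIONED",
+            hash_key="UserId",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["read_capacity", "write_capacity"]
+            ),
+            name="example-autoscaled",
+            read_capacity=5,
+            write_capacity=5
+        )
+```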
+
+## DynamoDB Table attributes
+
+Only define attributes on the table object that are going to be used as:
+
+* Table hash key or range key
+* LSI or GSI hash key or range key
+
+The DynamoDB API expects attribute structure (name and type) to be passed along when creating or updating GSI/LSIs or creating the initial table. In these cases it expects the Hash / Range keys to be provided. Because these get re-used in numerous places (i.e., the table's range key could be a part of one or more GSIs), they are stored on the table object to prevent duplication and increase consistency. If you add attributes here that are not used in these scenarios it can cause an infinite loop in planning.
+
+## Example Usage
+
+### Basic Example
+
+The following DynamoDB table description models the table and GSI shown in the [AWS SDK example documentation](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GSI.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dynamodb_table import DynamodbTable
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DynamodbTable(self, "basic-dynamodb-table",
+            attribute=[DynamodbTableAttribute(
+                name="UserId",
+                type="S"
+            ), DynamodbTableAttribute(
+                name="GameTitle",
+                type="S"
+            ), DynamodbTableAttribute(
+                name="TopScore",
+                type="N"
+            )
+            ],
+            billing_mode="PROVISIONED",
+            global_secondary_index=[DynamodbTableGlobalSecondaryIndex(
+                hash_key="GameTitle",
+                name="GameTitleIndex",
+                non_key_attributes=["UserId"],
+                projection_type="INCLUDE",
+                range_key="TopScore",
+                read_capacity=10,
+                write_capacity=10
+            )
+            ],
+            hash_key="UserId",
+            name="GameScores",
+            range_key="GameTitle",
+            read_capacity=20,
+            tags={
+                "Environment": "production",
+                "Name": "dynamodb-table-1"
+            },
+            ttl=DynamodbTableTtl(
+                attribute_name="TimeToExist",
+                enabled=False
+            ),
+            write_capacity=20
+        )
+```
+
+### Global Tables
+
+This resource implements support for [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html) via `replica` configuration blocks. For working with [DynamoDB Global Tables V1 (version 2017.11.29)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V1.html), see the [`aws_dynamodb_global_table` resource](/docs/providers/aws/r/dynamodb_global_table.html).
+
+~> **Note:** [aws_dynamodb_table_replica](/docs/providers/aws/r/dynamodb_table_replica.html) is an alternate way of configuring Global Tables. Do not use `replica` configuration blocks of `aws_dynamodb_table` together with [aws_dynamodb_table_replica](/docs/providers/aws/r/dynamodb_table_replica.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dynamodb_table import DynamodbTable
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DynamodbTable(self, "example",
+            attribute=[DynamodbTableAttribute(
+                name="TestTableHashKey",
+                type="S"
+            )
+            ],
+            billing_mode="PAY_PER_REQUEST",
+            hash_key="TestTableHashKey",
+            name="example",
+            replica=[DynamodbTableReplica(
+                region_name="us-east-2"
+            ), DynamodbTableReplica(
+                region_name="us-west-2"
+            )
+            ],
+            stream_enabled=True,
+            stream_view_type="NEW_AND_OLD_IMAGES"
+        )
+```
+
+### Replica Tagging
+
+You can manage global table replicas' tags in various ways. This example shows using `replica.*.propagate_tags` for one replica and the `aws_dynamodb_tag` resource for the other.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.dynamodb_table import DynamodbTable
+from imports.aws.dynamodb_tag import DynamodbTag
+from imports.aws.provider import AwsProvider
+from imports.awsalternate.provider import AwsalternateProvider
+from imports.awsthird.provider import AwsthirdProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The following providers are missing schema information and might need manual adjustments to synthesize correctly: awsalternate, awsthird.
+        # For a more precise conversion please use the --provider flag in convert.
+        AwsProvider(self, "aws",
+            region="us-west-2"
+        )
+        AwsalternateProvider(self, "awsalternate",
+            region="us-east-1"
+        )
+        AwsthirdProvider(self, "awsthird",
+            region="us-east-2"
+        )
+        alternate = DataAwsRegion(self, "alternate",
+            provider="awsalternate"
+        )
+        current = DataAwsRegion(self, "current")
+        third = DataAwsRegion(self, "third",
+            provider="awsthird"
+        )
+        example = DynamodbTable(self, "example",
+            attribute=[DynamodbTableAttribute(
+                name="TestTableHashKey",
+                type="S"
+            )
+            ],
+            billing_mode="PAY_PER_REQUEST",
+            hash_key="TestTableHashKey",
+            name="example-13281",
+            replica=[DynamodbTableReplica(
+                region_name=Token.as_string(alternate.name)
+            ), DynamodbTableReplica(
+                propagate_tags=True,
+                region_name=Token.as_string(third.name)
+            )
+            ],
+            stream_enabled=True,
+            stream_view_type="NEW_AND_OLD_IMAGES",
+            tags={
+                "Architect": "Eleanor",
+                "Zone": "SW"
+            }
+        )
+        aws_dynamodb_tag_example = DynamodbTag(self, "example_7",
+            key="Architect",
+            resource_arn=Token.as_string(
+                Fn.replace(example.arn,
+                    Token.as_string(current.name),
+                    Token.as_string(alternate.name))),
+            value="Gigi"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dynamodb_tag_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+Required arguments:
+
+* `attribute` - (Required) Set of nested attribute definitions. Only required for `hash_key` and `range_key` attributes. See below.
+* `hash_key` - (Required, Forces new resource) Attribute to use as the hash (partition) key. Must also be defined as an `attribute`. See below.
+* `name` - (Required) Name of the table. Must be unique within a region.
+
+Optional arguments:
+
+* `billing_mode` - (Optional) Controls how you are charged for read and write throughput and how you manage capacity. The valid values are `PROVISIONED` and `PAY_PER_REQUEST`. Defaults to `PROVISIONED`.
+* `deletion_protection_enabled` - (Optional) Enables deletion protection for table. Defaults to `false`.
+* `global_secondary_index` - (Optional) Describe a GSI for the table; subject to the normal limits on the number of GSIs, projected attributes, etc. See below.
+* `local_secondary_index` - (Optional, Forces new resource) Describe an LSI on the table; these can only be allocated _at creation_ so you cannot change this definition after you have created the resource. See below.
+* `point_in_time_recovery` - (Optional) Enable point-in-time recovery options. See below.
+* `range_key` - (Optional, Forces new resource) Attribute to use as the range (sort) key. Must also be defined as an `attribute`, see below.
+* `read_capacity` - (Optional) Number of read units for this table. If the `billing_mode` is `PROVISIONED`, this field is required.
+* `replica` - (Optional) Configuration block(s) with [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html) replication configurations. See below.
+* `restore_date_time` - (Optional) Time of the point-in-time recovery point to restore.
+* `restore_source_name` - (Optional) Name of the table to restore. Must match the name of an existing table.
+* `restore_to_latest_time` - (Optional) If set, restores table to the most recent point-in-time recovery point.
+* `server_side_encryption` - (Optional) Encryption at rest options. AWS DynamoDB tables are automatically encrypted at rest with an AWS-owned Customer Master Key if this argument isn't specified. See below.
+* `stream_enabled` - (Optional) Whether Streams are enabled.
+* `stream_view_type` - (Optional) When an item in the table is modified, StreamViewType determines what information is written to the table's stream. Valid values are `KEYS_ONLY`, `NEW_IMAGE`, `OLD_IMAGE`, `NEW_AND_OLD_IMAGES`.
+* `table_class` - (Optional) Storage class of the table.
+  Valid values are `STANDARD` and `STANDARD_INFREQUENT_ACCESS`.
+  Default value is `STANDARD`.
+* `tags` - (Optional) A map of tags to populate on the created table. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `ttl` - (Optional) Configuration block for TTL. See below.
+* `write_capacity` - (Optional) Number of write units for this table. If the `billing_mode` is `PROVISIONED`, this field is required.
+
+### `attribute`
+
+* `name` - (Required) Name of the attribute.
+* `type` - (Required) Attribute type. Valid values are `S` (string), `N` (number), `B` (binary).
+
+### `global_secondary_index`
+
+* `hash_key` - (Required) Name of the hash key in the index; must be defined as an attribute in the resource.
+* `name` - (Required) Name of the index.
+* `non_key_attributes` - (Optional) Only required with `INCLUDE` as a projection type; a list of attributes to project into the index. These do not need to be defined as attributes on the table.
+* `projection_type` - (Required) One of `ALL`, `INCLUDE` or `KEYS_ONLY` where `ALL` projects every attribute into the index, `KEYS_ONLY` projects into the index only the table and index hash_key and sort_key attributes, and `INCLUDE` projects into the index all of the attributes that are defined in `non_key_attributes` in addition to the attributes that `KEYS_ONLY` projects.
+* `range_key` - (Optional) Name of the range key; must be defined as an attribute in the resource.
+* `read_capacity` - (Optional) Number of read units for this index. Must be set if billing_mode is set to PROVISIONED.
+* `write_capacity` - (Optional) Number of write units for this index. Must be set if billing_mode is set to PROVISIONED.
+
+### `local_secondary_index`
+
+* `name` - (Required) Name of the index.
+* `non_key_attributes` - (Optional) Only required with `INCLUDE` as a projection type; a list of attributes to project into the index. These do not need to be defined as attributes on the table.
+* `projection_type` - (Required) One of `ALL`, `INCLUDE` or `KEYS_ONLY` where `ALL` projects every attribute into the index, `KEYS_ONLY` projects into the index only the table and index hash_key and sort_key attributes, and `INCLUDE` projects into the index all of the attributes that are defined in `non_key_attributes` in addition to the attributes that `KEYS_ONLY` projects.
+* `range_key` - (Required) Name of the range key.
+
+### `point_in_time_recovery`
+
+* `enabled` - (Required) Whether to enable point-in-time recovery. It can take 10 minutes to enable for new tables. If the `point_in_time_recovery` block is not provided, this defaults to `false`.
+
+### `replica`
+
+* `kms_key_arn` - (Optional, Forces new resource) ARN of the CMK that should be used for the AWS KMS encryption. This argument should only be used if the key is different from the default KMS-managed DynamoDB key, `alias/aws/dynamodb`. **Note:** This attribute will _not_ be populated with the ARN of _default_ keys.
+* `point_in_time_recovery` - (Optional) Whether to enable Point In Time Recovery for the replica. Default is `false`.
+* `propagate_tags` - (Optional) Whether to propagate the global table's tags to a replica. Default is `false`. Changes to tags only move in one direction: from global (source) to replica. In other words, tag drift on a replica will not trigger an update. Tag or replica changes on the global table, whether from drift or configuration changes, are propagated to replicas. Changing from `true` to `false` on a subsequent `apply` means replica tags are left as they were, unmanaged, not deleted.
+* `region_name` - (Required) Region name of the replica.
+
+### `server_side_encryption`
+
+* `enabled` - (Required) Whether or not to enable encryption at rest using an AWS managed KMS customer master key (CMK). If `enabled` is `false` then server-side encryption is set to an AWS-_owned_ key (shown as `DEFAULT` in the AWS console). Potentially confusingly, if `enabled` is `true` and no `kms_key_arn` is specified then server-side encryption is set to the _default_ KMS-_managed_ key (shown as `KMS` in the AWS console). The [AWS KMS documentation](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html) explains the difference between AWS-_owned_ and KMS-_managed_ keys. See the sketch after this list.
+* `kms_key_arn` - (Optional) ARN of the CMK that should be used for the AWS KMS encryption. This argument should only be used if the key is different from the default KMS-managed DynamoDB key, `alias/aws/dynamodb`. **Note:** This attribute will _not_ be populated with the ARN of _default_ keys.
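+
+As a hedged illustration (not from the converted examples on this page; the struct name follows the `DynamodbTableAttribute`/`DynamodbTableTtl` naming pattern used above, and the key ARN is a placeholder), enabling encryption with a customer-managed key might look like:
+
+```python
+# Hypothetical sketch, not 'cdktf convert' output: server-side encryption
+# with a customer-managed KMS key. The key ARN below is a placeholder.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.dynamodb_table import (
+    DynamodbTable,
+    DynamodbTableAttribute,
+    DynamodbTableServerSideEncryption
+)
+class MySseSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        DynamodbTable(self, "encrypted",
+            attribute=[DynamodbTableAttribute(
+                name="UserId",
+                type="S"
+            )
+            ],
+            billing_mode="PAY_PER_REQUEST",
+            hash_key="UserId",
+            name="example-encrypted",
+            server_side_encryption=DynamodbTableServerSideEncryption(
+                enabled=True,
+                kms_key_arn="arn:aws:kms:us-east-1:111122223333:key/00000000-0000-0000-0000-000000000000"
+            )
+        )
+```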
+
+### `ttl`
+
+* `enabled` - (Required) Whether TTL is enabled.
+* `attribute_name` - (Required) Name of the table attribute to store the TTL timestamp in.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the table
+* `id` - Name of the table
+* `replica.*.arn` - ARN of the replica
+* `replica.*.stream_arn` - ARN of the replica Table Stream. Only available when `stream_enabled = true`.
+* `replica.*.stream_label` - Timestamp, in ISO 8601 format, for the replica stream. Note that this timestamp is not a unique identifier for the stream on its own. However, the combination of AWS customer ID, table name and this field is guaranteed to be unique. It can be used for creating CloudWatch Alarms. Only available when `stream_enabled = true`.
+* `stream_arn` - ARN of the Table Stream. Only available when `stream_enabled = true`.
+* `stream_label` - Timestamp, in ISO 8601 format, for this stream. Note that this timestamp is not a unique identifier for the stream on its own. However, the combination of AWS customer ID, table name and this field is guaranteed to be unique. It can be used for creating CloudWatch Alarms. Only available when `stream_enabled = true`.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+~> **Note:** There are a variety of default timeouts set internally. If you set a shorter custom timeout than one of the defaults, the custom timeout will not be respected as the longer of the custom or internal default will be used.
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `60m`)
+* `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB tables using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DynamoDB tables using the `name`. For example:
+
+```console
+% terraform import aws_dynamodb_table.basic-dynamodb-table GameScores
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_table_item.html.markdown b/website/docs/cdktf/python/r/dynamodb_table_item.html.markdown
new file mode 100644
index 00000000000..db0b02eb640
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_table_item.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table_item"
+description: |-
+  Provides a DynamoDB table item resource
+---
+
+
+
+# Resource: aws_dynamodb_table_item
+
+Provides a DynamoDB table item resource.
+
+-> **Note:** This resource is not meant to be used for managing large amounts of data in your table, it is not designed to scale.
+   You should perform **regular backups** of all data in the table, see [AWS docs for more](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/BackupRestore.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB tables using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DynamoDB tables using the `name`. For example:
+
+```console
+% terraform import aws_dynamodb_table.basic-dynamodb-table GameScores
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_table_item.html.markdown b/website/docs/cdktf/python/r/dynamodb_table_item.html.markdown
new file mode 100644
index 00000000000..db0b02eb640
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_table_item.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table_item"
+description: |-
+  Provides a DynamoDB table item resource
+---
+
+
+# Resource: aws_dynamodb_table_item
+
+Provides a DynamoDB table item resource.
+
+-> **Note:** This resource is not meant to be used for managing large amounts of data in your table; it is not designed to scale.
+   You should perform **regular backups** of all data in the table; see [AWS docs for more](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/BackupRestore.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dynamodb_table import DynamodbTable, DynamodbTableAttribute
+from imports.aws.dynamodb_table_item import DynamodbTableItem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DynamodbTable(self, "example",
+            attribute=[DynamodbTableAttribute(
+                name="exampleHashKey",
+                type="S"
+            )
+            ],
+            hash_key="exampleHashKey",
+            name="example-name",
+            read_capacity=10,
+            write_capacity=10
+        )
+        aws_dynamodb_table_item_example = DynamodbTableItem(self, "example_1",
+            hash_key=example.hash_key,
+            item="{\n \"exampleHashKey\": {\"S\": \"something\"},\n \"one\": {\"N\": \"11111\"},\n \"two\": {\"N\": \"22222\"},\n \"three\": {\"N\": \"33333\"},\n \"four\": {\"N\": \"44444\"}\n}\n\n",
+            table_name=example.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dynamodb_table_item_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+~> **Note:** Names included in `item` are represented internally with everything but letters removed. There is the possibility of collisions if two names, once filtered, are the same. For example, the names `your-name-here` and `yournamehere` will overlap and cause an error.
+
+This resource supports the following arguments:
+
+* `hash_key` - (Required) Hash key to use for lookups and identification of the item.
+* `item` - (Required) JSON representation of a map of attribute name/value pairs, one for each attribute. Only the primary key attributes are required; you can optionally provide other attribute name-value pairs for the item. One way to build this string is shown in the sketch below.
+* `range_key` - (Optional) Range key to use for lookups and identification of the item. Required if there is a range key defined in the table.
+* `table_name` - (Required) Name of the table to contain the item.
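+Rather than hand-escaping the `item` JSON as in the example above, the string can be built with Python's standard `json` module. A minimal sketch, assuming it runs inside the stack defined in the example above (so `example` is the table created there):
+
+```python
+import json
+
+# DynamoDB attribute-value map, written as a plain dict and serialized once.
+item_value = json.dumps({
+    "exampleHashKey": {"S": "something"},
+    "one": {"N": "11111"},
+    "two": {"N": "22222"}
+})
+
+DynamodbTableItem(self, "example_from_dict",
+    hash_key=example.hash_key,
+    item=item_value,
+    table_name=example.name
+)
+```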
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+You cannot import DynamoDB table items.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_table_replica.html.markdown b/website/docs/cdktf/python/r/dynamodb_table_replica.html.markdown
new file mode 100644
index 00000000000..5d7a8c83335
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_table_replica.html.markdown
@@ -0,0 +1,126 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table_replica"
+description: |-
+  Provides a DynamoDB table replica resource
+---
+
+
+# Resource: aws_dynamodb_table_replica
+
+Provides a DynamoDB table replica resource for [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html).
+
+~> **Note:** Use `lifecycle` [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) for `replica` in the associated [aws_dynamodb_table](/docs/providers/aws/r/dynamodb_table.html) configuration.
+
+~> **Note:** Do not use the `replica` configuration block of [aws_dynamodb_table](/docs/providers/aws/r/dynamodb_table.html) together with this resource as the two configuration options are mutually exclusive.
+
+## Example Usage
+
+### Basic Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.dynamodb_table import DynamodbTable, DynamodbTableAttribute
+from imports.aws.dynamodb_table_replica import DynamodbTableReplicaA
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = AwsProvider(self, "aws",
+            alias="main",
+            region="us-west-2"
+        )
+        alt = AwsProvider(self, "aws_1",
+            alias="alt",
+            region="us-east-2"
+        )
+        example = DynamodbTable(self, "example",
+            attribute=[DynamodbTableAttribute(
+                name="BrodoBaggins",
+                type="S"
+            )
+            ],
+            billing_mode="PAY_PER_REQUEST",
+            hash_key="BrodoBaggins",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["replica"]
+            ),
+            name="TestTable",
+            provider=main,
+            stream_enabled=True,
+            stream_view_type="NEW_AND_OLD_IMAGES"
+        )
+        aws_dynamodb_table_replica_example = DynamodbTableReplicaA(self, "example_3",
+            global_table_arn=example.arn,
+            provider=alt,
+            tags={
+                "Name": "IZPAWS",
+                "Pozo": "Amargo"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_dynamodb_table_replica_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+Required arguments:
+
+* `global_table_arn` - (Required) ARN of the _main_ or global table which this resource will replicate.
+
+Optional arguments:
+
+* `kms_key_arn` - (Optional, Forces new resource) ARN of the CMK that should be used for the AWS KMS encryption. This argument should only be used if the key is different from the default KMS-managed DynamoDB key, `alias/aws/dynamodb`. **Note:** This attribute will _not_ be populated with the ARN of _default_ keys.
+* `point_in_time_recovery` - (Optional) Whether to enable Point In Time Recovery for the replica. Default is `false`.
+* `table_class_override` - (Optional, Forces new resource) Storage class of the table replica. Valid values are `STANDARD` and `STANDARD_INFREQUENT_ACCESS`. If not used, the table replica will use the same class as the global table.
+* `tags` - (Optional) Map of tags to populate on the created table. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the table replica.
+* `id` - Name of the table and region of the main global table joined with a colon (_e.g._, `TableName:us-east-1`).
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB table replicas using the `table-name:main-region`. For example:
+
+~> **Note:** When importing, use the region where the initial or _main_ global table resides, _not_ the region of the replica.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import DynamoDB table replicas using the `table-name:main-region`. For example:
+
+~> **Note:** When importing, use the region where the initial or _main_ global table resides, _not_ the region of the replica.
+
+```console
+% terraform import aws_dynamodb_table_replica.example TestTable:us-west-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/dynamodb_tag.html.markdown b/website/docs/cdktf/python/r/dynamodb_tag.html.markdown
new file mode 100644
index 00000000000..f522de4de02
--- /dev/null
+++ b/website/docs/cdktf/python/r/dynamodb_tag.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_tag"
+description: |-
+  Manages an individual DynamoDB resource tag
+---
+
+
+# Resource: aws_dynamodb_tag
+
+Manages an individual DynamoDB resource tag. This resource should only be used in cases where DynamoDB resources are created outside Terraform (e.g., Table replicas in other regions).
+
+~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `aws_dynamodb_table` and `aws_dynamodb_tag` to manage tags of the same DynamoDB Table in the same region will cause a perpetual difference where the `aws_dynamodb_table` resource will try to remove the tag being added by the `aws_dynamodb_tag` resource.
+
+~> **NOTE:** This tagging resource does not use the [provider `ignore_tags` configuration](/docs/providers/aws/index.html#ignore_tags).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.dynamodb_table import DynamodbTable, DynamodbTableReplica
+from imports.aws.dynamodb_tag import DynamodbTag
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, table_name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            region="us-west-2"
+        )
+        replica = AwsProvider(self, "aws_1",
+            alias="replica",
+            region="us-east-1"
+        )
+        current = DataAwsRegion(self, "current")
+        data_aws_region_replica = DataAwsRegion(self, "replica",
+            provider=replica
+        )
+        # The table whose replica is tagged below; its name is passed in by the caller.
+        example = DynamodbTable(self, "example",
+            replica=[DynamodbTableReplica(
+                region_name=Token.as_string(data_aws_region_replica.name)
+            )
+            ],
+            name=table_name
+        )
+        DynamodbTag(self, "test",
+            key="testkey",
+            provider=replica,
+            resource_arn=Token.as_string(
+                Fn.replace(
+                    Token.as_string(example.arn),
+                    Token.as_string(current.name),
+                    Token.as_string(data_aws_region_replica.name))),
+            value="testvalue"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required) Amazon Resource Name (ARN) of the DynamoDB resource to tag.
+* `key` - (Required) Tag name.
+* `value` - (Required) Tag value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - DynamoDB resource identifier and key, separated by a comma (`,`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_dynamodb_tag` using the DynamoDB resource identifier and key, separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_dynamodb_tag` using the DynamoDB resource identifier and key, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_dynamodb_tag.example arn:aws:dynamodb:us-east-1:123456789012:table/example,Name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ebs_default_kms_key.html.markdown b/website/docs/cdktf/python/r/ebs_default_kms_key.html.markdown
new file mode 100644
index 00000000000..75aabc33e0b
--- /dev/null
+++ b/website/docs/cdktf/python/r/ebs_default_kms_key.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_default_kms_key"
+description: |-
+  Manages the default customer master key (CMK) that your AWS account uses to encrypt EBS volumes.
+---
+
+
+# Resource: aws_ebs_default_kms_key
+
+Provides a resource to manage the default customer master key (CMK) that your AWS account uses to encrypt EBS volumes.
+
+Your AWS account has an AWS-managed default CMK that is used for encrypting an EBS volume when no CMK is specified in the API call that creates the volume.
+By using the `aws_ebs_default_kms_key` resource, you can specify a customer-managed CMK to use in place of the AWS-managed default CMK.
+
+~> **NOTE:** Creating an `aws_ebs_default_kms_key` resource does not enable default EBS encryption. Use the [`aws_ebs_encryption_by_default`](ebs_encryption_by_default.html) resource to enable default EBS encryption.
+
+~> **NOTE:** Destroying this resource will reset the default CMK to the account's AWS-managed default CMK for EBS.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ebs_default_kms_key import EbsDefaultKmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EbsDefaultKmsKey(self, "example",
+            key_arn=Token.as_string(aws_kms_key_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `key_arn` - (Required, ForceNew) The ARN of the AWS Key Management Service (AWS KMS) customer master key (CMK) to use to encrypt the EBS volume.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EBS default KMS CMK using the KMS key ARN.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import the EBS default KMS CMK using the KMS key ARN. For example: + +```console +% terraform import aws_ebs_default_kms_key.example arn:aws:kms:us-east-1:123456789012:key/abcd-1234 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ebs_encryption_by_default.html.markdown b/website/docs/cdktf/python/r/ebs_encryption_by_default.html.markdown new file mode 100644 index 00000000000..1d8e9a3f58a --- /dev/null +++ b/website/docs/cdktf/python/r/ebs_encryption_by_default.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_encryption_by_default" +description: |- + Manages whether default EBS encryption is enabled for your AWS account in the current AWS region. +--- + + + +# Resource: aws_ebs_encryption_by_default + +Provides a resource to manage whether default EBS encryption is enabled for your AWS account in the current AWS region. To manage the default KMS key for the region, see the [`aws_ebs_default_kms_key` resource](/docs/providers/aws/r/ebs_default_kms_key.html). + +~> **NOTE:** Removing this Terraform resource disables default EBS encryption. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ebs_encryption_by_default import EbsEncryptionByDefault +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EbsEncryptionByDefault(self, "example", + enabled=True + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `enabled` - (Optional) Whether or not default EBS encryption is enabled. Valid values are `true` or `false`. Defaults to `true`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the default EBS encryption state. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import the default EBS encryption state. For example: + +```console +% terraform import aws_ebs_encryption_by_default.example default +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ebs_snapshot.html.markdown b/website/docs/cdktf/python/r/ebs_snapshot.html.markdown new file mode 100644 index 00000000000..9926df53bc1 --- /dev/null +++ b/website/docs/cdktf/python/r/ebs_snapshot.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_snapshot" +description: |- + Provides an elastic block storage snapshot resource. +--- + + + +# Resource: aws_ebs_snapshot + +Creates a Snapshot of an EBS Volume. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ebs_snapshot import EbsSnapshot +from imports.aws.ebs_volume import EbsVolume +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = EbsVolume(self, "example", + availability_zone="us-west-2a", + size=40, + tags={ + "Name": "HelloWorld" + } + ) + EbsSnapshot(self, "example_snapshot", + tags={ + "Name": "HelloWorld_snap" + }, + volume_id=example.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `volume_id` - (Required) The Volume ID of which to make a snapshot. +* `description` - (Optional) A description of what the snapshot is. +* `outpost_arn` - (Optional) The Amazon Resource Name (ARN) of the Outpost on which to create a local snapshot. +* `storage_tier` - (Optional) The name of the storage tier. Valid values are `archive` and `standard`. Default value is `standard`. +* `permanent_restore` - (Optional) Indicates whether to permanently restore an archived snapshot. +* `temporary_restore_days` - (Optional) Specifies the number of days for which to temporarily restore an archived snapshot. Required for temporary restores only. The snapshot will be automatically re-archived after this period. +* `tags` - (Optional) A map of tags to assign to the snapshot. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the EBS Snapshot. +* `id` - The snapshot ID (e.g., snap-59fcb34e). +* `owner_id` - The AWS account ID of the EBS snapshot owner. +* `owner_alias` - Value from an Amazon-maintained list (`amazon`, `aws-marketplace`, `microsoft`) of snapshot owners. +* `encrypted` - Whether the snapshot is encrypted. +* `volume_size` - The size of the drive in GiBs. +* `kms_key_id` - The ARN for the KMS encryption key. +* `data_encryption_key_id` - The data encryption key identifier for the snapshot. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EBS Snapshot using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EBS Snapshot using the `id`. 
For example:
+
+```console
+% terraform import aws_ebs_snapshot.id snap-049df61146c4d7901
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ebs_snapshot_copy.html.markdown b/website/docs/cdktf/python/r/ebs_snapshot_copy.html.markdown
new file mode 100644
index 00000000000..8fa715eb2a3
--- /dev/null
+++ b/website/docs/cdktf/python/r/ebs_snapshot_copy.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_snapshot_copy"
+description: |-
+  Duplicates an existing Amazon snapshot
+---
+
+
+# Resource: aws_ebs_snapshot_copy
+
+Creates a copy of an existing EBS Snapshot.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ebs_snapshot import EbsSnapshot
+from imports.aws.ebs_snapshot_copy import EbsSnapshotCopy
+from imports.aws.ebs_volume import EbsVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = EbsVolume(self, "example",
+            availability_zone="us-west-2a",
+            size=40,
+            tags={
+                "Name": "HelloWorld"
+            }
+        )
+        example_snapshot = EbsSnapshot(self, "example_snapshot",
+            tags={
+                "Name": "HelloWorld_snap"
+            },
+            volume_id=example.id
+        )
+        EbsSnapshotCopy(self, "example_copy",
+            source_region="us-west-2",
+            source_snapshot_id=example_snapshot.id,
+            tags={
+                "Name": "HelloWorld_copy_snap"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) A description of what the snapshot is.
+* `encrypted` - (Optional) Whether the snapshot is encrypted (see the sketch below).
+* `kms_key_id` - (Optional) The ARN for the KMS encryption key.
+* `source_snapshot_id` - (Required) The ARN for the snapshot to be copied.
+* `source_region` - (Required) The region of the source snapshot.
+* `storage_tier` - (Optional) The name of the storage tier. Valid values are `archive` and `standard`. Default value is `standard`.
+* `permanent_restore` - (Optional) Indicates whether to permanently restore an archived snapshot.
+* `temporary_restore_days` - (Optional) Specifies the number of days for which to temporarily restore an archived snapshot. Required for temporary restores only. The snapshot will be automatically re-archived after this period.
+* `tags` - (Optional) A map of tags for the snapshot. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
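+Because `encrypted` and `kms_key_id` apply to the copy independently of the source, a snapshot copy is also a common way to produce an encrypted snapshot from an unencrypted one, or to re-encrypt under a different key. A hedged sketch, reusing `example_snapshot` from the example above inside the same stack; the KMS key ARN is a hypothetical placeholder:
+
+```python
+EbsSnapshotCopy(self, "example_encrypted_copy",
+    source_region="us-west-2",
+    source_snapshot_id=example_snapshot.id,
+    # The copy is encrypted even though the source snapshot is not.
+    encrypted=True,
+    kms_key_id="arn:aws:kms:us-west-2:123456789012:key/11111111-2222-3333-4444-555555555555",
+    tags={
+        "Name": "HelloWorld_encrypted_copy"
+    }
+)
+```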
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the EBS Snapshot.
+* `id` - The snapshot ID (e.g., snap-59fcb34e).
+* `owner_id` - The AWS account ID of the snapshot owner.
+* `owner_alias` - Value from an Amazon-maintained list (`amazon`, `aws-marketplace`, `microsoft`) of snapshot owners.
+* `volume_size` - The size of the drive in GiBs.
+* `data_encryption_key_id` - The data encryption key identifier for the snapshot.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ebs_snapshot_import.html.markdown b/website/docs/cdktf/python/r/ebs_snapshot_import.html.markdown
new file mode 100644
index 00000000000..78b4d3bfe0b
--- /dev/null
+++ b/website/docs/cdktf/python/r/ebs_snapshot_import.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_snapshot_import"
+description: |-
+  Provides an elastic block storage snapshot import resource.
+---
+
+
+# Resource: aws_ebs_snapshot_import
+
+Imports a disk image from S3 as a Snapshot.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ebs_snapshot_import import EbsSnapshotImport, EbsSnapshotImportDiskContainer, EbsSnapshotImportDiskContainerUserBucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EbsSnapshotImport(self, "example",
+            disk_container=EbsSnapshotImportDiskContainer(
+                format="VHD",
+                user_bucket=EbsSnapshotImportDiskContainerUserBucket(
+                    s3_bucket="disk-images",
+                    s3_key="source.vhd"
+                )
+            ),
+            role_name="disk-image-import",
+            tags={
+                "Name": "HelloWorld"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `client_data` - (Optional) The client-specific data. Detailed below, with a sketch after the block reference.
+* `description` - (Optional) The description string for the import snapshot task.
+* `disk_container` - (Required) Information about the disk container. Detailed below.
+* `encrypted` - (Optional) Specifies whether the destination snapshot of the imported image should be encrypted. The default KMS key for EBS is used unless you specify a non-default KMS key using KmsKeyId.
+* `kms_key_id` - (Optional) An identifier for the symmetric KMS key to use when creating the encrypted snapshot. This parameter is only required if you want to use a non-default KMS key; if this parameter is not specified, the default KMS key for EBS is used. If a KmsKeyId is specified, the Encrypted flag must also be set.
+* `storage_tier` - (Optional) The name of the storage tier. Valid values are `archive` and `standard`. Default value is `standard`.
+* `permanent_restore` - (Optional) Indicates whether to permanently restore an archived snapshot.
+* `temporary_restore_days` - (Optional) Specifies the number of days for which to temporarily restore an archived snapshot. Required for temporary restores only. The snapshot will be automatically re-archived after this period.
+* `role_name` - (Optional) The name of the IAM Role the VM Import/Export service will assume. This role needs certain permissions. See https://docs.aws.amazon.com/vm-import/latest/userguide/vmie_prereqs.html#vmimport-role. Default: `vmimport`
+* `tags` - (Optional) A map of tags to assign to the snapshot.
+
+### client_data Configuration Block
+
+* `comment` - (Optional) A user-defined comment about the disk upload.
+* `upload_start` - (Optional) The time that the disk upload starts.
+* `upload_end` - (Optional) The time that the disk upload ends.
+* `upload_size` - (Optional) The size of the uploaded disk image, in GiB.
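+If you need to record upload metadata alongside the import, a `client_data` block can be attached in the same way as `disk_container`. A hedged sketch; it assumes the bindings generate an `EbsSnapshotImportClientData` struct following the same naming pattern as the structs used above, and the timestamps are illustrative placeholders:
+
+```python
+EbsSnapshotImport(self, "example_with_client_data",
+    # Optional metadata about the upload itself.
+    client_data=EbsSnapshotImportClientData(
+        comment="uploaded by the nightly packer job",
+        upload_start="2023-08-01T00:00:00Z",
+        upload_end="2023-08-01T00:10:00Z"
+    ),
+    disk_container=EbsSnapshotImportDiskContainer(
+        format="VHD",
+        user_bucket=EbsSnapshotImportDiskContainerUserBucket(
+            s3_bucket="disk-images",
+            s3_key="source.vhd"
+        )
+    ),
+    role_name="disk-image-import"
+)
```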
+
+### disk_container Configuration Block
+
+* `description` - (Optional) The description of the disk image being imported.
+* `format` - (Required) The format of the disk image being imported. One of `VHD` or `VMDK`.
+* `url` - (Optional) The URL to the Amazon S3-based disk image being imported. It can either be a https URL (https://..) or an Amazon S3 URL (s3://..). One of `url` or `user_bucket` must be set.
+* `user_bucket` - (Optional) The Amazon S3 bucket for the disk image. One of `url` or `user_bucket` must be set. Detailed below.
+
+### user_bucket Configuration Block
+
+* `s3_bucket` - The name of the Amazon S3 bucket where the disk image is located.
+* `s3_key` - The file name of the disk image.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `60m`)
+- `delete` - (Default `10m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the EBS Snapshot.
+* `id` - The snapshot ID (e.g., snap-59fcb34e).
+* `owner_id` - The AWS account ID of the EBS snapshot owner.
+* `owner_alias` - Value from an Amazon-maintained list (`amazon`, `aws-marketplace`, `microsoft`) of snapshot owners.
+* `volume_size` - The size of the drive in GiBs.
+* `data_encryption_key_id` - The data encryption key identifier for the snapshot.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ebs_volume.html.markdown b/website/docs/cdktf/python/r/ebs_volume.html.markdown
new file mode 100644
index 00000000000..340ba9f88db
--- /dev/null
+++ b/website/docs/cdktf/python/r/ebs_volume.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_volume"
+description: |-
+  Provides an elastic block storage resource.
+---
+
+
+# Resource: aws_ebs_volume
+
+Manages a single EBS volume.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ebs_volume import EbsVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EbsVolume(self, "example",
+            availability_zone="us-west-2a",
+            size=40,
+            tags={
+                "Name": "HelloWorld"
+            }
+        )
+```
+
+~> **NOTE:** At least one of `size` or `snapshot_id` is required when specifying an EBS volume.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `availability_zone` - (Required) The AZ where the EBS volume will exist.
+* `encrypted` - (Optional) If true, the disk will be encrypted.
+* `final_snapshot` - (Optional) If true, snapshot will be created before volume deletion. Any tags on the volume will be migrated to the snapshot. Defaults to `false`.
+* `iops` - (Optional) The amount of IOPS to provision for the disk. Only valid for `type` of `io1`, `io2` or `gp3`.
+* `multi_attach_enabled` - (Optional) Specifies whether to enable Amazon EBS Multi-Attach. Multi-Attach is supported on `io1` and `io2` volumes.
+* `size` - (Optional) The size of the drive in GiBs.
+* `snapshot_id` - (Optional) A snapshot to base the EBS volume off of.
+* `outpost_arn` - (Optional) The Amazon Resource Name (ARN) of the Outpost.
+* `type` - (Optional) The type of EBS volume. Can be `standard`, `gp2`, `gp3`, `io1`, `io2`, `sc1` or `st1` (Default: `gp2`).
+* `kms_key_id` - (Optional) The ARN for the KMS encryption key. When specifying `kms_key_id`, `encrypted` needs to be set to true. Note: Terraform must be running with credentials which have the `GenerateDataKeyWithoutPlaintext` permission on the specified KMS key as required by the [EBS KMS CMK volume provisioning process](https://docs.aws.amazon.com/kms/latest/developerguide/services-ebs.html#ebs-cmk) to prevent a volume from being created and almost immediately deleted.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `throughput` - (Optional) The throughput that the volume supports, in MiB/s. Only valid for `type` of `gp3` (see the `gp3` sketch below).
+
+~> **NOTE:** When changing the `size`, `iops` or `type` of a volume, there are [considerations](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/considerations.html) to be aware of.
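+Since `iops` and `throughput` are only valid for certain volume types, a `gp3` volume is the common case where both appear together. A minimal sketch using the same `EbsVolume` binding as the example above, inside the same stack:
+
+```python
+EbsVolume(self, "example_gp3",
+    availability_zone="us-west-2a",
+    size=100,
+    type="gp3",
+    # iops and throughput are only accepted for gp3 (iops also for io1/io2).
+    iops=4000,
+    throughput=250,
+    tags={
+        "Name": "HelloWorldGp3"
+    }
+)
```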
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The volume ID (e.g., vol-59fcb34e).
+* `arn` - The volume ARN (e.g., arn:aws:ec2:us-east-1:0123456789012:volume/vol-59fcb34e).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `5m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EBS Volumes using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EBS Volumes using the `id`. For example:
+
+```console
+% terraform import aws_ebs_volume.id vol-049df61146c4d7901
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_availability_zone_group.html.markdown b/website/docs/cdktf/python/r/ec2_availability_zone_group.html.markdown
index c41d1a12d71..97df87e3d60 100644
--- a/website/docs/cdktf/python/r/ec2_availability_zone_group.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_availability_zone_group.html.markdown
@@ -41,18 +41,29 @@ The following arguments are required:
 
 * `group_name` - (Required) Name of the Availability Zone Group.
 * `opt_in_status` - (Required) Indicates whether to enable or disable Availability Zone Group. Valid values: `opted-in` or `not-opted-in`.
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - Name of the Availability Zone Group. ## Import -EC2 Availability Zone Groups can be imported using the group name, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EC2 Availability Zone Groups using the group name. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_availability_zone_group.example us-west-2-lax-1 + +Using `terraform import`, import EC2 Availability Zone Groups using the group name. For example: + +```console +% terraform import aws_ec2_availability_zone_group.example us-west-2-lax-1 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_capacity_reservation.html.markdown b/website/docs/cdktf/python/r/ec2_capacity_reservation.html.markdown index 23bb48777d2..8c767d30158 100644 --- a/website/docs/cdktf/python/r/ec2_capacity_reservation.html.markdown +++ b/website/docs/cdktf/python/r/ec2_capacity_reservation.html.markdown @@ -36,7 +36,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `availability_zone` - (Required) The Availability Zone in which to create the Capacity Reservation. * `ebs_optimized` - (Optional) Indicates whether the Capacity Reservation supports EBS-optimized instances. @@ -52,9 +52,9 @@ The following arguments are supported: * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `tenancy` - (Optional) Indicates the tenancy of the Capacity Reservation. Specify either `default` or `dedicated`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The Capacity Reservation ID. * `owner_id` - The ID of the AWS account that owns the Capacity Reservation. @@ -63,10 +63,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Capacity Reservations can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Capacity Reservations using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_capacity_reservation.web cr-0123456789abcdef0 + +Using `terraform import`, import Capacity Reservations using the `id`. 
For example: + +```console +% terraform import aws_ec2_capacity_reservation.web cr-0123456789abcdef0 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_carrier_gateway.html.markdown b/website/docs/cdktf/python/r/ec2_carrier_gateway.html.markdown index 6c724ac5e09..abc4ace6b72 100644 --- a/website/docs/cdktf/python/r/ec2_carrier_gateway.html.markdown +++ b/website/docs/cdktf/python/r/ec2_carrier_gateway.html.markdown @@ -36,14 +36,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `vpc_id` - (Required) The ID of the VPC to associate with the carrier gateway. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the carrier gateway. * `id` - The ID of the carrier gateway. @@ -52,11 +52,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_carrier_gateway` can be imported using the carrier gateway's ID, -e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_carrier_gateway` using the carrier gateway's ID. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_carrier_gateway.example cgw-12345 + +Using `terraform import`, import `aws_ec2_carrier_gateway` using the carrier gateway's ID. For example: + +```console +% terraform import aws_ec2_carrier_gateway.example cgw-12345 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_client_vpn_authorization_rule.html.markdown b/website/docs/cdktf/python/r/ec2_client_vpn_authorization_rule.html.markdown index 75f0922c1e6..4b272e9ac6a 100644 --- a/website/docs/cdktf/python/r/ec2_client_vpn_authorization_rule.html.markdown +++ b/website/docs/cdktf/python/r/ec2_client_vpn_authorization_rule.html.markdown @@ -36,7 +36,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `client_vpn_endpoint_id` - (Required) The ID of the Client VPN endpoint. * `target_network_cidr` - (Required) The IPv4 address range, in CIDR notation, of the network to which the authorization rule applies. @@ -44,9 +44,9 @@ The following arguments are supported: * `authorize_all_groups` - (Optional) Indicates whether the authorization rule grants access to all clients. One of `access_group_id` or `authorize_all_groups` must be set. * `description` - (Optional) A brief description of the authorization rule. -## Attributes Reference +## Attribute Reference -No additional attributes are exported. +This resource exports no additional attributes. 
## Timeouts @@ -57,14 +57,42 @@ No additional attributes are exported. ## Import -AWS Client VPN authorization rules can be imported using the endpoint ID and target network CIDR. If there is a specific group name that is included as well. All values are separated by a `,`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN authorization rules using the endpoint ID and target network CIDR. If there is a specific group name, include that also. All values are separated by a `,`. For example: +Using the endpoint ID and target network CIDR: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24 + +Using the endpoint ID, target network CIDR, and group name: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` +**Using `terraform import` to import** AWS Client VPN authorization rules using the endpoint ID and target network CIDR. If there is a specific group name, include that also. All values are separated by a `,`. For example: + +Using the endpoint ID and target network CIDR: + +```console +% terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24 ``` -$ terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24,team-a + +Using the endpoint ID, target network CIDR, and group name: + +```console +% terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24,team-a ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_client_vpn_endpoint.html.markdown b/website/docs/cdktf/python/r/ec2_client_vpn_endpoint.html.markdown index eb6c7963d40..a701b1efa73 100644 --- a/website/docs/cdktf/python/r/ec2_client_vpn_endpoint.html.markdown +++ b/website/docs/cdktf/python/r/ec2_client_vpn_endpoint.html.markdown @@ -46,7 +46,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `authentication_options` - (Required) Information about the authentication method to be used to authenticate clients. * `client_cidr_block` - (Required) The IPv4 address range, in CIDR notation, from which to assign client IP addresses. The address range cannot overlap with the local CIDR of the VPC in which the associated subnet is located, or the routes that you add manually. The address range cannot be changed after the Client VPN endpoint has been created. The CIDR block should be /22 or greater. @@ -93,9 +93,9 @@ One of the following arguments must be supplied: * `cloudwatch_log_stream` - (Optional) The name of the CloudWatch Logs log stream to which the connection data is published. * `enabled` - (Required) Indicates whether connection logging is enabled. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Client VPN endpoint. * `dns_name` - The DNS name to be used by clients when establishing their VPN session. @@ -104,10 +104,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -AWS Client VPN endpoints can be imported using the `id` value found via `aws ec2 describe-client-vpn-endpoints`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN endpoints using the `id` value found via `aws ec2 describe-client-vpn-endpoints`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_client_vpn_endpoint.example cvpn-endpoint-0ac3a1abbccddd666 + +Using `terraform import`, import AWS Client VPN endpoints using the `id` value found via `aws ec2 describe-client-vpn-endpoints`. For example: + +```console +% terraform import aws_ec2_client_vpn_endpoint.example cvpn-endpoint-0ac3a1abbccddd666 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_client_vpn_network_association.html.markdown b/website/docs/cdktf/python/r/ec2_client_vpn_network_association.html.markdown index 03694af8181..5a34ded92e3 100644 --- a/website/docs/cdktf/python/r/ec2_client_vpn_network_association.html.markdown +++ b/website/docs/cdktf/python/r/ec2_client_vpn_network_association.html.markdown @@ -35,14 +35,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `client_vpn_endpoint_id` - (Required) The ID of the Client VPN endpoint. * `subnet_id` - (Required) The ID of the subnet to associate with the Client VPN endpoint. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The unique ID of the target network association. * `association_id` - The unique ID of the target network association. @@ -57,10 +57,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -AWS Client VPN network associations can be imported using the endpoint ID and the association ID. Values are separated by a `,`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN network associations using the endpoint ID and the association ID. Values are separated by a `,`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_client_vpn_network_association.example cvpn-endpoint-0ac3a1abbccddd666,vpn-assoc-0b8db902465d069ad + +Using `terraform import`, import AWS Client VPN network associations using the endpoint ID and the association ID. Values are separated by a `,`. 
For example: + +```console +% terraform import aws_ec2_client_vpn_network_association.example cvpn-endpoint-0ac3a1abbccddd666,vpn-assoc-0b8db902465d069ad ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_client_vpn_route.html.markdown b/website/docs/cdktf/python/r/ec2_client_vpn_route.html.markdown index e2e7e57f090..211a9b2f79b 100644 --- a/website/docs/cdktf/python/r/ec2_client_vpn_route.html.markdown +++ b/website/docs/cdktf/python/r/ec2_client_vpn_route.html.markdown @@ -60,16 +60,16 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `client_vpn_endpoint_id` - (Required) The ID of the Client VPN endpoint. * `destination_cidr_block` - (Required) The IPv4 address range, in CIDR notation, of the route destination. * `description` - (Optional) A brief description of the route. * `target_vpc_subnet_id` - (Required) The ID of the Subnet to route the traffic through. It must already be attached to the Client VPN. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the Client VPN endpoint. * `origin` - Indicates how the Client VPN route was added. Will be `add-route` for routes created by this resource. @@ -84,10 +84,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -AWS Client VPN routes can be imported using the endpoint ID, target subnet ID, and destination CIDR block. All values are separated by a `,`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN routes using the endpoint ID, target subnet ID, and destination CIDR block. All values are separated by a `,`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_client_vpn_route.example cvpn-endpoint-1234567890abcdef,subnet-9876543210fedcba,10.1.0.0/24 + +Using `terraform import`, import AWS Client VPN routes using the endpoint ID, target subnet ID, and destination CIDR block. All values are separated by a `,`. For example: + +```console +% terraform import aws_ec2_client_vpn_route.example cvpn-endpoint-1234567890abcdef,subnet-9876543210fedcba,10.1.0.0/24 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_fleet.html.markdown b/website/docs/cdktf/python/r/ec2_fleet.html.markdown index eeae5e5a1be..3c05a02c717 100644 --- a/website/docs/cdktf/python/r/ec2_fleet.html.markdown +++ b/website/docs/cdktf/python/r/ec2_fleet.html.markdown @@ -43,7 +43,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `context` - (Optional) Reserved. * `excess_capacity_termination_policy` - (Optional) Whether running instances should be terminated if the total target capacity of the EC2 Fleet is decreased below the current size of the EC2. Valid values: `no-termination`, `termination`. Defaults to `termination`. Supported only for fleets of type `maintain`. 
@@ -220,9 +220,9 @@ This configuration block supports the following: * `total_target_capacity` - (Required) The number of units to request, filled using `default_target_capacity_type`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - Fleet identifier * `arn` - The ARN of the fleet @@ -246,10 +246,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_fleet` can be imported by using the Fleet identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_fleet` using the Fleet identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_fleet.example fleet-b9b55d27-c5fc-41ac-a6f3-48fcc91f080c + +Using `terraform import`, import `aws_ec2_fleet` using the Fleet identifier. For example: + +```console +% terraform import aws_ec2_fleet.example fleet-b9b55d27-c5fc-41ac-a6f3-48fcc91f080c ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_host.html.markdown b/website/docs/cdktf/python/r/ec2_host.html.markdown index 3d544fd6c99..53e36ce9a83 100644 --- a/website/docs/cdktf/python/r/ec2_host.html.markdown +++ b/website/docs/cdktf/python/r/ec2_host.html.markdown @@ -36,8 +36,9 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: +* `asset_id` - (Optional) The ID of the Outpost hardware asset on which to allocate the Dedicated Hosts. This parameter is supported only if you specify OutpostArn. If you are allocating the Dedicated Hosts in a Region, omit this parameter. * `auto_placement` - (Optional) Indicates whether the host accepts any untargeted instance launches that match its instance type configuration, or if it only accepts Host tenancy instance launches that specify its unique host ID. Valid values: `on`, `off`. Default: `on`. * `availability_zone` - (Required) The Availability Zone in which to allocate the Dedicated Host. * `host_recovery` - (Optional) Indicates whether to enable or disable host recovery for the Dedicated Host. Valid values: `on`, `off`. Default: `off`. @@ -46,9 +47,9 @@ The following arguments are supported: * `outpost_arn` - (Optional) The Amazon Resource Name (ARN) of the AWS Outpost on which to allocate the Dedicated Host. * `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the allocated Dedicated Host. This is used to launch an instance onto a specific host. * `arn` - The ARN of the Dedicated Host. 
@@ -57,10 +58,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Hosts can be imported using the host `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import hosts using the host `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_host.example h-0385a99d0e4b20cbb + +Using `terraform import`, import hosts using the host `id`. For example: + +```console +% terraform import aws_ec2_host.example h-0385a99d0e4b20cbb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_instance_connect_endpoint.html.markdown b/website/docs/cdktf/python/r/ec2_instance_connect_endpoint.html.markdown new file mode 100644 index 00000000000..462b5c30b26 --- /dev/null +++ b/website/docs/cdktf/python/r/ec2_instance_connect_endpoint.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_ec2_instance_connect_endpoint" +description: |- + Provides an EC2 Instance Connect Endpoint resource. +--- + + + +# Resource: aws_ec2_instance_connect_endpoint + +Manages an EC2 Instance Connect Endpoint. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ec2_instance_connect_endpoint import Ec2InstanceConnectEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Ec2InstanceConnectEndpoint(self, "example", + subnet_id=Token.as_string(aws_subnet_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `preserve_client_ip` - (Optional) Indicates whether your client's IP address is preserved as the source. Default: `true`. +* `security_group_ids` - (Optional) One or more security groups to associate with the endpoint. If you don't specify a security group, the default security group for the VPC will be associated with the endpoint. +* `subnet_id` - (Required) The ID of the subnet in which to create the EC2 Instance Connect Endpoint. +* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `delete` - (Default `10m`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the EC2 Instance Connect Endpoint. +* `availability_zone` - The Availability Zone of the EC2 Instance Connect Endpoint. +* `dns_name` - The DNS name of the EC2 Instance Connect Endpoint. +* `fips_dns_name` - The DNS name of the EC2 Instance Connect FIPS Endpoint. 
+* `network_interface_ids` - The IDs of the ENIs that Amazon EC2 automatically created when creating the EC2 Instance Connect Endpoint. +* `owner_id` - The ID of the AWS account that created the EC2 Instance Connect Endpoint. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_id` - The ID of the VPC in which the EC2 Instance Connect Endpoint was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EC2 Instance Connect Endpoints using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EC2 Instance Connect Endpoints using the `id`. For example: + +```console +% terraform import aws_ec2_instance_connect_endpoint.example eice-012345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_instance_state.html.markdown b/website/docs/cdktf/python/r/ec2_instance_state.html.markdown index 89d7e2c157e..93be781cf0b 100644 --- a/website/docs/cdktf/python/r/ec2_instance_state.html.markdown +++ b/website/docs/cdktf/python/r/ec2_instance_state.html.markdown @@ -68,9 +68,9 @@ The following arguments are optional: * `force` - (Optional) Whether to request a forced stop when `state` is `stopped`. Otherwise (_i.e._, `state` is `running`), ignored. When an instance is forced to stop, it does not flush file system caches or file system metadata, and you must subsequently perform file system check and repair. Not recommended for Windows instances. Defaults to `false`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - ID of the instance (matches `instance_id`). @@ -84,10 +84,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_instance_state` can be imported by using the `instance_id` attribute, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_instance_state` using the `instance_id` attribute. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_instance_state.test i-02cae6557dfcf2f96 + +Using `terraform import`, import `aws_ec2_instance_state` using the `instance_id` attribute. 
For example: + +```console +% terraform import aws_ec2_instance_state.test i-02cae6557dfcf2f96 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_local_gateway_route.html.markdown b/website/docs/cdktf/python/r/ec2_local_gateway_route.html.markdown index a164c4409eb..e341a87bf46 100644 --- a/website/docs/cdktf/python/r/ec2_local_gateway_route.html.markdown +++ b/website/docs/cdktf/python/r/ec2_local_gateway_route.html.markdown @@ -41,18 +41,29 @@ The following arguments are required: * `local_gateway_route_table_id` - (Required) Identifier of EC2 Local Gateway Route Table. * `local_gateway_virtual_interface_group_id` - (Required) Identifier of EC2 Local Gateway Virtual Interface Group. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`) ## Import -`aws_ec2_local_gateway_route` can be imported by using the EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_local_gateway_route` using the EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`). For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_local_gateway_route.example lgw-rtb-12345678_172.16.0.0/16 + +Using `terraform import`, import `aws_ec2_local_gateway_route` using the EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`). For example: + +```console +% terraform import aws_ec2_local_gateway_route.example lgw-rtb-12345678_172.16.0.0/16 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_local_gateway_route_table_vpc_association.html.markdown b/website/docs/cdktf/python/r/ec2_local_gateway_route_table_vpc_association.html.markdown index adfba7f2d6f..f7fbc3fe0f9 100644 --- a/website/docs/cdktf/python/r/ec2_local_gateway_route_table_vpc_association.html.markdown +++ b/website/docs/cdktf/python/r/ec2_local_gateway_route_table_vpc_association.html.markdown @@ -57,19 +57,30 @@ The following arguments are optional: * `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - Identifier of EC2 Local Gateway Route Table VPC Association. * `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
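+A minimal hand-written sketch (not `cdktf convert` output) showing the required `local_gateway_route_table_id` and `vpc_id` arguments together; both identifiers are placeholder values:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ec2_local_gateway_route_table_vpc_association import Ec2LocalGatewayRouteTableVpcAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Associate a VPC with an Outposts local gateway route table.
+        # Both identifiers are placeholders.
+        Ec2LocalGatewayRouteTableVpcAssociation(self, "example",
+            local_gateway_route_table_id="lgw-rtb-12345678",
+            vpc_id="vpc-12345678"
+        )
+```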
## Import -`aws_ec2_local_gateway_route_table_vpc_association` can be imported by using the Local Gateway Route Table VPC Association identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_local_gateway_route_table_vpc_association` using the Local Gateway Route Table VPC Association identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_local_gateway_route_table_vpc_association.example lgw-vpc-assoc-1234567890abcdef + +Using `terraform import`, import `aws_ec2_local_gateway_route_table_vpc_association` using the Local Gateway Route Table VPC Association identifier. For example: + +```console +% terraform import aws_ec2_local_gateway_route_table_vpc_association.example lgw-vpc-assoc-1234567890abcdef ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_managed_prefix_list.html.markdown b/website/docs/cdktf/python/r/ec2_managed_prefix_list.html.markdown index 34673ccec5f..b55d0960482 100644 --- a/website/docs/cdktf/python/r/ec2_managed_prefix_list.html.markdown +++ b/website/docs/cdktf/python/r/ec2_managed_prefix_list.html.markdown @@ -60,7 +60,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `address_family` - (Required, Forces new resource) Address family (`IPv4` or `IPv6`) of this prefix list. * `entry` - (Optional) Configuration block for prefix list entry. Detailed below. Different entries may have overlapping CIDR blocks, but a particular CIDR should not be duplicated. @@ -73,9 +73,9 @@ The following arguments are supported: * `cidr` - (Required) CIDR block of this entry. * `description` - (Optional) Description of this entry. Due to API limitations, updating only the description of an existing entry requires temporarily removing and re-adding the entry. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the prefix list. * `id` - ID of the prefix list. @@ -85,10 +85,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Prefix Lists can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Prefix Lists using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_managed_prefix_list.default pl-0570a1d2d725c16be + +Using `terraform import`, import Prefix Lists using the `id`. 
For example:
+
+```console
+% terraform import aws_ec2_managed_prefix_list.default pl-0570a1d2d725c16be
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_managed_prefix_list_entry.html.markdown b/website/docs/cdktf/python/r/ec2_managed_prefix_list_entry.html.markdown
index 19da23afbac..44325d45ba7 100644
--- a/website/docs/cdktf/python/r/ec2_managed_prefix_list_entry.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_managed_prefix_list_entry.html.markdown
@@ -3,30 +3,22 @@
subcategory: "VPC (Virtual Private Cloud)"
layout: "aws"
page_title: "AWS: aws_ec2_managed_prefix_list_entry"
description: |-
- Provides a managed prefix list entry resource.
+ Use the `aws_ec2_managed_prefix_list_entry` resource to manage a managed prefix list entry.
---

# Resource: aws_ec2_managed_prefix_list_entry

-Provides a managed prefix list entry resource.
+Use the `aws_ec2_managed_prefix_list_entry` resource to manage a managed prefix list entry.

-~> **NOTE on Managed Prefix Lists and Managed Prefix List Entries:** Terraform
-currently provides both a standalone Managed Prefix List Entry resource (a single entry),
-and a [Managed Prefix List resource](ec2_managed_prefix_list.html) with entries defined
-in-line. At this time you cannot use a Managed Prefix List with in-line rules in
-conjunction with any Managed Prefix List Entry resources. Doing so will cause a conflict
-of entries and will overwrite entries.
+~> **NOTE:** Terraform currently provides two resources for managing Managed Prefix Lists and Managed Prefix List Entries. The standalone resource, [Managed Prefix List Entry](ec2_managed_prefix_list_entry.html), manages a single entry, while the [Managed Prefix List resource](ec2_managed_prefix_list.html) manages multiple entries defined in-line. You cannot use a Managed Prefix List with in-line rules in conjunction with any Managed Prefix List Entry resources: doing so results in a conflict of entries and causes entries to be overwritten.

-~> **NOTE on Managed Prefix Lists with many entries:** To improved execution times on larger
-updates, if you plan to create a prefix list with more than 100 entries, it is **recommended**
-that you use the inline `entry` block as part of the [Managed Prefix List resource](ec2_managed_prefix_list.html)
-resource instead.
+~> **NOTE:** To improve execution times on larger updates, it is recommended to use the inline `entry` block as part of the [Managed Prefix List resource](ec2_managed_prefix_list.html) when creating a prefix list with more than 100 entries, as sketched after the argument list below.

## Example Usage

-Basic usage
+Basic usage.

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
@@ -58,24 +50,35 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `cidr` - (Required) CIDR block of this entry.
-* `description` - (Optional) Description of this entry. Due to API limitations, updating only the description of an entry requires recreating the entry.
+* `description` - (Optional) Description of this entry. Note that due to API limitations, updating only the description of an entry will require recreating the entry.
* `prefix_list_id` - (Required) ID of the prefix list this entry belongs to.
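+For contrast with the standalone entry resource, a minimal hand-written sketch (not `cdktf convert` output) of the in-line `entry` form on the Managed Prefix List resource recommended above for lists with more than 100 entries; the list name, CIDR, and `max_entries` are placeholder values:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ec2_managed_prefix_list import Ec2ManagedPrefixList
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Entries are defined in-line, so this list must not also be managed
+        # by standalone aws_ec2_managed_prefix_list_entry resources.
+        Ec2ManagedPrefixList(self, "example",
+            address_family="IPv4",
+            entry=[{
+                "cidr": "10.0.3.0/24",
+                "description": "Primary"
+            }],
+            max_entries=5,
+            name="example"
+        )
+```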
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - ID of the managed prefix list entry. ## Import -Prefix List Entries can be imported using the `prefix_list_id` and `cidr` separated by a `,`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import prefix list entries using `prefix_list_id` and `cidr` separated by a comma (`,`). For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_managed_prefix_list_entry.default pl-0570a1d2d725c16be,10.0.3.0/24 + +Using `terraform import`, import prefix list entries using `prefix_list_id` and `cidr` separated by a comma (`,`). For example: + +```console +% terraform import aws_ec2_managed_prefix_list_entry.default pl-0570a1d2d725c16be,10.0.3.0/24 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_network_insights_analysis.html.markdown b/website/docs/cdktf/python/r/ec2_network_insights_analysis.html.markdown index bffa594940f..c126efd4af7 100644 --- a/website/docs/cdktf/python/r/ec2_network_insights_analysis.html.markdown +++ b/website/docs/cdktf/python/r/ec2_network_insights_analysis.html.markdown @@ -49,9 +49,9 @@ The following arguments are optional: * `wait_for_completion` - (Optional) If enabled, the resource will wait for the Network Insights Analysis status to change to `succeeded` or `failed`. Setting this to `false` will skip the process. Default: `true`. * `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `alternate_path_hints` - Potential intermediate components of a feasible path. Described below. * `arn` - ARN of the Network Insights Analysis. @@ -73,10 +73,21 @@ The `alternate_path_hints` object supports the following: ## Import -Network Insights Analyses can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Insights Analyses using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_network_insights_analysis.test nia-0462085c957f11a55 + +Using `terraform import`, import Network Insights Analyses using the `id`. 
For example: + +```console +% terraform import aws_ec2_network_insights_analysis.test nia-0462085c957f11a55 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_network_insights_path.html.markdown b/website/docs/cdktf/python/r/ec2_network_insights_path.html.markdown index dff190eb7eb..3239479130b 100644 --- a/website/docs/cdktf/python/r/ec2_network_insights_path.html.markdown +++ b/website/docs/cdktf/python/r/ec2_network_insights_path.html.markdown @@ -48,9 +48,9 @@ The following arguments are optional: * `destination_port` - (Optional) Destination port to analyze access to. * `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the Network Insights Path. * `id` - ID of the Network Insights Path. @@ -58,10 +58,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Network Insights Paths can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Insights Paths using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_network_insights_path.test nip-00edfba169923aefd + +Using `terraform import`, import Network Insights Paths using the `id`. For example: + +```console +% terraform import aws_ec2_network_insights_path.test nip-00edfba169923aefd ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_serial_console_access.html.markdown b/website/docs/cdktf/python/r/ec2_serial_console_access.html.markdown index 1f481ae4a19..fc1fcb344a7 100644 --- a/website/docs/cdktf/python/r/ec2_serial_console_access.html.markdown +++ b/website/docs/cdktf/python/r/ec2_serial_console_access.html.markdown @@ -35,20 +35,31 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `enabled` - (Optional) Whether or not serial console access is enabled. Valid values are `true` or `false`. Defaults to `true`. -## Attributes Reference +## Attribute Reference -No additional attributes are exported. +This resource exports no additional attributes. ## Import -Serial console access state can be imported, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import serial console access state. 
For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_serial_console_access.example default
+
+Using `terraform import`, import serial console access state. For example:
+
+```console
+% terraform import aws_ec2_serial_console_access.example default
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_subnet_cidr_reservation.html.markdown b/website/docs/cdktf/python/r/ec2_subnet_cidr_reservation.html.markdown
index 3e079c505d9..ffe61484886 100644
--- a/website/docs/cdktf/python/r/ec2_subnet_cidr_reservation.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_subnet_cidr_reservation.html.markdown
@@ -35,26 +35,37 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `cidr_block` - (Required) The CIDR block for the reservation.
* `reservation_type` - (Required) The type of reservation to create. Valid values: `explicit`, `prefix`
* `subnet_id` - (Required) The ID of the subnet to create the reservation for.
* `description` - (Optional) A brief description of the reservation.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - ID of the CIDR reservation.
* `owner_id` - ID of the AWS account that owns this CIDR reservation.

## Import

-Existing CIDR reservations can be imported using `SUBNET_ID:RESERVATION_ID`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import existing CIDR reservations using `SUBNET_ID:RESERVATION_ID`. For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_subnet_cidr_reservation.example subnet-01llsxvsxabqiymcz:scr-4mnvz6wb7otksjcs9
+
+Using `terraform import`, import existing CIDR reservations using `SUBNET_ID:RESERVATION_ID`. For example:
+
+```console
+% terraform import aws_ec2_subnet_cidr_reservation.example subnet-01llsxvsxabqiymcz:scr-4mnvz6wb7otksjcs9
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_tag.html.markdown b/website/docs/cdktf/python/r/ec2_tag.html.markdown
index 83c43234b22..54fafc5e87b 100644
--- a/website/docs/cdktf/python/r/ec2_tag.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_tag.html.markdown
@@ -59,24 +59,35 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `resource_id` - (Required) The ID of the EC2 resource to manage the tag for.
* `key` - (Required) The tag name.
* `value` - (Required) The value of the tag.
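+A minimal hand-written sketch (not `cdktf convert` output) tying the three arguments together; the transit gateway attachment ID is a placeholder value:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ec2_tag import Ec2Tag
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Manage a single Name tag on an existing EC2 resource
+        # (here a placeholder transit gateway attachment ID).
+        Ec2Tag(self, "example",
+            resource_id="tgw-attach-1234567890abcdef",
+            key="Name",
+            value="example"
+        )
+```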
-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - EC2 resource identifier and key, separated by a comma (`,`)

## Import

-`aws_ec2_tag` can be imported by using the EC2 resource identifier and key, separated by a comma (`,`), e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_tag` using the EC2 resource identifier and key, separated by a comma (`,`). For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_tag.example tgw-attach-1234567890abcdef,Name
+
+Using `terraform import`, import `aws_ec2_tag` using the EC2 resource identifier and key, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_ec2_tag.example tgw-attach-1234567890abcdef,Name
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_traffic_mirror_filter.html.markdown b/website/docs/cdktf/python/r/ec2_traffic_mirror_filter.html.markdown
index 598a8626cac..8b5344e58ab 100644
--- a/website/docs/cdktf/python/r/ec2_traffic_mirror_filter.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_traffic_mirror_filter.html.markdown
@@ -37,15 +37,15 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `description` - (Optional, Forces new resource) A description of the filter.
* `network_services` - (Optional) List of Amazon network services that should be mirrored. Valid values: `amazon-dns`.
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the traffic mirror filter.
* `id` - The ID of the traffic mirror filter.
@@ -53,10 +53,21 @@ In addition to all arguments above, the following attributes are exported:

## Import

-Traffic mirror filter can be imported using the `id`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror filters using the `id`. For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_traffic_mirror_filter.foo tmf-0fbb93ddf38198f64
+
+Using `terraform import`, import traffic mirror filters using the `id`.
For example:
+
+```console
+% terraform import aws_ec2_traffic_mirror_filter.foo tmf-0fbb93ddf38198f64
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_traffic_mirror_filter_rule.html.markdown b/website/docs/cdktf/python/r/ec2_traffic_mirror_filter_rule.html.markdown
index 9fe692c36a6..fe5433fb19c 100644
--- a/website/docs/cdktf/python/r/ec2_traffic_mirror_filter_rule.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_traffic_mirror_filter_rule.html.markdown
@@ -65,7 +65,7 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `description` - (Optional) Description of the traffic mirror filter rule.
* `traffic_mirror_filter_id` - (Required) ID of the traffic mirror filter to which this rule should be added.
@@ -83,19 +83,30 @@ Traffic mirror port range supports the following attributes:

* `from_port` - (Optional) Starting port of the range
* `to_port` - (Optional) Ending port of the range

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `arn` - ARN of the traffic mirror filter rule.
* `id` - ID of the traffic mirror filter rule.

## Import

-Traffic mirror rules can be imported using the `traffic_mirror_filter_id` and `id` separated by `:` e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror rules using the `traffic_mirror_filter_id` and `id` separated by `:`. For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_traffic_mirror_filter_rule.rule tmf-0fbb93ddf38198f64:tmfr-05a458f06445d0aee
+
+Using `terraform import`, import traffic mirror rules using the `traffic_mirror_filter_id` and `id` separated by `:`. For example:
+
+```console
+% terraform import aws_ec2_traffic_mirror_filter_rule.rule tmf-0fbb93ddf38198f64:tmfr-05a458f06445d0aee
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_traffic_mirror_session.html.markdown b/website/docs/cdktf/python/r/ec2_traffic_mirror_session.html.markdown
index 533f2d9435f..2ffedc6860e 100644
--- a/website/docs/cdktf/python/r/ec2_traffic_mirror_session.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_traffic_mirror_session.html.markdown
@@ -49,7 +49,7 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `description` - (Optional) A description of the traffic mirror session.
* `network_interface_id` - (Required, Forces new) ID of the source network interface. Not all network interfaces are eligible as mirror sources. On EC2 instances, only Nitro-based instances support mirroring.
@@ -60,9 +60,9 @@ The following arguments are supported:

* `virtual_network_id` - (Optional) The VXLAN ID for the Traffic Mirror session. For more information about the VXLAN protocol, see RFC 7348. If you do not specify a VirtualNetworkId, an account-wide unique ID is chosen at random.
* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the traffic mirror session.
* `id` - The ID of the session.
@@ -71,10 +71,21 @@ In addition to all arguments above, the following attributes are exported:

## Import

-Traffic mirror sessions can be imported using the `id`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror sessions using the `id`. For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_traffic_mirror_session.session tms-0d8aa3ca35897b82e
+
+Using `terraform import`, import traffic mirror sessions using the `id`. For example:
+
+```console
+% terraform import aws_ec2_traffic_mirror_session.session tms-0d8aa3ca35897b82e
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_traffic_mirror_target.html.markdown b/website/docs/cdktf/python/r/ec2_traffic_mirror_target.html.markdown
index 45d7cf5544d..6657c1aa290 100644
--- a/website/docs/cdktf/python/r/ec2_traffic_mirror_target.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_traffic_mirror_target.html.markdown
@@ -45,7 +45,7 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `description` - (Optional, Forces new) A description of the traffic mirror target.
* `network_interface_id` - (Optional, Forces new) The network interface ID that is associated with the target.
* `network_load_balancer_arn` - (Optional, Forces new) The Amazon Resource Name (ARN) of the Network Load Balancer that is associated with the target.
@@ -55,9 +55,9 @@ The following arguments are supported:

**NOTE:** Either `network_interface_id` or `network_load_balancer_arn` must be specified, but not both.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the Traffic Mirror target.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
@@ -66,10 +66,21 @@ In addition to all arguments above, the following attributes are exported:

## Import

-Traffic mirror targets can be imported using the `id`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror targets using the `id`.
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_traffic_mirror_target.target tmt-0c13a005422b86606 + +Using `terraform import`, import traffic mirror targets using the `id`. For example: + +```console +% terraform import aws_ec2_traffic_mirror_target.target tmt-0c13a005422b86606 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway.html.markdown index 318b71200fd..413f30a3e35 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway.html.markdown @@ -33,7 +33,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `amazon_side_asn` - (Optional) Private Autonomous System Number (ASN) for the Amazon side of a BGP session. The range is `64512` to `65534` for 16-bit ASNs and `4200000000` to `4294967294` for 32-bit ASNs. Default value: `64512`. @@ -49,9 +49,9 @@ The following arguments are supported: * `transit_gateway_cidr_blocks` - (Optional) One or more IPv4 or IPv6 CIDR blocks for the transit gateway. Must be a size /24 CIDR block or larger for IPv4, or a size /64 CIDR block or larger for IPv6. * `vpn_ecmp_support` - (Optional) Whether VPN Equal Cost Multipath Protocol support is enabled. Valid values: `disable`, `enable`. Default value: `enable`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Amazon Resource Name (ARN) * `association_default_route_table_id` - Identifier of the default association route table @@ -70,10 +70,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway` can be imported by using the EC2 Transit Gateway identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway` using the EC2 Transit Gateway identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway.example tgw-12345678 + +Using `terraform import`, import `aws_ec2_transit_gateway` using the EC2 Transit Gateway identifier. 
For example:
+
+```console
+% terraform import aws_ec2_transit_gateway.example tgw-12345678
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_connect.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_connect.html.markdown
index 21a7481e2ed..fb4375defc3 100644
--- a/website/docs/cdktf/python/r/ec2_transit_gateway_connect.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_transit_gateway_connect.html.markdown
@@ -40,18 +40,18 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

-* `protocol` - (Optional) The tunnel protocol. Valida values: `gre`. Default is `gre`.
+* `protocol` - (Optional) The tunnel protocol. Valid values: `gre`. Default is `gre`.
* `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Connect. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `transit_gateway_default_route_table_association` - (Optional) Boolean whether the Connect should be associated with the EC2 Transit Gateway association default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`.
* `transit_gateway_default_route_table_propagation` - (Optional) Boolean whether the Connect should propagate routes with the EC2 Transit Gateway propagation default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`.
* `transit_gateway_id` - (Required) Identifier of EC2 Transit Gateway.
* `transport_attachment_id` - (Required) The underlying VPC attachment.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - EC2 Transit Gateway Attachment identifier
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
@@ -66,10 +66,21 @@ In addition to all arguments above, the following attributes are exported:

## Import

-`aws_ec2_transit_gateway_connect` can be imported by using the EC2 Transit Gateway Connect identifier, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_connect` using the EC2 Transit Gateway Connect identifier. For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_transit_gateway_connect.example tgw-attach-12345678
+
+Using `terraform import`, import `aws_ec2_transit_gateway_connect` using the EC2 Transit Gateway Connect identifier.
For example:
+
+```console
+% terraform import aws_ec2_transit_gateway_connect.example tgw-attach-12345678
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_connect_peer.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_connect_peer.html.markdown
index 2076d81bee9..2f35405badc 100644
--- a/website/docs/cdktf/python/r/ec2_transit_gateway_connect_peer.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_transit_gateway_connect_peer.html.markdown
@@ -43,7 +43,7 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `bgp_asn` - (Optional) The BGP ASN number assigned to the customer device. If not provided, it will use the same BGP ASN as is associated with the Transit Gateway.
* `inside_cidr_blocks` - (Required) The CIDR block that will be used for addressing within the tunnel. It must contain exactly one IPv4 CIDR block and up to one IPv6 CIDR block. The IPv4 CIDR block must be /29 size and must be within the 169.254.0.0/16 range, with the exception of: 169.254.0.0/29, 169.254.1.0/29, 169.254.2.0/29, 169.254.3.0/29, 169.254.4.0/29, 169.254.5.0/29, 169.254.169.248/29. The IPv6 CIDR block must be /125 size and must be within fd00::/8. The first IP from each CIDR block is assigned to the customer gateway; the second and third are for the Transit Gateway (for example, from the range 169.254.100.0/29, .1 is assigned to the customer gateway and .2 and .3 are assigned to the Transit Gateway).
@@ -52,9 +52,9 @@ The following arguments are supported:

* `transit_gateway_address` - (Optional) The IP address assigned to the Transit Gateway, which will be used as the tunnel endpoint. This address must be from the associated Transit Gateway CIDR block. The address must be from the same address family as `peer_address`. If not set explicitly, it will be selected from the associated Transit Gateway CIDR blocks.
* `transit_gateway_attachment_id` - (Required) Identifier of the Transit Gateway Connect attachment.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - EC2 Transit Gateway Connect Peer identifier
* `arn` - EC2 Transit Gateway Connect Peer ARN
@@ -71,10 +71,21 @@ In addition to all arguments above, the following attributes are exported:

## Import

-`aws_ec2_transit_gateway_connect_peer` can be imported by using the EC2 Transit Gateway Connect Peer identifier, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_connect_peer` using the EC2 Transit Gateway Connect Peer identifier. For example:
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_transit_gateway_connect_peer.example tgw-connect-peer-12345678
+
+Using `terraform import`, import `aws_ec2_transit_gateway_connect_peer` using the EC2 Transit Gateway Connect Peer identifier.
For example: + +```console +% terraform import aws_ec2_transit_gateway_connect_peer.example tgw-connect-peer-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain.html.markdown index ddc7a5bcfe8..b8b4ad1e29b 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain.html.markdown @@ -141,7 +141,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_id` - (Required) EC2 Transit Gateway identifier. The EC2 Transit Gateway must have `multicast_support` enabled. * `auto_accept_shared_associations` - (Optional) Whether to automatically accept cross-account subnet associations that are associated with the EC2 Transit Gateway Multicast Domain. Valid values: `disable`, `enable`. Default value: `disable`. @@ -149,9 +149,9 @@ The following arguments are supported: * `static_sources_support` - (Optional) Whether to enable support for statically configuring multicast group sources for the EC2 Transit Gateway Multicast Domain. Valid values: `disable`, `enable`. Default value: `disable`. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Multicast Domain. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Domain identifier. * `arn` - EC2 Transit Gateway Multicast Domain Amazon Resource Name (ARN). @@ -167,10 +167,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_multicast_domain` can be imported by using the EC2 Transit Gateway Multicast Domain identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_multicast_domain` using the EC2 Transit Gateway Multicast Domain identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -terraform import aws_ec2_transit_gateway_multicast_domain.example tgw-mcast-domain-12345 + +Using `terraform import`, import `aws_ec2_transit_gateway_multicast_domain` using the EC2 Transit Gateway Multicast Domain identifier. 
For example: + +```console +% terraform import aws_ec2_transit_gateway_multicast_domain.example tgw-mcast-domain-12345 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain_association.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain_association.html.markdown index 8daca70bb3f..7638afa3734 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain_association.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_domain_association.html.markdown @@ -58,15 +58,15 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `subnet_id` - (Required) The ID of the subnet to associate with the transit gateway multicast domain. * `transit_gateway_attachment_id` - (Required) The ID of the transit gateway attachment. * `transit_gateway_multicast_domain_id` - (Required) The ID of the transit gateway multicast domain. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Domain Association identifier. @@ -77,4 +77,4 @@ In addition to all arguments above, the following attributes are exported: - `create` - (Default `10m`) - `delete` - (Default `10m`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_member.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_member.html.markdown index 891442db7dd..bb089a7c007 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_member.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_member.html.markdown @@ -36,16 +36,16 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `group_ip_address` - (Required) The IP address assigned to the transit gateway multicast group. * `network_interface_id` - (Required) The group members' network interface ID to register with the transit gateway multicast group. * `transit_gateway_multicast_domain_id` - (Required) The ID of the transit gateway multicast domain. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Group Member identifier. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_source.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_source.html.markdown index 02af800e0c2..f0e796a27fa 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_source.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_multicast_group_source.html.markdown @@ -36,16 +36,16 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `group_ip_address` - (Required) The IP address assigned to the transit gateway multicast group. 
* `network_interface_id` - (Required) The group source's network interface ID to register with the transit gateway multicast group.
* `transit_gateway_multicast_domain_id` - (Required) The ID of the transit gateway multicast domain.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - EC2 Transit Gateway Multicast Group Source identifier.

- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment.html.markdown
index ffd538c9b23..1b46fac3285 100644
--- a/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment.html.markdown
+++ b/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment.html.markdown
@@ -70,7 +70,7 @@ A full example of how to create a Transit Gateway in one AWS account, share it w

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `peer_account_id` - (Optional) Account ID of EC2 Transit Gateway to peer with. Defaults to the account ID the [AWS provider][1] is currently connected to.
* `peer_region` - (Required) Region of EC2 Transit Gateway to peer with.
@@ -78,21 +78,32 @@ The following arguments are supported:

* `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Peering Attachment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `transit_gateway_id` - (Required) Identifier of EC2 Transit Gateway.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - EC2 Transit Gateway Attachment identifier
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

-`aws_ec2_transit_gateway_peering_attachment` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_peering_attachment` using the EC2 Transit Gateway Attachment identifier. For example:

-```sh
-terraform import aws_ec2_transit_gateway_peering_attachment.example tgw-attach-12345678
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_ec2_transit_gateway_peering_attachment` using the EC2 Transit Gateway Attachment identifier.
For example: + +```console +% terraform import aws_ec2_transit_gateway_peering_attachment.example tgw-attach-12345678 ``` [1]: /docs/providers/aws/index.html - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown index 41361a94179..878f3f0e53f 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown @@ -38,14 +38,14 @@ A full example of how to create a Transit Gateway in one AWS account, share it w ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_attachment_id` - (Required) The ID of the EC2 Transit Gateway Peering Attachment to manage. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Peering Attachment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `transit_gateway_id` - Identifier of EC2 Transit Gateway. @@ -55,10 +55,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_peering_attachment_accepter` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_peering_attachment_accepter` using the EC2 Transit Gateway Attachment identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_peering_attachment_accepter.example tgw-attach-12345678 + +Using `terraform import`, import `aws_ec2_transit_gateway_peering_attachment_accepter` using the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_peering_attachment_accepter.example tgw-attach-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table.html.markdown index 5da680efa88..9682e940fc5 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table.html.markdown @@ -36,14 +36,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_id` - (Required) EC2 Transit Gateway identifier. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Policy Table. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Policy Table Amazon Resource Name (ARN). * `id` - EC2 Transit Gateway Policy Table identifier. @@ -52,10 +52,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_policy_table` can be imported by using the EC2 Transit Gateway Policy Table identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_policy_table` using the EC2 Transit Gateway Policy Table identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_policy_table.example tgw-rtb-12345678 + +Using `terraform import`, import `aws_ec2_transit_gateway_policy_table` using the EC2 Transit Gateway Policy Table identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_policy_table.example tgw-rtb-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table_association.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table_association.html.markdown index 5f7989e4313..01d5a82bdef 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table_association.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_policy_table_association.html.markdown @@ -34,14 +34,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_attachment_id` - (Required) Identifier of EC2 Transit Gateway Attachment. * `transit_gateway_policy_table_id` - (Required) Identifier of EC2 Transit Gateway Policy Table. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Policy Table identifier combined with EC2 Transit Gateway Attachment identifier * `resource_id` - Identifier of the resource @@ -49,10 +49,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_policy_table_association` can be imported by using the EC2 Transit Gateway Policy Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_policy_table_association` using the EC2 Transit Gateway Policy Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. 
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_policy_table_association.example tgw-rtb-12345678_tgw-attach-87654321 + +Using `terraform import`, import `aws_ec2_transit_gateway_policy_table_association` using the EC2 Transit Gateway Policy Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_policy_table_association.example tgw-rtb-12345678_tgw-attach-87654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_prefix_list_reference.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_prefix_list_reference.html.markdown index 4ab84308341..01758784fd2 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_prefix_list_reference.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_prefix_list_reference.html.markdown @@ -68,18 +68,29 @@ The following arguments are optional: * `blackhole` - (Optional) Indicates whether to drop traffic that matches the Prefix List. Defaults to `false`. * `transit_gateway_attachment_id` - (Optional) Identifier of EC2 Transit Gateway Attachment. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`) ## Import -`aws_ec2_transit_gateway_prefix_list_reference` can be imported by using the EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_prefix_list_reference` using the EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_ec2_transit_gateway_prefix_list_reference` using the EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`). 
For example: ```console -$ terraform import aws_ec2_transit_gateway_prefix_list_reference.example tgw-rtb-12345678_pl-12345678 +% terraform import aws_ec2_transit_gateway_prefix_list_reference.example tgw-rtb-12345678_pl-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_route.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_route.html.markdown index 21a8d377390..5568cbb22c5 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_route.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_route.html.markdown @@ -58,25 +58,36 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `destination_cidr_block` - (Required) IPv4 or IPv6 RFC1924 CIDR used for destination matches. Routing decisions are based on the most specific match. * `transit_gateway_attachment_id` - (Optional) Identifier of EC2 Transit Gateway Attachment (required if `blackhole` is set to false). * `blackhole` - (Optional) Indicates whether to drop traffic that matches this route (default to `false`). * `transit_gateway_route_table_id` - (Required) Identifier of EC2 Transit Gateway Route Table. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier combined with destination ## Import -`aws_ec2_transit_gateway_route` can be imported by using the EC2 Transit Gateway Route Table, an underscore, and the destination, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_route` using the EC2 Transit Gateway Route Table, an underscore, and the destination. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_route.example tgw-rtb-12345678_0.0.0.0/0 + +Using `terraform import`, import `aws_ec2_transit_gateway_route` using the EC2 Transit Gateway Route Table, an underscore, and the destination. For example: + +```console +% terraform import aws_ec2_transit_gateway_route.example tgw-rtb-12345678_0.0.0.0/0 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_route_table.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_route_table.html.markdown index 798812fa10e..d7cf35ea7c0 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_route_table.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_route_table.html.markdown @@ -33,14 +33,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_id` - (Required) Identifier of EC2 Transit Gateway. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Route Table. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Route Table Amazon Resource Name (ARN). * `default_association_route_table` - Boolean whether this is the default association route table for the EC2 Transit Gateway. @@ -50,10 +50,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_route_table` can be imported by using the EC2 Transit Gateway Route Table identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_route_table` using the EC2 Transit Gateway Route Table identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_route_table.example tgw-rtb-12345678 + +Using `terraform import`, import `aws_ec2_transit_gateway_route_table` using the EC2 Transit Gateway Route Table identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_route_table.example tgw-rtb-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_association.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_association.html.markdown index 5142d47eb1b..318680ef594 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_association.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_association.html.markdown @@ -34,15 +34,15 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_attachment_id` - (Required) Identifier of EC2 Transit Gateway Attachment. * `transit_gateway_route_table_id` - (Required) Identifier of EC2 Transit Gateway Route Table. * `replace_existing_association` - (Optional) Boolean whether the Gateway Attachment should remove any current Route Table association before associating with the specified Route Table. Default value: `false`. This argument is intended for use with EC2 Transit Gateways shared into the current account, otherwise the `transit_gateway_default_route_table_association` argument of the `aws_ec2_transit_gateway_vpc_attachment` resource should be used. 
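+
+As a hedged illustration of the shared-account workflow described above (hand-written, not `cdktf convert` output; it assumes provider bindings generated by `cdktf get` and uses placeholder identifiers), the association might look like:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ec2_transit_gateway_route_table_association import Ec2TransitGatewayRouteTableAssociation
+class SharedAttachmentStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Ec2TransitGatewayRouteTableAssociation(self, "example",
+            transit_gateway_attachment_id="tgw-attach-12345678",
+            transit_gateway_route_table_id="tgw-rtb-12345678",
+            # Remove the association made via the share owner's default route table first.
+            replace_existing_association=True
+        )
+```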
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier combined with EC2 Transit Gateway Attachment identifier * `resource_id` - Identifier of the resource @@ -50,10 +50,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_route_table_association` can be imported by using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_route_table_association` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_route_table_association.example tgw-rtb-12345678_tgw-attach-87654321 + +Using `terraform import`, import `aws_ec2_transit_gateway_route_table_association` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_route_table_association.example tgw-rtb-12345678_tgw-attach-87654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_propagation.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_propagation.html.markdown index fc13a52b0c8..bb8442332ff 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_propagation.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_route_table_propagation.html.markdown @@ -34,14 +34,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_attachment_id` - (Required) Identifier of EC2 Transit Gateway Attachment. * `transit_gateway_route_table_id` - (Required) Identifier of EC2 Transit Gateway Route Table. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier combined with EC2 Transit Gateway Attachment identifier * `resource_id` - Identifier of the resource @@ -49,10 +49,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_route_table_propagation` can be imported by using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_route_table_propagation` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. 
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_route_table_propagation.example tgw-rtb-12345678_tgw-attach-87654321 + +Using `terraform import`, import `aws_ec2_transit_gateway_route_table_propagation` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_route_table_propagation.example tgw-rtb-12345678_tgw-attach-87654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment.html.markdown index a539cd80df1..1e89fc63b84 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment.html.markdown @@ -37,7 +37,7 @@ A full example of how to create a Transit Gateway in one AWS account, share it w ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `subnet_ids` - (Required) Identifiers of EC2 Subnets. * `transit_gateway_id` - (Required) Identifier of EC2 Transit Gateway. @@ -49,9 +49,9 @@ The following arguments are supported: * `transit_gateway_default_route_table_association` - (Optional) Boolean whether the VPC Attachment should be associated with the EC2 Transit Gateway association default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`. * `transit_gateway_default_route_table_propagation` - (Optional) Boolean whether the VPC Attachment should propagate routes with the EC2 Transit Gateway propagation default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). @@ -59,10 +59,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_vpc_attachment` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_vpc_attachment` using the EC2 Transit Gateway Attachment identifier. 
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_ec2_transit_gateway_vpc_attachment.example tgw-attach-12345678 + +Using `terraform import`, import `aws_ec2_transit_gateway_vpc_attachment` using the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_vpc_attachment.example tgw-attach-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown b/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown index 1bc8c29d40c..5306e17ece1 100644 --- a/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown +++ b/website/docs/cdktf/python/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown @@ -44,16 +44,16 @@ A full example of how to create a Transit Gateway in one AWS account, share it w ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transit_gateway_attachment_id` - (Required) The ID of the EC2 Transit Gateway Attachment to manage. * `transit_gateway_default_route_table_association` - (Optional) Boolean whether the VPC Attachment should be associated with the EC2 Transit Gateway association default route table. Default value: `true`. * `transit_gateway_default_route_table_propagation` - (Optional) Boolean whether the VPC Attachment should propagate routes with the EC2 Transit Gateway propagation default route table. Default value: `true`. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway VPC Attachment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). @@ -67,10 +67,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_ec2_transit_gateway_vpc_attachment_accepter` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ec2_transit_gateway_vpc_attachment_accepter` using the EC2 Transit Gateway Attachment identifier. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_ec2_transit_gateway_vpc_attachment_accepter.example tgw-attach-12345678
+
+Using `terraform import`, import `aws_ec2_transit_gateway_vpc_attachment_accepter` using the EC2 Transit Gateway Attachment identifier. For example:
+
+```console
+% terraform import aws_ec2_transit_gateway_vpc_attachment_accepter.example tgw-attach-12345678
```
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecr_lifecycle_policy.html.markdown b/website/docs/cdktf/python/r/ecr_lifecycle_policy.html.markdown
new file mode 100644
index 00000000000..e87bf0c72d4
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecr_lifecycle_policy.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "ECR (Elastic Container Registry)"
+layout: "aws"
+page_title: "AWS: aws_ecr_lifecycle_policy"
+description: |-
+  Manages an ECR repository lifecycle policy.
+---
+
+
+
+# Resource: aws_ecr_lifecycle_policy
+
+Manages an ECR repository lifecycle policy.
+
+~> **NOTE:** Only one `aws_ecr_lifecycle_policy` resource can be used with the same ECR repository. To apply multiple rules, they must be combined in the `policy` JSON.
+
+~> **NOTE:** The AWS ECR API seems to reorder rules based on `rulePriority`. If you define multiple rules that are not sorted in ascending `rulePriority` order in the Terraform code, the resource will be flagged for recreation on every `terraform plan`.
+
+## Example Usage
+
+### Policy on untagged image
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecr_lifecycle_policy import EcrLifecyclePolicy
+from imports.aws.ecr_repository import EcrRepository
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foo = EcrRepository(self, "foo",
+            name="bar"
+        )
+        EcrLifecyclePolicy(self, "foopolicy",
+            policy="{\n \"rules\": [\n {\n \"rulePriority\": 1,\n \"description\": \"Expire images older than 14 days\",\n \"selection\": {\n \"tagStatus\": \"untagged\",\n \"countType\": \"sinceImagePushed\",\n \"countUnit\": \"days\",\n \"countNumber\": 14\n },\n \"action\": {\n \"type\": \"expire\"\n }\n }\n ]\n}\n\n",
+            repository=foo.name
+        )
+```
+
+### Policy on tagged image
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
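+#
+# Note: the escaped `policy` JSON below defines a single rule that keeps only
+# the 30 most recent images whose tags begin with the "v" prefix and expires
+# the rest.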
+#
+from imports.aws.ecr_lifecycle_policy import EcrLifecyclePolicy
+from imports.aws.ecr_repository import EcrRepository
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foo = EcrRepository(self, "foo",
+            name="bar"
+        )
+        EcrLifecyclePolicy(self, "foopolicy",
+            policy="{\n \"rules\": [\n {\n \"rulePriority\": 1,\n \"description\": \"Keep last 30 images\",\n \"selection\": {\n \"tagStatus\": \"tagged\",\n \"tagPrefixList\": [\"v\"],\n \"countType\": \"imageCountMoreThan\",\n \"countNumber\": 30\n },\n \"action\": {\n \"type\": \"expire\"\n }\n }\n ]\n}\n\n",
+            repository=foo.name
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `repository` - (Required) Name of the repository to apply the policy to.
+* `policy` - (Required) The policy document. This is a JSON formatted string. See more details about [Policy Parameters](http://docs.aws.amazon.com/AmazonECR/latest/userguide/LifecyclePolicies.html#lifecycle_policy_parameters) in the official AWS docs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `repository` - The name of the repository.
+* `registry_id` - The registry ID where the repository was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Lifecycle Policy using the name of the repository. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECR Lifecycle Policy using the name of the repository. For example:
+
+```console
+% terraform import aws_ecr_lifecycle_policy.example tf-example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecr_pull_through_cache_rule.html.markdown b/website/docs/cdktf/python/r/ecr_pull_through_cache_rule.html.markdown
new file mode 100644
index 00000000000..33c893f8b3c
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecr_pull_through_cache_rule.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "ECR (Elastic Container Registry)"
+layout: "aws"
+page_title: "AWS: aws_ecr_pull_through_cache_rule"
+description: |-
+  Provides an Elastic Container Registry Pull Through Cache Rule.
+---
+
+
+
+# Resource: aws_ecr_pull_through_cache_rule
+
+Provides an Elastic Container Registry Pull Through Cache Rule.
+
+For more information about pull through cache rules, including the set of supported
+upstream repositories, see [Using pull through cache rules](https://docs.aws.amazon.com/AmazonECR/latest/userguide/pull-through-cache.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
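+#
+# Note: the rule below caches images from ECR Public (public.ecr.aws) under
+# the local repository prefix "ecr-public".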
+# +from imports.aws.ecr_pull_through_cache_rule import EcrPullThroughCacheRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcrPullThroughCacheRule(self, "example", + ecr_repository_prefix="ecr-public", + upstream_registry_url="public.ecr.aws" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `ecr_repository_prefix` - (Required, Forces new resource) The repository name prefix to use when caching images from the source registry. +* `upstream_registry_url` - (Required, Forces new resource) The registry URL of the upstream public registry to use as the source. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registry_id` - The registry ID where the repository was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a pull-through cache rule using the `ecr_repository_prefix`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a pull-through cache rule using the `ecr_repository_prefix`. For example: + +```console +% terraform import aws_ecr_pull_through_cache_rule.example ecr-public +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ecr_registry_policy.html.markdown b/website/docs/cdktf/python/r/ecr_registry_policy.html.markdown new file mode 100644 index 00000000000..e95163f47d6 --- /dev/null +++ b/website/docs/cdktf/python/r/ecr_registry_policy.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_registry_policy" +description: |- + Provides an Elastic Container Registry Policy. +--- + + + +# Resource: aws_ecr_registry_policy + +Provides an Elastic Container Registry Policy. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_partition import DataAwsPartition +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.ecr_registry_policy import EcrRegistryPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + data_aws_partition_current = DataAwsPartition(self, "current_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_partition_current.override_logical_id("current") + data_aws_region_current = DataAwsRegion(self, "current_2") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        data_aws_region_current.override_logical_id("current")
+        EcrRegistryPolicy(self, "example",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": ["ecr:ReplicateImage"],
+                        "Effect": "Allow",
+                        "Principal": {
+                            "AWS": "arn:${" + data_aws_partition_current.partition + "}:iam::${" + current.account_id + "}:root"
+                        },
+                        "Resource": ["arn:${" + data_aws_partition_current.partition + "}:ecr:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:repository/*"
+                        ],
+                        "Sid": "testpolicy"
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                }))
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `registry_id` - The registry ID where the registry was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Registry Policy using the registry ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECR Registry Policy using the registry ID. For example:
+
+```console
+% terraform import aws_ecr_registry_policy.example 123456789012
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecr_registry_scanning_configuration.html.markdown b/website/docs/cdktf/python/r/ecr_registry_scanning_configuration.html.markdown
new file mode 100644
index 00000000000..902db9405d8
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecr_registry_scanning_configuration.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "ECR (Elastic Container Registry)"
+layout: "aws"
+page_title: "AWS: aws_ecr_registry_scanning_configuration"
+description: |-
+  Provides an Elastic Container Registry Scanning Configuration.
+---
+
+
+
+# Resource: aws_ecr_registry_scanning_configuration
+
+Provides an Elastic Container Registry Scanning Configuration. The configuration can't be completely deleted; destroying the resource instead reverts the registry to the default `BASIC` scanning configuration without rules.
+
+## Example Usage
+
+### Basic example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
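+#
+# Note: the configuration below turns on enhanced scanning and continuously
+# scans every repository whose name matches the wildcard filter "example".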
+#
+from imports.aws.ecr_registry_scanning_configuration import EcrRegistryScanningConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcrRegistryScanningConfiguration(self, "configuration",
+            rule=[EcrRegistryScanningConfigurationRule(
+                repository_filter=[EcrRegistryScanningConfigurationRuleRepositoryFilter(
+                    filter="example",
+                    filter_type="WILDCARD"
+                )
+                ],
+                scan_frequency="CONTINUOUS_SCAN"
+            )
+            ],
+            scan_type="ENHANCED"
+        )
+```
+
+### Multiple rules
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecr_registry_scanning_configuration import EcrRegistryScanningConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcrRegistryScanningConfiguration(self, "test",
+            rule=[EcrRegistryScanningConfigurationRule(
+                repository_filter=[EcrRegistryScanningConfigurationRuleRepositoryFilter(
+                    filter="*",
+                    filter_type="WILDCARD"
+                )
+                ],
+                scan_frequency="SCAN_ON_PUSH"
+            ), EcrRegistryScanningConfigurationRule(
+                repository_filter=[EcrRegistryScanningConfigurationRuleRepositoryFilter(
+                    filter="example",
+                    filter_type="WILDCARD"
+                )
+                ],
+                scan_frequency="CONTINUOUS_SCAN"
+            )
+            ],
+            scan_type="ENHANCED"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `scan_type` - (Required) The scanning type to set for the registry. Can be either `ENHANCED` or `BASIC`.
+- `rule` - (Optional) One or multiple blocks specifying scanning rules to determine which repository filters are used and at what frequency scanning will occur. See [below for schema](#rule).
+
+### rule
+
+- `repository_filter` - (Required) One or more repository filter blocks, containing a `filter` (required string filtering repositories, see pattern regex [here](https://docs.aws.amazon.com/AmazonECR/latest/APIReference/API_ScanningRepositoryFilter.html)) and a `filter_type` (required string, currently only `WILDCARD` is supported).
+- `scan_frequency` - (Required) The frequency at which scans are performed for a private registry. Can be `SCAN_ON_PUSH`, `CONTINUOUS_SCAN`, or `MANUAL`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `registry_id` - The registry ID the scanning configuration applies to.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Scanning Configurations using the `registry_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECR Scanning Configurations using the `registry_id`.
For example:
+
+```console
+% terraform import aws_ecr_registry_scanning_configuration.example 012345678901
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecr_replication_configuration.html.markdown b/website/docs/cdktf/python/r/ecr_replication_configuration.html.markdown
new file mode 100644
index 00000000000..36be1150f97
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecr_replication_configuration.html.markdown
@@ -0,0 +1,178 @@
+---
+subcategory: "ECR (Elastic Container Registry)"
+layout: "aws"
+page_title: "AWS: aws_ecr_replication_configuration"
+description: |-
+  Provides an Elastic Container Registry Replication Configuration.
+---
+
+
+
+# Resource: aws_ecr_replication_configuration
+
+Provides an Elastic Container Registry Replication Configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_regions import DataAwsRegions
+from imports.aws.ecr_replication_configuration import EcrReplicationConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        example = DataAwsRegions(self, "example")
+        aws_ecr_replication_configuration_example = \
+        EcrReplicationConfiguration(self, "example_2",
+            replication_configuration=EcrReplicationConfigurationReplicationConfiguration(
+                rule=[EcrReplicationConfigurationReplicationConfigurationRule(
+                    destination=[EcrReplicationConfigurationReplicationConfigurationRuleDestination(
+                        region=Token.as_string(property_access(example.names, ["0"])),
+                        registry_id=Token.as_string(current.account_id)
+                    )
+                    ]
+                )
+                ]
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecr_replication_configuration_example.override_logical_id("example")
+```
+
+## Multiple Region Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
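+#
+# Note: this rule fans out to two destinations, the first two regions returned
+# by the aws_regions data source, replicating within the same account.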
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_regions import DataAwsRegions
+from imports.aws.ecr_replication_configuration import EcrReplicationConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        example = DataAwsRegions(self, "example")
+        aws_ecr_replication_configuration_example = \
+        EcrReplicationConfiguration(self, "example_2",
+            replication_configuration=EcrReplicationConfigurationReplicationConfiguration(
+                rule=[EcrReplicationConfigurationReplicationConfigurationRule(
+                    destination=[EcrReplicationConfigurationReplicationConfigurationRuleDestination(
+                        region=Token.as_string(property_access(example.names, ["0"])),
+                        registry_id=Token.as_string(current.account_id)
+                    ), EcrReplicationConfigurationReplicationConfigurationRuleDestination(
+                        region=Token.as_string(property_access(example.names, ["1"])),
+                        registry_id=Token.as_string(current.account_id)
+                    )
+                    ]
+                )
+                ]
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecr_replication_configuration_example.override_logical_id("example")
+```
+
+## Repository Filter Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_regions import DataAwsRegions
+from imports.aws.ecr_replication_configuration import EcrReplicationConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        example = DataAwsRegions(self, "example")
+        aws_ecr_replication_configuration_example = \
+        EcrReplicationConfiguration(self, "example_2",
+            replication_configuration=EcrReplicationConfigurationReplicationConfiguration(
+                rule=[EcrReplicationConfigurationReplicationConfigurationRule(
+                    destination=[EcrReplicationConfigurationReplicationConfigurationRuleDestination(
+                        region=Token.as_string(property_access(example.names, ["0"])),
+                        registry_id=Token.as_string(current.account_id)
+                    )
+                    ],
+                    repository_filter=[EcrReplicationConfigurationReplicationConfigurationRuleRepositoryFilter(
+                        filter="prod-microservice",
+                        filter_type="PREFIX_MATCH"
+                    )
+                    ]
+                )
+                ]
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecr_replication_configuration_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `replication_configuration` - (Required) Replication configuration for a registry. See [Replication Configuration](#replication-configuration).
+
+### Replication Configuration
+
+* `rule` - (Required) The replication rules for a replication configuration. A maximum of 10 are allowed per `replication_configuration`. See [Rule](#rule).
+
+### Rule
+
+* `destination` - (Required) The details of a replication destination. A maximum of 25 are allowed per `rule`. See [Destination](#destination).
+* `repository_filter` - (Optional) Filters for a replication rule. See [Repository Filter](#repository-filter).
+ +### Destination + +* `region` - (Required) A Region to replicate to. +* `registry_id` - (Required) The account ID of the destination registry to replicate to. + +### Repository Filter + +* `filter` - (Required) The repository filter details. +* `filter_type` - (Required) The repository filter type. The only supported value is `PREFIX_MATCH`, which is a repository name prefix specified with the filter parameter. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registry_id` - The registry ID where the replication configuration was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Replication Configuration using the `registry_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ECR Replication Configuration using the `registry_id`. For example: + +```console +% terraform import aws_ecr_replication_configuration.service 012345678912 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ecr_repository.html.markdown b/website/docs/cdktf/python/r/ecr_repository.html.markdown new file mode 100644 index 00000000000..2eb66fbd091 --- /dev/null +++ b/website/docs/cdktf/python/r/ecr_repository.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_repository" +description: |- + Provides an Elastic Container Registry Repository. +--- + + + +# Resource: aws_ecr_repository + +Provides an Elastic Container Registry Repository. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ecr_repository import EcrRepository +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcrRepository(self, "foo", + image_scanning_configuration=EcrRepositoryImageScanningConfiguration( + scan_on_push=True + ), + image_tag_mutability="MUTABLE", + name="bar" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the repository. +* `encryption_configuration` - (Optional) Encryption configuration for the repository. See [below for schema](#encryption_configuration). +* `force_delete` - (Optional) If `true`, will delete the repository even if it contains images. + Defaults to `false`. +* `image_tag_mutability` - (Optional) The tag mutability setting for the repository. Must be one of: `MUTABLE` or `IMMUTABLE`. Defaults to `MUTABLE`. +* `image_scanning_configuration` - (Optional) Configuration block that defines image scanning configuration for the repository. By default, image scanning must be manually triggered. See the [ECR User Guide](https://docs.aws.amazon.com/AmazonECR/latest/userguide/image-scanning.html) for more information about image scanning. + * `scan_on_push` - (Required) Indicates whether images are scanned after being pushed to the repository (true) or not scanned (false). 
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### encryption_configuration + +* `encryption_type` - (Optional) The encryption type to use for the repository. Valid values are `AES256` or `KMS`. Defaults to `AES256`. +* `kms_key` - (Optional) The ARN of the KMS key to use when `encryption_type` is `KMS`. If not specified, uses the default AWS managed key for ECR. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Full ARN of the repository. +* `registry_id` - The registry ID where the repository was created. +* `repository_url` - The URL of the repository (in the form `aws_account_id.dkr.ecr.region.amazonaws.com/repositoryName`). +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `delete` - (Default `20m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Repositories using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ECR Repositories using the `name`. For example: + +```console +% terraform import aws_ecr_repository.service test-service +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ecr_repository_policy.html.markdown b/website/docs/cdktf/python/r/ecr_repository_policy.html.markdown new file mode 100644 index 00000000000..7aa40ab3efc --- /dev/null +++ b/website/docs/cdktf/python/r/ecr_repository_policy.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_repository_policy" +description: |- + Provides an Elastic Container Registry Repository Policy. +--- + + + +# Resource: aws_ecr_repository_policy + +Provides an Elastic Container Registry Repository Policy. + +Note that currently only one policy may be applied to a repository. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
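+#
+# Note: the generated policy document grants account 123456789012 pull, push,
+# and repository-administration permissions on the "bar" repository.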
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.ecr_repository import EcrRepository
+from imports.aws.ecr_repository_policy import EcrRepositoryPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foo = EcrRepository(self, "foo",
+            name="bar"
+        )
+        foopolicy = DataAwsIamPolicyDocument(self, "foopolicy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["ecr:GetDownloadUrlForLayer", "ecr:BatchGetImage", "ecr:BatchCheckLayerAvailability", "ecr:PutImage", "ecr:InitiateLayerUpload", "ecr:UploadLayerPart", "ecr:CompleteLayerUpload", "ecr:DescribeRepositories", "ecr:GetRepositoryPolicy", "ecr:ListImages", "ecr:DeleteRepository", "ecr:BatchDeleteImage", "ecr:SetRepositoryPolicy", "ecr:DeleteRepositoryPolicy"
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["123456789012"],
+                    type="AWS"
+                )
+                ],
+                sid="new policy"
+            )
+            ]
+        )
+        aws_ecr_repository_policy_foopolicy = EcrRepositoryPolicy(self, "foopolicy_2",
+            policy=Token.as_string(foopolicy.json),
+            repository=foo.name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecr_repository_policy_foopolicy.override_logical_id("foopolicy")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `repository` - (Required) Name of the repository to apply the policy to.
+* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `repository` - The name of the repository.
+* `registry_id` - The registry ID where the repository was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Repository Policy using the repository name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECR Repository Policy using the repository name. For example:
+
+```console
+% terraform import aws_ecr_repository_policy.example example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecrpublic_repository.html.markdown b/website/docs/cdktf/python/r/ecrpublic_repository.html.markdown
new file mode 100644
index 00000000000..37c7dd216eb
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecrpublic_repository.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "ECR Public"
+layout: "aws"
+page_title: "AWS: aws_ecrpublic_repository"
+description: |-
+  Provides a Public Elastic Container Registry Repository.
+---
+
+
+
+# Resource: aws_ecrpublic_repository
+
+Provides a Public Elastic Container Registry Repository.
+
+~> **NOTE:** This resource can only be used in the `us-east-1` region.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ecrpublic_repository import EcrpublicRepository +from imports.aws.provider import AwsProvider +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + us_east1 = AwsProvider(self, "aws", + alias="us_east_1", + region="us-east-1" + ) + EcrpublicRepository(self, "foo", + catalog_data=EcrpublicRepositoryCatalogData( + about_text="About Text", + architectures=["ARM"], + description="Description", + logo_image_blob=Token.as_string(Fn.filebase64(png)), + operating_systems=["Linux"], + usage_text="Usage Text" + ), + provider=us_east1, + repository_name="bar", + tags={ + "env": "production" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repository_name` - (Required) Name of the repository. +* `catalog_data` - (Optional) Catalog data configuration for the repository. See [below for schema](#catalog_data). +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### catalog_data + +* `about_text` - (Optional) A detailed description of the contents of the repository. It is publicly visible in the Amazon ECR Public Gallery. The text must be in markdown format. +* `architectures` - (Optional) The system architecture that the images in the repository are compatible with. On the Amazon ECR Public Gallery, the following supported architectures will appear as badges on the repository and are used as search filters: `ARM`, `ARM 64`, `x86`, `x86-64` +* `description` - (Optional) A short description of the contents of the repository. This text appears in both the image details and also when searching for repositories on the Amazon ECR Public Gallery. +* `logo_image_blob` - (Optional) The base64-encoded repository logo payload. (Only visible for verified accounts) Note that drift detection is disabled for this attribute. +* `operating_systems` - (Optional) The operating systems that the images in the repository are compatible with. On the Amazon ECR Public Gallery, the following supported operating systems will appear as badges on the repository and are used as search filters: `Linux`, `Windows` +* `usage_text` - (Optional) Detailed information on how to use the contents of the repository. It is publicly visible in the Amazon ECR Public Gallery. The usage text provides context, support information, and additional usage details for users of the repository. The text must be in markdown format. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Full ARN of the repository. +* `id` - The repository name. +* `registry_id` - The registry ID where the repository was created. +* `repository_uri` - The URI of the repository. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
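+
+The `repository_uri` attribute is what container clients push to and pull from. A minimal hand-written sketch (not `cdktf convert` output) exporting it as a stack output, assuming the `EcrpublicRepository` construct from the example above is assigned to a variable `foo`:
+
+```python
+from cdktf import TerraformOutput
+
+# Inside the stack's __init__, after creating the repository:
+TerraformOutput(self, "repository_uri",
+    value=foo.repository_uri
+)
+```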
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `delete` - (Default `20m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Public Repositories using the `repository_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ECR Public Repositories using the `repository_name`. For example: + +```console +% terraform import aws_ecrpublic_repository.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ecrpublic_repository_policy.html.markdown b/website/docs/cdktf/python/r/ecrpublic_repository_policy.html.markdown new file mode 100644 index 00000000000..189f9447b77 --- /dev/null +++ b/website/docs/cdktf/python/r/ecrpublic_repository_policy.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "ECR Public" +layout: "aws" +page_title: "AWS: aws_ecrpublic_repository_policy" +description: |- + Provides an Elastic Container Registry Public Repository Policy. +--- + + + +# Resource: aws_ecrpublic_repository_policy + +Provides an Elastic Container Registry Public Repository Policy. + +Note that currently only one policy may be applied to a repository. + +~> **NOTE:** This resource can only be used in the `us-east-1` region. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.ecrpublic_repository import EcrpublicRepository +from imports.aws.ecrpublic_repository_policy import EcrpublicRepositoryPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = EcrpublicRepository(self, "example", + repository_name="example" + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ecr:GetDownloadUrlForLayer", "ecr:BatchGetImage", "ecr:BatchCheckLayerAvailability", "ecr:PutImage", "ecr:InitiateLayerUpload", "ecr:UploadLayerPart", "ecr:CompleteLayerUpload", "ecr:DescribeRepositories", "ecr:GetRepositoryPolicy", "ecr:ListImages", "ecr:DeleteRepository", "ecr:BatchDeleteImage", "ecr:SetRepositoryPolicy", "ecr:DeleteRepositoryPolicy" + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["123456789012"], + type="AWS" + ) + ], + sid="new policy" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_ecrpublic_repository_policy_example = EcrpublicRepositoryPolicy(self, "example_2", + policy=Token.as_string(data_aws_iam_policy_document_example.json), + repository_name=example.repository_name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_ecrpublic_repository_policy_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repository_name` - (Required) Name of the repository to apply the policy. +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registry_id` - The registry ID where the repository was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Public Repository Policy using the repository name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ECR Public Repository Policy using the repository name. For example: + +```console +% terraform import aws_ecrpublic_repository_policy.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ecs_account_setting_default.html.markdown b/website/docs/cdktf/python/r/ecs_account_setting_default.html.markdown new file mode 100644 index 00000000000..98fff18c9ee --- /dev/null +++ b/website/docs/cdktf/python/r/ecs_account_setting_default.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_account_setting_default" +description: |- + Provides an ECS Default account setting. +--- + + + +# Resource: aws_ecs_account_setting_default + +Provides an ECS default account setting for a specific ECS Resource name within a specific region. More information can be found on the [ECS Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-account-settings.html). + +~> **NOTE:** The AWS API does not delete this resource. When you run `destroy`, the provider will attempt to disable the setting. + +~> **NOTE:** Your AWS account may not support disabling `containerInstanceLongArnFormat`, `serviceLongArnFormat`, and `taskLongArnFormat`. If your account does not support disabling these, "destroying" this resource will not disable the setting nor cause a Terraform error. However, the AWS Provider will log an AWS error: `InvalidParameterException: You can no longer disable Long Arn settings`. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ecs_account_setting_default import EcsAccountSettingDefault +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcsAccountSettingDefault(self, "test", + name="taskLongArnFormat", + value="enabled" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the account setting to set. Valid values are `serviceLongArnFormat`, `taskLongArnFormat`, `containerInstanceLongArnFormat`, `awsvpcTrunking` and `containerInsights`. 
+* `value` - (Required) State of the setting. Valid values are `enabled` and `disabled`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ARN that identifies the account setting.
+* `principal_arn` - ARN that identifies the account setting.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Account Setting defaults using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECS Account Setting defaults using the `name`. For example:
+
+```console
+% terraform import aws_ecs_account_setting_default.example taskLongArnFormat
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecs_capacity_provider.html.markdown b/website/docs/cdktf/python/r/ecs_capacity_provider.html.markdown
new file mode 100644
index 00000000000..766b1a3e2b3
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecs_capacity_provider.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "ECS (Elastic Container)"
+layout: "aws"
+page_title: "AWS: aws_ecs_capacity_provider"
+description: |-
+  Provides an ECS cluster capacity provider.
+---
+
+
+
+# Resource: aws_ecs_capacity_provider
+
+Provides an ECS cluster capacity provider. More information can be found on the [ECS Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html).
+
+~> **NOTE:** Associating an ECS Capacity Provider with an Auto Scaling Group will automatically add the `AmazonECSManaged` tag to the Auto Scaling Group. This tag should be included in the `aws_autoscaling_group` resource configuration to prevent Terraform from removing it in subsequent executions, and to ensure the `AmazonECSManaged` tag is propagated to all EC2 Instances in the Auto Scaling Group if `min_size` is above 0 on creation. Any EC2 Instances in the Auto Scaling Group without this tag must be updated manually, otherwise they may cause unexpected scaling behavior and metrics.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.autoscaling_group import AutoscalingGroup
+from imports.aws.ecs_capacity_provider import EcsCapacityProvider
+class MyConvertedCode(TerraformStack):
+    # max_size and min_size are passed through to the Auto Scaling Group.
+    def __init__(self, scope, name, *, max_size, min_size):
+        super().__init__(scope, name)
+        test = AutoscalingGroup(self, "test",
+            tag=[AutoscalingGroupTag(
+                key="AmazonECSManaged",
+                propagate_at_launch=True,
+                value=Token.as_string(True)
+            )
+            ],
+            max_size=max_size,
+            min_size=min_size
+        )
+        aws_ecs_capacity_provider_test = EcsCapacityProvider(self, "test_1",
+            auto_scaling_group_provider=EcsCapacityProviderAutoScalingGroupProvider(
+                auto_scaling_group_arn=test.arn,
+                managed_scaling=EcsCapacityProviderAutoScalingGroupProviderManagedScaling(
+                    maximum_scaling_step_size=1000,
+                    minimum_scaling_step_size=1,
+                    status="ENABLED",
+                    target_capacity=10
+                ),
+                managed_termination_protection="ENABLED"
+            ),
+            name="test"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecs_capacity_provider_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `auto_scaling_group_provider` - (Required) Configuration block for the provider for the ECS auto scaling group. Detailed below.
+* `name` - (Required) Name of the capacity provider.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `auto_scaling_group_provider`
+
+* `auto_scaling_group_arn` - (Required) ARN of the associated auto scaling group.
+* `managed_scaling` - (Optional) Configuration block defining the parameters of managed scaling. Detailed below.
+* `managed_termination_protection` - (Optional) Enables or disables container-aware termination of instances in the auto scaling group when scale-in happens. Valid values are `ENABLED` and `DISABLED`.
+
+### `managed_scaling`
+
+* `instance_warmup_period` - (Optional) Period of time, in seconds, that a newly launched Amazon EC2 instance waits before it can contribute to CloudWatch metrics for the Auto Scaling group. If this parameter is omitted, the default value of `300` seconds is used.
+* `maximum_scaling_step_size` - (Optional) Maximum step adjustment size. A number between 1 and 10,000.
+* `minimum_scaling_step_size` - (Optional) Minimum step adjustment size. A number between 1 and 10,000.
+* `status` - (Optional) Whether auto scaling is managed by ECS. Valid values are `ENABLED` and `DISABLED`.
+* `target_capacity` - (Optional) Target utilization for the capacity provider. A number between 1 and 100.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN that identifies the capacity provider.
+* `id` - ARN that identifies the capacity provider.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Capacity Providers using the `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ECS Capacity Providers using the `name`. For example: + +```console +% terraform import aws_ecs_capacity_provider.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ecs_cluster.html.markdown b/website/docs/cdktf/python/r/ecs_cluster.html.markdown new file mode 100644 index 00000000000..b0d370889a2 --- /dev/null +++ b/website/docs/cdktf/python/r/ecs_cluster.html.markdown @@ -0,0 +1,143 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_cluster" +description: |- + Provides an ECS cluster. +--- + + + +# Resource: aws_ecs_cluster + +Provides an ECS cluster. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ecs_cluster import EcsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcsCluster(self, "foo", + name="white-hart", + setting=[EcsClusterSetting( + name="containerInsights", + value="enabled" + ) + ] + ) +``` + +### Example with Log Configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_log_group import CloudwatchLogGroup +from imports.aws.ecs_cluster import EcsCluster +from imports.aws.kms_key import KmsKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CloudwatchLogGroup(self, "example", + name="example" + ) + aws_kms_key_example = KmsKey(self, "example_1", + deletion_window_in_days=7, + description="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kms_key_example.override_logical_id("example") + EcsCluster(self, "test", + configuration=EcsClusterConfiguration( + execute_command_configuration=EcsClusterConfigurationExecuteCommandConfiguration( + kms_key_id=Token.as_string(aws_kms_key_example.arn), + log_configuration=EcsClusterConfigurationExecuteCommandConfigurationLogConfiguration( + cloud_watch_encryption_enabled=True, + cloud_watch_log_group_name=example.name + ), + logging="OVERRIDE" + ) + ), + name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `configuration` - (Optional) The execute command configuration for the cluster. Detailed below. +* `name` - (Required) Name of the cluster (up to 255 letters, numbers, hyphens, and underscores) +* `service_connect_defaults` - (Optional) Configures a default Service Connect namespace. Detailed below. +* `setting` - (Optional) Configuration block(s) with cluster settings. For example, this can be used to enable CloudWatch Container Insights for a cluster. Detailed below. 
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `configuration`
+
+* `execute_command_configuration` - (Optional) The details of the execute command configuration. Detailed below.
+
+#### `execute_command_configuration`
+
+* `kms_key_id` - (Optional) The AWS Key Management Service key ID to encrypt the data between the local client and the container.
+* `log_configuration` - (Optional) The log configuration for the results of the execute command actions. Required when `logging` is `OVERRIDE`. Detailed below.
+* `logging` - (Optional) The log setting to use for redirecting logs for your execute command results. Valid values are `NONE`, `DEFAULT`, and `OVERRIDE`.
+
+##### `log_configuration`
+
+* `cloud_watch_encryption_enabled` - (Optional) Whether or not to enable encryption on the CloudWatch logs. If not specified, encryption will be disabled.
+* `cloud_watch_log_group_name` - (Optional) The name of the CloudWatch log group to send logs to.
+* `s3_bucket_name` - (Optional) The name of the S3 bucket to send logs to.
+* `s3_bucket_encryption_enabled` - (Optional) Whether or not to enable encryption on the logs sent to S3. If not specified, encryption will be disabled.
+* `s3_key_prefix` - (Optional) An optional folder in the S3 bucket to place logs in.
+
+### `setting`
+
+* `name` - (Required) Name of the setting to manage. Valid values: `containerInsights`.
+* `value` - (Required) The value to assign to the setting. Valid values are `enabled` and `disabled`.
+
+### `service_connect_defaults`
+
+* `namespace` - (Required) The ARN of the [`aws_service_discovery_http_namespace`](/docs/providers/aws/r/service_discovery_http_namespace.html) that's used when you create a service and don't specify a Service Connect configuration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN that identifies the cluster.
+* `id` - ARN that identifies the cluster.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS clusters using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECS clusters using the `name`.
For example:
+
+```console
+% terraform import aws_ecs_cluster.stateless stateless-app
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecs_cluster_capacity_providers.html.markdown b/website/docs/cdktf/python/r/ecs_cluster_capacity_providers.html.markdown
new file mode 100644
index 00000000000..b064e071d84
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecs_cluster_capacity_providers.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "ECS (Elastic Container)"
+layout: "aws"
+page_title: "AWS: aws_ecs_cluster_capacity_providers"
+description: |-
+  Provides an ECS cluster capacity providers resource.
+---
+
+
+
+# Resource: aws_ecs_cluster_capacity_providers
+
+Manages the capacity providers of an ECS Cluster.
+
+More information about capacity providers can be found in the [ECS User Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_cluster import EcsCluster
+from imports.aws.ecs_cluster_capacity_providers import EcsClusterCapacityProviders
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = EcsCluster(self, "example",
+            name="my-cluster"
+        )
+        aws_ecs_cluster_capacity_providers_example = EcsClusterCapacityProviders(self, "example_1",
+            capacity_providers=["FARGATE"],
+            cluster_name=example.name,
+            default_capacity_provider_strategy=[EcsClusterCapacityProvidersDefaultCapacityProviderStrategy(
+                base=1,
+                capacity_provider="FARGATE",
+                weight=100
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecs_cluster_capacity_providers_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `capacity_providers` - (Optional) Set of names of one or more capacity providers to associate with the cluster. Valid values also include `FARGATE` and `FARGATE_SPOT`.
+* `cluster_name` - (Required, Forces new resource) Name of the ECS cluster to manage capacity providers for.
+* `default_capacity_provider_strategy` - (Optional) Set of capacity provider strategies to use by default for the cluster. Detailed below.
+
+### default_capacity_provider_strategy Configuration Block
+
+* `capacity_provider` - (Required) Name of the capacity provider.
+* `weight` - (Optional) The relative percentage of the total number of launched tasks that should use the specified capacity provider. The `weight` value is taken into consideration after the `base` count of tasks has been satisfied. Defaults to `0`.
+* `base` - (Optional) The number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Defaults to `0`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Same as `cluster_name`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS cluster capacity providers using the `cluster_name` attribute.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ECS cluster capacity providers using the `cluster_name` attribute. For example: + +```console +% terraform import aws_ecs_cluster_capacity_providers.example my-cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ecs_service.html.markdown b/website/docs/cdktf/python/r/ecs_service.html.markdown new file mode 100644 index 00000000000..0a13341bb46 --- /dev/null +++ b/website/docs/cdktf/python/r/ecs_service.html.markdown @@ -0,0 +1,372 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_service" +description: |- + Provides an ECS service. +--- + + + +# Resource: aws_ecs_service + +-> **Note:** To prevent a race condition during service deletion, make sure to set `depends_on` to the related `aws_iam_role_policy`; otherwise, the policy may be destroyed too soon and the ECS service will then get stuck in the `DRAINING` state. + +Provides an ECS service - effectively a task that is expected to run until an error occurs or a user terminates it (typically a webserver or a database). + +See [ECS Services section in AWS developer guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs_services.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ecs_service import EcsService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcsService(self, "mongo", + cluster=foo.id, + depends_on=[aws_iam_role_policy_foo], + desired_count=3, + iam_role=Token.as_string(aws_iam_role_foo.arn), + load_balancer=[EcsServiceLoadBalancer( + container_name="mongo", + container_port=8080, + target_group_arn=Token.as_string(aws_lb_target_group_foo.arn) + ) + ], + name="mongodb", + ordered_placement_strategy=[EcsServiceOrderedPlacementStrategy( + field="cpu", + type="binpack" + ) + ], + placement_constraints=[EcsServicePlacementConstraints( + expression="attribute:ecs.availability-zone in [us-west-2a, us-west-2b]", + type="memberOf" + ) + ], + task_definition=Token.as_string(aws_ecs_task_definition_mongo.arn) + ) +``` + +### Ignoring Changes to Desired Count + +You can utilize the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignore_changes` to create an ECS service with an initial count of running instances, then ignore any changes to that count caused externally (e.g., Application Autoscaling). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.ecs_service import EcsService
+class MyConvertedCode(TerraformStack):
+    # service_name carries the original `name` variable; it is kept distinct
+    # from the construct id parameter `name`.
+    def __init__(self, scope, name, *, service_name):
+        super().__init__(scope, name)
+        EcsService(self, "example",
+            desired_count=2,
+            lifecycle=TerraformResourceLifecycle(
+                # Attribute names are given as strings.
+                ignore_changes=["desired_count"]
+            ),
+            name=service_name
+        )
+```
+
+### Daemon Scheduling Strategy
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_service import EcsService
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsService(self, "bar",
+            cluster=foo.id,
+            name="bar",
+            scheduling_strategy="DAEMON",
+            task_definition=Token.as_string(aws_ecs_task_definition_bar.arn)
+        )
+```
+
+### CloudWatch Deployment Alarms
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_service import EcsService
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsService(self, "example",
+            alarms=EcsServiceAlarms(
+                alarm_names=[Token.as_string(aws_cloudwatch_metric_alarm_example.alarm_name)],
+                enable=True,
+                rollback=True
+            ),
+            cluster=Token.as_string(aws_ecs_cluster_example.id),
+            name="example"
+        )
+```
+
+### External Deployment Controller
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_service import EcsService
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsService(self, "example",
+            cluster=Token.as_string(aws_ecs_cluster_example.id),
+            deployment_controller=EcsServiceDeploymentController(
+                type="EXTERNAL"
+            ),
+            name="example"
+        )
+```
+
+### Redeploy Service On Every Apply
+
+The key used with `triggers` is arbitrary.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_service import EcsService
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, service_name):
+        super().__init__(scope, name)
+        EcsService(self, "example",
+            force_new_deployment=True,
+            triggers={
+                "redeployment": Token.as_string(Fn.timestamp())
+            },
+            name=service_name
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the service (up to 255 letters, numbers, hyphens, and underscores)
+
+The following arguments are optional:
+
+* `alarms` - (Optional) Information about the CloudWatch alarms. [See below](#alarms).
+* `capacity_provider_strategy` - (Optional) Capacity provider strategies to use for the service. Can be one or more.
These can be updated without destroying and recreating the service only if `force_new_deployment = true` and not changing from 0 `capacity_provider_strategy` blocks to greater than 0, or vice versa. See below.
+* `cluster` - (Optional) ARN of an ECS cluster.
+* `deployment_circuit_breaker` - (Optional) Configuration block for deployment circuit breaker. See below.
+* `deployment_controller` - (Optional) Configuration block for deployment controller configuration. See below.
+* `deployment_maximum_percent` - (Optional) Upper limit (as a percentage of the service's desiredCount) of the number of running tasks that can be running in a service during a deployment. Not valid when using the `DAEMON` scheduling strategy.
+* `deployment_minimum_healthy_percent` - (Optional) Lower limit (as a percentage of the service's desiredCount) of the number of running tasks that must remain running and healthy in a service during a deployment.
+* `desired_count` - (Optional) Number of instances of the task definition to place and keep running. Defaults to 0. Do not specify if using the `DAEMON` scheduling strategy.
+* `enable_ecs_managed_tags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the tasks within the service.
+* `enable_execute_command` - (Optional) Specifies whether to enable Amazon ECS Exec for the tasks within the service.
+* `force_new_deployment` - (Optional) Enable to force a new task deployment of the service. This can be used to update tasks to use a newer Docker image with the same image/tag combination (e.g., `myimage:latest`), roll Fargate tasks onto a newer platform version, or immediately deploy `ordered_placement_strategy` and `placement_constraints` updates.
+* `health_check_grace_period_seconds` - (Optional) Seconds to ignore failing load balancer health checks on newly instantiated tasks to prevent premature shutdown, up to 2147483647. Only valid for services configured to use load balancers.
+* `iam_role` - (Optional) ARN of the IAM role that allows Amazon ECS to make calls to your load balancer on your behalf. This parameter is required if you are using a load balancer with your service, but only if your task definition does not use the `awsvpc` network mode. If using `awsvpc` network mode, do not specify this role. If your account has already created the Amazon ECS service-linked role, that role is used by default for your service unless you specify a role here.
+* `launch_type` - (Optional) Launch type on which to run your service. The valid values are `EC2`, `FARGATE`, and `EXTERNAL`. Defaults to `EC2`.
+* `load_balancer` - (Optional) Configuration block for load balancers. See below.
+* `network_configuration` - (Optional) Network configuration for the service. This parameter is required for task definitions that use the `awsvpc` network mode to receive their own Elastic Network Interface, and it is not supported for other network modes. See below.
+* `ordered_placement_strategy` - (Optional) Service-level strategy rules that are taken into consideration during task placement. List from top to bottom in order of precedence. Updates to this configuration will take effect on the next task deployment unless `force_new_deployment` is enabled. The maximum number of `ordered_placement_strategy` blocks is `5`. See below.
+* `placement_constraints` - (Optional) Rules that are taken into consideration during task placement. Updates to this configuration will take effect on the next task deployment unless `force_new_deployment` is enabled.
Maximum number of `placement_constraints` is `10`. See below. +* `platform_version` - (Optional) Platform version on which to run your service. Only applicable for `launch_type` set to `FARGATE`. Defaults to `LATEST`. More information about Fargate platform versions can be found in the [AWS ECS User Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/platform_versions.html). +* `propagate_tags` - (Optional) Specifies whether to propagate the tags from the task definition or the service to the tasks. The valid values are `SERVICE` and `TASK_DEFINITION`. +* `scheduling_strategy` - (Optional) Scheduling strategy to use for the service. The valid values are `REPLICA` and `DAEMON`. Defaults to `REPLICA`. Note that [*Tasks using the Fargate launch type or the `CODE_DEPLOY` or `EXTERNAL` deployment controller types don't support the `DAEMON` scheduling strategy*](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_CreateService.html). +* `service_connect_configuration` - (Optional) The ECS Service Connect configuration for this service to discover and connect to services, and be discovered by, and connected from, other services within a namespace. See below. +* `service_registries` - (Optional) Service discovery registries for the service. The maximum number of `service_registries` blocks is `1`. See below. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `task_definition` - (Optional) Family and revision (`family:revision`) or full ARN of the task definition that you want to run in your service. Required unless using the `EXTERNAL` deployment controller. If a revision is not specified, the latest `ACTIVE` revision is used. +* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger an in-place update (redeployment). Useful with `timestamp()`. See example above. +* `wait_for_steady_state` - (Optional) If `true`, Terraform will wait for the service to reach a steady state (like [`aws ecs wait services-stable`](https://docs.aws.amazon.com/cli/latest/reference/ecs/wait/services-stable.html)) before continuing. Default `false`. + +### alarms + +The `alarms` configuration block supports the following: + +* `alarm_names` - (Required) One or more CloudWatch alarm names. +* `enable` - (Required) Determines whether to use the CloudWatch alarm option in the service deployment process. +* `rollback` - (Required) Determines whether to configure Amazon ECS to roll back the service if a service deployment fails. If rollback is used, when a service deployment fails, the service is rolled back to the last deployment that completed successfully. + +### capacity_provider_strategy + +The `capacity_provider_strategy` configuration block supports the following: + +* `base` - (Optional) Number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. +* `capacity_provider` - (Required) Short name of the capacity provider. +* `weight` - (Required) Relative percentage of the total number of launched tasks that should use the specified capacity provider. 
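+
+As an illustration of the `capacity_provider_strategy` block above, the following is a minimal sketch, not produced by `cdktf convert`: the `EcsServiceCapacityProviderStrategy` struct name follows the generated bindings' naming convention used in the examples on this page, and the cluster/task definition references are placeholders. It keeps one task on `FARGATE` (`base = 1`) and weights the remainder 3:1 toward `FARGATE_SPOT`:
+
+```python
+# A minimal sketch, not generated by 'cdktf convert'. Struct name and the
+# referenced cluster/task definition are assumptions for illustration.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.ecs_service import EcsService
+class CapacityProviderStrategySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Run the first task on FARGATE (base=1); weight the remaining tasks
+        # 3:1 toward FARGATE_SPOT.
+        EcsService(self, "example",
+            capacity_provider_strategy=[EcsServiceCapacityProviderStrategy(
+                base=1,
+                capacity_provider="FARGATE",
+                weight=1
+            ), EcsServiceCapacityProviderStrategy(
+                capacity_provider="FARGATE_SPOT",
+                weight=3
+            )
+            ],
+            cluster=Token.as_string(aws_ecs_cluster_example.id),
+            desired_count=4,
+            name="example",
+            task_definition=Token.as_string(aws_ecs_task_definition_example.arn)
+        )
+```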
+
+### deployment_circuit_breaker
+
+The `deployment_circuit_breaker` configuration block supports the following:
+
+* `enable` - (Required) Whether to enable the deployment circuit breaker logic for the service.
+* `rollback` - (Required) Whether to enable Amazon ECS to roll back the service if a service deployment fails. If rollback is enabled, when a service deployment fails, the service is rolled back to the last deployment that completed successfully.
+
+### deployment_controller
+
+The `deployment_controller` configuration block supports the following:
+
+* `type` - (Optional) Type of deployment controller. Valid values: `CODE_DEPLOY`, `ECS`, `EXTERNAL`. Default: `ECS`.
+
+### load_balancer
+
+`load_balancer` supports the following:
+
+* `elb_name` - (Required for ELB Classic) Name of the ELB (Classic) to associate with the service.
+* `target_group_arn` - (Required for ALB/NLB) ARN of the Load Balancer target group to associate with the service.
+* `container_name` - (Required) Name of the container to associate with the load balancer (as it appears in a container definition).
+* `container_port` - (Required) Port on the container to associate with the load balancer.
+
+-> **Version note:** Multiple `load_balancer` configuration block support was added in Terraform AWS Provider version 2.22.0. This allows configuration of [ECS service support for multiple target groups](https://aws.amazon.com/about-aws/whats-new/2019/07/amazon-ecs-services-now-support-multiple-load-balancer-target-groups/).
+
+### network_configuration
+
+`network_configuration` supports the following:
+
+* `subnets` - (Required) Subnets associated with the task or service.
+* `security_groups` - (Optional) Security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used.
+* `assign_public_ip` - (Optional) Assign a public IP address to the ENI (Fargate launch type only). Valid values are `true` or `false`. Default `false`.
+
+For more information, see [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html).
+
+### ordered_placement_strategy
+
+`ordered_placement_strategy` supports the following:
+
+* `type` - (Required) Type of placement strategy. Must be one of: `binpack`, `random`, or `spread`.
+* `field` - (Optional) For the `spread` placement strategy, valid values are `instanceId` (or `host`,
+ which has the same effect), or any platform or custom attribute that is applied to a container instance.
+ For the `binpack` type, valid values are `memory` and `cpu`. For the `random` type, this attribute is not
+ needed. For more information, see [Placement Strategy](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_PlacementStrategy.html).
+
+-> **Note:** For `spread`, `host` and `instanceId` will be normalized, by AWS, to be `instanceId`. This means the state file will show `instanceId` but your config will differ if you use `host`.
+
+### placement_constraints
+
+`placement_constraints` supports the following:
+
+* `type` - (Required) Type of constraint. The only valid values at this time are `memberOf` and `distinctInstance`.
+* `expression` - (Optional) Cluster Query Language expression to apply to the constraint. Does not need to be specified for the `distinctInstance` type. For more information, see [Cluster Query Language in the Amazon EC2 Container Service Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html).
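+
+To put the `deployment_circuit_breaker` block above into context, the following is a minimal sketch, not produced by `cdktf convert`: the struct name follows the bindings' convention, and the cluster/task definition references are placeholders. It stops a failing deployment early and rolls back to the last healthy deployment:
+
+```python
+# A minimal sketch, not generated by 'cdktf convert': stop a failing
+# deployment early and roll back to the last deployment that succeeded.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.ecs_service import EcsService
+class CircuitBreakerSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsService(self, "example",
+            cluster=Token.as_string(aws_ecs_cluster_example.id),
+            deployment_circuit_breaker=EcsServiceDeploymentCircuitBreaker(
+                enable=True,
+                rollback=True
+            ),
+            desired_count=2,
+            name="example",
+            task_definition=Token.as_string(aws_ecs_task_definition_example.arn)
+        )
+```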
+
+### service_registries
+
+`service_registries` supports the following:
+
+* `registry_arn` - (Required) ARN of the Service Registry. The currently supported service registry is Amazon Route 53 Auto Naming Service (`aws_service_discovery_service`). For more information, see [Service](https://docs.aws.amazon.com/Route53/latest/APIReference/API_autonaming_Service.html).
+* `port` - (Optional) Port value used if your Service Discovery service specified an SRV record.
+* `container_port` - (Optional) Port value, already specified in the task definition, to be used for your service discovery service.
+* `container_name` - (Optional) Container name value, already specified in the task definition, to be used for your service discovery service.
+
+### service_connect_configuration
+
+`service_connect_configuration` supports the following:
+
+* `enabled` - (Required) Specifies whether to use Service Connect with this service.
+* `log_configuration` - (Optional) The log configuration for the container. See below.
+* `namespace` - (Optional) The namespace name or ARN of the [`aws_service_discovery_http_namespace`](/docs/providers/aws/r/service_discovery_http_namespace.html) for use with Service Connect.
+* `service` - (Optional) The list of Service Connect service objects. See below.
+
+### log_configuration
+
+`log_configuration` supports the following:
+
+* `log_driver` - (Required) The log driver to use for the container.
+* `options` - (Optional) The configuration options to send to the log driver.
+* `secret_option` - (Optional) The secrets to pass to the log configuration. See below.
+
+### secret_option
+
+`secret_option` supports the following:
+
+* `name` - (Required) The name of the secret.
+* `value_from` - (Required) The secret to expose to the container. The supported values are either the full ARN of the AWS Secrets Manager secret or the full ARN of the parameter in the SSM Parameter Store.
+
+### service
+
+`service` supports the following:
+
+* `client_alias` - (Optional) The list of client aliases for this Service Connect service. You use these to assign names that can be used by client applications. The maximum number of client aliases that you can have in this list is 1. See below.
+* `discovery_name` - (Optional) The name of the new AWS Cloud Map service that Amazon ECS creates for this Amazon ECS service.
+* `ingress_port_override` - (Optional) The port number for the Service Connect proxy to listen on.
+* `port_name` - (Required) The name of one of the `portMappings` from all the containers in the task definition of this Amazon ECS service.
+
+### client_alias
+
+`client_alias` supports the following:
+
+* `dns_name` - (Optional) The name that you use in the applications of client tasks to connect to this service.
+* `port` - (Required) The listening port number for the Service Connect proxy. This port is available inside of all of the tasks within the same namespace.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `cluster` - Amazon Resource Name (ARN) of the cluster on which the service runs.
+* `desired_count` - Number of instances of the task definition.
+* `iam_role` - ARN of IAM role used for ELB.
+* `id` - ARN that identifies the service.
+* `name` - Name of the service.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
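+
+Tying the `service_connect_configuration`, `service`, and `client_alias` blocks above together, the following is a minimal sketch, not produced by `cdktf convert`: struct names follow the bindings' naming convention, and the cluster, task definition, and namespace references are placeholders. It registers the service under the alias `api.internal:8080`:
+
+```python
+# A minimal sketch, not generated by 'cdktf convert': register this service
+# in a Service Connect namespace under a client alias.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.ecs_service import EcsService
+class ServiceConnectSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsService(self, "example",
+            cluster=Token.as_string(aws_ecs_cluster_example.id),
+            name="example",
+            service_connect_configuration=EcsServiceServiceConnectConfiguration(
+                enabled=True,
+                namespace=Token.as_string(aws_service_discovery_http_namespace_example.arn),
+                service=[EcsServiceServiceConnectConfigurationService(
+                    client_alias=[EcsServiceServiceConnectConfigurationServiceClientAlias(
+                        dns_name="api.internal",
+                        port=8080
+                    )
+                    ],
+                    # port_name must match a portMappings name in the task definition.
+                    port_name="http"
+                )
+                ]
+            ),
+            task_definition=Token.as_string(aws_ecs_task_definition_example.arn)
+        )
+```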
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `update` - (Default `20m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS services using the `name` together with the ECS cluster `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECS services using the `name` together with the ECS cluster `name`. For example:
+
+```console
+% terraform import aws_ecs_service.imported cluster-name/service-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecs_tag.html.markdown b/website/docs/cdktf/python/r/ecs_tag.html.markdown
new file mode 100644
index 00000000000..1523345ea10
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecs_tag.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "ECS (Elastic Container)"
+layout: "aws"
+page_title: "AWS: aws_ecs_tag"
+description: |-
+  Manages an individual ECS resource tag
+---
+
+
+
+# Resource: aws_ecs_tag
+
+Manages an individual ECS resource tag. This resource should only be used in cases where ECS resources are created outside Terraform (e.g., ECS Clusters implicitly created by Batch Compute Environments).
+
+~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `aws_ecs_cluster` and `aws_ecs_tag` to manage tags of the same ECS Cluster will cause a perpetual difference where the `aws_ecs_cluster` resource will try to remove the tag being added by the `aws_ecs_tag` resource.
+
+~> **NOTE:** This tagging resource does not use the [provider `ignore_tags` configuration](/docs/providers/aws/index.html#ignore_tags).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.batch_compute_environment import BatchComputeEnvironment
+from imports.aws.ecs_tag import EcsTag
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = BatchComputeEnvironment(self, "example",
+            compute_environment_name="example",
+            service_role=Token.as_string(aws_iam_role_example.arn),
+            type="UNMANAGED"
+        )
+        aws_ecs_tag_example = EcsTag(self, "example_1",
+            key="Name",
+            resource_arn=example.ecs_cluster_arn,
+            value="Hello World"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecs_tag_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required) Amazon Resource Name (ARN) of the ECS resource to tag.
+* `key` - (Required) Tag name.
+* `value` - (Required) Tag value.
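+
+A further sketch of these arguments (not produced by `cdktf convert`): tagging an ECS cluster that was created outside Terraform directly by its ARN. The ARN is a placeholder in the same format as the Import example below:
+
+```python
+# A minimal sketch, not generated by 'cdktf convert': tag an ECS cluster
+# created outside Terraform. The ARN below is a placeholder.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ecs_tag import EcsTag
+class ExternalClusterTagSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsTag(self, "example",
+            key="Name",
+            resource_arn="arn:aws:ecs:us-east-1:123456789012:cluster/example",
+            value="Hello World"
+        )
+```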
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ECS resource identifier and key, separated by a comma (`,`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_ecs_tag` using the ECS resource identifier and key, separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_ecs_tag` using the ECS resource identifier and key, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_ecs_tag.example arn:aws:ecs:us-east-1:123456789012:cluster/example,Name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecs_task_definition.html.markdown b/website/docs/cdktf/python/r/ecs_task_definition.html.markdown
new file mode 100644
index 00000000000..cad7b49d366
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecs_task_definition.html.markdown
@@ -0,0 +1,382 @@
+---
+subcategory: "ECS (Elastic Container)"
+layout: "aws"
+page_title: "AWS: aws_ecs_task_definition"
+description: |-
+  Manages a revision of an ECS task definition.
+---
+
+
+
+# Resource: aws_ecs_task_definition
+
+Manages a revision of an ECS task definition to be used in `aws_ecs_service`.
+
+## Example Usage
+
+### Basic Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_task_definition import EcsTaskDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Keys inside the container definition JSON are camelCase
+        # (portMappings, containerPort, hostPort), as the ECS API expects.
+        EcsTaskDefinition(self, "service",
+            container_definitions=Token.as_string(
+                Fn.jsonencode([{
+                    "cpu": 10,
+                    "essential": True,
+                    "image": "service-first",
+                    "memory": 512,
+                    "name": "first",
+                    "portMappings": [{
+                        "containerPort": 80,
+                        "hostPort": 80
+                    }
+                    ]
+                }, {
+                    "cpu": 10,
+                    "essential": True,
+                    "image": "service-second",
+                    "memory": 256,
+                    "name": "second",
+                    "portMappings": [{
+                        "containerPort": 443,
+                        "hostPort": 443
+                    }
+                    ]
+                }
+                ])),
+            family="service",
+            placement_constraints=[EcsTaskDefinitionPlacementConstraints(
+                expression="attribute:ecs.availability-zone in [us-west-2a, us-west-2b]",
+                type="memberOf"
+            )
+            ],
+            volume=[EcsTaskDefinitionVolume(
+                host_path="/ecs/service-storage",
+                name="service-storage"
+            )
+            ]
+        )
+```
+
+### With AppMesh Proxy
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.ecs_task_definition import EcsTaskDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcsTaskDefinition(self, "service", + container_definitions=Token.as_string( + Fn.file("task-definitions/service.json")), + family="service", + proxy_configuration=EcsTaskDefinitionProxyConfiguration( + container_name="applicationContainerName", + properties={ + "AppPorts": "8080", + "EgressIgnoredIPs": "169.254.170.2,169.254.169.254", + "IgnoredUID": "1337", + "ProxyEgressPort": Token.as_string(15001), + "ProxyIngressPort": Token.as_string(15000) + }, + type="APPMESH" + ) + ) +``` + +### Example Using `docker_volume_configuration` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ecs_task_definition import EcsTaskDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcsTaskDefinition(self, "service", + container_definitions=Token.as_string( + Fn.file("task-definitions/service.json")), + family="service", + volume=[EcsTaskDefinitionVolume( + docker_volume_configuration=EcsTaskDefinitionVolumeDockerVolumeConfiguration( + autoprovision=True, + driver="local", + driver_opts={ + "device": "${" + fs.dns_name + "}:/", + "o": "addr=${" + fs.dns_name + "},rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport", + "type": "nfs" + }, + scope="shared" + ), + name="service-storage" + ) + ] + ) +``` + +### Example Using `efs_volume_configuration` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.ecs_task_definition import EcsTaskDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcsTaskDefinition(self, "service", + container_definitions=Token.as_string( + Fn.file("task-definitions/service.json")), + family="service", + volume=[EcsTaskDefinitionVolume( + efs_volume_configuration=EcsTaskDefinitionVolumeEfsVolumeConfiguration( + authorization_config=EcsTaskDefinitionVolumeEfsVolumeConfigurationAuthorizationConfig( + access_point_id=test.id, + iam="ENABLED" + ), + file_system_id=fs.id, + root_directory="/opt/data", + transit_encryption="ENABLED", + transit_encryption_port=2999 + ), + name="service-storage" + ) + ] + ) +``` + +### Example Using `fsx_windows_file_server_volume_configuration` + +```terraform +resource "aws_ecs_task_definition" "service" { + family = "service" + container_definitions = file("task-definitions/service.json") + + volume { + name = "service-storage" + + fsx_windows_file_server_volume_configuration { + file_system_id = aws_fsx_windows_file_system.test.id + root_directory = "\\data" + + authorization_config { + credentials_parameter = aws_secretsmanager_secret_version.test.arn + domain = aws_directory_service_directory.test.name + } + } + } +} + +resource "aws_secretsmanager_secret_version" "test" { + secret_id = aws_secretsmanager_secret.test.id + secret_string = jsonencode({ username : "admin", password : aws_directory_service_directory.test.password }) +} +``` + +### Example Using `container_definitions` and `inference_accelerator` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ecs_task_definition import EcsTaskDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EcsTaskDefinition(self, "test", + container_definitions="[\n {\n \"cpu\": 10,\n \"command\": [\"sleep\", \"10\"],\n \"entryPoint\": [\"/\"],\n \"environment\": [\n {\"name\": \"VARNAME\", \"value\": \"VARVAL\"}\n ],\n \"essential\": true,\n \"image\": \"jenkins\",\n \"memory\": 128,\n \"name\": \"jenkins\",\n \"portMappings\": [\n {\n \"containerPort\": 80,\n \"hostPort\": 8080\n }\n ],\n \"resourceRequirements\":[\n {\n \"type\":\"InferenceAccelerator\",\n \"value\":\"device_1\"\n }\n ]\n }\n]\n\n", + family="test", + inference_accelerator=[EcsTaskDefinitionInferenceAccelerator( + device_name="device_1", + device_type="eia1.medium" + ) + ] + ) +``` + +### Example Using `runtime_platform` and `fargate` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.ecs_task_definition import EcsTaskDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsTaskDefinition(self, "test",
+            container_definitions="[\n {\n \"name\": \"iis\",\n \"image\": \"mcr.microsoft.com/windows/servercore/iis\",\n \"cpu\": 1024,\n \"memory\": 2048,\n \"essential\": true\n }\n]\n\n",
+            cpu=Token.as_string(1024),
+            family="test",
+            memory=Token.as_string(2048),
+            network_mode="awsvpc",
+            requires_compatibilities=["FARGATE"],
+            runtime_platform=EcsTaskDefinitionRuntimePlatform(
+                cpu_architecture="X86_64",
+                operating_system_family="WINDOWS_SERVER_2019_CORE"
+            )
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** Proper escaping is required for JSON field values containing quotes (`"`) such as `environment` values. If directly setting the JSON, they should be escaped as `\"` in the JSON, e.g., `"value": "I \"love\" escaped quotes"`. If using a Terraform variable value, they should be escaped as `\\\"` in the variable, e.g., `value = "I \\\"love\\\" escaped quotes"` in the variable and `"value": "${var.myvariable}"` in the JSON.
+
+The following arguments are required:
+
+* `container_definitions` - (Required) A list of valid [container definitions](http://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_ContainerDefinition.html) provided as a single valid JSON document. Please note that you should only provide values that are part of the container definition document. For a detailed description of what parameters are available, see the [Task Definition Parameters](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definition_parameters.html) section from the official [Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide).
+* `family` - (Required) A unique name for your task definition.
+
+The following arguments are optional:
+
+* `cpu` - (Optional) Number of CPU units used by the task. If `requires_compatibilities` is `FARGATE`, this field is required.
+* `execution_role_arn` - (Optional) ARN of the task execution role that the Amazon ECS container agent and the Docker daemon can assume.
+* `inference_accelerator` - (Optional) Configuration block(s) with Inference Accelerators settings. [Detailed below.](#inference_accelerator)
+* `ipc_mode` - (Optional) IPC resource namespace to be used for the containers in the task. The valid values are `host`, `task`, and `none`.
+* `memory` - (Optional) Amount (in MiB) of memory used by the task. If `requires_compatibilities` is `FARGATE`, this field is required.
+* `network_mode` - (Optional) Docker networking mode to use for the containers in the task. Valid values are `none`, `bridge`, `awsvpc`, and `host`.
+* `runtime_platform` - (Optional) Configuration block for [runtime_platform](#runtime_platform) that containers in your task may use.
+* `pid_mode` - (Optional) Process namespace to use for the containers in the task. The valid values are `host` and `task`.
+* `placement_constraints` - (Optional) Configuration block for rules that are taken into consideration during task placement. Maximum number of `placement_constraints` is `10`. [Detailed below](#placement_constraints).
+* `proxy_configuration` - (Optional) Configuration block for the App Mesh proxy. [Detailed below.](#proxy_configuration)
+* `ephemeral_storage` - (Optional) The amount of ephemeral storage to allocate for the task.
This parameter is used to expand the total amount of ephemeral storage available, beyond the default amount, for tasks hosted on AWS Fargate. See [Ephemeral Storage](#ephemeral_storage).
+* `requires_compatibilities` - (Optional) Set of launch types required by the task. The valid values are `EC2` and `FARGATE`.
+* `skip_destroy` - (Optional) Whether to retain the old revision when the resource is destroyed or replacement is necessary. Default is `false`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `task_role_arn` - (Optional) ARN of IAM role that allows your Amazon ECS container task to make calls to other AWS services.
+* `volume` - (Optional) Configuration block for [volumes](#volume) that containers in your task may use. Detailed below.
+
+### volume
+
+* `docker_volume_configuration` - (Optional) Configuration block to configure a [docker volume](#docker_volume_configuration). Detailed below.
+* `efs_volume_configuration` - (Optional) Configuration block for an [EFS volume](#efs_volume_configuration). Detailed below.
+* `fsx_windows_file_server_volume_configuration` - (Optional) Configuration block for an [FSX Windows File Server volume](#fsx_windows_file_server_volume_configuration). Detailed below.
+* `host_path` - (Optional) Path on the host container instance that is presented to the container. If not set, ECS will create a nonpersistent data volume that starts empty and is deleted after the task has finished.
+* `name` - (Required) Name of the volume. This name is referenced in the `sourceVolume` parameter of the container definition in the `mountPoints` section.
+
+### docker_volume_configuration
+
+For more information, see [Specifying a Docker volume in your Task Definition Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/docker-volumes.html#specify-volume-config).
+
+* `autoprovision` - (Optional) If this value is `true`, the Docker volume is created if it does not already exist. *Note*: This field is only used if the scope is `shared`.
+* `driver_opts` - (Optional) Map of Docker driver-specific options.
+* `driver` - (Optional) Docker volume driver to use. The driver value must match the driver name provided by Docker because it is used for task placement.
+* `labels` - (Optional) Map of custom metadata to add to your Docker volume.
+* `scope` - (Optional) Scope for the Docker volume, which determines its lifecycle, either `task` or `shared`. Docker volumes that are scoped to a `task` are automatically provisioned when the task starts and destroyed when the task stops. Docker volumes that are scoped as `shared` persist after the task stops.
+
+### efs_volume_configuration
+
+For more information, see [Specifying an EFS volume in your Task Definition Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/efs-volumes.html#specify-efs-config).
+
+* `file_system_id` - (Required) ID of the EFS File System.
+* `root_directory` - (Optional) Directory within the Amazon EFS file system to mount as the root directory inside the host. If this parameter is omitted, the root of the Amazon EFS volume will be used. Specifying `/` will have the same effect as omitting this parameter. This argument is ignored when using `authorization_config`.
+
+### runtime_platform
+
+* `operating_system_family` - (Optional) If `requires_compatibilities` is `FARGATE`, this field is required; it must be set to a valid option from the [operating system family in the runtime platform](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definition_parameters.html#runtime-platform) setting.
+* `cpu_architecture` - (Optional) Must be set to either `X86_64` or `ARM64`; see [cpu architecture](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definition_parameters.html#runtime-platform).
+
+#### authorization_config
+
+* `access_point_id` - (Optional) Access point ID to use. If an access point is specified, the root directory value will be relative to the directory set for the access point. If specified, transit encryption must be enabled in the `EFSVolumeConfiguration`.
+* `iam` - (Optional) Whether or not to use the Amazon ECS task IAM role defined in a task definition when mounting the Amazon EFS file system. If enabled, transit encryption must be enabled in the `EFSVolumeConfiguration`. Valid values: `ENABLED`, `DISABLED`. If this parameter is omitted, the default value of `DISABLED` is used.
+
+### fsx_windows_file_server_volume_configuration
+
+For more information, see [Specifying an FSx for Windows File Server volume in your Task Definition](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/tutorial-wfsx-volumes.html) in the Developer Guide.
+
+* `file_system_id` - (Required) The Amazon FSx for Windows File Server file system ID to use.
+* `root_directory` - (Required) The directory within the Amazon FSx for Windows File Server file system to mount as the root directory inside the host.
+* `authorization_config` - (Required) Configuration block for [authorization](#authorization_config) for the Amazon FSx for Windows File Server file system. Detailed below.
+
+#### authorization_config
+
+* `credentials_parameter` - (Required) The authorization credential option to use. The authorization credential options can be provided using either the Amazon Resource Name (ARN) of an AWS Secrets Manager secret or AWS Systems Manager Parameter Store parameter. The ARNs refer to the stored credentials.
+* `domain` - (Required) A fully qualified domain name hosted by an AWS Directory Service Managed Microsoft AD (Active Directory) or self-hosted AD on Amazon EC2.
+
+### placement_constraints
+
+* `expression` - (Optional) Cluster Query Language expression to apply to the constraint. For more information, see [Cluster Query Language in the Amazon EC2 Container Service Developer Guide](http://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html).
+* `type` - (Required) Type of constraint. Use `memberOf` to restrict selection to a group of valid candidates. Note that `distinctInstance` is not supported in task definitions.
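+
+As a sketch of a `memberOf` constraint, hand-written rather than produced by `cdktf convert` (the struct class name and the availability-zone expression are assumptions):
+
+```python
+import json
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ecs_task_definition import EcsTaskDefinition, EcsTaskDefinitionPlacementConstraints
+class PlacementConstraintSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsTaskDefinition(self, "pinned",
+            family="pinned",
+            container_definitions=json.dumps([{
+                "name": "app",
+                "image": "nginx:latest",
+                "cpu": 256,
+                "memory": 512,
+                "essential": True
+            }]),
+            # Restrict placement to container instances in two Availability Zones.
+            placement_constraints=[EcsTaskDefinitionPlacementConstraints(
+                type="memberOf",
+                expression="attribute:ecs.availability-zone in [us-west-2a, us-west-2b]"
+            )]
+        )
+```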
+
+### proxy_configuration
+
+* `container_name` - (Required) Name of the container that will serve as the App Mesh proxy.
+* `properties` - (Required) Set of network configuration parameters to provide the Container Network Interface (CNI) plugin, specified as a key-value mapping.
+* `type` - (Optional) Proxy type. The default value is `APPMESH`. The only supported value is `APPMESH`.
+
+### ephemeral_storage
+
+* `size_in_gib` - (Required) The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is `21` GiB and the maximum supported value is `200` GiB.
+
+### inference_accelerator
+
+* `device_name` - (Required) Elastic Inference accelerator device name. The `deviceName` must also be referenced in a container definition as a `ResourceRequirement`.
+* `device_type` - (Required) Elastic Inference accelerator type to use.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Full ARN of the Task Definition (including both `family` and `revision`).
+* `arn_without_revision` - ARN of the Task Definition with the trailing `revision` removed. This may be useful for situations where the latest task definition is always desired. If a revision isn't specified, the latest ACTIVE revision is used. See the [AWS documentation](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_StartTask.html#ECS-StartTask-request-taskDefinition) for details.
+* `revision` - Revision of the task in a particular family.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Task Definitions using their ARNs. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECS Task Definitions using their ARNs. For example:
+
+```console
+% terraform import aws_ecs_task_definition.example arn:aws:ecs:us-east-1:012345678910:task-definition/mytaskfamily:123
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ecs_task_set.html.markdown b/website/docs/cdktf/python/r/ecs_task_set.html.markdown
new file mode 100644
index 00000000000..93c3485b190
--- /dev/null
+++ b/website/docs/cdktf/python/r/ecs_task_set.html.markdown
@@ -0,0 +1,172 @@
+---
+subcategory: "ECS (Elastic Container)"
+layout: "aws"
+page_title: "AWS: aws_ecs_task_set"
+description: |-
+  Provides an ECS task set.
+---
+
+
+# Resource: aws_ecs_task_set
+
+Provides an ECS task set - effectively a task that is expected to run until an error occurs or a user terminates it (typically a webserver or a database).
+
+See [ECS Task Set section in AWS developer guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/deployment-type-external.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_task_set import EcsTaskSet, EcsTaskSetLoadBalancer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EcsTaskSet(self, "example",
+            cluster=Token.as_string(aws_ecs_cluster_example.id),
+            load_balancer=[EcsTaskSetLoadBalancer(
+                container_name="mongo",
+                container_port=8080,
+                target_group_arn=Token.as_string(aws_lb_target_group_example.arn)
+            )
+            ],
+            service=Token.as_string(aws_ecs_service_example.id),
+            task_definition=Token.as_string(aws_ecs_task_definition_example.arn)
+        )
+```
+
+### Ignoring Changes to Scale
+
+You can utilize the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignore_changes` to create an ECS task set with an initial `scale`, then ignore any changes to that scale caused externally (e.g. Application Autoscaling).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ecs_task_set import EcsTaskSet, EcsTaskSetScale
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, cluster, service, task_definition):
+        super().__init__(scope, name)
+        EcsTaskSet(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["scale"]
+            ),
+            scale=EcsTaskSetScale(
+                value=50
+            ),
+            cluster=cluster,
+            service=service,
+            task_definition=task_definition
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `service` - (Required) The short name or ARN of the ECS service.
+* `cluster` - (Required) The short name or ARN of the cluster that hosts the service to create the task set in.
+* `task_definition` - (Required) The family and revision (`family:revision`) or full ARN of the task definition that you want to run in your service.
+
+The following arguments are optional:
+
+* `capacity_provider_strategy` - (Optional) The capacity provider strategy to use for the service. Can be one or more. [Defined below](#capacity_provider_strategy).
+* `external_id` - (Optional) The external ID associated with the task set.
+* `force_delete` - (Optional) Whether to allow deleting the task set without waiting for scaling down to 0. You can force a task set to delete even if it's in the process of scaling a resource. Normally, Terraform drains all the tasks before deleting the task set. This bypasses that behavior and potentially leaves resources dangling.
+* `launch_type` - (Optional) The launch type on which to run your service. The valid values are `EC2`, `FARGATE`, and `EXTERNAL`. Defaults to `EC2`.
+* `load_balancer` - (Optional) Details on load balancers that are used with a task set. [Detailed below](#load_balancer).
+* `platform_version` - (Optional) The platform version on which to run your service. Only applicable for `launch_type` set to `FARGATE`. Defaults to `LATEST`. More information about Fargate platform versions can be found in the [AWS ECS User Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/platform_versions.html).
+* `network_configuration` - (Optional) The network configuration for the service. This parameter is required for task definitions that use the `awsvpc` network mode to receive their own Elastic Network Interface, and it is not supported for other network modes. [Detailed below](#network_configuration).
+* `scale` - (Optional) A floating-point percentage of the desired number of tasks to place and keep running in the task set. [Detailed below](#scale).
+* `service_registries` - (Optional) The service discovery registries for the service. The maximum number of `service_registries` blocks is `1`. [Detailed below](#service_registries).
+* `tags` - (Optional) A map of tags to assign to the task set. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `wait_until_stable` - (Optional) Whether `terraform` should wait until the task set has reached `STEADY_STATE`.
+* `wait_until_stable_timeout` - (Optional) Wait timeout for task set to reach `STEADY_STATE`. Valid time units include `ns`, `us` (or `µs`), `ms`, `s`, `m`, and `h`. Default `10m`.
+
+## capacity_provider_strategy
+
+The `capacity_provider_strategy` configuration block supports the following:
+
+* `capacity_provider` - (Required) The short name or full Amazon Resource Name (ARN) of the capacity provider.
+* `weight` - (Required) The relative percentage of the total number of launched tasks that should use the specified capacity provider.
+* `base` - (Optional) The number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined.
+
+## load_balancer
+
+The `load_balancer` configuration block supports the following:
+
+* `container_name` - (Required) The name of the container to associate with the load balancer (as it appears in a container definition).
+* `load_balancer_name` - (Optional, Required for ELB Classic) The name of the ELB (Classic) to associate with the service.
+* `target_group_arn` - (Optional, Required for ALB/NLB) The ARN of the Load Balancer target group to associate with the service.
+* `container_port` - (Optional) The port on the container to associate with the load balancer. Defaults to `0` if not specified.
+
+~> **Note:** Specifying multiple `load_balancer` configurations is still not supported by AWS for ECS task set.
+
+## network_configuration
+
+The `network_configuration` configuration block supports the following:
+
+* `subnets` - (Required) The subnets associated with the task or service. Maximum of 16.
+* `security_groups` - (Optional) The security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used. Maximum of 5.
+* `assign_public_ip` - (Optional) Whether to assign a public IP address to the ENI (`FARGATE` launch type only). Valid values are `true` or `false`. Default `false`.
+
+For more information, see [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html).
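+
+As a hand-written sketch (not `cdktf convert` output), a task set for an `awsvpc` task definition might pass its networking like this; the `EcsTaskSetNetworkConfiguration` struct name follows the usual `cdktf` convention and the subnet/security group inputs are assumed to come from elsewhere in the stack:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ecs_task_set import EcsTaskSet, EcsTaskSetNetworkConfiguration
+class NetworkConfigurationSketch(TerraformStack):
+    def __init__(self, scope, name, *, cluster, service, task_definition, subnet_ids, security_group_ids):
+        super().__init__(scope, name)
+        EcsTaskSet(self, "example",
+            cluster=cluster,
+            service=service,
+            task_definition=task_definition,
+            network_configuration=EcsTaskSetNetworkConfiguration(
+                subnets=subnet_ids,
+                security_groups=security_group_ids,
+                assign_public_ip=False  # only meaningful for the FARGATE launch type
+            )
+        )
+```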
+
+## scale
+
+The `scale` configuration block supports the following:
+
+* `unit` - (Optional) The unit of measure for the scale value. Default: `PERCENT`.
+* `value` - (Optional) The value, specified as a percent total of a service's `desiredCount`, to scale the task set. Defaults to `0` if not specified. Accepted values are numbers between `0.0` and `100.0`.
+
+## service_registries
+
+The `service_registries` configuration block supports the following:
+
+* `registry_arn` - (Required) The ARN of the Service Registry. The currently supported service registry is Amazon Route 53 Auto Naming Service (the [`aws_service_discovery_service` resource](/docs/providers/aws/r/service_discovery_service.html)). For more information, see [Service](https://docs.aws.amazon.com/Route53/latest/APIReference/API_autonaming_Service.html).
+* `port` - (Optional) The port value used if your Service Discovery service specified an SRV record.
+* `container_port` - (Optional) The port value, already specified in the task definition, to be used for your service discovery service.
+* `container_name` - (Optional) The container name value, already specified in the task definition, to be used for your service discovery service.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `task_set_id`, `service` and `cluster` separated by commas (`,`).
+* `arn` - The Amazon Resource Name (ARN) that identifies the task set.
+* `stability_status` - The stability status. This indicates whether the task set has reached a steady state.
+* `status` - The status of the task set.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `task_set_id` - The ID of the task set.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Task Sets using the `task_set_id`, `service`, and `cluster` separated by commas (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ECS Task Sets using the `task_set_id`, `service`, and `cluster` separated by commas (`,`). For example:
+
+```console
+% terraform import aws_ecs_task_set.example ecs-svc/7177320696926227436,arn:aws:ecs:us-west-2:123456789101:service/example/example-1234567890,arn:aws:ecs:us-west-2:123456789101:cluster/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/efs_access_point.html.markdown b/website/docs/cdktf/python/r/efs_access_point.html.markdown
new file mode 100644
index 00000000000..7402a070fd1
--- /dev/null
+++ b/website/docs/cdktf/python/r/efs_access_point.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_access_point"
+description: |-
+  Provides an Elastic File System (EFS) access point.
+---
+
+
+# Resource: aws_efs_access_point
+
+Provides an Elastic File System (EFS) access point.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.efs_access_point import EfsAccessPoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EfsAccessPoint(self, "test",
+            file_system_id=foo.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `file_system_id` - (Required) ID of the file system for which the access point is intended.
+* `posix_user` - (Optional) Operating system user and group applied to all file system requests made using the access point. [Detailed](#posix_user) below.
+* `root_directory` - (Optional) Directory on the Amazon EFS file system that the access point provides access to. [Detailed](#root_directory) below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### posix_user
+
+* `gid` - (Required) POSIX group ID used for all file system operations using this access point.
+* `secondary_gids` - (Optional) Secondary POSIX group IDs used for all file system operations using this access point.
+* `uid` - (Required) POSIX user ID used for all file system operations using this access point.
+
+### root_directory
+
+The access point exposes the specified file system path as the root directory of your file system to applications using the access point. NFS clients using the access point can only access data in the access point's root directory and its subdirectories.
+
+* `creation_info` - (Optional) POSIX IDs and permissions to apply to the access point's root directory. See [Creation Info](#creation_info) below.
+* `path` - (Optional) Path on the EFS file system to expose as the root directory to NFS clients using the access point to access the EFS file system. A path can have up to four subdirectories. If the specified path does not exist, you are required to provide `creation_info`.
+
+### creation_info
+
+If the `path` specified does not exist, EFS creates the root directory using the `creation_info` settings when a client connects to an access point.
+
+* `owner_gid` - (Required) POSIX group ID to apply to the `root_directory`.
+* `owner_uid` - (Required) POSIX user ID to apply to the `root_directory`.
+* `permissions` - (Required) POSIX permissions to apply to the root directory, in the format of an octal number representing the file's mode bits.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the access point.
+* `file_system_arn` - ARN of the file system.
+* `id` - ID of the access point.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS access points using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the EFS access points using the `id`. For example:
+
+```console
+% terraform import aws_efs_access_point.test fsap-52a643fb
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/efs_backup_policy.html.markdown b/website/docs/cdktf/python/r/efs_backup_policy.html.markdown
new file mode 100644
index 00000000000..232033077cf
--- /dev/null
+++ b/website/docs/cdktf/python/r/efs_backup_policy.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_backup_policy"
+description: |-
+  Provides an Elastic File System (EFS) Backup Policy resource.
+---
+
+
+# Resource: aws_efs_backup_policy
+
+Provides an Elastic File System (EFS) Backup Policy resource.
+Backup policies turn automatic backups on or off for an existing file system.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.efs_backup_policy import EfsBackupPolicy, EfsBackupPolicyBackupPolicy
+from imports.aws.efs_file_system import EfsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        fs = EfsFileSystem(self, "fs",
+            creation_token="my-product"
+        )
+        EfsBackupPolicy(self, "policy",
+            backup_policy=EfsBackupPolicyBackupPolicy(
+                status="ENABLED"
+            ),
+            file_system_id=fs.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `file_system_id` - (Required) The ID of the EFS file system.
+* `backup_policy` - (Required) A `backup_policy` object (documented below).
+
+### Backup Policy Arguments
+
+`backup_policy` supports the following arguments:
+
+* `status` - (Required) The status of the backup policy. Valid values: `ENABLED`, `DISABLED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID that identifies the file system (e.g., fs-ccfc0d65).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS backup policies using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the EFS backup policies using the `id`. For example:
+
+```console
+% terraform import aws_efs_backup_policy.example fs-6fa144c6
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/efs_file_system.html.markdown b/website/docs/cdktf/python/r/efs_file_system.html.markdown
new file mode 100644
index 00000000000..dc18db27c5c
--- /dev/null
+++ b/website/docs/cdktf/python/r/efs_file_system.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_file_system"
+description: |-
+  Provides an Elastic File System (EFS) File System resource.
+---
+
+
+# Resource: aws_efs_file_system
+
+Provides an Elastic File System (EFS) File System resource.
+
+## Example Usage
+
+### EFS File System w/ tags
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.efs_file_system import EfsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EfsFileSystem(self, "foo",
+            creation_token="my-product",
+            tags={
+                "Name": "MyProduct"
+            }
+        )
+```
+
+### Using lifecycle policy
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.efs_file_system import EfsFileSystem, EfsFileSystemLifecyclePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EfsFileSystem(self, "foo_with_lifecycle_policy",
+            creation_token="my-product",
+            lifecycle_policy=[EfsFileSystemLifecyclePolicy(
+                transition_to_ia="AFTER_30_DAYS"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `availability_zone_name` - (Optional) The AWS Availability Zone in which to create the file system. Used to create a file system that uses One Zone storage classes. See the [user guide](https://docs.aws.amazon.com/efs/latest/ug/storage-classes.html) for more information.
+* `creation_token` - (Optional) A unique name (a maximum of 64 characters are allowed)
+used as reference when creating the Elastic File System to ensure idempotent file
+system creation. By default generated by Terraform. See the [Elastic File System](http://docs.aws.amazon.com/efs/latest/ug/)
+user guide for more information.
+* `encrypted` - (Optional) If true, the disk will be encrypted.
+* `kms_key_id` - (Optional) The ARN for the KMS encryption key. When specifying `kms_key_id`, `encrypted` needs to be set to `true`.
+* `lifecycle_policy` - (Optional) A file system [lifecycle policy](https://docs.aws.amazon.com/efs/latest/ug/API_LifecyclePolicy.html) object (documented below).
+* `performance_mode` - (Optional) The file system performance mode. Can be either `"generalPurpose"` or `"maxIO"` (Default: `"generalPurpose"`).
+* `provisioned_throughput_in_mibps` - (Optional) The throughput, measured in MiB/s, that you want to provision for the file system. Only applicable with `throughput_mode` set to `provisioned`.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `throughput_mode` - (Optional) Throughput mode for the file system. Defaults to `bursting`. Valid values: `bursting`, `provisioned`, or `elastic`. When using `provisioned`, also set `provisioned_throughput_in_mibps`.
+
+### Lifecycle Policy Arguments
+
+`lifecycle_policy` supports the following arguments:
+
+* `transition_to_ia` - (Optional) Indicates how long it takes to transition files to the IA storage class. Valid values: `AFTER_1_DAY`, `AFTER_7_DAYS`, `AFTER_14_DAYS`, `AFTER_30_DAYS`, `AFTER_60_DAYS`, or `AFTER_90_DAYS`.
+* `transition_to_primary_storage_class` - (Optional) Describes the policy used to transition a file from infrequent access storage to primary storage. Valid values: `AFTER_1_ACCESS`.
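+
+Because each `lifecycle_policy` element corresponds to a single transition in the EFS API, combining both transitions typically takes two elements. A hand-written sketch (the struct import is an assumption in line with the generated examples above):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.efs_file_system import EfsFileSystem, EfsFileSystemLifecyclePolicy
+class LifecycleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EfsFileSystem(self, "example",
+            creation_token="my-product",
+            lifecycle_policy=[
+                # Move files to IA after 30 days without access...
+                EfsFileSystemLifecyclePolicy(transition_to_ia="AFTER_30_DAYS"),
+                # ...and bring them back to primary storage on first access.
+                EfsFileSystemLifecyclePolicy(transition_to_primary_storage_class="AFTER_1_ACCESS")
+            ]
+        )
+```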
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `availability_zone_id` - The identifier of the Availability Zone in which the file system's One Zone storage classes exist.
+* `id` - The ID that identifies the file system (e.g., fs-ccfc0d65).
+* `dns_name` - The DNS name for the filesystem per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html).
+* `owner_id` - The AWS account that created the file system. If the file system was created by an IAM user, the parent account to which the user belongs is the owner.
+* `number_of_mount_targets` - The current number of mount targets that the file system has.
+* `size_in_bytes` - The latest known metered size (in bytes) of data stored in the file system; the value is not the exact size that the file system was at any point in time. See [Size In Bytes](#size-in-bytes).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### Size In Bytes
+
+* `value` - The latest known metered size (in bytes) of data stored in the file system.
+* `value_in_ia` - The latest known metered size (in bytes) of data stored in the Infrequent Access storage class.
+* `value_in_standard` - The latest known metered size (in bytes) of data stored in the Standard storage class.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS file systems using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the EFS file systems using the `id`. For example:
+
+```console
+% terraform import aws_efs_file_system.foo fs-6fa144c6
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/efs_file_system_policy.html.markdown b/website/docs/cdktf/python/r/efs_file_system_policy.html.markdown
new file mode 100644
index 00000000000..1e476546c34
--- /dev/null
+++ b/website/docs/cdktf/python/r/efs_file_system_policy.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_file_system_policy"
+description: |-
+  Provides an Elastic File System (EFS) File System Policy resource.
+---
+
+
+# Resource: aws_efs_file_system_policy
+
+Provides an Elastic File System (EFS) File System Policy resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementCondition, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.efs_file_system import EfsFileSystem
+from imports.aws.efs_file_system_policy import EfsFileSystemPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        fs = EfsFileSystem(self, "fs",
+            creation_token="my-product"
+        )
+        policy = DataAwsIamPolicyDocument(self, "policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["elasticfilesystem:ClientMount", "elasticfilesystem:ClientWrite"
+                ],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="Bool",
+                    values=["true"],
+                    variable="aws:SecureTransport"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="AWS"
+                )
+                ],
+                resources=[fs.arn],
+                sid="ExampleStatement01"
+            )
+            ]
+        )
+        aws_efs_file_system_policy_policy = EfsFileSystemPolicy(self, "policy_2",
+            file_system_id=fs.id,
+            policy=Token.as_string(policy.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_efs_file_system_policy_policy.override_logical_id("policy")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `file_system_id` - (Required) The ID of the EFS file system.
+* `policy` - (Required) The JSON formatted file system policy for the EFS file system. See the [docs](https://docs.aws.amazon.com/efs/latest/ug/access-control-overview.html#access-control-manage-access-intro-resource-policies) for more info.
+
+The following arguments are optional:
+
+* `bypass_policy_lockout_safety_check` - (Optional) A flag to indicate whether to bypass the `aws_efs_file_system_policy` lockout safety check. The policy lockout safety check determines whether the policy in the request will lock the principal that is making the request out of making future `PutFileSystemPolicy` requests on the file system. Set `bypass_policy_lockout_safety_check` to `true` only when you intend to prevent the principal that is making the request from making a subsequent `PutFileSystemPolicy` request on the file system. The default value is `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID that identifies the file system (e.g., fs-ccfc0d65).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS file system policies using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the EFS file system policies using the `id`. For example:
+
+```console
+% terraform import aws_efs_file_system_policy.foo fs-6fa144c6
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/efs_mount_target.html.markdown b/website/docs/cdktf/python/r/efs_mount_target.html.markdown
new file mode 100644
index 00000000000..2b20fd5b369
--- /dev/null
+++ b/website/docs/cdktf/python/r/efs_mount_target.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_mount_target"
+description: |-
+  Provides an Elastic File System (EFS) mount target.
+---
+
+
+# Resource: aws_efs_mount_target
+
+Provides an Elastic File System (EFS) mount target.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.efs_mount_target import EfsMountTarget
+from imports.aws.subnet import Subnet
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foo = Vpc(self, "foo",
+            cidr_block="10.0.0.0/16"
+        )
+        alpha = Subnet(self, "alpha",
+            availability_zone="us-west-2a",
+            cidr_block="10.0.1.0/24",
+            vpc_id=foo.id
+        )
+        aws_efs_mount_target_alpha = EfsMountTarget(self, "alpha_2",
+            file_system_id=Token.as_string(aws_efs_file_system_foo.id),
+            subnet_id=alpha.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_efs_mount_target_alpha.override_logical_id("alpha")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `file_system_id` - (Required) The ID of the file system for which the mount target is intended.
+* `subnet_id` - (Required) The ID of the subnet to add the mount target in.
+* `ip_address` - (Optional) The address (within the address range of the specified subnet) at
+which the file system may be mounted via the mount target.
+* `security_groups` - (Optional) A list of up to 5 VPC security group IDs (that must
+be for the same VPC as the subnet specified) in effect for the mount target.
+
+## Attribute Reference
+
+~> **Note:** The `dns_name` and `mount_target_dns_name` attributes are only useful if the mount target is in a VPC that has
+support for DNS hostnames enabled. See [Using DNS with Your VPC](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-dns.html)
+and the [VPC resource](/docs/providers/aws/r/vpc.html#enable_dns_hostnames) in Terraform for more information.
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the mount target.
+* `dns_name` - The DNS name for the EFS file system.
+* `mount_target_dns_name` - The DNS name for the given subnet/AZ per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html).
+* `file_system_arn` - Amazon Resource Name of the file system.
+* `network_interface_id` - The ID of the network interface that Amazon EFS created when it created the mount target.
+* `availability_zone_name` - The name of the Availability Zone (AZ) that the mount target resides in.
+* `availability_zone_id` - The unique and consistent identifier of the Availability Zone (AZ) that the mount target resides in.
+* `owner_id` - AWS account ID that owns the resource.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS mount targets using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the EFS mount targets using the `id`. For example:
+
+```console
+% terraform import aws_efs_mount_target.alpha fsmt-52a643fb
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/efs_replication_configuration.html.markdown b/website/docs/cdktf/python/r/efs_replication_configuration.html.markdown
new file mode 100644
index 00000000000..c836ff9965b
--- /dev/null
+++ b/website/docs/cdktf/python/r/efs_replication_configuration.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_replication_configuration"
+description: Provides an Elastic File System (EFS) Replication Configuration.
+---
+
+
+# Resource: aws_efs_replication_configuration
+
+Creates a replica of an existing EFS file system in the same or another region. Creating this resource causes the source EFS file system to be replicated to a new read-only destination EFS file system. Deleting this resource will cause the replication from source to destination to stop, and the destination file system will no longer be read-only.
+
+~> **NOTE:** Deleting this resource does **not** delete the destination file system that was created.
+
+## Example Usage
+
+The following example will create a replica using regional storage in us-west-2, encrypted by the default EFS KMS key `/aws/elasticfilesystem`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.efs_file_system import EfsFileSystem
+from imports.aws.efs_replication_configuration import EfsReplicationConfiguration, EfsReplicationConfigurationDestination
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = EfsFileSystem(self, "example")
+        aws_efs_replication_configuration_example = EfsReplicationConfiguration(self, "example_1",
+            destination=EfsReplicationConfigurationDestination(
+                region="us-west-2"
+            ),
+            source_file_system_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_efs_replication_configuration_example.override_logical_id("example")
+```
+
+The following example will create the replica with One Zone storage in the us-west-2b Availability Zone, encrypted with the specified KMS key.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.efs_file_system import EfsFileSystem
+from imports.aws.efs_replication_configuration import EfsReplicationConfiguration, EfsReplicationConfigurationDestination
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = EfsFileSystem(self, "example")
+        aws_efs_replication_configuration_example = EfsReplicationConfiguration(self, "example_1",
+            destination=EfsReplicationConfigurationDestination(
+                availability_zone_name="us-west-2b",
+                kms_key_id="1234abcd-12ab-34cd-56ef-1234567890ab"
+            ),
+            source_file_system_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_efs_replication_configuration_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `source_file_system_id` - (Required) The ID of the file system that is to be replicated.
+* `destination` - (Required) A destination configuration block (documented below).
+
+### Destination Arguments
+
+`destination` supports the following arguments:
+
+* `availability_zone_name` - (Optional) The availability zone in which the replica should be created. If specified, the replica will be created with One Zone storage. If omitted, regional storage will be used.
+* `kms_key_id` - (Optional) The Key ID, ARN, alias, or alias ARN of the KMS key that should be used to encrypt the replica file system. If omitted, the default KMS key for EFS, `/aws/elasticfilesystem`, will be used.
+* `region` - (Optional) The region in which the replica should be created.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `creation_time` - When the replication configuration was created.
+* `original_source_file_system_arn` - The Amazon Resource Name (ARN) of the original source Amazon EFS file system in the replication configuration.
+* `source_file_system_arn` - The Amazon Resource Name (ARN) of the current source file system in the replication configuration.
+* `source_file_system_region` - The AWS Region in which the source Amazon EFS file system is located.
+* `destination[0].file_system_id` - The file system ID of the replica.
+* `destination[0].status` - The status of the replication.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EFS Replication Configurations using the file system ID of either the source or destination file system. When importing, the `availability_zone_name` and `kms_key_id` attributes must **not** be set in the configuration. The AWS API does not return these values when querying the replication configuration and their presence will therefore show as a diff in a subsequent plan. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EFS Replication Configurations using the file system ID of either the source or destination file system. When importing, the `availability_zone_name` and `kms_key_id` attributes must **not** be set in the configuration. The AWS API does not return these values when querying the replication configuration and their presence will therefore show as a diff in a subsequent plan. For example:
+
+```console
+% terraform import aws_efs_replication_configuration.example fs-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/egress_only_internet_gateway.html.markdown b/website/docs/cdktf/python/r/egress_only_internet_gateway.html.markdown
new file mode 100644
index 00000000000..cf38dccf9c5
--- /dev/null
+++ b/website/docs/cdktf/python/r/egress_only_internet_gateway.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_egress_only_internet_gateway"
+description: |-
+  Provides a resource to create an egress-only Internet gateway.
+---
+
+
+# Resource: aws_egress_only_internet_gateway
+
+[IPv6 only] Creates an egress-only Internet gateway for your VPC.
+An egress-only Internet gateway is used to enable outbound communication
+over IPv6 from instances in your VPC to the Internet, and prevents hosts
+outside of your VPC from initiating an IPv6 connection with your instance.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.egress_only_internet_gateway import EgressOnlyInternetGateway
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Vpc(self, "example",
+            assign_generated_ipv6_cidr_block=True,
+            cidr_block="10.1.0.0/16"
+        )
+        aws_egress_only_internet_gateway_example = EgressOnlyInternetGateway(self, "example_1",
+            tags={
+                "Name": "main"
+            },
+            vpc_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_egress_only_internet_gateway_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `vpc_id` - (Required) The ID of the VPC for which to create the egress-only Internet gateway.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the egress-only Internet gateway.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Egress-only Internet gateways using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Egress-only Internet gateways using the `id`. For example:
+
+```console
+% terraform import aws_egress_only_internet_gateway.example eigw-015e0e244e24dfe8a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/eip.html.markdown b/website/docs/cdktf/python/r/eip.html.markdown
new file mode 100644
index 00000000000..d442e85d0b8
--- /dev/null
+++ b/website/docs/cdktf/python/r/eip.html.markdown
@@ -0,0 +1,205 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_eip"
+description: |-
+  Provides an Elastic IP resource.
+---
+
+
+# Resource: aws_eip
+
+Provides an Elastic IP resource.
+
+~> **Note:** An EIP may require an Internet Gateway (IGW) to exist prior to association. Use `depends_on` to set an explicit dependency on the IGW.
+
+~> **Note:** Do not use `network_interface` to associate the EIP to `aws_lb` or `aws_nat_gateway` resources. Instead use the `allocation_id` available in those resources to allow AWS to manage the association, otherwise you will see `AuthFailure` errors.
+
+## Example Usage
+
+### Single EIP associated with an instance
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.eip import Eip
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Eip(self, "lb",
+            domain="vpc",
+            instance=web.id
+        )
+```
+
+### Multiple EIPs associated with a single network interface
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.eip import Eip
+from imports.aws.network_interface import NetworkInterface
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        multi_ip = NetworkInterface(self, "multi-ip",
+            private_ips=["10.0.0.10", "10.0.0.11"],
+            subnet_id=main.id
+        )
+        Eip(self, "one",
+            associate_with_private_ip="10.0.0.10",
+            domain="vpc",
+            network_interface=multi_ip.id
+        )
+        Eip(self, "two",
+            associate_with_private_ip="10.0.0.11",
+            domain="vpc",
+            network_interface=multi_ip.id
+        )
+```
+
+### Attaching an EIP to an Instance with a pre-assigned private IP (VPC Only)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.eip import Eip
+from imports.aws.instance import Instance
+from imports.aws.internet_gateway import InternetGateway
+from imports.aws.subnet import Subnet
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        default_var = Vpc(self, "default",
+            cidr_block="10.0.0.0/16",
+            enable_dns_hostnames=True
+        )
+        gw = InternetGateway(self, "gw",
+            vpc_id=default_var.id
+        )
+        tf_test_subnet = Subnet(self, "tf_test_subnet",
+            cidr_block="10.0.0.0/24",
+            depends_on=[gw],
+            map_public_ip_on_launch=True,
+            vpc_id=default_var.id
+        )
+        foo = Instance(self, "foo",
+            ami="ami-5189a661",
+            instance_type="t2.micro",
+            private_ip="10.0.0.12",
+            subnet_id=tf_test_subnet.id
+        )
+        Eip(self, "bar",
+            associate_with_private_ip="10.0.0.12",
+            depends_on=[gw],
+            domain="vpc",
+            instance=foo.id
+        )
+```
+
+### Allocating EIP from the BYOIP pool
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.eip import Eip
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Eip(self, "byoip-ip",
+            domain="vpc",
+            public_ipv4_pool="ipv4pool-ec2-012345"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `address` - (Optional) IP address from an EC2 BYOIP pool. This option is only available for VPC EIPs.
+* `associate_with_private_ip` - (Optional) User-specified primary or secondary private IP address to associate with the Elastic IP address. If no private IP address is specified, the Elastic IP address is associated with the primary private IP address.
+* `customer_owned_ipv4_pool` - (Optional) ID of a customer-owned address pool. For more on customer-owned IP addresses, check out the [Customer-owned IP addresses guide](https://docs.aws.amazon.com/outposts/latest/userguide/outposts-networking-components.html#ip-addressing).
+* `domain` - Indicates if this EIP is for use in VPC (`vpc`).
+* `instance` - (Optional) EC2 instance ID.
+* `network_border_group` - (Optional) Location from which the IP address is advertised. Use this parameter to limit the address to this location.
+* `network_interface` - (Optional) Network interface ID to associate with.
+* `public_ipv4_pool` - (Optional) EC2 IPv4 address pool identifier or `amazon`.
+  This option is only available for VPC EIPs.
+* `tags` - (Optional) Map of tags to assign to the resource. Tags can only be applied to EIPs in a VPC. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc` - (Optional **Deprecated**) Boolean if the EIP is in a VPC or not. Use `domain` instead.
+  Defaults to `true` unless the region supports EC2-Classic.
+
+~> **NOTE:** You can specify either the `instance` ID or the `network_interface` ID, but not both. Including both will **not** return an error from the AWS API, but will have undefined behavior. See the relevant [AssociateAddress API Call][1] for more information.
+
+~> **NOTE:** Specifying both `public_ipv4_pool` and `address` won't cause an error, but `address` will be used when both options are defined, as the API only requires one or the other.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `allocation_id` - ID that AWS assigns to represent the allocation of the Elastic IP address for use with instances in a VPC.
+* `association_id` - ID representing the association of the address with an instance in a VPC.
+* `carrier_ip` - Carrier IP address.
+* `customer_owned_ip` - Customer owned IP.
+* `id` - Contains the EIP allocation ID.
+* `private_dns` - The Private DNS associated with the Elastic IP address (if in VPC).
+* `private_ip` - Contains the private IP address (if in VPC).
+* `public_dns` - Public DNS associated with the Elastic IP address.
+* `public_ip` - Contains the public IP address.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+~> **Note:** The resource computes the `public_dns` and `private_dns` attributes according to the [VPC DNS Guide](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-dns.html#vpc-dns-hostnames) as they are not available with the EC2 API.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `15m`)
+- `update` - (Default `5m`)
+- `delete` - (Default `3m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EIPs in a VPC using their Allocation ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EIPs in a VPC using their Allocation ID. For example:
+
+```console
+% terraform import aws_eip.bar eipalloc-00a10e96
+```
+
+[1]: https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_AssociateAddress.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/eip_association.html.markdown b/website/docs/cdktf/python/r/eip_association.html.markdown
new file mode 100644
index 00000000000..b3414de6b6e
--- /dev/null
+++ b/website/docs/cdktf/python/r/eip_association.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_eip_association"
+description: |-
+  Provides an AWS EIP Association
+---
+
+
+# Resource: aws_eip_association
+
+Provides an AWS EIP Association as a top-level resource, to associate and
+disassociate Elastic IPs from AWS Instances and Network Interfaces.
+
+~> **NOTE:** Do not use this resource to associate an EIP to `aws_lb` or `aws_nat_gateway` resources. Instead use the `allocation_id` available in those resources to allow AWS to manage the association, otherwise you will see `AuthFailure` errors.
+
+~> **NOTE:** `aws_eip_association` is useful in scenarios where EIPs are either
+pre-existing or distributed to customers or users and therefore cannot be changed.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.eip import Eip
+from imports.aws.eip_association import EipAssociation
+from imports.aws.instance import Instance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Eip(self, "example",
+            domain="vpc"
+        )
+        web = Instance(self, "web",
+            ami="ami-21f78e11",
+            availability_zone="us-west-2a",
+            instance_type="t2.micro",
+            tags={
+                "Name": "HelloWorld"
+            }
+        )
+        EipAssociation(self, "eip_assoc",
+            allocation_id=example.id,
+            instance_id=web.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allocation_id` - (Optional) The allocation ID. This is required for EC2-VPC.
+* `allow_reassociation` - (Optional, Boolean) Whether to allow an Elastic IP to
+be re-associated. Defaults to `true` in VPC.
+* `instance_id` - (Optional) The ID of the instance. This is required for
+EC2-Classic. For EC2-VPC, you can specify either the instance ID or the
+network interface ID, but not both. The operation fails if you specify an
+instance ID unless exactly one network interface is attached.
+* `network_interface_id` - (Optional) The ID of the network interface. If the
+instance has more than one network interface, you must specify a network
+interface ID.
+* `private_ip_address` - (Optional) The primary or secondary private IP address
+to associate with the Elastic IP address. If no private IP address is
+specified, the Elastic IP address is associated with the primary private IP
+address.
+* `public_ip` - (Optional) The Elastic IP address. This is required for EC2-Classic.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `association_id` - The ID that represents the association of the Elastic IP
+address with an instance.
+* `allocation_id` - As above
+* `instance_id` - As above
+* `network_interface_id` - As above
+* `private_ip_address` - As above
+* `public_ip` - As above
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EIP Associations using their association IDs. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EIP Associations using their association IDs. For example:
+
+```console
+% terraform import aws_eip_association.test eipassoc-ab12c345
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/eks_addon.html.markdown b/website/docs/cdktf/python/r/eks_addon.html.markdown
index cd3636c7ad9..7a41d3a1e3c 100644
--- a/website/docs/cdktf/python/r/eks_addon.html.markdown
+++ b/website/docs/cdktf/python/r/eks_addon.html.markdown
@@ -217,9 +217,9 @@ The following arguments are optional:
 for service accounts on your cluster](https://docs.aws.amazon.com/eks/latest/userguide/enable-iam-roles-for-service-accounts.html) in the Amazon EKS User Guide.
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of the EKS add-on. * `id` - EKS Cluster name and EKS Addon name separated by a colon (`:`). @@ -238,10 +238,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -EKS add-on can be imported using the `cluster_name` and `addon_name` separated by a colon (`:`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS add-on using the `cluster_name` and `addon_name` separated by a colon (`:`). For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_eks_addon.my_eks_addon my_cluster_name:my_addon_name + +Using `terraform import`, import EKS add-on using the `cluster_name` and `addon_name` separated by a colon (`:`). For example: + +```console +% terraform import aws_eks_addon.my_eks_addon my_cluster_name:my_addon_name ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/eks_cluster.html.markdown b/website/docs/cdktf/python/r/eks_cluster.html.markdown index ccf3a87d87c..66fee230a0f 100644 --- a/website/docs/cdktf/python/r/eks_cluster.html.markdown +++ b/website/docs/cdktf/python/r/eks_cluster.html.markdown @@ -260,14 +260,14 @@ The following arguments are optional: ### encryption_config -The following arguments are supported in the `encryption_config` configuration block: +The `encryption_config` configuration block supports the following arguments: * `provider` - (Required) Configuration block with provider for encryption. Detailed below. * `resources` - (Required) List of strings with resources to be encrypted. Valid values: `secrets`. #### provider -The following arguments are supported in the `provider` configuration block: +The `provider` configuration block supports the following arguments: * `key_arn` - (Required) ARN of the Key Management Service (KMS) customer master key (CMK). The CMK must be symmetric, created in the same region as the cluster, and if the CMK was created in a different account, the user must have access to the CMK. For more information, see [Allowing Users in Other Accounts to Use a CMK in the AWS Key Management Service Developer Guide](https://docs.aws.amazon.com/kms/latest/developerguide/key-policy-modifying-external-accounts.html). @@ -281,7 +281,7 @@ The following arguments are supported in the `provider` configuration block: ### kubernetes_network_config -The following arguments are supported in the `kubernetes_network_config` configuration block: +The `kubernetes_network_config` configuration block supports the following arguments: * `service_ipv4_cidr` - (Optional) The CIDR block to assign Kubernetes pod and service IP addresses from. If you don't specify a block, Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. We recommend that you specify a block that does not overlap with resources in other networks that are peered or connected to your VPC. You can only specify a custom CIDR block when you create a cluster, changing this value will force a new cluster to be created. 
The block must meet the following requirements: @@ -294,7 +294,7 @@ The following arguments are supported in the `kubernetes_network_config` configu ### outpost_config -The following arguments are supported in the `outpost_config` configuration block: +The `outpost_config` configuration block supports the following arguments: * `control_plane_instance_type` - (Required) The Amazon EC2 instance type that you want to use for your local Amazon EKS cluster on Outposts. The instance type that you specify is used for all Kubernetes control plane instances. The instance type can't be changed after cluster creation. Choose an instance type based on the number of nodes that your cluster will have. If your cluster will have: @@ -307,15 +307,15 @@ The following arguments are supported in the `outpost_config` configuration bloc For a list of the available Amazon EC2 instance types, see Compute and storage in AWS Outposts rack features The control plane is not automatically scaled by Amazon EKS. * `control_plane_placement` - (Optional) An object representing the placement configuration for all the control plane instances of your local Amazon EKS cluster on AWS Outpost. -The following arguments are supported in the `control_plane_placement` configuration block: +The `control_plane_placement` configuration block supports the following arguments: * `group_name` - (Required) The name of the placement group for the Kubernetes control plane instances. This setting can't be changed after cluster creation. * `outpost_arns` - (Required) The ARN of the Outpost that you want to use for your local Amazon EKS cluster on Outposts. This argument is a list of arns, but only a single Outpost ARN is supported currently. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the cluster. * `certificate_authority` - Attribute block containing `certificate-authority-data` for your cluster. Detailed below. @@ -358,10 +358,21 @@ Note that the `update` timeout is used separately for both `version` and `vpc_co ## Import -EKS Clusters can be imported using the `name`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Clusters using the `name`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_eks_cluster.my_cluster my_cluster + +Using `terraform import`, import EKS Clusters using the `name`. For example: + +```console +% terraform import aws_eks_cluster.my_cluster my_cluster ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/eks_fargate_profile.html.markdown b/website/docs/cdktf/python/r/eks_fargate_profile.html.markdown index 74693367dcc..38c9c5d79a7 100644 --- a/website/docs/cdktf/python/r/eks_fargate_profile.html.markdown +++ b/website/docs/cdktf/python/r/eks_fargate_profile.html.markdown @@ -98,9 +98,9 @@ The following arguments are optional: * `labels` - (Optional) Key-value map of Kubernetes labels for selection. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of the EKS Fargate Profile. * `id` - EKS Cluster name and EKS Fargate Profile name separated by a colon (`:`). @@ -116,10 +116,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -EKS Fargate Profiles can be imported using the `cluster_name` and `fargate_profile_name` separated by a colon (`:`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Fargate Profiles using the `cluster_name` and `fargate_profile_name` separated by a colon (`:`). For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_eks_fargate_profile.my_fargate_profile my_cluster:my_fargate_profile + +Using `terraform import`, import EKS Fargate Profiles using the `cluster_name` and `fargate_profile_name` separated by a colon (`:`). For example: + +```console +% terraform import aws_eks_fargate_profile.my_fargate_profile my_cluster:my_fargate_profile ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/eks_identity_provider_config.html.markdown b/website/docs/cdktf/python/r/eks_identity_provider_config.html.markdown index 034ad645f35..c8df1ba0b65 100644 --- a/website/docs/cdktf/python/r/eks_identity_provider_config.html.markdown +++ b/website/docs/cdktf/python/r/eks_identity_provider_config.html.markdown @@ -38,7 +38,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cluster_name` – (Required) Name of the EKS Cluster. * `oidc` - (Required) Nested attribute containing [OpenID Connect](https://openid.net/connect/) identity provider information for the cluster. Detailed below. @@ -55,9 +55,9 @@ The following arguments are supported: * `username_claim` - (Optional) The JWT claim that the provider will use as the username. * `username_prefix` - (Optional) A prefix that is prepended to username claims. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of the EKS Identity Provider Configuration. * `id` - EKS Cluster name and EKS Identity Provider Configuration name separated by a colon (`:`). @@ -73,10 +73,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -EKS Identity Provider Configurations can be imported using the `cluster_name` and `identity_provider_config_name` separated by a colon (`:`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Identity Provider Configurations using the `cluster_name` and `identity_provider_config_name` separated by a colon (`:`). 
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_eks_identity_provider_config.my_identity_provider_config my_cluster:my_identity_provider_config + +Using `terraform import`, import EKS Identity Provider Configurations using the `cluster_name` and `identity_provider_config_name` separated by a colon (`:`). For example: + +```console +% terraform import aws_eks_identity_provider_config.my_identity_provider_config my_cluster:my_identity_provider_config ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/eks_node_group.html.markdown b/website/docs/cdktf/python/r/eks_node_group.html.markdown index 92d29f0b0b1..67369c14b75 100644 --- a/website/docs/cdktf/python/r/eks_node_group.html.markdown +++ b/website/docs/cdktf/python/r/eks_node_group.html.markdown @@ -246,9 +246,9 @@ The following arguments are mutually exclusive. * `max_unavailable` - (Optional) Desired max number of unavailable worker nodes during node group update. * `max_unavailable_percentage` - (Optional) Desired max percentage of unavailable worker nodes during node group update. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of the EKS Node Group. * `id` - EKS Cluster name and EKS Node Group name separated by a colon (`:`). @@ -269,10 +269,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -EKS Node Groups can be imported using the `cluster_name` and `node_group_name` separated by a colon (`:`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Node Groups using the `cluster_name` and `node_group_name` separated by a colon (`:`). For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_eks_node_group.my_node_group my_cluster:my_node_group + +Using `terraform import`, import EKS Node Groups using the `cluster_name` and `node_group_name` separated by a colon (`:`). For example: + +```console +% terraform import aws_eks_node_group.my_node_group my_cluster:my_node_group ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elastic_beanstalk_application.html.markdown b/website/docs/cdktf/python/r/elastic_beanstalk_application.html.markdown new file mode 100644 index 00000000000..4f78e918073 --- /dev/null +++ b/website/docs/cdktf/python/r/elastic_beanstalk_application.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Elastic Beanstalk" +layout: "aws" +page_title: "AWS: aws_elastic_beanstalk_application" +description: |- + Provides an Elastic Beanstalk Application Resource +--- + + + +# Resource: aws_elastic_beanstalk_application + +Provides an Elastic Beanstalk Application Resource. 
Elastic Beanstalk allows
+you to deploy and manage applications in the AWS cloud without worrying about
+the infrastructure that runs those applications.
+
+This resource creates an application that has one configuration template named
+`default`, and no application versions.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elastic_beanstalk_application import ElasticBeanstalkApplication, ElasticBeanstalkApplicationAppversionLifecycle
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # beanstalk_service refers to an IAM service role defined elsewhere.
+        ElasticBeanstalkApplication(self, "tftest",
+            appversion_lifecycle=ElasticBeanstalkApplicationAppversionLifecycle(
+                delete_source_from_s3=True,
+                max_count=128,
+                service_role=beanstalk_service.arn
+            ),
+            description="tf-test-desc",
+            name="tf-test-name"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the application; must be unique within your account
+* `description` - (Optional) Short description of the application
+* `tags` - (Optional) Key-value map of tags for the Elastic Beanstalk Application. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Application version lifecycle (`appversion_lifecycle`) supports the following settings. Only one of either `max_count` or `max_age_in_days` can be provided:
+
+* `service_role` - (Required) The ARN of an IAM service role under which the application version is deleted. Elastic Beanstalk must have permission to assume this role.
+* `max_count` - (Optional) The maximum number of application versions to retain (`max_age_in_days` and `max_count` cannot be enabled simultaneously).
+* `max_age_in_days` - (Optional) The number of days to retain an application version (`max_age_in_days` and `max_count` cannot be enabled simultaneously).
+* `delete_source_from_s3` - (Optional) Set to `true` to delete a version's source bundle from S3 when the application version is deleted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN assigned by AWS for this Elastic Beanstalk Application.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Beanstalk Applications using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Elastic Beanstalk Applications using the `name`.
 For example:
+
+```console
+% terraform import aws_elastic_beanstalk_application.tf_test tf-test-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elastic_beanstalk_application_version.html.markdown b/website/docs/cdktf/python/r/elastic_beanstalk_application_version.html.markdown
new file mode 100644
index 00000000000..7ac296a6274
--- /dev/null
+++ b/website/docs/cdktf/python/r/elastic_beanstalk_application_version.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_application_version"
+description: |-
+  Provides an Elastic Beanstalk Application Version Resource
+---
+
+
+# Resource: aws_elastic_beanstalk_application_version
+
+Provides an Elastic Beanstalk Application Version Resource. Elastic Beanstalk allows
+you to deploy and manage applications in the AWS cloud without worrying about
+the infrastructure that runs those applications.
+
+This resource creates a Beanstalk Application Version that can be deployed to a Beanstalk
+Environment.
+
+~> **NOTE on Application Version Resource:** When using the Application Version resource with multiple
+[Elastic Beanstalk Environments](elastic_beanstalk_environment.html) it is possible that an error may be returned
+when attempting to delete an Application Version while it is still in use by a different environment.
+To work around this you can either create each environment in a separate AWS account or create your `aws_elastic_beanstalk_application_version` resources with unique names in your Elastic Beanstalk Application. For example, `<revision>-<environment>`.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elastic_beanstalk_application import ElasticBeanstalkApplication
+from imports.aws.elastic_beanstalk_application_version import ElasticBeanstalkApplicationVersion
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_object import S3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElasticBeanstalkApplication(self, "default",
+            description="tf-test-desc",
+            name="tf-test-name"
+        )
+        aws_s3_bucket_default = S3Bucket(self, "default_1",
+            bucket="tftest.applicationversion.bucket"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_default.override_logical_id("default")
+        aws_s3_object_default = S3Object(self, "default_2",
+            bucket=Token.as_string(aws_s3_bucket_default.id),
+            key="beanstalk/go-v1.zip",
+            source="go-v1.zip"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_object_default.override_logical_id("default")
+        aws_elastic_beanstalk_application_version_default = ElasticBeanstalkApplicationVersion(self, "default_3",
+            application="tf-test-name",
+            bucket=Token.as_string(aws_s3_bucket_default.id),
+            description="application version created by terraform",
+            key=Token.as_string(aws_s3_object_default.id),
+            name="tf-test-version-label"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_elastic_beanstalk_application_version_default.override_logical_id("default")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `application` - (Required) Name of the Beanstalk Application the version is associated with.
+* `bucket` - (Required) S3 bucket that contains the Application Version source bundle.
+* `key` - (Required) S3 object that is the Application Version source bundle.
+* `name` - (Required) Unique name for this Application Version.
+
+The following arguments are optional:
+
+* `description` - (Optional) Short description of the Application Version.
+* `force_delete` - (Optional) On delete, force an Application Version to be deleted when it may be in use by multiple Elastic Beanstalk Environments.
+* `tags` - (Optional) Key-value map of tags for the Elastic Beanstalk Application Version. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN assigned by AWS for this Elastic Beanstalk Application.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elastic_beanstalk_configuration_template.html.markdown b/website/docs/cdktf/python/r/elastic_beanstalk_configuration_template.html.markdown
new file mode 100644
index 00000000000..e03083e9ee6
--- /dev/null
+++ b/website/docs/cdktf/python/r/elastic_beanstalk_configuration_template.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_configuration_template"
+description: |-
+  Provides an Elastic Beanstalk Configuration Template
+---
+
+
+# Resource: aws_elastic_beanstalk_configuration_template
+
+Provides an Elastic Beanstalk Configuration Template, which is associated with
+a specific application and is used to deploy different versions of the
+application with the same configuration settings.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elastic_beanstalk_application import ElasticBeanstalkApplication
+from imports.aws.elastic_beanstalk_configuration_template import ElasticBeanstalkConfigurationTemplate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        tftest = ElasticBeanstalkApplication(self, "tftest",
+            description="tf-test-desc",
+            name="tf-test-name"
+        )
+        ElasticBeanstalkConfigurationTemplate(self, "tf_template",
+            application=tftest.name,
+            name="tf-test-template-config",
+            solution_stack_name="64bit Amazon Linux 2015.09 v2.0.8 running Go 1.4"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A unique name for this Template.
+* `application` – (Required) name of the application to associate with this configuration template +* `description` - (Optional) Short description of the Template +* `environment_id` – (Optional) The ID of the environment used with this configuration template +* `setting` – (Optional) Option settings to configure the new Environment. These + override specific values that are set as defaults. The format is detailed + below in [Option Settings](#option-settings) +* `solution_stack_name` – (Optional) A solution stack to base your Template +off of. Example stacks can be found in the [Amazon API documentation][1] + +## Option Settings + +The `setting` field supports the following format: + +* `namespace` - unique namespace identifying the option's associated AWS resource +* `name` - name of the configuration option +* `value` - value for the configuration option +* `resource` - (Optional) resource name for [scheduled action](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html#command-options-general-autoscalingscheduledaction) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `name` +* `application` +* `description` +* `environment_id` +* `option_settings` +* `solution_stack_name` + +[1]: https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elastic_beanstalk_environment.html.markdown b/website/docs/cdktf/python/r/elastic_beanstalk_environment.html.markdown new file mode 100644 index 00000000000..f89f5286c91 --- /dev/null +++ b/website/docs/cdktf/python/r/elastic_beanstalk_environment.html.markdown @@ -0,0 +1,174 @@ +--- +subcategory: "Elastic Beanstalk" +layout: "aws" +page_title: "AWS: aws_elastic_beanstalk_environment" +description: |- + Provides an Elastic Beanstalk Environment Resource +--- + + + +# Resource: aws_elastic_beanstalk_environment + +Provides an Elastic Beanstalk Environment Resource. Elastic Beanstalk allows +you to deploy and manage applications in the AWS cloud without worrying about +the infrastructure that runs those applications. + +Environments are often things such as `development`, `integration`, or +`production`. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elastic_beanstalk_application import ElasticBeanstalkApplication +from imports.aws.elastic_beanstalk_environment import ElasticBeanstalkEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + tftest = ElasticBeanstalkApplication(self, "tftest", + description="tf-test-desc", + name="tf-test-name" + ) + ElasticBeanstalkEnvironment(self, "tfenvtest", + application=tftest.name, + name="tf-test-name", + solution_stack_name="64bit Amazon Linux 2015.03 v2.0.3 running Go 1.4" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A unique name for this Environment. This name is used + in the application URL +* `application` – (Required) Name of the application that contains the version + to be deployed +* `cname_prefix` - (Optional) Prefix to use for the fully qualified DNS name of + the Environment. 
+* `description` - (Optional) Short description of the Environment
+* `tier` - (Optional) Elastic Beanstalk Environment tier. Valid values are `Worker`
+  or `WebServer`. If tier is left blank, `WebServer` will be used.
+* `setting` – (Optional) Option settings to configure the new Environment. These
+  override specific values that are set as defaults. The format is detailed
+  below in [Option Settings](#option-settings)
+* `solution_stack_name` – (Optional) A solution stack to base your environment
+off of. Example stacks can be found in the [Amazon API documentation][1]
+* `template_name` – (Optional) The name of the Elastic Beanstalk Configuration
+  template to use in deployment
+* `platform_arn` – (Optional) The [ARN][2] of the Elastic Beanstalk [Platform][3]
+  to use in deployment
+* `wait_for_ready_timeout` - (Default `20m`) The maximum
+  [duration](https://golang.org/pkg/time/#ParseDuration) that Terraform should
+  wait for an Elastic Beanstalk Environment to be in a ready state before timing
+  out.
+* `poll_interval` – The time between polling the AWS API to
+check if changes have been applied. Use this to adjust the rate of API calls
+for any `create` or `update` action. Minimum `10s`, maximum `180s`. Omit this to
+use the default behavior, which is an exponential backoff.
+* `version_label` - (Optional) The name of the Elastic Beanstalk Application Version
+to use in deployment.
+* `tags` - (Optional) A set of tags to apply to the Environment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Option Settings
+
+Some options can be stack-specific; check the [AWS Docs](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html)
+for supported options and examples.
+
+The `setting` and `all_settings` mappings support the following format:
+
+* `namespace` - unique namespace identifying the option's associated AWS resource
+* `name` - name of the configuration option
+* `value` - value for the configuration option
+* `resource` - (Optional) resource name for [scheduled action](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html#command-options-general-autoscalingscheduledaction)
+
+### Example With Options
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.elastic_beanstalk_application import ElasticBeanstalkApplication +from imports.aws.elastic_beanstalk_environment import ElasticBeanstalkEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + tftest = ElasticBeanstalkApplication(self, "tftest", + description="tf-test-desc", + name="tf-test-name" + ) + ElasticBeanstalkEnvironment(self, "tfenvtest", + application=tftest.name, + name="tf-test-name", + setting=[ElasticBeanstalkEnvironmentSetting( + name="VPCId", + namespace="aws:ec2:vpc", + value="vpc-xxxxxxxx" + ), ElasticBeanstalkEnvironmentSetting( + name="Subnets", + namespace="aws:ec2:vpc", + value="subnet-xxxxxxxx" + ) + ], + solution_stack_name="64bit Amazon Linux 2015.03 v2.0.3 running Go 1.4" + ) +``` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the Elastic Beanstalk Environment. +* `name` - Name of the Elastic Beanstalk Environment. +* `description` - Description of the Elastic Beanstalk Environment. +* `tier` - The environment tier specified. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `application` – The Elastic Beanstalk Application specified for this environment. +* `setting` – Settings specifically set for this Environment. +* `all_settings` – List of all option settings configured in this Environment. These + are a combination of default settings and their overrides from `setting` in + the configuration. +* `cname` - Fully qualified DNS name for this Environment. +* `autoscaling_groups` - The autoscaling groups used by this Environment. +* `instances` - Instances used by this Environment. +* `launch_configurations` - Launch configurations in use by this Environment. +* `load_balancers` - Elastic load balancers in use by this Environment. +* `queues` - SQS queues in use by this Environment. +* `triggers` - Autoscaling triggers in use by this Environment. +* `endpoint_url` - The URL to the Load Balancer for this Environment + +[1]: https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html +[2]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html +[3]: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-platformarn + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Beanstalk Environments using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Elastic Beanstalk Environments using the `id`. 
 For example:
+
+```console
+% terraform import aws_elastic_beanstalk_environment.prodenv e-rpqsewtp2j
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elasticache_cluster.html.markdown b/website/docs/cdktf/python/r/elasticache_cluster.html.markdown
new file mode 100644
index 00000000000..761c23f6731
--- /dev/null
+++ b/website/docs/cdktf/python/r/elasticache_cluster.html.markdown
@@ -0,0 +1,297 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_cluster"
+description: |-
+  Provides an ElastiCache Cluster resource.
+---
+
+
+# Resource: aws_elasticache_cluster
+
+Provides an ElastiCache Cluster resource, which manages either a
+[Memcached cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/WhatIs.html), a
+[single-node Redis instance](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/WhatIs.html), or a
+read replica in a Redis (Cluster Mode Enabled) replication group.
+
+For working with Redis (Cluster Mode Enabled) replication groups, see the
+[`aws_elasticache_replication_group` resource](/docs/providers/aws/r/elasticache_replication_group.html).
+
+~> **Note:** When you change an attribute, such as `num_cache_nodes`, by default
+it is applied in the next maintenance window. Because of this, Terraform may report
+a difference in its planning phase because the actual modification has not yet taken
+place. You can use the `apply_immediately` flag to instruct the service to apply the
+change immediately. Using `apply_immediately` can result in a brief downtime as the server reboots.
+See the AWS Documentation on Modifying an ElastiCache Cache Cluster for
+[ElastiCache for Memcached](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/Clusters.Modify.html) or
+[ElastiCache for Redis](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Modify.html)
+for more information.
+
+~> **Note:** Any attribute changes that re-create the resource will be applied immediately, regardless of the value of `apply_immediately`.
+
+## Example Usage
+
+### Memcached Cluster
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticache_cluster import ElasticacheCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElasticacheCluster(self, "example",
+            cluster_id="cluster-example",
+            engine="memcached",
+            node_type="cache.m4.large",
+            num_cache_nodes=2,
+            parameter_group_name="default.memcached1.4",
+            port=11211
+        )
+```
+
+### Redis Instance
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.elasticache_cluster import ElasticacheCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElasticacheCluster(self, "example", + cluster_id="cluster-example", + engine="redis", + engine_version="3.2.10", + node_type="cache.m4.large", + num_cache_nodes=1, + parameter_group_name="default.redis3.2", + port=6379 + ) +``` + +### Redis Cluster Mode Disabled Read Replica Instance + +These inherit their settings from the replication group. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticache_cluster import ElasticacheCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElasticacheCluster(self, "replica", + cluster_id="cluster-example", + replication_group_id=example.id + ) +``` + +### Redis Log Delivery configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticache_cluster import ElasticacheCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElasticacheCluster(self, "test", + apply_immediately=True, + cluster_id="mycluster", + engine="redis", + log_delivery_configuration=[ElasticacheClusterLogDeliveryConfiguration( + destination=example.name, + destination_type="cloudwatch-logs", + log_format="text", + log_type="slow-log" + ), ElasticacheClusterLogDeliveryConfiguration( + destination=Token.as_string(aws_kinesis_firehose_delivery_stream_example.name), + destination_type="kinesis-firehose", + log_format="json", + log_type="engine-log" + ) + ], + node_type="cache.t3.micro", + num_cache_nodes=1, + port=6379 + ) +``` + +### Elasticache Cluster in Outpost + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_outposts_outpost import DataAwsOutpostsOutpost +from imports.aws.data_aws_outposts_outposts import DataAwsOutpostsOutposts +from imports.aws.elasticache_cluster import ElasticacheCluster +from imports.aws.elasticache_subnet_group import ElasticacheSubnetGroup +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Vpc(self, "example", + cidr_block="10.0.0.0/16" + ) + data_aws_outposts_outposts_example = DataAwsOutpostsOutposts(self, "example_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_outposts_outposts_example.override_logical_id("example") + aws_subnet_example = Subnet(self, "example_2", + cidr_block="10.0.1.0/24", + tags={ + "Name": "my-subnet" + }, + vpc_id=example.id + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match. + aws_subnet_example.override_logical_id("example") + data_aws_outposts_outpost_example = DataAwsOutpostsOutpost(self, "example_3", + id=Token.as_string( + property_access(Fn.tolist(data_aws_outposts_outposts_example.ids), ["0"])) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_outposts_outpost_example.override_logical_id("example") + aws_elasticache_subnet_group_example = ElasticacheSubnetGroup(self, "example_4", + name="my-cache-subnet", + subnet_ids=[Token.as_string(aws_subnet_example.id)] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_elasticache_subnet_group_example.override_logical_id("example") + aws_elasticache_cluster_example = ElasticacheCluster(self, "example_5", + cluster_id="cluster-example", + engine="memcached", + node_type="cache.r5.large", + num_cache_nodes=2, + outpost_mode="single-outpost", + parameter_group_name="default.memcached1.4", + port=11211, + preferred_outpost_arn=Token.as_string(data_aws_outposts_outpost_example.arn), + subnet_group_name=Token.as_string(aws_elasticache_subnet_group_example.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_elasticache_cluster_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `cluster_id` – (Required) Group identifier. ElastiCache converts this name to lowercase. Changing this value will re-create the resource. +* `engine` – (Optional, Required if `replication_group_id` is not specified) Name of the cache engine to be used for this cache cluster. Valid values are `memcached` or `redis`. +* `node_type` – (Required unless `replication_group_id` is provided) The instance class used. See AWS documentation for information on [supported node types for Redis](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types for Redis](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html). See AWS documentation for information on [supported node types for Memcached](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types for Memcached](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/nodes-select-size.html). For Memcached, changing this value will re-create the resource. +* `num_cache_nodes` – (Required unless `replication_group_id` is provided) The initial number of cache nodes that the cache cluster will have. For Redis, this value must be 1. For Memcached, this value must be between 1 and 40. If this number is reduced on subsequent runs, the highest numbered nodes will be removed. +* `parameter_group_name` – (Required unless `replication_group_id` is provided) The name of the parameter group to associate with this cache cluster. + +The following arguments are optional: + +* `apply_immediately` - (Optional) Whether any database modifications are applied immediately, or during the next maintenance window. Default is `false`. See [Amazon ElastiCache Documentation for more information.](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ModifyCacheCluster.html). 
+* `auto_minor_version_upgrade` - (Optional) Specifies whether minor version engine upgrades will be applied automatically to the underlying Cache Cluster instances during the maintenance window.
+  Only supported for engine type `"redis"` and if the engine version is 6 or higher.
+  Defaults to `true`.
+* `availability_zone` - (Optional) Availability Zone for the cache cluster. If you want to create cache nodes in multi-az, use `preferred_availability_zones` instead. Default: System chosen Availability Zone. Changing this value will re-create the resource.
+* `az_mode` - (Optional, Memcached only) Whether the nodes in this Memcached node group are created in a single Availability Zone or created across multiple Availability Zones in the cluster's region. Valid values for this parameter are `single-az` or `cross-az`, default is `single-az`. If you want to choose `cross-az`, `num_cache_nodes` must be greater than `1`.
+* `engine_version` – (Optional) Version number of the cache engine to be used.
+  If not set, defaults to the latest version.
+  See [Describe Cache Engine Versions](https://docs.aws.amazon.com/cli/latest/reference/elasticache/describe-cache-engine-versions.html) in the AWS Documentation for supported versions.
+  When `engine` is `redis` and the version is 7 or higher, the major and minor version should be set, e.g., `7.2`.
+  When the version is 6, the major and minor version can be set, e.g., `6.2`,
+  or the minor version can be unspecified which will use the latest version at creation time, e.g., `6.x`.
+  Otherwise, specify the full version desired, e.g., `5.0.6`.
+  The actual engine version used is returned in the attribute `engine_version_actual`, see [Attribute Reference](#attribute-reference) below.
+* `final_snapshot_identifier` - (Optional, Redis only) Name of your final cluster snapshot. If omitted, no final snapshot will be made.
+* `ip_discovery` - (Optional) The IP version to advertise in the discovery protocol. Valid values are `ipv4` or `ipv6`.
+* `log_delivery_configuration` - (Optional, Redis only) Specifies the destination and format of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log). See the documentation on [Amazon ElastiCache](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html). See [Log Delivery Configuration](#log-delivery-configuration) below for more details.
+* `maintenance_window` – (Optional) Specifies the weekly time range for when maintenance
+on the cache cluster is performed. The format is `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC).
+The minimum maintenance window is a 60 minute period. Example: `sun:05:00-sun:09:00`.
+* `network_type` - (Optional) The IP versions for cache cluster connections. IPv6 is supported with Redis engine `6.2` onward or Memcached version `1.6.6` for all [Nitro system](https://aws.amazon.com/ec2/nitro/) instances. Valid values are `ipv4`, `ipv6` or `dual_stack`.
+* `notification_topic_arn` – (Optional) ARN of an SNS topic to send ElastiCache notifications to. Example: `arn:aws:sns:us-east-1:012345678999:my_sns_topic`.
+* `outpost_mode` - (Optional) Specify the outpost mode that will apply to the cache cluster creation. Valid values are `"single-outpost"` and `"cross-outpost"`, however AWS currently only supports `"single-outpost"` mode.
+* `port` – (Optional) The port number on which each of the cache nodes will accept connections.
 For Memcached the default is 11211, and for Redis the default port is 6379. Cannot be provided with `replication_group_id`. Changing this value will re-create the resource.
+* `preferred_availability_zones` - (Optional, Memcached only) List of the Availability Zones in which cache nodes are created. If you are creating your cluster in an Amazon VPC you can only locate nodes in Availability Zones that are associated with the subnets in the selected subnet group. The number of Availability Zones listed must equal the value of `num_cache_nodes`. If you want all the nodes in the same Availability Zone, use `availability_zone` instead, or repeat the Availability Zone multiple times in the list. Default: System chosen Availability Zones. Detecting drift of existing node availability zone is not currently supported. Updating this argument by itself to migrate existing node availability zones is not currently supported and will show a perpetual difference.
+* `preferred_outpost_arn` - (Optional, Required if `outpost_mode` is specified) The outpost ARN in which the cache cluster will be created.
+* `replication_group_id` - (Optional, Required if `engine` is not specified) ID of the replication group to which this cluster should belong. If this parameter is specified, the cluster is added to the specified replication group as a read replica; otherwise, the cluster is a standalone primary that is not part of any replication group.
+* `security_group_ids` – (Optional, VPC only) One or more VPC security groups associated with the cache cluster.
+* `snapshot_arns` – (Optional, Redis only) Single-element string list containing an Amazon Resource Name (ARN) of a Redis RDB snapshot file stored in Amazon S3. The object name cannot contain any commas. Changing `snapshot_arns` forces a new resource.
+* `snapshot_name` - (Optional, Redis only) Name of a snapshot from which to restore data into the new node group. Changing `snapshot_name` forces a new resource.
+* `snapshot_retention_limit` - (Optional, Redis only) Number of days for which ElastiCache will retain automatic cache cluster snapshots before deleting them. For example, if you set SnapshotRetentionLimit to 5, then a snapshot that was taken today will be retained for 5 days before being deleted. If the value of SnapshotRetentionLimit is set to zero (0), backups are turned off. Please note that setting a `snapshot_retention_limit` is not supported on cache.t1.micro cache nodes.
+* `snapshot_window` - (Optional, Redis only) Daily time range (in UTC) during which ElastiCache will begin taking a daily snapshot of your cache cluster. Example: `05:00-09:00`.
+* `subnet_group_name` – (Optional, VPC only) Name of the subnet group to be used for the cache cluster. Changing this value will re-create the resource.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Log Delivery Configuration
+
+The `log_delivery_configuration` block allows the streaming of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) to CloudWatch Logs or Kinesis Data Firehose. Max of 2 blocks.
+
+* `destination` - Name of either the CloudWatch Logs LogGroup or Kinesis Data Firehose resource.
+* `destination_type` - For CloudWatch Logs use `cloudwatch-logs` or for Kinesis Data Firehose use `kinesis-firehose`. +* `log_format` - Valid values are `json` or `text` +* `log_type` - Valid values are `slow-log` or `engine-log`. Max 1 of each. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the created ElastiCache Cluster. +* `engine_version_actual` - Because ElastiCache pulls the latest minor or patch for a version, this attribute returns the running version of the cache engine. +* `cache_nodes` - List of node objects including `id`, `address`, `port` and `availability_zone`. +* `cluster_address` - (Memcached only) DNS name of the cache cluster without the port appended. +* `configuration_endpoint` - (Memcached only) Configuration endpoint to allow host discovery. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `40m`) +- `update` - (Default `80m`) +- `delete` - (Default `40m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Clusters using the `cluster_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ElastiCache Clusters using the `cluster_id`. For example: + +```console +% terraform import aws_elasticache_cluster.my_cluster my_cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elasticache_global_replication_group.html.markdown b/website/docs/cdktf/python/r/elasticache_global_replication_group.html.markdown new file mode 100644 index 00000000000..bfe2367528d --- /dev/null +++ b/website/docs/cdktf/python/r/elasticache_global_replication_group.html.markdown @@ -0,0 +1,183 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_global_replication_group" +description: |- + Provides an ElastiCache Global Replication Group resource. +--- + + + +# Resource: aws_elasticache_global_replication_group + +Provides an ElastiCache Global Replication Group resource, which manages replication between two or more Replication Groups in different regions. For more information, see the [ElastiCache User Guide](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Redis-Global-Datastore.html). + +## Example Usage + +### Global replication group with one secondary replication group + +The global replication group depends on the primary group existing. Secondary replication groups depend on the global replication group. Terraform dependency management will handle this transparently using resource value references. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.elasticache_global_replication_group import ElasticacheGlobalReplicationGroup
+from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = ElasticacheReplicationGroup(self, "primary",
+            description="primary replication group",
+            engine="redis",
+            engine_version="5.0.6",
+            node_type="cache.m5.large",
+            num_cache_clusters=1,
+            replication_group_id="example-primary"
+        )
+        example = ElasticacheGlobalReplicationGroup(self, "example",
+            global_replication_group_id_suffix="example",
+            primary_replication_group_id=primary.id
+        )
+        ElasticacheReplicationGroup(self, "secondary",
+            description="secondary replication group",
+            global_replication_group_id=example.global_replication_group_id,
+            num_cache_clusters=1,
+            provider=other_region,
+            replication_group_id="example-secondary"
+        )
+```
+
+### Managing Redis Engine Versions
+
+The initial Redis version is determined by the version set on the primary replication group.
+However, once it is part of a Global Replication Group,
+the Global Replication Group manages the version of all member replication groups.
+
+The member replication groups must have [`lifecycle.ignore_changes[engine_version]`](https://www.terraform.io/language/meta-arguments/lifecycle) set,
+or Terraform will always return a diff.
+
+In this example,
+the primary replication group will be created with Redis 6.0,
+and then upgraded to Redis 6.2 once added to the Global Replication Group.
+The secondary replication group will be created with Redis 6.2.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticache_global_replication_group import ElasticacheGlobalReplicationGroup
+from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = ElasticacheReplicationGroup(self, "primary",
+            description="primary replication group",
+            engine="redis",
+            engine_version="6.0",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=[engine_version]
+            ),
+            node_type="cache.m5.large",
+            num_cache_clusters=1,
+            replication_group_id="example-primary"
+        )
+        example = ElasticacheGlobalReplicationGroup(self, "example",
+            engine_version="6.2",
+            global_replication_group_id_suffix="example",
+            primary_replication_group_id=primary.id
+        )
+        ElasticacheReplicationGroup(self, "secondary",
+            description="secondary replication group",
+            global_replication_group_id=example.global_replication_group_id,
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=[engine_version]
+            ),
+            num_cache_clusters=1,
+            provider=other_region,
+            replication_group_id="example-secondary"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `automatic_failover_enabled` - (Optional) Specifies whether read-only replicas will be automatically promoted to read/write primary if the existing primary fails.
+  When creating, by default the Global Replication Group inherits the automatic failover setting of the primary replication group.
+* `cache_node_type` - (Optional) The instance class used.
+  See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html)
+  and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html).
+  When creating, by default the Global Replication Group inherits the node type of the primary replication group.
+* `engine_version` - (Optional) Redis version to use for the Global Replication Group.
+  When creating, by default the Global Replication Group inherits the version of the primary replication group.
+  If a version is specified, the Global Replication Group and all member replication groups will be upgraded to this version.
+  Cannot be downgraded without replacing the Global Replication Group and all member replication groups.
+  When the version is 7 or higher, the major and minor version should be set, e.g., `7.2`.
+  When the version is 6, the major and minor version can be set, e.g., `6.2`,
+  or the minor version can be unspecified which will use the latest version at creation time, e.g., `6.x`.
+  The actual engine version used is returned in the attribute `engine_version_actual`, see [Attribute Reference](#attribute-reference) below.
+* `global_replication_group_id_suffix` – (Required) The suffix name of a Global Datastore. If `global_replication_group_id_suffix` is changed, creates a new resource.
+* `primary_replication_group_id` – (Required) The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primary_replication_group_id` is changed, creates a new resource.
+* `global_replication_group_description` – (Optional) A user-created description for the global replication group.
+* `num_node_groups` - (Optional) The number of node groups (shards) on the global replication group.
+* `parameter_group_name` - (Optional) An ElastiCache Parameter Group to use for the Global Replication Group.
+  Required when upgrading a major engine version, but will be ignored if left configured after the upgrade is complete.
+  Specifying without a major version upgrade will fail.
+  Note that ElastiCache creates a copy of this parameter group for each member replication group.
+  A sketch of the major version upgrade flow follows the attribute list below.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the ElastiCache Global Replication Group.
+* `arn` - The ARN of the ElastiCache Global Replication Group.
+* `engine_version_actual` - The full version number of the cache engine running on the members of this global replication group.
+* `at_rest_encryption_enabled` - A flag that indicates whether the encryption at rest is enabled.
+* `auth_token_enabled` - A flag that indicates whether AuthToken (password) is enabled.
+* `cluster_enabled` - Indicates whether the Global Datastore is cluster enabled.
+* `engine` - The name of the cache engine to be used for the clusters in this global replication group.
+* `global_replication_group_id` - The full ID of the global replication group.
+* `global_node_groups` - Set of node groups (shards) on the global replication group.
+  Has the values:
+  * `global_node_group_id` - The ID of the global node group.
+  * `slots` - The keyspace for this node group.
+* `transit_encryption_enabled` - A flag that indicates whether the encryption in transit is enabled.
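+
+As a sketch of the major version upgrade flow described under `engine_version` and `parameter_group_name` above (hand-written, not `cdktf convert` output; the parameter group name, family, and target version are illustrative assumptions):
+
+```python
+from constructs import Construct
+from cdktf import TerraformResourceLifecycle, TerraformStack
+from imports.aws.elasticache_global_replication_group import ElasticacheGlobalReplicationGroup
+from imports.aws.elasticache_parameter_group import ElasticacheParameterGroup
+from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup
+class MajorVersionUpgradeSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = ElasticacheReplicationGroup(self, "primary",
+            description="primary replication group",
+            engine="redis",
+            engine_version="6.2",
+            # The Global Replication Group manages the version after creation.
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["engine_version"]
+            ),
+            node_type="cache.m5.large",
+            num_cache_clusters=1,
+            replication_group_id="example-primary"
+        )
+        # ElastiCache copies this group for each member replication group; it is
+        # required while the major version upgrade runs and ignored afterwards.
+        upgrade = ElasticacheParameterGroup(self, "upgrade",
+            family="redis7",
+            name="example-redis7-upgrade"
+        )
+        ElasticacheGlobalReplicationGroup(self, "example",
+            engine_version="7.2",
+            global_replication_group_id_suffix="example",
+            parameter_group_name=upgrade.name,
+            primary_replication_group_id=primary.id
+        )
+```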
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `60m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Global Replication Groups using the `global_replication_group_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ElastiCache Global Replication Groups using the `global_replication_group_id`. For example:
+
+```console
+% terraform import aws_elasticache_global_replication_group.my_global_replication_group okuqm-global-replication-group-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elasticache_parameter_group.html.markdown b/website/docs/cdktf/python/r/elasticache_parameter_group.html.markdown
new file mode 100644
index 00000000000..e39d72a58e6
--- /dev/null
+++ b/website/docs/cdktf/python/r/elasticache_parameter_group.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_parameter_group"
+description: |-
+  Provides an ElastiCache parameter group resource.
+---
+
+
+
+# Resource: aws_elasticache_parameter_group
+
+Provides an ElastiCache parameter group resource.
+
+~> **NOTE:** Attempting to remove the `reserved-memory` parameter when `family` is set to `redis2.6` or `redis2.8` may show a perpetual difference in Terraform due to an ElastiCache API limitation. Leave that parameter configured with any value to work around the issue.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticache_parameter_group import ElasticacheParameterGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElasticacheParameterGroup(self, "default",
+            family="redis2.8",
+            name="cache-params",
+            parameter=[ElasticacheParameterGroupParameter(
+                name="activerehashing",
+                value="yes"
+            ), ElasticacheParameterGroupParameter(
+                name="min-slaves-to-write",
+                value="2"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the ElastiCache parameter group.
+* `family` - (Required) The family of the ElastiCache parameter group.
+* `description` - (Optional) The description of the ElastiCache parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of ElastiCache parameters to apply.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the ElastiCache parameter.
+* `value` - (Required) The value of the ElastiCache parameter.
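+
+A parameter group is also how "cluster mode" is switched on for Redis replication groups, mirroring the AWS-managed `default.redisN.cluster.on` groups. A minimal hand-written sketch (the `redis7` family and resource names are illustrative assumptions):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.elasticache_parameter_group import ElasticacheParameterGroup
+class ClusterModeParameterGroup(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # `cluster-enabled` is the parameter the .cluster.on default groups set;
+        # ElastiCache parameter values are passed as strings.
+        ElasticacheParameterGroup(self, "cluster_on",
+            family="redis7",
+            name="example-cluster-on",
+            parameter=[ElasticacheParameterGroupParameter(
+                name="cluster-enabled",
+                value="yes"
+            )
+            ]
+        )
+```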
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ElastiCache parameter group name.
+* `arn` - The AWS ARN associated with the parameter group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Parameter Groups using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ElastiCache Parameter Groups using the `name`. For example:
+
+```console
+% terraform import aws_elasticache_parameter_group.default redis-params
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elasticache_replication_group.html.markdown b/website/docs/cdktf/python/r/elasticache_replication_group.html.markdown
new file mode 100644
index 00000000000..1677552c9dd
--- /dev/null
+++ b/website/docs/cdktf/python/r/elasticache_replication_group.html.markdown
@@ -0,0 +1,321 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_replication_group"
+description: |-
+  Provides an ElastiCache Replication Group resource.
+---
+
+
+
+# Resource: aws_elasticache_replication_group
+
+Provides an ElastiCache Replication Group resource.
+
+For working with a [Memcached cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/WhatIs.html) or a
+[single-node Redis instance (Cluster Mode Disabled)](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/WhatIs.html),
+see the [`aws_elasticache_cluster` resource](/docs/providers/aws/r/elasticache_cluster.html).
+
+~> **Note:** When you change an attribute, such as `engine_version`, by
+default the ElastiCache API applies it in the next maintenance window. Because
+of this, Terraform may report a difference in its planning phase because the
+actual modification has not yet taken place. You can use the
+`apply_immediately` flag to instruct the service to apply the change
+immediately. Using `apply_immediately` can result in a brief downtime as
+servers reboot.
+See the AWS Documentation on
+[Modifying an ElastiCache Cache Cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Modify.html)
+for more information.
+
+~> **Note:** Any attribute changes that re-create the resource will be applied immediately, regardless of the value of `apply_immediately`.
+
+~> **Note:** Be aware of the terminology collision around "cluster" for `aws_elasticache_replication_group`. For example, it is possible to create a ["Cluster Mode Disabled [Redis] Cluster"](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Create.CON.Redis.html). With "Cluster Mode Enabled", the data will be stored in shards (called "node groups"). See [Redis Cluster Configuration](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/cluster-create-determine-requirements.html#redis-cluster-configuration) for a diagram of the differences. To enable cluster mode, use a parameter group that has cluster mode enabled.
The default parameter groups provided by AWS end with ".cluster.on", for example `default.redis6.x.cluster.on`.
+
+## Example Usage
+
+### Redis Cluster Mode Disabled
+
+To create a single shard primary with single read replica:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElasticacheReplicationGroup(self, "example",
+            automatic_failover_enabled=True,
+            description="example description",
+            node_type="cache.m4.large",
+            num_cache_clusters=2,
+            parameter_group_name="default.redis3.2",
+            port=6379,
+            preferred_cache_cluster_azs=["us-west-2a", "us-west-2b"],
+            replication_group_id="tf-rep-group-1"
+        )
+```
+
+You have two options for adjusting the number of replicas:
+
+* Adjusting `num_cache_clusters` directly. This will attempt to automatically add or remove replicas, but provides no granular control (e.g., preferred availability zone, cache cluster ID) for the added or removed replicas. This also currently expects cache cluster IDs in the form of `replication_group_id-00#`.
+* Otherwise, for fine-grained control of the underlying cache clusters, they can be added or removed with the [`aws_elasticache_cluster` resource](/docs/providers/aws/r/elasticache_cluster.html) and its `replication_group_id` attribute. In this situation, you will need to utilize the [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignore_changes` to prevent perpetual differences during Terraform plan with the `num_cache_clusters` attribute.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformCount, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticache_cluster import ElasticacheCluster
+from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = ElasticacheReplicationGroup(self, "example",
+            automatic_failover_enabled=True,
+            description="example description",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=[num_cache_clusters]
+            ),
+            node_type="cache.m4.large",
+            num_cache_clusters=2,
+            parameter_group_name="default.redis3.2",
+            port=6379,
+            preferred_cache_cluster_azs=["us-west-2a", "us-west-2b"],
+            replication_group_id="tf-rep-group-1"
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
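+        # (TerraformCount reproduces Terraform's `count` meta-argument;
+        # `replica_count.index` interpolates each instance's index into the
+        # cluster_id below.)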
+        replica_count = TerraformCount.of(Token.as_number("1"))
+        ElasticacheCluster(self, "replica",
+            cluster_id="tf-rep-group-1-${" + replica_count.index + "}",
+            replication_group_id=example.id,
+            count=replica_count
+        )
+```
+
+### Redis Cluster Mode Enabled
+
+To create two shards with a primary and a single read replica each:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElasticacheReplicationGroup(self, "baz",
+            automatic_failover_enabled=True,
+            description="example description",
+            node_type="cache.t2.small",
+            num_node_groups=2,
+            parameter_group_name="default.redis3.2.cluster.on",
+            port=6379,
+            replicas_per_node_group=1,
+            replication_group_id="tf-redis-cluster"
+        )
+```
+
+### Redis Log Delivery configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElasticacheReplicationGroup(self, "test",
+            apply_immediately=True,
+            auto_minor_version_upgrade=Token.as_string(False),
+            description="test description",
+            log_delivery_configuration=[ElasticacheReplicationGroupLogDeliveryConfiguration(
+                destination=example.name,
+                destination_type="cloudwatch-logs",
+                log_format="text",
+                log_type="slow-log"
+            ), ElasticacheReplicationGroupLogDeliveryConfiguration(
+                destination=Token.as_string(aws_kinesis_firehose_delivery_stream_example.name),
+                destination_type="kinesis-firehose",
+                log_format="json",
+                log_type="engine-log"
+            )
+            ],
+            maintenance_window="tue:06:30-tue:07:30",
+            node_type="cache.t3.small",
+            port=6379,
+            replication_group_id="myreplicaciongroup",
+            snapshot_window="01:00-02:00"
+        )
+```
+
+~> **Note:** We currently do not support passing a `primary_cluster_id` in order to create the Replication Group.
+
+~> **Note:** Automatic Failover is unavailable for Redis versions earlier than 2.8.6,
+and unavailable on T1 node types. For T2 node types, it is only available on Redis version 3.2.4 or later with cluster mode enabled. See the [High Availability Using Replication Groups](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Replication.html) guide
+for full details on using Replication Groups.
+
+### Creating a secondary replication group for a global replication group
+
+A Global Replication Group can have one or two secondary Replication Groups in different regions. These are added to an existing Global Replication Group.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.elasticache_global_replication_group import ElasticacheGlobalReplicationGroup +from imports.aws.elasticache_replication_group import ElasticacheReplicationGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + primary = ElasticacheReplicationGroup(self, "primary", + description="primary replication group", + engine="redis", + engine_version="5.0.6", + node_type="cache.m5.large", + num_cache_clusters=1, + provider=other_region, + replication_group_id="example-primary" + ) + example = ElasticacheGlobalReplicationGroup(self, "example", + global_replication_group_id_suffix="example", + primary_replication_group_id=primary.id, + provider=other_region + ) + ElasticacheReplicationGroup(self, "secondary", + description="secondary replication group", + global_replication_group_id=example.global_replication_group_id, + num_cache_clusters=1, + replication_group_id="example-secondary" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `description` – (Required) User-created description for the replication group. Must not be empty. +* `replication_group_id` – (Required) Replication group identifier. This parameter is stored as a lowercase string. + +The following arguments are optional: + +* `apply_immediately` - (Optional) Specifies whether any modifications are applied immediately, or during the next maintenance window. Default is `false`. +* `at_rest_encryption_enabled` - (Optional) Whether to enable encryption at rest. +* `auth_token` - (Optional) Password used to access a password protected server. Can be specified only if `transit_encryption_enabled = true`. +* `auto_minor_version_upgrade` - (Optional) Specifies whether minor version engine upgrades will be applied automatically to the underlying Cache Cluster instances during the maintenance window. + Only supported for engine type `"redis"` and if the engine version is 6 or higher. + Defaults to `true`. +* `automatic_failover_enabled` - (Optional) Specifies whether a read-only replica will be automatically promoted to read/write primary if the existing primary fails. If enabled, `num_cache_clusters` must be greater than 1. Must be enabled for Redis (cluster mode enabled) replication groups. Defaults to `false`. +* `data_tiering_enabled` - (Optional) Enables data tiering. Data tiering is only supported for replication groups using the r6gd node type. This parameter must be set to `true` when using r6gd nodes. +* `engine` - (Optional) Name of the cache engine to be used for the clusters in this replication group. The only valid value is `redis`. +* `engine_version` - (Optional) Version number of the cache engine to be used for the cache clusters in this replication group. + If the version is 7 or higher, the major and minor version should be set, e.g., `7.2`. + If the version is 6, the major and minor version can be set, e.g., `6.2`, + or the minor version can be unspecified which will use the latest version at creation time, e.g., `6.x`. + Otherwise, specify the full version desired, e.g., `5.0.6`. + The actual engine version used is returned in the attribute `engine_version_actual`, see [Attribute Reference](#attribute-reference) below. +* `final_snapshot_identifier` - (Optional) The name of your final node group (shard) snapshot. ElastiCache creates the snapshot from the primary node in the cluster. If omitted, no final snapshot will be made. 
+* `global_replication_group_id` - (Optional) The ID of the global replication group to which this replication group should belong. If this parameter is specified, the replication group is added to the specified global replication group as a secondary replication group; otherwise, the replication group is not part of any global replication group. If `global_replication_group_id` is set, the `num_node_groups` parameter cannot be set.
+* `kms_key_id` - (Optional) The ARN of the key that you wish to use if encrypting at rest. If not supplied, uses service managed encryption. Can be specified only if `at_rest_encryption_enabled = true`.
+* `log_delivery_configuration` - (Optional, Redis only) Specifies the destination and format of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log). See the documentation on [Amazon ElastiCache](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log). See [Log Delivery Configuration](#log-delivery-configuration) below for more details.
+* `maintenance_window` – (Optional) Specifies the weekly time range for when maintenance on the cache cluster is performed. The format is `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:05:00-sun:09:00`
+* `multi_az_enabled` - (Optional) Specifies whether to enable Multi-AZ Support for the replication group. If `true`, `automatic_failover_enabled` must also be enabled. Defaults to `false`.
+* `node_type` - (Optional) Instance class to be used. See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html). Required unless `global_replication_group_id` is set. Cannot be set if `global_replication_group_id` is set.
+* `notification_topic_arn` – (Optional) ARN of an SNS topic to send ElastiCache notifications to. Example: `arn:aws:sns:us-east-1:012345678999:my_sns_topic`
+* `num_cache_clusters` - (Optional) Number of cache clusters (primary and replicas) this replication group will have. If Multi-AZ is enabled, the value of this parameter must be at least 2. Updates will occur before other modifications. Conflicts with `num_node_groups`. Defaults to `1`.
+* `num_node_groups` - (Optional) Number of node groups (shards) for this Redis replication group.
+  Changing this number will trigger a resizing operation before other settings modifications.
+* `parameter_group_name` - (Optional) Name of the parameter group to associate with this replication group. If this argument is omitted, the default cache parameter group for the specified engine is used. To enable "cluster mode", i.e., data sharding, use a parameter group that has the parameter `cluster-enabled` set to true.
+* `port` – (Optional) Port number on which each of the cache nodes will accept connections. For Memcached the default is 11211, and for Redis the default port is 6379.
+* `preferred_cache_cluster_azs` - (Optional) List of EC2 availability zones in which the replication group's cache clusters will be created. The order of the availability zones in the list is considered. The first item in the list will be the primary node. Ignored when updating.
+* `replicas_per_node_group` - (Optional) Number of replica nodes in each node group.
+  Changing this number will trigger a resizing operation before other settings modifications.
+  Valid values are 0 to 5.
+* `security_group_ids` - (Optional) One or more Amazon VPC security groups associated with this replication group. Use this parameter only when you are creating a replication group in an Amazon Virtual Private Cloud
+* `security_group_names` - (Optional) List of cache security group names to associate with this replication group.
+* `snapshot_arns` – (Optional) List of ARNs that identify Redis RDB snapshot files stored in Amazon S3. The object names cannot contain any commas.
+* `snapshot_name` - (Optional) Name of a snapshot from which to restore data into the new node group. Changing the `snapshot_name` forces a new resource.
+* `snapshot_retention_limit` - (Optional, Redis only) Number of days for which ElastiCache will retain automatic cache cluster snapshots before deleting them. For example, if you set SnapshotRetentionLimit to 5, then a snapshot that was taken today will be retained for 5 days before being deleted. If the value of `snapshot_retention_limit` is set to zero (0), backups are turned off. Please note that setting a `snapshot_retention_limit` is not supported on cache.t1.micro cache nodes
+* `snapshot_window` - (Optional, Redis only) Daily time range (in UTC) during which ElastiCache will begin taking a daily snapshot of your cache cluster. The minimum snapshot window is a 60 minute period. Example: `05:00-09:00`
+* `subnet_group_name` - (Optional) Name of the cache subnet group to be used for the replication group.
+* `tags` - (Optional) Map of tags to assign to the resource. Adding tags to this resource will add or overwrite any existing tags on the clusters in the replication group and not to the group itself. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `transit_encryption_enabled` - (Optional) Whether to enable encryption in transit.
+* `user_group_ids` - (Optional) User Group ID to associate with the replication group. Only a maximum of one (1) user group ID is valid. **NOTE:** This argument _is_ a set because the AWS specification allows for multiple IDs. However, in practice, AWS only allows a maximum size of one.
+
+### Log Delivery Configuration
+
+The `log_delivery_configuration` block allows the streaming of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) to CloudWatch Logs or Kinesis Data Firehose. Max of 2 blocks.
+
+* `destination` - Name of either the CloudWatch Logs LogGroup or Kinesis Data Firehose resource.
+* `destination_type` - For CloudWatch Logs use `cloudwatch-logs` or for Kinesis Data Firehose use `kinesis-firehose`.
+* `log_format` - Valid values are `json` or `text`
+* `log_type` - Valid values are `slow-log` or `engine-log`. Max 1 of each.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the created ElastiCache Replication Group.
+* `engine_version_actual` - Because ElastiCache pulls the latest minor or patch for a version, this attribute returns the running version of the cache engine.
+* `cluster_enabled` - Indicates if cluster mode is enabled. +* `configuration_endpoint_address` - Address of the replication group configuration endpoint when cluster mode is enabled. +* `id` - ID of the ElastiCache Replication Group. +* `member_clusters` - Identifiers of all the nodes that are part of this replication group. +* `primary_endpoint_address` - (Redis only) Address of the endpoint for the primary node in the replication group, if the cluster mode is disabled. +* `reader_endpoint_address` - (Redis only) Address of the endpoint for the reader node in the replication group, if the cluster mode is disabled. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60m`) +* `delete` - (Default `40m`) +* `update` - (Default `40m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Replication Groups using the `replication_group_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ElastiCache Replication Groups using the `replication_group_id`. For example: + +```console +% terraform import aws_elasticache_replication_group.my_replication_group replication-group-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elasticache_subnet_group.html.markdown b/website/docs/cdktf/python/r/elasticache_subnet_group.html.markdown new file mode 100644 index 00000000000..d3e8a5c9fcb --- /dev/null +++ b/website/docs/cdktf/python/r/elasticache_subnet_group.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_subnet_group" +description: |- + Provides an ElastiCache Subnet Group resource. +--- + + + +# Resource: aws_elasticache_subnet_group + +Provides an ElastiCache Subnet Group resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticache_subnet_group import ElasticacheSubnetGroup +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foo = Vpc(self, "foo", + cidr_block="10.0.0.0/16", + tags={ + "Name": "tf-test" + } + ) + aws_subnet_foo = Subnet(self, "foo_1", + availability_zone="us-west-2a", + cidr_block="10.0.0.0/24", + tags={ + "Name": "tf-test" + }, + vpc_id=foo.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_subnet_foo.override_logical_id("foo") + ElasticacheSubnetGroup(self, "bar", + name="tf-test-cache-subnet", + subnet_ids=[Token.as_string(aws_subnet_foo.id)] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` – (Required) Name for the cache subnet group. ElastiCache converts this name to lowercase. +* `description` – (Optional) Description for the cache subnet group. Defaults to "Managed by Terraform". +* `subnet_ids` – (Required) List of VPC Subnet IDs for the cache subnet group +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `description` - The Description of the ElastiCache Subnet Group. +* `name` - The Name of the ElastiCache Subnet Group. +* `subnet_ids` - The Subnet IDs of the ElastiCache Subnet Group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Subnet Groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ElastiCache Subnet Groups using the `name`. For example: + +```console +% terraform import aws_elasticache_subnet_group.bar tf-test-cache-subnet +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elasticache_user.html.markdown b/website/docs/cdktf/python/r/elasticache_user.html.markdown new file mode 100644 index 00000000000..2511aade14c --- /dev/null +++ b/website/docs/cdktf/python/r/elasticache_user.html.markdown @@ -0,0 +1,142 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_user" +description: |- + Provides an ElastiCache user. +--- + + + +# Resource: aws_elasticache_user + +Provides an ElastiCache user resource. + +~> **Note:** All arguments including the username and passwords will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.elasticache_user import ElasticacheUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElasticacheUser(self, "test", + access_string="on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember", + engine="REDIS", + passwords=["password123456789"], + user_id="testUserId", + user_name="testUserName" + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticache_user import ElasticacheUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElasticacheUser(self, "test", + access_string="on ~* +@all", + authentication_mode=ElasticacheUserAuthenticationMode( + type="iam" + ), + engine="REDIS", + user_id="testUserId", + user_name="testUserName" + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticache_user import ElasticacheUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElasticacheUser(self, "test", + access_string="on ~* +@all", + authentication_mode=ElasticacheUserAuthenticationMode( + passwords=["password1", "password2"], + type="password" + ), + engine="REDIS", + user_id="testUserId", + user_name="testUserName" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `access_string` - (Required) Access permissions string used for this user. See [Specifying Permissions Using an Access String](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html#Access-string) for more details. +* `engine` - (Required) The current supported value is `REDIS`. +* `user_id` - (Required) The ID of the user. +* `user_name` - (Required) The username of the user. + +The following arguments are optional: + +* `authentication_mode` - (Optional) Denotes the user's authentication properties. Detailed below. +* `no_password_required` - (Optional) Indicates a password is not required for this user. +* `passwords` - (Optional) Passwords used for this user. You can create up to two passwords for each user. +* `tags` - (Optional) A list of tags to be added to this resource. A tag is a key-value pair. + +### authentication_mode Configuration Block + +* `passwords` - (Optional) Specifies the passwords to use for authentication if `type` is set to `password`. +* `type` - (Required) Specifies the authentication type. Possible options are: `password`, `no-password-required` or `iam`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the created ElastiCache User. 
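+
+Because `passwords` accepts up to two values, a password can be rotated without downtime: apply with both the old and new password, move clients to the new one, then remove the old password and apply again. A minimal hand-written sketch (user names and passwords are placeholders):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.elasticache_user import ElasticacheUser
+class RotatePasswordSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Both passwords are valid at once during the rotation window.
+        ElasticacheUser(self, "app",
+            access_string="on ~app::* -@all +@read",
+            engine="REDIS",
+            passwords=["old-password-123456789", "new-password-123456789"],
+            user_id="appUserId",
+            user_name="appUserName"
+        )
+```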
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5m`) +- `update` - (Default `5m`) +- `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache users using the `user_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ElastiCache users using the `user_id`. For example: + +```console +% terraform import aws_elasticache_user.my_user userId1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elasticache_user_group.html.markdown b/website/docs/cdktf/python/r/elasticache_user_group.html.markdown new file mode 100644 index 00000000000..749bd37dd10 --- /dev/null +++ b/website/docs/cdktf/python/r/elasticache_user_group.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_user_group" +description: |- + Provides an ElastiCache user group. +--- + + + +# Resource: aws_elasticache_user_group + +Provides an ElastiCache user group resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticache_user import ElasticacheUser +from imports.aws.elasticache_user_group import ElasticacheUserGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = ElasticacheUser(self, "test", + access_string="on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember", + engine="REDIS", + passwords=["password123456789"], + user_id="testUserId", + user_name="default" + ) + aws_elasticache_user_group_test = ElasticacheUserGroup(self, "test_1", + engine="REDIS", + user_group_id="userGroupId", + user_ids=[test.user_id] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_elasticache_user_group_test.override_logical_id("test") +``` + +## Argument Reference + +The following arguments are required: + +* `engine` - (Required) The current supported value is `REDIS`. +* `user_group_id` - (Required) The ID of the user group. + +The following arguments are optional: + +* `user_ids` - (Optional) The list of user IDs that belong to the user group. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The user group identifier. +* `arn` - The ARN that identifies the user group. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache user groups using the `user_group_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ElastiCache user groups using the `user_group_id`. For example: + +```console +% terraform import aws_elasticache_user_group.my_user_group userGoupId1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elasticache_user_group_association.html.markdown b/website/docs/cdktf/python/r/elasticache_user_group_association.html.markdown new file mode 100644 index 00000000000..d7981aee10a --- /dev/null +++ b/website/docs/cdktf/python/r/elasticache_user_group_association.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_user_group_association" +description: |- + Associate an ElastiCache user and user group. +--- + + + +# Resource: aws_elasticache_user_group_association + +Associate an existing ElastiCache user and an existing user group. + +~> **NOTE:** Terraform will detect changes in the `aws_elasticache_user_group` since `aws_elasticache_user_group_association` changes the user IDs associated with the user group. You can ignore these changes with the `lifecycle` `ignore_changes` meta argument as shown in the example. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticache_user import ElasticacheUser +from imports.aws.elasticache_user_group import ElasticacheUserGroup +from imports.aws.elasticache_user_group_association import ElasticacheUserGroupAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = ElasticacheUser(self, "default", + access_string="on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember", + engine="REDIS", + passwords=["password123456789"], + user_id="defaultUserID", + user_name="default" + ) + example = ElasticacheUser(self, "example", + access_string="on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember", + engine="REDIS", + passwords=["password123456789"], + user_id="exampleUserID", + user_name="exampleuser" + ) + aws_elasticache_user_group_example = ElasticacheUserGroup(self, "example_2", + engine="REDIS", + lifecycle=TerraformResourceLifecycle( + ignore_changes=[user_ids] + ), + user_group_id="userGroupId", + user_ids=[default_var.user_id] + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        aws_elasticache_user_group_example.override_logical_id("example")
+        aws_elasticache_user_group_association_example = ElasticacheUserGroupAssociation(self, "example_3",
+            user_group_id=Token.as_string(aws_elasticache_user_group_example.user_group_id),
+            user_id=example.user_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_elasticache_user_group_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `user_group_id` - (Required) ID of the user group.
+* `user_id` - (Required) ID of the user to associate with the user group.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache user group associations using the `user_group_id` and `user_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import ElastiCache user group associations using the `user_group_id` and `user_id`. For example:
+
+```console
+% terraform import aws_elasticache_user_group_association.example userGoupId1,userId
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elasticsearch_domain.html.markdown b/website/docs/cdktf/python/r/elasticsearch_domain.html.markdown
new file mode 100644
index 00000000000..f3c2df1deaa
--- /dev/null
+++ b/website/docs/cdktf/python/r/elasticsearch_domain.html.markdown
@@ -0,0 +1,396 @@
+---
+subcategory: "Elasticsearch"
+layout: "aws"
+page_title: "AWS: aws_elasticsearch_domain"
+description: |-
+  Terraform resource for managing an AWS Elasticsearch Domain.
+---
+
+
+
+# Resource: aws_elasticsearch_domain
+
+Manages an AWS Elasticsearch Domain.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticsearch_domain import ElasticsearchDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElasticsearchDomain(self, "example",
+            cluster_config=ElasticsearchDomainClusterConfig(
+                instance_type="r4.large.elasticsearch"
+            ),
+            domain_name="example",
+            elasticsearch_version="7.10",
+            tags={
+                "Domain": "TestDomain"
+            }
+        )
+```
+
+### Access Policy
+
+-> See also: [`aws_elasticsearch_domain_policy` resource](/docs/providers/aws/r/elasticsearch_domain_policy.html)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformVariable, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.elasticsearch_domain import ElasticsearchDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        domain = TerraformVariable(self, "domain",
+            default="tf-test"
+        )
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_region_current = DataAwsRegion(self, "current_2")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_region_current.override_logical_id("current")
+        ElasticsearchDomain(self, "example",
+            access_policies="{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Action\": \"es:*\",\n \"Principal\": \"*\",\n \"Effect\": \"Allow\",\n \"Resource\": \"arn:aws:es:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:domain/${" + domain.value + "}/*\",\n \"Condition\": {\n \"IpAddress\": {\"aws:SourceIp\": [\"66.193.100.22/32\"]}\n }\n }\n ]\n}\n\n",
+            domain_name=domain.string_value
+        )
+```
+
+### Log Publishing to CloudWatch Logs
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.elasticsearch_domain import ElasticsearchDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, domain_name):
+        super().__init__(scope, name)
+        example = CloudwatchLogGroup(self, "example",
+            name="example"
+        )
+        aws_elasticsearch_domain_example = ElasticsearchDomain(self, "example_1",
+            log_publishing_options=[ElasticsearchDomainLogPublishingOptions(
+                cloudwatch_log_group_arn=example.arn,
+                log_type="INDEX_SLOW_LOGS"
+            )
+            ],
+            domain_name=domain_name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_elasticsearch_domain_example.override_logical_id("example")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_2",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:PutLogEvents", "logs:PutLogEventsBatch", "logs:CreateLogStream"
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["es.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=["arn:aws:logs:*"]
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_cloudwatch_log_resource_policy_example = CloudwatchLogResourcePolicy(self, "example_3",
+            policy_document=Token.as_string(data_aws_iam_policy_document_example.json),
+            policy_name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_cloudwatch_log_resource_policy_example.override_logical_id("example") +``` + +### VPC based ES + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.data_aws_subnets import DataAwsSubnets +from imports.aws.data_aws_vpc import DataAwsVpc +from imports.aws.elasticsearch_domain import ElasticsearchDomain +from imports.aws.iam_service_linked_role import IamServiceLinkedRole +from imports.aws.security_group import SecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + domain = TerraformVariable(self, "domain", + default="tf-test" + ) + vpc = TerraformVariable(self, "vpc") + es = IamServiceLinkedRole(self, "es", + aws_service_name="opensearchservice.amazonaws.com" + ) + current = DataAwsCallerIdentity(self, "current") + data_aws_region_current = DataAwsRegion(self, "current_4") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_region_current.override_logical_id("current") + selected = DataAwsVpc(self, "selected", + tags={ + "Name": vpc.string_value + } + ) + aws_security_group_es = SecurityGroup(self, "es_6", + description="Managed by Terraform", + ingress=[SecurityGroupIngress( + cidr_blocks=[Token.as_string(selected.cidr_block)], + from_port=443, + protocol="tcp", + to_port=443 + ) + ], + name="${" + vpc.value + "}-elasticsearch-${" + domain.value + "}", + vpc_id=Token.as_string(selected.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_security_group_es.override_logical_id("es") + data_aws_subnets_selected = DataAwsSubnets(self, "selected_7", + filter=[DataAwsSubnetsFilter( + name="vpc-id", + values=[Token.as_string(selected.id)] + ) + ], + tags={ + "Tier": "private" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ data_aws_subnets_selected.override_logical_id("selected") + aws_elasticsearch_domain_es = ElasticsearchDomain(self, "es_8", + access_policies="{\n\t\"Version\": \"2012-10-17\",\n\t\"Statement\": [\n\t\t{\n\t\t\t\"Action\": \"es:*\",\n\t\t\t\"Principal\": \"*\",\n\t\t\t\"Effect\": \"Allow\",\n\t\t\t\"Resource\": \"arn:aws:es:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:domain/${" + domain.value + "}/*\"\n\t\t}\n\t]\n}\n\n", + advanced_options={ + "rest.action.multi.allow_explicit_index": "true" + }, + cluster_config=ElasticsearchDomainClusterConfig( + instance_type="m4.large.elasticsearch", + zone_awareness_enabled=True + ), + depends_on=[es], + domain_name=domain.string_value, + elasticsearch_version="6.3", + tags={ + "Domain": "TestDomain" + }, + vpc_options=ElasticsearchDomainVpcOptions( + security_group_ids=[Token.as_string(aws_security_group_es.id)], + subnet_ids=[ + Token.as_string(property_access(data_aws_subnets_selected.ids, ["0"])), + Token.as_string(property_access(data_aws_subnets_selected.ids, ["1"])) + ] + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_elasticsearch_domain_es.override_logical_id("es") +``` + +## Argument Reference + +The following arguments are required: + +* `domain_name` - (Required) Name of the domain. + +The following arguments are optional: + +* `access_policies` - (Optional) IAM policy document specifying the access policies for the domain. +* `advanced_options` - (Optional) Key-value string pairs to specify advanced configuration options. Note that the values for these configuration options must be strings (wrapped in quotes) or they may be wrong and cause a perpetual diff, causing Terraform to want to recreate your Elasticsearch domain on every apply. +* `advanced_security_options` - (Optional) Configuration block for [fine-grained access control](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/fgac.html). Detailed below. +* `auto_tune_options` - (Optional) Configuration block for the Auto-Tune options of the domain. Detailed below. +* `cluster_config` - (Optional) Configuration block for the cluster of the domain. Detailed below. +* `cognito_options` - (Optional) Configuration block for authenticating Kibana with Cognito. Detailed below. +* `domain_endpoint_options` - (Optional) Configuration block for domain endpoint HTTP(S) related options. Detailed below. +* `ebs_options` - (Optional) Configuration block for EBS related options, may be required based on chosen [instance size](https://aws.amazon.com/elasticsearch-service/pricing/). Detailed below. +* `elasticsearch_version` - (Optional) Version of Elasticsearch to deploy. Defaults to `1.5`. +* `encrypt_at_rest` - (Optional) Configuration block for encrypt at rest options. Only available for [certain instance types](http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-supported-instance-types.html). Detailed below. +* `log_publishing_options` - (Optional) Configuration block for publishing slow and application logs to CloudWatch Logs. This block can be declared multiple times, for each log_type, within the same resource. Detailed below. +* `node_to_node_encryption` - (Optional) Configuration block for node-to-node encryption options. Detailed below. +* `snapshot_options` - (Optional) Configuration block for snapshot related options. Detailed below. DEPRECATED. 
For domains running Elasticsearch 5.3 and later, Amazon ES takes hourly automated snapshots, making this setting irrelevant. For domains running earlier versions of Elasticsearch, Amazon ES takes daily automated snapshots. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpc_options` - (Optional) Configuration block for VPC related options. Adding or removing this configuration forces a new resource ([documentation](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-vpc.html#es-vpc-limitations)). Detailed below. + +### advanced_security_options + +* `enabled` - (Required, Forces new resource) Whether advanced security is enabled. +* `internal_user_database_enabled` - (Optional, Default: false) Whether the internal user database is enabled. If not set, defaults to `false` by the AWS API. +* `master_user_options` - (Optional) Configuration block for the main user. Detailed below. + +#### master_user_options + +* `master_user_arn` - (Optional) ARN for the main user. Only specify if `internal_user_database_enabled` is not set or set to `false`. +* `master_user_name` - (Optional) Main user's username, which is stored in the Amazon Elasticsearch Service domain's internal database. Only specify if `internal_user_database_enabled` is set to `true`. +* `master_user_password` - (Optional) Main user's password, which is stored in the Amazon Elasticsearch Service domain's internal database. Only specify if `internal_user_database_enabled` is set to `true`. + +### auto_tune_options + +* `desired_state` - (Required) The Auto-Tune desired state for the domain. Valid values: `ENABLED` or `DISABLED`. +* `maintenance_schedule` - (Required if `rollback_on_disable` is set to `DEFAULT_ROLLBACK`) Configuration block for Auto-Tune maintenance windows. Can be specified multiple times for each maintenance window. Detailed below. +* `rollback_on_disable` - (Optional) Whether to roll back to default Auto-Tune settings when disabling Auto-Tune. Valid values: `DEFAULT_ROLLBACK` or `NO_ROLLBACK`. + +#### maintenance_schedule + +* `start_at` - (Required) Date and time at which to start the Auto-Tune maintenance schedule in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `duration` - (Required) Configuration block for the duration of the Auto-Tune maintenance window. Detailed below. +* `cron_expression_for_recurrence` - (Required) A cron expression specifying the recurrence pattern for an Auto-Tune maintenance schedule. + +##### duration + +* `value` - (Required) An integer specifying the value of the duration of an Auto-Tune maintenance window. +* `unit` - (Required) The unit of time specifying the duration of an Auto-Tune maintenance window. Valid values: `HOURS`. + +### cluster_config + +* `cold_storage_options` - (Optional) Configuration block containing cold storage configuration. Detailed below. +* `dedicated_master_count` - (Optional) Number of dedicated main nodes in the cluster. +* `dedicated_master_enabled` - (Optional) Whether dedicated main nodes are enabled for the cluster. +* `dedicated_master_type` - (Optional) Instance type of the dedicated main nodes in the cluster. +* `instance_count` - (Optional) Number of instances in the cluster. 
+* `instance_type` - (Optional) Instance type of data nodes in the cluster.
+* `warm_count` - (Optional) Number of warm nodes in the cluster. Valid values are between `2` and `150`. `warm_count` can only be set, and must be set, when `warm_enabled` is set to `true`.
+* `warm_enabled` - (Optional) Whether to enable warm storage.
+* `warm_type` - (Optional) Instance type for the Elasticsearch cluster's warm nodes. Valid values are `ultrawarm1.medium.elasticsearch`, `ultrawarm1.large.elasticsearch` and `ultrawarm1.xlarge.elasticsearch`. `warm_type` can only be set, and must be set, when `warm_enabled` is set to `true`.
+* `zone_awareness_config` - (Optional) Configuration block containing zone awareness settings. Detailed below.
+* `zone_awareness_enabled` - (Optional) Whether zone awareness is enabled, set to `true` for multi-az deployment. To enable awareness with three Availability Zones, the `availability_zone_count` within the `zone_awareness_config` must be set to `3`.
+
+#### cold_storage_options
+
+* `enabled` - (Optional) Boolean to enable cold storage for an Elasticsearch domain. Defaults to `false`. Master and ultrawarm nodes must be enabled for cold storage.
+
+#### zone_awareness_config
+
+* `availability_zone_count` - (Optional) Number of Availability Zones for the domain to use with `zone_awareness_enabled`. Defaults to `2`. Valid values: `2` or `3`.
+
+### cognito_options
+
+AWS documentation: [Amazon Cognito Authentication for Kibana](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html)
+
+* `enabled` - (Optional, Default: false) Whether Amazon Cognito authentication with Kibana is enabled or not.
+* `identity_pool_id` - (Required) ID of the Cognito Identity Pool to use.
+* `role_arn` - (Required) ARN of the IAM role that has the AmazonESCognitoAccess policy attached.
+* `user_pool_id` - (Required) ID of the Cognito User Pool to use.
+
+### domain_endpoint_options
+
+* `custom_endpoint_certificate_arn` - (Optional) ACM certificate ARN for your custom endpoint.
+* `custom_endpoint_enabled` - (Optional) Whether to enable custom endpoint for the Elasticsearch domain.
+* `custom_endpoint` - (Optional) Fully qualified domain for your custom endpoint.
+* `enforce_https` - (Optional) Whether or not to require HTTPS. Defaults to `true`.
+* `tls_security_policy` - (Optional) Name of the TLS security policy that needs to be applied to the HTTPS endpoint. Valid values: `Policy-Min-TLS-1-0-2019-07` and `Policy-Min-TLS-1-2-2019-07`. Terraform will only perform drift detection if a configuration value is provided.
+
+### ebs_options
+
+* `ebs_enabled` - (Required) Whether EBS volumes are attached to data nodes in the domain.
+* `iops` - (Optional) Baseline input/output (I/O) performance of EBS volumes attached to data nodes. Applicable only for the GP3 and Provisioned IOPS EBS volume types.
+* `throughput` - (Required if `volume_type` is set to `gp3`) Specifies the throughput (in MiB/s) of the EBS volumes attached to data nodes. Applicable only for the gp3 volume type.
+* `volume_size` - (Required if `ebs_enabled` is set to `true`.) Size of EBS volumes attached to data nodes (in GiB).
+* `volume_type` - (Optional) Type of EBS volumes attached to data nodes.
+
+### encrypt_at_rest
+
+~> **Note:** You can enable `encrypt_at_rest` _in place_ for an existing, unencrypted domain only if your Elasticsearch version is 6.7 or greater. For lower versions, if you enable `encrypt_at_rest`, Terraform will recreate the domain, potentially causing data loss.
For any version, if you disable `encrypt_at_rest` for an existing, encrypted domain, Terraform will recreate the domain, potentially causing data loss. If you change the `kms_key_id`, Terraform will also recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable encryption at rest. If the `encrypt_at_rest` block is not provided then this defaults to `false`. Enabling encryption on new domains requires `elasticsearch_version` 5.1 or greater.
+* `kms_key_id` - (Optional) KMS key ARN to encrypt the Elasticsearch domain with. If not specified then it defaults to using the `aws/es` service KMS key. Note that KMS will accept a KMS key ID but will return the key ARN. To prevent Terraform from detecting unwanted changes, use the key ARN instead.
+
+### log_publishing_options
+
+* `cloudwatch_log_group_arn` - (Required) ARN of the CloudWatch log group to which logs need to be published.
+* `enabled` - (Optional, Default: true) Whether the given log publishing option is enabled.
+* `log_type` - (Required) Type of Elasticsearch log. Valid values: `INDEX_SLOW_LOGS`, `SEARCH_SLOW_LOGS`, `ES_APPLICATION_LOGS`, `AUDIT_LOGS`.
+
+### node_to_node_encryption
+
+~> **Note:** You can enable `node_to_node_encryption` _in place_ for an existing, unencrypted domain only if your Elasticsearch version is 6.7 or greater. For lower versions, if you enable `node_to_node_encryption`, Terraform will recreate the domain, potentially causing data loss. For any version, if you disable `node_to_node_encryption` for an existing, node-to-node encrypted domain, Terraform will recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable node-to-node encryption. If the `node_to_node_encryption` block is not provided then this defaults to `false`. Enabling node-to-node encryption of a new domain requires an `elasticsearch_version` of `6.0` or greater.
+
+### snapshot_options
+
+* `automated_snapshot_start_hour` - (Required) Hour during which the service takes an automated daily snapshot of the indices in the domain.
+
+### vpc_options
+
+AWS documentation: [VPC Support for Amazon Elasticsearch Service Domains](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-vpc.html)
+
+~> **Note:** You must have created the service linked role for the Elasticsearch service to use `vpc_options`. If you need to create the service linked role at the same time as the Elasticsearch domain then you must use `depends_on` to make sure that the role is created before the Elasticsearch domain. See the [VPC based ES domain example](#vpc-based-es) above.
+
+-> Security Groups and Subnets referenced in these attributes must all be within the same VPC. This determines what VPC the endpoints are created in.
+
+* `security_group_ids` - (Optional) List of VPC Security Group IDs to be applied to the Elasticsearch domain endpoints. If omitted, the default Security Group for the VPC will be used.
+* `subnet_ids` - (Required) List of VPC Subnet IDs for the Elasticsearch domain endpoints to be created in.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the domain.
+* `domain_id` - Unique identifier for the domain.
+* `domain_name` - Name of the Elasticsearch domain.
+* `endpoint` - Domain-specific endpoint used to submit index, search, and data upload requests.
+* `kibana_endpoint` - Domain-specific endpoint for Kibana, without the https scheme.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpc_options.0.availability_zones` - If the domain was created inside a VPC, the names of the availability zones the configured `subnet_ids` were created inside. +* `vpc_options.0.vpc_id` - If the domain was created inside a VPC, the ID of the VPC. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60m`) +* `update` - (Default `60m`) +* `delete` - (Default `90m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elasticsearch domains using the `domain_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Elasticsearch domains using the `domain_name`. For example: + +```console +% terraform import aws_elasticsearch_domain.example domain_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elasticsearch_domain_policy.html.markdown b/website/docs/cdktf/python/r/elasticsearch_domain_policy.html.markdown new file mode 100644 index 00000000000..c1ecd7a9e7f --- /dev/null +++ b/website/docs/cdktf/python/r/elasticsearch_domain_policy.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Elasticsearch" +layout: "aws" +page_title: "AWS: aws_elasticsearch_domain_policy" +description: |- + Provides an Elasticsearch Domain Policy. +--- + + + +# Resource: aws_elasticsearch_domain_policy + +Allows setting policy to an Elasticsearch domain while referencing domain attributes (e.g., ARN) + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticsearch_domain import ElasticsearchDomain +from imports.aws.elasticsearch_domain_policy import ElasticsearchDomainPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ElasticsearchDomain(self, "example", + domain_name="tf-test", + elasticsearch_version="2.3" + ) + ElasticsearchDomainPolicy(self, "main", + access_policies="{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Action\": \"es:*\",\n \"Principal\": \"*\",\n \"Effect\": \"Allow\",\n \"Condition\": {\n \"IpAddress\": {\"aws:SourceIp\": \"127.0.0.1/32\"}\n },\n \"Resource\": \"${" + example.arn + "}/*\"\n }\n ]\n}\n\n", + domain_name=example.domain_name + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain_name` - (Required) Name of the domain. +* `access_policies` - (Optional) IAM policy document specifying the access policies for the domain + +## Attribute Reference + +This resource exports no additional attributes. 
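+
+The example above builds `access_policies` by string concatenation. As a hand-written sketch (not `cdktf convert` output), the same policy can also be produced with the `aws_iam_policy_document` data source; the construct names below are illustrative:
+
+```python
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.elasticsearch_domain import ElasticsearchDomain
+from imports.aws.elasticsearch_domain_policy import ElasticsearchDomainPolicy
+class MyPolicyDocumentSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = ElasticsearchDomain(self, "example",
+            domain_name="tf-test",
+            elasticsearch_version="2.3"
+        )
+        # "Principal": "*" in JSON maps to type="*" with identifiers=["*"].
+        main_policy = DataAwsIamPolicyDocument(self, "main_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["es:*"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="IpAddress",
+                    values=["127.0.0.1/32"],
+                    variable="aws:SourceIp"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="*"
+                )
+                ],
+                resources=[example.arn + "/*"]
+            )
+            ]
+        )
+        ElasticsearchDomainPolicy(self, "main",
+            access_policies=Token.as_string(main_policy.json),
+            domain_name=example.domain_name
+        )
+```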
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elasticsearch_domain_saml_options.html.markdown b/website/docs/cdktf/python/r/elasticsearch_domain_saml_options.html.markdown
new file mode 100644
index 00000000000..bb910518ae1
--- /dev/null
+++ b/website/docs/cdktf/python/r/elasticsearch_domain_saml_options.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "Elasticsearch"
+layout: "aws"
+page_title: "AWS: aws_elasticsearch_domain_saml_options"
+description: |-
+  Terraform resource for managing SAML authentication options for an AWS Elasticsearch Domain.
+---
+
+
+
+# Resource: aws_elasticsearch_domain_saml_options
+
+Manages SAML authentication options for an AWS Elasticsearch Domain.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elasticsearch_domain import ElasticsearchDomain
+from imports.aws.elasticsearch_domain_saml_options import ElasticsearchDomainSamlOptions
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = ElasticsearchDomain(self, "example",
+            cluster_config=ElasticsearchDomainClusterConfig(
+                instance_type="r4.large.elasticsearch"
+            ),
+            domain_name="example",
+            elasticsearch_version="1.5",
+            snapshot_options=ElasticsearchDomainSnapshotOptions(
+                automated_snapshot_start_hour=23
+            ),
+            tags={
+                "Domain": "TestDomain"
+            }
+        )
+        aws_elasticsearch_domain_saml_options_example = ElasticsearchDomainSamlOptions(self, "example_1",
+            domain_name=example.domain_name,
+            saml_options=ElasticsearchDomainSamlOptionsSamlOptions(
+                enabled=True,
+                idp=ElasticsearchDomainSamlOptionsSamlOptionsIdp(
+                    entity_id="https://example.com",
+                    metadata_content=Token.as_string(Fn.file("./saml-metadata.xml"))
+                )
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_elasticsearch_domain_saml_options_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `domain_name` - (Required) Name of the domain.
+
+The following arguments are optional:
+
+* `saml_options` - (Optional) The SAML authentication options for an AWS Elasticsearch Domain.
+
+### saml_options
+
+* `enabled` - (Required) Whether SAML authentication is enabled.
+* `idp` - (Optional) Information from your identity provider.
+* `master_backend_role` - (Optional) This backend role from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `master_user_name` - (Optional) This username from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `roles_key` - (Optional) Element of the SAML assertion to use for backend roles. Default is `roles`.
+* `session_timeout_minutes` - (Optional) Duration of a session in minutes after a user logs in. Default is `60`. Maximum value is 1,440.
+* `subject_key` - (Optional) Custom SAML attribute to use for user names. Default is an empty string - `""`. This will cause Elasticsearch to use the `NameID` element of the `Subject`, which is the default location for name identifiers in the SAML specification.
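+
+As an illustrative, hand-written sketch (not `cdktf convert` output) of how these optional fields compose, the `saml_options` argument from the Basic Usage example could be extended like this; the role and key names are hypothetical:
+
+```python
+# Drop-in replacement for the saml_options argument in the Basic Usage class.
+saml_options = ElasticsearchDomainSamlOptionsSamlOptions(
+    enabled=True,
+    idp=ElasticsearchDomainSamlOptionsSamlOptionsIdp(
+        entity_id="https://example.com",
+        metadata_content=Token.as_string(Fn.file("./saml-metadata.xml"))
+    ),
+    # Hypothetical SAML backend role granted master-user permissions.
+    master_backend_role="admin-group",
+    # Hypothetical assertion element that carries the backend roles.
+    roles_key="http://schemas.xmlsoap.org/claims/Group",
+    session_timeout_minutes=120
+)
+```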
+
+#### idp
+
+* `entity_id` - (Required) The unique Entity ID of the application in the SAML Identity Provider.
+* `metadata_content` - (Required) The Metadata of the SAML application in XML format.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the domain the SAML options are associated with.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elasticsearch Domain SAML options using the `domain_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Elasticsearch Domain SAML options using the `domain_name`. For example:
+
+```console
+% terraform import aws_elasticsearch_domain_saml_options.example domain_name
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elastictranscoder_pipeline.html.markdown b/website/docs/cdktf/python/r/elastictranscoder_pipeline.html.markdown
new file mode 100644
index 00000000000..3bd5a5edf67
--- /dev/null
+++ b/website/docs/cdktf/python/r/elastictranscoder_pipeline.html.markdown
@@ -0,0 +1,133 @@
+---
+subcategory: "Elastic Transcoder"
+layout: "aws"
+page_title: "AWS: aws_elastictranscoder_pipeline"
+description: |-
+  Provides an Elastic Transcoder pipeline resource.
+---
+
+
+
+# Resource: aws_elastictranscoder_pipeline
+
+Provides an Elastic Transcoder pipeline resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elastictranscoder_pipeline import ElastictranscoderPipeline
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ElastictranscoderPipeline(self, "bar",
+            content_config=ElastictranscoderPipelineContentConfig(
+                bucket=content_bucket.id,
+                storage_class="Standard"
+            ),
+            input_bucket=input_bucket.id,
+            name="aws_elastictranscoder_pipeline_tf_test_",
+            role=test_role.arn,
+            thumbnail_config=ElastictranscoderPipelineThumbnailConfig(
+                bucket=thumb_bucket.id,
+                storage_class="Standard"
+            )
+        )
+```
+
+## Argument Reference
+
+See ["Create Pipeline"](http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/create-pipeline.html) in the AWS docs for reference.
+
+This resource supports the following arguments:
+
+* `aws_kms_key_arn` - (Optional) The AWS Key Management Service (AWS KMS) key that you want to use with this pipeline.
+* `content_config` - (Optional) The ContentConfig object specifies information about the Amazon S3 bucket in which you want Elastic Transcoder to save transcoded files and playlists. (documented below)
+* `content_config_permissions` - (Optional) The permissions for the `content_config` object. (documented below)
+* `input_bucket` - (Required) The Amazon S3 bucket in which you saved the media files that you want to transcode and the graphics that you want to use as watermarks.
+* `name` - (Optional, Forces new resource) The name of the pipeline.
Maximum 40 characters +* `notifications` - (Optional) The Amazon Simple Notification Service (Amazon SNS) topic that you want to notify to report job status. (documented below) +* `output_bucket` - (Optional) The Amazon S3 bucket in which you want Elastic Transcoder to save the transcoded files. +* `role` - (Required) The IAM Amazon Resource Name (ARN) for the role that you want Elastic Transcoder to use to transcode jobs for this pipeline. +* `thumbnail_config` - (Optional) The ThumbnailConfig object specifies information about the Amazon S3 bucket in which you want Elastic Transcoder to save thumbnail files. (documented below) +* `thumbnail_config_permissions` - (Optional) The permissions for the `thumbnail_config` object. (documented below) + +The `content_config` object specifies information about the Amazon S3 bucket in +which you want Elastic Transcoder to save transcoded files and playlists: which +bucket to use, and the storage class that you want to assign to the files. If +you specify values for `content_config`, you must also specify values for +`thumbnail_config`. If you specify values for `content_config` and +`thumbnail_config`, omit the `output_bucket` object. + +The `content_config` object supports the following: + +* `bucket` - The Amazon S3 bucket in which you want Elastic Transcoder to save transcoded files and playlists. +* `storage_class` - The Amazon S3 storage class, `Standard` or `ReducedRedundancy`, that you want Elastic Transcoder to assign to the files and playlists that it stores in your Amazon S3 bucket. + +The `content_config_permissions` object supports the following: + +* `access` - The permission that you want to give to the AWS user that you specified in `content_config_permissions.grantee`. Valid values are `Read`, `ReadAcp`, `WriteAcp` or `FullControl`. +* `grantee` - The AWS user or group that you want to have access to transcoded files and playlists. +* `grantee_type` - Specify the type of value that appears in the `content_config_permissions.grantee` object. Valid values are `Canonical`, `Email` or `Group`. + +The `notifications` object supports the following: + +* `completed` - The topic ARN for the Amazon SNS topic that you want to notify when Elastic Transcoder has finished processing a job in this pipeline. +* `error` - The topic ARN for the Amazon SNS topic that you want to notify when Elastic Transcoder encounters an error condition while processing a job in this pipeline. +* `progressing` - The topic ARN for the Amazon Simple Notification Service (Amazon SNS) topic that you want to notify when Elastic Transcoder has started to process a job in this pipeline. +* `warning` - The topic ARN for the Amazon SNS topic that you want to notify when Elastic Transcoder encounters a warning condition while processing a job in this pipeline. + +The `thumbnail_config` object specifies information about the Amazon S3 bucket in +which you want Elastic Transcoder to save thumbnail files: which bucket to use, +which users you want to have access to the files, the type of access you want +users to have, and the storage class that you want to assign to the files. If +you specify values for `content_config`, you must also specify values for +`thumbnail_config` even if you don't want to create thumbnails. (You control +whether to create thumbnails when you create a job. For more information, see +ThumbnailPattern in the topic Create Job.) If you specify values for +`content_config` and `thumbnail_config`, omit the OutputBucket object. 
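+
+For contrast, here is a hand-written sketch (not `cdktf convert` output) of the simpler form that sets only `output_bucket` and therefore omits both `content_config` and `thumbnail_config`; the bucket names and role ARN are hypothetical placeholders:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.elastictranscoder_pipeline import ElastictranscoderPipeline
+class MyOutputBucketSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # With output_bucket set, transcoded files and thumbnails share one bucket.
+        ElastictranscoderPipeline(self, "simple",
+            input_bucket="tf-example-input",
+            name="tf-example-pipeline",
+            output_bucket="tf-example-output",
+            role="arn:aws:iam::123456789012:role/elastic-transcoder-role"
+        )
+```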
+
+The `thumbnail_config` object supports the following:
+
+* `bucket` - The Amazon S3 bucket in which you want Elastic Transcoder to save thumbnail files.
+* `storage_class` - The Amazon S3 storage class, `Standard` or `ReducedRedundancy`, that you want Elastic Transcoder to assign to the thumbnails that it stores in your Amazon S3 bucket.
+
+The `thumbnail_config_permissions` object supports the following:
+
+* `access` - The permission that you want to give to the AWS user that you specified in `thumbnail_config_permissions.grantee`. Valid values are `Read`, `ReadAcp`, `WriteAcp` or `FullControl`.
+* `grantee` - The AWS user or group that you want to have access to thumbnail files.
+* `grantee_type` - Specify the type of value that appears in the `thumbnail_config_permissions.grantee` object. Valid values are `Canonical`, `Email` or `Group`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Elastic Transcoder pipeline.
+* `arn` - The ARN of the Elastic Transcoder pipeline.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Transcoder pipelines using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Elastic Transcoder pipelines using the `id`. For example:
+
+```console
+% terraform import aws_elastictranscoder_pipeline.basic_pipeline 1407981661351-cttk8b
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elastictranscoder_preset.html.markdown b/website/docs/cdktf/python/r/elastictranscoder_preset.html.markdown
new file mode 100644
index 00000000000..c3cd981eae0
--- /dev/null
+++ b/website/docs/cdktf/python/r/elastictranscoder_preset.html.markdown
@@ -0,0 +1,195 @@
+---
+subcategory: "Elastic Transcoder"
+layout: "aws"
+page_title: "AWS: aws_elastictranscoder_preset"
+description: |-
+  Provides an Elastic Transcoder preset resource.
+---
+
+
+
+# Resource: aws_elastictranscoder_preset
+
+Provides an Elastic Transcoder preset resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.elastictranscoder_preset import ElastictranscoderPreset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElastictranscoderPreset(self, "bar", + audio=ElastictranscoderPresetAudio( + audio_packing_mode="SingleTrack", + bit_rate=Token.as_string(96), + channels=Token.as_string(2), + codec="AAC", + sample_rate=Token.as_string(44100) + ), + audio_codec_options=ElastictranscoderPresetAudioCodecOptions( + profile="AAC-LC" + ), + container="mp4", + description="Sample Preset", + name="sample_preset", + thumbnails=ElastictranscoderPresetThumbnails( + format="png", + interval=Token.as_string(120), + max_height="auto", + max_width="auto", + padding_policy="Pad", + sizing_policy="Fit" + ), + video=ElastictranscoderPresetVideo( + bit_rate="1600", + codec="H.264", + display_aspect_ratio="16:9", + fixed_gop="false", + frame_rate="auto", + keyframes_max_dist=Token.as_string(240), + max_frame_rate="60", + max_height="auto", + max_width="auto", + padding_policy="Pad", + sizing_policy="Fit" + ), + video_codec_options={ + "ColorSpaceConversionMode": "None", + "InterlacedMode": "Progressive", + "Level": "2.2", + "MaxReferenceFrames": Token.as_string(3), + "Profile": "main" + }, + video_watermarks=[ElastictranscoderPresetVideoWatermarks( + horizontal_align="Right", + horizontal_offset="10px", + id="Terraform Test", + max_height="20%", + max_width="20%", + opacity="55.5", + sizing_policy="ShrinkToFit", + target="Content", + vertical_align="Bottom", + vertical_offset="10px" + ) + ] + ) +``` + +## Argument Reference + +See ["Create Preset"](http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/create-preset.html) in the AWS docs for reference. + +This argument supports the following arguments: + +* `audio` - (Optional, Forces new resource) Audio parameters object (documented below). +* `audio_codec_options` - (Optional, Forces new resource) Codec options for the audio parameters (documented below) +* `container` - (Required, Forces new resource) The container type for the output file. Valid values are `flac`, `flv`, `fmp4`, `gif`, `mp3`, `mp4`, `mpg`, `mxf`, `oga`, `ogg`, `ts`, and `webm`. +* `description` - (Optional, Forces new resource) A description of the preset (maximum 255 characters) +* `name` - (Optional, Forces new resource) The name of the preset. (maximum 40 characters) +* `thumbnails` - (Optional, Forces new resource) Thumbnail parameters object (documented below) +* `video` - (Optional, Forces new resource) Video parameters object (documented below) +* `video_watermarks` - (Optional, Forces new resource) Watermark parameters for the video parameters (documented below) +* `video_codec_options` (Optional, Forces new resource) Codec options for the video parameters + +The `audio` object supports the following: + +* `audio_packing_mode` - The method of organizing audio channels and tracks. Use Audio:Channels to specify the number of channels in your output, and Audio:AudioPackingMode to specify the number of tracks and their relation to the channels. If you do not specify an Audio:AudioPackingMode, Elastic Transcoder uses SingleTrack. +* `bit_rate` - The bit rate of the audio stream in the output file, in kilobits/second. Enter an integer between 64 and 320, inclusive. +* `channels` - The number of audio channels in the output file +* `codec` - The audio codec for the output file. Valid values are `AAC`, `flac`, `mp2`, `mp3`, `pcm`, and `vorbis`. 
+* `sample_rate` - The sample rate of the audio stream in the output file, in hertz. Valid values are: `auto`, `22050`, `32000`, `44100`, `48000`, `96000`
+
+The `audio_codec_options` object supports the following:
+
+* `bit_depth` - The bit depth of a sample is how many bits of information are included in the audio samples. Valid values are `16` and `24`. (FLAC/PCM Only)
+* `bit_order` - The order the bits of a PCM sample are stored in. The supported value is LittleEndian. (PCM Only)
+* `profile` - If you specified AAC for Audio:Codec, choose the AAC profile for the output file.
+* `signed` - Whether audio samples are represented with negative and positive numbers (signed) or only positive numbers (unsigned). The supported value is Signed. (PCM Only)
+
+The `thumbnails` object supports the following:
+
+* `aspect_ratio` - The aspect ratio of thumbnails. The following values are valid: `auto`, `1:1`, `4:3`, `3:2`, `16:9`
+* `format` - The format of thumbnails, if any. Valid formats are `jpg` and `png`.
+* `interval` - The approximate number of seconds between thumbnails. The value must be an integer. The actual interval can vary by several seconds from one thumbnail to the next.
+* `max_height` - The maximum height of thumbnails, in pixels. If you specify auto, Elastic Transcoder uses 1080 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 32 and 3072, inclusive.
+* `max_width` - The maximum width of thumbnails, in pixels. If you specify auto, Elastic Transcoder uses 1920 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 32 and 4096, inclusive.
+* `padding_policy` - When you set PaddingPolicy to Pad, Elastic Transcoder might add black bars to the top and bottom and/or left and right sides of thumbnails to make the total size of the thumbnails match the values that you specified for thumbnail MaxWidth and MaxHeight settings.
+* `resolution` - The width and height of thumbnail files in pixels, in the format WidthxHeight, where both values are even integers. The values cannot exceed the width and height that you specified in the Video:Resolution object. (To better control resolution and aspect ratio of thumbnails, we recommend that you use the thumbnail values `max_width`, `max_height`, `sizing_policy`, and `padding_policy` instead of `resolution` and `aspect_ratio`. The two groups of settings are mutually exclusive. Do not use them together.)
+* `sizing_policy` - A value that controls scaling of thumbnails. Valid values are: `Fit`, `Fill`, `Stretch`, `Keep`, `ShrinkToFit`, and `ShrinkToFill`.
+
+The `video` object supports the following:
+
+* `aspect_ratio` - The display aspect ratio of the video in the output file. Valid values are: `auto`, `1:1`, `4:3`, `3:2`, `16:9`. (Note: to better control resolution and aspect ratio of output videos, we recommend that you use the values `max_width`, `max_height`, `sizing_policy`, `padding_policy`, and `display_aspect_ratio` instead of `resolution` and `aspect_ratio`.)
+* `bit_rate` - The bit rate of the video stream in the output file, in kilobits/second. You can configure variable bit rate or constant bit rate encoding.
+* `codec` - The video codec for the output file. Valid values are `gif`, `H.264`, `mpeg2`, `vp8`, and `vp9`.
+* `display_aspect_ratio` - The value that Elastic Transcoder adds to the metadata in the output file. If you set DisplayAspectRatio to auto, Elastic Transcoder chooses an aspect ratio that ensures square pixels.
If you specify another option, Elastic Transcoder sets that value in the output file.
+* `fixed_gop` - Whether to use a fixed value for Video:FixedGOP. Not applicable for containers of type gif. Valid values are true and false. Also known as Fixed Number of Frames Between Keyframes.
+* `frame_rate` - The frames per second for the video stream in the output file. The following values are valid: `auto`, `10`, `15`, `23.97`, `24`, `25`, `29.97`, `30`, `50`, `60`.
+* `keyframes_max_dist` - The maximum number of frames between key frames. Not applicable for containers of type gif.
+* `max_frame_rate` - If you specify auto for FrameRate, Elastic Transcoder uses the frame rate of the input video for the frame rate of the output video, up to the maximum frame rate. If you do not specify a MaxFrameRate, Elastic Transcoder will use a default of 30.
+* `max_height` - The maximum height of the output video in pixels. If you specify auto, Elastic Transcoder uses 1080 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 96 and 3072, inclusive.
+* `max_width` - The maximum width of the output video in pixels. If you specify auto, Elastic Transcoder uses 1920 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 128 and 4096, inclusive.
+* `padding_policy` - When you set PaddingPolicy to Pad, Elastic Transcoder might add black bars to the top and bottom and/or left and right sides of the output video to make the total size of the output video match the values that you specified for `max_width` and `max_height`.
+* `resolution` - The width and height of the video in the output file, in pixels. Valid values are `auto` and `widthxheight`. (see note for `aspect_ratio`)
+* `sizing_policy` - A value that controls scaling of the output video. Valid values are: `Fit`, `Fill`, `Stretch`, `Keep`, `ShrinkToFit`, `ShrinkToFill`.
+
+The `video_watermarks` object supports the following:
+
+* `horizontal_align` - The horizontal position of the watermark unless you specify a nonzero value for `horizontal_offset`.
+* `horizontal_offset` - The amount by which you want the horizontal position of the watermark to be offset from the position specified by `horizontal_align`.
+* `id` - A unique identifier for the settings for one watermark. The value of Id can be up to 40 characters long. You can specify settings for up to four watermarks.
+* `max_height` - The maximum height of the watermark.
+* `max_width` - The maximum width of the watermark.
+* `opacity` - A percentage that indicates how much you want a watermark to obscure the video in the location where it appears.
+* `sizing_policy` - A value that controls scaling of the watermark. Valid values are: `Fit`, `Stretch`, `ShrinkToFit`
+* `target` - A value that determines how Elastic Transcoder interprets values that you specified for `video_watermarks.horizontal_offset`, `video_watermarks.vertical_offset`, `video_watermarks.max_width`, and `video_watermarks.max_height`. Valid values are `Content` and `Frame`.
+* `vertical_align` - The vertical position of the watermark unless you specify a nonzero value for `vertical_offset`. Valid values are `Top`, `Bottom`, `Center`.
+* `vertical_offset` - The amount by which you want the vertical position of the watermark to be offset from the position specified by `vertical_align`
+
+The `video_codec_options` map supports the following:
+
+* `Profile` - The codec profile that you want to use for the output file.
(H.264/VP8 Only)
+* `Level` - The H.264 level that you want to use for the output file. Elastic Transcoder supports the following levels: `1`, `1b`, `1.1`, `1.2`, `1.3`, `2`, `2.1`, `2.2`, `3`, `3.1`, `3.2`, `4`, `4.1` (H.264 only)
+* `MaxReferenceFrames` - The maximum number of previously decoded frames to use as a reference for decoding future frames. Valid values are integers 0 through 16. (H.264 only)
+* `MaxBitRate` - The maximum number of kilobits per second in the output video. Specify a value between 16 and 62,500 inclusive, or `auto`. (Optional, H.264/MPEG2/VP8/VP9 only)
+* `BufferSize` - The maximum number of kilobits in any x seconds of the output video. This window is commonly 10 seconds, the standard segment duration when you're using ts for the container type of the output video. Specify an integer greater than 0. If you specify MaxBitRate and omit BufferSize, Elastic Transcoder sets BufferSize to 10 times the value of MaxBitRate. (Optional, H.264/MPEG2/VP8/VP9 only)
+* `InterlacedMode` - The interlace mode for the output video. (Optional, H.264/MPEG2 Only)
+* `ColorSpaceConversionMode` - The color space conversion Elastic Transcoder applies to the output video. Valid values are `None`, `Bt709toBt601`, `Bt601toBt709`, and `Auto`. (Optional, H.264/MPEG2 Only)
+* `ChromaSubsampling` - The sampling pattern for the chroma (color) channels of the output video. Valid values are `yuv420p` and `yuv422p`.
+* `LoopCount` - The number of times you want the output gif to loop (Gif only)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Elastic Transcoder Preset.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Transcoder presets using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Elastic Transcoder presets using the `id`. For example:
+
+```console
+% terraform import aws_elastictranscoder_preset.basic_preset 1407981661351-cttk8b
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/elb.html.markdown b/website/docs/cdktf/python/r/elb.html.markdown
new file mode 100644
index 00000000000..8be08997a6f
--- /dev/null
+++ b/website/docs/cdktf/python/r/elb.html.markdown
@@ -0,0 +1,177 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_elb"
+description: |-
+  Provides an Elastic Load Balancer resource.
+---
+
+
+
+# Resource: aws_elb
+
+Provides an Elastic Load Balancer resource, also known as a "Classic
+Load Balancer" after the release of
+[Application/Network Load Balancers](/docs/providers/aws/r/lb.html).
+
+~> **NOTE on ELB Instances and ELB Attachments:** Terraform currently
+provides both a standalone [ELB Attachment resource](elb_attachment.html)
+(describing an instance attached to an ELB), and an ELB resource with
+`instances` defined in-line. At this time you cannot use an ELB with in-line
+instances in conjunction with an ELB Attachment resource. Doing so will cause a
+conflict and will overwrite attachments.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elb import Elb +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Elb(self, "bar", + access_logs=ElbAccessLogs( + bucket="foo", + bucket_prefix="bar", + interval=60 + ), + availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"], + connection_draining=True, + connection_draining_timeout=400, + cross_zone_load_balancing=True, + health_check=ElbHealthCheck( + healthy_threshold=2, + interval=30, + target="HTTP:8000/", + timeout=3, + unhealthy_threshold=2 + ), + idle_timeout=400, + instances=[foo.id], + listener=[ElbListener( + instance_port=8000, + instance_protocol="http", + lb_port=80, + lb_protocol="http" + ), ElbListener( + instance_port=8000, + instance_protocol="http", + lb_port=443, + lb_protocol="https", + ssl_certificate_id="arn:aws:iam::123456789012:server-certificate/certName" + ) + ], + name="foobar-terraform-elb", + tags={ + "Name": "foobar-terraform-elb" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the ELB. By default generated by Terraform. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +* `access_logs` - (Optional) An Access Logs block. Access Logs documented below. +* `availability_zones` - (Required for an EC2-classic ELB) The AZ's to serve traffic in. +* `security_groups` - (Optional) A list of security group IDs to assign to the ELB. + Only valid if creating an ELB within a VPC +* `subnets` - (Required for a VPC ELB) A list of subnet IDs to attach to the ELB. When an update to subnets will remove all current subnets, this will force a new resource. +* `instances` - (Optional) A list of instance ids to place in the ELB pool. +* `internal` - (Optional) If true, ELB will be an internal ELB. +* `listener` - (Required) A list of listener blocks. Listeners documented below. +* `health_check` - (Optional) A health_check block. Health Check documented below. +* `cross_zone_load_balancing` - (Optional) Enable cross-zone load balancing. Default: `true` +* `idle_timeout` - (Optional) The time in seconds that the connection is allowed to be idle. Default: `60` +* `connection_draining` - (Optional) Boolean to enable connection draining. Default: `false` +* `connection_draining_timeout` - (Optional) The time in seconds to allow for connections to drain. Default: `300` +* `desync_mitigation_mode` - (Optional) Determines how the load balancer handles requests that might pose a security risk to an application due to HTTP desync. Valid values are `monitor`, `defensive` (default), `strictest`. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +Exactly one of `availability_zones` or `subnets` must be specified: this +determines if the ELB exists in a VPC or in EC2-classic. 
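+
+As a hand-written sketch (not `cdktf convert` output) of the VPC form of that rule, the following ELB specifies `subnets` instead of `availability_zones`; the subnet and security group IDs are hypothetical placeholders:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.elb import Elb
+class MyVpcElbSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Specifying subnets (and omitting availability_zones) places the ELB in a VPC.
+        Elb(self, "vpc_elb",
+            internal=True,
+            listener=[ElbListener(
+                instance_port=8000,
+                instance_protocol="http",
+                lb_port=80,
+                lb_protocol="http"
+            )
+            ],
+            name="foobar-vpc-elb",
+            security_groups=["sg-0123456789abcdef0"],
+            subnets=["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"]
+        )
+```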
+ +Access Logs (`access_logs`) support the following: + +* `bucket` - (Required) The S3 bucket name to store the logs in. +* `bucket_prefix` - (Optional) The S3 bucket prefix. Logs are stored in the root if not configured. +* `interval` - (Optional) The publishing interval in minutes. Valid values: `5` and `60`. Default: `60` +* `enabled` - (Optional) Boolean to enable / disable `access_logs`. Default is `true` + +Listeners (`listener`) support the following: + +* `instance_port` - (Required) The port on the instance to route to +* `instance_protocol` - (Required) The protocol to use to the instance. Valid + values are `HTTP`, `HTTPS`, `TCP`, or `SSL` +* `lb_port` - (Required) The port to listen on for the load balancer +* `lb_protocol` - (Required) The protocol to listen on. Valid values are `HTTP`, + `HTTPS`, `TCP`, or `SSL` +* `ssl_certificate_id` - (Optional) The ARN of an SSL certificate you have +uploaded to AWS IAM. **Note ECDSA-specific restrictions below. Only valid when `lb_protocol` is either HTTPS or SSL** + +Health Check (`health_check`) supports the following: + +* `healthy_threshold` - (Required) The number of checks before the instance is declared healthy. +* `unhealthy_threshold` - (Required) The number of checks before the instance is declared unhealthy. +* `target` - (Required) The target of the check. Valid pattern is "${PROTOCOL}:${PORT}${PATH}", where PROTOCOL + values are: + * `HTTP`, `HTTPS` - PORT and PATH are required + * `TCP`, `SSL` - PORT is required, PATH is not supported +* `interval` - (Required) The interval between checks. +* `timeout` - (Required) The length of time before the check times out. + +## Note on ECDSA Key Algorithm + +If the ARN of the `ssl_certificate_id` that is pointed to references a +certificate that was signed by an ECDSA key, note that ELB only supports the +P256 and P384 curves. Using a certificate signed by a key using a different +curve could produce the error `ERR_SSL_VERSION_OR_CIPHER_MISMATCH` in your +browser. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the ELB +* `arn` - The ARN of the ELB +* `name` - The name of the ELB +* `dns_name` - The DNS name of the ELB +* `instances` - The list of instances in the ELB +* `source_security_group` - The name of the security group that you can use as + part of your inbound rules for your load balancer's back-end application + instances. Use this for Classic or Default VPC only. +* `source_security_group_id` - The ID of the security group that you can use as + part of your inbound rules for your load balancer's back-end application + instances. Only available on ELBs launched in a VPC. +* `zone_id` - The canonical hosted zone ID of the ELB (to be used in a Route 53 Alias record) +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ELBs using the `name`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import ELBs using the `name`. For example: + +```console +% terraform import aws_elb.bar elb-production-12345 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/elb_attachment.html.markdown b/website/docs/cdktf/python/r/elb_attachment.html.markdown new file mode 100644 index 00000000000..10aae9728e0 --- /dev/null +++ b/website/docs/cdktf/python/r/elb_attachment.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "ELB Classic" +layout: "aws" +page_title: "AWS: aws_elb_attachment" +description: |- + Provides an Elastic Load Balancer Attachment resource. +--- + + + +# Resource: aws_elb_attachment + +Attaches an EC2 instance to an Elastic Load Balancer (ELB). For attaching resources with Application Load Balancer (ALB) or Network Load Balancer (NLB), see the [`aws_lb_target_group_attachment` resource](/docs/providers/aws/r/lb_target_group_attachment.html). + +~> **NOTE on ELB Instances and ELB Attachments:** Terraform currently provides +both a standalone ELB Attachment resource (describing an instance attached to +an ELB), and an [Elastic Load Balancer resource](elb.html) with +`instances` defined in-line. At this time you cannot use an ELB with in-line +instances in conjunction with an ELB Attachment resource. Doing so will cause a +conflict and will overwrite attachments. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elb_attachment import ElbAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ElbAttachment(self, "baz", + elb=bar.id, + instance=foo.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `elb` - (Required) The name of the ELB. +* `instance` - (Required) Instance ID to place in the ELB pool. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/emr_block_public_access_configuration.html.markdown b/website/docs/cdktf/python/r/emr_block_public_access_configuration.html.markdown new file mode 100644 index 00000000000..cb721763373 --- /dev/null +++ b/website/docs/cdktf/python/r/emr_block_public_access_configuration.html.markdown @@ -0,0 +1,154 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_block_public_access_configuration" +description: |- + Terraform resource for managing an AWS EMR Block Public Access Configuration. +--- + + + +# Resource: aws_emr_block_public_access_configuration + +Terraform resource for managing an AWS EMR block public access configuration. This region level security configuration restricts the launch of EMR clusters that have associated security groups permitting public access on unspecified ports. See the [EMR Block Public Access Configuration](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-block-public-access.html) documentation for further information. 
+ +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_block_public_access_configuration import EmrBlockPublicAccessConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrBlockPublicAccessConfiguration(self, "example", + block_public_security_group_rules=True + ) +``` + +### Default Configuration + +By default, each AWS region is equipped with a block public access configuration that prevents EMR clusters from being launched if they have security group rules permitting public access on any port except for port 22. The default configuration can be managed using this Terraform resource. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_block_public_access_configuration import EmrBlockPublicAccessConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrBlockPublicAccessConfiguration(self, "example", + block_public_security_group_rules=True, + permitted_public_security_group_rule_range=[EmrBlockPublicAccessConfigurationPermittedPublicSecurityGroupRuleRange( + max_range=22, + min_range=22 + ) + ] + ) +``` + +~> **NOTE:** If an `aws_emr_block_public_access_configuration` Terraform resource is destroyed, the configuration will reset to this default configuration. + +### Multiple Permitted Public Security Group Rule Ranges + +The resource permits specification of multiple `permitted_public_security_group_rule_range` blocks. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_block_public_access_configuration import EmrBlockPublicAccessConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrBlockPublicAccessConfiguration(self, "example", + block_public_security_group_rules=True, + permitted_public_security_group_rule_range=[EmrBlockPublicAccessConfigurationPermittedPublicSecurityGroupRuleRange( + max_range=22, + min_range=22 + ), EmrBlockPublicAccessConfigurationPermittedPublicSecurityGroupRuleRange( + max_range=101, + min_range=100 + ) + ] + ) +``` + +### Disabling Block Public Access + +To permit EMR clusters to be launched in the configured region regardless of associated security group rules, the Block Public Access feature can be disabled using this Terraform resource. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.emr_block_public_access_configuration import EmrBlockPublicAccessConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrBlockPublicAccessConfiguration(self, "example", + block_public_security_group_rules=False + ) +``` + +## Argument Reference + +The following arguments are required: + +* `block_public_security_group_rules` - (Required) Enable or disable EMR Block Public Access. + +The following arguments are optional: + +* `permitted_public_security_group_rule_range` - (Optional) Configuration block for defining permitted public security group rule port ranges. Can be defined multiple times per resource. Only valid if `block_public_security_group_rules` is set to `true`. + +### `permitted_public_security_group_rule_range` + +This block is used to define a range of TCP ports that should form exceptions to the Block Public Access Configuration. If an attempt is made to launch an EMR cluster in the configured region and account, with `block_public_security_group_rules = true`, the EMR cluster will be permitted to launch even if there are security group rules permitting public access to ports in this range. + +* `min_range` - (Required) The first port in the range of TCP ports. +* `max_range` - (Required) The final port in the range of TCP ports. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the current EMR Block Public Access Configuration. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import the current EMR Block Public Access Configuration. For example: + +```console +% terraform import aws_emr_block_public_access_configuration.example current +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/emr_cluster.html.markdown b/website/docs/cdktf/python/r/emr_cluster.html.markdown new file mode 100644 index 00000000000..612a3479f47 --- /dev/null +++ b/website/docs/cdktf/python/r/emr_cluster.html.markdown @@ -0,0 +1,729 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_cluster" +description: |- + Provides an Elastic MapReduce Cluster +--- + + + +# Resource: aws_emr_cluster + +Provides an Elastic MapReduce Cluster, a web service that makes it easy to process large amounts of data efficiently. See [Amazon Elastic MapReduce Documentation](https://aws.amazon.com/documentation/elastic-mapreduce/) for more information. + +To configure [Instance Groups](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-group-configuration.html#emr-plan-instance-groups) for [task nodes](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-master-core-task-nodes.html#emr-plan-task), see the [`aws_emr_instance_group` resource](/docs/providers/aws/r/emr_instance_group.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
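+# NOTE: `sg`, `emr_profile`, `main` and `iam_emr_service_role` below refer to a security
+# group, an IAM instance profile, a subnet and an IAM service role that are assumed to be
+# defined elsewhere in the configuration; see the bootable example at the bottom of this page.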
+# +from imports.aws.emr_cluster import EmrCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrCluster(self, "cluster", + additional_info="{\n \"instanceAwsClientConfiguration\": {\n \"proxyPort\": 8099,\n \"proxyHost\": \"myproxy.example.com\"\n }\n}\n\n", + applications=["Spark"], + bootstrap_action=[EmrClusterBootstrapAction( + args=["instance.isMaster=true", "echo running on master node"], + name="runif", + path="s3://elasticmapreduce/bootstrap-actions/run-if" + ) + ], + configurations_json=" [\n {\n \"Classification\": \"hadoop-env\",\n \"Configurations\": [\n {\n \"Classification\": \"export\",\n \"Properties\": {\n \"JAVA_HOME\": \"/usr/lib/jvm/java-1.8.0\"\n }\n }\n ],\n \"Properties\": {}\n },\n {\n \"Classification\": \"spark-env\",\n \"Configurations\": [\n {\n \"Classification\": \"export\",\n \"Properties\": {\n \"JAVA_HOME\": \"/usr/lib/jvm/java-1.8.0\"\n }\n }\n ],\n \"Properties\": {}\n }\n ]\n\n", + core_instance_group=EmrClusterCoreInstanceGroup( + autoscaling_policy="{\n\"Constraints\": {\n \"MinCapacity\": 1,\n \"MaxCapacity\": 2\n},\n\"Rules\": [\n {\n \"Name\": \"ScaleOutMemoryPercentage\",\n \"Description\": \"Scale out if YARNMemoryAvailablePercentage is less than 15\",\n \"Action\": {\n \"SimpleScalingPolicyConfiguration\": {\n \"AdjustmentType\": \"CHANGE_IN_CAPACITY\",\n \"ScalingAdjustment\": 1,\n \"CoolDown\": 300\n }\n },\n \"Trigger\": {\n \"CloudWatchAlarmDefinition\": {\n \"ComparisonOperator\": \"LESS_THAN\",\n \"EvaluationPeriods\": 1,\n \"MetricName\": \"YARNMemoryAvailablePercentage\",\n \"Namespace\": \"AWS/ElasticMapReduce\",\n \"Period\": 300,\n \"Statistic\": \"AVERAGE\",\n \"Threshold\": 15.0,\n \"Unit\": \"PERCENT\"\n }\n }\n }\n]\n}\n\n", + bid_price="0.30", + ebs_config=[EmrClusterCoreInstanceGroupEbsConfig( + size=Token.as_number("40"), + type="gp2", + volumes_per_instance=1 + ) + ], + instance_count=1, + instance_type="c4.large" + ), + ebs_root_volume_size=100, + ec2_attributes=EmrClusterEc2Attributes( + emr_managed_master_security_group=sg.id, + emr_managed_slave_security_group=sg.id, + instance_profile=emr_profile.arn, + subnet_id=main.id + ), + keep_job_flow_alive_when_no_steps=True, + master_instance_group=EmrClusterMasterInstanceGroup( + instance_type="m4.large" + ), + name="emr-test-arn", + release_label="emr-4.6.0", + service_role=iam_emr_service_role.arn, + tags={ + "env": "env", + "role": "rolename" + }, + termination_protection=False + ) +``` + +The `aws_emr_cluster` resource typically requires two IAM roles, one for the EMR Cluster to use as a service role, and another is assigned to every EC2 instance in a cluster and each application process that runs on a cluster assumes this role for permissions to interact with other AWS services. An additional role, the Auto Scaling role, is required if your cluster uses automatic scaling in Amazon EMR. + +The default AWS managed EMR service role is called `EMR_DefaultRole` with Amazon managed policy `AmazonEMRServicePolicy_v2` attached. The name of default instance profile role is `EMR_EC2_DefaultRole` with default managed policy `AmazonElasticMapReduceforEC2Role` attached, but it is on the path to deprecation and will not be replaced with another default managed policy. You'll need to create and specify an instance profile to replace the deprecated role and default policy. 
See the [Configure IAM service roles for Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-iam-roles.html) guide for more information on these IAM roles. There is also a fully-bootable example Terraform configuration at the bottom of this page. + +### Instance Fleet + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_cluster import EmrCluster +from imports.aws.emr_instance_fleet import EmrInstanceFleet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, name, releaseLabel, serviceRole): + super().__init__(scope, name) + example = EmrCluster(self, "example", + core_instance_fleet=EmrClusterCoreInstanceFleet( + instance_type_configs=[EmrClusterCoreInstanceFleetInstanceTypeConfigs( + bid_price_as_percentage_of_on_demand_price=80, + ebs_config=[EmrClusterCoreInstanceFleetInstanceTypeConfigsEbsConfig( + size=100, + type="gp2", + volumes_per_instance=1 + ) + ], + instance_type="m3.xlarge", + weighted_capacity=1 + ), EmrClusterCoreInstanceFleetInstanceTypeConfigs( + bid_price_as_percentage_of_on_demand_price=100, + ebs_config=[EmrClusterCoreInstanceFleetInstanceTypeConfigsEbsConfig( + size=100, + type="gp2", + volumes_per_instance=1 + ) + ], + instance_type="m4.xlarge", + weighted_capacity=1 + ), EmrClusterCoreInstanceFleetInstanceTypeConfigs( + bid_price_as_percentage_of_on_demand_price=100, + ebs_config=[EmrClusterCoreInstanceFleetInstanceTypeConfigsEbsConfig( + size=100, + type="gp2", + volumes_per_instance=1 + ) + ], + instance_type="m4.2xlarge", + weighted_capacity=2 + ) + ], + launch_specifications=EmrClusterCoreInstanceFleetLaunchSpecifications( + spot_specification=[EmrClusterCoreInstanceFleetLaunchSpecificationsSpotSpecification( + allocation_strategy="capacity-optimized", + block_duration_minutes=0, + timeout_action="SWITCH_TO_ON_DEMAND", + timeout_duration_minutes=10 + ) + ] + ), + name="core fleet", + target_on_demand_capacity=2, + target_spot_capacity=2 + ), + master_instance_fleet=EmrClusterMasterInstanceFleet( + instance_type_configs=[EmrClusterMasterInstanceFleetInstanceTypeConfigs( + instance_type="m4.xlarge" + ) + ], + target_on_demand_capacity=1 + ), + name=name, + release_label=release_label, + service_role=service_role + ) + EmrInstanceFleet(self, "task", + cluster_id=example.id, + instance_type_configs=[EmrInstanceFleetInstanceTypeConfigs( + bid_price_as_percentage_of_on_demand_price=100, + ebs_config=[EmrInstanceFleetInstanceTypeConfigsEbsConfig( + size=100, + type="gp2", + volumes_per_instance=1 + ) + ], + instance_type="m4.xlarge", + weighted_capacity=1 + ), EmrInstanceFleetInstanceTypeConfigs( + bid_price_as_percentage_of_on_demand_price=100, + ebs_config=[EmrInstanceFleetInstanceTypeConfigsEbsConfig( + size=100, + type="gp2", + volumes_per_instance=1 + ) + ], + instance_type="m4.2xlarge", + weighted_capacity=2 + ) + ], + launch_specifications=EmrInstanceFleetLaunchSpecifications( + spot_specification=[EmrInstanceFleetLaunchSpecificationsSpotSpecification( + allocation_strategy="capacity-optimized", + block_duration_minutes=0, + timeout_action="TERMINATE_CLUSTER", + timeout_duration_minutes=10 + ) + ] + ), + name="task fleet", + target_on_demand_capacity=1, + target_spot_capacity=1 + ) +``` + +### Enable Debug Logging + +[Debug logging in 
EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-debugging.html) is implemented as a step. It is highly recommended that you utilize the [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignore_changes` if other steps are being managed outside of Terraform. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_cluster import EmrCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, name, releaseLabel, serviceRole): + super().__init__(scope, name) + EmrCluster(self, "example", + lifecycle=TerraformResourceLifecycle( + ignore_changes=[step] + ), + step=[EmrClusterStep( + action_on_failure="TERMINATE_CLUSTER", + hadoop_jar_step=[EmrClusterStepHadoopJarStep( + args=["state-pusher-script"], + jar="command-runner.jar" + ) + ], + name="Setup Hadoop Debugging" + ) + ], + name=name, + release_label=release_label, + service_role=service_role + ) +``` + +### Multiple Node Master Instance Group + +Available in EMR version 5.23.0 and later, an EMR Cluster can be launched with three master nodes for high availability. Additional information about this functionality and its requirements can be found in the [EMR Management Guide](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-ha.html). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_cluster import EmrCluster +from imports.aws.subnet import Subnet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, vpcId, instanceType, instanceProfile, instanceType1, name, serviceRole): + super().__init__(scope, name) + example = Subnet(self, "example", + map_public_ip_on_launch=True, + vpc_id=vpc_id + ) + aws_emr_cluster_example = EmrCluster(self, "example_1", + core_instance_group=EmrClusterCoreInstanceGroup( + instance_type=instance_type + ), + ec2_attributes=EmrClusterEc2Attributes( + subnet_id=example.id, + instance_profile=instance_profile + ), + master_instance_group=EmrClusterMasterInstanceGroup( + instance_count=3, + instance_type=instance_type1 + ), + release_label="emr-5.24.1", + termination_protection=True, + name=name, + service_role=service_role + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_emr_cluster_example.override_logical_id("example") +``` + +### Bootable Cluster + +**NOTE:** This configuration demonstrates a minimal configuration needed to boot an example EMR Cluster. It is not meant to display best practices. As with all examples, use at your own risk. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.emr_cluster import EmrCluster +from imports.aws.iam_instance_profile import IamInstanceProfile +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.internet_gateway import InternetGateway +from imports.aws.main_route_table_association import MainRouteTableAssociation +from imports.aws.route_table import RouteTable +from imports.aws.security_group import SecurityGroup +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + main = Vpc(self, "main", + cidr_block="168.31.0.0/16", + enable_dns_hostnames=True, + tags={ + "name": "emr_test" + } + ) + ec2_assume_role = DataAwsIamPolicyDocument(self, "ec2_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=Token.as_list("sts:AssumeRole"), + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ec2.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + emr_assume_role = DataAwsIamPolicyDocument(self, "emr_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=Token.as_list("sts:AssumeRole"), + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["elasticmapreduce.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + iam_emr_profile_policy = DataAwsIamPolicyDocument(self, "iam_emr_profile_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["cloudwatch:*", "dynamodb:*", "ec2:Describe*", "elasticmapreduce:Describe*", "elasticmapreduce:ListBootstrapActions", "elasticmapreduce:ListClusters", "elasticmapreduce:ListInstanceGroups", "elasticmapreduce:ListInstances", "elasticmapreduce:ListSteps", "kinesis:CreateStream", "kinesis:DeleteStream", "kinesis:DescribeStream", "kinesis:GetRecords", "kinesis:GetShardIterator", "kinesis:MergeShards", "kinesis:PutRecord", "kinesis:SplitShard", "rds:Describe*", "s3:*", "sdb:*", "sns:*", "sqs:*" + ], + effect="Allow", + resources=["*"] + ) + ] + ) + iam_emr_service_policy = DataAwsIamPolicyDocument(self, "iam_emr_service_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:AuthorizeSecurityGroupEgress", "ec2:AuthorizeSecurityGroupIngress", "ec2:CancelSpotInstanceRequests", "ec2:CreateNetworkInterface", "ec2:CreateSecurityGroup", "ec2:CreateTags", "ec2:DeleteNetworkInterface", "ec2:DeleteSecurityGroup", "ec2:DeleteTags", "ec2:DescribeAvailabilityZones", "ec2:DescribeAccountAttributes", "ec2:DescribeDhcpOptions", "ec2:DescribeInstanceStatus", "ec2:DescribeInstances", "ec2:DescribeKeyPairs", "ec2:DescribeNetworkAcls", "ec2:DescribeNetworkInterfaces", "ec2:DescribePrefixLists", "ec2:DescribeRouteTables", "ec2:DescribeSecurityGroups", "ec2:DescribeSpotInstanceRequests", "ec2:DescribeSpotPriceHistory", "ec2:DescribeSubnets", "ec2:DescribeVpcAttribute", "ec2:DescribeVpcEndpoints", "ec2:DescribeVpcEndpointServices", "ec2:DescribeVpcs", "ec2:DetachNetworkInterface", "ec2:ModifyImageAttribute", "ec2:ModifyInstanceAttribute", "ec2:RequestSpotInstances", "ec2:RevokeSecurityGroupEgress", "ec2:RunInstances", "ec2:TerminateInstances", "ec2:DeleteVolume", "ec2:DescribeVolumeStatus", "ec2:DescribeVolumes", "ec2:DetachVolume", "iam:GetRole", "iam:GetRolePolicy", "iam:ListInstanceProfiles", "iam:ListRolePolicies", "iam:PassRole", "s3:CreateBucket", "s3:Get*", "s3:List*", "sdb:BatchPutAttributes", "sdb:Select", 
"sqs:CreateQueue", "sqs:Delete*", "sqs:GetQueue*", "sqs:PurgeQueue", "sqs:ReceiveMessage" + ], + effect="Allow", + resources=["*"] + ) + ] + ) + iam_emr_profile_role = IamRole(self, "iam_emr_profile_role", + assume_role_policy=Token.as_string(ec2_assume_role.json), + name="iam_emr_profile_role" + ) + iam_emr_service_role = IamRole(self, "iam_emr_service_role", + assume_role_policy=Token.as_string(emr_assume_role.json), + name="iam_emr_service_role" + ) + aws_iam_role_policy_iam_emr_profile_policy = IamRolePolicy(self, "iam_emr_profile_policy_7", + name="iam_emr_profile_policy", + policy=Token.as_string(iam_emr_profile_policy.json), + role=iam_emr_profile_role.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_iam_emr_profile_policy.override_logical_id("iam_emr_profile_policy") + aws_iam_role_policy_iam_emr_service_policy = IamRolePolicy(self, "iam_emr_service_policy_8", + name="iam_emr_service_policy", + policy=Token.as_string(iam_emr_service_policy.json), + role=iam_emr_service_role.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_iam_emr_service_policy.override_logical_id("iam_emr_service_policy") + gw = InternetGateway(self, "gw", + vpc_id=main.id + ) + r = RouteTable(self, "r", + route=[RouteTableRoute( + cidr_block="0.0.0.0/0", + gateway_id=gw.id + ) + ], + vpc_id=main.id + ) + aws_subnet_main = Subnet(self, "main_11", + cidr_block="168.31.0.0/20", + tags={ + "name": "emr_test" + }, + vpc_id=main.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_subnet_main.override_logical_id("main") + emr_profile = IamInstanceProfile(self, "emr_profile", + name="emr_profile", + role=iam_emr_profile_role.name + ) + MainRouteTableAssociation(self, "a", + route_table_id=r.id, + vpc_id=main.id + ) + allow_access = SecurityGroup(self, "allow_access", + depends_on=[aws_subnet_main], + description="Allow inbound traffic", + egress=[SecurityGroupEgress( + cidr_blocks=["0.0.0.0/0"], + from_port=0, + protocol="-1", + to_port=0 + ) + ], + ingress=[SecurityGroupIngress( + cidr_blocks=[main.cidr_block], + from_port=0, + protocol="-1", + to_port=0 + ) + ], + lifecycle=TerraformResourceLifecycle( + ignore_changes=[ingress, egress] + ), + name="allow_access", + tags={ + "name": "emr_test" + }, + vpc_id=main.id + ) + EmrCluster(self, "cluster", + applications=["Spark"], + bootstrap_action=[EmrClusterBootstrapAction( + args=["instance.isMaster=true", "echo running on master node"], + name="runif", + path="s3://elasticmapreduce/bootstrap-actions/run-if" + ) + ], + configurations_json=" [\n {\n \"Classification\": \"hadoop-env\",\n \"Configurations\": [\n {\n \"Classification\": \"export\",\n \"Properties\": {\n \"JAVA_HOME\": \"/usr/lib/jvm/java-1.8.0\"\n }\n }\n ],\n \"Properties\": {}\n },\n {\n \"Classification\": \"spark-env\",\n \"Configurations\": [\n {\n \"Classification\": \"export\",\n \"Properties\": {\n \"JAVA_HOME\": \"/usr/lib/jvm/java-1.8.0\"\n }\n }\n ],\n \"Properties\": {}\n }\n ]\n\n", + core_instance_group=EmrClusterCoreInstanceGroup( + instance_count=1, + instance_type="m5.xlarge" + ), + ec2_attributes=EmrClusterEc2Attributes( + emr_managed_master_security_group=allow_access.id, + emr_managed_slave_security_group=allow_access.id, + instance_profile=emr_profile.arn, + 
subnet_id=Token.as_string(aws_subnet_main.id) + ), + master_instance_group=EmrClusterMasterInstanceGroup( + instance_type="m5.xlarge" + ), + name="emr-test-arn", + release_label="emr-4.6.0", + service_role=iam_emr_service_role.arn, + tags={ + "dns_zone": "env_zone", + "env": "env", + "name": "name-env", + "role": "rolename" + } + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the job flow. +* `release_label` - (Required) Release label for the Amazon EMR release. +* `service_role` - (Required) IAM role that will be assumed by the Amazon EMR service to access AWS resources. + +The following arguments are optional: + +* `additional_info` - (Optional) JSON string for selecting additional features such as adding proxy information. Note: Currently there is no API to retrieve the value of this argument after EMR cluster creation from provider, therefore Terraform cannot detect drift from the actual EMR cluster if its value is changed outside Terraform. +* `applications` - (Optional) A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster. For a list of applications available for each Amazon EMR release version, see the [Amazon EMR Release Guide](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-release-components.html). +* `autoscaling_role` - (Optional) IAM role for automatic scaling policies. The IAM role provides permissions that the automatic scaling feature requires to launch and terminate EC2 instances in an instance group. +* `auto_termination_policy` - (Optional) An auto-termination policy for an Amazon EMR cluster. An auto-termination policy defines the amount of idle time in seconds after which a cluster automatically terminates. See [Auto Termination Policy](#auto_termination_policy) Below. +* `bootstrap_action` - (Optional) Ordered list of bootstrap actions that will be run before Hadoop is started on the cluster nodes. See below. +* `configurations` - (Optional) List of configurations supplied for the EMR cluster you are creating. Supply a configuration object for applications to override their default configuration. See [AWS Documentation](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-configure-apps.html) for more information. +* `configurations_json` - (Optional) JSON string for supplying list of configurations for the EMR cluster. + +~> **NOTE on `configurations_json`:** If the `Configurations` value is empty then you should skip the `Configurations` field instead of providing an empty list as a value, `"Configurations": []`. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.emr_cluster import EmrCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, name, releaseLabel, serviceRole):
+        super().__init__(scope, name)
+        EmrCluster(self, "cluster",
+            configurations_json=" [\n {\n \"Classification\": \"hadoop-env\",\n \"Configurations\": [\n {\n \"Classification\": \"export\",\n \"Properties\": {\n \"JAVA_HOME\": \"/usr/lib/jvm/java-1.8.0\"\n }\n }\n ],\n \"Properties\": {}\n }\n ]\n\n",
+            name=name,
+            release_label=release_label,
+            service_role=service_role
+        )
+```
+
+* `core_instance_fleet` - (Optional) Configuration block to use an [Instance Fleet](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-fleet.html) for the core node type. Cannot be specified if any `core_instance_group` configuration blocks are set. Detailed below.
+* `core_instance_group` - (Optional) Configuration block to use an [Instance Group](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-group-configuration.html#emr-plan-instance-groups) for the [core node type](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-master-core-task-nodes.html#emr-plan-core).
+* `custom_ami_id` - (Optional) Custom Amazon Linux AMI for the cluster (instead of an EMR-owned AMI). Available in Amazon EMR version 5.7.0 and later.
+* `ebs_root_volume_size` - (Optional) Size in GiB of the EBS root device volume of the Linux AMI that is used for each EC2 instance. Available in Amazon EMR version 4.x and later.
+* `ec2_attributes` - (Optional) Attributes for the EC2 instances running the job flow. See below.
+* `keep_job_flow_alive_when_no_steps` - (Optional) Whether to keep the cluster alive when it has no pending or running steps, or after all steps are complete (default is `true`).
+* `kerberos_attributes` - (Optional) Kerberos configuration for the cluster. See below.
+* `list_steps_states` - (Optional) List of [step states](https://docs.aws.amazon.com/emr/latest/APIReference/API_StepStatus.html) used to filter returned steps.
+* `log_encryption_kms_key_id` - (Optional) AWS KMS customer master key (CMK) key ID or ARN used for encrypting log files. This attribute is only available with EMR version 5.30.0 and later, excluding EMR 6.0.0.
+* `log_uri` - (Optional) S3 bucket to write the log files of the job flow. If a value is not provided, logs are not created.
+* `master_instance_fleet` - (Optional) Configuration block to use an [Instance Fleet](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-fleet.html) for the master node type. Cannot be specified if any `master_instance_group` configuration blocks are set. Detailed below.
+* `master_instance_group` - (Optional) Configuration block to use an [Instance Group](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-group-configuration.html#emr-plan-instance-groups) for the [master node type](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-master-core-task-nodes.html#emr-plan-master).
+* `placement_group_config` - (Optional) The specified placement group configuration for an Amazon EMR cluster.
+* `scale_down_behavior` - (Optional) Way that individual Amazon EC2 instances terminate when an automatic scale-in activity occurs or an instance group is resized.
+* `security_configuration` - (Optional) Security configuration name to attach to the EMR cluster. Only valid for EMR clusters with `release_label` 4.8.0 or greater.
+* `step` - (Optional) List of steps to run when creating the cluster. See below.
It is highly recommended to utilize the [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignore_changes` if other steps are being managed outside of Terraform. This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). +* `step_concurrency_level` - (Optional) Number of steps that can be executed concurrently. You can specify a maximum of 256 steps. Only valid for EMR clusters with `release_label` 5.28.0 or greater (default is 1). +* `tags` - (Optional) list of tags to apply to the EMR Cluster. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `termination_protection` - (Optional) Switch on/off termination protection (default is `false`, except when using multiple master nodes). Before attempting to destroy the resource when termination protection is enabled, this configuration must be applied with its value set to `false`. +* `visible_to_all_users` - (Optional) Whether the job flow is visible to all IAM users of the AWS account associated with the job flow. Default value is `true`. + +### bootstrap_action + +* `args` - (Optional) List of command line arguments to pass to the bootstrap action script. +* `name` - (Required) Name of the bootstrap action. +* `path` - (Required) Location of the script to run during a bootstrap action. Can be either a location in Amazon S3 or on a local file system. + +### auto_termination_policy + +* `idle_timeout` - (Optional) Specifies the amount of idle time in seconds after which the cluster automatically terminates. You can specify a minimum of `60` seconds and a maximum of `604800` seconds (seven days). + +### configurations + +A configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. See [Configuring Applications](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-configure-apps.html). + +* `classification` - (Optional) Classification within a configuration. +* `properties` - (Optional) Map of properties specified within a configuration classification. + +### core_instance_fleet + +* `instance_type_configs` - (Optional) Configuration block for instance fleet. +* `launch_specifications` - (Optional) Configuration block for launch specification. +* `name` - (Optional) Friendly name given to the instance fleet. +* `target_on_demand_capacity` - (Optional) The target capacity of On-Demand units for the instance fleet, which determines how many On-Demand instances to provision. +* `target_spot_capacity` - (Optional) Target capacity of Spot units for the instance fleet, which determines how many Spot instances to provision. + +#### instance_type_configs + +* `bid_price` - (Optional) Bid price for each EC2 Spot instance type as defined by `instance_type`. Expressed in USD. If neither `bid_price` nor `bid_price_as_percentage_of_on_demand_price` is provided, `bid_price_as_percentage_of_on_demand_price` defaults to 100%. +* `bid_price_as_percentage_of_on_demand_price` - (Optional) Bid price, as a percentage of On-Demand price, for each EC2 Spot instance as defined by `instance_type`. Expressed as a number (for example, 20 specifies 20%). 
If neither `bid_price` nor `bid_price_as_percentage_of_on_demand_price` is provided, `bid_price_as_percentage_of_on_demand_price` defaults to 100%. +* `configurations` - (Optional) Configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. List of `configuration` blocks. +* `ebs_config` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. +* `instance_type` - (Required) EC2 instance type, such as m4.xlarge. +* `weighted_capacity` - (Optional) Number of units that a provisioned instance of this type provides toward fulfilling the target capacities defined in `aws_emr_instance_fleet`. + +#### launch_specifications + +* `on_demand_specification` - (Optional) Configuration block for on demand instances launch specifications. +* `spot_specification` - (Optional) Configuration block for spot instances launch specifications. + +##### on_demand_specification + +The launch specification for On-Demand instances in the instance fleet, which determines the allocation strategy. +The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x versions. On-Demand instances allocation strategy is available in Amazon EMR version 5.12.1 and later. + +* `allocation_strategy` - (Required) Specifies the strategy to use in launching On-Demand instance fleets. Currently, the only option is `lowest-price` (the default), which launches the lowest price first. + +##### spot_specification + +The launch specification for Spot instances in the fleet, which determines the defined duration, provisioning timeout behavior, and allocation strategy. + +* `allocation_strategy` - (Required) Specifies the strategy to use in launching Spot instance fleets. Valid values include `capacity-optimized`, `diversified`, `lowest-price`, `price-capacity-optimized`. See the [AWS documentation](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-fleet.html#emr-instance-fleet-allocation-strategy) for details on each strategy type. +* `block_duration_minutes` - (Optional) Defined duration for Spot instances (also known as Spot blocks) in minutes. When specified, the Spot instance does not terminate before the defined duration expires, and defined duration pricing for Spot instances applies. Valid values are 60, 120, 180, 240, 300, or 360. The duration period starts as soon as a Spot instance receives its instance ID. At the end of the duration, Amazon EC2 marks the Spot instance for termination and provides a Spot instance termination notice, which gives the instance a two-minute warning before it terminates. +* `timeout_action` - (Required) Action to take when TargetSpotCapacity has not been fulfilled when the TimeoutDurationMinutes has expired; that is, when all Spot instances could not be provisioned within the Spot provisioning timeout. Valid values are `TERMINATE_CLUSTER` and `SWITCH_TO_ON_DEMAND`. SWITCH_TO_ON_DEMAND specifies that if no Spot instances are available, On-Demand Instances should be provisioned to fulfill any remaining Spot capacity. +* `timeout_duration_minutes` - (Required) Spot provisioning timeout period in minutes. If Spot instances are not provisioned within this time period, the TimeOutAction is taken. Minimum value is 5 and maximum value is 1440. The timeout applies only during initial provisioning, when the cluster is first created. 
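+The two launch specification blocks above compose as in the following sketch. This is not `cdktf convert` output: the `EmrClusterCoreInstanceFleetLaunchSpecificationsOnDemandSpecification` class name and its list-valued shape are assumed from the naming pattern of the generated bindings used elsewhere on this page, so verify them against your own generated bindings.
+
+```python
+# A sketch: a core instance fleet that prefers Spot capacity but falls back to
+# On-Demand capacity if Spot cannot be provisioned within the timeout.
+from imports.aws.emr_cluster import (
+    EmrClusterCoreInstanceFleet,
+    EmrClusterCoreInstanceFleetLaunchSpecifications,
+    EmrClusterCoreInstanceFleetLaunchSpecificationsOnDemandSpecification,
+    EmrClusterCoreInstanceFleetLaunchSpecificationsSpotSpecification,
+)
+
+core_instance_fleet = EmrClusterCoreInstanceFleet(
+    launch_specifications=EmrClusterCoreInstanceFleetLaunchSpecifications(
+        on_demand_specification=[EmrClusterCoreInstanceFleetLaunchSpecificationsOnDemandSpecification(
+            allocation_strategy="lowest-price"  # currently the only valid value
+        )
+        ],
+        spot_specification=[EmrClusterCoreInstanceFleetLaunchSpecificationsSpotSpecification(
+            allocation_strategy="capacity-optimized",
+            timeout_action="SWITCH_TO_ON_DEMAND",  # fall back rather than terminate
+            timeout_duration_minutes=10
+        )
+        ]
+    ),
+    name="core fleet",
+    target_on_demand_capacity=1,
+    target_spot_capacity=1
+)
+```
+
+The resulting `core_instance_fleet` value is then passed to `EmrCluster` exactly as in the Instance Fleet example above.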
+ 
+### core_instance_group
+
+* `autoscaling_policy` - (Optional) String containing the [EMR Auto Scaling Policy](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-automatic-scaling.html) JSON.
+* `bid_price` - (Optional) Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances.
+* `ebs_config` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below.
+* `instance_count` - (Optional) Target number of instances for the instance group. Must be at least 1. Defaults to 1.
+* `instance_type` - (Required) EC2 instance type for all instances in the instance group.
+* `name` - (Optional) Friendly name given to the instance group.
+
+#### ebs_config
+
+* `iops` - (Optional) Number of I/O operations per second (IOPS) that the volume supports.
+* `size` - (Required) Volume size, in gibibytes (GiB).
+* `type` - (Required) Volume type. Valid options are `gp3`, `gp2`, `io1`, `standard`, `st1` and `sc1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html).
+* `throughput` - (Optional) The throughput, in mebibytes per second (MiB/s).
+* `volumes_per_instance` - (Optional) Number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1).
+
+### ec2_attributes
+
+Attributes for the Amazon EC2 instances running the job flow:
+
+* `additional_master_security_groups` - (Optional) String containing a comma-separated list of additional Amazon EC2 security group IDs for the master node.
+* `additional_slave_security_groups` - (Optional) String containing a comma-separated list of additional Amazon EC2 security group IDs for the slave nodes.
+* `emr_managed_master_security_group` - (Optional) Identifier of the Amazon EC2 EMR-Managed security group for the master node.
+* `emr_managed_slave_security_group` - (Optional) Identifier of the Amazon EC2 EMR-Managed security group for the slave nodes.
+* `instance_profile` - (Required) Instance Profile for the cluster's EC2 instances to assume.
+* `key_name` - (Optional) Amazon EC2 key pair that can be used to SSH to the master node as the user called `hadoop`.
+* `service_access_security_group` - (Optional) Identifier of the Amazon EC2 service-access security group - required when the cluster runs on a private subnet.
+* `subnet_id` - (Optional) VPC subnet ID where you want the job flow to launch. Cannot specify the `cc1.4xlarge` instance type for nodes of a job flow launched in an Amazon VPC.
+* `subnet_ids` - (Optional) List of VPC subnet IDs where you want the job flow to launch. Amazon EMR identifies the best Availability Zone to launch instances according to your fleet specifications.
+
+~> **NOTE on EMR-Managed security groups:** These security groups will have any missing inbound or outbound access rules added and maintained by AWS, to ensure proper communication between instances in a cluster. The EMR service will maintain these rules for groups provided in `emr_managed_master_security_group` and `emr_managed_slave_security_group`; attempts to remove the required rules may succeed, only for the EMR service to re-add them in a matter of minutes.
This may cause Terraform to fail to destroy an environment that contains an EMR cluster, because the EMR service does not revoke rules added on deletion, leaving a cyclic dependency between the security groups that prevents their deletion. To avoid this, use the `revoke_rules_on_delete` optional attribute for any Security Group used in `emr_managed_master_security_group` and `emr_managed_slave_security_group`. See [Amazon EMR-Managed Security Groups](http://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-man-sec-groups.html) for more information about the EMR-managed security group rules. + +### kerberos_attributes + +* `ad_domain_join_password` - (Optional) Active Directory password for `ad_domain_join_user`. Terraform cannot perform drift detection of this configuration. +* `ad_domain_join_user` - (Optional) Required only when establishing a cross-realm trust with an Active Directory domain. A user with sufficient privileges to join resources to the domain. Terraform cannot perform drift detection of this configuration. +* `cross_realm_trust_principal_password` - (Optional) Required only when establishing a cross-realm trust with a KDC in a different realm. The cross-realm principal password, which must be identical across realms. Terraform cannot perform drift detection of this configuration. +* `kdc_admin_password` - (Required) Password used within the cluster for the kadmin service on the cluster-dedicated KDC, which maintains Kerberos principals, password policies, and keytabs for the cluster. Terraform cannot perform drift detection of this configuration. +* `realm` - (Required) Name of the Kerberos realm to which all nodes in a cluster belong. For example, `EC2.INTERNAL` + +### master_instance_fleet + +* `instance_type_configs` - (Optional) Configuration block for instance fleet. +* `launch_specifications` - (Optional) Configuration block for launch specification. +* `name` - (Optional) Friendly name given to the instance fleet. +* `target_on_demand_capacity` - (Optional) Target capacity of On-Demand units for the instance fleet, which determines how many On-Demand instances to provision. +* `target_spot_capacity` - (Optional) Target capacity of Spot units for the instance fleet, which determines how many Spot instances to provision. + +#### instance_type_configs + +See `instance_type_configs` above, under `core_instance_fleet`. + +#### launch_specifications + +See `launch_specifications` above, under `core_instance_fleet`. + +### master_instance_group + +Supported nested arguments for the `master_instance_group` configuration block: + +* `bid_price` - (Optional) Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. +* `ebs_config` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. +* `instance_count` - (Optional) Target number of instances for the instance group. Must be 1 or 3. Defaults to 1. Launching with multiple master nodes is only supported in EMR version 5.23.0+, and requires this resource's `core_instance_group` to be configured. Public (Internet accessible) instances must be created in VPC subnets that have [map public IP on launch](/docs/providers/aws/r/subnet.html#map_public_ip_on_launch) enabled. 
Termination protection is automatically enabled when launched with multiple master nodes, and Terraform must have the `termination_protection = false` configuration applied before destroying this resource.
+* `instance_type` - (Required) EC2 instance type for all instances in the instance group.
+* `name` - (Optional) Friendly name given to the instance group.
+
+#### ebs_config
+
+See `ebs_config` under `core_instance_group` above.
+
+### step
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+* `action_on_failure` - (Required) Action to take if the step fails. Valid values: `TERMINATE_JOB_FLOW`, `TERMINATE_CLUSTER`, `CANCEL_AND_WAIT`, and `CONTINUE`.
+* `hadoop_jar_step` - (Required) JAR file used for the step. See below.
+* `name` - (Required) Name of the step.
+
+#### hadoop_jar_step
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+* `args` - (Optional) List of command line arguments passed to the JAR file's main function when executed.
+* `jar` - (Required) Path to a JAR file run during the step.
+* `main_class` - (Optional) Name of the main class in the specified Java file. If not specified, the JAR file should specify a Main-Class in its manifest file.
+* `properties` - (Optional) Key-value map of Java properties that are set when the step runs. You can use these properties to pass key-value pairs to your main function.
+
+### placement_group_config
+
+* `instance_role` - (Required) Role of the instance in the cluster. Valid Values: `MASTER`, `CORE`, `TASK`.
+* `placement_strategy` - (Optional) EC2 Placement Group strategy associated with instance role. Valid Values: `SPREAD`, `PARTITION`, `CLUSTER`, `NONE`.
+
+A short sketch of this block follows the attribute list below.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `applications` - Applications installed on this cluster.
+* `arn` - ARN of the cluster.
+* `bootstrap_action` - List of bootstrap actions that will be run before Hadoop is started on the cluster nodes.
+* `configurations` - List of Configurations supplied to the EMR cluster.
+* `core_instance_group.0.id` - Core node type Instance Group ID, if using Instance Group for this node type.
+* `ec2_attributes` - Provides information about the EC2 instances in a cluster grouped by category: key name, subnet ID, IAM instance profile, and so on.
+* `id` - ID of the cluster.
+* `log_uri` - Path to the Amazon S3 location where logs for this cluster are stored.
+* `master_instance_group.0.id` - Master node type Instance Group ID, if using Instance Group for this node type.
+* `master_public_dns` - The DNS name of the master node. If the cluster is on a private subnet, this is the private DNS name. On a public subnet, this is the public DNS name.
+* `name` - Name of the cluster.
+* `release_label` - Release label for the Amazon EMR release.
+* `service_role` - IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `visible_to_all_users` - Indicates whether the job flow is visible to all IAM users of the AWS account associated with the job flow.
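+As noted under `placement_group_config` above, a minimal sketch of that block follows. It is not `cdktf convert` output, and the `EmrClusterPlacementGroupConfig` class name is assumed from the naming pattern of the generated bindings, so verify it against your own bindings.
+
+```python
+# A sketch: spread the master nodes of a multi-master cluster across distinct
+# underlying hardware. Pass the list as the placement_group_config argument of EmrCluster.
+from imports.aws.emr_cluster import EmrClusterPlacementGroupConfig
+
+placement_group_config = [EmrClusterPlacementGroupConfig(
+    instance_role="MASTER",
+    placement_strategy="SPREAD"
+)
+]
+```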
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR clusters using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EMR clusters using the `id`. For example: + +```console +% terraform import aws_emr_cluster.cluster j-123456ABCDEF +``` + +Since the API does not return the actual values for Kerberos configurations, environments with those Terraform configurations will need to use the [`lifecycle` configuration block `ignore_changes` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) available to all Terraform resources to prevent perpetual differences. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_cluster import EmrCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, name, releaseLabel, serviceRole): + super().__init__(scope, name) + EmrCluster(self, "example", + lifecycle=TerraformResourceLifecycle( + ignore_changes=[kerberos_attributes] + ), + name=name, + release_label=release_label, + service_role=service_role + ) +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/emr_instance_fleet.html.markdown b/website/docs/cdktf/python/r/emr_instance_fleet.html.markdown new file mode 100644 index 00000000000..f4b12ae2d10 --- /dev/null +++ b/website/docs/cdktf/python/r/emr_instance_fleet.html.markdown @@ -0,0 +1,163 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_instance_fleet" +description: |- + Provides an Elastic MapReduce Cluster Instance Fleet +--- + + + +# Resource: aws_emr_instance_fleet + +Provides an Elastic MapReduce Cluster Instance Fleet configuration. +See [Amazon Elastic MapReduce Documentation](https://aws.amazon.com/documentation/emr/) for more information. + +~> **NOTE:** At this time, Instance Fleets cannot be destroyed through the API nor +web interface. Instance Fleets are destroyed when the EMR Cluster is destroyed. +Terraform will resize any Instance Fleet to zero when destroying the resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.emr_instance_fleet import EmrInstanceFleet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrInstanceFleet(self, "task", + cluster_id=cluster.id, + instance_type_configs=[EmrInstanceFleetInstanceTypeConfigs( + bid_price_as_percentage_of_on_demand_price=100, + ebs_config=[EmrInstanceFleetInstanceTypeConfigsEbsConfig( + size=100, + type="gp2", + volumes_per_instance=1 + ) + ], + instance_type="m4.xlarge", + weighted_capacity=1 + ), EmrInstanceFleetInstanceTypeConfigs( + bid_price_as_percentage_of_on_demand_price=100, + ebs_config=[EmrInstanceFleetInstanceTypeConfigsEbsConfig( + size=100, + type="gp2", + volumes_per_instance=1 + ) + ], + instance_type="m4.2xlarge", + weighted_capacity=2 + ) + ], + launch_specifications=EmrInstanceFleetLaunchSpecifications( + spot_specification=[EmrInstanceFleetLaunchSpecificationsSpotSpecification( + allocation_strategy="capacity-optimized", + block_duration_minutes=0, + timeout_action="TERMINATE_CLUSTER", + timeout_duration_minutes=10 + ) + ] + ), + name="task fleet", + target_on_demand_capacity=1, + target_spot_capacity=1 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cluster_id` - (Required) ID of the EMR Cluster to attach to. Changing this forces a new resource to be created. +* `instance_type_configs` - (Optional) Configuration block for instance fleet +* `launch_specifications` - (Optional) Configuration block for launch specification +* `target_on_demand_capacity` - (Optional) The target capacity of On-Demand units for the instance fleet, which determines how many On-Demand instances to provision. +* `target_spot_capacity` - (Optional) The target capacity of Spot units for the instance fleet, which determines how many Spot instances to provision. +* `name` - (Optional) Friendly name given to the instance fleet. + +## instance_type_configs Configuration Block + +* `bid_price` - (Optional) The bid price for each EC2 Spot instance type as defined by `instance_type`. Expressed in USD. If neither `bid_price` nor `bid_price_as_percentage_of_on_demand_price` is provided, `bid_price_as_percentage_of_on_demand_price` defaults to 100%. +* `bid_price_as_percentage_of_on_demand_price` - (Optional) The bid price, as a percentage of On-Demand price, for each EC2 Spot instance as defined by `instance_type`. Expressed as a number (for example, 20 specifies 20%). If neither `bid_price` nor `bid_price_as_percentage_of_on_demand_price` is provided, `bid_price_as_percentage_of_on_demand_price` defaults to 100%. +* `configurations` - (Optional) A configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. List of `configuration` blocks. +* `ebs_config` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. +* `instance_type` - (Required) An EC2 instance type, such as m4.xlarge. +* `weighted_capacity` - (Optional) The number of units that a provisioned instance of this type provides toward fulfilling the target capacities defined in `aws_emr_instance_fleet`. + +## configurations Configuration Block + +A configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. See [Configuring Applications](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-configure-apps.html). 
+ 
+* `classification` - (Optional) The classification within a configuration.
+* `properties` - (Optional) A map of properties specified within a configuration classification.
+
+## ebs_config
+
+Attributes for the EBS volumes attached to each EC2 instance configured through `instance_type_configs`:
+
+* `size` - (Required) The volume size, in gibibytes (GiB).
+* `type` - (Required) The volume type. Valid options are `gp2`, `io1`, `standard` and `st1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html).
+* `iops` - (Optional) The number of I/O operations per second (IOPS) that the volume supports.
+* `volumes_per_instance` - (Optional) The number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1).
+
+## launch_specifications Configuration Block
+
+* `on_demand_specification` - (Optional) Configuration block for On-Demand instance launch specifications.
+* `spot_specification` - (Optional) Configuration block for Spot instance launch specifications.
+
+## on_demand_specification Configuration Block
+
+The launch specification for On-Demand instances in the instance fleet, which determines the allocation strategy.
+The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x versions. On-Demand instances allocation strategy is available in Amazon EMR version 5.12.1 and later.
+
+* `allocation_strategy` - (Required) Specifies the strategy to use in launching On-Demand instance fleets. Currently, the only option is `lowest-price` (the default), which launches the lowest price first.
+
+## spot_specification Configuration Block
+
+The launch specification for Spot instances in the fleet, which determines the defined duration, provisioning timeout behavior, and allocation strategy.
+
+* `allocation_strategy` - (Required) Specifies the strategy to use in launching Spot instance fleets. Currently, the only option is `capacity-optimized` (the default), which launches instances from Spot instance pools with optimal capacity for the number of instances that are launching.
+* `block_duration_minutes` - (Optional) The defined duration for Spot instances (also known as Spot blocks) in minutes. When specified, the Spot instance does not terminate before the defined duration expires, and defined duration pricing for Spot instances applies. Valid values are 60, 120, 180, 240, 300, or 360. The duration period starts as soon as a Spot instance receives its instance ID. At the end of the duration, Amazon EC2 marks the Spot instance for termination and provides a Spot instance termination notice, which gives the instance a two-minute warning before it terminates.
+* `timeout_action` - (Required) The action to take when TargetSpotCapacity has not been fulfilled when the TimeoutDurationMinutes has expired; that is, when all Spot instances could not be provisioned within the Spot provisioning timeout. Valid values are `TERMINATE_CLUSTER` and `SWITCH_TO_ON_DEMAND`. SWITCH_TO_ON_DEMAND specifies that if no Spot instances are available, On-Demand Instances should be provisioned to fulfill any remaining Spot capacity.
+* `timeout_duration_minutes` - (Required) The Spot provisioning timeout period in minutes. If Spot instances are not provisioned within this time period, the TimeOutAction is taken. Minimum value is 5 and maximum value is 1440. The timeout applies only during initial provisioning, when the cluster is first created.
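+A hedged sketch of the `on_demand_specification` block described above follows. It is not `cdktf convert` output; the `EmrInstanceFleetLaunchSpecificationsOnDemandSpecification` class name is assumed from the naming pattern of the generated bindings in the example above, so verify it against your own bindings.
+
+```python
+# A sketch: an On-Demand-only launch specification for an aws_emr_instance_fleet.
+from imports.aws.emr_instance_fleet import (
+    EmrInstanceFleetLaunchSpecifications,
+    EmrInstanceFleetLaunchSpecificationsOnDemandSpecification,
+)
+
+launch_specifications = EmrInstanceFleetLaunchSpecifications(
+    on_demand_specification=[EmrInstanceFleetLaunchSpecificationsOnDemandSpecification(
+        allocation_strategy="lowest-price"  # currently the only valid value
+    )
+    ]
+)
+```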
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier of the instance fleet. + +* `provisioned_on_demand_capacity` The number of On-Demand units that have been provisioned for the instance +fleet to fulfill TargetOnDemandCapacity. This provisioned capacity might be less than or greater than TargetOnDemandCapacity. + +* `provisioned_spot_capacity` The number of Spot units that have been provisioned for this instance fleet +to fulfill TargetSpotCapacity. This provisioned capacity might be less than or greater than TargetSpotCapacity. + +* `status` The current status of the instance fleet. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Instance Fleet using the EMR Cluster identifier and Instance Fleet identifier separated by a forward slash (`/`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EMR Instance Fleet using the EMR Cluster identifier and Instance Fleet identifier separated by a forward slash (`/`). For example: + +```console +% terraform import aws_emr_instance_fleet.example j-123456ABCDEF/if-15EK4O09RZLNR +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/emr_instance_group.html.markdown b/website/docs/cdktf/python/r/emr_instance_group.html.markdown new file mode 100644 index 00000000000..4651da0bd2e --- /dev/null +++ b/website/docs/cdktf/python/r/emr_instance_group.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_instance_group" +description: |- + Provides an Elastic MapReduce Cluster Instance Group +--- + + + +# Resource: aws_emr_instance_group + +Provides an Elastic MapReduce Cluster Instance Group configuration. +See [Amazon Elastic MapReduce Documentation](https://aws.amazon.com/documentation/emr/) for more information. + +~> **NOTE:** At this time, Instance Groups cannot be destroyed through the API nor +web interface. Instance Groups are destroyed when the EMR Cluster is destroyed. +Terraform will resize any Instance Group to zero when destroying the resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_instance_group import EmrInstanceGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrInstanceGroup(self, "task", + cluster_id=tf_test_cluster.id, + instance_count=1, + instance_type="m5.xlarge", + name="my little instance group" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` (Required) Human friendly name given to the instance group. Changing this forces a new resource to be created. +* `cluster_id` (Required) ID of the EMR Cluster to attach to. Changing this forces a new resource to be created. +* `instance_type` (Required) The EC2 instance type for all instances in the instance group. Changing this forces a new resource to be created. 
+
+* `instance_count` - (Optional) Target number of instances for the instance group. Defaults to 0.
+* `bid_price` - (Optional) If set, the bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances.
+* `ebs_optimized` - (Optional) Indicates whether an Amazon EBS volume is EBS-optimized. Changing this forces a new resource to be created.
+* `ebs_config` - (Optional) One or more `ebs_config` blocks as defined below. Changing this forces a new resource to be created.
+* `autoscaling_policy` - (Optional) The autoscaling policy document. This is a JSON formatted string. See [EMR Auto Scaling](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-automatic-scaling.html)
+* `configurations_json` - (Optional) A JSON string for supplying a list of configurations specific to the EMR instance group. Note that this can only be changed when using EMR release 5.21 or later.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emr_instance_group import EmrInstanceGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, clusterId, instanceType):
+        super().__init__(scope, name)
+        EmrInstanceGroup(self, "task",
+            configurations_json=" [\n {\n \"Classification\": \"hadoop-env\",\n \"Configurations\": [\n {\n \"Classification\": \"export\",\n \"Properties\": {\n \"JAVA_HOME\": \"/usr/lib/jvm/java-1.8.0\"\n }\n }\n ],\n \"Properties\": {}\n }\n ]\n\n",
+            cluster_id=cluster_id,
+            instance_type=instance_type
+        )
+```
+
+`ebs_config` supports the following:
+
+* `iops` - (Optional) The number of I/O operations per second (IOPS) that the volume supports.
+* `size` - (Optional) The volume size, in gibibytes (GiB). This can be a number from 1 - 1024. If the volume type is EBS-optimized, the minimum value is 10.
+* `type` - (Optional) The volume type. Valid options are `gp2`, `io1` and `standard`.
+* `volumes_per_instance` - (Optional) The number of EBS Volumes to attach per instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The EMR Instance Group ID
+* `running_instance_count` - The number of instances currently running in this instance group.
+* `status` - The current status of the instance group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR task instance group using their EMR Cluster id and Instance Group id separated by a forward-slash `/`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EMR task instance group using their EMR Cluster id and Instance Group id separated by a forward-slash `/`.
For example: + +```console +% terraform import aws_emr_instance_group.task_group j-123456ABCDEF/ig-15EK4O09RZLNR +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/emr_managed_scaling_policy.html.markdown b/website/docs/cdktf/python/r/emr_managed_scaling_policy.html.markdown new file mode 100644 index 00000000000..35e20879bcf --- /dev/null +++ b/website/docs/cdktf/python/r/emr_managed_scaling_policy.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_managed_scaling_policy" +description: |- + Provides a resource for EMR Managed Scaling policy +--- + + + +# Resource: aws_emr_managed_scaling_policy + +Provides a Managed Scaling policy for EMR Cluster. With Amazon EMR versions 5.30.0 and later (except for Amazon EMR 6.0.0), you can enable EMR managed scaling to automatically increase or decrease the number of instances or units in your cluster based on workload. See [Using EMR Managed Scaling in Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-managed-scaling.html) for more information. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_cluster import EmrCluster +from imports.aws.emr_managed_scaling_policy import EmrManagedScalingPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, serviceRole): + super().__init__(scope, name) + sample = EmrCluster(self, "sample", + core_instance_group=EmrClusterCoreInstanceGroup( + instance_type="c4.large" + ), + master_instance_group=EmrClusterMasterInstanceGroup( + instance_type="m4.large" + ), + name="emr-sample-cluster", + release_label="emr-5.30.0", + service_role=service_role + ) + EmrManagedScalingPolicy(self, "samplepolicy", + cluster_id=sample.id, + compute_limits=[EmrManagedScalingPolicyComputeLimits( + maximum_capacity_units=10, + maximum_core_capacity_units=10, + maximum_ondemand_capacity_units=2, + minimum_capacity_units=2, + unit_type="Instances" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cluster_id` - (Required) ID of the EMR cluster +* `compute_limits` - (Required) Configuration block with compute limit settings. Described below. + +### compute_limits + +* `unit_type` - (Required) The unit type used for specifying a managed scaling policy. Valid Values: `InstanceFleetUnits` | `Instances` | `VCPU` +* `minimum_capacity_units` - (Required) The lower boundary of EC2 units. It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. Managed scaling activities are not allowed beyond this boundary. The limit only applies to the core and task nodes. The master node cannot be scaled after initial configuration. +* `maximum_capacity_units` - (Required) The upper boundary of EC2 units. It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. Managed scaling activities are not allowed beyond this boundary. The limit only applies to the core and task nodes. The master node cannot be scaled after initial configuration. +* `maximum_ondemand_capacity_units` - (Optional) The upper boundary of On-Demand EC2 units. 
It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. The On-Demand units are not allowed to scale beyond this boundary. The parameter is used to split capacity allocation between On-Demand and Spot instances. +* `maximum_core_capacity_units` - (Optional) The upper boundary of EC2 units for core node type in a cluster. It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. The core units are not allowed to scale beyond this boundary. The parameter is used to split capacity allocation between core and task nodes. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Managed Scaling Policies using the EMR Cluster identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EMR Managed Scaling Policies using the EMR Cluster identifier. For example: + +```console +% terraform import aws_emr_managed_scaling_policy.example j-123456ABCDEF +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/emr_security_configuration.html.markdown b/website/docs/cdktf/python/r/emr_security_configuration.html.markdown new file mode 100644 index 00000000000..e4bd05b79de --- /dev/null +++ b/website/docs/cdktf/python/r/emr_security_configuration.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_security_configuration" +description: |- + Provides a resource to manage AWS EMR Security Configurations +--- + + + +# Resource: aws_emr_security_configuration + +Provides a resource to manage AWS EMR Security Configurations + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.emr_security_configuration import EmrSecurityConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EmrSecurityConfiguration(self, "foo", + configuration="{\n \"EncryptionConfiguration\": {\n \"AtRestEncryptionConfiguration\": {\n \"S3EncryptionConfiguration\": {\n \"EncryptionMode\": \"SSE-S3\"\n },\n \"LocalDiskEncryptionConfiguration\": {\n \"EncryptionKeyProviderType\": \"AwsKms\",\n \"AwsKmsKey\": \"arn:aws:kms:us-west-2:187416307283:alias/tf_emr_test_key\"\n }\n },\n \"EnableInTransitEncryption\": false,\n \"EnableAtRestEncryption\": true\n }\n}\n\n", + name="emrsc_other" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the EMR Security Configuration. By default generated by Terraform. +* `name_prefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. 
+
+* `configuration` - (Required) A JSON formatted Security Configuration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the EMR Security Configuration (Same as the `name`)
+* `name` - The Name of the EMR Security Configuration
+* `configuration` - The JSON formatted Security Configuration
+* `creation_date` - Date the Security Configuration was created
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Security Configurations using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EMR Security Configurations using the `name`. For example:
+
+```console
+% terraform import aws_emr_security_configuration.sc example-sc-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/emr_studio.html.markdown b/website/docs/cdktf/python/r/emr_studio.html.markdown
new file mode 100644
index 00000000000..f6b00784833
--- /dev/null
+++ b/website/docs/cdktf/python/r/emr_studio.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "EMR"
+layout: "aws"
+page_title: "AWS: aws_emr_studio"
+description: |-
+  Provides an Elastic MapReduce Studio
+---
+
+
+# Resource: aws_emr_studio
+
+Provides an Elastic MapReduce Studio.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emr_studio import EmrStudio
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrStudio(self, "example",
+            auth_mode="SSO",
+            default_s3_location="s3://${" + test.bucket + "}/test",
+            engine_security_group_id=Token.as_string(aws_security_group_test.id),
+            name="example",
+            service_role=Token.as_string(aws_iam_role_test.arn),
+            subnet_ids=[Token.as_string(aws_subnet_test.id)],
+            user_role=Token.as_string(aws_iam_role_test.arn),
+            vpc_id=Token.as_string(aws_vpc_test.id),
+            workspace_security_group_id=Token.as_string(aws_security_group_test.id)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `auth_mode` - (Required) Specifies whether the Studio authenticates users using IAM or Amazon Web Services SSO. Valid values are `SSO` or `IAM`.
+* `default_s3_location` - (Required) The Amazon S3 location to back up Amazon EMR Studio Workspaces and notebook files.
+* `name` - (Required) A descriptive name for the Amazon EMR Studio.
+* `engine_security_group_id` - (Required) The ID of the Amazon EMR Studio Engine security group. The Engine security group allows inbound network traffic from the Workspace security group, and it must be in the same VPC specified by `vpc_id`.
+* `service_role` - (Required) The IAM role that the Amazon EMR Studio assumes. The service role provides a way for Amazon EMR Studio to interoperate with other Amazon Web Services services.
+* `subnet_ids` - (Required) A list of subnet IDs to associate with the Amazon EMR Studio. A Studio can have a maximum of 5 subnets.
The subnets must belong to the VPC specified by `vpc_id`. Studio users can create a Workspace in any of the specified subnets.
+* `vpc_id` - (Required) The ID of the Amazon Virtual Private Cloud (Amazon VPC) to associate with the Studio.
+* `workspace_security_group_id` - (Required) The ID of the Amazon EMR Studio Workspace security group. The Workspace security group allows outbound network traffic to resources in the Engine security group, and it must be in the same VPC specified by `vpc_id`.
+
+The following arguments are optional:
+
+* `description` - (Optional) A detailed description of the Amazon EMR Studio.
+* `idp_auth_url` - (Optional) The authentication endpoint of your identity provider (IdP). Specify this value when you use IAM authentication and want to let federated users log in to a Studio with the Studio URL and credentials from your IdP. Amazon EMR Studio redirects users to this endpoint to enter credentials.
+* `idp_relay_state_parameter_name` - (Optional) The name that your identity provider (IdP) uses for its RelayState parameter. For example, RelayState or TargetSource. Specify this value when you use IAM authentication and want to let federated users log in to a Studio using the Studio URL. The RelayState parameter differs by IdP.
+* `tags` - (Optional) List of tags to apply to the EMR Studio. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `user_role` - (Optional) The IAM user role that users and groups assume when logged in to an Amazon EMR Studio. Only specify a User Role when you use Amazon Web Services SSO authentication. The permissions attached to the User Role can be scoped down for each user or group using session policies.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the studio.
+* `url` - The unique access URL of the Amazon EMR Studio.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR studios using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EMR studios using the `id`. For example:
+
+```console
+% terraform import aws_emr_studio.studio es-123456ABCDEF
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/emr_studio_session_mapping.html.markdown b/website/docs/cdktf/python/r/emr_studio_session_mapping.html.markdown
new file mode 100644
index 00000000000..bb9f6467c37
--- /dev/null
+++ b/website/docs/cdktf/python/r/emr_studio_session_mapping.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "EMR"
+layout: "aws"
+page_title: "AWS: aws_emr_studio_session_mapping"
+description: |-
+  Provides an Elastic MapReduce Studio Session Mapping
+---
+
+
+# Resource: aws_emr_studio_session_mapping
+
+Provides an Elastic MapReduce Studio Session Mapping.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emr_studio_session_mapping import EmrStudioSessionMapping
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrStudioSessionMapping(self, "example",
+            identity_id="example",
+            identity_type="USER",
+            session_policy_arn=Token.as_string(aws_iam_policy_example.arn),
+            studio_id=Token.as_string(aws_emr_studio_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `identity_id` - (Optional) The globally unique identifier (GUID) of the user or group from the Amazon Web Services SSO Identity Store.
+* `identity_name` - (Optional) The name of the user or group from the Amazon Web Services SSO Identity Store.
+* `identity_type` - (Required) Specifies whether the identity to map to the Amazon EMR Studio is a `USER` or a `GROUP`.
+* `session_policy_arn` - (Required) The Amazon Resource Name (ARN) for the session policy that will be applied to the user or group. You should specify the ARN for the session policy that you want to apply, not the ARN of your user role.
+* `studio_id` - (Required) The ID of the Amazon EMR Studio to which the user or group will be mapped.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Elastic MapReduce Studio Session Mapping.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR studio session mappings using `studio-id:identity-type:identity-id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EMR studio session mappings using `studio-id:identity-type:identity-id`. For example:
+
+```console
+% terraform import aws_emr_studio_session_mapping.example es-xxxxx:USER:xxxxx-xxx-xxx
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/emrcontainers_job_template.markdown b/website/docs/cdktf/python/r/emrcontainers_job_template.markdown
new file mode 100644
index 00000000000..e12af8dbbba
--- /dev/null
+++ b/website/docs/cdktf/python/r/emrcontainers_job_template.markdown
@@ -0,0 +1,131 @@
+---
+subcategory: "EMR Containers"
+layout: "aws"
+page_title: "AWS: aws_emrcontainers_job_template"
+description: |-
+  Manages an EMR Containers (EMR on EKS) Job Template
+---
+
+
+# Resource: aws_emrcontainers_job_template
+
+Manages an EMR Containers (EMR on EKS) Job Template.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emrcontainers_job_template import EmrcontainersJobTemplate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrcontainersJobTemplate(self, "example",
+            job_template_data=EmrcontainersJobTemplateJobTemplateData(
+                execution_role_arn=Token.as_string(aws_iam_role_example.arn),
+                job_driver=EmrcontainersJobTemplateJobTemplateDataJobDriver(
+                    spark_sql_job_driver=EmrcontainersJobTemplateJobTemplateDataJobDriverSparkSqlJobDriver(
+                        entry_point="default"
+                    )
+                ),
+                release_label="emr-6.10.0-latest"
+            ),
+            name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `job_template_data` - (Required) The job template data which holds values of StartJobRun API request.
+* `kms_key_arn` - (Optional) The KMS key ARN used to encrypt the job template.
+* `name` – (Required) The specified name of the job template.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### job_template_data Arguments
+
+* `configuration_overrides` - (Optional) The configuration settings that are used to override the default configurations.
+* `execution_role_arn` - (Required) The execution role ARN of the job run.
+* `job_driver` - (Required) Specifies the driver that the job runs on. Exactly one of the two available job drivers is required: either `spark_sql_job_driver` or `spark_submit_job_driver`.
+* `job_tags` - (Optional) The tags assigned to jobs started using the job template.
+* `release_label` - (Required) The release version of Amazon EMR.
+
+#### configuration_overrides Arguments
+
+* `application_configuration` - (Optional) The configurations for the application run by the job run.
+* `monitoring_configuration` - (Optional) The configurations for monitoring.
+
+##### application_configuration Arguments
+
+* `classification` - (Required) The classification within a configuration.
+* `configurations` - (Optional) A list of additional configurations to apply within a configuration object.
+* `properties` - (Optional) A set of properties specified within a configuration classification.
+
+##### monitoring_configuration Arguments
+
+* `cloud_watch_monitoring_configuration` - (Optional) Monitoring configurations for CloudWatch.
+* `persistent_app_ui` - (Optional) Monitoring configurations for the persistent application UI.
+* `s3_monitoring_configuration` - (Optional) Amazon S3 configuration for monitoring log publishing.
+
+###### cloud_watch_monitoring_configuration Arguments
+
+* `log_group_name` - (Required) The name of the log group for log publishing.
+* `log_stream_name_prefix` - (Optional) The specified name prefix for log streams.
+
+###### s3_monitoring_configuration Arguments
+
+* `log_uri` - (Optional) Amazon S3 destination URI for log publishing.
+
+#### job_driver Arguments
+
+* `spark_sql_job_driver` - (Optional) The job driver for the Spark SQL job type.
+* `spark_submit_job_driver` - (Optional) The job driver parameters specified for spark submit.
+
+##### spark_sql_job_driver Arguments
+
+* `entry_point` - (Optional) The SQL file to be executed.
+* `spark_sql_parameters` - (Optional) The Spark parameters to be included in the Spark SQL command.
+
+##### spark_submit_job_driver Arguments
+
+* `entry_point` - (Required) The entry point of the job application.
+* `entry_point_arguments` - (Optional) The arguments for the job application.
+* `spark_submit_parameters` - (Optional) The Spark submit parameters that are used for job runs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the job template.
+* `id` - The ID of the job template.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Containers job templates using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EMR Containers job templates using the `id`. For example:
+
+```console
+% terraform import aws_emrcontainers_job_template.example a1b2c3d4e5f6g7h8i9j10k11l
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/emrcontainers_virtual_cluster.markdown b/website/docs/cdktf/python/r/emrcontainers_virtual_cluster.markdown
new file mode 100644
index 00000000000..c747210757d
--- /dev/null
+++ b/website/docs/cdktf/python/r/emrcontainers_virtual_cluster.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "EMR Containers"
+layout: "aws"
+page_title: "AWS: aws_emrcontainers_virtual_cluster"
+description: |-
+  Manages an EMR Containers (EMR on EKS) Virtual Cluster
+---
+
+
+# Resource: aws_emrcontainers_virtual_cluster
+
+Manages an EMR Containers (EMR on EKS) Virtual Cluster.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emrcontainers_virtual_cluster import EmrcontainersVirtualCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrcontainersVirtualCluster(self, "example",
+            container_provider=EmrcontainersVirtualClusterContainerProvider(
+                id=Token.as_string(aws_eks_cluster_example.name),
+                info=EmrcontainersVirtualClusterContainerProviderInfo(
+                    eks_info=EmrcontainersVirtualClusterContainerProviderInfoEksInfo(
+                        namespace="default"
+                    )
+                ),
+                type="EKS"
+            ),
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `container_provider` - (Required) Configuration block for the container provider associated with your cluster.
+* `name` – (Required) Name of the virtual cluster.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### container_provider Arguments
+
+* `id` - The name of the container provider that is running your EMR Containers cluster.
+* `info` - Nested list containing information about the configuration of the container provider.
+    * `eks_info` - Nested list containing EKS-specific information about the cluster where the EMR Containers cluster is running.
+        * `namespace` - The namespace where the EMR Containers cluster is running.
+* `type` - The type of the container provider.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cluster.
+* `id` - The ID of the cluster.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Containers virtual clusters using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EMR Containers virtual clusters using the `id`. For example:
+
+```console
+% terraform import aws_emrcontainers_virtual_cluster.example a1b2c3d4e5f6g7h8i9j10k11l
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/emrserverless_application.markdown b/website/docs/cdktf/python/r/emrserverless_application.markdown
new file mode 100644
index 00000000000..368a88a7fda
--- /dev/null
+++ b/website/docs/cdktf/python/r/emrserverless_application.markdown
@@ -0,0 +1,178 @@
+---
+subcategory: "EMR Serverless"
+layout: "aws"
+page_title: "AWS: aws_emrserverless_application"
+description: |-
+  Manages an EMR Serverless Application
+---
+
+
+# Resource: aws_emrserverless_application
+
+Manages an EMR Serverless Application.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emrserverless_application import EmrserverlessApplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrserverlessApplication(self, "example",
+            name="example",
+            release_label="emr-6.6.0",
+            type="hive"
+        )
+```
+
+### Initial Capacity Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emrserverless_application import EmrserverlessApplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrserverlessApplication(self, "example",
+            initial_capacity=[EmrserverlessApplicationInitialCapacity(
+                initial_capacity_config=EmrserverlessApplicationInitialCapacityInitialCapacityConfig(
+                    worker_configuration=EmrserverlessApplicationInitialCapacityInitialCapacityConfigWorkerConfiguration(
+                        cpu="2 vCPU",
+                        memory="10 GB"
+                    ),
+                    worker_count=1
+                ),
+                initial_capacity_type="HiveDriver"
+            )
+            ],
+            name="example",
+            release_label="emr-6.6.0",
+            type="hive"
+        )
+```
+
+### Maximum Capacity Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.emrserverless_application import EmrserverlessApplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrserverlessApplication(self, "example",
+            maximum_capacity=EmrserverlessApplicationMaximumCapacity(
+                cpu="2 vCPU",
+                memory="10 GB"
+            ),
+            name="example",
+            release_label="emr-6.6.0",
+            type="hive"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `architecture` – (Optional) The CPU architecture of an application. Valid values are `ARM64` or `X86_64`. Default value is `X86_64`.
+* `auto_start_configuration` – (Optional) The configuration for an application to automatically start on job submission.
+* `auto_stop_configuration` – (Optional) The configuration for an application to automatically stop after a certain amount of time being idle.
+* `image_configuration` – (Optional) The image configuration applied to all worker types.
+* `initial_capacity` – (Optional) The capacity to initialize when the application is created.
+* `maximum_capacity` – (Optional) The maximum capacity to allocate when the application is created. This is cumulative across all workers at any given point in time, not just when an application is created. No new resources will be created once any one of the defined limits is hit.
+* `name` – (Required) The name of the application.
+* `network_configuration` – (Optional) The network configuration for customer VPC connectivity.
+* `release_label` – (Required) The EMR release version associated with the application.
+* `type` – (Required) The type of application you want to start, such as `spark` or `hive`.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### auto_start_configuration Arguments
+
+* `enabled` - (Optional) Enables the application to automatically start on job submission. Defaults to `true`.
+
+### auto_stop_configuration Arguments
+
+* `enabled` - (Optional) Enables the application to automatically stop after a certain amount of time being idle. Defaults to `true`.
+* `idle_timeout_minutes` - (Optional) The amount of idle time in minutes after which your application will automatically stop. Defaults to `15` minutes.
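+
+As a sketch of these two blocks in use, the following hand-written snippet (not produced by `cdktf convert`) keeps an application warm for an hour of idle time before it stops automatically. It assumes provider bindings generated by `cdktf get` and struct class names that follow the usual generated naming convention.
+
+```python
+# Hand-written sketch - struct names assume the generated binding conventions
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.emrserverless_application import (
+    EmrserverlessApplication,
+    EmrserverlessApplicationAutoStopConfiguration,
+)
+class AutoStopSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EmrserverlessApplication(self, "example",
+            # Stop only after 60 idle minutes instead of the default 15.
+            auto_stop_configuration=EmrserverlessApplicationAutoStopConfiguration(
+                enabled=True,
+                idle_timeout_minutes=60
+            ),
+            name="example",
+            release_label="emr-6.6.0",
+            type="hive"
+        )
+```
+
+A longer idle window avoids cold starts between bursts of jobs at the cost of paying for idle capacity.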
+
+### initial_capacity Arguments
+
+* `initial_capacity_config` - (Optional) The initial capacity configuration per worker.
+* `initial_capacity_type` - (Required) The worker type for an analytics framework. For Spark applications, the key can either be set to `Driver` or `Executor`. For Hive applications, it can be set to `HiveDriver` or `TezTask`.
+
+### maximum_capacity Arguments
+
+* `cpu` - (Required) The maximum allowed CPU for an application.
+* `disk` - (Optional) The maximum allowed disk for an application.
+* `memory` - (Required) The maximum allowed memory for an application.
+
+### network_configuration Arguments
+
+* `security_group_ids` - (Optional) The array of security group IDs for customer VPC connectivity.
+* `subnet_ids` - (Optional) The array of subnet IDs for customer VPC connectivity.
+
+#### image_configuration Arguments
+
+* `image_uri` - (Required) The image URI.
+
+#### initial_capacity_config Arguments
+
+* `worker_configuration` - (Optional) The resource configuration of the initial capacity configuration.
+* `worker_count` - (Required) The number of workers in the initial capacity configuration.
+
+##### worker_configuration Arguments
+
+* `cpu` - (Required) The CPU requirements for every worker instance of the worker type.
+* `disk` - (Optional) The disk requirements for every worker instance of the worker type.
+* `memory` - (Required) The memory requirements for every worker instance of the worker type.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the application.
+* `id` - The ID of the application.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Serverless applications using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import EMR Serverless applications using the `id`. For example:
+
+```console
+% terraform import aws_emrserverless_application.example id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/evidently_feature.html.markdown b/website/docs/cdktf/python/r/evidently_feature.html.markdown
new file mode 100644
index 00000000000..9a742754d7a
--- /dev/null
+++ b/website/docs/cdktf/python/r/evidently_feature.html.markdown
@@ -0,0 +1,225 @@
+---
+subcategory: "CloudWatch Evidently"
+layout: "aws"
+page_title: "AWS: aws_evidently_feature"
+description: |-
+  Provides a CloudWatch Evidently Feature resource.
+---
+
+
+# Resource: aws_evidently_feature
+
+Provides a CloudWatch Evidently Feature resource.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.evidently_feature import EvidentlyFeature +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyFeature(self, "example", + description="example description", + name="example", + project=Token.as_string(aws_evidently_project_example.name), + tags={ + "Key1": "example Feature" + }, + variations=[EvidentlyFeatureVariations( + name="Variation1", + value=EvidentlyFeatureVariationsValue( + string_value="example" + ) + ) + ] + ) +``` + +### With default variation + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_feature import EvidentlyFeature +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyFeature(self, "example", + default_variation="Variation2", + name="example", + project=Token.as_string(aws_evidently_project_example.name), + variations=[EvidentlyFeatureVariations( + name="Variation1", + value=EvidentlyFeatureVariationsValue( + string_value="exampleval1" + ) + ), EvidentlyFeatureVariations( + name="Variation2", + value=EvidentlyFeatureVariationsValue( + string_value="exampleval2" + ) + ) + ] + ) +``` + +### With entity overrides + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_feature import EvidentlyFeature +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyFeature(self, "example", + entity_overrides={ + "test1": "Variation1" + }, + name="example", + project=Token.as_string(aws_evidently_project_example.name), + variations=[EvidentlyFeatureVariations( + name="Variation1", + value=EvidentlyFeatureVariationsValue( + string_value="exampleval1" + ) + ), EvidentlyFeatureVariations( + name="Variation2", + value=EvidentlyFeatureVariationsValue( + string_value="exampleval2" + ) + ) + ] + ) +``` + +### With evaluation strategy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_feature import EvidentlyFeature +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyFeature(self, "example", + entity_overrides={ + "test1": "Variation1" + }, + evaluation_strategy="ALL_RULES", + name="example", + project=Token.as_string(aws_evidently_project_example.name), + variations=[EvidentlyFeatureVariations( + name="Variation1", + value=EvidentlyFeatureVariationsValue( + string_value="exampleval1" + ) + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `default_variation` - (Optional) The name of the variation to use as the default variation. The default variation is served to users who are not allocated to any ongoing launches or experiments of this feature. 
This variation must also be listed in the `variations` structure. If you omit `default_variation`, the first variation listed in the `variations` structure is used as the default variation.
+* `description` - (Optional) Specifies the description of the feature.
+* `entity_overrides` - (Optional) Specify users that should always be served a specific variation of a feature. Each user is specified by a key-value pair. For each key, specify a user by entering their user ID, account ID, or some other identifier. For the value, specify the name of the variation that they are to be served.
+* `evaluation_strategy` - (Optional) Specify `ALL_RULES` to activate the traffic allocation specified by any ongoing launches or experiments. Specify `DEFAULT_VARIATION` to serve the default variation to all users instead.
+* `name` - (Required) The name for the new feature. Minimum length of `1`. Maximum length of `127`.
+* `project` - (Required) The name or ARN of the project that is to contain the new feature.
+* `tags` - (Optional) Tags to apply to the feature. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `variations` - (Required) One or more blocks that contain the configuration of the feature's different variations. [Detailed below](#variations)
+
+### `variations`
+
+The `variations` block supports the following arguments:
+
+* `name` - (Required) The name of the variation. Minimum length of `1`. Maximum length of `127`.
+* `value` - (Required) A block that specifies the value assigned to this variation. [Detailed below](#value)
+
+#### `value`
+
+The `value` block supports the following arguments:
+
+~> **NOTE:** You must specify exactly one of `bool_value`, `double_value`, `long_value`, `string_value`.
+
+* `bool_value` - (Optional) If this feature uses the Boolean variation type, this field contains the Boolean value of this variation.
+* `double_value` - (Optional) If this feature uses the double variation type, this field contains the double value of this variation.
+* `long_value` - (Optional) If this feature uses the long variation type, this field contains the long value of this variation. Minimum value of `-9007199254740991`. Maximum value of `9007199254740991`.
+* `string_value` - (Optional) If this feature uses the string variation type, this field contains the string value of this variation. Minimum length of `0`. Maximum length of `512`.
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `2m`)
+* `delete` - (Default `2m`)
+* `update` - (Default `2m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the feature.
+* `created_time` - The date and time that the feature is created.
+* `evaluation_rules` - One or more blocks that define the evaluation rules for the feature. [Detailed below](#evaluation_rules)
+* `id` - The feature `name` and the project `name` or `arn` separated by a colon (`:`).
+* `last_updated_time` - The date and time that the feature was most recently updated.
+* `status` - The current state of the feature. Valid values are `AVAILABLE` and `UPDATING`.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). +* `value_type` - Defines the type of value used to define the different feature variations. Valid Values: `STRING`, `LONG`, `DOUBLE`, `BOOLEAN`. + +### `evaluation_rules` + +The `evaluation_rules` block supports the following attributes: + +* `name` - The name of the experiment or launch. +* `type` - This value is `aws.evidently.splits` if this is an evaluation rule for a launch, and it is `aws.evidently.onlineab` if this is an evaluation rule for an experiment. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Feature using the feature `name` and `name` or `arn` of the hosting CloudWatch Evidently Project separated by a `:`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch Evidently Feature using the feature `name` and `name` or `arn` of the hosting CloudWatch Evidently Project separated by a `:`. For example: + +```console +% terraform import aws_evidently_feature.example exampleFeatureName:arn:aws:evidently:us-east-1:123456789012:project/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/evidently_launch.html.markdown b/website/docs/cdktf/python/r/evidently_launch.html.markdown new file mode 100644 index 00000000000..04dfe999d56 --- /dev/null +++ b/website/docs/cdktf/python/r/evidently_launch.html.markdown @@ -0,0 +1,447 @@ +--- +subcategory: "CloudWatch Evidently" +layout: "aws" +page_title: "AWS: aws_evidently_launch" +description: |- + Provides a CloudWatch Evidently Launch resource. +--- + + + +# Resource: aws_evidently_launch + +Provides a CloudWatch Evidently Launch resource. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_launch import EvidentlyLaunch +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyLaunch(self, "example", + groups=[EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation1", + variation="Variation1" + ) + ], + name="example", + project=Token.as_string(aws_evidently_project_example.name), + scheduled_splits_config=EvidentlyLaunchScheduledSplitsConfig( + steps=[EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 0 + }, + start_time="2024-01-07 01:43:59+00:00" + ) + ] + ) + ) +``` + +### With description + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.evidently_launch import EvidentlyLaunch +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyLaunch(self, "example", + description="example description", + groups=[EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation1", + variation="Variation1" + ) + ], + name="example", + project=Token.as_string(aws_evidently_project_example.name), + scheduled_splits_config=EvidentlyLaunchScheduledSplitsConfig( + steps=[EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 0 + }, + start_time="2024-01-07 01:43:59+00:00" + ) + ] + ) + ) +``` + +### With multiple groups + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_launch import EvidentlyLaunch +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyLaunch(self, "example", + groups=[EvidentlyLaunchGroups( + description="first-group", + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation1", + variation="Variation1" + ), EvidentlyLaunchGroups( + description="second-group", + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation2", + variation="Variation2" + ) + ], + name="example", + project=Token.as_string(aws_evidently_project_example.name), + scheduled_splits_config=EvidentlyLaunchScheduledSplitsConfig( + steps=[EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 0, + "Variation2": 0 + }, + start_time="2024-01-07 01:43:59+00:00" + ) + ] + ) + ) +``` + +### With metric_monitors + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.evidently_launch import EvidentlyLaunch +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyLaunch(self, "example", + groups=[EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation1", + variation="Variation1" + ) + ], + metric_monitors=[EvidentlyLaunchMetricMonitors( + metric_definition=EvidentlyLaunchMetricMonitorsMetricDefinition( + entity_id_key="entity_id_key1", + event_pattern="{\\\"Price\\\":[{\\\"numeric\\\":[\\\">\\\",11,\\\"<=\\\",22]}]}", + name="name1", + unit_label="unit_label1", + value_key="value_key1" + ) + ), EvidentlyLaunchMetricMonitors( + metric_definition=EvidentlyLaunchMetricMonitorsMetricDefinition( + entity_id_key="entity_id_key2", + event_pattern="{\\\"Price\\\":[{\\\"numeric\\\":[\\\">\\\",9,\\\"<=\\\",19]}]}", + name="name2", + unit_label="unit_label2", + value_key="value_key2" + ) + ) + ], + name="example", + project=Token.as_string(aws_evidently_project_example.name), + scheduled_splits_config=EvidentlyLaunchScheduledSplitsConfig( + steps=[EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 0 + }, + start_time="2024-01-07 01:43:59+00:00" + ) + ] + ) + ) +``` + +### With randomization_salt + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_launch import EvidentlyLaunch +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyLaunch(self, "example", + groups=[EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation1", + variation="Variation1" + ) + ], + name="example", + project=Token.as_string(aws_evidently_project_example.name), + randomization_salt="example randomization salt", + scheduled_splits_config=EvidentlyLaunchScheduledSplitsConfig( + steps=[EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 0 + }, + start_time="2024-01-07 01:43:59+00:00" + ) + ] + ) + ) +``` + +### With multiple steps + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.evidently_launch import EvidentlyLaunch +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyLaunch(self, "example", + groups=[EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation1", + variation="Variation1" + ), EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation2", + variation="Variation2" + ) + ], + name="example", + project=Token.as_string(aws_evidently_project_example.name), + scheduled_splits_config=EvidentlyLaunchScheduledSplitsConfig( + steps=[EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 15, + "Variation2": 10 + }, + start_time="2024-01-07 01:43:59+00:00" + ), EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 20, + "Variation2": 25 + }, + start_time="2024-01-08 01:43:59+00:00" + ) + ] + ) + ) +``` + +### With segment overrides + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_launch import EvidentlyLaunch +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyLaunch(self, "example", + groups=[EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation1", + variation="Variation1" + ), EvidentlyLaunchGroups( + feature=Token.as_string(aws_evidently_feature_example.name), + name="Variation2", + variation="Variation2" + ) + ], + name="example", + project=Token.as_string(aws_evidently_project_example.name), + scheduled_splits_config=EvidentlyLaunchScheduledSplitsConfig( + steps=[EvidentlyLaunchScheduledSplitsConfigSteps( + group_weights={ + "Variation1": 0, + "Variation2": 0 + }, + segment_overrides=[EvidentlyLaunchScheduledSplitsConfigStepsSegmentOverrides( + evaluation_order=1, + segment=Token.as_string(aws_evidently_segment_example.name), + weights={ + "Variation2": 10000 + } + ), EvidentlyLaunchScheduledSplitsConfigStepsSegmentOverrides( + evaluation_order=2, + segment=Token.as_string(aws_evidently_segment_example.name), + weights={ + "Variation1": 40000, + "Variation2": 30000 + } + ) + ], + start_time="2024-01-08 01:43:59+00:00" + ) + ] + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Specifies the description of the launch. +* `groups` - (Required) One or up to five blocks that contain the feature and variations that are to be used for the launch. [Detailed below](#groups). +* `metric_monitors` - (Optional) One or up to three blocks that define the metrics that will be used to monitor the launch performance. [Detailed below](#metric_monitors). +* `name` - (Required) The name for the new launch. Minimum length of `1`. Maximum length of `127`. +* `project` - (Required) The name or ARN of the project that is to contain the new launch. +* `randomization_salt` - (Optional) When Evidently assigns a particular user session to a launch, it must use a randomization ID to determine which variation the user session is served. This randomization ID is a combination of the entity ID and randomizationSalt. If you omit randomizationSalt, Evidently uses the launch name as the randomizationSalt. 
+* `scheduled_splits_config` - (Optional) A block that defines the traffic allocation percentages among the feature variations during each step of the launch. [Detailed below](#scheduled_splits_config).
+* `tags` - (Optional) Tags to apply to the launch. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `groups`
+
+The `groups` block supports the following arguments:
+
+* `description` - (Optional) Specifies the description of the launch group.
+* `feature` - (Required) Specifies the name of the feature that the launch is using.
+* `name` - (Required) Specifies the name of the launch group.
+* `variation` - (Required) Specifies the feature variation to use for this launch group.
+
+### `metric_monitors`
+
+The `metric_monitors` block supports the following arguments:
+
+* `metric_definition` - (Required) A block that defines the metric. [Detailed below](#metric_definition).
+
+#### `metric_definition`
+
+The `metric_definition` block supports the following arguments:
+
+* `entity_id_key` - (Required) Specifies the entity, such as a user or session, that does an action that causes a metric value to be recorded. An example is `userDetails.userID`.
+* `event_pattern` - (Required) Specifies the EventBridge event pattern that defines how the metric is recorded.
+* `name` - (Required) Specifies the name for the metric.
+* `unit_label` - (Optional) Specifies a label for the units that the metric is measuring.
+* `value_key` - (Required) Specifies the value that is tracked to produce the metric.
+
+### `scheduled_splits_config`
+
+The `scheduled_splits_config` block supports the following arguments:
+
+* `steps` - (Required) One or up to six blocks that define the traffic allocation percentages among the feature variations during each step of the launch. This also defines the start time of each step. [Detailed below](#steps).
+
+#### `steps`
+
+The `steps` block supports the following arguments:
+
+* `group_weights` - (Required) The traffic allocation percentages among the feature variations during one step of a launch. This is a set of key-value pairs. The keys are variation names. The values represent the percentage of traffic to allocate to that variation during this step. For more information, refer to the [AWS documentation for ScheduledSplitConfig groupWeights](https://docs.aws.amazon.com/cloudwatchevidently/latest/APIReference/API_ScheduledSplitConfig.html).
+* `segment_overrides` - (Required) One or up to six blocks that specify different traffic splits for one or more audience segments. A segment is a portion of your audience that shares one or more characteristics. Examples could be Chrome browser users, users in Europe, or Firefox browser users in Europe who also fit other criteria that your application collects, such as age. [Detailed below](#segment_overrides).
+
+##### `segment_overrides`
+
+* `evaluation_order` - (Required) Specifies a number indicating the order to use to evaluate segment overrides, if there are more than one. Segment overrides with lower numbers are evaluated first.
+* `segment` - (Required) The name or ARN of the segment to use.
+* `weights` - (Required) The traffic allocation percentages among the feature variations to assign to this segment. This is a set of key-value pairs.
The keys are variation names. The values represent the amount of traffic to allocate to that variation for this segment. This is expressed in thousandths of a percent, so a weight of 50000 represents 50% of traffic. + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `2m`) +* `delete` - (Default `2m`) +* `update` - (Default `2m`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the launch. +* `created_time` - The date and time that the launch is created. +* `execution` - A block that contains information about the start and end times of the launch. [Detailed below](#execution) +* `id` - The launch `name` and the project `name` or `arn` separated by a colon (`:`). +* `last_updated_time` - The date and time that the launch was most recently updated. +* `status` - The current state of the launch. Valid values are `CREATED`, `UPDATING`, `RUNNING`, `COMPLETED`, and `CANCELLED`. +* `status_reason` - If the launch was stopped, this is the string that was entered by the person who stopped the launch, to explain why it was stopped. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). +* `type` - The type of launch. + +### `execution` + +The `execution` block supports the following attributes: + +* `ended_time` - The date and time that the launch ended. +* `started_time` - The date and time that the launch started. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Launch using the `name` of the launch and `name` of the project or `arn` of the hosting CloudWatch Evidently Project separated by a `:`. For example: + +Import using the `name` of the launch and `name` of the project separated by a `:`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import using the `name` of the launch and `arn` of the project separated by a `:`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** CloudWatch Evidently Launch using the `name` of the launch and `name` of the project or `arn` of the hosting CloudWatch Evidently Project separated by a `:`. 
For example: + +Import using the `name` of the launch and `name` of the project separated by a `:`: + +```console +% terraform import aws_evidently_launch.example exampleLaunchName:exampleProjectName +``` + +Import using the `name` of the launch and `arn` of the project separated by a `:`: + +```console +% terraform import aws_evidently_launch.example exampleLaunchName:arn:aws:evidently:us-east-1:123456789012:project/exampleProjectName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/evidently_project.html.markdown b/website/docs/cdktf/python/r/evidently_project.html.markdown new file mode 100644 index 00000000000..588de516982 --- /dev/null +++ b/website/docs/cdktf/python/r/evidently_project.html.markdown @@ -0,0 +1,165 @@ +--- +subcategory: "CloudWatch Evidently" +layout: "aws" +page_title: "AWS: aws_evidently_project" +description: |- + Provides a CloudWatch Evidently Project resource. +--- + + + +# Resource: aws_evidently_project + +Provides a CloudWatch Evidently Project resource. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_project import EvidentlyProject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyProject(self, "example", + description="Example Description", + name="Example", + tags={ + "Key1": "example Project" + } + ) +``` + +### Store evaluation events in a CloudWatch Log Group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_project import EvidentlyProject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyProject(self, "example", + data_delivery=EvidentlyProjectDataDelivery( + cloudwatch_logs=EvidentlyProjectDataDeliveryCloudwatchLogs( + log_group="example-log-group-name" + ) + ), + description="Example Description", + name="Example", + tags={ + "Key1": "example Project" + } + ) +``` + +### Store evaluation events in an S3 bucket + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.evidently_project import EvidentlyProject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + EvidentlyProject(self, "example", + data_delivery=EvidentlyProjectDataDelivery( + s3_destination=EvidentlyProjectDataDeliveryS3Destination( + bucket="example-bucket-name", + prefix="example" + ) + ), + description="Example Description", + name="Example", + tags={ + "Key1": "example Project" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `data_delivery` - (Optional) A block that contains information about where Evidently is to store evaluation events for longer term storage, if you choose to do so. 
If you choose not to store these events, Evidently deletes them after using them to produce metrics and other experiment results that you can view. See below. +* `description` - (Optional) Specifies the description of the project. +* `name` - (Required) A name for the project. +* `tags` - (Optional) Tags to apply to the project. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `data_delivery` block supports the following arguments: + +~> **NOTE:** You can't specify both `cloudwatch_logs` and `s3_destination`. + +* `cloudwatch_logs` - (Optional) A block that defines the CloudWatch Log Group that stores the evaluation events. See below. +* `s3_destination` - (Optional) A block that defines the S3 bucket and prefix that stores the evaluation events. See below. + +The `cloudwatch_logs` block supports the following arguments: + +* `log_group` - (Optional) The name of the log group where the project stores evaluation events. + +The `s3_destination` block supports the following arguments: + +* `bucket` - (Optional) The name of the bucket in which Evidently stores evaluation events. +* `prefix` - (Optional) The bucket prefix in which Evidently stores evaluation events. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `2m`) +* `delete` - (Default `2m`) +* `update` - (Default `2m`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `active_experiment_count` - The number of ongoing experiments currently in the project. +* `active_launch_count` - The number of ongoing launches currently in the project. +* `arn` - The ARN of the project. +* `created_time` - The date and time that the project is created. +* `experiment_count` - The number of experiments currently in the project. This includes all experiments that have been created and not deleted, whether they are ongoing or not. +* `feature_count` - The number of features currently in the project. +* `id` - The ID has the same value as the arn of the project. +* `last_updated_time` - The date and time that the project was most recently updated. +* `launch_count` - The number of launches currently in the project. This includes all launches that have been created and not deleted, whether they are ongoing or not. +* `status` - The current state of the project. Valid values are `AVAILABLE` and `UPDATING`. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Project using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch Evidently Project using the `arn`. 
For example:
+
+```console
+% terraform import aws_evidently_project.example arn:aws:evidently:us-east-1:123456789012:project/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/evidently_segment.html.markdown b/website/docs/cdktf/python/r/evidently_segment.html.markdown
new file mode 100644
index 00000000000..e797db0219f
--- /dev/null
+++ b/website/docs/cdktf/python/r/evidently_segment.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "CloudWatch Evidently"
+layout: "aws"
+page_title: "AWS: aws_evidently_segment"
+description: |-
+  Provides a CloudWatch Evidently Segment resource.
+---
+
+
+
+# Resource: aws_evidently_segment
+
+Provides a CloudWatch Evidently Segment resource.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.evidently_segment import EvidentlySegment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EvidentlySegment(self, "example",
+            name="example",
+            pattern="{\\\"Price\\\":[{\\\"numeric\\\":[\\\">\\\",10,\\\"<=\\\",20]}]}",
+            tags={
+                "Key1": "example Segment"
+            }
+        )
+```
+
+### With JSON object in pattern
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.evidently_segment import EvidentlySegment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EvidentlySegment(self, "example",
+            name="example",
+            pattern=" {\n \"Price\": [\n {\n \"numeric\": [\">\",10,\"<=\",20]\n }\n ]\n }\n\n",
+            tags={
+                "Key1": "example Segment"
+            }
+        )
+```
+
+### With Description
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.evidently_segment import EvidentlySegment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        EvidentlySegment(self, "example",
+            description="example",
+            name="example",
+            pattern="{\\\"Price\\\":[{\\\"numeric\\\":[\\\">\\\",10,\\\"<=\\\",20]}]}"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional, Forces new resource) Specifies the description of the segment.
+* `name` - (Required, Forces new resource) A name for the segment.
+* `pattern` - (Required, Forces new resource) The pattern to use for the segment. For more information about pattern syntax, see [Segment rule pattern syntax](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Evidently-segments.html#CloudWatch-Evidently-segments-syntax). A sketch of building this string programmatically follows this list.
+* `tags` - (Optional) Tags to apply to the segment. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
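+
+One way to avoid hand-writing the escaped JSON shown in the examples above is to build the rule as a native Python dictionary and serialize it with the standard library's `json.dumps`; a minimal sketch (the variable name is illustrative), equivalent in value to the rule used throughout this page:
+
+```python
+import json
+
+# Serialize the segment rule instead of escaping quotes by hand.
+# Matches the examples above: 10 < Price <= 20.
+price_pattern = json.dumps({"Price": [{"numeric": [">", 10, "<=", 20]}]})
+```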
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the segment. +* `created_time` - The date and time that the segment is created. +* `experiment_count` - The number of experiments that this segment is used in. This count includes all current experiments, not just those that are currently running. +* `id` - The ID has the same value as the ARN of the segment. +* `last_updated_time` - The date and time that this segment was most recently updated. +* `launch_count` - The number of launches that this segment is used in. This count includes all current launches, not just those that are currently running. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Segment using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch Evidently Segment using the `arn`. For example: + +```console +% terraform import aws_evidently_segment.example arn:aws:evidently:us-west-2:123456789012:segment/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/finspace_kx_cluster.html.markdown b/website/docs/cdktf/python/r/finspace_kx_cluster.html.markdown new file mode 100644 index 00000000000..d54791a6972 --- /dev/null +++ b/website/docs/cdktf/python/r/finspace_kx_cluster.html.markdown @@ -0,0 +1,213 @@ +--- +subcategory: "FinSpace" +layout: "aws" +page_title: "AWS: aws_finspace_kx_cluster" +description: |- + Terraform resource for managing an AWS FinSpace Kx Cluster. +--- + + + +# Resource: aws_finspace_kx_cluster + +Terraform resource for managing an AWS FinSpace Kx Cluster. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.finspace_kx_cluster import FinspaceKxCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FinspaceKxCluster(self, "example",
+            availability_zone_id="use1-az2",
+            az_mode="SINGLE",
+            cache_storage_configurations=[FinspaceKxClusterCacheStorageConfigurations(
+                size=1200,
+                type="CACHE_1000"
+            )
+            ],
+            capacity_configuration=FinspaceKxClusterCapacityConfiguration(
+                node_count=2,
+                node_type="kx.s.2xlarge"
+            ),
+            code=FinspaceKxClusterCode(
+                s3_bucket=test.id,
+                s3_key=object.key
+            ),
+            database=[FinspaceKxClusterDatabase(
+                cache_configurations=[{
+                    "cache_type": "CACHE_1000",
+                    "db_paths": ["/"]
+                }
+                ],
+                database_name=Token.as_string(aws_finspace_kx_database_example.name)
+            )
+            ],
+            environment_id=Token.as_string(aws_finspace_kx_environment_example.id),
+            name="my-tf-kx-cluster",
+            release_label="1.0",
+            type="HDB",
+            vpc_configuration=FinspaceKxClusterVpcConfiguration(
+                ip_address_type="IP_V4",
+                security_group_ids=[Token.as_string(aws_security_group_example.id)],
+                subnet_ids=[Token.as_string(aws_subnet_example.id)],
+                vpc_id=Token.as_string(aws_vpc_test.id)
+            )
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `az_mode` - (Required) The number of availability zones you want to assign per cluster. This can be one of the following:
+    * SINGLE - Assigns one availability zone per cluster.
+    * MULTI - Assigns all the availability zones per cluster.
+* `capacity_configuration` - (Required) Structure for the metadata of a cluster. Includes information like the CPUs needed, memory of instances, and number of instances. See [capacity_configuration](#capacity_configuration).
+* `environment_id` - (Required) Unique identifier for the KX environment.
+* `name` - (Required) Unique name for the cluster that you want to create.
+* `release_label` - (Required) Version of FinSpace Managed kdb to run.
+* `type` - (Required) Type of KDB database. The following types are available:
+    * HDB - Historical Database. The data is only accessible with read-only permissions from one of the FinSpace managed KX databases mounted to the cluster.
+    * RDB - Realtime Database. This type of database captures all the data from a ticker plant and stores it in memory until the end of day, after which it writes all of its data to a disk and reloads the HDB. This cluster type requires local storage for temporary storage of data during the savedown process. If you specify this field in your request, you must provide the `savedownStorageConfiguration` parameter.
+    * GATEWAY - A gateway cluster allows you to access data across processes in kdb systems. It allows you to create your own routing logic using the initialization scripts and custom code. This type of cluster does not require a writable local storage.
+* `vpc_configuration` - (Required) Configuration details about the network where the Privatelink endpoint of the cluster resides. See [vpc_configuration](#vpc_configuration).
+
+The following arguments are optional:
+
+* `auto_scaling_configuration` - (Optional) Configuration based on which FinSpace will scale in or scale out nodes in your cluster. See [auto_scaling_configuration](#auto_scaling_configuration).
+* `availability_zone_id` - (Optional) The availability zone identifiers for the requested regions. Required when `az_mode` is set to SINGLE. 
+* `cache_storage_configurations` - (Optional) Configurations for a read-only cache storage associated with a cluster. This cache will be stored as an FSx for Lustre file system that reads from the S3 store. See [cache_storage_configuration](#cache_storage_configuration).
+* `code` - (Optional) Details of the custom code that you want to use inside a cluster when analyzing data. Consists of the S3 source bucket, location, object version, and the relative path from where the custom code is loaded into the cluster. See [code](#code).
+* `command_line_arguments` - (Optional) List of key-value pairs to make available inside the cluster.
+* `database` - (Optional) KX database that will be available for querying. Defined below.
+* `description` - (Optional) Description of the cluster.
+* `execution_role` - (Optional) An IAM role that defines a set of permissions associated with a cluster. These permissions are assumed when a cluster attempts to access another cluster.
+* `initialization_script` - (Optional) Path to the Q program that will be run at launch of a cluster. This is a relative path within the .zip file that contains the custom code, which will be loaded on the cluster. It must include the file name itself. For example, somedir/init.q.
+* `savedown_storage_configuration` - (Optional) Size and type of the temporary storage that is used to hold data during the savedown process. This parameter is required when you choose `type` as RDB. All the data written to this storage space is lost when the cluster node is restarted. See [savedown_storage_configuration](#savedown_storage_configuration).
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### auto_scaling_configuration
+
+The auto_scaling_configuration block supports the following arguments:
+
+* `auto_scaling_metric` - (Required) Metric your cluster will track in order to scale in and out. For example, CPU_UTILIZATION_PERCENTAGE is the average CPU usage across all nodes in a cluster.
+* `min_node_count` - (Required) Lowest number of nodes to scale. Must be at least 1 and less than the `max_node_count`. If nodes in the cluster belong to multiple availability zones, then `min_node_count` must be at least 3.
+* `max_node_count` - (Required) Highest number of nodes to scale. Cannot be greater than 5.
+* `metric_target` - (Required) Desired value of the chosen `auto_scaling_metric`. When the metric drops below this value, the cluster will scale in; when it goes above this value, the cluster will scale out. Can be set between 0 and 100 percent.
+* `scale_in_cooldown_seconds` - (Required) Duration in seconds that FinSpace will wait after a scale in event before initiating another scaling event.
+* `scale_out_cooldown_seconds` - (Required) Duration in seconds that FinSpace will wait after a scale out event before initiating another scaling event.
+
+### capacity_configuration
+
+The capacity_configuration block supports the following arguments:
+
+* `node_type` - (Required) Determines the hardware of the host computer used for your cluster instance. Each node type offers different memory and storage capabilities. Choose a node type based on the requirements of the application or software that you plan to run on your instance.
+
+    You can only specify one of the following values:
+    * kx.s.large – The node type with a configuration of 12 GiB memory and 2 vCPUs. 
+    * kx.s.xlarge – The node type with a configuration of 27 GiB memory and 4 vCPUs.
+    * kx.s.2xlarge – The node type with a configuration of 54 GiB memory and 8 vCPUs.
+    * kx.s.4xlarge – The node type with a configuration of 108 GiB memory and 16 vCPUs.
+    * kx.s.8xlarge – The node type with a configuration of 216 GiB memory and 32 vCPUs.
+    * kx.s.16xlarge – The node type with a configuration of 432 GiB memory and 64 vCPUs.
+    * kx.s.32xlarge – The node type with a configuration of 864 GiB memory and 128 vCPUs.
+* `node_count` - (Required) Number of instances running in a cluster. Must be at least 1 and at most 5.
+
+### cache_storage_configuration
+
+The cache_storage_configuration block supports the following arguments:
+
+* `type` - (Required) Type of cache storage. The valid values are:
+    * CACHE_1000 - This type provides at least 1000 MB/s disk access throughput.
+* `size` - (Required) Size of cache in Gigabytes.
+
+### code
+
+The code block supports the following arguments:
+
+* `s3_bucket` - (Required) Unique name for the S3 bucket.
+* `s3_key` - (Required) Full S3 path (excluding bucket) to the .zip file that contains the code to be loaded onto the cluster when it’s started.
+* `s3_object_version` - (Optional) Version of an S3 Object.
+
+### database
+
+The database block supports the following arguments:
+
+* `database_name` - (Required) Name of the KX database.
+* `cache_configurations` - (Optional) Configuration details for the disk cache to increase performance reading from a KX database mounted to the cluster. See [cache_configurations](#cache_configurations).
+* `changeset_id` - (Optional) A unique identifier of the changeset that is associated with the cluster.
+
+#### cache_configurations
+
+The cache_configurations block supports the following arguments:
+
+* `cache_type` - (Required) Type of disk cache.
+* `db_paths` - (Optional) Paths within the database to cache.
+
+### savedown_storage_configuration
+
+The savedown_storage_configuration block supports the following arguments:
+
+* `type` - (Required) Type of writeable storage space for temporarily storing your savedown data. The valid values are:
+    * SDS01 - This type represents 3000 IOPS and an io2 EBS volume type.
+* `size` - (Required) Size of temporary storage in bytes.
+
+### vpc_configuration
+
+The vpc_configuration block supports the following arguments:
+
+* `vpc_id` - (Required) Identifier of the VPC endpoint.
+* `security_group_ids` - (Required) Unique identifier of the VPC security group applied to the VPC endpoint ENI for the cluster.
+* `subnet_ids` - (Required) Identifier of the subnet that the Privatelink VPC endpoint uses to connect to the cluster.
+* `ip_address_type` - (Required) IP address type for cluster network configuration parameters. The following type is available: IP_V4 - IP address version 4.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) identifier of the KX cluster.
+* `created_timestamp` - Timestamp at which the cluster is created in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `id` - A comma-delimited string joining environment ID and cluster name.
+* `last_modified_timestamp` - Last timestamp at which the cluster was updated in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000. 
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `2m`) +* `delete` - (Default `40m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx Cluster using the `id` (environment ID and cluster name, comma-delimited). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an AWS FinSpace Kx Cluster using the `id` (environment ID and cluster name, comma-delimited). For example: + +```console +% terraform import aws_finspace_kx_cluster.example n3ceo7wqxoxcti5tujqwzs,my-tf-kx-cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/finspace_kx_database.html.markdown b/website/docs/cdktf/python/r/finspace_kx_database.html.markdown new file mode 100644 index 00000000000..65e25d60079 --- /dev/null +++ b/website/docs/cdktf/python/r/finspace_kx_database.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "FinSpace" +layout: "aws" +page_title: "AWS: aws_finspace_kx_database" +description: |- + Terraform resource for managing an AWS FinSpace Kx Database. +--- + + + +# Resource: aws_finspace_kx_database + +Terraform resource for managing an AWS FinSpace Kx Database. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.finspace_kx_database import FinspaceKxDatabase +from imports.aws.finspace_kx_environment import FinspaceKxEnvironment +from imports.aws.kms_key import KmsKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = KmsKey(self, "example", + deletion_window_in_days=7, + description="Example KMS Key" + ) + aws_finspace_kx_environment_example = FinspaceKxEnvironment(self, "example_1", + kms_key_id=example.arn, + name="my-tf-kx-environment" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_finspace_kx_environment_example.override_logical_id("example") + aws_finspace_kx_database_example = FinspaceKxDatabase(self, "example_2", + description="Example database description", + environment_id=Token.as_string(aws_finspace_kx_environment_example.id), + name="my-tf-kx-database" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_finspace_kx_database_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `environment_id` - (Required) Unique identifier for the KX environment. +* `name` - (Required) Name of the KX database. 
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the KX database.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) identifier of the KX database.
+* `created_timestamp` - Timestamp at which the database is created in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `id` - A comma-delimited string joining environment ID and database name.
+* `last_modified_timestamp` - Last timestamp at which the database was updated in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx Database using the `id` (environment ID and database name, comma-delimited). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an AWS FinSpace Kx Database using the `id` (environment ID and database name, comma-delimited). For example:
+
+```console
+% terraform import aws_finspace_kx_database.example n3ceo7wqxoxcti5tujqwzs,my-tf-kx-database
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/finspace_kx_environment.html.markdown b/website/docs/cdktf/python/r/finspace_kx_environment.html.markdown
new file mode 100644
index 00000000000..3597d2d43b5
--- /dev/null
+++ b/website/docs/cdktf/python/r/finspace_kx_environment.html.markdown
@@ -0,0 +1,153 @@
+---
+subcategory: "FinSpace"
+layout: "aws"
+page_title: "AWS: aws_finspace_kx_environment"
+description: |-
+  Terraform resource for managing an AWS FinSpace Kx Environment.
+---
+
+
+
+# Resource: aws_finspace_kx_environment
+
+Terraform resource for managing an AWS FinSpace Kx Environment.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.finspace_kx_environment import FinspaceKxEnvironment +from imports.aws.kms_key import KmsKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = KmsKey(self, "example", + deletion_window_in_days=7, + description="Sample KMS Key" + ) + aws_finspace_kx_environment_example = FinspaceKxEnvironment(self, "example_1", + kms_key_id=example.arn, + name="my-tf-kx-environment" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_finspace_kx_environment_example.override_logical_id("example") +``` + +### With Network Setup + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ec2_transit_gateway import Ec2TransitGateway +from imports.aws.finspace_kx_environment import FinspaceKxEnvironment +from imports.aws.kms_key import KmsKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Ec2TransitGateway(self, "example", + description="example" + ) + aws_kms_key_example = KmsKey(self, "example_1", + deletion_window_in_days=7, + description="Sample KMS Key" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kms_key_example.override_logical_id("example") + FinspaceKxEnvironment(self, "example_env", + custom_dns_configuration=[FinspaceKxEnvironmentCustomDnsConfiguration( + custom_dns_server_ip="10.0.0.76", + custom_dns_server_name="example.finspace.amazonaws.com" + ) + ], + description="Environment description", + kms_key_id=Token.as_string(aws_kms_key_example.arn), + name="my-tf-kx-environment", + transit_gateway_configuration=FinspaceKxEnvironmentTransitGatewayConfiguration( + routable_cidr_space="100.64.0.0/26", + transit_gateway_id=example.id + ) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the KX environment that you want to create. +* `kms_key_id` - (Required) KMS key ID to encrypt your data in the FinSpace environment. + +The following arguments are optional: + +* `custom_dns_configuration` - (Optional) List of DNS server name and server IP. This is used to set up Route-53 outbound resolvers. Defined below. +* `description` - (Optional) Description for the KX environment. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `transit_gateway_configuration` - (Optional) Transit gateway and network configuration that is used to connect the KX environment to an internal network. Defined below. + +### custom_dns_configuration + +The custom_dns_configuration block supports the following arguments: + +* `custom_dns_server_ip` - (Required) IP address of the DNS server. +* `custom_dns_server_name` - (Required) Name of the DNS server. + +### transit_gateway_configuration + +The transit_gateway_configuration block supports the following arguments: + +* `routable_cidr_space` - (Required) Routing CIDR on behalf of KX environment. 
It can be any `/26` range in the `100.64.0.0` CIDR space. Once provided, it will be added to the customer's transit gateway routing table so that traffic can be routed to the KX network.
+* `transit_gateway_id` - (Required) Identifier of the transit gateway created by the customer to connect outbound traffic from the KX network to your internal network.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) identifier of the KX environment.
+* `availability_zones` - AWS Availability Zone IDs that this environment is available in. Important when selecting VPC subnets to use in cluster creation.
+* `created_timestamp` - Timestamp at which the environment is created in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `id` - Unique identifier for the KX environment.
+* `infrastructure_account_id` - Unique identifier for the AWS environment infrastructure account.
+* `last_modified_timestamp` - Last timestamp at which the environment was updated in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `status` - Status of environment creation.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx Environment using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an AWS FinSpace Kx Environment using the `id`. For example:
+
+```console
+% terraform import aws_finspace_kx_environment.example n3ceo7wqxoxcti5tujqwzs
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/finspace_kx_user.html.markdown b/website/docs/cdktf/python/r/finspace_kx_user.html.markdown
new file mode 100644
index 00000000000..939fbb04b0e
--- /dev/null
+++ b/website/docs/cdktf/python/r/finspace_kx_user.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "FinSpace"
+layout: "aws"
+page_title: "AWS: aws_finspace_kx_user"
+description: |-
+  Terraform resource for managing an AWS FinSpace Kx User.
+---
+
+
+
+# Resource: aws_finspace_kx_user
+
+Terraform resource for managing an AWS FinSpace Kx User.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.finspace_kx_environment import FinspaceKxEnvironment +from imports.aws.finspace_kx_user import FinspaceKxUser +from imports.aws.iam_role import IamRole +from imports.aws.kms_key import KmsKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = IamRole(self, "example", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "ec2.amazonaws.com" + }, + "Sid": "" + } + ], + "Version": "2012-10-17" + })), + name="example-role" + ) + aws_kms_key_example = KmsKey(self, "example_1", + deletion_window_in_days=7, + description="Example KMS Key" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kms_key_example.override_logical_id("example") + aws_finspace_kx_environment_example = FinspaceKxEnvironment(self, "example_2", + kms_key_id=Token.as_string(aws_kms_key_example.arn), + name="my-tf-kx-environment" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_finspace_kx_environment_example.override_logical_id("example") + aws_finspace_kx_user_example = FinspaceKxUser(self, "example_3", + environment_id=Token.as_string(aws_finspace_kx_environment_example.id), + iam_role=example.arn, + name="my-tf-kx-user" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_finspace_kx_user_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) A unique identifier for the user. +* `environment_id` - (Required) Unique identifier for the KX environment. +* `iam_role` - (Required) IAM role ARN to be associated with the user. + +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) identifier of the KX user. +* `id` - A comma-delimited string joining environment ID and user name. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx User using the `id` (environment ID and user name, comma-delimited). 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an AWS FinSpace Kx User using the `id` (environment ID and user name, comma-delimited). For example: + +```console +% terraform import aws_finspace_kx_user.example n3ceo7wqxoxcti5tujqwzs,my-tf-kx-user +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/fis_experiment_template.html.markdown b/website/docs/cdktf/python/r/fis_experiment_template.html.markdown new file mode 100644 index 00000000000..c5103a2e0c7 --- /dev/null +++ b/website/docs/cdktf/python/r/fis_experiment_template.html.markdown @@ -0,0 +1,168 @@ +--- +subcategory: "FIS (Fault Injection Simulator)" +layout: "aws" +page_title: "AWS: aws_fis_experiment_template" +description: |- + Provides an FIS Experiment Template. +--- + + + +# Resource: aws_fis_experiment_template + +Provides an FIS Experiment Template, which can be used to run an experiment. +An experiment template contains one or more actions to run on specified targets during an experiment. +It also contains the stop conditions that prevent the experiment from going out of bounds. +See [Amazon Fault Injection Simulator](https://docs.aws.amazon.com/fis/index.html) +for more information. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.fis_experiment_template import FisExperimentTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + FisExperimentTemplate(self, "example", + action=[FisExperimentTemplateAction( + action_id="aws:ec2:terminate-instances", + name="example-action", + target=FisExperimentTemplateActionTarget( + key="Instances", + value="example-target" + ) + ) + ], + description="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + stop_condition=[FisExperimentTemplateStopCondition( + source="none" + ) + ], + target=[FisExperimentTemplateTarget( + name="example-target", + resource_tag=[FisExperimentTemplateTargetResourceTag( + key="env", + value="example" + ) + ], + resource_type="aws:ec2:instance", + selection_mode="COUNT(1)" + ) + ] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `action` - (Required) Action to be performed during an experiment. See below. +* `description` - (Required) Description for the experiment template. +* `role_arn` - (Required) ARN of an IAM role that grants the AWS FIS service permission to perform service actions on your behalf. +* `stop_condition` - (Required) When an ongoing experiment should be stopped. See below. + +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `target` - (Optional) Target of an action. See below. +* `log_configuration` - (Optional) The configuration for experiment logging. See below. 
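+
+As a hedged illustration of the optional `log_configuration` argument described below: the struct names follow the naming convention of the generated bindings (they do not appear in the example above), and both the schema version and the log group ARN are placeholder assumptions rather than values from this page. A sketch of a CloudWatch Logs variant, passed as `log_configuration` on `FisExperimentTemplate`:
+
+```python
+# A sketch only: schema version and ARN below are assumptions, not documented values.
+log_configuration = FisExperimentTemplateLogConfiguration(
+    log_schema_version=2,
+    cloudwatch_logs_configuration=FisExperimentTemplateLogConfigurationCloudwatchLogsConfiguration(
+        log_group_arn="arn:aws:logs:us-east-1:123456789012:log-group:fis-logs:*"
+    )
+)
+```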
+
+### `action`
+
+* `action_id` - (Required) ID of the action. To find out what actions are supported, see [AWS FIS actions reference](https://docs.aws.amazon.com/fis/latest/userguide/fis-actions-reference.html).
+* `name` - (Required) Friendly name of the action.
+* `description` - (Optional) Description of the action.
+* `parameter` - (Optional) Parameter(s) for the action, if applicable. See below.
+* `start_after` - (Optional) Set of action names that must complete before this action can be executed.
+* `target` - (Optional) Action's target, if applicable. See below.
+
+#### `parameter`
+
+* `key` - (Required) Parameter name.
+* `value` - (Required) Parameter value.
+
+For a list of parameters supported by each action, see [AWS FIS actions reference](https://docs.aws.amazon.com/fis/latest/userguide/fis-actions-reference.html).
+
+#### `target` (`action.*.target`)
+
+* `key` - (Required) Target type. Valid values are `Cluster` (EKS Cluster), `Clusters` (ECS Clusters), `DBInstances` (RDS DB Instances), `Instances` (EC2 Instances), `Nodegroups` (EKS Node groups), `Roles` (IAM Roles), `SpotInstances` (EC2 Spot Instances), `Subnets` (VPC Subnets), `Volumes` (EBS Volumes), `Pods` (EKS Pods), `Tasks` (ECS Tasks). See the [documentation](https://docs.aws.amazon.com/fis/latest/userguide/actions.html#action-targets) for more details.
+* `value` - (Required) Target name, referencing a corresponding target.
+
+### `stop_condition`
+
+* `source` - (Required) Source of the condition. One of `none`, `aws:cloudwatch:alarm`.
+* `value` - (Optional) ARN of the CloudWatch alarm. Required if the source is a CloudWatch alarm.
+
+### `target`
+
+* `name` - (Required) Friendly name given to the target.
+* `resource_type` - (Required) AWS resource type. The resource type must be supported for the specified action. To find out what resource types are supported, see [Targets for AWS FIS](https://docs.aws.amazon.com/fis/latest/userguide/targets.html#resource-types).
+* `selection_mode` - (Required) Scopes the identified resources. Valid values are `ALL` (all identified resources), `COUNT(n)` (randomly select `n` of the identified resources), `PERCENT(n)` (randomly select `n` percent of the identified resources).
+* `filter` - (Optional) Filter(s) for the target. Filters can be used to select resources based on specific attributes returned by the respective describe action of the resource type. For more information, see [Targets for AWS FIS](https://docs.aws.amazon.com/fis/latest/userguide/targets.html#target-filters). See below; a combined sketch follows these blocks.
+* `resource_arns` - (Optional) Set of ARNs of the resources to target with an action. Conflicts with `resource_tag`.
+* `resource_tag` - (Optional) Tag(s) the resources need to have to be considered a valid target for an action. Conflicts with `resource_arns`. See below.
+* `parameters` - (Optional) The resource type parameters.
+
+~> **NOTE:** The `target` configuration block requires either `resource_arns` or `resource_tag`.
+
+#### `filter`
+
+* `path` - (Required) Attribute path for the filter.
+* `values` - (Required) Set of attribute values for the filter.
+
+~> **NOTE:** Values specified in a `filter` are joined with an `OR` clause, while values across multiple `filter` blocks are joined with an `AND` clause. For more information, see [Targets for AWS FIS](https://docs.aws.amazon.com/fis/latest/userguide/targets.html#target-filters).
+
+#### `resource_tag`
+
+* `key` - (Required) Tag key.
+* `value` - (Required) Tag value. 
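+
+A hedged sketch combining the `resource_tag` and `filter` blocks above. `FisExperimentTemplateTargetResourceTag` appears in the example at the top of this page; `FisExperimentTemplateTargetFilter` is assumed from the binding naming convention, and the `State.Name`/`running` attribute path is taken from the AWS FIS target-filter documentation:
+
+```python
+# A sketch only: targets instances tagged env=example that are currently running.
+target = FisExperimentTemplateTarget(
+    name="example-target",
+    resource_type="aws:ec2:instance",
+    selection_mode="ALL",
+    resource_tag=[FisExperimentTemplateTargetResourceTag(
+        key="env",
+        value="example"
+    )
+    ],
+    filter=[FisExperimentTemplateTargetFilter(
+        path="State.Name",
+        values=["running"]
+    )
+    ]
+)
+```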
+
+### `log_configuration`
+
+* `log_schema_version` - (Required) The schema version. See [documentation](https://docs.aws.amazon.com/fis/latest/userguide/monitoring-logging.html#experiment-log-schema) for the list of schema versions.
+* `cloudwatch_logs_configuration` - (Optional) The configuration for experiment logging to Amazon CloudWatch Logs. See below.
+* `s3_configuration` - (Optional) The configuration for experiment logging to Amazon S3. See below.
+
+#### `cloudwatch_logs_configuration`
+
+* `log_group_arn` - (Required) The Amazon Resource Name (ARN) of the destination Amazon CloudWatch Logs log group.
+
+#### `s3_configuration`
+
+* `bucket_name` - (Required) The name of the destination bucket.
+* `prefix` - (Optional) The bucket prefix.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Experiment Template ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FIS Experiment Templates using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FIS Experiment Templates using the `id`. For example:
+
+```console
+% terraform import aws_fis_experiment_template.template EXT123AbCdEfGhIjK
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/flow_log.html.markdown b/website/docs/cdktf/python/r/flow_log.html.markdown
new file mode 100644
index 00000000000..d5e06625e01
--- /dev/null
+++ b/website/docs/cdktf/python/r/flow_log.html.markdown
@@ -0,0 +1,291 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_flow_log"
+description: |-
+  Provides a VPC/Subnet/ENI Flow Log
+---
+
+
+
+# Resource: aws_flow_log
+
+Provides a VPC/Subnet/ENI/Transit Gateway/Transit Gateway Attachment Flow Log to capture IP traffic for a specific network
+interface, subnet, or VPC. Logs are sent to a CloudWatch Log Group, an S3 Bucket, or Amazon Kinesis Data Firehose.
+
+## Example Usage
+
+### CloudWatch Logging
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.cloudwatch_log_group import CloudwatchLogGroup +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.flow_log import FlowLog +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CloudwatchLogGroup(self, "example", + name="example" + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["vpc-flow-logs.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_2", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents", "logs:DescribeLogGroups", "logs:DescribeLogStreams" + ], + effect="Allow", + resources=["*"] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_iam_role_example = IamRole(self, "example_3", + assume_role_policy=Token.as_string(assume_role.json), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_iam_role_policy_example = IamRolePolicy(self, "example_4", + name="example", + policy=Token.as_string(data_aws_iam_policy_document_example.json), + role=Token.as_string(aws_iam_role_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_example.override_logical_id("example") + aws_flow_log_example = FlowLog(self, "example_5", + iam_role_arn=Token.as_string(aws_iam_role_example.arn), + log_destination=example.arn, + traffic_type="ALL", + vpc_id=Token.as_string(aws_vpc_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_flow_log_example.override_logical_id("example") +``` + +### Amazon Kinesis Data Firehose logging + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.flow_log import FlowLog +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example" + ) + aws_s3_bucket_acl_example = S3BucketAcl(self, "example_1", + acl="private", + bucket=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_s3_bucket_acl_example.override_logical_id("example")
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["firehose.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_3",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:CreateLogDelivery", "logs:DeleteLogDelivery", "logs:ListLogDeliveries", "logs:GetLogDelivery", "firehose:TagDeliveryStream"
+                ],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_iam_role_example = IamRole(self, "example_4",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="firehose_test_role"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_example.override_logical_id("example")
+        aws_iam_role_policy_example = IamRolePolicy(self, "example_5",
+            name="test",
+            policy=Token.as_string(data_aws_iam_policy_document_example.json),
+            role=Token.as_string(aws_iam_role_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_example.override_logical_id("example")
+        aws_kinesis_firehose_delivery_stream_example = KinesisFirehoseDeliveryStream(self, "example_6",
+            destination="extended_s3",
+            extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration(
+                bucket_arn=example.arn,
+                role_arn=Token.as_string(aws_iam_role_example.arn)
+            ),
+            name="kinesis_firehose_test",
+            tags={
+                "LogDeliveryEnabled": "true"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kinesis_firehose_delivery_stream_example.override_logical_id("example")
+        aws_flow_log_example = FlowLog(self, "example_7",
+            log_destination=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn),
+            log_destination_type="kinesis-data-firehose",
+            traffic_type="ALL",
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_flow_log_example.override_logical_id("example")
+```
+
+### S3 Logging
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.flow_log import FlowLog
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_flow_log_example = FlowLog(self, "example_1",
+            log_destination=example.arn,
+            log_destination_type="s3",
+            traffic_type="ALL",
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_flow_log_example.override_logical_id("example")
+```
+
+### S3 Logging in Apache Parquet format with per-hour partitions
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.flow_log import FlowLog
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_flow_log_example = FlowLog(self, "example_1",
+            destination_options=FlowLogDestinationOptions(
+                file_format="parquet",
+                per_hour_partition=True
+            ),
+            log_destination=example.arn,
+            log_destination_type="s3",
+            traffic_type="ALL",
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_flow_log_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+~> **NOTE:** One of `eni_id`, `subnet_id`, `transit_gateway_id`, `transit_gateway_attachment_id`, or `vpc_id` must be specified.
+
+This resource supports the following arguments:
+
+* `traffic_type` - (Required) The type of traffic to capture. Valid values: `ACCEPT`, `REJECT`, `ALL`.
+* `deliver_cross_account_role` - (Optional) ARN of the IAM role that allows Amazon EC2 to publish flow logs across accounts.
+* `eni_id` - (Optional) Elastic Network Interface ID to attach to.
+* `iam_role_arn` - (Optional) The ARN for the IAM role that's used to post flow logs to a CloudWatch Logs log group.
+* `log_destination_type` - (Optional) The type of the logging destination. Valid values: `cloud-watch-logs`, `s3`, `kinesis-data-firehose`. Default: `cloud-watch-logs`.
+* `log_destination` - (Optional) The ARN of the logging destination. Either `log_destination` or `log_group_name` must be set.
+* `log_group_name` - (Optional) **Deprecated:** Use `log_destination` instead. The name of the CloudWatch log group. Either `log_group_name` or `log_destination` must be set.
+* `subnet_id` - (Optional) Subnet ID to attach to.
+* `transit_gateway_id` - (Optional) Transit Gateway ID to attach to.
+* `transit_gateway_attachment_id` - (Optional) Transit Gateway Attachment ID to attach to.
+* `vpc_id` - (Optional) VPC ID to attach to.
+* `log_format` - (Optional) The fields to include in the flow log record, in the order in which they should appear.
+* `max_aggregation_interval` - (Optional) The maximum interval of time during which a flow of packets is captured and aggregated into a flow log record. Valid values: `60` seconds (1 minute) or `600` seconds (10 minutes). Default: `600`. When `transit_gateway_id` or `transit_gateway_attachment_id` is specified, `max_aggregation_interval` *must* be `60` seconds (1 minute).
+* `destination_options` - (Optional) Describes the destination options for a flow log. More details below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### destination_options
+
+Describes the destination options for a flow log.
+
+* `file_format` - (Optional) The format for the flow log. Default value: `plain-text`. Valid values: `plain-text`, `parquet`.
+* `hive_compatible_partitions` - (Optional) Indicates whether to use Hive-compatible prefixes for flow logs stored in Amazon S3. Default value: `false`.
+* `per_hour_partition` - (Optional) Indicates whether to partition the flow log per hour. This reduces the cost and response time for queries. Default value: `false`.
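+
+As an illustrative sketch only (not part of the generated examples), the three options above can be combined to get Hive-compatible, per-hour Parquet partitions in S3. The stack name, bucket name, and VPC ID below are placeholders, and the struct import path assumes bindings generated by `cdktf get`:
+
+```python
+# Hypothetical sketch: Parquet flow logs with Hive-compatible,
+# per-hour partition prefixes in S3.
+from cdktf import TerraformStack
+from imports.aws.flow_log import FlowLog, FlowLogDestinationOptions
+from imports.aws.s3_bucket import S3Bucket
+class HivePartitionedFlowLog(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bucket = S3Bucket(self, "bucket",
+            bucket="example-flow-logs"  # placeholder bucket name
+        )
+        FlowLog(self, "example",
+            destination_options=FlowLogDestinationOptions(
+                file_format="parquet",
+                hive_compatible_partitions=True,  # Hive-style key=value prefixes
+                per_hour_partition=True
+            ),
+            log_destination=bucket.arn,
+            log_destination_type="s3",
+            traffic_type="ALL",
+            vpc_id="vpc-0123456789abcdef0"  # placeholder VPC ID
+        )
+```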
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Flow Log ID.
+* `arn` - The ARN of the Flow Log.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Flow Logs using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Flow Logs using the `id`. For example:
+
+```console
+% terraform import aws_flow_log.test_flow_log fl-1a2b3c4d
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fms_admin_account.html.markdown b/website/docs/cdktf/python/r/fms_admin_account.html.markdown
new file mode 100644
index 00000000000..7afcb113271
--- /dev/null
+++ b/website/docs/cdktf/python/r/fms_admin_account.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "FMS (Firewall Manager)"
+layout: "aws"
+page_title: "AWS: aws_fms_admin_account"
+description: |-
+  Provides a resource to associate/disassociate an AWS Firewall Manager administrator account
+---
+
+
+# Resource: aws_fms_admin_account
+
+Provides a resource to associate/disassociate an AWS Firewall Manager administrator account. This operation must be performed in the `us-east-1` region.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fms_admin_account import FmsAdminAccount
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FmsAdminAccount(self, "example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Optional) The AWS account ID to associate with AWS Firewall Manager as the AWS Firewall Manager administrator account. This can be an AWS Organizations master account or a member account. Defaults to the current account. Must be configured to perform drift detection.
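+
+As an illustrative sketch (not part of the generated docs), an explicit administrator account can be pinned via `account_id` inside a stack such as `MyConvertedCode` above; the account ID below is a placeholder:
+
+```python
+# Hypothetical sketch: pin the Firewall Manager administrator account
+# instead of defaulting to the current account.
+FmsAdminAccount(self, "example",
+    account_id="123456789012"  # placeholder account ID
+)
+```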
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS account ID of the AWS Firewall Manager administrator account.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Firewall Manager administrator account association using the account ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Firewall Manager administrator account association using the account ID. For example:
+
+```console
+% terraform import aws_fms_admin_account.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fms_policy.html.markdown b/website/docs/cdktf/python/r/fms_policy.html.markdown
new file mode 100644
index 00000000000..86b23d92ea4
--- /dev/null
+++ b/website/docs/cdktf/python/r/fms_policy.html.markdown
@@ -0,0 +1,145 @@
+---
+subcategory: "FMS (Firewall Manager)"
+layout: "aws"
+page_title: "AWS: aws_fms_policy"
+description: |-
+  Provides a resource to create an AWS Firewall Manager policy
+---
+
+
+# Resource: aws_fms_policy
+
+Provides a resource to create an AWS Firewall Manager policy. You need to be using AWS Organizations and have enabled the Firewall Manager administrator account.
+
+~> **NOTE:** Due to limitations with testing, we provide it as best effort. If you find it useful, and have the ability to help test or notice issues, consider reaching out to us on [GitHub](https://github.com/hashicorp/terraform-provider-aws).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fms_policy import FmsPolicy
+from imports.aws.wafregional_rule_group import WafregionalRuleGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = WafregionalRuleGroup(self, "example",
+            metric_name="WAFRuleGroupExample",
+            name="WAF-Rule-Group-Example"
+        )
+        aws_fms_policy_example = FmsPolicy(self, "example_1",
+            exclude_resource_tags=False,
+            name="FMS-Policy-Example",
+            remediation_enabled=False,
+            resource_type="AWS::ElasticLoadBalancingV2::LoadBalancer",
+            security_service_policy_data=FmsPolicySecurityServicePolicyData(
+                managed_service_data=Token.as_string(
+                    Fn.jsonencode({
+                        "defaultAction": {
+                            "type": "BLOCK"
+                        },
+                        "overrideCustomerWebACLAssociation": False,
+                        "ruleGroups": [{
+                            "id": example.id,
+                            "overrideAction": {
+                                "type": "COUNT"
+                            }
+                        }
+                        ],
+                        "type": "WAF"
+                    })),
+                type="WAF"
+            ),
+            tags={
+                "Name": "example-fms-policy"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fms_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required, Forces new resource) The friendly name of the AWS Firewall Manager Policy.
+* `delete_all_policy_resources` - (Optional) If true, the request will also perform a clean-up process. Defaults to `true`. More information can be found in the [AWS Firewall Manager delete policy documentation](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_DeletePolicy.html).
+* `delete_unused_fm_managed_resources` - (Optional) If true, Firewall Manager will automatically remove protections from resources that leave the policy scope. Defaults to `false`. 
More information can be found in the [AWS Firewall Manager policy contents documentation](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_Policy.html).
+* `description` - (Optional) The description of the AWS Network Firewall firewall policy.
+* `exclude_map` - (Optional) A map of lists of accounts and OUs to exclude from the policy.
+* `exclude_resource_tags` - (Required, Forces new resource) A boolean value. If `true`, the tags that are specified in `resource_tags` are not protected by this policy. If set to `false` and `resource_tags` is populated, resources that contain those tags are protected by this policy.
+* `include_map` - (Optional) A map of lists of accounts and OUs to include in the policy.
+* `remediation_enabled` - (Required) A boolean value that indicates whether the policy should be automatically applied to resources that already exist in the account.
+* `resource_tags` - (Optional) A map of resource tags that, if present, will filter protections on resources based on `exclude_resource_tags`.
+* `resource_type` - (Optional) A resource type to protect. Conflicts with `resource_type_list`. See the [FMS API Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_Policy.html#fms-Type-Policy-ResourceType) for more information about supported values.
+* `resource_type_list` - (Optional) A list of resource types to protect. Conflicts with `resource_type`. See the [FMS API Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_Policy.html#fms-Type-Policy-ResourceType) for more information about supported values. Lists with only one element are not supported, instead use `resource_type`.
+* `security_service_policy_data` - (Required) The objects to include in Security Service Policy Data. Documented below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## `exclude_map` Configuration Block
+
+* `account` - (Optional) A list of AWS Organizations member accounts that you want to exclude from this AWS FMS Policy.
+* `orgunit` - (Optional) A list of IDs of the AWS Organizational Units that you want to exclude from this AWS FMS Policy. Specifying an OU is the equivalent of specifying all accounts in the OU and in any of its child OUs, including any child OUs and accounts that are added at a later time.
+
+You can specify inclusions or exclusions, but not both. If you specify an `include_map`, AWS Firewall Manager applies the policy to all accounts specified by the `include_map`, and does not evaluate any `exclude_map` specifications. If you do not specify an `include_map`, then Firewall Manager applies the policy to all accounts except for those specified by the `exclude_map`.
+
+## `include_map` Configuration Block
+
+* `account` - (Optional) A list of AWS Organizations member accounts that you want to include for this AWS FMS Policy. See the sketch below this section for an example.
+* `orgunit` - (Optional) A list of IDs of the AWS Organizational Units that you want to include for this AWS FMS Policy. Specifying an OU is the equivalent of specifying all accounts in the OU and in any of its child OUs, including any child OUs and accounts that are added at a later time.
+
+You can specify inclusions or exclusions, but not both. If you specify an `include_map`, AWS Firewall Manager applies the policy to all accounts specified by the `include_map`, and does not evaluate any `exclude_map` specifications. If you do not specify an `include_map`, then Firewall Manager applies the policy to all accounts except for those specified by the `exclude_map`.
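+
+As a hedged sketch (not from the generated docs), scoping a policy to two member accounts with `include_map` might look like the following inside a stack such as `MyConvertedCode` above. `FmsPolicyIncludeMap` is the struct name cdktf conventionally generates for this block, and the account IDs are placeholders; per the `security_service_policy_data` notes below, `SHIELD_ADVANCED` needs no `managed_service_data`:
+
+```python
+# Hypothetical sketch: apply a Shield Advanced policy only to two accounts.
+FmsPolicy(self, "scoped",
+    exclude_resource_tags=False,
+    include_map=FmsPolicyIncludeMap(
+        account=["111111111111", "222222222222"]  # placeholder account IDs
+    ),
+    name="FMS-Policy-Scoped",
+    remediation_enabled=False,
+    resource_type="AWS::ElasticLoadBalancingV2::LoadBalancer",
+    security_service_policy_data=FmsPolicySecurityServicePolicyData(
+        type="SHIELD_ADVANCED"
+    )
+)
+```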
+
+## `security_service_policy_data` Configuration Block
+
+* `managed_service_data` - (Optional) Details about the service that are specific to the service type, in JSON format. For service type `SHIELD_ADVANCED`, this is an empty string. Examples depending on `type` can be found in the [AWS Firewall Manager SecurityServicePolicyData API Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_SecurityServicePolicyData.html).
+* `policy_option` - (Optional) Contains the Network Firewall firewall policy options to configure a centralized deployment model. Documented below.
+* `type` - (Required, Forces new resource) The service that the policy is using to protect the resources. For the current list of supported types, please refer to the [AWS Firewall Manager SecurityServicePolicyData API Type Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_SecurityServicePolicyData.html#fms-Type-SecurityServicePolicyData-Type).
+
+## `policy_option` Configuration Block
+
+* `network_firewall_policy` - (Optional) Defines the deployment model to use for the firewall policy. Documented below.
+* `thirdparty_firewall_policy` - (Optional) Defines the policy options for a third-party firewall policy. Documented below.
+
+## `network_firewall_policy` Configuration Block
+
+* `firewall_deployment_model` - (Optional) Defines the deployment model to use for the firewall policy. To use a distributed model, remove the `policy_option` section. Valid values are `CENTRALIZED` and `DISTRIBUTED`.
+
+## `thirdparty_firewall_policy` Configuration Block
+
+* `firewall_deployment_model` - (Optional) Defines the deployment model to use for the third-party firewall policy. Valid values are `CENTRALIZED` and `DISTRIBUTED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS account ID of the AWS Firewall Manager administrator account.
+* `policy_update_token` - A unique identifier for each update to the policy.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Firewall Manager policies using the policy ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Firewall Manager policies using the policy ID. 
For example:
+
+```console
+% terraform import aws_fms_policy.example 5be49585-a7e3-4c49-dde1-a179fe4a619a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_backup.html.markdown b/website/docs/cdktf/python/r/fsx_backup.html.markdown
new file mode 100644
index 00000000000..e48cbeffbea
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_backup.html.markdown
@@ -0,0 +1,178 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_backup"
+description: |-
+  Manages a FSx Backup.
+---
+
+
+# Resource: aws_fsx_backup
+
+Provides a FSx Backup resource.
+
+## Example Usage
+
+### Lustre Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_backup import FsxBackup
+from imports.aws.fsx_lustre_file_system import FsxLustreFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = FsxLustreFileSystem(self, "example",
+            deployment_type="PERSISTENT_1",
+            per_unit_storage_throughput=50,
+            storage_capacity=1200,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)]
+        )
+        aws_fsx_backup_example = FsxBackup(self, "example_1",
+            file_system_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fsx_backup_example.override_logical_id("example")
+```
+
+### Windows Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_backup import FsxBackup
+from imports.aws.fsx_windows_file_system import FsxWindowsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = FsxWindowsFileSystem(self, "example",
+            active_directory_id=aws_directory_service_directory_example.id,
+            skip_final_backup=True,
+            storage_capacity=32,
+            subnet_ids=[example1.id],
+            throughput_capacity=8
+        )
+        aws_fsx_backup_example = FsxBackup(self, "example_1",
+            file_system_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fsx_backup_example.override_logical_id("example")
+```
+
+### ONTAP Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_backup import FsxBackup
+from imports.aws.fsx_ontap_volume import FsxOntapVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = FsxOntapVolume(self, "example",
+            junction_path="/example",
+            name="example",
+            size_in_megabytes=1024,
+            storage_efficiency_enabled=True,
+            storage_virtual_machine_id=test.id
+        )
+        aws_fsx_backup_example = FsxBackup(self, "example_1",
+            volume_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fsx_backup_example.override_logical_id("example")
+```
+
+### OpenZFS Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_backup import FsxBackup
+from imports.aws.fsx_openzfs_file_system import FsxOpenzfsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = FsxOpenzfsFileSystem(self, "example",
+            deployment_type="SINGLE_AZ_1",
+            storage_capacity=64,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            throughput_capacity=64
+        )
+        aws_fsx_backup_example = FsxBackup(self, "example_1",
+            file_system_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fsx_backup_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+~> **NOTE:** Only one of `file_system_id` or `volume_id` can be specified. `file_system_id` is used for Lustre and Windows file systems, `volume_id` is used for ONTAP volumes.
+
+* `file_system_id` - (Optional) The ID of the file system to back up. Required if backing up Lustre or Windows file systems.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. If you have set `copy_tags_to_backups` to true, and you specify one or more tags, no existing file system tags are copied from the file system to the backup.
+* `volume_id` - (Optional) The ID of the volume to back up. Required if backing up an ONTAP volume.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the backup.
+* `id` - Identifier of the backup, e.g., `fs-12345678`
+* `kms_key_id` - The ID of the AWS Key Management Service (AWS KMS) key used to encrypt the backup of the Amazon FSx file system's data at rest.
+* `owner_id` - AWS account identifier that created the file system.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `type` - The type of the file system backup.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Backups using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx Backups using the `id`. 
For example:
+
+```console
+% terraform import aws_fsx_backup.example fs-543ab12b1ca672f33
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_data_repository_association.html.markdown b/website/docs/cdktf/python/r/fsx_data_repository_association.html.markdown
new file mode 100644
index 00000000000..d1fc53946c7
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_data_repository_association.html.markdown
@@ -0,0 +1,127 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_data_repository_association"
+description: |-
+  Manages a FSx for Lustre Data Repository Association.
+---
+
+
+# Resource: aws_fsx_data_repository_association
+
+Manages a FSx for Lustre Data Repository Association. See [Linking your file system to an S3 bucket](https://docs.aws.amazon.com/fsx/latest/LustreGuide/create-dra-linked-data-repo.html) for more information.
+
+~> **NOTE:** Data Repository Associations are only compatible with AWS FSx for Lustre File Systems and `PERSISTENT_2` deployment type.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_data_repository_association import FsxDataRepositoryAssociation
+from imports.aws.fsx_lustre_file_system import FsxLustreFileSystem
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = FsxLustreFileSystem(self, "example",
+            deployment_type="PERSISTENT_2",
+            per_unit_storage_throughput=125,
+            storage_capacity=1200,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)]
+        )
+        aws_s3_bucket_example = S3Bucket(self, "example_1",
+            bucket="my-bucket"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_example.override_logical_id("example")
+        aws_s3_bucket_acl_example = S3BucketAcl(self, "example_2",
+            acl="private",
+            bucket=Token.as_string(aws_s3_bucket_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_acl_example.override_logical_id("example")
+        aws_fsx_data_repository_association_example = FsxDataRepositoryAssociation(self, "example_3",
+            data_repository_path="s3://${" + aws_s3_bucket_example.id + "}",
+            file_system_id=example.id,
+            file_system_path="/my-bucket",
+            s3=FsxDataRepositoryAssociationS3(
+                auto_export_policy=FsxDataRepositoryAssociationS3AutoExportPolicy(
+                    events=["NEW", "CHANGED", "DELETED"]
+                ),
+                auto_import_policy=FsxDataRepositoryAssociationS3AutoImportPolicy(
+                    events=["NEW", "CHANGED", "DELETED"]
+                )
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fsx_data_repository_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `batch_import_meta_data_on_create` - (Optional) Set to true to run an import data repository task to import metadata from the data repository to the file system after the data repository association is created. Defaults to `false`. 
+* `data_repository_path` - (Required) The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket path, such as `s3://myBucket/myPrefix/`. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
+* `file_system_id` - (Required) The ID of the Amazon FSx file system on which to create a data repository association.
+* `file_system_path` - (Required) A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `data_repository_path`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
+* `imported_file_chunk_size` - (Optional) For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
+* `s3` - (Optional) See the [`s3` configuration](#s3-arguments) block. Max of 1. This is the configuration for an Amazon S3 data repository linked to an Amazon FSx for Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
+* `delete_data_in_filesystem` - (Optional) Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
+* `tags` - (Optional) A map of tags to assign to the data repository association. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### S3 arguments
+
+* `auto_export_policy` - (Optional) Specifies the type of updated objects that will be automatically exported from your file system to the linked S3 bucket. See the [`events` configuration](#events-arguments) block.
+* `auto_import_policy` - (Optional) Specifies the type of updated objects that will be automatically imported from the linked S3 bucket to your file system. See the [`events` configuration](#events-arguments) block.
+
+#### Events arguments
+
+* `events` - (Optional) A list of file event types to automatically export to your linked S3 bucket or import from the linked S3 bucket. Valid values are `NEW`, `CHANGED`, `DELETED`. Max of 3.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system. 
+* `id` - Identifier of the data repository association, e.g., `dra-12345678` +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10m`) +* `update` - (Default `10m`) +* `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Data Repository Associations using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import FSx Data Repository Associations using the `id`. For example: + +```console +% terraform import aws_fsx_data_repository_association.example dra-0b1cfaeca11088b10 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/fsx_file_cache.html.markdown b/website/docs/cdktf/python/r/fsx_file_cache.html.markdown new file mode 100644 index 00000000000..e1996a55042 --- /dev/null +++ b/website/docs/cdktf/python/r/fsx_file_cache.html.markdown @@ -0,0 +1,148 @@ +--- +subcategory: "FSx" +layout: "aws" +page_title: "AWS: aws_fsx_file_cache" +description: |- + Terraform resource for managing an Amazon File Cache cache. +--- + + + +# Resource: aws_fsx_file_cache + +Terraform resource for managing an Amazon File Cache cache. +See the [Create File Cache](https://docs.aws.amazon.com/fsx/latest/APIReference/API_CreateFileCache.html) for more information. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.fsx_file_cache import FsxFileCache +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + FsxFileCache(self, "example", + data_repository_association=[FsxFileCacheDataRepositoryAssociation( + data_repository_path="nfs://filer.domain.com", + data_repository_subdirectories=["test", "test2"], + file_cache_path="/ns1", + nfs=[FsxFileCacheDataRepositoryAssociationNfs( + dns_ips=["192.168.0.1", "192.168.0.2"], + version="NFS3" + ) + ] + ) + ], + file_cache_type="LUSTRE", + file_cache_type_version="2.12", + lustre_configuration=[FsxFileCacheLustreConfiguration( + deployment_type="CACHE_1", + metadata_configuration=[FsxFileCacheLustreConfigurationMetadataConfiguration( + storage_capacity=2400 + ) + ], + per_unit_storage_throughput=1000, + weekly_maintenance_start_time="2:05:00" + ) + ], + storage_capacity=1200, + subnet_ids=[test1.id] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `file_cache_type` - The type of cache that you're creating. The only supported value is `LUSTRE`. +* `file_cache_type_version` - The version for the type of cache that you're creating. The only supported value is `2.12`. +* `storage_capacity` - The storage capacity of the cache in gibibytes (GiB). 
Valid values are `1200` GiB, `2400` GiB, and increments of `2400` GiB.
+* `subnet_ids` - A list of subnet IDs that the cache will be accessible from. You can specify only one subnet ID.
+
+The following arguments are optional:
+
+* `copy_tags_to_data_repository_associations` - A boolean flag indicating whether tags for the cache should be copied to data repository associations. This value defaults to `false`.
+* `data_repository_association` - See the [`data_repository_association` configuration](#data-repository-association-arguments) block. Max of 8. A list of up to 8 configurations for data repository associations (DRAs) to be created during the cache creation. The DRAs link the cache to either an Amazon S3 data repository or a Network File System (NFS) data repository that supports the NFSv3 protocol. The DRA configurations must meet the following requirements: 1) All configurations on the list must be of the same data repository type, either all S3 or all NFS. A cache can't link to different data repository types at the same time. 2) An NFS DRA must link to an NFS file system that supports the NFSv3 protocol. DRA automatic import and automatic export is not supported.
+* `kms_key_id` - Specifies the ID of the AWS Key Management Service (AWS KMS) key to use for encrypting data on an Amazon File Cache. If a KmsKeyId isn't specified, the Amazon FSx-managed AWS KMS key for your account is used.
+* `lustre_configuration` - See the [`lustre_configuration`](#lustre-configuration-arguments) block. Required when `file_cache_type` is `LUSTRE`.
+* `security_group_ids` - A list of IDs specifying the security groups to apply to all network interfaces created for Amazon File Cache access.
+* `tags` - (Optional) A map of tags to assign to the file cache. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Data Repository Association arguments
+
+The `data_repository_association` configuration block supports the following arguments:
+
+* `file_cache_path` - (Required) A path on the cache that points to a high-level directory (such as /ns1/) or subdirectory (such as /ns1/subdir/) that will be mapped 1-1 with DataRepositoryPath. The leading forward slash in the name is required. Two data repository associations cannot have overlapping cache paths. For example, if a data repository is associated with cache path /ns1/, then you cannot link another data repository with cache path /ns1/ns2. This path specifies where in your cache files will be exported from. This cache directory can be linked to only one data repository, and no other data repository can be linked to the directory. Note: The cache path can only be set to root (/) on an NFS DRA when DataRepositorySubdirectories is specified. If you specify root (/) as the cache path, you can create only one DRA on the cache. The cache path cannot be set to root (/) for an S3 DRA.
+* `data_repository_path` - (Optional) The path to the S3 or NFS data repository that links to the cache.
+* `data_repository_subdirectories` - (Optional) A list of NFS Exports that will be linked with this data repository association. The Export paths are in the format /exportpath1. To use this parameter, you must configure DataRepositoryPath as the domain name of the NFS file system. The NFS file system domain name in effect is the root of the subdirectories. 
Note that DataRepositorySubdirectories is not supported for S3 data repositories. Max of 500.
+* `nfs` - (Optional) See the [`nfs` configuration](#nfs-arguments) block.
+
+#### NFS arguments
+
+The `nfs` configuration block supports the following arguments:
+
+* `version` - (Required) The version of the NFS (Network File System) protocol of the NFS data repository. The only supported value is `NFS3`, which indicates that the data repository must support the NFSv3 protocol.
+* `dns_ips` - (Optional) A list of up to 2 IP addresses of DNS servers used to resolve the NFS file system domain name. The provided IP addresses can either be the IP addresses of a DNS forwarder or resolver that the customer manages and runs inside the customer VPC, or the IP addresses of the on-premises DNS servers.
+
+#### Lustre Configuration arguments
+
+The `lustre_configuration` configuration block supports the following arguments:
+
+* `deployment_type` - (Required) Specifies the cache deployment type. The only supported value is `CACHE_1`.
+* `metadata_configuration` - (Required) The configuration for a Lustre MDT (Metadata Target) storage volume. See the [`metadata_configuration`](#metadata-configuration-arguments) block.
+* `per_unit_storage_throughput` - (Required) Provisions the amount of read and write throughput for each 1 tebibyte (TiB) of cache storage capacity, in MB/s/TiB. The only supported value is `1000`.
+* `weekly_maintenance_start_time` - (Optional) A recurring weekly time, in the format `D:HH:MM`. `D` is the day of the week, for which `1` represents Monday and `7` represents Sunday. `HH` is the zero-padded hour of the day (0-23), and `MM` is the zero-padded minute of the hour. For example, `1:05:00` specifies maintenance at 5 AM Monday. See the [ISO week date](https://en.wikipedia.org/wiki/ISO_week_date) for more information.
+
+#### Metadata Configuration arguments
+
+The `metadata_configuration` configuration block supports the following arguments:
+
+* `storage_capacity` - (Required) The storage capacity of the Lustre MDT (Metadata Target) storage volume in gibibytes (GiB). The only supported value is `2400` GiB.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) for the resource.
+* `data_repository_association_ids` - A list of IDs of data repository associations that are associated with this cache.
+* `dns_name` - The Domain Name System (DNS) name for the cache.
+* `file_cache_id` - The system-generated, unique ID of the cache.
+* `id` - The system-generated, unique ID of the cache.
+* `network_interface_ids` - A list of network interface IDs.
+* `vpc_id` - The ID of your virtual private cloud (VPC).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon File Cache cache using the resource `id`. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Amazon File Cache cache using the resource `id`. For example:
+
+```console
+% terraform import aws_fsx_file_cache.example fc-8012925589
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_lustre_file_system.html.markdown b/website/docs/cdktf/python/r/fsx_lustre_file_system.html.markdown
new file mode 100644
index 00000000000..76306295550
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_lustre_file_system.html.markdown
@@ -0,0 +1,139 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_lustre_file_system"
+description: |-
+  Manages a FSx Lustre File System.
+---
+
+
+# Resource: aws_fsx_lustre_file_system
+
+Manages a FSx Lustre File System. See the [FSx Lustre Guide](https://docs.aws.amazon.com/fsx/latest/LustreGuide/what-is.html) for more information.
+
+~> **NOTE:** `auto_import_policy`, `export_path`, `import_path` and `imported_file_chunk_size` are not supported with the `PERSISTENT_2` deployment type. Use `aws_fsx_data_repository_association` instead.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_lustre_file_system import FsxLustreFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxLustreFileSystem(self, "example",
+            import_path="s3://${" + aws_s3_bucket_example.bucket + "}",
+            storage_capacity=1200,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `storage_capacity` - (Optional) The storage capacity (GiB) of the file system. Minimum of `1200`. See more details at [Allowed values for FSx storage capacity](https://docs.aws.amazon.com/fsx/latest/APIReference/API_CreateFileSystem.html#FSx-CreateFileSystem-request-StorageCapacity). Update is allowed only for `SCRATCH_2`, `PERSISTENT_1` and `PERSISTENT_2` deployment types. See more details at [FSx Storage Capacity Update](https://docs.aws.amazon.com/fsx/latest/APIReference/API_UpdateFileSystem.html#FSx-UpdateFileSystem-request-StorageCapacity). Required when not creating the file system from a backup.
+* `subnet_ids` - (Required) A list of IDs for the subnets that the file system will be accessible from. File systems currently support only one subnet. The file server is also launched in that subnet's Availability Zone.
+* `backup_id` - (Optional) The ID of the source backup to create the filesystem from.
+* `export_path` - (Optional) S3 URI (with optional prefix) where the root of your Amazon FSx file system is exported. Can only be specified with `import_path` argument and the path must use the same Amazon S3 bucket as specified in `import_path`. Set equal to `import_path` to overwrite files on export. Defaults to `s3://{IMPORT BUCKET}/FSxLustre{CREATION TIMESTAMP}`. Only supported on `PERSISTENT_1` deployment types. 
+* `import_path` - (Optional) S3 URI (with optional prefix) that you're using as the data repository for your FSx for Lustre file system. For example, `s3://example-bucket/optional-prefix/`. Only supported on `PERSISTENT_1` deployment types.
+* `imported_file_chunk_size` - (Optional) For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. Can only be specified with `import_path` argument. Defaults to `1024`. Minimum of `1` and maximum of `512000`. Only supported on `PERSISTENT_1` deployment types.
+* `security_group_ids` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `weekly_maintenance_start_time` - (Optional) The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
+* `deployment_type` - (Optional) The filesystem deployment type. One of: `SCRATCH_1`, `SCRATCH_2`, `PERSISTENT_1`, `PERSISTENT_2`.
+* `kms_key_id` - (Optional) ARN for the KMS Key to encrypt the file system at rest, applicable for `PERSISTENT_1` and `PERSISTENT_2` deployment_type. Defaults to an AWS managed KMS Key.
+* `per_unit_storage_throughput` - (Optional) Describes the amount of read and write throughput for each 1 tebibyte of storage, in MB/s/TiB, required for the `PERSISTENT_1` and `PERSISTENT_2` deployment_type. Valid values for `PERSISTENT_1` deployment_type and `SSD` storage_type are 50, 100, 200. Valid values for `PERSISTENT_1` deployment_type and `HDD` storage_type are 12, 40. Valid values for `PERSISTENT_2` deployment_type and `SSD` storage_type are 125, 250, 500, 1000.
+* `automatic_backup_retention_days` - (Optional) The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days. Only valid for `PERSISTENT_1` and `PERSISTENT_2` deployment_type.
+* `storage_type` - (Optional) The filesystem storage type. Either `SSD` or `HDD`, defaults to `SSD`. `HDD` is only supported on `PERSISTENT_1` deployment types.
+* `drive_cache_type` - (Optional) The type of drive cache used by `PERSISTENT_1` filesystems that are provisioned with `HDD` storage_type. Required for `HDD` storage_type, set to either `READ` or `NONE`.
+* `daily_automatic_backup_start_time` - (Optional) A recurring daily time, in the format `HH:MM`. `HH` is the zero-padded hour of the day (0-23), and `MM` is the zero-padded minute of the hour. For example, `05:00` specifies 5 AM daily. Only valid for `PERSISTENT_1` and `PERSISTENT_2` deployment_type. Requires `automatic_backup_retention_days` to be set.
+* `auto_import_policy` - (Optional) How Amazon FSx keeps your file and directory listings up to date as you add or modify objects in your linked S3 bucket. See [Auto Import Data Repo](https://docs.aws.amazon.com/fsx/latest/LustreGuide/autoimport-data-repo.html) for more details. Only supported on `PERSISTENT_1` deployment types.
+* `copy_tags_to_backups` - (Optional) A boolean flag indicating whether tags for the file system should be copied to backups. 
Applicable for `PERSISTENT_1` and `PERSISTENT_2` deployment_type. The default value is `false`.
+* `data_compression_type` - (Optional) Sets the data compression configuration for the file system. Valid values are `LZ4` and `NONE`. Default value is `NONE`. Unsetting this value reverts the compression type back to `NONE`.
+* `file_system_type_version` - (Optional) Sets the Lustre version for the file system that you're creating. Valid values are `2.10` for `SCRATCH_1`, `SCRATCH_2` and `PERSISTENT_1` deployment types. `2.12` is valid for all deployment types.
+* `log_configuration` - (Optional) The Lustre logging configuration used when creating an Amazon FSx for Lustre file system. When logging is enabled, Lustre logs error and warning events for data repositories associated with your file system to Amazon CloudWatch Logs. See the sketch after the configuration blocks below.
+* `root_squash_configuration` - (Optional) The Lustre root squash configuration used when creating an Amazon FSx for Lustre file system. When enabled, root squash restricts root-level access from clients that try to access your file system as a root user.
+
+### log_configuration
+
+* `destination` - (Optional) The Amazon Resource Name (ARN) that specifies the destination of the logs. The name of the Amazon CloudWatch Logs log group must begin with the `/aws/fsx` prefix. If you do not provide a destination, Amazon FSx will create and use a log stream in the CloudWatch Logs `/aws/fsx/lustre` log group.
+* `level` - (Optional) Sets which data repository events are logged by Amazon FSx. Valid values are `WARN_ONLY`, `FAILURE_ONLY`, `ERROR_ONLY`, `WARN_ERROR` and `DISABLED`. Default value is `DISABLED`.
+
+### root_squash_configuration
+
+* `no_squash_nids` - (Optional) When root squash is enabled, you can optionally specify an array of NIDs of clients for which root squash does not apply. A client NID is a Lustre Network Identifier used to uniquely identify a client. You can specify the NID as either a single address or a range of addresses: 1. A single address is described in standard Lustre NID format by specifying the client’s IP address followed by the Lustre network ID (for example, 10.0.1.6@tcp). 2. An address range is described using a dash to separate the range (for example, 10.0.[2-10].[1-255]@tcp).
+* `root_squash` - (Optional) You enable root squash by setting a user ID (UID) and group ID (GID) for the file system in the format UID:GID (for example, 365534:65534). The UID and GID values can range from 0 to 4294967294.
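+
+As a hedged sketch (not from the generated docs), the two blocks above might be combined as follows inside a stack such as `MyConvertedCode` above. The struct names follow cdktf's usual codegen convention and are assumptions, as are the `aws_cloudwatch_log_group_example` and `aws_subnet_example` references, which stand in for externally defined resources like in the other examples:
+
+```python
+# Hypothetical sketch: PERSISTENT_2 file system that logs WARN/ERROR data
+# repository events to CloudWatch Logs and squashes root access to 65534:65534.
+FsxLustreFileSystem(self, "logged",
+    deployment_type="PERSISTENT_2",
+    log_configuration=FsxLustreFileSystemLogConfiguration(
+        destination=Token.as_string(aws_cloudwatch_log_group_example.arn),  # log group name must start with /aws/fsx
+        level="WARN_ERROR"
+    ),
+    per_unit_storage_throughput=125,
+    root_squash_configuration=FsxLustreFileSystemRootSquashConfiguration(
+        root_squash="65534:65534"  # UID:GID applied to root users
+    ),
+    storage_capacity=1200,
+    subnet_ids=[Token.as_string(aws_subnet_example.id)]
+)
+```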
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `dns_name` - DNS name for the file system, e.g., `fs-12345678.fsx.us-west-2.amazonaws.com`
+* `id` - Identifier of the file system, e.g., `fs-12345678`
+* `network_interface_ids` - Set of Elastic Network Interface identifiers from which the file system is accessible. As explained in the [documentation](https://docs.aws.amazon.com/fsx/latest/LustreGuide/mounting-on-premises.html), the first network interface returned is the primary network interface.
+* `mount_name` - The value to be used when mounting the filesystem.
+* `owner_id` - AWS account identifier that created the file system.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpc_id` - Identifier of the Virtual Private Cloud for the file system.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx File Systems using the `id`. For example:
+
+```console
+% terraform import aws_fsx_lustre_file_system.example fs-543ab12b1ca672f33
+```
+
+Certain resource arguments, like `security_group_ids`, do not have a FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To workaround this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_lustre_file_system import FsxLustreFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, subnet_ids):
+        super().__init__(scope, name)
+        FsxLustreFileSystem(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["security_group_ids"]
+            ),
+            security_group_ids=[Token.as_string(aws_security_group_example.id)],
+            subnet_ids=subnet_ids
+        )
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_ontap_file_system.html.markdown b/website/docs/cdktf/python/r/fsx_ontap_file_system.html.markdown
new file mode 100644
index 00000000000..5e15350f8de
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_ontap_file_system.html.markdown
@@ -0,0 +1,142 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_ontap_file_system"
+description: |-
+  Manages an Amazon FSx for NetApp ONTAP file system.
+---
+
+
+# Resource: aws_fsx_ontap_file_system
+
+Manages an Amazon FSx for NetApp ONTAP file system.
+See the [FSx ONTAP User Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/what-is-fsx-ontap.html) for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.fsx_ontap_file_system import FsxOntapFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxOntapFileSystem(self, "test",
+            deployment_type="MULTI_AZ_1",
+            preferred_subnet_id=test1.id,
+            storage_capacity=1024,
+            subnet_ids=[test1.id, test2.id],
+            throughput_capacity=512
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `storage_capacity` - (Optional) The storage capacity (GiB) of the file system. Valid values between `1024` and `196608`.
+* `subnet_ids` - (Required) A list of IDs for the subnets that the file system will be accessible from. Up to 2 subnets can be provided.
+* `preferred_subnet_id` - (Required) The ID for a subnet. A subnet is a range of IP addresses in your virtual private cloud (VPC).
+* `security_group_ids` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `weekly_maintenance_start_time` - (Optional) The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
+* `deployment_type` - (Optional) The filesystem deployment type. Supports `MULTI_AZ_1` and `SINGLE_AZ_1`.
+* `kms_key_id` - (Optional) ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
+* `automatic_backup_retention_days` - (Optional) The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
+* `daily_automatic_backup_start_time` - (Optional) A recurring daily time, in the format `HH:MM`. `HH` is the zero-padded hour of the day (0-23), and `MM` is the zero-padded minute of the hour. For example, `05:00` specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
+* `disk_iops_configuration` - (Optional) The SSD IOPS configuration for the Amazon FSx for NetApp ONTAP file system. See [Disk Iops Configuration](#disk-iops-configuration) below.
+* `endpoint_ip_address_range` - (Optional) Specifies the IP address range in which the endpoints to access your file system will be created. By default, Amazon FSx selects an unused IP address range for you from the 198.19.* range.
+* `storage_type` - (Optional) The filesystem storage type. Defaults to `SSD`.
+* `fsx_admin_password` - (Optional) The ONTAP administrative password for the fsxadmin user that you can use to administer your file system using the ONTAP CLI and REST API.
+* `route_table_ids` - (Optional) Specifies the VPC route tables in which your file system's endpoints will be created. You should specify all VPC route tables associated with the subnets in which your clients are located. By default, Amazon FSx selects your VPC's default route table.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `throughput_capacity` - (Required) Sets the throughput capacity (in MBps) for the file system that you're creating. Valid values are `128`, `256`, `512`, `1024`, `2048`, and `4096`.
+
+### Disk Iops Configuration
+
+* `iops` - (Optional) The total number of SSD IOPS provisioned for the file system.
+* `mode` - (Optional) Specifies whether the number of IOPS for the file system is set automatically or is user-provisioned. Valid values are `AUTOMATIC` and `USER_PROVISIONED`. Default value is `AUTOMATIC`.
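+
+As a hedged sketch (not from the generated docs), user-provisioned SSD IOPS might be requested as follows inside a stack such as `MyConvertedCode` above. `FsxOntapFileSystemDiskIopsConfiguration` is the struct name cdktf conventionally generates for this block and is an assumption; `test1` and `test2` are the subnet references from the example above:
+
+```python
+# Hypothetical sketch: fix the SSD IOPS at 3072 instead of the automatic value.
+FsxOntapFileSystem(self, "provisioned_iops",
+    deployment_type="MULTI_AZ_1",
+    disk_iops_configuration=FsxOntapFileSystemDiskIopsConfiguration(
+        iops=3072,
+        mode="USER_PROVISIONED"
+    ),
+    preferred_subnet_id=test1.id,
+    storage_capacity=1024,
+    subnet_ids=[test1.id, test2.id],
+    throughput_capacity=512
+)
+```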
+* `mode` - (Optional) Specifies whether the number of IOPS for the file system is using the system default (`AUTOMATIC`) or was provisioned by the customer (`USER_PROVISIONED`). Valid values are `AUTOMATIC` and `USER_PROVISIONED`. Default value is `AUTOMATIC`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `dns_name` - DNS name for the file system, e.g., `fs-12345678.fsx.us-west-2.amazonaws.com`
+* `endpoints` - The endpoints that are used to access data or to manage the file system using the NetApp ONTAP CLI, REST API, or NetApp SnapMirror. See [Endpoints](#endpoints) below.
+* `id` - Identifier of the file system, e.g., `fs-12345678`
+* `network_interface_ids` - Set of Elastic Network Interface identifiers from which the file system is accessible. The first network interface returned is the primary network interface.
+* `owner_id` - AWS account identifier that created the file system.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpc_id` - Identifier of the Virtual Private Cloud for the file system.
+
+### Endpoints
+
+* `intercluster` - An endpoint for managing your file system by setting up NetApp SnapMirror with other ONTAP systems. See [Endpoint](#endpoint).
+* `management` - An endpoint for managing your file system using the NetApp ONTAP CLI and NetApp ONTAP API. See [Endpoint](#endpoint).
+
+#### Endpoint
+
+* `dns_name` - The Domain Name Service (DNS) name for the file system. You can mount your file system using its DNS name.
+* `ip_addresses` - IP addresses of the file system endpoint.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `60m`)
+* `delete` - (Default `60m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx File Systems using the `id`. For example:
+
+```console
+% terraform import aws_fsx_ontap_file_system.example fs-543ab12b1ca672f33
+```
+
+Certain resource arguments, like `security_group_ids`, do not have an FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
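+#
+# Note: `aws_security_group_example` below stands in for an `aws_security_group`
+# resource assumed to be defined elsewhere in the stack.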
+#
+from imports.aws.fsx_ontap_file_system import FsxOntapFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, deployment_type, preferred_subnet_id, subnet_ids, throughput_capacity):
+        super().__init__(scope, name)
+        FsxOntapFileSystem(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["security_group_ids"]
+            ),
+            security_group_ids=[Token.as_string(aws_security_group_example.id)],
+            deployment_type=deployment_type,
+            preferred_subnet_id=preferred_subnet_id,
+            subnet_ids=subnet_ids,
+            throughput_capacity=throughput_capacity
+        )
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_ontap_storage_virtual_machine.html.markdown b/website/docs/cdktf/python/r/fsx_ontap_storage_virtual_machine.html.markdown
new file mode 100644
index 00000000000..6d2c392d993
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_ontap_storage_virtual_machine.html.markdown
@@ -0,0 +1,172 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_ontap_storage_virtual_machine"
+description: |-
+  Manages an FSx Storage Virtual Machine.
+---
+
+
+# Resource: aws_fsx_ontap_storage_virtual_machine
+
+Manages an FSx Storage Virtual Machine.
+See the [FSx ONTAP User Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/managing-svms.html) for more information.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_ontap_storage_virtual_machine import FsxOntapStorageVirtualMachine
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxOntapStorageVirtualMachine(self, "test",
+            file_system_id=Token.as_string(aws_fsx_ontap_file_system_test.id),
+            name="test"
+        )
+```
+
+### Using a Self-Managed Microsoft Active Directory
+
+Additional information for using AWS Directory Service with ONTAP File Systems can be found in the [FSx ONTAP Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/self-managed-AD.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_ontap_storage_virtual_machine import (
+    FsxOntapStorageVirtualMachine,
+    FsxOntapStorageVirtualMachineActiveDirectoryConfiguration,
+    FsxOntapStorageVirtualMachineActiveDirectoryConfigurationSelfManagedActiveDirectoryConfiguration
+)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxOntapStorageVirtualMachine(self, "test",
+            active_directory_configuration=FsxOntapStorageVirtualMachineActiveDirectoryConfiguration(
+                netbios_name="mysvm",
+                self_managed_active_directory_configuration=FsxOntapStorageVirtualMachineActiveDirectoryConfigurationSelfManagedActiveDirectoryConfiguration(
+                    dns_ips=["10.0.0.111", "10.0.0.222"],
+                    domain_name="corp.example.com",
+                    password="avoid-plaintext-passwords",
+                    username="Admin"
+                )
+            ),
+            file_system_id=Token.as_string(aws_fsx_ontap_file_system_test.id),
+            name="mysvm"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `active_directory_configuration` - (Optional) Configuration block that Amazon FSx uses to join the FSx ONTAP Storage Virtual Machine (SVM) to your Microsoft Active Directory (AD) directory. Detailed below.
+* `file_system_id` - (Required) The ID of the Amazon FSx ONTAP File System that this SVM will be created on.
+* `name` - (Required) The name of the SVM. You can use a maximum of 47 alphanumeric characters, plus the underscore (_) special character.
+* `root_volume_security_style` - (Optional) Specifies the root volume security style. Valid values are `UNIX`, `NTFS`, and `MIXED`. All volumes created under this SVM will inherit the root security style unless the security style is specified on the volume. Default value is `UNIX`.
+* `tags` - (Optional) A map of tags to assign to the storage virtual machine. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### active_directory_configuration
+
+The `active_directory_configuration` configuration block supports the following arguments:
+
+* `netbios_name` - (Required) The NetBIOS name of the Active Directory computer object that will be created for your SVM. This is often the same as the SVM name but can be different. AWS limits this to 15 characters because of standard NetBIOS naming limits.
+* `self_managed_active_directory` - (Optional) Configuration block that Amazon FSx uses to join the SVM to your self-managed (including on-premises) Microsoft Active Directory (AD) directory.
+
+### self_managed_active_directory
+
+The `self_managed_active_directory` configuration block supports the following arguments:
+
+* `dns_ips` - (Required) A list of up to three IP addresses of DNS servers or domain controllers in the self-managed AD directory.
+* `domain_name` - (Required) The fully qualified domain name of the self-managed AD directory. For example, `corp.example.com`.
+* `password` - (Required) The password for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `username` - (Required) The user name for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `file_system_administrators_group` - (Optional) The name of the domain group whose members are granted administrative privileges for the SVM. The group that you specify must already exist in your domain. Defaults to `Domain Admins`.
+* `organizational_unit_distinguished_name` - (Optional) The fully qualified distinguished name of the organizational unit within your self-managed AD directory that the Windows File Server instance will join. For example, `OU=FSx,DC=yourdomain,DC=corp,DC=com`. Only accepts OU as the direct parent of the SVM. If none is provided, the SVM is created in the default location of your self-managed AD directory. To learn more, see [RFC 2253](https://tools.ietf.org/html/rfc2253).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the storage virtual machine.
+* `endpoints` - The endpoints that are used to access data or to manage the storage virtual machine using the NetApp ONTAP CLI, REST API, or NetApp SnapMirror. See [Endpoints](#endpoints) below.
+* `id` - Identifier of the storage virtual machine, e.g., `svm-12345678`
+* `subtype` - Describes the SVM's subtype, e.g. `DEFAULT`
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uuid` - The SVM's UUID (universally unique identifier).
+
+### Endpoints
+
+* `iscsi` - An endpoint for accessing data on your storage virtual machine via the iSCSI protocol. See [Endpoint](#endpoint).
+* `management` - An endpoint for managing your file system using the NetApp ONTAP CLI and NetApp ONTAP API. See [Endpoint](#endpoint).
+* `nfs` - An endpoint for accessing data on your storage virtual machine via the NFS protocol. See [Endpoint](#endpoint).
+* `smb` - An endpoint for accessing data on your storage virtual machine via the SMB protocol. This is only set if an `active_directory_configuration` has been set. See [Endpoint](#endpoint).
+
+#### Endpoint
+
+* `dns_name` - The Domain Name Service (DNS) name for the storage virtual machine. You can mount your storage virtual machine using its DNS name.
+* `ip_addresses` - IP addresses of the storage virtual machine endpoint.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `delete` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Storage Virtual Machines using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx Storage Virtual Machines using the `id`. For example:
+
+```console
+% terraform import aws_fsx_ontap_storage_virtual_machine.example svm-12345678abcdef123
+```
+
+Certain resource arguments, like `svm_admin_password` and the `self_managed_active_directory` configuration block `password`, do not have an FSx API method for reading the information after creation. If these arguments are set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
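+#
+# Note: the `svm_admin_password` value below is a placeholder; in practice it
+# should come from a secret store rather than being hard-coded.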
+#
+from imports.aws.fsx_ontap_storage_virtual_machine import FsxOntapStorageVirtualMachine
+class MyConvertedCode(TerraformStack):
+    # The SVM name parameter is called `svm_name` here so it does not clash
+    # with the construct's own `name` argument.
+    def __init__(self, scope, name, *, file_system_id, svm_name):
+        super().__init__(scope, name)
+        FsxOntapStorageVirtualMachine(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["svm_admin_password"]
+            ),
+            svm_admin_password="avoid-plaintext-passwords",
+            file_system_id=file_system_id,
+            name=svm_name
+        )
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_ontap_volume.html.markdown b/website/docs/cdktf/python/r/fsx_ontap_volume.html.markdown
new file mode 100644
index 00000000000..7c18feaabb7
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_ontap_volume.html.markdown
@@ -0,0 +1,130 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_ontap_volume"
+description: |-
+  Manages an FSx ONTAP Volume.
+---
+
+
+# Resource: aws_fsx_ontap_volume
+
+Manages an FSx ONTAP Volume.
+See the [FSx ONTAP User Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/managing-volumes.html) for more information.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_ontap_volume import FsxOntapVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxOntapVolume(self, "test",
+            junction_path="/test",
+            name="test",
+            size_in_megabytes=1024,
+            storage_efficiency_enabled=True,
+            storage_virtual_machine_id=Token.as_string(aws_fsx_ontap_storage_virtual_machine_test.id)
+        )
+```
+
+### Using Tiering Policy
+
+Additional information on tiering policy with ONTAP Volumes can be found in the [FSx ONTAP Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/managing-volumes.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_ontap_volume import FsxOntapVolume, FsxOntapVolumeTieringPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxOntapVolume(self, "test",
+            junction_path="/test",
+            name="test",
+            size_in_megabytes=1024,
+            storage_efficiency_enabled=True,
+            storage_virtual_machine_id=Token.as_string(aws_fsx_ontap_storage_virtual_machine_test.id),
+            tiering_policy=FsxOntapVolumeTieringPolicy(
+                cooling_period=31,
+                name="AUTO"
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Volume. You can use a maximum of 203 alphanumeric characters, plus the underscore (_) special character.
+* `junction_path` - (Optional) Specifies the location in the storage virtual machine's namespace where the volume is mounted. The junction_path must have a leading forward slash, such as `/vol3`.
+* `ontap_volume_type` - (Optional) Specifies the type of volume. Valid values are `RW` and `DP`. Default value is `RW`. These can be set by the ONTAP CLI or API. This setting is used as part of migration and replication; see [Migrating to Amazon FSx for NetApp ONTAP](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/migrating-fsx-ontap.html).
+* `security_style` - (Optional) Specifies the volume security style. Valid values are `UNIX`, `NTFS`, and `MIXED`.
+* `size_in_megabytes` - (Required) Specifies the size of the volume, in megabytes (MB), that you are creating.
+* `skip_final_backup` - (Optional) When enabled, will skip the default final backup taken when the volume is deleted. This configuration must be applied separately before attempting to delete the resource to have the desired behavior. Defaults to `false`.
+* `storage_efficiency_enabled` - (Optional) Set to true to enable deduplication, compression, and compaction storage efficiency features on the volume.
+* `storage_virtual_machine_id` - (Required) Specifies the storage virtual machine in which to create the volume.
+* `tags` - (Optional) A map of tags to assign to the volume. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### tiering_policy
+
+The `tiering_policy` configuration block supports the following arguments:
+
+* `name` - (Required) Specifies the tiering policy for the ONTAP volume for moving data to the capacity pool storage. Valid values are `SNAPSHOT_ONLY`, `AUTO`, `ALL`, `NONE`. Default value is `SNAPSHOT_ONLY`.
+* `cooling_period` - (Optional) Specifies the number of days that user data in a volume must remain inactive before it is considered "cold" and moved to the capacity pool. Used with `AUTO` and `SNAPSHOT_ONLY` tiering policies only. Valid values are whole numbers between 2 and 183. Default values are 31 days for `AUTO` and 2 days for `SNAPSHOT_ONLY`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the volume.
+* `id` - Identifier of the volume, e.g., `fsvol-12345678`
+* `file_system_id` - Describes the file system for the volume, e.g. `fs-12345679`
+* `flexcache_endpoint_type` - Specifies the FlexCache endpoint type of the volume. Valid values are `NONE`, `ORIGIN`, `CACHE`. Default value is `NONE`. These can be set by the ONTAP CLI or API and are used with the FlexCache feature.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uuid` - The Volume's UUID (universally unique identifier).
+* `volume_type` - The type of volume, currently the only valid value is `ONTAP`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `delete` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx ONTAP volumes using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx ONTAP volumes using the `id`. For example:
+
+```console
+% terraform import aws_fsx_ontap_volume.example fsvol-12345678abcdef123
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_openzfs_file_system.html.markdown b/website/docs/cdktf/python/r/fsx_openzfs_file_system.html.markdown
new file mode 100644
index 00000000000..df1fcb3f0ef
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_openzfs_file_system.html.markdown
@@ -0,0 +1,154 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_openzfs_file_system"
+description: |-
+  Manages an Amazon FSx for OpenZFS file system.
+---
+
+
+# Resource: aws_fsx_openzfs_file_system
+
+Manages an Amazon FSx for OpenZFS file system.
+See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_openzfs_file_system import FsxOpenzfsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxOpenzfsFileSystem(self, "test",
+            deployment_type="SINGLE_AZ_1",
+            storage_capacity=64,
+            subnet_ids=[test1.id],
+            throughput_capacity=64
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deployment_type` - (Required) The filesystem deployment type. Valid values: `SINGLE_AZ_1` and `SINGLE_AZ_2`.
+* `storage_capacity` - (Required) The storage capacity (GiB) of the file system. Valid values between `64` and `524288`.
+* `subnet_ids` - (Required) A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
+* `throughput_capacity` - (Required) Throughput (MB/s) of the file system. Valid values depend on `deployment_type`. Must be one of `64`, `128`, `256`, `512`, `1024`, `2048`, `3072`, `4096` for `SINGLE_AZ_1`. Must be one of `160`, `320`, `640`, `1280`, `2560`, `3840`, `5120`, `7680`, `10240` for `SINGLE_AZ_2`.
+* `automatic_backup_retention_days` - (Optional) The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
+* `backup_id` - (Optional) The ID of the source backup to create the filesystem from.
+* `copy_tags_to_backups` - (Optional) A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
+* `copy_tags_to_volumes` - (Optional) A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
+* `daily_automatic_backup_start_time` - (Optional) A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
+* `disk_iops_configuration` - (Optional) The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See [Disk Iops Configuration](#disk-iops-configuration) below.
+* `kms_key_id` - (Optional) ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
+* `root_volume_configuration` - (Optional) The configuration for the root volume of the file system. All other volumes are children of the root volume. See [Root Volume Configuration](#root-volume-configuration) below.
+* `security_group_ids` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `storage_type` - (Optional) The filesystem storage type. Only `SSD` is supported.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `weekly_maintenance_start_time` - (Optional) The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
+
+### Disk Iops Configuration
+
+* `iops` - (Optional) The total number of SSD IOPS provisioned for the file system.
+* `mode` - (Optional) Specifies whether the number of IOPS for the file system is using the system default (`AUTOMATIC`) or was provisioned by the customer (`USER_PROVISIONED`). Valid values are `AUTOMATIC` and `USER_PROVISIONED`. Default value is `AUTOMATIC`.
+
+### Root Volume Configuration
+
+* `copy_tags_to_snapshots` - (Optional) A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
+* `data_compression_type` - (Optional) Method used to compress the data on the volume. Valid values are `LZ4`, `NONE` or `ZSTD`. Child volumes that don't specify a compression option will inherit it from the parent volume. This option on the file system applies to the root volume.
+* `nfs_exports` - (Optional) NFS export configuration for the root volume. Exactly 1 item. See [NFS Exports](#nfs-exports) below.
+* `read_only` - (Optional) Specifies whether the volume is read-only. Default is false.
+* `record_size_kib` - (Optional) Specifies the record size of an OpenZFS root volume, in kibibytes (KiB). Valid values are `4`, `8`, `16`, `32`, `64`, `128`, `256`, `512`, or `1024` KiB. The default is `128` KiB.
+* `user_and_group_quotas` - (Optional) Specify how much storage users or groups can use on the volume. Maximum of 100 items. See [User and Group Quotas](#user-and-group-quotas) below.
+
+### NFS Exports
+
+* `client_configurations` - (Required) A list of configuration objects that contain the client and options for mounting the OpenZFS file system. Maximum of 25 items. See [Client Configurations](#client-configurations) below.
+
+### Client Configurations
+
+* `clients` - (Required) A value that specifies who can mount the file system. You can provide a wildcard character (`*`), an IP address (`0.0.0.0`), or a CIDR address (`192.0.2.0/24`). By default, Amazon FSx uses the wildcard character when specifying the client.
+* `options` - (Required) The options to use when mounting the file system. Maximum of 20 items. See the [Linux NFS exports man page](https://linux.die.net/man/5/exports) for more information. `crossmount` and `sync` are used by default.
+
+### User and Group Quotas
+
+* `id` - (Required) The ID of the user or group. Valid values are between `0` and `2147483647`.
+* `storage_capacity_quota_gib` - (Required) The amount of storage that the user or group can use in gibibytes (GiB). Valid values are between `0` and `2147483647`.
+* `type` - (Required) A value that specifies whether the quota applies to a user or group. Valid values are `USER` or `GROUP`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `dns_name` - DNS name for the file system, e.g., `fs-12345678.fsx.us-west-2.amazonaws.com`
+* `id` - Identifier of the file system, e.g., `fs-12345678`
+* `network_interface_ids` - Set of Elastic Network Interface identifiers from which the file system is accessible. The first network interface returned is the primary network interface.
+* `root_volume_id` - Identifier of the root volume, e.g., `fsvol-12345678`
+* `owner_id` - AWS account identifier that created the file system.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpc_id` - Identifier of the Virtual Private Cloud for the file system.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `60m`)
+* `delete` - (Default `60m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx File Systems using the `id`. For example:
+
+```console
+% terraform import aws_fsx_openzfs_file_system.example fs-543ab12b1ca672f33
+```
+
+Certain resource arguments, like `security_group_ids`, do not have an FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
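+#
+# Note: as in the other import examples, `aws_security_group_example` stands in
+# for an `aws_security_group` resource assumed to be defined elsewhere.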
+#
+from imports.aws.fsx_openzfs_file_system import FsxOpenzfsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, deployment_type, subnet_ids, throughput_capacity):
+        super().__init__(scope, name)
+        FsxOpenzfsFileSystem(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["security_group_ids"]
+            ),
+            security_group_ids=[Token.as_string(aws_security_group_example.id)],
+            deployment_type=deployment_type,
+            subnet_ids=subnet_ids,
+            throughput_capacity=throughput_capacity
+        )
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_openzfs_snapshot.html.markdown b/website/docs/cdktf/python/r/fsx_openzfs_snapshot.html.markdown
new file mode 100644
index 00000000000..fa73bc5f4b3
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_openzfs_snapshot.html.markdown
@@ -0,0 +1,126 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_openzfs_snapshot"
+description: |-
+  Manages an Amazon FSx for OpenZFS snapshot.
+---
+
+
+# Resource: aws_fsx_openzfs_snapshot
+
+Manages an Amazon FSx for OpenZFS snapshot.
+See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
+
+## Example Usage
+
+### Root volume Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_openzfs_file_system import FsxOpenzfsFileSystem
+from imports.aws.fsx_openzfs_snapshot import FsxOpenzfsSnapshot
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = FsxOpenzfsFileSystem(self, "example",
+            deployment_type="SINGLE_AZ_1",
+            storage_capacity=64,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            throughput_capacity=64
+        )
+        aws_fsx_openzfs_snapshot_example = FsxOpenzfsSnapshot(self, "example_1",
+            name="example",
+            volume_id=example.root_volume_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fsx_openzfs_snapshot_example.override_logical_id("example")
+```
+
+### Child volume Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_openzfs_file_system import FsxOpenzfsFileSystem
+from imports.aws.fsx_openzfs_snapshot import FsxOpenzfsSnapshot
+from imports.aws.fsx_openzfs_volume import FsxOpenzfsVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = FsxOpenzfsFileSystem(self, "example",
+            deployment_type="SINGLE_AZ_1",
+            storage_capacity=64,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            throughput_capacity=64
+        )
+        aws_fsx_openzfs_volume_example = FsxOpenzfsVolume(self, "example_1",
+            name="example",
+            parent_volume_id=example.root_volume_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
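+        # (override_logical_id keeps the synthesized logical ID "example"
+        # instead of the auto-generated "example_1".)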
+        aws_fsx_openzfs_volume_example.override_logical_id("example")
+        aws_fsx_openzfs_snapshot_example = FsxOpenzfsSnapshot(self, "example_2",
+            name="example",
+            volume_id=Token.as_string(aws_fsx_openzfs_volume_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_fsx_openzfs_snapshot_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Snapshot. You can use a maximum of 203 alphanumeric characters, plus the underscore (_), hyphen (-), colon (:), and period (.) special characters.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. If you have set `copy_tags_to_backups` to true, and you specify one or more tags, no existing file system tags are copied from the file system to the backup.
+* `volume_id` - (Optional) The ID of the volume to snapshot. This can be the root volume or a child volume.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the snapshot.
+* `id` - Identifier of the snapshot, e.g., `fsvolsnap-12345678`
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `delete` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx OpenZFS snapshot using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx OpenZFS snapshot using the `id`. For example:
+
+```console
+% terraform import aws_fsx_openzfs_snapshot.example fsvolsnap-543ab12b1ca672f33
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_openzfs_volume.html.markdown b/website/docs/cdktf/python/r/fsx_openzfs_volume.html.markdown
new file mode 100644
index 00000000000..7c06f21271e
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_openzfs_volume.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_openzfs_volume"
+description: |-
+  Manages an Amazon FSx for OpenZFS volume.
+---
+
+
+# Resource: aws_fsx_openzfs_volume
+
+Manages an Amazon FSx for OpenZFS volume.
+See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
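+# Note: `aws_fsx_openzfs_file_system_test` below refers to an assumed
+# `aws_fsx_openzfs_file_system` resource named "test" defined elsewhere.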
+#
+from imports.aws.fsx_openzfs_volume import FsxOpenzfsVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxOpenzfsVolume(self, "test",
+            name="testvolume",
+            parent_volume_id=Token.as_string(aws_fsx_openzfs_file_system_test.root_volume_id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Volume. You can use a maximum of 203 alphanumeric characters, plus the underscore (_) special character.
+* `parent_volume_id` - (Required) The ID of the volume that will be the parent volume for the volume being created. This can be the root volume, created with the `aws_fsx_openzfs_file_system` resource and referenced via its `root_volume_id`, or the `id` of another `aws_fsx_openzfs_volume`.
+* `origin_snapshot` - (Optional) The ARN of the source snapshot to create the volume from.
+* `copy_tags_to_snapshots` - (Optional) A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
+* `data_compression_type` - (Optional) Method used to compress the data on the volume. Valid values are `NONE` or `ZSTD`. Child volumes that don't specify a compression option will inherit it from the parent volume. This option on the file system applies to the root volume.
+* `nfs_exports` - (Optional) NFS export configuration for the root volume. Exactly 1 item. See [NFS Exports](#nfs-exports) below.
+* `read_only` - (Optional) Specifies whether the volume is read-only. Default is false.
+* `record_size_kib` - (Optional) The record size of an OpenZFS volume, in kibibytes (KiB). Valid values are `4`, `8`, `16`, `32`, `64`, `128`, `256`, `512`, or `1024` KiB. The default is `128` KiB.
+* `storage_capacity_quota_gib` - (Optional) The maximum amount of storage in gibibytes (GiB) that the volume can use from its parent.
+* `storage_capacity_reservation_gib` - (Optional) The amount of storage in gibibytes (GiB) to reserve from the parent volume.
+* `user_and_group_quotas` - (Optional) Specify how much storage users or groups can use on the volume. Maximum of 100 items. See [User and Group Quotas](#user-and-group-quotas) below.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### NFS Exports
+
+* `client_configurations` - (Required) A list of configuration objects that contain the client and options for mounting the OpenZFS file system. Maximum of 25 items. See [Client Configurations](#client-configurations) below.
+
+### Client Configurations
+
+* `clients` - (Required) A value that specifies who can mount the file system. You can provide a wildcard character (`*`), an IP address (`0.0.0.0`), or a CIDR address (`192.0.2.0/24`). By default, Amazon FSx uses the wildcard character when specifying the client.
+* `options` - (Required) The options to use when mounting the file system. Maximum of 20 items. See the [Linux NFS exports man page](https://linux.die.net/man/5/exports) for more information. `crossmount` and `sync` are used by default.
+
+### User and Group Quotas
+
+* `id` - (Required) The ID of the user or group. Valid values are between `0` and `2147483647`.
+* `storage_capacity_quota_gib` - (Required) The amount of storage that the user or group can use in gibibytes (GiB). Valid values are between `0` and `2147483647`.
+* `type` - (Required) A value that specifies whether the quota applies to a user or group. Valid values are `USER` or `GROUP`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the volume.
+* `id` - Identifier of the volume, e.g., `fsvol-12345678`
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Volumes using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx Volumes using the `id`. For example:
+
+```console
+% terraform import aws_fsx_openzfs_volume.example fsvol-543ab12b1ca672f33
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/fsx_windows_file_system.html.markdown b/website/docs/cdktf/python/r/fsx_windows_file_system.html.markdown
new file mode 100644
index 00000000000..9cedd6adffd
--- /dev/null
+++ b/website/docs/cdktf/python/r/fsx_windows_file_system.html.markdown
@@ -0,0 +1,184 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_windows_file_system"
+description: |-
+  Manages an FSx Windows File System.
+---
+
+
+# Resource: aws_fsx_windows_file_system
+
+Manages an FSx Windows File System. See the [FSx Windows Guide](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/what-is.html) for more information.
+
+~> **NOTE:** Either the `active_directory_id` argument or `self_managed_active_directory` configuration block must be specified.
+
+## Example Usage
+
+### Using AWS Directory Service
+
+Additional information for using AWS Directory Service with Windows File Systems can be found in the [FSx Windows Guide](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/fsx-aws-managed-ad.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
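+#
+# Note: `aws_directory_service_directory_example`, `aws_kms_key_example`, and
+# `aws_subnet_example` below refer to resources assumed to be defined elsewhere.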
+#
+from imports.aws.fsx_windows_file_system import FsxWindowsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxWindowsFileSystem(self, "example",
+            active_directory_id=Token.as_string(aws_directory_service_directory_example.id),
+            kms_key_id=Token.as_string(aws_kms_key_example.arn),
+            storage_capacity=300,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            throughput_capacity=1024
+        )
+```
+
+### Using a Self-Managed Microsoft Active Directory
+
+Additional information for using AWS Directory Service with Windows File Systems can be found in the [FSx Windows Guide](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/self-managed-AD.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.fsx_windows_file_system import FsxWindowsFileSystem, FsxWindowsFileSystemSelfManagedActiveDirectory
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        FsxWindowsFileSystem(self, "example",
+            kms_key_id=Token.as_string(aws_kms_key_example.arn),
+            self_managed_active_directory=FsxWindowsFileSystemSelfManagedActiveDirectory(
+                dns_ips=["10.0.0.111", "10.0.0.222"],
+                domain_name="corp.example.com",
+                password="avoid-plaintext-passwords",
+                username="Admin"
+            ),
+            storage_capacity=300,
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            throughput_capacity=1024
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `subnet_ids` - (Required) A list of IDs for the subnets that the file system will be accessible from. To specify more than a single subnet set `deployment_type` to `MULTI_AZ_1`.
+* `throughput_capacity` - (Required) Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `8` and maximum of `2048`.
+
+The following arguments are optional:
+
+* `active_directory_id` - (Optional) The ID for an existing Microsoft Active Directory instance that the file system should join when it's created. Cannot be specified with `self_managed_active_directory`.
+* `aliases` - (Optional) An array of DNS alias names that you want to associate with the Amazon FSx file system. For more information, see [Working with DNS Aliases](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/managing-dns-aliases.html).
+* `audit_log_configuration` - (Optional) The configuration that Amazon FSx for Windows File Server uses to audit and log user accesses of files, folders, and file shares on the Amazon FSx for Windows File Server file system. See below.
+* `automatic_backup_retention_days` - (Optional) The number of days to retain automatic backups. Minimum of `0` and maximum of `90`. Defaults to `7`. Set to `0` to disable.
+* `backup_id` - (Optional) The ID of the source backup to create the filesystem from.
+* `copy_tags_to_backups` - (Optional) A boolean flag indicating whether tags on the file system should be copied to backups. Defaults to `false`.
+* `daily_automatic_backup_start_time` - (Optional) The preferred time (in `HH:MM` format) to take daily automatic backups, in the UTC time zone.
+* `deployment_type` - (Optional) Specifies the file system deployment type. Valid values are `MULTI_AZ_1`, `SINGLE_AZ_1` and `SINGLE_AZ_2`. Default value is `SINGLE_AZ_1`.
+* `kms_key_id` - (Optional) ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
+* `preferred_subnet_id` - (Optional) Specifies the subnet in which you want the preferred file server to be located. Required when the deployment type is `MULTI_AZ_1`.
+* `security_group_ids` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `self_managed_active_directory` - (Optional) Configuration block that Amazon FSx uses to join the Windows File Server instance to your self-managed (including on-premises) Microsoft Active Directory (AD) directory. Cannot be specified with `active_directory_id`. Detailed below.
+* `skip_final_backup` - (Optional) When enabled, will skip the default final backup taken when the file system is deleted. This configuration must be applied separately before attempting to delete the resource to have the desired behavior. Defaults to `false`.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `storage_capacity` - (Optional) Storage capacity (GiB) of the file system. Minimum of 32 and maximum of 65536. If the storage type is set to `HDD`, the minimum value is 2000. Required when not creating the filesystem from a backup.
+* `storage_type` - (Optional) Specifies the storage type. Valid values are `SSD` and `HDD`. `HDD` is supported on `SINGLE_AZ_2` and `MULTI_AZ_1` Windows file system deployment types. Default value is `SSD`.
+* `weekly_maintenance_start_time` - (Optional) The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
+
+### self_managed_active_directory
+
+The `self_managed_active_directory` configuration block supports the following arguments:
+
+* `dns_ips` - (Required) A list of up to two IP addresses of DNS servers or domain controllers in the self-managed AD directory. The IP addresses need to be either in the same VPC CIDR range as the file system or in the private IP version 4 (IPv4) address ranges as specified in [RFC 1918](https://tools.ietf.org/html/rfc1918).
+* `domain_name` - (Required) The fully qualified domain name of the self-managed AD directory. For example, `corp.example.com`.
+* `password` - (Required) The password for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `username` - (Required) The user name for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `file_system_administrators_group` - (Optional) The name of the domain group whose members are granted administrative privileges for the file system. Administrative privileges include taking ownership of files and folders, and setting audit controls (audit ACLs) on files and folders. The group that you specify must already exist in your domain. Defaults to `Domain Admins`.
+* `organizational_unit_distinguished_name` - (Optional) The fully qualified distinguished name of the organizational unit within your self-managed AD directory that the Windows File Server instance will join. For example, `OU=FSx,DC=yourdomain,DC=corp,DC=com`. Only accepts OU as the direct parent of the file system. If none is provided, the FSx file system is created in the default location of your self-managed AD directory. To learn more, see [RFC 2253](https://tools.ietf.org/html/rfc2253).
+
+### audit_log_configuration
+
+* `audit_log_destination` - (Optional) The Amazon Resource Name (ARN) for the destination of the audit logs. The destination can be any Amazon CloudWatch Logs log group ARN or Amazon Kinesis Data Firehose delivery stream ARN. Can be specified when `file_access_audit_log_level` and `file_share_access_audit_log_level` are not set to `DISABLED`. The name of the Amazon CloudWatch Logs log group must begin with the `/aws/fsx` prefix. The name of the Amazon Kinesis Data Firehose delivery stream must begin with the `aws-fsx` prefix. If you do not provide a destination in `audit_log_destination`, Amazon FSx will create and use a log stream in the CloudWatch Logs `/aws/fsx/windows` log group.
+* `file_access_audit_log_level` - (Optional) Sets which attempt type is logged by Amazon FSx for file and folder accesses. Valid values are `SUCCESS_ONLY`, `FAILURE_ONLY`, `SUCCESS_AND_FAILURE`, and `DISABLED`. Default value is `DISABLED`.
+* `file_share_access_audit_log_level` - (Optional) Sets which attempt type is logged by Amazon FSx for file share accesses. Valid values are `SUCCESS_ONLY`, `FAILURE_ONLY`, `SUCCESS_AND_FAILURE`, and `DISABLED`. Default value is `DISABLED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `dns_name` - DNS name for the file system, e.g., `fs-12345678.corp.example.com` (domain name matching the Active Directory domain name)
+* `id` - Identifier of the file system (e.g. `fs-12345678`).
+* `network_interface_ids` - Set of Elastic Network Interface identifiers from which the file system is accessible.
+* `owner_id` - AWS account identifier that created the file system.
+* `preferred_file_server_ip` - The IP address of the primary, or preferred, file server.
+* `remote_administration_endpoint` - For `MULTI_AZ_1` deployment types, use this endpoint when performing administrative tasks on the file system using Amazon FSx Remote PowerShell. For `SINGLE_AZ_1` deployment types, this is the DNS name of the file system.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpc_id` - Identifier of the Virtual Private Cloud for the file system.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `45m`)
+* `delete` - (Default `30m`)
+* `update` - (Default `45m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import FSx File Systems using the `id`. For example:
+
+```console
+% terraform import aws_fsx_windows_file_system.example fs-543ab12b1ca672f33
+```
+
+Certain resource arguments, like `security_group_ids` and the `self_managed_active_directory` configuration block `password`, do not have an FSx API method for reading the information after creation. If these arguments are set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# Note: `aws_security_group_example` stands in for an `aws_security_group`
+# resource assumed to be defined elsewhere in the stack.
+from imports.aws.fsx_windows_file_system import FsxWindowsFileSystem
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, subnet_ids, throughput_capacity):
+        super().__init__(scope, name)
+        FsxWindowsFileSystem(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["security_group_ids"]
+            ),
+            security_group_ids=[Token.as_string(aws_security_group_example.id)],
+            subnet_ids=subnet_ids,
+            throughput_capacity=throughput_capacity
+        )
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/gamelift_alias.html.markdown b/website/docs/cdktf/python/r/gamelift_alias.html.markdown
new file mode 100644
index 00000000000..4784c5861ad
--- /dev/null
+++ b/website/docs/cdktf/python/r/gamelift_alias.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_alias"
+description: |-
+  Provides a GameLift Alias resource.
+---
+
+
+# Resource: aws_gamelift_alias
+
+Provides a GameLift Alias resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.gamelift_alias import GameliftAlias, GameliftAliasRoutingStrategy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GameliftAlias(self, "example",
+            description="Example Description",
+            name="example-alias",
+            routing_strategy=GameliftAliasRoutingStrategy(
+                message="Example Message",
+                type="TERMINAL"
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the alias.
+* `description` - (Optional) Description of the alias.
+* `routing_strategy` - (Required) Specifies the fleet and/or routing type to use for the alias.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `routing_strategy`
+
+* `fleet_id` - (Optional) ID of the GameLift Fleet to point the alias to.
+* `message` - (Optional) Message text to be used with the `TERMINAL` routing strategy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Alias ID.
+* `arn` - Alias ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Aliases using the ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import GameLift Aliases using the ID. For example:
+
+```console
+% terraform import aws_gamelift_alias.example 
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/gamelift_build.html.markdown b/website/docs/cdktf/python/r/gamelift_build.html.markdown
new file mode 100644
index 00000000000..fe4991e89e1
--- /dev/null
+++ b/website/docs/cdktf/python/r/gamelift_build.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_build"
+description: |-
+  Provides a GameLift Build resource.
+---
+
+
+
+# Resource: aws_gamelift_build
+
+Provides a GameLift Build resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.gamelift_build import GameliftBuild
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GameliftBuild(self, "test",
+            name="example-build",
+            operating_system="WINDOWS_2012",
+            storage_location=GameliftBuildStorageLocation(
+                bucket=Token.as_string(aws_s3_bucket_test.id),
+                key=Token.as_string(aws_s3_object_test.key),
+                role_arn=Token.as_string(aws_iam_role_test.arn)
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the build.
+* `operating_system` - (Required) Operating system that the game server binaries are built to run on, e.g., `WINDOWS_2012`, `AMAZON_LINUX` or `AMAZON_LINUX_2`.
+* `storage_location` - (Required) Information indicating where your game build files are stored. See below.
+* `version` - (Optional) Version that is associated with this build.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `storage_location`
+
+* `bucket` - (Required) Name of your S3 bucket.
+* `key` - (Required) Name of the zip file containing your build files.
+* `role_arn` - (Required) ARN of the access role that allows Amazon GameLift to access your S3 bucket.
+* `object_version` - (Optional) A specific version of the file. If not set, the latest version of the file is retrieved.
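+
+The example above assumes the S3 bucket, object, and IAM role referenced in `storage_location` already exist in the stack. A rough sketch of those supporting resources follows; the names are illustrative only, and the `gamelift.amazonaws.com` trust relationship is what lets GameLift assume the role:
+
+```python
+from imports.aws.iam_role import IamRole
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_object import S3Object
+
+# Bucket and zipped build artifact that `storage_location` points at.
+bucket = S3Bucket(self, "build_bucket", bucket="example-gamelift-builds")
+build_zip = S3Object(self, "build_zip",
+    bucket=bucket.id,
+    key="build.zip",
+    source="./build.zip"
+)
+# Role that GameLift assumes when fetching the build files from S3.
+role = IamRole(self, "build_role",
+    name="gamelift-build-role",
+    assume_role_policy='{"Version": "2012-10-17", "Statement": [{"Effect": "Allow", "Principal": {"Service": "gamelift.amazonaws.com"}, "Action": "sts:AssumeRole"}]}'
+)
+```
+
+A real role would additionally need an attached policy granting `s3:GetObject` (and, for versioned objects, `s3:GetObjectVersion`) on the build objects.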
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - GameLift Build ID.
+* `arn` - GameLift Build ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Builds using the ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import GameLift Builds using the ID. For example:
+
+```console
+% terraform import aws_gamelift_build.example 
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/gamelift_fleet.html.markdown b/website/docs/cdktf/python/r/gamelift_fleet.html.markdown
new file mode 100644
index 00000000000..056c4e61d38
--- /dev/null
+++ b/website/docs/cdktf/python/r/gamelift_fleet.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_fleet"
+description: |-
+  Provides a GameLift Fleet resource.
+---
+
+
+
+# Resource: aws_gamelift_fleet
+
+Provides a GameLift Fleet resource.
+
+## Example Usage
+
+```terraform
+resource "aws_gamelift_fleet" "example" {
+  build_id          = aws_gamelift_build.example.id
+  ec2_instance_type = "t2.micro"
+  fleet_type        = "ON_DEMAND"
+  name              = "example-fleet-name"
+
+  runtime_configuration {
+    server_process {
+      concurrent_executions = 1
+      launch_path           = "C:\\game\\GomokuServer.exe"
+    }
+  }
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `build_id` - (Optional) ID of the GameLift Build to be deployed on the fleet.
+* `certificate_configuration` - (Optional) Prompts GameLift to generate a TLS/SSL certificate for the fleet. See [certificate_configuration](#certificate_configuration).
+* `description` - (Optional) Human-readable description of the fleet.
+* `ec2_inbound_permission` - (Optional) Range of IP addresses and port settings that permit inbound traffic to access server processes running on the fleet. See below.
+* `ec2_instance_type` - (Required) Name of an EC2 instance type, e.g., `t2.micro`.
+* `fleet_type` - (Optional) Type of fleet. This value must be `ON_DEMAND` or `SPOT`. Defaults to `ON_DEMAND`.
+* `instance_role_arn` - (Optional) ARN of an IAM role that instances in the fleet can assume.
+* `metric_groups` - (Optional) List of names of metric groups to add this fleet to. A metric group tracks metrics across all fleets in the group. Defaults to `default`.
+* `name` - (Required) The name of the fleet.
+* `new_game_session_protection_policy` - (Optional) Game session protection policy to apply to all instances in this fleet, e.g., `FullProtection`. Defaults to `NoProtection`.
+* `resource_creation_limit_policy` - (Optional) Policy that limits the number of game sessions an individual player can create over a span of time for this fleet. See below.
+* `runtime_configuration` - (Optional) Instructions for launching server processes on each instance in the fleet. See below.
+* `script_id` - (Optional) ID of the GameLift Script to be deployed on the fleet.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
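+
+Because this page documents the CDKTF (Python) bindings while the example above is shown in HCL, a rough Python equivalent may help. This is a sketch: the nested struct names are assumed from the binding conventions used elsewhere in these docs, and `example_build` stands in for a `GameliftBuild` defined elsewhere in the stack.
+
+```python
+from imports.aws.gamelift_fleet import (GameliftFleet,
+    GameliftFleetRuntimeConfiguration,
+    GameliftFleetRuntimeConfigurationServerProcess)
+
+# One server process per instance, launched from the installed build root.
+GameliftFleet(self, "example",
+    build_id=example_build.id,
+    ec2_instance_type="t2.micro",
+    fleet_type="ON_DEMAND",
+    name="example-fleet-name",
+    runtime_configuration=GameliftFleetRuntimeConfiguration(
+        server_process=[GameliftFleetRuntimeConfigurationServerProcess(
+            concurrent_executions=1,
+            launch_path="C:\\game\\GomokuServer.exe"
+        )
+        ]
+    )
+)
+```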
+
+### Nested Fields
+
+#### `certificate_configuration`
+
+* `certificate_type` - (Optional) Indicates whether a TLS/SSL certificate is generated for a fleet. Valid values are `DISABLED` and `GENERATED`. Default value is `DISABLED`.
+
+#### `ec2_inbound_permission`
+
+* `from_port` - (Required) Starting value for a range of allowed port numbers.
+* `ip_range` - (Required) Range of allowed IP addresses expressed in CIDR notation, e.g., `000.000.000.000/[subnet mask]` or `0.0.0.0/[subnet mask]`.
+* `protocol` - (Required) Network communication protocol used by the fleet, e.g., `TCP` or `UDP`.
+* `to_port` - (Required) Ending value for a range of allowed port numbers. Port numbers are end-inclusive. This value must be higher than `from_port`.
+
+#### `resource_creation_limit_policy`
+
+* `new_game_sessions_per_creator` - (Optional) Maximum number of game sessions that an individual can create during the policy period.
+* `policy_period_in_minutes` - (Optional) Time span used in evaluating the resource creation limit policy.
+
+#### `runtime_configuration`
+
+* `game_session_activation_timeout_seconds` - (Optional) Maximum amount of time (in seconds) that a game session can remain in status `ACTIVATING`.
+* `max_concurrent_game_session_activations` - (Optional) Maximum number of game sessions with status `ACTIVATING` to allow on an instance simultaneously.
+* `server_process` - (Optional) Collection of server process configurations that describe which server processes to run on each instance in a fleet. See below.
+
+#### `server_process`
+
+* `concurrent_executions` - (Required) Number of server processes using this configuration to run concurrently on an instance.
+* `launch_path` - (Required) Location of the server executable in a game build. All game builds are installed on instances at the root: for Windows instances `C:\game`, and for Linux instances `/local/game`.
+* `parameters` - (Optional) List of parameters to pass to the server executable on launch.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Fleet ID.
+* `arn` - Fleet ARN.
+* `build_arn` - Build ARN.
+* `operating_system` - Operating system of the fleet's computing resources.
+* `script_arn` - Script ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `70m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Fleets using the ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import GameLift Fleets using the ID. 
For example:
+
+```console
+% terraform import aws_gamelift_fleet.example 
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/gamelift_game_server_group.markdown b/website/docs/cdktf/python/r/gamelift_game_server_group.markdown
new file mode 100644
index 00000000000..fefaf72a34b
--- /dev/null
+++ b/website/docs/cdktf/python/r/gamelift_game_server_group.markdown
@@ -0,0 +1,233 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_game_server_group"
+description: |-
+  Provides a GameLift Game Server Group resource.
+---
+
+
+
+# Resource: aws_gamelift_game_server_group
+
+Provides a GameLift Game Server Group resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.gamelift_game_server_group import GameliftGameServerGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GameliftGameServerGroup(self, "example",
+            depends_on=[aws_iam_role_policy_attachment_example],
+            game_server_group_name="example",
+            instance_definition=[GameliftGameServerGroupInstanceDefinition(
+                instance_type="c5.large"
+            ), GameliftGameServerGroupInstanceDefinition(
+                instance_type="c5a.large"
+            )
+            ],
+            launch_template=GameliftGameServerGroupLaunchTemplate(
+                id=Token.as_string(aws_launch_template_example.id)
+            ),
+            max_size=1,
+            min_size=1,
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+Full usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.gamelift_game_server_group import GameliftGameServerGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GameliftGameServerGroup(self, "example",
+            auto_scaling_policy=GameliftGameServerGroupAutoScalingPolicy(
+                estimated_instance_warmup=60,
+                target_tracking_configuration=GameliftGameServerGroupAutoScalingPolicyTargetTrackingConfiguration(
+                    target_value=75
+                )
+            ),
+            balancing_strategy="SPOT_ONLY",
+            depends_on=[aws_iam_role_policy_attachment_example],
+            game_server_group_name="example",
+            game_server_protection_policy="FULL_PROTECTION",
+            instance_definition=[GameliftGameServerGroupInstanceDefinition(
+                instance_type="c5.large",
+                weighted_capacity="1"
+            ), GameliftGameServerGroupInstanceDefinition(
+                instance_type="c5.2xlarge",
+                weighted_capacity="2"
+            )
+            ],
+            launch_template=GameliftGameServerGroupLaunchTemplate(
+                id=Token.as_string(aws_launch_template_example.id),
+                version="1"
+            ),
+            max_size=1,
+            min_size=1,
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            tags={
+                "Name": "example"
+            },
+            vpc_subnets=["subnet-12345678", "subnet-23456789"]
+        )
+```
+
+### Example IAM Role for GameLift Game Server Group
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.data_aws_partition import DataAwsPartition +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["autoscaling.amazonaws.com", "gamelift.amazonaws.com" + ], + type="Service" + ) + ] + ) + ] + ) + current = DataAwsPartition(self, "current") + example = IamRole(self, "example", + assume_role_policy=Token.as_string(assume_role.json), + name="gamelift-game-server-group-example" + ) + aws_iam_role_policy_attachment_example = IamRolePolicyAttachment(self, "example_3", + policy_arn="arn:${" + current.partition + "}:iam::aws:policy/GameLiftGameServerGroupPolicy", + role=example.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_attachment_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `balancing_strategy` - (Optional) Indicates how GameLift FleetIQ balances the use of Spot Instances and On-Demand Instances. + Valid values: `SPOT_ONLY`, `SPOT_PREFERRED`, `ON_DEMAND_ONLY`. Defaults to `SPOT_PREFERRED`. +* `game_server_group_name` - (Required) Name of the game server group. + This value is used to generate unique ARN identifiers for the EC2 Auto Scaling group and the GameLift FleetIQ game server group. +* `game_server_protection_policy` - (Optional) Indicates whether instances in the game server group are protected from early termination. + Unprotected instances that have active game servers running might be terminated during a scale-down event, + causing players to be dropped from the game. + Protected instances cannot be terminated while there are active game servers running except in the event + of a forced game server group deletion. + Valid values: `NO_PROTECTION`, `FULL_PROTECTION`. Defaults to `NO_PROTECTION`. +* `max_size` - (Required) The maximum number of instances allowed in the EC2 Auto Scaling group. + During automatic scaling events, GameLift FleetIQ and EC2 do not scale up the group above this maximum. +* `min_size` - (Required) The minimum number of instances allowed in the EC2 Auto Scaling group. + During automatic scaling events, GameLift FleetIQ and EC2 do not scale down the group below this minimum. +* `role_arn` - (Required) ARN for an IAM role that allows Amazon GameLift to access your EC2 Auto Scaling groups. +* `tags` - (Optional) Key-value map of resource tags +* `vpc_subnets` - (Optional) A list of VPC subnets to use with instances in the game server group. + By default, all GameLift FleetIQ-supported Availability Zones are used. + +### `auto_scaling_policy` + +Configuration settings to define a scaling policy for the Auto Scaling group that is optimized for game hosting. +The scaling policy uses the metric `PercentUtilizedGameServers` to maintain a buffer of idle game servers that +can immediately accommodate new games and players. 
+
+* `estimated_instance_warmup` - (Optional) Length of time, in seconds, it takes for a new instance to start
+  new game server processes and register with GameLift FleetIQ.
+  Specifying a warm-up time can be useful, particularly with game servers that take a long time to start up,
+  because it avoids prematurely starting new instances. Defaults to `60`.
+
+#### `target_tracking_configuration`
+
+Settings for a target-based scaling policy applied to the Auto Scaling group.
+These settings are used to create a target-based policy that tracks the GameLift FleetIQ metric `PercentUtilizedGameServers`
+and specifies a target value for the metric.
+
+* `target_value` - (Required) Desired value to use with a game server group target-based scaling policy.
+
+### `instance_definition`
+
+The EC2 instance types and sizes to use in the Auto Scaling group.
+The instance definitions must specify at least two different instance types that are supported by GameLift FleetIQ.
+
+* `instance_type` - (Required) An EC2 instance type.
+* `weighted_capacity` - (Optional) Instance weighting that indicates how much this instance type contributes
+  to the total capacity of a game server group.
+  Instance weights are used by GameLift FleetIQ to calculate the instance type's cost per unit hour and better identify
+  the most cost-effective options.
+
+### `launch_template`
+
+The EC2 launch template that contains configuration settings and game server code to be deployed to all instances in the game server group.
+You can specify the template using either the template name or ID.
+
+* `id` - (Optional) A unique identifier for an existing EC2 launch template.
+* `name` - (Optional) A readable identifier for an existing EC2 launch template.
+* `version` - (Optional) The version of the EC2 launch template to use. If none is set, the default is the first version created.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the GameLift Game Server Group.
+* `arn` - The ARN of the GameLift Game Server Group.
+* `auto_scaling_group_arn` - The ARN of the created EC2 Auto Scaling group.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Game Server Groups using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import GameLift Game Server Groups using the `name`. For example:
+
+```console
+% terraform import aws_gamelift_game_server_group.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/gamelift_game_session_queue.html.markdown b/website/docs/cdktf/python/r/gamelift_game_session_queue.html.markdown
new file mode 100644
index 00000000000..3b123da232a
--- /dev/null
+++ b/website/docs/cdktf/python/r/gamelift_game_session_queue.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_game_session_queue"
+description: |-
+  Provides a GameLift Game Session Queue resource.
+---
+
+
+
+# Resource: aws_gamelift_game_session_queue
+
+Provides a GameLift Game Session Queue resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.gamelift_game_session_queue import GameliftGameSessionQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GameliftGameSessionQueue(self, "test",
+            destinations=[us_west2_fleet.arn, eu_central1_fleet.arn],
+            name="example-session-queue",
+            notification_target=game_session_queue_notifications.arn,
+            player_latency_policy=[GameliftGameSessionQueuePlayerLatencyPolicy(
+                maximum_individual_player_latency_milliseconds=100,
+                policy_duration_seconds=5
+            ), GameliftGameSessionQueuePlayerLatencyPolicy(
+                maximum_individual_player_latency_milliseconds=200
+            )
+            ],
+            timeout_in_seconds=60
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the session queue.
+* `timeout_in_seconds` - (Required) Maximum time a game session request can remain in the queue.
+* `custom_event_data` - (Optional) Information to be added to all events that are related to this game session queue.
+* `destinations` - (Optional) List of fleet/alias ARNs used by session queue for placing game sessions.
+* `notification_target` - (Optional) An SNS topic ARN that is set up to receive game session placement notifications.
+* `player_latency_policy` - (Optional) One or more policies used to choose a fleet based on player latency. See below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `player_latency_policy`
+
+* `maximum_individual_player_latency_milliseconds` - (Required) Maximum latency value that is allowed for any player.
+* `policy_duration_seconds` - (Optional) Length of time that the policy is enforced while placing a new game session. Absence of value for this attribute means that the policy is enforced until the queue times out.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Game Session Queue ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Game Session Queues using their `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import GameLift Game Session Queues using their `name`. 
For example:
+
+```console
+% terraform import aws_gamelift_game_session_queue.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/gamelift_script.html.markdown b/website/docs/cdktf/python/r/gamelift_script.html.markdown
new file mode 100644
index 00000000000..21f228eb17e
--- /dev/null
+++ b/website/docs/cdktf/python/r/gamelift_script.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_script"
+description: |-
+  Provides a GameLift Script resource.
+---
+
+
+
+# Resource: aws_gamelift_script
+
+Provides a GameLift Script resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.gamelift_script import GameliftScript
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GameliftScript(self, "example",
+            name="example-script",
+            storage_location=GameliftScriptStorageLocation(
+                bucket=Token.as_string(aws_s3_bucket_example.id),
+                key=Token.as_string(aws_s3_object_example.key),
+                role_arn=Token.as_string(aws_iam_role_example.arn)
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the script.
+* `storage_location` - (Optional) Information indicating where your game script files are stored. See below.
+* `version` - (Optional) Version that is associated with this script.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `zip_file` - (Optional) A data object containing your Realtime scripts and dependencies as a zip file. The zip file can have one or multiple files. Maximum size of a zip file is 5 MB.
+
+### Nested Fields
+
+#### `storage_location`
+
+* `bucket` - (Required) Name of your S3 bucket.
+* `key` - (Required) Name of the zip file containing your script files.
+* `role_arn` - (Required) ARN of the access role that allows Amazon GameLift to access your S3 bucket.
+* `object_version` - (Optional) A specific version of the file. If not set, the latest version of the file is retrieved.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - GameLift Script ID.
+* `arn` - GameLift Script ARN.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Scripts using the ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import GameLift Scripts using the ID. 
For example:
+
+```console
+% terraform import aws_gamelift_script.example 
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glacier_vault.html.markdown b/website/docs/cdktf/python/r/glacier_vault.html.markdown
new file mode 100644
index 00000000000..985a99de59c
--- /dev/null
+++ b/website/docs/cdktf/python/r/glacier_vault.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "S3 Glacier"
+layout: "aws"
+page_title: "AWS: aws_glacier_vault"
+description: |-
+  Provides a Glacier Vault.
+---
+
+
+
+# Resource: aws_glacier_vault
+
+Provides a Glacier Vault resource. You can refer to the [Glacier Developer Guide](https://docs.aws.amazon.com/amazonglacier/latest/dev/working-with-vaults.html) for a full explanation of the Glacier Vault functionality.
+
+~> **NOTE:** When removing a Glacier Vault, the Vault must be empty.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.glacier_vault import GlacierVault
+from imports.aws.sns_topic import SnsTopic
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        aws_sns_topic = SnsTopic(self, "aws_sns_topic",
+            name="glacier-sns-topic"
+        )
+        my_archive = DataAwsIamPolicyDocument(self, "my_archive",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["glacier:InitiateJob", "glacier:GetJobOutput"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="*"
+                )
+                ],
+                resources=["arn:aws:glacier:eu-west-1:432981146916:vaults/MyArchive"
+                ],
+                sid="add-read-only-perm"
+            )
+            ]
+        )
+        aws_glacier_vault_my_archive = GlacierVault(self, "my_archive_2",
+            access_policy=Token.as_string(my_archive.json),
+            name="MyArchive",
+            notification=GlacierVaultNotification(
+                events=["ArchiveRetrievalCompleted", "InventoryRetrievalCompleted"],
+                sns_topic=aws_sns_topic.arn
+            ),
+            tags={
+                "Test": "MyArchive"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_glacier_vault_my_archive.override_logical_id("my_archive")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Vault. Names can be between 1 and 255 characters long and the valid characters are a-z, A-Z, 0-9, '_' (underscore), '-' (hyphen), and '.' (period).
+* `access_policy` - (Optional) The policy document. This is a JSON formatted string.
+  The heredoc syntax or `file` function is helpful here. Use the [Glacier Developer Guide](https://docs.aws.amazon.com/amazonglacier/latest/dev/vault-access-policy.html) for more information on Glacier Vault Policy.
+* `notification` - (Optional) The notifications for the Vault. Fields documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+
+**notification** supports the following:
+
+* `events` - (Required) You can configure a vault to publish a notification for `ArchiveRetrievalCompleted` and `InventoryRetrievalCompleted` events.
+* `sns_topic` - (Required) The SNS Topic ARN.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `location` - The URI of the vault that was created.
+* `arn` - The ARN of the vault.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glacier Vaults using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glacier Vaults using the `name`. For example:
+
+```console
+% terraform import aws_glacier_vault.archive my_archive
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glacier_vault_lock.html.markdown b/website/docs/cdktf/python/r/glacier_vault_lock.html.markdown
new file mode 100644
index 00000000000..d41cd811d6f
--- /dev/null
+++ b/website/docs/cdktf/python/r/glacier_vault_lock.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "S3 Glacier"
+layout: "aws"
+page_title: "AWS: aws_glacier_vault_lock"
+description: |-
+  Manages a Glacier Vault Lock.
+---
+
+
+
+# Resource: aws_glacier_vault_lock
+
+Manages a Glacier Vault Lock. You can refer to the [Glacier Developer Guide](https://docs.aws.amazon.com/amazonglacier/latest/dev/vault-lock.html) for a full explanation of the Glacier Vault Lock functionality.
+
+~> **NOTE:** This resource allows you to test Glacier Vault Lock policies by setting the `complete_lock` argument to `false`. When testing policies in this manner, the Glacier Vault Lock automatically expires after 24 hours and Terraform will show this resource as needing recreation after that time. To permanently apply the policy, set the `complete_lock` argument to `true`. When changing `complete_lock` to `true`, it is expected the resource will show as recreating.
+
+!> **WARNING:** Once a Glacier Vault Lock is completed, it is immutable. The deletion of the Glacier Vault Lock is not possible and attempting to remove it from Terraform will return an error. Set the `ignore_deletion_error` argument to `true` and apply this configuration before attempting to delete this resource via Terraform or use `terraform state rm` to remove this resource from Terraform management.
+
+## Example Usage
+
+### Testing Glacier Vault Lock Policy
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.glacier_vault import GlacierVault +from imports.aws.glacier_vault_lock import GlacierVaultLock +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = GlacierVault(self, "example", + name="example" + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["glacier:DeleteArchive"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="NumericLessThanEquals", + values=["365"], + variable="glacier:ArchiveAgeinDays" + ) + ], + effect="Deny", + resources=[example.arn] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_glacier_vault_lock_example = GlacierVaultLock(self, "example_2", + complete_lock=False, + policy=Token.as_string(data_aws_iam_policy_document_example.json), + vault_name=example.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_glacier_vault_lock_example.override_logical_id("example") +``` + +### Permanently Applying Glacier Vault Lock Policy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glacier_vault_lock import GlacierVaultLock +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlacierVaultLock(self, "example", + complete_lock=True, + policy=Token.as_string(data_aws_iam_policy_document_example.json), + vault_name=Token.as_string(aws_glacier_vault_example.name) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `complete_lock` - (Required) Boolean whether to permanently apply this Glacier Lock Policy. Once completed, this cannot be undone. If set to `false`, the Glacier Lock Policy remains in a testing mode for 24 hours. After that time, the Glacier Lock Policy is automatically removed by Glacier and the Terraform resource will show as needing recreation. Changing this from `false` to `true` will show as resource recreation, which is expected. Changing this from `true` to `false` is not possible unless the Glacier Vault is recreated at the same time. +* `policy` - (Required) JSON string containing the IAM policy to apply as the Glacier Vault Lock policy. +* `vault_name` - (Required) The name of the Glacier Vault. +* `ignore_deletion_error` - (Optional) Allow Terraform to ignore the error returned when attempting to delete the Glacier Lock Policy. This can be used to delete or recreate the Glacier Vault via Terraform, for example, if the Glacier Vault Lock policy permits that action. This should only be used in conjunction with `complete_lock` being set to `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Glacier Vault name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glacier Vault Locks using the Glacier Vault name. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Glacier Vault Locks using the Glacier Vault name. For example: + +```console +% terraform import aws_glacier_vault_lock.example example-vault +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/globalaccelerator_accelerator.html.markdown b/website/docs/cdktf/python/r/globalaccelerator_accelerator.html.markdown new file mode 100644 index 00000000000..92e4e34b656 --- /dev/null +++ b/website/docs/cdktf/python/r/globalaccelerator_accelerator.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_accelerator" +description: |- + Provides a Global Accelerator accelerator. +--- + + + +# Resource: aws_globalaccelerator_accelerator + +Creates a Global Accelerator accelerator. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.globalaccelerator_accelerator import GlobalacceleratorAccelerator +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlobalacceleratorAccelerator(self, "example", + attributes=GlobalacceleratorAcceleratorAttributes( + flow_logs_enabled=True, + flow_logs_s3_bucket="example-bucket", + flow_logs_s3_prefix="flow-logs/" + ), + enabled=True, + ip_address_type="IPV4", + ip_addresses=["1.2.3.4"], + name="Example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the accelerator. +* `ip_address_type` - (Optional) The value for the address type. Defaults to `IPV4`. Valid values: `IPV4`, `DUAL_STACK`. +* `ip_addresses` - (Optional) The IP addresses to use for BYOIP accelerators. If not specified, the service assigns IP addresses. Valid values: 1 or 2 IPv4 addresses. +* `enabled` - (Optional) Indicates whether the accelerator is enabled. Defaults to `true`. Valid values: `true`, `false`. +* `attributes` - (Optional) The attributes of the accelerator. Fields documented below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +`attributes` supports the following arguments: + +* `flow_logs_enabled` - (Optional) Indicates whether flow logs are enabled. Defaults to `false`. Valid values: `true`, `false`. +* `flow_logs_s3_bucket` - (Optional) The name of the Amazon S3 bucket for the flow logs. Required if `flow_logs_enabled` is `true`. +* `flow_logs_s3_prefix` - (Optional) The prefix for the location in the Amazon S3 bucket for the flow logs. Required if `flow_logs_enabled` is `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the accelerator. +* `dns_name` - The DNS name of the accelerator. 
For example, `a5d53ff5ee6bca4ce.awsglobalaccelerator.com`.
+* `dual_stack_dns_name` - The Domain Name System (DNS) name that Global Accelerator creates that points to a dual-stack accelerator's four static IP addresses: two IPv4 addresses and two IPv6 addresses. For example, `a1234567890abcdef.dualstack.awsglobalaccelerator.com`.
+* `hosted_zone_id` - The Global Accelerator Route 53 zone ID that can be used to
+  route an [Alias Resource Record Set][1] to the Global Accelerator. This attribute
+  is simply an alias for the zone ID `Z2BJ6XQ5FK7U4H`.
+* `ip_sets` - IP address set associated with the accelerator.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+**ip_sets** exports the following attributes:
+
+* `ip_addresses` - A list of IP addresses in the IP address set.
+* `ip_family` - The type of IP addresses included in this IP set.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_AliasTarget.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator accelerators using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Global Accelerator accelerators using the `arn`. For example:
+
+```console
+% terraform import aws_globalaccelerator_accelerator.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/globalaccelerator_custom_routing_accelerator.html.markdown b/website/docs/cdktf/python/r/globalaccelerator_custom_routing_accelerator.html.markdown
new file mode 100644
index 00000000000..15d06e78d16
--- /dev/null
+++ b/website/docs/cdktf/python/r/globalaccelerator_custom_routing_accelerator.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "Global Accelerator"
+layout: "aws"
+page_title: "AWS: aws_globalaccelerator_custom_routing_accelerator"
+description: |-
+  Provides a Global Accelerator custom routing accelerator.
+---
+
+
+
+# Resource: aws_globalaccelerator_custom_routing_accelerator
+
+Creates a Global Accelerator custom routing accelerator.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.globalaccelerator_custom_routing_accelerator import GlobalacceleratorCustomRoutingAccelerator
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlobalacceleratorCustomRoutingAccelerator(self, "example",
+            attributes=GlobalacceleratorCustomRoutingAcceleratorAttributes(
+                flow_logs_enabled=True,
+                flow_logs_s3_bucket="example-bucket",
+                flow_logs_s3_prefix="flow-logs/"
+            ),
+            enabled=True,
+            ip_address_type="IPV4",
+            ip_addresses=["1.2.3.4"],
+            name="Example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of a custom routing accelerator.
+* `ip_address_type` - (Optional) The IP address type that an accelerator supports. For a custom routing accelerator, the value must be `"IPV4"`.
+* `ip_addresses` - (Optional) The IP addresses to use for BYOIP accelerators. If not specified, the service assigns IP addresses. Valid values: 1 or 2 IPv4 addresses.
+* `enabled` - (Optional) Indicates whether the accelerator is enabled. Defaults to `true`. Valid values: `true`, `false`.
+* `attributes` - (Optional) The attributes of the accelerator. Fields documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+`attributes` supports the following arguments:
+
+* `flow_logs_enabled` - (Optional) Indicates whether flow logs are enabled. Defaults to `false`. Valid values: `true`, `false`.
+* `flow_logs_s3_bucket` - (Optional) The name of the Amazon S3 bucket for the flow logs. Required if `flow_logs_enabled` is `true`.
+* `flow_logs_s3_prefix` - (Optional) The prefix for the location in the Amazon S3 bucket for the flow logs. Required if `flow_logs_enabled` is `true`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the custom accelerator.
+* `dns_name` - The DNS name of the accelerator. For example, `a5d53ff5ee6bca4ce.awsglobalaccelerator.com`.
+* `hosted_zone_id` - The Global Accelerator Route 53 zone ID that can be used to
+  route an [Alias Resource Record Set][1] to the Global Accelerator. This attribute
+  is simply an alias for the zone ID `Z2BJ6XQ5FK7U4H`.
+* `ip_sets` - IP address set associated with the accelerator.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+**ip_sets** exports the following attributes:
+
+* `ip_addresses` - A list of IP addresses in the IP address set.
+* `ip_family` - The type of IP addresses included in this IP set.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_AliasTarget.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator custom routing accelerators using the `arn`. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Global Accelerator custom routing accelerators using the `arn`. For example:
+
+```console
+% terraform import aws_globalaccelerator_custom_routing_accelerator.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/globalaccelerator_custom_routing_endpoint_group.html.markdown b/website/docs/cdktf/python/r/globalaccelerator_custom_routing_endpoint_group.html.markdown
new file mode 100644
index 00000000000..60f9a26af34
--- /dev/null
+++ b/website/docs/cdktf/python/r/globalaccelerator_custom_routing_endpoint_group.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "Global Accelerator"
+layout: "aws"
+page_title: "AWS: aws_globalaccelerator_custom_routing_endpoint_group"
+description: |-
+  Provides a Global Accelerator custom routing endpoint group.
+---
+
+
+
+# Resource: aws_globalaccelerator_custom_routing_endpoint_group
+
+Provides a Global Accelerator custom routing endpoint group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.globalaccelerator_custom_routing_endpoint_group import GlobalacceleratorCustomRoutingEndpointGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlobalacceleratorCustomRoutingEndpointGroup(self, "example",
+            destination_configuration=[GlobalacceleratorCustomRoutingEndpointGroupDestinationConfiguration(
+                from_port=80,
+                protocols=["TCP"],
+                to_port=8080
+            )
+            ],
+            endpoint_configuration=[GlobalacceleratorCustomRoutingEndpointGroupEndpointConfiguration(
+                endpoint_id=Token.as_string(aws_subnet_example.id)
+            )
+            ],
+            listener_arn=Token.as_string(aws_globalaccelerator_custom_routing_listener_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `listener_arn` - (Required) The Amazon Resource Name (ARN) of the custom routing listener.
+* `destination_configuration` - (Required) The port ranges and protocols for all endpoints in a custom routing endpoint group to accept client traffic on. Fields documented below.
+* `endpoint_configuration` - (Optional) The list of endpoint objects. Fields documented below.
+* `endpoint_group_region` - (Optional) The name of the AWS Region where the custom routing endpoint group is located.
+
+`destination_configuration` supports the following arguments:
+
+* `from_port` - (Required) The first port, inclusive, in the range of ports for the endpoint group that is associated with a custom routing accelerator.
+* `protocols` - (Required) The protocol for the endpoint group that is associated with a custom routing accelerator. The protocol can be either `"TCP"` or `"UDP"`.
+* `to_port` - (Required) The last port, inclusive, in the range of ports for the endpoint group that is associated with a custom routing accelerator. 
+
+`endpoint_configuration` supports the following arguments:
+
+* `endpoint_id` - (Optional) An ID for the endpoint. For custom routing accelerators, this is the virtual private cloud (VPC) subnet ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the custom routing endpoint group.
+* `arn` - The Amazon Resource Name (ARN) of the custom routing endpoint group.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator custom routing endpoint groups using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Global Accelerator custom routing endpoint groups using the `id`. For example:
+
+```console
+% terraform import aws_globalaccelerator_custom_routing_endpoint_group.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxx/endpoint-group/xxxxxxxx
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/globalaccelerator_custom_routing_listener.html.markdown b/website/docs/cdktf/python/r/globalaccelerator_custom_routing_listener.html.markdown
new file mode 100644
index 00000000000..68455a8e9d1
--- /dev/null
+++ b/website/docs/cdktf/python/r/globalaccelerator_custom_routing_listener.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Global Accelerator"
+layout: "aws"
+page_title: "AWS: aws_globalaccelerator_custom_routing_listener"
+description: |-
+  Provides a Global Accelerator custom routing listener.
+---
+
+
+
+# Resource: aws_globalaccelerator_custom_routing_listener
+
+Provides a Global Accelerator custom routing listener.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.globalaccelerator_custom_routing_accelerator import GlobalacceleratorCustomRoutingAccelerator
+from imports.aws.globalaccelerator_custom_routing_listener import GlobalacceleratorCustomRoutingListener
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = GlobalacceleratorCustomRoutingAccelerator(self, "example",
+            attributes=GlobalacceleratorCustomRoutingAcceleratorAttributes(
+                flow_logs_enabled=True,
+                flow_logs_s3_bucket="example-bucket",
+                flow_logs_s3_prefix="flow-logs/"
+            ),
+            enabled=True,
+            ip_address_type="IPV4",
+            name="Example"
+        )
+        aws_globalaccelerator_custom_routing_listener_example = GlobalacceleratorCustomRoutingListener(self, "example_1",
+            accelerator_arn=example.id,
+            port_range=[GlobalacceleratorCustomRoutingListenerPortRange(
+                from_port=80,
+                to_port=80
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match. + aws_globalaccelerator_custom_routing_listener_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accelerator_arn` - (Required) The Amazon Resource Name (ARN) of a custom routing accelerator. +* `port_range` - (Optional) The list of port ranges for the connections from clients to the accelerator. Fields documented below. + +`port_range` supports the following arguments: + +* `from_port` - (Optional) The first port in the range of ports, inclusive. +* `to_port` - (Optional) The last port in the range of ports, inclusive. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the custom routing listener. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator custom routing listeners using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Global Accelerator custom routing listeners using the `id`. For example: + +```console +% terraform import aws_globalaccelerator_custom_routing_listener.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/globalaccelerator_endpoint_group.html.markdown b/website/docs/cdktf/python/r/globalaccelerator_endpoint_group.html.markdown new file mode 100644 index 00000000000..8f505896141 --- /dev/null +++ b/website/docs/cdktf/python/r/globalaccelerator_endpoint_group.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_endpoint_group" +description: |- + Provides a Global Accelerator endpoint group. +--- + + + +# Resource: aws_globalaccelerator_endpoint_group + +Provides a Global Accelerator endpoint group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.globalaccelerator_endpoint_group import GlobalacceleratorEndpointGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlobalacceleratorEndpointGroup(self, "example", + endpoint_configuration=[GlobalacceleratorEndpointGroupEndpointConfiguration( + endpoint_id=Token.as_string(aws_lb_example.arn), + weight=100 + ) + ], + listener_arn=Token.as_string(aws_globalaccelerator_listener_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `listener_arn` - (Required) The Amazon Resource Name (ARN) of the listener. 
+
+* `endpoint_group_region` - (Optional) The name of the AWS Region where the endpoint group is located.
+* `health_check_interval_seconds` - (Optional) The time, in seconds, between each health check for an endpoint. Valid values are 10 and 30. The default value is 30.
+* `health_check_path` - (Optional) If the protocol is HTTP/S, then this specifies the path that is the destination for health check targets. The default value is slash (`/`). Terraform will only perform drift detection of its value when present in a configuration.
+* `health_check_port` - (Optional) The port that AWS Global Accelerator uses to check the health of endpoints that are part of this endpoint group. The default port is the listener port that this endpoint group is associated with. If the listener port is a list of ports, Global Accelerator uses the first port in the list.
+Terraform will only perform drift detection of its value when present in a configuration.
+* `health_check_protocol` - (Optional) The protocol that AWS Global Accelerator uses to check the health of endpoints that are part of this endpoint group. The default value is TCP.
+* `threshold_count` - (Optional) The number of consecutive health checks required to set the state of a healthy endpoint to unhealthy, or to set an unhealthy endpoint to healthy. The default value is 3.
+* `traffic_dial_percentage` - (Optional) The percentage of traffic to send to an AWS Region. Additional traffic is distributed to other endpoint groups for this listener. The default value is 100.
+* `endpoint_configuration` - (Optional) The list of endpoint objects. Fields documented below.
+* `port_override` - (Optional) Override specific listener ports used to route traffic to endpoints that are part of this endpoint group. Fields documented below.
+
+`endpoint_configuration` supports the following arguments:
+
+* `client_ip_preservation_enabled` - (Optional) Indicates whether client IP address preservation is enabled for an Application Load Balancer endpoint. See the [AWS documentation](https://docs.aws.amazon.com/global-accelerator/latest/dg/preserve-client-ip-address.html) for more details. The default value is `false`.
+**Note:** When client IP address preservation is enabled, the Global Accelerator service creates an EC2 Security Group in the VPC named `GlobalAccelerator` that must be deleted (potentially outside of Terraform) before the VPC will successfully delete. If this EC2 Security Group is not deleted, Terraform will retry the VPC deletion for a few minutes before reporting a `DependencyViolation` error. This cannot be resolved by re-running Terraform.
+* `endpoint_id` - (Optional) An ID for the endpoint. If the endpoint is a Network Load Balancer or Application Load Balancer, this is the Amazon Resource Name (ARN) of the resource. If the endpoint is an Elastic IP address, this is the Elastic IP address allocation ID.
+* `weight` - (Optional) The weight associated with the endpoint. When you add weights to endpoints, you configure AWS Global Accelerator to route traffic based on proportions that you specify.
+
+`port_override` supports the following arguments:
+
+* `endpoint_port` - (Required) The endpoint port that you want a listener port to be mapped to. This is the port on the endpoint, such as the Application Load Balancer or Amazon EC2 instance.
+* `listener_port` - (Required) The listener port that you want to map to a specific endpoint port. This is the port on which user traffic arrives at the Global Accelerator.
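+
+To see how these blocks fit together, the following is a minimal, hand-written sketch (not `cdktf convert` output) that pairs the health check arguments with an `endpoint_configuration` and a `port_override` block. The nested-block class names follow the usual cdktf naming pattern but are assumptions, as are the placeholder references `aws_globalaccelerator_listener_example` and `aws_lb_example`.
+
+```python
+# A hedged sketch, not generated by 'cdktf convert'. The nested-block class
+# names below follow the standard cdktf naming pattern and are assumptions.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.globalaccelerator_endpoint_group import (
+    GlobalacceleratorEndpointGroup,
+    GlobalacceleratorEndpointGroupEndpointConfiguration,
+    GlobalacceleratorEndpointGroupPortOverride
+)
+class EndpointGroupSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlobalacceleratorEndpointGroup(self, "example",
+            listener_arn=Token.as_string(aws_globalaccelerator_listener_example.id),
+            # Health checks run every 10 seconds against TCP port 8080.
+            health_check_interval_seconds=10,
+            health_check_port=8080,
+            health_check_protocol="TCP",
+            threshold_count=3,
+            # Send half of the traffic for this listener to this endpoint group.
+            traffic_dial_percentage=50,
+            endpoint_configuration=[GlobalacceleratorEndpointGroupEndpointConfiguration(
+                endpoint_id=Token.as_string(aws_lb_example.arn),
+                weight=100
+            )
+            ],
+            # Remap listener port 80 to port 8080 on the endpoints.
+            port_override=[GlobalacceleratorEndpointGroupPortOverride(
+                endpoint_port=8080,
+                listener_port=80
+            )
+            ]
+        )
+```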
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the endpoint group. +* `arn` - The Amazon Resource Name (ARN) of the endpoint group. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator endpoint groups using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Global Accelerator endpoint groups using the `id`. For example: + +```console +% terraform import aws_globalaccelerator_endpoint_group.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxx/endpoint-group/xxxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/globalaccelerator_listener.html.markdown b/website/docs/cdktf/python/r/globalaccelerator_listener.html.markdown new file mode 100644 index 00000000000..4867df75311 --- /dev/null +++ b/website/docs/cdktf/python/r/globalaccelerator_listener.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_listener" +description: |- + Provides a Global Accelerator listener. +--- + + + +# Resource: aws_globalaccelerator_listener + +Provides a Global Accelerator listener. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.globalaccelerator_accelerator import GlobalacceleratorAccelerator +from imports.aws.globalaccelerator_listener import GlobalacceleratorListener +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = GlobalacceleratorAccelerator(self, "example", + attributes=GlobalacceleratorAcceleratorAttributes( + flow_logs_enabled=True, + flow_logs_s3_bucket="example-bucket", + flow_logs_s3_prefix="flow-logs/" + ), + enabled=True, + ip_address_type="IPV4", + name="Example" + ) + aws_globalaccelerator_listener_example = GlobalacceleratorListener(self, "example_1", + accelerator_arn=example.id, + client_affinity="SOURCE_IP", + port_range=[GlobalacceleratorListenerPortRange( + from_port=80, + to_port=80 + ) + ], + protocol="TCP" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_globalaccelerator_listener_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accelerator_arn` - (Required) The Amazon Resource Name (ARN) of your accelerator. +* `client_affinity` - (Optional) Direct all requests from a user to the same endpoint. Valid values are `NONE`, `SOURCE_IP`. Default: `NONE`. 
If `NONE`, Global Accelerator uses the "five-tuple" properties of source IP address, source port, destination IP address, destination port, and protocol to select the hash value. If `SOURCE_IP`, Global Accelerator uses the "two-tuple" properties of source (client) IP address and destination IP address to select the hash value. +* `protocol` - (Optional) The protocol for the connections from clients to the accelerator. Valid values are `TCP`, `UDP`. +* `port_range` - (Optional) The list of port ranges for the connections from clients to the accelerator. Fields documented below. + +`port_range` supports the following arguments: + +* `from_port` - (Optional) The first port in the range of ports, inclusive. +* `to_port` - (Optional) The last port in the range of ports, inclusive. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the listener. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator listeners using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Global Accelerator listeners using the `id`. For example: + +```console +% terraform import aws_globalaccelerator_listener.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/glue_catalog_database.html.markdown b/website/docs/cdktf/python/r/glue_catalog_database.html.markdown new file mode 100644 index 00000000000..5179b2fea47 --- /dev/null +++ b/website/docs/cdktf/python/r/glue_catalog_database.html.markdown @@ -0,0 +1,115 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_catalog_database" +description: |- + Provides a Glue Catalog Database. +--- + + + +# Resource: aws_glue_catalog_database + +Provides a Glue Catalog Database Resource. You can refer to the [Glue Developer Guide](http://docs.aws.amazon.com/glue/latest/dg/populate-data-catalog.html) for a full explanation of the Glue Data Catalog functionality + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_catalog_database import GlueCatalogDatabase +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCatalogDatabase(self, "aws_glue_catalog_database", + name="MyCatalogDatabase" + ) +``` + +### Create Table Default Permissions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_catalog_database import GlueCatalogDatabase +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCatalogDatabase(self, "aws_glue_catalog_database", + create_table_default_permission=[GlueCatalogDatabaseCreateTableDefaultPermission( + permissions=["SELECT"], + principal=GlueCatalogDatabaseCreateTableDefaultPermissionPrincipal( + data_lake_principal_identifier="IAM_ALLOWED_PRINCIPALS" + ) + ) + ], + name="MyCatalogDatabase" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `catalog_id` - (Optional) ID of the Glue Catalog to create the database in. If omitted, this defaults to the AWS Account ID. +* `create_table_default_permission` - (Optional) Creates a set of default permissions on the table for principals. See [`create_table_default_permission`](#create_table_default_permission) below. +* `description` - (Optional) Description of the database. +* `location_uri` - (Optional) Location of the database (for example, an HDFS path). +* `name` - (Required) Name of the database. The acceptable characters are lowercase letters, numbers, and the underscore character. +* `parameters` - (Optional) List of key-value pairs that define parameters and properties of the database. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `target_database` - (Optional) Configuration block for a target database for resource linking. See [`target_database`](#target_database) below. + +### target_database + +* `catalog_id` - (Required) ID of the Data Catalog in which the database resides. +* `database_name` - (Required) Name of the catalog database. +* `region` - (Optional) Region of the target database. + +### create_table_default_permission + +* `permissions` - (Optional) The permissions that are granted to the principal. +* `principal` - (Optional) The principal who is granted permissions.. See [`principal`](#principal) below. + +#### principal + +* `data_lake_principal_identifier` - (Optional) An identifier for the Lake Formation principal. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Glue Catalog Database. +* `id` - Catalog ID and name of the database. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Catalog Databases using the `catalog_id:name`. If you have not set a Catalog ID specify the AWS Account ID that the database is in. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Glue Catalog Databases using the `catalog_id:name`. 
If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example:
+
+```console
+% terraform import aws_glue_catalog_database.database 123456789012:my_database
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_catalog_table.html.markdown b/website/docs/cdktf/python/r/glue_catalog_table.html.markdown
new file mode 100644
index 00000000000..5c8cf48a76e
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_catalog_table.html.markdown
@@ -0,0 +1,215 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_catalog_table"
+description: |-
+  Provides a Glue Catalog Table.
+---
+
+
+
+# Resource: aws_glue_catalog_table
+
+Provides a Glue Catalog Table Resource. You can refer to the [Glue Developer Guide](http://docs.aws.amazon.com/glue/latest/dg/populate-data-catalog.html) for a full explanation of the Glue Data Catalog functionality.
+
+## Example Usage
+
+### Basic Table
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_catalog_table import GlueCatalogTable
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueCatalogTable(self, "aws_glue_catalog_table",
+            database_name="MyCatalogDatabase",
+            name="MyCatalogTable"
+        )
+```
+
+### Parquet Table for Athena
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_catalog_table import GlueCatalogTable, GlueCatalogTableStorageDescriptor, GlueCatalogTableStorageDescriptorColumns, GlueCatalogTableStorageDescriptorSerDeInfo
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueCatalogTable(self, "aws_glue_catalog_table",
+            database_name="MyCatalogDatabase",
+            name="MyCatalogTable",
+            parameters={
+                "EXTERNAL": "TRUE",
+                "parquet.compression": "SNAPPY"
+            },
+            storage_descriptor=GlueCatalogTableStorageDescriptor(
+                columns=[GlueCatalogTableStorageDescriptorColumns(
+                    name="my_string",
+                    type="string"
+                ), GlueCatalogTableStorageDescriptorColumns(
+                    name="my_double",
+                    type="double"
+                ), GlueCatalogTableStorageDescriptorColumns(
+                    comment="",
+                    name="my_date",
+                    type="date"
+                ), GlueCatalogTableStorageDescriptorColumns(
+                    comment="",
+                    name="my_bigint",
+                    type="bigint"
+                ), GlueCatalogTableStorageDescriptorColumns(
+                    comment="",
+                    name="my_struct",
+                    type="struct<my_nested_string:string>"
+                )
+                ],
+                input_format="org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat",
+                location="s3://my-bucket/event-streams/my-stream",
+                output_format="org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat",
+                ser_de_info=GlueCatalogTableStorageDescriptorSerDeInfo(
+                    name="my-stream",
+                    parameters={
+                        "serialization.format": Token.as_string(1)
+                    },
+                    serialization_library="org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
+                )
+            ),
+            table_type="EXTERNAL_TABLE"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the table. For Hive compatibility, this must be entirely lowercase.
+* `database_name` - (Required) Name of the metadata database where the table metadata resides. For Hive compatibility, this must be all lowercase. 
+
+The following arguments are optional:
+
+* `catalog_id` - (Optional) ID of the Glue Catalog and database to create the table in. If omitted, this defaults to the AWS Account ID plus the database name.
+* `description` - (Optional) Description of the table.
+* `owner` - (Optional) Owner of the table.
+* `parameters` - (Optional) Properties associated with this table, as a list of key-value pairs.
+* `partition_index` - (Optional) Configuration block for a maximum of 3 partition indexes. See [`partition_index`](#partition_index) below.
+* `partition_keys` - (Optional) Configuration block of columns by which the table is partitioned. Only primitive types are supported as partition keys. See [`partition_keys`](#partition_keys) below.
+* `retention` - (Optional) Retention time for this table.
+* `storage_descriptor` - (Optional) Configuration block for information about the physical storage of this table. For more information, refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-tables.html#aws-glue-api-catalog-tables-StorageDescriptor). See [`storage_descriptor`](#storage_descriptor) below.
+* `table_type` - (Optional) Type of this table (EXTERNAL_TABLE, VIRTUAL_VIEW, etc.). While optional, some Athena DDL queries such as `ALTER TABLE` and `SHOW CREATE TABLE` will fail if this argument is empty.
+* `target_table` - (Optional) Configuration block of a target table for resource linking. See [`target_table`](#target_table) below.
+* `view_expanded_text` - (Optional) If the table is a view, the expanded text of the view; otherwise null.
+* `view_original_text` - (Optional) If the table is a view, the original text of the view; otherwise null.
+
+### partition_index
+
+~> **NOTE:** A `partition_index` cannot be added to an existing `glue_catalog_table`.
+This will destroy and recreate the table, possibly resulting in data loss.
+To add an index to an existing table, see the [`glue_partition_index` resource](/docs/providers/aws/r/glue_partition_index.html) for configuration details.
+
+* `index_name` - (Required) Name of the partition index.
+* `keys` - (Required) Keys for the partition index.
+
+### partition_keys
+
+* `comment` - (Optional) Free-form text comment.
+* `name` - (Required) Name of the Partition Key.
+* `type` - (Optional) Datatype of data in the Partition Key.
+
+### storage_descriptor
+
+* `bucket_columns` - (Optional) List of reducer grouping columns, clustering columns, and bucketing columns in the table.
+* `columns` - (Optional) Configuration block for columns in the table. See [`columns`](#columns) below.
+* `compressed` - (Optional) Whether the data in the table is compressed.
+* `input_format` - (Optional) Input format: SequenceFileInputFormat (binary), or TextInputFormat, or a custom format.
+* `location` - (Optional) Physical location of the table. By default this takes the form of the warehouse location, followed by the database location in the warehouse, followed by the table name.
+* `number_of_buckets` - (Optional) Must be specified if the table contains any dimension columns.
+* `output_format` - (Optional) Output format: SequenceFileOutputFormat (binary), or IgnoreKeyTextOutputFormat, or a custom format.
+* `parameters` - (Optional) User-supplied properties in key-value form.
+* `schema_reference` - (Optional) Object that references a schema stored in the AWS Glue Schema Registry. When creating a table, you can pass an empty list of columns for the schema, and instead use a schema reference. See [Schema Reference](#schema_reference) below.
+* `ser_de_info` - (Optional) Configuration block for serialization and deserialization ("SerDe") information. See [`ser_de_info`](#ser_de_info) below.
+* `skewed_info` - (Optional) Configuration block with information about values that appear very frequently in a column (skewed values). See [`skewed_info`](#skewed_info) below.
+* `sort_columns` - (Optional) Configuration block for the sort order of each bucket in the table. See [`sort_columns`](#sort_columns) below.
+* `stored_as_sub_directories` - (Optional) Whether the table data is stored in subdirectories.
+
+#### columns
+
+* `comment` - (Optional) Free-form text comment.
+* `name` - (Required) Name of the Column.
+* `parameters` - (Optional) Key-value pairs defining properties associated with the column.
+* `type` - (Optional) Datatype of data in the Column.
+
+#### schema_reference
+
+* `schema_id` - (Optional) Configuration block that contains schema identity fields. Either this or the `schema_version_id` has to be provided. See [`schema_id`](#schema_id) below.
+* `schema_version_id` - (Optional) Unique ID assigned to a version of the schema. Either this or the `schema_id` has to be provided.
+* `schema_version_number` - (Required) Version number of the schema.
+
+##### schema_id
+
+* `registry_name` - (Optional) Name of the schema registry that contains the schema. Must be provided when `schema_name` is specified and conflicts with `schema_arn`.
+* `schema_arn` - (Optional) ARN of the schema. One of `schema_arn` or `schema_name` has to be provided.
+* `schema_name` - (Optional) Name of the schema. One of `schema_arn` or `schema_name` has to be provided.
+
+#### ser_de_info
+
+* `name` - (Optional) Name of the SerDe.
+* `parameters` - (Optional) Map of initialization parameters for the SerDe, in key-value form.
+* `serialization_library` - (Optional) Usually the class that implements the SerDe. An example is `org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe`.
+
+#### sort_columns
+
+* `column` - (Required) Name of the column.
+* `sort_order` - (Required) Whether the column is sorted in ascending (`1`) or descending order (`0`).
+
+#### skewed_info
+
+* `skewed_column_names` - (Optional) List of names of columns that contain skewed values.
+* `skewed_column_value_location_maps` - (Optional) Map of skewed values to the columns that contain them.
+* `skewed_column_values` - (Optional) List of values that appear so frequently as to be considered skewed.
+
+### target_table
+
+* `catalog_id` - (Required) ID of the Data Catalog in which the table resides.
+* `database_name` - (Required) Name of the catalog database that contains the target table.
+* `name` - (Required) Name of the target table.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the Glue Table.
+* `id` - Catalog ID, Database name, and name of the table.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Tables using the catalog ID (usually AWS account ID), database name, and table name. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Glue Tables using the catalog ID (usually AWS account ID), database name, and table name. For example: + +```console +% terraform import aws_glue_catalog_table.MyTable 123456789012:MyDatabase:MyTable +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/glue_classifier.html.markdown b/website/docs/cdktf/python/r/glue_classifier.html.markdown new file mode 100644 index 00000000000..cc09fac39dd --- /dev/null +++ b/website/docs/cdktf/python/r/glue_classifier.html.markdown @@ -0,0 +1,165 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_classifier" +description: |- + Provides an Glue Classifier resource. +--- + + + +# Resource: aws_glue_classifier + +Provides a Glue Classifier resource. + +~> **NOTE:** It is only valid to create one type of classifier (csv, grok, JSON, or XML). Changing classifier types will recreate the classifier. + +## Example Usage + +### Csv Classifier + +```terraform +resource "aws_glue_classifier" "example" { + name = "example" + + csv_classifier { + allow_single_column = false + contains_header = "PRESENT" + delimiter = "," + disable_value_trimming = false + header = ["example1", "example2"] + quote_symbol = "'" + } +} +``` + +### Grok Classifier + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_classifier import GlueClassifier +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueClassifier(self, "example", + grok_classifier=GlueClassifierGrokClassifier( + classification="example", + grok_pattern="example" + ), + name="example" + ) +``` + +### JSON Classifier + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_classifier import GlueClassifier +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueClassifier(self, "example", + json_classifier=GlueClassifierJsonClassifier( + json_path="example" + ), + name="example" + ) +``` + +### XML Classifier + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_classifier import GlueClassifier +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueClassifier(self, "example", + name="example", + xml_classifier=GlueClassifierXmlClassifier( + classification="example", + row_tag="example" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `csv_classifier` - (Optional) A classifier for Csv content. 
Defined below.
+* `grok_classifier` – (Optional) A classifier that uses grok patterns. Defined below.
+* `json_classifier` – (Optional) A classifier for JSON content. Defined below.
+* `name` – (Required) The name of the classifier.
+* `xml_classifier` – (Optional) A classifier for XML content. Defined below.
+
+### csv_classifier
+
+* `allow_single_column` - (Optional) Enables the processing of files that contain only one column.
+* `contains_header` - (Optional) Indicates whether the CSV file contains a header. This can be one of "ABSENT", "PRESENT", or "UNKNOWN".
+* `custom_datatype_configured` - (Optional) Enables the configuration of custom datatypes.
+* `custom_datatypes` - (Optional) A list of supported custom datatypes. Valid values are `BINARY`, `BOOLEAN`, `DATE`, `DECIMAL`, `DOUBLE`, `FLOAT`, `INT`, `LONG`, `SHORT`, `STRING`, `TIMESTAMP`.
+* `delimiter` - (Optional) The delimiter used in the Csv to separate columns.
+* `disable_value_trimming` - (Optional) Specifies whether to trim column values.
+* `header` - (Optional) A list of strings representing column names.
+* `quote_symbol` - (Optional) A custom symbol to denote what combines content into a single column value. It must be different from the column delimiter.
+
+### grok_classifier
+
+* `classification` - (Required) An identifier of the data format that the classifier matches, such as Twitter, JSON, Omniture logs, Amazon CloudWatch Logs, and so on.
+* `custom_patterns` - (Optional) Custom grok patterns used by this classifier.
+* `grok_pattern` - (Required) The grok pattern used by this classifier.
+
+### json_classifier
+
+* `json_path` - (Required) A `JsonPath` string defining the JSON data for the classifier to classify. AWS Glue supports a subset of `JsonPath`, as described in [Writing JsonPath Custom Classifiers](https://docs.aws.amazon.com/glue/latest/dg/custom-classifier.html#custom-classifier-json).
+
+### xml_classifier
+
+* `classification` - (Required) An identifier of the data format that the classifier matches.
+* `row_tag` - (Required) The XML tag designating the element that contains each record in an XML document being parsed. Note that this cannot identify a self-closing element (closed by `/>`). An empty row element that contains only attributes can be parsed as long as it ends with a closing tag (for example, `<row item_a="A" item_b="B"></row>` is okay, but `<row item_a="A" item_b="B" />` is not).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the classifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Classifiers using their name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Classifiers using their name. 
For example:
+
+```console
+% terraform import aws_glue_classifier.MyClassifier MyClassifier
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_connection.html.markdown b/website/docs/cdktf/python/r/glue_connection.html.markdown
new file mode 100644
index 00000000000..13020473fa4
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_connection.html.markdown
@@ -0,0 +1,147 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_connection"
+description: |-
+  Provides a Glue Connection resource.
+---
+
+
+
+# Resource: aws_glue_connection
+
+Provides a Glue Connection resource.
+
+## Example Usage
+
+### Non-VPC Connection
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_connection import GlueConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueConnection(self, "example",
+            connection_properties={
+                "JDBC_CONNECTION_URL": "jdbc:mysql://example.com/exampledatabase",
+                "PASSWORD": "examplepassword",
+                "USERNAME": "exampleusername"
+            },
+            name="example"
+        )
+```
+
+### Non-VPC Connection with Secrets Manager reference
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_secretsmanager_secret import DataAwsSecretsmanagerSecret
+from imports.aws.glue_connection import GlueConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsSecretsmanagerSecret(self, "example",
+            name="example-secret"
+        )
+        aws_glue_connection_example = GlueConnection(self, "example_1",
+            connection_properties={
+                "JDBC_CONNECTION_URL": "jdbc:mysql://example.com/exampledatabase",
+                "SECRET_ID": Token.as_string(example.name)
+            },
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_glue_connection_example.override_logical_id("example")
+```
+
+### VPC Connection
+
+For more information, see the [AWS Documentation](https://docs.aws.amazon.com/glue/latest/dg/populate-add-connection.html#connection-JDBC-VPC).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.glue_connection import GlueConnection, GlueConnectionPhysicalConnectionRequirements
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueConnection(self, "example",
+            connection_properties={
+                "JDBC_CONNECTION_URL": "jdbc:mysql://${" + aws_rds_cluster_example.endpoint + "}/exampledatabase",
+                "PASSWORD": "examplepassword",
+                "USERNAME": "exampleusername"
+            },
+            name="example",
+            physical_connection_requirements=GlueConnectionPhysicalConnectionRequirements(
+                availability_zone=Token.as_string(aws_subnet_example.availability_zone),
+                security_group_id_list=[Token.as_string(aws_security_group_example.id)],
+                subnet_id=Token.as_string(aws_subnet_example.id)
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `catalog_id` – (Optional) The ID of the Data Catalog in which to create the connection. If none is supplied, the AWS account ID is used by default.
+* `connection_properties` – (Optional) A map of key-value pairs used as parameters for this connection.
+* `connection_type` – (Optional) The type of the connection. Supported values are: `CUSTOM`, `JDBC`, `KAFKA`, `MARKETPLACE`, `MONGODB`, and `NETWORK`. Defaults to `JDBC`.
+* `description` – (Optional) Description of the connection.
+* `match_criteria` – (Optional) A list of criteria that can be used in selecting this connection.
+* `name` – (Required) The name of the connection.
+* `physical_connection_requirements` - (Optional) A map of physical connection requirements, such as VPC and SecurityGroup. Defined below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### physical_connection_requirements
+
+* `availability_zone` - (Optional) The availability zone of the connection. This field is redundant and implied by `subnet_id`, but is currently an API requirement.
+* `security_group_id_list` - (Optional) The security group ID list used by the connection.
+* `subnet_id` - (Optional) The subnet ID used by the connection.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Catalog ID and name of the connection
+* `arn` - The ARN of the Glue Connection.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Connections using the `CATALOG-ID` (AWS account ID if not custom) and `NAME`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Connections using the `CATALOG-ID` (AWS account ID if not custom) and `NAME`. 
For example: + +```console +% terraform import aws_glue_connection.MyConnection 123456789012:MyConnection +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/glue_crawler.html.markdown b/website/docs/cdktf/python/r/glue_crawler.html.markdown new file mode 100644 index 00000000000..57bde1f85b6 --- /dev/null +++ b/website/docs/cdktf/python/r/glue_crawler.html.markdown @@ -0,0 +1,314 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_crawler" +description: |- + Manages a Glue Crawler +--- + + + +# Resource: aws_glue_crawler + +Manages a Glue Crawler. More information can be found in the [AWS Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/add-crawler.html) + +## Example Usage + +### DynamoDB Target Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_crawler import GlueCrawler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCrawler(self, "example", + database_name=Token.as_string(aws_glue_catalog_database_example.name), + dynamodb_target=[GlueCrawlerDynamodbTarget( + path="table-name" + ) + ], + name="example", + role=Token.as_string(aws_iam_role_example.arn) + ) +``` + +### JDBC Target Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_crawler import GlueCrawler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCrawler(self, "example", + database_name=Token.as_string(aws_glue_catalog_database_example.name), + jdbc_target=[GlueCrawlerJdbcTarget( + connection_name=Token.as_string(aws_glue_connection_example.name), + path="database-name/%" + ) + ], + name="example", + role=Token.as_string(aws_iam_role_example.arn) + ) +``` + +### S3 Target Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_crawler import GlueCrawler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCrawler(self, "example", + database_name=Token.as_string(aws_glue_catalog_database_example.name), + name="example", + role=Token.as_string(aws_iam_role_example.arn), + s3_target=[GlueCrawlerS3Target( + path="s3://${" + aws_s3_bucket_example.bucket + "}" + ) + ] + ) +``` + +### Catalog Target Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.glue_crawler import GlueCrawler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCrawler(self, "example", + catalog_target=[GlueCrawlerCatalogTarget( + database_name=Token.as_string(aws_glue_catalog_database_example.name), + tables=[Token.as_string(aws_glue_catalog_table_example.name)] + ) + ], + configuration="{\n \"Version\":1.0,\n \"Grouping\": {\n \"TableGroupingPolicy\": \"CombineCompatibleSchemas\"\n }\n}\n\n", + database_name=Token.as_string(aws_glue_catalog_database_example.name), + name="example", + role=Token.as_string(aws_iam_role_example.arn), + schema_change_policy=GlueCrawlerSchemaChangePolicy( + delete_behavior="LOG" + ) + ) +``` + +### MongoDB Target Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_crawler import GlueCrawler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCrawler(self, "example", + database_name=Token.as_string(aws_glue_catalog_database_example.name), + mongodb_target=[GlueCrawlerMongodbTarget( + connection_name=Token.as_string(aws_glue_connection_example.name), + path="database-name/%" + ) + ], + name="example", + role=Token.as_string(aws_iam_role_example.arn) + ) +``` + +### Configuration Settings Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_crawler import GlueCrawler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueCrawler(self, "events_crawler", + configuration=Token.as_string( + Fn.jsonencode({ + "CrawlerOutput": { + "Partitions": { + "AddOrUpdateBehavior": "InheritFromTable" + } + }, + "Grouping": { + "TableGroupingPolicy": "CombineCompatibleSchemas" + }, + "Version": 1 + })), + database_name=glue_database.name, + name="events_crawler_${" + environment_name.value + "}", + role=glue_role.arn, + s3_target=[GlueCrawlerS3Target( + path="s3://${" + data_lake_bucket.bucket + "}" + ) + ], + schedule="cron(0 1 * * ? *)", + tags=Token.as_string_map(tags.value) + ) +``` + +## Argument Reference + +~> **NOTE:** Must specify at least one of `dynamodb_target`, `jdbc_target`, `s3_target`, `mongodb_target` or `catalog_target`. + +This argument supports the following arguments: + +* `database_name` (Required) Glue database where results are written. +* `name` (Required) Name of the crawler. +* `role` (Required) The IAM role friendly name (including path without leading slash), or ARN of an IAM role, used by the crawler to access other resources. +* `classifiers` (Optional) List of custom classifiers. By default, all AWS classifiers are included in a crawl, but these custom classifiers always override the default classifiers for a given classification. +* `configuration` (Optional) JSON string of configuration information. For more details see [Setting Crawler Configuration Options](https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html). +* `description` (Optional) Description of the crawler. 
+* `delta_target` (Optional) List of nested Delta Lake target arguments. See [Delta Target](#delta-target) below.
+* `dynamodb_target` (Optional) List of nested DynamoDB target arguments. See [Dynamodb Target](#dynamodb-target) below.
+* `jdbc_target` (Optional) List of nested JDBC target arguments. See [JDBC Target](#jdbc-target) below.
+* `s3_target` (Optional) List of nested Amazon S3 target arguments. See [S3 Target](#s3-target) below.
+* `mongodb_target` (Optional) List of nested MongoDB target arguments. See [MongoDB Target](#mongodb-target) below.
+* `iceberg_target` (Optional) List of nested Iceberg target arguments. See [Iceberg Target](#iceberg-target) below.
+* `schedule` (Optional) A cron expression used to specify the schedule. For more information, see [Time-Based Schedules for Jobs and Crawlers](https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html). For example, to run something every day at 12:15 UTC, you would specify: `cron(15 12 * * ? *)`.
+* `schema_change_policy` (Optional) Policy for the crawler's update and deletion behavior. See [Schema Change Policy](#schema-change-policy) below.
+* `lake_formation_configuration` (Optional) Specifies Lake Formation configuration settings for the crawler. See [Lake Formation Configuration](#lake-formation-configuration) below.
+* `lineage_configuration` (Optional) Specifies data lineage configuration settings for the crawler. See [Lineage Configuration](#lineage-configuration) below.
+* `recrawl_policy` (Optional) A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since the last crawler run. See [Recrawl Policy](#recrawl-policy) below.
+* `security_configuration` (Optional) The name of the Security Configuration to be used by the crawler.
+* `table_prefix` (Optional) The table prefix used for catalog tables that are created.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Dynamodb Target
+
+* `path` - (Required) The name of the DynamoDB table to crawl.
+* `scan_all` - (Optional) Indicates whether to scan all the records, or to sample rows from the table. Scanning all the records can take a long time when the table is not a high throughput table. Defaults to `true`.
+* `scan_rate` - (Optional) The percentage of the configured read capacity units to use by the AWS Glue crawler. The valid values are null or a value between 0.1 and 1.5.
+
+### JDBC Target
+
+* `connection_name` - (Required) The name of the connection to use to connect to the JDBC target.
+* `path` - (Required) The path of the JDBC target.
+* `exclusions` - (Optional) A list of glob patterns used to exclude from the crawl.
+* `enable_additional_metadata` - (Optional) Specify a value of `RAWTYPES` or `COMMENTS` to enable additional metadata in table responses. `RAWTYPES` provides the native-level datatype. `COMMENTS` provides comments associated with a column or table in the database.
+
+### S3 Target
+
+* `path` - (Required) The path to the Amazon S3 target.
+* `connection_name` - (Optional) The name of a connection which allows the crawler to access data in S3 within a VPC.
+* `exclusions` - (Optional) A list of glob patterns used to exclude from the crawl. 
+* `sample_size` - (Optional) Sets the number of files in each leaf folder to be crawled when crawling sample files in a dataset. If not set, all the files are crawled. A valid value is an integer between 1 and 249.
+* `event_queue_arn` - (Optional) The ARN of the SQS queue to receive S3 notifications from.
+* `dlq_event_queue_arn` - (Optional) The ARN of the dead-letter SQS queue.
+
+### Catalog Target
+
+* `connection_name` - (Optional) The name of the connection for an Amazon S3-backed Data Catalog table to be a target of the crawl when using a Catalog connection type paired with a `NETWORK` Connection type.
+* `database_name` - (Required) The name of the Glue database to be synchronized.
+* `tables` - (Required) A list of catalog tables to be synchronized.
+* `event_queue_arn` - (Optional) A valid Amazon SQS ARN.
+* `dlq_event_queue_arn` - (Optional) A valid Amazon SQS ARN.
+
+~> **Note:** `deletion_behavior` of catalog target doesn't support `DEPRECATE_IN_DATABASE`.
+
+-> **Note:** `configuration` for catalog target crawlers will have `{ ... "Grouping": { "TableGroupingPolicy": "CombineCompatibleSchemas"} }` by default.
+
+### MongoDB Target
+
+* `connection_name` - (Required) The name of the connection to use to connect to the Amazon DocumentDB or MongoDB target.
+* `path` - (Required) The path of the Amazon DocumentDB or MongoDB target (database/collection).
+* `scan_all` - (Optional) Indicates whether to scan all the records, or to sample rows from the table. Scanning all the records can take a long time when the table is not a high throughput table. Default value is `true`.
+
+### Iceberg Target
+
+* `connection_name` - (Optional) The name of the connection to use to connect to the Iceberg target.
+* `paths` - (Required) One or more Amazon S3 paths that contain Iceberg metadata folders as s3://bucket/prefix.
+* `exclusions` - (Optional) A list of glob patterns used to exclude from the crawl.
+* `maximum_traversal_depth` - (Required) The maximum depth of Amazon S3 paths that the crawler can traverse to discover the Iceberg metadata folder in your Amazon S3 path. Used to limit the crawler run time. Valid values are between `1` and `20`.
+
+### Delta Target
+
+* `connection_name` - (Optional) The name of the connection to use to connect to the Delta table target.
+* `create_native_delta_table` (Optional) Specifies whether the crawler will create native tables, to allow integration with query engines that support querying of the Delta transaction log directly.
+* `delta_tables` - (Required) A list of the Amazon S3 paths to the Delta tables.
+* `write_manifest` - (Required) Specifies whether to write the manifest files to the Delta table path.
+
+### Schema Change Policy
+
+* `delete_behavior` - (Optional) The deletion behavior when the crawler finds a deleted object. Valid values: `LOG`, `DELETE_FROM_DATABASE`, or `DEPRECATE_IN_DATABASE`. Defaults to `DEPRECATE_IN_DATABASE`.
+* `update_behavior` - (Optional) The update behavior when the crawler finds a changed schema. Valid values: `LOG` or `UPDATE_IN_DATABASE`. Defaults to `UPDATE_IN_DATABASE`.
+
+### Lake Formation Configuration
+
+* `account_id` - (Optional) Required for cross account crawls. For same account crawls as the target data, this can be omitted.
+* `use_lake_formation_credentials` - (Optional) Specifies whether to use Lake Formation credentials for the crawler instead of the IAM role credentials. 
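+
+As a minimal illustration of the `lake_formation_configuration` block above, here is a hand-written sketch (not `cdktf convert` output) of a cross-account crawl. The `GlueCrawlerLakeFormationConfiguration` class name is assumed from the usual cdktf naming pattern, the account ID is a placeholder, and `aws_glue_catalog_database_example`, `aws_iam_role_example`, and `aws_s3_bucket_example` are undeclared references, as in the examples above.
+
+```python
+# A hedged sketch, not generated by 'cdktf convert'. The nested-block class
+# name below follows the standard cdktf naming pattern and is an assumption.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.glue_crawler import (
+    GlueCrawler,
+    GlueCrawlerLakeFormationConfiguration,
+    GlueCrawlerS3Target
+)
+class LakeFormationCrawlerSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueCrawler(self, "example",
+            database_name=Token.as_string(aws_glue_catalog_database_example.name),
+            name="example",
+            role=Token.as_string(aws_iam_role_example.arn),
+            # Crawl with Lake Formation credentials; account_id identifies the
+            # account that owns the target data (required for cross-account crawls).
+            lake_formation_configuration=GlueCrawlerLakeFormationConfiguration(
+                account_id="123456789012",
+                use_lake_formation_credentials=True
+            ),
+            s3_target=[GlueCrawlerS3Target(
+                path="s3://${" + aws_s3_bucket_example.bucket + "}"
+            )
+            ]
+        )
+```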
+ +### Lineage Configuration + +* `crawler_lineage_settings` - (Optional) Specifies whether data lineage is enabled for the crawler. Valid values are: `ENABLE` and `DISABLE`. Default value is `DISABLE`. + +### Recrawl Policy + +* `recrawl_behavior` - (Optional) Specifies whether to crawl the entire dataset again, crawl only folders that were added since the last crawler run, or crawl what S3 notifies the crawler of via SQS. Valid Values are: `CRAWL_EVENT_MODE`, `CRAWL_EVERYTHING` and `CRAWL_NEW_FOLDERS_ONLY`. Default value is `CRAWL_EVERYTHING`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Crawler name +* `arn` - The ARN of the crawler +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Crawlers using `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Glue Crawlers using `name`. For example: + +```console +% terraform import aws_glue_crawler.MyJob MyJob +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/glue_data_catalog_encryption_settings.html.markdown b/website/docs/cdktf/python/r/glue_data_catalog_encryption_settings.html.markdown new file mode 100644 index 00000000000..427e218772d --- /dev/null +++ b/website/docs/cdktf/python/r/glue_data_catalog_encryption_settings.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_data_catalog_encryption_settings" +description: |- + Provides a Glue Data Catalog Encryption Settings resource. +--- + + + +# Resource: aws_glue_data_catalog_encryption_settings + +Provides a Glue Data Catalog Encryption Settings resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_data_catalog_encryption_settings import GlueDataCatalogEncryptionSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueDataCatalogEncryptionSettings(self, "example", + data_catalog_encryption_settings=GlueDataCatalogEncryptionSettingsDataCatalogEncryptionSettings( + connection_password_encryption=GlueDataCatalogEncryptionSettingsDataCatalogEncryptionSettingsConnectionPasswordEncryption( + aws_kms_key_id=test.arn, + return_connection_password_encrypted=True + ), + encryption_at_rest=GlueDataCatalogEncryptionSettingsDataCatalogEncryptionSettingsEncryptionAtRest( + catalog_encryption_mode="SSE-KMS", + sse_aws_kms_key_id=test.arn + ) + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `data_catalog_encryption_settings` – (Required) The security configuration to set. see [Data Catalog Encryption Settings](#data_catalog_encryption_settings). 
+* `catalog_id` – (Optional) The ID of the Data Catalog to set the security configuration for. If none is provided, the AWS account ID is used by default. + +### data_catalog_encryption_settings + +* `connection_password_encryption` - (Required) When connection password protection is enabled, the Data Catalog uses a customer-provided key to encrypt the password as part of CreateConnection or UpdateConnection and store it in the ENCRYPTED_PASSWORD field in the connection properties. You can enable catalog encryption or only password encryption. see [Connection Password Encryption](#connection_password_encryption). +* `encryption_at_rest` - (Required) Specifies the encryption-at-rest configuration for the Data Catalog. see [Encryption At Rest](#encryption_at_rest). + +### connection_password_encryption + +* `return_connection_password_encrypted` - (Required) When set to `true`, passwords remain encrypted in the responses of GetConnection and GetConnections. This encryption takes effect independently of the catalog encryption. +* `aws_kms_key_id` - (Optional) A KMS key ARN that is used to encrypt the connection password. If connection password protection is enabled, the caller of CreateConnection and UpdateConnection needs at least `kms:Encrypt` permission on the specified AWS KMS key, to encrypt passwords before storing them in the Data Catalog. + +### encryption_at_rest + +* `catalog_encryption_mode` - (Required) The encryption-at-rest mode for encrypting Data Catalog data. Valid values are `DISABLED` and `SSE-KMS`. +* `sse_aws_kms_key_id` - (Optional) The ARN of the AWS KMS key to use for encryption at rest. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Data Catalog to set the security configuration for. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Data Catalog Encryption Settings using `CATALOG-ID` (AWS account ID if not custom). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Glue Data Catalog Encryption Settings using `CATALOG-ID` (AWS account ID if not custom). For example: + +```console +% terraform import aws_glue_data_catalog_encryption_settings.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/glue_data_quality_ruleset.html.markdown b/website/docs/cdktf/python/r/glue_data_quality_ruleset.html.markdown new file mode 100644 index 00000000000..36ef84bd9d6 --- /dev/null +++ b/website/docs/cdktf/python/r/glue_data_quality_ruleset.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_data_quality_ruleset" +description: |- + Provides a Glue Data Quality Ruleset. +--- + + + +# Resource: aws_glue_data_quality_ruleset + +Provides a Glue Data Quality Ruleset Resource. 
You can refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/glue-data-quality.html) for a full explanation of the Glue Data Quality Ruleset functionality + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_data_quality_ruleset import GlueDataQualityRuleset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueDataQualityRuleset(self, "example", + name="example", + ruleset="Rules = [Completeness \\\"colA\\\" between 0.4 and 0.8]" + ) +``` + +### With description + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_data_quality_ruleset import GlueDataQualityRuleset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueDataQualityRuleset(self, "example", + description="example", + name="example", + ruleset="Rules = [Completeness \\\"colA\\\" between 0.4 and 0.8]" + ) +``` + +### With tags + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_data_quality_ruleset import GlueDataQualityRuleset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueDataQualityRuleset(self, "example", + name="example", + ruleset="Rules = [Completeness \\\"colA\\\" between 0.4 and 0.8]", + tags={ + "hello": "world" + } + ) +``` + +### With target_table + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_data_quality_ruleset import GlueDataQualityRuleset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueDataQualityRuleset(self, "example", + name="example", + ruleset="Rules = [Completeness \\\"colA\\\" between 0.4 and 0.8]", + target_table=GlueDataQualityRulesetTargetTable( + database_name=Token.as_string(aws_glue_catalog_database_example.name), + table_name=Token.as_string(aws_glue_catalog_table_example.name) + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the data quality ruleset. +* `name` - (Required, Forces new resource) Name of the data quality ruleset. +* `ruleset` - (Optional) A Data Quality Definition Language (DQDL) ruleset. For more information, see the AWS Glue developer guide. +* `tags` - (Optional) Key-value map of resource tags. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `target_table` - (Optional, Forces new resource) A configuration block specifying a target table associated with the data quality ruleset. See [`target_table`](#target_table) below.
+
+### target_table
+
+* `catalog_id` - (Optional, Forces new resource) The catalog ID where the AWS Glue table exists.
+* `database_name` - (Required, Forces new resource) Name of the database where the AWS Glue table exists.
+* `table_name` - (Required, Forces new resource) Name of the AWS Glue table.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Glue Data Quality Ruleset.
+* `created_on` - The time and date that this data quality ruleset was created.
+* `last_modified_on` - The time and date that this data quality ruleset was last modified.
+* `recommendation_run_id` - When a ruleset was created from a recommendation run, this run ID is generated to link the two together.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Data Quality Ruleset using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Data Quality Ruleset using the `name`. For example:
+
+```console
+% terraform import aws_glue_data_quality_ruleset.example exampleName
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_dev_endpoint.markdown b/website/docs/cdktf/python/r/glue_dev_endpoint.markdown
new file mode 100644
index 00000000000..e522f3284d0
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_dev_endpoint.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_dev_endpoint"
+description: |-
+  Provides a Glue Development Endpoint resource.
+---
+
+
+# Resource: aws_glue_dev_endpoint
+
+Provides a Glue Development Endpoint resource.
+
+## Example Usage
+
+Basic usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.glue_dev_endpoint import GlueDevEndpoint
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsIamPolicyDocument(self, "example",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["glue.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        aws_iam_role_example = IamRole(self, "example_1",
+            assume_role_policy=Token.as_string(example.json),
+            name="AWSGlueServiceRole-foo"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_example.override_logical_id("example")
+        IamRolePolicyAttachment(self, "example-AWSGlueServiceRole",
+            policy_arn="arn:aws:iam::aws:policy/service-role/AWSGlueServiceRole",
+            role=Token.as_string(aws_iam_role_example.name)
+        )
+        aws_glue_dev_endpoint_example = GlueDevEndpoint(self, "example_3",
+            name="foo",
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_glue_dev_endpoint_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `arguments` - (Optional) A map of arguments used to configure the endpoint.
+* `extra_jars_s3_path` - (Optional) Path to one or more Java JARs in an S3 bucket that should be loaded in this endpoint.
+* `extra_python_libs_s3_path` - (Optional) Path(s) to one or more Python libraries in an S3 bucket that should be loaded in this endpoint. Multiple values must be complete paths separated by a comma.
+* `glue_version` - (Optional) Specifies the versions of Python and Apache Spark to use. Defaults to AWS Glue version 0.9.
+* `name` - (Required) The name of this endpoint. It must be unique in your account.
+* `number_of_nodes` - (Optional) The number of AWS Glue Data Processing Units (DPUs) to allocate to this endpoint. Conflicts with `worker_type`.
+* `number_of_workers` - (Optional) The number of workers of a defined worker type that are allocated to this endpoint. This field is available only when you choose worker type G.1X or G.2X.
+* `public_key` - (Optional) The public key to be used by this endpoint for authentication.
+* `public_keys` - (Optional) A list of public keys to be used by this endpoint for authentication.
+* `role_arn` - (Required) The IAM role for this endpoint.
+* `security_configuration` - (Optional) The name of the Security Configuration structure to be used with this endpoint.
+* `security_group_ids` - (Optional) Security group IDs for the security groups to be used by this endpoint.
+* `subnet_id` - (Optional) The subnet ID for the new endpoint to use.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `worker_type` - (Optional) The type of predefined worker that is allocated to this endpoint. Accepts a value of Standard, G.1X, or G.2X.
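+
+As a minimal sketch of the worker-based sizing described above (not part of the upstream example; the role ARN is a placeholder, and `number_of_nodes` is omitted because it conflicts with `worker_type`):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.glue_dev_endpoint import GlueDevEndpoint
+class DevEndpointSizingSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueDevEndpoint(self, "sized",
+            name="sized-endpoint",
+            # Placeholder ARN; in practice reference an IAM role such as the
+            # one created in the basic example above.
+            role_arn="arn:aws:iam::123456789012:role/AWSGlueServiceRole-foo",
+            glue_version="1.0",
+            # Worker-based sizing: pick a worker type and a worker count
+            # instead of allocating raw DPUs via number_of_nodes.
+            worker_type="G.1X",
+            number_of_workers=2
+        )
+```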
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the endpoint.
+* `name` - The name of the new endpoint.
+* `private_address` - A private IP address to access the endpoint within a VPC, if this endpoint is created within one.
+* `public_address` - The public IP address used by this endpoint. The PublicAddress field is present only when you create a non-VPC endpoint.
+* `yarn_endpoint_address` - The YARN endpoint address used by this endpoint.
+* `zeppelin_remote_spark_interpreter_port` - The Apache Zeppelin port for the remote Apache Spark interpreter.
+* `availability_zone` - The AWS availability zone where this endpoint is located.
+* `vpc_id` - The ID of the VPC used by this endpoint.
+* `status` - The current status of this endpoint.
+* `failure_reason` - The reason for a current failure in this endpoint.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Glue Development Endpoint using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a Glue Development Endpoint using the `name`. For example:
+
+```console
+% terraform import aws_glue_dev_endpoint.example foo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_job.html.markdown b/website/docs/cdktf/python/r/glue_job.html.markdown
new file mode 100644
index 00000000000..d1ac188cef7
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_job.html.markdown
@@ -0,0 +1,224 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_job"
+description: |-
+  Provides a Glue Job resource.
+---
+
+
+# Resource: aws_glue_job
+
+Provides a Glue Job resource.
+
+-> Glue functionality, such as monitoring and logging of jobs, is typically managed with the `default_arguments` argument. See the [Special Parameters Used by AWS Glue](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-programming-etl-glue-arguments.html) topic in the Glue developer guide for additional information.
+
+## Example Usage
+
+### Python Job
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_job import GlueJob
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueJob(self, "example",
+            command=GlueJobCommand(
+                script_location="s3://${" + aws_s3_bucket_example.bucket + "}/example.py"
+            ),
+            name="example",
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+### Ray Job
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_job import GlueJob
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueJob(self, "example",
+            command=GlueJobCommand(
+                name="glueray",
+                python_version="3.9",
+                runtime="Ray2.4",
+                script_location="s3://${" + aws_s3_bucket_example.bucket + "}/example.py"
+            ),
+            glue_version="4.0",
+            name="example",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            worker_type="Z.2X"
+        )
+```
+
+### Scala Job
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_job import GlueJob
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueJob(self, "example",
+            command=GlueJobCommand(
+                script_location="s3://${" + aws_s3_bucket_example.bucket + "}/example.scala"
+            ),
+            default_arguments={
+                "--job-language": "scala"
+            },
+            name="example",
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+### Streaming Job
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_job import GlueJob
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueJob(self, "example",
+            command=GlueJobCommand(
+                name="gluestreaming",
+                script_location="s3://${" + aws_s3_bucket_example.bucket + "}/example.script"
+            ),
+            name="example streaming job",
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+### Enabling CloudWatch Logs and Metrics
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.glue_job import GlueJob
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, command, name, role_arn):
+        super().__init__(scope, id)
+        example = CloudwatchLogGroup(self, "example",
+            name="example",
+            retention_in_days=14
+        )
+        aws_glue_job_example = GlueJob(self, "example_1",
+            default_arguments={
+                "--continuous-log-logGroup": example.name,
+                "--enable-continuous-cloudwatch-log": "true",
+                "--enable-continuous-log-filter": "true",
+                "--enable-metrics": ""
+            },
+            command=command,
+            name=name,
+            role_arn=role_arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_glue_job_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `command` – (Required) The command of the job. Defined below.
+* `connections` – (Optional) The list of connections used for this job.
+* `default_arguments` – (Optional) The map of default arguments for this job. You can specify arguments here that your own job-execution script consumes, as well as arguments that AWS Glue itself consumes.
For information about how to specify and consume your own Job arguments, see the [Calling AWS Glue APIs in Python](http://docs.aws.amazon.com/glue/latest/dg/aws-glue-programming-python-calling.html) topic in the developer guide. For information about the key-value pairs that AWS Glue consumes to set up your job, see the [Special Parameters Used by AWS Glue](http://docs.aws.amazon.com/glue/latest/dg/aws-glue-programming-python-glue-arguments.html) topic in the developer guide.
+* `non_overridable_arguments` – (Optional) Non-overridable arguments for this job, specified as name-value pairs.
+* `description` – (Optional) Description of the job.
+* `execution_property` – (Optional) Execution property of the job. Defined below.
+* `glue_version` - (Optional) The version of Glue to use, for example "1.0". Ray jobs should set this to 4.0 or greater. For information about available versions, see the [AWS Glue Release Notes](https://docs.aws.amazon.com/glue/latest/dg/release-notes.html).
+* `execution_class` - (Optional) Indicates whether the job is run with a standard or flexible execution class. The standard execution class is ideal for time-sensitive workloads that require fast job startup and dedicated resources. Valid values: `FLEX`, `STANDARD`.
+* `max_capacity` – (Optional) The maximum number of AWS Glue data processing units (DPUs) that can be allocated when this job runs. Required when `pythonshell` is set; accepts either `0.0625` or `1.0`. Use the `number_of_workers` and `worker_type` arguments instead with `glue_version` `2.0` and above.
+* `max_retries` – (Optional) The maximum number of times to retry this job if it fails.
+* `name` – (Required) The name you assign to this job. It must be unique in your account.
+* `notification_property` - (Optional) Notification property of the job. Defined below.
+* `role_arn` – (Required) The ARN of the IAM role associated with this job.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `timeout` – (Optional) The job timeout in minutes. The default is 2880 minutes (48 hours) for `glueetl` and `pythonshell` jobs, and null (unlimited) for `gluestreaming` jobs.
+* `security_configuration` - (Optional) The name of the Security Configuration to be associated with the job.
+* `worker_type` - (Optional) The type of predefined worker that is allocated when a job runs. Accepts a value of Standard, G.1X, G.2X, or G.025X for Spark jobs. Accepts the value Z.2X for Ray jobs.
+    * For the Standard worker type, each worker provides 4 vCPU, 16 GB of memory, a 50 GB disk, and 2 executors per worker.
+    * For the G.1X worker type, each worker maps to 1 DPU (4 vCPU, 16 GB of memory, 64 GB disk), and provides 1 executor per worker. Recommended for memory-intensive jobs.
+    * For the G.2X worker type, each worker maps to 2 DPU (8 vCPU, 32 GB of memory, 128 GB disk), and provides 1 executor per worker. Recommended for memory-intensive jobs.
+    * For the G.025X worker type, each worker maps to 0.25 DPU (2 vCPU, 4 GB of memory, 64 GB disk), and provides 1 executor per worker. Recommended for low-volume streaming jobs. Only available for Glue version 3.0.
+    * For the Z.2X worker type, each worker maps to 2 M-DPU (8 vCPU, 64 GB of memory, 128 GB disk), and provides up to 8 Ray workers based on the autoscaler.
+* `number_of_workers` - (Optional) The number of workers of a defined workerType that are allocated when a job runs. + +### command Argument Reference + +* `name` - (Optional) The name of the job command. Defaults to `glueetl`. Use `pythonshell` for Python Shell Job Type, `glueray` for Ray Job Type, or `gluestreaming` for Streaming Job Type. `max_capacity` needs to be set if `pythonshell` is chosen. +* `script_location` - (Required) Specifies the S3 path to a script that executes a job. +* `python_version` - (Optional) The Python version being used to execute a Python shell job. Allowed values are 2, 3 or 3.9. Version 3 refers to Python 3.6. +* `runtime` - (Optional) In Ray jobs, runtime is used to specify the versions of Ray, Python and additional libraries available in your environment. This field is not used in other job types. For supported runtime environment values, see [Working with Ray jobs](https://docs.aws.amazon.com/glue/latest/dg/ray-jobs-section.html#author-job-ray-runtimes) in the Glue Developer Guide. + +### execution_property Argument Reference + +* `max_concurrent_runs` - (Optional) The maximum number of concurrent runs allowed for a job. The default is 1. + +### notification_property Argument Reference + +* `notify_delay_after` - (Optional) After a job run starts, the number of minutes to wait before sending a job run delay notification. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Glue Job +* `id` - Job name +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Jobs using `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Glue Jobs using `name`. For example: + +```console +% terraform import aws_glue_job.MyJob MyJob +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/glue_ml_transform.html.markdown b/website/docs/cdktf/python/r/glue_ml_transform.html.markdown new file mode 100644 index 00000000000..69dd02349b5 --- /dev/null +++ b/website/docs/cdktf/python/r/glue_ml_transform.html.markdown @@ -0,0 +1,189 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_ml_transform" +description: |- + Provides a Glue ML Transform resource. +--- + + + +# Resource: aws_glue_ml_transform + +Provides a Glue ML Transform resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.glue_catalog_database import GlueCatalogDatabase +from imports.aws.glue_catalog_table import GlueCatalogTable +from imports.aws.glue_ml_transform import GlueMlTransform +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = GlueCatalogDatabase(self, "test", + name="example" + ) + aws_glue_catalog_table_test = GlueCatalogTable(self, "test_1", + database_name=test.name, + name="example", + owner="my_owner", + parameters={ + "param1": "param1_val" + }, + partition_keys=[GlueCatalogTablePartitionKeys( + comment="my_column_1_comment", + name="my_column_1", + type="int" + ), GlueCatalogTablePartitionKeys( + comment="my_column_2_comment", + name="my_column_2", + type="string" + ) + ], + retention=1, + storage_descriptor=GlueCatalogTableStorageDescriptor( + bucket_columns=["bucket_column_1"], + columns=[GlueCatalogTableStorageDescriptorColumns( + comment="my_column1_comment", + name="my_column_1", + type="int" + ), GlueCatalogTableStorageDescriptorColumns( + comment="my_column2_comment", + name="my_column_2", + type="string" + ) + ], + compressed=False, + input_format="SequenceFileInputFormat", + location="my_location", + number_of_buckets=1, + output_format="SequenceFileInputFormat", + parameters={ + "param1": "param1_val" + }, + ser_de_info=GlueCatalogTableStorageDescriptorSerDeInfo( + name="ser_de_name", + parameters={ + "param1": "param_val_1" + }, + serialization_library="org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" + ), + skewed_info=GlueCatalogTableStorageDescriptorSkewedInfo( + skewed_column_names=["my_column_1"], + skewed_column_value_location_maps={ + "my_column_1": "my_column_1_val_loc_map" + }, + skewed_column_values=["skewed_val_1"] + ), + sort_columns=[GlueCatalogTableStorageDescriptorSortColumns( + column="my_column_1", + sort_order=1 + ) + ], + stored_as_sub_directories=False + ), + table_type="VIRTUAL_VIEW", + view_expanded_text="view_expanded_text_1", + view_original_text="view_original_text_1" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_glue_catalog_table_test.override_logical_id("test") + aws_glue_ml_transform_test = GlueMlTransform(self, "test_2", + depends_on=[aws_iam_role_policy_attachment_test], + input_record_tables=[GlueMlTransformInputRecordTables( + database_name=Token.as_string(aws_glue_catalog_table_test.database_name), + table_name=Token.as_string(aws_glue_catalog_table_test.name) + ) + ], + name="example", + parameters=GlueMlTransformParameters( + find_matches_parameters=GlueMlTransformParametersFindMatchesParameters( + primary_key_column_name="my_column_1" + ), + transform_type="FIND_MATCHES" + ), + role_arn=Token.as_string(aws_iam_role_test.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_glue_ml_transform_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` – (Required) The name you assign to this ML Transform. It must be unique in your account. +* `input_record_tables` - (Required) A list of AWS Glue table definitions used by the transform. see [Input Record Tables](#input_record_tables). +* `parameters` - (Required) The algorithmic parameters that are specific to the transform type used. Conditionally dependent on the transform type. see [Parameters](#parameters). 
+
+* `role_arn` – (Required) The ARN of the IAM role associated with this ML Transform.
+* `description` – (Optional) Description of the ML Transform.
+* `glue_version` - (Optional) The version of Glue to use, for example "1.0". For information about available versions, see the [AWS Glue Release Notes](https://docs.aws.amazon.com/glue/latest/dg/release-notes.html).
+* `max_capacity` – (Optional) The number of AWS Glue data processing units (DPUs) that are allocated to task runs for this transform. You can allocate from `2` to `100` DPUs; the default is `10`. `max_capacity` is a mutually exclusive option with `number_of_workers` and `worker_type`.
+* `max_retries` – (Optional) The maximum number of times to retry this ML Transform if it fails.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `timeout` – (Optional) The ML Transform timeout in minutes. The default is 2880 minutes (48 hours).
+* `worker_type` - (Optional) The type of predefined worker that is allocated when an ML Transform runs. Accepts a value of `Standard`, `G.1X`, or `G.2X`. Required with `number_of_workers`.
+* `number_of_workers` - (Optional) The number of workers of a defined `worker_type` that are allocated when an ML Transform runs. Required with `worker_type`.
+
+### input_record_tables
+
+* `database_name` - (Required) A database name in the AWS Glue Data Catalog.
+* `table_name` - (Required) A table name in the AWS Glue Data Catalog.
+* `catalog_id` - (Optional) A unique identifier for the AWS Glue Data Catalog.
+* `connection_name` - (Optional) The name of the connection to the AWS Glue Data Catalog.
+
+### parameters
+
+* `transform_type` - (Required) The type of machine learning transform. For information about the types of machine learning transforms, see [Creating Machine Learning Transforms](http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html).
+* `find_matches_parameters` - (Required) The parameters for the find matches algorithm. see [Find Matches Parameters](#find_matches_parameters).
+
+#### find_matches_parameters
+
+* `accuracy_cost_trade_off` - (Optional) The value that is selected when tuning your transform for a balance between accuracy and cost.
+* `enforce_provided_labels` - (Optional) The value to switch on or off to force the output to match the provided labels from users.
+* `precision_recall_trade_off` - (Optional) The value selected when tuning your transform for a balance between precision and recall.
+* `primary_key_column_name` - (Optional) The name of a column that uniquely identifies rows in the source table.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of Glue ML Transform.
+* `id` - Glue ML Transform ID.
+* `label_count` - The number of labels available for this transform.
+* `schema` - The object that represents the schema that this transform accepts. see [Schema](#schema).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### schema
+
+* `name` - The name of the column.
+
+* `data_type` - The type of data in the column.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue ML Transforms using `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue ML Transforms using `id`. For example:
+
+```console
+% terraform import aws_glue_ml_transform.example tfm-c2cafbe83b1c575f49eaca9939220e2fcd58e2d5
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_partition.html.markdown b/website/docs/cdktf/python/r/glue_partition.html.markdown
new file mode 100644
index 00000000000..e92450765a0
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_partition.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_partition"
+description: |-
+  Provides a Glue Partition.
+---
+
+
+# Resource: aws_glue_partition
+
+Provides a Glue Partition Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_partition import GluePartition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GluePartition(self, "example",
+            database_name="some-database",
+            table_name="some-table",
+            partition_values=["some-value"]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `database_name` - (Required) Name of the metadata database where the table metadata resides. For Hive compatibility, this must be all lowercase.
+* `partition_values` - (Required) The values that define the partition.
+* `catalog_id` - (Optional) ID of the Glue Catalog and database to create the table in. If omitted, this defaults to the AWS Account ID plus the database name.
+* `storage_descriptor` - (Optional) A [storage descriptor](#storage_descriptor) object containing information about the physical storage of this table. You can refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-tables.html#aws-glue-api-catalog-tables-StorageDescriptor) for a full explanation of this object.
+* `parameters` - (Optional) Properties associated with this table, as a list of key-value pairs.
+
+##### storage_descriptor
+
+* `columns` - (Optional) A list of the [Columns](#column) in the table.
+* `location` - (Optional) The physical location of the table. By default this takes the form of the warehouse location, followed by the database location in the warehouse, followed by the table name.
+* `input_format` - (Optional) The input format: SequenceFileInputFormat (binary), or TextInputFormat, or a custom format.
+* `output_format` - (Optional) The output format: SequenceFileOutputFormat (binary), or IgnoreKeyTextOutputFormat, or a custom format.
+* `compressed` - (Optional) True if the data in the table is compressed, or False if not.
+* `number_of_buckets` - (Optional) Must be specified if the table contains any dimension columns.
+
+* `ser_de_info` - (Optional) [Serialization/deserialization (SerDe)](#ser_de_info) information.
+* `bucket_columns` - (Optional) A list of reducer grouping columns, clustering columns, and bucketing columns in the table.
+* `sort_columns` - (Optional) A list of [Order](#sort_columns) objects specifying the sort order of each bucket in the table.
+* `parameters` - (Optional) User-supplied properties in key-value form.
+* `skewed_info` - (Optional) Information about values that appear very frequently in a column (skewed values).
+* `stored_as_sub_directories` - (Optional) True if the table data is stored in subdirectories, or False if not.
+
+##### column
+
+* `name` - (Required) The name of the Column.
+* `type` - (Optional) The datatype of data in the Column.
+* `comment` - (Optional) Free-form text comment.
+
+##### ser_de_info
+
+* `name` - (Optional) Name of the SerDe.
+* `parameters` - (Optional) A map of initialization parameters for the SerDe, in key-value form.
+* `serialization_library` - (Optional) Usually the class that implements the SerDe. An example is: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe.
+
+##### sort_columns
+
+* `column` - (Required) The name of the column.
+* `sort_order` - (Required) Indicates that the column is sorted in ascending order (== 1), or in descending order (== 0).
+
+##### skewed_info
+
+* `skewed_column_names` - (Optional) A list of names of columns that contain skewed values.
+* `skewed_column_value_location_maps` - (Optional) A map of skewed values to the columns that contain them.
+* `skewed_column_values` - (Optional) A list of values that appear so frequently as to be considered skewed.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Partition ID.
+* `creation_time` - The time at which the partition was created.
+* `last_analyzed_time` - The last time at which column statistics were computed for this partition.
+* `last_accessed_time` - The last time at which the partition was accessed.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Partitions using the catalog ID (usually AWS account ID), database name, table name and partition values. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Partitions using the catalog ID (usually AWS account ID), database name, table name and partition values. For example:
+
+```console
+% terraform import aws_glue_partition.part 123456789012:MyDatabase:MyTable:val1#val2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_partition_index.html.markdown b/website/docs/cdktf/python/r/glue_partition_index.html.markdown
new file mode 100644
index 00000000000..0b349258017
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_partition_index.html.markdown
@@ -0,0 +1,155 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_partition_index"
+description: |-
+  Provides a Glue Partition Index.
+--- + + + +# Resource: aws_glue_partition_index + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_catalog_database import GlueCatalogDatabase +from imports.aws.glue_catalog_table import GlueCatalogTable +from imports.aws.glue_partition_index import GluePartitionIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = GlueCatalogDatabase(self, "example", + name="example" + ) + aws_glue_catalog_table_example = GlueCatalogTable(self, "example_1", + database_name=example.name, + name="example", + owner="my_owner", + parameters={ + "param1": "param1_val" + }, + partition_keys=[GlueCatalogTablePartitionKeys( + comment="my_column_1_comment", + name="my_column_1", + type="int" + ), GlueCatalogTablePartitionKeys( + comment="my_column_2_comment", + name="my_column_2", + type="string" + ) + ], + retention=1, + storage_descriptor=GlueCatalogTableStorageDescriptor( + bucket_columns=["bucket_column_1"], + columns=[GlueCatalogTableStorageDescriptorColumns( + comment="my_column1_comment", + name="my_column_1", + type="int" + ), GlueCatalogTableStorageDescriptorColumns( + comment="my_column2_comment", + name="my_column_2", + type="string" + ) + ], + compressed=False, + input_format="SequenceFileInputFormat", + location="my_location", + number_of_buckets=1, + output_format="SequenceFileInputFormat", + parameters={ + "param1": "param1_val" + }, + ser_de_info=GlueCatalogTableStorageDescriptorSerDeInfo( + name="ser_de_name", + parameters={ + "param1": "param_val_1" + }, + serialization_library="org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe" + ), + skewed_info=GlueCatalogTableStorageDescriptorSkewedInfo( + skewed_column_names=["my_column_1"], + skewed_column_value_location_maps={ + "my_column_1": "my_column_1_val_loc_map" + }, + skewed_column_values=["skewed_val_1"] + ), + sort_columns=[GlueCatalogTableStorageDescriptorSortColumns( + column="my_column_1", + sort_order=1 + ) + ], + stored_as_sub_directories=False + ), + table_type="VIRTUAL_VIEW", + view_expanded_text="view_expanded_text_1", + view_original_text="view_original_text_1" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_glue_catalog_table_example.override_logical_id("example") + aws_glue_partition_index_example = GluePartitionIndex(self, "example_2", + database_name=example.name, + partition_index=GluePartitionIndexPartitionIndex( + index_name="example", + keys=["my_column_1", "my_column_2"] + ), + table_name=Token.as_string(aws_glue_catalog_table_example.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_glue_partition_index_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `table_name` - (Required) Name of the table. For Hive compatibility, this must be entirely lowercase. +* `database_name` - (Required) Name of the metadata database where the table metadata resides. For Hive compatibility, this must be all lowercase. +* `partition_index` - (Required) Configuration block for a partition index. See [`partition_index`](#partition_index) below. 
+* `catalog_id` - (Optional) The catalog ID where the table resides.
+
+### partition_index
+
+* `index_name` - (Required) Name of the partition index.
+* `keys` - (Required) Keys for the partition index.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Catalog ID, database name, table name, and index name.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Partition Indexes using the catalog ID (usually AWS account ID), database name, table name, and index name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Partition Indexes using the catalog ID (usually AWS account ID), database name, table name, and index name. For example:
+
+```console
+% terraform import aws_glue_partition_index.example 123456789012:MyDatabase:MyTable:index-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_registry.html.markdown b/website/docs/cdktf/python/r/glue_registry.html.markdown
new file mode 100644
index 00000000000..ad6fb31d6a4
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_registry.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_registry"
+description: |-
+  Provides a Glue Registry resource.
+---
+
+
+# Resource: aws_glue_registry
+
+Provides a Glue Registry resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_registry import GlueRegistry
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueRegistry(self, "example",
+            registry_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `registry_name` – (Required) The name of the registry.
+* `description` – (Optional) A description of the registry.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of Glue Registry.
+* `id` - Amazon Resource Name (ARN) of Glue Registry.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Registries using `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Registries using `arn`. For example:
+
+```console
+% terraform import aws_glue_registry.example arn:aws:glue:us-west-2:123456789012:registry/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_resource_policy.html.markdown b/website/docs/cdktf/python/r/glue_resource_policy.html.markdown
new file mode 100644
index 00000000000..3c074356ca7
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_resource_policy.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_resource_policy"
+description: |-
+  Provides a resource to configure the AWS Glue resource policy.
+---
+
+
+# Resource: aws_glue_resource_policy
+
+Provides a Glue resource policy. Only one can exist per region.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.data_aws_partition import DataAwsPartition
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.glue_resource_policy import GlueResourcePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_partition_current = DataAwsPartition(self, "current_1")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_partition_current.override_logical_id("current")
+        data_aws_region_current = DataAwsRegion(self, "current_2")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_region_current.override_logical_id("current")
+        glue_example_policy = DataAwsIamPolicyDocument(self, "glue-example-policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["glue:CreateTable"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="AWS"
+                )
+                ],
+                resources=["arn:${" + data_aws_partition_current.partition + "}:glue:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:*"
+                ]
+            )
+            ]
+        )
+        GlueResourcePolicy(self, "example",
+            policy=Token.as_string(glue_example_policy.json)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `policy` – (Required) The policy to be applied to the AWS Glue Data Catalog.
+* `enable_hybrid` - (Optional) Indicates that you are using both methods to grant cross-account access to the Data Catalog. Valid values are `TRUE` and `FALSE`. Note that Terraform will not perform drift detection on this field, as it is not returned on read.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
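+
+A minimal sketch of setting `enable_hybrid` alongside a policy (hypothetical values; it inlines a JSON policy instead of the policy document data source used in the example above):
+
+```python
+import json
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.glue_resource_policy import GlueResourcePolicy
+class GlueResourcePolicyHybridSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueResourcePolicy(self, "example",
+            # Inline JSON equivalent of the generated policy document;
+            # the region and account ID are placeholders.
+            policy=json.dumps({
+                "Version": "2012-10-17",
+                "Statement": [{
+                    "Effect": "Allow",
+                    "Principal": {"AWS": "*"},
+                    "Action": "glue:CreateTable",
+                    "Resource": "arn:aws:glue:us-west-2:123456789012:*"
+                }]
+            }),
+            # Keep IAM-based cross-account grants working alongside this
+            # resource policy.
+            enable_hybrid="TRUE"
+        )
+```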
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Resource Policy using the account ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Resource Policy using the account ID. For example:
+
+```console
+% terraform import aws_glue_resource_policy.Test 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_schema.html.markdown b/website/docs/cdktf/python/r/glue_schema.html.markdown
new file mode 100644
index 00000000000..f9c2b46a4ce
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_schema.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_schema"
+description: |-
+  Provides a Glue Schema resource.
+---
+
+
+# Resource: aws_glue_schema
+
+Provides a Glue Schema resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_schema import GlueSchema
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueSchema(self, "example",
+            compatibility="NONE",
+            data_format="AVRO",
+            registry_arn=test.arn,
+            schema_definition="{\\\"type\\\": \\\"record\\\", \\\"name\\\": \\\"r1\\\", \\\"fields\\\": [ {\\\"name\\\": \\\"f1\\\", \\\"type\\\": \\\"int\\\"}, {\\\"name\\\": \\\"f2\\\", \\\"type\\\": \\\"string\\\"} ]}",
+            schema_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `schema_name` – (Required) The Name of the schema.
+* `registry_arn` - (Required) The ARN of the Glue Registry to create the schema in.
+* `data_format` - (Required) The data format of the schema definition. Valid values are `AVRO`, `JSON`, and `PROTOBUF`.
+* `compatibility` - (Required) The compatibility mode of the schema. Valid values are: `NONE`, `DISABLED`, `BACKWARD`, `BACKWARD_ALL`, `FORWARD`, `FORWARD_ALL`, `FULL`, and `FULL_ALL`.
+* `schema_definition` - (Required) The schema definition using the `data_format` setting for `schema_name`.
+* `description` – (Optional) A description of the schema.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the schema.
+* `id` - Amazon Resource Name (ARN) of the schema.
+* `registry_name` - The name of the Glue Registry.
+* `latest_schema_version` - The latest version of the schema associated with the returned schema definition.
+* `next_schema_version` - The next version of the schema associated with the returned schema definition.
+* `schema_checkpoint` - The version number of the checkpoint (the last time the compatibility mode was changed).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Schemas using `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Schemas using `arn`. For example:
+
+```console
+% terraform import aws_glue_schema.example arn:aws:glue:us-west-2:123456789012:schema/example/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_security_configuration.html.markdown b/website/docs/cdktf/python/r/glue_security_configuration.html.markdown
new file mode 100644
index 00000000000..c5d4fa98786
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_security_configuration.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_security_configuration"
+description: |-
+  Manages a Glue Security Configuration
+---
+
+
+# Resource: aws_glue_security_configuration
+
+Manages a Glue Security Configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_security_configuration import GlueSecurityConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueSecurityConfiguration(self, "example",
+            encryption_configuration=GlueSecurityConfigurationEncryptionConfiguration(
+                cloudwatch_encryption=GlueSecurityConfigurationEncryptionConfigurationCloudwatchEncryption(
+                    cloudwatch_encryption_mode="DISABLED"
+                ),
+                job_bookmarks_encryption=GlueSecurityConfigurationEncryptionConfigurationJobBookmarksEncryption(
+                    job_bookmarks_encryption_mode="DISABLED"
+                ),
+                s3_encryption=GlueSecurityConfigurationEncryptionConfigurationS3Encryption(
+                    kms_key_arn=Token.as_string(data_aws_kms_key_example.arn),
+                    s3_encryption_mode="SSE-KMS"
+                )
+            ),
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `encryption_configuration` – (Required) Configuration block containing encryption configuration. Detailed below.
+* `name` – (Required) Name of the security configuration.
+
+### encryption_configuration Argument Reference
+
+* `cloudwatch_encryption` - (Required) A `cloudwatch_encryption` block as described below, which contains encryption configuration for CloudWatch.
+* `job_bookmarks_encryption` - (Required) A `job_bookmarks_encryption` block as described below, which contains encryption configuration for job bookmarks.
+* `s3_encryption` - (Required) An `s3_encryption` block as described below, which contains encryption configuration for S3 data.
+
+#### cloudwatch_encryption Argument Reference
+
+* `cloudwatch_encryption_mode` - (Optional) Encryption mode to use for CloudWatch data. Valid values: `DISABLED`, `SSE-KMS`. Default value: `DISABLED`.
+* `kms_key_arn` - (Optional) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data. + +#### job_bookmarks_encryption Argument Reference + +* `job_bookmarks_encryption_mode` - (Optional) Encryption mode to use for job bookmarks data. Valid values: `CSE-KMS`, `DISABLED`. Default value: `DISABLED`. +* `kms_key_arn` - (Optional) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data. + +#### s3_encryption Argument Reference + +* `s3_encryption_mode` - (Optional) Encryption mode to use for S3 data. Valid values: `DISABLED`, `SSE-KMS`, `SSE-S3`. Default value: `DISABLED`. +* `kms_key_arn` - (Optional) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Glue security configuration name + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Security Configurations using `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Glue Security Configurations using `name`. For example: + +```console +% terraform import aws_glue_security_configuration.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/glue_trigger.html.markdown b/website/docs/cdktf/python/r/glue_trigger.html.markdown new file mode 100644 index 00000000000..98c02fb1724 --- /dev/null +++ b/website/docs/cdktf/python/r/glue_trigger.html.markdown @@ -0,0 +1,245 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_trigger" +description: |- + Manages a Glue Trigger resource. +--- + + + +# Resource: aws_glue_trigger + +Manages a Glue Trigger resource. + +## Example Usage + +### Conditional Trigger + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.glue_trigger import GlueTrigger +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GlueTrigger(self, "example", + actions=[GlueTriggerActions( + job_name=example1.name + ) + ], + name="example", + predicate=GlueTriggerPredicate( + conditions=[GlueTriggerPredicateConditions( + job_name=example2.name, + state="SUCCEEDED" + ) + ] + ), + type="CONDITIONAL" + ) +``` + +### On-Demand Trigger + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.glue_trigger import GlueTrigger
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueTrigger(self, "example",
+            actions=[GlueTriggerActions(
+                job_name=Token.as_string(aws_glue_job_example.name)
+            )
+            ],
+            name="example",
+            type="ON_DEMAND"
+        )
+```
+
+### Scheduled Trigger
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_trigger import GlueTrigger
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueTrigger(self, "example",
+            actions=[GlueTriggerActions(
+                job_name=Token.as_string(aws_glue_job_example.name)
+            )
+            ],
+            name="example",
+            schedule="cron(15 12 * * ? *)",
+            type="SCHEDULED"
+        )
+```
+
+### Conditional Trigger with Crawler Action
+
+**Note:** Triggers can have both a crawler action and a crawler condition; no combined example is provided here.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_trigger import GlueTrigger
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueTrigger(self, "example",
+            actions=[GlueTriggerActions(
+                crawler_name=example1.name
+            )
+            ],
+            name="example",
+            predicate=GlueTriggerPredicate(
+                conditions=[GlueTriggerPredicateConditions(
+                    job_name=example2.name,
+                    state="SUCCEEDED"
+                )
+                ]
+            ),
+            type="CONDITIONAL"
+        )
+```
+
+### Conditional Trigger with Crawler Condition
+
+**Note:** Triggers can have both a crawler action and a crawler condition; no combined example is provided here.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_trigger import GlueTrigger
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GlueTrigger(self, "example",
+            actions=[GlueTriggerActions(
+                job_name=example1.name
+            )
+            ],
+            name="example",
+            predicate=GlueTriggerPredicate(
+                conditions=[GlueTriggerPredicateConditions(
+                    crawl_state="SUCCEEDED",
+                    crawler_name=example2.name
+                )
+                ]
+            ),
+            type="CONDITIONAL"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `actions` – (Required) List of actions initiated by this trigger when it fires. See [Actions](#actions) below.
+* `description` – (Optional) A description of the new trigger.
+* `enabled` – (Optional) Start the trigger. Defaults to `true`.
+* `name` – (Required) The name of the trigger.
+* `predicate` – (Optional) A predicate to specify when the new trigger should fire. Required when trigger type is `CONDITIONAL`. See [Predicate](#predicate) below.
+* `schedule` – (Optional) A cron expression used to specify the schedule.
+  See [Time-Based Schedules for Jobs and Crawlers](https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `start_on_creation` – (Optional) Set to true to start `SCHEDULED` and `CONDITIONAL` triggers when created. True is not supported for `ON_DEMAND` triggers.
+* `type` – (Required) The type of trigger. Valid values are `CONDITIONAL`, `EVENT`, `ON_DEMAND`, and `SCHEDULED`.
+* `workflow_name` - (Optional) A workflow with which the trigger should be associated. Every workflow graph (DAG) needs a starting trigger (`ON_DEMAND` or `SCHEDULED` type) and can contain multiple additional `CONDITIONAL` triggers.
+* `event_batching_condition` - (Optional) Batch condition that must be met (specified number of events received or batch time window expired) before EventBridge event trigger fires. See [Event Batching Condition](#event-batching-condition).
+
+### Actions
+
+* `arguments` - (Optional) Arguments to be passed to the job. You can specify arguments here that your own job-execution script consumes, as well as arguments that AWS Glue itself consumes.
+* `crawler_name` - (Optional) The name of the crawler to be executed. Conflicts with `job_name`.
+* `job_name` - (Optional) The name of a job to be executed. Conflicts with `crawler_name`.
+* `timeout` - (Optional) The job run timeout in minutes. It overrides the timeout value of the job.
+* `security_configuration` - (Optional) The name of the Security Configuration structure to be used with this action.
+* `notification_property` - (Optional) Specifies configuration properties of a job run notification. See [Notification Property](#notification-property) details below.
+
+#### Notification Property
+
+* `notify_delay_after` - (Optional) After a job run starts, the number of minutes to wait before sending a job run delay notification.
+
+### Predicate
+
+* `conditions` - (Required) A list of the conditions that determine when the trigger will fire. See [Conditions](#conditions).
+* `logical` - (Optional) How to handle multiple conditions. Defaults to `AND`. Valid values are `AND` or `ANY`.
+
+#### Conditions
+
+* `job_name` - (Optional) The name of the job to watch. If this is specified, `state` must also be specified. Conflicts with `crawler_name`.
+* `state` - (Optional) The condition job state. Currently, the values supported are `SUCCEEDED`, `STOPPED`, `TIMEOUT`, and `FAILED`. If this is specified, `job_name` must also be specified. Conflicts with `crawler_state`.
+* `crawler_name` - (Optional) The name of the crawler to watch. If this is specified, `crawl_state` must also be specified. Conflicts with `job_name`.
+* `crawl_state` - (Optional) The condition crawl state. Currently, the values supported are `RUNNING`, `SUCCEEDED`, `CANCELLED`, and `FAILED`. If this is specified, `crawler_name` must also be specified. Conflicts with `state`.
+* `logical_operator` - (Optional) A logical operator. Defaults to `EQUALS`.
+
+### Event Batching Condition
+
+* `batch_size` - (Required) Number of events that must be received from Amazon EventBridge before EventBridge event trigger fires.
+* `batch_window` - (Optional) Window of time in seconds after which EventBridge event trigger fires. Window starts when first event is received.
+  Default value is `900`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of Glue Trigger
+* `id` - Trigger name
+* `state` - The current state of the trigger.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Triggers using `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Triggers using `name`. For example:
+
+```console
+% terraform import aws_glue_trigger.MyTrigger MyTrigger
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_user_defined_function.html.markdown b/website/docs/cdktf/python/r/glue_user_defined_function.html.markdown
new file mode 100644
index 00000000000..dc820ac173e
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_user_defined_function.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_user_defined_function"
+description: |-
+  Provides a Glue User Defined Function.
+---
+
+
+
+# Resource: aws_glue_user_defined_function
+
+Provides a Glue User Defined Function Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_catalog_database import GlueCatalogDatabase
+from imports.aws.glue_user_defined_function import GlueUserDefinedFunction
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = GlueCatalogDatabase(self, "example",
+            name="my_database"
+        )
+        aws_glue_user_defined_function_example = GlueUserDefinedFunction(self, "example_1",
+            catalog_id=example.catalog_id,
+            class_name="class",
+            database_name=example.name,
+            name="my_func",
+            owner_name="owner",
+            owner_type="GROUP",
+            resource_uris=[GlueUserDefinedFunctionResourceUris(
+                resource_type="ARCHIVE",
+                uri="uri"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_glue_user_defined_function_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the function.
+* `catalog_id` - (Optional) ID of the Glue Catalog to create the function in. If omitted, this defaults to the AWS Account ID.
+* `database_name` - (Required) The name of the database in which to create the function.
+* `class_name` - (Required) The Java class that contains the function code.
+* `owner_name` - (Required) The owner of the function.
+* `owner_type` - (Required) The owner type. Can be one of `USER`, `ROLE`, and `GROUP`.
+* `resource_uris` - (Optional) The configuration block for Resource URIs. See [resource uris](#resource-uris) below for more details.
+
+### Resource URIs
+
+* `resource_type` - (Required) The type of the resource. Can be one of `JAR`, `FILE`, and `ARCHIVE`.
+* `uri` - (Required) The URI for accessing the resource.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Glue User Defined Function.
+* `arn` - The ARN of the Glue User Defined Function.
+* `create_time` - The time at which the function was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue User Defined Functions using the `catalog_id:database_name:function_name`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue User Defined Functions using the `catalog_id:database_name:function_name`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example:
+
+```console
+% terraform import aws_glue_user_defined_function.func 123456789012:my_database:my_func
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/glue_workflow.html.markdown b/website/docs/cdktf/python/r/glue_workflow.html.markdown
new file mode 100644
index 00000000000..0ce7fc27abe
--- /dev/null
+++ b/website/docs/cdktf/python/r/glue_workflow.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_workflow"
+description: |-
+  Provides a Glue Workflow resource.
+---
+
+
+
+# Resource: aws_glue_workflow
+
+Provides a Glue Workflow resource.
+The workflow graph (DAG) can be built using the `aws_glue_trigger` resource.
+See the example below for creating a graph with four nodes (two triggers and two jobs).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_trigger import GlueTrigger
+from imports.aws.glue_workflow import GlueWorkflow
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = GlueWorkflow(self, "example",
+            name="example"
+        )
+        GlueTrigger(self, "example-inner",
+            actions=[GlueTriggerActions(
+                job_name="another-example-job"
+            )
+            ],
+            name="trigger-inner",
+            predicate=GlueTriggerPredicate(
+                conditions=[GlueTriggerPredicateConditions(
+                    job_name="example-job",
+                    state="SUCCEEDED"
+                )
+                ]
+            ),
+            type="CONDITIONAL",
+            workflow_name=example.name
+        )
+        GlueTrigger(self, "example-start",
+            actions=[GlueTriggerActions(
+                job_name="example-job"
+            )
+            ],
+            name="trigger-start",
+            type="ON_DEMAND",
+            workflow_name=example.name
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` – (Required) The name you assign to this workflow.
+* `default_run_properties` – (Optional) A map of default run properties for this workflow. These properties are passed to all jobs associated with the workflow.
+* `description` – (Optional) Description of the workflow.
+* `max_concurrent_runs` - (Optional) Prevents exceeding the maximum number of concurrent runs of any of the component jobs. If you leave this parameter blank, there is no limit to the number of concurrent workflow runs.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of Glue Workflow
+* `id` - Workflow name
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Workflows using `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Glue Workflows using `name`. For example:
+
+```console
+% terraform import aws_glue_workflow.MyWorkflow MyWorkflow
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/grafana_license_association.html.markdown b/website/docs/cdktf/python/r/grafana_license_association.html.markdown
new file mode 100644
index 00000000000..b90207a78c4
--- /dev/null
+++ b/website/docs/cdktf/python/r/grafana_license_association.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "Managed Grafana"
+layout: "aws"
+page_title: "AWS: aws_grafana_license_association"
+description: |-
+  Provides an Amazon Managed Grafana workspace license association resource.
+---
+
+
+
+# Resource: aws_grafana_license_association
+
+Provides an Amazon Managed Grafana workspace license association resource.
+
+## Example Usage
+
+### Basic configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.grafana_license_association import GrafanaLicenseAssociation +from imports.aws.grafana_workspace import GrafanaWorkspace +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume = IamRole(self, "assume", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "grafana.amazonaws.com" + }, + "Sid": "" + } + ], + "Version": "2012-10-17" + })), + name="grafana-assume" + ) + example = GrafanaWorkspace(self, "example", + account_access_type="CURRENT_ACCOUNT", + authentication_providers=["SAML"], + permission_type="SERVICE_MANAGED", + role_arn=assume.arn + ) + aws_grafana_license_association_example = GrafanaLicenseAssociation(self, "example_2", + license_type="ENTERPRISE_FREE_TRIAL", + workspace_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_grafana_license_association_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `license_type` - (Required) The type of license for the workspace license association. Valid values are `ENTERPRISE` and `ENTERPRISE_FREE_TRIAL`. +* `workspace_id` - (Required) The workspace id. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `free_trial_expiration` - If `license_type` is set to `ENTERPRISE_FREE_TRIAL`, this is the expiration date of the free trial. +* `license_expiration` - If `license_type` is set to `ENTERPRISE`, this is the expiration date of the enterprise license. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Grafana workspace license association using the workspace's `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Grafana workspace license association using the workspace's `id`. For example: + +```console +% terraform import aws_grafana_license_association.example g-2054c75a02 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/grafana_role_association.html.markdown b/website/docs/cdktf/python/r/grafana_role_association.html.markdown new file mode 100644 index 00000000000..222840eafca --- /dev/null +++ b/website/docs/cdktf/python/r/grafana_role_association.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "Managed Grafana" +layout: "aws" +page_title: "AWS: aws_grafana_role_association" +description: |- + Provides an Amazon Managed Grafana workspace role association resource. +--- + + + +# Resource: aws_grafana_role_association + +Provides an Amazon Managed Grafana workspace role association resource. + +## Example Usage + +### Basic configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.grafana_role_association import GrafanaRoleAssociation
+from imports.aws.grafana_workspace import GrafanaWorkspace
+from imports.aws.iam_role import IamRole
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume = IamRole(self, "assume",
+            assume_role_policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": "sts:AssumeRole",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "grafana.amazonaws.com"
+                        },
+                        "Sid": ""
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            name="grafana-assume"
+        )
+        example = GrafanaWorkspace(self, "example",
+            account_access_type="CURRENT_ACCOUNT",
+            authentication_providers=["SAML"],
+            permission_type="SERVICE_MANAGED",
+            role_arn=assume.arn
+        )
+        aws_grafana_role_association_example = GrafanaRoleAssociation(self, "example_2",
+            role="ADMIN",
+            user_ids=["USER_ID_1", "USER_ID_2"],
+            workspace_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_grafana_role_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `role` - (Required) The Grafana role. Valid values can be found [here](https://docs.aws.amazon.com/grafana/latest/APIReference/API_UpdateInstruction.html#ManagedGrafana-Type-UpdateInstruction-role).
+* `workspace_id` - (Required) The workspace id.
+
+The following arguments are optional:
+
+* `group_ids` - (Optional) The AWS SSO group IDs to be assigned the role given in `role`.
+* `user_ids` - (Optional) The AWS SSO user IDs to be assigned the role given in `role`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/grafana_workspace.html.markdown b/website/docs/cdktf/python/r/grafana_workspace.html.markdown
new file mode 100644
index 00000000000..f9501ce9ad1
--- /dev/null
+++ b/website/docs/cdktf/python/r/grafana_workspace.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "Managed Grafana"
+layout: "aws"
+page_title: "AWS: aws_grafana_workspace"
+description: |-
+  Provides an Amazon Managed Grafana workspace resource.
+---
+
+
+
+# Resource: aws_grafana_workspace
+
+Provides an Amazon Managed Grafana workspace resource.
+
+## Example Usage
+
+### Basic configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.grafana_workspace import GrafanaWorkspace
+from imports.aws.iam_role import IamRole
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume = IamRole(self, "assume",
+            assume_role_policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": "sts:AssumeRole",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "grafana.amazonaws.com"
+                        },
+                        "Sid": ""
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            name="grafana-assume"
+        )
+        GrafanaWorkspace(self, "example",
+            account_access_type="CURRENT_ACCOUNT",
+            authentication_providers=["SAML"],
+            permission_type="SERVICE_MANAGED",
+            role_arn=assume.arn
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `account_access_type` - (Required) The type of account access for the workspace.
+  Valid values are `CURRENT_ACCOUNT` and `ORGANIZATION`. If `ORGANIZATION` is specified, then `organizational_units` must also be present.
+* `authentication_providers` - (Required) The authentication providers for the workspace. Valid values are `AWS_SSO`, `SAML`, or both.
+* `permission_type` - (Required) The permission type of the workspace. If `SERVICE_MANAGED` is specified, the IAM roles and IAM policy attachments are generated automatically. If `CUSTOMER_MANAGED` is specified, the IAM roles and IAM policy attachments will not be created.
+
+The following arguments are optional:
+
+* `configuration` - (Optional) The configuration string for the workspace that you create. For more information about the format and configuration options available, see [Working in your Grafana workspace](https://docs.aws.amazon.com/grafana/latest/userguide/AMG-configure-workspace.html).
+* `data_sources` - (Optional) The data sources for the workspace. Valid values are `AMAZON_OPENSEARCH_SERVICE`, `ATHENA`, `CLOUDWATCH`, `PROMETHEUS`, `REDSHIFT`, `SITEWISE`, `TIMESTREAM`, `XRAY`.
+* `description` - (Optional) The workspace description.
+* `grafana_version` - (Optional) Specifies the version of Grafana to support in the new workspace. Supported values are `8.4` and `9.4`. If not specified, defaults to `8.4`.
+* `name` - (Optional) The Grafana workspace name.
+* `network_access_control` - (Optional) Configuration for network access to your workspace. See [Network Access Control](#network-access-control) below.
+* `notification_destinations` - (Optional) The notification destinations. If a data source is specified here, Amazon Managed Grafana will create IAM roles and permissions needed to use these destinations. Must be set to `SNS`.
+* `organization_role_name` - (Optional) The role name that the workspace uses to access resources through Amazon Organizations.
+* `organizational_units` - (Optional) The Amazon Organizations organizational units that the workspace is authorized to use data sources from.
+* `role_arn` - (Optional) The IAM role ARN that the workspace assumes.
+* `stack_set_name` - (Optional) The AWS CloudFormation stack set name that provisions IAM roles to be used by the workspace.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_configuration` - (Optional) The configuration settings for an Amazon VPC that contains data sources for your Grafana workspace to connect to. See [VPC Configuration](#vpc-configuration) below.
+
+### Network Access Control
+
+* `prefix_list_ids` - (Required) - An array of prefix list IDs.
+* `vpce_ids` - (Required) - An array of Amazon VPC endpoint IDs for the workspace. The only VPC endpoints that can be specified here are interface VPC endpoints for Grafana workspaces (using the `com.amazonaws.[region].grafana-workspace` service endpoint). Other VPC endpoints will be ignored.
+
+### VPC Configuration
+
+* `security_group_ids` - (Required) - The list of Amazon EC2 security group IDs attached to the Amazon VPC for your Grafana workspace to connect.
+* `subnet_ids` - (Required) - The list of Amazon EC2 subnet IDs created in the Amazon VPC for your Grafana workspace to connect.
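+
+The sketch below is a hypothetical extension of the basic configuration above (not generated by `cdktf convert`) showing how these two optional blocks might be wired together. The struct names follow the usual naming pattern of the generated provider bindings, and all IDs are placeholders:
+
+```python
+# Hypothetical example - the prefix list, VPC endpoint, security group, and
+# subnet IDs are placeholders; "assume" is the IAM role from the example above.
+GrafanaWorkspace(self, "example_private",
+    account_access_type="CURRENT_ACCOUNT",
+    authentication_providers=["SAML"],
+    permission_type="SERVICE_MANAGED",
+    role_arn=assume.arn,
+    # Restrict who can reach the workspace UI and API.
+    network_access_control=GrafanaWorkspaceNetworkAccessControl(
+        prefix_list_ids=["pl-0123456789abcdef0"],
+        vpce_ids=["vpce-0123456789abcdef0"]
+    ),
+    # Let the workspace reach data sources that live inside a VPC.
+    vpc_configuration=GrafanaWorkspaceVpcConfiguration(
+        security_group_ids=["sg-0123456789abcdef0"],
+        subnet_ids=["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"]
+    )
+)
+```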
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Grafana workspace. +* `endpoint` - The endpoint of the Grafana workspace. +* `grafana_version` - The version of Grafana running on the workspace. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Grafana Workspace using the workspace's `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Grafana Workspace using the workspace's `id`. For example: + +```console +% terraform import aws_grafana_workspace.example g-2054c75a02 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/grafana_workspace_api_key.html.markdown b/website/docs/cdktf/python/r/grafana_workspace_api_key.html.markdown new file mode 100644 index 00000000000..f2fd6c739ae --- /dev/null +++ b/website/docs/cdktf/python/r/grafana_workspace_api_key.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Managed Grafana" +layout: "aws" +page_title: "AWS: aws_grafana_workspace_api_key" +description: |- + Creates a Grafana API key for the workspace. This key can be used to authenticate requests sent to the workspace's HTTP API. +--- + + + +# Resource: aws_grafana_workspace_api_key + +Provides an Amazon Managed Grafana workspace API Key resource. + +## Example Usage + +### Basic configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.grafana_workspace_api_key import GrafanaWorkspaceApiKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GrafanaWorkspaceApiKey(self, "key", + key_name="test-key", + key_role="VIEWER", + seconds_to_live=3600, + workspace_id=test.id + ) +``` + +## Argument Reference + +The following arguments are required: + +- `key_name` - (Required) Specifies the name of the API key. Key names must be unique to the workspace. +- `key_role` - (Required) Specifies the permission level of the API key. Valid values are `VIEWER`, `EDITOR`, or `ADMIN`. +- `seconds_to_live` - (Required) Specifies the time in seconds until the API key expires. Keys can be valid for up to 30 days. +- `workspace_id` - (Required) The ID of the workspace that the API key is valid for. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `key` - The key token in JSON format. Use this value as a bearer token to authenticate HTTP requests to the workspace. 
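+
+Because the token is only surfaced through this sensitive attribute, one way to hand it to other tooling is a sensitive stack output. A minimal sketch, assuming the `GrafanaWorkspaceApiKey` from the example above was assigned to a variable named `api_key`:
+
+```python
+from cdktf import TerraformOutput
+
+# Expose the generated token; marking it sensitive redacts it in CLI output.
+TerraformOutput(self, "grafana_api_key",
+    value=api_key.key,
+    sensitive=True
+)
+```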
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/grafana_workspace_saml_configuration.html.markdown b/website/docs/cdktf/python/r/grafana_workspace_saml_configuration.html.markdown
new file mode 100644
index 00000000000..6b522a48bab
--- /dev/null
+++ b/website/docs/cdktf/python/r/grafana_workspace_saml_configuration.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "Managed Grafana"
+layout: "aws"
+page_title: "AWS: aws_grafana_workspace_saml_configuration"
+description: |-
+  Provides an Amazon Managed Grafana workspace SAML configuration resource.
+---
+
+
+
+# Resource: aws_grafana_workspace_saml_configuration
+
+Provides an Amazon Managed Grafana workspace SAML configuration resource.
+
+## Example Usage
+
+### Basic configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.grafana_workspace import GrafanaWorkspace
+from imports.aws.grafana_workspace_saml_configuration import GrafanaWorkspaceSamlConfiguration
+from imports.aws.iam_role import IamRole
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume = IamRole(self, "assume",
+            assume_role_policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": "sts:AssumeRole",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "grafana.amazonaws.com"
+                        },
+                        "Sid": ""
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            name="grafana-assume"
+        )
+        example = GrafanaWorkspace(self, "example",
+            account_access_type="CURRENT_ACCOUNT",
+            authentication_providers=["SAML"],
+            permission_type="SERVICE_MANAGED",
+            role_arn=assume.arn
+        )
+        aws_grafana_workspace_saml_configuration_example = GrafanaWorkspaceSamlConfiguration(self, "example_2",
+            editor_role_values=["editor"],
+            idp_metadata_url="https://my_idp_metadata.url",
+            workspace_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_grafana_workspace_saml_configuration_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `editor_role_values` - (Required) The editor role values.
+* `workspace_id` - (Required) The workspace id.
+
+The following arguments are optional:
+
+* `admin_role_values` - (Optional) The admin role values.
+* `allowed_organizations` - (Optional) The allowed organizations.
+* `email_assertion` - (Optional) The email assertion.
+* `groups_assertion` - (Optional) The groups assertion.
+* `idp_metadata_url` - (Optional) The IDP Metadata URL. Note that either `idp_metadata_url` or `idp_metadata_xml` (but not both) must be specified.
+* `idp_metadata_xml` - (Optional) The IDP Metadata XML. Note that either `idp_metadata_url` or `idp_metadata_xml` (but not both) must be specified.
+* `login_assertion` - (Optional) The login assertion.
+* `login_validity_duration` - (Optional) The login validity duration.
+* `name_assertion` - (Optional) The name assertion.
+* `org_assertion` - (Optional) The org assertion.
+* `role_assertion` - (Optional) The role assertion.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `status` - The status of the SAML configuration.
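+
+Since exactly one of `idp_metadata_url` or `idp_metadata_xml` must be set, a hypothetical variant of the example above supplies the metadata as XML instead, reading it from a local file via `Fn.file` (Terraform's `file()` function; the file name is a placeholder):
+
+```python
+# Hypothetical XML variant - "saml-metadata.xml" is a placeholder path and
+# "example" is the GrafanaWorkspace from the example above.
+GrafanaWorkspaceSamlConfiguration(self, "example_xml",
+    editor_role_values=["editor"],
+    idp_metadata_xml=Fn.file("saml-metadata.xml"),
+    workspace_id=example.id
+)
+```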
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Grafana Workspace SAML configuration using the workspace's `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Grafana Workspace SAML configuration using the workspace's `id`. For example:
+
+```console
+% terraform import aws_grafana_workspace_saml_configuration.example g-2054c75a02
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/guardduty_detector.html.markdown b/website/docs/cdktf/python/r/guardduty_detector.html.markdown
new file mode 100644
index 00000000000..e6150c71823
--- /dev/null
+++ b/website/docs/cdktf/python/r/guardduty_detector.html.markdown
@@ -0,0 +1,145 @@
+---
+subcategory: "GuardDuty"
+layout: "aws"
+page_title: "AWS: aws_guardduty_detector"
+description: |-
+  Provides a resource to manage a GuardDuty detector
+---
+
+
+
+# Resource: aws_guardduty_detector
+
+Provides a resource to manage a GuardDuty detector.
+
+~> **NOTE:** Deleting this resource is equivalent to "disabling" GuardDuty for an AWS region, which removes all existing findings. You can set the `enable` attribute to `false` to instead "suspend" monitoring and feedback reporting while keeping existing data. See the [Suspending or Disabling Amazon GuardDuty documentation](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_suspend-disable.html) for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.guardduty_detector import GuarddutyDetector
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        GuarddutyDetector(self, "MyDetector",
+            datasources=GuarddutyDetectorDatasources(
+                kubernetes=GuarddutyDetectorDatasourcesKubernetes(
+                    audit_logs=GuarddutyDetectorDatasourcesKubernetesAuditLogs(
+                        enable=False
+                    )
+                ),
+                malware_protection=GuarddutyDetectorDatasourcesMalwareProtection(
+                    scan_ec2_instance_with_findings=GuarddutyDetectorDatasourcesMalwareProtectionScanEc2InstanceWithFindings(
+                        ebs_volumes=GuarddutyDetectorDatasourcesMalwareProtectionScanEc2InstanceWithFindingsEbsVolumes(
+                            enable=True
+                        )
+                    )
+                ),
+                s3_logs=GuarddutyDetectorDatasourcesS3Logs(
+                    enable=True
+                )
+            ),
+            enable=True
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `enable` - (Optional) Enable monitoring and feedback reporting. Setting to `false` is equivalent to "suspending" GuardDuty. Defaults to `true`.
+* `finding_publishing_frequency` - (Optional) Specifies the frequency of notifications sent for subsequent finding occurrences. If the detector is a GuardDuty member account, the value is determined by the GuardDuty primary account and cannot be modified; otherwise, it defaults to `SIX_HOURS`. For standalone and GuardDuty primary accounts, it must be configured in Terraform to enable drift detection. Valid values for standalone and primary accounts: `FIFTEEN_MINUTES`, `ONE_HOUR`, `SIX_HOURS`.
+  See [AWS Documentation](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_findings_cloudwatch.html#guardduty_findings_cloudwatch_notification_frequency) for more information.
+* `datasources` - (Optional) Describes which data sources will be enabled for the detector. See [Data Sources](#data-sources) below for more details.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Data Sources
+
+The `datasources` block supports the following:
+
+* `s3_logs` - (Optional) Configures [S3 protection](https://docs.aws.amazon.com/guardduty/latest/ug/s3-protection.html).
+  See [S3 Logs](#s3-logs) below for more details.
+* `kubernetes` - (Optional) Configures [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html).
+  See [Kubernetes](#kubernetes) and [Kubernetes Audit Logs](#kubernetes-audit-logs) below for more details.
+* `malware_protection` - (Optional) Configures [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html).
+  See [Malware Protection](#malware-protection), [Scan EC2 instance with findings](#scan-ec2-instance-with-findings) and [EBS volumes](#ebs-volumes) below for more details.
+
+### S3 Logs
+
+The `s3_logs` block supports the following:
+
+* `enable` - (Required) If true, enables [S3 protection](https://docs.aws.amazon.com/guardduty/latest/ug/s3-protection.html).
+  Defaults to `true`.
+
+### Kubernetes
+
+The `kubernetes` block supports the following:
+
+* `audit_logs` - (Required) Configures Kubernetes audit logs as a data source for [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html).
+  See [Kubernetes Audit Logs](#kubernetes-audit-logs) below for more details.
+
+### Kubernetes Audit Logs
+
+The `audit_logs` block supports the following:
+
+* `enable` - (Required) If true, enables Kubernetes audit logs as a data source for [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html).
+  Defaults to `true`.
+
+### Malware Protection
+
+The `malware_protection` block supports the following:
+
+* `scan_ec2_instance_with_findings` - (Required) Configure whether [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) is enabled as data source for EC2 instances with findings for the detector.
+  See [Scan EC2 instance with findings](#scan-ec2-instance-with-findings) below for more details.
+
+#### Scan EC2 instance with findings
+
+The `scan_ec2_instance_with_findings` block supports the following:
+
+* `ebs_volumes` - (Required) Configure whether scanning EBS volumes is enabled as data source for the detector for instances with findings.
+  See [EBS volumes](#ebs-volumes) below for more details.
+
+#### EBS volumes
+
+The `ebs_volumes` block supports the following:
+
+* `enable` - (Required) If true, enables [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) as data source for the detector.
+  Defaults to `true`.
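+
+The example at the top of this page exercises every `datasources` block. For the common standalone-account case, a minimal sketch (not generated by `cdktf convert`) that also pins `finding_publishing_frequency` so Terraform can detect drift, per the argument reference above:
+
+```python
+# Minimal detector: default data sources, explicit notification frequency.
+GuarddutyDetector(self, "minimal",
+    enable=True,
+    finding_publishing_frequency="ONE_HOUR"
+)
+```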
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `account_id` - The AWS account ID of the GuardDuty detector +* `arn` - Amazon Resource Name (ARN) of the GuardDuty detector +* `id` - The ID of the GuardDuty detector +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty detectors using the detector ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import GuardDuty detectors using the detector ID. For example: + +```console +% terraform import aws_guardduty_detector.MyDetector 00b00fd5aecc0ab60a708659477e9617 +``` + +The ID of the detector can be retrieved via the [AWS CLI](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/guardduty/list-detectors.html) using `aws guardduty list-detectors`. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/guardduty_filter.html.markdown b/website/docs/cdktf/python/r/guardduty_filter.html.markdown new file mode 100644 index 00000000000..d5433f85005 --- /dev/null +++ b/website/docs/cdktf/python/r/guardduty_filter.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_filter" +description: |- + Provides a resource to manage a GuardDuty filter +--- + + + +# Resource: aws_guardduty_filter + +Provides a resource to manage a GuardDuty filter. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.guardduty_filter import GuarddutyFilter +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + GuarddutyFilter(self, "MyFilter", + action="ARCHIVE", + detector_id=example.id, + finding_criteria=GuarddutyFilterFindingCriteria( + criterion=[GuarddutyFilterFindingCriteriaCriterion( + equal_to=["eu-west-1"], + field="region" + ), GuarddutyFilterFindingCriteriaCriterion( + field="service.additionalInfo.threatListName", + not_equals=["some-threat", "another-threat"] + ), GuarddutyFilterFindingCriteriaCriterion( + field="updatedAt", + greater_than="2020-01-01T00:00:00Z", + less_than="2020-02-01T00:00:00Z" + ), GuarddutyFilterFindingCriteriaCriterion( + field="severity", + greater_than_or_equal="4" + ) + ] + ), + name="MyFilter", + rank=1 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `detector_id` - (Required) ID of a GuardDuty detector, attached to your account. +* `name` - (Required) The name of your filter. +* `description` - (Optional) Description of the filter. +* `rank` - (Required) Specifies the position of the filter in the list of current filters. Also specifies the order in which this filter is applied to the findings. 
+* `action` - (Required) Specifies the action that is to be applied to the findings that match the filter. Can be one of `ARCHIVE` or `NOOP`.
+* `tags` - (Optional) The tags that you want to add to the Filter resource. A tag consists of a key and a value. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `finding_criteria` - (Required) Represents the criteria to be used in the filter for querying findings. Contains one or more `criterion` blocks, documented [below](#criterion).
+
+### criterion
+
+The `criterion` block supports the following:
+
+* `field` - (Required) The name of the field to be evaluated. The full list of field names can be found in [AWS documentation](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_filter-findings.html#filter_criteria).
+* `equals` - (Optional) List of string values to be evaluated.
+* `not_equals` - (Optional) List of string values to be evaluated.
+* `greater_than` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `greater_than_or_equal` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `less_than` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `less_than_or_equal` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the GuardDuty filter.
+* `id` - A compound field, consisting of the ID of the GuardDuty detector and the name of the filter.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty filters using the detector ID and filter's name separated by a colon. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import GuardDuty filters using the detector ID and filter's name separated by a colon. For example:
+
+```console
+% terraform import aws_guardduty_filter.MyFilter 00b00fd5aecc0ab60a708659477e9617:MyFilter
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/guardduty_invite_accepter.html.markdown b/website/docs/cdktf/python/r/guardduty_invite_accepter.html.markdown
new file mode 100644
index 00000000000..19568a6732f
--- /dev/null
+++ b/website/docs/cdktf/python/r/guardduty_invite_accepter.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "GuardDuty"
+layout: "aws"
+page_title: "AWS: aws_guardduty_invite_accepter"
+description: |-
+  Provides a resource to accept a pending GuardDuty invite on creation, ensure the detector has the correct primary account on read, and disassociate from the primary account upon removal.
+---
+
+
+
+# Resource: aws_guardduty_invite_accepter
+
+Provides a resource to accept a pending GuardDuty invite on creation, ensure the detector has the correct primary account on read, and disassociate from the primary account upon removal.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.guardduty_detector import GuarddutyDetector
+from imports.aws.guardduty_invite_accepter import GuarddutyInviteAccepter
+from imports.aws.guardduty_member import GuarddutyMember
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = AwsProvider(self, "aws",
+            alias="primary"
+        )
+        member = AwsProvider(self, "aws_1",
+            alias="member"
+        )
+        aws_guardduty_detector_member = GuarddutyDetector(self, "member",
+            provider=member
+        )
+        aws_guardduty_detector_primary = GuarddutyDetector(self, "primary",
+            provider=primary
+        )
+        aws_guardduty_member_member = GuarddutyMember(self, "member_4",
+            account_id=Token.as_string(aws_guardduty_detector_member.account_id),
+            detector_id=Token.as_string(aws_guardduty_detector_primary.id),
+            email="required@example.com",
+            invite=True,
+            provider=primary
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_guardduty_member_member.override_logical_id("member")
+        aws_guardduty_invite_accepter_member = GuarddutyInviteAccepter(self, "member_5",
+            depends_on=[aws_guardduty_member_member],
+            detector_id=Token.as_string(aws_guardduty_detector_member.id),
+            master_account_id=Token.as_string(aws_guardduty_detector_primary.account_id),
+            provider=member
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_guardduty_invite_accepter_member.override_logical_id("member")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `detector_id` - (Required) The detector ID of the member GuardDuty account.
+* `master_account_id` - (Required) AWS account ID for primary account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - GuardDuty member detector ID
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `1m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_guardduty_invite_accepter` using the member GuardDuty detector ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_guardduty_invite_accepter` using the member GuardDuty detector ID. For example:
+
+```console
+% terraform import aws_guardduty_invite_accepter.member 00b00fd5aecc0ab60a708659477e9617
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/guardduty_ipset.html.markdown b/website/docs/cdktf/python/r/guardduty_ipset.html.markdown
new file mode 100644
index 00000000000..8c1cbf9a52b
--- /dev/null
+++ b/website/docs/cdktf/python/r/guardduty_ipset.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "GuardDuty"
+layout: aws
+page_title: 'AWS: aws_guardduty_ipset'
+description: Provides a resource to manage a GuardDuty IPSet
+---
+
+
+
+# Resource: aws_guardduty_ipset
+
+Provides a resource to manage a GuardDuty IPSet.
+
+~> **Note:** Currently in GuardDuty, users from member accounts cannot upload and further manage IPSets. IPSets that are uploaded by the primary account are imposed on GuardDuty functionality in its member accounts. See the [GuardDuty API Documentation](https://docs.aws.amazon.com/guardduty/latest/ug/create-ip-set.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.guardduty_detector import GuarddutyDetector
+from imports.aws.guardduty_ipset import GuarddutyIpset
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+from imports.aws.s3_object import S3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = GuarddutyDetector(self, "primary",
+            enable=True
+        )
+        bucket = S3Bucket(self, "bucket")
+        S3BucketAcl(self, "bucket_acl",
+            acl="private",
+            bucket=bucket.id
+        )
+        my_ip_set = S3Object(self, "MyIPSet",
+            bucket=bucket.id,
+            content="10.0.0.0/8\n\n",
+            key="MyIPSet"
+        )
+        GuarddutyIpset(self, "example",
+            activate=True,
+            detector_id=primary.id,
+            format="TXT",
+            location="https://s3.amazonaws.com/${" + my_ip_set.bucket + "}/${" + my_ip_set.key + "}",
+            name="MyIPSet"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `activate` - (Required) Specifies whether GuardDuty is to start using the uploaded IPSet.
+* `detector_id` - (Required) The detector ID of the GuardDuty account.
+* `format` - (Required) The format of the file that contains the IPSet. Valid values: `TXT` | `STIX` | `OTX_CSV` | `ALIEN_VAULT` | `PROOF_POINT` | `FIRE_EYE`
+* `location` - (Required) The URI of the file that contains the IPSet.
+* `name` - (Required) The friendly name to identify the IPSet. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the GuardDuty IPSet. +* `id` - The ID of the GuardDuty IPSet. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty IPSet using the primary GuardDuty detector ID and IPSet ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import GuardDuty IPSet using the primary GuardDuty detector ID and IPSet ID. For example: + +```console +% terraform import aws_guardduty_ipset.MyIPSet 00b00fd5aecc0ab60a708659477e9617:123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/guardduty_member.html.markdown b/website/docs/cdktf/python/r/guardduty_member.html.markdown new file mode 100644 index 00000000000..ebb8f3de57f --- /dev/null +++ b/website/docs/cdktf/python/r/guardduty_member.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_member" +description: |- + Provides a resource to manage a GuardDuty member +--- + + + +# Resource: aws_guardduty_member + +Provides a resource to manage a GuardDuty member. To accept invitations in member accounts, see the [`aws_guardduty_invite_accepter` resource](/docs/providers/aws/r/guardduty_invite_accepter.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.guardduty_detector import GuarddutyDetector +from imports.aws.guardduty_member import GuarddutyMember +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + member = GuarddutyDetector(self, "member", + enable=True, + provider=dev + ) + primary = GuarddutyDetector(self, "primary", + enable=True + ) + aws_guardduty_member_member = GuarddutyMember(self, "member_2", + account_id=member.account_id, + detector_id=primary.id, + email="required@example.com", + invitation_message="please accept guardduty invitation", + invite=True + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_guardduty_member_member.override_logical_id("member") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Required) AWS account ID for member account. 
+* `detector_id` - (Required) The detector ID of the GuardDuty account where you want to create member accounts. +* `email` - (Required) Email address for member account. +* `invite` - (Optional) Boolean whether to invite the account to GuardDuty as a member. Defaults to `false`. To detect if an invitation needs to be (re-)sent, the Terraform state value is `true` based on a `relationship_status` of `Disabled`, `Enabled`, `Invited`, or `EmailVerificationInProgress`. +* `invitation_message` - (Optional) Message for invitation. +* `disable_email_notification` - (Optional) Boolean whether an email notification is sent to the accounts. Defaults to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the GuardDuty member +* `relationship_status` - The status of the relationship between the member account and its primary account. More information can be found in [Amazon GuardDuty API Reference](https://docs.aws.amazon.com/guardduty/latest/ug/get-members.html). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `1m`) +- `update` - (Default `1m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty members using the primary GuardDuty detector ID and member AWS account ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import GuardDuty members using the primary GuardDuty detector ID and member AWS account ID. For example: + +```console +% terraform import aws_guardduty_member.MyMember 00b00fd5aecc0ab60a708659477e9617:123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/guardduty_organization_admin_account.html.markdown b/website/docs/cdktf/python/r/guardduty_organization_admin_account.html.markdown new file mode 100644 index 00000000000..a3ed1a8912f --- /dev/null +++ b/website/docs/cdktf/python/r/guardduty_organization_admin_account.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_organization_admin_account" +description: |- + Manages a GuardDuty Organization Admin Account +--- + + + +# Resource: aws_guardduty_organization_admin_account + +Manages a GuardDuty Organization Admin Account. The AWS account utilizing this resource must be an Organizations primary account. More information about Organizations support in GuardDuty can be found in the [GuardDuty User Guide](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_organizations.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
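# The stack below enables a GuardDuty detector, grants GuardDuty service
# access at the organization level, and then delegates GuardDuty
# administration to the account ID passed as admin_account_id.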
+
#
from imports.aws.guardduty_detector import GuarddutyDetector
from imports.aws.guardduty_organization_admin_account import GuarddutyOrganizationAdminAccount
from imports.aws.organizations_organization import OrganizationsOrganization
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        GuarddutyDetector(self, "example")
        aws_organizations_organization_example = OrganizationsOrganization(self, "example_1",
            aws_service_access_principals=["guardduty.amazonaws.com"],
            feature_set="ALL"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_organizations_organization_example.override_logical_id("example")
        aws_guardduty_organization_admin_account_example = GuarddutyOrganizationAdminAccount(self, "example_2",
            admin_account_id="123456789012",
            depends_on=[aws_organizations_organization_example]
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_guardduty_organization_admin_account_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `admin_account_id` - (Required) AWS account identifier to designate as a delegated administrator for GuardDuty.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - AWS account identifier.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty Organization Admin Account using the AWS account ID. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import GuardDuty Organization Admin Account using the AWS account ID. For example:

```console
% terraform import aws_guardduty_organization_admin_account.example 123456789012
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/guardduty_organization_configuration.html.markdown new file mode 100644 index 00000000000..0aefbe1c92b --- /dev/null +++ b/website/docs/cdktf/python/r/guardduty_organization_configuration.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_organization_configuration" +description: |- + Manages the GuardDuty Organization Configuration +--- + + + +# Resource: aws_guardduty_organization_configuration + +Manages the GuardDuty Organization Configuration in the current AWS Region. The AWS account utilizing this resource must have been assigned as a delegated Organization administrator account, e.g., via the [`aws_guardduty_organization_admin_account` resource](/docs/providers/aws/r/guardduty_organization_admin_account.html). More information about Organizations support in GuardDuty can be found in the [GuardDuty User Guide](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_organizations.html). + +~> **NOTE:** This is an advanced Terraform resource.
Terraform will automatically assume management of the GuardDuty Organization Configuration without import and perform no actions on removal from the Terraform configuration.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.guardduty_detector import GuarddutyDetector
# The nested datasource configuration structs are assumed to be exported by
# the same generated module, as is conventional for cdktf provider bindings.
from imports.aws.guardduty_organization_configuration import (
    GuarddutyOrganizationConfiguration,
    GuarddutyOrganizationConfigurationDatasources,
    GuarddutyOrganizationConfigurationDatasourcesKubernetes,
    GuarddutyOrganizationConfigurationDatasourcesKubernetesAuditLogs,
    GuarddutyOrganizationConfigurationDatasourcesMalwareProtection,
    GuarddutyOrganizationConfigurationDatasourcesMalwareProtectionScanEc2InstanceWithFindings,
    GuarddutyOrganizationConfigurationDatasourcesMalwareProtectionScanEc2InstanceWithFindingsEbsVolumes,
    GuarddutyOrganizationConfigurationDatasourcesS3Logs
)
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = GuarddutyDetector(self, "example",
            enable=True
        )
        aws_guardduty_organization_configuration_example = GuarddutyOrganizationConfiguration(self, "example_1",
            auto_enable_organization_members="ALL",
            datasources=GuarddutyOrganizationConfigurationDatasources(
                kubernetes=GuarddutyOrganizationConfigurationDatasourcesKubernetes(
                    audit_logs=GuarddutyOrganizationConfigurationDatasourcesKubernetesAuditLogs(
                        enable=True
                    )
                ),
                malware_protection=GuarddutyOrganizationConfigurationDatasourcesMalwareProtection(
                    scan_ec2_instance_with_findings=GuarddutyOrganizationConfigurationDatasourcesMalwareProtectionScanEc2InstanceWithFindings(
                        ebs_volumes=GuarddutyOrganizationConfigurationDatasourcesMalwareProtectionScanEc2InstanceWithFindingsEbsVolumes(
                            auto_enable=True
                        )
                    )
                ),
                s3_logs=GuarddutyOrganizationConfigurationDatasourcesS3Logs(
                    auto_enable=True
                )
            ),
            detector_id=example.id
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_guardduty_organization_configuration_example.override_logical_id("example")
```

## Argument Reference

~> **NOTE:** One of `auto_enable` or `auto_enable_organization_members` must be specified.

This resource supports the following arguments:

* `auto_enable` - (Optional) *Deprecated:* Use `auto_enable_organization_members` instead. When this setting is enabled, all new accounts that are created in, or added to, the organization are added as member accounts of the organization’s GuardDuty delegated administrator and GuardDuty is enabled in that AWS Region.
* `auto_enable_organization_members` - (Optional) Indicates the auto-enablement configuration of GuardDuty for the member accounts in the organization. Valid values are `ALL`, `NEW`, `NONE`.
* `detector_id` - (Required) The detector ID of the GuardDuty account.
* `datasources` - (Optional) Configuration for the collected datasources.

`datasources` supports the following:

* `s3_logs` - (Optional) Enable S3 Protection automatically for new member accounts.
* `kubernetes` - (Optional) Enable Kubernetes Audit Logs Monitoring automatically for new member accounts.
* `malware_protection` - (Optional) Enable Malware Protection automatically for new member accounts.

### S3 Logs

`s3_logs` block supports the following:

* `auto_enable` - (Optional) Set to `true` if you want S3 data event logs to be automatically enabled for new members of the organization. Default: `false`

### Kubernetes

`kubernetes` block supports the following:

* `audit_logs` - (Required) Enable Kubernetes Audit Logs Monitoring automatically for new member accounts. [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html).
+ See [Kubernetes Audit Logs](#kubernetes-audit-logs) below for more details. + +#### Kubernetes Audit Logs + +The `audit_logs` block supports the following: + +* `enable` - (Required) If true, enables Kubernetes audit logs as a data source for [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html). + Defaults to `true`. + +### Malware Protection + +`malware_protection` block supports the following: + +* `scan_ec2_instance_with_findings` - (Required) Configure whether [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) for EC2 instances with findings should be auto-enabled for new members joining the organization. + See [Scan EC2 instance with findings](#scan-ec2-instance-with-findings) below for more details. + +#### Scan EC2 instance with findings + +The `scan_ec2_instance_with_findings` block supports the following: + +* `ebs_volumes` - (Required) Configure whether scanning EBS volumes should be auto-enabled for new members joining the organization + See [EBS volumes](#ebs-volumes) below for more details. + +#### EBS volumes + +The `ebs_volumes` block supports the following: + +* `auto_enable` - (Required) If true, enables [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) for all new accounts joining the organization. + Defaults to `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the GuardDuty Detector. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty Organization Configurations using the GuardDuty Detector ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import GuardDuty Organization Configurations using the GuardDuty Detector ID. For example: + +```console +% terraform import aws_guardduty_organization_configuration.example 00b00fd5aecc0ab60a708659477e9617 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/guardduty_publishing_destination.html.markdown b/website/docs/cdktf/python/r/guardduty_publishing_destination.html.markdown new file mode 100644 index 00000000000..f81ed581676 --- /dev/null +++ b/website/docs/cdktf/python/r/guardduty_publishing_destination.html.markdown @@ -0,0 +1,152 @@ +--- +subcategory: "GuardDuty" +layout: aws +page_title: 'AWS: aws_guardduty_publishing_destination' +description: Provides a resource to manage a GuardDuty PublishingDestination +--- + + + +# Resource: aws_guardduty_publishing_destination + +Provides a resource to manage a GuardDuty PublishingDestination. Requires an existing GuardDuty Detector. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
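# The stack below provisions everything the publishing destination depends on:
# an S3 bucket whose policy lets GuardDuty call s3:PutObject and
# s3:GetBucketLocation, and a KMS key GuardDuty can use to encrypt findings.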
+# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.guardduty_detector import GuarddutyDetector +from imports.aws.guardduty_publishing_destination import GuarddutyPublishingDestination +from imports.aws.kms_key import KmsKey +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_policy import S3BucketPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_gd = GuarddutyDetector(self, "test_gd", + enable=True + ) + gd_bucket = S3Bucket(self, "gd_bucket", + bucket="example", + force_destroy=True + ) + S3BucketAcl(self, "gd_bucket_acl", + acl="private", + bucket=gd_bucket.id + ) + current = DataAwsCallerIdentity(self, "current") + bucket_pol = DataAwsIamPolicyDocument(self, "bucket_pol", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:PutObject"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["guardduty.amazonaws.com"], + type="Service" + ) + ], + resources=["${" + gd_bucket.arn + "}/*"], + sid="Allow PutObject" + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:GetBucketLocation"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["guardduty.amazonaws.com"], + type="Service" + ) + ], + resources=[gd_bucket.arn], + sid="Allow GetBucketLocation" + ) + ] + ) + data_aws_region_current = DataAwsRegion(self, "current_5") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_region_current.override_logical_id("current") + gd_bucket_policy = S3BucketPolicy(self, "gd_bucket_policy", + bucket=gd_bucket.id, + policy=Token.as_string(bucket_pol.json) + ) + kms_pol = DataAwsIamPolicyDocument(self, "kms_pol", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["kms:GenerateDataKey"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["guardduty.amazonaws.com"], + type="Service" + ) + ], + resources=["arn:aws:kms:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:key/*" + ], + sid="Allow GuardDuty to encrypt findings" + ), DataAwsIamPolicyDocumentStatement( + actions=["kms:*"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["arn:aws:iam::${" + current.account_id + "}:root"], + type="AWS" + ) + ], + resources=["arn:aws:kms:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:key/*" + ], + sid="Allow all users to modify/delete key (test only)" + ) + ] + ) + gd_key = KmsKey(self, "gd_key", + deletion_window_in_days=7, + description="Temporary key for AccTest of TF", + policy=Token.as_string(kms_pol.json) + ) + GuarddutyPublishingDestination(self, "test", + depends_on=[gd_bucket_policy], + destination_arn=gd_bucket.arn, + detector_id=test_gd.id, + kms_key_arn=gd_key.arn + ) +``` + +~> **Note:** Please do not use this simple example for Bucket-Policy and KMS Key Policy in a production environment. It is much too open for such a use-case. Refer to the AWS documentation here: https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_exportfindings.html + +## Argument Reference + +This resource supports the following arguments: + +* `detector_id` - (Required) The detector ID of the GuardDuty. 
+
* `destination_arn` - (Required) The ARN of the bucket and an optional prefix under which the findings get exported. The bucket ARN is required; the prefix is optional and defaults to `AWSLogs/[Account-ID]/GuardDuty/[Region]/` if not provided.
* `kms_key_arn` - (Required) The ARN of the KMS key used to encrypt GuardDuty findings. GuardDuty enforces that exported findings are encrypted.
* `destination_type` - (Optional) Currently `S3` is the only supported destination type, and it is also the default value.

~> **Note:** If permissions are missing (the S3 bucket policy _or_ the KMS key permissions), the resource will fail to create. If the permissions are changed after resource creation, the current state can be verified via the AWS API's "DescribePublishingDestination" call (https://docs.aws.amazon.com/cli/latest/reference/guardduty/describe-publishing-destination.html).

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the GuardDuty PublishingDestination and the detector ID. Format: `<detector_id>:<publishing_destination_id>`

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty PublishingDestination using the master GuardDuty detector ID and PublishingDestinationID. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import GuardDuty PublishingDestination using the master GuardDuty detector ID and PublishingDestinationID. For example:

```console
% terraform import aws_guardduty_publishing_destination.test a4b86f26fa42e7e7cf0d1c333ea77777:a4b86f27a0e464e4a7e0516d242f1234
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/guardduty_threatintelset.html.markdown new file mode 100644 index 00000000000..ae34313117c --- /dev/null +++ b/website/docs/cdktf/python/r/guardduty_threatintelset.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "GuardDuty" +layout: aws +page_title: 'AWS: aws_guardduty_threatintelset' +description: Provides a resource to manage a GuardDuty ThreatIntelSet +--- + + + +# Resource: aws_guardduty_threatintelset + +Provides a resource to manage a GuardDuty ThreatIntelSet. + +~> **Note:** Currently in GuardDuty, users from member accounts cannot upload and further manage ThreatIntelSets. ThreatIntelSets that are uploaded by the primary account are imposed on GuardDuty functionality in its member accounts. See the [GuardDuty API Documentation](https://docs.aws.amazon.com/guardduty/latest/ug/create-threat-intel-set.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details.
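# The stack below uploads a CIDR list to S3 as a public-read object and then
# registers it with the detector as a TXT-format ThreatIntelSet, referencing
# the object through its plain https://s3.amazonaws.com URL.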
+
#
from imports.aws.guardduty_detector import GuarddutyDetector
from imports.aws.guardduty_threatintelset import GuarddutyThreatintelset
from imports.aws.s3_bucket import S3Bucket
from imports.aws.s3_bucket_acl import S3BucketAcl
from imports.aws.s3_object import S3Object
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        primary = GuarddutyDetector(self, "primary",
            enable=True
        )
        bucket = S3Bucket(self, "bucket")
        S3BucketAcl(self, "bucket_acl",
            acl="private",
            bucket=bucket.id
        )
        my_threat_intel_set = S3Object(self, "MyThreatIntelSet",
            acl="public-read",
            bucket=bucket.id,
            content="10.0.0.0/8\n\n",
            key="MyThreatIntelSet"
        )
        aws_guardduty_threatintelset_my_threat_intel_set = GuarddutyThreatintelset(self, "MyThreatIntelSet_4",
            activate=True,
            detector_id=primary.id,
            format="TXT",
            location="https://s3.amazonaws.com/${" + my_threat_intel_set.bucket + "}/${" + my_threat_intel_set.key + "}",
            name="MyThreatIntelSet"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_guardduty_threatintelset_my_threat_intel_set.override_logical_id("MyThreatIntelSet")
```

## Argument Reference

This resource supports the following arguments:

* `activate` - (Required) Specifies whether GuardDuty is to start using the uploaded ThreatIntelSet.
* `detector_id` - (Required) The detector ID of the GuardDuty.
* `format` - (Required) The format of the file that contains the ThreatIntelSet. Valid values: `TXT` | `STIX` | `OTX_CSV` | `ALIEN_VAULT` | `PROOF_POINT` | `FIRE_EYE`
* `location` - (Required) The URI of the file that contains the ThreatIntelSet.
* `name` - (Required) The friendly name to identify the ThreatIntelSet.
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of the GuardDuty ThreatIntelSet.
* `id` - The ID of the GuardDuty ThreatIntelSet and the detector ID. Format: `<detector_id>:<threat_intel_set_id>`
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty ThreatIntelSet using the primary GuardDuty detector ID and ThreatIntelSetID. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import GuardDuty ThreatIntelSet using the primary GuardDuty detector ID and ThreatIntelSetID.
For example: + +```console +% terraform import aws_guardduty_threatintelset.MyThreatIntelSet 00b00fd5aecc0ab60a708659477e9617:123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_access_key.html.markdown b/website/docs/cdktf/python/r/iam_access_key.html.markdown new file mode 100644 index 00000000000..f1c7be9faf1 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_access_key.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_access_key" +description: |- + Provides an IAM access key. This is a set of credentials that allow API requests to be made as an IAM user. +--- + + + +# Resource: aws_iam_access_key + +Provides an IAM access key. This is a set of credentials that allow API requests to be made as an IAM user. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_access_key import IamAccessKey +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_policy import IamUserPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + lb = IamUser(self, "lb", + name="loadbalancer", + path="/system/" + ) + lb_ro = DataAwsIamPolicyDocument(self, "lb_ro", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:Describe*"], + effect="Allow", + resources=["*"] + ) + ] + ) + aws_iam_access_key_lb = IamAccessKey(self, "lb_2", + pgp_key="keybase:some_person_that_exists", + user=lb.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_access_key_lb.override_logical_id("lb") + aws_iam_user_policy_lb_ro = IamUserPolicy(self, "lb_ro_3", + name="test", + policy=Token.as_string(lb_ro.json), + user=lb.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_user_policy_lb_ro.override_logical_id("lb_ro") + TerraformOutput(self, "secret", + value=aws_iam_access_key_lb.encrypted_secret + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_access_key import IamAccessKey +from imports.aws.iam_user import IamUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = IamUser(self, "test", + name="test", + path="/test/" + ) + aws_iam_access_key_test = IamAccessKey(self, "test_1", + user=test.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_iam_access_key_test.override_logical_id("test") + TerraformOutput(self, "aws_iam_smtp_password_v4", + value=aws_iam_access_key_test.ses_smtp_password_v4 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `pgp_key` - (Optional) Either a base-64 encoded PGP public key, or a keybase username in the form `keybase:some_person_that_exists`, for use in the `encrypted_secret` output attribute. If providing a base-64 encoded PGP public key, make sure to provide the "raw" version and not the "armored" one (e.g. avoid passing the `-a` option to `gpg --export`). +* `status` - (Optional) Access key status to apply. Defaults to `Active`. Valid values are `Active` and `Inactive`. +* `user` - (Required) IAM user to associate with this access key. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `create_date` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the access key was created. +* `encrypted_secret` - Encrypted secret, base64 encoded, if `pgp_key` was specified. This attribute is not available for imported resources. The encrypted secret may be decrypted using the command line, for example: `terraform output -raw encrypted_secret | base64 --decode | keybase pgp decrypt`. +* `encrypted_ses_smtp_password_v4` - Encrypted SES SMTP password, base64 encoded, if `pgp_key` was specified. This attribute is not available for imported resources. The encrypted password may be decrypted using the command line, for example: `terraform output -raw encrypted_ses_smtp_password_v4 | base64 --decode | keybase pgp decrypt`. +* `id` - Access key ID. +* `key_fingerprint` - Fingerprint of the PGP key used to encrypt the secret. This attribute is not available for imported resources. +* `secret` - Secret access key. This attribute is not available for imported resources. Note that this will be written to the state file. If you use this, please protect your backend state file judiciously. Alternatively, you may supply a `pgp_key` instead, which will prevent the secret from being stored in plaintext, at the cost of preventing the use of the secret key in automation. +* `ses_smtp_password_v4` - Secret access key converted into an SES SMTP password by applying [AWS's documented Sigv4 conversion algorithm](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/smtp-credentials.html#smtp-credentials-convert). This attribute is not available for imported resources. As SigV4 is region specific, valid Provider regions are `ap-south-1`, `ap-southeast-2`, `eu-central-1`, `eu-west-1`, `us-east-1` and `us-west-2`. See current [AWS SES regions](https://docs.aws.amazon.com/general/latest/gr/rande.html#ses_region). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Access Keys using the identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Access Keys using the identifier. 
For example: + +```console +% terraform import aws_iam_access_key.example AKIA1234567890 +``` + +Resource attributes such as `encrypted_secret`, `key_fingerprint`, `pgp_key`, `secret`, `ses_smtp_password_v4`, and `encrypted_ses_smtp_password_v4` are not available for imported resources as this information cannot be read from the IAM API. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_account_alias.html.markdown b/website/docs/cdktf/python/r/iam_account_alias.html.markdown new file mode 100644 index 00000000000..78d7fc4f83c --- /dev/null +++ b/website/docs/cdktf/python/r/iam_account_alias.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_account_alias" +description: |- + Manages the account alias for the AWS Account. +--- + + + +# Resource: aws_iam_account_alias + +-> **Note:** There is only a single account alias per AWS account. + +Manages the account alias for the AWS Account. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_account_alias import IamAccountAlias +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamAccountAlias(self, "alias", + account_alias="my-account-alias" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_alias` - (Required) The account alias + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the current Account Alias using the `account_alias`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import the current Account Alias using the `account_alias`. For example: + +```console +% terraform import aws_iam_account_alias.alias my-account-alias +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_account_password_policy.html.markdown b/website/docs/cdktf/python/r/iam_account_password_policy.html.markdown new file mode 100644 index 00000000000..2719a3c4404 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_account_password_policy.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_account_password_policy" +description: |- + Manages Password Policy for the AWS Account. +--- + + + +# Resource: aws_iam_account_password_policy + +-> **Note:** There is only a single policy allowed per AWS account. An existing policy will be lost when using this resource as an effect of this limitation. + +Manages Password Policy for the AWS Account. +See more about [Account Password Policy](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html) +in the official AWS docs. 
+
## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.iam_account_password_policy import IamAccountPasswordPolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        IamAccountPasswordPolicy(self, "strict",
            allow_users_to_change_password=True,
            minimum_password_length=8,
            require_lowercase_characters=True,
            require_numbers=True,
            require_symbols=True,
            require_uppercase_characters=True
        )
```

## Argument Reference

This resource supports the following arguments:

* `allow_users_to_change_password` - (Optional) Whether to allow users to change their own password
* `hard_expiry` - (Optional) Whether users are prevented from setting a new password after their password has expired (i.e., require administrator reset)
* `max_password_age` - (Optional) The number of days that a user password is valid.
* `minimum_password_length` - (Optional) Minimum length to require for user passwords.
* `password_reuse_prevention` - (Optional) The number of previous passwords that users are prevented from reusing.
* `require_lowercase_characters` - (Optional) Whether to require lowercase characters for user passwords.
* `require_numbers` - (Optional) Whether to require numbers for user passwords.
* `require_symbols` - (Optional) Whether to require symbols for user passwords.
* `require_uppercase_characters` - (Optional) Whether to require uppercase characters for user passwords.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `expire_passwords` - Indicates whether passwords in the account expire. Returns `true` if `max_password_age` contains a value greater than `0`. Returns `false` if it is `0` or _not present_.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Account Password Policy using the word `iam-account-password-policy`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import IAM Account Password Policy using the word `iam-account-password-policy`. For example:

```console
% terraform import aws_iam_account_password_policy.strict iam-account-password-policy
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_group.html.markdown new file mode 100644 index 00000000000..94337b18ffc --- /dev/null +++ b/website/docs/cdktf/python/r/iam_group.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group" +description: |- + Provides an IAM group. +--- + + + +# Resource: aws_iam_group + +Provides an IAM group. + +~> **NOTE on user management:** Using `aws_iam_group_membership` or `aws_iam_user_group_membership` resources in addition to manually managing user/group membership using the console may lead to configuration drift or conflicts.
For this reason, it's recommended to either manage membership entirely with Terraform or entirely within the AWS console. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_group import IamGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamGroup(self, "developers", + name="developers", + path="/users/" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The group's name. The name must consist of upper and lowercase alphanumeric characters with no spaces. You can also include any of the following characters: `=,.@-_.`. Group names are not distinguished by case. For example, you cannot create groups named both "ADMINS" and "admins". +* `path` - (Optional, default "/") Path in which to create the group. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The group's ID. +* `arn` - The ARN assigned by AWS for this group. +* `name` - The group's name. +* `path` - The path of the group in IAM. +* `unique_id` - The [unique ID][1] assigned by AWS. + + [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html#GUIDs + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Groups using the `name`. For example: + +```console +% terraform import aws_iam_group.developers developers +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_group_membership.html.markdown b/website/docs/cdktf/python/r/iam_group_membership.html.markdown new file mode 100644 index 00000000000..9675517ec63 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_group_membership.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group_membership" +description: |- + Provides a top level resource to manage IAM Group membership for IAM Users. +--- + + + +# Resource: aws_iam_group_membership + +~> **WARNING:** Multiple aws_iam_group_membership resources with the same group name will produce inconsistent behavior! + +Provides a top level resource to manage IAM Group membership for IAM Users. For +more information on managing IAM Groups or IAM Users, see [IAM Groups][1] or +[IAM Users][2] + +~> **Note:** `aws_iam_group_membership` will conflict with itself if used more than once with the same group. To non-exclusively manage the users in a group, see the +[`aws_iam_user_group_membership` resource][3]. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
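# The stack below creates one group and two users, then manages the group's
# full membership list through a single IamGroupMembership resource.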
+# +from imports.aws.iam_group import IamGroup +from imports.aws.iam_group_membership import IamGroupMembership +from imports.aws.iam_user import IamUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + group = IamGroup(self, "group", + name="test-group" + ) + user_one = IamUser(self, "user_one", + name="test-user" + ) + user_two = IamUser(self, "user_two", + name="test-user-two" + ) + IamGroupMembership(self, "team", + group=group.name, + name="tf-testing-group-membership", + users=[user_one.name, user_two.name] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name to identify the Group Membership +* `users` - (Required) A list of IAM User names to associate with the Group +* `group` – (Required) The IAM Group name to attach the list of `users` to + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `name` - The name to identify the Group Membership +* `users` - list of IAM User names +* `group` – IAM Group name + +[1]: /docs/providers/aws/r/iam_group.html +[2]: /docs/providers/aws/r/iam_user.html +[3]: /docs/providers/aws/r/iam_user_group_membership.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_group_policy.html.markdown b/website/docs/cdktf/python/r/iam_group_policy.html.markdown new file mode 100644 index 00000000000..c54d0279b73 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_group_policy.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group_policy" +description: |- + Provides an IAM policy attached to a group. +--- + + + +# Resource: aws_iam_group_policy + +Provides an IAM policy attached to a group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_group import IamGroup +from imports.aws.iam_group_policy import IamGroupPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + my_developers = IamGroup(self, "my_developers", + name="developers", + path="/users/" + ) + IamGroupPolicy(self, "my_developer_policy", + group=my_developers.name, + name="my_developer_policy", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["ec2:Describe*"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) +* `name` - (Optional) The name of the policy. If omitted, Terraform will +assign a random, unique name. +* `name_prefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +* `group` - (Required) The IAM group to attach to the policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The group policy ID. +* `group` - The group to which this policy applies. 
+* `name` - The name of the policy. +* `policy` - The policy document attached to the group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Group Policies using the `group_name:group_policy_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Group Policies using the `group_name:group_policy_name`. For example: + +```console +% terraform import aws_iam_group_policy.mypolicy group_of_mypolicy_name:mypolicy_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_group_policy_attachment.markdown b/website/docs/cdktf/python/r/iam_group_policy_attachment.markdown new file mode 100644 index 00000000000..3be2a1d391f --- /dev/null +++ b/website/docs/cdktf/python/r/iam_group_policy_attachment.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group_policy_attachment" +description: |- + Attaches a Managed IAM Policy to an IAM group +--- + + + +# Resource: aws_iam_group_policy_attachment + +Attaches a Managed IAM Policy to an IAM group + +~> **NOTE:** The usage of this resource conflicts with the `aws_iam_policy_attachment` resource and will permanently show a difference if both are defined. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_group import IamGroup +from imports.aws.iam_group_policy_attachment import IamGroupPolicyAttachment +from imports.aws.iam_policy import IamPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + group = IamGroup(self, "group", + name="test-group" + ) + policy = IamPolicy(self, "policy", + description="A test policy", + name="test-policy", + policy="{ ... policy JSON ... }" + ) + IamGroupPolicyAttachment(self, "test-attach", + group=group.name, + policy_arn=policy.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `group` (Required) - The group the policy should be applied to +* `policy_arn` (Required) - The ARN of the policy you want to apply + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM group policy attachments using the group name and policy arn separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM group policy attachments using the group name and policy arn separated by `/`. 
For example: + +```console +% terraform import aws_iam_group_policy_attachment.test-attach test-group/arn:aws:iam::xxxxxxxxxxxx:policy/test-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_instance_profile.html.markdown b/website/docs/cdktf/python/r/iam_instance_profile.html.markdown new file mode 100644 index 00000000000..996d5d5785e --- /dev/null +++ b/website/docs/cdktf/python/r/iam_instance_profile.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_instance_profile" +description: |- + Provides an IAM instance profile. +--- + + + +# Resource: aws_iam_instance_profile + +Provides an IAM instance profile. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_instance_profile import IamInstanceProfile +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ec2.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + role = IamRole(self, "role", + assume_role_policy=Token.as_string(assume_role.json), + name="test_role", + path="/" + ) + IamInstanceProfile(self, "test_profile", + name="test_profile", + role=role.name + ) +``` + +## Argument Reference + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the instance profile. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. Can be a string of characters consisting of upper and lowercase alphanumeric characters and these special characters: `_`, `+`, `=`, `,`, `.`, `@`, `-`. Spaces are not allowed. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `path` - (Optional, default "/") Path to the instance profile. For more information about paths, see [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) in the IAM User Guide. Can be a string of characters consisting of either a forward slash (`/`) by itself or a string that must begin and end with forward slashes. Can include any ASCII character from the ! (\u0021) through the DEL character (\u007F), including most punctuation characters, digits, and upper and lowercase letters. +* `role` - (Optional) Name of the role to add to the profile. +* `tags` - (Optional) Map of resource tags for the IAM Instance Profile. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN assigned by AWS to the instance profile. +* `create_date` - Creation timestamp of the instance profile. 
+* `id` - Instance profile's ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `unique_id` - [Unique ID][1] assigned by AWS. + + [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html#GUIDs + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Instance Profiles using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Instance Profiles using the `name`. For example: + +```console +% terraform import aws_iam_instance_profile.test_profile app-instance-profile-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_openid_connect_provider.html.markdown b/website/docs/cdktf/python/r/iam_openid_connect_provider.html.markdown new file mode 100644 index 00000000000..4afaf1aa970 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_openid_connect_provider.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_openid_connect_provider" +description: |- + Provides an IAM OpenID Connect provider. +--- + + + +# Resource: aws_iam_openid_connect_provider + +Provides an IAM OpenID Connect provider. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_openid_connect_provider import IamOpenidConnectProvider +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamOpenidConnectProvider(self, "default", + client_id_list=["266362248691-342342xasdasdasda-apps.googleusercontent.com" + ], + thumbprint_list=["cf23df2207d99a74fbe169e3eba035e633b65d94"], + url="https://accounts.google.com" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `url` - (Required) The URL of the identity provider. Corresponds to the _iss_ claim. +* `client_id_list` - (Required) A list of client IDs (also known as audiences). When a mobile or web app registers with an OpenID Connect provider, they establish a value that identifies the application. (This is the value that's sent as the client_id parameter on OAuth requests.) +* `thumbprint_list` - (Required) A list of server certificate thumbprints for the OpenID Connect (OIDC) identity provider's server certificate(s). +* `tags` - (Optional) Map of resource tags for the IAM OIDC provider. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS for this provider. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM OpenID Connect Providers using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM OpenID Connect Providers using the `arn`. For example: + +```console +% terraform import aws_iam_openid_connect_provider.default arn:aws:iam::123456789012:oidc-provider/accounts.google.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_policy.html.markdown b/website/docs/cdktf/python/r/iam_policy.html.markdown new file mode 100644 index 00000000000..d8db1948751 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_policy.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_policy" +description: |- + Provides an IAM policy. +--- + + + +# Resource: aws_iam_policy + +Provides an IAM policy. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_policy import IamPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamPolicy(self, "policy", + description="My test policy", + name="test_policy", + path="/", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["ec2:Describe*"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional, Forces new resource) Description of the IAM policy. +* `name` - (Optional, Forces new resource) The name of the policy. If omitted, Terraform will assign a random, unique name. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `path` - (Optional, default "/") Path in which to create the policy. + See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) for more information. +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) +* `tags` - (Optional) Map of resource tags for the IAM Policy. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN assigned by AWS to this policy. 
+* `arn` - The ARN assigned by AWS to this policy. +* `description` - The description of the policy. +* `name` - The name of the policy. +* `path` - The path of the policy in IAM. +* `policy` - The policy document. +* `policy_id` - The policy's ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Policies using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Policies using the `arn`. For example: + +```console +% terraform import aws_iam_policy.administrator arn:aws:iam::123456789012:policy/UsersManageOwnCredentials +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_policy_attachment.html.markdown b/website/docs/cdktf/python/r/iam_policy_attachment.html.markdown new file mode 100644 index 00000000000..c16532e775b --- /dev/null +++ b/website/docs/cdktf/python/r/iam_policy_attachment.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_policy_attachment" +description: |- + Attaches a Managed IAM Policy to user(s), role(s), and/or group(s) +--- + + + +# Resource: aws_iam_policy_attachment + +Attaches a Managed IAM Policy to user(s), role(s), and/or group(s) + +!> **WARNING:** The aws_iam_policy_attachment resource creates **exclusive** attachments of IAM policies. Across the entire AWS account, all of the users/roles/groups to which a single policy is attached must be declared by a single aws_iam_policy_attachment resource. This means that even any users/roles/groups that have the attached policy via any other mechanism (including other Terraform resources) will have that attached policy revoked by this resource. Consider `aws_iam_role_policy_attachment`, `aws_iam_user_policy_attachment`, or `aws_iam_group_policy_attachment` instead. These resources do not enforce exclusive attachment of an IAM policy. + +~> **NOTE:** The usage of this resource conflicts with the `aws_iam_group_policy_attachment`, `aws_iam_role_policy_attachment`, and `aws_iam_user_policy_attachment` resources and will permanently show a difference if both are defined. + +~> **NOTE:** For a given role, this resource is incompatible with using the [`aws_iam_role` resource](/docs/providers/aws/r/iam_role.html) `managed_policy_arns` argument. When using that argument and this resource, both will attempt to manage the role's managed policy attachments and Terraform will show a permanent difference. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
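+#
+# The statement/principal struct classes used below are assumed to be
+# exported from the same generated module as their parent data source
+# (`cdktf convert` does not emit these imports):
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals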
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_group import IamGroup +from imports.aws.iam_policy import IamPolicy +from imports.aws.iam_policy_attachment import IamPolicyAttachment +from imports.aws.iam_role import IamRole +from imports.aws.iam_user import IamUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + group = IamGroup(self, "group", + name="test-group" + ) + user = IamUser(self, "user", + name="test-user" + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ec2.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + policy = DataAwsIamPolicyDocument(self, "policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:Describe*"], + effect="Allow", + resources=["*"] + ) + ] + ) + aws_iam_policy_policy = IamPolicy(self, "policy_4", + description="A test policy", + name="test-policy", + policy=Token.as_string(policy.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_policy_policy.override_logical_id("policy") + role = IamRole(self, "role", + assume_role_policy=Token.as_string(assume_role.json), + name="test-role" + ) + IamPolicyAttachment(self, "test-attach", + groups=[group.name], + name="test-attachment", + policy_arn=Token.as_string(aws_iam_policy_policy.arn), + roles=[role.name], + users=[user.name] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` (Required) - The name of the attachment. This cannot be an empty string. +* `users` (Optional) - The user(s) the policy should be applied to +* `roles` (Optional) - The role(s) the policy should be applied to +* `groups` (Optional) - The group(s) the policy should be applied to +* `policy_arn` (Required) - The ARN of the policy you want to apply + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The policy's ID. +* `name` - The name of the attachment. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_role.html.markdown b/website/docs/cdktf/python/r/iam_role.html.markdown new file mode 100644 index 00000000000..e4ee674b598 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_role.html.markdown @@ -0,0 +1,292 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_role" +description: |- + Provides an IAM role. +--- + + + +# Resource: aws_iam_role + +Provides an IAM role. + +~> **NOTE:** If policies are attached to the role via the [`aws_iam_policy_attachment` resource](/docs/providers/aws/r/iam_policy_attachment.html) and you are modifying the role `name` or `path`, the `force_detach_policies` argument must be set to `true` and applied before attempting the operation otherwise you will encounter a `DeleteConflict` error. The [`aws_iam_role_policy_attachment` resource (recommended)](/docs/providers/aws/r/iam_role_policy_attachment.html) does not have this requirement. + +~> **NOTE:** If you use this resource's `managed_policy_arns` argument or `inline_policy` configuration blocks, this resource will take over exclusive management of the role's respective policy types (e.g., both policy types if both arguments are used). 
These arguments are incompatible with other ways of managing a role's policies, such as [`aws_iam_policy_attachment`](/docs/providers/aws/r/iam_policy_attachment.html), [`aws_iam_role_policy_attachment`](/docs/providers/aws/r/iam_role_policy_attachment.html), and [`aws_iam_role_policy`](/docs/providers/aws/r/iam_role_policy.html). If you attempt to manage a role's policies by multiple means, you will get resource cycling and/or errors. + +## Example Usage + +### Basic Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamRole(self, "test_role", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "ec2.amazonaws.com" + }, + "Sid": "" + } + ], + "Version": "2012-10-17" + })), + name="test_role", + tags={ + "tag-key": "tag-value" + } + ) +``` + +### Example of Using Data Source for Assume Role Policy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + instance_assume_role_policy = DataAwsIamPolicyDocument(self, "instance_assume_role_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ec2.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + IamRole(self, "instance", + assume_role_policy=Token.as_string(instance_assume_role_policy.json), + name="instance_role", + path="/system/" + ) +``` + +### Example of Exclusive Inline Policies + +This example creates an IAM role with two inline IAM policies. If someone adds another inline policy out-of-band, on the next apply, Terraform will remove that policy. If someone deletes these policies out-of-band, Terraform will recreate them. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
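+#
+# Assumptions not emitted by `cdktf convert`: the struct classes used below
+# are assumed to live in the same generated modules as their parent
+# resources, and `instance_assume_role_policy` refers to the
+# DataAwsIamPolicyDocument defined in the previous example:
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocumentStatement
+from imports.aws.iam_role import IamRoleInlinePolicy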
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + inline_policy = DataAwsIamPolicyDocument(self, "inline_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:DescribeAccountAttributes"], + resources=["*"] + ) + ] + ) + IamRole(self, "example", + assume_role_policy=Token.as_string(instance_assume_role_policy.json), + inline_policy=[IamRoleInlinePolicy( + name="my_inline_policy", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["ec2:Describe*"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })) + ), IamRoleInlinePolicy( + name="policy-8675309", + policy=Token.as_string(inline_policy.json) + ) + ], + name="yak_role" + ) +``` + +### Example of Removing Inline Policies + +This example creates an IAM role with what appears to be empty IAM `inline_policy` argument instead of using `inline_policy` as a configuration block. The result is that if someone were to add an inline policy out-of-band, on the next apply, Terraform will remove that policy. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamRole(self, "example", + assume_role_policy=Token.as_string(instance_assume_role_policy.json), + inline_policy=[IamRoleInlinePolicy()], + name="yak_role" + ) +``` + +### Example of Exclusive Managed Policies + +This example creates an IAM role and attaches two managed IAM policies. If someone attaches another managed policy out-of-band, on the next apply, Terraform will detach that policy. If someone detaches these policies out-of-band, Terraform will attach them again. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_policy import IamPolicy +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + policy_one = IamPolicy(self, "policy_one", + name="policy-618033", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["ec2:Describe*"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })) + ) + policy_two = IamPolicy(self, "policy_two", + name="policy-381966", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["s3:ListAllMyBuckets", "s3:ListBucket", "s3:HeadBucket"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })) + ) + IamRole(self, "example", + assume_role_policy=Token.as_string(instance_assume_role_policy.json), + managed_policy_arns=[policy_one.arn, policy_two.arn], + name="yak_role" + ) +``` + +### Example of Removing Managed Policies + +This example creates an IAM role with an empty `managed_policy_arns` argument. 
If someone attaches a policy out-of-band, on the next apply, Terraform will detach that policy. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_role import IamRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamRole(self, "example", + assume_role_policy=Token.as_string(instance_assume_role_policy.json), + managed_policy_arns=[], + name="yak_role" + ) +``` + +## Argument Reference + +The following argument is required: + +* `assume_role_policy` - (Required) Policy that grants an entity permission to assume the role. + +~> **NOTE:** The `assume_role_policy` is very similar to but slightly different than a standard IAM policy and cannot use an `aws_iam_policy` resource. However, it _can_ use an `aws_iam_policy_document` [data source](/docs/providers/aws/d/iam_policy_document.html). See the example above of how this works. + +The following arguments are optional: + +* `description` - (Optional) Description of the role. +* `force_detach_policies` - (Optional) Whether to force detaching any policies the role has before destroying it. Defaults to `false`. +* `inline_policy` - (Optional) Configuration block defining an exclusive set of IAM inline policies associated with the IAM role. See below. If no blocks are configured, Terraform will not manage any inline policies in this resource. Configuring one empty block (i.e., `inline_policy {}`) will cause Terraform to remove _all_ inline policies added out of band on `apply`. +* `managed_policy_arns` - (Optional) Set of exclusive IAM managed policy ARNs to attach to the IAM role. If this attribute is not configured, Terraform will ignore policy attachments to this resource. When configured, Terraform will align the role's managed policy attachments with this set by attaching or detaching managed policies. Configuring an empty set (i.e., `managed_policy_arns = []`) will cause Terraform to remove _all_ managed policy attachments. +* `max_session_duration` - (Optional) Maximum session duration (in seconds) that you want to set for the specified role. If you do not specify a value for this setting, the default maximum of one hour is applied. This setting can have a value from 1 hour to 12 hours. +* `name` - (Optional, Forces new resource) Friendly name of the role. If omitted, Terraform will assign a random, unique name. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) for more information. +* `name_prefix` - (Optional, Forces new resource) Creates a unique friendly name beginning with the specified prefix. Conflicts with `name`. +* `path` - (Optional) Path to the role. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) for more information. +* `permissions_boundary` - (Optional) ARN of the policy that is used to set the permissions boundary for the role. +* `tags` - Key-value mapping of tags for the IAM role. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
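+
+As an illustration of the session and boundary arguments above, a minimal hand-written sketch (not `cdktf convert` output): the permissions-boundary ARN is a placeholder, and `43200` seconds is the documented 12-hour maximum session length.
+
+```python
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+from imports.aws.iam_role import IamRole
+class MaxSessionRoleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IamRole(self, "long_session_role",
+            assume_role_policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": "sts:AssumeRole",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "ec2.amazonaws.com"
+                        }
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            # 12 hours, expressed in seconds (the documented maximum)
+            max_session_duration=43200,
+            name="long-session-role",
+            # placeholder boundary policy ARN
+            permissions_boundary="arn:aws:iam::123456789012:policy/example-boundary"
+        )
+```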
+ +### inline_policy + +This configuration block supports the following: + +~> **NOTE:** Since one empty block (i.e., `inline_policy {}`) is valid syntactically to remove out of band policies on `apply`, `name` and `policy` are technically _optional_. However, they are both _required_ in order to manage actual inline policies. Not including one or the other may not result in Terraform errors but will result in unpredictable and incorrect behavior. + +* `name` - (Required) Name of the role policy. +* `policy` - (Required) Policy document as a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/tutorials/terraform/aws-iam-policy). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) specifying the role. +* `create_date` - Creation date of the IAM role. +* `id` - Name of the role. +* `name` - Name of the role. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `unique_id` - Stable and unique string identifying the role. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Roles using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Roles using the `name`. For example: + +```console +% terraform import aws_iam_role.developer developer_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_role_policy.html.markdown b/website/docs/cdktf/python/r/iam_role_policy.html.markdown new file mode 100644 index 00000000000..2d149ef1366 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_role_policy.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_role_policy" +description: |- + Provides an IAM role policy. +--- + + + +# Resource: aws_iam_role_policy + +Provides an IAM role inline policy. + +~> **NOTE:** For a given role, this resource is incompatible with using the [`aws_iam_role` resource](/docs/providers/aws/r/iam_role.html) `inline_policy` argument. When using that argument and this resource, both will attempt to manage the role's inline policies and Terraform will show a permanent difference. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
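+#
+# The role below embeds its permissions as an inline policy via
+# aws_iam_role_policy; per the NOTE above, do not combine this with the
+# role's own `inline_policy` argument for the same role.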
+# +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_role = IamRole(self, "test_role", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "ec2.amazonaws.com" + }, + "Sid": "" + } + ], + "Version": "2012-10-17" + })), + name="test_role" + ) + IamRolePolicy(self, "test_policy", + name="test_policy", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["ec2:Describe*"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })), + role=test_role.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the role policy. If omitted, Terraform will +assign a random, unique name. +* `name_prefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +* `policy` - (Required) The inline policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) +* `role` - (Required) The name of the IAM role to attach to the policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The role policy ID, in the form of `role_name:role_policy_name`. +* `name` - The name of the policy. +* `policy` - The policy document attached to the role. +* `role` - The name of the role associated with the policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Role Policies using the `role_name:role_policy_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Role Policies using the `role_name:role_policy_name`. For example: + +```console +% terraform import aws_iam_role_policy.mypolicy role_of_mypolicy_name:mypolicy_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_role_policy_attachment.markdown b/website/docs/cdktf/python/r/iam_role_policy_attachment.markdown new file mode 100644 index 00000000000..072db080d26 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_role_policy_attachment.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_role_policy_attachment" +description: |- + Attaches a Managed IAM Policy to an IAM role +--- + + + +# Resource: aws_iam_role_policy_attachment + +Attaches a Managed IAM Policy to an IAM role + +~> **NOTE:** The usage of this resource conflicts with the `aws_iam_policy_attachment` resource and will permanently show a difference if both are defined. + +~> **NOTE:** For a given role, this resource is incompatible with using the [`aws_iam_role` resource](/docs/providers/aws/r/iam_role.html) `managed_policy_arns` argument. When using that argument and this resource, both will attempt to manage the role's managed policy attachments and Terraform will show a permanent difference. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_policy import IamPolicy +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ec2.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + policy = DataAwsIamPolicyDocument(self, "policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:Describe*"], + effect="Allow", + resources=["*"] + ) + ] + ) + aws_iam_policy_policy = IamPolicy(self, "policy_2", + description="A test policy", + name="test-policy", + policy=Token.as_string(policy.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_policy_policy.override_logical_id("policy") + role = IamRole(self, "role", + assume_role_policy=Token.as_string(assume_role.json), + name="test-role" + ) + IamRolePolicyAttachment(self, "test-attach", + policy_arn=Token.as_string(aws_iam_policy_policy.arn), + role=role.name + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `role` (Required) - The name of the IAM role to which the policy should be applied +* `policy_arn` (Required) - The ARN of the policy you want to apply + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM role policy attachments using the role name and policy arn separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM role policy attachments using the role name and policy arn separated by `/`. For example: + +```console +% terraform import aws_iam_role_policy_attachment.test-attach test-role/arn:aws:iam::xxxxxxxxxxxx:policy/test-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_saml_provider.html.markdown b/website/docs/cdktf/python/r/iam_saml_provider.html.markdown new file mode 100644 index 00000000000..184a6f39837 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_saml_provider.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_saml_provider" +description: |- + Provides an IAM SAML provider. +--- + + + +# Resource: aws_iam_saml_provider + +Provides an IAM SAML provider. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_saml_provider import IamSamlProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IamSamlProvider(self, "default",
+            name="myprovider",
+            saml_metadata_document=Token.as_string(Fn.file("saml-metadata.xml"))
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the provider to create.
+* `saml_metadata_document` - (Required) An XML document generated by an identity provider that supports SAML 2.0.
+* `tags` - (Optional) Map of resource tags for the IAM SAML provider. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN assigned by AWS for this provider.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `valid_until` - The expiration date and time for the SAML provider in RFC1123 format, e.g., `Mon, 02 Jan 2006 15:04:05 MST`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM SAML Providers using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import IAM SAML Providers using the `arn`. For example:
+
+```console
+% terraform import aws_iam_saml_provider.default arn:aws:iam::123456789012:saml-provider/SAMLADFS
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/iam_security_token_service_preferences.html.markdown b/website/docs/cdktf/python/r/iam_security_token_service_preferences.html.markdown
new file mode 100644
index 00000000000..8ef4162e8ee
--- /dev/null
+++ b/website/docs/cdktf/python/r/iam_security_token_service_preferences.html.markdown
@@ -0,0 +1,46 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_security_token_service_preferences"
+description: |-
+  Provides an IAM Security Token Service Preferences resource.
+---
+
+
+
+# Resource: aws_iam_security_token_service_preferences
+
+Provides an IAM Security Token Service Preferences resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_security_token_service_preferences import IamSecurityTokenServicePreferences
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IamSecurityTokenServicePreferences(self, "example",
+            global_endpoint_token_version="v2Token"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `global_endpoint_token_version` - (Required) The version of the STS global endpoint token. Valid values: `v1Token`, `v2Token`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS Account ID.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/iam_server_certificate.html.markdown b/website/docs/cdktf/python/r/iam_server_certificate.html.markdown
new file mode 100644
index 00000000000..81d3d2fe2d1
--- /dev/null
+++ b/website/docs/cdktf/python/r/iam_server_certificate.html.markdown
@@ -0,0 +1,173 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_server_certificate"
+description: |-
+  Provides an IAM Server Certificate
+---
+
+
+
+# Resource: aws_iam_server_certificate
+
+Provides an IAM Server Certificate resource to upload Server Certificates.
+Certs uploaded to IAM can easily work with other AWS services such as:
+
+- AWS Elastic Beanstalk
+- Elastic Load Balancing
+- CloudFront
+- AWS OpsWorks
+
+For information about server certificates in IAM, see [Managing Server
+Certificates][2] in AWS Documentation.
+
+~> **Note:** All arguments including the private key will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+**Using certs on file:**
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_server_certificate import IamServerCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IamServerCertificate(self, "test_cert",
+            certificate_body=Token.as_string(Fn.file("self-ca-cert.pem")),
+            name="some_test_cert",
+            private_key=Token.as_string(Fn.file("test-key.pem"))
+        )
+```
+
+**Example with cert in-line:**
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_server_certificate import IamServerCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IamServerCertificate(self, "test_cert_alt",
+            certificate_body="-----BEGIN CERTIFICATE-----\n[......] # cert contents\n-----END CERTIFICATE-----\n\n",
+            name="alt_test_cert",
+            private_key="-----BEGIN RSA PRIVATE KEY-----\n[......] # cert contents\n-----END RSA PRIVATE KEY-----\n\n"
+        )
+```
+
+**Use in combination with an AWS ELB resource:**
+
+Some properties of an IAM Server Certificate cannot be updated while it is in use.
+In order for Terraform to effectively manage a Certificate in this situation, it is
+recommended you utilize the `name_prefix` attribute and enable the
+`create_before_destroy` [lifecycle block][lifecycle]. This will allow Terraform
+to create a new, updated `aws_iam_server_certificate` resource and replace it in
+dependent resources before attempting to destroy the old version.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elb import Elb, ElbListener
+from imports.aws.iam_server_certificate import IamServerCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test_cert = IamServerCertificate(self, "test_cert",
+            certificate_body=Token.as_string(Fn.file("self-ca-cert.pem")),
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            name_prefix="example-cert",
+            private_key=Token.as_string(Fn.file("test-key.pem"))
+        )
+        Elb(self, "ourapp",
+            availability_zones=["us-west-2a"],
+            cross_zone_load_balancing=True,
+            listener=[ElbListener(
+                instance_port=8000,
+                instance_protocol="http",
+                lb_port=443,
+                lb_protocol="https",
+                ssl_certificate_id=test_cert.arn
+            )
+            ],
+            name="terraform-asg-deployment-example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the Server Certificate. Do not include the
+  path in this value. If omitted, Terraform will assign a random, unique name.
+* `name_prefix` - (Optional) Creates a unique name beginning with the specified
+  prefix. Conflicts with `name`.
+* `certificate_body` – (Required) The contents of the public key certificate in
+  PEM-encoded format.
+* `certificate_chain` – (Optional) The contents of the certificate chain.
+  This is typically a concatenation of the PEM-encoded public key certificates
+  of the chain.
+* `private_key` – (Required) The contents of the private key in PEM-encoded format.
+* `path` - (Optional) The IAM path for the server certificate. If it is not
+  included, it defaults to a slash (/). If this certificate is for use with
+  AWS CloudFront, the path must be in format `/cloudfront/your_path_here`.
+  See [IAM Identifiers][1] for more details on IAM Paths.
+* `tags` - (Optional) Map of resource tags for the server certificate. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+~> **NOTE:** AWS performs behind-the-scenes modifications to some certificate files if they do not adhere to a specific format. These modifications will result in Terraform forever believing that it needs to update the resources since the local and AWS file contents will not match after these modifications occur. In order to prevent this from happening you must ensure that all your PEM-encoded files use UNIX line-breaks and that `certificate_body` contains only one certificate. All other certificates should go in `certificate_chain`.
It is common for some Certificate Authorities to issue certificate files that have DOS line-breaks and that are actually multiple certificates concatenated together in order to form a full certificate chain. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) specifying the server certificate. +* `expiration` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) on which the certificate is set to expire. +* `id` - The unique Server Certificate name +* `name` - The name of the Server Certificate +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `upload_date` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) when the server certificate was uploaded. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Server Certificates using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Server Certificates using the `name`. For example: + +```console +% terraform import aws_iam_server_certificate.certificate example.com-certificate-until-2018 +``` + +[1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html +[2]: https://docs.aws.amazon.com/IAM/latest/UserGuide/ManagingServerCerts.html +[lifecycle]: /docs/configuration/resources.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_service_linked_role.html.markdown b/website/docs/cdktf/python/r/iam_service_linked_role.html.markdown new file mode 100644 index 00000000000..90510e3d151 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_service_linked_role.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_service_linked_role" +description: |- + Provides an IAM service-linked role. +--- + + + +# Resource: aws_iam_service_linked_role + +Provides an [IAM service-linked role](https://docs.aws.amazon.com/IAM/latest/UserGuide/using-service-linked-roles.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_service_linked_role import IamServiceLinkedRole +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamServiceLinkedRole(self, "elasticbeanstalk", + aws_service_name="elasticbeanstalk.amazonaws.com" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `aws_service_name` - (Required, Forces new resource) The AWS service to which this role is attached. You use a string similar to a URL but without the `http://` in front. For example: `elasticbeanstalk.amazonaws.com`. 
To find the full list of services that support service-linked roles, check [the docs](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_aws-services-that-work-with-iam.html). +* `custom_suffix` - (Optional, forces new resource) Additional string appended to the role name. Not all AWS services support custom suffixes. +* `description` - (Optional) The description of the role. +* `tags` - Key-value mapping of tags for the IAM role. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the role. +* `arn` - The Amazon Resource Name (ARN) specifying the role. +* `create_date` - The creation date of the IAM role. +* `name` - The name of the role. +* `path` - The path of the role. +* `unique_id` - The stable and unique string identifying the role. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM service-linked roles using role ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM service-linked roles using role ARN. For example: + +```console +% terraform import aws_iam_service_linked_role.elasticbeanstalk arn:aws:iam::123456789012:role/aws-service-role/elasticbeanstalk.amazonaws.com/AWSServiceRoleForElasticBeanstalk +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_service_specific_credential.html.markdown b/website/docs/cdktf/python/r/iam_service_specific_credential.html.markdown new file mode 100644 index 00000000000..7d35cf3e440 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_service_specific_credential.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_service_specific_credential" +description: |- + Provides an IAM Service Specific Credential. +--- + + + +# Resource: aws_iam_service_specific_credential + +Provides an IAM Service Specific Credential. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
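+#
+# The credential created below authenticates only against the named service
+# (CodeCommit here) and otherwise carries the same permissions as the IAM
+# user it is associated with.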
+# +from imports.aws.iam_service_specific_credential import IamServiceSpecificCredential +from imports.aws.iam_user import IamUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = IamUser(self, "example", + name="example" + ) + aws_iam_service_specific_credential_example = + IamServiceSpecificCredential(self, "example_1", + service_name="codecommit.amazonaws.com", + user_name=example.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_service_specific_credential_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `service_name` - (Required) The name of the AWS service that is to be associated with the credentials. The service you specify here is the only service that can be accessed using these credentials. +* `user_name` - (Required) The name of the IAM user that is to be associated with the credentials. The new service-specific credentials have the same permissions as the associated user except that they can be used only to access the specified service. +* `status` - (Optional) The status to be assigned to the service-specific credential. Valid values are `Active` and `Inactive`. Default value is `Active`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The combination of `service_name` and `user_name` as such: `service_name:user_name:service_specific_credential_id`. +* `service_password` - The generated password for the service-specific credential. +* `service_user_name` - The generated user name for the service-specific credential. This value is generated by combining the IAM user's name combined with the ID number of the AWS account, as in `jane-at-123456789012`, for example. +* `service_specific_credential_id` - The unique identifier for the service-specific credential. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Service Specific Credentials using the `service_name:user_name:service_specific_credential_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Service Specific Credentials using the `service_name:user_name:service_specific_credential_id`. For example: + +```console +% terraform import aws_iam_service_specific_credential.default `codecommit.amazonaws.com:example:some-id` +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_signing_certificate.html.markdown b/website/docs/cdktf/python/r/iam_signing_certificate.html.markdown new file mode 100644 index 00000000000..ed4a8a2e7f4 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_signing_certificate.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_signing_certificate" +description: |- + Provides an IAM Signing Certificate +--- + + + +# Resource: aws_iam_signing_certificate + +Provides an IAM Signing Certificate resource to upload Signing Certificates. 
+
+~> **Note:** All arguments including the certificate body will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+**Using certs on file:**
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_signing_certificate import IamSigningCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IamSigningCertificate(self, "test_cert",
+            certificate_body=Token.as_string(Fn.file("self-ca-cert.pem")),
+            # user_name is the IAM user the certificate is uploaded for
+            user_name="some_test_cert"
+        )
+```
+
+**Example with cert in-line:**
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_signing_certificate import IamSigningCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IamSigningCertificate(self, "test_cert_alt",
+            certificate_body="-----BEGIN CERTIFICATE-----\n[......] # cert contents\n-----END CERTIFICATE-----\n\n",
+            # user_name is the IAM user the certificate is uploaded for
+            user_name="some_test_cert"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate_body` – (Required) The contents of the signing certificate in PEM-encoded format.
+* `status` – (Optional) The status you want to assign to the certificate. `Active` means that the certificate can be used for programmatic calls to Amazon Web Services, while `Inactive` means that the certificate cannot be used.
+* `user_name` – (Required) The name of the user the signing certificate is for.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `certificate_id` - The ID for the signing certificate.
+* `id` - The ID in the form `certificate_id:user_name`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Signing Certificates using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import IAM Signing Certificates using the `id`. For example:
+
+```console
+% terraform import aws_iam_signing_certificate.certificate IDIDIDIDID:user-name
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/iam_user.html.markdown b/website/docs/cdktf/python/r/iam_user.html.markdown
new file mode 100644
index 00000000000..46401a1a97c
--- /dev/null
+++ b/website/docs/cdktf/python/r/iam_user.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user"
+description: |-
+  Provides an IAM user.
+---
+
+
+
+# Resource: aws_iam_user
+
+Provides an IAM user.
+ +~> *NOTE:* If policies are attached to the user via the [`aws_iam_policy_attachment` resource](/docs/providers/aws/r/iam_policy_attachment.html) and you are modifying the user `name` or `path`, the `force_destroy` argument must be set to `true` and applied before attempting the operation otherwise you will encounter a `DeleteConflict` error. The [`aws_iam_user_policy_attachment` resource (recommended)](/docs/providers/aws/r/iam_user_policy_attachment.html) does not have this requirement. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_access_key import IamAccessKey +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_policy import IamUserPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + lb = IamUser(self, "lb", + name="loadbalancer", + path="/system/", + tags={ + "tag-key": "tag-value" + } + ) + lb_ro = DataAwsIamPolicyDocument(self, "lb_ro", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["ec2:Describe*"], + effect="Allow", + resources=["*"] + ) + ] + ) + aws_iam_access_key_lb = IamAccessKey(self, "lb_2", + user=lb.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_access_key_lb.override_logical_id("lb") + aws_iam_user_policy_lb_ro = IamUserPolicy(self, "lb_ro_3", + name="test", + policy=Token.as_string(lb_ro.json), + user=lb.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_user_policy_lb_ro.override_logical_id("lb_ro") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The user's name. The name must consist of upper and lowercase alphanumeric characters with no spaces. You can also include any of the following characters: `=,.@-_.`. User names are not distinguished by case. For example, you cannot create users named both "TESTUSER" and "testuser". +* `path` - (Optional, default "/") Path in which to create the user. +* `permissions_boundary` - (Optional) The ARN of the policy that is used to set the permissions boundary for the user. +* `force_destroy` - (Optional, default false) When destroying this user, destroy even if it + has non-Terraform-managed IAM access keys, login profile or MFA devices. Without `force_destroy` + a user with non-Terraform-managed access keys and login profile will fail to be destroyed. +* `tags` - Key-value map of tags for the IAM user. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS for this user. +* `name` - The user's name. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `unique_id` - The [unique ID][1] assigned by AWS. + + [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html#GUIDs + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Users using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Users using the `name`. For example: + +```console +% terraform import aws_iam_user.lb loadbalancer +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_user_group_membership.html.markdown b/website/docs/cdktf/python/r/iam_user_group_membership.html.markdown new file mode 100644 index 00000000000..fd5ed1d5efb --- /dev/null +++ b/website/docs/cdktf/python/r/iam_user_group_membership.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_group_membership" +description: |- + Provides a resource for adding an IAM User to IAM Groups without conflicting + with itself. +--- + + + +# Resource: aws_iam_user_group_membership + +Provides a resource for adding an [IAM User][2] to [IAM Groups][1]. This +resource can be used multiple times with the same user for non-overlapping +groups. + +To exclusively manage the users in a group, see the +[`aws_iam_group_membership` resource][3]. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_group import IamGroup +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_group_membership import IamUserGroupMembership +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + group1 = IamGroup(self, "group1", + name="group1" + ) + group2 = IamGroup(self, "group2", + name="group2" + ) + group3 = IamGroup(self, "group3", + name="group3" + ) + user1 = IamUser(self, "user1", + name="user1" + ) + IamUserGroupMembership(self, "example1", + groups=[group1.name, group2.name], + user=user1.name + ) + IamUserGroupMembership(self, "example2", + groups=[group3.name], + user=user1.name + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user` - (Required) The name of the [IAM User][2] to add to groups +* `groups` - (Required) A list of [IAM Groups][1] to add the user to + +## Attribute Reference + +This resource exports no additional attributes. + +[1]: /docs/providers/aws/r/iam_group.html +[2]: /docs/providers/aws/r/iam_user.html +[3]: /docs/providers/aws/r/iam_group_membership.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM user group membership using the user name and group names separated by `/`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM user group membership using the user name and group names separated by `/`. For example: + +```console +% terraform import aws_iam_user_group_membership.example1 user1/group1/group2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_user_login_profile.html.markdown b/website/docs/cdktf/python/r/iam_user_login_profile.html.markdown new file mode 100644 index 00000000000..0c084185a1f --- /dev/null +++ b/website/docs/cdktf/python/r/iam_user_login_profile.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_login_profile" +description: |- + Manages an IAM User Login Profile +--- + + + +# Resource: aws_iam_user_login_profile + +Manages an IAM User Login Profile with limited support for password creation during Terraform resource creation. Uses PGP to encrypt the password for safe transport to the user. PGP keys can be obtained from Keybase. + +-> To reset an IAM User login password via Terraform, you can use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html) or change any of the arguments. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_login_profile import IamUserLoginProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = IamUser(self, "example", + force_destroy=True, + name="example", + path="/" + ) + aws_iam_user_login_profile_example = IamUserLoginProfile(self, "example_1", + pgp_key="keybase:some_person_that_exists", + user=example.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_user_login_profile_example.override_logical_id("example") + TerraformOutput(self, "password", + value=aws_iam_user_login_profile_example.encrypted_password + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user` - (Required) The IAM user's name. +* `pgp_key` - (Optional) Either a base-64 encoded PGP public key, or a keybase username in the form `keybase:username`. Only applies on resource creation. Drift detection is not possible with this argument. +* `password_length` - (Optional) The length of the generated password on resource creation. Only applies on resource creation. Drift detection is not possible with this argument. Default value is `20`. +* `password_reset_required` - (Optional) Whether the user should be forced to reset the generated password on resource creation. Only applies on resource creation. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `password` - The plain text password, only available when `pgp_key` is not provided. +* `key_fingerprint` - The fingerprint of the PGP key used to encrypt the password. 
Only available if password was handled on Terraform resource creation, not import.
+* `encrypted_password` - The encrypted password, base64 encoded. Only available if password was handled on Terraform resource creation, not import.
+
+~> **NOTE:** The encrypted password may be decrypted using the command line, for example: `terraform output password | base64 --decode | keybase pgp decrypt`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM User Login Profiles without password information via the IAM User name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import IAM User Login Profiles without password information via the IAM User name. For example:
+
+```console
+% terraform import aws_iam_user_login_profile.example myusername
+```
+
+Since Terraform has no method to read the PGP or password information during import, use the [Terraform resource `lifecycle` configuration block `ignore_changes` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to ignore them (unless you want to recreate a password). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_user_login_profile import IamUserLoginProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, user):
+        super().__init__(scope, name)
+        IamUserLoginProfile(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["password_length", "password_reset_required", "pgp_key"]
+            ),
+            user=user
+        )
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/iam_user_policy.html.markdown b/website/docs/cdktf/python/r/iam_user_policy.html.markdown
new file mode 100644
index 00000000000..5e987f41ab6
--- /dev/null
+++ b/website/docs/cdktf/python/r/iam_user_policy.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user_policy"
+description: |-
+  Provides an IAM policy attached to a user.
+---
+
+
+
+# Resource: aws_iam_user_policy
+
+Provides an IAM policy attached to a user.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
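+# Note: the example below inlines the policy document with Fn.jsonencode;
+# for larger policies, the aws_iam_policy_document data source (used in
+# the aws_iam_user example) is usually easier to maintain and validate.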
+# +from imports.aws.iam_access_key import IamAccessKey +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_policy import IamUserPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + lb = IamUser(self, "lb", + name="loadbalancer", + path="/system/" + ) + IamUserPolicy(self, "lb_ro", + name="test", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["ec2:Describe*"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })), + user=lb.name + ) + aws_iam_access_key_lb = IamAccessKey(self, "lb_2", + user=lb.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_access_key_lb.override_logical_id("lb") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `name` - (Optional) The name of the policy. If omitted, Terraform will assign a random, unique name. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `user` - (Required) IAM user to which to attach this policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The user policy ID, in the form of `user_name:user_policy_name`. +* `name` - The name of the policy (always set). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM User Policies using the `user_name:user_policy_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM User Policies using the `user_name:user_policy_name`. For example: + +```console +% terraform import aws_iam_user_policy.mypolicy user_of_mypolicy_name:mypolicy_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_user_policy_attachment.markdown b/website/docs/cdktf/python/r/iam_user_policy_attachment.markdown new file mode 100644 index 00000000000..8bd48d363e7 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_user_policy_attachment.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_policy_attachment" +description: |- + Attaches a Managed IAM Policy to an IAM user +--- + + + +# Resource: aws_iam_user_policy_attachment + +Attaches a Managed IAM Policy to an IAM user + +~> **NOTE:** The usage of this resource conflicts with the `aws_iam_policy_attachment` resource and will permanently show a difference if both are defined. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
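+# Note: unlike aws_iam_user_policy, which embeds an inline policy in the
+# user, this resource attaches a standalone managed policy by ARN, so the
+# same policy can be reused across users, groups, and roles.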
+# +from imports.aws.iam_policy import IamPolicy +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_policy_attachment import IamUserPolicyAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + policy = IamPolicy(self, "policy", + description="A test policy", + name="test-policy", + policy="{ ... policy JSON ... }" + ) + user = IamUser(self, "user", + name="test-user" + ) + IamUserPolicyAttachment(self, "test-attach", + policy_arn=policy.arn, + user=user.name + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user` (Required) - The user the policy should be applied to +* `policy_arn` (Required) - The ARN of the policy you want to apply + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM user policy attachments using the user name and policy arn separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM user policy attachments using the user name and policy arn separated by `/`. For example: + +```console +% terraform import aws_iam_user_policy_attachment.test-attach test-user/arn:aws:iam::xxxxxxxxxxxx:policy/test-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iam_user_ssh_key.html.markdown b/website/docs/cdktf/python/r/iam_user_ssh_key.html.markdown new file mode 100644 index 00000000000..5afd2df5011 --- /dev/null +++ b/website/docs/cdktf/python/r/iam_user_ssh_key.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_ssh_key" +description: |- + Uploads an SSH public key and associates it with the specified IAM user. +--- + + + +# Resource: aws_iam_user_ssh_key + +Uploads an SSH public key and associates it with the specified IAM user. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_user import IamUser +from imports.aws.iam_user_ssh_key import IamUserSshKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + user = IamUser(self, "user", + name="test-user", + path="/" + ) + aws_iam_user_ssh_key_user = IamUserSshKey(self, "user_1", + encoding="SSH", + public_key="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 mytest@mydomain.com", + username=user.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
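+        # (override_logical_id only changes the logical ID used in the
+        # synthesized Terraform configuration; the construct ID "user_1"
+        # passed above must still be unique within this stack.)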
+        aws_iam_user_ssh_key_user.override_logical_id("user")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `username` - (Required) The name of the IAM user to associate the SSH public key with.
+* `encoding` - (Required) Specifies the public key encoding format to use in the response. To retrieve the public key in ssh-rsa format, use `SSH`. To retrieve the public key in PEM format, use `PEM`.
+* `public_key` - (Required) The SSH public key. The public key must be encoded in ssh-rsa format or PEM format.
+* `status` - (Optional) The status to assign to the SSH public key. Active means the key can be used for authentication with an AWS CodeCommit repository. Inactive means the key cannot be used. Default is `active`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `ssh_public_key_id` - The unique identifier for the SSH public key.
+* `fingerprint` - The MD5 message digest of the SSH public key.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSH public keys using the `username`, `ssh_public_key_id`, and `encoding`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SSH public keys using the `username`, `ssh_public_key_id`, and `encoding`. For example:
+
+```console
+% terraform import aws_iam_user_ssh_key.user user:APKAJNCNNJICVN7CFKCA:SSH
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/iam_virtual_mfa_device.html.markdown b/website/docs/cdktf/python/r/iam_virtual_mfa_device.html.markdown
new file mode 100644
index 00000000000..59af71e8f6a
--- /dev/null
+++ b/website/docs/cdktf/python/r/iam_virtual_mfa_device.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_virtual_mfa_device"
+description: |-
+  Provides an IAM Virtual MFA Device
+---
+
+
+
+# Resource: aws_iam_virtual_mfa_device
+
+Provides an IAM Virtual MFA Device.
+
+~> **Note:** All attributes will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **Note:** A virtual MFA device cannot be directly associated with an IAM User from Terraform.
+  To associate the virtual MFA device with a user and enable it, use the code returned in either `base_32_string_seed` or `qr_code_png` to generate TOTP authentication codes.
+  The authentication codes can then be used with the AWS CLI command [`aws iam enable-mfa-device`](https://docs.aws.amazon.com/cli/latest/reference/iam/enable-mfa-device.html) or the AWS API call [`EnableMFADevice`](https://docs.aws.amazon.com/IAM/latest/APIReference/API_EnableMFADevice.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
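+# Note: creating the device only registers it with IAM; as described in
+# the notes above, it still has to be enabled for a user out of band,
+# e.g. with `aws iam enable-mfa-device`.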
+# +from imports.aws.iam_virtual_mfa_device import IamVirtualMfaDevice +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IamVirtualMfaDevice(self, "example", + virtual_mfa_device_name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `virtual_mfa_device_name` - (Required) The name of the virtual MFA device. Use with path to uniquely identify a virtual MFA device. +* `path` – (Optional) The path for the virtual MFA device. +* `tags` - (Optional) Map of resource tags for the virtual mfa device. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) specifying the virtual mfa device. +* `base_32_string_seed` - The base32 seed defined as specified in [RFC3548](https://tools.ietf.org/html/rfc3548.txt). The `base_32_string_seed` is base64-encoded. +* `enable_date` - The date and time when the virtual MFA device was enabled. +* `qr_code_png` - A QR code PNG image that encodes `otpauth://totp/$virtualMFADeviceName@$AccountName?secret=$Base32String` where `$virtualMFADeviceName` is one of the create call arguments. AccountName is the user name if set (otherwise, the account ID), and Base32String is the seed in base32 format. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `user_name` - The associated IAM User name if the virtual MFA device is enabled. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Virtual MFA Devices using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IAM Virtual MFA Devices using the `arn`. For example: + +```console +% terraform import aws_iam_virtual_mfa_device.example arn:aws:iam::123456789012:mfa/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/identitystore_group.html.markdown b/website/docs/cdktf/python/r/identitystore_group.html.markdown new file mode 100644 index 00000000000..62384b6028d --- /dev/null +++ b/website/docs/cdktf/python/r/identitystore_group.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "SSO Identity Store" +layout: "aws" +page_title: "AWS: aws_identitystore_group" +description: |- + Terraform resource for managing an AWS IdentityStore Group. +--- + + + +# Resource: aws_identitystore_group + +Terraform resource for managing an AWS IdentityStore Group. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
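+# Note: `example.identity_store_ids` below is assumed to come from a
+# data_aws_ssoadmin_instances data source declared elsewhere in the
+# stack; the group membership example that follows shows a full version
+# that includes it.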
+# +from imports.aws.identitystore_group import IdentitystoreGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IdentitystoreGroup(self, "this", + description="Example description", + display_name="Example group", + identity_store_id=Token.as_string( + property_access(Fn.tolist(example.identity_store_ids), ["0"])) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `identity_store_id` - (Required) The globally unique identifier for the identity store. + +The following arguments are optional: + +* `display_name` - (Optional) A string containing the name of the group. This value is commonly displayed when the group is referenced. +* `description` - (Optional) A string containing the description of the group. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `group_id` - The identifier of the newly created group in the identity store. +* `external_ids` - A list of external IDs that contains the identifiers issued to this resource by an external identity provider. See [External IDs](#external-ids) below. + +### External IDs + +* `id` - The identifier issued to this resource by an external identity provider. +* `issuer` - The issuer for an external identifier. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60m`) +* `update` - (Default `180m`) +* `delete` - (Default `90m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Identity Store Group using the combination `identity_store_id/group_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an Identity Store Group using the combination `identity_store_id/group_id`. For example: + +```console +% terraform import aws_identitystore_group.example d-9c6705e95c/b8a1c340-8031-7071-a2fb-7dc540320c30 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/identitystore_group_membership.html.markdown b/website/docs/cdktf/python/r/identitystore_group_membership.html.markdown new file mode 100644 index 00000000000..79bd8e93d2b --- /dev/null +++ b/website/docs/cdktf/python/r/identitystore_group_membership.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "SSO Identity Store" +layout: "aws" +page_title: "AWS: aws_identitystore_group_membership" +description: |- + Terraform resource for managing an AWS IdentityStore Group Membership. +--- + + + +# Resource: aws_identitystore_group_membership + +Terraform resource for managing an AWS IdentityStore Group Membership. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
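+# Note: the group, the user, and the membership that links them must all
+# live in the same identity store, which is why every resource below is
+# given the same identity_store_id.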
+#
+from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
+from imports.aws.identitystore_group import IdentitystoreGroup
+from imports.aws.identitystore_group_membership import IdentitystoreGroupMembership
+from imports.aws.identitystore_user import IdentitystoreUser, IdentitystoreUserName
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsSsoadminInstances(self, "example")
+        aws_identitystore_group_example = IdentitystoreGroup(self, "example_1",
+            description="Some group name",
+            display_name="MyGroup",
+            identity_store_id=Token.as_string(
+                property_access(Fn.tolist(example.identity_store_ids), ["0"]))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_identitystore_group_example.override_logical_id("example")
+        aws_identitystore_user_example = IdentitystoreUser(self, "example_2",
+            display_name="John Doe",
+            identity_store_id=Token.as_string(
+                property_access(Fn.tolist(example.identity_store_ids), ["0"])),
+            name=IdentitystoreUserName(
+                family_name="Doe",
+                given_name="John"
+            ),
+            user_name="john.doe@example.com"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_identitystore_user_example.override_logical_id("example")
+        aws_identitystore_group_membership_example = IdentitystoreGroupMembership(self, "example_3",
+            group_id=Token.as_string(aws_identitystore_group_example.group_id),
+            identity_store_id=Token.as_string(
+                property_access(Fn.tolist(example.identity_store_ids), ["0"])),
+            member_id=Token.as_string(aws_identitystore_user_example.user_id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_identitystore_group_membership_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `member_id` - (Required) The identifier for a user in the Identity Store.
+* `group_id` - (Required) The identifier for a group in the Identity Store.
+* `identity_store_id` - (Required) Identity Store ID associated with the Single Sign-On Instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `membership_id` - The identifier of the newly created group membership in the Identity Store.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_identitystore_group_membership` using the `identity_store_id/membership_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_identitystore_group_membership` using the `identity_store_id/membership_id`. 
For example:
+
+```console
+% terraform import aws_identitystore_group_membership.example d-0000000000/00000000-0000-0000-0000-000000000000
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/identitystore_user.html.markdown b/website/docs/cdktf/python/r/identitystore_user.html.markdown
new file mode 100644
index 00000000000..c64db7a5758
--- /dev/null
+++ b/website/docs/cdktf/python/r/identitystore_user.html.markdown
@@ -0,0 +1,141 @@
+---
+subcategory: "SSO Identity Store"
+layout: "aws"
+page_title: "AWS: aws_identitystore_user"
+description: |-
+  Terraform resource for managing an AWS Identity Store User.
+---
+
+
+
+# Resource: aws_identitystore_user
+
+This resource manages a User resource within an Identity Store.
+
+-> **Note:** If you use an external identity provider or Active Directory as your identity source,
+use this resource with caution. IAM Identity Center does not support outbound synchronization,
+so your identity source does not automatically update with the changes that you make to
+users using this resource.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.identitystore_user import IdentitystoreUser, IdentitystoreUserEmails, IdentitystoreUserName
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IdentitystoreUser(self, "example",
+            display_name="John Doe",
+            emails=IdentitystoreUserEmails(
+                value="john@example.com"
+            ),
+            # data_aws_ssoadmin_instances_example refers to a DataAwsSsoadminInstances
+            # data source declared elsewhere in the stack.
+            identity_store_id=Token.as_string(
+                property_access(
+                    Fn.tolist(data_aws_ssoadmin_instances_example.identity_store_ids), ["0"])),
+            name=IdentitystoreUserName(
+                family_name="Doe",
+                given_name="John"
+            ),
+            user_name="johndoe"
+        )
+```
+
+## Argument Reference
+
+-> Unless specified otherwise, all fields can contain up to 1024 characters of free-form text.
+
+The following arguments are required:
+
+* `display_name` - (Required) The name that is typically displayed when the user is referenced.
+* `identity_store_id` - (Required, Forces new resource) The globally unique identifier for the identity store that this user is in.
+* `name` - (Required) Details about the user's full name. Detailed below.
+* `user_name` - (Required, Forces new resource) A unique string used to identify the user. This value can consist of letters, accented characters, symbols, numbers, and punctuation. This value is specified at the time the user is created and stored as an attribute of the user object in the identity store. The limit is 128 characters.
+
+The following arguments are optional:
+
+* `addresses` - (Optional) Details about the user's address. At most 1 address is allowed. Detailed below.
+* `emails` - (Optional) Details about the user's email. At most 1 email is allowed. Detailed below.
+* `locale` - (Optional) The user's geographical region or location.
+* `nickname` - (Optional) An alternate name for the user.
+* `phone_numbers` - (Optional) Details about the user's phone number. At most 1 phone number is allowed. Detailed below.
+* `preferred_language` - (Optional) The preferred language of the user.
+* `profile_url` - (Optional) A URL that may be associated with the user.
+* `timezone` - (Optional) The user's time zone.
+* `title` - (Optional) The user's title.
+* `user_type` - (Optional) The user type. + +### addresses Configuration Block + +* `country` - (Optional) The country that this address is in. +* `formatted` - (Optional) The name that is typically displayed when the address is shown for display. +* `locality` - (Optional) The address locality. +* `postal_code` - (Optional) The postal code of the address. +* `primary` - (Optional) When `true`, this is the primary address associated with the user. +* `region` - (Optional) The region of the address. +* `street_address` - (Optional) The street of the address. +* `type` - (Optional) The type of address. + +### emails Configuration Block + +* `primary` - (Optional) When `true`, this is the primary email associated with the user. +* `type` - (Optional) The type of email. +* `value` - (Optional) The email address. This value must be unique across the identity store. + +### name Configuration Block + +The following arguments are required: + +* `family_name` - (Required) The family name of the user. +* `given_name` - (Required) The given name of the user. + +The following arguments are optional: + +* `formatted` - (Optional) The name that is typically displayed when the name is shown for display. +* `honorific_prefix` - (Optional) The honorific prefix of the user. +* `honorific_suffix` - (Optional) The honorific suffix of the user. +* `middle_name` - (Optional) The middle name of the user. + +### phone_numbers Configuration Block + +* `primary` - (Optional) When `true`, this is the primary phone number associated with the user. +* `type` - (Optional) The type of phone number. +* `value` - (Optional) The user's phone number. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `external_ids` - A list of identifiers issued to this resource by an external identity provider. + * `id` - The identifier issued to this resource by an external identity provider. + * `issuer` - The issuer for an external identifier. +* `user_id` - The identifier for this user in the identity store. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Identity Store User using the combination `identity_store_id/user_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an Identity Store User using the combination `identity_store_id/user_id`. For example: + +```console +% terraform import aws_identitystore_user.example d-9c6705e95c/065212b4-9061-703b-5876-13a517ae2a7c +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/imagebuilder_component.html.markdown b/website/docs/cdktf/python/r/imagebuilder_component.html.markdown new file mode 100644 index 00000000000..ccd19eb7e7e --- /dev/null +++ b/website/docs/cdktf/python/r/imagebuilder_component.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_component" +description: |- + Manage an Image Builder Component +--- + + + +# Resource: aws_imagebuilder_component + +Manages an Image Builder Component. 
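+
+Component documents are YAML; the examples below pass one inline with `Fn.yamlencode` and by S3 URI. As a third option, a minimal sketch (assuming a hypothetical `component.yaml` file next to the stack, and the usual generated provider bindings) could read the document from disk at synth time:
+
+```python
+# Sketch, not 'cdktf convert' output: load the component document from a
+# local file. The file name "component.yaml" is a placeholder.
+import os
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.imagebuilder_component import ImagebuilderComponent
+class FileComponentStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Read the YAML document at synth time and pass it as `data`.
+        with open(os.path.join(os.path.dirname(__file__), "component.yaml")) as f:
+            document = f.read()
+        ImagebuilderComponent(self, "example",
+            data=document,
+            name="example",
+            platform="Linux",
+            version="1.0.0"
+        )
+```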
+
+## Example Usage
+
+### Inline Data Document
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.imagebuilder_component import ImagebuilderComponent
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ImagebuilderComponent(self, "example",
+            data=Token.as_string(
+                Fn.yamlencode({
+                    "phases": [{
+                        "name": "build",
+                        "steps": [{
+                            "action": "ExecuteBash",
+                            "inputs": {
+                                "commands": ["echo 'hello world'"]
+                            },
+                            "name": "example",
+                            "on_failure": "Continue"
+                        }
+                        ]
+                    }
+                    ],
+                    "schema_version": 1
+                })),
+            name="example",
+            platform="Linux",
+            version="1.0.0"
+        )
+```
+
+### URI Document
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.imagebuilder_component import ImagebuilderComponent
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ImagebuilderComponent(self, "example",
+            name="example",
+            platform="Linux",
+            uri="s3://${" + aws_s3_object_example.bucket + "}/${" + aws_s3_object_example.key + "}",
+            version="1.0.0"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the component.
+* `platform` - (Required) Platform of the component.
+* `version` - (Required) Version of the component.
+
+The following arguments are optional:
+
+* `change_description` - (Optional) Change description of the component.
+* `data` - (Optional) Inline YAML string with data of the component. Exactly one of `data` and `uri` can be specified. Terraform will only perform drift detection of its value when present in a configuration.
+* `description` - (Optional) Description of the component.
+* `kms_key_id` - (Optional) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key used to encrypt the component.
+* `skip_destroy` - (Optional) Whether to retain the old version when the resource is destroyed or replacement is necessary. Defaults to `false`.
+* `supported_os_versions` - (Optional) Set of Operating Systems (OS) supported by the component.
+* `tags` - (Optional) Key-value map of resource tags for the component. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `uri` - (Optional) S3 URI with data of the component. Exactly one of `data` and `uri` can be specified.
+
+~> **NOTE:** Updating `data` or `uri` requires specifying a new `version`. This causes replacement of the resource. The `skip_destroy` argument can be used to retain the old version.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the component.
+* `date_created` - Date the component was created.
+* `encrypted` - Encryption status of the component.
+* `owner` - Owner of the component.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `type` - Type of the component. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_imagebuilder_components` resources using the Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_imagebuilder_components` resources using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_component.example arn:aws:imagebuilder:us-east-1:123456789012:component/example/1.0.0/1 +``` + +Certain resource arguments, such as `uri`, cannot be read via the API and imported into Terraform. Terraform will display a difference for these arguments the first run after import if declared in the Terraform configuration for an imported resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/imagebuilder_container_recipe.html.markdown b/website/docs/cdktf/python/r/imagebuilder_container_recipe.html.markdown new file mode 100644 index 00000000000..6142af4e7d6 --- /dev/null +++ b/website/docs/cdktf/python/r/imagebuilder_container_recipe.html.markdown @@ -0,0 +1,156 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_container_recipe" +description: |- + Manage an Image Builder Container Recipe +--- + + + +# Resource: aws_imagebuilder_container_recipe + +Manages an Image Builder Container Recipe. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.imagebuilder_container_recipe import ImagebuilderContainerRecipe +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ImagebuilderContainerRecipe(self, "example", + component=[ImagebuilderContainerRecipeComponent( + component_arn=Token.as_string(aws_imagebuilder_component_example.arn), + parameter=[ImagebuilderContainerRecipeComponentParameter( + name="Parameter1", + value="Value1" + ), ImagebuilderContainerRecipeComponentParameter( + name="Parameter2", + value="Value2" + ) + ] + ) + ], + container_type="DOCKER", + dockerfile_template_data="FROM {{{ imagebuilder:parentImage }}}\n{{{ imagebuilder:environments }}}\n{{{ imagebuilder:components }}}\n\n", + name="example", + parent_image="arn:aws:imagebuilder:eu-central-1:aws:image/amazon-linux-x86-latest/x.x.x", + target_repository=ImagebuilderContainerRecipeTargetRepository( + repository_name=Token.as_string(aws_ecr_repository_example.name), + service="ECR" + ), + version="1.0.0" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `component` - (Required) Ordered configuration block(s) with components for the container recipe. Detailed below. +* `container_type` - (Required) The type of the container to create. Valid values: `DOCKER`. 
+* `name` - (Required) The name of the container recipe.
+* `parent_image` - (Required) The base image for the container recipe.
+* `target_repository` - (Required) The destination repository for the container image. Detailed below.
+* `version` - (Required) Version of the container recipe.
+
+The following arguments are optional:
+
+* `description` - (Optional) The description of the container recipe.
+* `dockerfile_template_data` - (Optional) The Dockerfile template used to build the image as an inline data blob.
+* `dockerfile_template_uri` - (Optional) The Amazon S3 URI for the Dockerfile that will be used to build the container image.
+* `instance_configuration` - (Optional) Configuration block used to configure an instance for building and testing container images. Detailed below.
+* `kms_key_id` - (Optional) The KMS key used to encrypt the container image.
+* `platform_override` - (Optional) Specifies the operating system platform when you use a custom base image.
+* `tags` - (Optional) Key-value map of resource tags for the container recipe. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `working_directory` - (Optional) The working directory to be used during build and test workflows.
+
+### component
+
+The `component` block supports the following arguments:
+
+* `component_arn` - (Required) Amazon Resource Name (ARN) of the Image Builder Component to associate.
+* `parameter` - (Optional) Configuration block(s) for parameters to configure the component. Detailed below.
+
+### parameter
+
+The following arguments are required:
+
+* `name` - (Required) The name of the component parameter.
+* `value` - (Required) The value for the named component parameter.
+
+### target_repository
+
+The following arguments are required:
+
+* `repository_name` - (Required) The name of the container repository where the output container image is stored. This name is prefixed by the repository location.
+* `service` - (Required) The service in which this image is registered. Valid values: `ECR`.
+
+### instance_configuration
+
+The following arguments are optional:
+
+* `block_device_mapping` - (Optional) Configuration block(s) with block device mappings for the container recipe. Detailed below.
+* `image` - (Optional) The AMI ID to use as the base image for a container build and test instance. If not specified, Image Builder will use the appropriate ECS-optimized AMI as a base image.
+
+### block_device_mapping
+
+The following arguments are optional:
+
+* `device_name` - (Optional) Name of the device. For example, `/dev/sda` or `/dev/xvdb`.
+* `ebs` - (Optional) Configuration block with Elastic Block Storage (EBS) block device mapping settings. Detailed below.
+* `no_device` - (Optional) Set to `true` to remove a mapping from the parent image.
+* `virtual_name` - (Optional) Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0.
+
+#### ebs
+
+The following arguments are optional:
+
+* `delete_on_termination` - (Optional) Whether to delete the volume on termination. Defaults to unset, which is the value inherited from the parent image.
+* `encrypted` - (Optional) Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image.
+* `iops` - (Optional) Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume.
+* `kms_key_id` - (Optional) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key for encryption.
+* `snapshot_id` - (Optional) Identifier of the EC2 Volume Snapshot.
+* `throughput` - (Optional) For GP3 volumes only. The throughput in MiB/s that the volume supports.
+* `volume_size` - (Optional) Size of the volume, in GiB.
+* `volume_type` - (Optional) Type of the volume. For example, `gp2` or `io2`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the container recipe.
+* `date_created` - Date the container recipe was created.
+* `encrypted` - A flag that indicates if the target container is encrypted.
+* `owner` - Owner of the container recipe.
+* `platform` - Platform of the container recipe.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_imagebuilder_container_recipe` resources using the Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_imagebuilder_container_recipe` resources using the Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_imagebuilder_container_recipe.example arn:aws:imagebuilder:us-east-1:123456789012:container-recipe/example/1.0.0
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/imagebuilder_distribution_configuration.html.markdown b/website/docs/cdktf/python/r/imagebuilder_distribution_configuration.html.markdown
new file mode 100644
index 00000000000..0a4168e8693
--- /dev/null
+++ b/website/docs/cdktf/python/r/imagebuilder_distribution_configuration.html.markdown
@@ -0,0 +1,160 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_distribution_configuration"
+description: |-
+  Manage an Image Builder Distribution Configuration
+---
+
+
+
+# Resource: aws_imagebuilder_distribution_configuration
+
+Manages an Image Builder Distribution Configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
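+# Note: the "{{ imagebuilder:buildDate }}" token in the AMI name below is
+# resolved by Image Builder at build time, not by Terraform.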
+# +from imports.aws.imagebuilder_distribution_configuration import ImagebuilderDistributionConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ImagebuilderDistributionConfiguration(self, "example", + distribution=[ImagebuilderDistributionConfigurationDistribution( + ami_distribution_configuration=ImagebuilderDistributionConfigurationDistributionAmiDistributionConfiguration( + ami_tags={ + "CostCenter": "IT" + }, + launch_permission=ImagebuilderDistributionConfigurationDistributionAmiDistributionConfigurationLaunchPermission( + user_ids=["123456789012"] + ), + name="example-{{ imagebuilder:buildDate }}" + ), + launch_template_configuration=[ImagebuilderDistributionConfigurationDistributionLaunchTemplateConfiguration( + launch_template_id="lt-0aaa1bcde2ff3456" + ) + ], + region="us-east-1" + ) + ], + name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the distribution configuration. +* `distribution` - (Required) One or more configuration blocks with distribution settings. Detailed below. + +The following arguments are optional: + +* `description` - (Optional) Description of the distribution configuration. +* `tags` - (Optional) Key-value map of resource tags for the distribution configuration. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### distribution + +The following arguments are required: + +* `region` - (Required) AWS Region for the distribution. + +The following arguments are optional: + +* `ami_distribution_configuration` - (Optional) Configuration block with Amazon Machine Image (AMI) distribution settings. Detailed below. +* `container_distribution_configuration` - (Optional) Configuration block with container distribution settings. Detailed below. +* `fast_launch_configuration` - (Optional) Set of Windows faster-launching configurations to use for AMI distribution. Detailed below. +* `launch_template_configuration` - (Optional) Set of launch template configuration settings that apply to image distribution. Detailed below. +* `license_configuration_arns` - (Optional) Set of Amazon Resource Names (ARNs) of License Manager License Configurations. + +### ami_distribution_configuration + +The following arguments are optional: + +* `ami_tags` - (Optional) Key-value map of tags to apply to the distributed AMI. +* `description` - (Optional) Description to apply to the distributed AMI. +* `kms_key_id` - (Optional) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key to encrypt the distributed AMI. +* `launch_permission` - (Optional) Configuration block of EC2 launch permissions to apply to the distributed AMI. Detailed below. +* `name` - (Optional) Name to apply to the distributed AMI. +* `target_account_ids` - (Optional) Set of AWS Account identifiers to distribute the AMI. + +### launch_permission + +The following arguments are optional: + +* `organization_arns` - (Optional) Set of AWS Organization ARNs to assign. +* `organizational_unit_arns` - (Optional) Set of AWS Organizational Unit ARNs to assign. +* `user_groups` - (Optional) Set of EC2 launch permission user groups to assign. Use `all` to distribute a public AMI. +* `user_ids` - (Optional) Set of AWS Account identifiers to assign. 
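+
+For instance, a minimal sketch (placeholder organization ARN; struct imports follow the generated provider binding naming convention) that shares the distributed AMI with a whole AWS Organization instead of individual accounts:
+
+```python
+# Sketch, not 'cdktf convert' output: org-wide launch permission.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.imagebuilder_distribution_configuration import (
+    ImagebuilderDistributionConfiguration,
+    ImagebuilderDistributionConfigurationDistribution,
+    ImagebuilderDistributionConfigurationDistributionAmiDistributionConfiguration,
+    ImagebuilderDistributionConfigurationDistributionAmiDistributionConfigurationLaunchPermission,
+)
+class OrgSharedAmiStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ImagebuilderDistributionConfiguration(self, "org_shared",
+            name="org-shared",
+            distribution=[ImagebuilderDistributionConfigurationDistribution(
+                region="us-east-1",
+                ami_distribution_configuration=ImagebuilderDistributionConfigurationDistributionAmiDistributionConfiguration(
+                    launch_permission=ImagebuilderDistributionConfigurationDistributionAmiDistributionConfigurationLaunchPermission(
+                        # Placeholder ARN; organization_arns grants launch
+                        # permission to every account in the organization.
+                        organization_arns=["arn:aws:organizations::123456789012:organization/o-exampleorgid"]
+                    )
+                )
+            )
+            ]
+        )
+```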
+
+### container_distribution_configuration
+
+* `container_tags` - (Optional) Set of tags that are attached to the container distribution configuration.
+* `description` - (Optional) Description of the container distribution configuration.
+* `target_repository` - (Required) Configuration block with the destination repository for the container distribution configuration.
+
+### target_repository
+
+* `repository_name` - (Required) The name of the container repository where the output container image is stored. This name is prefixed by the repository location.
+* `service` - (Required) The service in which this image is registered. Valid values: `ECR`.
+
+### fast_launch_configuration
+
+* `account_id` - (Required) The owner account ID for the fast-launch enabled Windows AMI.
+* `enabled` - (Required) A Boolean that represents the current state of faster launching for the Windows AMI. Set to `true` to start using Windows faster launching, or `false` to stop using it.
+* `launch_template` - (Optional) Configuration block for the launch template that the fast-launch enabled Windows AMI uses when it launches Windows instances to create pre-provisioned snapshots. Detailed below.
+* `max_parallel_launches` - (Optional) The maximum number of parallel instances that are launched for creating resources.
+* `snapshot_configuration` - (Optional) Configuration block for managing the number of snapshots that are created from pre-provisioned instances for the Windows AMI when faster launching is enabled. Detailed below.
+
+### launch_template
+
+* `launch_template_id` - (Optional) The ID of the launch template to use for faster launching for a Windows AMI.
+* `launch_template_name` - (Optional) The name of the launch template to use for faster launching for a Windows AMI.
+* `launch_template_version` - (Optional) The version of the launch template to use for faster launching for a Windows AMI.
+
+### snapshot_configuration
+
+* `target_resource_count` - (Optional) The number of pre-provisioned snapshots to keep on hand for a fast-launch enabled Windows AMI.
+
+### launch_template_configuration
+
+* `default` - (Optional) Indicates whether to set the specified Amazon EC2 launch template as the default launch template. Defaults to `true`.
+* `account_id` - The account ID that this configuration applies to.
+* `launch_template_id` - (Required) The ID of the Amazon EC2 launch template to use.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the distribution configuration.
+* `date_created` - Date the distribution configuration was created.
+* `date_updated` - Date the distribution configuration was updated.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_imagebuilder_distribution_configurations` resources using the Amazon Resource Name (ARN). 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_imagebuilder_distribution_configurations` resources using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_distribution_configuration.example arn:aws:imagebuilder:us-east-1:123456789012:distribution-configuration/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/imagebuilder_image.html.markdown b/website/docs/cdktf/python/r/imagebuilder_image.html.markdown new file mode 100644 index 00000000000..b688fdd318c --- /dev/null +++ b/website/docs/cdktf/python/r/imagebuilder_image.html.markdown @@ -0,0 +1,104 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image" +description: |- + Manages an Image Builder Image +--- + + + +# Resource: aws_imagebuilder_image + +Manages an Image Builder Image. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.imagebuilder_image import ImagebuilderImage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ImagebuilderImage(self, "example", + distribution_configuration_arn=Token.as_string(aws_imagebuilder_distribution_configuration_example.arn), + image_recipe_arn=Token.as_string(aws_imagebuilder_image_recipe_example.arn), + infrastructure_configuration_arn=Token.as_string(aws_imagebuilder_infrastructure_configuration_example.arn) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `infrastructure_configuration_arn` - (Required) Amazon Resource Name (ARN) of the Image Builder Infrastructure Configuration. + +The following arguments are optional: + +* `container_recipe_arn` - (Optional) - Amazon Resource Name (ARN) of the container recipe. +* `distribution_configuration_arn` - (Optional) Amazon Resource Name (ARN) of the Image Builder Distribution Configuration. +* `enhanced_image_metadata_enabled` - (Optional) Whether additional information about the image being created is collected. Defaults to `true`. +* `image_recipe_arn` - (Optional) Amazon Resource Name (ARN) of the image recipe. +* `image_tests_configuration` - (Optional) Configuration block with image tests configuration. Detailed below. +* `tags` - (Optional) Key-value map of resource tags for the Image Builder Image. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### image_tests_configuration + +The following arguments are optional: + +* `image_tests_enabled` - (Optional) Whether image tests are enabled. Defaults to `true`. +* `timeout_minutes` - (Optional) Number of minutes before image tests time out. Valid values are between `60` and `1440`. Defaults to `720`. 
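+
+As a sketch (reusing the recipe and infrastructure configuration references from the example above, which are not shown here; the `ImagebuilderImageImageTestsConfiguration` class name follows the generated binding naming convention), the test phase can be switched off to shorten builds:
+
+```python
+# Sketch, not 'cdktf convert' output: build an image with tests disabled.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.imagebuilder_image import (
+    ImagebuilderImage,
+    ImagebuilderImageImageTestsConfiguration,
+)
+class UntestedImageStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ImagebuilderImage(self, "example",
+            image_recipe_arn=Token.as_string(aws_imagebuilder_image_recipe_example.arn),
+            infrastructure_configuration_arn=Token.as_string(aws_imagebuilder_infrastructure_configuration_example.arn),
+            # image_tests_enabled defaults to true; disabling it skips the
+            # test phase entirely.
+            image_tests_configuration=ImagebuilderImageImageTestsConfiguration(
+                image_tests_enabled=False
+            )
+        )
+```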
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the image. +* `date_created` - Date the image was created. +* `platform` - Platform of the image. +* `os_version` - Operating System version of the image. +* `output_resources` - List of objects with resources created by the image. + * `amis` - Set of objects with each Amazon Machine Image (AMI) created. + * `account_id` - Account identifier of the AMI. + * `description` - Description of the AMI. + * `image` - Identifier of the AMI. + * `name` - Name of the AMI. + * `region` - Region of the AMI. + * `containers` - Set of objects with each container image created and stored in the output repository. + * `image_uris` - Set of URIs for created containers. + * `region` - Region of the container image. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `version` - Version of the image. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_imagebuilder_image` resources using the Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_imagebuilder_image` resources using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_image.example arn:aws:imagebuilder:us-east-1:123456789012:image/example/1.0.0/1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/imagebuilder_image_pipeline.html.markdown b/website/docs/cdktf/python/r/imagebuilder_image_pipeline.html.markdown new file mode 100644 index 00000000000..95c3a5b7a8a --- /dev/null +++ b/website/docs/cdktf/python/r/imagebuilder_image_pipeline.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_pipeline" +description: |- + Manages an Image Builder Image Pipeline +--- + + + +# Resource: aws_imagebuilder_image_pipeline + +Manages an Image Builder Image Pipeline. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.imagebuilder_image_pipeline import ImagebuilderImagePipeline +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ImagebuilderImagePipeline(self, "example", + image_recipe_arn=Token.as_string(aws_imagebuilder_image_recipe_example.arn), + infrastructure_configuration_arn=Token.as_string(aws_imagebuilder_infrastructure_configuration_example.arn), + name="example", + schedule=ImagebuilderImagePipelineSchedule( + schedule_expression="cron(0 0 * * ? 
*)" + ) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `infrastructure_configuration_arn` - (Required) Amazon Resource Name (ARN) of the Image Builder Infrastructure Configuration. +* `name` - (Required) Name of the image pipeline. + +The following arguments are optional: + +* `container_recipe_arn` - (Optional) Amazon Resource Name (ARN) of the container recipe. +* `description` - (Optional) Description of the image pipeline. +* `distribution_configuration_arn` - (Optional) Amazon Resource Name (ARN) of the Image Builder Distribution Configuration. +* `enhanced_image_metadata_enabled` - (Optional) Whether additional information about the image being created is collected. Defaults to `true`. +* `image_recipe_arn` - (Optional) Amazon Resource Name (ARN) of the image recipe. +* `image_tests_configuration` - (Optional) Configuration block with image tests configuration. Detailed below. +* `schedule` - (Optional) Configuration block with schedule settings. Detailed below. +* `status` - (Optional) Status of the image pipeline. Valid values are `DISABLED` and `ENABLED`. Defaults to `ENABLED`. +* `tags` - (Optional) Key-value map of resource tags for the image pipeline. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### image_tests_configuration + +The following arguments are optional: + +* `image_tests_enabled` - (Optional) Whether image tests are enabled. Defaults to `true`. +* `timeout_minutes` - (Optional) Number of minutes before image tests time out. Valid values are between `60` and `1440`. Defaults to `720`. + +### schedule + +The following arguments are required: + +* `schedule_expression` - (Required) Cron expression of how often the pipeline start condition is evaluated. For example, `cron(0 0 * * ? *)` is evaluated every day at midnight UTC. Configurations using the five field syntax that was previously accepted by the API, such as `cron(0 0 * * *)`, must be updated to the six field syntax. For more information, see the [Image Builder User Guide](https://docs.aws.amazon.com/imagebuilder/latest/userguide/cron-expressions.html). + +The following arguments are optional: + +* `pipeline_execution_start_condition` - (Optional) Condition when the pipeline should trigger a new image build. Valid values are `EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE` and `EXPRESSION_MATCH_ONLY`. Defaults to `EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE`. + +* `timezone` - (Optional) The timezone that applies to the scheduling expression. For example, "Etc/UTC", "America/Los_Angeles" in the [IANA timezone format](https://www.joda.org/joda-time/timezones.html). If not specified this defaults to UTC. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the image pipeline. +* `date_created` - Date the image pipeline was created. +* `date_last_run` - Date the image pipeline was last run. +* `date_next_run` - Date the image pipeline will run next. +* `date_updated` - Date the image pipeline was updated. +* `platform` - Platform of the image pipeline. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_imagebuilder_image_pipeline` resources using the Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_imagebuilder_image_pipeline` resources using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_image_pipeline.example arn:aws:imagebuilder:us-east-1:123456789012:image-pipeline/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/imagebuilder_image_recipe.html.markdown b/website/docs/cdktf/python/r/imagebuilder_image_recipe.html.markdown new file mode 100644 index 00000000000..e85bf44a295 --- /dev/null +++ b/website/docs/cdktf/python/r/imagebuilder_image_recipe.html.markdown @@ -0,0 +1,146 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_recipe" +description: |- + Manage an Image Builder Image Recipe +--- + + + +# Resource: aws_imagebuilder_image_recipe + +Manages an Image Builder Image Recipe. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.imagebuilder_image_recipe import ImagebuilderImageRecipe +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ImagebuilderImageRecipe(self, "example", + block_device_mapping=[ImagebuilderImageRecipeBlockDeviceMapping( + device_name="/dev/xvdb", + ebs=ImagebuilderImageRecipeBlockDeviceMappingEbs( + delete_on_termination=Token.as_string(True), + volume_size=100, + volume_type="gp2" + ) + ) + ], + component=[ImagebuilderImageRecipeComponent( + component_arn=Token.as_string(aws_imagebuilder_component_example.arn), + parameter=[ImagebuilderImageRecipeComponentParameter( + name="Parameter1", + value="Value1" + ), ImagebuilderImageRecipeComponentParameter( + name="Parameter2", + value="Value2" + ) + ] + ) + ], + name="example", + parent_image="arn:${" + current.partition + "}:imagebuilder:${" + data_aws_region_current.name + "}:aws:image/amazon-linux-2-x86/x.x.x", + version="1.0.0" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `component` - Ordered configuration block(s) with components for the image recipe. Detailed below. +* `name` - Name of the image recipe. +* `parent_image` - The image recipe uses this image as a base from which to build your customized image. The value can be the base image ARN or an AMI ID. +* `version` - The semantic version of the image recipe, which specifies the version in the following format, with numeric values in each position to indicate a specific version: major.minor.patch. For example: 1.0.0. 
+
The following arguments are optional:

* `block_device_mapping` - Configuration block(s) with block device mappings for the image recipe. Detailed below.
* `description` - Description of the image recipe.
* `systems_manager_agent` - Configuration block for the Systems Manager Agent installed by default by Image Builder. Detailed below.
* `tags` - Key-value map of resource tags for the image recipe. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `user_data_base64` - Base64 encoded user data. Use this to provide commands or a command script to run when you launch your build instance. A usage sketch follows the Attribute Reference below.
* `working_directory` - The working directory to be used during build and test workflows.

### block_device_mapping

The following arguments are optional:

* `device_name` - Name of the device. For example, `/dev/sda` or `/dev/xvdb`.
* `ebs` - Configuration block with Elastic Block Storage (EBS) block device mapping settings. Detailed below.
* `no_device` - Set to `true` to remove a mapping from the parent image.
* `virtual_name` - Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0.

#### ebs

The following arguments are optional:

* `delete_on_termination` - Whether to delete the volume on termination. Defaults to unset, which is the value inherited from the parent image.
* `encrypted` - Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image.
* `iops` - Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume.
* `kms_key_id` - Amazon Resource Name (ARN) of the Key Management Service (KMS) Key for encryption.
* `snapshot_id` - Identifier of the EC2 Volume Snapshot.
* `throughput` - For GP3 volumes only. The throughput in MiB/s that the volume supports.
* `volume_size` - Size of the volume, in GiB.
* `volume_type` - Type of the volume. For example, `gp2` or `io2`.

### component

The `component` block supports the following arguments:

* `component_arn` - (Required) Amazon Resource Name (ARN) of the Image Builder Component to associate.
* `parameter` - (Optional) Configuration block(s) for parameters to configure the component. Detailed below.

### parameter

The following arguments are required:

* `name` - The name of the component parameter.
* `value` - The value for the named component parameter.

### systems_manager_agent

The following arguments are required:

* `uninstall_after_build` - Whether to remove the Systems Manager Agent after the image has been built. Defaults to `false`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of the image recipe.
* `date_created` - Date the image recipe was created.
* `owner` - Owner of the image recipe.
* `platform` - Platform of the image recipe.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
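
A hand-written sketch of the `user_data_base64` and `systems_manager_agent` arguments described above: the parent image, component ARN, and script are placeholder values, the struct class names are assumed to follow the generated-binding naming convention used elsewhere on this page, and cdktf's `Fn.base64encode` maps to Terraform's `base64encode` function.

```python
# A sketch with placeholder values: runs a build-prep script on the build
# instance and removes the SSM agent from the finished image.
from constructs import Construct
from cdktf import Fn, TerraformStack
from imports.aws.imagebuilder_image_recipe import (
    ImagebuilderImageRecipe,
    ImagebuilderImageRecipeComponent,
    ImagebuilderImageRecipeSystemsManagerAgent
)
class MyRecipeWithUserData(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        ImagebuilderImageRecipe(self, "example",
            name="example",
            parent_image="ami-0123456789abcdef0",  # placeholder AMI ID
            version="1.0.0",
            component=[ImagebuilderImageRecipeComponent(
                # placeholder component ARN
                component_arn="arn:aws:imagebuilder:us-east-1:123456789012:component/example/1.0.0/1"
            )],
            user_data_base64=Fn.base64encode("#!/bin/bash\nmkdir -p /opt/build"),
            systems_manager_agent=ImagebuilderImageRecipeSystemsManagerAgent(
                uninstall_after_build=True
            )
        )
```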
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_imagebuilder_image_recipe` resources using the Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_imagebuilder_image_recipe` resources using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_image_recipe.example arn:aws:imagebuilder:us-east-1:123456789012:image-recipe/example/1.0.0 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/imagebuilder_infrastructure_configuration.html.markdown b/website/docs/cdktf/python/r/imagebuilder_infrastructure_configuration.html.markdown new file mode 100644 index 00000000000..a56aacd2292 --- /dev/null +++ b/website/docs/cdktf/python/r/imagebuilder_infrastructure_configuration.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_infrastructure_configuration" +description: |- + Manages an Image Builder Infrastructure Configuration +--- + + + +# Resource: aws_imagebuilder_infrastructure_configuration + +Manages an Image Builder Infrastructure Configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.imagebuilder_infrastructure_configuration import ImagebuilderInfrastructureConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ImagebuilderInfrastructureConfiguration(self, "example", + description="example description", + instance_profile_name=Token.as_string(aws_iam_instance_profile_example.name), + instance_types=["t2.nano", "t3.micro"], + key_pair=Token.as_string(aws_key_pair_example.key_name), + logging=ImagebuilderInfrastructureConfigurationLogging( + s3_logs=ImagebuilderInfrastructureConfigurationLoggingS3Logs( + s3_bucket_name=Token.as_string(aws_s3_bucket_example.bucket), + s3_key_prefix="logs" + ) + ), + name="example", + security_group_ids=[Token.as_string(aws_security_group_example.id)], + sns_topic_arn=Token.as_string(aws_sns_topic_example.arn), + subnet_id=main.id, + tags={ + "foo": "bar" + }, + terminate_instance_on_failure=True + ) +``` + +## Argument Reference + +The following arguments are required: + +* `instance_profile_name` - (Required) Name of IAM Instance Profile. +* `name` - (Required) Name for the configuration. + +The following arguments are optional: + +* `description` - (Optional) Description for the configuration. +* `instance_metadata_options` - (Optional) Configuration block with instance metadata options for the HTTP requests that pipeline builds use to launch EC2 build and test instances. Detailed below. +* `instance_types` - (Optional) Set of EC2 Instance Types. +* `key_pair` - (Optional) Name of EC2 Key Pair. +* `logging` - (Optional) Configuration block with logging settings. Detailed below. 
+* `resource_tags` - (Optional) Key-value map of resource tags to assign to infrastructure created by the configuration.
* `security_group_ids` - (Optional) Set of EC2 Security Group identifiers.
* `sns_topic_arn` - (Optional) Amazon Resource Name (ARN) of SNS Topic.
* `subnet_id` - (Optional) EC2 Subnet identifier. Also requires `security_group_ids` argument.
* `tags` - (Optional) Key-value map of resource tags to assign to the configuration. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `terminate_instance_on_failure` - (Optional) Enable if the instance should be terminated when the pipeline fails. Defaults to `false`.

### instance_metadata_options

The following arguments are optional:

* `http_put_response_hop_limit` - The number of hops that an instance can traverse to reach its destination.
* `http_tokens` - Whether a signed token is required for instance metadata retrieval requests. Valid values: `required`, `optional`.

### logging

The following arguments are required:

* `s3_logs` - (Required) Configuration block with S3 logging settings. Detailed below.

### s3_logs

The following arguments are required:

* `s3_bucket_name` - (Required) Name of the S3 Bucket.

The following arguments are optional:

* `s3_key_prefix` - (Optional) Prefix to use for S3 logs. Defaults to `/`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Amazon Resource Name (ARN) of the configuration.
* `arn` - Amazon Resource Name (ARN) of the configuration.
* `date_created` - Date when the configuration was created.
* `date_updated` - Date when the configuration was updated.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_imagebuilder_infrastructure_configuration` using the Amazon Resource Name (ARN). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_imagebuilder_infrastructure_configuration` using the Amazon Resource Name (ARN). For example:

```console
% terraform import aws_imagebuilder_infrastructure_configuration.example arn:aws:imagebuilder:us-east-1:123456789012:infrastructure-configuration/example
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/inspector2_delegated_admin_account.html.markdown b/website/docs/cdktf/python/r/inspector2_delegated_admin_account.html.markdown new file mode 100644 index 00000000000..613ccf99920 --- /dev/null +++ b/website/docs/cdktf/python/r/inspector2_delegated_admin_account.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Inspector" +layout: "aws" +page_title: "AWS: aws_inspector2_delegated_admin_account" +description: |- + Terraform resource for managing an Amazon Inspector Delegated Admin Account. 
+--- + + + +# Resource: aws_inspector2_delegated_admin_account + +Terraform resource for managing an Amazon Inspector Delegated Admin Account. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.inspector2_delegated_admin_account import Inspector2DelegatedAdminAccount +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + Inspector2DelegatedAdminAccount(self, "example", + account_id=Token.as_string(current.account_id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `account_id` - (Required) Account to enable as delegated admin account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `relationship_status` - Status of this delegated admin account. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `15m`) +* `delete` - (Default `15m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Inspector Delegated Admin Account using the `account_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Inspector Delegated Admin Account using the `account_id`. For example: + +```console +% terraform import aws_inspector2_delegated_admin_account.example 012345678901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/inspector2_enabler.html.markdown b/website/docs/cdktf/python/r/inspector2_enabler.html.markdown new file mode 100644 index 00000000000..6592e75bd86 --- /dev/null +++ b/website/docs/cdktf/python/r/inspector2_enabler.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Inspector" +layout: "aws" +page_title: "AWS: aws_inspector2_enabler" +description: |- + Terraform resource for enabling Amazon Inspector resource scans. +--- + + + +# Resource: aws_inspector2_enabler + +Terraform resource for enabling Amazon Inspector resource scans. + +This resource must be created in the Organization's Administrator Account. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.inspector2_enabler import Inspector2Enabler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Inspector2Enabler(self, "example", + account_ids=["123456789012"], + resource_types=["EC2"] + ) +``` + +### For the Calling Account + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.inspector2_enabler import Inspector2Enabler +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + Inspector2Enabler(self, "test", + account_ids=[Token.as_string(current.account_id)], + resource_types=["ECR", "EC2"] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `account_ids` - (Required) Set of account IDs. + Can contain one of: the Organization's Administrator Account, or one or more Member Accounts. +* `resource_types` - (Required) Type of resources to scan. + Valid values are `EC2`, `ECR`, and `LAMBDA`. + At least one item is required. + +## Attribute Reference + +This resource exports no additional attributes. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `update` - (Default `5m`) +* `delete` - (Default `5m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/inspector2_member_association.html.markdown b/website/docs/cdktf/python/r/inspector2_member_association.html.markdown new file mode 100644 index 00000000000..2e675c49427 --- /dev/null +++ b/website/docs/cdktf/python/r/inspector2_member_association.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Inspector" +layout: "aws" +page_title: "AWS: aws_inspector2_member_association" +description: |- + Terraform resource for managing an Amazon Inspector Member Association. +--- + + + +# Resource: aws_inspector2_member_association + +Terraform resource for associating accounts to existing Inspector instances. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
from imports.aws.inspector2_member_association import Inspector2MemberAssociation
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        Inspector2MemberAssociation(self, "example",
            account_id="123456789012"
        )
```

## Argument Reference

The following argument is required:

* `account_id` - (Required) ID of the account to associate.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `delegated_admin_account_id` - Account ID of the delegated administrator account.
* `relationship_status` - Status of the member relationship.
* `updated_at` - Date and time of the last update of the relationship.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Inspector Member Association using the `account_id`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Amazon Inspector Member Association using the `account_id`. For example:

```console
% terraform import aws_inspector2_member_association.example 123456789012
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/inspector2_organization_configuration.html.markdown b/website/docs/cdktf/python/r/inspector2_organization_configuration.html.markdown new file mode 100644 index 00000000000..0e0b3a26219 --- /dev/null +++ b/website/docs/cdktf/python/r/inspector2_organization_configuration.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "Inspector" +layout: "aws" +page_title: "AWS: aws_inspector2_organization_configuration" +description: |- + Terraform resource for managing an Amazon Inspector Organization Configuration. +---

# Resource: aws_inspector2_organization_configuration

Terraform resource for managing an Amazon Inspector Organization Configuration.

~> **NOTE:** In order for this resource to work, the account you use must be an Inspector Delegated Admin Account.

~> **NOTE:** When this resource is deleted, EC2, ECR and Lambda scans will no longer be automatically enabled for new members of your Amazon Inspector organization.

## Example Usage

### Basic Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.inspector2_organization_configuration import (
    Inspector2OrganizationConfiguration,
    Inspector2OrganizationConfigurationAutoEnable
)
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        Inspector2OrganizationConfiguration(self, "example",
            auto_enable=Inspector2OrganizationConfigurationAutoEnable(
                ec2=True,
                ecr=False,
                lambda_=True
            )
        )
```

## Argument Reference

The following arguments are required:

* `auto_enable` - (Required) Configuration block for auto enabling. See below.

### `auto_enable`

* `ec2` - (Required) Whether Amazon EC2 scans are automatically enabled for new members of your Amazon Inspector organization. 
+* `ecr` - (Required) Whether Amazon ECR scans are automatically enabled for new members of your Amazon Inspector organization.
* `lambda` - (Optional) Whether Lambda Function scans are automatically enabled for new members of your Amazon Inspector organization.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `max_account_limit_reached` - Whether your configuration reached the max account limit.

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

* `create` - (Default `5m`)
* `update` - (Default `5m`)
* `delete` - (Default `5m`)

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/inspector_assessment_target.html.markdown b/website/docs/cdktf/python/r/inspector_assessment_target.html.markdown new file mode 100644 index 00000000000..7a9fb2974f8 --- /dev/null +++ b/website/docs/cdktf/python/r/inspector_assessment_target.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "Inspector Classic" +layout: "aws" +page_title: "AWS: aws_inspector_assessment_target" +description: |- + Provides an Inspector Classic Assessment Target. +---

# Resource: aws_inspector_assessment_target

Provides an Inspector Classic Assessment Target

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.inspector_assessment_target import InspectorAssessmentTarget
from imports.aws.inspector_resource_group import InspectorResourceGroup
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        bar = InspectorResourceGroup(self, "bar",
            tags={
                "Env": "bar",
                "Name": "foo"
            }
        )
        InspectorAssessmentTarget(self, "foo",
            name="assessment target",
            resource_group_arn=bar.arn
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name of the assessment target.
* `resource_group_arn` - (Optional) Inspector Resource Group Amazon Resource Name (ARN) stating tags for instance matching. If not specified, all EC2 instances in the current AWS account and region are included in the assessment target.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The target assessment ARN.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Inspector Classic Assessment Targets using their Amazon Resource Name (ARN). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Inspector Classic Assessment Targets using their Amazon Resource Name (ARN). 
For example: + +```console +% terraform import aws_inspector_assessment_target.example arn:aws:inspector:us-east-1:123456789012:target/0-xxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/inspector_assessment_template.html.markdown b/website/docs/cdktf/python/r/inspector_assessment_template.html.markdown new file mode 100644 index 00000000000..bd60770dc89 --- /dev/null +++ b/website/docs/cdktf/python/r/inspector_assessment_template.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Inspector Classic" +layout: "aws" +page_title: "AWS: aws_inspector_assessment_template" +description: |- + Provides an Inspector Classic Assessment Template. +--- + + + +# Resource: aws_inspector_assessment_template + +Provides an Inspector Classic Assessment Template + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.inspector_assessment_template import InspectorAssessmentTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + InspectorAssessmentTemplate(self, "example", + duration=3600, + event_subscription=[InspectorAssessmentTemplateEventSubscription( + event="ASSESSMENT_RUN_COMPLETED", + topic_arn=Token.as_string(aws_sns_topic_example.arn) + ) + ], + name="example", + rules_package_arns=["arn:aws:inspector:us-west-2:758058086616:rulespackage/0-9hgA516p", "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-H5hpSawc", "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-JJOtZiqQ", "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-vg5GGHSD" + ], + target_arn=Token.as_string(aws_inspector_assessment_target_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the assessment template. +* `target_arn` - (Required) The assessment target ARN to attach the template to. +* `duration` - (Required) The duration of the inspector run. +* `rules_package_arns` - (Required) The rules to be used during the run. +* `event_subscription` - (Optional) A block that enables sending notifications about a specified assessment template event to a designated SNS topic. See [Event Subscriptions](#event-subscriptions) for details. +* `tags` - (Optional) Key-value map of tags for the Inspector assessment template. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Event Subscriptions + +The event subscription configuration block supports the following arguments: + +* `event` - (Required) The event for which you want to receive SNS notifications. Valid values are `ASSESSMENT_RUN_STARTED`, `ASSESSMENT_RUN_COMPLETED`, `ASSESSMENT_RUN_STATE_CHANGED`, and `FINDING_REPORTED`. +* `topic_arn` - (Required) The ARN of the SNS topic to which notifications are sent. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The template assessment ARN. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_inspector_assessment_template` using the template assessment ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_inspector_assessment_template` using the template assessment ARN. For example: + +```console +% terraform import aws_inspector_assessment_template.example arn:aws:inspector:us-west-2:123456789012:target/0-9IaAzhGR/template/0-WEcjR8CH +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/inspector_resource_group.html.markdown b/website/docs/cdktf/python/r/inspector_resource_group.html.markdown new file mode 100644 index 00000000000..a38d512c53e --- /dev/null +++ b/website/docs/cdktf/python/r/inspector_resource_group.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Inspector Classic" +layout: "aws" +page_title: "AWS: aws_inspector_resource_group" +description: |- + Provides an Amazon Inspector Classic Resource Group. +--- + + + +# Resource: aws_inspector_resource_group + +Provides an Amazon Inspector Classic Resource Group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.inspector_resource_group import InspectorResourceGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + InspectorResourceGroup(self, "example", + tags={ + "Env": "bar", + "Name": "foo" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `tags` - (Required) Key-value map of tags that are used to select the EC2 instances to be included in an [Amazon Inspector assessment target](/docs/providers/aws/r/inspector_assessment_target.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The resource group ARN. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/instance.html.markdown b/website/docs/cdktf/python/r/instance.html.markdown index 0c17c25a439..10d19fcec7f 100644 --- a/website/docs/cdktf/python/r/instance.html.markdown +++ b/website/docs/cdktf/python/r/instance.html.markdown @@ -233,7 +233,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ami` - (Optional) AMI to use for the instance. Required unless `launch_template` is specified and the Launch Template specifes an AMI. If an AMI is specified in the Launch Template, setting `ami` will override the AMI specified in the Launch Template. * `associate_public_ip_address` - (Optional) Whether to associate a public IP address with an instance in a VPC. 
@@ -457,9 +457,9 @@ The `launch_template` block supports the following: * `name` - Name of the launch template. Conflicts with `id`. * `version` - Template version. Can be a specific version number, `$Latest` or `$Default`. The default value is `$Default`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the instance. * `capacity_reservation_specification` - Capacity reservation specification of the instance. @@ -496,10 +496,21 @@ For `instance_market_options`, in addition to the arguments above, the following ## Import -Instances can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import instances using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_instance.web i-12345678 + +Using `terraform import`, import instances using the `id`. For example: + +```console +% terraform import aws_instance.web i-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/internet_gateway.html.markdown b/website/docs/cdktf/python/r/internet_gateway.html.markdown new file mode 100644 index 00000000000..dab1c386be6 --- /dev/null +++ b/website/docs/cdktf/python/r/internet_gateway.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_internet_gateway" +description: |- + Provides a resource to create a VPC Internet Gateway. +--- + + + +# Resource: aws_internet_gateway + +Provides a resource to create a VPC Internet Gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.internet_gateway import InternetGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + InternetGateway(self, "gw", + tags={ + "Name": "main" + }, + vpc_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `vpc_id` - (Optional) The VPC ID to create in. See the [aws_internet_gateway_attachment](internet_gateway_attachment.html) resource for an alternate way to attach an Internet Gateway to a VPC. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +-> **Note:** It's recommended to denote that the AWS Instance or Elastic IP depends on the Internet Gateway. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.instance import Instance +from imports.aws.internet_gateway import InternetGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + gw = InternetGateway(self, "gw", + vpc_id=main.id + ) + Instance(self, "foo", + depends_on=[gw] + ) +``` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Internet Gateway. +* `arn` - The ARN of the Internet Gateway. +* `owner_id` - The ID of the AWS account that owns the internet gateway. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `20m`) +- `update` - (Default `20m`) +- `delete` - (Default `20m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Internet Gateways using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Internet Gateways using the `id`. For example: + +```console +% terraform import aws_internet_gateway.gw igw-c0a643a9 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/internet_gateway_attachment.html.markdown b/website/docs/cdktf/python/r/internet_gateway_attachment.html.markdown new file mode 100644 index 00000000000..bfd0d581457 --- /dev/null +++ b/website/docs/cdktf/python/r/internet_gateway_attachment.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_internet_gateway_attachment" +description: |- + Provides a resource to create a VPC Internet Gateway Attachment. +--- + + + +# Resource: aws_internet_gateway_attachment + +Provides a resource to create a VPC Internet Gateway Attachment. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.internet_gateway import InternetGateway +from imports.aws.internet_gateway_attachment import InternetGatewayAttachment +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = InternetGateway(self, "example") + aws_vpc_example = Vpc(self, "example_1", + cidr_block="10.1.0.0/16" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_vpc_example.override_logical_id("example") + aws_internet_gateway_attachment_example = InternetGatewayAttachment(self, "example_2", + internet_gateway_id=example.id, + vpc_id=Token.as_string(aws_vpc_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_internet_gateway_attachment_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `internet_gateway_id` - (Required) The ID of the internet gateway.
* `vpc_id` - (Required) The ID of the VPC.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the VPC and Internet Gateway separated by a colon.

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `20m`)
- `delete` - (Default `20m`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Internet Gateway Attachments using the `id`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Internet Gateway Attachments using the `id`. For example:

```console
% terraform import aws_internet_gateway_attachment.example igw-c0a643a9:vpc-123456
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/internetmonitor_monitor.html.markdown b/website/docs/cdktf/python/r/internetmonitor_monitor.html.markdown new file mode 100644 index 00000000000..bb7ee4b1216 --- /dev/null +++ b/website/docs/cdktf/python/r/internetmonitor_monitor.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "CloudWatch Internet Monitor" +layout: "aws" +page_title: "AWS: aws_internetmonitor_monitor" +description: |- + Provides a CloudWatch Internet Monitor Monitor resource +---

# Resource: aws_internetmonitor_monitor

Provides an Internet Monitor Monitor resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.internetmonitor_monitor import InternetmonitorMonitor
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        InternetmonitorMonitor(self, "example",
            monitor_name="example"
        )
```

## Argument Reference

The following arguments are required:

* `monitor_name` - (Required) The name of the monitor.

The following arguments are optional:

* `health_events_config` - (Optional) Health event thresholds. A health event threshold percentage, for performance and availability, determines when Internet Monitor creates a health event when there's an internet issue that affects your application end users. See [Health Events Config](#health-events-config) below.
* `internet_measurements_log_delivery` - (Optional) Publish internet measurements for Internet Monitor to an Amazon S3 bucket in addition to CloudWatch Logs.
* `max_city_networks_to_monitor` - (Optional) The maximum number of city-networks to monitor for your resources. A city-network is the location (city) where clients access your application resources from and the network or ASN, such as an internet service provider (ISP), that clients access the resources through. This limit helps control billing costs. 
+* `resources` - (Optional) The resources to include in a monitor, which you provide as a set of Amazon Resource Names (ARNs). +* `status` - (Optional) The status for a monitor. The accepted values for Status with the UpdateMonitor API call are the following: `ACTIVE` and `INACTIVE`. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `traffic_percentage_to_monitor` - (Optional) The percentage of the internet-facing traffic for your application that you want to monitor with this monitor. + +### Health Events Config + +Defines the health event threshold percentages, for performance score and availability score. Amazon CloudWatch Internet Monitor creates a health event when there's an internet issue that affects your application end users where a health score percentage is at or below a set threshold. If you don't set a health event threshold, the default value is 95%. + +* `availability_score_threshold` - (Optional) The health event threshold percentage set for availability scores. +* `performance_score_threshold` - (Optional) The health event threshold percentage set for performance scores. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Monitor. +* `id` - Name of the monitor. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Internet Monitor Monitors using the `monitor_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Internet Monitor Monitors using the `monitor_name`. For example: + +```console +% terraform import aws_internetmonitor_monitor.some some-monitor +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_authorizer.html.markdown b/website/docs/cdktf/python/r/iot_authorizer.html.markdown new file mode 100644 index 00000000000..24ace73d40c --- /dev/null +++ b/website/docs/cdktf/python/r/iot_authorizer.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_authorizer" +description: |- + Creates and manages an AWS IoT Authorizer. +--- + + + +# Resource: aws_iot_authorizer + +Creates and manages an AWS IoT Authorizer. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
from imports.aws.iot_authorizer import IotAuthorizer
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        IotAuthorizer(self, "example",
            authorizer_function_arn=Token.as_string(aws_lambda_function_example.arn),
            name="example",
            signing_disabled=False,
            status="ACTIVE",
            token_key_name="Token-Header",
            token_signing_public_keys={
                "Key1": Token.as_string(
                    Fn.file("test-fixtures/iot-authorizer-signing-key.pem"))
            }
        )
```

## Argument Reference

* `authorizer_function_arn` - (Required) The ARN of the authorizer's Lambda function.
* `enable_caching_for_http` - (Optional) Specifies whether the HTTP caching is enabled or not. Default: `false`.
* `name` - (Required) The name of the authorizer.
* `signing_disabled` - (Optional) Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
* `status` - (Optional) The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
* `token_key_name` - (Optional) The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
* `token_signing_public_keys` - (Optional) The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the authorizer.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT Authorizers using the name. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import IoT Authorizers using the name. For example:

```console
% terraform import aws_iot_authorizer.example example
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_certificate.html.markdown b/website/docs/cdktf/python/r/iot_certificate.html.markdown new file mode 100644 index 00000000000..a626fe01508 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_certificate.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_certificate" +description: |- + Creates and manages an AWS IoT certificate. +---

# Resource: aws_iot_certificate

Creates and manages an AWS IoT certificate.

## Example Usage

### With CSR

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.iot_certificate import IotCertificate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotCertificate(self, "cert", + active=True, + csr=Token.as_string(Fn.file("/my/csr.pem")) + ) +``` + +### Without CSR + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_certificate import IotCertificate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotCertificate(self, "cert", + active=True + ) +``` + +### From existing certificate without a CA + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_certificate import IotCertificate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotCertificate(self, "cert", + active=True, + certificate_pem=Token.as_string(Fn.file("/my/cert.pem")) + ) +``` + +## Argument Reference + +* `active` - (Required) Boolean flag to indicate if the certificate should be active +* `csr` - (Optional) The certificate signing request. Review + [CreateCertificateFromCsr](https://docs.aws.amazon.com/iot/latest/apireference/API_CreateCertificateFromCsr.html) + for more information on generating a certificate from a certificate signing request (CSR). + If none is specified both the certificate and keys will be generated, review [CreateKeysAndCertificate](https://docs.aws.amazon.com/iot/latest/apireference/API_CreateKeysAndCertificate.html) + for more information on generating keys and a certificate. +* `certificate_pem` - (Optional) The certificate to be registered. If `ca_pem` is unspecified, review + [RegisterCertificateWithoutCA](https://docs.aws.amazon.com/iot/latest/apireference/API_RegisterCertificateWithoutCA.html). + If `ca_pem` is specified, review + [RegisterCertificate](https://docs.aws.amazon.com/iot/latest/apireference/API_RegisterCertificate.html) + for more information on registering a certificate. +* `ca_pem` - (Optional) The CA certificate for the certificate to be registered. If this is set, the CA needs to be registered with AWS IoT beforehand. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The internal ID assigned to this certificate. +* `arn` - The ARN of the created certificate. +* `certificate_pem` - The certificate data, in PEM format. +* `public_key` - When neither CSR nor certificate is provided, the public key. +* `private_key` - When neither CSR nor certificate is provided, the private key. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_indexing_configuration.html.markdown b/website/docs/cdktf/python/r/iot_indexing_configuration.html.markdown new file mode 100644 index 00000000000..a8a2f5b5859 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_indexing_configuration.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_indexing_configuration" +description: |- + Managing IoT Thing indexing. 
+---
+
+
+
+# Resource: aws_iot_indexing_configuration
+
+Manages [IoT Thing indexing](https://docs.aws.amazon.com/iot/latest/developerguide/managing-index.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iot_indexing_configuration import IotIndexingConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IotIndexingConfiguration(self, "example",
+            thing_indexing_configuration=IotIndexingConfigurationThingIndexingConfiguration(
+                custom_field=[IotIndexingConfigurationThingIndexingConfigurationCustomField(
+                    name="shadow.desired.power",
+                    type="Boolean"
+                ), IotIndexingConfigurationThingIndexingConfigurationCustomField(
+                    name="attributes.version",
+                    type="Number"
+                ), IotIndexingConfigurationThingIndexingConfigurationCustomField(
+                    name="shadow.name.thing1shadow.desired.DefaultDesired",
+                    type="String"
+                ), IotIndexingConfigurationThingIndexingConfigurationCustomField(
+                    name="deviceDefender.securityProfile1.NUMBER_VALUE_BEHAVIOR.lastViolationValue.number",
+                    type="Number"
+                )
+                ],
+                device_defender_indexing_mode="VIOLATIONS",
+                named_shadow_indexing_mode="ON",
+                thing_connectivity_indexing_mode="STATUS",
+                thing_indexing_mode="REGISTRY_AND_SHADOW"
+            )
+        )
+```
+
+## Argument Reference
+
+* `thing_group_indexing_configuration` - (Optional) Thing group indexing configuration. See below.
+* `thing_indexing_configuration` - (Optional) Thing indexing configuration. See below.
+
+### thing_group_indexing_configuration
+
+The `thing_group_indexing_configuration` configuration block supports the following:
+
+* `custom_field` - (Optional) A list of thing group fields to index. This list cannot contain any managed fields. See below.
+* `managed_field` - (Optional) Contains fields that are indexed and whose types are already known by the Fleet Indexing service. See below.
+* `thing_group_indexing_mode` - (Required) Thing group indexing mode. Valid values: `OFF`, `ON`.
+
+### thing_indexing_configuration
+
+The `thing_indexing_configuration` configuration block supports the following:
+
+* `custom_field` - (Optional) Contains custom field names and their data type. See below.
+* `device_defender_indexing_mode` - (Optional) Device Defender indexing mode. Valid values: `VIOLATIONS`, `OFF`. Default: `OFF`.
+* `managed_field` - (Optional) Contains fields that are indexed and whose types are already known by the Fleet Indexing service. See below.
+* `named_shadow_indexing_mode` - (Optional) [Named shadow](https://docs.aws.amazon.com/iot/latest/developerguide/iot-device-shadows.html) indexing mode. Valid values: `ON`, `OFF`. Default: `OFF`.
+* `thing_connectivity_indexing_mode` - (Optional) Thing connectivity indexing mode. Valid values: `STATUS`, `OFF`. Default: `OFF`.
+* `thing_indexing_mode` - (Required) Thing indexing mode. Valid values: `REGISTRY`, `REGISTRY_AND_SHADOW`, `OFF`.
+
+### field
+
+The `custom_field` and `managed_field` configuration blocks support the following:
+
+* `name` - (Optional) The name of the field.
+* `type` - (Optional) The data type of the field. Valid values: `Number`, `String`, `Boolean`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
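+
+The converted example above configures thing indexing only. As a minimal hand-written sketch (not `cdktf convert` output), thing group indexing can be enabled the same way; the `IotIndexingConfigurationThingGroupIndexingConfiguration` struct name below is assumed to follow the generated-binding naming pattern used above:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+# Assumed binding names: the struct class is expected to live alongside the
+# resource class, following the generated-binding pattern shown above.
+from imports.aws.iot_indexing_configuration import (
+    IotIndexingConfiguration,
+    IotIndexingConfigurationThingGroupIndexingConfiguration
+)
+class ThingGroupIndexingSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Turn on thing group indexing; thing_group_indexing_mode is the
+        # only required argument of this block.
+        IotIndexingConfiguration(self, "example",
+            thing_group_indexing_configuration=IotIndexingConfigurationThingGroupIndexingConfiguration(
+                thing_group_indexing_mode="ON"
+            )
+        )
+```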
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_logging_options.html.markdown b/website/docs/cdktf/python/r/iot_logging_options.html.markdown new file mode 100644 index 00000000000..fb922eb2ae8 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_logging_options.html.markdown @@ -0,0 +1,45 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_logging_options" +description: |- + Provides a resource to manage default logging options. +--- + + + +# Resource: aws_iot_logging_options + +Provides a resource to manage [default logging options](https://docs.aws.amazon.com/iot/latest/developerguide/configure-logging.html#configure-logging-console). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_logging_options import IotLoggingOptions +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotLoggingOptions(self, "example", + default_log_level="WARN", + role_arn=Token.as_string(aws_iam_role_example.arn) + ) +``` + +## Argument Reference + +* `default_log_level` - (Optional) The default logging level. Valid Values: `"DEBUG"`, `"INFO"`, `"ERROR"`, `"WARN"`, `"DISABLED"`. +* `disable_all_logs` - (Optional) If `true` all logs are disabled. The default is `false`. +* `role_arn` - (Required) The ARN of the role that allows IoT to write to Cloudwatch logs. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_policy.html.markdown b/website/docs/cdktf/python/r/iot_policy.html.markdown new file mode 100644 index 00000000000..fda0587f707 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_policy.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_policy" +description: |- + Provides an IoT policy. +--- + + + +# Resource: aws_iot_policy + +Provides an IoT policy. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_policy import IotPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotPolicy(self, "pubsub", + name="PubSubToAnyTopic", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["iot:*"], + "Effect": "Allow", + "Resource": "*" + } + ], + "Version": "2012-10-17" + })) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the policy. +* `policy` - (Required) The policy document. This is a JSON formatted string. Use the [IoT Developer Guide](http://docs.aws.amazon.com/iot/latest/developerguide/iot-policies.html) for more information on IoT Policies. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS to this policy. +* `name` - The name of this policy. +* `default_version_id` - The default version of this policy. +* `policy` - The policy document. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT policies using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IoT policies using the `name`. For example: + +```console +% terraform import aws_iot_policy.pubsub PubSubToAnyTopic +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_policy_attachment.html.markdown b/website/docs/cdktf/python/r/iot_policy_attachment.html.markdown new file mode 100644 index 00000000000..1ceff848a79 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_policy_attachment.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_policy_attachment" +description: |- + Provides an IoT policy attachment. +--- + + + +# Resource: aws_iot_policy_attachment + +Provides an IoT policy attachment. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iot_certificate import IotCertificate +from imports.aws.iot_policy import IotPolicy +from imports.aws.iot_policy_attachment import IotPolicyAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + cert = IotCertificate(self, "cert", + active=True, + csr=Token.as_string(Fn.file("csr.pem")) + ) + pubsub = DataAwsIamPolicyDocument(self, "pubsub", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["iot:*"], + effect="Allow", + resources=["*"] + ) + ] + ) + aws_iot_policy_pubsub = IotPolicy(self, "pubsub_2", + name="PubSubToAnyTopic", + policy=Token.as_string(pubsub.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iot_policy_pubsub.override_logical_id("pubsub") + IotPolicyAttachment(self, "att", + policy=Token.as_string(aws_iot_policy_pubsub.name), + target=cert.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The name of the policy to attach. +* `target` - (Required) The identity to which the policy is attached. + +## Attribute Reference + +This resource exports no additional attributes. 
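+
+The `target` is typically a certificate ARN, as in the example above; AWS IoT also allows attaching policies to Amazon Cognito identities. A minimal hand-written sketch (not `cdktf convert` output; the policy name and identity ID below are placeholders):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.iot_policy_attachment import IotPolicyAttachment
+class CognitoAttachmentSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Both values below are placeholders: reference an existing
+        # aws_iot_policy and a real Cognito Identity ID in practice.
+        IotPolicyAttachment(self, "cognito_att",
+            policy="PubSubToAnyTopic",
+            target="us-east-1:12345678-1234-1234-1234-123456789012"
+        )
+```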
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_provisioning_template.html.markdown b/website/docs/cdktf/python/r/iot_provisioning_template.html.markdown new file mode 100644 index 00000000000..5ef7a3232ad --- /dev/null +++ b/website/docs/cdktf/python/r/iot_provisioning_template.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_provisioning_template" +description: |- + Manages an IoT fleet provisioning template. +--- + + + +# Resource: aws_iot_provisioning_template + +Manages an IoT fleet provisioning template. For more info, see the AWS documentation on [fleet provisioning](https://docs.aws.amazon.com/iot/latest/developerguide/provision-wo-cert.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +from imports.aws.iot_policy import IotPolicy +from imports.aws.iot_provisioning_template import IotProvisioningTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + device_policy = DataAwsIamPolicyDocument(self, "device_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["iot:Subscribe"], + resources=["*"] + ) + ] + ) + iot_assume_role_policy = DataAwsIamPolicyDocument(self, "iot_assume_role_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["iot.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + iot_fleet_provisioning = IamRole(self, "iot_fleet_provisioning", + assume_role_policy=Token.as_string(iot_assume_role_policy.json), + name="IoTProvisioningServiceRole", + path="/service-role/" + ) + IamRolePolicyAttachment(self, "iot_fleet_provisioning_registration", + policy_arn="arn:aws:iam::aws:policy/service-role/AWSIoTThingsRegistration", + role=iot_fleet_provisioning.name + ) + aws_iot_policy_device_policy = IotPolicy(self, "device_policy_4", + name="DevicePolicy", + policy=Token.as_string(device_policy.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iot_policy_device_policy.override_logical_id("device_policy") + IotProvisioningTemplate(self, "fleet", + description="My provisioning template", + name="FleetTemplate", + provisioning_role_arn=iot_fleet_provisioning.arn, + template_body=Token.as_string( + Fn.jsonencode({ + "Parameters": { + "SerialNumber": { + "Type": "String" + } + }, + "Resources": { + "certificate": { + "Properties": { + "CertificateId": { + "Ref": "AWS::IoT::Certificate::Id" + }, + "Status": "Active" + }, + "Type": "AWS::IoT::Certificate" + }, + "policy": { + "Properties": { + "PolicyName": aws_iot_policy_device_policy.name + }, + "Type": "AWS::IoT::Policy" + } + } + })) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the fleet provisioning template. +* `description` - (Optional) The description of the fleet provisioning template. 
+* `enabled` - (Optional) True to enable the fleet provisioning template, otherwise false. +* `pre_provisioning_hook` - (Optional) Creates a pre-provisioning hook template. Details below. +* `provisioning_role_arn` - (Required) The role ARN for the role associated with the fleet provisioning template. This IoT role grants permission to provision a device. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `template_body` - (Required) The JSON formatted contents of the fleet provisioning template. + +### pre_provisioning_hook + +The `pre_provisioning_hook` configuration block supports the following: + +* `payload_version` - (Optional) The version of the payload that was sent to the target function. The only valid (and the default) payload version is `"2020-04-01"`. +* `target_arn` - (Optional) The ARN of the target function. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN that identifies the provisioning template. +* `default_version_id` - The default version of the fleet provisioning template. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT fleet provisioning templates using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IoT fleet provisioning templates using the `name`. For example: + +```console +% terraform import aws_iot_provisioning_template.fleet FleetProvisioningTemplate +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_role_alias.html.markdown b/website/docs/cdktf/python/r/iot_role_alias.html.markdown new file mode 100644 index 00000000000..2aa894563d4 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_role_alias.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_role_alias" +description: |- + Provides an IoT role alias. +--- + + + +# Resource: aws_iot_role_alias + +Provides an IoT role alias. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iot_role_alias import IotRoleAlias +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + actions=["sts:AssumeRole"], + effect="Allow", + principals=[{ + "identifiers": ["credentials.iot.amazonaws.com"], + "type": "Service" + } + ] + ) + role = IamRole(self, "role", + assume_role_policy=Token.as_string(assume_role.json), + name="dynamodb-access-role" + ) + IotRoleAlias(self, "alias", + alias="Thermostat-dynamodb-access-role-alias", + role_arn=role.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `alias` - (Required) The name of the role alias. +* `role_arn` - (Required) The identity of the role to which the alias refers. +* `credential_duration` - (Optional) The duration of the credential, in seconds. If you do not specify a value for this setting, the default maximum of one hour is applied. This setting can have a value from 900 seconds (15 minutes) to 43200 seconds (12 hours). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS to this role alias. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IOT Role Alias using the alias. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IOT Role Alias using the alias. For example: + +```console +% terraform import aws_iot_role_alias.example myalias +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_thing.html.markdown b/website/docs/cdktf/python/r/iot_thing.html.markdown new file mode 100644 index 00000000000..a585ba9885d --- /dev/null +++ b/website/docs/cdktf/python/r/iot_thing.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing" +description: |- + Creates and manages an AWS IoT Thing. +--- + + + +# Resource: aws_iot_thing + +Creates and manages an AWS IoT Thing. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_thing import IotThing +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotThing(self, "example", + attributes={ + "First": "examplevalue" + }, + name="example" + ) +``` + +## Argument Reference + +* `name` - (Required) The name of the thing. +* `attributes` - (Optional) Map of attributes of the thing. +* `thing_type_name` - (Optional) The thing type name. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `default_client_id` - The default client ID. +* `version` - The current version of the thing record in the registry. +* `arn` - The ARN of the thing. 
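+
+As a minimal hand-written sketch (not `cdktf convert` output) of the optional `thing_type_name` argument, a thing can reference a thing type managed in the same stack:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.iot_thing import IotThing
+from imports.aws.iot_thing_type import IotThingType
+class TypedThingSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # A thing type managed by Terraform; see the aws_iot_thing_type resource.
+        sensor_type = IotThingType(self, "sensor_type",
+            name="sensor"
+        )
+        # Referencing the type's name also creates an implicit dependency.
+        IotThing(self, "example",
+            name="example",
+            thing_type_name=sensor_type.name
+        )
+```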
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IOT Things using the name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IOT Things using the name. For example: + +```console +% terraform import aws_iot_thing.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_thing_group.html.markdown b/website/docs/cdktf/python/r/iot_thing_group.html.markdown new file mode 100644 index 00000000000..0bfe10e18eb --- /dev/null +++ b/website/docs/cdktf/python/r/iot_thing_group.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing_group" +description: |- + Manages an AWS IoT Thing Group. +--- + + + +# Resource: aws_iot_thing_group + +Manages an AWS IoT Thing Group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_thing_group import IotThingGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + parent = IotThingGroup(self, "parent", + name="parent" + ) + IotThingGroup(self, "example", + name="example", + parent_group_name=parent.name, + properties=IotThingGroupProperties( + attribute_payload=IotThingGroupPropertiesAttributePayload( + attributes={ + "One": "11111", + "Two": "TwoTwo" + } + ), + description="This is my thing group" + ), + tags={ + "terraform": "true" + } + ) +``` + +## Argument Reference + +* `name` - (Required) The name of the Thing Group. +* `parent_group_name` - (Optional) The name of the parent Thing Group. +* `properties` - (Optional) The Thing Group properties. Defined below. +* `tags` - (Optional) Key-value mapping of resource tags + +### properties Reference + +* `attribute_payload` - (Optional) The Thing Group attributes. Defined below. +* `description` - (Optional) A description of the Thing Group. + +### attribute_payload Reference + +* `attributes` - (Optional) Key-value map. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the Thing Group. +* `id` - The Thing Group ID. +* `version` - The current version of the Thing Group record in the registry. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT Things Groups using the name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IoT Things Groups using the name. 
For example: + +```console +% terraform import aws_iot_thing_group.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_thing_group_membership.html.markdown b/website/docs/cdktf/python/r/iot_thing_group_membership.html.markdown new file mode 100644 index 00000000000..594d8e93a7c --- /dev/null +++ b/website/docs/cdktf/python/r/iot_thing_group_membership.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing_group_membership" +description: |- + Adds an IoT Thing to an IoT Thing Group. +--- + + + +# Resource: aws_iot_thing_group_membership + +Adds an IoT Thing to an IoT Thing Group. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_thing_group_membership import IotThingGroupMembership +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotThingGroupMembership(self, "example", + override_dynamic_group=True, + thing_group_name="example-group", + thing_name="example-thing" + ) +``` + +## Argument Reference + +* `thing_name` - (Required) The name of the thing to add to a group. +* `thing_group_name` - (Required) The name of the group to which you are adding a thing. +* `override_dynamic_group` - (Optional) Override dynamic thing groups with static thing groups when 10-group limit is reached. If a thing belongs to 10 thing groups, and one or more of those groups are dynamic thing groups, adding a thing to a static group removes the thing from the last dynamic group. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The membership ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT Thing Group Membership using the thing group name and thing name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IoT Thing Group Membership using the thing group name and thing name. For example: + +```console +% terraform import aws_iot_thing_group_membership.example thing_group_name/thing_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_thing_principal_attachment.html.markdown b/website/docs/cdktf/python/r/iot_thing_principal_attachment.html.markdown new file mode 100644 index 00000000000..80b9010c2c2 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_thing_principal_attachment.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing_principal_attachment" +description: |- + Provides AWS IoT Thing Principal attachment. +--- + + + +# Resource: aws_iot_thing_principal_attachment + +Attaches Principal to AWS IoT Thing. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_certificate import IotCertificate +from imports.aws.iot_thing import IotThing +from imports.aws.iot_thing_principal_attachment import IotThingPrincipalAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + cert = IotCertificate(self, "cert", + active=True, + csr=Token.as_string(Fn.file("csr.pem")) + ) + example = IotThing(self, "example", + name="example" + ) + IotThingPrincipalAttachment(self, "att", + principal=cert.arn, + thing=example.name + ) +``` + +## Argument Reference + +* `principal` - (Required) The AWS IoT Certificate ARN or Amazon Cognito Identity ID. +* `thing` - (Required) The name of the thing. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_thing_type.html.markdown b/website/docs/cdktf/python/r/iot_thing_type.html.markdown new file mode 100644 index 00000000000..fa665d80c03 --- /dev/null +++ b/website/docs/cdktf/python/r/iot_thing_type.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing_type" +description: |- + Creates and manages an AWS IoT Thing Type. +--- + + + +# Resource: aws_iot_thing_type + +Creates and manages an AWS IoT Thing Type. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iot_thing_type import IotThingType +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IotThingType(self, "foo", + name="my_iot_thing" + ) +``` + +## Argument Reference + +* `name` - (Required, Forces New Resource) The name of the thing type. +* `deprecated` - (Optional, Defaults to false) Whether the thing type is deprecated. If true, no new things could be associated with this type. +* `properties` - (Optional), Configuration block that can contain the following properties of the thing type: + * `description` - (Optional, Forces New Resource) The description of the thing type. + * `searchable_attributes` - (Optional, Forces New Resource) A list of searchable thing attribute names. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the created AWS IoT Thing Type. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IOT Thing Types using the name. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IOT Thing Types using the name. For example: + +```console +% terraform import aws_iot_thing_type.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/iot_topic_rule.html.markdown b/website/docs/cdktf/python/r/iot_topic_rule.html.markdown new file mode 100644 index 00000000000..e89602f384d --- /dev/null +++ b/website/docs/cdktf/python/r/iot_topic_rule.html.markdown @@ -0,0 +1,267 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_topic_rule" +description: |- + Creates and manages an AWS IoT topic rule +--- + + + +# Resource: aws_iot_topic_rule + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.iot_topic_rule import IotTopicRule +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + myerrortopic = SnsTopic(self, "myerrortopic", + name="myerrortopic" + ) + mytopic = SnsTopic(self, "mytopic", + name="mytopic" + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["iot.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + iam_policy_for_lambda = DataAwsIamPolicyDocument(self, "iam_policy_for_lambda", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sns:Publish"], + effect="Allow", + resources=[mytopic.arn] + ) + ] + ) + role = IamRole(self, "role", + assume_role_policy=Token.as_string(assume_role.json), + name="myrole" + ) + aws_iam_role_policy_iam_policy_for_lambda = IamRolePolicy(self, "iam_policy_for_lambda_5", + name="mypolicy", + policy=Token.as_string(iam_policy_for_lambda.json), + role=role.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_iam_policy_for_lambda.override_logical_id("iam_policy_for_lambda") + IotTopicRule(self, "rule", + description="Example rule", + enabled=True, + error_action=IotTopicRuleErrorAction( + sns=IotTopicRuleErrorActionSns( + message_format="RAW", + role_arn=role.arn, + target_arn=myerrortopic.arn + ) + ), + name="MyRule", + sns=[IotTopicRuleSns( + message_format="RAW", + role_arn=role.arn, + target_arn=mytopic.arn + ) + ], + sql="SELECT * FROM 'topic/test'", + sql_version="2016-03-23" + ) +``` + +## Argument Reference + +* `name` - (Required) The name of the rule. +* `description` - (Optional) The description of the rule. +* `enabled` - (Required) Specifies whether the rule is enabled. +* `sql` - (Required) The SQL statement used to query the topic. 
For more information, see the [AWS IoT SQL Reference](http://docs.aws.amazon.com/iot/latest/developerguide/iot-rules.html#aws-iot-sql-reference) in the AWS IoT Developer Guide.
+* `sql_version` - (Required) The version of the SQL rules engine to use when evaluating the rule.
+* `error_action` - (Optional) Configuration block with error action to be associated with the rule. See the documentation for `cloudwatch_alarm`, `cloudwatch_logs`, `cloudwatch_metric`, `dynamodb`, `dynamodbv2`, `elasticsearch`, `firehose`, `http`, `iot_analytics`, `iot_events`, `kafka`, `kinesis`, `lambda`, `republish`, `s3`, `sns`, `sqs`, `step_functions`, `timestream` configuration blocks for further configuration details.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `cloudwatch_alarm` object takes the following arguments:
+
+* `alarm_name` - (Required) The CloudWatch alarm name.
+* `role_arn` - (Required) The IAM role ARN that allows access to the CloudWatch alarm.
+* `state_reason` - (Required) The reason for the alarm change.
+* `state_value` - (Required) The value of the alarm state. Acceptable values are `OK`, `ALARM`, and `INSUFFICIENT_DATA`.
+
+The `cloudwatch_logs` object takes the following arguments:
+
+* `log_group_name` - (Required) The CloudWatch log group name.
+* `role_arn` - (Required) The IAM role ARN that allows access to the CloudWatch log group.
+
+The `cloudwatch_metric` object takes the following arguments:
+
+* `metric_name` - (Required) The CloudWatch metric name.
+* `metric_namespace` - (Required) The CloudWatch metric namespace name.
+* `metric_timestamp` - (Optional) An optional [Unix timestamp](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#about_timestamp).
+* `metric_unit` - (Required) The metric unit (see the [supported units](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#Unit)).
+* `metric_value` - (Required) The CloudWatch metric value.
+* `role_arn` - (Required) The IAM role ARN that allows access to the CloudWatch metric.
+
+The `dynamodb` object takes the following arguments:
+
+* `hash_key_field` - (Required) The hash key name.
+* `hash_key_type` - (Optional) The hash key type. Valid values are `STRING` or `NUMBER`.
+* `hash_key_value` - (Required) The hash key value.
+* `payload_field` - (Optional) The action payload.
+* `range_key_field` - (Optional) The range key name.
+* `range_key_type` - (Optional) The range key type. Valid values are `STRING` or `NUMBER`.
+* `range_key_value` - (Optional) The range key value.
+* `operation` - (Optional) The operation. Valid values are `INSERT`, `UPDATE`, or `DELETE`.
+* `role_arn` - (Required) The ARN of the IAM role that grants access to the DynamoDB table.
+* `table_name` - (Required) The name of the DynamoDB table.
+
+The `dynamodbv2` object takes the following arguments:
+
+* `put_item` - (Required) Configuration block with DynamoDB Table to which the message will be written. Nested arguments below.
+    * `table_name` - (Required) The name of the DynamoDB table.
+* `role_arn` - (Required) The ARN of the IAM role that grants access to the DynamoDB table.
+
+The `elasticsearch` object takes the following arguments:
+
+* `endpoint` - (Required) The endpoint of your Elasticsearch domain.
+* `id` - (Required) The unique identifier for the document you are storing.
+* `index` - (Required) The Elasticsearch index where you want to store your data.
+* `role_arn` - (Required) The IAM role ARN that has access to Elasticsearch.
+* `type` - (Required) The type of document you are storing.
+
+The `firehose` object takes the following arguments:
+
+* `delivery_stream_name` - (Required) The delivery stream name.
+* `role_arn` - (Required) The IAM role ARN that grants access to the Amazon Kinesis Firehose stream.
+* `separator` - (Optional) A character separator that is used to separate records written to the Firehose stream. Valid values are: '\n' (newline), '\t' (tab), '\r\n' (Windows newline), ',' (comma).
+* `batch_mode` - (Optional) When `true`, a payload that contains a JSON array of records is sent to Kinesis Firehose as a single batch call.
+
+The `http` object takes the following arguments:
+
+* `url` - (Required) The HTTPS URL.
+* `confirmation_url` - (Optional) The HTTPS URL used to verify ownership of `url`.
+* `http_header` - (Optional) Custom HTTP headers that IoT Core should send. It is possible to define more than one custom header.
+
+The `http_header` object takes the following arguments:
+
+* `key` - (Required) The name of the HTTP header.
+* `value` - (Required) The value of the HTTP header.
+
+The `iot_analytics` object takes the following arguments:
+
+* `channel_name` - (Required) Name of the AWS IoT Analytics channel.
+* `role_arn` - (Required) The ARN of the IAM role that grants access.
+* `batch_mode` - (Optional) When `true`, a payload that contains a JSON array of records is sent to IoT Analytics as a single batch call.
+
+The `iot_events` object takes the following arguments:
+
+* `input_name` - (Required) The name of the AWS IoT Events input.
+* `role_arn` - (Required) The ARN of the IAM role that grants access.
+* `message_id` - (Optional) Use this to ensure that only one input (message) with a given messageId is processed by an AWS IoT Events detector.
+* `batch_mode` - (Optional) When `true`, a payload that contains a JSON array of records is sent to IoT Events as a single batch call.
+
+The `kafka` object takes the following arguments:
+
+* `client_properties` - (Required) Properties of the Apache Kafka producer client. For more info, see the [AWS documentation](https://docs.aws.amazon.com/iot/latest/developerguide/apache-kafka-rule-action.html).
+* `destination_arn` - (Required) The ARN of the Kafka action's VPC [`aws_iot_topic_rule_destination`](iot_topic_rule_destination.html).
+* `key` - (Optional) The Kafka message key.
+* `partition` - (Optional) The Kafka message partition.
+* `topic` - (Optional) The Kafka topic for messages to be sent to the Kafka broker.
+
+The `kinesis` object takes the following arguments:
+
+* `partition_key` - (Optional) The partition key.
+* `role_arn` - (Required) The ARN of the IAM role that grants access to the Amazon Kinesis stream.
+* `stream_name` - (Required) The name of the Amazon Kinesis stream.
+
+The `lambda` object takes the following arguments:
+
+* `function_arn` - (Required) The ARN of the Lambda function.
+
+The `republish` object takes the following arguments:
+
+* `role_arn` - (Required) The ARN of the IAM role that grants access.
+* `topic` - (Required) The name of the MQTT topic the message should be republished to.
+* `qos` - (Optional) The Quality of Service (QoS) level to use when republishing messages. Valid values are `0` or `1`. The default value is `0`.
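+
+As a minimal hand-written sketch (not `cdktf convert` output) of the `republish` action: the `IotTopicRuleRepublish` struct name is assumed to follow the generated-binding pattern used in the example above, and the role ARN is a placeholder:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.iot_topic_rule import IotTopicRule, IotTopicRuleRepublish
+class RepublishRuleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Republishes matching messages to another MQTT topic at QoS 1.
+        IotTopicRule(self, "republish_rule",
+            enabled=True,
+            name="RepublishRule",
+            republish=[IotTopicRuleRepublish(
+                qos=1,
+                role_arn="arn:aws:iam::123456789012:role/iot-republish-role",  # placeholder
+                topic="responses/test"
+            )
+            ],
+            sql="SELECT * FROM 'topic/test'",
+            sql_version="2016-03-23"
+        )
+```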
+
+The `s3` object takes the following arguments:
+
+* `bucket_name` - (Required) The Amazon S3 bucket name.
+* `canned_acl` - (Optional) The Amazon S3 canned ACL that controls access to the object identified by the object key. [Valid values](https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl).
+* `key` - (Required) The object key.
+* `role_arn` - (Required) The ARN of the IAM role that grants access.
+
+The `sns` object takes the following arguments:
+
+* `message_format` - (Required) The message format of the message to publish. Accepted values are `JSON` and `RAW`.
+* `role_arn` - (Required) The ARN of the IAM role that grants access.
+* `target_arn` - (Required) The ARN of the SNS topic.
+
+The `sqs` object takes the following arguments:
+
+* `queue_url` - (Required) The URL of the Amazon SQS queue.
+* `role_arn` - (Required) The ARN of the IAM role that grants access.
+* `use_base64` - (Required) Specifies whether to use Base64 encoding.
+
+The `step_functions` object takes the following arguments:
+
+* `execution_name_prefix` - (Optional) The prefix used to generate, along with a UUID, the unique state machine execution name.
+* `state_machine_name` - (Required) The name of the Step Functions state machine whose execution will be started.
+* `role_arn` - (Required) The ARN of the IAM role that grants access to start execution of the state machine.
+
+The `timestream` object takes the following arguments:
+
+* `database_name` - (Required) The name of an Amazon Timestream database.
+* `dimension` - (Required) Configuration blocks with metadata attributes of the time series that are written in each measure record. Nested arguments below.
+    * `name` - (Required) The metadata dimension name. This is the name of the column in the Amazon Timestream database table record.
+    * `value` - (Required) The value to write in this column of the database record.
+* `role_arn` - (Required) The ARN of the role that grants permission to write to the Amazon Timestream database table.
+* `table_name` - (Required) The name of the database table into which to write the measure records.
+* `timestamp` - (Optional) Configuration block specifying an application-defined value to replace the default value assigned to the Timestream record's timestamp in the time column. Nested arguments below.
+    * `unit` - (Required) The precision of the timestamp value that results from the expression described in `value`. Valid values: `SECONDS`, `MILLISECONDS`, `MICROSECONDS`, `NANOSECONDS`.
+    * `value` - (Required) An expression that returns a long epoch time value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the topic rule.
+* `arn` - The ARN of the topic rule.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT Topic Rules using the `name`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import IoT Topic Rules using the `name`. For example:
+
+```console
+% terraform import aws_iot_topic_rule.rule <name>
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/iot_topic_rule_destination.html.markdown b/website/docs/cdktf/python/r/iot_topic_rule_destination.html.markdown
new file mode 100644
index 00000000000..9be89f01231
--- /dev/null
+++ b/website/docs/cdktf/python/r/iot_topic_rule_destination.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_topic_rule_destination"
+description: |-
+  Creates and manages an AWS IoT topic rule destination
+---
+
+
+
+# Resource: aws_iot_topic_rule_destination
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iot_topic_rule_destination import IotTopicRuleDestination
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        IotTopicRuleDestination(self, "example",
+            vpc_configuration=IotTopicRuleDestinationVpcConfiguration(
+                role_arn=Token.as_string(aws_iam_role_example.arn),
+                security_groups=[Token.as_string(aws_security_group_example.id)],
+                subnet_ids=Token.as_list(property_access(aws_subnet_example, ["*", "id"])),
+                vpc_id=Token.as_string(aws_vpc_example.id)
+            )
+        )
+```
+
+## Argument Reference
+
+* `enabled` - (Optional) Whether or not to enable the destination. Default: `true`.
+* `vpc_configuration` - (Required) Configuration of the virtual private cloud (VPC) connection. For more info, see the [AWS documentation](https://docs.aws.amazon.com/iot/latest/developerguide/vpc-rule-action.html).
+
+The `vpc_configuration` object takes the following arguments:
+
+* `role_arn` - (Required) The ARN of a role that has permission to create and attach to elastic network interfaces (ENIs).
+* `security_groups` - (Optional) The security groups of the VPC destination.
+* `subnet_ids` - (Required) The subnet IDs of the VPC destination.
+* `vpc_id` - (Required) The ID of the VPC.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the topic rule destination.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT topic rule destinations using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import IoT topic rule destinations using the `arn`.
For example: + +```console +% terraform import aws_iot_topic_rule_destination.example arn:aws:iot:us-west-2:123456789012:ruledestination/vpc/2ce781c8-68a6-4c52-9c62-63fe489ecc60 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ivs_channel.html.markdown b/website/docs/cdktf/python/r/ivs_channel.html.markdown new file mode 100644 index 00000000000..23535c50916 --- /dev/null +++ b/website/docs/cdktf/python/r/ivs_channel.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "IVS (Interactive Video)" +layout: "aws" +page_title: "AWS: aws_ivs_channel" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Channel. +--- + + + +# Resource: aws_ivs_channel + +Terraform resource for managing an AWS IVS (Interactive Video) Channel. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ivs_channel import IvsChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IvsChannel(self, "example", + name="channel-1" + ) +``` + +## Argument Reference + +The following arguments are optional: + +* `authorized` - (Optional) If `true`, channel is private (enabled for playback authorization). +* `latency_mode` - (Optional) Channel latency mode. Valid values: `NORMAL`, `LOW`. +* `name` - (Optional) Channel name. +* `recording_configuration_arn` - (Optional) Recording configuration ARN. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) Channel type, which determines the allowable resolution and bitrate. Valid values: `STANDARD`, `BASIC`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Channel. +* `ingest_endpoint` - Channel ingest endpoint, part of the definition of an ingest server, used when setting up streaming software. +* `playback_url` - Channel playback URL. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `5m`) +* `update` - (Default `5m`) +* `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Channel using the ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IVS (Interactive Video) Channel using the ARN. 
For example: + +```console +% terraform import aws_ivs_channel.example arn:aws:ivs:us-west-2:326937407773:channel/0Y1lcs4U7jk5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ivs_playback_key_pair.html.markdown b/website/docs/cdktf/python/r/ivs_playback_key_pair.html.markdown new file mode 100644 index 00000000000..5e2ec435630 --- /dev/null +++ b/website/docs/cdktf/python/r/ivs_playback_key_pair.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "IVS (Interactive Video)" +layout: "aws" +page_title: "AWS: aws_ivs_playback_key_pair" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Playback Key Pair. +--- + + + +# Resource: aws_ivs_playback_key_pair + +Terraform resource for managing an AWS IVS (Interactive Video) Playback Key Pair. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ivs_playback_key_pair import IvsPlaybackKeyPair +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IvsPlaybackKeyPair(self, "example", + public_key=Token.as_string(Fn.file("./public-key.pem")) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `public_key` - (Required) Public portion of a customer-generated key pair. Must be an ECDSA public key in PEM format. + +The following arguments are optional: + +* `name` - (Optional) Playback Key Pair name. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Playback Key Pair. +* `fingerprint` - Key-pair identifier. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `5m`) +* `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Playback Key Pair using the ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IVS (Interactive Video) Playback Key Pair using the ARN. 
For example: + +```console +% terraform import aws_ivs_playback_key_pair.example arn:aws:ivs:us-west-2:326937407773:playback-key/KDJRJNQhiQzA +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ivs_recording_configuration.html.markdown b/website/docs/cdktf/python/r/ivs_recording_configuration.html.markdown new file mode 100644 index 00000000000..04d39f20891 --- /dev/null +++ b/website/docs/cdktf/python/r/ivs_recording_configuration.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "IVS (Interactive Video)" +layout: "aws" +page_title: "AWS: aws_ivs_recording_configuration" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Recording Configuration. +--- + + + +# Resource: aws_ivs_recording_configuration + +Terraform resource for managing an AWS IVS (Interactive Video) Recording Configuration. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ivs_recording_configuration import IvsRecordingConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IvsRecordingConfiguration(self, "example", + destination_configuration=IvsRecordingConfigurationDestinationConfiguration( + s3=IvsRecordingConfigurationDestinationConfigurationS3( + bucket_name="ivs-stream-archive" + ) + ), + name="recording_configuration-1" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `destination_configuration` - Object containing destination configuration for where recorded video will be stored. + * `s3` - S3 destination configuration where recorded videos will be stored. + * `bucket_name` - S3 bucket name where recorded videos will be stored. + +The following arguments are optional: + +* `name` - (Optional) Recording Configuration name. +* `recording_reconnect_window_seconds` - (Optional) If a broadcast disconnects and then reconnects within the specified interval, the multiple streams will be considered a single broadcast and merged together. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `thumbnail_configuration` - (Optional) Object containing information to enable/disable the recording of thumbnails for a live session and modify the interval at which thumbnails are generated for the live session. + * `recording_mode` - (Optional) Thumbnail recording mode. Valid values: `DISABLED`, `INTERVAL`. + * `target_interval_seconds` (Configurable [and required] only if `recording_mode` is `INTERVAL`) - The targeted thumbnail-generation interval in seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Recording Configuration. +* `state` - The current state of the Recording Configuration. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
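+
+To actually record a stream, a channel must reference this configuration through its `recording_configuration_arn` argument. The following is a minimal, hypothetical sketch (the channel wiring and resource names are illustrative, not from the generated documentation):
+
+```python
+# Hypothetical sketch: attach the recording configuration above to an IVS
+# channel via the exported `arn` attribute.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ivs_channel import IvsChannel
+from imports.aws.ivs_recording_configuration import IvsRecordingConfiguration, IvsRecordingConfigurationDestinationConfiguration, IvsRecordingConfigurationDestinationConfigurationS3
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = IvsRecordingConfiguration(self, "example",
+            destination_configuration=IvsRecordingConfigurationDestinationConfiguration(
+                s3=IvsRecordingConfigurationDestinationConfigurationS3(
+                    bucket_name="ivs-stream-archive"
+                )
+            ),
+            name="recording_configuration-1"
+        )
+        # Streams on this channel are archived to the S3 bucket above.
+        IvsChannel(self, "recorded",
+            name="channel-1",
+            recording_configuration_arn=example.arn
+        )
+```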
+ +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `10m`) +* `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Recording Configuration using the ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import IVS (Interactive Video) Recording Configuration using the ARN. For example: + +```console +% terraform import aws_ivs_recording_configuration.example arn:aws:ivs:us-west-2:326937407773:recording-configuration/KAk1sHBl2L47 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ivschat_logging_configuration.html.markdown b/website/docs/cdktf/python/r/ivschat_logging_configuration.html.markdown new file mode 100644 index 00000000000..6caf13f5f34 --- /dev/null +++ b/website/docs/cdktf/python/r/ivschat_logging_configuration.html.markdown @@ -0,0 +1,201 @@ +--- +subcategory: "IVS (Interactive Video) Chat" +layout: "aws" +page_title: "AWS: aws_ivschat_logging_configuration" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Chat Logging Configuration. +--- + + + +# Resource: aws_ivschat_logging_configuration + +Terraform resource for managing an AWS IVS (Interactive Video) Chat Logging Configuration. + +## Example Usage + +### Basic Usage - Logging to CloudWatch + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_log_group import CloudwatchLogGroup +from imports.aws.ivschat_logging_configuration import IvschatLoggingConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CloudwatchLogGroup(self, "example") + aws_ivschat_logging_configuration_example = + IvschatLoggingConfiguration(self, "example_1", + destination_configuration=IvschatLoggingConfigurationDestinationConfiguration( + cloudwatch_logs=IvschatLoggingConfigurationDestinationConfigurationCloudwatchLogs( + log_group_name=example.name + ) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ivschat_logging_configuration_example.override_logical_id("example") +``` + +### Basic Usage - Logging to Kinesis Firehose with Extended S3 + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.ivschat_logging_configuration import IvschatLoggingConfiguration +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket_prefix="tf-ivschat-logging-bucket" + ) + aws_s3_bucket_acl_example = S3BucketAcl(self, "example_1", + acl="private", + bucket=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_acl_example.override_logical_id("example") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["firehose.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + aws_iam_role_example = IamRole(self, "example_3", + assume_role_policy=Token.as_string(assume_role.json), + name="firehose_example_role" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_kinesis_firehose_delivery_stream_example = + KinesisFirehoseDeliveryStream(self, "example_4", + destination="extended_s3", + extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration( + bucket_arn=example.arn, + role_arn=Token.as_string(aws_iam_role_example.arn) + ), + name="terraform-kinesis-firehose-extended-s3-example-stream", + tags={ + "LogDeliveryEnabled": "true" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kinesis_firehose_delivery_stream_example.override_logical_id("example") + aws_ivschat_logging_configuration_example = + IvschatLoggingConfiguration(self, "example_5", + destination_configuration=IvschatLoggingConfigurationDestinationConfiguration( + firehose=IvschatLoggingConfigurationDestinationConfigurationFirehose( + delivery_stream_name=Token.as_string(aws_kinesis_firehose_delivery_stream_example.name) + ) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ivschat_logging_configuration_example.override_logical_id("example") +``` + +### Basic Usage - Logging to S3 + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+
+#
+from imports.aws.ivschat_logging_configuration import IvschatLoggingConfiguration
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="tf-ivschat-logging",
+            force_destroy=True
+        )
+        aws_ivschat_logging_configuration_example = IvschatLoggingConfiguration(self, "example_1",
+            destination_configuration=IvschatLoggingConfigurationDestinationConfiguration(
+                s3=IvschatLoggingConfigurationDestinationConfigurationS3(
+                    bucket_name=example.id
+                )
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ivschat_logging_configuration_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `destination_configuration` - (Required) Object containing destination configuration for where chat activity will be logged. This object must contain exactly one of the following child arguments:
+    * `cloudwatch_logs` - An Amazon CloudWatch Logs destination configuration where chat activity will be logged.
+        * `log_group_name` - Name of the Amazon CloudWatch Logs destination where chat activity will be logged.
+    * `firehose` - An Amazon Kinesis Data Firehose destination configuration where chat activity will be logged.
+        * `delivery_stream_name` - Name of the Amazon Kinesis Firehose delivery stream where chat activity will be logged.
+    * `s3` - An Amazon S3 destination configuration where chat activity will be logged.
+        * `bucket_name` - Name of the Amazon S3 bucket where chat activity will be logged.
+
+The following arguments are optional:
+
+* `name` - (Optional) Logging Configuration name.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Logging Configuration.
+* `id` - ID of the Logging Configuration.
+* `state` - State of the Logging Configuration.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Chat Logging Configuration using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import IVS (Interactive Video) Chat Logging Configuration using the ARN.
For example: + +```console +% terraform import aws_ivschat_logging_configuration.example arn:aws:ivschat:us-west-2:326937407773:logging-configuration/MMUQc8wcqZmC +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ivschat_room.html.markdown b/website/docs/cdktf/python/r/ivschat_room.html.markdown new file mode 100644 index 00000000000..8ba382bc97e --- /dev/null +++ b/website/docs/cdktf/python/r/ivschat_room.html.markdown @@ -0,0 +1,136 @@ +--- +subcategory: "IVS (Interactive Video) Chat" +layout: "aws" +page_title: "AWS: aws_ivschat_room" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Chat Room. +--- + + + +# Resource: aws_ivschat_room + +Terraform resource for managing an AWS IVS (Interactive Video) Chat Room. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ivschat_room import IvschatRoom +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + IvschatRoom(self, "example", + name="tf-room" + ) +``` + +## Usage with Logging Configuration to S3 Bucket + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ivschat_logging_configuration import IvschatLoggingConfiguration +from imports.aws.ivschat_room import IvschatRoom +from imports.aws.s3_bucket import S3Bucket +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket_prefix="tf-ivschat-logging-bucket-", + force_destroy=True + ) + aws_ivschat_logging_configuration_example = + IvschatLoggingConfiguration(self, "example_1", + destination_configuration=IvschatLoggingConfigurationDestinationConfiguration( + s3=IvschatLoggingConfigurationDestinationConfigurationS3( + bucket_name=example.id + ) + ), + lifecycle=TerraformResourceLifecycle( + create_before_destroy=True + ), + name="tf-ivschat-loggingconfiguration" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ivschat_logging_configuration_example.override_logical_id("example") + aws_ivschat_room_example = IvschatRoom(self, "example_2", + logging_configuration_identifiers=[ + Token.as_string(aws_ivschat_logging_configuration_example.arn) + ], + name="tf-ivschat-room" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ivschat_room_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are optional: + +* `logging_configuration_identifiers` - (Optional) List of Logging Configuration + ARNs to attach to the room. +* `maximum_message_length` - (Optional) Maximum number of characters in a single + message. Messages are expected to be UTF-8 encoded and this limit applies + specifically to rune/code-point count, not number of bytes. 
+
+* `maximum_message_rate_per_second` - (Optional) Maximum number of messages per second that can be sent to the room (by all clients).
+* `message_review_handler` - (Optional) Configuration information for optional review of messages.
+    * `fallback_result` - (Optional) The fallback behavior (whether the message is allowed or denied) if the handler does not return a valid response, encounters an error, or times out. Valid values: `ALLOW`, `DENY`.
+    * `uri` - (Optional) ARN of the Lambda message review handler function.
+* `name` - (Optional) Room name.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Room.
+* `id` - Room ID.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Chat Room using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import IVS (Interactive Video) Chat Room using the ARN. For example:
+
+```console
+% terraform import aws_ivschat_room.example arn:aws:ivschat:us-west-2:326937407773:room/GoXEXyB4VwHb
+```
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kendra_data_source.html.markdown b/website/docs/cdktf/python/r/kendra_data_source.html.markdown new file mode 100644 index 00000000000..d99f70124f9 --- /dev/null +++ b/website/docs/cdktf/python/r/kendra_data_source.html.markdown @@ -0,0 +1,652 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_data_source"
+description: |-
+  Terraform resource for managing an AWS Kendra Data Source.
+---
+
+
+
+# Resource: aws_kendra_data_source
+
+Terraform resource for managing an AWS Kendra Data Source.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + description="example", + index_id=Token.as_string(aws_kendra_index_example.id), + language_code="en", + name="example", + tags={ + "hello": "world" + }, + type="CUSTOM" + ) +``` + +### S3 Connector + +#### With Schedule + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + s3_configuration=KendraDataSourceConfigurationS3Configuration( + bucket_name=Token.as_string(aws_s3_bucket_example.id) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + schedule="cron(9 10 1 * ? *)", + type="S3" + ) +``` + +#### With Access Control List + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + s3_configuration=KendraDataSourceConfigurationS3Configuration( + access_control_list_configuration=KendraDataSourceConfigurationS3ConfigurationAccessControlListConfiguration( + key_path="s3://${" + aws_s3_bucket_example.id + "}/path-1" + ), + bucket_name=Token.as_string(aws_s3_bucket_example.id) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="S3" + ) +``` + +#### With Documents Metadata Configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + s3_configuration=KendraDataSourceConfigurationS3Configuration( + bucket_name=Token.as_string(aws_s3_bucket_example.id), + documents_metadata_configuration=KendraDataSourceConfigurationS3ConfigurationDocumentsMetadataConfiguration( + s3_prefix="example" + ), + exclusion_patterns=["example"], + inclusion_patterns=["hello"], + inclusion_prefixes=["world"] + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="S3" + ) +``` + +### Web Crawler Connector + +#### With Seed URLs + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration( + seed_urls=["REPLACE_WITH_YOUR_URL"] + ) + ) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With Site Maps + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + site_maps_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSiteMapsConfiguration( + site_maps=["REPLACE_WITH_YOUR_URL"] + ) + ) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With Web Crawler Mode + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration( + seed_urls=["REPLACE_WITH_YOUR_URL"], + web_crawler_mode="SUBDOMAINS" + ) + ) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With Authentication Configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + authentication_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationAuthenticationConfiguration( + basic_authentication=[KendraDataSourceConfigurationWebCrawlerConfigurationAuthenticationConfigurationBasicAuthentication( + credentials=Token.as_string(aws_secretsmanager_secret_example.arn), + host="a.example.com", + port=Token.as_number("443") + ) + ] + ), + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration( + seed_urls=["REPLACE_WITH_YOUR_URL"] + ) + ) + ) + ), + depends_on=[aws_secretsmanager_secret_version_example], + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With Crawl Depth + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + crawl_depth=3, + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration( + seed_urls=["REPLACE_WITH_YOUR_URL"] + ) + ) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With Max Links Per Page + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + max_links_per_page=100, + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration( + seed_urls=["REPLACE_WITH_YOUR_URL"] + ) + ) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With Max Urls Per Minute Crawl Rate + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + max_urls_per_minute_crawl_rate=300, + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration( + seed_urls=["REPLACE_WITH_YOUR_URL"] + ) + ) + ) + ), + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With Proxy Configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_data_source import KendraDataSource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraDataSource(self, "example", + configuration=KendraDataSourceConfiguration( + web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration( + proxy_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationProxyConfiguration( + credentials=Token.as_string(aws_secretsmanager_secret_example.arn), + host="a.example.com", + port=Token.as_number("443") + ), + urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls( + seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration( + seed_urls=["REPLACE_WITH_YOUR_URL"] + ) + ) + ) + ), + depends_on=[aws_secretsmanager_secret_version_example], + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn), + type="WEBCRAWLER" + ) +``` + +#### With URL Exclusion and Inclusion Patterns + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+
+#
+from imports.aws.kendra_data_source import KendraDataSource
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        KendraDataSource(self, "example",
+            configuration=KendraDataSourceConfiguration(
+                web_crawler_configuration=KendraDataSourceConfigurationWebCrawlerConfiguration(
+                    url_exclusion_patterns=["example"],
+                    url_inclusion_patterns=["hello"],
+                    urls=KendraDataSourceConfigurationWebCrawlerConfigurationUrls(
+                        seed_url_configuration=KendraDataSourceConfigurationWebCrawlerConfigurationUrlsSeedUrlConfiguration(
+                            seed_urls=["REPLACE_WITH_YOUR_URL"]
+                        )
+                    )
+                )
+            ),
+            index_id=Token.as_string(aws_kendra_index_example.id),
+            name="example",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            type="WEBCRAWLER"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `index_id` - (Required, Forces new resource) The identifier of the index for your Amazon Kendra data source.
+* `name` - (Required) A name for your data source connector.
+* `role_arn` - (Required, except when `type` is `CUSTOM`) The Amazon Resource Name (ARN) of a role with permission to access the data source connector. For more information, see [IAM roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). You can't specify the `role_arn` parameter when the `type` parameter is set to `CUSTOM`; the `role_arn` parameter is required for all other data sources.
+* `type` - (Required, Forces new resource) The type of data source repository. For an updated list of values, refer to [Valid Values for Type](https://docs.aws.amazon.com/kendra/latest/dg/API_CreateDataSource.html#Kendra-CreateDataSource-request-Type).
+
+The following arguments are optional:
+
+* `configuration` - (Optional) A block with the configuration information to connect to your Data Source repository. You can't specify the `configuration` block when the `type` parameter is set to `CUSTOM`. [Detailed below](#configuration-block).
+* `custom_document_enrichment_configuration` - (Optional) A block with the configuration information for altering document metadata and content during the document ingestion process. For more information on how to create, modify, and delete document metadata, or make other content alterations when you ingest documents into Amazon Kendra, see [Customizing document metadata during the ingestion process](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html). [Detailed below](#custom_document_enrichment_configuration-block).
+* `description` - (Optional) A description for the Data Source connector.
+* `language_code` - (Optional) The code for a language. This allows you to support a language for all documents when creating the Data Source connector. English is supported by default. For more information on supported languages, including their codes, see [Adding documents in languages other than English](https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html).
+* `schedule` - (Optional) Sets the frequency for Amazon Kendra to check the documents in your Data Source repository and update the index. If you don't set a schedule, Amazon Kendra will not periodically update the index. You can call the `StartDataSourceSyncJob` API to update the index.
+* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### configuration Block + +The `configuration` configuration block supports the following arguments: + +* `s3_configuration` - (Required if `type` is set to `S3`) A block that provides the configuration information to connect to an Amazon S3 bucket as your data source. [Detailed below](#s3_configuration-block). +* `web_crawler_configuration` - (Required if `type` is set to `WEBCRAWLER`) A block that provides the configuration information required for Amazon Kendra Web Crawler. [Detailed below](#web_crawler_configuration-block). + +### s3_configuration Block + +The `s3_configuration` configuration block supports the following arguments: + +* `access_control_list_configuration` - (Optional) A block that provides the path to the S3 bucket that contains the user context filtering files for the data source. For the format of the file, see [Access control for S3 data sources](https://docs.aws.amazon.com/kendra/latest/dg/s3-acl.html). [Detailed below](#access_control_list_configuration-block). +* `bucket_name` - (Required) The name of the bucket that contains the documents. +* `documents_metadata_configuration` - (Optional) A block that defines the Document metadata files that contain information such as the document access control information, source URI, document author, and custom attributes. Each metadata file contains metadata about a single document. [Detailed below](#documents_metadata_configuration-block). +* `exclusion_patterns` - (Optional) A list of glob patterns for documents that should not be indexed. If a document that matches an inclusion prefix or inclusion pattern also matches an exclusion pattern, the document is not indexed. Refer to [Exclusion Patterns for more examples](https://docs.aws.amazon.com/kendra/latest/dg/API_S3DataSourceConfiguration.html#Kendra-Type-S3DataSourceConfiguration-ExclusionPatterns). +* `inclusion_patterns` - (Optional) A list of glob patterns for documents that should be indexed. If a document that matches an inclusion pattern also matches an exclusion pattern, the document is not indexed. Refer to [Inclusion Patterns for more examples](https://docs.aws.amazon.com/kendra/latest/dg/API_S3DataSourceConfiguration.html#Kendra-Type-S3DataSourceConfiguration-InclusionPatterns). +* `inclusion_prefixes` - (Optional) A list of S3 prefixes for the documents that should be included in the index. + +### access_control_list_configuration Block + +The `access_control_list_configuration` configuration block supports the following arguments: + +* `key_path` - (Optional) Path to the AWS S3 bucket that contains the ACL files. + +### documents_metadata_configuration Block + +The `documents_metadata_configuration` configuration block supports the following arguments: + +* `s3_prefix` - (Optional) A prefix used to filter metadata configuration files in the AWS S3 bucket. The S3 bucket might contain multiple metadata files. Use `s3_prefix` to include only the desired metadata files. + +### web_crawler_configuration Block + +The `web_crawler_configuration` configuration block supports the following arguments: + +* `authentication_configuration` - (Optional) A block with the configuration information required to connect to websites using authentication. You can connect to websites using basic authentication of user name and password. 
You use a secret in AWS Secrets Manager to store your authentication credentials. You must provide the website host name and port number. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"` and the port is `443`, the standard port for HTTPS. [Detailed below](#authentication_configuration-block). +* `crawl_depth` - (Optional) Specifies the number of levels in a website that you want to crawl. The first level begins from the website seed or starting point URL. For example, if a website has 3 levels – index level (i.e. seed in this example), sections level, and subsections level – and you are only interested in crawling information up to the sections level (i.e. levels 0-1), you can set your depth to 1. The default crawl depth is set to `2`. Minimum value of `0`. Maximum value of `10`. +* `max_content_size_per_page_in_mega_bytes` - (Optional) The maximum size (in MB) of a webpage or attachment to crawl. Files larger than this size (in MB) are skipped/not crawled. The default maximum size of a webpage or attachment is set to `50` MB. Minimum value of `1.0e-06`. Maximum value of `50`. +* `max_links_per_page` - (Optional) The maximum number of URLs on a webpage to include when crawling a website. This number is per webpage. As a website’s webpages are crawled, any URLs the webpages link to are also crawled. URLs on a webpage are crawled in order of appearance. The default maximum links per page is `100`. Minimum value of `1`. Maximum value of `1000`. +* `max_urls_per_minute_crawl_rate` - (Optional) The maximum number of URLs crawled per website host per minute. The default maximum number of URLs crawled per website host per minute is `300`. Minimum value of `1`. Maximum value of `300`. +* `proxy_configuration` - (Optional) Configuration information required to connect to your internal websites via a web proxy. You must provide the website host name and port number. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"` and the port is `443`, the standard port for HTTPS. Web proxy credentials are optional and you can use them to connect to a web proxy server that requires basic authentication. To store web proxy credentials, you use a secret in [AWS Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html). [Detailed below](#proxy_configuration-block). +* `url_exclusion_patterns` - (Optional) A list of regular expression patterns to exclude certain URLs to crawl. URLs that match the patterns are excluded from the index. URLs that don't match the patterns are included in the index. If a URL matches both an inclusion and exclusion pattern, the exclusion pattern takes precedence and the URL file isn't included in the index. Array Members: Minimum number of `0` items. Maximum number of `100` items. Length Constraints: Minimum length of `1`. Maximum length of `150`. +* `url_inclusion_patterns` - (Optional) A list of regular expression patterns to include certain URLs to crawl. URLs that match the patterns are included in the index. URLs that don't match the patterns are excluded from the index. If a URL matches both an inclusion and exclusion pattern, the exclusion pattern takes precedence and the URL file isn't included in the index. Array Members: Minimum number of `0` items. Maximum number of `100` items. Length Constraints: Minimum length of `1`. Maximum length of `150`. 
+
+* `urls` - (Required) A block that specifies the seed or starting point URLs of the websites or the sitemap URLs of the websites you want to crawl. You can include website subdomains. You can list up to `100` seed URLs and up to `3` sitemap URLs. You can only crawl websites that use the secure communication protocol, Hypertext Transfer Protocol Secure (HTTPS). If you receive an error when crawling a website, it could be that the website is blocked from crawling. When selecting websites to index, you must adhere to the [Amazon Acceptable Use Policy](https://aws.amazon.com/aup/) and all other Amazon terms. Remember that you must only use Amazon Kendra Web Crawler to index your own webpages, or webpages that you have authorization to index. [Detailed below](#urls-block).
+
+### authentication_configuration Block
+
+The `authentication_configuration` configuration block supports the following arguments:
+
+* `basic_authentication` - (Optional) The list of configuration information that's required to connect to and crawl a website host using basic authentication credentials. The list includes the name and port number of the website host. [Detailed below](#basic_authentication-block).
+
+### basic_authentication Block
+
+The `basic_authentication` configuration block supports the following arguments:
+
+* `credentials` - (Required) Your secret ARN, which you can create in AWS Secrets Manager. You use a secret if basic authentication credentials are required to connect to a website. The secret stores your user name and password credentials.
+* `host` - (Required) The name of the website host you want to connect to using authentication credentials. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"`.
+* `port` - (Required) The port number of the website host you want to connect to using authentication credentials. For example, the port for `https://a.example.com/page1.html` is `443`, the standard port for HTTPS.
+
+### proxy_configuration Block
+
+The `proxy_configuration` configuration block supports the following arguments:
+
+* `credentials` - (Optional) Your secret ARN, which you can create in AWS Secrets Manager. You use a secret if web proxy credentials are required to connect to a website host. Amazon Kendra currently supports basic authentication to connect to a web proxy server. The secret stores your credentials.
+* `host` - (Required) The name of the website host you want to connect to via a web proxy server. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"`.
+* `port` - (Required) The port number of the website host you want to connect to via a web proxy server. For example, the port for `https://a.example.com/page1.html` is `443`, the standard port for HTTPS.
+
+### urls Block
+
+The `urls` configuration block supports the following arguments:
+
+* `seed_url_configuration` - (Optional) A block that specifies the configuration of the seed or starting point URLs of the websites you want to crawl. You can choose to crawl only the website host names, or the website host names with subdomains, or the website host names with subdomains and other domains that the webpages link to. You can list up to `100` seed URLs. [Detailed below](#seed_url_configuration-block).
+* `site_maps_configuration` - (Optional) A block that specifies the configuration of the sitemap URLs of the websites you want to crawl. Only URLs belonging to the same website host names are crawled.
You can list up to `3` sitemap URLs. [Detailed below](#site_maps_configuration-block). + +### seed_url_configuration Block + +The `seed_url_configuration` configuration block supports the following arguments: + +* `seed_urls` - (Required) The list of seed or starting point URLs of the websites you want to crawl. The list can include a maximum of `100` seed URLs. Array Members: Minimum number of `0` items. Maximum number of `100` items. Length Constraints: Minimum length of `1`. Maximum length of `2048`. +* `web_crawler_mode` - (Optional) The default mode is set to `HOST_ONLY`. You can choose one of the following modes: + * `HOST_ONLY` – crawl only the website host names. For example, if the seed URL is `"abc.example.com"`, then only URLs with host name `"abc.example.com"` are crawled. + * `SUBDOMAINS` – crawl the website host names with subdomains. For example, if the seed URL is `"abc.example.com"`, then `"a.abc.example.com"` and `"b.abc.example.com"` are also crawled. + * `EVERYTHING` – crawl the website host names with subdomains and other domains that the webpages link to. + +### site_maps_configuration Block + +The `site_maps_configuration` configuration block supports the following arguments: + +* `site_maps` - (Required) The list of sitemap URLs of the websites you want to crawl. The list can include a maximum of `3` sitemap URLs. + +### custom_document_enrichment_configuration Block + +The `custom_document_enrichment_configuration` configuration block supports the following arguments: + +* `inline_configurations` - (Optional) Configuration information to alter document attributes or metadata fields and content when ingesting documents into Amazon Kendra. Minimum number of `0` items. Maximum number of `100` items. [Detailed below](#inline_configurations-block). +* `post_extraction_hook_configuration` - (Optional) A block that specifies the configuration information for invoking a Lambda function in AWS Lambda on the structured documents with their metadata and text extracted. You can use a Lambda function to apply advanced logic for creating, modifying, or deleting document metadata and content. For more information, see [Advanced data manipulation](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html#advanced-data-manipulation). [Detailed below](#pre_extraction_hook_configuration-and-post_extraction_hook_configuration-blocks). +* `pre_extraction_hook_configuration` - (Optional) Configuration information for invoking a Lambda function in AWS Lambda on the original or raw documents before extracting their metadata and text. You can use a Lambda function to apply advanced logic for creating, modifying, or deleting document metadata and content. For more information, see [Advanced data manipulation](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html#advanced-data-manipulation). [Detailed below](#pre_extraction_hook_configuration-and-post_extraction_hook_configuration-blocks). +* `role_arn` - (Optional) The Amazon Resource Name (ARN) of a role with permission to run `pre_extraction_hook_configuration` and `post_extraction_hook_configuration` for altering document metadata and content during the document ingestion process. For more information, see [IAM roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). 
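+
+As a hedged sketch of how these pieces fit together (the nested class names are inferred from the `cdktf convert` naming pattern used elsewhere in this document and should be treated as assumptions), the following strips a hypothetical `legacy_flag` attribute from documents during ingestion whenever it exists:
+
+```python
+# Hypothetical sketch: an S3 data source with an inline custom document
+# enrichment rule. Resource references mirror the examples above.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.kendra_data_source import KendraDataSource
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        KendraDataSource(self, "example",
+            configuration=KendraDataSourceConfiguration(
+                s3_configuration=KendraDataSourceConfigurationS3Configuration(
+                    bucket_name=Token.as_string(aws_s3_bucket_example.id)
+                )
+            ),
+            custom_document_enrichment_configuration=KendraDataSourceCustomDocumentEnrichmentConfiguration(
+                inline_configurations=[KendraDataSourceCustomDocumentEnrichmentConfigurationInlineConfigurations(
+                    # If the attribute exists on an incoming document...
+                    condition=KendraDataSourceCustomDocumentEnrichmentConfigurationInlineConfigurationsCondition(
+                        condition_document_attribute_key="legacy_flag",
+                        operator="Exists"
+                    ),
+                    # ...delete its value before the document is indexed.
+                    target=KendraDataSourceCustomDocumentEnrichmentConfigurationInlineConfigurationsTarget(
+                        target_document_attribute_key="legacy_flag",
+                        target_document_attribute_value_deletion=True
+                    )
+                )
+                ]
+            ),
+            index_id=Token.as_string(aws_kendra_index_example.id),
+            name="example",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            type="S3"
+        )
+```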
+
+### inline_configurations Block
+
+The `inline_configurations` configuration block supports the following arguments:
+
+* `condition` - (Optional) Configuration of the condition used for the target document attribute or metadata field when ingesting documents into Amazon Kendra. See [condition](#condition-block).
+* `document_content_deletion` - (Optional) `TRUE` to delete content if the condition used for the target attribute is met.
+* `target` - (Optional) Configuration of the target document attribute or metadata field when ingesting documents into Amazon Kendra. You can also include a value. [Detailed below](#target-block).
+
+### condition Block
+
+The `condition` configuration block supports the following arguments:
+
+* `condition_document_attribute_key` - (Required) The identifier of the document attribute used for the condition. For example, `_source_uri` could be an identifier for the attribute or metadata field that contains source URIs associated with the documents. Amazon Kendra currently does not support `_document_body` as an attribute key used for the condition.
+* `condition_on_value` - (Optional) The value used by the operator. For example, you can specify the value 'financial' for strings in the `_source_uri` field that partially match or contain this value. See [condition_on_value](#condition_on_value-block).
+* `operator` - (Required) The condition operator. For example, you can use `Contains` to partially match a string. Valid Values: `GreaterThan` | `GreaterThanOrEquals` | `LessThan` | `LessThanOrEquals` | `Equals` | `NotEquals` | `Contains` | `NotContains` | `Exists` | `NotExists` | `BeginsWith`.
+
+### target Block
+
+The `target` configuration block supports the following arguments:
+
+* `target_document_attribute_key` - (Optional) The identifier of the target document attribute or metadata field. For example, 'Department' could be an identifier for the target attribute or metadata field that includes the department names associated with the documents.
+* `target_document_attribute_value` - (Optional) The target value you want to create for the target attribute. For example, 'Finance' could be the target value for the target attribute key 'Department'. See [target_document_attribute_value](#target_document_attribute_value-block).
+* `target_document_attribute_value_deletion` - (Optional) `TRUE` to delete the existing target value for your specified target attribute key. You cannot create a target value and set this to `TRUE`. To create a target value (`TargetDocumentAttributeValue`), set this to `FALSE`.
+
+### target_document_attribute_value Block
+
+The `target_document_attribute_value` configuration block supports the following arguments:
+
+* `date_value` - (Optional) A date expressed as an ISO 8601 string. It is important for the time zone to be included in the ISO 8601 date-time format. As of this writing only UTC is supported. For example, `2012-03-25T12:30:10+00:00`.
+* `long_value` - (Optional) A long integer value.
+* `string_list_value` - (Optional) A list of strings.
+* `string` - (Optional) A string, such as "department".
+
+### pre_extraction_hook_configuration and post_extraction_hook_configuration Blocks
+
+The `pre_extraction_hook_configuration` and `post_extraction_hook_configuration` configuration blocks each support the following arguments:
+
+* `invocation_condition` - (Optional) A block that specifies the condition for when a Lambda function should be invoked.
For example, you can specify a condition that, if date-time values are empty, Amazon Kendra should invoke a function that inserts the current date-time. See [invocation_condition](#invocation_condition-block).
+* `lambda_arn` - (Required) The Amazon Resource Name (ARN) of a Lambda Function that can manipulate your document metadata fields or attributes and content.
+* `s3_bucket` - (Required) The name of the S3 bucket that stores the original, raw documents or the structured, parsed documents before and after altering them. For more information, see [Data contracts for Lambda functions](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html#cde-data-contracts-lambda).
+
+### invocation_condition Block
+
+The `invocation_condition` configuration block supports the following arguments:
+
+* `condition_document_attribute_key` - (Required) The identifier of the document attribute used for the condition. For example, `_source_uri` could be an identifier for the attribute or metadata field that contains source URIs associated with the documents. Amazon Kendra currently does not support `_document_body` as an attribute key used for the condition.
+* `condition_on_value` - (Optional) The value used by the operator. For example, you can specify the value 'financial' for strings in the `_source_uri` field that partially match or contain this value. See [condition_on_value](#condition_on_value-block).
+* `operator` - (Required) The condition operator. For example, you can use `Contains` to partially match a string. Valid Values: `GreaterThan` | `GreaterThanOrEquals` | `LessThan` | `LessThanOrEquals` | `Equals` | `NotEquals` | `Contains` | `NotContains` | `Exists` | `NotExists` | `BeginsWith`.
+
+### condition_on_value Block
+
+The `condition_on_value` configuration block supports the following arguments:
+
+* `date_value` - (Optional) A date expressed as an ISO 8601 string. It is important for the time zone to be included in the ISO 8601 date-time format. As of this writing only UTC is supported. For example, `2012-03-25T12:30:10+00:00`.
+* `long_value` - (Optional) A long integer value.
+* `string_list_value` - (Optional) A list of strings.
+* `string` - (Optional) A string, such as "department".
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Data Source.
+* `created_at` - The Unix timestamp of when the Data Source was created.
+* `data_source_id` - The unique identifier of the Data Source.
+* `error_message` - When the Status field value is `FAILED`, the ErrorMessage field contains a description of the error that caused the Data Source to fail.
+* `id` - The unique identifiers of the Data Source and index separated by a slash (`/`).
+* `status` - The current status of the Data Source. When the status is `ACTIVE`, the Data Source is ready to use. When the status is `FAILED`, the `error_message` field contains the reason that the Data Source failed.
+* `updated_at` - The Unix timestamp of when the Data Source was last updated.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
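+
+Because a data source without a `schedule` is only synced when the `StartDataSourceSyncJob` API is called, the exported identifiers are often needed outside of Terraform. A minimal, hypothetical sketch in the style of the examples above (the output names are illustrative):
+
+```python
+# Hypothetical sketch: expose the Data Source identifiers as stack outputs
+# for tooling that triggers syncs via StartDataSourceSyncJob.
+from constructs import Construct
+from cdktf import Token, TerraformOutput, TerraformStack
+from imports.aws.kendra_data_source import KendraDataSource
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KendraDataSource(self, "example",
+            index_id=Token.as_string(aws_kendra_index_example.id),
+            name="example",
+            type="CUSTOM"
+        )
+        TerraformOutput(self, "data_source_id", value=example.data_source_id)
+        TerraformOutput(self, "data_source_arn", value=example.arn)
+```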
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kendra Data Source using the unique identifiers of the data_source and index separated by a slash (`/`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Kendra Data Source using the unique identifiers of the data_source and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_data_source.example 1045d08d-66ef-4882-b3ed-dfb7df183e90/b34dfdf7-1f2b-4704-9581-79e00296845f +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kendra_experience.html.markdown b/website/docs/cdktf/python/r/kendra_experience.html.markdown new file mode 100644 index 00000000000..3c8dd478040 --- /dev/null +++ b/website/docs/cdktf/python/r/kendra_experience.html.markdown @@ -0,0 +1,125 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_experience" +description: |- + Terraform resource for managing an AWS Kendra Experience. +--- + + + +# Resource: aws_kendra_experience + +Terraform resource for managing an AWS Kendra Experience. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_experience import KendraExperience +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraExperience(self, "example", + configuration=KendraExperienceConfiguration( + content_source_configuration=KendraExperienceConfigurationContentSourceConfiguration( + direct_put_content=True, + faq_ids=[Token.as_string(aws_kendra_faq_example.faq_id)] + ), + user_identity_configuration=KendraExperienceConfigurationUserIdentityConfiguration( + identity_attribute_name="12345ec453-1546651e-79c4-4554-91fa-00b43ccfa245" + ) + ), + description="My Kendra Experience", + index_id=Token.as_string(aws_kendra_index_example.id), + name="example", + role_arn=Token.as_string(aws_iam_role_example.arn) + ) +``` + +## Argument Reference + +~> **NOTE:** By default of the AWS Kendra API, updates to an existing `aws_kendra_experience` resource (e.g. updating the `name`) will also update the `configuration.content_source_configuration.direct_put_content` parameter to `false` if not already provided. + +The following arguments are required: + +* `index_id` - (Required, Forces new resource) The identifier of the index for your Amazon Kendra experience. +* `name` - (Required) A name for your Amazon Kendra experience. +* `role_arn` - (Required) The Amazon Resource Name (ARN) of a role with permission to access `Query API`, `QuerySuggestions API`, `SubmitFeedback API`, and `AWS SSO` that stores your user and group information. 
For more information, see [IAM roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html).
+
+The following arguments are optional:
+
+* `description` - (Optional, Forces new resource if removed) A description for your Amazon Kendra experience.
+* `configuration` - (Optional) Configuration information for your Amazon Kendra experience. Terraform will only perform drift detection of its value when present in a configuration. [Detailed below](#configuration).
+
+### `configuration`
+
+~> **NOTE:** Due to the default behavior of the AWS Kendra API, the `content_source_configuration.direct_put_content` parameter will be set to `false` if it is not provided.
+
+The `configuration` configuration block supports the following arguments:
+
+* `content_source_configuration` - (Optional, Required if `user_identity_configuration` not provided) The identifiers of your data sources and FAQs, or a specification that you want to use documents indexed via the `BatchPutDocument API`. Terraform will only perform drift detection of its value when present in a configuration. [Detailed below](#content_source_configuration).
+* `user_identity_configuration` - (Optional, Required if `content_source_configuration` not provided) The AWS SSO field name that contains the identifiers of your users, such as their emails. [Detailed below](#user_identity_configuration).
+
+### `content_source_configuration`
+
+The `content_source_configuration` configuration block supports the following arguments:
+
+* `data_source_ids` - (Optional) The identifiers of the data sources you want to use for your Amazon Kendra experience. Maximum number of 100 items.
+* `direct_put_content` - (Optional) Whether to use documents you indexed directly using the `BatchPutDocument API`. Defaults to `false`.
+* `faq_ids` - (Optional) The identifiers of the FAQs that you want to use for your Amazon Kendra experience. Maximum number of 100 items.
+
+### `user_identity_configuration`
+
+The `user_identity_configuration` configuration block supports the following argument:
+
+* `identity_attribute_name` - (Required) The AWS SSO field name that contains the identifiers of your users, such as their emails.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifiers of the experience and index separated by a slash (`/`).
+* `arn` - ARN of the Experience.
+* `endpoints` - Shows the endpoint URLs for your Amazon Kendra experiences. The URLs are unique and fully hosted by AWS.
+    * `endpoint` - The endpoint of your Amazon Kendra experience.
+    * `endpoint_type` - The type of endpoint for your Amazon Kendra experience.
+* `experience_id` - The unique identifier of the experience.
+* `status` - The current processing status of your Amazon Kendra experience.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kendra Experience using the unique identifiers of the experience and index separated by a slash (`/`).
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Kendra Experience using the unique identifiers of the experience and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_experience.example 1045d08d-66ef-4882-b3ed-dfb7df183e90/b34dfdf7-1f2b-4704-9581-79e00296845f +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kendra_faq.html.markdown b/website/docs/cdktf/python/r/kendra_faq.html.markdown new file mode 100644 index 00000000000..ab6f9087f75 --- /dev/null +++ b/website/docs/cdktf/python/r/kendra_faq.html.markdown @@ -0,0 +1,157 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_faq" +description: |- + Terraform resource for managing an AWS Kendra FAQ. +--- + + + +# Resource: aws_kendra_faq + +Terraform resource for managing an AWS Kendra FAQ. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_faq import KendraFaq +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraFaq(self, "example", + index_id=Token.as_string(aws_kendra_index_example.id), + name="Example", + role_arn=Token.as_string(aws_iam_role_example.arn), + s3_path=KendraFaqS3Path( + bucket=Token.as_string(aws_s3_bucket_example.id), + key=Token.as_string(aws_s3_object_example.key) + ), + tags={ + "Name": "Example Kendra Faq" + } + ) +``` + +### With File Format + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_faq import KendraFaq +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraFaq(self, "example", + file_format="CSV", + index_id=Token.as_string(aws_kendra_index_example.id), + name="Example", + role_arn=Token.as_string(aws_iam_role_example.arn), + s3_path=KendraFaqS3Path( + bucket=Token.as_string(aws_s3_bucket_example.id), + key=Token.as_string(aws_s3_object_example.key) + ) + ) +``` + +### With Language Code + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.kendra_faq import KendraFaq
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        KendraFaq(self, "example",
+            index_id=Token.as_string(aws_kendra_index_example.id),
+            language_code="en",
+            name="Example",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            s3_path=KendraFaqS3Path(
+                bucket=Token.as_string(aws_s3_bucket_example.id),
+                key=Token.as_string(aws_s3_object_example.key)
+            )
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `index_id` - (Required, Forces new resource) The identifier of the index for a FAQ.
+* `name` - (Required, Forces new resource) The name that should be associated with the FAQ.
+* `role_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of a role with permission to access the S3 bucket that contains the FAQs. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html).
+* `s3_path` - (Required, Forces new resource) The S3 location of the FAQ input data. Detailed below.
+
+The `s3_path` configuration block supports the following arguments:
+
+* `bucket` - (Required, Forces new resource) The name of the S3 bucket that contains the file.
+* `key` - (Required, Forces new resource) The name of the file.
+
+The following arguments are optional:
+
+* `description` - (Optional, Forces new resource) The description for a FAQ.
+* `file_format` - (Optional, Forces new resource) The file format used by the input files for the FAQ. Valid Values are `CSV`, `CSV_WITH_HEADER`, `JSON`.
+* `language_code` - (Optional, Forces new resource) The code for a language. This shows a supported language for the FAQ document. English is supported by default. For more information on supported languages, including their codes, see [Adding documents in languages other than English](https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the FAQ.
+* `created_at` - The Unix datetime that the FAQ was created.
+* `error_message` - When the Status field value is `FAILED`, this contains a message that explains why.
+* `faq_id` - The identifier of the FAQ.
+* `id` - The unique identifiers of the FAQ and index separated by a slash (`/`).
+* `status` - The status of the FAQ. It is ready to use when the status is `ACTIVE`.
+* `updated_at` - The date and time that the FAQ was last updated.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_kendra_faq` using the unique identifiers of the FAQ and index separated by a slash (`/`).
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_kendra_faq` using the unique identifiers of the FAQ and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_faq.example faq-123456780/idx-8012925589 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kendra_index.html.markdown b/website/docs/cdktf/python/r/kendra_index.html.markdown new file mode 100644 index 00000000000..6d6c47388df --- /dev/null +++ b/website/docs/cdktf/python/r/kendra_index.html.markdown @@ -0,0 +1,762 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_index" +description: |- + Provides an Amazon Kendra Index resource. +--- + + + +# Resource: aws_kendra_index + +Provides an Amazon Kendra Index resource. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_index import KendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraIndex(self, "example", + description="example", + edition="DEVELOPER_EDITION", + name="example", + role_arn=this_var.arn, + tags={ + "Key1": "Value1" + } + ) +``` + +### With capacity units + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_index import KendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraIndex(self, "example", + capacity_units=KendraIndexCapacityUnits( + query_capacity_units=2, + storage_capacity_units=2 + ), + edition="DEVELOPER_EDITION", + name="example", + role_arn=this_var.arn + ) +``` + +### With server side encryption configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_index import KendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraIndex(self, "example", + name="example", + role_arn=this_var.arn, + server_side_encryption_configuration=KendraIndexServerSideEncryptionConfiguration( + kms_key_id=Token.as_string(data_aws_kms_key_this.arn) + ) + ) +``` + +### With user group resolution configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kendra_index import KendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraIndex(self, "example", + name="example", + role_arn=this_var.arn, + user_group_resolution_configuration=KendraIndexUserGroupResolutionConfiguration( + user_group_resolution_mode="AWS_SSO" + ) + ) +``` + +### With Document Metadata Configuration Updates + +#### Specifying the predefined elements + +Refer to [Amazon Kendra documentation on built-in document fields](https://docs.aws.amazon.com/kendra/latest/dg/hiw-index.html#index-reserved-fields) for more information. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_index import KendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraIndex(self, "example", + document_metadata_configuration_updates=[KendraIndexDocumentMetadataConfigurationUpdates( + name="_authors", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1 + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=False + ), + type="STRING_LIST_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_category", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_created_at", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + duration="25920000s", + freshness=False, + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="DATE_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_data_source_id", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_document_title", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=2, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=False, + searchable=True, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_excerpt_page_number", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=2, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=False + ), + type="LONG_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_faq_id", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + 
search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_file_type", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_language_code", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_last_updated_at", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + duration="25920000s", + freshness=False, + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="DATE_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_source_uri", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=False, + searchable=False, + sortable=False + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_tenant_id", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_version", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_view_count", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="LONG_VALUE" + ) + ], + name="example", + role_arn=this_var.arn + ) +``` + +#### Appending additional elements + +The example below shows additional elements with names, `example-string-value`, `example-long-value`, `example-string-list-value`, `example-date-value` representing the 4 types of `STRING_VALUE`, `LONG_VALUE`, `STRING_LIST_VALUE`, `DATE_VALUE` respectively. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kendra_index import KendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraIndex(self, "example", + document_metadata_configuration_updates=[KendraIndexDocumentMetadataConfigurationUpdates( + name="_authors", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1 + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=False + ), + type="STRING_LIST_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_category", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_created_at", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + duration="25920000s", + freshness=False, + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="DATE_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_data_source_id", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_document_title", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=2, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=False, + searchable=True, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_excerpt_page_number", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=2, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=False + ), + type="LONG_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_faq_id", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_file_type", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_language_code", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( 
+ name="_last_updated_at", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + duration="25920000s", + freshness=False, + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="DATE_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_source_uri", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=False, + searchable=False, + sortable=False + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_tenant_id", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_version", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="_view_count", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=False, + facetable=False, + searchable=False, + sortable=True + ), + type="LONG_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="example-string-value", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + values_importance_map={} + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=True, + searchable=True, + sortable=True + ), + type="STRING_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="example-long-value", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=True, + searchable=False, + sortable=True + ), + type="LONG_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="example-string-list-value", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + importance=1 + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=True, + searchable=True, + sortable=False + ), + type="STRING_LIST_VALUE" + ), KendraIndexDocumentMetadataConfigurationUpdates( + name="example-date-value", + relevance=KendraIndexDocumentMetadataConfigurationUpdatesRelevance( + duration="25920000s", + freshness=False, + importance=1, + rank_order="ASCENDING" + ), + search=KendraIndexDocumentMetadataConfigurationUpdatesSearch( + displayable=True, + facetable=True, + searchable=False, + sortable=False + ), + type="DATE_VALUE" + ) + ], + name="example", + role_arn=this_var.arn + ) +``` + +### With JSON token type configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider 
bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kendra_index import KendraIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraIndex(self, "example", + name="example", + role_arn=this_var.arn, + user_token_configurations=KendraIndexUserTokenConfigurations( + json_token_type_configuration=KendraIndexUserTokenConfigurationsJsonTokenTypeConfiguration( + group_attribute_field="groups", + user_name_attribute_field="username" + ) + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `capacity_units` - (Optional) A block that sets the number of additional document storage and query capacity units that should be used by the index. [Detailed below](#capacity_units). +* `description` - (Optional) The description of the Index. +* `document_metadata_configuration_updates` - (Optional) One or more blocks that specify the configuration settings for any metadata applied to the documents in the index. Minimum number of 0 items. Maximum number of 500 items. If specified, you must define all elements, including those that are provided by default. These index fields are documented at [Amazon Kendra Index documentation](https://docs.aws.amazon.com/kendra/latest/dg/hiw-index.html). For an example resource that defines these default index fields, refer to the [default example above](#specifying-the-predefined-elements). For an example resource that appends additional index fields, refer to the [append example above](#appending-additional-elements). All arguments for each block must be specified. Note that blocks cannot be removed since index fields cannot be deleted. This argument is [detailed below](#document_metadata_configuration_updates). +* `edition` - (Optional) The Amazon Kendra edition to use for the index. Choose `DEVELOPER_EDITION` for indexes intended for development, testing, or proof of concept. Use `ENTERPRISE_EDITION` for your production databases. Once you set the edition for an index, it can't be changed. Defaults to `ENTERPRISE_EDITION` +* `name` - (Required) Specifies the name of the Index. +* `role_arn` - (Required) An AWS Identity and Access Management (IAM) role that gives Amazon Kendra permissions to access your Amazon CloudWatch logs and metrics. This is also the role you use when you call the `BatchPutDocument` API to index documents from an Amazon S3 bucket. +* `server_side_encryption_configuration` - (Optional) A block that specifies the identifier of the AWS KMS customer managed key (CMK) that's used to encrypt data indexed by Amazon Kendra. Amazon Kendra doesn't support asymmetric CMKs. [Detailed below](#server_side_encryption_configuration). +* `user_context_policy` - (Optional) The user context policy. Valid values are `ATTRIBUTE_FILTER` or `USER_TOKEN`. For more information, refer to [UserContextPolicy](https://docs.aws.amazon.com/kendra/latest/APIReference/API_CreateIndex.html#kendra-CreateIndex-request-UserContextPolicy). Defaults to `ATTRIBUTE_FILTER`. +* `user_group_resolution_configuration` - (Optional) A block that enables fetching access levels of groups and users from an AWS Single Sign-On identity source. To configure this, see [UserGroupResolutionConfiguration](https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html). [Detailed below](#user_group_resolution_configuration). 
+* `user_token_configurations` - (Optional) A block that specifies the user token configuration. [Detailed below](#user_token_configurations).
+* `tags` - (Optional) Tags to apply to the Index. If configured with a provider
+[`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `capacity_units`
+
+A `capacity_units` block supports the following arguments:
+
+* `query_capacity_units` - (Required) The amount of extra query capacity for an index and GetQuerySuggestions capacity. For more information, refer to [QueryCapacityUnits](https://docs.aws.amazon.com/kendra/latest/dg/API_CapacityUnitsConfiguration.html#Kendra-Type-CapacityUnitsConfiguration-QueryCapacityUnits).
+* `storage_capacity_units` - (Required) The amount of extra storage capacity for an index. A single capacity unit provides 30 GB of storage space or 100,000 documents, whichever is reached first. Minimum value of 0.
+
+### `document_metadata_configuration_updates`
+
+A `document_metadata_configuration_updates` block supports the following arguments:
+
+* `name` - (Required) The name of the index field. Minimum length of 1. Maximum length of 30.
+* `relevance` - (Required) A block that provides manual tuning parameters to determine how the field affects the search results. [Detailed below](#relevance).
+* `search` - (Required) A block that provides information about how the field is used during a search. [Detailed below](#search).
+* `type` - (Required) The data type of the index field. Valid values are `STRING_VALUE`, `STRING_LIST_VALUE`, `LONG_VALUE`, `DATE_VALUE`.
+
+#### `relevance`
+
+A `relevance` block supports the following attributes:
+
+* `duration` - (Required if `type` is `DATE_VALUE`) Specifies the time period that the boost applies to. For more information, refer to [Duration](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-Duration).
+* `freshness` - (Required if `type` is `DATE_VALUE`) Indicates that this field determines how "fresh" a document is. For more information, refer to [Freshness](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-Freshness).
+* `importance` - (Required for all types) The relative importance of the field in the search. Larger numbers provide more of a boost than smaller numbers. Minimum value of 1. Maximum value of 10.
+* `rank_order` - (Required if `type` is `DATE_VALUE` or `LONG_VALUE`) Determines how values should be interpreted. For more information, refer to [RankOrder](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-RankOrder).
+* `values_importance_map` - (Required if `type` is `STRING_VALUE`) A map of values that should be given a different boost when they appear in the result list. For more information, refer to [ValueImportanceMap](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-ValueImportanceMap).
+
+#### `search`
+
+A `search` block supports the following attributes:
+
+* `displayable` - (Required) Determines whether the field is returned in the query response. The default is `true`.
+* `facetable` - (Required) Indicates that the field can be used to create search facets, a count of results for each value in the field. The default is `false`.
+* `searchable` - (Required) Determines whether the field is used in the search.
If `searchable` is set to `true`, you can use relevance tuning to manually tune how Amazon Kendra weights the field in the search. The default is `true` for `string` fields and `false` for `number` and `date` fields.
+* `sortable` - (Required) Determines whether the field can be used to sort the results of a query. If you specify sorting on a field that does not have `sortable` set to `true`, Amazon Kendra returns an exception. The default is `false`.
+
+### `server_side_encryption_configuration`
+
+A `server_side_encryption_configuration` block supports the following arguments:
+
+* `kms_key_id` - (Optional) The identifier of the AWS KMS customer master key (CMK). Amazon Kendra doesn't support asymmetric CMKs.
+
+### `user_group_resolution_configuration`
+
+A `user_group_resolution_configuration` block supports the following arguments:
+
+* `user_group_resolution_mode` - (Required) The identity store provider (mode) you want to use to fetch access levels of groups and users. AWS Single Sign-On is currently the only available mode. Your users and groups must exist in an AWS SSO identity source in order to use this mode. Valid Values are `AWS_SSO` or `NONE`.
+
+### `user_token_configurations`
+
+A `user_token_configurations` block supports the following arguments:
+
+* `json_token_type_configuration` - (Optional) A block that specifies the information about the JSON token type configuration. [Detailed below](#json_token_type_configuration).
+* `jwt_token_type_configuration` - (Optional) A block that specifies the information about the JWT token type configuration. [Detailed below](#jwt_token_type_configuration).
+
+#### `json_token_type_configuration`
+
+A `json_token_type_configuration` block supports the following arguments:
+
+* `group_attribute_field` - (Required) The group attribute field. Minimum length of 1. Maximum length of 2048.
+* `user_name_attribute_field` - (Required) The user name attribute field. Minimum length of 1. Maximum length of 2048.
+
+#### `jwt_token_type_configuration`
+
+A `jwt_token_type_configuration` block supports the following arguments:
+
+* `claim_regex` - (Optional) The regular expression that identifies the claim. Minimum length of 1. Maximum length of 100.
+* `group_attribute_field` - (Optional) The group attribute field. Minimum length of 1. Maximum length of 100.
+* `issuer` - (Optional) The issuer of the token. Minimum length of 1. Maximum length of 65.
+* `key_location` - (Required) The location of the key. Valid values are `URL` or `SECRET_MANAGER`.
+* `secrets_manager_arn` - (Optional) The Amazon Resource Name (ARN) of the secret.
+* `url` - (Optional) The signing key URL. Valid pattern is `^(https?|ftp|file):\/\/([^\s]*)`.
+* `user_name_attribute_field` - (Optional) The user name attribute field. Minimum length of 1. Maximum length of 100.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `40m`)
+* `delete` - (Default `40m`)
+* `update` - (Default `40m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Index.
+* `created_at` - The Unix datetime that the index was created.
+* `error_message` - When the Status field value is `FAILED`, this contains a message that explains why.
+* `id` - The identifier of the Index.
+* `index_statistics` - A block that provides information about the number of FAQ questions and answers and the number of text documents indexed. [Detailed below](#index_statistics).
+* `status` - The current status of the index. When the value is `ACTIVE`, the index is ready for use. If the Status field value is `FAILED`, the `error_message` field contains a message that explains why.
+* `updated_at` - The Unix datetime that the index was last updated.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### `index_statistics`
+
+An `index_statistics` block supports the following attributes:
+
+* `faq_statistics` - A block that specifies the number of question and answer topics in the index. [Detailed below](#faq_statistics).
+* `text_document_statistics` - A block that specifies the number of text documents indexed. [Detailed below](#text_document_statistics).
+
+#### `faq_statistics`
+
+A `faq_statistics` block supports the following attributes:
+
+* `indexed_question_answers_count` - The total number of FAQ questions and answers contained in the index.
+
+#### `text_document_statistics`
+
+A `text_document_statistics` block supports the following attributes:
+
+* `indexed_text_bytes` - The total size, in bytes, of the indexed documents.
+* `indexed_text_documents_count` - The number of text documents indexed.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Kendra Indexes using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Amazon Kendra Indexes using the `id`. For example:
+
+```console
+% terraform import aws_kendra_index.example 12345678-1234-5678-9123-123456789123
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kendra_query_suggestions_block_list.html.markdown b/website/docs/cdktf/python/r/kendra_query_suggestions_block_list.html.markdown
new file mode 100644
index 00000000000..0edb4c92c3c
--- /dev/null
+++ b/website/docs/cdktf/python/r/kendra_query_suggestions_block_list.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_query_suggestions_block_list"
+description: |-
+  Terraform resource for managing an AWS Kendra block list used for query suggestions for an index
+---
+
+
+
+# Resource: aws_kendra_query_suggestions_block_list
+
+Use the `aws_kendra_query_suggestions_block_list` resource to manage an AWS Kendra block list used for query suggestions for an index.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.kendra_query_suggestions_block_list import KendraQuerySuggestionsBlockList +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KendraQuerySuggestionsBlockList(self, "example", + index_id=Token.as_string(aws_kendra_index_example.id), + name="Example", + role_arn=Token.as_string(aws_iam_role_example.arn), + source_s3_path=KendraQuerySuggestionsBlockListSourceS3Path( + bucket=Token.as_string(aws_s3_bucket_example.id), + key="example/suggestions.txt" + ), + tags={ + "Name": "Example Kendra Index" + } + ) +``` + +## Argument Reference + +The following arguments are required: + +* `index_id` - (Required, Forces New Resource) Identifier of the index for a block list. +* `name` - (Required) Name for the block list. +* `role_arn` - (Required) IAM (Identity and Access Management) role used to access the block list text file in S3. +* `source_s3_path` - (Required) S3 path where your block list text file is located. See details below. + +The `source_s3_path` configuration block supports the following arguments: + +* `bucket` - (Required) Name of the S3 bucket that contains the file. +* `key` - (Required) Name of the file. + +The following arguments are optional: + +* `description` - (Optional) Description for a block list. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block), tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the block list. +* `query_suggestions_block_list_id` - Unique identifier of the block list. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider's [default_tags configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +Configuration options for operation timeouts can be found [here](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts). + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the `aws_kendra_query_suggestions_block_list` resource using the unique identifiers of the block list and index separated by a slash (`/`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import the `aws_kendra_query_suggestions_block_list` resource using the unique identifiers of the block list and index separated by a slash (`/`). 
For example:
+
+```console
+% terraform import aws_kendra_query_suggestions_block_list.example blocklist-123456780/idx-8012925589
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kendra_thesaurus.html.markdown b/website/docs/cdktf/python/r/kendra_thesaurus.html.markdown
new file mode 100644
index 00000000000..8f7f4f005db
--- /dev/null
+++ b/website/docs/cdktf/python/r/kendra_thesaurus.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_thesaurus"
+description: |-
+  Terraform resource for managing an AWS Kendra Thesaurus.
+---
+
+
+
+# Resource: aws_kendra_thesaurus
+
+Terraform resource for managing an AWS Kendra Thesaurus.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kendra_thesaurus import KendraThesaurus
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        KendraThesaurus(self, "example",
+            index_id=Token.as_string(aws_kendra_index_example.id),
+            name="Example",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            source_s3_path=KendraThesaurusSourceS3Path(
+                bucket=Token.as_string(aws_s3_bucket_example.id),
+                key=Token.as_string(aws_s3_object_example.key)
+            ),
+            tags={
+                "Name": "Example Kendra Thesaurus"
+            }
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `index_id` - (Required, Forces new resource) The identifier of the index for a thesaurus.
+* `name` - (Required) The name for the thesaurus.
+* `role_arn` - (Required) The IAM (Identity and Access Management) role used to access the thesaurus file in S3.
+* `source_s3_path` - (Required) The S3 location of the thesaurus file. Detailed below.
+
+The `source_s3_path` configuration block supports the following arguments:
+
+* `bucket` - (Required) The name of the S3 bucket that contains the file.
+* `key` - (Required) The name of the file.
+
+The following arguments are optional:
+
+* `description` - (Optional) The description for a thesaurus.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the thesaurus.
+* `id` - The unique identifiers of the thesaurus and index separated by a slash (`/`).
+* `status` - The current status of the thesaurus.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_kendra_thesaurus` using the unique identifiers of the thesaurus and index separated by a slash (`/`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_kendra_thesaurus` using the unique identifiers of the thesaurus and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_thesaurus.example thesaurus-123456780/idx-8012925589 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/key_pair.html.markdown b/website/docs/cdktf/python/r/key_pair.html.markdown new file mode 100644 index 00000000000..545b921b3f4 --- /dev/null +++ b/website/docs/cdktf/python/r/key_pair.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_key_pair" +description: |- + Provides a Key Pair resource. Currently this supports importing an existing key pair but not creating a new key pair. +--- + + + +# Resource: aws_key_pair + +Provides an [EC2 key pair](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html) resource. A key pair is used to control login access to EC2 instances. + +Currently this resource requires an existing user-supplied key pair. This key pair's public key will be registered with AWS to allow logging-in to EC2 instances. + +When importing an existing key pair the public key material may be in any format supported by AWS. Supported formats (per the [AWS documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html#how-to-generate-your-own-key-and-import-it-to-aws)) are: + +* OpenSSH public key format (the format in ~/.ssh/authorized_keys) +* Base64 encoded DER format +* SSH public key file format as specified in RFC4716 + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.key_pair import KeyPair +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KeyPair(self, "deployer", + key_name="deployer-key", + public_key="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 email@example.com" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `key_name` - (Optional) The name for the key pair. If neither `key_name` nor `key_name_prefix` is provided, Terraform will create a unique key name using the prefix `terraform-`. 
+* `key_name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `key_name`. If neither `key_name` nor `key_name_prefix` is provided, Terraform will create a unique key name using the prefix `terraform-`. +* `public_key` - (Required) The public key material. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The key pair name. +* `arn` - The key pair ARN. +* `key_name` - The key pair name. +* `key_pair_id` - The key pair ID. +* `key_type` - The type of key pair. +* `fingerprint` - The MD5 public key fingerprint as specified in section 4 of RFC 4716. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Key Pairs using the `key_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Key Pairs using the `key_name`. For example: + +```console +% terraform import aws_key_pair.deployer deployer-key +``` + +~> **NOTE:** The AWS API does not include the public key in the response, so `terraform apply` will attempt to replace the key pair. There is currently no supported workaround for this limitation. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/keyspaces_keyspace.html.markdown b/website/docs/cdktf/python/r/keyspaces_keyspace.html.markdown new file mode 100644 index 00000000000..64d4c410ab2 --- /dev/null +++ b/website/docs/cdktf/python/r/keyspaces_keyspace.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "Keyspaces (for Apache Cassandra)" +layout: "aws" +page_title: "AWS: aws_keyspaces_keyspace" +description: |- + Provides a Keyspaces Keyspace. +--- + + + +# Resource: aws_keyspaces_keyspace + +Provides a Keyspaces Keyspace. + +More information about keyspaces can be found in the [Keyspaces User Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/what-is-keyspaces.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.keyspaces_keyspace import KeyspacesKeyspace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KeyspacesKeyspace(self, "example", + name="my_keyspace" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required, Forces new resource) The name of the keyspace to be created. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the keyspace. +* `arn` - The ARN of the keyspace. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `1m`) +- `delete` - (Default `1m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a keyspace using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a keyspace using the `name`. For example: + +```console +% terraform import aws_keyspaces_keyspace.example my_keyspace +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/keyspaces_table.html.markdown b/website/docs/cdktf/python/r/keyspaces_table.html.markdown new file mode 100644 index 00000000000..cae2b2fd6d9 --- /dev/null +++ b/website/docs/cdktf/python/r/keyspaces_table.html.markdown @@ -0,0 +1,153 @@ +--- +subcategory: "Keyspaces (for Apache Cassandra)" +layout: "aws" +page_title: "AWS: aws_keyspaces_table" +description: |- + Provides a Keyspaces Table. +--- + + + +# Resource: aws_keyspaces_table + +Provides a Keyspaces Table. + +More information about Keyspaces tables can be found in the [Keyspaces Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/working-with-tables.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.keyspaces_table import KeyspacesTable +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KeyspacesTable(self, "example", + keyspace_name=Token.as_string(aws_keyspaces_keyspace_example.name), + schema_definition=KeyspacesTableSchemaDefinition( + column=[KeyspacesTableSchemaDefinitionColumn( + name="Message", + type="ASCII" + ) + ], + partition_key=[KeyspacesTableSchemaDefinitionPartitionKey( + name="Message" + ) + ] + ), + table_name="my_table" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `keyspace_name` - (Required) The name of the keyspace that the table is going to be created in. +* `table_name` - (Required) The name of the table. + +The following arguments are optional: + +* `capacity_specification` - (Optional) Specifies the read/write throughput capacity mode for the table. +* `client_side_timestamps` - (Optional) Enables client-side timestamps for the table. By default, the setting is disabled. 
+* `comment` - (Optional) A description of the table.
+* `default_time_to_live` - (Optional) The default Time to Live setting in seconds for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/TTL-how-it-works.html#ttl-howitworks_default_ttl).
+* `encryption_specification` - (Optional) Specifies how the encryption key for encryption at rest is managed for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/EncryptionAtRest.html).
+* `point_in_time_recovery` - (Optional) Specifies if point-in-time recovery is enabled or disabled for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/PointInTimeRecovery.html).
+* `schema_definition` - (Optional) Describes the schema of the table.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `ttl` - (Optional) Enables Time to Live custom settings for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/TTL.html).
+
+The `capacity_specification` object takes the following arguments:
+
+* `read_capacity_units` - (Optional) The throughput capacity specified for read operations defined in read capacity units (RCUs).
+* `throughput_mode` - (Optional) The read/write throughput capacity mode for a table. Valid values: `PAY_PER_REQUEST`, `PROVISIONED`. The default value is `PAY_PER_REQUEST`.
+* `write_capacity_units` - (Optional) The throughput capacity specified for write operations defined in write capacity units (WCUs).
+
+The `client_side_timestamps` object takes the following arguments:
+
+* `status` - (Required) Enables client-side timestamps for the specified table. Valid values: `ENABLED`.
+
+The `comment` object takes the following arguments:
+
+* `message` - (Required) A description of the table.
+
+The `encryption_specification` object takes the following arguments:
+
+* `kms_key_identifier` - (Optional) The Amazon Resource Name (ARN) of the customer managed KMS key.
+* `type` - (Optional) The encryption option specified for the table. Valid values: `AWS_OWNED_KMS_KEY`, `CUSTOMER_MANAGED_KMS_KEY`. The default value is `AWS_OWNED_KMS_KEY`.
+
+The `point_in_time_recovery` object takes the following arguments:
+
+* `status` - (Optional) Valid values: `ENABLED`, `DISABLED`. The default value is `DISABLED`.
+
+The `schema_definition` object takes the following arguments:
+
+* `column` - (Required) The regular columns of the table.
+* `partition_key` - (Required) The columns that are part of the partition key of the table.
+* `clustering_key` - (Optional) The columns that are part of the clustering key of the table.
+* `static_column` - (Optional) The columns that have been defined as `STATIC`. Static columns store values that are shared by all rows in the same partition.
+
+The `column` object takes the following arguments:
+
+* `name` - (Required) The name of the column.
+* `type` - (Required) The data type of the column. See the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/cql.elements.html#cql.data-types) for a list of available data types.
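+
+To illustrate how these blocks fit together, here is a minimal, hypothetical sketch of a `schema_definition` combining regular columns with the `partition_key`, `clustering_key`, and `static_column` blocks described next. The `...ClusteringKey` and `...StaticColumn` struct names are assumed by analogy with the generated classes shown in the example above:
+
+```python
+from cdktf import TerraformStack
+from imports.aws.keyspaces_table import KeyspacesTable
+
+class SchemaSketchStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # A hypothetical chat-message table: one partition per user, rows
+        # ordered newest-first, and a per-partition (static) display name.
+        KeyspacesTable(self, "example",
+            keyspace_name="my_keyspace",
+            table_name="messages_by_user",
+            schema_definition=KeyspacesTableSchemaDefinition(
+                column=[
+                    KeyspacesTableSchemaDefinitionColumn(name="user_id", type="TEXT"),
+                    KeyspacesTableSchemaDefinitionColumn(name="sent_at", type="TIMESTAMP"),
+                    KeyspacesTableSchemaDefinitionColumn(name="message", type="TEXT"),
+                    KeyspacesTableSchemaDefinitionColumn(name="display_name", type="TEXT")
+                ],
+                partition_key=[KeyspacesTableSchemaDefinitionPartitionKey(name="user_id")],
+                clustering_key=[KeyspacesTableSchemaDefinitionClusteringKey(
+                    name="sent_at",
+                    order_by="DESC"
+                )],
+                static_column=[KeyspacesTableSchemaDefinitionStaticColumn(name="display_name")]
+            )
+        )
+```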
+ +The `partition_key` object takes the following arguments: + +* `name` - (Required) The name of the partition key column. + +The `clustering_key` object takes the following arguments: + +* `name` - (Required) The name of the clustering key column. +* `order_by` - (Required) The order modifier. Valid values: `ASC`, `DESC`. + +The `static_column` object takes the following arguments: + +* `name` - (Required) The name of the static column. + +The `ttl` object takes the following arguments: + +* `status` - (Optional) Valid values: `ENABLED`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the table. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `update` - (Default `30m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a table using the `keyspace_name` and `table_name` separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a table using the `keyspace_name` and `table_name` separated by `/`. For example: + +```console +% terraform import aws_keyspaces_table.example my_keyspace/my_table +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kinesis_analytics_application.html.markdown b/website/docs/cdktf/python/r/kinesis_analytics_application.html.markdown new file mode 100644 index 00000000000..c6d990f3af4 --- /dev/null +++ b/website/docs/cdktf/python/r/kinesis_analytics_application.html.markdown @@ -0,0 +1,394 @@ +--- +subcategory: "Kinesis Analytics" +layout: "aws" +page_title: "AWS: aws_kinesis_analytics_application" +description: |- + Provides a AWS Kinesis Analytics Application +--- + + + +# Resource: aws_kinesis_analytics_application + +Provides a Kinesis Analytics Application resource. Kinesis Analytics is a managed service that +allows processing and analyzing streaming data using standard SQL. + +For more details, see the [Amazon Kinesis Analytics Documentation][1]. + +-> **Note:** To manage Amazon Kinesis Data Analytics for Apache Flink applications, use the [`aws_kinesisanalyticsv2_application`](/docs/providers/aws/r/kinesisanalyticsv2_application.html) resource. + +## Example Usage + +### Kinesis Stream Input + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
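+# NOTE: `test` referenced below is assumed to be a pre-existing IAM role
+# (`aws_iam_role.test` in the original HCL) that is not shown in this example.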
+# +from imports.aws.kinesis_analytics_application import KinesisAnalyticsApplication +from imports.aws.kinesis_stream import KinesisStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_stream = KinesisStream(self, "test_stream", + name="terraform-kinesis-test", + shard_count=1 + ) + KinesisAnalyticsApplication(self, "test_application", + inputs=KinesisAnalyticsApplicationInputs( + kinesis_stream=KinesisAnalyticsApplicationInputsKinesisStream( + resource_arn=test_stream.arn, + role_arn=test.arn + ), + name_prefix="test_prefix", + parallelism=KinesisAnalyticsApplicationInputsParallelism( + count=1 + ), + schema=KinesisAnalyticsApplicationInputsSchema( + record_columns=[KinesisAnalyticsApplicationInputsSchemaRecordColumns( + mapping="$.test", + name="test", + sql_type="VARCHAR(8)" + ) + ], + record_encoding="UTF-8", + record_format=KinesisAnalyticsApplicationInputsSchemaRecordFormat( + mapping_parameters=KinesisAnalyticsApplicationInputsSchemaRecordFormatMappingParameters( + json=KinesisAnalyticsApplicationInputsSchemaRecordFormatMappingParametersJson( + record_row_path="$" + ) + ) + ) + ) + ), + name="kinesis-analytics-application-test" + ) +``` + +### Starting An Application + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_log_group import CloudwatchLogGroup +from imports.aws.cloudwatch_log_stream import CloudwatchLogStream +from imports.aws.kinesis_analytics_application import KinesisAnalyticsApplication +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.kinesis_stream import KinesisStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CloudwatchLogGroup(self, "example", + name="analytics" + ) + aws_cloudwatch_log_stream_example = CloudwatchLogStream(self, "example_1", + log_group_name=example.name, + name="example-kinesis-application" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_cloudwatch_log_stream_example.override_logical_id("example") + aws_kinesis_firehose_delivery_stream_example = + KinesisFirehoseDeliveryStream(self, "example_2", + destination="extended_s3", + extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration( + bucket_arn=Token.as_string(aws_s3_bucket_example.arn), + role_arn=Token.as_string(aws_iam_role_example.arn) + ), + name="example-kinesis-delivery-stream" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kinesis_firehose_delivery_stream_example.override_logical_id("example") + aws_kinesis_stream_example = KinesisStream(self, "example_3", + name="example-kinesis-stream", + shard_count=1 + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_kinesis_stream_example.override_logical_id("example") + KinesisAnalyticsApplication(self, "test", + cloudwatch_logging_options=KinesisAnalyticsApplicationCloudwatchLoggingOptions( + log_stream_arn=Token.as_string(aws_cloudwatch_log_stream_example.arn), + role_arn=Token.as_string(aws_iam_role_example.arn) + ), + inputs=KinesisAnalyticsApplicationInputs( + kinesis_stream=KinesisAnalyticsApplicationInputsKinesisStream( + resource_arn=Token.as_string(aws_kinesis_stream_example.arn), + role_arn=Token.as_string(aws_iam_role_example.arn) + ), + name_prefix="example_prefix", + schema=KinesisAnalyticsApplicationInputsSchema( + record_columns=[KinesisAnalyticsApplicationInputsSchemaRecordColumns( + name="COLUMN_1", + sql_type="INTEGER" + ) + ], + record_format=KinesisAnalyticsApplicationInputsSchemaRecordFormat( + mapping_parameters=KinesisAnalyticsApplicationInputsSchemaRecordFormatMappingParameters( + csv=KinesisAnalyticsApplicationInputsSchemaRecordFormatMappingParametersCsv( + record_column_delimiter=",", + record_row_delimiter="|" + ) + ) + ) + ), + starting_position_configuration=[KinesisAnalyticsApplicationInputsStartingPositionConfiguration( + starting_position="NOW" + ) + ] + ), + name="example-application", + outputs=[KinesisAnalyticsApplicationOutputs( + kinesis_firehose=KinesisAnalyticsApplicationOutputsKinesisFirehose( + resource_arn=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn), + role_arn=Token.as_string(aws_iam_role_example.arn) + ), + name="OUTPUT_1", + schema=KinesisAnalyticsApplicationOutputsSchema( + record_format_type="CSV" + ) + ) + ], + start_application=True + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the Kinesis Analytics Application. +* `code` - (Optional) SQL Code to transform input data, and generate output. +* `description` - (Optional) Description of the application. +* `cloudwatch_logging_options` - (Optional) The CloudWatch log stream options to monitor application errors. +See [CloudWatch Logging Options](#cloudwatch-logging-options) below for more details. +* `inputs` - (Optional) Input configuration of the application. See [Inputs](#inputs) below for more details. +* `outputs` - (Optional) Output destination configuration of the application. See [Outputs](#outputs) below for more details. +* `reference_data_sources` - (Optional) An S3 Reference Data Source for the application. +See [Reference Data Sources](#reference-data-sources) below for more details. +* `start_application` - (Optional) Whether to start or stop the Kinesis Analytics Application. To start an application, an input with a defined `starting_position` must be configured. +To modify an application's starting position, first stop the application by setting `start_application = false`, then update `starting_position` and set `start_application = true`. +* `tags` - Key-value map of tags for the Kinesis Analytics Application. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### CloudWatch Logging Options + +Configure a CloudWatch Log Stream to monitor application errors. + +The `cloudwatch_logging_options` block supports the following: + +* `log_stream_arn` - (Required) The ARN of the CloudWatch Log Stream. +* `role_arn` - (Required) The ARN of the IAM Role used to send application messages. 
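+
+As a minimal sketch (assuming, as with the other structs in this document, that the options class is exported from the generated `kinesis_analytics_application` module), the block can be built from plain ARN strings before being passed as `cloudwatch_logging_options`:
+
+```python
+from imports.aws.kinesis_analytics_application import (
+    KinesisAnalyticsApplicationCloudwatchLoggingOptions
+)
+
+# Hypothetical, hard-coded ARNs stand in for real log stream and role resources.
+logging_options = KinesisAnalyticsApplicationCloudwatchLoggingOptions(
+    log_stream_arn="arn:aws:logs:us-east-1:123456789012:log-group:analytics:log-stream:example",
+    role_arn="arn:aws:iam::123456789012:role/firehose_test_role"
+)
+```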
+ +### Inputs + +Configure an Input for the Kinesis Analytics Application. You can only have 1 Input configured. + +The `inputs` block supports the following: + +* `name_prefix` - (Required) The Name Prefix to use when creating an in-application stream. +* `schema` - (Required) The Schema format of the data in the streaming source. See [Source Schema](#source-schema) below for more details. +* `kinesis_firehose` - (Optional) The Kinesis Firehose configuration for the streaming source. Conflicts with `kinesis_stream`. +See [Kinesis Firehose](#kinesis-firehose) below for more details. +* `kinesis_stream` - (Optional) The Kinesis Stream configuration for the streaming source. Conflicts with `kinesis_firehose`. +See [Kinesis Stream](#kinesis-stream) below for more details. +* `parallelism` - (Optional) The number of Parallel in-application streams to create. +See [Parallelism](#parallelism) below for more details. +* `processing_configuration` - (Optional) The Processing Configuration to transform records as they are received from the stream. +See [Processing Configuration](#processing-configuration) below for more details. +* `starting_position_configuration` (Optional) The point at which the application starts processing records from the streaming source. +See [Starting Position Configuration](#starting-position-configuration) below for more details. + +### Outputs + +Configure Output destinations for the Kinesis Analytics Application. You can have a maximum of 3 destinations configured. + +The `outputs` block supports the following: + +* `name` - (Required) The Name of the in-application stream. +* `schema` - (Required) The Schema format of the data written to the destination. See [Destination Schema](#destination-schema) below for more details. +* `kinesis_firehose` - (Optional) The Kinesis Firehose configuration for the destination stream. Conflicts with `kinesis_stream`. +See [Kinesis Firehose](#kinesis-firehose) below for more details. +* `kinesis_stream` - (Optional) The Kinesis Stream configuration for the destination stream. Conflicts with `kinesis_firehose`. +See [Kinesis Stream](#kinesis-stream) below for more details. +* `lambda` - (Optional) The Lambda function destination. See [Lambda](#lambda) below for more details. + +### Reference Data Sources + +Add a Reference Data Source to the Kinesis Analytics Application. You can only have 1 Reference Data Source. + +The `reference_data_sources` block supports the following: + +* `schema` - (Required) The Schema format of the data in the streaming source. See [Source Schema](#source-schema) below for more details. +* `table_name` - (Required) The in-application Table Name. +* `s3` - (Optional) The S3 configuration for the reference data source. See [S3 Reference](#s3-reference) below for more details. + +#### Kinesis Firehose + +Configuration for a Kinesis Firehose delivery stream. + +The `kinesis_firehose` block supports the following: + +* `resource_arn` - (Required) The ARN of the Kinesis Firehose delivery stream. +* `role_arn` - (Required) The ARN of the IAM Role used to access the stream. + +#### Kinesis Stream + +Configuration for a Kinesis Stream. + +The `kinesis_stream` block supports the following: + +* `resource_arn` - (Required) The ARN of the Kinesis Stream. +* `role_arn` - (Required) The ARN of the IAM Role used to access the stream. + +#### Destination Schema + +The Schema format of the data in the destination. 
+ +The `schema` block supports the following: + +* `record_format_type` - (Required) The Format Type of the records on the output stream. Can be `CSV` or `JSON`. + +#### Source Schema + +The Schema format of the data in the streaming source. + +The `schema` block supports the following: + +* `record_columns` - (Required) The Record Column mapping for the streaming source data element. +See [Record Columns](#record-columns) below for more details. +* `record_format` - (Required) The Record Format and mapping information to schematize a record. +See [Record Format](#record-format) below for more details. +* `record_encoding` - (Optional) The Encoding of the record in the streaming source. + +#### Parallelism + +Configures the number of Parallel in-application streams to create. + +The `parallelism` block supports the following: + +* `count` - (Required) The Count of streams. + +#### Processing Configuration + +The Processing Configuration to transform records as they are received from the stream. + +The `processing_configuration` block supports the following: + +* `lambda` - (Required) The Lambda function configuration. See [Lambda](#lambda) below for more details. + +#### Lambda + +The Lambda function that pre-processes records in the stream. + +The `lambda` block supports the following: + +* `resource_arn` - (Required) The ARN of the Lambda function. +* `role_arn` - (Required) The ARN of the IAM Role used to access the Lambda function. + +#### Starting Position Configuration + +The point at which the application reads from the streaming source. + +The `starting_position_configuration` block supports the following: + +* `starting_position` - (Required) The starting position on the stream. Valid values: `LAST_STOPPED_POINT`, `NOW`, `TRIM_HORIZON`. + +#### Record Columns + +The Column mapping of each data element in the streaming source to the corresponding column in the in-application stream. + +The `record_columns` block supports the following: + +* `name` - (Required) Name of the column. +* `sql_type` - (Required) The SQL Type of the column. +* `mapping` - (Optional) The Mapping reference to the data element. + +#### Record Format + +The Record Format and relevant mapping information that should be applied to schematize the records on the stream. + +The `record_format` block supports the following: + +* `record_format_type` - (Required) The type of Record Format. Can be `CSV` or `JSON`. +* `mapping_parameters` - (Optional) The Mapping Information for the record format. +See [Mapping Parameters](#mapping-parameters) below for more details. + +#### Mapping Parameters + +Provides Mapping information specific to the record format on the streaming source. + +The `mapping_parameters` block supports the following: + +* `csv` - (Optional) Mapping information when the record format uses delimiters. +See [CSV Mapping Parameters](#csv-mapping-parameters) below for more details. +* `json` - (Optional) Mapping information when JSON is the record format on the streaming source. +See [JSON Mapping Parameters](#json-mapping-parameters) below for more details. + +#### CSV Mapping Parameters + +Mapping information when the record format uses delimiters. + +The `csv` block supports the following: + +* `record_column_delimiter` - (Required) The Column Delimiter. +* `record_row_delimiter` - (Required) The Row Delimiter. + +#### JSON Mapping Parameters + +Mapping information when JSON is the record format on the streaming source. 
+
+The `json` block supports the following:
+
+* `record_row_path` - (Required) Path to the top-level parent that contains the records.
+
+#### S3 Reference
+
+Identifies the S3 bucket and object that contains the reference data.
+
+The `s3` block supports the following:
+
+* `bucket_arn` - (Required) The S3 Bucket ARN.
+* `file_key` - (Required) The File Key name containing reference data.
+* `role_arn` - (Required) The IAM Role ARN to read the data.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Kinesis Analytics Application.
+* `arn` - The ARN of the Kinesis Analytics Application.
+* `create_timestamp` - The Timestamp when the application version was created.
+* `last_update_timestamp` - The Timestamp when the application was last updated.
+* `status` - The Status of the application.
+* `version` - The Version of the application.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+[1]: https://docs.aws.amazon.com/kinesisanalytics/latest/dev/what-is.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Kinesis Analytics Application using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a Kinesis Analytics Application using the ARN. For example:
+
+```console
+% terraform import aws_kinesis_analytics_application.example arn:aws:kinesisanalytics:us-west-2:1234567890:application/example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kinesis_firehose_delivery_stream.html.markdown b/website/docs/cdktf/python/r/kinesis_firehose_delivery_stream.html.markdown
new file mode 100644
index 00000000000..b75397e4faf
--- /dev/null
+++ b/website/docs/cdktf/python/r/kinesis_firehose_delivery_stream.html.markdown
@@ -0,0 +1,903 @@
+---
+subcategory: "Kinesis Firehose"
+layout: "aws"
+page_title: "AWS: aws_kinesis_firehose_delivery_stream"
+description: |-
+  Provides an AWS Kinesis Firehose Delivery Stream
+---
+
+
+
+# Resource: aws_kinesis_firehose_delivery_stream
+
+Provides a Kinesis Firehose Delivery Stream resource. Amazon Kinesis Firehose is a fully managed, elastic service to easily deliver real-time data streams to destinations such as Amazon S3 and Amazon Redshift.
+
+For more details, see the [Amazon Kinesis Firehose Documentation][1].
+
+## Example Usage
+
+### Extended S3 Destination
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.lambda_function import LambdaFunction +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bucket = S3Bucket(self, "bucket", + bucket="tf-test-bucket" + ) + S3BucketAcl(self, "bucket_acl", + acl="private", + bucket=bucket.id + ) + firehose_assume_role = DataAwsIamPolicyDocument(self, "firehose_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["firehose.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + lambda_assume_role = DataAwsIamPolicyDocument(self, "lambda_assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["lambda.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + firehose_role = IamRole(self, "firehose_role", + assume_role_policy=Token.as_string(firehose_assume_role.json), + name="firehose_test_role" + ) + lambda_iam = IamRole(self, "lambda_iam", + assume_role_policy=Token.as_string(lambda_assume_role.json), + name="lambda_iam" + ) + lambda_processor = LambdaFunction(self, "lambda_processor", + filename="lambda.zip", + function_name="firehose_lambda_processor", + handler="exports.handler", + role=lambda_iam.arn, + runtime="nodejs16.x" + ) + KinesisFirehoseDeliveryStream(self, "extended_s3_stream", + destination="extended_s3", + extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration( + bucket_arn=bucket.arn, + processing_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfiguration( + enabled=Token.as_boolean("true"), + processors=[KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessors( + parameters=[KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="LambdaArn", + parameter_value="${" + lambda_processor.arn + "}:$LATEST" + ) + ], + type="Lambda" + ) + ] + ), + role_arn=firehose_role.arn + ), + name="terraform-kinesis-firehose-extended-s3-test-stream" + ) +``` + +### Extended S3 Destination with dynamic partitioning + +These examples use built-in Firehose functionality, rather than requiring a lambda. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
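+# NOTE: `bucket` and `firehose_role` below are assumed to be the S3 bucket and
+# IAM role defined in the Extended S3 example above.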
+# +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KinesisFirehoseDeliveryStream(self, "extended_s3_stream", + destination="extended_s3", + extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration( + bucket_arn=bucket.arn, + buffering_size=64, + dynamic_partitioning_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDynamicPartitioningConfiguration( + enabled=Token.as_boolean("true") + ), + error_output_prefix="errors/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/!{firehose:error-output-type}/", + prefix="data/customer_id=!{partitionKeyFromQuery:customer_id}/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/", + processing_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfiguration( + enabled=Token.as_boolean("true"), + processors=[KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessors( + parameters=[KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="SubRecordType", + parameter_value="JSON" + ) + ], + type="RecordDeAggregation" + ), KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessors( + type="AppendDelimiterToRecord" + ), KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessors( + parameters=[KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="JsonParsingEngine", + parameter_value="JQ-1.6" + ), KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="MetadataExtractionQuery", + parameter_value="{customer_id:.customer_id}" + ) + ], + type="MetadataExtraction" + ) + ] + ), + role_arn=firehose_role.arn + ), + name="terraform-kinesis-firehose-extended-s3-test-stream" + ) +``` + +Multiple Dynamic Partitioning Keys (maximum of 50) can be added by comma separating the `parameter_value`. + +The following example adds the Dynamic Partitioning Keys: `store_id` and `customer_id` to the S3 prefix. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KinesisFirehoseDeliveryStream(self, "extended_s3_stream", + destination="extended_s3", + extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration( + bucket_arn=bucket.arn, + buffering_size=64, + dynamic_partitioning_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDynamicPartitioningConfiguration( + enabled=Token.as_boolean("true") + ), + error_output_prefix="errors/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/!{firehose:error-output-type}/", + prefix="data/store_id=!{partitionKeyFromQuery:store_id}/customer_id=!{partitionKeyFromQuery:customer_id}/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/", + processing_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfiguration( + enabled=Token.as_boolean("true"), + processors=[KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessors( + parameters=[KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="JsonParsingEngine", + parameter_value="JQ-1.6" + ), KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="MetadataExtractionQuery", + parameter_value="{store_id:.store_id,customer_id:.customer_id}" + ) + ], + type="MetadataExtraction" + ) + ] + ), + role_arn=firehose_role.arn + ), + name="terraform-kinesis-firehose-extended-s3-test-stream" + ) +``` + +### Redshift Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.redshift_cluster import RedshiftCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_cluster = RedshiftCluster(self, "test_cluster", + cluster_identifier="tf-redshift-cluster", + cluster_type="single-node", + database_name="test", + master_password="T3stPass", + master_username="testuser", + node_type="dc1.large" + ) + KinesisFirehoseDeliveryStream(self, "test_stream", + destination="redshift", + name="terraform-kinesis-firehose-test-stream", + redshift_configuration=KinesisFirehoseDeliveryStreamRedshiftConfiguration( + cluster_jdbcurl="jdbc:redshift://${" + test_cluster.endpoint + "}/${" + test_cluster.database_name + "}", + copy_options="delimiter '|'", + data_table_columns="test-col", + data_table_name="test-table", + password="T3stPass", + role_arn=firehose_role.arn, + s3_backup_configuration=KinesisFirehoseDeliveryStreamRedshiftConfigurationS3BackupConfiguration( + bucket_arn=bucket.arn, + buffering_interval=300, + buffering_size=15, + compression_format="GZIP", + role_arn=firehose_role.arn + ), + s3_backup_mode="Enabled", + s3_configuration=KinesisFirehoseDeliveryStreamRedshiftConfigurationS3Configuration( + bucket_arn=bucket.arn, + buffering_interval=400, + buffering_size=10, + compression_format="GZIP", + role_arn=firehose_role.arn + ), + username="testuser" + ) + ) +``` + +### Elasticsearch Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elasticsearch_domain import ElasticsearchDomain +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_cluster = ElasticsearchDomain(self, "test_cluster", + domain_name="firehose-es-test" + ) + KinesisFirehoseDeliveryStream(self, "test_stream", + destination="elasticsearch", + elasticsearch_configuration=KinesisFirehoseDeliveryStreamElasticsearchConfiguration( + domain_arn=test_cluster.arn, + index_name="test", + processing_configuration=KinesisFirehoseDeliveryStreamElasticsearchConfigurationProcessingConfiguration( + enabled=Token.as_boolean("true"), + processors=[KinesisFirehoseDeliveryStreamElasticsearchConfigurationProcessingConfigurationProcessors( + parameters=[KinesisFirehoseDeliveryStreamElasticsearchConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="LambdaArn", + parameter_value="${" + lambda_processor.arn + "}:$LATEST" + ) + ], + type="Lambda" + ) + ] + ), + role_arn=firehose_role.arn, + s3_configuration=KinesisFirehoseDeliveryStreamElasticsearchConfigurationS3Configuration( + bucket_arn=bucket.arn, + buffering_interval=400, + buffering_size=10, + compression_format="GZIP", + role_arn=firehose_role.arn + ), + type_name="test" + ), + name="terraform-kinesis-firehose-test-stream" + ) +``` + +### Elasticsearch Destination With VPC + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
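+# NOTE: `firehose` (an IAM role), `bucket`, and the `first`/`second` security
+# group and subnets referenced below are assumed to be defined elsewhere.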
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.elasticsearch_domain import ElasticsearchDomain +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_cluster = ElasticsearchDomain(self, "test_cluster", + cluster_config=ElasticsearchDomainClusterConfig( + instance_count=2, + instance_type="t2.small.elasticsearch", + zone_awareness_enabled=True + ), + domain_name="es-test", + ebs_options=ElasticsearchDomainEbsOptions( + ebs_enabled=True, + volume_size=10 + ), + vpc_options=ElasticsearchDomainVpcOptions( + security_group_ids=[first.id], + subnet_ids=[Token.as_string(aws_subnet_first.id), second.id] + ) + ) + firehose_elasticsearch = DataAwsIamPolicyDocument(self, "firehose-elasticsearch", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["es:*"], + effect="Allow", + resources=[test_cluster.arn, "${" + test_cluster.arn + "}/*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["ec2:DescribeVpcs", "ec2:DescribeVpcAttribute", "ec2:DescribeSubnets", "ec2:DescribeSecurityGroups", "ec2:DescribeNetworkInterfaces", "ec2:CreateNetworkInterface", "ec2:CreateNetworkInterfacePermission", "ec2:DeleteNetworkInterface" + ], + effect="Allow", + resources=["*"] + ) + ] + ) + aws_iam_role_policy_firehose_elasticsearch = IamRolePolicy(self, "firehose-elasticsearch_2", + name="elasticsearch", + policy=Token.as_string(firehose_elasticsearch.json), + role=firehose.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_firehose_elasticsearch.override_logical_id("firehose-elasticsearch") + KinesisFirehoseDeliveryStream(self, "test", + depends_on=[aws_iam_role_policy_firehose_elasticsearch], + destination="elasticsearch", + elasticsearch_configuration=KinesisFirehoseDeliveryStreamElasticsearchConfiguration( + domain_arn=test_cluster.arn, + index_name="test", + role_arn=firehose.arn, + s3_configuration=KinesisFirehoseDeliveryStreamElasticsearchConfigurationS3Configuration( + bucket_arn=bucket.arn, + role_arn=firehose.arn + ), + type_name="test", + vpc_config=KinesisFirehoseDeliveryStreamElasticsearchConfigurationVpcConfig( + role_arn=firehose.arn, + security_group_ids=[first.id], + subnet_ids=[Token.as_string(aws_subnet_first.id), second.id] + ) + ), + name="terraform-kinesis-firehose-es" + ) +``` + +### Opensearch Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.opensearch_domain import OpensearchDomain +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_cluster = OpensearchDomain(self, "test_cluster", + domain_name="firehose-os-test" + ) + KinesisFirehoseDeliveryStream(self, "test_stream", + destination="opensearch", + name="terraform-kinesis-firehose-test-stream", + opensearch_configuration=KinesisFirehoseDeliveryStreamOpensearchConfiguration( + domain_arn=test_cluster.arn, + index_name="test", + processing_configuration=KinesisFirehoseDeliveryStreamOpensearchConfigurationProcessingConfiguration( + enabled=Token.as_boolean("true"), + processors=[KinesisFirehoseDeliveryStreamOpensearchConfigurationProcessingConfigurationProcessors( + parameters=[KinesisFirehoseDeliveryStreamOpensearchConfigurationProcessingConfigurationProcessorsParameters( + parameter_name="LambdaArn", + parameter_value="${" + lambda_processor.arn + "}:$LATEST" + ) + ], + type="Lambda" + ) + ] + ), + role_arn=firehose_role.arn, + s3_configuration=KinesisFirehoseDeliveryStreamOpensearchConfigurationS3Configuration( + bucket_arn=bucket.arn, + buffering_interval=400, + buffering_size=10, + compression_format="GZIP", + role_arn=firehose_role.arn + ) + ) + ) +``` + +### Opensearch Destination With VPC + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.opensearch_domain import OpensearchDomain +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_cluster = OpensearchDomain(self, "test_cluster", + cluster_config=OpensearchDomainClusterConfig( + instance_count=2, + instance_type="m4.large.search", + zone_awareness_enabled=True + ), + domain_name="es-test", + ebs_options=OpensearchDomainEbsOptions( + ebs_enabled=True, + volume_size=10 + ), + vpc_options=OpensearchDomainVpcOptions( + security_group_ids=[first.id], + subnet_ids=[Token.as_string(aws_subnet_first.id), second.id] + ) + ) + firehose_opensearch = IamRolePolicy(self, "firehose-opensearch", + name="opensearch", + policy="{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Effect\": \"Allow\",\n \"Action\": [\n \"es:*\"\n ],\n \"Resource\": [\n \"${" + test_cluster.arn + "}\",\n \"${" + test_cluster.arn + "}/*\"\n ]\n },\n {\n \"Effect\": \"Allow\",\n \"Action\": [\n \"ec2:DescribeVpcs\",\n \"ec2:DescribeVpcAttribute\",\n \"ec2:DescribeSubnets\",\n \"ec2:DescribeSecurityGroups\",\n \"ec2:DescribeNetworkInterfaces\",\n \"ec2:CreateNetworkInterface\",\n \"ec2:CreateNetworkInterfacePermission\",\n \"ec2:DeleteNetworkInterface\"\n ],\n \"Resource\": [\n \"*\"\n ]\n }\n ]\n}\n\n", + role=firehose.id + ) + KinesisFirehoseDeliveryStream(self, "test", + depends_on=[firehose_opensearch], + destination="opensearch", + name="terraform-kinesis-firehose-os", + opensearch_configuration=KinesisFirehoseDeliveryStreamOpensearchConfiguration( + domain_arn=test_cluster.arn, + index_name="test", + role_arn=firehose.arn, + s3_configuration=KinesisFirehoseDeliveryStreamOpensearchConfigurationS3Configuration( + 
bucket_arn=bucket.arn, + role_arn=firehose.arn + ), + vpc_config=KinesisFirehoseDeliveryStreamOpensearchConfigurationVpcConfig( + role_arn=firehose.arn, + security_group_ids=[first.id], + subnet_ids=[Token.as_string(aws_subnet_first.id), second.id] + ) + ) + ) +``` + +### Splunk Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KinesisFirehoseDeliveryStream(self, "test_stream", + destination="splunk", + name="terraform-kinesis-firehose-test-stream", + splunk_configuration=KinesisFirehoseDeliveryStreamSplunkConfiguration( + hec_acknowledgment_timeout=600, + hec_endpoint="https://http-inputs-mydomain.splunkcloud.com:443", + hec_endpoint_type="Event", + hec_token="51D4DA16-C61B-4F5F-8EC7-ED4301342A4A", + s3_backup_mode="FailedEventsOnly", + s3_configuration=KinesisFirehoseDeliveryStreamSplunkConfigurationS3Configuration( + bucket_arn=bucket.arn, + buffering_interval=400, + buffering_size=10, + compression_format="GZIP", + role_arn=firehose.arn + ) + ) + ) +``` + +### HTTP Endpoint (e.g., New Relic) Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KinesisFirehoseDeliveryStream(self, "test_stream", + destination="http_endpoint", + http_endpoint_configuration=KinesisFirehoseDeliveryStreamHttpEndpointConfiguration( + access_key="my-key", + buffering_interval=600, + buffering_size=15, + name="New Relic", + request_configuration=KinesisFirehoseDeliveryStreamHttpEndpointConfigurationRequestConfiguration( + common_attributes=[KinesisFirehoseDeliveryStreamHttpEndpointConfigurationRequestConfigurationCommonAttributes( + name="testname", + value="testvalue" + ), KinesisFirehoseDeliveryStreamHttpEndpointConfigurationRequestConfigurationCommonAttributes( + name="testname2", + value="testvalue2" + ) + ], + content_encoding="GZIP" + ), + role_arn=firehose.arn, + s3_backup_mode="FailedDataOnly", + s3_configuration=KinesisFirehoseDeliveryStreamHttpEndpointConfigurationS3Configuration( + bucket_arn=bucket.arn, + buffering_interval=400, + buffering_size=10, + compression_format="GZIP", + role_arn=firehose.arn + ), + url="https://aws-api.newrelic.com/firehose/v1" + ), + name="terraform-kinesis-firehose-test-stream" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A name to identify the stream. This is unique to the AWS account and region the Stream is created in. When using for WAF logging, name must be prefixed with `aws-waf-logs-`. See [AWS Documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-policies.html#waf-policies-logging-config) for more details. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `kinesis_source_configuration` - (Optional) Specifies the Kinesis stream that is used as the source of the Firehose delivery stream.
+* `server_side_encryption` - (Optional) Encryption at rest options.
+Server-side encryption should not be enabled when a Kinesis stream is configured as the source of the Firehose delivery stream.
+* `destination` - (Required) The destination to which the data is delivered. The only options are `s3` (Deprecated, use `extended_s3` instead), `extended_s3`, `redshift`, `elasticsearch`, `splunk`, `http_endpoint` and `opensearch`. More details are given below.
+* `extended_s3_configuration` - (Optional, only required when `destination` is `extended_s3`) Enhanced configuration options for the s3 destination. More details are given below.
+* `redshift_configuration` - (Optional) Configuration options if redshift is the destination.
+Using `redshift_configuration` requires the user to also specify an
+`s3_configuration` block. More details are given below.
+* `elasticsearch_configuration` - (Optional) Configuration options if elasticsearch is the destination. More details are given below.
+* `opensearch_configuration` - (Optional) Configuration options if opensearch is the destination. More details are given below.
+* `splunk_configuration` - (Optional) Configuration options if splunk is the destination. More details are given below.
+* `http_endpoint_configuration` - (Optional) Configuration options if http_endpoint is the destination. Requires the user to also specify an `s3_configuration` block. More details are given below.
+
+The `kinesis_source_configuration` object supports the following:
+
+* `kinesis_stream_arn` - (Required) The Kinesis stream used as the source of the Firehose delivery stream.
+* `role_arn` - (Required) The ARN of the role that provides access to the source Kinesis stream.
+
+The `server_side_encryption` object supports the following:
+
+* `enabled` - (Optional) Whether to enable encryption at rest. Default is `false`.
+* `key_type` - (Optional) Type of encryption key. Default is `AWS_OWNED_CMK`. Valid values are `AWS_OWNED_CMK` and `CUSTOMER_MANAGED_CMK`.
+* `key_arn` - (Optional) Amazon Resource Name (ARN) of the encryption key. Required when `key_type` is `CUSTOMER_MANAGED_CMK`.
+
+The `extended_s3_configuration` object supports the same fields from [s3_configuration](#s3-configuration) as well as the following:
+
+* `data_format_conversion_configuration` - (Optional) Nested argument for the serializer, deserializer, and schema for converting data from the JSON format to the Parquet or ORC format before writing it to Amazon S3. More details given below.
+* `processing_configuration` - (Optional) The data processing configuration. More details are given below.
+* `s3_backup_mode` - (Optional) The Amazon S3 backup mode. Valid values are `Disabled` and `Enabled`. Default value is `Disabled`.
+* `s3_backup_configuration` - (Optional) The configuration for backup in Amazon S3. Required if `s3_backup_mode` is `Enabled`. Supports the same fields as `s3_configuration` object.
+* `dynamic_partitioning_configuration` - (Optional) The configuration for dynamic partitioning.
See [Dynamic Partitioning Configuration](#dynamic_partitioning_configuration) below for more details. Required when using dynamic partitioning.
+
+The `redshift_configuration` object supports the following:
+
+* `cluster_jdbcurl` - (Required) The JDBC URL of the Redshift cluster.
+* `username` - (Required) The username that the firehose delivery stream will assume. It is strongly recommended that the username and password provided are used exclusively for Amazon Kinesis Firehose purposes, and that the permissions for the account are restricted to Amazon Redshift INSERT permissions.
+* `password` - (Required) The password for the username above.
+* `retry_duration` - (Optional) The length of time during which Firehose retries delivery after a failure, starting from the initial request and including the first attempt. The default value is 3600 seconds (60 minutes). Firehose does not retry if the value of DurationInSeconds is 0 (zero) or if the first delivery attempt takes longer than the current value.
+* `role_arn` - (Required) The ARN of the role the stream assumes.
+* `s3_configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3_backup_mode` - (Optional) The Amazon S3 backup mode. Valid values are `Disabled` and `Enabled`. Default value is `Disabled`.
+* `s3_backup_configuration` - (Optional) The configuration for backup in Amazon S3. Required if `s3_backup_mode` is `Enabled`. Supports the same fields as `s3_configuration` object.
+* `data_table_name` - (Required) The name of the table in the Redshift cluster that the S3 bucket will copy to.
+* `copy_options` - (Optional) Copy options for copying the data from the S3 intermediate bucket into Redshift, for example to change the default delimiter. For valid values, see the [AWS documentation](http://docs.aws.amazon.com/firehose/latest/APIReference/API_CopyCommand.html).
+* `data_table_columns` - (Optional) The data table columns that will be targeted by the copy command.
+* `cloudwatch_logging_options` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `processing_configuration` - (Optional) The data processing configuration. More details are given below.
+
+The `elasticsearch_configuration` object supports the following:
+
+* `buffering_interval` - (Optional) Buffer incoming data for the specified period of time, in seconds between 60 and 900, before delivering it to the destination. The default value is 300s.
+* `buffering_size` - (Optional) Buffer incoming data to the specified size, in MBs between 1 and 100, before delivering it to the destination. The default value is 5MB.
+* `domain_arn` - (Optional) The ARN of the Amazon ES domain. The pattern needs to be `arn:.*`. Conflicts with `cluster_endpoint`.
+* `cluster_endpoint` - (Optional) The endpoint to use when communicating with the cluster. Conflicts with `domain_arn`.
+* `index_name` - (Required) The Elasticsearch index name.
+* `index_rotation_period` - (Optional) The Elasticsearch index rotation period. Index rotation appends a timestamp to the IndexName to facilitate expiration of old data. Valid values are `NoRotation`, `OneHour`, `OneDay`, `OneWeek`, and `OneMonth`. The default value is `OneDay`.
+* `retry_duration` - (Optional) After an initial failure to deliver to Amazon Elasticsearch, the total amount of time, in seconds between 0 and 7200, during which Firehose re-attempts delivery (including the first attempt).
After this time has elapsed, the failed documents are written to Amazon S3. The default value is 300s. There will be no retry if the value is 0.
+* `role_arn` - (Required) The ARN of the IAM role to be assumed by Firehose for calling the Amazon ES Configuration API and for indexing documents. The IAM role must have permission for `DescribeElasticsearchDomain`, `DescribeElasticsearchDomains`, and `DescribeElasticsearchDomainConfig`. The pattern needs to be `arn:.*`.
+* `s3_configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3_backup_mode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `FailedDocumentsOnly` and `AllDocuments`. Default value is `FailedDocumentsOnly`.
+* `type_name` - (Optional) The Elasticsearch type name with maximum length of 100 characters.
+* `cloudwatch_logging_options` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `vpc_config` - (Optional) The VPC configuration for the delivery stream to connect to Elasticsearch associated with the VPC. More details are given below.
+* `processing_configuration` - (Optional) The data processing configuration. More details are given below.
+
+The `opensearch_configuration` object supports the following:
+
+* `buffering_interval` - (Optional) Buffer incoming data for the specified period of time, in seconds between 60 and 900, before delivering it to the destination. The default value is 300s.
+* `buffering_size` - (Optional) Buffer incoming data to the specified size, in MBs between 1 and 100, before delivering it to the destination. The default value is 5MB.
+* `domain_arn` - (Optional) The ARN of the Amazon OpenSearch domain. The pattern needs to be `arn:.*`. Conflicts with `cluster_endpoint`.
+* `cluster_endpoint` - (Optional) The endpoint to use when communicating with the cluster. Conflicts with `domain_arn`.
+* `index_name` - (Required) The OpenSearch index name.
+* `index_rotation_period` - (Optional) The OpenSearch index rotation period. Index rotation appends a timestamp to the IndexName to facilitate expiration of old data. Valid values are `NoRotation`, `OneHour`, `OneDay`, `OneWeek`, and `OneMonth`. The default value is `OneDay`.
+* `retry_duration` - (Optional) After an initial failure to deliver to Amazon OpenSearch, the total amount of time, in seconds between 0 and 7200, during which Firehose re-attempts delivery (including the first attempt). After this time has elapsed, the failed documents are written to Amazon S3. The default value is 300s. There will be no retry if the value is 0.
+* `role_arn` - (Required) The ARN of the IAM role to be assumed by Firehose for calling the Amazon OpenSearch Configuration API and for indexing documents. The IAM role must have permission for `DescribeDomain`, `DescribeDomains`, and `DescribeDomainConfig`. The pattern needs to be `arn:.*`.
+* `s3_configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3_backup_mode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `FailedDocumentsOnly` and `AllDocuments`. Default value is `FailedDocumentsOnly`.
+* `type_name` - (Optional) The OpenSearch type name with maximum length of 100 characters. Types are deprecated in OpenSearch 1.1; `type_name` must be empty.
+* `cloudwatch_logging_options` - (Optional) The CloudWatch Logging Options for the delivery stream.
More details are given below.
+* `vpc_config` - (Optional) The VPC configuration for the delivery stream to connect to OpenSearch associated with the VPC. More details are given below.
+* `processing_configuration` - (Optional) The data processing configuration. More details are given below.
+
+The `splunk_configuration` object supports the following:
+
+* `hec_acknowledgment_timeout` - (Optional) The amount of time, in seconds between 180 and 600, that Kinesis Firehose waits to receive an acknowledgment from Splunk after it sends data.
+* `hec_endpoint` - (Required) The HTTP Event Collector (HEC) endpoint to which Kinesis Firehose sends your data.
+* `hec_endpoint_type` - (Optional) The HEC endpoint type. Valid values are `Raw` or `Event`. The default value is `Raw`.
+* `hec_token` - (Required) The GUID that you obtain from your Splunk cluster when you create a new HEC endpoint.
+* `s3_configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3_backup_mode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `FailedEventsOnly` and `AllEvents`. Default value is `FailedEventsOnly`.
+* `retry_duration` - (Optional) After an initial failure to deliver to Splunk, the total amount of time, in seconds between 0 and 7200, during which Firehose re-attempts delivery (including the first attempt). After this time has elapsed, the failed documents are written to Amazon S3. The default value is 300s. There will be no retry if the value is 0.
+* `cloudwatch_logging_options` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `processing_configuration` - (Optional) The data processing configuration. More details are given below.
+
+The `http_endpoint_configuration` object supports the following:
+
+* `url` - (Required) The HTTP endpoint URL to which Kinesis Firehose sends your data.
+* `name` - (Optional) The HTTP endpoint name.
+* `access_key` - (Optional) The access key required for Kinesis Firehose to authenticate with the HTTP endpoint selected as the destination.
+* `role_arn` - (Required) Kinesis Data Firehose uses this IAM role for all the permissions that the delivery stream needs. The pattern needs to be `arn:.*`.
+* `s3_configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3_backup_mode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `FailedDataOnly` and `AllData`. Default value is `FailedDataOnly`.
+* `buffering_size` - (Optional) Buffer incoming data to the specified size, in MBs, before delivering it to the destination. The default value is 5.
+* `buffering_interval` - (Optional) Buffer incoming data for the specified period of time, in seconds, before delivering it to the destination. The default value is 300 (5 minutes).
+* `cloudwatch_logging_options` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `processing_configuration` - (Optional) The data processing configuration. More details are given below.
+* `request_configuration` - (Optional) The request configuration. More details are given below.
+* `retry_duration` - (Optional) Total amount of seconds Firehose spends on retries. This duration starts after the initial attempt fails. It does not include the time periods during which Firehose waits for acknowledgment from the specified destination after each attempt.
+
+The `cloudwatch_logging_options` object supports the following:
+
+* `enabled` - (Optional) Enables or disables the logging. Defaults to `false`.
+* `log_group_name` - (Optional) The CloudWatch group name for logging. This value is required if `enabled` is true.
+* `log_stream_name` - (Optional) The CloudWatch log stream name for logging. This value is required if `enabled` is true.
+
+The `processing_configuration` object supports the following (a combined sketch appears below, after the `vpc_config` section):
+
+* `enabled` - (Optional) Enables or disables data processing.
+* `processors` - (Optional) Array of data processors. More details are given below.
+
+The `processors` array objects support the following:
+
+* `type` - (Required) The type of processor. Valid values: `RecordDeAggregation`, `Lambda`, `MetadataExtraction`, `AppendDelimiterToRecord`. Validation is done against [AWS SDK constants](https://docs.aws.amazon.com/sdk-for-go/api/service/firehose/#pkg-constants), so values not explicitly listed may also work.
+* `parameters` - (Optional) Array of processor parameters. More details are given below.
+
+The `parameters` array objects support the following:
+
+* `parameter_name` - (Required) Parameter name. Valid values: `LambdaArn`, `NumberOfRetries`, `MetadataExtractionQuery`, `JsonParsingEngine`, `RoleArn`, `BufferSizeInMBs`, `BufferIntervalInSeconds`, `SubRecordType`, `Delimiter`. Validation is done against [AWS SDK constants](https://docs.aws.amazon.com/sdk-for-go/api/service/firehose/#pkg-constants), so values not explicitly listed may also work.
+* `parameter_value` - (Required) Parameter value. Must be between 1 and 512 characters in length (inclusive). When providing a Lambda ARN, you should specify the resource version as well.
+
+~> **NOTE:** Parameters with default values, including `NumberOfRetries` (default: 3), `RoleArn` (default: the Firehose role ARN), `BufferSizeInMBs` (default: 3), and `BufferIntervalInSeconds` (default: 60), are not stored in Terraform state. To prevent perpetual differences, it is therefore recommended to only include parameters with non-default values.
+
+The `request_configuration` object supports the following:
+
+* `content_encoding` - (Optional) Kinesis Data Firehose uses the content encoding to compress the body of a request before sending the request to the destination. Valid values are `NONE` and `GZIP`. Default value is `NONE`.
+* `common_attributes` - (Optional) Describes the metadata sent to the HTTP endpoint destination. More details are given below.
+
+The `common_attributes` array objects support the following:
+
+* `name` - (Required) The name of the HTTP endpoint common attribute.
+* `value` - (Required) The value of the HTTP endpoint common attribute.
+
+The `vpc_config` object supports the following:
+
+* `subnet_ids` - (Required) A list of subnet IDs to associate with Kinesis Firehose.
+* `security_group_ids` - (Required) A list of security group IDs to associate with Kinesis Firehose.
+* `role_arn` - (Required) The ARN of the IAM role to be assumed by Firehose for calling the Amazon EC2 configuration API and for creating network interfaces. Make sure the role has the necessary [IAM permissions](https://docs.aws.amazon.com/firehose/latest/dev/controlling-access.html#using-iam-es-vpc).
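+
+Tying the `processing_configuration`, `processors`, and `parameters` objects together, here is a minimal hand-written sketch of a Lambda transform (not `cdktf convert` output; the `KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfiguration*` class names follow the generated-binding naming pattern and the Lambda ARN is a placeholder):
+
+```python
+from imports.aws.kinesis_firehose_delivery_stream import (
+    KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfiguration as ProcessingConfiguration,
+    KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessors as Processors,
+    KinesisFirehoseDeliveryStreamExtendedS3ConfigurationProcessingConfigurationProcessorsParameters as ProcessorsParameters,
+)
+
+# A single Lambda processor. Only the non-default LambdaArn parameter is set,
+# so Terraform state stays free of perpetual diffs (see the note above).
+processing = ProcessingConfiguration(
+    enabled=True,
+    processors=[Processors(
+        type="Lambda",
+        parameters=[ProcessorsParameters(
+            parameter_name="LambdaArn",
+            # Placeholder ARN; the function version (":$LATEST" here) must be included.
+            parameter_value="arn:aws:lambda:us-east-1:111122223333:function:firehose-processor:$LATEST"
+        )]
+    )]
+)
+```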
+
+### data_format_conversion_configuration
+
+~> **NOTE:** Once configured, the data format conversion configuration can only be disabled, in which case the configuration values remain in place but are no longer active. It is not currently possible to completely remove the configuration without recreating the resource.
+
+Example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream
+class MyConvertedCode(TerraformStack):
+    # The bucket/role ARNs, destination, and stream name are supplied by the caller.
+    def __init__(self, scope, id, *, bucket_arn, role_arn, destination, name):
+        super().__init__(scope, id)
+        KinesisFirehoseDeliveryStream(self, "example",
+            extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration(
+                buffering_size=128,
+                data_format_conversion_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfiguration(
+                    input_format_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationInputFormatConfiguration(
+                        deserializer=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationInputFormatConfigurationDeserializer(
+                            hive_json_ser_de=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationInputFormatConfigurationDeserializerHiveJsonSerDe()
+                        )
+                    ),
+                    output_format_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationOutputFormatConfiguration(
+                        serializer=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationOutputFormatConfigurationSerializer(
+                            orc_ser_de=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationOutputFormatConfigurationSerializerOrcSerDe()
+                        )
+                    ),
+                    schema_configuration=KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationSchemaConfiguration(
+                        database_name=Token.as_string(aws_glue_catalog_table_example.database_name),
+                        role_arn=Token.as_string(aws_iam_role_example.arn),
+                        table_name=Token.as_string(aws_glue_catalog_table_example.name)
+                    )
+                ),
+                bucket_arn=bucket_arn,
+                role_arn=role_arn
+            ),
+            destination=destination,
+            name=name
+        )
+```
+
+* `input_format_configuration` - (Required) Nested argument that specifies the deserializer that you want Kinesis Data Firehose to use to convert the format of your data from JSON. More details below.
+* `output_format_configuration` - (Required) Nested argument that specifies the serializer that you want Kinesis Data Firehose to use to convert the format of your data to the Parquet or ORC format. More details below.
+* `schema_configuration` - (Required) Nested argument that specifies the AWS Glue Data Catalog table that contains the column information. More details below.
+* `enabled` - (Optional) Defaults to `true`. Set it to `false` if you want to disable format conversion while preserving the configuration details.
+
+#### S3 Configuration
+
+* `role_arn` - (Required) The ARN of the AWS credentials.
+* `bucket_arn` - (Required) The ARN of the S3 bucket.
+* `prefix` - (Optional) The "YYYY/MM/DD/HH" time format prefix is automatically used for delivered S3 files. You can specify an extra prefix to be added in front of the time format prefix. Note that if the prefix ends with a slash, it appears as a folder in the S3 bucket.
+* `buffering_size` - (Optional) Buffer incoming data to the specified size, in MBs, before delivering it to the destination. The default value is 5. We recommend setting `SizeInMBs` to a value greater than the amount of data you typically ingest into the delivery stream in 10 seconds. For example, if you typically ingest data at 1 MB/sec, set `SizeInMBs` to 10 MB or higher.
+* `buffering_interval` - (Optional) Buffer incoming data for the specified period of time, in seconds, before delivering it to the destination. The default value is 300.
+* `compression_format` - (Optional) The compression format. If no value is specified, the default is `UNCOMPRESSED`. Other supported values are `GZIP`, `ZIP`, `Snappy`, and `HADOOP_SNAPPY`.
+* `error_output_prefix` - (Optional) Prefix added to failed records before writing them to S3. Not currently supported for the `redshift` destination. This prefix appears immediately following the bucket name. For information about how to specify this prefix, see [Custom Prefixes for Amazon S3 Objects](https://docs.aws.amazon.com/firehose/latest/dev/s3-prefixes.html).
+* `kms_key_arn` - (Optional) Specifies the KMS key ARN the stream will use to encrypt data. If not set, no encryption will be used.
+* `cloudwatch_logging_options` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+
+#### input_format_configuration
+
+* `deserializer` - (Required) Nested argument that specifies which deserializer to use. You can choose either the Apache Hive JSON SerDe or the OpenX JSON SerDe. More details below.
+
+##### deserializer
+
+~> **NOTE:** One of the deserializers must be configured. If no nested configuration needs to occur, simply declare it as `XXX_json_ser_de = []` or `XXX_json_ser_de {}`.
+
+* `hive_json_ser_de` - (Optional) Nested argument that specifies the native Hive / HCatalog JsonSerDe. More details below.
+* `open_x_json_ser_de` - (Optional) Nested argument that specifies the OpenX SerDe. More details below.
+
+###### hive_json_ser_de
+
+* `timestamp_formats` - (Optional) A list of how you want Kinesis Data Firehose to parse the date and time stamps that may be present in your input data JSON. To specify these format strings, follow the pattern syntax of JodaTime's DateTimeFormat format strings. For more information, see [Class DateTimeFormat](https://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html). You can also use the special value `millis` to parse time stamps in epoch milliseconds. If you don't specify a format, Kinesis Data Firehose uses `java.sql.Timestamp::valueOf` by default.
+
+###### open_x_json_ser_de
+
+* `case_insensitive` - (Optional) When set to `true`, which is the default, Kinesis Data Firehose converts JSON keys to lowercase before deserializing them.
+* `column_to_json_key_mappings` - (Optional) A map of column names to JSON keys that aren't identical to the column names. This is useful when the JSON contains keys that are Hive keywords. For example, `timestamp` is a Hive keyword. If you have a JSON key named `timestamp`, set this parameter to `{ ts = "timestamp" }` to map this key to a column named `ts`.
+* `convert_dots_in_json_keys_to_underscores` - (Optional) When set to `true`, specifies that the names of the keys include dots and that you want Kinesis Data Firehose to replace them with underscores. This is useful because Apache Hive does not allow dots in column names. For example, if the JSON contains a key whose name is "a.b", you can define the column name to be "a_b" when using this option. Defaults to `false`.
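+
+As a concrete illustration of the OpenX SerDe options above, the following hand-written fragment maps the Hive keyword `timestamp` onto a `ts` column and rewrites dotted keys. The parent `Deserializer` class appears in the example above; the `OpenXJsonSerDe` sibling name follows the same generated-binding pattern and is an assumption:
+
+```python
+from imports.aws.kinesis_firehose_delivery_stream import (
+    KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationInputFormatConfigurationDeserializer as Deserializer,
+    KinesisFirehoseDeliveryStreamExtendedS3ConfigurationDataFormatConversionConfigurationInputFormatConfigurationDeserializerOpenXJsonSerDe as OpenXJsonSerDe,
+)
+
+# JSON keys are lowercased (the default), "timestamp" is exposed as column "ts",
+# and a key such as "a.b" becomes the Hive-safe column name "a_b".
+deserializer = Deserializer(
+    open_x_json_ser_de=OpenXJsonSerDe(
+        case_insensitive=True,
+        column_to_json_key_mappings={"ts": "timestamp"},
+        convert_dots_in_json_keys_to_underscores=True
+    )
+)
+```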
+ +#### output_format_configuration + +* `serializer` - (Required) Nested argument that specifies which serializer to use. You can choose either the ORC SerDe or the Parquet SerDe. More details below. + +##### serializer + +~> **NOTE:** One of the serializers must be configured. If no nested configuration needs to occur simply declare as `XXX_ser_de = []` or `XXX_ser_de {}`. + +* `orc_ser_de` - (Optional) Nested argument that specifies converting data to the ORC format before storing it in Amazon S3. For more information, see [Apache ORC](https://orc.apache.org/docs/). More details below. +* `parquet_ser_de` - (Optional) Nested argument that specifies converting data to the Parquet format before storing it in Amazon S3. For more information, see [Apache Parquet](https://parquet.apache.org/documentation/latest/). More details below. + +###### orc_ser_de + +* `block_size_bytes` - (Optional) The Hadoop Distributed File System (HDFS) block size. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is 256 MiB and the minimum is 64 MiB. Kinesis Data Firehose uses this value for padding calculations. +* `bloom_filter_columns` - (Optional) A list of column names for which you want Kinesis Data Firehose to create bloom filters. +* `bloom_filter_false_positive_probability` - (Optional) The Bloom filter false positive probability (FPP). The lower the FPP, the bigger the Bloom filter. The default value is `0.05`, the minimum is `0`, and the maximum is `1`. +* `compression` - (Optional) The compression code to use over data blocks. The default is `SNAPPY`. +* `dictionary_key_threshold` - (Optional) A float that represents the fraction of the total number of non-null rows. To turn off dictionary encoding, set this fraction to a number that is less than the number of distinct keys in a dictionary. To always use dictionary encoding, set this threshold to `1`. +* `enable_padding` - (Optional) Set this to `true` to indicate that you want stripes to be padded to the HDFS block boundaries. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is `false`. +* `format_version` - (Optional) The version of the file to write. The possible values are `V0_11` and `V0_12`. The default is `V0_12`. +* `padding_tolerance` - (Optional) A float between 0 and 1 that defines the tolerance for block padding as a decimal fraction of stripe size. The default value is `0.05`, which means 5 percent of stripe size. For the default values of 64 MiB ORC stripes and 256 MiB HDFS blocks, the default block padding tolerance of 5 percent reserves a maximum of 3.2 MiB for padding within the 256 MiB block. In such a case, if the available size within the block is more than 3.2 MiB, a new, smaller stripe is inserted to fit within that space. This ensures that no stripe crosses block boundaries and causes remote reads within a node-local task. Kinesis Data Firehose ignores this parameter when `enable_padding` is `false`. +* `row_index_stride` - (Optional) The number of rows between index entries. The default is `10000` and the minimum is `1000`. +* `stripe_size_bytes` - (Optional) The number of bytes in each stripe. The default is 64 MiB and the minimum is 8 MiB. + +###### parquet_ser_de + +* `block_size_bytes` - (Optional) The Hadoop Distributed File System (HDFS) block size. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is 256 MiB and the minimum is 64 MiB. 
Kinesis Data Firehose uses this value for padding calculations. +* `compression` - (Optional) The compression code to use over data blocks. The possible values are `UNCOMPRESSED`, `SNAPPY`, and `GZIP`, with the default being `SNAPPY`. Use `SNAPPY` for higher decompression speed. Use `GZIP` if the compression ratio is more important than speed. +* `enable_dictionary_compression` - (Optional) Indicates whether to enable dictionary compression. +* `max_padding_bytes` - (Optional) The maximum amount of padding to apply. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is `0`. +* `page_size_bytes` - (Optional) The Parquet page size. Column chunks are divided into pages. A page is conceptually an indivisible unit (in terms of compression and encoding). The minimum value is 64 KiB and the default is 1 MiB. +* `writer_version` - (Optional) Indicates the version of row format to output. The possible values are `V1` and `V2`. The default is `V1`. + +#### schema_configuration + +* `database_name` - (Required) Specifies the name of the AWS Glue database that contains the schema for the output data. +* `role_arn` - (Required) The role that Kinesis Data Firehose can use to access AWS Glue. This role must be in the same account you use for Kinesis Data Firehose. Cross-account roles aren't allowed. +* `table_name` - (Required) Specifies the AWS Glue table that contains the column information that constitutes your data schema. +* `catalog_id` - (Optional) The ID of the AWS Glue Data Catalog. If you don't supply this, the AWS account ID is used by default. +* `region` - (Optional) If you don't specify an AWS Region, the default is the current region. +* `version_id` - (Optional) Specifies the table version for the output data schema. Defaults to `LATEST`. + +#### dynamic_partitioning_configuration + +Required when using [dynamic partitioning](https://docs.aws.amazon.com/firehose/latest/dev/dynamic-partitioning.html). + +* `enabled` - (Optional) Enables or disables dynamic partitioning. Defaults to `false`. +* `retry_duration` - (Optional) Total amount of seconds Firehose spends on retries. Valid values between 0 and 7200. Default is 300. + +~> **NOTE:** You can enable dynamic partitioning only when you create a new delivery stream. Once you enable dynamic partitioning on a delivery stream, it cannot be disabled on this delivery stream. Therefore, Terraform will recreate the resource whenever dynamic partitioning is enabled or disabled. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) specifying the Stream +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +[1]: https://aws.amazon.com/documentation/firehose/ + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `10m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Firehose Delivery streams using the stream ARN. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Kinesis Firehose Delivery streams using the stream ARN. For example:
+
+```console
+% terraform import aws_kinesis_firehose_delivery_stream.foo arn:aws:firehose:us-east-1:XXX:deliverystream/example
+```
+
+~> **NOTE:** Import does not work for the `s3` stream destination. Consider using `extended_s3`, since the `s3` destination is deprecated.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kinesis_stream.html.markdown b/website/docs/cdktf/python/r/kinesis_stream.html.markdown
new file mode 100644
index 00000000000..1540ff8b90d
--- /dev/null
+++ b/website/docs/cdktf/python/r/kinesis_stream.html.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "Kinesis"
+layout: "aws"
+page_title: "AWS: aws_kinesis_stream"
+description: |-
+  Provides an AWS Kinesis Stream
+---
+
+
+
+# Resource: aws_kinesis_stream
+
+Provides a Kinesis Stream resource. Amazon Kinesis is a managed service that
+scales elastically for real-time processing of streaming big data.
+
+For more details, see the [Amazon Kinesis Documentation][1].
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kinesis_stream import KinesisStream
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        KinesisStream(self, "test_stream",
+            name="terraform-kinesis-test",
+            retention_period=48,
+            shard_count=1,
+            shard_level_metrics=["IncomingBytes", "OutgoingBytes"],
+            stream_mode_details=KinesisStreamStreamModeDetails(
+                stream_mode="PROVISIONED"
+            ),
+            tags={
+                "Environment": "test"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name to identify the stream. This is unique to the AWS account and region the Stream is created in.
+* `shard_count` – (Optional) The number of shards that the stream will use. If the `stream_mode` is `PROVISIONED`, this field is required. Amazon provides guidelines for sizing a stream that should be consulted when creating a Kinesis stream. See [Amazon Kinesis Streams][2] for more.
+* `retention_period` - (Optional) Length of time data records are accessible after they are added to the stream. The maximum value of a stream's retention period is 8760 hours. Minimum value is 24. Default is 24.
+* `shard_level_metrics` - (Optional) A list of shard-level CloudWatch metrics which can be enabled for the stream. See [Monitoring with CloudWatch][3] for more. Note that the value `ALL` should not be used; instead you should provide an explicit list of metrics you wish to enable.
+* `enforce_consumer_deletion` - (Optional) A boolean that indicates all registered consumers should be deregistered from the stream so that the stream can be destroyed without error. The default value is `false`.
+* `encryption_type` - (Optional) The encryption type to use. The only acceptable values are `NONE` or `KMS`. The default value is `NONE`.
+* `kms_key_id` - (Optional) The GUID for the customer-managed KMS key to use for encryption.
You can also use a Kinesis-owned master key by specifying the alias `alias/aws/kinesis`. +* `stream_mode_details` - (Optional) Indicates the [capacity mode](https://docs.aws.amazon.com/streams/latest/dev/how-do-i-size-a-stream.html) of the data stream. Detailed below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### stream_mode_details Configuration Block + +* `stream_mode` - (Required) Specifies the capacity mode of the stream. Must be either `PROVISIONED` or `ON_DEMAND`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique Stream id +* `name` - The unique Stream name +* `shard_count` - The count of Shards for this Stream +* `arn` - The Amazon Resource Name (ARN) specifying the Stream (same as `id`) +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5m`) +- `update` - (Default `120m`) +- `delete` - (Default `120m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Streams using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Kinesis Streams using the `name`. For example: + +```console +% terraform import aws_kinesis_stream.test_stream terraform-kinesis-test +``` + +[1]: https://aws.amazon.com/documentation/kinesis/ +[2]: https://docs.aws.amazon.com/kinesis/latest/dev/amazon-kinesis-streams.html +[3]: https://docs.aws.amazon.com/streams/latest/dev/monitoring-with-cloudwatch.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kinesis_stream_consumer.html.markdown b/website/docs/cdktf/python/r/kinesis_stream_consumer.html.markdown new file mode 100644 index 00000000000..89f2bc3795e --- /dev/null +++ b/website/docs/cdktf/python/r/kinesis_stream_consumer.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Kinesis" +layout: "aws" +page_title: "AWS: aws_kinesis_stream_consumer" +description: |- + Manages a Kinesis Stream Consumer. +--- + + + +# Resource: aws_kinesis_stream_consumer + +Provides a resource to manage a Kinesis Stream Consumer. + +-> **Note:** You can register up to 20 consumers per stream. A given consumer can only be registered with one stream at a time. + +For more details, see the [Amazon Kinesis Stream Consumer Documentation][1]. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.kinesis_stream import KinesisStream
+from imports.aws.kinesis_stream_consumer import KinesisStreamConsumer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KinesisStream(self, "example",
+            name="example-stream",
+            shard_count=1
+        )
+        aws_kinesis_stream_consumer_example = KinesisStreamConsumer(self, "example_1",
+            name="example-consumer",
+            stream_arn=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kinesis_stream_consumer_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required, Forces new resource) Name of the stream consumer.
+* `stream_arn` – (Required, Forces new resource) Amazon Resource Name (ARN) of the data stream the consumer is registered with.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the stream consumer.
+* `creation_timestamp` - Approximate timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of when the stream consumer was created.
+* `id` - Amazon Resource Name (ARN) of the stream consumer.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Stream Consumers using the Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Kinesis Stream Consumers using the Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_kinesis_stream_consumer.example arn:aws:kinesis:us-west-2:123456789012:stream/example/consumer/example:1616044553
+```
+
+[1]: https://docs.aws.amazon.com/streams/latest/dev/amazon-kinesis-consumers.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kinesis_video_stream.html.markdown b/website/docs/cdktf/python/r/kinesis_video_stream.html.markdown
new file mode 100644
index 00000000000..9c0d10ef028
--- /dev/null
+++ b/website/docs/cdktf/python/r/kinesis_video_stream.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Kinesis Video"
+layout: "aws"
+page_title: "AWS: aws_kinesis_video_stream"
+description: |-
+  Provides an AWS Kinesis Video Stream
+---
+
+
+
+# Resource: aws_kinesis_video_stream
+
+Provides a Kinesis Video Stream resource. Amazon Kinesis Video Streams makes it easy to securely stream video from connected devices to AWS for analytics, machine learning (ML), playback, and other processing.
+
+For more details, see the [Amazon Kinesis Documentation][1].
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kinesis_video_stream import KinesisVideoStream
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        KinesisVideoStream(self, "default",
+            data_retention_in_hours=1,
+            device_name="kinesis-video-device-name",
+            media_type="video/h264",
+            name="terraform-kinesis-video-stream",
+            tags={
+                "Name": "terraform-kinesis-video-stream"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name to identify the stream. This is unique to the AWS account and region the Stream is created in.
+* `data_retention_in_hours` – (Optional) The number of hours that you want to retain the data in the stream. Kinesis Video Streams retains the data in a data store that is associated with the stream. The default value is `0`, indicating that the stream does not persist data.
+* `device_name` - (Optional) The name of the device that is writing to the stream. **In the current implementation, Kinesis Video Streams does not use this name.**
+* `kms_key_id` - (Optional) The ID of the AWS Key Management Service (AWS KMS) key that you want Kinesis Video Streams to use to encrypt stream data. If no key ID is specified, the default, Kinesis Video-managed key (`aws/kinesisvideo`) is used.
+* `media_type` - (Optional) The media type of the stream. Consumers of the stream can use this information when processing the stream. For more information about media types, see [Media Types][2]. If you choose to specify the MediaType, see [Naming Requirements][3] for guidelines.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique Stream id
+* `arn` - The Amazon Resource Name (ARN) specifying the Stream (same as `id`)
+* `creation_time` - A time stamp that indicates when the stream was created.
+* `version` - The version of the stream.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `120m`)
+- `delete` - (Default `120m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Video Streams using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Kinesis Video Streams using the `arn`.
For example: + +```console +% terraform import aws_kinesis_video_stream.test_stream arn:aws:kinesisvideo:us-west-2:123456789012:stream/terraform-kinesis-test/1554978910975 +``` + +[1]: https://aws.amazon.com/documentation/kinesis/ +[2]: http://www.iana.org/assignments/media-types/media-types.xhtml +[3]: https://tools.ietf.org/html/rfc6838#section-4.2 + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kinesisanalyticsv2_application.html.markdown b/website/docs/cdktf/python/r/kinesisanalyticsv2_application.html.markdown new file mode 100644 index 00000000000..57d8e5e3706 --- /dev/null +++ b/website/docs/cdktf/python/r/kinesisanalyticsv2_application.html.markdown @@ -0,0 +1,534 @@ +--- +subcategory: "Kinesis Analytics V2" +layout: "aws" +page_title: "AWS: aws_kinesisanalyticsv2_application" +description: |- + Manages a Kinesis Analytics v2 Application. +--- + + + +# Resource: aws_kinesisanalyticsv2_application + +Manages a Kinesis Analytics v2 Application. +This resource can be used to manage both Kinesis Data Analytics for SQL applications and Kinesis Data Analytics for Apache Flink applications. + +-> **Note:** Kinesis Data Analytics for SQL applications created using this resource cannot currently be viewed in the AWS Console. To manage Kinesis Data Analytics for SQL applications that can also be viewed in the AWS Console, use the [`aws_kinesis_analytics_application`](/docs/providers/aws/r/kinesis_analytics_application.html) resource. + +## Example Usage + +### Apache Flink Application + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kinesisanalyticsv2_application import Kinesisanalyticsv2Application +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example-flink-application" + ) + aws_s3_object_example = S3Object(self, "example_1", + bucket=example.id, + key="example-flink-application", + source="flink-app.jar" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_s3_object_example.override_logical_id("example") + aws_kinesisanalyticsv2_application_example = + Kinesisanalyticsv2Application(self, "example_2", + application_configuration=Kinesisanalyticsv2ApplicationApplicationConfiguration( + application_code_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfiguration( + code_content=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfigurationCodeContent( + s3_content_location=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfigurationCodeContentS3ContentLocation( + bucket_arn=example.arn, + file_key=Token.as_string(aws_s3_object_example.key) + ) + ), + code_content_type="ZIPFILE" + ), + environment_properties=Kinesisanalyticsv2ApplicationApplicationConfigurationEnvironmentProperties( + property_group=[Kinesisanalyticsv2ApplicationApplicationConfigurationEnvironmentPropertiesPropertyGroup( + property_group_id="PROPERTY-GROUP-1", + property_map={ + "Key1": "Value1" + } + ), Kinesisanalyticsv2ApplicationApplicationConfigurationEnvironmentPropertiesPropertyGroup( + property_group_id="PROPERTY-GROUP-2", + property_map={ + "KeyA": "ValueA", + "KeyB": "ValueB" + } + ) + ] + ), + flink_application_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationFlinkApplicationConfiguration( + checkpoint_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationFlinkApplicationConfigurationCheckpointConfiguration( + configuration_type="DEFAULT" + ), + monitoring_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationFlinkApplicationConfigurationMonitoringConfiguration( + configuration_type="CUSTOM", + log_level="DEBUG", + metrics_level="TASK" + ), + parallelism_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationFlinkApplicationConfigurationParallelismConfiguration( + auto_scaling_enabled=True, + configuration_type="CUSTOM", + parallelism=10, + parallelism_per_kpu=4 + ) + ) + ), + name="example-flink-application", + runtime_environment="FLINK-1_8", + service_execution_role=Token.as_string(aws_iam_role_example.arn), + tags={ + "Environment": "test" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kinesisanalyticsv2_application_example.override_logical_id("example") +``` + +### SQL Application + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_log_group import CloudwatchLogGroup +from imports.aws.cloudwatch_log_stream import CloudwatchLogStream +from imports.aws.kinesisanalyticsv2_application import Kinesisanalyticsv2Application +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CloudwatchLogGroup(self, "example", + name="example-sql-application" + ) + aws_cloudwatch_log_stream_example = CloudwatchLogStream(self, "example_1", + log_group_name=example.name, + name="example-sql-application" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_cloudwatch_log_stream_example.override_logical_id("example") + aws_kinesisanalyticsv2_application_example = + Kinesisanalyticsv2Application(self, "example_2", + application_configuration=Kinesisanalyticsv2ApplicationApplicationConfiguration( + application_code_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfiguration( + code_content=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfigurationCodeContent( + text_content="SELECT 1;\n\n" + ), + code_content_type="PLAINTEXT" + ), + sql_application_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfiguration( + input=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInput( + input_parallelism=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputParallelism( + count=3 + ), + input_schema=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputSchema( + record_column=[Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputSchemaRecordColumn( + mapping="MAPPING-1", + name="COLUMN_1", + sql_type="VARCHAR(8)" + ), Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputSchemaRecordColumn( + name="COLUMN_2", + sql_type="DOUBLE" + ) + ], + record_encoding="UTF-8", + record_format=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputSchemaRecordFormat( + mapping_parameters=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputSchemaRecordFormatMappingParameters( + csv_mapping_parameters=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputSchemaRecordFormatMappingParametersCsvMappingParameters( + record_column_delimiter=",", + record_row_delimiter="\n\n" + ) + ), + record_format_type="CSV" + ) + ), + kinesis_streams_input=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputKinesisStreamsInput( + resource_arn=Token.as_string(aws_kinesis_stream_example.arn) + ), + name_prefix="PREFIX_1" + ), + output=[Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationOutput( + destination_schema=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationOutputDestinationSchema( + record_format_type="JSON" + ), + lambda_output=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationOutputLambdaOutput( + resource_arn=Token.as_string(aws_lambda_function_example.arn) + ), + name="OUTPUT_1" + ), Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationOutput( + destination_schema=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationOutputDestinationSchema( + record_format_type="CSV" + ), + kinesis_firehose_output=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationOutputKinesisFirehoseOutput( + resource_arn=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn) + ), + name="OUTPUT_2" + ) + ], + reference_data_source=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationReferenceDataSource( + reference_schema=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationReferenceDataSourceReferenceSchema( + record_column=[Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationReferenceDataSourceReferenceSchemaRecordColumn( + name="COLUMN_1", + 
sql_type="INTEGER" + ) + ], + record_format=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationReferenceDataSourceReferenceSchemaRecordFormat( + mapping_parameters=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationReferenceDataSourceReferenceSchemaRecordFormatMappingParameters( + json_mapping_parameters=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationReferenceDataSourceReferenceSchemaRecordFormatMappingParametersJsonMappingParameters( + record_row_path="$" + ) + ), + record_format_type="JSON" + ) + ), + s3_reference_data_source=Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationReferenceDataSourceS3ReferenceDataSource( + bucket_arn=Token.as_string(aws_s3_bucket_example.arn), + file_key="KEY-1" + ), + table_name="TABLE-1" + ) + ) + ), + cloudwatch_logging_options=Kinesisanalyticsv2ApplicationCloudwatchLoggingOptions( + log_stream_arn=Token.as_string(aws_cloudwatch_log_stream_example.arn) + ), + name="example-sql-application", + runtime_environment="SQL-1_0", + service_execution_role=Token.as_string(aws_iam_role_example.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kinesisanalyticsv2_application_example.override_logical_id("example") +``` + +### VPC Configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kinesisanalyticsv2_application import Kinesisanalyticsv2Application +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example-flink-application" + ) + aws_s3_object_example = S3Object(self, "example_1", + bucket=example.id, + key="example-flink-application", + source="flink-app.jar" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_s3_object_example.override_logical_id("example")
+        aws_kinesisanalyticsv2_application_example = Kinesisanalyticsv2Application(self, "example_2",
+            application_configuration=Kinesisanalyticsv2ApplicationApplicationConfiguration(
+                application_code_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfiguration(
+                    code_content=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfigurationCodeContent(
+                        s3_content_location=Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfigurationCodeContentS3ContentLocation(
+                            bucket_arn=example.arn,
+                            file_key=Token.as_string(aws_s3_object_example.key)
+                        )
+                    ),
+                    code_content_type="ZIPFILE"
+                ),
+                vpc_configuration=Kinesisanalyticsv2ApplicationApplicationConfigurationVpcConfiguration(
+                    security_group_ids=[
+                        Token.as_string(property_access(aws_security_group_example, ["0", "id"])),
+                        Token.as_string(property_access(aws_security_group_example, ["1", "id"]))
+                    ],
+                    subnet_ids=[Token.as_string(aws_subnet_example.id)]
+                )
+            ),
+            name="example-flink-application",
+            runtime_environment="FLINK-1_8",
+            service_execution_role=Token.as_string(aws_iam_role_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kinesisanalyticsv2_application_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the application.
+* `runtime_environment` - (Required) The runtime environment for the application. Valid values: `SQL-1_0`, `FLINK-1_6`, `FLINK-1_8`, `FLINK-1_11`, `FLINK-1_13`, `FLINK-1_15`.
+* `service_execution_role` - (Required) The ARN of the [IAM role](/docs/providers/aws/r/iam_role.html) used by the application to access Kinesis data streams, Kinesis Data Firehose delivery streams, Amazon S3 objects, and other external resources.
+* `application_configuration` - (Optional) The application's configuration.
+* `cloudwatch_logging_options` - (Optional) A [CloudWatch log stream](/docs/providers/aws/r/cloudwatch_log_stream.html) to monitor application configuration errors.
+* `description` - (Optional) A summary description of the application.
+* `force_stop` - (Optional) Whether to force stop an unresponsive Flink-based application.
+* `start_application` - (Optional) Whether to start or stop the application.
+* `tags` - (Optional) A map of tags to assign to the application. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `application_configuration` object supports the following (see the sketch after this list):
+
+* `application_code_configuration` - (Required) The code location and type parameters for the application.
+* `application_snapshot_configuration` - (Optional) Describes whether snapshots are enabled for a Flink-based application.
+* `environment_properties` - (Optional) Describes execution properties for a Flink-based application.
+* `flink_application_configuration` - (Optional) The configuration of a Flink-based application.
+* `run_configuration` - (Optional) Describes the starting properties for a Flink-based application.
+* `sql_application_configuration` - (Optional) The configuration of a SQL-based application.
+* `vpc_configuration` - (Optional) The VPC configuration of a Flink-based application.
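+
+As referenced above, here is a minimal hand-written sketch of how these top-level blocks compose, enabling snapshots alongside the required code configuration. Most class names reuse those from the examples above; the `...ApplicationSnapshotConfiguration` name follows the same generated-binding pattern and is an assumption, and the bucket ARN is a placeholder:
+
+```python
+from imports.aws.kinesisanalyticsv2_application import (
+    Kinesisanalyticsv2ApplicationApplicationConfiguration as ApplicationConfiguration,
+    Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfiguration as ApplicationCodeConfiguration,
+    Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfigurationCodeContent as CodeContent,
+    Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationCodeConfigurationCodeContentS3ContentLocation as S3ContentLocation,
+    Kinesisanalyticsv2ApplicationApplicationConfigurationApplicationSnapshotConfiguration as ApplicationSnapshotConfiguration,
+)
+
+# Compose the required code configuration with optional Flink snapshot support.
+application_configuration = ApplicationConfiguration(
+    application_code_configuration=ApplicationCodeConfiguration(
+        code_content=CodeContent(
+            s3_content_location=S3ContentLocation(
+                bucket_arn=code_bucket_arn,  # placeholder bucket ARN variable
+                file_key="flink-app.jar"
+            )
+        ),
+        code_content_type="ZIPFILE"
+    ),
+    application_snapshot_configuration=ApplicationSnapshotConfiguration(
+        snapshots_enabled=True  # Flink-based applications only
+    )
+)
+```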
+ +The `application_code_configuration` object supports the following: + +* `code_content_type` - (Required) Specifies whether the code content is in text or zip format. Valid values: `PLAINTEXT`, `ZIPFILE`. +* `code_content` - (Optional) The location and type of the application code. + +The `code_content` object supports the following: + +* `s3_content_location` - (Optional) Information about the Amazon S3 bucket containing the application code. +* `text_content` - (Optional) The text-format code for the application. + +The `s3_content_location` object supports the following: + +* `bucket_arn` - (Required) The ARN for the S3 bucket containing the application code. +* `file_key` - (Required) The file key for the object containing the application code. +* `object_version` - (Optional) The version of the object containing the application code. + +The `application_snapshot_configuration` object supports the following: + +* `snapshots_enabled` - (Required) Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application. + +The `environment_properties` object supports the following: + +* `property_group` - (Required) Describes the execution property groups. + +The `property_group` object supports the following: + +* `property_group_id` - (Required) The key of the application execution property key-value map. +* `property_map` - (Required) Application execution property key-value map. + +The `flink_application_configuration` object supports the following: + +* `checkpoint_configuration` - (Optional) Describes an application's checkpointing configuration. +* `monitoring_configuration` - (Optional) Describes configuration parameters for CloudWatch logging for an application. +* `parallelism_configuration` - (Optional) Describes parameters for how an application executes multiple tasks simultaneously. + +The `checkpoint_configuration` object supports the following: + +* `configuration_type` - (Required) Describes whether the application uses Kinesis Data Analytics' default checkpointing behavior. Valid values: `CUSTOM`, `DEFAULT`. Set this attribute to `CUSTOM` in order for any specified `checkpointing_enabled`, `checkpoint_interval`, or `min_pause_between_checkpoints` attribute values to be effective. If this attribute is set to `DEFAULT`, the application will always use the following values: + * `checkpointing_enabled = true` + * `checkpoint_interval = 60000` + * `min_pause_between_checkpoints = 5000` +* `checkpointing_enabled` - (Optional) Describes whether checkpointing is enabled for a Flink-based Kinesis Data Analytics application. +* `checkpoint_interval` - (Optional) Describes the interval in milliseconds between checkpoint operations. +* `min_pause_between_checkpoints` - (Optional) Describes the minimum time in milliseconds after a checkpoint operation completes that a new checkpoint operation can start. + +The `monitoring_configuration` object supports the following: + +* `configuration_type` - (Required) Describes whether to use the default CloudWatch logging configuration for an application. Valid values: `CUSTOM`, `DEFAULT`. Set this attribute to `CUSTOM` in order for any specified `log_level` or `metrics_level` attribute values to be effective. +* `log_level` - (Optional) Describes the verbosity of the CloudWatch Logs for an application. Valid values: `DEBUG`, `ERROR`, `INFO`, `WARN`. +* `metrics_level` - (Optional) Describes the granularity of the CloudWatch Logs for an application. Valid values: `APPLICATION`, `OPERATOR`, `PARALLELISM`, `TASK`. 
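+
+Because the explicit checkpoint and logging values above only take effect with `configuration_type = "CUSTOM"`, here is a hand-written sketch of a fully customized Flink configuration, using the same classes as the Apache Flink example above (the specific timings and levels are illustrative, not recommendations):
+
+```python
+from imports.aws.kinesisanalyticsv2_application import (
+    Kinesisanalyticsv2ApplicationApplicationConfigurationFlinkApplicationConfiguration as FlinkApplicationConfiguration,
+    Kinesisanalyticsv2ApplicationApplicationConfigurationFlinkApplicationConfigurationCheckpointConfiguration as CheckpointConfiguration,
+    Kinesisanalyticsv2ApplicationApplicationConfigurationFlinkApplicationConfigurationMonitoringConfiguration as MonitoringConfiguration,
+)
+
+flink_configuration = FlinkApplicationConfiguration(
+    checkpoint_configuration=CheckpointConfiguration(
+        configuration_type="CUSTOM",          # required for the explicit values below to take effect
+        checkpointing_enabled=True,
+        checkpoint_interval=30000,            # milliseconds between checkpoint operations
+        min_pause_between_checkpoints=10000   # milliseconds after a checkpoint completes
+    ),
+    monitoring_configuration=MonitoringConfiguration(
+        configuration_type="CUSTOM",          # likewise required for log_level/metrics_level
+        log_level="WARN",
+        metrics_level="OPERATOR"
+    )
+)
+```
+
+With `configuration_type = "DEFAULT"`, the same blocks silently fall back to the service defaults listed above.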
+ +The `parallelism_configuration` object supports the following: + +* `configuration_type` - (Required) Describes whether the application uses the default parallelism for the Kinesis Data Analytics service. Valid values: `CUSTOM`, `DEFAULT`. Set this attribute to `CUSTOM` in order for any specified `auto_scaling_enabled`, `parallelism`, or `parallelism_per_kpu` attribute values to be effective. +* `auto_scaling_enabled` - (Optional) Describes whether the Kinesis Data Analytics service can increase the parallelism of the application in response to increased throughput. +* `parallelism` - (Optional) Describes the initial number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform. +* `parallelism_per_kpu` - (Optional) Describes the number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform per Kinesis Processing Unit (KPU) used by the application. + +The `run_configuration` object supports the following: + +* `application_restore_configuration` - (Optional) The restore behavior of a restarting application. +* `flink_run_configuration` - (Optional) The starting parameters for a Flink-based Kinesis Data Analytics application. + +The `application_restore_configuration` object supports the following: + +* `application_restore_type` - (Required) Specifies how the application should be restored. Valid values: `RESTORE_FROM_CUSTOM_SNAPSHOT`, `RESTORE_FROM_LATEST_SNAPSHOT`, `SKIP_RESTORE_FROM_SNAPSHOT`. +* `snapshot_name` - (Optional) The identifier of an existing snapshot of application state to use to restart an application. The application uses this value if `RESTORE_FROM_CUSTOM_SNAPSHOT` is specified for `application_restore_type`. + +The `flink_run_configuration` object supports the following: + +* `allow_non_restored_state` - (Optional) When restoring from a snapshot, specifies whether the runtime is allowed to skip a state that cannot be mapped to the new program. Default is `false`. + +The `sql_application_configuration` object supports the following: + +* `input` - (Optional) The input stream used by the application. +* `output` - (Optional) The destination streams used by the application. +* `reference_data_source` - (Optional) The reference data source used by the application. + +The `input` object supports the following: + +* `input_schema` - (Required) Describes the format of the data in the streaming source, and how each data element maps to corresponding columns in the in-application stream that is being created. +* `name_prefix` - (Required) The name prefix to use when creating an in-application stream. +* `input_parallelism` - (Optional) Describes the number of in-application streams to create. +* `input_processing_configuration` - (Optional) The input processing configuration for the input. +An input processor transforms records as they are received from the stream, before the application's SQL code executes. +* `input_starting_position_configuration` (Optional) The point at which the application starts processing records from the streaming source. +* `kinesis_firehose_input` - (Optional) If the streaming source is a [Kinesis Data Firehose delivery stream](/docs/providers/aws/r/kinesis_firehose_delivery_stream.html), identifies the delivery stream's ARN. +* `kinesis_streams_input` - (Optional) If the streaming source is a [Kinesis data stream](/docs/providers/aws/r/kinesis_stream.html), identifies the stream's Amazon Resource Name (ARN). 
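+
+For example, attaching a preprocessing Lambda to an input is a matter of nesting `input_lambda_processor` inside `input_processing_configuration`. This is a hand-written sketch; the class names follow the generated-binding naming pattern used in the SQL example above, and the Lambda reference is the same placeholder style as in that example:
+
+```python
+from cdktf import Token
+from imports.aws.kinesisanalyticsv2_application import (
+    Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputProcessingConfiguration as InputProcessingConfiguration,
+    Kinesisanalyticsv2ApplicationApplicationConfigurationSqlApplicationConfigurationInputInputProcessingConfigurationInputLambdaProcessor as InputLambdaProcessor,
+)
+
+# Records are handed to this Lambda function before the application's SQL code runs.
+input_processing_configuration = InputProcessingConfiguration(
+    input_lambda_processor=InputLambdaProcessor(
+        resource_arn=Token.as_string(aws_lambda_function_example.arn)  # placeholder, as above
+    )
+)
+```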
+ +The `input_parallelism` object supports the following: + +* `count` - (Optional) The number of in-application streams to create. + +The `input_processing_configuration` object supports the following: + +* `input_lambda_processor` - (Required) Describes the [Lambda function](/docs/providers/aws/r/lambda_function.html) that is used to preprocess the records in the stream before being processed by your application code. + +The `input_lambda_processor` object supports the following: + +* `resource_arn` - (Required) The ARN of the Lambda function that operates on records in the stream. + +The `input_schema` object supports the following: + +* `record_column` - (Required) Describes the mapping of each data element in the streaming source to the corresponding column in the in-application stream. +* `record_format` - (Required) Specifies the format of the records on the streaming source. +* `record_encoding` - (Optional) Specifies the encoding of the records in the streaming source. For example, `UTF-8`. + +The `record_column` object supports the following: + +* `name` - (Required) The name of the column that is created in the in-application input stream or reference table. +* `sql_type` - (Required) The type of column created in the in-application input stream or reference table. +* `mapping` - (Optional) A reference to the data element in the streaming input or the reference data source. + +The `record_format` object supports the following: + +* `mapping_parameters` - (Required) Provides additional mapping information specific to the record format (such as JSON, CSV, or record fields delimited by some delimiter) on the streaming source. +* `record_format_type` - (Required) The type of record format. Valid values: `CSV`, `JSON`. + +The `mapping_parameters` object supports the following: + +* `csv_mapping_parameters` - (Optional) Provides additional mapping information when the record format uses delimiters (for example, CSV). +* `json_mapping_parameters` - (Optional) Provides additional mapping information when JSON is the record format on the streaming source. + +The `csv_mapping_parameters` object supports the following: + +* `record_column_delimiter` - (Required) The column delimiter. For example, in a CSV format, a comma (`,`) is the typical column delimiter. +* `record_row_delimiter` - (Required) The row delimiter. For example, in a CSV format, `\n` is the typical row delimiter. + +The `json_mapping_parameters` object supports the following: + +* `record_row_path` - (Required) The path to the top-level parent that contains the records. + +The `input_starting_position_configuration` object supports the following: + +~> **NOTE:** To modify an application's starting position, first stop the application by setting `start_application = false`, then update `starting_position` and set `start_application = true`. + +* `input_starting_position` - (Required) The starting position on the stream. Valid values: `LAST_STOPPED_POINT`, `NOW`, `TRIM_HORIZON`. + +The `kinesis_firehose_input` object supports the following: + +* `resource_arn` - (Required) The ARN of the delivery stream. + +The `kinesis_streams_input` object supports the following: + +* `resource_arn` - (Required) The ARN of the input Kinesis data stream to read. + +The `output` object supports the following: + +* `destination_schema` - (Required) Describes the data format when records are written to the destination. +* `name` - (Required) The name of the in-application stream. 
+* `kinesis_firehose_output` - (Optional) Identifies a [Kinesis Data Firehose delivery stream](/docs/providers/aws/r/kinesis_firehose_delivery_stream.html) as the destination. +* `kinesis_streams_output` - (Optional) Identifies a [Kinesis data stream](/docs/providers/aws/r/kinesis_stream.html) as the destination. +* `lambda_output` - (Optional) Identifies a [Lambda function](/docs/providers/aws/r/lambda_function.html) as the destination. + +The `destination_schema` object supports the following: + +* `record_format_type` - (Required) Specifies the format of the records on the output stream. Valid values: `CSV`, `JSON`. + +The `kinesis_firehose_output` object supports the following: + +* `resource_arn` - (Required) The ARN of the destination delivery stream to write to. + +The `kinesis_streams_output` object supports the following: + +* `resource_arn` - (Required) The ARN of the destination Kinesis data stream to write to. + +The `lambda_output` object supports the following: + +* `resource_arn` - (Required) The ARN of the destination Lambda function to write to. + +The `reference_data_source` object supports the following: + +* `reference_schema` - (Required) Describes the format of the data in the streaming source, and how each data element maps to corresponding columns created in the in-application stream. +* `s3_reference_data_source` - (Required) Identifies the S3 bucket and object that contains the reference data. +* `table_name` - (Required) The name of the in-application table to create. + +The `reference_schema` object supports the following: + +* `record_column` - (Required) Describes the mapping of each data element in the streaming source to the corresponding column in the in-application stream. +* `record_format` - (Required) Specifies the format of the records on the streaming source. +* `record_encoding` - (Optional) Specifies the encoding of the records in the streaming source. For example, `UTF-8`. + +The `s3_reference_data_source` object supports the following: + +* `bucket_arn` - (Required) The ARN of the S3 bucket. +* `file_key` - (Required) The object key name containing the reference data. + +The `vpc_configuration` object supports the following: + +* `security_group_ids` - (Required) The [Security Group](/docs/providers/aws/r/security_group.html) IDs used by the VPC configuration. +* `subnet_ids` - (Required) The [Subnet](/docs/providers/aws/r/subnet.html) IDs used by the VPC configuration. + +The `cloudwatch_logging_options` object supports the following: + +* `log_stream_arn` - (Required) The ARN of the CloudWatch log stream to receive application messages. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The application identifier. +* `arn` - The ARN of the application. +* `create_timestamp` - The current timestamp when the application was created. +* `last_update_timestamp` - The current timestamp when the application was last updated. +* `status` - The status of the application. +* `version_id` - The current application version. Kinesis Data Analytics updates the `version_id` each time the application is updated. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `update` - (Default `10m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_kinesisanalyticsv2_application` using the application ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_kinesisanalyticsv2_application` using the application ARN. For example: + +```console +% terraform import aws_kinesisanalyticsv2_application.example arn:aws:kinesisanalytics:us-west-2:123456789012:application/example-sql-application +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kinesisanalyticsv2_application_snapshot.html.markdown b/website/docs/cdktf/python/r/kinesisanalyticsv2_application_snapshot.html.markdown new file mode 100644 index 00000000000..30d4fd35769 --- /dev/null +++ b/website/docs/cdktf/python/r/kinesisanalyticsv2_application_snapshot.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Kinesis Analytics V2" +layout: "aws" +page_title: "AWS: aws_kinesisanalyticsv2_application_snapshot" +description: |- + Manages a Kinesis Analytics v2 Application Snapshot. +--- + + + +# Resource: aws_kinesisanalyticsv2_application_snapshot + +Manages a Kinesis Analytics v2 Application Snapshot. +Snapshots are the AWS implementation of [Flink Savepoints](https://ci.apache.org/projects/flink/flink-docs-release-1.11/ops/state/savepoints.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kinesisanalyticsv2_application_snapshot import Kinesisanalyticsv2ApplicationSnapshot +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Kinesisanalyticsv2ApplicationSnapshot(self, "example", + application_name=Token.as_string(aws_kinesisanalyticsv2_application_example.name), + snapshot_name="example-snapshot" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_name` - (Required) The name of an existing [Kinesis Analytics v2 Application](/docs/providers/aws/r/kinesisanalyticsv2_application.html). Note that the application must be running for a snapshot to be created. +* `snapshot_name` - (Required) The name of the application snapshot. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The application snapshot identifier. +* `application_version_id` - The current application version ID when the snapshot was created. +* `snapshot_creation_timestamp` - The timestamp of the application snapshot. 
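+
+The snapshot in the example above references an application defined elsewhere. Because the application must be running when the snapshot is taken, a paired configuration typically sets `start_application=True` on the application resource, as in this hand-written sketch (not `cdktf convert` output; the role ARN, runtime, and names are placeholders):
+
+```python
+# Hand-written sketch, not 'cdktf convert' output. The service execution role
+# ARN is a placeholder; the snapshot depends on a running application.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.kinesisanalyticsv2_application import Kinesisanalyticsv2Application
+from imports.aws.kinesisanalyticsv2_application_snapshot import Kinesisanalyticsv2ApplicationSnapshot
+
+class SnapshotSketch(TerraformStack):
+    def __init__(self, scope: Construct, name: str):
+        super().__init__(scope, name)
+        app = Kinesisanalyticsv2Application(self, "example",
+            name="example-flink-application",
+            runtime_environment="FLINK-1_8",
+            service_execution_role="arn:aws:iam::123456789012:role/example",  # placeholder
+            start_application=True  # snapshots require a running application
+        )
+        Kinesisanalyticsv2ApplicationSnapshot(self, "example_snapshot",
+            application_name=app.name,
+            snapshot_name="example-snapshot"
+        )
+```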
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_kinesisanalyticsv2_application_snapshot` using `application_name` together with `snapshot_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_kinesisanalyticsv2_application_snapshot` using `application_name` together with `snapshot_name`. For example:
+
+```console
+% terraform import aws_kinesisanalyticsv2_application_snapshot.example example-application/example-snapshot
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kms_alias.html.markdown b/website/docs/cdktf/python/r/kms_alias.html.markdown
new file mode 100644
index 00000000000..6a8e7d1c80c
--- /dev/null
+++ b/website/docs/cdktf/python/r/kms_alias.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_alias"
+description: |-
+  Provides a display name for a customer master key.
+---
+
+
+
+# Resource: aws_kms_alias
+
+Provides an alias for a KMS customer master key. The AWS Console enforces a 1-to-1 mapping between aliases and keys,
+but the API (and hence Terraform) allows you to create as many aliases as
+the [account limits](http://docs.aws.amazon.com/kms/latest/developerguide/limits.html) allow.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_alias import KmsAlias
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        a = KmsKey(self, "a")
+        aws_kms_alias_a = KmsAlias(self, "a_1",
+            name="alias/my-key-alias",
+            target_key_id=a.key_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kms_alias_a.override_logical_id("a")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The display name of the alias. The name must start with the word "alias" followed by a forward slash (`alias/`).
+* `name_prefix` - (Optional) Creates a unique alias beginning with the specified prefix.
+The name must start with the word "alias" followed by a forward slash (`alias/`). Conflicts with `name`.
+* `target_key_id` - (Required) Identifier of the key the alias refers to; can be either a key ARN or a key ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the key alias.
+* `target_key_arn` - The Amazon Resource Name (ARN) of the target key identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS aliases using the `name`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import KMS aliases using the `name`. For example:
+
+```console
+% terraform import aws_kms_alias.a alias/my-key-alias
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kms_ciphertext.html.markdown b/website/docs/cdktf/python/r/kms_ciphertext.html.markdown
new file mode 100644
index 00000000000..59f27a46025
--- /dev/null
+++ b/website/docs/cdktf/python/r/kms_ciphertext.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_ciphertext"
+description: |-
+  Provides ciphertext encrypted using a KMS key
+---
+
+
+
+# Resource: aws_kms_ciphertext
+
+The KMS ciphertext resource allows you to encrypt plaintext into ciphertext
+by using an AWS KMS customer master key. The value returned by this resource
+is stable across every apply. For a ciphertext value that changes on each apply, see
+the [`aws_kms_ciphertext` data source](/docs/providers/aws/d/kms_ciphertext.html).
+
+~> **Note:** All arguments including the plaintext will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_ciphertext import KmsCiphertext
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        oauth_config = KmsKey(self, "oauth_config",
+            description="oauth config",
+            is_enabled=True
+        )
+        KmsCiphertext(self, "oauth",
+            key_id=oauth_config.key_id,
+            plaintext="{\n \"client_id\": \"e587dbae22222f55da22\",\n \"client_secret\": \"8289575d00000ace55e1815ec13673955721b8a5\"\n}\n\n"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `plaintext` - (Required) Data to be encrypted. Note that this may show up in logs, and it will be stored in the state file.
+* `key_id` - (Required) Globally unique key ID for the customer master key.
+* `context` - (Optional) An optional mapping that makes up the encryption context.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `ciphertext_blob` - Base64 encoded ciphertext.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kms_custom_key_store.html.markdown b/website/docs/cdktf/python/r/kms_custom_key_store.html.markdown
new file mode 100644
index 00000000000..0a7a5b9a433
--- /dev/null
+++ b/website/docs/cdktf/python/r/kms_custom_key_store.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_custom_key_store"
+description: |-
+  Terraform resource for managing an AWS KMS (Key Management) Custom Key Store.
+---
+
+
+
+# Resource: aws_kms_custom_key_store
+
+Terraform resource for managing an AWS KMS (Key Management) Custom Key Store.
+ +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kms_custom_key_store import KmsCustomKeyStore +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KmsCustomKeyStore(self, "test", + cloud_hsm_cluster_id=cloud_hsm_cluster_id.string_value, + custom_key_store_name="kms-custom-key-store-test", + key_store_password="noplaintextpasswords1", + trust_anchor_certificate=Token.as_string(Fn.file("anchor-certificate.crt")) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `cloud_hsm_cluster_id` - (Required) Cluster ID of CloudHSM. +* `custom_key_store_name` - (Required) Unique name for Custom Key Store. +* `key_store_password` - (Required) Password for `kmsuser` on CloudHSM. +* `trust_anchor_certificate` - (Required) Customer certificate used for signing on CloudHSM. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Custom Key Store ID + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `15m`) +* `update` - (Default `15m`) +* `delete` - (Default `15m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS (Key Management) Custom Key Store using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import KMS (Key Management) Custom Key Store using the `id`. For example: + +```console +% terraform import aws_kms_custom_key_store.example cks-5ebd4ef395a96288e +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kms_external_key.html.markdown b/website/docs/cdktf/python/r/kms_external_key.html.markdown new file mode 100644 index 00000000000..96f35334b8d --- /dev/null +++ b/website/docs/cdktf/python/r/kms_external_key.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "KMS (Key Management)" +layout: "aws" +page_title: "AWS: aws_kms_external_key" +description: |- + Manages a single-Region or multi-Region primary KMS key that uses external key material. +--- + + + +# Resource: aws_kms_external_key + +Manages a single-Region or multi-Region primary KMS key that uses external key material. +To instead manage a single-Region or multi-Region primary KMS key where AWS automatically generates and potentially rotates key material, see the [`aws_kms_key` resource](/docs/providers/aws/r/kms_key.html). + +~> **Note:** All arguments including the key material will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kms_external_key import KmsExternalKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + KmsExternalKey(self, "example", + description="KMS EXTERNAL for AMI encryption" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bypass_policy_lockout_safety_check` - (Optional) Specifies whether to disable the policy lockout check performed when creating or updating the key's policy. Setting this value to `true` increases the risk that the key becomes unmanageable. For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the AWS Key Management Service Developer Guide. Defaults to `false`. +* `deletion_window_in_days` - (Optional) Duration in days after which the key is deleted after destruction of the resource. Must be between `7` and `30` days. Defaults to `30`. +* `description` - (Optional) Description of the key. +* `enabled` - (Optional) Specifies whether the key is enabled. Keys pending import can only be `false`. Imported keys default to `true` unless expired. +* `key_material_base64` - (Optional) Base64 encoded 256-bit symmetric encryption key material to import. The CMK is permanently associated with this key material. The same key material can be reimported, but you cannot import different key material. +* `multi_region` - (Optional) Indicates whether the KMS key is a multi-Region (`true`) or regional (`false`) key. Defaults to `false`. +* `policy` - (Optional) A key policy JSON document. If you do not provide a key policy, AWS KMS attaches a default key policy to the CMK. +* `tags` - (Optional) A key-value map of tags to assign to the key. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `valid_to` - (Optional) Time at which the imported key material expires. When the key material expires, AWS KMS deletes the key material and the CMK becomes unusable. If not specified, key material does not expire. Valid values: [RFC3339 time string](https://tools.ietf.org/html/rfc3339#section-5.8) (`YYYY-MM-DDTHH:MM:SSZ`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the key. +* `expiration_model` - Whether the key material expires. Empty when pending key material import, otherwise `KEY_MATERIAL_EXPIRES` or `KEY_MATERIAL_DOES_NOT_EXPIRE`. +* `id` - The unique identifier for the key. +* `key_state` - The state of the CMK. +* `key_usage` - The cryptographic operations for which you can use the CMK. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS External Keys using the `id`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import KMS External Keys using the `id`. For example: + +```console +% terraform import aws_kms_external_key.a arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kms_grant.html.markdown b/website/docs/cdktf/python/r/kms_grant.html.markdown new file mode 100644 index 00000000000..b26bb60171d --- /dev/null +++ b/website/docs/cdktf/python/r/kms_grant.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "KMS (Key Management)" +layout: "aws" +page_title: "AWS: aws_kms_grant" +description: |- + Provides a resource-based access control mechanism for KMS Customer Master Keys. +--- + + + +# Resource: aws_kms_grant + +Provides a resource-based access control mechanism for a KMS customer master key. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.kms_grant import KmsGrant +from imports.aws.kms_key import KmsKey +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + a = KmsKey(self, "a") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=Token.as_list("lambda.amazonaws.com"), + type="Service" + ) + ] + ) + ] + ) + aws_iam_role_a = IamRole(self, "a_2", + assume_role_policy=Token.as_string(assume_role.json), + name="iam-role-for-grant" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_a.override_logical_id("a") + aws_kms_grant_a = KmsGrant(self, "a_3", + constraints=[KmsGrantConstraints( + encryption_context_equals={ + "Department": "Finance" + } + ) + ], + grantee_principal=Token.as_string(aws_iam_role_a.arn), + key_id=a.key_id, + name="my-grant", + operations=["Encrypt", "Decrypt", "GenerateDataKey"] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kms_grant_a.override_logical_id("a") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional, Forces new resources) A friendly name for identifying the grant. +* `key_id` - (Required, Forces new resources) The unique identifier for the customer master key (CMK) that the grant applies to. Specify the key ID or the Amazon Resource Name (ARN) of the CMK. To specify a CMK in a different AWS account, you must use the key ARN. +* `grantee_principal` - (Required, Forces new resources) The principal that is given permission to perform the operations that the grant permits in ARN format. 
Note that due to eventual consistency issues around IAM principals, Terraform's state may not always be refreshed to reflect what is true in AWS.
+* `operations` - (Required, Forces new resources) A list of operations that the grant permits. The permitted values are: `Decrypt`, `Encrypt`, `GenerateDataKey`, `GenerateDataKeyWithoutPlaintext`, `ReEncryptFrom`, `ReEncryptTo`, `Sign`, `Verify`, `GetPublicKey`, `CreateGrant`, `RetireGrant`, `DescribeKey`, `GenerateDataKeyPair`, or `GenerateDataKeyPairWithoutPlaintext`.
+* `retiring_principal` - (Optional, Forces new resources) The principal that is given permission to retire the grant, in ARN format, by using the RetireGrant operation. Note that due to eventual consistency issues around IAM principals, Terraform's state may not always be refreshed to reflect what is true in AWS.
+* `constraints` - (Optional, Forces new resources) A structure that you can use to allow certain operations in the grant only when the desired encryption context is present. For more information about encryption context, see [Encryption Context](http://docs.aws.amazon.com/kms/latest/developerguide/encryption-context.html).
+* `grant_creation_tokens` - (Optional, Forces new resources) A list of grant tokens to be used when creating the grant. See [Grant Tokens](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#grant_token) for more information about grant tokens.
+* `retire_on_delete` - (Default `false`, Forces new resources) If set to `false` (the default), the grants will be revoked upon deletion; if set to `true`, the grants will be retired upon deletion where possible. Note that retiring grants requires special permissions, which is why revoking is the default.
+  See [RetireGrant](https://docs.aws.amazon.com/kms/latest/APIReference/API_RetireGrant.html) for more information.
+
+The `constraints` block supports the following arguments:
+
+* `encryption_context_equals` - (Optional) A list of key-value pairs that must match the encryption context in subsequent cryptographic operation requests. The grant allows the operation only when the encryption context in the request is the same as the encryption context specified in this constraint. Conflicts with `encryption_context_subset`.
+* `encryption_context_subset` - (Optional) A list of key-value pairs that must be included in the encryption context of subsequent cryptographic operation requests. The grant allows the cryptographic operation only when the encryption context in the request includes the key-value pairs specified in this constraint, although it can include additional key-value pairs. Conflicts with `encryption_context_equals`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `grant_id` - The unique identifier for the grant.
+* `grant_token` - The grant token for the created grant. For more information, see [Grant Tokens](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#grant_token).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS Grants using the Key ID and Grant ID separated by a colon (`:`).
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import KMS Grants using the Key ID and Grant ID separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_kms_grant.test 1234abcd-12ab-34cd-56ef-1234567890ab:abcde1237f76e4ba7987489ac329fbfba6ad343d6f7075dbd1ef191f0120514
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kms_key.html.markdown b/website/docs/cdktf/python/r/kms_key.html.markdown
new file mode 100644
index 00000000000..81ef5786195
--- /dev/null
+++ b/website/docs/cdktf/python/r/kms_key.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_key"
+description: |-
+  Manages a single-Region or multi-Region primary KMS key.
+---
+
+
+
+# Resource: aws_kms_key
+
+Manages a single-Region or multi-Region primary KMS key.
+
+~> **NOTE on KMS Key Policy:** KMS Key Policy can be configured in either the standalone resource [`aws_kms_key_policy`](kms_key_policy.html)
+or with the parameter `policy` in this resource.
+Configuring with both will cause inconsistencies and may overwrite configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_key import KmsKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        KmsKey(self, "a",
+            deletion_window_in_days=10,
+            description="KMS key 1"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) The description of the key as viewed in the AWS console.
+* `key_usage` - (Optional) Specifies the intended use of the key. Valid values: `ENCRYPT_DECRYPT`, `SIGN_VERIFY`, or `GENERATE_VERIFY_MAC`.
+Defaults to `ENCRYPT_DECRYPT`.
+* `custom_key_store_id` - (Optional) ID of the KMS [Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/create-cmk-keystore.html) where the key will be stored instead of KMS (e.g., CloudHSM).
+* `customer_master_key_spec` - (Optional) Specifies whether the key contains a symmetric key or an asymmetric key pair and the encryption algorithms or signing algorithms that the key supports.
+Valid values: `SYMMETRIC_DEFAULT`, `RSA_2048`, `RSA_3072`, `RSA_4096`, `HMAC_256`, `ECC_NIST_P256`, `ECC_NIST_P384`, `ECC_NIST_P521`, or `ECC_SECG_P256K1`. Defaults to `SYMMETRIC_DEFAULT`. For help with choosing a key spec, see the [AWS KMS Developer Guide](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-choose.html).
+* `policy` - (Optional) A valid policy JSON document. Although this is a key policy, not an IAM policy, an [`aws_iam_policy_document`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document), in the form that designates a principal, can be used. For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+~> **NOTE:** All KMS keys must have a key policy.
If a key policy is not specified, AWS gives the KMS key a [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) that gives all principals in the owning account unlimited access to all KMS operations for the key. This default key policy effectively delegates all access control to IAM policies and KMS grants. + +* `bypass_policy_lockout_safety_check` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check. +Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately. +For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_. +The default value is `false`. +* `deletion_window_in_days` - (Optional) The waiting period, specified in number of days. After the waiting period ends, AWS KMS deletes the KMS key. +If you specify a value, it must be between `7` and `30`, inclusive. If you do not specify a value, it defaults to `30`. +If the KMS key is a multi-Region primary key with replicas, the waiting period begins when the last of its replica keys is deleted. Otherwise, the waiting period begins immediately. +* `is_enabled` - (Optional) Specifies whether the key is enabled. Defaults to `true`. +* `enable_key_rotation` - (Optional) Specifies whether [key rotation](http://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html) is enabled. Defaults to `false`. +* `multi_region` - (Optional) Indicates whether the KMS key is a multi-Region (`true`) or regional (`false`) key. Defaults to `false`. +* `tags` - (Optional) A map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the key. +* `key_id` - The globally unique identifier for the key. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS Keys using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import KMS Keys using the `id`. 
For example:
+
+```console
+% terraform import aws_kms_key.a 1234abcd-12ab-34cd-56ef-1234567890ab
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kms_key_policy.html.markdown b/website/docs/cdktf/python/r/kms_key_policy.html.markdown
new file mode 100644
index 00000000000..923e5c554c9
--- /dev/null
+++ b/website/docs/cdktf/python/r/kms_key_policy.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_key_policy"
+description: |-
+  Attaches a policy to a KMS Key.
+---
+
+
+
+# Resource: aws_kms_key_policy
+
+Attaches a policy to a KMS Key.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_key import KmsKey
+from imports.aws.kms_key_policy import KmsKeyPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            description="example"
+        )
+        aws_kms_key_policy_example = KmsKeyPolicy(self, "example_1",
+            key_id=example.id,
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Id": "example",
+                    "Statement": [{
+                        "Action": "kms:*",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "AWS": "*"
+                        },
+                        "Resource": "*",
+                        "Sid": "Enable IAM User Permissions"
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                }))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kms_key_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `key_id` - (Required) The ID of the KMS Key to attach the policy to.
+* `policy` - (Required) A valid policy JSON document. Although this is a key policy, not an IAM policy, an [`aws_iam_policy_document`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document), in the form that designates a principal, can be used. For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+~> **NOTE:** All KMS keys must have a key policy. If a key policy is not specified, or this resource is destroyed, AWS gives the KMS key a [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) that gives all principals in the owning account unlimited access to all KMS operations for the key. This default key policy effectively delegates all access control to IAM policies and KMS grants.
+
+* `bypass_policy_lockout_safety_check` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check.
+Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately. If this value is set, and the resource is destroyed, a warning will be shown, and the resource will be removed from state.
+For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
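+
+Because a key policy is a policy document that designates a principal, the `policy` argument can also be produced with the `aws_iam_policy_document` data source instead of `Fn.jsonencode`. A hand-written sketch (not `cdktf convert` output) of that pattern, reusing the generated `DataAwsIamPolicyDocument` bindings shown on other pages in this set of docs:
+
+```python
+# Hand-written sketch, not 'cdktf convert' output: builds the key policy with
+# the aws_iam_policy_document data source and feeds its rendered JSON to the
+# key policy resource.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.data_aws_iam_policy_document import (
+    DataAwsIamPolicyDocument,
+    DataAwsIamPolicyDocumentStatement,
+    DataAwsIamPolicyDocumentStatementPrincipals,
+)
+from imports.aws.kms_key import KmsKey
+from imports.aws.kms_key_policy import KmsKeyPolicy
+
+class KeyPolicySketch(TerraformStack):
+    def __init__(self, scope: Construct, name: str):
+        super().__init__(scope, name)
+        key = KmsKey(self, "example", description="example")
+        doc = DataAwsIamPolicyDocument(self, "key_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                sid="Enable IAM User Permissions",
+                effect="Allow",
+                actions=["kms:*"],
+                resources=["*"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    type="AWS",
+                    identifiers=["*"]
+                )]
+            )]
+        )
+        KmsKeyPolicy(self, "example_policy",
+            key_id=key.id,
+            policy=Token.as_string(doc.json)
+        )
+```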
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS Key Policies using the `key_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import KMS Key Policies using the `key_id`. For example:
+
+```console
+% terraform import aws_kms_key_policy.a 1234abcd-12ab-34cd-56ef-1234567890ab
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/kms_replica_external_key.html.markdown b/website/docs/cdktf/python/r/kms_replica_external_key.html.markdown
new file mode 100644
index 00000000000..9a76363f535
--- /dev/null
+++ b/website/docs/cdktf/python/r/kms_replica_external_key.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_replica_external_key"
+description: |-
+  Manages a KMS multi-Region replica key that uses external key material.
+---
+
+
+
+# Resource: aws_kms_replica_external_key
+
+Manages a KMS multi-Region replica key that uses external key material.
+See the [AWS KMS Developer Guide](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-import.html) for more information on importing key material into multi-Region keys.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_external_key import KmsExternalKey
+from imports.aws.kms_replica_external_key import KmsReplicaExternalKey
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = AwsProvider(self, "aws",
+            alias="primary",
+            region="us-east-1"
+        )
+        AwsProvider(self, "aws_1",
+            region="us-west-2"
+        )
+        # Assign the primary key to a variable so the replica below can reference its ARN.
+        aws_kms_external_primary = KmsExternalKey(self, "primary",
+            deletion_window_in_days=30,
+            description="Multi-Region primary key",
+            enabled=True,
+            key_material_base64="...",
+            multi_region=True,
+            provider=primary
+        )
+        KmsReplicaExternalKey(self, "replica",
+            deletion_window_in_days=7,
+            description="Multi-Region replica key",
+            key_material_base64="...",
+            primary_key_arn=Token.as_string(aws_kms_external_primary.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bypass_policy_lockout_safety_check` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check.
+Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately.
+For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_.
+The default value is `false`.
+* `deletion_window_in_days` - (Optional) The waiting period, specified in number of days. After the waiting period ends, AWS KMS deletes the KMS key.
+If you specify a value, it must be between `7` and `30`, inclusive. If you do not specify a value, it defaults to `30`.
+* `description` - (Optional) A description of the KMS key. +* `enabled` - (Optional) Specifies whether the replica key is enabled. Disabled KMS keys cannot be used in cryptographic operations. Keys pending import can only be `false`. Imported keys default to `true` unless expired. +* `key_material_base64` - (Optional) Base64 encoded 256-bit symmetric encryption key material to import. The KMS key is permanently associated with this key material. The same key material can be [reimported](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#reimport-key-material), but you cannot import different key material. +* `policy` - (Optional) The key policy to attach to the KMS key. If you do not specify a key policy, AWS KMS attaches the [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) to the KMS key. +For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `primary_key_arn` - (Required) The ARN of the multi-Region primary key to replicate. The primary key must be in a different AWS Region of the same AWS Partition. You can create only one replica of a given primary key in each AWS Region. +* `tags` - (Optional) A map of tags to assign to the replica key. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `valid_to` - (Optional) Time at which the imported key material expires. When the key material expires, AWS KMS deletes the key material and the key becomes unusable. If not specified, key material does not expire. Valid values: [RFC3339 time string](https://tools.ietf.org/html/rfc3339#section-5.8) (`YYYY-MM-DDTHH:MM:SSZ`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the replica key. The key ARNs of related multi-Region keys differ only in the Region value. +* `expiration_model` - Whether the key material expires. Empty when pending key material import, otherwise `KEY_MATERIAL_EXPIRES` or `KEY_MATERIAL_DOES_NOT_EXPIRE`. +* `key_id` - The key ID of the replica key. Related multi-Region keys have the same key ID. +* `key_state` - The state of the replica key. +* `key_usage` - The [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations) for which you can use the KMS key. This is a shared property of multi-Region keys. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS multi-Region replica keys using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import KMS multi-Region replica keys using the `id`. 
For example: + +```console +% terraform import aws_kms_replica_external_key.example 1234abcd-12ab-34cd-56ef-1234567890ab +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/kms_replica_key.html.markdown b/website/docs/cdktf/python/r/kms_replica_key.html.markdown new file mode 100644 index 00000000000..ee3b7890081 --- /dev/null +++ b/website/docs/cdktf/python/r/kms_replica_key.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "KMS (Key Management)" +layout: "aws" +page_title: "AWS: aws_kms_replica_key" +description: |- + Manages a KMS multi-Region replica key. +--- + + + +# Resource: aws_kms_replica_key + +Manages a KMS multi-Region replica key. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kms_key import KmsKey +from imports.aws.kms_replica_key import KmsReplicaKey +from imports.aws.provider import AwsProvider +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + primary = AwsProvider(self, "aws", + alias="primary", + region="us-east-1" + ) + AwsProvider(self, "aws_1", + region="us-west-2" + ) + aws_kms_key_primary = KmsKey(self, "primary", + deletion_window_in_days=30, + description="Multi-Region primary key", + multi_region=True, + provider=primary + ) + KmsReplicaKey(self, "replica", + deletion_window_in_days=7, + description="Multi-Region replica key", + primary_key_arn=Token.as_string(aws_kms_key_primary.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bypass_policy_lockout_safety_check` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check. +Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately. +For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_. +The default value is `false`. +* `deletion_window_in_days` - (Optional) The waiting period, specified in number of days. After the waiting period ends, AWS KMS deletes the KMS key. +If you specify a value, it must be between `7` and `30`, inclusive. If you do not specify a value, it defaults to `30`. +* `description` - (Optional) A description of the KMS key. +* `enabled` - (Optional) Specifies whether the replica key is enabled. Disabled KMS keys cannot be used in cryptographic operations. The default value is `true`. +* `policy` - (Optional) The key policy to attach to the KMS key. If you do not specify a key policy, AWS KMS attaches the [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) to the KMS key. +For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `primary_key_arn` - (Required) The ARN of the multi-Region primary key to replicate. The primary key must be in a different AWS Region of the same AWS Partition. You can create only one replica of a given primary key in each AWS Region. +* `tags` - (Optional) A map of tags to assign to the replica key. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the replica key. The key ARNs of related multi-Region keys differ only in the Region value. +* `key_id` - The key ID of the replica key. Related multi-Region keys have the same key ID. +* `key_rotation_enabled` - A Boolean value that specifies whether key rotation is enabled. This is a shared property of multi-Region keys. +* `key_spec` - The type of key material in the KMS key. This is a shared property of multi-Region keys. +* `key_usage` - The [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations) for which you can use the KMS key. This is a shared property of multi-Region keys. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS multi-Region replica keys using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import KMS multi-Region replica keys using the `id`. For example: + +```console +% terraform import aws_kms_replica_key.example 1234abcd-12ab-34cd-56ef-1234567890ab +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lakeformation_data_lake_settings.html.markdown b/website/docs/cdktf/python/r/lakeformation_data_lake_settings.html.markdown new file mode 100644 index 00000000000..a21fc2d43dc --- /dev/null +++ b/website/docs/cdktf/python/r/lakeformation_data_lake_settings.html.markdown @@ -0,0 +1,135 @@ +--- +subcategory: "Lake Formation" +layout: "aws" +page_title: "AWS: aws_lakeformation_data_lake_settings" +description: |- + Manages data lake administrators and default database and table permissions +--- + + + +# Resource: aws_lakeformation_data_lake_settings + +Manages Lake Formation principals designated as data lake administrators and lists of principal permission entries for default create database and default create table permissions. + +~> **NOTE:** Lake Formation introduces fine-grained access control for data in your data lake. Part of the changes include the `IAMAllowedPrincipals` principal in order to make Lake Formation backwards compatible with existing IAM and Glue permissions. For more information, see [Changing the Default Security Settings for Your Data Lake](https://docs.aws.amazon.com/lake-formation/latest/dg/change-settings.html) and [Upgrading AWS Glue Data Permissions to the AWS Lake Formation Model](https://docs.aws.amazon.com/lake-formation/latest/dg/upgrade-glue-lake-formation.html). 
+ +## Example Usage + +### Data Lake Admins + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lakeformation_data_lake_settings import LakeformationDataLakeSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LakeformationDataLakeSettings(self, "example", + admins=[test.arn, Token.as_string(aws_iam_role_test.arn)] + ) +``` + +### Create Default Permissions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lakeformation_data_lake_settings import LakeformationDataLakeSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LakeformationDataLakeSettings(self, "example", + admins=[test.arn, Token.as_string(aws_iam_role_test.arn)], + create_database_default_permissions=[LakeformationDataLakeSettingsCreateDatabaseDefaultPermissions( + permissions=["SELECT", "ALTER", "DROP"], + principal=test.arn + ) + ], + create_table_default_permissions=[LakeformationDataLakeSettingsCreateTableDefaultPermissions( + permissions=["ALL"], + principal=Token.as_string(aws_iam_role_test.arn) + ) + ] + ) +``` + +### Enable EMR access to LakeFormation resources + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lakeformation_data_lake_settings import LakeformationDataLakeSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LakeformationDataLakeSettings(self, "example", + admins=[test.arn, Token.as_string(aws_iam_role_test.arn)], + allow_external_data_filtering=True, + authorized_session_tag_value_list=["Amazon EMR"], + create_database_default_permissions=[LakeformationDataLakeSettingsCreateDatabaseDefaultPermissions( + permissions=["SELECT", "ALTER", "DROP"], + principal=test.arn + ) + ], + create_table_default_permissions=[LakeformationDataLakeSettingsCreateTableDefaultPermissions( + permissions=["ALL"], + principal=Token.as_string(aws_iam_role_test.arn) + ) + ], + external_data_filtering_allow_list=[ + Token.as_string(current.account_id), + Token.as_string(third_party.account_id) + ] + ) +``` + +## Argument Reference + +The following arguments are optional: + +* `admins` – (Optional) Set of ARNs of AWS Lake Formation principals (IAM users or roles). +* `catalog_id` – (Optional) Identifier for the Data Catalog. By default, the account ID. +* `create_database_default_permissions` - (Optional) Up to three configuration blocks of principal permissions for default create database permissions. Detailed below. +* `create_table_default_permissions` - (Optional) Up to three configuration blocks of principal permissions for default create table permissions. Detailed below. 
+* `trusted_resource_owners` – (Optional) List of the resource-owning account IDs that the caller's account can use to share their user access details (user ARNs).
+* `allow_external_data_filtering` - (Optional) Whether to allow Amazon EMR clusters to access data managed by Lake Formation.
+* `external_data_filtering_allow_list` - (Optional) A list of the account IDs of Amazon Web Services accounts with Amazon EMR clusters that are to perform data filtering.
+* `authorized_session_tag_value_list` - (Optional) List of the session tag values that Lake Formation will authorize. Lake Formation relies on a privileged process secured by Amazon EMR or the third-party integrator to tag the user's role while assuming it.
+
+~> **NOTE:** Although optional, not including `admins`, `create_database_default_permissions`, `create_table_default_permissions`, and/or `trusted_resource_owners` results in the setting being cleared.
+
+### create_database_default_permissions
+
+The following arguments are optional:
+
+* `permissions` - (Optional) List of permissions that are granted to the principal. Valid values may include `ALL`, `SELECT`, `ALTER`, `DROP`, `DELETE`, `INSERT`, `DESCRIBE`, and `CREATE_TABLE`. For more details, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+* `principal` - (Optional) Principal who is granted permissions. To enforce metadata and underlying data access control only by IAM on new databases and tables set `principal` to `IAM_ALLOWED_PRINCIPALS` and `permissions` to `["ALL"]`.
+
+### create_table_default_permissions
+
+The following arguments are optional:
+
+* `permissions` - (Optional) List of permissions that are granted to the principal. Valid values may include `ALL`, `SELECT`, `ALTER`, `DROP`, `DELETE`, `INSERT`, and `DESCRIBE`. For more details, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+* `principal` - (Optional) Principal who is granted permissions. To enforce metadata and underlying data access control only by IAM on new databases and tables set `principal` to `IAM_ALLOWED_PRINCIPALS` and `permissions` to `["ALL"]`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lakeformation_lf_tag.html.markdown b/website/docs/cdktf/python/r/lakeformation_lf_tag.html.markdown
new file mode 100644
index 00000000000..caac88e9d19
--- /dev/null
+++ b/website/docs/cdktf/python/r/lakeformation_lf_tag.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_lf_tag"
+description: |-
+  Creates a tag with the specified name and values.
+---
+
+
+
+# Resource: aws_lakeformation_lf_tag
+
+Creates an LF-Tag with the specified name and values. Each key must have at least one value. The maximum number of values permitted is 15.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lakeformation_lf_tag import LakeformationLfTag
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LakeformationLfTag(self, "example",
+            key="module",
+            values=["Orders", "Sales", "Customers"]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `catalog_id` - (Optional) ID of the Data Catalog to create the tag in. If omitted, this defaults to the AWS Account ID.
+* `key` - (Required) Key-name for the tag.
+* `values` - (Required) List of possible values an attribute can take.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Catalog ID and key-name of the tag.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lake Formation LF-Tags using the `catalog_id:key`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Lake Formation LF-Tags using the `catalog_id:key`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example:
+
+```console
+% terraform import aws_lakeformation_lf_tag.example 123456789012:some_key
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lakeformation_permissions.html.markdown b/website/docs/cdktf/python/r/lakeformation_permissions.html.markdown
new file mode 100644
index 00000000000..6c49c99f310
--- /dev/null
+++ b/website/docs/cdktf/python/r/lakeformation_permissions.html.markdown
@@ -0,0 +1,308 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_permissions"
+description: |-
+  Grants permissions to the principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3.
+---
+
+
+
+# Resource: aws_lakeformation_permissions
+
+Grants permissions to the principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3. Permissions are granted to a principal, in a Data Catalog, relative to a Lake Formation resource, which includes the Data Catalog, databases, tables, LF-tags, and LF-tag policies. For more information, see [Security and Access Control to Metadata and Data in Lake Formation](https://docs.aws.amazon.com/lake-formation/latest/dg/security-data-access.html).
+
+!> **WARNING:** Lake Formation permissions are not in effect by default within AWS. Using this resource will not secure your data and will result in errors if you do not change the security settings for existing resources and the default security settings for new resources. See [Default Behavior and `IAMAllowedPrincipals`](#default-behavior-and-iamallowedprincipals) for additional details.
+
+~> **NOTE:** In general, the `principal` should _NOT_ be a Lake Formation administrator or the entity (e.g., IAM role) that is running Terraform. Administrators have implicit permissions. These should be managed by granting or not granting administrator rights using `aws_lakeformation_data_lake_settings`, _not_ with this resource.
+ +## Default Behavior and `IAMAllowedPrincipals` + +**_Lake Formation permissions are not in effect by default within AWS._** `IAMAllowedPrincipals` (i.e., `IAM_ALLOWED_PRINCIPALS`) conflicts with individual Lake Formation permissions (i.e., non-`IAMAllowedPrincipals` permissions), will cause unexpected behavior, and may result in errors. + +When using Lake Formation, choose ONE of the following options as they are mutually exclusive: + +1. Use this resource (`aws_lakeformation_permissions`), change the default security settings using [`aws_lakeformation_data_lake_settings`](/docs/providers/aws/r/lakeformation_data_lake_settings.html), and remove existing `IAMAllowedPrincipals` permissions +2. Use `IAMAllowedPrincipals` without `aws_lakeformation_permissions` + +This example shows removing the `IAMAllowedPrincipals` default security settings and making the caller a Lake Formation admin. Since `create_database_default_permissions` and `create_table_default_permissions` are not set in the [`aws_lakeformation_data_lake_settings`](/docs/providers/aws/r/lakeformation_data_lake_settings.html) resource, they are cleared. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_iam_session_context import DataAwsIamSessionContext +from imports.aws.lakeformation_data_lake_settings import LakeformationDataLakeSettings +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + data_aws_iam_session_context_current = DataAwsIamSessionContext(self, "current_1", + arn=Token.as_string(current.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_session_context_current.override_logical_id("current") + LakeformationDataLakeSettings(self, "test", + admins=[Token.as_string(data_aws_iam_session_context_current.issuer_arn)] + ) +``` + +To remove existing `IAMAllowedPrincipals` permissions, use the [AWS Lake Formation Console](https://console.aws.amazon.com/lakeformation/) or [AWS CLI](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lakeformation/batch-revoke-permissions.html). + +`IAMAllowedPrincipals` is a hook to maintain backwards compatibility with AWS Glue. `IAMAllowedPrincipals` is a pseudo-entity group that acts like a Lake Formation principal. The group includes any IAM users and roles that are allowed access to your Data Catalog resources by your IAM policies. + +This is Lake Formation's default behavior: + +* Lake Formation grants `Super` permission to `IAMAllowedPrincipals` on all existing AWS Glue Data Catalog resources. +* Lake Formation enables "Use only IAM access control" for new Data Catalog resources. + +For more details, see [Changing the Default Security Settings for Your Data Lake](https://docs.aws.amazon.com/lake-formation/latest/dg/change-settings.html). + +### Problem Using `IAMAllowedPrincipals` + +AWS does not support combining `IAMAllowedPrincipals` permissions and non-`IAMAllowedPrincipals` permissions. Doing so results in unexpected permissions and behaviors. For example, this configuration grants a user `SELECT` on a column in a table. 
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.glue_catalog_database import GlueCatalogDatabase
+from imports.aws.glue_catalog_table import GlueCatalogTable, GlueCatalogTableStorageDescriptor, GlueCatalogTableStorageDescriptorColumns
+from imports.aws.lakeformation_permissions import LakeformationPermissions, LakeformationPermissionsTableWithColumns
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = GlueCatalogDatabase(self, "example",
+            name="sadabate"
+        )
+        aws_glue_catalog_table_example = GlueCatalogTable(self, "example_1",
+            database_name=example.name,
+            name="abelt",
+            storage_descriptor=GlueCatalogTableStorageDescriptor(
+                columns=[GlueCatalogTableStorageDescriptorColumns(
+                    name="event",
+                    type="string"
+                )
+                ]
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_glue_catalog_table_example.override_logical_id("example")
+        aws_lakeformation_permissions_example = LakeformationPermissions(self, "example_2",
+            permissions=["SELECT"],
+            principal="arn:aws:iam::123456789012:user/SanHolo",
+            table_with_columns=LakeformationPermissionsTableWithColumns(
+                column_names=["event"],
+                database_name=Token.as_string(aws_glue_catalog_table_example.database_name),
+                name=Token.as_string(aws_glue_catalog_table_example.name)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lakeformation_permissions_example.override_logical_id("example")
+```
+
+The resulting permissions depend on whether the table had `IAMAllowedPrincipals` (IAP) permissions or not.
+
+| Result With IAP | Result Without IAP |
+| ---- | ---- |
+| `SELECT` column wildcard (i.e., all columns) | `SELECT` on `"event"` (as expected) |
+
+## Using Lake Formation Permissions
+
+Lake Formation grants implicit permissions to data lake administrators, database creators, and table creators. These implicit permissions cannot be revoked _per se_. If this resource reads implicit permissions, it will attempt to revoke them, which causes an error when the resource is destroyed.
+
+There are two ways to avoid these errors. The first, which we recommend, is to avoid using this resource with principals that have implicit permissions. The second, more error-prone option is to grant explicit permissions (and `permissions_with_grant_option`) to "overwrite" a principal's implicit permissions, which you can then revoke with this resource. For more information, see [Implicit Lake Formation Permissions](https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html).
+
+If the `principal` is also a data lake administrator, AWS grants implicit permissions that can cause errors using this resource. For example, AWS implicitly grants a `principal`/administrator `permissions` and `permissions_with_grant_option` of `ALL`, `ALTER`, `DELETE`, `DESCRIBE`, `DROP`, `INSERT`, and `SELECT` on a table. If you use this resource to explicitly grant the `principal`/administrator `permissions` but _not_ `permissions_with_grant_option` of `ALL`, `ALTER`, `DELETE`, `DESCRIBE`, `DROP`, `INSERT`, and `SELECT` on the table, this resource will read the implicit `permissions_with_grant_option` and attempt to revoke them when the resource is destroyed.
Doing so will cause an `InvalidInputException: No permissions revoked` error because you cannot revoke implicit permissions _per se_. To work around this problem, explicitly grant the `principal`/administrator `permissions` _and_ `permissions_with_grant_option`, which can then be revoked. Similarly, granting a `principal`/administrator permissions on a table with columns and providing `column_names` will result in an `InvalidInputException: Permissions modification is invalid` error because you are narrowing the implicit permissions. Instead, set `wildcard` to `true` and remove the `column_names`.
+
+## Example Usage
+
+### Grant Permissions For A Lake Formation S3 Resource
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lakeformation_permissions import LakeformationPermissions, LakeformationPermissionsDataLocation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LakeformationPermissions(self, "example",
+            data_location=LakeformationPermissionsDataLocation(
+                arn=Token.as_string(aws_lakeformation_resource_example.arn)
+            ),
+            permissions=["ALL"],
+            principal=workflow_role.arn
+        )
+```
+
+### Grant Permissions For A Glue Catalog Database
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lakeformation_permissions import LakeformationPermissions, LakeformationPermissionsDatabase
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LakeformationPermissions(self, "example",
+            database=LakeformationPermissionsDatabase(
+                catalog_id="110376042874",
+                name=Token.as_string(aws_glue_catalog_database_example.name)
+            ),
+            permissions=["CREATE_TABLE", "ALTER", "DROP"],
+            principal=workflow_role.arn
+        )
+```
+
+### Grant Permissions Using Tag-Based Access Control
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lakeformation_permissions import LakeformationPermissions, LakeformationPermissionsLfTagPolicy, LakeformationPermissionsLfTagPolicyExpression
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LakeformationPermissions(self, "test",
+            lf_tag_policy=LakeformationPermissionsLfTagPolicy(
+                expression=[LakeformationPermissionsLfTagPolicyExpression(
+                    key="Team",
+                    values=["Sales"]
+                ), LakeformationPermissionsLfTagPolicyExpression(
+                    key="Environment",
+                    values=["Dev", "Production"]
+                )
+                ],
+                resource_type="DATABASE"
+            ),
+            permissions=["CREATE_TABLE", "ALTER", "DROP"],
+            principal=sales_role.arn
+        )
+```
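+
+### Grant Permissions For A Table With Columns
+
+As noted in [Using Lake Formation Permissions](#using-lake-formation-permissions) above, when the `principal` has implicit permissions on a table, grant a column `wildcard` rather than `column_names`. The following is a minimal, hand-written sketch (not produced by `cdktf convert`; the role, database, and table names are placeholders):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.lakeformation_permissions import LakeformationPermissions, LakeformationPermissionsTableWithColumns
+class MyStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LakeformationPermissions(self, "example",
+            permissions=["SELECT"],
+            # Grant the pass-through right as well, so both can be revoked on destroy.
+            permissions_with_grant_option=["SELECT"],
+            principal="arn:aws:iam::123456789012:role/example_admin_role",  # placeholder principal
+            table_with_columns=LakeformationPermissionsTableWithColumns(
+                database_name="example_database",  # placeholder database name
+                name="example_table",  # placeholder table name
+                # Use a column wildcard instead of column_names to avoid narrowing
+                # the principal's implicit permissions (see the note above).
+                wildcard=True
+            )
+        )
+```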
+
+## Argument Reference
+
+The following arguments are required:
+
+* `permissions` – (Required) List of permissions granted to the principal. Valid values may include `ALL`, `ALTER`, `ASSOCIATE`, `CREATE_DATABASE`, `CREATE_TABLE`, `DATA_LOCATION_ACCESS`, `DELETE`, `DESCRIBE`, `DROP`, `INSERT`, and `SELECT`. For details on each permission, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+* `principal` – (Required) Principal to be granted the permissions on the resource. Supported principals include `IAM_ALLOWED_PRINCIPALS` (see [Default Behavior and `IAMAllowedPrincipals`](#default-behavior-and-iamallowedprincipals) above), IAM roles, users, groups, SAML groups and users, QuickSight groups, OUs, and organizations as well as AWS account IDs for cross-account permissions. For more information, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+
+~> **NOTE:** We highly recommend that the `principal` _NOT_ be a Lake Formation administrator (granted using `aws_lakeformation_data_lake_settings`). The entity (e.g., IAM role) running Terraform will most likely need to be a Lake Formation administrator. As such, the entity will have implicit permissions and does not need permissions granted through this resource.
+
+One of the following is required:
+
+* `catalog_resource` - (Optional) Whether the permissions are to be granted for the Data Catalog. Defaults to `false`.
+* `data_location` - (Optional) Configuration block for a data location resource. Detailed below.
+* `database` - (Optional) Configuration block for a database resource. Detailed below.
+* `lf_tag` - (Optional) Configuration block for an LF-tag resource. Detailed below.
+* `lf_tag_policy` - (Optional) Configuration block for an LF-tag policy resource. Detailed below.
+* `table` - (Optional) Configuration block for a table resource. Detailed below.
+* `table_with_columns` - (Optional) Configuration block for a table with columns resource. Detailed below.
+
+The following arguments are optional:
+
+* `catalog_id` – (Optional) Identifier for the Data Catalog. By default, the account ID. The Data Catalog is the persistent metadata store. It contains database definitions, table definitions, and other control information to manage your Lake Formation environment.
+* `permissions_with_grant_option` - (Optional) Subset of `permissions` which the principal can pass.
+
+### data_location
+
+The following argument is required:
+
+* `arn` – (Required) Amazon Resource Name (ARN) that uniquely identifies the data location resource.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog where the location is registered with Lake Formation. By default, it is the account ID of the caller.
+
+### database
+
+The following argument is required:
+
+* `name` – (Required) Name of the database resource. Unique to the Data Catalog.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### lf_tag
+
+The following arguments are required:
+
+* `key` – (Required) The key-name for the tag.
+* `values` - (Required) A list of possible values an attribute can take.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### lf_tag_policy
+
+The following arguments are required:
+
+* `resource_type` – (Required) The resource type for which the tag policy applies. Valid values are `DATABASE` and `TABLE`.
+* `expression` - (Required) List of tag condition configuration blocks that apply to the resource's tag policy.
See [`expression`](#expression) below.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+#### expression
+
+* `key` – (Required) The key-name of an LF-Tag.
+* `values` - (Required) A list of possible values of an LF-Tag.
+
+### table
+
+The following arguments are required:
+
+* `database_name` – (Required) Name of the database for the table. Unique to a Data Catalog.
+* `name` - (Required, at least one of `name` or `wildcard`) Name of the table.
+* `wildcard` - (Required, at least one of `name` or `wildcard`) Whether to use a wildcard representing every table under a database. Defaults to `false`.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### table_with_columns
+
+The following arguments are required:
+
+* `column_names` - (Required, at least one of `column_names` or `wildcard`) Set of column names for the table.
+* `database_name` – (Required) Name of the database for the table with columns resource. Unique to the Data Catalog.
+* `name` – (Required) Name of the table resource.
+* `wildcard` - (Required, at least one of `column_names` or `wildcard`) Whether to use a column wildcard. If `excluded_column_names` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference.
+
+The following arguments are optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+* `excluded_column_names` - (Optional) Set of column names for the table to exclude. If `excluded_column_names` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lakeformation_resource.html.markdown b/website/docs/cdktf/python/r/lakeformation_resource.html.markdown
new file mode 100644
index 00000000000..b5aa39b905f
--- /dev/null
+++ b/website/docs/cdktf/python/r/lakeformation_resource.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_resource"
+description: |-
+  Registers a Lake Formation resource as managed by the Data Catalog.
+---
+
+
+
+# Resource: aws_lakeformation_resource
+
+Registers a Lake Formation resource (e.g., an S3 bucket) as managed by the Data Catalog. In other words, the S3 path is added to the data lake.
+
+Choose a role that has read/write access to the chosen Amazon S3 path or use the service-linked role. When you register the S3 path, the service-linked role and a new inline policy are created on your behalf. Lake Formation adds the first path to the inline policy and attaches it to the service-linked role. When you register subsequent paths, Lake Formation adds the path to the existing policy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
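+# The example below looks up an existing S3 bucket by name and registers its ARN with Lake Formation.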
+#
+from imports.aws.data_aws_s3_bucket import DataAwsS3Bucket
+from imports.aws.lakeformation_resource import LakeformationResource
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsS3Bucket(self, "example",
+            bucket="an-example-bucket"
+        )
+        aws_lakeformation_resource_example = LakeformationResource(self, "example_1",
+            arn=Token.as_string(example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lakeformation_resource_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+* `arn` – (Required) Amazon Resource Name (ARN) of the resource, an S3 path.
+* `role_arn` – (Optional) Role that has read/write access to the resource. If not provided, the Lake Formation service-linked role must exist and is used.
+
+~> **NOTE:** AWS does not support registering an S3 location with an IAM role and subsequently updating the S3 location registration to a service-linked role.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `last_modified` - The date and time the resource was last modified in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lakeformation_resource_lf_tags.html.markdown b/website/docs/cdktf/python/r/lakeformation_resource_lf_tags.html.markdown
new file mode 100644
index 00000000000..4c367d3f0ca
--- /dev/null
+++ b/website/docs/cdktf/python/r/lakeformation_resource_lf_tags.html.markdown
@@ -0,0 +1,162 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_resource_lf_tags"
+description: |-
+  Manages an attachment between one or more LF-tags and an existing Lake Formation resource.
+---
+
+
+
+# Resource: aws_lakeformation_resource_lf_tags
+
+Manages an attachment between one or more existing LF-tags and an existing Lake Formation resource.
+
+## Example Usage
+
+### Database Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lakeformation_lf_tag import LakeformationLfTag
+from imports.aws.lakeformation_resource_lf_tags import LakeformationResourceLfTags, LakeformationResourceLfTagsDatabase, LakeformationResourceLfTagsLfTag
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = LakeformationLfTag(self, "example",
+            key="right",
+            values=["abbey", "village", "luffield", "woodcote", "copse", "chapel", "stowe", "club"
+            ]
+        )
+        aws_lakeformation_resource_lf_tags_example = LakeformationResourceLfTags(self, "example_1",
+            database=LakeformationResourceLfTagsDatabase(
+                name=Token.as_string(aws_glue_catalog_database_example.name)
+            ),
+            lf_tag=[LakeformationResourceLfTagsLfTag(
+                key=example.key,
+                value="stowe"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lakeformation_resource_lf_tags_example.override_logical_id("example")
+```
+
+### Multiple Tags Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lakeformation_lf_tag import LakeformationLfTag
+from imports.aws.lakeformation_resource_lf_tags import LakeformationResourceLfTags, LakeformationResourceLfTagsDatabase, LakeformationResourceLfTagsLfTag
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LakeformationLfTag(self, "example",
+            key="right",
+            values=["abbey", "village", "luffield", "woodcote", "copse", "chapel", "stowe", "club"
+            ]
+        )
+        LakeformationLfTag(self, "example2",
+            key="left",
+            values=["farm", "theloop", "aintree", "brooklands", "maggotts", "becketts", "vale"
+            ]
+        )
+        aws_lakeformation_resource_lf_tags_example = LakeformationResourceLfTags(self, "example_2",
+            database=LakeformationResourceLfTagsDatabase(
+                name=Token.as_string(aws_glue_catalog_database_example.name)
+            ),
+            lf_tag=[LakeformationResourceLfTagsLfTag(
+                key="right",
+                value="luffield"
+            ), LakeformationResourceLfTagsLfTag(
+                key="left",
+                value="aintree"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lakeformation_resource_lf_tags_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following argument is required:
+
+* `lf_tag` – (Required) Set of LF-tags to attach to the resource. See below.
+
+Exactly one of the following is required:
+
+* `database` - (Optional) Configuration block for a database resource. See below.
+* `table` - (Optional) Configuration block for a table resource. See below.
+* `table_with_columns` - (Optional) Configuration block for a table with columns resource. See below.
+
+The following argument is optional:
+
+* `catalog_id` – (Optional) Identifier for the Data Catalog. By default, the account ID. The Data Catalog is the persistent metadata store. It contains database definitions, table definitions, and other control information to manage your Lake Formation environment.
+
+### lf_tag
+
+The following arguments are required:
+
+* `key` – (Required) Key name for an existing LF-tag.
+* `value` - (Required) Value from the possible values for the LF-tag.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### database
+
+The following argument is required:
+
+* `name` – (Required) Name of the database resource. Unique to the Data Catalog.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### table
+
+The following arguments are required:
+
+* `database_name` – (Required) Name of the database for the table. Unique to a Data Catalog.
+* `name` - (Required, at least one of `name` or `wildcard`) Name of the table.
+* `wildcard` - (Required, at least one of `name` or `wildcard`) Whether to use a wildcard representing every table under a database. Defaults to `false`.
+
+The following argument is optional:
+
+* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
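+
+For instance, a minimal sketch of a table-level attachment using the `table` block documented above (hand-written rather than produced by `cdktf convert`; the database and table names are assumed to exist, and the `LakeformationResourceLfTagsTable` struct name follows the bindings' naming convention for the `table` block):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.lakeformation_resource_lf_tags import LakeformationResourceLfTags, LakeformationResourceLfTagsTable, LakeformationResourceLfTagsLfTag
+class MyStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LakeformationResourceLfTags(self, "table_example",
+            table=LakeformationResourceLfTagsTable(
+                database_name="example_database",  # assumed database name
+                name="example_table"  # assumed table name
+            ),
+            lf_tag=[LakeformationResourceLfTagsLfTag(
+                key="right",  # the LF-tag from the examples above
+                value="luffield"
+            )
+            ]
+        )
+```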
+ +### table_with_columns + +The following arguments are required: + +* `column_names` - (Required, at least one of `column_names` or `wildcard`) Set of column names for the table. +* `database_name` – (Required) Name of the database for the table with columns resource. Unique to the Data Catalog. +* `name` – (Required) Name of the table resource. +* `wildcard` - (Required, at least one of `column_names` or `wildcard`) Whether to use a column wildcard. If `excluded_column_names` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference. + +The following arguments are optional: + +* `catalog_id` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. +* `excluded_column_names` - (Optional) Set of column names for the table to exclude. If `excluded_column_names` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lambda_alias.html.markdown b/website/docs/cdktf/python/r/lambda_alias.html.markdown index fff13bc3d1e..eb686a8e4f5 100644 --- a/website/docs/cdktf/python/r/lambda_alias.html.markdown +++ b/website/docs/cdktf/python/r/lambda_alias.html.markdown @@ -50,13 +50,13 @@ class MyConvertedCode(TerraformStack): * `function_version` - (Required) Lambda function version for which you are creating the alias. Pattern: `(\$LATEST|[0-9]+)`. * `routing_config` - (Optional) The Lambda alias' route configuration settings. Fields documented below -For **routing_config** the following attributes are supported: +`routing_config` supports the following arguments: * `additional_version_weights` - (Optional) A map that defines the proportion of events that should be sent to different versions of a lambda function. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) identifying your Lambda function alias. * `invoke_arn` - The ARN to be used for invoking Lambda Function from API Gateway - to be used in [`aws_api_gateway_integration`](/docs/providers/aws/r/api_gateway_integration.html)'s `uri` @@ -67,10 +67,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda Function Aliases can be imported using the `function_name/alias`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Function Aliases using the `function_name/alias`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_lambda_alias.test_lambda_alias my_test_lambda_function/my_alias + +Using `terraform import`, import Lambda Function Aliases using the `function_name/alias`. 
For example: + +```console +% terraform import aws_lambda_alias.test_lambda_alias my_test_lambda_function/my_alias ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lambda_code_signing_config.html.markdown b/website/docs/cdktf/python/r/lambda_code_signing_config.html.markdown index c3b1d346f0f..596ea5a13ff 100644 --- a/website/docs/cdktf/python/r/lambda_code_signing_config.html.markdown +++ b/website/docs/cdktf/python/r/lambda_code_signing_config.html.markdown @@ -53,9 +53,9 @@ The `policies` block supports the following argument: * `untrusted_artifact_on_deployment` - (Required) Code signing configuration policy for deployment validation failure. If you set the policy to Enforce, Lambda blocks the deployment request if code-signing validation checks fail. If you set the policy to Warn, Lambda allows the deployment and creates a CloudWatch log. Valid values: `Warn`, `Enforce`. Default value: `Warn`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the code signing configuration. * `config_id` - Unique identifier for the code signing configuration. @@ -65,10 +65,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Code Signing Configs can be imported using their ARN, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Code Signing Configs using their ARN. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_lambda_code_signing_config.imported_csc arn:aws:lambda:us-west-2:123456789012:code-signing-config:csc-0f6c334abcdea4d8b + +Using `terraform import`, import Code Signing Configs using their ARN. For example: + +```console +% terraform import aws_lambda_code_signing_config.imported_csc arn:aws:lambda:us-west-2:123456789012:code-signing-config:csc-0f6c334abcdea4d8b ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lambda_event_source_mapping.html.markdown b/website/docs/cdktf/python/r/lambda_event_source_mapping.html.markdown index f10e3ef4f40..a88607e523a 100644 --- a/website/docs/cdktf/python/r/lambda_event_source_mapping.html.markdown +++ b/website/docs/cdktf/python/r/lambda_event_source_mapping.html.markdown @@ -248,7 +248,7 @@ class MyConvertedCode(TerraformStack): * `maximum_record_age_in_seconds`: - (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive). * `maximum_retry_attempts`: - (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000. * `parallelization_factor`: - (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of 1, maximum of 10. 
-* `queues` - (Optional) The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. A single queue name must be specified.
+* `queues` - (Optional) The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. The list must contain exactly one queue name.
* `scaling_config` - (Optional) Scaling configuration of the event source. Only available for SQS queues. Detailed below.
* `self_managed_event_source`: - (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include `source_access_configuration`. Detailed below.
* `self_managed_kafka_event_source_config` - (Optional) Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below.
@@ -301,9 +301,9 @@ class MyConvertedCode(TerraformStack):
* `type` - (Required) The type of this configuration. For Self Managed Kafka you will need to supply blocks for type `VPC_SUBNET` and `VPC_SECURITY_GROUP`.
* `uri` - (Required) The URI for this configuration. For type `VPC_SUBNET` the value should be `subnet:subnet_id` where `subnet_id` is the value you would find in an aws_subnet resource's id attribute. For type `VPC_SECURITY_GROUP` the value should be `security_group:security_group_id` where `security_group_id` is the value you would find in an aws_security_group resource's id attribute.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `function_arn` - The ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from `function_name` above.)
* `last_modified` - The date this resource was last modified.
@@ -317,10 +317,21 @@

## Import

-Lambda event source mappings can be imported using the `UUID` (event source mapping identifier), e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda event source mappings using the `UUID` (event source mapping identifier). For example:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_lambda_event_source_mapping.event_source_mapping 12345kxodurf3443
+
+Using `terraform import`, import Lambda event source mappings using the `UUID` (event source mapping identifier).
For example: + +```console +% terraform import aws_lambda_event_source_mapping.event_source_mapping 12345kxodurf3443 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lambda_function.html.markdown b/website/docs/cdktf/python/r/lambda_function.html.markdown index 576e1278246..2c6f544e98e 100644 --- a/website/docs/cdktf/python/r/lambda_function.html.markdown +++ b/website/docs/cdktf/python/r/lambda_function.html.markdown @@ -74,7 +74,7 @@ class MyConvertedCode(TerraformStack): function_name="lambda_function_name", handler="index.test", role=iam_for_lambda.arn, - runtime="nodejs16.x", + runtime="nodejs18.x", source_code_hash=Token.as_string(lambda_.output_base64_sha256) ) ``` @@ -150,7 +150,7 @@ class MyConvertedCode(TerraformStack): function_name="lambda_function_name", handler="index.test", role=iam_for_lambda.arn, - runtime="nodejs14.x" + runtime="nodejs18.x" ) ``` @@ -370,9 +370,9 @@ For network connectivity to AWS resources in a VPC, specify a list of security g * `security_group_ids` - (Required) List of security group IDs associated with the Lambda function. * `subnet_ids` - (Required) List of subnet IDs associated with the Lambda function. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) identifying your Lambda Function. * `invoke_arn` - ARN to be used for invoking Lambda Function from API Gateway - to be used in [`aws_api_gateway_integration`](/docs/providers/aws/r/api_gateway_integration.html)'s `uri`. @@ -409,10 +409,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda Functions can be imported using the `function_name`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Functions using the `function_name`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_lambda_function.test_lambda my_test_lambda_function + +Using `terraform import`, import Lambda Functions using the `function_name`. For example: + +```console +% terraform import aws_lambda_function.test_lambda my_test_lambda_function ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lambda_function_event_invoke_config.html.markdown b/website/docs/cdktf/python/r/lambda_function_event_invoke_config.html.markdown index 527d75d6c9a..3665549fd56 100644 --- a/website/docs/cdktf/python/r/lambda_function_event_invoke_config.html.markdown +++ b/website/docs/cdktf/python/r/lambda_function_event_invoke_config.html.markdown @@ -158,38 +158,84 @@ The following arguments are required: * `destination` - (Required) Amazon Resource Name (ARN) of the destination resource. See the [Lambda Developer Guide](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#invocation-async-destinations) for acceptable resource types and associated IAM permissions. 
-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - Fully qualified Lambda Function name or Amazon Resource Name (ARN)

## Import

-Lambda Function Event Invoke Configs can be imported using the fully qualified Function name or Amazon Resource Name (ARN), e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Function Event Invoke Configs using the fully qualified Function name or Amazon Resource Name (ARN). For example:

ARN without qualifier (all versions and aliases):

-```
-$ terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```

ARN with qualifier:

-```
-$ terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function:production
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```

Name without qualifier (all versions and aliases):

-```
-$ terraform import aws_lambda_function_event_invoke_config.example my_function
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```

Name with qualifier:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_lambda_function_event_invoke_config.example my_function:production
+
+Using `terraform import`, import Lambda Function Event Invoke Configs using the fully qualified Function name or Amazon Resource Name (ARN).
For example:
+
+ARN without qualifier (all versions and aliases):
+
+```console
+% terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function
+```
+
+ARN with qualifier:
+
+```console
+% terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function:production
+```
+
+Name without qualifier (all versions and aliases):
+
+```console
+% terraform import aws_lambda_function_event_invoke_config.example my_function
+```
+
+Name with qualifier:
+
+```console
+% terraform import aws_lambda_function_event_invoke_config.example my_function:production
```

- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lambda_function_url.html.markdown b/website/docs/cdktf/python/r/lambda_function_url.html.markdown
index 734994a0dd2..556f18ae0e5 100644
--- a/website/docs/cdktf/python/r/lambda_function_url.html.markdown
+++ b/website/docs/cdktf/python/r/lambda_function_url.html.markdown
@@ -66,9 +66,9 @@ This configuration block supports the following attributes:
* `expose_headers` - (Optional) The HTTP headers in your function response that you want to expose to origins that call the function URL.
* `max_age` - (Optional) The maximum amount of time, in seconds, that web browsers can cache results of a preflight request. By default, this is set to `0`, which means that the browser doesn't cache results. The maximum value is `86400`.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `function_arn` - The Amazon Resource Name (ARN) of the function.
* `function_url` - The HTTP URL endpoint for the function in the format `https://<url_id>.lambda-url.<region>.on.aws`.
@@ -76,10 +76,21 @@ In addition to all arguments above, the following attributes are exported:

## Import

-Lambda function URLs can be imported using the `function_name` or `function_name/qualifier`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda function URLs using the `function_name` or `function_name/qualifier`. For example:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_lambda_function_url.test_lambda_url my_test_lambda_function
+
+Using `terraform import`, import Lambda function URLs using the `function_name` or `function_name/qualifier`. For example:
+
+```console
+% terraform import aws_lambda_function_url.test_lambda_url my_test_lambda_function
+```
+
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lambda_invocation.html.markdown b/website/docs/cdktf/python/r/lambda_invocation.html.markdown
index c5d636d628e..c926a61388b 100644
--- a/website/docs/cdktf/python/r/lambda_invocation.html.markdown
+++ b/website/docs/cdktf/python/r/lambda_invocation.html.markdown
@@ -166,10 +166,10 @@ The following arguments are optional:

* `terraform_key` - (Optional) The JSON key used to store lifecycle information in the input JSON payload. Defaults to `tf`. This additional key is only included when `lifecycle_scope` is set to `CRUD`.
* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a re-invocation. To force a re-invocation without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html). -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `result` - String result of the lambda function invocation. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lambda_layer_version.html.markdown b/website/docs/cdktf/python/r/lambda_layer_version.html.markdown index 762c469f55b..724cf91a389 100644 --- a/website/docs/cdktf/python/r/lambda_layer_version.html.markdown +++ b/website/docs/cdktf/python/r/lambda_layer_version.html.markdown @@ -67,9 +67,9 @@ The following arguments are optional: * `skip_destroy` - (Optional) Whether to retain the old version of a previously deployed Lambda Layer. Default is `false`. When this is not set to `true`, changing any of `compatible_architectures`, `compatible_runtimes`, `description`, `filename`, `layer_name`, `license_info`, `s3_bucket`, `s3_key`, `s3_object_version`, or `source_code_hash` forces deletion of the existing layer version and creation of a new layer version. * `source_code_hash` - (Optional) Used to trigger updates. Must be set to a base64-encoded SHA256 hash of the package file specified with either `filename` or `s3_key`. The usual way to set this is `${filebase64sha256("file.zip")}` (Terraform 0.11.12 or later) or `${base64sha256(file("file.zip"))}` (Terraform 0.11.11 and earlier), where "file.zip" is the local filename of the lambda layer source archive. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the Lambda Layer with version. * `created_date` - Date this resource was created. @@ -86,12 +86,23 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda Layers can be imported using `arn`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Layers using `arn`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import \ + +Using `terraform import`, import Lambda Layers using `arn`. 
For example:
+
+```console
+% terraform import \
+  aws_lambda_layer_version.test_layer \
+  arn:aws:lambda:_REGION_:_ACCOUNT_ID_:layer:_LAYER_NAME_:_LAYER_VERSION_
```

- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lambda_layer_version_permission.html.markdown b/website/docs/cdktf/python/r/lambda_layer_version_permission.html.markdown
index a8c4fc1043d..444a1f20401 100644
--- a/website/docs/cdktf/python/r/lambda_layer_version_permission.html.markdown
+++ b/website/docs/cdktf/python/r/lambda_layer_version_permission.html.markdown
@@ -41,7 +41,7 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `action` - (Required) Action, which will be allowed. `lambda:GetLayerVersion` value is suggested by AWS documentation.
* `layer_name` - (Required) The name or ARN of the Lambda Layer, which you want to grant access to.
@@ -51,9 +51,9 @@
* `version_number` - (Required) Version of Lambda Layer, which you want to grant access to. Note: permissions only apply to a single version of a layer.
* `skip_destroy` - (Optional) Whether to retain the old version of a previously deployed Lambda Layer. Default is `false`. When this is not set to `true`, changing any of `compatible_architectures`, `compatible_runtimes`, `description`, `filename`, `layer_name`, `license_info`, `s3_bucket`, `s3_key`, `s3_object_version`, or `source_code_hash` forces deletion of the existing layer version and creation of a new layer version.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - The `layer_name` and `version_number`, separated by a comma (`,`).
* `revision_id` - A unique identifier for the current revision of the policy.
@@ -61,12 +61,23 @@ In addition to all arguments above, the following attributes are exported:

## Import

-Lambda Layer Permissions can be imported using `layer_name` and `version_number`, separated by a comma (`,`).
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Layer Permissions using `layer_name` and `version_number`, separated by a comma (`,`). For example:

-```sh
-$ terraform import aws_lambda_layer_version_permission.example arn:aws:lambda:us-west-2:123456654321:layer:test_layer1,1
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Lambda Layer Permissions using `layer_name` and `version_number`, separated by a comma (`,`).
For example:
+
+```console
+% terraform import aws_lambda_layer_version_permission.example arn:aws:lambda:us-west-2:123456654321:layer:test_layer1,1
+```

[1]: https://docs.aws.amazon.com/lambda/latest/dg/access-control-resource-based.html#permissions-resource-xaccountlayer
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lambda_permission.html.markdown b/website/docs/cdktf/python/r/lambda_permission.html.markdown
index 7c46cb9599b..0cd754a2465 100644
--- a/website/docs/cdktf/python/r/lambda_permission.html.markdown
+++ b/website/docs/cdktf/python/r/lambda_permission.html.markdown
@@ -305,18 +305,40 @@ class MyConvertedCode(TerraformStack):

[2]: https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-control-access-using-iam-policies-to-invoke-api.html
[3]: https://docs.aws.amazon.com/lambda/latest/dg/urls-auth.html

-## Attributes Reference
+## Attribute Reference

-No additional attributes are exported.
+This resource exports no additional attributes.

## Import

-Lambda permission statements can be imported using function_name/statement_id, with an optional qualifier, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda permission statements using function_name/statement_id with an optional qualifier. For example:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Lambda permission statements using function_name/statement_id with an optional qualifier. For example:
+
+```console
+% terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function/AllowExecutionFromCloudWatch
```
-$ terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function/AllowExecutionFromCloudWatch
-$ terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function:qualifier_name/AllowExecutionFromCloudWatch
+```console
+% terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function:qualifier_name/AllowExecutionFromCloudWatch
```

- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lambda_provisioned_concurrency_config.html.markdown b/website/docs/cdktf/python/r/lambda_provisioned_concurrency_config.html.markdown
index 71048841fa3..c214717b8cd 100644
--- a/website/docs/cdktf/python/r/lambda_provisioned_concurrency_config.html.markdown
+++ b/website/docs/cdktf/python/r/lambda_provisioned_concurrency_config.html.markdown
@@ -70,9 +70,9 @@ The following arguments are optional:

* `skip_destroy` - (Optional) Whether to retain the provisioned concurrency configuration upon destruction. Defaults to `false`. If set to `true`, the resource is simply removed from state instead.
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - Lambda Function name and qualifier separated by a comma (`,`). @@ -85,10 +85,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -A Lambda Provisioned Concurrency Configuration can be imported using the `function_name` and `qualifier` separated by a comma (`,`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Lambda Provisioned Concurrency Configuration using the `function_name` and `qualifier` separated by a comma (`,`). For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_lambda_provisioned_concurrency_config.example my_function,production + +Using `terraform import`, import a Lambda Provisioned Concurrency Configuration using the `function_name` and `qualifier` separated by a comma (`,`). For example: + +```console +% terraform import aws_lambda_provisioned_concurrency_config.example my_function,production ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/launch_configuration.html.markdown b/website/docs/cdktf/python/r/launch_configuration.html.markdown new file mode 100644 index 00000000000..dfbcaa1ad78 --- /dev/null +++ b/website/docs/cdktf/python/r/launch_configuration.html.markdown @@ -0,0 +1,277 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_launch_configuration" +description: |- + Provides a resource to create a new launch configuration, used for autoscaling groups. +--- + + + +# Resource: aws_launch_configuration + +Provides a resource to create a new launch configuration, used for autoscaling groups. + +!> **WARNING:** The use of launch configurations is discouraged in favour of launch templates. Read more in the [AWS EC2 Documentation](https://docs.aws.amazon.com/autoscaling/ec2/userguide/launch-configurations.html). + +-> **Note** When using `aws_launch_configuration` with `aws_autoscaling_group`, it is recommended to use the `name_prefix` (Optional) instead of the `name` (Optional) attribute. This will allow Terraform lifecycles to detect changes to the launch configuration and update the autoscaling group correctly. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
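+# The DataAwsAmi lookup below selects the most recent Ubuntu 14.04 AMI published by Canonical (owner 099720109477).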
+#
+from imports.aws.data_aws_ami import DataAwsAmi, DataAwsAmiFilter
+from imports.aws.launch_configuration import LaunchConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ubuntu = DataAwsAmi(self, "ubuntu",
+            filter=[DataAwsAmiFilter(
+                name="name",
+                values=["ubuntu/images/hvm-ssd/ubuntu-trusty-14.04-amd64-server-*"]
+            ), DataAwsAmiFilter(
+                name="virtualization-type",
+                values=["hvm"]
+            )
+            ],
+            most_recent=True,
+            owners=["099720109477"]
+        )
+        LaunchConfiguration(self, "as_conf",
+            image_id=Token.as_string(ubuntu.id),
+            instance_type="t2.micro",
+            name="web_config"
+        )
+```
+
+## Using with AutoScaling Groups
+
+Launch Configurations cannot be updated after creation with the Amazon
+Web Services API. In order to update a Launch Configuration, Terraform will
+destroy the existing resource and create a replacement. To effectively
+use a Launch Configuration resource with an [AutoScaling Group resource][1],
+it's recommended to specify `create_before_destroy` in a [lifecycle][2] block.
+Either omit the Launch Configuration `name` attribute, or specify a partial name
+with `name_prefix`. Example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.autoscaling_group import AutoscalingGroup
+from imports.aws.data_aws_ami import DataAwsAmi, DataAwsAmiFilter
+from imports.aws.launch_configuration import LaunchConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ubuntu = DataAwsAmi(self, "ubuntu",
+            filter=[DataAwsAmiFilter(
+                name="name",
+                values=["ubuntu/images/hvm-ssd/ubuntu-trusty-14.04-amd64-server-*"]
+            ), DataAwsAmiFilter(
+                name="virtualization-type",
+                values=["hvm"]
+            )
+            ],
+            most_recent=True,
+            owners=["099720109477"]
+        )
+        as_conf = LaunchConfiguration(self, "as_conf",
+            image_id=Token.as_string(ubuntu.id),
+            instance_type="t2.micro",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            name_prefix="terraform-lc-example-"
+        )
+        AutoscalingGroup(self, "bar",
+            launch_configuration=as_conf.name,
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            max_size=2,
+            min_size=1,
+            name="terraform-asg-example"
+        )
+```
+
+With this setup Terraform generates a unique name for your Launch
+Configuration and can then update the AutoScaling Group without conflict before
+destroying the previous Launch Configuration.
+
+## Using with Spot Instances
+
+Launch configurations can set the spot instance pricing to be used for the
+Auto Scaling Group to reserve instances. Simply specifying the `spot_price`
+parameter will set the price on the Launch Configuration, which will attempt to
+reserve your instances at this price. See the [AWS Spot Instance
+documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-spot-instances.html)
+for more information or how to launch [Spot Instances][3] with Terraform.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
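+# This variant reuses the Ubuntu AMI lookup above and sets spot_price so the
+# Auto Scaling Group requests Spot capacity at or below $0.001 per hour.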
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.autoscaling_group import AutoscalingGroup
+from imports.aws.data_aws_ami import DataAwsAmi, DataAwsAmiFilter
+from imports.aws.launch_configuration import LaunchConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, max_size, min_size):
+        super().__init__(scope, name)
+        ubuntu = DataAwsAmi(self, "ubuntu",
+            filter=[DataAwsAmiFilter(
+                name="name",
+                values=["ubuntu/images/hvm-ssd/ubuntu-trusty-14.04-amd64-server-*"]
+            ), DataAwsAmiFilter(
+                name="virtualization-type",
+                values=["hvm"]
+            )
+            ],
+            most_recent=True,
+            owners=["099720109477"]
+        )
+        as_conf = LaunchConfiguration(self, "as_conf",
+            image_id=Token.as_string(ubuntu.id),
+            instance_type="m4.large",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            spot_price="0.001"
+        )
+        AutoscalingGroup(self, "bar",
+            launch_configuration=as_conf.name,
+            name="terraform-asg-example",
+            max_size=max_size,
+            min_size=min_size
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `image_id` - (Required) The EC2 image ID to launch.
+* `instance_type` - (Required) The size of instance to launch.
+
+The following arguments are optional:
+
+* `associate_public_ip_address` - (Optional) Associate a public IP address with an instance in a VPC.
+* `ebs_block_device` - (Optional) Additional EBS block devices to attach to the instance. See [Block Devices](#block-devices) below for details.
+* `ebs_optimized` - (Optional) If true, the launched EC2 instance will be EBS-optimized.
+* `enable_monitoring` - (Optional) Enables/disables detailed monitoring. This is enabled by default.
+* `ephemeral_block_device` - (Optional) Customize Ephemeral (also known as "Instance Store") volumes on the instance. See [Block Devices](#block-devices) below for details.
+* `iam_instance_profile` - (Optional) The name attribute of the IAM instance profile to associate with launched instances.
+* `key_name` - (Optional) The key name that should be used for the instance.
+* `metadata_options` - (Optional) The metadata options for the instance.
+    * `http_endpoint` - The state of the metadata service: `enabled`, `disabled`.
+    * `http_tokens` - If session tokens are required: `optional`, `required`.
+    * `http_put_response_hop_limit` - The desired HTTP PUT response hop limit for instance metadata requests.
+* `name` - (Optional) The name of the launch configuration. If you leave this blank, Terraform will auto-generate a unique name. Conflicts with `name_prefix`.
+* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `security_groups` - (Optional) A list of associated security group IDs.
+* `placement_tenancy` - (Optional) The tenancy of the instance. Valid values are `default` or `dedicated`, see [AWS's Create Launch Configuration](http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_CreateLaunchConfiguration.html) for more details.
+* `root_block_device` - (Optional) Customize details about the root block device of the instance. See [Block Devices](#block-devices) below for details.
+* `spot_price` - (Optional; Default: On-demand price) The maximum price to use for reserving spot instances.
+* `user_data` - (Optional) The user data to provide when launching the instance. Do not pass gzip-compressed data via this argument; see `user_data_base64` instead.
+* `user_data_base64` - (Optional) Can be used instead of `user_data` to pass base64-encoded binary data directly. Use this instead of `user_data` whenever the value is not a valid UTF-8 string. For example, gzip-encoded user data must be base64-encoded and passed via this argument to avoid corruption.
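+
+When the payload is binary, encode it before handing it to Terraform. A minimal
+sketch (the stack class, AMI ID, and `user-data.sh.gz` path are illustrative,
+not taken from this page; `Fn.filebase64` is the same helper used in the launch
+template examples below):
+
+```python
+# Sketch: gzip-compressed user data must go through user_data_base64.
+# Fn.filebase64 reads the file and base64-encodes the raw bytes, so the
+# payload is never re-interpreted as UTF-8 and arrives intact.
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+from imports.aws.launch_configuration import LaunchConfiguration
+class BinaryUserDataStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LaunchConfiguration(self, "with_binary_user_data",
+            image_id="ami-test",  # illustrative AMI ID
+            instance_type="t2.micro",
+            name_prefix="terraform-lc-example-",
+            # Passing this file via `user_data` instead would corrupt it.
+            user_data_base64=Token.as_string(Fn.filebase64("${path.module}/user-data.sh.gz"))
+        )
+```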
+
+## Block devices
+
+Each of the `*_block_device` attributes controls a portion of the AWS
+Launch Configuration's "Block Device Mapping". It's a good idea to familiarize yourself with [AWS's Block Device
+Mapping docs](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html)
+to understand the implications of using these attributes.
+
+Each AWS Instance type has a different set of Instance Store block devices
+available for attachment. AWS [publishes a
+list](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#StorageOnInstanceTypes)
+of which ephemeral devices are available on each type. The devices are always
+identified by the `virtual_name` in the format `ephemeral{0..N}`.
+
+~> **NOTE:** Changes to `*_block_device` configuration of _existing_ resources
+cannot currently be detected by Terraform. After updating block device
+configuration, resource recreation can be manually triggered by using the
+[`taint` command](https://www.terraform.io/docs/commands/taint.html).
+
+### ebs_block_device
+
+Modifying any of the `ebs_block_device` settings requires resource replacement.
+
+* `device_name` - (Required) The name of the device to mount.
+* `snapshot_id` - (Optional) The Snapshot ID to mount.
+* `volume_type` - (Optional) The type of volume. Can be `standard`, `gp2`, `gp3`, `st1`, `sc1` or `io1`.
+* `volume_size` - (Optional) The size of the volume in gigabytes.
+* `iops` - (Optional) The amount of provisioned
+  [IOPS](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html).
+  This must be set with a `volume_type` of `"io1"`.
+* `throughput` - (Optional) The throughput (MiBps) to provision for a `gp3` volume.
+* `delete_on_termination` - (Optional) Whether the volume should be destroyed
+  on instance termination (Default: `true`).
+* `encrypted` - (Optional) Whether the volume should be encrypted or not. Defaults to `false`.
+* `no_device` - (Optional) Whether the device in the block device mapping of the AMI is suppressed.
+
+### ephemeral_block_device
+
+* `device_name` - (Required) The name of the block device to mount on the instance.
+* `no_device` - (Optional) Whether the device in the block device mapping of the AMI is suppressed.
+* `virtual_name` - (Optional) The [Instance Store Device Name](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#InstanceStoreDeviceNames).
+
+### root_block_device
+
+-> Modifying any of the `root_block_device` settings requires resource replacement.
+
+* `delete_on_termination` - (Optional) Whether the volume should be destroyed on instance termination. Defaults to `true`.
+* `encrypted` - (Optional) Whether the volume should be encrypted or not. Defaults to `false`.
+* `iops` - (Optional) The amount of provisioned [IOPS](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). This must be set with a `volume_type` of `io1`.
+* `throughput` - (Optional) The throughput (MiBps) to provision for a `gp3` volume.
+* `volume_size` - (Optional) The size of the volume in gigabytes.
+* `volume_type` - (Optional) The type of volume. Can be `standard`, `gp2`, `gp3`, `st1`, `sc1` or `io1`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the launch configuration.
+* `arn` - The Amazon Resource Name of the launch configuration. +* `name` - The name of the launch configuration. + +[1]: /docs/providers/aws/r/autoscaling_group.html +[2]: https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html +[3]: /docs/providers/aws/r/spot_instance_request.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import launch configurations using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import launch configurations using the `name`. For example: + +```console +% terraform import aws_launch_configuration.as_conf terraform-lg-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/launch_template.html.markdown b/website/docs/cdktf/python/r/launch_template.html.markdown new file mode 100644 index 00000000000..762aa150805 --- /dev/null +++ b/website/docs/cdktf/python/r/launch_template.html.markdown @@ -0,0 +1,515 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_launch_template" +description: |- + Provides an EC2 launch template resource. Can be used to create instances or auto scaling groups. +--- + + + +# Resource: aws_launch_template + +Provides an EC2 launch template resource. Can be used to create instances or auto scaling groups. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.launch_template import (LaunchTemplate,
+    LaunchTemplateBlockDeviceMappings, LaunchTemplateBlockDeviceMappingsEbs,
+    LaunchTemplateCapacityReservationSpecification, LaunchTemplateCpuOptions,
+    LaunchTemplateCreditSpecification, LaunchTemplateElasticGpuSpecifications,
+    LaunchTemplateElasticInferenceAccelerator, LaunchTemplateIamInstanceProfile,
+    LaunchTemplateInstanceMarketOptions, LaunchTemplateLicenseSpecification,
+    LaunchTemplateMetadataOptions, LaunchTemplateMonitoring,
+    LaunchTemplateNetworkInterfaces, LaunchTemplatePlacement,
+    LaunchTemplateTagSpecifications)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LaunchTemplate(self, "foo",
+            block_device_mappings=[LaunchTemplateBlockDeviceMappings(
+                device_name="/dev/sdf",
+                ebs=LaunchTemplateBlockDeviceMappingsEbs(
+                    volume_size=20
+                )
+            )
+            ],
+            capacity_reservation_specification=LaunchTemplateCapacityReservationSpecification(
+                capacity_reservation_preference="open"
+            ),
+            cpu_options=LaunchTemplateCpuOptions(
+                core_count=4,
+                threads_per_core=2
+            ),
+            credit_specification=LaunchTemplateCreditSpecification(
+                cpu_credits="standard"
+            ),
+            disable_api_stop=True,
+            disable_api_termination=True,
+            ebs_optimized=Token.as_string(True),
+            elastic_gpu_specifications=[LaunchTemplateElasticGpuSpecifications(
+                type="test"
+            )
+            ],
+            elastic_inference_accelerator=LaunchTemplateElasticInferenceAccelerator(
+                type="eia1.medium"
+            ),
+            iam_instance_profile=LaunchTemplateIamInstanceProfile(
+                name="test"
+            ),
+            image_id="ami-test",
+            instance_initiated_shutdown_behavior="terminate",
+            instance_market_options=LaunchTemplateInstanceMarketOptions(
+                market_type="spot"
+            ),
+            instance_type="t2.micro",
+            kernel_id="test",
+            key_name="test",
+            license_specification=[LaunchTemplateLicenseSpecification(
+                license_configuration_arn="arn:aws:license-manager:eu-west-1:123456789012:license-configuration:lic-0123456789abcdef0123456789abcdef"
+            )
+            ],
+            metadata_options=LaunchTemplateMetadataOptions(
+                http_endpoint="enabled",
+                http_put_response_hop_limit=1,
+                http_tokens="required",
+                instance_metadata_tags="enabled"
+            ),
+            monitoring=LaunchTemplateMonitoring(
+                enabled=True
+            ),
+            name="foo",
+            network_interfaces=[LaunchTemplateNetworkInterfaces(
+                associate_public_ip_address=Token.as_string(True)
+            )
+            ],
+            placement=LaunchTemplatePlacement(
+                availability_zone="us-west-2a"
+            ),
+            ram_disk_id="test",
+            tag_specifications=[LaunchTemplateTagSpecifications(
+                resource_type="instance",
+                tags={
+                    "Name": "test"
+                }
+            )
+            ],
+            user_data=Token.as_string(Fn.filebase64("${path.module}/example.sh")),
+            vpc_security_group_ids=["sg-12345678"]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `block_device_mappings` - (Optional) Specify volumes to attach to the instance besides the volumes specified by the AMI.
+  See [Block Devices](#block-devices) below for details.
+* `capacity_reservation_specification` - (Optional) Targeting for EC2 capacity reservations. See [Capacity Reservation Specification](#capacity-reservation-specification) below for more details.
+* `cpu_options` - (Optional) The CPU options for the instance. See [CPU Options](#cpu-options) below for more details.
+* `credit_specification` - (Optional) Customize the credit specification of the instance. See [Credit
+  Specification](#credit-specification) below for more details.
+* `default_version` - (Optional) Default Version of the launch template.
+* `description` - (Optional) Description of the launch template.
+* `disable_api_stop` - (Optional) If true, enables [EC2 Instance Stop Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Stop_Start.html#Using_StopProtection).
+* `disable_api_termination` - (Optional) If `true`, enables [EC2 Instance
+  Termination Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingDisableAPITermination).
+* `ebs_optimized` - (Optional) If `true`, the launched EC2 instance will be EBS-optimized.
+* `elastic_gpu_specifications` - (Optional) The elastic GPU to attach to the instance. See [Elastic GPU](#elastic-gpu) + below for more details. +* `elastic_inference_accelerator` - (Optional) Configuration block containing an Elastic Inference Accelerator to attach to the instance. See [Elastic Inference Accelerator](#elastic-inference-accelerator) below for more details. +* `enclave_options` - (Optional) Enable Nitro Enclaves on launched instances. See [Enclave Options](#enclave-options) below for more details. +* `hibernation_options` - (Optional) The hibernation options for the instance. See [Hibernation Options](#hibernation-options) below for more details. +* `iam_instance_profile` - (Optional) The IAM Instance Profile to launch the instance with. See [Instance Profile](#instance-profile) + below for more details. +* `image_id` - (Optional) The AMI from which to launch the instance. +* `instance_initiated_shutdown_behavior` - (Optional) Shutdown behavior for the instance. Can be `stop` or `terminate`. + (Default: `stop`). +* `instance_market_options` - (Optional) The market (purchasing) option for the instance. See [Market Options](#market-options) + below for details. +* `instance_requirements` - (Optional) The attribute requirements for the type of instance. If present then `instance_type` cannot be present. +* `instance_type` - (Optional) The type of the instance. If present then `instance_requirements` cannot be present. +* `kernel_id` - (Optional) The kernel ID. +* `key_name` - (Optional) The key name to use for the instance. +* `license_specification` - (Optional) A list of license specifications to associate with. See [License Specification](#license-specification) below for more details. +* `maintenance_options` - (Optional) The maintenance options for the instance. See [Maintenance Options](#maintenance-options) below for more details. +* `metadata_options` - (Optional) Customize the metadata options for the instance. See [Metadata Options](#metadata-options) below for more details. +* `monitoring` - (Optional) The monitoring option for the instance. See [Monitoring](#monitoring) below for more details. +* `name` - (Optional) The name of the launch template. If you leave this blank, Terraform will auto-generate a unique name. +* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `network_interfaces` - (Optional) Customize network interfaces to be attached at instance boot time. See [Network + Interfaces](#network-interfaces) below for more details. +* `placement` - (Optional) The placement of the instance. See [Placement](#placement) below for more details. +* `private_dns_name_options` - (Optional) The options for the instance hostname. The default values are inherited from the subnet. See [Private DNS Name Options](#private-dns-name-options) below for more details. +* `ram_disk_id` - (Optional) The ID of the RAM disk. +* `security_group_names` - (Optional) A list of security group names to associate with. If you are creating Instances in a VPC, use + `vpc_security_group_ids` instead. +* `tag_specifications` - (Optional) The tags to apply to the resources during launch. See [Tag Specifications](#tag-specifications) below for more details. +* `tags` - (Optional) A map of tags to assign to the launch template. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `update_default_version` - (Optional) Whether to update Default Version each update. Conflicts with `default_version`.
+* `user_data` - (Optional) The base64-encoded user data to provide when launching the instance.
+* `vpc_security_group_ids` - (Optional) A list of security group IDs to associate with. Conflicts with `network_interfaces.security_groups`.
+
+### Block devices
+
+Configure additional volumes of the instance besides those specified by the AMI. It's a good idea to familiarize yourself with
+[AWS's Block Device Mapping docs](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html)
+to understand the implications of using these attributes.
+
+To look up configuration details of an existing AMI, such as `device_name`, you can use the [AWS CLI ec2 describe-images command](https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html).
+
+Each `block_device_mappings` supports the following:
+
+* `device_name` - (Required) The name of the device to mount.
+* `ebs` - (Optional) Configure EBS volume properties.
+* `no_device` - (Optional) Suppresses the specified device included in the AMI's block device mapping.
+* `virtual_name` - (Optional) The [Instance Store Device
+  Name](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#InstanceStoreDeviceNames)
+  (e.g., `"ephemeral0"`).
+
+The `ebs` block supports the following:
+
+* `delete_on_termination` - (Optional) Whether the volume should be destroyed on instance termination.
+  See [Preserving Amazon EBS Volumes on Instance Termination](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#preserving-volumes-on-termination) for more information.
+* `encrypted` - (Optional) Enables [EBS encryption](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html) on the volume.
+  Cannot be used with `snapshot_id`.
+* `iops` - (Optional) The amount of provisioned [IOPS](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html).
+  This must be set with a `volume_type` of `"io1/io2/gp3"`.
+* `kms_key_id` - (Optional) The ARN of the AWS Key Management Service (AWS KMS) customer master key (CMK) to use when creating the encrypted volume.
+  `encrypted` must be set to `true` when this is set.
+* `snapshot_id` - (Optional) The Snapshot ID to mount.
+* `throughput` - (Optional) The throughput to provision for a `gp3` volume in MiB/s (specified as an integer, e.g., 500), with a maximum of 1,000 MiB/s.
+* `volume_size` - (Optional) The size of the volume in gigabytes.
+* `volume_type` - (Optional) The volume type.
+  Can be one of `standard`, `gp2`, `gp3`, `io1`, `io2`, `sc1` or `st1`.
+
+### Capacity Reservation Specification
+
+The `capacity_reservation_specification` block supports the following:
+
+* `capacity_reservation_preference` - Indicates the instance's Capacity Reservation preferences. Can be `open` or `none`. (Default `none`).
+* `capacity_reservation_target` - Used to target a specific Capacity Reservation:
+
+The `capacity_reservation_target` block supports the following:
+
+* `capacity_reservation_id` - The ID of the Capacity Reservation in which to run the instance.
+* `capacity_reservation_resource_group_arn` - The ARN of the Capacity Reservation resource group in which to run the instance.
+
+### CPU Options
+
+The `cpu_options` block supports the following:
+
+* `amd_sev_snp` - Indicates whether to enable the instance for AMD SEV-SNP. AMD SEV-SNP is supported with M6a, R6a, and C6a instance types only. Valid values are `enabled` and `disabled`.
+* `core_count` - The number of CPU cores for the instance.
+* `threads_per_core` - The number of threads per CPU core.
+  To disable Intel Hyper-Threading Technology for the instance, specify a value of 1.
+  Otherwise, specify the default value of 2.
+
+Both the number of CPU cores and threads per core must be specified. Valid combinations of CPU cores and threads per core for the instance type can be found in the [CPU Options Documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html?shortFooter=true#cpu-options-supported-instances-values).
+
+### Credit Specification
+
+Credit specification can be applied/modified to the EC2 Instance at any time.
+
+The `credit_specification` block supports the following:
+
+* `cpu_credits` - The credit option for CPU usage.
+  Can be `standard` or `unlimited`.
+  T3 instances are launched as `unlimited` by default.
+  T2 instances are launched as `standard` by default.
+
+### Elastic GPU
+
+Attach an elastic GPU to the instance.
+
+The `elastic_gpu_specifications` block supports the following:
+
+* `type` - The [Elastic GPU Type](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/elastic-gpus.html#elastic-gpus-basics).
+
+### Elastic Inference Accelerator
+
+Attach an Elastic Inference Accelerator to the instance. Additional information about Elastic Inference in EC2 can be found in the [EC2 User Guide](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/elastic-inference.html).
+
+The `elastic_inference_accelerator` configuration block supports the following:
+
+* `type` - (Required) Accelerator type.
+
+### Enclave Options
+
+The `enclave_options` block supports the following:
+
+* `enabled` - If set to `true`, Nitro Enclaves will be enabled on the instance.
+
+For more information, see the documentation on [Nitro Enclaves](https://docs.aws.amazon.com/enclaves/latest/user/nitro-enclave.html).
+
+### Hibernation Options
+
+The `hibernation_options` block supports the following:
+
+* `configured` - If set to `true`, the launched EC2 instance will have hibernation enabled.
+
+### Instance Profile
+
+The [IAM Instance Profile](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html)
+to attach.
+
+The `iam_instance_profile` block supports the following:
+
+* `arn` - The Amazon Resource Name (ARN) of the instance profile.
+* `name` - The name of the instance profile.
+
+### Instance Requirements
+
+This configuration block supports the following:
+
+~> **NOTE:** Both `memory_mib.min` and `vcpu_count.min` must be specified.
+
+* `accelerator_count` - (Optional) Block describing the minimum and maximum number of accelerators (GPUs, FPGAs, or AWS Inferentia chips). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum. Set to `0` to exclude instance types with accelerators.
+* `accelerator_manufacturers` - (Optional) List of accelerator manufacturer names. Default is any manufacturer.
+
+    ```
+    Valid names:
+    * amazon-web-services
+    * amd
+    * nvidia
+    * xilinx
+    ```
+
+* `accelerator_names` - (Optional) List of accelerator names. Default is any accelerator.
+
+    ```
+    Valid names:
+    * a100 - NVIDIA A100 GPUs
+    * v100 - NVIDIA V100 GPUs
+    * k80 - NVIDIA K80 GPUs
+    * t4 - NVIDIA T4 GPUs
+    * m60 - NVIDIA M60 GPUs
+    * radeon-pro-v520 - AMD Radeon Pro V520 GPUs
+    * vu9p - Xilinx VU9P FPGAs
+    ```
+
+* `accelerator_total_memory_mib` - (Optional) Block describing the minimum and maximum total memory of the accelerators. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `accelerator_types` - (Optional) List of accelerator types. Default is any accelerator type.
+
+    ```
+    Valid types:
+    * fpga
+    * gpu
+    * inference
+    ```
+
+* `allowed_instance_types` - (Optional) List of instance types to apply your specified attributes against. All other instance types are ignored, even if they match your specified attributes. You can use strings with one or more wild cards, represented by an asterisk (\*), to allow an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are allowing the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are allowing all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is all instance types.
+
+    ~> **NOTE:** If you specify `allowed_instance_types`, you can't specify `excluded_instance_types`.
+
+* `bare_metal` - (Optional) Indicate whether bare metal instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `baseline_ebs_bandwidth_mbps` - (Optional) Block describing the minimum and maximum baseline EBS bandwidth, in Mbps. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `burstable_performance` - (Optional) Indicate whether burstable performance instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `cpu_manufacturers` - (Optional) List of CPU manufacturer names. Default is any manufacturer.
+
+    ~> **NOTE:** Don't confuse the CPU hardware manufacturer with the CPU hardware architecture. Instances will be launched with a compatible CPU architecture based on the Amazon Machine Image (AMI) that you specify in your launch template.
+
+    ```
+    Valid names:
+    * amazon-web-services
+    * amd
+    * intel
+    ```
+
+* `excluded_instance_types` - (Optional) List of instance types to exclude. You can use strings with one or more wild cards, represented by an asterisk (\*), to exclude an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are excluding the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are excluding all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is no excluded instance types.
+
+    ~> **NOTE:** If you specify `excluded_instance_types`, you can't specify `allowed_instance_types`.
+
+* `instance_generations` - (Optional) List of instance generation names. Default is any generation.
+
+    ```
+    Valid names:
+    * current - Recommended for best performance.
+    * previous - For existing applications optimized for older instance types.
+    ```
+
+* `local_storage` - (Optional) Indicate whether instance types with local storage volumes are `included`, `excluded`, or `required`. Default is `included`.
+* `local_storage_types` - (Optional) List of local storage type names. Default is any storage type.
+
+    ```
+    Valid names:
+    * hdd - hard disk drive
+    * ssd - solid state drive
+    ```
+
+* `memory_gib_per_vcpu` - (Optional) Block describing the minimum and maximum amount of memory (GiB) per vCPU. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+* `memory_mib` - (Required) Block describing the minimum and maximum amount of memory (MiB). Default is no maximum.
+    * `min` - (Required) Minimum.
+    * `max` - (Optional) Maximum.
+* `network_bandwidth_gbps` - (Optional) Block describing the minimum and maximum amount of network bandwidth, in gigabits per second (Gbps). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `network_interface_count` - (Optional) Block describing the minimum and maximum number of network interfaces. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `on_demand_max_price_percentage_over_lowest_price` - (Optional) The price protection threshold for On-Demand Instances. This is the maximum you'll pay for an On-Demand Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, instance types priced above your threshold are excluded. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 20.
+
+    If you set `DesiredCapacityType` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+* `require_hibernate_support` - (Optional) Indicate whether instance types must support On-Demand Instance Hibernation, either `true` or `false`. Default is `false`.
+* `spot_max_price_percentage_over_lowest_price` - (Optional) The price protection threshold for Spot Instances. This is the maximum you'll pay for a Spot Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, instance types priced above your threshold are excluded. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 100.
+
+    If you set `DesiredCapacityType` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+* `total_local_storage_gb` - (Optional) Block describing the minimum and maximum total local storage (GB). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+* `vcpu_count` - (Required) Block describing the minimum and maximum number of vCPUs. Default is no maximum.
+    * `min` - (Required) Minimum.
+    * `max` - (Optional) Maximum.
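+
+Taken together, the required `memory_mib.min` and `vcpu_count.min` plus any of
+the optional filters above describe a pool of acceptable instance types. A
+minimal sketch; the `LaunchTemplateInstanceRequirements*` struct names follow
+the binding's usual naming convention and are assumptions, not taken from this
+page:
+
+```python
+# Sketch: attribute-based instance selection. Because instance_requirements
+# is set, instance_type must be omitted (the two conflict).
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.launch_template import (LaunchTemplate,
+    LaunchTemplateInstanceRequirements,
+    LaunchTemplateInstanceRequirementsMemoryMib,
+    LaunchTemplateInstanceRequirementsVcpuCount)
+class InstanceRequirementsExample(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LaunchTemplate(self, "flexible",
+            image_id="ami-test",  # illustrative AMI ID
+            name_prefix="flexible-",
+            instance_requirements=LaunchTemplateInstanceRequirements(
+                # memory_mib.min and vcpu_count.min are both required.
+                memory_mib=LaunchTemplateInstanceRequirementsMemoryMib(min=4096),
+                vcpu_count=LaunchTemplateInstanceRequirementsVcpuCount(min=2, max=8),
+                instance_generations=["current"]
+            )
+        )
+```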
+
+### License Specification
+
+Associate one or more license configurations.
+
+The `license_specification` block supports the following:
+
+* `license_configuration_arn` - (Required) ARN of the license configuration.
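+
+Because `license_specification` is a list, several license configurations can
+be associated at once. A short sketch reusing the struct from the example
+above; the ARNs are placeholders, not real license configurations:
+
+```python
+# Sketch: attach two License Manager configurations to one launch template.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.launch_template import LaunchTemplate, LaunchTemplateLicenseSpecification
+class LicensedTemplate(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LaunchTemplate(self, "licensed",
+            image_id="ami-test",  # illustrative AMI ID
+            instance_type="t2.micro",
+            license_specification=[
+                LaunchTemplateLicenseSpecification(
+                    license_configuration_arn="arn:aws:license-manager:us-east-1:123456789012:license-configuration:lic-0123456789abcdef0123456789abcdef"
+                ),
+                LaunchTemplateLicenseSpecification(
+                    license_configuration_arn="arn:aws:license-manager:us-east-1:123456789012:license-configuration:lic-abcdef0123456789abcdef0123456789"
+                )
+            ]
+        )
+```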
+ +### Maintenance Options + +The `maintenance_options` block supports the following: + +* `auto_recovery` - (Optional) Disables the automatic recovery behavior of your instance or sets it to default. Can be `"default"` or `"disabled"`. See [Recover your instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-recover.html) for more details. + +### Market Options + +The market (purchasing) option for the instances. + +The `instance_market_options` block supports the following: + +* `market_type` - The market type. Can be `spot`. +* `spot_options` - The options for [Spot Instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-spot-instances.html) + +The `spot_options` block supports the following: + +* `block_duration_minutes` - The required duration in minutes. This value must be a multiple of 60. +* `instance_interruption_behavior` - The behavior when a Spot Instance is interrupted. Can be `hibernate`, + `stop`, or `terminate`. (Default: `terminate`). +* `max_price` - The maximum hourly price you're willing to pay for the Spot Instances. +* `spot_instance_type` - The Spot Instance request type. Can be `one-time`, or `persistent`. +* `valid_until` - The end date of the request. + +### Metadata Options + +The metadata options for the instances. + +The `metadata_options` block supports the following: + +* `http_endpoint` - (Optional) Whether the metadata service is available. Can be `"enabled"` or `"disabled"`. (Default: `"enabled"`). +* `http_tokens` - (Optional) Whether or not the metadata service requires session tokens, also referred to as _Instance Metadata Service Version 2 (IMDSv2)_. Can be `"optional"` or `"required"`. (Default: `"optional"`). +* `http_put_response_hop_limit` - (Optional) The desired HTTP PUT response hop limit for instance metadata requests. The larger the number, the further instance metadata requests can travel. Can be an integer from `1` to `64`. (Default: `1`). +* `http_protocol_ipv6` - (Optional) Enables or disables the IPv6 endpoint for the instance metadata service. Can be `"enabled"` or `"disabled"`. +* `instance_metadata_tags` - (Optional) Enables or disables access to instance tags from the instance metadata service. Can be `"enabled"` or `"disabled"`. + +For more information, see the documentation on the [Instance Metadata Service](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html). + +### Monitoring + +The `monitoring` block supports the following: + +* `enabled` - If `true`, the launched EC2 instance will have detailed monitoring enabled. + +### Network Interfaces + +Attaches one or more [Network Interfaces](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.html) to the instance. + +Check limitations for autoscaling group in [Creating an Auto Scaling Group Using a Launch Template Guide](https://docs.aws.amazon.com/autoscaling/ec2/userguide/create-asg-launch-template.html#limitations) + +Each `network_interfaces` block supports the following: + +* `associate_carrier_ip_address` - (Optional) Associate a Carrier IP address with `eth0` for a new network interface. + Use this option when you launch an instance in a Wavelength Zone and want to associate a Carrier IP address with the network interface. + Boolean value, can be left unset. +* `associate_public_ip_address` - (Optional) Associate a public ip address with the network interface. + Boolean value, can be left unset. +* `delete_on_termination` - (Optional) Whether the network interface should be destroyed on instance termination. 
+* `description` - (Optional) Description of the network interface. +* `device_index` - (Optional) The integer index of the network interface attachment. +* `interface_type` - (Optional) The type of network interface. To create an Elastic Fabric Adapter (EFA), specify `efa`. +* `ipv4_prefix_count` - (Optional) The number of IPv4 prefixes to be automatically assigned to the network interface. Conflicts with `ipv4_prefixes` +* `ipv4_prefixes` - (Optional) One or more IPv4 prefixes to be assigned to the network interface. Conflicts with `ipv4_prefix_count` +* `ipv6_addresses` - (Optional) One or more specific IPv6 addresses from the IPv6 CIDR block range of your subnet. Conflicts with `ipv6_address_count` +* `ipv6_address_count` - (Optional) The number of IPv6 addresses to assign to a network interface. Conflicts with `ipv6_addresses` +* `ipv6_prefix_count` - (Optional) The number of IPv6 prefixes to be automatically assigned to the network interface. Conflicts with `ipv6_prefixes` +* `ipv6_prefixes` - (Optional) One or more IPv6 prefixes to be assigned to the network interface. Conflicts with `ipv6_prefix_count` +* `network_interface_id` - (Optional) The ID of the network interface to attach. +* `network_card_index` - (Optional) The index of the network card. Some instance types support multiple network cards. The primary network interface must be assigned to network card index 0. The default is network card index 0. +* `private_ip_address` - (Optional) The primary private IPv4 address. +* `ipv4_address_count` - (Optional) The number of secondary private IPv4 addresses to assign to a network interface. Conflicts with `ipv4_addresses` +* `ipv4_addresses` - (Optional) One or more private IPv4 addresses to associate. Conflicts with `ipv4_address_count` +* `security_groups` - (Optional) A list of security group IDs to associate. +* `subnet_id` - (Optional) The VPC Subnet ID to associate. + +### Placement + +The [Placement Group](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/placement-groups.html) of the instance. + +The `placement` block supports the following: + +* `affinity` - (Optional) The affinity setting for an instance on a Dedicated Host. +* `availability_zone` - (Optional) The Availability Zone for the instance. +* `group_name` - (Optional) The name of the placement group for the instance. +* `host_id` - (Optional) The ID of the Dedicated Host for the instance. +* `host_resource_group_arn` - (Optional) The ARN of the Host Resource Group in which to launch instances. +* `spread_domain` - (Optional) Reserved for future use. +* `tenancy` - (Optional) The tenancy of the instance (if the instance is running in a VPC). Can be `default`, `dedicated`, or `host`. +* `partition_number` - (Optional) The number of the partition the instance should launch in. Valid only if the placement group strategy is set to partition. + +### Private DNS Name Options + +The `private_dns_name_options` block supports the following: + +* `enable_resource_name_dns_aaaa_record` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records. +* `enable_resource_name_dns_a_record` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS A records. +* `hostname_type` - (Optional) The type of hostname for Amazon EC2 instances. For IPv4 only subnets, an instance DNS name must be based on the instance IPv4 address. For IPv6 native subnets, an instance DNS name must be based on the instance ID. 
For dual-stack subnets, you can specify whether DNS names use the instance IPv4 address or the instance ID. Valid values: `ip-name` and `resource-name`.
+
+### Tag Specifications
+
+The tags to apply to the resources during launch. You can tag instances, volumes, elastic GPUs and spot instance requests. More information can be found in the [EC2 API documentation](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_LaunchTemplateTagSpecificationRequest.html).
+
+Each `tag_specifications` block supports the following:
+
+* `resource_type` - (Optional) The type of resource to tag.
+* `tags` - (Optional) A map of tags to assign to the resource.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the launch template.
+* `id` - The ID of the launch template.
+* `latest_version` - The latest version of the launch template.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Launch Templates using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Launch Templates using the `id`. For example:
+
+```console
+% terraform import aws_launch_template.web lt-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lb.html.markdown b/website/docs/cdktf/python/r/lb.html.markdown
new file mode 100644
index 00000000000..34e3160a076
--- /dev/null
+++ b/website/docs/cdktf/python/r/lb.html.markdown
@@ -0,0 +1,220 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb"
+description: |-
+  Provides a Load Balancer resource.
+---
+
+
+# Resource: aws_lb
+
+Provides a Load Balancer resource.
+
+~> **Note:** `aws_alb` is known as `aws_lb`. The functionality is identical.
+
+## Example Usage
+
+### Application Load Balancer
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lb import Lb, LbAccessLogs
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Lb(self, "test",
+            access_logs=LbAccessLogs(
+                bucket=lb_logs.id,
+                enabled=True,
+                prefix="test-lb"
+            ),
+            enable_deletion_protection=True,
+            internal=False,
+            load_balancer_type="application",
+            name="test-lb-tf",
+            security_groups=[lb_sg.id],
+            subnets=Token.as_list("${[ for subnet in ${" + public.fqn + "} : subnet.id]}"),
+            tags={
+                "Environment": "production"
+            }
+        )
+```
+
+### Network Load Balancer
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lb import Lb
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Lb(self, "test",
+            enable_deletion_protection=True,
+            internal=False,
+            load_balancer_type="network",
+            name="test-lb-tf",
+            subnets=Token.as_list("${[ for subnet in ${" + public.fqn + "} : subnet.id]}"),
+            tags={
+                "Environment": "production"
+            }
+        )
+```
+
+### Specifying Elastic IPs
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lb import Lb, LbSubnetMapping
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Lb(self, "example",
+            load_balancer_type="network",
+            name="example",
+            subnet_mapping=[LbSubnetMapping(
+                allocation_id=example1.id,
+                subnet_id=Token.as_string(aws_subnet_example1.id)
+            ), LbSubnetMapping(
+                allocation_id=example2.id,
+                subnet_id=Token.as_string(aws_subnet_example2.id)
+            )
+            ]
+        )
+```
+
+### Specifying private IP addresses for an internal-facing load balancer
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lb import Lb, LbSubnetMapping
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Lb(self, "example",
+            load_balancer_type="network",
+            name="example",
+            subnet_mapping=[LbSubnetMapping(
+                private_ipv4_address="10.0.1.15",
+                subnet_id=example1.id
+            ), LbSubnetMapping(
+                private_ipv4_address="10.0.2.15",
+                subnet_id=example2.id
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** Internal LBs can only use `ipv4` as the `ip_address_type`. You can only change to `dualstack` `ip_address_type` if the selected subnets are IPv6 enabled.
+
+~> **NOTE:** One of either `subnets` or `subnet_mapping` is required.
+
+This resource supports the following arguments:
+
+* `access_logs` - (Optional) An Access Logs block, documented below.
+* `customer_owned_ipv4_pool` - (Optional) The ID of the customer-owned IPv4 pool to use for this load balancer.
+* `desync_mitigation_mode` - (Optional) Determines how the load balancer handles requests that might pose a security risk to an application due to HTTP desync. Valid values are `monitor`, `defensive` (default), `strictest`.
+* `drop_invalid_header_fields` - (Optional) Indicates whether HTTP headers with header fields that are not valid are removed by the load balancer (true) or routed to targets (false). The default is false. Elastic Load Balancing requires that message header names contain only alphanumeric characters and hyphens. Only valid for Load Balancers of type `application`.
+* `enable_cross_zone_load_balancing` - (Optional) If true, cross-zone load balancing of the load balancer will be enabled. For `network` and `gateway` type load balancers, this feature is disabled by default (`false`). For `application` load balancers this feature is always enabled (`true`) and cannot be disabled. Defaults to `false`.
+* `enable_deletion_protection` - (Optional) If true, deletion of the load balancer will be disabled via the AWS API.
This will prevent Terraform from deleting the load balancer. Defaults to `false`. +* `enable_http2` - (Optional) Indicates whether HTTP/2 is enabled in `application` load balancers. Defaults to `true`. +* `enable_tls_version_and_cipher_suite_headers` - (Optional) Indicates whether the two headers (`x-amzn-tls-version` and `x-amzn-tls-cipher-suite`), which contain information about the negotiated TLS version and cipher suite, are added to the client request before sending it to the target. Only valid for Load Balancers of type `application`. Defaults to `false` +* `enable_xff_client_port` - (Optional) Indicates whether the X-Forwarded-For header should preserve the source port that the client used to connect to the load balancer in `application` load balancers. Defaults to `false`. +* `enable_waf_fail_open` - (Optional) Indicates whether to allow a WAF-enabled load balancer to route requests to targets if it is unable to forward the request to AWS WAF. Defaults to `false`. +* `idle_timeout` - (Optional) The time in seconds that the connection is allowed to be idle. Only valid for Load Balancers of type `application`. Default: 60. +* `internal` - (Optional) If true, the LB will be internal. Defaults to `false`. +* `ip_address_type` - (Optional) The type of IP addresses used by the subnets for your load balancer. The possible values are `ipv4` and `dualstack`. +* `load_balancer_type` - (Optional) The type of load balancer to create. Possible values are `application`, `gateway`, or `network`. The default value is `application`. +* `name` - (Optional) The name of the LB. This name must be unique within your AWS account, can have a maximum of 32 characters, +must contain only alphanumeric characters or hyphens, and must not begin or end with a hyphen. If not specified, +Terraform will autogenerate a name beginning with `tf-lb`. +* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `security_groups` - (Optional) A list of security group IDs to assign to the LB. Only valid for Load Balancers of type `application`. +* `preserve_host_header` - (Optional) Indicates whether the Application Load Balancer should preserve the Host header in the HTTP request and send it to the target without any change. Defaults to `false`. +* `subnet_mapping` - (Optional) A subnet mapping block as documented below. +* `subnets` - (Optional) A list of subnet IDs to attach to the LB. Subnets +cannot be updated for Load Balancers of type `network`. Changing this value +for load balancers of type `network` will force a recreation of the resource. +* `xff_header_processing_mode` - (Optional) Determines how the load balancer modifies the `X-Forwarded-For` header in the HTTP request before sending the request to the target. The possible values are `append`, `preserve`, and `remove`. Only valid for Load Balancers of type `application`. The default is `append`. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### access_logs + +* `bucket` - (Required) The S3 bucket name to store the logs in. +* `enabled` - (Optional) Boolean to enable / disable `access_logs`. Defaults to `false`, even when `bucket` is specified. +* `prefix` - (Optional) The S3 bucket prefix. Logs are stored in the root if not configured. 
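+
+A minimal sketch of the `access_logs` block; the bucket name and subnet IDs are
+placeholders, and the bucket is assumed to already exist with the ELB
+log-delivery bucket policy in place. Note that `enabled=True` must be set
+explicitly, since it defaults to `false` even when `bucket` is configured:
+
+```python
+# Sketch: enable ALB access logging to a pre-existing S3 bucket.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.lb import Lb, LbAccessLogs
+class AccessLogsExample(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Lb(self, "with_logs",
+            name="example-lb",
+            load_balancer_type="application",
+            subnets=["subnet-12345678", "subnet-87654321"],  # placeholder subnet IDs
+            access_logs=LbAccessLogs(
+                bucket="my-lb-logs-bucket",  # assumed pre-existing bucket
+                enabled=True,  # required; not implied by setting `bucket`
+                prefix="example"
+            )
+        )
+```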
+ +### subnet_mapping + +* `subnet_id` - (Required) ID of the subnet of which to attach to the load balancer. You can specify only one subnet per Availability Zone. +* `allocation_id` - (Optional) The allocation ID of the Elastic IP address for an internet-facing load balancer. +* `ipv6_address` - (Optional) The IPv6 address. You associate IPv6 CIDR blocks with your VPC and choose the subnets where you launch both internet-facing and internal Application Load Balancers or Network Load Balancers. +* `private_ipv4_address` - (Optional) The private IPv4 address for an internal load balancer. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the load balancer (matches `id`). +* `arn_suffix` - The ARN suffix for use with CloudWatch Metrics. +* `dns_name` - The DNS name of the load balancer. +* `id` - The ARN of the load balancer (matches `arn`). +* `subnet_mapping.*.outpost_id` - ID of the Outpost containing the load balancer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `zone_id` - The canonical hosted zone ID of the load balancer (to be used in a Route 53 Alias record). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `update` - (Default `10m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import LBs using their ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import LBs using their ARN. For example: + +```console +% terraform import aws_lb.bar arn:aws:elasticloadbalancing:us-west-2:123456789012:loadbalancer/app/my-load-balancer/50dc6c495c0c9188 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lb_cookie_stickiness_policy.html.markdown b/website/docs/cdktf/python/r/lb_cookie_stickiness_policy.html.markdown new file mode 100644 index 00000000000..f1e6068e3b6 --- /dev/null +++ b/website/docs/cdktf/python/r/lb_cookie_stickiness_policy.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "ELB Classic" +layout: "aws" +page_title: "AWS: aws_lb_cookie_stickiness_policy" +description: |- + Provides a load balancer cookie stickiness policy, which allows an ELB to control the sticky session lifetime of the browser. +--- + + + +# Resource: aws_lb_cookie_stickiness_policy + +Provides a load balancer cookie stickiness policy, which allows an ELB to control the sticky session lifetime of the browser. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.elb import Elb +from imports.aws.lb_cookie_stickiness_policy import LbCookieStickinessPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + lb = Elb(self, "lb", + availability_zones=["us-east-1a"], + listener=[ElbListener( + instance_port=8000, + instance_protocol="http", + lb_port=80, + lb_protocol="http" + ) + ], + name="test-lb" + ) + LbCookieStickinessPolicy(self, "foo", + cookie_expiration_period=600, + lb_port=80, + load_balancer=lb.id, + name="foo-policy" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the stickiness policy. +* `load_balancer` - (Required) The load balancer to which the policy + should be attached. +* `lb_port` - (Required) The load balancer port to which the policy + should be applied. This must be an active listener on the load +balancer. +* `cookie_expiration_period` - (Optional) The time period after which + the session cookie should be considered stale, expressed in seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the policy. +* `name` - The name of the stickiness policy. +* `load_balancer` - The load balancer to which the policy is attached. +* `lb_port` - The load balancer port to which the policy is applied. +* `cookie_expiration_period` - The time period after which the session cookie is considered stale, expressed in seconds. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lb_listener.html.markdown b/website/docs/cdktf/python/r/lb_listener.html.markdown new file mode 100644 index 00000000000..1826b134e6c --- /dev/null +++ b/website/docs/cdktf/python/r/lb_listener.html.markdown @@ -0,0 +1,456 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb_listener" +description: |- + Provides a Load Balancer Listener resource. +--- + + + +# Resource: aws_lb_listener + +Provides a Load Balancer Listener resource. + +~> **Note:** `aws_alb_listener` is known as `aws_lb_listener`. The functionality is identical. + +## Example Usage + +### Forward Action + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb import Lb +from imports.aws.lb_listener import LbListener +from imports.aws.lb_target_group import LbTargetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + front_end = Lb(self, "front_end") + aws_lb_target_group_front_end = LbTargetGroup(self, "front_end_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_lb_target_group_front_end.override_logical_id("front_end") + aws_lb_listener_front_end = LbListener(self, "front_end_2", + certificate_arn="arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4", + default_action=[LbListenerDefaultAction( + target_group_arn=Token.as_string(aws_lb_target_group_front_end.arn), + type="forward" + ) + ], + load_balancer_arn=front_end.arn, + port=Token.as_number("443"), + protocol="HTTPS", + ssl_policy="ELBSecurityPolicy-2016-08" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lb_listener_front_end.override_logical_id("front_end") +``` + +To a NLB: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb_listener import LbListener +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LbListener(self, "front_end", + alpn_policy="HTTP2Preferred", + certificate_arn="arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4", + default_action=[LbListenerDefaultAction( + target_group_arn=Token.as_string(aws_lb_target_group_front_end.arn), + type="forward" + ) + ], + load_balancer_arn=Token.as_string(aws_lb_front_end.arn), + port=Token.as_number("443"), + protocol="TLS" + ) +``` + +### Redirect Action + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb import Lb +from imports.aws.lb_listener import LbListener +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + front_end = Lb(self, "front_end") + aws_lb_listener_front_end = LbListener(self, "front_end_1", + default_action=[LbListenerDefaultAction( + redirect=LbListenerDefaultActionRedirect( + port="443", + protocol="HTTPS", + status_code="HTTP_301" + ), + type="redirect" + ) + ], + load_balancer_arn=front_end.arn, + port=Token.as_number("80"), + protocol="HTTP" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lb_listener_front_end.override_logical_id("front_end") +``` + +### Fixed-response Action + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.lb import Lb
+from imports.aws.lb_listener import LbListener
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        front_end = Lb(self, "front_end")
+        aws_lb_listener_front_end = LbListener(self, "front_end_1",
+            default_action=[LbListenerDefaultAction(
+                fixed_response=LbListenerDefaultActionFixedResponse(
+                    content_type="text/plain",
+                    message_body="Fixed response content",
+                    status_code="200"
+                ),
+                type="fixed-response"
+            )
+            ],
+            load_balancer_arn=front_end.arn,
+            port=Token.as_number("80"),
+            protocol="HTTP"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_listener_front_end.override_logical_id("front_end")
+```
+
+### Authenticate-cognito Action
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.cognito_user_pool_client import CognitoUserPoolClient
+from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain
+from imports.aws.lb import Lb
+from imports.aws.lb_listener import LbListener
+from imports.aws.lb_target_group import LbTargetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, pool_name, client_name, user_pool_id, user_pool_domain, domain_user_pool_id):
+        super().__init__(scope, name)
+        pool = CognitoUserPool(self, "pool",
+            name=pool_name
+        )
+        client = CognitoUserPoolClient(self, "client",
+            name=client_name,
+            user_pool_id=user_pool_id
+        )
+        domain = CognitoUserPoolDomain(self, "domain",
+            domain=user_pool_domain,
+            user_pool_id=domain_user_pool_id
+        )
+        front_end = Lb(self, "front_end")
+        aws_lb_target_group_front_end = LbTargetGroup(self, "front_end_4")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_target_group_front_end.override_logical_id("front_end")
+        aws_lb_listener_front_end = LbListener(self, "front_end_5",
+            default_action=[LbListenerDefaultAction(
+                authenticate_cognito=LbListenerDefaultActionAuthenticateCognito(
+                    user_pool_arn=pool.arn,
+                    user_pool_client_id=client.id,
+                    user_pool_domain=domain.domain
+                ),
+                type="authenticate-cognito"
+            ), LbListenerDefaultAction(
+                target_group_arn=Token.as_string(aws_lb_target_group_front_end.arn),
+                type="forward"
+            )
+            ],
+            load_balancer_arn=front_end.arn,
+            port=Token.as_number("80"),
+            protocol="HTTP"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_listener_front_end.override_logical_id("front_end")
+```
+
+### Authenticate-OIDC Action
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.lb import Lb +from imports.aws.lb_listener import LbListener +from imports.aws.lb_target_group import LbTargetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + front_end = Lb(self, "front_end") + aws_lb_target_group_front_end = LbTargetGroup(self, "front_end_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lb_target_group_front_end.override_logical_id("front_end") + aws_lb_listener_front_end = LbListener(self, "front_end_2", + default_action=[LbListenerDefaultAction( + authenticate_oidc=LbListenerDefaultActionAuthenticateOidc( + authorization_endpoint="https://example.com/authorization_endpoint", + client_id="client_id", + client_secret="client_secret", + issuer="https://example.com", + token_endpoint="https://example.com/token_endpoint", + user_info_endpoint="https://example.com/user_info_endpoint" + ), + type="authenticate-oidc" + ), LbListenerDefaultAction( + target_group_arn=Token.as_string(aws_lb_target_group_front_end.arn), + type="forward" + ) + ], + load_balancer_arn=front_end.arn, + port=Token.as_number("80"), + protocol="HTTP" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lb_listener_front_end.override_logical_id("front_end") +``` + +### Gateway Load Balancer Listener + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb import Lb +from imports.aws.lb_listener import LbListener +from imports.aws.lb_target_group import LbTargetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Lb(self, "example", + load_balancer_type="gateway", + name="example", + subnet_mapping=[LbSubnetMapping( + subnet_id=Token.as_string(aws_subnet_example.id) + ) + ] + ) + aws_lb_target_group_example = LbTargetGroup(self, "example_1", + health_check=LbTargetGroupHealthCheck( + port=Token.as_string(80), + protocol="HTTP" + ), + name="example", + port=6081, + protocol="GENEVE", + vpc_id=Token.as_string(aws_vpc_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lb_target_group_example.override_logical_id("example") + aws_lb_listener_example = LbListener(self, "example_2", + default_action=[LbListenerDefaultAction( + target_group_arn=Token.as_string(aws_lb_target_group_example.id), + type="forward" + ) + ], + load_balancer_arn=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lb_listener_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `default_action` - (Required) Configuration block for default actions. Detailed below. +* `load_balancer_arn` - (Required, Forces New Resource) ARN of the load balancer. + +The following arguments are optional: + +* `alpn_policy` - (Optional) Name of the Application-Layer Protocol Negotiation (ALPN) policy. Can be set if `protocol` is `TLS`. Valid values are `HTTP1Only`, `HTTP2Only`, `HTTP2Optional`, `HTTP2Preferred`, and `None`. 
+* `certificate_arn` - (Optional) ARN of the default SSL server certificate. Exactly one certificate is required if the protocol is HTTPS. For adding additional SSL certificates, see the [`aws_lb_listener_certificate` resource](/docs/providers/aws/r/lb_listener_certificate.html).
+* `port` - (Optional) Port on which the load balancer is listening. Not valid for Gateway Load Balancers.
+* `protocol` - (Optional) Protocol for connections from clients to the load balancer. For Application Load Balancers, valid values are `HTTP` and `HTTPS`, with a default of `HTTP`. For Network Load Balancers, valid values are `TCP`, `TLS`, `UDP`, and `TCP_UDP`. Not valid to use `UDP` or `TCP_UDP` if dual-stack mode is enabled. Not valid for Gateway Load Balancers.
+* `ssl_policy` - (Optional) Name of the SSL Policy for the listener. Required if `protocol` is `HTTPS` or `TLS`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+~> **NOTE:** Listeners attached to Application Load Balancers must use either the `HTTP` or `HTTPS` protocol, while listeners attached to Network Load Balancers must use the `TCP` protocol.
+
+### default_action
+
+The following arguments are required:
+
+* `type` - (Required) Type of routing action. Valid values are `forward`, `redirect`, `fixed-response`, `authenticate-cognito` and `authenticate-oidc`.
+
+The following arguments are optional:
+
+* `authenticate_cognito` - (Optional) Configuration block for using Amazon Cognito to authenticate users. Specify only when `type` is `authenticate-cognito`. Detailed below.
+* `authenticate_oidc` - (Optional) Configuration block for an identity provider that is compliant with OpenID Connect (OIDC). Specify only when `type` is `authenticate-oidc`. Detailed below.
+* `fixed_response` - (Optional) Information for creating an action that returns a custom HTTP response. Required if `type` is `fixed-response`.
+* `forward` - (Optional) Configuration block for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both a `forward` block and the `target_group_arn` attribute, you can specify only one target group using `forward`, and it must be the same target group specified in `target_group_arn`. Detailed below; see the sketch after this list.
+* `order` - (Optional) Order for the action. This value is required for rules with multiple actions. The action with the lowest value for order is performed first. Valid values are between `1` and `50000`.
+* `redirect` - (Optional) Configuration block for creating a redirect action. Required if `type` is `redirect`. Detailed below.
+* `target_group_arn` - (Optional) ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead.
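+
+As a minimal sketch of the `forward` block, placed inside a stack's `__init__` like the examples above (this assumes `front_end` is an `Lb` and `blue` and `green` are `LbTargetGroup` resources defined elsewhere in the stack, and that the struct classes follow the generated binding naming used above):
+
+```python
+LbListener(self, "weighted",
+    default_action=[LbListenerDefaultAction(
+        forward=LbListenerDefaultActionForward(
+            # Split traffic 90/10 between two target groups (weights are illustrative).
+            target_group=[LbListenerDefaultActionForwardTargetGroup(
+                arn=blue.arn,
+                weight=90
+            ), LbListenerDefaultActionForwardTargetGroup(
+                arn=green.arn,
+                weight=10
+            )
+            ],
+            # Keep a client on the target group it first hit for 10 minutes.
+            stickiness=LbListenerDefaultActionForwardStickiness(
+                duration=600,
+                enabled=True
+            )
+        ),
+        type="forward"
+    )
+    ],
+    load_balancer_arn=front_end.arn,
+    port=Token.as_number("80"),
+    protocol="HTTP"
+)
+```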
+
+#### authenticate_cognito
+
+The following arguments are required:
+
+* `user_pool_arn` - (Required) ARN of the Cognito user pool.
+* `user_pool_client_id` - (Required) ID of the Cognito user pool client.
+* `user_pool_domain` - (Required) Domain prefix or fully-qualified domain name of the Cognito user pool.
+
+The following arguments are optional:
+
+* `authentication_request_extra_params` - (Optional) Query parameters to include in the redirect request to the authorization endpoint. Max: 10. Detailed below.
+* `on_unauthenticated_request` - (Optional) Behavior if the user is not authenticated. Valid values are `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) Set of user claims to be requested from the IdP.
+* `session_cookie_name` - (Optional) Name of the cookie used to maintain session information.
+* `session_timeout` - (Optional) Maximum duration of the authentication session, in seconds.
+
+##### authentication_request_extra_params
+
+* `key` - (Required) Key of query parameter.
+* `value` - (Required) Value of query parameter.
+
+#### authenticate_oidc
+
+The following arguments are required:
+
+* `authorization_endpoint` - (Required) Authorization endpoint of the IdP.
+* `client_id` - (Required) OAuth 2.0 client identifier.
+* `client_secret` - (Required) OAuth 2.0 client secret.
+* `issuer` - (Required) OIDC issuer identifier of the IdP.
+* `token_endpoint` - (Required) Token endpoint of the IdP.
+* `user_info_endpoint` - (Required) User info endpoint of the IdP.
+
+The following arguments are optional:
+
+* `authentication_request_extra_params` - (Optional) Query parameters to include in the redirect request to the authorization endpoint. Max: 10.
+* `on_unauthenticated_request` - (Optional) Behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) Set of user claims to be requested from the IdP.
+* `session_cookie_name` - (Optional) Name of the cookie used to maintain session information.
+* `session_timeout` - (Optional) Maximum duration of the authentication session, in seconds.
+
+#### fixed_response
+
+The following arguments are required:
+
+* `content_type` - (Required) Content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
+
+The following arguments are optional:
+
+* `message_body` - (Optional) Message body.
+* `status_code` - (Optional) HTTP response code. Valid values are `2XX`, `4XX`, or `5XX`.
+
+#### forward
+
+The following arguments are required:
+
+* `target_group` - (Required) Set of 1-5 target group blocks. Detailed below.
+
+The following arguments are optional:
+
+* `stickiness` - (Optional) Configuration block for target group stickiness for the rule. Detailed below.
+
+##### target_group
+
+The following arguments are required:
+
+* `arn` - (Required) ARN of the target group.
+
+The following arguments are optional:
+
+* `weight` - (Optional) Weight. The range is 0 to 999.
+
+##### stickiness
+
+The following arguments are required:
+
+* `duration` - (Required) Time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
+
+The following arguments are optional:
+
+* `enabled` - (Optional) Whether target group stickiness is enabled. Default is `false`.
+
+#### redirect
+
+~> **NOTE:** You can reuse URI components using the following reserved keywords: `#{protocol}`, `#{host}`, `#{port}`, `#{path}` (the leading "/" is removed) and `#{query}`.
+
+The following arguments are required:
+
+* `status_code` - (Required) HTTP redirect code. The redirect is either permanent (`HTTP_301`) or temporary (`HTTP_302`).
+
+The following arguments are optional:
+
+* `host` - (Optional) Hostname. This component is not percent-encoded. The hostname can contain `#{host}`. Defaults to `#{host}`.
+* `path` - (Optional) Absolute path, starting with the leading "/". This component is not percent-encoded. The path can contain `#{host}`, `#{path}`, and `#{port}`. Defaults to `/#{path}`.
+* `port` - (Optional) Port. Specify a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
+* `protocol` - (Optional) Protocol. Valid values are `HTTP`, `HTTPS`, or `#{protocol}`. Defaults to `#{protocol}`.
+* `query` - (Optional) Query parameters, URL-encoded when necessary, but not percent-encoded. Do not include the leading "?". Defaults to `#{query}`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the listener (matches `id`).
+* `id` - ARN of the listener (matches `arn`).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import listeners using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import listeners using their ARN. For example:
+
+```console
+% terraform import aws_lb_listener.front_end arn:aws:elasticloadbalancing:us-west-2:187416307283:listener/app/front-end-alb/8e4497da625e2d8a/9ab28ade35828f96
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lb_listener_certificate.html.markdown b/website/docs/cdktf/python/r/lb_listener_certificate.html.markdown
new file mode 100644
index 00000000000..f099cfd42f0
--- /dev/null
+++ b/website/docs/cdktf/python/r/lb_listener_certificate.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_listener_certificate"
+description: |-
+  Provides a Load Balancer Listener Certificate resource.
+---
+
+
+
+# Resource: aws_lb_listener_certificate
+
+Provides a Load Balancer Listener Certificate resource.
+
+This resource is for additional certificates and does not replace the default certificate on the listener.
+
+~> **Note:** `aws_alb_listener_certificate` is known as `aws_lb_listener_certificate`. The functionality is identical.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.acm_certificate import AcmCertificate
+from imports.aws.lb import Lb
+from imports.aws.lb_listener import LbListener
+from imports.aws.lb_listener_certificate import LbListenerCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, default_action, load_balancer_arn):
+        super().__init__(scope, name)
+        example = AcmCertificate(self, "example")
+        Lb(self, "front_end")
+        aws_lb_listener_front_end = LbListener(self, "front_end_2",
+            default_action=default_action,
+            load_balancer_arn=load_balancer_arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_listener_front_end.override_logical_id("front_end")
+        aws_lb_listener_certificate_example = LbListenerCertificate(self, "example_3",
+            certificate_arn=example.arn,
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_listener_certificate_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `listener_arn` - (Required, Forces New Resource) The ARN of the listener to which to attach the certificate.
+* `certificate_arn` - (Required, Forces New Resource) The ARN of the certificate to attach to the listener.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `listener_arn` and `certificate_arn` separated by a `_`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Listener Certificates using the listener ARN and certificate ARN, separated by an underscore (`_`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Listener Certificates using the listener ARN and certificate ARN, separated by an underscore (`_`). For example:
+
+```console
+% terraform import aws_lb_listener_certificate.example arn:aws:elasticloadbalancing:us-west-2:123456789012:listener/app/test/8e4497da625e2d8a/9ab28ade35828f96/67b3d2d36dd7c26b_arn:aws:iam::123456789012:server-certificate/tf-acc-test-6453083910015726063
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lb_listener_rule.html.markdown b/website/docs/cdktf/python/r/lb_listener_rule.html.markdown
new file mode 100644
index 00000000000..cba51740b4d
--- /dev/null
+++ b/website/docs/cdktf/python/r/lb_listener_rule.html.markdown
@@ -0,0 +1,338 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_listener_rule"
+description: |-
+  Provides a Load Balancer Listener Rule resource.
+---
+
+
+
+# Resource: aws_lb_listener_rule
+
+Provides a Load Balancer Listener Rule resource.
+
+~> **Note:** `aws_alb_listener_rule` is known as `aws_lb_listener_rule`. The functionality is identical.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cognito_user_pool import CognitoUserPool
+from imports.aws.cognito_user_pool_client import CognitoUserPoolClient
+from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain
+from imports.aws.lb import Lb
+from imports.aws.lb_listener import LbListener
+from imports.aws.lb_listener_rule import LbListenerRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, pool_name, client_name, user_pool_id, user_pool_domain, domain_user_pool_id, default_action, load_balancer_arn, condition, condition1):
+        super().__init__(scope, name)
+        pool = CognitoUserPool(self, "pool",
+            name=pool_name
+        )
+        client = CognitoUserPoolClient(self, "client",
+            name=client_name,
+            user_pool_id=user_pool_id
+        )
+        domain = CognitoUserPoolDomain(self, "domain",
+            domain=user_pool_domain,
+            user_pool_id=domain_user_pool_id
+        )
+        Lb(self, "front_end")
+        aws_lb_listener_front_end = LbListener(self, "front_end_4",
+            default_action=default_action,
+            load_balancer_arn=load_balancer_arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_listener_front_end.override_logical_id("front_end")
+        LbListenerRule(self, "admin",
+            action=[LbListenerRuleAction(
+                authenticate_cognito=LbListenerRuleActionAuthenticateCognito(
+                    user_pool_arn=pool.arn,
+                    user_pool_client_id=client.id,
+                    user_pool_domain=domain.domain
+                ),
+                type="authenticate-cognito"
+            ), LbListenerRuleAction(
+                target_group_arn=static.arn,
+                type="forward"
+            )
+            ],
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn),
+            condition=condition
+        )
+        LbListenerRule(self, "health_check",
+            action=[LbListenerRuleAction(
+                fixed_response=LbListenerRuleActionFixedResponse(
+                    content_type="text/plain",
+                    message_body="HEALTHY",
+                    status_code="200"
+                ),
+                type="fixed-response"
+            )
+            ],
+            condition=[LbListenerRuleCondition(
+                query_string=[LbListenerRuleConditionQueryString(
+                    key="health",
+                    value="check"
+                ), LbListenerRuleConditionQueryString(
+                    value="bar"
+                )
+                ]
+            )
+            ],
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn)
+        )
+        LbListenerRule(self, "host_based_routing",
+            action=[LbListenerRuleAction(
+                forward=LbListenerRuleActionForward(
+                    stickiness=LbListenerRuleActionForwardStickiness(
+                        duration=600,
+                        enabled=True
+                    ),
+                    target_group=[LbListenerRuleActionForwardTargetGroup(
+                        arn=main.arn,
+                        weight=80
+                    ), LbListenerRuleActionForwardTargetGroup(
+                        arn=canary.arn,
+                        weight=20
+                    )
+                    ]
+                ),
+                type="forward"
+            )
+            ],
+            condition=[LbListenerRuleCondition(
+                host_header=LbListenerRuleConditionHostHeader(
+                    values=["my-service.*.terraform.io"]
+                )
+            )
+            ],
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn),
+            priority=99
+        )
+        LbListenerRule(self, "host_based_weighted_routing",
+            action=[LbListenerRuleAction(
+                target_group_arn=static.arn,
+                type="forward"
+            )
+            ],
+            condition=[LbListenerRuleCondition(
+                host_header=LbListenerRuleConditionHostHeader(
+                    values=["my-service.*.terraform.io"]
+                )
+            )
+            ],
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn),
+            priority=99
+        )
+        LbListenerRule(self, "oidc",
+            action=[LbListenerRuleAction(
+                authenticate_oidc=LbListenerRuleActionAuthenticateOidc(
+                    authorization_endpoint="https://example.com/authorization_endpoint",
+                    client_id="client_id",
+                    client_secret="client_secret",
+                    issuer="https://example.com",
+                    token_endpoint="https://example.com/token_endpoint",
+                    user_info_endpoint="https://example.com/user_info_endpoint"
+                ),
+                type="authenticate-oidc"
+            ), LbListenerRuleAction(
+                target_group_arn=static.arn,
+                type="forward"
+            )
+            ],
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn),
+            condition=condition1
+        )
+        LbListenerRule(self, "redirect_http_to_https",
+            action=[LbListenerRuleAction(
+                redirect=LbListenerRuleActionRedirect(
+                    port="443",
+                    protocol="HTTPS",
+                    status_code="HTTP_301"
+                ),
+                type="redirect"
+            )
+            ],
+            condition=[LbListenerRuleCondition(
+                http_header=LbListenerRuleConditionHttpHeader(
+                    http_header_name="X-Forwarded-For",
+                    values=["192.168.1.*"]
+                )
+            )
+            ],
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn)
+        )
+        LbListenerRule(self, "static",
+            action=[LbListenerRuleAction(
+                target_group_arn=static.arn,
+                type="forward"
+            )
+            ],
+            condition=[LbListenerRuleCondition(
+                path_pattern=LbListenerRuleConditionPathPattern(
+                    values=["/static/*"]
+                )
+            ), LbListenerRuleCondition(
+                host_header=LbListenerRuleConditionHostHeader(
+                    values=["example.com"]
+                )
+            )
+            ],
+            listener_arn=Token.as_string(aws_lb_listener_front_end.arn),
+            priority=100
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `listener_arn` - (Required, Forces New Resource) The ARN of the listener to which to attach the rule.
+* `priority` - (Optional) The priority for the rule between `1` and `50000`. If unset, the rule is assigned the next available priority after the currently existing highest rule. A listener can't have multiple rules with the same priority.
+* `action` - (Required) An Action block. Action blocks are documented below.
+* `condition` - (Required) A Condition block. Multiple condition blocks of different types can be set and all must be satisfied for the rule to match. Condition blocks are documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Action Blocks
+
+Action Blocks (for `action`) support the following:
+
+* `type` - (Required) The type of routing action. Valid values are `forward`, `redirect`, `fixed-response`, `authenticate-cognito` and `authenticate-oidc`.
+* `target_group_arn` - (Optional) The ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead.
+* `forward` - (Optional) Information for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both a `forward` block and the `target_group_arn` attribute, you can specify only one target group using `forward`, and it must be the same target group specified in `target_group_arn`.
+* `redirect` - (Optional) Information for creating a redirect action. Required if `type` is `redirect`.
+* `fixed_response` - (Optional) Information for creating an action that returns a custom HTTP response. Required if `type` is `fixed-response`.
+* `authenticate_cognito` - (Optional) Information for creating an authenticate action using Cognito. Required if `type` is `authenticate-cognito`.
+* `authenticate_oidc` - (Optional) Information for creating an authenticate action using OIDC. Required if `type` is `authenticate-oidc`.
+
+Forward Blocks (for `forward`) support the following:
+
+* `target_group` - (Required) One or more target group blocks.
+* `stickiness` - (Optional) The target group stickiness for the rule.
+
+Target Group Blocks (for `target_group`) support the following:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the target group.
+* `weight` - (Optional) The weight. The range is 0 to 999.
+
+Target Group Stickiness Config Blocks (for `stickiness`) support the following:
+
+* `enabled` - (Required) Indicates whether target group stickiness is enabled.
+* `duration` - (Optional) The time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
+
+Redirect Blocks (for `redirect`) support the following:
+
+~> **NOTE:** You can reuse URI components using the following reserved keywords: `#{protocol}`, `#{host}`, `#{port}`, `#{path}` (the leading "/" is removed) and `#{query}`.
+
+* `host` - (Optional) The hostname. This component is not percent-encoded. The hostname can contain `#{host}`. Defaults to `#{host}`.
+* `path` - (Optional) The absolute path, starting with the leading "/". This component is not percent-encoded. The path can contain `#{host}`, `#{path}`, and `#{port}`. Defaults to `/#{path}`.
+* `port` - (Optional) The port. Specify a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
+* `protocol` - (Optional) The protocol. Valid values are `HTTP`, `HTTPS`, or `#{protocol}`. Defaults to `#{protocol}`.
+* `query` - (Optional) The query parameters, URL-encoded when necessary, but not percent-encoded. Do not include the leading "?". Defaults to `#{query}`.
+* `status_code` - (Required) The HTTP redirect code. The redirect is either permanent (`HTTP_301`) or temporary (`HTTP_302`).
+
+Fixed-response Blocks (for `fixed_response`) support the following:
+
+* `content_type` - (Required) The content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
+* `message_body` - (Optional) The message body.
+* `status_code` - (Optional) The HTTP response code. Valid values are `2XX`, `4XX`, or `5XX`.
+
+Authenticate Cognito Blocks (for `authenticate_cognito`) support the following:
+
+* `authentication_request_extra_params` - (Optional) The query parameters to include in the redirect request to the authorization endpoint. Max: 10.
+* `on_unauthenticated_request` - (Optional) The behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) The set of user claims to be requested from the IdP.
+* `session_cookie_name` - (Optional) The name of the cookie used to maintain session information.
+* `session_timeout` - (Optional) The maximum duration of the authentication session, in seconds.
+* `user_pool_arn` - (Required) The ARN of the Cognito user pool.
+* `user_pool_client_id` - (Required) The ID of the Cognito user pool client.
+* `user_pool_domain` - (Required) The domain prefix or fully-qualified domain name of the Cognito user pool.
+
+Authenticate OIDC Blocks (for `authenticate_oidc`) support the following:
+
+* `authentication_request_extra_params` - (Optional) The query parameters to include in the redirect request to the authorization endpoint. Max: 10.
+* `authorization_endpoint` - (Required) The authorization endpoint of the IdP.
+* `client_id` - (Required) The OAuth 2.0 client identifier.
+* `client_secret` - (Required) The OAuth 2.0 client secret.
+* `issuer` - (Required) The OIDC issuer identifier of the IdP.
+* `on_unauthenticated_request` - (Optional) The behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) The set of user claims to be requested from the IdP.
+* `session_cookie_name` - (Optional) The name of the cookie used to maintain session information.
+* `session_timeout` - (Optional) The maximum duration of the authentication session, in seconds.
+* `token_endpoint` - (Required) The token endpoint of the IdP.
+* `user_info_endpoint` - (Required) The user info endpoint of the IdP.
+
+Authentication Request Extra Params Blocks (for `authentication_request_extra_params`) support the following:
+
+* `key` - (Required) The key of the query parameter.
+* `value` - (Required) The value of the query parameter.
+
+### Condition Blocks
+
+One or more condition blocks can be set per rule. Most condition types can only be specified once per rule, except for `http-header` and `query-string`, which can be specified multiple times.
+
+Condition Blocks (for `condition`) support the following:
+
+* `host_header` - (Optional) Contains a single `values` item which is a list of host header patterns to match. The maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied.
+* `http_header` - (Optional) HTTP headers to match. [HTTP Header block](#http-header-blocks) fields documented below.
+* `http_request_method` - (Optional) Contains a single `values` item which is a list of HTTP request methods or verbs to match. Maximum size is 40 characters. Only allowed characters are A-Z, hyphen (-) and underscore (\_). Comparison is case sensitive. Wildcards are not supported. Only one needs to match for the condition to be satisfied. AWS recommends that GET and HEAD requests are routed in the same way because the response to a HEAD request may be cached.
+* `path_pattern` - (Optional) Contains a single `values` item which is a list of path patterns to match against the request URL. Maximum size of each pattern is 128 characters. Comparison is case sensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied. Path pattern is compared only to the path of the URL, not to its query string. To compare against the query string, use a `query_string` condition.
+* `query_string` - (Optional) Query strings to match. [Query String block](#query-string-blocks) fields documented below.
+* `source_ip` - (Optional) Contains a single `values` item which is a list of source IP CIDR notations to match. You can use both IPv4 and IPv6 addresses. Wildcards are not supported. Condition is satisfied if the source IP address of the request matches one of the CIDR blocks. Condition is not satisfied by the addresses in the `X-Forwarded-For` header; use an `http_header` condition instead.
+
+~> **NOTE:** Exactly one of `host_header`, `http_header`, `http_request_method`, `path_pattern`, `query_string` or `source_ip` must be set per condition.
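+
+The two condition types without a dedicated example above are `http_request_method` and `source_ip`. A minimal sketch combining them inside a stack's `__init__` (the rule name, CIDR, and `static` target group are illustrative, and the condition struct classes are assumed to follow the generated binding naming used in the example above):
+
+```python
+LbListenerRule(self, "post_from_internal",
+    action=[LbListenerRuleAction(
+        target_group_arn=static.arn,
+        type="forward"
+    )
+    ],
+    condition=[LbListenerRuleCondition(
+        # Match POST requests only (methods are compared case-sensitively).
+        http_request_method=LbListenerRuleConditionHttpRequestMethod(
+            values=["POST"]
+        )
+    ), LbListenerRuleCondition(
+        # ...and only when the client address falls in this CIDR block.
+        source_ip=LbListenerRuleConditionSourceIp(
+            values=["10.0.0.0/8"]
+        )
+    )
+    ],
+    listener_arn=Token.as_string(aws_lb_listener_front_end.arn)
+)
+```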
+
+#### HTTP Header Blocks
+
+HTTP Header Blocks (for `http_header`) support the following:
+
+* `http_header_name` - (Required) Name of HTTP header to search. The maximum size is 40 characters. Comparison is case insensitive. Only RFC7240 characters are supported. Wildcards are not supported. You cannot use an HTTP header condition to specify the host header; use a `host-header` condition instead.
+* `values` - (Required) List of header value patterns to match. Maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). If the same header appears multiple times in the request, they will be searched in order until a match is found. Only one pattern needs to match for the condition to be satisfied. To require that all of the strings are a match, create one condition block per string.
+
+#### Query String Blocks
+
+Query String Blocks (for `query_string`) support the following:
+
+* `values` - (Required) Query string pairs or values to match. Query String Value blocks documented below. Multiple `values` blocks can be specified; see the example above. Maximum size of each string is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). To search for a literal '\*' or '?' character in a query string, escape the character with a backslash (\\). Only one pair needs to match for the condition to be satisfied.
+
+Query String Value Blocks (for `query_string.values`) support the following:
+
+* `key` - (Optional) Query string key pattern to match.
+* `value` - (Required) Query string value pattern to match.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the rule (matches `arn`).
+* `arn` - The ARN of the rule (matches `id`).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import rules using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import rules using their ARN. For example:
+
+```console
+% terraform import aws_lb_listener_rule.front_end arn:aws:elasticloadbalancing:us-west-2:187416307283:listener-rule/app/test/8e4497da625e2d8a/9ab28ade35828f96/67b3d2d36dd7c26b
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lb_ssl_negotiation_policy.html.markdown b/website/docs/cdktf/python/r/lb_ssl_negotiation_policy.html.markdown
new file mode 100644
index 00000000000..7111f9b02e6
--- /dev/null
+++ b/website/docs/cdktf/python/r/lb_ssl_negotiation_policy.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_lb_ssl_negotiation_policy"
+description: |-
+  Provides a load balancer SSL negotiation policy, which allows an ELB to control which ciphers and protocols are supported during SSL negotiations between a client and a load balancer.
+---
+
+
+
+# Resource: aws_lb_ssl_negotiation_policy
+
+Provides a load balancer SSL negotiation policy, which allows an ELB to control the ciphers and protocols that are supported during SSL negotiations between a client and a load balancer.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elb import Elb
+from imports.aws.lb_ssl_negotiation_policy import LbSslNegotiationPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        lb = Elb(self, "lb",
+            availability_zones=["us-east-1a"],
+            listener=[ElbListener(
+                instance_port=8000,
+                instance_protocol="https",
+                lb_port=443,
+                lb_protocol="https",
+                ssl_certificate_id="arn:aws:iam::123456789012:server-certificate/certName"
+            )
+            ],
+            name="test-lb"
+        )
+        LbSslNegotiationPolicy(self, "foo",
+            attribute=[LbSslNegotiationPolicyAttribute(
+                name="Protocol-TLSv1",
+                value="false"
+            ), LbSslNegotiationPolicyAttribute(
+                name="Protocol-TLSv1.1",
+                value="false"
+            ), LbSslNegotiationPolicyAttribute(
+                name="Protocol-TLSv1.2",
+                value="true"
+            ), LbSslNegotiationPolicyAttribute(
+                name="Server-Defined-Cipher-Order",
+                value="true"
+            ), LbSslNegotiationPolicyAttribute(
+                name="ECDHE-RSA-AES128-GCM-SHA256",
+                value="true"
+            ), LbSslNegotiationPolicyAttribute(
+                name="AES128-GCM-SHA256",
+                value="true"
+            ), LbSslNegotiationPolicyAttribute(
+                name="EDH-RSA-DES-CBC3-SHA",
+                value="false"
+            )
+            ],
+            lb_port=443,
+            load_balancer=lb.id,
+            name="foo-policy"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the SSL negotiation policy.
+* `load_balancer` - (Required) The load balancer to which the policy
+  should be attached.
+* `lb_port` - (Required) The load balancer port to which the policy
+  should be applied. This must be an active listener on the load
+balancer.
+* `attribute` - (Optional) An SSL Negotiation policy attribute. Each has two properties:
+    * `name` - The name of the attribute.
+    * `value` - The value of the attribute.
+* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a redeployment. To force a redeployment without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html). See the sketch at the end of this page.
+
+To set your attributes, please see the [AWS Elastic Load Balancing Developer Guide](http://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/elb-security-policy-table.html) for a listing of the supported SSL protocols, SSL options, and SSL ciphers.
+
+~> **NOTE:** The AWS documentation references Server Order Preference, which the AWS Elastic Load Balancing API refers to as `Server-Defined-Cipher-Order`. If you wish to set Server Order Preference, use this value instead.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the policy.
+* `name` - The name of the SSL negotiation policy.
+* `load_balancer` - The load balancer to which the policy is attached.
+* `lb_port` - The load balancer port to which the policy is applied.
+* `attribute` - The SSL Negotiation policy attributes.
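+
+A sketch of the `triggers` argument, reusing the `lb` resource from the example above (the key name and certificate ARN are illustrative assumptions):
+
+```python
+LbSslNegotiationPolicy(self, "foo_with_triggers",
+    attribute=[LbSslNegotiationPolicyAttribute(
+        name="Protocol-TLSv1.2",
+        value="true"
+    )
+    ],
+    lb_port=443,
+    load_balancer=lb.id,
+    name="foo-policy",
+    # Recreate the policy whenever this value changes, e.g. after rotating
+    # the listener certificate (the key and value here are arbitrary).
+    triggers={
+        "certificate_arn": "arn:aws:iam::123456789012:server-certificate/certName"
+    }
+)
+```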
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lb_target_group.html.markdown b/website/docs/cdktf/python/r/lb_target_group.html.markdown new file mode 100644 index 00000000000..1dce2ba0687 --- /dev/null +++ b/website/docs/cdktf/python/r/lb_target_group.html.markdown @@ -0,0 +1,207 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb_target_group" +description: |- + Provides a Target Group resource for use with Load Balancers. +--- + + + +# Resource: aws_lb_target_group + +Provides a Target Group resource for use with Load Balancer resources. + +~> **Note:** `aws_alb_target_group` is known as `aws_lb_target_group`. The functionality is identical. + +## Example Usage + +### Instance Target Group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb_target_group import LbTargetGroup +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + main = Vpc(self, "main", + cidr_block="10.0.0.0/16" + ) + LbTargetGroup(self, "test", + name="tf-example-lb-tg", + port=80, + protocol="HTTP", + vpc_id=main.id + ) +``` + +### IP Target Group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb_target_group import LbTargetGroup +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + main = Vpc(self, "main", + cidr_block="10.0.0.0/16" + ) + LbTargetGroup(self, "ip-example", + name="tf-example-lb-tg", + port=80, + protocol="HTTP", + target_type="ip", + vpc_id=main.id + ) +``` + +### Lambda Target Group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb_target_group import LbTargetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LbTargetGroup(self, "lambda-example", + name="tf-example-lb-tg", + target_type="lambda" + ) +``` + +### ALB Target Group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lb_target_group import LbTargetGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LbTargetGroup(self, "alb-example", + name="tf-example-lb-alb-tg", + port=80, + protocol="TCP", + target_type="alb", + vpc_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `connection_termination` - (Optional) Whether to terminate connections at the end of the deregistration timeout on Network Load Balancers. 
See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#deregistration-delay) for more information. Default is `false`.
+* `deregistration_delay` - (Optional) Amount of time for Elastic Load Balancing to wait before changing the state of a deregistering target from draining to unused. The range is 0-3600 seconds. The default value is 300 seconds.
+* `health_check` - (Optional, Maximum of 1) Health Check configuration block. Detailed below.
+* `lambda_multi_value_headers_enabled` - (Optional) Whether the request and response headers exchanged between the load balancer and the Lambda function include arrays of values or strings. Only applies when `target_type` is `lambda`. Default is `false`.
+* `load_balancing_algorithm_type` - (Optional) Determines how the load balancer selects targets when routing requests. Only applicable for Application Load Balancer Target Groups. The value is `round_robin` or `least_outstanding_requests`. The default is `round_robin`.
+* `load_balancing_cross_zone_enabled` - (Optional) Indicates whether cross-zone load balancing is enabled. The value is `"true"`, `"false"` or `"use_load_balancer_configuration"`. The default is `"use_load_balancer_configuration"`.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. Cannot be longer than 6 characters.
+* `name` - (Optional, Forces new resource) Name of the target group. If omitted, Terraform will assign a random, unique name. This name must be unique per region per account, can have a maximum of 32 characters, must contain only alphanumeric characters or hyphens, and must not begin or end with a hyphen.
+* `port` - (May be required, Forces new resource) Port on which targets receive traffic, unless overridden when registering a specific target. Required when `target_type` is `instance`, `ip` or `alb`. Does not apply when `target_type` is `lambda`.
+* `preserve_client_ip` - (Optional) Whether client IP preservation is enabled. See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#client-ip-preservation) for more information.
+* `protocol_version` - (Optional, Forces new resource) Only applicable when `protocol` is `HTTP` or `HTTPS`. The protocol version. Specify `GRPC` to send requests to targets using gRPC. Specify `HTTP2` to send requests to targets using HTTP/2. The default is `HTTP1`, which sends requests to targets using HTTP/1.1.
+* `protocol` - (May be required, Forces new resource) Protocol to use for routing traffic to the targets. Should be one of `GENEVE`, `HTTP`, `HTTPS`, `TCP`, `TCP_UDP`, `TLS`, or `UDP`. Required when `target_type` is `instance`, `ip` or `alb`. Does not apply when `target_type` is `lambda`.
+* `proxy_protocol_v2` - (Optional) Whether to enable support for proxy protocol v2 on Network Load Balancers. See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#proxy-protocol) for more information. Default is `false`.
+* `slow_start` - (Optional) Amount of time for targets to warm up before the load balancer sends them a full share of requests. The range is 30-900 seconds or 0 to disable. The default value is 0 seconds.
+* `stickiness` - (Optional, Maximum of 1) Stickiness configuration block. Detailed below.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `target_failover` - (Optional) Target failover block. Only applicable for Gateway Load Balancer target groups. See [target_failover](#target_failover) for more information.
+* `target_type` - (May be required, Forces new resource) Type of target that you must specify when registering targets with this target group. See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/APIReference/API_CreateTargetGroup.html) for supported values. The default is `instance`.
+
+  Note that you can't specify targets for a target group using both instance IDs and IP addresses.
+
+  If the target type is `ip`, specify IP addresses from the subnets of the virtual private cloud (VPC) for the target group, the RFC 1918 range (10.0.0.0/8, 172.16.0.0/12, and 192.168.0.0/16), and the RFC 6598 range (100.64.0.0/10). You can't specify publicly routable IP addresses.
+
+  Network Load Balancers do not support the `lambda` target type.
+
+  Application Load Balancers do not support the `alb` target type.
+* `ip_address_type` - (Optional, Forces new resource) The type of IP addresses used by the target group, only supported when target type is set to `ip`. Possible values are `ipv4` or `ipv6`.
+* `vpc_id` - (Optional, Forces new resource) Identifier of the VPC in which to create the target group. Required when `target_type` is `instance`, `ip` or `alb`. Does not apply when `target_type` is `lambda`.
+
+### health_check
+
+~> **Note:** The Health Check parameters you can set vary by the `protocol` of the Target Group. Many parameters cannot be set to custom values for `network` load balancers at this time. See http://docs.aws.amazon.com/elasticloadbalancing/latest/APIReference/API_CreateTargetGroup.html for a complete reference. Keep in mind that health checks produce actual requests to the backend. The underlying function is invoked when `target_type` is set to `lambda`.
+
+* `enabled` - (Optional) Whether health checks are enabled. Defaults to `true`.
+* `healthy_threshold` - (Optional) Number of consecutive health check successes required before considering a target healthy. The range is 2-10. Defaults to 3.
+* `interval` - (Optional) Approximate amount of time, in seconds, between health checks of an individual target. The range is 5-300. For `lambda` target groups, it needs to be greater than the timeout of the underlying `lambda`. Defaults to 30.
+* `matcher` - (May be required) Response codes to use when checking for healthy responses from a target. You can specify multiple values (for example, "200,202" for HTTP(s) or "0,12" for GRPC) or a range of values (for example, "200-299" or "0-99"). Required for HTTP/HTTPS/GRPC ALB. Only applies to Application Load Balancers (i.e., HTTP/HTTPS/GRPC), not Network Load Balancers (i.e., TCP).
+* `path` - (May be required) Destination for the health check request. Required for HTTP/HTTPS ALB and HTTP NLB. Only applies to HTTP/HTTPS.
+* `port` - (Optional) The port the load balancer uses when performing health checks on targets. Default is `traffic-port`.
+* `protocol` - (Optional) Protocol the load balancer uses when performing health checks on targets. Must be either `TCP`, `HTTP`, or `HTTPS`. The TCP protocol is not supported for health checks if the protocol of the target group is HTTP or HTTPS. Defaults to HTTP.
+* `timeout` - (Optional) Amount of time, in seconds, during which no response from a target means a failed health check. The range is 2-120 seconds. For target groups with a protocol of HTTP, the default is 6 seconds. For target groups with a protocol of TCP, TLS or HTTPS, the default is 10 seconds. For target groups with a protocol of GENEVE, the default is 5 seconds. If the target type is lambda, the default is 30 seconds.
+* `unhealthy_threshold` - (Optional) Number of consecutive health check failures required before considering a target unhealthy. The range is 2-10. Defaults to 3.
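+
+A sketch of a tuned HTTP health check, reusing the `main` VPC from the first example (the path, matcher, and thresholds are illustrative; `LbTargetGroupHealthCheck` is the same struct used in the Gateway Load Balancer example of the listener page):
+
+```python
+LbTargetGroup(self, "tuned",
+    name="tf-example-lb-tg-hc",
+    port=80,
+    protocol="HTTP",
+    vpc_id=main.id,
+    health_check=LbTargetGroupHealthCheck(
+        enabled=True,
+        path="/healthz",
+        matcher="200-299",  # accept any 2xx response
+        interval=15,
+        timeout=5,
+        healthy_threshold=3,
+        unhealthy_threshold=3
+    )
+)
+```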
+
+### stickiness
+
+~> **NOTE:** Currently, an NLB can have an invalid `stickiness` block with `type` set to `lb_cookie` as long as `enabled` is set to `false`. However, please update your configurations to avoid errors in a future version of the provider: either remove the invalid `stickiness` block or set the `type` to `source_ip`.
+
+* `cookie_duration` - (Optional) Only used when the type is `lb_cookie`. The time period, in seconds, during which requests from a client should be routed to the same target. After this time period expires, the load balancer-generated cookie is considered stale. The range is 1 second to 1 week (604800 seconds). The default value is 1 day (86400 seconds).
+* `cookie_name` - (Optional) Name of the application-based cookie. AWSALB, AWSALBAPP, and AWSALBTG prefixes are reserved and cannot be used. Only needed when type is `app_cookie`.
+* `enabled` - (Optional) Boolean to enable / disable `stickiness`. Default is `true`.
+* `type` - (Required) The type of sticky sessions. Valid values are `lb_cookie` and `app_cookie` for ALBs, `source_ip` for NLBs, and `source_ip_dest_ip` or `source_ip_dest_ip_proto` for GWLBs.
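+
+A sketch of application-based cookie stickiness on an ALB target group (the cookie name is illustrative, and the `LbTargetGroupStickiness` struct name is assumed from the generated binding naming):
+
+```python
+LbTargetGroup(self, "sticky",
+    name="tf-example-lb-tg-sticky",
+    port=80,
+    protocol="HTTP",
+    vpc_id=main.id,
+    stickiness=LbTargetGroupStickiness(
+        cookie_name="my-app-session",  # must avoid the reserved AWSALB* prefixes
+        enabled=True,
+        type="app_cookie"
+    )
+)
+```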
+
+### target_failover
+
+~> **NOTE:** This block is only applicable for a Gateway Load Balancer (GWLB). The two attributes `on_deregistration` and `on_unhealthy` cannot be set independently. The value you set for both attributes must be the same.
+
+* `on_deregistration` - (Optional) Indicates how the GWLB handles existing flows when a target is deregistered. Possible values are `rebalance` and `no_rebalance`. Must match the attribute value set for `on_unhealthy`. Default: `no_rebalance`.
+* `on_unhealthy` - (Optional) Indicates how the GWLB handles existing flows when a target is unhealthy. Possible values are `rebalance` and `no_rebalance`. Must match the attribute value set for `on_deregistration`. Default: `no_rebalance`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn_suffix` - ARN suffix for use with CloudWatch Metrics.
+* `arn` - ARN of the Target Group (matches `id`).
+* `id` - ARN of the Target Group (matches `arn`).
+* `name` - Name of the Target Group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Target Groups using their ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Target Groups using their ARN. For example:
+
+```console
+% terraform import aws_lb_target_group.app_front_end arn:aws:elasticloadbalancing:us-west-2:187416307283:targetgroup/app-front-end/20cfe21448b66314
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lb_target_group_attachment.html.markdown b/website/docs/cdktf/python/r/lb_target_group_attachment.html.markdown
new file mode 100644
index 00000000000..355b3f88a97
--- /dev/null
+++ b/website/docs/cdktf/python/r/lb_target_group_attachment.html.markdown
@@ -0,0 +1,158 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_target_group_attachment"
+description: |-
+  Provides the ability to register instances and containers with an LB
+  target group
+---
+
+
+
+# Resource: aws_lb_target_group_attachment
+
+Provides the ability to register instances and containers with an Application Load Balancer (ALB) or Network Load Balancer (NLB) target group. For attaching resources with an Elastic Load Balancer (ELB), see the [`aws_elb_attachment` resource](/docs/providers/aws/r/elb_attachment.html).
+
+~> **Note:** `aws_alb_target_group_attachment` is known as `aws_lb_target_group_attachment`. The functionality is identical.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.instance import Instance
+from imports.aws.lb_target_group import LbTargetGroup
+from imports.aws.lb_target_group_attachment import LbTargetGroupAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = Instance(self, "test")
+        aws_lb_target_group_test = LbTargetGroup(self, "test_1")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_target_group_test.override_logical_id("test")
+        aws_lb_target_group_attachment_test = LbTargetGroupAttachment(self, "test_2",
+            port=80,
+            target_group_arn=Token.as_string(aws_lb_target_group_test.arn),
+            target_id=test.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_target_group_attachment_test.override_logical_id("test")
+```
+
+### Lambda Target
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lambda_function import LambdaFunction
+from imports.aws.lambda_permission import LambdaPermission
+from imports.aws.lb_target_group import LbTargetGroup
+from imports.aws.lb_target_group_attachment import LbTargetGroupAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, function_name, role):
+        super().__init__(scope, name)
+        test = LambdaFunction(self, "test",
+            function_name=function_name,
+            role=role
+        )
+        aws_lb_target_group_test = LbTargetGroup(self, "test_1",
+            name="test",
+            target_type="lambda"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_target_group_test.override_logical_id("test")
+        with_lb = LambdaPermission(self, "with_lb",
+            action="lambda:InvokeFunction",
+            function_name=test.function_name,
+            principal="elasticloadbalancing.amazonaws.com",
+            source_arn=Token.as_string(aws_lb_target_group_test.arn),
+            statement_id="AllowExecutionFromlb"
+        )
+        aws_lb_target_group_attachment_test = LbTargetGroupAttachment(self, "test_3",
+            depends_on=[with_lb],
+            target_group_arn=Token.as_string(aws_lb_target_group_test.arn),
+            target_id=test.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_target_group_attachment_test.override_logical_id("test")
+```
+
+### Registering Multiple Targets
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformCount, TerraformIterator, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.instance import Instance
+from imports.aws.lb_target_group import LbTargetGroup
+from imports.aws.lb_target_group_attachment import LbTargetGroupAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        example_count = TerraformCount.of(Token.as_number("3"))
+        example = Instance(self, "example",
+            count=example_count
+        )
+        aws_lb_target_group_example = LbTargetGroup(self, "example_1")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_target_group_example.override_logical_id("example")
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
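+        # The iterator below embeds the HCL expression
+        # `{ for k, v in aws_instance.example : v.id => v }`, turning the list of
+        # instances into a map keyed by instance id so that `for_each` creates
+        # one attachment per instance.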
+        example_for_each_iterator = TerraformIterator.from_list(
+            Token.as_any("${{ for k, v in ${" + example.fqn + "} : v.id => v}}"))
+        aws_lb_target_group_attachment_example = LbTargetGroupAttachment(self, "example_2",
+            port=80,
+            target_group_arn=Token.as_string(aws_lb_target_group_example.arn),
+            target_id=Token.as_string(
+                property_access(example_for_each_iterator.value, ["id"])),
+            for_each=example_for_each_iterator
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lb_target_group_attachment_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `target_group_arn` - (Required) The ARN of the target group with which to register targets.
+* `target_id` - (Required) The ID of the target. This is the Instance ID for an instance, or the container ID for an ECS container. If the target type is `ip`, specify an IP address. If the target type is `lambda`, specify the Lambda function ARN. If the target type is `alb`, specify the ALB ARN.
+
+The following arguments are optional:
+
+* `availability_zone` - (Optional) The Availability Zone where the IP address of the target is to be registered. If the private IP address is outside of the VPC scope, this value must be set to `all`.
+* `port` - (Optional) The port on which targets receive traffic.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A unique identifier for the attachment.
+
+## Import
+
+You cannot import Target Group Attachments.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lex_bot.html.markdown b/website/docs/cdktf/python/r/lex_bot.html.markdown
new file mode 100644
index 00000000000..752f64e1337
--- /dev/null
+++ b/website/docs/cdktf/python/r/lex_bot.html.markdown
@@ -0,0 +1,165 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_bot"
+description: |-
+  Provides an Amazon Lex bot resource.
+---
+
+
+
+# Resource: aws_lex_bot
+
+Provides an Amazon Lex Bot resource. For more information, see
+[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lex_bot import (LexBot, LexBotAbortStatement,
+    LexBotAbortStatementMessage, LexBotClarificationPrompt,
+    LexBotClarificationPromptMessage, LexBotIntent)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LexBot(self, "order_flowers_bot",
+            abort_statement=LexBotAbortStatement(
+                message=[LexBotAbortStatementMessage(
+                    content="Sorry, I am not able to assist at this time",
+                    content_type="PlainText"
+                )
+                ]
+            ),
+            child_directed=False,
+            clarification_prompt=LexBotClarificationPrompt(
+                max_attempts=2,
+                message=[LexBotClarificationPromptMessage(
+                    content="I didn't understand you, what would you like to do?",
+                    content_type="PlainText"
+                )
+                ]
+            ),
+            create_version=False,
+            description="Bot to order flowers on the behalf of a user",
+            idle_session_ttl_in_seconds=600,
+            intent=[LexBotIntent(
+                intent_name="OrderFlowers",
+                intent_version="1"
+            )
+            ],
+            locale="en-US",
+            name="OrderFlowers",
+            process_behavior="BUILD",
+            voice_id="Salli"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `abort_statement` - (Required) The message that Amazon Lex uses to abort a conversation. Attributes are documented under [statement](#statement).
+* `child_directed` - (Required) By specifying true, you confirm that your use of Amazon Lex is related to a website, program, or other application that is directed or targeted, in whole or in part, to children under age 13 and subject to COPPA. For more information see the [Amazon Lex FAQ](https://aws.amazon.com/lex/faqs#data-security) and the [Amazon Lex PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-childDirected).
+* `clarification_prompt` - (Required) The message that Amazon Lex uses when it doesn't understand the user's request. Attributes are documented under [prompt](#prompt).
+* `create_version` - (Optional) Determines if a new bot version is created when the initial resource is created and on each update. Defaults to `false`.
+* `description` - (Optional) A description of the bot. Must be less than or equal to 200 characters in length.
+* `detect_sentiment` - (Optional) When set to `true`, user utterances are sent to Amazon Comprehend for sentiment analysis. If you don't specify `detect_sentiment`, the default is `false`.
+* `enable_model_improvements` - (Optional) Set to `true` to enable access to natural language understanding improvements. When you set the `enable_model_improvements` parameter to `true`, you can use the `nlu_intent_confidence_threshold` parameter to configure confidence scores. For more information, see [Confidence Scores](https://docs.aws.amazon.com/lex/latest/dg/confidence-scores.html). You can only set the `enable_model_improvements` parameter in certain Regions. If you set the parameter to `true`, your bot has access to accuracy improvements. For more information see the [Amazon Lex Bot PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-enableModelImprovements).
+* `idle_session_ttl_in_seconds` - (Optional) The maximum time in seconds that Amazon Lex retains the data gathered in a conversation. Default is `300`. Must be a number between 60 and 86400 (inclusive).
+* `locale` - (Optional) Specifies the target locale for the bot. Any intent used in the bot must be compatible with the locale of the bot. For available locales, see the [Amazon Lex Bot PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-locale). Default is `en-US`.
+* `intent` - (Required) A set of Intent objects. Each intent represents a command that a user can express. Attributes are documented under [intent](#intent). Can have up to 250 Intent objects.
+* `name` - (Required) The name of the bot that you want to create, case sensitive. Must be between 2 and 50 characters in length.
+* `nlu_intent_confidence_threshold` - (Optional) Determines the threshold where Amazon Lex will insert the AMAZON.FallbackIntent, AMAZON.KendraSearchIntent, or both when returning alternative intents in a PostContent or PostText response. AMAZON.FallbackIntent and AMAZON.KendraSearchIntent are only inserted if they are configured for the bot. For more information, see the [Amazon Lex Bot PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-nluIntentConfidenceThreshold). This value requires `enable_model_improvements` to be set to `true`, and the default is `0`. Must be a float between 0 and 1.
+* `process_behavior` - (Optional) If you set the `process_behavior` element to `BUILD`, Amazon Lex builds the bot so that it can be run. If you set the element to `SAVE`, Amazon Lex saves the bot, but doesn't build it. Default is `SAVE`.
+* `voice_id` - (Optional) The Amazon Polly voice ID that you want Amazon Lex to use for voice interactions with the user. The locale configured for the voice must match the locale of the bot. For more information, see [Available Voices](http://docs.aws.amazon.com/polly/latest/dg/voicelist.html) in the Amazon Polly Developer Guide.
+
+### intent
+
+Identifies the specific version of an intent.
+
+* `intent_name` - (Required) The name of the intent. Must be less than or equal to 100 characters in length.
+* `intent_version` - (Required) The version of the intent. Must be less than or equal to 64 characters in length.
+
+### message
+
+The message object that provides the message text and its type.
+
+* `content` - (Required) The text of the message.
+* `content_type` - (Required) The content type of the message string.
+* `group_number` - (Optional) Identifies the message group that the message belongs to. When a group
+is assigned to a message, Amazon Lex returns one message from each group in the response.
+
+### prompt
+
+Obtains information from the user. To define a prompt, provide one or more messages and specify the
+number of attempts to get information from the user. If you provide more than one message, Amazon
+Lex chooses one of the messages to use to prompt the user.
+
+* `max_attempts` - (Required) The number of times to prompt the user for information.
+* `message` - (Required) A set of messages, each of which provides a message string and its type.
+You can specify the message string in plain text or in Speech Synthesis Markup Language (SSML).
+Attributes are documented under [message](#message).
+* `response_card` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html).
+
+### statement
+
+A statement is a map with a set of message maps and an optional response card string. Messages
+convey information to the user. At runtime, Amazon Lex selects the message to convey.
+
+* `message` - (Required) A set of messages, each of which provides a message string and its type. You
+can specify the message string in plain text or in Speech Synthesis Markup Language (SSML). Attributes
+are documented under [message](#message).
+* `response_card` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `checksum` - Checksum identifying the version of the bot that was created. The checksum is not
+included as an argument because the resource will add it automatically when updating the bot.
+* `created_date` - The date when the bot version was created.
+* `failure_reason` - If `status` is `FAILED`, Amazon Lex provides the reason that it failed to build the bot.
+* `last_updated_date` - The date when the $LATEST version of this bot was updated.
+* `status` - When you send a request to create or update a bot, Amazon Lex sets the status response
+element to `BUILDING`. After Amazon Lex builds the bot, it sets status to `READY`. If Amazon Lex can't
+build the bot, it sets status to `FAILED`. Amazon Lex returns the reason for the failure in the
+`failure_reason` response element.
+* `version` - The version of the bot.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import bots using their name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import bots using their name. For example:
+
+```console
+% terraform import aws_lex_bot.order_flowers_bot OrderFlowers
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lex_bot_alias.html.markdown b/website/docs/cdktf/python/r/lex_bot_alias.html.markdown
new file mode 100644
index 00000000000..33974bc6c4b
--- /dev/null
+++ b/website/docs/cdktf/python/r/lex_bot_alias.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_bot_alias"
+description: |-
+  Provides an Amazon Lex Bot Alias resource.
+---
+
+
+
+# Resource: aws_lex_bot_alias
+
+Provides an Amazon Lex Bot Alias resource. For more information, see
+[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lex_bot_alias import LexBotAlias
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LexBotAlias(self, "order_flowers_prod",
+            bot_name="OrderFlowers",
+            bot_version="1",
+            description="Production Version of the OrderFlowers Bot.",
+            name="OrderFlowersProd"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bot_name` - (Required) The name of the bot.
+* `bot_version` - (Required) The version of the bot.
+* `conversation_logs` - (Optional) The settings that determine how Amazon Lex uses conversation logs for the alias. Attributes are documented under [conversation_logs](#conversation_logs). +* `description` - (Optional) A description of the alias. Must be less than or equal to 200 characters in length. +* `name` - (Required) The name of the alias. The name is not case sensitive. Must be less than or equal to 100 characters in length. + +### conversation_logs + +Contains information about conversation log settings. + +* `iam_role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM role used to write your logs to CloudWatch Logs or an S3 bucket. Must be between 20 and 2048 characters in length. +* `log_settings` - (Optional) The settings for your conversation logs. You can log text, audio, or both. Attributes are documented under [log_settings](#log_settings). + +### log_settings + +The settings for conversation logs. + +* `destination` - (Required) The destination where logs are delivered. Options are `CLOUDWATCH_LOGS` or `S3`. +* `kms_key_arn` - (Optional) The Amazon Resource Name (ARN) of the key used to encrypt audio logs in an S3 bucket. This can only be specified when `destination` is set to `S3`. Must be between 20 and 2048 characters in length. +* `log_type` - (Required) The type of logging that is enabled. Options are `AUDIO` or `TEXT`. +* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the CloudWatch Logs log group or S3 bucket where the logs are delivered. Must be less than or equal to 2048 characters in length. +* `resource_prefix` - (Computed) The prefix of the S3 object key for `AUDIO` logs or the log stream name for `TEXT` logs. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the bot alias. +* `checksum` - Checksum of the bot alias. +* `created_date` - The date that the bot alias was created. +* `last_updated_date` - The date that the bot alias was updated. When you create a resource, the creation date and the last updated date are the same. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1m`) +* `update` - (Default `1m`) +* `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import bot aliases using an ID with the format `bot_name:bot_alias_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import bot aliases using an ID with the format `bot_name:bot_alias_name`. For example: + +```console +% terraform import aws_lex_bot_alias.order_flowers_prod OrderFlowers:OrderFlowersProd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lex_intent.html.markdown b/website/docs/cdktf/python/r/lex_intent.html.markdown new file mode 100644 index 00000000000..95b05d71b47 --- /dev/null +++ b/website/docs/cdktf/python/r/lex_intent.html.markdown @@ -0,0 +1,270 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_intent" +description: |- + Provides an Amazon Lex intent resource. 
+---
+
+
+# Resource: aws_lex_intent
+
+Provides an Amazon Lex Intent resource. For more information, see
+[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lex_intent import (LexIntent, LexIntentConfirmationPrompt,
+    LexIntentConfirmationPromptMessage, LexIntentFulfillmentActivity,
+    LexIntentRejectionStatement, LexIntentRejectionStatementMessage,
+    LexIntentSlot, LexIntentSlotValueElicitationPrompt,
+    LexIntentSlotValueElicitationPromptMessage)
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LexIntent(self, "order_flowers_intent",
+            confirmation_prompt=LexIntentConfirmationPrompt(
+                max_attempts=2,
+                message=[LexIntentConfirmationPromptMessage(
+                    content="Okay, your {FlowerType} will be ready for pickup by {PickupTime} on {PickupDate}. Does this sound okay?",
+                    content_type="PlainText"
+                )
+                ]
+            ),
+            create_version=False,
+            description="Intent to order a bouquet of flowers for pick up",
+            fulfillment_activity=LexIntentFulfillmentActivity(
+                type="ReturnIntent"
+            ),
+            name="OrderFlowers",
+            rejection_statement=LexIntentRejectionStatement(
+                message=[LexIntentRejectionStatementMessage(
+                    content="Okay, I will not place your order.",
+                    content_type="PlainText"
+                )
+                ]
+            ),
+            sample_utterances=["I would like to order some flowers", "I would like to pick up flowers"
+            ],
+            slot=[LexIntentSlot(
+                description="The type of flowers to pick up",
+                name="FlowerType",
+                priority=1,
+                sample_utterances=["I would like to order {FlowerType}"],
+                slot_constraint="Required",
+                slot_type="FlowerTypes",
+                slot_type_version="$$LATEST",
+                value_elicitation_prompt=LexIntentSlotValueElicitationPrompt(
+                    max_attempts=2,
+                    message=[LexIntentSlotValueElicitationPromptMessage(
+                        content="What type of flowers would you like to order?",
+                        content_type="PlainText"
+                    )
+                    ]
+                )
+            ), LexIntentSlot(
+                description="The date to pick up the flowers",
+                name="PickupDate",
+                priority=2,
+                sample_utterances=["I would like to order {FlowerType}"],
+                slot_constraint="Required",
+                slot_type="AMAZON.DATE",
+                slot_type_version="$$LATEST",
+                value_elicitation_prompt=LexIntentSlotValueElicitationPrompt(
+                    max_attempts=2,
+                    message=[LexIntentSlotValueElicitationPromptMessage(
+                        content="What day do you want the {FlowerType} to be picked up?",
+                        content_type="PlainText"
+                    )
+                    ]
+                )
+            ), LexIntentSlot(
+                description="The time to pick up the flowers",
+                name="PickupTime",
+                priority=3,
+                sample_utterances=["I would like to order {FlowerType}"],
+                slot_constraint="Required",
+                slot_type="AMAZON.TIME",
+                slot_type_version="$$LATEST",
+                value_elicitation_prompt=LexIntentSlotValueElicitationPrompt(
+                    max_attempts=2,
+                    message=[LexIntentSlotValueElicitationPromptMessage(
+                        content="Pick up the {FlowerType} at what time on {PickupDate}?",
+                        content_type="PlainText"
+                    )
+                    ]
+                )
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `conclusion_statement` - (Optional) The statement that you want Amazon Lex to convey to the user
+after the intent is successfully fulfilled by the Lambda function. This element is relevant only if
+you provide a Lambda function in the `fulfillment_activity`. If you return the intent to the client
+application, you can't specify this element. The `follow_up_prompt` and `conclusion_statement` are
+mutually exclusive. You can specify only one. Attributes are documented under [statement](#statement).
+* `confirmation_prompt` - (Optional) Prompts the user to confirm the intent. This question should
+have a yes or no answer. You must provide both the `rejection_statement` and `confirmation_prompt`,
+or neither. Attributes are documented under [prompt](#prompt).
+* `create_version` - (Optional) Determines if a new intent version is created when the initial
+resource is created and on each update. Defaults to `false`.
+* `description` - (Optional) A description of the intent. Must be less than or equal to 200 characters in length.
+* `dialog_code_hook` - (Optional) Specifies a Lambda function to invoke for each user input. You can
+invoke this Lambda function to personalize user interaction. Attributes are documented under [code_hook](#code_hook).
+* `follow_up_prompt` - (Optional) Amazon Lex uses this prompt to solicit additional activity after
+fulfilling an intent. For example, after the OrderPizza intent is fulfilled, you might prompt the
+user to order a drink. The `follow_up_prompt` field and the `conclusion_statement` field are mutually
+exclusive. You can specify only one. Attributes are documented under [follow_up_prompt](#follow_up_prompt).
+* `fulfillment_activity` - (Required) Describes how the intent is fulfilled. For example, after a
+user provides all of the information for a pizza order, `fulfillment_activity` defines how the bot
+places an order with a local pizza store. Attributes are documented under [fulfillment_activity](#fulfillment_activity).
+* `name` - (Required) The name of the intent, not case sensitive. Must be less than or equal to 100 characters in length.
+* `parent_intent_signature` - (Optional) A unique identifier for the built-in intent to base this
+intent on. To find the signature for an intent, see
+[Standard Built-in Intents](https://developer.amazon.com/public/solutions/alexa/alexa-skills-kit/docs/built-in-intent-ref/standard-intents)
+in the Alexa Skills Kit.
+* `rejection_statement` - (Optional) When the user answers "no" to the question defined in
+`confirmation_prompt`, Amazon Lex responds with this statement to acknowledge that the intent was
+canceled. You must provide both the `rejection_statement` and the `confirmation_prompt`, or neither.
+Attributes are documented under [statement](#statement).
+* `sample_utterances` - (Optional) An array of utterances (strings) that a user might say to signal
+the intent. For example, "I want {PizzaSize} pizza", "Order {Quantity} {PizzaSize} pizzas".
+In each utterance, a slot name is enclosed in curly braces. Must have between 1 and 10 items in the list, and each item must be less than or equal to 200 characters in length.
+* `slot` - (Optional) A list of intent slots. At runtime, Amazon Lex elicits required slot values
+from the user using prompts defined in the slots. Attributes are documented under [slot](#slot).
+
+### code_hook
+
+Specifies a Lambda function that verifies requests to a bot or fulfills the user's request to a bot.
+
+* `message_version` - (Required) The version of the request-response that you want Amazon Lex to use
+to invoke your Lambda function. For more information, see
+[Using Lambda Functions](https://docs.aws.amazon.com/lex/latest/dg/using-lambda.html). Must be less than or equal to 5 characters in length.
+* `uri` - (Required) The Amazon Resource Name (ARN) of the Lambda function.
+
+### follow_up_prompt
+
+A prompt for additional activity after an intent is fulfilled. For example, after the OrderPizza
+intent is fulfilled, you might prompt the user to find out whether the user wants to order drinks.
+
+* `prompt` - (Required) Prompts for information from the user. Attributes are documented under [prompt](#prompt).
+* `rejection_statement` - (Optional) If the user answers "no" to the question defined in the prompt field,
+Amazon Lex responds with this statement to acknowledge that the intent was canceled. Attributes are
+documented below under [statement](#statement).
+
+### fulfillment_activity
+
+Describes how the intent is fulfilled after the user provides all of the information required for the intent.
+
+* `type` - (Required) How the intent should be fulfilled, either by running a Lambda function or by
+returning the slot data to the client application. Type can be either `ReturnIntent` or `CodeHook`, as documented [here](https://docs.aws.amazon.com/lex/latest/dg/API_FulfillmentActivity.html).
+* `code_hook` - (Optional) A description of the Lambda function that is run to fulfill the intent.
+Required if `type` is `CodeHook`. Attributes are documented under [code_hook](#code_hook).
+
+### message
+
+The message object that provides the message text and its type.
+
+* `content` - (Required) The text of the message. Must be less than or equal to 1000 characters in length.
+* `content_type` - (Required) The content type of the message string.
+* `group_number` - (Optional) Identifies the message group that the message belongs to. When a group
+is assigned to a message, Amazon Lex returns one message from each group in the response. Must be a number between 1 and 5 (inclusive).
+
+### prompt
+
+Obtains information from the user. To define a prompt, provide one or more messages and specify the
+number of attempts to get information from the user. If you provide more than one message, Amazon
+Lex chooses one of the messages to use to prompt the user.
+
+* `max_attempts` - (Required) The number of times to prompt the user for information. Must be a number between 1 and 5 (inclusive).
+* `message` - (Required) A set of messages, each of which provides a message string and its type.
+You can specify the message string in plain text or in Speech Synthesis Markup Language (SSML).
+Attributes are documented under [message](#message). Must contain between 1 and 15 messages.
+* `response_card` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html). Must be less than or equal to 50000 characters in length.
+
+### slot
+
+Identifies the version of a specific slot.
+
+* `name` - (Required) The name of the intent slot that you want to create. The name is case sensitive. Must be less than or equal to 100 characters in length.
+* `slot_constraint` - (Required) Specifies whether the slot is required or optional.
+* `description` - (Optional) A description of the slot. Must be less than or equal to 200 characters in length.
+* `priority` - (Optional) Specifies the order in which Amazon Lex elicits this slot value from the user.
+For example, if the intent has two slots with priorities 1 and 2, Amazon Lex first elicits a value for
+the slot with priority 1. If multiple slots share the same priority, the order in which Lex elicits
+values is arbitrary. Must be between 1 and 100.
+* `response_card` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html). Must be less than or equal to 50000 characters in length.
+* `sample_utterances` - (Optional) If you know a specific pattern with which users might respond to
+an Amazon Lex request for a slot value, you can provide those utterances to improve accuracy. This
+is optional. In most cases, Amazon Lex is capable of understanding user utterances. Must have between 1 and 10 items in the list, and each item must be less than or equal to 200 characters in length.
+* `slot_type` - (Optional) The type of the slot, either a custom slot type that you defined or one of
+the built-in slot types. Must be less than or equal to 100 characters in length.
+* `slot_type_version` - (Optional) The version of the slot type. Must be less than or equal to 64 characters in length.
+* `value_elicitation_prompt` - (Optional) The prompt that Amazon Lex uses to elicit the slot value
+from the user. Attributes are documented under [prompt](#prompt).
+
+### statement
+
+A statement is a map with a set of message maps and an optional response card string. Messages
+convey information to the user. At runtime, Amazon Lex selects the message to convey.
+
+* `message` - (Required) A set of messages, each of which provides a message string and its type.
+You can specify the message string in plain text or in Speech Synthesis Markup Language (SSML).
+Attributes are documented under [message](#message). Must contain between 1 and 15 messages.
+* `response_card` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html). Must be less than or equal to 50000 characters in length.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the Lex intent.
+* `checksum` - Checksum identifying the version of the intent that was created. The checksum is not
+included as an argument because the resource will add it automatically when updating the intent.
+* `created_date` - The date when the intent version was created.
+* `last_updated_date` - The date when the $LATEST version of this intent was updated.
+* `version` - The version of the intent.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `1m`)
+* `update` - (Default `1m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import intents using their name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import intents using their name. For example:
+
+```console
+% terraform import aws_lex_intent.order_flowers_intent OrderFlowers
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lex_slot_type.html.markdown b/website/docs/cdktf/python/r/lex_slot_type.html.markdown
new file mode 100644
index 00000000000..869c7f3933a
--- /dev/null
+++ b/website/docs/cdktf/python/r/lex_slot_type.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_slot_type"
+description: |-
+  Provides an Amazon Lex Slot Type resource.
+---
+
+
+
+# Resource: aws_lex_slot_type
+
+Provides an Amazon Lex Slot Type resource. For more information, see
+[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lex_slot_type import LexSlotType, LexSlotTypeEnumerationValue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LexSlotType(self, "flower_types",
+            create_version=True,
+            description="Types of flowers to order",
+            enumeration_value=[LexSlotTypeEnumerationValue(
+                synonyms=["Lirium", "Martagon"],
+                value="lilies"
+            ), LexSlotTypeEnumerationValue(
+                synonyms=["Eduardoregelia", "Podonix"],
+                value="tulips"
+            )
+            ],
+            name="FlowerTypes",
+            value_selection_strategy="ORIGINAL_VALUE"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `enumeration_value` - (Required) A list of EnumerationValue objects that defines the values that
+the slot type can take. Each value can have a list of synonyms, which are additional values that help
+train the machine learning model about the values that it resolves for a slot. Attributes are
+documented under [enumeration_value](#enumeration_value).
+* `name` - (Required) The name of the slot type. The name is not case sensitive. Must be less than or equal to 100 characters in length.
+* `create_version` - (Optional) Determines if a new slot type version is created when the initial
+resource is created and on each update. Defaults to `false`.
+* `description` - (Optional) A description of the slot type. Must be less than or equal to 200 characters in length.
+* `value_selection_strategy` - (Optional) Determines the slot resolution strategy that Amazon Lex
+uses to return slot type values. `ORIGINAL_VALUE` returns the value entered by the user if the user
+value is similar to the slot value. `TOP_RESOLUTION` returns the first value in the resolution list
+if there is a resolution list for the slot, otherwise null is returned. Defaults to `ORIGINAL_VALUE`.
+
+### enumeration_value
+
+Each slot type can have a set of values. Each enumeration value represents a value the slot type
+can take.
+
+For example, a pizza ordering bot could have a slot type that specifies the type of crust that the
+pizza should have. The slot type could include the values: thick, thin, stuffed.
+
+* `synonyms` - (Optional) Additional values related to the slot type value. Each item must be less than or equal to 140 characters in length.
+* `value` - (Required) The value of the slot type. Must be less than or equal to 140 characters in length.
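+
+As an illustrative sketch of that pizza example (the `PizzaCrustTypes` name and its values are assumptions for demonstration, not part of this resource's documentation), the slot type might look like:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.lex_slot_type import LexSlotType, LexSlotTypeEnumerationValue
+class PizzaCrustExample(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Hypothetical slot type mirroring the pizza-crust example above.
+        LexSlotType(self, "pizza_crust_types",
+            name="PizzaCrustTypes",
+            description="Types of crust that the pizza should have",
+            enumeration_value=[LexSlotTypeEnumerationValue(
+                value="thick",
+                synonyms=["deep dish"]
+            ), LexSlotTypeEnumerationValue(
+                value="thin"
+            ), LexSlotTypeEnumerationValue(
+                value="stuffed"
+            )
+            ],
+            value_selection_strategy="ORIGINAL_VALUE"
+        )
+```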
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `1m`)
+* `update` - (Default `1m`)
+* `delete` - (Default `5m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `checksum` - Checksum identifying the version of the slot type that was created. The checksum is
+not included as an argument because the resource will add it automatically when updating the slot type.
+* `created_date` - The date when the slot type version was created.
+* `last_updated_date` - The date when the `$LATEST` version of this slot type was updated.
+* `version` - The version of the slot type.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import slot types using their name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import slot types using their name. For example:
+
+```console
+% terraform import aws_lex_slot_type.flower_types FlowerTypes
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/licensemanager_association.markdown b/website/docs/cdktf/python/r/licensemanager_association.markdown
new file mode 100644
index 00000000000..893c63aaafa
--- /dev/null
+++ b/website/docs/cdktf/python/r/licensemanager_association.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_association"
+description: |-
+  Provides a License Manager association resource.
+---
+
+
+
+# Resource: aws_licensemanager_association
+
+Provides a License Manager association.
+
+~> **Note:** License configurations can also be associated with launch templates by specifying the `license_specifications` block for an `aws_launch_template`.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_ami import DataAwsAmi, DataAwsAmiFilter
+from imports.aws.instance import Instance
+from imports.aws.licensemanager_association import LicensemanagerAssociation
+from imports.aws.licensemanager_license_configuration import LicensemanagerLicenseConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = LicensemanagerLicenseConfiguration(self, "example",
+            license_counting_type="Instance",
+            name="Example"
+        )
+        data_aws_ami_example = DataAwsAmi(self, "example_1",
+            filter=[DataAwsAmiFilter(
+                name="name",
+                values=["amzn-ami-vpc-nat*"]
+            )
+            ],
+            most_recent=True,
+            owners=["amazon"]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_ami_example.override_logical_id("example")
+        aws_instance_example = Instance(self, "example_2",
+            ami=Token.as_string(data_aws_ami_example.id),
+            instance_type="t2.micro"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_instance_example.override_logical_id("example")
+        aws_licensemanager_association_example = LicensemanagerAssociation(self, "example_3",
+            license_configuration_arn=example.arn,
+            resource_arn=Token.as_string(aws_instance_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_licensemanager_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `license_configuration_arn` - (Required) ARN of the license configuration.
+* `resource_arn` - (Required) ARN of the resource associated with the license configuration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The license configuration ARN.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import license configurations using `resource_arn,license_configuration_arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import license configurations using `resource_arn,license_configuration_arn`. For example:
+
+```console
+% terraform import aws_licensemanager_association.example arn:aws:ec2:eu-west-1:123456789012:image/ami-123456789abcdef01,arn:aws:license-manager:eu-west-1:123456789012:license-configuration:lic-0123456789abcdef0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/licensemanager_grant.html.markdown b/website/docs/cdktf/python/r/licensemanager_grant.html.markdown
new file mode 100644
index 00000000000..e40eb2e7375
--- /dev/null
+++ b/website/docs/cdktf/python/r/licensemanager_grant.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_grant"
+description: |-
+  Provides a License Manager grant resource.
+---
+
+
+
+# Resource: aws_licensemanager_grant
+
+Provides a License Manager grant. This allows for sharing licenses with other AWS accounts.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.licensemanager_grant import LicensemanagerGrant
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LicensemanagerGrant(self, "test",
+            allowed_operations=["ListPurchasedLicenses", "CheckoutLicense", "CheckInLicense", "ExtendConsumptionLicense", "CreateToken"
+            ],
+            home_region="us-east-1",
+            license_arn="arn:aws:license-manager::111111111111:license:l-exampleARN",
+            name="share-license-with-account",
+            principal="arn:aws:iam::111111111112:root"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the grant.
+* `allowed_operations` - (Required) A list of the allowed operations for the grant. This is a subset of the allowed operations on the license.
+* `license_arn` - (Required) The ARN of the license to grant.
+* `principal` - (Required) The target account for the grant, given as the ARN of the root user of that account.
+* `home_region` - (Required) The home region for the license.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The grant ARN (same as `arn`).
+* `arn` - The grant ARN.
+* `parent_arn` - The parent ARN.
+* `status` - The grant status.
+* `version` - The grant version.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_licensemanager_grant` using the grant ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_licensemanager_grant` using the grant ARN. For example:
+
+```console
+% terraform import aws_licensemanager_grant.test arn:aws:license-manager::123456789011:grant:g-01d313393d9e443d8664cc054db1e089
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/licensemanager_grant_accepter.html.markdown b/website/docs/cdktf/python/r/licensemanager_grant_accepter.html.markdown
new file mode 100644
index 00000000000..22fd54aada0
--- /dev/null
+++ b/website/docs/cdktf/python/r/licensemanager_grant_accepter.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_grant_accepter"
+description: |-
+  Accepts a License Manager grant resource.
+---
+
+
+
+# Resource: aws_licensemanager_grant_accepter
+
+Accepts a License Manager grant. This allows for sharing licenses with other AWS accounts.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.licensemanager_grant_accepter import LicensemanagerGrantAccepter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LicensemanagerGrantAccepter(self, "test",
+            grant_arn="arn:aws:license-manager::123456789012:grant:g-1cf9fba4ba2f42dcab11c686c4b4d329"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `grant_arn` - (Required) The ARN of the grant to accept.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The grant ARN (same as `arn`).
+* `arn` - The grant ARN.
+* `name` - The name of the grant.
+* `allowed_operations` - A list of the allowed operations for the grant.
+* `license_arn` - The ARN of the license for the grant.
+* `principal` - The target account for the grant.
+* `home_region` - The home region for the license.
+* `parent_arn` - The parent ARN.
+* `status` - The grant status.
+* `version` - The grant version.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_licensemanager_grant_accepter` using the grant ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_licensemanager_grant_accepter` using the grant ARN. For example:
+
+```console
+% terraform import aws_licensemanager_grant_accepter.test arn:aws:license-manager::123456789012:grant:g-1cf9fba4ba2f42dcab11c686c4b4d329
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/licensemanager_license_configuration.markdown b/website/docs/cdktf/python/r/licensemanager_license_configuration.markdown
new file mode 100644
index 00000000000..028450700b6
--- /dev/null
+++ b/website/docs/cdktf/python/r/licensemanager_license_configuration.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_license_configuration"
+description: |-
+  Provides a License Manager license configuration resource.
+---
+
+
+
+# Resource: aws_licensemanager_license_configuration
+
+Provides a License Manager license configuration resource.
+
+~> **Note:** Removing the `license_count` attribute is not supported by the License Manager API; use `terraform taint aws_licensemanager_license_configuration.` to recreate the resource instead.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.licensemanager_license_configuration import LicensemanagerLicenseConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LicensemanagerLicenseConfiguration(self, "example",
+            description="Example",
+            license_count=10,
+            license_count_hard_limit=True,
+            license_counting_type="Socket",
+            license_rules=["#minimumSockets=2"],
+            name="Example",
+            tags={
+                "foo": "barr"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the license configuration.
+* `description` - (Optional) Description of the license configuration.
+* `license_count` - (Optional) Number of licenses managed by the license configuration.
+* `license_count_hard_limit` - (Optional) Sets the number of available licenses as a hard limit.
+* `license_counting_type` - (Required) Dimension to use to track license inventory. Specify either `vCPU`, `Instance`, `Core`, or `Socket`.
+* `license_rules` - (Optional) Array of configured License Manager rules.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Rules
+
+License rules should be in the format of `#RuleType=RuleValue`. Supported rule types:
+
+* `minimumVcpus` - Resource must have minimum vCPU count in order to use the license. Default: 1
+* `maximumVcpus` - Resource must have maximum vCPU count in order to use the license. Default: unbounded, limit: 10000
+* `minimumCores` - Resource must have minimum core count in order to use the license. Default: 1
+* `maximumCores` - Resource must have maximum core count in order to use the license. Default: unbounded, limit: 10000
+* `minimumSockets` - Resource must have minimum socket count in order to use the license. Default: 1
+* `maximumSockets` - Resource must have maximum socket count in order to use the license. Default: unbounded, limit: 10000
+* `allowedTenancy` - Defines where the license can be used. If set, restricts license usage to selected tenancies. Specify a comma-delimited list of `EC2-Default`, `EC2-DedicatedHost`, `EC2-DedicatedInstance`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The license configuration ARN.
+* `id` - The license configuration ARN.
+* `owner_account_id` - Account ID of the owner of the license configuration.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import license configurations using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import license configurations using the `id`. For example:
+
+```console
+% terraform import aws_licensemanager_license_configuration.example arn:aws:license-manager:eu-west-1:123456789012:license-configuration:lic-0123456789abcdef0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_bucket.html.markdown b/website/docs/cdktf/python/r/lightsail_bucket.html.markdown
new file mode 100644
index 00000000000..0cdd78ee2a0
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_bucket.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_bucket"
+description: |-
+  Provides a Lightsail bucket
+---
+
+
+
+# Resource: aws_lightsail_bucket
+
+Provides a Lightsail bucket.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_bucket import LightsailBucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LightsailBucket(self, "test",
+            bundle_id="small_1_0",
+            name="mytestbucket"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name for the bucket.
+* `bundle_id` - (Required) The ID of the bundle to use for the bucket. A bucket bundle specifies the monthly cost, storage space, and data transfer quota for a bucket. Use the [get-bucket-bundles](https://docs.aws.amazon.com/cli/latest/reference/lightsail/get-bucket-bundles.html) CLI command to get a list of bundle IDs that you can specify.
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name used for this bucket (matches `name`).
+* `arn` - The ARN of the Lightsail bucket.
+* `availability_zone` - The resource Availability Zone. Follows the format us-east-2a (case-sensitive).
+* `created_at` - The timestamp when the bucket was created.
+* `region` - The Amazon Web Services Region name.
+* `support_code` - The support code for the resource. Include this code in your email to support when you have questions about a resource in Lightsail. This code enables our support team to look up your Lightsail information more easily.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_bucket` using the `name` attribute. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_lightsail_bucket` using the `name` attribute. For example:
+
+```console
+% terraform import aws_lightsail_bucket.test example-bucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_bucket_access_key.html.markdown b/website/docs/cdktf/python/r/lightsail_bucket_access_key.html.markdown
new file mode 100644
index 00000000000..d594e35c8c0
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_bucket_access_key.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_bucket_access_key"
+description: |-
+  Provides a Lightsail bucket access key. This is a set of credentials that allow API requests to be made to the Lightsail bucket.
+---
+
+
+
+# Resource: aws_lightsail_bucket_access_key
+
+Provides a Lightsail bucket access key. This is a set of credentials that allow API requests to be made to the Lightsail bucket.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_bucket import LightsailBucket
+from imports.aws.lightsail_bucket_access_key import LightsailBucketAccessKey
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = LightsailBucket(self, "test",
+            bundle_id="small_1_0",
+            name="mytestbucket"
+        )
+        # The access key references the bucket created above by its name (`id` matches `name`).
+        aws_lightsail_bucket_access_key_test = LightsailBucketAccessKey(self, "test_1",
+            bucket_name=test.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket_name` - (Required) The name of the bucket that the new access key will belong to, and grant access to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes separated by a `,` to create a unique id: `bucket_name`,`access_key_id`
+* `access_key_id` - The ID of the access key.
+* `created_at` - The timestamp when the access key was created.
+* `secret_access_key` - The secret access key used to sign requests. This attribute is not available for imported resources. Note that this will be written to the state file.
+* `status` - The status of the access key.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_bucket_access_key` using the `id` attribute. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
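+
+The converted `import` block above is empty because `cdktf convert` does not translate Terraform `import` blocks. In CDKTF the closest equivalent is the `import_from` method on a resource. A minimal sketch, assuming a CDKTF release that supports plannable import (0.17 or later) and an existing key ID:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.lightsail_bucket_access_key import LightsailBucketAccessKey
+class ImportExistingAccessKey(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        key = LightsailBucketAccessKey(self, "test",
+            bucket_name="example-bucket"
+        )
+        # Adopt the existing access key (id format: bucket_name,access_key_id)
+        # on the next plan/apply instead of creating a new one.
+        key.import_from("example-bucket,AKIA47VOQ2KPR7LLRZ6D")
+```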
+
+Using `terraform import`, import `aws_lightsail_bucket_access_key` using the `id` attribute. For example:
+
+```console
+% terraform import aws_lightsail_bucket_access_key.test example-bucket,AKIA47VOQ2KPR7LLRZ6D
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_bucket_resource_access.html.markdown b/website/docs/cdktf/python/r/lightsail_bucket_resource_access.html.markdown
new file mode 100644
index 00000000000..8be06697849
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_bucket_resource_access.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_bucket_resource_access"
+description: |-
+  Provides a lightsail resource access to a bucket.
+---
+
+
+# Resource: aws_lightsail_bucket_resource_access
+
+Provides a lightsail resource access to a bucket.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_bucket import LightsailBucket
+from imports.aws.lightsail_bucket_resource_access import LightsailBucketResourceAccess
+from imports.aws.lightsail_instance import LightsailInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = LightsailBucket(self, "test",
+            bundle_id="small_1_0",
+            name="mytestbucket"
+        )
+        aws_lightsail_instance_test = LightsailInstance(self, "test_2",
+            availability_zone="us-east-1b",
+            blueprint_id="amazon_linux_2",
+            bundle_id="nano_1_0",
+            name="mytestinstance"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lightsail_instance_test.override_logical_id("test")
+        aws_lightsail_bucket_resource_access_test = LightsailBucketResourceAccess(self, "test_1",
+            bucket_name=test.name,
+            resource_name=Token.as_string(aws_lightsail_instance_test.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lightsail_bucket_resource_access_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket_name` - (Required) The name of the bucket to grant access to.
+* `resource_name` - (Required) The name of the resource to be granted bucket access.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes separated by a `,` to create a unique id: `bucket_name`,`resource_name`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_bucket_resource_access` using the `id` attribute. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_lightsail_bucket_resource_access` using the `id` attribute. For example:
+
+```console
+% terraform import aws_lightsail_bucket_resource_access.test example-bucket,example-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_certificate.html.markdown b/website/docs/cdktf/python/r/lightsail_certificate.html.markdown
new file mode 100644
index 00000000000..5072d4594b5
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_certificate.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_certificate"
+description: |-
+  Provides a lightsail certificate
+---
+
+
+# Resource: aws_lightsail_certificate
+
+Provides a lightsail certificate.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_certificate import LightsailCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LightsailCertificate(self, "test",
+            domain_name="testdomain.com",
+            name="test",
+            subject_alternative_names=["www.testdomain.com"]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Lightsail certificate.
+* `domain_name` - (Required) A domain name for which the certificate should be issued.
+* `subject_alternative_names` - (Optional) Set of domains that should be SANs in the issued certificate. `domain_name` attribute is automatically added as a Subject Alternative Name.
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the lightsail certificate (matches `name`).
+* `arn` - The ARN of the lightsail certificate.
+* `created_at` - The timestamp when the certificate was created.
+* `domain_validation_options` - Set of domain validation objects which can be used to complete certificate validation. Can have more than one element, e.g., if SANs are defined. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_certificate` using the certificate name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_lightsail_certificate` using the certificate name. For example: + +```console +% terraform import aws_lightsail_certificate.test CertificateName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lightsail_container_service.html.markdown b/website/docs/cdktf/python/r/lightsail_container_service.html.markdown new file mode 100644 index 00000000000..6ffbe2a84cc --- /dev/null +++ b/website/docs/cdktf/python/r/lightsail_container_service.html.markdown @@ -0,0 +1,215 @@ +--- +subcategory: "Lightsail" +layout: "aws" +page_title: "AWS: aws_lightsail_container_service" +description: |- + Provides a resource to manage Lightsail container service +--- + + + +# Resource: aws_lightsail_container_service + +An Amazon Lightsail container service is a highly scalable compute and networking resource on which you can deploy, run, +and manage containers. For more information, see +[Container services in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-container-services). + +~> **Note:** For more information about the AWS Regions in which you can create Amazon Lightsail container services, +see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail). + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lightsail_container_service import LightsailContainerService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LightsailContainerService(self, "my_container_service", + is_disabled=False, + name="container-service-1", + power="nano", + scale=1, + tags={ + "foo1": "bar1", + "foo2": "" + } + ) +``` + +### Public Domain Names + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.lightsail_container_service import LightsailContainerService, LightsailContainerServicePublicDomainNames, LightsailContainerServicePublicDomainNamesCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, power, scale):
+        super().__init__(scope, id)
+        LightsailContainerService(self, "my_container_service",
+            public_domain_names=LightsailContainerServicePublicDomainNames(
+                certificate=[LightsailContainerServicePublicDomainNamesCertificate(
+                    certificate_name="example-certificate",
+                    domain_names=["www.example.com"]
+                )
+                ]
+            ),
+            name=name,
+            power=power,
+            scale=scale
+        )
+```
+
+### Private Registry Access
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.ecr_repository import EcrRepository
+from imports.aws.ecr_repository_policy import EcrRepositoryPolicy
+from imports.aws.lightsail_container_service import LightsailContainerService, LightsailContainerServicePrivateRegistryAccess, LightsailContainerServicePrivateRegistryAccessEcrImagePullerRole
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, id, *, name, power, scale):
+        super().__init__(scope, id)
+        default_var = LightsailContainerService(self, "default",
+            private_registry_access=LightsailContainerServicePrivateRegistryAccess(
+                ecr_image_puller_role=LightsailContainerServicePrivateRegistryAccessEcrImagePullerRole(
+                    is_active=True
+                )
+            ),
+            name=name,
+            power=power,
+            scale=scale
+        )
+        # The aws_ecr_repository resource from the original configuration is not
+        # part of the converted output; it is recreated here (with an assumed
+        # name) so that the example is self-contained.
+        aws_ecr_repository_default = EcrRepository(self, "default_3",
+            name="example-repository"
+        )
+        data_aws_iam_policy_document_default = DataAwsIamPolicyDocument(self, "default_1",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["ecr:BatchGetImage", "ecr:GetDownloadUrlForLayer"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[
+                        Token.as_string(
+                            property_access(default_var.private_registry_access, ["0", "ecr_image_puller_role", "0", "principal_arn"]))
+                    ],
+                    type="AWS"
+                )
+                ]
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_default.override_logical_id("default")
+        aws_ecr_repository_policy_default = EcrRepositoryPolicy(self, "default_2",
+            policy=Token.as_string(data_aws_iam_policy_document_default.json),
+            repository=Token.as_string(aws_ecr_repository_default.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ecr_repository_policy_default.override_logical_id("default")
+```
+
+## Argument Reference
+
+~> **NOTE:** You must create and validate an SSL/TLS certificate before you can use `public_domain_names` with your
+container service. For more information, see
+[Enabling and managing custom domains for your Amazon Lightsail container services](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-creating-container-services-certificates).
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name for the container service. Names must be of length 1 to 63, and be
+  unique within each AWS Region in your Lightsail account.
+* `power` - (Required) The power specification for the container service. The power specifies the amount of memory,
+  the number of vCPUs, and the monthly price of each node of the container service.
+  Possible values: `nano`, `micro`, `small`, `medium`, `large`, `xlarge`.
+* `scale` - (Required) The scale specification for the container service. The scale specifies the allocated compute
+  nodes of the container service.
+* `is_disabled` - (Optional) A Boolean value indicating whether the container service is disabled. Defaults to `false`.
+* `public_domain_names` - (Optional) The public domain names to use with the container service, such as example.com
+  and www.example.com. You can specify up to four public domain names for a container service. The domain names that you
+  specify are used when you create a deployment with a container configured as the public endpoint of your container
+  service. If you don't specify public domain names, then you can use the default domain of the container service.
+  Defined below.
+* `private_registry_access` - (Optional) An object to describe the configuration for the container service to access private container image repositories, such as Amazon Elastic Container Registry (Amazon ECR) private repositories. See [Private Registry Access](#private-registry-access) below for more details.
+* `tags` - (Optional) Map of container service tags. If configured with a provider
+  [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block)
+  present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Private Registry Access
+
+The `private_registry_access` block supports the following arguments:
+
+* `ecr_image_puller_role` - (Optional) Describes a request to configure an Amazon Lightsail container service to access private container image repositories, such as Amazon Elastic Container Registry (Amazon ECR) private repositories. See [ECR Image Puller Role](#ecr-image-puller-role) below for more details.
+
+### ECR Image Puller Role
+
+The `ecr_image_puller_role` block supports the following arguments:
+
+* `is_active` - (Optional) A Boolean value that indicates whether to activate the role. The default is `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the container service.
+* `availability_zone` - The Availability Zone. Follows the format us-east-2a (case-sensitive).
+* `id` - Same as `name`.
+* `power_id` - The ID of the power of the container service.
+* `principal_arn`- The principal ARN of the container service. The principal ARN can be used to create a trust
+  relationship between your standard AWS account and your Lightsail container service. This allows you to give your
+  service permission to access resources in your standard AWS account.
+* `private_domain_name` - The private domain name of the container service. The private domain name is accessible only
+  by other resources within the default virtual private cloud (VPC) of your Lightsail account.
+* `region_name` - The AWS Region name.
+* `resource_type` - The Lightsail resource type of the container service (i.e., ContainerService).
+* `state` - The current state of the container service.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider
+  [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `url` - The publicly accessible URL of the container service. If no public endpoint is specified in the
+  currentDeployment, this URL returns a 404 response.
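+
+To hand the service endpoint to operators or other stacks, `url` can be exposed as a stack output. A minimal hand-written sketch (not `cdktf convert` output); the class and output names are illustrative:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.lightsail_container_service import LightsailContainerService
+class ContainerServiceUrlOutput(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        service = LightsailContainerService(self, "my_container_service",
+            name="container-service-1",
+            power="nano",
+            scale=1
+        )
+        # The URL returns a 404 until a deployment configures a public endpoint.
+        TerraformOutput(self, "service_url", value=service.url)
+```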
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Container Service using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Lightsail Container Service using the `name`. For example:
+
+```console
+% terraform import aws_lightsail_container_service.my_container_service container-service-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_container_service_deployment_version.html.markdown b/website/docs/cdktf/python/r/lightsail_container_service_deployment_version.html.markdown
new file mode 100644
index 00000000000..e0f18417d2d
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_container_service_deployment_version.html.markdown
@@ -0,0 +1,135 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_container_service_deployment_version"
+description: |-
+  Provides a resource to manage a deployment version for your Amazon Lightsail container service.
+---
+
+
+# Resource: aws_lightsail_container_service_deployment_version
+
+Provides a resource to manage a deployment version for your Amazon Lightsail container service.
+
+~> **NOTE:** The Amazon Lightsail container service must be enabled to create a deployment.
+
+~> **NOTE:** This resource allows you to manage an Amazon Lightsail container service deployment version but Terraform cannot destroy it. Removing this resource from your configuration will remove it from the state file and from Terraform management.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_container_service import LightsailContainerService
+from imports.aws.lightsail_container_service_deployment_version import LightsailContainerServiceDeploymentVersion, LightsailContainerServiceDeploymentVersionContainer, LightsailContainerServiceDeploymentVersionPublicEndpoint, LightsailContainerServiceDeploymentVersionPublicEndpointHealthCheck
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The aws_lightsail_container_service resource referenced by the original
+        # example is not part of the converted output; it is recreated here (with
+        # assumed settings) so that the example is self-contained.
+        aws_lightsail_container_service_example = LightsailContainerService(self, "example_1",
+            name="container-service-1",
+            power="nano",
+            scale=1
+        )
+        LightsailContainerServiceDeploymentVersion(self, "example",
+            container=[LightsailContainerServiceDeploymentVersionContainer(
+                command=[],
+                container_name="hello-world",
+                environment={
+                    "MY_ENVIRONMENT_VARIABLE": "my_value"
+                },
+                image="amazon/amazon-lightsail:hello-world",
+                ports={
+                    "80": "HTTP"
+                }
+            )
+            ],
+            public_endpoint=LightsailContainerServiceDeploymentVersionPublicEndpoint(
+                container_name="hello-world",
+                container_port=80,
+                health_check=LightsailContainerServiceDeploymentVersionPublicEndpointHealthCheck(
+                    healthy_threshold=2,
+                    interval_seconds=5,
+                    path="/",
+                    success_codes="200-499",
+                    timeout_seconds=2,
+                    unhealthy_threshold=2
+                )
+            ),
+            service_name=Token.as_string(aws_lightsail_container_service_example.name)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `service_name` - (Required) The name for the container service.
+* `container` - (Required) A set of configuration blocks that describe the settings of the containers that will be launched on the container service. Maximum of 53. [Detailed below](#container).
+* `public_endpoint` - (Optional) A configuration block that describes the settings of the public endpoint for the container service. [Detailed below](#public_endpoint).
+
+### `container`
+
+The `container` configuration block supports the following arguments:
+
+* `container_name` - (Required) The name for the container.
+* `image` - (Required) The name of the image used for the container. Container images sourced from your Lightsail container service, that are registered and stored on your service, start with a colon (`:`). For example, `:container-service-1.mystaticwebsite.1`. Container images sourced from a public registry like Docker Hub don't start with a colon. For example, `nginx:latest` or `nginx`.
+* `command` - (Optional) The launch command for the container. A list of strings.
+* `environment` - (Optional) A key-value map of the environment variables of the container.
+* `ports` - (Optional) A key-value map of the open firewall ports of the container. Valid values: `HTTP`, `HTTPS`, `TCP`, `UDP`.
+
+### `public_endpoint`
+
+The `public_endpoint` configuration block supports the following arguments:
+
+* `container_name` - (Required) The name of the container for the endpoint.
+* `container_port` - (Required) The port of the container to which traffic is forwarded.
+* `health_check` - (Required) A configuration block that describes the health check configuration of the container. [Detailed below](#health_check).
+
+### `health_check`
+
+The `health_check` configuration block supports the following arguments:
+
+* `healthy_threshold` - (Optional) The number of consecutive health check successes required before moving the container to the Healthy state. Defaults to 2.
+* `unhealthy_threshold` - (Optional) The number of consecutive health check failures required before moving the container to the Unhealthy state. Defaults to 2.
+* `timeout_seconds` - (Optional) The amount of time, in seconds, during which no response means a failed health check. You can specify between 2 and 60 seconds. Defaults to 2.
+* `interval_seconds` - (Optional) The approximate interval, in seconds, between health checks of an individual container. You can specify between 5 and 300 seconds. Defaults to 5.
+* `path` - (Optional) The path on the container on which to perform the health check. Defaults to "/".
+* `success_codes` - (Optional) The HTTP codes to use when checking for a successful response from a container. You can specify values between 200 and 499. Defaults to "200-499".
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `service_name` and `version` separated by a slash (`/`).
+* `created_at` - The timestamp when the deployment was created.
+* `state` - The current state of the container service.
+* `version` - The version number of the deployment.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Container Service Deployment Version using the `service_name` and `version` separated by a slash (`/`).
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Lightsail Container Service Deployment Version using the `service_name` and `version` separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_lightsail_container_service_deployment_version.example container-service-1/1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_database.html.markdown b/website/docs/cdktf/python/r/lightsail_database.html.markdown
new file mode 100644
index 00000000000..d754e6d10c4
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_database.html.markdown
@@ -0,0 +1,268 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_database"
+description: |-
+  Provides a Lightsail Database
+---
+
+
+# Resource: aws_lightsail_database
+
+Provides a Lightsail Database. Amazon Lightsail is a service to provide easy virtual private servers
+with custom software already setup. See [What is Amazon Lightsail?](https://lightsail.aws.amazon.com/ls/docs/getting-started/article/what-is-amazon-lightsail)
+for more information.
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones"](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services/) for more details
+
+## Example Usage
+
+### Basic mysql blueprint
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_database import LightsailDatabase
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, relational_database_name):
+        super().__init__(scope, name)
+        LightsailDatabase(self, "test",
+            availability_zone="us-east-1a",
+            blueprint_id="mysql_8_0",
+            bundle_id="micro_1_0",
+            master_database_name="testdatabasename",
+            master_password="testdatabasepassword",
+            master_username="test",
+            name="test",
+            relational_database_name=relational_database_name
+        )
+```
+
+### Basic postgres blueprint
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_database import LightsailDatabase
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, relational_database_name):
+        super().__init__(scope, name)
+        LightsailDatabase(self, "test",
+            availability_zone="us-east-1a",
+            blueprint_id="postgres_12",
+            bundle_id="micro_1_0",
+            master_database_name="testdatabasename",
+            master_password="testdatabasepassword",
+            master_username="test",
+            name="test",
+            relational_database_name=relational_database_name
+        )
+```
+
+### Custom backup and maintenance windows
+
+Below is an example that sets a custom backup and maintenance window. Times are specified in UTC. This example will allow daily backups to take place between 16:00 and 16:30 each day.
This example also requires any maintenance tasks (anything that would cause an outage, including changing some attributes) to take place on Tuesdays between 17:00 and 17:30. An action taken against this database that would cause an outage will wait until this time window to make the requested changes.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_database import LightsailDatabase
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, relational_database_name):
+        super().__init__(scope, name)
+        LightsailDatabase(self, "test",
+            availability_zone="us-east-1a",
+            blueprint_id="postgres_12",
+            bundle_id="micro_1_0",
+            master_database_name="testdatabasename",
+            master_password="testdatabasepassword",
+            master_username="test",
+            name="test",
+            preferred_backup_window="16:00-16:30",
+            preferred_maintenance_window="Tue:17:00-Tue:17:30",
+            relational_database_name=relational_database_name
+        )
+```
+
+### Final Snapshots
+
+To enable creating a final snapshot of your database on deletion, use the `final_snapshot_name` argument to provide a name to be used for the snapshot.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_database import LightsailDatabase
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, relational_database_name):
+        super().__init__(scope, name)
+        LightsailDatabase(self, "test",
+            availability_zone="us-east-1a",
+            blueprint_id="postgres_12",
+            bundle_id="micro_1_0",
+            final_snapshot_name="MyFinalSnapshot",
+            master_database_name="testdatabasename",
+            master_password="testdatabasepassword",
+            master_username="test",
+            name="test",
+            preferred_backup_window="16:00-16:30",
+            preferred_maintenance_window="Tue:17:00-Tue:17:30",
+            relational_database_name=relational_database_name
+        )
+```
+
+### Apply Immediately
+
+To enable applying changes immediately instead of waiting for a maintenance window, use the `apply_immediately` argument.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_database import LightsailDatabase
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, relational_database_name):
+        super().__init__(scope, name)
+        LightsailDatabase(self, "test",
+            apply_immediately=True,
+            availability_zone="us-east-1a",
+            blueprint_id="postgres_12",
+            bundle_id="micro_1_0",
+            master_database_name="testdatabasename",
+            master_password="testdatabasepassword",
+            master_username="test",
+            name="test",
+            relational_database_name=relational_database_name
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name to use for your new Lightsail database resource. Names must be unique within each AWS Region in your Lightsail account.
+* `availability_zone` - The Availability Zone in which to create your new database. Use the us-east-2a case-sensitive format.
+* `master_database_name` - (Required) The name of the master database created when the Lightsail database resource is created.
+* `master_password` - (Sensitive) The password for the master user of your new database. The password can include any printable ASCII character except "/", """, or "@".
+* `master_username` - The master user name for your new database.
+* `blueprint_id` - (Required) The blueprint ID for your new database. A blueprint describes the major engine version of a database. You can get a list of database blueprint IDs by using the AWS CLI command: `aws lightsail get-relational-database-blueprints`
+* `bundle_id` - (Required) The bundle ID for your new database. A bundle describes the performance specifications for your database (see list below). You can get a list of database bundle IDs by using the AWS CLI command: `aws lightsail get-relational-database-bundles`.
+* `preferred_backup_window` - The daily time range during which automated backups are created for your new database if automated backups are enabled. Must be in the hh24:mi-hh24:mi format. Example: `16:00-16:30`. Specified in Coordinated Universal Time (UTC).
+* `preferred_maintenance_window` - The weekly time range during which system maintenance can occur on your new database. Must be in the ddd:hh24:mi-ddd:hh24:mi format. Specified in Coordinated Universal Time (UTC). Example: `Tue:17:00-Tue:17:30`
+* `publicly_accessible` - Specifies the accessibility options for your new database. A value of true specifies a database that is available to resources outside of your Lightsail account. A value of false specifies a database that is available only to your Lightsail resources in the same region as your database.
+* `apply_immediately` - When true, applies changes immediately. When false, applies changes during the preferred maintenance window. Some changes may cause an outage.
+* `backup_retention_enabled` - When true, enables automated backup retention for your database. When false, disables automated backup retention for your database. Disabling backup retention deletes all automated database backups. Before disabling this, you may want to create a snapshot of your database.
+* `skip_final_snapshot` - Determines whether a final database snapshot is created before your database is deleted. If true is specified, no database snapshot is created. If false is specified, a database snapshot is created before your database is deleted. You must specify the final relational database snapshot name parameter if the skip final snapshot parameter is false.
+* `final_snapshot_name` - (Required unless `skip_final_snapshot = true`) The name of the database snapshot created if skip final snapshot is false, which is the default value for that parameter.
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value.
+
+## Blueprint Ids
+
+A list of all available Lightsail Blueprints for Relational Databases can be obtained using the [aws lightsail get-relational-database-blueprints](https://docs.aws.amazon.com/cli/latest/reference/lightsail/get-relational-database-blueprints.html) AWS CLI command.
+
+### Examples
+
+- `mysql_8_0`
+- `postgres_12`
+
+### Prefix
+
+A Blueprint ID starts with a prefix of the engine type.
+
+### Suffix
+
+A Blueprint ID has a suffix of the engine version.
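+
+For example, to list just the blueprint IDs (a sketch; the `--query` expression assumes the response shape documented for `get-relational-database-blueprints`):
+
+```console
+% aws lightsail get-relational-database-blueprints --query 'blueprints[].blueprintId'
+```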
+
+## Bundles
+
+A list of all available Lightsail Bundles for Relational Databases can be obtained using the [aws lightsail get-relational-database-bundles](https://docs.aws.amazon.com/cli/latest/reference/lightsail/get-relational-database-bundles.html) AWS CLI command.
+
+### Examples
+
+- `small_1_0`
+- `small_ha_1_0`
+- `large_1_0`
+- `large_ha_1_0`
+
+### Prefix
+
+A Bundle ID starts with one of the below size prefixes:
+
+- `micro_`
+- `small_`
+- `medium_`
+- `large_`
+
+### Infixes (Optional for HA Database)
+
+A Bundle ID can have the following infix added in order to use the HA option of the selected bundle.
+
+- `ha_`
+
+### Suffix
+
+A Bundle ID ends with the following suffix: `1_0`
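+
+As with blueprints, the bundle IDs can be listed directly (again a sketch; the `--query` expression assumes the documented response shape):
+
+```console
+% aws lightsail get-relational-database-bundles --query 'bundles[].bundleId'
+```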
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Lightsail database (matches `arn`).
+* `arn` - The ARN of the Lightsail database (matches `id`).
+* `ca_certificate_identifier` - The certificate associated with the database.
+* `created_at` - The timestamp when the database was created.
+* `engine` - The database software (for example, MySQL).
+* `engine_version` - The database engine version (for example, 5.7.23).
+* `cpu_count` - The number of vCPUs for the database.
+* `ram_size` - The amount of RAM in GB for the database.
+* `disk_size` - The size of the disk for the database.
+* `master_endpoint_port` - The master endpoint network port for the database.
+* `master_endpoint_address` - The master endpoint fqdn for the database.
+* `secondary_availability_zone` - Describes the secondary Availability Zone of a high availability database. The secondary database is used for failover support of a high availability database.
+* `support_code` - The support code for the database. Include this code in your email to support when you have questions about a database in Lightsail. This code enables our support team to look up your Lightsail information more easily.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Databases using their name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Lightsail Databases using their name. For example:
+
+```console
+% terraform import aws_lightsail_database.foo 'bar'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_disk.html.markdown b/website/docs/cdktf/python/r/lightsail_disk.html.markdown
new file mode 100644
index 00000000000..a2ed5c1c516
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_disk.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_disk"
+description: |-
+  Provides a Lightsail Disk resource
+---
+
+
+# Resource: aws_lightsail_disk
+
+Provides a Lightsail Disk resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones, DataAwsAvailabilityZonesFilter
+from imports.aws.lightsail_disk import LightsailDisk
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        available = DataAwsAvailabilityZones(self, "available",
+            filter=[DataAwsAvailabilityZonesFilter(
+                name="opt-in-status",
+                values=["opt-in-not-required"]
+            )
+            ],
+            state="available"
+        )
+        LightsailDisk(self, "test",
+            availability_zone=Token.as_string(property_access(available.names, ["0"])),
+            name="test",
+            size_in_gb=8
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the disk.
+* `size_in_gb` - (Required) The size of the disk in GB.
+* `availability_zone` - (Required) The Availability Zone in which to create your disk.
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the disk (matches `name`).
+* `arn` - The ARN of the disk.
+* `created_at` - The timestamp when the disk was created.
+* `support_code` - The support code for the disk. Include this code in your email to support when you have questions about a disk in Lightsail. This code enables our support team to look up your Lightsail information more easily.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_disk` using the name attribute. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_lightsail_disk` using the name attribute.
For example:
+
+```console
+% terraform import aws_lightsail_disk.test test
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_disk_attachment.html.markdown b/website/docs/cdktf/python/r/lightsail_disk_attachment.html.markdown
new file mode 100644
index 00000000000..d34c683c025
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_disk_attachment.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_disk_attachment"
+description: |-
+  Attaches a Lightsail disk to a Lightsail Instance
+---
+
+
+# Resource: aws_lightsail_disk_attachment
+
+Attaches a Lightsail disk to a Lightsail instance.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones, DataAwsAvailabilityZonesFilter
+from imports.aws.lightsail_disk import LightsailDisk
+from imports.aws.lightsail_disk_attachment import LightsailDiskAttachment
+from imports.aws.lightsail_instance import LightsailInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        available = DataAwsAvailabilityZones(self, "available",
+            filter=[DataAwsAvailabilityZonesFilter(
+                name="opt-in-status",
+                values=["opt-in-not-required"]
+            )
+            ],
+            state="available"
+        )
+        test = LightsailDisk(self, "test",
+            availability_zone=Token.as_string(property_access(available.names, ["0"])),
+            name="test-disk",
+            size_in_gb=8
+        )
+        aws_lightsail_instance_test = LightsailInstance(self, "test_2",
+            availability_zone=Token.as_string(property_access(available.names, ["0"])),
+            blueprint_id="amazon_linux_2",
+            bundle_id="nano_1_0",
+            name="test-instance"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lightsail_instance_test.override_logical_id("test")
+        aws_lightsail_disk_attachment_test = LightsailDiskAttachment(self, "test_3",
+            disk_name=test.name,
+            disk_path="/dev/xvdf",
+            instance_name=Token.as_string(aws_lightsail_instance_test.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lightsail_disk_attachment_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `disk_name` - (Required) The name of the Lightsail Disk.
+* `instance_name` - (Required) The name of the Lightsail Instance to attach to.
+* `disk_path` - (Required) The disk path to expose to the instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes to create a unique id: `disk_name`,`instance_name`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_disk_attachment` using the id attribute.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_lightsail_disk_attachment` using the id attribute. For example:
+
+```console
+% terraform import aws_lightsail_disk_attachment.test test-disk,test-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_distribution.html.markdown b/website/docs/cdktf/python/r/lightsail_distribution.html.markdown
new file mode 100644
index 00000000000..0b04df04a13
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_distribution.html.markdown
@@ -0,0 +1,306 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_distribution"
+description: |-
+  Terraform resource for managing an AWS Lightsail Distribution.
+---
+
+
+# Resource: aws_lightsail_distribution
+
+Terraform resource for managing an AWS Lightsail Distribution.
+
+## Example Usage
+
+### Basic Usage
+
+Below is a basic example with a bucket as an origin.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.lightsail_bucket import LightsailBucket
+from imports.aws.lightsail_distribution import LightsailDistribution, LightsailDistributionCacheBehaviorSettings, LightsailDistributionCacheBehaviorSettingsForwardedCookies, LightsailDistributionCacheBehaviorSettingsForwardedHeaders, LightsailDistributionCacheBehaviorSettingsForwardedQueryStrings, LightsailDistributionDefaultCacheBehavior, LightsailDistributionOrigin
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = LightsailBucket(self, "test",
+            bundle_id="small_1_0",
+            name="test-bucket"
+        )
+        aws_lightsail_distribution_test = LightsailDistribution(self, "test_1",
+            bundle_id="small_1_0",
+            cache_behavior_settings=LightsailDistributionCacheBehaviorSettings(
+                allowed_http_methods="GET,HEAD,OPTIONS,PUT,PATCH,POST,DELETE",
+                cached_http_methods="GET,HEAD",
+                default_ttl=86400,
+                forwarded_cookies=LightsailDistributionCacheBehaviorSettingsForwardedCookies(
+                    option="none"
+                ),
+                forwarded_headers=LightsailDistributionCacheBehaviorSettingsForwardedHeaders(
+                    option="default"
+                ),
+                forwarded_query_strings=LightsailDistributionCacheBehaviorSettingsForwardedQueryStrings(
+                    option=False
+                ),
+                maximum_ttl=31536000,
+                minimum_ttl=0
+            ),
+            default_cache_behavior=LightsailDistributionDefaultCacheBehavior(
+                behavior="cache"
+            ),
+            name="test-distribution",
+            origin=LightsailDistributionOrigin(
+                name=test.name,
+                region_name=test.region
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_lightsail_distribution_test.override_logical_id("test")
+```
+
+### instance origin example
+
+Below is an example of an instance as the origin.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.lightsail_distribution import LightsailDistribution +from imports.aws.lightsail_instance import LightsailInstance +from imports.aws.lightsail_static_ip import LightsailStaticIp +from imports.aws.lightsail_static_ip_attachment import LightsailStaticIpAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = LightsailStaticIp(self, "test", + name="test-static-ip" + ) + available = DataAwsAvailabilityZones(self, "available", + filter=[DataAwsAvailabilityZonesFilter( + name="opt-in-status", + values=["opt-in-not-required"] + ) + ], + state="available" + ) + aws_lightsail_instance_test = LightsailInstance(self, "test_2", + availability_zone=Token.as_string(property_access(available.names, ["0"])), + blueprint_id="amazon_linux_2", + bundle_id="micro_1_0", + name="test-instance" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_instance_test.override_logical_id("test") + aws_lightsail_static_ip_attachment_test = LightsailStaticIpAttachment(self, "test_3", + instance_name=Token.as_string(aws_lightsail_instance_test.name), + static_ip_name=test.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_static_ip_attachment_test.override_logical_id("test") + aws_lightsail_distribution_test = LightsailDistribution(self, "test_4", + bundle_id="small_1_0", + default_cache_behavior=LightsailDistributionDefaultCacheBehavior( + behavior="cache" + ), + depends_on=[aws_lightsail_static_ip_attachment_test], + name="test-distribution", + origin=LightsailDistributionOrigin( + name=Token.as_string(aws_lightsail_instance_test.name), + region_name=Token.as_string(available.id) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_distribution_test.override_logical_id("test") +``` + +### lb origin example + +Below is an example with a load balancer as an origin + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.lightsail_distribution import LightsailDistribution +from imports.aws.lightsail_instance import LightsailInstance +from imports.aws.lightsail_lb import LightsailLb +from imports.aws.lightsail_lb_attachment import LightsailLbAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = LightsailLb(self, "test", + health_check_path="/", + instance_port=Token.as_number("80"), + name="test-load-balancer", + tags={ + "foo": "bar" + } + ) + available = DataAwsAvailabilityZones(self, "available", + filter=[DataAwsAvailabilityZonesFilter( + name="opt-in-status", + values=["opt-in-not-required"] + ) + ], + state="available" + ) + aws_lightsail_instance_test = LightsailInstance(self, "test_2", + availability_zone=Token.as_string(property_access(available.names, ["0"])), + blueprint_id="amazon_linux_2", + bundle_id="nano_1_0", + name="test-instance" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_instance_test.override_logical_id("test") + aws_lightsail_lb_attachment_test = LightsailLbAttachment(self, "test_3", + instance_name=Token.as_string(aws_lightsail_instance_test.name), + lb_name=test.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_lb_attachment_test.override_logical_id("test") + aws_lightsail_distribution_test = LightsailDistribution(self, "test_4", + bundle_id="small_1_0", + default_cache_behavior=LightsailDistributionDefaultCacheBehavior( + behavior="cache" + ), + depends_on=[aws_lightsail_lb_attachment_test], + name="test-distribution", + origin=LightsailDistributionOrigin( + name=test.name, + region_name=Token.as_string(available.id) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_distribution_test.override_logical_id("test") +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the distribution. +* `bundle_id` - (Required) Bundle ID to use for the distribution. +* `default_cache_behavior` - (Required) Object that describes the default cache behavior of the distribution. [Detailed below](#default_cache_behavior) +* `origin` - (Required) Object that describes the origin resource of the distribution, such as a Lightsail instance, bucket, or load balancer. [Detailed below](#origin) +* `cache_behavior_settings` - (Required) An object that describes the cache behavior settings of the distribution. [Detailed below](#cache_behavior_settings) + +The following arguments are optional: + +* `cache_behavior` - (Optional) A set of configuration blocks that describe the per-path cache behavior of the distribution. [Detailed below](#cache_behavior) +* `certificate_name` - (Optional) The name of the SSL/TLS certificate attached to the distribution, if any. +* `ip_address_type` - (Optional) The IP address type of the distribution. Default: `dualstack`. +* `is_enabled` - (Optional) Indicates whether the distribution is enabled. Default: `true`. +* `tags` - (Optional) Map of tags for the Lightsail Distribution. 
If
+  configured with a provider
+  [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block)
+  present, tags with matching keys will overwrite those defined at the provider-level.
+
+### default_cache_behavior
+
+* `behavior` - (Required) The cache behavior of the distribution. Valid values: `cache` and `dont-cache`.
+
+### origin
+
+* `name` - (Required) The name of the origin resource. Your origin can be an instance with an attached static IP, a bucket, or a load balancer that has at least one instance attached to it.
+* `protocol_policy` - (Optional) The protocol that your Amazon Lightsail distribution uses when establishing a connection with your origin to pull content.
+* `region_name` - (Required) The AWS Region name of the origin resource.
+* `resource_type` - (Computed) The resource type of the origin resource (e.g., Instance).
+
+### cache_behavior_settings
+
+* `allowed_http_methods` - (Optional) The HTTP methods that are processed and forwarded to the distribution's origin.
+* `cached_http_methods` - (Optional) The HTTP method responses that are cached by your distribution.
+* `default_ttl` - (Optional) The default amount of time that objects stay in the distribution's cache before the distribution forwards another request to the origin to determine whether the content has been updated.
+* `forwarded_cookies` - (Required) An object that describes the cookies that are forwarded to the origin. Your content is cached based on the cookies that are forwarded. [Detailed below](#forwarded_cookies)
+* `forwarded_headers` - (Required) An object that describes the headers that are forwarded to the origin. Your content is cached based on the headers that are forwarded. [Detailed below](#forwarded_headers)
+* `forwarded_query_strings` - (Required) An object that describes the query strings that are forwarded to the origin. Your content is cached based on the query strings that are forwarded. [Detailed below](#forwarded_query_strings)
+* `maximum_ttl` - (Optional) The maximum amount of time that objects stay in the distribution's cache before the distribution forwards another request to the origin to determine whether the object has been updated.
+* `minimum_ttl` - (Optional) The minimum amount of time that objects stay in the distribution's cache before the distribution forwards another request to the origin to determine whether the object has been updated.
+
+#### forwarded_cookies
+
+* `cookies_allow_list` - (Required) The specific cookies to forward to your distribution's origin.
+* `option` - (Optional) Specifies which cookies to forward to the distribution's origin for a cache behavior: all, none, or allow-list to forward only the cookies specified in the cookiesAllowList parameter.
+
+#### forwarded_headers
+
+* `headers_allow_list` - (Required) The specific headers to forward to your distribution's origin.
+* `option` - (Optional) The headers that you want your distribution to forward to your origin and base caching on.
+
+#### forwarded_query_strings
+
+* `option` - (Optional) Indicates whether the distribution forwards and caches based on query strings.
+* `query_strings_allowed_list` - (Required) The specific query strings that the distribution forwards to the origin.
+
+### cache_behavior
+
+* `behavior` - (Required) The cache behavior for the specified path.
+* `path` - (Required) The path to a directory or file to cache, or not cache. Use an asterisk symbol to specify wildcard directories (path/to/assets/\*), and file types (\*.html, \*.jpg, \*.js). Directories and file paths are case-sensitive.
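+
+As an illustration of per-path behaviors, the following hand-written fragment (not `cdktf convert` output, intended for use inside a stack's `__init__`) caches static assets while always fetching HTML from the origin; as in the converted examples above, the nested configuration classes are assumed to come from the generated `lightsail_distribution` bindings (regenerate them with `cdktf get` to confirm the names):
+
+```python
+LightsailDistribution(self, "example",
+    bundle_id="small_1_0",
+    name="example-distribution",
+    default_cache_behavior=LightsailDistributionDefaultCacheBehavior(
+        behavior="dont-cache"
+    ),
+    origin=LightsailDistributionOrigin(
+        name="example-bucket",
+        region_name="us-east-1"
+    ),
+    # Cache everything under /assets/, but never cache HTML pages.
+    cache_behavior=[LightsailDistributionCacheBehavior(
+        behavior="cache",
+        path="/assets/*"
+    ), LightsailDistributionCacheBehavior(
+        behavior="dont-cache",
+        path="*.html"
+    )
+    ]
+)
+```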
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `alternative_domain_names` - The alternate domain names of the distribution.
+* `arn` - The Amazon Resource Name (ARN) of the distribution.
+* `created_at` - The timestamp when the distribution was created.
+* `domain_name` - The domain name of the distribution.
+* `location` - An object that describes the location of the distribution, such as the AWS Region and Availability Zone. [Detailed below](#location)
+* `origin_public_dns` - The public DNS of the origin.
+* `resource_type` - The Lightsail resource type (e.g., Distribution).
+* `status` - The status of the distribution.
+* `support_code` - The support code. Include this code in your email to support when you have questions about your Lightsail distribution. This code enables our support team to look up your Lightsail information more easily.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### location
+
+* `availability_zone` - The Availability Zone. Follows the format us-east-2a (case-sensitive).
+* `region_name` - The AWS Region name.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Distribution using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Lightsail Distribution using the `id`. For example:
+
+```console
+% terraform import aws_lightsail_distribution.example rft-8012925589
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_domain.html.markdown b/website/docs/cdktf/python/r/lightsail_domain.html.markdown
new file mode 100644
index 00000000000..dd37d58eee1
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_domain.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_domain"
+description: |-
+  Provides a Lightsail Domain
+---
+
+
+# Resource: aws_lightsail_domain
+
+Creates a domain resource for the specified domain (e.g., example.com).
+You cannot register a new domain name using Lightsail. You must register
+a domain name using Amazon Route 53 or another domain name registrar.
+If you have already registered your domain, you can enter its name in
+this parameter to manage the DNS records for that domain.
~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_domain import LightsailDomain
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailDomain(self, "domain_test",
            domain_name="mydomain.com"
        )
```

## Argument Reference

This resource supports the following arguments:

* `domain_name` - (Required) The name of the Lightsail domain to manage

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name used for this domain
* `arn` - The ARN of the Lightsail domain

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_domain_entry.html.markdown b/website/docs/cdktf/python/r/lightsail_domain_entry.html.markdown
new file mode 100644
index 00000000000..fceae8f92f7
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_domain_entry.html.markdown
@@ -0,0 +1,80 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_domain_entry"
description: |-
  Provides a Lightsail Domain Entry
---

# Resource: aws_lightsail_domain_entry

Creates a domain entry resource.

~> **NOTE on `id`:** In an effort to simplify imports, this resource's `id` field has been updated to the standard resource id separator, a comma (`,`). For backward compatibility, the previous separator (underscore `_`) can still be used to read and import existing resources. When state is refreshed, the `id` will be updated to use the new standard separator. The previous separator will be deprecated in a future major release.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_domain import LightsailDomain
from imports.aws.lightsail_domain_entry import LightsailDomainEntry
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # Assign the domain to a variable so the entry below can reference it.
        domain_test = LightsailDomain(self, "test",
            domain_name="mydomain.com"
        )
        aws_lightsail_domain_entry_test = LightsailDomainEntry(self, "test_1",
            domain_name=domain_test.domain_name,
            name="www",
            target="127.0.0.1",
            type="A"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_domain_entry_test.override_logical_id("test")
```

## Argument Reference

This resource supports the following arguments:

* `domain_name` - (Required) The name of the Lightsail domain in which to create the entry
* `name` - (Required) Name of the entry record
* `type` - (Required) Type of record
* `target` - (Required) Target of the domain entry
* `is_alias` - (Optional) Whether the entry should be an alias. Defaults to `false`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - A combination of attributes to create a unique id: `name`,`domain_name`,`type`,`target`

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_domain_entry` using the id attribute. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_lightsail_domain_entry` using the id attribute. For example:

```console
% terraform import aws_lightsail_domain_entry.example www,mydomain.com,A,127.0.0.1
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_instance.html.markdown b/website/docs/cdktf/python/r/lightsail_instance.html.markdown
new file mode 100644
index 00000000000..fb9b37db2f3
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_instance.html.markdown
@@ -0,0 +1,213 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_instance"
description: |-
  Provides a Lightsail Instance
---

# Resource: aws_lightsail_instance

Provides a Lightsail Instance. Amazon Lightsail is a service that provides easy-to-use virtual private servers with custom software already set up. See [What is Amazon Lightsail?](https://lightsail.aws.amazon.com/ls/docs/getting-started/article/what-is-amazon-lightsail) for more information.

~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details

## Example Usage

### Basic Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_instance import LightsailInstance
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailInstance(self, "gitlab_test",
            availability_zone="us-east-1b",
            blueprint_id="amazon_linux_2",
            bundle_id="nano_1_0",
            key_pair_name="some_key_name",
            name="custom_gitlab",
            tags={
                "foo": "bar"
            }
        )
```

### Example With User Data

Lightsail user data is handled differently from EC2 user data: Lightsail only accepts a single-line string. The example below installs Apache and creates an index page.
+ +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lightsail_instance import LightsailInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LightsailInstance(self, "custom", + availability_zone="us-east-1b", + blueprint_id="amazon_linux_2", + bundle_id="nano_1_0", + name="custom", + user_data="sudo yum install -y httpd && sudo systemctl start httpd && sudo systemctl enable httpd && echo '
<h1>Deployed via Terraform</h1>
' | sudo tee /var/www/html/index.html"
        )
```

### Enable Auto Snapshots

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_instance import LightsailInstance
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailInstance(self, "test",
            add_on=LightsailInstanceAddOn(
                snapshot_time="06:00",
                status="Enabled",
                type="AutoSnapshot"
            ),
            availability_zone="us-east-1b",
            blueprint_id="amazon_linux_2",
            bundle_id="nano_1_0",
            name="custom_instance",
            tags={
                "foo": "bar"
            }
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name of the Lightsail Instance. Names must be unique within each AWS Region in your Lightsail account.
* `availability_zone` - (Required) The Availability Zone in which to create your instance (see list below)
* `blueprint_id` - (Required) The ID for a virtual private server image. A list of available blueprint IDs can be obtained using the AWS CLI command: `aws lightsail get-blueprints`
* `bundle_id` - (Required) The bundle of specification information (see list below)
* `key_pair_name` - (Optional) The name of your key pair. Created in the Lightsail console (cannot use `aws_key_pair` at this time)
* `user_data` - (Optional) Single-line launch script as a string to configure the server with additional user data
* `ip_address_type` - (Optional) The IP address type of the Lightsail Instance. Valid Values: `dualstack` | `ipv4`.
* `add_on` - (Optional) The add-on configuration for the instance. [Detailed below](#add_on).
* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### `add_on`

Defines the add-on configuration for the instance. The `add_on` configuration block supports the following arguments:

* `type` - (Required) The add-on type. There is currently only one valid type, `AutoSnapshot`.
* `snapshot_time` - (Required) The daily time when an automatic snapshot will be created. Must be in HH:00 format (hourly increments), specified in Coordinated Universal Time (UTC). The snapshot will be automatically created between the time specified and up to 45 minutes after.
* `status` - (Required) The status of the add-on. Valid Values: `Enabled`, `Disabled`.
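For contrast with the Enable Auto Snapshots example above, this hand-written sketch (not `cdktf convert` output) keeps the add-on block but sets `status` to `Disabled`, which retains the snapshot configuration while turning automatic snapshots off:

```python
# Hypothetical sketch, assuming the LightsailInstanceAddOn struct is exposed
# by the generated lightsail_instance bindings.
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.lightsail_instance import LightsailInstance, LightsailInstanceAddOn
class SnapshotsDisabled(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # Same add-on block as above, but with snapshots switched off.
        LightsailInstance(self, "test",
            add_on=LightsailInstanceAddOn(
                snapshot_time="06:00",
                status="Disabled",  # valid values: "Enabled", "Disabled"
                type="AutoSnapshot"
            ),
            availability_zone="us-east-1b",
            blueprint_id="amazon_linux_2",
            bundle_id="nano_1_0",
            name="custom_instance"
        )
```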
## Availability Zones

Lightsail currently supports the following Availability Zones (e.g., `us-east-1a`):

- `ap-northeast-1{a,c,d}`
- `ap-northeast-2{a,c}`
- `ap-south-1{a,b}`
- `ap-southeast-1{a,b,c}`
- `ap-southeast-2{a,b,c}`
- `ca-central-1{a,b}`
- `eu-central-1{a,b,c}`
- `eu-west-1{a,b,c}`
- `eu-west-2{a,b,c}`
- `eu-west-3{a,b,c}`
- `us-east-1{a,b,c,d,e,f}`
- `us-east-2{a,b,c}`
- `us-west-2{a,b,c}`

## Bundles

Lightsail currently supports the following Bundle IDs (e.g., an instance in `ap-northeast-1` would use `small_2_0`):

### Prefix

A Bundle ID starts with one of the below size prefixes:

- `nano_`
- `micro_`
- `small_`
- `medium_`
- `large_`
- `xlarge_`
- `2xlarge_`

### Suffix

A Bundle ID ends with one of the following suffixes depending on Availability Zone:

- ap-northeast-1: `2_0`
- ap-northeast-2: `2_0`
- ap-south-1: `2_1`
- ap-southeast-1: `2_0`
- ap-southeast-2: `2_2`
- ca-central-1: `2_0`
- eu-central-1: `2_0`
- eu-west-1: `2_0`
- eu-west-2: `2_0`
- eu-west-3: `2_0`
- us-east-1: `2_0`
- us-east-2: `2_0`
- us-west-2: `2_0`

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ARN of the Lightsail instance (matches `arn`).
* `arn` - The ARN of the Lightsail instance (matches `id`).
* `created_at` - The timestamp when the instance was created.
* `cpu_count` - The number of vCPUs the instance has.
* `ram_size` - The amount of RAM in GB on the instance (e.g., 1.0).
* `ipv6_addresses` - List of IPv6 addresses for the Lightsail instance.
* `private_ip_address` - The private IP address of the instance.
* `public_ip_address` - The public IP address of the instance.
* `is_static_ip` - A Boolean value indicating whether this instance has a static IP assigned to it.
* `username` - The user name for connecting to the instance (e.g., ec2-user).
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Instances using their name. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Lightsail Instances using their name. For example:

```console
% terraform import aws_lightsail_instance.gitlab_test 'custom_gitlab'
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_instance_public_ports.html.markdown b/website/docs/cdktf/python/r/lightsail_instance_public_ports.html.markdown
new file mode 100644
index 00000000000..fb042b5969b
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_instance_public_ports.html.markdown
@@ -0,0 +1,80 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_instance_public_ports"
description: |-
  Provides a Lightsail Instance Public Ports resource
---

# Resource: aws_lightsail_instance_public_ports

Opens ports for a specific Amazon Lightsail instance and specifies the IP addresses allowed to connect to the instance through the ports, as well as the protocol.
-> See [What is Amazon Lightsail?](https://lightsail.aws.amazon.com/ls/docs/getting-started/article/what-is-amazon-lightsail) for more information.

~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import property_access, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_instance import LightsailInstance
from imports.aws.lightsail_instance_public_ports import LightsailInstancePublicPorts
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # `available` refers to an aws_availability_zones data source defined elsewhere.
        test = LightsailInstance(self, "test",
            availability_zone=Token.as_string(property_access(available.names, ["0"])),
            blueprint_id="amazon_linux_2",
            bundle_id="nano_1_0",
            name="yak_sail"
        )
        aws_lightsail_instance_public_ports_test = LightsailInstancePublicPorts(self, "test_1",
            instance_name=test.name,
            port_info=[LightsailInstancePublicPortsPortInfo(
                from_port=80,
                protocol="tcp",
                to_port=80
            )
            ]
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_instance_public_ports_test.override_logical_id("test")
```

## Argument Reference

The following arguments are required:

* `instance_name` - (Required) Name of the Lightsail Instance.
* `port_info` - (Required) Configuration block with port information. AWS closes all currently open ports that are not included in the `port_info`. Detailed below.

### port_info

The following arguments are required:

* `from_port` - (Required) First port in a range of open ports on an instance.
* `protocol` - (Required) IP protocol name. Valid values are `tcp`, `all`, `udp`, and `icmp`.
* `to_port` - (Required) Last port in a range of open ports on an instance.

The following arguments are optional:

* `cidrs` - (Optional) Set of CIDR blocks.
* `cidr_list_aliases` - (Optional) Set of CIDR aliases that define access for a preconfigured range of IP addresses.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - ID of the resource.

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_key_pair.html.markdown b/website/docs/cdktf/python/r/lightsail_key_pair.html.markdown
new file mode 100644
index 00000000000..4a7c0324e86
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_key_pair.html.markdown
@@ -0,0 +1,107 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_key_pair"
description: |-
  Provides a Lightsail Key Pair
---

# Resource: aws_lightsail_key_pair

Provides a Lightsail Key Pair, for use with Lightsail Instances. These key pairs are separate from EC2 Key Pairs, and must be created or imported for use with Lightsail.
~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details

## Example Usage

### Create New Key Pair

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_key_pair import LightsailKeyPair
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailKeyPair(self, "lg_key_pair",
            name="lg_key_pair"
        )
```

### Create New Key Pair with PGP Encrypted Private Key

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_key_pair import LightsailKeyPair
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailKeyPair(self, "lg_key_pair",
            name="lg_key_pair",
            pgp_key="keybase:keybaseusername"
        )
```

### Existing Public Key Import

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_key_pair import LightsailKeyPair
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailKeyPair(self, "lg_key_pair",
            name="importing",
            public_key=Token.as_string(Fn.file("~/.ssh/id_rsa.pub"))
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Optional) The name of the Lightsail Key Pair. If omitted, a unique name will be generated by Terraform
* `pgp_key` - (Optional) An optional PGP key to encrypt the resulting private key material. Only used when creating a new key pair
* `public_key` - (Optional) The public key material. This public key will be imported into Lightsail. If not supplied, a new key pair is created, as in the first two examples above.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

~> **NOTE:** A PGP key is not required, however it is strongly encouraged. Without a PGP key, the private key material will be stored in state unencrypted. `pgp_key` is ignored if `public_key` is supplied.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name used for this key pair.
* `arn` - The ARN of the Lightsail key pair.
* `encrypted_fingerprint` - The MD5 public key fingerprint for the encrypted private key.
* `encrypted_private_key` - The private key material, base64 encoded and encrypted with the given `pgp_key`. This is only populated when creating a new key and `pgp_key` is supplied.
* `fingerprint` - The MD5 public key fingerprint as specified in section 4 of RFC 4716.
* `public_key` - The public key, base64 encoded.
* `private_key` - The private key, base64 encoded. This is only populated when creating a new key and no `pgp_key` is provided.

## Import

You cannot import Lightsail Key Pairs because the private and public key are only available on initial creation.

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_lb.html.markdown b/website/docs/cdktf/python/r/lightsail_lb.html.markdown
new file mode 100644
index 00000000000..c64abcb0203
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_lb.html.markdown
@@ -0,0 +1,80 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_lb"
description: |-
  Provides a Lightsail Load Balancer
---

# Resource: aws_lightsail_lb

Creates a Lightsail load balancer resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_lb import LightsailLb
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailLb(self, "test",
            health_check_path="/",
            instance_port=Token.as_number("80"),
            name="test-load-balancer",
            tags={
                "foo": "bar"
            }
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name of the Lightsail load balancer.
* `instance_port` - (Required) The instance port the load balancer will connect to.
* `health_check_path` - (Optional) The health check path of the load balancer. Default value `"/"`.
* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name used for this load balancer (matches `name`).
* `arn` - The ARN of the Lightsail load balancer.
* `created_at` - The timestamp when the load balancer was created.
* `dns_name` - The DNS name of the load balancer.
* `protocol` - The protocol of the load balancer.
* `public_ports` - The public ports of the load balancer.
* `support_code` - The support code for the load balancer. Include this code in your email to support when you have questions about a load balancer in Lightsail. This code enables our support team to look up your Lightsail information more easily.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_lb` using the name attribute.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_lightsail_lb` using the name attribute. For example: + +```console +% terraform import aws_lightsail_lb.test example-load-balancer +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/lightsail_lb_attachment.html.markdown b/website/docs/cdktf/python/r/lightsail_lb_attachment.html.markdown new file mode 100644 index 00000000000..b9fb08bd883 --- /dev/null +++ b/website/docs/cdktf/python/r/lightsail_lb_attachment.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Lightsail" +layout: "aws" +page_title: "AWS: aws_lightsail_lb_attachment" +description: |- + Attaches a Lightsail Instance to a Lightsail Load Balancer +--- + + + +# Resource: aws_lightsail_lb_attachment + +Attaches a Lightsail Instance to a Lightsail Load Balancer. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.lightsail_instance import LightsailInstance +from imports.aws.lightsail_lb import LightsailLb +from imports.aws.lightsail_lb_attachment import LightsailLbAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = LightsailLb(self, "test", + health_check_path="/", + instance_port=Token.as_number("80"), + name="test-load-balancer", + tags={ + "foo": "bar" + } + ) + available = DataAwsAvailabilityZones(self, "available", + filter=[DataAwsAvailabilityZonesFilter( + name="opt-in-status", + values=["opt-in-not-required"] + ) + ], + state="available" + ) + aws_lightsail_instance_test = LightsailInstance(self, "test_2", + availability_zone=Token.as_string(property_access(available.names, ["0"])), + blueprint_id="amazon_linux_2", + bundle_id="nano_1_0", + name="test-instance" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_instance_test.override_logical_id("test") + aws_lightsail_lb_attachment_test = LightsailLbAttachment(self, "test_3", + instance_name=Token.as_string(aws_lightsail_instance_test.name), + lb_name=test.name + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_lb_attachment_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `lb_name` - (Required) The name of the Lightsail load balancer. +* `instance_name` - (Required) The name of the instance to attach to the load balancer. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A combination of attributes to create a unique id: `lb_name`,`instance_name` + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_lb_attachment` using the name attribute. 
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_lightsail_lb_attachment` using the name attribute. For example:

```console
% terraform import aws_lightsail_lb_attachment.test example-load-balancer,example-instance
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_lb_certificate.html.markdown b/website/docs/cdktf/python/r/lightsail_lb_certificate.html.markdown
new file mode 100644
index 00000000000..a3061205c4e
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_lb_certificate.html.markdown
@@ -0,0 +1,85 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_lb_certificate"
description: |-
  Provides a Lightsail Load Balancer Certificate
---

# Resource: aws_lightsail_lb_certificate

Creates a Lightsail load balancer certificate resource.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_lb import LightsailLb
from imports.aws.lightsail_lb_certificate import LightsailLbCertificate
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        test = LightsailLb(self, "test",
            health_check_path="/",
            instance_port=Token.as_number("80"),
            name="test-load-balancer",
            tags={
                "foo": "bar"
            }
        )
        aws_lightsail_lb_certificate_test = LightsailLbCertificate(self, "test_1",
            domain_name="test.com",
            lb_name=test.id,
            name="test-load-balancer-certificate"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_lb_certificate_test.override_logical_id("test")
```

## Argument Reference

This resource supports the following arguments:

* `domain_name` - (Required) The domain name (e.g., example.com) for your SSL/TLS certificate.
* `lb_name` - (Required) The load balancer name where you want to create the SSL/TLS certificate.
* `name` - (Required) The SSL/TLS certificate name.
* `subject_alternative_names` - (Optional) Set of domains that should be SANs in the issued certificate. `domain_name` attribute is automatically added as a Subject Alternative Name.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - A combination of attributes to create a unique id: `lb_name`,`name`
* `arn` - The ARN of the Lightsail certificate.
* `created_at` - The timestamp when the certificate was created.
* `domain_validation_options` - Set of domain validation objects which can be used to complete certificate validation. Can have more than one element, e.g., if SANs are defined.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_lb_certificate` using the id attribute.
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_lightsail_lb_certificate` using the id attribute. For example:

```console
% terraform import aws_lightsail_lb_certificate.test example-load-balancer,example-load-balancer-certificate
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_lb_certificate_attachment.html.markdown b/website/docs/cdktf/python/r/lightsail_lb_certificate_attachment.html.markdown
new file mode 100644
index 00000000000..39c362f5295
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_lb_certificate_attachment.html.markdown
@@ -0,0 +1,87 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_lb_certificate_attachment"
description: |-
  Attaches a Lightsail Load Balancer Certificate to a Lightsail Load Balancer
---

# Resource: aws_lightsail_lb_certificate_attachment

Attaches a Lightsail Load Balancer Certificate to a Lightsail Load Balancer.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_lb import LightsailLb
from imports.aws.lightsail_lb_certificate import LightsailLbCertificate
from imports.aws.lightsail_lb_certificate_attachment import LightsailLbCertificateAttachment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        test = LightsailLb(self, "test",
            health_check_path="/",
            instance_port=Token.as_number("80"),
            name="test-load-balancer",
            tags={
                "foo": "bar"
            }
        )
        aws_lightsail_lb_certificate_test = LightsailLbCertificate(self, "test_1",
            domain_name="test.com",
            lb_name=test.id,
            name="test-load-balancer-certificate"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_lb_certificate_test.override_logical_id("test")
        aws_lightsail_lb_certificate_attachment_test = LightsailLbCertificateAttachment(self, "test_2",
            certificate_name=Token.as_string(aws_lightsail_lb_certificate_test.name),
            lb_name=test.name
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_lb_certificate_attachment_test.override_logical_id("test")
```

## Argument Reference

This resource supports the following arguments:

* `lb_name` - (Required) The name of the load balancer to which you want to associate the SSL/TLS certificate.
* `certificate_name` - (Required) The name of your SSL/TLS certificate.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - A combination of attributes to create a unique id: `lb_name`,`certificate_name`

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_lb_certificate_attachment` using the id attribute.
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_lightsail_lb_certificate_attachment` using the id attribute. For example:

```console
% terraform import aws_lightsail_lb_certificate_attachment.test example-load-balancer,example-certificate
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_lb_https_redirection_policy.html.markdown b/website/docs/cdktf/python/r/lightsail_lb_https_redirection_policy.html.markdown
new file mode 100644
index 00000000000..ebec674a426
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_lb_https_redirection_policy.html.markdown
@@ -0,0 +1,95 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_lb_https_redirection_policy"
description: |-
  Configures HTTPS Redirection for a Lightsail Load Balancer
---

# Resource: aws_lightsail_lb_https_redirection_policy

Configures HTTPS redirection for a Lightsail Load Balancer. A valid certificate must be attached to the load balancer before HTTPS redirection can be enabled.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_lb import LightsailLb
from imports.aws.lightsail_lb_certificate import LightsailLbCertificate
from imports.aws.lightsail_lb_certificate_attachment import LightsailLbCertificateAttachment
from imports.aws.lightsail_lb_https_redirection_policy import LightsailLbHttpsRedirectionPolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        test = LightsailLb(self, "test",
            health_check_path="/",
            instance_port=Token.as_number("80"),
            name="test-load-balancer",
            tags={
                "foo": "bar"
            }
        )
        aws_lightsail_lb_certificate_test = LightsailLbCertificate(self, "test_1",
            domain_name="test.com",
            lb_name=test.id,
            name="test-load-balancer-certificate"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_lb_certificate_test.override_logical_id("test")
        aws_lightsail_lb_certificate_attachment_test = LightsailLbCertificateAttachment(self, "test_2",
            certificate_name=Token.as_string(aws_lightsail_lb_certificate_test.name),
            lb_name=test.name
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_lb_certificate_attachment_test.override_logical_id("test")
        aws_lightsail_lb_https_redirection_policy_test = LightsailLbHttpsRedirectionPolicy(self, "test_3",
            enabled=True,
            lb_name=test.name
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_lb_https_redirection_policy_test.override_logical_id("test")
```

## Argument Reference

This resource supports the following arguments:

* `lb_name` - (Required) The name of the load balancer to which you want to enable HTTP to HTTPS redirection.
* `enabled` - (Required) The HTTPS redirection state of the load balancer. `true` to activate HTTP to HTTPS redirection or `false` to deactivate it.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name used for this load balancer (matches `lb_name`).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_lb_https_redirection_policy` using the `lb_name` attribute. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_lightsail_lb_https_redirection_policy` using the `lb_name` attribute. For example:

```console
% terraform import aws_lightsail_lb_https_redirection_policy.test example-load-balancer
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_lb_stickiness_policy.html.markdown b/website/docs/cdktf/python/r/lightsail_lb_stickiness_policy.html.markdown
new file mode 100644
index 00000000000..128e8e97468
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_lb_stickiness_policy.html.markdown
@@ -0,0 +1,80 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_lb_stickiness_policy"
description: |-
  Configures Session Stickiness for a Lightsail Load Balancer
---

# Resource: aws_lightsail_lb_stickiness_policy

Configures Session Stickiness for a Lightsail Load Balancer.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_lb import LightsailLb
from imports.aws.lightsail_lb_stickiness_policy import LightsailLbStickinessPolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        test = LightsailLb(self, "test",
            health_check_path="/",
            instance_port=Token.as_number("80"),
            name="test-load-balancer",
            tags={
                "foo": "bar"
            }
        )
        aws_lightsail_lb_stickiness_policy_test = LightsailLbStickinessPolicy(self, "test_1",
            cookie_duration=900,
            enabled=True,
            lb_name=test.name
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_lightsail_lb_stickiness_policy_test.override_logical_id("test")
```

## Argument Reference

This resource supports the following arguments:

* `lb_name` - (Required) The name of the load balancer to which you want to enable session stickiness.
* `cookie_duration` - (Required) The cookie duration in seconds. This determines the length of the session stickiness.
* `enabled` - (Required) The session stickiness state of the load balancer. `true` to activate session stickiness or `false` to deactivate it.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name used for this load balancer (matches `lb_name`).
## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_lightsail_lb_stickiness_policy` using the `lb_name` attribute. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_lightsail_lb_stickiness_policy` using the `lb_name` attribute. For example:

```console
% terraform import aws_lightsail_lb_stickiness_policy.test example-load-balancer
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_static_ip.html.markdown b/website/docs/cdktf/python/r/lightsail_static_ip.html.markdown
new file mode 100644
index 00000000000..79854b16295
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_static_ip.html.markdown
@@ -0,0 +1,50 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_static_ip"
description: |-
  Provides a Lightsail Static IP
---

# Resource: aws_lightsail_static_ip

Allocates a static IP address.

~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.lightsail_static_ip import LightsailStaticIp
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        LightsailStaticIp(self, "test",
            name="example"
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name for the allocated static IP

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the Lightsail static IP
* `ip_address` - The allocated static IP address
* `support_code` - The support code.

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/lightsail_static_ip_attachment.html.markdown b/website/docs/cdktf/python/r/lightsail_static_ip_attachment.html.markdown
new file mode 100644
index 00000000000..249d734a386
--- /dev/null
+++ b/website/docs/cdktf/python/r/lightsail_static_ip_attachment.html.markdown
@@ -0,0 +1,66 @@
---
subcategory: "Lightsail"
layout: "aws"
page_title: "AWS: aws_lightsail_static_ip_attachment"
description: |-
  Provides a Lightsail Static IP Attachment
---

# Resource: aws_lightsail_static_ip_attachment

Provides a static IP address attachment: the relationship between a Lightsail static IP and a Lightsail instance.
+ +~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.lightsail_instance import LightsailInstance +from imports.aws.lightsail_static_ip import LightsailStaticIp +from imports.aws.lightsail_static_ip_attachment import LightsailStaticIpAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = LightsailInstance(self, "test", + availability_zone="us-east-1b", + blueprint_id="string", + bundle_id="string", + key_pair_name="some_key_name", + name="example" + ) + aws_lightsail_static_ip_test = LightsailStaticIp(self, "test_1", + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_static_ip_test.override_logical_id("test") + aws_lightsail_static_ip_attachment_test = LightsailStaticIpAttachment(self, "test_2", + instance_name=test.id, + static_ip_name=Token.as_string(aws_lightsail_static_ip_test.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_lightsail_static_ip_attachment_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `static_ip_name` - (Required) The name of the allocated static IP +* `instance_name` - (Required) The name of the Lightsail instance to attach the IP to + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `ip_address` - The allocated static IP address + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/load_balancer_backend_server_policy.html.markdown b/website/docs/cdktf/python/r/load_balancer_backend_server_policy.html.markdown new file mode 100644 index 00000000000..063bc636906 --- /dev/null +++ b/website/docs/cdktf/python/r/load_balancer_backend_server_policy.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "ELB Classic" +layout: "aws" +page_title: "AWS: aws_load_balancer_backend_server_policy" +description: |- + Attaches a load balancer policy to an ELB backend server. +--- + + + +# Resource: aws_load_balancer_backend_server_policy + +Attaches a load balancer policy to an ELB backend server. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
#
from imports.aws.elb import Elb
from imports.aws.load_balancer_backend_server_policy import LoadBalancerBackendServerPolicy
from imports.aws.load_balancer_policy import LoadBalancerPolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        wu_tang = Elb(self, "wu-tang",
            availability_zones=["us-east-1a"],
            listener=[ElbListener(
                instance_port=443,
                instance_protocol="http",
                lb_port=443,
                lb_protocol="https",
                ssl_certificate_id="arn:aws:iam::000000000000:server-certificate/wu-tang.net"
            )
            ],
            name="wu-tang",
            tags={
                "Name": "wu-tang"
            }
        )
        # Assign the public key policy to a variable so the backend auth policy below can reference it.
        wu_tang_root_ca_pubkey_policy = LoadBalancerPolicy(self, "wu-tang-ca-pubkey-policy",
            load_balancer_name=wu_tang.name,
            policy_attribute=[LoadBalancerPolicyPolicyAttribute(
                name="PublicKey",
                value=Token.as_string(Fn.file("wu-tang-pubkey"))
            )
            ],
            policy_name="wu-tang-ca-pubkey-policy",
            policy_type_name="PublicKeyPolicyType"
        )
        wu_tang_root_ca_backend_auth_policy = LoadBalancerPolicy(self, "wu-tang-root-ca-backend-auth-policy",
            load_balancer_name=wu_tang.name,
            policy_attribute=[LoadBalancerPolicyPolicyAttribute(
                name="PublicKeyPolicyName",
                value=wu_tang_root_ca_pubkey_policy.policy_name
            )
            ],
            policy_name="wu-tang-root-ca-backend-auth-policy",
            policy_type_name="BackendServerAuthenticationPolicyType"
        )
        LoadBalancerBackendServerPolicy(self, "wu-tang-backend-auth-policies-443",
            instance_port=443,
            load_balancer_name=wu_tang.name,
            policy_names=[wu_tang_root_ca_backend_auth_policy.policy_name]
        )
```

## Argument Reference

This resource supports the following arguments:

* `load_balancer_name` - (Required) The load balancer to attach the policy to.
* `policy_names` - (Required) List of Policy Names to apply to the backend server.
* `instance_port` - (Required) The instance port to apply the policy to.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the policy.
* `load_balancer_name` - The load balancer on which the policy is defined.
* `instance_port` - The backend port the policies are applied to

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/load_balancer_listener_policy.html.markdown b/website/docs/cdktf/python/r/load_balancer_listener_policy.html.markdown
new file mode 100644
index 00000000000..15352553670
--- /dev/null
+++ b/website/docs/cdktf/python/r/load_balancer_listener_policy.html.markdown
@@ -0,0 +1,137 @@
---
subcategory: "ELB Classic"
layout: "aws"
page_title: "AWS: aws_load_balancer_listener_policy"
description: |-
  Attaches a load balancer policy to an ELB Listener.
---

# Resource: aws_load_balancer_listener_policy

Attaches a load balancer policy to an ELB Listener.

## Example Usage

### Custom Policy

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.elb import Elb +from imports.aws.load_balancer_listener_policy import LoadBalancerListenerPolicy +from imports.aws.load_balancer_policy import LoadBalancerPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + wu_tang = Elb(self, "wu-tang", + availability_zones=["us-east-1a"], + listener=[ElbListener( + instance_port=443, + instance_protocol="http", + lb_port=443, + lb_protocol="https", + ssl_certificate_id="arn:aws:iam::000000000000:server-certificate/wu-tang.net" + ) + ], + name="wu-tang", + tags={ + "Name": "wu-tang" + } + ) + wu_tang_ssl = LoadBalancerPolicy(self, "wu-tang-ssl", + load_balancer_name=wu_tang.name, + policy_attribute=[LoadBalancerPolicyPolicyAttribute( + name="ECDHE-ECDSA-AES128-GCM-SHA256", + value="true" + ), LoadBalancerPolicyPolicyAttribute( + name="Protocol-TLSv1.2", + value="true" + ) + ], + policy_name="wu-tang-ssl", + policy_type_name="SSLNegotiationPolicyType" + ) + LoadBalancerListenerPolicy(self, "wu-tang-listener-policies-443", + load_balancer_name=wu_tang.name, + load_balancer_port=443, + policy_names=[wu_tang_ssl.policy_name] + ) +``` + +This example shows how to customize the TLS settings of an HTTPS listener. + +### AWS Predefined Security Policy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.elb import Elb +from imports.aws.load_balancer_listener_policy import LoadBalancerListenerPolicy +from imports.aws.load_balancer_policy import LoadBalancerPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + wu_tang = Elb(self, "wu-tang", + availability_zones=["us-east-1a"], + listener=[ElbListener( + instance_port=443, + instance_protocol="http", + lb_port=443, + lb_protocol="https", + ssl_certificate_id="arn:aws:iam::000000000000:server-certificate/wu-tang.net" + ) + ], + name="wu-tang", + tags={ + "Name": "wu-tang" + } + ) + wu_tang_ssl_tls11 = LoadBalancerPolicy(self, "wu-tang-ssl-tls-1-1", + load_balancer_name=wu_tang.name, + policy_attribute=[LoadBalancerPolicyPolicyAttribute( + name="Reference-Security-Policy", + value="ELBSecurityPolicy-TLS-1-1-2017-01" + ) + ], + policy_name="wu-tang-ssl", + policy_type_name="SSLNegotiationPolicyType" + ) + LoadBalancerListenerPolicy(self, "wu-tang-listener-policies-443", + load_balancer_name=wu_tang.name, + load_balancer_port=443, + policy_names=[wu_tang_ssl_tls11.policy_name] + ) +``` + +This example shows how to add a [Predefined Security Policy for ELBs](https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-security-policy-table.html) + +## Argument Reference + +This resource supports the following arguments: + +* `load_balancer_name` - (Required) The load balancer to attach the policy to. +* `load_balancer_port` - (Required) The load balancer listener port to apply the policy to. +* `policy_names` - (Required) List of Policy Names to apply to the backend server. +* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger an update. To force an update without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html). 
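Because `triggers` is an arbitrary map of strings, one pattern is to derive its value from the policy names so the listener policies are re-applied whenever that list changes. The following hand-written sketch illustrates the idea, reusing `wu_tang` and `wu_tang_ssl` from the Custom Policy example above; the hashing scheme is an illustrative assumption, while `Fn.join` and `Fn.sha1` are the cdktf bindings for the Terraform functions of the same names:

```python
from cdktf import Fn  # wu_tang and wu_tang_ssl come from the example above

# A minimal sketch: hash the policy name list into a trigger value so that
# changing the set of policies forces this resource to update.
LoadBalancerListenerPolicy(self, "wu-tang-listener-policies-443",
    load_balancer_name=wu_tang.name,
    load_balancer_port=443,
    policy_names=[wu_tang_ssl.policy_name],
    triggers={
        # Re-applied whenever the joined policy-name string changes.
        "policy_names_hash": Fn.sha1(Fn.join(",", [wu_tang_ssl.policy_name]))
    }
)
```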
## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the policy.
* `load_balancer_name` - The load balancer on which the policy is defined.
* `load_balancer_port` - The load balancer listener port the policies are applied to

 \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/load_balancer_policy.html.markdown b/website/docs/cdktf/python/r/load_balancer_policy.html.markdown
new file mode 100644
index 00000000000..7ca5ed7035e
--- /dev/null
+++ b/website/docs/cdktf/python/r/load_balancer_policy.html.markdown
@@ -0,0 +1,120 @@
---
subcategory: "ELB Classic"
layout: "aws"
page_title: "AWS: aws_load_balancer_policy"
description: |-
  Provides a load balancer policy, which can be attached to an ELB listener or backend server.
---

# Resource: aws_load_balancer_policy

Provides a load balancer policy, which can be attached to an ELB listener or backend server.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.elb import Elb
from imports.aws.load_balancer_backend_server_policy import LoadBalancerBackendServerPolicy
from imports.aws.load_balancer_listener_policy import LoadBalancerListenerPolicy
from imports.aws.load_balancer_policy import LoadBalancerPolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        wu_tang = Elb(self, "wu-tang",
            availability_zones=["us-east-1a"],
            listener=[ElbListener(
                instance_port=443,
                instance_protocol="http",
                lb_port=443,
                lb_protocol="https",
                ssl_certificate_id="arn:aws:iam::000000000000:server-certificate/wu-tang.net"
            )
            ],
            name="wu-tang",
            tags={
                "Name": "wu-tang"
            }
        )
        # Assign the public key policy to a variable so the backend auth policy below can reference it.
        wu_tang_root_ca_pubkey_policy = LoadBalancerPolicy(self, "wu-tang-ca-pubkey-policy",
            load_balancer_name=wu_tang.name,
            policy_attribute=[LoadBalancerPolicyPolicyAttribute(
                name="PublicKey",
                value=Token.as_string(Fn.file("wu-tang-pubkey"))
            )
            ],
            policy_name="wu-tang-ca-pubkey-policy",
            policy_type_name="PublicKeyPolicyType"
        )
        wu_tang_root_ca_backend_auth_policy = LoadBalancerPolicy(self, "wu-tang-root-ca-backend-auth-policy",
            load_balancer_name=wu_tang.name,
            policy_attribute=[LoadBalancerPolicyPolicyAttribute(
                name="PublicKeyPolicyName",
                value=wu_tang_root_ca_pubkey_policy.policy_name
            )
            ],
            policy_name="wu-tang-root-ca-backend-auth-policy",
            policy_type_name="BackendServerAuthenticationPolicyType"
        )
        wu_tang_ssl = LoadBalancerPolicy(self, "wu-tang-ssl",
            load_balancer_name=wu_tang.name,
            policy_attribute=[LoadBalancerPolicyPolicyAttribute(
                name="ECDHE-ECDSA-AES128-GCM-SHA256",
                value="true"
            ), LoadBalancerPolicyPolicyAttribute(
                name="Protocol-TLSv1.2",
                value="true"
            )
            ],
            policy_name="wu-tang-ssl",
            policy_type_name="SSLNegotiationPolicyType"
        )
        LoadBalancerPolicy(self, "wu-tang-ssl-tls-1-1",
            load_balancer_name=wu_tang.name,
            policy_attribute=[LoadBalancerPolicyPolicyAttribute(
                name="Reference-Security-Policy",
                value="ELBSecurityPolicy-TLS-1-1-2017-01"
            )
            ],
            policy_name="wu-tang-ssl",
            policy_type_name="SSLNegotiationPolicyType"
        )
        LoadBalancerBackendServerPolicy(self, "wu-tang-backend-auth-policies-443",
            instance_port=443,
            load_balancer_name=wu_tang.name,
policy_names=[wu_tang_root_ca_backend_auth_policy.policy_name] + ) + LoadBalancerListenerPolicy(self, "wu-tang-listener-policies-443", + load_balancer_name=wu_tang.name, + load_balancer_port=443, + policy_names=[wu_tang_ssl.policy_name] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `load_balancer_name` - (Required) The load balancer on which the policy is defined. +* `policy_name` - (Required) The name of the load balancer policy. +* `policy_type_name` - (Required) The policy type. +* `policy_attribute` - (Optional) Policy attribute to apply to the policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the policy. +* `policy_name` - The name of the stickiness policy. +* `policy_type_name` - The policy type of the policy. +* `load_balancer_name` - The load balancer on which the policy is defined. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/location_geofence_collection.html.markdown b/website/docs/cdktf/python/r/location_geofence_collection.html.markdown new file mode 100644 index 00000000000..b78537aecf8 --- /dev/null +++ b/website/docs/cdktf/python/r/location_geofence_collection.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_geofence_collection" +description: |- + Terraform resource for managing an AWS Location Geofence Collection. +--- + + + +# Resource: aws_location_geofence_collection + +Terraform resource for managing an AWS Location Geofence Collection. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.location_geofence_collection import LocationGeofenceCollection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LocationGeofenceCollection(self, "example", + collection_name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `collection_name` - (Required) The name of the geofence collection. + +The following arguments are optional: + +* `description` - (Optional) The optional description for the geofence collection. +* `kms_key_id` - (Optional) A key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource. +* `tags` - (Optional) Key-value tags for the geofence collection. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `collection_arn` - The Amazon Resource Name (ARN) for the geofence collection resource. Used when you need to specify a resource across all AWS. +* `create_time` - The timestamp for when the geofence collection resource was created in ISO 8601 format. +* `update_time` - The timestamp for when the geofence collection resource was last updated in ISO 8601 format. 
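+
+A hedged sketch of the optional `kms_key_id` and `tags` arguments above; the KMS key resource and all names are assumptions for illustration:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.kms_key import KmsKey
+from imports.aws.location_geofence_collection import LocationGeofenceCollection
+class EncryptedGeofenceCollectionSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Customer managed key (assumed) used to encrypt the collection.
+        key = KmsKey(self, "example_key",
+            description="KMS key for the geofence collection",
+            deletion_window_in_days=7
+        )
+        LocationGeofenceCollection(self, "example",
+            collection_name="example",
+            kms_key_id=key.key_id,
+            tags={
+                "Name": "example"
+            }
+        )
+```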
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Location Geofence Collection using the `collection_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Location Geofence Collection using the `collection_name`. For example: + +```console +% terraform import aws_location_geofence_collection.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/location_map.html.markdown b/website/docs/cdktf/python/r/location_map.html.markdown new file mode 100644 index 00000000000..94cea8abfd2 --- /dev/null +++ b/website/docs/cdktf/python/r/location_map.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_map" +description: |- + Provides a Location Service Map. +--- + + + +# Resource: aws_location_map + +Provides a Location Service Map. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.location_map import LocationMap +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LocationMap(self, "example", + configuration=LocationMapConfiguration( + style="VectorHereBerlin" + ), + map_name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `configuration` - (Required) Configuration block with the map style selected from an available data provider. Detailed below. +* `map_name` - (Required) The name for the map resource. + +The following arguments are optional: + +* `description` - (Optional) An optional description for the map resource. +* `tags` - (Optional) Key-value tags for the map. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### configuration + +The following arguments are required: + +* `style` - (Required) Specifies the map style selected from an available data provider. Valid values can be found in the [Location Service CreateMap API Reference](https://docs.aws.amazon.com/location/latest/APIReference/API_CreateMap.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `create_time` - The timestamp for when the map resource was created in ISO 8601 format. +* `map_arn` - The Amazon Resource Name (ARN) for the map resource. Used to specify a resource across all AWS. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
+* `update_time` - The timestamp for when the map resource was last updated in ISO 8601 format. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_location_map` resources using the map name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_location_map` resources using the map name. For example: + +```console +% terraform import aws_location_map.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/location_place_index.html.markdown b/website/docs/cdktf/python/r/location_place_index.html.markdown new file mode 100644 index 00000000000..c8fc4e58748 --- /dev/null +++ b/website/docs/cdktf/python/r/location_place_index.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_place_index" +description: |- + Provides a Location Service Place Index. +--- + + + +# Resource: aws_location_place_index + +Provides a Location Service Place Index. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.location_place_index import LocationPlaceIndex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LocationPlaceIndex(self, "example", + data_source="Here", + index_name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `data_source` - (Required) Specifies the geospatial data provider for the new place index. +* `index_name` - (Required) The name of the place index resource. + +The following arguments are optional: + +* `data_source_configuration` - (Optional) Configuration block with the data storage option chosen for requesting Places. Detailed below. +* `description` - (Optional) The optional description for the place index resource. +* `tags` - (Optional) Key-value tags for the place index. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### data_source_configuration + +The following arguments are optional: + +* `intended_use` - (Optional) Specifies how the results of an operation will be stored by the caller. Valid values: `SingleUse`, `Storage`. Default: `SingleUse`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `create_time` - The timestamp for when the place index resource was created in ISO 8601 format. +* `index_arn` - The Amazon Resource Name (ARN) for the place index resource. Used to specify a resource across AWS. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
+* `update_time` - The timestamp for when the place index resource was last updated in ISO 8601 format.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_location_place_index` resources using the place index name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_location_place_index` resources using the place index name. For example:
+
+```console
+% terraform import aws_location_place_index.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/location_route_calculator.html.markdown b/website/docs/cdktf/python/r/location_route_calculator.html.markdown
new file mode 100644
index 00000000000..680e7429199
--- /dev/null
+++ b/website/docs/cdktf/python/r/location_route_calculator.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_route_calculator"
+description: |-
+  Provides a Location Service Route Calculator.
+---
+
+
+
+# Resource: aws_location_route_calculator
+
+Provides a Location Service Route Calculator.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.location_route_calculator import LocationRouteCalculator
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LocationRouteCalculator(self, "example",
+            calculator_name="example",
+            data_source="Here"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `calculator_name` - (Required) The name of the route calculator resource.
+* `data_source` - (Required) Specifies the data provider of traffic and road network data.
+
+The following arguments are optional:
+
+* `description` - (Optional) The optional description for the route calculator resource.
+* `tags` - (Optional) Key-value tags for the route calculator. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `calculator_arn` - The Amazon Resource Name (ARN) for the route calculator resource. Use the ARN when you specify a resource across AWS.
+* `create_time` - The timestamp for when the route calculator resource was created in ISO 8601 format.
+* `update_time` - The timestamp for when the route calculator resource was last updated in ISO 8601 format.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_location_route_calculator` using the route calculator name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_location_route_calculator` using the route calculator name. For example: + +```console +% terraform import aws_location_route_calculator.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/location_tracker.html.markdown b/website/docs/cdktf/python/r/location_tracker.html.markdown new file mode 100644 index 00000000000..cbe6b5f94c5 --- /dev/null +++ b/website/docs/cdktf/python/r/location_tracker.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_tracker" +description: |- + Provides a Location Service Tracker. +--- + + + +# Resource: aws_location_tracker + +Provides a Location Service Tracker. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.location_tracker import LocationTracker +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + LocationTracker(self, "example", + tracker_name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `tracker_name` - (Required) The name of the tracker resource. + +The following arguments are optional: + +* `description` - (Optional) The optional description for the tracker resource. +* `kms_key_id` - (Optional) A key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource. +* `position_filtering` - (Optional) The position filtering method of the tracker resource. Valid values: `TimeBased`, `DistanceBased`, `AccuracyBased`. Default: `TimeBased`. +* `tags` - (Optional) Key-value tags for the tracker. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `create_time` - The timestamp for when the tracker resource was created in ISO 8601 format. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `tracker_arn` - The Amazon Resource Name (ARN) for the tracker resource. Used when you need to specify a resource across all AWS. +* `update_time` - The timestamp for when the tracker resource was last updated in ISO 8601 format. 
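+
+A short sketch of the `position_filtering` argument described above; the tracker name and description are illustrative assumptions:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.location_tracker import LocationTracker
+class FilteredTrackerSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        LocationTracker(self, "example",
+            tracker_name="example-accuracy",
+            # Keep only the most accurate of closely spaced device updates.
+            position_filtering="AccuracyBased",
+            description="Tracker using accuracy-based position filtering"
+        )
+```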
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_location_tracker` resources using the tracker name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_location_tracker` resources using the tracker name. For example:
+
+```console
+% terraform import aws_location_tracker.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/location_tracker_association.html.markdown b/website/docs/cdktf/python/r/location_tracker_association.html.markdown
new file mode 100644
index 00000000000..e0813e19eab
--- /dev/null
+++ b/website/docs/cdktf/python/r/location_tracker_association.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_tracker_association"
+description: |-
+  Terraform resource for managing an AWS Location Tracker Association.
+---
+
+
+
+# Resource: aws_location_tracker_association
+
+Terraform resource for managing an AWS Location Tracker Association.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.location_geofence_collection import LocationGeofenceCollection
+from imports.aws.location_tracker import LocationTracker
+from imports.aws.location_tracker_association import LocationTrackerAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = LocationGeofenceCollection(self, "example",
+            collection_name="example"
+        )
+        aws_location_tracker_example = LocationTracker(self, "example_1",
+            tracker_name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_location_tracker_example.override_logical_id("example")
+        aws_location_tracker_association_example = LocationTrackerAssociation(self, "example_2",
+            consumer_arn=example.collection_arn,
+            tracker_name=Token.as_string(aws_location_tracker_example.tracker_name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_location_tracker_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `consumer_arn` - (Required) The Amazon Resource Name (ARN) for the geofence collection to be associated with the tracker resource. Used when you need to specify a resource across all AWS.
+* `tracker_name` - (Required) The name of the tracker resource to be associated with a geofence collection.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+ +## Timeouts + +`aws_location_tracker_association` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options: + +* `create` - (Optional, Default: `30m`) +* `delete` - (Optional, Default: `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Location Tracker Association using the `tracker_name|consumer_arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Location Tracker Association using the `tracker_name|consumer_arn`. For example: + +```console +% terraform import aws_location_tracker_association.example "tracker_name|consumer_arn" +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/macie2_account.html.markdown b/website/docs/cdktf/python/r/macie2_account.html.markdown new file mode 100644 index 00000000000..af751ab1885 --- /dev/null +++ b/website/docs/cdktf/python/r/macie2_account.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "Macie" +layout: "aws" +page_title: "AWS: aws_macie2_account" +description: |- + Provides a resource to manage Amazon Macie on an AWS Account. +--- + + + +# Resource: aws_macie2_account + +Provides a resource to manage an [AWS Macie Account](https://docs.aws.amazon.com/macie/latest/APIReference/macie.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.macie2_account import Macie2Account +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Macie2Account(self, "test", + finding_publishing_frequency="FIFTEEN_MINUTES", + status="ENABLED" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `finding_publishing_frequency` - (Optional) Specifies how often to publish updates to policy findings for the account. This includes publishing updates to AWS Security Hub and Amazon EventBridge (formerly called Amazon CloudWatch Events). Valid values are `FIFTEEN_MINUTES`, `ONE_HOUR` or `SIX_HOURS`. +* `status` - (Optional) Specifies the status for the account. To enable Amazon Macie and start all Macie activities for the account, set this value to `ENABLED`. Valid values are `ENABLED` or `PAUSED`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) of the macie account. +* `service_role` - The Amazon Resource Name (ARN) of the service-linked role that allows Macie to monitor and analyze data in AWS resources for the account. +* `created_at` - The date and time, in UTC and extended RFC 3339 format, when the Amazon Macie account was created. +* `updated_at` - The date and time, in UTC and extended RFC 3339 format, of the most recent change to the status of the Macie account. 
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_account` using the id. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_macie2_account` using the id. For example:
+
+```console
+% terraform import aws_macie2_account.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/macie2_classification_export_configuration.html.markdown b/website/docs/cdktf/python/r/macie2_classification_export_configuration.html.markdown
new file mode 100644
index 00000000000..7c87542316e
--- /dev/null
+++ b/website/docs/cdktf/python/r/macie2_classification_export_configuration.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_classification_export_configuration"
+description: |-
+  Provides a resource to manage Classification Results - Export Configuration
+---
+
+
+
+# Resource: aws_macie2_classification_export_configuration
+
+Provides a resource to manage an [Amazon Macie Classification Export Configuration](https://docs.aws.amazon.com/macie/latest/APIReference/classification-export-configuration.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.macie2_account import Macie2Account
+from imports.aws.macie2_classification_export_configuration import Macie2ClassificationExportConfiguration, Macie2ClassificationExportConfigurationS3Destination
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Macie2Account(self, "example")
+        aws_macie2_classification_export_configuration_example = Macie2ClassificationExportConfiguration(self, "example_1",
+            depends_on=[example],
+            s3_destination=Macie2ClassificationExportConfigurationS3Destination(
+                bucket_name=Token.as_string(aws_s3_bucket_example.bucket),
+                key_prefix="exampleprefix/",
+                kms_key_arn=Token.as_string(aws_kms_key_example.arn)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_macie2_classification_export_configuration_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `s3_destination` - (Required) Configuration block for an S3 destination. Defined below.
+
+### s3_destination Configuration Block
+
+The `s3_destination` configuration block supports the following arguments:
+
+* `bucket_name` - (Required) The Amazon S3 bucket name in which Amazon Macie exports the data classification results.
+* `key_prefix` - (Optional) The object key for the bucket in which Amazon Macie exports the data classification results.
+* `kms_key_arn` - (Required) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data.
+
+Additional information can be found in the [Storing and retaining sensitive data discovery results with Amazon Macie](https://docs.aws.amazon.com/macie/latest/user/discovery-results-repository-s3.html) documentation.
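+
+The example above references an S3 bucket (`aws_s3_bucket_example`) and a KMS key (`aws_kms_key_example`) that are assumed to be defined elsewhere in the stack. A minimal sketch of those prerequisites, with illustrative names and omitting the bucket and key policies that grant Macie access:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.kms_key import KmsKey
+from imports.aws.s3_bucket import S3Bucket
+class ExportPrerequisitesSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Bucket (assumed name) that receives the classification results.
+        aws_s3_bucket_example = S3Bucket(self, "bucket_example",
+            bucket="example-macie-results"
+        )
+        # Key (assumed) that Macie uses to encrypt the exported results.
+        aws_kms_key_example = KmsKey(self, "key_example",
+            description="KMS key for Macie classification results",
+            deletion_window_in_days=7
+        )
+```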
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_classification_export_configuration` using the account ID and region. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_macie2_classification_export_configuration` using the account ID and region. For example:
+
+```console
+% terraform import aws_macie2_classification_export_configuration.example 123456789012:us-west-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/macie2_classification_job.html.markdown b/website/docs/cdktf/python/r/macie2_classification_job.html.markdown
new file mode 100644
index 00000000000..9830e1ae1a4
--- /dev/null
+++ b/website/docs/cdktf/python/r/macie2_classification_job.html.markdown
@@ -0,0 +1,170 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_classification_job"
+description: |-
+  Provides a resource to manage an AWS Macie Classification Job.
+---
+
+
+
+# Resource: aws_macie2_classification_job
+
+Provides a resource to manage an [AWS Macie Classification Job](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.macie2_account import Macie2Account
+from imports.aws.macie2_classification_job import Macie2ClassificationJob, Macie2ClassificationJobS3JobDefinition, Macie2ClassificationJobS3JobDefinitionBucketDefinitions
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = Macie2Account(self, "test")
+        aws_macie2_classification_job_test = Macie2ClassificationJob(self, "test_1",
+            depends_on=[test],
+            job_type="ONE_TIME",
+            name="NAME OF THE CLASSIFICATION JOB",
+            s3_job_definition=Macie2ClassificationJobS3JobDefinition(
+                bucket_definitions=[Macie2ClassificationJobS3JobDefinitionBucketDefinitions(
+                    account_id="ACCOUNT ID",
+                    buckets=["S3 BUCKET NAME"]
+                )
+                ]
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_macie2_classification_job_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `schedule_frequency` - (Optional) The recurrence pattern for running the job. To run the job only once, don't specify a value for this property and set the value for the `job_type` property to `ONE_TIME`. (documented below)
+* `custom_data_identifier_ids` - (Optional) The custom data identifiers to use for data analysis and classification.
+* `sampling_percentage` - (Optional) The sampling depth, as a percentage, to apply when processing objects. This value determines the percentage of eligible objects that the job analyzes. If this value is less than 100, Amazon Macie selects the objects to analyze at random, up to the specified percentage, and analyzes all the data in those objects.
+* `name` - (Optional) A custom name for the job. The name can contain as many as 500 characters. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`.
+* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) A custom description of the job. The description can contain as many as 200 characters.
+* `initial_run` - (Optional) Specifies whether to analyze all existing, eligible objects immediately after the job is created.
+* `job_type` - (Required) The schedule for running the job. Valid values are: `ONE_TIME` - Run the job only once. If you specify this value, don't specify a value for the `schedule_frequency` property. `SCHEDULED` - Run the job on a daily, weekly, or monthly basis. If you specify this value, use the `schedule_frequency` property to define the recurrence pattern for the job.
+* `s3_job_definition` - (Optional) The S3 buckets that contain the objects to analyze, and the scope of that analysis. (documented below)
+* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the job. A job can have a maximum of 50 tags. Each tag consists of a tag key and an associated tag value. The maximum length of a tag key is 128 characters. The maximum length of a tag value is 256 characters.
+* `job_status` - (Optional) The status for the job. Valid values are: `CANCELLED`, `RUNNING` and `USER_PAUSED`.
+
+The `schedule_frequency` object supports the following:
+
+* `daily_schedule` - (Optional) Specifies a daily recurrence pattern for running the job.
+* `weekly_schedule` - (Optional) Specifies a weekly recurrence pattern for running the job.
+* `monthly_schedule` - (Optional) Specifies a monthly recurrence pattern for running the job.
+
+The `s3_job_definition` object supports the following:
+
+* `bucket_criteria` - (Optional) The property- and tag-based conditions that determine which S3 buckets to include or exclude from the analysis. Conflicts with `bucket_definitions`. (documented below)
+* `bucket_definitions` - (Optional) An array of objects, one for each AWS account that owns buckets to analyze. Each object specifies the account ID for an account and one or more buckets to analyze for the account. Conflicts with `bucket_criteria`. (documented below)
+* `scoping` - (Optional) The property- and tag-based conditions that determine which objects to include or exclude from the analysis. (documented below)
+
+### bucket_criteria Configuration Block
+
+The `bucket_criteria` object supports the following:
+
+* `excludes` - (Optional) The property- or tag-based conditions that determine which S3 buckets to exclude from the analysis. (documented below)
+* `includes` - (Optional) The property- or tag-based conditions that determine which S3 buckets to include in the analysis. (documented below)
+
+The `excludes` and `includes` objects support the following:
+
+* `and` - (Optional) An array of conditions, one for each condition that determines which S3 buckets to include or exclude from the job. (documented below)
+
+The `and` object supports the following:
+
+* `simple_criterion` - (Optional) A property-based condition that defines a property, operator, and one or more values for including or excluding an S3 bucket from the job. (documented below)
+* `tag_criterion` - (Optional) A tag-based condition that defines the operator and tag keys or tag key and value pairs for including or excluding an S3 bucket from the job. (documented below)
+
+The `simple_criterion` object supports the following:
+
+* `comparator` - (Required) The operator to use in a condition. Valid combinations of values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-jobcomparator)
+* `key` - (Required) The object property to use in the condition. Valid combinations of values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-simplecriterionkeyforjob)
+* `values` - (Required) An array that lists the values to use in the condition. Valid combinations of values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-simplecriterionforjob)
+
+The `tag_criterion` object supports the following:
+
+* `comparator` - (Required) The operator to use in the condition. Valid combinations and values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-jobcomparator)
+* `tag_values` - (Required) The tag key and value pairs to use in the condition. One or more blocks are allowed. (documented below)
+
+The `tag_values` object supports the following:
+
+* `key` - (Required) The tag key.
+* `value` - (Required) The tag value.
+
+### bucket_definitions Configuration Block
+
+The `bucket_definitions` object supports the following:
+
+* `account_id` - (Required) The unique identifier for the AWS account that owns the buckets.
+* `buckets` - (Required) An array that lists the names of the buckets.
+
+### scoping Configuration Block
+
+The `scoping` object supports the following:
+
+* `excludes` - (Optional) The property- or tag-based conditions that determine which objects to exclude from the analysis. (documented below)
+* `includes` - (Optional) The property- or tag-based conditions that determine which objects to include in the analysis. (documented below)
+
+The `excludes` and `includes` objects support the following:
+
+* `and` - (Optional) An array of conditions, one for each condition that determines which objects to include or exclude from the job. (documented below)
+
+The `and` object supports the following:
+
+* `simple_scope_term` - (Optional) A property-based condition that defines a property, operator, and one or more values for including or excluding an object from the job. (documented below)
+* `tag_scope_term` - (Optional) A tag-based condition that defines the operator and tag keys or tag key and value pairs for including or excluding an object from the job. (documented below)
+
+The `simple_scope_term` object supports the following:
+
+* `comparator` - (Optional) The operator to use in a condition. Valid values are: `EQ`, `GT`, `GTE`, `LT`, `LTE`, `NE`, `CONTAINS`, `STARTS_WITH`
+* `values` - (Optional) An array that lists the values to use in the condition.
+* `key` - (Optional) The object property to use in the condition.
+
+The `tag_scope_term` object supports the following:
+
+* `comparator` - (Optional) The operator to use in the condition.
+* `tag_values` - (Optional) The tag keys or tag key and value pairs to use in the condition.
+* `key` - (Required) The tag key to use in the condition. The only valid value is `TAG`.
+* `target` - (Required) The type of object to apply the condition to. The only valid value is `S3_OBJECT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the macie classification job.
+* `created_at` - The date and time, in UTC and extended RFC 3339 format, when the job was created.
+* `user_paused_details` - If the current status of the job is `USER_PAUSED`, specifies when the job was paused and when the job or job run will expire and be cancelled if it isn't resumed. This value is present only if the value for `job-status` is `USER_PAUSED`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_classification_job` using the id. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_macie2_classification_job` using the id. For example:
+
+```console
+% terraform import aws_macie2_classification_job.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/macie2_custom_data_identifier.html.markdown b/website/docs/cdktf/python/r/macie2_custom_data_identifier.html.markdown
new file mode 100644
index 00000000000..3be8f2a41bf
--- /dev/null
+++ b/website/docs/cdktf/python/r/macie2_custom_data_identifier.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_custom_data_identifier"
+description: |-
+  Provides a resource to manage an AWS Macie Custom Data Identifier.
+---
+
+
+
+# Resource: aws_macie2_custom_data_identifier
+
+Provides a resource to manage an [AWS Macie Custom Data Identifier](https://docs.aws.amazon.com/macie/latest/APIReference/custom-data-identifiers-id.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.macie2_account import Macie2Account
+from imports.aws.macie2_custom_data_identifier import Macie2CustomDataIdentifier
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Macie2Account(self, "example")
+        aws_macie2_custom_data_identifier_example = Macie2CustomDataIdentifier(self, "example_1",
+            depends_on=[example],
+            description="DESCRIPTION",
+            ignore_words=["ignore"],
+            keywords=["keyword"],
+            maximum_match_distance=10,
+            name="NAME OF CUSTOM DATA IDENTIFIER",
+            regex="[0-9]{3}-[0-9]{2}-[0-9]{4}"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_macie2_custom_data_identifier_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `regex` - (Optional) The regular expression (regex) that defines the pattern to match. The expression can contain as many as 512 characters.
+* `keywords` - (Optional) An array that lists specific character sequences (keywords), one of which must be within proximity (`maximum_match_distance`) of the regular expression to match. The array can contain as many as 50 keywords. Each keyword can contain 3 - 90 characters. Keywords aren't case sensitive.
+* `ignore_words` - (Optional) An array that lists specific character sequences (ignore words) to exclude from the results. If the text matched by the regular expression is the same as any string in this array, Amazon Macie ignores it. The array can contain as many as 10 ignore words. Each ignore word can contain 4 - 90 characters. Ignore words are case sensitive.
+* `name` - (Optional) A custom name for the custom data identifier. The name can contain as many as 128 characters. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`.
+* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) A custom description of the custom data identifier. The description can contain as many as 512 characters.
+* `maximum_match_distance` - (Optional) The maximum number of characters that can exist between text that matches the regex pattern and the character sequences specified by the keywords array. Macie includes or excludes a result based on the proximity of a keyword to text that matches the regex pattern. The distance can be 1 - 300 characters. The default value is 50.
+* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the custom data identifier.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the macie custom data identifier.
+* `deleted` - Specifies whether the custom data identifier was deleted. If you delete a custom data identifier, Amazon Macie doesn't delete it permanently. Instead, it soft deletes the identifier.
+* `created_at` - The date and time, in UTC and extended RFC 3339 format, when the custom data identifier was created.
+* `arn` - The Amazon Resource Name (ARN) of the custom data identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_custom_data_identifier` using the id. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_macie2_custom_data_identifier` using the id. For example:
+
+```console
+% terraform import aws_macie2_custom_data_identifier.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/macie2_findings_filter.html.markdown b/website/docs/cdktf/python/r/macie2_findings_filter.html.markdown
new file mode 100644
index 00000000000..c34084f2555
--- /dev/null
+++ b/website/docs/cdktf/python/r/macie2_findings_filter.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_findings_filter"
+description: |-
+  Provides a resource to manage an Amazon Macie Findings Filter.
+---
+
+
+
+# Resource: aws_macie2_findings_filter
+
+Provides a resource to manage an [Amazon Macie Findings Filter](https://docs.aws.amazon.com/macie/latest/APIReference/findingsfilters-id.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.macie2_account import Macie2Account
+from imports.aws.macie2_findings_filter import Macie2FindingsFilter, Macie2FindingsFilterFindingCriteria, Macie2FindingsFilterFindingCriteriaCriterion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsRegion(self, "current")
+        example = Macie2Account(self, "example")
+        Macie2FindingsFilter(self, "test",
+            action="ARCHIVE",
+            depends_on=[example],
+            description="DESCRIPTION",
+            finding_criteria=Macie2FindingsFilterFindingCriteria(
+                criterion=[Macie2FindingsFilterFindingCriteriaCriterion(
+                    eq=[Token.as_string(current.name)],
+                    field="region"
+                )
+                ]
+            ),
+            name="NAME OF THE FINDINGS FILTER",
+            position=1
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `finding_criteria` - (Required) The criteria to use to filter findings.
+* `name` - (Optional) A custom name for the filter. The name must contain at least 3 characters and can contain as many as 64 characters. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`.
+* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) A custom description of the filter. The description can contain as many as 512 characters.
+* `action` - (Required) The action to perform on findings that meet the filter criteria (`finding_criteria`). Valid values are: `ARCHIVE`, which suppresses (automatically archives) the findings, and `NOOP`, which doesn't perform any action on the findings.
+* `position` - (Optional) The position of the filter in the list of saved filters on the Amazon Macie console. This value also determines the order in which the filter is applied to findings, relative to other filters that are also applied to the findings.
+* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the filter.
+
+The `finding_criteria` object supports the following:
+
+* `criterion` - (Optional) A condition that specifies the property, operator, and one or more values to use to filter the results. (documented below)
+
+The `criterion` object supports the following:
+
+* `field` - (Required) The name of the field to be evaluated.
+* `eq_exact_match` - (Optional) The value for the property exclusively matches (equals an exact match for) all the specified values. If you specify multiple values, Amazon Macie uses AND logic to join the values.
+* `eq` - (Optional) The value for the property matches (equals) the specified value. If you specify multiple values, Amazon Macie uses OR logic to join the values.
+* `neq` - (Optional) The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple values, Amazon Macie uses OR logic to join the values.
+* `lt` - (Optional) The value for the property is less than the specified value.
+* `lte` - (Optional) The value for the property is less than or equal to the specified value.
+* `gt` - (Optional) The value for the property is greater than the specified value.
+* `gte` - (Optional) The value for the property is greater than or equal to the specified value.
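+
+As an illustration of the criterion operators above, a hedged sketch of a filter that archives high-severity findings; `severity.description` is a documented findings filter field, while the resource and filter names are assumptions:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.macie2_account import Macie2Account
+from imports.aws.macie2_findings_filter import Macie2FindingsFilter, Macie2FindingsFilterFindingCriteria, Macie2FindingsFilterFindingCriteriaCriterion
+class ArchiveHighSeveritySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Macie2Account(self, "example")
+        # Suppress (auto-archive) findings whose severity is High.
+        Macie2FindingsFilter(self, "archive_high_severity",
+            action="ARCHIVE",
+            depends_on=[example],
+            finding_criteria=Macie2FindingsFilterFindingCriteria(
+                criterion=[Macie2FindingsFilterFindingCriteriaCriterion(
+                    field="severity.description",
+                    eq=["High"]
+                )
+                ]
+            ),
+            name="archive-high-severity"
+        )
+```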
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) of the macie Findings Filter. +* `arn` - The Amazon Resource Name (ARN) of the Findings Filter. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_findings_filter` using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_macie2_findings_filter` using the id. For example: + +```console +% terraform import aws_macie2_findings_filter.example abcd1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/macie2_invitation_accepter.html.markdown b/website/docs/cdktf/python/r/macie2_invitation_accepter.html.markdown new file mode 100644 index 00000000000..d0be2a1382c --- /dev/null +++ b/website/docs/cdktf/python/r/macie2_invitation_accepter.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Macie" +layout: "aws" +page_title: "AWS: aws_macie2_invitation_accepter" +description: |- + Provides a resource to manage an Amazon Macie Invitation Accepter. +--- + + + +# Resource: aws_macie2_invitation_accepter + +Provides a resource to manage an [Amazon Macie Invitation Accepter](https://docs.aws.amazon.com/macie/latest/APIReference/invitations-accept.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.macie2_account import Macie2Account +from imports.aws.macie2_invitation_accepter import Macie2InvitationAccepter +from imports.aws.macie2_member import Macie2Member +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Macie2Account(self, "member") + primary = Macie2Account(self, "primary", + provider="awsalternate" + ) + aws_macie2_member_primary = Macie2Member(self, "primary_2", + account_id="ACCOUNT ID", + depends_on=[primary], + email="EMAIL", + invitation_message="Message of the invite", + invite=True, + provider="awsalternate" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_macie2_member_primary.override_logical_id("primary") + aws_macie2_invitation_accepter_member = Macie2InvitationAccepter(self, "member_3", + administrator_account_id="ADMINISTRATOR ACCOUNT ID", + depends_on=[aws_macie2_member_primary] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_macie2_invitation_accepter_member.override_logical_id("member") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `administrator_account_id` - (Required) The AWS account ID for the account that sent the invitation. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) of the macie invitation accepter. +* `invitation_id` - The unique identifier for the invitation. 
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_invitation_accepter` using the admin account ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_macie2_invitation_accepter` using the admin account ID. For example:
+
+```console
+% terraform import aws_macie2_invitation_accepter.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/macie2_member.html.markdown b/website/docs/cdktf/python/r/macie2_member.html.markdown
new file mode 100644
index 00000000000..c337750a330
--- /dev/null
+++ b/website/docs/cdktf/python/r/macie2_member.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_member"
+description: |-
+  Provides a resource to manage an Amazon Macie Member.
+---
+
+
+
+# Resource: aws_macie2_member
+
+Provides a resource to manage an [Amazon Macie Member](https://docs.aws.amazon.com/macie/latest/APIReference/members-id.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.macie2_account import Macie2Account
+from imports.aws.macie2_member import Macie2Member
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Macie2Account(self, "example")
+        aws_macie2_member_example = Macie2Member(self, "example_1",
+            account_id="AWS ACCOUNT ID",
+            depends_on=[example],
+            email="EMAIL",
+            invitation_disable_email_notification=True,
+            invitation_message="Message of the invitation",
+            invite=True
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_macie2_member_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Required) The AWS account ID for the account.
+* `email` - (Required) The email address for the account.
+* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the account in Amazon Macie.
+* `status` - (Optional) Specifies the status for the account. To enable Amazon Macie and start all Macie activities for the account, set this value to `ENABLED`. Valid values are `ENABLED` or `PAUSED`.
+* `invite` - (Optional) Send an invitation to a member.
+* `invitation_message` - (Optional) A custom message to include in the invitation. Amazon Macie adds this message to the standard content that it sends for an invitation.
+* `invitation_disable_email_notification` - (Optional) Specifies whether to send an email notification to the root user of each account that the invitation will be sent to. This notification is in addition to an alert that the root user receives in AWS Personal Health Dashboard. To send an email notification to the root user of each account, set this value to `true`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the macie Member.
+* `arn` - The Amazon Resource Name (ARN) of the account.
+* `relationship_status` - The current status of the relationship between the account and the administrator account.
+* `administrator_account_id` - The AWS account ID for the administrator account.
+* `invited_at` - The date and time, in UTC and extended RFC 3339 format, when an Amazon Macie membership invitation was last sent to the account. This value is null if a Macie invitation hasn't been sent to the account.
+* `updated_at` - The date and time, in UTC and extended RFC 3339 format, of the most recent change to the status of the relationship between the account and the administrator account.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_member` using the account ID of the member account. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_macie2_member` using the account ID of the member account. For example:
+
+```console
+% terraform import aws_macie2_member.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/macie2_organization_admin_account.html.markdown b/website/docs/cdktf/python/r/macie2_organization_admin_account.html.markdown
new file mode 100644
index 00000000000..9eba47ccedc
--- /dev/null
+++ b/website/docs/cdktf/python/r/macie2_organization_admin_account.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_organization_admin_account"
+description: |-
+  Provides a resource to manage an Amazon Macie Organization Admin Account.
+---
+
+
+
+# Resource: aws_macie2_organization_admin_account
+
+Provides a resource to manage an [Amazon Macie Organization Admin Account](https://docs.aws.amazon.com/macie/latest/APIReference/admin.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.macie2_account import Macie2Account
+from imports.aws.macie2_organization_admin_account import Macie2OrganizationAdminAccount
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Macie2Account(self, "example")
+        aws_macie2_organization_admin_account_example = Macie2OrganizationAdminAccount(self, "example_1",
+            admin_account_id="ID OF THE ADMIN ACCOUNT",
+            depends_on=[example]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_macie2_organization_admin_account_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `admin_account_id` - (Required) The AWS account ID for the account to designate as the delegated Amazon Macie administrator account for the organization.
+

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The unique identifier (ID) of the Macie organization admin account.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_macie2_organization_admin_account` using the ID. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import `aws_macie2_organization_admin_account` using the ID. For example:

```console
% terraform import aws_macie2_organization_admin_account.example abcd1
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/main_route_table_association.html.markdown b/website/docs/cdktf/python/r/main_route_table_association.html.markdown new file mode 100644 index 00000000000..94f72089440 --- /dev/null +++ b/website/docs/cdktf/python/r/main_route_table_association.html.markdown @@ -0,0 +1,74 @@ +---
subcategory: "VPC (Virtual Private Cloud)"
layout: "aws"
page_title: "AWS: aws_main_route_table_association"
description: |-
  Provides a resource for managing the main routing table of a VPC.
---

# Resource: aws_main_route_table_association

Provides a resource for managing the main routing table of a VPC.

~> **NOTE:** **Do not** use both `aws_default_route_table` to manage a default route table **and** `aws_main_route_table_association` with the same VPC due to possible route conflicts. See [aws_default_route_table][tf-default-route-table] documentation for more details.

For more information, see the Amazon VPC User Guide on [Route Tables](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html). For information about managing normal route tables in Terraform, see [`aws_route_table`](/docs/providers/aws/r/route_table.html).

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.main_route_table_association import MainRouteTableAssociation
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        MainRouteTableAssociation(self, "a",
            route_table_id=bar.id,
            vpc_id=foo.id
        )
```

## Argument Reference

This resource supports the following arguments:

* `vpc_id` - (Required) The ID of the VPC whose main route table should be set
* `route_table_id` - (Required) The ID of the Route Table to set as the new
  main route table for the target VPC

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the Route Table Association
* `original_route_table_id` - Used internally, see __Notes__ below

## Notes

On VPC creation, the AWS API always creates an initial Main Route Table. This
resource records the ID of that Route Table under `original_route_table_id`.
The "Delete" action for a `main_route_table_association` consists of resetting
this original table as the Main Route Table for the VPC.
You'll see this additional Route Table in the AWS console; it must remain intact in order for the `main_route_table_association` delete to work properly.

[aws-route-tables]: http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Route_Tables.html#Route_Replacing_Main_Table
[tf-route-tables]: /docs/providers/aws/r/route_table.html
[tf-default-route-table]: /docs/providers/aws/r/default_route_table.html

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `5m`)
- `update` - (Default `2m`)
- `delete` - (Default `5m`)

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/media_convert_queue.html.markdown b/website/docs/cdktf/python/r/media_convert_queue.html.markdown new file mode 100644 index 00000000000..bae9fd2765b --- /dev/null +++ b/website/docs/cdktf/python/r/media_convert_queue.html.markdown @@ -0,0 +1,80 @@ +---
subcategory: "Elemental MediaConvert"
layout: "aws"
page_title: "AWS: aws_media_convert_queue"
description: |-
  Provides an AWS Elemental MediaConvert Queue.
---

# Resource: aws_media_convert_queue

Provides an AWS Elemental MediaConvert Queue.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.media_convert_queue import MediaConvertQueue
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        MediaConvertQueue(self, "test",
            name="tf-test-queue"
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) A unique identifier describing the queue
* `description` - (Optional) A description of the queue
* `pricing_plan` - (Optional) Specifies whether the pricing plan for the queue is on-demand or reserved. Valid values are `ON_DEMAND` or `RESERVED`. Defaults to `ON_DEMAND`.
* `reservation_plan_settings` - (Optional) Detailed pricing plan settings for a reserved queue. See below, and the sketch after the attribute list.
* `status` - (Optional) A status of the queue. Valid values are `ACTIVE` or `PAUSED`. Defaults to `ACTIVE`.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### Nested Fields

#### `reservation_plan_settings`

* `commitment` - (Required) The length of the term of your reserved queue pricing plan commitment. Valid value is `ONE_YEAR`.
* `renewal_type` - (Required) Specifies whether the term of your reserved queue pricing plan is automatically renewed or expires at the end of the term. Valid values are `AUTO_RENEW` or `EXPIRE`.
* `reserved_slots` - (Required) Specifies the number of reserved transcode slots (RTS) for the queue.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The same as `name`
* `arn` - The Arn of the queue
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
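Putting the nested fields together, a minimal sketch of a reserved queue. It assumes the generated bindings expose a `MediaConvertQueueReservationPlanSettings` class alongside `MediaConvertQueue`, following the naming pattern of the other examples on this page:

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.media_convert_queue import (
    MediaConvertQueue,
    MediaConvertQueueReservationPlanSettings
)
class ReservedQueueStack(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        MediaConvertQueue(self, "reserved",
            name="tf-reserved-queue",
            pricing_plan="RESERVED",
            # A one-year commitment that renews automatically; `reserved_slots`
            # fixes the number of parallel transcodes the queue can run.
            reservation_plan_settings=MediaConvertQueueReservationPlanSettings(
                commitment="ONE_YEAR",
                renewal_type="AUTO_RENEW",
                reserved_slots=1
            )
        )
```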
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Media Convert Queue using the queue name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Media Convert Queue using the queue name. For example: + +```console +% terraform import aws_media_convert_queue.test tf-test-queue +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/media_package_channel.html.markdown b/website/docs/cdktf/python/r/media_package_channel.html.markdown new file mode 100644 index 00000000000..716e29031ca --- /dev/null +++ b/website/docs/cdktf/python/r/media_package_channel.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Elemental MediaPackage" +layout: "aws" +page_title: "AWS: aws_media_package_channel" +description: |- + Provides an AWS Elemental MediaPackage Channel. +--- + + + +# Resource: aws_media_package_channel + +Provides an AWS Elemental MediaPackage Channel. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.media_package_channel import MediaPackageChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MediaPackageChannel(self, "kittens", + channel_id="kitten-channel", + description="A channel dedicated to amusing videos of kittens." + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `channel_id` - (Required) A unique identifier describing the channel +* `description` - (Optional) A description of the channel +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The same as `channel_id` +* `arn` - The ARN of the channel +* `hls_ingest` - A single item list of HLS ingest information + * `ingest_endpoints` - A list of the ingest endpoints + * `password` - The password + * `url` - The URL + * `username` - The username +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Media Package Channels using the channel ID. 
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import Media Package Channels using the channel ID. For example:

```console
% terraform import aws_media_package_channel.kittens kitten-channel
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/media_store_container.html.markdown b/website/docs/cdktf/python/r/media_store_container.html.markdown new file mode 100644 index 00000000000..f92c67e2bb9 --- /dev/null +++ b/website/docs/cdktf/python/r/media_store_container.html.markdown @@ -0,0 +1,68 @@ +---
subcategory: "Elemental MediaStore"
layout: "aws"
page_title: "AWS: aws_media_store_container"
description: |-
  Provides a MediaStore Container.
---

# Resource: aws_media_store_container

Provides a MediaStore Container.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.media_store_container import MediaStoreContainer
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        MediaStoreContainer(self, "example",
            name="example"
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name of the container. Must contain alphanumeric characters or underscores.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the container.
* `endpoint` - The DNS endpoint of the container.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaStore Container using the MediaStore Container Name. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import MediaStore Container using the MediaStore Container Name.
For example:

```console
% terraform import aws_media_store_container.example example
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/media_store_container_policy.html.markdown b/website/docs/cdktf/python/r/media_store_container_policy.html.markdown new file mode 100644 index 00000000000..b0c12183465 --- /dev/null +++ b/website/docs/cdktf/python/r/media_store_container_policy.html.markdown @@ -0,0 +1,101 @@ +---
subcategory: "Elemental MediaStore"
layout: "aws"
page_title: "AWS: aws_media_store_container_policy"
description: |-
  Provides a MediaStore Container Policy.
---

# Resource: aws_media_store_container_policy

Provides a MediaStore Container Policy.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
from imports.aws.data_aws_iam_policy_document import (
    DataAwsIamPolicyDocument,
    DataAwsIamPolicyDocumentStatement,
    DataAwsIamPolicyDocumentStatementCondition,
    DataAwsIamPolicyDocumentStatementPrincipals
)
from imports.aws.data_aws_region import DataAwsRegion
from imports.aws.media_store_container import MediaStoreContainer
from imports.aws.media_store_container_policy import MediaStoreContainerPolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = MediaStoreContainer(self, "example",
            name="example"
        )
        current = DataAwsCallerIdentity(self, "current")
        data_aws_region_current = DataAwsRegion(self, "current_2")
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_region_current.override_logical_id("current")
        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_3",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["mediastore:*"],
                condition=[DataAwsIamPolicyDocumentStatementCondition(
                    test="Bool",
                    values=["true"],
                    variable="aws:SecureTransport"
                )
                ],
                effect="Allow",
                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
                    identifiers=["arn:aws:iam::${" + current.account_id + "}:root"],
                    type="AWS"
                )
                ],
                resources=["arn:aws:mediastore:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:container/${" + example.name + "}/*"
                ],
                sid="MediaStoreFullAccess"
            )
            ]
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_iam_policy_document_example.override_logical_id("example")
        aws_media_store_container_policy_example = MediaStoreContainerPolicy(self, "example_4",
            container_name=example.name,
            policy=Token.as_string(data_aws_iam_policy_document_example.json)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_media_store_container_policy_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `container_name` - (Required) The name of the container.
* `policy` - (Required) The contents of the policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).

## Attribute Reference

This resource exports no additional attributes.
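The `policy` can also be supplied as an inline JSON string instead of the `aws_iam_policy_document` data source. A minimal sketch; the statement itself is hypothetical, though `mediastore:GetObject` and `mediastore:DescribeObject` are real MediaStore actions:

```python
import json
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.media_store_container import MediaStoreContainer
from imports.aws.media_store_container_policy import MediaStoreContainerPolicy
class InlinePolicyStack(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = MediaStoreContainer(self, "example", name="example")
        # `example.arn` resolves to a Terraform token at synth time, so it
        # can be embedded directly in the JSON document.
        MediaStoreContainerPolicy(self, "example_policy",
            container_name=example.name,
            policy=json.dumps({
                "Version": "2012-10-17",
                "Statement": [{
                    "Sid": "HttpsOnlyReadAccess",
                    "Effect": "Allow",
                    "Principal": "*",
                    "Action": ["mediastore:GetObject", "mediastore:DescribeObject"],
                    "Resource": f"{example.arn}/*",
                    "Condition": {"Bool": {"aws:SecureTransport": "true"}}
                }]
            })
        )
```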
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaStore Container Policy using the MediaStore Container Name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MediaStore Container Policy using the MediaStore Container Name. For example: + +```console +% terraform import aws_media_store_container_policy.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/medialive_channel.html.markdown b/website/docs/cdktf/python/r/medialive_channel.html.markdown new file mode 100644 index 00000000000..ac6b9a6a036 --- /dev/null +++ b/website/docs/cdktf/python/r/medialive_channel.html.markdown @@ -0,0 +1,687 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_channel" +description: |- + Terraform resource for managing an AWS MediaLive Channel. +--- + + + +# Resource: aws_medialive_channel + +Terraform resource for managing an AWS MediaLive Channel. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.medialive_channel import MedialiveChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MedialiveChannel(self, "example", + channel_class="STANDARD", + destinations=[MedialiveChannelDestinations( + id="destination", + settings=[MedialiveChannelDestinationsSettings( + url="s3://${" + main.id + "}/test1" + ), MedialiveChannelDestinationsSettings( + url="s3://${" + main2.id + "}/test2" + ) + ] + ) + ], + encoder_settings=MedialiveChannelEncoderSettings( + audio_descriptions=[MedialiveChannelEncoderSettingsAudioDescriptions( + audio_selector_name="example audio selector", + name="audio-selector" + ) + ], + output_groups=[MedialiveChannelEncoderSettingsOutputGroups( + output_group_settings=MedialiveChannelEncoderSettingsOutputGroupsOutputGroupSettings( + archive_group_settings=[MedialiveChannelEncoderSettingsOutputGroupsOutputGroupSettingsArchiveGroupSettings( + destination=MedialiveChannelEncoderSettingsOutputGroupsOutputGroupSettingsArchiveGroupSettingsDestination( + destination_ref_id="destination" + ) + ) + ] + ), + outputs=[MedialiveChannelEncoderSettingsOutputGroupsOutputs( + audio_description_names=["audio-selector"], + output_name="example-name", + output_settings=MedialiveChannelEncoderSettingsOutputGroupsOutputsOutputSettings( + archive_output_settings=MedialiveChannelEncoderSettingsOutputGroupsOutputsOutputSettingsArchiveOutputSettings( + container_settings=MedialiveChannelEncoderSettingsOutputGroupsOutputsOutputSettingsArchiveOutputSettingsContainerSettings( + m2_ts_settings=MedialiveChannelEncoderSettingsOutputGroupsOutputsOutputSettingsArchiveOutputSettingsContainerSettingsM2TsSettings( + audio_buffer_model="ATSC", + buffer_model="MULTIPLEX", + rate_mode="CBR" + ) + ), + extension="m2ts", + name_modifier="_1" + ) + ), + video_description_name="example-video" + ) + ] + ) + ], + 
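                # Timecode for the outputs is read from the input stream
                # ("EMBEDDED"); "example-video" below is the video description
                # referenced by `video_description_name` in the outputs above.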
                timecode_config=MedialiveChannelEncoderSettingsTimecodeConfig(
                    source="EMBEDDED"
                ),
                video_descriptions=[MedialiveChannelEncoderSettingsVideoDescriptions(
                    name="example-video"
                )
                ]
            ),
            input_attachments=[MedialiveChannelInputAttachments(
                input_attachment_name="example-input",
                input_id=Token.as_string(aws_medialive_input_example.id)
            )
            ],
            input_specification=MedialiveChannelInputSpecification(
                codec="AVC",
                input_resolution="HD",
                maximum_bitrate="MAX_20_MBPS"
            ),
            name="example-channel",
            role_arn=Token.as_string(aws_iam_role_example.arn)
        )
```

## Argument Reference

The following arguments are required:

* `channel_class` - (Required) The class for this channel. A `STANDARD` channel runs two encoder pipelines; `SINGLE_PIPELINE` runs one.
* `destinations` - (Required) Destinations for channel. See [Destinations](#destinations) for more details.
* `encoder_settings` - (Required) Encoder settings. See [Encoder Settings](#encoder-settings) for more details.
* `input_specification` - (Required) Specification of network and file inputs for the channel.
* `name` - (Required) Name of the Channel.

The following arguments are optional:

* `cdi_input_specification` - (Optional) Specification of CDI inputs for this channel. See [CDI Input Specification](#cdi-input-specification) for more details.
* `input_attachments` - (Optional) Input attachments for the channel. See [Input Attachments](#input-attachments) for more details.
* `log_level` - (Optional) The log level to write to Cloudwatch logs.
* `maintenance` - (Optional) Maintenance settings for this channel. See [Maintenance](#maintenance) for more details.
* `role_arn` - (Optional) ARN of the IAM role the channel assumes to access AWS resources.
* `start_channel` - (Optional) Whether to start/stop channel. Default: `false`
* `tags` - (Optional) A map of tags to assign to the channel. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `vpc` - (Optional) Settings for the VPC outputs.

### CDI Input Specification

* `resolution` - (Required) Maximum CDI input resolution.

### Destinations

* `id` - (Required) User-specified ID. This is used in an output group or an output.
* `media_package_settings` - (Optional) Destination settings for a MediaPackage output; one destination for both encoders. See [Media Package Settings](#media-package-settings) for more details.
* `multiplex_settings` - (Optional) Destination settings for a Multiplex output; one destination for both encoders. See [Multiplex Settings](#multiplex-settings) for more details.
* `settings` - (Optional) Destination settings for a standard output; one destination for each redundant encoder. See [Settings](#settings) for more details.

### Encoder Settings

* `audio_descriptions` - (Required) Audio descriptions for the channel. See [Audio Descriptions](#audio-descriptions) for more details.
* `output_groups` - (Required) Output groups for the channel. See [Output Groups](#output-groups) for more details.
* `timecode_config` - (Required) Contains settings used to acquire and adjust timecode information from inputs. See [Timecode Config](#timecode-config) for more details.
* `video_descriptions` - (Required) Video Descriptions. See [Video Descriptions](#video-descriptions) for more details.
* `caption_descriptions` - (Optional) Caption Descriptions. See [Caption Descriptions](#caption-descriptions) for more details.
+
* `global_configuration` - (Optional) Configuration settings that apply to the event as a whole. See [Global Configuration](#global-configuration) for more details.
* `motion_graphics_configuration` - (Optional) Settings for motion graphics. See [Motion Graphics Configuration](#motion-graphics-configuration) for more details.
* `nielsen_configuration` - (Optional) Nielsen configuration settings. See [Nielsen Configuration](#nielsen-configuration) for more details.
* `avail_blanking` - (Optional) Settings for ad avail blanking. See [Avail Blanking](#avail-blanking) for more details.

### Input Attachments

* `input_attachment_name` - (Optional) User-specified name for the attachment.
* `input_id` - (Required) The ID of the input.
* `input_settings` - (Optional) Settings of an input. See [Input Settings](#input-settings) for more details.

### Input Settings

* `audio_selectors` - (Optional) Used to select the audio stream to decode for inputs that have multiple. See [Audio Selectors](#audio-selectors) for more details.
* `caption_selectors` - (Optional) Used to select the caption input to use for inputs that have multiple available. See [Caption Selectors](#caption-selectors) for more details.
* `deblock_filter` - (Optional) Enable or disable the deblock filter when filtering.
* `denoise_filter` - (Optional) Enable or disable the denoise filter when filtering.
* `filter_strength` - (Optional) Adjusts the magnitude of filtering from 1 (minimal) to 5 (strongest).
* `input_filter` - (Optional) Turns on the filter for the input.
* `network_input_settings` - (Optional) Input settings. See [Network Input Settings](#network-input-settings) for more details.
* `scte35_pid` - (Optional) PID from which to read SCTE-35 messages.
* `smpte2038_data_preference` - (Optional) Specifies whether to extract applicable ancillary data from a SMPTE-2038 source in the input.
* `source_end_behavior` - (Optional) Loop input if it is a file.

### Audio Selectors

* `name` - (Required) The name of the audio selector.

### Caption Selectors

* `name` - (Optional) The name of the caption selector.
* `language_code` - (Optional) When specified, this field indicates the three-letter language code of the caption track to extract from the source.

### Network Input Settings

* `hls_input_settings` - (Optional) Specifies HLS input settings when the URI is for an HLS manifest. See [HLS Input Settings](#hls-input-settings) for more details.
* `server_validation` - (Optional) Check HTTPS server certificates.

### HLS Input Settings

* `bandwidth` - (Optional) The bitrate is specified in bits per second, as in an HLS manifest.
* `buffer_segments` - (Optional) Buffer segments.
* `retries` - (Optional) The number of consecutive times that attempts to read a manifest or segment must fail before the input is considered unavailable.
* `retry_interval` - (Optional) The number of seconds between retries when an attempt to read a manifest or segment fails.
* `scte35_source_type` - (Optional) Identifies the source for the SCTE-35 messages that MediaLive will ingest.

### Maintenance

* `maintenance_day` - (Optional) The day of the week to use for maintenance.
* `maintenance_start_time` - (Optional) The hour maintenance will start.

### Media Package Settings

* `channel_id` - (Required) ID of the channel in MediaPackage that is the destination for this output group.
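To illustrate the destination plumbing, a sketch of a MediaPackage destination follows; the nested class names are assumptions that follow the generated-binding naming used in the Basic Usage example:

```python
from imports.aws.medialive_channel import (
    MedialiveChannelDestinations,
    MedialiveChannelDestinationsMediaPackageSettings
)
# One destination serves both encoder pipelines; the channel ID below is a
# hypothetical MediaPackage channel.
media_package_destination = MedialiveChannelDestinations(
    id="media-package-destination",
    media_package_settings=[MedialiveChannelDestinationsMediaPackageSettings(
        channel_id="example-mediapackage-channel"
    )]
)
```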
+

### Multiplex Settings

* `multiplex_id` - (Required) The ID of the Multiplex that the encoder is providing output to.
* `program_name` - (Optional) The program name of the Multiplex program that the encoder is providing output to.

### Settings

* `password_param` - (Optional) Key used to extract the password from EC2 Parameter store.
* `stream_name` - (Optional) Stream name for RTMP destinations (URLs of type rtmp://).
* `url` - (Optional) A URL specifying a destination.
* `username` - (Optional) Username for destination.

### Audio Descriptions

* `audio_selector_name` - (Required) The name of the audio selector used as the source for this AudioDescription.
* `name` - (Required) The name of this audio description.
* `audio_normalization_settings` - (Optional) Advanced audio normalization settings. See [Audio Normalization Settings](#audio-normalization-settings) for more details.
* `audio_type` - (Optional) Applies only if audioTypeControl is useConfigured. The values for audioType are defined in ISO-IEC 13818-1.
* `audio_type_control` - (Optional) Determines how the audio type is determined.
* `audio_watermark_settings` - (Optional) Settings to configure one or more solutions that insert audio watermarks in the audio encode. See [Audio Watermark Settings](#audio-watermark-settings) for more details.
* `codec_settings` - (Optional) Audio codec settings. See [Audio Codec Settings](#audio-codec-settings) for more details.

### Audio Normalization Settings

* `algorithm` - (Optional) Audio normalization algorithm to use. itu17701 conforms to the CALM Act specification, itu17702 to the EBU R-128 specification.
* `algorithm_control` - (Optional) Algorithm control for the audio description.
* `target_lkfs` - (Optional) Target LKFS (loudness) to adjust volume to.

### Audio Watermark Settings

* `nielsen_watermark_settings` - (Optional) Settings to configure Nielsen Watermarks in the audio encode. See [Nielsen Watermark Settings](#nielsen-watermark-settings) for more details.

### Audio Codec Settings

* `aac_settings` - (Optional) Aac Settings. See [AAC Settings](#aac-settings) for more details.
* `ac3_settings` - (Optional) Ac3 Settings. See [AC3 Settings](#ac3-settings) for more details.
* `eac3_atmos_settings` - (Optional) Eac3 Atmos Settings. See [EAC3 Atmos Settings](#eac3-atmos-settings) for more details.
* `eac3_settings` - (Optional) Eac3 Settings. See [EAC3 Settings](#eac3-settings) for more details.

### AAC Settings

* `bitrate` - (Optional) Average bitrate in bits/second.
* `coding_mode` - (Optional) Mono, Stereo, or 5.1 channel layout.
* `input_type` - (Optional) Set to "broadcasterMixedAd" when input contains pre-mixed main audio + AD (narration) as a stereo pair.
* `profile` - (Optional) AAC profile.
* `rate_control_mode` - (Optional) The rate control mode.
* `raw_format` - (Optional) Sets LATM/LOAS AAC output for raw containers.
* `sample_rate` - (Optional) Sample rate in Hz.
* `spec` - (Optional) Use MPEG-2 AAC audio instead of MPEG-4 AAC audio for raw or MPEG-2 Transport Stream containers.
* `vbr_quality` - (Optional) VBR Quality Level - Only used if rateControlMode is VBR.

### AC3 Settings

* `bitrate` - (Optional) Average bitrate in bits/second.
* `bitstream_mode` - (Optional) Specifies the bitstream mode (bsmod) for the emitted AC-3 stream.
* `coding_mode` - (Optional) Dolby Digital coding mode.
* `dialnorm` - (Optional) Sets the dialnorm of the output.
+
* `drc_profile` - (Optional) If set to filmStandard, adds dynamic range compression signaling to the output bitstream as defined in the Dolby Digital specification.
* `lfe_filter` - (Optional) When set to enabled, applies a 120Hz lowpass filter to the LFE channel prior to encoding.
* `metadata_control` - (Optional) Metadata control.

### EAC3 Atmos Settings

* `bitrate` - (Optional) Average bitrate in bits/second.
* `coding_mode` - (Optional) Dolby Digital Plus with Dolby Atmos coding mode.
* `dialnorm` - (Optional) Sets the dialnorm for the output.
* `drc_line` - (Optional) Sets the Dolby dynamic range compression profile.
* `drc_rf` - (Optional) Sets the profile for heavy Dolby dynamic range compression.
* `height_trim` - (Optional) Height dimensional trim.
* `surround_trim` - (Optional) Surround dimensional trim.

### EAC3 Settings

* `attenuation_control` - (Optional) Sets the attenuation control.
* `bitrate` - (Optional) Average bitrate in bits/second.
* `bitstream_mode` - (Optional) Specifies the bitstream mode (bsmod) for the emitted AC-3 stream.
* `coding_mode` - (Optional) Dolby Digital Plus coding mode.

### Nielsen Watermark Settings

* `nielsen_cbet_settings` - (Optional) Used to insert watermarks of type Nielsen CBET. See [Nielsen CBET Settings](#nielsen-cbet-settings) for more details.
* `nielsen_distribution_type` - (Optional) Distribution types to assign to the watermarks. Options are `PROGRAM_CONTENT` and `FINAL_DISTRIBUTOR`.
* `nielsen_naes_ii_nw_settings` - (Optional) Used to insert watermarks of type Nielsen NAES II (N2) and Nielsen NAES VI (NW). See [Nielsen NAES II NW Settings](#nielsen-naes-ii-nw-settings) for more details.

### Nielsen CBET Settings

* `cbet_check_digit` - (Required) CBET check digits to use for the watermark.
* `cbet_stepaside` - (Required) Determines the method of CBET insertion mode when prior encoding is detected on the same layer.
* `csid` - (Required) CBET source ID to use in the watermark.

### Nielsen NAES II NW Settings

* `check_digit` - (Required) Check digit string for the watermark.
* `sid` - (Required) The Nielsen Source ID to include in the watermark.

### Output Groups

* `output_group_settings` - (Required) Settings associated with the output group. See [Output Group Settings](#output-group-settings) for more details.
* `outputs` - (Required) List of outputs. See [Outputs](#outputs) for more details.
* `name` - (Optional) Custom output group name defined by the user.

### Output Group Settings

* `archive_group_settings` - (Optional) Archive group settings. See [Archive Group Settings](#archive-group-settings) for more details.
* `media_package_group_settings` - (Optional) Media package group settings. See [Media Package Group Settings](#media-package-group-settings) for more details.
* `multiplex_group_settings` - (Optional) Multiplex group settings. Attribute can be passed as an empty block.
* `rtmp_group_settings` - (Optional) RTMP group settings. See [RTMP Group Settings](#rtmp-group-settings) for more details.
* `udp_group_settings` - (Optional) UDP group settings. See [UDP Group Settings](#udp-group-settings) for more details.

### Outputs

* `output_settings` - (Required) Settings for output. See [Output Settings](#output-settings) for more details.
* `output_name` - (Required) The name used to identify an output.
* `audio_description_names` - (Optional) The names of the audio descriptions used as audio sources for the output.
+
* `caption_description_names` - (Optional) The names of the caption descriptions used as caption sources for the output.
* `video_description_name` - (Optional) The name of the video description used as video source for the output.

### Timecode Config

* `source` - (Optional) The source for the timecode that will be associated with the event's outputs.
* `sync_threshold` - (Optional) Threshold in frames beyond which output timecode is resynchronized to the input timecode.

### Video Descriptions

* `name` - (Required) The name of the video description.
* `codec_settings` - (Optional) The video codec settings. See [Video Codec Settings](#video-codec-settings) for more details.
* `height` - (Optional) Output video height in pixels.
* `respond_to_afd` - (Optional) Indicate how to respond to the AFD values that might be in the input video.
* `scaling_behavior` - (Optional) Behavior on how to scale.
* `sharpness` - (Optional) Changes the strength of the anti-alias filter used for scaling.
* `width` - (Optional) Output video width in pixels.

### Video Codec Settings

* `frame_capture_settings` - (Optional) Frame capture settings. See [Frame Capture Settings](#frame-capture-settings) for more details.
* `h264_settings` - (Optional) H264 settings. See [H264 Settings](#h264-settings) for more details.

### Frame Capture Settings

* `capture_interval` - (Optional) The frequency at which to capture frames for inclusion in the output.
* `capture_interval_units` - (Optional) Unit for the frame capture interval.

### H264 Settings

* `adaptive_quantization` - (Optional) Enables or disables adaptive quantization.
* `afd_signaling` - (Optional) Indicates that AFD values will be written into the output stream.
* `bitrate` - (Optional) Average bitrate in bits/second.
* `buf_fill_pct` - (Optional) Percentage of the buffer that should initially be filled.
* `buf_size` - (Optional) Size of buffer in bits.
* `color_metadata` - (Optional) Includes color space metadata in the output.
* `entropy_encoding` - (Optional) Entropy encoding mode.
* `filter_settings` - (Optional) Filters to apply to an encode. See [H264 Filter Settings](#h264-filter-settings) for more details.
* `fixed_afd` - (Optional) Four bit AFD value to write on all frames of video in the output stream.
* `flicker_aq` - (Optional) Makes adjustments within each frame to reduce flicker on the I-frames.
* `force_field_pictures` - (Optional) Controls whether coding is performed on a field basis or on a frame basis.
* `framerate_control` - (Optional) Indicates how the output video frame rate is specified.
* `framerate_denominator` - (Optional) Framerate denominator.
* `framerate_numerator` - (Optional) Framerate numerator.
* `gop_b_reference` - (Optional) GOP-B reference.
* `gop_closed_cadence` - (Optional) Frequency of closed GOPs.
* `gop_num_b_frames` - (Optional) Number of B-frames between reference frames.
* `gop_size` - (Optional) GOP size in units of either frames or seconds, per `gop_size_units`.
* `gop_size_units` - (Optional) Indicates if the `gop_size` is specified in frames or seconds.
* `level` - (Optional) H264 level.
* `look_ahead_rate_control` - (Optional) Amount of lookahead.
* `max_bitrate` - (Optional) Set the maximum bitrate in order to accommodate expected spikes in the complexity of the video.
* `min_interval` - (Optional) Min interval.
* `num_ref_frames` - (Optional) Number of reference frames to use.
+
* `par_control` - (Optional) Indicates how the output pixel aspect ratio is specified.
* `par_denominator` - (Optional) Pixel Aspect Ratio denominator.
* `par_numerator` - (Optional) Pixel Aspect Ratio numerator.
* `profile` - (Optional) H264 profile.
* `quality_level` - (Optional) Quality level.
* `qvbr_quality_level` - (Optional) Controls the target quality for the video encode.
* `rate_control_mode` - (Optional) Rate control mode.
* `scan_type` - (Optional) Sets the scan type of the output.
* `scene_change_detect` - (Optional) Scene change detection.
* `slices` - (Optional) Number of slices per picture.
* `softness` - (Optional) Softness.
* `spatial_aq` - (Optional) Makes adjustments within each frame based on spatial variation of content complexity.
* `subgop_length` - (Optional) Subgop length.
* `syntax` - (Optional) Produces a bitstream compliant with SMPTE RP-2027.
* `temporal_aq` - (Optional) Makes adjustments within each frame based on temporal variation of content complexity.
* `timecode_insertion` - (Optional) Determines how timecodes should be inserted into the video elementary stream.

### H264 Filter Settings

* `temporal_filter_settings` - (Optional) Temporal filter settings. See [Temporal Filter Settings](#temporal-filter-settings) for more details.

### H265 Settings

* `adaptive_quantization` - (Optional) Enables or disables adaptive quantization.
* `afd_signaling` - (Optional) Indicates that AFD values will be written into the output stream.
* `alternative_transfer_function` - (Optional) Whether or not EML should insert an Alternative Transfer Function SEI message.
* `bitrate` - (Required) Average bitrate in bits/second.
* `buf_size` - (Optional) Size of buffer in bits.
* `color_metadata` - (Optional) Includes color space metadata in the output.
* `color_space_settings` - (Optional) Define the color metadata for the output. See [H265 Color Space Settings](#h265-color-space-settings) for more details.
* `filter_settings` - (Optional) Filters to apply to an encode. See [H265 Filter Settings](#h265-filter-settings) for more details.
* `fixed_afd` - (Optional) Four bit AFD value to write on all frames of video in the output stream.
* `flicker_aq` - (Optional) Makes adjustments within each frame to reduce flicker on the I-frames.
* `framerate_denominator` - (Required) Framerate denominator.
* `framerate_numerator` - (Required) Framerate numerator.
* `gop_closed_cadence` - (Optional) Frequency of closed GOPs.
* `gop_size` - (Optional) GOP size in units of either frames or seconds, per `gop_size_units`.
* `gop_size_units` - (Optional) Indicates if the `gop_size` is specified in frames or seconds.
* `level` - (Optional) H265 level.
* `look_ahead_rate_control` - (Optional) Amount of lookahead.
* `max_bitrate` - (Optional) Set the maximum bitrate in order to accommodate expected spikes in the complexity of the video.
* `min_interval` - (Optional) Min interval.
* `par_denominator` - (Optional) Pixel Aspect Ratio denominator.
* `par_numerator` - (Optional) Pixel Aspect Ratio numerator.
* `profile` - (Optional) H265 profile.
* `qvbr_quality_level` - (Optional) Controls the target quality for the video encode.
* `rate_control_mode` - (Optional) Rate control mode.
* `scan_type` - (Optional) Sets the scan type of the output.
* `scene_change_detect` - (Optional) Scene change detection.
* `slices` - (Optional) Number of slices per picture.
* `tier` - (Optional) Set the H265 tier in the output.
* `timecode_burnin_settings` - (Optional) Apply a burned-in timecode.
See [H265 Timecode Burnin Settings](#h265-timecode-burnin-settings) for more details.
* `timecode_insertion` - (Optional) Determines how timecodes should be inserted into the video elementary stream.

### H265 Color Space Settings

* `color_space_passthrough_settings` - (Optional) Sets the colorspace metadata to be passed through.
* `dolby_vision81_settings` - (Optional) Set the colorspace to Dolby Vision81.
* `hdr10_settings` - (Optional) Set the colorspace to be HDR10. See [H265 HDR10 Settings](#h265-hdr10-settings) for more details.
* `rec601_settings` - (Optional) Set the colorspace to Rec. 601.
* `rec709_settings` - (Optional) Set the colorspace to Rec. 709.

### H265 HDR10 Settings

* `max_cll` - (Optional) Sets the MaxCLL value for HDR10.
* `max_fall` - (Optional) Sets the MaxFALL value for HDR10.

### H265 Filter Settings

* `temporal_filter_settings` - (Optional) Temporal filter settings. See [Temporal Filter Settings](#temporal-filter-settings) for more details.

### H265 Timecode Burnin Settings

* `timecode_burnin_font_size` - (Optional) Sets the size of the burned-in timecode.
* `timecode_burnin_position` - (Optional) Sets the position of the burned-in timecode.
* `prefix` - (Optional) Set a prefix on the burned-in timecode.

### Temporal Filter Settings

* `post_filter_sharpening` - (Optional) Post filter sharpening.
* `strength` - (Optional) Filter strength.

### Caption Descriptions

* `accessibility` - (Optional) Indicates whether the caption track implements accessibility features such as written descriptions of spoken dialog, music, and sounds.
* `caption_selector_name` - (Required) Specifies which input caption selector to use as a caption source when generating output captions. This field should match a captionSelector name.
* `destination_settings` - (Optional) Additional settings for captions destination that depend on the destination type. See [Destination Settings](#destination-settings) for more details.
* `language_code` - (Optional) ISO 639-2 three-letter code.
* `language_description` - (Optional) Human readable information to indicate captions available for players (e.g. English or Spanish).
* `name` - (Required) Name of the caption description. Used to associate a caption description with an output. Names must be unique within an event.

### Destination Settings

* `arib_destination_settings` - (Optional) Arib Destination Settings.
* `burn_in_destination_settings` - (Optional) Burn In Destination Settings. See [Burn In Destination Settings](#burn-in-destination-settings) for more details.
* `dvb_sub_destination_settings` - (Optional) Dvb Sub Destination Settings. See [Dvb Sub Destination Settings](#dvb-sub-destination-settings) for more details.
* `ebu_tt_d_destination_settings` - (Optional) Ebu Tt D Destination Settings. See [Ebu Tt D Destination Settings](#ebu-tt-d-destination-settings) for more details.
* `embedded_destination_settings` - (Optional) Embedded Destination Settings.
* `embedded_plus_scte20_destination_settings` - (Optional) Embedded Plus Scte20 Destination Settings.
* `rtmp_caption_info_destination_settings` - (Optional) Rtmp Caption Info Destination Settings.
* `scte20_plus_embedded_destination_settings` - (Optional) Scte20 Plus Embedded Destination Settings.
* `scte27_destination_settings` - (Optional) Scte27 Destination Settings.
* `smpte_tt_destination_settings` - (Optional) Smpte Tt Destination Settings.
* `teletext_destination_settings` - (Optional) Teletext Destination Settings.
+* `ttml_destination_settings` – (Optional) Ttml Destination Settings. See [Ttml Destination Settings](#ttml-destination-settings) for more details. +* `webvtt_destination_settings` - (Optional) Webvtt Destination Settings. See [Webvtt Destination Settings](#webvtt-destination-settings) for more details. + +### Burn In Destination Settings + +* `alignment` – (Optional) If no explicit xPosition or yPosition is provided, setting alignment to centered will place the captions at the bottom center of the output. Similarly, setting a left alignment will align captions to the bottom left of the output. If x and y positions are given in conjunction with the alignment parameter, the font will be justified (either left or centered) relative to those coordinates. Selecting “smart” justification will left-justify live subtitles and center-justify pre-recorded subtitles. All burn-in and DVB-Sub font settings must match. +* `background_color` – (Optional) Specifies the color of the rectangle behind the captions. All burn-in and DVB-Sub font settings must match. +* `background_opacity` – (Optional) Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent. Leaving this parameter out is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. +* `font` – (Optional) External font file used for caption burn-in. File extension must be ‘ttf’ or ‘tte’. Although the user can select output fonts for many different types of input captions, embedded, STL and teletext sources use a strict grid system. Using external fonts with these caption sources could cause unexpected display of proportional fonts. All burn-in and DVB-Sub font settings must match. See [Font](#font) for more details. +* `font_color` – (Optional) Specifies the color of the burned-in captions. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `font_opacity` – (Optional) Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent. All burn-in and DVB-Sub font settings must match. +* `font_resolution` – (Optional) Font resolution in DPI (dots per inch); default is 96 dpi. All burn-in and DVB-Sub font settings must match. +* `font_size` – (Optional) When set to ‘auto’ fontSize will scale depending on the size of the output. Giving a positive integer will specify the exact font size in points. All burn-in and DVB-Sub font settings must match. +* `outline_color` – (Optional) Specifies font outline color. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `outline_size` – (Optional) Specifies font outline size in pixels. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `shadow_color` – (Optional) Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub font settings must match. +* `shadow_opacity` – (Optional) Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving this parameter out is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. 
+* `shadow_x_offset` – (Optional) Specifies the horizontal offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels to the left. All burn-in and DVB-Sub font settings must match. +* `shadow_y_offset` – (Optional) Specifies the vertical offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels above the text. All burn-in and DVB-Sub font settings must match. +* `teletext_grid_control` – (Optional) Controls whether a fixed grid size will be used to generate the output subtitles bitmap. Only applicable for Teletext inputs and DVB-Sub/Burn-in outputs. +* `x_position` – (Optional) Specifies the horizontal position of the caption relative to the left side of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the left of the output. If no explicit xPosition is provided, the horizontal caption position will be determined by the alignment parameter. All burn-in and DVB-Sub font settings must match. +* `y_position` – (Optional) Specifies the vertical position of the caption relative to the top of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the top of the output. If no explicit yPosition is provided, the caption will be positioned towards the bottom of the output. All burn-in and DVB-Sub font settings must match. + +### Dvb Sub Destination Settings + +* `alignment` – (Optional) If no explicit xPosition or yPosition is provided, setting alignment to centered will place the captions at the bottom center of the output. Similarly, setting a left alignment will align captions to the bottom left of the output. If x and y positions are given in conjunction with the alignment parameter, the font will be justified (either left or centered) relative to those coordinates. Selecting “smart” justification will left-justify live subtitles and center-justify pre-recorded subtitles. This option is not valid for source captions that are STL or 608/embedded. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `background_color` – (Optional) Specifies the color of the rectangle behind the captions. All burn-in and DVB-Sub font settings must match. +* `background_opacity` – (Optional) Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent. Leaving this parameter blank is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. +* `font` – (Optional) External font file used for caption burn-in. File extension must be ‘ttf’ or ‘tte’. Although the user can select output fonts for many different types of input captions, embedded, STL and teletext sources use a strict grid system. Using external fonts with these caption sources could cause unexpected display of proportional fonts. All burn-in and DVB-Sub font settings must match. See [Font](#font) for more details. +* `font_color` – (Optional) Specifies the color of the burned-in captions. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `font_opacity` – (Optional) Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent. All burn-in and DVB-Sub font settings must match. +* `font_resolution` – (Optional) Font resolution in DPI (dots per inch); default is 96 dpi. 
All burn-in and DVB-Sub font settings must match. +* `font_size` – (Optional) When set to auto fontSize will scale depending on the size of the output. Giving a positive integer will specify the exact font size in points. All burn-in and DVB-Sub font settings must match. +* `outline_color` – (Optional) Specifies font outline color. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `outline_size` – (Optional) Specifies font outline size in pixels. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `shadow_color` – (Optional) Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub font settings must match. +* `shadow_opacity` – (Optional) Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving this parameter blank is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. +* `shadow_x_offset` – (Optional) Specifies the horizontal offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels to the left. All burn-in and DVB-Sub font settings must match. +* `shadow_y_offset` – (Optional) Specifies the vertical offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels above the text. All burn-in and DVB-Sub font settings must match. +* `teletext_grid_control` – (Optional) Controls whether a fixed grid size will be used to generate the output subtitles bitmap. Only applicable for Teletext inputs and DVB-Sub/Burn-in outputs. +* `x_position` – (Optional) Specifies the horizontal position of the caption relative to the left side of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the left of the output. If no explicit xPosition is provided, the horizontal caption position will be determined by the alignment parameter. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `y_position` – (Optional) Specifies the vertical position of the caption relative to the top of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the top of the output. If no explicit yPosition is provided, the caption will be positioned towards the bottom of the output. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. + +### Ebu Tt D Destination Settings + +* `copyright_holder` – (Optional) Complete this field if you want to include the name of the copyright holder in the copyright tag in the captions metadata. +* `fill_line_gap` – (Optional) Specifies how to handle the gap between the lines (in multi-line captions). - enabled: Fill with the captions background color (as specified in the input captions). - disabled: Leave the gap unfilled. +* `font_family` – (Optional) Specifies the font family to include in the font data attached to the EBU-TT captions. Valid only if styleControl is set to include. 
If you leave this field empty, the font family is set to “monospaced”. (If styleControl is set to exclude, the font family is always set to “monospaced”.) You specify only the font family. All other style information (color, bold, position and so on) is copied from the input captions. The size is always set to 100% to allow the downstream player to choose the size. - Enter a list of font families, as a comma-separated list of font names, in order of preference. The name can be a font family (such as “Arial”), or a generic font family (such as “serif”), or “default” (to let the downstream player choose the font). - Leave blank to set the family to “monospace”. +* `style_control` – (Optional) Specifies the style information (font color, font position, and so on) to include in the font data that is attached to the EBU-TT captions. - include: Take the style information (font color, font position, and so on) from the source captions and include that information in the font data attached to the EBU-TT captions. This option is valid only if the source captions are Embedded or Teletext. - exclude: In the font data attached to the EBU-TT captions, set the font family to “monospaced”. Do not include any other style information. + +### Ttml Destination Settings + +* `style_control` – (Optional) This field is not currently supported and will not affect the output styling. Leave the default value. + +### Webvtt Destination Settings + +* `style_control` - (Optional) Controls whether the color and position of the source captions is passed through to the WebVTT output captions. PASSTHROUGH - Valid only if the source captions are EMBEDDED or TELETEXT. NO\_STYLE\_DATA - Don’t pass through the style. The output captions will not contain any font styling information. + +### Font + +* `password_param` – (Optional) Key used to extract the password from EC2 Parameter store. +* `uri` – (Required) Path to a file accessible to the live stream. +* `username` – (Optional) Username to be used. + +### Global Configuration + +* `initial_audio_gain` – (Optional) Value to set the initial audio gain for the Live Event. +* `input_end_action` – (Optional) Indicates the action to take when the current input completes (e.g. end-of-file). When switchAndLoopInputs is configured the encoder will restart at the beginning of the first input. When “none” is configured the encoder will transcode either black, a solid color, or a user specified slate images per the “Input Loss Behavior” configuration until the next input switch occurs (which is controlled through the Channel Schedule API). +* `input_loss_behavior` - (Optional) Settings for system actions when input is lost. See [Input Loss Behavior](#input-loss-behavior) for more details. +* `output_locking_mode` – (Optional) Indicates how MediaLive pipelines are synchronized. PIPELINE\_LOCKING - MediaLive will attempt to synchronize the output of each pipeline to the other. EPOCH\_LOCKING - MediaLive will attempt to synchronize the output of each pipeline to the Unix epoch. +* `output_timing_source` – (Optional) Indicates whether the rate of frames emitted by the Live encoder should be paced by its system clock (which optionally may be locked to another source via NTP) or should be locked to the clock of the source that is providing the input stream. +* `support_low_framerate_inputs` – (Optional) Adjusts video input buffer for streams with very low video framerates. This is commonly set to enabled for music channels with less than one video frame per second. 
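Tying the global configuration fields to the bindings, a minimal sketch follows; the class name and import path are assumptions based on the generated-binding naming pattern used elsewhere on this page:

```python
from imports.aws.medialive_channel import MedialiveChannelEncoderSettingsGlobalConfiguration
# Values are illustrative; the field names follow the Global Configuration list above.
global_config = MedialiveChannelEncoderSettingsGlobalConfiguration(
    input_end_action="SWITCH_AND_LOOP_INPUTS",  # restart at the first input on end-of-file
    output_timing_source="SYSTEM_CLOCK",        # pace output off the encoder's own clock
    support_low_framerate_inputs="ENABLED"      # buffer for inputs under one frame per second
)
```

The object would then be passed as `global_configuration` on the channel's encoder settings.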
+ +### Input Loss Behavior + +* `password_param` – (Optional) Key used to extract the password from EC2 Parameter store. +* `uri` – (Required) Path to a file accessible to the live stream. +* `username` – (Optional) Username to be used. + +### Motion Graphics Configuration + +* `motion_graphics_insertion` – (Optional) Motion Graphics Insertion. +* `motion_graphics_settings` – (Required) Motion Graphics Settings. See [Motion Graphics Settings](#motion-graphics-settings) for more details. + +### Motion Graphics Settings + +* `html_motion_graphics_settings` – (Optional) Html Motion Graphics Settings. + +### Nielsen Configuration + +* `distributor_id` – (Optional) Enter the Distributor ID assigned to your organization by Nielsen. +* `nielsen_pcm_to_id3_tagging` – (Optional) Enables Nielsen PCM to ID3 tagging. + +### Avail Blanking + +* `avail_blanking_image` - (Optional) Blanking image to be used. See [Avail Blanking Image](#avail-blanking-image) for more details. +* `state` - (Optional) When set to enabled, causes video, audio and captions to be blanked when insertion metadata is added. + +### Avail Blanking Image + +* `uri` - (Required) Path to a file accessible to the live stream. +* `password_param` - (Optional) Key used to extract the password from EC2 Parameter store. +* `username` - (Optional) Username to be used. + +### Archive Group Settings + +* `destination` - (Required) A directory and base filename where archive files should be written. See [Destination](#destination) for more details. +* `archive_cdn_settings` - (Optional) Parameters that control the interactions with the CDN. See [Archive CDN Settings](#archive-cdn-settings) for more details. +* `rollover_interval` - (Optional) Number of seconds to write to archive file before closing and starting a new one. + +### Media Package Group Settings + +* `destination` - (Required) A directory and base filename where archive files should be written. See [Destination](#destination) for more details. + +### RTMP Group Settings + +* `ad_markers` - (Optional) The ad marker type for this output group. +* `authentication_scheme` - (Optional) Authentication scheme to use when connecting with CDN. +* `cache_full_behavior` - (Optional) Controls behavior when content cache fills up. +* `cache_length` - (Optional) Cache length, in seconds, used to calculate buffer size. +* `caption_data` - (Optional) Controls the types of data that passes to onCaptionInfo outputs. +* `input_loss_action` - (Optional) Controls the behavior of the RTMP group if input becomes unavailable. +* `restart_delay` - (Optional) Number of seconds to wait until a restart is initiated. + +### UDP Group Settings + +* `input_loss_action` - (Optional) Specifies behavior of last resort when input video is lost. +* `timed_metadata_id3_frame` - (Optional) Indicates ID3 frame that has the timecode. +* `timed_metadata_id3_period` - (Optional) Timed metadata interval in seconds. + +### Destination + +* `destination_ref_id` - (Required) Reference ID for the destination. + +### Archive CDN Settings + +* `archive_s3_settings` - (Optional) Archive S3 Settings. See [Archive S3 Settings](#archive-s3-settings) for more details. + +### Archive S3 Settings + +* `canned_acl` - (Optional) Specify the canned ACL to apply to each S3 request. + +### Output Settings + +* `archive_output_settings` - (Optional) Archive output settings. See [Archive Output Settings](#archive-output-settings) for more details. +* `media_package_output_settings` - (Optional) Media package output settings.
This can be set as an empty block. +* `multiplex_output_settings` - (Optional) Multiplex output settings. See [Multiplex Output Settings](#multiplex-output-settings) for more details. +* `rtmp_output_settings` - (Optional) RTMP output settings. See [RTMP Output Settings](#rtmp-output-settings) for more details. +* `udp_output_settings` - (Optional) UDP output settings. See [UDP Output Settings](#udp-output-settings) for more details. + +### Archive Output Settings + +* `container_settings` - (Required) Settings specific to the container type of the file. See [Container Settings](#container-settings) for more details. +* `extension` - (Optional) Output file extension. +* `name_modifier` - (Optional) String concatenated to the end of the destination filename. Required for multiple outputs of the same type. + +### Multiplex Output Settings + +* `destination` - (Required) Destination is a multiplex. See [Destination](#destination) for more details. + +### RTMP Output Settings + +* `destination` - (Required) The RTMP endpoint excluding the stream name. See [Destination](#destination) for more details. +* `certificate_mode` - (Optional) Setting to allow self-signed or verified RTMP certificates. +* `connection_retry_interval` - (Optional) Number of seconds to wait before retrying connection to the flash media server if the connection is lost. +* `num_retries` - (Optional) Number of retry attempts. + +### Container Settings + +* `m2ts_settings` - (Optional) M2ts Settings. See [M2ts Settings](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-medialive-channel-m2tssettings.html) for more details. +* `raw_settings` - (Optional) Raw Settings. This can be set as an empty block. + +### UDP Output Settings + +* `container_settings` - (Required) UDP container settings. See [Container Settings](#container-settings) for more details. +* `destination` - (Required) Destination address and port number for RTP or UDP packets. See [Destination](#destination) for more details. +* `buffer_msec` - (Optional) UDP output buffering in milliseconds. +* `fec_output_setting` - (Optional) Settings for enabling and adjusting Forward Error Correction on UDP outputs. See [FEC Output Settings](#fec-output-settings) for more details. + +### FEC Output Settings + +* `column_depth` - (Optional) The height of the FEC protection matrix. +* `include_fec` - (Optional) Enables column-only or column-and-row based FEC. +* `row_length` - (Optional) The width of the FEC protection matrix. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Channel. +* `channel_id` - ID of the Channel. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `15m`) +* `update` - (Default `15m`) +* `delete` - (Default `15m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive Channel using the `channel_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MediaLive Channel using the `channel_id`.
For example: + +```console +% terraform import aws_medialive_channel.example 1234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/medialive_input.html.markdown b/website/docs/cdktf/python/r/medialive_input.html.markdown new file mode 100644 index 00000000000..13edc5bf17f --- /dev/null +++ b/website/docs/cdktf/python/r/medialive_input.html.markdown @@ -0,0 +1,131 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_input" +description: |- + Terraform resource for managing an AWS MediaLive Input. +--- + + + +# Resource: aws_medialive_input + +Terraform resource for managing an AWS MediaLive Input. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.medialive_input import MedialiveInput +from imports.aws.medialive_input_security_group import MedialiveInputSecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = MedialiveInputSecurityGroup(self, "example", + tags={ + "ENVIRONMENT": "prod" + }, + whitelist_rules=[MedialiveInputSecurityGroupWhitelistRules( + cidr="10.0.0.8/32" + ) + ] + ) + aws_medialive_input_example = MedialiveInput(self, "example_1", + input_security_groups=[example.id], + name="example-input", + tags={ + "ENVIRONMENT": "prod" + }, + type="UDP_PUSH" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_medialive_input_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the input. +* `input_security_groups` - (Required) List of input security groups. +* `type` - (Required) The type of the input. Must be one of the input types that AWS Elemental MediaLive supports. + +The following arguments are optional: + +* `destinations` - (Optional) Destination settings for PUSH type inputs. See [Destinations](#destinations) for more details. +* `input_devices` - (Optional) Settings for the devices. See [Input Devices](#input-devices) for more details. +* `media_connect_flows` - (Optional) A list of the MediaConnect Flows. See [Media Connect Flows](#media-connect-flows) for more details. +* `role_arn` - (Optional) The ARN of the role this input assumes during and after creation. +* `sources` - (Optional) The source URLs for a PULL-type input. See [Sources](#sources) for more details. +* `tags` - (Optional) A map of tags to assign to the Input. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpc` - (Optional) Settings for a private VPC Input. See [VPC](#vpc) for more details. + +### Destinations + +* `stream_name` - A unique name for the location the RTMP stream is being pushed to. + +### Input Devices + +* `id` - The unique ID for the device. + +### Media Connect Flows + +* `flow_arn` - The ARN of the MediaConnect Flow. + +### Sources + +* `password_param` - The key used to extract the password from EC2 Parameter store. +* `url` - The URL where the stream is pulled from. +* `username` - The username for the input source.
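+ +The `sources` block applies only to PULL-type inputs, as in the hedged sketch below. The `RTMP_PULL` type value is one of the MediaLive input types, the `MedialiveInputSources` struct name is assumed from the provider's usual cdktf naming, and the endpoint and credentials are purely illustrative. + +```python +# Sketch: a PULL-type input that reads from a remote RTMP endpoint. +MedialiveInput(self, "pull_example", + # Reuses the input security group from the Basic Usage example. + input_security_groups=[example.id], + name="example-pull-input", + sources=[MedialiveInputSources( + # The password is resolved from SSM Parameter Store at ingest time. + password_param="/medialive/example/password", + url="rtmp://203.0.113.10:1935/live/stream", + username="example-user" + ) + ], + type="RTMP_PULL" +) +```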
+ +### VPC + +* `subnet_ids` - A list of 2 VPC subnet IDs from the same VPC. +* `security_group_ids` - A list of up to 5 EC2 VPC security group IDs to attach to the Input. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Input. +* `attached_channels` - Channels attached to Input. +* `input_class` - The input class. +* `input_partner_ids` - A list of IDs for all Inputs which are partners of this one. +* `input_source_type` - Source type of the input. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive Input using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MediaLive Input using the `id`. For example: + +```console +% terraform import aws_medialive_input.example 12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/medialive_input_security_group.html.markdown b/website/docs/cdktf/python/r/medialive_input_security_group.html.markdown new file mode 100644 index 00000000000..603c539a113 --- /dev/null +++ b/website/docs/cdktf/python/r/medialive_input_security_group.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_input_security_group" +description: |- + Terraform resource for managing an AWS MediaLive InputSecurityGroup. +--- + + + +# Resource: aws_medialive_input_security_group + +Terraform resource for managing an AWS MediaLive InputSecurityGroup. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.medialive_input_security_group import MedialiveInputSecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MedialiveInputSecurityGroup(self, "example", + tags={ + "ENVIRONMENT": "prod" + }, + whitelist_rules=[MedialiveInputSecurityGroupWhitelistRules( + cidr="10.0.0.8/32" + ) + ] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `whitelist_rules` - (Required) Whitelist rules. See [Whitelist Rules](#whitelist-rules) for more details. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the InputSecurityGroup. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Whitelist Rules + +* `cidr` - (Required) The IPv4 CIDR that's whitelisted. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - InputSecurityGroup ID. +* `arn` - ARN of the InputSecurityGroup.
+* `inputs` - The list of inputs currently using this InputSecurityGroup. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `update` - (Default `5m`) +* `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive InputSecurityGroup using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MediaLive InputSecurityGroup using the `id`. For example: + +```console +% terraform import aws_medialive_input_security_group.example 123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/medialive_multiplex.html.markdown b/website/docs/cdktf/python/r/medialive_multiplex.html.markdown new file mode 100644 index 00000000000..881c1fa547a --- /dev/null +++ b/website/docs/cdktf/python/r/medialive_multiplex.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_multiplex" +description: |- + Terraform resource for managing an AWS MediaLive Multiplex. +--- + + + +# Resource: aws_medialive_multiplex + +Terraform resource for managing an AWS MediaLive Multiplex. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.medialive_multiplex import MedialiveMultiplex +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + available = DataAwsAvailabilityZones(self, "available", + state="available" + ) + MedialiveMultiplex(self, "example", + availability_zones=[ + Token.as_string(property_access(available.names, ["0"])), + Token.as_string(property_access(available.names, ["1"])) + ], + multiplex_settings=MedialiveMultiplexMultiplexSettings( + maximum_video_buffer_delay_milliseconds=1000, + transport_stream_bitrate=1000000, + transport_stream_id=1, + transport_stream_reserved_bitrate=1 + ), + name="example-multiplex-changed", + start_multiplex=True, + tags={ + "tag1": "value1" + } + ) +``` + +## Argument Reference + +The following arguments are required: + +* `availability_zones` - (Required) A list of availability zones. You must specify exactly two. +* `multiplex_settings` - (Required) Multiplex settings. See [Multiplex Settings](#multiplex-settings) for more details. +* `name` - (Required) Name of the Multiplex. + +The following arguments are optional: + +* `start_multiplex` - (Optional) Whether to start the Multiplex. Defaults to `false`. +* `tags` - (Optional) A map of tags to assign to the Multiplex. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+ +### Multiplex Settings + +* `transport_stream_bitrate` - (Required) Transport stream bit rate. +* `transport_stream_id` - (Required) Unique ID for each multiplex. +* `transport_stream_reserved_bitrate` - (Optional) Transport stream reserved bit rate. +* `maximum_video_buffer_delay_milliseconds` - (Optional) Maximum video buffer delay. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Multiplex. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive Multiplex using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MediaLive Multiplex using the `id`. For example: + +```console +% terraform import aws_medialive_multiplex.example 12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/medialive_multiplex_program.html.markdown b/website/docs/cdktf/python/r/medialive_multiplex_program.html.markdown new file mode 100644 index 00000000000..bb95b220872 --- /dev/null +++ b/website/docs/cdktf/python/r/medialive_multiplex_program.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_multiplex_program" +description: |- + Terraform resource for managing an AWS MediaLive MultiplexProgram. +--- + + + +# Resource: aws_medialive_multiplex_program + +Terraform resource for managing an AWS MediaLive MultiplexProgram. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.medialive_multiplex import MedialiveMultiplex +from imports.aws.medialive_multiplex_program import MedialiveMultiplexProgram +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + available = DataAwsAvailabilityZones(self, "available", + state="available" + ) + example = MedialiveMultiplex(self, "example", + availability_zones=[ + Token.as_string(property_access(available.names, ["0"])), + Token.as_string(property_access(available.names, ["1"])) + ], + multiplex_settings=MedialiveMultiplexMultiplexSettings( + maximum_video_buffer_delay_milliseconds=1000, + transport_stream_bitrate=1000000, + transport_stream_id=1, + transport_stream_reserved_bitrate=1 + ), + name="example-multiplex-changed", + start_multiplex=True, + tags={ + "tag1": "value1" + } + ) + aws_medialive_multiplex_program_example = MedialiveMultiplexProgram(self, "example_2", + multiplex_id=example.id, + multiplex_program_settings=[MedialiveMultiplexProgramMultiplexProgramSettings( + preferred_channel_pipeline="CURRENTLY_ACTIVE", + program_number=1, + video_settings=[MedialiveMultiplexProgramMultiplexProgramSettingsVideoSettings( + constant_bitrate=100000 + ) + ] + ) + ], + program_name="example_program" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_medialive_multiplex_program_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `multiplex_id` - (Required) Multiplex ID. +* `program_name` - (Required) Unique program name. +* `multiplex_program_settings` - (Required) MultiplexProgram settings. See [Multiplex Program Settings](#multiplex-program-settings) for more details. + +### Multiplex Program Settings + +* `program_number` - (Required) Unique program number. +* `preferred_channel_pipeline` - (Required) Enum for preferred channel pipeline. Options are `CURRENTLY_ACTIVE`, `PIPELINE_0`, or `PIPELINE_1`. +* `service_descriptor` - (Optional) Service Descriptor. See [Service Descriptor](#service-descriptor) for more details. +* `video_settings` - (Optional) Video settings. See [Video Settings](#video-settings) for more details. + +### Service Descriptor + +* `provider_name` - (Required) Unique provider name. +* `service_name` - (Required) Unique service name. + +### Video Settings + +* `constant_bitrate` - (Optional) Constant bitrate value. +* `statmux_settings` - (Optional) Statmux settings. See [Statmux Settings](#statmux-settings) for more details. + +### Statmux Settings + +* `minimum_bitrate` - (Optional) Minimum bitrate. +* `maximum_bitrate` - (Optional) Maximum bitrate. +* `priority` - (Optional) Priority value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the MultiplexProgram. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive MultiplexProgram using the `id`, or a combination of "`program_name`/`multiplex_id`".
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MediaLive MultiplexProgram using the `id`, or a combination of "`program_name`/`multiplex_id`". For example: + +```console +% terraform import aws_medialive_multiplex_program.example example_program/1234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/memorydb_acl.html.markdown b/website/docs/cdktf/python/r/memorydb_acl.html.markdown new file mode 100644 index 00000000000..52526593f13 --- /dev/null +++ b/website/docs/cdktf/python/r/memorydb_acl.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_acl" +description: |- + Provides a MemoryDB ACL. +--- + + + +# Resource: aws_memorydb_acl + +Provides a MemoryDB ACL. + +More information about users and ACLs can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/clusters.acls.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.memorydb_acl import MemorydbAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MemorydbAcl(self, "example", + name="my-acl", + user_names=["my-user-1", "my-user-2"] + ) +``` + +## Argument Reference + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the ACL. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `user_names` - (Optional) Set of MemoryDB user names to be included in this ACL. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `name`. +* `arn` - The ARN of the ACL. +* `minimum_engine_version` - The minimum engine version supported by the ACL. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an ACL using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an ACL using the `name`.
For example: + +```console +% terraform import aws_memorydb_acl.example my-acl +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/memorydb_cluster.html.markdown b/website/docs/cdktf/python/r/memorydb_cluster.html.markdown new file mode 100644 index 00000000000..510a8e47ea6 --- /dev/null +++ b/website/docs/cdktf/python/r/memorydb_cluster.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_cluster" +description: |- + Provides a MemoryDB Cluster. +--- + + + +# Resource: aws_memorydb_cluster + +Provides a MemoryDB Cluster. + +More information about MemoryDB can be found in the [Developer Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/what-is-memorydb-for-redis.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.memorydb_cluster import MemorydbCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MemorydbCluster(self, "example", + acl_name="open-access", + name="my-cluster", + node_type="db.t4g.small", + num_shards=2, + security_group_ids=[Token.as_string(aws_security_group_example.id)], + snapshot_retention_limit=7, + subnet_group_name=Token.as_string(aws_memorydb_subnet_group_example.id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `acl_name` - (Required) The name of the Access Control List to associate with the cluster. +* `node_type` - (Required) The compute and memory capacity of the nodes in the cluster. See AWS documentation on [supported node types](https://docs.aws.amazon.com/memorydb/latest/devguide/nodes.supportedtypes.html) as well as [vertical scaling](https://docs.aws.amazon.com/memorydb/latest/devguide/cluster-vertical-scaling.html). + +The following arguments are optional: + +* `auto_minor_version_upgrade` - (Optional, Forces new resource) When set to `true`, the cluster will automatically receive minor engine version upgrades after launch. Defaults to `true`. +* `data_tiering` - (Optional, Forces new resource) Enables data tiering. This option is not supported by all instance types. For more information, see [Data tiering](https://docs.aws.amazon.com/memorydb/latest/devguide/data-tiering.html). +* `description` - (Optional) Description for the cluster. Defaults to `"Managed by Terraform"`. +* `engine_version` - (Optional) Version number of the Redis engine to be used for the cluster. Downgrades are not supported. +* `final_snapshot_name` - (Optional) Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made. +* `kms_key_arn` - (Optional, Forces new resource) ARN of the KMS key used to encrypt the cluster at rest. +* `maintenance_window` - (Optional) Specifies the weekly time range during which maintenance on the cluster is performed. Specify as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:23:00-mon:01:30`. +* `name` - (Optional, Forces new resource) Name of the cluster. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. 
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `num_replicas_per_shard` - (Optional) The number of replicas to apply to each shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard). +* `num_shards` - (Optional) The number of shards in the cluster. Defaults to `1`. +* `parameter_group_name` - (Optional) The name of the parameter group associated with the cluster. +* `port` - (Optional, Forces new resource) The port number on which each of the nodes accepts connections. Defaults to `6379`. +* `security_group_ids` - (Optional) Set of VPC Security Group IDs to associate with this cluster. +* `snapshot_arns` - (Optional, Forces new resource) List of ARNs that uniquely identify RDB snapshot files stored in S3. The snapshot files will be used to populate the new cluster. Object names in the ARNs cannot contain any commas. +* `snapshot_name` - (Optional, Forces new resource) The name of a snapshot from which to restore data into the new cluster. +* `snapshot_retention_limit` - (Optional) The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled. Defaults to `0`. +* `snapshot_window` - (Optional) The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`. +* `sns_topic_arn` - (Optional) ARN of the SNS topic to which cluster notifications are sent. +* `subnet_group_name` - (Optional, Forces new resource) The name of the subnet group to be used for the cluster. Defaults to a subnet group consisting of default VPC subnets. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tls_enabled` - (Optional, Forces new resource) A flag to enable in-transit encryption on the cluster. When set to `false`, the `acl_name` must be `open-access`. Defaults to `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `name`. +* `arn` - The ARN of the cluster. +* `cluster_endpoint` + * `address` - DNS hostname of the cluster configuration endpoint. + * `port` - Port number that the cluster configuration endpoint is listening on. +* `engine_patch_version` - Patch version number of the Redis engine used by the cluster. +* `shards` - Set of shards in this cluster. + * `name` - Name of this shard. + * `num_nodes` - Number of individual nodes in this shard. + * `slots` - Keyspace for this shard. Example: `0-16383`. + * `nodes` - Set of nodes in this shard. + * `availability_zone` - The Availability Zone in which the node resides. + * `create_time` - The date and time when the node was created. Example: `2022-01-01T21:00:00Z`. + * `name` - Name of this node. + * `endpoint` + * `address` - DNS hostname of the node. + * `port` - Port number that this node is listening on. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
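+ +If other parts of the stack need the connection endpoint, the nested `cluster_endpoint` attribute can be read through the generated bindings. The following is a minimal sketch, assuming the cluster from the usage example is assigned to a variable `example` and that the binding exposes the computed block through cdktf's usual `.get(index)` list accessor: + +```python +from cdktf import TerraformOutput + +# Sketch: surface the cluster configuration endpoint as stack outputs. +TerraformOutput(self, "memorydb_address", + value=example.cluster_endpoint.get(0).address +) +TerraformOutput(self, "memorydb_port", + value=example.cluster_endpoint.get(0).port +) +```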
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `120m`) +- `update` - (Default `120m`) +- `delete` - (Default `120m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a cluster using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a cluster using the `name`. For example: + +```console +% terraform import aws_memorydb_cluster.example my-cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/memorydb_parameter_group.html.markdown b/website/docs/cdktf/python/r/memorydb_parameter_group.html.markdown new file mode 100644 index 00000000000..8c1b4da5bcd --- /dev/null +++ b/website/docs/cdktf/python/r/memorydb_parameter_group.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_parameter_group" +description: |- + Provides a MemoryDB Parameter Group. +--- + + + +# Resource: aws_memorydb_parameter_group + +Provides a MemoryDB Parameter Group. + +More information about parameter groups can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/parametergroups.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.memorydb_parameter_group import MemorydbParameterGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MemorydbParameterGroup(self, "example", + family="memorydb_redis6", + name="my-parameter-group", + parameter=[MemorydbParameterGroupParameter( + name="activedefrag", + value="yes" + ) + ] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `family` - (Required, Forces new resource) The engine version that the parameter group can be used with. + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the parameter group. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `description` - (Optional, Forces new resource) Description for the parameter group. Defaults to `"Managed by Terraform"`. +* `parameter` - (Optional) Set of MemoryDB parameters to apply. Any parameters not specified will fall back to their family defaults. Detailed below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### parameter Configuration Block + +* `name` - (Required) The name of the parameter. +* `value` - (Required) The value of the parameter. 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `name`. +* `arn` - The ARN of the parameter group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a parameter group using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a parameter group using the `name`. For example: + +```console +% terraform import aws_memorydb_parameter_group.example my-parameter-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/memorydb_snapshot.html.markdown b/website/docs/cdktf/python/r/memorydb_snapshot.html.markdown new file mode 100644 index 00000000000..0546a9210a2 --- /dev/null +++ b/website/docs/cdktf/python/r/memorydb_snapshot.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_snapshot" +description: |- + Provides a MemoryDB Snapshot. +--- + + + +# Resource: aws_memorydb_snapshot + +Provides a MemoryDB Snapshot. + +More information about snapshot and restore can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/snapshots.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.memorydb_snapshot import MemorydbSnapshot +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MemorydbSnapshot(self, "example", + cluster_name=Token.as_string(aws_memorydb_cluster_example.name), + name="my-snapshot" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cluster_name` - (Required, Forces new resource) Name of the MemoryDB cluster to take a snapshot of. +* `name` - (Optional, Forces new resource) Name of the snapshot. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `kms_key_arn` - (Optional, Forces new resource) ARN of the KMS key used to encrypt the snapshot at rest. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the snapshot. +* `arn` - The ARN of the snapshot. +* `cluster_configuration` - The configuration of the cluster from which the snapshot was taken. 
+ * `description` - Description for the cluster. + * `engine_version` - Version number of the Redis engine used by the cluster. + * `maintenance_window` - The weekly time range during which maintenance on the cluster is performed. + * `name` - Name of the cluster. + * `node_type` - Compute and memory capacity of the nodes in the cluster. + * `num_shards` - Number of shards in the cluster. + * `parameter_group_name` - Name of the parameter group associated with the cluster. + * `port` - Port number on which the cluster accepts connections. + * `snapshot_retention_limit` - Number of days for which MemoryDB retains automatic snapshots before deleting them. + * `snapshot_window` - The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of the shard. + * `subnet_group_name` - Name of the subnet group used by the cluster. + * `topic_arn` - ARN of the SNS topic to which cluster notifications are sent. + * `vpc_id` - The VPC in which the cluster exists. +* `source` - Indicates whether the snapshot is from an automatic backup (`automated`) or was created manually (`manual`). +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `120m`) +- `delete` - (Default `120m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a snapshot using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a snapshot using the `name`. For example: + +```console +% terraform import aws_memorydb_snapshot.example my-snapshot +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/memorydb_subnet_group.html.markdown b/website/docs/cdktf/python/r/memorydb_subnet_group.html.markdown new file mode 100644 index 00000000000..c28b582afd6 --- /dev/null +++ b/website/docs/cdktf/python/r/memorydb_subnet_group.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_subnet_group" +description: |- + Provides a MemoryDB Subnet Group. +--- + + + +# Resource: aws_memorydb_subnet_group + +Provides a MemoryDB Subnet Group. + +More information about subnet groups can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/subnetgroups.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.memorydb_subnet_group import MemorydbSubnetGroup +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Vpc(self, "example", + cidr_block="10.0.0.0/16" + ) + aws_subnet_example = Subnet(self, "example_1", + availability_zone="us-west-2a", + cidr_block="10.0.0.0/24", + vpc_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_subnet_example.override_logical_id("example") + aws_memorydb_subnet_group_example = MemorydbSubnetGroup(self, "example_2", + name="my-subnet-group", + subnet_ids=[Token.as_string(aws_subnet_example.id)] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_memorydb_subnet_group_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `subnet_ids` - (Required) Set of VPC Subnet IDs for the subnet group. At least one subnet must be provided. + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the subnet group. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `description` - (Optional) Description for the subnet group. Defaults to `"Managed by Terraform"`. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the subnet group. +* `arn` - The ARN of the subnet group. +* `vpc_id` - The VPC in which the subnet group exists. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a subnet group using its `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a subnet group using its `name`. For example: + +```console +% terraform import aws_memorydb_subnet_group.example my-subnet-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/memorydb_user.html.markdown b/website/docs/cdktf/python/r/memorydb_user.html.markdown new file mode 100644 index 00000000000..58f865a0933 --- /dev/null +++ b/website/docs/cdktf/python/r/memorydb_user.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_user" +description: |- + Provides a MemoryDB User. +--- + + + +# Resource: aws_memorydb_user + +Provides a MemoryDB User.
+ +More information about users and ACLs can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/clusters.acls.html). + +~> **Note:** All arguments including the username and passwords will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.memorydb_user import MemorydbUser +from imports.random.password import Password +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # The following providers are missing schema information and might need manual adjustments to synthesize correctly: random. + # For a more precise conversion please use the --provider flag in convert. + example = Password(self, "example", + length=16 + ) + aws_memorydb_user_example = MemorydbUser(self, "example_1", + access_string="on ~* &* +@all", + authentication_mode=MemorydbUserAuthenticationMode( + passwords=[Token.as_string(example.result)], + type="password" + ), + user_name="my-user" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_memorydb_user_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `access_string` - (Required) The access permissions string used for this user. +* `authentication_mode` - (Required) Denotes the user's authentication properties. Detailed below. +* `user_name` - (Required, Forces new resource) Name of the MemoryDB user. Up to 40 characters. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### authentication_mode Configuration Block + +* `passwords` - (Required) The set of passwords used for authentication. You can create up to two passwords for each user. +* `type` - (Required) Indicates whether the user requires a password to authenticate. Must be set to `password`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `user_name`. +* `arn` - The ARN of the user. +* `minimum_engine_version` - The minimum engine version supported for the user. +* `authentication_mode` configuration block + * `password_count` - The number of passwords belonging to the user. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a user using the `user_name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a user using the `user_name`. For example: + +```console +% terraform import aws_memorydb_user.example my-user +``` + +The `passwords` are not available for imported resources, as this information cannot be read back from the MemoryDB API. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/mq_broker.html.markdown b/website/docs/cdktf/python/r/mq_broker.html.markdown new file mode 100644 index 00000000000..3b4f7e7e616 --- /dev/null +++ b/website/docs/cdktf/python/r/mq_broker.html.markdown @@ -0,0 +1,221 @@ +--- +subcategory: "MQ" +layout: "aws" +page_title: "AWS: aws_mq_broker" +description: |- + Provides an MQ Broker Resource +--- + + + +# Resource: aws_mq_broker + +Provides an Amazon MQ broker resource. This resource also manages users for the broker. + +-> For more information on Amazon MQ, see [Amazon MQ documentation](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/welcome.html). + +~> **NOTE:** Amazon MQ currently places limits on **RabbitMQ** brokers. For example, a RabbitMQ broker cannot have: instances with an associated IP address of an ENI attached to the broker, an associated LDAP server to authenticate and authorize broker connections, storage type `EFS`, audit logging, or `configuration` blocks. Although this resource allows you to create RabbitMQ users, RabbitMQ users cannot have console access or groups. Also, Amazon MQ does not return information about RabbitMQ users so drift detection is not possible. + +~> **NOTE:** Changes to an MQ Broker can occur when you change a parameter, such as `configuration` or `user`, and are reflected in the next maintenance window. Because of this, Terraform may report a difference in its planning phase because a modification has not yet taken place. You can use the `apply_immediately` flag to instruct the service to apply the change immediately (see documentation below). Using `apply_immediately` can result in a brief downtime as the broker reboots. + +~> **NOTE:** All arguments including the username and password will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +### Basic Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.mq_broker import MqBroker +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MqBroker(self, "example", + broker_name="example", + configuration=MqBrokerConfiguration( + id=test.id, + revision=test.latest_revision + ), + engine_type="ActiveMQ", + engine_version="5.15.9", + host_instance_type="mq.t2.micro", + security_groups=[Token.as_string(aws_security_group_test.id)], + user=[MqBrokerUser( + password="MindTheGap", + username="ExampleUser" + ) + ] + ) +``` + +### High-throughput Optimized Example + +This example shows the use of EBS storage for high-throughput optimized performance.
+ +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.mq_broker import MqBroker +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MqBroker(self, "example", + broker_name="example", + configuration=MqBrokerConfiguration( + id=test.id, + revision=test.latest_revision + ), + engine_type="ActiveMQ", + engine_version="5.15.9", + host_instance_type="mq.m5.large", + security_groups=[Token.as_string(aws_security_group_test.id)], + storage_type="ebs", + user=[MqBrokerUser( + password="MindTheGap", + username="ExampleUser" + ) + ] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `broker_name` - (Required) Name of the broker. +* `engine_type` - (Required) Type of broker engine. Valid values are `ActiveMQ` and `RabbitMQ`. +* `engine_version` - (Required) Version of the broker engine. See the [AmazonMQ Broker Engine docs](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/broker-engine.html) for supported versions. For example, `5.15.0`. +* `host_instance_type` - (Required) Broker's instance type. For example, `mq.t3.micro`, `mq.m5.large`. +* `user` - (Required) Configuration block for broker users. For `engine_type` of `RabbitMQ`, Amazon MQ does not return broker users, preventing this resource from updating users or detecting drift. Detailed below. + +The following arguments are optional: + +* `apply_immediately` - (Optional) Specifies whether any broker modifications are applied immediately, or during the next maintenance window. Default is `false`. +* `authentication_strategy` - (Optional) Authentication strategy used to secure the broker. Valid values are `simple` and `ldap`. `ldap` is not supported for `engine_type` `RabbitMQ`. +* `auto_minor_version_upgrade` - (Optional) Whether to automatically upgrade to new minor versions of brokers as Amazon MQ makes releases available. +* `configuration` - (Optional) Configuration block for broker configuration. Applies to `engine_type` of `ActiveMQ` only. Detailed below. +* `deployment_mode` - (Optional) Deployment mode of the broker. Valid values are `SINGLE_INSTANCE`, `ACTIVE_STANDBY_MULTI_AZ`, and `CLUSTER_MULTI_AZ`. Default is `SINGLE_INSTANCE`. +* `encryption_options` - (Optional) Configuration block containing encryption options. Detailed below. +* `ldap_server_metadata` - (Optional) Configuration block for the LDAP server used to authenticate and authorize connections to the broker. Not supported for `engine_type` `RabbitMQ`. Detailed below. (Currently, AWS may not process changes to LDAP server metadata.) +* `logs` - (Optional) Configuration block for the logging configuration of the broker. Detailed below. +* `maintenance_window_start_time` - (Optional) Configuration block for the maintenance window start time. Detailed below. +* `publicly_accessible` - (Optional) Whether to enable connections from applications outside of the VPC that hosts the broker's subnets. +* `security_groups` - (Optional) List of security group IDs assigned to the broker. +* `storage_type` - (Optional) Storage type of the broker. For `engine_type` `ActiveMQ`, the valid values are `efs` and `ebs`, and the AWS-default is `efs`. For `engine_type` `RabbitMQ`, only `ebs` is supported.
When using `ebs`, only the `mq.m5` broker instance type family is supported.
+* `subnet_ids` - (Optional) List of subnet IDs in which to launch the broker. A `SINGLE_INSTANCE` deployment requires one subnet. An `ACTIVE_STANDBY_MULTI_AZ` deployment requires multiple subnets.
+* `tags` - (Optional) Map of tags to assign to the broker. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### configuration
+
+The following arguments are optional:
+
+* `id` - (Optional) The Configuration ID.
+* `revision` - (Optional) Revision of the Configuration.
+
+### encryption_options
+
+The following arguments are optional:
+
+* `kms_key_id` - (Optional) Amazon Resource Name (ARN) of Key Management Service (KMS) Customer Master Key (CMK) to use for encryption at rest. Requires setting `use_aws_owned_key` to `false`. To perform drift detection when AWS-managed CMKs or customer-managed CMKs are in use, this value must be configured.
+* `use_aws_owned_key` - (Optional) Whether to enable an AWS-owned KMS CMK that is not in your account. Defaults to `true`. Setting to `false` without configuring `kms_key_id` will create an AWS-managed CMK aliased to `aws/mq` in your account.
+
+### ldap_server_metadata
+
+The following arguments are optional:

+* `hosts` - (Optional) List containing the fully qualified domain name of the LDAP server and an optional failover server.
+* `role_base` - (Optional) Fully qualified name of the directory to search for a user’s groups.
+* `role_name` - (Optional) Specifies the LDAP attribute that identifies the group name attribute in the object returned from the group membership query.
+* `role_search_matching` - (Optional) Search criteria for groups.
+* `role_search_subtree` - (Optional) Whether the directory search scope is the entire sub-tree.
+* `service_account_password` - (Optional) Service account password.
+* `service_account_username` - (Optional) Service account username.
+* `user_base` - (Optional) Fully qualified name of the directory where you want to search for users.
+* `user_role_name` - (Optional) Specifies the name of the LDAP attribute for the user group membership.
+* `user_search_matching` - (Optional) Search criteria for users.
+* `user_search_subtree` - (Optional) Whether the directory search scope is the entire sub-tree.
+
+### logs
+
+The following arguments are optional:
+
+* `audit` - (Optional) Enables audit logging. Auditing is only possible for `engine_type` of `ActiveMQ`. User management actions made using JMX or the ActiveMQ Web Console are logged. Defaults to `false`.
+* `general` - (Optional) Enables general logging via CloudWatch. Defaults to `false`.
+
+### maintenance_window_start_time
+
+The following arguments are required:
+
+* `day_of_week` - (Required) Day of the week, e.g., `MONDAY`, `TUESDAY`, or `WEDNESDAY`.
+* `time_of_day` - (Required) Time, in 24-hour format, e.g., `02:00`.
+* `time_zone` - (Required) Time zone in either the Country/City format or the UTC offset format, e.g., `CET`.
+
+### user
+
+* `console_access` - (Optional) Whether to enable access to the [ActiveMQ Web Console](http://activemq.apache.org/web-console.html) for the user. Applies to `engine_type` of `ActiveMQ` only.
+* `groups` - (Optional) List of groups (20 maximum) to which the ActiveMQ user belongs. Applies to `engine_type` of `ActiveMQ` only.
+* `password` - (Required) Password of the user. It must be 12 to 250 characters long, at least 4 unique characters, and must not contain commas.
+* `replication_user` - (Optional) Whether to set the replication user. Defaults to `false`.
+* `username` - (Required) Username of the user.
+
+~> **NOTE:** AWS currently does not support updating RabbitMQ users. Updates to users can only be made in the RabbitMQ UI.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the broker.
+* `id` - Unique ID that Amazon MQ generates for the broker.
+* `instances` - List of information about allocated brokers (both active & standby).
+    * `instances.0.console_url` - The URL of the [ActiveMQ Web Console](http://activemq.apache.org/web-console.html) or the [RabbitMQ Management UI](https://www.rabbitmq.com/management.html#external-monitoring) depending on `engine_type`.
+    * `instances.0.ip_address` - IP Address of the broker.
+    * `instances.0.endpoints` - Broker's wire-level protocol endpoints in the following order and format, referenceable e.g., as `instances.0.endpoints.0` (SSL):
+        * For `ActiveMQ`:
+            * `ssl://broker-id.mq.us-west-2.amazonaws.com:61617`
+            * `amqp+ssl://broker-id.mq.us-west-2.amazonaws.com:5671`
+            * `stomp+ssl://broker-id.mq.us-west-2.amazonaws.com:61614`
+            * `mqtt+ssl://broker-id.mq.us-west-2.amazonaws.com:8883`
+            * `wss://broker-id.mq.us-west-2.amazonaws.com:61619`
+        * For `RabbitMQ`:
+            * `amqps://broker-id.mq.us-west-2.amazonaws.com:5671`
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MQ Brokers using their broker ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import MQ Brokers using their broker ID. For example:
+
+```console
+% terraform import aws_mq_broker.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/mq_configuration.html.markdown b/website/docs/cdktf/python/r/mq_configuration.html.markdown
new file mode 100644
index 00000000000..5b6f56921c4
--- /dev/null
+++ b/website/docs/cdktf/python/r/mq_configuration.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "MQ"
+layout: "aws"
+page_title: "AWS: aws_mq_configuration"
+description: |-
+  Provides an MQ configuration Resource
+---
+
+
+
+# Resource: aws_mq_configuration
+
+Provides an MQ Configuration Resource.
+
+For more information on Amazon MQ, see [Amazon MQ documentation](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/welcome.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.mq_configuration import MqConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        MqConfiguration(self, "example",
+            data="<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n<broker xmlns=\"http://activemq.apache.org/schema/core\">\n  <plugins>\n    <forcePersistencyModeBrokerPlugin persistent=\"true\"/>\n    <statisticsBrokerPlugin/>\n    <timeStampingBrokerPlugin ttlCeiling=\"86400000\" zeroExpirationOverride=\"86400000\"/>\n  </plugins>\n</broker>\n\n",
+            description="Example Configuration",
+            engine_type="ActiveMQ",
+            engine_version="5.15.0",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `data` - (Required) Broker configuration in XML format. See [official docs](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/amazon-mq-broker-configuration-parameters.html) for supported parameters and format of the XML.
+* `engine_type` - (Required) Type of broker engine. Valid values are `ActiveMQ` and `RabbitMQ`.
+* `engine_version` - (Required) Version of the broker engine.
+* `name` - (Required) Name of the configuration.
+
+The following arguments are optional:
+
+* `authentication_strategy` - (Optional) Authentication strategy associated with the configuration. Valid values are `simple` and `ldap`. `ldap` is not supported for `engine_type` `RabbitMQ`.
+* `description` - (Optional) Description of the configuration.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the configuration.
+* `id` - Unique ID that Amazon MQ generates for the configuration.
+* `latest_revision` - Latest revision of the configuration.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MQ Configurations using the configuration ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import MQ Configurations using the configuration ID. For example:
+
+```console
+% terraform import aws_mq_configuration.example c-0187d1eb-88c8-475a-9b79-16ef5a10c94f
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/msk_cluster.html.markdown b/website/docs/cdktf/python/r/msk_cluster.html.markdown
new file mode 100644
index 00000000000..20bf5651c8b
--- /dev/null
+++ b/website/docs/cdktf/python/r/msk_cluster.html.markdown
@@ -0,0 +1,365 @@
+---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_cluster"
+description: |-
+  Terraform resource for managing an AWS Managed Streaming for Kafka cluster.
+---
+
+
+
+# Resource: aws_msk_cluster
+
+Manages an Amazon MSK cluster.
+ +-> **Note:** This resource manages _provisioned_ clusters. To manage a _serverless_ Amazon MSK cluster, use the [`aws_msk_serverless_cluster`](/docs/providers/aws/r/msk_serverless_cluster.html) resource. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, property_access, TerraformOutput, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_log_group import CloudwatchLogGroup +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.kinesis_firehose_delivery_stream import KinesisFirehoseDeliveryStream +from imports.aws.kms_key import KmsKey +from imports.aws.msk_cluster import MskCluster +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.security_group import SecurityGroup +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = CloudwatchLogGroup(self, "test", + name="msk_broker_logs" + ) + kms = KmsKey(self, "kms", + description="example" + ) + bucket = S3Bucket(self, "bucket", + bucket="msk-broker-logs-bucket" + ) + S3BucketAcl(self, "bucket_acl", + acl="private", + bucket=bucket.id + ) + vpc = Vpc(self, "vpc", + cidr_block="192.168.0.0/22" + ) + azs = DataAwsAvailabilityZones(self, "azs", + state="available" + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["firehose.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + firehose_role = IamRole(self, "firehose_role", + assume_role_policy=Token.as_string(assume_role.json), + name="firehose_test_role" + ) + test_stream = KinesisFirehoseDeliveryStream(self, "test_stream", + destination="extended_s3", + extended_s3_configuration=KinesisFirehoseDeliveryStreamExtendedS3Configuration( + bucket_arn=bucket.arn, + role_arn=firehose_role.arn + ), + lifecycle=TerraformResourceLifecycle( + ignore_changes=["logDeliveryEnabled"] + ), + name="terraform-kinesis-firehose-msk-broker-logs-stream", + tags={ + "LogDeliveryEnabled": "placeholder" + } + ) + sg = SecurityGroup(self, "sg", + vpc_id=vpc.id + ) + subnet_az1 = Subnet(self, "subnet_az1", + availability_zone=Token.as_string(property_access(azs.names, ["0"])), + cidr_block="192.168.0.0/24", + vpc_id=vpc.id + ) + subnet_az2 = Subnet(self, "subnet_az2", + availability_zone=Token.as_string(property_access(azs.names, ["1"])), + cidr_block="192.168.1.0/24", + vpc_id=vpc.id + ) + subnet_az3 = Subnet(self, "subnet_az3", + availability_zone=Token.as_string(property_access(azs.names, ["2"])), + cidr_block="192.168.2.0/24", + vpc_id=vpc.id + ) + example = MskCluster(self, "example", + broker_node_group_info=MskClusterBrokerNodeGroupInfo( + client_subnets=[subnet_az1.id, subnet_az2.id, subnet_az3.id], + instance_type="kafka.m5.large", + security_groups=[sg.id], + storage_info=MskClusterBrokerNodeGroupInfoStorageInfo( + 
ebs_storage_info=MskClusterBrokerNodeGroupInfoStorageInfoEbsStorageInfo( + volume_size=1000 + ) + ) + ), + cluster_name="example", + encryption_info=MskClusterEncryptionInfo( + encryption_at_rest_kms_key_arn=kms.arn + ), + kafka_version="3.2.0", + logging_info=MskClusterLoggingInfo( + broker_logs=MskClusterLoggingInfoBrokerLogs( + cloudwatch_logs=MskClusterLoggingInfoBrokerLogsCloudwatchLogs( + enabled=True, + log_group=test.name + ), + firehose=MskClusterLoggingInfoBrokerLogsFirehose( + delivery_stream=test_stream.name, + enabled=True + ), + s3=MskClusterLoggingInfoBrokerLogsS3( + bucket=bucket.id, + enabled=True, + prefix="logs/msk-" + ) + ) + ), + number_of_broker_nodes=3, + open_monitoring=MskClusterOpenMonitoring( + prometheus=MskClusterOpenMonitoringPrometheus( + jmx_exporter=MskClusterOpenMonitoringPrometheusJmxExporter( + enabled_in_broker=True + ), + node_exporter=MskClusterOpenMonitoringPrometheusNodeExporter( + enabled_in_broker=True + ) + ) + ), + tags={ + "foo": "bar" + } + ) + TerraformOutput(self, "bootstrap_brokers_tls", + value=example.bootstrap_brokers_tls, + description="TLS connection host:port pairs" + ) + TerraformOutput(self, "zookeeper_connect_string", + value=example.zookeeper_connect_string + ) +``` + +### With volume_throughput argument + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.msk_cluster import MskCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MskCluster(self, "example", + broker_node_group_info=MskClusterBrokerNodeGroupInfo( + client_subnets=[subnet_az1.id, subnet_az2.id, subnet_az3.id], + instance_type="kafka.m5.4xlarge", + security_groups=[sg.id], + storage_info=MskClusterBrokerNodeGroupInfoStorageInfo( + ebs_storage_info=MskClusterBrokerNodeGroupInfoStorageInfoEbsStorageInfo( + provisioned_throughput=MskClusterBrokerNodeGroupInfoStorageInfoEbsStorageInfoProvisionedThroughput( + enabled=True, + volume_throughput=250 + ), + volume_size=1000 + ) + ) + ), + cluster_name="example", + kafka_version="2.7.1", + number_of_broker_nodes=3 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `broker_node_group_info` - (Required) Configuration block for the broker nodes of the Kafka cluster. +* `cluster_name` - (Required) Name of the MSK cluster. +* `kafka_version` - (Required) Specify the desired Kafka software version. +* `number_of_broker_nodes` - (Required) The desired total number of broker nodes in the kafka cluster. It must be a multiple of the number of specified client subnets. +* `client_authentication` - (Optional) Configuration block for specifying a client authentication. See below. +* `configuration_info` - (Optional) Configuration block for specifying a MSK Configuration to attach to Kafka brokers. See below. +* `encryption_info` - (Optional) Configuration block for specifying encryption. See below. +* `enhanced_monitoring` - (Optional) Specify the desired enhanced MSK CloudWatch monitoring level. See [Monitoring Amazon MSK with Amazon CloudWatch](https://docs.aws.amazon.com/msk/latest/developerguide/monitoring.html) +* `open_monitoring` - (Optional) Configuration block for JMX and Node monitoring for the MSK cluster. See below. 
+* `logging_info` - (Optional) Configuration block for streaming broker logs to Cloudwatch/S3/Kinesis Firehose. See below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### broker_node_group_info Argument Reference
+
+* `client_subnets` - (Required) A list of subnets to connect to in client VPC ([documentation](https://docs.aws.amazon.com/msk/1.0/apireference/clusters.html#clusters-prop-brokernodegroupinfo-clientsubnets)).
+* `instance_type` - (Required) Specify the instance type to use for the Kafka brokers, e.g., `kafka.m5.large`. ([Pricing info](https://aws.amazon.com/msk/pricing/))
+* `security_groups` - (Required) A list of the security groups to associate with the elastic network interfaces to control who can communicate with the cluster.
+* `az_distribution` - (Optional) The distribution of broker nodes across availability zones ([documentation](https://docs.aws.amazon.com/msk/1.0/apireference/clusters.html#clusters-model-brokerazdistribution)). Currently the only valid value is `DEFAULT`.
+* `connectivity_info` - (Optional) Information about the cluster access configuration. See below. For security reasons, you can't turn on public access while creating an MSK cluster. However, you can update an existing cluster to make it publicly accessible. You can also create a new cluster and then update it to make it publicly accessible ([documentation](https://docs.aws.amazon.com/msk/latest/developerguide/public-access.html)).
+* `storage_info` - (Optional) A block that contains information about storage volumes attached to MSK broker nodes. See below.
+
+### broker_node_group_info connectivity_info Argument Reference
+
+* `public_access` - (Optional) Access control settings for brokers. See below.
+
+### connectivity_info public_access Argument Reference
+
+* `type` - (Optional) Public access type. Valid values: `DISABLED`, `SERVICE_PROVIDED_EIPS`.
+
+### broker_node_group_info storage_info Argument Reference
+
+* `ebs_storage_info` - (Optional) A block that contains EBS volume information. See below.
+
+### storage_info ebs_storage_info Argument Reference
+
+* `provisioned_throughput` - (Optional) A block that contains EBS volume provisioned throughput information. To provision storage throughput, you must choose broker type kafka.m5.4xlarge or larger. See below.
+* `volume_size` - (Optional) The size in GiB of the EBS volume for the data drive on each broker node. Minimum value of `1` and maximum value of `16384`.
+
+### ebs_storage_info provisioned_throughput Argument Reference
+
+* `enabled` - (Optional) Controls whether provisioned throughput is enabled or not. Default value: `false`.
+* `volume_throughput` - (Optional) Throughput value of the EBS volumes for the data drive on each Kafka broker node in MiB per second. The minimum value is `250`. The maximum value varies by broker type. You can refer to the valid values for the maximum volume throughput in the [documentation on throughput bottlenecks](https://docs.aws.amazon.com/msk/latest/developerguide/msk-provision-throughput.html#throughput-bottlenecks).
+
+### client_authentication Argument Reference
+
+* `sasl` - (Optional) Configuration block for specifying SASL client authentication. See below.
+* `tls` - (Optional) Configuration block for specifying TLS client authentication. See below. +* `unauthenticated` - (Optional) Enables unauthenticated access. + +#### client_authentication sasl Argument Reference + +* `iam` - (Optional) Enables IAM client authentication. Defaults to `false`. +* `scram` - (Optional) Enables SCRAM client authentication via AWS Secrets Manager. Defaults to `false`. + +#### client_authentication tls Argument Reference + +* `certificate_authority_arns` - (Optional) List of ACM Certificate Authority Amazon Resource Names (ARNs). + +### configuration_info Argument Reference + +* `arn` - (Required) Amazon Resource Name (ARN) of the MSK Configuration to use in the cluster. +* `revision` - (Required) Revision of the MSK Configuration to use in the cluster. + +### encryption_info Argument Reference + +* `encryption_in_transit` - (Optional) Configuration block to specify encryption in transit. See below. +* `encryption_at_rest_kms_key_arn` - (Optional) You may specify a KMS key short ID or ARN (it will always output an ARN) to use for encrypting your data at rest. If no key is specified, an AWS managed KMS ('aws/msk' managed service) key will be used for encrypting the data at rest. + +#### encryption_info encryption_in_transit Argument Reference + +* `client_broker` - (Optional) Encryption setting for data in transit between clients and brokers. Valid values: `TLS`, `TLS_PLAINTEXT`, and `PLAINTEXT`. Default value is `TLS`. +* `in_cluster` - (Optional) Whether data communication among broker nodes is encrypted. Default value: `true`. + +#### open_monitoring Argument Reference + +* `prometheus` - (Required) Configuration block for Prometheus settings for open monitoring. See below. + +#### open_monitoring prometheus Argument Reference + +* `jmx_exporter` - (Optional) Configuration block for JMX Exporter. See below. +* `node_exporter` - (Optional) Configuration block for Node Exporter. See below. + +#### open_monitoring prometheus jmx_exporter Argument Reference + +* `enabled_in_broker` - (Required) Indicates whether you want to enable or disable the JMX Exporter. + +#### open_monitoring prometheus node_exporter Argument Reference + +* `enabled_in_broker` - (Required) Indicates whether you want to enable or disable the Node Exporter. + +#### logging_info Argument Reference + +* `broker_logs` - (Required) Configuration block for Broker Logs settings for logging info. See below. + +#### logging_info broker_logs cloudwatch_logs Argument Reference + +* `enabled` - (Optional) Indicates whether you want to enable or disable streaming broker logs to Cloudwatch Logs. +* `log_group` - (Optional) Name of the Cloudwatch Log Group to deliver logs to. + +#### logging_info broker_logs firehose Argument Reference + +* `enabled` - (Optional) Indicates whether you want to enable or disable streaming broker logs to Kinesis Data Firehose. +* `delivery_stream` - (Optional) Name of the Kinesis Data Firehose delivery stream to deliver logs to. + +#### logging_info broker_logs s3 Argument Reference + +* `enabled` - (Optional) Indicates whether you want to enable or disable streaming broker logs to S3. +* `bucket` - (Optional) Name of the S3 bucket to deliver logs to. +* `prefix` - (Optional) Prefix to append to the folder name. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the MSK cluster. 
+* `bootstrap_brokers` - Comma-separated list of one or more hostname:port pairs of Kafka brokers suitable to bootstrap connectivity to the Kafka cluster. Contains a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `PLAINTEXT` or `TLS_PLAINTEXT`. The resource sorts values alphabetically. AWS may not always return all endpoints so this value is not guaranteed to be stable across applies.
+* `bootstrap_brokers_public_sasl_iam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.iam` is set to `true` and `broker_node_group_info.0.connectivity_info.0.public_access.0.type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_public_sasl_scram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.scram` is set to `true` and `broker_node_group_info.0.connectivity_info.0.public_access.0.type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_public_tls` - One or more DNS names (or IP addresses) and TLS port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `broker_node_group_info.0.connectivity_info.0.public_access.0.type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_sasl_iam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.iam` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrap_brokers_sasl_scram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS` and `client_authentication.0.sasl.0.scram` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies. +* `bootstrap_brokers_tls` - One or more DNS names (or IP addresses) and TLS port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094`. This attribute will have a value if `encryption_info.0.encryption_in_transit.0.client_broker` is set to `TLS_PLAINTEXT` or `TLS`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies. +* `current_version` - Current version of the MSK Cluster used for updates, e.g., `K13V1IB3VIYZZH` +* `encryption_info.0.encryption_at_rest_kms_key_arn` - The ARN of the KMS key used for encryption at rest of the broker data volumes. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `storage_mode` - Controls storage mode for supported storage tiers. Valid values are: `LOCAL` or `TIERED`. +* `zookeeper_connect_string` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster. The returned values are sorted alphabetically. The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies. +* `zookeeper_connect_string_tls` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster via TLS. The returned values are sorted alphabetically. The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `120m`) +* `update` - (Default `120m`) +Note that the `update` timeout is used separately for `storage_info`, `instance_type`, `number_of_broker_nodes`, `configuration_info`, `kafka_version` and monitoring and logging update timeouts. +* `delete` - (Default `120m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK clusters using the cluster `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MSK clusters using the cluster `arn`. 
For example: + +```console +% terraform import aws_msk_cluster.example arn:aws:kafka:us-west-2:123456789012:cluster/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/msk_configuration.html.markdown b/website/docs/cdktf/python/r/msk_configuration.html.markdown new file mode 100644 index 00000000000..9800450f979 --- /dev/null +++ b/website/docs/cdktf/python/r/msk_configuration.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "Managed Streaming for Kafka" +layout: "aws" +page_title: "AWS: aws_msk_configuration" +description: |- + Terraform resource for managing an Amazon Managed Streaming for Kafka configuration +--- + + + +# Resource: aws_msk_configuration + +Manages an Amazon Managed Streaming for Kafka configuration. More information can be found on the [MSK Developer Guide](https://docs.aws.amazon.com/msk/latest/developerguide/msk-configuration.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.msk_configuration import MskConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MskConfiguration(self, "example", + kafka_versions=["2.1.0"], + name="example", + server_properties="auto.create.topics.enable = true\ndelete.topic.enable = true\n\n" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `server_properties` - (Required) Contents of the server.properties file. Supported properties are documented in the [MSK Developer Guide](https://docs.aws.amazon.com/msk/latest/developerguide/msk-configuration-properties.html). +* `kafka_versions` - (Required) List of Apache Kafka versions which can use this configuration. +* `name` - (Required) Name of the configuration. +* `description` - (Optional) Description of the configuration. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the configuration. +* `latest_revision` - Latest revision of the configuration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK configurations using the configuration ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MSK configurations using the configuration ARN. 
For example:
+
+```console
+% terraform import aws_msk_configuration.example arn:aws:kafka:us-west-2:123456789012:configuration/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/msk_scram_secret_association.html.markdown b/website/docs/cdktf/python/r/msk_scram_secret_association.html.markdown
new file mode 100644
index 00000000000..d17457ed79e
--- /dev/null
+++ b/website/docs/cdktf/python/r/msk_scram_secret_association.html.markdown
@@ -0,0 +1,141 @@
+---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_scram_secret_association"
+description: |-
+  Associates SCRAM secrets with a Managed Streaming for Kafka (MSK) cluster.
+---
+
+
+
+# Resource: aws_msk_scram_secret_association
+
+Associates SCRAM secrets stored in the Secrets Manager service with a Managed Streaming for Kafka (MSK) cluster.
+
+-> **Note:** The following assumes the MSK cluster has SASL/SCRAM authentication enabled. See below for example usage or refer to the [Username/Password Authentication](https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html) section of the MSK Developer Guide for more details.
+
+To set up username and password authentication for a cluster, create an [`aws_secretsmanager_secret` resource](/docs/providers/aws/r/secretsmanager_secret.html) and associate
+a username and password with the secret with an [`aws_secretsmanager_secret_version` resource](/docs/providers/aws/r/secretsmanager_secret_version.html). When creating a secret for the cluster,
+the `name` must have the prefix `AmazonMSK_` and you must either use an existing custom AWS KMS key or create a new
+custom AWS KMS key for your secret with the [`aws_kms_key` resource](/docs/providers/aws/r/kms_key.html). Note that a policy is required for the `aws_secretsmanager_secret`
+resource so that Kafka is able to read it. This policy is attached automatically when the `aws_msk_scram_secret_association` is used;
+however, the policy is not managed by Terraform and will therefore present a diff on plan/apply. For that reason, you must use the [`aws_secretsmanager_secret_policy`
+resource](/docs/providers/aws/r/secretsmanager_secret_policy.html) as shown below to ensure that the state is clean after the secret is created and associated with the cluster.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
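+# NOTE: this stack takes broker_node_group_info, kafka_version, and
+# number_of_broker_nodes as constructor parameters; supply values for
+# them when instantiating the stack.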
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.kms_key import KmsKey
+from imports.aws.msk_cluster import MskCluster
+from imports.aws.msk_scram_secret_association import MskScramSecretAssociation
+from imports.aws.secretsmanager_secret import SecretsmanagerSecret
+from imports.aws.secretsmanager_secret_policy import SecretsmanagerSecretPolicy
+from imports.aws.secretsmanager_secret_version import SecretsmanagerSecretVersion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, broker_node_group_info, kafka_version, number_of_broker_nodes):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            description="Example Key for MSK Cluster Scram Secret Association"
+        )
+        aws_msk_cluster_example = MskCluster(self, "example_1",
+            client_authentication=MskClusterClientAuthentication(
+                sasl=MskClusterClientAuthenticationSasl(
+                    scram=True
+                )
+            ),
+            cluster_name="example",
+            broker_node_group_info=broker_node_group_info,
+            kafka_version=kafka_version,
+            number_of_broker_nodes=number_of_broker_nodes
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_msk_cluster_example.override_logical_id("example")
+        aws_secretsmanager_secret_example = SecretsmanagerSecret(self, "example_2",
+            kms_key_id=example.key_id,
+            name="AmazonMSK_example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_secretsmanager_secret_example.override_logical_id("example")
+        aws_secretsmanager_secret_version_example = SecretsmanagerSecretVersion(self, "example_3",
+            secret_id=Token.as_string(aws_secretsmanager_secret_example.id),
+            secret_string=Token.as_string(
+                Fn.jsonencode({
+                    "password": "pass",
+                    "username": "user"
+                }))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_secretsmanager_secret_version_example.override_logical_id("example")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_4",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["secretsmanager:getSecretValue"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["kafka.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=[Token.as_string(aws_secretsmanager_secret_example.arn)],
+                sid="AWSKafkaResourcePolicy"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_msk_scram_secret_association_example = MskScramSecretAssociation(self, "example_5",
+            cluster_arn=Token.as_string(aws_msk_cluster_example.arn),
+            depends_on=[aws_secretsmanager_secret_version_example],
+            secret_arn_list=[Token.as_string(aws_secretsmanager_secret_example.arn)]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_msk_scram_secret_association_example.override_logical_id("example")
+        aws_secretsmanager_secret_policy_example = SecretsmanagerSecretPolicy(self, "example_6",
+            policy=Token.as_string(data_aws_iam_policy_document_example.json),
+            secret_arn=Token.as_string(aws_secretsmanager_secret_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match. + aws_secretsmanager_secret_policy_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cluster_arn` - (Required, Forces new resource) Amazon Resource Name (ARN) of the MSK cluster. +* `secret_arn_list` - (Required) List of AWS Secrets Manager secret ARNs. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the MSK cluster. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK SCRAM Secret Associations using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MSK SCRAM Secret Associations using the `id`. For example: + +```console +% terraform import aws_msk_scram_secret_association.example arn:aws:kafka:us-west-2:123456789012:cluster/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/msk_serverless_cluster.html.markdown b/website/docs/cdktf/python/r/msk_serverless_cluster.html.markdown new file mode 100644 index 00000000000..89c1ee5ea02 --- /dev/null +++ b/website/docs/cdktf/python/r/msk_serverless_cluster.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Managed Streaming for Kafka" +layout: "aws" +page_title: "AWS: aws_msk_serverless_cluster" +description: |- + Terraform resource for managing an Amazon MSK Serverless cluster. +--- + + + +# Resource: aws_msk_serverless_cluster + +Manages an Amazon MSK Serverless cluster. + +-> **Note:** To manage a _provisioned_ Amazon MSK cluster, use the [`aws_msk_cluster`](/docs/providers/aws/r/msk_cluster.html) resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.msk_serverless_cluster import MskServerlessCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MskServerlessCluster(self, "example", + client_authentication=MskServerlessClusterClientAuthentication( + sasl=MskServerlessClusterClientAuthenticationSasl( + iam=MskServerlessClusterClientAuthenticationSaslIam( + enabled=True + ) + ) + ), + cluster_name="Example", + vpc_config=[MskServerlessClusterVpcConfig( + security_group_ids=[Token.as_string(aws_security_group_example.id)], + subnet_ids=Token.as_list( + property_access(aws_subnet_example, ["*", "id"])) + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `client_authentication` - (Required) Specifies client authentication information for the serverless cluster. See below. +* `cluster_name` - (Required) The name of the serverless cluster. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpc_config` - (Required) VPC configuration information. See below. + +### client_authentication Argument Reference + +* `sasl` - (Required) Details for client authentication using SASL. See below. + +### sasl Argument Reference + +* `iam` - (Required) Details for client authentication using IAM. See below. + +### iam Argument Reference + +* `enabled` - (Required) Whether SASL/IAM authentication is enabled or not. + +### vpc_config Argument Reference + +* `security_group_ids` - (Optional) Specifies up to five security groups that control inbound and outbound traffic for the serverless cluster. +* `subnet_ids` - (Required) A list of subnets in at least two different Availability Zones that host your client applications. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the serverless cluster. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `120m`) +* `delete` - (Default `120m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK serverless clusters using the cluster `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MSK serverless clusters using the cluster `arn`. For example: + +```console +% terraform import aws_msk_serverless_cluster.example arn:aws:kafka:us-west-2:123456789012:cluster/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/mskconnect_connector.html.markdown b/website/docs/cdktf/python/r/mskconnect_connector.html.markdown new file mode 100644 index 00000000000..0017207cde8 --- /dev/null +++ b/website/docs/cdktf/python/r/mskconnect_connector.html.markdown @@ -0,0 +1,217 @@ +--- +subcategory: "Managed Streaming for Kafka Connect" +layout: "aws" +page_title: "AWS: aws_mskconnect_connector" +description: |- + Provides an Amazon MSK Connect Connector resource. +--- + + + +# Resource: aws_mskconnect_connector + +Provides an Amazon MSK Connect Connector resource. + +## Example Usage + +### Basic configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
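+# NOTE: the MSK cluster, security group, subnets (example1-example3),
+# custom plugin, and IAM role referenced in this example are assumed to
+# be defined elsewhere in the stack.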
+#
+from imports.aws.mskconnect_connector import MskconnectConnector
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        MskconnectConnector(self, "example",
+            capacity=MskconnectConnectorCapacity(
+                autoscaling=MskconnectConnectorCapacityAutoscaling(
+                    max_worker_count=2,
+                    mcu_count=1,
+                    min_worker_count=1,
+                    scale_in_policy=MskconnectConnectorCapacityAutoscalingScaleInPolicy(
+                        cpu_utilization_percentage=20
+                    ),
+                    scale_out_policy=MskconnectConnectorCapacityAutoscalingScaleOutPolicy(
+                        cpu_utilization_percentage=80
+                    )
+                )
+            ),
+            connector_configuration={
+                "connector.class": "com.github.jcustenborder.kafka.connect.simulator.SimulatorSinkConnector",
+                "tasks.max": "1",
+                "topics": "example"
+            },
+            kafka_cluster=MskconnectConnectorKafkaCluster(
+                apache_kafka_cluster=MskconnectConnectorKafkaClusterApacheKafkaCluster(
+                    bootstrap_servers=Token.as_string(aws_msk_cluster_example.bootstrap_brokers_tls),
+                    vpc=MskconnectConnectorKafkaClusterApacheKafkaClusterVpc(
+                        security_groups=[Token.as_string(aws_security_group_example.id)],
+                        subnets=[example1.id, example2.id, example3.id]
+                    )
+                )
+            ),
+            kafka_cluster_client_authentication=MskconnectConnectorKafkaClusterClientAuthentication(
+                authentication_type="NONE"
+            ),
+            kafka_cluster_encryption_in_transit=MskconnectConnectorKafkaClusterEncryptionInTransit(
+                encryption_type="TLS"
+            ),
+            kafkaconnect_version="2.7.1",
+            name="example",
+            plugin=[MskconnectConnectorPlugin(
+                custom_plugin=MskconnectConnectorPluginCustomPlugin(
+                    arn=Token.as_string(aws_mskconnect_custom_plugin_example.arn),
+                    revision=Token.as_number(aws_mskconnect_custom_plugin_example.latest_revision)
+                )
+            )
+            ],
+            service_execution_role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `capacity` - (Required) Information about the capacity allocated to the connector. See below.
+* `connector_configuration` - (Required) A map of keys to values that represent the configuration for the connector.
+* `description` - (Optional) A summary description of the connector.
+* `kafka_cluster` - (Required) Specifies which Apache Kafka cluster to connect to. See below.
+* `kafka_cluster_client_authentication` - (Required) Details of the client authentication used by the Apache Kafka cluster. See below.
+* `kafka_cluster_encryption_in_transit` - (Required) Details of encryption in transit to the Apache Kafka cluster. See below.
+* `kafkaconnect_version` - (Required) The version of Kafka Connect. It has to be compatible with both the Apache Kafka cluster's version and the plugins.
+* `log_delivery` - (Optional) Details about log delivery. See below.
+* `name` - (Required) The name of the connector.
+* `plugin` - (Required) Specifies which plugins to use for the connector. See below.
+* `service_execution_role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM role used by the connector to access the Amazon Web Services resources that it needs. The types of resources depend on the logic of the connector. For example, a connector that has Amazon S3 as a destination must have permissions that allow it to write to the S3 destination bucket.
+* `worker_configuration` - (Optional) Specifies which worker configuration to use with the connector. See below.
+
+### capacity Configuration Block
+
+* `autoscaling` - (Optional) Information about the auto scaling parameters for the connector. See below.
+* `provisioned_capacity` - (Optional) Details about a fixed capacity allocated to a connector. See below.
+
+### autoscaling Configuration Block
+
+* `max_worker_count` - (Required) The maximum number of workers allocated to the connector.
+* `mcu_count` - (Optional) The number of MSK Connect Units (MCUs) allocated to each connector worker. Valid values: `1`, `2`, `4`, `8`. The default value is `1`.
+* `min_worker_count` - (Required) The minimum number of workers allocated to the connector.
+* `scale_in_policy` - (Optional) The scale-in policy for the connector. See below.
+* `scale_out_policy` - (Optional) The scale-out policy for the connector. See below.
+
+### scale_in_policy Configuration Block
+
+* `cpu_utilization_percentage` - (Required) Specifies the CPU utilization percentage threshold at which you want connector scale in to be triggered.
+
+### scale_out_policy Configuration Block
+
+* `cpu_utilization_percentage` - (Required) The CPU utilization percentage threshold at which you want connector scale out to be triggered.
+
+### provisioned_capacity Configuration Block
+
+* `mcu_count` - (Optional) The number of MSK Connect Units (MCUs) allocated to each connector worker. Valid values: `1`, `2`, `4`, `8`. The default value is `1`.
+* `worker_count` - (Required) The number of workers that are allocated to the connector.
+
+### kafka_cluster Configuration Block
+
+* `apache_kafka_cluster` - (Required) The Apache Kafka cluster to which the connector is connected.
+
+### apache_kafka_cluster Configuration Block
+
+* `bootstrap_servers` - (Required) The bootstrap servers of the cluster.
+* `vpc` - (Required) Details of an Amazon VPC which has network connectivity to the Apache Kafka cluster.
+
+### vpc Configuration Block
+
+* `security_groups` - (Required) The security groups for the connector.
+* `subnets` - (Required) The subnets for the connector.
+
+### kafka_cluster_client_authentication Configuration Block
+
+* `authentication_type` - (Optional) The type of client authentication used to connect to the Apache Kafka cluster. Valid values: `IAM`, `NONE`. A value of `NONE` means that no client authentication is used. The default value is `NONE`.
+
+### kafka_cluster_encryption_in_transit Configuration Block
+
+* `encryption_type` - (Optional) The type of encryption in transit to the Apache Kafka cluster. Valid values: `PLAINTEXT`, `TLS`. The default value is `PLAINTEXT`.
+
+### log_delivery Configuration Block
+
+* `worker_log_delivery` - (Required) The workers can send worker logs to different destination types. This configuration specifies the details of these destinations. See below.
+
+### worker_log_delivery Configuration Block
+
+* `cloudwatch_logs` - (Optional) Details about delivering logs to Amazon CloudWatch Logs. See below.
+* `firehose` - (Optional) Details about delivering logs to Amazon Kinesis Data Firehose. See below.
+* `s3` - (Optional) Details about delivering logs to Amazon S3. See below.
+
+### cloudwatch_logs Configuration Block
+
+* `enabled` - (Optional) Whether log delivery to Amazon CloudWatch Logs is enabled.
+* `log_group` - (Required) The name of the CloudWatch log group that is the destination for log delivery.
+
+### firehose Configuration Block
+
+* `delivery_stream` - (Optional) The name of the Kinesis Data Firehose delivery stream that is the destination for log delivery.
+* `enabled` - (Required) Specifies whether connector logs get delivered to Amazon Kinesis Data Firehose.
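+
+Putting the above together, a `log_delivery` argument nests these blocks inside the `aws_mskconnect_connector` definition. The following is a rough sketch only: the struct names follow the naming pattern of the generated provider bindings used in the example above, and the log group and delivery stream names are placeholders:
+
+```python
+# A sketch of a log_delivery block streaming connector logs to CloudWatch
+# Logs and Kinesis Data Firehose. "example-log-group" and
+# "example-delivery-stream" are hypothetical destinations.
+log_delivery=MskconnectConnectorLogDelivery(
+    worker_log_delivery=MskconnectConnectorLogDeliveryWorkerLogDelivery(
+        cloudwatch_logs=MskconnectConnectorLogDeliveryWorkerLogDeliveryCloudwatchLogs(
+            enabled=True,
+            log_group="example-log-group"
+        ),
+        firehose=MskconnectConnectorLogDeliveryWorkerLogDeliveryFirehose(
+            delivery_stream="example-delivery-stream",
+            enabled=True
+        )
+    )
+)
+```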
+ +### s3 Configuration Block + +* `bucket` - (Optional) The name of the S3 bucket that is the destination for log delivery. +* `enabled` - (Required) Specifies whether connector logs get sent to the specified Amazon S3 destination. +* `prefix` - (Optional) The S3 prefix that is the destination for log delivery. + +### plugin Configuration Block + +* `custom_plugin` - (Required) Details about a custom plugin. See below. + +### custom_plugin Configuration Block + +* `arn` - (Required) The Amazon Resource Name (ARN) of the custom plugin. +* `revision` - (Required) The revision of the custom plugin. + +### worker_configuration Configuration Block + +* `arn` - (Required) The Amazon Resource Name (ARN) of the worker configuration. +* `revision` - (Required) The revision of the worker configuration. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the connector. +* `version` - The current version of the connector. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `20m`) +* `update` - (Default `20m`) +* `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK Connect Connector using the connector's `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MSK Connect Connector using the connector's `arn`. For example: + +```console +% terraform import aws_mskconnect_connector.example 'arn:aws:kafkaconnect:eu-central-1:123456789012:connector/example/264edee4-17a3-412e-bd76-6681cfc93805-3' +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/mskconnect_custom_plugin.html.markdown b/website/docs/cdktf/python/r/mskconnect_custom_plugin.html.markdown new file mode 100644 index 00000000000..a79a4c33376 --- /dev/null +++ b/website/docs/cdktf/python/r/mskconnect_custom_plugin.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "Managed Streaming for Kafka Connect" +layout: "aws" +page_title: "AWS: aws_mskconnect_custom_plugin" +description: |- + Provides an Amazon MSK Connect custom plugin resource. +--- + + + +# Resource: aws_mskconnect_custom_plugin + +Provides an Amazon MSK Connect Custom Plugin Resource. + +## Example Usage + +### Basic configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.mskconnect_custom_plugin import MskconnectCustomPlugin +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example" + ) + aws_s3_object_example = S3Object(self, "example_1", + bucket=example.id, + key="debezium.zip", + source="debezium.zip" + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        aws_s3_object_example.override_logical_id("example")
+        aws_mskconnect_custom_plugin_example = MskconnectCustomPlugin(self, "example_2",
+            content_type="ZIP",
+            location=MskconnectCustomPluginLocation(
+                s3=MskconnectCustomPluginLocationS3(
+                    bucket_arn=example.arn,
+                    file_key=Token.as_string(aws_s3_object_example.key)
+                )
+            ),
+            name="debezium-example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_mskconnect_custom_plugin_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the custom plugin.
+* `content_type` - (Required) The type of the plugin file. Allowed values are `ZIP` and `JAR`.
+* `location` - (Required) Information about the location of a custom plugin. See below.
+
+The following arguments are optional:
+
+* `description` - (Optional) A summary description of the custom plugin.
+
+### location Argument Reference
+
+* `s3` - (Required) Information of the plugin file stored in Amazon S3. See below.
+
+#### location s3 Argument Reference
+
+* `bucket_arn` - (Required) The Amazon Resource Name (ARN) of an S3 bucket.
+* `file_key` - (Required) The file key for an object in an S3 bucket.
+* `object_version` - (Optional) The version of an object in an S3 bucket.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - the Amazon Resource Name (ARN) of the custom plugin.
+* `latest_revision` - an ID of the latest successfully created revision of the custom plugin.
+* `state` - the state of the custom plugin.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK Connect Custom Plugin using the plugin's `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import MSK Connect Custom Plugin using the plugin's `arn`. For example:
+
+```console
+% terraform import aws_mskconnect_custom_plugin.example 'arn:aws:kafkaconnect:eu-central-1:123456789012:custom-plugin/debezium-example/abcdefgh-1234-5678-9abc-defghijklmno-4'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/mskconnect_worker_configuration.html.markdown b/website/docs/cdktf/python/r/mskconnect_worker_configuration.html.markdown
new file mode 100644
index 00000000000..68e50244417
--- /dev/null
+++ b/website/docs/cdktf/python/r/mskconnect_worker_configuration.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Managed Streaming for Kafka Connect"
+layout: "aws"
+page_title: "AWS: aws_mskconnect_worker_configuration"
+description: |-
+  Provides an Amazon MSK Connect worker configuration resource.
+---
+
+
+
+# Resource: aws_mskconnect_worker_configuration
+
+Provides an Amazon MSK Connect Worker Configuration Resource.
+
+## Example Usage
+
+### Basic configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.mskconnect_worker_configuration import MskconnectWorkerConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        MskconnectWorkerConfiguration(self, "example",
+            name="example",
+            properties_file_content="key.converter=org.apache.kafka.connect.storage.StringConverter\nvalue.converter=org.apache.kafka.connect.storage.StringConverter\n\n"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the worker configuration.
+* `properties_file_content` - (Required) Contents of the connect-distributed.properties file. The value can be either base64 encoded or in raw format.
+
+The following arguments are optional:
+
+* `description` - (Optional) A summary description of the worker configuration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - the Amazon Resource Name (ARN) of the worker configuration.
+* `latest_revision` - an ID of the latest successfully created revision of the worker configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK Connect Worker Configuration using the worker configuration's `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import MSK Connect Worker Configuration using the worker configuration's `arn`. For example:
+
+```console
+% terraform import aws_mskconnect_worker_configuration.example 'arn:aws:kafkaconnect:eu-central-1:123456789012:worker-configuration/example/8848493b-7fcc-478c-a646-4a52634e3378-4'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/mwaa_environment.html.markdown b/website/docs/cdktf/python/r/mwaa_environment.html.markdown
new file mode 100644
index 00000000000..3a968a59ac4
--- /dev/null
+++ b/website/docs/cdktf/python/r/mwaa_environment.html.markdown
@@ -0,0 +1,245 @@
+---
+subcategory: "MWAA (Managed Workflows for Apache Airflow)"
+layout: "aws"
+page_title: "AWS: aws_mwaa_environment"
+description: |-
+  Creates an MWAA Environment
+---
+
+
+
+# Resource: aws_mwaa_environment
+
+Creates an MWAA Environment resource.
+
+## Example Usage
+
+An MWAA Environment requires an IAM role (`aws_iam_role`), two subnets in the private zone (`aws_subnet`) and a versioned S3 bucket (`aws_s3_bucket`).
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.mwaa_environment import MwaaEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MwaaEnvironment(self, "example", + dag_s3_path="dags/", + execution_role_arn=Token.as_string(aws_iam_role_example.arn), + name="example", + network_configuration=MwaaEnvironmentNetworkConfiguration( + security_group_ids=[Token.as_string(aws_security_group_example.id)], + subnet_ids=Token.as_list(property_access(private, ["*", "id"])) + ), + source_bucket_arn=Token.as_string(aws_s3_bucket_example.arn) + ) +``` + +### Example with Airflow configuration options + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.mwaa_environment import MwaaEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MwaaEnvironment(self, "example", + airflow_configuration_options={ + "core.default_task_retries": Token.as_string(16), + "core.parallelism": Token.as_string(1) + }, + dag_s3_path="dags/", + execution_role_arn=Token.as_string(aws_iam_role_example.arn), + name="example", + network_configuration=MwaaEnvironmentNetworkConfiguration( + security_group_ids=[Token.as_string(aws_security_group_example.id)], + subnet_ids=Token.as_list(property_access(private, ["*", "id"])) + ), + source_bucket_arn=Token.as_string(aws_s3_bucket_example.arn) + ) +``` + +### Example with logging configurations + +Note that Airflow task logs are enabled by default with the `INFO` log level. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.mwaa_environment import MwaaEnvironment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + MwaaEnvironment(self, "example", + dag_s3_path="dags/", + execution_role_arn=Token.as_string(aws_iam_role_example.arn), + logging_configuration=MwaaEnvironmentLoggingConfiguration( + dag_processing_logs=MwaaEnvironmentLoggingConfigurationDagProcessingLogs( + enabled=True, + log_level="DEBUG" + ), + scheduler_logs=MwaaEnvironmentLoggingConfigurationSchedulerLogs( + enabled=True, + log_level="INFO" + ), + task_logs=MwaaEnvironmentLoggingConfigurationTaskLogs( + enabled=True, + log_level="WARNING" + ), + webserver_logs=MwaaEnvironmentLoggingConfigurationWebserverLogs( + enabled=True, + log_level="ERROR" + ), + worker_logs=MwaaEnvironmentLoggingConfigurationWorkerLogs( + enabled=True, + log_level="CRITICAL" + ) + ), + name="example", + network_configuration=MwaaEnvironmentNetworkConfiguration( + security_group_ids=[Token.as_string(aws_security_group_example.id)], + subnet_ids=Token.as_list(property_access(private, ["*", "id"])) + ), + source_bucket_arn=Token.as_string(aws_s3_bucket_example.arn) + ) +``` + +### Example with tags + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.mwaa_environment import MwaaEnvironment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        MwaaEnvironment(self, "example",
+            dag_s3_path="dags/",
+            execution_role_arn=Token.as_string(aws_iam_role_example.arn),
+            name="example",
+            network_configuration=MwaaEnvironmentNetworkConfiguration(
+                security_group_ids=[Token.as_string(aws_security_group_example.id)],
+                subnet_ids=Token.as_list(property_access(private, ["*", "id"]))
+            ),
+            source_bucket_arn=Token.as_string(aws_s3_bucket_example.arn),
+            tags={
+                "Environment": "production",
+                "Name": "example"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `airflow_configuration_options` - (Optional) The `airflow_configuration_options` parameter specifies Airflow override options. Check the [Official documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-env-variables.html#configuring-env-variables-reference) for all possible configuration options.
+* `airflow_version` - (Optional) Airflow version of your environment; it will be set by default to the latest version that MWAA supports.
+* `dag_s3_path` - (Required) The relative path to the DAG folder on your Amazon S3 storage bucket. For example, dags. For more information, see [Importing DAGs on Amazon MWAA](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import.html).
+* `environment_class` - (Optional) Environment class for the cluster. Possible options are `mw1.small`, `mw1.medium`, `mw1.large`. Will be set by default to `mw1.small`. Please check the [AWS Pricing](https://aws.amazon.com/de/managed-workflows-for-apache-airflow/pricing/) for more information about the environment classes.
+* `execution_role_arn` - (Required) The Amazon Resource Name (ARN) of the task execution role that Amazon MWAA and its environment can assume. Check the [official AWS documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-create-role.html) for the detailed role specification.
+* `kms_key` - (Optional) The Amazon Resource Name (ARN) of your KMS key that you want to use for encryption. Will be set to the ARN of the managed KMS key `aws/airflow` by default. Please check the [Official Documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/custom-keys-certs.html) for more information.
+* `logging_configuration` - (Optional) The Apache Airflow logs you want to send to Amazon CloudWatch Logs.
+* `max_workers` - (Optional) The maximum number of workers that can be automatically scaled up. Value needs to be between `1` and `25`. Will be `10` by default.
+* `min_workers` - (Optional) The minimum number of workers that you want to run in your environment. Will be `1` by default.
+* `name` - (Required) The name of the Apache Airflow Environment.
+* `network_configuration` - (Required) Specifies the network configuration for your Apache Airflow Environment. This includes two private subnets as well as security groups for the Airflow environment. Each subnet requires an internet connection; otherwise, the deployment will fail. See [Network configuration](#network-configuration) below for details.
+* `plugins_s3_object_version` - (Optional) The plugins.zip file version you want to use.
+* `plugins_s3_path` - (Optional) The relative path to the plugins.zip file on your Amazon S3 storage bucket. For example, plugins.zip.
If a relative path is provided in the request, then `plugins_s3_object_version` is required. For more information, see [Importing DAGs on Amazon MWAA](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import.html).
+* `requirements_s3_object_version` - (Optional) The requirements.txt file version you want to use.
+* `requirements_s3_path` - (Optional) The relative path to the requirements.txt file on your Amazon S3 storage bucket. For example, requirements.txt. If a relative path is provided in the request, then `requirements_s3_object_version` is required. For more information, see [Importing DAGs on Amazon MWAA](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import.html).
+* `schedulers` - (Optional) The number of schedulers that you want to run in your environment. v2.0.2 and above accepts `2` - `5`, default `2`. v1.10.12 accepts `1`.
+* `source_bucket_arn` - (Required) The Amazon Resource Name (ARN) of your Amazon S3 storage bucket. For example, arn:aws:s3:::airflow-mybucketname.
+* `startup_script_s3_object_version` - (Optional) The version of the startup shell script you want to use. You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
+* `startup_script_s3_path` - (Optional) The relative path to the script hosted in your bucket. The script runs as your environment starts before starting the Apache Airflow process. Use this script to install dependencies, modify configuration options, and set environment variables. See [Using a startup script](https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html). Supported for environment versions 2.x and later.
+* `webserver_access_mode` - (Optional) Specifies whether the webserver should be accessible over the internet or via your specified VPC. Possible options: `PRIVATE_ONLY` (default) and `PUBLIC_ONLY`.
+* `weekly_maintenance_window_start` - (Optional) Specifies the start date for the weekly maintenance window.
+* `tags` - (Optional) A map of resource tags to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Logging configurations
+
+The `logging_configuration` block supports the following arguments.
+
+* `dag_processing_logs` - (Optional) Log configuration options for processing DAGs. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
+* `scheduler_logs` - (Optional) Log configuration options for the schedulers. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
+* `task_logs` - (Optional) Log configuration options for DAG tasks. See [Module logging configuration](#module-logging-configuration) for more information. Enabled by default with `INFO` log level.
+* `webserver_logs` - (Optional) Log configuration options for the webservers. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
+* `worker_logs` - (Optional) Log configuration options for the workers. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
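+
+For instance, to override only the task log level while leaving the other modules at their defaults, the block can be as small as the following sketch. It reuses the binding names shown in the logging example above; the import path is assumed to follow the convention of the other examples.
+
+```python
+from imports.aws.mwaa_environment import (
+    MwaaEnvironmentLoggingConfiguration,
+    MwaaEnvironmentLoggingConfigurationTaskLogs,
+)
+
+# Enable only task logs at DEBUG; pass the result to
+# MwaaEnvironment(..., logging_configuration=logging_config).
+logging_config = MwaaEnvironmentLoggingConfiguration(
+    task_logs=MwaaEnvironmentLoggingConfigurationTaskLogs(
+        enabled=True,
+        log_level="DEBUG"
+    )
+)
+```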
+
+### Module logging configuration
+
+A configuration block to use for logging with respect to the various Apache Airflow services: DagProcessingLogs, SchedulerLogs, TaskLogs, WebserverLogs, and WorkerLogs. It supports the following arguments.
+
+* `enabled` - (Required) Whether to enable log collection for this module.
+* `log_level` - (Optional) Logging level. Valid values: `CRITICAL`, `ERROR`, `WARNING`, `INFO`, `DEBUG`. Will be `INFO` by default.
+
+### Network configuration
+
+The `network_configuration` block supports the following arguments. More information about the required subnet and security group settings can be found in the [official AWS documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/vpc-create.html).
+
+* `security_group_ids` - (Required) Security group IDs for the environment. At least one of the security groups needs to allow MWAA resources to talk to each other; otherwise, MWAA cannot be provisioned.
+* `subnet_ids` - (Required) The private subnet IDs in which the environment should be created. MWAA requires two subnets.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the MWAA Environment
+* `created_at` - The Created At date of the MWAA Environment
+* `logging_configuration[0].<LOG_CONFIGURATION_TYPE>[0].cloud_watch_log_group_arn` - Provides the ARN for the CloudWatch group where the logs will be published
+* `service_role_arn` - The Service Role ARN of the Amazon MWAA Environment
+* `status` - The status of the Amazon MWAA Environment
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `webserver_url` - The webserver URL of the MWAA Environment
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `120m`)
+- `update` - (Default `90m`)
+- `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MWAA Environment using `Name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import MWAA Environment using `Name`. For example:
+
+```console
+% terraform import aws_mwaa_environment.example MyAirflowEnvironment
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/nat_gateway.html.markdown b/website/docs/cdktf/python/r/nat_gateway.html.markdown
new file mode 100644
index 00000000000..279da7b6b2e
--- /dev/null
+++ b/website/docs/cdktf/python/r/nat_gateway.html.markdown
@@ -0,0 +1,154 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_nat_gateway"
+description: |-
+  Provides a resource to create a VPC NAT Gateway.
+---
+
+
+
+# Resource: aws_nat_gateway
+
+Provides a resource to create a VPC NAT Gateway.
+
+## Example Usage
+
+### Public NAT
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.nat_gateway import NatGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NatGateway(self, "example", + allocation_id=Token.as_string(aws_eip_example.id), + depends_on=[aws_internet_gateway_example], + subnet_id=Token.as_string(aws_subnet_example.id), + tags={ + "Name": "gw NAT" + } + ) +``` + +### Public NAT with Secondary Private IP Addresses + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.nat_gateway import NatGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NatGateway(self, "example", + allocation_id=Token.as_string(aws_eip_example.id), + secondary_allocation_ids=[secondary.id], + secondary_private_ip_addresses=["10.0.1.5"], + subnet_id=Token.as_string(aws_subnet_example.id) + ) +``` + +### Private NAT + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.nat_gateway import NatGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NatGateway(self, "example", + connectivity_type="private", + subnet_id=Token.as_string(aws_subnet_example.id) + ) +``` + +### Private NAT with Secondary Private IP Addresses + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.nat_gateway import NatGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NatGateway(self, "example", + connectivity_type="private", + secondary_private_ip_address_count=7, + subnet_id=Token.as_string(aws_subnet_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `allocation_id` - (Optional) The Allocation ID of the Elastic IP address for the NAT Gateway. Required for `connectivity_type` of `public`. +* `connectivity_type` - (Optional) Connectivity type for the NAT Gateway. Valid values are `private` and `public`. Defaults to `public`. +* `private_ip` - (Optional) The private IPv4 address to assign to the NAT Gateway. If you don't provide an address, a private IPv4 address will be automatically assigned. +* `subnet_id` - (Required) The Subnet ID of the subnet in which to place the NAT Gateway. +* `secondary_allocation_ids` - (Optional) A list of secondary allocation EIP IDs for this NAT Gateway. +* `secondary_private_ip_address_count` - (Optional) [Private NAT Gateway only] The number of secondary private IPv4 addresses you want to assign to the NAT Gateway. +* `secondary_private_ip_addresses` - (Optional) A list of secondary private IPv4 addresses to assign to the NAT Gateway. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `association_id` - The association ID of the Elastic IP address that's associated with the NAT Gateway. Only available when `connectivity_type` is `public`.
+* `id` - The ID of the NAT Gateway.
+* `network_interface_id` - The ID of the network interface associated with the NAT Gateway.
+* `public_ip` - The Elastic IP address associated with the NAT Gateway.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import NAT Gateways using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import NAT Gateways using the `id`. For example:
+
+```console
+% terraform import aws_nat_gateway.private_gw nat-05dba92075d71c408
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/neptune_cluster.html.markdown b/website/docs/cdktf/python/r/neptune_cluster.html.markdown
new file mode 100644
index 00000000000..d0719ffe7f9
--- /dev/null
+++ b/website/docs/cdktf/python/r/neptune_cluster.html.markdown
@@ -0,0 +1,166 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_cluster"
+description: |-
+  Provides a Neptune Cluster Resource
+---
+
+
+
+# Resource: aws_neptune_cluster
+
+Provides a Neptune Cluster Resource. A Cluster Resource defines attributes that are
+applied to the entire cluster of Neptune Cluster Instances.
+
+Changes to a Neptune Cluster can occur when you manually change a
+parameter, such as `backup_retention_period`, and are reflected in the next maintenance
+window. Because of this, Terraform may report a difference in its planning
+phase because a modification has not yet taken place. You can use the
+`apply_immediately` flag to instruct the service to apply the change immediately
+(see documentation below).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_cluster import NeptuneCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NeptuneCluster(self, "default",
+            apply_immediately=True,
+            backup_retention_period=5,
+            cluster_identifier="neptune-cluster-demo",
+            engine="neptune",
+            iam_database_authentication_enabled=True,
+            preferred_backup_window="07:00-09:00",
+            skip_final_snapshot=True
+        )
+```
+
+~> **Note:** AWS Neptune does not support user name/password–based access control.
+See the AWS [Docs](https://docs.aws.amazon.com/neptune/latest/userguide/limits.html) for more information.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allow_major_version_upgrade` - (Optional) Specifies whether upgrades between different major versions are allowed. You must set it to `true` when providing an `engine_version` parameter that uses a different major version than the DB cluster's current version. Default is `false`.
+* `apply_immediately` - (Optional) Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`.
+* `availability_zones` - (Optional) A list of EC2 Availability Zones that instances in the Neptune cluster can be created in.
+* `backup_retention_period` - (Optional) The days to retain backups for. Default `1`.
+* `cluster_identifier` - (Optional, Forces new resource) The cluster identifier. If omitted, Terraform will assign a random, unique identifier.
+* `cluster_identifier_prefix` - (Optional, Forces new resource) Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `cluster_identifier`.
+* `copy_tags_to_snapshot` - (Optional) If set to true, tags are copied to any snapshot of the DB cluster that is created.
+* `enable_cloudwatch_logs_exports` - (Optional) A list of the log types this DB cluster is configured to export to CloudWatch Logs. Currently only supports `audit`.
+* `engine` - (Optional) The name of the database engine to be used for this Neptune cluster. Defaults to `neptune`.
+* `engine_version` - (Optional) The database engine version.
+* `final_snapshot_identifier` - (Optional) The name of your final Neptune snapshot when this Neptune cluster is deleted. If omitted, no final snapshot will be made.
+* `global_cluster_identifier` - (Optional) The global cluster identifier specified on [`aws_neptune_global_cluster`](/docs/providers/aws/r/neptune_global_cluster.html).
+* `iam_roles` - (Optional) A List of ARNs for the IAM roles to associate to the Neptune Cluster.
+* `iam_database_authentication_enabled` - (Optional) Specifies whether or not mappings of AWS Identity and Access Management (IAM) accounts to database accounts are enabled.
+* `kms_key_arn` - (Optional) The ARN for the KMS encryption key. When specifying `kms_key_arn`, `storage_encrypted` needs to be set to true.
+* `neptune_subnet_group_name` - (Optional) A Neptune subnet group to associate with this Neptune instance.
+* `neptune_cluster_parameter_group_name` - (Optional) A cluster parameter group to associate with the cluster.
+* `neptune_instance_parameter_group_name` - (Optional) The name of the DB parameter group to apply to all instances of the DB cluster.
+* `preferred_backup_window` - (Optional) The daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter. Time in UTC.
Default: A 30-minute window selected at random from an 8-hour block of time per region, e.g., 04:00-09:00.
+* `preferred_maintenance_window` - (Optional) The weekly time range during which system maintenance can occur, in UTC, e.g., wed:04:00-wed:04:30.
+* `port` - (Optional) The port on which Neptune accepts connections. Default is `8182`.
+* `replication_source_identifier` - (Optional) ARN of a source Neptune cluster or Neptune instance if this Neptune cluster is to be created as a Read Replica.
+* `skip_final_snapshot` - (Optional) Determines whether a final Neptune snapshot is created before the Neptune cluster is deleted. If true is specified, no Neptune snapshot is created. If false is specified, a Neptune snapshot is created before the Neptune cluster is deleted, using the value from `final_snapshot_identifier`. Default is `false`.
+* `snapshot_identifier` - (Optional) Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a Neptune cluster snapshot, or the ARN when specifying a Neptune snapshot. Automated snapshots **should not** be used for this attribute, unless from a different cluster. Automated snapshots are deleted as part of cluster destruction when the resource is replaced.
+* `storage_encrypted` - (Optional) Specifies whether the Neptune cluster is encrypted. The default is `false` if not specified.
+* `tags` - (Optional) A map of tags to assign to the Neptune cluster. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_security_group_ids` - (Optional) List of VPC security groups to associate with the Cluster
+* `deletion_protection` - (Optional) A value that indicates whether the DB cluster has deletion protection enabled. The database can't be deleted when deletion protection is enabled. By default, deletion protection is disabled.
+* `serverless_v2_scaling_configuration` - (Optional) If set, create the Neptune cluster as a serverless one. See [Serverless](#serverless) for example block attributes.
+
+### Serverless
+
+**Neptune serverless has some limitations. Please see the [limitations on the AWS documentation](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-serverless.html#neptune-serverless-limitations) before jumping into Neptune Serverless.**
+
+Neptune serverless requires that the `engine_version` attribute be `1.2.0.1` or above. Also, you need to provide a cluster parameter group compatible with the family `neptune1.2`. In the example below, the default cluster parameter group is used.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_cluster import NeptuneCluster
+from imports.aws.neptune_cluster_instance import NeptuneClusterInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = NeptuneCluster(self, "example",
+            apply_immediately=True,
+            cluster_identifier="neptune-cluster-development",
+            engine="neptune",
+            engine_version="1.2.0.1",
+            neptune_cluster_parameter_group_name="default.neptune1.2",
+            serverless_v2_scaling_configuration=NeptuneClusterServerlessV2ScalingConfiguration(),
+            skip_final_snapshot=True
+        )
+        aws_neptune_cluster_instance_example = NeptuneClusterInstance(self, "example_1",
+            cluster_identifier=example.cluster_identifier,
+            instance_class="db.serverless",
+            neptune_parameter_group_name="default.neptune1.2"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_neptune_cluster_instance_example.override_logical_id("example")
+```
+
+* `min_capacity`: (default: **2.5**) The minimum Neptune Capacity Units (NCUs) for this cluster. Must be greater than or equal to **1**. See [AWS Documentation](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-serverless-capacity-scaling.html) for more details.
+* `max_capacity`: (default: **128**) The maximum Neptune Capacity Units (NCUs) for this cluster. Must be less than or equal to **128**. See [AWS Documentation](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-serverless-capacity-scaling.html) for more details.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Neptune Cluster Amazon Resource Name (ARN)
+* `cluster_resource_id` - The Neptune Cluster Resource ID
+* `cluster_members` - List of Neptune Instances that are a part of this cluster
+* `endpoint` - The DNS address of the Neptune instance
+* `hosted_zone_id` - The Route53 Hosted Zone ID of the endpoint
+* `id` - The Neptune Cluster Identifier
+* `reader_endpoint` - A read-only endpoint for the Neptune cluster, automatically load-balanced across replicas
+* `status` - The Neptune instance status
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `120m`)
+- `update` - (Default `120m`)
+- `delete` - (Default `120m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_neptune_cluster` using the cluster identifier. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_neptune_cluster` using the cluster identifier.
For example:
+
+```console
+% terraform import aws_neptune_cluster.example my-cluster
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/neptune_cluster_endpoint.html.markdown b/website/docs/cdktf/python/r/neptune_cluster_endpoint.html.markdown
new file mode 100644
index 00000000000..06e579debc3
--- /dev/null
+++ b/website/docs/cdktf/python/r/neptune_cluster_endpoint.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_cluster_endpoint"
+description: |-
+  Provides a Neptune Cluster Endpoint Resource
+---
+
+
+
+# Resource: aws_neptune_cluster_endpoint
+
+Provides a Neptune Cluster Endpoint Resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_cluster_endpoint import NeptuneClusterEndpoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NeptuneClusterEndpoint(self, "example",
+            cluster_endpoint_identifier="example",
+            cluster_identifier=test.cluster_identifier,
+            endpoint_type="READER"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cluster_identifier` - (Required, Forces new resource) The DB cluster identifier of the DB cluster associated with the endpoint.
+* `cluster_endpoint_identifier` - (Required, Forces new resource) The identifier of the endpoint.
+* `endpoint_type` - (Required) The type of the endpoint. One of: `READER`, `WRITER`, `ANY`.
+* `excluded_members` - (Optional) List of DB instance identifiers that aren't part of the custom endpoint group. All other eligible instances are reachable through the custom endpoint. Only relevant if the list of static members is empty.
+* `static_members` - (Optional) List of DB instance identifiers that are part of the custom endpoint group.
+* `tags` - (Optional) A map of tags to assign to the Neptune cluster. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Neptune Cluster Endpoint Amazon Resource Name (ARN).
+* `endpoint` - The DNS address of the endpoint.
+* `id` - The Neptune Cluster Endpoint Identifier.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_neptune_cluster_endpoint` using the `cluster-identifier:endpoint-identifier`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_neptune_cluster_endpoint` using the `cluster-identifier:endpoint-identifier`. For example:
+
+```console
+% terraform import aws_neptune_cluster_endpoint.example my-cluster:my-endpoint
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/neptune_cluster_instance.html.markdown b/website/docs/cdktf/python/r/neptune_cluster_instance.html.markdown
new file mode 100644
index 00000000000..714c2880448
--- /dev/null
+++ b/website/docs/cdktf/python/r/neptune_cluster_instance.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_cluster_instance"
+description: |-
+  Provides a Neptune Cluster Resource Instance
+---
+
+
+
+# Resource: aws_neptune_cluster_instance
+
+A Cluster Instance Resource defines attributes that are specific to a single instance in a Neptune Cluster.
+
+You can simply add neptune instances and Neptune manages the replication. You can use the [count][1]
+meta-parameter to make multiple instances and join them all to the same Neptune Cluster, or you may specify different Cluster Instance resources with various `instance_class` sizes.
+
+## Example Usage
+
+The following example will create a neptune cluster with two neptune instances (one writer and one reader).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformCount, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_cluster import NeptuneCluster
+from imports.aws.neptune_cluster_instance import NeptuneClusterInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        default_var = NeptuneCluster(self, "default",
+            apply_immediately=True,
+            backup_retention_period=5,
+            cluster_identifier="neptune-cluster-demo",
+            engine="neptune",
+            iam_database_authentication_enabled=True,
+            preferred_backup_window="07:00-09:00",
+            skip_final_snapshot=True
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        example_count = TerraformCount.of(Token.as_number("2"))
+        NeptuneClusterInstance(self, "example",
+            apply_immediately=True,
+            cluster_identifier=default_var.id,
+            engine="neptune",
+            instance_class="db.r4.large",
+            count=example_count
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `apply_immediately` - (Optional) Specifies whether any instance modifications
+  are applied immediately, or during the next maintenance window. Default is `false`.
+* `auto_minor_version_upgrade` - (Optional) Indicates that minor engine upgrades will be applied automatically to the instance during the maintenance window. Default is `true`.
+* `availability_zone` - (Optional) The EC2 Availability Zone that the neptune instance is created in.
+* `cluster_identifier` - (Required) The identifier of the [`aws_neptune_cluster`](/docs/providers/aws/r/neptune_cluster.html) in which to launch this instance.
+* `engine` - (Optional) The name of the database engine to be used for the neptune instance. Defaults to `neptune`. Valid Values: `neptune`.
+* `engine_version` - (Optional) The neptune engine version.
+* `identifier` - (Optional, Forces new resource) The identifier for the neptune instance. If omitted, Terraform will assign a random, unique identifier.
+* `identifier_prefix` - (Optional, Forces new resource) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `instance_class` - (Required) The instance class to use.
+* `neptune_subnet_group_name` - (Required if `publicly_accessible = false`, Optional otherwise) A subnet group to associate with this neptune instance. **NOTE:** This must match the `neptune_subnet_group_name` of the attached [`aws_neptune_cluster`](/docs/providers/aws/r/neptune_cluster.html).
+* `neptune_parameter_group_name` - (Optional) The name of the neptune parameter group to associate with this instance.
+* `port` - (Optional) The port on which the DB accepts connections. Defaults to `8182`.
+* `preferred_backup_window` - (Optional) The daily time range during which automated backups are created if automated backups are enabled. E.g., "04:00-09:00"
+* `preferred_maintenance_window` - (Optional) The window to perform maintenance in.
+  Syntax: "ddd:hh24:mi-ddd:hh24:mi". E.g., "Mon:00:00-Mon:03:00".
+* `promotion_tier` - (Optional) Default 0. Failover priority setting on instance level. The reader with the lowest promotion tier has the highest priority to be promoted to writer; see the sketch after the attribute list below.
+* `publicly_accessible` - (Optional) Bool to control if instance is publicly accessible. Default is `false`.
+* `tags` - (Optional) A map of tags to assign to the instance. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `address` - The hostname of the instance. See also `endpoint` and `port`.
+* `arn` - Amazon Resource Name (ARN) of neptune instance
+* `dbi_resource_id` - The region-unique, immutable identifier for the neptune instance.
+* `endpoint` - The connection endpoint in `address:port` format.
+* `id` - The Instance identifier
+* `kms_key_arn` - The ARN for the KMS encryption key if one is set to the neptune cluster.
+* `storage_encrypted` - Specifies whether the neptune cluster is encrypted.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `writer` - Boolean indicating if this instance is writable. `False` indicates this instance is a read replica.
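+
+As an illustration of the `promotion_tier` argument documented above, the sketch below is a fragment for the stack's `__init__` from the example above (identifiers are placeholders, not `cdktf convert` output); during a failover, `replica-a` is promoted to writer before `replica-b`:
+
+```python
+# Lower promotion tier = higher promotion priority on failover.
+NeptuneClusterInstance(self, "replica_a",
+    cluster_identifier=default_var.id,
+    engine="neptune",
+    identifier="replica-a",
+    instance_class="db.r4.large",
+    promotion_tier=0
+)
+NeptuneClusterInstance(self, "replica_b",
+    cluster_identifier=default_var.id,
+    engine="neptune",
+    identifier="replica-b",
+    instance_class="db.r4.large",
+    promotion_tier=1
+)
+```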
+ +[1]: https://www.terraform.io/docs/configuration/meta-arguments/count.html + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `90m`) +- `update` - (Default `90m`) +- `delete` - (Default `90m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_neptune_cluster_instance` using the instance identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_neptune_cluster_instance` using the instance identifier. For example: + +```console +% terraform import aws_neptune_cluster_instance.example my-instance +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/neptune_cluster_parameter_group.html.markdown b/website/docs/cdktf/python/r/neptune_cluster_parameter_group.html.markdown new file mode 100644 index 00000000000..cb048b8dea9 --- /dev/null +++ b/website/docs/cdktf/python/r/neptune_cluster_parameter_group.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_cluster_parameter_group" +description: |- + Manages a Neptune Cluster Parameter Group +--- + + + +# Resource: aws_neptune_cluster_parameter_group + +Manages a Neptune Cluster Parameter Group + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.neptune_cluster_parameter_group import NeptuneClusterParameterGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NeptuneClusterParameterGroup(self, "example", + description="neptune cluster parameter group", + family="neptune1", + name="example", + parameter=[NeptuneClusterParameterGroupParameter( + name="neptune_enable_audit_log", + value=Token.as_string(1) + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional, Forces new resource) The name of the neptune cluster parameter group. If omitted, Terraform will assign a random, unique name. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `family` - (Required) The family of the neptune cluster parameter group. +* `description` - (Optional) The description of the neptune cluster parameter group. Defaults to "Managed by Terraform". +* `parameter` - (Optional) A list of neptune parameters to apply. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +Parameter blocks support the following: + +* `name` - (Required) The name of the neptune parameter. +* `value` - (Required) The value of the neptune parameter. 
+* `apply_method` - (Optional) Valid values are `immediate` and `pending-reboot`. Defaults to `pending-reboot`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The neptune cluster parameter group name. +* `arn` - The ARN of the neptune cluster parameter group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Neptune Cluster Parameter Groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Neptune Cluster Parameter Groups using the `name`. For example: + +```console +% terraform import aws_neptune_cluster_parameter_group.cluster_pg production-pg-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/neptune_cluster_snapshot.html.markdown b/website/docs/cdktf/python/r/neptune_cluster_snapshot.html.markdown new file mode 100644 index 00000000000..57147cdcd40 --- /dev/null +++ b/website/docs/cdktf/python/r/neptune_cluster_snapshot.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_cluster_snapshot" +description: |- + Manages a Neptune database cluster snapshot. +--- + + + +# Resource: aws_neptune_cluster_snapshot + +Manages a Neptune database cluster snapshot. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.neptune_cluster_snapshot import NeptuneClusterSnapshot +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NeptuneClusterSnapshot(self, "example", + db_cluster_identifier=Token.as_string(aws_neptune_cluster_example.id), + db_cluster_snapshot_identifier="resourcetestsnapshot1234" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `db_cluster_identifier` - (Required) The DB Cluster Identifier from which to take the snapshot. +* `db_cluster_snapshot_identifier` - (Required) The Identifier for the snapshot. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `allocated_storage` - Specifies the allocated storage size in gigabytes (GB). +* `availability_zones` - List of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. +* `db_cluster_snapshot_arn` - The Amazon Resource Name (ARN) for the DB Cluster Snapshot. +* `engine` - Specifies the name of the database engine. +* `engine_version` - Version of the database engine for this DB cluster snapshot. +* `kms_key_id` - If storage_encrypted is true, the AWS KMS key identifier for the encrypted DB cluster snapshot. +* `license_model` - License model information for the restored DB cluster. 
+* `port` - Port that the DB cluster was listening on at the time of the snapshot.
+* `source_db_cluster_snapshot_identifier` - The ARN of the DB Cluster Snapshot that this DB Cluster Snapshot was copied from. It only has a value in the case of a cross-customer or cross-region copy.
+* `storage_encrypted` - Specifies whether the DB cluster snapshot is encrypted.
+* `status` - The status of this DB Cluster Snapshot.
+* `vpc_id` - The VPC ID associated with the DB cluster snapshot.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_neptune_cluster_snapshot` using the cluster snapshot identifier. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_neptune_cluster_snapshot` using the cluster snapshot identifier. For example:
+
+```console
+% terraform import aws_neptune_cluster_snapshot.example my-cluster-snapshot
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/neptune_event_subscription.html.markdown b/website/docs/cdktf/python/r/neptune_event_subscription.html.markdown
new file mode 100644
index 00000000000..ff3997d20f1
--- /dev/null
+++ b/website/docs/cdktf/python/r/neptune_event_subscription.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_event_subscription"
+description: |-
+  Provides a Neptune event subscription resource.
+---
+
+
+
+# Resource: aws_neptune_event_subscription
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_cluster import NeptuneCluster
+from imports.aws.neptune_cluster_instance import NeptuneClusterInstance
+from imports.aws.neptune_event_subscription import NeptuneEventSubscription
+from imports.aws.sns_topic import SnsTopic
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        default_var = NeptuneCluster(self, "default",
+            apply_immediately=Token.as_boolean("true"),
+            backup_retention_period=5,
+            cluster_identifier="neptune-cluster-demo",
+            engine="neptune",
+            iam_database_authentication_enabled=Token.as_boolean("true"),
+            preferred_backup_window="07:00-09:00",
+            skip_final_snapshot=True
+        )
+        example = NeptuneClusterInstance(self, "example",
+            apply_immediately=Token.as_boolean("true"),
+            cluster_identifier=default_var.id,
+            engine="neptune",
+            instance_class="db.r4.large"
+        )
+        aws_sns_topic_default = SnsTopic(self, "default_2",
+            name="neptune-events"
+        )
+        # This allows the Terraform resource name to match the original name.
+ aws_sns_topic_default.override_logical_id("default") + aws_neptune_event_subscription_default = NeptuneEventSubscription(self, "default_3", + event_categories=["maintenance", "availability", "creation", "backup", "restoration", "recovery", "deletion", "failover", "failure", "notification", "configuration change", "read replica" + ], + name="neptune-event-sub", + sns_topic_arn=Token.as_string(aws_sns_topic_default.arn), + source_ids=[example.id], + source_type="db-instance", + tags={ + "env": "test" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_neptune_event_subscription_default.override_logical_id("default") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to true. +* `event_categories` - (Optional) A list of event categories for a `source_type` that you want to subscribe to. Run `aws neptune describe-event-categories` to find all the event categories. +* `name` - (Optional) The name of the Neptune event subscription. By default generated by Terraform. +* `name_prefix` - (Optional) The name of the Neptune event subscription. Conflicts with `name`. +* `sns_topic_arn` - (Required) The ARN of the SNS topic to send events to. +* `source_ids` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a `source_type` must also be specified. +* `source_type` - (Optional) The type of source that will be generating the events. Valid options are `db-instance`, `db-security-group`, `db-parameter-group`, `db-snapshot`, `db-cluster` or `db-cluster-snapshot`. If not set, all sources will be subscribed to. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Neptune event notification subscription. +* `arn` - The Amazon Resource Name of the Neptune event notification subscription. +* `customer_aws_id` - The AWS customer account associated with the Neptune event notification subscription. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `40m`) +- `delete` - (Default `40m`) +- `update` - (Default `40m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_neptune_event_subscription` using the event subscription name. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_neptune_event_subscription` using the event subscription name. For example:
+
+```console
+% terraform import aws_neptune_event_subscription.example my-event-subscription
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/neptune_global_cluster.html.markdown b/website/docs/cdktf/python/r/neptune_global_cluster.html.markdown new file mode 100644 index 00000000000..f6b3530855e --- /dev/null +++ b/website/docs/cdktf/python/r/neptune_global_cluster.html.markdown @@ -0,0 +1,192 @@ +---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_global_cluster"
+description: |-
+  Provides a Neptune Global Cluster Resource
+---
+
+
+
+# Resource: aws_neptune_global_cluster
+
+Manages a Neptune Global Cluster. A global cluster consists of one primary region and up to five read-only secondary regions. You issue write operations directly to the primary cluster in the primary region and Amazon Neptune automatically replicates the data to the secondary regions using dedicated infrastructure.
+
+More information about Neptune Global Clusters can be found in the [Neptune User Guide](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-global-database.html).
+
+## Example Usage
+
+### New Neptune Global Cluster
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_cluster import NeptuneCluster
+from imports.aws.neptune_cluster_instance import NeptuneClusterInstance
+from imports.aws.neptune_global_cluster import NeptuneGlobalCluster
+from imports.aws.provider import AwsProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = AwsProvider(self, "aws",
+            alias="primary",
+            region="us-east-2"
+        )
+        secondary = AwsProvider(self, "aws_1",
+            alias="secondary",
+            region="us-east-1"
+        )
+        example = NeptuneGlobalCluster(self, "example",
+            engine="neptune",
+            engine_version="1.2.0.0",
+            global_cluster_identifier="global-test"
+        )
+        aws_neptune_cluster_primary = NeptuneCluster(self, "primary",
+            cluster_identifier="test-primary-cluster",
+            engine=example.engine,
+            engine_version=example.engine_version,
+            global_cluster_identifier=example.id,
+            neptune_subnet_group_name="default",
+            provider=primary
+        )
+        aws_neptune_cluster_secondary = NeptuneCluster(self, "secondary",
+            cluster_identifier="test-secondary-cluster",
+            engine=example.engine,
+            engine_version=example.engine_version,
+            global_cluster_identifier=example.id,
+            neptune_subnet_group_name="default",
+            provider=secondary
+        )
+        aws_neptune_cluster_instance_primary = NeptuneClusterInstance(self, "primary_5",
+            cluster_identifier=Token.as_string(aws_neptune_cluster_primary.id),
+            engine=example.engine,
+            engine_version=example.engine_version,
+            identifier="test-primary-cluster-instance",
+            instance_class="db.r5.large",
+            neptune_subnet_group_name="default",
+            provider=primary
+        )
+        # This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.
+        aws_neptune_cluster_instance_primary.override_logical_id("primary")
+        aws_neptune_cluster_instance_secondary = NeptuneClusterInstance(self, "secondary_6",
+            cluster_identifier=Token.as_string(aws_neptune_cluster_secondary.id),
+            depends_on=[aws_neptune_cluster_instance_primary],
+            engine=example.engine,
+            engine_version=example.engine_version,
+            identifier="test-secondary-cluster-instance",
+            instance_class="db.r5.large",
+            neptune_subnet_group_name="default",
+            provider=secondary
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_neptune_cluster_instance_secondary.override_logical_id("secondary")
+```
+
+### New Global Cluster From Existing DB Cluster
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_cluster import NeptuneCluster
+from imports.aws.neptune_global_cluster import NeptuneGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = NeptuneCluster(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["global_cluster_identifier"]
+            )
+        )
+        aws_neptune_global_cluster_example = NeptuneGlobalCluster(self, "example_1",
+            global_cluster_identifier="example",
+            source_db_cluster_identifier=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_neptune_global_cluster_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `global_cluster_identifier` - (Required, Forces new resource) The global cluster identifier.
+* `deletion_protection` - (Optional) If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `engine` - (Optional, Forces new resource) Name of the database engine to be used for this DB cluster. Terraform will only perform drift detection if a configuration value is provided. Valid values: `neptune`. Conflicts with `source_db_cluster_identifier`.
+* `engine_version` - (Optional) Engine version of the global database. Upgrading the engine version will result in all cluster members being immediately updated.
+  * **NOTE:** Upgrading major versions is not supported.
+* `source_db_cluster_identifier` - (Optional) Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. Terraform cannot perform drift detection of this value.
+* `storage_encrypted` - (Optional, Forces new resource) Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. Terraform will only perform drift detection if a configuration value is provided.
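+
+As a minimal illustration of the optional arguments above (a sketch rather than `cdktf convert` output; the identifier value is hypothetical and the snake_case keywords are assumed to be generated as in the examples above):
+
+```python
+# A sketch: a global cluster with deletion protection and storage
+# encryption enabled. Both arguments default to false when omitted.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.neptune_global_cluster import NeptuneGlobalCluster
+class GlobalClusterSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NeptuneGlobalCluster(self, "example",
+            global_cluster_identifier="example-global",
+            engine="neptune",
+            deletion_protection=True,
+            storage_encrypted=True
+        )
+```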
+ +### Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts) for certain actions: + +* `create` - (Defaults to 5 mins) Used when creating the Global Cluster +* `update` - (Defaults to 120 mins) Used when updating the Global Cluster members (time is per member) +* `delete` - (Defaults to 5 mins) Used when deleting the Global Cluster members (time is per member) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Global Cluster Amazon Resource Name (ARN) +* `global_cluster_members` - Set of objects containing Global Cluster members. + * `db_cluster_arn` - Amazon Resource Name (ARN) of member DB Cluster. + * `is_writer` - Whether the member is the primary DB Cluster. +* `global_cluster_resource_id` - AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed. +* `id` - Neptune Global Cluster. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_neptune_global_cluster` using the Global Cluster identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_neptune_global_cluster` using the Global Cluster identifier. For example: + +```console +% terraform import aws_neptune_global_cluster.example example +``` + +Certain resource arguments, like `source_db_cluster_identifier`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To workaround this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.neptune_global_cluster import NeptuneGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, global_cluster_identifier):
+        super().__init__(scope, name)
+        NeptuneGlobalCluster(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["source_db_cluster_identifier"]
+            ),
+            global_cluster_identifier=global_cluster_identifier
+        )
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/neptune_parameter_group.html.markdown b/website/docs/cdktf/python/r/neptune_parameter_group.html.markdown new file mode 100644 index 00000000000..faccab9d872 --- /dev/null +++ b/website/docs/cdktf/python/r/neptune_parameter_group.html.markdown @@ -0,0 +1,83 @@ +---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_parameter_group"
+description: |-
+  Manages a Neptune Parameter Group
+---
+
+
+
+# Resource: aws_neptune_parameter_group
+
+Manages a Neptune Parameter Group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_parameter_group import NeptuneParameterGroup, NeptuneParameterGroupParameter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NeptuneParameterGroup(self, "example",
+            family="neptune1",
+            name="example",
+            parameter=[NeptuneParameterGroupParameter(
+                name="neptune_query_timeout",
+                value="25"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required, Forces new resource) The name of the Neptune parameter group.
+* `family` - (Required) The family of the Neptune parameter group.
+* `description` - (Optional) The description of the Neptune parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of Neptune parameters to apply.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the Neptune parameter.
+* `value` - (Required) The value of the Neptune parameter.
+* `apply_method` - (Optional) The apply method of the Neptune parameter. Valid values are `immediate` and `pending-reboot`. Defaults to `pending-reboot`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Neptune parameter group name.
+* `arn` - The Neptune parameter group Amazon Resource Name (ARN).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Neptune Parameter Groups using the `name`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Neptune Parameter Groups using the `name`. For example:
+
+```console
+% terraform import aws_neptune_parameter_group.some_pg some-pg
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/neptune_subnet_group.html.markdown b/website/docs/cdktf/python/r/neptune_subnet_group.html.markdown new file mode 100644 index 00000000000..e9025c3b2ad --- /dev/null +++ b/website/docs/cdktf/python/r/neptune_subnet_group.html.markdown @@ -0,0 +1,75 @@ +---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_subnet_group"
+description: |-
+  Provides a Neptune subnet group resource.
+---
+
+
+
+# Resource: aws_neptune_subnet_group
+
+Provides a Neptune subnet group resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.neptune_subnet_group import NeptuneSubnetGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NeptuneSubnetGroup(self, "default",
+            name="main",
+            subnet_ids=[frontend.id, backend.id],
+            tags={
+                "Name": "My neptune subnet group"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the neptune subnet group. If omitted, Terraform will assign a random, unique name.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) The description of the neptune subnet group. Defaults to "Managed by Terraform".
+* `subnet_ids` - (Required) A list of VPC subnet IDs.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The neptune subnet group name.
+* `arn` - The ARN of the neptune subnet group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Neptune Subnet groups using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Neptune Subnet groups using the `name`.
For example:
+
+```console
+% terraform import aws_neptune_subnet_group.default production-subnet-group
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/network_acl.html.markdown b/website/docs/cdktf/python/r/network_acl.html.markdown new file mode 100644 index 00000000000..1d18e314413 --- /dev/null +++ b/website/docs/cdktf/python/r/network_acl.html.markdown @@ -0,0 +1,126 @@ +---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_acl"
+description: |-
+  Provides a network ACL resource.
+---
+
+
+
+# Resource: aws_network_acl
+
+Provides a network ACL resource. You might set up network ACLs with rules similar
+to your security groups in order to add an additional layer of security to your VPC.
+
+~> **NOTE on Network ACLs and Network ACL Rules:** Terraform currently
+provides both a standalone [Network ACL Rule](network_acl_rule.html) resource and a Network ACL resource with rules
+defined in-line. At this time you cannot use a Network ACL with in-line rules
+in conjunction with any Network ACL Rule resources. Doing so will cause
+a conflict of rule settings and will overwrite rules.
+
+~> **NOTE on Network ACLs and Network ACL Associations:** Terraform provides both a standalone [network ACL association](network_acl_association.html)
+resource and a network ACL resource with a `subnet_ids` attribute. Do not use the same subnet ID in both a network ACL
+resource and a network ACL association resource. Doing so will cause a conflict of associations and will overwrite the association.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.network_acl import NetworkAcl, NetworkAclEgress, NetworkAclIngress
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkAcl(self, "main",
+            egress=[NetworkAclEgress(
+                action="allow",
+                cidr_block="10.3.0.0/18",
+                from_port=443,
+                protocol="tcp",
+                rule_no=200,
+                to_port=443
+            )
+            ],
+            ingress=[NetworkAclIngress(
+                action="allow",
+                cidr_block="10.3.0.0/18",
+                from_port=80,
+                protocol="tcp",
+                rule_no=100,
+                to_port=80
+            )
+            ],
+            tags={
+                "Name": "main"
+            },
+            vpc_id=Token.as_string(aws_vpc_main.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `vpc_id` - (Required) The ID of the associated VPC.
+* `subnet_ids` - (Optional) A list of Subnet IDs to apply the ACL to.
+* `ingress` - (Optional) Specifies an ingress rule. Parameters defined below.
+  This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+* `egress` - (Optional) Specifies an egress rule. Parameters defined below.
+  This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### egress and ingress
+
+Both arguments are processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
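+
+Because these arguments are processed as attributes, an empty list is a meaningful value: explicitly assigning `egress=[]` or `ingress=[]` declares that the ACL should have no in-line rules of that kind, which is not the same as omitting the argument. A minimal sketch (not `cdktf convert` output; `aws_vpc_main` is the same hypothetical reference used in the example above):
+
+```python
+# A sketch: an ACL whose in-line egress and ingress rules are explicitly
+# cleared. Omitting the arguments would instead leave any rules created
+# outside this resource unmanaged.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.network_acl import NetworkAcl
+class BareAclSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkAcl(self, "bare",
+            vpc_id=Token.as_string(aws_vpc_main.id),
+            egress=[],
+            ingress=[]
+        )
+```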
+
+Both `egress` and `ingress` support the following keys:
+
+* `from_port` - (Required) The from port to match.
+* `to_port` - (Required) The to port to match.
+* `rule_no` - (Required) The rule number. Used for ordering.
+* `action` - (Required) The action to take.
+* `protocol` - (Required) The protocol to match. If using the -1 'all'
+protocol, you must specify a from and to port of 0.
+* `cidr_block` - (Optional) The CIDR block to match. This must be a
+valid network mask.
+* `ipv6_cidr_block` - (Optional) The IPv6 CIDR block.
+* `icmp_type` - (Optional) The ICMP type to be used. Default 0.
+* `icmp_code` - (Optional) The ICMP type code to be used. Default 0.
+
+~> Note: For more information on ICMP types and codes, see here: https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the network ACL
+* `arn` - The ARN of the network ACL
+* `owner_id` - The ID of the AWS account that owns the network ACL.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network ACLs using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network ACLs using the `id`. For example:
+
+```console
+% terraform import aws_network_acl.main acl-7aaabd18
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/network_acl_association.html.markdown b/website/docs/cdktf/python/r/network_acl_association.html.markdown new file mode 100644 index 00000000000..444968ef4da --- /dev/null +++ b/website/docs/cdktf/python/r/network_acl_association.html.markdown @@ -0,0 +1,52 @@ +---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_acl_association"
+description: |-
+  Provides a network ACL association resource.
+---
+
+
+
+# Resource: aws_network_acl_association
+
+Provides a network ACL association resource which allows you to associate your network ACL with any subnet(s).
+
+~> **NOTE on Network ACLs and Network ACL Associations:** Terraform provides both a standalone network ACL association resource
+and a [network ACL](network_acl.html) resource with a `subnet_ids` attribute. Do not use the same subnet ID in both a network ACL
+resource and a network ACL association resource. Doing so will cause a conflict of associations and will overwrite the association.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.network_acl_association import NetworkAclAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkAclAssociation(self, "main",
+            network_acl_id=Token.as_string(aws_network_acl_main.id),
+            subnet_id=Token.as_string(aws_subnet_main.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `network_acl_id` - (Required) The ID of the network ACL.
+* `subnet_id` - (Required) The ID of the associated Subnet.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the network ACL association
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/network_acl_rule.html.markdown b/website/docs/cdktf/python/r/network_acl_rule.html.markdown new file mode 100644 index 00000000000..7fd8e0ae6ea --- /dev/null +++ b/website/docs/cdktf/python/r/network_acl_rule.html.markdown @@ -0,0 +1,125 @@ +---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_acl_rule"
+description: |-
+  Provides a network ACL Rule resource.
+---
+
+
+
+# Resource: aws_network_acl_rule
+
+Creates an entry (a rule) in a network ACL with the specified rule number.
+
+~> **NOTE on Network ACLs and Network ACL Rules:** Terraform currently
+provides both a standalone Network ACL Rule resource and a [Network ACL](network_acl.html) resource with rules
+defined in-line. At this time you cannot use a Network ACL with in-line rules
+in conjunction with any Network ACL Rule resources. Doing so will cause
+a conflict of rule settings and will overwrite rules.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.network_acl import NetworkAcl
+from imports.aws.network_acl_rule import NetworkAclRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bar = NetworkAcl(self, "bar",
+            vpc_id=foo.id
+        )
+        aws_network_acl_rule_bar = NetworkAclRule(self, "bar_1",
+            cidr_block=foo.cidr_block,
+            egress=False,
+            from_port=22,
+            network_acl_id=bar.id,
+            protocol="tcp",
+            rule_action="allow",
+            rule_number=200,
+            to_port=22
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_network_acl_rule_bar.override_logical_id("bar")
+```
+
+~> **Note:** One of either `cidr_block` or `ipv6_cidr_block` is required.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `network_acl_id` - (Required) The ID of the network ACL.
+* `rule_number` - (Required) The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule number.
+* `egress` - (Optional, bool) Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet). Default `false`.
+* `protocol` - (Required) The protocol. A value of -1 means all protocols.
+* `rule_action` - (Required) Indicates whether to allow or deny the traffic that matches the rule. Accepted values: `allow` | `deny`
+* `cidr_block` - (Optional) The network range to allow or deny, in CIDR notation (for example, 172.16.0.0/24).
+* `ipv6_cidr_block` - (Optional) The IPv6 CIDR block to allow or deny.
+* `from_port` - (Optional) The from port to match.
+* `to_port` - (Optional) The to port to match.
+* `icmp_type` - (Optional) ICMP protocol: The ICMP type. Required if specifying ICMP for the protocol. E.g., -1
+* `icmp_code` - (Optional) ICMP protocol: The ICMP code. Required if specifying ICMP for the protocol. E.g., -1
+
+~> **NOTE:** If the value of `protocol` is `-1` or `all`, the `from_port` and `to_port` values will be ignored and the rule will apply to all ports.
+
+~> **NOTE:** If the value of `icmp_type` is `-1` (which results in a wildcard ICMP type), the `icmp_code` must also be set to `-1` (wildcard ICMP code).
+
+~> Note: For more information on ICMP types and codes, see here: https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the network ACL Rule
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import individual rules using `NETWORK_ACL_ID:RULE_NUMBER:PROTOCOL:EGRESS`, where `PROTOCOL` can be a decimal (such as "6") or string (such as "tcp") value. For example:
+
+**NOTE:** If importing a rule previously provisioned by Terraform, the `PROTOCOL` must be the input value used at creation time. For more information on protocol numbers and keywords, see here: https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml.
+
+Using the protocol's string value:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using the protocol's decimal value:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+**Using `terraform import` to import** individual rules using `NETWORK_ACL_ID:RULE_NUMBER:PROTOCOL:EGRESS`, where `PROTOCOL` can be a decimal (such as "6") or string (such as "tcp") value. For example:
+
+Using the protocol's string value:
+
+```console
+% terraform import aws_network_acl_rule.my_rule acl-7aaabd18:100:tcp:false
+```
+
+Using the protocol's decimal value:
+
+```console
+% terraform import aws_network_acl_rule.my_rule acl-7aaabd18:100:6:false
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/network_interface.markdown b/website/docs/cdktf/python/r/network_interface.markdown new file mode 100644 index 00000000000..bc286cf81ef --- /dev/null +++ b/website/docs/cdktf/python/r/network_interface.markdown @@ -0,0 +1,122 @@ +---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_interface"
+description: |-
+  Provides an Elastic network interface (ENI) resource.
+---
+
+
+
+# Resource: aws_network_interface
+
+Provides an Elastic network interface (ENI) resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.network_interface import NetworkInterface, NetworkInterfaceAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkInterface(self, "test",
+            attachment=[NetworkInterfaceAttachment(
+                device_index=1,
+                instance=Token.as_string(aws_instance_test.id)
+            )
+            ],
+            private_ips=["10.0.0.50"],
+            security_groups=[web.id],
+            subnet_id=public_a.id
+        )
+```
+
+### Example of Managing Multiple IPs on a Network Interface
+
+By default, private IPs are managed through the `private_ips` and `private_ips_count` arguments which manage IPs as a set of IPs that are configured without regard to order. For a new network interface, the same primary IP address is consistently selected from a given set of addresses, regardless of the order provided. However, modifications of the set of addresses of an existing interface will not alter the current primary IP address unless it has been removed from the set.
+
+In order to manage the private IPs as a sequentially ordered list, configure `private_ip_list_enabled` to `true` and use `private_ip_list` to manage the IPs. This will disable the `private_ips` and `private_ips_count` settings, which must be removed from the config file but are still exported. Note that changing the first address of `private_ip_list`, which is the primary, always requires a new interface.
+
+If you are managing a specific set or list of IPs, instead of just using `private_ips_count`, this is a potential workflow for also leveraging `private_ips_count` to have AWS automatically assign additional IP addresses:
+
+1. Comment out `private_ips`, `private_ip_list`, `private_ip_list_enabled` in your configuration
+2. Set the desired `private_ips_count` (count of the number of secondaries, the primary is not included)
+3. Apply to assign the extra IPs
+4. Remove `private_ips_count` and restore your settings from the first step
+5. Add the new IPs to your current settings
+6. Apply again to update the stored state
+
+This process can also be used to remove IP addresses in addition to the option of manually removing them. Adding IP addresses manually is more difficult because it requires knowledge of which addresses are available.
+
+## Argument Reference
+
+The following arguments are required:
+
+* `subnet_id` - (Required) Subnet ID to create the ENI in.
+
+The following arguments are optional:
+
+* `attachment` - (Optional) Configuration block to define the attachment of the ENI. See [Attachment](#attachment) below for more details.
+* `description` - (Optional) Description for the network interface.
+* `interface_type` - (Optional) Type of network interface to create. Set to `efa` for Elastic Fabric Adapter. Changing `interface_type` will cause the resource to be destroyed and re-created.
+* `ipv4_prefix_count` - (Optional) Number of IPv4 prefixes that AWS automatically assigns to the network interface.
+* `ipv4_prefixes` - (Optional) One or more IPv4 prefixes assigned to the network interface.
+* `ipv6_address_count` - (Optional) Number of IPv6 addresses to assign to a network interface. You can't use this option if specifying specific `ipv6_addresses`. If your subnet has the AssignIpv6AddressOnCreation attribute set to `true`, you can specify `0` to override this setting.
+* `ipv6_address_list_enabled` - (Optional) Whether `ipv6_address_list` is allowed and controls the IPs to assign to the ENI; when `true`, `ipv6_addresses` and `ipv6_address_count` become read-only. Default `false`.
+* `ipv6_address_list` - (Optional) List of IPv6 addresses to assign to the ENI in sequential order.
+* `ipv6_addresses` - (Optional) One or more specific IPv6 addresses from the IPv6 CIDR block range of your subnet. Addresses are assigned without regard to order. You can't use this option if you're specifying `ipv6_address_count`.
+* `ipv6_prefix_count` - (Optional) Number of IPv6 prefixes that AWS automatically assigns to the network interface.
+* `ipv6_prefixes` - (Optional) One or more IPv6 prefixes assigned to the network interface.
+* `private_ip_list` - (Optional) List of private IPs to assign to the ENI in sequential order. Requires setting `private_ip_list_enabled` to `true`.
+* `private_ip_list_enabled` - (Optional) Whether `private_ip_list` is allowed and controls the IPs to assign to the ENI; when `true`, `private_ips` and `private_ips_count` become read-only. Default `false`.
+* `private_ips` - (Optional) List of private IPs to assign to the ENI without regard to order.
+* `private_ips_count` - (Optional) Number of secondary private IPs to assign to the ENI. The total number of private IPs will be 1 + `private_ips_count`, as a primary private IP will be assigned to an ENI by default.
+* `security_groups` - (Optional) List of security group IDs to assign to the ENI.
+* `source_dest_check` - (Optional) Whether to enable source destination checking for the ENI. Default true.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Attachment
+
+The `attachment` block supports the following:
+
+* `instance` - (Required) ID of the instance to attach to.
+* `device_index` - (Required) Integer to define the device's index.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the network interface.
+* `id` - ID of the network interface.
+* `mac_address` - MAC address of the network interface.
+* `owner_id` - AWS account ID of the owner of the network interface.
+* `private_dns_name` - Private DNS name of the network interface (IPv4).
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Interfaces using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network Interfaces using the `id`.
For example:
+
+```console
+% terraform import aws_network_interface.test eni-e5aa89a3
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/network_interface_attachment.html.markdown b/website/docs/cdktf/python/r/network_interface_attachment.html.markdown new file mode 100644 index 00000000000..feb12fa7cc7 --- /dev/null +++ b/website/docs/cdktf/python/r/network_interface_attachment.html.markdown @@ -0,0 +1,72 @@ +---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_interface_attachment"
+description: |-
+  Attaches an Elastic network interface (ENI) to an EC2 instance.
+---
+
+
+
+# Resource: aws_network_interface_attachment
+
+Attaches an Elastic network interface (ENI) to an EC2 instance.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.network_interface_attachment import NetworkInterfaceAttachmentA
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkInterfaceAttachmentA(self, "test",
+            device_index=0,
+            instance_id=Token.as_string(aws_instance_test.id),
+            network_interface_id=Token.as_string(aws_network_interface_test.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `instance_id` - (Required) Instance ID to attach.
+* `network_interface_id` - (Required) ENI ID to attach.
+* `device_index` - (Required) Network interface index (int).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `instance_id` - Instance ID.
+* `network_interface_id` - Network interface ID.
+* `attachment_id` - The ENI Attachment ID.
+* `status` - The status of the Network Interface Attachment.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic network interface (ENI) Attachments using the Attachment ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Elastic network interface (ENI) Attachments using the Attachment ID. For example:
+
+```console
+% terraform import aws_network_interface_attachment.secondary_nic eni-attach-0a33842b4ec347c4c
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/network_interface_sg_attachment.html.markdown b/website/docs/cdktf/python/r/network_interface_sg_attachment.html.markdown new file mode 100644 index 00000000000..7e639fa0ae7 --- /dev/null +++ b/website/docs/cdktf/python/r/network_interface_sg_attachment.html.markdown @@ -0,0 +1,137 @@ +---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_interface_sg_attachment"
+description: |-
+  Associates a security group with a network interface.
+---
+
+
+
+# Resource: aws_network_interface_sg_attachment
+
+This resource attaches a security group to an Elastic Network Interface (ENI).
+It can be used to attach a security group to any existing ENI, be it a +secondary ENI or one attached as the primary interface on an instance. + +~> **NOTE on instances, interfaces, and security groups:** Terraform currently +provides the capability to assign security groups via the [`aws_instance`][1] +and the [`aws_network_interface`][2] resources. Using this resource in +conjunction with security groups provided in-line in those resources will cause +conflicts, and will lead to spurious diffs and undefined behavior - please use +one or the other. + +[1]: /docs/providers/aws/d/instance.html +[2]: /docs/providers/aws/r/network_interface.html + +## Example Usage + +The following provides a very basic example of setting up an instance (provided +by `instance`) in the default security group, creating a security group +(provided by `sg`) and then attaching the security group to the instance's +primary network interface via the `aws_network_interface_sg_attachment` resource, +named `sg_attachment`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ami import DataAwsAmi +from imports.aws.instance import Instance +from imports.aws.network_interface_sg_attachment import NetworkInterfaceSgAttachment +from imports.aws.security_group import SecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + sg = SecurityGroup(self, "sg", + tags={ + "type": "terraform-test-security-group" + } + ) + ami = DataAwsAmi(self, "ami", + filter=[DataAwsAmiFilter( + name="name", + values=["amzn-ami-hvm-*"] + ) + ], + most_recent=True, + owners=["amazon"] + ) + instance = Instance(self, "instance", + ami=Token.as_string(ami.id), + instance_type="t2.micro", + tags={ + "type": "terraform-test-instance" + } + ) + NetworkInterfaceSgAttachment(self, "sg_attachment", + network_interface_id=instance.primary_network_interface_id, + security_group_id=sg.id + ) +``` + +In this example, `instance` is provided by the `aws_instance` data source, +fetching an external instance, possibly not managed by Terraform. +`sg_attachment` then attaches to the output instance's `network_interface_id`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_instance import DataAwsInstance +from imports.aws.network_interface_sg_attachment import NetworkInterfaceSgAttachment +from imports.aws.security_group import SecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + sg = SecurityGroup(self, "sg", + tags={ + "type": "terraform-test-security-group" + } + ) + instance = DataAwsInstance(self, "instance", + instance_id="i-1234567890abcdef0" + ) + NetworkInterfaceSgAttachment(self, "sg_attachment", + network_interface_id=Token.as_string(instance.network_interface_id), + security_group_id=sg.id + ) +``` + +## Argument Reference + +* `security_group_id` - (Required) The ID of the security group. +* `network_interface_id` - (Required) The ID of the network interface to attach to. 
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Interface Security Group attachments using the associated network interface ID and security group ID, separated by an underscore (`_`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network Interface Security Group attachments using the associated network interface ID and security group ID, separated by an underscore (`_`). For example:
+
+```console
+% terraform import aws_network_interface_sg_attachment.sg_attachment eni-1234567890abcdef0_sg-1234567890abcdef0
+```
+
 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkfirewall_firewall.html.markdown b/website/docs/cdktf/python/r/networkfirewall_firewall.html.markdown new file mode 100644 index 00000000000..e23d0a2493b --- /dev/null +++ b/website/docs/cdktf/python/r/networkfirewall_firewall.html.markdown @@ -0,0 +1,120 @@ +---
+subcategory: "Network Firewall"
+layout: "aws"
+page_title: "AWS: aws_networkfirewall_firewall"
+description: |-
+  Provides an AWS Network Firewall Firewall resource.
+---
+
+
+
+# Resource: aws_networkfirewall_firewall
+
+Provides an AWS Network Firewall Firewall resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkfirewall_firewall import NetworkfirewallFirewall, NetworkfirewallFirewallSubnetMapping
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkfirewallFirewall(self, "example",
+            firewall_policy_arn=Token.as_string(aws_networkfirewall_firewall_policy_example.arn),
+            name="example",
+            subnet_mapping=[NetworkfirewallFirewallSubnetMapping(
+                subnet_id=Token.as_string(aws_subnet_example.id)
+            )
+            ],
+            tags={
+                "Tag1": "Value1",
+                "Tag2": "Value2"
+            },
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `delete_protection` - (Optional) A boolean flag indicating whether it is possible to delete the firewall. Defaults to `false`.
+
+* `description` - (Optional) A friendly description of the firewall.
+
+* `encryption_configuration` - (Optional) KMS encryption configuration settings. See [Encryption Configuration](#encryption-configuration) below for details.
+
+* `firewall_policy_arn` - (Required) The Amazon Resource Name (ARN) of the VPC Firewall policy.
+
+* `firewall_policy_change_protection` - (Optional) A boolean flag indicating whether it is possible to change the associated firewall policy. Defaults to `false`.
+
+* `name` - (Required, Forces new resource) A friendly name of the firewall.
+
+* `subnet_change_protection` - (Optional) A boolean flag indicating whether it is possible to change the associated subnet(s). Defaults to `false`.
+
+* `subnet_mapping` - (Required) Set of configuration blocks describing the public subnets. Each subnet must belong to a different Availability Zone in the VPC.
AWS Network Firewall creates a firewall endpoint in each subnet. See [Subnet Mapping](#subnet-mapping) below for details.
+
+* `tags` - (Optional) Map of resource tags to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+* `vpc_id` - (Required, Forces new resource) The unique identifier of the VPC where AWS Network Firewall should create the firewall.
+
+### Encryption Configuration
+
+`encryption_configuration` settings for customer managed KMS keys. Remove this block to use the default AWS-managed KMS encryption (rather than setting `type` to `AWS_OWNED_KMS_KEY`).
+
+* `key_id` - (Optional) The ID of the customer managed key. You can use any of the [key identifiers](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id) that KMS supports, unless you're using a key that's managed by another account. If you're using a key managed by another account, then specify the key ARN.
+* `type` - (Required) The type of AWS KMS key to use for encryption of your Network Firewall resources. Valid values are `CUSTOMER_KMS` and `AWS_OWNED_KMS_KEY`.
+
+### Subnet Mapping
+
+The `subnet_mapping` block supports the following arguments:
+
+* `ip_address_type` - (Optional) The subnet's IP address type. Valid values: `"DUALSTACK"`, `"IPV4"`.
+* `subnet_id` - (Required) The unique identifier for the subnet.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) that identifies the firewall.
+
+* `arn` - The Amazon Resource Name (ARN) that identifies the firewall.
+
+* `firewall_status` - Nested list of information about the current status of the firewall.
+    * `sync_states` - Set of subnets configured for use by the firewall.
+        * `attachment` - Nested list describing the attachment status of the firewall's association with a single VPC subnet.
+            * `endpoint_id` - The identifier of the firewall endpoint that AWS Network Firewall has instantiated in the subnet. You use this to identify the firewall endpoint in the VPC route tables, when you redirect the VPC traffic through the endpoint.
+            * `subnet_id` - The unique identifier of the subnet that you've specified to be used for a firewall endpoint.
+        * `availability_zone` - The Availability Zone where the subnet is configured.
+
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+* `update_token` - A string token used when updating a firewall.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Firewalls using their `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network Firewall Firewalls using their `arn`.
For example: + +```console +% terraform import aws_networkfirewall_firewall.example arn:aws:network-firewall:us-west-1:123456789012:firewall/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkfirewall_firewall_policy.html.markdown b/website/docs/cdktf/python/r/networkfirewall_firewall_policy.html.markdown new file mode 100644 index 00000000000..493aa47dae6 --- /dev/null +++ b/website/docs/cdktf/python/r/networkfirewall_firewall_policy.html.markdown @@ -0,0 +1,271 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_firewall_policy" +description: |- + Provides an AWS Network Firewall Policy resource. +--- + + + +# Resource: aws_networkfirewall_firewall_policy + +Provides an AWS Network Firewall Firewall Policy Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_firewall_policy import NetworkfirewallFirewallPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallFirewallPolicy(self, "example", + firewall_policy=NetworkfirewallFirewallPolicyFirewallPolicy( + stateless_default_actions=["aws:pass"], + stateless_fragment_default_actions=["aws:drop"], + stateless_rule_group_reference=[NetworkfirewallFirewallPolicyFirewallPolicyStatelessRuleGroupReference( + priority=1, + resource_arn=Token.as_string(aws_networkfirewall_rule_group_example.arn) + ) + ] + ), + name="example", + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + } + ) +``` + +## Policy with a HOME_NET Override + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_firewall_policy import NetworkfirewallFirewallPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallFirewallPolicy(self, "example", + firewall_policy=NetworkfirewallFirewallPolicyFirewallPolicy( + policy_variables=[{ + "rule_variables": [{ + "ip_set": [{ + "definition": ["10.0.0.0/16", "10.1.0.0/24"] + } + ], + "key": "HOME_NET" + } + ] + } + ], + stateless_default_actions=["aws:pass"], + stateless_fragment_default_actions=["aws:drop"], + stateless_rule_group_reference=[NetworkfirewallFirewallPolicyFirewallPolicyStatelessRuleGroupReference( + priority=1, + resource_arn=Token.as_string(aws_networkfirewall_rule_group_example.arn) + ) + ] + ), + name="example", + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + } + ) +``` + +## Policy with a Custom Action for Stateless Inspection + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.networkfirewall_firewall_policy import NetworkfirewallFirewallPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkfirewallFirewallPolicy(self, "test",
+            firewall_policy=NetworkfirewallFirewallPolicyFirewallPolicy(
+                stateless_custom_action=[NetworkfirewallFirewallPolicyFirewallPolicyStatelessCustomAction(
+                    action_definition=NetworkfirewallFirewallPolicyFirewallPolicyStatelessCustomActionActionDefinition(
+                        publish_metric_action=NetworkfirewallFirewallPolicyFirewallPolicyStatelessCustomActionActionDefinitionPublishMetricAction(
+                            dimension=[NetworkfirewallFirewallPolicyFirewallPolicyStatelessCustomActionActionDefinitionPublishMetricActionDimension(
+                                value="1"
+                            )
+                            ]
+                        )
+                    ),
+                    action_name="ExampleCustomAction"
+                )
+                ],
+                stateless_default_actions=["aws:pass", "ExampleCustomAction"],
+                stateless_fragment_default_actions=["aws:drop"]
+            ),
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) A friendly description of the firewall policy.
+
+* `encryption_configuration` - (Optional) KMS encryption configuration settings. See [Encryption Configuration](#encryption-configuration) below for details.
+
+* `firewall_policy` - (Required) A configuration block describing the rule groups and policy actions to use in the firewall policy. See [Firewall Policy](#firewall-policy) below for details.
+
+* `name` - (Required, Forces new resource) A friendly name of the firewall policy.
+
+* `tags` - (Optional) Map of resource tags to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Encryption Configuration
+
+`encryption_configuration` settings for customer managed KMS keys. Remove this block to use the default AWS-managed KMS encryption (rather than setting `type` to `AWS_OWNED_KMS_KEY`).
+
+* `key_id` - (Optional) The ID of the customer managed key. You can use any of the [key identifiers](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id) that KMS supports, unless you're using a key that's managed by another account. If you're using a key managed by another account, then specify the key ARN.
+* `type` - (Required) The type of AWS KMS key to use for encryption of your Network Firewall resources. Valid values are `CUSTOMER_KMS` and `AWS_OWNED_KMS_KEY`.
+
+### Firewall Policy
+
+The `firewall_policy` block supports the following arguments:
+
+* `policy_variables` - (Optional) Contains variables that you can use to override default Suricata settings in your firewall policy. See [Rule Variables](#rule-variables) for details.
+
+* `stateful_default_actions` - (Optional) Set of actions to take on a packet if it does not match any stateful rules in the policy. This can only be specified if the policy has a `stateful_engine_options` block with a `rule_order` value of `STRICT_ORDER`. You can specify one (or neither) of `aws:drop_strict` or `aws:drop_established`, as well as any combination of `aws:alert_strict` and `aws:alert_established`.
+
+* `stateful_engine_options` - (Optional) A configuration block that defines options on how the policy handles stateful rules. See [Stateful Engine Options](#stateful-engine-options) below for details. A hedged sketch combining this with `stateful_default_actions` follows this item.
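+
+As an illustration (a minimal sketch, not part of the upstream page), the following assumes the nested configuration classes follow the same generated naming pattern as the examples above:
+
+```python
+# A minimal sketch of a STRICT_ORDER policy. It assumes the class names
+# NetworkfirewallFirewallPolicyFirewallPolicy and
+# NetworkfirewallFirewallPolicyFirewallPolicyStatefulEngineOptions follow
+# the generated naming pattern used in the examples above.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.networkfirewall_firewall_policy import NetworkfirewallFirewallPolicy
+class StrictOrderPolicyStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkfirewallFirewallPolicy(self, "strict_order_example",
+            firewall_policy=NetworkfirewallFirewallPolicyFirewallPolicy(
+                # At most one of aws:drop_strict / aws:drop_established, plus
+                # any combination of the alert actions.
+                stateful_default_actions=["aws:drop_strict", "aws:alert_strict"],
+                stateful_engine_options=NetworkfirewallFirewallPolicyFirewallPolicyStatefulEngineOptions(
+                    rule_order="STRICT_ORDER"
+                ),
+                stateless_default_actions=["aws:forward_to_sfe"],
+                stateless_fragment_default_actions=["aws:forward_to_sfe"]
+            ),
+            name="strict-order-example"
+        )
+```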
+
+* `stateful_rule_group_reference` - (Optional) Set of configuration blocks containing references to the stateful rule groups that are used in the policy. See [Stateful Rule Group Reference](#stateful-rule-group-reference) below for details.
+
+* `stateless_custom_action` - (Optional) Set of configuration blocks describing the custom action definitions that are available for use in the firewall policy's `stateless_default_actions`. See [Stateless Custom Action](#stateless-custom-action) below for details.
+
+* `stateless_default_actions` - (Required) Set of actions to take on a packet if it does not match any of the stateless rules in the policy. You must specify one of the standard actions: `aws:drop`, `aws:pass`, or `aws:forward_to_sfe`.
+In addition, you can specify custom actions that are compatible with your standard action choice. If you want non-matching packets to be forwarded for stateful inspection, specify `aws:forward_to_sfe`.
+
+* `stateless_fragment_default_actions` - (Required) Set of actions to take on a fragmented packet if it does not match any of the stateless rules in the policy. You must specify one of the standard actions: `aws:drop`, `aws:pass`, or `aws:forward_to_sfe`.
+In addition, you can specify custom actions that are compatible with your standard action choice. If you want non-matching packets to be forwarded for stateful inspection, specify `aws:forward_to_sfe`.
+
+* `stateless_rule_group_reference` - (Optional) Set of configuration blocks containing references to the stateless rule groups that are used in the policy. See [Stateless Rule Group Reference](#stateless-rule-group-reference) below for details.
+
+### Rule Variables
+
+The `rule_variables` block supports the following arguments:
+
+* `key` - (Required) An alphanumeric string to identify the `ip_set`. Valid values: `HOME_NET`.
+
+* `ip_set` - (Required) A configuration block that defines a set of IP addresses. See [IP Set](#ip-set) below for details.
+
+### IP Set
+
+The `ip_set` block supports the following argument:
+
+* `definition` - (Required) Set of IPv4 or IPv6 addresses in CIDR notation to use for the Suricata `HOME_NET` variable.
+
+### Stateful Engine Options
+
+The `stateful_engine_options` block supports the following arguments:
+
+~> **NOTE:** If the `STRICT_ORDER` rule order is specified, this firewall policy can only reference stateful rule groups that utilize `STRICT_ORDER`.
+
+* `rule_order` - Indicates how to manage the order of stateful rule evaluation for the policy. Default value: `DEFAULT_ACTION_ORDER`. Valid values: `DEFAULT_ACTION_ORDER`, `STRICT_ORDER`.
+
+* `stream_exception_policy` - Describes how to treat traffic that has broken midstream. Default value: `DROP`. Valid values: `DROP`, `CONTINUE`, `REJECT`.
+
+### Stateful Rule Group Reference
+
+The `stateful_rule_group_reference` block supports the following arguments:
+
+* `priority` - (Optional) An integer setting that indicates the order in which to apply the stateful rule groups in a single policy. This argument must be specified if the policy has a `stateful_engine_options` block with a `rule_order` value of `STRICT_ORDER`. AWS Network Firewall applies each stateful rule group to a packet starting with the group that has the lowest priority setting.
+
+* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the stateful rule group.
+
+* `override` - (Optional) Configuration block for override values.
+
+#### Override
+
+* `action` - (Optional) The action that changes the rule group from `DROP` to `ALERT`. This only applies to managed rule groups.
+
+### Stateless Custom Action
+
+The `stateless_custom_action` block supports the following arguments:
+
+* `action_definition` - (Required) A configuration block describing the custom action associated with the `action_name`. See [Action Definition](#action-definition) below for details.
+
+* `action_name` - (Required, Forces new resource) A friendly name of the custom action.
+
+### Stateless Rule Group Reference
+
+The `stateless_rule_group_reference` block supports the following arguments:
+
+* `priority` - (Required) An integer setting that indicates the order in which to run the stateless rule groups in a single policy. AWS Network Firewall applies each stateless rule group to a packet starting with the group that has the lowest priority setting.
+
+* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the stateless rule group.
+
+### Action Definition
+
+The `action_definition` block supports the following argument:
+
+* `publish_metric_action` - (Required) A configuration block describing the stateless inspection criteria that publishes the specified metrics to Amazon CloudWatch for the matching packet. You can pair this custom action with any of the standard stateless rule actions. See [Publish Metric Action](#publish-metric-action) below for details.
+
+### Publish Metric Action
+
+The `publish_metric_action` block supports the following argument:
+
+* `dimension` - (Required) Set of configuration blocks describing dimension settings to use for Amazon CloudWatch custom metrics. See [Dimension](#dimension) below for more details.
+
+### Dimension
+
+The `dimension` block supports the following argument:
+
+* `value` - (Required) The string value to use in the custom metric dimension.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) that identifies the firewall policy.
+
+* `arn` - The Amazon Resource Name (ARN) that identifies the firewall policy.
+
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+* `update_token` - A string token used when updating a firewall policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Policies using their `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network Firewall Policies using their `arn`.
For example: + +```console +% terraform import aws_networkfirewall_firewall_policy.example arn:aws:network-firewall:us-west-1:123456789012:firewall-policy/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkfirewall_logging_configuration.html.markdown b/website/docs/cdktf/python/r/networkfirewall_logging_configuration.html.markdown new file mode 100644 index 00000000000..fac5821d64e --- /dev/null +++ b/website/docs/cdktf/python/r/networkfirewall_logging_configuration.html.markdown @@ -0,0 +1,157 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_logging_configuration" +description: |- + Provides an AWS Network Firewall Logging Configuration resource. +--- + + + +# Resource: aws_networkfirewall_logging_configuration + +Provides an AWS Network Firewall Logging Configuration Resource + +## Example Usage + +### Logging to S3 + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_logging_configuration import NetworkfirewallLoggingConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallLoggingConfiguration(self, "example", + firewall_arn=Token.as_string(aws_networkfirewall_firewall_example.arn), + logging_configuration=NetworkfirewallLoggingConfigurationLoggingConfiguration( + log_destination_config=[NetworkfirewallLoggingConfigurationLoggingConfigurationLogDestinationConfig( + log_destination={ + "bucket_name": Token.as_string(aws_s3_bucket_example.bucket), + "prefix": "/example" + }, + log_destination_type="S3", + log_type="FLOW" + ) + ] + ) + ) +``` + +### Logging to CloudWatch + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_logging_configuration import NetworkfirewallLoggingConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallLoggingConfiguration(self, "example", + firewall_arn=Token.as_string(aws_networkfirewall_firewall_example.arn), + logging_configuration=NetworkfirewallLoggingConfigurationLoggingConfiguration( + log_destination_config=[NetworkfirewallLoggingConfigurationLoggingConfigurationLogDestinationConfig( + log_destination={ + "log_group": Token.as_string(aws_cloudwatch_log_group_example.name) + }, + log_destination_type="CloudWatchLogs", + log_type="ALERT" + ) + ] + ) + ) +``` + +### Logging to Kinesis Data Firehose + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
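+#
+# Note: per the argument reference below, `log_destination` takes the
+# delivery stream *name* (not its ARN) under the `delivery_stream` key.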
+#
+from imports.aws.networkfirewall_logging_configuration import NetworkfirewallLoggingConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkfirewallLoggingConfiguration(self, "example",
+            firewall_arn=Token.as_string(aws_networkfirewall_firewall_example.arn),
+            logging_configuration=NetworkfirewallLoggingConfigurationLoggingConfiguration(
+                log_destination_config=[NetworkfirewallLoggingConfigurationLoggingConfigurationLogDestinationConfig(
+                    log_destination={
+                        "delivery_stream": Token.as_string(aws_kinesis_firehose_delivery_stream_example.name)
+                    },
+                    log_destination_type="KinesisDataFirehose",
+                    log_type="ALERT"
+                )
+                ]
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `firewall_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Network Firewall firewall.
+
+* `logging_configuration` - (Required) A configuration block describing how AWS Network Firewall performs logging for a firewall. See [Logging Configuration](#logging-configuration) below for details.
+
+### Logging Configuration
+
+The `logging_configuration` block supports the following arguments:
+
+* `log_destination_config` - (Required) Set of configuration blocks describing the logging details for a firewall. See [Log Destination Config](#log-destination-config) below for details. At most two blocks can be specified: one for `FLOW` logs and one for `ALERT` logs.
+
+### Log Destination Config
+
+The `log_destination_config` block supports the following arguments:
+
+* `log_destination` - (Required) A map describing the logging destination for the chosen `log_destination_type`.
+    * For an Amazon S3 bucket, specify the key `bucket_name` with the name of the bucket and optionally specify the key `prefix` with a path.
+    * For a CloudWatch log group, specify the key `log_group` with the name of the CloudWatch log group.
+    * For a Kinesis Data Firehose delivery stream, specify the key `delivery_stream` with the name of the delivery stream.
+
+* `log_destination_type` - (Required) The location to send logs to. Valid values: `S3`, `CloudWatchLogs`, `KinesisDataFirehose`.
+
+* `log_type` - (Required) The type of log to send. Valid values: `ALERT` or `FLOW`. Alert logs report traffic that matches a `StatefulRule` with an action setting that sends a log message. Flow logs are standard network traffic flow logs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the associated firewall.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Logging Configurations using the `firewall_arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network Firewall Logging Configurations using the `firewall_arn`.
For example: + +```console +% terraform import aws_networkfirewall_logging_configuration.example arn:aws:network-firewall:us-west-1:123456789012:firewall/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkfirewall_resource_policy.html.markdown b/website/docs/cdktf/python/r/networkfirewall_resource_policy.html.markdown new file mode 100644 index 00000000000..83fc556cbbf --- /dev/null +++ b/website/docs/cdktf/python/r/networkfirewall_resource_policy.html.markdown @@ -0,0 +1,116 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_resource_policy" +description: |- + Provides an AWS Network Firewall Resource Policy resource. +--- + + + +# Resource: aws_networkfirewall_resource_policy + +Provides an AWS Network Firewall Resource Policy Resource for a rule group or firewall policy. + +## Example Usage + +### For a Firewall Policy resource + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_resource_policy import NetworkfirewallResourcePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallResourcePolicy(self, "example", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["network-firewall:ListFirewallPolicies", "network-firewall:CreateFirewall", "network-firewall:UpdateFirewall", "network-firewall:AssociateFirewallPolicy" + ], + "Effect": "Allow", + "Principal": { + "AWS": "arn:aws:iam::123456789012:root" + }, + "Resource": aws_networkfirewall_firewall_policy_example.arn + } + ], + "Version": "2012-10-17" + })), + resource_arn=Token.as_string(aws_networkfirewall_firewall_policy_example.arn) + ) +``` + +### For a Rule Group resource + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_resource_policy import NetworkfirewallResourcePolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallResourcePolicy(self, "example", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["network-firewall:ListRuleGroups", "network-firewall:CreateFirewallPolicy", "network-firewall:UpdateFirewallPolicy" + ], + "Effect": "Allow", + "Principal": { + "AWS": "arn:aws:iam::123456789012:root" + }, + "Resource": aws_networkfirewall_rule_group_example.arn + } + ], + "Version": "2012-10-17" + })), + resource_arn=Token.as_string(aws_networkfirewall_rule_group_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) JSON formatted policy document that controls access to the Network Firewall resource. The policy must be provided **without whitespaces**. We recommend using [jsonencode](https://www.terraform.io/docs/configuration/functions/jsonencode.html) for formatting as seen in the examples above. 
For more details, including available policy statement Actions, see the [Policy](https://docs.aws.amazon.com/network-firewall/latest/APIReference/API_PutResourcePolicy.html#API_PutResourcePolicy_RequestSyntax) parameter in the AWS API documentation.
+
+* `resource_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the rule group or firewall policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the rule group or firewall policy associated with the resource policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Resource Policies using the `resource_arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network Firewall Resource Policies using the `resource_arn`. For example:
+
+```console
+% terraform import aws_networkfirewall_resource_policy.example arn:aws:network-firewall:us-west-1:123456789012:stateful-rulegroup/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/networkfirewall_rule_group.html.markdown b/website/docs/cdktf/python/r/networkfirewall_rule_group.html.markdown
new file mode 100644
index 00000000000..b4b6a7c702d
--- /dev/null
+++ b/website/docs/cdktf/python/r/networkfirewall_rule_group.html.markdown
@@ -0,0 +1,638 @@
+---
+subcategory: "Network Firewall"
+layout: "aws"
+page_title: "AWS: aws_networkfirewall_rule_group"
+description: |-
+  Provides an AWS Network Firewall Rule Group resource.
+---
+
+
+
+# Resource: aws_networkfirewall_rule_group
+
+Provides an AWS Network Firewall Rule Group Resource
+
+## Example Usage
+
+### Stateful Inspection for denying access to a domain
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkfirewall_rule_group import NetworkfirewallRuleGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkfirewallRuleGroup(self, "example",
+            capacity=100,
+            name="example",
+            rule_group=NetworkfirewallRuleGroupRuleGroup(
+                rules_source=NetworkfirewallRuleGroupRuleGroupRulesSource(
+                    rules_source_list=NetworkfirewallRuleGroupRuleGroupRulesSourceRulesSourceListStruct(
+                        generated_rules_type="DENYLIST",
+                        target_types=["HTTP_HOST"],
+                        targets=["test.example.com"]
+                    )
+                )
+            ),
+            tags={
+                "Tag1": "Value1",
+                "Tag2": "Value2"
+            },
+            type="STATEFUL"
+        )
+```
+
+### Stateful Inspection for permitting packets from a source IP address
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformIterator, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
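+#
+# Note: the example below turns a plain Python list of source CIDRs into
+# one stateful_rule block per address via TerraformIterator and `dynamic`.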
+# +from imports.aws.networkfirewall_rule_group import NetworkfirewallRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ips = ["1.1.1.1/32", "1.0.0.1/32"] + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_dynamic_iterator0 = TerraformIterator.from_list( + Token.as_any(ips)) + NetworkfirewallRuleGroup(self, "example", + capacity=50, + description="Permits http traffic from source", + name="example", + rule_group=NetworkfirewallRuleGroupRuleGroup( + rules_source=NetworkfirewallRuleGroupRuleGroupRulesSource( + stateful_rule=example_dynamic_iterator0.dynamic({ + "action": "PASS", + "header": [{ + "destination": "ANY", + "destination_port": "ANY", + "direction": "ANY", + "protocol": "HTTP", + "source": example_dynamic_iterator0.value, + "source_port": "ANY" + } + ], + "rule_option": [{ + "keyword": "sid", + "settings": ["1"] + } + ] + }) + ) + ), + tags={ + "Name": "permit HTTP from source" + }, + type="STATEFUL" + ) +``` + +### Stateful Inspection for blocking packets from going to an intended destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_rule_group import NetworkfirewallRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallRuleGroup(self, "example", + capacity=100, + name="example", + rule_group=NetworkfirewallRuleGroupRuleGroup( + rules_source=NetworkfirewallRuleGroupRuleGroupRulesSource( + stateful_rule=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatefulRule( + action="DROP", + header=NetworkfirewallRuleGroupRuleGroupRulesSourceStatefulRuleHeader( + destination="124.1.1.24/32", + destination_port=Token.as_string(53), + direction="ANY", + protocol="TCP", + source="1.2.3.4/32", + source_port=Token.as_string(53) + ), + rule_option=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatefulRuleRuleOption( + keyword="sid", + settings=["1"] + ) + ] + ) + ] + ) + ), + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + }, + type="STATEFUL" + ) +``` + +### Stateful Inspection from rules specifications defined in Suricata flat format + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
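+#
+# Note: Fn.file reads the Suricata-compatible rules file (one rule per line)
+# and passes its contents to the `rules` argument as a single string.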
+# +from imports.aws.networkfirewall_rule_group import NetworkfirewallRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallRuleGroup(self, "example", + capacity=100, + name="example", + rules=Token.as_string(Fn.file("example.rules")), + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + }, + type="STATEFUL" + ) +``` + +### Stateful Inspection from rule group specifications using rule variables and Suricata format rules + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkfirewall_rule_group import NetworkfirewallRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallRuleGroup(self, "example", + capacity=100, + name="example", + rule_group=NetworkfirewallRuleGroupRuleGroup( + rule_variables=NetworkfirewallRuleGroupRuleGroupRuleVariables( + ip_sets=[NetworkfirewallRuleGroupRuleGroupRuleVariablesIpSets( + ip_set=NetworkfirewallRuleGroupRuleGroupRuleVariablesIpSetsIpSet( + definition=["10.0.0.0/16", "10.0.1.0/24", "192.168.0.0/16"] + ), + key="WEBSERVERS_HOSTS" + ), NetworkfirewallRuleGroupRuleGroupRuleVariablesIpSets( + ip_set=NetworkfirewallRuleGroupRuleGroupRuleVariablesIpSetsIpSet( + definition=["1.2.3.4/32"] + ), + key="EXTERNAL_HOST" + ) + ], + port_sets=[NetworkfirewallRuleGroupRuleGroupRuleVariablesPortSets( + key="HTTP_PORTS", + port_set=NetworkfirewallRuleGroupRuleGroupRuleVariablesPortSetsPortSet( + definition=["443", "80"] + ) + ) + ] + ), + rules_source=NetworkfirewallRuleGroupRuleGroupRulesSource( + rules_string=Token.as_string(Fn.file("suricata_rules_file")) + ) + ), + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + }, + type="STATEFUL" + ) +``` + +### Stateless Inspection with a Custom Action + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
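+#
+# Note: the custom action defined below ("ExampleMetricsAction") is referenced
+# by name alongside "aws:pass" in the stateless rule's `actions` list.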
+# +from imports.aws.networkfirewall_rule_group import NetworkfirewallRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallRuleGroup(self, "example", + capacity=100, + description="Stateless Rate Limiting Rule", + name="example", + rule_group=NetworkfirewallRuleGroupRuleGroup( + rules_source=NetworkfirewallRuleGroupRuleGroupRulesSource( + stateless_rules_and_custom_actions=NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActions( + custom_action=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsCustomAction( + action_definition=NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsCustomActionActionDefinition( + publish_metric_action=NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsCustomActionActionDefinitionPublishMetricAction( + dimension=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsCustomActionActionDefinitionPublishMetricActionDimension( + value="2" + ) + ] + ) + ), + action_name="ExampleMetricsAction" + ) + ], + stateless_rule=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRule( + priority=1, + rule_definition=NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRuleRuleDefinition( + actions=["aws:pass", "ExampleMetricsAction"], + match_attributes=NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRuleRuleDefinitionMatchAttributes( + destination=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRuleRuleDefinitionMatchAttributesDestination( + address_definition="124.1.1.5/32" + ) + ], + destination_port=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRuleRuleDefinitionMatchAttributesDestinationPort( + from_port=443, + to_port=443 + ) + ], + protocols=[6], + source=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRuleRuleDefinitionMatchAttributesSource( + address_definition="1.2.3.4/32" + ) + ], + source_port=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRuleRuleDefinitionMatchAttributesSourcePort( + from_port=443, + to_port=443 + ) + ], + tcp_flag=[NetworkfirewallRuleGroupRuleGroupRulesSourceStatelessRulesAndCustomActionsStatelessRuleRuleDefinitionMatchAttributesTcpFlag( + flags=["SYN"], + masks=["SYN", "ACK"] + ) + ] + ) + ) + ) + ] + ) + ) + ), + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + }, + type="STATELESS" + ) +``` + +### IP Set References to the Rule Group + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
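+#
+# Note: `reference_arn` below (`this_var.arn`) stands in for the ARN of a
+# resource such as an EC2 managed prefix list that supplies the IP set.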
+# +from imports.aws.networkfirewall_rule_group import NetworkfirewallRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkfirewallRuleGroup(self, "example", + capacity=100, + name="example", + rule_group=NetworkfirewallRuleGroupRuleGroup( + reference_sets=NetworkfirewallRuleGroupRuleGroupReferenceSets( + ip_set_references=[NetworkfirewallRuleGroupRuleGroupReferenceSetsIpSetReferences( + ip_set_reference=[NetworkfirewallRuleGroupRuleGroupReferenceSetsIpSetReferencesIpSetReference( + reference_arn=this_var.arn + ) + ], + key="example" + ) + ] + ), + rules_source=NetworkfirewallRuleGroupRuleGroupRulesSource( + rules_source_list=NetworkfirewallRuleGroupRuleGroupRulesSourceRulesSourceListStruct( + generated_rules_type="DENYLIST", + target_types=["HTTP_HOST"], + targets=["test.example.com"] + ) + ) + ), + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + }, + type="STATEFUL" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `capacity` - (Required, Forces new resource) The maximum number of operating resources that this rule group can use. For a stateless rule group, the capacity required is the sum of the capacity requirements of the individual rules. For a stateful rule group, the minimum capacity required is the number of individual rules. + +* `description` - (Optional) A friendly description of the rule group. + +* `encryption_configuration` - (Optional) KMS encryption configuration settings. See [Encryption Configuration](#encryption-configuration) below for details. + +* `name` - (Required, Forces new resource) A friendly name of the rule group. + +* `rule_group` - (Optional) A configuration block that defines the rule group rules. Required unless `rules` is specified. See [Rule Group](#rule-group) below for details. + +* `rules` - (Optional) The stateful rule group rules specifications in Suricata file format, with one rule per line. Use this to import your existing Suricata compatible rule groups. Required unless `rule_group` is specified. + +* `tags` - (Optional) A map of key:value pairs to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +* `type` - (Required) Whether the rule group is stateless (containing stateless rules) or stateful (containing stateful rules). Valid values include: `STATEFUL` or `STATELESS`. + +### Encryption Configuration + +`encryption_configuration` settings for customer managed KMS keys. Remove this block to use the default AWS-managed KMS encryption (rather than setting `type` to `AWS_OWNED_KMS_KEY`). + +* `key_id` - (Optional) The ID of the customer managed key. You can use any of the [key identifiers](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id) that KMS supports, unless you're using a key that's managed by another account. If you're using a key managed by another account, then specify the key ARN. +* `type` - (Required) The type of AWS KMS key to use for encryption of your Network Firewall resources. Valid values are `CUSTOMER_KMS` and `AWS_OWNED_KMS_KEY`. + +### Rule Group + +The `rule_group` block supports the following argument: + +* `reference_sets` - (Optional) A configuration block that defines the IP Set References for the rule group. 
See [Reference Sets](#reference-sets) below for details. Please note that there can only be a maximum of 5 `reference_sets` in a `rule_group`. See the [AWS documentation](https://docs.aws.amazon.com/network-firewall/latest/developerguide/rule-groups-ip-set-references.html#rule-groups-ip-set-reference-limits) for details.
+
+* `rule_variables` - (Optional) A configuration block that defines additional settings available to use in the rules defined in the rule group. Can only be specified for **stateful** rule groups. See [Rule Variables](#rule-variables) below for details.
+
+* `rules_source` - (Required) A configuration block that defines the stateful or stateless rules for the rule group. See [Rules Source](#rules-source) below for details.
+
+* `stateful_rule_options` - (Optional) A configuration block that defines stateful rule options for the rule group. See [Stateful Rule Options](#stateful-rule-options) below for details.
+
+### Reference Sets
+
+The `reference_sets` block supports the following arguments:
+
+* `ip_set_reference` - (Optional) Set of configuration blocks that define the IP Reference information. See [IP Set Reference](#ip-set-reference) below for details.
+
+### Rule Variables
+
+The `rule_variables` block supports the following arguments:
+
+* `ip_sets` - (Optional) Set of configuration blocks that define IP address information. See [IP Sets](#ip-sets) below for details.
+
+* `port_sets` - (Optional) Set of configuration blocks that define port range information. See [Port Sets](#port-sets) below for details.
+
+### IP Sets
+
+The `ip_sets` block supports the following arguments:
+
+* `key` - (Required) A unique alphanumeric string to identify the `ip_set`.
+
+* `ip_set` - (Required) A configuration block that defines a set of IP addresses. See [IP Set](#ip-set) below for details.
+
+### IP Set
+
+The `ip_set` configuration block supports the following argument:
+
+* `definition` - (Required) Set of IP addresses and address ranges, in CIDR notation.
+
+### IP Set Reference
+
+The `ip_set_reference` configuration block supports the following arguments:
+
+* `key` - (Required) A unique alphanumeric string to identify the `ip_set`.
+
+* `reference_arn` - (Required) The ARN of the managed prefix list that provides the IP set.
+
+### Port Sets
+
+The `port_sets` block supports the following arguments:
+
+* `key` - (Required) A unique alphanumeric string to identify the `port_set`.
+
+* `port_set` - (Required) A configuration block that defines a set of port ranges. See [Port Set](#port-set) below for details.
+
+### Port Set
+
+The `port_set` configuration block supports the following argument:
+
+* `definition` - (Required) Set of port ranges.
+
+### Rules Source
+
+The `rules_source` block supports the following arguments:
+
+~> **NOTE:** Only one of `rules_source_list`, `rules_string`, `stateful_rule`, or `stateless_rules_and_custom_actions` must be specified.
+
+* `rules_source_list` - (Optional) A configuration block containing **stateful** inspection criteria for a domain list rule group. See [Rules Source List](#rules-source-list) below for details.
+
+* `rules_string` - (Optional) The fully qualified name of a file in an S3 bucket that contains Suricata compatible intrusion prevention system (IPS) rules or the Suricata rules as a string. These rules contain **stateful** inspection criteria and the action to take for traffic that matches the criteria.
+
+* `stateful_rule` - (Optional) Set of configuration blocks containing **stateful** inspection criteria for 5-tuple rules to be used together in a rule group. See [Stateful Rule](#stateful-rule) below for details.
+
+* `stateless_rules_and_custom_actions` - (Optional) A configuration block containing **stateless** inspection criteria for a stateless rule group. See [Stateless Rules and Custom Actions](#stateless-rules-and-custom-actions) below for details.
+
+### Stateful Rule Options
+
+The `stateful_rule_options` block supports the following argument:
+
+~> **NOTE:** If the `STRICT_ORDER` rule order is specified, this rule group can only be referenced in firewall policies that also utilize `STRICT_ORDER` for the stateful engine. `STRICT_ORDER` can only be specified when using a `rules_source` of `rules_string` or `stateful_rule`.
+
+* `rule_order` - (Required) Indicates how to manage the order of the rule evaluation for the rule group. Default value: `DEFAULT_ACTION_ORDER`. Valid values: `DEFAULT_ACTION_ORDER`, `STRICT_ORDER`.
+
+### Rules Source List
+
+The `rules_source_list` block supports the following arguments:
+
+* `generated_rules_type` - (Required) String value to specify whether domains in the target list are allowed or denied access. Valid values: `ALLOWLIST`, `DENYLIST`.
+
+* `target_types` - (Required) Set of types of domain specifications that are provided in the `targets` argument. Valid values: `HTTP_HOST`, `TLS_SNI`.
+
+* `targets` - (Required) Set of domains that you want to inspect for in your traffic flows.
+
+### Stateful Rule
+
+The `stateful_rule` block supports the following arguments:
+
+* `action` - (Required) Action to take with packets in a traffic flow when the flow matches the stateful rule criteria. For all actions, AWS Network Firewall performs the specified action and discontinues stateful inspection of the traffic flow. Valid values: `ALERT`, `DROP` or `PASS`.
+
+* `header` - (Required) A configuration block containing the stateful 5-tuple inspection criteria for the rule, used to inspect traffic flows. See [Header](#header) below for details.
+
+* `rule_option` - (Required) Set of configuration blocks containing additional settings for a stateful rule. See [Rule Option](#rule-option) below for details.
+
+### Stateless Rules and Custom Actions
+
+The `stateless_rules_and_custom_actions` block supports the following arguments:
+
+* `custom_action` - (Optional) Set of configuration blocks containing custom action definitions that are available for use by the set of `stateless_rule` blocks. See [Custom Action](#custom-action) below for details.
+
+* `stateless_rule` - (Required) Set of configuration blocks containing the stateless rules for use in the stateless rule group. See [Stateless Rule](#stateless-rule) below for details.
+
+### Header
+
+The `header` block supports the following arguments:
+
+* `destination` - (Required) The destination IP address or address range to inspect for, in CIDR notation. To match with any address, specify `ANY`.
+
+* `destination_port` - (Required) The destination port to inspect for. To match with any port, specify `ANY`.
+
+* `direction` - (Required) The direction of traffic flow to inspect. Valid values: `ANY` or `FORWARD`.
+
+* `protocol` - (Required) The protocol to inspect. Valid values: `IP`, `TCP`, `UDP`, `ICMP`, `HTTP`, `FTP`, `TLS`, `SMB`, `DNS`, `DCERPC`, `SSH`, `SMTP`, `IMAP`, `MSN`, `KRB5`, `IKEV2`, `TFTP`, `NTP`, `DHCP`.
+
+* `source` - (Required) The source IP address or address range to inspect for, in CIDR notation.
To match with any address, specify `ANY`.
+
+* `source_port` - (Required) The source port to inspect for. To match with any port, specify `ANY`.
+
+### Rule Option
+
+The `rule_option` block supports the following arguments:
+
+* `keyword` - (Required) Keyword defined by open source detection systems like Snort or Suricata for stateful rule inspection.
+See [Snort General Rule Options](http://manual-snort-org.s3-website-us-east-1.amazonaws.com/node31.html) or [Suricata Rule Options](https://suricata.readthedocs.io/en/suricata-5.0.1/rules/intro.html#rule-options) for more details.
+* `settings` - (Optional) Set of strings for additional settings to use in stateful rule inspection.
+
+### Custom Action
+
+The `custom_action` block supports the following arguments:
+
+* `action_definition` - (Required) A configuration block describing the custom action associated with the `action_name`. See [Action Definition](#action-definition) below for details.
+
+* `action_name` - (Required, Forces new resource) A friendly name of the custom action.
+
+### Stateless Rule
+
+The `stateless_rule` block supports the following arguments:
+
+* `priority` - (Required) A setting that indicates the order in which to run this rule relative to all of the rules that are defined for a stateless rule group. AWS Network Firewall evaluates the rules in a rule group starting with the lowest priority setting.
+
+* `rule_definition` - (Required) A configuration block defining the stateless 5-tuple packet inspection criteria and the action to take on a packet that matches the criteria. See [Rule Definition](#rule-definition) below for details.
+
+### Rule Definition
+
+The `rule_definition` block supports the following arguments:
+
+* `actions` - (Required) Set of actions to take on a packet that matches one of the stateless rule definition's `match_attributes`. For every rule you must specify one standard action, and you can add custom actions. Standard actions include: `aws:pass`, `aws:drop`, `aws:forward_to_sfe`.
+
+* `match_attributes` - (Required) A configuration block containing criteria for AWS Network Firewall to use to inspect an individual packet in stateless rule inspection. See [Match Attributes](#match-attributes) below for details.
+
+### Match Attributes
+
+The `match_attributes` block supports the following arguments:
+
+* `destination` - (Optional) Set of configuration blocks describing the destination IP address and address ranges to inspect for, in CIDR notation. If not specified, this matches with any destination address. See [Destination](#destination) below for details.
+
+* `destination_port` - (Optional) Set of configuration blocks describing the destination ports to inspect for. If not specified, this matches with any destination port. See [Destination Port](#destination-port) below for details.
+
+* `protocols` - (Optional) Set of protocols to inspect for, specified using each protocol's IANA-assigned internet protocol number. If not specified, this matches with any protocol.
+
+* `source` - (Optional) Set of configuration blocks describing the source IP address and address ranges to inspect for, in CIDR notation. If not specified, this matches with any source address. See [Source](#source) below for details.
+
+* `source_port` - (Optional) Set of configuration blocks describing the source ports to inspect for. If not specified, this matches with any source port. See [Source Port](#source-port) below for details.
+
+* `tcp_flag` - (Optional) Set of configuration blocks containing the TCP flags and masks to inspect for. If not specified, this matches with any settings. See [TCP Flag](#tcp-flag) below for details.
+
+### Action Definition
+
+The `action_definition` block supports the following argument:
+
+* `publish_metric_action` - (Required) A configuration block describing the stateless inspection criteria that publishes the specified metrics to Amazon CloudWatch for the matching packet. You can pair this custom action with any of the standard stateless rule actions. See [Publish Metric Action](#publish-metric-action) below for details.
+
+### Publish Metric Action
+
+The `publish_metric_action` block supports the following argument:
+
+* `dimension` - (Required) Set of configuration blocks containing the dimension settings to use for Amazon CloudWatch custom metrics. See [Dimension](#dimension) below for details.
+
+### Dimension
+
+The `dimension` block supports the following argument:
+
+* `value` - (Required) The value to use in the custom metric dimension.
+
+### Destination
+
+The `destination` block supports the following argument:
+
+* `address_definition` - (Required) An IP address or a block of IP addresses in CIDR notation. AWS Network Firewall supports all address ranges for IPv4.
+
+### Destination Port
+
+The `destination_port` block supports the following arguments:
+
+* `from_port` - (Required) The lower limit of the port range. This must be less than or equal to the `to_port`.
+
+* `to_port` - (Optional) The upper limit of the port range. This must be greater than or equal to the `from_port`.
+
+### Source
+
+The `source` block supports the following argument:
+
+* `address_definition` - (Required) An IP address or a block of IP addresses in CIDR notation. AWS Network Firewall supports all address ranges for IPv4.
+
+### Source Port
+
+The `source_port` block supports the following arguments:
+
+* `from_port` - (Required) The lower limit of the port range. This must be less than or equal to the `to_port`.
+
+* `to_port` - (Optional) The upper limit of the port range. This must be greater than or equal to the `from_port`.
+
+### TCP Flag
+
+The `tcp_flag` block supports the following arguments:
+
+* `flags` - (Required) Set of flags to look for in a packet. This setting can only specify values that are also specified in `masks`.
+Valid values: `FIN`, `SYN`, `RST`, `PSH`, `ACK`, `URG`, `ECE`, `CWR`.
+
+* `masks` - (Optional) Set of flags to consider in the inspection. To inspect all flags, leave this empty.
+Valid values: `FIN`, `SYN`, `RST`, `PSH`, `ACK`, `URG`, `ECE`, `CWR`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) that identifies the rule group.
+
+* `arn` - The Amazon Resource Name (ARN) that identifies the rule group.
+
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+* `update_token` - A string token used when updating the rule group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Rule Groups using their `arn`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Network Firewall Rule Groups using their `arn`. For example:
+
+```console
+% terraform import aws_networkfirewall_rule_group.example arn:aws:network-firewall:us-west-1:123456789012:stateful-rulegroup/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/networkmanager_attachment_accepter.html.markdown b/website/docs/cdktf/python/r/networkmanager_attachment_accepter.html.markdown
new file mode 100644
index 00000000000..ab98b43bf54
--- /dev/null
+++ b/website/docs/cdktf/python/r/networkmanager_attachment_accepter.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_attachment_accepter"
+description: |-
+  Terraform resource for managing an AWS NetworkManager Attachment Accepter.
+---
+
+
+
+# Resource: aws_networkmanager_attachment_accepter
+
+Terraform resource for managing an AWS NetworkManager Attachment Accepter.
+
+## Example Usage
+
+### Example with VPC attachment
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkmanager_attachment_accepter import NetworkmanagerAttachmentAccepter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkmanagerAttachmentAccepter(self, "test",
+            attachment_id=vpc.id,
+            attachment_type=vpc.attachment_type
+        )
+```
+
+### Example with site-to-site VPN attachment
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkmanager_attachment_accepter import NetworkmanagerAttachmentAccepter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkmanagerAttachmentAccepter(self, "test",
+            attachment_id=vpn.id,
+            attachment_type=vpn.attachment_type
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+- `attachment_id` - (Required) The ID of the attachment.
+- `attachment_type` - (Required) The type of attachment. Valid values can be found in the [AWS Documentation](https://docs.aws.amazon.com/networkmanager/latest/APIReference/API_ListAttachments.html#API_ListAttachments_RequestSyntax).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `attachment_policy_rule_number` - The policy rule number associated with the attachment.
+- `core_network_arn` - The ARN of a core network.
+- `core_network_id` - The ID of a core network.
+- `edge_location` - The Region where the edge is located.
+- `owner_account_id` - The ID of the attachment account owner.
+- `resource_arn` - The attachment resource ARN.
+- `segment_name` - The name of the segment attachment.
+- `state` - The state of the attachment.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/networkmanager_connect_attachment.html.markdown b/website/docs/cdktf/python/r/networkmanager_connect_attachment.html.markdown
new file mode 100644
index 00000000000..4baf91f7eb0
--- /dev/null
+++ b/website/docs/cdktf/python/r/networkmanager_connect_attachment.html.markdown
@@ -0,0 +1,145 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_connect_attachment"
+description: |-
+  Terraform resource for managing an AWS NetworkManager ConnectAttachment.
+---
+
+
+
+# Resource: aws_networkmanager_connect_attachment
+
+Terraform resource for managing an AWS NetworkManager ConnectAttachment.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkmanager_connect_attachment import NetworkmanagerConnectAttachment
+from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = NetworkmanagerVpcAttachment(self, "example",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            subnet_arns=Token.as_list(property_access(aws_subnet_example, ["*", "arn"])),
+            vpc_arn=Token.as_string(aws_vpc_example.arn)
+        )
+        aws_networkmanager_connect_attachment_example = NetworkmanagerConnectAttachment(self, "example_1",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            edge_location=example.edge_location,
+            options=NetworkmanagerConnectAttachmentOptions(
+                protocol="GRE"
+            ),
+            transport_attachment_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_connect_attachment_example.override_logical_id("example")
+```
+
+### Usage with attachment accepter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkmanager_attachment_accepter import NetworkmanagerAttachmentAccepter
+from imports.aws.networkmanager_connect_attachment import NetworkmanagerConnectAttachment
+from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = NetworkmanagerVpcAttachment(self, "example",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            subnet_arns=Token.as_list(property_access(aws_subnet_example, ["*", "arn"])),
+            vpc_arn=Token.as_string(aws_vpc_example.arn)
+        )
+        aws_networkmanager_attachment_accepter_example = NetworkmanagerAttachmentAccepter(self, "example_1",
+            attachment_id=example.id,
+            attachment_type=example.attachment_type
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_attachment_accepter_example.override_logical_id("example")
+        aws_networkmanager_connect_attachment_example = NetworkmanagerConnectAttachment(self, "example_2",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            depends_on=["aws_networkmanager_attachment_accepter.test"],
+            edge_location=example.edge_location,
+            options=NetworkmanagerConnectAttachmentOptions(
+                protocol="GRE"
+            ),
+            transport_attachment_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_connect_attachment_example.override_logical_id("example")
+        NetworkmanagerAttachmentAccepter(self, "example2",
+            attachment_id=Token.as_string(aws_networkmanager_connect_attachment_example.id),
+            attachment_type=Token.as_string(aws_networkmanager_connect_attachment_example.attachment_type)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+- `core_network_id` - (Required) The ID of a core network where you want to create the attachment.
+- `transport_attachment_id` - (Required) The ID of the attachment between the two connections.
+- `edge_location` - (Required) The Region where the edge is located.
+- `options` - (Required) Options for creating an attachment.
+
+The following arguments are optional:
+
+- `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - The ARN of the attachment.
+- `attachment_policy_rule_number` - The policy rule number associated with the attachment.
+- `attachment_type` - The type of attachment.
+- `core_network_arn` - The ARN of a core network.
+- `core_network_id` - The ID of a core network.
+- `edge_location` - The Region where the edge is located.
+- `id` - The ID of the attachment.
+- `owner_account_id` - The ID of the attachment account owner.
+- `resource_arn` - The attachment resource ARN.
+- `segment_name` - The name of the segment attachment.
+- `state` - The state of the attachment.
+- `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_connect_attachment` using the attachment ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_networkmanager_connect_attachment` using the attachment ID.
For example: + +```console +% terraform import aws_networkmanager_connect_attachment.example attachment-0f8fa60d2238d1bd8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_connect_peer.html.markdown b/website/docs/cdktf/python/r/networkmanager_connect_peer.html.markdown new file mode 100644 index 00000000000..b5c5b6a7266 --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_connect_peer.html.markdown @@ -0,0 +1,164 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_connect_peer" +description: |- + Terraform resource for managing an AWS NetworkManager Connect Peer. +--- + + + +# Resource: aws_networkmanager_connect_peer + +Terraform resource for managing an AWS NetworkManager Connect Peer. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_connect_attachment import NetworkmanagerConnectAttachment +from imports.aws.networkmanager_connect_peer import NetworkmanagerConnectPeer +from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = NetworkmanagerVpcAttachment(self, "example", + core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id), + subnet_arns=Token.as_list(property_access(aws_subnet_example, ["*", "arn"])), + vpc_arn=Token.as_string(aws_vpc_example.arn) + ) + aws_networkmanager_connect_attachment_example = + NetworkmanagerConnectAttachment(self, "example_1", + core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id), + edge_location=example.edge_location, + options=NetworkmanagerConnectAttachmentOptions( + protocol="GRE" + ), + transport_attachment_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_connect_attachment_example.override_logical_id("example") + aws_networkmanager_connect_peer_example = NetworkmanagerConnectPeer(self, "example_2", + bgp_options=NetworkmanagerConnectPeerBgpOptions( + peer_asn=65000 + ), + connect_attachment_id=Token.as_string(aws_networkmanager_connect_attachment_example.id), + inside_cidr_blocks=["172.16.0.0/16"], + peer_address="127.0.0.1" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_connect_peer_example.override_logical_id("example") +``` + +### Usage with attachment accepter + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
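+# NOTE: NetworkmanagerConnectAttachmentOptions and NetworkmanagerConnectPeerBgpOptions
+# are assumed to be exported by the same generated modules as their resource classes.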
+#
+from imports.aws.networkmanager_attachment_accepter import NetworkmanagerAttachmentAccepter
+from imports.aws.networkmanager_connect_attachment import NetworkmanagerConnectAttachment, NetworkmanagerConnectAttachmentOptions
+from imports.aws.networkmanager_connect_peer import NetworkmanagerConnectPeer, NetworkmanagerConnectPeerBgpOptions
+from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = NetworkmanagerVpcAttachment(self, "example",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            subnet_arns=Token.as_list(property_access(aws_subnet_example, ["*", "arn"])),
+            vpc_arn=Token.as_string(aws_vpc_example.arn)
+        )
+        aws_networkmanager_attachment_accepter_example = NetworkmanagerAttachmentAccepter(self, "example_1",
+            attachment_id=example.id,
+            attachment_type=example.attachment_type
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_attachment_accepter_example.override_logical_id("example")
+        aws_networkmanager_connect_attachment_example = NetworkmanagerConnectAttachment(self, "example_2",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            depends_on=["aws_networkmanager_attachment_accepter.example"],
+            edge_location=example.edge_location,
+            options=NetworkmanagerConnectAttachmentOptions(
+                protocol="GRE"
+            ),
+            transport_attachment_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_connect_attachment_example.override_logical_id("example")
+        aws_networkmanager_connect_peer_example = NetworkmanagerConnectPeer(self, "example_3",
+            bgp_options=NetworkmanagerConnectPeerBgpOptions(
+                peer_asn=65500
+            ),
+            connect_attachment_id=Token.as_string(aws_networkmanager_connect_attachment_example.id),
+            depends_on=["aws_networkmanager_attachment_accepter.example2"],
+            inside_cidr_blocks=["172.16.0.0/16"],
+            peer_address="127.0.0.1"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_connect_peer_example.override_logical_id("example")
+        NetworkmanagerAttachmentAccepter(self, "example2",
+            attachment_id=Token.as_string(aws_networkmanager_connect_attachment_example.id),
+            attachment_type=Token.as_string(aws_networkmanager_connect_attachment_example.attachment_type)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+- `connect_attachment_id` - (Required) The ID of the connection attachment.
+- `inside_cidr_blocks` - (Required) The inside IP addresses used for BGP peering.
+- `peer_address` - (Required) The Connect peer address.
+
+The following arguments are optional:
+
+- `bgp_options` - (Optional) The Connect peer BGP options.
+- `core_network_address` - (Optional) A Connect peer core network address.
+- `tags` - (Optional) Key-value tags for the Connect peer. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - The ARN of the Connect peer.
+- `configuration` - The configuration of the Connect peer.
+- `core_network_id` - The ID of a core network. +- `edge_location` - The Region where the peer is located. +- `id` - The ID of the Connect peer. +- `state` - The state of the Connect peer. +- `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_connect_peer` using the connect peer ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_connect_peer` using the connect peer ID. For example: + +```console +% terraform import aws_networkmanager_connect_peer.example connect-peer-061f3e96275db1acc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_connection.html.markdown b/website/docs/cdktf/python/r/networkmanager_connection.html.markdown new file mode 100644 index 00000000000..4a5ac3ddc16 --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_connection.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_connection" +description: |- + Creates a connection between two devices. +--- + + + +# Resource: aws_networkmanager_connection + +Creates a connection between two devices. +The devices can be a physical or virtual appliance that connects to a third-party appliance in a VPC, or a physical appliance that connects to another physical appliance in an on-premises network. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_connection import NetworkmanagerConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerConnection(self, "example", + connected_device_id=example2.id, + device_id=example1.id, + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `connected_device_id` - (Required) The ID of the second device in the connection. +* `connected_link_id` - (Optional) The ID of the link for the second device. +* `description` - (Optional) A description of the connection. +* `device_id` - (Required) The ID of the first device in the connection. +* `global_network_id` - (Required) The ID of the global network. +* `link_id` - (Optional) The ID of the link for the first device. +* `tags` - (Optional) Key-value tags for the connection. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
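+
+If both devices sit on links, the optional link arguments tie each end of the connection to its link. The following is a minimal sketch in the same generated style; the `example1`/`example2` devices and the `aws_networkmanager_link_example1`/`aws_networkmanager_link_example2` references are placeholders for resources defined elsewhere in the stack.
+
+```python
+# A sketch only - placeholder references, not produced by 'cdktf convert'
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.networkmanager_connection import NetworkmanagerConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Link IDs are optional; each one names the link used by that device.
+        NetworkmanagerConnection(self, "example",
+            connected_device_id=example2.id,
+            connected_link_id=Token.as_string(aws_networkmanager_link_example2.id),
+            device_id=example1.id,
+            global_network_id=Token.as_string(aws_networkmanager_global_network_example.id),
+            link_id=Token.as_string(aws_networkmanager_link_example1.id)
+        )
+```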
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the connection. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_connection` using the connection ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_connection` using the connection ARN. For example: + +```console +% terraform import aws_networkmanager_connection.example arn:aws:networkmanager::123456789012:device/global-network-0d47f6t230mz46dy4/connection-07f6fd08867abc123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_core_network.html.markdown b/website/docs/cdktf/python/r/networkmanager_core_network.html.markdown new file mode 100644 index 00000000000..74b4bf215d9 --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_core_network.html.markdown @@ -0,0 +1,324 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_core_network" +description: |- + Provides a core network resource. +--- + + + +# Resource: aws_networkmanager_core_network + +Provides a core network resource. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerCoreNetwork(self, "example", + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id) + ) +``` + +### With description + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerCoreNetwork(self, "example", + description="example", + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id) + ) +``` + +### With tags + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerCoreNetwork(self, "example", + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id), + tags={ + "hello": "world" + } + ) +``` + +### With VPC Attachment (Single Region) + +The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `create_base_policy` argument to `true` if your core network does not currently have any `LIVE` policies (e.g. this is the first `terraform apply` with the core network resource), since a `LIVE` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `LIVE` policy, you may exclude the `create_base_policy` argument. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_core_network_policy_document import DataAwsNetworkmanagerCoreNetworkPolicyDocument +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +from imports.aws.networkmanager_core_network_policy_attachment import NetworkmanagerCoreNetworkPolicyAttachment +from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork +from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = NetworkmanagerGlobalNetwork(self, "example") + aws_networkmanager_core_network_example = NetworkmanagerCoreNetwork(self, "example_1", + create_base_policy=True, + global_network_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_core_network_example.override_logical_id("example") + aws_networkmanager_vpc_attachment_example = + NetworkmanagerVpcAttachment(self, "example_2", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + subnet_arns=Token.as_list( + property_access(aws_subnet_example, ["*", "arn"])), + vpc_arn=Token.as_string(aws_vpc_example.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_networkmanager_vpc_attachment_example.override_logical_id("example") + data_aws_networkmanager_core_network_policy_document_example = + DataAwsNetworkmanagerCoreNetworkPolicyDocument(self, "example_3", + core_network_configuration=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfiguration( + asn_ranges=["65022-65534"], + edge_locations=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + location="us-west-2" + ) + ] + ) + ], + segment_actions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions( + action="create-route", + destination_cidr_blocks=["0.0.0.0/0"], + destinations=[ + Token.as_string(aws_networkmanager_vpc_attachment_example.id) + ], + segment="segment" + ) + ], + segments=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + name="segment" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_networkmanager_core_network_policy_document_example.override_logical_id("example") + aws_networkmanager_core_network_policy_attachment_example = + NetworkmanagerCoreNetworkPolicyAttachment(self, "example_4", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + policy_document=Token.as_string(data_aws_networkmanager_core_network_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_core_network_policy_attachment_example.override_logical_id("example") +``` + +### With VPC Attachment (Multi-Region) + +The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `create_base_policy` argument of the [`aws_networkmanager_core_network` resource](/docs/providers/aws/r/networkmanager_core_network.html) to `true` if your core network does not currently have any `LIVE` policies (e.g. this is the first `terraform apply` with the core network resource), since a `LIVE` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `LIVE` policy, you may exclude the `create_base_policy` argument. For multi-region in a core network that does not yet have a `LIVE` policy, pass a list of regions to the `aws_networkmanager_core_network` `base_policy_regions` argument. In the example below, `us-west-2` and `us-east-1` are specified in the base policy. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
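+# NOTE: this multi-Region example assumes a second, aliased AWS provider
+# configuration for us-east-1, referenced below as provider="alternate";
+# define that alias in the stack before synthesizing.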
+# +from imports.aws.data_aws_networkmanager_core_network_policy_document import DataAwsNetworkmanagerCoreNetworkPolicyDocument +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +from imports.aws.networkmanager_core_network_policy_attachment import NetworkmanagerCoreNetworkPolicyAttachment +from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork +from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = NetworkmanagerGlobalNetwork(self, "example") + aws_networkmanager_core_network_example = NetworkmanagerCoreNetwork(self, "example_1", + base_policy_regions=["us-west-2", "us-east-1"], + create_base_policy=True, + global_network_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_core_network_example.override_logical_id("example") + example_us_east1 = NetworkmanagerVpcAttachment(self, "example_us_east_1", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + provider="alternate", + subnet_arns=Token.as_list( + property_access(aws_subnet_example_us_east1, ["*", "arn"])), + vpc_arn=Token.as_string(aws_vpc_example_us_east1.arn) + ) + example_us_west2 = NetworkmanagerVpcAttachment(self, "example_us_west_2", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + subnet_arns=Token.as_list( + property_access(aws_subnet_example_us_west2, ["*", "arn"])), + vpc_arn=Token.as_string(aws_vpc_example_us_west2.arn) + ) + data_aws_networkmanager_core_network_policy_document_example = + DataAwsNetworkmanagerCoreNetworkPolicyDocument(self, "example_4", + core_network_configuration=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfiguration( + asn_ranges=["65022-65534"], + edge_locations=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + location="us-west-2" + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + location="us-east-1" + ) + ] + ) + ], + segment_actions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions( + action="create-route", + destination_cidr_blocks=["10.0.0.0/16"], + destinations=[example_us_west2.id], + segment="segment" + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions( + action="create-route", + destination_cidr_blocks=["10.1.0.0/16"], + destinations=[example_us_east1.id], + segment="segment" + ) + ], + segments=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + name="segment" + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + name="segment2" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_networkmanager_core_network_policy_document_example.override_logical_id("example") + aws_networkmanager_core_network_policy_attachment_example = + NetworkmanagerCoreNetworkPolicyAttachment(self, "example_5", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + policy_document=Token.as_string(data_aws_networkmanager_core_network_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_networkmanager_core_network_policy_attachment_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the Core Network. +* `base_policy_region` - (Optional, **Deprecated** use the `base_policy_regions` argument instead) The base policy created by setting the `create_base_policy` argument to `true` requires a region to be set in the `edge-locations`, `location` key. If `base_policy_region` is not specified, the region used in the base policy defaults to the region specified in the `provider` block. +* `base_policy_regions` - (Optional) A list of regions to add to the base policy. The base policy created by setting the `create_base_policy` argument to `true` requires one or more regions to be set in the `edge-locations`, `location` key. If `base_policy_regions` is not specified, the region used in the base policy defaults to the region specified in the `provider` block. +* `create_base_policy` - (Optional) Specifies whether to create a base policy when a core network is created or updated. A base policy is created and set to `LIVE` to allow attachments to the core network (e.g. VPC Attachments) before applying a policy document provided using the [`aws_networkmanager_core_network_policy_attachment` resource](/docs/providers/aws/r/networkmanager_core_network_policy_attachment.html). This base policy is needed if your core network does not have any `LIVE` policies and your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Valid values are `true` or `false`. An example of this Terraform snippet can be found above [for VPC Attachment in a single region](#with-vpc-attachment-single-region) and [for VPC Attachment multi-region](#with-vpc-attachment-multi-region). An example base policy is shown below. This base policy is overridden with the policy that you specify in the [`aws_networkmanager_core_network_policy_attachment` resource](/docs/providers/aws/r/networkmanager_core_network_policy_attachment.html). + +```json +{ + "version": "2021.12", + "core-network-configuration": { + "asn-ranges": [ + "64512-65534" + ], + "vpn-ecmp-support": false, + "edge-locations": [ + { + "location": "us-east-1" + } + ] + }, + "segments": [ + { + "name": "segment", + "description": "base-policy", + "isolate-attachments": false, + "require-attachment-acceptance": false + } + ] +} +``` + +* `global_network_id` - (Required) The ID of the global network that a core network will be a part of. +* `tags` - (Optional) Key-value tags for the Core Network. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `delete` - (Default `30m`) +* `update` - (Default `30m`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Core Network Amazon Resource Name (ARN). +* `created_at` - Timestamp when a core network was created. +* `edges` - One or more blocks detailing the edges within a core network. [Detailed below](#edges). +* `id` - Core Network ID. 
+* `segments` - One or more blocks detailing the segments within a core network. [Detailed below](#segments). +* `state` - Current state of a core network. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### `edges` + +The `edges` configuration block supports the following arguments: + +* `asn` - ASN of a core network edge. +* `edge_location` - Region where a core network edge is located. +* `inside_cidr_blocks` - Inside IP addresses used for core network edges. + +### `segments` + +The `segments` configuration block supports the following arguments: + +* `edge_locations` - Regions where the edges are located. +* `name` - Name of a core network segment. +* `shared_segments` - Shared segments of a core network. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_core_network` using the core network ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_core_network` using the core network ID. For example: + +```console +% terraform import aws_networkmanager_core_network.example core-network-0d47f6t230mz46dy4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_core_network_policy_attachment.html.markdown b/website/docs/cdktf/python/r/networkmanager_core_network_policy_attachment.html.markdown new file mode 100644 index 00000000000..1f44ccef2f6 --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_core_network_policy_attachment.html.markdown @@ -0,0 +1,238 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_core_network_policy_attachment" +description: |- + Provides a Core Network Policy Attachment resource. +--- + + + +# Resource: aws_networkmanager_core_network_policy_attachment + +Provides a Core Network Policy Attachment resource. This puts a Core Network Policy to an existing Core Network and executes the change set, which deploys changes globally based on the policy submitted (Sets the policy to `LIVE`). + +~> **NOTE:** Deleting this resource will not delete the current policy defined in this resource. Deleting this resource will also not revert the current `LIVE` policy to the previous version. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
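+# NOTE: this example assumes a core network policy document data source defined
+# elsewhere in the stack, referenced below as
+# data_aws_networkmanager_core_network_policy_document_example.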
+# +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +from imports.aws.networkmanager_core_network_policy_attachment import NetworkmanagerCoreNetworkPolicyAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = NetworkmanagerCoreNetwork(self, "example", + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id) + ) + aws_networkmanager_core_network_policy_attachment_example = + NetworkmanagerCoreNetworkPolicyAttachment(self, "example_1", + core_network_id=example.id, + policy_document=Token.as_string(data_aws_networkmanager_core_network_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_core_network_policy_attachment_example.override_logical_id("example") +``` + +### With VPC Attachment (Single Region) + +The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `create_base_policy` argument of the [`aws_networkmanager_core_network` resource](/docs/providers/aws/r/networkmanager_core_network.html) to `true` if your core network does not currently have any `LIVE` policies (e.g. this is the first `terraform apply` with the core network resource), since a `LIVE` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `LIVE` policy, you may exclude the `create_base_policy` argument. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_networkmanager_core_network_policy_document import DataAwsNetworkmanagerCoreNetworkPolicyDocument +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +from imports.aws.networkmanager_core_network_policy_attachment import NetworkmanagerCoreNetworkPolicyAttachment +from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork +from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = NetworkmanagerGlobalNetwork(self, "example") + aws_networkmanager_core_network_example = NetworkmanagerCoreNetwork(self, "example_1", + create_base_policy=True, + global_network_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_core_network_example.override_logical_id("example") + aws_networkmanager_vpc_attachment_example = + NetworkmanagerVpcAttachment(self, "example_2", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + subnet_arns=Token.as_list( + property_access(aws_subnet_example, ["*", "arn"])), + vpc_arn=Token.as_string(aws_vpc_example.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_networkmanager_vpc_attachment_example.override_logical_id("example") + data_aws_networkmanager_core_network_policy_document_example = + DataAwsNetworkmanagerCoreNetworkPolicyDocument(self, "example_3", + core_network_configuration=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfiguration( + asn_ranges=["65022-65534"], + edge_locations=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + location="us-west-2" + ) + ] + ) + ], + segment_actions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions( + action="create-route", + destination_cidr_blocks=["0.0.0.0/0"], + destinations=[ + Token.as_string(aws_networkmanager_vpc_attachment_example.id) + ], + segment="segment" + ) + ], + segments=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + name="segment" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_networkmanager_core_network_policy_document_example.override_logical_id("example") + aws_networkmanager_core_network_policy_attachment_example = + NetworkmanagerCoreNetworkPolicyAttachment(self, "example_4", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + policy_document=Token.as_string(data_aws_networkmanager_core_network_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_core_network_policy_attachment_example.override_logical_id("example") +``` + +### With VPC Attachment (Multi-Region) + +The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `create_base_policy` argument of the [`aws_networkmanager_core_network` resource](/docs/providers/aws/r/networkmanager_core_network.html) to `true` if your core network does not currently have any `LIVE` policies (e.g. this is the first `terraform apply` with the core network resource), since a `LIVE` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `LIVE` policy, you may exclude the `create_base_policy` argument. For multi-region in a core network that does not yet have a `LIVE` policy, pass a list of regions to the `aws_networkmanager_core_network` `base_policy_regions` argument. In the example below, `us-west-2` and `us-east-1` are specified in the base policy. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
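+# NOTE: this multi-Region example assumes an aliased AWS provider configuration
+# for us-east-1, referenced below as provider="alternate".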
+# +from imports.aws.data_aws_networkmanager_core_network_policy_document import DataAwsNetworkmanagerCoreNetworkPolicyDocument +from imports.aws.networkmanager_core_network import NetworkmanagerCoreNetwork +from imports.aws.networkmanager_core_network_policy_attachment import NetworkmanagerCoreNetworkPolicyAttachment +from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork +from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = NetworkmanagerGlobalNetwork(self, "example") + aws_networkmanager_core_network_example = NetworkmanagerCoreNetwork(self, "example_1", + base_policy_regions=["us-west-2", "us-east-1"], + create_base_policy=True, + global_network_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_core_network_example.override_logical_id("example") + example_us_east1 = NetworkmanagerVpcAttachment(self, "example_us_east_1", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + provider="alternate", + subnet_arns=Token.as_list( + property_access(aws_subnet_example_us_east1, ["*", "arn"])), + vpc_arn=Token.as_string(aws_vpc_example_us_east1.arn) + ) + example_us_west2 = NetworkmanagerVpcAttachment(self, "example_us_west_2", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + subnet_arns=Token.as_list( + property_access(aws_subnet_example_us_west2, ["*", "arn"])), + vpc_arn=Token.as_string(aws_vpc_example_us_west2.arn) + ) + data_aws_networkmanager_core_network_policy_document_example = + DataAwsNetworkmanagerCoreNetworkPolicyDocument(self, "example_4", + core_network_configuration=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfiguration( + asn_ranges=["65022-65534"], + edge_locations=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + location="us-west-2" + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations( + location="us-east-1" + ) + ] + ) + ], + segment_actions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions( + action="create-route", + destination_cidr_blocks=["10.0.0.0/16"], + destinations=[example_us_west2.id], + segment="segment" + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions( + action="create-route", + destination_cidr_blocks=["10.1.0.0/16"], + destinations=[example_us_east1.id], + segment="segment" + ) + ], + segments=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + name="segment" + ), DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments( + name="segment2" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_networkmanager_core_network_policy_document_example.override_logical_id("example") + aws_networkmanager_core_network_policy_attachment_example = + NetworkmanagerCoreNetworkPolicyAttachment(self, "example_5", + core_network_id=Token.as_string(aws_networkmanager_core_network_example.id), + policy_document=Token.as_string(data_aws_networkmanager_core_network_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_networkmanager_core_network_policy_attachment_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `core_network_id` - (Required) The ID of the core network that a policy will be attached to and made `LIVE`. +* `policy_document` - (Required) Policy document for creating a core network. Note that updating this argument will result in the new policy document version being set as the `LATEST` and `LIVE` policy document. Refer to the [Core network policies documentation](https://docs.aws.amazon.com/network-manager/latest/cloudwan/cloudwan-policy-change-sets.html) for more information. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `update` - (Default `30m`). If this is the first time attaching a policy to a core network then this timeout value is also used as the `create` timeout value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `state` - Current state of a core network. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_core_network_policy_attachment` using the core network ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_core_network_policy_attachment` using the core network ID. For example: + +```console +% terraform import aws_networkmanager_core_network_policy_attachment.example core-network-0d47f6t230mz46dy4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_customer_gateway_association.html.markdown b/website/docs/cdktf/python/r/networkmanager_customer_gateway_association.html.markdown new file mode 100644 index 00000000000..0595ffe2ae7 --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_customer_gateway_association.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_customer_gateway_association" +description: |- + Associates a customer gateway with a device and optionally, with a link. +--- + + + +# Resource: aws_networkmanager_customer_gateway_association + +Associates a customer gateway with a device and optionally, with a link. +If you specify a link, it must be associated with the specified device. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.customer_gateway import CustomerGateway +from imports.aws.ec2_transit_gateway import Ec2TransitGateway +from imports.aws.networkmanager_customer_gateway_association import NetworkmanagerCustomerGatewayAssociation +from imports.aws.networkmanager_device import NetworkmanagerDevice +from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork +from imports.aws.networkmanager_site import NetworkmanagerSite +from imports.aws.networkmanager_transit_gateway_registration import NetworkmanagerTransitGatewayRegistration +from imports.aws.vpn_connection import VpnConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CustomerGateway(self, "example", + bgp_asn=Token.as_string(65000), + ip_address="172.83.124.10", + type="ipsec.1" + ) + aws_ec2_transit_gateway_example = Ec2TransitGateway(self, "example_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ec2_transit_gateway_example.override_logical_id("example") + aws_networkmanager_global_network_example = + NetworkmanagerGlobalNetwork(self, "example_2", + description="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_global_network_example.override_logical_id("example") + aws_networkmanager_site_example = NetworkmanagerSite(self, "example_3", + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_site_example.override_logical_id("example") + aws_vpn_connection_example = VpnConnection(self, "example_4", + customer_gateway_id=example.id, + static_routes_only=True, + transit_gateway_id=Token.as_string(aws_ec2_transit_gateway_example.id), + type=example.type + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_vpn_connection_example.override_logical_id("example") + aws_networkmanager_device_example = NetworkmanagerDevice(self, "example_5", + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id), + site_id=Token.as_string(aws_networkmanager_site_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_device_example.override_logical_id("example") + aws_networkmanager_transit_gateway_registration_example = + NetworkmanagerTransitGatewayRegistration(self, "example_6", + depends_on=[aws_vpn_connection_example], + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id), + transit_gateway_arn=Token.as_string(aws_ec2_transit_gateway_example.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_networkmanager_transit_gateway_registration_example.override_logical_id("example") + aws_networkmanager_customer_gateway_association_example = + NetworkmanagerCustomerGatewayAssociation(self, "example_7", + customer_gateway_arn=example.arn, + depends_on=[aws_networkmanager_transit_gateway_registration_example], + device_id=Token.as_string(aws_networkmanager_device_example.id), + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_customer_gateway_association_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `customer_gateway_arn` - (Required) The Amazon Resource Name (ARN) of the customer gateway. +* `device_id` - (Required) The ID of the device. +* `global_network_id` - (Required) The ID of the global network. +* `link_id` - (Optional) The ID of the link. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_customer_gateway_association` using the global network ID and customer gateway ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_customer_gateway_association` using the global network ID and customer gateway ARN. For example: + +```console +% terraform import aws_networkmanager_customer_gateway_association.example global-network-0d47f6t230mz46dy4,arn:aws:ec2:us-west-2:123456789012:customer-gateway/cgw-123abc05e04123abc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_device.html.markdown b/website/docs/cdktf/python/r/networkmanager_device.html.markdown new file mode 100644 index 00000000000..1b358a64e49 --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_device.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_device" +description: |- + Creates a device in a global network. +--- + + + +# Resource: aws_networkmanager_device + +Creates a device in a global network. If you specify both a site ID and a location, +the location of the site is used for visualization in the Network Manager console. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_device import NetworkmanagerDevice +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerDevice(self, "example", + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id), + site_id=Token.as_string(aws_networkmanager_site_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `aws_location` - (Optional) The AWS location of the device. Documented below. 
+* `description` - (Optional) A description of the device. +* `global_network_id` - (Required) The ID of the global network. +* `location` - (Optional) The location of the device. Documented below. +* `model` - (Optional) The model of device. +* `serial_number` - (Optional) The serial number of the device. +* `site_id` - (Optional) The ID of the site. +* `tags` - (Optional) Key-value tags for the device. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) The type of device. +* `vendor` - (Optional) The vendor of the device. + +The `aws_location` object supports the following: + +* `subnet_arn` - (Optional) The Amazon Resource Name (ARN) of the subnet that the device is located in. +* `zone` - (Optional) The Zone that the device is located in. Specify the ID of an Availability Zone, Local Zone, Wavelength Zone, or an Outpost. + +The `location` object supports the following: + +* `address` - (Optional) The physical address. +* `latitude` - (Optional) The latitude. +* `longitude` - (Optional) The longitude. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the device. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_device` using the device ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_device` using the device ARN. For example: + +```console +% terraform import aws_networkmanager_device.example arn:aws:networkmanager::123456789012:device/global-network-0d47f6t230mz46dy4/device-07f6fd08867abc123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_global_network.html.markdown b/website/docs/cdktf/python/r/networkmanager_global_network.html.markdown new file mode 100644 index 00000000000..bd9c8f6dade --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_global_network.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_global_network" +description: |- + Provides a global network resource. +--- + + + +# Resource: aws_networkmanager_global_network + +Provides a global network resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerGlobalNetwork(self, "example", + description="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the Global Network. +* `tags` - (Optional) Key-value tags for the Global Network. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Global Network Amazon Resource Name (ARN) +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_global_network` using the global network ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_global_network` using the global network ID. For example: + +```console +% terraform import aws_networkmanager_global_network.example global-network-0d47f6t230mz46dy4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_link.html.markdown b/website/docs/cdktf/python/r/networkmanager_link.html.markdown new file mode 100644 index 00000000000..8879a231674 --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_link.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_link" +description: |- + Creates a link for a site. +--- + + + +# Resource: aws_networkmanager_link + +Creates a link for a site. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_link import NetworkmanagerLink +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerLink(self, "example", + bandwidth=NetworkmanagerLinkBandwidth( + download_speed=50, + upload_speed=10 + ), + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id), + provider_name="MegaCorp", + site_id=Token.as_string(aws_networkmanager_site_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bandwidth` - (Required) The upload speed and download speed in Mbps. Documented below. +* `description` - (Optional) A description of the link. +* `global_network_id` - (Required) The ID of the global network. +* `provider_name` - (Optional) The provider of the link. 
+* `site_id` - (Required) The ID of the site. +* `tags` - (Optional) Key-value tags for the link. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) The type of the link. + +The `bandwidth` object supports the following: + +* `download_speed` - (Optional) Download speed in Mbps. +* `upload_speed` - (Optional) Upload speed in Mbps. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Link Amazon Resource Name (ARN). +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_link` using the link ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_link` using the link ARN. For example: + +```console +% terraform import aws_networkmanager_link.example arn:aws:networkmanager::123456789012:link/global-network-0d47f6t230mz46dy4/link-444555aaabbb11223 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_link_association.html.markdown b/website/docs/cdktf/python/r/networkmanager_link_association.html.markdown new file mode 100644 index 00000000000..f2908e38dbb --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_link_association.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_link_association" +description: |- + Associates a link to a device. +--- + + + +# Resource: aws_networkmanager_link_association + +Associates a link to a device. +A device can be associated to multiple links and a link can be associated to multiple devices. +The device and link must be in the same global network and the same site. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_link_association import NetworkmanagerLinkAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerLinkAssociation(self, "example", + device_id=Token.as_string(aws_networkmanager_device_example.id), + global_network_id=Token.as_string(aws_networkmanager_global_network_example.id), + link_id=Token.as_string(aws_networkmanager_link_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `device_id` - (Required) The ID of the device. +* `global_network_id` - (Required) The ID of the global network. +* `link_id` - (Required) The ID of the link. 
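+
+Because a device can carry several link associations, declare one resource per device-link pair. The sketch below assumes a second link, `aws_networkmanager_link_example2`, defined elsewhere in the same global network and site.
+
+```python
+# A sketch only - placeholder references, not produced by 'cdktf convert'
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.networkmanager_link_association import NetworkmanagerLinkAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # One association resource per link on the same device.
+        NetworkmanagerLinkAssociation(self, "primary",
+            device_id=Token.as_string(aws_networkmanager_device_example.id),
+            global_network_id=Token.as_string(aws_networkmanager_global_network_example.id),
+            link_id=Token.as_string(aws_networkmanager_link_example.id)
+        )
+        NetworkmanagerLinkAssociation(self, "secondary",
+            device_id=Token.as_string(aws_networkmanager_device_example.id),
+            global_network_id=Token.as_string(aws_networkmanager_global_network_example.id),
+            link_id=Token.as_string(aws_networkmanager_link_example2.id)
+        )
+```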
+ +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_link_association` using the global network ID, link ID and device ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_link_association` using the global network ID, link ID and device ID. For example: + +```console +% terraform import aws_networkmanager_link_association.example global-network-0d47f6t230mz46dy4,link-444555aaabbb11223,device-07f6fd08867abc123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_site.html.markdown b/website/docs/cdktf/python/r/networkmanager_site.html.markdown new file mode 100644 index 00000000000..f9eac69f75a --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_site.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_site" +description: |- + Creates a site in a global network. +--- + + + +# Resource: aws_networkmanager_site + +Creates a site in a global network. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork +from imports.aws.networkmanager_site import NetworkmanagerSite +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = NetworkmanagerGlobalNetwork(self, "example") + aws_networkmanager_site_example = NetworkmanagerSite(self, "example_1", + global_network_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_networkmanager_site_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `global_network_id` - (Required) The ID of the Global Network to create the site in. +* `description` - (Optional) Description of the Site. +* `location` - (Optional) The site location as documented below. +* `tags` - (Optional) Key-value tags for the Site. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `location` object supports the following: + +* `address` - (Optional) Address of the location. +* `latitude` - (Optional) Latitude of the location. +* `longitude` - (Optional) Longitude of the location. 
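+
+The main example above omits `location`. A minimal sketch of a site with a location block, assuming the nested configuration class is named `NetworkmanagerSiteLocation` (following the same naming pattern as `NetworkmanagerLinkBandwidth` in the generated bindings):
+
+```python
+from imports.aws.networkmanager_site import NetworkmanagerSite, NetworkmanagerSiteLocation
+
+# Inside a TerraformStack's __init__; `example` is the global network
+# created in the example above. Latitude and longitude are strings.
+NetworkmanagerSite(self, "example_with_location",
+    global_network_id=example.id,
+    location=NetworkmanagerSiteLocation(
+        address="123 Main Street, Anytown, USA",
+        latitude="47.6062",
+        longitude="-122.3321"
+    )
+)
+```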
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Site Amazon Resource Name (ARN).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_site` using the site ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_networkmanager_site` using the site ARN. For example:
+
+```console
+% terraform import aws_networkmanager_site.example arn:aws:networkmanager::123456789012:site/global-network-0d47f6t230mz46dy4/site-444555aaabbb11223
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/networkmanager_site_to_site_vpn_attachment.html.markdown b/website/docs/cdktf/python/r/networkmanager_site_to_site_vpn_attachment.html.markdown
new file mode 100644
index 00000000000..3d79365efd8
--- /dev/null
+++ b/website/docs/cdktf/python/r/networkmanager_site_to_site_vpn_attachment.html.markdown
@@ -0,0 +1,198 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_site_to_site_vpn_attachment"
+description: |-
+  Terraform resource for managing an AWS NetworkManager Site To Site VPN Attachment.
+---
+
+
+
+# Resource: aws_networkmanager_site_to_site_vpn_attachment
+
+Terraform resource for managing an AWS NetworkManager Site To Site VPN Attachment.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkmanager_site_to_site_vpn_attachment import NetworkmanagerSiteToSiteVpnAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkmanagerSiteToSiteVpnAttachment(self, "example",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            vpn_connection_arn=Token.as_string(aws_vpn_connection_example.arn)
+        )
+```
+
+### Full Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.customer_gateway import CustomerGateway
+from imports.aws.data_aws_networkmanager_core_network_policy_document import DataAwsNetworkmanagerCoreNetworkPolicyDocument, DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPolicies, DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesAction, DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesConditions, DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfiguration, DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations, DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions, DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.networkmanager_attachment_accepter import NetworkmanagerAttachmentAccepter
+from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork
+from imports.aws.networkmanager_site_to_site_vpn_attachment import NetworkmanagerSiteToSiteVpnAttachment
+from imports.aws.vpn_connection import VpnConnection
+from imports.awscc.networkmanager_core_network import NetworkmanagerCoreNetwork
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The following providers are missing schema information and might need manual adjustments to synthesize correctly: awscc.
+        # For a more precise conversion please use the --provider flag in convert.
+        # The converted example referenced an undefined `current`; a region lookup
+        # is assumed here so that `current.name` below resolves.
+        current = DataAwsRegion(self, "current")
+        test = CustomerGateway(self, "test",
+            bgp_asn=Token.as_string(65000),
+            ip_address="172.0.0.1",
+            type="ipsec.1"
+        )
+        aws_networkmanager_global_network_test = NetworkmanagerGlobalNetwork(self, "test_1",
+            tags={
+                "Name": "test"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_global_network_test.override_logical_id("test")
+        aws_vpn_connection_test = VpnConnection(self, "test_2",
+            customer_gateway_id=test.id,
+            tags={
+                "Name": "test"
+            },
+            type="ipsec.1"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_vpn_connection_test.override_logical_id("test")
+        data_aws_networkmanager_core_network_policy_document_test = DataAwsNetworkmanagerCoreNetworkPolicyDocument(self, "test_3",
+            attachment_policies=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPolicies(
+                action=DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesAction(
+                    association_method="constant",
+                    segment="shared"
+                ),
+                condition_logic="or",
+                conditions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentAttachmentPoliciesConditions(
+                    key="segment",
+                    operator="equals",
+                    type="tag-value",
+                    value="shared"
+                )
+                ],
+                rule_number=1
+            )
+            ],
+            core_network_configuration=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfiguration(
+                asn_ranges=["64512-64555"],
+                edge_locations=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentCoreNetworkConfigurationEdgeLocations(
+                    asn=Token.as_string(64512),
+                    location=Token.as_string(current.name)
+                )
+                ],
+                vpn_ecmp_support=False
+            )
+            ],
+            segment_actions=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegmentActions(
+                action="share",
+                mode="attachment-route",
+                segment="shared",
+                share_with=["*"]
+            )
+            ],
+            segments=[DataAwsNetworkmanagerCoreNetworkPolicyDocumentSegments(
+                description="SegmentForSharedServices",
+                name="shared",
+                require_attachment_acceptance=True
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_networkmanager_core_network_policy_document_test.override_logical_id("test")
+        awscc_networkmanager_core_network_test = NetworkmanagerCoreNetwork(self, "test_4",
+            global_network_id=aws_networkmanager_global_network_test.id,
+            policy_document=Fn.jsonencode(
+                Fn.jsondecode(
+                    Token.as_string(data_aws_networkmanager_core_network_policy_document_test.json)))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        awscc_networkmanager_core_network_test.override_logical_id("test")
+        aws_networkmanager_site_to_site_vpn_attachment_test = NetworkmanagerSiteToSiteVpnAttachment(self, "test_5",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_test.id),
+            tags={
+                "segment": "shared"
+            },
+            vpn_connection_arn=Token.as_string(aws_vpn_connection_test.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_site_to_site_vpn_attachment_test.override_logical_id("test")
+        aws_networkmanager_attachment_accepter_test = NetworkmanagerAttachmentAccepter(self, "test_6",
+            attachment_id=Token.as_string(aws_networkmanager_site_to_site_vpn_attachment_test.id),
+            attachment_type=Token.as_string(aws_networkmanager_site_to_site_vpn_attachment_test.attachment_type)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_attachment_accepter_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+- `core_network_id` - (Required) The ID of a core network for the VPN attachment.
+- `vpn_connection_arn` - (Required) The ARN of the site-to-site VPN connection.
+
+The following arguments are optional:
+
+- `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - The ARN of the attachment.
+- `attachment_policy_rule_number` - The policy rule number associated with the attachment.
+- `attachment_type` - The type of attachment.
+- `core_network_arn` - The ARN of a core network.
+- `core_network_id` - The ID of a core network.
+- `edge_location` - The Region where the edge is located.
+- `id` - The ID of the attachment.
+- `owner_account_id` - The ID of the attachment account owner.
+- `resource_arn` - The attachment resource ARN.
+- `segment_name` - The name of the segment attachment.
+- `state` - The state of the attachment.
+- `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_site_to_site_vpn_attachment` using the attachment ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_networkmanager_site_to_site_vpn_attachment` using the attachment ID. For example:
+
+```console
+% terraform import aws_networkmanager_site_to_site_vpn_attachment.example attachment-0f8fa60d2238d1bd8
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/networkmanager_transit_gateway_connect_peer_association.html.markdown b/website/docs/cdktf/python/r/networkmanager_transit_gateway_connect_peer_association.html.markdown
new file mode 100644
index 00000000000..927822d1ed8
--- /dev/null
+++ b/website/docs/cdktf/python/r/networkmanager_transit_gateway_connect_peer_association.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_transit_gateway_connect_peer_association"
+description: |-
+  Associates a transit gateway Connect peer with a device, and optionally, with a link.
+---
+
+
+
+# Resource: aws_networkmanager_transit_gateway_connect_peer_association
+
+Associates a transit gateway Connect peer with a device, and optionally, with a link.
+If you specify a link, it must be associated with the specified device.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkmanager_transit_gateway_connect_peer_association import NetworkmanagerTransitGatewayConnectPeerAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkmanagerTransitGatewayConnectPeerAssociation(self, "example",
+            device_id=Token.as_string(aws_networkmanager_device_example.id),
+            global_network_id=Token.as_string(aws_networkmanager_global_network_example.id),
+            transit_gateway_connect_peer_arn=Token.as_string(aws_ec2_transit_gateway_connect_peer_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `device_id` - (Required) The ID of the device.
+* `global_network_id` - (Required) The ID of the global network.
+* `link_id` - (Optional) The ID of the link.
+* `transit_gateway_connect_peer_arn` - (Required) The Amazon Resource Name (ARN) of the Connect peer.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_transit_gateway_connect_peer_association` using the global network ID and Connect peer ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_networkmanager_transit_gateway_connect_peer_association` using the global network ID and Connect peer ARN.
For example: + +```console +% terraform import aws_networkmanager_transit_gateway_connect_peer_association.example global-network-0d47f6t230mz46dy4,arn:aws:ec2:us-west-2:123456789012:transit-gateway-connect-peer/tgw-connect-peer-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_transit_gateway_peering.html.markdown b/website/docs/cdktf/python/r/networkmanager_transit_gateway_peering.html.markdown new file mode 100644 index 00000000000..aee2702c1eb --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_transit_gateway_peering.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_transit_gateway_peering" +description: |- + Creates a peering connection between an AWS Cloud WAN core network and an AWS Transit Gateway. +--- + + + +# Resource: aws_networkmanager_transit_gateway_peering + +Creates a peering connection between an AWS Cloud WAN core network and an AWS Transit Gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_transit_gateway_peering import NetworkmanagerTransitGatewayPeering +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerTransitGatewayPeering(self, "example", + core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id), + transit_gateway_arn=Token.as_string(aws_ec2_transit_gateway_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `core_network_id` - (Required) The ID of a core network. +* `tags` - (Optional) Key-value tags for the peering. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `transit_gateway_arn` - (Required) The ARN of the transit gateway for the peering request. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Peering Amazon Resource Name (ARN). +* `core_network_arn` - The ARN of the core network. +* `edge_location` - The edge location for the peer. +* `id` - Peering ID. +* `owner_account_id` - The ID of the account owner. +* `peering_type` - The type of peering. This will be `TRANSIT_GATEWAY`. +* `resource_arn` - The resource ARN of the peer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `transit_gateway_peering_attachment_id` - The ID of the transit gateway peering attachment. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_transit_gateway_peering` using the peering ID. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_networkmanager_transit_gateway_peering` using the peering ID. For example:
+
+```console
+% terraform import aws_networkmanager_transit_gateway_peering.example peering-444555aaabbb11223
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/networkmanager_transit_gateway_registration.html.markdown b/website/docs/cdktf/python/r/networkmanager_transit_gateway_registration.html.markdown
new file mode 100644
index 00000000000..7287c80b7be
--- /dev/null
+++ b/website/docs/cdktf/python/r/networkmanager_transit_gateway_registration.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_transit_gateway_registration"
+description: |-
+  Registers a transit gateway to a global network.
+---
+
+
+
+# Resource: aws_networkmanager_transit_gateway_registration
+
+Registers a transit gateway to a global network. The transit gateway can be in any AWS Region,
+but it must be owned by the same AWS account that owns the global network.
+You cannot register a transit gateway in more than one global network.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ec2_transit_gateway import Ec2TransitGateway
+from imports.aws.networkmanager_global_network import NetworkmanagerGlobalNetwork
+from imports.aws.networkmanager_transit_gateway_registration import NetworkmanagerTransitGatewayRegistration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Ec2TransitGateway(self, "example")
+        aws_networkmanager_global_network_example = NetworkmanagerGlobalNetwork(self, "example_1",
+            description="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_global_network_example.override_logical_id("example")
+        aws_networkmanager_transit_gateway_registration_example = NetworkmanagerTransitGatewayRegistration(self, "example_2",
+            global_network_id=Token.as_string(aws_networkmanager_global_network_example.id),
+            transit_gateway_arn=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_networkmanager_transit_gateway_registration_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `global_network_id` - (Required) The ID of the Global Network to register to.
+* `transit_gateway_arn` - (Required) The ARN of the Transit Gateway to register.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_transit_gateway_registration` using the global network ID and transit gateway ARN.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_transit_gateway_registration` using the global network ID and transit gateway ARN. For example: + +```console +% terraform import aws_networkmanager_transit_gateway_registration.example global-network-0d47f6t230mz46dy4,arn:aws:ec2:us-west-2:123456789012:transit-gateway/tgw-123abc05e04123abc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/networkmanager_transit_gateway_route_table_attachment.html.markdown b/website/docs/cdktf/python/r/networkmanager_transit_gateway_route_table_attachment.html.markdown new file mode 100644 index 00000000000..e4b5082c9fa --- /dev/null +++ b/website/docs/cdktf/python/r/networkmanager_transit_gateway_route_table_attachment.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_transit_gateway_route_table_attachment" +description: |- + Creates a transit gateway route table attachment. +--- + + + +# Resource: aws_networkmanager_transit_gateway_route_table_attachment + +Creates a transit gateway route table attachment. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.networkmanager_transit_gateway_route_table_attachment import NetworkmanagerTransitGatewayRouteTableAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + NetworkmanagerTransitGatewayRouteTableAttachment(self, "example", + peering_id=Token.as_string(aws_networkmanager_transit_gateway_peering_example.id), + transit_gateway_route_table_arn=Token.as_string(aws_ec2_transit_gateway_route_table_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `peering_id` - (Required) The ID of the peer for the attachment. +* `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `transit_gateway_route_table_arn` - (Required) The ARN of the transit gateway route table for the attachment. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Attachment Amazon Resource Name (ARN). +* `attachment_policy_rule_number` - The policy rule number associated with the attachment. +* `attachment_type` - The type of attachment. +* `core_network_arn` - The ARN of the core network. +* `core_network_id` - The ID of the core network. +* `edge_location` - The edge location for the peer. +* `id` - The ID of the attachment. +* `owner_account_id` - The ID of the attachment account owner. +* `resource_arn` - The attachment resource ARN. +* `segment_name` - The name of the segment attachment. +* `state` - The state of the attachment. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_transit_gateway_route_table_attachment` using the attachment ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_networkmanager_transit_gateway_route_table_attachment` using the attachment ID. For example:
+
+```console
+% terraform import aws_networkmanager_transit_gateway_route_table_attachment.example attachment-0f8fa60d2238d1bd8
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/networkmanager_vpc_attachment.html.markdown b/website/docs/cdktf/python/r/networkmanager_vpc_attachment.html.markdown
new file mode 100644
index 00000000000..faa5dd09473
--- /dev/null
+++ b/website/docs/cdktf/python/r/networkmanager_vpc_attachment.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_vpc_attachment"
+description: |-
+  Terraform resource for managing an AWS NetworkManager VPC Attachment.
+---
+
+
+
+# Resource: aws_networkmanager_vpc_attachment
+
+Terraform resource for managing an AWS NetworkManager VPC Attachment.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        NetworkmanagerVpcAttachment(self, "example",
+            core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+            subnet_arns=[Token.as_string(aws_subnet_example.arn)],
+            vpc_arn=Token.as_string(aws_vpc_example.arn)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `core_network_id` - (Required) The ID of a core network for the VPC attachment.
+* `subnet_arns` - (Required) The subnet ARNs of the VPC attachment.
+* `vpc_arn` - (Required) The ARN of the VPC.
+
+The following arguments are optional:
+
+* `options` - (Optional) Options for the VPC attachment. Detailed below; a sketch follows the list.
+* `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### options
+
+* `appliance_mode_support` - (Optional) Indicates whether appliance mode is supported. If enabled, traffic flow between a source and destination uses the same Availability Zone for the VPC attachment for the lifetime of that flow.
+* `ipv6_support` - (Optional) Indicates whether IPv6 is supported.
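+
+A sketch enabling both options, assuming the nested configuration class follows the usual `NetworkmanagerVpcAttachmentOptions` naming of the generated bindings:
+
+```python
+from cdktf import Token
+from imports.aws.networkmanager_vpc_attachment import NetworkmanagerVpcAttachment, NetworkmanagerVpcAttachmentOptions
+
+# Inside a TerraformStack's __init__; the referenced core network, subnet,
+# and VPC mirror the Basic Usage example above.
+NetworkmanagerVpcAttachment(self, "example_with_options",
+    core_network_id=Token.as_string(awscc_networkmanager_core_network_example.id),
+    options=NetworkmanagerVpcAttachmentOptions(
+        appliance_mode_support=True,
+        ipv6_support=True
+    ),
+    subnet_arns=[Token.as_string(aws_subnet_example.arn)],
+    vpc_arn=Token.as_string(aws_vpc_example.arn)
+)
+```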
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the attachment. +* `attachment_policy_rule_number` - The policy rule number associated with the attachment. +* `attachment_type` - The type of attachment. +* `core_network_arn` - The ARN of a core network. +* `edge_location` - The Region where the edge is located. +* `id` - The ID of the attachment. +* `owner_account_id` - The ID of the attachment account owner. +* `resource_arn` - The attachment resource ARN. +* `segment_name` - The name of the segment attachment. +* `state` - The state of the attachment. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_networkmanager_vpc_attachment` using the attachment ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_networkmanager_vpc_attachment` using the attachment ID. For example: + +```console +% terraform import aws_networkmanager_vpc_attachment.example attachment-0f8fa60d2238d1bd8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/oam_link.html.markdown b/website/docs/cdktf/python/r/oam_link.html.markdown new file mode 100644 index 00000000000..e119a6aa65e --- /dev/null +++ b/website/docs/cdktf/python/r/oam_link.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_link" +description: |- + Terraform resource for managing an AWS CloudWatch Observability Access Manager Link. +--- + + + +# Resource: aws_oam_link + +Terraform resource for managing an AWS CloudWatch Observability Access Manager Link. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.oam_link import OamLink +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OamLink(self, "example", + label_template="$AccountName", + resource_types=["AWS::CloudWatch::Metric"], + sink_identifier=test.id, + tags={ + "Env": "prod" + } + ) +``` + +## Argument Reference + +The following arguments are required: + +* `label_template` - (Required) Human-readable name to use to identify this source account when you are viewing data from it in the monitoring account. +* `resource_types` - (Required) Types of data that the source account shares with the monitoring account. +* `sink_identifier` - (Required) Identifier of the sink to use to create this link. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the link. +* `label` - Label that is assigned to this link. +* `link_id` - ID string that AWS generated as part of the link ARN. +* `sink_arn` - ARN of the sink that is used for this link. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1m`) +* `update` - (Default `1m`) +* `delete` - (Default `1m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Observability Access Manager Link using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch Observability Access Manager Link using the `arn`. For example: + +```console +% terraform import aws_oam_link.example arn:aws:oam:us-west-2:123456789012:link/link-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/oam_sink.html.markdown b/website/docs/cdktf/python/r/oam_sink.html.markdown new file mode 100644 index 00000000000..30b1c5d80d6 --- /dev/null +++ b/website/docs/cdktf/python/r/oam_sink.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_sink" +description: |- + Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink. +--- + + + +# Resource: aws_oam_sink + +Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.oam_sink import OamSink +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OamSink(self, "example", + name="ExampleSink", + tags={ + "Env": "prod" + } + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name for the sink. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Sink. +* `sink_id` - ID string that AWS generated as part of the sink ARN. 
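+
+The sink's ARN (or `id`) is what links reference through `sink_identifier`. A sketch of that wiring follows; in a real cross-account setup the `aws_oam_link` would be created by a provider configured for the source account, which is omitted here:
+
+```python
+from imports.aws.oam_link import OamLink
+
+# Inside a TerraformStack's __init__; `example` is the OamSink from above.
+OamLink(self, "example_link",
+    label_template="$AccountName",
+    resource_types=["AWS::CloudWatch::Metric"],
+    sink_identifier=example.arn
+)
+```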
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `1m`)
+* `update` - (Default `1m`)
+* `delete` - (Default `1m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Observability Access Manager Sink using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudWatch Observability Access Manager Sink using the `arn`. For example:
+
+```console
+% terraform import aws_oam_sink.example arn:aws:oam:us-west-2:123456789012:sink/sink-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/oam_sink_policy.html.markdown b/website/docs/cdktf/python/r/oam_sink_policy.html.markdown
new file mode 100644
index 00000000000..e3821e3ce5a
--- /dev/null
+++ b/website/docs/cdktf/python/r/oam_sink_policy.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "CloudWatch Observability Access Manager"
+layout: "aws"
+page_title: "AWS: aws_oam_sink_policy"
+description: |-
+  Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink Policy.
+---
+
+
+
+# Resource: aws_oam_sink_policy
+
+Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink Policy.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.oam_sink import OamSink
+from imports.aws.oam_sink_policy import OamSinkPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = OamSink(self, "example",
+            name="ExampleSink"
+        )
+        aws_oam_sink_policy_example = OamSinkPolicy(self, "example_1",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": ["oam:CreateLink", "oam:UpdateLink"],
+                        "Condition": {
+                            "ForAllValues:StringEquals": {
+                                "oam:ResourceTypes": ["AWS::CloudWatch::Metric", "AWS::Logs::LogGroup"
+                                ]
+                            }
+                        },
+                        "Effect": "Allow",
+                        "Principal": {
+                            "AWS": ["111111111111", "222222222222"]
+                        },
+                        "Resource": "*"
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            sink_identifier=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_oam_sink_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `sink_identifier` - (Required) ARN of the sink to attach this policy to.
+* `policy` - (Required) JSON policy to use. If you are updating an existing policy, the entire existing policy is replaced by what you specify here.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Sink.
+* `sink_id` - ID string that AWS generated as part of the sink ARN.
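+
+As an alternative to `Fn.jsonencode`, the same policy can be assembled with the `aws_iam_policy_document` data source, mirroring how other examples in this set build policies; a sketch under that assumption:
+
+```python
+from cdktf import Token
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementCondition, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.oam_sink_policy import OamSinkPolicy
+
+# Inside a TerraformStack's __init__; `example` is the OamSink from above.
+policy = DataAwsIamPolicyDocument(self, "sink_policy",
+    statement=[DataAwsIamPolicyDocumentStatement(
+        actions=["oam:CreateLink", "oam:UpdateLink"],
+        condition=[DataAwsIamPolicyDocumentStatementCondition(
+            test="ForAllValues:StringEquals",
+            values=["AWS::CloudWatch::Metric", "AWS::Logs::LogGroup"],
+            variable="oam:ResourceTypes"
+        )],
+        effect="Allow",
+        principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+            identifiers=["111111111111", "222222222222"],
+            type="AWS"
+        )],
+        resources=["*"]
+    )]
+)
+OamSinkPolicy(self, "example_policy",
+    policy=Token.as_string(policy.json),
+    sink_identifier=example.id
+)
+```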
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1m`) +* `update` - (Default `1m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Observability Access Manager Sink Policy using the `sink_identifier`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch Observability Access Manager Sink Policy using the `sink_identifier`. For example: + +```console +% terraform import aws_oam_sink_policy.example arn:aws:oam:us-west-2:123456789012:sink/sink-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opensearch_domain.html.markdown b/website/docs/cdktf/python/r/opensearch_domain.html.markdown new file mode 100644 index 00000000000..e2b46d463cc --- /dev/null +++ b/website/docs/cdktf/python/r/opensearch_domain.html.markdown @@ -0,0 +1,568 @@ +--- +subcategory: "OpenSearch" +layout: "aws" +page_title: "AWS: aws_opensearch_domain" +description: |- + Terraform resource for managing an AWS OpenSearch Domain. +--- + + + +# Resource: aws_opensearch_domain + +Manages an Amazon OpenSearch Domain. + +## Elasticsearch vs. OpenSearch + +Amazon OpenSearch Service is the successor to Amazon Elasticsearch Service and supports OpenSearch and legacy Elasticsearch OSS (up to 7.10, the final open source version of the software). + +OpenSearch Domain configurations are similar in many ways to Elasticsearch Domain configurations. However, there are important differences including these: + +* OpenSearch has `engine_version` while Elasticsearch has `elasticsearch_version` +* Versions are specified differently - _e.g._, `Elasticsearch_7.10` with OpenSearch vs. `7.10` for Elasticsearch. +* `instance_type` argument values end in `search` for OpenSearch vs. `elasticsearch` for Elasticsearch (_e.g._, `t2.micro.search` vs. `t2.micro.elasticsearch`). +* The AWS-managed service-linked role for OpenSearch is called `AWSServiceRoleForAmazonOpenSearchService` instead of `AWSServiceRoleForAmazonElasticsearchService` for Elasticsearch. + +There are also some potentially unexpected similarities in configurations: + +* ARNs for both are prefaced with `arn:aws:es:`. +* Both OpenSearch and Elasticsearch use assume role policies that refer to the `Principal` `Service` as `es.amazonaws.com`. +* IAM policy actions, such as those you will find in `access_policies`, are prefaced with `es:` for both. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.opensearch_domain import OpensearchDomain, OpensearchDomainClusterConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchDomain(self, "example",
+            cluster_config=OpensearchDomainClusterConfig(
+                instance_type="r4.large.search"
+            ),
+            domain_name="example",
+            engine_version="Elasticsearch_7.10",
+            tags={
+                "Domain": "TestDomain"
+            }
+        )
+```
+
+### Access Policy
+
+-> See also: [`aws_opensearch_domain_policy` resource](/docs/providers/aws/r/opensearch_domain_policy.html)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformVariable, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementCondition, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.opensearch_domain import OpensearchDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        domain = TerraformVariable(self, "domain",
+            default="tf-test"
+        )
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_region_current = DataAwsRegion(self, "current_2")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_region_current.override_logical_id("current")
+        example = DataAwsIamPolicyDocument(self, "example",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["es:*"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="IpAddress",
+                    values=["66.193.100.22/32"],
+                    variable="aws:SourceIp"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="*"
+                )
+                ],
+                resources=["arn:aws:es:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:domain/${" + domain.value + "}/*"
+                ]
+            )
+            ]
+        )
+        aws_opensearch_domain_example = OpensearchDomain(self, "example_4",
+            access_policies=Token.as_string(example.json),
+            domain_name=domain.string_value
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_opensearch_domain_example.override_logical_id("example")
+```
+
+### Log publishing to CloudWatch Logs
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.opensearch_domain import OpensearchDomain, OpensearchDomainLogPublishingOptions
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, domain_name):
+        super().__init__(scope, name)
+        example = CloudwatchLogGroup(self, "example",
+            name="example"
+        )
+        aws_opensearch_domain_example = OpensearchDomain(self, "example_1",
+            log_publishing_options=[OpensearchDomainLogPublishingOptions(
+                cloudwatch_log_group_arn=example.arn,
+                log_type="INDEX_SLOW_LOGS"
+            )
+            ],
+            domain_name=domain_name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_opensearch_domain_example.override_logical_id("example")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_2",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:PutLogEvents", "logs:PutLogEventsBatch", "logs:CreateLogStream"
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["es.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=["arn:aws:logs:*"]
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_cloudwatch_log_resource_policy_example = CloudwatchLogResourcePolicy(self, "example_3",
+            policy_document=Token.as_string(data_aws_iam_policy_document_example.json),
+            policy_name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_resource_policy_example.override_logical_id("example")
+```
+
+### VPC based OpenSearch
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformVariable, Token, property_access, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.data_aws_subnets import DataAwsSubnets, DataAwsSubnetsFilter
+from imports.aws.data_aws_vpc import DataAwsVpc
+from imports.aws.iam_service_linked_role import IamServiceLinkedRole
+from imports.aws.opensearch_domain import OpensearchDomain, OpensearchDomainClusterConfig, OpensearchDomainVpcOptions
+from imports.aws.security_group import SecurityGroup, SecurityGroupIngress
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+        # You can read more about this at https://cdk.tf/variables
+        domain = TerraformVariable(self, "domain",
+            default="tf-test"
+        )
+        vpc = TerraformVariable(self, "vpc")
+        example = IamServiceLinkedRole(self, "example",
+            aws_service_name="opensearchservice.amazonaws.com"
+        )
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_region_current = DataAwsRegion(self, "current_4")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ data_aws_region_current.override_logical_id("current") + data_aws_vpc_example = DataAwsVpc(self, "example_5", + tags={ + "Name": vpc.string_value + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_vpc_example.override_logical_id("example") + aws_security_group_example = SecurityGroup(self, "example_6", + description="Managed by Terraform", + ingress=[SecurityGroupIngress( + cidr_blocks=[Token.as_string(data_aws_vpc_example.cidr_block)], + from_port=443, + protocol="tcp", + to_port=443 + ) + ], + name="${" + vpc.value + "}-opensearch-${" + domain.value + "}", + vpc_id=Token.as_string(data_aws_vpc_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_security_group_example.override_logical_id("example") + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_7", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["es:*"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="*" + ) + ], + resources=["arn:aws:es:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:domain/${" + domain.value + "}/*" + ] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + data_aws_subnets_example = DataAwsSubnets(self, "example_8", + filter=[DataAwsSubnetsFilter( + name="vpc-id", + values=[Token.as_string(data_aws_vpc_example.id)] + ) + ], + tags={ + "Tier": "private" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_subnets_example.override_logical_id("example") + aws_opensearch_domain_example = OpensearchDomain(self, "example_9", + access_policies=Token.as_string(data_aws_iam_policy_document_example.json), + advanced_options={ + "rest.action.multi.allow_explicit_index": "true" + }, + cluster_config=OpensearchDomainClusterConfig( + instance_type="m4.large.search", + zone_awareness_enabled=True + ), + depends_on=[example], + domain_name=domain.string_value, + engine_version="OpenSearch_1.0", + tags={ + "Domain": "TestDomain" + }, + vpc_options=OpensearchDomainVpcOptions( + security_group_ids=[Token.as_string(aws_security_group_example.id)], + subnet_ids=[ + Token.as_string(property_access(data_aws_subnets_example.ids, ["0"])), + Token.as_string(property_access(data_aws_subnets_example.ids, ["1"])) + ] + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_opensearch_domain_example.override_logical_id("example") +``` + +### Enabling fine-grained access control on an existing domain + +This example shows two configurations: one to create a domain without fine-grained access control and the second to modify the domain to enable fine-grained access control. For more information, see [Enabling fine-grained access control](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac.html). + +#### First apply + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearch_domain import OpensearchDomain, OpensearchDomainAdvancedSecurityOptions, OpensearchDomainAdvancedSecurityOptionsMasterUserOptions, OpensearchDomainClusterConfig, OpensearchDomainDomainEndpointOptions, OpensearchDomainEbsOptions, OpensearchDomainEncryptAtRest, OpensearchDomainNodeToNodeEncryption
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchDomain(self, "example",
+            advanced_security_options=OpensearchDomainAdvancedSecurityOptions(
+                anonymous_auth_enabled=True,
+                enabled=False,
+                internal_user_database_enabled=True,
+                master_user_options=OpensearchDomainAdvancedSecurityOptionsMasterUserOptions(
+                    master_user_name="example",
+                    master_user_password="Barbarbarbar1!"
+                )
+            ),
+            cluster_config=OpensearchDomainClusterConfig(
+                instance_type="r5.large.search"
+            ),
+            domain_endpoint_options=OpensearchDomainDomainEndpointOptions(
+                enforce_https=True,
+                tls_security_policy="Policy-Min-TLS-1-2-2019-07"
+            ),
+            domain_name="ggkitty",
+            ebs_options=OpensearchDomainEbsOptions(
+                ebs_enabled=True,
+                volume_size=10
+            ),
+            encrypt_at_rest=OpensearchDomainEncryptAtRest(
+                enabled=True
+            ),
+            engine_version="Elasticsearch_7.1",
+            node_to_node_encryption=OpensearchDomainNodeToNodeEncryption(
+                enabled=True
+            )
+        )
+```
+
+#### Second apply
+
+Notice that the only change is that `advanced_security_options.0.enabled` is now set to `true`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearch_domain import OpensearchDomain, OpensearchDomainAdvancedSecurityOptions, OpensearchDomainAdvancedSecurityOptionsMasterUserOptions, OpensearchDomainClusterConfig, OpensearchDomainDomainEndpointOptions, OpensearchDomainEbsOptions, OpensearchDomainEncryptAtRest, OpensearchDomainNodeToNodeEncryption
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchDomain(self, "example",
+            advanced_security_options=OpensearchDomainAdvancedSecurityOptions(
+                anonymous_auth_enabled=True,
+                enabled=True,
+                internal_user_database_enabled=True,
+                master_user_options=OpensearchDomainAdvancedSecurityOptionsMasterUserOptions(
+                    master_user_name="example",
+                    master_user_password="Barbarbarbar1!"
+                )
+            ),
+            cluster_config=OpensearchDomainClusterConfig(
+                instance_type="r5.large.search"
+            ),
+            domain_endpoint_options=OpensearchDomainDomainEndpointOptions(
+                enforce_https=True,
+                tls_security_policy="Policy-Min-TLS-1-2-2019-07"
+            ),
+            domain_name="ggkitty",
+            ebs_options=OpensearchDomainEbsOptions(
+                ebs_enabled=True,
+                volume_size=10
+            ),
+            encrypt_at_rest=OpensearchDomainEncryptAtRest(
+                enabled=True
+            ),
+            engine_version="Elasticsearch_7.1",
+            node_to_node_encryption=OpensearchDomainNodeToNodeEncryption(
+                enabled=True
+            )
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `domain_name` - (Required) Name of the domain.
+
+The following arguments are optional:
+
+* `access_policies` - (Optional) IAM policy document specifying the access policies for the domain.
+* `advanced_options` - (Optional) Key-value string pairs to specify advanced configuration options. Note that the values for these configuration options must be strings (wrapped in quotes); otherwise they can cause a perpetual diff, making Terraform want to recreate your OpenSearch domain on every apply.
+* `advanced_security_options` - (Optional) Configuration block for [fine-grained access control](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac.html). Detailed below.
+* `auto_tune_options` - (Optional) Configuration block for the Auto-Tune options of the domain. Detailed below.
+* `cluster_config` - (Optional) Configuration block for the cluster of the domain. Detailed below.
+* `cognito_options` - (Optional) Configuration block for authenticating dashboard with Cognito. Detailed below.
+* `domain_endpoint_options` - (Optional) Configuration block for domain endpoint HTTP(S) related options. Detailed below.
+* `ebs_options` - (Optional) Configuration block for EBS related options, may be required based on chosen [instance size](https://aws.amazon.com/opensearch-service/pricing/). Detailed below.
+* `engine_version` - (Optional) Either `Elasticsearch_X.Y` or `OpenSearch_X.Y` to specify the engine version for the Amazon OpenSearch Service domain. For example, `OpenSearch_1.0` or `Elasticsearch_7.9`.
+  See [Creating and managing Amazon OpenSearch Service domains](http://docs.aws.amazon.com/opensearch-service/latest/developerguide/createupdatedomains.html#createdomains).
+  Defaults to the latest version of OpenSearch.
+* `encrypt_at_rest` - (Optional) Configuration block for encrypt at rest options. Only available for [certain instance types](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/encryption-at-rest.html). Detailed below.
+* `log_publishing_options` - (Optional) Configuration block for publishing slow and application logs to CloudWatch Logs. This block can be declared multiple times, for each log_type, within the same resource. Detailed below.
+* `node_to_node_encryption` - (Optional) Configuration block for node-to-node encryption options. Detailed below.
+* `snapshot_options` - (Optional) Configuration block for snapshot related options. Detailed below. DEPRECATED. For domains running OpenSearch 5.3 and later, Amazon OpenSearch takes hourly automated snapshots, making this setting irrelevant. For domains running earlier versions, OpenSearch takes daily automated snapshots.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_options` - (Optional) Configuration block for VPC related options. Adding or removing this configuration forces a new resource ([documentation](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/vpc.html)). Detailed below.
+* `off_peak_window_options` - (Optional) Configuration to add Off Peak update options. ([documentation](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/off-peak.html)). Detailed below.
+
+### advanced_security_options
+
+* `anonymous_auth_enabled` - (Optional) Whether Anonymous auth is enabled. Enables fine-grained access control on an existing domain. Ignored unless `advanced_security_options` are enabled. _Can only be enabled on an existing domain._
+* `enabled` - (Required, Forces new resource when changing from `true` to `false`) Whether advanced security is enabled.
+* `internal_user_database_enabled` - (Optional) Whether the internal user database is enabled. Default is `false`.
+* `master_user_options` - (Optional) Configuration block for the main user. Detailed below.
+
+#### master_user_options
+
+* `master_user_arn` - (Optional) ARN for the main user. Only specify if `internal_user_database_enabled` is not set or set to `false`.
+* `master_user_name` - (Optional) Main user's username, which is stored in the Amazon OpenSearch Service domain's internal database. Only specify if `internal_user_database_enabled` is set to `true`.
+
+### cluster_config
+
+* `cold_storage_options` - (Optional) Configuration block containing cold storage configuration. Detailed below.
+* `dedicated_master_count` - (Optional) Number of dedicated main nodes in the cluster.
+* `dedicated_master_enabled` - (Optional) Whether dedicated main nodes are enabled for the cluster.
+* `dedicated_master_type` - (Optional) Instance type of the dedicated main nodes in the cluster.
+* `instance_count` - (Optional) Number of instances in the cluster.
+* `instance_type` - (Optional) Instance type of data nodes in the cluster.
+* `warm_count` - (Optional) Number of warm nodes in the cluster. Valid values are between `2` and `150`. `warm_count` can only be set, and must be set, when `warm_enabled` is set to `true`.
+* `warm_enabled` - (Optional) Whether to enable warm storage.
+* `warm_type` - (Optional) Instance type for the OpenSearch cluster's warm nodes. Valid values are `ultrawarm1.medium.search`, `ultrawarm1.large.search` and `ultrawarm1.xlarge.search`. `warm_type` can only be set, and must be set, when `warm_enabled` is set to `true`.
+* `zone_awareness_config` - (Optional) Configuration block containing zone awareness settings. Detailed below.
+* `zone_awareness_enabled` - (Optional) Whether zone awareness is enabled. Set to `true` for a multi-AZ deployment. To enable awareness with three Availability Zones, the `availability_zone_count` within the `zone_awareness_config` must be set to `3`.
+
+#### cold_storage_options
+
+* `enabled` - (Optional) Boolean to enable cold storage for an OpenSearch domain. Defaults to `false`. Dedicated master and UltraWarm nodes must be enabled for cold storage.
+
+#### zone_awareness_config
+
+* `availability_zone_count` - (Optional) Number of Availability Zones for the domain to use with `zone_awareness_enabled`. Defaults to `2`. Valid values: `2` or `3`.
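+
+As a hand-written sketch (not `cdktf convert` output) of a multi-AZ deployment using the cluster settings above, with the zone awareness struct name assumed from the binding conventions:
+
+```python
+# Hand-written illustrative sketch; struct names are assumed, not confirmed.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.opensearch_domain import (OpensearchDomain,
+    OpensearchDomainClusterConfig,
+    OpensearchDomainClusterConfigZoneAwarenessConfig,
+    OpensearchDomainEbsOptions)
+class MultiAzSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchDomain(self, "example",
+            domain_name="example",
+            cluster_config=OpensearchDomainClusterConfig(
+                instance_count=3,  # one data node in each of the three AZs
+                instance_type="r5.large.search",
+                zone_awareness_enabled=True,
+                zone_awareness_config=OpensearchDomainClusterConfigZoneAwarenessConfig(
+                    availability_zone_count=3
+                )
+            ),
+            ebs_options=OpensearchDomainEbsOptions(
+                ebs_enabled=True,
+                volume_size=10
+            )
+        )
+```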
+
+### cognito_options
+
+AWS documentation: [Amazon Cognito Authentication for Dashboard](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/es-cognito-auth.html)
+
+* `enabled` - (Optional) Whether Amazon Cognito authentication with Dashboard is enabled. Default is `false`.
+* `identity_pool_id` - (Required) ID of the Cognito Identity Pool to use.
+* `role_arn` - (Required) ARN of the IAM role that has the AmazonOpenSearchServiceCognitoAccess policy attached.
+* `user_pool_id` - (Required) ID of the Cognito User Pool to use.
+
+### domain_endpoint_options
+
+* `custom_endpoint_certificate_arn` - (Optional) ACM certificate ARN for your custom endpoint.
+* `custom_endpoint_enabled` - (Optional) Whether to enable custom endpoint for the OpenSearch domain.
+* `custom_endpoint` - (Optional) Fully qualified domain for your custom endpoint.
+* `enforce_https` - (Optional) Whether to require HTTPS. Defaults to `true`.
+* `tls_security_policy` - (Optional) Name of the TLS security policy that needs to be applied to the HTTPS endpoint. Valid values: `Policy-Min-TLS-1-0-2019-07` and `Policy-Min-TLS-1-2-2019-07`. Terraform will only perform drift detection if a configuration value is provided.
+
+### ebs_options
+
+* `ebs_enabled` - (Required) Whether EBS volumes are attached to data nodes in the domain.
+* `iops` - (Optional) Baseline input/output (I/O) performance of EBS volumes attached to data nodes. Applicable only for the GP3 and Provisioned IOPS EBS volume types.
+* `throughput` - (Required if `volume_type` is set to `gp3`) Specifies the throughput (in MiB/s) of the EBS volumes attached to data nodes. Applicable only for the gp3 volume type.
+* `volume_size` - (Required if `ebs_enabled` is set to `true`.) Size of EBS volumes attached to data nodes (in GiB).
+* `volume_type` - (Optional) Type of EBS volumes attached to data nodes.
+
+### encrypt_at_rest
+
+~> **Note:** You can enable `encrypt_at_rest` _in place_ for an existing, unencrypted domain only if you are using OpenSearch or your Elasticsearch version is 6.7 or greater. For other versions, if you enable `encrypt_at_rest`, Terraform will recreate the domain, potentially causing data loss. For any version, if you disable `encrypt_at_rest` for an existing, encrypted domain, Terraform will recreate the domain, potentially causing data loss. If you change the `kms_key_id`, Terraform will also recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable encryption at rest. If the `encrypt_at_rest` block is not provided then this defaults to `false`. Enabling encryption on new domains requires an `engine_version` of `OpenSearch_X.Y` or `Elasticsearch_5.1` or greater.
+* `kms_key_id` - (Optional) KMS key ARN to encrypt the OpenSearch domain with. If not specified then it defaults to using the `aws/es` service KMS key. Note that KMS will accept a KMS key ID but will return the key ARN. To prevent Terraform detecting unwanted changes, use the key ARN instead.
+
+### log_publishing_options
+
+* `cloudwatch_log_group_arn` - (Required) ARN of the CloudWatch log group to which logs need to be published.
+* `enabled` - (Optional, Default: true) Whether the given log publishing option is enabled.
+* `log_type` - (Required) Type of OpenSearch log. Valid values: `INDEX_SLOW_LOGS`, `SEARCH_SLOW_LOGS`, `ES_APPLICATION_LOGS`, `AUDIT_LOGS`.
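+
+A hand-written sketch (not `cdktf convert` output) of publishing slow logs to a CloudWatch log group follows; note that AWS also requires a CloudWatch Logs resource policy (`aws_cloudwatch_log_resource_policy`) granting the OpenSearch service write access, which is omitted here for brevity:
+
+```python
+# Hand-written illustrative sketch; struct names are assumed, not confirmed.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.opensearch_domain import (OpensearchDomain,
+    OpensearchDomainLogPublishingOptions)
+class LogPublishingSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        log_group = CloudwatchLogGroup(self, "opensearch_logs",
+            name="/aws/opensearch/example"
+        )
+        OpensearchDomain(self, "example",
+            domain_name="example",
+            # One log_publishing_options block per log_type.
+            log_publishing_options=[OpensearchDomainLogPublishingOptions(
+                cloudwatch_log_group_arn=log_group.arn,
+                log_type="INDEX_SLOW_LOGS"
+            )]
+        )
+```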
+
+### node_to_node_encryption
+
+~> **Note:** You can enable `node_to_node_encryption` _in place_ for an existing, unencrypted domain only if you are using OpenSearch or your Elasticsearch version is 6.7 or greater. For other versions, if you enable `node_to_node_encryption`, Terraform will recreate the domain, potentially causing data loss. For any version, if you disable `node_to_node_encryption` for an existing, node-to-node encrypted domain, Terraform will recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable node-to-node encryption. If the `node_to_node_encryption` block is not provided then this defaults to `false`. Enabling node-to-node encryption of a new domain requires an `engine_version` of `OpenSearch_X.Y` or `Elasticsearch_6.0` or greater.
+
+### snapshot_options
+
+* `automated_snapshot_start_hour` - (Required) Hour during which the service takes an automated daily snapshot of the indices in the domain.
+
+### vpc_options
+
+AWS documentation: [VPC Support for Amazon OpenSearch Service Domains](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/es-vpc.html)
+
+~> **Note:** You must have created the service linked role for the OpenSearch service to use `vpc_options`. If you need to create the service linked role at the same time as the OpenSearch domain then you must use `depends_on` to make sure that the role is created before the OpenSearch domain. See the [VPC based OpenSearch domain example](#vpc-based-opensearch) above, and the sketch following this section.
+
+-> Security Groups and Subnets referenced in these attributes must all be within the same VPC. This determines what VPC the endpoints are created in.
+
+* `security_group_ids` - (Optional) List of VPC Security Group IDs to be applied to the OpenSearch domain endpoints. If omitted, the default Security Group for the VPC will be used.
+* `subnet_ids` - (Required) List of VPC Subnet IDs for the OpenSearch domain endpoints to be created in.
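+
+A hand-written sketch (not `cdktf convert` output) of the `depends_on` pattern described in the note above, using placeholder subnet and security group IDs:
+
+```python
+# Hand-written illustrative sketch; struct names follow the binding
+# conventions and the subnet/security group IDs are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.iam_service_linked_role import IamServiceLinkedRole
+from imports.aws.opensearch_domain import (OpensearchDomain,
+    OpensearchDomainVpcOptions)
+class VpcDomainSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The service-linked role must exist before a VPC domain can be created.
+        slr = IamServiceLinkedRole(self, "opensearch",
+            aws_service_name="opensearchservice.amazonaws.com"
+        )
+        OpensearchDomain(self, "example",
+            domain_name="example",
+            depends_on=[slr],
+            vpc_options=OpensearchDomainVpcOptions(
+                subnet_ids=["subnet-0123456789abcdef0"],  # placeholder
+                security_group_ids=["sg-0123456789abcdef0"]  # placeholder
+            )
+        )
+```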
+
+### off_peak_window_options
+
+AWS documentation: [Off Peak Hours Support for Amazon OpenSearch Service Domains](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/off-peak.html)
+
+* `enabled` - (Optional) Whether to enable the off-peak update window.
+* `off_peak_window` - (Optional) Configuration block for the off-peak window. Detailed below.
+    * `window_start_time` - (Optional) Start time of the 10-hour window for updates.
+        * `hours` - (Required) Starting hour of the 10-hour window for updates.
+        * `minutes` - (Required) Starting minute of the 10-hour window for updates.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the domain.
+* `domain_id` - Unique identifier for the domain.
+* `domain_name` - Name of the OpenSearch domain.
+* `endpoint` - Domain-specific endpoint used to submit index, search, and data upload requests.
+* `dashboard_endpoint` - Domain-specific endpoint for Dashboard without HTTPS scheme.
+* `kibana_endpoint` - (**Deprecated**) Domain-specific endpoint for Kibana without HTTPS scheme. Use the `dashboard_endpoint` attribute instead.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpc_options.0.availability_zones` - If the domain was created inside a VPC, the names of the availability zones the configured `subnet_ids` were created inside.
+* `vpc_options.0.vpc_id` - If the domain was created inside a VPC, the ID of the VPC.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearch domains using the `domain_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpenSearch domains using the `domain_name`. For example:
+
+```console
+% terraform import aws_opensearch_domain.example domain_name
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearch_domain_policy.html.markdown b/website/docs/cdktf/python/r/opensearch_domain_policy.html.markdown
new file mode 100644
index 00000000000..a2cb135b188
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearch_domain_policy.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_domain_policy"
+description: |-
+  Provides an OpenSearch Domain Policy.
+---
+
+ 
+
+# Resource: aws_opensearch_domain_policy
+
+Allows attaching a policy to an OpenSearch domain while referencing domain attributes (e.g., ARN).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.opensearch_domain import OpensearchDomain
+from imports.aws.opensearch_domain_policy import OpensearchDomainPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = OpensearchDomain(self, "example",
+            domain_name="tf-test",
+            engine_version="OpenSearch_1.1"
+        )
+        main = DataAwsIamPolicyDocument(self, "main",
+            actions=["es:*"],
+            condition=[{
+                "test": "IpAddress",
+                "values": ["127.0.0.1/32"],
+                "variable": "aws:SourceIp"
+            }
+            ],
+            effect="Allow",
+            principals=[{
+                "identifiers": ["*"],
+                "type": "*"
+            }
+            ],
+            resources=["${" + example.arn + "}/*"]
+        )
+        aws_opensearch_domain_policy_main = OpensearchDomainPolicy(self, "main_2",
+            access_policies=Token.as_string(main.json),
+            domain_name=example.domain_name
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_opensearch_domain_policy_main.override_logical_id("main")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `access_policies` - (Optional) IAM policy document specifying the access policies for the domain.
+* `domain_name` - (Required) Name of the domain.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearch_domain_saml_options.html.markdown b/website/docs/cdktf/python/r/opensearch_domain_saml_options.html.markdown
new file mode 100644
index 00000000000..d2af23442aa
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearch_domain_saml_options.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_domain_saml_options"
+description: |-
+  Terraform resource for managing SAML authentication options for an AWS OpenSearch Domain.
+---
+
+ 
+
+# Resource: aws_opensearch_domain_saml_options
+
+Manages SAML authentication options for an AWS OpenSearch Domain.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearch_domain import OpensearchDomain, OpensearchDomainClusterConfig, OpensearchDomainSnapshotOptions
+from imports.aws.opensearch_domain_saml_options import OpensearchDomainSamlOptions, OpensearchDomainSamlOptionsSamlOptions, OpensearchDomainSamlOptionsSamlOptionsIdp
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = OpensearchDomain(self, "example",
+            cluster_config=OpensearchDomainClusterConfig(
+                instance_type="r4.large.search"
+            ),
+            domain_name="example",
+            engine_version="OpenSearch_1.1",
+            snapshot_options=OpensearchDomainSnapshotOptions(
+                automated_snapshot_start_hour=23
+            ),
+            tags={
+                "Domain": "TestDomain"
+            }
+        )
+        aws_opensearch_domain_saml_options_example = OpensearchDomainSamlOptions(self, "example_1",
+            domain_name=example.domain_name,
+            saml_options=OpensearchDomainSamlOptionsSamlOptions(
+                enabled=True,
+                idp=OpensearchDomainSamlOptionsSamlOptionsIdp(
+                    entity_id="https://example.com",
+                    metadata_content=Token.as_string(Fn.file("./saml-metadata.xml"))
+                )
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_opensearch_domain_saml_options_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `domain_name` - (Required) Name of the domain.
+
+The following arguments are optional:
+
+* `saml_options` - (Optional) SAML authentication options for an AWS OpenSearch Domain.
+
+### saml_options
+
+* `enabled` - (Required) Whether SAML authentication is enabled.
+* `idp` - (Optional) Information from your identity provider.
+* `master_backend_role` - (Optional) This backend role from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `master_user_name` - (Optional) This username from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `roles_key` - (Optional) Element of the SAML assertion to use for backend roles. Default is `roles`.
+* `session_timeout_minutes` - (Optional) Duration of a session in minutes after a user logs in. Default is 60. Maximum value is 1,440.
+* `subject_key` - (Optional) Element of the SAML assertion to use for the username. Default is `NameID`.
+
+#### idp
+
+* `entity_id` - (Required) Unique Entity ID of the application in the SAML Identity Provider.
+* `metadata_content` - (Required) Metadata of the SAML application in XML format.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the domain the SAML options are associated with.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearch domain SAML options using the `domain_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpenSearch domain SAML options using the `domain_name`. For example:
+
+```console
+% terraform import aws_opensearch_domain_saml_options.example domain_name
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearch_inbound_connection_accepter.html.markdown b/website/docs/cdktf/python/r/opensearch_inbound_connection_accepter.html.markdown
new file mode 100644
index 00000000000..c2f040340a0
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearch_inbound_connection_accepter.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_inbound_connection_accepter"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Inbound Connection Accepter.
+---
+
+ 
+
+# Resource: aws_opensearch_inbound_connection_accepter
+
+Manages an [AWS OpenSearch Inbound Connection Accepter](https://docs.aws.amazon.com/opensearch-service/latest/APIReference/API_AcceptInboundConnection.html). If connecting domains from different AWS accounts, ensure that the accepter is configured to use the AWS account where the _remote_ OpenSearch domain exists.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.opensearch_inbound_connection_accepter import OpensearchInboundConnectionAccepter
+from imports.aws.opensearch_outbound_connection import OpensearchOutboundConnection, OpensearchOutboundConnectionLocalDomainInfo, OpensearchOutboundConnectionRemoteDomainInfo
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_region_current = DataAwsRegion(self, "current_1")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_region_current.override_logical_id("current")
+        foo = OpensearchOutboundConnection(self, "foo",
+            connection_alias="outbound_connection",
+            local_domain_info=OpensearchOutboundConnectionLocalDomainInfo(
+                domain_name=local_domain.domain_name,
+                owner_id=Token.as_string(current.account_id),
+                region=Token.as_string(data_aws_region_current.name)
+            ),
+            remote_domain_info=OpensearchOutboundConnectionRemoteDomainInfo(
+                domain_name=remote_domain.domain_name,
+                owner_id=Token.as_string(current.account_id),
+                region=Token.as_string(data_aws_region_current.name)
+            )
+        )
+        aws_opensearch_inbound_connection_accepter_foo = OpensearchInboundConnectionAccepter(self, "foo_3",
+            connection_id=foo.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_opensearch_inbound_connection_accepter_foo.override_logical_id("foo")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `connection_id` - (Required, Forces new resource) Specifies the ID of the connection to accept.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the connection to accept.
+* `connection_status` - Status of the connection request.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS OpenSearch Inbound Connection Accepters using the Inbound Connection ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AWS OpenSearch Inbound Connection Accepters using the Inbound Connection ID. For example:
+
+```console
+% terraform import aws_opensearch_inbound_connection_accepter.foo connection-id
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearch_outbound_connection.html.markdown b/website/docs/cdktf/python/r/opensearch_outbound_connection.html.markdown
new file mode 100644
index 00000000000..50812f321c8
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearch_outbound_connection.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_outbound_connection"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Outbound Connection.
+---
+
+ 
+
+# Resource: aws_opensearch_outbound_connection
+
+Manages an AWS OpenSearch Outbound Connection.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.opensearch_outbound_connection import OpensearchOutboundConnection, OpensearchOutboundConnectionLocalDomainInfo, OpensearchOutboundConnectionRemoteDomainInfo
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_region_current = DataAwsRegion(self, "current_1")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_region_current.override_logical_id("current")
+        OpensearchOutboundConnection(self, "foo",
+            connection_alias="outbound_connection",
+            local_domain_info=OpensearchOutboundConnectionLocalDomainInfo(
+                domain_name=local_domain.domain_name,
+                owner_id=Token.as_string(current.account_id),
+                region=Token.as_string(data_aws_region_current.name)
+            ),
+            remote_domain_info=OpensearchOutboundConnectionRemoteDomainInfo(
+                domain_name=remote_domain.domain_name,
+                owner_id=Token.as_string(current.account_id),
+                region=Token.as_string(data_aws_region_current.name)
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `connection_alias` - (Required, Forces new resource) Specifies the connection alias that will be used by the customer for this connection.
+* `local_domain_info` - (Required, Forces new resource) Configuration block for the local OpenSearch domain.
+* `remote_domain_info` - (Required, Forces new resource) Configuration block for the remote OpenSearch domain.
+
+### local_domain_info
+
+* `owner_id` - (Required, Forces new resource) The Account ID of the owner of the local domain.
+* `domain_name` - (Required, Forces new resource) The name of the local domain.
+* `region` - (Required, Forces new resource) The region of the local domain.
+
+### remote_domain_info
+
+* `owner_id` - (Required, Forces new resource) The Account ID of the owner of the remote domain.
+* `domain_name` - (Required, Forces new resource) The name of the remote domain.
+* `region` - (Required, Forces new resource) The region of the remote domain.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the connection.
+* `connection_status` - Status of the connection request.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS OpenSearch Outbound Connections using the Outbound Connection ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AWS OpenSearch Outbound Connections using the Outbound Connection ID. For example:
+
+```console
+% terraform import aws_opensearch_outbound_connection.foo connection-id
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearchserverless_access_policy.html.markdown b/website/docs/cdktf/python/r/opensearchserverless_access_policy.html.markdown
new file mode 100644
index 00000000000..aba0c38206b
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearchserverless_access_policy.html.markdown
@@ -0,0 +1,167 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_access_policy"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless Access Policy.
+---
+
+ 
+
+# Resource: aws_opensearchserverless_access_policy
+
+Terraform resource for managing an AWS OpenSearch Serverless Access Policy.
See AWS documentation for [data access policies](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-data-access.html) and [supported data access policy permissions](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-data-access.html#serverless-data-supported-permissions). + +## Example Usage + +### Grant all collection and index permissions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.opensearchserverless_access_policy import OpensearchserverlessAccessPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + OpensearchserverlessAccessPolicy(self, "example", + description="read and write permissions", + name="example", + policy=Token.as_string( + Fn.jsonencode([{ + "Principal": [current.arn], + "Rules": [{ + "Permission": ["aoss:*"], + "Resource": ["index/example-collection/*"], + "ResourceType": "index" + }, { + "Permission": ["aoss:*"], + "Resource": ["collection/example-collection"], + "ResourceType": "collection" + } + ] + } + ])), + type="data" + ) +``` + +### Grant read-only collection and index permissions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.opensearchserverless_access_policy import OpensearchserverlessAccessPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + OpensearchserverlessAccessPolicy(self, "example", + description="read-only permissions", + name="example", + policy=Token.as_string( + Fn.jsonencode([{ + "Principal": [current.arn], + "Rules": [{ + "Permission": ["aoss:DescribeIndex", "aoss:ReadDocument"], + "Resource": ["index/example-collection/*"], + "ResourceType": "index" + }, { + "Permission": ["aoss:DescribeCollectionItems"], + "Resource": ["collection/example-collection"], + "ResourceType": "collection" + } + ] + } + ])), + type="data" + ) +``` + +### Grant SAML identity permissions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.opensearchserverless_access_policy import OpensearchserverlessAccessPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchserverlessAccessPolicy(self, "example",
+            description="saml permissions",
+            name="example",
+            policy=Token.as_string(
+                Fn.jsonencode([{
+                    "Principal": ["saml/123456789012/myprovider/user/Annie", "saml/123456789012/anotherprovider/group/Accounting"
+                    ],
+                    "Rules": [{
+                        "Permission": ["aoss:*"],
+                        "Resource": ["index/example-collection/*"],
+                        "ResourceType": "index"
+                    }, {
+                        "Permission": ["aoss:*"],
+                        "Resource": ["collection/example-collection"],
+                        "ResourceType": "collection"
+                    }
+                    ]
+                }
+                ])),
+            type="data"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the policy.
+* `policy` - (Required) JSON policy document to use as the content for the new policy.
+* `type` - (Required) Type of access policy. Must be `data`.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the policy. Typically used to store information about the permissions defined in the policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `policy_version` - Version of the policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Access Policy using the `name` and `type` arguments separated by a slash (`/`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpenSearchServerless Access Policy using the `name` and `type` arguments separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_opensearchserverless_access_policy.example example/data
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearchserverless_collection.html.markdown b/website/docs/cdktf/python/r/opensearchserverless_collection.html.markdown
new file mode 100644
index 00000000000..51e45106c8b
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearchserverless_collection.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_collection"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless Collection.
+---
+
+ 
+
+# Resource: aws_opensearchserverless_collection
+
+Terraform resource for managing an AWS OpenSearch Serverless Collection.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearchserverless_collection import OpensearchserverlessCollection
+from imports.aws.opensearchserverless_security_policy import OpensearchserverlessSecurityPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = OpensearchserverlessSecurityPolicy(self, "example",
+            name="example",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "AWSOwnedKey": True,
+                    "Rules": [{
+                        "Resource": ["collection/example"],
+                        "ResourceType": "collection"
+                    }
+                    ]
+                })),
+            type="encryption"
+        )
+        aws_opensearchserverless_collection_example = OpensearchserverlessCollection(self, "example_1",
+            depends_on=[example],
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_opensearchserverless_collection_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the collection.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the collection.
+* `tags` - (Optional) A map of tags to assign to the collection. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `type` - (Optional) Type of collection. One of `SEARCH` or `TIMESERIES`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the collection.
+* `collection_endpoint` - Collection-specific endpoint used to submit index, search, and data upload requests to an OpenSearch Serverless collection.
+* `dashboard_endpoint` - Collection-specific endpoint used to access OpenSearch Dashboards.
+* `kms_key_arn` - The ARN of the Amazon Web Services KMS key used to encrypt the collection.
+* `id` - Unique identifier for the collection.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Collection using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpenSearchServerless Collection using the `id`. For example:
+
+```console
+% terraform import aws_opensearchserverless_collection.example example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearchserverless_security_config.html.markdown b/website/docs/cdktf/python/r/opensearchserverless_security_config.html.markdown
new file mode 100644
index 00000000000..a7ef2c3c6ae
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearchserverless_security_config.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_security_config"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless Security Config.
+---
+
+ 
+
+# Resource: aws_opensearchserverless_security_config
+
+Terraform resource for managing an AWS OpenSearch Serverless Security Config.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearchserverless_security_config import OpensearchserverlessSecurityConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchserverlessSecurityConfig(self, "example",
+            name="example",
+            saml_options=[{
+                "metadata": Token.as_string(Fn.file("${path.module}/idp-metadata.xml"))
+            }
+            ],
+            type="saml"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required, Forces new resource) Name of the security configuration.
+* `saml_options` - (Required) Configuration block for SAML options.
+* `type` - (Required) Type of configuration. Must be `saml`.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the security configuration.
+
+### saml_options
+
+* `group_attribute` - (Optional) Group attribute for this SAML integration.
+* `metadata` - (Required) The XML IdP metadata file generated from your identity provider.
+* `session_timeout` - (Optional) Session timeout, in minutes. Minimum is 5 minutes and maximum is 720 minutes (12 hours). Default is 60 minutes.
+* `user_attribute` - (Optional) User attribute for this SAML integration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `config_version` - Version of the configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Security Config using the `name` argument prefixed with the string `saml/account_id/`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpenSearchServerless Security Config using the `name` argument prefixed with the string `saml/account_id/`. For example:
+
+```console
+% terraform import aws_opensearchserverless_security_config.example saml/123456789012/example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearchserverless_security_policy.html.markdown b/website/docs/cdktf/python/r/opensearchserverless_security_policy.html.markdown
new file mode 100644
index 00000000000..dc0f04d09d2
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearchserverless_security_policy.html.markdown
@@ -0,0 +1,264 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_security_policy"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless Security Policy.
+---
+
+ 
+
+# Resource: aws_opensearchserverless_security_policy
+
+Terraform resource for managing an AWS OpenSearch Serverless Security Policy.
See AWS documentation for [encryption policies](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-encryption.html#serverless-encryption-policies) and [network policies](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-network.html#serverless-network-policies). + +## Example Usage + +### Encryption Security Policy + +#### Applies to a single collection + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opensearchserverless_security_policy import OpensearchserverlessSecurityPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpensearchserverlessSecurityPolicy(self, "example", + description="encryption security policy for example-collection", + name="example", + policy=Token.as_string( + Fn.jsonencode({ + "AWSOwnedKey": True, + "Rules": [{ + "Resource": ["collection/example-collection"], + "ResourceType": "collection" + } + ] + })), + type="encryption" + ) +``` + +#### Applies to multiple collections + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opensearchserverless_security_policy import OpensearchserverlessSecurityPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpensearchserverlessSecurityPolicy(self, "example", + description="encryption security policy for collections that begin with \\\"example\\\"", + name="example", + policy=Token.as_string( + Fn.jsonencode({ + "AWSOwnedKey": True, + "Rules": [{ + "Resource": ["collection/example*"], + "ResourceType": "collection" + } + ] + })), + type="encryption" + ) +``` + +#### Using a customer managed key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opensearchserverless_security_policy import OpensearchserverlessSecurityPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpensearchserverlessSecurityPolicy(self, "example", + description="encryption security policy using customer KMS key", + name="example", + policy=Token.as_string( + Fn.jsonencode({ + "AWSOwnedKey": False, + "KmsARN": "arn:aws:kms:us-east-1:123456789012:key/93fd6da4-a317-4c17-bfe9-382b5d988b36", + "Rules": [{ + "Resource": ["collection/customer-managed-key-collection"], + "ResourceType": "collection" + } + ] + })), + type="encryption" + ) +``` + +### Network Security Policy + +#### Allow public access to the collection endpoint and the Dashboards endpoint + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.opensearchserverless_security_policy import OpensearchserverlessSecurityPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchserverlessSecurityPolicy(self, "example",
+            description="Public access",
+            name="example",
+            policy=Token.as_string(
+                Fn.jsonencode([{
+                    "AllowFromPublic": True,
+                    "Description": "Public access to collection and Dashboards endpoint for example collection",
+                    "Rules": [{
+                        "Resource": ["collection/example-collection"],
+                        "ResourceType": "collection"
+                    }, {
+                        "Resource": ["collection/example-collection"],
+                        "ResourceType": "dashboard"
+                    }
+                    ]
+                }
+                ])),
+            type="network"
+        )
+```
+
+#### Allow VPC access to the collection endpoint and the Dashboards endpoint
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearchserverless_security_policy import OpensearchserverlessSecurityPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchserverlessSecurityPolicy(self, "example",
+            description="VPC access",
+            name="example",
+            policy=Token.as_string(
+                Fn.jsonencode([{
+                    "AllowFromPublic": False,
+                    "Description": "VPC access to collection and Dashboards endpoint for example collection",
+                    "Rules": [{
+                        "Resource": ["collection/example-collection"],
+                        "ResourceType": "collection"
+                    }, {
+                        "Resource": ["collection/example-collection"],
+                        "ResourceType": "dashboard"
+                    }
+                    ],
+                    "SourceVPCEs": ["vpce-050f79086ee71ac05"]
+                }
+                ])),
+            type="network"
+        )
+```
+
+#### Mixed access for different collections
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearchserverless_security_policy import OpensearchserverlessSecurityPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchserverlessSecurityPolicy(self, "example",
+            description="Mixed access for marketing and sales",
+            name="example",
+            policy=Token.as_string(
+                Fn.jsonencode([{
+                    "AllowFromPublic": False,
+                    "Description": "Marketing access",
+                    "Rules": [{
+                        "Resource": ["collection/marketing*"],
+                        "ResourceType": "collection"
+                    }, {
+                        "Resource": ["collection/marketing*"],
+                        "ResourceType": "dashboard"
+                    }
+                    ],
+                    "SourceVPCEs": ["vpce-050f79086ee71ac05"]
+                }, {
+                    "AllowFromPublic": True,
+                    "Description": "Sales access",
+                    "Rules": [{
+                        "Resource": ["collection/finance"],
+                        "ResourceType": "collection"
+                    }
+                    ]
+                }
+                ])),
+            type="network"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the policy.
+* `policy` - (Required) JSON policy document to use as the content for the new policy.
+* `type` - (Required) Type of security policy. One of `encryption` or `network`.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the policy. Typically used to store information about the permissions defined in the policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `policy_version` - Version of the policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Security Policy using the `name` and `type` arguments separated by a slash (`/`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpenSearchServerless Security Policy using the `name` and `type` arguments separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_opensearchserverless_security_policy.example example/encryption
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opensearchserverless_vpc_endpoint.html.markdown b/website/docs/cdktf/python/r/opensearchserverless_vpc_endpoint.html.markdown
new file mode 100644
index 00000000000..3ece40072f7
--- /dev/null
+++ b/website/docs/cdktf/python/r/opensearchserverless_vpc_endpoint.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_vpc_endpoint"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless VPC Endpoint.
+---
+
+ 
+
+# Resource: aws_opensearchserverless_vpc_endpoint
+
+Terraform resource for managing an AWS OpenSearch Serverless VPC Endpoint.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opensearchserverless_vpc_endpoint import OpensearchserverlessVpcEndpoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpensearchserverlessVpcEndpoint(self, "example",
+            name="myendpoint",
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the interface endpoint.
+* `subnet_ids` - (Required) One or more subnet IDs from which you'll access OpenSearch Serverless. Up to 6 subnets can be provided.
+* `vpc_id` - (Required) ID of the VPC from which you'll access OpenSearch Serverless.
+
+The following arguments are optional:
+
+* `security_group_ids` - (Optional) One or more security groups that define the ports, protocols, and sources for inbound traffic that you are authorizing into your endpoint. Up to 5 security groups can be provided.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier of the VPC endpoint.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless VPC Endpoints using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpenSearchServerless VPC Endpoints using the `id`. For example:
+
+```console
+% terraform import aws_opensearchserverless_vpc_endpoint.example vpce-8012925589
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_application.html.markdown b/website/docs/cdktf/python/r/opsworks_application.html.markdown
new file mode 100644
index 00000000000..4c3549abccf
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_application.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_application"
+description: |-
+  Provides an OpsWorks application resource.
+---
+
+ 
+
+# Resource: aws_opsworks_application
+
+Provides an OpsWorks application resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_application import OpsworksApplication, OpsworksApplicationAppSource, OpsworksApplicationEnvironment, OpsworksApplicationSslConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksApplication(self, "foo-app",
+            app_source=[OpsworksApplicationAppSource(
+                revision="master",
+                type="git",
+                url="https://github.com/example.git"
+            )
+            ],
+            auto_bundle_on_deploy=Token.as_string(True),
+            description="This is a Rails application",
+            document_root="public",
+            domains=["example.com", "sub.example.com"],
+            enable_ssl=True,
+            environment=[OpsworksApplicationEnvironment(
+                key="key",
+                secure=False,
+                value="value"
+            )
+            ],
+            name="foobar application",
+            rails_env="staging",
+            short_name="foobar",
+            ssl_configuration=[OpsworksApplicationSslConfiguration(
+                certificate=Token.as_string(Fn.file("./foobar.crt")),
+                private_key=Token.as_string(Fn.file("./foobar.key"))
+            )
+            ],
+            stack_id=main.id,
+            type="rails"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A human-readable name for the application.
+* `short_name` - (Required) A short, machine-readable name for the application. This can only be defined on resource creation and is ignored on resource update.
+* `stack_id` - (Required) ID of the stack the application will belong to.
+* `type` - (Required) OpsWorks application type. One of `aws-flow-ruby`, `java`, `rails`, `php`, `nodejs`, `static` or `other`.
+* `description` - (Optional) A description of the app.
+* `environment` - (Optional) Object to define environment variables. Object is described below.
+* `enable_ssl` - (Optional) Whether to enable SSL for the app. This must be set in order to let `ssl_configuration.private_key`, `ssl_configuration.certificate` and `ssl_configuration.chain` take effect.
+* `ssl_configuration` - (Optional) The SSL configuration of the app. Object is described below.
+* `app_source` - (Optional) SCM configuration of the app as described below.
+* `data_source_arn` - (Optional) The data source's ARN.
+* `data_source_type` - (Optional) The data source's type, one of `AutoSelectOpsworksMysqlInstance`, `OpsworksMysqlInstance`, or `RdsDbInstance`.
+* `data_source_database_name` - (Optional) The database name.
+* `domains` - (Optional) A list of virtual host aliases.
+* `document_root` - (Optional) Subfolder for the document root for application of type `rails`.
+* `auto_bundle_on_deploy` - (Optional) Run bundle install when deploying for application of type `rails`.
+* `rails_env` - (Required if `type` = `rails`) The name of the Rails environment for application of type `rails`.
+* `aws_flow_ruby_settings` - (Optional) Specify activity and workflow workers for your app using the aws-flow gem.
+
+An `app_source` block supports the following arguments (can only be defined once per resource):
+
+* `type` - (Required) The type of source to use. For example, "archive".
+* `url` - (Required) The URL where the app resource can be found.
+* `username` - (Optional) Username to use when authenticating to the source.
+* `password` - (Optional) Password to use when authenticating to the source. Terraform cannot perform drift detection of this configuration.
+* `ssh_key` - (Optional) SSH key to use when authenticating to the source. Terraform cannot perform drift detection of this configuration.
+* `revision` - (Optional) For sources that are version-aware, the revision to use.
+
+An `environment` block supports the following arguments:
+
+* `key` - (Required) Variable name.
+* `value` - (Required) Variable value.
+* `secure` - (Optional) Set visibility of the variable value to `true` or `false`.
+
+A `ssl_configuration` block supports the following arguments (can only be defined once per resource):
+
+* `private_key` - (Required) The private key; the contents of the certificate's domain.key file.
+* `certificate` - (Required) The contents of the certificate's domain.crt file.
+* `chain` - (Optional) Can be used to specify an intermediate certificate authority key or client authentication.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the application.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks Application using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpsWorks Application using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_application.test <id>
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_custom_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_custom_layer.html.markdown
new file mode 100644
index 00000000000..c6a58f05cdc
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_custom_layer.html.markdown
@@ -0,0 +1,144 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_custom_layer"
+description: |-
+  Provides an OpsWorks custom layer resource.
+---
+
+ 
+
+# Resource: aws_opsworks_custom_layer
+
+Provides an OpsWorks custom layer resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_custom_layer import OpsworksCustomLayer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksCustomLayer(self, "custlayer",
+            name="My Awesome Custom Layer",
+            short_name="awesome",
+            stack_id=main.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A human-readable name for the layer.
+* `short_name` - (Required) A short, machine-readable name for the layer, which will be used to identify it in the Chef node JSON.
+* `stack_id` - (Required) ID of the stack the layer will belong to.
+* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `cloudwatch_configuration` - (Optional) CloudWatch Logs configuration for the layer's instances. See [Cloudwatch Configuration](#cloudwatch-configuration).
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer
+* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `load_based_auto_scaling` - (Optional) Load-based auto scaling configuration. See [Load Based AutoScaling](#load-based-autoscaling)
+* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances.
+* `ebs_volume` - (Optional) Will create an EBS volume and connect it to the layer's instances. See [EBS Volume](#ebs-volume).
+* `custom_json` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `custom_configure_recipes`
+* `custom_deploy_recipes`
+* `custom_setup_recipes`
+* `custom_shutdown_recipes`
+* `custom_undeploy_recipes`
+
+### EBS Volume
+
+* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `number_of_disks` - (Required) The number of disks to use for the EBS volume.
+* `raid_level` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+* `encrypted` - (Optional) Encrypt the volume.
+
+### Cloudwatch Configuration
+
+* `enabled` - (Optional) Whether CloudWatch Logs is enabled for the layer's instances.
+* `log_streams` - (Optional) A block that specifies the CloudWatch log streams to create for the layer. See [Log Streams](#log-streams).
+
+#### Log Streams
+
+* `file` - (Required) Specifies log files that you want to push to CloudWatch Logs. File can point to a specific file or multiple files (by using wild card characters such as /var/log/system.log*).
+* `log_group_name` - (Required) Specifies the destination log group. A log group is created automatically if it doesn't already exist.
+* `batch_count` - (Optional) Specifies the max number of log events in a batch, up to `10000`. The default value is `1000`.
+* `batch_size` - (Optional) Specifies the maximum size of log events in a batch, in bytes, up to `1048576` bytes. The default value is `32768` bytes.
+* `buffer_duration` - (Optional) Specifies the time duration for the batching of log events. The minimum value is `5000` and default value is `5000`.
+* `datetime_format` - (Optional) Specifies how the timestamp is extracted from logs. For more information, see the [CloudWatch Logs Agent Reference](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/AgentReference.html).
+* `encoding` - (Optional) Specifies the encoding of the log file so that the file can be read correctly. The default is `utf_8`.
+* `file_fingerprint_lines` - (Optional) Specifies the range of lines for identifying a file. The valid values are one number, or two dash-delimited numbers, such as `1`, `2-5`. The default value is `1`.
+* `initial_position` - (Optional) Specifies where to start to read data (`start_of_file` or `end_of_file`). The default is `start_of_file`.
+* `multiline_start_pattern` - (Optional) Specifies the pattern for identifying the start of a log message.
+* `time_zone` - (Optional) Specifies the time zone of log event time stamps.
+
+### Load Based Autoscaling
+
+* `downscaling` - (Optional) The downscaling settings, as defined below, used for load-based autoscaling
+* `enable` - (Optional) Whether load-based auto scaling is enabled for the layer.
+* `upscaling` - (Optional) The upscaling settings, as defined below, used for load-based autoscaling
+
+The `downscaling` and `upscaling` blocks support the following arguments:
+
+Though the three thresholds are optional, at least one threshold must be set when using load-based autoscaling.
+
+* `alarms` - (Optional) Custom Cloudwatch auto scaling alarms, to be used as thresholds. This parameter takes a list of up to five alarm names, which are case sensitive and must be in the same region as the stack.
+* `cpu_threshold` - (Optional) The CPU utilization threshold, as a percent of the available CPU. A value of -1 disables the threshold.
+* `ignore_metrics_time` - (Optional) The amount of time (in minutes) after a scaling event occurs that AWS OpsWorks Stacks should ignore metrics and suppress additional scaling events.
+* `instance_count` - (Optional) The number of instances to add or remove when the load exceeds a threshold.
+* `load_threshold` - (Optional) The load threshold. A value of -1 disables the threshold.
+* `memory_threshold` - (Optional) The memory utilization threshold, as a percent of the available memory. A value of -1 disables the threshold.
+* `thresholds_wait_time` - (Optional) The amount of time, in minutes, that the load must exceed a threshold before more instances are added or removed.
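+
+As an illustration, here is a minimal sketch (written inside a stack's `__init__`, assuming a stack referenced as `main`) that wires the blocks above together. The nested block class names, and whether each block is a single struct or a one-element list, are assumptions based on the usual `cdktf get` binding conventions; verify them against your generated imports:
+
+```python
+# Hypothetical sketch - nested block class names are assumed from the
+# binding's <Resource><BlockName> convention, not taken from the docs above.
+from imports.aws.opsworks_custom_layer import (OpsworksCustomLayer,
+    OpsworksCustomLayerCloudwatchConfiguration,
+    OpsworksCustomLayerCloudwatchConfigurationLogStreams,
+    OpsworksCustomLayerLoadBasedAutoScaling,
+    OpsworksCustomLayerLoadBasedAutoScalingUpscaling)
+
+OpsworksCustomLayer(self, "monitored",
+    name="My Monitored Custom Layer",
+    short_name="monitored",
+    stack_id=main.id,
+    # Ship /var/log/messages from the layer's instances to CloudWatch Logs.
+    cloudwatch_configuration=OpsworksCustomLayerCloudwatchConfiguration(
+        enabled=True,
+        log_streams=[OpsworksCustomLayerCloudwatchConfigurationLogStreams(
+            file="/var/log/messages",
+            log_group_name="/opsworks/monitored-layer"
+        )
+        ]
+    ),
+    # Add one instance when CPU stays above 80% for five minutes.
+    load_based_auto_scaling=OpsworksCustomLayerLoadBasedAutoScaling(
+        enable=True,
+        upscaling=OpsworksCustomLayerLoadBasedAutoScalingUpscaling(
+            cpu_threshold=80,
+            instance_count=1,
+            thresholds_wait_time=5
+        )
+    )
+)
+```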
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name(ARN) of the layer.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks Custom Layers using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpsWorks Custom Layers using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_custom_layer.bar 00000000-0000-0000-0000-000000000000
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_ecs_cluster_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_ecs_cluster_layer.html.markdown
new file mode 100644
index 00000000000..0e28eef1035
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_ecs_cluster_layer.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_ecs_cluster_layer"
+description: |-
+  Provides an OpsWorks ECS Cluster layer resource.
+---
+
+
+
+# Resource: aws_opsworks_ecs_cluster_layer
+
+Provides an OpsWorks ECS Cluster layer resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_ecs_cluster_layer import OpsworksEcsClusterLayer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksEcsClusterLayer(self, "example",
+            ecs_cluster_arn=Token.as_string(aws_ecs_cluster_example.arn),
+            stack_id=Token.as_string(aws_opsworks_stack_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stack_id` - (Required) ID of the stack the layer will belong to.
+* `ecs_cluster_arn` - (Required) The ECS Cluster ARN of the layer.
+* `name` - (Optional) A human-readable name for the layer.
+* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances. +* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `custom_json` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A mapping of tags to assign to the resource. + +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `custom_configure_recipes` +* `custom_deploy_recipes` +* `custom_setup_recipes` +* `custom_shutdown_recipes` +* `custom_undeploy_recipes` + +An `ebs_volume` block supports the following arguments: + +* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `number_of_disks` - (Required) The number of disks to use for the EBS volume. +* `raid_level` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opsworks_ganglia_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_ganglia_layer.html.markdown new file mode 100644 index 00000000000..23d9f471259 --- /dev/null +++ b/website/docs/cdktf/python/r/opsworks_ganglia_layer.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_ganglia_layer" +description: |- + Provides an OpsWorks Ganglia layer resource. +--- + + + +# Resource: aws_opsworks_ganglia_layer + +Provides an OpsWorks Ganglia layer resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opsworks_ganglia_layer import OpsworksGangliaLayer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpsworksGangliaLayer(self, "monitor", + password="foobarbaz", + stack_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stack_id` - (Required) ID of the stack the layer will belong to. +* `password` - (Required) The password to use for Ganglia. +* `name` - (Optional) A human-readable name for the layer. +* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. 
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer
+* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `url` - (Optional) The URL path to use for Ganglia. Defaults to "/ganglia".
+* `username` - (Optional) The username to use for Ganglia. Defaults to "opsworks".
+* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances.
+* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `custom_json` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `custom_configure_recipes`
+* `custom_deploy_recipes`
+* `custom_setup_recipes`
+* `custom_shutdown_recipes`
+* `custom_undeploy_recipes`
+
+An `ebs_volume` block supports the following arguments:
+
+* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `number_of_disks` - (Required) The number of disks to use for the EBS volume.
+* `raid_level` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name(ARN) of the layer.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_haproxy_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_haproxy_layer.html.markdown
new file mode 100644
index 00000000000..2f0e8028866
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_haproxy_layer.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_haproxy_layer"
+description: |-
+  Provides an OpsWorks HAProxy layer resource.
+---
+
+
+
+# Resource: aws_opsworks_haproxy_layer
+
+Provides an OpsWorks HAProxy layer resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_haproxy_layer import OpsworksHaproxyLayer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksHaproxyLayer(self, "lb",
+            stack_id=main.id,
+            stats_password="foobarbaz"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stack_id` - (Required) ID of the stack the layer will belong to.
+* `stats_password` - (Required) The password to use for HAProxy stats.
+* `name` - (Optional) A human-readable name for the layer.
+* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `healthcheck_method` - (Optional) HTTP method to use for instance healthchecks. Defaults to "OPTIONS".
+* `healthcheck_url` - (Optional) URL path to use for instance healthchecks. Defaults to "/".
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer
+* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `stats_enabled` - (Optional) Whether to enable HAProxy stats.
+* `stats_url` - (Optional) The HAProxy stats URL. Defaults to "/haproxy?stats".
+* `stats_user` - (Optional) The username for HAProxy stats. Defaults to "opsworks".
+* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances.
+* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `custom_json` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
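+
+For instance, a minimal sketch (inside a stack's `__init__`, assuming a stack referenced as `main` as in the example above) that turns on the stats endpoint with a non-default URL and user; only scalar arguments are needed here:
+
+```python
+from imports.aws.opsworks_haproxy_layer import OpsworksHaproxyLayer
+
+# Expose the HAProxy statistics page at a custom path with its own user.
+OpsworksHaproxyLayer(self, "lb_with_stats",
+    stack_id=main.id,
+    stats_enabled=True,
+    stats_url="/lb-stats",
+    stats_user="metrics",
+    stats_password="change-me"
+)
+```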
+ +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `custom_configure_recipes` +* `custom_deploy_recipes` +* `custom_setup_recipes` +* `custom_shutdown_recipes` +* `custom_undeploy_recipes` + +An `ebs_volume` block supports the following arguments: + +* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `number_of_disks` - (Required) The number of disks to use for the EBS volume. +* `raid_level` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opsworks_instance.html.markdown b/website/docs/cdktf/python/r/opsworks_instance.html.markdown new file mode 100644 index 00000000000..c1fa48a0263 --- /dev/null +++ b/website/docs/cdktf/python/r/opsworks_instance.html.markdown @@ -0,0 +1,168 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_instance" +description: |- + Provides an OpsWorks instance resource. +--- + + + +# Resource: aws_opsworks_instance + +Provides an OpsWorks instance resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opsworks_instance import OpsworksInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpsworksInstance(self, "my-instance", + instance_type="t2.micro", + layer_ids=[my_layer.id], + os="Amazon Linux 2015.09", + stack_id=main.id, + state="stopped" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `layer_ids` - (Required) List of the layers the instance will belong to. +* `stack_id` - (Required) Identifier of the stack the instance will belong to. + +The following arguments are optional: + +* `agent_version` - (Optional) OpsWorks agent to install. Default is `INHERIT`. +* `ami_id` - (Optional) AMI to use for the instance. If an AMI is specified, `os` must be `Custom`. +* `architecture` - (Optional) Machine architecture for created instances. Valid values are `x86_64` or `i386`. The default is `x86_64`. +* `auto_scaling_type` - (Optional) Creates load-based or time-based instances. Valid values are `load`, `timer`. +* `availability_zone` - (Optional) Name of the availability zone where instances will be created by default. +* `delete_ebs` - (Optional) Whether to delete EBS volume on deletion. Default is `true`. +* `delete_eip` - (Optional) Whether to delete the Elastic IP on deletion. 
+* `ebs_block_device` - (Optional) Configuration block for additional EBS block devices to attach to the instance. See [Block Devices](#block-devices) below.
+* `ebs_optimized` - (Optional) Whether the launched EC2 instance will be EBS-optimized.
+* `ecs_cluster_arn` - (Optional) ECS cluster's ARN for container instances.
+* `elastic_ip` - (Optional) Instance Elastic IP address.
+* `ephemeral_block_device` - (Optional) Configuration block for ephemeral (also known as "Instance Store") volumes on the instance. See [Block Devices](#block-devices) below.
+* `hostname` - (Optional) Instance's host name.
+* `infrastructure_class` - (Optional) For registered instances, infrastructure class: ec2 or on-premises.
+* `install_updates_on_boot` - (Optional) Controls whether to install OS and package updates when the instance boots. Default is `true`.
+* `instance_profile_arn` - (Optional) ARN of the instance's IAM profile.
+* `instance_type` - (Optional) Type of instance to start.
+* `os` - (Optional) Name of operating system that will be installed.
+* `root_block_device` - (Optional) Configuration block for the root block device of the instance. See [Block Devices](#block-devices) below.
+* `root_device_type` - (Optional) Name of the type of root device instances will have by default. Valid values are `ebs` or `instance-store`.
+* `ssh_key_name` - (Optional) Name of the SSH keypair that instances will have by default.
+* `state` - (Optional) Desired state of the instance. Valid values are `running` or `stopped`.
+* `subnet_id` - (Optional) Subnet ID to attach to.
+* `tenancy` - (Optional) Instance tenancy to use. Valid values are `default`, `dedicated` or `host`.
+* `virtualization_type` - (Optional) Keyword to choose what virtualization mode created instances will use. Valid values are `paravirtual` or `hvm`.
+
+## Block devices
+
+Each of the `*_block_device` attributes controls a portion of the AWS
+Instance's "Block Device Mapping". It's a good idea to familiarize yourself with [AWS's Block Device
+Mapping docs](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html)
+to understand the implications of using these attributes.
+
+### `ebs_block_device`
+
+* `delete_on_termination` - (Optional) Whether the volume should be destroyed on instance termination. Default is `true`.
+* `device_name` - (Required) Name of the device to mount.
+* `iops` - (Optional) Amount of provisioned [IOPS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). This must be set with a `volume_type` of `io1`.
+* `snapshot_id` - (Optional) Snapshot ID to mount.
+* `volume_size` - (Optional) Size of the volume in gigabytes.
+* `volume_type` - (Optional) Type of volume. Valid values are `standard`, `gp2`, or `io1`. Default is `standard`.
+
+Modifying any `ebs_block_device` currently requires resource replacement.
+
+### `ephemeral_block_device`
+
+* `device_name` - Name of the block device to mount on the instance.
+* `virtual_name` - The [Instance Store Device Name](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#InstanceStoreDeviceNames) (e.g., `ephemeral0`).
+
+Each AWS Instance type has a different set of Instance Store block devices
+available for attachment. AWS [publishes a
+list](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#StorageOnInstanceTypes)
+of which ephemeral devices are available on each type. The devices are always
+identified by the `virtual_name` in the format `ephemeral{0..N}`.
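+
+To make the mapping concrete, here is a minimal sketch (inside a stack's `__init__`) of an instance declaring one extra EBS volume and one instance-store volume. The nested block class names are assumed from the binding's usual naming convention, and `my_layer` and `main` stand in for resources defined elsewhere:
+
+```python
+# Hypothetical sketch - block class names assumed from the binding's
+# <Resource><BlockName> convention used elsewhere in these docs.
+from imports.aws.opsworks_instance import (OpsworksInstance,
+    OpsworksInstanceEbsBlockDevice, OpsworksInstanceEphemeralBlockDevice)
+
+OpsworksInstance(self, "with_volumes",
+    instance_type="t2.micro",
+    layer_ids=[my_layer.id],
+    stack_id=main.id,
+    # Extra 100 GiB gp2 volume attached at /dev/sdf.
+    ebs_block_device=[OpsworksInstanceEbsBlockDevice(
+        device_name="/dev/sdf",
+        volume_size=100,
+        volume_type="gp2"
+    )
+    ],
+    # First instance-store volume, if the instance type provides one.
+    ephemeral_block_device=[OpsworksInstanceEphemeralBlockDevice(
+        device_name="/dev/sdg",
+        virtual_name="ephemeral0"
+    )
+    ]
+)
```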
+
+### `root_block_device`
+
+* `delete_on_termination` - (Optional) Whether the volume should be destroyed on instance termination. Default is `true`.
+* `iops` - (Optional) Amount of provisioned [IOPS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). This must be set with a `volume_type` of `io1`.
+* `volume_size` - (Optional) Size of the volume in gigabytes.
+* `volume_type` - (Optional) Type of volume. Valid values are `standard`, `gp2`, or `io1`. Default is `standard`.
+
+Modifying any of the `root_block_device` settings requires resource
+replacement.
+
+~> **NOTE:** Currently, changes to `*_block_device` configuration of _existing_
+resources cannot be automatically detected by Terraform. After making updates
+to block device configuration, resource recreation can be manually triggered by
+using the [`taint` command](https://www.terraform.io/docs/commands/taint.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `created_at` - Time that the instance was created.
+* `ec2_instance_id` - EC2 instance ID.
+* `id` - ID of the OpsWorks instance.
+* `last_service_error_id` - ID of the last service error.
+* `platform` - Instance's platform.
+* `private_dns` - Private DNS name assigned to the instance. Can only be used inside Amazon EC2, and only available if you've enabled DNS hostnames for your VPC.
+* `private_ip` - Private IP address assigned to the instance.
+* `public_dns` - Public DNS name assigned to the instance. For EC2-VPC, this is only available if you've enabled DNS hostnames for your VPC.
+* `public_ip` - Public IP address assigned to the instance, if applicable.
+* `registered_by` - For registered instances, who performed the registration.
+* `reported_agent_version` - Instance's reported AWS OpsWorks Stacks agent version.
+* `reported_os_family` - For registered instances, the reported operating system family.
+* `reported_os_name` - For registered instances, the reported operating system name.
+* `reported_os_version` - For registered instances, the reported operating system version.
+* `root_device_volume_id` - Root device volume ID.
+* `security_group_ids` - Associated security groups.
+* `ssh_host_dsa_key_fingerprint` - SSH key's DSA (Digital Signature Algorithm) fingerprint.
+* `ssh_host_rsa_key_fingerprint` - SSH key's RSA fingerprint.
+* `status` - Instance status. Will be one of `booting`, `connection_lost`, `online`, `pending`, `rebooting`, `requested`, `running_setup`, `setup_failed`, `shutting_down`, `start_failed`, `stop_failed`, `stopped`, `stopping`, `terminated`, or `terminating`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+- `update` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Opsworks Instances using the instance `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Opsworks Instances using the instance `id`.
For example: + +```console +% terraform import aws_opsworks_instance.my_instance 4d6d1710-ded9-42a1-b08e-b043ad7af1e2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opsworks_java_app_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_java_app_layer.html.markdown new file mode 100644 index 00000000000..f28e4a51515 --- /dev/null +++ b/website/docs/cdktf/python/r/opsworks_java_app_layer.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_java_app_layer" +description: |- + Provides an OpsWorks Java application layer resource. +--- + + + +# Resource: aws_opsworks_java_app_layer + +Provides an OpsWorks Java application layer resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opsworks_java_app_layer import OpsworksJavaAppLayer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpsworksJavaAppLayer(self, "app", + stack_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stack_id` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `app_server` - (Optional) Keyword for the application container to use. Defaults to "tomcat". +* `app_server_version` - (Optional) Version of the selected application container to use. Defaults to "7". +* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. +* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `auto_healing` - (Optional) Whether to enable auto-healing for the layer. +* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots. +* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `jvm_type` - (Optional) Keyword for the type of JVM to use. Defaults to `openjdk`. +* `jvm_options` - (Optional) Options to set for the JVM. +* `jvm_version` - (Optional) Version of JVM to use. Defaults to "7". +* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances. +* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `custom_json` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `custom_configure_recipes` +* `custom_deploy_recipes` +* `custom_setup_recipes` +* `custom_shutdown_recipes` +* `custom_undeploy_recipes` + +An `ebs_volume` block supports the following arguments: + +* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `number_of_disks` - (Required) The number of disks to use for the EBS volume. +* `raid_level` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opsworks_memcached_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_memcached_layer.html.markdown new file mode 100644 index 00000000000..80119951a37 --- /dev/null +++ b/website/docs/cdktf/python/r/opsworks_memcached_layer.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_memcached_layer" +description: |- + Provides an OpsWorks memcached layer resource. +--- + + + +# Resource: aws_opsworks_memcached_layer + +Provides an OpsWorks memcached layer resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opsworks_memcached_layer import OpsworksMemcachedLayer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpsworksMemcachedLayer(self, "cache", + stack_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stack_id` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `allocated_memory` - (Optional) Amount of memory to allocate for the cache on each instance, in megabytes. Defaults to 512MB. +* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. 
+* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `auto_healing` - (Optional) Whether to enable auto-healing for the layer. +* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots. +* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances. +* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `custom_json` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `custom_configure_recipes` +* `custom_deploy_recipes` +* `custom_setup_recipes` +* `custom_shutdown_recipes` +* `custom_undeploy_recipes` + +An `ebs_volume` block supports the following arguments: + +* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `number_of_disks` - (Required) The number of disks to use for the EBS volume. +* `raid_level` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opsworks_mysql_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_mysql_layer.html.markdown new file mode 100644 index 00000000000..ce2bcd18cee --- /dev/null +++ b/website/docs/cdktf/python/r/opsworks_mysql_layer.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_mysql_layer" +description: |- + Provides an OpsWorks MySQL layer resource. +--- + + + +# Resource: aws_opsworks_mysql_layer + +Provides an OpsWorks MySQL layer resource. + +~> **Note:** All arguments including the root password will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). 
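+
+One way to keep the password out of your source (though it will still reach the state file) is to supply it through a sensitive Terraform variable. A minimal sketch, inside a stack's `__init__` and assuming a stack referenced as `main`; the variable name here is hypothetical:
+
+```python
+from cdktf import TerraformVariable
+from imports.aws.opsworks_mysql_layer import OpsworksMysqlLayer
+
+# Marked sensitive so Terraform redacts the value from plan/apply output.
+db_root_password = TerraformVariable(self, "db_root_password",
+    type="string",
+    sensitive=True
+)
+
+OpsworksMysqlLayer(self, "db",
+    stack_id=main.id,
+    root_password=db_root_password.string_value
+)
+```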
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_mysql_layer import OpsworksMysqlLayer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksMysqlLayer(self, "db",
+            stack_id=main.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stack_id` - (Required) ID of the stack the layer will belong to.
+* `name` - (Optional) A human-readable name for the layer.
+* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer
+* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `root_password` - (Optional) Root password to use for MySQL.
+* `root_password_on_all_instances` - (Optional) Whether to set the root user password on all instances in the stack so that they can access the instances in this layer.
+* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances.
+* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `custom_json` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `custom_configure_recipes`
+* `custom_deploy_recipes`
+* `custom_setup_recipes`
+* `custom_shutdown_recipes`
+* `custom_undeploy_recipes`
+
+An `ebs_volume` block supports the following arguments:
+
+* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `number_of_disks` - (Required) The number of disks to use for the EBS volume.
+* `raid_level` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create.
This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opsworks_nodejs_app_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_nodejs_app_layer.html.markdown new file mode 100644 index 00000000000..adb5136fba4 --- /dev/null +++ b/website/docs/cdktf/python/r/opsworks_nodejs_app_layer.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_nodejs_app_layer" +description: |- + Provides an OpsWorks NodeJS application layer resource. +--- + + + +# Resource: aws_opsworks_nodejs_app_layer + +Provides an OpsWorks NodeJS application layer resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opsworks_nodejs_app_layer import OpsworksNodejsAppLayer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpsworksNodejsAppLayer(self, "app", + stack_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stack_id` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. +* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `auto_healing` - (Optional) Whether to enable auto-healing for the layer. +* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots. +* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `nodejs_version` - (Optional) The version of NodeJS to use. Defaults to "0.10.38". +* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances. +* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `custom_json` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `custom_configure_recipes`
+* `custom_deploy_recipes`
+* `custom_setup_recipes`
+* `custom_shutdown_recipes`
+* `custom_undeploy_recipes`
+
+An `ebs_volume` block supports the following arguments:
+
+* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `number_of_disks` - (Required) The number of disks to use for the EBS volume.
+* `raid_level` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name(ARN) of the layer.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_permission.html.markdown b/website/docs/cdktf/python/r/opsworks_permission.html.markdown
new file mode 100644
index 00000000000..8bf25872988
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_permission.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_permission"
+description: |-
+  Provides an OpsWorks permission resource.
+---
+
+
+
+# Resource: aws_opsworks_permission
+
+Provides an OpsWorks permission resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_permission import OpsworksPermission
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksPermission(self, "my_stack_permission",
+            allow_ssh=True,
+            allow_sudo=True,
+            level="iam_only",
+            stack_id=stack.id,
+            user_arn=user.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allow_ssh` - (Optional) Whether the user is allowed to use SSH to communicate with the instance.
+* `allow_sudo` - (Optional) Whether the user is allowed to use sudo to elevate privileges.
+* `user_arn` - (Required) The user's IAM ARN to set permissions for.
+* `level` - (Optional) The user's permission level. Must be one of `deny`, `show`, `deploy`, `manage`, `iam_only`.
+* `stack_id` - (Required) The stack to set the permissions for.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The computed id of the permission. Please note that this is only used internally to identify the permission.
This value is not used in AWS.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_php_app_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_php_app_layer.html.markdown
new file mode 100644
index 00000000000..2fc4d0d9390
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_php_app_layer.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_php_app_layer"
+description: |-
+  Provides an OpsWorks PHP application layer resource.
+---
+
+
+
+# Resource: aws_opsworks_php_app_layer
+
+Provides an OpsWorks PHP application layer resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_php_app_layer import OpsworksPhpAppLayer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksPhpAppLayer(self, "app",
+            stack_id=main.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stack_id` - (Required) ID of the stack the layer will belong to.
+* `name` - (Optional) A human-readable name for the layer.
+* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer
+* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances.
+* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `custom_json` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+ +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `custom_configure_recipes` +* `custom_deploy_recipes` +* `custom_setup_recipes` +* `custom_shutdown_recipes` +* `custom_undeploy_recipes` + +An `ebs_volume` block supports the following arguments: + +* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `number_of_disks` - (Required) The number of disks to use for the EBS volume. +* `raid_level` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks PHP Application Layers using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import OpsWorks PHP Application Layers using the `id`. For example: + +```console +% terraform import aws_opsworks_php_app_layer.bar 00000000-0000-0000-0000-000000000000 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/opsworks_rails_app_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_rails_app_layer.html.markdown new file mode 100644 index 00000000000..7e329a96922 --- /dev/null +++ b/website/docs/cdktf/python/r/opsworks_rails_app_layer.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_rails_app_layer" +description: |- + Provides an OpsWorks Ruby on Rails application layer resource. +--- + + + +# Resource: aws_opsworks_rails_app_layer + +Provides an OpsWorks Ruby on Rails application layer resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.opsworks_rails_app_layer import OpsworksRailsAppLayer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + OpsworksRailsAppLayer(self, "app", + stack_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stack_id` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `app_server` - (Optional) Keyword for the app server to use. Defaults to "apache_passenger". 
+* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `bundler_version` - (Optional) When OpsWorks is managing Bundler, which version to use. Defaults to "1.5.3".
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) IDs for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer.
+* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `manage_bundler` - (Optional) Whether OpsWorks should manage Bundler. On by default.
+* `passenger_version` - (Optional) The version of Passenger to use. Defaults to "4.0.46".
+* `ruby_version` - (Optional) The version of Ruby to use. Defaults to "2.0.0".
+* `rubygems_version` - (Optional) The version of RubyGems to use. Defaults to "2.2.2".
+* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances.
+* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `custom_json` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `custom_configure_recipes`
+* `custom_deploy_recipes`
+* `custom_setup_recipes`
+* `custom_shutdown_recipes`
+* `custom_undeploy_recipes`
+
+An `ebs_volume` block supports the following arguments:
+
+* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `number_of_disks` - (Required) The number of disks to use for the EBS volume.
+* `raid_level` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
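+
+As a sketch of how the version-pinning arguments above combine (not `cdktf convert` output; the `main` stack reference and the version values are illustrative only):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.opsworks_rails_app_layer import OpsworksRailsAppLayer
+class MyRailsLayerSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksRailsAppLayer(self, "rails",
+            stack_id=main.id,
+            app_server="apache_passenger",
+            # Let OpsWorks install and manage Bundler at a pinned version.
+            manage_bundler=True,
+            bundler_version="1.5.3",
+            ruby_version="2.0.0",
+            rubygems_version="2.2.2"
+        )
+```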
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_rds_db_instance.html.markdown b/website/docs/cdktf/python/r/opsworks_rds_db_instance.html.markdown
new file mode 100644
index 00000000000..501693b2f58
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_rds_db_instance.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_rds_db_instance"
+description: |-
+  Provides an OpsWorks RDS DB Instance resource.
+---
+
+
+
+# Resource: aws_opsworks_rds_db_instance
+
+Provides an OpsWorks RDS DB Instance resource.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_rds_db_instance import OpsworksRdsDbInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksRdsDbInstance(self, "my_instance",
+            db_password="somePass",
+            db_user="someUser",
+            rds_db_instance_arn=Token.as_string(aws_db_instance_my_instance.arn),
+            stack_id=my_stack.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stack_id` - (Required) The stack to register a db instance for. Changing this will force a new resource.
+* `rds_db_instance_arn` - (Required) The db instance to register for this stack. Changing this will force a new resource.
+* `db_user` - (Required) The database username.
+* `db_password` - (Required) The database password.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The computed ID. Please note that this is only used internally to identify the stack <-> instance relation. This value is not used in AWS.
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_stack.html.markdown b/website/docs/cdktf/python/r/opsworks_stack.html.markdown
new file mode 100644
index 00000000000..5a9054a4d8b
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_stack.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_stack"
+description: |-
+  Provides an OpsWorks stack resource.
+---
+
+
+
+# Resource: aws_opsworks_stack
+
+Provides an OpsWorks stack resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_stack import OpsworksStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksStack(self, "main",
+            custom_json="{\n \"foobar\": {\n \"version\": \"1.0.0\"\n }\n}\n\n",
+            default_instance_profile_arn=opsworks.arn,
+            name="awesome-stack",
+            region="us-west-1",
+            service_role_arn=Token.as_string(aws_iam_role_opsworks.arn),
+            tags={
+                "Name": "foobar-terraform-stack"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the stack.
+* `region` - (Required) The name of the region where the stack will exist.
+* `service_role_arn` - (Required) The ARN of an IAM role that the OpsWorks service will act as.
+* `default_instance_profile_arn` - (Required) The ARN of an IAM Instance Profile that created instances will have by default.
+* `agent_version` - (Optional) If set to `"LATEST"`, OpsWorks will automatically install the latest version.
+* `berkshelf_version` - (Optional) If `manage_berkshelf` is enabled, the version of Berkshelf to use.
+* `color` - (Optional) Color to paint next to the stack's resources in the OpsWorks console.
+* `configuration_manager_name` - (Optional) Name of the configuration manager to use. Defaults to "Chef".
+* `configuration_manager_version` - (Optional) Version of the configuration manager to use. Defaults to "11.4".
+* `custom_cookbooks_source` - (Optional) When `use_custom_cookbooks` is set, provide this sub-object as described below.
+* `custom_json` - (Optional) User defined JSON passed to "Chef". Use a "here doc" for multiline JSON.
+* `default_availability_zone` - (Optional) Name of the availability zone where instances will be created by default.
+  Cannot be set when `vpc_id` is set.
+* `default_os` - (Optional) Name of OS that will be installed on instances by default.
+* `default_root_device_type` - (Optional) Name of the type of root device instances will have by default.
+* `default_ssh_key_name` - (Optional) Name of the SSH keypair that instances will have by default.
+* `default_subnet_id` - (Optional) ID of the subnet in which instances will be created by default.
+  Required if `vpc_id` is set to a VPC other than the default VPC, and forbidden if it isn't.
+* `hostname_theme` - (Optional) Keyword representing the naming scheme that will be used for instance hostnames within this stack.
+* `manage_berkshelf` - (Optional) Boolean value controlling whether OpsWorks will run Berkshelf for this stack.
+* `tags` - (Optional) A map of tags to assign to the resource.
+  If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `use_custom_cookbooks` - (Optional) Boolean value controlling whether the custom cookbook settings are enabled.
+* `use_opsworks_security_groups` - (Optional) Boolean value controlling whether the standard OpsWorks security groups apply to created instances.
+* `vpc_id` - (Optional) ID of the VPC that this stack belongs to.
+  Defaults to the region's default VPC.
+
+The `custom_cookbooks_source` block supports the following arguments:
+
+* `type` - (Required) The type of source to use. For example, "archive".
+* `url` - (Required) The URL where the cookbooks resource can be found.
+* `username` - (Optional) Username to use when authenticating to the source.
+* `password` - (Optional) Password to use when authenticating to the source. Terraform cannot perform drift detection of this configuration.
+* `ssh_key` - (Optional) SSH key to use when authenticating to the source. Terraform cannot perform drift detection of this configuration.
+* `revision` - (Optional) For sources that are version-aware, the revision to use.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the stack.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks stacks using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpsWorks stacks using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_stack.bar 00000000-0000-0000-0000-000000000000
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_static_web_layer.html.markdown b/website/docs/cdktf/python/r/opsworks_static_web_layer.html.markdown
new file mode 100644
index 00000000000..5446e5c5bfe
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_static_web_layer.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_static_web_layer"
+description: |-
+  Provides an OpsWorks static web server layer resource.
+---
+
+
+
+# Resource: aws_opsworks_static_web_layer
+
+Provides an OpsWorks static web server layer resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_static_web_layer import OpsworksStaticWebLayer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksStaticWebLayer(self, "web",
+            stack_id=main.id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stack_id` - (Required) ID of the stack the layer will belong to.
+* `name` - (Optional) A human-readable name for the layer.
+* `auto_assign_elastic_ips` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `auto_assign_public_ips` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `custom_instance_profile_arn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `custom_security_group_ids` - (Optional) IDs for a set of security groups to apply to the layer's instances.
+* `auto_healing` - (Optional) Whether to enable auto-healing for the layer.
+* `install_updates_on_boot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instance_shutdown_timeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elastic_load_balancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer.
+* `drain_elb_on_shutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `system_packages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `use_ebs_optimized_instances` - (Optional) Whether to use EBS-optimized instances.
+* `ebs_volume` - (Optional) `ebs_volume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `custom_configure_recipes`
+* `custom_deploy_recipes`
+* `custom_setup_recipes`
+* `custom_shutdown_recipes`
+* `custom_undeploy_recipes`
+
+An `ebs_volume` block supports the following arguments:
+
+* `mount_point` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `number_of_disks` - (Required) The number of disks to use for the EBS volume.
+* `raid_level` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks static web server Layers using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import OpsWorks static web server Layers using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_static_web_layer.bar 00000000-0000-0000-0000-000000000000
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/opsworks_user_profile.html.markdown b/website/docs/cdktf/python/r/opsworks_user_profile.html.markdown
new file mode 100644
index 00000000000..eaa2c83d0c8
--- /dev/null
+++ b/website/docs/cdktf/python/r/opsworks_user_profile.html.markdown
@@ -0,0 +1,50 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_user_profile"
+description: |-
+  Provides an OpsWorks User Profile resource.
+---
+
+
+
+# Resource: aws_opsworks_user_profile
+
+Provides an OpsWorks User Profile resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.opsworks_user_profile import OpsworksUserProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OpsworksUserProfile(self, "my_profile",
+            ssh_username="my_user",
+            user_arn=user.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `user_arn` - (Required) The user's IAM ARN.
+* `allow_self_management` - (Optional) Whether users can specify their own SSH public key through the My Settings page.
+* `ssh_username` - (Required) The SSH username with which this user wants to log in.
+* `ssh_public_key` - (Optional) The user's public key.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Same value as `user_arn`.
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/organizations_account.html.markdown b/website/docs/cdktf/python/r/organizations_account.html.markdown
new file mode 100644
index 00000000000..312a640c89c
--- /dev/null
+++ b/website/docs/cdktf/python/r/organizations_account.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_account"
+description: |-
+  Provides a resource to create a member account in the current AWS Organization.
+---
+
+
+
+# Resource: aws_organizations_account
+
+Provides a resource to create a member account in the current organization.
+
+~> **Note:** Account management must be done from the organization's root account.
+
+~> **Note:** By default, deleting this Terraform resource will only remove an AWS account from an organization. You must set the `close_on_deletion` flag to true to close the account. It is worth noting that quotas are enforced when using the `close_on_deletion` argument, which can produce a [CLOSE_ACCOUNT_QUOTA_EXCEEDED](https://docs.aws.amazon.com/organizations/latest/APIReference/API_CloseAccount.html) error, and require you to close the account manually.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_account import OrganizationsAccount
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsAccount(self, "account",
+            email="john@doe.org",
+            name="my_new_account"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `email` - (Required) Email address of the owner to assign to the new member account. This email address must not already be associated with another AWS account.
+* `name` - (Required) Friendly name for the member account.
+
+The following arguments are optional:
+
+* `close_on_deletion` - (Optional) If true, a deletion event will close the account. Otherwise, it will only be removed from the organization. This is not supported for GovCloud accounts.
+* `create_govcloud` - (Optional) Whether to also create a GovCloud account. The GovCloud account is tied to the main (commercial) account this resource creates. If `true`, the GovCloud account ID is available in the `govcloud_id` attribute. The only way to manage the GovCloud account with Terraform is to subsequently import the account using this resource.
+* `iam_user_access_to_billing` - (Optional) If set to `ALLOW`, the new account enables IAM users and roles to access account billing information if they have the required permissions. If set to `DENY`, then only the root user (and no roles) of the new account can access account billing information. If this is unset, the AWS API will default this to `ALLOW`. If the resource is created and this option is changed, it will try to recreate the account.
+* `parent_id` - (Optional) Parent Organizational Unit ID or Root ID for the account. Defaults to the Organization default Root ID. A configuration must be present for this argument to perform drift detection.
+* `role_name` - (Optional) The name of an IAM role that Organizations automatically preconfigures in the new member account. This role trusts the root account, allowing users in the root account to assume the role, as permitted by the root account administrator. The role has administrator permissions in the new member account. The Organizations API provides no method for reading this information after account creation, so Terraform cannot perform drift detection on its value and will always show a difference for a configured value after import unless [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) is used.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
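+
+As a sketch of the optional arguments in use (not `cdktf convert` output; the OU ID and role name are hypothetical):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.organizations_account import OrganizationsAccount
+class MyAccountSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsAccount(self, "account",
+            email="john@doe.org",
+            name="my_new_account",
+            # Hypothetical OU; when unset, the account lands under the root.
+            parent_id="ou-abcd-12345678",
+            role_name="myOrganizationRole",
+            # Close (not just remove) the account when the resource is destroyed.
+            close_on_deletion=True
+        )
+```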
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN for this account.
+* `govcloud_id` - ID for a GovCloud account created with the account.
+* `id` - The AWS account ID.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the AWS member account using the `account_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the AWS member account using the `account_id`. For example:
+
+```console
+% terraform import aws_organizations_account.my_account 111111111111
+```
+
+Certain resource arguments, like `role_name`, do not have an Organizations API method for reading the information after account creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_account import OrganizationsAccount
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsAccount(self, "account",
+            email="john@doe.org",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["role_name"]
+            ),
+            name="my_new_account",
+            role_name="myOrganizationRole"
+        )
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/organizations_delegated_administrator.html.markdown b/website/docs/cdktf/python/r/organizations_delegated_administrator.html.markdown
new file mode 100644
index 00000000000..a9f03d12a67
--- /dev/null
+++ b/website/docs/cdktf/python/r/organizations_delegated_administrator.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_delegated_administrator"
+description: |-
+  Provides a resource to manage an AWS Organizations Delegated Administrator.
+---
+
+
+
+# Resource: aws_organizations_delegated_administrator
+
+Provides a resource to manage an [AWS Organizations Delegated Administrator](https://docs.aws.amazon.com/organizations/latest/APIReference/API_RegisterDelegatedAdministrator.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_delegated_administrator import OrganizationsDelegatedAdministrator
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsDelegatedAdministrator(self, "example",
+            account_id="123456789012",
+            service_principal="principal"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Required) The account ID number of the member account in the organization to register as a delegated administrator.
+* `service_principal` - (Required) The service principal of the AWS service for which you want to make the member account a delegated administrator.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the delegated administrator.
+* `arn` - The Amazon Resource Name (ARN) of the delegated administrator's account.
+* `delegation_enabled_date` - The date when the account was made a delegated administrator.
+* `email` - The email address that is associated with the delegated administrator's AWS account.
+* `joined_method` - The method by which the delegated administrator's account joined the organization.
+* `joined_timestamp` - The date when the delegated administrator's account became a part of the organization.
+* `name` - The friendly name of the delegated administrator's account.
+* `status` - The status of the delegated administrator's account in the organization.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_organizations_delegated_administrator` using the account ID and its service principal. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_organizations_delegated_administrator` using the account ID and its service principal. For example:
+
+```console
+% terraform import aws_organizations_delegated_administrator.example 123456789012/config.amazonaws.com
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/organizations_organization.html.markdown b/website/docs/cdktf/python/r/organizations_organization.html.markdown
new file mode 100644
index 00000000000..ce9059aa233
--- /dev/null
+++ b/website/docs/cdktf/python/r/organizations_organization.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_organization"
+description: |-
+  Provides a resource to create an organization.
+---
+
+
+
+# Resource: aws_organizations_organization
+
+Provides a resource to create an organization.
+
+!> **WARNING:** When migrating from a `feature_set` of `CONSOLIDATED_BILLING` to `ALL`, the Organization account owner will receive an email stating the following: "You started the process to enable all features for your AWS organization. As part of that process, all member accounts that joined your organization by invitation must approve the change. You don’t need approval from member accounts that you directly created from within your AWS organization." After all member accounts have accepted the invitation, the Organization account owner must then finalize the changes via the [AWS Console](https://console.aws.amazon.com/organizations/home#/organization/settings/migration-progress). Until these steps are performed, Terraform will perpetually show a difference, and the `DescribeOrganization` API will continue to show the `FeatureSet` as `CONSOLIDATED_BILLING`. See the [AWS Organizations documentation](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html) for more information.
+
+!> **WARNING:** [Warning from the AWS Docs](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnableAWSServiceAccess.html): "We recommend that you enable integration between AWS Organizations and the specified AWS service by using the console or commands that are provided by the specified service. Doing so ensures that the service is aware that it can create the resources that are required for the integration. How the service creates those resources in the organization's accounts depends on that service. For more information, see the documentation for the other AWS service."
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_organization import OrganizationsOrganization
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsOrganization(self, "org",
+            aws_service_access_principals=["cloudtrail.amazonaws.com", "config.amazonaws.com"],
+            feature_set="ALL"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `aws_service_access_principals` - (Optional) List of AWS service principal names for which you want to enable integration with your organization. This is typically in the form of a URL, such as service-abbreviation.amazonaws.com. Organization must have `feature_set` set to `ALL`. Some services do not support enablement via this endpoint, see [warning in aws docs](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnableAWSServiceAccess.html).
+* `enabled_policy_types` - (Optional) List of Organizations policy types to enable in the Organization Root. Organization must have `feature_set` set to `ALL`. For additional information about valid policy types (e.g., `AISERVICES_OPT_OUT_POLICY`, `BACKUP_POLICY`, `SERVICE_CONTROL_POLICY`, and `TAG_POLICY`), see the [AWS Organizations API Reference](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnablePolicyType.html).
+* `feature_set` - (Optional) Specify "ALL" (default) or "CONSOLIDATED_BILLING".
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `accounts` - List of organization accounts including the master account. For a list excluding the master account, see the `non_master_accounts` attribute. All elements have these attributes:
+    * `arn` - ARN of the account
+    * `email` - Email of the account
+    * `id` - Identifier of the account
+    * `name` - Name of the account
+    * `status` - Current status of the account
+* `arn` - ARN of the organization
+* `id` - Identifier of the organization
+* `master_account_arn` - ARN of the master account
+* `master_account_email` - Email address of the master account
+* `master_account_id` - Identifier of the master account
+* `non_master_accounts` - List of organization accounts excluding the master account. For a list including the master account, see the `accounts` attribute. All elements have these attributes:
+    * `arn` - ARN of the account
+    * `email` - Email of the account
+    * `id` - Identifier of the account
+    * `name` - Name of the account
+    * `status` - Current status of the account
+* `roots` - List of organization roots. All elements have these attributes:
+    * `arn` - ARN of the root
+    * `id` - Identifier of the root
+    * `name` - Name of the root
+    * `policy_types` - List of policy types enabled for this root. All elements have these attributes:
+        * `name` - The name of the policy type
+        * `status` - The status of the policy type as it relates to the associated root
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the AWS organization using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the AWS organization using the `id`. For example:
+
+```console
+% terraform import aws_organizations_organization.my_org o-1234567
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/organizations_organizational_unit.html.markdown b/website/docs/cdktf/python/r/organizations_organizational_unit.html.markdown
new file mode 100644
index 00000000000..25ff6bdb31d
--- /dev/null
+++ b/website/docs/cdktf/python/r/organizations_organizational_unit.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_organizational_unit"
+description: |-
+  Provides a resource to create an organizational unit.
+---
+
+
+
+# Resource: aws_organizations_organizational_unit
+
+Provides a resource to create an organizational unit.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_organizational_unit import OrganizationsOrganizationalUnit
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsOrganizationalUnit(self, "example",
+            name="example",
+            parent_id=Token.as_string(
+                property_access(aws_organizations_organization_example.roots, ["0", "id"]))
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name for the organizational unit.
+* `parent_id` - (Required) ID of the parent organizational unit, which may be the root.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `accounts` - List of child accounts for this Organizational Unit. Does not return account information for child Organizational Units. All elements have these attributes:
+    * `arn` - ARN of the account
+    * `email` - Email of the account
+    * `id` - Identifier of the account
+    * `name` - Name of the account
+* `arn` - ARN of the organizational unit
+* `id` - Identifier of the organizational unit
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Organizations Organizational Units using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AWS Organizations Organizational Units using the `id`. For example:
+
+```console
+% terraform import aws_organizations_organizational_unit.example ou-1234567
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/organizations_policy.html.markdown b/website/docs/cdktf/python/r/organizations_policy.html.markdown
new file mode 100644
index 00000000000..28d45118b7b
--- /dev/null
+++ b/website/docs/cdktf/python/r/organizations_policy.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_policy"
+description: |-
+  Provides a resource to manage an AWS Organizations policy.
+---
+
+
+
+# Resource: aws_organizations_policy
+
+Provides a resource to manage an [AWS Organizations policy](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.organizations_policy import OrganizationsPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsIamPolicyDocument(self, "example",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["*"],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        aws_organizations_policy_example = OrganizationsPolicy(self, "example_1",
+            content=Token.as_string(example.json),
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_organizations_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `content` - (Required) The policy content to add to the new policy. For example, if you create a [service control policy (SCP)](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_scp.html), this string must be JSON text that specifies the permissions that admins in attached accounts can delegate to their users, groups, and roles. For more information about the SCP syntax, see the [Service Control Policy Syntax documentation](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_reference_scp-syntax.html) and for more information on the Tag Policy syntax, see the [Tag Policy Syntax documentation](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_example-tag-policies.html).
+* `name` - (Required) The friendly name to assign to the policy.
+* `description` - (Optional) A description to assign to the policy.
+* `skip_destroy` - (Optional) If set to `true`, destroy will **not** delete the policy and instead just remove the resource from state. This can be useful in situations where the policies (and the associated attachment) must be preserved to meet the AWS minimum requirement of 1 attached policy.
+* `type` - (Optional) The type of policy to create. Valid values are `AISERVICES_OPT_OUT_POLICY`, `BACKUP_POLICY`, `SERVICE_CONTROL_POLICY` (SCP), and `TAG_POLICY`. Defaults to `SERVICE_CONTROL_POLICY`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
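+
+The generated example above builds an SCP from an IAM policy document; other policy types differ mainly in `type` and `content`. As a sketch (not `cdktf convert` output) of a tag policy, with a hypothetical tag key and the standard-library `json` module used to build the content:
+
+```python
+import json
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.organizations_policy import OrganizationsPolicy
+class MyTagPolicySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsPolicy(self, "tag_policy",
+            name="example-tag-policy",
+            type="TAG_POLICY",
+            # Enforce capitalization of the hypothetical CostCenter tag key.
+            content=json.dumps({
+                "tags": {
+                    "CostCenter": {"tag_key": {"@@assign": "CostCenter"}}
+                }
+            })
+        )
+```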
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the policy.
+* `arn` - Amazon Resource Name (ARN) of the policy.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_organizations_policy` using the policy ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_organizations_policy` using the policy ID. For example:
+
+```console
+% terraform import aws_organizations_policy.example p-12345678
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/organizations_policy_attachment.html.markdown b/website/docs/cdktf/python/r/organizations_policy_attachment.html.markdown
new file mode 100644
index 00000000000..0e9021c3dab
--- /dev/null
+++ b/website/docs/cdktf/python/r/organizations_policy_attachment.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_policy_attachment"
+description: |-
+  Provides a resource to attach an AWS Organizations policy to an organization account, root, or unit.
+---
+
+
+
+# Resource: aws_organizations_policy_attachment
+
+Provides a resource to attach an AWS Organizations policy to an organization account, root, or unit.
+
+## Example Usage
+
+### Organization Account
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_policy_attachment import OrganizationsPolicyAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsPolicyAttachment(self, "account",
+            policy_id=example.id,
+            target_id="123456789012"
+        )
+```
+
+### Organization Root
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_policy_attachment import OrganizationsPolicyAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsPolicyAttachment(self, "root",
+            policy_id=example.id,
+            target_id=Token.as_string(
+                property_access(aws_organizations_organization_example.roots, ["0", "id"]))
+        )
+```
+
+### Organization Unit
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_policy_attachment import OrganizationsPolicyAttachment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsPolicyAttachment(self, "unit",
+            policy_id=example.id,
+            target_id=Token.as_string(aws_organizations_organizational_unit_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `policy_id` - (Required) The unique identifier (ID) of the policy that you want to attach to the target.
+* `target_id` - (Required) The unique identifier (ID) of the root, organizational unit, or account number that you want to attach the policy to.
+* `skip_destroy` - (Optional) If set to `true`, destroy will **not** detach the policy and instead just remove the resource from state. This can be useful in situations where the attachment must be preserved to meet the AWS minimum requirement of 1 attached policy.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_organizations_policy_attachment` using the target ID and policy ID. For example:
+
+With an account target:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_organizations_policy_attachment` using the target ID and policy ID. For example:
+
+With an account target:
+
+```console
+% terraform import aws_organizations_policy_attachment.account 123456789012:p-12345678
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/organizations_resource_policy.html.markdown b/website/docs/cdktf/python/r/organizations_resource_policy.html.markdown
new file mode 100644
index 00000000000..34729ad9dc7
--- /dev/null
+++ b/website/docs/cdktf/python/r/organizations_resource_policy.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_resource_policy"
+description: |-
+  Provides a resource to manage an AWS Organizations resource policy.
+---
+
+
+
+# Resource: aws_organizations_resource_policy
+
+Provides a resource to manage a resource-based delegation policy that can be used to delegate policy management for AWS Organizations to specified member accounts to perform policy actions that are by default available only to the management account. See the [_AWS Organizations User Guide_](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_delegate_policies.html) for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_resource_policy import OrganizationsResourcePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        OrganizationsResourcePolicy(self, "example",
+            content="{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"DelegatingNecessaryDescribeListActions\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"AWS\": \"arn:aws:iam::123456789012:root\"\n },\n \"Action\": [\n \"organizations:DescribeOrganization\",\n \"organizations:DescribeOrganizationalUnit\",\n \"organizations:DescribeAccount\",\n \"organizations:DescribePolicy\",\n \"organizations:DescribeEffectivePolicy\",\n \"organizations:ListRoots\",\n \"organizations:ListOrganizationalUnitsForParent\",\n \"organizations:ListParents\",\n \"organizations:ListChildren\",\n \"organizations:ListAccounts\",\n \"organizations:ListAccountsForParent\",\n \"organizations:ListPolicies\",\n \"organizations:ListPoliciesForTarget\",\n \"organizations:ListTargetsForPolicy\",\n \"organizations:ListTagsForResource\"\n ],\n \"Resource\": \"*\"\n }\n ]\n}\n\n"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `content` - (Required) Content for the resource policy. The text must be correctly formatted JSON that complies with the syntax for the resource policy's type. See the [_AWS Organizations User Guide_](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_delegate_examples.html) for examples.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the resource policy.
+* `id` - The unique identifier (ID) of the resource policy.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_organizations_resource_policy` using the resource policy ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_organizations_resource_policy` using the resource policy ID. For example:
+
+```console
+% terraform import aws_organizations_resource_policy.example rp-12345678
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/pinpoint_adm_channel.markdown b/website/docs/cdktf/python/r/pinpoint_adm_channel.markdown
new file mode 100644
index 00000000000..f22c71680af
--- /dev/null
+++ b/website/docs/cdktf/python/r/pinpoint_adm_channel.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Pinpoint"
+layout: "aws"
+page_title: "AWS: aws_pinpoint_adm_channel"
+description: |-
+  Provides a Pinpoint ADM Channel resource.
+---
+
+
+
+# Resource: aws_pinpoint_adm_channel
+
+Provides a Pinpoint ADM (Amazon Device Messaging) Channel resource.
+
+~> **Note:** All arguments including the Client ID and Client Secret will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.pinpoint_adm_channel import PinpointAdmChannel
+from imports.aws.pinpoint_app import PinpointApp
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        app = PinpointApp(self, "app")
+        PinpointAdmChannel(self, "channel",
+            application_id=app.application_id,
+            client_id="",
+            client_secret="",
+            enabled=True
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `application_id` - (Required) The application ID.
+* `client_id` - (Required) Client ID (part of OAuth Credentials) obtained via Amazon Developer Account.
+* `client_secret` - (Required) Client Secret (part of OAuth Credentials) obtained via Amazon Developer Account.
+* `enabled` - (Optional) Specifies whether to enable the channel. Defaults to `true`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint ADM Channel using the `application-id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Pinpoint ADM Channel using the `application-id`. For example:
+
+```console
+% terraform import aws_pinpoint_adm_channel.channel application-id
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/pinpoint_apns_channel.markdown b/website/docs/cdktf/python/r/pinpoint_apns_channel.markdown
new file mode 100644
index 00000000000..e98b51b2287
--- /dev/null
+++ b/website/docs/cdktf/python/r/pinpoint_apns_channel.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "Pinpoint"
+layout: "aws"
+page_title: "AWS: aws_pinpoint_apns_channel"
+description: |-
+  Provides a Pinpoint APNs Channel resource.
+---
+
+
+
+# Resource: aws_pinpoint_apns_channel
+
+Provides a Pinpoint APNs Channel resource.
+
+~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.pinpoint_apns_channel import PinpointApnsChannel
+from imports.aws.pinpoint_app import PinpointApp
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        app = PinpointApp(self, "app")
+        PinpointApnsChannel(self, "apns",
+            application_id=app.application_id,
+            certificate=Token.as_string(Fn.file("./certificate.pem")),
+            private_key=Token.as_string(Fn.file("./private_key.key"))
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `application_id` - (Required) The application ID.
+* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`.
+* `default_authentication_method` - (Optional) The default authentication method used for APNs.
+  __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console.
+  You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK.
+  If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type.
+
+One of the following sets of credentials is also required.
+
+If you choose to use __Certificate credentials__ you will have to provide:
+
+* `certificate` - (Required) The PEM-encoded TLS certificate from Apple.
+* `private_key` - (Required) The Certificate Private Key file (i.e. `.key` file).
+
+If you choose to use __Key credentials__ you will have to provide:
+
+* `bundle_id` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app.
+* `team_id` - (Required) The ID assigned to your Apple developer account team. This value is provided on the Membership page.
+* `token_key` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key.
+* `token_key_id` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section.
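+
+The example above uses Certificate credentials; a Key-credentials configuration might look like the following sketch (not `cdktf convert` output). Every identifier and file name is a placeholder, and setting `default_authentication_method` to `"TOKEN"` is an assumption based on the two APNs authentication types described above:
+
+```python
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+from imports.aws.pinpoint_apns_channel import PinpointApnsChannel
+from imports.aws.pinpoint_app import PinpointApp
+class MyApnsTokenSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        app = PinpointApp(self, "app")
+        PinpointApnsChannel(self, "apns",
+            application_id=app.application_id,
+            # Assumed value; the alternative is certificate-based auth.
+            default_authentication_method="TOKEN",
+            bundle_id="com.example.app",
+            team_id="EXAMPLETEAM",
+            # Placeholder signing key downloaded from the Apple developer account.
+            token_key=Token.as_string(Fn.file("./AuthKey_EXAMPLE.p8")),
+            token_key_id="EXAMPLEKEY"
+        )
+```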
For example: + +```console +% terraform import aws_pinpoint_apns_channel.apns application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_apns_sandbox_channel.markdown b/website/docs/cdktf/python/r/pinpoint_apns_sandbox_channel.markdown new file mode 100644 index 00000000000..35c507d30d3 --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_apns_sandbox_channel.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_apns_sandbox_channel" +description: |- + Provides a Pinpoint APNs Sandbox Channel resource. +--- + + + +# Resource: aws_pinpoint_apns_sandbox_channel + +Provides a Pinpoint APNs Sandbox Channel resource. + +~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.pinpoint_apns_sandbox_channel import PinpointApnsSandboxChannel +from imports.aws.pinpoint_app import PinpointApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + app = PinpointApp(self, "app") + PinpointApnsSandboxChannel(self, "apns_sandbox", + application_id=app.application_id, + certificate=Token.as_string(Fn.file("./certificate.pem")), + private_key=Token.as_string(Fn.file("./private_key.key")) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required) The application ID. +* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`. +* `default_authentication_method` - (Optional) The default authentication method used for APNs Sandbox. + __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console. + You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK. + If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type. + +One of the following sets of credentials is also required. + +If you choose to use __Certificate credentials__ you will have to provide: + +* `certificate` - (Required) The pem encoded TLS Certificate from Apple. +* `private_key` - (Required) The Certificate Private Key file (ie. `.key` file). + +If you choose to use __Key credentials__ you will have to provide: + +* `bundle_id` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app. +* `team_id` - (Required) The ID assigned to your Apple developer account team. This value is provided on the Membership page. +* `token_key` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key. +* `token_key_id` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section. + +## Attribute Reference + +This resource exports no additional attributes. 
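+
+The example above uses the certificate credential set. A minimal sketch of the Key credentials alternative described in the argument list, assuming the same generated bindings; the bundle ID, team ID, key file, and key ID values are placeholders:
+
+```python
+# A hypothetical Key-credentials (token-based) variant; all identifiers below
+# are placeholder values, not real credentials.
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+from imports.aws.pinpoint_apns_sandbox_channel import PinpointApnsSandboxChannel
+from imports.aws.pinpoint_app import PinpointApp
+class ApnsSandboxTokenSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        app = PinpointApp(self, "app")
+        PinpointApnsSandboxChannel(self, "apns_sandbox_token",
+            application_id=app.application_id,
+            bundle_id="com.example.app",
+            team_id="TEAMID1234",
+            token_key=Token.as_string(Fn.file("./token_key.p8")),
+            token_key_id="KEYID12345"
+        )
+```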
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint APNs Sandbox Channel using the `application-id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Pinpoint APNs Sandbox Channel using the `application-id`. For example: + +```console +% terraform import aws_pinpoint_apns_sandbox_channel.apns_sandbox application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_apns_voip_channel.markdown b/website/docs/cdktf/python/r/pinpoint_apns_voip_channel.markdown new file mode 100644 index 00000000000..ee615467039 --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_apns_voip_channel.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_apns_voip_channel" +description: |- + Provides a Pinpoint APNs VoIP Channel resource. +--- + + + +# Resource: aws_pinpoint_apns_voip_channel + +Provides a Pinpoint APNs VoIP Channel resource. + +~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.pinpoint_apns_voip_channel import PinpointApnsVoipChannel +from imports.aws.pinpoint_app import PinpointApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + app = PinpointApp(self, "app") + PinpointApnsVoipChannel(self, "apns_voip", + application_id=app.application_id, + certificate=Token.as_string(Fn.file("./certificate.pem")), + private_key=Token.as_string(Fn.file("./private_key.key")) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required) The application ID. +* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`. +* `default_authentication_method` - (Optional) The default authentication method used for APNs. + __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console. + You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK. + If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type. + +One of the following sets of credentials is also required. + +If you choose to use __Certificate credentials__ you will have to provide: + +* `certificate` - (Required) The pem encoded TLS Certificate from Apple. +* `private_key` - (Required) The Certificate Private Key file (ie. `.key` file). + +If you choose to use __Key credentials__ you will have to provide: + +* `bundle_id` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app. 
+* `team_id` - (Required) The ID assigned to your Apple developer account team. This value is provided on the Membership page. +* `token_key` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key. +* `token_key_id` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint APNs VoIP Channel using the `application-id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Pinpoint APNs VoIP Channel using the `application-id`. For example: + +```console +% terraform import aws_pinpoint_apns_voip_channel.apns_voip application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_apns_voip_sandbox_channel.markdown b/website/docs/cdktf/python/r/pinpoint_apns_voip_sandbox_channel.markdown new file mode 100644 index 00000000000..c2db7d606bc --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_apns_voip_sandbox_channel.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_apns_voip_sandbox_channel" +description: |- + Provides a Pinpoint APNs VoIP Sandbox Channel resource. +--- + + + +# Resource: aws_pinpoint_apns_voip_sandbox_channel + +Provides a Pinpoint APNs VoIP Sandbox Channel resource. + +~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.pinpoint_apns_voip_sandbox_channel import PinpointApnsVoipSandboxChannel +from imports.aws.pinpoint_app import PinpointApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + app = PinpointApp(self, "app") + PinpointApnsVoipSandboxChannel(self, "apns_voip_sandbox", + application_id=app.application_id, + certificate=Token.as_string(Fn.file("./certificate.pem")), + private_key=Token.as_string(Fn.file("./private_key.key")) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required) The application ID. +* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`. +* `default_authentication_method` - (Optional) The default authentication method used for APNs. + __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console. + You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK. + If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type. 
+ +One of the following sets of credentials is also required. + +If you choose to use __Certificate credentials__ you will have to provide: + +* `certificate` - (Required) The pem encoded TLS Certificate from Apple. +* `private_key` - (Required) The Certificate Private Key file (ie. `.key` file). + +If you choose to use __Key credentials__ you will have to provide: + +* `bundle_id` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app. +* `team_id` - (Required) The ID assigned to your Apple developer account team. This value is provided on the Membership page. +* `token_key` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key. +* `token_key_id` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint APNs VoIP Sandbox Channel using the `application-id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Pinpoint APNs VoIP Sandbox Channel using the `application-id`. For example: + +```console +% terraform import aws_pinpoint_apns_voip_sandbox_channel.apns_voip_sandbox application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_app.markdown b/website/docs/cdktf/python/r/pinpoint_app.markdown new file mode 100644 index 00000000000..a0cf8660541 --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_app.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_app" +description: |- + Provides a Pinpoint App resource. +--- + + + +# Resource: aws_pinpoint_app + +Provides a Pinpoint App resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.pinpoint_app import PinpointApp +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + PinpointApp(self, "example", + limits=PinpointAppLimits( + maximum_duration=600 + ), + name="test-app", + quiet_time=PinpointAppQuietTime( + end="06:00", + start="00:00" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The application name. By default generated by Terraform +* `name_prefix` - (Optional) The name of the Pinpoint application. Conflicts with `name` +* `campaign_hook` - (Optional) Specifies settings for invoking an AWS Lambda function that customizes a segment for a campaign +* `limits` - (Optional) The default campaign limits for the app. These limits apply to each campaign for the app, unless the campaign overrides the default with limits of its own +* `quiet_time` - (Optional) The default quiet time for the app. 
Each campaign for this app sends no messages during this time unless the campaign overrides the default with a quiet time of its own +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +`campaign_hook` supports the following: + +* `lambda_function_name` - (Optional) Lambda function name or ARN to be called for delivery. Conflicts with `web_url` +* `mode` - (Required if `lambda_function_name` or `web_url` are provided) What mode Lambda should be invoked in. Valid values for this parameter are `DELIVERY`, `FILTER`. +* `web_url` - (Optional) Web URL to call for hook. If the URL has authentication specified it will be added as authentication to the request. Conflicts with `lambda_function_name` + +`limits` supports the following: + +* `daily` - (Optional) The maximum number of messages that the campaign can send daily. +* `maximum_duration` - (Optional) The length of time (in seconds) that the campaign can run before it ends and message deliveries stop. This duration begins at the scheduled start time for the campaign. The minimum value is 60. +* `messages_per_second` - (Optional) The number of messages that the campaign can send per second. The minimum value is 50, and the maximum is 20000. +* `total` - (Optional) The maximum total number of messages that the campaign can send. + +`quiet_time` supports the following: + +* `end` - (Optional) The default end time for quiet time in ISO 8601 format. Required if `start` is set +* `start` - (Optional) The default start time for quiet time in ISO 8601 format. Required if `end` is set + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `application_id` - The Application ID of the Pinpoint App. +* `arn` - Amazon Resource Name (ARN) of the PinPoint Application +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint App using the `application-id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Pinpoint App using the `application-id`. For example: + +```console +% terraform import aws_pinpoint_app.name application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_baidu_channel.markdown b/website/docs/cdktf/python/r/pinpoint_baidu_channel.markdown new file mode 100644 index 00000000000..c3631ad14f0 --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_baidu_channel.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_baidu_channel" +description: |- + Provides a Pinpoint Baidu Channel resource. +--- + + + +# Resource: aws_pinpoint_baidu_channel + +Provides a Pinpoint Baidu Channel resource. 
+ +~> **Note:** All arguments including the Api Key and Secret Key will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.pinpoint_app import PinpointApp +from imports.aws.pinpoint_baidu_channel import PinpointBaiduChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + app = PinpointApp(self, "app") + PinpointBaiduChannel(self, "channel", + api_key="", + application_id=app.application_id, + secret_key="" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required) The application ID. +* `enabled` - (Optional) Specifies whether to enable the channel. Defaults to `true`. +* `api_key` - (Required) Platform credential API key from Baidu. +* `secret_key` - (Required) Platform credential Secret key from Baidu. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint Baidu Channel using the `application-id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Pinpoint Baidu Channel using the `application-id`. For example: + +```console +% terraform import aws_pinpoint_baidu_channel.channel application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_email_channel.markdown b/website/docs/cdktf/python/r/pinpoint_email_channel.markdown new file mode 100644 index 00000000000..dba4c7c6b10 --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_email_channel.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_email_channel" +description: |- + Provides a Pinpoint Email Channel resource. +--- + + + +# Resource: aws_pinpoint_email_channel + +Provides a Pinpoint Email Channel resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
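+# The example wires up a Pinpoint app, an SES domain identity to send from,
+# and an IAM role that Pinpoint assumes to submit events to Mobile Analytics.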
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+from imports.aws.pinpoint_app import PinpointApp
+from imports.aws.pinpoint_email_channel import PinpointEmailChannel
+from imports.aws.ses_domain_identity import SesDomainIdentity
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        app = PinpointApp(self, "app")
+        identity = SesDomainIdentity(self, "identity",
+            domain="example.com"
+        )
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["pinpoint.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        role_policy = DataAwsIamPolicyDocument(self, "role_policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["mobileanalytics:PutEvents", "mobileanalytics:PutItems"],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        role = IamRole(self, "role",
+            assume_role_policy=Token.as_string(assume_role.json)
+        )
+        aws_iam_role_policy_role_policy = IamRolePolicy(self, "role_policy_5",
+            name="role_policy",
+            policy=Token.as_string(role_policy.json),
+            role=role.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_role_policy.override_logical_id("role_policy")
+        PinpointEmailChannel(self, "email",
+            application_id=app.application_id,
+            from_address="user@example.com",
+            role_arn=role.arn,
+            identity=identity.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `application_id` - (Required) The application ID.
+* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`.
+* `configuration_set` - (Optional) The ARN of the Amazon SES configuration set that you want to apply to messages that you send through the channel.
+* `from_address` - (Required) The email address used to send emails from. You can use a plain email address (`user@example.com`) or a friendly address (`User <user@example.com>`). This field complies with [RFC 5322](https://www.ietf.org/rfc/rfc5322.txt).
+* `identity` - (Required) The ARN of an identity verified with SES.
+* `role_arn` - (Optional) The ARN of an IAM Role used to submit events to Mobile Analytics' event ingestion service.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `messages_per_second` - Messages per second that can be sent.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint Email Channel using the `application-id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Pinpoint Email Channel using the `application-id`.
For example: + +```console +% terraform import aws_pinpoint_email_channel.email application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_event_stream.markdown b/website/docs/cdktf/python/r/pinpoint_event_stream.markdown new file mode 100644 index 00000000000..13c30f8fb4f --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_event_stream.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_event_stream" +description: |- + Provides a Pinpoint Event Stream resource. +--- + + + +# Resource: aws_pinpoint_event_stream + +Provides a Pinpoint Event Stream resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.kinesis_stream import KinesisStream +from imports.aws.pinpoint_app import PinpointApp +from imports.aws.pinpoint_event_stream import PinpointEventStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test_stream = KinesisStream(self, "test_stream", + name="pinpoint-kinesis-test", + shard_count=1 + ) + app = PinpointApp(self, "app") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["pinpoint.us-east-1.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + test_role_policy = DataAwsIamPolicyDocument(self, "test_role_policy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["kinesis:PutRecords", "kinesis:DescribeStream"], + effect="Allow", + resources=["arn:aws:kinesis:us-east-1:*:*/*"] + ) + ] + ) + test_role = IamRole(self, "test_role", + assume_role_policy=Token.as_string(assume_role.json) + ) + aws_iam_role_policy_test_role_policy = IamRolePolicy(self, "test_role_policy_5", + name="test_policy", + policy=Token.as_string(test_role_policy.json), + role=test_role.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_test_role_policy.override_logical_id("test_role_policy") + PinpointEventStream(self, "stream", + application_id=app.application_id, + destination_stream_arn=test_stream.arn, + role_arn=test_role.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required) The application ID. +* `destination_stream_arn` - (Required) The Amazon Resource Name (ARN) of the Amazon Kinesis stream or Firehose delivery stream to which you want to publish events. +* `role_arn` - (Required) The IAM role that authorizes Amazon Pinpoint to publish events to the stream in your account. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint Event Stream using the `application-id`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Pinpoint Event Stream using the `application-id`. For example: + +```console +% terraform import aws_pinpoint_event_stream.stream application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_gcm_channel.markdown b/website/docs/cdktf/python/r/pinpoint_gcm_channel.markdown new file mode 100644 index 00000000000..02390491174 --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_gcm_channel.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_gcm_channel" +description: |- + Provides a Pinpoint GCM Channel resource. +--- + + + +# Resource: aws_pinpoint_gcm_channel + +Provides a Pinpoint GCM Channel resource. + +~> **Note:** Api Key argument will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.pinpoint_app import PinpointApp +from imports.aws.pinpoint_gcm_channel import PinpointGcmChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + app = PinpointApp(self, "app") + PinpointGcmChannel(self, "gcm", + api_key="api_key", + application_id=app.application_id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required) The application ID. +* `api_key` - (Required) Platform credential API key from Google. +* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint GCM Channel using the `application-id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Pinpoint GCM Channel using the `application-id`. For example: + +```console +% terraform import aws_pinpoint_gcm_channel.gcm application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pinpoint_sms_channel.markdown b/website/docs/cdktf/python/r/pinpoint_sms_channel.markdown new file mode 100644 index 00000000000..dc6315a41c5 --- /dev/null +++ b/website/docs/cdktf/python/r/pinpoint_sms_channel.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_sms_channel" +description: |- + Use the `aws_pinpoint_sms_channel` resource to manage Pinpoint SMS Channels. +--- + + + +# Resource: aws_pinpoint_sms_channel + +Use the `aws_pinpoint_sms_channel` resource to manage Pinpoint SMS Channels. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.pinpoint_app import PinpointApp +from imports.aws.pinpoint_sms_channel import PinpointSmsChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + app = PinpointApp(self, "app") + PinpointSmsChannel(self, "sms", + application_id=app.application_id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `application_id` - (Required) ID of the application. +* `enabled` - (Optional) Whether the channel is enabled or disabled. By default, it is set to `true`. +* `sender_id` - (Optional) Identifier of the sender for your messages. +* `short_code` - (Optional) Short Code registered with the phone provider. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `promotional_messages_per_second` - Maximum number of promotional messages that can be sent per second. +* `transactional_messages_per_second` - Maximum number of transactional messages per second that can be sent. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Pinpoint SMS Channel using the `application_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import the Pinpoint SMS Channel using the `application_id`. For example: + +```console +% terraform import aws_pinpoint_sms_channel.sms application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/pipes_pipe.html.markdown b/website/docs/cdktf/python/r/pipes_pipe.html.markdown new file mode 100644 index 00000000000..78c5b9ffd20 --- /dev/null +++ b/website/docs/cdktf/python/r/pipes_pipe.html.markdown @@ -0,0 +1,591 @@ +--- +subcategory: "EventBridge Pipes" +layout: "aws" +page_title: "AWS: aws_pipes_pipe" +description: |- + Terraform resource for managing an AWS EventBridge Pipes Pipe. +--- + + + +# Resource: aws_pipes_pipe + +Terraform resource for managing an AWS EventBridge Pipes Pipe. + +You can find out more about EventBridge Pipes in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes.html). + +EventBridge Pipes are very configurable, and may require IAM permissions to work correctly. More information on the configuration options and IAM permissions can be found in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes.html). + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
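+# The role below trusts pipes.amazonaws.com (scoped to this account via the
+# aws:SourceAccount condition); its two inline policies grant the pipe read
+# access to the source queue and send access to the target queue.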
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+from imports.aws.pipes_pipe import PipesPipe
+from imports.aws.sqs_queue import SqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        source = SqsQueue(self, "source")
+        target = SqsQueue(self, "target")
+        main = DataAwsCallerIdentity(self, "main")
+        test = IamRole(self, "test",
+            assume_role_policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": {
+                        "Action": "sts:AssumeRole",
+                        "Condition": {
+                            "StringEquals": {
+                                "aws:SourceAccount": main.account_id
+                            }
+                        },
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "pipes.amazonaws.com"
+                        }
+                    },
+                    "Version": "2012-10-17"
+                }))
+        )
+        aws_iam_role_policy_source = IamRolePolicy(self, "source_4",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": ["sqs:DeleteMessage", "sqs:GetQueueAttributes", "sqs:ReceiveMessage"],
+                        "Effect": "Allow",
+                        "Resource": [source.arn]
+                    }],
+                    "Version": "2012-10-17"
+                })),
+            role=test.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_source.override_logical_id("source")
+        aws_iam_role_policy_target = IamRolePolicy(self, "target_5",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": ["sqs:SendMessage"],
+                        "Effect": "Allow",
+                        "Resource": [target.arn]
+                    }],
+                    "Version": "2012-10-17"
+                })),
+            role=test.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_target.override_logical_id("target")
+        PipesPipe(self, "example",
+            depends_on=[aws_iam_role_policy_source, aws_iam_role_policy_target],
+            name="example-pipe",
+            role_arn=test.arn,
+            source=source.arn,
+            target=target.arn
+        )
+```
+
+### Enrichment Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.pipes_pipe import PipesPipe
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        PipesPipe(self, "example",
+            enrichment=Token.as_string(aws_cloudwatch_event_api_destination_example.arn),
+            enrichment_parameters=PipesPipeEnrichmentParameters(
+                http_parameters=PipesPipeEnrichmentParametersHttpParameters(
+                    header_parameters={
+                        "example-header": "example-value",
+                        "second-example-header": "second-example-value"
+                    },
+                    path_parameter_values=["example-path-param"],
+                    query_string_parameters={
+                        "example-query-string": "example-value",
+                        "second-example-query-string": "second-example-value"
+                    }
+                )
+            ),
+            name="example-pipe",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            source=source.arn,
+            target=target.arn
+        )
+```
+
+### Filter Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
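+# The event pattern below means the pipe only forwards events whose source
+# field equals "event-source"; everything else is filtered out.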
+#
+from imports.aws.pipes_pipe import PipesPipe
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        PipesPipe(self, "example",
+            name="example-pipe",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            source=source.arn,
+            source_parameters=PipesPipeSourceParameters(
+                filter_criteria=PipesPipeSourceParametersFilterCriteria(
+                    filter=[PipesPipeSourceParametersFilterCriteriaFilter(
+                        pattern=Token.as_string(
+                            Fn.jsonencode({
+                                "source": ["event-source"]
+                            }))
+                    )
+                    ]
+                )
+            ),
+            target=target.arn
+        )
+```
+
+### SQS Source and Target Configuration Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.pipes_pipe import PipesPipe
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        PipesPipe(self, "example",
+            name="example-pipe",
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            source=source.arn,
+            source_parameters=PipesPipeSourceParameters(
+                sqs_queue_parameters=PipesPipeSourceParametersSqsQueueParameters(
+                    batch_size=1,
+                    maximum_batching_window_in_seconds=2
+                )
+            ),
+            target=target.arn,
+            target_parameters=PipesPipeTargetParameters(
+                sqs_queue_parameters=PipesPipeTargetParametersSqsQueueParameters(
+                    message_deduplication_id="example-dedupe",
+                    message_group_id="example-group"
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `role_arn` - (Required) ARN of the role that allows the pipe to send data to the target.
+* `source` - (Required) Source resource of the pipe (typically an ARN).
+* `target` - (Required) Target resource of the pipe (typically an ARN).
+
+The following arguments are optional:
+
+* `description` - (Optional) A description of the pipe. At most 512 characters.
+* `desired_state` - (Optional) The state the pipe should be in. One of: `RUNNING`, `STOPPED`.
+* `enrichment` - (Optional) Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes.html#pipes-enrichment).
+* `enrichment_parameters` - (Optional) Parameters to configure enrichment for your pipe. Detailed below.
+* `name` - (Optional) Name of the pipe. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`.
+* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `source_parameters` - (Optional) Parameters to configure a source for the pipe. Detailed below.
+* `target_parameters` - (Optional) Parameters to configure a target for your pipe. Detailed below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### enrichment_parameters Configuration Block
+
+You can find out more about EventBridge Pipes Enrichment in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/pipes-enrichment.html).
+
+* `input_template` - (Optional) Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
+* `http_parameters` - (Optional) Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
+
+#### enrichment_parameters.http_parameters Configuration Block
+
+* `header_parameters` - (Optional) Key-value mapping of the headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
+* `path_parameter_values` - (Optional) The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
+* `query_string_parameters` - (Optional) Key-value mapping of the query strings that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
+
+### source_parameters Configuration Block
+
+You can find out more about EventBridge Pipes Sources in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes-event-source.html).
+
+* `activemq_broker_parameters` - (Optional) The parameters for using an Active MQ broker as a source. Detailed below.
+* `dynamodb_stream_parameters` - (Optional) The parameters for using a DynamoDB stream as a source. Detailed below.
+* `filter_criteria` - (Optional) The collection of event patterns used to [filter events](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes-event-filtering.html). Detailed below.
+* `kinesis_stream_parameters` - (Optional) The parameters for using a Kinesis stream as a source. Detailed below.
+* `managed_streaming_kafka_parameters` - (Optional) The parameters for using an MSK stream as a source. Detailed below.
+* `rabbitmq_broker_parameters` - (Optional) The parameters for using a Rabbit MQ broker as a source. Detailed below.
+* `self_managed_kafka_parameters` - (Optional) The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
+* `sqs_queue_parameters` - (Optional) The parameters for using an Amazon SQS queue as a source. Detailed below.
+
+#### source_parameters.filter_criteria Configuration Block
+
+* `filter` - (Optional) An array of up to 5 event patterns. Detailed below.
+
+##### source_parameters.filter_criteria.filter Configuration Block
+
+* `pattern` - (Required) The event pattern. At most 4096 characters.
+
+#### source_parameters.activemq_broker_parameters Configuration Block
+
+* `batch_size` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `credentials` - (Required) The credentials needed to access the resource. Detailed below.
+* `maximum_batching_window_in_seconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `queue_name` - (Required) The name of the destination queue to consume. Maximum length of 1000.
+
+##### source_parameters.activemq_broker_parameters.credentials Configuration Block
+
+* `basic_auth` - (Required) The ARN of the Secrets Manager secret containing the basic auth credentials.
+
+#### source_parameters.dynamodb_stream_parameters Configuration Block
+
+* `batch_size` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `dead_letter_config` - (Optional) Define the target queue to send dead-letter queue events to. Detailed below.
+* `maximum_batching_window_in_seconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `maximum_record_age_in_seconds` - (Optional) Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
+* `maximum_retry_attempts` - (Optional) Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
+* `on_partial_batch_item_failure` - (Optional) Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
+* `parallelization_factor` - (Optional) The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
+* `starting_position` - (Optional) The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
+
+##### source_parameters.dynamodb_stream_parameters.dead_letter_config Configuration Block
+
+* `arn` - (Optional) The ARN of the Amazon SQS queue specified as the target for the dead-letter queue.
+
+#### source_parameters.kinesis_stream_parameters Configuration Block
+
+* `batch_size` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `dead_letter_config` - (Optional) Define the target queue to send dead-letter queue events to. Detailed below.
+* `maximum_batching_window_in_seconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `maximum_record_age_in_seconds` - (Optional) Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
+* `maximum_retry_attempts` - (Optional) Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
+* `on_partial_batch_item_failure` - (Optional) Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
+* `parallelization_factor` - (Optional) The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
+* `starting_position` - (Required) The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
+* `starting_position_timestamp` - (Optional) With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
+
+##### source_parameters.kinesis_stream_parameters.dead_letter_config Configuration Block
+
+* `arn` - (Optional) The ARN of the Amazon SQS queue specified as the target for the dead-letter queue.
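+
+Taken together, the Kinesis stream source options above map onto the generated bindings roughly as follows. This is a minimal sketch, not a tested configuration: every ARN is a placeholder, and the `PipesPipeSourceParametersKinesisStreamParameters*` struct names are inferred from the naming pattern of the structs used in the examples above:
+
+```python
+# A hypothetical Kinesis-source pipe; the ARNs are placeholders and the struct
+# class names are inferred from the binding naming pattern shown above.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.pipes_pipe import (PipesPipe, PipesPipeSourceParameters,
+    PipesPipeSourceParametersKinesisStreamParameters,
+    PipesPipeSourceParametersKinesisStreamParametersDeadLetterConfig)
+class KinesisSourcePipeSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        PipesPipe(self, "kinesis_example",
+            name="kinesis-example-pipe",
+            role_arn="arn:aws:iam::123456789012:role/example-pipe-role",
+            source="arn:aws:kinesis:us-east-1:123456789012:stream/example",
+            source_parameters=PipesPipeSourceParameters(
+                kinesis_stream_parameters=PipesPipeSourceParametersKinesisStreamParameters(
+                    batch_size=100,
+                    starting_position="TRIM_HORIZON",
+                    maximum_retry_attempts=3,
+                    # Records that still fail after the retries go to this queue.
+                    dead_letter_config=PipesPipeSourceParametersKinesisStreamParametersDeadLetterConfig(
+                        arn="arn:aws:sqs:us-east-1:123456789012:example-dlq"
+                    )
+                )
+            ),
+            target="arn:aws:sqs:us-east-1:123456789012:example-target"
+        )
+```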
+ +#### source_parameters.managed_streaming_kafka_parameters Configuration Block + +* `batch_size` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000. +* `consumer_group_id` - (Optional) The name of the destination queue to consume. Maximum value of 200. +* `credentials` - (Optional) The credentials needed to access the resource. Detailed below. +* `maximum_batching_window_in_seconds` - (Optional) The maximum length of a time to wait for events. Maximum value of 300. +* `starting_position` - (Optional) The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST. +* `topic_name` - (Required) The name of the topic that the pipe will read from. Maximum length of 249. + +##### source_parameters.managed_streaming_kafka_parameters.credentials Configuration Block + +* `client_certificate_tls_auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials. +* `sasl_scram_512_auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials. + +#### source_parameters.rabbitmq_broker_parameters Configuration Block + +* `batch_size` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000. +* `credentials` - (Required) The credentials needed to access the resource. Detailed below. +* `maximum_batching_window_in_seconds` - (Optional) The maximum length of a time to wait for events. Maximum value of 300. +* `queue_name` - (Required) The name of the destination queue to consume. Maximum length of 1000. +* `virtual_host` - (Optional) The name of the virtual host associated with the source broker. Maximum length of 200. + +##### source_parameters.rabbitmq_broker_parameters.credentials Configuration Block + +* `basic_auth` - (Required) The ARN of the Secrets Manager secret containing the credentials. + +#### source_parameters.self_managed_kafka_parameters Configuration Block + +* `additional_bootstrap_servers` - (Optional) An array of server URLs. Maximum number of 2 items, each of maximum length 300. +* `batch_size` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000. +* `consumer_group_id` - (Optional) The name of the destination queue to consume. Maximum value of 200. +* `credentials` - (Optional) The credentials needed to access the resource. Detailed below. +* `maximum_batching_window_in_seconds` - (Optional) The maximum length of a time to wait for events. Maximum value of 300. +* `server_root_ca_certificate` - (Optional) The ARN of the Secrets Manager secret used for certification. +* `starting_position` - (Optional) The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST. +* `topic_name` - (Required) The name of the topic that the pipe will read from. Maximum length of 249. +* `vpc` - (Optional) This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below. + +##### source_parameters.self_managed_kafka_parameters.credentials Configuration Block + +* `basic_auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials. +* `client_certificate_tls_auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials. +* `sasl_scram_256_auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials. +* `sasl_scram_512_auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials. 
+
+##### source_parameters.self_managed_kafka_parameters.vpc Configuration Block
+
+* `security_groups` - (Optional) List of security groups associated with the stream. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
+* `subnets` - (Optional) List of the subnets associated with the stream. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
+
+#### source_parameters.sqs_queue_parameters Configuration Block
+
+* `batch_size` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `maximum_batching_window_in_seconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+
+### target_parameters Configuration Block
+
+You can find out more about EventBridge Pipes Targets in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes-event-target.html).
+
+* `batch_job_parameters` - (Optional) The parameters for using an AWS Batch job as a target. Detailed below.
+* `cloudwatch_logs_parameters` - (Optional) The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
+* `ecs_task_parameters` - (Optional) The parameters for using an Amazon ECS task as a target. Detailed below.
+* `eventbridge_event_bus_parameters` - (Optional) The parameters for using an EventBridge event bus as a target. Detailed below.
+* `http_parameters` - (Optional) These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination. Detailed below.
+* `input_template` - (Optional) Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
+* `kinesis_stream_parameters` - (Optional) The parameters for using a Kinesis stream as a target. Detailed below.
+* `lambda_function_parameters` - (Optional) The parameters for using a Lambda function as a target. Detailed below.
+* `redshift_data_parameters` - (Optional) These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
+* `sagemaker_pipeline_parameters` - (Optional) The parameters for using a SageMaker pipeline as a target. Detailed below.
+* `sqs_queue_parameters` - (Optional) The parameters for using an Amazon SQS queue as a target. Detailed below.
+* `step_function_state_machine_parameters` - (Optional) The parameters for using a Step Functions state machine as a target. Detailed below.
+
+#### target_parameters.batch_job_parameters Configuration Block
+
+* `array_properties` - (Optional) The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
+* `container_overrides` - (Optional) The overrides that are sent to a container. Detailed below.
+* `depends_on` - (Optional) A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs.
In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below. +* `job_definition` - (Required) The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used. +* `job_name` - (Required) The name of the job. It can be up to 128 letters long. +* `parameters` - (Optional) Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below. +* `retry_strategy` - (Optional) The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below. + +##### target_parameters.batch_job_parameters.array_properties Configuration Block + +* `size` - (Optional) The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000. + +##### target_parameters.batch_job_parameters.container_overrides Configuration Block + +* `command` - (Optional) List of commands to send to the container that overrides the default command from the Docker image or the task definition. +* `environment` - (Optional) The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with " AWS Batch ". This naming convention is reserved for variables that AWS Batch sets. Detailed below. +* `instance_type` - (Optional) The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided. +* `resource_requirement` - (Optional) The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below. + +###### target_parameters.batch_job_parameters.container_overrides.environment Configuration Block + +* `name` - (Optional) The name of the key-value pair. For environment variables, this is the name of the environment variable. +* `value` - (Optional) The value of the key-value pair. For environment variables, this is the value of the environment variable. + +###### target_parameters.batch_job_parameters.container_overrides.resource_requirement Configuration Block + +* `type` - (Optional) The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU. +* `value` - (Optional) The quantity of the specified resource to reserve for the container. [The values vary based on the type specified](https://docs.aws.amazon.com/eventbridge/latest/pipes-reference/API_BatchResourceRequirement.html). + +##### target_parameters.batch_job_parameters.depends_on Configuration Block + +* `job_id` - (Optional) The job ID of the AWS Batch job that's associated with this dependency. +* `type` - (Optional) The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL. 
+
+##### target_parameters.batch_job_parameters.retry_strategy Configuration Block
+
+* `attempts` - (Optional) The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
+
+#### target_parameters.cloudwatch_logs_parameters Configuration Block
+
+* `log_stream_name` - (Optional) The name of the log stream.
+* `timestamp` - (Optional) The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. `$.detail.timestamp`.
+
+#### target_parameters.ecs_task_parameters Configuration Block
+
+* `capacity_provider_strategy` - (Optional) List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
+* `enable_ecs_managed_tags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
+* `enable_execute_command` - (Optional) Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
+* `group` - (Optional) Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
+* `launch_type` - (Optional) Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch types (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL.
+* `network_configuration` - (Optional) Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
+* `overrides` - (Optional) The overrides that are associated with a task. Detailed below.
+* `placement_constraint` - (Optional) An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
+* `placement_strategy` - (Optional) The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
+* `platform_version` - (Optional) Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
+* `propagate_tags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION.
+* `reference_id` - (Optional) The reference ID to use for the task. Maximum length of 1,024.
+* `tags` - (Optional) Key-value map of tags that you apply to the task to help you categorize and organize them. +* `task_count` - (Optional) The number of tasks to create based on TaskDefinition. The default is 1. +* `task_definition_arn` - (Optional) The ARN of the task definition to use if the event target is an Amazon ECS task. + +##### target_parameters.ecs_task_parameters.capacity_provider_strategy Configuration Block + +* `base` - (Optional) The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000. +* `capacity_provider` - (Optional) The short name of the capacity provider. Maximum value of 255. +* `weight` - (Optional) The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000. + +##### target_parameters.ecs_task_parameters.network_configuration Configuration Block + +* `aws_vpc_configuration` - (Optional) Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below. + +###### target_parameters.ecs_task_parameters.network_configuration.aws_vpc_configuration Configuration Block + +* `assign_public_ip` - (Optional) Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED. +* `security_groups` - (Optional) Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used. +* `subnets` - (Optional) Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets. + +##### target_parameters.ecs_task_parameters.overrides Configuration Block + +* `container_override` - (Optional) One or more container overrides that are sent to a task. Detailed below. +* `cpu` - (Optional) The cpu override for the task. +* `ephemeral_storage` - (Optional) The ephemeral storage setting override for the task. Detailed below. +* `execution_role_arn` - (Optional) The Amazon Resource Name (ARN) of the task execution IAM role override for the task. +* `inference_accelerator_override` - (Optional) List of Elastic Inference accelerator overrides for the task. Detailed below. +* `memory` - (Optional) The memory override for the task. +* `task_role_arn` - (Optional) The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. + +###### target_parameters.ecs_task_parameters.overrides.container_override Configuration Block + +* `command` - (Optional) List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name. +* `cpu` - (Optional) The number of cpu units reserved for the container, instead of the default value from the task definition. 
You must also specify a container name. +* `environment` - (Optional) The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below. +* `environment_file` - (Optional) A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below. +* `memory` - (Optional) The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name. +* `memory_reservation` - (Optional) The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name. +* `name` - (Optional) The name of the container that receives the override. This parameter is required if any override is specified. +* `resource_requirement` - (Optional) The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below. + +###### target_parameters.ecs_task_parameters.overrides.container_override.environment Configuration Block + +* `name` - (Optional) The name of the key-value pair. For environment variables, this is the name of the environment variable. +* `value` - (Optional) The value of the key-value pair. For environment variables, this is the value of the environment variable. + +###### target_parameters.ecs_task_parameters.overrides.container_override.environment_file Configuration Block + +* `type` - (Optional) The file type to use. The only supported value is s3. +* `value` - (Optional) The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file. + +###### target_parameters.ecs_task_parameters.overrides.container_override.resource_requirement Configuration Block + +* `type` - (Optional) The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator. +* `value` - (Optional) The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition. + +###### target_parameters.ecs_task_parameters.overrides.ephemeral_storage Configuration Block + +* `size_in_gib` - (Required) The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB. + +###### target_parameters.ecs_task_parameters.overrides.inference_accelerator_override Configuration Block + +* `device_name` - (Optional) The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition. +* `device_type` - (Optional) The Elastic Inference accelerator type to use. 
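+
+The ECS sub-blocks nest the same way. A hand-written sketch (again not `cdktf convert` output; ARNs and the subnet ID are placeholders) of a pipe that runs a Fargate task with an awsvpc network configuration and a single container override:
+
+```python
+# Hand-written sketch, not 'cdktf convert' output; verify names against your
+# generated bindings (run `cdktf get` first). ARNs and IDs are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.pipes_pipe import PipesPipe
+
+class EcsTargetSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        PipesPipe(self, "ecs_example",
+            name="example-ecs-pipe",
+            role_arn="arn:aws:iam::123456789012:role/example",
+            source="arn:aws:sqs:us-east-1:123456789012:example-queue",
+            target="arn:aws:ecs:us-east-1:123456789012:cluster/example",
+            target_parameters={
+                "ecs_task_parameters": {
+                    "task_definition_arn": "arn:aws:ecs:us-east-1:123456789012:task-definition/example",
+                    "launch_type": "FARGATE",
+                    # awsvpc networking is required when launch_type is FARGATE.
+                    "network_configuration": {
+                        "aws_vpc_configuration": {
+                            "assign_public_ip": "ENABLED",
+                            "subnets": ["subnet-0123456789abcdef0"]
+                        }
+                    },
+                    # A container override must name the container it applies to.
+                    "overrides": {
+                        "container_override": [{
+                            "name": "app",
+                            "environment": [{
+                                "name": "STAGE",
+                                "value": "production"
+                            }]
+                        }]
+                    }
+                }
+            }
+        )
+```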
+
+##### target_parameters.ecs_task_parameters.placement_constraint Configuration Block
+
+* `expression` - (Optional) A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
+* `type` - (Optional) The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
+
+##### target_parameters.ecs_task_parameters.placement_strategy Configuration Block
+
+* `field` - (Optional) The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
+* `type` - (Optional) The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
+
+#### target_parameters.eventbridge_event_bus_parameters Configuration Block
+
+* `detail_type` - (Optional) A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
+* `endpoint_id` - (Optional) The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
+* `resources` - (Optional) List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
+* `source` - (Optional) The source of the event. Maximum length of 256.
+* `time` - (Optional) The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event, e.g. `$.detail.timestamp`.
+
+#### target_parameters.http_parameters Configuration Block
+
+* `header_parameters` - (Optional) Key-value mapping of the headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination. Detailed below.
+* `path_parameter_values` - (Optional) The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
+* `query_string_parameters` - (Optional) Key-value mapping of the query strings that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination. Detailed below.
+
+#### target_parameters.kinesis_stream_parameters Configuration Block
+
+* `partition_key` - (Required) Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
+
+#### target_parameters.lambda_function_parameters Configuration Block
+
+* `invocation_type` - (Optional) Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
+
+#### target_parameters.redshift_data_parameters Configuration Block
+
+* `database` - (Required) The name of the database. Required when authenticating using temporary credentials.
+* `db_user` - (Optional) The database user name. Required when authenticating using temporary credentials.
+* `secret_manager_arn` - (Optional) The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
+* `sqls` - (Optional) List of SQL statements to run, each with a maximum length of 100,000.
+* `statement_name` - (Optional) The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
+* `with_event` - (Optional) Indicates whether to send an event back to EventBridge after the SQL statement runs.
+
+#### target_parameters.sagemaker_pipeline_parameters Configuration Block
+
+* `pipeline_parameter` - (Optional) List of parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
+
+##### target_parameters.sagemaker_pipeline_parameters.pipeline_parameter Configuration Block
+
+* `name` - (Optional) Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
+* `value` - (Optional) Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
+
+#### target_parameters.sqs_queue_parameters Configuration Block
+
+* `message_deduplication_id` - (Optional) This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
+* `message_group_id` - (Optional) The FIFO message group ID to use as the target.
+
+#### target_parameters.step_function_state_machine_parameters Configuration Block
+
+* `invocation_type` - (Optional) Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of this pipe.
+* `id` - Same as `name`.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import pipes using the `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import pipes using the `name`. For example: + +```console +% terraform import aws_pipes_pipe.example my-pipe +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/placement_group.html.markdown b/website/docs/cdktf/python/r/placement_group.html.markdown new file mode 100644 index 00000000000..cbf3e401dbe --- /dev/null +++ b/website/docs/cdktf/python/r/placement_group.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_placement_group" +description: |- + Provides an EC2 placement group. +--- + + + +# Resource: aws_placement_group + +Provides an EC2 placement group. Read more about placement groups +in [AWS Docs](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/placement-groups.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.placement_group import PlacementGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + PlacementGroup(self, "web", + name="hunky-dory-pg", + strategy="cluster" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the placement group. +* `partition_count` - (Optional) The number of partitions to create in the + placement group. Can only be specified when the `strategy` is set to + `partition`. Valid values are 1 - 7 (default is `2`). +* `spread_level` - (Optional) Determines how placement groups spread instances. Can only be used + when the `strategy` is set to `spread`. Can be `host` or `rack`. `host` can only be used for Outpost placement groups. Defaults to `rack`. +* `strategy` - (Required) The placement strategy. Can be `cluster`, `partition` or `spread`. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the placement group. +* `id` - The name of the placement group. +* `placement_group_id` - The ID of the placement group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import placement groups using the `name`. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import placement groups using the `name`. For example:
+
+```console
+% terraform import aws_placement_group.prod_pg production-placement-group
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/prometheus_alert_manager_definition.html.markdown b/website/docs/cdktf/python/r/prometheus_alert_manager_definition.html.markdown
new file mode 100644
index 00000000000..08588dcfae8
--- /dev/null
+++ b/website/docs/cdktf/python/r/prometheus_alert_manager_definition.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "AMP (Managed Prometheus)"
+layout: "aws"
+page_title: "AWS: aws_prometheus_alert_manager_definition"
+description: |-
+  Manages an Amazon Managed Service for Prometheus (AMP) Alert Manager Definition
+---
+
+
+# Resource: aws_prometheus_alert_manager_definition
+
+Manages an Amazon Managed Service for Prometheus (AMP) Alert Manager Definition.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.prometheus_alert_manager_definition import PrometheusAlertManagerDefinition
+from imports.aws.prometheus_workspace import PrometheusWorkspace
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        demo = PrometheusWorkspace(self, "demo")
+        aws_prometheus_alert_manager_definition_demo = PrometheusAlertManagerDefinition(self, "demo_1",
+            definition="alertmanager_config: |\n route:\n receiver: 'default'\n receivers:\n - name: 'default'\n\n",
+            workspace_id=demo.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_prometheus_alert_manager_definition_demo.override_logical_id("demo")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `workspace_id` - (Required) ID of the prometheus workspace the alert manager definition should be linked to.
+* `definition` - (Required) The alert manager definition that you want to apply. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-alert-manager.html).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the prometheus alert manager definition using the workspace identifier. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the prometheus alert manager definition using the workspace identifier.
For example:
+
+```console
+% terraform import aws_prometheus_alert_manager_definition.demo ws-C6DCB907-F2D7-4D96-957B-66691F865D8B
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/prometheus_rule_group_namespace.html.markdown b/website/docs/cdktf/python/r/prometheus_rule_group_namespace.html.markdown
new file mode 100644
index 00000000000..a90eb2dbf02
--- /dev/null
+++ b/website/docs/cdktf/python/r/prometheus_rule_group_namespace.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "AMP (Managed Prometheus)"
+layout: "aws"
+page_title: "AWS: aws_prometheus_rule_group_namespace"
+description: |-
+  Manages an Amazon Managed Service for Prometheus (AMP) Rule Group Namespace
+---
+
+
+# Resource: aws_prometheus_rule_group_namespace
+
+Manages an Amazon Managed Service for Prometheus (AMP) Rule Group Namespace.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.prometheus_rule_group_namespace import PrometheusRuleGroupNamespace
+from imports.aws.prometheus_workspace import PrometheusWorkspace
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        demo = PrometheusWorkspace(self, "demo")
+        aws_prometheus_rule_group_namespace_demo = PrometheusRuleGroupNamespace(self, "demo_1",
+            data="groups:\n - name: test\n rules:\n - record: metric:recording_rule\n expr: avg(rate(container_cpu_usage_seconds_total[5m]))\n\n",
+            name="rules",
+            workspace_id=demo.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_prometheus_rule_group_namespace_demo.override_logical_id("demo")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the rule group namespace.
+* `workspace_id` - (Required) ID of the prometheus workspace the rule group namespace should be linked to.
+* `data` - (Required) The rule group namespace data that you want to apply. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the prometheus rule group namespace using the arn. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the prometheus rule group namespace using the arn.
For example: + +```console +% terraform import aws_prometheus_rule_group_namespace.demo arn:aws:aps:us-west-2:123456789012:rulegroupsnamespace/IDstring/namespace_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/prometheus_workspace.html.markdown b/website/docs/cdktf/python/r/prometheus_workspace.html.markdown new file mode 100644 index 00000000000..4ef0d5f4a3b --- /dev/null +++ b/website/docs/cdktf/python/r/prometheus_workspace.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "AMP (Managed Prometheus)" +layout: "aws" +page_title: "AWS: aws_prometheus_workspace" +description: |- + Manages an Amazon Managed Service for Prometheus (AMP) Workspace +--- + + + +# Resource: aws_prometheus_workspace + +Manages an Amazon Managed Service for Prometheus (AMP) Workspace. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.prometheus_workspace import PrometheusWorkspace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + PrometheusWorkspace(self, "example", + alias="example", + tags={ + "Environment": "production" + } + ) +``` + +### CloudWatch Logging + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cloudwatch_log_group import CloudwatchLogGroup +from imports.aws.prometheus_workspace import PrometheusWorkspace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CloudwatchLogGroup(self, "example", + name="example" + ) + aws_prometheus_workspace_example = PrometheusWorkspace(self, "example_1", + logging_configuration=PrometheusWorkspaceLoggingConfiguration( + log_group_arn="${" + example.arn + "}:*" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_prometheus_workspace_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `alias` - (Optional) The alias of the prometheus workspace. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-onboard-create-workspace.html). +* `logging_configuration` - (Optional) Logging configuration for the workspace. See [Logging Configuration](#logging-configuration) below for details. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Logging Configuration + +The `logging_configuration` block supports the following arguments: + +* `log_group_arn` - (Required) The ARN of the CloudWatch log group to which the vended log data will be published. This log group must exist. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the workspace. 
+* `id` - Identifier of the workspace.
+* `prometheus_endpoint` - Prometheus endpoint available for this workspace.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AMP Workspaces using the identifier. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import AMP Workspaces using the identifier. For example:
+
+```console
+% terraform import aws_prometheus_workspace.demo ws-C6DCB907-F2D7-4D96-957B-66691F865D8B
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/proxy_protocol_policy.html.markdown b/website/docs/cdktf/python/r/proxy_protocol_policy.html.markdown
new file mode 100644
index 00000000000..e5b5de7bf90
--- /dev/null
+++ b/website/docs/cdktf/python/r/proxy_protocol_policy.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_proxy_protocol_policy"
+description: |-
+  Provides a proxy protocol policy, which allows an ELB to carry client connection information to a backend.
+---
+
+
+# Resource: aws_proxy_protocol_policy
+
+Provides a proxy protocol policy, which allows an ELB to carry client connection information to a backend.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.elb import Elb
+from imports.aws.proxy_protocol_policy import ProxyProtocolPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        lb = Elb(self, "lb",
+            availability_zones=["us-east-1a"],
+            listener=[ElbListener(
+                instance_port=25,
+                instance_protocol="tcp",
+                lb_port=25,
+                lb_protocol="tcp"
+            ), ElbListener(
+                instance_port=587,
+                instance_protocol="tcp",
+                lb_port=587,
+                lb_protocol="tcp"
+            )
+            ],
+            name="test-lb"
+        )
+        ProxyProtocolPolicy(self, "smtp",
+            instance_ports=["25", "587"],
+            load_balancer=lb.name
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `load_balancer` - (Required) The load balancer to which the policy should be attached.
+* `instance_ports` - (Required) List of instance ports to which the policy should be applied. This can be specified if the protocol is SSL or TCP.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the policy.
+* `load_balancer` - The load balancer to which the policy is attached.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/qldb_ledger.html.markdown b/website/docs/cdktf/python/r/qldb_ledger.html.markdown
new file mode 100644
index 00000000000..4097b8a4f90
--- /dev/null
+++ b/website/docs/cdktf/python/r/qldb_ledger.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "QLDB (Quantum Ledger Database)"
+layout: "aws"
+page_title: "AWS: aws_qldb_ledger"
+description: |-
+  Provides a QLDB Ledger resource.
+---
+
+
+# Resource: aws_qldb_ledger
+
+Provides an AWS Quantum Ledger Database (QLDB) ledger resource.
+
+~> **NOTE:** Deletion protection is enabled by default. To successfully delete this resource via Terraform, `deletion_protection = false` must be applied before attempting deletion.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.qldb_ledger import QldbLedger
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QldbLedger(self, "sample-ledger",
+            name="sample-ledger",
+            permissions_mode="STANDARD"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deletion_protection` - (Optional) The deletion protection for the QLDB Ledger instance. By default it is `true`. To delete this resource via Terraform, this value must be configured to `false` and applied first before attempting deletion.
+* `kms_key` - (Optional) The key in AWS Key Management Service (AWS KMS) to use for encryption of data at rest in the ledger. For more information, see the [AWS documentation](https://docs.aws.amazon.com/qldb/latest/developerguide/encryption-at-rest.html). Valid values are `"AWS_OWNED_KMS_KEY"` to use an AWS KMS key that is owned and managed by AWS on your behalf, or the ARN of a valid symmetric customer managed KMS key.
+* `name` - (Optional) The friendly name for the QLDB Ledger instance. By default generated by Terraform.
+* `permissions_mode` - (Required) The permissions mode for the QLDB ledger instance. Specify either `ALLOW_ALL` or `STANDARD`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the QLDB Ledger.
+* `arn` - The ARN of the QLDB Ledger.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QLDB Ledgers using the `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import QLDB Ledgers using the `name`. For example: + +```console +% terraform import aws_qldb_ledger.sample-ledger sample-ledger +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/qldb_stream.html.markdown b/website/docs/cdktf/python/r/qldb_stream.html.markdown new file mode 100644 index 00000000000..fdd9225e859 --- /dev/null +++ b/website/docs/cdktf/python/r/qldb_stream.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "QLDB (Quantum Ledger Database)" +layout: "aws" +page_title: "AWS: aws_qldb_stream" +description: |- + Provides a QLDB Stream resource. +--- + + + +# Resource: aws_qldb_stream + +Provides an AWS Quantum Ledger Database (QLDB) Stream resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.qldb_stream import QldbStream +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QldbStream(self, "example", + inclusive_start_time="2021-01-01T00:00:00Z", + kinesis_configuration=QldbStreamKinesisConfiguration( + aggregation_enabled=False, + stream_arn="arn:aws:kinesis:us-east-1:xxxxxxxxxxxx:stream/example-kinesis-stream" + ), + ledger_name="existing-ledger-name", + role_arn="sample-role-arn", + stream_name="sample-ledger-stream", + tags={ + "example": "tag" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `exclusive_end_time` - (Optional) The exclusive date and time that specifies when the stream ends. If you don't define this parameter, the stream runs indefinitely until you cancel it. It must be in ISO 8601 date and time format and in Universal Coordinated Time (UTC). For example: `"2019-06-13T21:36:34Z"`. +* `inclusive_start_time` - (Required) The inclusive start date and time from which to start streaming journal data. This parameter must be in ISO 8601 date and time format and in Universal Coordinated Time (UTC). For example: `"2019-06-13T21:36:34Z"`. This cannot be in the future and must be before `exclusive_end_time`. If you provide a value that is before the ledger's `CreationDateTime`, QLDB effectively defaults it to the ledger's `CreationDateTime`. +* `kinesis_configuration` - (Required) The configuration settings of the Kinesis Data Streams destination for your stream request. Documented below. +* `ledger_name` - (Required) The name of the QLDB ledger. +* `role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM role that grants QLDB permissions for a journal stream to write data records to a Kinesis Data Streams resource. +* `stream_name` - (Required) The name that you want to assign to the QLDB journal stream. User-defined names can help identify and indicate the purpose of a stream. Your stream name must be unique among other active streams for a given ledger. Stream names have the same naming constraints as ledger names, as defined in the [Amazon QLDB Developer Guide](https://docs.aws.amazon.com/qldb/latest/developerguide/limits.html#limits.naming). 
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### kinesis_configuration + +The `kinesis_configuration` block supports the following arguments: + +* `aggregation_enabled` - (Optional) Enables QLDB to publish multiple data records in a single Kinesis Data Streams record, increasing the number of records sent per API call. Default: `true`. +* `stream_arn` - (Required) The Amazon Resource Name (ARN) of the Kinesis Data Streams resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the QLDB Stream. +* `arn` - The ARN of the QLDB Stream. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `8m`) +- `delete` - (Default `5m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/quicksight_account_subscription.html.markdown b/website/docs/cdktf/python/r/quicksight_account_subscription.html.markdown new file mode 100644 index 00000000000..db909c3ea76 --- /dev/null +++ b/website/docs/cdktf/python/r/quicksight_account_subscription.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_account_subscription" +description: |- + Terraform resource for managing an AWS QuickSight Account Subscription. +--- + + + +# Resource: aws_quicksight_account_subscription + +Terraform resource for managing an AWS QuickSight Account Subscription. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_account_subscription import QuicksightAccountSubscription +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightAccountSubscription(self, "subscription", + account_name="quicksight-terraform", + authentication_method="IAM_AND_QUICKSIGHT", + edition="ENTERPRISE", + notification_email="notification@email.com" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `account_name` - (Required) Name of your Amazon QuickSight account. This name is unique over all of AWS, and it appears only when users sign in. +* `authentication_method` - (Required) Method that you want to use to authenticate your Amazon QuickSight account. Currently, the valid values for this parameter are `IAM_AND_QUICKSIGHT`, `IAM_ONLY`, and `ACTIVE_DIRECTORY`. +* `edition` - (Required) Edition of Amazon QuickSight that you want your account to have. Currently, you can choose from `STANDARD`, `ENTERPRISE` or `ENTERPRISE_AND_Q`. +* `notification_email` - (Required) Email address that you want Amazon QuickSight to send notifications to regarding your Amazon QuickSight account or Amazon QuickSight subscription. 
+
+The following arguments are optional:
+
+* `active_directory_name` - (Optional) Name of your Active Directory. This field is required if `ACTIVE_DIRECTORY` is the selected authentication method of the new Amazon QuickSight account.
+* `admin_group` - (Optional) Admin group associated with your Active Directory. This field is required if `ACTIVE_DIRECTORY` is the selected authentication method of the new Amazon QuickSight account.
+* `author_group` - (Optional) Author group associated with your Active Directory.
+* `aws_account_id` - (Optional) AWS account ID hosting the QuickSight account. Defaults to the provider account.
+* `contact_number` - (Optional) A 10-digit phone number for the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `directory_id` - (Optional) Active Directory ID that is associated with your Amazon QuickSight account.
+* `email_address` - (Optional) Email address of the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `first_name` - (Optional) First name of the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `last_name` - (Optional) Last name of the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `reader_group` - (Optional) Reader group associated with your Active Directory.
+* `realm` - (Optional) Realm of the Active Directory that is associated with your Amazon QuickSight account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `account_subscription_status` - Status of the Amazon QuickSight account's subscription.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `10m`)
+
+## Import
+
+You cannot import this resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_analysis.html.markdown b/website/docs/cdktf/python/r/quicksight_analysis.html.markdown
new file mode 100644
index 00000000000..8e353b5ffa5
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_analysis.html.markdown
@@ -0,0 +1,209 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_analysis"
+description: |-
+  Manages a QuickSight Analysis.
+---
+
+
+# Resource: aws_quicksight_analysis
+
+Resource for managing a QuickSight Analysis.
+
+## Example Usage
+
+### From Source Template
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.quicksight_analysis import QuicksightAnalysis +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightAnalysis(self, "example", + analysis_id="example-id", + name="example-name", + source_entity=QuicksightAnalysisSourceEntity( + source_template=QuicksightAnalysisSourceEntitySourceTemplate( + arn=source.arn, + data_set_references=[QuicksightAnalysisSourceEntitySourceTemplateDataSetReferences( + data_set_arn=dataset.arn, + data_set_placeholder="1" + ) + ] + ) + ) + ) +``` + +### With Definition + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_analysis import QuicksightAnalysis +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightAnalysis(self, "example", + analysis_id="example-id", + definition={ + "data_set_identifiers_declarations": [{ + "data_set_arn": dataset.arn, + "identifier": "1" + } + ], + "sheets": [{ + "sheet_id": "Example1", + "title": "Example", + "visuals": [{ + "line_chart_visual": { + "chart_configuration": { + "field_wells": { + "line_chart_aggregated_field_wells": { + "category": [{ + "categorical_dimension_field": { + "column": { + "column_name": "Column1", + "data_set_identifier": "1" + }, + "field_id": "1" + } + } + ], + "values": [{ + "categorical_measure_field": { + "aggregation_function": "COUNT", + "column": { + "column_name": "Column1", + "data_set_identifier": "1" + }, + "field_id": "2" + } + } + ] + } + } + }, + "title": { + "format_text": { + "plain_text": "Line Chart Example" + } + }, + "visual_id": "LineChart" + } + } + ] + } + ] + }, + name="example-name" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `analysis_id` - (Required, Forces new resource) Identifier for the analysis. +* `name` - (Required) Display name for the analysis. + +The following arguments are optional: + +* `aws_account_id` - (Optional, Forces new resource) AWS account ID. +* `definition` - (Optional) A detailed analysis definition. Only one of `definition` or `source_entity` should be configured. See [definition](#definition). +* `parameters` - (Optional) The parameters for the creation of the analysis, which you want to use to override the default settings. An analysis can have any type of parameters, and some parameters might accept multiple values. See [parameters](#parameters). +* `permissions` - (Optional) A set of resource permissions on the analysis. Maximum of 64 items. See [permissions](#permissions). +* `recovery_window_in_days` - (Optional) A value that specifies the number of days that Amazon QuickSight waits before it deletes the analysis. Use `0` to force deletion without recovery. Minimum value of `7`. Maximum value of `30`. Default to `30`. +* `source_entity` - (Optional) The entity that you are using as a source when you create the analysis (template). Only one of `definition` or `source_entity` should be configured. See [source_entity](#source_entity). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+* `theme_arn` - (Optional) The Amazon Resource Name (ARN) of the theme that is being used for this analysis. The theme ARN must exist in the same AWS account where you create the analysis.
+
+### permissions
+
+* `actions` - (Required) List of IAM actions to grant or revoke permissions on.
+* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values.
+
+### source_entity
+
+* `source_template` - (Optional) The source template. See [source_template](#source_template).
+
+### source_template
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the resource.
+* `data_set_references` - (Required) List of dataset references. See [data_set_references](#data_set_references).
+
+### data_set_references
+
+* `data_set_arn` - (Required) Dataset Amazon Resource Name (ARN).
+* `data_set_placeholder` - (Required) Dataset placeholder.
+
+### parameters
+
+* `date_time_parameters` - (Optional) A list of parameters that have a data type of date-time. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DateTimeParameter.html).
+* `decimal_parameters` - (Optional) A list of parameters that have a data type of decimal. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DecimalParameter.html).
+* `integer_parameters` - (Optional) A list of parameters that have a data type of integer. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_IntegerParameter.html).
+* `string_parameters` - (Optional) A list of parameters that have a data type of string. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_StringParameter.html).
+
+### definition
+
+* `data_set_identifiers_declarations` - (Required) A list of dataset identifier declarations. With this mapping, you can use dataset identifiers instead of dataset Amazon Resource Names (ARNs) throughout the analysis sub-structures. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DataSetIdentifierDeclaration.html).
+* `analysis_defaults` - (Optional) The configuration for default analysis settings. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_AnalysisDefaults.html).
+* `calculated_fields` - (Optional) A list of calculated field definitions for the analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CalculatedField.html).
+* `column_configurations` - (Optional) A list of analysis-level column configurations. Column configurations are used to set default formatting for a column that's used throughout an analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnConfiguration.html).
+* `filter_groups` - (Optional) A list of filter definitions for an analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_FilterGroup.html). For more information, see [Filtering Data](https://docs.aws.amazon.com/quicksight/latest/user/filtering-visual-data.html) in Amazon QuickSight User Guide.
+* `parameters_declarations` - (Optional) A list of parameter declarations for an analysis. Parameters are named variables that can transfer a value for use by an action or an object. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ParameterDeclaration.html). For more information, see [Parameters in Amazon QuickSight](https://docs.aws.amazon.com/quicksight/latest/user/parameters-in-quicksight.html) in the Amazon QuickSight User Guide. +* `sheets` - (Optional) A list of sheet definitions for an analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_SheetDefinition.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the analysis. +* `created_time` - The time that the analysis was created. +* `id` - A comma-delimited string joining AWS account ID and analysis ID. +* `last_updated_time` - The time that the analysis was last updated. +* `status` - The analysis creation status. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `update` - (Default `5m`) +* `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Analysis using the AWS account ID and analysis ID separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a QuickSight Analysis using the AWS account ID and analysis ID separated by a comma (`,`). For example: + +```console +% terraform import aws_quicksight_analysis.example 123456789012,example-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/quicksight_dashboard.html.markdown b/website/docs/cdktf/python/r/quicksight_dashboard.html.markdown new file mode 100644 index 00000000000..ce493459717 --- /dev/null +++ b/website/docs/cdktf/python/r/quicksight_dashboard.html.markdown @@ -0,0 +1,267 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_dashboard" +description: |- + Manages a QuickSight Dashboard. +--- + + + +# Resource: aws_quicksight_dashboard + +Resource for managing a QuickSight Dashboard. + +## Example Usage + +### From Source Template + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.quicksight_dashboard import QuicksightDashboard +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightDashboard(self, "example", + dashboard_id="example-id", + name="example-name", + source_entity=QuicksightDashboardSourceEntity( + source_template=QuicksightDashboardSourceEntitySourceTemplate( + arn=source.arn, + data_set_references=[QuicksightDashboardSourceEntitySourceTemplateDataSetReferences( + data_set_arn=dataset.arn, + data_set_placeholder="1" + ) + ] + ) + ), + version_description="version" + ) +``` + +### With Definition + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_dashboard import QuicksightDashboard +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightDashboard(self, "example", + dashboard_id="example-id", + definition={ + "data_set_identifiers_declarations": [{ + "data_set_arn": dataset.arn, + "identifier": "1" + } + ], + "sheets": [{ + "sheet_id": "Example1", + "title": "Example", + "visuals": [{ + "line_chart_visual": { + "chart_configuration": { + "field_wells": { + "line_chart_aggregated_field_wells": { + "category": [{ + "categorical_dimension_field": { + "column": { + "column_name": "Column1", + "data_set_identifier": "1" + }, + "field_id": "1" + } + } + ], + "values": [{ + "categorical_measure_field": { + "aggregation_function": "COUNT", + "column": { + "column_name": "Column1", + "data_set_identifier": "1" + }, + "field_id": "2" + } + } + ] + } + } + }, + "title": { + "format_text": { + "plain_text": "Line Chart Example" + } + }, + "visual_id": "LineChart" + } + } + ] + } + ] + }, + name="example-name", + version_description="version" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `dashboard_id` - (Required, Forces new resource) Identifier for the dashboard. +* `name` - (Required) Display name for the dashboard. +* `version_description` - (Required) A description of the current dashboard version being created/updated. + +The following arguments are optional: + +* `aws_account_id` - (Optional, Forces new resource) AWS account ID. +* `dashboard_publish_options` - (Optional) Options for publishing the dashboard. See [dashboard_publish_options](#dashboard_publish_options). +* `definition` - (Optional) A detailed dashboard definition. Only one of `definition` or `source_entity` should be configured. See [definition](#definition). +* `parameters` - (Optional) The parameters for the creation of the dashboard, which you want to use to override the default settings. A dashboard can have any type of parameters, and some parameters might accept multiple values. See [parameters](#parameters). +* `permissions` - (Optional) A set of resource permissions on the dashboard. Maximum of 64 items. See [permissions](#permissions). +* `source_entity` - (Optional) The entity that you are using as a source when you create the dashboard (template). Only one of `definition` or `source_entity` should be configured. See [source_entity](#source_entity). +* `tags` - (Optional) Key-value map of resource tags. 
If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `theme_arn` - (Optional) The Amazon Resource Name (ARN) of the theme that is being used for this dashboard. The theme ARN must exist in the same AWS account where you create the dashboard.
+
+### permissions
+
+* `actions` - (Required) List of IAM actions to grant or revoke permissions on.
+* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values.
+
+### source_entity
+
+* `source_template` - (Optional) The source template. See [source_template](#source_template).
+
+### source_template
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the resource.
+* `data_set_references` - (Required) List of dataset references. See [data_set_references](#data_set_references).
+
+### data_set_references
+
+* `data_set_arn` - (Required) Dataset Amazon Resource Name (ARN).
+* `data_set_placeholder` - (Required) Dataset placeholder.
+
+### dashboard_publish_options
+
+* `ad_hoc_filtering_option` - (Optional) Ad hoc (one-time) filtering option. See [ad_hoc_filtering_option](#ad_hoc_filtering_option).
+* `data_point_drill_up_down_option` - (Optional) The drill-down options of data points in a dashboard. See [data_point_drill_up_down_option](#data_point_drill_up_down_option).
+* `data_point_menu_label_option` - (Optional) The data point menu label options of a dashboard. See [data_point_menu_label_option](#data_point_menu_label_option).
+* `data_point_tooltip_option` - (Optional) The data point tool tip options of a dashboard. See [data_point_tooltip_option](#data_point_tooltip_option).
+* `export_to_csv_option` - (Optional) Export to .csv option. See [export_to_csv_option](#export_to_csv_option).
+* `export_with_hidden_fields_option` - (Optional) Determines if hidden fields are exported with a dashboard. See [export_with_hidden_fields_option](#export_with_hidden_fields_option).
+* `sheet_controls_option` - (Optional) Sheet controls option. See [sheet_controls_option](#sheet_controls_option).
+* `sheet_layout_element_maximization_option` - (Optional) The sheet layout maximization options of a dashboard. See [sheet_layout_element_maximization_option](#sheet_layout_element_maximization_option).
+* `visual_axis_sort_option` - (Optional) The axis sort options of a dashboard. See [visual_axis_sort_option](#visual_axis_sort_option).
+* `visual_menu_option` - (Optional) The menu options of a visual in a dashboard. See [visual_menu_option](#visual_menu_option).
+
+### ad_hoc_filtering_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### data_point_drill_up_down_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### data_point_menu_label_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### data_point_tooltip_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### export_to_csv_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### export_with_hidden_fields_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### sheet_controls_option
+
+* `visibility_state` - (Optional) Visibility state. Possible values: `EXPANDED`, `COLLAPSED`.
+
+### sheet_layout_element_maximization_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### visual_axis_sort_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
+
+### visual_menu_option
+
+* `availability_status` - (Optional) Availability status. Possible values: `ENABLED`, `DISABLED`.
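+
+As a hedged sketch of how these publish options fit together (the nested option class names below follow the generated-binding naming convention used in the examples above and may differ in your generated bindings):
+
+```python
+# Sketch only: assumes provider bindings generated by `cdktf get`.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.quicksight_dashboard import (
+    QuicksightDashboard,
+    QuicksightDashboardDashboardPublishOptions,
+    QuicksightDashboardDashboardPublishOptionsAdHocFilteringOption,
+    QuicksightDashboardDashboardPublishOptionsExportToCsvOption,
+)
+
+class PublishOptionsSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightDashboard(self, "example",
+            dashboard_id="example-id",
+            name="example-name",
+            version_description="version",
+            # one of `definition` or `source_entity` must also be configured
+            dashboard_publish_options=QuicksightDashboardDashboardPublishOptions(
+                ad_hoc_filtering_option=QuicksightDashboardDashboardPublishOptionsAdHocFilteringOption(
+                    availability_status="DISABLED"
+                ),
+                export_to_csv_option=QuicksightDashboardDashboardPublishOptionsExportToCsvOption(
+                    availability_status="ENABLED"
+                )
+            )
+        )
+```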
+
+### parameters
+
+* `date_time_parameters` - (Optional) A list of parameters that have a data type of date-time. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DateTimeParameter.html).
+* `decimal_parameters` - (Optional) A list of parameters that have a data type of decimal. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DecimalParameter.html).
+* `integer_parameters` - (Optional) A list of parameters that have a data type of integer. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_IntegerParameter.html).
+* `string_parameters` - (Optional) A list of parameters that have a data type of string. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_StringParameter.html).
+
+### definition
+
+* `data_set_identifiers_declarations` - (Required) A list of dataset identifier declarations. With this mapping, you can use dataset identifiers instead of dataset Amazon Resource Names (ARNs) throughout the dashboard's sub-structures. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DataSetIdentifierDeclaration.html).
+* `analysis_defaults` - (Optional) The configuration for default analysis settings. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_AnalysisDefaults.html).
+* `calculated_fields` - (Optional) A list of calculated field definitions for the dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CalculatedField.html).
+* `column_configurations` - (Optional) A list of dashboard-level column configurations. Column configurations are used to set default formatting for a column that's used throughout a dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnConfiguration.html).
+* `filter_groups` - (Optional) A list of filter definitions for a dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_FilterGroup.html). For more information, see [Filtering Data](https://docs.aws.amazon.com/quicksight/latest/user/filtering-visual-data.html) in the Amazon QuickSight User Guide.
+* `parameters_declarations` - (Optional) A list of parameter declarations for a dashboard. Parameters are named variables that can transfer a value for use by an action or an object. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ParameterDeclaration.html). For more information, see [Parameters in Amazon QuickSight](https://docs.aws.amazon.com/quicksight/latest/user/parameters-in-quicksight.html) in the Amazon QuickSight User Guide.
+* `sheets` - (Optional) A list of sheet definitions for a dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_SheetDefinition.html).
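+
+For illustration, a hedged sketch of the `parameters` block with a single string parameter (class names follow the generated-binding convention used above; the parameter name and value are placeholders):
+
+```python
+# Sketch only: assumes provider bindings generated by `cdktf get`.
+from imports.aws.quicksight_dashboard import (
+    QuicksightDashboardParameters,
+    QuicksightDashboardParametersStringParameters,
+)
+
+# Overrides the dashboard's default value for the named parameter.
+example_parameters = QuicksightDashboardParameters(
+    string_parameters=[QuicksightDashboardParametersStringParameters(
+        name="ExampleParameter",
+        values=["example-value"]
+    )]
+)
+```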
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the dashboard.
+* `created_time` - The time that the dashboard was created.
+* `id` - A comma-delimited string joining AWS account ID and dashboard ID.
+* `last_updated_time` - The time that the dashboard was last updated.
+* `source_entity_arn` - Amazon Resource Name (ARN) of a template that was used to create this dashboard.
+* `status` - The dashboard creation status.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `version_number` - The version number of the dashboard version.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Dashboard using the AWS account ID and dashboard ID separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a QuickSight Dashboard using the AWS account ID and dashboard ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_dashboard.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_data_set.html.markdown b/website/docs/cdktf/python/r/quicksight_data_set.html.markdown
new file mode 100644
index 00000000000..cd5aa8d9f65
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_data_set.html.markdown
@@ -0,0 +1,479 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_data_set"
+description: |-
+  Manages a QuickSight Data Set.
+---
+
+
+
+# Resource: aws_quicksight_data_set
+
+Resource for managing a QuickSight Data Set.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.quicksight_data_set import QuicksightDataSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightDataSet(self, "example", + data_set_id="example-id", + import_mode="SPICE", + name="example-name", + physical_table_map=[QuicksightDataSetPhysicalTableMap( + physical_table_map_id="example-id", + s3_source=QuicksightDataSetPhysicalTableMapS3Source( + data_source_arn=Token.as_string(aws_quicksight_data_source_example.arn), + input_columns=[QuicksightDataSetPhysicalTableMapS3SourceInputColumns( + name="Column1", + type="STRING" + ) + ], + upload_settings=QuicksightDataSetPhysicalTableMapS3SourceUploadSettings( + format="JSON" + ) + ) + ) + ] + ) +``` + +### With Column Level Permission Rules + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_data_set import QuicksightDataSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightDataSet(self, "example", + column_level_permission_rules=[QuicksightDataSetColumnLevelPermissionRules( + column_names=["Column1"], + principals=[Token.as_string(aws_quicksight_user_example.arn)] + ) + ], + data_set_id="example-id", + import_mode="SPICE", + name="example-name", + physical_table_map=[QuicksightDataSetPhysicalTableMap( + physical_table_map_id="example-id", + s3_source=QuicksightDataSetPhysicalTableMapS3Source( + data_source_arn=Token.as_string(aws_quicksight_data_source_example.arn), + input_columns=[QuicksightDataSetPhysicalTableMapS3SourceInputColumns( + name="Column1", + type="STRING" + ) + ], + upload_settings=QuicksightDataSetPhysicalTableMapS3SourceUploadSettings( + format="JSON" + ) + ) + ) + ] + ) +``` + +### With Field Folders + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_data_set import QuicksightDataSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightDataSet(self, "example", + data_set_id="example-id", + field_folders=[QuicksightDataSetFieldFolders( + columns=["Column1"], + description="example description", + field_folders_id="example-id" + ) + ], + import_mode="SPICE", + name="example-name", + physical_table_map=[QuicksightDataSetPhysicalTableMap( + physical_table_map_id="example-id", + s3_source=QuicksightDataSetPhysicalTableMapS3Source( + data_source_arn=Token.as_string(aws_quicksight_data_source_example.arn), + input_columns=[QuicksightDataSetPhysicalTableMapS3SourceInputColumns( + name="Column1", + type="STRING" + ) + ], + upload_settings=QuicksightDataSetPhysicalTableMapS3SourceUploadSettings( + format="JSON" + ) + ) + ) + ] + ) +``` + +### With Permissions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.quicksight_data_set import QuicksightDataSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightDataSet(self, "example", + data_set_id="example-id", + import_mode="SPICE", + name="example-name", + permissions=[QuicksightDataSetPermissions( + actions=["quicksight:DescribeDataSet", "quicksight:DescribeDataSetPermissions", "quicksight:PassDataSet", "quicksight:DescribeIngestion", "quicksight:ListIngestions" + ], + principal=Token.as_string(aws_quicksight_user_example.arn) + ) + ], + physical_table_map=[QuicksightDataSetPhysicalTableMap( + physical_table_map_id="example-id", + s3_source=QuicksightDataSetPhysicalTableMapS3Source( + data_source_arn=Token.as_string(aws_quicksight_data_source_example.arn), + input_columns=[QuicksightDataSetPhysicalTableMapS3SourceInputColumns( + name="Column1", + type="STRING" + ) + ], + upload_settings=QuicksightDataSetPhysicalTableMapS3SourceUploadSettings( + format="JSON" + ) + ) + ) + ] + ) +``` + +### With Row Level Permission Tag Configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_data_set import QuicksightDataSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightDataSet(self, "example", + data_set_id="example-id", + import_mode="SPICE", + name="example-name", + physical_table_map=[QuicksightDataSetPhysicalTableMap( + physical_table_map_id="example-id", + s3_source=QuicksightDataSetPhysicalTableMapS3Source( + data_source_arn=Token.as_string(aws_quicksight_data_source_example.arn), + input_columns=[QuicksightDataSetPhysicalTableMapS3SourceInputColumns( + name="Column1", + type="STRING" + ) + ], + upload_settings=QuicksightDataSetPhysicalTableMapS3SourceUploadSettings( + format="JSON" + ) + ) + ) + ], + row_level_permission_tag_configuration=QuicksightDataSetRowLevelPermissionTagConfiguration( + status="ENABLED", + tag_rules=[QuicksightDataSetRowLevelPermissionTagConfigurationTagRules( + column_name="Column1", + match_all_value="*", + tag_key="tagkey", + tag_multi_value_delimiter="," + ) + ] + ) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `data_set_id` - (Required, Forces new resource) Identifier for the data set. +* `import_mode` - (Required) Indicates whether you want to import the data into SPICE. Valid values are `SPICE` and `DIRECT_QUERY`. +* `name` - (Required) Display name for the dataset. +* `physical_table_map` - (Required) Declares the physical tables that are available in the underlying data sources. See [physical_table_map](#physical_table_map). + +The following arguments are optional: + +* `aws_account_id` - (Optional, Forces new resource) AWS account ID. +* `column_groups` - (Optional) Groupings of columns that work together in certain Amazon QuickSight features. Currently, only geospatial hierarchy is supported. See [column_groups](#column_groups). +* `column_level_permission_rules` - (Optional) A set of 1 or more definitions of a [ColumnLevelPermissionRule](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnLevelPermissionRule.html). See [column_level_permission_rules](#column_level_permission_rules). 
+* `data_set_usage_configuration` - (Optional) The usage configuration to apply to child datasets that reference this dataset as a source. See [data_set_usage_configuration](#data_set_usage_configuration).
+* `field_folders` - (Optional) The folder that contains fields and nested subfolders for your dataset. See [field_folders](#field_folders).
+* `logical_table_map` - (Optional) Configures the combination and transformation of the data from the physical tables. Maximum of 1 entry. See [logical_table_map](#logical_table_map).
+* `permissions` - (Optional) A set of resource permissions on the data set. Maximum of 64 items. See [permissions](#permissions).
+* `row_level_permission_data_set` - (Optional) The row-level security configuration for the data that you want to create. See [row_level_permission_data_set](#row_level_permission_data_set).
+* `row_level_permission_tag_configuration` - (Optional) The configuration of tags on a dataset to set row-level security. Row-level security tags are currently supported for anonymous embedding only. See [row_level_permission_tag_configuration](#row_level_permission_tag_configuration).
+* `refresh_properties` - (Optional) The refresh properties for the data set. **NOTE**: Only valid when `import_mode` is set to `SPICE`. See [refresh_properties](#refresh_properties).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### physical_table_map
+
+For a `physical_table_map` item to be valid, only one of `custom_sql`, `relational_table`, or `s3_source` should be configured.
+
+* `physical_table_map_id` - (Required) Key of the physical table map.
+* `custom_sql` - (Optional) A physical table type built from the results of the custom SQL query. See [custom_sql](#custom_sql).
+* `relational_table` - (Optional) A physical table type for relational data sources. See [relational_table](#relational_table).
+* `s3_source` - (Optional) A physical table type for an S3 data source. See [s3_source](#s3_source).
+
+### custom_sql
+
+* `data_source_arn` - (Required) ARN of the data source.
+* `name` - (Required) Display name for the SQL query result.
+* `sql_query` - (Required) SQL query.
+* `columns` - (Optional) Column schema from the SQL query result set. See [columns](#columns).
+
+### columns
+
+* `name` - (Required) Name of this column in the underlying data source.
+* `type` - (Required) Data type of the column.
+
+### relational_table
+
+* `data_source_arn` - (Required) ARN of the data source.
+* `input_columns` - (Required) Column schema of the table. See [input_columns](#input_columns).
+* `name` - (Required) Name of the relational table.
+* `catalog` - (Optional) Catalog associated with the table.
+* `schema` - (Optional) Schema name. This name applies to certain relational database engines.
+
+### input_columns
+
+* `name` - (Required) Name of this column in the underlying data source.
+* `type` - (Required) Data type of the column.
+
+### s3_source
+
+* `data_source_arn` - (Required) ARN of the data source.
+* `input_columns` - (Required) Column schema of the table. See [input_columns](#input_columns).
+* `upload_settings` - (Required) Information about the format for the S3 source file or files. See [upload_settings](#upload_settings).
+
+### upload_settings
+
+* `contains_header` - (Optional) Whether the file has a header row, or the files each have a header row.
+* `delimiter` - (Optional) Delimiter between values in the file.
+* `format` - (Optional) File format. Valid values are `CSV`, `TSV`, `CLF`, `ELF`, `XLSX`, and `JSON`.
+* `start_from_row` - (Optional) A row number to start reading data from.
+* `text_qualifier` - (Optional) Text qualifier. Valid values are `DOUBLE_QUOTE` and `SINGLE_QUOTE`.
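+
+For comparison with the `s3_source` usage in the examples above, here is a hedged sketch of a `custom_sql` physical table. The data source reference, query, and column are placeholders, and the nested class names follow the generated-binding convention used elsewhere on this page:
+
+```python
+# Sketch only: assumes provider bindings generated by `cdktf get` and an
+# existing `aws_quicksight_data_source_example` in scope, as in the examples above.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.quicksight_data_set import (
+    QuicksightDataSet,
+    QuicksightDataSetPhysicalTableMap,
+    QuicksightDataSetPhysicalTableMapCustomSql,
+    QuicksightDataSetPhysicalTableMapCustomSqlColumns,
+)
+
+class CustomSqlSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightDataSet(self, "example",
+            data_set_id="example-id",
+            import_mode="SPICE",
+            name="example-name",
+            physical_table_map=[QuicksightDataSetPhysicalTableMap(
+                physical_table_map_id="example-id",
+                # `custom_sql`, `relational_table`, and `s3_source` are mutually exclusive.
+                custom_sql=QuicksightDataSetPhysicalTableMapCustomSql(
+                    data_source_arn=Token.as_string(aws_quicksight_data_source_example.arn),
+                    name="example-query",
+                    sql_query="SELECT column1 FROM example_table",
+                    columns=[QuicksightDataSetPhysicalTableMapCustomSqlColumns(
+                        name="column1",
+                        type="STRING"
+                    )]
+                )
+            )]
+        )
+```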
+
+### column_groups
+
+* `geo_spatial_column_group` - (Optional) Geospatial column group that denotes a hierarchy. See [geo_spatial_column_group](#geo_spatial_column_group).
+
+### geo_spatial_column_group
+
+* `columns` - (Required) Columns in this hierarchy.
+* `country_code` - (Required) Country code. The only valid value is `US`.
+* `name` - (Required) A display name for the hierarchy.
+
+### column_level_permission_rules
+
+* `column_names` - (Optional) An array of column names.
+* `principals` - (Optional) An array of ARNs for Amazon QuickSight users or groups.
+
+### data_set_usage_configuration
+
+* `disable_use_as_direct_query_source` - (Optional) Controls whether a child dataset of a direct query can use this dataset as a source.
+* `disable_use_as_imported_source` - (Optional) Controls whether a child dataset that's stored in QuickSight can use this dataset as a source.
+
+### field_folders
+
+* `field_folders_id` - (Required) Key of the field folder map.
+* `columns` - (Optional) An array of column names to add to the folder. A column can only be in one folder.
+* `description` - (Optional) Field folder description.
+
+### logical_table_map
+
+* `alias` - (Required) A display name for the logical table.
+* `logical_table_map_id` - (Required) Key of the logical table map.
+* `data_transforms` - (Optional) Transform operations that act on this logical table. For this structure to be valid, only one of the attributes can be non-null. See [data_transforms](#data_transforms).
+* `source` - (Optional) Source of this logical table. See [source](#source).
+
+### data_transforms
+
+* `cast_column_type_operation` - (Optional) A transform operation that casts a column to a different type. See [cast_column_type_operation](#cast_column_type_operation).
+* `create_columns_operation` - (Optional) An operation that creates calculated columns. Columns created in one such operation form a lexical closure. See [create_columns_operation](#create_columns_operation).
+* `filter_operation` - (Optional) An operation that filters rows based on some condition. See [filter_operation](#filter_operation).
+* `project_operation` - (Optional) An operation that projects columns. Operations that come after a projection can only refer to projected columns. See [project_operation](#project_operation).
+* `rename_column_operation` - (Optional) An operation that renames a column. See [rename_column_operation](#rename_column_operation).
+* `tag_column_operation` - (Optional) An operation that tags a column with additional information. See [tag_column_operation](#tag_column_operation).
+* `untag_column_operation` - (Optional) A transform operation that removes tags associated with a column. See [untag_column_operation](#untag_column_operation).
+
+### cast_column_type_operation
+
+* `column_name` - (Required) Column name.
+* `new_column_type` - (Required) New column data type. Valid values are `STRING`, `INTEGER`, `DECIMAL`, and `DATETIME`.
+* `format` - (Optional) When casting a column from string to datetime type, you can supply a string in a format supported by Amazon QuickSight to denote the source data format. + +### create_columns_operation + +* `columns` - (Required) Calculated columns to create. See [columns](#columns-1). + +### columns + +* `column_id` - (Required) A unique ID to identify a calculated column. During a dataset update, if the column ID of a calculated column matches that of an existing calculated column, Amazon QuickSight preserves the existing calculated column. +* `column_name` - (Required) Column name. +* `expression` - (Required) An expression that defines the calculated column. + +### filter_operation + +* `condition_expression` - (Required) An expression that must evaluate to a Boolean value. Rows for which the expression evaluates to true are kept in the dataset. + +### project_operation + +* `projected_columns` - (Required) Projected columns. + +### rename_column_operation + +* `column_name` - (Required) Column to be renamed. +* `new_column_name` - (Required) New name for the column. + +### tag_column_operation + +* `column_name` - (Required) Column name. +* `tags` - (Required) The dataset column tag, currently only used for geospatial type tagging. See [tags](#tags). + +### tags + +* `column_description` - (Optional) A description for a column. See [column_description](#column_description). +* `column_geographic_role` - (Optional) A geospatial role for a column. Valid values are `COUNTRY`, `STATE`, `COUNTY`, `CITY`, `POSTCODE`, `LONGITUDE`, and `LATITUDE`. + +### column_description + +* `text` - (Optional) The text of a description for a column. + +### untag_column_operation + +* `column_name` - (Required) Column name. +* `tag_names` - (Required) The column tags to remove from this column. + +### source + +* `data_set_arn` - (Optional) ARN of the parent data set. +* `join_instruction` - (Optional) Specifies the result of a join of two logical tables. See [join_instruction](#join_instruction). +* `physical_table_id` - (Optional) Physical table ID. + +### join_instruction + +* `left_operand` - (Required) Operand on the left side of a join. +* `on_clause` - (Required) Join instructions provided in the ON clause of a join. +* `right_operand` - (Required) Operand on the right side of a join. +* `type` - (Required) Type of join. Valid values are `INNER`, `OUTER`, `LEFT`, and `RIGHT`. +* `left_join_key_properties` - (Optional) Join key properties of the left operand. See [left_join_key_properties](#left_join_key_properties). +* `right_join_key_properties` - (Optional) Join key properties of the right operand. See [right_join_key_properties](#right_join_key_properties). + +### left_join_key_properties + +* `unique_key` - (Optional) A value that indicates that a row in a table is uniquely identified by the columns in a join key. This is used by Amazon QuickSight to optimize query performance. + +### right_join_key_properties + +* `unique_key` - (Optional) A value that indicates that a row in a table is uniquely identified by the columns in a join key. This is used by Amazon QuickSight to optimize query performance. + +### permissions + +* `actions` - (Required) List of IAM actions to grant or revoke permissions on. +* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values. 
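+
+To illustrate the `logical_table_map`, `source`, and `join_instruction` structures described above, here is a hedged sketch that joins two logical tables. The table IDs, aliases, and `on_clause` are placeholders, and the class names follow the generated-binding convention used elsewhere on this page:
+
+```python
+# Sketch only: assumes provider bindings generated by `cdktf get`.
+from imports.aws.quicksight_data_set import (
+    QuicksightDataSetLogicalTableMap,
+    QuicksightDataSetLogicalTableMapSource,
+    QuicksightDataSetLogicalTableMapSourceJoinInstruction,
+)
+
+# The operands name the logical table IDs of the two tables being joined.
+joined_table = QuicksightDataSetLogicalTableMap(
+    alias="joined",
+    logical_table_map_id="joined-id",
+    source=QuicksightDataSetLogicalTableMapSource(
+        join_instruction=QuicksightDataSetLogicalTableMapSourceJoinInstruction(
+            left_operand="orders",
+            right_operand="customers",
+            type="LEFT",
+            on_clause="orders.customer_id = customers.customer_id"
+        )
+    )
+)
+```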
+
+### row_level_permission_data_set
+
+* `arn` - (Required) ARN of the dataset that contains permissions for RLS.
+* `permission_policy` - (Required) Type of permissions to use when interpreting the permissions for RLS. Valid values are `GRANT_ACCESS` and `DENY_ACCESS`.
+* `format_version` - (Optional) Version of the format for the dataset that contains permissions for RLS. Valid values are `VERSION_1` and `VERSION_2`.
+* `namespace` - (Optional) Namespace associated with the dataset that contains permissions for RLS.
+* `status` - (Optional) Status of the row-level security permission dataset. If enabled, the status is `ENABLED`. If disabled, the status is `DISABLED`.
+
+### row_level_permission_tag_configuration
+
+* `tag_rules` - (Required) A set of rules associated with row-level security, such as the tag names and columns that they are assigned to. See [tag_rules](#tag_rules).
+* `status` - (Optional) The status of row-level security tags. If enabled, the status is `ENABLED`. If disabled, the status is `DISABLED`.
+
+### refresh_properties
+
+* `refresh_configuration` - (Required) The refresh configuration for the data set. See [refresh_configuration](#refresh_configuration).
+
+### refresh_configuration
+
+* `incremental_refresh` - (Required) The incremental refresh for the data set. See [incremental_refresh](#incremental_refresh).
+
+### incremental_refresh
+
+* `lookback_window` - (Required) The lookback window setup for an incremental refresh configuration. See [lookback_window](#lookback_window).
+
+### lookback_window
+
+* `column_name` - (Required) The name of the lookback window column.
+* `size` - (Required) The lookback window column size.
+* `size_unit` - (Required) The size unit that is used for the lookback window column. Valid values for this structure are `HOUR`, `DAY`, and `WEEK`.
+
+### tag_rules
+
+* `column_name` - (Required) Column name that a tag key is assigned to.
+* `tag_key` - (Required) Unique key for a tag.
+* `match_all_value` - (Optional) A string that you want to use to filter by all the values in a column in the dataset and don't want to list the values one by one.
+* `tag_multi_value_delimiter` - (Optional) A string that you want to use to delimit the values when you pass the values at run time.
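+
+To illustrate the refresh structures above, a hedged sketch of an incremental refresh over a date-time column (the column name is a placeholder; class names follow the generated-binding convention used elsewhere on this page):
+
+```python
+# Sketch only: assumes provider bindings generated by `cdktf get` and a
+# data set with `import_mode = "SPICE"`.
+from imports.aws.quicksight_data_set import (
+    QuicksightDataSetRefreshProperties,
+    QuicksightDataSetRefreshPropertiesRefreshConfiguration,
+    QuicksightDataSetRefreshPropertiesRefreshConfigurationIncrementalRefresh,
+    QuicksightDataSetRefreshPropertiesRefreshConfigurationIncrementalRefreshLookbackWindow,
+)
+
+# Re-ingest only rows whose `created_at` falls within the last day.
+example_refresh = QuicksightDataSetRefreshProperties(
+    refresh_configuration=QuicksightDataSetRefreshPropertiesRefreshConfiguration(
+        incremental_refresh=QuicksightDataSetRefreshPropertiesRefreshConfigurationIncrementalRefresh(
+            lookback_window=QuicksightDataSetRefreshPropertiesRefreshConfigurationIncrementalRefreshLookbackWindow(
+                column_name="created_at",
+                size=1,
+                size_unit="DAY"
+            )
+        )
+    )
+)
+```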
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a QuickSight Data Set using the AWS account ID and data set ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_data_set.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_data_source.html.markdown b/website/docs/cdktf/python/r/quicksight_data_source.html.markdown
new file mode 100644
index 00000000000..bcadecd656d
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_data_source.html.markdown
@@ -0,0 +1,248 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_data_source"
+description: |-
+  Manages a QuickSight Data Source.
+---
+
+
+
+# Resource: aws_quicksight_data_source
+
+Resource for managing a QuickSight Data Source.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_data_source import QuicksightDataSource
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightDataSource(self, "default",
+            data_source_id="example-id",
+            name="My Cool Data in S3",
+            parameters=QuicksightDataSourceParameters(
+                s3=QuicksightDataSourceParametersS3(
+                    manifest_file_location=QuicksightDataSourceParametersS3ManifestFileLocation(
+                        bucket="my-bucket",
+                        key="path/to/manifest.json"
+                    )
+                )
+            ),
+            type="S3"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `data_source_id` - (Required, Forces new resource) An identifier for the data source.
+* `name` - (Required) A name for the data source, maximum of 128 characters.
+* `parameters` - (Required) The [parameters](#parameters-argument-reference) used to connect to this data source (exactly one).
+* `type` - (Required) The type of the data source. See the [AWS Documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CreateDataSource.html#QS-CreateDataSource-request-Type) for the complete list of valid values.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional, Forces new resource) The ID for the AWS account that the data source is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `credentials` - (Optional) The credentials Amazon QuickSight uses to connect to your underlying source. Currently, only credentials based on user name and password are supported. See [Credentials](#credentials-argument-reference) below for more details.
+* `permission` - (Optional) A set of resource permissions on the data source. Maximum of 64 items. See [Permission](#permission-argument-reference) below for more details.
+* `ssl_properties` - (Optional) Secure Socket Layer (SSL) properties that apply when Amazon QuickSight connects to your underlying source. See [SSL Properties](#ssl_properties-argument-reference) below for more details.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_connection_properties` - (Optional) Use this parameter only when you want Amazon QuickSight to use a VPC connection when connecting to your underlying source. See [VPC Connection Properties](#vpc_connection_properties-argument-reference) below for more details.
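+
+As a hedged sketch combining several of the optional arguments above (the ARN, endpoint, and credential values are placeholders; class names follow the generated-binding convention shown in the example above):
+
+```python
+# Sketch only: assumes provider bindings generated by `cdktf get`.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.quicksight_data_source import (
+    QuicksightDataSource,
+    QuicksightDataSourceCredentials,
+    QuicksightDataSourceCredentialsCredentialPair,
+    QuicksightDataSourceParameters,
+    QuicksightDataSourceParametersPostgresql,
+    QuicksightDataSourceVpcConnectionProperties,
+)
+
+class PostgresqlSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightDataSource(self, "example",
+            data_source_id="example-id",
+            name="example-postgresql",
+            type="POSTGRESQL",
+            parameters=QuicksightDataSourceParameters(
+                postgresql=QuicksightDataSourceParametersPostgresql(
+                    database="exampledb",
+                    host="db.example.internal",
+                    port=5432
+                )
+            ),
+            # Username/password credentials; `copy_source_arn` is the alternative.
+            credentials=QuicksightDataSourceCredentials(
+                credential_pair=QuicksightDataSourceCredentialsCredentialPair(
+                    username="example-user",
+                    password="example-password"
+                )
+            ),
+            # Reach the database through a QuickSight VPC connection.
+            vpc_connection_properties=QuicksightDataSourceVpcConnectionProperties(
+                vpc_connection_arn="arn:aws:quicksight:us-east-1:123456789012:vpcConnection/example"
+            )
+        )
+```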
+
+### credentials Argument Reference
+
+* `copy_source_arn` (Optional, Conflicts with `credential_pair`) - The Amazon Resource Name (ARN) of a data source that has the credential pair that you want to use.
+When the value is not null, the `credential_pair` from the data source in the ARN is used.
+* `credential_pair` (Optional, Conflicts with `copy_source_arn`) - Credential pair. See [Credential Pair](#credential_pair-argument-reference) below for more details.
+
+### credential_pair Argument Reference
+
+* `password` - (Required) Password, maximum length of 1024 characters.
+* `username` - (Required) User name, maximum length of 64 characters.
+
+### parameters Argument Reference
+
+To specify data source connection parameters, exactly one of the following sub-objects must be provided.
+
+* `amazon_elasticsearch` - (Optional) [Parameters](#amazon_elasticsearch-argument-reference) for connecting to Amazon Elasticsearch.
+* `athena` - (Optional) [Parameters](#athena-argument-reference) for connecting to Athena.
+* `aurora` - (Optional) [Parameters](#aurora-argument-reference) for connecting to Aurora MySQL.
+* `aurora_postgresql` - (Optional) [Parameters](#aurora_postgresql-argument-reference) for connecting to Aurora PostgreSQL.
+* `aws_iot_analytics` - (Optional) [Parameters](#aws_iot_analytics-argument-reference) for connecting to AWS IoT Analytics.
+* `jira` - (Optional) [Parameters](#jira-argument-reference) for connecting to Jira.
+* `maria_db` - (Optional) [Parameters](#maria_db-argument-reference) for connecting to MariaDB.
+* `mysql` - (Optional) [Parameters](#mysql-argument-reference) for connecting to MySQL.
+* `oracle` - (Optional) [Parameters](#oracle-argument-reference) for connecting to Oracle.
+* `postgresql` - (Optional) [Parameters](#postgresql-argument-reference) for connecting to PostgreSQL.
+* `presto` - (Optional) [Parameters](#presto-argument-reference) for connecting to Presto.
+* `rds` - (Optional) [Parameters](#rds-argument-reference) for connecting to RDS.
+* `redshift` - (Optional) [Parameters](#redshift-argument-reference) for connecting to Redshift.
+* `s3` - (Optional) [Parameters](#s3-argument-reference) for connecting to S3.
+* `service_now` - (Optional) [Parameters](#service_now-argument-reference) for connecting to ServiceNow.
+* `snowflake` - (Optional) [Parameters](#snowflake-argument-reference) for connecting to Snowflake.
+* `spark` - (Optional) [Parameters](#spark-argument-reference) for connecting to Spark.
+* `sql_server` - (Optional) [Parameters](#sql_server-argument-reference) for connecting to SQL Server.
+* `teradata` - (Optional) [Parameters](#teradata-argument-reference) for connecting to Teradata.
+* `twitter` - (Optional) [Parameters](#twitter-argument-reference) for connecting to Twitter.
+
+### permission Argument Reference
+
+* `actions` - (Required) Set of IAM actions to grant or revoke permissions on. Max of 16 items.
+* `principal` - (Required) The Amazon Resource Name (ARN) of the principal.
+
+### ssl_properties Argument Reference
+
+* `disable_ssl` - (Required) A Boolean option to control whether SSL should be disabled.
+
+### vpc_connection_properties Argument Reference
+
+* `vpc_connection_arn` - (Required) The Amazon Resource Name (ARN) for the VPC connection.
+
+### amazon_elasticsearch Argument Reference
+
+* `domain` - (Required) The OpenSearch domain.
+
+### athena Argument Reference
+
+* `work_group` - (Optional) The work-group to which to connect.
+
+### aurora Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### aurora_postgresql Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### aws_iot_analytics Argument Reference
+
+* `data_set_name` - (Required) The name of the data set to which to connect.
+
+### jira Argument Reference
+
+* `site_base_url` - (Required) The base URL of the Jira instance's site to which to connect.
+
+### maria_db Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### mysql Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### oracle Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### postgresql Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### presto Argument Reference
+
+* `catalog` - (Required) The catalog to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### rds Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `instance_id` - (Optional) The instance ID to which to connect.
+
+### redshift Argument Reference
+
+* `cluster_id` - (Optional, Required if `host` and `port` are not provided) The ID of the cluster to which to connect.
+* `database` - (Required) The database to which to connect.
+* `host` - (Optional, Required if `cluster_id` is not provided) The host to which to connect.
+* `port` - (Optional, Required if `cluster_id` is not provided) The port to which to connect.
+
+### s3 Argument Reference
+
+* `manifest_file_location` - (Required) An [object containing the S3 location](#manifest_file_location-argument-reference) of the S3 manifest file.
+
+### manifest_file_location Argument Reference
+
+* `bucket` - (Required) The name of the bucket that contains the manifest file.
+* `key` - (Required) The key of the manifest file within the bucket.
+
+### service_now Argument Reference
+
+* `site_base_url` - (Required) The base URL of the ServiceNow instance's site to which to connect.
+
+### snowflake Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `warehouse` - (Required) The warehouse to which to connect.
+
+### spark Argument Reference
+
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### sql_server Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### teradata Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### twitter Argument Reference
+
+* `max_rows` - (Required) The maximum number of rows to query.
+* `query` - (Required) The Twitter query to retrieve the data.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the data source.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight data source using the AWS account ID and data source ID separated by a slash (`/`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a QuickSight data source using the AWS account ID and data source ID separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_quicksight_data_source.example 123456789123/my-data-source-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_folder.html.markdown b/website/docs/cdktf/python/r/quicksight_folder.html.markdown
new file mode 100644
index 00000000000..e7820c8e103
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_folder.html.markdown
@@ -0,0 +1,147 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_folder"
+description: |-
+  Manages a QuickSight Folder.
+---
+
+
+
+# Resource: aws_quicksight_folder
+
+Resource for managing a QuickSight Folder.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_folder import QuicksightFolder
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightFolder(self, "example",
+            folder_id="example-id",
+            name="example-name"
+        )
+```
+
+### With Permissions
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.quicksight_folder import QuicksightFolder +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightFolder(self, "example", + folder_id="example-id", + name="example-name", + permissions=[QuicksightFolderPermissions( + actions=["quicksight:CreateFolder", "quicksight:DescribeFolder", "quicksight:UpdateFolder", "quicksight:DeleteFolder", "quicksight:CreateFolderMembership", "quicksight:DeleteFolderMembership", "quicksight:DescribeFolderPermissions", "quicksight:UpdateFolderPermissions" + ], + principal=Token.as_string(aws_quicksight_user_example.arn) + ) + ] + ) +``` + +### With Parent Folder + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_folder import QuicksightFolder +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + parent = QuicksightFolder(self, "parent", + folder_id="parent-id", + name="parent-name" + ) + QuicksightFolder(self, "example", + folder_id="example-id", + name="example-name", + parent_folder_arn=parent.arn + ) +``` + +## Argument Reference + +The following arguments are required: + +* `folder_id` - (Required, Forces new resource) Identifier for the folder. +* `name` - (Required) Display name for the folder. + +The following arguments are optional: + +* `aws_account_id` - (Optional, Forces new resource) AWS account ID. +* `folder_type` - (Optional) The type of folder. By default, it is `SHARED`. Valid values are: `SHARED`. +* `parent_folder_arn` - (Optional) The Amazon Resource Name (ARN) for the parent folder. If not set, creates a root-level folder. +* `permissions` - (Optional) A set of resource permissions on the folder. Maximum of 64 items. See [permissions](#permissions). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### permissions + +* `actions` - (Required) List of IAM actions to grant or revoke permissions on. +* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the folder. +* `created_time` - The time that the folder was created. +* `folder_path` - An array of ancestor ARN strings for the folder. Empty for root-level folders. +* `id` - A comma-delimited string joining AWS account ID and folder ID. +* `last_updated_time` - The time that the folder was last updated. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). 
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `read` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight folder using the AWS account ID and folder ID separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a QuickSight folder using the AWS account ID and folder ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_folder.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_folder_membership.html.markdown b/website/docs/cdktf/python/r/quicksight_folder_membership.html.markdown
new file mode 100644
index 00000000000..a60fdc15fb8
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_folder_membership.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_folder_membership"
+description: |-
+  Terraform resource for managing an AWS QuickSight Folder Membership.
+---
+
+
+
+# Resource: aws_quicksight_folder_membership
+
+Terraform resource for managing an AWS QuickSight Folder Membership.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_folder_membership import QuicksightFolderMembership
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightFolderMembership(self, "example",
+            folder_id=Token.as_string(aws_quicksight_folder_example.folder_id),
+            member_id=Token.as_string(aws_quicksight_data_set_example.data_set_id),
+            member_type="DATASET"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `folder_id` - (Required, Forces new resource) Identifier for the folder.
+* `member_id` - (Required, Forces new resource) ID of the asset (the dashboard, analysis, or dataset).
+* `member_type` - (Required, Forces new resource) Type of the member. Valid values are `ANALYSIS`, `DASHBOARD`, and `DATASET`.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional, Forces new resource) AWS account ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A comma-delimited string joining AWS account ID, folder ID, member type, and member ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Folder Membership using the AWS account ID, folder ID, member type, and member ID separated by commas (`,`).
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import QuickSight Folder Membership using the AWS account ID, folder ID, member type, and member ID separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_folder_membership.example 123456789012,example-folder,DATASET,example-dataset
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_group.html.markdown b/website/docs/cdktf/python/r/quicksight_group.html.markdown
new file mode 100644
index 00000000000..099a9b88fbc
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_group.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_group"
+description: |-
+  Manages a QuickSight Group.
+---
+
+
+
+# Resource: aws_quicksight_group
+
+Resource for managing a QuickSight Group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_group import QuicksightGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightGroup(self, "example",
+            group_name="tf-example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `group_name` - (Required) A name for the group.
+* `aws_account_id` - (Optional) The ID for the AWS account that the group is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `description` - (Optional) A description for the group.
+* `namespace` - (Optional) The namespace. Currently, you should set this to `default`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Group using the AWS account ID, namespace, and group name separated by `/`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a QuickSight Group using the AWS account ID, namespace, and group name separated by `/`. For example:
+
+```console
+% terraform import aws_quicksight_group.example 123456789123/default/tf-example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_group_membership.html.markdown b/website/docs/cdktf/python/r/quicksight_group_membership.html.markdown
new file mode 100644
index 00000000000..cdde5346535
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_group_membership.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_group_membership"
+description: |-
+  Manages a QuickSight Group Membership.
+
+---
+
+
+
+# Resource: aws_quicksight_group_membership
+
+Resource for managing a QuickSight Group Membership.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_group_membership import QuicksightGroupMembership
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightGroupMembership(self, "example",
+            group_name="all-access-users",
+            member_name="john_smith"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `group_name` - (Required) The name of the group to which the member will be added.
+* `member_name` - (Required) The name of the member to add to the group.
+* `aws_account_id` - (Optional) The ID for the AWS account that the group is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `namespace` - (Required) The namespace that you want the user to be a part of. Defaults to `default`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Group membership using the AWS account ID, namespace, group name, and member name separated by `/`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import QuickSight Group membership using the AWS account ID, namespace, group name, and member name separated by `/`. For example:
+
+```console
+% terraform import aws_quicksight_group_membership.example 123456789123/default/all-access-users/john_smith
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_iam_policy_assignment.html.markdown b/website/docs/cdktf/python/r/quicksight_iam_policy_assignment.html.markdown
new file mode 100644
index 00000000000..e098271c686
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_iam_policy_assignment.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_iam_policy_assignment"
+description: |-
+  Terraform resource for managing an AWS QuickSight IAM Policy Assignment.
+---
+
+
+
+# Resource: aws_quicksight_iam_policy_assignment
+
+Terraform resource for managing an AWS QuickSight IAM Policy Assignment.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
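+# NOTE: `aws_quicksight_user_example` and `aws_iam_policy_example` below are placeholder
+# references emitted by 'cdktf convert' for an aws_quicksight_user and an aws_iam_policy
+# defined elsewhere in the original configuration; point them at your own constructs.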
+#
+from imports.aws.quicksight_iam_policy_assignment import QuicksightIamPolicyAssignment, QuicksightIamPolicyAssignmentIdentities
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightIamPolicyAssignment(self, "example",
+            assignment_name="example",
+            assignment_status="ENABLED",
+            identities=[QuicksightIamPolicyAssignmentIdentities(
+                user=[Token.as_string(aws_quicksight_user_example.user_name)]
+            )
+            ],
+            policy_arn=Token.as_string(aws_iam_policy_example.arn)
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `assignment_name` - (Required) Name of the assignment.
+* `assignment_status` - (Required) Status of the assignment. Valid values are `ENABLED`, `DISABLED`, and `DRAFT`.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional) AWS account ID.
+* `identities` - (Optional) Amazon QuickSight users, groups, or both to assign the policy to. See [`identities`](#identities).
+* `namespace` - (Optional) Namespace that contains the assignment. Defaults to `default`.
+* `policy_arn` - (Optional) ARN of the IAM policy to apply to the Amazon QuickSight users and groups specified in this assignment.
+
+### identities
+
+* `groups` - (Optional) Array of QuickSight group names to assign the policy to.
+* `user` - (Optional) Array of QuickSight user names to assign the policy to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `assignment_id` - Assignment ID.
+* `id` - A comma-delimited string joining AWS account ID, namespace, and assignment name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight IAM Policy Assignment using the AWS account ID, namespace, and assignment name separated by commas (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import QuickSight IAM Policy Assignment using the AWS account ID, namespace, and assignment name separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_iam_policy_assignment.example 123456789012,default,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_ingestion.html.markdown b/website/docs/cdktf/python/r/quicksight_ingestion.html.markdown
new file mode 100644
index 00000000000..d1328993030
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_ingestion.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_ingestion"
+description: |-
+  Terraform resource for managing an AWS QuickSight Ingestion.
+---
+
+
+
+# Resource: aws_quicksight_ingestion
+
+Terraform resource for managing an AWS QuickSight Ingestion.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
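+# NOTE: `aws_quicksight_data_set_example` below is a placeholder reference emitted by
+# 'cdktf convert' for an aws_quicksight_data_set defined elsewhere; replace it with a
+# reference to your own data set construct.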
+# +from imports.aws.quicksight_ingestion import QuicksightIngestion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightIngestion(self, "example", + data_set_id=Token.as_string(aws_quicksight_data_set_example.data_set_id), + ingestion_id="example-id", + ingestion_type="FULL_REFRESH" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `data_set_id` - (Required) ID of the dataset used in the ingestion. +* `ingestion_id` - (Required) ID for the ingestion. +* `ingestion_type` - (Required) Type of ingestion to be created. Valid values are `INCREMENTAL_REFRESH` and `FULL_REFRESH`. + +The following arguments are optional: + +* `aws_account_id` - (Optional) AWS account ID. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Ingestion. +* `id` - A comma-delimited string joining AWS account ID, data set ID, and ingestion ID. +* `ingestion_status` - Ingestion status. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Ingestion using the AWS account ID, data set ID, and ingestion ID separated by commas (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import QuickSight Ingestion using the AWS account ID, data set ID, and ingestion ID separated by commas (`,`). For example: + +```console +% terraform import aws_quicksight_ingestion.example 123456789012,example-dataset-id,example-ingestion-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/quicksight_namespace.html.markdown b/website/docs/cdktf/python/r/quicksight_namespace.html.markdown new file mode 100644 index 00000000000..a783dff7de2 --- /dev/null +++ b/website/docs/cdktf/python/r/quicksight_namespace.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_namespace" +description: |- + Terraform resource for managing an AWS QuickSight Namespace. +--- + + + +# Resource: aws_quicksight_namespace + +Terraform resource for managing an AWS QuickSight Namespace. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_namespace import QuicksightNamespace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightNamespace(self, "example", + namespace="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `namespace` - (Required) Name of the namespace. + +The following arguments are optional: + +* `aws_account_id` - (Optional) AWS account ID. +* `identity_store` - (Optional) User identity directory type. Defaults to `QUICKSIGHT`, the only current valid value. +* `tags` - (Optional) Key-value map of resource tags. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Namespace.
+* `capacity_region` - Namespace AWS Region.
+* `creation_status` - Creation status of the namespace.
+* `id` - A comma-delimited string joining AWS account ID and namespace.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `2m`)
+* `delete` - (Default `2m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Namespace using the AWS account ID and namespace separated by commas (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import QuickSight Namespace using the AWS account ID and namespace separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_namespace.example 123456789012,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_refresh_schedule.html.markdown b/website/docs/cdktf/python/r/quicksight_refresh_schedule.html.markdown
new file mode 100644
index 00000000000..dc28bed1f4e
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_refresh_schedule.html.markdown
@@ -0,0 +1,169 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_refresh_schedule"
+description: |-
+  Manages a QuickSight Refresh Schedule.
+---
+
+
+
+# Resource: aws_quicksight_refresh_schedule
+
+Resource for managing a QuickSight Refresh Schedule.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_refresh_schedule import QuicksightRefreshSchedule, QuicksightRefreshScheduleSchedule, QuicksightRefreshScheduleScheduleScheduleFrequency
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightRefreshSchedule(self, "example",
+            data_set_id="dataset-id",
+            schedule=[QuicksightRefreshScheduleSchedule(
+                refresh_type="FULL_REFRESH",
+                schedule_frequency=[QuicksightRefreshScheduleScheduleScheduleFrequency(
+                    interval="HOURLY"
+                )
+                ]
+            )
+            ],
+            schedule_id="schedule-id"
+        )
+```
+
+### With Weekly Refresh
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
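+# NOTE: the schedule below runs an incremental refresh every Monday at 01:00 in the
+# Europe/London timezone; `refresh_on_day` applies only to weekly and monthly intervals,
+# and `time_of_the_day` is not needed for hourly schedules.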
+#
+from imports.aws.quicksight_refresh_schedule import QuicksightRefreshSchedule, QuicksightRefreshScheduleSchedule, QuicksightRefreshScheduleScheduleScheduleFrequency, QuicksightRefreshScheduleScheduleScheduleFrequencyRefreshOnDay
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightRefreshSchedule(self, "example",
+            data_set_id="dataset-id",
+            schedule=[QuicksightRefreshScheduleSchedule(
+                refresh_type="INCREMENTAL_REFRESH",
+                schedule_frequency=[QuicksightRefreshScheduleScheduleScheduleFrequency(
+                    interval="WEEKLY",
+                    refresh_on_day=[QuicksightRefreshScheduleScheduleScheduleFrequencyRefreshOnDay(
+                        day_of_week="MONDAY"
+                    )
+                    ],
+                    time_of_the_day="01:00",
+                    timezone="Europe/London"
+                )
+                ]
+            )
+            ],
+            schedule_id="schedule-id"
+        )
+```
+
+### With Monthly Refresh
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_refresh_schedule import QuicksightRefreshSchedule, QuicksightRefreshScheduleSchedule, QuicksightRefreshScheduleScheduleScheduleFrequency, QuicksightRefreshScheduleScheduleScheduleFrequencyRefreshOnDay
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightRefreshSchedule(self, "example",
+            data_set_id="dataset-id",
+            schedule=[QuicksightRefreshScheduleSchedule(
+                refresh_type="INCREMENTAL_REFRESH",
+                schedule_frequency=[QuicksightRefreshScheduleScheduleScheduleFrequency(
+                    interval="MONTHLY",
+                    refresh_on_day=[QuicksightRefreshScheduleScheduleScheduleFrequencyRefreshOnDay(
+                        day_of_month="1"
+                    )
+                    ],
+                    time_of_the_day="01:00",
+                    timezone="Europe/London"
+                )
+                ]
+            )
+            ],
+            schedule_id="schedule-id"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `data_set_id` - (Required, Forces new resource) The ID of the dataset.
+* `schedule_id` - (Required, Forces new resource) The ID of the refresh schedule.
+* `schedule` - (Required) The [refresh schedule](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_RefreshSchedule.html). See [schedule](#schedule).
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional, Forces new resource) AWS account ID.
+
+### schedule
+
+* `refresh_type` - (Required) The type of refresh that the dataset undergoes. Valid values are `INCREMENTAL_REFRESH` and `FULL_REFRESH`.
+* `start_after_date_time` - (Optional) Time after which the refresh schedule can be started, expressed in `YYYY-MM-DDTHH:MM:SS` format.
+* `schedule_frequency` - (Optional) The configuration of the [schedule frequency](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_RefreshFrequency.html). See [schedule_frequency](#schedule_frequency).
+
+### schedule_frequency
+
+* `interval` - (Required) The interval between scheduled refreshes. Valid values are `MINUTE15`, `MINUTE30`, `HOURLY`, `DAILY`, `WEEKLY` and `MONTHLY`.
+* `time_of_the_day` - (Optional) The time of day that you want the dataset to refresh. This value is expressed in `HH:MM` format. This field is not required for schedules that refresh hourly.
+* `timezone` - (Optional) The timezone that you want the refresh schedule to use.
+* `refresh_on_day` - (Optional) The [refresh on entity](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ScheduleRefreshOnEntity.html) configuration for weekly or monthly schedules. See [refresh_on_day](#refresh_on_day).
+
+### refresh_on_day
+
+* `day_of_month` - (Optional) The day of the month that you want to schedule refresh on.
+* `day_of_week` - (Optional) The day of the week that you want to schedule a refresh on. 
Valid values are `SUNDAY`, `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY` and `SATURDAY`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the refresh schedule.
+* `id` - A comma-delimited string joining AWS account ID, data set ID, and refresh schedule ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Refresh Schedule using the AWS account ID, data set ID, and schedule ID separated by commas (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a QuickSight Refresh Schedule using the AWS account ID, data set ID, and schedule ID separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_refresh_schedule.example 123456789012,dataset-id,schedule-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_template.html.markdown b/website/docs/cdktf/python/r/quicksight_template.html.markdown
new file mode 100644
index 00000000000..938a6997bc1
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_template.html.markdown
@@ -0,0 +1,210 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_template"
+description: |-
+  Manages a QuickSight Template.
+---
+
+
+
+# Resource: aws_quicksight_template
+
+Resource for managing a QuickSight Template.
+
+## Example Usage
+
+### From Source Template
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_template import QuicksightTemplate, QuicksightTemplateSourceEntity, QuicksightTemplateSourceEntitySourceTemplate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightTemplate(self, "example",
+            name="example-name",
+            source_entity=QuicksightTemplateSourceEntity(
+                source_template=QuicksightTemplateSourceEntitySourceTemplate(
+                    arn=source.arn
+                )
+            ),
+            template_id="example-id",
+            version_description="version"
+        )
+```
+
+### With Definition
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
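+# NOTE: `definition` below is passed as a plain mapping that mirrors the QuickSight
+# TemplateVersionDefinition API shape (data_set_configuration, sheets, visuals); see the
+# AWS API documentation linked under [definition](#definition) for the supported keys.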
+#
+from imports.aws.quicksight_template import QuicksightTemplate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightTemplate(self, "example",
+            definition={
+                "data_set_configuration": [{
+                    "data_set_schema": {
+                        "column_schema_list": [{
+                            "data_type": "STRING",
+                            "name": "Column1"
+                        }, {
+                            "data_type": "INTEGER",
+                            "name": "Column2"
+                        }
+                        ]
+                    },
+                    "placeholder": "1"
+                }
+                ],
+                "sheets": [{
+                    "sheet_id": "Test1",
+                    "title": "Test",
+                    "visuals": [{
+                        "bar_chart_visual": {
+                            "chart_configuration": {
+                                "field_wells": {
+                                    "bar_chart_aggregated_field_wells": {
+                                        "category": [{
+                                            "categorical_dimension_field": {
+                                                "column": {
+                                                    "column_name": "Column1",
+                                                    "data_set_identifier": "1"
+                                                },
+                                                "field_id": "1"
+                                            }
+                                        }
+                                        ],
+                                        "values": [{
+                                            "numerical_measure_field": {
+                                                "aggregation_function": {
+                                                    "simple_numerical_aggregation": "SUM"
+                                                },
+                                                "column": {
+                                                    "column_name": "Column2",
+                                                    "data_set_identifier": "1"
+                                                },
+                                                "field_id": "2"
+                                            }
+                                        }
+                                        ]
+                                    }
+                                }
+                            },
+                            "visual_id": "BarChart"
+                        }
+                    }
+                    ]
+                }
+                ]
+            },
+            name="example-name",
+            template_id="example-id",
+            version_description="version"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `template_id` - (Required, Forces new resource) Identifier for the template.
+* `name` - (Required) Display name for the template.
+* `version_description` - (Required) A description of the current template version being created/updated.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional, Forces new resource) AWS account ID.
+* `definition` - (Optional) A detailed template definition. Only one of `definition` or `source_entity` should be configured. See [definition](#definition).
+* `permissions` - (Optional) A set of resource permissions on the template. Maximum of 64 items. See [permissions](#permissions).
+* `source_entity` - (Optional) The entity that you are using as a source when you create the template (analysis or template). Only one of `definition` or `source_entity` should be configured. See [source_entity](#source_entity).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### permissions
+
+* `actions` - (Required) List of IAM actions to grant or revoke permissions on.
+* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values.
+
+### source_entity
+
+* `source_analysis` - (Optional) The source analysis, if it is based on an analysis. Only one of `source_analysis` or `source_template` should be configured. See [source_analysis](#source_analysis).
+* `source_template` - (Optional) The source template, if it is based on a template. Only one of `source_analysis` or `source_template` should be configured. See [source_template](#source_template).
+
+### source_analysis
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the resource.
+* `data_set_references` - (Required) A list of dataset references used as placeholders in the template. See [data_set_references](#data_set_references).
+
+### data_set_references
+
+* `data_set_arn` - (Required) Dataset Amazon Resource Name (ARN).
+* `data_set_placeholder` - (Required) Dataset placeholder. 
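+
+As a sketch (not one of the generated examples above), `source_analysis` and `data_set_references` combine as follows when creating a template from an existing analysis. The ARNs are hypothetical, and the nested config class names are assumed to follow the same generated-binding naming convention as `QuicksightTemplateSourceEntitySourceTemplate` above:
+
+```python
+# Hypothetical sketch: create a template from an existing analysis.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.quicksight_template import (QuicksightTemplate,
+    QuicksightTemplateSourceEntity, QuicksightTemplateSourceEntitySourceAnalysis,
+    QuicksightTemplateSourceEntitySourceAnalysisDataSetReferences)
+class MyTemplateFromAnalysis(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightTemplate(self, "from_analysis",
+            name="example-from-analysis",
+            template_id="example-from-analysis-id",
+            version_description="version",
+            source_entity=QuicksightTemplateSourceEntity(
+                source_analysis=QuicksightTemplateSourceEntitySourceAnalysis(
+                    # Hypothetical analysis ARN; use the ARN of your own analysis.
+                    arn="arn:aws:quicksight:us-east-1:123456789012:analysis/example-analysis-id",
+                    data_set_references=[QuicksightTemplateSourceEntitySourceAnalysisDataSetReferences(
+                        # Each referenced dataset maps to a placeholder used inside the template.
+                        data_set_arn="arn:aws:quicksight:us-east-1:123456789012:dataset/example-dataset-id",
+                        data_set_placeholder="1"
+                    )
+                    ]
+                )
+            )
+        )
+```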
+
+### source_template
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the resource.
+
+### definition
+
+* `data_set_configuration` - (Required) A list of dataset configurations. These configurations define the required columns for each dataset used within a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DataSetConfiguration.html).
+* `analysis_defaults` - (Optional) The configuration for default analysis settings. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_AnalysisDefaults.html).
+* `calculated_fields` - (Optional) A list of calculated field definitions for the template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CalculatedField.html).
+* `column_configurations` - (Optional) A list of template-level column configurations. Column configurations are used to set default formatting for a column that's used throughout a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnConfiguration.html).
+* `filter_groups` - (Optional) A list of filter definitions for a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_FilterGroup.html). For more information, see [Filtering Data](https://docs.aws.amazon.com/quicksight/latest/user/filtering-visual-data.html) in Amazon QuickSight User Guide.
+* `parameters_declarations` - (Optional) A list of parameter declarations for a template. Parameters are named variables that can transfer a value for use by an action or an object. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ParameterDeclaration.html). For more information, see [Parameters in Amazon QuickSight](https://docs.aws.amazon.com/quicksight/latest/user/parameters-in-quicksight.html) in the Amazon QuickSight User Guide.
+* `sheets` - (Optional) A list of sheet definitions for a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_SheetDefinition.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the template.
+* `created_time` - The time that the template was created.
+* `id` - A comma-delimited string joining AWS account ID and template ID.
+* `last_updated_time` - The time that the template was last updated.
+* `source_entity_arn` - Amazon Resource Name (ARN) of an analysis or template that was used to create this template.
+* `status` - The template creation status.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `version_number` - The version number of the template version.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Template using the AWS account ID and template ID separated by a comma (`,`). 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a QuickSight Template using the AWS account ID and template ID separated by a comma (`,`). For example: + +```console +% terraform import aws_quicksight_template.example 123456789012,example-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/quicksight_template_alias.html.markdown b/website/docs/cdktf/python/r/quicksight_template_alias.html.markdown new file mode 100644 index 00000000000..7054729759e --- /dev/null +++ b/website/docs/cdktf/python/r/quicksight_template_alias.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_template_alias" +description: |- + Terraform resource for managing an AWS QuickSight Template Alias. +--- + + + +# Resource: aws_quicksight_template_alias + +Terraform resource for managing an AWS QuickSight Template Alias. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.quicksight_template_alias import QuicksightTemplateAlias +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + QuicksightTemplateAlias(self, "example", + alias_name="example-alias", + template_id=test.template_id, + template_version_number=test.version_number + ) +``` + +## Argument Reference + +The following arguments are required: + +* `alias_name` - (Required, Forces new resource) Display name of the template alias. +* `template_id` - (Required, Forces new resource) ID of the template. +* `template_version_number` - (Required) Version number of the template. + +The following arguments are optional: + +* `aws_account_id` - (Optional, Forces new resource) AWS account ID. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the template alias. +* `id` - A comma-delimited string joining AWS account ID, template ID, and alias name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Template Alias using the AWS account ID, template ID, and alias name separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import QuickSight Template Alias using the AWS account ID, template ID, and alias name separated by a comma (`,`). 
For example:
+
+```console
+% terraform import aws_quicksight_template_alias.example 123456789012,example-id,example-alias
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_theme.html.markdown b/website/docs/cdktf/python/r/quicksight_theme.html.markdown
new file mode 100644
index 00000000000..c94ffbccc00
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_theme.html.markdown
@@ -0,0 +1,172 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_theme"
+description: |-
+  Manages a QuickSight Theme.
+---
+
+
+
+# Resource: aws_quicksight_theme
+
+Resource for managing a QuickSight Theme.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_theme import QuicksightTheme, QuicksightThemeConfiguration, QuicksightThemeConfigurationDataColorPalette
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightTheme(self, "example",
+            base_theme_id="MIDNIGHT",
+            configuration=QuicksightThemeConfiguration(
+                data_color_palette=QuicksightThemeConfigurationDataColorPalette(
+                    colors=["#FFFFFF", "#111111", "#222222", "#333333", "#444444", "#555555", "#666666", "#777777", "#888888", "#999999"
+                    ],
+                    empty_fill_color="#FFFFFF",
+                    min_max_gradient=["#FFFFFF", "#111111"]
+                )
+            ),
+            name="example",
+            theme_id="example"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `theme_id` - (Required, Forces new resource) Identifier of the theme.
+* `base_theme_id` - (Required) The ID of the theme that a custom theme will inherit from. All themes inherit from one of the starting themes defined by Amazon QuickSight. For a list of the starting themes, use ListThemes or choose Themes from within an analysis.
+* `name` - (Required) Display name of the theme.
+* `configuration` - (Required) The theme configuration, which contains the theme display properties. See [configuration](#configuration).
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional, Forces new resource) AWS account ID.
+* `permissions` - (Optional) A set of resource permissions on the theme. Maximum of 64 items. See [permissions](#permissions).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `version_description` - (Optional) A description of the current theme version being created/updated.
+
+### permissions
+
+* `actions` - (Required) List of IAM actions to grant or revoke permissions on.
+* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values.
+
+### configuration
+
+* `data_color_palette` - (Optional) Color properties that apply to chart data colors. See [data_color_palette](#data_color_palette).
+* `sheet` - (Optional) Display options related to sheets. See [sheet](#sheet).
+* `typography` - (Optional) Determines the typography options. See [typography](#typography). 
+* `ui_color_palette` - (Optional) Color properties that apply to the UI and to charts, excluding the colors that apply to data. See [ui_color_palette](#ui_color_palette). + +### data_color_palette + +* `colors` - (Optional) List of hexadecimal codes for the colors. Minimum of 8 items and maximum of 20 items. +* `empty_fill_color` - (Optional) The hexadecimal code of a color that applies to charts where a lack of data is highlighted. +* `min_max_gradient` - (Optional) The minimum and maximum hexadecimal codes that describe a color gradient. List of exactly 2 items. + +### sheet + +* `tile` - (Optional) The display options for tiles. See [tile](#tile). +* `tile_layout` - (Optional) The layout options for tiles. See [tile_layout](#tile_layout). + +### tile + +* `border` - (Optional) The border around a tile. See [border](#border). + +### border + +* `show` - (Optional) The option to enable display of borders for visuals. + +### tile_layout + +* `gutter` - (Optional) The gutter settings that apply between tiles. See [gutter](#gutter). +* `margin` - (Optional) The margin settings that apply around the outside edge of sheets. See [margin](#margin). + +### gutter + +* `show` - (Optional) This Boolean value controls whether to display a gutter space between sheet tiles. + +### margin + +* `show` - (Optional) This Boolean value controls whether to display sheet margins. + +### typography + +* `font_families` - (Optional) Determines the list of font families. Maximum number of 5 items. See [font_families](#font_families). + +### font_families + +* `font_family` - (Optional) Font family name. + +### ui_color_palette + +* `accent` - (Optional) Color (hexadecimal) that applies to selected states and buttons. +* `accent_foreground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the accent color. +* `danger` - (Optional) Color (hexadecimal) that applies to error messages. +* `danger_foreground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the error color. +* `dimension` - (Optional) Color (hexadecimal) that applies to the names of fields that are identified as dimensions. +* `dimension_foreground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the dimension color. +* `measure` - (Optional) Color (hexadecimal) that applies to the names of fields that are identified as measures. +* `measure_foreground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the measure color. +* `primary_background` - (Optional) Color (hexadecimal) that applies to visuals and other high emphasis UI. +* `primary_foreground` - (Optional) Color (hexadecimal) of text and other foreground elements that appear over the primary background regions, such as grid lines, borders, table banding, icons, and so on. +* `secondary_background` - (Optional) Color (hexadecimal) that applies to the sheet background and sheet controls. +* `secondary_foreground` - (Optional) Color (hexadecimal) that applies to any sheet title, sheet control text, or UI that appears over the secondary background. +* `success` - (Optional) Color (hexadecimal) that applies to success messages, for example the check mark for a successful download. +* `success_foreground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the success color. +* `warning` - (Optional) Color (hexadecimal) that applies to warning and informational messages. 
+
+* `warning_foreground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the warning color.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the theme.
+* `created_time` - The time that the theme was created.
+* `id` - A comma-delimited string joining AWS account ID and theme ID.
+* `last_updated_time` - The time that the theme was last updated.
+* `status` - The theme creation status.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `version_number` - The version number of the theme version.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Theme using the AWS account ID and theme ID separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a QuickSight Theme using the AWS account ID and theme ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_theme.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_user.html.markdown b/website/docs/cdktf/python/r/quicksight_user.html.markdown
new file mode 100644
index 00000000000..2c913f0fe39
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_user.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_user"
+description: |-
+  Manages a QuickSight User.
+---
+
+
+
+# Resource: aws_quicksight_user
+
+Resource for managing a QuickSight User.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.quicksight_user import QuicksightUser
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        QuicksightUser(self, "example",
+            email="author@example.com",
+            iam_arn="arn:aws:iam::123456789012:user/Example",
+            identity_type="IAM",
+            namespace="foo",
+            session_name="an-author",
+            user_role="AUTHOR"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `email` - (Required) The email address of the user that you want to register.
+* `identity_type` - (Required) Amazon QuickSight supports several ways of managing the identity of users. This parameter accepts either `IAM` or `QUICKSIGHT`. If `IAM` is specified, the `iam_arn` must also be specified.
+* `user_role` - (Required) The Amazon QuickSight role of the user. 
The user role can be one of the following: `READER`, `AUTHOR`, or `ADMIN`.
+* `user_name` - (Optional) The Amazon QuickSight user name that you want to create for the user you are registering. Only valid for registering a user with `identity_type` set to `QUICKSIGHT`.
+* `aws_account_id` - (Optional) The ID for the AWS account that the user is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `iam_arn` - (Optional) The ARN of the IAM user or role that you are registering with Amazon QuickSight.
+* `namespace` - (Optional) The Amazon QuickSight namespace to create the user in. Defaults to `default`.
+* `session_name` - (Optional) The name of the IAM session to use when assuming roles that can embed QuickSight dashboards. Only valid for registering users using an assumed IAM role. Additionally, if registering multiple users using the same IAM role, each user needs to have a unique session name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the user.
+
+## Import
+
+You cannot import this resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/quicksight_vpc_connection.html.markdown b/website/docs/cdktf/python/r/quicksight_vpc_connection.html.markdown
new file mode 100644
index 00000000000..e13fdd7c4b7
--- /dev/null
+++ b/website/docs/cdktf/python/r/quicksight_vpc_connection.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_vpc_connection"
+description: |-
+  Terraform resource for managing an AWS QuickSight VPC Connection.
+---
+
+
+
+# Resource: aws_quicksight_vpc_connection
+
+Terraform resource for managing an AWS QuickSight VPC Connection.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.iam_role import IamRole, IamRoleInlinePolicy
+from imports.aws.quicksight_vpc_connection import QuicksightVpcConnection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        vpc_connection_role = IamRole(self, "vpc_connection_role",
+            assume_role_policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": "sts:AssumeRole",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "quicksight.amazonaws.com"
+                        }
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            inline_policy=[IamRoleInlinePolicy(
+                name="QuickSightVPCConnectionRolePolicy",
+                policy=Token.as_string(
+                    Fn.jsonencode({
+                        "Statement": [{
+                            "Action": ["ec2:CreateNetworkInterface", "ec2:ModifyNetworkInterfaceAttribute", "ec2:DeleteNetworkInterface", "ec2:DescribeSubnets", "ec2:DescribeSecurityGroups"
+                            ],
+                            "Effect": "Allow",
+                            "Resource": ["*"]
+                        }
+                        ],
+                        "Version": "2012-10-17"
+                    }))
+            )
+            ]
+        )
+        QuicksightVpcConnection(self, "example",
+            name="Example Connection",
+            role_arn=vpc_connection_role.arn,
+            security_group_ids=["sg-00000000000000000"],
+            subnet_ids=["subnet-00000000000000000", "subnet-00000000000000001"],
+            vpc_connection_id="example-connection-id"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `vpc_connection_id` - (Required) The ID of the VPC connection.
+* `name` - (Required) The display name for the VPC connection. 
+
+* `role_arn` - (Required) The IAM role to associate with the VPC connection.
+* `security_group_ids` - (Required) A list of security group IDs for the VPC connection.
+* `subnet_ids` - (Required) A list of subnet IDs for the VPC connection.
+
+The following arguments are optional:
+
+* `aws_account_id` - (Optional) AWS account ID.
+* `dns_resolvers` - (Optional) A list of IP addresses of DNS resolver endpoints for the VPC connection.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the VPC connection.
+* `availability_status` - The availability status of the VPC connection. Valid values are `AVAILABLE`, `UNAVAILABLE` or `PARTIALLY_AVAILABLE`.
+* `id` - A comma-delimited string joining AWS account ID and VPC connection ID.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight VPC connection using the AWS account ID and VPC connection ID separated by commas (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import QuickSight VPC connection using the AWS account ID and VPC connection ID separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_vpc_connection.example 123456789012,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ram_principal_association.markdown b/website/docs/cdktf/python/r/ram_principal_association.markdown
new file mode 100644
index 00000000000..6d53dc81b41
--- /dev/null
+++ b/website/docs/cdktf/python/r/ram_principal_association.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "RAM (Resource Access Manager)"
+layout: "aws"
+page_title: "AWS: aws_ram_principal_association"
+description: |-
+  Provides a Resource Access Manager (RAM) principal association.
+---
+
+
+
+# Resource: aws_ram_principal_association
+
+Provides a Resource Access Manager (RAM) principal association. Depending on whether [RAM Sharing with AWS Organizations is enabled](https://docs.aws.amazon.com/ram/latest/userguide/getting-started-sharing.html#getting-started-sharing-orgs), RAM behaves differently with different principal types.
+
+When RAM Sharing with AWS Organizations is enabled:
+
+- For AWS Account ID, Organization, and Organizational Unit principals within the same AWS Organization, no resource share invitation is sent and resources become available automatically after creating the association. 
+
+- For AWS Account ID principals outside the AWS Organization, a resource share invitation is sent and must be accepted before resources become available. See the [`aws_ram_resource_share_accepter` resource](/docs/providers/aws/r/ram_resource_share_accepter.html) to accept these invitations.
+
+When RAM Sharing with AWS Organizations is not enabled:
+
+- Organization and Organizational Unit principals cannot be used.
+- For AWS Account ID principals, a resource share invitation is sent and must be accepted before resources become available. See the [`aws_ram_resource_share_accepter` resource](/docs/providers/aws/r/ram_resource_share_accepter.html) to accept these invitations.
+
+## Example Usage
+
+### AWS Account ID
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ram_principal_association import RamPrincipalAssociation
+from imports.aws.ram_resource_share import RamResourceShare
+class MyConvertedCode(TerraformStack):
+    # `resource_share_name` is the converted keyword-only `name` input, renamed here
+    # because it clashed with the construct's positional `name` parameter.
+    def __init__(self, scope, name, *, resource_share_name):
+        super().__init__(scope, name)
+        example = RamResourceShare(self, "example",
+            allow_external_principals=True,
+            name=resource_share_name
+        )
+        aws_ram_principal_association_example = RamPrincipalAssociation(self, "example_1",
+            principal="111111111111",
+            resource_share_arn=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_ram_principal_association_example.override_logical_id("example")
+```
+
+### AWS Organization
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ram_principal_association import RamPrincipalAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RamPrincipalAssociation(self, "example",
+            principal=Token.as_string(aws_organizations_organization_example.arn),
+            resource_share_arn=Token.as_string(aws_ram_resource_share_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `principal` - (Required) The principal to associate with the resource share. Possible values are an AWS account ID, an AWS Organizations Organization ARN, or an AWS Organizations Organization Unit ARN.
+* `resource_share_arn` - (Required) The Amazon Resource Name (ARN) of the resource share.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the Resource Share and the principal, separated by a comma.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RAM Principal Associations using their Resource Share ARN and the `principal` separated by a comma. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import RAM Principal Associations using their Resource Share ARN and the `principal` separated by a comma. For example: + +```console +% terraform import aws_ram_principal_association.example arn:aws:ram:eu-west-1:123456789012:resource-share/73da1ab9-b94a-4ba3-8eb4-45917f7f4b12,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ram_resource_association.html.markdown b/website/docs/cdktf/python/r/ram_resource_association.html.markdown new file mode 100644 index 00000000000..0551594816a --- /dev/null +++ b/website/docs/cdktf/python/r/ram_resource_association.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "RAM (Resource Access Manager)" +layout: "aws" +page_title: "AWS: aws_ram_resource_association" +description: |- + Manages a Resource Access Manager (RAM) Resource Association. +--- + + + +# Resource: aws_ram_resource_association + +Manages a Resource Access Manager (RAM) Resource Association. + +~> *NOTE:* Certain AWS resources (e.g., EC2 Subnets) can only be shared in an AWS account that is a member of an AWS Organizations organization with organization-wide Resource Access Manager functionality enabled. See the [Resource Access Manager User Guide](https://docs.aws.amazon.com/ram/latest/userguide/what-is.html) and AWS service specific documentation for additional information. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ram_resource_association import RamResourceAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RamResourceAssociation(self, "example", + resource_arn=Token.as_string(aws_subnet_example.arn), + resource_share_arn=Token.as_string(aws_ram_resource_share_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resource_arn` - (Required) Amazon Resource Name (ARN) of the resource to associate with the RAM Resource Share. +* `resource_share_arn` - (Required) Amazon Resource Name (ARN) of the RAM Resource Share. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the resource share. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RAM Resource Associations using their Resource Share ARN and Resource ARN separated by a comma. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import RAM Resource Associations using their Resource Share ARN and Resource ARN separated by a comma. 
For example: + +```console +% terraform import aws_ram_resource_association.example arn:aws:ram:eu-west-1:123456789012:resource-share/73da1ab9-b94a-4ba3-8eb4-45917f7f4b12,arn:aws:ec2:eu-west-1:123456789012:subnet/subnet-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ram_resource_share.markdown b/website/docs/cdktf/python/r/ram_resource_share.markdown new file mode 100644 index 00000000000..99cee5acb85 --- /dev/null +++ b/website/docs/cdktf/python/r/ram_resource_share.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "RAM (Resource Access Manager)" +layout: "aws" +page_title: "AWS: aws_ram_resource_share" +description: |- + Manages a Resource Access Manager (RAM) Resource Share. +--- + + + +# Resource: aws_ram_resource_share + +Manages a Resource Access Manager (RAM) Resource Share. To associate principals with the share, see the [`aws_ram_principal_association` resource](/docs/providers/aws/r/ram_principal_association.html). To associate resources with the share, see the [`aws_ram_resource_association` resource](/docs/providers/aws/r/ram_resource_association.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ram_resource_share import RamResourceShare +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RamResourceShare(self, "example", + allow_external_principals=True, + name="example", + tags={ + "Environment": "Production" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the resource share. +* `allow_external_principals` - (Optional) Indicates whether principals outside your organization can be associated with a resource share. +* `permission_arns` - (Optional) Specifies the Amazon Resource Names (ARNs) of the RAM permission to associate with the resource share. If you do not specify an ARN for the permission, RAM automatically attaches the default version of the permission for each resource type. You can associate only one permission with each resource type included in the resource share. +* `tags` - (Optional) A map of tags to assign to the resource share. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the resource share. +* `id` - The Amazon Resource Name (ARN) of the resource share. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import resource shares using the `arn` of the resource share. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import resource shares using the `arn` of the resource share. For example:
+
+```console
+% terraform import aws_ram_resource_share.example arn:aws:ram:eu-west-1:123456789012:resource-share/73da1ab9-b94a-4ba3-8eb4-45917f7f4b12
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ram_resource_share_accepter.markdown b/website/docs/cdktf/python/r/ram_resource_share_accepter.markdown
new file mode 100644
index 00000000000..ae06f97901b
--- /dev/null
+++ b/website/docs/cdktf/python/r/ram_resource_share_accepter.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "RAM (Resource Access Manager)"
+layout: "aws"
+page_title: "AWS: aws_ram_resource_share_accepter"
+description: |-
+  Manages accepting a Resource Access Manager (RAM) Resource Share invitation.
+---
+
+
+
+# Resource: aws_ram_resource_share_accepter
+
+Manages accepting a Resource Access Manager (RAM) Resource Share invitation. From a _receiver_ AWS account, accept an invitation to share resources that were shared by a _sender_ AWS account. To create a resource share in the _sender_, see the [`aws_ram_resource_share` resource](/docs/providers/aws/r/ram_resource_share.html).
+
+~> **Note:** If both AWS accounts are in the same Organization and [RAM Sharing with AWS Organizations is enabled](https://docs.aws.amazon.com/ram/latest/userguide/getting-started-sharing.html#getting-started-sharing-orgs), this resource is not necessary as RAM Resource Share invitations are not used.
+
+## Example Usage
+
+This configuration provides an example of using multiple Terraform AWS providers to configure two different AWS accounts. In the _sender_ account, the configuration creates an `aws_ram_resource_share` and uses a data source in the _receiver_ account to create an `aws_ram_principal_association` resource with the _receiver's_ account ID. In the _receiver_ account, the configuration accepts the invitation to share resources with the `aws_ram_resource_share_accepter`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
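+# In this example the default provider is the _receiver_ account and the
+# aliased "alternate" provider is the _sender_ account.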
+# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.provider import AwsProvider +from imports.aws.ram_principal_association import RamPrincipalAssociation +from imports.aws.ram_resource_share import RamResourceShare +from imports.aws.ram_resource_share_accepter import RamResourceShareAccepter +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AwsProvider(self, "aws", + profile="profile2" + ) + alternate = AwsProvider(self, "aws_1", + alias="alternate", + profile="profile1" + ) + sender_share = RamResourceShare(self, "sender_share", + allow_external_principals=True, + name="tf-test-resource-share", + provider=alternate, + tags={ + "Name": "tf-test-resource-share" + } + ) + receiver = DataAwsCallerIdentity(self, "receiver") + sender_invite = RamPrincipalAssociation(self, "sender_invite", + principal=Token.as_string(receiver.account_id), + provider=alternate, + resource_share_arn=sender_share.arn + ) + RamResourceShareAccepter(self, "receiver_accept", + share_arn=sender_invite.resource_share_arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `share_arn` - (Required) The ARN of the resource share. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `invitation_arn` - The ARN of the resource share invitation. +* `share_id` - The ID of the resource share as displayed in the console. +* `status` - The status of the resource share (ACTIVE, PENDING, FAILED, DELETING, DELETED). +* `receiver_account_id` - The account ID of the receiver account which accepts the invitation. +* `sender_account_id` - The account ID of the sender account which submits the invitation. +* `share_name` - The name of the resource share. +* `resources` - A list of the resource ARNs shared via the resource share. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import resource share accepters using the resource share ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import resource share accepters using the resource share ARN. For example: + +```console +% terraform import aws_ram_resource_share_accepter.example arn:aws:ram:us-east-1:123456789012:resource-share/c4b56393-e8d9-89d9-6dc9-883752de4767 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/rbin_rule.html.markdown b/website/docs/cdktf/python/r/rbin_rule.html.markdown new file mode 100644 index 00000000000..a42100ff959 --- /dev/null +++ b/website/docs/cdktf/python/r/rbin_rule.html.markdown @@ -0,0 +1,120 @@ +--- +subcategory: "Recycle Bin (RBin)" +layout: "aws" +page_title: "AWS: aws_rbin_rule" +description: |- + Terraform resource for managing an AWS RBin Rule. +--- + + + +# Resource: aws_rbin_rule + +Terraform resource for managing an AWS RBin Rule. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
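+# The rule below retains deleted EBS snapshots tagged tag_key=tag_value for 10 days.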
+#
+from imports.aws.rbin_rule import RbinRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RbinRule(self, "example",
+            description="example_rule",
+            resource_tags=[RbinRuleResourceTags(
+                resource_tag_key="tag_key",
+                resource_tag_value="tag_value"
+            )
+            ],
+            resource_type="EBS_SNAPSHOT",
+            retention_period=RbinRuleRetentionPeriod(
+                retention_period_unit="DAYS",
+                retention_period_value=10
+            ),
+            tags={
+                "test_tag_key": "test_tag_value"
+            }
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `resource_type` - (Required) The resource type to be retained by the retention rule. Valid values are `EBS_SNAPSHOT` and `EC2_IMAGE`.
+* `retention_period` - (Required) Information about the retention period for which the retention rule is to retain resources. See [`retention_period`](#retention_period) below.
+
+The following arguments are optional:
+
+* `description` - (Optional) The retention rule description.
+* `resource_tags` - (Optional) Specifies the resource tags to use to identify resources that are to be retained by a tag-level retention rule. See [`resource_tags`](#resource_tags) below.
+* `lock_configuration` - (Optional) Information about the retention rule lock configuration. See [`lock_configuration`](#lock_configuration) below.
+
+### retention_period
+
+The following arguments are required:
+
+* `retention_period_unit` - (Required) The unit of time in which the retention period is measured. Currently, only `DAYS` is supported.
+* `retention_period_value` - (Required) The period value for which the retention rule is to retain resources. The period is measured using the unit specified for `retention_period_unit`.
+
+### resource_tags
+
+The following argument is required:
+
+* `resource_tag_key` - (Required) The tag key.
+
+The following argument is optional:
+
+* `resource_tag_value` - (Optional) The tag value.
+
+### lock_configuration
+
+The following argument is required:
+
+* `unlock_delay` - (Required) Information about the retention rule unlock delay. See [`unlock_delay`](#unlock_delay) below.
+
+### unlock_delay
+
+The following arguments are required:
+
+* `unlock_delay_unit` - (Required) The unit of time in which to measure the unlock delay. Currently, the unlock delay can be measured only in days.
+* `unlock_delay_value` - (Required) The unlock delay period, measured in the unit specified for `unlock_delay_unit`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - (String) ID of the Rule.
+* `lock_end_time` - (Timestamp) The date and time at which the unlock delay is set to expire. Only returned for retention rules that have been unlocked and that are still within the unlock delay period.
+* `lock_state` - (String) The lock state of the retention rule. Valid values are `locked`, `pending_unlock`, `unlocked`.
+* `status` - (String) The state of the retention rule. Only retention rules that are in the `available` state retain resources. Valid values include `pending` and `available`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RBin Rule using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import RBin Rule using the `id`. For example:
+
+```console
+% terraform import aws_rbin_rule.example examplerule
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rds_cluster.html.markdown b/website/docs/cdktf/python/r/rds_cluster.html.markdown
new file mode 100644
index 00000000000..0bfe53d3fb4
--- /dev/null
+++ b/website/docs/cdktf/python/r/rds_cluster.html.markdown
@@ -0,0 +1,583 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster"
+description: |-
+  Manages an RDS Aurora Cluster
+---
+
+
+
+# Resource: aws_rds_cluster
+
+Manages an [RDS Aurora Cluster][2]. To manage cluster instances that inherit configuration from the cluster (when not running the cluster in `serverless` engine mode), see the [`aws_rds_cluster_instance` resource](/docs/providers/aws/r/rds_cluster_instance.html). To manage non-Aurora databases (e.g., MySQL, PostgreSQL, SQL Server, etc.), see the [`aws_db_instance` resource](/docs/providers/aws/r/db_instance.html).
+
+For information on the differences between the available Aurora MySQL engines,
+see [Comparison between Aurora MySQL 1 and Aurora MySQL 2](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Updates.20180206.html)
+in the Amazon RDS User Guide.
+
+Changes to an RDS Cluster can occur when you manually change a
+parameter, such as `port`, and are reflected in the next maintenance
+window. Because of this, Terraform may report a difference in its planning
+phase because a modification has not yet taken place. You can use the
+`apply_immediately` flag to instruct the service to apply the change immediately
+(see documentation below).
+
+~> **Note:** Using `apply_immediately` can result in a
+brief downtime as the server reboots. See the AWS Docs on [RDS Maintenance][4]
+for more information.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **NOTE on RDS Clusters and RDS Cluster Role Associations:** Terraform provides both a standalone [RDS Cluster Role Association](rds_cluster_role_association.html) (an association between an RDS Cluster and a single IAM Role) and
+an RDS Cluster resource with an `iam_roles` attribute.
+Use one resource or the other to associate IAM Roles and RDS Clusters.
+Not doing so will cause a conflict of associations and will result in the association being overwritten.
+
+## Example Usage
+
+### Aurora MySQL 2.x (MySQL 5.7)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
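+# Creates an Aurora MySQL 2.x (MySQL 5.7) cluster across three availability
+# zones with a 5-day backup retention window.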
+# +from imports.aws.rds_cluster import RdsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RdsCluster(self, "default", + availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"], + backup_retention_period=5, + cluster_identifier="aurora-cluster-demo", + database_name="mydb", + engine="aurora-mysql", + engine_version="5.7.mysql_aurora.2.03.2", + master_password="bar", + master_username="foo", + preferred_backup_window="07:00-09:00" + ) +``` + +### Aurora MySQL 1.x (MySQL 5.6) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.rds_cluster import RdsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, engine): + super().__init__(scope, name) + RdsCluster(self, "default", + availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"], + backup_retention_period=5, + cluster_identifier="aurora-cluster-demo", + database_name="mydb", + master_password="bar", + master_username="foo", + preferred_backup_window="07:00-09:00", + engine=engine + ) +``` + +### Aurora with PostgreSQL engine + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.rds_cluster import RdsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RdsCluster(self, "postgresql", + availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"], + backup_retention_period=5, + cluster_identifier="aurora-cluster-demo", + database_name="mydb", + engine="aurora-postgresql", + master_password="bar", + master_username="foo", + preferred_backup_window="07:00-09:00" + ) +``` + +### Aurora Multi-Master Cluster + +-> More information about Aurora Multi-Master Clusters can be found in the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-multi-master.html). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.rds_cluster import RdsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, engine): + super().__init__(scope, name) + RdsCluster(self, "example", + cluster_identifier="example", + db_subnet_group_name=Token.as_string(aws_db_subnet_group_example.name), + engine_mode="multimaster", + master_password="barbarbarbar", + master_username="foo", + skip_final_snapshot=True, + engine=engine + ) +``` + +### RDS Multi-AZ Cluster + +-> More information about RDS Multi-AZ Clusters can be found in the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/multi-az-db-clusters-concepts.html). + +To create a Multi-AZ RDS cluster, you must additionally specify the `engine`, `storage_type`, `allocated_storage`, `iops` and `db_cluster_instance_class` attributes. 
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RdsCluster(self, "example",
+            allocated_storage=100,
+            availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"],
+            cluster_identifier="example",
+            db_cluster_instance_class="db.r6gd.xlarge",
+            engine="mysql",
+            iops=1000,
+            master_password="mustbeeightcharacters",
+            master_username="test",
+            storage_type="io1"
+        )
+```
+
+### RDS Serverless v2 Cluster
+
+-> More information about RDS Serverless v2 Clusters can be found in the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless-v2.html).
+
+To create a Serverless v2 RDS cluster, you must additionally specify the `engine_mode` and `serverlessv2_scaling_configuration` attributes. An `aws_rds_cluster_instance` resource must also be added to the cluster with the `instance_class` attribute specified.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_cluster_instance import RdsClusterInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = RdsCluster(self, "example",
+            cluster_identifier="example",
+            database_name="test",
+            engine="aurora-postgresql",
+            engine_mode="provisioned",
+            engine_version="13.6",
+            master_password="must_be_eight_characters",
+            master_username="test",
+            serverlessv2_scaling_configuration=RdsClusterServerlessv2ScalingConfiguration(
+                max_capacity=1,
+                min_capacity=0.5
+            )
+        )
+        aws_rds_cluster_instance_example = RdsClusterInstance(self, "example_1",
+            cluster_identifier=example.id,
+            engine=example.engine,
+            engine_version=example.engine_version,
+            instance_class="db.serverless"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_instance_example.override_logical_id("example")
+```
+
+### RDS/Aurora Managed Master Passwords via Secrets Manager, default KMS Key
+
+-> More information about how RDS/Aurora integrates with Secrets Manager to manage master user passwords for your DB clusters can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and the [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+You can specify the `manage_master_user_password` attribute to enable managing the master password with Secrets Manager. You can also update an existing cluster to use Secrets Manager by specifying the `manage_master_user_password` attribute and removing the `master_password` attribute (removal is required).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine):
+        super().__init__(scope, name)
+        RdsCluster(self, "test",
+            cluster_identifier="example",
+            database_name="test",
+            manage_master_user_password=True,
+            master_username="test",
+            engine=engine
+        )
+```
+
+### RDS/Aurora Managed Master Passwords via Secrets Manager, specific KMS Key
+
+-> More information about how RDS/Aurora integrates with Secrets Manager to manage master user passwords for your DB clusters can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and the [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+Specify the `master_user_secret_kms_key_id` attribute to use a specific KMS Key.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_key import KmsKey
+from imports.aws.rds_cluster import RdsCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine):
+        super().__init__(scope, name)
+        example = KmsKey(self, "example",
+            description="Example KMS Key"
+        )
+        RdsCluster(self, "test",
+            cluster_identifier="example",
+            database_name="test",
+            manage_master_user_password=True,
+            master_user_secret_kms_key_id=example.key_id,
+            master_username="test",
+            engine=engine
+        )
+```
+
+### Global Cluster Restored From Snapshot
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_db_cluster_snapshot import DataAwsDbClusterSnapshot
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_global_cluster import RdsGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DataAwsDbClusterSnapshot(self, "example",
+            db_cluster_identifier="example-original-cluster",
+            most_recent=True
+        )
+        aws_rds_cluster_example = RdsCluster(self, "example_1",
+            cluster_identifier="example",
+            engine="aurora",
+            engine_version="5.6.mysql_aurora.1.22.4",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=[snapshot_identifier, global_cluster_identifier]
+            ),
+            snapshot_identifier=Token.as_string(example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_example.override_logical_id("example")
+        aws_rds_global_cluster_example = RdsGlobalCluster(self, "example_2",
+            force_destroy=True,
+            global_cluster_identifier="example",
+            source_db_cluster_identifier=Token.as_string(aws_rds_cluster_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_global_cluster_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the AWS official documentation:
+
+* [create-db-cluster](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-cluster.html)
+* [modify-db-cluster](https://docs.aws.amazon.com/cli/latest/reference/rds/modify-db-cluster.html)
+
+This resource supports the following arguments:
+
+* `allocated_storage` - (Optional) (Required for Multi-AZ DB cluster) The amount of storage in gibibytes (GiB) to allocate to each DB instance in the Multi-AZ DB cluster.
+* `allow_major_version_upgrade` - (Optional) Enable to allow major engine version upgrades when changing engine versions. Defaults to `false`.
+* `apply_immediately` - (Optional) Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`. See [Amazon RDS Documentation for more information.](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.DBInstance.Modifying.html)
+* `availability_zones` - (Optional) List of EC2 Availability Zones for the DB cluster storage where DB cluster instances can be created. RDS automatically assigns 3 AZs if less than 3 AZs are configured, which will show as a difference requiring resource recreation on the next Terraform apply. We recommend specifying 3 AZs or using [the `lifecycle` configuration block `ignore_changes` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) if necessary. A maximum of 3 AZs can be configured.
+* `backtrack_window` - (Optional) Target backtrack window, in seconds. Only available for `aurora` and `aurora-mysql` engines currently. To disable backtracking, set this value to `0`. Defaults to `0`. Must be between `0` and `259200` (72 hours).
+* `backup_retention_period` - (Optional) Days to retain backups for. Default `1`.
+* `cluster_identifier_prefix` - (Optional, Forces new resource) Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `cluster_identifier`.
+* `cluster_identifier` - (Optional, Forces new resources) The cluster identifier. If omitted, Terraform will assign a random, unique identifier.
+* `copy_tags_to_snapshot` – (Optional, boolean) Copy all Cluster `tags` to snapshots. Default is `false`.
+* `database_name` - (Optional) Name for an automatically created database on cluster creation. There are different naming restrictions per database engine: [RDS Naming Constraints][5]
+* `db_cluster_instance_class` - (Optional) (Required for Multi-AZ DB cluster) The compute and memory capacity of each DB instance in the Multi-AZ DB cluster, for example `db.m6g.xlarge`. Not all DB instance classes are available in all AWS Regions, or for all database engines. For the full list of DB instance classes and availability for your engine, see [DB instance class](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html) in the Amazon RDS User Guide.
+* `db_instance_parameter_group_name` - (Optional) Instance parameter group to associate with all instances of the DB cluster. The `db_instance_parameter_group_name` parameter is only valid in combination with the `allow_major_version_upgrade` parameter.
+* `db_subnet_group_name` - (Optional) DB subnet group to associate with this DB instance. **NOTE:** This must match the `db_subnet_group_name` specified on every [`aws_rds_cluster_instance`](/docs/providers/aws/r/rds_cluster_instance.html) in the cluster. 
+* `deletion_protection` - (Optional) If the DB instance should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `enable_global_write_forwarding` - (Optional) Whether cluster should forward writes to an associated global cluster. Applied to secondary clusters to enable them to forward writes to an [`aws_rds_global_cluster`](/docs/providers/aws/r/rds_global_cluster.html)'s primary cluster. See the [Aurora User Guide documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database-write-forwarding.html) for more information.
+* `enable_http_endpoint` - (Optional) Enable HTTP endpoint (data API). Only valid when `engine_mode` is set to `serverless`.
+* `enabled_cloudwatch_logs_exports` - (Optional) Set of log types to export to CloudWatch. If omitted, no logs will be exported. The following log types are supported: `audit`, `error`, `general`, `slowquery`, `postgresql` (PostgreSQL).
+* `engine_mode` - (Optional) Database engine mode. Valid values: `global` (only valid for Aurora MySQL 1.21 and earlier), `multimaster`, `parallelquery`, `provisioned`, `serverless`. Defaults to `provisioned`. See the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/aurora-serverless.html) for limitations when using `serverless`.
+* `engine_version` - (Optional) Database engine version. Updating this argument results in an outage. See the [Aurora MySQL](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Updates.html) and [Aurora Postgres](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraPostgreSQL.Updates.html) documentation for your configured engine to determine this value, or run `aws rds describe-db-engine-versions`. For example with Aurora MySQL 2, a potential value for this argument is `5.7.mysql_aurora.2.03.2`. The value can contain a partial version where supported by the API. The actual engine version used is returned in the attribute `engine_version_actual`; see [Attribute Reference](#attribute-reference) below.
+* `engine` - (Required) Name of the database engine to be used for this DB cluster. Valid Values: `aurora-mysql`, `aurora-postgresql`, `mysql`, `postgres`. (Note that `mysql` and `postgres` are Multi-AZ RDS clusters).
+* `final_snapshot_identifier` - (Optional) Name of your final DB snapshot when this DB cluster is deleted. If omitted, no final snapshot will be made.
+* `global_cluster_identifier` - (Optional) Global cluster identifier specified on [`aws_rds_global_cluster`](/docs/providers/aws/r/rds_global_cluster.html).
+* `iam_database_authentication_enabled` - (Optional) Specifies whether or not mappings of AWS Identity and Access Management (IAM) accounts to database accounts are enabled. Please see [AWS Documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/UsingWithRDS.IAMDBAuth.html) for availability and limitations.
+* `iam_roles` - (Optional) List of ARNs for the IAM roles to associate to the RDS Cluster.
+* `iops` - (Optional) Amount of Provisioned IOPS (input/output operations per second) to be initially allocated for each DB instance in the Multi-AZ DB cluster. For information about valid Iops values, see [Amazon RDS Provisioned IOPS storage to improve performance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#USER_PIOPS) in the Amazon RDS User Guide. (This setting is required to create a Multi-AZ DB cluster). Must be a multiple between .5 and 50 of the storage amount for the DB cluster.
+* `kms_key_id` - (Optional) ARN for the KMS encryption key. When specifying `kms_key_id`, `storage_encrypted` needs to be set to true.
+* `manage_master_user_password` - (Optional) Set to true to allow RDS to manage the master user password in Secrets Manager. Cannot be set if `master_password` is provided.
+* `master_password` - (Required unless `manage_master_user_password` is set to true or unless a `snapshot_identifier` or `replication_source_identifier` is provided or unless a `global_cluster_identifier` is provided when the cluster is the "secondary" cluster of a global database) Password for the master DB user. Note that this may show up in logs, and it will be stored in the state file. Please refer to the [RDS Naming Constraints][5]. Cannot be set if `manage_master_user_password` is set to `true`.
+* `master_user_secret_kms_key_id` - (Optional) Amazon Web Services KMS key identifier; this can be the key ARN, key ID, alias ARN, or alias name for the KMS key. To use a KMS key in a different Amazon Web Services account, specify the key ARN or alias ARN. If not specified, the default KMS key for your Amazon Web Services account is used.
+* `master_username` - (Required unless a `snapshot_identifier` or `replication_source_identifier` is provided or unless a `global_cluster_identifier` is provided when the cluster is the "secondary" cluster of a global database) Username for the master DB user. Please refer to the [RDS Naming Constraints][5]. This argument does not support in-place updates and cannot be changed during a restore from snapshot.
+* `network_type` - (Optional) Network type of the cluster. Valid values: `IPV4`, `DUAL`.
+* `port` - (Optional) Port on which the DB accepts connections.
+* `preferred_backup_window` - (Optional) Daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter. Time in UTC. Default: A 30-minute window selected at random from an 8-hour block of time per region. E.g., 04:00-09:00
+* `preferred_maintenance_window` - (Optional) Weekly time range during which system maintenance can occur, in UTC, e.g., `wed:04:00-wed:04:30`.
+* `replication_source_identifier` - (Optional) ARN of a source DB cluster or DB instance if this DB cluster is to be created as a Read Replica. If DB Cluster is part of a Global Cluster, use the [`lifecycle` configuration block `ignore_changes` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to prevent Terraform from showing differences for this argument instead of configuring this value.
+* `restore_to_point_in_time` - (Optional) Nested attribute for [point in time restore](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_PIT.html). More details below.
+* `scaling_configuration` - (Optional) Nested attribute with scaling properties. Only valid when `engine_mode` is set to `serverless`. More details below.
+* `serverlessv2_scaling_configuration` - (Optional) Nested attribute with scaling properties for ServerlessV2. Only valid when `engine_mode` is set to `provisioned`. More details below.
+* `skip_final_snapshot` - (Optional) Determines whether a final DB snapshot is created before the DB cluster is deleted. If true is specified, no DB snapshot is created. If false is specified, a DB snapshot is created before the DB cluster is deleted, using the value from `final_snapshot_identifier`. Default is `false`. 
+* `snapshot_identifier` - (Optional) Specifies the snapshot from which to create this cluster. You can use either the name or ARN when specifying a DB cluster snapshot, or the ARN when specifying a DB snapshot. Conflicts with `global_cluster_identifier`. Clusters cannot be restored from snapshot **and** joined to an existing global cluster in a single operation. See the [AWS documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database-getting-started.html#aurora-global-database.use-snapshot) or the [Global Cluster Restored From Snapshot example](#global-cluster-restored-from-snapshot) for instructions on building a global cluster starting with a snapshot.
+* `source_region` - (Optional) The source region for an encrypted replica DB cluster.
+* `storage_encrypted` - (Optional) Specifies whether the DB cluster is encrypted. The default is `false` for `provisioned` `engine_mode` and `true` for `serverless` `engine_mode`. When restoring an unencrypted `snapshot_identifier`, the `kms_key_id` argument must be provided to encrypt the restored cluster. Terraform will only perform drift detection if a configuration value is provided.
+* `storage_type` - (Optional) (Required for Multi-AZ DB clusters) (Forces new for Multi-AZ DB clusters) Specifies the storage type to be associated with the DB cluster. For Aurora DB clusters, `storage_type` modifications can be done in-place. For Multi-AZ DB Clusters, the `iops` argument must also be set. Valid values are: `""`, `aurora-iopt1` (Aurora DB Clusters); `io1` (Multi-AZ DB Clusters). Default: `""` (Aurora DB Clusters); `io1` (Multi-AZ DB Clusters).
+* `tags` - (Optional) A map of tags to assign to the DB cluster. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_security_group_ids` - (Optional) List of VPC security groups to associate with the Cluster.
+
+### S3 Import Options
+
+Full details on the core parameters and impacts are in the API Docs: [RestoreDBClusterFromS3](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_RestoreDBClusterFromS3.html). Requires that the S3 bucket be in the same region as the RDS cluster you're trying to create. Sample:
+
+~> **NOTE:** RDS Aurora Serverless does not support loading data from S3, so it's not possible to directly use `engine_mode` set to `serverless` with `s3_import`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RdsCluster(self, "db",
+            engine="aurora",
+            s3_import=RdsClusterS3Import(
+                bucket_name="mybucket",
+                bucket_prefix="backups",
+                ingestion_role="arn:aws:iam::1234567890:role/role-xtrabackup-rds-restore",
+                source_engine="mysql",
+                source_engine_version="5.6"
+            )
+        )
+```
+
+* `bucket_name` - (Required) Bucket name where your backup is stored.
+* `bucket_prefix` - (Optional) Can be blank, but is the path to your backup.
+* `ingestion_role` - (Required) Role applied to load the data. 
+* `source_engine` - (Required) Source engine for the backup +* `source_engine_version` - (Required) Version of the source engine used to make the backup + +This will not recreate the resource if the S3 object changes in some way. It's only used to initialize the database. This only works currently with the aurora engine. See AWS for currently supported engines and options. See [Aurora S3 Migration Docs](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Migrating.ExtMySQL.html#AuroraMySQL.Migrating.ExtMySQL.S3). + +### restore_to_point_in_time Argument Reference + +~> **NOTE:** The DB cluster is created from the source DB cluster with the same configuration as the original DB cluster, except that the new DB cluster is created with the default DB security group. Thus, the following arguments should only be specified with the source DB cluster's respective values: `database_name`, `master_username`, `storage_encrypted`, `replication_source_identifier`, and `source_region`. + +Example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.rds_cluster import RdsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, engine): + super().__init__(scope, name) + RdsCluster(self, "example-clone", + restore_to_point_in_time=RdsClusterRestoreToPointInTime( + restore_type="copy-on-write", + source_cluster_identifier="example", + use_latest_restorable_time=True + ), + engine=engine + ) +``` + +* `source_cluster_identifier` - (Required) Identifier of the source database cluster from which to restore. +* `restore_type` - (Optional) Type of restore to be performed. + Valid options are `full-copy` (default) and `copy-on-write`. +* `use_latest_restorable_time` - (Optional) Set to true to restore the database cluster to the latest restorable backup time. Defaults to false. Conflicts with `restore_to_time`. +* `restore_to_time` - (Optional) Date and time in UTC format to restore the database cluster to. Conflicts with `use_latest_restorable_time`. + +### scaling_configuration Argument Reference + +~> **NOTE:** `scaling_configuration` configuration is only valid when `engine_mode` is set to `serverless`. + +Example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.rds_cluster import RdsCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, engine): + super().__init__(scope, name) + RdsCluster(self, "example", + engine_mode="serverless", + scaling_configuration=RdsClusterScalingConfiguration( + auto_pause=True, + max_capacity=256, + min_capacity=2, + seconds_until_auto_pause=300, + timeout_action="ForceApplyCapacityChange" + ), + engine=engine + ) +``` + +* `auto_pause` - (Optional) Whether to enable automatic pause. A DB cluster can be paused only when it's idle (it has no connections). If a DB cluster is paused for more than seven days, the DB cluster might be backed up with a snapshot. In this case, the DB cluster is restored when there is a request to connect to it. Defaults to `true`. 
+* `max_capacity` - (Optional) Maximum capacity for an Aurora DB cluster in `serverless` DB engine mode. The maximum capacity must be greater than or equal to the minimum capacity. Valid Aurora MySQL capacity values are `1`, `2`, `4`, `8`, `16`, `32`, `64`, `128`, `256`. Valid Aurora PostgreSQL capacity values are `2`, `4`, `8`, `16`, `32`, `64`, `192`, and `384`. Defaults to `16`.
+* `min_capacity` - (Optional) Minimum capacity for an Aurora DB cluster in `serverless` DB engine mode. The minimum capacity must be less than or equal to the maximum capacity. Valid Aurora MySQL capacity values are `1`, `2`, `4`, `8`, `16`, `32`, `64`, `128`, `256`. Valid Aurora PostgreSQL capacity values are `2`, `4`, `8`, `16`, `32`, `64`, `192`, and `384`. Defaults to `1`.
+* `seconds_until_auto_pause` - (Optional) Time, in seconds, before an Aurora DB cluster in serverless mode is paused. Valid values are `300` through `86400`. Defaults to `300`.
+* `timeout_action` - (Optional) Action to take when the timeout is reached. Valid values: `ForceApplyCapacityChange`, `RollbackCapacityChange`. Defaults to `RollbackCapacityChange`. See [documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless.how-it-works.html#aurora-serverless.how-it-works.timeout-action).
+
+### serverlessv2_scaling_configuration Argument Reference
+
+~> **NOTE:** `serverlessv2_scaling_configuration` configuration is only valid when `engine_mode` is set to `provisioned`.
+
+Example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine):
+        super().__init__(scope, name)
+        RdsCluster(self, "example",
+            serverlessv2_scaling_configuration=RdsClusterServerlessv2ScalingConfiguration(
+                max_capacity=128,
+                min_capacity=0.5
+            ),
+            engine=engine
+        )
+```
+
+* `max_capacity` - (Required) Maximum capacity for an Aurora DB cluster in `provisioned` DB engine mode. The maximum capacity must be greater than or equal to the minimum capacity. Valid capacity values are in a range of `0.5` up to `128` in steps of `0.5`.
+* `min_capacity` - (Required) Minimum capacity for an Aurora DB cluster in `provisioned` DB engine mode. The minimum capacity must be less than or equal to the maximum capacity. Valid capacity values are in a range of `0.5` up to `128` in steps of `0.5`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of cluster
+* `id` - RDS Cluster Identifier
+* `cluster_identifier` - RDS Cluster Identifier
+* `cluster_resource_id` - RDS Cluster Resource ID
+* `cluster_members` – List of RDS Instances that are a part of this cluster
+* `availability_zones` - Availability zone of the instance
+* `backup_retention_period` - Backup retention period
+* `preferred_backup_window` - Daily time range during which the backups happen
+* `preferred_maintenance_window` - Maintenance window
+* `endpoint` - DNS address of the RDS instance
+* `reader_endpoint` - Read-only endpoint for the Aurora cluster, automatically
+load-balanced across replicas
+* `engine` - Database engine
+* `engine_version_actual` - Running version of the database. 
+* `database_name` - Database name +* `port` - Database port +* `master_username` - Master username for the database +* `master_user_secret` - Block that specifies the master user secret. Only available when `manage_master_user_password` is set to true. [Documented below](#master_user_secret). +* `storage_encrypted` - Specifies whether the DB cluster is encrypted +* `replication_source_identifier` - ARN of the source DB cluster or DB instance if this DB cluster is created as a Read Replica. +* `hosted_zone_id` - Route53 Hosted Zone ID of the endpoint +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +[1]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.Replication.html +[2]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Aurora.html +[3]: /docs/providers/aws/r/rds_cluster_instance.html +[4]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_UpgradeDBInstance.Maintenance.html +[5]: http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Limits.html#RDS_Limits.Constraints + +### master_user_secret + +The `master_user_secret` configuration block supports the following attributes: + +* `kms_key_id` - Amazon Web Services KMS key identifier that is used to encrypt the secret. +* `secret_arn` - Amazon Resource Name (ARN) of the secret. +* `secret_status` - Status of the secret. Valid Values: `creating` | `active` | `rotating` | `impaired`. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `120m`) +- `update` - (Default `120m`) +- `delete` - (Default `120m`) +any cleanup task during the destroying process. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Clusters using the `cluster_identifier`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import RDS Clusters using the `cluster_identifier`. For example: + +```console +% terraform import aws_rds_cluster.aurora_cluster aurora-prod-cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/rds_cluster_activity_stream.html.markdown b/website/docs/cdktf/python/r/rds_cluster_activity_stream.html.markdown new file mode 100644 index 00000000000..dd3a020b36e --- /dev/null +++ b/website/docs/cdktf/python/r/rds_cluster_activity_stream.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_cluster_activity_stream" +description: |- + Manages RDS Aurora Cluster Database Activity Streams +--- + + + +# Resource: aws_rds_cluster_activity_stream + +Manages RDS Aurora Cluster Database Activity Streams. + +Database Activity Streams have some limits and requirements, refer to the [Monitoring Amazon Aurora using Database Activity Streams][1] documentation for detailed limitations and requirements. + +~> **Note:** This resource always calls the RDS [`StartActivityStream`][2] API with the `ApplyImmediately` parameter set to `true`. 
This is because Terraform needs the activity stream to be started in order to retrieve the associated attributes.
+
+~> **Note:** This resource depends on having at least one `aws_rds_cluster_instance` created. To avoid race conditions when all resources are being created together, add an explicit resource reference using the [resource `depends_on` meta-argument](https://www.terraform.io/docs/configuration/resources.html#depends_on-explicit-resource-dependencies).
+
+~> **Note:** This resource is available in all regions except the following: `cn-north-1`, `cn-northwest-1`, `us-gov-east-1`, `us-gov-west-1`.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_key import KmsKey
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_cluster_activity_stream import RdsClusterActivityStream
+from imports.aws.rds_cluster_instance import RdsClusterInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        default_var = KmsKey(self, "default",
+            description="AWS KMS Key to encrypt Database Activity Stream"
+        )
+        aws_rds_cluster_default = RdsCluster(self, "default_1",
+            availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"],
+            cluster_identifier="aurora-cluster-demo",
+            database_name="mydb",
+            engine="aurora-postgresql",
+            engine_version="13.4",
+            master_password="mustbeeightcharacters",
+            master_username="foo"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_default.override_logical_id("default")
+        aws_rds_cluster_instance_default = RdsClusterInstance(self, "default_2",
+            cluster_identifier=Token.as_string(aws_rds_cluster_default.cluster_identifier),
+            engine=Token.as_string(aws_rds_cluster_default.engine),
+            identifier="aurora-instance-demo",
+            instance_class="db.r6g.large"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_instance_default.override_logical_id("default")
+        aws_rds_cluster_activity_stream_default = RdsClusterActivityStream(self, "default_3",
+            depends_on=[aws_rds_cluster_instance_default],
+            kms_key_id=default_var.key_id,
+            mode="async",
+            resource_arn=Token.as_string(aws_rds_cluster_default.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_activity_stream_default.override_logical_id("default")
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation][3].
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required, Forces new resources) The Amazon Resource Name (ARN) of the DB cluster.
+* `mode` - (Required, Forces new resources) Specifies the mode of the database activity stream. Database events such as a change or access generate an activity stream event. The database session can handle these events either synchronously or asynchronously. One of: `sync`, `async`.
+* `kms_key_id` - (Required, Forces new resources) The AWS KMS key identifier for encrypting messages in the database activity stream. The AWS KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key.
+* `engine_native_audit_fields_included` - (Optional, Forces new resources) Specifies whether the database activity stream includes engine-native audit fields. This option only applies to an Oracle DB instance. By default, no engine-native audit fields are included. Defaults to `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the DB cluster.
+* `kinesis_stream_name` - The name of the Amazon Kinesis data stream to be used for the database activity stream.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Aurora Cluster Database Activity Streams using the `resource_arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import RDS Aurora Cluster Database Activity Streams using the `resource_arn`. For example:
+
+```console
+% terraform import aws_rds_cluster_activity_stream.default arn:aws:rds:us-west-2:123456789012:cluster:aurora-cluster-demo
+```
+
+[1]: https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/DBActivityStreams.html
+[2]: https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_StartActivityStream.html
+[3]: https://docs.aws.amazon.com/cli/latest/reference/rds/start-activity-stream.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rds_cluster_endpoint.html.markdown b/website/docs/cdktf/python/r/rds_cluster_endpoint.html.markdown
new file mode 100644
index 00000000000..790863c6670
--- /dev/null
+++ b/website/docs/cdktf/python/r/rds_cluster_endpoint.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster_endpoint"
+description: |-
+  Manages an RDS Aurora Cluster Endpoint
+---
+
+
+
+# Resource: aws_rds_cluster_endpoint
+
+Manages an RDS Aurora Cluster Endpoint. See the [User Guide][1] for more information.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
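+# The configuration below creates a cluster with three instances and two custom
+# READER endpoints: one with fixed static members and one exclusion-based.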
+#
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_cluster_endpoint import RdsClusterEndpoint
+from imports.aws.rds_cluster_instance import RdsClusterInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine):
+        super().__init__(scope, name)
+        default_var = RdsCluster(self, "default",
+            availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"],
+            backup_retention_period=5,
+            cluster_identifier="aurora-cluster-demo",
+            database_name="mydb",
+            master_password="bar",
+            master_username="foo",
+            preferred_backup_window="07:00-09:00",
+            engine=engine
+        )
+        test1 = RdsClusterInstance(self, "test1",
+            apply_immediately=True,
+            cluster_identifier=default_var.id,
+            engine=default_var.engine,
+            engine_version=default_var.engine_version,
+            identifier="test1",
+            instance_class="db.t2.small"
+        )
+        test2 = RdsClusterInstance(self, "test2",
+            apply_immediately=True,
+            cluster_identifier=default_var.id,
+            engine=default_var.engine,
+            engine_version=default_var.engine_version,
+            identifier="test2",
+            instance_class="db.t2.small"
+        )
+        test3 = RdsClusterInstance(self, "test3",
+            apply_immediately=True,
+            cluster_identifier=default_var.id,
+            engine=default_var.engine,
+            engine_version=default_var.engine_version,
+            identifier="test3",
+            instance_class="db.t2.small"
+        )
+        RdsClusterEndpoint(self, "eligible",
+            cluster_endpoint_identifier="reader",
+            cluster_identifier=default_var.id,
+            custom_endpoint_type="READER",
+            excluded_members=[test1.id, test2.id]
+        )
+        RdsClusterEndpoint(self, "static",
+            cluster_endpoint_identifier="static",
+            cluster_identifier=default_var.id,
+            custom_endpoint_type="READER",
+            static_members=[test1.id, test3.id]
+        )
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-cluster-endpoint.html).
+
+This resource supports the following arguments:
+
+* `cluster_identifier` - (Required, Forces new resources) The cluster identifier.
+* `cluster_endpoint_identifier` - (Required, Forces new resources) The identifier to use for the new endpoint. This parameter is stored as a lowercase string.
+* `custom_endpoint_type` - (Required) The type of the endpoint. One of: `READER`, `ANY`.
+* `static_members` - (Optional) List of DB instance identifiers that are part of the custom endpoint group. Conflicts with `excluded_members`.
+* `excluded_members` - (Optional) List of DB instance identifiers that aren't part of the custom endpoint group. All other eligible instances are reachable through the custom endpoint. Only relevant if the list of static members is empty. Conflicts with `static_members`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of cluster
+* `id` - The RDS Cluster Endpoint Identifier
+* `endpoint` - A custom endpoint for the Aurora cluster
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
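+
+The exported `endpoint` attribute can be consumed elsewhere in the stack, for example as a stack output. A minimal sketch (not produced by `cdktf convert`), assuming the `static` endpoint from the example above is captured in a hypothetical variable named `static`:
+
+```python
+# A sketch under the assumption that the RdsClusterEndpoint from the example
+# is bound to a variable inside the stack's __init__, e.g.
+#   static = RdsClusterEndpoint(self, "static", ...)
+from cdktf import TerraformOutput
+
+# Surface the custom reader endpoint's DNS name as a stack output so that
+# other tooling (or `terraform output`) can consume it.
+TerraformOutput(self, "static_reader_endpoint_dns",
+    value=static.endpoint,
+    description="DNS address of the static custom reader endpoint"
+)
+```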
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Cluster Endpoints using the `cluster_endpoint_identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import RDS Cluster Endpoints using the `cluster_endpoint_identifier`. For example:
+
+```console
+% terraform import aws_rds_cluster_endpoint.custom_reader aurora-prod-cluster-custom-reader
+```
+
+[1]: https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/Aurora.Overview.Endpoints.html#Aurora.Endpoints.Cluster
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rds_cluster_instance.html.markdown b/website/docs/cdktf/python/r/rds_cluster_instance.html.markdown
new file mode 100644
index 00000000000..a9b25da8ed7
--- /dev/null
+++ b/website/docs/cdktf/python/r/rds_cluster_instance.html.markdown
@@ -0,0 +1,155 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster_instance"
+description: |-
+  Provides an RDS Cluster Instance Resource
+---
+
+
+
+# Resource: aws_rds_cluster_instance
+
+Provides an RDS Cluster Instance Resource. A Cluster Instance Resource defines
+attributes that are specific to a single instance in an [RDS Cluster][3],
+specifically running Amazon Aurora.
+
+Unlike other RDS resources that support replication, with Amazon Aurora you do
+not designate a primary and subsequent replicas. Instead, you simply add RDS
+Instances and Aurora manages the replication. You can use the [count][5]
+meta-argument to make multiple instances and join them all to the same RDS
+Cluster, or you may specify different Cluster Instance resources with various
+`instance_class` sizes.
+
+For more information on Amazon Aurora, see [Aurora on Amazon RDS][2] in the Amazon RDS User Guide.
+
+~> **NOTE:** Deletion Protection from the RDS service can only be enabled at the cluster level, not for individual cluster instances. You can still add the [`prevent_destroy` lifecycle behavior](https://www.terraform.io/language/meta-arguments/lifecycle#prevent_destroy) to your Terraform resource configuration if you desire protection from accidental deletion.
+
+~> **NOTE:** `aurora` is no longer a valid `engine` because of [Amazon Aurora's MySQL-Compatible Edition version 1 end of life](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/Aurora.MySQL56.EOL.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformCount, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_cluster_instance import RdsClusterInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine):
+        super().__init__(scope, name)
+        default_var = RdsCluster(self, "default",
+            availability_zones=["us-west-2a", "us-west-2b", "us-west-2c"],
+            cluster_identifier="aurora-cluster-demo",
+            database_name="mydb",
+            master_password="barbut8chars",
+            master_username="foo",
+            engine=engine
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        cluster_instances_count = TerraformCount.of(Token.as_number("2"))
+        RdsClusterInstance(self, "cluster_instances",
+            cluster_identifier=default_var.id,
+            engine=default_var.engine,
+            engine_version=default_var.engine_version,
+            identifier="aurora-cluster-demo-${" + cluster_instances_count.index + "}",
+            instance_class="db.r4.large",
+            count=cluster_instances_count
+        )
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-instance.html).
+
+This resource supports the following arguments:
+
+* `apply_immediately` - (Optional) Specifies whether any database modifications are applied immediately, or during the next maintenance window. Default is `false`.
+* `auto_minor_version_upgrade` - (Optional) Indicates that minor engine upgrades will be applied automatically to the DB instance during the maintenance window. Default `true`.
+* `availability_zone` - (Optional, Computed, Forces new resource) EC2 Availability Zone that the DB instance is created in. See the [docs](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html) for details.
+* `ca_cert_identifier` - (Optional) Identifier of the CA certificate for the DB instance.
+* `cluster_identifier` - (Required, Forces new resource) Identifier of the [`aws_rds_cluster`](/docs/providers/aws/r/rds_cluster.html) in which to launch this instance.
+* `copy_tags_to_snapshot` - (Optional, boolean) Indicates whether to copy all of the user-defined tags from the DB instance to snapshots of the DB instance. Default `false`.
+* `db_parameter_group_name` - (Optional) Name of the DB parameter group to associate with this instance.
+* `db_subnet_group_name` - (Required if `publicly_accessible = false`, Optional otherwise, Forces new resource) DB subnet group to associate with this DB instance. **NOTE:** This must match the `db_subnet_group_name` of the attached [`aws_rds_cluster`](/docs/providers/aws/r/rds_cluster.html).
+* `engine_version` - (Optional) Database engine version.
+* `engine` - (Required, Forces new resource) Name of the database engine to be used for the RDS instance. Valid Values: `aurora-mysql`, `aurora-postgresql`, `mysql`, `postgres`.
+* `identifier_prefix` - (Optional, Forces new resource) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `identifier` - (Optional, Forces new resource) Identifier for the RDS instance; if omitted, Terraform will assign a random, unique identifier.
+* `instance_class` - (Required) Instance class to use.
+  For details on CPU and memory, see [Scaling Aurora DB Instances][4]. Aurora uses `db.*` instance classes/types. Please see the [AWS Documentation][7] for currently available instance classes and complete details.
+* `monitoring_interval` - (Optional) Interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. To disable collecting Enhanced Monitoring metrics, specify `0`. The default is `0`. Valid Values: `0`, `1`, `5`, `10`, `15`, `30`, `60`.
+* `monitoring_role_arn` - (Optional) ARN for the IAM role that permits RDS to send enhanced monitoring metrics to CloudWatch Logs. See the [AWS Documentation](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Monitoring.html) for more information on the IAM permissions needed to allow Enhanced Monitoring for RDS Instances.
+* `performance_insights_enabled` - (Optional) Specifies whether Performance Insights is enabled.
+* `performance_insights_kms_key_id` - (Optional) ARN for the KMS key to encrypt Performance Insights data. When specifying `performance_insights_kms_key_id`, `performance_insights_enabled` needs to be set to `true`.
+* `performance_insights_retention_period` - (Optional) Amount of time in days to retain Performance Insights data. Valid values are `7`, `731` (2 years), or a multiple of `31`. When specifying `performance_insights_retention_period`, `performance_insights_enabled` needs to be set to `true`. Defaults to `7`.
+* `preferred_backup_window` - (Optional) Daily time range during which automated backups are created if automated backups are enabled. For example: `04:00-09:00`. **NOTE:** If `preferred_backup_window` is set at the cluster level, this argument **must** be omitted.
+* `preferred_maintenance_window` - (Optional) Window to perform maintenance in. Syntax: `ddd:hh24:mi-ddd:hh24:mi`. For example: `Mon:00:00-Mon:03:00`.
+* `promotion_tier` - (Optional) Failover priority setting at the instance level. Readers with a lower tier have a higher priority to be promoted to writer. Default `0`.
+* `publicly_accessible` - (Optional) Bool to control if the instance is publicly accessible. Default `false`. See the documentation on [Creating DB Instances][6] for more details on controlling this property.
+* `tags` - (Optional) Map of tags to assign to the instance. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the cluster instance
+* `cluster_identifier` - RDS Cluster Identifier
+* `identifier` - Instance identifier
+* `id` - Instance identifier
+* `writer` - Boolean indicating if this instance is writable. `False` indicates this instance is a read replica.
+* `availability_zone` - Availability zone of the instance
+* `endpoint` - DNS address for this instance. May not be writable
+* `engine` - Database engine
+* `engine_version_actual` - Database engine version
+* `port` - Database port
+* `storage_encrypted` - Specifies whether the DB cluster is encrypted.
+* `kms_key_id` - ARN for the KMS encryption key if one is set to the cluster.
+* `network_type` - Network type of the DB instance.
+* `dbi_resource_id` - Region-unique, immutable identifier for the DB instance.
+* `performance_insights_enabled` - Specifies whether Performance Insights is enabled.
+* `performance_insights_kms_key_id` - ARN for the KMS encryption key used by Performance Insights. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +[2]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Aurora.html +[3]: /docs/providers/aws/r/rds_cluster.html +[4]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Aurora.Managing.html +[5]: https://www.terraform.io/docs/configuration/meta-arguments/count.html +[6]: https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html +[7]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `90m`) +- `update` - (Default `90m`) +- `delete` - (Default `90m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Cluster Instances using the `identifier`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import RDS Cluster Instances using the `identifier`. For example: + +```console +% terraform import aws_rds_cluster_instance.prod_instance_1 aurora-cluster-instance-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/rds_cluster_parameter_group.markdown b/website/docs/cdktf/python/r/rds_cluster_parameter_group.markdown new file mode 100644 index 00000000000..472b6450f2f --- /dev/null +++ b/website/docs/cdktf/python/r/rds_cluster_parameter_group.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_cluster_parameter_group" +description: |- + Provides an RDS DB cluster parameter group resource. +--- + + + +# Resource: aws_rds_cluster_parameter_group + +Provides an RDS DB cluster parameter group resource. Documentation of the available parameters for various Aurora engines can be found at: + +* [Aurora MySQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Reference.html) +* [Aurora PostgreSQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraPostgreSQL.Reference.html) + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.rds_cluster_parameter_group import RdsClusterParameterGroup, RdsClusterParameterGroupParameter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RdsClusterParameterGroup(self, "default",
+            description="RDS default cluster parameter group",
+            family="aurora5.6",
+            name="rds-cluster-pg",
+            parameter=[RdsClusterParameterGroupParameter(
+                name="character_set_server",
+                value="utf8"
+            ), RdsClusterParameterGroupParameter(
+                name="character_set_client",
+                value="utf8"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the DB cluster parameter group. If omitted, Terraform will assign a random, unique name.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `family` - (Required) The family of the DB cluster parameter group.
+* `description` - (Optional) The description of the DB cluster parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of DB parameters to apply. Note that parameters may differ from one family to another. The full list of parameters can be discovered via [`aws rds describe-db-cluster-parameters`](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-cluster-parameters.html) after initial creation of the group.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the DB parameter.
+* `value` - (Required) The value of the DB parameter.
+* `apply_method` - (Optional) "immediate" (default), or "pending-reboot". Some
+  engines can't apply some parameters without a reboot, and you will need to
+  specify "pending-reboot" here.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The DB cluster parameter group name.
+* `arn` - The ARN of the DB cluster parameter group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Cluster Parameter Groups using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import RDS Cluster Parameter Groups using the `name`.
+For example:
+
+```console
+% terraform import aws_rds_cluster_parameter_group.cluster_pg production-pg-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rds_cluster_role_association.html.markdown b/website/docs/cdktf/python/r/rds_cluster_role_association.html.markdown
new file mode 100644
index 00000000000..d5f2435396f
--- /dev/null
+++ b/website/docs/cdktf/python/r/rds_cluster_role_association.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster_role_association"
+description: |-
+  Manages an RDS DB Cluster association with an IAM Role.
+---
+
+
+
+# Resource: aws_rds_cluster_role_association
+
+Manages an RDS DB Cluster association with an IAM Role. Example use cases:
+
+* [Creating an IAM Role to Allow Amazon Aurora to Access AWS Services](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Integrating.Authorizing.IAM.CreateRole.html)
+* [Importing Amazon S3 Data into an RDS PostgreSQL DB Cluster](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PostgreSQL.S3Import.html)
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster_role_association import RdsClusterRoleAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RdsClusterRoleAssociation(self, "example",
+            db_cluster_identifier=Token.as_string(aws_rds_cluster_example.id),
+            feature_name="S3_INTEGRATION",
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `db_cluster_identifier` - (Required) DB Cluster Identifier to associate with the IAM Role.
+* `feature_name` - (Required) Name of the feature for association. This can be found in the AWS documentation relevant to the integration, or the full list is available in the `SupportedFeatureNames` list returned by [AWS CLI rds describe-db-engine-versions](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-engine-versions.html).
+* `role_arn` - (Required) Amazon Resource Name (ARN) of the IAM Role to associate with the DB Cluster.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - DB Cluster Identifier and IAM Role ARN separated by a comma (`,`)
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_rds_cluster_role_association` using the DB Cluster Identifier and IAM Role ARN separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_rds_cluster_role_association` using the DB Cluster Identifier and IAM Role ARN separated by a comma (`,`).
+For example:
+
+```console
+% terraform import aws_rds_cluster_role_association.example my-db-cluster,arn:aws:iam::123456789012:role/my-role
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rds_export_task.html.markdown b/website/docs/cdktf/python/r/rds_export_task.html.markdown
new file mode 100644
index 00000000000..c0842f704de
--- /dev/null
+++ b/website/docs/cdktf/python/r/rds_export_task.html.markdown
@@ -0,0 +1,204 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_export_task"
+description: |-
+  Terraform resource for managing an AWS RDS (Relational Database) Export Task.
+---
+
+
+
+# Resource: aws_rds_export_task
+
+Terraform resource for managing an AWS RDS (Relational Database) Export Task.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_export_task import RdsExportTask
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RdsExportTask(self, "example",
+            export_task_identifier="example",
+            iam_role_arn=Token.as_string(aws_iam_role_example.arn),
+            kms_key_id=Token.as_string(aws_kms_key_example.arn),
+            s3_bucket_name=Token.as_string(aws_s3_bucket_example.id),
+            source_arn=Token.as_string(aws_db_snapshot_example.db_snapshot_arn)
+        )
+```
+
+### Complete Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement
+from imports.aws.db_instance import DbInstance
+from imports.aws.db_snapshot import DbSnapshot
+from imports.aws.iam_policy import IamPolicy
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+from imports.aws.kms_key import KmsKey
+from imports.aws.rds_export_task import RdsExportTask
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = DbInstance(self, "example",
+            allocated_storage=10,
+            db_name="test",
+            engine="mysql",
+            engine_version="5.7",
+            identifier="example",
+            instance_class="db.t3.micro",
+            parameter_group_name="default.mysql5.7",
+            password="foobarbaz",
+            skip_final_snapshot=True,
+            username="foo"
+        )
+        aws_db_snapshot_example = DbSnapshot(self, "example_1",
+            db_instance_identifier=example.identifier,
+            db_snapshot_identifier="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_db_snapshot_example.override_logical_id("example") + aws_iam_role_example = IamRole(self, "example_2", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "export.rds.amazonaws.com" + }, + "Sid": "" + } + ], + "Version": "2012-10-17" + })), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_kms_key_example = KmsKey(self, "example_3", + deletion_window_in_days=10 + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kms_key_example.override_logical_id("example") + aws_s3_bucket_example = S3Bucket(self, "example_4", + bucket="example", + force_destroy=True + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_example.override_logical_id("example") + aws_s3_bucket_acl_example = S3BucketAcl(self, "example_5", + acl="private", + bucket=Token.as_string(aws_s3_bucket_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_acl_example.override_logical_id("example") + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_6", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:ListAllMyBuckets"], + resources=["*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:GetBucketLocation", "s3:ListBucket"], + resources=[Token.as_string(aws_s3_bucket_example.arn)] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:GetObject", "s3:PutObject", "s3:DeleteObject"], + resources=["${" + aws_s3_bucket_example.arn + "}/*"] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_iam_policy_example = IamPolicy(self, "example_7", + name="example", + policy=Token.as_string(data_aws_iam_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_policy_example.override_logical_id("example") + aws_iam_role_policy_attachment_example = IamRolePolicyAttachment(self, "example_8", + policy_arn=Token.as_string(aws_iam_policy_example.arn), + role=Token.as_string(aws_iam_role_example.name) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_attachment_example.override_logical_id("example") + aws_rds_export_task_example = RdsExportTask(self, "example_9", + export_only=["database"], + export_task_identifier="example", + iam_role_arn=Token.as_string(aws_iam_role_example.arn), + kms_key_id=Token.as_string(aws_kms_key_example.arn), + s3_bucket_name=Token.as_string(aws_s3_bucket_example.id), + s3_prefix="my_prefix/example", + source_arn=Token.as_string(aws_db_snapshot_example.db_snapshot_arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_rds_export_task_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `export_task_identifier` - (Required) Unique identifier for the snapshot export task.
+* `iam_role_arn` - (Required) ARN of the IAM role to use for writing to the Amazon S3 bucket.
+* `kms_key_id` - (Required) ID of the Amazon Web Services KMS key to use to encrypt the snapshot.
+* `s3_bucket_name` - (Required) Name of the Amazon S3 bucket to export the snapshot to.
+* `source_arn` - (Required) Amazon Resource Name (ARN) of the snapshot to export.
+
+The following arguments are optional:
+
+* `export_only` - (Optional) Data to be exported from the snapshot. If this parameter is not provided, all the snapshot data is exported. Valid values are documented in the [AWS StartExportTask API documentation](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_StartExportTask.html#API_StartExportTask_RequestParameters).
+* `s3_prefix` - (Optional) Amazon S3 bucket prefix to use as the file name and path of the exported snapshot.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `failure_cause` - Reason the export failed, if it failed.
+* `id` - Unique identifier for the snapshot export task (same value as `export_task_identifier`).
+* `percent_progress` - Progress of the snapshot export task as a percentage.
+* `snapshot_time` - Time that the snapshot was created.
+* `source_type` - Type of source for the export.
+* `status` - Status of the export task.
+* `task_end_time` - Time that the snapshot export task completed.
+* `task_start_time` - Time that the snapshot export task started.
+* `warning_message` - Warning about the snapshot export task, if any.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an RDS (Relational Database) Export Task using the `export_task_identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an RDS (Relational Database) Export Task using the `export_task_identifier`. For example:
+
+```console
+% terraform import aws_rds_export_task.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rds_global_cluster.html.markdown b/website/docs/cdktf/python/r/rds_global_cluster.html.markdown
new file mode 100644
index 00000000000..4509c088192
--- /dev/null
+++ b/website/docs/cdktf/python/r/rds_global_cluster.html.markdown
@@ -0,0 +1,319 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_global_cluster"
+description: |-
+  Manages an RDS Global Cluster
+---
+
+
+
+# Resource: aws_rds_global_cluster
+
+Manages an RDS Global Cluster, which is an Aurora global database spread across multiple regions. The global database contains a single primary cluster with read-write capability, and a read-only secondary cluster that receives data from the primary cluster through high-speed replication performed by the Aurora storage subsystem.
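+
+A condensed sketch of the basic wiring may help before the full multi-region examples below. It is illustrative only (provider aliases, instances, and real credentials are omitted, and all names are placeholders); the primary cluster simply joins the global database by referencing the global cluster's `id`:
+
+```python
+# Hypothetical sketch: a minimal global cluster plus a primary cluster joined to it.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_global_cluster import RdsGlobalCluster
+class GlobalClusterWiringSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        globe = RdsGlobalCluster(self, "globe",
+            engine="aurora-postgresql",
+            global_cluster_identifier="example-global"
+        )
+        # Secondary clusters in other regions follow the same pattern,
+        # each referencing globe.id via global_cluster_identifier.
+        RdsCluster(self, "primary",
+            cluster_identifier="example-primary",
+            engine=globe.engine,
+            global_cluster_identifier=globe.id,
+            master_password="change-me-securely",
+            master_username="admin",
+            skip_final_snapshot=True
+        )
+```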
+ +More information about Aurora global databases can be found in the [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html#aurora-global-database-creating). + +## Example Usage + +### New MySQL Global Cluster + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.rds_cluster import RdsCluster +from imports.aws.rds_cluster_instance import RdsClusterInstance +from imports.aws.rds_global_cluster import RdsGlobalCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = RdsGlobalCluster(self, "example", + database_name="example_db", + engine="aurora", + engine_version="5.6.mysql_aurora.1.22.2", + global_cluster_identifier="global-test" + ) + primary = RdsCluster(self, "primary", + cluster_identifier="test-primary-cluster", + database_name="example_db", + db_subnet_group_name="default", + engine=example.engine, + engine_version=example.engine_version, + global_cluster_identifier=example.id, + master_password="somepass123", + master_username="username", + provider=aws_primary + ) + aws_rds_cluster_instance_primary = RdsClusterInstance(self, "primary_2", + cluster_identifier=primary.id, + db_subnet_group_name="default", + engine=example.engine, + engine_version=example.engine_version, + identifier="test-primary-cluster-instance", + instance_class="db.r4.large", + provider=aws_primary + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_rds_cluster_instance_primary.override_logical_id("primary") + secondary = RdsCluster(self, "secondary", + cluster_identifier="test-secondary-cluster", + db_subnet_group_name="default", + depends_on=[aws_rds_cluster_instance_primary], + engine=example.engine, + engine_version=example.engine_version, + global_cluster_identifier=example.id, + provider=aws_secondary + ) + aws_rds_cluster_instance_secondary = RdsClusterInstance(self, "secondary_4", + cluster_identifier=secondary.id, + db_subnet_group_name="default", + engine=example.engine, + engine_version=example.engine_version, + identifier="test-secondary-cluster-instance", + instance_class="db.r4.large", + provider=aws_secondary + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_rds_cluster_instance_secondary.override_logical_id("secondary") +``` + +### New PostgreSQL Global Cluster + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.provider import AwsProvider
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_cluster_instance import RdsClusterInstance
+from imports.aws.rds_global_cluster import RdsGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary = AwsProvider(self, "aws",
+            alias="primary",
+            region="us-east-2"
+        )
+        secondary = AwsProvider(self, "aws_1",
+            alias="secondary",
+            region="us-east-1"
+        )
+        example = RdsGlobalCluster(self, "example",
+            database_name="example_db",
+            engine="aurora-postgresql",
+            engine_version="11.9",
+            global_cluster_identifier="global-test"
+        )
+        aws_rds_cluster_primary = RdsCluster(self, "primary",
+            cluster_identifier="test-primary-cluster",
+            database_name="example_db",
+            db_subnet_group_name="default",
+            engine=example.engine,
+            engine_version=example.engine_version,
+            global_cluster_identifier=example.id,
+            master_password="somepass123",
+            master_username="username",
+            provider=primary
+        )
+        aws_rds_cluster_instance_primary = RdsClusterInstance(self, "primary_4",
+            cluster_identifier=Token.as_string(aws_rds_cluster_primary.id),
+            db_subnet_group_name="default",
+            engine=example.engine,
+            engine_version=example.engine_version,
+            identifier="test-primary-cluster-instance",
+            instance_class="db.r4.large",
+            provider=primary
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_instance_primary.override_logical_id("primary")
+        aws_rds_cluster_secondary = RdsCluster(self, "secondary",
+            cluster_identifier="test-secondary-cluster",
+            db_subnet_group_name="default",
+            depends_on=[aws_rds_cluster_instance_primary],
+            engine=example.engine,
+            engine_version=example.engine_version,
+            global_cluster_identifier=example.id,
+            provider=secondary,
+            skip_final_snapshot=True
+        )
+        aws_rds_cluster_instance_secondary = RdsClusterInstance(self, "secondary_6",
+            cluster_identifier=Token.as_string(aws_rds_cluster_secondary.id),
+            db_subnet_group_name="default",
+            engine=example.engine,
+            engine_version=example.engine_version,
+            identifier="test-secondary-cluster-instance",
+            instance_class="db.r4.large",
+            provider=secondary
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_instance_secondary.override_logical_id("secondary")
+```
+
+### New Global Cluster From Existing DB Cluster
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_global_cluster import RdsGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, engine):
+        super().__init__(scope, name)
+        example = RdsCluster(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["global_cluster_identifier"]
+            ),
+            engine=engine
+        )
+        aws_rds_global_cluster_example = RdsGlobalCluster(self, "example_1",
+            force_destroy=True,
+            global_cluster_identifier="example",
+            source_db_cluster_identifier=example.arn
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_global_cluster_example.override_logical_id("example")
+```
+
+### Upgrading Engine Versions
+
+When you upgrade the version of an `aws_rds_global_cluster`, Terraform will attempt an in-place upgrade of the engine versions of all associated clusters. Since the `aws_rds_cluster` resource is being updated through the `aws_rds_global_cluster`, you are likely to get an error (`Provider produced inconsistent final plan`). To avoid this, use the `lifecycle` `ignore_changes` meta-argument on the `aws_rds_cluster`, as shown below.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_cluster import RdsCluster
+from imports.aws.rds_cluster_instance import RdsClusterInstance
+from imports.aws.rds_global_cluster import RdsGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = RdsGlobalCluster(self, "example",
+            engine="aurora-mysql",
+            engine_version="5.7.mysql_aurora.2.07.5",
+            global_cluster_identifier="kyivkharkiv"
+        )
+        primary = RdsCluster(self, "primary",
+            allow_major_version_upgrade=True,
+            apply_immediately=True,
+            cluster_identifier="odessadnipro",
+            database_name="totoro",
+            engine=example.engine,
+            engine_version=example.engine_version,
+            global_cluster_identifier=example.id,
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["engine_version"]
+            ),
+            master_password="satsukimae",
+            master_username="maesatsuki",
+            skip_final_snapshot=True
+        )
+        aws_rds_cluster_instance_primary = RdsClusterInstance(self, "primary_2",
+            apply_immediately=True,
+            cluster_identifier=primary.id,
+            engine=primary.engine,
+            engine_version=primary.engine_version,
+            identifier="donetsklviv",
+            instance_class="db.r4.large"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_rds_cluster_instance_primary.override_logical_id("primary")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `global_cluster_identifier` - (Required, Forces new resources) Global cluster identifier.
+* `database_name` - (Optional, Forces new resources) Name for an automatically created database on cluster creation.
+* `deletion_protection` - (Optional) If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `engine` - (Optional, Forces new resources) Name of the database engine to be used for this DB cluster. Terraform will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `aurora-mysql`, `aurora-postgresql`. Defaults to `aurora`. Conflicts with `source_db_cluster_identifier`.
+* `engine_version` - (Optional) Engine version of the Aurora global database. The `engine`, `engine_version`, and `instance_class` (on the `aws_rds_cluster_instance`) must together support global databases. See [Using Amazon Aurora global databases](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html) for more information. By upgrading the engine version, Terraform will upgrade cluster members.
+  **NOTE:** To avoid an `inconsistent final plan` error while upgrading, use the `lifecycle` `ignore_changes` meta-argument for `engine_version` on the associated `aws_rds_cluster` resource, as shown in the [Upgrading Engine Versions](#upgrading-engine-versions) example above.
+* `force_destroy` - (Optional) Enable to remove DB Cluster members from Global Cluster on destroy. Required with `source_db_cluster_identifier`.
+* `source_db_cluster_identifier` - (Optional) Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. Terraform cannot perform drift detection of this value.
+* `storage_encrypted` - (Optional, Forces new resources) Specifies whether the DB cluster is encrypted. The default is `false` unless `source_db_cluster_identifier` is specified and encrypted. Terraform will only perform drift detection if a configuration value is provided.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - RDS Global Cluster Amazon Resource Name (ARN)
+* `global_cluster_members` - Set of objects containing Global Cluster members.
+    * `db_cluster_arn` - Amazon Resource Name (ARN) of member DB Cluster
+    * `is_writer` - Whether the member is the primary DB Cluster
+* `global_cluster_resource_id` - AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed
+* `id` - RDS Global Cluster identifier
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `update` - (Default `90m`)
+- `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_rds_global_cluster` using the RDS Global Cluster identifier. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_rds_global_cluster` using the RDS Global Cluster identifier. For example:
+
+```console
+% terraform import aws_rds_global_cluster.example example
+```
+
+Certain resource arguments, like `force_destroy`, only exist within Terraform. If the argument is set in the Terraform configuration on an imported resource, Terraform will show a difference on the first plan after import to update the state value. This change is safe to apply immediately so the state matches the desired configuration.
+
+Certain resource arguments, like `source_db_cluster_identifier`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rds_global_cluster import RdsGlobalCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, global_cluster_identifier):
+        super().__init__(scope, name)
+        RdsGlobalCluster(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["source_db_cluster_identifier"]
+            ),
+            global_cluster_identifier=global_cluster_identifier
+        )
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rds_reserved_instance.html.markdown b/website/docs/cdktf/python/r/rds_reserved_instance.html.markdown
new file mode 100644
index 00000000000..3f614303a05
--- /dev/null
+++ b/website/docs/cdktf/python/r/rds_reserved_instance.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_reserved_instance"
+description: |-
+  Manages an RDS DB Reserved Instance
+---
+
+
+
+# Resource: aws_rds_reserved_instance
+
+Manages an RDS DB Reserved Instance.
+
+~> **NOTE:** Once created, a reservation is valid for the `duration` of the provided `offering_id` and cannot be deleted. Performing a `destroy` will only remove the resource from state. For more information see [RDS Reserved Instances Documentation](https://aws.amazon.com/rds/reserved-instances/) and [PurchaseReservedDBInstancesOffering](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_PurchaseReservedDBInstancesOffering.html).
+
+~> **NOTE:** Due to the expense of testing this resource, we provide it as best effort. If you find it useful, and have the ability to help test or notice issues, consider reaching out to us on [GitHub](https://github.com/hashicorp/terraform-provider-aws).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_rds_reserved_instance_offering import DataAwsRdsReservedInstanceOffering
+from imports.aws.rds_reserved_instance import RdsReservedInstance
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = DataAwsRdsReservedInstanceOffering(self, "test",
+            db_instance_class="db.t2.micro",
+            duration=31536000,
+            multi_az=False,
+            offering_type="All Upfront",
+            product_description="mysql"
+        )
+        RdsReservedInstance(self, "example",
+            instance_count=3,
+            offering_id=Token.as_string(test.offering_id),
+            reservation_id="optionalCustomReservationID"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `offering_id` - (Required) ID of the Reserved DB instance offering to purchase. To determine an `offering_id`, see the `aws_rds_reserved_instance_offering` data source.
+
+The following arguments are optional:
+
+* `instance_count` - (Optional) Number of instances to reserve. Default value is `1`.
+* `reservation_id` - (Optional) Customer-specified identifier to track this reservation.
+* `tags` - (Optional) Map of tags to assign to the DB reservation. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
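+
+Because a reservation cannot be deleted once purchased, it can be worth inspecting the offering's terms before adding the reservation resource. The following is a hedged sketch, not part of the generated documentation, reusing the data source from the example above (the stack and output names are illustrative, and it assumes the offering data source exposes `fixed_price` and `currency_code` attributes):
+
+```python
+# Hypothetical sketch: surface the offering's terms as outputs so they can be
+# reviewed before the aws_rds_reserved_instance resource is added and applied.
+from constructs import Construct
+from cdktf import TerraformStack, TerraformOutput
+from imports.aws.data_aws_rds_reserved_instance_offering import DataAwsRdsReservedInstanceOffering
+class OfferingReviewSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        offering = DataAwsRdsReservedInstanceOffering(self, "review",
+            db_instance_class="db.t2.micro",
+            duration=31536000,
+            multi_az=False,
+            offering_type="All Upfront",
+            product_description="mysql"
+        )
+        # Assumed attributes of the offering data source.
+        TerraformOutput(self, "offering_fixed_price", value=offering.fixed_price)
+        TerraformOutput(self, "offering_currency", value=offering.currency_code)
+```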
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN for the reserved DB instance.
+* `id` - Unique identifier for the reservation. Same as `reservation_id`.
+* `currency_code` - Currency code for the reserved DB instance.
+* `duration` - Duration of the reservation in seconds.
+* `fixed_price` - Fixed price charged for this reserved DB instance.
+* `db_instance_class` - DB instance class for the reserved DB instance.
+* `lease_id` - Unique identifier for the lease associated with the reserved DB instance. Amazon Web Services Support might request the lease ID for an issue related to a reserved DB instance.
+* `multi_az` - Whether the reservation applies to Multi-AZ deployments.
+* `offering_type` - Offering type of this reserved DB instance.
+* `product_description` - Description of the reserved DB instance.
+* `recurring_charges` - Recurring price charged to run this reserved DB instance.
+* `start_time` - Time the reservation started.
+* `state` - State of the reserved DB instance.
+* `usage_price` - Hourly price charged for this reserved DB instance.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `1m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS DB Instance Reservations using the `instance_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import RDS DB Instance Reservations using the `instance_id`. For example:
+
+```console
+% terraform import aws_rds_reserved_instance.reservation_instance CustomReservationID
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_authentication_profile.html.markdown b/website/docs/cdktf/python/r/redshift_authentication_profile.html.markdown
new file mode 100644
index 00000000000..bdaaf673d63
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_authentication_profile.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_authentication_profile"
+description: |-
+  Creates a Redshift authentication profile
+---
+
+
+
+# Resource: aws_redshift_authentication_profile
+
+Creates a Redshift authentication profile.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_authentication_profile import RedshiftAuthenticationProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftAuthenticationProfile(self, "example",
+            authentication_profile_content=Token.as_string(
+                Fn.jsonencode({
+                    "AllowDBUserOverride": "1",
+                    "App_ID": "example",
+                    "Client_ID": "ExampleClientID"
+                })),
+            authentication_profile_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `authentication_profile_name` - (Required, Forces new resource) The name of the authentication profile.
+* `authentication_profile_content` - (Required) The content of the authentication profile in JSON format. The maximum length of the JSON string is determined by a quota for your account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the authentication profile.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Authentication Profiles using the `authentication_profile_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Authentication Profiles using the `authentication_profile_name`. For example:
+
+```console
+% terraform import aws_redshift_authentication_profile.test example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_cluster.html.markdown b/website/docs/cdktf/python/r/redshift_cluster.html.markdown
new file mode 100644
index 00000000000..7eb8da7d13c
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_cluster.html.markdown
@@ -0,0 +1,175 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster"
+description: |-
+  Provides a Redshift Cluster resource.
+---
+
+
+
+# Resource: aws_redshift_cluster
+
+Provides a Redshift Cluster Resource.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **NOTE:** A Redshift cluster's default IAM role can be managed both by this resource's `default_iam_role_arn` argument and the [`aws_redshift_cluster_iam_roles`](redshift_cluster_iam_roles.html) resource's `default_iam_role_arn` argument. Do not configure different values for both arguments. Doing so will cause a conflict of default IAM roles.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_cluster import RedshiftCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftCluster(self, "example",
+            cluster_identifier="tf-redshift-cluster",
+            cluster_type="single-node",
+            database_name="mydb",
+            master_password="Mustbe8characters",
+            master_username="exampleuser",
+            node_type="dc1.large"
+        )
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation](http://docs.aws.amazon.com/cli/latest/reference/redshift/index.html#cli-aws-redshift).
+
+This resource supports the following arguments:
+
+* `cluster_identifier` - (Required) The Cluster Identifier. Must be a lowercase string.
+* `database_name` - (Optional) The name of the first database to be created when the cluster is created.
+  If you do not provide a name, Amazon Redshift will create a default database called `dev`.
+* `default_iam_role_arn` - (Optional) The Amazon Resource Name (ARN) for the IAM role that was set as default for the cluster when the cluster was created.
+* `node_type` - (Required) The node type to be provisioned for the cluster.
+* `cluster_type` - (Optional) The cluster type to use. Either `single-node` or `multi-node`.
+* `master_password` - (Required unless a `snapshot_identifier` is provided) Password for the master DB user.
+  Note that this may show up in logs, and it will be stored in the state file. Password must contain at least 8 characters and
+  contain at least one uppercase letter, one lowercase letter, and one number.
+* `master_username` - (Required unless a `snapshot_identifier` is provided) Username for the master DB user.
+* `vpc_security_group_ids` - (Optional) A list of Virtual Private Cloud (VPC) security groups to be associated with the cluster.
+* `cluster_subnet_group_name` - (Optional) The name of a cluster subnet group to be associated with this cluster. If this parameter is not provided, the resulting cluster will be deployed outside of a virtual private cloud (VPC).
+* `availability_zone` - (Optional) The EC2 Availability Zone (AZ) in which you want Amazon Redshift to provision the cluster. For example, if you have several EC2 instances running in a specific Availability Zone, then you might want the cluster to be provisioned in the same zone in order to decrease network latency. Can only be changed if `availability_zone_relocation_enabled` is `true`.
+* `availability_zone_relocation_enabled` - (Optional) If true, the cluster can be relocated to another availability zone, either automatically by AWS or when requested. Default is `false`. Available for use on clusters from the RA3 instance family.
+* `preferred_maintenance_window` - (Optional) The weekly time range (in UTC) during which automated cluster maintenance can occur.
+  Format: `ddd:hh24:mi-ddd:hh24:mi`
+* `cluster_parameter_group_name` - (Optional) The name of the parameter group to be associated with this cluster.
+* `automated_snapshot_retention_period` - (Optional) The number of days that automated snapshots are retained. If the value is 0, automated snapshots are disabled. Even if automated snapshots are disabled, you can still create manual snapshots when you want with create-cluster-snapshot. Default is `1`.
+* `port` - (Optional) The port number on which the cluster accepts incoming connections. Valid values are between `1115` and `65535`.
+  The cluster is accessible only via the JDBC and ODBC connection strings.
+  Part of the connection string requires the port on which the cluster will listen for incoming connections.
+  Default port is `5439`.
+* `cluster_version` - (Optional) The version of the Amazon Redshift engine software that you want to deploy on the cluster.
+  The version selected runs on all the nodes in the cluster.
+* `allow_version_upgrade` - (Optional) If `true`, major version upgrades can be applied during the maintenance window to the Amazon Redshift engine that is running on the cluster. Default is `true`.
+* `apply_immediately` - (Optional) Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`.
+* `aqua_configuration_status` - (Optional, **Deprecated**) The value represents how the cluster is configured to use AQUA (Advanced Query Accelerator) after the cluster is restored.
+  No longer supported by the AWS API.
+  Always returns `auto`.
+* `number_of_nodes` - (Optional) The number of compute nodes in the cluster. This parameter is required when the ClusterType parameter is specified as multi-node. Default is `1`.
+* `publicly_accessible` - (Optional) If `true`, the cluster can be accessed from a public network. Default is `true`.
+* `encrypted` - (Optional) If `true`, the data in the cluster is encrypted at rest.
+* `enhanced_vpc_routing` - (Optional) If `true`, enhanced VPC routing is enabled.
+* `kms_key_id` - (Optional) The ARN for the KMS encryption key. When specifying `kms_key_id`, `encrypted` needs to be set to true.
+* `elastic_ip` - (Optional) The Elastic IP (EIP) address for the cluster.
+* `skip_final_snapshot` - (Optional) Determines whether a final snapshot of the cluster is created before Amazon Redshift deletes the cluster. If `true`, a final cluster snapshot is not created. If `false`, a final cluster snapshot is created before the cluster is deleted. Default is `false`.
+* `final_snapshot_identifier` - (Optional) The identifier of the final snapshot that is to be created immediately before deleting the cluster. If this parameter is provided, `skip_final_snapshot` must be false.
+* `snapshot_identifier` - (Optional) The name of the snapshot from which to create the new cluster.
+* `snapshot_cluster_identifier` - (Optional) The name of the cluster the source snapshot was created from.
+* `owner_account` - (Optional) The AWS customer account used to create or copy the snapshot. Required if you are restoring a snapshot you do not own, optional if you own the snapshot.
+* `iam_roles` - (Optional) A list of IAM Role ARNs to associate with the cluster. A maximum of 10 can be associated with the cluster at any time.
+* `logging` - (Optional) Logging, documented below.
+* `maintenance_track_name` - (Optional) The name of the maintenance track for the restored cluster. When you take a snapshot, the snapshot inherits the MaintenanceTrack value from the cluster. The snapshot might be on a different track than the cluster that was the source for the snapshot. For example, suppose that you take a snapshot of a cluster that is on the current track and then change the cluster to be on the trailing track. In this case, the snapshot and the source cluster are on different tracks. Default value is `current`.
+* `manual_snapshot_retention_period` - (Optional) The default number of days to retain a manual snapshot. If the value is -1, the snapshot is retained indefinitely. This setting doesn't change the retention period of existing snapshots. Valid values are between `-1` and `3653`. Default value is `-1`.
+* `snapshot_copy` - (Optional) Configuration of automatic copy of snapshots from one region to another. Documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Blocks
+
+#### `logging`
+
+* `enable` - (Required) Enables logging information such as queries and connection attempts, for the specified Amazon Redshift cluster.
+* `bucket_name` - (Optional, required when `enable` is `true` and `log_destination_type` is `s3`) The name of an existing S3 bucket where the log files are to be stored. Must be in the same region as the cluster and the cluster must have read bucket and put object permissions.
+For more information on the permissions required for the bucket, please read the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging).
+* `s3_key_prefix` - (Optional) The prefix applied to the log file names.
+* `log_destination_type` - (Optional) The log destination type. An enum with possible values of `s3` and `cloudwatch`.
+* `log_exports` - (Optional) The collection of exported log types. Log types include the connection log, user log, and user activity log. Required when `log_destination_type` is `cloudwatch`. Valid log types are `connectionlog`, `userlog`, and `useractivitylog`.
+
+#### `snapshot_copy`
+
+* `destination_region` - (Required) The destination region that you want to copy snapshots to.
+* `retention_period` - (Optional) The number of days to retain automated snapshots in the destination region after they are copied from the source region. Defaults to `7`.
+* `grant_name` - (Optional) The name of the snapshot copy grant to use when snapshots of an AWS KMS-encrypted cluster are copied to the destination region.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of cluster
+* `id` - The Redshift Cluster ID.
+* `cluster_identifier` - The Cluster Identifier
+* `cluster_type` - The cluster type
+* `node_type` - The type of nodes in the cluster
+* `database_name` - The name of the default database in the Cluster
+* `availability_zone` - The availability zone of the Cluster
+* `automated_snapshot_retention_period` - The backup retention period
+* `preferred_maintenance_window` - The backup window
+* `endpoint` - The connection endpoint
+* `encrypted` - Whether the data in the cluster is encrypted
+* `vpc_security_group_ids` - The VPC security group Ids associated with the cluster
+* `dns_name` - The DNS name of the cluster
+* `port` - The Port the cluster responds on
+* `cluster_version` - The version of Redshift engine software
+* `cluster_parameter_group_name` - The name of the parameter group to be associated with this cluster
+* `cluster_subnet_group_name` - The name of a cluster subnet group to be associated with this cluster
+* `cluster_public_key` - The public key for the cluster
+* `cluster_revision_number` - The specific revision number of the database in the cluster
+* `cluster_nodes` - The nodes in the cluster.
Cluster node blocks are documented below.
+* `cluster_namespace_arn` - The namespace Amazon Resource Name (ARN) of the cluster
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+Cluster nodes (for `cluster_nodes`) support the following attributes:
+
+* `node_role` - Whether the node is a leader node or a compute node
+* `private_ip_address` - The private IP address of a node within a cluster
+* `public_ip_address` - The public IP address of a node within a cluster
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `75m`)
+- `update` - (Default `75m`)
+- `delete` - (Default `40m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Clusters using the `cluster_identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Clusters using the `cluster_identifier`. For example:
+
+```console
+% terraform import aws_redshift_cluster.myprodcluster tf-redshift-cluster-12345
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_cluster_iam_roles.html.markdown b/website/docs/cdktf/python/r/redshift_cluster_iam_roles.html.markdown
new file mode 100644
index 00000000000..3453dff3dc5
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_cluster_iam_roles.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster_iam_roles"
+description: |-
+  Provides a Redshift Cluster IAM Roles resource.
+---
+
+
+
+# Resource: aws_redshift_cluster_iam_roles
+
+Provides a Redshift Cluster IAM Roles resource.
+
+~> **NOTE:** A Redshift cluster's default IAM role can be managed both by this resource's `default_iam_role_arn` argument and the [`aws_redshift_cluster`](redshift_cluster.html) resource's `default_iam_role_arn` argument. Do not configure different values for both arguments. Doing so will cause a conflict of default IAM roles.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_cluster_iam_roles import RedshiftClusterIamRoles
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftClusterIamRoles(self, "example",
+            cluster_identifier=Token.as_string(aws_redshift_cluster_example.cluster_identifier),
+            iam_role_arns=[Token.as_string(aws_iam_role_example.arn)]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cluster_identifier` - (Required) The name of the Redshift Cluster IAM Roles.
+* `iam_role_arns` - (Optional) A list of IAM Role ARNs to associate with the cluster. A maximum of 10 can be associated with the cluster at any time.
+* `default_iam_role_arn` - (Optional) The Amazon Resource Name (ARN) for the IAM role that was set as default for the cluster when the cluster was created.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Redshift Cluster ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Cluster IAM Roles using the `cluster_identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Cluster IAM Roles using the `cluster_identifier`. For example:
+
+```console
+% terraform import aws_redshift_cluster_iam_roles.examplegroup1 example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_cluster_snapshot.html.markdown b/website/docs/cdktf/python/r/redshift_cluster_snapshot.html.markdown
new file mode 100644
index 00000000000..8e3fb64e1ed
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_cluster_snapshot.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster_snapshot"
+description: |-
+  Creates a Redshift cluster snapshot
+---
+
+
+
+# Resource: aws_redshift_cluster_snapshot
+
+Creates a Redshift cluster snapshot.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_cluster_snapshot import RedshiftClusterSnapshot
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, cluster_identifier, snapshot_identifier):
+        super().__init__(scope, name)
+        RedshiftClusterSnapshot(self, "example",
+            cluster_snapshot_content=Fn.jsonencode({
+                "AllowDBUserOverride": "1",
+                "App_ID": "example",
+                "Client_ID": "ExampleClientID"
+            }),
+            cluster_snapshot_name="example",
+            cluster_identifier=cluster_identifier,
+            snapshot_identifier=snapshot_identifier
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cluster_identifier` - (Required, Forces new resource) The cluster identifier for which you want a snapshot.
+* `snapshot_identifier` - (Required, Forces new resource) A unique identifier for the snapshot that you are requesting. This identifier must be unique for all snapshots within the Amazon Web Services account.
+* `manual_snapshot_retention_period` - (Optional) The number of days that a manual snapshot is retained. If the value is `-1`, the manual snapshot is retained indefinitely. Valid values are `-1` and between `1` and `3653`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the snapshot.
+* `id` - A unique identifier for the snapshot that you are requesting. This identifier must be unique for all snapshots within the Amazon Web Services account.
+* `kms_key_id` - The Key Management Service (KMS) key ID of the encryption key that was used to encrypt data in the cluster from which the snapshot was taken.
+* `owner_account` - For manual snapshots, the Amazon Web Services account used to create or copy the snapshot. For automatic snapshots, the owner of the cluster. The owner can perform all snapshot actions, such as sharing a manual snapshot.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Cluster Snapshots using `snapshot_identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Cluster Snapshots using `snapshot_identifier`. For example:
+
+```console
+% terraform import aws_redshift_cluster_snapshot.test example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_endpoint_access.html.markdown b/website/docs/cdktf/python/r/redshift_endpoint_access.html.markdown
new file mode 100644
index 00000000000..19cadf2fed9
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_endpoint_access.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_endpoint_access"
+description: |-
+  Provides a Redshift Endpoint Access resource.
+---
+
+
+
+# Resource: aws_redshift_endpoint_access
+
+Creates a new Amazon Redshift endpoint access.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_endpoint_access import RedshiftEndpointAccess
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftEndpointAccess(self, "example",
+            cluster_identifier=Token.as_string(aws_redshift_cluster_example.cluster_identifier),
+            endpoint_name="example",
+            subnet_group_name=Token.as_string(aws_redshift_subnet_group_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cluster_identifier` - (Required) The cluster identifier of the cluster to access.
+* `endpoint_name` - (Required) The Redshift-managed VPC endpoint name.
+* `resource_owner` - (Optional) The Amazon Web Services account ID of the owner of the cluster. This is only required if the cluster is in another Amazon Web Services account.
+* `subnet_group_name` - (Required) The subnet group from which Amazon Redshift chooses the subnet to deploy the endpoint.
+* `vpc_security_group_ids` - (Optional) The security group that defines the ports, protocols, and sources for inbound traffic that you are authorizing into your endpoint.
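+
+As a sketch of how the optional arguments compose, the endpoint below pins explicit security groups and sets `resource_owner` for a cluster owned by another account. This is not `cdktf convert` output: the account ID and the `aws_security_group` reference are hypothetical placeholders, not values from this page.
+
+```python
+# A minimal sketch, assuming the cluster, subnet group, and security group
+# referenced here are defined elsewhere in the configuration.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.redshift_endpoint_access import RedshiftEndpointAccess
+class CrossAccountEndpoint(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftEndpointAccess(self, "cross_account",
+            cluster_identifier=Token.as_string(aws_redshift_cluster_example.cluster_identifier),
+            endpoint_name="example-cross-account",
+            # Hypothetical account ID of the cluster owner; only needed when the
+            # cluster lives in another account.
+            resource_owner="111122223333",
+            subnet_group_name=Token.as_string(aws_redshift_subnet_group_example.id),
+            vpc_security_group_ids=[Token.as_string(aws_security_group_example.id)]
+        )
+```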
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `address` - The DNS address of the endpoint.
+* `id` - The Redshift-managed VPC endpoint name.
+* `port` - The port number on which the cluster accepts incoming connections.
+* `vpc_endpoint` - The connection endpoint for connecting to an Amazon Redshift cluster through the proxy. See details below.
+
+### VPC Endpoint
+
+* `network_interface` - One or more network interfaces of the endpoint. Also known as an interface endpoint. See details below.
+* `vpc_endpoint_id` - The connection endpoint ID for connecting an Amazon Redshift cluster through the proxy.
+* `vpc_id` - The VPC identifier that the endpoint is associated with.
+
+### Network Interface
+
+* `availability_zone` - The Availability Zone.
+* `network_interface_id` - The network interface identifier.
+* `private_ip_address` - The IPv4 address of the network interface within the subnet.
+* `subnet_id` - The subnet identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift endpoint access using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift endpoint access using the `name`. For example:
+
+```console
+% terraform import aws_redshift_endpoint_access.example example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_endpoint_authorization.html.markdown b/website/docs/cdktf/python/r/redshift_endpoint_authorization.html.markdown
new file mode 100644
index 00000000000..b7d987d3b9b
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_endpoint_authorization.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_endpoint_authorization"
+description: |-
+  Provides a Redshift Endpoint Authorization resource.
+---
+
+
+
+# Resource: aws_redshift_endpoint_authorization
+
+Creates a new Amazon Redshift endpoint authorization.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_endpoint_authorization import RedshiftEndpointAuthorization
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftEndpointAuthorization(self, "example",
+            account="01234567910",
+            cluster_identifier=Token.as_string(aws_redshift_cluster_example.cluster_identifier)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account` - (Required) The Amazon Web Services account ID to grant access to.
+* `cluster_identifier` - (Required) The cluster identifier of the cluster to grant access to.
+* `force_delete` - (Optional) Indicates whether to force the revoke action. If true, the Redshift-managed VPC endpoints associated with the endpoint authorization are also deleted. Default value is `false`.
+* `vpc_ids` - (Optional) The virtual private cloud (VPC) identifiers to grant access to.
If none are specified, all VPCs in the shared account are allowed.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `allowed_all_vpcs` - Indicates whether all VPCs in the grantee account are allowed access to the cluster.
+* `id` - The identifier of the Redshift Endpoint Authorization, made up of `account` and `cluster_identifier` separated by a colon (`:`).
+* `endpoint_count` - The number of Redshift-managed VPC endpoints created for the authorization.
+* `grantee` - The Amazon Web Services account ID of the grantee of the cluster.
+* `grantor` - The Amazon Web Services account ID of the cluster owner.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift endpoint authorization using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift endpoint authorization using the `id`. For example:
+
+```console
+% terraform import aws_redshift_endpoint_authorization.example 01234567910:cluster-example-id
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_event_subscription.html.markdown b/website/docs/cdktf/python/r/redshift_event_subscription.html.markdown
new file mode 100644
index 00000000000..f5335bc6f38
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_event_subscription.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_event_subscription"
+description: |-
+  Provides a Redshift event subscription resource.
+---
+
+
+
+# Resource: aws_redshift_event_subscription
+
+Provides a Redshift event subscription resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_cluster import RedshiftCluster
+from imports.aws.redshift_event_subscription import RedshiftEventSubscription
+from imports.aws.sns_topic import SnsTopic
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, node_type):
+        super().__init__(scope, name)
+        default_var = RedshiftCluster(self, "default",
+            cluster_identifier="default",
+            database_name="default",
+            node_type=node_type
+        )
+        aws_sns_topic_default = SnsTopic(self, "default_1",
+            name="redshift-events"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sns_topic_default.override_logical_id("default")
+        aws_redshift_event_subscription_default = RedshiftEventSubscription(self, "default_2",
+            event_categories=["configuration", "management", "monitoring", "security"
+            ],
+            name="redshift-event-sub",
+            severity="INFO",
+            sns_topic_arn=Token.as_string(aws_sns_topic_default.arn),
+            source_ids=[default_var.id],
+            source_type="cluster",
+            tags={
+                "Name": "default"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_redshift_event_subscription_default.override_logical_id("default")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Redshift event subscription.
+* `sns_topic_arn` - (Required) The ARN of the SNS topic to send events to.
+* `source_ids` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a `source_type` must also be specified.
+* `source_type` - (Optional) The type of source that will be generating the events. Valid options are `cluster`, `cluster-parameter-group`, `cluster-security-group`, `cluster-snapshot`, or `scheduled-action`. If not set, all sources will be subscribed to.
+* `severity` - (Optional) The event severity to be published by the notification subscription. Valid options are `INFO` or `ERROR`. Default value is `INFO`.
+* `event_categories` - (Optional) A list of event categories for a SourceType that you want to subscribe to. See https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-event-notifications.html or run `aws redshift describe-event-categories`.
+* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to `true`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift event notification subscription
+* `id` - The name of the Redshift event notification subscription
+* `customer_aws_id` - The AWS customer account associated with the Redshift event notification subscription
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Event Subscriptions using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Event Subscriptions using the `name`. For example:
+
+```console
+% terraform import aws_redshift_event_subscription.default redshift-event-sub
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_hsm_client_certificate.html.markdown b/website/docs/cdktf/python/r/redshift_hsm_client_certificate.html.markdown
new file mode 100644
index 00000000000..b5819af23c2
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_hsm_client_certificate.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_hsm_client_certificate"
+description: |-
+  Creates an HSM client certificate that an Amazon Redshift cluster will use to connect to the client's HSM in order to store and retrieve the keys used to encrypt the cluster databases.
+---
+
+
+
+# Resource: aws_redshift_hsm_client_certificate
+
+Creates an HSM client certificate that an Amazon Redshift cluster will use to connect to the client's HSM in order to store and retrieve the keys used to encrypt the cluster databases.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_hsm_client_certificate import RedshiftHsmClientCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftHsmClientCertificate(self, "example",
+            hsm_client_certificate_identifier="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `hsm_client_certificate_identifier` - (Required, Forces new resource) The identifier of the HSM client certificate.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the HSM Client Certificate.
+* `hsm_client_certificate_public_key` - The public key that the Amazon Redshift cluster will use to connect to the HSM. You must register the public key in the HSM.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift HSM Client Certificates using `hsm_client_certificate_identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift HSM Client Certificates using `hsm_client_certificate_identifier`. For example:
+
+```console
+% terraform import aws_redshift_hsm_client_certificate.test example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_hsm_configuration.html.markdown b/website/docs/cdktf/python/r/redshift_hsm_configuration.html.markdown
new file mode 100644
index 00000000000..5e99dadaf15
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_hsm_configuration.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_hsm_configuration"
+description: |-
+  Creates an HSM configuration that contains the information required by an Amazon Redshift cluster to store and use database encryption keys in a Hardware Security Module (HSM).
+---
+
+
+
+# Resource: aws_redshift_hsm_configuration
+
+Creates an HSM configuration that contains the information required by an Amazon Redshift cluster to store and use database encryption keys in a Hardware Security Module (HSM).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_hsm_configuration import RedshiftHsmConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftHsmConfiguration(self, "example",
+            description="example",
+            hsm_configuration_identifier="example",
+            hsm_ip_address="10.0.0.1",
+            hsm_partition_name="aws",
+            hsm_partition_password="example",
+            hsm_server_public_certificate="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Required, Forces new resource) A text description of the HSM configuration to be created.
+* `hsm_configuration_identifier` - (Required, Forces new resource) The identifier to be assigned to the new Amazon Redshift HSM configuration.
+* `hsm_ip_address` - (Required, Forces new resource) The IP address that the Amazon Redshift cluster must use to access the HSM.
+* `hsm_partition_name` - (Required, Forces new resource) The name of the partition in the HSM where the Amazon Redshift clusters will store their database encryption keys.
+* `hsm_partition_password` - (Required, Forces new resource) The password required to access the HSM partition.
+* `hsm_server_public_certificate` - (Required, Forces new resource) The HSM's public certificate file. When using Cloud HSM, the file name is server.pem.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the HSM Configuration.
+* `hsm_configuration_public_key` - The public key that the Amazon Redshift cluster will use to connect to the HSM. You must register the public key in the HSM.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift HSM Configurations using `hsm_configuration_identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift HSM Configurations using `hsm_configuration_identifier`.
For example:
+
+```console
+% terraform import aws_redshift_hsm_configuration.example example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_parameter_group.html.markdown b/website/docs/cdktf/python/r/redshift_parameter_group.html.markdown
new file mode 100644
index 00000000000..d7452b77f2e
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_parameter_group.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_parameter_group"
+description: |-
+  Provides a Redshift Cluster parameter group resource.
+---
+
+
+
+# Resource: aws_redshift_parameter_group
+
+Provides a Redshift Cluster parameter group resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_parameter_group import RedshiftParameterGroup, RedshiftParameterGroupParameter
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftParameterGroup(self, "bar",
+            family="redshift-1.0",
+            name="parameter-group-test-terraform",
+            parameter=[RedshiftParameterGroupParameter(
+                name="require_ssl",
+                value="true"
+            ), RedshiftParameterGroupParameter(
+                name="query_group",
+                value="example"
+            ), RedshiftParameterGroupParameter(
+                name="enable_user_activity_logging",
+                value="true"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Redshift parameter group.
+* `family` - (Required) The family of the Redshift parameter group.
+* `description` - (Optional) The description of the Redshift parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of Redshift parameters to apply.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the Redshift parameter.
+* `value` - (Required) The value of the Redshift parameter.
+
+You can read more about the parameters that Redshift supports in the [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-parameter-groups.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of parameter group
+* `id` - The Redshift parameter group name.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Parameter Groups using the `name`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Parameter Groups using the `name`. For example:
+
+```console
+% terraform import aws_redshift_parameter_group.paramgroup1 parameter-group-test-terraform
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_partner.html.markdown b/website/docs/cdktf/python/r/redshift_partner.html.markdown
new file mode 100644
index 00000000000..bf5a2d81852
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_partner.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_partner"
+description: |-
+  Provides a Redshift Partner resource.
+---
+
+
+
+# Resource: aws_redshift_partner
+
+Creates a new Amazon Redshift Partner Integration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_partner import RedshiftPartner
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftPartner(self, "example",
+            account_id=Token.as_string(1234567910),
+            cluster_identifier=Token.as_string(aws_redshift_cluster_example.id),
+            database_name=Token.as_string(aws_redshift_cluster_example.database_name),
+            partner_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Required) The Amazon Web Services account ID that owns the cluster.
+* `cluster_identifier` - (Required) The cluster identifier of the cluster that receives data from the partner.
+* `database_name` - (Required) The name of the database that receives data from the partner.
+* `partner_name` - (Required) The name of the partner that is authorized to send data.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the Redshift partner, made up of `account_id`, `cluster_identifier`, `database_name`, and `partner_name` separated by a colon (`:`).
+* `status` - The partner integration status.
+* `status_message` - The status message provided by the partner.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Partners using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Partners using the `id`.
For example:
+
+```console
+% terraform import aws_redshift_partner.example 01234567910:cluster-example-id:example:example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_scheduled_action.html.markdown b/website/docs/cdktf/python/r/redshift_scheduled_action.html.markdown
new file mode 100644
index 00000000000..1f52efc4193
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_scheduled_action.html.markdown
@@ -0,0 +1,177 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_scheduled_action"
+description: |-
+  Provides a Redshift Scheduled Action resource.
+---
+
+
+
+# Resource: aws_redshift_scheduled_action
+
+## Example Usage
+
+### Pause Cluster Action
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.iam_policy import IamPolicy
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment
+from imports.aws.redshift_scheduled_action import RedshiftScheduledAction, RedshiftScheduledActionTargetAction, RedshiftScheduledActionTargetActionPauseCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["scheduler.redshift.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        example = DataAwsIamPolicyDocument(self, "example",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["redshift:PauseCluster", "redshift:ResumeCluster", "redshift:ResizeCluster"
+                ],
+                effect="Allow",
+                resources=["*"]
+            )
+            ]
+        )
+        aws_iam_policy_example = IamPolicy(self, "example_2",
+            name="redshift_scheduled_action",
+            policy=Token.as_string(example.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_policy_example.override_logical_id("example")
+        aws_iam_role_example = IamRole(self, "example_3",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="redshift_scheduled_action"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_example.override_logical_id("example")
+        aws_iam_role_policy_attachment_example = IamRolePolicyAttachment(self, "example_4",
+            policy_arn=Token.as_string(aws_iam_policy_example.arn),
+            role=Token.as_string(aws_iam_role_example.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_example.override_logical_id("example")
+        aws_redshift_scheduled_action_example = RedshiftScheduledAction(self, "example_5",
+            iam_role=Token.as_string(aws_iam_role_example.arn),
+            name="tf-redshift-scheduled-action",
+            schedule="cron(00 23 * * ? *)",
+            target_action=RedshiftScheduledActionTargetAction(
+                pause_cluster=RedshiftScheduledActionTargetActionPauseCluster(
+                    cluster_identifier="tf-redshift001"
+                )
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_redshift_scheduled_action_example.override_logical_id("example")
+```
+
+### Resize Cluster Action
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_scheduled_action import RedshiftScheduledAction, RedshiftScheduledActionTargetAction, RedshiftScheduledActionTargetActionResizeCluster
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftScheduledAction(self, "example",
+            iam_role=Token.as_string(aws_iam_role_example.arn),
+            name="tf-redshift-scheduled-action",
+            schedule="cron(00 23 * * ? *)",
+            target_action=RedshiftScheduledActionTargetAction(
+                resize_cluster=RedshiftScheduledActionTargetActionResizeCluster(
+                    cluster_identifier="tf-redshift001",
+                    cluster_type="multi-node",
+                    node_type="dc1.large",
+                    number_of_nodes=2
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The scheduled action name.
+* `description` - (Optional) The description of the scheduled action.
+* `enable` - (Optional) Whether to enable the scheduled action. Default is `true`.
+* `start_time` - (Optional) The start time in UTC when the schedule is active, in UTC RFC3339 format (for example, YYYY-MM-DDTHH:MM:SSZ).
+* `end_time` - (Optional) The end time in UTC when the schedule is active, in UTC RFC3339 format (for example, YYYY-MM-DDTHH:MM:SSZ).
+* `schedule` - (Required) The schedule of action. The schedule is defined in the format of an "at expression" or "cron expression", for example `at(2016-03-04T17:27:00)` or `cron(0 10 ? * MON *)`. See [Scheduled Action](https://docs.aws.amazon.com/redshift/latest/APIReference/API_ScheduledAction.html) for more information.
+* `iam_role` - (Required) The IAM role to assume to run the scheduled action.
+* `target_action` - (Required) Target action. Documented below.
+
+### Nested Blocks
+
+#### `target_action`
+
+* `pause_cluster` - (Optional) An action that runs a `PauseCluster` API operation. Documented below.
+* `resize_cluster` - (Optional) An action that runs a `ResizeCluster` API operation. Documented below.
+* `resume_cluster` - (Optional) An action that runs a `ResumeCluster` API operation. Documented below.
+
+### `pause_cluster`
+
+* `cluster_identifier` - (Required) The identifier of the cluster to be paused.
+
+### `resize_cluster`
+
+* `cluster_identifier` - (Required) The unique identifier for the cluster to resize.
+* `classic` - (Optional) A boolean value indicating whether the resize operation is using the classic resize process. Default: `false`.
+* `cluster_type` - (Optional) The new cluster type for the specified cluster.
+* `node_type` - (Optional) The new node type for the nodes you are adding.
+* `number_of_nodes` - (Optional) The new number of nodes for the cluster.
+
+### `resume_cluster`
+
+* `cluster_identifier` - (Required) The identifier of the cluster to be resumed.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Redshift Scheduled Action name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Scheduled Actions using the `name`.
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Scheduled Actions using the `name`. For example:
+
+```console
+% terraform import aws_redshift_scheduled_action.example tf-redshift-scheduled-action
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_snapshot_copy_grant.html.markdown b/website/docs/cdktf/python/r/redshift_snapshot_copy_grant.html.markdown
new file mode 100644
index 00000000000..482e4dcf13d
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_snapshot_copy_grant.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_snapshot_copy_grant"
+description: |-
+  Creates a snapshot copy grant that allows AWS Redshift to encrypt copied snapshots with a customer master key from AWS KMS in a destination region.
+---
+
+
+
+# Resource: aws_redshift_snapshot_copy_grant
+
+Creates a snapshot copy grant that allows AWS Redshift to encrypt copied snapshots with a customer master key from AWS KMS in a destination region.
+
+Note that the grant must exist in the destination region, and not in the region of the cluster.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_cluster import RedshiftCluster, RedshiftClusterSnapshotCopy
+from imports.aws.redshift_snapshot_copy_grant import RedshiftSnapshotCopyGrant
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, cluster_identifier, node_type):
+        super().__init__(scope, name)
+        test = RedshiftSnapshotCopyGrant(self, "test",
+            snapshot_copy_grant_name="my-grant"
+        )
+        aws_redshift_cluster_test = RedshiftCluster(self, "test_1",
+            snapshot_copy=RedshiftClusterSnapshotCopy(
+                destination_region="us-east-2",
+                grant_name=test.snapshot_copy_grant_name
+            ),
+            cluster_identifier=cluster_identifier,
+            node_type=node_type
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_redshift_cluster_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `snapshot_copy_grant_name` - (Required, Forces new resource) A friendly name for identifying the grant.
+* `kms_key_id` - (Optional, Forces new resource) The unique identifier for the customer master key (CMK) that the grant applies to. Specify the key ID or the Amazon Resource Name (ARN) of the CMK. To specify a CMK in a different AWS account, you must use the key ARN. If not specified, the default key is used.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
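+
+Where copied snapshots must be encrypted with a specific customer managed key in the destination region, the grant can name that key explicitly. This is a sketch rather than `cdktf convert` output: the key ARN below is a hypothetical placeholder (a full key ARN is required when the key belongs to another account).
+
+```python
+# A minimal sketch, assuming a customer managed key already exists in the
+# destination region (us-east-2 here).
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.redshift_snapshot_copy_grant import RedshiftSnapshotCopyGrant
+class CmkGrant(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftSnapshotCopyGrant(self, "cmk_grant",
+            snapshot_copy_grant_name="my-cmk-grant",
+            # Hypothetical CMK ARN in the destination region.
+            kms_key_id="arn:aws:kms:us-east-2:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab"
+        )
+```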
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of snapshot copy grant
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Snapshot Copy Grants by name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Snapshot Copy Grants by name. For example:
+
+```console
+% terraform import aws_redshift_snapshot_copy_grant.test my-grant
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_snapshot_schedule.html.markdown b/website/docs/cdktf/python/r/redshift_snapshot_schedule.html.markdown
new file mode 100644
index 00000000000..3fac94c3050
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_snapshot_schedule.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_snapshot_schedule"
+description: |-
+  Provides a Redshift Snapshot Schedule resource.
+---
+
+
+
+# Resource: aws_redshift_snapshot_schedule
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_snapshot_schedule import RedshiftSnapshotSchedule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftSnapshotSchedule(self, "default",
+            definitions=["rate(12 hours)"],
+            identifier="tf-redshift-snapshot-schedule"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `identifier` - (Optional, Forces new resource) The snapshot schedule identifier. If omitted, Terraform will assign a random, unique identifier.
+* `identifier_prefix` - (Optional, Forces new resource) Creates a unique
+identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `description` - (Optional) The description of the snapshot schedule.
+* `definitions` - (Optional) The definition of the snapshot schedule. The definition is made up of schedule expressions, for example `cron(30 12 *)` or `rate(12 hours)`.
+* `force_destroy` - (Optional) Whether to destroy all associated clusters with this snapshot schedule on deletion. Must be enabled and applied before attempting deletion.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Snapshot Schedule.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Snapshot Schedule using the `identifier`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Snapshot Schedule using the `identifier`. For example:
+
+```console
+% terraform import aws_redshift_snapshot_schedule.default tf-redshift-snapshot-schedule
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_snapshot_schedule_association.html.markdown b/website/docs/cdktf/python/r/redshift_snapshot_schedule_association.html.markdown
new file mode 100644
index 00000000000..8a22fa71e5e
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_snapshot_schedule_association.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_snapshot_schedule_association"
+description: |-
+  Provides a Redshift Cluster and Snapshot Schedule Association resource.
+---
+
+
+
+# Resource: aws_redshift_snapshot_schedule_association
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_cluster import RedshiftCluster
+from imports.aws.redshift_snapshot_schedule import RedshiftSnapshotSchedule
+from imports.aws.redshift_snapshot_schedule_association import RedshiftSnapshotScheduleAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        default_var = RedshiftCluster(self, "default",
+            cluster_identifier="tf-redshift-cluster",
+            cluster_type="single-node",
+            database_name="mydb",
+            master_password="Mustbe8characters",
+            master_username="foo",
+            node_type="dc1.large"
+        )
+        aws_redshift_snapshot_schedule_default = RedshiftSnapshotSchedule(self, "default_1",
+            definitions=["rate(12 hours)"],
+            identifier="tf-redshift-snapshot-schedule"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_redshift_snapshot_schedule_default.override_logical_id("default")
+        aws_redshift_snapshot_schedule_association_default = RedshiftSnapshotScheduleAssociation(self, "default_2",
+            cluster_identifier=default_var.id,
+            schedule_identifier=Token.as_string(aws_redshift_snapshot_schedule_default.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_redshift_snapshot_schedule_association_default.override_logical_id("default")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cluster_identifier` - (Required, Forces new resource) The cluster identifier.
+* `schedule_identifier` - (Required, Forces new resource) The snapshot schedule identifier.
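+
+Because each association ties exactly one cluster to one schedule, attaching several clusters to the same schedule means creating one association per cluster. A minimal sketch, not `cdktf convert` output; the `cluster_ids` list and the schedule reference are hypothetical placeholders:
+
+```python
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.redshift_snapshot_schedule_association import RedshiftSnapshotScheduleAssociation
+class MultiClusterAssociations(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Hypothetical identifiers of existing clusters.
+        cluster_ids = ["tf-redshift-cluster-a", "tf-redshift-cluster-b"]
+        for i, cid in enumerate(cluster_ids):
+            # One association resource per cluster, all pointing at the same schedule.
+            RedshiftSnapshotScheduleAssociation(self, "assoc_{}".format(i),
+                cluster_identifier=cid,
+                schedule_identifier=Token.as_string(aws_redshift_snapshot_schedule_default.id)
+            )
+```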
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Snapshot Schedule Associations using the `<cluster-identifier>/<schedule-identifier>`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Snapshot Schedule Associations using the `<cluster-identifier>/<schedule-identifier>`. For example:
+
+```console
+% terraform import aws_redshift_snapshot_schedule_association.default tf-redshift-cluster/tf-redshift-snapshot-schedule
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshift_subnet_group.html.markdown b/website/docs/cdktf/python/r/redshift_subnet_group.html.markdown
new file mode 100644
index 00000000000..1ff4192078e
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshift_subnet_group.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_subnet_group"
+description: |-
+  Provides a Redshift Subnet Group resource.
+---
+
+
+
+# Resource: aws_redshift_subnet_group
+
+Creates a new Amazon Redshift subnet group. You must provide a list of one or more subnets in your existing Amazon Virtual Private Cloud (Amazon VPC) when creating an Amazon Redshift subnet group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshift_subnet_group import RedshiftSubnetGroup
+from imports.aws.subnet import Subnet
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foo = Vpc(self, "foo",
+            cidr_block="10.1.0.0/16"
+        )
+        bar = Subnet(self, "bar",
+            availability_zone="us-west-2b",
+            cidr_block="10.1.2.0/24",
+            tags={
+                "Name": "tf-dbsubnet-test-2"
+            },
+            vpc_id=foo.id
+        )
+        aws_subnet_foo = Subnet(self, "foo_2",
+            availability_zone="us-west-2a",
+            cidr_block="10.1.1.0/24",
+            tags={
+                "Name": "tf-dbsubnet-test-1"
+            },
+            vpc_id=foo.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_subnet_foo.override_logical_id("foo")
+        aws_redshift_subnet_group_foo = RedshiftSubnetGroup(self, "foo_3",
+            name="foo",
+            subnet_ids=[Token.as_string(aws_subnet_foo.id), bar.id],
+            tags={
+                "environment": "Production"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_redshift_subnet_group_foo.override_logical_id("foo")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Redshift Subnet group.
+* `description` - (Optional) The description of the Redshift Subnet group. Defaults to "Managed by Terraform".
+* `subnet_ids` - (Required) An array of VPC subnet IDs.
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Redshift Subnet group name +* `id` - The Redshift Subnet group ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift subnet groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Redshift subnet groups using the `name`. For example: + +```console +% terraform import aws_redshift_subnet_group.testgroup1 test-cluster-subnet-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/redshift_usage_limit.html.markdown b/website/docs/cdktf/python/r/redshift_usage_limit.html.markdown new file mode 100644 index 00000000000..1acba50f1cb --- /dev/null +++ b/website/docs/cdktf/python/r/redshift_usage_limit.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Redshift" +layout: "aws" +page_title: "AWS: aws_redshift_usage_limit" +description: |- + Provides a Redshift Usage Limit resource. +--- + + + +# Resource: aws_redshift_usage_limit + +Creates a new Amazon Redshift Usage Limit. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.redshift_usage_limit import RedshiftUsageLimit +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RedshiftUsageLimit(self, "example", + amount=60, + cluster_identifier=Token.as_string(aws_redshift_cluster_example.id), + feature_type="concurrency-scaling", + limit_type="time" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `amount` - (Required) The limit amount. If time-based, this amount is in minutes. If data-based, this amount is in terabytes (TB). The value must be a positive number. +* `breach_action` - (Optional) The action that Amazon Redshift takes when the limit is reached. The default is `log`. Valid values are `log`, `emit-metric`, and `disable`. +* `cluster_identifier` - (Required) The identifier of the cluster that you want to limit usage. +* `feature_type` - (Required) The Amazon Redshift feature that you want to limit. Valid values are `spectrum`, `concurrency-scaling`, and `cross-region-datasharing`. +* `limit_type` - (Required) The type of limit. Depending on the feature type, this can be based on a time duration or data size. If FeatureType is `spectrum`, then LimitType must be `data-scanned`. 
If FeatureType is `concurrency-scaling`, then LimitType must be `time`. If FeatureType is `cross-region-datasharing`, then LimitType must be `data-scanned`. Valid values are `data-scanned`, and `time`. +* `period` - (Optional) The time period that the amount applies to. A weekly period begins on Sunday. The default is `monthly`. Valid values are `daily`, `weekly`, and `monthly`. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Redshift Usage Limit. +* `id` - The Redshift Usage Limit ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift usage limits using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Redshift usage limits using the `id`. For example: + +```console +% terraform import aws_redshift_usage_limit.example example-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/redshiftdata_statement.html.markdown b/website/docs/cdktf/python/r/redshiftdata_statement.html.markdown new file mode 100644 index 00000000000..19e2ea5f2d5 --- /dev/null +++ b/website/docs/cdktf/python/r/redshiftdata_statement.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Redshift Data" +layout: "aws" +page_title: "AWS: aws_redshiftdata_statement" +description: |- + Provides a Redshift Data Statement execution resource. +--- + + + +# Resource: aws_redshiftdata_statement + +Executes a Redshift Data Statement. + +## Example Usage + +### cluster_identifier + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.redshiftdata_statement import RedshiftdataStatement +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RedshiftdataStatement(self, "example", + cluster_identifier=Token.as_string(aws_redshift_cluster_example.cluster_identifier), + database=Token.as_string(aws_redshift_cluster_example.database_name), + db_user=Token.as_string(aws_redshift_cluster_example.master_username), + sql="CREATE GROUP group_name;" + ) +``` + +### workgroup_name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
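+# NOTE: `aws_redshiftserverless_workgroup_example` below is an unresolved
+# reference left by 'cdktf convert'; it refers to an
+# `aws_redshiftserverless_workgroup` resource defined elsewhere.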
+# +from imports.aws.redshiftdata_statement import RedshiftdataStatement +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RedshiftdataStatement(self, "example", + database="dev", + sql="CREATE GROUP group_name;", + workgroup_name=Token.as_string(aws_redshiftserverless_workgroup_example.workgroup_name) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `database` - (Required) The name of the database. +* `sql` - (Required) The SQL statement text to run. + +The following arguments are optional: + +* `cluster_identifier` - (Optional) The cluster identifier. This parameter is required when connecting to a cluster and authenticating using either Secrets Manager or temporary credentials. +* `db_user` - (Optional) The database user name. +* `secret_arn` - (Optional) The name or ARN of the secret that enables access to the database. +* `statement_name` - (Optional) The name of the SQL statement. You can name the SQL statement when you create it to identify the query. +* `with_event` - (Optional) A value that indicates whether to send an event to the Amazon EventBridge event bus after the SQL statement runs. +* `workgroup_name` - (Optional) The serverless workgroup name. This parameter is required when connecting to a serverless workgroup and authenticating using either Secrets Manager or temporary credentials. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Redshift Data Statement ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Data Statements using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Redshift Data Statements using the `id`. For example: + +```console +% terraform import aws_redshiftdata_statement.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/redshiftserverless_endpoint_access.html.markdown b/website/docs/cdktf/python/r/redshiftserverless_endpoint_access.html.markdown new file mode 100644 index 00000000000..8777b33ce90 --- /dev/null +++ b/website/docs/cdktf/python/r/redshiftserverless_endpoint_access.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Redshift Serverless" +layout: "aws" +page_title: "AWS: aws_redshiftserverless_endpoint_access" +description: |- + Provides a Redshift Serverless Endpoint Access resource. +--- + + + +# Resource: aws_redshiftserverless_endpoint_access + +Creates a new Amazon Redshift Serverless Endpoint Access. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
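+# NOTE: the endpoint's subnet IDs are supplied by the caller of this stack
+# via the `subnet_ids` constructor parameter below.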
+#
+from imports.aws.redshiftserverless_endpoint_access import RedshiftserverlessEndpointAccess
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, subnet_ids):
+        super().__init__(scope, name)
+        RedshiftserverlessEndpointAccess(self, "example",
+            endpoint_name="example",
+            workgroup_name="example",
+            subnet_ids=subnet_ids
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `endpoint_name` - (Required) The name of the endpoint.
+* `subnet_ids` - (Required) An array of VPC subnet IDs to associate with the endpoint.
+* `vpc_security_group_ids` - (Optional) An array of security group IDs to associate with the workgroup.
+* `workgroup_name` - (Required) The name of the workgroup.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:

+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Endpoint Access.
+* `id` - The Redshift Endpoint Access Name.
+* `address` - The DNS address of the VPC endpoint.
+* `port` - The port that Amazon Redshift Serverless listens on.
+* `vpc_endpoint` - The VPC endpoint of the Redshift Serverless workgroup. See `VPC Endpoint` below.
+
+#### VPC Endpoint
+
+* `vpc_endpoint_id` - The unique identifier of the VPC endpoint.
+* `vpc_id` - The identifier of the VPC that the endpoint is associated with.
+* `network_interface` - The network interfaces of the endpoint. See `Network Interface` below.
+
+##### Network Interface
+
+* `availability_zone` - The Availability Zone.
+* `network_interface_id` - The unique identifier of the network interface.
+* `private_ip_address` - The IPv4 address of the network interface within the subnet.
+* `subnet_id` - The unique identifier of the subnet.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Endpoint Access using the `endpoint_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Serverless Endpoint Access using the `endpoint_name`. For example:
+
+```console
+% terraform import aws_redshiftserverless_endpoint_access.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshiftserverless_namespace.html.markdown b/website/docs/cdktf/python/r/redshiftserverless_namespace.html.markdown
new file mode 100644
index 00000000000..ca32915c6b3
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshiftserverless_namespace.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_namespace"
+description: |-
+  Provides a Redshift Serverless Namespace resource.
+---
+
+
+
+# Resource: aws_redshiftserverless_namespace
+
+Creates a new Amazon Redshift Serverless Namespace.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
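+# NOTE: only `namespace_name` is required; all other settings, including the
+# admin credentials, are optional (see Argument Reference below).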
+# +from imports.aws.redshiftserverless_namespace import RedshiftserverlessNamespace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RedshiftserverlessNamespace(self, "example", + namespace_name="concurrency-scaling" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `admin_user_password` - (Optional) The password of the administrator for the first database created in the namespace. +* `admin_username` - (Optional) The username of the administrator for the first database created in the namespace. +* `db_name` - (Optional) The name of the first database created in the namespace. +* `default_iam_role_arn` - (Optional) The Amazon Resource Name (ARN) of the IAM role to set as a default in the namespace. When specifying `default_iam_role_arn`, it also must be part of `iam_roles`. +* `iam_roles` - (Optional) A list of IAM roles to associate with the namespace. +* `kms_key_id` - (Optional) The ARN of the Amazon Web Services Key Management Service key used to encrypt your data. +* `log_exports` - (Optional) The types of logs the namespace can export. Available export types are `userlog`, `connectionlog`, and `useractivitylog`. +* `namespace_name` - (Required) The name of the namespace. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Namespace. +* `id` - The Redshift Namespace Name. +* `namespace_id` - The Redshift Namespace ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Namespaces using the `namespace_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Redshift Serverless Namespaces using the `namespace_name`. For example: + +```console +% terraform import aws_redshiftserverless_namespace.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/redshiftserverless_resource_policy.html.markdown b/website/docs/cdktf/python/r/redshiftserverless_resource_policy.html.markdown new file mode 100644 index 00000000000..ff055895775 --- /dev/null +++ b/website/docs/cdktf/python/r/redshiftserverless_resource_policy.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Redshift Serverless" +layout: "aws" +page_title: "AWS: aws_redshiftserverless_resource_policy" +description: |- + Provides a Redshift Serverless Resource Policy resource. +--- + + + +# Resource: aws_redshiftserverless_resource_policy + +Creates a new Amazon Redshift Serverless Resource Policy. 
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.redshiftserverless_resource_policy import RedshiftserverlessResourcePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # NOTE: `aws_redshiftserverless_snapshot_example` is an unresolved reference left by 'cdktf convert'.
+        RedshiftserverlessResourcePolicy(self, "example",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": ["redshift-serverless:RestoreFromSnapshot"],
+                        "Effect": "Allow",
+                        "Principal": {
+                            "AWS": ["12345678901"]
+                        },
+                        "Sid": ""
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                })),
+            resource_arn=Token.as_string(aws_redshiftserverless_snapshot_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the account to create or update a resource policy for.
+* `policy` - (Required) The policy to create or update. For example, the policy in the example above grants a user authorization to restore a snapshot.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the account to create or update a resource policy for.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Resource Policies using the `resource_arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Serverless Resource Policies using the `resource_arn`. For example:
+
+```console
+% terraform import aws_redshiftserverless_resource_policy.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshiftserverless_snapshot.html.markdown b/website/docs/cdktf/python/r/redshiftserverless_snapshot.html.markdown
new file mode 100644
index 00000000000..d9cc2916a89
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshiftserverless_snapshot.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_snapshot"
+description: |-
+  Provides a Redshift Serverless Snapshot resource.
+---
+
+
+
+# Resource: aws_redshiftserverless_snapshot
+
+Creates a new Amazon Redshift Serverless Snapshot.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
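+# NOTE: `aws_redshiftserverless_workgroup_example` below is an unresolved
+# reference left by 'cdktf convert'; it refers to an
+# `aws_redshiftserverless_workgroup` resource defined elsewhere.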
+#
+from imports.aws.redshiftserverless_snapshot import RedshiftserverlessSnapshot
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RedshiftserverlessSnapshot(self, "example",
+            namespace_name=Token.as_string(aws_redshiftserverless_workgroup_example.namespace_name),
+            snapshot_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `namespace_name` - (Required) The namespace to create a snapshot for.
+* `snapshot_name` - (Required) The name of the snapshot.
+* `retention_period` - (Optional) How long to retain the created snapshot. Default value is `-1`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `accounts_with_provisioned_restore_access` - All of the Amazon Web Services accounts that have access to restore a snapshot to a provisioned cluster.
+* `accounts_with_restore_access` - All of the Amazon Web Services accounts that have access to restore a snapshot to a namespace.
+* `admin_username` - The username of the database within a snapshot.
+* `arn` - The Amazon Resource Name (ARN) of the snapshot.
+* `id` - The name of the snapshot.
+* `kms_key_id` - The unique identifier of the KMS key used to encrypt the snapshot.
+* `namespace_arn` - The Amazon Resource Name (ARN) of the namespace the snapshot was created from.
+* `owner_account` - The owner Amazon Web Services account of the snapshot.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Snapshots using the `snapshot_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Serverless Snapshots using the `snapshot_name`. For example:
+
+```console
+% terraform import aws_redshiftserverless_snapshot.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshiftserverless_usage_limit.html.markdown b/website/docs/cdktf/python/r/redshiftserverless_usage_limit.html.markdown
new file mode 100644
index 00000000000..b19173e65b4
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshiftserverless_usage_limit.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_usage_limit"
+description: |-
+  Provides a Redshift Serverless Usage Limit resource.
+---
+
+
+
+# Resource: aws_redshiftserverless_usage_limit
+
+Creates a new Amazon Redshift Serverless Usage Limit.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
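+# NOTE: `aws_redshiftserverless_namespace_example` below is an unresolved
+# reference left by 'cdktf convert'; it refers to an
+# `aws_redshiftserverless_namespace` resource defined elsewhere.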
+#
+from imports.aws.redshiftserverless_usage_limit import RedshiftserverlessUsageLimit
+from imports.aws.redshiftserverless_workgroup import RedshiftserverlessWorkgroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = RedshiftserverlessWorkgroup(self, "example",
+            namespace_name=Token.as_string(aws_redshiftserverless_namespace_example.namespace_name),
+            workgroup_name="example"
+        )
+        aws_redshiftserverless_usage_limit_example = RedshiftserverlessUsageLimit(self, "example_1",
+            amount=60,
+            resource_arn=example.arn,
+            usage_type="serverless-compute"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_redshiftserverless_usage_limit_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `amount` - (Required) The limit amount. If time-based, this amount is in Redshift Processing Units (RPU) consumed per hour. If data-based, this amount is in terabytes (TB) of data transferred between Regions in cross-account sharing. The value must be a positive number.
+* `breach_action` - (Optional) The action that Amazon Redshift Serverless takes when the limit is reached. Valid values are `log`, `emit-metric`, and `deactivate`. The default is `log`.
+* `period` - (Optional) The time period that the amount applies to. A weekly period begins on Sunday. Valid values are `daily`, `weekly`, and `monthly`. The default is `monthly`.
+* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the Amazon Redshift Serverless resource to create the usage limit for.
+* `usage_type` - (Required) The type of Amazon Redshift Serverless usage to create a usage limit for. Valid values are `serverless-compute` or `cross-region-datasharing`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Usage Limit.
+* `id` - The Redshift Usage Limit ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Usage Limits using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Serverless Usage Limits using the `id`. For example:
+
+```console
+% terraform import aws_redshiftserverless_usage_limit.example example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/redshiftserverless_workgroup.html.markdown b/website/docs/cdktf/python/r/redshiftserverless_workgroup.html.markdown
new file mode 100644
index 00000000000..13851dda52f
--- /dev/null
+++ b/website/docs/cdktf/python/r/redshiftserverless_workgroup.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_workgroup"
+description: |-
+  Provides a Redshift Serverless Workgroup resource.
+---
+
+
+
+# Resource: aws_redshiftserverless_workgroup
+
+Creates a new Amazon Redshift Serverless Workgroup.
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.redshiftserverless_workgroup import RedshiftserverlessWorkgroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RedshiftserverlessWorkgroup(self, "example", + namespace_name="concurrency-scaling", + workgroup_name="concurrency-scaling" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `namespace_name` - (Required) The name of the namespace. +* `workgroup_name` - (Required) The name of the workgroup. + +The following arguments are optional: + +* `base_capacity` - (Optional) The base data warehouse capacity of the workgroup in Redshift Processing Units (RPUs). +* `config_parameter` - (Optional) An array of parameters to set for more control over a serverless database. See `Config Parameter` below. +* `enhanced_vpc_routing` - (Optional) The value that specifies whether to turn on enhanced virtual private cloud (VPC) routing, which forces Amazon Redshift Serverless to route traffic through your VPC instead of over the internet. +* `publicly_accessible` - (Optional) A value that specifies whether the workgroup can be accessed from a public network. +* `security_group_ids` - (Optional) An array of security group IDs to associate with the workgroup. +* `subnet_ids` - (Optional) An array of VPC subnet IDs to associate with the workgroup. When set, must contain at least three subnets spanning three Availability Zones. A minimum number of IP addresses is required and scales with the Base Capacity. For more information, see the following [AWS document](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-known-issues.html). +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Config Parameter + +* `parameter_key` - (Required) The key of the parameter. The options are `auto_mv`, `datestyle`, `enable_case_sensitive_identifier`, `enable_user_activity_logging`, `query_group`, `search_path` and [query monitoring metrics](https://docs.aws.amazon.com/redshift/latest/dg/cm-c-wlm-query-monitoring-rules.html#cm-c-wlm-query-monitoring-metrics-serverless) that let you define performance boundaries: `max_query_cpu_time`, `max_query_blocks_read`, `max_scan_row_count`, `max_query_execution_time`, `max_query_queue_time`, `max_query_cpu_usage_percent`, `max_query_temp_blocks_to_disk`, `max_join_row_count` and `max_nested_loop_join_row_count`. +* `parameter_value` - (Required) The value of the parameter to set. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Workgroup. +* `id` - The Redshift Workgroup Name. +* `workgroup_id` - The Redshift Workgroup ID. +* `endpoint` - The endpoint that is created from the workgroup. See `Endpoint` below. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### Endpoint
+
+* `address` - The DNS address of the VPC endpoint.
+* `port` - The port that Amazon Redshift Serverless listens on.
+* `vpc_endpoint` - The VPC endpoint of the Redshift Serverless workgroup. See `VPC Endpoint` below.
+
+#### VPC Endpoint
+
+* `vpc_endpoint_id` - The unique identifier of the VPC endpoint.
+* `vpc_id` - The identifier of the VPC that the endpoint is associated with.
+* `network_interface` - The network interfaces of the endpoint. See `Network Interface` below.
+
+##### Network Interface
+
+* `availability_zone` - The Availability Zone.
+* `network_interface_id` - The unique identifier of the network interface.
+* `private_ip_address` - The IPv4 address of the network interface within the subnet.
+* `subnet_id` - The unique identifier of the subnet.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `update` - (Default `20m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Workgroups using the `workgroup_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Redshift Serverless Workgroups using the `workgroup_name`. For example:
+
+```console
+% terraform import aws_redshiftserverless_workgroup.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/resourceexplorer2_index.html.markdown b/website/docs/cdktf/python/r/resourceexplorer2_index.html.markdown
new file mode 100644
index 00000000000..5e29e4e2195
--- /dev/null
+++ b/website/docs/cdktf/python/r/resourceexplorer2_index.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Resource Explorer"
+layout: "aws"
+page_title: "AWS: aws_resourceexplorer2_index"
+description: |-
+  Provides a resource to manage a Resource Explorer index in the current AWS Region.
+---
+
+
+
+# Resource: aws_resourceexplorer2_index
+
+Provides a resource to manage a Resource Explorer index in the current AWS Region.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.resourceexplorer2_index import Resourceexplorer2Index
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Resourceexplorer2Index(self, "example",
+            type="LOCAL"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `type` - (Required) The type of the index. Valid values: `AGGREGATOR`, `LOCAL`. To understand the difference between `LOCAL` and `AGGREGATOR`, see the [_AWS Resource Explorer User Guide_](https://docs.aws.amazon.com/resource-explorer/latest/userguide/manage-aggregator-region.html).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `2h`)
+- `update` - (Default `2h`)
+- `delete` - (Default `10m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Resource Explorer index.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Resource Explorer indexes using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Resource Explorer indexes using the `arn`. For example:
+
+```console
+% terraform import aws_resourceexplorer2_index.example arn:aws:resource-explorer-2:us-east-1:123456789012:index/6047ac4e-207e-4487-9bcf-cb53bb0ff5cc
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/resourceexplorer2_view.html.markdown b/website/docs/cdktf/python/r/resourceexplorer2_view.html.markdown
new file mode 100644
index 00000000000..b144dbde0fd
--- /dev/null
+++ b/website/docs/cdktf/python/r/resourceexplorer2_view.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "Resource Explorer"
+layout: "aws"
+page_title: "AWS: aws_resourceexplorer2_view"
+description: |-
+  Provides a resource to manage a Resource Explorer view.
+---
+
+
+
+# Resource: aws_resourceexplorer2_view
+
+Provides a resource to manage a Resource Explorer view.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.resourceexplorer2_index import Resourceexplorer2Index
+# The config struct classes are exported from the same generated module as the resource class.
+from imports.aws.resourceexplorer2_view import Resourceexplorer2View, Resourceexplorer2ViewFilters, Resourceexplorer2ViewIncludedProperty
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Resourceexplorer2Index(self, "example",
+            type="LOCAL"
+        )
+        aws_resourceexplorer2_view_example = Resourceexplorer2View(self, "example_1",
+            depends_on=[example],
+            filters=[Resourceexplorer2ViewFilters(
+                filter_string="resourcetype:ec2:instance"
+            )
+            ],
+            included_property=[Resourceexplorer2ViewIncludedProperty(
+                name="tags"
+            )
+            ],
+            name="exampleview"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_resourceexplorer2_view_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `default_view` - (Optional) Specifies whether the view is the [_default view_](https://docs.aws.amazon.com/resource-explorer/latest/userguide/manage-views-about.html#manage-views-about-default) for the AWS Region. Default: `false`. +* `filters` - (Optional) Specifies which resources are included in the results of queries made using this view. See [Filters](#filters) below for more details. +* `included_property` - (Optional) Optional fields to be included in search results from this view. See [Included Properties](#included-properties) below for more details. +* `name` - (Required) The name of the view. The name must be no more than 64 characters long, and can include letters, digits, and the dash (-) character. The name must be unique within its AWS Region. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Filters + +The `filters` block supports the following: + +* `filter_string` - (Required) The string that contains the search keywords, prefixes, and operators to control the results that can be returned by a search operation. For more details, see [Search query syntax](https://docs.aws.amazon.com/resource-explorer/latest/userguide/using-search-query-syntax.html). + +### Included Properties + +The `included_property` block supports the following: + +* `name` - (Required) The name of the property that is included in this view. Valid values: `tags`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Resource Explorer view. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Resource Explorer views using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Resource Explorer views using the `arn`. For example: + +```console +% terraform import aws_resourceexplorer2_view.example arn:aws:resource-explorer-2:us-west-2:123456789012:view/exampleview/e0914f6c-6c27-4b47-b5d4-6b28381a2421 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/resourcegroups_group.html.markdown b/website/docs/cdktf/python/r/resourcegroups_group.html.markdown new file mode 100644 index 00000000000..59d933be626 --- /dev/null +++ b/website/docs/cdktf/python/r/resourcegroups_group.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Resource Groups" +layout: "aws" +page_title: "AWS: aws_resourcegroups_group" +description: |- + Provides a Resource Group. +--- + + + +# Resource: aws_resourcegroups_group + +Provides a Resource Group. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.resourcegroups_group import ResourcegroupsGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ResourcegroupsGroup(self, "test", + name="test-group", + resource_query=ResourcegroupsGroupResourceQuery( + query="{\n \"ResourceTypeFilters\": [\n \"AWS::EC2::Instance\"\n ],\n \"TagFilters\": [\n {\n \"Key\": \"Stage\",\n \"Values\": [\"Test\"]\n }\n ]\n}\n\n" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The resource group's name. A resource group name can have a maximum of 127 characters, including letters, numbers, hyphens, dots, and underscores. The name cannot start with `AWS` or `aws`. +* `configuration` - (Optional) A configuration associates the resource group with an AWS service and specifies how the service can interact with the resources in the group. See below for details. +* `description` - (Optional) A description of the resource group. +* `resource_query` - (Required) A `resource_query` block. Resource queries are documented below. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `resource_query` block supports the following arguments: + +* `query` - (Required) The resource query as a JSON string. +* `type` - (Required) The type of the resource query. Defaults to `TAG_FILTERS_1_0`. + +The `configuration` block supports the following arguments: + +* `type` - (Required) Specifies the type of group configuration item. +* `parameters` - (Optional) A collection of parameters for this group configuration item. See below for details. + +The `parameters` block supports the following arguments: + +* `name` - (Required) The name of the group configuration parameter. +* `values` - (Optional) The value or values to be used for the specified parameter. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS for this resource group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import resource groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import resource groups using the `name`. 
For example: + +```console +% terraform import aws_resourcegroups_group.foo resource-group-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/resourcegroups_resource.html.markdown b/website/docs/cdktf/python/r/resourcegroups_resource.html.markdown new file mode 100644 index 00000000000..e965b8562a5 --- /dev/null +++ b/website/docs/cdktf/python/r/resourcegroups_resource.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Resource Groups" +layout: "aws" +page_title: "AWS: aws_resourcegroups_resource" +description: |- + Terraform resource for managing an AWS Resource Groups Resource. +--- + + + +# Resource: aws_resourcegroups_resource + +Terraform resource for managing an AWS Resource Groups Resource. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ec2_host import Ec2Host +from imports.aws.resourcegroups_group import ResourcegroupsGroup +from imports.aws.resourcegroups_resource import ResourcegroupsResource +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Ec2Host(self, "example", + auto_placement="on", + availability_zone="us-east-1a", + host_recovery="off", + instance_family="t3" + ) + aws_resourcegroups_group_example = ResourcegroupsGroup(self, "example_1", + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_resourcegroups_group_example.override_logical_id("example") + aws_resourcegroups_resource_example = ResourcegroupsResource(self, "example_2", + group_arn=Token.as_string(aws_resourcegroups_group_example.arn), + resource_arn=example.arn + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_resourcegroups_resource_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `group_arn` - (Required) The name or the ARN of the resource group to add resources to. + +The following arguments are optional: + +* `resource_arn` - (Required) The ARN of the resource to be added to the group. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `resource_type` - The resource type of a resource, such as `AWS::EC2::Instance`. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `delete` - (Default `5m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/rolesanywhere_profile.html.markdown b/website/docs/cdktf/python/r/rolesanywhere_profile.html.markdown new file mode 100644 index 00000000000..2b8c2cb60b0 --- /dev/null +++ b/website/docs/cdktf/python/r/rolesanywhere_profile.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Roles Anywhere" +layout: "aws" +page_title: "AWS: aws_rolesanywhere_profile" +description: |- + Provides a Roles Anywhere Profile resource +--- + + + +# Resource: aws_rolesanywhere_profile + +Terraform resource for managing a Roles Anywhere Profile. 
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.iam_role import IamRole +from imports.aws.rolesanywhere_profile import RolesanywhereProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = IamRole(self, "test", + assume_role_policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["sts:AssumeRole", "sts:TagSession", "sts:SetSourceIdentity" + ], + "Effect": "Allow", + "Principal": { + "Service": "rolesanywhere.amazonaws.com" + }, + "Sid": "" + } + ], + "Version": "2012-10-17" + })), + name="test", + path="/" + ) + aws_rolesanywhere_profile_test = RolesanywhereProfile(self, "test_1", + name="example", + role_arns=[test.arn] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_rolesanywhere_profile_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `duration_seconds` - (Optional) The number of seconds the vended session credentials are valid for. Defaults to 3600. +* `enabled` - (Optional) Whether or not the Profile is enabled. +* `managed_policy_arns` - (Optional) A list of managed policy ARNs that apply to the vended session credentials. +* `name` - (Required) The name of the Profile. +* `require_instance_properties` - (Optional) Specifies whether instance properties are required in [CreateSession](https://docs.aws.amazon.com/rolesanywhere/latest/APIReference/API_CreateSession.html) requests with this profile. +* `role_arns` - (Required) A list of IAM roles that this profile can assume +* `session_policy` - (Optional) A session policy that applies to the trust boundary of the vended session credentials. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Profile +* `id` - The Profile ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_rolesanywhere_profile` using its `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_rolesanywhere_profile` using its `id`. 
For example:

```console
% terraform import aws_rolesanywhere_profile.example db138a85-8925-4f9f-a409-08231233cacf
```

 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rolesanywhere_trust_anchor.html.markdown b/website/docs/cdktf/python/r/rolesanywhere_trust_anchor.html.markdown
new file mode 100644
index 00000000000..8f26e9cdbcd
--- /dev/null
+++ b/website/docs/cdktf/python/r/rolesanywhere_trust_anchor.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "Roles Anywhere"
+layout: "aws"
+page_title: "AWS: aws_rolesanywhere_trust_anchor"
+description: |-
+  Provides a Roles Anywhere Trust Anchor resource
+---
+
+
+
+# Resource: aws_rolesanywhere_trust_anchor
+
+Terraform resource for managing a Roles Anywhere Trust Anchor.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+# The config struct classes are exported from the same generated modules as the resource classes.
+from imports.aws.acmpca_certificate import AcmpcaCertificate, AcmpcaCertificateValidity
+from imports.aws.acmpca_certificate_authority import AcmpcaCertificateAuthority, AcmpcaCertificateAuthorityCertificateAuthorityConfiguration, AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject
+from imports.aws.acmpca_certificate_authority_certificate import AcmpcaCertificateAuthorityCertificate
+from imports.aws.data_aws_partition import DataAwsPartition
+from imports.aws.rolesanywhere_trust_anchor import RolesanywhereTrustAnchor, RolesanywhereTrustAnchorSource, RolesanywhereTrustAnchorSourceSourceData
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = AcmpcaCertificateAuthority(self, "example",
+            certificate_authority_configuration=AcmpcaCertificateAuthorityCertificateAuthorityConfiguration(
+                key_algorithm="RSA_4096",
+                signing_algorithm="SHA512WITHRSA",
+                subject=AcmpcaCertificateAuthorityCertificateAuthorityConfigurationSubject(
+                    common_name="example.com"
+                )
+            ),
+            permanent_deletion_time_in_days=7,
+            type="ROOT"
+        )
+        # NOTE: `aws_acmpca_certificate_example` is an unresolved reference left by 'cdktf convert'.
+        aws_acmpca_certificate_authority_certificate_example = AcmpcaCertificateAuthorityCertificate(self, "example_1",
+            certificate=Token.as_string(aws_acmpca_certificate_example.certificate),
+            certificate_authority_arn=example.arn,
+            certificate_chain=Token.as_string(aws_acmpca_certificate_example.certificate_chain)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_acmpca_certificate_authority_certificate_example.override_logical_id("example")
+        RolesanywhereTrustAnchor(self, "test",
+            depends_on=[aws_acmpca_certificate_authority_certificate_example],
+            name="example",
+            source=RolesanywhereTrustAnchorSource(
+                source_data=RolesanywhereTrustAnchorSourceSourceData(
+                    acm_pca_arn=example.arn
+                ),
+                source_type="AWS_ACM_PCA"
+            )
+        )
+        current = DataAwsPartition(self, "current")
+        aws_acmpca_certificate_test = AcmpcaCertificate(self, "test_4",
+            certificate_authority_arn=example.arn,
+            certificate_signing_request=example.certificate_signing_request,
+            signing_algorithm="SHA512WITHRSA",
+            template_arn="arn:${" + current.partition + "}:acm-pca:::template/RootCACertificate/V1",
+            validity=AcmpcaCertificateValidity(
+                type="YEARS",
+                value=Token.as_string(1)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_acmpca_certificate_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `enabled` - (Optional) Whether or not the Trust Anchor should be enabled. +* `name` - (Required) The name of the Trust Anchor. +* `source` - (Required) The source of trust, documented below +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Nested Blocks + +#### `source` + +* `source_data` - (Required) The data denoting the source of trust, documented below +* `source_type` - (Required) The type of the source of trust. Must be either `AWS_ACM_PCA` or `CERTIFICATE_BUNDLE`. + +#### `source_data` + +* `acm_pca_arn` - (Optional, required when `source_type` is `AWS_ACM_PCA`) The ARN of an ACM Private Certificate Authority. +* `x509_certificate_data` - (Optional, required when `source_type` is `CERTIFICATE_BUNDLE`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Trust Anchor +* `id` - The Trust Anchor ID. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_rolesanywhere_trust_anchor` using its `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_rolesanywhere_trust_anchor` using its `id`. For example: + +```console +% terraform import aws_rolesanywhere_trust_anchor.example 92b2fbbb-984d-41a3-a765-e3cbdb69ebb1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route.html.markdown b/website/docs/cdktf/python/r/route.html.markdown new file mode 100644 index 00000000000..7bd341a0b2e --- /dev/null +++ b/website/docs/cdktf/python/r/route.html.markdown @@ -0,0 +1,175 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_route" +description: |- + Provides a resource to create a routing entry in a VPC routing table. +--- + + + +# Resource: aws_route + +Provides a resource to create a routing table entry (a route) in a VPC routing table. + +~> **NOTE on Route Tables and Routes:** Terraform currently provides both a standalone Route resource and a [Route Table](route_table.html) resource with routes defined in-line. At this time you cannot use a Route Table with in-line routes in conjunction with any Route resources. Doing so will cause a conflict of rule settings and will overwrite rules. + +~> **NOTE on `gateway_id` attribute:** The AWS API is very forgiving with the resource ID passed in the `gateway_id` attribute. 
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Trust Anchor.
+* `id` - The Trust Anchor ID.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_rolesanywhere_trust_anchor` using its `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_rolesanywhere_trust_anchor` using its `id`. For example:
+
+```console
+% terraform import aws_rolesanywhere_trust_anchor.example 92b2fbbb-984d-41a3-a765-e3cbdb69ebb1
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route.html.markdown b/website/docs/cdktf/python/r/route.html.markdown
new file mode 100644
index 00000000000..7bd341a0b2e
--- /dev/null
+++ b/website/docs/cdktf/python/r/route.html.markdown
@@ -0,0 +1,175 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route"
+description: |-
+  Provides a resource to create a routing entry in a VPC routing table.
+---
+
+
+
+# Resource: aws_route
+
+Provides a resource to create a routing table entry (a route) in a VPC routing table.
+
+~> **NOTE on Route Tables and Routes:** Terraform currently provides both a standalone Route resource and a [Route Table](route_table.html) resource with routes defined in-line. At this time you cannot use a Route Table with in-line routes in conjunction with any Route resources. Doing so will cause a conflict of rule settings and will overwrite rules.
+
+~> **NOTE on `gateway_id` attribute:** The AWS API is very forgiving with the resource ID passed in the `gateway_id` attribute. For example, an `aws_route` resource can be created with an [`aws_nat_gateway`](nat_gateway.html) or [`aws_egress_only_internet_gateway`](egress_only_internet_gateway.html) ID specified for the `gateway_id` attribute. Specifying anything other than an [`aws_internet_gateway`](internet_gateway.html) or [`aws_vpn_gateway`](vpn_gateway.html) ID will lead to Terraform reporting a permanent diff between your configuration and recorded state, as the AWS API returns the more-specific attribute. If you are experiencing constant diffs with an `aws_route` resource, the first thing to check is that the correct attribute is being specified.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route import Route
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route(self, "r",
+            depends_on=[testing],
+            destination_cidr_block="10.0.1.0/22",
+            route_table_id="rtb-4fbb3ac4",
+            vpc_peering_connection_id="pcx-45ff3dc1"
+        )
+```
+
+## Example IPv6 Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.egress_only_internet_gateway import EgressOnlyInternetGateway
+from imports.aws.route import Route
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        vpc = Vpc(self, "vpc",
+            assign_generated_ipv6_cidr_block=True,
+            cidr_block="10.1.0.0/16"
+        )
+        egress = EgressOnlyInternetGateway(self, "egress",
+            vpc_id=vpc.id
+        )
+        Route(self, "r",
+            destination_ipv6_cidr_block="::/0",
+            egress_only_gateway_id=egress.id,
+            route_table_id="rtb-4fbb3ac4"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `route_table_id` - (Required) The ID of the routing table.
+
+One of the following destination arguments must be supplied:
+
+* `destination_cidr_block` - (Optional) The destination CIDR block.
+* `destination_ipv6_cidr_block` - (Optional) The destination IPv6 CIDR block.
+* `destination_prefix_list_id` - (Optional) The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination.
+
+One of the following target arguments must be supplied:
+
+* `carrier_gateway_id` - (Optional) Identifier of a carrier gateway. This attribute can only be used when the VPC contains a subnet which is associated with a Wavelength Zone.
+* `core_network_arn` - (Optional) The Amazon Resource Name (ARN) of a core network.
+* `egress_only_gateway_id` - (Optional) Identifier of a VPC Egress Only Internet Gateway.
+* `gateway_id` - (Optional) Identifier of a VPC internet gateway or a virtual private gateway. Specify `local` when updating a previously [imported](#import) local route.
+* `nat_gateway_id` - (Optional) Identifier of a VPC NAT gateway.
+* `local_gateway_id` - (Optional) Identifier of an Outpost local gateway.
+* `network_interface_id` - (Optional) Identifier of an EC2 network interface.
+* `transit_gateway_id` - (Optional) Identifier of an EC2 Transit Gateway.
+* `vpc_endpoint_id` - (Optional) Identifier of a VPC Endpoint.
+* `vpc_peering_connection_id` - (Optional) Identifier of a VPC peering connection.
+
+Note that the default route, mapping the VPC's CIDR block to "local", is created implicitly and cannot be specified.
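+
+As an illustrative sketch (not from the original page; the transit gateway ID is hypothetical, the other IDs reuse values from the examples on this page), a route combines exactly one destination argument with one target argument:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.route import Route
+class PrefixListRouteSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Destination: a managed prefix list; target: an EC2 Transit Gateway.
+        Route(self, "to_tgw",
+            route_table_id="rtb-4fbb3ac4",
+            destination_prefix_list_id="pl-0570a1d2d725c16be",
+            transit_gateway_id="tgw-0fedcba9876543210"
+        )
+```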
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+~> **NOTE:** Only the arguments that are configured (one of the above) will be exported as an attribute once the resource is created.
+
+* `id` - Route identifier computed from the routing table identifier and route destination.
+* `instance_id` - Identifier of an EC2 instance.
+* `instance_owner_id` - The AWS account ID of the owner of the EC2 instance.
+* `origin` - How the route was created - `CreateRouteTable`, `CreateRoute` or `EnableVgwRoutePropagation`.
+* `state` - The state of the route - `active` or `blackhole`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `2m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import individual routes using `ROUTETABLEID_DESTINATION`. Import [local routes](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html#RouteTables) using the VPC's IPv4 or IPv6 CIDR blocks. For example:
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv4 destination CIDR of `10.42.0.0/16`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv6 destination CIDR of `2620:0:2d0:200::8/125`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Import a route in route table `rtb-656C65616E6F72` with a managed prefix list destination of `pl-0570a1d2d725c16be`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+**Using `terraform import` to import** individual routes using `ROUTETABLEID_DESTINATION`. Import [local routes](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html#RouteTables) using the VPC's IPv4 or IPv6 CIDR blocks.
For example:
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv4 destination CIDR of `10.42.0.0/16`:
+
+```console
+% terraform import aws_route.my_route rtb-656C65616E6F72_10.42.0.0/16
+```
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv6 destination CIDR of `2620:0:2d0:200::8/125`:
+
+```console
+% terraform import aws_route.my_route rtb-656C65616E6F72_2620:0:2d0:200::8/125
+```
+
+Import a route in route table `rtb-656C65616E6F72` with a managed prefix list destination of `pl-0570a1d2d725c16be`:
+
+```console
+% terraform import aws_route.my_route rtb-656C65616E6F72_pl-0570a1d2d725c16be
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_cidr_collection.html.markdown b/website/docs/cdktf/python/r/route53_cidr_collection.html.markdown
new file mode 100644
index 00000000000..49e03410693
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_cidr_collection.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_cidr_collection"
+description: |-
+  Provides a Route53 CIDR collection resource.
+---
+
+
+
+# Resource: aws_route53_cidr_collection
+
+Provides a Route53 CIDR collection resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_cidr_collection import Route53CidrCollection
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53CidrCollection(self, "example",
+            name="collection-1"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Unique name for the CIDR collection.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the CIDR collection.
+* `id` - The CIDR collection ID.
+* `version` - The latest version of the CIDR collection.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CIDR collections using their ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CIDR collections using their ID. For example:
+
+```console
+% terraform import aws_route53_cidr_collection.example 9ac32814-3e67-0932-6048-8d779cc6f511
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_cidr_location.html.markdown b/website/docs/cdktf/python/r/route53_cidr_location.html.markdown
new file mode 100644
index 00000000000..93b49baba7b
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_cidr_location.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_cidr_location"
+description: |-
+  Provides a Route53 CIDR location resource.
+---
+
+
+
+# Resource: aws_route53_cidr_location
+
+Provides a Route53 CIDR location resource.
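+
+A CIDR location groups CIDR blocks within a collection and is referenced by name from an [`aws_route53_record`](route53_record.html) `cidr_routing_policy` block. As a rough sketch (not from the original page; IDs are illustrative, and the `Route53RecordCidrRoutingPolicy` binding name is assumed to follow the generated naming convention):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.route53_record import Route53Record, Route53RecordCidrRoutingPolicy
+class CidrRoutedRecordSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Queries from resolvers inside the location's CIDR blocks receive this answer.
+        Route53Record(self, "office",
+            zone_id="Z4KAPRWWNC7JR",
+            name="app.example.com",
+            type="A",
+            ttl=300,
+            records=["10.0.0.10"],
+            set_identifier="office",
+            cidr_routing_policy=Route53RecordCidrRoutingPolicy(
+                collection_id="9ac32814-3e67-0932-6048-8d779cc6f511",
+                location_name="office"
+            )
+        )
+```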
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_cidr_collection import Route53CidrCollection
+from imports.aws.route53_cidr_location import Route53CidrLocation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Route53CidrCollection(self, "example",
+            name="collection-1"
+        )
+        aws_route53_cidr_location_example = Route53CidrLocation(self, "example_1",
+            cidr_blocks=["200.5.3.0/24", "200.6.3.0/24"],
+            cidr_collection_id=example.id,
+            name="office"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_cidr_location_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cidr_blocks` - (Required) CIDR blocks for the location.
+* `cidr_collection_id` - (Required) The ID of the CIDR collection to update.
+* `name` - (Required) Name for the CIDR location.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CIDR location ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CIDR locations using the CIDR collection ID and location name, separated by a comma (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CIDR locations using the CIDR collection ID and location name, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_route53_cidr_location.example 9ac32814-3e67-0932-6048-8d779cc6f511,office
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_delegation_set.html.markdown b/website/docs/cdktf/python/r/route53_delegation_set.html.markdown
new file mode 100644
index 00000000000..83027316b23
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_delegation_set.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_delegation_set"
+description: |-
+  Provides a Route53 Delegation Set resource.
+---
+
+
+
+# Resource: aws_route53_delegation_set
+
+Provides a [Route53 Delegation Set](https://docs.aws.amazon.com/Route53/latest/APIReference/API-actions-by-function.html#actions-by-function-reusable-delegation-sets) resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_delegation_set import Route53DelegationSet
+from imports.aws.route53_zone import Route53Zone
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        main = Route53DelegationSet(self, "main",
+            reference_name="DynDNS"
+        )
+        Route53Zone(self, "primary",
+            delegation_set_id=main.id,
+            name="hashicorp.com"
+        )
+        Route53Zone(self, "secondary",
+            delegation_set_id=main.id,
+            name="terraform.io"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `reference_name` - (Optional) This is a reference name used in Caller Reference
+  (helpful for identifying a single delegation set amongst others).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Delegation Set.
+* `id` - The delegation set ID.
+* `name_servers` - A list of authoritative name servers for the hosted zone
+  (effectively a list of NS records).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Delegation Sets using the delegation set `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Delegation Sets using the delegation set `id`. For example:
+
+```console
+% terraform import aws_route53_delegation_set.set1 N1PA6795SAMPLE
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_health_check.html.markdown b/website/docs/cdktf/python/r/route53_health_check.html.markdown
new file mode 100644
index 00000000000..89e92fb0dde
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_health_check.html.markdown
@@ -0,0 +1,187 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_health_check"
+description: |-
+  Provides a Route53 health check.
+---
+
+
+# Resource: aws_route53_health_check
+
+Provides a Route53 health check.
+
+## Example Usage
+
+### Connectivity and HTTP Status Code Check
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_health_check import Route53HealthCheck
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53HealthCheck(self, "example",
+            failure_threshold=Token.as_number("5"),
+            fqdn="example.com",
+            port=80,
+            request_interval=Token.as_number("30"),
+            resource_path="/",
+            tags={
+                "Name": "tf-test-health-check"
+            },
+            type="HTTP"
+        )
+```
+
+### Connectivity and String Matching Check
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_health_check import Route53HealthCheck
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53HealthCheck(self, "example",
+            failure_threshold=Token.as_number("5"),
+            fqdn="example.com",
+            port=443,
+            request_interval=Token.as_number("30"),
+            resource_path="/",
+            search_string="example",
+            type="HTTPS_STR_MATCH"
+        )
+```
+
+### Aggregate Check
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_health_check import Route53HealthCheck
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53HealthCheck(self, "parent",
+            child_health_threshold=1,
+            child_healthchecks=[child.id],
+            tags={
+                "Name": "tf-test-calculated-health-check"
+            },
+            type="CALCULATED"
+        )
+```
+
+### CloudWatch Alarm Check
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_metric_alarm import CloudwatchMetricAlarm
+from imports.aws.route53_health_check import Route53HealthCheck
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foobar = CloudwatchMetricAlarm(self, "foobar",
+            alarm_description="This metric monitors ec2 cpu utilization",
+            alarm_name="terraform-test-foobar5",
+            comparison_operator="GreaterThanOrEqualToThreshold",
+            evaluation_periods=Token.as_number("2"),
+            metric_name="CPUUtilization",
+            namespace="AWS/EC2",
+            period=Token.as_number("120"),
+            statistic="Average",
+            threshold=Token.as_number("80")
+        )
+        Route53HealthCheck(self, "foo",
+            cloudwatch_alarm_name=foobar.alarm_name,
+            cloudwatch_alarm_region="us-west-2",
+            insufficient_data_health_status="Healthy",
+            type="CLOUDWATCH_METRIC"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+~> **Note:** At least one of either `fqdn` or `ip_address` must be specified.
+
+* `reference_name` - (Optional) This is a reference name used in Caller Reference
+  (helpful for identifying a single health check set amongst others).
+* `fqdn` - (Optional) The fully qualified domain name of the endpoint to be checked. If a value is set for `ip_address`, the value set for `fqdn` will be passed in the `Host` header.
+* `ip_address` - (Optional) The IP address of the endpoint to be checked.
+* `port` - (Optional) The port of the endpoint to be checked.
+* `type` - (Required) The protocol to use when performing health checks. Valid values are `HTTP`, `HTTPS`, `HTTP_STR_MATCH`, `HTTPS_STR_MATCH`, `TCP`, `CALCULATED`, `CLOUDWATCH_METRIC` and `RECOVERY_CONTROL`.
+* `failure_threshold` - (Optional) The number of consecutive health checks that an endpoint must pass or fail for Route 53 to change the current status of the endpoint from unhealthy to healthy or vice versa.
+* `request_interval` - (Required) The number of seconds between the time that Amazon Route 53 gets a response from your endpoint and the time that it sends the next health-check request.
+* `resource_path` - (Optional) The path that you want Amazon Route 53 to request when performing health checks.
+* `search_string` - (Optional) String searched in the first 5120 bytes of the response body for the check to be considered healthy. Only valid with `HTTP_STR_MATCH` and `HTTPS_STR_MATCH`.
+* `measure_latency` - (Optional) A Boolean value that indicates whether you want Route 53 to measure the latency between health checkers in multiple AWS regions and your endpoint and to display CloudWatch latency graphs in the Route 53 console.
+* `invert_healthcheck` - (Optional) A boolean value that indicates whether the status of the health check should be inverted. For example, if a health check is healthy but `Inverted` is `True`, then Route 53 considers the health check to be unhealthy.
+* `disabled` - (Optional) A boolean value that stops Route 53 from performing health checks. When set to true, Route 53 will do the following depending on the type of health check:
+    * For health checks that check the health of endpoints, Route 53 stops submitting requests to your application, server, or other resource.
+    * For calculated health checks, Route 53 stops aggregating the status of the referenced health checks.
+    * For health checks that monitor CloudWatch alarms, Route 53 stops monitoring the corresponding CloudWatch metrics.
+
+  ~> **Note:** After you disable a health check, Route 53 considers the status of the health check to always be healthy. If you configured DNS failover, Route 53 continues to route traffic to the corresponding resources. If you want to stop routing traffic to a resource, change the value of `invert_healthcheck`.
+* `enable_sni` - (Optional) A boolean value that indicates whether Route53 should send the `fqdn` to the endpoint when performing the health check. This defaults to AWS' defaults: when the `type` is "HTTPS" `enable_sni` defaults to `true`, when `type` is anything else `enable_sni` defaults to `false`.
+* `child_healthchecks` - (Optional) For a specified parent health check, a list of HealthCheckId values for the associated child health checks.
+* `child_health_threshold` - (Optional) The minimum number of child health checks that must be healthy for Route 53 to consider the parent health check to be healthy. Valid values are integers between 0 and 256, inclusive.
+* `cloudwatch_alarm_name` - (Optional) The name of the CloudWatch alarm.
+* `cloudwatch_alarm_region` - (Optional) The CloudWatchRegion that the CloudWatch alarm was created in.
+* `insufficient_data_health_status` - (Optional) The status of the health check when CloudWatch has insufficient data about the state of the associated alarm. Valid values are `Healthy`, `Unhealthy` and `LastKnownStatus`.
+* `regions` - (Optional) A list of AWS regions that you want Amazon Route 53 health checkers to check the specified endpoint from.
+* `routing_control_arn` - (Optional) The Amazon Resource Name (ARN) for the Route 53 Application Recovery Controller routing control. This is used when the health check type is `RECOVERY_CONTROL`.
+* `tags` - (Optional) A map of tags to assign to the health check. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
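+
+For instance, a plain TCP connectivity check needs only an endpoint, a port, and the thresholds. A hedged sketch (not from the original page; the IP address is illustrative):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.route53_health_check import Route53HealthCheck
+class TcpHealthCheckSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # TCP checks pass when the connection can be established;
+        # resource_path and search_string do not apply.
+        Route53HealthCheck(self, "tcp",
+            ip_address="203.0.113.10",
+            port=443,
+            type="TCP",
+            failure_threshold=5,
+            request_interval=30
+        )
+```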
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Health Check.
+* `id` - The ID of the health check.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Health Checks using the health check `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Health Checks using the health check `id`. For example:
+
+```console
+% terraform import aws_route53_health_check.http_check abcdef11-2222-3333-4444-555555fedcba
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_hosted_zone_dnssec.html.markdown b/website/docs/cdktf/python/r/route53_hosted_zone_dnssec.html.markdown
new file mode 100644
index 00000000000..ec868046ff6
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_hosted_zone_dnssec.html.markdown
@@ -0,0 +1,123 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_hosted_zone_dnssec"
+description: |-
+  Manages Route 53 Hosted Zone DNSSEC
+---
+
+
+
+# Resource: aws_route53_hosted_zone_dnssec
+
+Manages Route 53 Hosted Zone Domain Name System Security Extensions (DNSSEC). For more information about managing DNSSEC in Route 53, see the [Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec.html).
+
+!> **WARNING:** If you disable DNSSEC signing for your hosted zone before the DNS changes have propagated, your domain could become unavailable on the internet. When you remove the DS records, you must wait until the longest TTL for the DS records that you remove has expired before you complete the step to disable DNSSEC signing. Please refer to the [Route 53 Developer Guide - Disable DNSSEC](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec-disable.html) for a detailed breakdown on the steps required to disable DNSSEC safely for a hosted zone.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.kms_key import KmsKey +from imports.aws.provider import AwsProvider +from imports.aws.route53_hosted_zone_dnssec import Route53HostedZoneDnssec +from imports.aws.route53_key_signing_key import Route53KeySigningKey +from imports.aws.route53_zone import Route53Zone +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AwsProvider(self, "aws", + region="us-east-1" + ) + example = Route53Zone(self, "example", + name="example.com" + ) + current = DataAwsCallerIdentity(self, "current") + aws_kms_key_example = KmsKey(self, "example_3", + customer_master_key_spec="ECC_NIST_P256", + deletion_window_in_days=7, + key_usage="SIGN_VERIFY", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["kms:DescribeKey", "kms:GetPublicKey", "kms:Sign", "kms:Verify" + ], + "Effect": "Allow", + "Principal": { + "Service": "dnssec-route53.amazonaws.com" + }, + "Resource": "*", + "Sid": "Allow Route 53 DNSSEC Service" + }, { + "Action": "kms:*", + "Effect": "Allow", + "Principal": { + "AWS": "arn:aws:iam::${" + current.account_id + "}:root" + }, + "Resource": "*", + "Sid": "Enable IAM User Permissions" + } + ], + "Version": "2012-10-17" + })) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_kms_key_example.override_logical_id("example") + aws_route53_key_signing_key_example = Route53KeySigningKey(self, "example_4", + hosted_zone_id=example.id, + key_management_service_arn=Token.as_string(aws_kms_key_example.arn), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_key_signing_key_example.override_logical_id("example") + aws_route53_hosted_zone_dnssec_example = Route53HostedZoneDnssec(self, "example_5", + depends_on=[aws_route53_key_signing_key_example], + hosted_zone_id=Token.as_string(aws_route53_key_signing_key_example.hosted_zone_id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_hosted_zone_dnssec_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `hosted_zone_id` - (Required) Identifier of the Route 53 Hosted Zone. + +The following arguments are optional: + +* `signing_status` - (Optional) Hosted Zone signing status. Valid values: `SIGNING`, `NOT_SIGNING`. Defaults to `SIGNING`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Route 53 Hosted Zone identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_route53_hosted_zone_dnssec` resources using the Route 53 Hosted Zone identifier. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_route53_hosted_zone_dnssec` resources using the Route 53 Hosted Zone identifier. 
For example:
+
+```console
+% terraform import aws_route53_hosted_zone_dnssec.example Z1D633PJN98FT9
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_key_signing_key.html.markdown b/website/docs/cdktf/python/r/route53_key_signing_key.html.markdown
new file mode 100644
index 00000000000..e55e378a433
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_key_signing_key.html.markdown
@@ -0,0 +1,153 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_key_signing_key"
+description: |-
+  Manages a Route 53 Key Signing Key
+---
+
+
+
+# Resource: aws_route53_key_signing_key
+
+Manages a Route 53 Key Signing Key. To manage Domain Name System Security Extensions (DNSSEC) for a Hosted Zone, see the [`aws_route53_hosted_zone_dnssec` resource](route53_hosted_zone_dnssec.html). For more information about managing DNSSEC in Route 53, see the [Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.kms_key import KmsKey
+from imports.aws.provider import AwsProvider
+from imports.aws.route53_hosted_zone_dnssec import Route53HostedZoneDnssec
+from imports.aws.route53_key_signing_key import Route53KeySigningKey
+from imports.aws.route53_zone import Route53Zone
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws",
+            region="us-east-1"
+        )
+        example = Route53KeySigningKey(self, "example",
+            hosted_zone_id=test.id,
+            key_management_service_arn=Token.as_string(aws_kms_key_test.arn),
+            name="example"
+        )
+        aws_route53_zone_example = Route53Zone(self, "example_2",
+            name="example.com"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_zone_example.override_logical_id("example")
+        current = DataAwsCallerIdentity(self, "current")
+        aws_kms_key_example = KmsKey(self, "example_4",
+            customer_master_key_spec="ECC_NIST_P256",
+            deletion_window_in_days=7,
+            key_usage="SIGN_VERIFY",
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": ["kms:DescribeKey", "kms:GetPublicKey", "kms:Sign"],
+                        "Condition": {
+                            "ArnLike": {
+                                "aws:SourceArn": "arn:aws:route53:::hostedzone/*"
+                            },
+                            "StringEquals": {
+                                "aws:SourceAccount": current.account_id
+                            }
+                        },
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "dnssec-route53.amazonaws.com"
+                        },
+                        "Resource": "*",
+                        "Sid": "Allow Route 53 DNSSEC Service"
+                    }, {
+                        "Action": "kms:CreateGrant",
+                        "Condition": {
+                            "Bool": {
+                                "kms:GrantIsForAWSResource": "true"
+                            }
+                        },
+                        "Effect": "Allow",
+                        "Principal": {
+                            "Service": "dnssec-route53.amazonaws.com"
+                        },
+                        "Resource": "*",
+                        "Sid": "Allow Route 53 DNSSEC Service to CreateGrant"
+                    }, {
+                        "Action": "kms:*",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "AWS": "arn:aws:iam::${" + current.account_id + "}:root"
+                        },
+                        "Resource": "*",
+                        "Sid": "Enable IAM User Permissions"
+                    }
+                    ],
+                    "Version": "2012-10-17"
+                }))
+        )
+        # This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.
+        aws_kms_key_example.override_logical_id("example")
+        aws_route53_hosted_zone_dnssec_example = Route53HostedZoneDnssec(self, "example_5",
+            depends_on=[example],
+            hosted_zone_id=example.hosted_zone_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_hosted_zone_dnssec_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `hosted_zone_id` - (Required) Identifier of the Route 53 Hosted Zone.
+* `key_management_service_arn` - (Required) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key. This must be unique for each key-signing key (KSK) in a single hosted zone. This key must be in the `us-east-1` Region and meet certain requirements, which are described in the [Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec-cmk-requirements.html) and [Route 53 API Reference](https://docs.aws.amazon.com/Route53/latest/APIReference/API_CreateKeySigningKey.html).
+* `name` - (Required) Name of the key-signing key (KSK). Must be unique for each key-signing key in the same hosted zone.
+
+The following arguments are optional:
+
+* `status` - (Optional) Status of the key-signing key (KSK). Valid values: `ACTIVE`, `INACTIVE`. Defaults to `ACTIVE`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `digest_algorithm_mnemonic` - A string used to represent the delegation signer digest algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.3](https://tools.ietf.org/html/rfc8624#section-3.3).
+* `digest_algorithm_type` - An integer used to represent the delegation signer digest algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.3](https://tools.ietf.org/html/rfc8624#section-3.3).
+* `digest_value` - A cryptographic digest of a DNSKEY resource record (RR). DNSKEY records are used to publish the public key that resolvers can use to verify DNSSEC signatures that are used to secure certain kinds of information provided by the DNS system.
+* `dnskey_record` - A string that represents a DNSKEY record.
+* `ds_record` - A string that represents a delegation signer (DS) record.
+* `flag` - An integer that specifies how the key is used. For key-signing key (KSK), this value is always 257.
+* `id` - Route 53 Hosted Zone identifier and KMS Key identifier, separated by a comma (`,`).
+* `key_tag` - An integer used to identify the DNSSEC record for the domain name. The process used to calculate the value is described in [RFC-4034 Appendix B](https://tools.ietf.org/rfc/rfc4034.txt).
+* `public_key` - The public key, represented as a Base64 encoding, as required by [RFC-4034 Page 5](https://tools.ietf.org/rfc/rfc4034.txt).
+* `signing_algorithm_mnemonic` - A string used to represent the signing algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.1](https://tools.ietf.org/html/rfc8624#section-3.1).
+* `signing_algorithm_type` - An integer used to represent the signing algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.1](https://tools.ietf.org/html/rfc8624#section-3.1).
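+
+To complete the DNSSEC chain of trust, the `ds_record` value has to be added as a DS record at the domain's registrar or in the parent zone. A minimal sketch (not part of the original page; it assumes the `example` key-signing key from the usage above, inside the same stack) that surfaces the value as a stack output:
+
+```python
+from cdktf import TerraformOutput
+
+# Expose the delegation signer (DS) record so it can be copied to the
+# registrar or parent zone after deployment.
+TerraformOutput(self, "ds_record",
+    value=example.ds_record
+)
+```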
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_route53_key_signing_key` resources using the Route 53 Hosted Zone identifier and KMS Key identifier, separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_route53_key_signing_key` resources using the Route 53 Hosted Zone identifier and KMS Key identifier, separated by a comma (`,`). For example: + +```console +% terraform import aws_route53_key_signing_key.example Z1D633PJN98FT9,example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_query_log.html.markdown b/website/docs/cdktf/python/r/route53_query_log.html.markdown new file mode 100644 index 00000000000..fea52ef91a5 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_query_log.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_query_log" +description: |- + Provides a Route53 query logging configuration resource. +--- + + + +# Resource: aws_route53_query_log + +Provides a Route53 query logging configuration resource. + +~> **NOTE:** There are restrictions on the configuration of query logging. Notably, +the CloudWatch log group must be in the `us-east-1` region, +a permissive CloudWatch log resource policy must be in place, and +the Route53 hosted zone must be public. +See [Configuring Logging for DNS Queries](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/query-logs.html?console_help=true#query-logs-configuring) for additional details. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.provider import AwsProvider
+from imports.aws.route53_query_log import Route53QueryLog
+from imports.aws.route53_zone import Route53Zone
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        us_east1 = AwsProvider(self, "aws",
+            alias="us-east-1",
+            region="us-east-1"
+        )
+        example_com = Route53Zone(self, "example_com",
+            name="example.com"
+        )
+        route53_query_logging_policy = DataAwsIamPolicyDocument(self, "route53-query-logging-policy",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:CreateLogStream", "logs:PutLogEvents"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["route53.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=["arn:aws:logs:*:*:log-group:/aws/route53/*"]
+            )
+            ]
+        )
+        aws_route53_example_com = CloudwatchLogGroup(self, "aws_route53_example_com",
+            name="/aws/route53/${" + example_com.name + "}",
+            provider=us_east1,
+            retention_in_days=30
+        )
+        aws_cloudwatch_log_resource_policy_route53_query_logging_policy = CloudwatchLogResourcePolicy(self, "route53-query-logging-policy_4",
+            policy_document=Token.as_string(route53_query_logging_policy.json),
+            policy_name="route53-query-logging-policy",
+            provider=us_east1
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_resource_policy_route53_query_logging_policy.override_logical_id("route53-query-logging-policy")
+        aws_route53_query_log_example_com = Route53QueryLog(self, "example_com_5",
+            cloudwatch_log_group_arn=aws_route53_example_com.arn,
+            depends_on=[aws_cloudwatch_log_resource_policy_route53_query_logging_policy],
+            zone_id=example_com.zone_id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_query_log_example_com.override_logical_id("example_com")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cloudwatch_log_group_arn` - (Required) CloudWatch log group ARN to send query logs.
+* `zone_id` - (Required) Route53 hosted zone ID to enable query logs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Query Logging Config.
+* `id` - The query logging configuration ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 query logging configurations using their ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 query logging configurations using their ID.
For example:
+
+```console
+% terraform import aws_route53_query_log.example_com xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_record.html.markdown b/website/docs/cdktf/python/r/route53_record.html.markdown
new file mode 100644
index 00000000000..d8ff405bf33
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_record.html.markdown
@@ -0,0 +1,272 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_record"
+description: |-
+  Provides a Route53 record resource.
+---
+
+
+
+# Resource: aws_route53_record
+
+Provides a Route53 record resource.
+
+## Example Usage
+
+### Simple routing policy
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_record import Route53Record
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53Record(self, "www",
+            name="www.example.com",
+            records=[lb.public_ip],
+            ttl=300,
+            type="A",
+            zone_id=primary.zone_id
+        )
+```
+
+### Weighted routing policy
+
+Other routing policies are configured similarly. See [Amazon Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html) for details.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_record import Route53Record
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53Record(self, "www-dev",
+            name="www",
+            records=["dev.example.com"],
+            set_identifier="dev",
+            ttl=5,
+            type="CNAME",
+            weighted_routing_policy=Route53RecordWeightedRoutingPolicy(
+                weight=10
+            ),
+            zone_id=primary.zone_id
+        )
+        Route53Record(self, "www-live",
+            name="www",
+            records=["live.example.com"],
+            set_identifier="live",
+            ttl=5,
+            type="CNAME",
+            weighted_routing_policy=Route53RecordWeightedRoutingPolicy(
+                weight=90
+            ),
+            zone_id=primary.zone_id
+        )
+```
+
+### Alias record
+
+See [related part of Amazon Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resource-record-sets-choosing-alias-non-alias.html)
+to understand differences between alias and non-alias records.
+
+The TTL for all alias records is [60 seconds](https://aws.amazon.com/route53/faqs/#dns_failover_do_i_need_to_adjust); you cannot change this, so `ttl` must be omitted for alias records.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.elb import Elb +from imports.aws.route53_record import Route53Record +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + main = Elb(self, "main", + availability_zones=["us-east-1c"], + listener=[ElbListener( + instance_port=80, + instance_protocol="http", + lb_port=80, + lb_protocol="http" + ) + ], + name="foobar-terraform-elb" + ) + Route53Record(self, "www", + alias=Route53RecordAlias( + evaluate_target_health=True, + name=main.dns_name, + zone_id=main.zone_id + ), + name="example.com", + type="A", + zone_id=primary.zone_id + ) +``` + +### NS and SOA Record Management + +When creating Route 53 zones, the `NS` and `SOA` records for the zone are automatically created. Enabling the `allow_overwrite` argument will allow managing these records in a single Terraform run without the requirement for `terraform import`. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import property_access, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_record import Route53Record +from imports.aws.route53_zone import Route53Zone +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Route53Zone(self, "example", + name="test.example.com" + ) + aws_route53_record_example = Route53Record(self, "example_1", + allow_overwrite=True, + name="test.example.com", + records=[ + Token.as_string(property_access(example.name_servers, ["0"])), + Token.as_string(property_access(example.name_servers, ["1"])), + Token.as_string(property_access(example.name_servers, ["2"])), + Token.as_string(property_access(example.name_servers, ["3"])) + ], + ttl=172800, + type="NS", + zone_id=example.zone_id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_record_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `zone_id` - (Required) The ID of the hosted zone to contain this record. +* `name` - (Required) The name of the record. +* `type` - (Required) The record type. Valid values are `A`, `AAAA`, `CAA`, `CNAME`, `DS`, `MX`, `NAPTR`, `NS`, `PTR`, `SOA`, `SPF`, `SRV` and `TXT`. +* `ttl` - (Required for non-alias records) The TTL of the record. +* `records` - (Required for non-alias records) A string list of records. To specify a single record value longer than 255 characters such as a TXT record for DKIM, add `\"\"` inside the Terraform configuration string (e.g., `"first255characters\"\"morecharacters"`). +* `set_identifier` - (Optional) Unique identifier to differentiate records with routing policies from one another. Required if using `cidr_routing_policy`, `failover_routing_policy`, `geolocation_routing_policy`, `latency_routing_policy`, `multivalue_answer_routing_policy`, or `weighted_routing_policy`. +* `health_check_id` - (Optional) The health check the record should be associated with. +* `alias` - (Optional) An alias block. Conflicts with `ttl` & `records`. + [Documented below](#alias). +* `cidr_routing_policy` - (Optional) A block indicating a routing policy based on the IP network ranges of requestors. Conflicts with any other routing policy. [Documented below](#cidr-routing-policy). 
+* `failover_routing_policy` - (Optional) A block indicating the routing behavior when the associated health check fails. Conflicts with any other routing policy. [Documented below](#failover-routing-policy).
+* `geolocation_routing_policy` - (Optional) A block indicating a routing policy based on the geolocation of the requestor. Conflicts with any other routing policy. [Documented below](#geolocation-routing-policy).
+* `latency_routing_policy` - (Optional) A block indicating a routing policy based on the latency between the requestor and an AWS region. Conflicts with any other routing policy. [Documented below](#latency-routing-policy).
+* `multivalue_answer_routing_policy` - (Optional) Set to `true` to indicate a multivalue answer routing policy. Conflicts with any other routing policy.
+* `weighted_routing_policy` - (Optional) A block indicating a weighted routing policy. Conflicts with any other routing policy. [Documented below](#weighted-routing-policy).
+* `allow_overwrite` - (Optional) Allow creation of this record in Terraform to overwrite an existing record, if any. This does not affect the ability to update the record in Terraform and does not prevent other resources within Terraform or manual Route 53 changes outside Terraform from overwriting this record. `false` by default. This configuration is not recommended for most environments.
+
+Exactly one of `records` or `alias` must be specified: this determines whether it's an alias record.
+
+### Alias
+
+Alias records support the following:
+
+* `name` - (Required) DNS domain name for a CloudFront distribution, S3 bucket, ELB, or another resource record set in this hosted zone.
+* `zone_id` - (Required) Hosted zone ID for a CloudFront distribution, S3 bucket, ELB, or Route 53 hosted zone. See [`resource_elb.zone_id`](/docs/providers/aws/r/elb.html#zone_id) for example.
+* `evaluate_target_health` - (Required) Set to `true` if you want Route 53 to determine whether to respond to DNS queries using this resource record set by checking the health of the resource record set. Some resources have special requirements, see [related part of documentation](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resource-record-sets-values.html#rrsets-values-alias-evaluate-target-health).
+
+### CIDR Routing Policy
+
+CIDR routing policies support the following:
+
+* `collection_id` - (Required) The CIDR collection ID. See the [`aws_route53_cidr_collection` resource](route53_cidr_collection.html) for more details.
+* `location_name` - (Required) The CIDR collection location name. See the [`aws_route53_cidr_location` resource](route53_cidr_location.html) for more details. A `location_name` with an asterisk `"*"` can be used to create a default CIDR record. `collection_id` is still required for a default record.
+
+### Failover Routing Policy
+
+Failover routing policies support the following:
+
+* `type` - (Required) `PRIMARY` or `SECONDARY`. A `PRIMARY` record will be served if its healthcheck is passing, otherwise the `SECONDARY` will be served. See http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-failover-configuring-options.html#dns-failover-failover-rrsets
+
+### Geolocation Routing Policy
+
+Geolocation routing policies support the following (see the sketch after this list):
+
+* `continent` - A two-letter continent code. See http://docs.aws.amazon.com/Route53/latest/APIReference/API_GetGeoLocation.html for code details. Either `continent` or `country` must be specified.
+* `country` - A two-character country code or `*` to indicate a default resource record set.
+* `subdivision` - (Optional) A subdivision code for a country.
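+
+A hedged sketch of a record served only to queries that geolocate to the United States (not from the original page; the `Route53RecordGeolocationRoutingPolicy` binding name is assumed to follow the generated convention shown for the weighted policy above, and IDs are illustrative):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.route53_record import Route53Record, Route53RecordGeolocationRoutingPolicy
+class GeoRecordSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # set_identifier is required for records with a routing policy.
+        Route53Record(self, "us",
+            zone_id="Z4KAPRWWNC7JR",
+            name="www.example.com",
+            type="CNAME",
+            ttl=300,
+            records=["us.example.com"],
+            set_identifier="us",
+            geolocation_routing_policy=Route53RecordGeolocationRoutingPolicy(
+                country="US"
+            )
+        )
+```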
+
+### Latency Routing Policy
+
+Latency routing policies support the following:
+
+* `region` - (Required) An AWS region from which to measure latency. See http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html#routing-policy-latency
+
+### Weighted Routing Policy
+
+Weighted routing policies support the following:
+
+* `weight` - (Required) A numeric value indicating the relative weight of the record. See http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html#routing-policy-weighted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `name` - The name of the record.
+* `fqdn` - [FQDN](https://en.wikipedia.org/wiki/Fully_qualified_domain_name) built using the zone domain and `name`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Records using the ID of the record, record name, record type, and set identifier. For example:
+
+Using the ID of the record, which is the zone identifier, record name, and record type, separated by underscores (`_`):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+If the record also contains a set identifier, append it:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+**Using `terraform import` to import** Route53 Records using the ID of the record, record name, record type, and set identifier. For example:
+
+Using the ID of the record, which is the zone identifier, record name, and record type, separated by underscores (`_`):
+
+```console
+% terraform import aws_route53_record.myrecord Z4KAPRWWNC7JR_dev.example.com_NS
+```
+
+If the record also contains a set identifier, append it:
+
+```console
+% terraform import aws_route53_record.myrecord Z4KAPRWWNC7JR_dev.example.com_NS_dev
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_resolver_config.html.markdown b/website/docs/cdktf/python/r/route53_resolver_config.html.markdown
new file mode 100644
index 00000000000..76f3b0cc21b
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_resolver_config.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_config"
+description: |-
+  Provides a Route 53 Resolver config resource.
+---
+
+
+
+# Resource: aws_route53_resolver_config
+
+Provides a Route 53 Resolver config resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_resolver_config import Route53ResolverConfig
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Vpc(self, "example",
+            cidr_block="10.0.0.0/16",
+            enable_dns_hostnames=True,
+            enable_dns_support=True
+        )
+        aws_route53_resolver_config_example = Route53ResolverConfig(self, "example_1",
+            autodefined_reverse_flag="DISABLE",
+            resource_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_resolver_config_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_id` - (Required) The ID of the VPC that the configuration is for.
+* `autodefined_reverse_flag` - (Required) Indicates whether or not the Resolver will create autodefined rules for reverse DNS lookups. Valid values: `ENABLE`, `DISABLE`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the resolver configuration.
+* `owner_id` - The AWS account ID of the owner of the VPC that this resolver configuration applies to.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver configs using the Route 53 Resolver config ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route 53 Resolver configs using the Route 53 Resolver config ID. For example:
+
+```console
+% terraform import aws_route53_resolver_config.example rslvr-rc-715aa20c73a23da7
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_resolver_dnssec_config.html.markdown b/website/docs/cdktf/python/r/route53_resolver_dnssec_config.html.markdown
new file mode 100644
index 00000000000..c02a3ada56a
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_resolver_dnssec_config.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_dnssec_config"
+description: |-
+  Provides a Route 53 Resolver DNSSEC config resource.
+---
+
+
+
+# Resource: aws_route53_resolver_dnssec_config
+
+Provides a Route 53 Resolver DNSSEC config resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_resolver_dnssec_config import Route53ResolverDnssecConfig
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Vpc(self, "example",
+            cidr_block="10.0.0.0/16",
+            enable_dns_hostnames=True,
+            enable_dns_support=True
+        )
+        aws_route53_resolver_dnssec_config_example = Route53ResolverDnssecConfig(self, "example_1",
+            resource_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_route53_resolver_dnssec_config_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resource_id` - (Required) The ID of the virtual private cloud (VPC) that you're updating the DNSSEC validation status for. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN for a configuration for DNSSEC validation. +* `id` - The ID for a configuration for DNSSEC validation. +* `owner_id` - The owner account ID of the virtual private cloud (VPC) for a configuration for DNSSEC validation. +* `validation_status` - The validation status for a DNSSEC configuration. The status can be one of the following: `ENABLING`, `ENABLED`, `DISABLING` and `DISABLED`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNSSEC configs using the Route 53 Resolver DNSSEC config ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route 53 Resolver DNSSEC configs using the Route 53 Resolver DNSSEC config ID. For example: + +```console +% terraform import aws_route53_resolver_dnssec_config.example rdsc-be1866ecc1683e95 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_resolver_endpoint.html.markdown b/website/docs/cdktf/python/r/route53_resolver_endpoint.html.markdown new file mode 100644 index 00000000000..5d7cb3a6122 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_resolver_endpoint.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Route 53 Resolver" +layout: "aws" +page_title: "AWS: aws_route53_resolver_endpoint" +description: |- + Provides a Route 53 Resolver endpoint resource. +--- + + + +# Resource: aws_route53_resolver_endpoint + +Provides a Route 53 Resolver endpoint resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_resolver_endpoint import Route53ResolverEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53ResolverEndpoint(self, "foo", + direction="INBOUND", + ip_address=[Route53ResolverEndpointIpAddress( + subnet_id=sn1.id + ), Route53ResolverEndpointIpAddress( + ip="10.0.64.4", + subnet_id=sn2.id + ) + ], + name="foo", + security_group_ids=[sg1.id, sg2.id], + tags={ + "Environment": "Prod" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `direction` - (Required) The direction of DNS queries to or from the Route 53 Resolver endpoint. +Valid values are `INBOUND` (resolver forwards DNS queries to the DNS service for a VPC from your network or another VPC) +or `OUTBOUND` (resolver forwards DNS queries from the DNS service for a VPC to your network or another VPC). 
+* `ip_address` - (Required) The subnets and IP addresses in your VPC that you want DNS queries to pass through on the way from your VPCs +to your network (for outbound endpoints) or on the way from your network to your VPCs (for inbound endpoints). Described below. +* `security_group_ids` - (Required) The ID of one or more security groups that you want to use to control access to this VPC. +* `name` - (Optional) The friendly name of the Route 53 Resolver endpoint. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `ip_address` object supports the following: + +* `subnet_id` - (Required) The ID of the subnet that contains the IP address. +* `ip` - (Optional) The IP address in the subnet that you want to use for DNS queries. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Route 53 Resolver endpoint. +* `arn` - The ARN of the Route 53 Resolver endpoint. +* `host_vpc_id` - The ID of the VPC that you want to create the resolver endpoint in. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10m`) +- `update` - (Default `10m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver endpoints using the Route 53 Resolver endpoint ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route 53 Resolver endpoints using the Route 53 Resolver endpoint ID. For example: + +```console +% terraform import aws_route53_resolver_endpoint.foo rslvr-in-abcdef01234567890 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_resolver_firewall_config.markdown b/website/docs/cdktf/python/r/route53_resolver_firewall_config.markdown new file mode 100644 index 00000000000..e92c496b852 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_resolver_firewall_config.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Route 53 Resolver" +layout: "aws" +page_title: "AWS: aws_route53_resolver_firewall_config" +description: |- + Provides a Route 53 Resolver DNS Firewall config resource. +--- + + + +# Resource: aws_route53_resolver_firewall_config + +Provides a Route 53 Resolver DNS Firewall config resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
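+#
+# Note: with firewall_fail_open="ENABLED" below, DNS Firewall allows queries
+# to proceed when it is unable to evaluate them (favoring availability).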
+#
+from imports.aws.route53_resolver_firewall_config import Route53ResolverFirewallConfig
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Vpc(self, "example",
+            cidr_block="10.0.0.0/16",
+            enable_dns_hostnames=True,
+            enable_dns_support=True
+        )
+        aws_route53_resolver_firewall_config_example = Route53ResolverFirewallConfig(self, "example_1",
+            firewall_fail_open="ENABLED",
+            resource_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_resolver_firewall_config_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_id` - (Required) The ID of the VPC that the configuration is for.
+* `firewall_fail_open` - (Required) Determines how Route 53 Resolver handles queries during failures, for example when all traffic that is sent to DNS Firewall fails to receive a reply. By default, fail open is disabled, which means the failure mode is closed. This approach favors security over availability. DNS Firewall blocks queries that it is unable to evaluate properly. If you enable this option, the failure mode is open. This approach favors availability over security. DNS Firewall allows queries to proceed if it is unable to properly evaluate them. Valid values: `ENABLED`, `DISABLED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the firewall configuration.
+* `owner_id` - The AWS account ID of the owner of the VPC that this firewall configuration applies to.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall configs using the Route 53 Resolver DNS Firewall config ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall configs using the Route 53 Resolver DNS Firewall config ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_config.example rdsc-be1866ecc1683e95
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_resolver_firewall_domain_list.markdown b/website/docs/cdktf/python/r/route53_resolver_firewall_domain_list.markdown
new file mode 100644
index 00000000000..2c7498a5fda
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_resolver_firewall_domain_list.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_domain_list"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall domain list resource.
+---
+
+
+
+# Resource: aws_route53_resolver_firewall_domain_list
+
+Provides a Route 53 Resolver DNS Firewall domain list resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
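+#
+# Note: this creates an empty domain list; the optional `domains` argument
+# described below can pre-populate it.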
+#
+from imports.aws.route53_resolver_firewall_domain_list import Route53ResolverFirewallDomainList
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53ResolverFirewallDomainList(self, "example",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that lets you identify the domain list, to manage and use it.
+* `domains` - (Optional) An array of domains for the firewall domain list.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN (Amazon Resource Name) of the domain list.
+* `id` - The ID of the domain list.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall domain lists using the Route 53 Resolver DNS Firewall domain list ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall domain lists using the Route 53 Resolver DNS Firewall domain list ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_domain_list.example rslvr-fdl-0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_resolver_firewall_rule.markdown b/website/docs/cdktf/python/r/route53_resolver_firewall_rule.markdown
new file mode 100644
index 00000000000..e49afe2a424
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_resolver_firewall_rule.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall rule resource.
+---
+
+
+
+# Resource: aws_route53_resolver_firewall_rule
+
+Provides a Route 53 Resolver DNS Firewall rule resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
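+#
+# Note: the rule below blocks queries for domains on the list and answers
+# them with an overriding CNAME record (block_response="OVERRIDE").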
+#
+from imports.aws.route53_resolver_firewall_domain_list import Route53ResolverFirewallDomainList
+from imports.aws.route53_resolver_firewall_rule import Route53ResolverFirewallRule
+from imports.aws.route53_resolver_firewall_rule_group import Route53ResolverFirewallRuleGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Route53ResolverFirewallDomainList(self, "example",
+            domains=["example.com"],
+            name="example",
+            tags={}
+        )
+        aws_route53_resolver_firewall_rule_group_example = Route53ResolverFirewallRuleGroup(self, "example_1",
+            name="example",
+            tags={}
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_resolver_firewall_rule_group_example.override_logical_id("example")
+        aws_route53_resolver_firewall_rule_example = Route53ResolverFirewallRule(self, "example_2",
+            action="BLOCK",
+            block_override_dns_type="CNAME",
+            block_override_domain="example.com",
+            block_override_ttl=1,
+            block_response="OVERRIDE",
+            firewall_domain_list_id=example.id,
+            firewall_rule_group_id=Token.as_string(aws_route53_resolver_firewall_rule_group_example.id),
+            name="example",
+            priority=100
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_resolver_firewall_rule_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that lets you identify the rule, to manage and use it.
+* `action` - (Required) The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
+* `block_override_dns_type` - (Required if `block_response` is `OVERRIDE`) The DNS record's type. This determines the format of the record value that you provided in BlockOverrideDomain. Valid values: `CNAME`.
+* `block_override_domain` - (Required if `block_response` is `OVERRIDE`) The custom DNS record to send back in response to the query.
+* `block_override_ttl` - (Required if `block_response` is `OVERRIDE`) The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
+* `block_response` - (Required if `action` is `BLOCK`) The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
+* `firewall_domain_list_id` - (Required) The ID of the domain list that you want to use in the rule.
+* `firewall_rule_group_id` - (Required) The unique identifier of the firewall rule group where you want to create the rule.
+* `priority` - (Required) The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the rule.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall rules using the Route 53 Resolver DNS Firewall rule group ID and domain list ID separated by ':'.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route 53 Resolver DNS Firewall rules using the Route 53 Resolver DNS Firewall rule group ID and domain list ID separated by ':'. For example: + +```console +% terraform import aws_route53_resolver_firewall_rule.example rslvr-frg-0123456789abcdef:rslvr-fdl-0123456789abcdef +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_resolver_firewall_rule_group.markdown b/website/docs/cdktf/python/r/route53_resolver_firewall_rule_group.markdown new file mode 100644 index 00000000000..57e05bb914a --- /dev/null +++ b/website/docs/cdktf/python/r/route53_resolver_firewall_rule_group.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "Route 53 Resolver" +layout: "aws" +page_title: "AWS: aws_route53_resolver_firewall_rule_group" +description: |- + Provides a Route 53 Resolver DNS Firewall rule group resource. +--- + + + +# Resource: aws_route53_resolver_firewall_rule_group + +Provides a Route 53 Resolver DNS Firewall rule group resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_resolver_firewall_rule_group import Route53ResolverFirewallRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53ResolverFirewallRuleGroup(self, "example", + name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A name that lets you identify the rule group, to manage and use it. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN (Amazon Resource Name) of the rule group. +* `id` - The ID of the rule group. +* `owner_id` - The AWS account ID for the account that created the rule group. When a rule group is shared with your account, this is the account that has shared the rule group with you. +* `share_status` - Whether the rule group is shared with other AWS accounts, or was shared with the current account by another AWS account. Sharing is configured through AWS Resource Access Manager (AWS RAM). Valid values: `NOT_SHARED`, `SHARED_BY_ME`, `SHARED_WITH_ME` +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall rule groups using the Route 53 Resolver DNS Firewall rule group ID. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall rule groups using the Route 53 Resolver DNS Firewall rule group ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_rule_group.example rslvr-frg-0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_resolver_firewall_rule_group_association.markdown b/website/docs/cdktf/python/r/route53_resolver_firewall_rule_group_association.markdown
new file mode 100644
index 00000000000..d9aa8c738bb
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_resolver_firewall_rule_group_association.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule_group_association"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall rule group association resource.
+---
+
+
+
+# Resource: aws_route53_resolver_firewall_rule_group_association
+
+Provides a Route 53 Resolver DNS Firewall rule group association resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_resolver_firewall_rule_group import Route53ResolverFirewallRuleGroup
+from imports.aws.route53_resolver_firewall_rule_group_association import Route53ResolverFirewallRuleGroupAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Route53ResolverFirewallRuleGroup(self, "example",
+            name="example"
+        )
+        aws_route53_resolver_firewall_rule_group_association_example = Route53ResolverFirewallRuleGroupAssociation(self, "example_1",
+            firewall_rule_group_id=example.id,
+            name="example",
+            priority=100,
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_resolver_firewall_rule_group_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that lets you identify the rule group association, to manage and use it.
+* `firewall_rule_group_id` - (Required) The unique identifier of the firewall rule group.
+* `mutation_protection` - (Optional) If enabled, this setting disallows modification or removal of the association, to help prevent accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`.
+* `priority` - (Required) The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting.
+* `vpc_id` - (Required) The unique identifier of the VPC that you want to associate with the rule group.
+* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN (Amazon Resource Name) of the firewall rule group association.
+* `id` - The identifier for the association.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall rule group associations using the Route 53 Resolver DNS Firewall rule group association ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall rule group associations using the Route 53 Resolver DNS Firewall rule group association ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_rule_group_association.example rslvr-frgassoc-0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_resolver_query_log_config.html.markdown b/website/docs/cdktf/python/r/route53_resolver_query_log_config.html.markdown
new file mode 100644
index 00000000000..ea5b2644b32
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_resolver_query_log_config.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_query_log_config"
+description: |-
+  Provides a Route 53 Resolver query logging configuration resource.
+---
+
+
+
+# Resource: aws_route53_resolver_query_log_config
+
+Provides a Route 53 Resolver query logging configuration resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_resolver_query_log_config import Route53ResolverQueryLogConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53ResolverQueryLogConfig(self, "example",
+            destination_arn=Token.as_string(aws_s3_bucket_example.arn),
+            name="example",
+            tags={
+                "Environment": "Prod"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `destination_arn` - (Required) The ARN of the resource that you want Route 53 Resolver to send query logs to.
+You can send query logs to an [S3 bucket](s3_bucket.html), a [CloudWatch Logs log group](cloudwatch_log_group.html), or a [Kinesis Data Firehose delivery stream](kinesis_firehose_delivery_stream.html).
+* `name` - (Required) The name of the Route 53 Resolver query logging configuration.
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Route 53 Resolver query logging configuration. +* `arn` - The ARN (Amazon Resource Name) of the Route 53 Resolver query logging configuration. +* `owner_id` - The AWS account ID of the account that created the query logging configuration. +* `share_status` - An indication of whether the query logging configuration is shared with other AWS accounts, or was shared with the current account by another AWS account. +Sharing is configured through AWS Resource Access Manager (AWS RAM). +Values are `NOT_SHARED`, `SHARED_BY_ME` or `SHARED_WITH_ME` +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver query logging configurations using the Route 53 Resolver query logging configuration ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route 53 Resolver query logging configurations using the Route 53 Resolver query logging configuration ID. For example: + +```console +% terraform import aws_route53_resolver_query_log_config.example rqlc-92edc3b1838248bf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_resolver_query_log_config_association.html.markdown b/website/docs/cdktf/python/r/route53_resolver_query_log_config_association.html.markdown new file mode 100644 index 00000000000..dce085314e5 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_resolver_query_log_config_association.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "Route 53 Resolver" +layout: "aws" +page_title: "AWS: aws_route53_resolver_query_log_config_association" +description: |- + Provides a Route 53 Resolver query logging configuration association resource. +--- + + + +# Resource: aws_route53_resolver_query_log_config_association + +Provides a Route 53 Resolver query logging configuration association resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
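+#
+# Note: both IDs below are referenced from resources defined elsewhere in the
+# configuration (a query logging configuration and a VPC).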
+#
+from imports.aws.route53_resolver_query_log_config_association import Route53ResolverQueryLogConfigAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53ResolverQueryLogConfigAssociation(self, "example",
+            resolver_query_log_config_id=Token.as_string(aws_route53_resolver_query_log_config_example.id),
+            resource_id=Token.as_string(aws_vpc_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resolver_query_log_config_id` - (Required) The ID of the [Route 53 Resolver query logging configuration](route53_resolver_query_log_config.html) that you want to associate a VPC with.
+* `resource_id` - (Required) The ID of a VPC that you want this query logging configuration to log queries for.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Route 53 Resolver query logging configuration association.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver query logging configuration associations using the Route 53 Resolver query logging configuration association ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route 53 Resolver query logging configuration associations using the Route 53 Resolver query logging configuration association ID. For example:
+
+```console
+% terraform import aws_route53_resolver_query_log_config_association.example rqlca-b320624fef3c4d70
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_resolver_rule.html.markdown b/website/docs/cdktf/python/r/route53_resolver_rule.html.markdown
new file mode 100644
index 00000000000..6fe26ae6353
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_resolver_rule.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_rule"
+description: |-
+  Provides a Route53 Resolver rule.
+---
+
+
+
+# Resource: aws_route53_resolver_rule
+
+Provides a Route53 Resolver rule.
+
+## Example Usage
+
+### System rule
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_resolver_rule import Route53ResolverRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53ResolverRule(self, "sys",
+            domain_name="subdomain.example.com",
+            rule_type="SYSTEM"
+        )
+```
+
+### Forward rule
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
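+#
+# Note: the FORWARD rule below sends queries for example.com to 123.45.67.89
+# through the outbound resolver endpoint referenced by `foo`.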
+#
+from imports.aws.route53_resolver_rule import Route53ResolverRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53ResolverRule(self, "fwd",
+            domain_name="example.com",
+            name="example",
+            resolver_endpoint_id=foo.id,
+            rule_type="FORWARD",
+            tags={
+                "Environment": "Prod"
+            },
+            target_ip=[Route53ResolverRuleTargetIp(
+                ip="123.45.67.89"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain_name` - (Required) DNS queries for this domain name are forwarded to the IP addresses that are specified using `target_ip`.
+* `rule_type` - (Required) The rule type. Valid values are `FORWARD`, `SYSTEM` and `RECURSIVE`.
+* `name` - (Optional) A friendly name that lets you easily find a rule in the Resolver dashboard in the Route 53 console.
+* `resolver_endpoint_id` - (Optional) The ID of the outbound resolver endpoint that you want to use to route DNS queries to the IP addresses that you specify using `target_ip`.
+This argument should only be specified for `FORWARD` type rules.
+* `target_ip` - (Optional) Configuration block(s) indicating the IPs that you want Resolver to forward DNS queries to (documented below).
+This argument should only be specified for `FORWARD` type rules.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `target_ip` object supports the following:
+
+* `ip` - (Required) One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
+* `port` - (Optional) The port at `ip` that you want to forward DNS queries to. Default value is `53`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the resolver rule.
+* `arn` - The ARN (Amazon Resource Name) for the resolver rule.
+* `owner_id` - When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
+* `share_status` - Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
+Values are `NOT_SHARED`, `SHARED_BY_ME` or `SHARED_WITH_ME`.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Resolver rules using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Resolver rules using the `id`.
For example: + +```console +% terraform import aws_route53_resolver_rule.sys rslvr-rr-0123456789abcdef0 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_resolver_rule_association.html.markdown b/website/docs/cdktf/python/r/route53_resolver_rule_association.html.markdown new file mode 100644 index 00000000000..d6ba4d9ed89 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_resolver_rule_association.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "Route 53 Resolver" +layout: "aws" +page_title: "AWS: aws_route53_resolver_rule_association" +description: |- + Provides a Route53 Resolver rule association. +--- + + + +# Resource: aws_route53_resolver_rule_association + +Provides a Route53 Resolver rule association. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_resolver_rule_association import Route53ResolverRuleAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53ResolverRuleAssociation(self, "example", + resolver_rule_id=sys.id, + vpc_id=foo.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resolver_rule_id` - (Required) The ID of the resolver rule that you want to associate with the VPC. +* `vpc_id` - (Required) The ID of the VPC that you want to associate the resolver rule with. +* `name` - (Optional) A name for the association that you're creating between a resolver rule and a VPC. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the resolver rule association. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Resolver rule associations using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route53 Resolver rule associations using the `id`. For example: + +```console +% terraform import aws_route53_resolver_rule_association.example rslvr-rrassoc-97242eaf88example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_traffic_policy.html.markdown b/website/docs/cdktf/python/r/route53_traffic_policy.html.markdown new file mode 100644 index 00000000000..2129c2e9333 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_traffic_policy.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_traffic_policy" +description: |- + Manages a Route53 Traffic Policy +--- + + + +# Resource: aws_route53_traffic_policy + +Manages a Route53 Traffic Policy. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
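+#
+# Note: the traffic policy document below is passed inline as a JSON string
+# with a single value endpoint (10.0.0.2).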
+# +from imports.aws.route53_traffic_policy import Route53TrafficPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53TrafficPolicy(self, "example", + comment="example comment", + document="{\n \"AWSPolicyFormatVersion\": \"2015-10-01\",\n \"RecordType\": \"A\",\n \"Endpoints\": {\n \"endpoint-start-NkPh\": {\n \"Type\": \"value\",\n \"Value\": \"10.0.0.2\"\n }\n },\n \"StartEndpoint\": \"endpoint-start-NkPh\"\n}\n\n", + name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the traffic policy. +* `document` - (Required) Policy document. This is a JSON formatted string. For more information about building Route53 traffic policy documents, see the [AWS Route53 Traffic Policy document format](https://docs.aws.amazon.com/Route53/latest/APIReference/api-policies-traffic-policy-document-format.html) + +The following arguments are optional: + +* `comment` - (Optional) Comment for the traffic policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the traffic policy +* `type` - DNS type of the resource record sets that Amazon Route 53 creates when you use a traffic policy to create a traffic policy instance. +* `version` - Version number of the traffic policy. This value is automatically incremented by AWS after each update of this resource. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Traffic Policy using the `id` and `version`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route53 Traffic Policy using the `id` and `version`. For example: + +```console +% terraform import aws_route53_traffic_policy.example 01a52019-d16f-422a-ae72-c306d2b6df7e/1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_traffic_policy_instance.html.markdown b/website/docs/cdktf/python/r/route53_traffic_policy_instance.html.markdown new file mode 100644 index 00000000000..ccccdd91843 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_traffic_policy_instance.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_traffic_policy_instance" +description: |- + Provides a Route53 traffic policy instance resource. +--- + + + +# Resource: aws_route53_traffic_policy_instance + +Provides a Route53 traffic policy instance resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
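+#
+# Note: this instance creates record sets for test.example.com from version 1
+# of an existing traffic policy in the given hosted zone.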
+# +from imports.aws.route53_traffic_policy_instance import Route53TrafficPolicyInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53TrafficPolicyInstance(self, "test", + hosted_zone_id="Z033120931TAQO548OGJC", + name="test.example.com", + traffic_policy_id="b3gb108f-ea6f-45a5-baab-9d112d8b4037", + traffic_policy_version=1, + ttl=360 + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Domain name for which Amazon Route 53 responds to DNS queries by using the resource record sets that Route 53 creates for this traffic policy instance. +* `traffic_policy_id` - (Required) ID of the traffic policy that you want to use to create resource record sets in the specified hosted zone. +* `traffic_policy_version` - (Required) Version of the traffic policy +* `hosted_zone_id` - (Required) ID of the hosted zone that you want Amazon Route 53 to create resource record sets in by using the configuration in a traffic policy. +* `ttl` - (Required) TTL that you want Amazon Route 53 to assign to all the resource record sets that it creates in the specified hosted zone. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of traffic policy instance. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 traffic policy instance using its id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route53 traffic policy instance using its id. For example: + +```console +% terraform import aws_route53_traffic_policy_instance.test df579d9a-6396-410e-ac22-e7ad60cf9e7e +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_vpc_association_authorization.html.markdown b/website/docs/cdktf/python/r/route53_vpc_association_authorization.html.markdown new file mode 100644 index 00000000000..b4605434944 --- /dev/null +++ b/website/docs/cdktf/python/r/route53_vpc_association_authorization.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_vpc_association_authorization" +description: |- + Authorizes a VPC in a different account to be associated with a local Route53 Hosted Zone +--- + + + +# Resource: aws_route53_vpc_association_authorization + +Authorizes a VPC in a different account to be associated with a local Route53 Hosted Zone. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
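+#
+# Note: this example spans two accounts. The default provider owns the private
+# hosted zone and authorizes the association; the `alternate` provider owns
+# the VPC and completes it with aws_route53_zone_association.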
+#
+from imports.aws.provider import AwsProvider
+from imports.aws.route53_vpc_association_authorization import Route53VpcAssociationAuthorization
+from imports.aws.route53_zone import Route53Zone
+from imports.aws.route53_zone_association import Route53ZoneAssociation
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        AwsProvider(self, "aws")
+        alternate = AwsProvider(self, "aws_1",
+            alias="alternate"
+        )
+        aws_vpc_alternate = Vpc(self, "alternate",
+            cidr_block="10.7.0.0/16",
+            enable_dns_hostnames=True,
+            enable_dns_support=True,
+            provider=alternate
+        )
+        example = Vpc(self, "example",
+            cidr_block="10.6.0.0/16",
+            enable_dns_hostnames=True,
+            enable_dns_support=True
+        )
+        aws_route53_zone_example = Route53Zone(self, "example_4",
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=[vpc]
+            ),
+            name="example.com",
+            vpc=[Route53ZoneVpc(
+                vpc_id=example.id
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_zone_example.override_logical_id("example")
+        aws_route53_vpc_association_authorization_example = Route53VpcAssociationAuthorization(self, "example_5",
+            vpc_id=Token.as_string(aws_vpc_alternate.id),
+            zone_id=Token.as_string(aws_route53_zone_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_vpc_association_authorization_example.override_logical_id("example")
+        aws_route53_zone_association_example = Route53ZoneAssociation(self, "example_6",
+            provider=alternate,
+            vpc_id=Token.as_string(aws_route53_vpc_association_authorization_example.vpc_id),
+            zone_id=Token.as_string(aws_route53_vpc_association_authorization_example.zone_id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_route53_zone_association_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `zone_id` - (Required) The ID of the private hosted zone that you want to authorize associating a VPC with.
+* `vpc_id` - (Required) The VPC to authorize for association with the private hosted zone.
+* `vpc_region` - (Optional) The VPC's region. Defaults to the region of the AWS provider.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The calculated unique identifier for the association.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 VPC Association Authorizations using the Hosted Zone ID and VPC ID, separated by a colon (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route 53 VPC Association Authorizations using the Hosted Zone ID and VPC ID, separated by a colon (`:`).
For example:
+
+```console
+% terraform import aws_route53_vpc_association_authorization.example Z123456ABCDEFG:vpc-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53_zone.html.markdown b/website/docs/cdktf/python/r/route53_zone.html.markdown
new file mode 100644
index 00000000000..64520b5ccc1
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53_zone.html.markdown
@@ -0,0 +1,146 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_zone"
+description: |-
+  Manages a Route53 Hosted Zone
+---
+
+
+
+# Resource: aws_route53_zone
+
+Manages a Route53 Hosted Zone. For managing Domain Name System Security Extensions (DNSSEC), see the [`aws_route53_key_signing_key`](route53_key_signing_key.html) and [`aws_route53_hosted_zone_dnssec`](route53_hosted_zone_dnssec.html) resources.
+
+## Example Usage
+
+### Public Zone
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_zone import Route53Zone
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53Zone(self, "primary",
+            name="example.com"
+        )
+```
+
+### Public Subdomain Zone
+
+For use in subdomains, note that you need to create an
+`aws_route53_record` of type `NS` as well as the subdomain
+zone.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_record import Route53Record
+from imports.aws.route53_zone import Route53Zone
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        dev = Route53Zone(self, "dev",
+            name="dev.example.com",
+            tags={
+                "Environment": "dev"
+            }
+        )
+        main = Route53Zone(self, "main",
+            name="example.com"
+        )
+        Route53Record(self, "dev-ns",
+            name="dev.example.com",
+            records=Token.as_list(dev.name_servers),
+            ttl=Token.as_number("30"),
+            type="NS",
+            zone_id=main.zone_id
+        )
+```
+
+### Private Zone
+
+~> **NOTE:** Terraform provides both exclusive VPC associations defined in-line in this resource via `vpc` configuration blocks and a separate [Zone VPC Association](/docs/providers/aws/r/route53_zone_association.html) resource. At this time, you cannot use in-line VPC associations in conjunction with any `aws_route53_zone_association` resources with the same zone ID otherwise it will cause a perpetual difference in plan output. You can optionally use the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignore_changes` to manage additional associations via the `aws_route53_zone_association` resource.
+
+~> **NOTE:** Private zones require at least one VPC association at all times.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
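+#
+# Note: the zone below is made private by associating a VPC in-line via the
+# `vpc` block.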
+#
+from imports.aws.route53_zone import Route53Zone
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53Zone(self, "private",
+            name="example.com",
+            vpc=[Route53ZoneVpc(
+                vpc_id=example.id
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) This is the name of the hosted zone.
+* `comment` - (Optional) A comment for the hosted zone. Defaults to 'Managed by Terraform'.
+* `delegation_set_id` - (Optional) The ID of the reusable delegation set whose NS records you want to assign to the hosted zone. Conflicts with `vpc` as delegation sets can only be used for public zones.
+* `force_destroy` - (Optional) Whether to destroy all records (possibly managed outside of Terraform) in the zone when destroying the zone.
+* `tags` - (Optional) A map of tags to assign to the zone. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc` - (Optional) Configuration block(s) specifying VPC(s) to associate with a private hosted zone. Conflicts with the `delegation_set_id` argument in this resource and any [`aws_route53_zone_association` resource](/docs/providers/aws/r/route53_zone_association.html) specifying the same zone ID. Detailed below.
+
+### vpc Argument Reference
+
+* `vpc_id` - (Required) ID of the VPC to associate.
+* `vpc_region` - (Optional) Region of the VPC to associate. Defaults to AWS provider region.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Hosted Zone.
+* `zone_id` - The Hosted Zone ID. This can be referenced by zone records.
+* `name_servers` - A list of name servers in the associated (or default) delegation set.
+  Find more about delegation sets in [AWS docs](https://docs.aws.amazon.com/Route53/latest/APIReference/actions-on-reusable-delegation-sets.html).
+* `primary_name_server` - The Route 53 name server that created the SOA record.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Zones using the zone `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Zones using the zone `id`.
For example: + +```console +% terraform import aws_route53_zone.myzone Z1D633PJN98FT9 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53_zone_association.html.markdown b/website/docs/cdktf/python/r/route53_zone_association.html.markdown new file mode 100644 index 00000000000..efd938e0e4b --- /dev/null +++ b/website/docs/cdktf/python/r/route53_zone_association.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_zone_association" +description: |- + Manages a Route53 Hosted Zone VPC association +--- + + + +# Resource: aws_route53_zone_association + +Manages a Route53 Hosted Zone VPC association. VPC associations can only be made on private zones. See the [`aws_route53_vpc_association_authorization` resource](route53_vpc_association_authorization.html) for setting up cross-account associations. + +~> **NOTE:** Unless explicit association ordering is required (e.g., a separate cross-account association authorization), usage of this resource is not recommended. Use the `vpc` configuration blocks available within the [`aws_route53_zone` resource](/docs/providers/aws/r/route53_zone.html) instead. + +~> **NOTE:** Terraform provides both this standalone Zone VPC Association resource and exclusive VPC associations defined in-line in the [`aws_route53_zone` resource](/docs/providers/aws/r/route53_zone.html) via `vpc` configuration blocks. At this time, you cannot use those in-line VPC associations in conjunction with this resource and the same zone ID otherwise it will cause a perpetual difference in plan output. You can optionally use the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignore_changes` in the `aws_route53_zone` resource to manage additional associations via this resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_zone import Route53Zone +from imports.aws.route53_zone_association import Route53ZoneAssociation +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + primary = Vpc(self, "primary", + cidr_block="10.6.0.0/16", + enable_dns_hostnames=True, + enable_dns_support=True + ) + secondary = Vpc(self, "secondary", + cidr_block="10.7.0.0/16", + enable_dns_hostnames=True, + enable_dns_support=True + ) + example = Route53Zone(self, "example", + lifecycle=TerraformResourceLifecycle( + ignore_changes=[vpc] + ), + name="example.com", + vpc=[Route53ZoneVpc( + vpc_id=primary.id + ) + ] + ) + aws_route53_zone_association_secondary = Route53ZoneAssociation(self, "secondary_3", + vpc_id=secondary.id, + zone_id=example.zone_id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_zone_association_secondary.override_logical_id("secondary") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `zone_id` - (Required) The private hosted zone to associate. +* `vpc_id` - (Required) The VPC to associate with the private hosted zone. +* `vpc_region` - (Optional) The VPC's region. 
Defaults to the region of the AWS provider. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The calculated unique identifier for the association. +* `owning_account` - The account ID of the account that created the hosted zone. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Hosted Zone Associations using the Hosted Zone ID and VPC ID, separated by a colon (`:`). For example: + +The VPC is in the same region where you have configured the Terraform AWS Provider: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +The VPC is _not_ in the same region where you have configured the Terraform AWS Provider: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** Route 53 Hosted Zone Associations using the Hosted Zone ID and VPC ID, separated by a colon (`:`). For example: + +The VPC is in the same region where you have configured the Terraform AWS Provider: + +```console +% terraform import aws_route53_zone_association.example Z123456ABCDEFG:vpc-12345678 +``` + +The VPC is _not_ in the same region where you have configured the Terraform AWS Provider: + +```console +% terraform import aws_route53_zone_association.example Z123456ABCDEFG:vpc-12345678:us-east-2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53domains_registered_domain.html.markdown b/website/docs/cdktf/python/r/route53domains_registered_domain.html.markdown new file mode 100644 index 00000000000..3d82f4f85ef --- /dev/null +++ b/website/docs/cdktf/python/r/route53domains_registered_domain.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "Route 53 Domains" +layout: "aws" +page_title: "AWS: aws_route53domains_registered_domain" +description: |- + Provides a resource to manage a domain that has been registered and associated with the current AWS account. +--- + + + +# Resource: aws_route53domains_registered_domain + +Provides a resource to manage a domain that has been [registered](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/registrar-tld-list.html) and associated with the current AWS account. + +**This is an advanced resource** and has special caveats to be aware of when using it. Please read this document in its entirety before using this resource. + +The `aws_route53domains_registered_domain` resource behaves differently from normal resources in that if a domain has been registered, Terraform does not _register_ this domain, but instead "adopts" it into management. `terraform destroy` does not delete the domain but does remove the resource from Terraform state. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.route53_domains_registered_domain import Route53DomainsRegisteredDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53DomainsRegisteredDomain(self, "example",
+            domain_name="example.com",
+            name_server=[{
+                "name": "ns-195.awsdns-24.com"
+            }, {
+                "name": "ns-874.awsdns-45.net"
+            }
+            ],
+            tags={
+                "Environment": "test"
+            }
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** You must specify the same privacy setting for `admin_privacy`, `registrant_privacy` and `tech_privacy`.
+
+This resource supports the following arguments:
+
+* `admin_contact` - (Optional) Details about the domain administrative contact.
+* `admin_privacy` - (Optional) Whether domain administrative contact information is concealed from WHOIS queries. Default: `true`.
+* `auto_renew` - (Optional) Whether the domain registration is set to renew automatically. Default: `true`.
+* `domain_name` - (Required) The name of the registered domain.
+* `name_server` - (Optional) The list of nameservers for the domain.
+* `registrant_contact` - (Optional) Details about the domain registrant.
+* `registrant_privacy` - (Optional) Whether domain registrant contact information is concealed from WHOIS queries. Default: `true`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `tech_contact` - (Optional) Details about the domain technical contact.
+* `tech_privacy` - (Optional) Whether domain technical contact information is concealed from WHOIS queries. Default: `true`.
+* `transfer_lock` - (Optional) Whether the domain is locked for transfer. Default: `true`.
+
+The `admin_contact`, `registrant_contact` and `tech_contact` objects support the following:
+
+* `address_line_1` - (Optional) First line of the contact's address.
+* `address_line_2` - (Optional) Second line of the contact's address, if any.
+* `city` - (Optional) The city of the contact's address.
+* `contact_type` - (Optional) Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
+* `country_code` - (Optional) Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
+* `email` - (Optional) Email address of the contact.
+* `extra_params` - (Optional) A key-value map of parameters required by certain top-level domains.
+* `fax` - (Optional) Fax number of the contact. The number must be specified in the format "+[country dialing code].[number including any area code]".
+* `first_name` - (Optional) First name of the contact.
+* `last_name` - (Optional) Last name of the contact.
+* `organization_name` - (Optional) Name of the organization for contact types other than `PERSON`.
+* `phone_number` - (Optional) The phone number of the contact. The number must be specified in the format "+[country dialing code].[number including any area code]".
+* `state` - (Optional) The state or province of the contact's address.
+* `zip_code` - (Optional) The zip or postal code of the contact's address. + +The `name_server` object supports the following: + +* `glue_ips` - (Optional) Glue IP addresses of a name server. The list can contain only one IPv4 and one IPv6 address. +* `name` - (Required) The fully qualified host name of the name server. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The domain name. +* `abuse_contact_email` - Email address to contact to report incorrect contact information for a domain, to report that the domain is being used to send spam, to report that someone is cybersquatting on a domain name, or report some other type of abuse. +* `abuse_contact_phone` - Phone number for reporting abuse. +* `creation_date` - The date when the domain was created as found in the response to a WHOIS query. +* `expiration_date` - The date when the registration for the domain is set to expire. +* `registrar_name` - Name of the registrar of the domain as identified in the registry. +* `registrar_url` - Web address of the registrar. +* `reseller` - Reseller of the domain. +* `status_list` - List of [domain name status codes](https://www.icann.org/resources/pages/epp-status-codes-2014-06-16-en). +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `updated_date` - The last updated date of the domain as found in the response to a WHOIS query. +* `whois_server` - The fully qualified name of the WHOIS server that can answer the WHOIS query for the domain. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30m`) +- `update` - (Default `30m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53recoverycontrolconfig_cluster.html.markdown b/website/docs/cdktf/python/r/route53recoverycontrolconfig_cluster.html.markdown new file mode 100644 index 00000000000..acbd546bdd9 --- /dev/null +++ b/website/docs/cdktf/python/r/route53recoverycontrolconfig_cluster.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "Route 53 Recovery Control Config" +layout: "aws" +page_title: "AWS: aws_route53recoverycontrolconfig_cluster" +description: |- + Provides an AWS Route 53 Recovery Control Config Cluster +--- + + + +# Resource: aws_route53recoverycontrolconfig_cluster + +Provides an AWS Route 53 Recovery Control Config Cluster. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_recoverycontrolconfig_cluster import Route53RecoverycontrolconfigCluster +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53RecoverycontrolconfigCluster(self, "example", + name="georgefitzgerald" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Unique name describing the cluster. 
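+
+Beyond `name`, everything on this resource is computed (see the attributes below). If other stacks or scripts need the cluster details, one option is to surface them as stack outputs. A minimal sketch, assuming provider bindings generated by `cdktf get` as in the example above; `TerraformOutput` is standard cdktf:
+
+```python
+from constructs import Construct
+from cdktf import TerraformOutput, TerraformStack
+from imports.aws.route53_recoverycontrolconfig_cluster import Route53RecoverycontrolconfigCluster
+class ClusterOutputsStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        cluster = Route53RecoverycontrolconfigCluster(self, "example",
+            name="georgefitzgerald"
+        )
+        # Both attributes are computed by AWS at apply time.
+        TerraformOutput(self, "cluster_arn", value=cluster.arn)
+        TerraformOutput(self, "cluster_status", value=cluster.status)
+```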
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cluster.
+* `cluster_endpoints` - List of 5 endpoints in 5 regions that can be used to talk to the cluster. See below.
+* `status` - Status of the cluster. `PENDING` when it is being created, `PENDING_DELETION` when it is being deleted, and `DEPLOYED` otherwise.
+
+### cluster_endpoints
+
+* `endpoint` - Cluster endpoint.
+* `region` - Region of the endpoint.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config cluster using the cluster ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Recovery Control Config cluster using the cluster ARN. For example:
+
+```console
+% terraform import aws_route53recoverycontrolconfig_cluster.mycluster arn:aws:route53-recovery-control::313517334327:cluster/f9ae13be-a11e-4ec7-8522-94a70468e6ea
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53recoverycontrolconfig_control_panel.html.markdown b/website/docs/cdktf/python/r/route53recoverycontrolconfig_control_panel.html.markdown
new file mode 100644
index 00000000000..27f7727450e
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53recoverycontrolconfig_control_panel.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "Route 53 Recovery Control Config"
+layout: "aws"
+page_title: "AWS: aws_route53recoverycontrolconfig_control_panel"
+description: |-
+  Provides an AWS Route 53 Recovery Control Config Control Panel
+---
+
+
+
+# Resource: aws_route53recoverycontrolconfig_control_panel
+
+Provides an AWS Route 53 Recovery Control Config Control Panel.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_recoverycontrolconfig_control_panel import Route53RecoverycontrolconfigControlPanel
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53RecoverycontrolconfigControlPanel(self, "example",
+            cluster_arn="arn:aws:route53-recovery-control::123456789012:cluster/8d47920e-d789-437d-803a-2dcc4b204393",
+            name="balmorhea"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `cluster_arn` - (Required) ARN of the cluster in which this control panel will reside.
+* `name` - (Required) Name describing the control panel.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the control panel.
+* `default_control_panel` - Whether this is the default control panel.
+* `routing_control_count` - Number of routing controls in a control panel.
+* `status` - Status of the control panel: `PENDING` when it is being created/updated, `PENDING_DELETION` when it is being deleted, and `DEPLOYED` otherwise.
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config Control Panel using the control panel arn. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route53 Recovery Control Config Control Panel using the control panel arn. For example: + +```console +% terraform import aws_route53recoverycontrolconfig_control_panel.mypanel arn:aws:route53-recovery-control::313517334327:controlpanel/1bfba17df8684f5dab0467b71424f7e8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53recoverycontrolconfig_routing_control.html.markdown b/website/docs/cdktf/python/r/route53recoverycontrolconfig_routing_control.html.markdown new file mode 100644 index 00000000000..11ea8c663eb --- /dev/null +++ b/website/docs/cdktf/python/r/route53recoverycontrolconfig_routing_control.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Route 53 Recovery Control Config" +layout: "aws" +page_title: "AWS: aws_route53recoverycontrolconfig_routing_control" +description: |- + Provides an AWS Route 53 Recovery Control Config Routing Control +--- + + + +# Resource: aws_route53recoverycontrolconfig_routing_control + +Provides an AWS Route 53 Recovery Control Config Routing Control. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_recoverycontrolconfig_routing_control import Route53RecoverycontrolconfigRoutingControl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53RecoverycontrolconfigRoutingControl(self, "example", + cluster_arn="arn:aws:route53-recovery-control::881188118811:cluster/8d47920e-d789-437d-803a-2dcc4b204393", + name="tinlicker" + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_recoverycontrolconfig_routing_control import Route53RecoverycontrolconfigRoutingControl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53RecoverycontrolconfigRoutingControl(self, "example", + cluster_arn="arn:aws:route53-recovery-control::881188118811:cluster/8d47920e-d789-437d-803a-2dcc4b204393", + control_panel_arn="arn:aws:route53-recovery-control::428113431245:controlpanel/abd5fbfc052d4844a082dbf400f61da8", + name="thomasoliver" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `cluster_arn` - (Required) ARN of the cluster in which this routing control will reside. +* `name` - (Required) The name describing the routing control. + +The following arguments are optional: + +* `control_panel_arn` - (Optional) ARN of the control panel in which this routing control will reside. 
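+
+Rather than hard-coding ARNs as in the examples above, the cluster, control panel, and routing control can be wired together through resource references. A hedged end-to-end sketch using only the classes shown on these pages; omitting `control_panel_arn` typically places the routing control on the cluster's default control panel instead:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.route53_recoverycontrolconfig_cluster import Route53RecoverycontrolconfigCluster
+from imports.aws.route53_recoverycontrolconfig_control_panel import Route53RecoverycontrolconfigControlPanel
+from imports.aws.route53_recoverycontrolconfig_routing_control import Route53RecoverycontrolconfigRoutingControl
+class RecoveryControlStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        cluster = Route53RecoverycontrolconfigCluster(self, "cluster",
+            name="example-cluster"
+        )
+        panel = Route53RecoverycontrolconfigControlPanel(self, "panel",
+            cluster_arn=cluster.arn,
+            name="example-panel"
+        )
+        # control_panel_arn is optional; it is set here so the routing
+        # control lands on the custom panel rather than the default one.
+        Route53RecoverycontrolconfigRoutingControl(self, "control",
+            cluster_arn=cluster.arn,
+            control_panel_arn=panel.arn,
+            name="example-control"
+        )
+```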
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the routing control. +* `status` - Status of routing control. `PENDING` when it is being created/updated, `PENDING_DELETION` when it is being deleted, and `DEPLOYED` otherwise. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config Routing Control using the routing control arn. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route53 Recovery Control Config Routing Control using the routing control arn. For example: + +```console +% terraform import aws_route53recoverycontrolconfig_routing_control.mycontrol arn:aws:route53-recovery-control::313517334327:controlpanel/abd5fbfc052d4844a082dbf400f61da8/routingcontrol/d5d90e587870494b +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53recoverycontrolconfig_safety_rule.html.markdown b/website/docs/cdktf/python/r/route53recoverycontrolconfig_safety_rule.html.markdown new file mode 100644 index 00000000000..cb822cdc7fe --- /dev/null +++ b/website/docs/cdktf/python/r/route53recoverycontrolconfig_safety_rule.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "Route 53 Recovery Control Config" +layout: "aws" +page_title: "AWS: aws_route53recoverycontrolconfig_safety_rule" +description: |- + Provides an AWS Route 53 Recovery Control Config Safety Rule +--- + + + +# Resource: aws_route53recoverycontrolconfig_safety_rule + +Provides an AWS Route 53 Recovery Control Config Safety Rule + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_recoverycontrolconfig_safety_rule import Route53RecoverycontrolconfigSafetyRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53RecoverycontrolconfigSafetyRule(self, "example", + asserted_controls=[ + Token.as_string(aws_route53_recoverycontrolconfig_routing_control_example.arn) + ], + control_panel_arn="arn:aws:route53-recovery-control::313517334327:controlpanel/abd5fbfc052d4844a082dbf400f61da8", + name="daisyguttridge", + rule_config={ + "inverted": False, + "threshold": 1, + "type": "ATLEAST" + }, + wait_period_ms=5000 + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.route53_recoverycontrolconfig_safety_rule import Route53RecoverycontrolconfigSafetyRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53RecoverycontrolconfigSafetyRule(self, "example", + control_panel_arn="arn:aws:route53-recovery-control::313517334327:controlpanel/abd5fbfc052d4844a082dbf400f61da8", + gating_controls=[ + Token.as_string(aws_route53_recoverycontrolconfig_routing_control_example.arn) + ], + name="i_o", + rule_config={ + "inverted": False, + "threshold": 1, + "type": "ATLEAST" + }, + target_controls=[ + Token.as_string(aws_route53_recoverycontrolconfig_routing_control_example.arn) + ], + wait_period_ms=5000 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `control_panel_arn` - (Required) ARN of the control panel in which this safety rule will reside. +* `name` - (Required) Name describing the safety rule. +* `rule_config` - (Required) Configuration block for safety rule criteria. See below. +* `wait_period_ms` - (Required) Evaluation period, in milliseconds (ms), during which any request against the target routing controls will fail. + +The following arguments are optional: + +* `asserted_controls` - (Optional) Routing controls that are part of transactions that are evaluated to determine if a request to change a routing control state is allowed. +* `gating_controls` - (Optional) Gating controls for the new gating rule. That is, routing controls that are evaluated by the rule configuration that you specify. +* `target_controls` - (Optional) Routing controls that can only be set or unset if the specified `rule_config` evaluates to true for the specified `gating_controls`. + +### rule_config + +* `inverted` - (Required) Logical negation of the rule. +* `threshold` - (Required) Number of controls that must be set when you specify an `ATLEAST` type rule. +* `type` - (Required) Rule type. Valid values are `ATLEAST`, `AND`, and `OR`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the safety rule. +* `status` - Status of the safety rule. `PENDING` when it is being created/updated, `PENDING_DELETION` when it is being deleted, and `DEPLOYED` otherwise. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config Safety Rule using the safety rule ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route53 Recovery Control Config Safety Rule using the safety rule ARN. 
For example:
+
+```console
+% terraform import aws_route53recoverycontrolconfig_safety_rule.myrule arn:aws:route53-recovery-control::313517334327:controlpanel/1bfba17df8684f5dab0467b71424f7e8/safetyrule/3bacc77003364c0f
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53recoveryreadiness_cell.html.markdown b/website/docs/cdktf/python/r/route53recoveryreadiness_cell.html.markdown
new file mode 100644
index 00000000000..5010944b8a6
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53recoveryreadiness_cell.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Route 53 Recovery Readiness"
+layout: "aws"
+page_title: "AWS: aws_route53recoveryreadiness_cell"
+description: |-
+  Provides an AWS Route 53 Recovery Readiness Cell
+---
+
+
+
+# Resource: aws_route53recoveryreadiness_cell
+
+Provides an AWS Route 53 Recovery Readiness Cell.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_recoveryreadiness_cell import Route53RecoveryreadinessCell
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53RecoveryreadinessCell(self, "example",
+            cell_name="us-west-2-failover-cell"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `cell_name` - (Required) Unique name describing the cell.
+
+The following arguments are optional:
+
+* `cells` - (Optional) List of cell ARNs to add as nested fault domains within this cell.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cell.
+* `parent_readiness_scopes` - List of readiness scopes (recovery groups or cells) that contain this cell.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness cells using the cell name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Recovery Readiness cells using the cell name.
For example: + +```console +% terraform import aws_route53recoveryreadiness_cell.us-west-2-failover-cell us-west-2-failover-cell +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route53recoveryreadiness_readiness_check.html.markdown b/website/docs/cdktf/python/r/route53recoveryreadiness_readiness_check.html.markdown new file mode 100644 index 00000000000..105e3dbbab1 --- /dev/null +++ b/website/docs/cdktf/python/r/route53recoveryreadiness_readiness_check.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "Route 53 Recovery Readiness" +layout: "aws" +page_title: "AWS: aws_route53recoveryreadiness_readiness_check" +description: |- + Provides an AWS Route 53 Recovery Readiness Readiness Check +--- + + + +# Resource: aws_route53recoveryreadiness_readiness_check + +Provides an AWS Route 53 Recovery Readiness Readiness Check. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_recoveryreadiness_readiness_check import Route53RecoveryreadinessReadinessCheck +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Route53RecoveryreadinessReadinessCheck(self, "example", + readiness_check_name=my_cw_alarm_check, + resource_set_name=my_cw_alarm_set + ) +``` + +## Argument Reference + +The following arguments are required: + +* `readiness_check_name` - (Required) Unique name describing the readiness check. +* `resource_set_name` - (Required) Name describing the resource set that will be monitored for readiness. + +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the readiness_check +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness readiness checks using the readiness check name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route53 Recovery Readiness readiness checks using the readiness check name. 
For example:
+
+```console
+% terraform import aws_route53recoveryreadiness_readiness_check.my-cw-alarm-check example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53recoveryreadiness_recovery_group.html.markdown b/website/docs/cdktf/python/r/route53recoveryreadiness_recovery_group.html.markdown
new file mode 100644
index 00000000000..4c49fa035ed
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53recoveryreadiness_recovery_group.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Route 53 Recovery Readiness"
+layout: "aws"
+page_title: "AWS: aws_route53recoveryreadiness_recovery_group"
+description: |-
+  Provides an AWS Route 53 Recovery Readiness Recovery Group
+---
+
+
+
+# Resource: aws_route53recoveryreadiness_recovery_group
+
+Provides an AWS Route 53 Recovery Readiness Recovery Group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_recoveryreadiness_recovery_group import Route53RecoveryreadinessRecoveryGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53RecoveryreadinessRecoveryGroup(self, "example",
+            recovery_group_name="my-high-availability-app"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `recovery_group_name` - (Required) A unique name describing the recovery group.
+
+The following arguments are optional:
+
+* `cells` - (Optional) List of cell ARNs to add as nested fault domains within this recovery group.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the recovery group.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness recovery groups using the recovery group name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Recovery Readiness recovery groups using the recovery group name.
For example:
+
+```console
+% terraform import aws_route53recoveryreadiness_recovery_group.my-high-availability-app my-high-availability-app
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route53recoveryreadiness_resource_set.html.markdown b/website/docs/cdktf/python/r/route53recoveryreadiness_resource_set.html.markdown
new file mode 100644
index 00000000000..d9708fc60e7
--- /dev/null
+++ b/website/docs/cdktf/python/r/route53recoveryreadiness_resource_set.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "Route 53 Recovery Readiness"
+layout: "aws"
+page_title: "AWS: aws_route53recoveryreadiness_resource_set"
+description: |-
+  Provides an AWS Route 53 Recovery Readiness Resource Set
+---
+
+
+
+# Resource: aws_route53recoveryreadiness_resource_set
+
+Provides an AWS Route 53 Recovery Readiness Resource Set.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route53_recoveryreadiness_resource_set import Route53RecoveryreadinessResourceSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53RecoveryreadinessResourceSet(self, "example",
+            resource_set_name=my_cw_alarm_set,
+            resource_set_type="AWS::CloudWatch::Alarm",
+            resources=[{
+                "resource_arn": Token.as_string(aws_cloudwatch_metric_alarm_example.arn)
+            }
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_set_name` - (Required) Unique name describing the resource set.
+* `resource_set_type` - (Required) Type of the resources in the resource set.
+* `resources` - (Required) List of resources to add to this resource set. See below.
+
+The following arguments are optional:
+
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### resources
+
+* `dns_target_resource` - (Required if `resource_arn` is not set) Component for DNS/Routing Control Readiness Checks.
+* `readiness_scopes` - (Optional) Recovery group ARN or cell ARN that contains this resource set.
+* `resource_arn` - (Required if `dns_target_resource` is not set) ARN of the resource.
+
+### dns_target_resource
+
+* `domain_name` - (Optional) DNS name that acts as the ingress point to a portion of the application.
+* `hosted_zone_arn` - (Optional) Hosted Zone ARN that contains the DNS record with the provided name of the target resource.
+* `record_set_id` - (Optional) Route53 record set ID to uniquely identify a record given a `domain_name` and a `record_type`.
+* `record_type` - (Optional) Type of DNS record of the target resource.
+* `target_resource` - (Optional) Target resource that the Route 53 record specified with the above parameters points to.
+
+### target_resource
+
+* `nlb_resource` - (Optional) NLB resource that a DNS Target Resource points to. Required if `r53_resource` is not set.
+* `r53_resource` - (Optional) Route53 resource that a DNS Target Resource record points to.
+
+### nlb_resource
+
+* `arn` - (Required) NLB resource ARN.
+
+### r53_resource
+
+* `domain_name` - (Optional) Domain name that is targeted.
+* `record_set_id` - (Optional) Resource record set ID that is targeted.
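+
+For DNS/Routing Control readiness checks, the nested blocks above compose as follows. A hedged sketch: the `resource_set_type` string and the referenced cell and load balancer resources are illustrative assumptions, not taken from this page, and the dict keys mirror the generated `cdktf convert` style of the example above:
+
+```python
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.route53_recoveryreadiness_resource_set import Route53RecoveryreadinessResourceSet
+class DnsTargetResourceSet(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Route53RecoveryreadinessResourceSet(self, "dns_example",
+            resource_set_name="my-dns-target-set",
+            resource_set_type="AWS::Route53RecoveryReadiness::DNSTargetResource",
+            resources=[{
+                # Scope this entry to an existing cell (assumed to exist).
+                "readiness_scopes": [Token.as_string(aws_route53recoveryreadiness_cell_example.arn)],
+                "dns_target_resource": {
+                    "domain_name": "app.example.com",
+                    "record_type": "A",
+                    "hosted_zone_arn": "arn:aws:route53:::hostedzone/Z1D633PJN98FT9",
+                    "target_resource": {
+                        "nlb_resource": {
+                            "arn": Token.as_string(aws_lb_example.arn)
+                        }
+                    }
+                }
+            }
+            ]
+        )
+```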
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the resource set.
+* `resources.#.component_id` - Unique identifier for DNS Target Resources, used for readiness checks.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness resource sets using the resource set name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Route53 Recovery Readiness resource sets using the resource set name. For example:
+
+```console
+% terraform import aws_route53recoveryreadiness_resource_set.my-cw-alarm-set example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/route_table.html.markdown b/website/docs/cdktf/python/r/route_table.html.markdown
new file mode 100644
index 00000000000..8cc46c6b406
--- /dev/null
+++ b/website/docs/cdktf/python/r/route_table.html.markdown
@@ -0,0 +1,161 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route_table"
+description: |-
+  Provides a resource to create a VPC routing table.
+---
+
+
+
+# Resource: aws_route_table
+
+Provides a resource to create a VPC routing table.
+
+~> **NOTE on Route Tables and Routes:** Terraform currently
+provides both a standalone [Route resource](route.html) and a Route Table resource with routes
+defined in-line. At this time you cannot use a Route Table with in-line routes
+in conjunction with any Route resources. Doing so will cause
+a conflict of rule settings and will overwrite rules.
+
+~> **NOTE on `gateway_id` and `nat_gateway_id`:** The AWS API is very forgiving with these two
+attributes and the `aws_route_table` resource can be created with a NAT ID specified as a Gateway ID attribute.
+This _will_ lead to a permanent diff between your configuration and state file, as the API returns the correct
+parameters in the returned route table. If you're experiencing constant diffs in your `aws_route_table` resources,
+the first thing to check is whether or not you're specifying a NAT ID instead of a Gateway ID, or vice versa.
+
+~> **NOTE on `propagating_vgws` and the `aws_vpn_gateway_route_propagation` resource:**
+If the `propagating_vgws` argument is present, it's not supported to _also_
+define route propagations using `aws_vpn_gateway_route_propagation`, since
+this resource will delete any propagating gateways not explicitly listed in
+`propagating_vgws`. Omit this argument when defining route propagation using
+the separate resource, as sketched below.
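+
+A minimal sketch of that separate-resource approach. The `VpnGatewayRoutePropagation` class and module path follow the usual `cdktf get` binding conventions and are assumed here, as are the referenced VPC and VPN gateway resources:
+
+```python
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.route_table import RouteTable
+from imports.aws.vpn_gateway_route_propagation import VpnGatewayRoutePropagation
+class PropagationStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # No propagating_vgws here; propagation is managed by the separate resource below.
+        example = RouteTable(self, "example",
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+        VpnGatewayRoutePropagation(self, "propagation",
+            route_table_id=example.id,
+            vpn_gateway_id=Token.as_string(aws_vpn_gateway_example.id)
+        )
+```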
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route_table import RouteTable, RouteTableRoute
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RouteTable(self, "example",
+            route=[RouteTableRoute(
+                cidr_block="10.0.1.0/24",
+                gateway_id=Token.as_string(aws_internet_gateway_example.id)
+            ), RouteTableRoute(
+                egress_only_gateway_id=Token.as_string(aws_egress_only_internet_gateway_example.id),
+                ipv6_cidr_block="::/0"
+            )
+            ],
+            tags={
+                "Name": "example"
+            },
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+```
+
+To subsequently remove all managed routes:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route_table import RouteTable
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RouteTable(self, "example",
+            route=[],
+            tags={
+                "Name": "example"
+            },
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `vpc_id` - (Required) The VPC ID.
+* `route` - (Optional) A list of route objects. Their keys are documented below. This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+This means that omitting this argument is interpreted as ignoring any existing routes. To remove all managed routes, an empty list should be specified. See the example above.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `propagating_vgws` - (Optional) A list of virtual gateways for propagation.
+
+### route Argument Reference
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+One of the following destination arguments must be supplied:
+
+* `cidr_block` - (Required) The CIDR block of the route.
+* `ipv6_cidr_block` - (Optional) The IPv6 CIDR block of the route.
+* `destination_prefix_list_id` - (Optional) The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the route.
+
+One of the following target arguments must be supplied:
+
+* `carrier_gateway_id` - (Optional) Identifier of a carrier gateway. This attribute can only be used when the VPC contains a subnet which is associated with a Wavelength Zone.
+* `core_network_arn` - (Optional) The Amazon Resource Name (ARN) of a core network.
+* `egress_only_gateway_id` - (Optional) Identifier of a VPC Egress Only Internet Gateway.
+* `gateway_id` - (Optional) Identifier of a VPC internet gateway or a virtual private gateway.
+* `local_gateway_id` - (Optional) Identifier of an Outpost local gateway.
+* `nat_gateway_id` - (Optional) Identifier of a VPC NAT gateway.
+* `network_interface_id` - (Optional) Identifier of an EC2 network interface. +* `transit_gateway_id` - (Optional) Identifier of an EC2 Transit Gateway. +* `vpc_endpoint_id` - (Optional) Identifier of a VPC Endpoint. +* `vpc_peering_connection_id` - (Optional) Identifier of a VPC peering connection. + +Note that the default route, mapping the VPC's CIDR block to "local", is created implicitly and cannot be specified. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +~> **NOTE:** Only the target that is entered is exported as a readable +attribute once the route resource is created. + +* `id` - The ID of the routing table. +* `arn` - The ARN of the route table. +* `owner_id` - The ID of the AWS account that owns the route table. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5m`) +- `update` - (Default `2m`) +- `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route Tables using the route table `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Route Tables using the route table `id`. For example: + +```console +% terraform import aws_route_table.public_rt rtb-4e616f6d69 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/route_table_association.html.markdown b/website/docs/cdktf/python/r/route_table_association.html.markdown new file mode 100644 index 00000000000..5324ac975c7 --- /dev/null +++ b/website/docs/cdktf/python/r/route_table_association.html.markdown @@ -0,0 +1,120 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_route_table_association" +description: |- + Provides a resource to create an association between a route table and a subnet or a route table and an internet gateway or virtual private gateway. +--- + + + +# Resource: aws_route_table_association + +Provides a resource to create an association between a route table and a subnet or a route table and an +internet gateway or virtual private gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route_table_association import RouteTableAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + RouteTableAssociation(self, "a", + route_table_id=bar.id, + subnet_id=foo.id + ) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.route_table_association import RouteTableAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RouteTableAssociation(self, "b",
+            gateway_id=foo.id,
+            route_table_id=bar.id
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** One of `subnet_id` or `gateway_id` is required.
+
+This resource supports the following arguments:
+
+* `subnet_id` - (Optional) The subnet ID to create an association. Conflicts with `gateway_id`.
+* `gateway_id` - (Optional) The gateway ID to create an association. Conflicts with `subnet_id`.
+* `route_table_id` - (Required) The ID of the routing table to associate with.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the association.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `2m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EC2 Route Table Associations using the associated resource ID and Route Table ID separated by a forward slash (`/`). For example:
+
+~> **NOTE:** Attempting to associate a route table with a subnet or gateway, where either is already associated, will result in an error (e.g., `Resource.AlreadyAssociated: the specified association for route table rtb-4176657279 conflicts with an existing association`) unless you first import the original association.
+
+With EC2 Subnets:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+With EC2 Internet Gateways:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+**Using `terraform import` to import** EC2 Route Table Associations using the associated resource ID and Route Table ID separated by a forward slash (`/`). For example:
+
+With EC2 Subnets:
+
+```console
+% terraform import aws_route_table_association.assoc subnet-6777656e646f6c796e/rtb-656c65616e6f72
+```
+
+With EC2 Internet Gateways:
+
+```console
+% terraform import aws_route_table_association.assoc igw-01b3a60780f8d034a/rtb-656c65616e6f72
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rum_app_monitor.html.markdown b/website/docs/cdktf/python/r/rum_app_monitor.html.markdown
new file mode 100644
index 00000000000..5f0dcb00d0c
--- /dev/null
+++ b/website/docs/cdktf/python/r/rum_app_monitor.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "CloudWatch RUM"
+layout: "aws"
+page_title: "AWS: aws_rum_app_monitor"
+description: |-
+  Provides a CloudWatch RUM App Monitor resource.
+---
+
+
+
+# Resource: aws_rum_app_monitor
+
+Provides a CloudWatch RUM App Monitor resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rum_app_monitor import RumAppMonitor
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RumAppMonitor(self, "example",
+            domain="localhost",
+            name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the app monitor.
+* `domain` - (Required) The top-level internet domain name for which your application has administrative authority.
+* `app_monitor_configuration` - (Optional) Configuration data for the app monitor. See [app_monitor_configuration](#app_monitor_configuration) below.
+* `cw_log_enabled` - (Optional) Data collected by RUM is kept by RUM for 30 days and then deleted. This parameter specifies whether RUM sends a copy of this telemetry data to Amazon CloudWatch Logs in your account. This enables you to keep the telemetry data for more than 30 days, but it does incur Amazon CloudWatch Logs charges. Default value is `false`.
+* `custom_events` - (Optional) Specifies whether this app monitor allows the web client to define and send custom events. If you omit this parameter, custom events are `DISABLED`. See [custom_events](#custom_events) below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### app_monitor_configuration
+
+* `allow_cookies` - (Optional) If you set this to `true`, the RUM web client sets two cookies, a session cookie and a user cookie. The cookies allow the RUM web client to collect data relating to the number of users an application has and the behavior of the application across a sequence of events. Cookies are stored in the top-level domain of the current page.
+* `enable_xray` - (Optional) If you set this to `true`, RUM enables X-Ray tracing for the user sessions that RUM samples. RUM adds an X-Ray trace header to allowed HTTP requests. It also records an X-Ray segment for allowed HTTP requests.
+* `excluded_pages` - (Optional) A list of URLs in your website or application to exclude from RUM data collection.
+* `favorite_pages` - (Optional) A list of pages in the CloudWatch RUM console that are to be displayed with a "favorite" icon.
+* `guest_role_arn` - (Optional) The ARN of the guest IAM role that is attached to the Amazon Cognito identity pool that is used to authorize the sending of data to RUM.
+* `identity_pool_id` - (Optional) The ID of the Amazon Cognito identity pool that is used to authorize the sending of data to RUM.
+* `included_pages` - (Optional) If this app monitor is to collect data from only certain pages in your application, this structure lists those pages.
+* `session_sample_rate` - (Optional) Specifies the percentage of user sessions to use for RUM data collection. Choosing a higher percentage gives you more data but also incurs more costs. The number you specify is the percentage of user sessions that will be used. Default value is `0.1`.
+* `telemetries` - (Optional) An array that lists the types of telemetry data that this app monitor is to collect. Valid values are `errors`, `performance`, and `http`.
+
+### custom_events
+
+* `status` - (Optional) Specifies whether this app monitor allows the web client to define and send custom events. The default is for custom events to be `DISABLED`. Valid values are `DISABLED` and `ENABLED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) specifying the app monitor.
+* `id` - The name of the app monitor, which also serves as its identifier.
+* `app_monitor_id` - The unique ID of the app monitor. Useful for JS templates.
+* `cw_log_group` - The name of the log group where the copies are stored.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch RUM App Monitor using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudWatch RUM App Monitor using the `name`. For example:
+
+```console
+% terraform import aws_rum_app_monitor.example example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/rum_metrics_destination.html.markdown b/website/docs/cdktf/python/r/rum_metrics_destination.html.markdown
new file mode 100644
index 00000000000..7dc7939d6b3
--- /dev/null
+++ b/website/docs/cdktf/python/r/rum_metrics_destination.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "CloudWatch RUM"
+layout: "aws"
+page_title: "AWS: aws_rum_metrics_destination"
+description: |-
+  Provides a CloudWatch RUM Metrics Destination resource.
+---
+
+
+
+# Resource: aws_rum_metrics_destination
+
+Provides a CloudWatch RUM Metrics Destination resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.rum_metrics_destination import RumMetricsDestination
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        RumMetricsDestination(self, "example",
+            app_monitor_name=Token.as_string(aws_rum_app_monitor_example.name),
+            destination="CloudWatch"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `app_monitor_name` - (Required) The name of the CloudWatch RUM app monitor that will send the metrics.
+* `destination` - (Required) Defines the destination to send the metrics to. Valid values are `CloudWatch` and `Evidently`. If you specify `Evidently`, you must also specify the ARN of the CloudWatch Evidently experiment that is to be the destination and an IAM role that has permission to write to the experiment.
+* `destination_arn` - (Optional) Use this parameter only if Destination is Evidently.
+* `iam_role_arn` - (Optional) This parameter is required if `destination` is `Evidently`. If `destination` is `CloudWatch`, do not use this parameter.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the CloudWatch RUM app monitor that will send the metrics.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch RUM Metrics Destination using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import CloudWatch RUM Metrics Destination using the `id`. For example:
+
+```console
+% terraform import aws_rum_metrics_destination.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_access_point.html.markdown b/website/docs/cdktf/python/r/s3_access_point.html.markdown
new file mode 100644
index 00000000000..5a36665249a
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_access_point.html.markdown
@@ -0,0 +1,172 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3_access_point"
+description: |-
+  Manages an S3 Access Point.
+---
+
+
+
+# Resource: aws_s3_access_point
+
+Provides a resource to manage an S3 Access Point.
+
+~> **NOTE on Access Points and Access Point Policies:** Terraform provides both a standalone [Access Point Policy](s3control_access_point_policy.html) resource and an Access Point resource with a resource policy defined in-line. You cannot use an Access Point with in-line resource policy in conjunction with an Access Point Policy resource. Doing so will cause a conflict of policies and will overwrite the access point's resource policy.
+
+-> Advanced usage: To use a custom API endpoint for this Terraform resource, use the [`s3control` endpoint provider configuration](/docs/providers/aws/index.html#s3control), not the `s3` endpoint provider configuration.
+
+## Example Usage
+
+### AWS Partition Bucket
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_access_point import S3AccessPoint
+from imports.aws.s3_bucket import S3Bucket
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_s3_access_point_example = S3AccessPoint(self, "example_1",
+            bucket=example.id,
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_access_point_example.override_logical_id("example")
+```
+
+### S3 on Outposts Bucket
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_access_point import S3AccessPoint, S3AccessPointVpcConfiguration
+from imports.aws.s3_control_bucket import S3ControlBucket
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, outpost_id):
+        super().__init__(scope, name)
+        example = S3ControlBucket(self, "example",
+            bucket="example",
+            outpost_id=outpost_id
+        )
+        aws_vpc_example = Vpc(self, "example_1",
+            cidr_block="10.0.0.0/16"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_vpc_example.override_logical_id("example")
+        aws_s3_access_point_example = S3AccessPoint(self, "example_2",
+            bucket=example.arn,
+            name="example",
+            vpc_configuration=S3AccessPointVpcConfiguration(
+                vpc_id=Token.as_string(aws_vpc_example.id)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_access_point_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `bucket` - (Required) Name of an AWS Partition S3 Bucket or the ARN of S3 on Outposts Bucket that you want to associate this access point with.
+* `name` - (Required) Name you want to assign to this access point.
+
+The following arguments are optional:
+
+* `account_id` - (Optional) AWS account ID for the owner of the bucket for which you want to create an access point. Defaults to automatically determined account ID of the Terraform AWS provider.
+* `bucket_account_id` - (Optional) AWS account ID associated with the S3 bucket associated with this access point.
+* `policy` - (Optional) Valid JSON document that specifies the policy that you want to apply to this access point. Removing `policy` from your configuration or setting `policy` to null or an empty string (i.e., `policy = ""`) _will not_ delete the policy since it could have been set by `aws_s3control_access_point_policy`. To remove the `policy`, set it to `"{}"` (an empty JSON document).
+* `public_access_block_configuration` - (Optional) Configuration block to manage the `PublicAccessBlock` configuration that you want to apply to this Amazon S3 bucket. You can enable the configuration options in any combination. Detailed below.
+* `vpc_configuration` - (Optional) Configuration block to restrict access to this access point to requests from the specified Virtual Private Cloud (VPC). Required for S3 on Outposts. Detailed below.
+
+### public_access_block_configuration Configuration Block
+
+The following arguments are optional:
+
+* `block_public_acls` - (Optional) Whether Amazon S3 should block public ACLs for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing policies or ACLs. When set to `true` causes the following behavior:
+    * PUT Bucket acl and PUT Object acl calls fail if the specified ACL is public.
+    * PUT Object calls fail if the request includes a public ACL.
+    * PUT Bucket calls fail if the request includes a public ACL.
+* `block_public_policy` - (Optional) Whether Amazon S3 should block public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing bucket policies. When set to `true` causes Amazon S3 to:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignore_public_acls` - (Optional) Whether Amazon S3 should ignore public ACLs for buckets in this account. Defaults to `true`.
Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set. When set to `true` causes Amazon S3 to: + * Ignore all public ACLs on buckets in this account and any objects that they contain. +* `restrict_public_buckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`: + * Only the bucket owner and AWS Services can access buckets with public policies. + +### vpc_configuration Configuration Block + +The following arguments are required: + +* `vpc_id` - (Required) This access point will only allow connections from the specified VPC ID. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `alias` - Alias of the S3 Access Point. +* `arn` - ARN of the S3 Access Point. +* `domain_name` - DNS domain name of the S3 Access Point in the format _`name`_-_`account_id`_.s3-accesspoint._region_.amazonaws.com. +Note: S3 access points only support secure access by HTTPS. HTTP isn't supported. +* `endpoints` - VPC endpoints for the S3 Access Point. +* `has_public_access_policy` - Indicates whether this access point currently has a policy that allows public access. +* `id` - For Access Point of an AWS Partition S3 Bucket, the AWS account ID and access point name separated by a colon (`:`). For S3 on Outposts Bucket, the ARN of the Access Point. +* `network_origin` - Indicates whether this access point allows access from the public Internet. Values are `VPC` (the access point doesn't allow access from the public Internet) and `Internet` (the access point allows access from the public Internet, subject to the access point and bucket access policies). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import this resource using the `account_id` and `name` separated by a colon (`:`) for Access Points associated with an AWS Partition S3 Bucket or the ARN for Access Points associated with an S3 on Outposts Bucket. 
For example: + +Import using the `account_id` and `name` separated by a colon (`:`) for Access Points associated with an AWS Partition S3 Bucket: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import using the ARN for Access Points associated with an S3 on Outposts Bucket: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import.** For example: + +Import using the `account_id` and `name` separated by a colon (`:`) for Access Points associated with an AWS Partition S3 Bucket: + +```console +% terraform import aws_s3_access_point.example 123456789012:example +``` + +Import using the ARN for Access Points associated with an S3 on Outposts Bucket: + +```console +% terraform import aws_s3_access_point.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-1234567890123456/accesspoint/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_account_public_access_block.html.markdown b/website/docs/cdktf/python/r/s3_account_public_access_block.html.markdown new file mode 100644 index 00000000000..ac3d112f44f --- /dev/null +++ b/website/docs/cdktf/python/r/s3_account_public_access_block.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3_account_public_access_block" +description: |- + Manages S3 account-level Public Access Block Configuration +--- + + + +# Resource: aws_s3_account_public_access_block + +Manages S3 account-level Public Access Block configuration. For more information about these settings, see the [AWS S3 Block Public Access documentation](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html). + +~> **NOTE:** Each AWS account may only have one S3 Public Access Block configuration. Multiple configurations of the resource against the same AWS account will cause a perpetual difference. + +-> Advanced usage: To use a custom API endpoint for this Terraform resource, use the [`s3control` endpoint provider configuration](/docs/providers/aws/index.html#s3control), not the `s3` endpoint provider configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_account_public_access_block import S3AccountPublicAccessBlock +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3AccountPublicAccessBlock(self, "example", + block_public_acls=True, + block_public_policy=True + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Optional) AWS account ID to configure. Defaults to automatically determined account ID of the Terraform AWS provider. +* `block_public_acls` - (Optional) Whether Amazon S3 should block public ACLs for buckets in this account. Defaults to `false`. 
Enabling this setting does not affect existing policies or ACLs. When set to `true` causes the following behavior:
+    * PUT Bucket acl and PUT Object acl calls will fail if the specified ACL allows public access.
+    * PUT Object calls fail if the request includes a public ACL.
+* `block_public_policy` - (Optional) Whether Amazon S3 should block public bucket policies for buckets in this account. Defaults to `false`. Enabling this setting does not affect existing bucket policies. When set to `true` causes Amazon S3 to:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignore_public_acls` - (Optional) Whether Amazon S3 should ignore public ACLs for buckets in this account. Defaults to `false`. Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set. When set to `true` causes Amazon S3 to:
+    * Ignore all public ACLs on buckets in this account and any objects that they contain.
+* `restrict_public_buckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for buckets in this account. Defaults to `false`. Enabling this setting does not affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`:
+    * Only the bucket owner and AWS Services can access buckets with public policies.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - AWS account ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_s3_account_public_access_block` using the AWS account ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_s3_account_public_access_block` using the AWS account ID. For example:
+
+```console
+% terraform import aws_s3_account_public_access_block.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket.html.markdown b/website/docs/cdktf/python/r/s3_bucket.html.markdown
new file mode 100644
index 00000000000..5e2c530e940
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket.html.markdown
@@ -0,0 +1,395 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket"
+description: |-
+  Provides an S3 bucket resource.
+---
+
+
+
+# Resource: aws_s3_bucket
+
+Provides an S3 bucket resource.
+
+-> This functionality is for managing S3 in an AWS Partition. To manage [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html), see the [`aws_s3control_bucket`](/docs/providers/aws/r/s3control_bucket.html) resource.
+
+-> In April 2023, [AWS introduced](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-s3-automatically-enable-block-public-access-disable-access-control-lists-buckets-april-2023/) updated security defaults for new S3 buckets. See [this issue](https://github.com/hashicorp/terraform-provider-aws/issues/28353) for information on how this affects the `aws_s3_bucket` resource.
+ +## Example Usage + +### Private Bucket With Tags + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3Bucket(self, "example", + bucket="my-tf-test-bucket", + tags={ + "Environment": "Dev", + "Name": "My bucket" + } + ) +``` + +### Static Website Hosting + +-> **NOTE:** The `website` attribute is deprecated. +See [`aws_s3_bucket_website_configuration`](s3_bucket_website_configuration.html.markdown) for examples with static website hosting configured. + +### CORS Rules + +-> **NOTE:** The `cors_rule` attribute is deprecated. +See [`aws_s3_bucket_cors_configuration`](s3_bucket_cors_configuration.html.markdown) for examples with CORS rules configured. + +### Versioning + +-> **NOTE:** The `versioning` attribute is deprecated. +See [`aws_s3_bucket_versioning`](s3_bucket_versioning.html.markdown) for examples with versioning configured. + +### Logging + +-> **NOTE:** The `logging` attribute is deprecated. +See [`aws_s3_bucket_logging`](s3_bucket_logging.html.markdown) for examples with logging enabled. + +### Object Lifecycle Rules + +-> **NOTE:** The `lifecycle_rule` attribute is deprecated. +See [`aws_s3_bucket_lifecycle_configuration`](s3_bucket_lifecycle_configuration.html.markdown) for examples with object lifecycle rules. + +### Object Lock Configuration + +-> **NOTE:** The `object_lock_configuration` attribute is deprecated. +See [`aws_s3_bucket_object_lock_configuration`](s3_bucket_object_lock_configuration.html.markdown) for examples with object lock configurations on both new and existing buckets. + +### Replication Configuration + +-> **NOTE:** The `replication_configuration` attribute is deprecated. +See [`aws_s3_bucket_replication_configuration`](s3_bucket_replication_configuration.html.markdown) for examples with replication configured. + +### Enable SSE-KMS Server Side Encryption + +-> **NOTE:** The `server_side_encryption_configuration` attribute is deprecated. +See [`aws_s3_bucket_server_side_encryption_configuration`](s3_bucket_server_side_encryption_configuration.html.markdown) for examples with server side encryption configured. + +### ACL Policy Grants + +-> **NOTE:** The `acl` and `grant` attributes are deprecated. +See [`aws_s3_bucket_acl`](s3_bucket_acl.html.markdown) for examples with ACL grants. + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Optional, Forces new resource) Name of the bucket. If omitted, Terraform will assign a random, unique name. Must be lowercase and less than or equal to 63 characters in length. A full list of bucket naming rules [may be found here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html). +* `bucket_prefix` - (Optional, Forces new resource) Creates a unique bucket name beginning with the specified prefix. Conflicts with `bucket`. Must be lowercase and less than or equal to 37 characters in length. A full list of bucket naming rules [may be found here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html). 
+* `force_destroy` - (Optional, Default: `false`) Boolean that indicates all objects (including any [locked objects](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html)) should be deleted from the bucket *when the bucket is destroyed* so that the bucket can be destroyed without error. These objects are *not* recoverable. Setting this parameter to `true` does not delete anything by itself; objects are only deleted when the bucket is destroyed. Because the value must be recorded in the resource state before it takes effect, a successful `terraform apply` must run after setting it to `true` and before any destroy; this also applies after importing a bucket. The flag has no effect if it is set in the same operation that replaces or destroys the bucket.
+* `object_lock_enabled` - (Optional, Forces new resource) Indicates whether this bucket has an Object Lock configuration enabled. Valid values are `true` or `false`. This argument is not supported in all regions or partitions.
+* `tags` - (Optional) Map of tags to assign to the bucket. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following arguments are deprecated, and will be removed in a future major version:
+
+* `acceleration_status` - (Optional, **Deprecated**) Sets the accelerate configuration of an existing bucket. Can be `Enabled` or `Suspended`. Cannot be used in `cn-north-1` or `us-gov-west-1`. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_accelerate_configuration`](s3_bucket_accelerate_configuration.html) instead.
+* `acl` - (Optional, **Deprecated**) The [canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `aws-exec-read`, `authenticated-read`, and `log-delivery-write`. Defaults to `private`. Conflicts with `grant`. Terraform will only perform drift detection if a configuration value is provided. Use the resource [`aws_s3_bucket_acl`](s3_bucket_acl.html.markdown) instead.
+* `grant` - (Optional, **Deprecated**) An [ACL policy grant](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#sample-acl). See [Grant](#grant) below for details. Conflicts with `acl`. Terraform will only perform drift detection if a configuration value is provided. Use the resource [`aws_s3_bucket_acl`](s3_bucket_acl.html.markdown) instead.
+* `cors_rule` - (Optional, **Deprecated**) Rule of [Cross-Origin Resource Sharing](https://docs.aws.amazon.com/AmazonS3/latest/dev/cors.html). See [CORS rule](#cors-rule) below for details. Terraform will only perform drift detection if a configuration value is provided. Use the resource [`aws_s3_bucket_cors_configuration`](s3_bucket_cors_configuration.html.markdown) instead.
+* `lifecycle_rule` - (Optional, **Deprecated**) Configuration of [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html). See [Lifecycle Rule](#lifecycle-rule) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_lifecycle_configuration`](s3_bucket_lifecycle_configuration.html) instead.
+* `logging` - (Optional, **Deprecated**) Configuration of [S3 bucket logging](https://docs.aws.amazon.com/AmazonS3/latest/UG/ManagingBucketLogging.html) parameters. See [Logging](#logging) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_logging`](s3_bucket_logging.html.markdown) instead.
+* `object_lock_configuration` - (Optional, **Deprecated**) Configuration of [S3 object locking](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock.html). See [Object Lock Configuration](#object-lock-configuration) below for details.
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the `object_lock_enabled` parameter and the resource [`aws_s3_bucket_object_lock_configuration`](s3_bucket_object_lock_configuration.html.markdown) instead.
+* `policy` - (Optional, **Deprecated**) Valid [bucket policy](https://docs.aws.amazon.com/AmazonS3/latest/dev/example-bucket-policies.html) JSON document. Note that if the policy document is not specific enough (but still valid), Terraform may view the policy as constantly changing in a `terraform plan`. In this case, please make sure you use the verbose/specific version of the policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_policy`](s3_bucket_policy.html) instead.
+* `replication_configuration` - (Optional, **Deprecated**) Configuration of [replication configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html). See [Replication Configuration](#replication-configuration) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_replication_configuration`](s3_bucket_replication_configuration.html) instead.
+* `request_payer` - (Optional, **Deprecated**) Specifies who should bear the cost of Amazon S3 data transfer.
+  Can be either `BucketOwner` or `Requester`. By default, the owner of the S3 bucket would incur the costs of any data transfer.
+  See [Requester Pays Buckets](http://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html) developer guide for more information.
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_request_payment_configuration`](s3_bucket_request_payment_configuration.html) instead.
+* `server_side_encryption_configuration` - (Optional, **Deprecated**) Configuration of [server-side encryption configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html). See [Server Side Encryption Configuration](#server-side-encryption-configuration) below for details.
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_server_side_encryption_configuration`](s3_bucket_server_side_encryption_configuration.html) instead.
+* `versioning` - (Optional, **Deprecated**) Configuration of the [S3 bucket versioning state](https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html). See [Versioning](#versioning) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_versioning`](s3_bucket_versioning.html.markdown) instead.
+* `website` - (Optional, **Deprecated**) Configuration of the [S3 bucket website](https://docs.aws.amazon.com/AmazonS3/latest/userguide/WebsiteHosting.html). See [Website](#website) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`aws_s3_bucket_website_configuration`](s3_bucket_website_configuration.html.markdown) instead.
+
+### CORS Rule
+
+~> **NOTE:** Currently, changes to the `cors_rule` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of CORS rules to an S3 bucket, use the `aws_s3_bucket_cors_configuration` resource instead. If you use `cors_rule` on an `aws_s3_bucket`, Terraform will assume management over the full set of CORS rules for the S3 bucket, treating additional CORS rules as drift. For this reason, `cors_rule` cannot be mixed with the external `aws_s3_bucket_cors_configuration` resource for a given S3 bucket.
+
+The `cors_rule` configuration block supports the following arguments:
+
+* `allowed_headers` - (Optional) List of headers allowed.
+* `allowed_methods` - (Required) One or more HTTP methods that you allow the origin to execute. Can be `GET`, `PUT`, `POST`, `DELETE` or `HEAD`.
+* `allowed_origins` - (Required) One or more origins you want customers to be able to access the bucket from.
+* `expose_headers` - (Optional) One or more headers in the response that you want customers to be able to access from their applications (for example, from a JavaScript `XMLHttpRequest` object).
+* `max_age_seconds` - (Optional) Specifies the time in seconds that the browser can cache the response for a preflight request.
+
+### Grant
+
+~> **NOTE:** Currently, changes to the `grant` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of ACL grants to an S3 bucket, use the `aws_s3_bucket_acl` resource instead. If you use `grant` on an `aws_s3_bucket`, Terraform will assume management over the full set of ACL grants for the S3 bucket, treating additional ACL grants as drift. For this reason, `grant` cannot be mixed with the external `aws_s3_bucket_acl` resource for a given S3 bucket.
+
+The `grant` configuration block supports the following arguments:
+
+* `id` - (Optional) Canonical user ID of the grantee. Used only when `type` is `CanonicalUser`.
+* `type` - (Required) Type of grantee. Valid values are `CanonicalUser` and `Group`. `AmazonCustomerByEmail` is not supported.
+* `permissions` - (Required) List of permissions to apply for the grantee. Valid values are `READ`, `WRITE`, `READ_ACP`, `WRITE_ACP`, `FULL_CONTROL`.
+* `uri` - (Optional) URI of the grantee group. Used only when `type` is `Group`.
+
+### Lifecycle Rule
+
+~> **NOTE:** Currently, changes to the `lifecycle_rule` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of Lifecycle rules to an S3 bucket, use the `aws_s3_bucket_lifecycle_configuration` resource instead. If you use `lifecycle_rule` on an `aws_s3_bucket`, Terraform will assume management over the full set of Lifecycle rules for the S3 bucket, treating additional Lifecycle rules as drift. For this reason, `lifecycle_rule` cannot be mixed with the external `aws_s3_bucket_lifecycle_configuration` resource for a given S3 bucket.
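+
+As a point of reference, here is a minimal sketch of this deprecated inline block (a sketch only: the struct names assume provider bindings generated by `cdktf get`, and the bucket name, rule ID, prefix, and 90-day window are illustrative values, not defaults):
+
+```python
+# Hedged sketch of the deprecated inline `lifecycle_rule` block; prefer the
+# standalone aws_s3_bucket_lifecycle_configuration resource for new configurations.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_bucket import S3Bucket, S3BucketLifecycleRule, S3BucketLifecycleRuleExpiration
+class LifecycleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        S3Bucket(self, "log_bucket",
+            bucket="my-tf-log-bucket",
+            lifecycle_rule=[S3BucketLifecycleRule(
+                id="expire-logs",      # unique rule identifier
+                enabled=True,          # lifecycle rule status
+                prefix="log/",         # only objects under this key prefix
+                expiration=S3BucketLifecycleRuleExpiration(
+                    days=90            # expire current versions 90 days after creation
+                )
+            )]
+        )
+```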
+
+~> **NOTE:** At least one of `abort_incomplete_multipart_upload_days`, `expiration`, `transition`, `noncurrent_version_expiration`, `noncurrent_version_transition` must be specified.
+
+The `lifecycle_rule` configuration block supports the following arguments:
+
+* `id` - (Optional) Unique identifier for the rule. Must be less than or equal to 255 characters in length.
+* `prefix` - (Optional) Object key prefix identifying one or more objects to which the rule applies.
+* `tags` - (Optional) Specifies object tags key and value.
+* `enabled` - (Required) Specifies lifecycle rule status.
+* `abort_incomplete_multipart_upload_days` - (Optional) Specifies the number of days after initiating a multipart upload when the multipart upload must be completed.
+* `expiration` - (Optional) Specifies when objects expire. See [Expiration](#expiration) below for details.
+* `transition` - (Optional) Specifies when objects transition to another storage class. See [Transition](#transition) below for details.
+* `noncurrent_version_expiration` - (Optional) Specifies when noncurrent object versions expire. See [Noncurrent Version Expiration](#noncurrent-version-expiration) below for details.
+* `noncurrent_version_transition` - (Optional) Specifies when noncurrent object versions transition. See [Noncurrent Version Transition](#noncurrent-version-transition) below for details.
+
+### Expiration
+
+The `expiration` configuration block supports the following arguments:
+
+* `date` - (Optional) Specifies the date after which you want the corresponding action to take effect.
+* `days` - (Optional) Specifies the number of days after object creation when the specific rule action takes effect.
+* `expired_object_delete_marker` - (Optional) On a versioned bucket (versioning-enabled or versioning-suspended bucket), you can add this element in the lifecycle configuration to direct Amazon S3 to delete expired object delete markers. This cannot be specified with Days or Date in a Lifecycle Expiration Policy.
+
+### Transition
+
+The `transition` configuration block supports the following arguments:
+
+* `date` - (Optional) Specifies the date after which you want the corresponding action to take effect.
+* `days` - (Optional) Specifies the number of days after object creation when the specific rule action takes effect.
+* `storage_class` - (Required) Specifies the Amazon S3 [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Transition.html#AmazonS3-Type-Transition-StorageClass) to which you want the object to transition.
+
+### Noncurrent Version Expiration
+
+The `noncurrent_version_expiration` configuration block supports the following arguments:
+
+* `days` - (Required) Specifies the number of days after which noncurrent object versions expire.
+
+### Noncurrent Version Transition
+
+The `noncurrent_version_transition` configuration supports the following arguments:
+
+* `days` - (Required) Specifies the number of days after which noncurrent object versions transition.
+* `storage_class` - (Required) Specifies the Amazon S3 [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Transition.html#AmazonS3-Type-Transition-StorageClass) to which you want the object to transition.
+
+### Logging
+
+~> **NOTE:** Currently, changes to the `logging` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of logging parameters to an S3 bucket, use the `aws_s3_bucket_logging` resource instead. If you use `logging` on an `aws_s3_bucket`, Terraform will assume management over the full set of logging parameters for the S3 bucket, treating additional logging parameters as drift. For this reason, `logging` cannot be mixed with the external `aws_s3_bucket_logging` resource for a given S3 bucket.
+
+The `logging` configuration block supports the following arguments:
+
+* `target_bucket` - (Required) Name of the bucket that will receive the log objects.
+* `target_prefix` - (Optional) Key prefix to assign to the log objects.
+
+### Object Lock Configuration
+
+~> **NOTE:** You can only **enable** S3 Object Lock for **new** buckets. If you need to **enable** S3 Object Lock for an **existing** bucket, please contact AWS Support.
+When you create a bucket with S3 Object Lock enabled, Amazon S3 automatically enables versioning for the bucket.
+Once you create a bucket with S3 Object Lock enabled, you can't disable Object Lock or suspend versioning for the bucket.
+
+~> **NOTE:** Currently, changes to the `object_lock_configuration` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of Object Lock settings to an S3 bucket, use the `aws_s3_bucket_object_lock_configuration` resource instead. If you use `object_lock_configuration` on an `aws_s3_bucket`, Terraform will assume management over the full set of Object Lock configuration parameters for the S3 bucket, treating additional Object Lock configuration parameters as drift. For this reason, `object_lock_configuration` cannot be mixed with the external `aws_s3_bucket_object_lock_configuration` resource for a given S3 bucket.
+
+The `object_lock_configuration` configuration block supports the following arguments:
+
+* `object_lock_enabled` - (Optional, **Deprecated**) Indicates whether this bucket has an Object Lock configuration enabled. Valid value is `Enabled`. Use the top-level argument `object_lock_enabled` instead.
+* `rule` - (Optional) Object Lock rule in place for this bucket ([documented below](#rule)).
+
+#### Rule
+
+The `rule` configuration block supports the following argument:
+
+* `default_retention` - (Required) Default retention period that you want to apply to new objects placed in this bucket ([documented below](#default-retention)).
+
+#### Default Retention
+
+The `default_retention` configuration block supports the following arguments:
+
+~> **NOTE:** Either `days` or `years` must be specified, but not both.
+
+* `mode` - (Required) Default Object Lock retention mode you want to apply to new objects placed in this bucket. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `days` - (Optional) Number of days that you want to specify for the default retention period.
+* `years` - (Optional) Number of years that you want to specify for the default retention period.
+
+### Replication Configuration
+
+~> **NOTE:** Currently, changes to the `replication_configuration` configuration of *existing* resources cannot be automatically detected by Terraform. To manage replication configuration changes to an S3 bucket, use the `aws_s3_bucket_replication_configuration` resource instead. If you use `replication_configuration` on an `aws_s3_bucket`, Terraform will assume management over the full replication configuration for the S3 bucket, treating additional replication configuration rules as drift. For this reason, `replication_configuration` cannot be mixed with the external `aws_s3_bucket_replication_configuration` resource for a given S3 bucket.
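+
+Before the argument details, a hedged sketch of the deprecated inline block's overall shape (the struct names assume provider bindings generated by `cdktf get`; `replication_role` stands in for a hypothetical IAM role defined elsewhere, and the bucket names and rule values are illustrative):
+
+```python
+# Hedged sketch of the deprecated inline `replication_configuration` block;
+# prefer the standalone aws_s3_bucket_replication_configuration resource.
+# `replication_role` is a hypothetical aws_iam_role defined elsewhere in the stack.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.s3_bucket import S3Bucket, S3BucketReplicationConfiguration, S3BucketReplicationConfigurationRules, S3BucketReplicationConfigurationRulesDestination
+class ReplicationSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        S3Bucket(self, "source",
+            bucket="my-tf-source-bucket",
+            replication_configuration=S3BucketReplicationConfiguration(
+                role=Token.as_string(replication_role.arn),
+                rules=[S3BucketReplicationConfigurationRules(
+                    id="replicate-all",
+                    status="Enabled",
+                    destination=S3BucketReplicationConfigurationRulesDestination(
+                        bucket="arn:aws:s3:::my-tf-destination-bucket",
+                        storage_class="STANDARD"
+                    )
+                )]
+            )
+        )
+```
+
+Note that S3 replication also requires versioning to be enabled on both the source and destination buckets.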
+ +The `replication_configuration` configuration block supports the following arguments: + +* `role` - (Required) ARN of the IAM role for Amazon S3 to assume when replicating the objects. +* `rules` - (Required) Specifies the rules managing the replication ([documented below](#rules)). + +#### Rules + +The `rules` configuration block supports the following arguments: + +~> **NOTE:** Amazon S3's latest version of the replication configuration is V2, which includes the `filter` attribute for replication rules. +With the `filter` attribute, you can specify object filters based on the object key prefix, tags, or both to scope the objects that the rule applies to. +Replication configuration V1 supports filtering based on only the `prefix` attribute. For backwards compatibility, Amazon S3 continues to support the V1 configuration. + +* `delete_marker_replication_status` - (Optional) Whether delete markers are replicated. The only valid value is `Enabled`. To disable, omit this argument. This argument is only valid with V2 replication configurations (i.e., when `filter` is used). +* `destination` - (Required) Specifies the destination for the rule ([documented below](#destination)). +* `filter` - (Optional, Conflicts with `prefix`) Filter that identifies subset of objects to which the replication rule applies ([documented below](#filter)). +* `id` - (Optional) Unique identifier for the rule. Must be less than or equal to 255 characters in length. +* `prefix` - (Optional, Conflicts with `filter`) Object keyname prefix identifying one or more objects to which the rule applies. Must be less than or equal to 1024 characters in length. +* `priority` - (Optional) Priority associated with the rule. Priority should only be set if `filter` is configured. If not provided, defaults to `0`. Priority must be unique between multiple rules. +* `source_selection_criteria` - (Optional) Specifies special object selection criteria ([documented below](#source-selection-criteria)). +* `status` - (Required) Status of the rule. Either `Enabled` or `Disabled`. The rule is ignored if status is not Enabled. + +#### Filter + +The `filter` configuration block supports the following arguments: + +* `prefix` - (Optional) Object keyname prefix that identifies subset of objects to which the rule applies. Must be less than or equal to 1024 characters in length. +* `tags` - (Optional) A map of tags that identifies subset of objects to which the rule applies. + The rule applies only to objects having all the tags in its tagset. + +#### Destination + +~> **NOTE:** Replication to multiple destination buckets requires that `priority` is specified in the `rules` object. If the corresponding rule requires no filter, an empty configuration block `filter {}` must be specified. + +The `destination` configuration block supports the following arguments: + +* `bucket` - (Required) ARN of the S3 bucket where you want Amazon S3 to store replicas of the object identified by the rule. +* `storage_class` - (Optional) The [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Destination.html#AmazonS3-Type-Destination-StorageClass) used to store the object. By default, Amazon S3 uses the storage class of the source object to create the object replica. +* `replica_kms_key_id` - (Optional) Destination KMS encryption key ARN for SSE-KMS replication. Must be used in conjunction with + `sse_kms_encrypted_objects` source selection criteria. 
+* `access_control_translation` - (Optional) Specifies the overrides to use for object owners on replication. Must be used in conjunction with the `account_id` owner override configuration.
+* `account_id` - (Optional) Account ID to use for overriding the object owner on replication. Must be used in conjunction with the `access_control_translation` override configuration.
+* `replication_time` - (Optional) Enables S3 Replication Time Control (S3 RTC) ([documented below](#replication-time)).
+* `metrics` - (Optional) Enables replication metrics (required for S3 RTC) ([documented below](#metrics)).
+
+#### Replication Time
+
+The `replication_time` configuration block supports the following arguments:
+
+* `status` - (Optional) Status of RTC. Either `Enabled` or `Disabled`.
+* `minutes` - (Optional) Threshold within which objects are to be replicated. The only valid value is `15`.
+
+#### Metrics
+
+The `metrics` configuration block supports the following arguments:
+
+* `status` - (Optional) Status of replication metrics. Either `Enabled` or `Disabled`.
+* `minutes` - (Optional) Threshold within which objects are to be replicated. The only valid value is `15`.
+
+#### Source Selection Criteria
+
+The `source_selection_criteria` configuration block supports the following argument:
+
+* `sse_kms_encrypted_objects` - (Optional) Match SSE-KMS encrypted objects ([documented below](#sse-kms-encrypted-objects)). If specified, `replica_kms_key_id`
+  in `destination` must be specified as well.
+
+#### SSE KMS Encrypted Objects
+
+The `sse_kms_encrypted_objects` configuration block supports the following argument:
+
+* `enabled` - (Required) Boolean which indicates if this criteria is enabled.
+
+### Server Side Encryption Configuration
+
+~> **NOTE:** Currently, changes to the `server_side_encryption_configuration` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes in encryption of an S3 bucket, use the `aws_s3_bucket_server_side_encryption_configuration` resource instead. If you use `server_side_encryption_configuration` on an `aws_s3_bucket`, Terraform will assume management over the encryption configuration for the S3 bucket, treating additional encryption changes as drift. For this reason, `server_side_encryption_configuration` cannot be mixed with the external `aws_s3_bucket_server_side_encryption_configuration` resource for a given S3 bucket.
+
+The `server_side_encryption_configuration` configuration block supports the following argument:
+
+* `rule` - (Required) Single object for server-side encryption by default configuration. (documented below)
+
+The `rule` configuration block supports the following arguments:
+
+* `apply_server_side_encryption_by_default` - (Required) Single object for setting server-side encryption by default. (documented below)
+* `bucket_key_enabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS.
+
+The `apply_server_side_encryption_by_default` configuration block supports the following arguments:
+
+* `sse_algorithm` - (Required) Server-side encryption algorithm to use. Valid values are `AES256` and `aws:kms`.
+* `kms_master_key_id` - (Optional) AWS KMS master key ID used for the SSE-KMS encryption. This can only be used when you set the value of `sse_algorithm` as `aws:kms`. The default `aws/s3` AWS KMS master key is used if this element is absent while the `sse_algorithm` is `aws:kms`.
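+
+For orientation, a hedged sketch of the deprecated inline block (the struct names assume provider bindings generated by `cdktf get`; the bucket name is illustrative, and `kms_master_key_id` is omitted so the default `aws/s3` key would apply):
+
+```python
+# Hedged sketch of the deprecated inline `server_side_encryption_configuration`
+# block; prefer the standalone aws_s3_bucket_server_side_encryption_configuration
+# resource for new configurations.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_bucket import S3Bucket, S3BucketServerSideEncryptionConfiguration, S3BucketServerSideEncryptionConfigurationRule, S3BucketServerSideEncryptionConfigurationRuleApplyServerSideEncryptionByDefault
+class EncryptionSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        S3Bucket(self, "encrypted",
+            bucket="my-tf-encrypted-bucket",
+            server_side_encryption_configuration=S3BucketServerSideEncryptionConfiguration(
+                rule=S3BucketServerSideEncryptionConfigurationRule(
+                    apply_server_side_encryption_by_default=S3BucketServerSideEncryptionConfigurationRuleApplyServerSideEncryptionByDefault(
+                        # aws:kms selects SSE-KMS; AES256 selects SSE-S3
+                        sse_algorithm="aws:kms"
+                    ),
+                    bucket_key_enabled=True
+                )
+            )
+        )
+```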
+
+### Versioning
+
+~> **NOTE:** Currently, changes to the `versioning` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of versioning state to an S3 bucket, use the `aws_s3_bucket_versioning` resource instead. If you use `versioning` on an `aws_s3_bucket`, Terraform will assume management over the versioning state of the S3 bucket, treating additional versioning state changes as drift. For this reason, `versioning` cannot be mixed with the external `aws_s3_bucket_versioning` resource for a given S3 bucket.
+
+The `versioning` configuration block supports the following arguments:
+
+* `enabled` - (Optional) Enable versioning. Once you version-enable a bucket, it can never return to an unversioned state. You can, however, suspend versioning on that bucket.
+* `mfa_delete` - (Optional) Enable MFA delete for either `Change the versioning state of your bucket` or `Permanently delete an object version`. Default is `false`. This argument cannot be used to toggle the setting; it exists so that buckets managed by Terraform can reflect the state in AWS.
+
+### Website
+
+~> **NOTE:** Currently, changes to the `website` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes to the website configuration of an S3 bucket, use the `aws_s3_bucket_website_configuration` resource instead. If you use `website` on an `aws_s3_bucket`, Terraform will assume management over the configuration of the website of the S3 bucket, treating additional website configuration changes as drift. For this reason, `website` cannot be mixed with the external `aws_s3_bucket_website_configuration` resource for a given S3 bucket.
+
+The `website` configuration block supports the following arguments:
+
+* `index_document` - (Required, unless using `redirect_all_requests_to`) Amazon S3 returns this index document when requests are made to the root domain or any of the subfolders.
+* `error_document` - (Optional) Absolute path to the document to return in case of a 4XX error.
+* `redirect_all_requests_to` - (Optional) Hostname to redirect all website requests for this bucket to. Hostname can optionally be prefixed with a protocol (`http://` or `https://`) to use when redirecting requests. The default is the protocol that is used in the original request.
+* `routing_rules` - (Optional) JSON array containing [routing rules](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules.html)
+  describing redirect behavior and when redirects are applied.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the bucket.
+* `arn` - ARN of the bucket. Will be of format `arn:aws:s3:::bucketname`.
+* `bucket_domain_name` - Bucket domain name. Will be of format `bucketname.s3.amazonaws.com`.
+* `bucket_regional_domain_name` - The bucket region-specific domain name. The bucket domain name including the region name. Please refer to the [S3 endpoints reference](https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region) for format. Note: AWS CloudFront allows specifying an S3 region-specific endpoint when creating an S3 origin. This will prevent redirect issues from CloudFront to the S3 Origin URL. For more information, see the [Virtual Hosted-Style Requests for Other Regions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#deprecated-global-endpoint) section in the AWS S3 User Guide.
+* `hosted_zone_id` - [Route 53 Hosted Zone ID](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_website_region_endpoints) for this bucket's region. +* `region` - AWS region this bucket resides in. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `website_endpoint` - (**Deprecated**) Website endpoint, if the bucket is configured with a website. If not, this will be an empty string. Use the resource [`aws_s3_bucket_website_configuration`](s3_bucket_website_configuration.html.markdown) instead. +* `website_domain` - (**Deprecated**) Domain of the website endpoint, if the bucket is configured with a website. If not, this will be an empty string. This is used to create Route 53 alias records. Use the resource [`aws_s3_bucket_website_configuration`](s3_bucket_website_configuration.html.markdown) instead. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `20m`) +- `read` - (Default `20m`) +- `update` - (Default `20m`) +- `delete` - (Default `60m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket using the `bucket`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import S3 bucket using the `bucket`. For example: + +```console +% terraform import aws_s3_bucket.bucket bucket-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_accelerate_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_accelerate_configuration.html.markdown new file mode 100644 index 00000000000..a8f422b5b85 --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_accelerate_configuration.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_accelerate_configuration" +description: |- + Provides an S3 bucket accelerate configuration resource. +--- + + + +# Resource: aws_s3_bucket_accelerate_configuration + +Provides an S3 bucket accelerate configuration resource. See the [Requirements for using Transfer Acceleration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/transfer-acceleration.html#transfer-acceleration-requirements) for more details. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_accelerate_configuration import S3BucketAccelerateConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + mybucket = S3Bucket(self, "mybucket", + bucket="mybucket" + ) + S3BucketAccelerateConfiguration(self, "example", + bucket=mybucket.id, + status="Enabled" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `status` - (Required) Transfer acceleration state of the bucket. Valid values: `Enabled`, `Suspended`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket accelerate configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import.** For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_accelerate_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_accelerate_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_acl.html.markdown b/website/docs/cdktf/python/r/s3_bucket_acl.html.markdown new file mode 100644 index 00000000000..ae7d440679c --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_acl.html.markdown @@ -0,0 +1,286 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_acl" +description: |- + Provides an S3 bucket ACL resource. +--- + + + +# Resource: aws_s3_bucket_acl + +Provides an S3 bucket ACL resource. + +~> **Note:** `terraform destroy` does not delete the S3 Bucket ACL but does remove the resource from Terraform state. 
+
+## Example Usage
+
+### With `private` ACL
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+from imports.aws.s3_bucket_ownership_controls import S3BucketOwnershipControls, S3BucketOwnershipControlsRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="my-tf-example-bucket"
+        )
+        aws_s3_bucket_ownership_controls_example = S3BucketOwnershipControls(self, "example_1",
+            bucket=example.id,
+            rule=S3BucketOwnershipControlsRule(
+                object_ownership="BucketOwnerPreferred"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_ownership_controls_example.override_logical_id("example")
+        aws_s3_bucket_acl_example = S3BucketAcl(self, "example_2",
+            acl="private",
+            bucket=example.id,
+            depends_on=[aws_s3_bucket_ownership_controls_example]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_acl_example.override_logical_id("example")
+```
+
+### With `public-read` ACL
+
+-> This example explicitly disables the default S3 bucket security settings. This
+should be done with caution, as all bucket objects become publicly exposed.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+from imports.aws.s3_bucket_ownership_controls import S3BucketOwnershipControls, S3BucketOwnershipControlsRule
+from imports.aws.s3_bucket_public_access_block import S3BucketPublicAccessBlock
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="my-tf-example-bucket"
+        )
+        aws_s3_bucket_ownership_controls_example = S3BucketOwnershipControls(self, "example_1",
+            bucket=example.id,
+            rule=S3BucketOwnershipControlsRule(
+                object_ownership="BucketOwnerPreferred"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_ownership_controls_example.override_logical_id("example")
+        aws_s3_bucket_public_access_block_example = S3BucketPublicAccessBlock(self, "example_2",
+            block_public_acls=False,
+            block_public_policy=False,
+            bucket=example.id,
+            ignore_public_acls=False,
+            restrict_public_buckets=False
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_public_access_block_example.override_logical_id("example")
+        aws_s3_bucket_acl_example = S3BucketAcl(self, "example_3",
+            acl="public-read",
+            bucket=example.id,
+            depends_on=[aws_s3_bucket_ownership_controls_example, aws_s3_bucket_public_access_block_example
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_acl_example.override_logical_id("example")
+```
+
+### With Grants
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_canonical_user_id import DataAwsCanonicalUserId
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+from imports.aws.s3_bucket_ownership_controls import S3BucketOwnershipControls
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="my-tf-example-bucket"
+        )
+        aws_s3_bucket_ownership_controls_example = S3BucketOwnershipControls(self, "example_1",
+            bucket=example.id,
+            rule=S3BucketOwnershipControlsRule(
+                object_ownership="BucketOwnerPreferred"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_ownership_controls_example.override_logical_id("example")
+        current = DataAwsCanonicalUserId(self, "current")
+        aws_s3_bucket_acl_example = S3BucketAcl(self, "example_3",
+            access_control_policy=S3BucketAclAccessControlPolicy(
+                grant=[S3BucketAclAccessControlPolicyGrant(
+                    grantee=S3BucketAclAccessControlPolicyGrantGrantee(
+                        id=Token.as_string(current.id),
+                        type="CanonicalUser"
+                    ),
+                    permission="READ"
+                ), S3BucketAclAccessControlPolicyGrant(
+                    grantee=S3BucketAclAccessControlPolicyGrantGrantee(
+                        type="Group",
+                        uri="http://acs.amazonaws.com/groups/s3/LogDelivery"
+                    ),
+                    permission="READ_ACP"
+                )
+                ],
+                owner=S3BucketAclAccessControlPolicyOwner(
+                    id=Token.as_string(current.id)
+                )
+            ),
+            bucket=example.id,
+            depends_on=[aws_s3_bucket_ownership_controls_example]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_acl_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `acl` - (Optional, Conflicts with `access_control_policy`) Canned ACL to apply to the bucket.
+* `access_control_policy` - (Optional, Conflicts with `acl`) Configuration block that sets the ACL permissions for an object per grantee. [See below](#access_control_policy).
+* `bucket` - (Required, Forces new resource) Name of the bucket.
+* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner.
+
+### access_control_policy
+
+The `access_control_policy` configuration block supports the following arguments:
+
+* `grant` - (Required) Set of `grant` configuration blocks. [See below](#grant).
+* `owner` - (Required) Configuration block of the bucket owner's display name and ID. [See below](#owner).
+
+### grant
+
+The `grant` configuration block supports the following arguments:
+
+* `grantee` - (Required) Configuration block for the person being granted permissions. [See below](#grantee).
+* `permission` - (Required) Permissions assigned to the grantee for the bucket. Valid values: `FULL_CONTROL`, `WRITE`, `WRITE_ACP`, `READ`, `READ_ACP`.
+
+### owner
+
+The `owner` configuration block supports the following arguments:
+
+* `id` - (Required) ID of the owner.
+* `display_name` - (Optional) Display name of the owner.
+
+### grantee
+
+The `grantee` configuration block supports the following arguments:
+
+* `email_address` - (Optional) Email address of the grantee. See [Regions and Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region) for supported AWS regions where this argument can be specified.
+* `id` - (Optional) Canonical user ID of the grantee.
+* `type` - (Required) Type of grantee. Valid values: `CanonicalUser`, `AmazonCustomerByEmail`, `Group`.
+* `uri` - (Optional) URI of the grantee group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket`, `expected_bucket_owner` (if configured), and `acl` (if configured) separated by commas (`,`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket ACL using `bucket`, `expected_bucket_owner`, and/or `acl`, depending on your situation. For example:
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a
+[canned ACL][1] (i.e. predefined grant), import using the `bucket`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_bucket_acl import S3BucketAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch, assuming a cdktf release (0.19+) that provides
+        # generate_config_for_import for emitting import blocks.
+        S3BucketAcl.generate_config_for_import(self, "example", "bucket-name")
+```
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a
+[canned ACL][1] (i.e. predefined grant), import using the `bucket` and `acl` separated by a comma (`,`):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_bucket_acl import S3BucketAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch; assumes cdktf 0.19+ (generate_config_for_import).
+        S3BucketAcl.generate_config_for_import(self, "example", "bucket-name,private")
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_bucket_acl import S3BucketAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch; assumes cdktf 0.19+ (generate_config_for_import).
+        S3BucketAcl.generate_config_for_import(self, "example", "bucket-name,123456789012")
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a
+[canned ACL][1] (i.e. predefined grant), import using the `bucket`, `expected_bucket_owner`, and `acl` separated by commas (`,`):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_bucket_acl import S3BucketAcl
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch; assumes cdktf 0.19+ (generate_config_for_import).
+        S3BucketAcl.generate_config_for_import(self, "example", "bucket-name,123456789012,private")
+```
+
+**Using `terraform import` to import** S3 bucket ACL using `bucket`, `expected_bucket_owner`, and/or `acl`, depending on your situation. For example:
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a
+[canned ACL][1] (i.e. predefined grant), import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name
+```
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket` and `acl` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name,private
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name,123456789012
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket`, `expected_bucket_owner`, and `acl` separated by commas (`,`):
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name,123456789012,private
+```
+
+[1]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_analytics_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_analytics_configuration.html.markdown
new file mode 100644
index 00000000000..3b3b3e7df72
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_analytics_configuration.html.markdown
@@ -0,0 +1,141 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_analytics_configuration"
+description: |-
+  Provides an S3 bucket analytics configuration resource.
+---
+
+
+# Resource: aws_s3_bucket_analytics_configuration
+
+Provides an S3 bucket [analytics configuration](https://docs.aws.amazon.com/AmazonS3/latest/dev/analytics-storage-class.html) resource.
+
+## Example Usage
+
+### Add analytics configuration for entire S3 bucket and export results to a second S3 bucket
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
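+#
+# Note: the configuration below analyzes the entire bucket and exports
+# results as CSV (the only supported export format) to a second bucket.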
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_analytics_configuration import S3BucketAnalyticsConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        analytics = S3Bucket(self, "analytics",
+            bucket="analytics-destination"
+        )
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        S3BucketAnalyticsConfiguration(self, "example-entire-bucket",
+            bucket=example.id,
+            name="EntireBucket",
+            storage_class_analysis=S3BucketAnalyticsConfigurationStorageClassAnalysis(
+                data_export=S3BucketAnalyticsConfigurationStorageClassAnalysisDataExport(
+                    destination=S3BucketAnalyticsConfigurationStorageClassAnalysisDataExportDestination(
+                        s3_bucket_destination=S3BucketAnalyticsConfigurationStorageClassAnalysisDataExportDestinationS3BucketDestination(
+                            bucket_arn=analytics.arn
+                        )
+                    )
+                )
+            )
+        )
+```
+
+### Add analytics configuration with S3 object filter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_analytics_configuration import S3BucketAnalyticsConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        S3BucketAnalyticsConfiguration(self, "example-filtered",
+            bucket=example.id,
+            filter=S3BucketAnalyticsConfigurationFilter(
+                prefix="documents/",
+                tags={
+                    "class": "blue",
+                    "priority": "high"
+                }
+            ),
+            name="ImportantBlueDocuments"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket this analytics configuration is associated with.
+* `name` - (Required) Unique identifier of the analytics configuration for the bucket.
+* `filter` - (Optional) Object filtering that accepts a prefix, tags, or a logical AND of prefix and tags (documented below).
+* `storage_class_analysis` - (Optional) Configuration for the analytics data export (documented below).
+
+The `filter` configuration supports the following:
+
+* `prefix` - (Optional) Object prefix for filtering.
+* `tags` - (Optional) Set of object tags for filtering.
+
+The `storage_class_analysis` configuration supports the following:
+
+* `data_export` - (Required) Data export configuration (documented below).
+
+The `data_export` configuration supports the following:
+
+* `output_schema_version` - (Optional) Schema version of exported analytics data. Allowed values: `V_1`. Default value: `V_1`.
+* `destination` - (Required) Specifies the destination for the exported analytics data (documented below).
+
+The `destination` configuration supports the following:
+
+* `s3_bucket_destination` - (Required) Analytics data export currently only supports an S3 bucket destination (documented below).
+
+The `s3_bucket_destination` configuration supports the following:
+
+* `bucket_arn` - (Required) ARN of the destination bucket.
+* `bucket_account_id` - (Optional) Account ID that owns the destination bucket.
+* `format` - (Optional) Output format of exported analytics data. Allowed values: `CSV`. Default value: `CSV`.
+* `prefix` - (Optional) Prefix to append to exported analytics data.
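+
+As a minimal sketch combining the destination options above (the bucket names, account ID, and export prefix are placeholders; the nested configuration classes are imported explicitly here, whereas the generated examples above assume they are in scope):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_analytics_configuration import (
+    S3BucketAnalyticsConfiguration,
+    S3BucketAnalyticsConfigurationStorageClassAnalysis,
+    S3BucketAnalyticsConfigurationStorageClassAnalysisDataExport,
+    S3BucketAnalyticsConfigurationStorageClassAnalysisDataExportDestination,
+    S3BucketAnalyticsConfigurationStorageClassAnalysisDataExportDestinationS3BucketDestination,
+)
+class AnalyticsDestinationSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Placeholder buckets; any existing destination bucket ARN also works.
+        destination = S3Bucket(self, "destination", bucket="example-analytics-destination")
+        source = S3Bucket(self, "source", bucket="example-source")
+        S3BucketAnalyticsConfiguration(self, "full-export",
+            bucket=source.id,
+            name="FullExport",
+            storage_class_analysis=S3BucketAnalyticsConfigurationStorageClassAnalysis(
+                data_export=S3BucketAnalyticsConfigurationStorageClassAnalysisDataExport(
+                    # V_1 is both the default and the only allowed schema version.
+                    output_schema_version="V_1",
+                    destination=S3BucketAnalyticsConfigurationStorageClassAnalysisDataExportDestination(
+                        s3_bucket_destination=S3BucketAnalyticsConfigurationStorageClassAnalysisDataExportDestinationS3BucketDestination(
+                            bucket_arn=destination.arn,
+                            # Placeholder account ID; pinning the owner guards
+                            # against destination bucket ownership changes.
+                            bucket_account_id="123456789012",
+                            # CSV is both the default and the only allowed format.
+                            format="CSV",
+                            prefix="analytics-exports"
+                        )
+                    )
+                )
+            )
+        )
+```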
+ +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket analytics configurations using `bucket:analytics`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import S3 bucket analytics configurations using `bucket:analytics`. For example: + +```console +% terraform import aws_s3_bucket_analytics_configuration.my-bucket-entire-bucket my-bucket:EntireBucket +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_cors_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_cors_configuration.html.markdown new file mode 100644 index 00000000000..6017b8cf0a9 --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_cors_configuration.html.markdown @@ -0,0 +1,118 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_cors_configuration" +description: |- + Provides an S3 bucket CORS configuration resource. +--- + + + +# Resource: aws_s3_bucket_cors_configuration + +Provides an S3 bucket CORS configuration resource. For more information about CORS, go to [Enabling Cross-Origin Resource Sharing](https://docs.aws.amazon.com/AmazonS3/latest/userguide/cors.html) in the Amazon S3 User Guide. + +~> **NOTE:** S3 Buckets only support a single CORS configuration. Declaring multiple `aws_s3_bucket_cors_configuration` resources to the same S3 Bucket will cause a perpetual difference in configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_cors_configuration import S3BucketCorsConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="mybucket" + ) + aws_s3_bucket_cors_configuration_example = S3BucketCorsConfiguration(self, "example_1", + bucket=example.id, + cors_rule=[S3BucketCorsConfigurationCorsRule( + allowed_headers=["*"], + allowed_methods=["PUT", "POST"], + allowed_origins=["https://s3-website-test.hashicorp.com"], + expose_headers=["ETag"], + max_age_seconds=3000 + ), S3BucketCorsConfigurationCorsRule( + allowed_methods=["GET"], + allowed_origins=["*"] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_cors_configuration_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `cors_rule` - (Required) Set of origins and methods (cross-origin access that you want to allow). [See below](#cors_rule). You can configure up to 100 rules. 
+ +### cors_rule + +The `cors_rule` configuration block supports the following arguments: + +* `allowed_headers` - (Optional) Set of Headers that are specified in the `Access-Control-Request-Headers` header. +* `allowed_methods` - (Required) Set of HTTP methods that you allow the origin to execute. Valid values are `GET`, `PUT`, `HEAD`, `POST`, and `DELETE`. +* `allowed_origins` - (Required) Set of origins you want customers to be able to access the bucket from. +* `expose_headers` - (Optional) Set of headers in the response that you want customers to be able to access from their applications (for example, from a JavaScript `XMLHttpRequest` object). +* `id` - (Optional) Unique identifier for the rule. The value cannot be longer than 255 characters. +* `max_age_seconds` - (Optional) Time in seconds that your browser is to cache the preflight response for the specified resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket CORS configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** S3 bucket CORS configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). 
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_cors_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_cors_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_intelligent_tiering_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_intelligent_tiering_configuration.html.markdown new file mode 100644 index 00000000000..b6a34912d04 --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_intelligent_tiering_configuration.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_intelligent_tiering_configuration" +description: |- + Provides an S3 Intelligent-Tiering configuration resource. +--- + + + +# Resource: aws_s3_bucket_intelligent_tiering_configuration + +Provides an [S3 Intelligent-Tiering](https://docs.aws.amazon.com/AmazonS3/latest/userguide/intelligent-tiering.html) configuration resource. + +## Example Usage + +### Add intelligent tiering configuration for entire S3 bucket + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_intelligent_tiering_configuration import S3BucketIntelligentTieringConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example" + ) + S3BucketIntelligentTieringConfiguration(self, "example-entire-bucket", + bucket=example.id, + name="EntireBucket", + tiering=[S3BucketIntelligentTieringConfigurationTiering( + access_tier="DEEP_ARCHIVE_ACCESS", + days=180 + ), S3BucketIntelligentTieringConfigurationTiering( + access_tier="ARCHIVE_ACCESS", + days=125 + ) + ] + ) +``` + +### Add intelligent tiering configuration with S3 object filter + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
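+#
+# Note: the configuration below is created with status "Disabled"; tiering
+# only takes effect once status is set to "Enabled".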
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_intelligent_tiering_configuration import S3BucketIntelligentTieringConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example" + ) + S3BucketIntelligentTieringConfiguration(self, "example-filtered", + bucket=example.id, + filter=S3BucketIntelligentTieringConfigurationFilter( + prefix="documents/", + tags={ + "class": "blue", + "priority": "high" + } + ), + name="ImportantBlueDocuments", + status="Disabled", + tiering=[S3BucketIntelligentTieringConfigurationTiering( + access_tier="ARCHIVE_ACCESS", + days=125 + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required) Name of the bucket this intelligent tiering configuration is associated with. +* `name` - (Required) Unique name used to identify the S3 Intelligent-Tiering configuration for the bucket. +* `status` - (Optional) Specifies the status of the configuration. Valid values: `Enabled`, `Disabled`. +* `filter` - (Optional) Bucket filter. The configuration only includes objects that meet the filter's criteria (documented below). +* `tiering` - (Required) S3 Intelligent-Tiering storage class tiers of the configuration (documented below). + +The `filter` configuration supports the following: + +* `prefix` - (Optional) Object key name prefix that identifies the subset of objects to which the configuration applies. +* `tags` - (Optional) All of these tags must exist in the object's tag set in order for the configuration to apply. + +The `tiering` configuration supports the following: + +* `access_tier` - (Required) S3 Intelligent-Tiering access tier. Valid values: `ARCHIVE_ACCESS`, `DEEP_ARCHIVE_ACCESS`. +* `days` - (Required) Number of consecutive days of no access after which an object will be eligible to be transitioned to the corresponding tier. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket intelligent tiering configurations using `bucket:name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import S3 bucket intelligent tiering configurations using `bucket:name`. For example: + +```console +% terraform import aws_s3_bucket_intelligent_tiering_configuration.my-bucket-entire-bucket my-bucket:EntireBucket +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_inventory.html.markdown b/website/docs/cdktf/python/r/s3_bucket_inventory.html.markdown new file mode 100644 index 00000000000..4e8542b2061 --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_inventory.html.markdown @@ -0,0 +1,162 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_inventory" +description: |- + Provides a S3 bucket inventory configuration resource. +--- + + + +# Resource: aws_s3_bucket_inventory + +Provides a S3 bucket [inventory configuration](https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-inventory.html) resource. 
+ +## Example Usage + +### Add inventory configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_inventory import S3BucketInventory +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + inventory = S3Bucket(self, "inventory", + bucket="my-tf-inventory-bucket" + ) + test = S3Bucket(self, "test", + bucket="my-tf-test-bucket" + ) + aws_s3_bucket_inventory_test = S3BucketInventory(self, "test_2", + bucket=test.id, + destination=S3BucketInventoryDestination( + bucket=S3BucketInventoryDestinationBucket( + bucket_arn=inventory.arn, + format="ORC" + ) + ), + included_object_versions="All", + name="EntireBucketDaily", + schedule=S3BucketInventorySchedule( + frequency="Daily" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_inventory_test.override_logical_id("test") +``` + +### Add inventory configuration with S3 object prefix + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_inventory import S3BucketInventory +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + inventory = S3Bucket(self, "inventory", + bucket="my-tf-inventory-bucket" + ) + test = S3Bucket(self, "test", + bucket="my-tf-test-bucket" + ) + S3BucketInventory(self, "test-prefix", + bucket=test.id, + destination=S3BucketInventoryDestination( + bucket=S3BucketInventoryDestinationBucket( + bucket_arn=inventory.arn, + format="ORC", + prefix="inventory" + ) + ), + filter=S3BucketInventoryFilter( + prefix="documents/" + ), + included_object_versions="All", + name="DocumentsWeekly", + schedule=S3BucketInventorySchedule( + frequency="Daily" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required) Name of the source bucket that inventory lists the objects for. +* `name` - (Required) Unique identifier of the inventory configuration for the bucket. +* `included_object_versions` - (Required) Object versions to include in the inventory list. Valid values: `All`, `Current`. +* `schedule` - (Required) Specifies the schedule for generating inventory results (documented below). +* `destination` - (Required) Contains information about where to publish the inventory results (documented below). +* `enabled` - (Optional, Default: `true`) Specifies whether the inventory is enabled or disabled. +* `filter` - (Optional) Specifies an inventory filter. The inventory only includes objects that meet the filter's criteria (documented below). +* `optional_fields` - (Optional) List of optional fields that are included in the inventory results. Please refer to the S3 [documentation](https://docs.aws.amazon.com/AmazonS3/latest/API/API_InventoryConfiguration.html#AmazonS3-Type-InventoryConfiguration-OptionalFields) for more details. 
+ +The `filter` configuration supports the following: + +* `prefix` - (Optional) Prefix that an object must have to be included in the inventory results. + +The `schedule` configuration supports the following: + +* `frequency` - (Required) Specifies how frequently inventory results are produced. Valid values: `Daily`, `Weekly`. + +The `destination` configuration supports the following: + +* `bucket` - (Required) S3 bucket configuration where inventory results are published (documented below). + +The `bucket` configuration supports the following: + +* `bucket_arn` - (Required) Amazon S3 bucket ARN of the destination. +* `format` - (Required) Specifies the output format of the inventory results. Can be `CSV`, [`ORC`](https://orc.apache.org/) or [`Parquet`](https://parquet.apache.org/). +* `account_id` - (Optional) ID of the account that owns the destination bucket. Recommended to be set to prevent problems if the destination bucket ownership changes. +* `prefix` - (Optional) Prefix that is prepended to all inventory results. +* `encryption` - (Optional) Contains the type of server-side encryption to use to encrypt the inventory (documented below). + +The `encryption` configuration supports the following: + +* `sse_kms` - (Optional) Specifies to use server-side encryption with AWS KMS-managed keys to encrypt the inventory file (documented below). +* `sse_s3` - (Optional) Specifies to use server-side encryption with Amazon S3-managed keys (SSE-S3) to encrypt the inventory file. + +The `sse_kms` configuration supports the following: + +* `key_id` - (Required) ARN of the KMS customer master key (CMK) used to encrypt the inventory file. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket inventory configurations using `bucket:inventory`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import S3 bucket inventory configurations using `bucket:inventory`. For example: + +```console +% terraform import aws_s3_bucket_inventory.my-bucket-entire-bucket my-bucket:EntireBucket +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_lifecycle_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_lifecycle_configuration.html.markdown new file mode 100644 index 00000000000..52aa4c85885 --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_lifecycle_configuration.html.markdown @@ -0,0 +1,549 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_lifecycle_configuration" +description: |- + Provides a S3 bucket lifecycle configuration resource. +--- + + + +# Resource: aws_s3_bucket_lifecycle_configuration + +Provides an independent configuration resource for S3 bucket [lifecycle configuration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lifecycle-mgmt.html). + +An S3 Lifecycle configuration consists of one or more Lifecycle rules. 
Each rule consists of the following: + +* Rule metadata (`id` and `status`) +* [Filter](#filter) identifying objects to which the rule applies +* One or more transition or expiration actions + +For more information see the Amazon S3 User Guide on [`Lifecycle Configuration Elements`](https://docs.aws.amazon.com/AmazonS3/latest/userguide/intro-lifecycle-rules.html). + +~> **NOTE:** S3 Buckets only support a single lifecycle configuration. Declaring multiple `aws_s3_bucket_lifecycle_configuration` resources to the same S3 Bucket will cause a perpetual difference in configuration. + +## Example Usage + +### With neither a filter nor prefix specified + +The Lifecycle rule applies to a subset of objects based on the key name prefix (`""`). + +This configuration is intended to replicate the default behavior of the `lifecycle_rule` +parameter in the Terraform AWS Provider `aws_s3_bucket` resource prior to `v4.0`. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +### Specifying an empty filter + +The Lifecycle rule applies to all objects in the bucket. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter(), + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +### Specifying a filter using key prefixes + +The Lifecycle rule applies to a subset of objects based on the key name prefix (`logs/`). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + prefix="logs/" + ), + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +If you want to apply a Lifecycle action to a subset of objects based on different key name prefixes, specify separate rules. 
+ +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + prefix="logs/" + ), + id="rule-1", + status="Enabled" + ), S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + prefix="tmp/" + ), + id="rule-2", + status="Enabled" + ) + ] + ) +``` + +### Specifying a filter based on an object tag + +The Lifecycle rule specifies a filter based on a tag key and value. The rule then applies only to a subset of objects with the specific tag. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + tag=S3BucketLifecycleConfigurationRuleFilterTag( + key="Name", + value="Staging" + ) + ), + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +### Specifying a filter based on multiple tags + +The Lifecycle rule directs Amazon S3 to perform lifecycle actions on objects with two tags (with the specific tag keys and values). Notice `tags` is wrapped in the `and` configuration block. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + and=S3BucketLifecycleConfigurationRuleFilterAnd( + tags={ + "key1": "Value1", + "key2": "Value2" + } + ) + ), + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +### Specifying a filter based on both prefix and one or more tags + +The Lifecycle rule directs Amazon S3 to perform lifecycle actions on objects with the specified prefix and two tags (with the specific tag keys and values). Notice both `prefix` and `tags` are wrapped in the `and` configuration block. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + and=S3BucketLifecycleConfigurationRuleFilterAnd( + prefix="logs/", + tags={ + "key1": "Value1", + "key2": "Value2" + } + ) + ), + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +### Specifying a filter based on object size + +Object size values are in bytes. Maximum filter size is 5TB. Some storage classes have minimum object size limitations, for more information, see [Comparing the Amazon S3 storage classes](https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html#sc-compare). + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + object_size_greater_than=Token.as_string(500) + ), + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +### Specifying a filter based on object size range and prefix + +The `object_size_greater_than` must be less than the `object_size_less_than`. Notice both the object size range and prefix are wrapped in the `and` configuration block. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketLifecycleConfiguration(self, "example", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + and=S3BucketLifecycleConfigurationRuleFilterAnd( + object_size_greater_than=500, + object_size_less_than=64000, + prefix="logs/" + ) + ), + id="rule-1", + status="Enabled" + ) + ] + ) +``` + +### Creating a Lifecycle Configuration for a bucket with versioning + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
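+#
+# Note: this example manages two buckets: date- and days-based rules on a
+# plain bucket, and noncurrent-version rules on a versioned bucket; the
+# depends_on on the lifecycle resource ensures versioning is enabled first.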
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_lifecycle_configuration import S3BucketLifecycleConfiguration +from imports.aws.s3_bucket_versioning import S3BucketVersioningA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bucket = S3Bucket(self, "bucket", + bucket="my-bucket" + ) + versioning_bucket = S3Bucket(self, "versioning_bucket", + bucket="my-versioning-bucket" + ) + S3BucketAcl(self, "bucket_acl", + acl="private", + bucket=bucket.id + ) + S3BucketAcl(self, "versioning_bucket_acl", + acl="private", + bucket=versioning_bucket.id + ) + S3BucketLifecycleConfiguration(self, "bucket-config", + bucket=bucket.id, + rule=[S3BucketLifecycleConfigurationRule( + expiration=S3BucketLifecycleConfigurationRuleExpiration( + days=90 + ), + filter=S3BucketLifecycleConfigurationRuleFilter( + and=S3BucketLifecycleConfigurationRuleFilterAnd( + prefix="log/", + tags={ + "autoclean": "true", + "rule": "log" + } + ) + ), + id="log", + status="Enabled", + transition=[S3BucketLifecycleConfigurationRuleTransition( + days=30, + storage_class="STANDARD_IA" + ), S3BucketLifecycleConfigurationRuleTransition( + days=60, + storage_class="GLACIER" + ) + ] + ), S3BucketLifecycleConfigurationRule( + expiration=S3BucketLifecycleConfigurationRuleExpiration( + date="2023-01-13T00:00:00Z" + ), + filter=S3BucketLifecycleConfigurationRuleFilter( + prefix="tmp/" + ), + id="tmp", + status="Enabled" + ) + ] + ) + versioning = S3BucketVersioningA(self, "versioning", + bucket=versioning_bucket.id, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Enabled" + ) + ) + S3BucketLifecycleConfiguration(self, "versioning-bucket-config", + bucket=versioning_bucket.id, + depends_on=[versioning], + rule=[S3BucketLifecycleConfigurationRule( + filter=S3BucketLifecycleConfigurationRuleFilter( + prefix="config/" + ), + id="config", + noncurrent_version_expiration=S3BucketLifecycleConfigurationRuleNoncurrentVersionExpiration( + noncurrent_days=90 + ), + noncurrent_version_transition=[S3BucketLifecycleConfigurationRuleNoncurrentVersionTransition( + noncurrent_days=30, + storage_class="STANDARD_IA" + ), S3BucketLifecycleConfigurationRuleNoncurrentVersionTransition( + noncurrent_days=60, + storage_class="GLACIER" + ) + ], + status="Enabled" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required) Name of the source S3 bucket you want Amazon S3 to monitor. +* `expected_bucket_owner` - (Optional) Account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error. +* `rule` - (Required) List of configuration blocks describing the rules managing the replication. [See below](#rule). + +### rule + +~> **NOTE:** The `filter` argument, while Optional, is required if the `rule` configuration block does not contain a `prefix` **and** you intend to override the default behavior of setting the rule to filter objects with the empty string prefix (`""`). +Since `prefix` is deprecated by Amazon S3 and will be removed in the next major version of the Terraform AWS Provider, we recommend users either specify `filter` or leave both `filter` and `prefix` unspecified. 
+
+~> **NOTE:** A rule cannot be updated from having a filter (via either the `rule.filter` parameter or when neither `rule.filter` nor `rule.prefix` are specified) to only having a prefix via the `rule.prefix` parameter.
+
+~> **NOTE:** Terraform cannot distinguish between configurations that use `rule.filter {}` and configurations that use neither `rule.filter` nor `rule.prefix`, so a rule cannot be updated from applying to all objects in the bucket via `rule.filter {}` to applying to a subset of objects based on the key prefix `""` and vice versa.
+
+The `rule` configuration block supports the following arguments:
+
+* `abort_incomplete_multipart_upload` - (Optional) Configuration block that specifies the days since the initiation of an incomplete multipart upload that Amazon S3 will wait before permanently removing all parts of the upload. [See below](#abort_incomplete_multipart_upload).
+* `expiration` - (Optional) Configuration block that specifies the expiration for the lifecycle of the object in the form of date, days, and whether the object has a delete marker. [See below](#expiration).
+* `filter` - (Optional) Configuration block used to identify objects that a Lifecycle Rule applies to. [See below](#filter). If not specified, the `rule` will default to using `prefix`.
+* `id` - (Required) Unique identifier for the rule. The value cannot be longer than 255 characters.
+* `noncurrent_version_expiration` - (Optional) Configuration block that specifies when noncurrent object versions expire. [See below](#noncurrent_version_expiration).
+* `noncurrent_version_transition` - (Optional) Set of configuration blocks that specify the transition rule for the lifecycle rule that describes when noncurrent objects transition to a specific storage class. [See below](#noncurrent_version_transition).
+* `prefix` - (Optional) **DEPRECATED** Use `filter` instead. This has been deprecated by Amazon S3. Prefix identifying one or more objects to which the rule applies. Defaults to an empty string (`""`) if `filter` is not specified.
+* `status` - (Required) Whether the rule is currently being applied. Valid values: `Enabled` or `Disabled`.
+* `transition` - (Optional) Set of configuration blocks that specify when an Amazon S3 object transitions to a specified storage class. [See below](#transition).
+
+### abort_incomplete_multipart_upload
+
+The `abort_incomplete_multipart_upload` configuration block supports the following arguments:
+
+* `days_after_initiation` - Number of days after which Amazon S3 aborts an incomplete multipart upload.
+
+### expiration
+
+The `expiration` configuration block supports the following arguments:
+
+* `date` - (Optional) Date the object is to be moved or deleted. Should be in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `days` - (Optional) Lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.
+* `expired_object_delete_marker` - (Optional, Conflicts with `date` and `days`) Indicates whether Amazon S3 will remove a delete marker with no noncurrent versions. If set to `true`, the delete marker will be expired; if set to `false`, the policy takes no action.
+
+### filter
+
+~> **NOTE:** The `filter` configuration block must either be specified as the empty configuration block (`filter {}`) or with exactly one of `prefix`, `tag`, `and`, `object_size_greater_than` or `object_size_less_than` specified.
+ +The `filter` configuration block supports the following arguments: + +* `and`- (Optional) Configuration block used to apply a logical `AND` to two or more predicates. [See below](#and). The Lifecycle Rule will apply to any object matching all the predicates configured inside the `and` block. +* `object_size_greater_than` - (Optional) Minimum object size (in bytes) to which the rule applies. +* `object_size_less_than` - (Optional) Maximum object size (in bytes) to which the rule applies. +* `prefix` - (Optional) Prefix identifying one or more objects to which the rule applies. Defaults to an empty string (`""`) if not specified. +* `tag` - (Optional) Configuration block for specifying a tag key and value. [See below](#tag). + +### noncurrent_version_expiration + +The `noncurrent_version_expiration` configuration block supports the following arguments: + +* `newer_noncurrent_versions` - (Optional) Number of noncurrent versions Amazon S3 will retain. Must be a non-zero positive integer. +* `noncurrent_days` - (Optional) Number of days an object is noncurrent before Amazon S3 can perform the associated action. Must be a positive integer. + +### noncurrent_version_transition + +The `noncurrent_version_transition` configuration block supports the following arguments: + +* `newer_noncurrent_versions` - (Optional) Number of noncurrent versions Amazon S3 will retain. Must be a non-zero positive integer. +* `noncurrent_days` - (Optional) Number of days an object is noncurrent before Amazon S3 can perform the associated action. +* `storage_class` - (Required) Class of storage used to store the object. Valid Values: `GLACIER`, `STANDARD_IA`, `ONEZONE_IA`, `INTELLIGENT_TIERING`, `DEEP_ARCHIVE`, `GLACIER_IR`. + +### transition + +The `transition` configuration block supports the following arguments: + +~> **Note:** Only one of `date` or `days` should be specified. If neither are specified, the `transition` will default to 0 `days`. + +* `date` - (Optional, Conflicts with `days`) Date objects are transitioned to the specified storage class. The date value must be in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) and set to midnight UTC e.g. `2023-01-13T00:00:00Z`. +* `days` - (Optional, Conflicts with `date`) Number of days after creation when objects are transitioned to the specified storage class. The value must be a positive integer. If both `days` and `date` are not specified, defaults to `0`. Valid values depend on `storage_class`, see [Transition objects using Amazon S3 Lifecycle](https://docs.aws.amazon.com/AmazonS3/latest/userguide/lifecycle-transition-general-considerations.html) for more details. +* `storage_class` - Class of storage used to store the object. Valid Values: `GLACIER`, `STANDARD_IA`, `ONEZONE_IA`, `INTELLIGENT_TIERING`, `DEEP_ARCHIVE`, `GLACIER_IR`. + +### and + +The `and` configuration block supports the following arguments: + +* `object_size_greater_than` - (Optional) Minimum object size to which the rule applies. Value must be at least `0` if specified. +* `object_size_less_than` - (Optional) Maximum object size to which the rule applies. Value must be at least `1` if specified. +* `prefix` - (Optional) Prefix identifying one or more objects to which the rule applies. +* `tags` - (Optional) Key-value map of resource tags. All of these tags must exist in the object's tag set in order for the rule to apply. + +### tag + +The `tag` configuration block supports the following arguments: + +* `key` - (Required) Name of the object key. 
+* `value` - (Required) Value of the tag. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket lifecycle configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** S3 bucket lifecycle configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_lifecycle_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_lifecycle_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_logging.html.markdown b/website/docs/cdktf/python/r/s3_bucket_logging.html.markdown new file mode 100644 index 00000000000..c721ea4103f --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_logging.html.markdown @@ -0,0 +1,132 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_logging" +description: |- + Provides an S3 bucket (server access) logging resource. +--- + + + +# Resource: aws_s3_bucket_logging + +Provides an S3 bucket (server access) logging resource. For more information, see [Logging requests using server access logging](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ServerLogs.html) +in the AWS S3 User Guide. + +~> **Note:** Amazon S3 supports server access logging, AWS CloudTrail, or a combination of both. Refer to the [Logging options for Amazon S3](https://docs.aws.amazon.com/AmazonS3/latest/userguide/logging-with-S3.html) +to decide which method meets your requirements. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
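+#
+# Note: the log bucket below uses the "log-delivery-write" canned ACL so
+# Amazon S3's log delivery group can write server access logs into it.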
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_logging import S3BucketLoggingA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="my-tf-example-bucket" + ) + log_bucket = S3Bucket(self, "log_bucket", + bucket="my-tf-log-bucket" + ) + aws_s3_bucket_acl_example = S3BucketAcl(self, "example_2", + acl="private", + bucket=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_acl_example.override_logical_id("example") + S3BucketAcl(self, "log_bucket_acl", + acl="log-delivery-write", + bucket=log_bucket.id + ) + aws_s3_bucket_logging_example = S3BucketLoggingA(self, "example_4", + bucket=example.id, + target_bucket=log_bucket.id, + target_prefix="log/" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_logging_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `target_bucket` - (Required) Name of the bucket where you want Amazon S3 to store server access logs. +* `target_prefix` - (Required) Prefix for all log object keys. +* `target_grant` - (Optional) Set of configuration blocks with information for granting permissions. [See below](#target_grant). + +### target_grant + +The `target_grant` configuration block supports the following arguments: + +* `grantee` - (Required) Configuration block for the person being granted permissions. [See below](#grantee). +* `permission` - (Required) Logging permissions assigned to the grantee for the bucket. Valid values: `FULL_CONTROL`, `READ`, `WRITE`. + +### grantee + +The `grantee` configuration block supports the following arguments: + +* `email_address` - (Optional) Email address of the grantee. See [Regions and Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region) for supported AWS regions where this argument can be specified. +* `id` - (Optional) Canonical user ID of the grantee. +* `type` - (Required) Type of grantee. Valid values: `CanonicalUser`, `AmazonCustomerByEmail`, `Group`. +* `uri` - (Optional) URI of the grantee group. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket logging using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). 
For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_logging.example
+        #     id = "bucket-name"
+        #   }
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_logging.example
+        #     id = "bucket-name,123456789012"
+        #   }
+```
+
+**Using `terraform import` to import** S3 bucket logging using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_logging.example bucket-name
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_logging.example bucket-name,123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_metric.html.markdown b/website/docs/cdktf/python/r/s3_bucket_metric.html.markdown
new file mode 100644
index 00000000000..fb23bcc6ed8
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_metric.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_metric"
+description: |-
+  Provides an S3 bucket metrics configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_metric
+
+Provides an S3 bucket [metrics configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/metrics-configurations.html) resource.
+
+## Example Usage
+
+### Add metrics configuration for entire S3 bucket
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_metric import S3BucketMetric
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        S3BucketMetric(self, "example-entire-bucket",
+            bucket=example.id,
+            name="EntireBucket"
+        )
+```
+
+### Add metrics configuration with S3 object filter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_metric import S3BucketMetric
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        S3BucketMetric(self, "example-filtered",
+            bucket=example.id,
+            filter=S3BucketMetricFilter(
+                prefix="documents/",
+                tags={
+                    "class": "blue",
+                    "priority": "high"
+                }
+            ),
+            name="ImportantBlueDocuments"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket to put the metrics configuration in.
+* `name` - (Required) Unique identifier of the metrics configuration for the bucket. Must be less than or equal to 64 characters in length.
+* `filter` - (Optional) [Object filtering](http://docs.aws.amazon.com/AmazonS3/latest/dev/metrics-configurations.html#metrics-configurations-filter) that accepts a prefix, tags, or a logical AND of prefix and tags (documented below).
+
+The `filter` metric configuration supports the following:
+
+~> **NOTE:** At least one of `prefix` or `tags` is required when specifying a `filter`.
+
+* `prefix` - (Optional) Object prefix for filtering (singular).
+* `tags` - (Optional) Object tags for filtering (up to 10).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket metric configurations using `bucket:metric`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_metric.my-bucket-entire-bucket
+        #     id = "my-bucket:EntireBucket"
+        #   }
+```
+
+Using `terraform import`, import S3 bucket metric configurations using `bucket:metric`. For example:
+
+```console
+% terraform import aws_s3_bucket_metric.my-bucket-entire-bucket my-bucket:EntireBucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_notification.html.markdown b/website/docs/cdktf/python/r/s3_bucket_notification.html.markdown
new file mode 100644
index 00000000000..b2baf9058e3
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_notification.html.markdown
@@ -0,0 +1,444 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_notification"
+description: |-
+  Manages an S3 Bucket Notification Configuration
+---
+
+
+
+# Resource: aws_s3_bucket_notification
+
+Manages an S3 Bucket Notification Configuration. For additional information, see the [Configuring S3 Event Notifications section in the Amazon S3 Developer Guide](https://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html).
+
+~> **NOTE:** S3 Buckets only support a single notification configuration. Declaring multiple `aws_s3_bucket_notification` resources to the same S3 Bucket will cause a perpetual difference in configuration. See the example "Trigger multiple Lambda functions" for an option.
+
+## Example Usage
+
+### Add notification configuration to SNS Topic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_notification import S3BucketNotification +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bucket = S3Bucket(self, "bucket", + bucket="your-bucket-name" + ) + topic = DataAwsIamPolicyDocument(self, "topic", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["SNS:Publish"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="ArnLike", + values=[bucket.arn], + variable="aws:SourceArn" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["s3.amazonaws.com"], + type="Service" + ) + ], + resources=["arn:aws:sns:*:*:s3-event-notification-topic"] + ) + ] + ) + aws_sns_topic_topic = SnsTopic(self, "topic_2", + name="s3-event-notification-topic", + policy=Token.as_string(topic.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sns_topic_topic.override_logical_id("topic") + S3BucketNotification(self, "bucket_notification", + bucket=bucket.id, + topic=[S3BucketNotificationTopic( + events=["s3:ObjectCreated:*"], + filter_suffix=".log", + topic_arn=Token.as_string(aws_sns_topic_topic.arn) + ) + ] + ) +``` + +### Add notification configuration to SQS Queue + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_notification import S3BucketNotification +from imports.aws.sqs_queue import SqsQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bucket = S3Bucket(self, "bucket", + bucket="your-bucket-name" + ) + queue = DataAwsIamPolicyDocument(self, "queue", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sqs:SendMessage"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="ArnEquals", + values=[bucket.arn], + variable="aws:SourceArn" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="*" + ) + ], + resources=["arn:aws:sqs:*:*:s3-event-notification-queue"] + ) + ] + ) + aws_sqs_queue_queue = SqsQueue(self, "queue_2", + name="s3-event-notification-queue", + policy=Token.as_string(queue.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sqs_queue_queue.override_logical_id("queue") + S3BucketNotification(self, "bucket_notification", + bucket=bucket.id, + queue=[S3BucketNotificationQueue( + events=["s3:ObjectCreated:*"], + filter_suffix=".log", + queue_arn=Token.as_string(aws_sqs_queue_queue.arn) + ) + ] + ) +``` + +### Add notification configuration to Lambda Function + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
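+# NOTE: S3 can only invoke the function once the aws_lambda_permission below
+# exists, which is why the notification resource lists it in depends_on.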
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.lambda_function import LambdaFunction +from imports.aws.lambda_permission import LambdaPermission +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_notification import S3BucketNotification +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bucket = S3Bucket(self, "bucket", + bucket="your-bucket-name" + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["lambda.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + iam_for_lambda = IamRole(self, "iam_for_lambda", + assume_role_policy=Token.as_string(assume_role.json), + name="iam_for_lambda" + ) + func = LambdaFunction(self, "func", + filename="your-function.zip", + function_name="example_lambda_name", + handler="exports.example", + role=iam_for_lambda.arn, + runtime="go1.x" + ) + allow_bucket = LambdaPermission(self, "allow_bucket", + action="lambda:InvokeFunction", + function_name=func.arn, + principal="s3.amazonaws.com", + source_arn=bucket.arn, + statement_id="AllowExecutionFromS3Bucket" + ) + S3BucketNotification(self, "bucket_notification", + bucket=bucket.id, + depends_on=[allow_bucket], + lambda_function=[S3BucketNotificationLambdaFunction( + events=["s3:ObjectCreated:*"], + filter_prefix="AWSLogs/", + filter_suffix=".log", + lambda_function_arn=func.arn + ) + ] + ) +``` + +### Trigger multiple Lambda functions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
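+# NOTE: both functions share one execution role, but each needs its own
+# aws_lambda_permission, and the two notifications use distinct filter_prefix values.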
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.lambda_function import LambdaFunction
+from imports.aws.lambda_permission import LambdaPermission
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_notification import S3BucketNotification
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        bucket = S3Bucket(self, "bucket",
+            bucket="your-bucket-name"
+        )
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["lambda.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        iam_for_lambda = IamRole(self, "iam_for_lambda",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="iam_for_lambda"
+        )
+        func1 = LambdaFunction(self, "func1",
+            filename="your-function1.zip",
+            function_name="example_lambda_name1",
+            handler="exports.example",
+            role=iam_for_lambda.arn,
+            runtime="go1.x"
+        )
+        func2 = LambdaFunction(self, "func2",
+            filename="your-function2.zip",
+            function_name="example_lambda_name2",
+            handler="exports.example",
+            role=iam_for_lambda.arn,
+            runtime="go1.x"
+        )
+        allow_bucket1 = LambdaPermission(self, "allow_bucket1",
+            action="lambda:InvokeFunction",
+            function_name=func1.arn,
+            principal="s3.amazonaws.com",
+            source_arn=bucket.arn,
+            statement_id="AllowExecutionFromS3Bucket1"
+        )
+        allow_bucket2 = LambdaPermission(self, "allow_bucket2",
+            action="lambda:InvokeFunction",
+            function_name=func2.arn,
+            principal="s3.amazonaws.com",
+            source_arn=bucket.arn,
+            statement_id="AllowExecutionFromS3Bucket2"
+        )
+        S3BucketNotification(self, "bucket_notification",
+            bucket=bucket.id,
+            depends_on=[allow_bucket1, allow_bucket2],
+            lambda_function=[S3BucketNotificationLambdaFunction(
+                events=["s3:ObjectCreated:*"],
+                filter_prefix="AWSLogs/",
+                filter_suffix=".log",
+                lambda_function_arn=func1.arn
+            ), S3BucketNotificationLambdaFunction(
+                events=["s3:ObjectCreated:*"],
+                filter_prefix="OtherLogs/",
+                filter_suffix=".log",
+                lambda_function_arn=func2.arn
+            )
+            ]
+        )
+```
+
+### Add multiple notification configurations to SQS Queue
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
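+# NOTE: a single aws_s3_bucket_notification resource carries both queue
+# configurations; the id and filter_prefix values distinguish them.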
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_notification import S3BucketNotification +from imports.aws.sqs_queue import SqsQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bucket = S3Bucket(self, "bucket", + bucket="your-bucket-name" + ) + queue = DataAwsIamPolicyDocument(self, "queue", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sqs:SendMessage"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="ArnEquals", + values=[bucket.arn], + variable="aws:SourceArn" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="*" + ) + ], + resources=["arn:aws:sqs:*:*:s3-event-notification-queue"] + ) + ] + ) + aws_sqs_queue_queue = SqsQueue(self, "queue_2", + name="s3-event-notification-queue", + policy=Token.as_string(queue.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sqs_queue_queue.override_logical_id("queue") + S3BucketNotification(self, "bucket_notification", + bucket=bucket.id, + queue=[S3BucketNotificationQueue( + events=["s3:ObjectCreated:*"], + filter_prefix="images/", + id="image-upload-event", + queue_arn=Token.as_string(aws_sqs_queue_queue.arn) + ), S3BucketNotificationQueue( + events=["s3:ObjectCreated:*"], + filter_prefix="videos/", + id="video-upload-event", + queue_arn=Token.as_string(aws_sqs_queue_queue.arn) + ) + ] + ) +``` + +For Terraform's [JSON syntax](https://www.terraform.io/docs/configuration/syntax.html), use an array instead of defining the `queue` key twice. + +```json +{ + "bucket": "${aws_s3_bucket.bucket.id}", + "queue": [ + { + "id": "image-upload-event", + "queue_arn": "${aws_sqs_queue.queue.arn}", + "events": ["s3:ObjectCreated:*"], + "filter_prefix": "images/" + }, + { + "id": "video-upload-event", + "queue_arn": "${aws_sqs_queue.queue.arn}", + "events": ["s3:ObjectCreated:*"], + "filter_prefix": "videos/" + } + ] +} +``` + +### Emit events to EventBridge + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_notification import S3BucketNotification +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + bucket = S3Bucket(self, "bucket", + bucket="your-bucket-name" + ) + S3BucketNotification(self, "bucket_notification", + bucket=bucket.id, + eventbridge=True + ) +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Name of the bucket for notification configuration. + +The following arguments are optional: + +* `eventbridge` - (Optional) Whether to enable Amazon EventBridge notifications. Defaults to `false`. +* `lambda_function` - (Optional, Multiple) Used to configure notifications to a Lambda Function. See below. +* `queue` - (Optional) Notification configuration to SQS Queue. See below. +* `topic` - (Optional) Notification configuration to SNS Topic. See below. 
+
+### `lambda_function`
+
+* `events` - (Required) [Event](http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html#notification-how-to-event-types-and-destinations) for which to send notifications.
+* `filter_prefix` - (Optional) Object key name prefix.
+* `filter_suffix` - (Optional) Object key name suffix.
+* `id` - (Optional) Unique identifier for each of the notification configurations.
+* `lambda_function_arn` - (Required) Lambda function ARN.
+
+### `queue`
+
+* `events` - (Required) Specifies [event](http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html#notification-how-to-event-types-and-destinations) for which to send notifications.
+* `filter_prefix` - (Optional) Object key name prefix.
+* `filter_suffix` - (Optional) Object key name suffix.
+* `id` - (Optional) Unique identifier for each of the notification configurations.
+* `queue_arn` - (Required) SQS queue ARN.
+
+### `topic`
+
+* `events` - (Required) [Event](http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html#notification-how-to-event-types-and-destinations) for which to send notifications.
+* `filter_prefix` - (Optional) Object key name prefix.
+* `filter_suffix` - (Optional) Object key name suffix.
+* `id` - (Optional) Unique identifier for each of the notification configurations.
+* `topic_arn` - (Required) SNS topic ARN.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket notification using the `bucket`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_notification.bucket_notification
+        #     id = "bucket-name"
+        #   }
+```
+
+Using `terraform import`, import S3 bucket notification using the `bucket`. For example:
+
+```console
+% terraform import aws_s3_bucket_notification.bucket_notification bucket-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_object.html.markdown b/website/docs/cdktf/python/r/s3_bucket_object.html.markdown
new file mode 100644
index 00000000000..b610b6df8b8
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_object.html.markdown
@@ -0,0 +1,279 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object"
+description: |-
+  Provides an S3 object resource.
+---
+
+
+
+# Resource: aws_s3_bucket_object
+
+~> **NOTE:** The `aws_s3_bucket_object` resource is DEPRECATED and will be removed in a future version! Use `aws_s3_object` instead, where new features and fixes will be added. When replacing `aws_s3_bucket_object` with `aws_s3_object` in your configuration, on the next apply, Terraform will recreate the object. If you prefer not to have Terraform recreate the object, import the object using `aws_s3_object`.
+
+Provides an S3 object resource.
+
+## Example Usage
+
+### Uploading a file to a bucket
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.s3_bucket_object import S3BucketObject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketObject(self, "object", + bucket="your_bucket_name", + etag=Token.as_string(Fn.filemd5("path/to/file")), + key="new_object_key", + source="path/to/file" + ) +``` + +### Encrypting with KMS Key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.kms_key import KmsKey +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_object import S3BucketObject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplekms = KmsKey(self, "examplekms", + deletion_window_in_days=7, + description="KMS key 1" + ) + examplebucket = S3Bucket(self, "examplebucket", + bucket="examplebuckettftest" + ) + S3BucketAcl(self, "example", + acl="private", + bucket=examplebucket.id + ) + aws_s3_bucket_object_example = S3BucketObject(self, "example_3", + bucket=examplebucket.id, + key="someobject", + kms_key_id=examplekms.arn, + source="index.html" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_object_example.override_logical_id("example") +``` + +### Server Side Encryption with S3 Default Master Key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_object import S3BucketObject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplebucket = S3Bucket(self, "examplebucket", + bucket="examplebuckettftest" + ) + S3BucketAcl(self, "example", + acl="private", + bucket=examplebucket.id + ) + aws_s3_bucket_object_example = S3BucketObject(self, "example_2", + bucket=examplebucket.id, + key="someobject", + server_side_encryption="aws:kms", + source="index.html" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_object_example.override_logical_id("example") +``` + +### Server Side Encryption with AWS-Managed Key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
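+# NOTE: server_side_encryption="AES256" selects SSE-S3, i.e. encryption with
+# S3-managed keys; no KMS key is involved.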
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_object import S3BucketObject +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplebucket = S3Bucket(self, "examplebucket", + bucket="examplebuckettftest" + ) + S3BucketAcl(self, "example", + acl="private", + bucket=examplebucket.id + ) + aws_s3_bucket_object_example = S3BucketObject(self, "example_2", + bucket=examplebucket.id, + key="someobject", + server_side_encryption="AES256", + source="index.html" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_object_example.override_logical_id("example") +``` + +### S3 Object Lock + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_object import S3BucketObject +from imports.aws.s3_bucket_versioning import S3BucketVersioningA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplebucket = S3Bucket(self, "examplebucket", + bucket="examplebuckettftest", + object_lock_enabled=True + ) + S3BucketAcl(self, "example", + acl="private", + bucket=examplebucket.id + ) + aws_s3_bucket_versioning_example = S3BucketVersioningA(self, "example_2", + bucket=examplebucket.id, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Enabled" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_versioning_example.override_logical_id("example") + aws_s3_bucket_object_example = S3BucketObject(self, "example_3", + bucket=examplebucket.id, + depends_on=[aws_s3_bucket_versioning_example], + force_destroy=True, + key="someobject", + object_lock_legal_hold_status="ON", + object_lock_mode="GOVERNANCE", + object_lock_retain_until_date="2021-12-31T23:59:60Z", + source="important.txt" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_object_example.override_logical_id("example") +``` + +## Argument Reference + +-> **Note:** If you specify `content_encoding` you are responsible for encoding the body appropriately. `source`, `content`, and `content_base64` all expect already encoded/compressed bytes. + +The following arguments are required: + +* `bucket` - (Required) Name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified. +* `key` - (Required) Name of the object once it is in the bucket. + +The following arguments are optional: + +* `acl` - (Optional) [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `aws-exec-read`, `authenticated-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Defaults to `private`. 
+* `bucket_key_enabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS.
+* `cache_control` - (Optional) Caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
+* `content_base64` - (Optional, conflicts with `source` and `content`) Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file.
+* `content_disposition` - (Optional) Presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
+* `content_encoding` - (Optional) Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
+* `content_language` - (Optional) Language the content is in, e.g., en-US or en-GB.
+* `content_type` - (Optional) Standard MIME type describing the format of the object data, e.g., application/octet-stream. All valid MIME types are accepted for this input.
+* `content` - (Optional, conflicts with `source` and `content_base64`) Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text.
+* `etag` - (Optional) Triggers updates when the value changes. The only meaningful value is `filemd5("path/to/file")` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier). This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"` (see `source_hash` instead).
+* `force_destroy` - (Optional) Whether to allow the object to be deleted by removing any legal hold on any object version. Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
+* `kms_key_id` - (Optional) ARN of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the `aws_kms_key` resource, use the `arn` attribute. If referencing the `aws_kms_alias` data source or resource, use the `target_key_arn` attribute. Terraform will only perform drift detection if a configuration value is provided.
+* `metadata` - (Optional) Map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`; note that only lowercase labels are currently supported by the AWS Go API).
+* `object_lock_legal_hold_status` - (Optional) [Legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
+* `object_lock_mode` - (Optional) Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `object_lock_retain_until_date` - (Optional) Date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
+* `server_side_encryption` - (Optional) Server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`".
+* `source_hash` - (Optional) Triggers updates like `etag` but useful to address `etag` encryption limitations. Set using `filemd5("path/to/source")` (Terraform 0.11.12 or later). (The value is only stored in state and not saved by AWS.)
+* `source` - (Optional, conflicts with `content` and `content_base64`) Path to a file that will be read and uploaded as raw bytes for the object content.
+* `storage_class` - (Optional) [Storage Class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#AmazonS3-PutObject-request-header-StorageClass) for the object. Defaults to "`STANDARD`".
+* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `website_redirect` - (Optional) Target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
+
+If no content is provided through `source`, `content`, or `content_base64`, then the object will be empty.
+
+-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html).
+* `id` - `key` of the resource supplied above.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `version_id` - Unique version ID value for the object, if bucket versioning is enabled.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import objects using the `id` or S3 URL.
For example:
+
+Import using the `id`, which is the bucket name and the key together:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_object.example
+        #     id = "some-bucket-name/some/key.txt"
+        #   }
+```
+
+Import using S3 URL syntax:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_object.example
+        #     id = "s3://some-bucket-name/some/key.txt"
+        #   }
+```
+
+**Using `terraform import` to import** objects using the `id` or S3 URL. For example:
+
+Import using the `id`, which is the bucket name and the key together:
+
+```console
+% terraform import aws_s3_bucket_object.example some-bucket-name/some/key.txt
+```
+
+Import using S3 URL syntax:
+
+```console
+% terraform import aws_s3_bucket_object.example s3://some-bucket-name/some/key.txt
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_object_lock_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_object_lock_configuration.html.markdown
new file mode 100644
index 00000000000..4cfe4c8fcd7
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_object_lock_configuration.html.markdown
@@ -0,0 +1,185 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object_lock_configuration"
+description: |-
+  Provides an S3 bucket Object Lock configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_object_lock_configuration
+
+Provides an S3 bucket Object Lock configuration resource. For more information about Object Locking, go to [Using S3 Object Lock](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock.html) in the Amazon S3 User Guide.
+
+~> **NOTE:** This resource **does not enable** Object Lock for **new** buckets. It configures a default retention period for objects placed in the specified bucket.
+Thus, to **enable** Object Lock for a **new** bucket, see the [Using object lock configuration](s3_bucket.html.markdown#using-object-lock-configuration) section in the `aws_s3_bucket` resource or the [Object Lock configuration for a new bucket](#object-lock-configuration-for-a-new-bucket) example below.
+If you want to **enable** Object Lock for an **existing** bucket, contact AWS Support and see the [Object Lock configuration for an existing bucket](#object-lock-configuration-for-an-existing-bucket) example below.
+
+## Example Usage
+
+### Object Lock configuration for a new bucket
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
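+# NOTE: object_lock_enabled must be set when the bucket is created; the rule
+# below only configures the default retention applied to new objects.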
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_object_lock_configuration import S3BucketObjectLockConfigurationA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="mybucket", + object_lock_enabled=True + ) + aws_s3_bucket_object_lock_configuration_example = + S3BucketObjectLockConfigurationA(self, "example_1", + bucket=example.id, + rule=S3BucketObjectLockConfigurationRuleA( + default_retention=S3BucketObjectLockConfigurationRuleDefaultRetentionA( + days=5, + mode="COMPLIANCE" + ) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_object_lock_configuration_example.override_logical_id("example") +``` + +### Object Lock configuration for an existing bucket + +This is a multistep process that requires AWS Support intervention. + +1. Enable versioning on your S3 bucket, if you have not already done so. +Doing so will generate an "Object Lock token" in the back-end. + + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_versioning import S3BucketVersioningA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="mybucket" + ) + aws_s3_bucket_versioning_example = S3BucketVersioningA(self, "example_1", + bucket=example.id, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Enabled" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_versioning_example.override_logical_id("example") +``` + + +2. Contact AWS Support to provide you with the "Object Lock token" for the specified bucket and use the token (or token ID) within your new `aws_s3_bucket_object_lock_configuration` resource. + Notice the `object_lock_enabled` argument does not need to be specified as it defaults to `Enabled`. + + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
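+# NOTE: the token below is the "Object Lock token" obtained from AWS Support,
+# as described in step 2 above.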
+# +from imports.aws.s3_bucket_object_lock_configuration import S3BucketObjectLockConfigurationA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketObjectLockConfigurationA(self, "example", + bucket=Token.as_string(aws_s3_bucket_example.id), + rule=S3BucketObjectLockConfigurationRuleA( + default_retention=S3BucketObjectLockConfigurationRuleDefaultRetentionA( + days=5, + mode="COMPLIANCE" + ) + ), + token="NG2MKsfoLqV3A+aquXneSG4LOu/ekrlXkRXwIPFVfERT7XOPos+/k444d7RIH0E3W3p5QU6ml2exS2F/eYCFmMWHJ3hFZGk6al1sIJkmNhUMYmsv0jYVQyTTZNLM+DnfooA6SATt39mM1VW1yJh4E+XljMlWzaBwHKbss3/EjlGDjOmVhaSs4Z6427mMCaFD0RLwsYY7zX49gEc31YfOMJGxbXCXSeyNwAhhM/A8UH7gQf38RmjHjjAFbbbLtl8arsxTPW8F1IYohqwmKIr9DnotLLj8Tg44U2SPwujVaqmlKKP9s41rfgb4UbIm7khSafDBng0LGfxC4pMlT9Ny2w==" + ) +``` + + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `object_lock_enabled` - (Optional, Forces new resource) Indicates whether this bucket has an Object Lock configuration enabled. Defaults to `Enabled`. Valid values: `Enabled`. +* `rule` - (Optional) Configuration block for specifying the Object Lock rule for the specified object. [See below](#rule). +* `token` - (Optional) Token to allow Object Lock to be enabled for an existing bucket. You must contact AWS support for the bucket's "Object Lock token". +The token is generated in the back-end when [versioning](https://docs.aws.amazon.com/AmazonS3/latest/userguide/manage-versioning-examples.html) is enabled on a bucket. For more details on versioning, see the [`aws_s3_bucket_versioning` resource](s3_bucket_versioning.html.markdown). + +### rule + +The `rule` configuration block supports the following arguments: + +* `default_retention` - (Required) Configuration block for specifying the default Object Lock retention settings for new objects placed in the specified bucket. [See below](#default_retention). + +### default_retention + +The `default_retention` configuration block supports the following arguments: + +* `days` - (Optional, Required if `years` is not specified) Number of days that you want to specify for the default retention period. +* `mode` - (Required) Default Object Lock retention mode you want to apply to new objects placed in the specified bucket. Valid values: `COMPLIANCE`, `GOVERNANCE`. +* `years` - (Optional, Required if `days` is not specified) Number of years that you want to specify for the default retention period. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket Object Lock configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). 
For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_object_lock_configuration.example
+        #     id = "bucket-name"
+        #   }
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```terraform
+import {
+  to = aws_s3_bucket_object_lock_configuration.example
+  id = "bucket-name,123456789012"
+}
+```
+
+**Using `terraform import` to import** S3 bucket Object Lock configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_object_lock_configuration.example bucket-name
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_object_lock_configuration.example bucket-name,123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_ownership_controls.html.markdown b/website/docs/cdktf/python/r/s3_bucket_ownership_controls.html.markdown
new file mode 100644
index 00000000000..beafe675c53
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_ownership_controls.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_ownership_controls"
+description: |-
+  Manages S3 Bucket Ownership Controls.
+---
+
+
+
+# Resource: aws_s3_bucket_ownership_controls
+
+Provides a resource to manage S3 Bucket Ownership Controls. For more information, see the [S3 Developer Guide](https://docs.aws.amazon.com/AmazonS3/latest/dev/about-object-ownership.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_ownership_controls import S3BucketOwnershipControls
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_s3_bucket_ownership_controls_example = S3BucketOwnershipControls(self, "example_1",
+            bucket=example.id,
+            rule=S3BucketOwnershipControlsRule(
+                object_ownership="BucketOwnerPreferred"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_ownership_controls_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `bucket` - (Required) Name of the bucket that you want to associate these Ownership Controls with.
+* `rule` - (Required) Configuration block(s) with Ownership Controls rules. Detailed below.
+
+### rule Configuration Block
+
+The following arguments are required:
+
+* `object_ownership` - (Required) Object ownership. Valid values: `BucketOwnerPreferred`, `ObjectWriter`, or `BucketOwnerEnforced`.
+    * `BucketOwnerPreferred` - Objects uploaded to the bucket change ownership to the bucket owner if the objects are uploaded with the `bucket-owner-full-control` canned ACL.
+    * `ObjectWriter` - Uploading account will own the object if the object is uploaded with the `bucket-owner-full-control` canned ACL.
+    * `BucketOwnerEnforced` - Bucket owner automatically owns and has full control over every object in the bucket. ACLs no longer affect permissions to data in the S3 bucket.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - S3 Bucket name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Bucket Ownership Controls using S3 Bucket name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_ownership_controls.example
+        #     id = "my-bucket"
+        #   }
+```
+
+Using `terraform import`, import S3 Bucket Ownership Controls using S3 Bucket name. For example:
+
+```console
+% terraform import aws_s3_bucket_ownership_controls.example my-bucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_policy.html.markdown b/website/docs/cdktf/python/r/s3_bucket_policy.html.markdown
new file mode 100644
index 00000000000..d6e0a6a7a71
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_policy.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_policy"
+description: |-
+  Attaches a policy to an S3 bucket resource.
+---
+
+
+
+# Resource: aws_s3_bucket_policy
+
+Attaches a policy to an S3 bucket resource.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_policy import S3BucketPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="my-tf-test-bucket"
+        )
+        allow_access_from_another_account = DataAwsIamPolicyDocument(self, "allow_access_from_another_account",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["s3:GetObject", "s3:ListBucket"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["123456789012"],
+                    type="AWS"
+                )
+                ],
+                resources=[example.arn, "${" + example.arn + "}/*"]
+            )
+            ]
+        )
+        aws_s3_bucket_policy_allow_access_from_another_account = S3BucketPolicy(self, "allow_access_from_another_account_2",
+            bucket=example.id,
+            policy=Token.as_string(allow_access_from_another_account.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_policy_allow_access_from_another_account.override_logical_id("allow_access_from_another_account")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket to which to apply the policy.
+* `policy` - (Required) Text of the policy. Although this is a bucket policy rather than an IAM policy, the [`aws_iam_policy_document`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) data source may be used, so long as it specifies a principal. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). Note: Bucket policies are limited to 20 KB in size.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket policies using the bucket name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_policy.allow_access_from_another_account
+        #     id = "my-tf-test-bucket"
+        #   }
+```
+
+Using `terraform import`, import S3 bucket policies using the bucket name. For example:
+
+```console
+% terraform import aws_s3_bucket_policy.allow_access_from_another_account my-tf-test-bucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_public_access_block.html.markdown b/website/docs/cdktf/python/r/s3_bucket_public_access_block.html.markdown
new file mode 100644
index 00000000000..50185c1c27e
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_public_access_block.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_public_access_block"
+description: |-
+  Manages S3 bucket-level Public Access Block Configuration
+---
+
+
+
+# Resource: aws_s3_bucket_public_access_block
+
+Manages S3 bucket-level Public Access Block configuration. For more information about these settings, see the [AWS S3 Block Public Access documentation](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_public_access_block import S3BucketPublicAccessBlock
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_s3_bucket_public_access_block_example = S3BucketPublicAccessBlock(self, "example_1",
+            block_public_acls=True,
+            block_public_policy=True,
+            bucket=example.id,
+            ignore_public_acls=True,
+            restrict_public_buckets=True
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_public_access_block_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) S3 Bucket to which this Public Access Block configuration should be applied.
+* `block_public_acls` - (Optional) Whether Amazon S3 should block public ACLs for this bucket. Defaults to `false`. Enabling this setting does not affect existing policies or ACLs. When set to `true`, it causes the following behavior:
+    * PUT Bucket acl and PUT Object acl calls will fail if the specified ACL allows public access.
+    * PUT Object calls will fail if the request includes an object ACL.
+* `block_public_policy` - (Optional) Whether Amazon S3 should block public bucket policies for this bucket. Defaults to `false`. Enabling this setting does not affect the existing bucket policy. When set to `true`, it causes Amazon S3 to:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignore_public_acls` - (Optional) Whether Amazon S3 should ignore public ACLs for this bucket. Defaults to `false`. Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set. When set to `true`, it causes Amazon S3 to:
+    * Ignore public ACLs on this bucket and any objects that it contains.
+* `restrict_public_buckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for this bucket. Defaults to `false`. Enabling this setting does not affect the previously stored bucket policy, except that public and cross-account access within the public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`:
+    * Only the bucket owner and AWS services can access this bucket if it has a public policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the S3 bucket the configuration is attached to.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_s3_bucket_public_access_block` using the bucket name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The `import` block itself is not converted by 'cdktf convert';
+        # the HCL equivalent of this import is:
+        #   import {
+        #     to = aws_s3_bucket_public_access_block.example
+        #     id = "my-bucket"
+        #   }
+```
+
+Using `terraform import`, import `aws_s3_bucket_public_access_block` using the bucket name. For example:
+
+```console
+% terraform import aws_s3_bucket_public_access_block.example my-bucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_replication_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_replication_configuration.html.markdown
new file mode 100644
index 00000000000..5da108be1bf
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_replication_configuration.html.markdown
@@ -0,0 +1,455 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_replication_configuration"
+description: |-
+  Provides an S3 bucket replication configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_replication_configuration
+
+Provides an independent configuration resource for S3 bucket [replication configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html).
+
+~> **NOTE:** S3 Buckets only support a single replication configuration.
Declaring multiple `aws_s3_bucket_replication_configuration` resources to the same S3 Bucket will cause a perpetual difference in configuration. + +## Example Usage + +### Using replication configuration + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_policy import IamPolicy +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +from imports.aws.provider import AwsProvider +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_replication_configuration import S3BucketReplicationConfigurationA +from imports.aws.s3_bucket_versioning import S3BucketVersioningA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + AwsProvider(self, "aws", + region="eu-west-1" + ) + central = AwsProvider(self, "aws_1", + alias="central", + region="eu-central-1" + ) + destination = S3Bucket(self, "destination", + bucket="tf-test-bucket-destination-12345" + ) + source = S3Bucket(self, "source", + bucket="tf-test-bucket-source-12345", + provider=central + ) + S3BucketAcl(self, "source_bucket_acl", + acl="private", + bucket=source.id, + provider=central + ) + aws_s3_bucket_versioning_destination = S3BucketVersioningA(self, "destination_5", + bucket=destination.id, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Enabled" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_versioning_destination.override_logical_id("destination") + aws_s3_bucket_versioning_source = S3BucketVersioningA(self, "source_6", + bucket=source.id, + provider=central, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Enabled" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_versioning_source.override_logical_id("source") + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["s3.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + replication = DataAwsIamPolicyDocument(self, "replication", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:GetReplicationConfiguration", "s3:ListBucket"], + effect="Allow", + resources=[source.arn] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:GetObjectVersionForReplication", "s3:GetObjectVersionAcl", "s3:GetObjectVersionTagging" + ], + effect="Allow", + resources=["${" + source.arn + "}/*"] + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:ReplicateObject", "s3:ReplicateDelete", "s3:ReplicateTags" + ], + effect="Allow", + resources=["${" + destination.arn + "}/*"] + ) + ] + ) + aws_iam_policy_replication = IamPolicy(self, "replication_9", + name="tf-iam-role-policy-replication-12345", + policy=Token.as_string(replication.json) + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        aws_iam_policy_replication.override_logical_id("replication")
+        aws_iam_role_replication = IamRole(self, "replication_10",
+            assume_role_policy=Token.as_string(assume_role.json),
+            name="tf-iam-role-replication-12345"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_replication.override_logical_id("replication")
+        aws_iam_role_policy_attachment_replication = IamRolePolicyAttachment(self, "replication_11",
+            policy_arn=Token.as_string(aws_iam_policy_replication.arn),
+            role=Token.as_string(aws_iam_role_replication.name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_iam_role_policy_attachment_replication.override_logical_id("replication")
+        aws_s3_bucket_replication_configuration_replication = S3BucketReplicationConfigurationA(self, "replication_12",
+            bucket=source.id,
+            depends_on=[aws_s3_bucket_versioning_source],
+            provider=central,
+            role=Token.as_string(aws_iam_role_replication.arn),
+            rule=[S3BucketReplicationConfigurationRule(
+                destination=S3BucketReplicationConfigurationRuleDestination(
+                    bucket=destination.arn,
+                    storage_class="STANDARD"
+                ),
+                filter=S3BucketReplicationConfigurationRuleFilter(
+                    prefix="foo"
+                ),
+                id="foobar",
+                status="Enabled"
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_replication_configuration_replication.override_logical_id("replication")
+```
+
+### Bi-Directional Replication
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_replication_configuration import S3BucketReplicationConfigurationA
+from imports.aws.s3_bucket_versioning import S3BucketVersioningA
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        east = S3Bucket(self, "east",
+            bucket="tf-test-bucket-east-12345"
+        )
+        west = S3Bucket(self, "west",
+            bucket="tf-test-bucket-west-12345",
+            provider=aws_west
+        )
+        aws_s3_bucket_versioning_east = S3BucketVersioningA(self, "east_2",
+            bucket=east.id,
+            versioning_configuration=S3BucketVersioningVersioningConfiguration(
+                status="Enabled"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_versioning_east.override_logical_id("east")
+        aws_s3_bucket_versioning_west = S3BucketVersioningA(self, "west_3",
+            bucket=west.id,
+            provider=aws_west,
+            versioning_configuration=S3BucketVersioningVersioningConfiguration(
+                status="Enabled"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_versioning_west.override_logical_id("west")
+        S3BucketReplicationConfigurationA(self, "east_to_west",
+            bucket=east.id,
+            depends_on=[aws_s3_bucket_versioning_east],
+            role=east_replication.arn,
+            rule=[S3BucketReplicationConfigurationRule(
+                destination=S3BucketReplicationConfigurationRuleDestination(
+                    bucket=west.arn,
+                    storage_class="STANDARD"
+                ),
+                filter=S3BucketReplicationConfigurationRuleFilter(
+                    prefix="foo"
+                ),
+                id="foobar",
+                status="Enabled"
+            )
+            ]
+        )
+        S3BucketReplicationConfigurationA(self, "west_to_east",
+            bucket=west.id,
+            depends_on=[aws_s3_bucket_versioning_west],
+            provider=aws_west,
+            role=west_replication.arn,
+            rule=[S3BucketReplicationConfigurationRule(
+                destination=S3BucketReplicationConfigurationRuleDestination(
+                    bucket=east.arn,
+                    storage_class="STANDARD"
+                ),
+                filter=S3BucketReplicationConfigurationRuleFilter(
+                    prefix="foo"
+                ),
+                id="foobar",
+                status="Enabled"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the source S3 bucket you want Amazon S3 to monitor.
+* `role` - (Required) ARN of the IAM role for Amazon S3 to assume when replicating the objects.
+* `rule` - (Required) List of configuration blocks describing the rules managing the replication. [See below](#rule).
+* `token` - (Optional) Token to allow replication to be enabled on an Object Lock-enabled bucket. You must contact AWS support for the bucket's "Object Lock token".
+For more details, see [Using S3 Object Lock with replication](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock-managing.html#object-lock-managing-replication).
+
+### rule
+
+~> **NOTE:** Replication to multiple destination buckets requires that `priority` is specified in the `rule` object. If the corresponding rule requires no filter, an empty configuration block `filter {}` must be specified.
+
+~> **NOTE:** Amazon S3's latest version of the replication configuration is V2, which includes the `filter` attribute for replication rules.
+
+~> **NOTE:** The `existing_object_replication` parameter is not supported by Amazon S3 at this time and should not be included in your `rule` configurations. Specifying this parameter will result in `MalformedXML` errors.
+To replicate existing objects, please refer to the [Replicating existing objects with S3 Batch Replication](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-batch-replication-batch.html) documentation in the Amazon S3 User Guide.
+
+The `rule` configuration block supports the following arguments:
+
+* `delete_marker_replication` - (Optional) Whether delete markers are replicated. This argument is only valid with V2 replication configurations (i.e., when `filter` is used). [See below](#delete_marker_replication).
+* `destination` - (Required) Specifies the destination for the rule. [See below](#destination).
+* `existing_object_replication` - (Optional) Replicate existing objects in the source bucket according to the rule configurations. [See below](#existing_object_replication).
+* `filter` - (Optional, Conflicts with `prefix`) Filter that identifies subset of objects to which the replication rule applies. [See below](#filter). If not specified, the `rule` will default to using `prefix`.
+* `id` - (Optional) Unique identifier for the rule. Must be less than or equal to 255 characters in length.
+* `prefix` - (Optional, Conflicts with `filter`, **Deprecated**) Object key name prefix identifying one or more objects to which the rule applies.
Must be less than or equal to 1024 characters in length. Defaults to an empty string (`""`) if `filter` is not specified. +* `priority` - (Optional) Priority associated with the rule. Priority should only be set if `filter` is configured. If not provided, defaults to `0`. Priority must be unique between multiple rules. +* `source_selection_criteria` - (Optional) Specifies special object selection criteria. [See below](#source_selection_criteria). +* `status` - (Required) Status of the rule. Either `"Enabled"` or `"Disabled"`. The rule is ignored if status is not "Enabled". + +### delete_marker_replication + +~> **NOTE:** This argument is only available with V2 replication configurations. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +The `delete_marker_replication` configuration block supports the following arguments: + +* `status` - (Required) Whether delete markers should be replicated. Either `"Enabled"` or `"Disabled"`. + +### destination + +The `destination` configuration block supports the following arguments: + +* `access_control_translation` - (Optional) Configuration block that specifies the overrides to use for object owners on replication. [See below](#access_control_translation). Specify this only in a cross-account scenario (where source and destination bucket owners are not the same), and you want to change replica ownership to the AWS account that owns the destination bucket. If this is not specified in the replication configuration, the replicas are owned by same AWS account that owns the source object. Must be used in conjunction with `account` owner override configuration. +* `account` - (Optional) Account ID to specify the replica ownership. Must be used in conjunction with `access_control_translation` override configuration. +* `bucket` - (Required) ARN of the bucket where you want Amazon S3 to store the results. +* `encryption_configuration` - (Optional) Configuration block that provides information about encryption. [See below](#encryption_configuration). If `source_selection_criteria` is specified, you must specify this element. +* `metrics` - (Optional) Configuration block that specifies replication metrics-related settings enabling replication metrics and events. [See below](#metrics). +* `replication_time` - (Optional) Configuration block that specifies S3 Replication Time Control (S3 RTC), including whether S3 RTC is enabled and the time when all objects and operations on objects must be replicated. [See below](#replication_time). Replication Time Control must be used in conjunction with `metrics`. +* `storage_class` - (Optional) The [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Destination.html#AmazonS3-Type-Destination-StorageClass) used to store the object. By default, Amazon S3 uses the storage class of the source object to create the object replica. + +### access_control_translation + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +The `access_control_translation` configuration block supports the following arguments: + +* `owner` - (Required) Specifies the replica ownership. 
For default and valid values, see [PUT bucket replication](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketPUTreplication.html) in the Amazon S3 API Reference. Valid values: `Destination`.
+
+### encryption_configuration
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+The `encryption_configuration` configuration block supports the following arguments:
+
+* `replica_kms_key_id` - (Required) ID (Key ARN or Alias ARN) of the customer managed AWS KMS key stored in AWS Key Management Service (KMS) for the destination bucket.
+
+### metrics
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+The `metrics` configuration block supports the following arguments:
+
+* `event_threshold` - (Optional) Configuration block that specifies the time threshold for emitting the `s3:Replication:OperationMissedThreshold` event. [See below](#event_threshold).
+* `status` - (Required) Status of the Destination Metrics. Either `"Enabled"` or `"Disabled"`.
+
+### event_threshold
+
+The `event_threshold` configuration block supports the following arguments:
+
+* `minutes` - (Required) Time in minutes. Valid values: `15`.
+
+### replication_time
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+The `replication_time` configuration block supports the following arguments:
+
+* `status` - (Required) Status of the Replication Time Control. Either `"Enabled"` or `"Disabled"`.
+* `time` - (Required) Configuration block specifying the time by which replication should be complete for all objects and operations on objects. [See below](#time).
+
+### time
+
+The `time` configuration block supports the following arguments:
+
+* `minutes` - (Required) Time in minutes. Valid values: `15`.
+
+### existing_object_replication
+
+~> **NOTE:** Replication for existing objects requires activation by AWS Support. See [What is and is not replicated](https://docs.aws.amazon.com/AmazonS3/latest/userguide/replication-what-is-isnot-replicated.html#existing-object-replication) in the Amazon S3 User Guide.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+The `existing_object_replication` configuration block supports the following arguments:
+
+* `status` - (Required) Whether the existing objects should be replicated. Either `"Enabled"` or `"Disabled"`.
+
+### filter
+
+~> **NOTE:** The `filter` argument must be specified as either an empty configuration block (`filter {}`) to imply the rule requires no filter or with exactly one of `prefix`, `tag`, or `and`.
+Replication configuration V1 supports filtering based on only the `prefix` attribute. For backwards compatibility, Amazon S3 continues to support the V1 configuration.
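+
+As a concrete illustration of these filter shapes, and of the `priority` requirement noted under [rule](#rule), here is a hand-written sketch (not `cdktf convert` output) of a two-rule `rule` list. The `S3BucketReplicationConfigurationRuleDeleteMarkerReplication` and `S3BucketReplicationConfigurationRuleFilterTag` struct names are inferred from the binding naming convention used elsewhere on this page and may differ in your generated bindings:
+
+```python
+# Hand-written sketch, not 'cdktf convert' output; some struct names are assumed.
+from imports.aws.s3_bucket_replication_configuration import (
+    S3BucketReplicationConfigurationRule,
+    S3BucketReplicationConfigurationRuleDeleteMarkerReplication,
+    S3BucketReplicationConfigurationRuleDestination,
+    S3BucketReplicationConfigurationRuleFilter,
+    S3BucketReplicationConfigurationRuleFilterTag,
+)
+
+rules = [
+    S3BucketReplicationConfigurationRule(
+        id="all-objects",
+        priority=1,
+        status="Enabled",
+        # An empty filter (the HCL `filter {}`): the rule applies to all objects.
+        filter=S3BucketReplicationConfigurationRuleFilter(),
+        # V2 rules (i.e., rules using `filter`) can carry delete_marker_replication.
+        delete_marker_replication=S3BucketReplicationConfigurationRuleDeleteMarkerReplication(
+            status="Enabled"
+        ),
+        destination=S3BucketReplicationConfigurationRuleDestination(
+            bucket="arn:aws:s3:::tf-test-bucket-destination-12345",
+            storage_class="STANDARD"
+        )
+    ),
+    S3BucketReplicationConfigurationRule(
+        id="tagged-only",
+        priority=2,  # priority must be unique across rules
+        status="Enabled",
+        # A non-empty filter sets exactly one of prefix, tag, or and.
+        filter=S3BucketReplicationConfigurationRuleFilter(
+            tag=S3BucketReplicationConfigurationRuleFilterTag(
+                key="Replicate",
+                value="true"
+            )
+        ),
+        delete_marker_replication=S3BucketReplicationConfigurationRuleDeleteMarkerReplication(
+            status="Disabled"
+        ),
+        destination=S3BucketReplicationConfigurationRuleDestination(
+            bucket="arn:aws:s3:::tf-test-bucket-destination-12345",
+            storage_class="STANDARD"
+        )
+    ),
+]
+```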
+
+The `filter` configuration block supports the following arguments:
+
+* `and` - (Optional) Configuration block for specifying rule filters. This element is required only if you specify more than one filter. See [and](#and) below for more details.
+* `prefix` - (Optional) Object key name prefix that identifies subset of objects to which the rule applies. Must be less than or equal to 1024 characters in length.
+* `tag` - (Optional) Configuration block for specifying a tag key and value. [See below](#tag).
+
+### and
+
+The `and` configuration block supports the following arguments:
+
+* `prefix` - (Optional) Object key name prefix that identifies subset of objects to which the rule applies. Must be less than or equal to 1024 characters in length.
+* `tags` - (Optional, Required if `prefix` is configured) Map of tags (key and value pairs) that identifies a subset of objects to which the rule applies. The rule applies only to objects having all the tags in its tag set.
+
+### tag
+
+The `tag` configuration block supports the following arguments:
+
+* `key` - (Required) Name of the object key.
+* `value` - (Required) Value of the tag.
+
+### source_selection_criteria
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+The `source_selection_criteria` configuration block supports the following arguments:
+
+* `replica_modifications` - (Optional) Configuration block that you can specify for selections for modifications on replicas. Amazon S3 doesn't replicate replica modifications by default. In the latest version of replication configuration (when `filter` is specified), you can specify this element and set the status to `Enabled` to replicate modifications on replicas.
+
+* `sse_kms_encrypted_objects` - (Optional) Configuration block for filter information for the selection of Amazon S3 objects encrypted with AWS KMS. If specified, `replica_kms_key_id` in `destination` `encryption_configuration` must be specified as well.
+
+### replica_modifications
+
+The `replica_modifications` configuration block supports the following arguments:
+
+* `status` - (Required) Whether Amazon S3 replicates modifications made to replicas. Either `"Enabled"` or `"Disabled"`.
+
+### sse_kms_encrypted_objects
+
+The `sse_kms_encrypted_objects` configuration block supports the following arguments:
+
+* `status` - (Required) Whether Amazon S3 replicates objects created with server-side encryption using an AWS KMS key. Either `"Enabled"` or `"Disabled"`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - S3 source bucket name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket replication configuration using the `bucket`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import S3 bucket replication configuration using the `bucket`.
For example: + +```console +% terraform import aws_s3_bucket_replication_configuration.replication bucket-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_request_payment_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_request_payment_configuration.html.markdown new file mode 100644 index 00000000000..d0dc374a9fc --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_request_payment_configuration.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_request_payment_configuration" +description: |- + Provides an S3 bucket request payment configuration resource. +--- + + + +# Resource: aws_s3_bucket_request_payment_configuration + +Provides an S3 bucket request payment configuration resource. For more information, see [Requester Pays Buckets](https://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html). + +~> **NOTE:** Destroying an `aws_s3_bucket_request_payment_configuration` resource resets the bucket's `payer` to the S3 default: the bucket owner. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_request_payment_configuration import S3BucketRequestPaymentConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketRequestPaymentConfiguration(self, "example", + bucket=Token.as_string(aws_s3_bucket_example.id), + payer="Requester" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `payer` - (Required) Specifies who pays for the download and request fees. Valid values: `BucketOwner`, `Requester`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket request payment configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). 
For example:

+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+**Using `terraform import` to import** S3 bucket request payment configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_request_payment_configuration.example bucket-name
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_request_payment_configuration.example bucket-name,123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_bucket_server_side_encryption_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_server_side_encryption_configuration.html.markdown
new file mode 100644
index 00000000000..13ffa62f923
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_bucket_server_side_encryption_configuration.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_server_side_encryption_configuration"
+description: |-
+  Provides an S3 bucket server-side encryption configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_server_side_encryption_configuration
+
+Provides an S3 bucket server-side encryption configuration resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_key import KmsKey
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_server_side_encryption_configuration import S3BucketServerSideEncryptionConfigurationA
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        mykey = KmsKey(self, "mykey",
+            deletion_window_in_days=10,
+            description="This key is used to encrypt bucket objects"
+        )
+        mybucket = S3Bucket(self, "mybucket",
+            bucket="mybucket"
+        )
+        S3BucketServerSideEncryptionConfigurationA(self, "example",
+            bucket=mybucket.id,
+            rule=[S3BucketServerSideEncryptionConfigurationRuleA(
+                apply_server_side_encryption_by_default=S3BucketServerSideEncryptionConfigurationRuleApplyServerSideEncryptionByDefaultA(
+                    kms_master_key_id=mykey.arn,
+                    sse_algorithm="aws:kms"
+                )
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required, Forces new resource) ID (name) of the bucket.
+* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner.
+* `rule` - (Required) Set of server-side encryption configuration rules. [See below](#rule). Currently, only a single rule is supported.
+
+### rule
+
+The `rule` configuration block supports the following arguments:
+
+* `apply_server_side_encryption_by_default` - (Optional) Single object for setting server-side encryption by default. [See below](#apply_server_side_encryption_by_default).
+* `bucket_key_enabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS.
+
+### apply_server_side_encryption_by_default
+
+The `apply_server_side_encryption_by_default` configuration block supports the following arguments:
+
+* `sse_algorithm` - (Required) Server-side encryption algorithm to use. Valid values are `AES256` and `aws:kms`.
+* `kms_master_key_id` - (Optional) AWS KMS master key ID used for the SSE-KMS encryption. This can only be used when you set the value of `sse_algorithm` to `aws:kms`. The default `aws/s3` AWS KMS master key is used if this element is absent while the `sse_algorithm` is `aws:kms`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket server-side encryption configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`).
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** S3 bucket server-side encryption configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_server_side_encryption_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_server_side_encryption_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_versioning.html.markdown b/website/docs/cdktf/python/r/s3_bucket_versioning.html.markdown new file mode 100644 index 00000000000..25fd09a0dc5 --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_versioning.html.markdown @@ -0,0 +1,196 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_versioning" +description: |- + Provides an S3 bucket versioning resource. +--- + + + +# Resource: aws_s3_bucket_versioning + +Provides a resource for controlling versioning on an S3 bucket. +Deleting this resource will either suspend versioning on the associated S3 bucket or +simply remove the resource from Terraform state if the associated S3 bucket is unversioned. + +For more information, see [How S3 versioning works](https://docs.aws.amazon.com/AmazonS3/latest/userguide/manage-versioning-examples.html). + +~> **NOTE:** If you are enabling versioning on the bucket for the first time, AWS recommends that you wait for 15 minutes after enabling versioning before issuing write operations (PUT or DELETE) on objects in the bucket. + +## Example Usage + +### With Versioning Enabled + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_versioning import S3BucketVersioningA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example-bucket" + ) + aws_s3_bucket_acl_example = S3BucketAcl(self, "example_1", + acl="private", + bucket=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_acl_example.override_logical_id("example") + S3BucketVersioningA(self, "versioning_example", + bucket=example.id, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Enabled" + ) + ) +``` + +### With Versioning Disabled + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_bucket_versioning import S3BucketVersioningA +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="example-bucket" + ) + aws_s3_bucket_acl_example = S3BucketAcl(self, "example_1", + acl="private", + bucket=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_acl_example.override_logical_id("example") + S3BucketVersioningA(self, "versioning_example", + bucket=example.id, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Disabled" + ) + ) +``` + +### Object Dependency On Versioning + +When you create an object whose `version_id` you need and an `aws_s3_bucket_versioning` resource in the same configuration, you are more likely to have success by ensuring the `s3_object` depends either implicitly (see below) or explicitly (i.e., using `depends_on = [aws_s3_bucket_versioning.example]`) on the `aws_s3_bucket_versioning` resource. + +~> **NOTE:** For critical and/or production S3 objects, do not create a bucket, enable versioning, and create an object in the bucket within the same configuration. Doing so will not allow the AWS-recommended 15 minutes between enabling versioning and writing to the bucket. + +This example shows the `aws_s3_object.example` depending implicitly on the versioning resource through the reference to `aws_s3_bucket_versioning.example.bucket` to define `bucket`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_versioning import S3BucketVersioningA +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = S3Bucket(self, "example", + bucket="yotto" + ) + aws_s3_bucket_versioning_example = S3BucketVersioningA(self, "example_1", + bucket=example.id, + versioning_configuration=S3BucketVersioningVersioningConfiguration( + status="Enabled" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_versioning_example.override_logical_id("example") + aws_s3_object_example = S3Object(self, "example_2", + bucket=Token.as_string(aws_s3_bucket_versioning_example.id), + key="droeloe", + source="example.txt" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_object_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the S3 bucket. +* `versioning_configuration` - (Required) Configuration block for the versioning parameters. [See below](#versioning_configuration). +* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `mfa` - (Optional, Required if `versioning_configuration` `mfa_delete` is enabled) Concatenation of the authentication device's serial number, a space, and the value that is displayed on your authentication device. + +### versioning_configuration + +~> **Note:** While the `versioning_configuration.status` parameter supports `Disabled`, this value is only intended for _creating_ or _importing_ resources that correspond to unversioned S3 buckets. +Updating the value from `Enabled` or `Suspended` to `Disabled` will result in errors as the AWS S3 API does not support returning buckets to an unversioned state. + +The `versioning_configuration` configuration block supports the following arguments: + +* `status` - (Required) Versioning state of the bucket. Valid values: `Enabled`, `Suspended`, or `Disabled`. `Disabled` should only be used when creating or importing resources that correspond to unversioned S3 buckets. +* `mfa_delete` - (Optional) Specifies whether MFA delete is enabled in the bucket versioning configuration. Valid values: `Enabled` or `Disabled`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket versioning using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). 
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** S3 bucket versioning using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_versioning.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_versioning.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_bucket_website_configuration.html.markdown b/website/docs/cdktf/python/r/s3_bucket_website_configuration.html.markdown new file mode 100644 index 00000000000..572a2badf34 --- /dev/null +++ b/website/docs/cdktf/python/r/s3_bucket_website_configuration.html.markdown @@ -0,0 +1,183 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_website_configuration" +description: |- + Provides an S3 bucket website configuration resource. +--- + + + +# Resource: aws_s3_bucket_website_configuration + +Provides an S3 bucket website configuration resource. For more information, see [Hosting Websites on S3](https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html). + +## Example Usage + +### With `routing_rule` configured + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.s3_bucket_website_configuration import S3BucketWebsiteConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketWebsiteConfiguration(self, "example", + bucket=Token.as_string(aws_s3_bucket_example.id), + error_document=S3BucketWebsiteConfigurationErrorDocument( + key="error.html" + ), + index_document=S3BucketWebsiteConfigurationIndexDocument( + suffix="index.html" + ), + routing_rule=[S3BucketWebsiteConfigurationRoutingRule( + condition=S3BucketWebsiteConfigurationRoutingRuleCondition( + key_prefix_equals="docs/" + ), + redirect=S3BucketWebsiteConfigurationRoutingRuleRedirect( + replace_key_prefix_with="documents/" + ) + ) + ] + ) +``` + +### With `routing_rules` configured + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket_website_configuration import S3BucketWebsiteConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3BucketWebsiteConfiguration(self, "example", + bucket=Token.as_string(aws_s3_bucket_example.id), + error_document=S3BucketWebsiteConfigurationErrorDocument( + key="error.html" + ), + index_document=S3BucketWebsiteConfigurationIndexDocument( + suffix="index.html" + ), + routing_rules="[{\n \"Condition\": {\n \"KeyPrefixEquals\": \"docs/\"\n },\n \"Redirect\": {\n \"ReplaceKeyPrefixWith\": \"\"\n }\n}]\n\n" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `error_document` - (Optional, Conflicts with `redirect_all_requests_to`) Name of the error document for the website. [See below](#error_document). +* `expected_bucket_owner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `index_document` - (Optional, Required if `redirect_all_requests_to` is not specified) Name of the index document for the website. [See below](#index_document). +* `redirect_all_requests_to` - (Optional, Required if `index_document` is not specified) Redirect behavior for every request to this bucket's website endpoint. [See below](#redirect_all_requests_to). Conflicts with `error_document`, `index_document`, and `routing_rule`. +* `routing_rule` - (Optional, Conflicts with `redirect_all_requests_to` and `routing_rules`) List of rules that define when a redirect is applied and the redirect behavior. [See below](#routing_rule). +* `routing_rules` - (Optional, Conflicts with `routing_rule` and `redirect_all_requests_to`) JSON array containing [routing rules](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules.html) + describing redirect behavior and when redirects are applied. Use this parameter when your routing rules contain empty String values (`""`) as seen in the [example above](#with-routing_rules-configured). + +### error_document + +The `error_document` configuration block supports the following arguments: + +* `key` - (Required) Object key name to use when a 4XX class error occurs. 
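+
+As an alternative to serving a static error document, a `routing_rule` (described below) can redirect when a specific HTTP error code is returned. A hand-written sketch, not `cdktf convert` output, reusing the struct classes from the example above; the key and redirect-code values are illustrative:
+
+```python
+# Hand-written sketch: redirect 404 responses to the site entry point instead
+# of serving a static error document. Values are illustrative.
+from imports.aws.s3_bucket_website_configuration import (
+    S3BucketWebsiteConfigurationRoutingRule,
+    S3BucketWebsiteConfigurationRoutingRuleCondition,
+    S3BucketWebsiteConfigurationRoutingRuleRedirect,
+)
+
+not_found_rule = S3BucketWebsiteConfigurationRoutingRule(
+    condition=S3BucketWebsiteConfigurationRoutingRuleCondition(
+        # Applies only when S3 would otherwise return this HTTP error code.
+        http_error_code_returned_equals="404"
+    ),
+    redirect=S3BucketWebsiteConfigurationRoutingRuleRedirect(
+        replace_key_with="index.html",
+        http_redirect_code="302"
+    )
+)
+```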
+
+### index_document
+
+The `index_document` configuration block supports the following arguments:
+
+* `suffix` - (Required) Suffix that is appended to a request that is for a directory on the website endpoint.
+For example, if the suffix is `index.html` and you make a request to `samplebucket/images/`, the data that is returned will be for the object with the key name `images/index.html`.
+The suffix must not be empty and must not include a slash character.
+
+### redirect_all_requests_to
+
+The `redirect_all_requests_to` configuration block supports the following arguments:
+
+* `host_name` - (Required) Name of the host where requests are redirected.
+* `protocol` - (Optional) Protocol to use when redirecting requests. The default is the protocol that is used in the original request. Valid values: `http`, `https`.
+
+### routing_rule
+
+The `routing_rule` configuration block supports the following arguments:
+
+* `condition` - (Optional) Configuration block for describing a condition that must be met for the specified redirect to apply. [See below](#condition).
+* `redirect` - (Required) Configuration block for redirect information. [See below](#redirect).
+
+### condition
+
+The `condition` configuration block supports the following arguments:
+
+* `http_error_code_returned_equals` - (Optional, Required if `key_prefix_equals` is not specified) HTTP error code when the redirect is applied. If specified with `key_prefix_equals`, then both must be true for the redirect to be applied.
+* `key_prefix_equals` - (Optional, Required if `http_error_code_returned_equals` is not specified) Object key name prefix when the redirect is applied. If specified with `http_error_code_returned_equals`, then both must be true for the redirect to be applied.
+
+### redirect
+
+The `redirect` configuration block supports the following arguments:
+
+* `host_name` - (Optional) Host name to use in the redirect request.
+* `http_redirect_code` - (Optional) HTTP redirect code to use on the response.
+* `protocol` - (Optional) Protocol to use when redirecting requests. The default is the protocol that is used in the original request. Valid values: `http`, `https`.
+* `replace_key_prefix_with` - (Optional, Conflicts with `replace_key_with`) Object key prefix to use in the redirect request. For example, to redirect requests for all pages with prefix `docs/` (objects in the `docs/` folder) to `documents/`, you can set a `condition` block with `key_prefix_equals` set to `docs/` and in the `redirect` set `replace_key_prefix_with` to `documents/`.
+* `replace_key_with` - (Optional, Conflicts with `replace_key_prefix_with`) Specific object key to use in the redirect request. For example, redirect request to `error.html`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expected_bucket_owner` separated by a comma (`,`) if the latter is provided.
+* `website_domain` - Domain of the website endpoint. This is used to create Route 53 alias records.
+* `website_endpoint` - Website endpoint.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket website configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`).
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** S3 bucket website configuration using the `bucket` or using the `bucket` and `expected_bucket_owner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_website_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expected_bucket_owner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_website_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3_object.html.markdown b/website/docs/cdktf/python/r/s3_object.html.markdown new file mode 100644 index 00000000000..0c2393bba3a --- /dev/null +++ b/website/docs/cdktf/python/r/s3_object.html.markdown @@ -0,0 +1,275 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_object" +description: |- + Provides an S3 object resource. +--- + + + +# Resource: aws_s3_object + +Provides an S3 object resource. + +## Example Usage + +### Uploading a file to a bucket + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3Object(self, "object", + bucket="your_bucket_name", + etag=Token.as_string(Fn.filemd5("path/to/file")), + key="new_object_key", + source="path/to/file" + ) +``` + +### Encrypting with KMS Key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.kms_key import KmsKey +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplekms = KmsKey(self, "examplekms", + deletion_window_in_days=7, + description="KMS key 1" + ) + examplebucket = S3Bucket(self, "examplebucket", + bucket="examplebuckettftest" + ) + S3BucketAcl(self, "example", + acl="private", + bucket=examplebucket.id + ) + aws_s3_object_example = S3Object(self, "example_3", + bucket=examplebucket.id, + key="someobject", + kms_key_id=examplekms.arn, + source="index.html" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_object_example.override_logical_id("example") +``` + +### Server Side Encryption with S3 Default Master Key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplebucket = S3Bucket(self, "examplebucket", + bucket="examplebuckettftest" + ) + S3BucketAcl(self, "example", + acl="private", + bucket=examplebucket.id + ) + aws_s3_object_example = S3Object(self, "example_2", + bucket=examplebucket.id, + key="someobject", + server_side_encryption="aws:kms", + source="index.html" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_object_example.override_logical_id("example") +``` + +### Server Side Encryption with AWS-Managed Key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_acl import S3BucketAcl +from imports.aws.s3_object import S3Object +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + examplebucket = S3Bucket(self, "examplebucket", + bucket="examplebuckettftest" + ) + S3BucketAcl(self, "example", + acl="private", + bucket=examplebucket.id + ) + aws_s3_object_example = S3Object(self, "example_2", + bucket=examplebucket.id, + key="someobject", + server_side_encryption="AES256", + source="index.html" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_object_example.override_logical_id("example") +``` + +### S3 Object Lock + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_bucket_acl import S3BucketAcl
+from imports.aws.s3_bucket_versioning import S3BucketVersioningA
+from imports.aws.s3_object import S3Object
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        examplebucket = S3Bucket(self, "examplebucket",
+            bucket="examplebuckettftest",
+            object_lock_enabled=True
+        )
+        S3BucketAcl(self, "example",
+            acl="private",
+            bucket=examplebucket.id
+        )
+        aws_s3_bucket_versioning_example = S3BucketVersioningA(self, "example_2",
+            bucket=examplebucket.id,
+            versioning_configuration=S3BucketVersioningVersioningConfiguration(
+                status="Enabled"
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_bucket_versioning_example.override_logical_id("example")
+        S3Object(self, "examplebucket_object",
+            bucket=examplebucket.id,
+            depends_on=[aws_s3_bucket_versioning_example],
+            force_destroy=True,
+            key="someobject",
+            object_lock_legal_hold_status="ON",
+            object_lock_mode="GOVERNANCE",
+            object_lock_retain_until_date="2021-12-31T23:59:60Z",
+            source="important.txt"
+        )
+```
+
+## Argument Reference
+
+-> **Note:** If you specify `content_encoding` you are responsible for encoding the body appropriately. `source`, `content`, and `content_base64` all expect already encoded/compressed bytes.
+
+The following arguments are required:
+
+* `bucket` - (Required) Name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified.
+* `key` - (Required) Name of the object once it is in the bucket.
+
+The following arguments are optional:
+
+* `acl` - (Optional) [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `aws-exec-read`, `authenticated-read`, `bucket-owner-read`, and `bucket-owner-full-control`.
+* `bucket_key_enabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS.
+* `cache_control` - (Optional) Caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
+* `content_base64` - (Optional, conflicts with `source` and `content`) Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file.
+* `content_disposition` - (Optional) Presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
+* `content_encoding` - (Optional) Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
+* `content_language` - (Optional) Language the content is in, e.g., en-US or en-GB.
+* `content_type` - (Optional) Standard MIME type describing the format of the object data, e.g., `application/octet-stream`. All valid MIME types are accepted for this input.
+* `content` - (Optional, conflicts with `source` and `content_base64`) Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text.
+* `etag` - (Optional) Triggers updates when the value changes. The only meaningful value is `filemd5("path/to/file")` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier). This attribute is not compatible with KMS encryption (`kms_key_id` or `server_side_encryption = "aws:kms"`). Also, if an object is larger than 16 MB, the AWS Management Console will upload or copy that object as a multipart upload, and therefore the ETag will not be an MD5 digest (see `source_hash` instead).
+* `force_destroy` - (Optional) Whether to allow the object to be deleted by removing any legal hold on any object version. Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
+* `kms_key_id` - (Optional) ARN of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the `aws_kms_key` resource, use the `arn` attribute. If referencing the `aws_kms_alias` data source or resource, use the `target_key_arn` attribute. Terraform will only perform drift detection if a configuration value is provided.
+* `metadata` - (Optional) Map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`; note that only lowercase labels are currently supported by the AWS Go API).
+* `object_lock_legal_hold_status` - (Optional) [Legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
+* `object_lock_mode` - (Optional) Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `object_lock_retain_until_date` - (Optional) Date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
+* `server_side_encryption` - (Optional) Server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`".
+* `source_hash` - (Optional) Triggers updates like `etag` but useful to address `etag` encryption limitations. Set using `filemd5("path/to/source")` (Terraform 0.11.12 or later). (The value is only stored in state and not saved by AWS.)
+* `source` - (Optional, conflicts with `content` and `content_base64`) Path to a file that will be read and uploaded as raw bytes for the object content.
+* `storage_class` - (Optional) [Storage Class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#AmazonS3-PutObject-request-header-StorageClass) for the object. Defaults to "`STANDARD`".
+* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `website_redirect` - (Optional) Target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
+
+If no content is provided through `source`, `content` or `content_base64`, then the object will be empty.
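+
+Because `source`, `content`, and `content_base64` are mutually exclusive, each object can name at most one of them. The following minimal sketch contrasts an inline string with a file-based upload (the bucket, key, and file names are illustrative, not taken from the examples above):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.s3_object import S3Object
+class ContentSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Inline string: uploaded as UTF-8-encoded text.
+        S3Object(self, "from_content",
+            bucket="some-bucket",
+            content="hello from Terraform",
+            key="inline.txt"
+        )
+        # Local file: read and uploaded as raw bytes.
+        S3Object(self, "from_source",
+            bucket="some-bucket",
+            key="from-file.txt",
+            source="files/report.csv"
+        )
+```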
+
+-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html).
+* `id` - The `key` of the resource supplied above.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `version_id` - Unique version ID value for the object, if bucket versioning is enabled.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import objects using the `id` or S3 URL. For example:
+
+Import using the `id`, which is the bucket name and the key together:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Import using S3 URL syntax:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import objects using the `id` or S3 URL. For example:
+
+Import using the `id`, which is the bucket name and the key together:
+
+```console
+% terraform import aws_s3_object.example some-bucket-name/some/key.txt
+```
+
+Import using S3 URL syntax:
+
+```console
+% terraform import aws_s3_object.example s3://some-bucket-name/some/key.txt
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3_object_copy.html.markdown b/website/docs/cdktf/python/r/s3_object_copy.html.markdown
new file mode 100644
index 00000000000..1bdf542020e
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3_object_copy.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_object_copy"
+description: |-
+  Provides a resource for copying an S3 object.
+---
+
+
+
+# Resource: aws_s3_object_copy
+
+Provides a resource for copying an S3 object.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_object_copy import S3ObjectCopy, S3ObjectCopyGrant
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        S3ObjectCopy(self, "test",
+            bucket="destination_bucket",
+            grant=[S3ObjectCopyGrant(
+                permissions=["READ"],
+                type="Group",
+                uri="http://acs.amazonaws.com/groups/global/AllUsers"
+            )
+            ],
+            key="destination_key",
+            source="source_bucket/source_key"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `bucket` - (Required) Name of the bucket to put the file in.
+* `key` - (Required) Name of the object once it is in the bucket.
+* `source` - (Required) Specifies the source object for the copy operation. You specify the value in one of two formats. For objects not accessed through an access point, specify the name of the source bucket and the key of the source object, separated by a slash (`/`). For example, `testbucket/test1.json`. For objects accessed through access points, specify the ARN of the object as accessed through the access point, in the format `arn:aws:s3:<Region>:<account-id>:accesspoint/<access-point-name>/object/<key>`. For example, `arn:aws:s3:us-west-2:9999912999:accesspoint/my-access-point/object/testbucket/test1.json`.
+
+The following arguments are optional:
+
+* `acl` - (Optional) [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `authenticated-read`, `aws-exec-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Conflicts with `grant`.
+* `cache_control` - (Optional) Specifies caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
+* `content_disposition` - (Optional) Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
+* `content_encoding` - (Optional) Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
+* `content_language` - (Optional) Language the content is in, e.g., en-US or en-GB.
+* `content_type` - (Optional) Standard MIME type describing the format of the object data, e.g., `application/octet-stream`. All valid MIME types are accepted for this input.
+* `copy_if_match` - (Optional) Copies the object if its entity tag (ETag) matches the specified tag.
+* `copy_if_modified_since` - (Optional) Copies the object if it has been modified since the specified time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `copy_if_none_match` - (Optional) Copies the object if its entity tag (ETag) is different from the specified ETag.
+* `copy_if_unmodified_since` - (Optional) Copies the object if it hasn't been modified since the specified time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `customer_algorithm` - (Optional) Specifies the algorithm to use when encrypting the object (for example, AES256).
+* `customer_key` - (Optional) Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon S3 does not store the encryption key. The key must be appropriate for use with the algorithm specified in the `x-amz-server-side-encryption-customer-algorithm` header.
+* `customer_key_md5` - (Optional) Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the encryption key was transmitted without error.
+* `expected_bucket_owner` - (Optional) Account ID of the expected destination bucket owner. If the destination bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
+* `expected_source_bucket_owner` - (Optional) Account ID of the expected source bucket owner. If the source bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
+* `expires` - (Optional) Date and time at which the object is no longer cacheable, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `force_destroy` - (Optional) Allow the object to be deleted by removing any legal hold on any object version. Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
+* `grant` - (Optional) Configuration block for header grants. Documented below. Conflicts with `acl`.
+* `kms_encryption_context` - (Optional) Specifies the AWS KMS Encryption Context to use for object encryption. The value is a base64-encoded UTF-8 string holding JSON with the encryption context key-value pairs.
+* `kms_key_id` - (Optional) Specifies the AWS KMS Key ARN to use for object encryption. This value is a fully qualified **ARN** of the KMS Key. If using `aws_kms_key`, use the exported `arn` attribute: `kms_key_id = aws_kms_key.foo.arn`
+* `metadata` - (Optional) Map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`; note that only lowercase labels are currently supported by the AWS Go API).
+* `metadata_directive` - (Optional) Specifies whether the metadata is copied from the source object or replaced with metadata provided in the request. Valid values are `COPY` and `REPLACE`.
+* `object_lock_legal_hold_status` - (Optional) The [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
+* `object_lock_mode` - (Optional) Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `object_lock_retain_until_date` - (Optional) Date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
+* `request_payer` - (Optional) Confirms that the requester knows that they will be charged for the request. Bucket owners need not specify this parameter in their requests.
For information about downloading objects from requester pays buckets, see [Downloading Objects in Requester Pays Buckets](https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html) in the Amazon S3 Developer Guide. If included, the only valid value is `requester`.
+* `server_side_encryption` - (Optional) Specifies server-side encryption of the object in S3. Valid values are `AES256` and `aws:kms`.
+* `source_customer_algorithm` - (Optional) Specifies the algorithm to use when decrypting the source object (for example, AES256).
+* `source_customer_key` - (Optional) Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source object. The encryption key provided in this header must be one that was used when the source object was created.
+* `source_customer_key_md5` - (Optional) Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the encryption key was transmitted without error.
+* `storage_class` - (Optional) Specifies the desired [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CopyObject.html#AmazonS3-CopyObject-request-header-StorageClass) for the object. Defaults to `STANDARD`.
+* `tagging_directive` - (Optional) Specifies whether the object tag-set is copied from the source object or replaced with the tag-set provided in the request. Valid values are `COPY` and `REPLACE`.
+* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `website_redirect` - (Optional) Specifies a target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
+
+### grant
+
+-> For more information on header grants, see the Amazon Simple Storage Service (S3) [API Reference: PutObjectAcl](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObjectAcl.html).
+
+This configuration block has the following required arguments:
+
+* `permissions` - (Required) List of permissions to grant to grantee. Valid values are `READ`, `READ_ACP`, `WRITE_ACP`, and `FULL_CONTROL`.
+* `type` - (Required) Type of grantee. Valid values are `CanonicalUser`, `Group`, and `AmazonCustomerByEmail`.
+
+This configuration block has the following optional arguments (one of the three is required):
+
+* `email` - (Optional) Email address of the grantee. Used only when `type` is `AmazonCustomerByEmail`.
+* `id` - (Optional) Canonical user ID of the grantee. Used only when `type` is `CanonicalUser`.
+* `uri` - (Optional) URI of the grantee group. Used only when `type` is `Group`.
+
+-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data.
For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html).
+* `expiration` - If the object expiration is configured, this attribute will be set.
+* `id` - The `key` of the resource supplied above.
+* `last_modified` - Returns the date that the object was last modified, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `request_charged` - If present, indicates that the requester was successfully charged for the request.
+* `source_version_id` - Version of the copied object in the source bucket.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `version_id` - Version ID of the newly created copy.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3control_access_point_policy.html.markdown b/website/docs/cdktf/python/r/s3control_access_point_policy.html.markdown
new file mode 100644
index 00000000000..99cde7b0567
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3control_access_point_policy.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3control_access_point_policy"
+description: |-
+  Provides a resource to manage an S3 Access Point resource policy.
+---
+
+
+
+# Resource: aws_s3control_access_point_policy
+
+Provides a resource to manage an S3 Access Point resource policy.
+
+~> **NOTE on Access Points and Access Point Policies:** Terraform provides both a standalone Access Point Policy resource and an [Access Point](s3_access_point.html) resource with a resource policy defined in-line. You cannot use an Access Point with an in-line resource policy in conjunction with an Access Point Policy resource. Doing so will cause a conflict of policies and will overwrite the access point's resource policy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_access_point import S3AccessPoint, S3AccessPointPublicAccessBlockConfiguration
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_control_access_point_policy import S3ControlAccessPointPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_s3_access_point_example = S3AccessPoint(self, "example_1",
+            bucket=example.id,
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["policy"]
+            ),
+            name="example",
+            public_access_block_configuration=S3AccessPointPublicAccessBlockConfiguration(
+                block_public_acls=True,
+                block_public_policy=False,
+                ignore_public_acls=True,
+                restrict_public_buckets=False
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_s3_access_point_example.override_logical_id("example") + aws_s3_control_access_point_policy_example = S3ControlAccessPointPolicy(self, "example_2", + access_point_arn=Token.as_string(aws_s3_access_point_example.arn), + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": "s3:GetObjectTagging", + "Effect": "Allow", + "Principal": { + "AWS": "*" + }, + "Resource": "${" + aws_s3_access_point_example.arn + "}/object/*" + } + ], + "Version": "2008-10-17" + })) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_control_access_point_policy_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `access_point_arn` - (Required) The ARN of the access point that you want to associate with the specified policy. +* `policy` - (Required) The policy that you want to apply to the specified access point. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `has_public_access_policy` - Indicates whether this access point currently has a policy that allows public access. +* `id` - The AWS account ID and access point name separated by a colon (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Access Point policies using the `access_point_arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Access Point policies using the `access_point_arn`. For example: + +```console +% terraform import aws_s3control_access_point_policy.example arn:aws:s3:us-west-2:123456789012:accesspoint/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3control_bucket.html.markdown b/website/docs/cdktf/python/r/s3control_bucket.html.markdown new file mode 100644 index 00000000000..01d34377115 --- /dev/null +++ b/website/docs/cdktf/python/r/s3control_bucket.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_bucket" +description: |- + Manages an S3 Control Bucket. +--- + + + +# Resource: aws_s3control_bucket + +Provides a resource to manage an S3 Control Bucket. + +-> This functionality is for managing [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html). To manage S3 Buckets in an AWS Partition, see the [`aws_s3_bucket` resource](/docs/providers/aws/r/s3_bucket.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_control_bucket import S3ControlBucket +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3ControlBucket(self, "example", + bucket="example", + outpost_id=Token.as_string(data_aws_outposts_outpost_example.id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Name of the bucket. 
+* `outpost_id` - (Required) Identifier of the Outpost to contain this bucket.
+
+The following arguments are optional:
+
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the bucket.
+* `creation_date` - UTC creation date in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `id` - Amazon Resource Name (ARN) of the bucket.
+* `public_access_block_enabled` - Boolean whether Public Access Block is enabled.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Control Buckets using Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import S3 Control Buckets using Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_s3control_bucket.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/bucket/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3control_bucket_lifecycle_configuration.html.markdown b/website/docs/cdktf/python/r/s3control_bucket_lifecycle_configuration.html.markdown
new file mode 100644
index 00000000000..ad5aedcf599
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3control_bucket_lifecycle_configuration.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3control_bucket_lifecycle_configuration"
+description: |-
+  Manages an S3 Control Bucket Lifecycle Configuration.
+---
+
+
+
+# Resource: aws_s3control_bucket_lifecycle_configuration
+
+Provides a resource to manage an S3 Control Bucket Lifecycle Configuration.
+
+~> **NOTE:** Each S3 Control Bucket can only have one Lifecycle Configuration. Using multiple instances of this resource against the same S3 Control Bucket will result in perpetual differences on each Terraform run.
+
+-> This functionality is for managing [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html). To manage S3 Bucket Lifecycle Configurations in an AWS Partition, see the [`aws_s3_bucket` resource](/docs/providers/aws/r/s3_bucket.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
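+# Note: `bucket` takes the ARN of an S3 on Outposts bucket, and each rule below
+# pairs an expiration with a prefix filter (objects under logs/ expire after
+# 365 days, objects under temp/ after 7 days).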
+#
+from imports.aws.s3_control_bucket_lifecycle_configuration import S3ControlBucketLifecycleConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        S3ControlBucketLifecycleConfiguration(self, "example",
+            bucket=Token.as_string(aws_s3_control_bucket_example.arn),
+            rule=[{
+                "expiration": {
+                    "days": 365
+                },
+                "filter": {
+                    "prefix": "logs/"
+                },
+                "id": "logs"
+            }, {
+                "expiration": {
+                    "days": 7
+                },
+                "filter": {
+                    "prefix": "temp/"
+                },
+                "id": "temp"
+            }
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `bucket` - (Required) Amazon Resource Name (ARN) of the bucket.
+* `rule` - (Required) Configuration block(s) containing lifecycle rules for the bucket.
+    * `abort_incomplete_multipart_upload` - (Optional) Configuration block containing settings for aborting incomplete multipart uploads.
+        * `days_after_initiation` - (Required) Number of days after which Amazon S3 aborts an incomplete multipart upload.
+    * `expiration` - (Optional) Configuration block containing settings for expiration of objects.
+        * `date` - (Optional) Date the object is to be deleted. Should be in `YYYY-MM-DD` date format, e.g., `2020-09-30`.
+        * `days` - (Optional) Number of days before the object is to be deleted.
+        * `expired_object_delete_marker` - (Optional) Enable to remove a delete marker with no noncurrent versions. Cannot be specified with `date` or `days`.
+    * `filter` - (Optional) Configuration block containing settings for filtering.
+        * `prefix` - (Optional) Object prefix for rule filtering.
+        * `tags` - (Optional) Key-value map of object tags for rule filtering.
+    * `id` - (Required) Unique identifier for the rule.
+    * `status` - (Optional) Status of the rule. Valid values: `Enabled` and `Disabled`. Defaults to `Enabled`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the bucket.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Control Bucket Lifecycle Configurations using the Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import S3 Control Bucket Lifecycle Configurations using the Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_s3control_bucket_lifecycle_configuration.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/bucket/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3control_bucket_policy.html.markdown b/website/docs/cdktf/python/r/s3control_bucket_policy.html.markdown
new file mode 100644
index 00000000000..571b1f596f4
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3control_bucket_policy.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3control_bucket_policy"
+description: |-
+  Manages an S3 Control Bucket Policy.
+---
+
+
+
+# Resource: aws_s3control_bucket_policy
+
+Provides a resource to manage an S3 Control Bucket Policy.
+
+-> This functionality is for managing [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html).
To manage S3 Bucket Policies in an AWS Partition, see the [`aws_s3_bucket_policy` resource](/docs/providers/aws/r/s3_bucket_policy.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_control_bucket_policy import S3ControlBucketPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3ControlBucketPolicy(self, "example", + bucket=Token.as_string(aws_s3_control_bucket_example.arn), + policy=Token.as_string( + Fn.jsonencode({ + "Id": "testBucketPolicy", + "Statement": [{ + "Action": "s3-outposts:PutBucketLifecycleConfiguration", + "Effect": "Deny", + "Principal": { + "AWS": "*" + }, + "Resource": aws_s3_control_bucket_example.arn, + "Sid": "statement1" + } + ], + "Version": "2012-10-17" + })) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Amazon Resource Name (ARN) of the bucket. +* `policy` - (Required) JSON string of the resource policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the bucket. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Control Bucket Policies using the Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import S3 Control Bucket Policies using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_s3control_bucket_policy.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/bucket/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3control_multi_region_access_point.html.markdown b/website/docs/cdktf/python/r/s3control_multi_region_access_point.html.markdown new file mode 100644 index 00000000000..9df34ffd416 --- /dev/null +++ b/website/docs/cdktf/python/r/s3control_multi_region_access_point.html.markdown @@ -0,0 +1,136 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_multi_region_access_point" +description: |- + Provides a resource to manage an S3 Multi-Region Access Point associated with specified buckets. +--- + + + +# Resource: aws_s3control_multi_region_access_point + +Provides a resource to manage an S3 Multi-Region Access Point associated with specified buckets. + +## Example Usage + +### Multiple AWS Buckets in Different Regions + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
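+# Note: two aliased provider instances place the buckets in different regions
+# (us-east-1 and us-west-2); the Multi-Region Access Point below then spans
+# both buckets through its `region` blocks.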
+#
+from imports.aws.provider import AwsProvider
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_control_multi_region_access_point import S3ControlMultiRegionAccessPoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        primary_region = AwsProvider(self, "aws",
+            alias="primary_region",
+            region="us-east-1"
+        )
+        secondary_region = AwsProvider(self, "aws_1",
+            alias="secondary_region",
+            region="us-west-2"
+        )
+        bar_bucket = S3Bucket(self, "bar_bucket",
+            bucket="example-bucket-bar",
+            provider=secondary_region
+        )
+        foo_bucket = S3Bucket(self, "foo_bucket",
+            bucket="example-bucket-foo",
+            provider=primary_region
+        )
+        S3ControlMultiRegionAccessPoint(self, "example",
+            details={
+                "name": "example",
+                "region": [{
+                    "bucket": foo_bucket.id
+                }, {
+                    "bucket": bar_bucket.id
+                }
+                ]
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Optional) The AWS account ID for the owner of the buckets for which you want to create a Multi-Region Access Point. Defaults to automatically determined account ID of the Terraform AWS provider.
+* `details` - (Required) A configuration block containing details about the Multi-Region Access Point. See [Details Configuration Block](#details-configuration) below for more details.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `delete` - (Default `15m`)
+
+### Details Configuration
+
+The `details` block supports the following:
+
+* `name` - (Required) The name of the Multi-Region Access Point.
+* `public_access_block` - (Optional) Configuration block to manage the `PublicAccessBlock` configuration that you want to apply to this Multi-Region Access Point. You can enable the configuration options in any combination. See [Public Access Block Configuration](#public-access-block-configuration) below for more details.
+* `region` - (Required) The Region configuration block to specify the bucket associated with the Multi-Region Access Point. See [Region Configuration](#region-configuration) below for more details.
+
+For more information, see the documentation on [Multi-Region Access Points](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPoints.html).
+
+### Public Access Block Configuration
+
+The `public_access_block` block supports the following:
+
+* `block_public_acls` - (Optional) Whether Amazon S3 should block public ACLs for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing policies or ACLs. When set to `true`, it causes the following behavior:
+    * PUT Bucket acl and PUT Object acl calls fail if the specified ACL is public.
+    * PUT Object calls fail if the request includes a public ACL.
+    * PUT Bucket calls fail if the request includes a public ACL.
+* `block_public_policy` - (Optional) Whether Amazon S3 should block public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing bucket policies. When set to `true`, it causes Amazon S3 to:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignore_public_acls` - (Optional) Whether Amazon S3 should ignore public ACLs for buckets in this account. Defaults to `true`. Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set.
When set to `true`, it causes Amazon S3 to:
+    * Ignore all public ACLs on buckets in this account and any objects that they contain.
+* `restrict_public_buckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`:
+    * Only the bucket owner and AWS Services can access buckets with public policies.
+
+### Region Configuration
+
+The `region` block supports the following:
+
+* `bucket` - (Required) The name of the associated bucket for the Region.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `alias` - The alias for the Multi-Region Access Point.
+* `arn` - Amazon Resource Name (ARN) of the Multi-Region Access Point.
+* `domain_name` - The DNS domain name of the S3 Multi-Region Access Point in the format _`alias`_.accesspoint.s3-global.amazonaws.com. For more information, see the documentation on [Multi-Region Access Point Requests](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPointRequests.html).
+* `id` - The AWS account ID and access point name separated by a colon (`:`).
+* `status` - The current status of the Multi-Region Access Point. One of: `READY`, `INCONSISTENT_ACROSS_REGIONS`, `CREATING`, `PARTIALLY_CREATED`, `PARTIALLY_DELETED`, `DELETING`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Multi-Region Access Points using the `account_id` and `name` of the Multi-Region Access Point separated by a colon (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Multi-Region Access Points using the `account_id` and `name` of the Multi-Region Access Point separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_s3control_multi_region_access_point.example 123456789012:example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3control_multi_region_access_point_policy.html.markdown b/website/docs/cdktf/python/r/s3control_multi_region_access_point_policy.html.markdown
new file mode 100644
index 00000000000..e59711947ef
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3control_multi_region_access_point_policy.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3control_multi_region_access_point_policy"
+description: |-
+  Provides a resource to manage an S3 Multi-Region Access Point access control policy.
+---
+
+
+
+# Resource: aws_s3control_multi_region_access_point_policy
+
+Provides a resource to manage an S3 Multi-Region Access Point access control policy.
+
+## Example Usage
+
+### Basic Example
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
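+# Note: the policy below is keyed to the access point rather than the buckets:
+# the Multi-Region Access Point name is recovered from its `id` (account:name)
+# with Fn.split, and the Resource ARN is built from the access point `alias`.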
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_partition import DataAwsPartition
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_control_multi_region_access_point import S3ControlMultiRegionAccessPoint
+from imports.aws.s3_control_multi_region_access_point_policy import S3ControlMultiRegionAccessPointPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        foo_bucket = S3Bucket(self, "foo_bucket",
+            bucket="example-bucket-foo"
+        )
+        example = S3ControlMultiRegionAccessPoint(self, "example",
+            details={
+                "name": "example",
+                "region": [{
+                    "bucket": foo_bucket.id
+                }
+                ]
+            }
+        )
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_partition_current = DataAwsPartition(self, "current_3")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_partition_current.override_logical_id("current")
+        aws_s3_control_multi_region_access_point_policy_example = S3ControlMultiRegionAccessPointPolicy(self, "example_4",
+            details={
+                "name": Token.as_string(Fn.element(Fn.split(":", example.id), 1)),
+                "policy": Token.as_string(
+                    Fn.jsonencode({
+                        "Statement": [{
+                            "Action": ["s3:GetObject", "s3:PutObject"],
+                            "Effect": "Allow",
+                            "Principal": {
+                                "AWS": current.account_id
+                            },
+                            "Resource": "arn:${" + data_aws_partition_current.partition + "}:s3::${" + current.account_id + "}:accesspoint/${" + example.alias + "}/object/*",
+                            "Sid": "Example"
+                        }
+                        ],
+                        "Version": "2012-10-17"
+                    }))
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_control_multi_region_access_point_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Optional) The AWS account ID for the owner of the Multi-Region Access Point. Defaults to automatically determined account ID of the Terraform AWS provider.
+* `details` - (Required) A configuration block containing details about the policy for the Multi-Region Access Point. See [Details Configuration Block](#details-configuration) below for more details.
+
+### Details Configuration
+
+The `details` block supports the following:
+
+* `name` - (Required) The name of the Multi-Region Access Point.
+* `policy` - (Required) A valid JSON document that specifies the policy that you want to associate with this Multi-Region Access Point. Once applied, the policy can be edited, but not deleted. For more information, see the documentation on [Multi-Region Access Point Permissions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPointPermissions.html).
+
+-> **NOTE:** When you update the `policy`, the update is first listed as the proposed policy. After the update is finished and all Regions have been updated, the proposed policy is listed as the established policy. If both policies have the same version number, the proposed policy is the established policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `established` - The last established policy for the Multi-Region Access Point.
+* `id` - The AWS account ID and access point name separated by a colon (`:`).
+* `proposed` - The proposed policy for the Multi-Region Access Point.
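+
+Since the applied policy is surfaced through the `established` and `proposed` attributes, both can be exported for inspection. A minimal sketch, reusing the policy resource from the example above (the output names are illustrative):
+
+```python
+from cdktf import TerraformOutput
+# Expose the policy documents that AWS currently tracks for the
+# Multi-Region Access Point; `proposed` differs from `established`
+# while an update is still propagating across Regions.
+TerraformOutput(self, "established_policy",
+    value=aws_s3_control_multi_region_access_point_policy_example.established
+)
+TerraformOutput(self, "proposed_policy",
+    value=aws_s3_control_multi_region_access_point_policy_example.proposed
+)
+```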
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `15m`)
+* `update` - (Default `15m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Multi-Region Access Point Policies using the `account_id` and `name` of the Multi-Region Access Point separated by a colon (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Multi-Region Access Point Policies using the `account_id` and `name` of the Multi-Region Access Point separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_s3control_multi_region_access_point_policy.example 123456789012:example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3control_object_lambda_access_point.html.markdown b/website/docs/cdktf/python/r/s3control_object_lambda_access_point.html.markdown
new file mode 100644
index 00000000000..edf309acd68
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3control_object_lambda_access_point.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3control_object_lambda_access_point"
+description: |-
+  Provides a resource to manage an S3 Object Lambda Access Point.
+---
+
+
+
+# Resource: aws_s3control_object_lambda_access_point
+
+Provides a resource to manage an S3 Object Lambda Access Point.
+An Object Lambda access point is associated with exactly one [standard access point](s3_access_point.html) and thus one Amazon S3 bucket.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_access_point import S3AccessPoint
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_control_object_lambda_access_point import S3ControlObjectLambdaAccessPoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_s3_access_point_example = S3AccessPoint(self, "example_1",
+            bucket=example.id,
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_access_point_example.override_logical_id("example")
+        aws_s3_control_object_lambda_access_point_example = S3ControlObjectLambdaAccessPoint(self, "example_2",
+            configuration={
+                "supporting_access_point": Token.as_string(aws_s3_access_point_example.arn),
+                "transformation_configuration": [{
+                    "actions": ["GetObject"],
+                    "content_transformation": {
+                        "aws_lambda": {
+                            "function_arn": Token.as_string(aws_lambda_function_example.arn)
+                        }
+                    }
+                }
+                ]
+            },
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ aws_s3_control_object_lambda_access_point_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Optional) The AWS account ID for the owner of the bucket for which you want to create an Object Lambda Access Point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `configuration` - (Required) A configuration block containing details about the Object Lambda Access Point. See [Configuration](#configuration) below for more details. +* `name` - (Required) The name for this Object Lambda Access Point. + +### Configuration + +The `configuration` block supports the following: + +* `allowed_features` - (Optional) Allowed features. Valid values: `GetObject-Range`, `GetObject-PartNumber`. +* `cloud_watch_metrics_enabled` - (Optional) Whether or not the CloudWatch metrics configuration is enabled. +* `supporting_access_point` - (Required) Standard access point associated with the Object Lambda Access Point. +* `transformation_configuration` - (Required) List of transformation configurations for the Object Lambda Access Point. See [Transformation Configuration](#transformation-configuration) below for more details. + +### Transformation Configuration + +The `transformation_configuration` block supports the following: + +* `actions` - (Required) The actions of an Object Lambda Access Point configuration. Valid values: `GetObject`. +* `content_transformation` - (Required) The content transformation of an Object Lambda Access Point configuration. See [Content Transformation](#content-transformation) below for more details. + +### Content Transformation + +The `content_transformation` block supports the following: + +* `aws_lambda` - (Required) Configuration for an AWS Lambda function. See [AWS Lambda](#aws-lambda) below for more details. + +### AWS Lambda + +The `aws_lambda` block supports the following: + +* `function_arn` - (Required) The Amazon Resource Name (ARN) of the AWS Lambda function. +* `function_payload` - (Optional) Additional JSON that provides supplemental data to the Lambda function used to transform objects. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Object Lambda Access Point. +* `id` - The AWS account ID and access point name separated by a colon (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Object Lambda Access Points using the `account_id` and `name`, separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Object Lambda Access Points using the `account_id` and `name`, separated by a colon (`:`). 
For example:
+
+```console
+% terraform import aws_s3control_object_lambda_access_point.example 123456789012:example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/s3control_object_lambda_access_point_policy.html.markdown b/website/docs/cdktf/python/r/s3control_object_lambda_access_point_policy.html.markdown
new file mode 100644
index 00000000000..7115ab70d01
--- /dev/null
+++ b/website/docs/cdktf/python/r/s3control_object_lambda_access_point_policy.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3control_object_lambda_access_point_policy"
+description: |-
+  Provides a resource to manage an S3 Object Lambda Access Point resource policy.
+---
+
+
+
+# Resource: aws_s3control_object_lambda_access_point_policy
+
+Provides a resource to manage an S3 Object Lambda Access Point resource policy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_access_point import S3AccessPoint
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_control_object_lambda_access_point import S3ControlObjectLambdaAccessPoint
+from imports.aws.s3_control_object_lambda_access_point_policy import S3ControlObjectLambdaAccessPointPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example"
+        )
+        aws_s3_access_point_example = S3AccessPoint(self, "example_1",
+            bucket=example.id,
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_access_point_example.override_logical_id("example")
+        aws_s3_control_object_lambda_access_point_example = S3ControlObjectLambdaAccessPoint(self, "example_2",
+            configuration={
+                "supporting_access_point": Token.as_string(aws_s3_access_point_example.arn),
+                "transformation_configuration": [{
+                    "actions": ["GetObject"],
+                    "content_transformation": {
+                        "aws_lambda": {
+                            "function_arn": Token.as_string(aws_lambda_function_example.arn)
+                        }
+                    }
+                }
+                ]
+            },
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_control_object_lambda_access_point_example.override_logical_id("example")
+        aws_s3_control_object_lambda_access_point_policy_example = S3ControlObjectLambdaAccessPointPolicy(self, "example_3",
+            name=Token.as_string(aws_s3_control_object_lambda_access_point_example.name),
+            policy=Token.as_string(
+                Fn.jsonencode({
+                    "Statement": [{
+                        "Action": "s3-object-lambda:GetObject",
+                        "Effect": "Allow",
+                        "Principal": {
+                            "AWS": current.account_id
+                        },
+                        "Resource": aws_s3_control_object_lambda_access_point_example.arn
+                    }
+                    ],
+                    "Version": "2008-10-17"
+                }))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_s3_control_object_lambda_access_point_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account_id` - (Optional) The AWS account ID for the account that owns the Object Lambda Access Point.
Defaults to automatically determined account ID of the Terraform AWS provider. +* `name` - (Required) The name of the Object Lambda Access Point. +* `policy` - (Required) The Object Lambda Access Point resource policy document. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `has_public_access_policy` - Indicates whether this access point currently has a policy that allows public access. +* `id` - The AWS account ID and access point name separated by a colon (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Object Lambda Access Point policies using the `account_id` and `name`, separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Object Lambda Access Point policies using the `account_id` and `name`, separated by a colon (`:`). For example: + +```console +% terraform import aws_s3control_object_lambda_access_point_policy.example 123456789012:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3control_storage_lens_configuration.html.markdown b/website/docs/cdktf/python/r/s3control_storage_lens_configuration.html.markdown new file mode 100644 index 00000000000..8242a42fe42 --- /dev/null +++ b/website/docs/cdktf/python/r/s3control_storage_lens_configuration.html.markdown @@ -0,0 +1,235 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_storage_lens_configuration" +description: |- + Provides a resource to manage an S3 Storage Lens configuration. +--- + + + +# Resource: aws_s3control_storage_lens_configuration + +Provides a resource to manage an S3 Storage Lens configuration. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.s3_control_storage_lens_configuration import S3ControlStorageLensConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsCallerIdentity(self, "current") + S3ControlStorageLensConfiguration(self, "example", + config_id="example-1", + storage_lens_configuration={ + "account_level": { + "activity_metrics": { + "enabled": True + }, + "bucket_level": { + "activity_metrics": { + "enabled": True + } + } + }, + "data_export": { + "cloud_watch_metrics": { + "enabled": True + }, + "s3_bucket_destination": { + "account_id": Token.as_string(current.account_id), + "arn": target.arn, + "encryption": { + "sse_s3": [{}] + }, + "format": "CSV", + "output_schema_version": "V_1" + } + }, + "enabled": True, + "exclude": { + "buckets": [b1.arn, b2.arn], + "regions": ["us-east-2"] + } + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Optional) The AWS account ID for the S3 Storage Lens configuration. Defaults to automatically determined account ID of the Terraform AWS provider. 
+* `config_id` - (Required) The ID of the S3 Storage Lens configuration. +* `storage_lens_configuration` - (Required) The S3 Storage Lens configuration. See [Storage Lens Configuration](#storage-lens-configuration) below for more details. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Storage Lens Configuration + +The `storage_lens_configuration` block supports the following: + +* `account_level` (Required) The account-level configurations of the S3 Storage Lens configuration. See [Account Level](#account-level) below for more details. +* `aws_org` (Optional) The Amazon Web Services organization for the S3 Storage Lens configuration. See [AWS Org](#aws-org) below for more details. +* `data_export` (Optional) Properties of S3 Storage Lens metrics export including the destination, schema and format. See [Data Export](#data-export) below for more details. +* `enabled` (Required) Whether the S3 Storage Lens configuration is enabled. +* `exclude` (Optional) What is excluded in this configuration. Conflicts with `include`. See [Exclude](#exclude) below for more details. +* `include` (Optional) What is included in this configuration. Conflicts with `exclude`. See [Include](#include) below for more details. + +### Account Level + +The `account_level` block supports the following: + +* `activity_metrics` (Optional) S3 Storage Lens activity metrics. See [Activity Metrics](#activity-metrics) below for more details. +* `advanced_cost_optimization_metrics` (Optional) Advanced cost-optimization metrics for S3 Storage Lens. See [Advanced Cost-Optimization Metrics](#advanced-cost-optimization-metrics) below for more details. +* `advanced_data_protection_metrics` (Optional) Advanced data-protection metrics for S3 Storage Lens. See [Advanced Data-Protection Metrics](#advanced-data-protection-metrics) below for more details. +* `bucket_level` (Required) S3 Storage Lens bucket-level configuration. See [Bucket Level](#bucket-level) below for more details. +* `detailed_status_code_metrics` (Optional) Detailed status code metrics for S3 Storage Lens. See [Detailed Status Code Metrics](#detailed-status-code-metrics) below for more details. + +### Activity Metrics + +The `activity_metrics` block supports the following: + +* `enabled` (Optional) Whether the activity metrics are enabled. + +### Advanced Cost-Optimization Metrics + +The `advanced_cost_optimization_metrics` block supports the following: + +* `enabled` (Optional) Whether advanced cost-optimization metrics are enabled. + +### Advanced Data-Protection Metrics + +The `advanced_data_protection_metrics` block supports the following: + +* `enabled` (Optional) Whether advanced data-protection metrics are enabled. + +### Detailed Status Code Metrics + +The `detailed_status_code_metrics` block supports the following: + +* `enabled` (Optional) Whether detailed status code metrics are enabled. + +### Bucket Level + +The `bucket_level` block supports the following: + +* `activity_metrics` (Optional) S3 Storage Lens activity metrics. See [Activity Metrics](#activity-metrics) above for more details. +* `advanced_cost_optimization_metrics` (Optional) Advanced cost-optimization metrics for S3 Storage Lens. See [Advanced Cost-Optimization Metrics](#advanced-cost-optimization-metrics) above for more details. 
+* `advanced_data_protection_metrics` (Optional) Advanced data-protection metrics for S3 Storage Lens. See [Advanced Data-Protection Metrics](#advanced-data-protection-metrics) above for more details. +* `detailed_status_code_metrics` (Optional) Detailed status code metrics for S3 Storage Lens. See [Detailed Status Code Metrics](#detailed-status-code-metrics) above for more details. +* `prefix_level` (Optional) Prefix-level metrics for S3 Storage Lens. See [Prefix Level](#prefix-level) below for more details. + +### Prefix Level + +The `prefix_level` block supports the following: + +* `storage_metrics` (Required) Prefix-level storage metrics for S3 Storage Lens. See [Prefix Level Storage Metrics](#prefix-level-storage-metrics) below for more details. + +### Prefix Level Storage Metrics + +The `storage_metrics` block supports the following: + +* `enabled` (Optional) Whether prefix-level storage metrics are enabled. +* `selection_criteria` (Optional) Selection criteria. See [Selection Criteria](#selection-criteria) below for more details. + +### Selection Criteria + +The `selection_criteria` block supports the following: + +* `delimiter` (Optional) The delimiter of the selection criteria being used. +* `max_depth` (Optional) The max depth of the selection criteria. +* `min_storage_bytes_percentage` (Optional) The minimum number of storage bytes percentage whose metrics will be selected. + +### AWS Org + +The `aws_org` block supports the following: + +* `arn` (Required) The Amazon Resource Name (ARN) of the Amazon Web Services organization. + +### Data Export + +The `data_export` block supports the following: + +* `cloud_watch_metrics` (Optional) Amazon CloudWatch publishing for S3 Storage Lens metrics. See [Cloud Watch Metrics](#cloud-watch-metrics) below for more details. +* `s3_bucket_destination` (Optional) The bucket where the S3 Storage Lens metrics export will be located. See [S3 Bucket Destination](#s3-bucket-destination) below for more details. + +### Cloud Watch Metrics + +The `cloud_watch_metrics` block supports the following: + +* `enabled` (Required) Whether CloudWatch publishing for S3 Storage Lens metrics is enabled. + +### S3 Bucket Destination + +The `s3_bucket_destination` block supports the following: + +* `account_id` (Required) The account ID of the owner of the S3 Storage Lens metrics export bucket. +* `arn` (Required) The Amazon Resource Name (ARN) of the bucket. +* `encryption` (Optional) Encryption of the metrics exports in this bucket. See [Encryption](#encryption) below for more details. +* `format` (Required) The export format. Valid values: `CSV`, `Parquet`. +* `output_schema_version` (Required) The schema version of the export file. Valid values: `V_1`. +* `prefix` (Optional) The prefix of the destination bucket where the metrics export will be delivered. + +### Encryption + +The `encryption` block supports the following: + +* `sse_kms` (Optional) SSE-KMS encryption. See [SSE KMS](#sse-kms) below for more details. +* `sse_s3` (Optional) SSE-S3 encryption. An empty configuration block `{}` should be used. + +### SSE KMS + +The `sse_kms` block supports the following: + +* `key_id` (Required) KMS key ARN. + +### Exclude + +The `exclude` block supports the following: + +* `buckets` (Optional) List of S3 bucket ARNs. +* `regions` (Optional) List of AWS Regions. + +### Include + +The `include` block supports the following: + +* `buckets` (Optional) List of S3 bucket ARNs. +* `regions` (Optional) List of AWS Regions. 
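+
+As a complement to the main example above, here is a minimal sketch (not taken from the provider documentation) of the `include` and `sse_kms` blocks described in this section. The `target`, `b1`, `b2`, and `example_key` references are hypothetical stand-ins for an S3 bucket and a KMS key defined elsewhere in the stack, mirroring the conventions of the main example, and the exact shape of the `sse_kms` block in the converted dict form is an assumption.
+
+```python
+# A hedged sketch: scope Storage Lens to specific buckets/regions via `include`
+# and encrypt the metrics export with SSE-KMS instead of SSE-S3.
+# `target`, `b1`, `b2`, and `example_key` are hypothetical references to
+# resources defined elsewhere in the stack.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.s3_control_storage_lens_configuration import S3ControlStorageLensConfiguration
+class StorageLensIncludeSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        S3ControlStorageLensConfiguration(self, "include_example",
+            config_id="example-2",
+            storage_lens_configuration={
+                "account_level": {
+                    "activity_metrics": {
+                        "enabled": True
+                    },
+                    "bucket_level": {
+                        "activity_metrics": {
+                            "enabled": True
+                        }
+                    }
+                },
+                "data_export": {
+                    "s3_bucket_destination": {
+                        "account_id": Token.as_string(current.account_id),
+                        "arn": target.arn,
+                        "encryption": {
+                            "sse_kms": {
+                                "key_id": example_key.arn
+                            }
+                        },
+                        "format": "CSV",
+                        "output_schema_version": "V_1"
+                    }
+                },
+                "enabled": True,
+                "include": {
+                    "buckets": [b1.arn, b2.arn],
+                    "regions": ["us-west-2"]
+                }
+            }
+        )
+```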
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the S3 Storage Lens configuration. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Storage Lens configurations using the `account_id` and `config_id`, separated by a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import S3 Storage Lens configurations using the `account_id` and `config_id`, separated by a colon (`:`). For example: + +```console +% terraform import aws_s3control_storage_lens_configuration.example 123456789012:example-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/s3outposts_endpoint.html.markdown b/website/docs/cdktf/python/r/s3outposts_endpoint.html.markdown new file mode 100644 index 00000000000..79caef0a197 --- /dev/null +++ b/website/docs/cdktf/python/r/s3outposts_endpoint.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "S3 on Outposts" +layout: "aws" +page_title: "AWS: aws_s3outposts_endpoint" +description: |- + Manages an S3 Outposts Endpoint. +--- + + + +# Resource: aws_s3outposts_endpoint + +Provides a resource to manage an S3 Outposts Endpoint. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_outposts_endpoint import S3OutpostsEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + S3OutpostsEndpoint(self, "example", + outpost_id=Token.as_string(data_aws_outposts_outpost_example.id), + security_group_id=Token.as_string(aws_security_group_example.id), + subnet_id=Token.as_string(aws_subnet_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `outpost_id` - (Required) Identifier of the Outpost to contain this endpoint. +* `security_group_id` - (Required) Identifier of the EC2 Security Group. +* `subnet_id` - (Required) Identifier of the EC2 Subnet. +* `access_type` - (Optional) Type of access for the network connectivity. Valid values are `Private` or `CustomerOwnedIp`. +* `customer_owned_ipv4_pool` - (Optional) The ID of a Customer Owned IP Pool. For more on customer owned IP addresses see the [User Guide](https://docs.aws.amazon.com/outposts/latest/userguide/local-rack.html#local-gateway-subnet). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the endpoint. +* `cidr_block` - VPC CIDR block of the endpoint. +* `creation_time` - UTC creation time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `id` - Amazon Resource Name (ARN) of the endpoint. 
+* `network_interfaces` - Set of nested attributes for associated Elastic Network Interfaces (ENIs).
+    * `network_interface_id` - Identifier of the Elastic Network Interface (ENI).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Outposts Endpoints using Amazon Resource Name (ARN), EC2 Security Group identifier, and EC2 Subnet identifier, separated by commas (`,`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import S3 Outposts Endpoints using Amazon Resource Name (ARN), EC2 Security Group identifier, and EC2 Subnet identifier, separated by commas (`,`). For example:
+
+```console
+% terraform import aws_s3outposts_endpoint.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/endpoint/0123456789abcdef,sg-12345678,subnet-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_app.html.markdown b/website/docs/cdktf/python/r/sagemaker_app.html.markdown
new file mode 100644
index 00000000000..eb51a7b2bcf
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_app.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_app"
+description: |-
+  Provides a SageMaker App resource.
+---
+
+
+
+# Resource: aws_sagemaker_app
+
+Provides a SageMaker App resource.
+
+## Example Usage
+
+### Basic usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_app import SagemakerApp
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerApp(self, "example",
+            app_name="example",
+            app_type="JupyterServer",
+            domain_id=Token.as_string(aws_sagemaker_domain_example.id),
+            user_profile_name=Token.as_string(aws_sagemaker_user_profile_example.user_profile_name)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `app_name` - (Required) The name of the app.
+* `app_type` - (Required) The type of app. Valid values are `JupyterServer`, `KernelGateway`, `RStudioServerPro`, `RSessionGateway` and `TensorBoard`.
+* `domain_id` - (Required) The domain ID.
+* `user_profile_name` - (Optional) The user profile name. At least one of `user_profile_name` or `space_name` is required.
+* `resource_spec` - (Optional) The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. See [Resource Spec](#resource-spec) below.
+* `space_name` - (Optional) The name of the space. At least one of `user_profile_name` or `space_name` is required.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Resource Spec
+
+* `instance_type` - (Optional) The instance type that the image version runs on.
For valid values see [SageMaker Instance Types](https://docs.aws.amazon.com/sagemaker/latest/dg/notebooks-available-instance-types.html). +* `lifecycle_config_arn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource. +* `sagemaker_image_arn` - (Optional) The ARN of the SageMaker image that the image version belongs to. +* `sagemaker_image_version_arn` - (Optional) The ARN of the image version created on the instance. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the app. +* `arn` - The Amazon Resource Name (ARN) of the app. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Apps using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Apps using the `id`. For example: + +```console +% terraform import aws_sagemaker_app.example arn:aws:sagemaker:us-west-2:012345678912:app/domain-id/user-profile-name/app-type/app-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_app_image_config.html.markdown b/website/docs/cdktf/python/r/sagemaker_app_image_config.html.markdown new file mode 100644 index 00000000000..a025f892112 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_app_image_config.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_app_image_config" +description: |- + Provides a SageMaker App Image Config resource. +--- + + + +# Resource: aws_sagemaker_app_image_config + +Provides a SageMaker App Image Config resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_app_image_config import SagemakerAppImageConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerAppImageConfig(self, "test", + app_image_config_name="example", + kernel_gateway_image_config=SagemakerAppImageConfigKernelGatewayImageConfig( + kernel_spec=SagemakerAppImageConfigKernelGatewayImageConfigKernelSpec( + name="example" + ) + ) + ) +``` + +### Default File System Config + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
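+# An empty `file_system_config` block applies the documented defaults:
+# UID 1000, GID 100, and mount path /home/sagemaker-user.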
+#
+from imports.aws.sagemaker_app_image_config import SagemakerAppImageConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerAppImageConfig(self, "test",
+            app_image_config_name="example",
+            kernel_gateway_image_config=SagemakerAppImageConfigKernelGatewayImageConfig(
+                file_system_config=SagemakerAppImageConfigKernelGatewayImageConfigFileSystemConfig(),
+                kernel_spec=SagemakerAppImageConfigKernelGatewayImageConfigKernelSpec(
+                    name="example"
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `app_image_config_name` - (Required) The name of the App Image Config.
+* `kernel_gateway_image_config` - (Optional) The configuration for the file system and kernels in a SageMaker image running as a KernelGateway app. See [Kernel Gateway Image Config](#kernel-gateway-image-config) details below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Kernel Gateway Image Config
+
+* `file_system_config` - (Optional) The Amazon Elastic File System (EFS) storage configuration for the SageMaker image. See [File System Config](#file-system-config) details below.
+* `kernel_spec` - (Required) The specification of the Jupyter kernels in the image. See [Kernel Spec](#kernel-spec) details below.
+
+#### File System Config
+
+* `default_gid` - (Optional) The default POSIX group ID (GID). If not specified, defaults to `100`. Valid values are `0` and `100`.
+* `default_uid` - (Optional) The default POSIX user ID (UID). If not specified, defaults to `1000`. Valid values are `0` and `1000`.
+* `mount_path` - (Optional) The path within the image to mount the user's EFS home directory. The directory should be empty. If not specified, defaults to `/home/sagemaker-user`.
+
+~> **Note:** When specifying `default_gid` and `default_uid`, valid value pairs are [`0`, `0`] and [`100`, `1000`].
+
+#### Kernel Spec
+
+* `name` - (Required) The name of the kernel.
+* `display_name` - (Optional) The display name of the kernel.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the App Image Config.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this App Image Config.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker App Image Configs using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker App Image Configs using the `name`.
For example: + +```console +% terraform import aws_sagemaker_app_image_config.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_code_repository.html.markdown b/website/docs/cdktf/python/r/sagemaker_code_repository.html.markdown new file mode 100644 index 00000000000..f3e96f22b3a --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_code_repository.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_code_repository" +description: |- + Provides a SageMaker Code Repository resource. +--- + + + +# Resource: aws_sagemaker_code_repository + +Provides a SageMaker Code Repository resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_code_repository import SagemakerCodeRepository +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerCodeRepository(self, "example", + code_repository_name="example", + git_config=SagemakerCodeRepositoryGitConfig( + repository_url="https://github.com/hashicorp/terraform-provider-aws.git" + ) + ) +``` + +### Example with Secret + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_code_repository import SagemakerCodeRepository +from imports.aws.secretsmanager_secret import SecretsmanagerSecret +from imports.aws.secretsmanager_secret_version import SecretsmanagerSecretVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecretsmanagerSecret(self, "example", + name="example" + ) + aws_secretsmanager_secret_version_example = + SecretsmanagerSecretVersion(self, "example_1", + secret_id=example.id, + secret_string=Token.as_string( + Fn.jsonencode({ + "password": "example", + "username": "example" + })) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_secretsmanager_secret_version_example.override_logical_id("example") + aws_sagemaker_code_repository_example = SagemakerCodeRepository(self, "example_2", + code_repository_name="example", + depends_on=[aws_secretsmanager_secret_version_example], + git_config=SagemakerCodeRepositoryGitConfig( + repository_url="https://github.com/hashicorp/terraform-provider-aws.git", + secret_arn=example.arn + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sagemaker_code_repository_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `code_repository_name` - (Required) The name of the Code Repository (must be unique). +* `git_config` - (Required) Specifies details about the repository. see [Git Config](#git-config) details below. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Git Config + +* `repository_url` - (Required) The URL where the Git repository is located. +* `branch` - (Optional) The default branch for the Git repository. +* `secret_arn` - (Optional) The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the git repository. The secret must have a staging label of AWSCURRENT and must be in the following format: `{"username": UserName, "password": Password}` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Code Repository. +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Code Repository. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Code Repositories using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Code Repositories using the `name`. For example: + +```console +% terraform import aws_sagemaker_code_repository.test_code_repository my-code-repo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_data_quality_job_definition.html.markdown b/website/docs/cdktf/python/r/sagemaker_data_quality_job_definition.html.markdown new file mode 100644 index 00000000000..f19a70602ed --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_data_quality_job_definition.html.markdown @@ -0,0 +1,195 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_data_quality_job_definition" +description: |- + Provides a SageMaker Data Quality Job Definition resource. +--- + + + +# Resource: aws_sagemaker_data_quality_job_definition + +Provides a SageMaker data quality job definition resource. + +## Example Usage + +Basic usage: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
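+# Note: `monitor`, `my_endpoint`, `my_bucket`, and `my_role` below refer to
+# resources and data sources assumed to be defined elsewhere in the stack.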
+# +from imports.aws.sagemaker_data_quality_job_definition import SagemakerDataQualityJobDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerDataQualityJobDefinition(self, "test", + data_quality_app_specification=SagemakerDataQualityJobDefinitionDataQualityAppSpecification( + image_uri=Token.as_string(monitor.registry_path) + ), + data_quality_job_input=SagemakerDataQualityJobDefinitionDataQualityJobInput( + endpoint_input=SagemakerDataQualityJobDefinitionDataQualityJobInputEndpointInput( + endpoint_name=my_endpoint.name + ) + ), + data_quality_job_output_config=SagemakerDataQualityJobDefinitionDataQualityJobOutputConfig( + monitoring_outputs=SagemakerDataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs( + s3_output=SagemakerDataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output( + s3_uri="https://${" + my_bucket.bucket_regional_domain_name + "}/output" + ) + ) + ), + job_resources=SagemakerDataQualityJobDefinitionJobResources( + cluster_config=SagemakerDataQualityJobDefinitionJobResourcesClusterConfig( + instance_count=1, + instance_type="ml.t3.medium", + volume_size_in_gb=20 + ) + ), + name="my-data-quality-job-definition", + role_arn=my_role.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `data_quality_app_specification` - (Required) Specifies the container that runs the monitoring job. Fields are documented below. +* `data_quality_baseline_config` - (Optional) Configures the constraints and baselines for the monitoring job. Fields are documented below. +* `data_quality_job_input` - (Required) A list of inputs for the monitoring job. Fields are documented below. +* `data_quality_job_output_config` - (Required) The output configuration for monitoring jobs. Fields are documented below. +* `job_resources` - (Required) Identifies the resources to deploy for a monitoring job. Fields are documented below. +* `name` - (Optional) The name of the data quality job definition. If omitted, Terraform will assign a random, unique name. +* `network_config` - (Optional) Specifies networking configuration for the monitoring job. Fields are documented below. +* `role_arn` - (Required) The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf. +* `stopping_condition` - (Optional) A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below. +* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### data_quality_app_specification + +* `environment` - (Optional) Sets the environment variables in the container that the monitoring job runs. A list of key value pairs. +* `image_uri` - (Required) The container image that the data quality monitoring job runs. +* `post_analytics_processor_source_uri` - (Optional) An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers. +* `record_preprocessor_source_uri` - (Optional) An Amazon S3 URI to a script that is called per row prior to running analysis. 
It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
+
+### data_quality_baseline_config
+
+* `constraints_resource` - (Optional) The constraints resource for a monitoring job. Fields are documented below.
+* `statistics_resource` - (Optional) The statistics resource for a monitoring job. Fields are documented below.
+
+#### constraints_resource
+
+* `s3_uri` - (Optional) The Amazon S3 URI for the constraints resource.
+
+#### statistics_resource
+
+* `s3_uri` - (Optional) The Amazon S3 URI for the statistics resource.
+
+### data_quality_job_input
+
+* `batch_transform_input` - (Optional) Input object for the batch transform job. Fields are documented below.
+* `endpoint_input` - (Optional) Input object for the endpoint. Fields are documented below.
+
+#### batch_transform_input
+
+* `data_captured_destination_s3_uri` - (Required) The Amazon S3 location being used to capture the data.
+* `dataset_format` - (Required) The dataset format for your batch transform job. Fields are documented below.
+* `local_path` - (Optional) Path to the filesystem where the batch transform data is available to the container. Defaults to `/opt/ml/processing/input`.
+* `s3_data_distribution_type` - (Optional) Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `FullyReplicated`. Valid values are `FullyReplicated` or `ShardedByS3Key`.
+* `s3_input_mode` - (Optional) Whether `Pipe` or `File` is used as the input mode for transferring data for the monitoring job. `Pipe` mode is recommended for large datasets. `File` mode is useful for small files that fit in memory. Defaults to `File`. Valid values are `Pipe` or `File`.
+
+##### dataset_format
+
+* `csv` - (Optional) The CSV dataset used in the monitoring job. Fields are documented below.
+* `json` - (Optional) The JSON dataset used in the monitoring job. Fields are documented below.
+
+###### csv
+
+* `header` - (Optional) Indicates if the CSV data has a header.
+
+###### json
+
+* `line` - (Optional) Indicates if the file should be read as a JSON object per line.
+
+#### endpoint_input
+
+* `endpoint_name` - (Required) An endpoint in the customer's account which has `data_capture_config` enabled.
+* `local_path` - (Optional) Path to the filesystem where the endpoint data is available to the container. Defaults to `/opt/ml/processing/input`.
+* `s3_data_distribution_type` - (Optional) Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `FullyReplicated`. Valid values are `FullyReplicated` or `ShardedByS3Key`.
+* `s3_input_mode` - (Optional) Whether `Pipe` or `File` is used as the input mode for transferring data for the monitoring job. `Pipe` mode is recommended for large datasets. `File` mode is useful for small files that fit in memory. Defaults to `File`. Valid values are `Pipe` or `File`.
+
+### data_quality_job_output_config
+
+* `kms_key_id` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
+* `monitoring_outputs` - (Required) Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
+
+#### monitoring_outputs
+
+* `s3_output` - (Required) The Amazon S3 storage location where the results of a monitoring job are saved.
Fields are documented below. + +##### s3_output + +* `local_path` - (Optional) The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to `/opt/ml/processing/output`. +* `s3_upload_mode` - (Optional) Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are `Continuous` or `EndOfJob` +* `s3_uri` - (Required) A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. + +### job_resources + +* `cluster_config` - (Required) The configuration for the cluster resources used to run the processing job. Fields are documented below. + +#### cluster_config + +* `instance_count` - (Required) The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1. +* `instance_type` - (Required) The ML compute instance type for the processing job. +* `volume_kms_key_id` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job. +* `volume_size_in_gb` - (Required) The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario. + +### network_config + +* `enable_inter_container_traffic_encryption` - (Optional) Whether to encrypt all communications between the instances used for the monitoring jobs. Choose `true` to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer. +* `enable_network_isolation` - (Optional) Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job. +* `vpc_config` - (Optional) Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below. + +#### vpc_config + +* `security_group_ids` - (Required) The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the `subnets` field. +* `subnets` - (Required) The ID of the subnets in the VPC to which you want to connect your training job or model. + +### stopping_condition + +* `max_runtime_in_seconds` - (Required) The maximum runtime allowed in seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition. +* `name` - The name of the data quality job definition. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import data quality job definitions using the `name`. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import data quality job definitions using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_data_quality_job_definition.test_data_quality_job_definition data-quality-job-definition-foo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_device.html.markdown b/website/docs/cdktf/python/r/sagemaker_device.html.markdown
new file mode 100644
index 00000000000..a0c0b421f13
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_device.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_device"
+description: |-
+  Provides a SageMaker Device resource.
+---
+
+
+
+# Resource: aws_sagemaker_device
+
+Provides a SageMaker Device resource.
+
+## Example Usage
+
+### Basic usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_device import SagemakerDevice
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerDevice(self, "example",
+            device=SagemakerDeviceDevice(
+                device_name="example"
+            ),
+            device_fleet_name=Token.as_string(aws_sagemaker_device_fleet_example.device_fleet_name)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `device_fleet_name` - (Required) The name of the Device Fleet.
+* `device` - (Required) The device to register with SageMaker Edge Manager. See [Device](#device) details below.
+
+### Device
+
+* `device_name` - (Required) The name of the device.
+* `description` - (Optional) A description for the device.
+* `iot_thing_name` - (Optional) Amazon Web Services Internet of Things (IoT) object name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id is constructed from `device-fleet-name/device-name`.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Device.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Devices using the `device-fleet-name/device-name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Devices using the `device-fleet-name/device-name`.
For example:
+
+```console
+% terraform import aws_sagemaker_device.example my-fleet/my-device
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_device_fleet.html.markdown b/website/docs/cdktf/python/r/sagemaker_device_fleet.html.markdown
new file mode 100644
index 00000000000..75e73dbd342
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_device_fleet.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_device_fleet"
+description: |-
+  Provides a SageMaker Device Fleet resource.
+---
+
+
+
+# Resource: aws_sagemaker_device_fleet
+
+Provides a SageMaker Device Fleet resource.
+
+## Example Usage
+
+### Basic usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_device_fleet import SagemakerDeviceFleet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerDeviceFleet(self, "example",
+            device_fleet_name="example",
+            output_config=SagemakerDeviceFleetOutputConfig(
+                s3_output_location="s3://${" + aws_s3_bucket_example.bucket + "}/prefix/"
+            ),
+            role_arn=test.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `device_fleet_name` - (Required) The name of the Device Fleet (must be unique).
+* `role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM role that has access to AWS Internet of Things (IoT).
+* `output_config` - (Required) Specifies details about the S3 output location. See [Output Config](#output-config) details below.
+* `description` - (Optional) A description of the fleet.
+* `enable_iot_role_alias` - (Optional) Whether to create an AWS IoT Role Alias during device fleet creation. The name of the role alias generated will match this pattern: "SageMakerEdge-{DeviceFleetName}".
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Output Config
+
+* `s3_output_location` - (Required) The Amazon Simple Storage Service (S3) bucket URI.
+* `kms_key_id` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume after the compilation job. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Device Fleet.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Device Fleet.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Device Fleets using the `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Device Fleets using the `name`. For example: + +```console +% terraform import aws_sagemaker_device_fleet.example my-fleet +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_domain.html.markdown b/website/docs/cdktf/python/r/sagemaker_domain.html.markdown new file mode 100644 index 00000000000..eb598233bfd --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_domain.html.markdown @@ -0,0 +1,278 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_domain" +description: |- + Provides a SageMaker Domain resource. +--- + + + +# Resource: aws_sagemaker_domain + +Provides a SageMaker Domain resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.sagemaker_domain import SagemakerDomain +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsIamPolicyDocument(self, "example", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["sagemaker.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + aws_iam_role_example = IamRole(self, "example_1", + assume_role_policy=Token.as_string(example.json), + name="example", + path="/" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_sagemaker_domain_example = SagemakerDomain(self, "example_2", + auth_mode="IAM", + default_user_settings=SagemakerDomainDefaultUserSettings( + execution_role=Token.as_string(aws_iam_role_example.arn) + ), + domain_name="example", + subnet_ids=[Token.as_string(aws_subnet_example.id)], + vpc_id=Token.as_string(aws_vpc_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sagemaker_domain_example.override_logical_id("example") +``` + +### Using Custom Images + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
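+# Note: `aws_iam_role_example`, `aws_subnet_example`, and `aws_vpc_example`
+# are assumed to be defined elsewhere, as in the basic usage example above.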
+#
+from imports.aws.sagemaker_app_image_config import SagemakerAppImageConfig
+from imports.aws.sagemaker_domain import SagemakerDomain
+from imports.aws.sagemaker_image import SagemakerImage
+from imports.aws.sagemaker_image_version import SagemakerImageVersion
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SagemakerAppImageConfig(self, "example",
+            app_image_config_name="example",
+            kernel_gateway_image_config=SagemakerAppImageConfigKernelGatewayImageConfig(
+                kernel_spec=SagemakerAppImageConfigKernelGatewayImageConfigKernelSpec(
+                    name="example"
+                )
+            )
+        )
+        aws_sagemaker_image_example = SagemakerImage(self, "example_1",
+            image_name="example",
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sagemaker_image_example.override_logical_id("example")
+        aws_sagemaker_image_version_example = SagemakerImageVersion(self, "example_2",
+            base_image="base-image",
+            image_name=Token.as_string(aws_sagemaker_image_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sagemaker_image_version_example.override_logical_id("example")
+        aws_sagemaker_domain_example = SagemakerDomain(self, "example_3",
+            auth_mode="IAM",
+            default_user_settings=SagemakerDomainDefaultUserSettings(
+                execution_role=Token.as_string(aws_iam_role_example.arn),
+                kernel_gateway_app_settings=SagemakerDomainDefaultUserSettingsKernelGatewayAppSettings(
+                    custom_image=[SagemakerDomainDefaultUserSettingsKernelGatewayAppSettingsCustomImage(
+                        app_image_config_name=example.app_image_config_name,
+                        image_name=Token.as_string(aws_sagemaker_image_version_example.image_name)
+                    )
+                    ]
+                )
+            ),
+            domain_name="example",
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            vpc_id=Token.as_string(aws_vpc_example.id)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sagemaker_domain_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `auth_mode` - (Required) The mode of authentication that members use to access the domain. Valid values are `IAM` and `SSO`.
+* `default_space_settings` - (Required) The default space settings. See [Default Space Settings](#default_space_settings) below.
+* `default_user_settings` - (Required) The default user settings. See [Default User Settings](#default_user_settings) below.
+* `domain_name` - (Required) The domain name.
+* `subnet_ids` - (Required) The VPC subnets that Studio uses for communication.
+* `vpc_id` - (Required) The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.
+
+The following arguments are optional:
+
+* `app_network_access_type` - (Optional) Specifies the VPC used for non-EFS traffic. The default value is `PublicInternetOnly`. Valid values are `PublicInternetOnly` and `VpcOnly`.
+* `app_security_group_management` - (Optional) The entity that creates and manages the required security groups for inter-app communication in `VPCOnly` mode. Valid values are `Service` and `Customer`.
+* `domain_settings` - (Optional) The domain settings. See [Domain Settings](#domain_settings) below.
+* `kms_key_id` - (Optional) The AWS KMS customer managed CMK used to encrypt the EFS volume attached to the domain. +* `retention_policy` - (Optional) The retention policy for this domain, which specifies whether resources will be retained after the Domain is deleted. By default, all resources are retained. See [Retention Policy](#retention_policy) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### default_space_settings + +* `execution_role` - (Required) The execution role for the space. +* `jupyter_server_app_settings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter_server_app_settings) below. +* `kernel_gateway_app_settings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel_gateway_app_settings) below. +* `security_groups` - (Optional) The security groups for the Amazon Virtual Private Cloud that the space uses for communication. + +### default_user_settings + +* `execution_role` - (Required) The execution role ARN for the user. +* `canvas_app_settings` - (Optional) The Canvas app settings. See [Canvas App Settings](#canvas_app_settings) below. +* `jupyter_server_app_settings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter_server_app_settings) below. +* `kernel_gateway_app_settings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel_gateway_app_settings) below. +* `r_session_app_settings` - (Optional) The RSession app settings. See [RSession App Settings](#r_session_app_settings) below. +* `r_studio_server_pro_app_settings` - (Optional) A collection of settings that configure user interaction with the RStudioServerPro app. See [RStudioServerProAppSettings](#r_studio_server_pro_app_settings) below. +* `security_groups` - (Optional) A list of security group IDs that will be attached to the user. +* `sharing_settings` - (Optional) The sharing settings. See [Sharing Settings](#sharing_settings) below. +* `tensor_board_app_settings` - (Optional) The TensorBoard app settings. See [TensorBoard App Settings](#tensor_board_app_settings) below. + +#### r_studio_server_pro_app_settings + +* `access_status` - (Optional) Indicates whether the current user has access to the RStudioServerPro app. Valid values are `ENABLED` and `DISABLED`. +* `user_group` - (Optional) The level of permissions that the user has within the RStudioServerPro app. This value defaults to `R_STUDIO_USER`. The `R_STUDIO_ADMIN` value allows the user access to the RStudio Administrative Dashboard. Valid values are `R_STUDIO_USER` and `R_STUDIO_ADMIN`. + +#### canvas_app_settings + +* `model_register_settings` - (Optional) The model registry settings for the SageMaker Canvas application. See [Model Register Settings](#model_register_settings) below. +* `time_series_forecasting_settings` - (Optional) Time series forecast settings for the Canvas app. See [Time Series Forecasting Settings](#time_series_forecasting_settings) below. +* `workspace_settings` - (Optional) The workspace settings for the SageMaker Canvas application. See [Workspace Settings](#workspace_settings) below. 
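+
+As a minimal sketch (assuming the generated binding names follow the same convention as the other examples on this page), the Canvas settings above can be wired into `default_user_settings` as shown below; `aws_iam_role_example`, `aws_subnet_example`, and `aws_vpc_example` are hypothetical references to resources defined elsewhere:
+
+```python
+# A hedged sketch: enable Canvas time series forecasting for all users of a
+# domain. The nested *CanvasAppSettings* class names are assumed to follow the
+# generated bindings' naming convention shown elsewhere on this page.
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.sagemaker_domain import (SagemakerDomain,
+    SagemakerDomainDefaultUserSettings,
+    SagemakerDomainDefaultUserSettingsCanvasAppSettings,
+    SagemakerDomainDefaultUserSettingsCanvasAppSettingsTimeSeriesForecastingSettings)
+class CanvasDomainSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerDomain(self, "canvas_example",
+            auth_mode="IAM",
+            domain_name="canvas-example",
+            subnet_ids=[Token.as_string(aws_subnet_example.id)],
+            vpc_id=Token.as_string(aws_vpc_example.id),
+            default_user_settings=SagemakerDomainDefaultUserSettings(
+                execution_role=Token.as_string(aws_iam_role_example.arn),
+                canvas_app_settings=SagemakerDomainDefaultUserSettingsCanvasAppSettings(
+                    time_series_forecasting_settings=SagemakerDomainDefaultUserSettingsCanvasAppSettingsTimeSeriesForecastingSettings(
+                        status="ENABLED"
+                    )
+                )
+            )
+        )
+```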
+ +##### model_register_settings + +* `cross_account_model_register_role_arn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker model registry account. Required only to register model versions created by a different SageMaker Canvas AWS account than the AWS account in which SageMaker model registry is set up. +* `status` - (Optional) Describes whether the integration to the model registry is enabled or disabled in the Canvas application. Valid values are `ENABLED` and `DISABLED`. + +##### time_series_forecasting_settings + +* `amazon_forecast_role_arn` - (Optional) The IAM role that Canvas passes to Amazon Forecast for time series forecasting. By default, Canvas uses the execution role specified in the UserProfile that launches the Canvas app. If an execution role is not specified in the UserProfile, Canvas uses the execution role specified in the Domain that owns the UserProfile. To allow time series forecasting, this IAM role should have the [AmazonSageMakerCanvasForecastAccess](https://docs.aws.amazon.com/sagemaker/latest/dg/security-iam-awsmanpol-canvas.html#security-iam-awsmanpol-AmazonSageMakerCanvasForecastAccess) policy attached and forecast.amazonaws.com added in the trust relationship as a service principal. +* `status` - (Optional) Describes whether time series forecasting is enabled or disabled in the Canvas app. Valid values are `ENABLED` and `DISABLED`. + +##### workspace_settings + +* `s3_artifact_path` - (Optional) The Amazon S3 bucket used to store artifacts generated by Canvas. Updating the Amazon S3 location impacts existing configuration settings, and Canvas users no longer have access to their artifacts. Canvas users must log out and log back in to apply the new location. +* `s3_kms_key_id` - (Optional) The Amazon Web Services Key Management Service (KMS) encryption key ID that is used to encrypt artifacts generated by Canvas in the Amazon S3 bucket. + +#### sharing_settings + +* `notebook_output_option` - (Optional) Whether to include the notebook cell output when sharing the notebook. The default is `Disabled`. Valid values are `Allowed` and `Disabled`. +* `s3_kms_key_id` - (Optional) When `notebook_output_option` is Allowed, the AWS Key Management Service (KMS) encryption key ID used to encrypt the notebook cell output in the Amazon S3 bucket. +* `s3_output_path` - (Optional) When `notebook_output_option` is Allowed, the Amazon S3 bucket used to save the notebook cell output. + +#### tensor_board_app_settings + +* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default_resource_spec) below. + +#### kernel_gateway_app_settings + +* `custom_image` - (Optional) A list of custom SageMaker images that are configured to run as a KernelGateway app. see [Custom Image](#custom_image) below. +* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default_resource_spec) below. +* `lifecycle_config_arns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations. + +#### jupyter_server_app_settings + +* `code_repository` - (Optional) A list of Git repositories that SageMaker automatically displays to users for cloning in the JupyterServer application. see [Code Repository](#code_repository) below. 
+* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. See [Default Resource Spec](#default_resource_spec) below.
+* `lifecycle_config_arns` - (Optional) The Amazon Resource Names (ARNs) of the Lifecycle Configurations.
+
+##### code_repository
+
+* `repository_url` - (Optional) The URL of the Git repository.
+
+##### default_resource_spec
+
+* `instance_type` - (Optional) The instance type that the image version runs on. For valid values, see [SageMaker Instance Types](https://docs.aws.amazon.com/sagemaker/latest/dg/notebooks-available-instance-types.html).
+* `lifecycle_config_arn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource.
+* `sagemaker_image_arn` - (Optional) The ARN of the SageMaker image that the image version belongs to.
+* `sagemaker_image_version_arn` - (Optional) The ARN of the image version created on the instance.
+
+#### r_session_app_settings
+
+* `custom_image` - (Optional) A list of custom SageMaker images that are configured to run as an RSession app. See [Custom Image](#custom_image) below.
+* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. See [Default Resource Spec](#default_resource_spec) below.
+
+##### custom_image
+
+* `app_image_config_name` - (Required) The name of the App Image Config.
+* `image_name` - (Required) The name of the Custom Image.
+* `image_version_number` - (Optional) The version number of the Custom Image.
+
+### domain_settings
+
+* `execution_role_identity_config` - (Optional) The configuration for attaching a SageMaker user profile name to the execution role as an `sts:SourceIdentity` key. See the [AWS Docs](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_monitor.html) for more information. Valid values are `USER_PROFILE_NAME` and `DISABLED`.
+* `r_studio_server_pro_domain_settings` - (Optional) A collection of settings that configure the RStudioServerPro Domain-level app. See [RStudioServerProDomainSettings](#r_studio_server_pro_domain_settings) below.
+* `security_group_ids` - (Optional) The security groups for the Amazon Virtual Private Cloud that the Domain uses for communication between Domain-level apps and user apps.
+
+#### r_studio_server_pro_domain_settings
+
+* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. See [Default Resource Spec](#default_resource_spec) below.
+* `domain_execution_role_arn` - (Required) The ARN of the execution role for the RStudioServerPro Domain-level app.
+* `r_studio_connect_url` - (Optional) A URL pointing to an RStudio Connect server.
+* `r_studio_package_manager_url` - (Optional) A URL pointing to an RStudio Package Manager server.
+
+### retention_policy
+
+* `home_efs_file_system` - (Optional) The retention policy for data stored on an Amazon Elastic File System (EFS) volume. Valid values are `Retain` or `Delete`. Default value is `Retain`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Domain.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Domain.
+* `url` - The domain's URL.
+* `single_sign_on_managed_application_instance_id` - The SSO managed application instance ID.
+* `security_group_id_for_domain_boundary` - The ID of the security group that authorizes traffic between the RSessionGateway apps and the RStudioServerPro app. +* `home_efs_file_system_id` - The ID of the Amazon Elastic File System (EFS) managed by this Domain. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Domains using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Domains using the `id`. For example: + +```console +% terraform import aws_sagemaker_domain.test_domain d-8jgsjtilstu8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_endpoint.html.markdown b/website/docs/cdktf/python/r/sagemaker_endpoint.html.markdown new file mode 100644 index 00000000000..0cf2c85674c --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_endpoint.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_endpoint" +description: |- + Provides a SageMaker Endpoint resource. +--- + + + +# Resource: aws_sagemaker_endpoint + +Provides a SageMaker Endpoint resource. + +## Example Usage + +Basic usage: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_endpoint import SagemakerEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerEndpoint(self, "e", + endpoint_config_name=ec.name, + name="my-endpoint", + tags={ + "Name": "foo" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `endpoint_config_name` - (Required) The name of the endpoint configuration to use. +* `deployment_config` - (Optional) The deployment configuration for an endpoint, which contains the desired deployment strategy and rollback configurations. See [Deployment Config](#deployment-config). +* `name` - (Optional) The name of the endpoint. If omitted, Terraform will assign a random, unique name. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Deployment Config + +* `blue_green_update_policy` - (Required) Update policy for a blue/green deployment. If this update policy is specified, SageMaker creates a new fleet during the deployment while maintaining the old fleet. See [Blue Green Update Config](#blue-green-update-config). +* `auto_rollback_configuration` - (Optional) Automatic rollback configuration for handling endpoint deployment failures and recovery. 
See [Auto Rollback Configuration](#auto-rollback-configuration).
+
+#### Blue Green Update Config
+
+* `traffic_routing_configuration` - (Required) Defines the traffic routing strategy to shift traffic from the old fleet to the new fleet during an endpoint deployment. See [Traffic Routing Configuration](#traffic-routing-configuration).
+* `maximum_execution_timeout_in_seconds` - (Optional) Maximum execution timeout for the deployment. Note that the timeout value should be larger than the total waiting time specified in `termination_wait_in_seconds` and `wait_interval_in_seconds`. Valid values are between `600` and `14400`.
+* `termination_wait_in_seconds` - (Optional) Additional waiting time in seconds after the completion of an endpoint deployment before terminating the old endpoint fleet. Default is `0`. Valid values are between `0` and `3600`.
+
+##### Traffic Routing Configuration
+
+* `type` - (Required) Traffic routing strategy type. Valid values are `ALL_AT_ONCE`, `CANARY`, and `LINEAR`.
+* `wait_interval_in_seconds` - (Required) The waiting time (in seconds) between incremental steps to turn on traffic on the new endpoint fleet. Valid values are between `0` and `3600`.
+* `canary_size` - (Optional) Batch size for the first step to turn on traffic on the new endpoint fleet. Value must be less than or equal to 50% of the variant's total instance count. See [Canary Size](#canary-size).
+* `linear_step_size` - (Optional) Batch size for each step to turn on traffic on the new endpoint fleet. Value must be 10-50% of the variant's total instance count. See [Linear Step Size](#linear-step-size).
+
+###### Canary Size
+
+* `type` - (Required) Specifies the endpoint capacity type. Valid values are `INSTANCE_COUNT` and `CAPACITY_PERCENT`.
+* `value` - (Required) Defines the capacity size, either as a number of instances or a capacity percentage.
+
+###### Linear Step Size
+
+* `type` - (Required) Specifies the endpoint capacity type. Valid values are `INSTANCE_COUNT` and `CAPACITY_PERCENT`.
+* `value` - (Required) Defines the capacity size, either as a number of instances or a capacity percentage.
+
+#### Auto Rollback Configuration
+
+* `alarms` - (Required) List of CloudWatch alarms in your account that are configured to monitor metrics on an endpoint. If any alarms are tripped during a deployment, SageMaker rolls back the deployment. See [Alarms](#alarms).
+
+##### Alarms
+
+* `alarm_name` - (Required) The name of a CloudWatch alarm in your account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this endpoint.
+* `name` - The name of the endpoint.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoints using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import endpoints using the `name`.
For example:
+
+```console
+% terraform import aws_sagemaker_endpoint.test_endpoint my-endpoint
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_endpoint_configuration.html.markdown b/website/docs/cdktf/python/r/sagemaker_endpoint_configuration.html.markdown
new file mode 100644
index 00000000000..bc307c39fc6
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_endpoint_configuration.html.markdown
@@ -0,0 +1,152 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_endpoint_configuration"
+description: |-
+  Provides a SageMaker Endpoint Configuration resource.
+---
+
+
+# Resource: aws_sagemaker_endpoint_configuration
+
+Provides a SageMaker endpoint configuration resource.
+
+## Example Usage
+
+Basic usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_endpoint_configuration import SagemakerEndpointConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerEndpointConfiguration(self, "ec",
+            name="my-endpoint-config",
+            production_variants=[SagemakerEndpointConfigurationProductionVariants(
+                initial_instance_count=1,
+                instance_type="ml.t2.medium",
+                model_name=m.name,
+                variant_name="variant-1"
+            )
+            ],
+            tags={
+                "Name": "foo"
+            }
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `production_variants` - (Required) A list of ProductionVariant objects, one for each model that you want to host at this endpoint. Fields are documented below.
+* `kms_key_arn` - (Optional) Amazon Resource Name (ARN) of an AWS Key Management Service key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance that hosts the endpoint.
+* `name` - (Optional) The name of the endpoint configuration. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`.
+* `name_prefix` - (Optional) Creates a unique endpoint configuration name beginning with the specified prefix. Conflicts with `name`.
+* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `data_capture_config` - (Optional) Specifies the parameters to capture input/output of SageMaker model endpoints. Fields are documented below.
+* `async_inference_config` - (Optional) Specifies configuration for how an endpoint performs asynchronous inference.
+* `shadow_production_variants` - (Optional) Array of ProductionVariant objects. There is one for each model that you want to host at this endpoint in shadow mode with production traffic replicated from the model specified on ProductionVariants. If you use this field, you can only specify one variant for ProductionVariants and one variant for ShadowProductionVariants. Fields are documented below.
+
+### production_variants
+
+* `accelerator_type` - (Optional) The size of the Elastic Inference (EI) instance to use for the production variant.
+* `container_startup_health_check_timeout_in_seconds` - (Optional) The timeout value, in seconds, for your inference container to pass the SageMaker Hosting health check. For more information about health checks, see [How Your Container Should Respond to Health Check (Ping) Requests](https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-inference-code.html#your-algorithms-inference-algo-ping-requests). Valid values are between `60` and `3600`.
+* `core_dump_config` - (Optional) Specifies configuration for a core dump from the model container when the process crashes. Fields are documented below.
+* `enable_ssm_access` - (Optional) You can use this parameter to turn on native Amazon Web Services Systems Manager (SSM) access for a production variant behind an endpoint. By default, SSM access is disabled for all production variants behind an endpoint.
+* `initial_instance_count` - (Optional) Initial number of instances used for auto-scaling.
+* `instance_type` - (Optional) The type of instance to start.
+* `initial_variant_weight` - (Optional) Determines initial traffic distribution among all of the models that you specify in the endpoint configuration. If unspecified, it defaults to `1.0`.
+* `model_data_download_timeout_in_seconds` - (Optional) The timeout value, in seconds, to download and extract the model that you want to host from Amazon S3 to the individual inference instance associated with this production variant. Valid values are between `60` and `3600`.
+* `model_name` - (Required) The name of the model to use.
+* `serverless_config` - (Optional) Specifies configuration for the serverless endpoint. Fields are documented below.
+* `variant_name` - (Optional) The name of the variant. If omitted, Terraform will assign a random, unique name.
+* `volume_size_in_gb` - (Optional) The size, in GB, of the ML storage volume attached to the individual inference instance associated with the production variant. Valid values are between `1` and `512`.
+
+#### core_dump_config
+
+* `destination_s3_uri` - (Required) The Amazon S3 bucket to send the core dump to.
+* `kms_key_id` - (Required) The Amazon Web Services Key Management Service (Amazon Web Services KMS) key that SageMaker uses to encrypt the core dump data at rest using Amazon S3 server-side encryption.
+
+#### serverless_config
+
+* `max_concurrency` - (Required) The maximum number of concurrent invocations your serverless endpoint can process. Valid values are between `1` and `200`.
+* `memory_size_in_mb` - (Required) The memory size of your serverless endpoint. Valid values are in 1 GB increments: `1024` MB, `2048` MB, `3072` MB, `4096` MB, `5120` MB, or `6144` MB.
+* `provisioned_concurrency` - (Optional) The amount of provisioned concurrency to allocate for the serverless endpoint. Should be less than or equal to `max_concurrency`. Valid values are between `1` and `200`.
+
+### data_capture_config
+
+* `initial_sampling_percentage` - (Required) Portion of data to capture. Should be between `0` and `100`.
+* `destination_s3_uri` - (Required) The URL for the S3 location where the captured data is stored.
+* `capture_options` - (Required) Specifies what data to capture. Fields are documented below.
+* `kms_key_id` - (Optional) Amazon Resource Name (ARN) of an AWS Key Management Service key that Amazon SageMaker uses to encrypt the captured data on Amazon S3.
+* `enable_capture` - (Optional) Flag to enable data capture. Defaults to `false`.
+* `capture_content_type_header` - (Optional) The content type headers to capture. Fields are documented below.
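+
+A minimal, hand-written sketch (not `cdktf convert` output) of the `data_capture_config` block described above, capturing both request and response payloads. The struct class names follow the provider's generated naming convention, and the bucket and model name are placeholders:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.sagemaker_endpoint_configuration import (SagemakerEndpointConfiguration,
+    SagemakerEndpointConfigurationProductionVariants,
+    SagemakerEndpointConfigurationDataCaptureConfig,
+    SagemakerEndpointConfigurationDataCaptureConfigCaptureOptions,
+    SagemakerEndpointConfigurationDataCaptureConfigCaptureContentTypeHeader)
+class DataCaptureSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerEndpointConfiguration(self, "ec",
+            production_variants=[SagemakerEndpointConfigurationProductionVariants(
+                initial_instance_count=1,
+                instance_type="ml.t2.medium",
+                model_name="my-model",
+                variant_name="variant-1"
+            )],
+            data_capture_config=SagemakerEndpointConfigurationDataCaptureConfig(
+                enable_capture=True,
+                initial_sampling_percentage=100,
+                destination_s3_uri="s3://my-bucket/capture",
+                # One capture_options entry per direction to capture.
+                capture_options=[
+                    SagemakerEndpointConfigurationDataCaptureConfigCaptureOptions(capture_mode="Input"),
+                    SagemakerEndpointConfigurationDataCaptureConfigCaptureOptions(capture_mode="Output")
+                ],
+                capture_content_type_header=SagemakerEndpointConfigurationDataCaptureConfigCaptureContentTypeHeader(
+                    json_content_types=["application/json"]
+                )
+            )
+        )
+```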
+ +#### capture_options + +* `capture_mode` - (Required) Specifies the data to be captured. Should be one of `Input` or `Output`. + +#### capture_content_type_header + +* `csv_content_types` - (Optional) The CSV content type headers to capture. +* `json_content_types` - (Optional) The JSON content type headers to capture. + +### async_inference_config + +* `output_config` - (Required) Specifies the configuration for asynchronous inference invocation outputs. +* `client_config` - (Optional) Configures the behavior of the client used by Amazon SageMaker to interact with the model container during asynchronous inference. + +#### client_config + +* `max_concurrent_invocations_per_instance` - (Optional) The maximum number of concurrent requests sent by the SageMaker client to the model container. If no value is provided, Amazon SageMaker will choose an optimal value for you. + +#### output_config + +* `s3_output_path` - (Required) The Amazon S3 location to upload inference responses to. +* `s3_failure_path` - (Optional) The Amazon S3 location to upload failure inference responses to. +* `kms_key_id` - (Optional) The Amazon Web Services Key Management Service (Amazon Web Services KMS) key that Amazon SageMaker uses to encrypt the asynchronous inference output in Amazon S3. +* `notification_config` - (Optional) Specifies the configuration for notifications of inference results for asynchronous inference. + +##### notification_config + +* `include_inference_response_in` - (Optional) The Amazon SNS topics where you want the inference response to be included. Valid values are `SUCCESS_NOTIFICATION_TOPIC` and `ERROR_NOTIFICATION_TOPIC`. +* `error_topic` - (Optional) Amazon SNS topic to post a notification to when inference fails. If no topic is provided, no notification is sent on failure. +* `success_topic` - (Optional) Amazon SNS topic to post a notification to when inference completes successfully. If no topic is provided, no notification is sent on success. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this endpoint configuration. +* `name` - The name of the endpoint configuration. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoint configurations using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import endpoint configurations using the `name`. 
For example:
+
+```console
+% terraform import aws_sagemaker_endpoint_configuration.test_endpoint_config endpoint-config-foo
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_feature_group.html.markdown b/website/docs/cdktf/python/r/sagemaker_feature_group.html.markdown
new file mode 100644
index 00000000000..9a0630907f1
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_feature_group.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_feature_group"
+description: |-
+  Provides a SageMaker Feature Group resource.
+---
+
+
+# Resource: aws_sagemaker_feature_group
+
+Provides a SageMaker Feature Group resource.
+
+## Example Usage
+
+Basic usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_feature_group import SagemakerFeatureGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerFeatureGroup(self, "example",
+            event_time_feature_name="example",
+            feature_definition=[SagemakerFeatureGroupFeatureDefinition(
+                feature_name="example",
+                feature_type="String"
+            )
+            ],
+            feature_group_name="example",
+            online_store_config=SagemakerFeatureGroupOnlineStoreConfig(
+                enable_online_store=True
+            ),
+            record_identifier_feature_name="example",
+            role_arn=test.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `feature_group_name` - (Required) The name of the Feature Group. The name must be unique within an AWS Region in an AWS account.
+* `record_identifier_feature_name` - (Required) The name of the Feature whose value uniquely identifies a Record defined in the Feature Store. Only the latest record per identifier value will be stored in the Online Store.
+* `event_time_feature_name` - (Required) The name of the feature that stores the EventTime of a Record in a Feature Group.
+* `description` - (Optional) A free-form description of a Feature Group.
+* `role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM execution role used to persist data into the Offline Store if an `offline_store_config` is provided.
+* `feature_definition` - (Optional) A list of Feature names and types. See [Feature Definition](#feature-definition) below.
+* `offline_store_config` - (Optional) The Offline Feature Store Configuration. See [Offline Store Config](#offline-store-config) below.
+* `online_store_config` - (Optional) The Online Feature Store Configuration. See [Online Store Config](#online-store-config) below.
+* `tags` - (Optional) Map of resource tags for the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Feature Definition
+
+* `feature_name` - (Required) The name of a feature. `feature_name` cannot be any of the following: `is_deleted`, `write_time`, `api_invocation_time`.
+* `feature_type` - (Required) The value type of a feature. Valid values are `Integral`, `Fractional`, or `String`.
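+
+A short, hand-written sketch (not `cdktf convert` output) showing one feature definition per value type. The record identifier and event time features must themselves appear in `feature_definition`; the role ARN is a placeholder:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.sagemaker_feature_group import (SagemakerFeatureGroup,
+    SagemakerFeatureGroupFeatureDefinition,
+    SagemakerFeatureGroupOnlineStoreConfig)
+class FeatureDefinitionSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerFeatureGroup(self, "example",
+            feature_group_name="example",
+            record_identifier_feature_name="record_id",
+            event_time_feature_name="event_time",
+            role_arn="arn:aws:iam::123456789012:role/example",
+            feature_definition=[
+                # The record identifier and event time features themselves.
+                SagemakerFeatureGroupFeatureDefinition(feature_name="record_id", feature_type="String"),
+                SagemakerFeatureGroupFeatureDefinition(feature_name="event_time", feature_type="String"),
+                # One example of each remaining value type.
+                SagemakerFeatureGroupFeatureDefinition(feature_name="clicks", feature_type="Integral"),
+                SagemakerFeatureGroupFeatureDefinition(feature_name="score", feature_type="Fractional")
+            ],
+            online_store_config=SagemakerFeatureGroupOnlineStoreConfig(
+                enable_online_store=True
+            )
+        )
+```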
+
+### Offline Store Config
+
+* `disable_glue_table_creation` - (Optional) Set to `true` to disable the automatic creation of an AWS Glue table when configuring an OfflineStore.
+* `s3_storage_config` - (Required) The Amazon Simple Storage Service (Amazon S3) location of OfflineStore. See [S3 Storage Config](#s3-storage-config) below.
+* `data_catalog_config` - (Optional) The metadata of the Glue table that is autogenerated when an OfflineStore is created. See [Data Catalog Config](#data-catalog-config) below.
+* `table_format` - (Optional) Format for the offline store table. Supported formats are `Glue` (default) and `Iceberg` ([Apache Iceberg](https://iceberg.apache.org/)).
+
+### Online Store Config
+
+* `enable_online_store` - (Optional) Set to `true` to turn the Online Store on.
+* `security_config` - (Required) Security config for at-rest encryption of your OnlineStore. See [Security Config](#security-config) below.
+
+#### S3 Storage Config
+
+* `kms_key_id` - (Optional) The AWS Key Management Service (KMS) key ID of the key used to encrypt any objects written into the OfflineStore S3 location.
+* `s3_uri` - (Required) The S3 URI, or location in Amazon S3, of OfflineStore.
+
+#### Data Catalog Config
+
+* `catalog` - (Optional) The name of the Glue table catalog.
+* `database` - (Optional) The name of the Glue table database.
+* `table_name` - (Optional) The name of the Glue table.
+
+#### Security Config
+
+* `kms_key_id` - (Optional) The ID of the AWS Key Management Service (AWS KMS) key that SageMaker Feature Store uses to encrypt the Amazon S3 objects at rest using Amazon S3 server-side encryption.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `name` - The name of the Feature Group.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this feature_group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Feature Groups using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Feature Groups using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_feature_group.test_feature_group feature_group-foo
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_flow_definition.html.markdown b/website/docs/cdktf/python/r/sagemaker_flow_definition.html.markdown
new file mode 100644
index 00000000000..48b4eb64970
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_flow_definition.html.markdown
@@ -0,0 +1,202 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_flow_definition"
+description: |-
+  Provides a SageMaker Flow Definition resource.
+---
+
+
+# Resource: aws_sagemaker_flow_definition
+
+Provides a SageMaker Flow Definition resource.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_flow_definition import SagemakerFlowDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerFlowDefinition(self, "example",
+            flow_definition_name="example",
+            human_loop_config=SagemakerFlowDefinitionHumanLoopConfig(
+                human_task_ui_arn=Token.as_string(aws_sagemaker_human_task_ui_example.arn),
+                task_availability_lifetime_in_seconds=1,
+                task_count=1,
+                task_description="example",
+                task_title="example",
+                workteam_arn=Token.as_string(aws_sagemaker_workteam_example.arn)
+            ),
+            output_config=SagemakerFlowDefinitionOutputConfig(
+                s3_output_path="s3://${" + aws_s3_bucket_example.bucket + "}/"
+            ),
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+### Public Workteam Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_flow_definition import SagemakerFlowDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerFlowDefinition(self, "example",
+            flow_definition_name="example",
+            human_loop_config=SagemakerFlowDefinitionHumanLoopConfig(
+                human_task_ui_arn=Token.as_string(aws_sagemaker_human_task_ui_example.arn),
+                public_workforce_task_price=SagemakerFlowDefinitionHumanLoopConfigPublicWorkforceTaskPrice(
+                    amount_in_usd=SagemakerFlowDefinitionHumanLoopConfigPublicWorkforceTaskPriceAmountInUsd(
+                        cents=1,
+                        tenth_fractions_of_a_cent=2
+                    )
+                ),
+                task_availability_lifetime_in_seconds=1,
+                task_count=1,
+                task_description="example",
+                task_title="example",
+                workteam_arn="arn:aws:sagemaker:${" + current.name + "}:394669845002:workteam/public-crowd/default"
+            ),
+            output_config=SagemakerFlowDefinitionOutputConfig(
+                s3_output_path="s3://${" + aws_s3_bucket_example.bucket + "}/"
+            ),
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+### Human Loop Activation Config Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.sagemaker_flow_definition import SagemakerFlowDefinition +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerFlowDefinition(self, "example", + flow_definition_name="example", + human_loop_activation_config=SagemakerFlowDefinitionHumanLoopActivationConfig( + human_loop_activation_conditions_config=SagemakerFlowDefinitionHumanLoopActivationConfigHumanLoopActivationConditionsConfig( + human_loop_activation_conditions=" {\n\t\t\t\"Conditions\": [\n\t\t\t {\n\t\t\t\t\"ConditionType\": \"Sampling\",\n\t\t\t\t\"ConditionParameters\": {\n\t\t\t\t \"RandomSamplingPercentage\": 5\n\t\t\t\t}\n\t\t\t }\n\t\t\t]\n\t\t}\n\n" + ) + ), + human_loop_config=SagemakerFlowDefinitionHumanLoopConfig( + human_task_ui_arn=Token.as_string(aws_sagemaker_human_task_ui_example.arn), + task_availability_lifetime_in_seconds=1, + task_count=1, + task_description="example", + task_title="example", + workteam_arn=Token.as_string(aws_sagemaker_workteam_example.arn) + ), + human_loop_request_source=SagemakerFlowDefinitionHumanLoopRequestSource( + aws_managed_human_loop_request_source="AWS/Textract/AnalyzeDocument/Forms/V1" + ), + output_config=SagemakerFlowDefinitionOutputConfig( + s3_output_path="s3://${" + aws_s3_bucket_example.bucket + "}/" + ), + role_arn=Token.as_string(aws_iam_role_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `flow_definition_name` - (Required) The name of your flow definition. +* `human_loop_config` - (Required) An object containing information about the tasks the human reviewers will perform. See [Human Loop Config](#human-loop-config) details below. +* `role_arn` - (Required) The Amazon Resource Name (ARN) of the role needed to call other services on your behalf. +* `output_config` - (Required) An object containing information about where the human review results will be uploaded. See [Output Config](#output-config) details below. +* `human_loop_activation_config` - (Optional) An object containing information about the events that trigger a human workflow. See [Human Loop Activation Config](#human-loop-activation-config) details below. +* `human_loop_request_source` - (Optional) Container for configuring the source of human task requests. Use to specify if Amazon Rekognition or Amazon Textract is used as an integration source. See [Human Loop Request Source](#human-loop-request-source) details below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Human Loop Config + +* `human_task_ui_arn` - (Required) The Amazon Resource Name (ARN) of the human task user interface. +* `public_workforce_task_price` - (Optional) Defines the amount of money paid to an Amazon Mechanical Turk worker for each task performed. See [Public Workforce Task Price](#public-workforce-task-price) details below. +* `task_availability_lifetime_in_seconds` - (Required) The length of time that a task remains available for review by human workers. Valid value range between `1` and `864000`. +* `task_count` - (Required) The number of distinct workers who will perform the same task on each object. Valid value range between `1` and `3`. +* `task_description` - (Required) A description for the human worker task. 
+* `task_keywords` - (Optional) An array of keywords used to describe the task so that workers can discover the task.
+* `task_time_limit_in_seconds` - (Optional) The amount of time that a worker has to complete a task. The default value is `3600` seconds.
+* `task_title` - (Required) A title for the human worker task.
+* `workteam_arn` - (Required) The Amazon Resource Name (ARN) of a team of workers. For public workforces, see the [AWS Docs](https://docs.aws.amazon.com/sagemaker/latest/dg/sms-workforce-management-public.html).
+
+#### Public Workforce Task Price
+
+* `amount_in_usd` - (Optional) Defines the amount of money paid to an Amazon Mechanical Turk worker in United States dollars. See [Amount In Usd](#amount-in-usd) details below.
+
+##### Amount In Usd
+
+* `cents` - (Optional) The fractional portion, in cents, of the amount. Valid value range between `0` and `99`.
+* `dollars` - (Optional) The whole number of dollars in the amount. Valid value range between `0` and `2`.
+* `tenth_fractions_of_a_cent` - (Optional) Fractions of a cent, in tenths. Valid value range between `0` and `9`.
+
+### Human Loop Activation Config
+
+* `human_loop_activation_conditions_config` - (Required) Defines under what conditions SageMaker creates a human loop. See [Human Loop Activation Conditions Config](#human-loop-activation-conditions-config) details below.
+
+#### Human Loop Activation Conditions Config
+
+* `human_loop_activation_conditions` - (Required) A JSON document expressing use-case specific conditions declaratively. If any condition is matched, atomic tasks are created against the configured work team. For more information about how to structure the JSON, see [JSON Schema for Human Loop Activation Conditions in Amazon Augmented AI](https://docs.aws.amazon.com/sagemaker/latest/dg/a2i-human-fallback-conditions-json-schema.html).
+
+### Human Loop Request Source
+
+* `aws_managed_human_loop_request_source` - (Required) Specifies whether Amazon Rekognition or Amazon Textract is used as the integration source. Valid values are: `AWS/Rekognition/DetectModerationLabels/Image/V3` and `AWS/Textract/AnalyzeDocument/Forms/V1`.
+
+### Output Config
+
+* `s3_output_path` - (Required) The Amazon S3 path where the object containing human output will be made available.
+* `kms_key_id` - (Optional) The AWS Key Management Service (KMS) key ARN for server-side encryption.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Flow Definition.
+* `id` - The name of the Flow Definition.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Flow Definitions using the `flow_definition_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Flow Definitions using the `flow_definition_name`.
For example:
+
+```console
+% terraform import aws_sagemaker_flow_definition.example example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_human_task_ui.html.markdown b/website/docs/cdktf/python/r/sagemaker_human_task_ui.html.markdown
new file mode 100644
index 00000000000..f10e4087cfa
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_human_task_ui.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_human_task_ui"
+description: |-
+  Provides a SageMaker Human Task UI resource.
+---
+
+
+# Resource: aws_sagemaker_human_task_ui
+
+Provides a SageMaker Human Task UI resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_human_task_ui import SagemakerHumanTaskUi
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerHumanTaskUi(self, "example",
+            human_task_ui_name="example",
+            ui_template=SagemakerHumanTaskUiUiTemplate(
+                content=Token.as_string(
+                    Fn.file("sagemaker-human-task-ui-template.html"))
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `human_task_ui_name` - (Required) The name of the Human Task UI.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `ui_template` - (Required) The Liquid template for the worker user interface. See [UI Template](#ui-template) below.
+
+### UI Template
+
+* `content` - (Required) The content of the Liquid template for the worker user interface.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Human Task UI.
+* `id` - The name of the Human Task UI.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `ui_template` - The Liquid template for the worker user interface, with the additional exported attributes documented below.
+
+### UI Template
+
+* `content_sha256` - The SHA-256 digest of the contents of the template.
+* `url` - The URL for the user interface template.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Human Task UIs using the `human_task_ui_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Human Task UIs using the `human_task_ui_name`.
For example:
+
+```console
+% terraform import aws_sagemaker_human_task_ui.example example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_image.html.markdown b/website/docs/cdktf/python/r/sagemaker_image.html.markdown
new file mode 100644
index 00000000000..87bc8e8e1d3
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_image.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_image"
+description: |-
+  Provides a SageMaker Image resource.
+---
+
+
+# Resource: aws_sagemaker_image
+
+Provides a SageMaker Image resource.
+
+## Example Usage
+
+### Basic usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_image import SagemakerImage
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerImage(self, "example",
+            image_name="example",
+            role_arn=test.arn
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `image_name` - (Required) The name of the image. Must be unique to your account.
+* `role_arn` - (Required) The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.
+* `display_name` - (Optional) The display name of the image. When the image is added to a domain, it must be unique to the domain.
+* `description` - (Optional) The description of the image.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Image.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Image.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Images using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Images using the `name`.
For example: + +```console +% terraform import aws_sagemaker_image.test_image my-code-repo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_image_version.html.markdown b/website/docs/cdktf/python/r/sagemaker_image_version.html.markdown new file mode 100644 index 00000000000..9c0d4b51f30 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_image_version.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_image_version" +description: |- + Provides a SageMaker Image Version resource. +--- + + + +# Resource: aws_sagemaker_image_version + +Provides a SageMaker Image Version resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_image_version import SagemakerImageVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerImageVersion(self, "test", + base_image="012345678912.dkr.ecr.us-west-2.amazonaws.com/image:latest", + image_name=Token.as_string(aws_sagemaker_image_test.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `image_name` - (Required) The name of the image. Must be unique to your account. +* `base_image` - (Required) The registry path of the container image on which this image version is based. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Image. +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Image Version. +* `image_arn`- The Amazon Resource Name (ARN) of the image the version is based on. +* `container_image` - The registry path of the container image that contains this image version. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Image Versions using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Image Versions using the `name`. For example: + +```console +% terraform import aws_sagemaker_image_version.test_image my-code-repo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_model.html.markdown b/website/docs/cdktf/python/r/sagemaker_model.html.markdown new file mode 100644 index 00000000000..3ee9430d922 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_model.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_model" +description: |- + Provides a SageMaker model resource. +--- + + + +# Resource: aws_sagemaker_model + +Provides a SageMaker model resource. + +## Example Usage + +Basic usage: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.data_aws_sagemaker_prebuilt_ecr_image import DataAwsSagemakerPrebuiltEcrImage
+from imports.aws.iam_role import IamRole
+from imports.aws.sagemaker_model import SagemakerModel
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        assume_role = DataAwsIamPolicyDocument(self, "assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["sagemaker.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        test = DataAwsSagemakerPrebuiltEcrImage(self, "test",
+            repository_name="kmeans"
+        )
+        example = IamRole(self, "example",
+            assume_role_policy=Token.as_string(assume_role.json)
+        )
+        aws_sagemaker_model_example = SagemakerModel(self, "example_3",
+            execution_role_arn=example.arn,
+            name="my-model",
+            primary_container=SagemakerModelPrimaryContainer(
+                image=Token.as_string(test.registry_path)
+            )
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sagemaker_model_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the model (must be unique). If omitted, Terraform will assign a random, unique name.
+* `primary_container` - (Optional) The primary docker image containing inference code that is used when the model is deployed for predictions. If not specified, the `container` argument is required. Fields are documented below.
+* `execution_role_arn` - (Required) A role that SageMaker can assume to access model artifacts and docker images for deployment.
+* `inference_execution_config` - (Optional) Specifies details of how containers in a multi-container endpoint are called. See [Inference Execution Config](#inference-execution-config).
+* `container` - (Optional) Specifies containers in the inference pipeline. If not specified, the `primary_container` argument is required. Fields are documented below.
+* `enable_network_isolation` - (Optional) Isolates the model container. No inbound or outbound network calls can be made to or from the model container.
+* `vpc_config` - (Optional) Specifies the VPC that you want your model to connect to. VpcConfig is used in hosting services and in batch transform.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `primary_container` and `container` blocks both support:
+
+* `image` - (Optional) The registry path where the inference code image is stored in Amazon ECR.
+* `mode` - (Optional) Whether the container hosts a single model or multiple models. Valid values are `SingleModel` and `MultiModel`. The default value is `SingleModel`.
+* `model_data_url` - (Optional) The URL for the S3 location where model artifacts are stored.
+* `model_package_name` - (Optional) The Amazon Resource Name (ARN) of the model package to use to create the model.
+* `container_hostname` - (Optional) The DNS host name for the container.
+* `environment` - (Optional) Environment variables for the Docker container, as a map of key/value pairs.
+* `image_config` - (Optional) Specifies whether the model container is in Amazon ECR or a private Docker registry accessible from your Amazon Virtual Private Cloud (VPC). For more information, see [Using a Private Docker Registry for Real-Time Inference Containers](https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-containers-inference-private.html). See [Image Config](#image-config).
+
+### Image Config
+
+* `repository_access_mode` - (Required) Specifies whether the model container is in Amazon ECR or a private Docker registry accessible from your Amazon Virtual Private Cloud (VPC). Allowed values are: `Platform` and `Vpc`.
+* `repository_auth_config` - (Optional) Specifies an authentication configuration for the private Docker registry where your model image is hosted. Specify a value for this property only if you specified `Vpc` as the value for the `repository_access_mode` field, and the private Docker registry where the model image is hosted requires authentication. See [Repository Auth Config](#repository-auth-config).
+
+#### Repository Auth Config
+
+* `repository_credentials_provider_arn` - (Required) The Amazon Resource Name (ARN) of an AWS Lambda function that provides credentials to authenticate to the private Docker registry where your model image is hosted. For information about how to create an AWS Lambda function, see [Create a Lambda function with the console](https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html) in the _AWS Lambda Developer Guide_.
+
+### Inference Execution Config
+
+* `mode` - (Required) How containers in a multi-container endpoint are run. Valid values are `Serial` and `Direct`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `name` - The name of the model.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this model.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import models using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import models using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_model.test_model model-foo
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_model_package_group.html.markdown b/website/docs/cdktf/python/r/sagemaker_model_package_group.html.markdown
new file mode 100644
index 00000000000..988a5559605
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_model_package_group.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_model_package_group"
+description: |-
+  Provides a SageMaker Model Package Group resource.
+---
+
+
+# Resource: aws_sagemaker_model_package_group
+
+Provides a SageMaker Model Package Group resource.
+ +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_model_package_group import SagemakerModelPackageGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerModelPackageGroup(self, "example", + model_package_group_name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `model_package_group_name` - (Required) The name of the model group. +* `model_package_group_description` - (Optional) A description for the model group. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Model Package Group. +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Model Package Group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Model Package Groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Model Package Groups using the `name`. For example: + +```console +% terraform import aws_sagemaker_model_package_group.test_model_package_group my-code-repo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_model_package_group_policy.html.markdown b/website/docs/cdktf/python/r/sagemaker_model_package_group_policy.html.markdown new file mode 100644 index 00000000000..378524e7e5b --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_model_package_group_policy.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_model_package_group_policy" +description: |- + Provides a SageMaker Model Package Group Policy resource. +--- + + + +# Resource: aws_sagemaker_model_package_group_policy + +Provides a SageMaker Model Package Group Policy resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.sagemaker_model_package_group import SagemakerModelPackageGroup
+from imports.aws.sagemaker_model_package_group_policy import SagemakerModelPackageGroupPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SagemakerModelPackageGroup(self, "example",
+            model_package_group_name="example"
+        )
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_2",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sagemaker:DescribeModelPackage", "sagemaker:ListModelPackages"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=[Token.as_string(current.account_id)],
+                    type="AWS"
+                )],
+                resources=[example.arn],
+                sid="AddPermModelPackageGroup"
+            )]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_sagemaker_model_package_group_policy_example = SagemakerModelPackageGroupPolicy(self, "example_3",
+            model_package_group_name=example.model_package_group_name,
+            resource_policy=Token.as_string(Fn.jsonencode(Fn.jsondecode(Token.as_string(data_aws_iam_policy_document_example.json))))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sagemaker_model_package_group_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `model_package_group_name` - (Required) The name of the model package group.
+* `resource_policy` - (Required) The resource policy for the model package group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Model Package Group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Model Package Group Policies using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Model Package Group Policies using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_model_package_group_policy.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_monitoring_schedule.html.markdown b/website/docs/cdktf/python/r/sagemaker_monitoring_schedule.html.markdown
new file mode 100644
index 00000000000..3687cd5e36d
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_monitoring_schedule.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_monitoring_schedule"
+description: |-
+  Provides a SageMaker Monitoring Schedule resource.
+---
+
+
+
+# Resource: aws_sagemaker_monitoring_schedule
+
+Provides a SageMaker monitoring schedule resource.
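+
+An explicit cron expression can be attached through the optional `schedule_config` block documented below. A minimal sketch (the job definition name is a placeholder, and the nested struct class names follow the usual cdktf naming convention for this resource):
+
+```python
+# A sketch only: schedules a data quality job hourly via schedule_config.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.sagemaker_monitoring_schedule import (
+    SagemakerMonitoringSchedule,
+    SagemakerMonitoringScheduleMonitoringScheduleConfig,
+    SagemakerMonitoringScheduleMonitoringScheduleConfigScheduleConfig
+)
+class MonitoringScheduleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerMonitoringSchedule(self, "hourly",
+            name="my-hourly-schedule",
+            monitoring_schedule_config=SagemakerMonitoringScheduleMonitoringScheduleConfig(
+                monitoring_job_definition_name="my-data-quality-job-definition",  # placeholder
+                monitoring_type="DataQuality",
+                schedule_config=SagemakerMonitoringScheduleMonitoringScheduleConfigScheduleConfig(
+                    schedule_expression="cron(0 * ? * * *)"  # hourly
+                )
+            )
+        )
+```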
+
+## Example Usage
+
+Basic usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_monitoring_schedule import SagemakerMonitoringSchedule, SagemakerMonitoringScheduleMonitoringScheduleConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerMonitoringSchedule(self, "test",
+            monitoring_schedule_config=SagemakerMonitoringScheduleMonitoringScheduleConfig(
+                monitoring_job_definition_name=Token.as_string(aws_sagemaker_data_quality_job_definition_test.name),
+                monitoring_type="DataQuality"
+            ),
+            name="my-monitoring-schedule"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `monitoring_schedule_config` - (Required) The configuration object that specifies the monitoring schedule and defines the monitoring job. Fields are documented below.
+* `name` - (Optional) The name of the monitoring schedule. The name must be unique within an AWS Region within an AWS account. If omitted, Terraform will assign a random, unique name.
+* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### monitoring_schedule_config
+
+* `monitoring_job_definition_name` - (Required) The name of the monitoring job definition to schedule.
+* `monitoring_type` - (Required) The type of the monitoring job definition to schedule. Valid values are `DataQuality`, `ModelQuality`, `ModelBias`, or `ModelExplainability`.
+* `schedule_config` - (Optional) Configures the monitoring schedule. Fields are documented below.
+
+#### schedule_config
+
+* `schedule_expression` - (Required) A cron expression that describes details about the monitoring schedule. For example, an hourly schedule would be `cron(0 * ? * * *)`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this monitoring schedule.
+* `name` - The name of the monitoring schedule.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import monitoring schedules using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import monitoring schedules using the `name`.
For example: + +```console +% terraform import aws_sagemaker_monitoring_schedule.test_monitoring_schedule monitoring-schedule-foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_notebook_instance.html.markdown b/website/docs/cdktf/python/r/sagemaker_notebook_instance.html.markdown new file mode 100644 index 00000000000..b3c3ede2fd8 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_notebook_instance.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_notebook_instance" +description: |- + Provides a SageMaker Notebook Instance resource. +--- + + + +# Resource: aws_sagemaker_notebook_instance + +Provides a SageMaker Notebook Instance resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_notebook_instance import SagemakerNotebookInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerNotebookInstance(self, "ni", + instance_type="ml.t2.medium", + name="my-notebook-instance", + role_arn=role.arn, + tags={ + "Name": "foo" + } + ) +``` + +### Code repository usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_code_repository import SagemakerCodeRepository +from imports.aws.sagemaker_notebook_instance import SagemakerNotebookInstance +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SagemakerCodeRepository(self, "example", + code_repository_name="my-notebook-instance-code-repo", + git_config=SagemakerCodeRepositoryGitConfig( + repository_url="https://github.com/hashicorp/terraform-provider-aws.git" + ) + ) + SagemakerNotebookInstance(self, "ni", + default_code_repository=example.code_repository_name, + instance_type="ml.t2.medium", + name="my-notebook-instance", + role_arn=role.arn, + tags={ + "Name": "foo" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the notebook instance (must be unique). +* `role_arn` - (Required) The ARN of the IAM role to be used by the notebook instance which allows SageMaker to call other services on your behalf. +* `instance_type` - (Required) The name of ML compute instance type. +* `platform_identifier` - (Optional) The platform identifier of the notebook instance runtime environment. This value can be either `notebook-al1-v1`, `notebook-al2-v1`, or `notebook-al2-v2`, depending on which version of Amazon Linux you require. +* `volume_size` - (Optional) The size, in GB, of the ML storage volume to attach to the notebook instance. The default value is 5 GB. +* `subnet_id` - (Optional) The VPC subnet ID. +* `security_groups` - (Optional) The associated security groups. +* `accelerator_types` - (Optional) A list of Elastic Inference (EI) instance types to associate with this notebook instance. 
See [Elastic Inference Accelerator](https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html) for more details. Valid values: `ml.eia1.medium`, `ml.eia1.large`, `ml.eia1.xlarge`, `ml.eia2.medium`, `ml.eia2.large`, `ml.eia2.xlarge`.
+* `additional_code_repositories` - (Optional) An array of up to three Git repositories to associate with the notebook instance.
+  These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in [AWS CodeCommit](https://docs.aws.amazon.com/codecommit/latest/userguide/welcome.html) or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance.
+* `default_code_repository` - (Optional) The Git repository associated with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in [AWS CodeCommit](https://docs.aws.amazon.com/codecommit/latest/userguide/welcome.html) or in any other Git repository.
+* `direct_internet_access` - (Optional) Set to `Disabled` to disable internet access to the notebook. Requires `security_groups` and `subnet_id` to be set. Supported values: `Enabled` (Default) or `Disabled`. If set to `Disabled`, the notebook instance will be able to access resources only in your VPC, and will not be able to connect to Amazon SageMaker training and endpoint services unless you configure a NAT Gateway in your VPC.
+* `instance_metadata_service_configuration` - (Optional) Information on the IMDS configuration of the notebook instance. See details below.
+* `kms_key_id` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the notebook instance.
+* `lifecycle_config_name` - (Optional) The name of a lifecycle configuration to associate with the notebook instance.
+* `root_access` - (Optional) Whether root access is `Enabled` or `Disabled` for users of the notebook instance. The default value is `Enabled`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### instance_metadata_service_configuration
+
+* `minimum_instance_metadata_service_version` - (Optional) Indicates the minimum IMDS version that the notebook instance supports. If `1` is passed, both IMDSv1 and IMDSv2 are supported. If `2` is passed, only IMDSv2 is supported. Valid values are `1` and `2`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the notebook instance.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this notebook instance.
+* `url` - The URL that you use to connect to the Jupyter notebook that is running in your notebook instance.
+* `network_interface_id` - The network interface ID that Amazon SageMaker created at the time of creating the instance. Only available when setting `subnet_id`.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
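+
+As an illustration of the `instance_metadata_service_configuration` block documented above, a minimal sketch that enforces IMDSv2 (the role ARN is a placeholder, and the nested struct class name follows the usual cdktf naming convention):
+
+```python
+# A sketch only: restricts the notebook instance metadata service to IMDSv2.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.sagemaker_notebook_instance import (
+    SagemakerNotebookInstance,
+    SagemakerNotebookInstanceInstanceMetadataServiceConfiguration
+)
+class NotebookInstanceSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerNotebookInstance(self, "ni",
+            name="imdsv2-notebook-instance",
+            instance_type="ml.t2.medium",
+            role_arn="arn:aws:iam::123456789012:role/example",  # placeholder
+            root_access="Disabled",  # optional hardening, see root_access above
+            instance_metadata_service_configuration=SagemakerNotebookInstanceInstanceMetadataServiceConfiguration(
+                minimum_instance_metadata_service_version="2"  # IMDSv2 only
+            )
+        )
+```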
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Notebook Instances using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Notebook Instances using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_notebook_instance.test_notebook_instance my-notebook-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_notebook_instance_lifecycle_configuration.html.markdown b/website/docs/cdktf/python/r/sagemaker_notebook_instance_lifecycle_configuration.html.markdown
new file mode 100644
index 00000000000..60f28ae5ec0
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_notebook_instance_lifecycle_configuration.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_notebook_instance_lifecycle_configuration"
+description: |-
+  Provides a lifecycle configuration for SageMaker Notebook Instances.
+---
+
+
+
+# Resource: aws_sagemaker_notebook_instance_lifecycle_configuration
+
+Provides a lifecycle configuration for SageMaker Notebook Instances.
+
+## Example Usage
+
+Usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_notebook_instance_lifecycle_configuration import SagemakerNotebookInstanceLifecycleConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerNotebookInstanceLifecycleConfiguration(self, "lc",
+            name="foo",
+            on_create=Token.as_string(Fn.base64encode("echo foo")),
+            on_start=Token.as_string(Fn.base64encode("echo bar"))
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the lifecycle configuration (must be unique). If omitted, Terraform will assign a random, unique name.
+* `on_create` - (Optional) A shell script (base64-encoded) that runs only once, when the SageMaker Notebook Instance is created.
+* `on_start` - (Optional) A shell script (base64-encoded) that runs every time the SageMaker Notebook Instance is started, including the time it's created.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this lifecycle configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Notebook Instance Lifecycle Configurations using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Notebook Instance Lifecycle Configurations using the `name`.
For example:
+
+```console
+% terraform import aws_sagemaker_notebook_instance_lifecycle_configuration.lc foo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_project.html.markdown b/website/docs/cdktf/python/r/sagemaker_project.html.markdown
new file mode 100644
index 00000000000..2c00e3b7135
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_project.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_project"
+description: |-
+  Provides a SageMaker Project resource.
+---
+
+
+
+# Resource: aws_sagemaker_project
+
+Provides a SageMaker Project resource.
+
+-> Note: If you are trying to use SageMaker projects with SageMaker Studio, you will need to add a tag with the key `sagemaker:studio-visibility` and the value `true`. For more on the requirements to use projects and the permissions needed, see [AWS Docs](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-projects-templates-custom.html).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_project import SagemakerProject, SagemakerProjectServiceCatalogProvisioningDetails
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerProject(self, "example",
+            project_name="example",
+            service_catalog_provisioning_details=SagemakerProjectServiceCatalogProvisioningDetails(
+                product_id=Token.as_string(aws_servicecatalog_product_example.id)
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `project_name` - (Required) The name of the Project.
+* `project_description` - (Optional) A description for the project.
+* `service_catalog_provisioning_details` - (Required) The product ID and provisioning artifact ID to provision a service catalog. See [Service Catalog Provisioning Details](#service-catalog-provisioning-details) below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Service Catalog Provisioning Details
+
+* `path_id` - (Optional) The path identifier of the product. This value is optional if the product has a default path, and required if the product has more than one path.
+* `product_id` - (Required) The ID of the product to provision.
+* `provisioning_artifact_id` - (Optional) The ID of the provisioning artifact.
+* `provisioning_parameter` - (Optional) A list of key value pairs that you specify when you provision a product. See [Provisioning Parameter](#provisioning-parameter) below.
+
+#### Provisioning Parameter
+
+* `key` - (Required) The key that identifies a provisioning parameter.
+* `value` - (Optional) The value of the provisioning parameter.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Project.
+* `id` - The name of the Project.
+* `project_id` - The ID of the project.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Projects using the `project_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SageMaker Projects using the `project_name`. For example:
+
+```console
+% terraform import aws_sagemaker_project.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sagemaker_servicecatalog_portfolio_status.html.markdown b/website/docs/cdktf/python/r/sagemaker_servicecatalog_portfolio_status.html.markdown
new file mode 100644
index 00000000000..d2098f9e7e7
--- /dev/null
+++ b/website/docs/cdktf/python/r/sagemaker_servicecatalog_portfolio_status.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_servicecatalog_portfolio_status"
+description: |-
+  Manages status of Service Catalog in SageMaker. Service Catalog is used to create SageMaker projects.
+---
+
+
+
+# Resource: aws_sagemaker_servicecatalog_portfolio_status
+
+Manages the status of Service Catalog in SageMaker. Service Catalog is used to create SageMaker projects.
+
+## Example Usage
+
+Usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_servicecatalog_portfolio_status import SagemakerServicecatalogPortfolioStatus
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerServicecatalogPortfolioStatus(self, "example",
+            status="Enabled"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `status` - (Required) Whether Service Catalog is enabled or disabled in SageMaker. Valid values are `Enabled` and `Disabled`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS Region the Service Catalog portfolio status resides in.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the SageMaker Service Catalog portfolio status using the `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import the SageMaker Service Catalog portfolio status using the `id`.
For example: + +```console +% terraform import aws_sagemaker_servicecatalog_portfolio_status.example us-east-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_space.html.markdown b/website/docs/cdktf/python/r/sagemaker_space.html.markdown new file mode 100644 index 00000000000..2247a74b57a --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_space.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_space" +description: |- + Provides a SageMaker Space resource. +--- + + + +# Resource: aws_sagemaker_space + +Provides a SageMaker Space resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_space import SagemakerSpace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerSpace(self, "example", + domain_id=test.id, + space_name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `space_name` - (Required) The name of the space. +* `domain_id` - (Required) The ID of the associated Domain. +* `space_settings` - (Required) A collection of space settings. See [Space Settings](#space-settings) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Space Settings + +* `jupyter_server_app_settings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter-server-app-settings) below. +* `kernel_gateway_app_settings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel-gateway-app-settings) below. + +#### Kernel Gateway App Settings + +* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below. +* `custom_image` - (Optional) A list of custom SageMaker images that are configured to run as a KernelGateway app. see [Custom Image](#custom-image) below. +* `lifecycle_config_arns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations. + +#### Jupyter Server App Settings + +* `code_repository` - (Optional) A list of Git repositories that SageMaker automatically displays to users for cloning in the JupyterServer application. see [Code Repository](#code-repository) below. +* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below. +* `lifecycle_config_arns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations. + +##### Code Repository + +* `repository_url` - (Optional) The URL of the Git repository. + +##### Default Resource Spec + +* `instance_type` - (Optional) The instance type. +* `lifecycle_config_arn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource. 
+* `sagemaker_image_arn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker image created on the instance. +* `sagemaker_image_version_arn` - (Optional) The ARN of the image version created on the instance. + +##### Custom Image + +* `app_image_config_name` - (Required) The name of the App Image Config. +* `image_name` - (Required) The name of the Custom Image. +* `image_version_number` - (Optional) The version number of the Custom Image. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The space's Amazon Resource Name (ARN). +* `arn` - The space's Amazon Resource Name (ARN). +* `home_efs_file_system_uid` - The ID of the space's profile in the Amazon Elastic File System volume. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Spaces using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Spaces using the `id`. For example: + +```console +% terraform import aws_sagemaker_space.test_space arn:aws:sagemaker:us-west-2:123456789012:space/domain-id/space-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_studio_lifecycle_config.html.markdown b/website/docs/cdktf/python/r/sagemaker_studio_lifecycle_config.html.markdown new file mode 100644 index 00000000000..aa13547de68 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_studio_lifecycle_config.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_studio_lifecycle_config" +description: |- + Provides a SageMaker Studio Lifecycle Config resource. +--- + + + +# Resource: aws_sagemaker_studio_lifecycle_config + +Provides a SageMaker Studio Lifecycle Config resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_studio_lifecycle_config import SagemakerStudioLifecycleConfig +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerStudioLifecycleConfig(self, "example", + studio_lifecycle_config_app_type="JupyterServer", + studio_lifecycle_config_content=Token.as_string( + Fn.base64encode("echo Hello")), + studio_lifecycle_config_name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `studio_lifecycle_config_name` - (Required) The name of the Studio Lifecycle Configuration to create. +* `studio_lifecycle_config_app_type` - (Required) The App type that the Lifecycle Configuration is attached to. Valid values are `JupyterServer` and `KernelGateway`. 
+* `studio_lifecycle_config_content` - (Required) The content of your Studio Lifecycle Configuration script. This content must be base64 encoded. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Studio Lifecycle Config. +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Studio Lifecycle Config. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Studio Lifecycle Configs using the `studio_lifecycle_config_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Studio Lifecycle Configs using the `studio_lifecycle_config_name`. For example: + +```console +% terraform import aws_sagemaker_studio_lifecycle_config.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_user_profile.html.markdown b/website/docs/cdktf/python/r/sagemaker_user_profile.html.markdown new file mode 100644 index 00000000000..200aedaa045 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_user_profile.html.markdown @@ -0,0 +1,158 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_user_profile" +description: |- + Provides a SageMaker User Profile resource. +--- + + + +# Resource: aws_sagemaker_user_profile + +Provides a SageMaker User Profile resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sagemaker_user_profile import SagemakerUserProfile +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SagemakerUserProfile(self, "example", + domain_id=test.id, + user_profile_name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user_profile_name` - (Required) The name for the User Profile. +* `domain_id` - (Required) The ID of the associated Domain. +* `single_sign_on_user_identifier` - (Optional) A specifier for the type of value specified in `single_sign_on_user_value`. Currently, the only supported value is `UserName`. If the Domain's AuthMode is SSO, this field is required. If the Domain's AuthMode is not SSO, this field cannot be specified. +* `single_sign_on_user_value` - (Required) The username of the associated AWS Single Sign-On User for this User Profile. 
If the Domain's AuthMode is SSO, this field is required, and must match a valid username of a user in your directory. If the Domain's AuthMode is not SSO, this field cannot be specified. +* `user_settings` - (Required) The user settings. See [User Settings](#user-settings) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### User Settings + +* `execution_role` - (Required) The execution role ARN for the user. +* `security_groups` - (Optional) The security groups. +* `sharing_settings` - (Optional) The sharing settings. See [Sharing Settings](#sharing-settings) below. +* `tensor_board_app_settings` - (Optional) The TensorBoard app settings. See [TensorBoard App Settings](#tensorboard-app-settings) below. +* `jupyter_server_app_settings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter-server-app-settings) below. +* `kernel_gateway_app_settings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel-gateway-app-settings) below. +* `r_session_app_settings` - (Optional) The RSession app settings. See [RSession App Settings](#rsession-app-settings) below. +* `r_studio_server_pro_app_settings` - (Optional) A collection of settings that configure user interaction with the RStudioServerPro app. See [RStudio Server Pro App Settings](#rstudio-server-pro-app-settings) below. +* `canvas_app_settings` - (Optional) The Canvas app settings. See [Canvas App Settings](#canvas-app-settings) below. + +#### Canvas App Settings + +* `model_register_settings` - (Optional) The model registry settings for the SageMaker Canvas application. See [Model Register Settings](#model-register-settings) below. +* `time_series_forecasting_settings` - (Optional) Time series forecast settings for the Canvas app. see [Time Series Forecasting Settings](#time-series-forecasting-settings) below. +* `workspace_settings` - (Optional) The workspace settings for the SageMaker Canvas application. See [Workspace Settings](#workspace-settings) below. + +#### Sharing Settings + +* `notebook_output_option` - (Optional) Whether to include the notebook cell output when sharing the notebook. The default is `Disabled`. Valid values are `Allowed` and `Disabled`. +* `s3_kms_key_id` - (Optional) When `notebook_output_option` is Allowed, the AWS Key Management Service (KMS) encryption key ID used to encrypt the notebook cell output in the Amazon S3 bucket. +* `s3_output_path` - (Optional) When `notebook_output_option` is Allowed, the Amazon S3 bucket used to save the notebook cell output. + +#### TensorBoard App Settings + +* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below. + +#### Kernel Gateway App Settings + +* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below. +* `custom_image` - (Optional) A list of custom SageMaker images that are configured to run as a KernelGateway app. see [Custom Image](#custom-image) below. 
+* `lifecycle_config_arns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations.
+
+#### Jupyter Server App Settings
+
+* `code_repository` - (Optional) A list of Git repositories that SageMaker automatically displays to users for cloning in the JupyterServer application. see [Code Repository](#code-repository) below.
+* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below.
+* `lifecycle_config_arns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations.
+
+#### RSession App Settings
+
+* `default_resource_spec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below.
+* `custom_image` - (Optional) A list of custom SageMaker images that are configured to run as an RSession app. see [Custom Image](#custom-image) below.
+
+#### RStudio Server Pro App Settings
+
+* `access_status` - (Optional) Indicates whether the current user has access to the RStudioServerPro app. Valid values are `ENABLED` and `DISABLED`.
+* `user_group` - (Optional) The level of permissions that the user has within the RStudioServerPro app. This value defaults to `R_STUDIO_USER`. The `R_STUDIO_ADMIN` value allows the user access to the RStudio Administrative Dashboard. Valid values are `R_STUDIO_USER` and `R_STUDIO_ADMIN`.
+
+##### Code Repository
+
+* `repository_url` - (Optional) The URL of the Git repository.
+
+##### Default Resource Spec
+
+* `instance_type` - (Optional) The instance type.
+* `lifecycle_config_arn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource.
+* `sagemaker_image_arn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker image created on the instance.
+* `sagemaker_image_version_arn` - (Optional) The ARN of the image version created on the instance.
+
+##### Custom Image
+
+* `app_image_config_name` - (Required) The name of the App Image Config.
+* `image_name` - (Required) The name of the Custom Image.
+* `image_version_number` - (Optional) The version number of the Custom Image.
+
+##### Time Series Forecasting Settings
+
+* `amazon_forecast_role_arn` - (Optional) The IAM role that Canvas passes to Amazon Forecast for time series forecasting. By default, Canvas uses the execution role specified in the UserProfile that launches the Canvas app. If an execution role is not specified in the UserProfile, Canvas uses the execution role specified in the Domain that owns the UserProfile. To allow time series forecasting, this IAM role should have the [AmazonSageMakerCanvasForecastAccess](https://docs.aws.amazon.com/sagemaker/latest/dg/security-iam-awsmanpol-canvas.html#security-iam-awsmanpol-AmazonSageMakerCanvasForecastAccess) policy attached and `forecast.amazonaws.com` added in the trust relationship as a service principal.
+* `status` - (Optional) Describes whether time series forecasting is enabled or disabled in the Canvas app. Valid values are `ENABLED` and `DISABLED`.
+
+##### Model Register Settings
+
+* `cross_account_model_register_role_arn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker model registry account. Required only to register model versions created by a different SageMaker Canvas AWS account than the AWS account in which SageMaker model registry is set up.
+* `status` - (Optional) Describes whether the integration to the model registry is enabled or disabled in the Canvas application. Valid values are `ENABLED` and `DISABLED`. + +##### Workspace Settings + +* `s3_artifact_path` - (Optional) The Amazon S3 bucket used to store artifacts generated by Canvas. Updating the Amazon S3 location impacts existing configuration settings, and Canvas users no longer have access to their artifacts. Canvas users must log out and log back in to apply the new location. +* `s3_kms_key_id` - (Optional) The Amazon Web Services Key Management Service (KMS) encryption key ID that is used to encrypt artifacts generated by Canvas in the Amazon S3 bucket. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The user profile Amazon Resource Name (ARN). +* `arn` - The user profile Amazon Resource Name (ARN). +* `home_efs_file_system_uid` - The ID of the user's profile in the Amazon Elastic File System (EFS) volume. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker User Profiles using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker User Profiles using the `arn`. For example: + +```console +% terraform import aws_sagemaker_user_profile.test_user_profile arn:aws:sagemaker:us-west-2:123456789012:user-profile/domain-id/profile-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_workforce.html.markdown b/website/docs/cdktf/python/r/sagemaker_workforce.html.markdown new file mode 100644 index 00000000000..6c12da71550 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_workforce.html.markdown @@ -0,0 +1,154 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_workforce" +description: |- + Provides a SageMaker Workforce resource. +--- + + + +# Resource: aws_sagemaker_workforce + +Provides a SageMaker Workforce resource. + +## Example Usage + +### Cognito Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.cognito_user_pool import CognitoUserPool +from imports.aws.cognito_user_pool_client import CognitoUserPoolClient +from imports.aws.cognito_user_pool_domain import CognitoUserPoolDomain +from imports.aws.sagemaker_workforce import SagemakerWorkforce +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CognitoUserPool(self, "example", + name="example" + ) + aws_cognito_user_pool_client_example = CognitoUserPoolClient(self, "example_1", + generate_secret=True, + name="example", + user_pool_id=example.id + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_client_example.override_logical_id("example")
+        aws_cognito_user_pool_domain_example = CognitoUserPoolDomain(self, "example_2",
+            domain="example",
+            user_pool_id=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cognito_user_pool_domain_example.override_logical_id("example")
+        aws_sagemaker_workforce_example = SagemakerWorkforce(self, "example_3",
+            cognito_config=SagemakerWorkforceCognitoConfig(
+                client_id=Token.as_string(aws_cognito_user_pool_client_example.id),
+                user_pool=Token.as_string(aws_cognito_user_pool_domain_example.user_pool_id)
+            ),
+            workforce_name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sagemaker_workforce_example.override_logical_id("example")
+```
+
+### Oidc Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_workforce import SagemakerWorkforce
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerWorkforce(self, "example",
+            oidc_config=SagemakerWorkforceOidcConfig(
+                authorization_endpoint="https://example.com",
+                client_id="example",
+                client_secret="example",
+                issuer="https://example.com",
+                jwks_uri="https://example.com",
+                logout_endpoint="https://example.com",
+                token_endpoint="https://example.com",
+                user_info_endpoint="https://example.com"
+            ),
+            workforce_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `workforce_name` - (Required) The name of the Workforce (must be unique).
+* `cognito_config` - (Optional) Use this parameter to configure an Amazon Cognito private workforce. A single Cognito workforce is created using and corresponds to a single Amazon Cognito user pool. Conflicts with `oidc_config`. see [Cognito Config](#cognito-config) details below.
+* `oidc_config` - (Optional) Use this parameter to configure a private workforce using your own OIDC Identity Provider. Conflicts with `cognito_config`. see [OIDC Config](#oidc-config) details below.
+* `source_ip_config` - (Optional) A list of IP address ranges used to create an allow list of IP addresses for a private workforce. By default, a workforce isn't restricted to specific IP addresses. see [Source Ip Config](#source-ip-config) details below.
+* `workforce_vpc_config` - (Optional) Configure a workforce using a VPC. see [Workforce VPC Config](#workforce-vpc-config) details below.
+
+### Cognito Config
+
+* `client_id` - (Required) The client ID for your Amazon Cognito user pool.
+* `user_pool` - (Required) ID for your Amazon Cognito user pool.
+
+### Oidc Config
+
+* `authorization_endpoint` - (Required) The OIDC IdP authorization endpoint used to configure your private workforce.
+* `client_id` - (Required) The OIDC IdP client ID used to configure your private workforce.
+* `client_secret` - (Required) The OIDC IdP client secret used to configure your private workforce.
+* `issuer` - (Required) The OIDC IdP issuer used to configure your private workforce.
+* `jwks_uri` - (Required) The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce. +* `logout_endpoint` - (Required) The OIDC IdP logout endpoint used to configure your private workforce. +* `token_endpoint` - (Required) The OIDC IdP token endpoint used to configure your private workforce. +* `user_info_endpoint` - (Required) The OIDC IdP user information endpoint used to configure your private workforce. + +### Source Ip Config + +* `cidrs` - (Required) A list of up to 10 CIDR values. + +### Workforce VPC Config + +* `security_group_ids` - (Optional) The VPC security group IDs. The security groups must be for the same VPC as specified in the subnet. +* `subnets` - (Optional) The ID of the subnets in the VPC that you want to connect. +* `vpc_id` - (Optional) The ID of the VPC that the workforce uses for communication. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Workforce. +* `id` - The name of the Workforce. +* `subdomain` - The subdomain for your OIDC Identity Provider. +* `workforce_vpc_config.0.vpc_endpoint_id` - The IDs for the VPC service endpoints of your VPC workforce. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Workforces using the `workforce_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Workforces using the `workforce_name`. For example: + +```console +% terraform import aws_sagemaker_workforce.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sagemaker_workteam.html.markdown b/website/docs/cdktf/python/r/sagemaker_workteam.html.markdown new file mode 100644 index 00000000000..09e1fb064b8 --- /dev/null +++ b/website/docs/cdktf/python/r/sagemaker_workteam.html.markdown @@ -0,0 +1,131 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_workteam" +description: |- + Provides a SageMaker Workteam resource. +--- + + + +# Resource: aws_sagemaker_workteam + +Provides a SageMaker Workteam resource. + +## Example Usage + +### Cognito Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.sagemaker_workteam import SagemakerWorkteam, SagemakerWorkteamMemberDefinition, SagemakerWorkteamMemberDefinitionCognitoMemberDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerWorkteam(self, "example",
+            description="example",
+            member_definition=[SagemakerWorkteamMemberDefinition(
+                cognito_member_definition=SagemakerWorkteamMemberDefinitionCognitoMemberDefinition(
+                    client_id=Token.as_string(aws_cognito_user_pool_client_example.id),
+                    user_group=Token.as_string(aws_cognito_user_group_example.id),
+                    user_pool=Token.as_string(aws_cognito_user_pool_domain_example.user_pool_id)
+                )
+            )],
+            workforce_name=Token.as_string(aws_sagemaker_workforce_example.id),
+            workteam_name="example"
+        )
+```
+
+### Oidc Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sagemaker_workteam import SagemakerWorkteam, SagemakerWorkteamMemberDefinition, SagemakerWorkteamMemberDefinitionOidcMemberDefinition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SagemakerWorkteam(self, "example",
+            description="example",
+            member_definition=[SagemakerWorkteamMemberDefinition(
+                oidc_member_definition=SagemakerWorkteamMemberDefinitionOidcMemberDefinition(
+                    groups=["example"]
+                )
+            )],
+            workforce_name=Token.as_string(aws_sagemaker_workforce_example.id),
+            workteam_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Required) A description of the work team.
+* `workforce_name` - (Required) The name of the Workforce.
+* `workteam_name` - (Required) The name of the Workteam (must be unique).
+* `member_definition` - (Required) A list of Member Definitions that contains objects that identify the workers that make up the work team. Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). For private workforces created using Amazon Cognito use `cognito_member_definition`. For workforces created using your own OIDC identity provider (IdP) use `oidc_member_definition`. Do not provide input for both of these parameters in a single request. see [Member Definition](#member-definition) details below.
+* `notification_configuration` - (Optional) Configures notification of workers regarding available or expiring work items. see [Notification Configuration](#notification-configuration) details below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Member Definition
+
+* `cognito_member_definition` - (Optional) The Amazon Cognito user group that is part of the work team. See [Cognito Member Definition](#cognito-member-definition) details below.
+* `oidc_member_definition` - (Optional) A list of user groups that exist in your OIDC Identity Provider (IdP). One to ten groups can be used to create a single private work team. See [Oidc Member Definition](#oidc-member-definition) details below.
+
+#### Cognito Member Definition
+
+* `client_id` - (Required) An identifier for an application client. You must create the app client ID using Amazon Cognito.
+* `user_pool` - (Required) An identifier for a user pool.
The user pool must be in the same region as the service that you are calling. +* `user_group` - (Required) An identifier for a user group. + +#### Oidc Member Definition + +* `groups` - (Required) A list of comma separated strings that identifies user groups in your OIDC IdP. Each user group is made up of a group of private workers. + +### Notification Configuration + +* `notification_topic_arn` - (Required) The ARN for the SNS topic to which notifications should be published. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Workteam. +* `id` - The name of the Workteam. +* `subdomain` - The subdomain for your OIDC Identity Provider. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Workteams using the `workteam_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SageMaker Workteams using the `workteam_name`. For example: + +```console +% terraform import aws_sagemaker_workteam.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/scheduler_schedule.html.markdown b/website/docs/cdktf/python/r/scheduler_schedule.html.markdown new file mode 100644 index 00000000000..3f16abfe3f4 --- /dev/null +++ b/website/docs/cdktf/python/r/scheduler_schedule.html.markdown @@ -0,0 +1,229 @@ +--- +subcategory: "EventBridge Scheduler" +layout: "aws" +page_title: "AWS: aws_scheduler_schedule" +description: |- + Provides an EventBridge Scheduler Schedule resource. +--- + + + +# Resource: aws_scheduler_schedule + +Provides an EventBridge Scheduler Schedule resource. + +You can find out more about EventBridge Scheduler in the [User Guide](https://docs.aws.amazon.com/scheduler/latest/UserGuide/what-is-scheduler.html). + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.scheduler_schedule import SchedulerSchedule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SchedulerSchedule(self, "example", + flexible_time_window=SchedulerScheduleFlexibleTimeWindow( + mode="OFF" + ), + group_name="default", + name="my-schedule", + schedule_expression="rate(1 hours)", + target=SchedulerScheduleTarget( + arn=Token.as_string(aws_sqs_queue_example.arn), + role_arn=Token.as_string(aws_iam_role_example.arn) + ) + ) +``` + +### Universal Target + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.scheduler_schedule import SchedulerSchedule +from imports.aws.sqs_queue import SqsQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SqsQueue(self, "example") + aws_scheduler_schedule_example = SchedulerSchedule(self, "example_1", + flexible_time_window=SchedulerScheduleFlexibleTimeWindow( + mode="OFF" + ), + name="my-schedule", + schedule_expression="rate(1 hours)", + target=SchedulerScheduleTarget( + arn="arn:aws:scheduler:::aws-sdk:sqs:sendMessage", + input=Token.as_string( + Fn.jsonencode({ + "MessageBody": "Greetings, programs!", + "QueueUrl": example.url + })), + role_arn=Token.as_string(aws_iam_role_example.arn) + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_scheduler_schedule_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `flexible_time_window` - (Required) Configures a time window during which EventBridge Scheduler invokes the schedule. Detailed below. +* `schedule_expression` - (Required) Defines when the schedule runs. Read more in [Schedule types on EventBridge Scheduler](https://docs.aws.amazon.com/scheduler/latest/UserGuide/schedule-types.html). +* `target` - (Required) Configures the target of the schedule. Detailed below. + +The following arguments are optional: + +* `description` - (Optional) Brief description of the schedule. +* `end_date` - (Optional) The date, in UTC, before which the schedule can invoke its target. Depending on the schedule's recurrence expression, invocations might stop on, or before, the end date you specify. EventBridge Scheduler ignores the end date for one-time schedules. Example: `2030-01-01T01:00:00Z`. +* `group_name` - (Optional, Forces new resource) Name of the schedule group to associate with this schedule. When omitted, the `default` schedule group is used. +* `kms_key_arn` - (Optional) ARN for the customer managed KMS key that EventBridge Scheduler will use to encrypt and decrypt your data. +* `name` - (Optional, Forces new resource) Name of the schedule. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `schedule_expression_timezone` - (Optional) Timezone in which the scheduling expression is evaluated. Defaults to `UTC`. Example: `Australia/Sydney`. +* `start_date` - (Optional) The date, in UTC, after which the schedule can begin invoking its target. 
Depending on the schedule's recurrence expression, invocations might occur on, or after, the start date you specify. EventBridge Scheduler ignores the start date for one-time schedules. Example: `2030-01-01T01:00:00Z`. +* `state` - (Optional) Specifies whether the schedule is enabled or disabled. One of: `ENABLED` (default), `DISABLED`. + +### flexible_time_window Configuration Block + +* `maximum_window_in_minutes` - (Optional) Maximum time window during which a schedule can be invoked. Ranges from `1` to `1440` minutes. +* `mode` - (Required) Determines whether the schedule is invoked within a flexible time window. One of: `OFF`, `FLEXIBLE`. + +### target Configuration Block + +The following arguments are required: + +* `arn` - (Required) ARN of the target of this schedule, such as a SQS queue or ECS cluster. For universal targets, this is a [Service ARN specific to the target service](https://docs.aws.amazon.com/scheduler/latest/UserGuide/managing-targets-universal.html#supported-universal-targets). +* `role_arn` - (Required) ARN of the IAM role that EventBridge Scheduler will use for this target when the schedule is invoked. Read more in [Set up the execution role](https://docs.aws.amazon.com/scheduler/latest/UserGuide/setting-up.html#setting-up-execution-role). + +The following arguments are optional: + +* `dead_letter_config` - (Optional) Information about an Amazon SQS queue that EventBridge Scheduler uses as a dead-letter queue for your schedule. If specified, EventBridge Scheduler delivers failed events that could not be successfully delivered to a target to the queue. Detailed below. +* `ecs_parameters` - (Optional) Templated target type for the Amazon ECS [`RunTask`](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html) API operation. Detailed below. +* `eventbridge_parameters` - (Optional) Templated target type for the EventBridge [`PutEvents`](https://docs.aws.amazon.com/eventbridge/latest/APIReference/API_PutEvents.html) API operation. Detailed below. +* `input` - (Optional) Text, or well-formed JSON, passed to the target. Read more in [Universal target](https://docs.aws.amazon.com/scheduler/latest/UserGuide/managing-targets-universal.html). +* `kinesis_parameters` - (Optional) Templated target type for the Amazon Kinesis [`PutRecord`](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecord.html) API operation. Detailed below. +* `retry_policy` - (Optional) Information about the retry policy settings. Detailed below. +* `sagemaker_pipeline_parameters` - (Optional) Templated target type for the Amazon SageMaker [`StartPipelineExecution`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_StartPipelineExecution.html) API operation. Detailed below. +* `sqs_parameters` - (Optional) The templated target type for the Amazon SQS [`SendMessage`](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_SendMessage.html) API operation. Detailed below. + +#### dead_letter_config Configuration Block + +* `arn` - (Required) ARN of the SQS queue specified as the destination for the dead-letter queue. + +#### ecs_parameters Configuration Block + +The following arguments are required: + +* `task_definition_arn` - (Required) ARN of the task definition to use. + +The following arguments are optional: + +* `capacity_provider_strategy` - (Optional) Up to `6` capacity provider strategies to use for the task. Detailed below. +* `enable_ecs_managed_tags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the task. 
For more information, see [Tagging Your Amazon ECS Resources](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-using-tags.html) in the Amazon ECS Developer Guide. +* `enable_execute_command` - (Optional) Specifies whether to enable the execute command functionality for the containers in this task. +* `group` - (Optional) Specifies an ECS task group for the task. At most 255 characters. +* `launch_type` - (Optional) Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch types (compatibilities) of the target task. One of: `EC2`, `FARGATE`, `EXTERNAL`. +* `network_configuration` - (Optional) Configures the networking associated with the task. Detailed below. +* `placement_constraints` - (Optional) A set of up to 10 placement constraints to use for the task. Detailed below. +* `placement_strategy` - (Optional) A set of up to 5 placement strategies. Detailed below. +* `platform_version` - (Optional) Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as `1.1.0`. +* `propagate_tags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. One of: `TASK_DEFINITION`. +* `reference_id` - (Optional) Reference ID to use for the task. +* `tags` - (Optional) The metadata that you apply to the task. Each tag consists of a key and an optional value. For more information, see [`RunTask`](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html) in the Amazon ECS API Reference. +* `task_count` - (Optional) The number of tasks to create. Ranges from `1` (default) to `10`. + +##### capacity_provider_strategy Configuration Block + +* `base` - (Optional) How many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Ranges from `0` (default) to `100000`. +* `capacity_provider` - (Required) Short name of the capacity provider. +* `weight` - (Optional) Designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Ranges from `0` to `1000`. + +##### network_configuration Configuration Block + +* `assign_public_ip` - (Optional) Specifies whether the task's elastic network interface receives a public IP address. This attribute is a boolean type, where `true` maps to `ENABLED` and `false` to `DISABLED`. You can specify `true` only when the `launch_type` is set to `FARGATE`. +* `security_groups` - (Optional) Set of 1 to 5 Security Group IDs to be associated with the task. These security groups must all be in the same VPC. +* `subnets` - (Optional) Set of 1 to 16 subnets to be associated with the task. These subnets must all be in the same VPC. + +##### placement_constraints Configuration Block + +* `expression` - (Optional) A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is `distinctInstance`. For more information, see [Cluster query language](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html) in the Amazon ECS Developer Guide. +* `type` - (Required) The type of constraint. One of: `distinctInstance`, `memberOf`. + +##### placement_strategy Configuration Block + +* `field` - (Optional) The field to apply the placement strategy against.
+* `type` - (Required) The type of placement strategy. One of: `random`, `spread`, `binpack`. + +#### eventbridge_parameters Configuration Block + +* `detail_type` - (Required) Free-form string used to decide what fields to expect in the event detail. Up to 128 characters. +* `source` - (Required) Source of the event. + +#### kinesis_parameters Configuration Block + +* `partition_key` - (Required) Specifies the shard to which EventBridge Scheduler sends the event. Up to 256 characters. + +#### retry_policy Configuration Block + +* `maximum_event_age_in_seconds` - (Optional) Maximum amount of time, in seconds, to continue to make retry attempts. Ranges from `60` to `86400` (default). +* `maximum_retry_attempts` - (Optional) Maximum number of retry attempts to make before the request fails. Ranges from `0` to `185` (default). + +#### sagemaker_pipeline_parameters Configuration Block + +* `pipeline_parameter` - (Optional) Set of up to 200 parameter names and values to use when executing the SageMaker Model Building Pipeline. Detailed below. + +##### pipeline_parameter Configuration Block + +* `name` - (Required) Name of parameter to start execution of a SageMaker Model Building Pipeline. +* `value` - (Required) Value of parameter to start execution of a SageMaker Model Building Pipeline. + +#### sqs_parameters Configuration Block + +* `message_group_id` - (Optional) FIFO message group ID to use as the target. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Name of the schedule. +* `arn` - ARN of the schedule. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import schedules using the combination `group_name/name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import schedules using the combination `group_name/name`. For example: + +```console +% terraform import aws_scheduler_schedule.example my-schedule-group/my-schedule +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/scheduler_schedule_group.html.markdown b/website/docs/cdktf/python/r/scheduler_schedule_group.html.markdown new file mode 100644 index 00000000000..8582e672c99 --- /dev/null +++ b/website/docs/cdktf/python/r/scheduler_schedule_group.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "EventBridge Scheduler" +layout: "aws" +page_title: "AWS: aws_scheduler_schedule_group" +description: |- + Provides an EventBridge Scheduler Schedule Group resource. +--- + + + +# Resource: aws_scheduler_schedule_group + +Provides an EventBridge Scheduler Schedule Group resource. + +You can find out more about EventBridge Scheduler in the [User Guide](https://docs.aws.amazon.com/scheduler/latest/UserGuide/what-is-scheduler.html). + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
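+# Schedules are organized into named groups; deleting a schedule group also
+# deletes the schedules it contains.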
+# +from imports.aws.scheduler_schedule_group import SchedulerScheduleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SchedulerScheduleGroup(self, "example", + name="my-schedule-group" + ) +``` + +## Argument Reference + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the schedule group. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix`. +* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Name of the schedule group. +* `arn` - ARN of the schedule group. +* `creation_date` - Time at which the schedule group was created. +* `last_modification_date` - Time at which the schedule group was last modified. +* `state` - State of the schedule group. Can be `ACTIVE` or `DELETING`. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5m`) +- `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import schedule groups using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import schedule groups using the `name`. For example: + +```console +% terraform import aws_scheduler_schedule_group.example my-schedule-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/schemas_discoverer.html.markdown b/website/docs/cdktf/python/r/schemas_discoverer.html.markdown new file mode 100644 index 00000000000..2691ddfd016 --- /dev/null +++ b/website/docs/cdktf/python/r/schemas_discoverer.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "EventBridge Schemas" +layout: "aws" +page_title: "AWS: aws_schemas_discoverer" +description: |- + Provides an EventBridge Schema Discoverer resource. +--- + + + +# Resource: aws_schemas_discoverer + +Provides an EventBridge Schema Discoverer resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
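+# The discoverer below watches the custom "chat-messages" event bus and
+# infers schemas from the events published to it.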
+# +from imports.aws.cloudwatch_event_bus import CloudwatchEventBus +from imports.aws.schemas_discoverer import SchemasDiscoverer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + messenger = CloudwatchEventBus(self, "messenger", + name="chat-messages" + ) + SchemasDiscoverer(self, "test", + description="Auto discover event schemas", + source_arn=messenger.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `source_arn` - (Required) The ARN of the event bus to discover event schemas on. +* `description` - (Optional) The description of the discoverer. Maximum of 256 characters. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the discoverer. +* `id` - The ID of the discoverer. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge discoverers using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EventBridge discoverers using the `id`. For example: + +```console +% terraform import aws_schemas_discoverer.test 123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/schemas_registry.html.markdown b/website/docs/cdktf/python/r/schemas_registry.html.markdown new file mode 100644 index 00000000000..c1f58c67765 --- /dev/null +++ b/website/docs/cdktf/python/r/schemas_registry.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "EventBridge Schemas" +layout: "aws" +page_title: "AWS: aws_schemas_registry" +description: |- + Provides an EventBridge Custom Schema Registry resource. +--- + + + +# Resource: aws_schemas_registry + +Provides an EventBridge Custom Schema Registry resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.schemas_registry import SchemasRegistry +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SchemasRegistry(self, "test", + description="A custom schema registry", + name="my_own_registry" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the custom event schema registry. 
Maximum of 64 characters consisting of lower case letters, upper case letters, 0-9, ., -, _. +* `description` - (Optional) The description of the registry. Maximum of 256 characters. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the registry. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge schema registries using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EventBridge schema registries using the `name`. For example: + +```console +% terraform import aws_schemas_registry.test my_own_registry +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/schemas_registry_policy.html.markdown b/website/docs/cdktf/python/r/schemas_registry_policy.html.markdown new file mode 100644 index 00000000000..482765f3bc9 --- /dev/null +++ b/website/docs/cdktf/python/r/schemas_registry_policy.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "EventBridge Schemas" +layout: "aws" +page_title: "AWS: aws_schemas_registry_policy" +description: |- + Terraform resource for managing an AWS EventBridge Schemas Registry Policy. +--- + + + +# Resource: aws_schemas_registry_policy + +Terraform resource for managing an AWS EventBridge Schemas Registry Policy. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.schemas_registry_policy import SchemasRegistryPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = DataAwsIamPolicyDocument(self, "example", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["schemas:*"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["109876543210"], + type="AWS" + ) + ], + resources=["arn:aws:schemas:us-east-1:012345678901:registry/example", "arn:aws:schemas:us-east-1:012345678901:schema/example*" + ], + sid="example" + ) + ] + ) + aws_schemas_registry_policy_example = SchemasRegistryPolicy(self, "example_1", + policy=Token.as_string(example.json), + registry_name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
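+ # Without the override, the logical ID would be derived from the construct
+ # ID "example_1" passed above.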
+ aws_schemas_registry_policy_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `registry_name` - (Required) Name of EventBridge Schema Registry +* `policy` - (Required) Resource Policy for EventBridge Schema Registry + +## Attribute Reference + +This resource exports no additional attributes. + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `5m`) +* `update` - (Default `5m`) +* `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Schema Registry Policy using the `registry_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EventBridge Schema Registry Policy using the `registry_name`. For example: + +```console +% terraform import aws_schemas_registry_policy.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/schemas_schema.html.markdown b/website/docs/cdktf/python/r/schemas_schema.html.markdown new file mode 100644 index 00000000000..392e20031df --- /dev/null +++ b/website/docs/cdktf/python/r/schemas_schema.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "EventBridge Schemas" +layout: "aws" +page_title: "AWS: aws_schemas_schema" +description: |- + Provides an EventBridge Schema resource. +--- + + + +# Resource: aws_schemas_schema + +Provides an EventBridge Schema resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.schemas_registry import SchemasRegistry +from imports.aws.schemas_schema import SchemasSchema +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = SchemasRegistry(self, "test", + name="my_own_registry" + ) + aws_schemas_schema_test = SchemasSchema(self, "test_1", + content=Token.as_string( + Fn.jsonencode({ + "components": { + "schemas": { + "Event": { + "properties": { + "name": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "info": { + "title": "Event", + "version": "1.0.0" + }, + "openapi": "3.0.0", + "paths": {} + })), + description="The schema definition for my event", + name="my_schema", + registry_name=test.name, + type="OpenApi3" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_schemas_schema_test.override_logical_id("test") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @. +* `content` - (Required) The schema specification. Must be a valid Open API 3.0 spec. 
+* `registry_name` - (Required) The name of the registry in which this schema belongs. +* `type` - (Required) The type of the schema. Valid values: `OpenApi3`. +* `description` - (Optional) The description of the schema. Maximum of 256 characters. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the schema. +* `last_modified` - The last modified date of the schema. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `version` - The version of the schema. +* `version_created_date` - The created date of the version of the schema. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge schemas using the `name` and `registry_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EventBridge schemas using the `name` and `registry_name`. For example: + +```console +% terraform import aws_schemas_schema.test name/registry +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/secretsmanager_secret.html.markdown b/website/docs/cdktf/python/r/secretsmanager_secret.html.markdown new file mode 100644 index 00000000000..5fdeb90d30d --- /dev/null +++ b/website/docs/cdktf/python/r/secretsmanager_secret.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret" +description: |- + Provides a resource to manage AWS Secrets Manager secret metadata +--- + + + +# Resource: aws_secretsmanager_secret + +Provides a resource to manage AWS Secrets Manager secret metadata. To manage secret rotation, see the [`aws_secretsmanager_secret_rotation` resource](/docs/providers/aws/r/secretsmanager_secret_rotation.html). To manage a secret value, see the [`aws_secretsmanager_secret_version` resource](/docs/providers/aws/r/secretsmanager_secret_version.html). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.secretsmanager_secret import SecretsmanagerSecret +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecretsmanagerSecret(self, "example", + name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the secret.
+* `kms_key_id` - (Optional) ARN or Id of the AWS KMS key to be used to encrypt the secret values in the versions stored in this secret. If you need to reference a CMK in a different account, you can use only the key ARN. If you don't specify this value, then Secrets Manager defaults to using the AWS account's default KMS key (the one named `aws/secretsmanager`). If the default KMS key with that name doesn't yet exist, then AWS Secrets Manager creates it for you automatically the first time. +* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `name` - (Optional) Friendly name of the new secret. The secret name can consist of uppercase letters, lowercase letters, digits, and any of the following characters: `/_+=.@-` Conflicts with `name_prefix`. +* `policy` - (Optional) Valid JSON document representing a [resource policy](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_resource-based-policies.html). For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). Removing `policy` from your configuration or setting `policy` to null or an empty string (i.e., `policy = ""`) _will not_ delete the policy since it could have been set by `aws_secretsmanager_secret_policy`. To delete the `policy`, set it to `"{}"` (an empty JSON document). +* `recovery_window_in_days` - (Optional) Number of days that AWS Secrets Manager waits before it can delete the secret. This value can be `0` to force deletion without recovery or range from `7` to `30` days. The default value is `30`. +* `replica` - (Optional) Configuration block to support secret replication. See details below. +* `force_overwrite_replica_secret` - (Optional) Accepts boolean value to specify whether to overwrite a secret with the same name in the destination Region. +* `tags` - (Optional) Key-value map of user-defined tags that are attached to the secret. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### replica + +* `kms_key_id` - (Optional) ARN, Key ID, or Alias of the AWS KMS key within the region secret is replicated to. If one is not specified, then Secrets Manager defaults to using the AWS account's default KMS key (`aws/secretsmanager`) in the region or creates one for use if non-existent. +* `region` - (Required) Region for replicating the secret. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ARN of the secret. +* `arn` - ARN of the secret. +* `replica` - Attributes of a replica are described below. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### replica + +* `last_accessed_date` - Date that you last accessed the secret in the Region. +* `status` - Status can be `InProgress`, `Failed`, or `InSync`. +* `status_message` - Message such as `Replication succeeded` or `Secret with this name already exists in this region`. 
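+
+To illustrate the `replica` block described above, here is a minimal hand-written sketch (not produced by `cdktf convert`); the struct name `SecretsmanagerSecretReplica` and the `us-west-2` target region are assumptions to adapt to your generated bindings and environment:
+
+```python
+# Hand-written sketch: a secret replicated to a second region. Assumes the
+# provider bindings from `cdktf get` expose SecretsmanagerSecretReplica.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.secretsmanager_secret import SecretsmanagerSecret, SecretsmanagerSecretReplica
+class ReplicatedSecretStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # kms_key_id is omitted, so Secrets Manager uses the default
+        # aws/secretsmanager key in the replica region.
+        SecretsmanagerSecret(self, "example",
+            name="example-replicated",
+            replica=[SecretsmanagerSecretReplica(
+                region="us-west-2"
+            )]
+        )
+```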
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_secretsmanager_secret` using the secret Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_secretsmanager_secret` using the secret Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_secretsmanager_secret.example arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/secretsmanager_secret_policy.html.markdown b/website/docs/cdktf/python/r/secretsmanager_secret_policy.html.markdown new file mode 100644 index 00000000000..c59d2d1a730 --- /dev/null +++ b/website/docs/cdktf/python/r/secretsmanager_secret_policy.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret_policy" +description: |- + Provides a resource to manage AWS Secrets Manager secret policy +--- + + + +# Resource: aws_secretsmanager_secret_policy + +Provides a resource to manage AWS Secrets Manager secret policy. + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.secretsmanager_secret import SecretsmanagerSecret +from imports.aws.secretsmanager_secret_policy import SecretsmanagerSecretPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecretsmanagerSecret(self, "example", + name="example" + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["secretsmanager:GetSecretValue"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["arn:aws:iam::123456789012:root"], + type="AWS" + ) + ], + resources=["*"], + sid="EnableAnotherAWSAccountToReadTheSecret" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_secretsmanager_secret_policy_example = SecretsmanagerSecretPolicy(self, "example_2", + policy=Token.as_string(data_aws_iam_policy_document_example.json), + secret_arn=example.arn + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_secretsmanager_secret_policy_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `policy` - (Required) Valid JSON document representing a [resource policy](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_resource-based-policies.html). 
For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). Unlike `aws_secretsmanager_secret`, where `policy` can be set to `"{}"` to delete the policy, `"{}"` is not a valid policy since `policy` is required. +* `secret_arn` - (Required) Secret ARN. + +The following arguments are optional: + +* `block_public_policy` - (Optional) Makes an optional API call to Zelkova to validate the Resource Policy to prevent broad access to your secret. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the secret. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_secretsmanager_secret_policy` using the secret Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_secretsmanager_secret_policy` using the secret Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_secretsmanager_secret_policy.example arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/secretsmanager_secret_rotation.html.markdown b/website/docs/cdktf/python/r/secretsmanager_secret_rotation.html.markdown new file mode 100644 index 00000000000..011745f9b04 --- /dev/null +++ b/website/docs/cdktf/python/r/secretsmanager_secret_rotation.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret_rotation" +description: |- + Provides a resource to manage AWS Secrets Manager secret rotation +--- + + + +# Resource: aws_secretsmanager_secret_rotation + +Provides a resource to manage AWS Secrets Manager secret rotation. To manage a secret, see the [`aws_secretsmanager_secret` resource](/docs/providers/aws/r/secretsmanager_secret.html). To manage a secret value, see the [`aws_secretsmanager_secret_version` resource](/docs/providers/aws/r/secretsmanager_secret_version.html). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.secretsmanager_secret_rotation import SecretsmanagerSecretRotation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecretsmanagerSecretRotation(self, "example", + rotation_lambda_arn=Token.as_string(aws_lambda_function_example.arn), + rotation_rules=SecretsmanagerSecretRotationRotationRules( + automatically_after_days=30 + ), + secret_id=Token.as_string(aws_secretsmanager_secret_example.id) + ) +``` + +### Rotation Configuration + +To enable automatic secret rotation, the Secrets Manager service requires usage of a Lambda function. 
The [Rotate Secrets section in the Secrets Manager User Guide](https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotating-secrets.html) provides additional information about deploying prebuilt Lambda functions for supported credential rotation (e.g., RDS) or deploying a custom Lambda function. + +~> **NOTE:** Configuring rotation causes the secret to rotate once as soon as you enable rotation. Before you do this, you must ensure that all of your applications that use the credentials stored in the secret are updated to retrieve the secret from AWS Secrets Manager. The old credentials might no longer be usable after the initial rotation and any applications that you fail to update will break as soon as the old credentials are no longer valid. + +~> **NOTE:** If you cancel a rotation that is in progress (by removing the `rotation` configuration), it can leave the VersionStage labels in an unexpected state. Depending on what step of the rotation was in progress, you might need to remove the staging label AWSPENDING from the partially created version, specified by the SecretVersionId response value. You should also evaluate the partially rotated new version to see if it should be deleted, which you can do by removing all staging labels from the new version's VersionStage field. + +## Argument Reference + +This resource supports the following arguments: + +* `secret_id` - (Required) Specifies the secret to which you want to add a new version. You can specify either the Amazon Resource Name (ARN) or the friendly name of the secret. The secret must already exist. +* `rotation_lambda_arn` - (Required) Specifies the ARN of the Lambda function that can rotate the secret. +* `rotation_rules` - (Required) A structure that defines the rotation configuration for this secret. Defined below. + +### rotation_rules + +* `automatically_after_days` - (Optional) Specifies the number of days between automatic scheduled rotations of the secret. Either `automatically_after_days` or `schedule_expression` must be specified. +* `duration` - (Optional) The length of the rotation window in hours. For example, `3h` for a three-hour window. +* `schedule_expression` - (Optional) A `cron()` or `rate()` expression that defines the schedule for rotating your secret. Either `automatically_after_days` or `schedule_expression` must be specified. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the secret. +* `arn` - Amazon Resource Name (ARN) of the secret. +* `rotation_enabled` - Specifies whether automatic rotation is enabled for this secret. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_secretsmanager_secret_rotation` using the secret Amazon Resource Name (ARN). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_secretsmanager_secret_rotation` using the secret Amazon Resource Name (ARN).
For example: + +```console +% terraform import aws_secretsmanager_secret_rotation.example arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/secretsmanager_secret_version.html.markdown b/website/docs/cdktf/python/r/secretsmanager_secret_version.html.markdown new file mode 100644 index 00000000000..4e15b0aae80 --- /dev/null +++ b/website/docs/cdktf/python/r/secretsmanager_secret_version.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret_version" +description: |- + Provides a resource to manage AWS Secrets Manager secret version including its secret value +--- + + + +# Resource: aws_secretsmanager_secret_version + +Provides a resource to manage AWS Secrets Manager secret version including its secret value. To manage secret metadata, see the [`aws_secretsmanager_secret` resource](/docs/providers/aws/r/secretsmanager_secret.html). + +~> **NOTE:** If the `AWSCURRENT` staging label is present on this version during resource deletion, that label cannot be removed and will be skipped to prevent errors when fully deleting the secret. That label will leave this secret version active even after the resource is deleted from Terraform unless the secret itself is deleted. Move the `AWSCURRENT` staging label before or after deleting this resource from Terraform to fully trigger version deprecation if necessary. + +## Example Usage + +### Simple String Value + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.secretsmanager_secret_version import SecretsmanagerSecretVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecretsmanagerSecretVersion(self, "example", + secret_id=Token.as_string(aws_secretsmanager_secret_example.id), + secret_string="example-string-to-protect" + ) +``` + +### Key-Value Pairs + +Secrets Manager also accepts key-value pairs in JSON. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import VariableType, TerraformVariable, Token, Fn, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.secretsmanager_secret_version import SecretsmanagerSecretVersion +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + example = TerraformVariable(self, "example", + default={ + "key1": "value1", + "key2": "value2" + }, + type=VariableType.map(VariableType.STRING) + ) + aws_secretsmanager_secret_version_example = SecretsmanagerSecretVersion(self, "example_1", + secret_id=Token.as_string(aws_secretsmanager_secret_example.id), + secret_string=Token.as_string(Fn.jsonencode(example.value)) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
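+ # Specifically, the override keeps the address aws_secretsmanager_secret_version.example
+ # in state, rather than one derived from the construct ID "example_1".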
+ aws_secretsmanager_secret_version_example.override_logical_id("example") +``` + +-> **Note:** In Terraform 0.14 and later, use `sensitive = true` to protect the values of the variable from being printed in logs and console output (see [Protect Sensitive Input Variables](https://learn.hashicorp.com/tutorials/terraform/sensitive-variables)). + +Reading key-value pairs from JSON back into a native Terraform map can be accomplished in Terraform 0.12 and later with the [`jsondecode()` function](https://www.terraform.io/docs/configuration/functions/jsondecode.html): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformOutput, Fn, property_access, TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TerraformOutput(self, "example", + value=property_access(Fn.jsondecode(example.secret_string), ["\"key1\""]) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `secret_id` - (Required) Specifies the secret to which you want to add a new version. You can specify either the Amazon Resource Name (ARN) or the friendly name of the secret. The secret must already exist. +* `secret_string` - (Optional) Specifies text data that you want to encrypt and store in this version of the secret. This is required if secret_binary is not set. +* `secret_binary` - (Optional) Specifies binary data that you want to encrypt and store in this version of the secret. This is required if secret_string is not set. Needs to be encoded to base64. +* `version_stages` - (Optional) Specifies a list of staging labels that are attached to this version of the secret. A staging label must be unique to a single version of the secret. If you specify a staging label that's already associated with a different version of the same secret then that staging label is automatically removed from the other version and attached to this version. If you do not specify a value, then AWS Secrets Manager automatically moves the staging label `AWSCURRENT` to this new version on creation. + +~> **NOTE:** If `version_stages` is configured, you must include the `AWSCURRENT` staging label if this secret version is the only version or if the label is currently present on this secret version, otherwise Terraform will show a perpetual difference. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the secret. +* `id` - A pipe delimited combination of secret ID and version ID. +* `version_id` - The unique identifier of the version of the secret. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_secretsmanager_secret_version` using the secret ID and version ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_secretsmanager_secret_version` using the secret ID and version ID. 
For example: + +```console +% terraform import aws_secretsmanager_secret_version.example 'arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456|xxxxx-xxxxxxx-xxxxxxx-xxxxx' +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/security_group.html.markdown b/website/docs/cdktf/python/r/security_group.html.markdown new file mode 100644 index 00000000000..f1042622cc1 --- /dev/null +++ b/website/docs/cdktf/python/r/security_group.html.markdown @@ -0,0 +1,390 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_security_group" +description: |- + Provides a security group resource. +--- + + + +# Resource: aws_security_group + +Provides a security group resource. + +~> **NOTE on Security Groups and Security Group Rules:** Terraform currently provides a Security Group resource with `ingress` and `egress` rules defined in-line and a [Security Group Rule resource](security_group_rule.html) which manages one or more `ingress` or `egress` rules. Both of these resources were added before AWS assigned a [security group rule unique ID](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/security-group-rules.html), and they do not work well in all scenarios using the `description` and `tags` attributes, which rely on the unique ID. The [`aws_vpc_security_group_egress_rule`](vpc_security_group_egress_rule.html) and [`aws_vpc_security_group_ingress_rule`](vpc_security_group_ingress_rule.html) resources have been added to address these limitations and should be used for all new security group rules. You should not use the `aws_vpc_security_group_egress_rule` and `aws_vpc_security_group_ingress_rule` resources in conjunction with an `aws_security_group` resource with in-line rules or with `aws_security_group_rule` resources defined for the same Security Group, as rule conflicts may occur and rules will be overwritten. + +~> **NOTE:** Referencing Security Groups across VPC peering has certain restrictions. More information is available in the [VPC Peering User Guide](https://docs.aws.amazon.com/vpc/latest/peering/vpc-peering-security-groups.html). + +~> **NOTE:** Due to [AWS Lambda improved VPC networking changes that began deploying in September 2019](https://aws.amazon.com/blogs/compute/announcing-improved-vpc-networking-for-aws-lambda-functions/), security groups associated with Lambda Functions can take up to 45 minutes to successfully delete. Terraform AWS Provider version 2.31.0 and later automatically handles this increased timeout, however prior versions require setting the [customizable deletion timeout](#timeouts) to 45 minutes (`delete = "45m"`). AWS and HashiCorp are working together to reduce the amount of time required for resource deletion and updates can be tracked in this [GitHub issue](https://github.com/hashicorp/terraform-provider-aws/issues/10329). + +~> **NOTE:** The `cidr_blocks` and `ipv6_cidr_blocks` parameters are optional in the `ingress` and `egress` blocks. If nothing is specified, traffic will be blocked as described in _NOTE on Egress rules_ later. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details.
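+# The `main` references below are assumed to point at an existing aws_vpc
+# resource defined elsewhere (not shown here).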
+# +from imports.aws.security_group import SecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecurityGroup(self, "allow_tls", + description="Allow TLS inbound traffic", + egress=[SecurityGroupEgress( + cidr_blocks=["0.0.0.0/0"], + from_port=0, + ipv6_cidr_blocks=["::/0"], + protocol="-1", + to_port=0 + ) + ], + ingress=[SecurityGroupIngress( + cidr_blocks=[main.cidr_block], + description="TLS from VPC", + from_port=443, + ipv6_cidr_blocks=[main.ipv6_cidr_block], + protocol="tcp", + to_port=443 + ) + ], + name="allow_tls", + tags={ + "Name": "allow_tls" + }, + vpc_id=main.id + ) +``` + +~> **NOTE on Egress rules:** By default, AWS creates an `ALLOW ALL` egress rule when creating a new Security Group inside of a VPC. When creating a new Security Group inside a VPC, **Terraform will remove this default rule**, and require you specifically re-create it if you desire that rule. We feel this leads to fewer surprises in terms of controlling your egress rules. If you desire this rule to be in place, you can use this `egress` block: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.security_group import SecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecurityGroup(self, "example", + egress=[SecurityGroupEgress( + cidr_blocks=["0.0.0.0/0"], + from_port=0, + ipv6_cidr_blocks=["::/0"], + protocol="-1", + to_port=0 + ) + ] + ) +``` + +### Usage With Prefix List IDs + +Prefix Lists are either managed by AWS internally, or created by the customer using a +[Prefix List resource](ec2_managed_prefix_list.html). Prefix Lists provided by +AWS are associated with a prefix list name, or service name, that is linked to a specific region. +Prefix list IDs are exported on VPC Endpoints, so you can use this format: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.security_group import SecurityGroup +from imports.aws.vpc_endpoint import VpcEndpoint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, service_name, vpc_id): + super().__init__(scope, name) + my_endpoint = VpcEndpoint(self, "my_endpoint", + service_name=service_name, + vpc_id=vpc_id + ) + SecurityGroup(self, "example", + egress=[SecurityGroupEgress( + from_port=0, + prefix_list_ids=[my_endpoint.prefix_list_id], + protocol="-1", + to_port=0 + ) + ] + ) +``` + +You can also find a specific Prefix List using the `aws_prefix_list` data source. + +### Removing All Ingress and Egress Rules + +The `ingress` and `egress` arguments are processed in [attributes-as-blocks](https://developer.hashicorp.com/terraform/language/attr-as-blocks) mode. Due to this, removing these arguments from the configuration will **not** cause Terraform to destroy the managed rules.
To subsequently remove all managed ingress and egress rules: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.security_group import SecurityGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecurityGroup(self, "example", + egress=[], + ingress=[], + name="sg", + vpc_id=Token.as_string(aws_vpc_example.id) + ) +``` + +### Recreating a Security Group + +A simple security group `name` change "forces new" the security group--Terraform destroys the security group and creates a new one. (Likewise, `description`, `name_prefix`, or `vpc_id` [cannot be changed](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/working-with-security-groups.html#creating-security-group).) Attempting to recreate the security group leads to a variety of complications depending on how it is used. + +Security groups are generally associated with other resources--**more than 100** AWS Provider resources reference security groups. Referencing a resource from another resource creates a one-way dependency. For example, if you create an EC2 `aws_instance` that has a `vpc_security_group_ids` argument that refers to an `aws_security_group` resource, the `aws_security_group` is a dependent of the `aws_instance`. Because of this, Terraform will create the security group first so that it can then be associated with the EC2 instance. + +However, the dependency relationship actually goes both directions causing the _Security Group Deletion Problem_. AWS does not allow you to delete the security group associated with another resource (_e.g._, the `aws_instance`). + +Terraform does [not model bi-directional dependencies](https://developer.hashicorp.com/terraform/internals/graph) like this, but, even if it did, simply knowing the dependency situation would not be enough to solve it. For example, some resources must always have an associated security group while others don't need to. In addition, when the `aws_security_group` resource attempts to recreate, it receives a dependent object error, which does not provide information on whether the dependent object is a security group rule or, for example, an associated EC2 instance. Within Terraform, the associated resource (_e.g._, `aws_instance`) does not receive an error when the `aws_security_group` is trying to recreate even though that is where changes to the associated resource would need to take place (_e.g._, removing the security group association). + +Despite these sticky problems, below are some ways to improve your experience when you find it necessary to recreate a security group. + +#### `create_before_destroy` + +(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).) + +Normally, Terraform first deletes the existing security group resource and then creates a new one. When a security group is associated with a resource, the delete won't succeed. 
You can invert the default behavior using the [`create_before_destroy` meta argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.security_group import SecurityGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SecurityGroup(self, "example",
+            lifecycle=TerraformResourceLifecycle(
+                create_before_destroy=True
+            ),
+            name="changeable-name"
+        )
+```
+
+#### `replace_triggered_by`
+
+(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).)
+
+To replace a resource when a security group changes, use the [`replace_triggered_by` meta argument](https://www.terraform.io/language/meta-arguments/lifecycle#replace_triggered_by). Note that in this example, the `aws_instance` will be destroyed and created again when the `aws_security_group` changes.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.instance import Instance
+from imports.aws.security_group import SecurityGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SecurityGroup(self, "example",
+            name="sg"
+        )
+        aws_instance_example = Instance(self, "example_1",
+            instance_type="t3.small",
+            lifecycle=TerraformResourceLifecycle(
+                replace_triggered_by=[example]
+            ),
+            vpc_security_group_ids=[example.id]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_instance_example.override_logical_id("example")
+```
+
+#### Shorter timeout
+
+(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).)
+
+If destroying a security group takes a long time, it may be because Terraform cannot distinguish between a dependent object (_e.g._, a security group rule or EC2 instance) that is _in the process of being deleted_ and one that is not. In other words, it may be waiting for a train that isn't scheduled to arrive. To fail faster, shorten the `delete` [timeout](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) from its default:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.security_group import SecurityGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SecurityGroup(self, "example",
+            name="izizavle",
+            timeouts=[{
+                "delete": "2m"
+            }
+            ]
+        )
+```
+
+#### Provisioners
+
+(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).)
+
+**DISCLAIMER:** We **_HIGHLY_** recommend using one of the above approaches and _NOT_ using local provisioners. Provisioners, like the one shown below, should be considered a **last resort** since they are _not readable_, _require skills outside standard Terraform configuration_, are _error prone_ and _difficult to maintain_, are not compatible with cloud environments and upgrade tools, require AWS CLI installation, and are subject to AWS CLI and Terraform changes outside the AWS Provider.
+
+```terraform
+data "aws_security_group" "default" {
+  name = "default"
+  # ... other configuration ...
+}
+
+resource "aws_security_group" "example" {
+  name = "sg"
+  # ... other configuration ...
+
+  # The downstream resource must have at least one SG attached, therefore we
+  # attach the default SG of the VPC temporarily and remove it later on
+  provisioner "local-exec" {
+    when    = destroy
+    command = <<DOC
+      # ... AWS CLI commands that temporarily attach the default security
+      # group to the dependent resources and detach this one ...
+DOC
+  }
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional, Forces new resource) Security group description. Defaults to `Managed by Terraform`. Cannot be `""`.
+* `egress` - (Optional, VPC only) Configuration block for egress rules. Can be specified multiple times for each egress rule. Each egress block supports fields documented below.
+* `ingress` - (Optional) Configuration block for ingress rules. Can be specified multiple times for each ingress rule. Each ingress block supports fields documented below.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `name` - (Optional, Forces new resource) Name of the security group. If omitted, Terraform will assign a random, unique name.
+* `revoke_rules_on_delete` - (Optional) Instruct Terraform to revoke all of the Security Group's attached ingress and egress rules before deleting the security group itself. Default `false`.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc_id` - (Optional, Forces new resource) VPC ID. Defaults to the region's default VPC.
+
+### ingress
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+The following arguments are required:
+
+* `from_port` - (Required) Start port (or ICMP type number if protocol is `icmp`)
+* `protocol` - (Required) Protocol. If you select a protocol of `-1` (semantically equivalent to `all`, which is not a valid value here), you must specify a `from_port` and `to_port` equal to 0.
+* `to_port` - (Required) End range port (or ICMP code if protocol is `icmp`).
+
+The following arguments are optional:
+
+~> **Note** Although `cidr_blocks`, `ipv6_cidr_blocks`, `prefix_list_ids`, and `security_groups` are all marked as optional, you _must_ provide one of them in order to configure the source of the traffic.
+
+* `cidr_blocks` - (Optional) List of CIDR blocks.
+* `description` - (Optional) Description of this ingress rule.
+* `ipv6_cidr_blocks` - (Optional) List of IPv6 CIDR blocks.
+* `prefix_list_ids` - (Optional) List of Prefix List IDs.
+* `security_groups` - (Optional) List of security groups. A group name can be used relative to the default VPC. Otherwise, group ID.
+* `self` - (Optional) Whether the security group itself will be added as a source to this ingress rule.
+
+### egress
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+The following arguments are required:
+
+* `from_port` - (Required) Start port (or ICMP type number if protocol is `icmp`)
+* `protocol` - (Required) Protocol. If you select a protocol of `-1` (semantically equivalent to `all`, which is not a valid value here), you must specify a `from_port` and `to_port` equal to 0. The supported values are defined in the `IpProtocol` argument in the [IpPermission](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_IpPermission.html) API reference. This argument is normalized to a lowercase value to match the AWS API requirement when using Terraform 0.12.x and above. Please make sure that the value of the protocol is specified as lowercase when used with older versions of Terraform to avoid issues during upgrade.
+* `to_port` - (Required) End range port (or ICMP code if protocol is `icmp`).
+
+The following arguments are optional:
+
+~> **Note** Although `cidr_blocks`, `ipv6_cidr_blocks`, `prefix_list_ids`, and `security_groups` are all marked as optional, you _must_ provide one of them in order to configure the destination of the traffic.
+
+* `cidr_blocks` - (Optional) List of CIDR blocks.
+* `description` - (Optional) Description of this egress rule.
+* `ipv6_cidr_blocks` - (Optional) List of IPv6 CIDR blocks.
+* `prefix_list_ids` - (Optional) List of Prefix List IDs.
+* `security_groups` - (Optional) List of security groups. A group name can be used relative to the default VPC. Otherwise, group ID.
+* `self` - (Optional) Whether the security group itself will be added as a source to this egress rule.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the security group.
+* `id` - ID of the security group.
+* `owner_id` - Owner ID.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `15m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Groups using the security group `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Security Groups using the security group `id`. For example:
+
+```console
+% terraform import aws_security_group.elb_sg sg-903004f8
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/security_group_rule.html.markdown b/website/docs/cdktf/python/r/security_group_rule.html.markdown
new file mode 100644
index 00000000000..5290680d32d
--- /dev/null
+++ b/website/docs/cdktf/python/r/security_group_rule.html.markdown
@@ -0,0 +1,283 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_security_group_rule"
+description: |-
+  Provides a security group rule resource.
+---
+
+
+
+# Resource: aws_security_group_rule
+
+Provides a security group rule resource. Represents a single `ingress` or
+`egress` group rule, which can be added to external Security Groups.
+
+~> **NOTE on Security Groups and Security Group Rules:** Terraform currently provides a [Security Group resource](security_group.html) with `ingress` and `egress` rules defined in-line and a Security Group Rule resource which manages one or more `ingress` or
+`egress` rules. Both of these resources were added before AWS assigned a [security group rule unique ID](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/security-group-rules.html), and they do not work well in all scenarios using the `description` and `tags` attributes, which rely on the unique ID.
+The [`aws_vpc_security_group_egress_rule`](vpc_security_group_egress_rule.html) and [`aws_vpc_security_group_ingress_rule`](vpc_security_group_ingress_rule.html) resources have been added to address these limitations and should be used for all new security group rules.
+You should not use the `aws_vpc_security_group_egress_rule` and `aws_vpc_security_group_ingress_rule` resources in conjunction with an `aws_security_group` resource with in-line rules or with `aws_security_group_rule` resources defined for the same Security Group, as rule conflicts may occur and rules will be overwritten.
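+
+As a quick reference for the recommended replacement, here is a minimal hand-written sketch (not `cdktf convert` output) of the standalone [`aws_vpc_security_group_ingress_rule`](vpc_security_group_ingress_rule.html) resource; the argument names mirror that resource, and the IDs and CIDRs are placeholders:
+
+```python
+# A hand-written sketch (not 'cdktf convert' output) of the recommended
+# aws_vpc_security_group_ingress_rule resource; IDs and CIDRs are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.vpc_security_group_ingress_rule import VpcSecurityGroupIngressRule
+
+class RecommendedRuleSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # One rule per resource; the AWS-assigned rule ID lets `description`
+        # and `tags` behave reliably.
+        VpcSecurityGroupIngressRule(self, "allow_https",
+            security_group_id="sg-123456",
+            cidr_ipv4="10.0.0.0/16",
+            from_port=443,
+            to_port=443,
+            ip_protocol="tcp",
+            description="HTTPS from VPC"
+        )
+```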
+
+~> **NOTE:** Setting `protocol = "all"` or `protocol = -1` with `from_port` and `to_port` will result in the EC2 API creating a security group rule with all ports open. This API behavior cannot be controlled by Terraform and may generate warnings in the future.
+
+~> **NOTE:** Referencing Security Groups across VPC peering has certain restrictions. More information is available in the [VPC Peering User Guide](https://docs.aws.amazon.com/vpc/latest/peering/vpc-peering-security-groups.html).
+
+## Example Usage
+
+Basic usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.security_group_rule import SecurityGroupRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SecurityGroupRule(self, "example",
+            cidr_blocks=[Token.as_string(aws_vpc_example.cidr_block)],
+            from_port=0,
+            ipv6_cidr_blocks=[Token.as_string(aws_vpc_example.ipv6_cidr_block)],
+            protocol="tcp",
+            security_group_id="sg-123456",
+            to_port=65535,
+            type="ingress"
+        )
+```
+
+### Usage With Prefix List IDs
+
+Prefix Lists are either managed by AWS internally or created by the customer using a
+[Managed Prefix List resource](ec2_managed_prefix_list.html). Prefix Lists provided by
+AWS are associated with a prefix list name, or service name, that is linked to a specific region.
+
+Prefix list IDs are exported on VPC Endpoints, so you can use this format:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.security_group_rule import SecurityGroupRule
+from imports.aws.vpc_endpoint import VpcEndpoint
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, service_name, vpc_id):
+        super().__init__(scope, name)
+        my_endpoint = VpcEndpoint(self, "my_endpoint",
+            service_name=service_name,
+            vpc_id=vpc_id
+        )
+        SecurityGroupRule(self, "allow_all",
+            from_port=0,
+            prefix_list_ids=[my_endpoint.prefix_list_id],
+            protocol="-1",
+            security_group_id="sg-123456",
+            to_port=0,
+            type="egress"
+        )
+```
+
+You can also find a specific Prefix List using the [`aws_prefix_list`](/docs/providers/aws/d/prefix_list.html)
+or [`ec2_managed_prefix_list`](/docs/providers/aws/d/ec2_managed_prefix_list.html) data sources:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_prefix_list import DataAwsPrefixList
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.security_group_rule import SecurityGroupRule
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsRegion(self, "current")
+        s3 = DataAwsPrefixList(self, "s3",
+            name="com.amazonaws.${" + current.name + "}.s3"
+        )
+        SecurityGroupRule(self, "s3_gateway_egress",
+            description="S3 Gateway Egress",
+            from_port=443,
+            prefix_list_ids=[Token.as_string(s3.id)],
+            protocol="tcp",
+            security_group_id="sg-123456",
+            to_port=443,
+            type="egress"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `from_port` - (Required) Start port (or ICMP type number if protocol is "icmp" or "icmpv6").
+* `protocol` - (Required) Protocol. If not `icmp`, `icmpv6`, `tcp`, `udp`, or `all`, use the [protocol number](https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml).
+* `security_group_id` - (Required) Security group to apply this rule to.
+* `to_port` - (Required) End port (or ICMP code if protocol is "icmp").
+* `type` - (Required) Type of rule being created. Valid options are `ingress` (inbound)
+or `egress` (outbound).
+
+The following arguments are optional:
+
+~> **Note** Although `cidr_blocks`, `ipv6_cidr_blocks`, `prefix_list_ids`, and `source_security_group_id` are all marked as optional, you _must_ provide one of them in order to configure the source of the traffic.
+
+* `cidr_blocks` - (Optional) List of CIDR blocks. Cannot be specified with `source_security_group_id` or `self`.
+* `description` - (Optional) Description of the rule.
+* `ipv6_cidr_blocks` - (Optional) List of IPv6 CIDR blocks. Cannot be specified with `source_security_group_id` or `self`.
+* `prefix_list_ids` - (Optional) List of Prefix List IDs.
+* `self` - (Optional) Whether the security group itself will be added as a source to this ingress rule. Cannot be specified with `cidr_blocks`, `ipv6_cidr_blocks`, or `source_security_group_id`.
+* `source_security_group_id` - (Optional) Security group ID to allow access to/from, depending on the `type`. Cannot be specified with `cidr_blocks`, `ipv6_cidr_blocks`, or `self`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the security group rule.
+* `security_group_rule_id` - If the `aws_security_group_rule` resource has a single source or destination then this is the AWS Security Group Rule resource ID. Otherwise it is empty.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Group Rules using the `security_group_id`, `type`, `protocol`, `from_port`, `to_port`, and source(s)/destination(s) (such as a `cidr_block`) separated by underscores (`_`). All parts are required. For example:
+
+**NOTE:** Not all rule permissions (e.g., not all of a rule's CIDR blocks) need to be imported for Terraform to manage rule permissions. However, importing some of a rule's permissions but not others, and then making changes to the rule will result in the creation of an additional rule to capture the updated permissions. Rule permissions that were not imported are left intact in the original rule.
+ +Import an ingress rule in security group `sg-6e616f6d69` for TCP port 8000 with an IPv4 destination CIDR of `10.0.3.0/24`: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import a rule with various IPv4 and IPv6 source CIDR blocks: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import a rule, applicable to all ports, with a protocol other than TCP/UDP/ICMP/ICMPV6/ALL, e.g., Multicast Transport Protocol (MTP), using the IANA protocol number. For example: 92. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import a default any/any egress rule to 0.0.0.0/0: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import an egress rule with a prefix list ID destination: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import a rule applicable to all protocols and ports with a security group source: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Import a rule that has itself and an IPv6 CIDR block as sources: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** Security Group Rules using the `security_group_id`, `type`, `protocol`, `from_port`, `to_port`, and source(s)/destination(s) (such as a `cidr_block`) separated by underscores (`_`). All parts are required. For example: + +**NOTE:** Not all rule permissions (e.g., not all of a rule's CIDR blocks) need to be imported for Terraform to manage rule permissions. However, importing some of a rule's permissions but not others, and then making changes to the rule will result in the creation of an additional rule to capture the updated permissions. Rule permissions that were not imported are left intact in the original rule. 
+ +Import an ingress rule in security group `sg-6e616f6d69` for TCP port 8000 with an IPv4 destination CIDR of `10.0.3.0/24`: + +```console +% terraform import aws_security_group_rule.ingress sg-6e616f6d69_ingress_tcp_8000_8000_10.0.3.0/24 +``` + +Import a rule with various IPv4 and IPv6 source CIDR blocks: + +```console +% terraform import aws_security_group_rule.ingress sg-4973616163_ingress_tcp_100_121_10.1.0.0/16_2001:db8::/48_10.2.0.0/16_2002:db8::/48 +``` + +Import a rule, applicable to all ports, with a protocol other than TCP/UDP/ICMP/ICMPV6/ALL, e.g., Multicast Transport Protocol (MTP), using the IANA protocol number. For example: 92. + +```console +% terraform import aws_security_group_rule.ingress sg-6777656e646f6c796e_ingress_92_0_65536_10.0.3.0/24_10.0.4.0/24 +``` + +Import a default any/any egress rule to 0.0.0.0/0: + +```console +% terraform import aws_security_group_rule.default_egress sg-6777656e646f6c796e_egress_all_0_0_0.0.0.0/0 +``` + +Import an egress rule with a prefix list ID destination: + +```console +% terraform import aws_security_group_rule.egress sg-62726f6479_egress_tcp_8000_8000_pl-6469726b +``` + +Import a rule applicable to all protocols and ports with a security group source: + +```console +% terraform import aws_security_group_rule.ingress_rule sg-7472697374616e_ingress_all_0_65536_sg-6176657279 +``` + +Import a rule that has itself and an IPv6 CIDR block as sources: + +```console +% terraform import aws_security_group_rule.rule_name sg-656c65616e6f72_ingress_tcp_80_80_self_2001:db8::/48 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/securityhub_account.markdown b/website/docs/cdktf/python/r/securityhub_account.markdown new file mode 100644 index 00000000000..3c0870cf251 --- /dev/null +++ b/website/docs/cdktf/python/r/securityhub_account.markdown @@ -0,0 +1,66 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_account" +description: |- + Enables Security Hub for an AWS account. +--- + + + +# Resource: aws_securityhub_account + +Enables Security Hub for this AWS account. + +~> **NOTE:** Destroying this resource will disable Security Hub for this AWS account. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecurityhubAccount(self, "example") +``` + +## Argument Reference + +* `enable_default_standards` - (Optional) Whether to enable the security standards that Security Hub has designated as automatically enabled including: ` AWS Foundational Security Best Practices v1.0.0` and `CIS AWS Foundations Benchmark v1.2.0`. Defaults to `true`. +* `control_finding_generator` - (Optional) Updates whether the calling account has consolidated control findings turned on. If the value for this field is set to `SECURITY_CONTROL`, Security Hub generates a single finding for a control check even when the check applies to multiple enabled standards. If the value for this field is set to `STANDARD_CONTROL`, Security Hub generates separate findings for a control check when the check applies to multiple enabled standards. 
For accounts that are part of an organization, this value can only be updated in the administrator account.
+* `auto_enable_controls` - (Optional) Whether to automatically enable new controls when they are added to standards that are enabled. By default, this is set to true, and new controls are enabled automatically. To not automatically enable new controls, set this to false.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Account ID.
+* `arn` - ARN of the SecurityHub Hub created in the account.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an existing Security Hub enabled account using the AWS account ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an existing Security Hub enabled account using the AWS account ID. For example:
+
+```console
+% terraform import aws_securityhub_account.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/securityhub_action_target.html.markdown b/website/docs/cdktf/python/r/securityhub_action_target.html.markdown
new file mode 100644
index 00000000000..046f1710e5b
--- /dev/null
+++ b/website/docs/cdktf/python/r/securityhub_action_target.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_action_target"
+description: |-
+  Creates a Security Hub custom action.
+---
+
+
+
+# Resource: aws_securityhub_action_target
+
+Creates a Security Hub custom action.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.securityhub_account import SecurityhubAccount
+from imports.aws.securityhub_action_target import SecurityhubActionTarget
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SecurityhubAccount(self, "example")
+        aws_securityhub_action_target_example = SecurityhubActionTarget(self, "example_1",
+            depends_on=[example],
+            description="This custom action sends selected findings to chat",
+            identifier="SendToChat",
+            name="Send notification to chat"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_securityhub_action_target_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the custom action target.
+* `identifier` - (Required) The ID for the custom action target.
+* `description` - (Required) The description for the custom action target.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Security Hub custom action target.
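+
+The exported `arn` is what an EventBridge rule matches on to react to the custom action. A hand-written sketch (not `cdktf convert` output) is shown below; the rule name is illustrative, and the `detail-type` string is the one Security Hub emits for custom actions:
+
+```python
+# A hand-written fragment (not 'cdktf convert' output): route the custom action
+# to an EventBridge rule. `action_target` is the SecurityhubActionTarget
+# resource created above; the rule name is illustrative. This code belongs
+# inside a TerraformStack __init__.
+import json
+from imports.aws.cloudwatch_event_rule import CloudwatchEventRule
+
+CloudwatchEventRule(self, "send_to_chat",
+    name="securityhub-send-to-chat",
+    event_pattern=json.dumps({
+        "source": ["aws.securityhub"],
+        "detail-type": ["Security Hub Findings - Custom Action"],
+        "resources": [action_target.arn]
+    })
+)
+```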
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Security Hub custom action using the action target ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import a Security Hub custom action using the action target ARN. For example:
+
+```console
+% terraform import aws_securityhub_action_target.example arn:aws:securityhub:eu-west-1:312940875350:action/custom/a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/securityhub_finding_aggregator.markdown b/website/docs/cdktf/python/r/securityhub_finding_aggregator.markdown
new file mode 100644
index 00000000000..da70a1f3a07
--- /dev/null
+++ b/website/docs/cdktf/python/r/securityhub_finding_aggregator.markdown
@@ -0,0 +1,132 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_finding_aggregator"
+description: |-
+  Manages a Security Hub finding aggregator
+---
+
+
+
+# Resource: aws_securityhub_finding_aggregator
+
+Manages a Security Hub finding aggregator. Security Hub needs to be enabled in a region in order for the aggregator to pull through findings.
+
+## Example Usage
+
+### All Regions Usage
+
+The following example will enable the aggregator for every region.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.securityhub_account import SecurityhubAccount
+from imports.aws.securityhub_finding_aggregator import SecurityhubFindingAggregator
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SecurityhubAccount(self, "example")
+        aws_securityhub_finding_aggregator_example = SecurityhubFindingAggregator(self, "example_1",
+            depends_on=[example],
+            linking_mode="ALL_REGIONS"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_securityhub_finding_aggregator_example.override_logical_id("example")
+```
+
+### All Regions Except Specified Regions Usage
+
+The following example will enable the aggregator for every region except those specified in `specified_regions`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.securityhub_account import SecurityhubAccount
+from imports.aws.securityhub_finding_aggregator import SecurityhubFindingAggregator
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SecurityhubAccount(self, "example")
+        aws_securityhub_finding_aggregator_example = SecurityhubFindingAggregator(self, "example_1",
+            depends_on=[example],
+            linking_mode="ALL_REGIONS_EXCEPT_SPECIFIED",
+            specified_regions=["eu-west-1", "eu-west-2"]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_securityhub_finding_aggregator_example.override_logical_id("example")
+```
+
+### Specified Regions Usage
+
+The following example will enable the aggregator for every region specified in `specified_regions`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.securityhub_account import SecurityhubAccount
+from imports.aws.securityhub_finding_aggregator import SecurityhubFindingAggregator
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SecurityhubAccount(self, "example")
+        aws_securityhub_finding_aggregator_example = SecurityhubFindingAggregator(self, "example_1",
+            depends_on=[example],
+            linking_mode="SPECIFIED_REGIONS",
+            specified_regions=["eu-west-1", "eu-west-2"]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_securityhub_finding_aggregator_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `linking_mode` - (Required) Indicates whether to aggregate findings from all of the available Regions or from a specified list. The options are `ALL_REGIONS`, `ALL_REGIONS_EXCEPT_SPECIFIED` or `SPECIFIED_REGIONS`. When `ALL_REGIONS` or `ALL_REGIONS_EXCEPT_SPECIFIED` are used, Security Hub will automatically aggregate findings from new Regions as Security Hub supports them and you opt into them.
+- `specified_regions` - (Optional) List of regions to include or exclude (required if `linking_mode` is set to `ALL_REGIONS_EXCEPT_SPECIFIED` or `SPECIFIED_REGIONS`).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - Amazon Resource Name (ARN) of the Security Hub finding aggregator.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an existing Security Hub finding aggregator using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an existing Security Hub finding aggregator using the `arn`. For example:
+
+```console
+% terraform import aws_securityhub_finding_aggregator.example arn:aws:securityhub:eu-west-1:123456789098:finding-aggregator/abcd1234-abcd-1234-1234-abcdef123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/securityhub_insight.html.markdown b/website/docs/cdktf/python/r/securityhub_insight.html.markdown
new file mode 100644
index 00000000000..9fab4ae3fd9
--- /dev/null
+++ b/website/docs/cdktf/python/r/securityhub_insight.html.markdown
@@ -0,0 +1,369 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_insight"
+description: |-
+  Provides a Security Hub custom insight resource.
+---
+
+
+
+# Resource: aws_securityhub_insight
+
+Provides a Security Hub custom insight resource.
See the [Managing custom insights section](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-custom-insights.html) of the AWS User Guide for more information. + +## Example Usage + +### Filter by AWS account ID + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_insight import SecurityhubInsight +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + aws_securityhub_insight_example = SecurityhubInsight(self, "example_1", + depends_on=[example], + filters=SecurityhubInsightFilters( + aws_account_id=[SecurityhubInsightFiltersAwsAccountId( + comparison="EQUALS", + value="1234567890" + ), SecurityhubInsightFiltersAwsAccountId( + comparison="EQUALS", + value="09876543210" + ) + ] + ), + group_by_attribute="AwsAccountId", + name="example-insight" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_insight_example.override_logical_id("example") +``` + +### Filter by date range + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_insight import SecurityhubInsight +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + aws_securityhub_insight_example = SecurityhubInsight(self, "example_1", + depends_on=[example], + filters=SecurityhubInsightFilters( + created_at=[SecurityhubInsightFiltersCreatedAt( + date_range=SecurityhubInsightFiltersCreatedAtDateRange( + unit="DAYS", + value=5 + ) + ) + ] + ), + group_by_attribute="CreatedAt", + name="example-insight" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_insight_example.override_logical_id("example") +``` + +### Filter by destination IPv4 address + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_insight import SecurityhubInsight +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + aws_securityhub_insight_example = SecurityhubInsight(self, "example_1", + depends_on=[example], + filters=SecurityhubInsightFilters( + network_destination_ipv4=[SecurityhubInsightFiltersNetworkDestinationIpv4( + cidr="10.0.0.0/16" + ) + ] + ), + group_by_attribute="NetworkDestinationIpV4", + name="example-insight" + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match. + aws_securityhub_insight_example.override_logical_id("example") +``` + +### Filter by finding's confidence + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_insight import SecurityhubInsight +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + aws_securityhub_insight_example = SecurityhubInsight(self, "example_1", + depends_on=[example], + filters=SecurityhubInsightFilters( + confidence=[SecurityhubInsightFiltersConfidence( + gte="80" + ) + ] + ), + group_by_attribute="Confidence", + name="example-insight" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_insight_example.override_logical_id("example") +``` + +### Filter by resource tags + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_insight import SecurityhubInsight +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + aws_securityhub_insight_example = SecurityhubInsight(self, "example_1", + depends_on=[example], + filters=SecurityhubInsightFilters( + resource_tags=[SecurityhubInsightFiltersResourceTags( + comparison="EQUALS", + key="Environment", + value="Production" + ) + ] + ), + group_by_attribute="ResourceTags", + name="example-insight" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_insight_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `filters` - (Required) A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See [filters](#filters) below for more details. +* `group_by_attribute` - (Required) The attribute used to group the findings for the insight e.g., if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers. +* `name` - (Required) The name of the custom insight. + +### filters + +The `filters` configuration block supports the following arguments: + +~> **NOTE:** For each argument below, up to 20 can be provided. + +* `aws_account_id` - (Optional) AWS account ID that a finding is generated in. See [String_Filter](#string-filter-argument-reference) below for more details. +* `company_name` - (Optional) The name of the findings provider (company) that owns the solution (product) that generates findings. See [String_Filter](#string-filter-argument-reference) below for more details. 
+* `compliance_status` - (Optional) Exclusive to findings that are generated as the result of a check run against a specific rule in a supported standard, such as CIS AWS Foundations. Contains security standard-related finding details. See [String Filter](#string-filter-argument-reference) below for more details. +* `confidence` - (Optional) A finding's confidence. Confidence is defined as the likelihood that a finding accurately identifies the behavior or issue that it was intended to identify. Confidence is scored on a 0-100 basis using a ratio scale, where 0 means zero percent confidence and 100 means 100 percent confidence. See [Number Filter](#number-filter-argument-reference) below for more details. +* `created_at` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider captured the potential security issue that a finding captured. See [Date Filter](#date-filter-argument-reference) below for more details. +* `criticality` - (Optional) The level of importance assigned to the resources associated with the finding. A score of 0 means that the underlying resources have no criticality, and a score of 100 is reserved for the most critical resources. See [Number Filter](#number-filter-argument-reference) below for more details. +* `description` - (Optional) A finding's description. See [String Filter](#string-filter-argument-reference) below for more details. +* `finding_provider_fields_confidence` - (Optional) The finding provider value for the finding confidence. Confidence is defined as the likelihood that a finding accurately identifies the behavior or issue that it was intended to identify. Confidence is scored on a 0-100 basis using a ratio scale, where 0 means zero percent confidence and 100 means 100 percent confidence. See [Number Filter](#number-filter-argument-reference) below for more details. +* `finding_provider_fields_criticality` - (Optional) The finding provider value for the level of importance assigned to the resources associated with the findings. A score of 0 means that the underlying resources have no criticality, and a score of 100 is reserved for the most critical resources. See [Number Filter](#number-filter-argument-reference) below for more details. +* `finding_provider_fields_related_findings_id` - (Optional) The finding identifier of a related finding that is identified by the finding provider. See [String Filter](#string-filter-argument-reference) below for more details. +* `finding_provider_fields_related_findings_product_arn` - (Optional) The ARN of the solution that generated a related finding that is identified by the finding provider. See [String Filter](#string-filter-argument-reference) below for more details. +* `finding_provider_fields_severity_label` - (Optional) The finding provider value for the severity label. See [String Filter](#string-filter-argument-reference) below for more details. +* `finding_provider_fields_severity_original` - (Optional) The finding provider's original value for the severity. See [String Filter](#string-filter-argument-reference) below for more details. +* `finding_provider_fields_types` - (Optional) One or more finding types that the finding provider assigned to the finding. Uses the format of `namespace/category/classifier` that classify a finding. Valid namespace values include: `Software and Configuration Checks`, `TTPs`, `Effects`, `Unusual Behaviors`, and `Sensitive Data Identifications`. See [String Filter](#string-filter-argument-reference) below for more details. 
+* `first_observed_at` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider first observed the potential security issue that a finding captured. See [Date Filter](#date-filter-argument-reference) below for more details. +* `generator_id` - (Optional) The identifier for the solution-specific component (a discrete unit of logic) that generated a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `id` - (Optional) The security findings provider-specific identifier for a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `keyword` - (Optional) A keyword for a finding. See [Keyword Filter](#keyword-filter-argument-reference) below for more details. +* `last_observed_at` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider most recently observed the potential security issue that a finding captured. See [Date Filter](#date-filter-argument-reference) below for more details. +* `malware_name` - (Optional) The name of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details. +* `malware_path` - (Optional) The filesystem path of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details. +* `malware_state` - (Optional) The state of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details. +* `malware_type` - (Optional) The type of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details. +* `network_destination_domain` - (Optional) The destination domain of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `network_destination_ipv4` - (Optional) The destination IPv4 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details. +* `network_destination_ipv6` - (Optional) The destination IPv6 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details. +* `network_destination_port` - (Optional) The destination port of network-related information about a finding. See [Number Filter](#number-filter-argument-reference) below for more details. +* `network_direction` - (Optional) Indicates the direction of network traffic associated with a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `network_protocol` - (Optional) The protocol of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `network_source_domain` - (Optional) The source domain of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `network_source_ipv4` - (Optional) The source IPv4 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details. +* `network_source_ipv6` - (Optional) The source IPv6 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details. +* `network_source_mac` - (Optional) The source media access control (MAC) address of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details. 
+* `network_source_port` - (Optional) The source port of network-related information about a finding. See [Number Filter](#number-filter-argument-reference) below for more details. +* `note_text` - (Optional) The text of a note. See [String Filter](#string-filter-argument-reference) below for more details. +* `note_updated_at` - (Optional) The timestamp of when the note was updated. See [Date Filter](#date-filter-argument-reference) below for more details. +* `note_updated_by` - (Optional) The principal that created a note. See [String Filter](#string-filter-argument-reference) below for more details. +* `process_launched_at` - (Optional) The date/time that the process was launched. See [Date Filter](#date-filter-argument-reference) below for more details. +* `process_name` - (Optional) The name of the process. See [String Filter](#string-filter-argument-reference) below for more details. +* `process_parent_pid` - (Optional) The parent process ID. See [Number Filter](#number-filter-argument-reference) below for more details. +* `process_path` - (Optional) The path to the process executable. See [String Filter](#string-filter-argument-reference) below for more details. +* `process_pid` - (Optional) The process ID. See [Number Filter](#number-filter-argument-reference) below for more details. +* `process_terminated_at` - (Optional) The date/time that the process was terminated. See [Date Filter](#date-filter-argument-reference) below for more details. +* `product_arn` - (Optional) The ARN generated by Security Hub that uniquely identifies a third-party company (security findings provider) after this provider's product (solution that generates findings) is registered with Security Hub. See [String Filter](#string-filter-argument-reference) below for more details. +* `product_fields` - (Optional) A data type where security-findings providers can include additional solution-specific details that aren't part of the defined `AwsSecurityFinding` format. See [Map Filter](#map-filter-argument-reference) below for more details. +* `product_name` - (Optional) The name of the solution (product) that generates findings. See [String Filter](#string-filter-argument-reference) below for more details. +* `recommendation_text` - (Optional) The recommendation of what to do about the issue described in a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `record_state` - (Optional) The updated record state for the finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `related_findings_id` - (Optional) The solution-generated identifier for a related finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `related_findings_product_arn` - (Optional) The ARN of the solution that generated a related finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_iam_instance_profile_arn` - (Optional) The IAM profile ARN of the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_image_id` - (Optional) The Amazon Machine Image (AMI) ID of the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_ipv4_addresses` - (Optional) The IPv4 addresses associated with the instance. See [Ip Filter](#ip-filter-argument-reference) below for more details. 
+* `resource_aws_ec2_instance_ipv6_addresses` - (Optional) The IPv6 addresses associated with the instance. See [Ip Filter](#ip-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_key_name` - (Optional) The key name associated with the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_launched_at` - (Optional) The date and time the instance was launched. See [Date Filter](#date-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_subnet_id` - (Optional) The identifier of the subnet that the instance was launched in. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_type` - (Optional) The instance type of the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_ec2_instance_vpc_id` - (Optional) The identifier of the VPC that the instance was launched in. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_iam_access_key_created_at` - (Optional) The creation date/time of the IAM access key related to a finding. See [Date Filter](#date-filter-argument-reference) below for more details. +* `resource_aws_iam_access_key_status` - (Optional) The status of the IAM access key related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_iam_access_key_user_name` - (Optional) The user associated with the IAM access key related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_s3_bucket_owner_id` - (Optional) The canonical user ID of the owner of the S3 bucket. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_aws_s3_bucket_owner_name` - (Optional) The display name of the owner of the S3 bucket. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_container_image_id` - (Optional) The identifier of the image related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_container_image_name` - (Optional) The name of the image related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_container_launched_at` - (Optional) The date/time that the container was started. See [Date Filter](#date-filter-argument-reference) below for more details. +* `resource_container_name` - (Optional) The name of the container related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_details_other` - (Optional) The details of a resource that doesn't have a specific subfield for the resource type defined. See [Map Filter](#map-filter-argument-reference) below for more details. +* `resource_id` - (Optional) The canonical identifier for the given resource type. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_partition` - (Optional) The canonical AWS partition name that the Region is assigned to. See [String Filter](#string-filter-argument-reference) below for more details. +* `resource_region` - (Optional) The canonical AWS external Region name where this resource is located. See [String Filter](#string-filter-argument-reference) below for more details. 
+* `resource_tags` - (Optional) A list of AWS tags associated with a resource at the time the finding was processed. See [Map Filter](#map-filter-argument-reference) below for more details.
+* `resource_type` - (Optional) Specifies the type of the resource that details are provided for. See [String Filter](#string-filter-argument-reference) below for more details.
+* `severity_label` - (Optional) The label of a finding's severity. See [String Filter](#string-filter-argument-reference) below for more details.
+* `source_url` - (Optional) A URL that links to a page about the current finding in the security-findings provider's solution. See [String Filter](#string-filter-argument-reference) below for more details.
+* `threat_intel_indicator_category` - (Optional) The category of a threat intelligence indicator. See [String Filter](#string-filter-argument-reference) below for more details.
+* `threat_intel_indicator_last_observed_at` - (Optional) The date/time of the last observation of a threat intelligence indicator. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `threat_intel_indicator_source` - (Optional) The source of the threat intelligence. See [String Filter](#string-filter-argument-reference) below for more details.
+* `threat_intel_indicator_source_url` - (Optional) The URL for more details from the source of the threat intelligence. See [String Filter](#string-filter-argument-reference) below for more details.
+* `threat_intel_indicator_type` - (Optional) The type of a threat intelligence indicator. See [String Filter](#string-filter-argument-reference) below for more details.
+* `threat_intel_indicator_value` - (Optional) The value of a threat intelligence indicator. See [String Filter](#string-filter-argument-reference) below for more details.
+* `title` - (Optional) A finding's title. See [String Filter](#string-filter-argument-reference) below for more details.
+* `type` - (Optional) A finding type in the format of `namespace/category/classifier` that classifies a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `updated_at` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider last updated the finding record. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `user_defined_values` - (Optional) A list of name/value string pairs associated with the finding. These are custom, user-defined fields added to a finding. See [Map Filter](#map-filter-argument-reference) below for more details.
+* `verification_state` - (Optional) The veracity of a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `workflow_status` - (Optional) The status of the investigation into a finding. See [Workflow Status Filter](#workflow-status-filter-argument-reference) below for more details.
+
+### Date Filter Argument Reference
+
+The date filter configuration block supports the following arguments:
+
+* `date_range` - (Optional) A configuration block of the date range for the date filter. See [date_range](#date_range-argument-reference) below for more details.
+* `end` - (Optional) An end date for the date filter. Required with `start` if `date_range` is not specified.
+* `start` - (Optional) A start date for the date filter. Required with `end` if `date_range` is not specified.
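+
+For example, a hand-written sketch (not `cdktf convert` output) of an insight that matches findings updated within the last seven days, using the relative `date_range` block described in the next section. The stack and insight names are illustrative, and struct values are passed as plain dicts, which jsii accepts in place of the generated config classes:
+
+```python
+# Hand-written sketch - names and dict-style struct values are illustrative.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.securityhub_insight import SecurityhubInsight
+class DateFilterSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SecurityhubInsight(self, "recently_updated",
+            name="recently-updated-findings",
+            group_by_attribute="ResourceType",
+            filters={
+                # Relative date filter: findings updated in the last 7 days.
+                "updated_at": [{
+                    "date_range": {"unit": "DAYS", "value": 7}
+                }]
+            }
+        )
+```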
+
+### date_range Argument Reference
+
+The `date_range` configuration block supports the following arguments:
+
+* `unit` - (Required) A date range unit for the date filter. Valid values: `DAYS`.
+* `value` - (Required) A date range value for the date filter, provided as an Integer.
+
+### Ip Filter Argument Reference
+
+The Ip filter configuration block supports the following arguments:
+
+* `cidr` - (Required) A finding's CIDR value.
+
+### Keyword Filter Argument Reference
+
+The keyword filter configuration block supports the following arguments:
+
+* `value` - (Required) A value for the keyword.
+
+### Map Filter Argument Reference
+
+The map filter configuration block supports the following arguments:
+
+* `comparison` - (Required) The condition to apply to a string value when querying for findings. Valid values include: `EQUALS` and `NOT_EQUALS`.
+* `key` - (Required) The key of the map filter. For example, for `ResourceTags`, `Key` identifies the name of the tag. For `UserDefinedFields`, `Key` is the name of the field.
+* `value` - (Required) The value for the key in the map filter. Filter values are case sensitive. For example, one of the values for a tag called `Department` might be `Security`. If you provide `security` as the filter value, then there is no match.
+
+### Number Filter Argument Reference
+
+The number filter configuration block supports the following arguments:
+
+~> **NOTE:** Only one of `eq`, `gte`, or `lte` must be specified.
+
+* `eq` - (Optional) The equal-to condition to be applied to a single field when querying for findings, provided as a String.
+* `gte` - (Optional) The greater-than-equal condition to be applied to a single field when querying for findings, provided as a String.
+* `lte` - (Optional) The less-than-equal condition to be applied to a single field when querying for findings, provided as a String.
+
+### String Filter Argument Reference
+
+The string filter configuration block supports the following arguments:
+
+* `comparison` - (Required) The condition to apply to a string value when querying for findings. Valid values include: `EQUALS`, `PREFIX`, `NOT_EQUALS`, `PREFIX_NOT_EQUALS`.
+* `value` - (Required) The string filter value. Filter values are case sensitive.
+
+### Workflow Status Filter Argument Reference
+
+The workflow status filter configuration block supports the following arguments:
+
+* `comparison` - (Required) The condition to apply to a string value when querying for findings. Valid values include: `EQUALS`, `PREFIX`, `NOT_EQUALS`, `PREFIX_NOT_EQUALS`.
+* `value` - (Required) The string filter value. Valid values include: `NEW`, `NOTIFIED`, `SUPPRESSED`, and `RESOLVED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ARN of the insight.
+* `arn` - ARN of the insight.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub insights using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Security Hub insights using the ARN.
For example: + +```console +% terraform import aws_securityhub_insight.example arn:aws:securityhub:us-west-2:1234567890:insight/1234567890/custom/91299ed7-abd0-4e44-a858-d0b15e37141a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/securityhub_invite_accepter.markdown b/website/docs/cdktf/python/r/securityhub_invite_accepter.markdown new file mode 100644 index 00000000000..26cf7b8e8ae --- /dev/null +++ b/website/docs/cdktf/python/r/securityhub_invite_accepter.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_invite_accepter" +description: |- + Accepts a Security Hub invitation. +--- + + + +# Resource: aws_securityhub_invite_accepter + +-> **Note:** AWS accounts can only be associated with a single Security Hub master account. Destroying this resource will disassociate the member account from the master account. + +Accepts a Security Hub invitation. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_invite_accepter import SecurityhubInviteAccepter +from imports.aws.securityhub_member import SecurityhubMember +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SecurityhubAccount(self, "example") + invitee = SecurityhubAccount(self, "invitee", + provider="aws.invitee" + ) + aws_securityhub_member_example = SecurityhubMember(self, "example_2", + account_id="123456789012", + email="example@example.com", + invite=True + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_member_example.override_logical_id("example") + aws_securityhub_invite_accepter_invitee = SecurityhubInviteAccepter(self, "invitee_3", + depends_on=[invitee], + master_id=Token.as_string(aws_securityhub_member_example.master_id), + provider="aws.invitee" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_invite_accepter_invitee.override_logical_id("invitee") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `master_id` - (Required) The account ID of the master Security Hub account whose invitation you're accepting. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `invitation_id` - The ID of the invitation. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub invite acceptance using the account ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Security Hub invite acceptance using the account ID. 
For example: + +```console +% terraform import aws_securityhub_invite_accepter.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/securityhub_member.markdown b/website/docs/cdktf/python/r/securityhub_member.markdown new file mode 100644 index 00000000000..9606f9a43a9 --- /dev/null +++ b/website/docs/cdktf/python/r/securityhub_member.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_member" +description: |- + Provides a Security Hub member resource. +--- + + + +# Resource: aws_securityhub_member + +Provides a Security Hub member resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_member import SecurityhubMember +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + aws_securityhub_member_example = SecurityhubMember(self, "example_1", + account_id="123456789012", + depends_on=[example], + email="example@example.com", + invite=True + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_member_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `account_id` - (Required) The ID of the member AWS account. +* `email` - (Optional) The email of the member AWS account. +* `invite` - (Optional) Boolean whether to invite the account to Security Hub as a member. Defaults to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the member AWS account (matches `account_id`). +* `master_id` - The ID of the master Security Hub AWS account. +* `member_status` - The status of the member account relationship. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub members using their account ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Security Hub members using their account ID. For example: + +```console +% terraform import aws_securityhub_member.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/securityhub_organization_admin_account.html.markdown b/website/docs/cdktf/python/r/securityhub_organization_admin_account.html.markdown new file mode 100644 index 00000000000..4f3ed89b087 --- /dev/null +++ b/website/docs/cdktf/python/r/securityhub_organization_admin_account.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_organization_admin_account" +description: |- + Manages a Security Hub administrator account for an organization. 
+--- + + + +# Resource: aws_securityhub_organization_admin_account + +Manages a Security Hub administrator account for an organization. The AWS account utilizing this resource must be an Organizations primary account. More information about Organizations support in Security Hub can be found in the [Security Hub User Guide](https://docs.aws.amazon.com/securityhub/latest/userguide/designate-orgs-admin-account.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.organizations_organization import OrganizationsOrganization +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_organization_admin_account import SecurityhubOrganizationAdminAccount +from imports.aws.securityhub_organization_configuration import SecurityhubOrganizationConfiguration +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = OrganizationsOrganization(self, "example", + aws_service_access_principals=["securityhub.amazonaws.com"], + feature_set="ALL" + ) + aws_securityhub_account_example = SecurityhubAccount(self, "example_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_account_example.override_logical_id("example") + aws_securityhub_organization_admin_account_example = + SecurityhubOrganizationAdminAccount(self, "example_2", + admin_account_id="123456789012", + depends_on=[example] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_organization_admin_account_example.override_logical_id("example") + aws_securityhub_organization_configuration_example = + SecurityhubOrganizationConfiguration(self, "example_3", + auto_enable=True + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_organization_configuration_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `admin_account_id` - (Required) The AWS account identifier of the account to designate as the Security Hub administrator account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AWS account identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub Organization Admin Accounts using the AWS account ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Security Hub Organization Admin Accounts using the AWS account ID. 
For example:
+
+```console
+% terraform import aws_securityhub_organization_admin_account.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/securityhub_organization_configuration.markdown b/website/docs/cdktf/python/r/securityhub_organization_configuration.markdown
new file mode 100644
index 00000000000..bf223dc0982
--- /dev/null
+++ b/website/docs/cdktf/python/r/securityhub_organization_configuration.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_organization_configuration"
+description: |-
+  Manages the Security Hub Organization Configuration
+---
+
+
+
+# Resource: aws_securityhub_organization_configuration
+
+Manages the Security Hub Organization Configuration.
+
+~> **NOTE:** This resource requires an [`aws_securityhub_organization_admin_account`](/docs/providers/aws/r/securityhub_organization_admin_account.html) to be configured (not necessarily with Terraform). More information about managing Security Hub in an organization can be found in the [Managing administrator and member accounts](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-accounts.html) documentation.
+
+~> **NOTE:** This is an advanced Terraform resource. Terraform will automatically assume management of the Security Hub Organization Configuration without import and perform no actions on removal from the Terraform configuration.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.organizations_organization import OrganizationsOrganization
+from imports.aws.securityhub_organization_admin_account import SecurityhubOrganizationAdminAccount
+from imports.aws.securityhub_organization_configuration import SecurityhubOrganizationConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = OrganizationsOrganization(self, "example",
+            aws_service_access_principals=["securityhub.amazonaws.com"],
+            feature_set="ALL"
+        )
+        aws_securityhub_organization_admin_account_example = SecurityhubOrganizationAdminAccount(self, "example_1",
+            admin_account_id="123456789012",
+            depends_on=[example]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_securityhub_organization_admin_account_example.override_logical_id("example")
+        aws_securityhub_organization_configuration_example = SecurityhubOrganizationConfiguration(self, "example_2",
+            auto_enable=True
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_securityhub_organization_configuration_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `auto_enable` - (Required) Whether to automatically enable Security Hub for new accounts in the organization.
+* `auto_enable_standards` - (Optional) Whether to automatically enable Security Hub default standards for new member accounts in the organization. By default, this parameter is equal to `DEFAULT`, and new member accounts are automatically enabled with default Security Hub standards.
To opt out of enabling default standards for new member accounts, set this parameter equal to `NONE`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AWS Account ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an existing Security Hub enabled account using the AWS account ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import an existing Security Hub enabled account using the AWS account ID. For example: + +```console +% terraform import aws_securityhub_organization_configuration.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/securityhub_product_subscription.markdown b/website/docs/cdktf/python/r/securityhub_product_subscription.markdown new file mode 100644 index 00000000000..4dcb1d8b12b --- /dev/null +++ b/website/docs/cdktf/python/r/securityhub_product_subscription.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_product_subscription" +description: |- + Subscribes to a Security Hub product. +--- + + + +# Resource: aws_securityhub_product_subscription + +Subscribes to a Security Hub product. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_product_subscription import SecurityhubProductSubscription +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + current = DataAwsRegion(self, "current") + aws_securityhub_product_subscription_example = + SecurityhubProductSubscription(self, "example_2", + depends_on=[example], + product_arn="arn:aws:securityhub:${" + current.name + "}:733251395267:product/alertlogic/althreatmanagement" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_securityhub_product_subscription_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `product_arn` - (Required) The ARN of the product that generates findings that you want to import into Security Hub - see below. + +Amazon maintains a list of [Product integrations in AWS Security Hub](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-providers.html) that changes over time. Any of the products on the linked [Available AWS service integrations](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-internal-providers.html) or [Available third-party partner product integrations](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-partner-providers.html) can be configured using `aws_securityhub_product_subscription`. 
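+
+For instance, a minimal hand-written sketch (not `cdktf convert` output) that subscribes to the Amazon GuardDuty integration from the list below, mirroring the example above:
+
+```python
+# Hand-written sketch: subscribe to the Amazon GuardDuty integration.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.securityhub_account import SecurityhubAccount
+from imports.aws.securityhub_product_subscription import SecurityhubProductSubscription
+class GuarddutySubscriptionSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Security Hub must be enabled before a product can be subscribed.
+        example = SecurityhubAccount(self, "example")
+        current = DataAwsRegion(self, "current")
+        SecurityhubProductSubscription(self, "guardduty",
+            depends_on=[example],
+            product_arn="arn:aws:securityhub:${" + current.name + "}::product/aws/guardduty"
+        )
+```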
+ +Available products can also be listed by running the AWS CLI command `aws securityhub describe-products`. + +A subset of currently available products (remember to replace `${var.region}` as appropriate) includes: + +* `arn:aws:securityhub:${var.region}::product/aws/guardduty` +* `arn:aws:securityhub:${var.region}::product/aws/inspector` +* `arn:aws:securityhub:${var.region}::product/aws/macie` +* `arn:aws:securityhub:${var.region}::product/alertlogic/althreatmanagement` +* `arn:aws:securityhub:${var.region}::product/armordefense/armoranywhere` +* `arn:aws:securityhub:${var.region}::product/barracuda/cloudsecurityguardian` +* `arn:aws:securityhub:${var.region}::product/checkpoint/cloudguard-iaas` +* `arn:aws:securityhub:${var.region}::product/checkpoint/dome9-arc` +* `arn:aws:securityhub:${var.region}::product/crowdstrike/crowdstrike-falcon` +* `arn:aws:securityhub:${var.region}::product/cyberark/cyberark-pta` +* `arn:aws:securityhub:${var.region}::product/f5networks/f5-advanced-waf` +* `arn:aws:securityhub:${var.region}::product/fortinet/fortigate` +* `arn:aws:securityhub:${var.region}::product/guardicore/aws-infection-monkey` +* `arn:aws:securityhub:${var.region}::product/guardicore/guardicore` +* `arn:aws:securityhub:${var.region}::product/ibm/qradar-siem` +* `arn:aws:securityhub:${var.region}::product/imperva/imperva-attack-analytics` +* `arn:aws:securityhub:${var.region}::product/mcafee-skyhigh/mcafee-mvision-cloud-aws` +* `arn:aws:securityhub:${var.region}::product/paloaltonetworks/redlock` +* `arn:aws:securityhub:${var.region}::product/paloaltonetworks/vm-series` +* `arn:aws:securityhub:${var.region}::product/qualys/qualys-pc` +* `arn:aws:securityhub:${var.region}::product/qualys/qualys-vm` +* `arn:aws:securityhub:${var.region}::product/rapid7/insightvm` +* `arn:aws:securityhub:${var.region}::product/sophos/sophos-server-protection` +* `arn:aws:securityhub:${var.region}::product/splunk/splunk-enterprise` +* `arn:aws:securityhub:${var.region}::product/splunk/splunk-phantom` +* `arn:aws:securityhub:${var.region}::product/sumologicinc/sumologic-mda` +* `arn:aws:securityhub:${var.region}::product/symantec-corp/symantec-cwp` +* `arn:aws:securityhub:${var.region}::product/tenable/tenable-io` +* `arn:aws:securityhub:${var.region}::product/trend-micro/deep-security` +* `arn:aws:securityhub:${var.region}::product/turbot/turbot` +* `arn:aws:securityhub:${var.region}::product/twistlock/twistlock-enterprise` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of a resource that represents your subscription to the product that generates the findings that you want to import into Security Hub. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub product subscriptions using `product_arn,arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Security Hub product subscriptions using `product_arn,arn`. 
For example:
+
+```console
+% terraform import aws_securityhub_product_subscription.example arn:aws:securityhub:eu-west-1:733251395267:product/alertlogic/althreatmanagement,arn:aws:securityhub:eu-west-1:123456789012:product-subscription/alertlogic/althreatmanagement
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/securityhub_standards_control.markdown b/website/docs/cdktf/python/r/securityhub_standards_control.markdown
new file mode 100644
index 00000000000..ceb8119de18
--- /dev/null
+++ b/website/docs/cdktf/python/r/securityhub_standards_control.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_standards_control"
+description: |-
+  Enable/disable Security Hub standards controls.
+---
+
+
+
+# Resource: aws_securityhub_standards_control
+
+Disable/enable a Security Hub standards control in the current region.
+
+The `aws_securityhub_standards_control` resource behaves differently from normal resources, in that
+Terraform does not _create_ this resource, but instead "adopts" it
+into management. When you _delete_ this resource configuration, Terraform "abandons" the resource as-is and just removes it from the state.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.securityhub_account import SecurityhubAccount
+from imports.aws.securityhub_standards_control import SecurityhubStandardsControl
+from imports.aws.securityhub_standards_subscription import SecurityhubStandardsSubscription
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SecurityhubAccount(self, "example")
+        cis_aws_foundations_benchmark = SecurityhubStandardsSubscription(self, "cis_aws_foundations_benchmark",
+            depends_on=[example],
+            standards_arn="arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0"
+        )
+        SecurityhubStandardsControl(self, "ensure_iam_password_policy_prevents_password_reuse",
+            control_status="DISABLED",
+            depends_on=[cis_aws_foundations_benchmark],
+            disabled_reason="We handle password policies within Okta",
+            standards_control_arn="arn:aws:securityhub:us-east-1:111111111111:control/cis-aws-foundations-benchmark/v/1.2.0/1.10"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `standards_control_arn` - (Required) The standards control ARN. See the AWS documentation for how to list existing controls using [`get-enabled-standards`](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/securityhub/get-enabled-standards.html) and [`describe-standards-controls`](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/securityhub/describe-standards-controls.html).
+* `control_status` – (Required) The control status; valid values are `ENABLED` and `DISABLED`. You must specify the `disabled_reason` argument when setting the control status to `DISABLED`.
+* `disabled_reason` – (Optional) A description of the reason why you are disabling a security standard control. If you specify this attribute, `control_status` will be set to `DISABLED` automatically.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The standard control ARN.
+* `control_id` – The identifier of the security standard control. +* `control_status_updated_at` – The date and time that the status of the security standard control was most recently updated. +* `description` – The standard control longer description. Provides information about what the control is checking for. +* `related_requirements` – The list of requirements that are related to this control. +* `remediation_url` – A link to remediation information for the control in the Security Hub user documentation. +* `severity_rating` – The severity of findings generated from this security standard control. +* `title` – The standard control title. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/securityhub_standards_subscription.markdown b/website/docs/cdktf/python/r/securityhub_standards_subscription.markdown new file mode 100644 index 00000000000..6c99a02f5fb --- /dev/null +++ b/website/docs/cdktf/python/r/securityhub_standards_subscription.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_standards_subscription" +description: |- + Subscribes to a Security Hub standard. +--- + + + +# Resource: aws_securityhub_standards_subscription + +Subscribes to a Security Hub standard. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.securityhub_account import SecurityhubAccount +from imports.aws.securityhub_standards_subscription import SecurityhubStandardsSubscription +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SecurityhubAccount(self, "example") + SecurityhubStandardsSubscription(self, "cis", + depends_on=[example], + standards_arn="arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0" + ) + current = DataAwsRegion(self, "current") + SecurityhubStandardsSubscription(self, "pci_321", + depends_on=[example], + standards_arn="arn:aws:securityhub:${" + current.name + "}::standards/pci-dss/v/3.2.1" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `standards_arn` - (Required) The ARN of a standard - see below. + +Currently available standards (remember to replace `${var.region}` as appropriate): + +| Name | ARN | +|------------------------------------------|-------------------------------------------------------------------------------------------------| +| AWS Foundational Security Best Practices | `arn:aws:securityhub:${var.region}::standards/aws-foundational-security-best-practices/v/1.0.0` | +| CIS AWS Foundations Benchmark v1.2.0 | `arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0` | +| CIS AWS Foundations Benchmark v1.4.0 | `arn:aws:securityhub:${var.region}::standards/cis-aws-foundations-benchmark/v/1.4.0` | +| NIST SP 800-53 Rev. 5 | `arn:aws:securityhub:${var.region}::standards/nist-800-53/v/5.0.0` | +| PCI DSS | `arn:aws:securityhub:${var.region}::standards/pci-dss/v/3.2.1` | + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of a resource that represents your subscription to a supported standard. 
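+
+As a further illustration, the `nist_800_53_rev_5` subscription referenced in the import examples below could be declared with the regional ARN from the table above. This is a hand-written sketch, not `cdktf convert` output:
+
+```python
+# Hand-written sketch: subscribe to NIST SP 800-53 Rev. 5 using the regional
+# ARN from the table above.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.securityhub_account import SecurityhubAccount
+from imports.aws.securityhub_standards_subscription import SecurityhubStandardsSubscription
+class NistSubscriptionSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = SecurityhubAccount(self, "example")
+        current = DataAwsRegion(self, "current")
+        SecurityhubStandardsSubscription(self, "nist_800_53_rev_5",
+            depends_on=[example],
+            standards_arn="arn:aws:securityhub:${" + current.name + "}::standards/nist-800-53/v/5.0.0"
+        )
+```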
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub standards subscriptions using the standards subscription ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Security Hub standards subscriptions using the standards subscription ARN. For example: + +```console +% terraform import aws_securityhub_standards_subscription.cis arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0 +``` + +```console +% terraform import aws_securityhub_standards_subscription.pci_321 arn:aws:securityhub:eu-west-1:123456789012:subscription/pci-dss/v/3.2.1 +``` + +```console +% terraform import aws_securityhub_standards_subscription.nist_800_53_rev_5 arn:aws:securityhub:eu-west-1:123456789012:subscription/nist-800-53/v/5.0.0 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/serverlessapplicationrepository_cloudformation_stack.html.markdown b/website/docs/cdktf/python/r/serverlessapplicationrepository_cloudformation_stack.html.markdown new file mode 100644 index 00000000000..06888f88e18 --- /dev/null +++ b/website/docs/cdktf/python/r/serverlessapplicationrepository_cloudformation_stack.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "Serverless Application Repository" +layout: "aws" +page_title: "AWS: aws_serverlessapplicationrepository_cloudformation_stack" +description: |- + Deploys an Application CloudFormation Stack from the Serverless Application Repository. +--- + + + +# Resource: aws_serverlessapplicationrepository_cloudformation_stack + +Deploys an Application CloudFormation Stack from the Serverless Application Repository. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_partition import DataAwsPartition +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.serverlessapplicationrepository_cloudformation_stack import ServerlessapplicationrepositoryCloudformationStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + current = DataAwsPartition(self, "current") + data_aws_region_current = DataAwsRegion(self, "current_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ data_aws_region_current.override_logical_id("current") + ServerlessapplicationrepositoryCloudformationStack(self, "postgres-rotator", + application_id="arn:aws:serverlessrepo:us-east-1:297356227824:applications/SecretsManagerRDSPostgreSQLRotationSingleUser", + capabilities=["CAPABILITY_IAM", "CAPABILITY_RESOURCE_POLICY"], + name="postgres-rotator", + parameters={ + "endpoint": "secretsmanager.${" + data_aws_region_current.name + "}.${" + current.dns_suffix + "}", + "function_name": "func-postgres-rotator" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the stack to create. The resource deployed in AWS will be prefixed with `serverlessrepo-` +* `application_id` - (Required) The ARN of the application from the Serverless Application Repository. +* `capabilities` - (Required) A list of capabilities. Valid values are `CAPABILITY_IAM`, `CAPABILITY_NAMED_IAM`, `CAPABILITY_RESOURCE_POLICY`, or `CAPABILITY_AUTO_EXPAND` +* `parameters` - (Optional) A map of Parameter structures that specify input parameters for the stack. +* `semantic_version` - (Optional) The version of the application to deploy. If not supplied, deploys the latest version. +* `tags` - (Optional) A list of tags to associate with this stack. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A unique identifier of the stack. +* `outputs` - A map of outputs from the stack. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Serverless Application Repository Stack using the CloudFormation Stack name (with or without the `serverlessrepo-` prefix) or the CloudFormation Stack ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Serverless Application Repository Stack using the CloudFormation Stack name (with or without the `serverlessrepo-` prefix) or the CloudFormation Stack ID. For example: + +```console +% terraform import aws_serverlessapplicationrepository_cloudformation_stack.example serverlessrepo-postgres-rotator +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/service_discovery_http_namespace.html.markdown b/website/docs/cdktf/python/r/service_discovery_http_namespace.html.markdown new file mode 100644 index 00000000000..f6d63093187 --- /dev/null +++ b/website/docs/cdktf/python/r/service_discovery_http_namespace.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_http_namespace" +description: |- + Provides a Service Discovery HTTP Namespace resource. 
+---
+
+
+
+# Resource: aws_service_discovery_http_namespace
+
+Provides a Service Discovery HTTP Namespace resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.service_discovery_instance import ServiceDiscoveryInstance +from imports.aws.service_discovery_private_dns_namespace import ServiceDiscoveryPrivateDnsNamespace +from imports.aws.service_discovery_service import ServiceDiscoveryService +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Vpc(self, "example", + cidr_block="10.0.0.0/16", + enable_dns_hostnames=True, + enable_dns_support=True + ) + aws_service_discovery_private_dns_namespace_example = + ServiceDiscoveryPrivateDnsNamespace(self, "example_1", + description="example", + name="example.terraform.local", + vpc=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_service_discovery_private_dns_namespace_example.override_logical_id("example") + aws_service_discovery_service_example = ServiceDiscoveryService(self, "example_2", + dns_config=ServiceDiscoveryServiceDnsConfig( + dns_records=[ServiceDiscoveryServiceDnsConfigDnsRecords( + ttl=10, + type="A" + ) + ], + namespace_id=Token.as_string(aws_service_discovery_private_dns_namespace_example.id), + routing_policy="MULTIVALUE" + ), + health_check_custom_config=ServiceDiscoveryServiceHealthCheckCustomConfig( + failure_threshold=1 + ), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_service_discovery_service_example.override_logical_id("example") + aws_service_discovery_instance_example = ServiceDiscoveryInstance(self, "example_3", + attributes={ + "AWS_INSTANCE_IPV4": "172.18.0.1", + "custom_attribute": "custom" + }, + instance_id="example-instance-id", + service_id=Token.as_string(aws_service_discovery_service_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_service_discovery_instance_example.override_logical_id("example") +``` + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.service_discovery_http_namespace import ServiceDiscoveryHttpNamespace +from imports.aws.service_discovery_instance import ServiceDiscoveryInstance +from imports.aws.service_discovery_service import ServiceDiscoveryService +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = ServiceDiscoveryHttpNamespace(self, "example", + description="example", + name="example.terraform.com" + ) + aws_service_discovery_service_example = ServiceDiscoveryService(self, "example_1", + name="example", + namespace_id=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_service_discovery_service_example.override_logical_id("example") + aws_service_discovery_instance_example = ServiceDiscoveryInstance(self, "example_2", + attributes={ + "AWS_EC2_INSTANCE_ID": "i-0abdg374kd892cj6dl" + }, + instance_id="example-instance-id", + service_id=Token.as_string(aws_service_discovery_service_example.id) + ) + # This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.
+        aws_service_discovery_instance_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `instance_id` - (Required, ForceNew) The ID of the service instance.
+* `service_id` - (Required, ForceNew) The ID of the service that you want to use to create the instance.
+* `attributes` - (Required) A map that contains the attributes of the instance. Check the [doc](https://docs.aws.amazon.com/cloud-map/latest/api/API_RegisterInstance.html#API_RegisterInstance_RequestSyntax) for the supported attributes and syntax.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the instance.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Instance using the service ID and instance ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Service Discovery Instance using the service ID and instance ID. For example:
+
+```console
+% terraform import aws_service_discovery_instance.example 0123456789/i-0123
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/service_discovery_private_dns_namespace.html.markdown b/website/docs/cdktf/python/r/service_discovery_private_dns_namespace.html.markdown
new file mode 100644
index 00000000000..48c615c7ff3
--- /dev/null
+++ b/website/docs/cdktf/python/r/service_discovery_private_dns_namespace.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "Cloud Map"
+layout: "aws"
+page_title: "AWS: aws_service_discovery_private_dns_namespace"
+description: |-
+  Provides a Service Discovery Private DNS Namespace resource.
+---
+
+
+
+# Resource: aws_service_discovery_private_dns_namespace
+
+Provides a Service Discovery Private DNS Namespace resource.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.service_discovery_private_dns_namespace import ServiceDiscoveryPrivateDnsNamespace
+from imports.aws.vpc import Vpc
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Vpc(self, "example",
+            cidr_block="10.0.0.0/16"
+        )
+        aws_service_discovery_private_dns_namespace_example = ServiceDiscoveryPrivateDnsNamespace(self, "example_1",
+            description="example",
+            name="hoge.example.local",
+            vpc=example.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_service_discovery_private_dns_namespace_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the namespace.
+* `vpc` - (Required) The ID of the VPC that you want to associate the namespace with.
+* `description` - (Optional) The description that you specify for the namespace when you create it.
+* `tags` - (Optional) A map of tags to assign to the namespace. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of a namespace. +* `arn` - The ARN that Amazon Route 53 assigns to the namespace when you create it. +* `hosted_zone` - The ID for the hosted zone that Amazon Route 53 creates when you create a namespace. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Private DNS Namespace using the namespace ID and VPC ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Service Discovery Private DNS Namespace using the namespace ID and VPC ID. For example: + +```console +% terraform import aws_service_discovery_private_dns_namespace.example 0123456789:vpc-123345 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/service_discovery_public_dns_namespace.html.markdown b/website/docs/cdktf/python/r/service_discovery_public_dns_namespace.html.markdown new file mode 100644 index 00000000000..fe59678c5d6 --- /dev/null +++ b/website/docs/cdktf/python/r/service_discovery_public_dns_namespace.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_public_dns_namespace" +description: |- + Provides a Service Discovery Public DNS Namespace resource. +--- + + + +# Resource: aws_service_discovery_public_dns_namespace + +Provides a Service Discovery Public DNS Namespace resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.service_discovery_public_dns_namespace import ServiceDiscoveryPublicDnsNamespace +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServiceDiscoveryPublicDnsNamespace(self, "example", + description="example", + name="hoge.example.com" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the namespace. +* `description` - (Optional) The description that you specify for the namespace when you create it. +* `tags` - (Optional) A map of tags to assign to the namespace. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of a namespace. +* `arn` - The ARN that Amazon Route 53 assigns to the namespace when you create it. +* `hosted_zone` - The ID for the hosted zone that Amazon Route 53 creates when you create a namespace. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Public DNS Namespace using the namespace ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Service Discovery Public DNS Namespace using the namespace ID. For example: + +```console +% terraform import aws_service_discovery_public_dns_namespace.example 0123456789 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/service_discovery_service.html.markdown b/website/docs/cdktf/python/r/service_discovery_service.html.markdown new file mode 100644 index 00000000000..f9f45d3ab7d --- /dev/null +++ b/website/docs/cdktf/python/r/service_discovery_service.html.markdown @@ -0,0 +1,170 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_service" +description: |- + Provides a Service Discovery Service resource. +--- + + + +# Resource: aws_service_discovery_service + +Provides a Service Discovery Service resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.service_discovery_private_dns_namespace import ServiceDiscoveryPrivateDnsNamespace +from imports.aws.service_discovery_service import ServiceDiscoveryService +from imports.aws.vpc import Vpc +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Vpc(self, "example", + cidr_block="10.0.0.0/16", + enable_dns_hostnames=True, + enable_dns_support=True + ) + aws_service_discovery_private_dns_namespace_example = + ServiceDiscoveryPrivateDnsNamespace(self, "example_1", + description="example", + name="example.terraform.local", + vpc=example.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        aws_service_discovery_private_dns_namespace_example.override_logical_id("example")
+        aws_service_discovery_service_example = ServiceDiscoveryService(self, "example_2",
+            dns_config=ServiceDiscoveryServiceDnsConfig(
+                dns_records=[ServiceDiscoveryServiceDnsConfigDnsRecords(
+                    ttl=10,
+                    type="A"
+                )
+                ],
+                namespace_id=Token.as_string(aws_service_discovery_private_dns_namespace_example.id),
+                routing_policy="MULTIVALUE"
+            ),
+            health_check_custom_config=ServiceDiscoveryServiceHealthCheckCustomConfig(
+                failure_threshold=1
+            ),
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_service_discovery_service_example.override_logical_id("example")
+```
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.service_discovery_public_dns_namespace import ServiceDiscoveryPublicDnsNamespace
+from imports.aws.service_discovery_service import ServiceDiscoveryService
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = ServiceDiscoveryPublicDnsNamespace(self, "example",
+            description="example",
+            name="example.terraform.com"
+        )
+        aws_service_discovery_service_example = ServiceDiscoveryService(self, "example_1",
+            dns_config=ServiceDiscoveryServiceDnsConfig(
+                dns_records=[ServiceDiscoveryServiceDnsConfigDnsRecords(
+                    ttl=10,
+                    type="A"
+                )
+                ],
+                namespace_id=example.id
+            ),
+            health_check_config=ServiceDiscoveryServiceHealthCheckConfig(
+                failure_threshold=10,
+                resource_path="path",
+                type="HTTP"
+            ),
+            name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_service_discovery_service_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required, ForceNew) The name of the service.
+* `description` - (Optional) The description of the service.
+* `dns_config` - (Optional) A complex type that contains information about the resource record sets that you want Amazon Route 53 to create when you register an instance.
+* `health_check_config` - (Optional) A complex type that contains settings for an optional health check. Only for Public DNS namespaces.
+* `force_destroy` - (Optional, Default: `false`) A boolean that indicates all instances should be deleted from the service so that the service can be destroyed without error. These instances are not recoverable.
+* `health_check_custom_config` - (Optional, ForceNew) A complex type that contains settings for ECS managed health checks.
+* `namespace_id` - (Optional) The ID of the namespace that you want to use to create the service.
+* `type` - (Optional) If present, specifies that the service instances are only discoverable using the `DiscoverInstances` API operation. No DNS records are registered for the service instances. The only valid value is `HTTP`. See the sketch after this list.
+* `tags` - (Optional) A map of tags to assign to the service. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
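+
+The `type = "HTTP"` case is easy to miss because both examples above register DNS records. As a minimal hand-written sketch (not `cdktf convert` output; it assumes the `service_discovery_http_namespace` binding generated by `cdktf get`), an API-only service omits `dns_config` entirely and is reachable only through `DiscoverInstances`:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.service_discovery_http_namespace import ServiceDiscoveryHttpNamespace
+from imports.aws.service_discovery_service import ServiceDiscoveryService
+class HttpOnlySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # An HTTP namespace has no hosted zone, so the service carries no dns_config.
+        example = ServiceDiscoveryHttpNamespace(self, "example",
+            name="example.http"
+        )
+        ServiceDiscoveryService(self, "example_1",
+            name="example",
+            namespace_id=example.id,
+            type="HTTP"
+        )
+```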
+ +### dns_config + +This argument supports the following arguments: + +* `namespace_id` - (Required, ForceNew) The ID of the namespace to use for DNS configuration. +* `dns_records` - (Required) An array that contains one DnsRecord object for each resource record set. +* `routing_policy` - (Optional) The routing policy that you want to apply to all records that Route 53 creates when you register an instance and specify the service. Valid Values: MULTIVALUE, WEIGHTED + +#### dns_records + +This argument supports the following arguments: + +* `ttl` - (Required) The amount of time, in seconds, that you want DNS resolvers to cache the settings for this resource record set. +* `type` - (Required, ForceNew) The type of the resource, which indicates the value that Amazon Route 53 returns in response to DNS queries. Valid Values: A, AAAA, SRV, CNAME + +### health_check_config + +This argument supports the following arguments: + +* `failure_threshold` - (Optional) The number of consecutive health checks. Maximum value of 10. +* `resource_path` - (Optional) The path that you want Route 53 to request when performing health checks. Route 53 automatically adds the DNS name for the service. If you don't specify a value, the default value is /. +* `type` - (Optional, ForceNew) The type of health check that you want to create, which indicates how Route 53 determines whether an endpoint is healthy. Valid Values: HTTP, HTTPS, TCP + +### health_check_custom_config + +This argument supports the following arguments: + +* `failure_threshold` - (Optional, ForceNew) The number of 30-second intervals that you want service discovery to wait before it changes the health status of a service instance. Maximum value of 10. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the service. +* `arn` - The ARN of the service. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Service using the service ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Service Discovery Service using the service ID. For example: + +```console +% terraform import aws_service_discovery_service.example 0123456789 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_budget_resource_association.html.markdown b/website/docs/cdktf/python/r/servicecatalog_budget_resource_association.html.markdown new file mode 100644 index 00000000000..38cc5b0c58e --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_budget_resource_association.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_budget_resource_association" +description: |- + Manages a Service Catalog Budget Resource Association +--- + + + +# Resource: aws_servicecatalog_budget_resource_association + +Manages a Service Catalog Budget Resource Association. 
+ +-> **Tip:** A "resource" is either a Service Catalog portfolio or product. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_budget_resource_association import ServicecatalogBudgetResourceAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogBudgetResourceAssociation(self, "example", + budget_name="budget-pjtvyakdlyo3m", + resource_id="prod-dnigbtea24ste" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `budget_name` - (Required) Budget name. +* `resource_id` - (Required) Resource identifier. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the association. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3m`) +- `read` - (Default `10m`) +- `delete` - (Default `3m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_budget_resource_association` using the budget name and resource ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_budget_resource_association` using the budget name and resource ID. For example: + +```console +% terraform import aws_servicecatalog_budget_resource_association.example budget-pjtvyakdlyo3m:prod-dnigbtea24ste +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_constraint.html.markdown b/website/docs/cdktf/python/r/servicecatalog_constraint.html.markdown new file mode 100644 index 00000000000..dfd9b53b120 --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_constraint.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_constraint" +description: |- + Manages a Service Catalog Constraint +--- + + + +# Resource: aws_servicecatalog_constraint + +Manages a Service Catalog Constraint. + +~> **NOTE:** This resource does not associate a Service Catalog product and portfolio. However, the product and portfolio must be associated (see the `aws_servicecatalog_product_portfolio_association` resource) prior to creating a constraint or you will receive an error. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_constraint import ServicecatalogConstraint +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogConstraint(self, "example", + description="Back off, man. 
I'm a scientist.",
+            parameters=Token.as_string(
+                Fn.jsonencode({
+                    "RoleArn": "arn:aws:iam::123456789012:role/LaunchRole"
+                })),
+            portfolio_id=Token.as_string(aws_servicecatalog_portfolio_example.id),
+            product_id=Token.as_string(aws_servicecatalog_product_example.id),
+            type="LAUNCH"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `parameters` - (Required) Constraint parameters in JSON format. The syntax depends on the constraint type. See details below.
+* `portfolio_id` - (Required) Portfolio identifier.
+* `product_id` - (Required) Product identifier.
+* `type` - (Required) Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `RESOURCE_UPDATE`, `STACKSET`, and `TEMPLATE`.
+
+The following arguments are optional:
+
+* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+* `description` - (Optional) Description of the constraint.
+
+### `parameters`
+
+The `type` you specify determines what must be included in the `parameters` JSON:
+
+* `LAUNCH`: You are required to specify either the `RoleArn` or the `LocalRoleName` but can't use both. If you specify the `LocalRoleName` property, when an account uses the launch constraint, the IAM role with that name in the account will be used. This allows launch-role constraints to be account-agnostic so the administrator can create fewer resources per shared account. The given role name must exist in the account used to create the launch constraint and the account of the user who launches a product with this launch constraint. You cannot have both a `LAUNCH` and a `STACKSET` constraint. You also cannot have more than one `LAUNCH` constraint on an `aws_servicecatalog_product` and `aws_servicecatalog_portfolio`. Specify the `RoleArn` and `LocalRoleName` properties as follows:
+
+```json
+{ "RoleArn" : "arn:aws:iam::123456789012:role/LaunchRole" }
+```
+
+```json
+{ "LocalRoleName" : "SCBasicLaunchRole" }
+```
+
+* `NOTIFICATION`: Specify the `NotificationArns` property as follows (a CDKTF sketch of this constraint type appears after the attribute list below):
+
+```json
+{ "NotificationArns" : ["arn:aws:sns:us-east-1:123456789012:Topic"] }
+```
+
+* `RESOURCE_UPDATE`: Specify the `TagUpdatesOnProvisionedProduct` property as follows. The `TagUpdatesOnProvisionedProduct` property accepts a string value of `ALLOWED` or `NOT_ALLOWED`.
+
+```json
+{ "Version" : "2.0","Properties" :{ "TagUpdateOnProvisionedProduct" : "String" }}
+```
+
+* `STACKSET`: Specify the `Parameters` property as follows. You cannot have both a `LAUNCH` and a `STACKSET` constraint. You also cannot have more than one `STACKSET` constraint on an `aws_servicecatalog_product` and `aws_servicecatalog_portfolio`. Products with a `STACKSET` constraint will launch an AWS CloudFormation stack set.
+
+```json
+{ "Version" : "String", "Properties" : { "AccountList" : [ "String" ], "RegionList" : [ "String" ], "AdminRole" : "String", "ExecutionRole" : "String" }}
+```
+
+* `TEMPLATE`: Specify the `Rules` property. For more information, see [Template Constraint Rules](http://docs.aws.amazon.com/servicecatalog/latest/adminguide/reference-template_constraint_rules.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Constraint identifier.
+* `owner` - Owner of the constraint.
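+
+As referenced in the `parameters` section above, here is a minimal hand-written sketch (not `cdktf convert` output) of a `NOTIFICATION` constraint; the SNS topic ARN and the portfolio/product IDs are placeholders:
+
+```python
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+from imports.aws.servicecatalog_constraint import ServicecatalogConstraint
+class NotificationConstraintSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Fn.jsonencode builds the type-specific parameters document at synth time.
+        ServicecatalogConstraint(self, "notify",
+            parameters=Token.as_string(
+                Fn.jsonencode({
+                    "NotificationArns": ["arn:aws:sns:us-east-1:123456789012:Topic"]
+                })),
+            portfolio_id="port-68656c6c6f",
+            product_id="prod-dnigbtea24ste",
+            type="NOTIFICATION"
+        )
+```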
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3m`) +- `read` - (Default `10m`) +- `update` - (Default `3m`) +- `delete` - (Default `3m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_constraint` using the constraint ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_constraint` using the constraint ID. For example: + +```console +% terraform import aws_servicecatalog_constraint.example cons-nmdkb6cgxfcrs +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_organizations_access.html.markdown b/website/docs/cdktf/python/r/servicecatalog_organizations_access.html.markdown new file mode 100644 index 00000000000..df3e4ce13f2 --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_organizations_access.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_organizations_access" +description: |- + Manages Service Catalog Organizations Access +--- + + + +# Resource: aws_servicecatalog_organizations_access + +Manages Service Catalog AWS Organizations Access, a portfolio sharing feature through AWS Organizations. This allows Service Catalog to receive updates on your organization in order to sync your shares with the current structure. This resource will prompt AWS to set `organizations:EnableAWSServiceAccess` on your behalf so that your shares can be in sync with any changes in your AWS Organizations structure. + +~> **NOTE:** This resource can only be used by the management account in the organization. In other words, a delegated administrator is not authorized to use the resource. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_organizations_access import ServicecatalogOrganizationsAccess +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogOrganizationsAccess(self, "example", + enabled=Token.as_boolean("true") + ) +``` + +## Argument Reference + +The following arguments are required: + +* `enabled` - (Required) Whether to enable AWS Organizations access. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Account ID for the account using the resource. 
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `10m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/servicecatalog_portfolio.html.markdown b/website/docs/cdktf/python/r/servicecatalog_portfolio.html.markdown
new file mode 100644
index 00000000000..e6bd0f9a20f
--- /dev/null
+++ b/website/docs/cdktf/python/r/servicecatalog_portfolio.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_portfolio"
+description: |-
+  Provides a resource to create a Service Catalog portfolio
+---
+
+
+# Resource: aws_servicecatalog_portfolio
+
+Provides a resource to create a Service Catalog Portfolio.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.servicecatalog_portfolio import ServicecatalogPortfolio
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ServicecatalogPortfolio(self, "portfolio",
+            description="List of my organizations apps",
+            name="My App Portfolio",
+            provider_name="Brett"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the portfolio.
+* `description` - (Required) Description of the portfolio.
+* `provider_name` - (Required) Name of the person or organization who owns the portfolio.
+* `tags` - (Optional) Tags to apply to the portfolio. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Service Catalog Portfolio.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `read` - (Default `10m`)
+- `update` - (Default `30m`)
+- `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Catalog Portfolios using the Service Catalog Portfolio `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.servicecatalog_portfolio import ServicecatalogPortfolio
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch (not generated output): generate_config_for_import mirrors the
+        # `terraform import` command shown below.
+        ServicecatalogPortfolio.generate_config_for_import(self, "testfolio", "port-12344321")
+```
+
+Using `terraform import`, import Service Catalog Portfolios using the Service Catalog Portfolio `id`.
For example:
+
+```console
+% terraform import aws_servicecatalog_portfolio.testfolio port-12344321
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/servicecatalog_portfolio_share.html.markdown b/website/docs/cdktf/python/r/servicecatalog_portfolio_share.html.markdown
new file mode 100644
index 00000000000..6aefc867d14
--- /dev/null
+++ b/website/docs/cdktf/python/r/servicecatalog_portfolio_share.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_portfolio_share"
+description: |-
+  Manages a Service Catalog Portfolio Share
+---
+
+
+# Resource: aws_servicecatalog_portfolio_share
+
+Manages a Service Catalog Portfolio Share. Shares the specified portfolio with the specified account or organization node. You can share portfolios to an organization, an organizational unit, or a specific account.
+
+If the portfolio share with the specified account or organization node already exists, using this resource to re-create the share will have no effect and will not return an error. You can then use this resource to update the share.
+
+~> **NOTE:** Shares to an organization node can only be created by the management account of an organization or by a delegated administrator. If a delegated admin is de-registered, they can no longer create portfolio shares.
+
+~> **NOTE:** AWSOrganizationsAccess must be enabled in order to create a portfolio share to an organization node.
+
+~> **NOTE:** You can't share a shared resource, including portfolios that contain a shared product.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.servicecatalog_portfolio_share import ServicecatalogPortfolioShare
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ServicecatalogPortfolioShare(self, "example",
+            portfolio_id=Token.as_string(aws_servicecatalog_portfolio_example.id),
+            principal_id="012128675309",
+            type="ACCOUNT"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `portfolio_id` - (Required) Portfolio identifier.
+* `principal_id` - (Required) Identifier of the principal with whom you will share the portfolio. Valid values are AWS account IDs and ARNs of AWS Organizations and organizational units.
+* `type` - (Required) Type of portfolio share. Valid values are `ACCOUNT` (an external account), `ORGANIZATION` (a share to every account in an organization), `ORGANIZATIONAL_UNIT` (a share to an organizational unit), and `ORGANIZATION_MEMBER_ACCOUNT` (a share to an account in an organization).
+
+The following arguments are optional:
+
+* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+* `share_principals` - (Optional) Enables or disables Principal sharing when creating the portfolio share. If this flag is not provided, principal sharing is disabled.
+* `share_tag_options` - (Optional) Whether to enable sharing of `aws_servicecatalog_tag_option` resources when creating the portfolio share.
+* `wait_for_acceptance` - (Optional) Whether to wait (up to the timeout) for the share to be accepted. Organizational shares are automatically accepted.
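+
+Because the example above only covers `ACCOUNT` shares, here is a hand-written sketch (not `cdktf convert` output) of an organizational share; the organization ARN is a placeholder, and such shares must be created from the management account or a delegated administrator:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.servicecatalog_portfolio_share import ServicecatalogPortfolioShare
+class OrgShareSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Organizational shares are auto-accepted, so wait_for_acceptance is unnecessary.
+        ServicecatalogPortfolioShare(self, "org",
+            portfolio_id="port-12344321",
+            principal_id="arn:aws:organizations::123456789012:organization/o-exampleorgid",
+            share_tag_options=True,
+            type="ORGANIZATION"
+        )
+```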
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `accepted` - Whether the shared portfolio is imported by the recipient account. If the recipient is organizational, the share is automatically imported, and the field is always set to true. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3m`) +- `read` - (Default `10m`) +- `update` - (Default `3m`) +- `delete` - (Default `3m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_portfolio_share` using the portfolio share ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_portfolio_share` using the portfolio share ID. For example: + +```console +% terraform import aws_servicecatalog_portfolio_share.example port-12344321:ACCOUNT:123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_principal_portfolio_association.html.markdown b/website/docs/cdktf/python/r/servicecatalog_principal_portfolio_association.html.markdown new file mode 100644 index 00000000000..f74391c04f1 --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_principal_portfolio_association.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_principal_portfolio_association" +description: |- + Manages a Service Catalog Principal Portfolio Association +--- + + + +# Resource: aws_servicecatalog_principal_portfolio_association + +Manages a Service Catalog Principal Portfolio Association. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_principal_portfolio_association import ServicecatalogPrincipalPortfolioAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogPrincipalPortfolioAssociation(self, "example", + portfolio_id="port-68656c6c6f", + principal_arn="arn:aws:iam::123456789012:user/Eleanor" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `portfolio_id` - (Required) Portfolio identifier. +* `principal_arn` - (Required) Principal ARN. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. +* `principal_type` - (Optional) Principal type. Setting this argument empty (e.g., `principal_type = ""`) will result in an error. Valid value is `IAM`. Default is `IAM`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the association. 
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `3m`)
+- `read` - (Default `10m`)
+- `delete` - (Default `3m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_principal_portfolio_association` using the accept language, principal ARN, and portfolio ID, separated by a comma. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.servicecatalog_principal_portfolio_association import ServicecatalogPrincipalPortfolioAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch (not generated output): generate_config_for_import mirrors the
+        # `terraform import` command shown below.
+        ServicecatalogPrincipalPortfolioAssociation.generate_config_for_import(self, "example", "en,arn:aws:iam::123456789012:user/Eleanor,port-68656c6c6f")
+```
+
+Using `terraform import`, import `aws_servicecatalog_principal_portfolio_association` using the accept language, principal ARN, and portfolio ID, separated by a comma. For example:
+
+```console
+% terraform import aws_servicecatalog_principal_portfolio_association.example en,arn:aws:iam::123456789012:user/Eleanor,port-68656c6c6f
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/servicecatalog_product.html.markdown b/website/docs/cdktf/python/r/servicecatalog_product.html.markdown
new file mode 100644
index 00000000000..157987340d2
--- /dev/null
+++ b/website/docs/cdktf/python/r/servicecatalog_product.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_product"
+description: |-
+  Manages a Service Catalog Product
+---
+
+
+# Resource: aws_servicecatalog_product
+
+Manages a Service Catalog Product.
+
+~> **NOTE:** The user or role that uses this resource must have the `cloudformation:GetTemplate` IAM policy permission. This policy permission is required when using the `template_physical_id` argument.
+
+-> A "provisioning artifact" is also referred to as a "version." A "distributor" is also referred to as a "vendor."
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.servicecatalog_product import ServicecatalogProduct
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ServicecatalogProduct(self, "example",
+            name="example",
+            owner="example-owner",
+            provisioning_artifact_parameters=ServicecatalogProductProvisioningArtifactParameters(
+                template_url="https://s3.amazonaws.com/cf-templates-ozkq9d3hgiq2-us-east-1/temp1.json"
+            ),
+            tags={
+                "foo": "bar"
+            },
+            type="CLOUD_FORMATION_TEMPLATE"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the product.
+* `owner` - (Required) Owner of the product.
+* `provisioning_artifact_parameters` - (Required) Configuration block for provisioning artifact (i.e., version) parameters. Detailed below.
+* `type` - (Required) Type of product. See [AWS Docs](https://docs.aws.amazon.com/servicecatalog/latest/dg/API_CreateProduct.html#API_CreateProduct_RequestSyntax) for valid list of values.
+
+The following arguments are optional:
+
+* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+* `description` - (Optional) Description of the product. +* `distributor` - (Optional) Distributor (i.e., vendor) of the product. +* `support_description` - (Optional) Support information about the product. +* `support_email` - (Optional) Contact email for product support. +* `support_url` - (Optional) Contact URL for product support. +* `tags` - (Optional) Tags to apply to the product. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### provisioning_artifact_parameters + +This argument supports the following arguments: + +* `description` - (Optional) Description of the provisioning artifact (i.e., version), including how it differs from the previous provisioning artifact. +* `disable_template_validation` - (Optional) Whether AWS Service Catalog stops validating the specified provisioning artifact template even if it is invalid. +* `name` - (Optional) Name of the provisioning artifact (for example, `v1`, `v2beta`). No spaces are allowed. +* `template_physical_id` - (Required if `template_url` is not provided) Template source as the physical ID of the resource that contains the template. Currently only supports CloudFormation stack ARN. Specify the physical ID as `arn:[partition]:cloudformation:[region]:[account ID]:stack/[stack name]/[resource ID]`. +* `template_url` - (Required if `template_physical_id` is not provided) Template source as URL of the CloudFormation template in Amazon S3. +* `type` - (Optional) Type of provisioning artifact. See [AWS Docs](https://docs.aws.amazon.com/servicecatalog/latest/dg/API_ProvisioningArtifactProperties.html) for valid list of values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the product. +* `created_time` - Time when the product was created. +* `has_default_path` - Whether the product has a default path. If the product does not have a default path, call `ListLaunchPaths` to disambiguate between paths. Otherwise, `ListLaunchPaths` is not required, and the output of ProductViewSummary can be used directly with `DescribeProvisioningParameters`. +* `id` - Product ID. For example, `prod-dnigbtea24ste`. +* `status` - Status of the product. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5m`) +- `read` - (Default `10m`) +- `update` - (Default `5m`) +- `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_product` using the product ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_product` using the product ID. 
For example:
+
+```console
+% terraform import aws_servicecatalog_product.example prod-dnigbtea24ste
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/servicecatalog_product_portfolio_association.html.markdown b/website/docs/cdktf/python/r/servicecatalog_product_portfolio_association.html.markdown
new file mode 100644
index 00000000000..f7b91349191
--- /dev/null
+++ b/website/docs/cdktf/python/r/servicecatalog_product_portfolio_association.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_product_portfolio_association"
+description: |-
+  Manages a Service Catalog Product Portfolio Association
+---
+
+
+# Resource: aws_servicecatalog_product_portfolio_association
+
+Manages a Service Catalog Product Portfolio Association.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.servicecatalog_product_portfolio_association import ServicecatalogProductPortfolioAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ServicecatalogProductPortfolioAssociation(self, "example",
+            portfolio_id="port-68656c6c6f",
+            product_id="prod-dnigbtea24ste"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `portfolio_id` - (Required) Portfolio identifier.
+* `product_id` - (Required) Product identifier.
+
+The following arguments are optional:
+
+* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+* `source_portfolio_id` - (Optional) Identifier of the source portfolio.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the association.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `3m`)
+- `read` - (Default `10m`)
+- `delete` - (Default `3m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_product_portfolio_association` using the accept language, portfolio ID, and product ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.servicecatalog_product_portfolio_association import ServicecatalogProductPortfolioAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch (not generated output): generate_config_for_import mirrors the
+        # `terraform import` command shown below.
+        ServicecatalogProductPortfolioAssociation.generate_config_for_import(self, "example", "en:port-68656c6c6f:prod-dnigbtea24ste")
+```
+
+Using `terraform import`, import `aws_servicecatalog_product_portfolio_association` using the accept language, portfolio ID, and product ID.
For example: + +```console +% terraform import aws_servicecatalog_product_portfolio_association.example en:port-68656c6c6f:prod-dnigbtea24ste +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_provisioned_product.html.markdown b/website/docs/cdktf/python/r/servicecatalog_provisioned_product.html.markdown new file mode 100644 index 00000000000..2a679bf74fc --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_provisioned_product.html.markdown @@ -0,0 +1,154 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_provisioned_product" +description: |- + Manages a Service Catalog Provisioned Product +--- + + + +# Resource: aws_servicecatalog_provisioned_product + +This resource provisions and manages a Service Catalog provisioned product. + +A provisioned product is a resourced instance of a product. For example, provisioning a product based on a CloudFormation template launches a CloudFormation stack and its underlying resources. + +Like this resource, the `aws_servicecatalog_record` data source also provides information about a provisioned product. Although a Service Catalog record provides some overlapping information with this resource, a record is tied to a provisioned product event, such as provisioning, termination, and updating. + +-> **Tip:** If you include conflicted keys as tags, AWS will report an error, "Parameter validation failed: Missing required parameter in Tags[N]:Value". + +-> **Tip:** A "provisioning artifact" is also referred to as a "version." A "distributor" is also referred to as a "vendor." + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_provisioned_product import ServicecatalogProvisionedProduct +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogProvisionedProduct(self, "example", + name="example", + product_name="Example product", + provisioning_artifact_name="Example version", + provisioning_parameters=[ServicecatalogProvisionedProductProvisioningParameters( + key="foo", + value="bar" + ) + ], + tags={ + "foo": "bar" + } + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) User-friendly name of the provisioned product. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. +* `ignore_errors` - (Optional) _Only applies to deleting._ If set to `true`, AWS Service Catalog stops managing the specified provisioned product even if it cannot delete the underlying resources. The default value is `false`. +* `notification_arns` - (Optional) Passed to CloudFormation. The SNS topic ARNs to which to publish stack-related events. +* `path_id` - (Optional) Path identifier of the product. This value is optional if the product has a default path, and required if the product has more than one path. To list the paths for a product, use `aws_servicecatalog_launch_paths`. When required, you must provide `path_id` or `path_name`, but not both. +* `path_name` - (Optional) Name of the path. You must provide `path_id` or `path_name`, but not both. 
+* `product_id` - (Optional) Product identifier. For example, `prod-abcdzk7xy33qa`. You must provide `product_id` or `product_name`, but not both. +* `product_name` - (Optional) Name of the product. You must provide `product_id` or `product_name`, but not both. +* `provisioning_artifact_id` - (Optional) Identifier of the provisioning artifact. For example, `pa-4abcdjnxjj6ne`. You must provide the `provisioning_artifact_id` or `provisioning_artifact_name`, but not both. +* `provisioning_artifact_name` - (Optional) Name of the provisioning artifact. You must provide the `provisioning_artifact_id` or `provisioning_artifact_name`, but not both. +* `provisioning_parameters` - (Optional) Configuration block with parameters specified by the administrator that are required for provisioning the product. See details below. +* `retain_physical_resources` - (Optional) _Only applies to deleting._ Whether to delete the Service Catalog provisioned product but leave the CloudFormation stack, stack set, or the underlying resources of the deleted provisioned product. The default value is `false`. +* `stack_set_provisioning_preferences` - (Optional) Configuration block with information about the provisioning preferences for a stack set. See details below. +* `tags` - (Optional) Tags to apply to the provisioned product. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### provisioning_parameters + +This argument supports the following arguments: + +* `key` - (Required) Parameter key. +* `use_previous_value` - (Optional) Whether to ignore `value` and keep the previous parameter value. Ignored when initially provisioning a product. +* `value` - (Optional) Parameter value. + +### stack_set_provisioning_preferences + +All of the `stack_set_provisioning_preferences` are only applicable to a `CFN_STACKSET` provisioned product type. + +This argument supports the following arguments: + +* `accounts` - (Optional) One or more AWS accounts that will have access to the provisioned product. The AWS accounts specified should be within the list of accounts in the STACKSET constraint. To get the list of accounts in the STACKSET constraint, use the `aws_servicecatalog_provisioning_parameters` data source. If no values are specified, the default value is all accounts from the STACKSET constraint. +* `failure_tolerance_count` - (Optional) Number of accounts, per region, for which this operation can fail before AWS Service Catalog stops the operation in that region. If the operation is stopped in a region, AWS Service Catalog doesn't attempt the operation in any subsequent regions. You must specify either `failure_tolerance_count` or `failure_tolerance_percentage`, but not both. The default value is 0 if no value is specified. +* `failure_tolerance_percentage` - (Optional) Percentage of accounts, per region, for which this stack operation can fail before AWS Service Catalog stops the operation in that region. If the operation is stopped in a region, AWS Service Catalog doesn't attempt the operation in any subsequent regions. When calculating the number of accounts based on the specified percentage, AWS Service Catalog rounds down to the next whole number. You must specify either `failure_tolerance_count` or `failure_tolerance_percentage`, but not both. 
+* `max_concurrency_count` - (Optional) Maximum number of accounts in which to perform this operation at one time. This is dependent on the value of `failure_tolerance_count`. `max_concurrency_count` is at most one more than the `failure_tolerance_count`. Note that this setting lets you specify the maximum for operations. For large deployments, under certain circumstances the actual number of accounts acted upon concurrently may be lower due to service throttling. You must specify either `max_concurrency_count` or `max_concurrency_percentage`, but not both.
+* `max_concurrency_percentage` - (Optional) Maximum percentage of accounts in which to perform this operation at one time. When calculating the number of accounts based on the specified percentage, AWS Service Catalog rounds down to the next whole number. This is true except in cases where rounding down would result in zero. In this case, AWS Service Catalog sets the number as 1 instead. Note that this setting lets you specify the maximum for operations. For large deployments, under certain circumstances the actual number of accounts acted upon concurrently may be lower due to service throttling. You must specify either `max_concurrency_count` or `max_concurrency_percentage`, but not both.
+* `regions` - (Optional) One or more AWS Regions where the provisioned product will be available. The specified regions should be within the list of regions from the STACKSET constraint. To get the list of regions in the STACKSET constraint, use the `aws_servicecatalog_provisioning_parameters` data source. If no values are specified, the default value is all regions from the STACKSET constraint.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the provisioned product.
+* `cloudwatch_dashboard_names` - Set of CloudWatch dashboards that were created when provisioning the product.
+* `created_time` - Time when the provisioned product was created.
+* `id` - Provisioned Product ID.
+* `last_provisioning_record_id` - Record identifier of the last request performed on this provisioned product of the following types: `ProvisionedProduct`, `UpdateProvisionedProduct`, `ExecuteProvisionedProductPlan`, `TerminateProvisionedProduct`.
+* `last_record_id` - Record identifier of the last request performed on this provisioned product.
+* `last_successful_provisioning_record_id` - Record identifier of the last successful request performed on this provisioned product of the following types: `ProvisionedProduct`, `UpdateProvisionedProduct`, `ExecuteProvisionedProductPlan`, `TerminateProvisionedProduct`.
+* `launch_role_arn` - ARN of the launch role associated with the provisioned product.
+* `outputs` - The set of outputs for the product created.
+    * `description` - The description of the output.
+    * `key` - The output key.
+    * `value` - The output value.
+* `status` - Current status of the provisioned product. See meanings below.
+* `status_message` - Current status message of the provisioned product.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `type` - Type of provisioned product. Valid values are `CFN_STACK` and `CFN_STACKSET`.
+
+### `status` Meanings
+
+* `AVAILABLE` - Stable state, ready to perform any operation. The most recent operation succeeded and completed.
+* `UNDER_CHANGE` - Transitive state. Operations performed might not have valid results. Wait for an `AVAILABLE` status before performing operations.
+* `TAINTED` - Stable state, ready to perform any operation. The stack has completed the requested operation but is not exactly what was requested. For example, a request to update to a new version failed and the stack rolled back to the current version.
+* `ERROR` - An unexpected error occurred. The provisioned product exists but the stack is not running. For example, CloudFormation received a parameter value that was not valid and could not launch the stack.
+* `PLAN_IN_PROGRESS` - Transitive state. The plan operations were performed to provision a new product, but resources have not yet been created. After reviewing the list of resources to be created, execute the plan. Wait for an `AVAILABLE` status before performing operations.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `read` - (Default `10m`)
+- `update` - (Default `30m`)
+- `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_provisioned_product` using the provisioned product ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.servicecatalog_provisioned_product import ServicecatalogProvisionedProduct
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Sketch (not generated output): generate_config_for_import mirrors the
+        # `terraform import` command shown below.
+        ServicecatalogProvisionedProduct.generate_config_for_import(self, "example", "pp-dnigbtea24ste")
+```
+
+Using `terraform import`, import `aws_servicecatalog_provisioned_product` using the provisioned product ID. For example:
+
+```console
+% terraform import aws_servicecatalog_provisioned_product.example pp-dnigbtea24ste
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/servicecatalog_provisioning_artifact.html.markdown b/website/docs/cdktf/python/r/servicecatalog_provisioning_artifact.html.markdown
new file mode 100644
index 00000000000..0d9464d612f
--- /dev/null
+++ b/website/docs/cdktf/python/r/servicecatalog_provisioning_artifact.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_provisioning_artifact"
+description: |-
+  Manages a Service Catalog Provisioning Artifact
+---
+
+
+# Resource: aws_servicecatalog_provisioning_artifact
+
+Manages a Service Catalog Provisioning Artifact for a specified product.
+
+-> A "provisioning artifact" is also referred to as a "version."
+
+~> **NOTE:** You cannot create a provisioning artifact for a product that was shared with you.
+
+~> **NOTE:** The user or role that uses this resource must have the `cloudformation:GetTemplate` IAM policy permission. This policy permission is required when using the `template_physical_id` argument.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.servicecatalog_provisioning_artifact import ServicecatalogProvisioningArtifact +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogProvisioningArtifact(self, "example", + name="example", + product_id=Token.as_string(aws_servicecatalog_product_example.id), + template_url="https://${" + aws_s3_bucket_example.bucket_regional_domain_name + "}/${" + aws_s3_object_example.key + "}", + type="CLOUD_FORMATION_TEMPLATE" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `product_id` - (Required) Identifier of the product. +* `template_physical_id` - (Required if `template_url` is not provided) Template source as the physical ID of the resource that contains the template. Currently only supports CloudFormation stack ARN. Specify the physical ID as `arn:[partition]:cloudformation:[region]:[account ID]:stack/[stack name]/[resource ID]`. +* `template_url` - (Required if `template_physical_id` is not provided) Template source as URL of the CloudFormation template in Amazon S3. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). The default value is `en`. +* `active` - (Optional) Whether the product version is active. Inactive provisioning artifacts are invisible to end users. End users cannot launch or update a provisioned product from an inactive provisioning artifact. Default is `true`. +* `description` - (Optional) Description of the provisioning artifact (i.e., version), including how it differs from the previous provisioning artifact. +* `disable_template_validation` - (Optional) Whether AWS Service Catalog stops validating the specified provisioning artifact template even if it is invalid. +* `guidance` - (Optional) Information set by the administrator to provide guidance to end users about which provisioning artifacts to use. Valid values are `DEFAULT` and `DEPRECATED`. The default is `DEFAULT`. Users are able to make updates to a provisioned product of a deprecated version but cannot launch new provisioned products using a deprecated version. +* `name` - (Optional) Name of the provisioning artifact (for example, `v1`, `v2beta`). No spaces are allowed. +* `type` - (Optional) Type of provisioning artifact. See [AWS Docs](https://docs.aws.amazon.com/servicecatalog/latest/dg/API_ProvisioningArtifactProperties.html) for valid list of values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `created_time` - Time when the provisioning artifact was created. +* `id` - Provisioning artifact identifier and product identifier separated by a colon. +* `provisioning_artifact_id` - Provisioning artifact identifier. +* `status` - Status of the provisioning artifact. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3m`) +- `read` - (Default `10m`) +- `update` - (Default `3m`) +- `delete` - (Default `3m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_provisioning_artifact` using the provisioning artifact ID and product ID separated by a colon. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_provisioning_artifact` using the provisioning artifact ID and product ID separated by a colon. For example: + +```console +% terraform import aws_servicecatalog_provisioning_artifact.example pa-ij2b6lusy6dec:prod-el3an0rma3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_service_action.html.markdown b/website/docs/cdktf/python/r/servicecatalog_service_action.html.markdown new file mode 100644 index 00000000000..5309256949b --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_service_action.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_service_action" +description: |- + Manages a Service Catalog Service Action +--- + + + +# Resource: aws_servicecatalog_service_action + +Manages a Service Catalog self-service action. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_service_action import ServicecatalogServiceAction +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, version): + super().__init__(scope, name) + ServicecatalogServiceAction(self, "example", + definition=ServicecatalogServiceActionDefinition( + name="AWS-RestartEC2Instance", + version=version + ), + description="Motor generator unit", + name="MGU" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `definition` - (Required) Self-service action definition configuration block. Detailed below. +* `name` - (Required) Self-service action name. + +The following arguments are optional: + +* `accept_language` - (Optional) Language code. Valid values are `en` (English), `jp` (Japanese), and `zh` (Chinese). Default is `en`. +* `description` - (Optional) Self-service action description. + +### `definition` + +The `definition` configuration block supports the following attributes: + +* `assume_role` - (Optional) ARN of the role that performs the self-service actions on your behalf. For example, `arn:aws:iam::12345678910:role/ActionRole`. To reuse the provisioned product launch role, set to `LAUNCH_ROLE`. +* `name` - (Required) Name of the SSM document. For example, `AWS-RestartEC2Instance`. If you are using a shared SSM document, you must provide the ARN instead of the name. +* `parameters` - (Optional) List of parameters in JSON format. For example: `[{\"Name\":\"InstanceId\",\"Type\":\"TARGET\"}]` or `[{\"Name\":\"InstanceId\",\"Type\":\"TEXT_VALUE\"}]`. +* `type` - (Optional) Service action definition type. Valid value is `SSM_AUTOMATION`. Default is `SSM_AUTOMATION`. +* `version` - (Required) SSM document version. For example, `1`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the service action. 
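+
+Tying the `definition` attributes together, here is a minimal hand-written sketch (not `cdktf convert` output) that reuses the provisioned product's launch role and passes the target instance through `parameters`; `AWS-StopEC2Instance` is an AWS-owned SSM document, and the struct import is assumed to live beside the resource class in the generated bindings:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.servicecatalog_service_action import (
+    ServicecatalogServiceAction,
+    ServicecatalogServiceActionDefinition,
+)
+class StopInstanceActionSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        ServicecatalogServiceAction(self, "stop",
+            definition=ServicecatalogServiceActionDefinition(
+                assume_role="LAUNCH_ROLE",  # reuse the launch role instead of a dedicated ARN
+                name="AWS-StopEC2Instance",
+                parameters="[{\"Name\":\"InstanceId\",\"Type\":\"TARGET\"}]",  # JSON string
+                version="1"
+            ),
+            description="Stop the underlying EC2 instance",
+            name="StopInstance"
+        )
+```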
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3m`) +- `read` - (Default `10m`) +- `update` - (Default `3m`) +- `delete` - (Default `3m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_service_action` using the service action ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_service_action` using the service action ID. For example: + +```console +% terraform import aws_servicecatalog_service_action.example act-f1w12eperfslh +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_tag_option.html.markdown b/website/docs/cdktf/python/r/servicecatalog_tag_option.html.markdown new file mode 100644 index 00000000000..b685322c339 --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_tag_option.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_tag_option" +description: |- + Manages a Service Catalog Tag Option +--- + + + +# Resource: aws_servicecatalog_tag_option + +Manages a Service Catalog Tag Option. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_tag_option import ServicecatalogTagOption +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogTagOption(self, "example", + key="nyckel", + value="värde" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `key` - (Required) Tag option key. +* `value` - (Required) Tag option value. + +The following arguments are optional: + +* `active` - (Optional) Whether tag option is active. Default is `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier (e.g., `tag-pjtvagohlyo3m`). +* `owner_id` - AWS account ID of the owner account that created the tag option. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3m`) +- `read` - (Default `10m`) +- `update` - (Default `3m`) +- `delete` - (Default `3m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_tag_option` using the tag option ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_tag_option` using the tag option ID. 
For example: + +```console +% terraform import aws_servicecatalog_tag_option.example tag-pjtvagohlyo3m +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicecatalog_tag_option_resource_association.html.markdown b/website/docs/cdktf/python/r/servicecatalog_tag_option_resource_association.html.markdown new file mode 100644 index 00000000000..6cc8adc5bad --- /dev/null +++ b/website/docs/cdktf/python/r/servicecatalog_tag_option_resource_association.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_tag_option_resource_association" +description: |- + Manages a Service Catalog Tag Option Resource Association +--- + + + +# Resource: aws_servicecatalog_tag_option_resource_association + +Manages a Service Catalog Tag Option Resource Association. + +-> **Tip:** A "resource" is either a Service Catalog portfolio or product. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicecatalog_tag_option_resource_association import ServicecatalogTagOptionResourceAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicecatalogTagOptionResourceAssociation(self, "example", + resource_id="prod-dnigbtea24ste", + tag_option_id="tag-pjtvyakdlyo3m" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `resource_id` - (Required) Resource identifier. +* `tag_option_id` - (Required) Tag Option identifier. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the association. +* `resource_arn` - ARN of the resource. +* `resource_created_time` - Creation time of the resource. +* `resource_description` - Description of the resource. +* `resource_name` - Description of the resource. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3m`) +- `read` - (Default `10m`) +- `delete` - (Default `3m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicecatalog_tag_option_resource_association` using the tag option ID and resource ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicecatalog_tag_option_resource_association` using the tag option ID and resource ID. 
For example: + +```console +% terraform import aws_servicecatalog_tag_option_resource_association.example tag-pjtvyakdlyo3m:prod-dnigbtea24ste +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/servicequotas_service_quota.html.markdown b/website/docs/cdktf/python/r/servicequotas_service_quota.html.markdown new file mode 100644 index 00000000000..de149426b17 --- /dev/null +++ b/website/docs/cdktf/python/r/servicequotas_service_quota.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Service Quotas" +layout: "aws" +page_title: "AWS: aws_servicequotas_service_quota" +description: |- + Manages an individual Service Quota +--- + + + +# Resource: aws_servicequotas_service_quota + +Manages an individual Service Quota. + +~> **NOTE:** Global quotas apply to all AWS regions, but can only be accessed in `us-east-1` in the Commercial partition or `us-gov-west-1` in the GovCloud partition. In other regions, the AWS API will return the error `The request failed because the specified service does not exist.` + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.servicequotas_service_quota import ServicequotasServiceQuota +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ServicequotasServiceQuota(self, "example", + quota_code="L-F678F1CE", + service_code="vpc", + value=75 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `quota_code` - (Required) Code of the service quota to track. For example: `L-F678F1CE`. Available values can be found with the [AWS CLI service-quotas list-service-quotas command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-service-quotas.html). +* `service_code` - (Required) Code of the service to track. For example: `vpc`. Available values can be found with the [AWS CLI service-quotas list-services command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-services.html). +* `value` - (Required) Float specifying the desired value for the service quota. If the desired value is higher than the current value, a quota increase request is submitted. When a known request is submitted and pending, the value reflects the desired value of the pending request. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `adjustable` - Whether the service quota can be increased. +* `arn` - Amazon Resource Name (ARN) of the service quota. +* `default_value` - Default value of the service quota. +* `id` - Service code and quota code, separated by a front slash (`/`) +* `quota_name` - Name of the quota. +* `service_name` - Name of the service. +* `usage_metric` - Information about the measurement. + * `metric_dimensions` - The metric dimensions. + * `class` + * `resource` + * `service` + * `type` + * `metric_name` - The name of the metric. + * `metric_namespace` - The namespace of the metric. + * `metric_statistic_recommendation` - The metric statistic that AWS recommend you use when determining quota usage. 
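Since the resource exports both the AWS `default_value` and the requested `value`, surfacing them as stack outputs can help when auditing quota increases. A minimal sketch building on the example above; the output names are illustrative, and it assumes the generated bindings expose the attributes as the snake_case properties listed above:

```python
# A sketch only: output names below are illustrative placeholders.
from constructs import Construct
from cdktf import TerraformStack, TerraformOutput
from imports.aws.servicequotas_service_quota import ServicequotasServiceQuota

class QuotaSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        quota = ServicequotasServiceQuota(self, "vpcs_per_region",
            quota_code="L-F678F1CE",
            service_code="vpc",
            value=75
        )
        # Surface the AWS default next to the requested value for auditing.
        TerraformOutput(self, "vpc_quota_default", value=quota.default_value)
        TerraformOutput(self, "vpc_quota_arn", value=quota.arn)
```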
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_servicequotas_service_quota` using the service code and quota code, separated by a front slash (`/`). For example: + +~> **NOTE:** This resource does not require explicit import and will assume management of an existing service quota on Terraform resource creation. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_servicequotas_service_quota` using the service code and quota code, separated by a front slash (`/`). For example: + +~> **NOTE:** This resource does not require explicit import and will assume management of an existing service quota on Terraform resource creation. + +```console +% terraform import aws_servicequotas_service_quota.example vpc/L-F678F1CE +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_active_receipt_rule_set.html.markdown b/website/docs/cdktf/python/r/ses_active_receipt_rule_set.html.markdown new file mode 100644 index 00000000000..393cd3ac8cb --- /dev/null +++ b/website/docs/cdktf/python/r/ses_active_receipt_rule_set.html.markdown @@ -0,0 +1,66 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_active_receipt_rule_set" +description: |- + Provides a resource to designate the active SES receipt rule set +--- + + + +# Resource: aws_ses_active_receipt_rule_set + +Provides a resource to designate the active SES receipt rule set + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_active_receipt_rule_set import SesActiveReceiptRuleSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesActiveReceiptRuleSet(self, "main", + rule_set_name="primary-rules" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rule_set_name` - (Required) The name of the rule set + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The SES receipt rule set name. +* `arn` - The SES receipt rule set ARN. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import active SES receipt rule sets using the rule set name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import active SES receipt rule sets using the rule set name. 
For example: + +```console +% terraform import aws_ses_active_receipt_rule_set.my_rule_set my_rule_set_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_configuration_set.html.markdown b/website/docs/cdktf/python/r/ses_configuration_set.html.markdown new file mode 100644 index 00000000000..8ef73c3ec91 --- /dev/null +++ b/website/docs/cdktf/python/r/ses_configuration_set.html.markdown @@ -0,0 +1,104 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_configuration_set" +description: |- + Provides an SES configuration set +--- + + + +# Resource: aws_ses_configuration_set + +Provides an SES configuration set resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_configuration_set import SesConfigurationSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesConfigurationSet(self, "test", + name="some-configuration-set-test" + ) +``` + +### Require TLS Connections + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_configuration_set import SesConfigurationSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesConfigurationSet(self, "test", + delivery_options=SesConfigurationSetDeliveryOptions( + tls_policy="Require" + ), + name="some-configuration-set-test" + ) +``` + +## Argument Reference + +The following argument is required: + +* `name` - (Required) Name of the configuration set. + +The following argument is optional: + +* `delivery_options` - (Optional) Whether messages that use the configuration set are required to use TLS. See below. +* `reputation_metrics_enabled` - (Optional) Whether or not Amazon SES publishes reputation metrics for the configuration set, such as bounce and complaint rates, to Amazon CloudWatch. The default value is `false`. +* `sending_enabled` - (Optional) Whether email sending is enabled or disabled for the configuration set. The default value is `true`. +* `tracking_options` - (Optional) Domain that is used to redirect email recipients to an Amazon SES-operated domain. See below. **NOTE:** This functionality is best effort. + +### delivery_options + +* `tls_policy` - (Optional) Whether messages that use the configuration set are required to use Transport Layer Security (TLS). If the value is `Require`, messages are only delivered if a TLS connection can be established. If the value is `Optional`, messages can be delivered in plain text if a TLS connection can't be established. Valid values: `Require` or `Optional`. Defaults to `Optional`. + +### tracking_options + +* `custom_redirect_domain` - (Optional) Custom subdomain that is used to redirect email recipients to the Amazon SES event tracking domain. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - SES configuration set ARN. +* `id` - SES configuration set name. 
+* `last_fresh_start` - Date and time at which the reputation metrics for the configuration set were last reset. Resetting these metrics is known as a fresh start. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES Configuration Sets using their `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES Configuration Sets using their `name`. For example: + +```console +% terraform import aws_ses_configuration_set.test some-configuration-set-test +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_domain_dkim.html.markdown b/website/docs/cdktf/python/r/ses_domain_dkim.html.markdown new file mode 100644 index 00000000000..281ca490038 --- /dev/null +++ b/website/docs/cdktf/python/r/ses_domain_dkim.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_dkim" +description: |- + Provides an SES domain DKIM generation resource +--- + + + +# Resource: aws_ses_domain_dkim + +Provides an SES domain DKIM generation resource. + +Domain ownership needs to be confirmed first using [ses_domain_identity Resource](/docs/providers/aws/r/ses_domain_identity.html) + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) Verified domain name to generate DKIM tokens for. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `dkim_tokens` - DKIM tokens generated by SES. + These tokens should be used to create CNAME records used to verify SES Easy DKIM. + See below for an example of how this might be achieved + when the domain is hosted in Route 53 and managed by Terraform. + Find out more about verifying domains in Amazon SES + in the [AWS SES docs](http://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim-dns-records.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformCount, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_record import Route53Record +from imports.aws.ses_domain_dkim import SesDomainDkim +from imports.aws.ses_domain_identity import SesDomainIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SesDomainIdentity(self, "example", + domain="example.com" + ) + aws_ses_domain_dkim_example = SesDomainDkim(self, "example_1", + domain=example.domain + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ses_domain_dkim_example.override_logical_id("example") + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. 
a result of a data source + # you need to keep this like it is. + example_amazonses_dkim_record_count = TerraformCount.of( + Token.as_number("3")) + Route53Record(self, "example_amazonses_dkim_record", + name= + Token.as_string( + property_access(aws_ses_domain_dkim_example.dkim_tokens, [example_amazonses_dkim_record_count.index + ])) + "._domainkey", + records=[ + Token.as_string( + property_access(aws_ses_domain_dkim_example.dkim_tokens, [example_amazonses_dkim_record_count.index + ])) + ".dkim.amazonses.com" + ], + ttl=Token.as_number("600"), + type="CNAME", + zone_id="ABCDEFGHIJ123", + count=example_amazonses_dkim_record_count + ) +``` + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DKIM tokens using the `domain` attribute. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import DKIM tokens using the `domain` attribute. For example: + +```console +% terraform import aws_ses_domain_dkim.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_domain_identity.html.markdown b/website/docs/cdktf/python/r/ses_domain_identity.html.markdown new file mode 100644 index 00000000000..d9eec96e25d --- /dev/null +++ b/website/docs/cdktf/python/r/ses_domain_identity.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_identity" +description: |- + Provides an SES domain identity resource +--- + + + +# Resource: aws_ses_domain_identity + +Provides an SES domain identity resource + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_domain_identity import SesDomainIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesDomainIdentity(self, "example", + domain="example.com" + ) +``` + +### With Route53 Record + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_record import Route53Record +from imports.aws.ses_domain_identity import SesDomainIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SesDomainIdentity(self, "example", + domain="example.com" + ) + Route53Record(self, "example_amazonses_verification_record", + name="_amazonses.example.com", + records=[example.verification_token], + ttl=Token.as_number("600"), + type="TXT", + zone_id="ABCDEFGHIJ123" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) The domain name to assign to SES + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the domain identity. 
+* `verification_token` - A code which when added to the domain as a TXT record + will signal to SES that the owner of the domain has authorised SES to act on + their behalf. The domain identity will be in state "verification pending" + until this is done. See the [With Route53 Record](#with-route53-record) example + for how this might be achieved when the domain is hosted in Route 53 and + managed by Terraform. Find out more about verifying domains in Amazon + SES in the [AWS SES + docs](http://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-domains.html). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES domain identities using the domain name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES domain identities using the domain name. For example: + +```console +% terraform import aws_ses_domain_identity.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_domain_identity_verification.html.markdown b/website/docs/cdktf/python/r/ses_domain_identity_verification.html.markdown new file mode 100644 index 00000000000..6bff4322f3d --- /dev/null +++ b/website/docs/cdktf/python/r/ses_domain_identity_verification.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_identity_verification" +description: |- + Waits for and checks successful verification of an SES domain identity. +--- + + + +# Resource: aws_ses_domain_identity_verification + +Represents a successful verification of an SES domain identity. + +Most commonly, this resource is used together with [`aws_route53_record`](route53_record.html) and +[`aws_ses_domain_identity`](ses_domain_identity.html) to request an SES domain identity, +deploy the required DNS verification records, and wait for verification to complete. + +~> **WARNING:** This resource implements a part of the verification workflow. It does not represent a real-world entity in AWS, therefore changing or deleting this resource on its own has no immediate effect. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.route53_record import Route53Record +from imports.aws.ses_domain_identity import SesDomainIdentity +from imports.aws.ses_domain_identity_verification import SesDomainIdentityVerification +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SesDomainIdentity(self, "example", + domain="example.com" + ) + example_amazonses_verification_record = Route53Record(self, "example_amazonses_verification_record", + name="_amazonses.${" + example.id + "}", + records=[example.verification_token], + ttl=Token.as_number("600"), + type="TXT", + zone_id=Token.as_string(aws_route53_zone_example.zone_id) + ) + SesDomainIdentityVerification(self, "example_verification", + depends_on=[example_amazonses_verification_record], + domain=example.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) The domain name of the SES domain identity to verify. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The domain name of the domain identity. +* `arn` - The ARN of the domain identity. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `45m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_domain_mail_from.html.markdown b/website/docs/cdktf/python/r/ses_domain_mail_from.html.markdown new file mode 100644 index 00000000000..24f20ae94e4 --- /dev/null +++ b/website/docs/cdktf/python/r/ses_domain_mail_from.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_mail_from" +description: |- + Provides an SES domain MAIL FROM resource +--- + + + +# Resource: aws_ses_domain_mail_from + +Provides an SES domain MAIL FROM resource. + +~> **NOTE:** For the MAIL FROM domain to be fully usable, this resource should be paired with the [aws_ses_domain_identity resource](/docs/providers/aws/r/ses_domain_identity.html). To validate the MAIL FROM domain, a DNS MX record is required. To pass SPF checks, a DNS TXT record may also be required. See the [Amazon SES MAIL FROM documentation](https://docs.aws.amazon.com/ses/latest/dg/mail-from.html) for more information. + +## Example Usage + +### Domain Identity MAIL FROM + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.route53_record import Route53Record +from imports.aws.ses_domain_identity import SesDomainIdentity +from imports.aws.ses_domain_mail_from import SesDomainMailFrom +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SesDomainIdentity(self, "example", + domain="example.com" + ) + aws_ses_domain_mail_from_example = SesDomainMailFrom(self, "example_1", + domain=example.domain, + mail_from_domain="bounce.${" + example.domain + "}" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_ses_domain_mail_from_example.override_logical_id("example") + Route53Record(self, "example_ses_domain_mail_from_mx", + name=Token.as_string(aws_ses_domain_mail_from_example.mail_from_domain), + records=["10 feedback-smtp.us-east-1.amazonses.com"], + ttl=Token.as_number("600"), + type="MX", + zone_id=Token.as_string(aws_route53_zone_example.id) + ) + Route53Record(self, "example_ses_domain_mail_from_txt", + name=Token.as_string(aws_ses_domain_mail_from_example.mail_from_domain), + records=["v=spf1 include:amazonses.com -all"], + ttl=Token.as_number("600"), + type="TXT", + zone_id=Token.as_string(aws_route53_zone_example.id) + ) +``` + +### Email Identity MAIL FROM + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_domain_mail_from import SesDomainMailFrom +from imports.aws.ses_email_identity import SesEmailIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SesEmailIdentity(self, "example", + email="user@example.com" + ) + aws_ses_domain_mail_from_example = SesDomainMailFrom(self, "example_1", + domain=example.email, + mail_from_domain="mail.example.com" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ses_domain_mail_from_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `domain` - (Required) Verified domain name or email identity to generate DKIM tokens for. +* `mail_from_domain` - (Required) Subdomain (of above domain) which is to be used as MAIL FROM address (Required for DMARC validation) + +The following arguments are optional: + +* `behavior_on_mx_failure` - (Optional) The action that you want Amazon SES to take if it cannot successfully read the required MX record when you send an email. Defaults to `UseDefaultValue`. See the [SES API documentation](https://docs.aws.amazon.com/ses/latest/APIReference/API_SetIdentityMailFromDomain.html) for more information. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The domain name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MAIL FROM domain using the `domain` attribute. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import MAIL FROM domain using the `domain` attribute. 
For example: + +```console +% terraform import aws_ses_domain_mail_from.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_email_identity.html.markdown b/website/docs/cdktf/python/r/ses_email_identity.html.markdown new file mode 100644 index 00000000000..cfb23747325 --- /dev/null +++ b/website/docs/cdktf/python/r/ses_email_identity.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_email_identity" +description: |- + Provides an SES email identity resource +--- + + + +# Resource: aws_ses_email_identity + +Provides an SES email identity resource + +## Argument Reference + +This resource supports the following arguments: + +* `email` - (Required) The email address to assign to SES. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the email identity. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_email_identity import SesEmailIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesEmailIdentity(self, "example", + email="email@example.com" + ) +``` + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES email identities using the email address. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES email identities using the email address. For example: + +```console +% terraform import aws_ses_email_identity.example email@example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_event_destination.html.markdown b/website/docs/cdktf/python/r/ses_event_destination.html.markdown new file mode 100644 index 00000000000..780361f9a77 --- /dev/null +++ b/website/docs/cdktf/python/r/ses_event_destination.html.markdown @@ -0,0 +1,151 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_event_destination" +description: |- + Provides an SES event destination +--- + + + +# Resource: aws_ses_event_destination + +Provides an SES event destination + +## Example Usage + +### CloudWatch Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.ses_event_destination import SesEventDestination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesEventDestination(self, "cloudwatch", + cloudwatch_destination=[SesEventDestinationCloudwatchDestination( + default_value="default", + dimension_name="dimension", + value_source="emailHeader" + ) + ], + configuration_set_name=example.name, + enabled=True, + matching_types=["bounce", "send"], + name="event-destination-cloudwatch" + ) +``` + +### Kinesis Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_event_destination import SesEventDestination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesEventDestination(self, "kinesis", + configuration_set_name=example.name, + enabled=True, + kinesis_destination=SesEventDestinationKinesisDestination( + role_arn=Token.as_string(aws_iam_role_example.arn), + stream_arn=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn) + ), + matching_types=["bounce", "send"], + name="event-destination-kinesis" + ) +``` + +### SNS Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_event_destination import SesEventDestination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesEventDestination(self, "sns", + configuration_set_name=example.name, + enabled=True, + matching_types=["bounce", "send"], + name="event-destination-sns", + sns_destination=SesEventDestinationSnsDestination( + topic_arn=Token.as_string(aws_sns_topic_example.arn) + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the event destination +* `configuration_set_name` - (Required) The name of the configuration set +* `enabled` - (Optional) If true, the event destination will be enabled +* `matching_types` - (Required) A list of matching types. May be any of `"send"`, `"reject"`, `"bounce"`, `"complaint"`, `"delivery"`, `"open"`, `"click"`, or `"renderingFailure"`. +* `cloudwatch_destination` - (Optional) CloudWatch destination for the events +* `kinesis_destination` - (Optional) Send the events to a kinesis firehose destination +* `sns_destination` - (Optional) Send the events to an SNS Topic destination + +~> **NOTE:** You can specify `"cloudwatch_destination"` or `"kinesis_destination"` but not both + +### cloudwatch_destination Argument Reference + +* `default_value` - (Required) The default value for the event +* `dimension_name` - (Required) The name for the dimension +* `value_source` - (Required) The source for the value. May be any of `"messageTag"`, `"emailHeader"` or `"linkTag"`. 
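For comparison with the `emailHeader` example above, a dimension can instead be populated from an SES message tag by setting `value_source` to `messageTag`. A minimal sketch, assuming the same generated bindings as the examples above; the configuration set name and dimension are placeholders:

```python
# A sketch only: configuration set name and dimension are placeholders.
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.ses_event_destination import (
    SesEventDestination,
    SesEventDestinationCloudwatchDestination,
)

class TaggedDimensionSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SesEventDestination(self, "cloudwatch_tagged",
            name="event-destination-cloudwatch-tagged",
            configuration_set_name="some-configuration-set-test",
            enabled=True,
            matching_types=["open", "click"],
            cloudwatch_destination=[SesEventDestinationCloudwatchDestination(
                default_value="unknown",
                # Dimension value is read from the "campaign" message tag
                # set on each outgoing email.
                dimension_name="campaign",
                value_source="messageTag"
            )]
        )
```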
+ +### kinesis_destination Argument Reference + +* `stream_arn` - (Required) The ARN of the Kinesis Stream +* `role_arn` - (Required) The ARN of the role that has permissions to access the Kinesis Stream + +### sns_destination Argument Reference + +* `topic_arn` - (Required) The ARN of the SNS topic + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The SES event destination name. +* `arn` - The SES event destination ARN. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES event destinations using `configuration_set_name` together with the event destination's `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES event destinations using `configuration_set_name` together with the event destination's `name`. For example: + +```console +% terraform import aws_ses_event_destination.sns some-configuration-set-test/event-destination-sns +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_identity_notification_topic.markdown b/website/docs/cdktf/python/r/ses_identity_notification_topic.markdown new file mode 100644 index 00000000000..b1085534787 --- /dev/null +++ b/website/docs/cdktf/python/r/ses_identity_notification_topic.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_identity_notification_topic" +description: |- + Setting AWS SES Identity Notification Topic +--- + + + +# Resource: aws_ses_identity_notification_topic + +Resource for managing SES Identity Notification Topics + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_identity_notification_topic import SesIdentityNotificationTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesIdentityNotificationTopic(self, "test", + identity=example.domain, + include_original_headers=True, + notification_type="Bounce", + topic_arn=Token.as_string(aws_sns_topic_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `topic_arn` - (Optional) The Amazon Resource Name (ARN) of the Amazon SNS topic. Can be set to `""` (an empty string) to disable publishing. +* `notification_type` - (Required) The type of notifications that will be published to the specified Amazon SNS topic. Valid Values: `Bounce`, `Complaint` or `Delivery`. +* `identity` - (Required) The identity for which the Amazon SNS topic will be set. You can specify an identity by using its name or by using its Amazon Resource Name (ARN). +* `include_original_headers` - (Optional) Whether SES should include original email headers in SNS notifications of this type. `false` by default. + +## Attribute Reference + +This resource exports no additional attributes. 
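Note that `notification_type` accepts a single value, so publishing several notification types for the same identity takes one resource per type. A minimal sketch; the identity and SNS topic ARN are placeholders:

```python
# A sketch only: identity and topic ARN are illustrative placeholders.
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.ses_identity_notification_topic import SesIdentityNotificationTopic

class NotificationTopicsSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # One aws_ses_identity_notification_topic resource per notification type.
        for notification_type in ["Bounce", "Complaint", "Delivery"]:
            SesIdentityNotificationTopic(self, f"notify_{notification_type.lower()}",
                identity="example.com",
                notification_type=notification_type,
                topic_arn="arn:aws:sns:us-east-1:123456789012:ses-notifications"
            )
```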
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Identity Notification Topics using the ID of the record. The ID is made up as `IDENTITY|TYPE` where `IDENTITY` is the SES Identity and `TYPE` is the Notification Type. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Identity Notification Topics using the ID of the record. The ID is made up as `IDENTITY|TYPE` where `IDENTITY` is the SES Identity and `TYPE` is the Notification Type. For example: + +```console +% terraform import aws_ses_identity_notification_topic.test 'example.com|Bounce' +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_identity_policy.html.markdown b/website/docs/cdktf/python/r/ses_identity_policy.html.markdown new file mode 100644 index 00000000000..037f809c3cd --- /dev/null +++ b/website/docs/cdktf/python/r/ses_identity_policy.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_identity_policy" +description: |- + Manages a SES Identity Policy +--- + + + +# Resource: aws_ses_identity_policy + +Manages a SES Identity Policy. More information about SES Sending Authorization Policies can be found in the [SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization-policies.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.ses_domain_identity import SesDomainIdentity +from imports.aws.ses_identity_policy import SesIdentityPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SesDomainIdentity(self, "example", + domain="example.com" + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_1", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["SES:SendEmail", "SES:SendRawEmail"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=[example.arn] + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_ses_identity_policy_example = SesIdentityPolicy(self, "example_2", + identity=example.arn, + name="example", + policy=Token.as_string(data_aws_iam_policy_document_example.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ses_identity_policy_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `identity` - (Required) Name or Amazon Resource Name (ARN) of the SES Identity. +* `name` - (Required) Name of the policy. +* `policy` - (Required) JSON string of the policy. 
For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES Identity Policies using the identity and policy name, separated by a pipe character (`|`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES Identity Policies using the identity and policy name, separated by a pipe character (`|`). For example: + +```console +% terraform import aws_ses_identity_policy.example 'example.com|example' +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_receipt_filter.html.markdown b/website/docs/cdktf/python/r/ses_receipt_filter.html.markdown new file mode 100644 index 00000000000..64aae1125bb --- /dev/null +++ b/website/docs/cdktf/python/r/ses_receipt_filter.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_receipt_filter" +description: |- + Provides an SES receipt filter +--- + + + +# Resource: aws_ses_receipt_filter + +Provides an SES receipt filter resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_receipt_filter import SesReceiptFilter +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesReceiptFilter(self, "filter", + cidr="10.10.10.10", + name="block-spammer", + policy="Block" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the filter +* `cidr` - (Required) The IP address or address range to filter, in CIDR notation +* `policy` - (Required) Block or Allow + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The SES receipt filter name. +* `arn` - The SES receipt filter ARN. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES Receipt Filter using their `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES Receipt Filter using their `name`. 
For example: + +```console +% terraform import aws_ses_receipt_filter.test some-filter +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_receipt_rule.html.markdown b/website/docs/cdktf/python/r/ses_receipt_rule.html.markdown new file mode 100644 index 00000000000..e132236c0bf --- /dev/null +++ b/website/docs/cdktf/python/r/ses_receipt_rule.html.markdown @@ -0,0 +1,142 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_receipt_rule" +description: |- + Provides an SES receipt rule resource +--- + + + +# Resource: aws_ses_receipt_rule + +Provides an SES receipt rule resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_receipt_rule import SesReceiptRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesReceiptRule(self, "store", + add_header_action=[SesReceiptRuleAddHeaderAction( + header_name="Custom-Header", + header_value="Added by SES", + position=1 + ) + ], + enabled=True, + name="store", + recipients=["karen@example.com"], + rule_set_name="default-rule-set", + s3_action=[SesReceiptRuleS3Action( + bucket_name="emails", + position=2 + ) + ], + scan_enabled=True + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the rule +* `rule_set_name` - (Required) The name of the rule set +* `after` - (Optional) The name of the rule to place this rule after +* `enabled` - (Optional) If true, the rule will be enabled +* `recipients` - (Optional) A list of email addresses +* `scan_enabled` - (Optional) If true, incoming emails will be scanned for spam and viruses +* `tls_policy` - (Optional) `Require` or `Optional` +* `add_header_action` - (Optional) A list of Add Header Action blocks. Documented below. +* `bounce_action` - (Optional) A list of Bounce Action blocks. Documented below. +* `lambda_action` - (Optional) A list of Lambda Action blocks. Documented below. +* `s3_action` - (Optional) A list of S3 Action blocks. Documented below. +* `sns_action` - (Optional) A list of SNS Action blocks. Documented below. +* `stop_action` - (Optional) A list of Stop Action blocks. Documented below. +* `workmail_action` - (Optional) A list of WorkMail Action blocks. Documented below. 
+ +Add header actions support the following: + +* `header_name` - (Required) The name of the header to add +* `header_value` - (Required) The value of the header to add +* `position` - (Required) The position of the action in the receipt rule + +Bounce actions support the following: + +* `message` - (Required) The message to send +* `sender` - (Required) The email address of the sender +* `smtp_reply_code` - (Required) The RFC 5321 SMTP reply code +* `status_code` - (Optional) The RFC 3463 SMTP enhanced status code +* `topic_arn` - (Optional) The ARN of an SNS topic to notify +* `position` - (Required) The position of the action in the receipt rule + +Lambda actions support the following: + +* `function_arn` - (Required) The ARN of the Lambda function to invoke +* `invocation_type` - (Optional) `Event` or `RequestResponse` +* `topic_arn` - (Optional) The ARN of an SNS topic to notify +* `position` - (Required) The position of the action in the receipt rule + +S3 actions support the following: + +* `bucket_name` - (Required) The name of the S3 bucket +* `kms_key_arn` - (Optional) The ARN of the KMS key +* `object_key_prefix` - (Optional) The key prefix of the S3 bucket +* `topic_arn` - (Optional) The ARN of an SNS topic to notify +* `position` - (Required) The position of the action in the receipt rule + +SNS actions support the following: + +* `topic_arn` - (Required) The ARN of an SNS topic to notify +* `position` - (Required) The position of the action in the receipt rule +* `encoding` - (Optional) The encoding to use for the email within the Amazon SNS notification. Default value is `UTF-8`. + +Stop actions support the following: + +* `scope` - (Required) The scope to apply. The only acceptable value is `RuleSet`. +* `topic_arn` - (Optional) The ARN of an SNS topic to notify +* `position` - (Required) The position of the action in the receipt rule + +WorkMail actions support the following: + +* `organization_arn` - (Required) The ARN of the WorkMail organization +* `topic_arn` - (Optional) The ARN of an SNS topic to notify +* `position` - (Required) The position of the action in the receipt rule + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The SES receipt rule name. +* `arn` - The SES receipt rule ARN. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES receipt rules using the ruleset name and rule name separated by `:`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES receipt rules using the ruleset name and rule name separated by `:`. 
For example: + +```console +% terraform import aws_ses_receipt_rule.my_rule my_rule_set:my_rule +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_receipt_rule_set.html.markdown b/website/docs/cdktf/python/r/ses_receipt_rule_set.html.markdown new file mode 100644 index 00000000000..c0204148355 --- /dev/null +++ b/website/docs/cdktf/python/r/ses_receipt_rule_set.html.markdown @@ -0,0 +1,66 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_receipt_rule_set" +description: |- + Provides an SES receipt rule set resource +--- + + + +# Resource: aws_ses_receipt_rule_set + +Provides an SES receipt rule set resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_receipt_rule_set import SesReceiptRuleSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesReceiptRuleSet(self, "main", + rule_set_name="primary-rules" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `rule_set_name` - (Required) Name of the rule set. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - SES receipt rule set ARN. +* `id` - SES receipt rule set name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES receipt rule sets using the rule set name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES receipt rule sets using the rule set name. For example: + +```console +% terraform import aws_ses_receipt_rule_set.my_rule_set my_rule_set_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ses_template.html.markdown b/website/docs/cdktf/python/r/ses_template.html.markdown new file mode 100644 index 00000000000..5c1ea89bc7f --- /dev/null +++ b/website/docs/cdktf/python/r/ses_template.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_template" +description: |- + Provides a resource to create a SES template +--- + + + +# Resource: aws_ses_template + +Provides a resource to create a SES template. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ses_template import SesTemplate +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SesTemplate(self, "MyTemplate", + html="

<h1>Hello {{name}},</h1><p>Your favorite animal is {{favoriteanimal}}.</p>

", + name="MyTemplate", + subject="Greetings, {{name}}!", + text="Hello {{name}},\r\nYour favorite animal is {{favoriteanimal}}.\n" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email. +* `html` - (Optional) The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts. +* `subject` - (Optional) The subject line of the email. +* `text` - (Optional) The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the SES template +* `id` - The name of the SES template + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES templates using the template name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SES templates using the template name. For example: + +```console +% terraform import aws_ses_template.MyTemplate MyTemplate +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sesv2_configuration_set.html.markdown b/website/docs/cdktf/python/r/sesv2_configuration_set.html.markdown new file mode 100644 index 00000000000..5ec5e1b3322 --- /dev/null +++ b/website/docs/cdktf/python/r/sesv2_configuration_set.html.markdown @@ -0,0 +1,131 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_configuration_set" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set. +--- + + + +# Resource: aws_sesv2_configuration_set + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_configuration_set import Sesv2ConfigurationSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Sesv2ConfigurationSet(self, "example", + configuration_set_name="example", + delivery_options=Sesv2ConfigurationSetDeliveryOptions( + tls_policy="REQUIRE" + ), + reputation_options=Sesv2ConfigurationSetReputationOptions( + reputation_metrics_enabled=False + ), + sending_options=Sesv2ConfigurationSetSendingOptions( + sending_enabled=True + ), + suppression_options=Sesv2ConfigurationSetSuppressionOptions( + suppressed_reasons=["BOUNCE", "COMPLAINT"] + ), + tracking_options=Sesv2ConfigurationSetTrackingOptions( + custom_redirect_domain="example.com" + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `configuration_set_name` - (Required) The name of the configuration set. 
+* `delivery_options` - (Optional) An object that defines the dedicated IP pool that is used to send emails that you send using the configuration set. +* `reputation_options` - (Optional) An object that defines whether or not Amazon SES collects reputation metrics for the emails that you send that use the configuration set. +* `sending_options` - (Optional) An object that defines whether or not Amazon SES can send email that you send using the configuration set. +* `suppression_options` - (Optional) An object that contains information about the suppression list preferences for your account. +* `tags` - (Optional) A map of tags to assign to the service. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tracking_options` - (Optional) An object that defines the open and click tracking options for emails that you send using the configuration set. +* `vdm_options` - (Optional) An object that defines the VDM settings that apply to emails that you send using the configuration set. + +### delivery_options + +This argument supports the following arguments: + +* `sending_pool_name` - (Optional) The name of the dedicated IP pool to associate with the configuration set. +* `tls_policy` - (Optional) Specifies whether messages that use the configuration set are required to use Transport Layer Security (TLS). Valid values: `REQUIRE`, `OPTIONAL`. + +### reputation_options + +This argument supports the following arguments: + +* `reputation_metrics_enabled` - (Optional) If `true`, tracking of reputation metrics is enabled for the configuration set. If `false`, tracking of reputation metrics is disabled for the configuration set. + +### sending_options + +This argument supports the following arguments: + +* `sending_enabled` - (Optional) If `true`, email sending is enabled for the configuration set. If `false`, email sending is disabled for the configuration set. + +### suppression_options + +* `suppressed_reasons` - (Optional) A list that contains the reasons that email addresses are automatically added to the suppression list for your account. Valid values: `BOUNCE`, `COMPLAINT`. + +### tracking_options + +* `custom_redirect_domain` - (Required) The domain to use for tracking open and click events. + +### vdm_options + +* `dashboard_options` - (Optional) Specifies additional settings for your VDM configuration as applicable to the Dashboard. +* `guardian_options` - (Optional) Specifies additional settings for your VDM configuration as applicable to the Guardian. + +### dashboard_options + +* `engagement_metrics` - (Optional) Specifies the status of your VDM engagement metrics collection. Valid values: `ENABLED`, `DISABLED`. + +### guardian_options + +* `optimized_shared_delivery` - (Optional) Specifies the status of your VDM optimized shared delivery. Valid values: `ENABLED`, `DISABLED`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Configuration Set. +* `reputation_options` - An object that defines whether or not Amazon SES collects reputation metrics for the emails that you send that use the configuration set. + * `last_fresh_start` - The date and time (in Unix time) when the reputation metrics were last given a fresh start. 
When your account is given a fresh start, your reputation metrics are calculated starting from the date of the fresh start. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Configuration Set using the `configuration_set_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Configuration Set using the `configuration_set_name`. For example: + +```console +% terraform import aws_sesv2_configuration_set.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sesv2_configuration_set_event_destination.html.markdown b/website/docs/cdktf/python/r/sesv2_configuration_set_event_destination.html.markdown new file mode 100644 index 00000000000..7abbda5a23d --- /dev/null +++ b/website/docs/cdktf/python/r/sesv2_configuration_set_event_destination.html.markdown @@ -0,0 +1,239 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_configuration_set_event_destination" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set Event Destination. +--- + + + +# Resource: aws_sesv2_configuration_set_event_destination + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set Event Destination. + +## Example Usage + +### Cloud Watch Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_configuration_set import Sesv2ConfigurationSet +from imports.aws.sesv2_configuration_set_event_destination import Sesv2ConfigurationSetEventDestination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Sesv2ConfigurationSet(self, "example", + configuration_set_name="example" + ) + aws_sesv2_configuration_set_event_destination_example = + Sesv2ConfigurationSetEventDestination(self, "example_1", + configuration_set_name=example.configuration_set_name, + event_destination=Sesv2ConfigurationSetEventDestinationEventDestination( + cloud_watch_destination=Sesv2ConfigurationSetEventDestinationEventDestinationCloudWatchDestination( + dimension_configuration=[Sesv2ConfigurationSetEventDestinationEventDestinationCloudWatchDestinationDimensionConfiguration( + default_dimension_value="example", + dimension_name="example", + dimension_value_source="MESSAGE_TAG" + ) + ] + ), + enabled=True, + matching_event_types=["SEND"] + ), + event_destination_name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sesv2_configuration_set_event_destination_example.override_logical_id("example") +``` + +### Kinesis Firehose Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_configuration_set import Sesv2ConfigurationSet +from imports.aws.sesv2_configuration_set_event_destination import Sesv2ConfigurationSetEventDestination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Sesv2ConfigurationSet(self, "example", + configuration_set_name="example" + ) + aws_sesv2_configuration_set_event_destination_example = + Sesv2ConfigurationSetEventDestination(self, "example_1", + configuration_set_name=example.configuration_set_name, + event_destination=Sesv2ConfigurationSetEventDestinationEventDestination( + enabled=True, + kinesis_firehose_destination=Sesv2ConfigurationSetEventDestinationEventDestinationKinesisFirehoseDestination( + delivery_stream_arn=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn), + iam_role_arn=Token.as_string(aws_iam_role_example.arn) + ), + matching_event_types=["SEND"] + ), + event_destination_name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sesv2_configuration_set_event_destination_example.override_logical_id("example") +``` + +### Pinpoint Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_configuration_set import Sesv2ConfigurationSet +from imports.aws.sesv2_configuration_set_event_destination import Sesv2ConfigurationSetEventDestination +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Sesv2ConfigurationSet(self, "example", + configuration_set_name="example" + ) + aws_sesv2_configuration_set_event_destination_example = + Sesv2ConfigurationSetEventDestination(self, "example_1", + configuration_set_name=example.configuration_set_name, + event_destination=Sesv2ConfigurationSetEventDestinationEventDestination( + enabled=True, + matching_event_types=["SEND"], + pinpoint_destination=Sesv2ConfigurationSetEventDestinationEventDestinationPinpointDestination( + application_arn=Token.as_string(aws_pinpoint_app_example.arn) + ) + ), + event_destination_name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sesv2_configuration_set_event_destination_example.override_logical_id("example") +``` + +### SNS Destination + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.sesv2_configuration_set import Sesv2ConfigurationSet
+from imports.aws.sesv2_configuration_set_event_destination import Sesv2ConfigurationSetEventDestination
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = Sesv2ConfigurationSet(self, "example",
+            configuration_set_name="example"
+        )
+        aws_sesv2_configuration_set_event_destination_example = Sesv2ConfigurationSetEventDestination(self, "example_1",
+            configuration_set_name=example.configuration_set_name,
+            event_destination=Sesv2ConfigurationSetEventDestinationEventDestination(
+                enabled=True,
+                matching_event_types=["SEND"],
+                sns_destination=Sesv2ConfigurationSetEventDestinationEventDestinationSnsDestination(
+                    topic_arn=Token.as_string(aws_sns_topic_example.arn)
+                )
+            ),
+            event_destination_name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sesv2_configuration_set_event_destination_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `configuration_set_name` - (Required) The name of the configuration set.
+* `event_destination` - (Required) An object that defines the event destination. See [event_destination](#event_destination) below.
+* `event_destination_name` - (Required) A name that identifies the event destination within the configuration set.
+
+### event_destination
+
+The following arguments are required:
+
+* `matching_event_types` - (Required) An array that specifies which events the Amazon SES API v2 should send to the destinations. Valid values: `SEND`, `REJECT`, `BOUNCE`, `COMPLAINT`, `DELIVERY`, `OPEN`, `CLICK`, `RENDERING_FAILURE`, `DELIVERY_DELAY`, `SUBSCRIPTION`.
+
+The following arguments are optional:
+
+* `cloud_watch_destination` - (Optional) An object that defines an Amazon CloudWatch destination for email events. See [cloud_watch_destination](#cloud_watch_destination) below.
+* `enabled` - (Optional) When the event destination is enabled, the specified event types are sent to the destinations. Default: `false`.
+* `kinesis_firehose_destination` - (Optional) An object that defines an Amazon Kinesis Data Firehose destination for email events. See [kinesis_firehose_destination](#kinesis_firehose_destination) below.
+* `pinpoint_destination` - (Optional) An object that defines an Amazon Pinpoint project destination for email events. See [pinpoint_destination](#pinpoint_destination) below.
+* `sns_destination` - (Optional) An object that defines an Amazon SNS destination for email events. See [sns_destination](#sns_destination) below.
+
+### cloud_watch_destination
+
+The following arguments are required:
+
+* `dimension_configuration` - (Required) An array of objects that define the dimensions to use when you send email events to Amazon CloudWatch. See [dimension_configuration](#dimension_configuration) below.
+
+### dimension_configuration
+
+The following arguments are required:
+
+* `default_dimension_value` - (Required) The default value of the dimension that is published to Amazon CloudWatch if you don't provide the value of the dimension when you send an email.
+* `dimension_name` - (Required) The name of an Amazon CloudWatch dimension associated with an email sending metric.
+* `dimension_value_source` - (Required) The location where the Amazon SES API v2 finds the value of a dimension to publish to Amazon CloudWatch. Valid values: `MESSAGE_TAG`, `EMAIL_HEADER`, `LINK_TAG`.
+
+### kinesis_firehose_destination
+
+The following arguments are required:
+
+* `delivery_stream_arn` - (Required) The Amazon Resource Name (ARN) of the Amazon Kinesis Data Firehose stream that the Amazon SES API v2 sends email events to.
+* `iam_role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM role that the Amazon SES API v2 uses to send email events to the Amazon Kinesis Data Firehose stream.
+
+### pinpoint_destination
+
+The following arguments are required:
+
+* `application_arn` - (Required) The Amazon Resource Name (ARN) of the Amazon Pinpoint project to send email events to.
+
+### sns_destination
+
+The following arguments are required:
+
+* `topic_arn` - (Required) The Amazon Resource Name (ARN) of the Amazon SNS topic to publish email events to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A pipe-delimited string combining `configuration_set_name` and `event_destination_name`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Configuration Set Event Destination using the `id` (`configuration_set_name|event_destination_name`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SESv2 (Simple Email V2) Configuration Set Event Destination using the `id` (`configuration_set_name|event_destination_name`). For example:
+
+```console
+% terraform import aws_sesv2_configuration_set_event_destination.example "example_configuration_set|example_event_destination"
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sesv2_contact_list.html.markdown b/website/docs/cdktf/python/r/sesv2_contact_list.html.markdown
new file mode 100644
index 00000000000..da5397f9ac1
--- /dev/null
+++ b/website/docs/cdktf/python/r/sesv2_contact_list.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_contact_list"
+description: |-
+  Terraform resource for managing an AWS SESv2 (Simple Email V2) Contact List.
+---
+
+
+# Resource: aws_sesv2_contact_list
+
+Terraform resource for managing an AWS SESv2 (Simple Email V2) Contact List.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sesv2_contact_list import Sesv2ContactList
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Sesv2ContactList(self, "example",
+            contact_list_name="example"
+        )
+```
+
+### Extended Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sesv2_contact_list import Sesv2ContactList
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Sesv2ContactList(self, "example",
+            contact_list_name="example",
+            description="description",
+            topic=[Sesv2ContactListTopic(
+                default_subscription_status="OPT_IN",
+                description="topic description",
+                display_name="Example Topic",
+                topic_name="example-topic"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `contact_list_name` - (Required) The name of the contact list.
+
+The following arguments are optional:
+
+* `description` - (Optional) A description of what the contact list is about.
+* `tags` - (Optional) Key-value map of resource tags for the contact list. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `topic` - (Optional) Configuration block(s) with topic for the contact list. Detailed below.
+
+### topic
+
+The following arguments are required:
+
+* `default_subscription_status` - (Required) The default subscription status to be applied to a contact if the contact has not noted their preference for subscribing to a topic.
+* `display_name` - (Required) The name of the topic the contact will see.
+* `topic_name` - (Required) The name of the topic.
+
+The following arguments are optional:
+
+* `description` - (Optional) A description of what the topic is about, which the contact will see.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `created_timestamp` - A timestamp noting when the contact list was created in ISO 8601 format.
+* `last_updated_timestamp` - A timestamp noting the last time the contact list was updated in ISO 8601 format.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Contact List using the `contact_list_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SESv2 (Simple Email V2) Contact List using the `contact_list_name`. For example:
+
+```console
+% terraform import aws_sesv2_contact_list.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sesv2_dedicated_ip_assignment.html.markdown b/website/docs/cdktf/python/r/sesv2_dedicated_ip_assignment.html.markdown
new file mode 100644
index 00000000000..497e730ec5e
--- /dev/null
+++ b/website/docs/cdktf/python/r/sesv2_dedicated_ip_assignment.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_dedicated_ip_assignment"
+description: |-
+  Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Assignment.
+---
+
+
+# Resource: aws_sesv2_dedicated_ip_assignment
+
+Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Assignment.
+
+This resource is used with "Standard" dedicated IP addresses. This includes addresses [requested and relinquished manually](https://docs.aws.amazon.com/ses/latest/dg/dedicated-ip-case.html) via an AWS support case, or [Bring Your Own IP](https://docs.aws.amazon.com/ses/latest/dg/dedicated-ip-byo.html) addresses. Once no longer assigned, this resource returns the IP to the [`ses-default-dedicated-pool`](https://docs.aws.amazon.com/ses/latest/dg/managing-ip-pools.html), managed by AWS.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sesv2_dedicated_ip_assignment import Sesv2DedicatedIpAssignment
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Sesv2DedicatedIpAssignment(self, "example",
+            destination_pool_name="my-pool",
+            ip="0.0.0.0"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `ip` - (Required) Dedicated IP address.
+* `destination_pool_name` - (Required) Name of the dedicated IP pool to which the IP address is assigned.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A comma-separated string made up of `ip` and `destination_pool_name`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Dedicated IP Assignment using the `id`, which is a comma-separated string made up of `ip` and `destination_pool_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SESv2 (Simple Email V2) Dedicated IP Assignment using the `id`, which is a comma-separated string made up of `ip` and `destination_pool_name`. For example:
+
+```console
+% terraform import aws_sesv2_dedicated_ip_assignment.example "0.0.0.0,my-pool"
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sesv2_dedicated_ip_pool.html.markdown b/website/docs/cdktf/python/r/sesv2_dedicated_ip_pool.html.markdown
new file mode 100644
index 00000000000..db5070d71b8
--- /dev/null
+++ b/website/docs/cdktf/python/r/sesv2_dedicated_ip_pool.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_dedicated_ip_pool"
+description: |-
+  Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+---
+
+
+# Resource: aws_sesv2_dedicated_ip_pool
+
+Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.sesv2_dedicated_ip_pool import Sesv2DedicatedIpPool +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Sesv2DedicatedIpPool(self, "example", + pool_name="my-pool" + ) +``` + +### Managed Pool + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_dedicated_ip_pool import Sesv2DedicatedIpPool +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Sesv2DedicatedIpPool(self, "example", + pool_name="my-managed-pool", + scaling_mode="MANAGED" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `pool_name` - (Required) Name of the dedicated IP pool. + +The following arguments are optional: + +* `scaling_mode` - (Optional) IP pool scaling mode. Valid values: `STANDARD`, `MANAGED`. If omitted, the AWS API will default to a standard pool. +* `tags` - (Optional) A map of tags to assign to the pool. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Dedicated IP Pool. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Dedicated IP Pool using the `pool_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Dedicated IP Pool using the `pool_name`. For example: + +```console +% terraform import aws_sesv2_dedicated_ip_pool.example my-pool +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sesv2_email_identity.html.markdown b/website/docs/cdktf/python/r/sesv2_email_identity.html.markdown new file mode 100644 index 00000000000..b4202030fbd --- /dev/null +++ b/website/docs/cdktf/python/r/sesv2_email_identity.html.markdown @@ -0,0 +1,162 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_email_identity" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity. +--- + + + +# Resource: aws_sesv2_email_identity + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity. + +## Example Usage + +### Basic Usage + +#### Email Address Identity + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.sesv2_email_identity import Sesv2EmailIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Sesv2EmailIdentity(self, "example", + email_identity="testing@example.com" + ) +``` + +#### Domain Identity + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_email_identity import Sesv2EmailIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Sesv2EmailIdentity(self, "example", + email_identity="example.com" + ) +``` + +#### Configuration Set + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_configuration_set import Sesv2ConfigurationSet +from imports.aws.sesv2_email_identity import Sesv2EmailIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Sesv2ConfigurationSet(self, "example", + configuration_set_name="example" + ) + aws_sesv2_email_identity_example = Sesv2EmailIdentity(self, "example_1", + configuration_set_name=example.configuration_set_name, + email_identity="example.com" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sesv2_email_identity_example.override_logical_id("example") +``` + +#### DKIM Signing Attributes (BYODKIM) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_email_identity import Sesv2EmailIdentity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Sesv2EmailIdentity(self, "example", + dkim_signing_attributes=Sesv2EmailIdentityDkimSigningAttributes( + domain_signing_private_key="MIIJKAIBAAKCAgEA2Se7p8zvnI4yh+Gh9j2rG5e2aRXjg03Y8saiupLnadPH9xvM...", + domain_signing_selector="example" + ), + email_identity="example.com" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `email_identity` - (Required) The email address or domain to verify. + +The following arguments are optional: + +* `configuration_set_name` - (Optional) The configuration set to use by default when sending from this identity. Note that any configuration set defined in the email sending request takes precedence. +* `dkim_signing_attributes` - (Optional) The configuration of the DKIM authentication settings for an email domain identity. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
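+
+Since the optional arguments are independent, they can be combined on a single identity. A minimal hand-written sketch (not `cdktf convert` output; the stack class name is illustrative) that verifies a domain, attaches the configuration set from the example above, and adds tags:
+
+```python
+# Hand-written sketch, not 'cdktf convert' output.
+# Assumes provider bindings generated by `cdktf get`.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.sesv2_configuration_set import Sesv2ConfigurationSet
+from imports.aws.sesv2_email_identity import Sesv2EmailIdentity
+class MyTaggedIdentity(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Configuration set used by default when sending from this identity.
+        example = Sesv2ConfigurationSet(self, "example",
+            configuration_set_name="example"
+        )
+        # Domain identity with the configuration set and resource tags attached.
+        Sesv2EmailIdentity(self, "tagged",
+            configuration_set_name=example.configuration_set_name,
+            email_identity="example.com",
+            tags={
+                "Environment": "staging"
+            }
+        )
+```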
+
+### dkim_signing_attributes
+
+* `domain_signing_private_key` - (Optional) [Bring Your Own DKIM] A private key that's used to generate a DKIM signature. The private key must use 1024 or 2048-bit RSA encryption, and must be encoded using base64 encoding.
+
+-> **NOTE:** You have to delete the first and last lines ('-----BEGIN PRIVATE KEY-----' and '-----END PRIVATE KEY-----', respectively) of the generated private key. Additionally, you have to remove the line breaks in the generated private key. The resulting value is a string of characters with no spaces or line breaks.
+
+* `domain_signing_selector` - (Optional) [Bring Your Own DKIM] A string that's used to identify a public key in the DNS configuration for a domain.
+* `next_signing_key_length` - (Optional) [Easy DKIM] The key length of the future DKIM key pair to be generated. This can be changed at most once per day. Valid values: `RSA_1024_BIT`, `RSA_2048_BIT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Email Identity.
+* `dkim_signing_attributes` - A list of objects that contains, at most, one element with information about the private key and selector to use for Bring Your Own DKIM (BYODKIM), or that configures the key length to be used for Easy DKIM.
+    * `current_signing_key_length` - [Easy DKIM] The key length of the DKIM key pair in use.
+    * `last_key_generation_timestamp` - [Easy DKIM] The last time a key pair was generated for this identity.
+    * `next_signing_key_length` - [Easy DKIM] The key length of the future DKIM key pair to be generated. This can be changed at most once per day.
+    * `signing_attributes_origin` - A string that indicates how DKIM was configured for the identity. `AWS_SES` indicates that DKIM was configured for the identity by using Easy DKIM. `EXTERNAL` indicates that DKIM was configured for the identity by using Bring Your Own DKIM (BYODKIM).
+    * `status` - Describes whether or not Amazon SES has successfully located the DKIM records in the DNS records for the domain. See the [AWS SES API v2 Reference](https://docs.aws.amazon.com/ses/latest/APIReference-V2/API_DkimAttributes.html#SES-Type-DkimAttributes-Status) for supported statuses.
+    * `tokens` - If you used Easy DKIM to configure DKIM authentication for the domain, then this object contains a set of unique strings that you use to create a set of CNAME records that you add to the DNS configuration for your domain. When Amazon SES detects these records in the DNS configuration for your domain, the DKIM authentication process is complete. If you configured DKIM authentication for the domain by providing your own public-private key pair, then this object contains the selector for the public key.
+* `identity_type` - The email identity type. Valid values: `EMAIL_ADDRESS`, `DOMAIN`.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `verified_for_sending_status` - Specifies whether or not the identity is verified.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Email Identity using the `email_identity`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Email Identity using the `email_identity`. For example: + +```console +% terraform import aws_sesv2_email_identity.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sesv2_email_identity_feedback_attributes.html.markdown b/website/docs/cdktf/python/r/sesv2_email_identity_feedback_attributes.html.markdown new file mode 100644 index 00000000000..3b2df1fdc40 --- /dev/null +++ b/website/docs/cdktf/python/r/sesv2_email_identity_feedback_attributes.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_email_identity_feedback_attributes" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Feedback Attributes. +--- + + + +# Resource: aws_sesv2_email_identity_feedback_attributes + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Feedback Attributes. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_email_identity import Sesv2EmailIdentity +from imports.aws.sesv2_email_identity_feedback_attributes import Sesv2EmailIdentityFeedbackAttributes +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Sesv2EmailIdentity(self, "example", + email_identity="example.com" + ) + aws_sesv2_email_identity_feedback_attributes_example = + Sesv2EmailIdentityFeedbackAttributes(self, "example_1", + email_forwarding_enabled=True, + email_identity=example.email_identity + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sesv2_email_identity_feedback_attributes_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `email_identity` - (Required) The email identity. +* `email_forwarding_enabled` - (Optional) Sets the feedback forwarding configuration for the identity. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Email Identity Feedback Attributes using the `email_identity`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Email Identity Feedback Attributes using the `email_identity`. 
For example: + +```console +% terraform import aws_sesv2_email_identity_feedback_attributes.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sesv2_email_identity_mail_from_attributes.html.markdown b/website/docs/cdktf/python/r/sesv2_email_identity_mail_from_attributes.html.markdown new file mode 100644 index 00000000000..c2ce0ef2523 --- /dev/null +++ b/website/docs/cdktf/python/r/sesv2_email_identity_mail_from_attributes.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_email_identity_mail_from_attributes" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes. +--- + + + +# Resource: aws_sesv2_email_identity_mail_from_attributes + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sesv2_email_identity import Sesv2EmailIdentity +from imports.aws.sesv2_email_identity_mail_from_attributes import Sesv2EmailIdentityMailFromAttributes +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Sesv2EmailIdentity(self, "example", + email_identity="example.com" + ) + aws_sesv2_email_identity_mail_from_attributes_example = + Sesv2EmailIdentityMailFromAttributes(self, "example_1", + behavior_on_mx_failure="REJECT_MESSAGE", + email_identity=example.email_identity, + mail_from_domain="subdomain.${" + example.email_identity + "}" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sesv2_email_identity_mail_from_attributes_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `email_identity` - (Required) The verified email identity. +* `behavior_on_mx_failure` - (Optional) The action to take if the required MX record isn't found when you send an email. Valid values: `USE_DEFAULT_VALUE`, `REJECT_MESSAGE`. +* `mail_from_domain` - (Optional) The custom MAIL FROM domain that you want the verified identity to use. Required if `behavior_on_mx_failure` is `REJECT_MESSAGE`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Email Identity Mail From Attributes using the `email_identity`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Email Identity Mail From Attributes using the `email_identity`. 
For example: + +```console +% terraform import aws_sesv2_email_identity_mail_from_attributes.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sfn_activity.html.markdown b/website/docs/cdktf/python/r/sfn_activity.html.markdown new file mode 100644 index 00000000000..596fd4980e0 --- /dev/null +++ b/website/docs/cdktf/python/r/sfn_activity.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "SFN (Step Functions)" +layout: "aws" +page_title: "AWS: aws_sfn_activity" +description: |- + Provides a Step Function Activity resource. +--- + + + +# Resource: aws_sfn_activity + +Provides a Step Function Activity resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sfn_activity import SfnActivity +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SfnActivity(self, "sfn_activity", + name="my-activity" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the activity to create. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) that identifies the created activity. +* `name` - The name of the activity. +* `creation_date` - The date the activity was created. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import activities using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import activities using the `arn`. For example: + +```console +% terraform import aws_sfn_activity.foo arn:aws:states:eu-west-1:123456789098:activity:bar +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sfn_alias.html.markdown b/website/docs/cdktf/python/r/sfn_alias.html.markdown new file mode 100644 index 00000000000..9c3a6a2a6f8 --- /dev/null +++ b/website/docs/cdktf/python/r/sfn_alias.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "SFN (Step Functions)" +layout: "aws" +page_title: "AWS: aws_sfn_alias" +description: |- + Provides a Step Function State Machine Alias. +--- + + + +# Resource: aws_sfn_alias + +Provides a Step Function State Machine Alias. 
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sfn_alias import SfnAlias
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SfnAlias(self, "my_sfn_alias",
+            name="my_sfn_alias",
+            routing_configuration=[SfnAliasRoutingConfiguration(
+                state_machine_version_arn="arn:aws:states:us-east-1:12345:stateMachine:demo:3",
+                weight=50
+            ), SfnAliasRoutingConfiguration(
+                state_machine_version_arn="arn:aws:states:us-east-1:12345:stateMachine:demo:2",
+                weight=50
+            )
+            ]
+        )
+        SfnAlias(self, "sfn_alias",
+            name="my_sfn_alias",
+            routing_configuration=[SfnAliasRoutingConfiguration(
+                state_machine_version_arn=sfn_test.state_machine_version_arn,
+                weight=100
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name for the alias you are creating.
+* `routing_configuration` - (Required) The StateMachine alias' route configuration settings. Fields documented below.
+
+`routing_configuration` supports the following arguments:
+
+* `state_machine_version_arn` - (Required) A version of the state machine.
+* `weight` - (Required) Percentage of traffic routed to the state machine version.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the alias.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) identifying your state machine alias.
+* `creation_date` - The date the state machine alias was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SFN (Step Functions) Alias using the `arn`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SFN (Step Functions) Alias using the `arn`. For example:
+
+```console
+% terraform import aws_sfn_alias.foo arn:aws:states:us-east-1:123456789098:stateMachine:myStateMachine:foo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sfn_state_machine.html.markdown b/website/docs/cdktf/python/r/sfn_state_machine.html.markdown
new file mode 100644
index 00000000000..5698071a44b
--- /dev/null
+++ b/website/docs/cdktf/python/r/sfn_state_machine.html.markdown
@@ -0,0 +1,172 @@
+---
+subcategory: "SFN (Step Functions)"
+layout: "aws"
+page_title: "AWS: aws_sfn_state_machine"
+description: |-
+  Provides a Step Function State Machine resource.
+--- + + + +# Resource: aws_sfn_state_machine + +Provides a Step Function State Machine resource + +## Example Usage + +### Basic (Standard Workflow) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sfn_state_machine import SfnStateMachine +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SfnStateMachine(self, "sfn_state_machine", + definition="{\n \"Comment\": \"A Hello World example of the Amazon States Language using an AWS Lambda Function\",\n \"StartAt\": \"HelloWorld\",\n \"States\": {\n \"HelloWorld\": {\n \"Type\": \"Task\",\n \"Resource\": \"${" + lambda_.arn + "}\",\n \"End\": true\n }\n }\n}\n\n", + name="my-state-machine", + role_arn=iam_for_sfn.arn + ) +``` + +### Basic (Express Workflow) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sfn_state_machine import SfnStateMachine +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SfnStateMachine(self, "sfn_state_machine", + definition="{\n \"Comment\": \"A Hello World example of the Amazon States Language using an AWS Lambda Function\",\n \"StartAt\": \"HelloWorld\",\n \"States\": {\n \"HelloWorld\": {\n \"Type\": \"Task\",\n \"Resource\": \"${" + lambda_.arn + "}\",\n \"End\": true\n }\n }\n}\n\n", + name="my-state-machine", + role_arn=iam_for_sfn.arn, + type="EXPRESS" + ) +``` + +### Publish (Publish SFN version) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sfn_state_machine import SfnStateMachine +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SfnStateMachine(self, "sfn_state_machine", + definition="{\n \"Comment\": \"A Hello World example of the Amazon States Language using an AWS Lambda Function\",\n \"StartAt\": \"HelloWorld\",\n \"States\": {\n \"HelloWorld\": {\n \"Type\": \"Task\",\n \"Resource\": \"${" + lambda_.arn + "}\",\n \"End\": true\n }\n }\n}\n\n", + name="my-state-machine", + publish=True, + role_arn=iam_for_sfn.arn, + type="EXPRESS" + ) +``` + +### Logging + +~> *NOTE:* See the [AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html) for more information about enabling Step Function logging. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.sfn_state_machine import SfnStateMachine +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SfnStateMachine(self, "sfn_state_machine", + definition="{\n \"Comment\": \"A Hello World example of the Amazon States Language using an AWS Lambda Function\",\n \"StartAt\": \"HelloWorld\",\n \"States\": {\n \"HelloWorld\": {\n \"Type\": \"Task\",\n \"Resource\": \"${" + lambda_.arn + "}\",\n \"End\": true\n }\n }\n}\n\n", + logging_configuration=SfnStateMachineLoggingConfiguration( + include_execution_data=True, + level="ERROR", + log_destination="${" + log_group_for_sfn.arn + "}:*" + ), + name="my-state-machine", + role_arn=iam_for_sfn.arn + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `definition` - (Required) The [Amazon States Language](https://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html) definition of the state machine. +* `logging_configuration` - (Optional) Defines what execution history events are logged and where they are logged. The `logging_configuration` parameter is only valid when `type` is set to `EXPRESS`. Defaults to `OFF`. For more information see [Logging Express Workflows](https://docs.aws.amazon.com/step-functions/latest/dg/cw-logs.html) and [Log Levels](https://docs.aws.amazon.com/step-functions/latest/dg/cloudwatch-log-level.html) in the AWS Step Functions User Guide. +* `name` - (Optional) The name of the state machine. The name should only contain `0`-`9`, `A`-`Z`, `a`-`z`, `-` and `_`. If omitted, Terraform will assign a random, unique name. +* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `publish` - (Optional) Set to true to publish a version of the state machine during creation. Default: false. +* `role_arn` - (Required) The Amazon Resource Name (ARN) of the IAM role to use for this state machine. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tracing_configuration` - (Optional) Selects whether AWS X-Ray tracing is enabled. +* `type` - (Optional) Determines whether a Standard or Express state machine is created. The default is `STANDARD`. You cannot update the type of a state machine once it has been created. Valid values: `STANDARD`, `EXPRESS`. + +### `logging_configuration` Configuration Block + +* `include_execution_data` - (Optional) Determines whether execution data is included in your log. When set to `false`, data is excluded. +* `level` - (Optional) Defines which category of execution history events are logged. Valid values: `ALL`, `ERROR`, `FATAL`, `OFF` +* `log_destination` - (Optional) Amazon Resource Name (ARN) of a CloudWatch log group. Make sure the State Machine has the correct IAM policies for logging. The ARN must end with `:*` + +### `tracing_configuration` Configuration Block + +* `enabled` - (Optional) When set to `true`, AWS X-Ray tracing is enabled. Make sure the State Machine has the correct IAM policies for logging. See the [AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/xray-iam.html) for details. 
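+
+The examples above do not show tracing, so here is a minimal hand-written sketch (not `cdktf convert` output) that enables X-Ray tracing. The `SfnStateMachineTracingConfiguration` class name is assumed, following the naming convention of the generated bindings used elsewhere in these examples:
+
+```python
+# Hand-written sketch, not 'cdktf convert' output.
+# Assumes provider bindings generated by `cdktf get` and an IAM role
+# reference `iam_for_sfn`, as in the examples above.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.sfn_state_machine import SfnStateMachine, SfnStateMachineTracingConfiguration
+class MyTracedStateMachine(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SfnStateMachine(self, "sfn_state_machine",
+            # A trivial Pass-state definition, used here only to keep the sketch self-contained.
+            definition="{\n  \"StartAt\": \"HelloWorld\",\n  \"States\": {\n    \"HelloWorld\": {\n      \"Type\": \"Pass\",\n      \"End\": true\n    }\n  }\n}\n",
+            name="my-traced-state-machine",
+            role_arn=iam_for_sfn.arn,
+            # Enable AWS X-Ray tracing for executions of this state machine.
+            tracing_configuration=SfnStateMachineTracingConfiguration(
+                enabled=True
+            )
+        )
+```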
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the state machine. +* `arn` - The ARN of the state machine. +* `creation_date` - The date the state machine was created. +* `status` - The current status of the state machine. Either `ACTIVE` or `DELETING`. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5m`) +* `update` - (Default `1m`) +* `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import State Machines using the `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import State Machines using the `arn`. For example: + +```console +% terraform import aws_sfn_state_machine.foo arn:aws:states:eu-west-1:123456789098:stateMachine:bar +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/shield_protection.html.markdown b/website/docs/cdktf/python/r/shield_protection.html.markdown new file mode 100644 index 00000000000..8a5345b69a7 --- /dev/null +++ b/website/docs/cdktf/python/r/shield_protection.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "Shield" +layout: "aws" +page_title: "AWS: aws_shield_protection" +description: |- + Enables AWS Shield Advanced for a specific AWS resource. +--- + + + +# Resource: aws_shield_protection + +Enables AWS Shield Advanced for a specific AWS resource. +The resource can be an Amazon CloudFront distribution, Elastic Load Balancing load balancer, AWS Global Accelerator accelerator, Elastic IP Address, or an Amazon Route 53 hosted zone. + +## Example Usage + +### Create protection + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.eip import Eip +from imports.aws.shield_protection import ShieldProtection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Eip(self, "example", + domain="vpc" + ) + DataAwsAvailabilityZones(self, "available") + current = DataAwsCallerIdentity(self, "current") + data_aws_region_current = DataAwsRegion(self, "current_3") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        data_aws_region_current.override_logical_id("current")
+        aws_shield_protection_example = ShieldProtection(self, "example_4",
+            name="example",
+            resource_arn="arn:aws:ec2:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:eip-allocation/${" + example.id + "}",
+            tags={
+                "Environment": "Dev"
+            }
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_shield_protection_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A friendly name for the Protection you are creating.
+* `resource_arn` - (Required) The ARN (Amazon Resource Name) of the resource to be protected.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) for the Protection object that is created.
+* `arn` - The ARN of the Protection.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Shield protection resources by specifying their ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Shield protection resources by specifying their ID. For example:
+
+```console
+% terraform import aws_shield_protection.example ff9592dc-22f3-4e88-afa1-7b29fde9669a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/shield_protection_group.html.markdown b/website/docs/cdktf/python/r/shield_protection_group.html.markdown
new file mode 100644
index 00000000000..de81b19fce4
--- /dev/null
+++ b/website/docs/cdktf/python/r/shield_protection_group.html.markdown
@@ -0,0 +1,142 @@
+---
+subcategory: "Shield"
+layout: "aws"
+page_title: "AWS: aws_shield_protection_group"
+description: |-
+  Creates a grouping of protected resources so they can be handled as a collective.
+---
+
+
+
+# Resource: aws_shield_protection_group
+
+Creates a grouping of protected resources so they can be handled as a collective.
+This resource grouping improves the accuracy of detection and reduces false positives. For more information see
+[Managing AWS Shield Advanced protection groups](https://docs.aws.amazon.com/waf/latest/developerguide/manage-protection-group.html).
+
+## Example Usage
+
+### Create protection group for all resources
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.shield_protection_group import ShieldProtectionGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ShieldProtectionGroup(self, "example", + aggregation="MAX", + pattern="ALL", + protection_group_id="example" + ) +``` + +### Create protection group for arbitrary number of resources + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.eip import Eip +from imports.aws.shield_protection import ShieldProtection +from imports.aws.shield_protection_group import ShieldProtectionGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Eip(self, "example", + domain="vpc" + ) + current = DataAwsCallerIdentity(self, "current") + data_aws_region_current = DataAwsRegion(self, "current_2") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_region_current.override_logical_id("current") + aws_shield_protection_example = ShieldProtection(self, "example_3", + name="example", + resource_arn="arn:aws:ec2:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:eip-allocation/${" + example.id + "}" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_shield_protection_example.override_logical_id("example") + aws_shield_protection_group_example = ShieldProtectionGroup(self, "example_4", + aggregation="MEAN", + depends_on=[aws_shield_protection_example], + members=["arn:aws:ec2:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:eip-allocation/${" + example.id + "}" + ], + pattern="ARBITRARY", + protection_group_id="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_shield_protection_group_example.override_logical_id("example") +``` + +### Create protection group for a type of resource + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.shield_protection_group import ShieldProtectionGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ShieldProtectionGroup(self, "example", + aggregation="SUM", + pattern="BY_RESOURCE_TYPE", + protection_group_id="example", + resource_type="ELASTIC_IP_ALLOCATION" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `aggregation` - (Required) Defines how AWS Shield combines resource data for the group in order to detect, mitigate, and report events. +* `members` - (Optional) The Amazon Resource Names (ARNs) of the resources to include in the protection group. You must set this when you set `pattern` to ARBITRARY and you must not set it for any other `pattern` setting. 
+* `pattern` - (Required) The criteria to use to choose the protected resources for inclusion in the group. +* `protection_group_id` - (Required) The name of the protection group. +* `resource_type` - (Optional) The resource type to include in the protection group. You must set this when you set `pattern` to BY_RESOURCE_TYPE and you must not set it for any other `pattern` setting. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `protection_group_arn` - The ARN (Amazon Resource Name) of the protection group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Shield protection group resources using their protection group id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Shield protection group resources using their protection group id. For example: + +```console +% terraform import aws_shield_protection_group.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/shield_protection_health_check_association.html.markdown b/website/docs/cdktf/python/r/shield_protection_health_check_association.html.markdown new file mode 100644 index 00000000000..8f68e0fa59a --- /dev/null +++ b/website/docs/cdktf/python/r/shield_protection_health_check_association.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "Shield" +layout: "aws" +page_title: "AWS: aws_shield_protection_health_check_association" +description: |- + Creates an association between a Route53 Health Check and a Shield Advanced protected resource. +--- + + + +# Resource: aws_shield_protection_health_check_association + +Creates an association between a Route53 Health Check and a Shield Advanced protected resource. +This association uses the health of your applications to improve responsiveness and accuracy in attack detection and mitigation. + +Blog post: [AWS Shield Advanced now supports Health Based Detection](https://aws.amazon.com/about-aws/whats-new/2020/02/aws-shield-advanced-now-supports-health-based-detection/) + +## Example Usage + +### Create an association between a protected EIP and a Route53 Health Check + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity +from imports.aws.data_aws_partition import DataAwsPartition +from imports.aws.data_aws_region import DataAwsRegion +from imports.aws.eip import Eip +from imports.aws.route53_health_check import Route53HealthCheck +from imports.aws.shield_protection import ShieldProtection +from imports.aws.shield_protection_health_check_association import ShieldProtectionHealthCheckAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Eip(self, "example", + domain="vpc", + tags={ + "Name": "example" + } + ) + aws_route53_health_check_example = Route53HealthCheck(self, "example_1", + failure_threshold=Token.as_number("3"), + ip_address=example.public_ip, + port=80, + request_interval=Token.as_number("30"), + resource_path="/ready", + tags={ + "Name": "tf-example-health-check" + }, + type="HTTP" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_route53_health_check_example.override_logical_id("example") + current = DataAwsCallerIdentity(self, "current") + data_aws_partition_current = DataAwsPartition(self, "current_3") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_partition_current.override_logical_id("current") + data_aws_region_current = DataAwsRegion(self, "current_4") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_region_current.override_logical_id("current") + aws_shield_protection_example = ShieldProtection(self, "example_5", + name="example-protection", + resource_arn="arn:${" + data_aws_partition_current.partition + "}:ec2:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:eip-allocation/${" + example.id + "}" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_shield_protection_example.override_logical_id("example") + aws_shield_protection_health_check_association_example = + ShieldProtectionHealthCheckAssociation(self, "example_6", + health_check_arn=Token.as_string(aws_route53_health_check_example.arn), + shield_protection_id=Token.as_string(aws_shield_protection_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_shield_protection_health_check_association_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `health_check_arn` - (Required) The ARN (Amazon Resource Name) of the Route53 Health Check resource which will be associated to the protected resource. +* `shield_protection_id` - (Required) The ID of the protected resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) for the Protection object that is created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Shield protection health check association resources using the `shield_protection_id` and `health_check_arn`. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Shield protection health check association resources using the `shield_protection_id` and `health_check_arn`. For example:
+
+```console
+% terraform import aws_shield_protection_health_check_association.example ff9592dc-22f3-4e88-afa1-7b29fde9669a+arn:aws:route53:::healthcheck/3742b175-edb9-46bc-9359-f53e3b794b1b
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/signer_signing_job.html.markdown b/website/docs/cdktf/python/r/signer_signing_job.html.markdown
new file mode 100644
index 00000000000..bcc662e6a24
--- /dev/null
+++ b/website/docs/cdktf/python/r/signer_signing_job.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "Signer"
+layout: "aws"
+page_title: "AWS: aws_signer_signing_job"
+description: |-
+  Creates a Signer Signing Job.
+---
+
+
+
+# Resource: aws_signer_signing_job
+
+Creates a Signer Signing Job.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.signer_signing_job import SignerSigningJob
+from imports.aws.signer_signing_profile import SignerSigningProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test_sp = SignerSigningProfile(self, "test_sp",
+            platform_id="AWSLambda-SHA384-ECDSA"
+        )
+        SignerSigningJob(self, "build_signing_job",
+            destination=SignerSigningJobDestination(
+                s3=SignerSigningJobDestinationS3(
+                    bucket="s3-bucket-name",
+                    prefix="signed/"
+                )
+            ),
+            ignore_signing_job_failure=True,
+            profile_name=test_sp.name,
+            source=SignerSigningJobSource(
+                s3=SignerSigningJobSourceS3(
+                    bucket="s3-bucket-name",
+                    key="object-to-be-signed.zip",
+                    version="jADjFYYYEXAMPLETszPjOmCMFDzd9dN1"
+                )
+            )
+        )
+```
+
+## Argument Reference
+
+* `profile_name` - (Required) The name of the profile to initiate the signing operation.
+* `source` - (Required) The S3 bucket that contains the object to sign. See [Source](#source) below for details.
+* `destination` - (Required) The S3 bucket in which to save your signed object. See [Destination](#destination) below for details.
+* `ignore_signing_job_failure` - (Optional) Set this argument to `true` to ignore signing job failures and retrieve failed status and reason. Default `false`.
+
+### Source
+
+The source configuration block supports the following arguments:
+
+* `s3` - (Required) A configuration block describing the S3 Source object: See [S3 Source](#s3-source) below for details.
+
+### S3 Source
+
+The configuration block supports the following arguments:
+
+* `bucket` - (Required) Name of the S3 bucket.
+* `key` - (Required) Key name of the object that contains your unsigned code.
+* `version` - (Required) Version of your source image in your version-enabled S3 bucket.
+
+### Destination
+
+The destination configuration block supports the following arguments:
+
+* `s3` - (Required) A configuration block describing the S3 Destination object: See [S3 Destination](#s3-destination) below for details.
+
+### S3 Destination
+
+The configuration block supports the following arguments:
+
+* `bucket` - (Required) Name of the S3 bucket.
+* `prefix` - (Optional) An Amazon S3 object key prefix that you can use to limit signed object keys to those beginning with the specified prefix.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `completed_at` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was completed.
+* `created_at` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was created.
+* `job_id` - The ID of the signing job on output.
+* `job_invoker` - The IAM entity that initiated the signing job.
+* `job_owner` - The AWS account ID of the job owner.
+* `platform_display_name` - A human-readable name for the signing platform associated with the signing job.
+* `platform_id` - The platform to which your signed code image will be distributed.
+* `profile_version` - The version of the signing profile used to initiate the signing job.
+* `requested_by` - The IAM principal that requested the signing job.
+* `revocation_record` - A revocation record if the signature generated by the signing job has been revoked. Contains a timestamp and the ID of the IAM entity that revoked the signature.
+* `signature_expires_at` - The time when the signature of a signing job expires.
+* `signed_object` - Name of the S3 bucket where the signed code image is saved by code signing.
+* `status` - Status of the signing job.
+* `status_reason` - String value that contains the status reason.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Signer signing jobs using the `job_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Signer signing jobs using the `job_id`. For example:
+
+```console
+% terraform import aws_signer_signing_job.test_signer_signing_job 9ed7e5c3-b8d4-4da0-8459-44e0b068f7ee
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/signer_signing_profile.html.markdown b/website/docs/cdktf/python/r/signer_signing_profile.html.markdown
new file mode 100644
index 00000000000..ac936b05643
--- /dev/null
+++ b/website/docs/cdktf/python/r/signer_signing_profile.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "Signer"
+layout: "aws"
+page_title: "AWS: aws_signer_signing_profile"
+description: |-
+  Creates a Signer Signing Profile.
+---
+
+
+
+# Resource: aws_signer_signing_profile
+
+Creates a Signer Signing Profile. A signing profile contains information about the code signing configuration parameters that can be used by a given code signing user.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.signer_signing_profile import SignerSigningProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SignerSigningProfile(self, "prod_sp",
+            name_prefix="prod_sp_",
+            platform_id="AWSLambda-SHA384-ECDSA",
+            signature_validity_period=SignerSigningProfileSignatureValidityPeriod(
+                type="YEARS",
+                value=5
+            ),
+            tags={
+                "tag1": "value1",
+                "tag2": "value2"
+            }
+        )
+        SignerSigningProfile(self, "test_sp",
+            platform_id="AWSLambda-SHA384-ECDSA"
+        )
+```
+
+## Argument Reference
+
+* `platform_id` - (Required) The ID of the platform that is used by the target signing profile.
+* `name` - (Optional) A unique signing profile name. By default generated by Terraform. Signing profile names are immutable and cannot be reused after cancellation.
+* `name_prefix` - (Optional) A signing profile name prefix. Terraform will generate a unique suffix. Conflicts with `name`.
+* `signature_validity_period` - (Optional) The validity period for a signing job.
+* `tags` - (Optional) A map of tags associated with the signing profile. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) for the signing profile.
+* `name` - The name of the target signing profile.
+* `platform_display_name` - A human-readable name for the signing platform associated with the signing profile.
+* `revocation_record` - Revocation information for a signing profile.
+* `status` - The status of the target signing profile.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `version` - The current version of the signing profile.
+* `version_arn` - The signing profile ARN, including the profile version.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Signer signing profiles using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Signer signing profiles using the `name`. For example:
+
+```console
+% terraform import aws_signer_signing_profile.test_signer_signing_profile test_sp_DdW3Mk1foYL88fajut4mTVFGpuwfd4ACO6ANL0D1uIj7lrn8adK
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/signer_signing_profile_permission.html.markdown b/website/docs/cdktf/python/r/signer_signing_profile_permission.html.markdown
new file mode 100644
index 00000000000..802bc4e4b72
--- /dev/null
+++ b/website/docs/cdktf/python/r/signer_signing_profile_permission.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "Signer"
+layout: "aws"
+page_title: "AWS: aws_signer_signing_profile_permission"
+description: |-
+  Creates a Signer Signing Profile Permission.
+---
+
+
+
+# Resource: aws_signer_signing_profile_permission
+
+Creates a Signer Signing Profile Permission.
That is, a cross-account permission for a signing profile. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.signer_signing_profile import SignerSigningProfile +from imports.aws.signer_signing_profile_permission import SignerSigningProfilePermission +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + prod_sp = SignerSigningProfile(self, "prod_sp", + name_prefix="prod_sp_", + platform_id="AWSLambda-SHA384-ECDSA", + signature_validity_period=SignerSigningProfileSignatureValidityPeriod( + type="YEARS", + value=5 + ), + tags={ + "tag1": "value1", + "tag2": "value2" + } + ) + SignerSigningProfilePermission(self, "sp_permission_1", + action="signer:StartSigningJob", + principal=aws_account.string_value, + profile_name=prod_sp.name + ) + SignerSigningProfilePermission(self, "sp_permission_2", + action="signer:GetSigningProfile", + principal=aws_team_role_arn.string_value, + profile_name=prod_sp.name, + statement_id="ProdAccountStartSigningJob_StatementId" + ) + SignerSigningProfilePermission(self, "sp_permission_3", + action="signer:RevokeSignature", + principal="123456789012", + profile_name=prod_sp.name, + profile_version=prod_sp.version, + statement_id_prefix="version-permission-" + ) +``` + +## Argument Reference + +* `profile_name` - (Required) Name of the signing profile to add the cross-account permissions. +* `action` - (Required) An AWS Signer action permitted as part of cross-account permissions. Valid values: `signer:StartSigningJob`, `signer:GetSigningProfile`, or `signer:RevokeSignature`. +* `principal` - (Required) The AWS principal to be granted a cross-account permission. +* `profile_version` - (Optional) The signing profile version that a permission applies to. +* `statement_id` - (Optional) A unique statement identifier. By default generated by Terraform. +* `statement_id_prefix` - (Optional) A statement identifier prefix. Terraform will generate a unique suffix. Conflicts with `statement_id`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Signer signing profile permission statements using profile_name/statement_id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Signer signing profile permission statements using profile_name/statement_id. 
For example: + +```console +% terraform import aws_signer_signing_profile_permission.test_signer_signing_profile_permission prod_profile_DdW3Mk1foYL88fajut4mTVFGpuwfd4ACO6ANL0D1uIj7lrn8adK/ProdAccountStartSigningJobStatementId +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/simpledb_domain.html.markdown b/website/docs/cdktf/python/r/simpledb_domain.html.markdown new file mode 100644 index 00000000000..4be8a47527e --- /dev/null +++ b/website/docs/cdktf/python/r/simpledb_domain.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "SDB (SimpleDB)" +layout: "aws" +page_title: "AWS: aws_simpledb_domain" +description: |- + Provides a SimpleDB domain resource. +--- + + + +# Resource: aws_simpledb_domain + +Provides a SimpleDB domain resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.simpledb_domain import SimpledbDomain +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SimpledbDomain(self, "users", + name="users" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the SimpleDB domain + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the SimpleDB domain + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SimpleDB Domains using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SimpleDB Domains using the `name`. For example: + +```console +% terraform import aws_simpledb_domain.users users +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/snapshot_create_volume_permission.html.markdown b/website/docs/cdktf/python/r/snapshot_create_volume_permission.html.markdown new file mode 100644 index 00000000000..f56a66aa2ee --- /dev/null +++ b/website/docs/cdktf/python/r/snapshot_create_volume_permission.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_snapshot_create_volume_permission" +description: |- + Adds create volume permission to an EBS Snapshot +--- + + + +# Resource: aws_snapshot_create_volume_permission + +Adds permission to create volumes off of a given EBS Snapshot. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.ebs_snapshot import EbsSnapshot +from imports.aws.ebs_volume import EbsVolume +from imports.aws.snapshot_create_volume_permission import SnapshotCreateVolumePermission +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = EbsVolume(self, "example", + availability_zone="us-west-2a", + size=40 + ) + example_snapshot = EbsSnapshot(self, "example_snapshot", + volume_id=example.id + ) + SnapshotCreateVolumePermission(self, "example_perm", + account_id="12345678", + snapshot_id=example_snapshot.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `snapshot_id` - (Required) A snapshot ID +* `account_id` - (Required) An AWS Account ID to add create volume permissions. The AWS Account cannot be the snapshot's owner + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A combination of "`snapshot_id`-`account_id`". + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sns_platform_application.html.markdown b/website/docs/cdktf/python/r/sns_platform_application.html.markdown new file mode 100644 index 00000000000..00f2a8de7cb --- /dev/null +++ b/website/docs/cdktf/python/r/sns_platform_application.html.markdown @@ -0,0 +1,134 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_platform_application" +description: |- + Provides an SNS platform application resource. +--- + + + +# Resource: aws_sns_platform_application + +Provides an SNS platform application resource + +## Example Usage + +### Apple Push Notification Service (APNS) using certificate-based authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sns_platform_application import SnsPlatformApplication +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SnsPlatformApplication(self, "apns_application", + name="apns_application", + platform="APNS", + platform_credential="", + platform_principal="" + ) +``` + +### Apple Push Notification Service (APNS) using token-based authentication + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sns_platform_application import SnsPlatformApplication +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SnsPlatformApplication(self, "apns_application", + apple_platform_bundle_id="", + apple_platform_team_id="", + name="apns_application", + platform="APNS", + platform_credential="", + platform_principal="" + ) +``` + +### Google Cloud Messaging (GCM) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.sns_platform_application import SnsPlatformApplication
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SnsPlatformApplication(self, "gcm_application",
+            name="gcm_application",
+            platform="GCM",
+            platform_credential=""
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The friendly name for the SNS platform application
+* `platform` - (Required) The platform that the app is registered with. See [Platform][1] for supported platforms.
+* `platform_credential` - (Required) Application Platform credential. See [Credential][1] for type of credential required for platform. The value of this attribute when stored into the Terraform state is only a hash of the real value, so it is not practical to use this as an attribute for other resources.
+* `event_delivery_failure_topic_arn` - (Optional) The ARN of the SNS Topic triggered when a delivery to any of the platform endpoints associated with your platform application encounters a permanent failure.
+* `event_endpoint_created_topic_arn` - (Optional) The ARN of the SNS Topic triggered when a new platform endpoint is added to your platform application.
+* `event_endpoint_deleted_topic_arn` - (Optional) The ARN of the SNS Topic triggered when an existing platform endpoint is deleted from your platform application.
+* `event_endpoint_updated_topic_arn` - (Optional) The ARN of the SNS Topic triggered when an existing platform endpoint is changed from your platform application.
+* `failure_feedback_role_arn` - (Optional) The IAM role ARN permitted to receive failure feedback for this application and give SNS write access to use CloudWatch logs on your behalf.
+* `platform_principal` - (Optional) Application Platform principal. See [Principal][2] for type of principal required for platform. The value of this attribute when stored into the Terraform state is only a hash of the real value, so it is not practical to use this as an attribute for other resources.
+* `success_feedback_role_arn` - (Optional) The IAM role ARN permitted to receive success feedback for this application and give SNS write access to use CloudWatch logs on your behalf.
+* `success_feedback_sample_rate` - (Optional) The sample rate percentage (0-100) of successfully delivered messages.
+
+The following attributes are needed only when using APNS token credentials:
+
+* `apple_platform_team_id` - (Required) The identifier that's assigned to your Apple developer account team. Must be 10 alphanumeric characters.
+* `apple_platform_bundle_id` - (Required) The bundle identifier that's assigned to your iOS app. May only include alphanumeric characters, hyphens (-), and periods (.).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the SNS platform application
+* `arn` - The ARN of the SNS platform application
+
+[1]: http://docs.aws.amazon.com/sns/latest/dg/mobile-push-send-register.html
+[2]: http://docs.aws.amazon.com/sns/latest/api/API_CreatePlatformApplication.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS platform applications using the ARN.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SNS platform applications using the ARN. For example: + +```console +% terraform import aws_sns_platform_application.gcm_application arn:aws:sns:us-west-2:0123456789012:app/GCM/gcm_application +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sns_sms_preferences.html.markdown b/website/docs/cdktf/python/r/sns_sms_preferences.html.markdown new file mode 100644 index 00000000000..64b13a8b978 --- /dev/null +++ b/website/docs/cdktf/python/r/sns_sms_preferences.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_sms_preferences" +description: |- + Provides a way to set SNS SMS preferences. +--- + + + +# Resource: aws_sns_sms_preferences + +Provides a way to set SNS SMS preferences. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sns_sms_preferences import SnsSmsPreferences +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SnsSmsPreferences(self, "update_sms_prefs") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `monthly_spend_limit` - (Optional) The maximum amount in USD that you are willing to spend each month to send SMS messages. +* `delivery_status_iam_role_arn` - (Optional) The ARN of the IAM role that allows Amazon SNS to write logs about SMS deliveries in CloudWatch Logs. +* `delivery_status_success_sampling_rate` - (Optional) The percentage of successful SMS deliveries for which Amazon SNS will write logs in CloudWatch Logs. The value must be between 0 and 100. +* `default_sender_id` - (Optional) A string, such as your business brand, that is displayed as the sender on the receiving device. +* `default_sms_type` - (Optional) The type of SMS message that you will send by default. Possible values are: Promotional, Transactional +* `usage_report_s3_bucket` - (Optional) The name of the Amazon S3 bucket to receive daily SMS usage reports from Amazon SNS. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sns_topic.html.markdown b/website/docs/cdktf/python/r/sns_topic.html.markdown new file mode 100644 index 00000000000..1397132661e --- /dev/null +++ b/website/docs/cdktf/python/r/sns_topic.html.markdown @@ -0,0 +1,158 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_topic" +description: |- + Provides an SNS topic resource. +--- + + + +# Resource: aws_sns_topic + +Provides an SNS topic resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SnsTopic(self, "user_updates", + name="user-updates-topic" + ) +``` + +## Example with Delivery Policy + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SnsTopic(self, "user_updates", + delivery_policy="{\n \"http\": {\n \"defaultHealthyRetryPolicy\": {\n \"minDelayTarget\": 20,\n \"maxDelayTarget\": 20,\n \"numRetries\": 3,\n \"numMaxDelayRetries\": 0,\n \"numNoDelayRetries\": 0,\n \"numMinDelayRetries\": 0,\n \"backoffFunction\": \"linear\"\n },\n \"disableSubscriptionOverrides\": false,\n \"defaultThrottlePolicy\": {\n \"maxReceivesPerSecond\": 1\n }\n }\n}\n\n", + name="user-updates-topic" + ) +``` + +## Example with Server-side encryption (SSE) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SnsTopic(self, "user_updates", + kms_master_key_id="alias/aws/sns", + name="user-updates-topic" + ) +``` + +## Example with First-In-First-Out (FIFO) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sns_topic import SnsTopic +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SnsTopic(self, "user_updates", + content_based_deduplication=True, + fifo_topic=True, + name="user-updates-topic.fifo" + ) +``` + +## Message Delivery Status Arguments + +The `_success_feedback_role_arn` and `_failure_feedback_role_arn` arguments are used to give Amazon SNS write access to use CloudWatch Logs on your behalf. The `_success_feedback_sample_rate` argument is for specifying the sample rate percentage (0-100) of successfully delivered messages. After you configure the `_failure_feedback_role_arn` argument, then all failed message deliveries generate CloudWatch Logs. + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the topic. Topic names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 256 characters long. For a FIFO (first-in-first-out) topic, the name must end with the `.fifo` suffix. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix` +* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name` +* `display_name` - (Optional) The display name for the topic +* `policy` - (Optional) The fully-formed AWS policy as JSON. 
For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+* `delivery_policy` - (Optional) The SNS delivery policy. See the [AWS documentation](https://docs.aws.amazon.com/sns/latest/dg/DeliveryPolicies.html) for more details.
+* `application_success_feedback_role_arn` - (Optional) The IAM role permitted to receive success feedback for this topic
+* `application_success_feedback_sample_rate` - (Optional) Percentage of success to sample
+* `application_failure_feedback_role_arn` - (Optional) IAM role for failure feedback
+* `http_success_feedback_role_arn` - (Optional) The IAM role permitted to receive success feedback for this topic
+* `http_success_feedback_sample_rate` - (Optional) Percentage of success to sample
+* `http_failure_feedback_role_arn` - (Optional) IAM role for failure feedback
+* `kms_master_key_id` - (Optional) The ID of an AWS-managed customer master key (CMK) for Amazon SNS or a custom CMK. For more information, see [Key Terms](https://docs.aws.amazon.com/sns/latest/dg/sns-server-side-encryption.html#sse-key-terms)
+* `signature_version` - (Optional) Whether `SignatureVersion` should be [1 (SHA1) or 2 (SHA256)](https://docs.aws.amazon.com/sns/latest/dg/sns-verify-signature-of-message.html). The signature version corresponds to the hashing algorithm used while creating the signature of the notifications, subscription confirmations, or unsubscribe confirmation messages sent by Amazon SNS.
+* `tracing_config` - (Optional) Tracing mode of an Amazon SNS topic. Valid values: `"PassThrough"`, `"Active"`.
+* `fifo_topic` - (Optional) Boolean indicating whether or not to create a FIFO (first-in-first-out) topic (default is `false`).
+* `content_based_deduplication` - (Optional) Enables content-based deduplication for FIFO topics. For more information, see the [related documentation](https://docs.aws.amazon.com/sns/latest/dg/fifo-message-dedup.html)
+* `lambda_success_feedback_role_arn` - (Optional) The IAM role permitted to receive success feedback for this topic
+* `lambda_success_feedback_sample_rate` - (Optional) Percentage of success to sample
+* `lambda_failure_feedback_role_arn` - (Optional) IAM role for failure feedback
+* `sqs_success_feedback_role_arn` - (Optional) The IAM role permitted to receive success feedback for this topic
+* `sqs_success_feedback_sample_rate` - (Optional) Percentage of success to sample
+* `sqs_failure_feedback_role_arn` - (Optional) IAM role for failure feedback
+* `firehose_success_feedback_role_arn` - (Optional) The IAM role permitted to receive success feedback for this topic
+* `firehose_success_feedback_sample_rate` - (Optional) Percentage of success to sample
+* `firehose_failure_feedback_role_arn` - (Optional) IAM role for failure feedback
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
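+
+To illustrate the message delivery status arguments described above, here is a minimal hand-written sketch (not `cdktf convert` output) that enables delivery status logging for SQS deliveries; the role ARN is a placeholder for an IAM role that allows SNS to write to CloudWatch Logs:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.sns_topic import SnsTopic
+class DeliveryStatusSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SnsTopic(self, "user_updates",
+            name="user-updates-topic",
+            # Placeholder ARN; SNS assumes this role to write delivery logs.
+            sqs_success_feedback_role_arn="arn:aws:iam::123456789012:role/sns-delivery-status",
+            sqs_success_feedback_sample_rate=100,
+            sqs_failure_feedback_role_arn="arn:aws:iam::123456789012:role/sns-delivery-status"
+        )
+```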
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the SNS topic +* `arn` - The ARN of the SNS topic, as a more obvious property (clone of id) +* `owner` - The AWS Account ID of the SNS topic owner +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Topics using the topic `arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SNS Topics using the topic `arn`. For example: + +```console +% terraform import aws_sns_topic.user_updates arn:aws:sns:us-west-2:0123456789012:my-topic +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sns_topic_data_protection_policy.html.markdown b/website/docs/cdktf/python/r/sns_topic_data_protection_policy.html.markdown new file mode 100644 index 00000000000..c8be77f27f3 --- /dev/null +++ b/website/docs/cdktf/python/r/sns_topic_data_protection_policy.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_topic_data_protection_policy" +description: |- + Provides an SNS data protection topic policy resource. +--- + + + +# Resource: aws_sns_topic_data_protection_policy + +Provides an SNS data protection topic policy resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sns_topic import SnsTopic +from imports.aws.sns_topic_data_protection_policy import SnsTopicDataProtectionPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SnsTopic(self, "example", + name="example" + ) + aws_sns_topic_data_protection_policy_example = + SnsTopicDataProtectionPolicy(self, "example_1", + arn=example.arn, + policy=Token.as_string( + Fn.jsonencode({ + "Description": "Example data protection policy", + "Name": "__example_data_protection_policy", + "Statement": [{ + "DataDirection": "Inbound", + "DataIdentifier": ["arn:aws:dataprotection::aws:data-identifier/EmailAddress" + ], + "Operation": { + "Deny": {} + }, + "Principal": ["*"], + "Sid": "__deny_statement_11ba9d96" + } + ], + "Version": "2021-06-01" + })) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sns_topic_data_protection_policy_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `arn` - (Required) The ARN of the SNS topic +* `policy` - (Required) The fully-formed AWS policy as JSON. 
For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Data Protection Topic Policy using the topic ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SNS Data Protection Topic Policy using the topic ARN. For example:
+
+```console
+% terraform import aws_sns_topic_data_protection_policy.example arn:aws:sns:us-west-2:0123456789012:example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sns_topic_policy.html.markdown b/website/docs/cdktf/python/r/sns_topic_policy.html.markdown
new file mode 100644
index 00000000000..65277b81fdf
--- /dev/null
+++ b/website/docs/cdktf/python/r/sns_topic_policy.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "SNS (Simple Notification)"
+layout: "aws"
+page_title: "AWS: aws_sns_topic_policy"
+description: |-
+  Provides an SNS topic policy resource.
+---
+
+
+
+# Resource: aws_sns_topic_policy
+
+Provides an SNS topic policy resource
+
+~> **NOTE:** If a Principal is specified as just an AWS account ID rather than an ARN, AWS silently converts it to the ARN for the root user, causing future terraform plans to differ. To avoid this problem, just specify the full ARN, e.g., `arn:aws:iam::123456789012:root`
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.sns_topic import SnsTopic
+from imports.aws.sns_topic_policy import SnsTopicPolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        test = SnsTopic(self, "test",
+            name="my-topic-with-policy"
+        )
+        sns_topic_policy = DataAwsIamPolicyDocument(self, "sns_topic_policy",
+            policy_id="__default_policy_ID",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["SNS:Subscribe", "SNS:SetTopicAttributes", "SNS:RemovePermission", "SNS:Receive", "SNS:Publish", "SNS:ListSubscriptionsByTopic", "SNS:GetTopicAttributes", "SNS:DeleteTopic", "SNS:AddPermission"
+                ],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="StringEquals",
+                    values=[account_id.string_value],
+                    variable="AWS:SourceOwner"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="AWS"
+                )
+                ],
+                resources=[test.arn],
+                sid="__default_statement_ID"
+            )
+            ]
+        )
+        SnsTopicPolicy(self, "default",
+            arn=test.arn,
+            policy=Token.as_string(sns_topic_policy.json)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `arn` - (Required) The ARN of the SNS topic
+* `policy` - (Required) The fully-formed AWS policy as JSON.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `owner` - The AWS Account ID of the SNS topic owner
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Topic Policy using the topic ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SNS Topic Policy using the topic ARN. For example:
+
+```console
+% terraform import aws_sns_topic_policy.user_updates arn:aws:sns:us-west-2:0123456789012:my-topic
+```
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sns_topic_subscription.html.markdown b/website/docs/cdktf/python/r/sns_topic_subscription.html.markdown new file mode 100644 index 00000000000..59907cddc7e --- /dev/null +++ b/website/docs/cdktf/python/r/sns_topic_subscription.html.markdown @@ -0,0 +1,326 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_topic_subscription" +description: |- + Provides a resource for subscribing to SNS topics. +--- + + + +# Resource: aws_sns_topic_subscription
+
+Provides a resource for subscribing to SNS topics. Requires that an SNS topic exist for the subscription to attach to. This resource allows you to automatically place messages sent to SNS topics in SQS queues, send them as HTTP(S) POST requests to a given endpoint, send SMS messages, or notify devices / applications. The most likely use case for Terraform users is SQS queues.
+
+~> **NOTE:** If the SNS topic and SQS queue are in different AWS regions, the `aws_sns_topic_subscription` must use an AWS provider that is in the same region as the SNS topic. If the `aws_sns_topic_subscription` uses a provider with a different region than the SNS topic, Terraform will fail to create the subscription.
+
+~> **NOTE:** Setup of cross-account subscriptions from SNS topics to SQS queues requires Terraform to have access to BOTH accounts.
+
+~> **NOTE:** If an SNS topic and SQS queue are in different AWS accounts but the same region, the `aws_sns_topic_subscription` must use the AWS provider for the account with the SQS queue. If `aws_sns_topic_subscription` uses a provider with a different account than the SQS queue, Terraform creates the subscription but does not keep state and tries to re-create the subscription at every `apply`.
+
+~> **NOTE:** If an SNS topic and SQS queue are in different AWS accounts and different AWS regions, the subscription needs to be initiated from the account with the SQS queue but in the region of the SNS topic.
+
+~> **NOTE:** You cannot unsubscribe from a subscription that is pending confirmation. If you use `email`, `email-json`, or `http`/`https` (without auto-confirmation enabled), until the subscription is confirmed (e.g., outside of Terraform), AWS does not allow Terraform to delete / unsubscribe the subscription. If you `destroy` an unconfirmed subscription, Terraform will remove the subscription from its state but the subscription will still exist in AWS.
However, if you delete an SNS topic, SNS [deletes all the subscriptions](https://docs.aws.amazon.com/sns/latest/dg/sns-delete-subscription-topic.html) associated with the topic. Also, you can import a subscription after confirmation and then delete it.
+
+## Example Usage
+
+You can directly supply the topic ARN by hand in the `topic_arn` property along with the queue ARN:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sns_topic_subscription import SnsTopicSubscription
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SnsTopicSubscription(self, "user_updates_sqs_target",
+            endpoint="arn:aws:sqs:us-west-2:432981146916:terraform-queue-too",
+            protocol="sqs",
+            topic_arn="arn:aws:sns:us-west-2:432981146916:user-updates-topic"
+        )
+```
+
+Alternatively, you can use the ARN properties of a managed SNS topic and SQS queue:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sns_topic import SnsTopic
+from imports.aws.sns_topic_subscription import SnsTopicSubscription
+from imports.aws.sqs_queue import SqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        user_updates = SnsTopic(self, "user_updates",
+            name="user-updates-topic"
+        )
+        user_updates_queue = SqsQueue(self, "user_updates_queue",
+            name="user-updates-queue"
+        )
+        SnsTopicSubscription(self, "user_updates_sqs_target",
+            endpoint=user_updates_queue.arn,
+            protocol="sqs",
+            topic_arn=user_updates.arn
+        )
+```
+
+You can subscribe SNS topics to SQS queues in different Amazon accounts and regions:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformVariable, property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.provider import AwsProvider
+from imports.aws.sns_topic import SnsTopic
+from imports.aws.sns_topic_subscription import SnsTopicSubscription
+from imports.aws.sqs_queue import SqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+ # You can read more about this at https://cdk.tf/variables + sns = TerraformVariable(self, "sns", + default=[{ + "account-id": "111111111111", + "display_name": "example", + "name": "example-sns-topic", + "region": "us-west-1", + "role-name": "service/service-hashicorp-terraform" + } + ] + ) + sqs = TerraformVariable(self, "sqs", + default=[{ + "account-id": "222222222222", + "name": "example-sqs-queue", + "region": "us-east-1", + "role-name": "service/service-hashicorp-terraform" + } + ] + ) + sns_topic_policy = DataAwsIamPolicyDocument(self, "sns-topic-policy", + policy_id="__default_policy_ID", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["SNS:Subscribe", "SNS:SetTopicAttributes", "SNS:RemovePermission", "SNS:Publish", "SNS:ListSubscriptionsByTopic", "SNS:GetTopicAttributes", "SNS:DeleteTopic", "SNS:AddPermission" + ], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=[ + Token.as_string(property_access(sns.value, ["\"account-id\""])) + ], + variable="AWS:SourceOwner" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=["arn:aws:sns:" + + Token.as_string(property_access(sns.value, ["\"region\""])) + ":" + + Token.as_string(property_access(sns.value, ["\"account-id\""])) + ":" + + Token.as_string(property_access(sns.value, ["\"name\""])) + ], + sid="__default_statement_ID" + ), DataAwsIamPolicyDocumentStatement( + actions=["SNS:Subscribe", "SNS:Receive"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringLike", + values=["arn:aws:sqs:" + + Token.as_string(property_access(sqs.value, ["\"region\""])) + ":" + + Token.as_string( + property_access(sqs.value, ["\"account-id\""])) + ":" + + Token.as_string(property_access(sqs.value, ["\"name\""])) + ], + variable="SNS:Endpoint" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=["arn:aws:sns:" + + Token.as_string(property_access(sns.value, ["\"region\""])) + ":" + + Token.as_string(property_access(sns.value, ["\"account-id\""])) + ":" + + Token.as_string(property_access(sns.value, ["\"name\""])) + ], + sid="__console_sub_0" + ) + ] + ) + sqs_queue_policy = DataAwsIamPolicyDocument(self, "sqs-queue-policy", + policy_id="arn:aws:sqs:" + + Token.as_string(property_access(sqs.value, ["\"region\""])) + ":" + + Token.as_string(property_access(sqs.value, ["\"account-id\""])) + ":" + + Token.as_string(property_access(sqs.value, ["\"name\""])) + "/SQSDefaultPolicy", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["SQS:SendMessage"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="ArnEquals", + values=["arn:aws:sns:" + + Token.as_string(property_access(sns.value, ["\"region\""])) + ":" + + Token.as_string( + property_access(sns.value, ["\"account-id\""])) + ":" + + Token.as_string(property_access(sns.value, ["\"name\""])) + ], + variable="aws:SourceArn" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["*"], + type="AWS" + ) + ], + resources=["arn:aws:sqs:" + + Token.as_string(property_access(sqs.value, ["\"region\""])) + ":" + + Token.as_string(property_access(sqs.value, ["\"account-id\""])) + ":" + + Token.as_string(property_access(sqs.value, ["\"name\""])) + ], + sid="example-sns-topic" + ) + ] + ) + aws_sns = AwsProvider(self, "aws", + alias="sns", + assume_role=[AwsProviderAssumeRole( + role_arn="arn:aws:iam::" + + 
Token.as_string(property_access(sns.value, ["\"account-id\""])) + ":role/" + + Token.as_string(property_access(sns.value, ["\"role-name\""])), + session_name="sns-" + Token.as_string(property_access(sns.value, ["\"region\""])) + ) + ], + region=Token.as_string(property_access(sns.value, ["\"region\""])) + ) + aws_sqs = AwsProvider(self, "aws_5", + alias="sqs", + assume_role=[AwsProviderAssumeRole( + role_arn="arn:aws:iam::" + + Token.as_string(property_access(sqs.value, ["\"account-id\""])) + ":role/" + + Token.as_string(property_access(sqs.value, ["\"role-name\""])), + session_name="sqs-" + Token.as_string(property_access(sqs.value, ["\"region\""])) + ) + ], + region=Token.as_string(property_access(sqs.value, ["\"region\""])) + ) + sns2_sqs = AwsProvider(self, "aws_6", + alias="sns2sqs", + assume_role=[AwsProviderAssumeRole( + role_arn="arn:aws:iam::" + + Token.as_string(property_access(sqs.value, ["\"account-id\""])) + ":role/" + + Token.as_string(property_access(sqs.value, ["\"role-name\""])), + session_name="sns2sqs-" + + Token.as_string(property_access(sns.value, ["\"region\""])) + ) + ], + region=Token.as_string(property_access(sns.value, ["\"region\""])) + ) + sns_topic = SnsTopic(self, "sns-topic", + display_name=Token.as_string( + property_access(sns.value, ["\"display_name\""])), + name=Token.as_string(property_access(sns.value, ["\"name\""])), + policy=Token.as_string(sns_topic_policy.json), + provider="${aws.sns}" + ) + sqs_queue = SqsQueue(self, "sqs-queue", + name=Token.as_string(property_access(sqs.value, ["\"name\""])), + policy=Token.as_string(sqs_queue_policy.json), + provider="${aws.sqs}" + ) + aws_sns_topic_subscription_sns_topic = SnsTopicSubscription(self, "sns-topic_9", + endpoint=sqs_queue.arn, + protocol="sqs", + provider=sns2_sqs, + topic_arn=sns_topic.arn + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sns_topic_subscription_sns_topic.override_logical_id("sns-topic") +``` + +## Argument Reference + +The following arguments are required: + +* `endpoint` - (Required) Endpoint to send data to. The contents vary with the protocol. See details below. +* `protocol` - (Required) Protocol to use. Valid values are: `sqs`, `sms`, `lambda`, `firehose`, and `application`. Protocols `email`, `email-json`, `http` and `https` are also valid but partially supported. See details below. +* `subscription_role_arn` - (Required if `protocol` is `firehose`) ARN of the IAM role to publish to Kinesis Data Firehose delivery stream. Refer to [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/sns-firehose-as-subscriber.html). +* `topic_arn` - (Required) ARN of the SNS topic to subscribe to. + +The following arguments are optional: + +* `confirmation_timeout_in_minutes` - (Optional) Integer indicating number of minutes to wait in retrying mode for fetching subscription arn before marking it as failure. Only applicable for http and https protocols. Default is `1`. +* `delivery_policy` - (Optional) JSON String with the delivery policy (retries, backoff, etc.) that will be used in the subscription - this only applies to HTTP/S subscriptions. Refer to the [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/DeliveryPolicies.html) for more details. +* `endpoint_auto_confirms` - (Optional) Whether the endpoint is capable of [auto confirming subscription](http://docs.aws.amazon.com/sns/latest/dg/SendMessageToHttp.html#SendMessageToHttp.prepare) (e.g., PagerDuty). Default is `false`. 
+
+* `filter_policy` - (Optional) JSON String with the filter policy that will be used in the subscription to filter messages seen by the target resource. Refer to the [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/message-filtering.html) for more details.
+* `filter_policy_scope` - (Optional) Whether the `filter_policy` applies to `MessageAttributes` (default) or `MessageBody`.
+* `raw_message_delivery` - (Optional) Whether to enable raw message delivery (the original message is directly passed, not wrapped in JSON with the original message in the message property). Default is `false`.
+* `redrive_policy` - (Optional) JSON String with the redrive policy that will be used in the subscription. Refer to the [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/sns-dead-letter-queues.html#how-messages-moved-into-dead-letter-queue) for more details.
+
+### Protocol support
+
+Supported values for `protocol` include:
+
+* `application` - Delivers JSON-encoded messages. `endpoint` is the endpoint ARN of a mobile app and device.
+* `firehose` - Delivers JSON-encoded messages. `endpoint` is the ARN of an Amazon Kinesis Data Firehose delivery stream (e.g.,
+`arn:aws:firehose:us-east-1:123456789012:deliverystream/ticketUploadStream`).
+* `lambda` - Delivers JSON-encoded messages. `endpoint` is the ARN of an AWS Lambda function.
+* `sms` - Delivers text messages via SMS. `endpoint` is the phone number of an SMS-enabled device.
+* `sqs` - Delivers JSON-encoded messages. `endpoint` is the ARN of an Amazon SQS queue (e.g., `arn:aws:sqs:us-west-2:123456789012:terraform-queue-too`).
+
+Partially supported values for `protocol` include:
+
+~> **NOTE:** If an `aws_sns_topic_subscription` uses a partially-supported protocol and the subscription is not confirmed, either through automatic confirmation or means outside of Terraform (e.g., clicking on a "Confirm Subscription" link in an email), Terraform cannot delete / unsubscribe the subscription. Attempting to `destroy` an unconfirmed subscription will remove the `aws_sns_topic_subscription` from Terraform's state but **_will not_** remove the subscription from AWS. The `pending_confirmation` attribute provides confirmation status.
+
+* `email` - Delivers messages via SMTP. `endpoint` is an email address.
+* `email-json` - Delivers JSON-encoded messages via SMTP. `endpoint` is an email address.
+* `http` - Delivers JSON-encoded messages via HTTP POST. `endpoint` is a URL beginning with `http://`.
+* `https` - Delivers JSON-encoded messages via HTTPS POST. `endpoint` is a URL beginning with `https://`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the subscription.
+* `confirmation_was_authenticated` - Whether the subscription confirmation request was authenticated.
+* `id` - ARN of the subscription.
+* `owner_id` - AWS account ID of the subscription's owner.
+* `pending_confirmation` - Whether the subscription is still pending confirmation.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Topic Subscriptions using the subscription `arn`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SNS Topic Subscriptions using the subscription `arn`. For example: + +```console +% terraform import aws_sns_topic_subscription.user_updates_sqs_target arn:aws:sns:us-west-2:0123456789012:my-topic:8a21d249-4329-4871-acc6-7be709c6ea7f +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/spot_datafeed_subscription.html.markdown b/website/docs/cdktf/python/r/spot_datafeed_subscription.html.markdown new file mode 100644 index 00000000000..ee00c6b9e40 --- /dev/null +++ b/website/docs/cdktf/python/r/spot_datafeed_subscription.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_spot_datafeed_subscription" +description: |- + Provides a Spot Datafeed Subscription resource. +--- + + + +# Resource: aws_spot_datafeed_subscription + +-> **Note:** There is only a single subscription allowed per account. + +To help you understand the charges for your Spot instances, Amazon EC2 provides a data feed that describes your Spot instance usage and pricing. +This data feed is sent to an Amazon S3 bucket that you specify when you subscribe to the data feed. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.s3_bucket import S3Bucket +from imports.aws.spot_datafeed_subscription import SpotDatafeedSubscription +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + default_var = S3Bucket(self, "default", + bucket="tf-spot-datafeed" + ) + aws_spot_datafeed_subscription_default = SpotDatafeedSubscription(self, "default_1", + bucket=default_var.id, + prefix="my_subdirectory" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_spot_datafeed_subscription_default.override_logical_id("default") +``` + +## Argument Reference + +* `bucket` - (Required) The Amazon S3 bucket in which to store the Spot instance data feed. +* `prefix` - (Optional) Path of folder inside bucket to place spot pricing data. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Spot Datafeed Subscription using the word `spot-datafeed-subscription`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import a Spot Datafeed Subscription using the word `spot-datafeed-subscription`. 
For example:
+
+```console
+% terraform import aws_spot_datafeed_subscription.mysubscription spot-datafeed-subscription
+```
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/r/spot_fleet_request.html.markdown b/website/docs/cdktf/python/r/spot_fleet_request.html.markdown new file mode 100644 index 00000000000..6d9c302a8f6 --- /dev/null +++ b/website/docs/cdktf/python/r/spot_fleet_request.html.markdown @@ -0,0 +1,489 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_spot_fleet_request" +description: |- + Provides a Spot Fleet Request resource. +--- + + + +# Resource: aws_spot_fleet_request
+
+Provides an EC2 Spot Fleet Request resource. This allows a fleet of Spot
+instances to be requested on the Spot market.
+
+~> **NOTE:** [AWS strongly discourages](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-best-practices.html#which-spot-request-method-to-use) the use of the legacy APIs called by this resource.
+We recommend using the [EC2 Fleet](ec2_fleet.html) or [Auto Scaling Group](autoscaling_group.html) resources instead.
+
+## Example Usage
+
+### Using launch specifications
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.spot_fleet_request import SpotFleetRequest
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SpotFleetRequest(self, "cheap_compute",
+            allocation_strategy="diversified",
+            iam_fleet_role="arn:aws:iam::12345678:role/spot-fleet",
+            launch_specification=[SpotFleetRequestLaunchSpecification(
+                ami="ami-1234",
+                iam_instance_profile_arn=example.arn,
+                instance_type="m4.10xlarge",
+                placement_tenancy="dedicated",
+                spot_price="2.793"
+            ), SpotFleetRequestLaunchSpecification(
+                ami="ami-5678",
+                availability_zone="us-west-1a",
+                iam_instance_profile_arn=example.arn,
+                instance_type="m4.4xlarge",
+                key_name="my-key",
+                root_block_device=[SpotFleetRequestLaunchSpecificationRootBlockDevice(
+                    volume_size=Token.as_number("300"),
+                    volume_type="gp2"
+                )
+                ],
+                spot_price="1.117",
+                subnet_id="subnet-1234",
+                tags={
+                    "Name": "spot-fleet-example"
+                },
+                weighted_capacity=Token.as_string(35)
+            )
+            ],
+            spot_price="0.03",
+            target_capacity=6,
+            valid_until="2019-11-04T20:44:20Z"
+        )
+```
+
+### Using launch templates
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.launch_template import LaunchTemplate +from imports.aws.spot_fleet_request import SpotFleetRequest +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foo = LaunchTemplate(self, "foo", + image_id="ami-516b9131", + instance_type="m1.small", + key_name="some-key", + name="launch-template" + ) + aws_spot_fleet_request_foo = SpotFleetRequest(self, "foo_1", + depends_on=[test_attach], + iam_fleet_role="arn:aws:iam::12345678:role/spot-fleet", + launch_template_config=[SpotFleetRequestLaunchTemplateConfig( + launch_template_specification=SpotFleetRequestLaunchTemplateConfigLaunchTemplateSpecification( + id=foo.id, + version=Token.as_string(foo.latest_version) + ) + ) + ], + spot_price="0.005", + target_capacity=2, + valid_until="2019-11-04T20:44:20Z" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_spot_fleet_request_foo.override_logical_id("foo") +``` + +~> **NOTE:** Terraform does not support the functionality where multiple `subnet_id` or `availability_zone` parameters can be specified in the same +launch configuration block. If you want to specify multiple values, then separate launch configuration blocks should be used or launch template overrides should be configured, one per subnet: + +### Using multiple launch specifications + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.spot_fleet_request import SpotFleetRequest +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SpotFleetRequest(self, "foo", + iam_fleet_role="arn:aws:iam::12345678:role/spot-fleet", + launch_specification=[SpotFleetRequestLaunchSpecification( + ami="ami-d06a90b0", + availability_zone="us-west-2a", + instance_type="m1.small", + key_name="my-key" + ), SpotFleetRequestLaunchSpecification( + ami="ami-d06a90b0", + availability_zone="us-west-2a", + instance_type="m5.large", + key_name="my-key" + ) + ], + spot_price="0.005", + target_capacity=2, + valid_until="2019-11-04T20:44:20Z" + ) +``` + +-> In this example, we use a [`dynamic` block](https://www.terraform.io/language/expressions/dynamic-blocks) to define zero or more `launch_specification` blocks, producing one for each element in the list of subnet ids. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformVariable, Token, TerraformIterator, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.spot_fleet_request import SpotFleetRequest +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + # Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + # You can read more about this at https://cdk.tf/variables + subnets = TerraformVariable(self, "subnets") + # In most cases loops should be handled in the programming language context and + # not inside of the Terraform context. If you are looping over something external, e.g. 
a variable or a file input + # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + # you need to keep this like it is. + example_dynamic_iterator0 = TerraformIterator.from_list( + Token.as_any("${[ for s in ${" + subnets.value + "} : {\n subnet_id = s[1]\n }]}")) + SpotFleetRequest(self, "example", + allocation_strategy="lowestPrice", + fleet_type="request", + iam_fleet_role="arn:aws:iam::12345678:role/spot-fleet", + target_capacity=3, + terminate_instances_with_expiration=Token.as_boolean("true"), + valid_until="2019-11-04T20:44:20Z", + wait_for_fulfillment=Token.as_boolean("true"), + launch_specification=example_dynamic_iterator0.dynamic({ + "ami": "ami-1234", + "instance_type": "m4.4xlarge", + "root_block_device": [{ + "delete_on_termination": "true", + "volume_size": "8", + "volume_type": "gp2" + } + ], + "subnet_id": property_access(example_dynamic_iterator0.value, ["subnet_id"]), + "tags": { + "Name": "Spot Node", + "tag_builder": "builder" + }, + "vpc_security_group_ids": "sg-123456" + }) + ) +``` + +### Using multiple launch configurations + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_subnets import DataAwsSubnets +from imports.aws.launch_template import LaunchTemplate +from imports.aws.spot_fleet_request import SpotFleetRequest +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foo = LaunchTemplate(self, "foo", + image_id="ami-516b9131", + instance_type="m1.small", + key_name="some-key", + name="launch-template" + ) + example = DataAwsSubnets(self, "example", + filter=[DataAwsSubnetsFilter( + name="vpc-id", + values=[vpc_id.string_value] + ) + ] + ) + aws_spot_fleet_request_foo = SpotFleetRequest(self, "foo_2", + depends_on=[test_attach], + iam_fleet_role="arn:aws:iam::12345678:role/spot-fleet", + launch_template_config=[SpotFleetRequestLaunchTemplateConfig( + launch_template_specification=SpotFleetRequestLaunchTemplateConfigLaunchTemplateSpecification( + id=foo.id, + version=Token.as_string(foo.latest_version) + ), + overrides=[SpotFleetRequestLaunchTemplateConfigOverrides( + subnet_id=Token.as_string(property_access(example.ids, ["0"])) + ), SpotFleetRequestLaunchTemplateConfigOverrides( + subnet_id=Token.as_string(property_access(example.ids, ["1"])) + ), SpotFleetRequestLaunchTemplateConfigOverrides( + subnet_id=Token.as_string(property_access(example.ids, ["2"])) + ) + ] + ) + ], + spot_price="0.005", + target_capacity=2, + valid_until="2019-11-04T20:44:20Z" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_spot_fleet_request_foo.override_logical_id("foo") +``` + +## Argument Reference + +Most of these arguments directly correspond to the +[official API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_SpotFleetRequestConfigData.html). + +* `iam_fleet_role` - (Required) Grants the Spot fleet permission to terminate + Spot instances on your behalf when you cancel its Spot fleet request using +CancelSpotFleetRequests or when the Spot fleet request expires, if you set +terminateInstancesWithExpiration. +* `context` - (Optional) Reserved. 
+
+* `replace_unhealthy_instances` - (Optional) Indicates whether Spot fleet should replace unhealthy instances. Default `false`.
+* `launch_specification` - (Optional) Used to define the launch configuration of the
+  spot-fleet request. Can be specified multiple times to define different bids
+  across different markets and instance types. Conflicts with `launch_template_config`. At least one of `launch_specification` or `launch_template_config` is required.
+
+    **Note**: This takes similar, but not identical, inputs as [`aws_instance`](instance.html). There are limitations on
+    what you can specify. See the list of officially supported inputs in the
+    [reference documentation](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_SpotFleetLaunchSpecification.html). Any normal [`aws_instance`](instance.html) parameter that corresponds to those inputs may be used. There is also an additional parameter, `iam_instance_profile_arn`, which takes the `arn` attribute of an `aws_iam_instance_profile` as input.
+
+* `launch_template_config` - (Optional) Launch template configuration block. See [Launch Template Configs](#launch-template-configs) below for more details. Conflicts with `launch_specification`. At least one of `launch_specification` or `launch_template_config` is required.
+* `spot_maintenance_strategies` - (Optional) Nested argument containing maintenance strategies for managing your Spot Instances that are at an elevated risk of being interrupted. Defined below.
+* `spot_price` - (Optional; Default: On-demand price) The maximum bid price per unit hour.
+* `wait_for_fulfillment` - (Optional; Default: false) If set, Terraform will
+  wait for the Spot Request to be fulfilled, and will throw an error if the
+  timeout of 10m is reached.
+* `target_capacity` - The number of units to request. You can choose to set the
+  target capacity in terms of instances or a performance characteristic that is
+  important to your application workload, such as vCPUs, memory, or I/O.
+* `target_capacity_unit_type` - (Optional) The unit for the target capacity. This can only be specified when `instance_requirements` is defined.
+* `allocation_strategy` - Indicates how to allocate the target capacity across
+  the Spot pools specified by the Spot fleet request. Valid values: `lowestPrice`, `diversified`, `capacityOptimized`, `capacityOptimizedPrioritized`, and `priceCapacityOptimized`. The default is
+  `lowestPrice`.
+* `instance_pools_to_use_count` - (Optional; Default: 1)
+  The number of Spot pools across which to allocate your target Spot capacity.
+  Valid only when `allocation_strategy` is set to `lowestPrice`. Spot Fleet selects
+  the cheapest Spot pools and evenly allocates your target Spot capacity across
+  the number of Spot pools that you specify.
+* `excess_capacity_termination_policy` - Indicates whether running Spot
+  instances should be terminated if the target capacity of the Spot fleet
+  request is decreased below the current size of the Spot fleet.
+* `terminate_instances_with_expiration` - (Optional) Indicates whether running Spot
+  instances should be terminated when the Spot fleet request expires.
+* `terminate_instances_on_delete` - (Optional) Indicates whether running Spot
+  instances should be terminated when the resource is deleted (and the Spot fleet request cancelled).
+  If no value is specified, the value of the `terminate_instances_with_expiration` argument is used.
+* `instance_interruption_behaviour` - (Optional) Indicates whether a Spot
+  instance stops or terminates when it is interrupted. Default is `terminate`.
+* `fleet_type` - (Optional) The type of fleet request. Indicates whether the Spot Fleet only requests the target
+  capacity or also attempts to maintain it. Default is `maintain`.
+* `valid_until` - (Optional) The end date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). At this point, no new Spot instance requests are placed or enabled to fulfill the request.
+* `valid_from` - (Optional) The start date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). The default is to start fulfilling the request immediately.
+* `load_balancers` - (Optional) A list of elastic load balancer names to add to the Spot fleet.
+* `target_group_arns` - (Optional) A list of `aws_alb_target_group` ARNs, for use with Application Load Balancing.
+* `on_demand_allocation_strategy` - The order of the launch template overrides to use in fulfilling On-Demand capacity. Valid values are `lowestPrice` and `prioritized`. The default is `lowestPrice`.
+* `on_demand_max_total_price` - The maximum amount per hour for On-Demand Instances that you're willing to pay. When the maximum amount you're willing to pay is reached, the fleet stops launching instances even if it hasn’t met the target capacity.
+* `on_demand_target_capacity` - The number of On-Demand units to request. If the request type is `maintain`, you can specify a target capacity of 0 and add capacity later.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Launch Template Configs
+
+The `launch_template_config` block supports the following:
+
+* `launch_template_specification` - (Required) Launch template specification. See [Launch Template Specification](#launch-template-specification) below for more details.
+* `overrides` - (Optional) One or more override configurations. See [Overrides](#overrides) below for more details.
+
+### Launch Template Specification
+
+* `id` - The ID of the launch template. Conflicts with `name`.
+* `name` - The name of the launch template. Conflicts with `id`.
+* `version` - (Optional) Template version. Unlike the autoscaling equivalent, does not support `$Latest` or `$Default`, so use the launch_template resource's attribute, e.g., `"${aws_launch_template.foo.latest_version}"`. It will use the default version if omitted.
+
+    **Note:** The specified launch template can specify only a subset of the
+    inputs of [`aws_launch_template`](launch_template.html). There are limitations on
+    what you can specify, as Spot Fleet does not support all the attributes that are supported by autoscaling groups. [AWS documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-launch-templates.html#launch-templates-spot-fleet) is currently sparse, but at least `instance_initiated_shutdown_behavior` is confirmed unsupported.
+
+### spot_maintenance_strategies
+
+* `capacity_rebalance` - (Optional) Nested argument containing the capacity rebalance strategy for your fleet request. Defined below.
+
+### capacity_rebalance
+
+* `replacement_strategy` - (Optional) The replacement strategy to use. Only available for spot fleets with `fleet_type` set to `maintain`. Valid values: `launch`.
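+
+As a rough illustration (hand-written, not produced by `cdktf convert`), a `maintain`-type fleet that opts into capacity rebalancing might look like the sketch below. The role ARN and AMI are placeholders, and the nested `SpotFleetRequestSpotMaintenanceStrategies*` class names are assumed to follow the generated provider bindings' usual naming:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.spot_fleet_request import (
+    SpotFleetRequest,
+    SpotFleetRequestLaunchSpecification,
+    SpotFleetRequestSpotMaintenanceStrategies,
+    SpotFleetRequestSpotMaintenanceStrategiesCapacityRebalance,
+)
+class RebalancingFleet(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SpotFleetRequest(self, "rebalancing",
+            iam_fleet_role="arn:aws:iam::12345678:role/spot-fleet",  # placeholder
+            fleet_type="maintain",  # capacity rebalancing is only available for maintain-type fleets
+            target_capacity=2,
+            launch_specification=[SpotFleetRequestLaunchSpecification(
+                ami="ami-1234",  # placeholder
+                instance_type="m4.large"
+            )
+            ],
+            spot_maintenance_strategies=SpotFleetRequestSpotMaintenanceStrategies(
+                capacity_rebalance=SpotFleetRequestSpotMaintenanceStrategiesCapacityRebalance(
+                    replacement_strategy="launch"  # launch a replacement before the interrupted instance is reclaimed
+                )
+            )
+        )
+```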
+
+### Overrides
+
+* `availability_zone` - (Optional) The availability zone in which to place the request.
+* `instance_requirements` - (Optional) The instance requirements. See below.
+* `instance_type` - (Optional) The type of instance to request.
+* `priority` - (Optional) The priority for the launch template override. The lower the number, the higher the priority. If no number is set, the launch template override has the lowest priority.
+* `spot_price` - (Optional) The maximum spot bid for this override request.
+* `subnet_id` - (Optional) The subnet in which to launch the requested instance.
+* `weighted_capacity` - (Optional) The capacity added to the fleet by a fulfilled request.
+
+### Instance Requirements
+
+This configuration block supports the following:
+
+* `accelerator_count` - (Optional) Block describing the minimum and maximum number of accelerators (GPUs, FPGAs, or AWS Inferentia chips). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum. Set to `0` to exclude instance types with accelerators.
+* `accelerator_manufacturers` - (Optional) List of accelerator manufacturer names. Default is any manufacturer.
+
+    ```
+    Valid names:
+    * amazon-web-services
+    * amd
+    * nvidia
+    * xilinx
+    ```
+
+* `accelerator_names` - (Optional) List of accelerator names. Default is any accelerator.
+
+    ```
+    Valid names:
+    * a100 - NVIDIA A100 GPUs
+    * v100 - NVIDIA V100 GPUs
+    * k80 - NVIDIA K80 GPUs
+    * t4 - NVIDIA T4 GPUs
+    * m60 - NVIDIA M60 GPUs
+    * radeon-pro-v520 - AMD Radeon Pro V520 GPUs
+    * vu9p - Xilinx VU9P FPGAs
+    ```
+
+* `accelerator_total_memory_mib` - (Optional) Block describing the minimum and maximum total memory of the accelerators. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `accelerator_types` - (Optional) List of accelerator types. Default is any accelerator type.
+
+    ```
+    Valid types:
+    * fpga
+    * gpu
+    * inference
+    ```
+
+* `allowed_instance_types` - (Optional) List of instance types to apply your specified attributes against. All other instance types are ignored, even if they match your specified attributes. You can use strings with one or more wild cards, represented by an asterisk (\*), to allow an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are allowing the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are allowing all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is all instance types.
+
+    ~> **NOTE:** If you specify `allowed_instance_types`, you can't specify `excluded_instance_types`.
+
+* `bare_metal` - (Optional) Indicate whether bare metal instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `baseline_ebs_bandwidth_mbps` - (Optional) Block describing the minimum and maximum baseline EBS bandwidth, in Mbps. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `burstable_performance` - (Optional) Indicate whether burstable performance instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `cpu_manufacturers` - (Optional) List of CPU manufacturer names. Default is any manufacturer.
+
+    ~> **NOTE:** Don't confuse the CPU hardware manufacturer with the CPU hardware architecture. Instances will be launched with a compatible CPU architecture based on the Amazon Machine Image (AMI) that you specify in your launch template.
+
+    ```
+    Valid names:
+    * amazon-web-services
+    * amd
+    * intel
+    ```
+
+* `excluded_instance_types` - (Optional) List of instance types to exclude. You can use strings with one or more wild cards, represented by an asterisk (\*), to exclude an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are excluding the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are excluding all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is no excluded instance types.
+
+    ~> **NOTE:** If you specify `excluded_instance_types`, you can't specify `allowed_instance_types`.
+
+* `instance_generations` - (Optional) List of instance generation names. Default is any generation.
+
+    ```
+    Valid names:
+    * current - Recommended for best performance.
+    * previous - For existing applications optimized for older instance types.
+    ```
+
+* `local_storage` - (Optional) Indicate whether instance types with local storage volumes are `included`, `excluded`, or `required`. Default is `included`.
+* `local_storage_types` - (Optional) List of local storage type names. Default is any storage type.
+
+    ```
+    Valid names:
+    * hdd - hard disk drive
+    * ssd - solid state drive
+    ```
+
+* `memory_gib_per_vcpu` - (Optional) Block describing the minimum and maximum amount of memory (GiB) per vCPU. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+* `memory_mib` - (Optional) Block describing the minimum and maximum amount of memory (MiB). Default is no maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `network_bandwidth_gbps` - (Optional) Block describing the minimum and maximum amount of network bandwidth, in gigabits per second (Gbps). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `network_interface_count` - (Optional) Block describing the minimum and maximum number of network interfaces. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `on_demand_max_price_percentage_over_lowest_price` - (Optional) The price protection threshold for On-Demand Instances. This is the maximum you’ll pay for an On-Demand Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, we will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 20.
+
+    If you set DesiredCapacityType to vcpu or memory-mib, the price protection threshold is applied based on the per vCPU or per memory price instead of the per instance price.
+* `require_hibernate_support` - (Optional) Indicate whether instance types must support On-Demand Instance Hibernation, either `true` or `false`. Default is `false`.
+* `spot_max_price_percentage_over_lowest_price` - (Optional) The price protection threshold for Spot Instances.
This is the maximum you’ll pay for a Spot Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, we will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 100. + + If you set DesiredCapacityType to vcpu or memory-mib, the price protection threshold is applied based on the per vCPU or per memory price instead of the per instance price. +* `total_local_storage_gb` - (Optional) Block describing the minimum and maximum total local storage (GB). Default is no minimum or maximum. + * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`. + * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`. +* `vcpu_count` - (Optional) Block describing the minimum and maximum number of vCPUs. Default is no maximum. + * `min` - (Optional) Minimum. + * `max` - (Optional) Maximum. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Spot fleet request ID +* `spot_request_state` - The state of the Spot fleet request. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10m`) +* `delete` - (Default `15m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Spot Fleet Requests using `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Spot Fleet Requests using `id`. For example: + +```console +% terraform import aws_spot_fleet_request.fleet sfr-005e9ec8-5546-4c31-b317-31a62325411e +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/spot_instance_request.html.markdown b/website/docs/cdktf/python/r/spot_instance_request.html.markdown new file mode 100644 index 00000000000..3168a43f0b9 --- /dev/null +++ b/website/docs/cdktf/python/r/spot_instance_request.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_spot_instance_request" +description: |- + Provides a Spot Instance Request resource. +--- + + + +# Resource: aws_spot_instance_request + +Provides an EC2 Spot Instance Request resource. This allows instances to be +requested on the spot market. + +By default Terraform creates Spot Instance Requests with a `persistent` type, +which means that for the duration of their lifetime, AWS will launch an +instance with the configured details if and when the spot market will accept +the requested price. + +On destruction, Terraform will make an attempt to terminate the associated Spot +Instance if there is one present. 
+
+Spot Instance requests with a `one-time` type will close the Spot request
+when the instance is terminated, either because the request price falls below
+the current Spot price or because a user terminates the instance.
+
+~> **NOTE:** Because their behavior depends on the live status of the spot
+market, Spot Instance Requests have a unique lifecycle that makes them behave
+differently than other Terraform resources. Most importantly: there is __no
+guarantee__ that a Spot Instance exists to fulfill the request at any given
+point in time. See the [AWS Spot Instance
+documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-spot-instances.html)
+for more information.
+
+~> **NOTE:** [AWS strongly discourages](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-best-practices.html#which-spot-request-method-to-use) the use of the legacy APIs called by this resource.
+We recommend using the [EC2 Instance](instance.html) resource with `instance_market_options` instead.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.spot_instance_request import SpotInstanceRequest
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SpotInstanceRequest(self, "cheap_worker",
+            ami="ami-1234",
+            instance_type="c4.xlarge",
+            spot_price="0.03",
+            tags={
+                "Name": "CheapWorker"
+            }
+        )
+```
+
+## Argument Reference
+
+Spot Instance Requests support all the same arguments as
+[`aws_instance`](instance.html), with the addition of:
+
+* `spot_price` - (Optional; Default: On-demand price) The maximum price to request on the spot market.
+* `wait_for_fulfillment` - (Optional; Default: false) If set, Terraform will
+  wait for the Spot Request to be fulfilled, and will throw an error if the
+  timeout of 10m is reached.
+* `spot_type` - (Optional; Default: `persistent`) If set to `one-time`, after
+  the instance is terminated, the spot request will be closed. See the sketch after this list.
+* `launch_group` - (Optional) A launch group is a group of spot instances that launch together and terminate together.
+  If left empty, instances are launched and terminated individually.
+* `block_duration_minutes` - (Optional) The required duration for the Spot instances, in minutes. This value must be a multiple of 60 (60, 120, 180, 240, 300, or 360).
+  The duration period starts as soon as your Spot instance receives its instance ID. At the end of the duration period, Amazon EC2 marks the Spot instance for termination and provides a Spot instance termination notice, which gives the instance a two-minute warning before it terminates.
+  Note that you can't specify an Availability Zone group or a launch group if you specify a duration.
+* `instance_interruption_behavior` - (Optional) Indicates Spot instance behavior when it is interrupted. Valid values are `terminate`, `stop`, or `hibernate`. Default value is `terminate`.
+* `valid_until` - (Optional) The end date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). At this point, no new Spot instance requests are placed or enabled to fulfill the request. The default end date is 7 days from the current date.
+* `valid_from` - (Optional) The start date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). The default is to start fulfilling the request immediately.
+* `tags` - (Optional) A map of tags to assign to the Spot Instance Request. These tags are not automatically applied to the launched Instance. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
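+
+As a hand-written sketch (not produced by `cdktf convert`), a `one-time` request that waits for fulfillment and expires at a fixed date could look like the following; the AMI is a placeholder:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.spot_instance_request import SpotInstanceRequest
+class OneTimeSpotWorker(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SpotInstanceRequest(self, "one_time_worker",
+            ami="ami-1234",  # placeholder
+            instance_type="c4.xlarge",
+            spot_type="one-time",  # close the request once the instance terminates
+            wait_for_fulfillment=True,  # error out if the request is not fulfilled within the create timeout
+            valid_until="2019-11-04T20:44:20Z"  # RFC3339 end date for the request
+        )
+```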
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Spot Instance Request ID.
+
+These attributes are exported, but they are expected to change over time and so
+should only be used for informational purposes, not for resource dependencies:
+
+* `spot_bid_status` - The current [bid
+  status](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-bid-status.html)
+  of the Spot Instance Request.
+* `spot_request_state` - The current [request
+  state](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html#creating-spot-request-status)
+  of the Spot Instance Request.
+* `spot_instance_id` - The Instance ID (if any) that is currently fulfilling
+  the Spot Instance request.
+* `public_dns` - The public DNS name assigned to the instance. For EC2-VPC, this
+  is only available if you've enabled DNS hostnames for your VPC.
+* `public_ip` - The public IP address assigned to the instance, if applicable.
+* `private_dns` - The private DNS name assigned to the instance. Can only be
+  used inside Amazon EC2, and only available if you've enabled DNS hostnames
+  for your VPC.
+* `private_ip` - The private IP address assigned to the instance.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `20m`)
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sqs_queue.html.markdown b/website/docs/cdktf/python/r/sqs_queue.html.markdown new file mode 100644 index 00000000000..f956acba09c --- /dev/null +++ b/website/docs/cdktf/python/r/sqs_queue.html.markdown @@ -0,0 +1,205 @@ +--- +subcategory: "SQS (Simple Queue)" +layout: "aws" +page_title: "AWS: aws_sqs_queue" +description: |- + Provides an SQS resource. +--- + + + +# Resource: aws_sqs_queue
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sqs_queue import SqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SqsQueue(self, "terraform_queue",
+            delay_seconds=90,
+            max_message_size=2048,
+            message_retention_seconds=86400,
+            name="terraform-example-queue",
+            receive_wait_time_seconds=10,
+            # The redrive policy keys are the JSON field names SQS expects.
+            redrive_policy=Token.as_string(
+                Fn.jsonencode({
+                    "deadLetterTargetArn": terraform_queue_deadletter.arn,  # the queue defined in the dead-letter example below
+                    "maxReceiveCount": 4
+                })),
+            tags={
+                "Environment": "production"
+            }
+        )
+```
+
+## FIFO queue
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sqs_queue import SqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SqsQueue(self, "terraform_queue",
+            content_based_deduplication=True,
+            fifo_queue=True,
+            name="terraform-example-queue.fifo"
+        )
+```
+
+## High-throughput FIFO queue
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sqs_queue import SqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SqsQueue(self, "terraform_queue",
+            deduplication_scope="messageGroup",
+            fifo_queue=True,
+            fifo_throughput_limit="perMessageGroupId",
+            name="terraform-example-queue.fifo"
+        )
+```
+
+## Dead-letter queue
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sqs_queue import SqsQueue
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SqsQueue(self, "terraform_queue_deadletter",
+            name="terraform-example-deadletter-queue",
+            # The redrive allow policy keys are the JSON field names SQS expects.
+            redrive_allow_policy=Token.as_string(
+                Fn.jsonencode({
+                    "redrivePermission": "byQueue",
+                    "sourceQueueArns": [terraform_queue.arn]  # the source queue from the first example
+                }))
+        )
+```
+
+## Server-side encryption (SSE)
+
+Using [SSE-SQS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-configure-sqs-sse-queue.html):
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.sqs_queue import SqsQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SqsQueue(self, "terraform_queue", + name="terraform-example-queue", + sqs_managed_sse_enabled=True + ) +``` + +Using [SSE-KMS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-configure-sse-existing-queue.html): + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sqs_queue import SqsQueue +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SqsQueue(self, "terraform_queue", + kms_data_key_reuse_period_seconds=300, + kms_master_key_id="alias/aws/sqs", + name="terraform-example-queue" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the queue. Queue names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 80 characters long. For a FIFO (first-in-first-out) queue, the name must end with the `.fifo` suffix. If omitted, Terraform will assign a random, unique name. Conflicts with `name_prefix` +* `name_prefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name` +* `visibility_timeout_seconds` - (Optional) The visibility timeout for the queue. An integer from 0 to 43200 (12 hours). The default for this attribute is 30. For more information about visibility timeout, see [AWS docs](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/AboutVT.html). +* `message_retention_seconds` - (Optional) The number of seconds Amazon SQS retains a message. Integer representing seconds, from 60 (1 minute) to 1209600 (14 days). The default for this attribute is 345600 (4 days). +* `max_message_size` - (Optional) The limit of how many bytes a message can contain before Amazon SQS rejects it. An integer from 1024 bytes (1 KiB) up to 262144 bytes (256 KiB). The default for this attribute is 262144 (256 KiB). +* `delay_seconds` - (Optional) The time in seconds that the delivery of all messages in the queue will be delayed. An integer from 0 to 900 (15 minutes). The default for this attribute is 0 seconds. +* `receive_wait_time_seconds` - (Optional) The time for which a ReceiveMessage call will wait for a message to arrive (long polling) before returning. An integer from 0 to 20 (seconds). The default for this attribute is 0, meaning that the call will return immediately. +* `policy` - (Optional) The JSON policy for the SQS queue. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `redrive_policy` - (Optional) The JSON policy to set up the Dead Letter Queue, see [AWS docs](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/SQSDeadLetterQueue.html). **Note:** when specifying `maxReceiveCount`, you must specify it as an integer (`5`), and not a string (`"5"`). +* `redrive_allow_policy` - (Optional) The JSON policy to set up the Dead Letter Queue redrive permission, see [AWS docs](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/SQSDeadLetterQueue.html). 
+* `fifo_queue` - (Optional) Boolean designating a FIFO queue. If not set, it defaults to `false`, making the queue standard.
+* `content_based_deduplication` - (Optional) Enables content-based deduplication for FIFO queues. For more information, see the [related documentation](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html#FIFO-queues-exactly-once-processing).
+* `sqs_managed_sse_enabled` - (Optional) Boolean to enable server-side encryption (SSE) of message content with SQS-owned encryption keys. See [Encryption at rest](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-server-side-encryption.html). Terraform will only perform drift detection of its value when present in a configuration.
+* `kms_master_key_id` - (Optional) The ID of an AWS-managed customer master key (CMK) for Amazon SQS or a custom CMK. For more information, see [Key Terms](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-server-side-encryption.html#sqs-sse-key-terms).
+* `kms_data_key_reuse_period_seconds` - (Optional) The length of time, in seconds, for which Amazon SQS can reuse a data key to encrypt or decrypt messages before calling AWS KMS again. An integer representing seconds, between 60 seconds (1 minute) and 86,400 seconds (24 hours). The default is 300 (5 minutes).
+* `deduplication_scope` - (Optional) Specifies whether message deduplication occurs at the message group or queue level. Valid values are `messageGroup` and `queue` (default).
+* `fifo_throughput_limit` - (Optional) Specifies whether the FIFO queue throughput quota applies to the entire queue or per message group. Valid values are `perQueue` (default) and `perMessageGroupId`.
+* `tags` - (Optional) A map of tags to assign to the queue. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The URL for the created Amazon SQS queue.
+* `arn` - The ARN of the SQS queue.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `url` - Same as `id`: The URL for the created Amazon SQS queue.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queues using the queue `url`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SQS Queues using the queue `url`.
For example:
+
+```console
+% terraform import aws_sqs_queue.public_queue https://queue.amazonaws.com/80398EXAMPLE/MyQueue
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sqs_queue_policy.html.markdown b/website/docs/cdktf/python/r/sqs_queue_policy.html.markdown
new file mode 100644
index 00000000000..70d86c2e347
--- /dev/null
+++ b/website/docs/cdktf/python/r/sqs_queue_policy.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queue_policy"
+description: |-
+  Provides a SQS Queue Policy resource.
+---
+
+
+
+# Resource: aws_sqs_queue_policy
+
+Allows you to set a policy of an SQS Queue while referencing the ARN of the queue within the policy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.sqs_queue import SqsQueue
+from imports.aws.sqs_queue_policy import SqsQueuePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        q = SqsQueue(self, "q",
+            name="examplequeue"
+        )
+        test = DataAwsIamPolicyDocument(self, "test",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sqs:SendMessage"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="ArnEquals",
+                    values=[example.arn],
+                    variable="aws:SourceArn"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["*"],
+                    type="*"
+                )
+                ],
+                resources=[q.arn],
+                sid="First"
+            )
+            ]
+        )
+        aws_sqs_queue_policy_test = SqsQueuePolicy(self, "test_2",
+            policy=Token.as_string(test.json),
+            queue_url=q.id
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sqs_queue_policy_test.override_logical_id("test")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `queue_url` - (Required) The URL of the SQS Queue to which to attach the policy.
+* `policy` - (Required) The JSON policy for the SQS queue. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queue Policies using the queue URL. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SQS Queue Policies using the queue URL.
For example: + +```console +% terraform import aws_sqs_queue_policy.test https://queue.amazonaws.com/0123456789012/myqueue +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/sqs_queue_redrive_allow_policy.html.markdown b/website/docs/cdktf/python/r/sqs_queue_redrive_allow_policy.html.markdown new file mode 100644 index 00000000000..3adc781f1a0 --- /dev/null +++ b/website/docs/cdktf/python/r/sqs_queue_redrive_allow_policy.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "SQS (Simple Queue)" +layout: "aws" +page_title: "AWS: aws_sqs_queue_redrive_allow_policy" +description: |- + Provides a SQS Queue Redrive Allow Policy resource. +--- + + + +# Resource: aws_sqs_queue_redrive_allow_policy + +Provides a SQS Queue Redrive Allow Policy resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Fn, Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.sqs_queue import SqsQueue +from imports.aws.sqs_queue_redrive_allow_policy import SqsQueueRedriveAllowPolicy +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SqsQueue(self, "example", + name="examplequeue" + ) + src = SqsQueue(self, "src", + name="srcqueue", + redrive_policy=Token.as_string( + Fn.jsonencode({ + "dead_letter_target_arn": example.arn, + "max_receive_count": 4 + })) + ) + aws_sqs_queue_redrive_allow_policy_example = SqsQueueRedriveAllowPolicy(self, "example_2", + queue_url=example.id, + redrive_allow_policy=Token.as_string( + Fn.jsonencode({ + "redrive_permission": "byQueue", + "source_queue_arns": [src.arn] + })) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_sqs_queue_redrive_allow_policy_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `queue_url` - (Required) The URL of the SQS Queue to which to attach the policy +* `redrive_allow_policy` - (Required) The JSON redrive allow policy for the SQS queue. Learn more in the [Amazon SQS dead-letter queues documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html). + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queue Redrive Allow Policies using the queue URL. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SQS Queue Redrive Allow Policies using the queue URL. 
For example:
+
+```console
+% terraform import aws_sqs_queue_redrive_allow_policy.test https://queue.amazonaws.com/0123456789012/myqueue
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/sqs_queue_redrive_policy.html.markdown b/website/docs/cdktf/python/r/sqs_queue_redrive_policy.html.markdown
new file mode 100644
index 00000000000..3a7ff9e2b68
--- /dev/null
+++ b/website/docs/cdktf/python/r/sqs_queue_redrive_policy.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queue_redrive_policy"
+description: |-
+  Provides a SQS Queue Redrive Policy resource.
+---
+
+
+
+# Resource: aws_sqs_queue_redrive_policy
+
+Allows you to set a redrive policy of an SQS Queue while referencing the ARN of the dead letter queue inside the redrive policy.
+
+This is useful when you want to set a dedicated dead letter queue for a standard or FIFO queue, but need the dead letter queue to exist before setting the redrive policy.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.sqs_queue import SqsQueue
+from imports.aws.sqs_queue_redrive_policy import SqsQueueRedrivePolicy
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        q = SqsQueue(self, "q",
+            name="examplequeue"
+        )
+        ddl = SqsQueue(self, "ddl",
+            name="examplequeue-ddl",
+            redrive_allow_policy=Token.as_string(
+                Fn.jsonencode({
+                    "redrive_permission": "byQueue",
+                    "source_queue_arns": [q.arn]
+                }))
+        )
+        aws_sqs_queue_redrive_policy_q = SqsQueueRedrivePolicy(self, "q_2",
+            queue_url=q.id,
+            redrive_policy=Token.as_string(
+                Fn.jsonencode({
+                    "dead_letter_target_arn": ddl.arn,
+                    "max_receive_count": 4
+                }))
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_sqs_queue_redrive_policy_q.override_logical_id("q")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `queue_url` - (Required) The URL of the SQS Queue to which to attach the policy.
+* `redrive_policy` - (Required) The JSON redrive policy for the SQS queue. Accepts two key/val pairs: `deadLetterTargetArn` and `maxReceiveCount`. Learn more in the [Amazon SQS dead-letter queues documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queue Redrive Policies using the queue URL. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SQS Queue Redrive Policies using the queue URL.
For example: + +```console +% terraform import aws_sqs_queue_redrive_policy.test https://queue.amazonaws.com/0123456789012/myqueue +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_activation.html.markdown b/website/docs/cdktf/python/r/ssm_activation.html.markdown new file mode 100644 index 00000000000..d0e17280eac --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_activation.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_activation" +description: |- + Registers an on-premises server or virtual machine with Amazon EC2 so that it can be managed using Run Command. +--- + + + +# Resource: aws_ssm_activation + +Registers an on-premises server or virtual machine with Amazon EC2 so that it can be managed using Run Command. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +from imports.aws.ssm_activation import SsmActivation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ssm.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + test_role = IamRole(self, "test_role", + assume_role_policy=Token.as_string(assume_role.json), + name="test_role" + ) + test_attach = IamRolePolicyAttachment(self, "test_attach", + policy_arn="arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore", + role=test_role.name + ) + SsmActivation(self, "foo", + depends_on=[test_attach], + description="Test", + iam_role=test_role.id, + name="test_ssm_activation", + registration_limit=Token.as_number("5") + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The default name of the registered managed instance. +* `description` - (Optional) The description of the resource that you want to register. +* `expiration_date` - (Optional) UTC timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) by which this activation request should expire. The default value is 24 hours from resource creation time. Terraform will only perform drift detection of its value when present in a configuration. +* `iam_role` - (Required) The IAM Role to attach to the managed instance. +* `registration_limit` - (Optional) The maximum number of managed instances you want to register. The default value is 1 instance. +* `tags` - (Optional) A map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The activation ID. 
+* `activation_code` - The code the system generates when it processes the activation. +* `name` - The default name of the registered managed instance. +* `description` - The description of the resource that was registered. +* `expired` - If the current activation has expired. +* `expiration_date` - The date by which this activation request should expire. The default value is 24 hours. +* `iam_role` - The IAM Role attached to the managed instance. +* `registration_limit` - The maximum number of managed instances you want to be registered. The default value is 1 instance. +* `registration_count` - The number of managed instances that are currently registered using this activation. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS SSM Activation using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS SSM Activation using the `id`. For example: + +```console +% terraform import aws_ssm_activation.example e488f2f6-e686-4afb-8a04-ef6dfEXAMPLE +``` + +-> **Note:** The `activation_code` attribute cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_association.html.markdown b/website/docs/cdktf/python/r/ssm_association.html.markdown new file mode 100644 index 00000000000..74cfd9d17fd --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_association.html.markdown @@ -0,0 +1,179 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_association" +description: |- + Associates an SSM Document to an instance or EC2 tag. +--- + + + +# Resource: aws_ssm_association + +Associates an SSM Document to an instance or EC2 tag. + +## Example Usage + +### Create an association for a specific instance + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_association import SsmAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmAssociation(self, "example", + name=Token.as_string(aws_ssm_document_example.name), + targets=[SsmAssociationTargets( + key="InstanceIds", + values=[Token.as_string(aws_instance_example.id)] + ) + ] + ) +``` + +### Create an association for all managed instances in an AWS account + +To target all managed instances in an AWS account, set the `key` as `"InstanceIds"` with `values` set as `["*"]`. This example also illustrates how to use an Amazon owned SSM document named `AmazonCloudWatch-ManageAgent`. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
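+# NOTE: `values=["*"]` below targets every managed instance in the account, and
+# `AmazonCloudWatch-ManageAgent` is an Amazon-owned SSM document.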
+#
+from imports.aws.ssm_association import SsmAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmAssociation(self, "example",
+            name="AmazonCloudWatch-ManageAgent",
+            targets=[SsmAssociationTargets(
+                key="InstanceIds",
+                values=["*"]
+            )
+            ]
+        )
+```
+
+### Create an association for a specific tag
+
+This example shows how to target all managed instances that are assigned a tag key of `Environment` and value of `Development`.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ssm_association import SsmAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmAssociation(self, "example",
+            name="AmazonCloudWatch-ManageAgent",
+            targets=[SsmAssociationTargets(
+                key="tag:Environment",
+                values=["Development"]
+            )
+            ]
+        )
+```
+
+### Create an association with a specific schedule
+
+This example shows how to run an association on a schedule defined by a cron expression.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ssm_association import SsmAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmAssociation(self, "example",
+            name=Token.as_string(aws_ssm_document_example.name),
+            schedule_expression="cron(0 2 ? * SUN *)",
+            targets=[SsmAssociationTargets(
+                key="InstanceIds",
+                values=[Token.as_string(aws_instance_example.id)]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the SSM document to apply.
+* `apply_only_at_cron_interval` - (Optional) By default, when you create a new association or update an existing one, the system runs it immediately and then according to the schedule you specified. Enable this option if you do not want the association to run immediately after you create or update it. This parameter is not supported for rate expressions. Default: `false`.
+* `association_name` - (Optional) The descriptive name for the association.
+* `document_version` - (Optional) The document version you want to associate with the target(s). Can be a specific version or the default version.
+* `instance_id` - (Optional, **Deprecated**) The instance ID to apply an SSM document to. Use the `targets` attribute with key `InstanceIds` instead; it is required for document schema versions 2.0 and above.
+* `output_location` - (Optional) An output location block. Output Location is documented below.
+* `parameters` - (Optional) A block of arbitrary string parameters to pass to the SSM document.
+* `schedule_expression` - (Optional) A [cron or rate expression](https://docs.aws.amazon.com/systems-manager/latest/userguide/reference-cron-and-rate-expressions.html) that specifies when the association runs.
+* `targets` - (Optional) A block containing the targets of the SSM association. Targets are documented below. AWS currently supports a maximum of 5 targets.
+* `compliance_severity` - (Optional) The compliance severity for the association. Can be one of the following: `UNSPECIFIED`, `LOW`, `MEDIUM`, `HIGH`, or `CRITICAL`.
+* `max_concurrency` - (Optional) The maximum number of targets allowed to run the association at the same time. You can specify a number, for example 10, or a percentage of the target set, for example 10%.
+* `max_errors` - (Optional) The number of errors that are allowed before the system stops sending requests to run the association on additional targets. You can specify a number, for example 10, or a percentage of the target set, for example 10%.
+* `automation_target_parameter_name` - (Optional) Specify the target for the association. This target is required for associations that use an `Automation` document and target resources by using rate controls. This should be set to the SSM document `parameter` that will define how your automation will branch out.
+* `wait_for_success_timeout_seconds` - (Optional) The number of seconds to wait for the association status to be `Success`. If `Success` status is not reached within the given time, the create operation will fail.
+
+Output Location (`output_location`) is an S3 bucket where you want to store the results of this association:
+
+* `s3_bucket_name` - (Required) The S3 bucket name.
+* `s3_key_prefix` - (Optional) The S3 bucket prefix. Results are stored in the root if not configured.
+* `s3_region` - (Optional) The S3 bucket region.
+
+Targets specify what instance IDs or tags to apply the document to and have these keys:
+
+* `key` - (Required) Either `InstanceIds` or `tag:Tag Name` to specify an EC2 tag.
+* `values` - (Required) A list of instance IDs or tag values. AWS currently limits this list size to one value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the SSM association.
+* `association_id` - The ID of the SSM association.
+* `instance_id` - The instance ID that the SSM document was applied to.
+* `name` - The name of the SSM document to apply.
+* `parameters` - Additional parameters passed to the SSM document.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM associations using the `association_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SSM associations using the `association_id`. For example:
+
+```console
+% terraform import aws_ssm_association.test-association 10abcdef-0abc-1234-5678-90abcdef123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ssm_default_patch_baseline.html.markdown b/website/docs/cdktf/python/r/ssm_default_patch_baseline.html.markdown
new file mode 100644
index 00000000000..fc026fd151e
--- /dev/null
+++ b/website/docs/cdktf/python/r/ssm_default_patch_baseline.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_default_patch_baseline"
+description: |-
+  Terraform resource for managing an AWS Systems Manager Default Patch Baseline.
+---
+
+
+
+# Resource: aws_ssm_default_patch_baseline
+
+Terraform resource for registering an AWS Systems Manager Default Patch Baseline.
+ +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_default_patch_baseline import SsmDefaultPatchBaseline +from imports.aws.ssm_patch_baseline import SsmPatchBaseline +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = SsmPatchBaseline(self, "example", + approved_patches=["KB123456"], + name="example" + ) + aws_ssm_default_patch_baseline_example = SsmDefaultPatchBaseline(self, "example_1", + baseline_id=example.id, + operating_system=example.operating_system + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ssm_default_patch_baseline_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `baseline_id` - (Required) ID of the patch baseline. + Can be an ID or an ARN. + When specifying an AWS-provided patch baseline, must be the ARN. +* `operating_system` - (Required) The operating system the patch baseline applies to. + Valid values are + `AMAZON_LINUX`, + `AMAZON_LINUX_2`, + `AMAZON_LINUX_2022`, + `CENTOS`, + `DEBIAN`, + `MACOS`, + `ORACLE_LINUX`, + `RASPBIAN`, + `REDHAT_ENTERPRISE_LINUX`, + `ROCKY_LINUX`, + `SUSE`, + `UBUNTU`, and + `WINDOWS`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Systems Manager Default Patch Baseline using the patch baseline ID, patch baseline ARN, or the operating system value. For example: + +Using the patch baseline ID: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using the patch baseline ARN: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using the operating system value: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +**Using `terraform import` to import** the Systems Manager Default Patch Baseline using the patch baseline ID, patch baseline ARN, or the operating system value. 
For example: + +Using the patch baseline ID: + +```console +% terraform import aws_ssm_default_patch_baseline.example pb-1234567890abcdef1 +``` + +Using the patch baseline ARN: + +```console +% terraform import aws_ssm_default_patch_baseline.example arn:aws:ssm:us-west-2:123456789012:patchbaseline/pb-1234567890abcdef1 +``` + +Using the operating system value: + +```console +% terraform import aws_ssm_default_patch_baseline.example CENTOS +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_document.html.markdown b/website/docs/cdktf/python/r/ssm_document.html.markdown new file mode 100644 index 00000000000..e80b47c77cb --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_document.html.markdown @@ -0,0 +1,166 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_document" +description: |- + Provides an SSM Document resource +--- + + + +# Resource: aws_ssm_document + +Provides an SSM Document resource + +~> **NOTE on updating SSM documents:** Only documents with a schema version of 2.0 +or greater can update their content once created, see [SSM Schema Features][1]. To update a document with an older schema version you must recreate the resource. Not all document types support a schema version of 2.0 or greater. Refer to [SSM document schema features and examples][2] for information about which schema versions are supported for the respective `document_type`. + +## Example Usage + +### Create an ssm document in JSON format + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_document import SsmDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmDocument(self, "foo", + content=" {\n \"schemaVersion\": \"1.2\",\n \"description\": \"Check ip configuration of a Linux instance.\",\n \"parameters\": {\n\n },\n \"runtimeConfig\": {\n \"aws:runShellScript\": {\n \"properties\": [\n {\n \"id\": \"0.aws:runShellScript\",\n \"runCommand\": [\"ifconfig\"]\n }\n ]\n }\n }\n }\n\n", + document_type="Command", + name="test_document" + ) +``` + +### Create an ssm document in YAML format + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_document import SsmDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmDocument(self, "foo", + content="schemaVersion: '1.2'\ndescription: Check ip configuration of a Linux instance.\nparameters: {}\nruntimeConfig:\n 'aws:runShellScript':\n properties:\n - id: '0.aws:runShellScript'\n runCommand:\n - ifconfig\n\n", + document_format="YAML", + document_type="Command", + name="test_document" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the document. +* `attachments_source` - (Optional) One or more configuration blocks describing attachments sources to a version of a document. Defined below. +* `content` - (Required) The JSON or YAML content of the document. 
+* `document_format` - (Optional, defaults to JSON) The format of the document. Valid values are `JSON` and `YAML`.
+* `document_type` - (Required) The type of the document. Valid document types include: `Automation`, `Command`, `Package`, `Policy`, and `Session`.
+* `permissions` - (Optional) Additional Permissions to attach to the document. See [Permissions](#permissions) below for details.
+* `target_type` - (Optional) The target type which defines the kinds of resources the document can run on. For example, `/AWS::EC2::Instance`. For a list of valid resource types, see the [AWS Resource Types Reference](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html).
+* `tags` - (Optional) A map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `version_name` - (Optional) A field specifying the version of the artifact you are creating with the document. For example, "Release 12, Update 6". This value is unique across all versions of a document and cannot be changed for an existing document version.
+
+## attachments_source
+
+The `attachments_source` block supports the following:
+
+* `key` - (Required) The key describing the location of an attachment to a document. Valid key types include: `SourceUrl` and `S3FileUrl`.
+* `values` - (Required) The value describing the location of an attachment to a document.
+* `name` - (Optional) The name of the document attachment file.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `created_date` - The date the document was created.
+* `description` - The description of the document.
+* `schema_version` - The schema version of the document.
+* `default_version` - The default version of the document.
+* `document_version` - The document version.
+* `hash` - The SHA-1 or SHA-256 digest of the document content.
+* `hash_type` - The hashing algorithm used when hashing the content, either `Sha1` or `Sha256`.
+* `latest_version` - The latest version of the document.
+* `owner` - The AWS user account of the person who created the document.
+* `status` - The current status of the document: `Creating`, `Active`, or `Deleting`.
+* `parameter` - The parameters that are available to this document.
+* `platform_types` - A list of OS platforms compatible with this SSM document, either "Windows" or "Linux".
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+[1]: http://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-ssm-docs.html#document-schemas-features
+[2]: https://docs.aws.amazon.com/systems-manager/latest/userguide/document-schemas-features.html
+
+## Permissions
+
+The permissions attribute specifies how you want to share the document. If you share a document privately,
+you must specify the AWS user account IDs for those people who can use the document. If you share a document
+publicly, you must specify `All` as the account ID.
+
+The permissions mapping supports the following:
+
+* `type` - The permission type for the document. The permission type can be `Share`.
+* `account_ids` - The AWS user accounts that should have access to the document. The account IDs can either be a group of account IDs or `All`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Documents using the name. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SSM Documents using the name. For example: + +```console +% terraform import aws_ssm_document.example example +``` + +The `attachments_source` argument does not have an SSM API method for reading the attachment information detail after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To workaround this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from cdktf import TerraformResourceLifecycle +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_document import SsmDocument +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, content): + super().__init__(scope, name) + SsmDocument(self, "test", + attachments_source=[SsmDocumentAttachmentsSource( + key="SourceUrl", + values=["s3://${" + object_bucket.bucket + "}/test.zip"] + ) + ], + document_type="Package", + lifecycle=TerraformResourceLifecycle( + ignore_changes=[attachments_source] + ), + name="test_document", + content=content + ) +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_maintenance_window.html.markdown b/website/docs/cdktf/python/r/ssm_maintenance_window.html.markdown new file mode 100644 index 00000000000..a8c5c10f948 --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_maintenance_window.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_maintenance_window" +description: |- + Provides an SSM Maintenance Window resource +--- + + + +# Resource: aws_ssm_maintenance_window + +Provides an SSM Maintenance Window resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_maintenance_window import SsmMaintenanceWindow +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmMaintenanceWindow(self, "production", + cutoff=1, + duration=3, + name="maintenance-window-application", + schedule="cron(0 16 ? * TUE *)" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the maintenance window. 
+* `schedule` - (Required) The schedule of the Maintenance Window in the form of a [cron or rate expression](https://docs.aws.amazon.com/systems-manager/latest/userguide/reference-cron-and-rate-expressions.html).
+* `cutoff` - (Required) The number of hours before the end of the Maintenance Window that Systems Manager stops scheduling new tasks for execution.
+* `duration` - (Required) The duration of the Maintenance Window in hours.
+* `description` - (Optional) A description for the maintenance window.
+* `allow_unassociated_targets` - (Optional) Whether targets must be registered with the Maintenance Window before tasks can be defined for those targets.
+* `enabled` - (Optional) Whether the maintenance window is enabled. Default: `true`.
+* `end_date` - (Optional) Timestamp in [ISO-8601 extended format](https://www.iso.org/iso-8601-date-and-time-format.html) when to no longer run the maintenance window.
+* `schedule_timezone` - (Optional) Timezone for schedule in [Internet Assigned Numbers Authority (IANA) Time Zone Database format](https://www.iana.org/time-zones). For example: `America/Los_Angeles`, `Etc/UTC`, or `Asia/Seoul`.
+* `schedule_offset` - (Optional) The number of days to wait after the date and time specified by a CRON expression before running the maintenance window.
+* `start_date` - (Optional) Timestamp in [ISO-8601 extended format](https://www.iso.org/iso-8601-date-and-time-format.html) when to begin the maintenance window.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the maintenance window.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Maintenance Windows using the maintenance window `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SSM Maintenance Windows using the maintenance window `id`.
For example: + +```console +% terraform import aws_ssm_maintenance_window.imported-window mw-0123456789 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_maintenance_window_target.html.markdown b/website/docs/cdktf/python/r/ssm_maintenance_window_target.html.markdown new file mode 100644 index 00000000000..f7765a57d28 --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_maintenance_window_target.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_maintenance_window_target" +description: |- + Provides an SSM Maintenance Window Target resource +--- + + + +# Resource: aws_ssm_maintenance_window_target + +Provides an SSM Maintenance Window Target resource + +## Example Usage + +### Instance Target + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_maintenance_window import SsmMaintenanceWindow +from imports.aws.ssm_maintenance_window_target import SsmMaintenanceWindowTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + window = SsmMaintenanceWindow(self, "window", + cutoff=1, + duration=3, + name="maintenance-window-webapp", + schedule="cron(0 16 ? * TUE *)" + ) + SsmMaintenanceWindowTarget(self, "target1", + description="This is a maintenance window target", + name="maintenance-window-target", + resource_type="INSTANCE", + targets=[SsmMaintenanceWindowTargetTargets( + key="tag:Name", + values=["acceptance_test"] + ) + ], + window_id=window.id + ) +``` + +### Resource Group Target + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_maintenance_window import SsmMaintenanceWindow +from imports.aws.ssm_maintenance_window_target import SsmMaintenanceWindowTarget +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + window = SsmMaintenanceWindow(self, "window", + cutoff=1, + duration=3, + name="maintenance-window-webapp", + schedule="cron(0 16 ? * TUE *)" + ) + SsmMaintenanceWindowTarget(self, "target1", + description="This is a maintenance window target", + name="maintenance-window-target", + resource_type="RESOURCE_GROUP", + targets=[SsmMaintenanceWindowTargetTargets( + key="resource-groups:ResourceTypeFilters", + values=["AWS::EC2::Instance"] + ) + ], + window_id=window.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `window_id` - (Required) The Id of the maintenance window to register the target with. +* `name` - (Optional) The name of the maintenance window target. +* `description` - (Optional) The description of the maintenance window target. +* `resource_type` - (Required) The type of target being registered with the Maintenance Window. Possible values are `INSTANCE` and `RESOURCE_GROUP`. +* `targets` - (Required) The targets to register with the maintenance window. In other words, the instances to run commands on when the maintenance window runs. 
  You can specify targets using instance IDs, resource group names, or tags that have been applied to instances. For more information about these example formats, see the [AWS Systems Manager documentation](https://docs.aws.amazon.com/systems-manager/latest/userguide/mw-cli-tutorial-targets-examples.html).
+* `owner_information` - (Optional) User-provided value that will be included in any CloudWatch events raised while running tasks for these targets in this Maintenance Window.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the maintenance window target.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Maintenance Window targets using `WINDOW_ID/WINDOW_TARGET_ID`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SSM Maintenance Window targets using `WINDOW_ID/WINDOW_TARGET_ID`. For example:
+
+```console
+% terraform import aws_ssm_maintenance_window_target.example mw-0c50858d01EXAMPLE/23639a0b-ddbc-4bca-9e72-78d96EXAMPLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ssm_maintenance_window_task.html.markdown b/website/docs/cdktf/python/r/ssm_maintenance_window_task.html.markdown
new file mode 100644
index 00000000000..4f11e9f41cc
--- /dev/null
+++ b/website/docs/cdktf/python/r/ssm_maintenance_window_task.html.markdown
@@ -0,0 +1,270 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_maintenance_window_task"
+description: |-
+  Provides an SSM Maintenance Window Task resource
+---
+
+
+
+# Resource: aws_ssm_maintenance_window_task
+
+Provides an SSM Maintenance Window Task resource.
+
+## Example Usage
+
+### Automation Tasks
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ssm_maintenance_window_task import SsmMaintenanceWindowTask
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmMaintenanceWindowTask(self, "example",
+            max_concurrency=Token.as_string(2),
+            max_errors=Token.as_string(1),
+            priority=1,
+            targets=[SsmMaintenanceWindowTaskTargets(
+                key="InstanceIds",
+                values=[Token.as_string(aws_instance_example.id)]
+            )
+            ],
+            task_arn="AWS-RestartEC2Instance",
+            task_invocation_parameters=SsmMaintenanceWindowTaskTaskInvocationParameters(
+                automation_parameters=SsmMaintenanceWindowTaskTaskInvocationParametersAutomationParameters(
+                    document_version="$LATEST",
+                    parameter=[SsmMaintenanceWindowTaskTaskInvocationParametersAutomationParametersParameter(
+                        name="InstanceId",
+                        values=[Token.as_string(aws_instance_example.id)]
+                    )
+                    ]
+                )
+            ),
+            task_type="AUTOMATION",
+            window_id=Token.as_string(aws_ssm_maintenance_window_example.id)
+        )
+```
+
+### Lambda Tasks
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_maintenance_window_task import SsmMaintenanceWindowTask +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmMaintenanceWindowTask(self, "example", + max_concurrency=Token.as_string(2), + max_errors=Token.as_string(1), + priority=1, + targets=[SsmMaintenanceWindowTaskTargets( + key="InstanceIds", + values=[Token.as_string(aws_instance_example.id)] + ) + ], + task_arn=Token.as_string(aws_lambda_function_example.arn), + task_invocation_parameters=SsmMaintenanceWindowTaskTaskInvocationParameters( + lambda_parameters=SsmMaintenanceWindowTaskTaskInvocationParametersLambdaParameters( + client_context=Token.as_string( + Fn.base64encode("{\\\"key1\\\":\\\"value1\\\"}")), + payload="{\\\"key1\\\":\\\"value1\\\"}" + ) + ), + task_type="LAMBDA", + window_id=Token.as_string(aws_ssm_maintenance_window_example.id) + ) +``` + +### Run Command Tasks + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_maintenance_window_task import SsmMaintenanceWindowTask +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmMaintenanceWindowTask(self, "example", + max_concurrency=Token.as_string(2), + max_errors=Token.as_string(1), + priority=1, + targets=[SsmMaintenanceWindowTaskTargets( + key="InstanceIds", + values=[Token.as_string(aws_instance_example.id)] + ) + ], + task_arn="AWS-RunShellScript", + task_invocation_parameters=SsmMaintenanceWindowTaskTaskInvocationParameters( + run_command_parameters=SsmMaintenanceWindowTaskTaskInvocationParametersRunCommandParameters( + notification_config=SsmMaintenanceWindowTaskTaskInvocationParametersRunCommandParametersNotificationConfig( + notification_arn=Token.as_string(aws_sns_topic_example.arn), + notification_events=["All"], + notification_type="Command" + ), + output_s3_bucket=Token.as_string(aws_s3_bucket_example.id), + output_s3_key_prefix="output", + parameter=[SsmMaintenanceWindowTaskTaskInvocationParametersRunCommandParametersParameter( + name="commands", + values=["date"] + ) + ], + service_role_arn=Token.as_string(aws_iam_role_example.arn), + timeout_seconds=600 + ) + ), + task_type="RUN_COMMAND", + window_id=Token.as_string(aws_ssm_maintenance_window_example.id) + ) +``` + +### Step Function Tasks + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
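+# NOTE: this example assumes `aws_sfn_activity_example` and
+# `aws_ssm_maintenance_window_example` are defined elsewhere in the stack;
+# `input` is the JSON passed to the Step Functions task.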
+# +from imports.aws.ssm_maintenance_window_task import SsmMaintenanceWindowTask +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmMaintenanceWindowTask(self, "example", + max_concurrency=Token.as_string(2), + max_errors=Token.as_string(1), + priority=1, + targets=[SsmMaintenanceWindowTaskTargets( + key="InstanceIds", + values=[Token.as_string(aws_instance_example.id)] + ) + ], + task_arn=Token.as_string(aws_sfn_activity_example.id), + task_invocation_parameters=SsmMaintenanceWindowTaskTaskInvocationParameters( + step_functions_parameters=SsmMaintenanceWindowTaskTaskInvocationParametersStepFunctionsParameters( + input="{\\\"key1\\\":\\\"value1\\\"}", + name="example" + ) + ), + task_type="STEP_FUNCTIONS", + window_id=Token.as_string(aws_ssm_maintenance_window_example.id) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `window_id` - (Required) The Id of the maintenance window to register the task with. +* `max_concurrency` - (Optional) The maximum number of targets this task can be run for in parallel. +* `max_errors` - (Optional) The maximum number of errors allowed before this task stops being scheduled. +* `cutoff_behavior` - (Optional) Indicates whether tasks should continue to run after the cutoff time specified in the maintenance windows is reached. Valid values are `CONTINUE_TASK` and `CANCEL_TASK`. +* `task_type` - (Required) The type of task being registered. Valid values: `AUTOMATION`, `LAMBDA`, `RUN_COMMAND` or `STEP_FUNCTIONS`. +* `task_arn` - (Required) The ARN of the task to execute. +* `service_role_arn` - (Optional) The role that should be assumed when executing the task. If a role is not provided, Systems Manager uses your account's service-linked role. If no service-linked role for Systems Manager exists in your account, it is created for you. +* `name` - (Optional) The name of the maintenance window task. +* `description` - (Optional) The description of the maintenance window task. +* `targets` - (Optional) The targets (either instances or window target ids). Instances are specified using Key=InstanceIds,Values=instanceid1,instanceid2. Window target ids are specified using Key=WindowTargetIds,Values=window target id1, window target id2. +* `priority` - (Optional) The priority of the task in the Maintenance Window, the lower the number the higher the priority. Tasks in a Maintenance Window are scheduled in priority order with tasks that have the same priority scheduled in parallel. +* `task_invocation_parameters` - (Optional) Configuration block with parameters for task execution. + +`task_invocation_parameters` supports the following: + +* `automation_parameters` - (Optional) The parameters for an AUTOMATION task type. Documented below. +* `lambda_parameters` - (Optional) The parameters for a LAMBDA task type. Documented below. +* `run_command_parameters` - (Optional) The parameters for a RUN_COMMAND task type. Documented below. +* `step_functions_parameters` - (Optional) The parameters for a STEP_FUNCTIONS task type. Documented below. + +`automation_parameters` supports the following: + +* `document_version` - (Optional) The version of an Automation document to use during task execution. +* `parameter` - (Optional) The parameters for the RUN_COMMAND task execution. Documented below. + +`lambda_parameters` supports the following: + +* `client_context` - (Optional) Pass client-specific information to the Lambda function that you are invoking. 
+* `payload` - (Optional) JSON to provide to your Lambda function as input.
+* `qualifier` - (Optional) Specify a Lambda function version or alias name.
+
+`run_command_parameters` supports the following:
+
+* `comment` - (Optional) Information about the command(s) to execute.
+* `document_hash` - (Optional) The SHA-256 or SHA-1 hash created by the system when the document was created. SHA-1 hashes have been deprecated.
+* `document_hash_type` - (Optional) SHA-256 or SHA-1. SHA-1 hashes have been deprecated. Valid values: `Sha256` and `Sha1`.
+* `notification_config` - (Optional) Configurations for sending notifications about command status changes on a per-instance basis. Documented below.
+* `output_s3_bucket` - (Optional) The name of the Amazon S3 bucket.
+* `output_s3_key_prefix` - (Optional) The Amazon S3 bucket subfolder.
+* `parameter` - (Optional) The parameters for the RUN_COMMAND task execution. Documented below.
+* `service_role_arn` - (Optional) The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) service role to use to publish Amazon Simple Notification Service (Amazon SNS) notifications for maintenance window Run Command tasks.
+* `timeout_seconds` - (Optional) The timeout in seconds. If this time is reached and the command has not already started executing, it doesn't run.
+* `cloudwatch_config` - (Optional) Configuration options for sending command output to CloudWatch Logs. Documented below.
+
+`step_functions_parameters` supports the following:
+
+* `input` - (Optional) The inputs for the STEP_FUNCTION task.
+* `name` - (Optional) The name of the STEP_FUNCTION task.
+
+`notification_config` supports the following:
+
+* `notification_arn` - (Optional) An Amazon Resource Name (ARN) for a Simple Notification Service (SNS) topic. Run Command pushes notifications about command status changes to this topic.
+* `notification_events` - (Optional) The different events for which you can receive notifications. Valid values: `All`, `InProgress`, `Success`, `TimedOut`, `Cancelled`, and `Failed`.
+* `notification_type` - (Optional) When specified with `Command`, receive notification when the status of a command changes. When specified with `Invocation`, for commands sent to multiple instances, receive notification on a per-instance basis when the status of a command changes. Valid values: `Command` and `Invocation`.
+
+`cloudwatch_config` supports the following:
+
+* `cloudwatch_log_group_name` - (Optional) The name of the CloudWatch log group where you want to send command output. If you don't specify a group name, Systems Manager automatically creates a log group for you. The log group uses the following naming format: `aws/ssm/SystemsManagerDocumentName`.
+* `cloudwatch_output_enabled` - (Optional) Enables Systems Manager to send command output to CloudWatch Logs.
+
+`parameter` supports the following:
+
+* `name` - (Required) The parameter name.
+* `values` - (Required) The array of string values for the parameter.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the maintenance window task.
+* `id` - The ID of the maintenance window task.
+* `window_task_id` - The ID of the maintenance window task.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Maintenance Window Task using the `window_id` and `window_task_id` separated by `/`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS Maintenance Window Task using the `window_id` and `window_task_id` separated by `/`. For example: + +```console +% terraform import aws_ssm_maintenance_window_task.task / +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_parameter.html.markdown b/website/docs/cdktf/python/r/ssm_parameter.html.markdown new file mode 100644 index 00000000000..682c5da072d --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_parameter.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_parameter" +description: |- + Provides a SSM Parameter resource +--- + + + +# Resource: aws_ssm_parameter + +Provides an SSM Parameter resource. + +~> **Note:** `overwrite` also makes it possible to overwrite an existing SSM Parameter that's not created by Terraform before. + +## Example Usage + +### Basic example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_parameter import SsmParameter +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmParameter(self, "foo", + name="foo", + type="String", + value="bar" + ) +``` + +### Encrypted string using default SSM KMS key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.db_instance import DbInstance +from imports.aws.ssm_parameter import SsmParameter +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + DbInstance(self, "default", + allocated_storage=10, + db_name="mydb", + db_subnet_group_name="my_database_subnet_group", + engine="mysql", + engine_version="5.7.16", + instance_class="db.t2.micro", + parameter_group_name="default.mysql5.7", + password=database_master_password.string_value, + storage_type="gp2", + username="foo" + ) + SsmParameter(self, "secret", + description="The parameter description", + name="/production/database/password/master", + tags={ + "environment": "production" + }, + type="SecureString", + value=database_master_password.string_value + ) +``` + +~> **Note:** The unencrypted value of a SecureString will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the parameter. If the name contains a path (e.g., any forward slashes (`/`)), it must be fully qualified with a leading forward slash (`/`). For additional requirements and constraints, see the [AWS SSM User Guide](https://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-parameter-name-constraints.html). +* `type` - (Required) Type of the parameter. 
Valid types are `String`, `StringList` and `SecureString`. + +The following arguments are optional: + +* `allowed_pattern` - (Optional) Regular expression used to validate the parameter value. +* `data_type` - (Optional) Data type of the parameter. Valid values: `text`, `aws:ssm:integration` and `aws:ec2:image` for AMI format, see the [Native parameter support for Amazon Machine Image IDs](https://docs.aws.amazon.com/systems-manager/latest/userguide/parameter-store-ec2-aliases.html). +* `description` - (Optional) Description of the parameter. +* `insecure_value` - (Optional, exactly one of `value` or `insecure_value` is required) Value of the parameter. **Use caution:** This value is _never_ marked as sensitive in the Terraform plan output. This argument is not valid with a `type` of `SecureString`. +* `key_id` - (Optional) KMS key ID or ARN for encrypting a SecureString. +* `overwrite` - (Optional, **Deprecated**) Overwrite an existing parameter. If not specified, will default to `false` if the resource has not been created by terraform to avoid overwrite of existing resource and will default to `true` otherwise (terraform lifecycle rules should then be used to manage the update behavior). +* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tier` - (Optional) Parameter tier to assign to the parameter. If not specified, will use the default parameter tier for the region. Valid tiers are `Standard`, `Advanced`, and `Intelligent-Tiering`. Downgrading an `Advanced` tier parameter to `Standard` will recreate the resource. For more information on parameter tiers, see the [AWS SSM Parameter tier comparison and guide](https://docs.aws.amazon.com/systems-manager/latest/userguide/parameter-store-advanced-parameters.html). +* `value` - (Optional, exactly one of `value` or `insecure_value` is required) Value of the parameter. This value is always marked as sensitive in the Terraform plan output, regardless of `type`. In Terraform CLI version 0.15 and later, this may require additional configuration handling for certain scenarios. For more information, see the [Terraform v0.15 Upgrade Guide](https://www.terraform.io/upgrade-guides/0-15.html#sensitive-output-values). + +~> **NOTE:** `aws:ssm:integration` data_type parameters must be of the type `SecureString` and the name must start with the prefix `/d9d01087-4a3f-49e0-b0b4-d568d7826553/ssm/integrations/webhook/`. See [here](https://docs.aws.amazon.com/systems-manager/latest/userguide/creating-integrations.html) for information on the usage of `aws:ssm:integration` parameters. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the parameter. +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `version` - Version of the parameter. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Parameters using the parameter store `name`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SSM Parameters using the parameter store `name`. For example: + +```console +% terraform import aws_ssm_parameter.my_param /my_path/my_paramname +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_patch_baseline.html.markdown b/website/docs/cdktf/python/r/ssm_patch_baseline.html.markdown new file mode 100644 index 00000000000..4afe1174338 --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_patch_baseline.html.markdown @@ -0,0 +1,279 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_patch_baseline" +description: |- + Provides an SSM Patch Baseline resource +--- + + + +# Resource: aws_ssm_patch_baseline + +Provides an SSM Patch Baseline resource. + +~> **NOTE on Patch Baselines:** The `approved_patches` and `approval_rule` are +both marked as optional fields, but the Patch Baseline requires that at least one +of them is specified. + +## Example Usage + +### Basic Usage + +Using `approved_patches` only. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_patch_baseline import SsmPatchBaseline +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmPatchBaseline(self, "production", + approved_patches=["KB123456"], + name="patch-baseline" + ) +``` + +### Advanced Usage, specifying patch filters + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.ssm_patch_baseline import SsmPatchBaseline +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmPatchBaseline(self, "production", + approval_rule=[SsmPatchBaselineApprovalRule( + approve_after_days=7, + compliance_level="HIGH", + patch_filter=[SsmPatchBaselineApprovalRulePatchFilter( + key="PRODUCT", + values=["WindowsServer2016"] + ), SsmPatchBaselineApprovalRulePatchFilter( + key="CLASSIFICATION", + values=["CriticalUpdates", "SecurityUpdates", "Updates"] + ), SsmPatchBaselineApprovalRulePatchFilter( + key="MSRC_SEVERITY", + values=["Critical", "Important", "Moderate"] + ) + ] + ), SsmPatchBaselineApprovalRule( + approve_after_days=7, + patch_filter=[SsmPatchBaselineApprovalRulePatchFilter( + key="PRODUCT", + values=["WindowsServer2012"] + ) + ] + ) + ], + approved_patches=["KB123456", "KB456789"], + description="Patch Baseline Description", + global_filter=[SsmPatchBaselineGlobalFilter( + key="PRODUCT", + values=["WindowsServer2008"] + ), SsmPatchBaselineGlobalFilter( + key="CLASSIFICATION", + values=["ServicePacks"] + ), SsmPatchBaselineGlobalFilter( + key="MSRC_SEVERITY", + values=["Low"] + ) + ], + name="patch-baseline", + rejected_patches=["KB987654"] + ) +``` + +### Advanced usage, specifying Microsoft application and Windows patch rules + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_patch_baseline import SsmPatchBaseline +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmPatchBaseline(self, "windows_os_apps", + approval_rule=[SsmPatchBaselineApprovalRule( + approve_after_days=7, + patch_filter=[SsmPatchBaselineApprovalRulePatchFilter( + key="CLASSIFICATION", + values=["CriticalUpdates", "SecurityUpdates"] + ), SsmPatchBaselineApprovalRulePatchFilter( + key="MSRC_SEVERITY", + values=["Critical", "Important"] + ) + ] + ), SsmPatchBaselineApprovalRule( + approve_after_days=7, + patch_filter=[SsmPatchBaselineApprovalRulePatchFilter( + key="PATCH_SET", + values=["APPLICATION"] + ), SsmPatchBaselineApprovalRulePatchFilter( + key="PRODUCT", + values=["Office 2013", "Office 2016"] + ) + ] + ) + ], + description="Patch both Windows and Microsoft apps", + name="WindowsOSAndMicrosoftApps", + operating_system="WINDOWS" + ) +``` + +### Advanced usage, specifying alternate patch source repository + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
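+# Note: 'cdktf convert' lifted the approval rule's patch filters into the
+# constructor parameter patch_filter below; callers supply their own list of
+# SsmPatchBaselineApprovalRulePatchFilter values when instantiating the stack.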
+#
+from imports.aws.ssm_patch_baseline import SsmPatchBaseline
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, patch_filter):
+        super().__init__(scope, name)
+        SsmPatchBaseline(self, "al_2017_09",
+            approval_rule=[SsmPatchBaselineApprovalRule(
+                patch_filter=patch_filter
+            )
+            ],
+            description="My patch repository for Amazon Linux 2017.09",
+            name="Amazon-Linux-2017.09",
+            operating_system="AMAZON_LINUX",
+            source=[SsmPatchBaselineSource(
+                configuration="[amzn-main]\nname=amzn-main-Base\nmirrorlist=http://repo./$awsregion./$awsdomain//$releasever/main/mirror.list\nmirrorlist_expire=300\nmetadata_expire=300\npriority=10\nfailovermethod=priority\nfastestmirror_enabled=0\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-amazon-ga\nenabled=1\nretries=3\ntimeout=5\nreport_instanceid=yes\n\n",
+                name="My-AL2017.09",
+                products=["AmazonLinux2017.09"]
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the patch baseline.
+* `description` - (Optional) The description of the patch baseline.
+* `operating_system` - (Optional) The operating system the patch baseline applies to.
+  Valid values are
+  `ALMA_LINUX`,
+  `AMAZON_LINUX`,
+  `AMAZON_LINUX_2`,
+  `AMAZON_LINUX_2022`,
+  `AMAZON_LINUX_2023`,
+  `CENTOS`,
+  `DEBIAN`,
+  `MACOS`,
+  `ORACLE_LINUX`,
+  `RASPBIAN`,
+  `REDHAT_ENTERPRISE_LINUX`,
+  `ROCKY_LINUX`,
+  `SUSE`,
+  `UBUNTU`, and
+  `WINDOWS`.
+  The default value is `WINDOWS`.
+* `approved_patches_compliance_level` - (Optional) The compliance level for approved patches.
+  This means that if an approved patch is reported as missing, this is the severity of the compliance violation.
+  Valid values are `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, `UNSPECIFIED`.
+  The default value is `UNSPECIFIED`.
+* `approved_patches` - (Optional) A list of explicitly approved patches for the baseline.
+  Cannot be specified with `approval_rule`.
+* `rejected_patches` - (Optional) A list of rejected patches.
+* `global_filter` - (Optional) A set of global filters used to exclude patches from the baseline.
+  Up to 4 global filters can be specified using Key/Value pairs.
+  Valid Keys are `PRODUCT`, `CLASSIFICATION`, `MSRC_SEVERITY`, and `PATCH_ID`.
+* `approval_rule` - (Optional) A set of rules used to include patches in the baseline.
+  Up to 10 approval rules can be specified.
+  See [`approval_rule`](#approval_rule-block) below.
+* `source` - (Optional) Configuration block with alternate sources for patches.
+  Applies to Linux instances only.
+  See [`source`](#source-block) below.
+* `rejected_patches_action` - (Optional) The action for Patch Manager to take on patches included in the `rejected_patches` list.
+  Valid values are `ALLOW_AS_DEPENDENCY` and `BLOCK`.
+* `approved_patches_enable_non_security` - (Optional) Indicates whether the list of approved patches includes non-security updates that should be applied to the instances.
+  Applies to Linux instances only.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
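+
+As a minimal sketch of the [`approval_rule`](#approval_rule-block) options described below, a rule can be keyed to a fixed `approve_until_date` cutoff instead of `approve_after_days`. This snippet is hand-written rather than `cdktf convert` output; the class names follow the generated bindings used in the examples above, and the cutoff date is illustrative:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ssm_patch_baseline import (
+    SsmPatchBaseline,
+    SsmPatchBaselineApprovalRule,
+    SsmPatchBaselineApprovalRulePatchFilter,
+)
+class CutoffDateExample(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Approve critical security updates released on or before the cutoff date.
+        SsmPatchBaseline(self, "cutoff",
+            name="cutoff-patch-baseline",
+            approval_rule=[SsmPatchBaselineApprovalRule(
+                approve_until_date="2024-01-31",
+                compliance_level="CRITICAL",
+                patch_filter=[SsmPatchBaselineApprovalRulePatchFilter(
+                    key="CLASSIFICATION",
+                    values=["SecurityUpdates"]
+                )
+                ]
+            )
+            ]
+        )
+```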
+ +### `approval_rule` Block + +The `approval_rule` block supports: + +* `approve_after_days` - (Optional) The number of days after the release date of each patch matched by the rule the patch is marked as approved in the patch baseline. + Valid Range: 0 to 100. + Conflicts with `approve_until_date`. +* `approve_until_date` - (Optional) The cutoff date for auto approval of released patches. + Any patches released on or before this date are installed automatically. + Date is formatted as `YYYY-MM-DD`. + Conflicts with `approve_after_days` +* `patch_filter` - (Required) The patch filter group that defines the criteria for the rule. + Up to 5 patch filters can be specified per approval rule using Key/Value pairs. + Valid combinations of these Keys and the `operating_system` value can be found in the [SSM DescribePatchProperties API Reference](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_DescribePatchProperties.html). + Valid Values are exact values for the patch property given as the key, or a wildcard `*`, which matches all values. + * `PATCH_SET` defaults to `OS` if unspecified +* `compliance_level` - (Optional) The compliance level for patches approved by this rule. + Valid values are `CRITICAL`, `HIGH`, `MEDIUM`, `LOW`, `INFORMATIONAL`, and `UNSPECIFIED`. + The default value is `UNSPECIFIED`. +* `enable_non_security` - (Optional) Boolean enabling the application of non-security updates. + The default value is `false`. + Valid for Linux instances only. + +### `source` Block + +The `source` block supports: + +* `name` - (Required) The name specified to identify the patch source. +* `configuration` - (Required) The value of the yum repo configuration. + For information about other options available for your yum repository configuration, see the [`dnf.conf` documentation](https://man7.org/linux/man-pages/man5/dnf.conf.5.html) +* `products` - (Required) The specific operating system versions a patch repository applies to, such as `"Ubuntu16.04"`, `"AmazonLinux2016.09"`, `"RedhatEnterpriseLinux7.2"` or `"Suse12.7"`. + For lists of supported product values, see [PatchFilter](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_PatchFilter.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the patch baseline. +* `arn` - The ARN of the patch baseline. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Patch Baselines using their baseline ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SSM Patch Baselines using their baseline ID. 
For example: + +```console +% terraform import aws_ssm_patch_baseline.example pb-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_patch_group.html.markdown b/website/docs/cdktf/python/r/ssm_patch_group.html.markdown new file mode 100644 index 00000000000..3f616f331cc --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_patch_group.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_patch_group" +description: |- + Provides an SSM Patch Group resource +--- + + + +# Resource: aws_ssm_patch_group + +Provides an SSM Patch Group resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_patch_baseline import SsmPatchBaseline +from imports.aws.ssm_patch_group import SsmPatchGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + production = SsmPatchBaseline(self, "production", + approved_patches=["KB123456"], + name="patch-baseline" + ) + SsmPatchGroup(self, "patchgroup", + baseline_id=production.id, + patch_group="patch-group-name" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `baseline_id` - (Required) The ID of the patch baseline to register the patch group with. +* `patch_group` - (Required) The name of the patch group that should be registered with the patch baseline. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the patch group and ID of the patch baseline separated by a comma (`,`). + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_resource_data_sync.html.markdown b/website/docs/cdktf/python/r/ssm_resource_data_sync.html.markdown new file mode 100644 index 00000000000..79264488512 --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_resource_data_sync.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_resource_data_sync" +description: |- + Provides a SSM resource data sync. +--- + + + +# Resource: aws_ssm_resource_data_sync + +Provides a SSM resource data sync. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.s3_bucket import S3Bucket +from imports.aws.s3_bucket_policy import S3BucketPolicy +from imports.aws.ssm_resource_data_sync import SsmResourceDataSync +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + hoge = S3Bucket(self, "hoge", + bucket="tf-test-bucket-1234" + ) + SsmResourceDataSync(self, "foo", + name="foo", + s3_destination=SsmResourceDataSyncS3Destination( + bucket_name=hoge.bucket, + region=hoge.region + ) + ) + data_aws_iam_policy_document_hoge = DataAwsIamPolicyDocument(self, "hoge_2", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:GetBucketAcl"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ssm.amazonaws.com"], + type="Service" + ) + ], + resources=["arn:aws:s3:::tf-test-bucket-1234"], + sid="SSMBucketPermissionsCheck" + ), DataAwsIamPolicyDocumentStatement( + actions=["s3:PutObject"], + condition=[DataAwsIamPolicyDocumentStatementCondition( + test="StringEquals", + values=["bucket-owner-full-control"], + variable="s3:x-amz-acl" + ) + ], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["ssm.amazonaws.com"], + type="Service" + ) + ], + resources=["arn:aws:s3:::tf-test-bucket-1234/*"], + sid="SSMBucketDelivery" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_hoge.override_logical_id("hoge") + aws_s3_bucket_policy_hoge = S3BucketPolicy(self, "hoge_3", + bucket=hoge.id, + policy=Token.as_string(data_aws_iam_policy_document_hoge.json) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_s3_bucket_policy_hoge.override_logical_id("hoge") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name for the configuration. +* `s3_destination` - (Required) Amazon S3 configuration details for the sync. + +## s3_destination + +`s3_destination` supports the following: + +* `bucket_name` - (Required) Name of S3 bucket where the aggregated data is stored. +* `region` - (Required) Region with the bucket targeted by the Resource Data Sync. +* `kms_key_arn` - (Optional) ARN of an encryption key for a destination in Amazon S3. +* `prefix` - (Optional) Prefix for the bucket. +* `sync_format` - (Optional) A supported sync format. Only JsonSerDe is currently supported. Defaults to JsonSerDe. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM resource data sync using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SSM resource data sync using the `name`. 
For example: + +```console +% terraform import aws_ssm_resource_data_sync.example example-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssm_service_setting.html.markdown b/website/docs/cdktf/python/r/ssm_service_setting.html.markdown new file mode 100644 index 00000000000..2ddd6b048eb --- /dev/null +++ b/website/docs/cdktf/python/r/ssm_service_setting.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_service_setting" +description: |- + Defines how a user interacts with or uses a service or a feature of a service. +--- + + + +# Resource: aws_ssm_service_setting + +This setting defines how a user interacts with or uses a service or a feature of a service. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssm_service_setting import SsmServiceSetting +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmServiceSetting(self, "test_setting", + setting_id="arn:aws:ssm:us-east-1:123456789012:servicesetting/ssm/parameter-store/high-throughput-enabled", + setting_value="true" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `setting_id` - (Required) ID of the service setting. +* `setting_value` - (Required) Value of the service setting. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the service setting. +* `status` - Status of the service setting. Value can be `Default`, `Customized` or `PendingUpdate`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS SSM Service Setting using the `setting_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS SSM Service Setting using the `setting_id`. For example: + +```console +% terraform import aws_ssm_service_setting.example arn:aws:ssm:us-east-1:123456789012:servicesetting/ssm/parameter-store/high-throughput-enabled +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssmcontacts_contact.html.markdown b/website/docs/cdktf/python/r/ssmcontacts_contact.html.markdown new file mode 100644 index 00000000000..4fe6f063ec7 --- /dev/null +++ b/website/docs/cdktf/python/r/ssmcontacts_contact.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_contact" +description: |- + Terraform resource for managing an AWS SSM Contact. +--- + + + +# Resource: aws_ssmcontacts_contact + +Terraform resource for managing an AWS SSM Contact. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
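+# Note: aws_ssmincidents_replication_set_example in the depends_on below is an
+# unresolved reference to a replication set defined elsewhere; a contact
+# requires a replication set to exist (see the note under Argument Reference).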
+# +from imports.aws.ssmcontacts_contact import SsmcontactsContact +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmcontactsContact(self, "example", + alias="alias", + depends_on=[aws_ssmincidents_replication_set_example], + type="PERSONAL" + ) +``` + +### Usage With All Fields + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssmcontacts_contact import SsmcontactsContact +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmcontactsContact(self, "example", + alias="alias", + depends_on=[aws_ssmincidents_replication_set_example], + display_name="displayName", + tags={ + "key": "value" + }, + type="ESCALATION" + ) +``` + +## Argument Reference + +~> **NOTE:** A contact implicitly depends on a replication set. If you configured your replication set in Terraform, we recommend you add it to the `depends_on` argument for the Terraform Contact Resource. + +The following arguments are required: + +- `alias` - (Required) A unique and identifiable alias for the contact or escalation plan. + +- `type` - (Required) The type of contact engaged. A single contact is type PERSONAL and an escalation + plan is type ESCALATION. + +The following arguments are optional: + +- `display_name` - (Optional) Full friendly name of the contact or escalation plan. + +- `tags` - (Optional) Map of tags to assign to the resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +- `arn` - The Amazon Resource Name (ARN) of the contact or escalation plan. + +- `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Contact using the `ARN`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SSM Contact using the `ARN`. For example: + +```console +% terraform import aws_ssmcontacts_contact.example {ARNValue} +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssmcontacts_contact_channel.html.markdown b/website/docs/cdktf/python/r/ssmcontacts_contact_channel.html.markdown new file mode 100644 index 00000000000..b35447df23b --- /dev/null +++ b/website/docs/cdktf/python/r/ssmcontacts_contact_channel.html.markdown @@ -0,0 +1,115 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_contact_channel" +description: |- + Terraform resource for managing an AWS SSM Contacts Contact Channel. +--- + + + +# Resource: aws_ssmcontacts_contact_channel + +Terraform resource for managing an AWS SSM Contacts Contact Channel. 
+ +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssmcontacts_contact_channel import SsmcontactsContactChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmcontactsContactChannel(self, "example", + contact_id="arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias", + delivery_address=SsmcontactsContactChannelDeliveryAddress( + simple_address="email@example.com" + ), + name="Example contact channel", + type="EMAIL" + ) +``` + +### Usage with SSM Contact + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssmcontacts_contact import SsmcontactsContact +from imports.aws.ssmcontacts_contact_channel import SsmcontactsContactChannel +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example_contact = SsmcontactsContact(self, "example_contact", + alias="example_contact", + type="PERSONAL" + ) + SsmcontactsContactChannel(self, "example", + contact_id=example_contact.arn, + delivery_address=SsmcontactsContactChannelDeliveryAddress( + simple_address="email@example.com" + ), + name="Example contact channel", + type="EMAIL" + ) +``` + +## Argument Reference + +~> **NOTE:** The contact channel needs to be activated in the AWS Systems Manager console, otherwise it can't be used to engage the contact. See the [Contacts section of the Incident Manager User Guide](https://docs.aws.amazon.com/incident-manager/latest/userguide/contacts.html) for more information. + +The following arguments are required: + +- `contact_id` - (Required) Amazon Resource Name (ARN) of the AWS SSM Contact that the contact channel belongs to. + +- `delivery_address` - (Required) Block that contains contact engagement details. See details below. + +- `name` - (Required) Name of the contact channel. + +- `type` - (Required) Type of the contact channel. One of `SMS`, `VOICE` or `EMAIL`. + +### delivery_address + +- `simple_address` - (Required) Details to engage this contact channel. The expected format depends on the contact channel type and is described in the [`ContactChannelAddress` section of the SSM Contacts API Reference](https://docs.aws.amazon.com/incident-manager/latest/APIReference/API_SSMContacts_ContactChannelAddress.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +- `activation_status` - Whether the contact channel is activated. The contact channel must be activated to use it to engage the contact. One of `ACTIVATED` or `NOT_ACTIVATED`. + +- `arn` - Amazon Resource Name (ARN) of the contact channel. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Contact Channel using the `ARN`. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SSM Contact Channel using the `ARN`. For example: + +```console +% terraform import aws_ssmcontacts_contact_channel.example arn:aws:ssm-contacts:us-west-2:123456789012:contact-channel/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssmcontacts_plan.html.markdown b/website/docs/cdktf/python/r/ssmcontacts_plan.html.markdown new file mode 100644 index 00000000000..87b237ac501 --- /dev/null +++ b/website/docs/cdktf/python/r/ssmcontacts_plan.html.markdown @@ -0,0 +1,146 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_plan" +description: |- + Terraform resource for managing an AWS SSM Contact Plan. +--- + + + +# Resource: aws_ssmcontacts_plan + +Terraform resource for managing an AWS SSM Contact Plan. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssmcontacts_plan import SsmcontactsPlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmcontactsPlan(self, "example", + contact_id="arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias", + stage=[SsmcontactsPlanStage( + duration_in_minutes=1 + ) + ] + ) +``` + +### Usage with SSM Contact + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ssmcontacts_contact import SsmcontactsContact +from imports.aws.ssmcontacts_plan import SsmcontactsPlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + contact = SsmcontactsContact(self, "contact", + alias="alias", + type="PERSONAL" + ) + SsmcontactsPlan(self, "plan", + contact_id=contact.arn, + stage=[SsmcontactsPlanStage( + duration_in_minutes=1 + ) + ] + ) +``` + +### Usage With All Fields + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.ssmcontacts_contact import SsmcontactsContact +from imports.aws.ssmcontacts_plan import SsmcontactsPlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + contact_one = SsmcontactsContact(self, "contact_one", + alias="alias", + type="PERSONAL" + ) + contact_two = SsmcontactsContact(self, "contact_two", + alias="alias", + type="PERSONAL" + ) + escalation_plan = SsmcontactsContact(self, "escalation_plan", + alias="escalation-plan-alias", + type="ESCALATION" + ) + SsmcontactsPlan(self, "test", + contact_id=escalation_plan.arn, + stage=[SsmcontactsPlanStage( + duration_in_minutes=0, + target=[SsmcontactsPlanStageTarget( + contact_target_info=SsmcontactsPlanStageTargetContactTargetInfo( + contact_id=contact_one.arn, + is_essential=False + ) + ), SsmcontactsPlanStageTarget( + contact_target_info=SsmcontactsPlanStageTargetContactTargetInfo( + contact_id=contact_two.arn, + is_essential=True + ) + ) + ] + ) + ] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `contact_id` - (Required) The Amazon Resource Name (ARN) of the contact or escalation plan. +* `stage` - (Required) List of stages. A contact has an engagement plan with stages that contact specified contact channels. An escalation plan uses stages that contact specified contacts. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Contact Plan using the Contact ARN. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import SSM Contact Plan using the Contact ARN. For example: + +```console +% terraform import aws_ssmcontacts_plan.example {ARNValue} +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssmincidents_replication_set.html.markdown b/website/docs/cdktf/python/r/ssmincidents_replication_set.html.markdown new file mode 100644 index 00000000000..281df0762e4 --- /dev/null +++ b/website/docs/cdktf/python/r/ssmincidents_replication_set.html.markdown @@ -0,0 +1,197 @@ +--- +subcategory: "SSM Incident Manager Incidents" +layout: "aws" +page_title: "AWS: aws_ssmincidents_replication_set" +description: |- + Terraform resource for managing an incident replication set for AWS Systems Manager Incident Manager. +--- + + + +# Resource: aws_ssmincidents_replication_set + +Provides a resource for managing a replication set in AWS Systems Manager Incident Manager. + +~> **NOTE:** Deleting a replication set also deletes all Incident Manager related data including response plans, incident records, contacts and escalation plans. + +## Example Usage + +### Basic Usage + +Create a replication set. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+#
+from imports.aws.ssmincidents_replication_set import SsmincidentsReplicationSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmincidentsReplicationSet(self, "replicationSetName",
+            region=[SsmincidentsReplicationSetRegion(
+                name="us-west-2"
+            )
+            ],
+            tags={
+                "example_tag": "exampleValue"
+            }
+        )
+```
+
+Add a Region to a replication set. (You can add only one Region at a time.)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ssmincidents_replication_set import SsmincidentsReplicationSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmincidentsReplicationSet(self, "replicationSetName",
+            region=[SsmincidentsReplicationSetRegion(
+                name="us-west-2"
+            ), SsmincidentsReplicationSetRegion(
+                name="ap-southeast-2"
+            )
+            ]
+        )
+```
+
+Delete a Region from a replication set. (You can delete only one Region at a time.)
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ssmincidents_replication_set import SsmincidentsReplicationSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmincidentsReplicationSet(self, "replicationSetName",
+            region=[SsmincidentsReplicationSetRegion(
+                name="us-west-2"
+            )
+            ]
+        )
+```
+
+### Basic Usage with an AWS Customer Managed Key
+
+Create a replication set with an AWS Key Management Service (AWS KMS) customer managed key:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.kms_key import KmsKey
+from imports.aws.ssmincidents_replication_set import SsmincidentsReplicationSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example_key = KmsKey(self, "example_key")
+        SsmincidentsReplicationSet(self, "replicationSetName",
+            region=[SsmincidentsReplicationSetRegion(
+                kms_key_arn=example_key.arn,
+                name="us-west-2"
+            )
+            ],
+            tags={
+                "example_tag": "exampleValue"
+            }
+        )
+```
+
+## Argument Reference
+
+~> **NOTE:** The Region specified by a Terraform provider must always be one of the Regions specified for the replication set. This is especially important when you perform complex update operations.
+
+~> **NOTE:** After a replication set is created, you can add or delete only one Region at a time.
+
+~> **NOTE:** Incident Manager does not support updating the customer managed key associated with a replication set. Instead, for a replication set with multiple Regions, you must first delete a Region from the replication set, then re-add it with a different customer managed key in separate `terraform apply` operations. For a replication set with only one Region, the entire replication set must be deleted and recreated.
To do this, comment out the replication set and all response plans, and then run the `terraform apply` command to recreate the replication set with the new customer managed key.
+
+~> **NOTE:** You must either use AWS owned keys in all Regions of a replication set, or customer managed keys. To change between an AWS owned key and a customer managed key, a replication set and its associated data must be deleted and recreated.
+
+~> **NOTE:** If possible, create all the customer managed keys you need (using the `terraform apply` command) before you create the replication set, or create the keys and replication set in the same `terraform apply` command. Otherwise, to delete a replication set, you must run one `terraform apply` command to delete the replication set and another to delete the AWS KMS keys used by the replication set. Deleting the AWS KMS keys before deleting the replication set results in an error. In that case, you must manually re-enable the deleted key using the AWS Management Console before you can delete the replication set.
+
+The `region` configuration block is required and supports the following arguments:
+
+* `name` - (Required) The name of the Region, such as `ap-southeast-2`.
+* `kms_key_arn` - (Optional) The Amazon Resource Name (ARN) of the customer managed key. If omitted, AWS manages the AWS KMS keys for you, using an AWS owned key, as indicated by a default value of `DefaultKey`.
+
+The following arguments are optional:
+
+* `tags` - Tags applied to the replication set.
+
+For information about the maximum allowed number of Regions and tag value constraints, see [CreateReplicationSet in the *AWS Systems Manager Incident Manager API Reference*](https://docs.aws.amazon.com/incident-manager/latest/APIReference/API_CreateReplicationSet.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the replication set.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `created_by` - The ARN of the user who created the replication set.
+* `created_time` - A timestamp showing when the replication set was created.
+* `deletion_protected` - If `true`, the last Region in a replication set cannot be deleted.
+* `last_modified_by` - The ARN of the user who last modified the replication set.
+* `last_modified_time` - A timestamp showing when the replication set was last modified.
+* `status` - The overall status of a replication set.
+    * Valid Values: `ACTIVE` | `CREATING` | `UPDATING` | `DELETING` | `FAILED`
+
+In addition to the preceding arguments, the `region` configuration block exports the following attributes for each Region:
+
+* `status` - The current status of the Region.
+    * Valid Values: `ACTIVE` | `CREATING` | `UPDATING` | `DELETING` | `FAILED`
+* `status_update_time` - A timestamp showing when the Region status was last updated.
+* `status_message` - More information about the status of a Region.
+
+## Timeouts
+
+~> **NOTE:** `Update` and `Delete` operations applied to replication sets with large numbers of response plans and data take longer to complete. We recommend that you configure custom timeouts for this situation.
+
+~> **NOTE:** Each additional Region included when you create a replication set increases the amount of time required to complete the `create` operation.
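+
+As a minimal sketch of overriding the default operation timeouts listed below (hand-written rather than `cdktf convert` output; it assumes the generated bindings expose a `SsmincidentsReplicationSetTimeouts` class for the resource's `timeouts` argument, per the usual cdktf codegen pattern):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.ssmincidents_replication_set import (
+    SsmincidentsReplicationSet,
+    SsmincidentsReplicationSetRegion,
+    SsmincidentsReplicationSetTimeouts,  # assumed generated timeouts class
+)
+class TimeoutsExample(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmincidentsReplicationSet(self, "example",
+            region=[SsmincidentsReplicationSetRegion(
+                name="us-west-2"
+            )
+            ],
+            # Allow extra time for replication sets with many response plans.
+            timeouts=SsmincidentsReplicationSetTimeouts(
+                create="180m",
+                update="180m",
+                delete="180m"
+            )
+        )
+```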
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `120m`)
+* `update` - (Default `120m`)
+* `delete` - (Default `120m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Incident Manager replication set. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import an Incident Manager replication set. For example:
+
+```console
+% terraform import aws_ssmincidents_replication_set.replicationSetName import
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/ssmincidents_response_plan.html.markdown b/website/docs/cdktf/python/r/ssmincidents_response_plan.html.markdown
new file mode 100644
index 00000000000..b037fd8e9ef
--- /dev/null
+++ b/website/docs/cdktf/python/r/ssmincidents_response_plan.html.markdown
@@ -0,0 +1,189 @@
+---
+subcategory: "SSM Incident Manager Incidents"
+layout: "aws"
+page_title: "AWS: aws_ssmincidents_response_plan"
+description: |-
+  Terraform resource for managing an incident response plan in AWS Systems Manager Incident Manager.
+---
+
+
+
+# Resource: aws_ssmincidents_response_plan
+
+Provides a Terraform resource to manage response plans in AWS Systems Manager Incident Manager.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.ssmincidents_response_plan import SsmincidentsResponsePlan
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SsmincidentsResponsePlan(self, "example",
+            depends_on=[aws_ssmincidents_replication_set_example],
+            incident_template=SsmincidentsResponsePlanIncidentTemplate(
+                impact=Token.as_number("3"),
+                title="title"
+            ),
+            name="name",
+            tags={
+                "key": "value"
+            }
+        )
+```
+
+### Usage With All Fields
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
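+# Note: document1, role1, topic, example1, and example2 below are unresolved
+# references to resources (an SSM document, an IAM role, and SNS topics)
+# defined elsewhere in the original configuration.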
+# +from imports.aws.ssmincidents_response_plan import SsmincidentsResponsePlan +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SsmincidentsResponsePlan(self, "example", + action=SsmincidentsResponsePlanAction( + ssm_automation=[SsmincidentsResponsePlanActionSsmAutomation( + document_name=document1.name, + document_version="version1", + dynamic_parameters={ + "another_key": "INCIDENT_RECORD_ARN", + "some_key": "INVOLVED_RESOURCES" + }, + parameter=[SsmincidentsResponsePlanActionSsmAutomationParameter( + name="key", + values=["value1", "value2"] + ), SsmincidentsResponsePlanActionSsmAutomationParameter( + name="foo", + values=["bar"] + ) + ], + role_arn=role1.arn, + target_account="RESPONSE_PLAN_OWNER_ACCOUNT" + ) + ] + ), + chat_channel=[topic.arn], + depends_on=[aws_ssmincidents_replication_set_example], + display_name="display name", + engagements=["arn:aws:ssm-contacts:us-east-2:111122223333:contact/test1" + ], + incident_template=SsmincidentsResponsePlanIncidentTemplate( + dedupe_string="dedupe", + impact=Token.as_number("3"), + incident_tags={ + "key": "value" + }, + notification_target=[SsmincidentsResponsePlanIncidentTemplateNotificationTarget( + sns_topic_arn=example1.arn + ), SsmincidentsResponsePlanIncidentTemplateNotificationTarget( + sns_topic_arn=example2.arn + ) + ], + summary="summary", + title="title" + ), + integration=SsmincidentsResponsePlanIntegration( + pagerduty=[SsmincidentsResponsePlanIntegrationPagerduty( + name="pagerdutyIntergration", + secret_id="example", + service_id="example" + ) + ] + ), + name="name", + tags={ + "key": "value" + } + ) +``` + +## Argument Reference + +~> NOTE: A response plan implicitly depends on a replication set. If you configured your replication set in Terraform, +we recommend you add it to the `depends_on` argument for the Terraform ResponsePlan Resource. + +The following arguments are required: + +* `name` - (Required) The name of the response plan. + +The `incident_template` configuration block is required and supports the following arguments: + +* `title` - (Required) The title of a generated incident. +* `impact` - (Required) The impact value of a generated incident. The following values are supported: + * `1` - Severe Impact + * `2` - High Impact + * `3` - Medium Impact + * `4` - Low Impact + * `5` - No Impact +* `dedupe_string` - (Optional) A string used to stop Incident Manager from creating multiple incident records for the same incident. +* `incident_tags` - (Optional) The tags assigned to an incident template. When an incident starts, Incident Manager assigns the tags specified in the template to the incident. +* `summary` - (Optional) The summary of an incident. +* `notification_target` - (Optional) The Amazon Simple Notification Service (Amazon SNS) targets that this incident notifies when it is updated. The `notification_target` configuration block supports the following argument: + * `sns_topic_arn` - (Required) The ARN of the Amazon SNS topic. + +The following arguments are optional: + +* `tags` - (Optional) The tags applied to the response plan. +* `display_name` - (Optional) The long format of the response plan name. This field can contain spaces. +* `chat_channel` - (Optional) The Chatbot chat channel used for collaboration during an incident. +* `engagements` - (Optional) The Amazon Resource Name (ARN) for the contacts and escalation plans that the response plan engages during an incident. 
* `action` - (Optional) The actions that the response plan starts at the beginning of an incident.
    * `ssm_automation` - (Optional) The Systems Manager automation document to start as the runbook at the beginning of the incident. The following values are supported:
        * `document_name` - (Required) The automation document's name.
        * `role_arn` - (Required) The Amazon Resource Name (ARN) of the role that the automation document assumes when it runs commands.
        * `document_version` - (Optional) The version of the automation document to use at runtime.
        * `target_account` - (Optional) The account that the automation document runs in. This can be in either the management account or an application account.
        * `parameter` - (Optional) The key-value pair parameters to use when the automation document runs. The following values are supported:
            * `name` - The name of the parameter.
            * `values` - The values for the associated parameter name.
        * `dynamic_parameters` - (Optional) The key-value pair to resolve dynamic parameter values when processing a Systems Manager Automation runbook.
* `integration` - (Optional) Information about third-party services integrated into the response plan. The following values are supported:
    * `pagerduty` - (Optional) Details about the PagerDuty configuration for a response plan. The following values are supported:
        * `name` - (Required) The name of the PagerDuty configuration.
        * `service_id` - (Required) The ID of the PagerDuty service that the response plan associates with an incident when it launches.
        * `secret_id` - (Required) The ID of the AWS Secrets Manager secret that stores your PagerDuty key (either a General Access REST API Key or User Token REST API Key) and other user credentials.

For more information about the constraints for each field, see [CreateResponsePlan](https://docs.aws.amazon.com/incident-manager/latest/APIReference/API_CreateResponsePlan.html) in the *AWS Systems Manager Incident Manager API Reference*.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the response plan.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Incident Manager response plan using the response plan ARN. You can find the response plan ARN in the AWS Management Console. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssmincidents_response_plan import SsmincidentsResponsePlan
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsmincidentsResponsePlan.generate_config_for_import(self, "response_plan_name", "ARNValue")
```

Using `terraform import`, import an Incident Manager response plan using the response plan ARN. You can find the response plan ARN in the AWS Management Console.
For example:

```console
% terraform import aws_ssmincidents_response_plan.responsePlanName ARNValue
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssoadmin_account_assignment.html.markdown b/website/docs/cdktf/python/r/ssoadmin_account_assignment.html.markdown new file mode 100644 index 00000000000..95e85aad446 --- /dev/null +++ b/website/docs/cdktf/python/r/ssoadmin_account_assignment.html.markdown @@ -0,0 +1,102 @@
---
subcategory: "SSO Admin"
layout: "aws"
page_title: "AWS: aws_ssoadmin_account_assignment"
description: |-
  Manages a Single Sign-On (SSO) Account Assignment
---

# Resource: aws_ssoadmin_account_assignment

Provides a Single Sign-On (SSO) Account Assignment resource

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, property_access, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_identitystore_group import DataAwsIdentitystoreGroup
from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
from imports.aws.data_aws_ssoadmin_permission_set import DataAwsSsoadminPermissionSet
from imports.aws.ssoadmin_account_assignment import SsoadminAccountAssignment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = DataAwsSsoadminInstances(self, "example")
        data_aws_ssoadmin_permission_set_example = DataAwsSsoadminPermissionSet(self, "example_1",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(example.arns), ["0"])),
            name="AWSReadOnlyAccess"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_ssoadmin_permission_set_example.override_logical_id("example")
        data_aws_identitystore_group_example = DataAwsIdentitystoreGroup(self, "example_2",
            alternate_identifier=DataAwsIdentitystoreGroupAlternateIdentifier(
                unique_attribute=DataAwsIdentitystoreGroupAlternateIdentifierUniqueAttribute(
                    attribute_path="DisplayName",
                    attribute_value="ExampleGroup"
                )
            ),
            identity_store_id=Token.as_string(
                property_access(Fn.tolist(example.identity_store_ids), ["0"]))
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_identitystore_group_example.override_logical_id("example")
        aws_ssoadmin_account_assignment_example = SsoadminAccountAssignment(self, "example_3",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(example.arns), ["0"])),
            permission_set_arn=Token.as_string(data_aws_ssoadmin_permission_set_example.arn),
            principal_id=Token.as_string(data_aws_identitystore_group_example.group_id),
            principal_type="GROUP",
            target_id="012347678910",
            target_type="AWS_ACCOUNT"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_account_assignment_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance.
* `permission_set_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set that the admin wants to grant the principal access to.
* `principal_id` - (Required, Forces new resource) An identifier for an object in SSO, such as a user or group. PrincipalIds are GUIDs (for example, `f81d4fae-7dec-11d0-a765-00a0c91e6bf6`).
* `principal_type` - (Required, Forces new resource) The entity type for which the assignment will be created. Valid values: `USER`, `GROUP`.
* `target_id` - (Required, Forces new resource) An AWS account identifier, typically a 10-12 digit string.
* `target_type` - (Optional, Forces new resource) The entity type for which the assignment will be created. Valid values: `AWS_ACCOUNT`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The identifier of the Account Assignment, i.e., the `principal_id`, `principal_type`, `target_id`, `target_type`, `permission_set_arn`, and `instance_arn` separated by commas (`,`).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Account Assignments using the `principal_id`, `principal_type`, `target_id`, `target_type`, `permission_set_arn`, `instance_arn` separated by commas (`,`). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssoadmin_account_assignment import SsoadminAccountAssignment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminAccountAssignment.generate_config_for_import(self, "example",
            "f81d4fae-7dec-11d0-a765-00a0c91e6bf6,GROUP,1234567890,AWS_ACCOUNT,arn:aws:sso:::permissionSet/ssoins-0123456789abcdef/ps-0123456789abcdef,arn:aws:sso:::instance/ssoins-0123456789abcdef")
```

Using `terraform import`, import SSO Account Assignments using the `principal_id`, `principal_type`, `target_id`, `target_type`, `permission_set_arn`, `instance_arn` separated by commas (`,`). For example:

```console
% terraform import aws_ssoadmin_account_assignment.example f81d4fae-7dec-11d0-a765-00a0c91e6bf6,GROUP,1234567890,AWS_ACCOUNT,arn:aws:sso:::permissionSet/ssoins-0123456789abcdef/ps-0123456789abcdef,arn:aws:sso:::instance/ssoins-0123456789abcdef
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssoadmin_customer_managed_policy_attachment.html.markdown b/website/docs/cdktf/python/r/ssoadmin_customer_managed_policy_attachment.html.markdown new file mode 100644 index 00000000000..4b3cb7a5527 --- /dev/null +++ b/website/docs/cdktf/python/r/ssoadmin_customer_managed_policy_attachment.html.markdown @@ -0,0 +1,111 @@
---
subcategory: "SSO Admin"
layout: "aws"
page_title: "AWS: aws_ssoadmin_customer_managed_policy_attachment"
description: |-
  Manages a customer managed policy for a Single Sign-On (SSO) Permission Set
---

# Resource: aws_ssoadmin_customer_managed_policy_attachment

Provides a customer managed policy attachment for a Single Sign-On (SSO) Permission Set resource

~> **NOTE:** Creating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, Token, property_access, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
from imports.aws.iam_policy import IamPolicy
from imports.aws.ssoadmin_customer_managed_policy_attachment import SsoadminCustomerManagedPolicyAttachment
from imports.aws.ssoadmin_permission_set import SsoadminPermissionSet
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = IamPolicy(self, "example",
            description="My test policy",
            name="TestPolicy",
            policy=Token.as_string(
                Fn.jsonencode({
                    "Statement": [{
                        "Action": ["ec2:Describe*"],
                        "Effect": "Allow",
                        "Resource": "*"
                    }
                    ],
                    "Version": "2012-10-17"
                }))
        )
        data_aws_ssoadmin_instances_example = DataAwsSsoadminInstances(self, "example_1")
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_ssoadmin_instances_example.override_logical_id("example")
        aws_ssoadmin_permission_set_example = SsoadminPermissionSet(self, "example_2",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(data_aws_ssoadmin_instances_example.arns), ["0"])),
            name="Example"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_permission_set_example.override_logical_id("example")
        aws_ssoadmin_customer_managed_policy_attachment_example = SsoadminCustomerManagedPolicyAttachment(self, "example_3",
            customer_managed_policy_reference=SsoadminCustomerManagedPolicyAttachmentCustomerManagedPolicyReference(
                name=example.name,
                path="/"
            ),
            instance_arn=Token.as_string(aws_ssoadmin_permission_set_example.instance_arn),
            permission_set_arn=Token.as_string(aws_ssoadmin_permission_set_example.arn)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_customer_managed_policy_attachment_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed.
* `permission_set_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set.
* `customer_managed_policy_reference` - (Required, Forces new resource) Specifies the name and path of a customer managed policy. See below.

### Customer Managed Policy Reference

The `customer_managed_policy_reference` config block describes a customer managed IAM policy. You must have an IAM policy that matches the name and path in each AWS account where you want to deploy your specified permission set. A sketch of pre-creating such a policy in a member account follows the Attribute Reference below.

* `name` - (Required, Forces new resource) Name of the customer managed IAM Policy to be attached.
* `path` - (Optional, Forces new resource) The path to the IAM policy to be attached. The default is `/`. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-friendly-names) for more information.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Policy Name, Policy Path, Permission Set Amazon Resource Name (ARN), and SSO Instance ARN, each separated by a comma (`,`).
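As noted in the Customer Managed Policy Reference section above, the policy named in `customer_managed_policy_reference` must already exist in every account the permission set is provisioned to. The following is a minimal sketch of pre-creating the same policy in a member account; the provider alias and the `member-account` profile name are illustrative assumptions, not part of the generated example:

```python
# Hypothetical sketch - the alias and profile name are assumptions.
from constructs import Construct
from cdktf import Fn, Token, TerraformStack
from imports.aws.provider import AwsProvider
from imports.aws.iam_policy import IamPolicy
class MemberAccountPolicy(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # Aliased provider pointing at the member account.
        member = AwsProvider(self, "aws_member",
            alias="member",
            profile="member-account"
        )
        # Same name and path as the customer_managed_policy_reference above.
        IamPolicy(self, "example_member",
            provider=member,
            name="TestPolicy",
            path="/",
            policy=Token.as_string(
                Fn.jsonencode({
                    "Statement": [{
                        "Action": ["ec2:Describe*"],
                        "Effect": "Allow",
                        "Resource": "*"
                    }
                    ],
                    "Version": "2012-10-17"
                }))
        )
```

A default (unaliased) `AwsProvider` for the management account would still be declared alongside the alias in a real stack.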
## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Managed Policy Attachments using the `name`, `path`, `permission_set_arn`, and `instance_arn` separated by a comma (`,`). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssoadmin_customer_managed_policy_attachment import SsoadminCustomerManagedPolicyAttachment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminCustomerManagedPolicyAttachment.generate_config_for_import(self, "example",
            "TestPolicy,/,arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72")
```

Using `terraform import`, import SSO Managed Policy Attachments using the `name`, `path`, `permission_set_arn`, and `instance_arn` separated by a comma (`,`). For example:

```console
% terraform import aws_ssoadmin_customer_managed_policy_attachment.example TestPolicy,/,arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssoadmin_instance_access_control_attributes.html.markdown b/website/docs/cdktf/python/r/ssoadmin_instance_access_control_attributes.html.markdown new file mode 100644 index 00000000000..6ac24380d5e --- /dev/null +++ b/website/docs/cdktf/python/r/ssoadmin_instance_access_control_attributes.html.markdown @@ -0,0 +1,78 @@
---
subcategory: "SSO Admin"
layout: "aws"
page_title: "AWS: aws_ssoadmin_instance_access_control_attributes"
description: |-
  Provides a Single Sign-On (SSO) ABAC Resource: https://docs.aws.amazon.com/singlesignon/latest/userguide/abac.html
---

# Resource: aws_ssoadmin_instance_access_control_attributes

Provides a Single Sign-On (SSO) [ABAC (attribute-based access control)](https://docs.aws.amazon.com/singlesignon/latest/userguide/abac.html) resource.

## Example Usage

```terraform
data "aws_ssoadmin_instances" "example" {}

resource "aws_ssoadmin_instance_access_control_attributes" "example" {
  instance_arn = tolist(data.aws_ssoadmin_instances.example.arns)[0]
  attribute {
    key = "name"
    value {
      source = ["$${path:name.givenName}"]
    }
  }
  attribute {
    key = "last"
    value {
      source = ["$${path:name.familyName}"]
    }
  }
}
```

## Argument Reference

This resource supports the following arguments:

* `instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance.
* `attribute` - (Required) See [AccessControlAttribute](#accesscontrolattribute) for more details.

### AccessControlAttribute

* `key` - (Required) The name of the attribute associated with your identities in your identity source. This is used to map a specified attribute in your identity source with an attribute in AWS SSO.
* `value` - (Required) The value used for mapping a specified attribute to an identity source. See [AccessControlAttributeValue](#accesscontrolattributevalue).

### AccessControlAttributeValue

* `source` - (Required) The identity source to use when mapping a specified attribute to AWS SSO.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The identifier of the Instance Access Control Attributes resource, which is the `instance_arn`.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Instance Access Control Attributes using the `instance_arn`.
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssoadmin_instance_access_control_attributes import SsoadminInstanceAccessControlAttributes
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminInstanceAccessControlAttributes.generate_config_for_import(self, "example",
            "arn:aws:sso:::instance/ssoins-0123456789abcdef")
```

Using `terraform import`, import SSO Instance Access Control Attributes using the `instance_arn`. For example:

```console
% terraform import aws_ssoadmin_instance_access_control_attributes.example arn:aws:sso:::instance/ssoins-0123456789abcdef
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssoadmin_managed_policy_attachment.html.markdown b/website/docs/cdktf/python/r/ssoadmin_managed_policy_attachment.html.markdown new file mode 100644 index 00000000000..878ab6e109f --- /dev/null +++ b/website/docs/cdktf/python/r/ssoadmin_managed_policy_attachment.html.markdown @@ -0,0 +1,86 @@
---
subcategory: "SSO Admin"
layout: "aws"
page_title: "AWS: aws_ssoadmin_managed_policy_attachment"
description: |-
  Manages an IAM managed policy for a Single Sign-On (SSO) Permission Set
---

# Resource: aws_ssoadmin_managed_policy_attachment

Provides an IAM managed policy for a Single Sign-On (SSO) Permission Set resource

~> **NOTE:** Creating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, property_access, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
from imports.aws.ssoadmin_managed_policy_attachment import SsoadminManagedPolicyAttachment
from imports.aws.ssoadmin_permission_set import SsoadminPermissionSet
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = DataAwsSsoadminInstances(self, "example")
        aws_ssoadmin_permission_set_example = SsoadminPermissionSet(self, "example_1",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(example.arns), ["0"])),
            name="Example"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_permission_set_example.override_logical_id("example")
        aws_ssoadmin_managed_policy_attachment_example = SsoadminManagedPolicyAttachment(self, "example_2",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(example.arns), ["0"])),
            managed_policy_arn="arn:aws:iam::aws:policy/AlexaForBusinessDeviceSetup",
            permission_set_arn=Token.as_string(aws_ssoadmin_permission_set_example.arn)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_managed_policy_attachment_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed.
* `managed_policy_arn` - (Required, Forces new resource) The IAM managed policy Amazon Resource Name (ARN) to be attached to the Permission Set.
* `permission_set_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The Amazon Resource Names (ARNs) of the Managed Policy, Permission Set, and SSO Instance, separated by a comma (`,`).
* `managed_policy_name` - The name of the IAM Managed Policy.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Managed Policy Attachments using the `managed_policy_arn`, `permission_set_arn`, and `instance_arn` separated by a comma (`,`). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssoadmin_managed_policy_attachment import SsoadminManagedPolicyAttachment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminManagedPolicyAttachment.generate_config_for_import(self, "example",
            "arn:aws:iam::aws:policy/AlexaForBusinessDeviceSetup,arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72")
```

Using `terraform import`, import SSO Managed Policy Attachments using the `managed_policy_arn`, `permission_set_arn`, and `instance_arn` separated by a comma (`,`). For example:

```console
% terraform import aws_ssoadmin_managed_policy_attachment.example arn:aws:iam::aws:policy/AlexaForBusinessDeviceSetup,arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssoadmin_permission_set.html.markdown b/website/docs/cdktf/python/r/ssoadmin_permission_set.html.markdown new file mode 100644 index 00000000000..576829719d1 --- /dev/null +++ b/website/docs/cdktf/python/r/ssoadmin_permission_set.html.markdown @@ -0,0 +1,84 @@
---
subcategory: "SSO Admin"
layout: "aws"
page_title: "AWS: aws_ssoadmin_permission_set"
description: |-
  Manages a Single Sign-On (SSO) Permission Set
---

# Resource: aws_ssoadmin_permission_set

Provides a Single Sign-On (SSO) Permission Set resource

~> **NOTE:** Updating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, property_access, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
from imports.aws.ssoadmin_permission_set import SsoadminPermissionSet
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = DataAwsSsoadminInstances(self, "example")
        aws_ssoadmin_permission_set_example = SsoadminPermissionSet(self, "example_1",
            description="An example",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(example.arns), ["0"])),
            name="Example",
            relay_state="https://s3.console.aws.amazon.com/s3/home?region=us-east-1#",
            session_duration="PT2H"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_permission_set_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `description` - (Optional) The description of the Permission Set.
* `instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed.
* `name` - (Required, Forces new resource) The name of the Permission Set.
* `relay_state` - (Optional) The relay state URL used to redirect users within the application during the federation authentication process.
* `session_duration` - (Optional) The length of time that the application user sessions are valid in the ISO-8601 standard. Default: `PT1H`.
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Amazon Resource Name (ARN) of the Permission Set.
* `id` - The Amazon Resource Names (ARNs) of the Permission Set and SSO Instance, separated by a comma (`,`).
* `created_date` - The date the Permission Set was created in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Permission Sets using the `arn` and `instance_arn` separated by a comma (`,`). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssoadmin_permission_set import SsoadminPermissionSet
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminPermissionSet.generate_config_for_import(self, "example",
            "arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72")
```

Using `terraform import`, import SSO Permission Sets using the `arn` and `instance_arn` separated by a comma (`,`). For example:

```console
% terraform import aws_ssoadmin_permission_set.example arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssoadmin_permission_set_inline_policy.html.markdown b/website/docs/cdktf/python/r/ssoadmin_permission_set_inline_policy.html.markdown new file mode 100644 index 00000000000..009971f14b3 --- /dev/null +++ b/website/docs/cdktf/python/r/ssoadmin_permission_set_inline_policy.html.markdown @@ -0,0 +1,97 @@
---
subcategory: "SSO Admin"
layout: "aws"
page_title: "AWS: aws_ssoadmin_permission_set_inline_policy"
description: |-
  Manages an IAM inline policy for a Single Sign-On (SSO) Permission Set
---

# Resource: aws_ssoadmin_permission_set_inline_policy

Provides an IAM inline policy for a Single Sign-On (SSO) Permission Set resource

~> **NOTE:** AWS Single Sign-On (SSO) only supports one IAM inline policy per [`aws_ssoadmin_permission_set`](ssoadmin_permission_set.html) resource.
Creating or updating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, property_access, Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
from imports.aws.ssoadmin_permission_set import SsoadminPermissionSet
from imports.aws.ssoadmin_permission_set_inline_policy import SsoadminPermissionSetInlinePolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = DataAwsIamPolicyDocument(self, "example",
            statement=[DataAwsIamPolicyDocumentStatement(
                actions=["s3:ListAllMyBuckets", "s3:GetBucketLocation"],
                resources=["arn:aws:s3:::*"],
                sid="1"
            )
            ]
        )
        data_aws_ssoadmin_instances_example = DataAwsSsoadminInstances(self, "example_1")
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_ssoadmin_instances_example.override_logical_id("example")
        aws_ssoadmin_permission_set_example = SsoadminPermissionSet(self, "example_2",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(data_aws_ssoadmin_instances_example.arns), ["0"])),
            name="Example"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_permission_set_example.override_logical_id("example")
        aws_ssoadmin_permission_set_inline_policy_example = SsoadminPermissionSetInlinePolicy(self, "example_3",
            inline_policy=Token.as_string(example.json),
            instance_arn=Token.as_string(
                property_access(Fn.tolist(data_aws_ssoadmin_instances_example.arns), ["0"])),
            permission_set_arn=Token.as_string(aws_ssoadmin_permission_set_example.arn)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_permission_set_inline_policy_example.override_logical_id("example")
```

## Argument Reference

This resource supports the following arguments:

* `inline_policy` - (Required) The IAM inline policy to attach to a Permission Set.
* `instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed.
* `permission_set_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The Amazon Resource Names (ARNs) of the Permission Set and SSO Instance, separated by a comma (`,`).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Permission Set Inline Policies using the `permission_set_arn` and `instance_arn` separated by a comma (`,`).
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssoadmin_permission_set_inline_policy import SsoadminPermissionSetInlinePolicy
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminPermissionSetInlinePolicy.generate_config_for_import(self, "example",
            "arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72")
```

Using `terraform import`, import SSO Permission Set Inline Policies using the `permission_set_arn` and `instance_arn` separated by a comma (`,`). For example:

```console
% terraform import aws_ssoadmin_permission_set_inline_policy.example arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/ssoadmin_permissions_boundary_attachment.html.markdown b/website/docs/cdktf/python/r/ssoadmin_permissions_boundary_attachment.html.markdown new file mode 100644 index 00000000000..081f9fced75 --- /dev/null +++ b/website/docs/cdktf/python/r/ssoadmin_permissions_boundary_attachment.html.markdown @@ -0,0 +1,145 @@
---
subcategory: "SSO Admin"
layout: "aws"
page_title: "AWS: aws_ssoadmin_permissions_boundary_attachment"
description: |-
  Attaches a permissions boundary policy to a Single Sign-On (SSO) Permission Set resource.
---

# Resource: aws_ssoadmin_permissions_boundary_attachment

Attaches a permissions boundary policy to a Single Sign-On (SSO) Permission Set resource.

~> **NOTE:** A permission set can have at most one permissions boundary attached; using more than one `aws_ssoadmin_permissions_boundary_attachment` resource that references the same permission set will show a permanent difference.

## Example Usage

### Attaching a customer-managed policy

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Fn, Token, property_access, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_ssoadmin_instances import DataAwsSsoadminInstances
from imports.aws.iam_policy import IamPolicy
from imports.aws.ssoadmin_permission_set import SsoadminPermissionSet
from imports.aws.ssoadmin_permissions_boundary_attachment import SsoadminPermissionsBoundaryAttachment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        example = IamPolicy(self, "example",
            description="My test policy",
            name="TestPolicy",
            policy=Token.as_string(
                Fn.jsonencode({
                    "Statement": [{
                        "Action": ["ec2:Describe*"],
                        "Effect": "Allow",
                        "Resource": "*"
                    }
                    ],
                    "Version": "2012-10-17"
                }))
        )
        data_aws_ssoadmin_instances_example = DataAwsSsoadminInstances(self, "example_1")
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_ssoadmin_instances_example.override_logical_id("example")
        aws_ssoadmin_permission_set_example = SsoadminPermissionSet(self, "example_2",
            instance_arn=Token.as_string(
                property_access(Fn.tolist(data_aws_ssoadmin_instances_example.arns), ["0"])),
            name="Example"
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_permission_set_example.override_logical_id("example")
        aws_ssoadmin_permissions_boundary_attachment_example = SsoadminPermissionsBoundaryAttachment(self, "example_3",
            instance_arn=Token.as_string(aws_ssoadmin_permission_set_example.instance_arn),
            permission_set_arn=Token.as_string(aws_ssoadmin_permission_set_example.arn),
            permissions_boundary=SsoadminPermissionsBoundaryAttachmentPermissionsBoundary(
                customer_managed_policy_reference=SsoadminPermissionsBoundaryAttachmentPermissionsBoundaryCustomerManagedPolicyReference(
                    name=example.name,
                    path="/"
                )
            )
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_ssoadmin_permissions_boundary_attachment_example.override_logical_id("example")
```

### Attaching an AWS-managed policy

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.ssoadmin_permissions_boundary_attachment import SsoadminPermissionsBoundaryAttachment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminPermissionsBoundaryAttachment(self, "example",
            instance_arn=Token.as_string(aws_ssoadmin_permission_set_example.instance_arn),
            permission_set_arn=Token.as_string(aws_ssoadmin_permission_set_example.arn),
            permissions_boundary=SsoadminPermissionsBoundaryAttachmentPermissionsBoundary(
                managed_policy_arn="arn:aws:iam::aws:policy/ReadOnlyAccess"
            )
        )
```

## Argument Reference

The following arguments are required:

* `instance_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed.
* `permission_set_arn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set.
* `permissions_boundary` - (Required, Forces new resource) The permissions boundary policy. See below.

### Permissions Boundary

The `permissions_boundary` config block describes the permissions boundary policy to attach. You can reference either an AWS-managed policy or a customer managed policy, but only one of the two may be set.

* `managed_policy_arn` - (Optional) AWS-managed IAM policy ARN to use as the permissions boundary.
* `customer_managed_policy_reference` - (Optional) Specifies the name and path of a customer managed policy. See below.

### Customer Managed Policy Reference

The `customer_managed_policy_reference` config block describes a customer managed IAM policy. You must have an IAM policy that matches the name and path in each AWS account where you want to deploy your specified permission set.

* `name` - (Required, Forces new resource) Name of the customer managed IAM Policy to be attached.
* `path` - (Optional, Forces new resource) The path to the IAM policy to be attached. The default is `/`. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-friendly-names) for more information.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Permission Set Amazon Resource Name (ARN) and SSO Instance ARN, separated by a comma (`,`).
## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Admin Permissions Boundary Attachments using the `permission_set_arn` and `instance_arn`, separated by a comma (`,`). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.ssoadmin_permissions_boundary_attachment import SsoadminPermissionsBoundaryAttachment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        SsoadminPermissionsBoundaryAttachment.generate_config_for_import(self, "example",
            "arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72")
```

Using `terraform import`, import SSO Admin Permissions Boundary Attachments using the `permission_set_arn` and `instance_arn`, separated by a comma (`,`). For example:

```console
% terraform import aws_ssoadmin_permissions_boundary_attachment.example arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/storagegateway_cache.html.markdown b/website/docs/cdktf/python/r/storagegateway_cache.html.markdown new file mode 100644 index 00000000000..71dd20af056 --- /dev/null +++ b/website/docs/cdktf/python/r/storagegateway_cache.html.markdown @@ -0,0 +1,69 @@
---
subcategory: "Storage Gateway"
layout: "aws"
page_title: "AWS: aws_storagegateway_cache"
description: |-
  Manages an AWS Storage Gateway cache
---

# Resource: aws_storagegateway_cache

Manages an AWS Storage Gateway cache.

~> **NOTE:** The Storage Gateway API provides no method to remove a cache disk. Destroying this Terraform resource does not perform any Storage Gateway actions.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.storagegateway_cache import StoragegatewayCache
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        StoragegatewayCache(self, "example",
            disk_id=Token.as_string(data_aws_storagegateway_local_disk_example.id),
            gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn)
        )
```

## Argument Reference

This resource supports the following arguments:

* `disk_id` - (Required) Local disk identifier. For example, `pci-0000:03:00.0-scsi-0:0:0:0`.
* `gateway_arn` - (Required) The Amazon Resource Name (ARN) of the gateway.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Combined gateway Amazon Resource Name (ARN) and local disk identifier.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_cache` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.storagegateway_cache import StoragegatewayCache
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        StoragegatewayCache.generate_config_for_import(self, "example",
            "arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678:pci-0000:03:00.0-scsi-0:0:0:0")
```

Using `terraform import`, import `aws_storagegateway_cache` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`).
For example:

```console
% terraform import aws_storagegateway_cache.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678:pci-0000:03:00.0-scsi-0:0:0:0
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/storagegateway_cached_iscsi_volume.html.markdown b/website/docs/cdktf/python/r/storagegateway_cached_iscsi_volume.html.markdown new file mode 100644 index 00000000000..29be8575070 --- /dev/null +++ b/website/docs/cdktf/python/r/storagegateway_cached_iscsi_volume.html.markdown @@ -0,0 +1,140 @@
---
subcategory: "Storage Gateway"
layout: "aws"
page_title: "AWS: aws_storagegateway_cached_iscsi_volume"
description: |-
  Manages an AWS Storage Gateway cached iSCSI volume
---

# Resource: aws_storagegateway_cached_iscsi_volume

Manages an AWS Storage Gateway cached iSCSI volume.

~> **NOTE:** The gateway must have cache added (e.g., via the [`aws_storagegateway_cache`](/docs/providers/aws/r/storagegateway_cache.html) resource) before creating volumes; otherwise, the Storage Gateway API will return an error.

~> **NOTE:** The gateway must have an upload buffer added (e.g., via the [`aws_storagegateway_upload_buffer`](/docs/providers/aws/r/storagegateway_upload_buffer.html) resource) before the volume is operational to clients; however, the Storage Gateway API will allow volume creation without error in that case and return volume status as `UPLOAD BUFFER NOT CONFIGURED`.

## Example Usage

~> **NOTE:** These examples are referencing the [`aws_storagegateway_cache`](/docs/providers/aws/r/storagegateway_cache.html) resource `gateway_arn` attribute to ensure Terraform properly adds cache before creating the volume. If you are not using this method, you may need to declare an explicit dependency (e.g., via `depends_on = [aws_storagegateway_cache.example]`) to ensure proper ordering.

### Create Empty Cached iSCSI Volume

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.storagegateway_cached_iscsi_volume import StoragegatewayCachedIscsiVolume
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        StoragegatewayCachedIscsiVolume(self, "example",
            gateway_arn=Token.as_string(aws_storagegateway_cache_example.gateway_arn),
            network_interface_id=Token.as_string(aws_instance_example.private_ip),
            target_name="example",
            volume_size_in_bytes=5368709120
        )
```

### Create Cached iSCSI Volume From Snapshot

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, Op, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
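# The Op.mul chain below converts the snapshot's volume_size (GiB) into
# bytes (GiB * 1024 * 1024 * 1024), which volume_size_in_bytes expects.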
#
from imports.aws.storagegateway_cached_iscsi_volume import StoragegatewayCachedIscsiVolume
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        StoragegatewayCachedIscsiVolume(self, "example",
            gateway_arn=Token.as_string(aws_storagegateway_cache_example.gateway_arn),
            network_interface_id=Token.as_string(aws_instance_example.private_ip),
            snapshot_id=Token.as_string(aws_ebs_snapshot_example.id),
            target_name="example",
            volume_size_in_bytes=Token.as_number(
                Op.mul(
                    Op.mul(Op.mul(aws_ebs_snapshot_example.volume_size, 1024), 1024), 1024))
        )
```

### Create Cached iSCSI Volume From Source Volume

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.storagegateway_cached_iscsi_volume import StoragegatewayCachedIscsiVolume
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        StoragegatewayCachedIscsiVolume(self, "example",
            gateway_arn=Token.as_string(aws_storagegateway_cache_example.gateway_arn),
            network_interface_id=Token.as_string(aws_instance_example.private_ip),
            source_volume_arn=existing.arn,
            target_name="example",
            volume_size_in_bytes=existing.volume_size_in_bytes
        )
```

## Argument Reference

This resource supports the following arguments:

* `gateway_arn` - (Required) The Amazon Resource Name (ARN) of the gateway.
* `network_interface_id` - (Required) The network interface of the gateway on which to expose the iSCSI target. Only IPv4 addresses are accepted.
* `target_name` - (Required) The name of the iSCSI target used by initiators to connect to the target and as a suffix for the target ARN. The target name must be unique across all volumes of a gateway.
* `volume_size_in_bytes` - (Required) The size of the volume in bytes.
* `snapshot_id` - (Optional) The snapshot ID of the snapshot to restore as the new cached volume. For example, `snap-1122aabb`.
* `source_volume_arn` - (Optional) The ARN for an existing volume. Specifying this ARN makes the new volume into an exact copy of the specified existing volume's latest recovery point. The `volume_size_in_bytes` value for this new volume must be equal to or larger than the size of the existing volume, in bytes.
* `kms_encrypted` - (Optional) Set to `true` to use Amazon S3 server side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3.
* `kms_key` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS key used for Amazon S3 server side encryption. Required when `kms_encrypted` is `true`.
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
* `chap_enabled` - Whether mutual CHAP is enabled for the iSCSI target.
* `id` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
* `lun_number` - Logical disk number.
* `network_interface_port` - The port used to communicate with iSCSI targets.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
* `target_arn` - Target Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/target/iqn.1997-05.com.amazon:TargetName`.
* `volume_arn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
* `volume_id` - Volume ID, e.g., `vol-12345678`.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_cached_iscsi_volume` using the volume Amazon Resource Name (ARN). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.storagegateway_cached_iscsi_volume import StoragegatewayCachedIscsiVolume
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        StoragegatewayCachedIscsiVolume.generate_config_for_import(self, "example",
            "arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678")
```

Using `terraform import`, import `aws_storagegateway_cached_iscsi_volume` using the volume Amazon Resource Name (ARN). For example:

```console
% terraform import aws_storagegateway_cached_iscsi_volume.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/storagegateway_file_system_association.html.markdown b/website/docs/cdktf/python/r/storagegateway_file_system_association.html.markdown new file mode 100644 index 00000000000..707efa38b30 --- /dev/null +++ b/website/docs/cdktf/python/r/storagegateway_file_system_association.html.markdown @@ -0,0 +1,149 @@
---
subcategory: "Storage Gateway"
layout: "aws"
page_title: "AWS: aws_storagegateway_file_system_association"
description: |-
  Manages an association between an Amazon FSx file system and an Amazon FSx File Gateway.
---

# Resource: aws_storagegateway_file_system_association

Associate an Amazon FSx file system with the FSx File Gateway. After the association process is complete, the file shares on the Amazon FSx file system are available for access through the gateway. This operation only supports the FSx File Gateway type.

[FSx File Gateway requirements](https://docs.aws.amazon.com/filegateway/latest/filefsxw/Requirements.html).

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.storagegateway_file_system_association import StoragegatewayFileSystemAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayFileSystemAssociation(self, "example", + audit_destination_arn=Token.as_string(aws_s3_bucket_example.arn), + gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn), + location_arn=Token.as_string(aws_fsx_windows_file_system_example.arn), + password="avoid-plaintext-passwords", + username="Admin" + ) +``` + +## Required Services Example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, property_access, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_ssm_parameter import DataAwsSsmParameter +from imports.aws.fsx_windows_file_system import FsxWindowsFileSystem +from imports.aws.instance import Instance +from imports.aws.storagegateway_file_system_association import StoragegatewayFileSystemAssociation +from imports.aws.storagegateway_gateway import StoragegatewayGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = FsxWindowsFileSystem(self, "test", + active_directory_id=Token.as_string(aws_directory_service_directory_test.id), + security_group_ids=[Token.as_string(aws_security_group_test.id)], + skip_final_backup=True, + storage_capacity=32, + subnet_ids=[Token.as_string(property_access(aws_subnet_test, ["0", "id"]))], + throughput_capacity=8 + ) + aws_service_storagegateway_ami_file_s3_latest = DataAwsSsmParameter(self, "aws_service_storagegateway_ami_FILE_S3_latest", + name="/aws/service/storagegateway/ami/FILE_S3/latest" + ) + aws_instance_test = Instance(self, "test_2", + ami=Token.as_string(aws_service_storagegateway_ami_file_s3_latest.value), + associate_public_ip_address=True, + depends_on=[aws_route_test, aws_vpc_dhcp_options_association_test], + instance_type=Token.as_string(available.instance_type), + subnet_id=Token.as_string(property_access(aws_subnet_test, ["0", "id"])), + vpc_security_group_ids=[Token.as_string(aws_security_group_test.id)] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_instance_test.override_logical_id("test") + aws_storagegateway_gateway_test = StoragegatewayGateway(self, "test_3", + gateway_ip_address=Token.as_string(aws_instance_test.public_ip), + gateway_name="test-sgw", + gateway_timezone="GMT", + gateway_type="FILE_FSX_SMB", + smb_active_directory_settings=StoragegatewayGatewaySmbActiveDirectorySettings( + domain_name=Token.as_string(aws_directory_service_directory_test.name), + password=Token.as_string(aws_directory_service_directory_test.password), + username="Admin" + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
        aws_storagegateway_gateway_test.override_logical_id("test")
        StoragegatewayFileSystemAssociation(self, "fsx",
            audit_destination_arn=Token.as_string(aws_cloudwatch_log_group_test.arn),
            cache_attributes=StoragegatewayFileSystemAssociationCacheAttributes(
                cache_stale_timeout_in_seconds=400
            ),
            gateway_arn=Token.as_string(aws_storagegateway_gateway_test.arn),
            location_arn=test.arn,
            password=Token.as_string(aws_directory_service_directory_test.password),
            username="Admin"
        )
```

## Argument Reference

This resource supports the following arguments:

* `gateway_arn` - (Required) The Amazon Resource Name (ARN) of the gateway.
* `location_arn` - (Required) The Amazon Resource Name (ARN) of the Amazon FSx file system to associate with the FSx File Gateway.
* `username` - (Required) The user name of the user credential that has permission to access the root share of the Amazon FSx file system. The user account must belong to the Amazon FSx delegated admin user group.
* `password` - (Required, sensitive) The password of the user credential.
* `audit_destination_arn` - (Optional) The Amazon Resource Name (ARN) of the storage used for the audit logs.
* `cache_attributes` - (Optional) Refresh cache information. See [Cache Attributes](#cache_attributes) for more details.
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### cache_attributes

* `cache_stale_timeout_in_seconds` - (Optional) Refreshes a file share's cache by using Time To Live (TTL). TTL is the length of time since the last refresh after which access to the directory would cause the file gateway to first refresh that directory's contents from the Amazon S3 bucket. Valid values: `0` or `300` to `2592000` seconds (5 minutes to 30 days). Defaults to `0`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Amazon Resource Name (ARN) of the FSx file system association.
* `arn` - Amazon Resource Name (ARN) of the newly created file system association.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_file_system_association` using the FSx file system association Amazon Resource Name (ARN). For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
# Sketch: generate_config_for_import (cdktf 0.18+) mirrors the HCL import block.
from imports.aws.storagegateway_file_system_association import StoragegatewayFileSystemAssociation
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        StoragegatewayFileSystemAssociation.generate_config_for_import(self, "example",
            "arn:aws:storagegateway:us-east-1:123456789012:fs-association/fsa-0DA347732FDB40125")
```

Using `terraform import`, import `aws_storagegateway_file_system_association` using the FSx file system association Amazon Resource Name (ARN).
For example:

```console
% terraform import aws_storagegateway_file_system_association.example arn:aws:storagegateway:us-east-1:123456789012:fs-association/fsa-0DA347732FDB40125
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/storagegateway_gateway.html.markdown b/website/docs/cdktf/python/r/storagegateway_gateway.html.markdown new file mode 100644 index 00000000000..94a7278145e --- /dev/null +++ b/website/docs/cdktf/python/r/storagegateway_gateway.html.markdown @@ -0,0 +1,287 @@
---
subcategory: "Storage Gateway"
layout: "aws"
page_title: "AWS: aws_storagegateway_gateway"
description: |-
  Manages an AWS Storage Gateway file, tape, or volume gateway in the provider region
---

# Resource: aws_storagegateway_gateway

Manages an AWS Storage Gateway file, tape, or volume gateway in the provider region.

~> **NOTE:** The Storage Gateway API requires the gateway to be connected to properly return information after activation. If you are receiving `The specified gateway is not connected` errors during resource creation (gateway activation), ensure your gateway instance meets the [Storage Gateway requirements](https://docs.aws.amazon.com/storagegateway/latest/userguide/Requirements.html).

## Example Usage

### Local Cache

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.data_aws_storagegateway_local_disk import DataAwsStoragegatewayLocalDisk
from imports.aws.storagegateway_cache import StoragegatewayCache
from imports.aws.volume_attachment import VolumeAttachment
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        VolumeAttachment(self, "test",
            device_name="/dev/xvdb",
            instance_id=Token.as_string(aws_instance_test.id),
            volume_id=Token.as_string(aws_ebs_volume_test.id)
        )
        data_aws_storagegateway_local_disk_test = DataAwsStoragegatewayLocalDisk(self, "test_1",
            disk_node=Token.as_string(data_aws_volume_attachment_test.device_name),
            gateway_arn=Token.as_string(aws_storagegateway_gateway_test.arn)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        data_aws_storagegateway_local_disk_test.override_logical_id("test")
        aws_storagegateway_cache_test = StoragegatewayCache(self, "test_2",
            disk_id=Token.as_string(data_aws_storagegateway_local_disk_test.disk_id),
            gateway_arn=Token.as_string(aws_storagegateway_gateway_test.arn)
        )
        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
        aws_storagegateway_cache_test.override_logical_id("test")
```

### FSx File Gateway

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.storagegateway_gateway import StoragegatewayGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayGateway(self, "example", + gateway_ip_address="1.2.3.4", + gateway_name="example", + gateway_timezone="GMT", + gateway_type="FILE_FSX_SMB", + smb_active_directory_settings=StoragegatewayGatewaySmbActiveDirectorySettings( + domain_name="corp.example.com", + password="avoid-plaintext-passwords", + username="Admin" + ) + ) +``` + +### S3 File Gateway + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.storagegateway_gateway import StoragegatewayGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayGateway(self, "example", + gateway_ip_address="1.2.3.4", + gateway_name="example", + gateway_timezone="GMT", + gateway_type="FILE_S3" + ) +``` + +### Tape Gateway + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.storagegateway_gateway import StoragegatewayGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayGateway(self, "example", + gateway_ip_address="1.2.3.4", + gateway_name="example", + gateway_timezone="GMT", + gateway_type="VTL", + medium_changer_type="AWS-Gateway-VTL", + tape_drive_type="IBM-ULT3580-TD5" + ) +``` + +### Volume Gateway (Cached) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.storagegateway_gateway import StoragegatewayGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayGateway(self, "example", + gateway_ip_address="1.2.3.4", + gateway_name="example", + gateway_timezone="GMT", + gateway_type="CACHED" + ) +``` + +### Volume Gateway (Stored) + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.storagegateway_gateway import StoragegatewayGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayGateway(self, "example", + gateway_ip_address="1.2.3.4", + gateway_name="example", + gateway_timezone="GMT", + gateway_type="STORED" + ) +``` + +## Argument Reference + +~> **NOTE:** One of `activation_key` or `gateway_ip_address` must be provided for resource creation (gateway activation). Neither is required for resource import. If using `gateway_ip_address`, Terraform must be able to make an HTTP (port 80) GET request to the specified IP address from where it is running. 
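+
+If you already have an activation key in hand, a minimal, hedged sketch of key-based activation might look like the following (the activation key value is a hypothetical placeholder):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.storagegateway_gateway import StoragegatewayGateway
+class ActivationKeySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewayGateway(self, "example",
+            # Activation key obtained out of band; conflicts with gateway_ip_address.
+            activation_key="ABCDE-12345-FGHIJ-67890-KLMNO",
+            gateway_name="example",
+            gateway_timezone="GMT",
+            gateway_type="FILE_S3"
+        )
+```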
+
+This resource supports the following arguments:
+
+* `gateway_name` - (Required) Name of the gateway.
+* `gateway_timezone` - (Required) Time zone for the gateway. The time zone is of the format "GMT", "GMT-hr:mm", or "GMT+hr:mm". For example, `GMT-4:00` indicates the time is 4 hours behind GMT. The time zone is used, for example, for scheduling snapshots and your gateway's maintenance schedule.
+* `activation_key` - (Optional) Gateway activation key during resource creation. Conflicts with `gateway_ip_address`. Additional information is available in the [Storage Gateway User Guide](https://docs.aws.amazon.com/storagegateway/latest/userguide/get-activation-key.html).
+* `average_download_rate_limit_in_bits_per_sec` - (Optional) The average download bandwidth rate limit in bits per second. This is supported for the `CACHED`, `STORED`, and `VTL` gateway types.
+* `average_upload_rate_limit_in_bits_per_sec` - (Optional) The average upload bandwidth rate limit in bits per second. This is supported for the `CACHED`, `STORED`, and `VTL` gateway types.
+* `gateway_ip_address` - (Optional) Gateway IP address to retrieve activation key during resource creation. Conflicts with `activation_key`. Gateway must be accessible on port 80 from where Terraform is running. Additional information is available in the [Storage Gateway User Guide](https://docs.aws.amazon.com/storagegateway/latest/userguide/get-activation-key.html).
+* `gateway_type` - (Optional) Type of the gateway. The default value is `STORED`. Valid values: `CACHED`, `FILE_FSX_SMB`, `FILE_S3`, `STORED`, `VTL`.
+* `gateway_vpc_endpoint` - (Optional) VPC endpoint address to be used when activating your gateway. This should be used when your instance is in a private subnet. Requires HTTP access from the client computer running Terraform. See [Activating a Gateway in a Virtual Private Cloud](https://docs.aws.amazon.com/storagegateway/latest/userguide/gateway-private-link.html) for more information on the ports your VPC endpoint security group must allow.
+* `cloudwatch_log_group_arn` - (Optional) The Amazon Resource Name (ARN) of the Amazon CloudWatch log group to use to monitor and log events in the gateway.
+* `maintenance_start_time` - (Optional) The gateway's weekly maintenance start time information, including day and time of the week. The maintenance time is the time in your gateway's time zone. More details below.
+* `medium_changer_type` - (Optional) Type of medium changer to use for tape gateway. Terraform cannot detect drift of this argument. Valid values: `STK-L700`, `AWS-Gateway-VTL`, `IBM-03584L32-0402`.
+* `smb_active_directory_settings` - (Optional) Nested argument with Active Directory domain join information for Server Message Block (SMB) file shares. Only valid for `FILE_S3` and `FILE_FSX_SMB` gateway types. Must be set before creating `ActiveDirectory` authentication SMB file shares. More details below.
+* `smb_guest_password` - (Optional) Guest password for Server Message Block (SMB) file shares. Only valid for `FILE_S3` and `FILE_FSX_SMB` gateway types. Must be set before creating `GuestAccess` authentication SMB file shares. Terraform can only detect drift of the existence of a guest password, not its actual value from the gateway. Terraform can, however, update the password by changing the argument.
+* `smb_security_strategy` - (Optional) Specifies the type of security strategy. Valid values are: `ClientSpecified`, `MandatorySigning`, and `MandatoryEncryption`.
See [Setting a Security Level for Your Gateway](https://docs.aws.amazon.com/storagegateway/latest/userguide/managing-gateway-file.html#security-strategy) for more information.
+* `smb_file_share_visibility` - (Optional) Specifies whether the shares on this gateway appear when listing shares.
+* `tape_drive_type` - (Optional) Type of tape drive to use for tape gateway. Terraform cannot detect drift of this argument. Valid values: `IBM-ULT3580-TD5`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### maintenance_start_time
+
+* `day_of_month` - (Optional) The day of the month component of the maintenance start time represented as an ordinal number from 1 to 28, where 1 represents the first day of the month and 28 represents the last day of the month.
+* `day_of_week` - (Optional) The day of the week component of the maintenance start time, represented as an ordinal number from 0 to 6, where 0 represents Sunday and 6 represents Saturday.
+* `hour_of_day` - (Required) The hour component of the maintenance start time represented as _hh_, where _hh_ is the hour (00 to 23). The hour of the day is in the time zone of the gateway.
+* `minute_of_hour` - (Required) The minute component of the maintenance start time represented as _mm_, where _mm_ is the minute (00 to 59). The minute of the hour is in the time zone of the gateway.
+
+### smb_active_directory_settings
+
+Information to join the gateway to an Active Directory domain for Server Message Block (SMB) file shares.
+
+~> **NOTE** It is not possible to unconfigure this setting without recreating the gateway. Also, Terraform can only detect drift of the `domain_name` argument from the gateway.
+
+~> **NOTE:** The Storage Gateway needs to be able to resolve the name of your Active Directory Domain Controller. If the gateway is hosted on EC2, ensure that DNS/DHCP is configured prior to creating the EC2 instance. If you are receiving `NETWORK_ERROR` errors during resource creation (gateway joining the domain), ensure your gateway instance meets the [FSx File Gateway requirements](https://docs.aws.amazon.com/filegateway/latest/filefsxw/Requirements.html).
+
+* `domain_name` - (Required) The name of the domain that you want the gateway to join.
+* `password` - (Required) The password of the user who has permission to add the gateway to the Active Directory domain.
+* `username` - (Required) The user name of the user who has permission to add the gateway to the Active Directory domain.
+* `timeout_in_seconds` - (Optional) Specifies the time, in seconds, within which the JoinDomain operation must complete. The default is `20` seconds.
+* `organizational_unit` - (Optional) The organizational unit (OU) is a container in an Active Directory domain that can hold users, groups, computers, and other OUs. This parameter specifies the OU that the gateway will join within the AD domain.
+* `domain_controllers` - (Optional) List of IPv4 addresses, NetBIOS names, or host names of your domain server. If you need to specify the port number, include it after the colon (`:`). For example, `mydc.mydomain.com:389`.
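+
+As a hedged sketch of the `maintenance_start_time` block described above (the `StoragegatewayGatewayMaintenanceStartTime` struct name and its field types are assumptions based on the generated-binding pattern used elsewhere on this page; all values are illustrative):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.storagegateway_gateway import (
+    StoragegatewayGateway,
+    StoragegatewayGatewayMaintenanceStartTime,
+)
+class MaintenanceWindowSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewayGateway(self, "example",
+            gateway_ip_address="1.2.3.4",
+            gateway_name="example",
+            gateway_timezone="GMT",
+            gateway_type="FILE_S3",
+            # Weekly window: Sundays at 03:30, in the gateway's own time zone.
+            # Assumption: the day fields take strings, hour/minute take numbers.
+            maintenance_start_time=StoragegatewayGatewayMaintenanceStartTime(
+                day_of_week="0",
+                hour_of_day=3,
+                minute_of_hour=30
+            )
+        )
+```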
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the gateway.
+* `arn` - Amazon Resource Name (ARN) of the gateway.
+* `gateway_id` - Identifier of the gateway.
+* `ec2_instance_id` - The ID of the Amazon EC2 instance that was used to launch the gateway.
+* `endpoint_type` - The type of endpoint for your gateway.
+* `host_environment` - The type of hypervisor environment used by the host.
+* `gateway_network_interface` - An array that contains descriptions of the gateway network interfaces. See [Gateway Network Interface](#gateway-network-interface).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### Gateway Network Interface
+
+* `ipv4_address` - The Internet Protocol version 4 (IPv4) address of the interface.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_gateway` using the gateway Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_storagegateway_gateway` using the gateway Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_gateway.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678
+```
+
+Certain resource arguments, such as `gateway_ip_address`, do not have a Storage Gateway API method for reading the information after creation. Either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from cdktf import TerraformResourceLifecycle
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.storagegateway_gateway import StoragegatewayGateway
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name, *, gateway_name, gateway_timezone):
+        super().__init__(scope, name)
+        StoragegatewayGateway(self, "example",
+            gateway_ip_address=sgw.private_ip,
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["gateway_ip_address"]
+            ),
+            gateway_name=gateway_name,
+            gateway_timezone=gateway_timezone
+        )
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/storagegateway_nfs_file_share.html.markdown b/website/docs/cdktf/python/r/storagegateway_nfs_file_share.html.markdown
new file mode 100644
index 00000000000..e7c8ed8288d
--- /dev/null
+++ b/website/docs/cdktf/python/r/storagegateway_nfs_file_share.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_nfs_file_share"
+description: |-
+  Manages an AWS Storage Gateway NFS File Share
+---
+
+
+
+# Resource: aws_storagegateway_nfs_file_share
+
+Manages an AWS Storage Gateway NFS File Share.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.storagegateway_nfs_file_share import StoragegatewayNfsFileShare
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewayNfsFileShare(self, "example",
+            client_list=["0.0.0.0/0"],
+            gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn),
+            location_arn=Token.as_string(aws_s3_bucket_example.arn),
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `client_list` - (Required) The list of clients that are allowed to access the file gateway. The list must contain either valid IP addresses or valid CIDR blocks. Set to `["0.0.0.0/0"]` to not limit access. Minimum 1 item. Maximum 100 items.
+* `gateway_arn` - (Required) Amazon Resource Name (ARN) of the file gateway.
+* `location_arn` - (Required) The ARN of the backend storage used for storing file data.
+* `vpc_endpoint_dns_name` - (Optional) The DNS name of the VPC endpoint for S3 PrivateLink.
+* `bucket_region` - (Optional) The region of the S3 bucket used by the file share. Required when specifying `vpc_endpoint_dns_name`.
+* `role_arn` - (Required) The ARN of the AWS Identity and Access Management (IAM) role that a file gateway assumes when it accesses the underlying storage.
+* `audit_destination_arn` - (Optional) The Amazon Resource Name (ARN) of the storage used for audit logs.
+* `default_storage_class` - (Optional) The default [storage class](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-DefaultStorageClass) for objects put into an Amazon S3 bucket by the file gateway. Defaults to `S3_STANDARD`.
+* `guess_mime_type_enabled` - (Optional) Boolean value that enables guessing of the MIME type for uploaded objects based on file extensions. Defaults to `true`.
+* `kms_encrypted` - (Optional) Boolean value: `true` to use Amazon S3 server-side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3. Defaults to `false`.
+* `kms_key_arn` - (Optional) Amazon Resource Name (ARN) for the KMS key used for Amazon S3 server-side encryption. This value can only be set when `kms_encrypted` is `true`.
+* `nfs_file_share_defaults` - (Optional) Nested argument with file share default values. See [NFS File Share Defaults](#nfs_file_share_defaults) below for more details.
+* `cache_attributes` - (Optional) Refresh cache information. See [Cache Attributes](#cache_attributes) below for more details.
+* `object_acl` - (Optional) Access Control List permission for S3 objects. Defaults to `private`.
+* `read_only` - (Optional) Boolean to indicate the write status of the file share. The file share does not accept writes if `true`. Defaults to `false`.
+* `requester_pays` - (Optional) Boolean indicating who pays the cost of the request and the data download from the Amazon S3 bucket. Set this value to `true` if you want the requester to pay instead of the bucket owner. Defaults to `false`.
+* `squash` - (Optional) Maps a user to the anonymous user. Defaults to `RootSquash`. Valid values: `RootSquash` (only root is mapped to the anonymous user), `NoSquash` (no one is mapped to the anonymous user), `AllSquash` (everyone is mapped to the anonymous user).
+* `file_share_name` - (Optional) The name of the file share. Must be set if an S3 prefix name is set in `location_arn`.
+* `notification_policy` - (Optional) The notification policy of the file share. For more information see the [AWS Documentation](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-NotificationPolicy). Default value is `{}`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### nfs_file_share_defaults
+
+Files and folders stored as Amazon S3 objects in S3 buckets don't, by default, have Unix file permissions assigned to them. Upon discovery in an S3 bucket by Storage Gateway, the S3 objects that represent files and folders are assigned these default Unix permissions.
+
+* `directory_mode` - (Optional) The Unix directory mode in the string form "nnnn". Defaults to `"0777"`.
+* `file_mode` - (Optional) The Unix file mode in the string form "nnnn". Defaults to `"0666"`.
+* `group_id` - (Optional) The default group ID for the file share (unless the files have another group ID specified). Defaults to `65534` (`nfsnobody`). Valid values: `0` through `4294967294`.
+* `owner_id` - (Optional) The default owner ID for the file share (unless the files have another owner ID specified). Defaults to `65534` (`nfsnobody`). Valid values: `0` through `4294967294`.
+
+### cache_attributes
+
+* `cache_stale_timeout_in_seconds` - (Optional) Refreshes a file share's cache by using Time To Live (TTL). TTL is the length of time since the last refresh after which access to the directory would cause the file gateway to first refresh that directory's contents from the Amazon S3 bucket. Valid values: 300 to 2,592,000 seconds (5 minutes to 30 days).
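+
+As a hedged sketch of the `cache_attributes` block on an NFS file share (the `StoragegatewayNfsFileShareCacheAttributes` struct name is an assumption following the generated-binding pattern, and the referenced resources use the same undeclared-name convention as the example above):
+
+```python
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.storagegateway_nfs_file_share import (
+    StoragegatewayNfsFileShare,
+    StoragegatewayNfsFileShareCacheAttributes,
+)
+class NfsCacheSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewayNfsFileShare(self, "example",
+            client_list=["10.0.0.0/16"],
+            gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn),
+            location_arn=Token.as_string(aws_s3_bucket_example.arn),
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            # Re-check directory listings against S3 every 10 minutes (600s,
+            # within the valid 300 to 2,592,000 second range).
+            cache_attributes=StoragegatewayNfsFileShareCacheAttributes(
+                cache_stale_timeout_in_seconds=600
+            )
+        )
+```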
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the NFS File Share.
+* `arn` - Amazon Resource Name (ARN) of the NFS File Share.
+* `fileshare_id` - ID of the NFS File Share.
+* `path` - File share path used by the NFS client to identify the mount point.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `update` - (Default `10m`)
+* `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_nfs_file_share` using the NFS File Share Amazon Resource Name (ARN).
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_storagegateway_nfs_file_share` using the NFS File Share Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_nfs_file_share.example arn:aws:storagegateway:us-east-1:123456789012:share/share-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/storagegateway_smb_file_share.html.markdown b/website/docs/cdktf/python/r/storagegateway_smb_file_share.html.markdown
new file mode 100644
index 00000000000..188884af9eb
--- /dev/null
+++ b/website/docs/cdktf/python/r/storagegateway_smb_file_share.html.markdown
@@ -0,0 +1,138 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_smb_file_share"
+description: |-
+  Manages an AWS Storage Gateway SMB File Share
+---
+
+
+
+# Resource: aws_storagegateway_smb_file_share
+
+Manages an AWS Storage Gateway SMB File Share.
+
+## Example Usage
+
+### Active Directory Authentication
+
+~> **NOTE:** The gateway must have already joined the Active Directory domain prior to SMB file share creation, e.g., via "SMB Settings" in the AWS Storage Gateway console or `smb_active_directory_settings` in the [`aws_storagegateway_gateway` resource](/docs/providers/aws/r/storagegateway_gateway.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.storagegateway_smb_file_share import StoragegatewaySmbFileShare
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewaySmbFileShare(self, "example",
+            authentication="ActiveDirectory",
+            gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn),
+            location_arn=Token.as_string(aws_s3_bucket_example.arn),
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+### Guest Authentication
+
+~> **NOTE:** The gateway must have already had the SMB guest password set prior to SMB file share creation, e.g., via "SMB Settings" in the AWS Storage Gateway console or `smb_guest_password` in the [`aws_storagegateway_gateway` resource](/docs/providers/aws/r/storagegateway_gateway.html).
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.storagegateway_smb_file_share import StoragegatewaySmbFileShare
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewaySmbFileShare(self, "example",
+            authentication="GuestAccess",
+            gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn),
+            location_arn=Token.as_string(aws_s3_bucket_example.arn),
+            role_arn=Token.as_string(aws_iam_role_example.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `gateway_arn` - (Required) Amazon Resource Name (ARN) of the file gateway.
+* `location_arn` - (Required) The ARN of the backend storage used for storing file data.
+* `vpc_endpoint_dns_name` - (Optional) The DNS name of the VPC endpoint for S3 PrivateLink.
+* `bucket_region` - (Optional) The region of the S3 bucket used by the file share. Required when specifying a `vpc_endpoint_dns_name`.
+* `role_arn` - (Required) The ARN of the AWS Identity and Access Management (IAM) role that a file gateway assumes when it accesses the underlying storage.
+* `admin_user_list` - (Optional) A list of users in the Active Directory that have admin access to the file share. Only valid if `authentication` is set to `ActiveDirectory`.
+* `authentication` - (Optional) The authentication method that users use to access the file share. Defaults to `ActiveDirectory`. Valid values: `ActiveDirectory`, `GuestAccess`.
+* `audit_destination_arn` - (Optional) The Amazon Resource Name (ARN) of the CloudWatch Log Group used for the audit logs.
+* `default_storage_class` - (Optional) The default [storage class](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-DefaultStorageClass) for objects put into an Amazon S3 bucket by the file gateway. Defaults to `S3_STANDARD`.
+* `file_share_name` - (Optional) The name of the file share. Must be set if an S3 prefix name is set in `location_arn`.
+* `guess_mime_type_enabled` - (Optional) Boolean value that enables guessing of the MIME type for uploaded objects based on file extensions. Defaults to `true`.
+* `invalid_user_list` - (Optional) A list of users in the Active Directory that are not allowed to access the file share. Only valid if `authentication` is set to `ActiveDirectory`.
+* `kms_encrypted` - (Optional) Boolean value: `true` to use Amazon S3 server-side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3. Defaults to `false`.
+* `kms_key_arn` - (Optional) Amazon Resource Name (ARN) for the KMS key used for Amazon S3 server-side encryption. This value can only be set when `kms_encrypted` is `true`.
+* `object_acl` - (Optional) Access Control List permission for S3 objects. Defaults to `private`.
+* `oplocks_enabled` - (Optional) Boolean to indicate Opportunistic lock (oplock) status. Defaults to `true`.
+* `cache_attributes` - (Optional) Refresh cache information. See [Cache Attributes](#cache_attributes) below for more details.
+* `read_only` - (Optional) Boolean to indicate the write status of the file share. The file share does not accept writes if `true`. Defaults to `false`.
+* `requester_pays` - (Optional) Boolean indicating who pays the cost of the request and the data download from the Amazon S3 bucket. Set this value to `true` if you want the requester to pay instead of the bucket owner. Defaults to `false`.
+* `smb_acl_enabled` - (Optional) Set this value to `true` to enable ACL (access control list) on the SMB file share. Set it to `false` to map file and directory permissions to the POSIX permissions. This setting applies only to the `ActiveDirectory` authentication type.
+* `case_sensitivity` - (Optional) The case of an object name in an Amazon S3 bucket. For `ClientSpecified`, the client determines the case sensitivity. For `CaseSensitive`, the gateway determines the case sensitivity. The default value is `ClientSpecified`.
+* `valid_user_list` - (Optional) A list of users in the Active Directory that are allowed to access the file share. If you need to specify an Active Directory group, add '@' before the name of the group; it will be set as an Allowed group in the AWS console. Only valid if `authentication` is set to `ActiveDirectory`. See the sketch after this list.
+* `access_based_enumeration` - (Optional) Whether the files and folders on this share are visible only to users with read access. Default value is `false`.
+* `notification_policy` - (Optional) The notification policy of the file share. For more information see the [AWS Documentation](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-NotificationPolicy). Default value is `{}`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### cache_attributes
+
+* `cache_stale_timeout_in_seconds` - (Optional) Refreshes a file share's cache by using Time To Live (TTL). TTL is the length of time since the last refresh after which access to the directory would cause the file gateway to first refresh that directory's contents from the Amazon S3 bucket. Valid values: 300 to 2,592,000 seconds (5 minutes to 30 days).
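+
+As a hedged sketch of the Active Directory user-list arguments described in the list above (the user and group names are illustrative, and the referenced resources follow the same undeclared-name convention as the examples above):
+
+```python
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.storagegateway_smb_file_share import StoragegatewaySmbFileShare
+class SmbUserListSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewaySmbFileShare(self, "example",
+            authentication="ActiveDirectory",
+            gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn),
+            location_arn=Token.as_string(aws_s3_bucket_example.arn),
+            role_arn=Token.as_string(aws_iam_role_example.arn),
+            admin_user_list=["CORP\\storage-admin"],
+            # Prefixing a name with '@' marks it as an AD group.
+            valid_user_list=["@storage-users"],
+            invalid_user_list=["CORP\\contractor"]
+        )
+```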
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the SMB File Share.
+* `arn` - Amazon Resource Name (ARN) of the SMB File Share.
+* `fileshare_id` - ID of the SMB File Share.
+* `path` - File share path used by the SMB client to identify the mount point.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `update` - (Default `10m`)
+* `delete` - (Default `15m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_smb_file_share` using the SMB File Share Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_storagegateway_smb_file_share` using the SMB File Share Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_smb_file_share.example arn:aws:storagegateway:us-east-1:123456789012:share/share-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/storagegateway_stored_iscsi_volume.html.markdown b/website/docs/cdktf/python/r/storagegateway_stored_iscsi_volume.html.markdown
new file mode 100644
index 00000000000..2dc130e6746
--- /dev/null
+++ b/website/docs/cdktf/python/r/storagegateway_stored_iscsi_volume.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_stored_iscsi_volume"
+description: |-
+  Manages an AWS Storage Gateway stored iSCSI volume
+---
+
+
+
+# Resource: aws_storagegateway_stored_iscsi_volume
+
+Manages an AWS Storage Gateway stored iSCSI volume.
+
+~> **NOTE:** The gateway must have a working storage added (e.g., via the [`aws_storagegateway_working_storage`](/docs/providers/aws/r/storagegateway_working_storage.html) resource) before the volume is operational to clients; however, the Storage Gateway API will allow volume creation without error in that case and return the volume status as `WORKING STORAGE NOT CONFIGURED`.
+
+## Example Usage
+
+### Create Empty Stored iSCSI Volume
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.storagegateway_stored_iscsi_volume import StoragegatewayStoredIscsiVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewayStoredIscsiVolume(self, "example",
+            disk_id=Token.as_string(test.id),
+            gateway_arn=Token.as_string(aws_storagegateway_cache_example.gateway_arn),
+            network_interface_id=Token.as_string(aws_instance_example.private_ip),
+            preserve_existing_data=False,
+            target_name="example"
+        )
+```
+
+### Create Stored iSCSI Volume From Snapshot
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.storagegateway_stored_iscsi_volume import StoragegatewayStoredIscsiVolume
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewayStoredIscsiVolume(self, "example",
+            disk_id=Token.as_string(test.id),
+            gateway_arn=Token.as_string(aws_storagegateway_cache_example.gateway_arn),
+            network_interface_id=Token.as_string(aws_instance_example.private_ip),
+            preserve_existing_data=False,
+            snapshot_id=Token.as_string(aws_ebs_snapshot_example.id),
+            target_name="example"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `gateway_arn` - (Required) The Amazon Resource Name (ARN) of the gateway.
+* `network_interface_id` - (Required) The network interface of the gateway on which to expose the iSCSI target. Only IPv4 addresses are accepted.
+* `target_name` - (Required) The name of the iSCSI target used by initiators to connect to the target and as a suffix for the target ARN. The target name must be unique across all volumes of a gateway.
+* `disk_id` - (Required) The unique identifier for the gateway local disk that is configured as a stored volume.
+* `preserve_existing_data` - (Required) Specify this field as `true` if you want to preserve the data on the local disk. Otherwise, specifying this field as `false` creates an empty volume.
+* `snapshot_id` - (Optional) The snapshot ID of the snapshot to restore as the new stored volume, e.g., `snap-1122aabb`.
+* `kms_encrypted` - (Optional) `true` to use Amazon S3 server-side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3.
+* `kms_key` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS key used for Amazon S3 server-side encryption. This value can only be set when `kms_encrypted` is `true`.
+* `tags` - (Optional) Key-value mapping of resource tags.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `chap_enabled` - Whether mutual CHAP is enabled for the iSCSI target.
+* `id` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `lun_number` - Logical disk number.
+* `network_interface_port` - The port used to communicate with iSCSI targets.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `target_arn` - Target Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/target/iqn.1997-05.com.amazon:TargetName`.
+* `volume_arn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `volume_id` - Volume ID, e.g., `vol-12345678`.
+* `volume_status` - Indicates the state of the storage volume.
+* `volume_type` - Indicates the type of the volume.
+* `volume_size_in_bytes` - The size of the data stored on the volume in bytes.
+* `volume_attachment_status` - A value that indicates whether a storage volume is attached to, detached from, or is in the process of detaching from a gateway.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_stored_iscsi_volume` using the volume Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_storagegateway_stored_iscsi_volume` using the volume Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_stored_iscsi_volume.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/storagegateway_tape_pool.html.markdown b/website/docs/cdktf/python/r/storagegateway_tape_pool.html.markdown
new file mode 100644
index 00000000000..3da6a88d035
--- /dev/null
+++ b/website/docs/cdktf/python/r/storagegateway_tape_pool.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_tape_pool"
+description: |-
+  Manages an AWS Storage Gateway Tape Pool
+---
+
+
+
+# Resource: aws_storagegateway_tape_pool
+
+Manages an AWS Storage Gateway Tape Pool.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.storagegateway_tape_pool import StoragegatewayTapePool
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        StoragegatewayTapePool(self, "example",
+            pool_name="example",
+            storage_class="GLACIER"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `pool_name` - (Required) The name of the new custom tape pool.
+* `storage_class` - (Required) The storage class that is associated with the new custom pool. When you use your backup application to eject the tape, the tape is archived directly into the storage class that corresponds to the pool. Possible values are `DEEP_ARCHIVE` or `GLACIER`.
+* `retention_lock_type` - (Required) Tape retention lock can be configured in two modes. When configured in governance mode, AWS accounts with specific IAM permissions are authorized to remove the tape retention lock from archived virtual tapes. When configured in compliance mode, the tape retention lock cannot be removed by any user, including the root AWS account. Possible values are `COMPLIANCE`, `GOVERNANCE`, and `NONE`. Default value is `NONE`.
+* `retention_lock_time_in_days` - (Optional) Tape retention lock time, in days. Tape retention lock can be enabled for up to 100 years (36,500 days). Default value is `0`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Tape pool Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:tapepool/pool-12345678`.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_tape_pool` using the tape pool Amazon Resource Name (ARN). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import `aws_storagegateway_tape_pool` using the tape pool Amazon Resource Name (ARN).
For example: + +```console +% terraform import aws_storagegateway_tape_pool.example arn:aws:storagegateway:us-east-1:123456789012:tapepool/pool-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/storagegateway_upload_buffer.html.markdown b/website/docs/cdktf/python/r/storagegateway_upload_buffer.html.markdown new file mode 100644 index 00000000000..bca942d2176 --- /dev/null +++ b/website/docs/cdktf/python/r/storagegateway_upload_buffer.html.markdown @@ -0,0 +1,104 @@ +--- +subcategory: "Storage Gateway" +layout: "aws" +page_title: "AWS: aws_storagegateway_upload_buffer" +description: |- + Manages an AWS Storage Gateway upload buffer +--- + + + +# Resource: aws_storagegateway_upload_buffer + +Manages an AWS Storage Gateway upload buffer. + +~> **NOTE:** The Storage Gateway API provides no method to remove an upload buffer disk. Destroying this Terraform resource does not perform any Storage Gateway actions. + +## Example Usage + +### Cached and VTL Gateway Type + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_storagegateway_local_disk import DataAwsStoragegatewayLocalDisk +from imports.aws.storagegateway_upload_buffer import StoragegatewayUploadBuffer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = DataAwsStoragegatewayLocalDisk(self, "test", + disk_node=Token.as_string(aws_volume_attachment_test.device_name), + gateway_arn=Token.as_string(aws_storagegateway_gateway_test.arn) + ) + aws_storagegateway_upload_buffer_test = StoragegatewayUploadBuffer(self, "test_1", + disk_path=Token.as_string(test.disk_path), + gateway_arn=Token.as_string(aws_storagegateway_gateway_test.arn) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_storagegateway_upload_buffer_test.override_logical_id("test") +``` + +### Stored Gateway Type + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_storagegateway_local_disk import DataAwsStoragegatewayLocalDisk +from imports.aws.storagegateway_upload_buffer import StoragegatewayUploadBuffer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayUploadBuffer(self, "example", + disk_id=Token.as_string(data_aws_storagegateway_local_disk_example.id), + gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn) + ) + DataAwsStoragegatewayLocalDisk(self, "test", + disk_node=Token.as_string(aws_volume_attachment_test.device_name), + gateway_arn=Token.as_string(aws_storagegateway_gateway_test.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `disk_id` - (Optional) Local disk identifier. For example, `pci-0000:03:00.0-scsi-0:0:0:0`. +* `disk_path` - (Optional) Local disk path. For example, `/dev/nvme1n1`. +* `gateway_arn` - (Required) The Amazon Resource Name (ARN) of the gateway. 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Combined gateway Amazon Resource Name (ARN) and local disk identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_upload_buffer` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_storagegateway_upload_buffer` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). For example: + +```console +% terraform import aws_storagegateway_upload_buffer.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678:pci-0000:03:00.0-scsi-0:0:0:0 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/storagegateway_working_storage.html.markdown b/website/docs/cdktf/python/r/storagegateway_working_storage.html.markdown new file mode 100644 index 00000000000..9a281d8d06a --- /dev/null +++ b/website/docs/cdktf/python/r/storagegateway_working_storage.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Storage Gateway" +layout: "aws" +page_title: "AWS: aws_storagegateway_working_storage" +description: |- + Manages an AWS Storage Gateway working storage +--- + + + +# Resource: aws_storagegateway_working_storage + +Manages an AWS Storage Gateway working storage. + +~> **NOTE:** The Storage Gateway API provides no method to remove a working storage disk. Destroying this Terraform resource does not perform any Storage Gateway actions. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.storagegateway_working_storage import StoragegatewayWorkingStorage +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + StoragegatewayWorkingStorage(self, "example", + disk_id=Token.as_string(data_aws_storagegateway_local_disk_example.id), + gateway_arn=Token.as_string(aws_storagegateway_gateway_example.arn) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `disk_id` - (Required) Local disk identifier. For example, `pci-0000:03:00.0-scsi-0:0:0:0`. +* `gateway_arn` - (Required) The Amazon Resource Name (ARN) of the gateway. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Combined gateway Amazon Resource Name (ARN) and local disk identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_storagegateway_working_storage` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). 
For example:
+
+```console
+% terraform import aws_storagegateway_working_storage.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678:pci-0000:03:00.0-scsi-0:0:0:0
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/subnet.html.markdown b/website/docs/cdktf/python/r/subnet.html.markdown
new file mode 100644
index 00000000000..224f2e62647
--- /dev/null
+++ b/website/docs/cdktf/python/r/subnet.html.markdown
@@ -0,0 +1,133 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_subnet"
+description: |-
+  Provides a VPC subnet resource.
+---
+
+
+
+# Resource: aws_subnet
+
+Provides a VPC subnet resource.
+
+~> **NOTE:** Due to [AWS Lambda improved VPC networking changes that began deploying in September 2019](https://aws.amazon.com/blogs/compute/announcing-improved-vpc-networking-for-aws-lambda-functions/), subnets associated with Lambda Functions can take up to 45 minutes to successfully delete. Terraform AWS Provider version 2.31.0 and later automatically handles this increased timeout; however, prior versions require setting the [customizable deletion timeout](#timeouts) to 45 minutes (`delete = "45m"`). AWS and HashiCorp are working together to reduce the amount of time required for resource deletion, and updates can be tracked in this [GitHub issue](https://github.com/hashicorp/terraform-provider-aws/issues/10329).
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.subnet import Subnet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Subnet(self, "main",
+            cidr_block="10.0.1.0/24",
+            tags={
+                "Name": "Main"
+            },
+            vpc_id=Token.as_string(aws_vpc_main.id)
+        )
+```
+
+### Subnets In Secondary VPC CIDR Blocks
+
+When managing subnets in one of a VPC's secondary CIDR blocks created using a [`aws_vpc_ipv4_cidr_block_association`](vpc_ipv4_cidr_block_association.html) resource, it is recommended to reference that resource's `vpc_id` attribute to ensure correct dependency ordering.
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.subnet import Subnet
+from imports.aws.vpc_ipv4_cidr_block_association import VpcIpv4CidrBlockAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        secondary_cidr = VpcIpv4CidrBlockAssociation(self, "secondary_cidr",
+            cidr_block="172.2.0.0/16",
+            vpc_id=main.id
+        )
+        Subnet(self, "in_secondary_cidr",
+            cidr_block="172.2.0.0/24",
+            vpc_id=secondary_cidr.vpc_id
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `assign_ipv6_address_on_creation` - (Optional) Specify `true` to indicate that network interfaces created in the specified subnet should be assigned an IPv6 address. Default is `false`.
+* `availability_zone` - (Optional) AZ for the subnet.
+* `availability_zone_id` - (Optional) AZ ID of the subnet. This argument is not supported in all regions or partitions. If necessary, use `availability_zone` instead.
+* `cidr_block` - (Optional) The IPv4 CIDR block for the subnet.
+* `customer_owned_ipv4_pool` - (Optional) The customer owned IPv4 address pool. Typically used with the `map_customer_owned_ip_on_launch` argument. The `outpost_arn` argument must be specified when configured.
+* `enable_dns64` - (Optional) Indicates whether DNS queries made to the Amazon-provided DNS Resolver in this subnet should return synthetic IPv6 addresses for IPv4-only destinations. Default: `false`.
+* `enable_lni_at_device_index` - (Optional) Indicates the device position for local network interfaces in this subnet. For example, `1` indicates local network interfaces in this subnet are the secondary network interface (eth1). A local network interface cannot be the primary network interface (eth0).
+* `enable_resource_name_dns_aaaa_record_on_launch` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records. Default: `false`.
+* `enable_resource_name_dns_a_record_on_launch` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS A records. Default: `false`.
+* `ipv6_cidr_block` - (Optional) The IPv6 network range for the subnet, in CIDR notation. The subnet size must use a /64 prefix length.
+* `ipv6_native` - (Optional) Indicates whether to create an IPv6-only subnet. Default: `false`.
+* `map_customer_owned_ip_on_launch` - (Optional) Specify `true` to indicate that network interfaces created in the subnet should be assigned a customer owned IP address. The `customer_owned_ipv4_pool` and `outpost_arn` arguments must be specified when set to `true`. Default is `false`.
+* `map_public_ip_on_launch` - (Optional) Specify `true` to indicate that instances launched into the subnet should be assigned a public IP address. Default is `false`.
+* `outpost_arn` - (Optional) The Amazon Resource Name (ARN) of the Outpost.
+* `private_dns_hostname_type_on_launch` - (Optional) The type of hostnames to assign to instances in the subnet at launch. For IPv6-only subnets, an instance DNS name must be based on the instance ID. For dual-stack and IPv4-only subnets, you can specify whether DNS names use the instance IPv4 address or the instance ID. Valid values: `ip-name`, `resource-name`.
+* `vpc_id` - (Required) The VPC ID.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
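+
+As a hedged sketch of the IPv6 arguments above, the following creates a dual-stack subnet (it assumes the VPC referenced as `aws_vpc_main` has an Amazon-provided IPv6 block, following the undeclared-name convention of the examples above; `Fn.cidrsubnet` carves a /64 out of the VPC's block):
+
+```python
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+from imports.aws.subnet import Subnet
+class DualStackSubnetSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Subnet(self, "dual_stack",
+            vpc_id=Token.as_string(aws_vpc_main.id),
+            cidr_block="10.0.2.0/24",
+            # Derive a /64 from the VPC's /56 IPv6 block (8 extra bits, index 1).
+            ipv6_cidr_block=Fn.cidrsubnet(
+                Token.as_string(aws_vpc_main.ipv6_cidr_block), 8, 1),
+            assign_ipv6_address_on_creation=True
+        )
+```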
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the subnet.
+* `arn` - The ARN of the subnet.
+* `ipv6_cidr_block_association_id` - The association ID for the IPv6 CIDR block.
+* `owner_id` - The ID of the AWS account that owns the subnet.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import subnets using the subnet `id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import subnets using the subnet `id`. For example:
+
+```console
+% terraform import aws_subnet.public_subnet subnet-9d4a7b6c
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/swf_domain.html.markdown b/website/docs/cdktf/python/r/swf_domain.html.markdown
new file mode 100644
index 00000000000..196a5ff69b9
--- /dev/null
+++ b/website/docs/cdktf/python/r/swf_domain.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "SWF (Simple Workflow)"
+layout: "aws"
+page_title: "AWS: aws_swf_domain"
+description: |-
+  Provides an SWF Domain resource
+---
+
+
+
+# Resource: aws_swf_domain
+
+Provides an SWF Domain resource.
+
+## Example Usage
+
+To register a basic SWF domain:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.swf_domain import SwfDomain
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SwfDomain(self, "foo",
+            description="Terraform SWF Domain",
+            name="foo",
+            workflow_execution_retention_period_in_days=Token.as_string(30)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the domain. If omitted, Terraform will assign a random, unique name.
+* `name_prefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional, Forces new resource) The domain description.
+* `workflow_execution_retention_period_in_days` - (Required, Forces new resource) Length of time that SWF will continue to retain information about the workflow execution after the workflow execution is complete. Must be between 0 and 90 days.
+* `tags` - (Optional) Key-value map of resource tags.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the domain.
+* `arn` - Amazon Resource Name (ARN).
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SWF Domains using the `name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import SWF Domains using the `name`. For example:
+
+```console
+% terraform import aws_swf_domain.foo test-domain
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/synthetics_canary.html.markdown b/website/docs/cdktf/python/r/synthetics_canary.html.markdown
new file mode 100644
index 00000000000..c471c2a9781
--- /dev/null
+++ b/website/docs/cdktf/python/r/synthetics_canary.html.markdown
@@ -0,0 +1,140 @@
+---
+subcategory: "CloudWatch Synthetics"
+layout: "aws"
+page_title: "AWS: aws_synthetics_canary"
+description: |-
+  Provides a Synthetics Canary resource
+---
+
+
+
+# Resource: aws_synthetics_canary
+
+Provides a Synthetics Canary resource.
+
+~> **NOTE:** When you create a canary, AWS creates supporting implicit resources. See the Amazon CloudWatch Synthetics documentation on [DeleteCanary](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_DeleteCanary.html) for a full list. Neither AWS nor Terraform deletes these implicit resources automatically when the canary is deleted. Before deleting a canary, ensure that you have all the information about the canary that you need in order to delete the implicit resources using Terraform shell commands, the AWS Console, or the AWS CLI.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.synthetics_canary import SyntheticsCanary
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        SyntheticsCanary(self, "some",
+            artifact_s3_location="s3://some-bucket/",
+            execution_role_arn="some-role",
+            handler="exports.handler",
+            name="some-canary",
+            runtime_version="syn-1.0",
+            schedule=SyntheticsCanarySchedule(
+                expression="rate(0 minute)"
+            ),
+            zip_file="test-fixtures/lambdatest.zip"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `artifact_s3_location` - (Required) Location in Amazon S3 where Synthetics stores artifacts from the test runs of this canary.
+* `execution_role_arn` - (Required) ARN of the IAM role to be used to run the canary. See [AWS Docs](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_CreateCanary.html#API_CreateCanary_RequestSyntax) for the permissions needed for the IAM role.
+* `handler` - (Required) Entry point to use for the source code when running the canary. This value must end with the string `.handler`.
+* `name` - (Required) Name for this canary. Has a maximum length of 21 characters. Valid characters are lowercase alphanumeric, hyphen, or underscore.
+* `runtime_version` - (Required) Runtime version to use for the canary. Versions change often, so consult the [Amazon CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Library.html) for the latest valid versions. Values include `syn-python-selenium-1.0`, `syn-nodejs-puppeteer-3.0`, `syn-nodejs-2.2`, `syn-nodejs-2.1`, `syn-nodejs-2.0`, and `syn-1.0`.
+* `schedule` - (Required) Configuration block providing how often the canary is to run and when these test runs are to stop. Detailed below.
+
+The following arguments are optional:
+
+* `delete_lambda` - (Optional) Specifies whether to also delete the Lambda functions and layers used by this canary. The default is `false`.
+* `vpc_config` - (Optional) Configuration block. Detailed below.
+* `failure_retention_period` - (Optional) Number of days to retain data about failed runs of this canary. If you omit this field, the default of 31 days is used. The valid range is 1 to 455 days.
+* `run_config` - (Optional) Configuration block for individual canary runs. Detailed below.
+* `s3_bucket` - (Optional) Full bucket name which is used if your canary script is located in S3. The bucket must already exist. **Conflicts with `zip_file`.**
+* `s3_key` - (Optional) S3 key of your script. **Conflicts with `zip_file`.**
+* `s3_version` - (Optional) S3 version ID of your script. **Conflicts with `zip_file`.**
+* `start_canary` - (Optional) Whether to run or stop the canary.
+* `success_retention_period` - (Optional) Number of days to retain data about successful runs of this canary. If you omit this field, the default of 31 days is used. The valid range is 1 to 455 days.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `artifact_config` - (Optional) Configuration for canary artifacts, including the encryption-at-rest settings for artifacts that the canary uploads to Amazon S3. See [Artifact Config](#artifact_config).
+* `zip_file` - (Optional) ZIP file that contains the script, if you input your canary script directly into the canary instead of referring to an S3 location. It can be up to 225 KB. **Conflicts with `s3_bucket`, `s3_key`, and `s3_version`.**
+
+### artifact_config
+
+* `s3_encryption` - (Optional) Configuration of the encryption-at-rest settings for artifacts that the canary uploads to Amazon S3. See [S3 Encryption](#s3_encryption).
+
+### s3_encryption
+
+* `encryption_mode` - (Optional) The encryption method to use for artifacts created by this canary. Valid values are: `SSE_S3` and `SSE_KMS`.
+* `kms_key_arn` - (Optional) The ARN of the customer-managed KMS key to use, if you specify `SSE_KMS` for `encryption_mode`.
+
+### schedule
+
+* `expression` - (Required) Rate expression or cron expression that defines how often the canary is to run. For a rate expression, the syntax is `rate(number unit)`. _unit_ can be `minute`, `minutes`, or `hour`. For a cron expression, the syntax is `cron(expression)`. For more information about the syntax for cron expressions, see [Scheduling canary runs using cron](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_cron.html).
+* `duration_in_seconds` - (Optional) Duration in seconds, for the canary to continue making regular runs according to the schedule in the Expression value.
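+
+As a hand-written illustration of the two `expression` forms (the values are illustrative; `SyntheticsCanarySchedule` is the struct class used in the example above):
+
+```python
+# Hand-written sketch - not generated by 'cdktf convert'
+from imports.aws.synthetics_canary import SyntheticsCanarySchedule
+
+# Run every 10 minutes, and stop scheduling new runs after one hour.
+rate_schedule = SyntheticsCanarySchedule(
+    expression="rate(10 minutes)",
+    duration_in_seconds=3600
+)
+
+# Cron form: run at minute 0 of every hour, indefinitely.
+cron_schedule = SyntheticsCanarySchedule(
+    expression="cron(0 * * * ? *)"
+)
+```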
+
+### run_config
+
+* `timeout_in_seconds` - (Optional) Number of seconds the canary is allowed to run before it must stop. If you omit this field, the frequency of the canary is used, up to a maximum of 840 (14 minutes).
+* `memory_in_mb` - (Optional) Maximum amount of memory available to the canary while it is running, in MB. The value you specify must be a multiple of 64.
+* `active_tracing` - (Optional) Whether this canary is to use active AWS X-Ray tracing when it runs. You can enable active tracing only for canaries that use version `syn-nodejs-2.0` or later for their canary runtime.
+* `environment_variables` - (Optional) Map of environment variables that are accessible from the canary during execution. Please see [AWS Docs](https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html#configuration-envvars-runtime) for variables reserved for Lambda.
+
+### vpc_config
+
+If this canary tests an endpoint in a VPC, this structure contains information about the subnet and security groups of the VPC endpoint. For more information, see [Running a Canary in a VPC](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_VPC.html).
+
+* `subnet_ids` - (Required) IDs of the subnets where this canary is to run.
+* `security_group_ids` - (Required) IDs of the security groups for this canary.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Canary.
+* `engine_arn` - ARN of the Lambda function that is used as your canary's engine.
+* `id` - Name for this canary.
+* `source_location_arn` - ARN of the Lambda layer where Synthetics stores the canary script code.
+* `status` - Canary status.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `timeline` - Structure that contains information about when the canary was created, modified, and most recently run. See [Timeline](#timeline).
+
+### vpc_config
+
+* `vpc_id` - ID of the VPC where this canary is to run.
+
+### timeline
+
+* `created` - Date and time the canary was created.
+* `last_modified` - Date and time the canary was most recently modified.
+* `last_started` - Date and time that the canary's most recent run started.
+* `last_stopped` - Date and time that the canary's most recent run ended.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Synthetics Canaries using the `name`.
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Synthetics Canaries using the `name`. For example: + +```console +% terraform import aws_synthetics_canary.some some-canary +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/synthetics_group.html.markdown b/website/docs/cdktf/python/r/synthetics_group.html.markdown new file mode 100644 index 00000000000..74903e97cdc --- /dev/null +++ b/website/docs/cdktf/python/r/synthetics_group.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "CloudWatch Synthetics" +layout: "aws" +page_title: "AWS: aws_synthetics_group" +description: |- + Provides a Synthetics Group resource +--- + + + +# Resource: aws_synthetics_group + +Provides a Synthetics Group resource. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.synthetics_group import SyntheticsGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SyntheticsGroup(self, "example", + name="example" + ) +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the group. + +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Group. +* `group_id` - ID of the Group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Synthetics Group using the `name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch Synthetics Group using the `name`. 
For example: + +```console +% terraform import aws_synthetics_group.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/synthetics_group_association.html.markdown b/website/docs/cdktf/python/r/synthetics_group_association.html.markdown new file mode 100644 index 00000000000..d777a34b48b --- /dev/null +++ b/website/docs/cdktf/python/r/synthetics_group_association.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "CloudWatch Synthetics" +layout: "aws" +page_title: "AWS: aws_synthetics_group_association" +description: |- + Provides a Synthetics Group Association resource +--- + + + +# Resource: aws_synthetics_group_association + +Provides a Synthetics Group Association resource. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.synthetics_group_association import SyntheticsGroupAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + SyntheticsGroupAssociation(self, "example", + canary_arn=Token.as_string(aws_synthetics_canary_example.arn), + group_name=Token.as_string(aws_synthetics_group_example.name) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `group_name` - (Required) Name of the group that the canary will be associated with. +* `canary_arn` - (Required) ARN of the canary. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `group_name` - Name of the Group. +* `group_id` - ID of the Group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Synthetics Group Association using the `canary_arn,group_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import CloudWatch Synthetics Group Association using the `canary_arn,group_name`. For example: + +```console +% terraform import aws_synthetics_group_association.example arn:aws:synthetics:us-west-2:123456789012:canary:tf-acc-test-abcd1234,examplename +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/timestreamwrite_database.html.markdown b/website/docs/cdktf/python/r/timestreamwrite_database.html.markdown new file mode 100644 index 00000000000..d57592e7657 --- /dev/null +++ b/website/docs/cdktf/python/r/timestreamwrite_database.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Timestream Write" +layout: "aws" +page_title: "AWS: aws_timestreamwrite_database" +description: |- + Provides a Timestream database resource. +--- + + + +# Resource: aws_timestreamwrite_database + +Provides a Timestream database resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.timestreamwrite_database import TimestreamwriteDatabase +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TimestreamwriteDatabase(self, "example", + database_name="database-example" + ) +``` + +### Full usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.timestreamwrite_database import TimestreamwriteDatabase +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TimestreamwriteDatabase(self, "example", + database_name="database-example", + kms_key_id=Token.as_string(aws_kms_key_example.arn), + tags={ + "Name": "value" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `database_name` – (Required) The name of the Timestream database. Minimum length of 3. Maximum length of 64. +* `kms_key_id` - (Optional) The ARN (not Alias ARN) of the KMS key to be used to encrypt the data stored in the database. If the KMS key is not specified, the database will be encrypted with a Timestream managed KMS key located in your account. Refer to [AWS managed KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk) for more info. +* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Timestream database. +* `arn` - The ARN that uniquely identifies this database. +* `kms_key_id` - The ARN of the KMS key used to encrypt the data stored in the database. +* `table_count` - The total number of tables found within the Timestream database. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Timestream databases using the `database_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Timestream databases using the `database_name`. 
For example: + +```console +% terraform import aws_timestreamwrite_database.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/timestreamwrite_table.html.markdown b/website/docs/cdktf/python/r/timestreamwrite_table.html.markdown new file mode 100644 index 00000000000..c2ccda2c2df --- /dev/null +++ b/website/docs/cdktf/python/r/timestreamwrite_table.html.markdown @@ -0,0 +1,172 @@ +--- +subcategory: "Timestream Write" +layout: "aws" +page_title: "AWS: aws_timestreamwrite_table" +description: |- + Provides a Timestream table resource. +--- + + + +# Resource: aws_timestreamwrite_table + +Provides a Timestream table resource. + +## Example Usage + +### Basic usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.timestreamwrite_table import TimestreamwriteTable +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TimestreamwriteTable(self, "example", + database_name=Token.as_string(aws_timestreamwrite_database_example.database_name), + table_name="example" + ) +``` + +### Full usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.timestreamwrite_table import TimestreamwriteTable +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TimestreamwriteTable(self, "example", + database_name=Token.as_string(aws_timestreamwrite_database_example.database_name), + retention_properties=TimestreamwriteTableRetentionProperties( + magnetic_store_retention_period_in_days=30, + memory_store_retention_period_in_hours=8 + ), + table_name="example", + tags={ + "Name": "example-timestream-table" + } + ) +``` + +### Customer-defined Partition Key + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.timestreamwrite_table import TimestreamwriteTable +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TimestreamwriteTable(self, "example", + database_name=Token.as_string(aws_timestreamwrite_database_example.database_name), + schema=TimestreamwriteTableSchema( + composite_partition_key=TimestreamwriteTableSchemaCompositePartitionKey( + enforcement_in_record="REQUIRED", + name="attr1", + type="DIMENSION" + ) + ), + table_name="example" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `database_name` – (Required) The name of the Timestream database. +* `magnetic_store_write_properties` - (Optional) Contains properties to set on the table when enabling magnetic store writes. See [Magnetic Store Write Properties](#magnetic-store-write-properties) below for more details. +* `retention_properties` - (Optional) The retention duration for the memory store and magnetic store. 
See [Retention Properties](#retention-properties) below for more details. If not provided, `magnetic_store_retention_period_in_days` defaults to 73000 and `memory_store_retention_period_in_hours` defaults to 6.
+* `schema` - (Optional) The schema of the table. See [Schema](#schema) below for more details.
+* `table_name` - (Required) The name of the Timestream table.
+* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Magnetic Store Write Properties
+
+The `magnetic_store_write_properties` block supports the following arguments:
+
+* `enable_magnetic_store_writes` - (Required) A flag to enable magnetic store writes.
+* `magnetic_store_rejected_data_location` - (Optional) The location to write error reports for records rejected asynchronously during magnetic store writes. See [Magnetic Store Rejected Data Location](#magnetic-store-rejected-data-location) below for more details.
+
+#### Magnetic Store Rejected Data Location
+
+The `magnetic_store_rejected_data_location` block supports the following arguments:
+
+* `s3_configuration` - (Optional) Configuration of an S3 location to write error reports for records rejected, asynchronously, during magnetic store writes. See [S3 Configuration](#s3-configuration) below for more details.
+
+##### S3 Configuration
+
+The `s3_configuration` block supports the following arguments:
+
+* `bucket_name` - (Optional) Bucket name of the customer S3 bucket.
+* `encryption_option` - (Optional) Encryption option for the customer S3 location. Options are S3 server-side encryption with an S3-managed key or with a KMS-managed key. Valid values are `SSE_KMS` and `SSE_S3`.
+* `kms_key_id` - (Optional) KMS key ARN for the customer S3 location when encrypting with a KMS-managed key.
+* `object_key_prefix` - (Optional) Object key prefix for the customer S3 location.
+
+### Retention Properties
+
+The `retention_properties` block supports the following arguments:
+
+* `magnetic_store_retention_period_in_days` - (Required) The duration for which data must be stored in the magnetic store. Minimum value of 1. Maximum value of 73000.
+* `memory_store_retention_period_in_hours` - (Required) The duration for which data must be stored in the memory store. Minimum value of 1. Maximum value of 8766.
+
+### Schema
+
+The `schema` block supports the following arguments:
+
+* `composite_partition_key` - (Required) A non-empty list of partition keys defining the attributes used to partition the table data. The order of the list determines the partition hierarchy. The name and type of each partition key as well as the partition key order cannot be changed after the table is created. However, the enforcement level of each partition key can be changed. See [Composite Partition Key](#composite-partition-key) below for more details.
+
+### Composite Partition Key
+
+The `composite_partition_key` block supports the following arguments:
+
+* `enforcement_in_record` - (Optional) The level of enforcement for the specification of a dimension key in ingested records. Valid values: `REQUIRED`, `OPTIONAL`.
+* `name` - (Optional) The name of the attribute used for a dimension key.
+* `type` - (Required) The type of the partition key. Valid values: `DIMENSION`, `MEASURE`.
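+
+The following is a hand-written sketch of the magnetic store blocks above; the bucket name is illustrative, and the long struct class names are assumed to follow the naming convention of the generated provider bindings:
+
+```python
+# Hand-written sketch - not generated by 'cdktf convert'
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.timestreamwrite_database import TimestreamwriteDatabase
+from imports.aws.timestreamwrite_table import (
+    TimestreamwriteTable,
+    TimestreamwriteTableMagneticStoreWriteProperties,
+    TimestreamwriteTableMagneticStoreWritePropertiesMagneticStoreRejectedDataLocation,
+    TimestreamwriteTableMagneticStoreWritePropertiesMagneticStoreRejectedDataLocationS3Configuration,
+)
+
+class MagneticStoreSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = TimestreamwriteDatabase(self, "example",
+            database_name="database-example"
+        )
+        # Enable magnetic store writes and send rejected records to an S3 bucket.
+        TimestreamwriteTable(self, "example_table",
+            database_name=example.database_name,
+            table_name="example",
+            magnetic_store_write_properties=TimestreamwriteTableMagneticStoreWriteProperties(
+                enable_magnetic_store_writes=True,
+                magnetic_store_rejected_data_location=TimestreamwriteTableMagneticStoreWritePropertiesMagneticStoreRejectedDataLocation(
+                    s3_configuration=TimestreamwriteTableMagneticStoreWritePropertiesMagneticStoreRejectedDataLocationS3Configuration(
+                        bucket_name="example-rejected-records",
+                        encryption_option="SSE_S3"
+                    )
+                )
+            )
+        )
+```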
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `table_name` and `database_name` separated by a colon (`:`).
+* `arn` - The ARN that uniquely identifies this table.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Timestream tables using the `table_name` and `database_name` separated by a colon (`:`). For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Timestream tables using the `table_name` and `database_name` separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_timestreamwrite_table.example ExampleTable:ExampleDatabase
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transcribe_language_model.html.markdown b/website/docs/cdktf/python/r/transcribe_language_model.html.markdown
new file mode 100644
index 00000000000..cc45c3ea076
--- /dev/null
+++ b/website/docs/cdktf/python/r/transcribe_language_model.html.markdown
@@ -0,0 +1,147 @@
+---
+subcategory: "Transcribe"
+layout: "aws"
+page_title: "AWS: aws_transcribe_language_model"
+description: |-
+  Terraform resource for managing an AWS Transcribe LanguageModel.
+---
+
+
+
+# Resource: aws_transcribe_language_model
+
+Terraform resource for managing an AWS Transcribe LanguageModel.
+
+-> This resource can take a significant amount of time to provision. See Language Model [FAQ](https://aws.amazon.com/transcribe/faqs/) for more details.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.iam_role_policy import IamRolePolicy
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_object import S3Object
+from imports.aws.transcribe_language_model import TranscribeLanguageModel
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example-transcribe",
+            force_destroy=True
+        )
+        S3Object(self, "object",
+            bucket=example.id,
+            key="transcribe/test1.txt",
+            source="test1.txt"
+        )
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_2",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["transcribe.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+ data_aws_iam_policy_document_example.override_logical_id("example") + aws_iam_role_example = IamRole(self, "example_3", + assume_role_policy=Token.as_string(data_aws_iam_policy_document_example.json), + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + IamRolePolicy(self, "test_policy", + name="example", + policy=Token.as_string( + Fn.jsonencode({ + "Statement": [{ + "Action": ["s3:GetObject", "s3:ListBucket"], + "Effect": "Allow", + "Resource": ["*"] + } + ], + "Version": "2012-10-17" + })), + role=Token.as_string(aws_iam_role_example.id) + ) + aws_transcribe_language_model_example = TranscribeLanguageModel(self, "example_5", + base_model_name="NarrowBand", + input_data_config=TranscribeLanguageModelInputDataConfig( + data_access_role_arn=Token.as_string(aws_iam_role_example.arn), + s3_uri="s3://${" + example.id + "}/transcribe/" + ), + language_code="en-US", + model_name="example", + tags={ + "ENVIRONMENT": "development" + } + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_transcribe_language_model_example.override_logical_id("example") +``` + +## Argument Reference + +The following arguments are required: + +* `base_model_name` - (Required) Name of reference base model. +* `input_data_config` - (Required) The input data config for the LanguageModel. See [Input Data Config](#input-data-config) for more details. +* `language_code` - (Required) The language code you selected for your language model. Refer to the [supported languages](https://docs.aws.amazon.com/transcribe/latest/dg/supported-languages.html) page for accepted codes. +* `model_name` - (Required) The model name. + +### Input Data Config + +* `data_access_role_arn` - (Required) IAM role with access to S3 bucket. +* `s3_uri` - (Required) S3 URI where training data is located. +* `tuning_data_s3_uri` - (Optional) S3 URI where tuning data is located. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the LanguageModel. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - LanguageModel name. +* `arn` - ARN of the LanguageModel. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `600m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe LanguageModel using the `model_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Transcribe LanguageModel using the `model_name`. 
For example:
+
+```console
+% terraform import aws_transcribe_language_model.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transcribe_medical_vocabulary.html.markdown b/website/docs/cdktf/python/r/transcribe_medical_vocabulary.html.markdown
new file mode 100644
index 00000000000..48ad45e9d6e
--- /dev/null
+++ b/website/docs/cdktf/python/r/transcribe_medical_vocabulary.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "Transcribe"
+layout: "aws"
+page_title: "AWS: aws_transcribe_medical_vocabulary"
+description: |-
+  Terraform resource for managing an AWS Transcribe MedicalVocabulary.
+---
+
+
+
+# Resource: aws_transcribe_medical_vocabulary
+
+Terraform resource for managing an AWS Transcribe MedicalVocabulary.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_object import S3Object
+from imports.aws.transcribe_medical_vocabulary import TranscribeMedicalVocabulary
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example-medical-vocab-123",
+            force_destroy=True
+        )
+        object = S3Object(self, "object",
+            bucket=example.id,
+            key="transcribe/test1.txt",
+            source="test.txt"
+        )
+        aws_transcribe_medical_vocabulary_example = TranscribeMedicalVocabulary(self, "example_2",
+            depends_on=[object],
+            language_code="en-US",
+            tags={
+                "tag1": "value1",
+                "tag2": "value3"
+            },
+            vocabulary_file_uri="s3://${" + example.id + "}/${" + object.key + "}",
+            vocabulary_name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_transcribe_medical_vocabulary_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `language_code` - (Required) The language code you selected for your medical vocabulary. US English (en-US) is the only language supported with Amazon Transcribe Medical.
+* `vocabulary_file_uri` - (Required) The Amazon S3 location (URI) of the text file that contains your custom medical vocabulary.
+* `vocabulary_name` - (Required) The name of the Medical Vocabulary.
+
+The following arguments are optional:
+
+* `tags` - (Optional) A map of tags to assign to the MedicalVocabulary. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the MedicalVocabulary.
+* `arn` - ARN of the MedicalVocabulary.
+* `download_uri` - Generated download URI.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe MedicalVocabulary using the `vocabulary_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transcribe MedicalVocabulary using the `vocabulary_name`. For example:
+
+```console
+% terraform import aws_transcribe_medical_vocabulary.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transcribe_vocabulary.html.markdown b/website/docs/cdktf/python/r/transcribe_vocabulary.html.markdown
new file mode 100644
index 00000000000..fdd5486c379
--- /dev/null
+++ b/website/docs/cdktf/python/r/transcribe_vocabulary.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "Transcribe"
+layout: "aws"
+page_title: "AWS: aws_transcribe_vocabulary"
+description: |-
+  Terraform resource for managing an AWS Transcribe Vocabulary.
+---
+
+
+
+# Resource: aws_transcribe_vocabulary
+
+Terraform resource for managing an AWS Transcribe Vocabulary.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.s3_bucket import S3Bucket
+from imports.aws.s3_object import S3Object
+from imports.aws.transcribe_vocabulary import TranscribeVocabulary
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = S3Bucket(self, "example",
+            bucket="example-vocab-123",
+            force_destroy=True
+        )
+        object = S3Object(self, "object",
+            bucket=example.id,
+            key="transcribe/test1.txt",
+            source="test.txt"
+        )
+        aws_transcribe_vocabulary_example = TranscribeVocabulary(self, "example_2",
+            depends_on=[object],
+            language_code="en-US",
+            tags={
+                "tag1": "value1",
+                "tag2": "value3"
+            },
+            vocabulary_file_uri="s3://${" + example.id + "}/${" + object.key + "}",
+            vocabulary_name="example"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_transcribe_vocabulary_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `language_code` - (Required) The language code you selected for your vocabulary.
+* `vocabulary_name` - (Required) The name of the Vocabulary.
+
+The following arguments are optional:
+
+* `phrases` - (Optional) A list of terms to include in the vocabulary. Conflicts with `vocabulary_file_uri`.
+* `vocabulary_file_uri` - (Optional) The Amazon S3 location (URI) of the text file that contains your custom vocabulary. Conflicts with `phrases`.
+* `tags` - (Optional) A map of tags to assign to the Vocabulary.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Name of the Vocabulary. +* `arn` - ARN of the Vocabulary. +* `download_uri` - Generated download URI. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30m`) +* `update` - (Default `30m`) +* `delete` - (Default `30m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe Vocabulary using the `vocabulary_name`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Transcribe Vocabulary using the `vocabulary_name`. For example: + +```console +% terraform import aws_transcribe_vocabulary.example example-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/transcribe_vocabulary_filter.html.markdown b/website/docs/cdktf/python/r/transcribe_vocabulary_filter.html.markdown new file mode 100644 index 00000000000..d85fbf30d09 --- /dev/null +++ b/website/docs/cdktf/python/r/transcribe_vocabulary_filter.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Transcribe" +layout: "aws" +page_title: "AWS: aws_transcribe_vocabulary_filter" +description: |- + Terraform resource for managing an AWS Transcribe VocabularyFilter. +--- + + + +# Resource: aws_transcribe_vocabulary_filter + +Terraform resource for managing an AWS Transcribe VocabularyFilter. + +## Example Usage + +### Basic Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.transcribe_vocabulary_filter import TranscribeVocabularyFilter +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TranscribeVocabularyFilter(self, "example", + language_code="en-US", + tags={ + "tag1": "value1", + "tag2": "value3" + }, + vocabulary_filter_name="example", + words=["cars", "bucket"] + ) +``` + +## Argument Reference + +The following arguments are required: + +* `language_code` - (Required) The language code you selected for your vocabulary filter. Refer to the [supported languages](https://docs.aws.amazon.com/transcribe/latest/dg/supported-languages.html) page for accepted codes. +* `vocabulary_filter_name` - (Required) The name of the VocabularyFilter. + +The following arguments are optional: + +* `vocabulary_filter_file_uri` - (Optional) The Amazon S3 location (URI) of the text file that contains your custom VocabularyFilter. Conflicts with `words` argument. +* `tags` - (Optional) A map of tags to assign to the VocabularyFilter. 
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `words` - (Optional) A list of terms to include in the vocabulary filter. Conflicts with the `vocabulary_filter_file_uri` argument.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - VocabularyFilter name.
+* `arn` - ARN of the VocabularyFilter.
+* `download_uri` - Generated download URI.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe VocabularyFilter using the `vocabulary_filter_name`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transcribe VocabularyFilter using the `vocabulary_filter_name`. For example:
+
+```console
+% terraform import aws_transcribe_vocabulary_filter.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transfer_access.html.markdown b/website/docs/cdktf/python/r/transfer_access.html.markdown
new file mode 100644
index 00000000000..39622bc4ef4
--- /dev/null
+++ b/website/docs/cdktf/python/r/transfer_access.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_access"
+description: |-
+  Provides an AWS Transfer Access resource.
+---
+
+
+
+# Resource: aws_transfer_access
+
+Provides an AWS Transfer Access resource.
+
+## Example Usage
+
+### Basic S3
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_access import TransferAccess
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferAccess(self, "example",
+            external_id="S-1-1-12-1234567890-123456789-1234567890-1234",
+            home_directory="/${" + aws_s3_bucket_example.id + "}/",
+            role=Token.as_string(aws_iam_role_example.arn),
+            server_id=Token.as_string(aws_transfer_server_example.id)
+        )
+```
+
+### Basic EFS
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_access import TransferAccess
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferAccess(self, "test",
+            external_id="S-1-1-12-1234567890-123456789-1234567890-1234",
+            home_directory="/${" + aws_efs_file_system_test.id + "}/",
+            posix_profile=TransferAccessPosixProfile(
+                gid=1000,
+                uid=1000
+            ),
+            role=Token.as_string(aws_iam_role_test.arn),
+            server_id=Token.as_string(aws_transfer_server_test.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `external_id` - (Required) The SID of a group in the directory connected to the Transfer Server (e.g., `S-1-1-12-1234567890-123456789-1234567890-1234`).
+* `server_id` - (Required) The Server ID of the Transfer Server (e.g., `s-12345678`).
+* `home_directory` - (Optional) The landing directory (folder) for a user when they log in to the server using their SFTP client. It should begin with a `/`. The first item in the path is the name of the home bucket (accessible as `${Transfer:HomeBucket}` in the policy) and the rest is the home directory (accessible as `${Transfer:HomeDirectory}` in the policy). For example, `/example-bucket-1234/username` would set the home bucket to `example-bucket-1234` and the home directory to `username`.
+* `home_directory_mappings` - (Optional) Logical directory mappings that specify what S3 paths and keys should be visible to your user and how you want to make them visible. See [Home Directory Mappings](#home-directory-mappings) below.
+* `home_directory_type` - (Optional) The type of landing directory (folder) you mapped for your users' home directory. Valid values are `PATH` and `LOGICAL`.
+* `policy` - (Optional) An IAM JSON policy document that scopes down user access to portions of their Amazon S3 bucket. IAM variables you can use inside this policy include `${Transfer:UserName}`, `${Transfer:HomeDirectory}`, and `${Transfer:HomeBucket}`. Since the IAM variable syntax matches Terraform's interpolation syntax, they must be escaped inside Terraform configuration strings (`$${Transfer:UserName}`). These are evaluated on-the-fly when navigating the bucket.
+* `posix_profile` - (Optional) Specifies the full POSIX identity, including user ID (Uid), group ID (Gid), and any secondary group IDs (SecondaryGids), that controls your users' access to your Amazon EFS file systems. See [Posix Profile](#posix-profile) below.
+* `role` - (Required) Amazon Resource Name (ARN) of an IAM role that allows the service to control your user's access to your Amazon S3 bucket.
+
+### Home Directory Mappings
+
+* `entry` - (Required) Represents an entry and a target.
+* `target` - (Required) Represents the map target.
+
+### Posix Profile
+
+* `gid` - (Required) The POSIX group ID used for all EFS operations by this user.
+* `uid` - (Required) The POSIX user ID used for all EFS operations by this user.
+* `secondary_gids` - (Optional) The secondary POSIX group IDs used for all EFS operations by this user.
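+
+The following is a hand-written sketch of a `LOGICAL` home directory using the mapping arguments above; the server, role, and bucket references are assumed to exist, and `TransferAccessHomeDirectoryMappings` is assumed to be the struct name in the generated bindings:
+
+```python
+# Hand-written sketch - not generated by 'cdktf convert'
+from constructs import Construct
+from cdktf import Token, TerraformStack
+from imports.aws.transfer_access import (
+    TransferAccess,
+    TransferAccessHomeDirectoryMappings,
+)
+
+class LogicalHomeDirectorySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The user sees "/" while requests are routed to the mapped S3 target.
+        TransferAccess(self, "logical",
+            external_id="S-1-1-12-1234567890-123456789-1234567890-1234",
+            server_id=Token.as_string(aws_transfer_server_example.id),
+            role=Token.as_string(aws_iam_role_example.arn),
+            home_directory_type="LOGICAL",
+            home_directory_mappings=[TransferAccessHomeDirectoryMappings(
+                entry="/",
+                target="/example-bucket-1234/username"
+            )]
+        )
+```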
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the resource.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Accesses using the `server_id` and `external_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transfer Accesses using the `server_id` and `external_id`. For example:
+
+```console
+% terraform import aws_transfer_access.example s-12345678/S-1-1-12-1234567890-123456789-1234567890-1234
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transfer_agreement.html.markdown b/website/docs/cdktf/python/r/transfer_agreement.html.markdown
new file mode 100644
index 00000000000..c0140a07c25
--- /dev/null
+++ b/website/docs/cdktf/python/r/transfer_agreement.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_agreement"
+description: |-
+  Provides an AWS Transfer AS2 Agreement Resource
+---
+
+
+
+# Resource: aws_transfer_agreement
+
+Provides an AWS Transfer AS2 Agreement resource.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_agreement import TransferAgreement
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferAgreement(self, "example",
+            access_role=test.arn,
+            base_directory="/DOC-EXAMPLE-BUCKET/home/mydirectory",
+            description="example",
+            local_profile_id=local.profile_id,
+            partner_profile_id=partner.profile_id,
+            server_id=Token.as_string(aws_transfer_server_test.id)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `access_role` - (Required) The IAM Role which provides read and write access to the parent directory of the file location mentioned in the StartFileTransfer request.
+* `base_directory` - (Required) The landing directory for the files transferred by using the AS2 protocol.
+* `description` - (Optional) The optional description of the transfer.
+* `local_profile_id` - (Required) The unique identifier for the AS2 local profile.
+* `partner_profile_id` - (Required) The unique identifier for the AS2 partner profile.
+* `server_id` - (Required) The unique server identifier for the server instance. This is the specific server the agreement uses.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `agreement_id` - The unique identifier for the AS2 agreement.
+* `status` - The status of the agreement, which is either ACTIVE or INACTIVE.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Agreement using the `server_id/agreement_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transfer AS2 Agreement using the `server_id/agreement_id`. For example:
+
+```console
+% terraform import aws_transfer_agreement.example s-4221a88afd5f4362a/a-4221a88afd5f4362a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transfer_certificate.html.markdown b/website/docs/cdktf/python/r/transfer_certificate.html.markdown
new file mode 100644
index 00000000000..a23431fef1d
--- /dev/null
+++ b/website/docs/cdktf/python/r/transfer_certificate.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_certificate"
+description: |-
+  Provides an AWS Transfer AS2 Certificate Resource
+---
+
+
+
+# Resource: aws_transfer_certificate
+
+Provides an AWS Transfer AS2 Certificate resource.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_certificate import TransferCertificate
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferCertificate(self, "example",
+            certificate=Token.as_string(
+                Fn.file("${path.module}/example.com/example.crt")),
+            certificate_chain=Token.as_string(
+                Fn.file("${path.module}/example.com/ca.crt")),
+            description="example",
+            private_key=Token.as_string(
+                Fn.file("${path.module}/example.com/example.key")),
+            usage="SIGNING"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate` - (Required) The valid certificate file required for the transfer.
+* `certificate_chain` - (Optional) The optional list of certificates that make up the chain for the certificate that is being imported.
+* `description` - (Optional) A short description that helps identify the certificate.
+* `private_key` - (Optional) The private key associated with the certificate being imported.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `usage` - (Required) Specifies if a certificate is being used for signing or encryption. The valid values are SIGNING and ENCRYPTION.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `certificate_id` - The unique identifier for the AS2 certificate.
+* `active_date` - The date when the certificate becomes active.
+* `inactive_date` - The date when the certificate becomes inactive.
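+
+For a partner's public certificate (no private key), a hand-written sketch using `usage="ENCRYPTION"` might look like the following; the file path is illustrative:
+
+```python
+# Hand-written sketch - not generated by 'cdktf convert'
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+from imports.aws.transfer_certificate import TransferCertificate
+
+class PartnerCertificateSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Only the partner's public certificate is imported; private_key stays unset.
+        TransferCertificate(self, "partner_encryption",
+            certificate=Token.as_string(
+                Fn.file("${path.module}/partner.com/partner.crt")),
+            description="partner public certificate",
+            usage="ENCRYPTION"
+        )
+```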
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Certificate using the `certificate_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transfer AS2 Certificate using the `certificate_id`. For example:
+
+```console
+% terraform import aws_transfer_certificate.example c-4221a88afd5f4362a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transfer_connector.html.markdown b/website/docs/cdktf/python/r/transfer_connector.html.markdown
new file mode 100644
index 00000000000..5ab6d3a7bc4
--- /dev/null
+++ b/website/docs/cdktf/python/r/transfer_connector.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_connector"
+description: |-
+  Provides an AWS Transfer AS2 Connector Resource
+---
+
+
+
+# Resource: aws_transfer_connector
+
+Provides an AWS Transfer AS2 Connector resource.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_connector import TransferConnector
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferConnector(self, "example",
+            access_role=test.arn,
+            as2_config=TransferConnectorAs2Config(
+                compression="DISABLED",
+                encryption_algorithm="AES128_CBC",
+                local_profile_id=local.profile_id,
+                mdn_response="NONE",
+                mdn_signing_algorithm="NONE",
+                message_subject="For Connector",
+                partner_profile_id=partner.profile_id,
+                signing_algorithm="NONE"
+            ),
+            url="http://www.test.com"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `access_role` - (Required) The IAM Role which provides read and write access to the parent directory of the file location mentioned in the StartFileTransfer request.
+* `as2_config` - (Required) The parameters to configure for the connector object. Fields documented below.
+* `logging_role` - (Optional) The IAM Role which is required for allowing the connector to turn on CloudWatch logging for Amazon S3 events.
+* `url` - (Required) The URL of the partner's AS2 endpoint.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### As2Config Details
+
+* `compression` - (Required) Specifies whether the AS2 file is compressed. The valid values are ZLIB and DISABLED.
+* `encryption_algorithm` - (Required) The algorithm that is used to encrypt the file. The valid values are AES128_CBC | AES192_CBC | AES256_CBC | NONE.
+* `local_profile_id` - (Required) The unique identifier for the AS2 local profile.
+* `mdn_response` - (Required) Used for outbound requests to determine if a partner response for transfers is synchronous or asynchronous. The valid values are SYNC and NONE.
+* `mdn_signing_algorithm` - (Optional) The signing algorithm for the MDN response. The valid values are SHA256 | SHA384 | SHA512 | SHA1 | NONE | DEFAULT.
+* `message_subject` - (Optional) Used as the subject HTTP header attribute in AS2 messages that are being sent with the connector.
+* `partner_profile_id` - (Required) The unique identifier for the AS2 partner profile.
+* `signing_algorithm` - (Required) The algorithm that is used to sign AS2 messages sent with the connector. The valid values are SHA256 | SHA384 | SHA512 | SHA1 | NONE.
+
+* `message_subject` - (Optional) Used as the subject HTTP header attribute in AS2 messages that are being sent with the connector.
+* `partner_profile_id` - (Required) The unique identifier for the AS2 partner profile.
+* `signing_algorithm` - (Required) The algorithm that is used to sign AS2 messages sent with the connector. The valid values are SHA256 | SHA384 | SHA512 | SHA1 | NONE.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `connector_id` - The unique identifier for the AS2 connector
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Connector using the `connector_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transfer AS2 Connector using the `connector_id`. For example:
+
+```console
+% terraform import aws_transfer_connector.example c-4221a88afd5f4362a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transfer_profile.html.markdown b/website/docs/cdktf/python/r/transfer_profile.html.markdown
new file mode 100644
index 00000000000..7720ca04c0f
--- /dev/null
+++ b/website/docs/cdktf/python/r/transfer_profile.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_profile"
+description: |-
+  Provides an AWS Transfer AS2 Profile Resource
+---
+
+
+
+# Resource: aws_transfer_profile
+
+Provides an AWS Transfer AS2 Profile resource.
+
+## Example Usage
+
+### Basic
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_profile import TransferProfile
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferProfile(self, "example",
+            as2_id="example",
+            certificate_ids=[
+                Token.as_string(aws_transfer_certificate_example.certificate_id)
+            ],
+            profile_type="LOCAL"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `as2_id` - (Required) The As2Id is the AS2 name as defined in RFC 4130. For inbound transfers, this is the AS2 From header for the AS2 messages sent from the partner. For outbound messages, this is the AS2 To header for the AS2 messages sent to the partner. This ID cannot include spaces.
+* `certificate_ids` - (Optional) The list of certificate IDs from the imported certificate operation.
+* `profile_type` - (Required) The profile type. Valid values are LOCAL and PARTNER (a sketch contrasting the two follows this list).
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
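+
+For instance, a hypothetical pairing of the two profile types might look like the following sketch. The construct names and `as2_id` values here are illustrative assumptions, not part of the generated example above:
+
+```python
+# Minimal sketch, assuming provider bindings were generated with `cdktf get`.
+# The AS2 IDs below are placeholders.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.transfer_profile import TransferProfile
+class ProfilePairStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # Our side of the AS2 exchange.
+        TransferProfile(self, "local",
+            as2_id="my-company",
+            profile_type="LOCAL"
+        )
+        # The trading partner's side of the exchange.
+        TransferProfile(self, "partner",
+            as2_id="partner-company",
+            profile_type="PARTNER"
+        )
+```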
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `profile_id` - The unique identifier for the AS2 profile + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Profile using the `profile_id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Transfer AS2 Profile using the `profile_id`. For example: + +```console +% terraform import aws_transfer_profile.example p-4221a88afd5f4362a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/transfer_server.html.markdown b/website/docs/cdktf/python/r/transfer_server.html.markdown new file mode 100644 index 00000000000..50ef4951df2 --- /dev/null +++ b/website/docs/cdktf/python/r/transfer_server.html.markdown @@ -0,0 +1,285 @@ +--- +subcategory: "Transfer Family" +layout: "aws" +page_title: "AWS: aws_transfer_server" +description: |- + Provides a AWS Transfer Server resource. +--- + + + +# Resource: aws_transfer_server + +Provides a AWS Transfer Server resource. + +~> **NOTE on AWS IAM permissions:** If the `endpoint_type` is set to `VPC`, the `ec2:DescribeVpcEndpoints` and `ec2:ModifyVpcEndpoint` [actions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonec2.html#amazonec2-actions-as-permissions) are used. + +~> **NOTE:** Use the [`aws_transfer_tag`](transfer_tag.html) resource to manage the system tags used for [custom hostnames](https://docs.aws.amazon.com/transfer/latest/userguide/requirements-dns.html#tag-custom-hostname-cdk). + +## Example Usage + +### Basic + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.transfer_server import TransferServer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TransferServer(self, "example", + tags={ + "Name": "Example" + } + ) +``` + +### Security Policy Name + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.transfer_server import TransferServer +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TransferServer(self, "example", + security_policy_name="TransferSecurityPolicy-2020-06" + ) +``` + +### VPC Endpoint + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+
+#
+from imports.aws.transfer_server import TransferServer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferServer(self, "example",
+            endpoint_details=TransferServerEndpointDetails(
+                address_allocation_ids=[Token.as_string(aws_eip_example.id)],
+                subnet_ids=[Token.as_string(aws_subnet_example.id)],
+                vpc_id=Token.as_string(aws_vpc_example.id)
+            ),
+            endpoint_type="VPC"
+        )
+```
+
+### AWS Directory authentication
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_server import TransferServer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferServer(self, "example",
+            directory_id=Token.as_string(aws_directory_service_directory_example.id),
+            identity_provider_type="AWS_DIRECTORY_SERVICE"
+        )
+```
+
+### AWS Lambda authentication
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_server import TransferServer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferServer(self, "example",
+            function=Token.as_string(aws_lambda_identity_provider_example.arn),
+            identity_provider_type="AWS_LAMBDA"
+        )
+```
+
+### Protocols
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.transfer_server import TransferServer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferServer(self, "example",
+            certificate=Token.as_string(aws_acm_certificate_example.arn),
+            endpoint_details=TransferServerEndpointDetails(
+                subnet_ids=[Token.as_string(aws_subnet_example.id)],
+                vpc_id=Token.as_string(aws_vpc_example.id)
+            ),
+            endpoint_type="VPC",
+            identity_provider_type="API_GATEWAY",
+            protocols=["FTP", "FTPS"],
+            url="${" + aws_api_gateway_deployment_example.invoke_url + "}" + "${" + aws_api_gateway_resource_example.path + "}"
+        )
+```
+
+### Using Structured Logging Destinations
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+
+#
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.iam_role import IamRole
+from imports.aws.transfer_server import TransferServer
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        transfer = CloudwatchLogGroup(self, "transfer",
+            name_prefix="transfer_test_"
+        )
+        transfer_assume_role = DataAwsIamPolicyDocument(self, "transfer_assume_role",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["sts:AssumeRole"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["transfer.amazonaws.com"],
+                    type="Service"
+                )
+                ]
+            )
+            ]
+        )
+        iam_for_transfer = IamRole(self, "iam_for_transfer",
+            assume_role_policy=Token.as_string(transfer_assume_role.json),
+            managed_policy_arns=["arn:aws:iam::aws:policy/service-role/AWSTransferLoggingAccess"
+            ],
+            name_prefix="iam_for_transfer_"
+        )
+        aws_transfer_server_transfer = TransferServer(self, "transfer_3",
+            endpoint_type="PUBLIC",
+            logging_role=iam_for_transfer.arn,
+            protocols=["SFTP"],
+            structured_log_destinations=["${" + transfer.arn + "}:*"]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_transfer_server_transfer.override_logical_id("transfer")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate` - (Optional) The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. This is required when `protocols` is set to `FTPS`.
+* `domain` - (Optional) The domain of the storage system that is used for file transfers. Valid values are: `S3` and `EFS`. The default value is `S3`.
+* `protocols` - (Optional) Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. This defaults to `SFTP`. The available protocols are:
+    * `AS2`: File transfer over Applicability Statement 2
+    * `SFTP`: File transfer over SSH
+    * `FTPS`: File transfer with TLS encryption
+    * `FTP`: Unencrypted file transfer
+* `endpoint_details` - (Optional) The virtual private cloud (VPC) endpoint settings that you want to configure for your SFTP server. Fields documented below.
+* `endpoint_type` - (Optional) The type of endpoint that you want your SFTP server to connect to. If you connect to a `VPC` (or `VPC_ENDPOINT`), your SFTP server isn't accessible over the public internet. If you want to connect your SFTP server via the public internet, set `PUBLIC`. Defaults to `PUBLIC`.
+* `invocation_role` - (Optional) Amazon Resource Name (ARN) of the IAM role used to authenticate the user account with an `identity_provider_type` of `API_GATEWAY`.
+* `host_key` - (Optional) RSA, ECDSA, or ED25519 private key (e.g., as generated by the `ssh-keygen -t rsa -b 2048 -N "" -m PEM -f my-new-server-key`, `ssh-keygen -t ecdsa -b 256 -N "" -m PEM -f my-new-server-key` or `ssh-keygen -t ed25519 -N "" -f my-new-server-key` commands).
+* `url` - (Optional) URL of the service endpoint used to authenticate users with an `identity_provider_type` of `API_GATEWAY`.
+* `identity_provider_type` - (Optional) The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. 
`API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. Using `AWS_DIRECTORY_SERVICE` will allow for authentication against AWS Managed Active Directory or Microsoft Active Directory in your on-premises environment, or in AWS using AD Connectors. Use the `AWS_LAMBDA` value to directly use a Lambda function as your identity provider. If you choose this value, you must specify the ARN for the Lambda function in the `function` argument.
+* `directory_id` - (Optional) The directory service ID of the directory service you want to connect to with an `identity_provider_type` of `AWS_DIRECTORY_SERVICE`.
+* `function` - (Optional) The ARN for a Lambda function to use for the identity provider.
+* `logging_role` - (Optional) Amazon Resource Name (ARN) of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
+* `force_destroy` - (Optional) A boolean that indicates all users associated with the server should be deleted so that the Server can be destroyed without error. The default value is `false`. This option only applies to servers configured with a `SERVICE_MANAGED` `identity_provider_type`.
+* `post_authentication_login_banner` - (Optional) Specify a string to display when users connect to a server. This string is displayed after the user authenticates. The SFTP protocol does not support post-authentication display banners.
+* `pre_authentication_login_banner` - (Optional) Specify a string to display when users connect to a server. This string is displayed before the user authenticates.
+* `protocol_details` - (Optional) The protocol settings that are configured for your server.
+* `security_policy_name` - (Optional) Specifies the name of the security policy that is attached to the server. Possible values are `TransferSecurityPolicy-2018-11`, `TransferSecurityPolicy-2020-06`, `TransferSecurityPolicy-FIPS-2020-06`, `TransferSecurityPolicy-2022-03` and `TransferSecurityPolicy-2023-05`. Default value is `TransferSecurityPolicy-2018-11`.
+* `structured_log_destinations` - (Optional) A set of ARNs of destinations that will receive structured logs from the transfer server, such as CloudWatch Log Group ARNs. If provided, this enables the transfer server to emit structured logs to the specified locations.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `workflow_details` - (Optional) Specifies the workflow details. See Workflow Details below.
+
+### Endpoint Details
+
+* `address_allocation_ids` - (Optional) A list of address allocation IDs that are required to attach an Elastic IP address to your SFTP server's endpoint. This property can only be used when `endpoint_type` is set to `VPC`.
+* `security_group_ids` - (Optional) A list of security group IDs that are available to attach to your server's endpoint. If no security groups are specified, the VPC's default security groups are automatically assigned to your endpoint. This property can only be used when `endpoint_type` is set to `VPC`.
+* `subnet_ids` - (Optional) A list of subnet IDs that are required to host your SFTP server endpoint in your VPC. 
This property can only be used when `endpoint_type` is set to `VPC`. +* `vpc_endpoint_id` - (Optional) The ID of the VPC endpoint. This property can only be used when `endpoint_type` is set to `VPC_ENDPOINT` +* `vpc_id` - (Optional) The VPC ID of the virtual private cloud in which the SFTP server's endpoint will be hosted. This property can only be used when `endpoint_type` is set to `VPC`. + +### Protocol Details + +* `as2_transports` - (Optional) Indicates the transport method for the AS2 messages. Currently, only `HTTP` is supported. +* `passive_ip` - (Optional) Indicates passive mode, for FTP and FTPS protocols. Enter a single IPv4 address, such as the public IP address of a firewall, router, or load balancer. +* `set_stat_option` - (Optional) Use to ignore the error that is generated when the client attempts to use `SETSTAT` on a file you are uploading to an S3 bucket. Valid values: `DEFAULT`, `ENABLE_NO_OP`. +* `tls_session_resumption_mode` - (Optional) A property used with Transfer Family servers that use the FTPS protocol. Provides a mechanism to resume or share a negotiated secret key between the control and data connection for an FTPS session. Valid values: `DISABLED`, `ENABLED`, `ENFORCED`. + +### Workflow Details + +* `on_upload` - (Optional) A trigger that starts a workflow: the workflow begins to execute after a file is uploaded. See Workflow Detail below. +* `on_partial_upload` - (Optional) A trigger that starts a workflow if a file is only partially uploaded. See Workflow Detail below. + +#### Workflow Detail + +* `execution_role` - (Required) Includes the necessary permissions for S3, EFS, and Lambda operations that Transfer can assume, so that all workflow steps can operate on the required resources. +* `workflow_id` - (Required) A unique identifier for the workflow. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Transfer Server +* `id` - The Server ID of the Transfer Server (e.g., `s-12345678`) +* `endpoint` - The endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`) +* `host_key_fingerprint` - This value contains the message-digest algorithm (MD5) hash of the server's host key. This value is equivalent to the output of the `ssh-keygen -l -E md5 -f my-new-server-key` command. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Servers using the server `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Transfer Servers using the server `id`. For example: + +```console +% terraform import aws_transfer_server.example s-12345678 +``` + +Certain resource arguments, such as `host_key`, cannot be read via the API and imported into Terraform. Terraform will display a difference for these arguments the first run after import if declared in the Terraform configuration for an imported resource. 
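+
+One way to quiet that post-import diff, assuming `host_key` is managed outside of Terraform, is a `lifecycle` ignore rule. The following sketch is not part of the generated documentation; it only illustrates the meta-argument, and the construct name is an assumption:
+
+```python
+# Minimal sketch, assuming the server was imported and host_key is set out
+# of band. TerraformResourceLifecycle ships with the cdktf core library.
+from constructs import Construct
+from cdktf import TerraformStack, TerraformResourceLifecycle
+from imports.aws.transfer_server import TransferServer
+class ImportedServerStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        TransferServer(self, "example",
+            endpoint_type="PUBLIC",
+            # Ignore drift on arguments the Transfer API cannot return.
+            lifecycle=TerraformResourceLifecycle(
+                ignore_changes=["host_key"]
+            )
+        )
+```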
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/transfer_ssh_key.html.markdown b/website/docs/cdktf/python/r/transfer_ssh_key.html.markdown new file mode 100644 index 00000000000..3747c10d1bc --- /dev/null +++ b/website/docs/cdktf/python/r/transfer_ssh_key.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "Transfer Family" +layout: "aws" +page_title: "AWS: aws_transfer_ssh_key" +description: |- + Provides a AWS Transfer SSH Public Key resource. +--- + + + +# Resource: aws_transfer_ssh_key + +Provides a AWS Transfer User SSH Key resource. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.transfer_server import TransferServer +from imports.aws.transfer_ssh_key import TransferSshKey +from imports.aws.transfer_user import TransferUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = TransferServer(self, "example", + identity_provider_type="SERVICE_MANAGED", + tags={ + "NAME": "tf-acc-test-transfer-server" + } + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["transfer.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_2", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + effect="Allow", + resources=["*"], + sid="AllowFullAccesstoS3" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_example.override_logical_id("example") + aws_iam_role_example = IamRole(self, "example_3", + assume_role_policy=Token.as_string(assume_role.json), + name="tf-test-transfer-user-iam-role" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_example.override_logical_id("example") + aws_iam_role_policy_example = IamRolePolicy(self, "example_4", + name="tf-test-transfer-user-iam-policy", + policy=Token.as_string(data_aws_iam_policy_document_example.json), + role=Token.as_string(aws_iam_role_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_example.override_logical_id("example") + aws_transfer_user_example = TransferUser(self, "example_5", + role=Token.as_string(aws_iam_role_example.arn), + server_id=example.id, + tags={ + "NAME": "tftestuser" + }, + user_name="tftestuser" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_transfer_user_example.override_logical_id("example") + aws_transfer_ssh_key_example = TransferSshKey(self, "example_6", + body="... 
SSH key ...",
+            server_id=example.id,
+            user_name=Token.as_string(aws_transfer_user_example.user_name)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_transfer_ssh_key_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `server_id` - (Required) The Server ID of the Transfer Server (e.g., `s-12345678`)
+* `user_name` - (Required) The name of the user account that is assigned to one or more servers.
+* `body` - (Required) The public key portion of an SSH key pair.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer SSH Public Key using the `server_id`, `user_name`, and `ssh_public_key_id`, separated by `/`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transfer SSH Public Key using the `server_id`, `user_name`, and `ssh_public_key_id`, separated by `/`. For example:
+
+```console
+% terraform import aws_transfer_ssh_key.bar s-12345678/test-username/key-12345
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/transfer_tag.html.markdown b/website/docs/cdktf/python/r/transfer_tag.html.markdown
new file mode 100644
index 00000000000..99869da1011
--- /dev/null
+++ b/website/docs/cdktf/python/r/transfer_tag.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_tag"
+description: |-
+  Manages an individual Transfer Family resource tag
+---
+
+
+
+# Resource: aws_transfer_tag
+
+Manages an individual Transfer Family resource tag. This resource should only be used in cases where Transfer Family resources are created outside Terraform (e.g., servers created via the AWS Management Console) or the tag key has the `aws:` prefix.
+
+~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `aws_transfer_server` and `aws_transfer_tag` to manage tags of the same server will cause a perpetual difference where the `aws_transfer_server` resource will try to remove the tag being added by the `aws_transfer_tag` resource.
+
+~> **NOTE:** This tagging resource does not use the [provider `ignore_tags` configuration](/docs/providers/aws/index.html#ignore_tags).
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.transfer_server import TransferServer +from imports.aws.transfer_tag import TransferTag +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = TransferServer(self, "example", + identity_provider_type="SERVICE_MANAGED" + ) + TransferTag(self, "hostname", + key="aws:transfer:customHostname", + resource_arn=example.arn, + value="example.com" + ) + TransferTag(self, "zone_id", + key="aws:transfer:route53HostedZoneId", + resource_arn=example.arn, + value="/hostedzone/MyHostedZoneId" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resource_arn` - (Required) Amazon Resource Name (ARN) of the Transfer Family resource to tag. +* `key` - (Required) Tag name. +* `value` - (Required) Tag value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Transfer Family resource identifier and key, separated by a comma (`,`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_transfer_tag` using the Transfer Family resource identifier and key, separated by a comma (`,`). For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import `aws_transfer_tag` using the Transfer Family resource identifier and key, separated by a comma (`,`). For example: + +```console +% terraform import aws_transfer_tag.example arn:aws:transfer:us-east-1:123456789012:server/s-1234567890abcdef0,Name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/transfer_user.html.markdown b/website/docs/cdktf/python/r/transfer_user.html.markdown new file mode 100644 index 00000000000..250221e41b2 --- /dev/null +++ b/website/docs/cdktf/python/r/transfer_user.html.markdown @@ -0,0 +1,158 @@ +--- +subcategory: "Transfer Family" +layout: "aws" +page_title: "AWS: aws_transfer_user" +description: |- + Provides a AWS Transfer User resource. +--- + + + +# Resource: aws_transfer_user + +Provides a AWS Transfer User resource. Managing SSH keys can be accomplished with the [`aws_transfer_ssh_key` resource](/docs/providers/aws/r/transfer_ssh_key.html). + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy import IamRolePolicy +from imports.aws.transfer_server import TransferServer +from imports.aws.transfer_user import TransferUser +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foo = TransferServer(self, "foo", + identity_provider_type="SERVICE_MANAGED", + tags={ + "NAME": "tf-acc-test-transfer-server" + } + ) + assume_role = DataAwsIamPolicyDocument(self, "assume_role", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + effect="Allow", + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["transfer.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + data_aws_iam_policy_document_foo = DataAwsIamPolicyDocument(self, "foo_2", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["s3:*"], + effect="Allow", + resources=["*"], + sid="AllowFullAccesstoS3" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + data_aws_iam_policy_document_foo.override_logical_id("foo") + aws_iam_role_foo = IamRole(self, "foo_3", + assume_role_policy=Token.as_string(assume_role.json), + name="tf-test-transfer-user-iam-role" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_foo.override_logical_id("foo") + aws_iam_role_policy_foo = IamRolePolicy(self, "foo_4", + name="tf-test-transfer-user-iam-policy", + policy=Token.as_string(data_aws_iam_policy_document_foo.json), + role=Token.as_string(aws_iam_role_foo.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_iam_role_policy_foo.override_logical_id("foo") + aws_transfer_user_foo = TransferUser(self, "foo_5", + home_directory_mappings=[TransferUserHomeDirectoryMappings( + entry="/test.pdf", + target="/bucket3/test-path/tftestuser.pdf" + ) + ], + home_directory_type="LOGICAL", + role=Token.as_string(aws_iam_role_foo.arn), + server_id=foo.id, + user_name="tftestuser" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_transfer_user_foo.override_logical_id("foo") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `server_id` - (Required) The Server ID of the Transfer Server (e.g., `s-12345678`) +* `user_name` - (Required) The name used for log in to your SFTP server. +* `home_directory` - (Optional) The landing directory (folder) for a user when they log in to the server using their SFTP client. It should begin with a `/`. The first item in the path is the name of the home bucket (accessible as `${Transfer:HomeBucket}` in the policy) and the rest is the home directory (accessible as `${Transfer:HomeDirectory}` in the policy). For example, `/example-bucket-1234/username` would set the home bucket to `example-bucket-1234` and the home directory to `username`. +* `home_directory_mappings` - (Optional) Logical directory mappings that specify what S3 paths and keys should be visible to your user and how you want to make them visible. See [Home Directory Mappings](#home-directory-mappings) below. +* `home_directory_type` - (Optional) The type of landing directory (folder) you mapped for your users' home directory. 
Valid values are `PATH` and `LOGICAL`. +* `policy` - (Optional) An IAM JSON policy document that scopes down user access to portions of their Amazon S3 bucket. IAM variables you can use inside this policy include `${Transfer:UserName}`, `${Transfer:HomeDirectory}`, and `${Transfer:HomeBucket}`. Since the IAM variable syntax matches Terraform's interpolation syntax, they must be escaped inside Terraform configuration strings (`$${Transfer:UserName}`). These are evaluated on-the-fly when navigating the bucket. +* `posix_profile` - (Optional) Specifies the full POSIX identity, including user ID (Uid), group ID (Gid), and any secondary groups IDs (SecondaryGids), that controls your users' access to your Amazon EFS file systems. See [Posix Profile](#posix-profile) below. +* `role` - (Required) Amazon Resource Name (ARN) of an IAM role that allows the service to control your user’s access to your Amazon S3 bucket. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Home Directory Mappings + +* `entry` - (Required) Represents an entry and a target. +* `target` - (Required) Represents the map target. + +The `Restricted` option is achieved using the following mapping: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +### Posix Profile + +* `gid` - (Required) The POSIX group ID used for all EFS operations by this user. +* `uid` - (Required) The POSIX user ID used for all EFS operations by this user. +* `secondary_gids` - (Optional) The secondary POSIX group IDs used for all EFS operations by this user. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Transfer User +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Users using the `server_id` and `user_name` separated by `/`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Transfer Users using the `server_id` and `user_name` separated by `/`. 
For example: + +```console +% terraform import aws_transfer_user.bar s-12345678/test-username +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/transfer_workflow.html.markdown b/website/docs/cdktf/python/r/transfer_workflow.html.markdown new file mode 100644 index 00000000000..5b62fdbfed7 --- /dev/null +++ b/website/docs/cdktf/python/r/transfer_workflow.html.markdown @@ -0,0 +1,180 @@ +--- +subcategory: "Transfer Family" +layout: "aws" +page_title: "AWS: aws_transfer_workflow" +description: |- + Provides a AWS Transfer Workflow resource. +--- + + + +# Resource: aws_transfer_workflow + +Provides a AWS Transfer Workflow resource. + +## Example Usage + +### Basic single step example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.transfer_workflow import TransferWorkflow +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TransferWorkflow(self, "example", + steps=[TransferWorkflowSteps( + delete_step_details=TransferWorkflowStepsDeleteStepDetails( + name="example", + source_file_location=file + ), + type="DELETE" + ) + ] + ) +``` + +### Multistep example + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.transfer_workflow import TransferWorkflow +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + TransferWorkflow(self, "example", + steps=[TransferWorkflowSteps( + custom_step_details=TransferWorkflowStepsCustomStepDetails( + name="example", + source_file_location=file, + target=Token.as_string(aws_lambda_function_example.arn), + timeout_seconds=60 + ), + type="CUSTOM" + ), TransferWorkflowSteps( + tag_step_details=TransferWorkflowStepsTagStepDetails( + name="example", + source_file_location=file, + tags=[TransferWorkflowStepsTagStepDetailsTags( + key="Name", + value="Hello World" + ) + ] + ), + type="TAG" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) A textual description for the workflow. +* `on_exception_steps` - (Optional) Specifies the steps (actions) to take if errors are encountered during execution of the workflow. See Workflow Steps below. +* `steps` - (Required) Specifies the details for the steps that are in the specified workflow. See Workflow Steps below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Workflow Steps + +* `copy_step_details` - (Optional) Details for a step that performs a file copy. See Copy Step Details below. +* `custom_step_details` - (Optional) Details for a step that invokes a lambda function. +* `decrypt_step_details` - (Optional) Details for a step that decrypts the file. +* `delete_step_details` - (Optional) Details for a step that deletes the file. 
+
+* `tag_step_details` - (Optional) Details for a step that creates one or more tags.
+* `type` - (Required) The type of the step. Valid values are `COPY`, `CUSTOM`, `DECRYPT`, `DELETE`, and `TAG`.
+
+#### Copy Step Details
+
+* `destination_file_location` - (Optional) Specifies the location for the file being copied. Use ${Transfer:username} in this field to parametrize the destination prefix by username.
+* `name` - (Optional) The name of the step, used as an identifier.
+* `overwrite_existing` - (Optional) A flag that indicates whether or not to overwrite an existing file of the same name. The default is `FALSE`. Valid values are `TRUE` and `FALSE`.
+* `source_file_location` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value. Enter ${original.file} to use the originally-uploaded file location as input for this step.
+
+#### Custom Step Details
+
+* `name` - (Optional) The name of the step, used as an identifier.
+* `source_file_location` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value. Enter ${original.file} to use the originally-uploaded file location as input for this step.
+* `target` - (Optional) The ARN for the Lambda function that is being called.
+* `timeout_seconds` - (Optional) Timeout, in seconds, for the step.
+
+#### Decrypt Step Details
+
+* `destination_file_location` - (Optional) Specifies the location for the file being copied. Use ${Transfer:username} in this field to parametrize the destination prefix by username.
+* `name` - (Optional) The name of the step, used as an identifier.
+* `overwrite_existing` - (Optional) A flag that indicates whether or not to overwrite an existing file of the same name. The default is `FALSE`. Valid values are `TRUE` and `FALSE`.
+* `source_file_location` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value. Enter ${original.file} to use the originally-uploaded file location as input for this step.
+* `type` - (Required) The type of encryption used. Currently, this value must be `"PGP"`.
+
+#### Delete Step Details
+
+* `name` - (Optional) The name of the step, used as an identifier.
+* `source_file_location` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value. Enter ${original.file} to use the originally-uploaded file location as input for this step.
+
+#### Tag Step Details
+
+* `name` - (Optional) The name of the step, used as an identifier. 
+
+* `source_file_location` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter ${previous.file} to use the previous file as the input. In this case, this workflow step uses the output file from the previous workflow step as input. This is the default value. Enter ${original.file} to use the originally-uploaded file location as input for this step.
+* `tags` - (Optional) Array that contains from 1 to 10 key/value pairs. See S3 Tag below.
+
+##### Destination File Location
+
+* `efs_file_location` - (Optional) Specifies the details for the EFS file being copied.
+* `s3_file_location` - (Optional) Specifies the details for the S3 file being copied.
+
+###### EFS File Location
+
+* `file_system_id` - (Optional) The ID of the file system, assigned by Amazon EFS.
+* `path` - (Optional) The pathname for the folder being used by a workflow.
+
+###### S3 File Location
+
+* `bucket` - (Optional) Specifies the S3 bucket for the customer input file.
+* `key` - (Optional) The name assigned to the file when it was created in S3. You use the object key to retrieve the object.
+
+##### S3 Tag
+
+* `key` - (Required) The name assigned to the tag that you create.
+* `value` - (Required) The value that corresponds to the key.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Workflow ARN.
+* `id` - The Workflow ID.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Workflows using the `workflow_id`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import Transfer Workflows using the `workflow_id`. For example:
+
+```console
+% terraform import aws_transfer_workflow.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/volume_attachment.html.markdown b/website/docs/cdktf/python/r/volume_attachment.html.markdown
new file mode 100644
index 00000000000..768a2f9facb
--- /dev/null
+++ b/website/docs/cdktf/python/r/volume_attachment.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_volume_attachment"
+description: |-
+  Provides an AWS EBS Volume Attachment
+---
+
+
+
+# Resource: aws_volume_attachment
+
+Provides an AWS EBS Volume Attachment as a top-level resource, to attach and
+detach volumes from AWS Instances.
+
+~> **NOTE on EBS block devices:** If you use `ebs_block_device` on an `aws_instance`, Terraform will assume management over the full set of non-root EBS block devices for the instance, and will treat additional block devices as drift. For this reason, `ebs_block_device` cannot be mixed with external `aws_ebs_volume` + `aws_volume_attachment` resources for a given instance. 
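+
+When the instance or volume is created by some other means, only the attachment itself lives in Terraform. A hypothetical sketch of that pattern (the IDs below are placeholders, and `skip_destroy` is documented under Argument Reference further down):
+
+```python
+# Minimal sketch, assuming the volume and instance already exist outside
+# this stack; the IDs are placeholders, not real resources.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.volume_attachment import VolumeAttachment
+class ExternalVolumeStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        VolumeAttachment(self, "external_att",
+            device_name="/dev/sdh",
+            instance_id="i-1234567890abcdef0",
+            volume_id="vol-049df61146c4d7901",
+            # Keep the volume attached when this resource is destroyed.
+            skip_destroy=True
+        )
+```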
+ +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.ebs_volume import EbsVolume +from imports.aws.instance import Instance +from imports.aws.volume_attachment import VolumeAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = EbsVolume(self, "example", + availability_zone="us-west-2a", + size=1 + ) + web = Instance(self, "web", + ami="ami-21f78e11", + availability_zone="us-west-2a", + instance_type="t2.micro", + tags={ + "Name": "HelloWorld" + } + ) + VolumeAttachment(self, "ebs_att", + device_name="/dev/sdh", + instance_id=web.id, + volume_id=example.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `device_name` - (Required) The device name to expose to the instance (for +example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances][1] and [Device Naming on Windows Instances][2] for more information. +* `instance_id` - (Required) ID of the Instance to attach to +* `volume_id` - (Required) ID of the Volume to be attached +* `force_detach` - (Optional, Boolean) Set to `true` if you want to force the +volume to detach. Useful if previous attempts failed, but use this option only +as a last resort, as this can result in **data loss**. See +[Detaching an Amazon EBS Volume from an Instance][3] for more information. +* `skip_destroy` - (Optional, Boolean) Set this to true if you do not wish +to detach the volume from the instance to which it is attached at destroy +time, and instead just remove the attachment from Terraform state. This is +useful when destroying an instance which has volumes created by some other +means attached. +* `stop_instance_before_detaching` - (Optional, Boolean) Set this to true to ensure that the target instance is stopped +before trying to detach the volume. Stops the instance, if it is not already stopped. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `device_name` - The device name exposed to the instance +* `instance_id` - ID of the Instance +* `volume_id` - ID of the Volume + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EBS Volume Attachments using `DEVICE_NAME:VOLUME_ID:INSTANCE_ID`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import EBS Volume Attachments using `DEVICE_NAME:VOLUME_ID:INSTANCE_ID`. 
For example: + +```console +% terraform import aws_volume_attachment.example /dev/sdh:vol-049df61146c4d7901:i-12345678 +``` + +[1]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names +[2]: https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names +[3]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc.html.markdown b/website/docs/cdktf/python/r/vpc.html.markdown index 9f47e71608d..d4c0d3841f1 100644 --- a/website/docs/cdktf/python/r/vpc.html.markdown +++ b/website/docs/cdktf/python/r/vpc.html.markdown @@ -105,7 +105,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidr_block` - (Optional) The IPv4 CIDR block for the VPC. CIDR can be explicitly set or it can be derived from IPAM using `ipv4_netmask_length`. * `instance_tenancy` - (Optional) A tenancy option for instances launched into the VPC. Default is `default`, which ensures that EC2 instances launched in this VPC use the EC2 instance tenancy attribute specified when the EC2 instance is launched. The only other option is `dedicated`, which ensures that EC2 instances launched in this VPC are run on dedicated tenancy instances regardless of the tenancy attribute specified at launch. This has a dedicated per region fee of $2 per hour, plus an hourly per instance usage fee. @@ -121,9 +121,9 @@ The following arguments are supported: * `assign_generated_ipv6_cidr_block` - (Optional) Requests an Amazon-provided IPv6 CIDR block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or the size of the CIDR block. Default is `false`. Conflicts with `ipv6_ipam_pool_id` * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of VPC * `id` - The ID of the VPC @@ -144,10 +144,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPCs can be imported using the `vpc id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPCs using the VPC `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc.test_vpc vpc-a01106c2 + +Using `terraform import`, import VPCs using the VPC `id`. 
For example: + +```console +% terraform import aws_vpc.test_vpc vpc-a01106c2 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_dhcp_options.html.markdown b/website/docs/cdktf/python/r/vpc_dhcp_options.html.markdown index 5ed06604909..3d14db2ad35 100644 --- a/website/docs/cdktf/python/r/vpc_dhcp_options.html.markdown +++ b/website/docs/cdktf/python/r/vpc_dhcp_options.html.markdown @@ -61,7 +61,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `domain_name` - (Optional) the suffix domain name to use by default when resolving non Fully Qualified Domain Names. In other words, this is what ends up being the `search` value in the `/etc/resolv.conf` file. * `domain_name_servers` - (Optional) List of name servers to configure in `/etc/resolv.conf`. If you want to use the default AWS nameservers you should set this to `AmazonProvidedDNS`. @@ -78,9 +78,9 @@ The following arguments are supported: * If you delete a DHCP Options Set, all VPCs using it will be associated to AWS's `default` DHCP Option Set. * In most cases unless you're configuring your own DNS you'll want to set `domain_name_servers` to `AmazonProvidedDNS`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the DHCP Options Set. * `arn` - The ARN of the DHCP Options Set. @@ -92,10 +92,21 @@ official [AWS User Guide](https://docs.aws.amazon.com/AmazonVPC/latest/UserGuide ## Import -VPC DHCP Options can be imported using the `dhcp options id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC DHCP Options using the DHCP Options `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_dhcp_options.my_options dopt-d9070ebb + +Using `terraform import`, import VPC DHCP Options using the DHCP Options `id`. For example: + +```console +% terraform import aws_vpc_dhcp_options.my_options dopt-d9070ebb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_dhcp_options_association.html.markdown b/website/docs/cdktf/python/r/vpc_dhcp_options_association.html.markdown index 657e4b9825f..c1075d7651c 100644 --- a/website/docs/cdktf/python/r/vpc_dhcp_options_association.html.markdown +++ b/website/docs/cdktf/python/r/vpc_dhcp_options_association.html.markdown @@ -34,7 +34,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_id` - (Required) The ID of the VPC to which we would like to associate a DHCP Options Set. * `dhcp_options_id` - (Required) The ID of the DHCP Options Set to associate to the VPC. @@ -44,18 +44,29 @@ The following arguments are supported: * You can only associate one DHCP Options Set to a given VPC ID. * Removing the DHCP Options Association automatically sets AWS's `default` DHCP Options Set to the VPC. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the DHCP Options Set Association. ## Import -DHCP associations can be imported by providing the VPC ID associated with the options: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DHCP associations using the VPC ID associated with the options. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_dhcp_options_association.imported vpc-0f001273ec18911b1 + +Using `terraform import`, import DHCP associations using the VPC ID associated with the options. For example: + +```console +% terraform import aws_vpc_dhcp_options_association.imported vpc-0f001273ec18911b1 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint.html.markdown index 609006b08da..860254a47d9 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint.html.markdown @@ -167,7 +167,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `service_name` - (Required) The service name. For AWS services the service name is usually in the form `com.amazonaws..` (the SageMaker Notebook service is an exception to this rule, the service name is in the form `aws.sagemaker..notebook`). * `vpc_id` - (Required) The ID of the VPC in which the endpoint will be used. @@ -197,9 +197,9 @@ If no security groups are specified, the VPC's [default security group](https:// - `update` - (Default `10m`) - `delete` - (Default `10m`) -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC endpoint. * `arn` - The Amazon Resource Name (ARN) of the VPC endpoint. @@ -219,10 +219,21 @@ DNS blocks (for `dns_entry`) support the following attributes: ## Import -VPC Endpoints can be imported using the `vpc endpoint id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoints using the VPC endpoint `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_endpoint.endpoint1 vpce-3ecf2a57 + +Using `terraform import`, import VPC Endpoints using the VPC endpoint `id`. 
For example: + +```console +% terraform import aws_vpc_endpoint.endpoint1 vpce-3ecf2a57 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_connection_accepter.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_connection_accepter.html.markdown index b5589d1b2bc..014cf2b88aa 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_connection_accepter.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_connection_accepter.html.markdown @@ -55,24 +55,35 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_endpoint_id` - (Required) AWS VPC Endpoint ID. * `vpc_endpoint_service_id` - (Required) AWS VPC Endpoint Service ID. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC Endpoint Connection. * `vpc_endpoint_state` - State of the VPC Endpoint. ## Import -VPC Endpoint Services can be imported using ID of the connection, which is the `VPC Endpoint Service ID` and `VPC Endpoint ID` separated by underscore (`_`). e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Services using the ID of the connection, which is the `VPC Endpoint Service ID` and `VPC Endpoint ID` separated by underscore (`_`). For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_endpoint_connection_accepter.foo vpce-svc-0f97a19d3fa8220bc_vpce-010601a6db371e263 + +Using `terraform import`, import VPC Endpoint Services using the ID of the connection, which is the `VPC Endpoint Service ID` and `VPC Endpoint ID` separated by underscore (`_`). For example: + +```console +% terraform import aws_vpc_endpoint_connection_accepter.foo vpce-svc-0f97a19d3fa8220bc_vpce-010601a6db371e263 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_connection_notification.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_connection_notification.html.markdown index 7b820597591..e0f417d68e2 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_connection_notification.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_connection_notification.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_endpoint_service_id` - (Optional) The ID of the VPC Endpoint Service to receive notifications for. * `vpc_endpoint_id` - (Optional) The ID of the VPC Endpoint to receive notifications for. @@ -74,9 +74,9 @@ The following arguments are supported: ~> **NOTE:** One of `vpc_endpoint_service_id` or `vpc_endpoint_id` must be specified. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC connection notification. * `state` - The state of the notification.
@@ -84,10 +84,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Endpoint connection notifications can be imported using the `VPC endpoint connection notification id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint connection notifications using the VPC endpoint connection notification `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_endpoint_connection_notification.foo vpce-nfn-09e6ed3b4efba2263 + +Using `terraform import`, import VPC Endpoint connection notifications using the VPC endpoint connection notification `id`. For example: + +```console +% terraform import aws_vpc_endpoint_connection_notification.foo vpce-nfn-09e6ed3b4efba2263 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_policy.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_policy.html.markdown index a690d8360e2..2fbeefa9247 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_policy.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_policy.html.markdown @@ -66,23 +66,34 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_endpoint_id` - (Required) The VPC Endpoint ID. * `policy` - (Optional) A policy to attach to the endpoint that controls access to the service. Defaults to full access. All `Gateway` and some `Interface` endpoints support policies - see the [relevant AWS documentation](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-endpoints-access.html) for more details. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC endpoint. ## Import -VPC Endpoint Policies can be imported using the `id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Policies using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_endpoint_policy.example vpce-3ecf2a57 + +Using `terraform import`, import VPC Endpoint Policies using the `id`. 
For example: + +```console +% terraform import aws_vpc_endpoint_policy.example vpce-3ecf2a57 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_route_table_association.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_route_table_association.html.markdown index a6b3734cd2e..b66f0613fbe 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_route_table_association.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_route_table_association.html.markdown @@ -34,24 +34,34 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `route_table_id` - (Required) Identifier of the EC2 Route Table to be associated with the VPC Endpoint. * `vpc_endpoint_id` - (Required) Identifier of the VPC Endpoint with which the EC2 Route Table will be associated. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - A hash of the EC2 Route Table and VPC Endpoint identifiers. ## Import -VPC Endpoint Route Table Associations can be imported using `vpc_endpoint_id` together with `route_table_id`, -e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Route Table Associations using `vpc_endpoint_id` together with `route_table_id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_endpoint_route_table_association.example vpce-aaaaaaaa/rtb-bbbbbbbb + +Using `terraform import`, import VPC Endpoint Route Table Associations using `vpc_endpoint_id` together with `route_table_id`. For example: + +```console +% terraform import aws_vpc_endpoint_route_table_association.example vpce-aaaaaaaa/rtb-bbbbbbbb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_security_group_association.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_security_group_association.html.markdown index 2f5e5c7702c..740b84ac170 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_security_group_association.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_security_group_association.html.markdown @@ -42,16 +42,16 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `security_group_id` - (Required) The ID of the security group to be associated with the VPC endpoint. * `vpc_endpoint_id` - (Required) The ID of the VPC endpoint with which the security group will be associated. * `replace_default_association` - (Optional) Whether this association should replace the association with the VPC's default security group that is created when no security groups are specified during VPC endpoint creation. At most one association per VPC endpoint should be configured with `replace_default_association = true`.
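A minimal sketch of the `replace_default_association` behavior described above (assuming provider bindings generated by `cdktf get`; the endpoint and security group IDs are hypothetical placeholders):

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.vpc_endpoint_security_group_association import VpcEndpointSecurityGroupAssociation

class SecurityGroupAssociationSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # At most one association per VPC endpoint may set
        # replace_default_association=True.
        VpcEndpointSecurityGroupAssociation(self, "example",
            vpc_endpoint_id="vpce-3ecf2a57",   # hypothetical VPC endpoint ID
            security_group_id="sg-12345678",   # hypothetical security group ID
            replace_default_association=True
        )
```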
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the association. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_service.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_service.html.markdown index 51bfa796173..66cad5d229a 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_service.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_service.html.markdown @@ -63,7 +63,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `acceptance_required` - (Required) Whether or not VPC endpoint connection requests to the service must be accepted by the service owner - `true` or `false`. * `allowed_principals` - (Optional) The ARNs of one or more principals allowed to discover the endpoint service. @@ -73,9 +73,9 @@ The following arguments are supported: * `private_dns_name` - (Optional) The private DNS name for the service. * `supported_ip_address_types` - (Optional) The supported IP address types. The possible values are `ipv4` and `ipv6`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC endpoint service. * `availability_zones` - A set of Availability Zones in which the service is available. @@ -94,10 +94,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Endpoint Services can be imported using the `VPC endpoint service id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Services using the VPC endpoint service `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_endpoint_service.foo vpce-svc-0f97a19d3fa8220bc + +Using `terraform import`, import VPC Endpoint Services using the VPC endpoint service `id`. For example: + +```console +% terraform import aws_vpc_endpoint_service.foo vpce-svc-0f97a19d3fa8220bc ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_service_allowed_principal.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_service_allowed_principal.html.markdown index 560655f2d41..1732d42ef61 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_service_allowed_principal.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_service_allowed_principal.html.markdown @@ -44,15 +44,15 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_endpoint_service_id` - (Required) The ID of the VPC endpoint service to allow permission. * `principal_arn` - (Required) The ARN of the principal to allow permissions. 
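A minimal sketch tying the two required arguments together (assuming provider bindings generated by `cdktf get`; both identifiers are hypothetical placeholders):

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.vpc_endpoint_service_allowed_principal import VpcEndpointServiceAllowedPrincipal

class AllowedPrincipalSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # Allows the IAM principal to discover the endpoint service.
        VpcEndpointServiceAllowedPrincipal(self, "example",
            vpc_endpoint_service_id="vpce-svc-0f97a19d3fa8220bc",  # hypothetical service ID
            principal_arn="arn:aws:iam::123456789012:root"         # hypothetical principal ARN
        )
```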
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the association. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_endpoint_subnet_association.html.markdown b/website/docs/cdktf/python/r/vpc_endpoint_subnet_association.html.markdown index cadea84ea7d..15b2abc5254 100644 --- a/website/docs/cdktf/python/r/vpc_endpoint_subnet_association.html.markdown +++ b/website/docs/cdktf/python/r/vpc_endpoint_subnet_association.html.markdown @@ -42,14 +42,14 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_endpoint_id` - (Required) The ID of the VPC endpoint with which the subnet will be associated. * `subnet_id` - (Required) The ID of the subnet to be associated with the VPC endpoint. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the association. @@ -62,11 +62,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Endpoint Subnet Associations can be imported using `vpc_endpoint_id` together with `subnet_id`, -e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Subnet Associations using `vpc_endpoint_id` together with `subnet_id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_endpoint_subnet_association.example vpce-aaaaaaaa/subnet-bbbbbbbbbbbbbbbbb + +Using `terraform import`, import VPC Endpoint Subnet Associations using `vpc_endpoint_id` together with `subnet_id`. For example: + +```console +% terraform import aws_vpc_endpoint_subnet_association.example vpce-aaaaaaaa/subnet-bbbbbbbbbbbbbbbbb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam.html.markdown b/website/docs/cdktf/python/r/vpc_ipam.html.markdown index d0690383d58..749fefc766a 100644 --- a/website/docs/cdktf/python/r/vpc_ipam.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam.html.markdown @@ -82,7 +82,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `description` - (Optional) A description for the IPAM. * `operating_regions` - (Required) Determines which locales can be chosen when you create pools. Locale is the Region where you want to make an IPAM pool available for allocations. You can only create pools with locales that match the operating Regions of the IPAM. You can only create VPCs from a pool whose locale matches the VPC's Region. You specify a region using the [region_name](#operating_regions) parameter. You **must** set your provider block region as an operating_region. @@ -93,9 +93,9 @@ The following arguments are supported: * `region_name` - (Required) The name of the Region you want to add to the IPAM. 
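A minimal sketch of the `operating_regions` requirement described above (assuming provider bindings generated by `cdktf get` and a provider block configured for `us-east-1`; the `VpcIpamOperatingRegions` struct name follows the generated bindings):

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.vpc_ipam import VpcIpam, VpcIpamOperatingRegions

class IpamSketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        # The provider block's own region must be listed as an operating region.
        VpcIpam(self, "example",
            description="example IPAM",
            operating_regions=[VpcIpamOperatingRegions(region_name="us-east-1")]
        )
```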
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of IPAM * `id` - The ID of the IPAM @@ -109,10 +109,21 @@ IP space. The public scope is intended for all internet-routable IP space. ## Import -IPAMs can be imported using the `ipam id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam.example ipam-0178368ad2146a492 + +Using `terraform import`, import IPAMs using the IPAM `id`. For example: + +```console +% terraform import aws_vpc_ipam.example ipam-0178368ad2146a492 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_organization_admin_account.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_organization_admin_account.html.markdown index 21817a47bcb..3409e7c932c 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_organization_admin_account.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_organization_admin_account.html.markdown @@ -43,13 +43,13 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `delegated_admin_account_id` - (Required) -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Organizations ARN for the delegate account. * `id` - The Organizations member account ID that you want to enable as the IPAM account. @@ -59,10 +59,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `delegate account id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the delegate account `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam_organization_admin_account.example 12345678901 + +Using `terraform import`, import IPAMs using the delegate account `id`. 
For example: + +```console +% terraform import aws_vpc_ipam_organization_admin_account.example 12345678901 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_pool.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_pool.html.markdown index e98bc4d531c..abc5a5daead 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_pool.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_pool.html.markdown @@ -92,7 +92,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `address_family` - (Optional) The IP protocol assigned to this pool. You must choose either IPv4 or IPv6 protocol for a pool. * `allocation_default_netmask_length` - (Optional) A default netmask length for allocations added to this pool. If, for example, the CIDR assigned to this pool is 10.0.0.0/8 and you enter 16 here, new allocations will default to 10.0.0.0/16 (unless you provide a different netmask value when you create the new allocation). @@ -110,9 +110,9 @@ within the CIDR range in the pool. * `source_ipam_pool_id` - (Optional) The ID of the source IPAM pool. Use this argument to create a child pool within an existing pool. * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of IPAM * `id` - The ID of the IPAM @@ -121,10 +121,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `ipam pool id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM pool `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam_pool.example ipam-pool-0958f95207d978e1e + +Using `terraform import`, import IPAMs using the IPAM pool `id`. For example: + +```console +% terraform import aws_vpc_ipam_pool.example ipam-pool-0958f95207d978e1e ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_pool_cidr.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_pool_cidr.html.markdown index 9bdb0d81815..b02ab941116 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_pool_cidr.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_pool_cidr.html.markdown @@ -104,7 +104,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidr` - (Optional) The CIDR you want to assign to the pool. Conflicts with `netmask_length`. * `cidr_authorization_context` - (Optional) A signed document that proves that you are authorized to bring the specified IP address range to Amazon using BYOIP. This is not stored in the state file. 
See [cidr_authorization_context](#cidr_authorization_context) for more information. @@ -116,19 +116,34 @@ The following arguments are supported: * `message` - (Optional) The plain-text authorization message for the prefix and account. * `signature` - (Optional) The signed authorization message for the prefix and account. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the IPAM Pool Cidr concatenated with the IPAM Pool ID. * `ipam_pool_cidr_id` - The unique ID generated by AWS for the pool cidr. Typically this is the resource `id` but this attribute was added to the API calls after the fact and is therefore not used as the terraform resource id. ## Import -IPAMs can be imported using the `<cidr>_<ipam-pool-id>`. Please note we **DO NOT** use the ipam pool cidr id as this was introduced after the resource already existed. An import example: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the `<cidr>_<ipam-pool-id>`. For example: +**NOTE:** Do not use the IPAM Pool Cidr ID as this was introduced after the resource already existed. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam_pool_cidr.example 172.2.0.0/24_ipam-pool-0e634f5a1517cccdc + +Using `terraform import`, import IPAMs using the `<cidr>_<ipam-pool-id>`. For example: + +**NOTE:** Do not use the IPAM Pool Cidr ID as this was introduced after the resource already existed. + +```console +% terraform import aws_vpc_ipam_pool_cidr.example 172.2.0.0/24_ipam-pool-0e634f5a1517cccdc ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_pool_cidr_allocation.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_pool_cidr_allocation.html.markdown index e33b503282f..68a4c0627ce 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_pool_cidr_allocation.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_pool_cidr_allocation.html.markdown @@ -111,7 +111,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidr` - (Optional) The CIDR you want to assign to the pool. * `description` - (Optional) The description for the allocation. @@ -119,9 +119,9 @@ The following arguments are supported: * `ipam_pool_id` - (Required) The ID of the pool to which you want to assign a CIDR. * `netmask_length` - (Optional) The netmask length of the CIDR you would like to allocate to the IPAM pool. Valid Values: `0-128`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the allocation. * `resource_id` - The ID of the resource. @@ -130,10 +130,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAM allocations can be imported using the `allocation id` and `pool id`, separated by `_`, e.g.
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAM allocations using the allocation `id` and `pool id`, separated by `_`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam_pool_cidr_allocation.example ipam-pool-alloc-0dc6d196509c049ba8b549ff99f639736_ipam-pool-07cfb559e0921fcbe + +Using `terraform import`, import IPAM allocations using the allocation `id` and `pool id`, separated by `_`. For example: + +```console +% terraform import aws_vpc_ipam_pool_cidr_allocation.example ipam-pool-alloc-0dc6d196509c049ba8b549ff99f639736_ipam-pool-07cfb559e0921fcbe ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_preview_next_cidr.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_preview_next_cidr.html.markdown index 10ae09302b2..3dc0f7d314a 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_preview_next_cidr.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_preview_next_cidr.html.markdown @@ -64,17 +64,17 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `disallowed_cidrs` - (Optional) Exclude a particular CIDR range from being returned by the pool. * `ipam_pool_id` - (Required) The ID of the pool to which you want to assign a CIDR. * `netmask_length` - (Optional) The netmask length of the CIDR you would like to preview from the IPAM pool. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `cidr` - The previewed CIDR from the pool. * `id` - The ID of the preview. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_resource_discovery.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_resource_discovery.html.markdown index 5a85e9b8a00..f9b4fb63a7b 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_resource_discovery.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_resource_discovery.html.markdown @@ -44,7 +44,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `description` - (Optional) A description for the IPAM Resource Discovery. * `operating_regions` - (Required) Determines which regions the Resource Discovery will enable IPAM features for usage and monitoring. Locale is the Region where you want to make an IPAM pool available for allocations. You can only create pools with locales that match the operating Regions of the IPAM Resource Discovery. You can only create VPCs from a pool whose locale matches the VPC's Region. You specify a region using the [region_name](#operating_regions) parameter. **You must set your provider block region as an operating_region.** @@ -54,9 +54,9 @@ The following arguments are supported: * `region_name` - (Required) The name of the Region you want to add to the IPAM. 
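The `operating_regions` block mirrors the one on `aws_vpc_ipam`; a minimal sketch (assuming provider bindings generated by `cdktf get` and a provider block configured for `us-east-1`; the struct name follows the generated bindings):

```python
from constructs import Construct
from cdktf import TerraformStack
from imports.aws.vpc_ipam_resource_discovery import (
    VpcIpamResourceDiscovery,
    VpcIpamResourceDiscoveryOperatingRegions,
)

class ResourceDiscoverySketch(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        VpcIpamResourceDiscovery(self, "example",
            description="example resource discovery",
            # The provider block's region must be among the operating regions.
            operating_regions=[
                VpcIpamResourceDiscoveryOperatingRegions(region_name="us-east-1")
            ]
        )
```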
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of IPAM Resource Discovery * `id` - The ID of the IPAM Resource Discovery @@ -67,10 +67,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `ipam resource discovery id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM resource discovery `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam_resource_discovery.example ipam-res-disco-0178368ad2146a492 + +Using `terraform import`, import IPAMs using the IPAM resource discovery `id`. For example: + +```console +% terraform import aws_vpc_ipam_resource_discovery.example ipam-res-disco-0178368ad2146a492 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_resource_discovery_association.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_resource_discovery_association.html.markdown index 40b5572856a..2e7434722ad 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_resource_discovery_association.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_resource_discovery_association.html.markdown @@ -41,15 +41,15 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ipam_id` - (Required) The ID of the IPAM to associate. * `ipam_resource_discovery_id` - (Required) The ID of the Resource Discovery to associate. * `tags` - (Optional) A map of tags to add to the IPAM resource discovery association resource. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of IPAM Resource Discovery Association. * `id` - The ID of the IPAM Resource Discovery Association. @@ -62,10 +62,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `ipam resource discovery association id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM resource discovery association `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam_resource_discovery_association.example ipam-res-disco-assoc-0178368ad2146a492 + +Using `terraform import`, import IPAMs using the IPAM resource discovery association `id`. 
For example: + +```console +% terraform import aws_vpc_ipam_resource_discovery_association.example ipam-res-disco-assoc-0178368ad2146a492 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipam_scope.html.markdown b/website/docs/cdktf/python/r/vpc_ipam_scope.html.markdown index b7f1eebf0cc..fe3060d7aa8 100644 --- a/website/docs/cdktf/python/r/vpc_ipam_scope.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipam_scope.html.markdown @@ -47,15 +47,15 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ipam_id` - The ID of the IPAM for which you're creating this scope. * `description` - (Optional) A description for the scope you're creating. * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the scope. * `id` - The ID of the IPAM Scope. @@ -66,10 +66,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `scope_id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the `scope_id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipam_scope.example ipam-scope-0513c69f283d11dfb + +Using `terraform import`, import IPAMs using the `scope_id`. For example: + +```console +% terraform import aws_vpc_ipam_scope.example ipam-scope-0513c69f283d11dfb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipv4_cidr_block_association.html.markdown b/website/docs/cdktf/python/r/vpc_ipv4_cidr_block_association.html.markdown index 072452a39dc..56c288c3ab6 100644 --- a/website/docs/cdktf/python/r/vpc_ipv4_cidr_block_association.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipv4_cidr_block_association.html.markdown @@ -41,16 +41,16 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidr_block` - (Optional) The IPv4 CIDR block for the VPC. CIDR can be explicitly set or it can be derived from IPAM using `ipv4_netmask_length`. * `ipv4_ipam_pool_id` - (Optional) The ID of an IPv4 IPAM pool you want to use for allocating this VPC's CIDR. IPAM is a VPC feature that you can use to automate your IP address management workflows including assigning, tracking, troubleshooting, and auditing IP addresses across AWS Regions and accounts. Using IPAM you can monitor IP address usage throughout your AWS Organization. * `ipv4_netmask_length` - (Optional) The netmask length of the IPv4 CIDR you want to allocate to this VPC. Requires specifying an `ipv4_ipam_pool_id`.
* `vpc_id` - (Required) The ID of the VPC to make the association with. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC CIDR association @@ -63,10 +63,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -`aws_vpc_ipv4_cidr_block_association` can be imported by using the VPC CIDR Association ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_vpc_ipv4_cidr_block_association` using the VPC CIDR Association ID. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipv4_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx + +Using `terraform import`, import `aws_vpc_ipv4_cidr_block_association` using the VPC CIDR Association ID. For example: + +```console +% terraform import aws_vpc_ipv4_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_ipv6_cidr_block_association.html.markdown b/website/docs/cdktf/python/r/vpc_ipv6_cidr_block_association.html.markdown index 36d68b4c96d..c7f0e4af638 100644 --- a/website/docs/cdktf/python/r/vpc_ipv6_cidr_block_association.html.markdown +++ b/website/docs/cdktf/python/r/vpc_ipv6_cidr_block_association.html.markdown @@ -42,7 +42,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ipv6_cidr_block` - (Optional) The IPv6 CIDR block for the VPC. CIDR can be explicitly set or it can be derived from IPAM using `ipv6_netmask_length`. This parameter is required if `ipv6_netmask_length` is not set and the IPAM pool does not have `allocation_default_netmask` set. * `ipv6_ipam_pool_id` - (Required) The ID of an IPv6 IPAM pool you want to use for allocating this VPC's CIDR. IPAM is a VPC feature that you can use to automate your IP address management workflows including assigning, tracking, troubleshooting, and auditing IP addresses across AWS Regions and accounts. @@ -56,18 +56,29 @@ The following arguments are supported: - `create` - (Default `10m`) - `delete` - (Default `10m`) -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC CIDR association ## Import -`aws_vpc_ipv6_cidr_block_association` can be imported by using the VPC CIDR Association ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_vpc_ipv6_cidr_block_association` using the VPC CIDR Association ID.
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_ipv6_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx + +Using `terraform import`, import `aws_vpc_ipv6_cidr_block_association` using the VPC CIDR Association ID. For example: + +```console +% terraform import aws_vpc_ipv6_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_network_performance_metric_subscription.html.markdown b/website/docs/cdktf/python/r/vpc_network_performance_metric_subscription.html.markdown index 2806c10e30b..ba620f73e51 100644 --- a/website/docs/cdktf/python/r/vpc_network_performance_metric_subscription.html.markdown +++ b/website/docs/cdktf/python/r/vpc_network_performance_metric_subscription.html.markdown @@ -34,17 +34,17 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `destination` - (Required) The target Region or Availability Zone that the metric subscription is enabled for. For example, `eu-west-1`. * `metric` - (Optional) The metric used for the enabled subscription. Valid values: `aggregate-latency`. Default: `aggregate-latency`. * `source` - (Required) The source Region or Availability Zone that the metric subscription is enabled for. For example, `us-east-1`. * `statistic` - (Optional) The statistic used for the enabled subscription. Valid values: `p50`. Default: `p50`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `period` - The data aggregation time for the subscription. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_peering_connection.html.markdown b/website/docs/cdktf/python/r/vpc_peering_connection.html.markdown index 2b53db53793..a1f6bdc2fed 100644 --- a/website/docs/cdktf/python/r/vpc_peering_connection.html.markdown +++ b/website/docs/cdktf/python/r/vpc_peering_connection.html.markdown @@ -148,7 +148,7 @@ can be done using the [`auto_accept`](vpc_peering_connection.html#auto_accept) a Connection has to be made active manually using other means. See [notes](vpc_peering_connection.html#notes) below for more information. -The following arguments are supported: +This resource supports the following arguments: * `peer_owner_id` - (Optional) The AWS account ID of the owner of the peer VPC. Defaults to the account ID the [AWS provider][1] is currently connected to. @@ -171,9 +171,9 @@ must have support for the DNS hostnames enabled. This can be done using the [`en * `allow_remote_vpc_dns_resolution` - (Optional) Allow a local VPC to resolve public DNS hostnames to private IP addresses when queried from instances in the peer VPC. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC Peering Connection. * `accept_status` - The status of the VPC Peering Connection request.
@@ -195,12 +195,23 @@ or accept the connection manually using the AWS Management Console, AWS CLI, thr ## Import -VPC Peering resources can be imported using the `vpc peering id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Peering resources using the VPC peering `id`. For example: -```sh -$ terraform import aws_vpc_peering_connection.test_connection pcx-111aaa111 +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import VPC Peering resources using the VPC peering `id`. For example: + +```console +% terraform import aws_vpc_peering_connection.test_connection pcx-111aaa111 ``` [1]: /docs/providers/aws/index.html - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_peering_connection_accepter.html.markdown b/website/docs/cdktf/python/r/vpc_peering_connection_accepter.html.markdown index b31bf169fb9..7ea7150a1ff 100644 --- a/website/docs/cdktf/python/r/vpc_peering_connection_accepter.html.markdown +++ b/website/docs/cdktf/python/r/vpc_peering_connection_accepter.html.markdown @@ -83,7 +83,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_peering_connection_id` - (Required) The VPC Peering Connection ID to manage. * `auto_accept` - (Optional) Whether or not to accept the peering request. Defaults to `false`. @@ -97,9 +97,9 @@ by removing the corresponding `aws_vpc_peering_connection` resource from your co Removing a `aws_vpc_peering_connection_accepter` resource from your configuration will remove it from your statefile and management, **but will not destroy the VPC Peering Connection.** -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC Peering Connection. * `accept_status` - The status of the VPC Peering Connection request. @@ -113,20 +113,31 @@ In addition to all arguments above, the following attributes are exported: (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC. * `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). -#### Accepter and Requester Attributes Reference +#### Accepter and Requester Attribute Reference * `allow_remote_vpc_dns_resolution` - Indicates whether a local VPC can resolve public DNS hostnames to private IP addresses when queried from instances in a peer VPC. ## Import -VPC Peering Connection Accepters can be imported by using the Peering Connection ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Peering Connection Accepters using the Peering Connection ID. 
For example: -```sh -$ terraform import aws_vpc_peering_connection_accepter.example pcx-12345678 +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import VPC Peering Connection Accepters using the Peering Connection ID. For example: + +```console +% terraform import aws_vpc_peering_connection_accepter.example pcx-12345678 ``` -Certain resource arguments, like `auto_accept`, do not have an EC2 API method for reading the information after peering connection creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To workaround this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference, e.g., +Certain resource arguments, like `auto_accept`, do not have an EC2 API method for reading the information after peering connection creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example: ```python # Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug @@ -149,4 +160,4 @@ class MyConvertedCode(TerraformStack): ) ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_peering_connection_options.html.markdown b/website/docs/cdktf/python/r/vpc_peering_connection_options.html.markdown index 9b7f60d818d..9222ebc5fb8 100644 --- a/website/docs/cdktf/python/r/vpc_peering_connection_options.html.markdown +++ b/website/docs/cdktf/python/r/vpc_peering_connection_options.html.markdown @@ -145,7 +145,7 @@ class MyConvertedCode(TerraformStack): ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpc_peering_connection_id` - (Required) The ID of the requester VPC peering connection. * `accepter` (Optional) - An optional configuration block that allows for [VPC Peering Connection](https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options to be set for the VPC that accepts the peering connection (a maximum of one). @@ -157,18 +157,29 @@ The following arguments are supported: * `allow_remote_vpc_dns_resolution` - (Optional) Allow a local VPC to resolve public DNS hostnames to private IP addresses when queried from instances in the peer VPC. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC Peering Connection Options. ## Import -VPC Peering Connection Options can be imported using the `vpc peering id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Peering Connection Options using the VPC peering `id`.
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_peering_connection_options.foo pcx-111aaa111 + +Using `terraform import`, import VPC Peering Connection Options using the VPC peering `id`. For example: + +```console +% terraform import aws_vpc_peering_connection_options.foo pcx-111aaa111 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_security_group_egress_rule.html.markdown b/website/docs/cdktf/python/r/vpc_security_group_egress_rule.html.markdown index ea3b8e55257..ad52b349e94 100644 --- a/website/docs/cdktf/python/r/vpc_security_group_egress_rule.html.markdown +++ b/website/docs/cdktf/python/r/vpc_security_group_egress_rule.html.markdown @@ -38,13 +38,15 @@ class MyConvertedCode(TerraformStack): from_port=80, ip_protocol="tcp", security_group_id=Token.as_string(aws_security_group_example.id), - to_port=8080 + to_port=80 ) ``` ## Argument Reference -The following arguments are supported: +~> **Note** Although `cidr_ipv4`, `cidr_ipv6`, `prefix_list_id`, and `referenced_security_group_id` are all marked as optional, you *must* provide one of them in order to configure the destination of the traffic. The `from_port` and `to_port` arguments are required unless `ip_protocol` is set to `-1` or `icmpv6`. + +This resource supports the following arguments: * `cidr_ipv4` - (Optional) The destination IPv4 CIDR range. * `cidr_ipv6` - (Optional) The destination IPv6 CIDR range. @@ -57,9 +59,9 @@ The following arguments are supported: * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `to_port` - (Optional) The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the security group rule. * `security_group_rule_id` - The ID of the security group rule. @@ -67,10 +69,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Security group egress rules can be imported using the `security_group_rule_id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import security group egress rules using the `security_group_rule_id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_security_group_egress_rule.example sgr-02108b27edd666983 + +Using `terraform import`, import security group egress rules using the `security_group_rule_id`.
For example: + +```console +% terraform import aws_vpc_security_group_egress_rule.example sgr-02108b27edd666983 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpc_security_group_ingress_rule.html.markdown b/website/docs/cdktf/python/r/vpc_security_group_ingress_rule.html.markdown index bea3bb58980..eb67ab5473a 100644 --- a/website/docs/cdktf/python/r/vpc_security_group_ingress_rule.html.markdown +++ b/website/docs/cdktf/python/r/vpc_security_group_ingress_rule.html.markdown @@ -38,28 +38,30 @@ class MyConvertedCode(TerraformStack): from_port=80, ip_protocol="tcp", security_group_id=Token.as_string(aws_security_group_example.id), - to_port=8080 + to_port=80 ) ``` ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: + +~> **Note** Although `cidr_ipv4`, `cidr_ipv6`, `prefix_list_id`, and `referenced_security_group_id` are all marked as optional, you *must* provide one of them in order to configure the source of the traffic. The `from_port` and `to_port` arguments are required unless `ip_protocol` is set to `-1` or `icmpv6`. * `cidr_ipv4` - (Optional) The source IPv4 CIDR range. * `cidr_ipv6` - (Optional) The source IPv6 CIDR range. * `description` - (Optional) The security group rule description. * `from_port` - (Optional) The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type. -* `ip_protocol` - (Optional) The IP protocol name or number. Use `-1` to specify all protocols. Note that if `ip_protocol` is set to `-1`, it translates to all protocols, all port ranges, and `from_port` and `to_port` values should not be defined. +* `ip_protocol` - (Required) The IP protocol name or number. Use `-1` to specify all protocols. Note that if `ip_protocol` is set to `-1`, it translates to all protocols, all port ranges, and `from_port` and `to_port` values should not be defined. * `prefix_list_id` - (Optional) The ID of the source prefix list. * `referenced_security_group_id` - (Optional) The source security group that is referenced in the rule. * `security_group_id` - (Required) The ID of the security group. * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `to_port` - (Optional) The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the security group rule. * `security_group_rule_id` - The ID of the security group rule. @@ -67,10 +69,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -Security group ingress rules can be imported using the `security_group_rule_id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import security group ingress rules using the `security_group_rule_id`.
For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpc_security_group_ingress_rule.example sgr-02108b27edd666983 + +Using `terraform import`, import security group ingress rules using the `security_group_rule_id`. For example: + +```console +% terraform import aws_vpc_security_group_ingress_rule.example sgr-02108b27edd666983 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_access_log_subscription.html.markdown b/website/docs/cdktf/python/r/vpclattice_access_log_subscription.html.markdown index d026ec61430..65eb6667dbb 100644 --- a/website/docs/cdktf/python/r/vpclattice_access_log_subscription.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_access_log_subscription.html.markdown @@ -41,9 +41,9 @@ The following arguments are required: * `destination_arn` - (Required) Amazon Resource Name (ARN) of the log destination. * `resource_identifier` - (Required) The ID or Amazon Resource Identifier (ARN) of the service network or service. You must use the ARN if the resources specified in the operation are in different accounts. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - ID of the access log subscription. * `arn` - Amazon Resource Name (ARN) of the access log subscription. @@ -53,10 +53,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Access Log Subscription can be imported using the access log subscription ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Access Log Subscription using the access log subscription ID. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_access_log_subscription.example rft-8012925589 + +Using `terraform import`, import VPC Lattice Access Log Subscription using the access log subscription ID. For example: + +```console +% terraform import aws_vpclattice_access_log_subscription.example rft-8012925589 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_auth_policy.html.markdown b/website/docs/cdktf/python/r/vpclattice_auth_policy.html.markdown index fe55fa1d7f6..585d81185a1 100644 --- a/website/docs/cdktf/python/r/vpclattice_auth_policy.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_auth_policy.html.markdown @@ -64,9 +64,9 @@ The following arguments are required: * `resource_identifier` - (Required) The ID or Amazon Resource Name (ARN) of the service network or service for which the policy is created. * `policy` - (Required) The auth policy. The policy string in JSON must not contain newlines or blank lines. 
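+For instance, a minimal policy attachment might look like the following sketch. This is illustrative only: the `imports.aws.vpclattice_auth_policy` module path, the stack class name, and the service ARN are assumptions rather than generated output, and `Fn.jsonencode` is used to keep the policy string free of newlines and blank lines, as required above.
+
+```python
+# A hedged sketch, not 'cdktf convert' output; module path and ARN are assumed.
+from constructs import Construct
+from cdktf import Fn, TerraformStack
+from imports.aws.vpclattice_auth_policy import VpclatticeAuthPolicy
+class AuthPolicySketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        VpclatticeAuthPolicy(self, "example",
+            # Hypothetical service ARN; use the ARN of your service or service network.
+            resource_identifier="arn:aws:vpc-lattice:us-east-1:123456789012:service/svc-1a2b3c4d",
+            # Fn.jsonencode renders the policy as a single line with no blank lines.
+            policy=Fn.jsonencode({
+                "Version": "2012-10-17",
+                "Statement": [{
+                    "Effect": "Allow",
+                    "Principal": "*",
+                    "Action": "vpc-lattice-svcs:Invoke",
+                    "Resource": "*"
+                }]
+            })
+        )
+```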
-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `policy` - The auth policy. The policy string in JSON must not contain newlines or blank lines.
* `state` - The state of the auth policy. The auth policy is only active when the auth type is set to `AWS_IAM`. If you provide a policy, then authentication and authorization decisions are made based on this policy and the client's IAM policy. If the auth type is `NONE`, any auth policy you provide will remain inactive.
@@ -81,10 +81,21 @@ In addition to all arguments above, the following attributes are exported:

## Import

-VPC Lattice Auth Policy can be imported using the `example_id_arg`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Auth Policy using the `example_id_arg`. For example:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_vpclattice_auth_policy.example rft-8012925589
+
+Using `terraform import`, import VPC Lattice Auth Policy using the `example_id_arg`. For example:
+
+```console
+% terraform import aws_vpclattice_auth_policy.example rft-8012925589
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/vpclattice_listener.html.markdown b/website/docs/cdktf/python/r/vpclattice_listener.html.markdown
index 5f1bc5420c1..544d482dc4b 100644
--- a/website/docs/cdktf/python/r/vpclattice_listener.html.markdown
+++ b/website/docs/cdktf/python/r/vpclattice_listener.html.markdown
@@ -138,7 +138,7 @@ class MyConvertedCode(TerraformStack):

## Argument Reference

-The following arguments are supported:
+This resource supports the following arguments:

* `default_action` - (Required) Default action block for the default listener rule. Default action blocks are defined below.
* `name` - (Required, Forces new resource) Name of the listener. A listener name must be unique within a service. Valid characters are a-z, 0-9, and hyphens (-). You can't use a hyphen as the first or last character, or immediately after another hyphen.
@@ -178,9 +178,9 @@ Target group blocks (for `target_group`) must include the following arguments:

* `weight` - (Optional) Determines how requests are distributed to the target group. Only required if you specify multiple target groups for a forward action. For example, if you specify two target groups, one with a weight of 10 and the other with a weight of 20, the target group with a weight of 20 receives twice as many requests as the other target group. See [Listener rules](https://docs.aws.amazon.com/vpc-lattice/latest/ug/listeners.html#listener-rules) in the AWS documentation for additional examples, and the sketch below. Default: `100`.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `arn` - ARN of the listener.
* `created_at` - Date and time that the listener was created, specified in ISO-8601 format.
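+As a concrete illustration of the weighted forward action described above, the following sketch splits the default action between two target groups so that the second receives twice as many requests as the first. It is a hedged example: the `imports.aws.vpclattice_listener` module path and all identifiers are assumptions, and plain dicts stand in for the generated struct classes (cdktf's Python bindings accept dicts for structs; the exact nesting follows the generated schema, so treat the shape as approximate).
+
+```python
+# A hedged sketch, not 'cdktf convert' output; module path and IDs are assumed.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.vpclattice_listener import VpclatticeListener
+class WeightedListenerSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        VpclatticeListener(self, "example",
+            name="example-listener",
+            protocol="HTTP",
+            service_identifier="svc-1a2b3c4d",  # hypothetical service ID
+            default_action={
+                "forward": {
+                    "target_groups": [
+                        # With weights 10 and 20, the second group receives twice the traffic.
+                        {"target_group_identifier": "tg-0example000000001", "weight": 10},
+                        {"target_group_identifier": "tg-0example000000002", "weight": 20},
+                    ]
+                }
+            }
+        )
+```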
@@ -189,10 +189,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Listener can be imported by using the `listener_id` of the listener and the `id` of the VPC Lattice service combined with a `/` character, e.g.: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Listener using the `listener_id` of the listener and the `id` of the VPC Lattice service combined with a `/` character. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_listener.example svc-1a2b3c4d/listener-987654321 + +Using `terraform import`, import VPC Lattice Listener using the `listener_id` of the listener and the `id` of the VPC Lattice service combined with a `/` character. For example: + +```console +% terraform import aws_vpclattice_listener.example svc-1a2b3c4d/listener-987654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_listener_rule.html.markdown b/website/docs/cdktf/python/r/vpclattice_listener_rule.html.markdown index 15925609f0d..9a7a2ad7d8c 100644 --- a/website/docs/cdktf/python/r/vpclattice_listener_rule.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_listener_rule.html.markdown @@ -163,9 +163,9 @@ path match match (`match`) supports the following: * `exact` - (Optional) Specifies an exact type match. * `prefix` - (Optional) Specifies a prefix type match. Matches the value with the prefix. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the target group. * `rule_id` - Unique identifier for the target group. @@ -181,10 +181,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Listener Rule can be imported using the `example_id_arg`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Listener Rule using the `example_id_arg`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_listener_rule.example rft-8012925589 + +Using `terraform import`, import VPC Lattice Listener Rule using the `example_id_arg`. 
For example:
+
+```console
+% terraform import aws_vpclattice_listener_rule.example rft-8012925589
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/vpclattice_resource_policy.html.markdown b/website/docs/cdktf/python/r/vpclattice_resource_policy.html.markdown
index 1e7cb7b8df8..5e61b296296 100644
--- a/website/docs/cdktf/python/r/vpclattice_resource_policy.html.markdown
+++ b/website/docs/cdktf/python/r/vpclattice_resource_policy.html.markdown
@@ -67,16 +67,27 @@ The following arguments are required:

* `resource_arn` - (Required) The ID or Amazon Resource Name (ARN) of the service network or service for which the policy is created.
* `policy` - (Required) An IAM policy. The policy string in JSON must not contain newlines or blank lines.

-## Attributes Reference
+## Attribute Reference

-No additional attributes are exported.
+This resource exports no additional attributes.

## Import

-VPC Lattice Resource Policy can be imported using the `resource_arn`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Resource Policy using the `resource_arn`. For example:

+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
```
-$ terraform import aws_vpclattice_resource_policy.example rft-8012925589
+
+Using `terraform import`, import VPC Lattice Resource Policy using the `resource_arn`. For example:
+
+```console
+% terraform import aws_vpclattice_resource_policy.example rft-8012925589
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/python/r/vpclattice_service.html.markdown b/website/docs/cdktf/python/r/vpclattice_service.html.markdown
index 7ee6db9448e..560a15bc076 100644
--- a/website/docs/cdktf/python/r/vpclattice_service.html.markdown
+++ b/website/docs/cdktf/python/r/vpclattice_service.html.markdown
@@ -48,9 +48,9 @@ The following arguments are optional:

* `custom_domain_name` - (Optional) Custom domain name of the service.
* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

-## Attributes Reference
+## Attribute Reference

-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `arn` - ARN of the service.
* `dns_entry` - DNS name of the service.
@@ -67,10 +67,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Service can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_service.example svc-06728e2357ea55f8a + +Using `terraform import`, import VPC Lattice Service using the `id`. For example: + +```console +% terraform import aws_vpclattice_service.example svc-06728e2357ea55f8a ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_service_network.html.markdown b/website/docs/cdktf/python/r/vpclattice_service_network.html.markdown index 98dbdf8075e..27f25b7c2db 100644 --- a/website/docs/cdktf/python/r/vpclattice_service_network.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_service_network.html.markdown @@ -45,19 +45,30 @@ The following arguments are optional: * `auth_type` - (Optional) Type of IAM policy. Either `NONE` or `AWS_IAM`. * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the Service Network. * `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). ## Import -VPC Lattice Service Network can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service Network using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_service_network.example sn-0158f91c1e3358dba + +Using `terraform import`, import VPC Lattice Service Network using the `id`. For example: + +```console +% terraform import aws_vpclattice_service_network.example sn-0158f91c1e3358dba ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_service_network_service_association.html.markdown b/website/docs/cdktf/python/r/vpclattice_service_network_service_association.html.markdown index 5b4a74dfbcb..f274acc911b 100644 --- a/website/docs/cdktf/python/r/vpclattice_service_network_service_association.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_service_network_service_association.html.markdown @@ -44,9 +44,9 @@ The following arguments are optional: * `tags` - (Optional) Key-value mapping of resource tags. 
If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Association. * `created_by` - The account that created the association. @@ -67,10 +67,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Service Network Service Association can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service Network Service Association using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_service_network_service_association.example snsa-05e2474658a88f6ba + +Using `terraform import`, import VPC Lattice Service Network Service Association using the `id`. For example: + +```console +% terraform import aws_vpclattice_service_network_service_association.example snsa-05e2474658a88f6ba ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_service_network_vpc_association.html.markdown b/website/docs/cdktf/python/r/vpclattice_service_network_vpc_association.html.markdown index b3b2628e955..f89717b1ec6 100644 --- a/website/docs/cdktf/python/r/vpclattice_service_network_vpc_association.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_service_network_vpc_association.html.markdown @@ -46,9 +46,9 @@ The following arguments are optional: * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `security_group_ids` - (Optional) The IDs of the security groups. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Association. * `created_by` - The account that created the association. @@ -65,10 +65,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Service Network VPC Association can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service Network VPC Association using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_service_network_vpc_association.example snsa-05e2474658a88f6ba + +Using `terraform import`, import VPC Lattice Service Network VPC Association using the `id`. 
For example: + +```console +% terraform import aws_vpclattice_service_network_vpc_association.example snsa-05e2474658a88f6ba ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_target_group.html.markdown b/website/docs/cdktf/python/r/vpclattice_target_group.html.markdown index 2893cdc91df..40dd65ec4bc 100644 --- a/website/docs/cdktf/python/r/vpclattice_target_group.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_target_group.html.markdown @@ -136,9 +136,9 @@ Health Check (`health_check`) supports the following: * `protocol_version` - (Optional) The protocol version used when performing health checks on targets. The possible protocol versions are `HTTP1` and `HTTP2`. The default is `HTTP1`. * `unhealthy_threshold_count` - (Optional) The number of consecutive failed health checks required before considering a target unhealthy. The range is 2–10. The default is 2. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the target group. * `id` - Unique identifier for the target group. @@ -154,10 +154,21 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Target Group can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Target Group using the `id`. For example: +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) ``` -$ terraform import aws_vpclattice_target_group.example tg-0c11d4dc16ed96bdb + +Using `terraform import`, import VPC Lattice Target Group using the `id`. For example: + +```console +% terraform import aws_vpclattice_target_group.example tg-0c11d4dc16ed96bdb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpclattice_target_group_attachment.html.markdown b/website/docs/cdktf/python/r/vpclattice_target_group_attachment.html.markdown index 960204792c8..ad23954f351 100644 --- a/website/docs/cdktf/python/r/vpclattice_target_group_attachment.html.markdown +++ b/website/docs/cdktf/python/r/vpclattice_target_group_attachment.html.markdown @@ -49,8 +49,8 @@ The following arguments are required: - `id` - (Required) The ID of the target. If the target type of the target group is INSTANCE, this is an instance ID. If the target type is IP , this is an IP address. If the target type is LAMBDA, this is the ARN of the Lambda function. If the target type is ALB, this is the ARN of the Application Load Balancer. - `port` - (Optional) The port on which the target is listening. For HTTP, the default is 80. For HTTPS, the default is 443. -## Attributes Reference +## Attribute Reference -No additional attributes are exported. +This resource exports no additional attributes. 
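+The `target` block described above pairs a target `id` with an optional `port`. As a hedged sketch (the `imports.aws.vpclattice_target_group_attachment` module path and all identifiers are assumptions, and a plain dict stands in for the generated `target` struct class), attaching an EC2 instance to an `INSTANCE`-type target group on port 80 might look like this:
+
+```python
+# A hedged sketch, not 'cdktf convert' output; module path and IDs are assumed.
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.vpclattice_target_group_attachment import VpclatticeTargetGroupAttachment
+class TargetAttachmentSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        VpclatticeTargetGroupAttachment(self, "example",
+            target_group_identifier="tg-0c11d4dc16ed96bdb",  # hypothetical target group ID
+            target={
+                "id": "i-0123456789abcdef0",  # instance ID, because the target group type is INSTANCE
+                "port": 80,
+            }
+        )
+```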
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpn_connection.html.markdown b/website/docs/cdktf/python/r/vpn_connection.html.markdown new file mode 100644 index 00000000000..1481e52e8ba --- /dev/null +++ b/website/docs/cdktf/python/r/vpn_connection.html.markdown @@ -0,0 +1,298 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_connection" +description: |- + Manages a Site-to-Site VPN connection. A Site-to-Site VPN connection is an Internet Protocol security (IPsec) VPN connection between a VPC and an on-premises network. +--- + + + +# Resource: aws_vpn_connection + +Manages a Site-to-Site VPN connection. A Site-to-Site VPN connection is an Internet Protocol security (IPsec) VPN connection between a VPC and an on-premises network. +Any new Site-to-Site VPN connection that you create is an [AWS VPN connection](https://docs.aws.amazon.com/vpn/latest/s2svpn/vpn-categories.html). + +~> **Note:** All arguments including `tunnel1_preshared_key` and `tunnel2_preshared_key` will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +~> **Note:** The CIDR blocks in the arguments `tunnel1_inside_cidr` and `tunnel2_inside_cidr` must have a prefix of /30 and be a part of a specific range. +[Read more about this in the AWS documentation](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_VpnTunnelOptionsSpecification.html). + +## Example Usage + +### EC2 Transit Gateway + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.customer_gateway import CustomerGateway +from imports.aws.ec2_transit_gateway import Ec2TransitGateway +from imports.aws.vpn_connection import VpnConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CustomerGateway(self, "example", + bgp_asn=Token.as_string(65000), + ip_address="172.0.0.1", + type="ipsec.1" + ) + aws_ec2_transit_gateway_example = Ec2TransitGateway(self, "example_1") + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ec2_transit_gateway_example.override_logical_id("example") + aws_vpn_connection_example = VpnConnection(self, "example_2", + customer_gateway_id=example.id, + transit_gateway_id=Token.as_string(aws_ec2_transit_gateway_example.id), + type=example.type + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_vpn_connection_example.override_logical_id("example") +``` + +### Virtual Private Gateway + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.customer_gateway import CustomerGateway +from imports.aws.vpc import Vpc +from imports.aws.vpn_connection import VpnConnection +from imports.aws.vpn_gateway import VpnGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + customer_gateway = CustomerGateway(self, "customer_gateway", + bgp_asn=Token.as_string(65000), + ip_address="172.0.0.1", + type="ipsec.1" + ) + vpc = Vpc(self, "vpc", + cidr_block="10.0.0.0/16" + ) + vpn_gateway = VpnGateway(self, "vpn_gateway", + vpc_id=vpc.id + ) + VpnConnection(self, "main", + customer_gateway_id=customer_gateway.id, + static_routes_only=True, + type="ipsec.1", + vpn_gateway_id=vpn_gateway.id + ) +``` + +### AWS Site to Site Private VPN + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.customer_gateway import CustomerGateway +from imports.aws.data_aws_ec2_transit_gateway_dx_gateway_attachment import DataAwsEc2TransitGatewayDxGatewayAttachment +from imports.aws.dx_gateway import DxGateway +from imports.aws.dx_gateway_association import DxGatewayAssociation +from imports.aws.ec2_transit_gateway import Ec2TransitGateway +from imports.aws.vpn_connection import VpnConnection +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = CustomerGateway(self, "example", + bgp_asn=Token.as_string(64514), + ip_address="10.0.0.1", + tags={ + "Name": "terraform_ipsec_vpn_example" + }, + type="ipsec.1" + ) + aws_dx_gateway_example = DxGateway(self, "example_1", + amazon_side_asn="64512", + name="terraform_ipsec_vpn_example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dx_gateway_example.override_logical_id("example") + aws_ec2_transit_gateway_example = Ec2TransitGateway(self, "example_2", + amazon_side_asn=Token.as_number("64513"), + description="terraform_ipsec_vpn_example", + transit_gateway_cidr_blocks=["10.0.0.0/24"] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_ec2_transit_gateway_example.override_logical_id("example") + aws_dx_gateway_association_example = DxGatewayAssociation(self, "example_3", + allowed_prefixes=["10.0.0.0/8"], + associated_gateway_id=Token.as_string(aws_ec2_transit_gateway_example.id), + dx_gateway_id=Token.as_string(aws_dx_gateway_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_dx_gateway_association_example.override_logical_id("example") + data_aws_ec2_transit_gateway_dx_gateway_attachment_example = + DataAwsEc2TransitGatewayDxGatewayAttachment(self, "example_4", + depends_on=[aws_dx_gateway_association_example], + dx_gateway_id=Token.as_string(aws_dx_gateway_example.id), + transit_gateway_id=Token.as_string(aws_ec2_transit_gateway_example.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+        data_aws_ec2_transit_gateway_dx_gateway_attachment_example.override_logical_id("example")
+        aws_vpn_connection_example = VpnConnection(self, "example_5",
+            customer_gateway_id=example.id,
+            outside_ip_address_type="PrivateIpv4",
+            tags={
+                "Name": "terraform_ipsec_vpn_example"
+            },
+            transit_gateway_id=Token.as_string(aws_ec2_transit_gateway_example.id),
+            transport_transit_gateway_attachment_id=Token.as_string(data_aws_ec2_transit_gateway_dx_gateway_attachment_example.id),
+            type="ipsec.1"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_vpn_connection_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `customer_gateway_id` - (Required) The ID of the customer gateway.
+* `type` - (Required) The type of VPN connection. The only type AWS supports at this time is "ipsec.1".
+* `transit_gateway_id` - (Optional) The ID of the EC2 Transit Gateway.
+* `vpn_gateway_id` - (Optional) The ID of the Virtual Private Gateway.
+* `static_routes_only` - (Optional, Default `false`) Whether the VPN connection uses static routes exclusively. Static routes must be used for devices that don't support BGP.
+* `enable_acceleration` - (Optional, Default `false`) Indicate whether to enable acceleration for the VPN connection. Supports only EC2 Transit Gateway.
+* `tags` - (Optional) Tags to apply to the connection. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `local_ipv4_network_cidr` - (Optional, Default `0.0.0.0/0`) The IPv4 CIDR on the customer gateway (on-premises) side of the VPN connection.
+* `local_ipv6_network_cidr` - (Optional, Default `::/0`) The IPv6 CIDR on the customer gateway (on-premises) side of the VPN connection.
+* `outside_ip_address_type` - (Optional, Default `PublicIpv4`) Indicates whether this is a public S2S VPN or a private S2S VPN over AWS Direct Connect. Valid values are `PublicIpv4 | PrivateIpv4`.
+* `remote_ipv4_network_cidr` - (Optional, Default `0.0.0.0/0`) The IPv4 CIDR on the AWS side of the VPN connection.
+* `remote_ipv6_network_cidr` - (Optional, Default `::/0`) The IPv6 CIDR on the AWS side of the VPN connection.
+* `transport_transit_gateway_attachment_id` - (Required when `outside_ip_address_type` is set to `PrivateIpv4`) The attachment ID of the Transit Gateway attachment to Direct Connect Gateway. The ID is obtained through a data source only.
+* `tunnel_inside_ip_version` - (Optional, Default `ipv4`) Indicate whether the VPN tunnels process IPv4 or IPv6 traffic. Valid values are `ipv4 | ipv6`. `ipv6` supports only EC2 Transit Gateway.
+* `tunnel1_inside_cidr` - (Optional) The CIDR block of the inside IP addresses for the first VPN tunnel. Valid value is a size /30 CIDR block from the 169.254.0.0/16 range.
+* `tunnel2_inside_cidr` - (Optional) The CIDR block of the inside IP addresses for the second VPN tunnel. Valid value is a size /30 CIDR block from the 169.254.0.0/16 range.
+* `tunnel1_inside_ipv6_cidr` - (Optional) The range of inside IPv6 addresses for the first VPN tunnel. Supports only EC2 Transit Gateway. Valid value is a size /126 CIDR block from the local fd00::/8 range.
+* `tunnel2_inside_ipv6_cidr` - (Optional) The range of inside IPv6 addresses for the second VPN tunnel.
Supports only EC2 Transit Gateway. Valid value is a size /126 CIDR block from the local fd00::/8 range.
+* `tunnel1_preshared_key` - (Optional) The preshared key of the first VPN tunnel. The preshared key must be between 8 and 64 characters in length and cannot start with zero (0). Allowed characters are alphanumeric characters, periods (.) and underscores (_).
+* `tunnel2_preshared_key` - (Optional) The preshared key of the second VPN tunnel. The preshared key must be between 8 and 64 characters in length and cannot start with zero (0). Allowed characters are alphanumeric characters, periods (.) and underscores (_).
+* `tunnel1_dpd_timeout_action` - (Optional, Default `clear`) The action to take after DPD timeout occurs for the first VPN tunnel. Specify `restart` to restart the IKE initiation. Specify `clear` to end the IKE session. Valid values are `clear | none | restart`.
+* `tunnel2_dpd_timeout_action` - (Optional, Default `clear`) The action to take after DPD timeout occurs for the second VPN tunnel. Specify `restart` to restart the IKE initiation. Specify `clear` to end the IKE session. Valid values are `clear | none | restart`.
+* `tunnel1_dpd_timeout_seconds` - (Optional, Default `30`) The number of seconds after which a DPD timeout occurs for the first VPN tunnel. Valid value is equal to or higher than `30`.
+* `tunnel2_dpd_timeout_seconds` - (Optional, Default `30`) The number of seconds after which a DPD timeout occurs for the second VPN tunnel. Valid value is equal to or higher than `30`.
+* `tunnel1_enable_tunnel_lifecycle_control` - (Optional, Default `false`) Turn on or off the tunnel endpoint lifecycle control feature for the first VPN tunnel. Valid values are `true | false`.
+* `tunnel2_enable_tunnel_lifecycle_control` - (Optional, Default `false`) Turn on or off the tunnel endpoint lifecycle control feature for the second VPN tunnel. Valid values are `true | false`.
+* `tunnel1_ike_versions` - (Optional) The IKE versions that are permitted for the first VPN tunnel. Valid values are `ikev1 | ikev2`.
+* `tunnel2_ike_versions` - (Optional) The IKE versions that are permitted for the second VPN tunnel. Valid values are `ikev1 | ikev2`.
+* `tunnel1_log_options` - (Optional) Options for logging VPN tunnel activity. See [Log Options](#log-options) below for more details.
+* `tunnel2_log_options` - (Optional) Options for logging VPN tunnel activity. See [Log Options](#log-options) below for more details.
+* `tunnel1_phase1_dh_group_numbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the first VPN tunnel for phase 1 IKE negotiations. Valid values are `2 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`.
+* `tunnel2_phase1_dh_group_numbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the second VPN tunnel for phase 1 IKE negotiations. Valid values are `2 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`.
+* `tunnel1_phase1_encryption_algorithms` - (Optional) List of one or more encryption algorithms that are permitted for the first VPN tunnel for phase 1 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`.
+* `tunnel2_phase1_encryption_algorithms` - (Optional) List of one or more encryption algorithms that are permitted for the second VPN tunnel for phase 1 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`.
+* `tunnel1_phase1_integrity_algorithms` - (Optional) One or more integrity algorithms that are permitted for the first VPN tunnel for phase 1 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`. +* `tunnel2_phase1_integrity_algorithms` - (Optional) One or more integrity algorithms that are permitted for the second VPN tunnel for phase 1 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`. +* `tunnel1_phase1_lifetime_seconds` - (Optional, Default `28800`) The lifetime for phase 1 of the IKE negotiation for the first VPN tunnel, in seconds. Valid value is between `900` and `28800`. +* `tunnel2_phase1_lifetime_seconds` - (Optional, Default `28800`) The lifetime for phase 1 of the IKE negotiation for the second VPN tunnel, in seconds. Valid value is between `900` and `28800`. +* `tunnel1_phase2_dh_group_numbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the first VPN tunnel for phase 2 IKE negotiations. Valid values are `2 | 5 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`. +* `tunnel2_phase2_dh_group_numbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the second VPN tunnel for phase 2 IKE negotiations. Valid values are `2 | 5 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`. +* `tunnel1_phase2_encryption_algorithms` - (Optional) List of one or more encryption algorithms that are permitted for the first VPN tunnel for phase 2 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`. +* `tunnel2_phase2_encryption_algorithms` - (Optional) List of one or more encryption algorithms that are permitted for the second VPN tunnel for phase 2 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`. +* `tunnel1_phase2_integrity_algorithms` - (Optional) List of one or more integrity algorithms that are permitted for the first VPN tunnel for phase 2 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`. +* `tunnel2_phase2_integrity_algorithms` - (Optional) List of one or more integrity algorithms that are permitted for the second VPN tunnel for phase 2 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`. +* `tunnel1_phase2_lifetime_seconds` - (Optional, Default `3600`) The lifetime for phase 2 of the IKE negotiation for the first VPN tunnel, in seconds. Valid value is between `900` and `3600`. +* `tunnel2_phase2_lifetime_seconds` - (Optional, Default `3600`) The lifetime for phase 2 of the IKE negotiation for the second VPN tunnel, in seconds. Valid value is between `900` and `3600`. +* `tunnel1_rekey_fuzz_percentage` - (Optional, Default `100`) The percentage of the rekey window for the first VPN tunnel (determined by `tunnel1_rekey_margin_time_seconds`) during which the rekey time is randomly selected. Valid value is between `0` and `100`. +* `tunnel2_rekey_fuzz_percentage` - (Optional, Default `100`) The percentage of the rekey window for the second VPN tunnel (determined by `tunnel2_rekey_margin_time_seconds`) during which the rekey time is randomly selected. Valid value is between `0` and `100`. +* `tunnel1_rekey_margin_time_seconds` - (Optional, Default `540`) The margin time, in seconds, before the phase 2 lifetime expires, during which the AWS side of the first VPN connection performs an IKE rekey. The exact time of the rekey is randomly selected based on the value for `tunnel1_rekey_fuzz_percentage`. 
Valid value is between `60` and half of `tunnel1_phase2_lifetime_seconds`.
+* `tunnel2_rekey_margin_time_seconds` - (Optional, Default `540`) The margin time, in seconds, before the phase 2 lifetime expires, during which the AWS side of the second VPN connection performs an IKE rekey. The exact time of the rekey is randomly selected based on the value for `tunnel2_rekey_fuzz_percentage`. Valid value is between `60` and half of `tunnel2_phase2_lifetime_seconds`.
+* `tunnel1_replay_window_size` - (Optional, Default `1024`) The number of packets in an IKE replay window for the first VPN tunnel. Valid value is between `64` and `2048`.
+* `tunnel2_replay_window_size` - (Optional, Default `1024`) The number of packets in an IKE replay window for the second VPN tunnel. Valid value is between `64` and `2048`.
+* `tunnel1_startup_action` - (Optional, Default `add`) The action to take when establishing the tunnel for the first VPN connection. By default, your customer gateway device must initiate the IKE negotiation and bring up the tunnel. Specify `start` for AWS to initiate the IKE negotiation. Valid values are `add | start`.
+* `tunnel2_startup_action` - (Optional, Default `add`) The action to take when establishing the tunnel for the second VPN connection. By default, your customer gateway device must initiate the IKE negotiation and bring up the tunnel. Specify `start` for AWS to initiate the IKE negotiation. Valid values are `add | start`.
+
+### Log Options
+
+The `tunnel1_log_options` and `tunnel2_log_options` blocks support the following arguments:
+
+* `cloudwatch_log_options` - (Optional) Options for sending VPN tunnel logs to CloudWatch. See [CloudWatch Log Options](#cloudwatch-log-options) below for more details.
+
+### CloudWatch Log Options
+
+The `cloudwatch_log_options` block supports the following arguments:
+
+* `log_enabled` - (Optional) Enable or disable the VPN tunnel logging feature. The default is `false`.
+* `log_group_arn` - (Optional) The Amazon Resource Name (ARN) of the CloudWatch log group to send logs to.
+* `log_output_format` - (Optional) Set the log format. Possible values are `json` and `text`. The default is `json`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the VPN Connection.
+* `id` - The Amazon-assigned ID of the VPN connection.
+* `core_network_arn` - The ARN of the core network.
+* `core_network_attachment_arn` - The ARN of the core network attachment.
+* `customer_gateway_configuration` - The configuration information for the VPN connection's customer gateway (in the native XML format).
+* `customer_gateway_id` - The ID of the customer gateway to which the connection is attached.
+* `routes` - The static routes associated with the VPN connection. Detailed below.
+* `static_routes_only` - Whether the VPN connection uses static routes exclusively.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `transit_gateway_attachment_id` - When associated with an EC2 Transit Gateway (`transit_gateway_id` argument), the attachment ID. See also the [`aws_ec2_tag` resource](/docs/providers/aws/r/ec2_tag.html) for tagging the EC2 Transit Gateway VPN Attachment.
+* `tunnel1_address` - The public IP address of the first VPN tunnel.
+* `tunnel1_cgw_inside_address` - The RFC 6890 link-local address of the first VPN tunnel (Customer Gateway Side). +* `tunnel1_vgw_inside_address` - The RFC 6890 link-local address of the first VPN tunnel (VPN Gateway Side). +* `tunnel1_preshared_key` - The preshared key of the first VPN tunnel. +* `tunnel1_bgp_asn` - The bgp asn number of the first VPN tunnel. +* `tunnel1_bgp_holdtime` - The bgp holdtime of the first VPN tunnel. +* `tunnel2_address` - The public IP address of the second VPN tunnel. +* `tunnel2_cgw_inside_address` - The RFC 6890 link-local address of the second VPN tunnel (Customer Gateway Side). +* `tunnel2_vgw_inside_address` - The RFC 6890 link-local address of the second VPN tunnel (VPN Gateway Side). +* `tunnel2_preshared_key` - The preshared key of the second VPN tunnel. +* `tunnel2_bgp_asn` - The bgp asn number of the second VPN tunnel. +* `tunnel2_bgp_holdtime` - The bgp holdtime of the second VPN tunnel. +* `vgw_telemetry` - Telemetry for the VPN tunnels. Detailed below. +* `vpn_gateway_id` - The ID of the virtual private gateway to which the connection is attached. + +### routes + +* `destination_cidr_block` - The CIDR block associated with the local subnet of the customer data center. +* `source` - Indicates how the routes were provided. +* `state` - The current state of the static route. + +### vgw_telemetry + +* `accepted_route_count` - The number of accepted routes. +* `certificate_arn` - The Amazon Resource Name (ARN) of the VPN tunnel endpoint certificate. +* `last_status_change` - The date and time of the last change in status. +* `outside_ip_address` - The Internet-routable IP address of the virtual private gateway's outside interface. +* `status` - The status of the VPN tunnel. +* `status_message` - If an error occurs, a description of the error. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPN Connections using the VPN connection `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import VPN Connections using the VPN connection `id`. For example: + +```console +% terraform import aws_vpn_connection.testvpnconnection vpn-40f41529 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpn_connection_route.html.markdown b/website/docs/cdktf/python/r/vpn_connection_route.html.markdown new file mode 100644 index 00000000000..a52908ce064 --- /dev/null +++ b/website/docs/cdktf/python/r/vpn_connection_route.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_connection_route" +description: |- + Provides a static route between a VPN connection and a customer gateway. +--- + + + +# Resource: aws_vpn_connection_route + +Provides a static route between a VPN connection and a customer gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.customer_gateway import CustomerGateway +from imports.aws.vpc import Vpc +from imports.aws.vpn_connection import VpnConnection +from imports.aws.vpn_connection_route import VpnConnectionRoute +from imports.aws.vpn_gateway import VpnGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + customer_gateway = CustomerGateway(self, "customer_gateway", + bgp_asn=Token.as_string(65000), + ip_address="172.0.0.1", + type="ipsec.1" + ) + vpc = Vpc(self, "vpc", + cidr_block="10.0.0.0/16" + ) + vpn_gateway = VpnGateway(self, "vpn_gateway", + vpc_id=vpc.id + ) + main = VpnConnection(self, "main", + customer_gateway_id=customer_gateway.id, + static_routes_only=True, + type="ipsec.1", + vpn_gateway_id=vpn_gateway.id + ) + VpnConnectionRoute(self, "office", + destination_cidr_block="192.168.10.0/24", + vpn_connection_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `destination_cidr_block` - (Required) The CIDR block associated with the local subnet of the customer network. +* `vpn_connection_id` - (Required) The ID of the VPN connection. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `destination_cidr_block` - The CIDR block associated with the local subnet of the customer network. +* `vpn_connection_id` - The ID of the VPN connection. + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpn_gateway.html.markdown b/website/docs/cdktf/python/r/vpn_gateway.html.markdown new file mode 100644 index 00000000000..4591d40c0a7 --- /dev/null +++ b/website/docs/cdktf/python/r/vpn_gateway.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_gateway" +description: |- + Provides a resource to create a VPC VPN Gateway. +--- + + + +# Resource: aws_vpn_gateway + +Provides a resource to create a VPC VPN Gateway. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.vpn_gateway import VpnGateway +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + VpnGateway(self, "vpn_gw", + tags={ + "Name": "main" + }, + vpc_id=main.id + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `vpc_id` - (Optional) The VPC ID to create in. +* `availability_zone` - (Optional) The Availability Zone for the virtual private gateway. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `amazon_side_asn` - (Optional) The Autonomous System Number (ASN) for the Amazon side of the gateway. If you don't specify an ASN, the virtual private gateway is created with the default ASN. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the VPN Gateway. +* `id` - The ID of the VPN Gateway. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPN Gateways using the VPN gateway `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import VPN Gateways using the VPN gateway `id`. For example: + +```console +% terraform import aws_vpn_gateway.testvpngateway vgw-9a4cacf3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpn_gateway_attachment.html.markdown b/website/docs/cdktf/python/r/vpn_gateway_attachment.html.markdown new file mode 100644 index 00000000000..51cb455f3d6 --- /dev/null +++ b/website/docs/cdktf/python/r/vpn_gateway_attachment.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_gateway_attachment" +description: |- + Provides a Virtual Private Gateway attachment resource. +--- + + + +# Resource: aws_vpn_gateway_attachment + +Provides a Virtual Private Gateway attachment resource, allowing for an existing +hardware VPN gateway to be attached and/or detached from a VPC. + +-> **Note:** The [`aws_vpn_gateway`](vpn_gateway.html) +resource can also automatically attach the Virtual Private Gateway it creates +to an existing VPC by setting the [`vpc_id`](vpn_gateway.html#vpc_id) attribute accordingly. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.vpc import Vpc +from imports.aws.vpn_gateway import VpnGateway +from imports.aws.vpn_gateway_attachment import VpnGatewayAttachment +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + network = Vpc(self, "network", + cidr_block="10.0.0.0/16" + ) + vpn = VpnGateway(self, "vpn", + tags={ + "Name": "example-vpn-gateway" + } + ) + VpnGatewayAttachment(self, "vpn_attachment", + vpc_id=network.id, + vpn_gateway_id=vpn.id + ) +``` + +See [Virtual Private Cloud](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Introduction.html) +and [Virtual Private Gateway](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_VPN.html) user +guides for more information. + +## Argument Reference + +This resource supports the following arguments: + +* `vpc_id` - (Required) The ID of the VPC. +* `vpn_gateway_id` - (Required) The ID of the Virtual Private Gateway. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `vpc_id` - The ID of the VPC that Virtual Private Gateway is attached to. +* `vpn_gateway_id` - The ID of the Virtual Private Gateway. + +## Import + +You cannot import this resource. 
+ + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/vpn_gateway_route_propagation.html.markdown b/website/docs/cdktf/python/r/vpn_gateway_route_propagation.html.markdown new file mode 100644 index 00000000000..2c8a810a22a --- /dev/null +++ b/website/docs/cdktf/python/r/vpn_gateway_route_propagation.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_gateway_route_propagation" +description: |- + Requests automatic route propagation between a VPN gateway and a route table. +--- + + + +# Resource: aws_vpn_gateway_route_propagation + +Requests automatic route propagation between a VPN gateway and a route table. + +~> **Note:** This resource should not be used with a route table that has +the `propagating_vgws` argument set. If that argument is set, any route +propagation not explicitly listed in its value will be removed. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.vpn_gateway_route_propagation import VpnGatewayRoutePropagation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + VpnGatewayRoutePropagation(self, "example", + route_table_id=Token.as_string(aws_route_table_example.id), + vpn_gateway_id=Token.as_string(aws_vpn_gateway_example.id) + ) +``` + +## Argument Reference + +The following arguments are required: + +* `vpn_gateway_id` - The id of the `aws_vpn_gateway` to propagate routes from. +* `route_table_id` - The id of the `aws_route_table` to propagate routes into. + +## Attribute Reference + +This resource exports no additional attributes. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `2m`) +- `delete` - (Default `2m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_byte_match_set.html.markdown b/website/docs/cdktf/python/r/waf_byte_match_set.html.markdown new file mode 100644 index 00000000000..c113e557d91 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_byte_match_set.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_byte_match_set" +description: |- + Provides a AWS WAF Byte Match Set resource. +--- + + + +# Resource: aws_waf_byte_match_set + +Provides a WAF Byte Match Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.waf_byte_match_set import WafByteMatchSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafByteMatchSet(self, "byte_set", + byte_match_tuples=[WafByteMatchSetByteMatchTuples( + field_to_match=WafByteMatchSetByteMatchTuplesFieldToMatch( + data="referer", + type="HEADER" + ), + positional_constraint="CONTAINS", + target_string="badrefer1", + text_transformation="NONE" + ) + ], + name="tf_waf_byte_match_set" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Byte Match Set. +* `byte_match_tuples` - Specifies the bytes (typically a string that corresponds + with ASCII characters) that you want to search for in web requests, + the location in requests that you want to search, and other settings. + +## Nested blocks + +### `byte_match_tuples` + +#### Arguments + +* `field_to_match` - (Required) The part of a web request that you want to search, such as a specified header or a query string. +* `positional_constraint` - (Required) Within the portion of a web request that you want to search + (for example, in the query string, if any), specify where you want to search. + e.g., `CONTAINS`, `CONTAINS_WORD` or `EXACTLY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-PositionalConstraint) + for all supported values. +* `target_string` - (Optional) The value that you want to search for within the field specified by `field_to_match`, e.g., `badrefer1`. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_waf_ByteMatchTuple.html) + for all supported values. +* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `target_string` before inspecting a request for a match. + e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-TextTransformation) + for all supported values. + +### `field_to_match` + +#### Arguments + +* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `HEADER`, `METHOD` or `BODY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Byte Match Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Byte Match Set using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Byte Match Set using the id. 
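The id is the byte match set's UUID, which you can look up with the AWS CLI (`aws waf list-byte-match-sets`).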
For example: + +```console +% terraform import aws_waf_byte_match_set.byte_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_geo_match_set.html.markdown b/website/docs/cdktf/python/r/waf_geo_match_set.html.markdown new file mode 100644 index 00000000000..374e70ca711 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_geo_match_set.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_geo_match_set" +description: |- + Provides a AWS WAF GeoMatchSet resource. +--- + + + +# Resource: aws_waf_geo_match_set + +Provides a WAF Geo Match Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_geo_match_set import WafGeoMatchSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafGeoMatchSet(self, "geo_match_set", + geo_match_constraint=[WafGeoMatchSetGeoMatchConstraint( + type="Country", + value="US" + ), WafGeoMatchSetGeoMatchConstraint( + type="Country", + value="CA" + ) + ], + name="geo_match_set" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the GeoMatchSet. +* `geo_match_constraint` - (Optional) The GeoMatchConstraint objects which contain the country that you want AWS WAF to search for. + +## Nested Blocks + +### `geo_match_constraint` + +#### Arguments + +* `type` - (Required) The type of geographical area you want AWS WAF to search for. Currently Country is the only valid value. +* `value` - (Required) The country that you want AWS WAF to search for. + This is the two-letter country code, e.g., `US`, `CA`, `RU`, `CN`, etc. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchConstraint.html) for all supported values. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF GeoMatchSet. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Geo Match Set using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Geo Match Set using their ID. For example: + +```console +% terraform import aws_waf_geo_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_ipset.html.markdown b/website/docs/cdktf/python/r/waf_ipset.html.markdown new file mode 100644 index 00000000000..19d30eba653 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_ipset.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_ipset" +description: |- + Provides a AWS WAF IPSet resource. 
+--- + + + +# Resource: aws_waf_ipset + +Provides a WAF IPSet Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_ipset import WafIpset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafIpset(self, "ipset", + ip_set_descriptors=[WafIpsetIpSetDescriptors( + type="IPV4", + value="192.0.7.0/24" + ), WafIpsetIpSetDescriptors( + type="IPV4", + value="10.16.16.0/16" + ) + ], + name="tfIPSet" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the IPSet. +* `ip_set_descriptors` - (Optional) One or more pairs specifying the IP address type (IPV4 or IPV6) and the IP address range (in CIDR format) from which web requests originate. + +## Nested Blocks + +### `ip_set_descriptors` + +#### Arguments + +* `type` - (Required) Type of the IP address - `IPV4` or `IPV6`. +* `value` - (Required) An IPv4 or IPv6 address specified via CIDR notationE.g., `192.0.2.44/32` or `1111:0000:0000:0000:0000:0000:0000:0000/64` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF IPSet. +* `arn` - The ARN of the WAF IPSet. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF IPSets using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF IPSets using their ID. For example: + +```console +% terraform import aws_waf_ipset.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_rate_based_rule.html.markdown b/website/docs/cdktf/python/r/waf_rate_based_rule.html.markdown new file mode 100644 index 00000000000..2bb992d7404 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_rate_based_rule.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rate_based_rule" +description: |- + Provides a AWS WAF rule resource. +--- + + + +# Resource: aws_waf_rate_based_rule + +Provides a WAF Rate Based Rule Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
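+# Note: the explicit depends_on in this example mirrors the original HCL;
+# the data_id=ipset.id reference below already creates the same dependency
+# implicitly.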
+# +from imports.aws.waf_ipset import WafIpset +from imports.aws.waf_rate_based_rule import WafRateBasedRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ipset = WafIpset(self, "ipset", + ip_set_descriptors=[WafIpsetIpSetDescriptors( + type="IPV4", + value="192.0.7.0/24" + ) + ], + name="tfIPSet" + ) + WafRateBasedRule(self, "wafrule", + depends_on=[ipset], + metric_name="tfWAFRule", + name="tfWAFRule", + predicates=[WafRateBasedRulePredicates( + data_id=ipset.id, + negated=False, + type="IPMatch" + ) + ], + rate_key="IP", + rate_limit=100 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `metric_name` - (Required) The name or description for the Amazon CloudWatch metric of this rule. +* `name` - (Required) The name or description of the rule. +* `rate_key` - (Required) Valid value is IP. +* `rate_limit` - (Required) The maximum number of requests, which have an identical value in the field specified by the RateKey, allowed in a five-minute period. Minimum value is 100. +* `predicates` - (Optional) The objects to include in a rule (documented below). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Nested Blocks + +### `predicates` + +See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information. + +#### Arguments + +* `negated` - (Required) Set this to `false` if you want to allow, block, or count requests + based on the settings in the specified `ByteMatchSet`, `IPSet`, `SqlInjectionMatchSet`, `XssMatchSet`, or `SizeConstraintSet`. + For example, if an IPSet includes the IP address `192.0.2.44`, AWS WAF will allow or block requests based on that IP address. + If set to `true`, AWS WAF will allow, block, or count requests based on all IP addresses _except_ `192.0.2.44`. +* `data_id` - (Required) A unique identifier for a predicate in the rule, such as Byte Match Set ID or IPSet ID. +* `type` - (Required) The type of predicate in a rule. Valid values: `ByteMatch`, `GeoMatch`, `IPMatch`, `RegexMatch`, `SizeConstraint`, `SqlInjectionMatch`, or `XssMatch`. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF rule. +* `arn` - Amazon Resource Name (ARN) +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Rated Based Rule using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Rated Based Rule using the id. 
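As with other WAF Classic resources, the id is the rule's UUID rather than its name.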
For example: + +```console +% terraform import aws_waf_rate_based_rule.wafrule a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_regex_match_set.html.markdown b/website/docs/cdktf/python/r/waf_regex_match_set.html.markdown new file mode 100644 index 00000000000..3d125c0c954 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_regex_match_set.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_regex_match_set" +description: |- + Provides a AWS WAF Regex Match Set resource. +--- + + + +# Resource: aws_waf_regex_match_set + +Provides a WAF Regex Match Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_regex_match_set import WafRegexMatchSet +from imports.aws.waf_regex_pattern_set import WafRegexPatternSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = WafRegexPatternSet(self, "example", + name="example", + regex_pattern_strings=["one", "two"] + ) + aws_waf_regex_match_set_example = WafRegexMatchSet(self, "example_1", + name="example", + regex_match_tuple=[WafRegexMatchSetRegexMatchTuple( + field_to_match=WafRegexMatchSetRegexMatchTupleFieldToMatch( + data="User-Agent", + type="HEADER" + ), + regex_pattern_set_id=example.id, + text_transformation="NONE" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_waf_regex_match_set_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Match Set. +* `regex_match_tuple` - (Required) The regular expression pattern that you want AWS WAF to search for in web requests, the location in requests that you want AWS WAF to search, and other settings. See below. + +### Nested Arguments + +#### `regex_match_tuple` + +* `field_to_match` - (Required) The part of a web request that you want to search, such as a specified header or a query string. +* `regex_pattern_set_id` - (Required) The ID of a [Regex Pattern Set](/docs/providers/aws/r/waf_regex_pattern_set.html). +* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-TextTransformation) + for all supported values. + +#### `field_to_match` + +* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `HEADER`, `METHOD` or `BODY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regex Match Set. 
+* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regex Match Set using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regex Match Set using their ID. For example: + +```console +% terraform import aws_waf_regex_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_regex_pattern_set.html.markdown b/website/docs/cdktf/python/r/waf_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..144bdab3ea3 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_regex_pattern_set.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_regex_pattern_set" +description: |- + Provides a AWS WAF Regex Pattern Set resource. +--- + + + +# Resource: aws_waf_regex_pattern_set + +Provides a WAF Regex Pattern Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_regex_pattern_set import WafRegexPatternSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafRegexPatternSet(self, "example", + name="tf_waf_regex_pattern_set", + regex_pattern_strings=["one", "two"] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Pattern Set. +* `regex_pattern_strings` - (Optional) A list of regular expression (regex) patterns that you want AWS WAF to search for, such as `B[a@]dB[o0]t`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regex Pattern Set. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF Regex Pattern Set using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS WAF Regex Pattern Set using their ID. For example: + +```console +% terraform import aws_waf_regex_pattern_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_rule.html.markdown b/website/docs/cdktf/python/r/waf_rule.html.markdown new file mode 100644 index 00000000000..81225fec415 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_rule.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rule" +description: |- + Provides a AWS WAF rule resource. 
+--- + + + +# Resource: aws_waf_rule + +Provides a WAF Rule Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_ipset import WafIpset +from imports.aws.waf_rule import WafRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ipset = WafIpset(self, "ipset", + ip_set_descriptors=[WafIpsetIpSetDescriptors( + type="IPV4", + value="192.0.7.0/24" + ) + ], + name="tfIPSet" + ) + WafRule(self, "wafrule", + depends_on=[ipset], + metric_name="tfWAFRule", + name="tfWAFRule", + predicates=[WafRulePredicates( + data_id=ipset.id, + negated=False, + type="IPMatch" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `metric_name` - (Required) The name or description for the Amazon CloudWatch metric of this rule. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace. +* `name` - (Required) The name or description of the rule. +* `predicates` - (Optional) The objects to include in a rule (documented below). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Nested Blocks + +### `predicates` + +See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information. + +#### Arguments + +* `negated` - (Required) Set this to `false` if you want to allow, block, or count requests + based on the settings in the specified [waf_byte_match_set](/docs/providers/aws/r/waf_byte_match_set.html), [waf_ipset](/docs/providers/aws/r/waf_ipset.html), [aws_waf_size_constraint_set](/docs/providers/aws/r/waf_size_constraint_set.html), [aws_waf_sql_injection_match_set](/docs/providers/aws/r/waf_sql_injection_match_set.html) or [aws_waf_xss_match_set](/docs/providers/aws/r/waf_xss_match_set.html). + For example, if an IPSet includes the IP address `192.0.2.44`, AWS WAF will allow or block requests based on that IP address. + If set to `true`, AWS WAF will allow, block, or count requests based on all IP addresses except `192.0.2.44`. +* `data_id` - (Required) A unique identifier for a predicate in the rule, such as Byte Match Set ID or IPSet ID. +* `type` - (Required) The type of predicate in a rule. Valid values: `ByteMatch`, `GeoMatch`, `IPMatch`, `RegexMatch`, `SizeConstraint`, `SqlInjectionMatch`, or `XssMatch`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF rule. +* `arn` - The ARN of the WAF rule. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF rules using the id. 
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF rules using the id. For example: + +```console +% terraform import aws_waf_rule.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_rule_group.html.markdown b/website/docs/cdktf/python/r/waf_rule_group.html.markdown new file mode 100644 index 00000000000..7139dc31673 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_rule_group.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rule_group" +description: |- + Provides a AWS WAF rule group resource. +--- + + + +# Resource: aws_waf_rule_group + +Provides a WAF Rule Group Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_rule import WafRule +from imports.aws.waf_rule_group import WafRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = WafRule(self, "example", + metric_name="example", + name="example" + ) + aws_waf_rule_group_example = WafRuleGroup(self, "example_1", + activated_rule=[WafRuleGroupActivatedRule( + action=WafRuleGroupActivatedRuleAction( + type="COUNT" + ), + priority=50, + rule_id=example.id + ) + ], + metric_name="example", + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_waf_rule_group_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A friendly name of the rule group +* `metric_name` - (Required) A friendly name for the metrics from the rule group +* `activated_rule` - (Optional) A list of activated rules, see below +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Nested Blocks + +### `activated_rule` + +#### Arguments + +* `action` - (Required) Specifies the action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule. + * `type` - (Required) e.g., `BLOCK`, `ALLOW`, or `COUNT` +* `priority` - (Required) Specifies the order in which the rules are evaluated. Rules with a lower value are evaluated before rules with a higher value. +* `rule_id` - (Required) The ID of a [rule](/docs/providers/aws/r/waf_rule.html) +* `type` - (Optional) The rule type, either [`REGULAR`](/docs/providers/aws/r/waf_rule.html), [`RATE_BASED`](/docs/providers/aws/r/waf_rate_based_rule.html), or `GROUP`. Defaults to `REGULAR`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF rule group. +* `arn` - The ARN of the WAF rule group. 
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Rule Group using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Rule Group using the id. For example: + +```console +% terraform import aws_waf_rule_group.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_size_constraint_set.html.markdown b/website/docs/cdktf/python/r/waf_size_constraint_set.html.markdown new file mode 100644 index 00000000000..7a4462deeb0 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_size_constraint_set.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_size_constraint_set" +description: |- + Provides a AWS WAF Size Constraint Set resource. +--- + + + +# Resource: aws_waf_size_constraint_set + +Provides a WAF Size Constraint Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_size_constraint_set import WafSizeConstraintSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafSizeConstraintSet(self, "size_constraint_set", + name="tfsize_constraints", + size_constraints=[WafSizeConstraintSetSizeConstraints( + comparison_operator="EQ", + field_to_match=WafSizeConstraintSetSizeConstraintsFieldToMatch( + type="BODY" + ), + size=Token.as_number("4096"), + text_transformation="NONE" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Size Constraint Set. +* `size_constraints` - (Optional) Specifies the parts of web requests that you want to inspect the size of. + +## Nested Blocks + +### `size_constraints` + +#### Arguments + +* `field_to_match` - (Required) Specifies where in a web request to look for the size constraint. +* `comparison_operator` - (Required) The type of comparison you want to perform. + e.g., `EQ`, `NE`, `LT`, `GT`. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_wafRegional_SizeConstraint.html) for all supported values. +* `size` - (Required) The size in bytes that you want to compare against the size of the specified `field_to_match`. + Valid values are between 0 - 21474836480 bytes (0 - 20 GB). +* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `field_to_match` before inspecting a request for a match. + e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`. 
+ See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_SizeConstraint.html#WAF-Type-SizeConstraint-TextTransformation) + for all supported values. + **Note:** if you choose `BODY` as `type`, you must choose `NONE` because CloudFront forwards only the first 8192 bytes for inspection. + +### `field_to_match` + +#### Arguments + +* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `HEADER`, `METHOD` or `BODY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Size Constraint Set. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF Size Constraint Set using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS WAF Size Constraint Set using their ID. For example: + +```console +% terraform import aws_waf_size_constraint_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_sql_injection_match_set.html.markdown b/website/docs/cdktf/python/r/waf_sql_injection_match_set.html.markdown new file mode 100644 index 00000000000..3cb1b85ede2 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_sql_injection_match_set.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_sql_injection_match_set" +description: |- + Provides a AWS WAF SQL Injection Match Set resource. +--- + + + +# Resource: aws_waf_sql_injection_match_set + +Provides a WAF SQL Injection Match Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_sql_injection_match_set import WafSqlInjectionMatchSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafSqlInjectionMatchSet(self, "sql_injection_match_set", + name="tf-sql_injection_match_set", + sql_injection_match_tuples=[WafSqlInjectionMatchSetSqlInjectionMatchTuples( + field_to_match=WafSqlInjectionMatchSetSqlInjectionMatchTuplesFieldToMatch( + type="QUERY_STRING" + ), + text_transformation="URL_DECODE" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the SQL Injection Match Set. +* `sql_injection_match_tuples` - (Optional) The parts of web requests that you want AWS WAF to inspect for malicious SQL code and, if you want AWS WAF to inspect a header, the name of the header. 
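+
+For header inspection specifically, the tuple names the target header in its `field_to_match` block (see below). A minimal hand-written sketch (not `cdktf convert` output; the header name is illustrative):
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.waf_sql_injection_match_set import (
+    WafSqlInjectionMatchSet,
+    WafSqlInjectionMatchSetSqlInjectionMatchTuples,
+    WafSqlInjectionMatchSetSqlInjectionMatchTuplesFieldToMatch,
+)
+
+class HeaderSqlInjectionStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WafSqlInjectionMatchSet(self, "header_match_set",
+            name="tf-sql_injection_header",
+            sql_injection_match_tuples=[
+                WafSqlInjectionMatchSetSqlInjectionMatchTuples(
+                    # Inspect the Cookie header, URL-decoding it first.
+                    field_to_match=WafSqlInjectionMatchSetSqlInjectionMatchTuplesFieldToMatch(
+                        data="cookie",
+                        type="HEADER"
+                    ),
+                    text_transformation="URL_DECODE"
+                )
+            ]
+        )
+```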
+ +## Nested Blocks + +### `sql_injection_match_tuples` + +* `field_to_match` - (Required) Specifies where in a web request to look for snippets of malicious SQL code. +* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `field_to_match` before inspecting a request for a match. + e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_SqlInjectionMatchTuple.html#WAF-Type-SqlInjectionMatchTuple-TextTransformation) + for all supported values. + +### `field_to_match` + +#### Arguments + +* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `HEADER`, `METHOD` or `BODY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF SQL Injection Match Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF SQL Injection Match Set using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import AWS WAF SQL Injection Match Set using their ID. For example: + +```console +% terraform import aws_waf_sql_injection_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_web_acl.html.markdown b/website/docs/cdktf/python/r/waf_web_acl.html.markdown new file mode 100644 index 00000000000..9f26af4383f --- /dev/null +++ b/website/docs/cdktf/python/r/waf_web_acl.html.markdown @@ -0,0 +1,178 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_web_acl" +description: |- + Provides a AWS WAF web access control group (ACL) resource. +--- + + + +# Resource: aws_waf_web_acl + +Provides a WAF Web ACL Resource + +## Example Usage + +This example blocks requests coming from `192.0.7.0/24` and allows everything else. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
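+# Note: rules are evaluated in ascending priority order; requests matching
+# the rule's predicates are blocked here, and everything else falls through
+# to the default_action (ALLOW).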
+# +from imports.aws.waf_ipset import WafIpset +from imports.aws.waf_rule import WafRule +from imports.aws.waf_web_acl import WafWebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ipset = WafIpset(self, "ipset", + ip_set_descriptors=[WafIpsetIpSetDescriptors( + type="IPV4", + value="192.0.7.0/24" + ) + ], + name="tfIPSet" + ) + wafrule = WafRule(self, "wafrule", + depends_on=[ipset], + metric_name="tfWAFRule", + name="tfWAFRule", + predicates=[WafRulePredicates( + data_id=ipset.id, + negated=False, + type="IPMatch" + ) + ] + ) + WafWebAcl(self, "waf_acl", + default_action=WafWebAclDefaultAction( + type="ALLOW" + ), + depends_on=[ipset, wafrule], + metric_name="tfWebACL", + name="tfWebACL", + rules=[WafWebAclRules( + action=WafWebAclRulesAction( + type="BLOCK" + ), + priority=1, + rule_id=wafrule.id, + type="REGULAR" + ) + ] + ) +``` + +### Logging + +~> *NOTE:* The Kinesis Firehose Delivery Stream name must begin with `aws-waf-logs-` and be located in `us-east-1` region. See the [AWS WAF Developer Guide](https://docs.aws.amazon.com/waf/latest/developerguide/logging.html) for more information about enabling WAF logging. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_web_acl import WafWebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name, *, defaultAction, metricName, name): + super().__init__(scope, name) + WafWebAcl(self, "example", + logging_configuration=WafWebAclLoggingConfiguration( + log_destination=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn), + redacted_fields=WafWebAclLoggingConfigurationRedactedFields( + field_to_match=[WafWebAclLoggingConfigurationRedactedFieldsFieldToMatch( + type="URI" + ), WafWebAclLoggingConfigurationRedactedFieldsFieldToMatch( + data="referer", + type="HEADER" + ) + ] + ) + ), + default_action=default_action, + metric_name=metric_name, + name=name + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `default_action` - (Required) Configuration block with action that you want AWS WAF to take when a request doesn't match the criteria in any of the rules that are associated with the web ACL. Detailed below. +* `metric_name` - (Required) The name or description for the Amazon CloudWatch metric of this web ACL. +* `name` - (Required) The name or description of the web ACL. +* `rules` - (Optional) Configuration blocks containing rules to associate with the web ACL and the settings for each rule. Detailed below. +* `logging_configuration` - (Optional) Configuration block to enable WAF logging. Detailed below. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### `default_action` Configuration Block + +* `type` - (Required) Specifies how you want AWS WAF to respond to requests that don't match the criteria in any of the `rules`. 
+ e.g., `ALLOW` or `BLOCK` + +### `logging_configuration` Configuration Block + +* `log_destination` - (Required) Amazon Resource Name (ARN) of Kinesis Firehose Delivery Stream +* `redacted_fields` - (Optional) Configuration block containing parts of the request that you want redacted from the logs. Detailed below. + +#### `redacted_fields` Configuration Block + +* `field_to_match` - (Required) Set of configuration blocks for fields to redact. Detailed below. + +##### `field_to_match` Configuration Block + +-> Additional information about this configuration can be found in the [AWS WAF Regional API Reference](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_FieldToMatch.html). + +* `data` - (Optional) When the value of `type` is `HEADER`, enter the name of the header that you want the WAF to search, for example, `User-Agent` or `Referer`. If the value of `type` is any other value, omit `data`. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified stringE.g., `HEADER` or `METHOD` + +### `rules` Configuration Block + +See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ActivatedRule.html) for all details and supported values. + +* `action` - (Optional) The action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule. Not used if `type` is `GROUP`. + * `type` - (Required) valid values are: `BLOCK`, `ALLOW`, or `COUNT` +* `override_action` - (Optional) Override the action that a group requests CloudFront or AWS WAF takes when a web request matches the conditions in the rule. Only used if `type` is `GROUP`. + * `type` - (Required) valid values are: `NONE` or `COUNT` +* `priority` - (Required) Specifies the order in which the rules in a WebACL are evaluated. + Rules with a lower value are evaluated before rules with a higher value. +* `rule_id` - (Required) ID of the associated WAF (Global) rule (e.g., [`aws_waf_rule`](/docs/providers/aws/r/waf_rule.html)). WAF (Regional) rules cannot be used. +* `type` - (Optional) The rule type, either `REGULAR`, as defined by [Rule](http://docs.aws.amazon.com/waf/latest/APIReference/API_Rule.html), `RATE_BASED`, as defined by [RateBasedRule](http://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedRule.html), or `GROUP`, as defined by [RuleGroup](https://docs.aws.amazon.com/waf/latest/APIReference/API_RuleGroup.html). The default is REGULAR. If you add a RATE_BASED rule, you need to set `type` as `RATE_BASED`. If you add a GROUP rule, you need to set `type` as `GROUP`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF WebACL. +* `arn` - The ARN of the WAF WebACL. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Web ACL using the `id`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Web ACL using the `id`. 
For example: + +```console +% terraform import aws_waf_web_acl.main 0c8e583e-18f3-4c13-9e2a-67c4805d2f94 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/waf_xss_match_set.html.markdown b/website/docs/cdktf/python/r/waf_xss_match_set.html.markdown new file mode 100644 index 00000000000..deb70ed3386 --- /dev/null +++ b/website/docs/cdktf/python/r/waf_xss_match_set.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_xss_match_set" +description: |- + Provides a AWS WAF XssMatchSet resource. +--- + + + +# Resource: aws_waf_xss_match_set + +Provides a WAF XSS Match Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.waf_xss_match_set import WafXssMatchSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafXssMatchSet(self, "xss_match_set", + name="xss_match_set", + xss_match_tuples=[WafXssMatchSetXssMatchTuples( + field_to_match=WafXssMatchSetXssMatchTuplesFieldToMatch( + type="URI" + ), + text_transformation="NONE" + ), WafXssMatchSetXssMatchTuples( + field_to_match=WafXssMatchSetXssMatchTuplesFieldToMatch( + type="QUERY_STRING" + ), + text_transformation="NONE" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the SizeConstraintSet. +* `xss_match_tuples` - (Optional) The parts of web requests that you want to inspect for cross-site scripting attacks. + +## Nested Blocks + +### `xss_match_tuples` + +* `field_to_match` - (Required) Specifies where in a web request to look for cross-site scripting attacks. +* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `target_string` before inspecting a request for a match. + e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_XssMatchTuple.html#WAF-Type-XssMatchTuple-TextTransformation) + for all supported values. + +### `field_to_match` + +#### Arguments + +* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `HEADER`, `METHOD` or `BODY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF XssMatchSet. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF XSS Match Set using their ID. 
For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WAF XSS Match Set using their ID. For example:
+
+```console
+% terraform import aws_waf_xss_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_byte_match_set.html.markdown new file mode 100644 index 00000000000..c7d89d457c1 --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_byte_match_set.html.markdown @@ -0,0 +1,90 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_byte_match_set"
+description: |-
+  Provides an AWS WAF Regional ByteMatchSet resource for use with ALB.
+---
+
+
+
+# Resource: aws_wafregional_byte_match_set
+
+Provides a WAF Regional Byte Match Set Resource for use with Application Load Balancer.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.wafregional_byte_match_set import WafregionalByteMatchSet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WafregionalByteMatchSet(self, "byte_set",
+            byte_match_tuples=[WafregionalByteMatchSetByteMatchTuples(
+                field_to_match=WafregionalByteMatchSetByteMatchTuplesFieldToMatch(
+                    data="referer",
+                    type="HEADER"
+                ),
+                positional_constraint="CONTAINS",
+                target_string="badrefer1",
+                text_transformation="NONE"
+            )
+            ],
+            name="tf_waf_byte_match_set"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name or description of the ByteMatchSet.
+* `byte_match_tuples` - (Optional) Settings for the ByteMatchSet, such as the bytes (typically a string that corresponds with ASCII characters) that you want AWS WAF to search for in web requests. ByteMatchTuple documented below.
+
+`byte_match_tuples` blocks support the following arguments:
+
+* `field_to_match` - (Required) Settings for the ByteMatchTuple. FieldToMatch documented below.
+* `positional_constraint` - (Required) Where within the portion of a web request you want AWS WAF to search, e.g., `CONTAINS`, `CONTAINS_WORD` or `EXACTLY`. See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-PositionalConstraint) for all supported values.
+* `target_string` - (Required) The value that you want AWS WAF to search for. The maximum length of the value is 50 bytes.
+* `text_transformation` - (Required) The text transformation AWS WAF applies to the request before inspecting it, e.g., `NONE`, `CMD_LINE` or `HTML_ENTITY_DECODE`. See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-TextTransformation) for all supported values.
+
+`field_to_match` blocks support the following arguments:
+
+* `data` - (Optional) When the value of `type` is `HEADER`, enter the name of the header that you want AWS WAF to search, for example, `User-Agent` or `Referer`. If the value of `type` is any other value, omit `data`.
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF ByteMatchSet.
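+
+A byte match set only takes effect once a rule references it. A minimal hand-written sketch (not `cdktf convert` output) wiring the set above into a regional rule via a `ByteMatch` predicate; the `WafregionalRule`/`WafregionalRulePredicate` binding names are assumed to follow the generated-bindings convention used elsewhere in these docs:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+from imports.aws.wafregional_byte_match_set import (
+    WafregionalByteMatchSet,
+    WafregionalByteMatchSetByteMatchTuples,
+    WafregionalByteMatchSetByteMatchTuplesFieldToMatch,
+)
+from imports.aws.wafregional_rule import WafregionalRule, WafregionalRulePredicate
+
+class ByteMatchRuleStack(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        byte_set = WafregionalByteMatchSet(self, "byte_set",
+            name="tf_waf_byte_match_set",
+            byte_match_tuples=[WafregionalByteMatchSetByteMatchTuples(
+                field_to_match=WafregionalByteMatchSetByteMatchTuplesFieldToMatch(
+                    data="referer",
+                    type="HEADER"
+                ),
+                positional_constraint="CONTAINS",
+                target_string="badrefer1",
+                text_transformation="NONE"
+            )]
+        )
+        # data_id ties the ByteMatch predicate to the set created above.
+        WafregionalRule(self, "wafrule",
+            metric_name="tfWAFRule",
+            name="tfWAFRule",
+            predicate=[WafregionalRulePredicate(
+                data_id=byte_set.id,
+                negated=False,
+                type="ByteMatch"
+            )]
+        )
+```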
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Byte Match Set using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional Byte Match Set using the id. For example: + +```console +% terraform import aws_wafregional_byte_match_set.byte_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_geo_match_set.html.markdown b/website/docs/cdktf/python/r/wafregional_geo_match_set.html.markdown new file mode 100644 index 00000000000..9e4601a6e02 --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_geo_match_set.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_geo_match_set" +description: |- + Provides a AWS WAF Regional Geo Match Set resource. +--- + + + +# Resource: aws_wafregional_geo_match_set + +Provides a WAF Regional Geo Match Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_geo_match_set import WafregionalGeoMatchSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafregionalGeoMatchSet(self, "geo_match_set", + geo_match_constraint=[WafregionalGeoMatchSetGeoMatchConstraint( + type="Country", + value="US" + ), WafregionalGeoMatchSetGeoMatchConstraint( + type="Country", + value="CA" + ) + ], + name="geo_match_set" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Geo Match Set. +* `geo_match_constraint` - (Optional) The Geo Match Constraint objects which contain the country that you want AWS WAF to search for. + +## Nested Blocks + +### `geo_match_constraint` + +#### Arguments + +* `type` - (Required) The type of geographical area you want AWS WAF to search for. Currently Country is the only valid value. +* `value` - (Required) The country that you want AWS WAF to search for. + This is the two-letter country code, e.g., `US`, `CA`, `RU`, `CN`, etc. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchConstraint.html) for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Geo Match Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Geo Match Set using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional Geo Match Set using the id. 
For example: + +```console +% terraform import aws_wafregional_geo_match_set.geo_match_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_ipset.html.markdown b/website/docs/cdktf/python/r/wafregional_ipset.html.markdown new file mode 100644 index 00000000000..6b8d9919ee8 --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_ipset.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_ipset" +description: |- + Provides a AWS WAF Regional IPSet resource for use with ALB. +--- + + + +# Resource: aws_wafregional_ipset + +Provides a WAF Regional IPSet Resource for use with Application Load Balancer. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_ipset import WafregionalIpset +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafregionalIpset(self, "ipset", + ip_set_descriptor=[WafregionalIpsetIpSetDescriptor( + type="IPV4", + value="192.0.7.0/24" + ), WafregionalIpsetIpSetDescriptor( + type="IPV4", + value="10.16.16.0/16" + ) + ], + name="tfIPSet" + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the IPSet. +* `ip_set_descriptor` - (Optional) One or more pairs specifying the IP address type (IPV4 or IPV6) and the IP address range (in CIDR notation) from which web requests originate. + +## Nested Blocks + +### `ip_set_descriptor` + +#### Arguments + +* `type` - (Required) The string like IPV4 or IPV6. +* `value` - (Required) The CIDR notation. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF IPSet. +* `arn` - The ARN of the WAF IPSet. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional IPSets using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional IPSets using their ID. For example: + +```console +% terraform import aws_wafregional_ipset.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_rate_based_rule.html.markdown b/website/docs/cdktf/python/r/wafregional_rate_based_rule.html.markdown new file mode 100644 index 00000000000..c6db0d52275 --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_rate_based_rule.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_rate_based_rule" +description: |- + Provides a AWS WAF Regional rate based rule resource. 
+--- + + + +# Resource: aws_wafregional_rate_based_rule + +Provides a WAF Rate Based Rule Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_ipset import WafregionalIpset +from imports.aws.wafregional_rate_based_rule import WafregionalRateBasedRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ipset = WafregionalIpset(self, "ipset", + ip_set_descriptor=[WafregionalIpsetIpSetDescriptor( + type="IPV4", + value="192.0.7.0/24" + ) + ], + name="tfIPSet" + ) + WafregionalRateBasedRule(self, "wafrule", + depends_on=[ipset], + metric_name="tfWAFRule", + name="tfWAFRule", + predicate=[WafregionalRateBasedRulePredicate( + data_id=ipset.id, + negated=False, + type="IPMatch" + ) + ], + rate_key="IP", + rate_limit=100 + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `metric_name` - (Required) The name or description for the Amazon CloudWatch metric of this rule. +* `name` - (Required) The name or description of the rule. +* `rate_key` - (Required) Valid value is IP. +* `rate_limit` - (Required) The maximum number of requests, which have an identical value in the field specified by the RateKey, allowed in a five-minute period. Minimum value is 100. +* `predicate` - (Optional) The objects to include in a rule (documented below). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Nested Blocks + +### `predicate` + +See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information. + +#### Arguments + +* `negated` - (Required) Set this to `false` if you want to allow, block, or count requests + based on the settings in the specified `ByteMatchSet`, `IPSet`, `SqlInjectionMatchSet`, `XssMatchSet`, or `SizeConstraintSet`. + For example, if an IPSet includes the IP address `192.0.2.44`, AWS WAF will allow or block requests based on that IP address. + If set to `true`, AWS WAF will allow, block, or count requests based on all IP addresses _except_ `192.0.2.44`. +* `data_id` - (Required) A unique identifier for a predicate in the rule, such as Byte Match Set ID or IPSet ID. +* `type` - (Required) The type of predicate in a rule. Valid values: `ByteMatch`, `GeoMatch`, `IPMatch`, `RegexMatch`, `SizeConstraint`, `SqlInjectionMatch`, or `XssMatch`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Rate Based Rule. +* `arn` - The ARN of the WAF Regional Rate Based Rule. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Rate Based Rule using the id. 
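In plain Terraform, such an `import` block would look roughly like the following sketch; the resource address `aws_wafregional_rate_based_rule.wafrule` and the id are illustrative, mirroring the `terraform import` example further below:

```terraform
import {
  to = aws_wafregional_rate_based_rule.wafrule
  id = "a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc"
}
```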
For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional Rate Based Rule using the id. For example: + +```console +% terraform import aws_wafregional_rate_based_rule.wafrule a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_regex_match_set.html.markdown b/website/docs/cdktf/python/r/wafregional_regex_match_set.html.markdown new file mode 100644 index 00000000000..cb39d70229a --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_regex_match_set.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_regex_match_set" +description: |- + Provides a AWS WAF Regional Regex Match Set resource. +--- + + + +# Resource: aws_wafregional_regex_match_set + +Provides a WAF Regional Regex Match Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_regex_match_set import WafregionalRegexMatchSet +from imports.aws.wafregional_regex_pattern_set import WafregionalRegexPatternSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = WafregionalRegexPatternSet(self, "example", + name="example", + regex_pattern_strings=["one", "two"] + ) + aws_wafregional_regex_match_set_example = WafregionalRegexMatchSet(self, "example_1", + name="example", + regex_match_tuple=[WafregionalRegexMatchSetRegexMatchTuple( + field_to_match=WafregionalRegexMatchSetRegexMatchTupleFieldToMatch( + data="User-Agent", + type="HEADER" + ), + regex_pattern_set_id=example.id, + text_transformation="NONE" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_wafregional_regex_match_set_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Match Set. +* `regex_match_tuple` - (Required) The regular expression pattern that you want AWS WAF to search for in web requests, the location in requests that you want AWS WAF to search, and other settings. See below. + +### Nested Arguments + +#### `regex_match_tuple` + +* `field_to_match` - (Required) The part of a web request that you want to search, such as a specified header or a query string. +* `regex_pattern_set_id` - (Required) The ID of a [Regex Pattern Set](/docs/providers/aws/r/waf_regex_pattern_set.html). +* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-TextTransformation) + for all supported values. 
+ +#### `field_to_match` + +* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `HEADER`, `METHOD` or `BODY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Regex Match Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Regex Match Set using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional Regex Match Set using the id. For example: + +```console +% terraform import aws_wafregional_regex_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_regex_pattern_set.html.markdown b/website/docs/cdktf/python/r/wafregional_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..74102bd42d1 --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_regex_pattern_set.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_regex_pattern_set" +description: |- + Provides a AWS WAF Regional Regex Pattern Set resource. +--- + + + +# Resource: aws_wafregional_regex_pattern_set + +Provides a WAF Regional Regex Pattern Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_regex_pattern_set import WafregionalRegexPatternSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafregionalRegexPatternSet(self, "example", + name="example", + regex_pattern_strings=["one", "two"] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Pattern Set. +* `regex_pattern_strings` - (Optional) A list of regular expression (regex) patterns that you want AWS WAF to search for, such as `B[a@]dB[o0]t`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Regex Pattern Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Regex Pattern Set using the id. 
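In plain Terraform, such an `import` block would look roughly like this sketch; the resource address `aws_wafregional_regex_pattern_set.example` and the id are illustrative, mirroring the `terraform import` example further below:

```terraform
import {
  to = aws_wafregional_regex_pattern_set.example
  id = "a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc"
}
```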
For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import WAF Regional Regex Pattern Set using the id. For example:

```console
% terraform import aws_wafregional_regex_pattern_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_rule.html.markdown b/website/docs/cdktf/python/r/wafregional_rule.html.markdown new file mode 100644 index 00000000000..ab58c64997f --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_rule.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_rule" +description: |- + Provides an AWS WAF Regional rule resource for use with ALB. +--- + + + +# Resource: aws_wafregional_rule

Provides a WAF Regional Rule Resource for use with Application Load Balancer.

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.wafregional_ipset import WafregionalIpset
from imports.aws.wafregional_rule import WafregionalRule
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        ipset = WafregionalIpset(self, "ipset",
            ip_set_descriptor=[WafregionalIpsetIpSetDescriptor(
                type="IPV4",
                value="192.0.7.0/24"
            )
            ],
            name="tfIPSet"
        )
        WafregionalRule(self, "wafrule",
            metric_name="tfWAFRule",
            name="tfWAFRule",
            predicate=[WafregionalRulePredicate(
                data_id=ipset.id,
                negated=False,
                type="IPMatch"
            )
            ]
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name or description of the rule.
* `metric_name` - (Required) The name or description for the Amazon CloudWatch metric of this rule.
* `predicate` - (Optional) The objects to include in a rule (documented below).
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Nested Fields

### `predicate`

See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information.

#### Arguments

* `type` - (Required) The type of predicate in a rule. Valid values: `ByteMatch`, `GeoMatch`, `IPMatch`, `RegexMatch`, `SizeConstraint`, `SqlInjectionMatch`, or `XssMatch`.
* `data_id` - (Required) The unique identifier of a predicate, such as the ID of a `ByteMatchSet` or `IPSet`.
* `negated` - (Required) Whether to use the settings or the negated settings that you specified in the objects.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the WAF Regional Rule.
* `arn` - The ARN of the WAF Regional Rule.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Rule using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional Rule using the id. For example: + +```console +% terraform import aws_wafregional_rule.wafrule a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_rule_group.html.markdown b/website/docs/cdktf/python/r/wafregional_rule_group.html.markdown new file mode 100644 index 00000000000..074e4d963dd --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_rule_group.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_rule_group" +description: |- + Provides a AWS WAF Regional Rule Group resource. +--- + + + +# Resource: aws_wafregional_rule_group + +Provides a WAF Regional Rule Group Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_rule import WafregionalRule +from imports.aws.wafregional_rule_group import WafregionalRuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = WafregionalRule(self, "example", + metric_name="example", + name="example" + ) + aws_wafregional_rule_group_example = WafregionalRuleGroup(self, "example_1", + activated_rule=[WafregionalRuleGroupActivatedRule( + action=WafregionalRuleGroupActivatedRuleAction( + type="COUNT" + ), + priority=50, + rule_id=example.id + ) + ], + metric_name="example", + name="example" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_wafregional_rule_group_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A friendly name of the rule group +* `metric_name` - (Required) A friendly name for the metrics from the rule group +* `activated_rule` - (Optional) A list of activated rules, see below +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Nested Blocks + +### `activated_rule` + +#### Arguments + +* `action` - (Required) Specifies the action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule. + * `type` - (Required) e.g., `BLOCK`, `ALLOW`, or `COUNT` +* `priority` - (Required) Specifies the order in which the rules are evaluated. 
Rules with a lower value are evaluated before rules with a higher value. +* `rule_id` - (Required) The ID of a [rule](/docs/providers/aws/r/wafregional_rule.html) +* `type` - (Optional) The rule type, either [`REGULAR`](/docs/providers/aws/r/wafregional_rule.html), [`RATE_BASED`](/docs/providers/aws/r/wafregional_rate_based_rule.html), or `GROUP`. Defaults to `REGULAR`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Rule Group. +* `arn` - The ARN of the WAF Regional Rule Group. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Rule Group using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional Rule Group using the id. For example: + +```console +% terraform import aws_wafregional_rule_group.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_size_constraint_set.html.markdown b/website/docs/cdktf/python/r/wafregional_size_constraint_set.html.markdown new file mode 100644 index 00000000000..1c2cac5984c --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_size_constraint_set.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_size_constraint_set" +description: |- + Provides an AWS WAF Regional Size Constraint Set resource for use with ALB. +--- + + + +# Resource: aws_wafregional_size_constraint_set + +Provides a WAF Regional Size Constraint Set Resource for use with Application Load Balancer. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_size_constraint_set import WafregionalSizeConstraintSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafregionalSizeConstraintSet(self, "size_constraint_set", + name="tfsize_constraints", + size_constraints=[WafregionalSizeConstraintSetSizeConstraints( + comparison_operator="EQ", + field_to_match=WafregionalSizeConstraintSetSizeConstraintsFieldToMatch( + type="BODY" + ), + size=Token.as_number("4096"), + text_transformation="NONE" + ) + ] + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Size Constraint Set. +* `size_constraints` - (Optional) Specifies the parts of web requests that you want to inspect the size of. + +## Nested Blocks + +### `size_constraints` + +#### Arguments + +* `field_to_match` - (Required) Specifies where in a web request to look for the size constraint. 
+* `comparison_operator` - (Required) The type of comparison you want to perform. + e.g., `EQ`, `NE`, `LT`, `GT`. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_wafRegional_SizeConstraint.html) for all supported values. +* `size` - (Required) The size in bytes that you want to compare against the size of the specified `field_to_match`. + Valid values are between 0 - 21474836480 bytes (0 - 20 GB). +* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `field_to_match` before inspecting a request for a match. + e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_SizeConstraint.html#WAF-Type-SizeConstraint-TextTransformation) + for all supported values. + **Note:** if you choose `BODY` as `type`, you must choose `NONE` because CloudFront forwards only the first 8192 bytes for inspection. + +### `field_to_match` + +#### Arguments + +* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `HEADER`, `METHOD` or `BODY`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Size Constraint Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Size Constraint Set using the id. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Size Constraint Set using the id. For example: + +```console +% terraform import aws_wafregional_size_constraint_set.size_constraint_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_sql_injection_match_set.html.markdown b/website/docs/cdktf/python/r/wafregional_sql_injection_match_set.html.markdown new file mode 100644 index 00000000000..11c5cc95b21 --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_sql_injection_match_set.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_sql_injection_match_set" +description: |- + Provides a AWS WAF Regional SqlInjectionMatchSet resource for use with ALB. +--- + + + +# Resource: aws_wafregional_sql_injection_match_set + +Provides a WAF Regional SQL Injection Match Set Resource for use with Application Load Balancer. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+
+#
+from imports.aws.wafregional_sql_injection_match_set import WafregionalSqlInjectionMatchSet
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        WafregionalSqlInjectionMatchSet(self, "sql_injection_match_set",
            name="tf-sql_injection_match_set",
            sql_injection_match_tuple=[WafregionalSqlInjectionMatchSetSqlInjectionMatchTuple(
                field_to_match=WafregionalSqlInjectionMatchSetSqlInjectionMatchTupleFieldToMatch(
                    type="QUERY_STRING"
                ),
                text_transformation="URL_DECODE"
            )
            ]
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name or description of the SqlInjectionMatchSet.
* `sql_injection_match_tuple` - (Optional) The parts of web requests that you want AWS WAF to inspect for malicious SQL code and, if you want AWS WAF to inspect a header, the name of the header.

### Nested fields

#### `sql_injection_match_tuple`

* `field_to_match` - (Required) Specifies where in a web request to look for snippets of malicious SQL code.
* `text_transformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF.
  If you specify a transformation, AWS WAF performs the transformation on `field_to_match` before inspecting a request for a match.
  e.g., `CMD_LINE`, `HTML_ENTITY_DECODE` or `NONE`.
  See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_SqlInjectionMatchTuple.html#WAF-Type-regional_SqlInjectionMatchTuple-TextTransformation)
  for all supported values.

#### `field_to_match`

* `data` - (Optional) When `type` is `HEADER`, enter the name of the header that you want to search, e.g., `User-Agent` or `Referer`.
  If `type` is any other value, omit this field.
* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string.
  e.g., `HEADER`, `METHOD` or `BODY`.
  See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_FieldToMatch.html)
  for all supported values.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the WAF SqlInjectionMatchSet.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Sql Injection Match Set using the id. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import WAF Regional Sql Injection Match Set using the id. For example:

```console
% terraform import aws_wafregional_sql_injection_match_set.sql_injection_match_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_web_acl.html.markdown b/website/docs/cdktf/python/r/wafregional_web_acl.html.markdown new file mode 100644 index 00000000000..63cff3f6dff --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_web_acl.html.markdown @@ -0,0 +1,209 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_web_acl" +description: |- + Provides an AWS WAF Regional web access control list (ACL) resource for use with ALB.
+--- + + + +# Resource: aws_wafregional_web_acl + +Provides a WAF Regional Web ACL Resource for use with Application Load Balancer. + +## Example Usage + +### Regular Rule + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_ipset import WafregionalIpset +from imports.aws.wafregional_rule import WafregionalRule +from imports.aws.wafregional_web_acl import WafregionalWebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + ipset = WafregionalIpset(self, "ipset", + ip_set_descriptor=[WafregionalIpsetIpSetDescriptor( + type="IPV4", + value="192.0.7.0/24" + ) + ], + name="tfIPSet" + ) + wafrule = WafregionalRule(self, "wafrule", + metric_name="tfWAFRule", + name="tfWAFRule", + predicate=[WafregionalRulePredicate( + data_id=ipset.id, + negated=False, + type="IPMatch" + ) + ] + ) + WafregionalWebAcl(self, "wafacl", + default_action=WafregionalWebAclDefaultAction( + type="ALLOW" + ), + metric_name="tfWebACL", + name="tfWebACL", + rule=[WafregionalWebAclRule( + action=WafregionalWebAclRuleAction( + type="BLOCK" + ), + priority=1, + rule_id=wafrule.id, + type="REGULAR" + ) + ] + ) +``` + +### Group Rule + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafregional_web_acl import WafregionalWebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + WafregionalWebAcl(self, "example", + default_action=WafregionalWebAclDefaultAction( + type="ALLOW" + ), + metric_name="example", + name="example", + rule=[WafregionalWebAclRule( + override_action=WafregionalWebAclRuleOverrideAction( + type="NONE" + ), + priority=1, + rule_id=Token.as_string(aws_wafregional_rule_group_example.id), + type="GROUP" + ) + ] + ) +``` + +### Logging + +~> *NOTE:* The Kinesis Firehose Delivery Stream name must begin with `aws-waf-logs-`. See the [AWS WAF Developer Guide](https://docs.aws.amazon.com/waf/latest/developerguide/logging.html) for more information about enabling WAF logging. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+
+#
+from imports.aws.wafregional_web_acl import WafregionalWebAcl
class MyConvertedCode(TerraformStack):
    # default_action, metric_name and web_acl_name are the web ACL's own
    # required arguments, supplied by whoever instantiates this stack;
    # web_acl_name is kept distinct from the construct id `name`.
    def __init__(self, scope, name, *, default_action, metric_name, web_acl_name):
        super().__init__(scope, name)
        WafregionalWebAcl(self, "example",
            logging_configuration=WafregionalWebAclLoggingConfiguration(
                log_destination=Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn),
                redacted_fields=WafregionalWebAclLoggingConfigurationRedactedFields(
                    field_to_match=[WafregionalWebAclLoggingConfigurationRedactedFieldsFieldToMatch(
                        type="URI"
                    ), WafregionalWebAclLoggingConfigurationRedactedFieldsFieldToMatch(
                        data="referer",
                        type="HEADER"
                    )
                    ]
                )
            ),
            default_action=default_action,
            metric_name=metric_name,
            name=web_acl_name
        )
```

## Argument Reference

This resource supports the following arguments:

* `default_action` - (Required) The action that you want AWS WAF Regional to take when a request doesn't match the criteria in any of the rules that are associated with the web ACL.
* `metric_name` - (Required) The name or description for the Amazon CloudWatch metric of this web ACL.
* `name` - (Required) The name or description of the web ACL.
* `logging_configuration` - (Optional) Configuration block to enable WAF logging. Detailed below.
* `rule` - (Optional) Set of configuration blocks containing rules for the web ACL. Detailed below.
* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### `default_action` Configuration Block

* `type` - (Required) Specifies how you want AWS WAF Regional to respond to requests that match the settings in a rule, e.g., `ALLOW`, `BLOCK` or `COUNT`.

### `logging_configuration` Configuration Block

* `log_destination` - (Required) Amazon Resource Name (ARN) of Kinesis Firehose Delivery Stream
* `redacted_fields` - (Optional) Configuration block containing parts of the request that you want redacted from the logs. Detailed below.

#### `redacted_fields` Configuration Block

* `field_to_match` - (Required) Set of configuration blocks for fields to redact. Detailed below.

##### `field_to_match` Configuration Block

-> Additional information about this configuration can be found in the [AWS WAF Regional API Reference](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_FieldToMatch.html).

* `data` - (Optional) When the value of `type` is `HEADER`, enter the name of the header that you want the WAF to search, for example, `User-Agent` or `Referer`. If the value of `type` is any other value, omit `data`.
* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string, e.g., `HEADER` or `METHOD`.

### `rule` Configuration Block

-> Additional information about this configuration can be found in the [AWS WAF Regional API Reference](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_ActivatedRule.html).

* `priority` - (Required) Specifies the order in which the rules in a WebACL are evaluated.
  Rules with a lower value are evaluated before rules with a higher value.
* `rule_id` - (Required) ID of the associated WAF (Regional) rule (e.g., [`aws_wafregional_rule`](/docs/providers/aws/r/wafregional_rule.html)). WAF (Global) rules cannot be used.
+
+* `action` - (Optional) Configuration block of the action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule. Not used if `type` is `GROUP`. Detailed below.
+* `override_action` - (Optional) Configuration block overriding the action that a rule group requests CloudFront or AWS WAF take when a web request matches the conditions in the rule. Only used if `type` is `GROUP`. Detailed below.
+* `type` - (Optional) The rule type, either `REGULAR`, as defined by [Rule](http://docs.aws.amazon.com/waf/latest/APIReference/API_Rule.html), `RATE_BASED`, as defined by [RateBasedRule](http://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedRule.html), or `GROUP`, as defined by [RuleGroup](https://docs.aws.amazon.com/waf/latest/APIReference/API_RuleGroup.html). The default is `REGULAR`. If you add a `RATE_BASED` rule, you need to set `type` as `RATE_BASED`. If you add a `GROUP` rule, you need to set `type` as `GROUP`.

#### `action` / `override_action` Configuration Block

* `type` - (Required) Specifies how you want AWS WAF Regional to respond to requests that match the settings in a rule. Valid values for `action` are `ALLOW`, `BLOCK` or `COUNT`. Valid values for `override_action` are `COUNT` and `NONE`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of the WAF Regional WebACL.
* `id` - The ID of the WAF Regional WebACL.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Web ACL using the id. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import WAF Regional Web ACL using the id. For example:

```console
% terraform import aws_wafregional_web_acl.wafacl a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_web_acl_association.html.markdown b/website/docs/cdktf/python/r/wafregional_web_acl_association.html.markdown new file mode 100644 index 00000000000..a618d89033b --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_web_acl_association.html.markdown @@ -0,0 +1,228 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_web_acl_association" +description: |- + Manages an association with a WAF Regional Web ACL +--- + + + +# Resource: aws_wafregional_web_acl_association

Manages an association with a WAF Regional Web ACL.

-> **Note:** An Application Load Balancer can only be associated with one WAF Regional WebACL.

## Example Usage

### Application Load Balancer Association

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import Token, property_access, TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.alb import Alb +from imports.aws.data_aws_availability_zones import DataAwsAvailabilityZones +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +from imports.aws.wafregional_ipset import WafregionalIpset +from imports.aws.wafregional_rule import WafregionalRule +from imports.aws.wafregional_web_acl import WafregionalWebAcl +from imports.aws.wafregional_web_acl_association import WafregionalWebAclAssociation +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + foo = Vpc(self, "foo", + cidr_block="10.1.0.0/16" + ) + ipset = WafregionalIpset(self, "ipset", + ip_set_descriptor=[WafregionalIpsetIpSetDescriptor( + type="IPV4", + value="192.0.7.0/24" + ) + ], + name="tfIPSet" + ) + aws_wafregional_rule_foo = WafregionalRule(self, "foo_2", + metric_name="tfWAFRule", + name="tfWAFRule", + predicate=[WafregionalRulePredicate( + data_id=ipset.id, + negated=False, + type="IPMatch" + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_wafregional_rule_foo.override_logical_id("foo") + aws_wafregional_web_acl_foo = WafregionalWebAcl(self, "foo_3", + default_action=WafregionalWebAclDefaultAction( + type="ALLOW" + ), + metric_name="foo", + name="foo", + rule=[WafregionalWebAclRule( + action=WafregionalWebAclRuleAction( + type="BLOCK" + ), + priority=1, + rule_id=Token.as_string(aws_wafregional_rule_foo.id) + ) + ] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_wafregional_web_acl_foo.override_logical_id("foo") + available = DataAwsAvailabilityZones(self, "available") + bar = Subnet(self, "bar", + availability_zone=Token.as_string(property_access(available.names, ["1"])), + cidr_block="10.1.2.0/24", + vpc_id=foo.id + ) + aws_subnet_foo = Subnet(self, "foo_6", + availability_zone=Token.as_string(property_access(available.names, ["0"])), + cidr_block="10.1.1.0/24", + vpc_id=foo.id + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_subnet_foo.override_logical_id("foo") + aws_alb_foo = Alb(self, "foo_7", + internal=True, + subnets=[Token.as_string(aws_subnet_foo.id), bar.id] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_alb_foo.override_logical_id("foo") + aws_wafregional_web_acl_association_foo = WafregionalWebAclAssociation(self, "foo_8", + resource_arn=Token.as_string(aws_alb_foo.arn), + web_acl_id=Token.as_string(aws_wafregional_web_acl_foo.id) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_wafregional_web_acl_association_foo.override_logical_id("foo") +``` + +### API Gateway Association + +```terraform +resource "aws_wafregional_ipset" "ipset" { + name = "tfIPSet" + + ip_set_descriptor { + type = "IPV4" + value = "192.0.7.0/24" + } +} + +resource "aws_wafregional_rule" "foo" { + name = "tfWAFRule" + metric_name = "tfWAFRule" + + predicate { + data_id = aws_wafregional_ipset.ipset.id + negated = false + type = "IPMatch" + } +} + +resource "aws_wafregional_web_acl" "foo" { + name = "foo" + metric_name = "foo" + + default_action { + type = "ALLOW" + } + + rule { + action { + type = "BLOCK" + } + + priority = 1 + rule_id = aws_wafregional_rule.foo.id + } +} + +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} + +resource "aws_wafregional_web_acl_association" "association" { + resource_arn = aws_api_gateway_stage.example.arn + web_acl_id = aws_wafregional_web_acl.foo.id +} +``` + +## Argument Reference + +This resource supports the following arguments: + +* `web_acl_id` - (Required) The ID of the WAF Regional WebACL to create an association. +* `resource_arn` - (Required) ARN of the resource to associate with. For example, an Application Load Balancer or API Gateway Stage. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the association + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Web ACL Association using their `web_acl_id:resource_arn`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAF Regional Web ACL Association using their `web_acl_id:resource_arn`. For example: + +```console +% terraform import aws_wafregional_web_acl_association.foo web_acl_id:resource_arn +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafregional_xss_match_set.html.markdown b/website/docs/cdktf/python/r/wafregional_xss_match_set.html.markdown new file mode 100644 index 00000000000..b3d1d65d0bf --- /dev/null +++ b/website/docs/cdktf/python/r/wafregional_xss_match_set.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_xss_match_set" +description: |- + Provides an AWS WAF Regional XSS Match Set resource for use with ALB. +--- + + + +# Resource: aws_wafregional_xss_match_set + +Provides a WAF Regional XSS Match Set Resource for use with Application Load Balancer. 
+

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
#
from imports.aws.wafregional_xss_match_set import WafregionalXssMatchSet
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        WafregionalXssMatchSet(self, "xss_match_set",
            name="xss_match_set",
            xss_match_tuple=[WafregionalXssMatchSetXssMatchTuple(
                field_to_match=WafregionalXssMatchSetXssMatchTupleFieldToMatch(
                    type="URI"
                ),
                text_transformation="NONE"
            ), WafregionalXssMatchSetXssMatchTuple(
                field_to_match=WafregionalXssMatchSetXssMatchTupleFieldToMatch(
                    type="QUERY_STRING"
                ),
                text_transformation="NONE"
            )
            ]
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The name of the set.
* `xss_match_tuple` - (Optional) The parts of web requests that you want to inspect for cross-site scripting attacks.

### Nested fields

#### `xss_match_tuple`

* `field_to_match` - (Required) Specifies where in a web request to look for cross-site scripting attacks.
* `text_transformation` - (Required) Which text transformation, if any, to perform on the web request before inspecting the request for cross-site scripting attacks.

#### `field_to_match`

* `data` - (Optional) When the value of `type` is `HEADER`, enter the name of the header that you want the WAF to search, for example, `User-Agent` or `Referer`. If the value of `type` is any other value, omit `data`.
* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string, e.g., `HEADER` or `METHOD`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the Regional WAF XSS Match Set.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF Regional XSS Match Set using the `id`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import AWS WAF Regional XSS Match Set using the `id`. For example:

```console
% terraform import aws_wafregional_xss_match_set.example 12345abcde
```

 \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafv2_ip_set.html.markdown b/website/docs/cdktf/python/r/wafv2_ip_set.html.markdown new file mode 100644 index 00000000000..cea892b2d38 --- /dev/null +++ b/website/docs/cdktf/python/r/wafv2_ip_set.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_ip_set" +description: |- + Provides an AWS WAFv2 IP Set resource. +--- + + + +# Resource: aws_wafv2_ip_set

Provides a WAFv2 IP Set Resource

## Example Usage

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
#
# Provider bindings are generated by running `cdktf get`.
# See https://cdk.tf/provider-generation for more details.
+# +from imports.aws.wafv2_ip_set import Wafv2IpSet +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Wafv2IpSet(self, "example", + addresses=["1.2.3.4/32", "5.6.7.8/32"], + description="Example IP set", + ip_address_version="IPV4", + name="example", + scope="REGIONAL", + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + } + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A friendly name of the IP set. +* `description` - (Optional) A friendly description of the IP set. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the Region US East (N. Virginia). +* `ip_address_version` - (Required) Specify IPV4 or IPV6. Valid values are `IPV4` or `IPV6`. +* `addresses` - (Required) Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses. All addresses must be specified using Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for `/0`. +* `tags` - (Optional) An array of key:value pairs to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A unique identifier for the IP set. +* `arn` - The Amazon Resource Name (ARN) of the IP set. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 IP Sets using `ID/name/scope`. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WAFv2 IP Sets using `ID/name/scope`. For example: + +```console +% terraform import aws_wafv2_ip_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafv2_regex_pattern_set.html.markdown b/website/docs/cdktf/python/r/wafv2_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..7ee74104601 --- /dev/null +++ b/website/docs/cdktf/python/r/wafv2_regex_pattern_set.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_regex_pattern_set" +description: |- + Provides an AWS WAFv2 Regex Pattern Set resource. +--- + + + +# Resource: aws_wafv2_regex_pattern_set + +Provides an AWS WAFv2 Regex Pattern Set Resource + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. 
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.wafv2_regex_pattern_set import Wafv2RegexPatternSet
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
        Wafv2RegexPatternSet(self, "example",
            description="Example regex pattern set",
            name="example",
            regular_expression=[Wafv2RegexPatternSetRegularExpression(
                regex_string="one"
            ), Wafv2RegexPatternSetRegularExpression(
                regex_string="two"
            )
            ],
            scope="REGIONAL",
            tags={
                "Tag1": "Value1",
                "Tag2": "Value2"
            }
        )
```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) A friendly name of the regular expression pattern set.
* `description` - (Optional) A friendly description of the regular expression pattern set.
* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider.
* `regular_expression` - (Optional) One or more blocks of regular expression patterns that you want AWS WAF to search for, such as `B[a@]dB[o0]t`. See [Regular Expression](#regular-expression) below for details. A maximum of 10 `regular_expression` blocks may be specified.
* `tags` - (Optional) An array of key:value pairs to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### Regular Expression

* `regex_string` - (Required) The string representing the regular expression, see the AWS WAF [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-regex-pattern-set-creating.html) for more information.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - A unique identifier for the set.
* `arn` - The Amazon Resource Name (ARN) that identifies the regex pattern set.
* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Regex Pattern Sets using `ID/name/scope`. For example:

```python
# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
from constructs import Construct
from cdktf import TerraformStack
class MyConvertedCode(TerraformStack):
    def __init__(self, scope, name):
        super().__init__(scope, name)
```

Using `terraform import`, import WAFv2 Regex Pattern Sets using `ID/name/scope`.
For example: + +```console +% terraform import aws_wafv2_regex_pattern_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/wafv2_rule_group.html.markdown b/website/docs/cdktf/python/r/wafv2_rule_group.html.markdown new file mode 100644 index 00000000000..46c8aea9449 --- /dev/null +++ b/website/docs/cdktf/python/r/wafv2_rule_group.html.markdown @@ -0,0 +1,688 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_rule_group" +description: |- + Creates a WAFv2 rule group resource. +--- + + + +# Resource: aws_wafv2_rule_group + +Creates a WAFv2 Rule Group resource. + +## Example Usage + +### Simple + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafv2_rule_group import Wafv2RuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Wafv2RuleGroup(self, "example", + capacity=2, + name="example-rule", + rule=[Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + allow=Wafv2RuleGroupRuleActionAllow() + ), + name="rule-1", + priority=1, + statement=Wafv2RuleGroupRuleStatement( + geo_match_statement=Wafv2RuleGroupRuleStatementGeoMatchStatement( + country_codes=["US", "NL"] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-rule-metric-name", + sampled_requests_enabled=False + ) + ) + ], + scope="REGIONAL", + visibility_config=Wafv2RuleGroupVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-metric-name", + sampled_requests_enabled=False + ) + ) +``` + +### Complex + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafv2_ip_set import Wafv2IpSet +from imports.aws.wafv2_regex_pattern_set import Wafv2RegexPatternSet +from imports.aws.wafv2_rule_group import Wafv2RuleGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + test = Wafv2IpSet(self, "test", + addresses=["1.1.1.1/32", "2.2.2.2/32"], + ip_address_version="IPV4", + name="test", + scope="REGIONAL" + ) + aws_wafv2_regex_pattern_set_test = Wafv2RegexPatternSet(self, "test_1", + name="test", + regular_expression=[Wafv2RegexPatternSetRegularExpression( + regex_string="one" + ) + ], + scope="REGIONAL" + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_wafv2_regex_pattern_set_test.override_logical_id("test") + Wafv2RuleGroup(self, "example", + capacity=500, + captcha_config=[{ + "immunity_time_property": [{ + "immunity_time": 120 + } + ] + } + ], + description="An rule group containing all statements", + name="complex-example", + rule=[Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + block=Wafv2RuleGroupRuleActionBlock() + ), + name="rule-1", + priority=1, + statement=Wafv2RuleGroupRuleStatement( + not_statement=Wafv2RuleGroupRuleStatementNotStatement( + statement=[Wafv2RuleGroupRuleStatementNotStatementStatement( + and_statement=Wafv2RuleGroupRuleStatementNotStatementStatementAndStatement( + statement=[Wafv2RuleGroupRuleStatementNotStatementStatement( + geo_match_statement=Wafv2RuleGroupRuleStatementNotStatementStatementGeoMatchStatement( + country_codes=["US"] + ) + ), Wafv2RuleGroupRuleStatementNotStatementStatement( + byte_match_statement=Wafv2RuleGroupRuleStatementNotStatementStatementByteMatchStatement( + field_to_match=Wafv2RuleGroupRuleStatementNotStatementStatementByteMatchStatementFieldToMatch( + all_query_arguments=Wafv2RuleGroupRuleStatementNotStatementStatementByteMatchStatementFieldToMatchAllQueryArguments() + ), + positional_constraint="CONTAINS", + search_string="word", + text_transformation=[Wafv2RuleGroupRuleStatementNotStatementStatementByteMatchStatementTextTransformation( + priority=5, + type="CMD_LINE" + ), Wafv2RuleGroupRuleStatementNotStatementStatementByteMatchStatementTextTransformation( + priority=2, + type="LOWERCASE" + ) + ] + ) + ) + ] + ) + ) + ] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="rule-1", + sampled_requests_enabled=False + ) + ), Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + count=Wafv2RuleGroupRuleActionCount() + ), + captcha_config=Wafv2RuleGroupRuleCaptchaConfig( + immunity_time_property=Wafv2RuleGroupRuleCaptchaConfigImmunityTimeProperty( + immunity_time=240 + ) + ), + name="rule-2", + priority=2, + statement=Wafv2RuleGroupRuleStatement( + or_statement=Wafv2RuleGroupRuleStatementOrStatement( + statement=[Wafv2RuleGroupRuleStatementOrStatementStatement( + regex_match_statement=Wafv2RuleGroupRuleStatementOrStatementStatementRegexMatchStatement( + field_to_match=Wafv2RuleGroupRuleStatementOrStatementStatementRegexMatchStatementFieldToMatch( + single_header=Wafv2RuleGroupRuleStatementOrStatementStatementRegexMatchStatementFieldToMatchSingleHeader( + name="user-agent" + ) + ), + regex_string="[a-z]([a-z0-9_-]*[a-z0-9])?", + text_transformation=[Wafv2RuleGroupRuleStatementOrStatementStatementRegexMatchStatementTextTransformation( + priority=6, + type="NONE" + ) + ] + ) + ), Wafv2RuleGroupRuleStatementOrStatementStatement( + sqli_match_statement=Wafv2RuleGroupRuleStatementOrStatementStatementSqliMatchStatement( + field_to_match=Wafv2RuleGroupRuleStatementOrStatementStatementSqliMatchStatementFieldToMatch( + body=Wafv2RuleGroupRuleStatementOrStatementStatementSqliMatchStatementFieldToMatchBody() + ), + text_transformation=[Wafv2RuleGroupRuleStatementOrStatementStatementSqliMatchStatementTextTransformation( + priority=5, + type="URL_DECODE" + ), Wafv2RuleGroupRuleStatementOrStatementStatementSqliMatchStatementTextTransformation( + priority=4, + type="HTML_ENTITY_DECODE" + ), Wafv2RuleGroupRuleStatementOrStatementStatementSqliMatchStatementTextTransformation( + priority=3, + type="COMPRESS_WHITE_SPACE" + ) + ] + ) + ), Wafv2RuleGroupRuleStatementOrStatementStatement( + 
xss_match_statement=Wafv2RuleGroupRuleStatementOrStatementStatementXssMatchStatement( + field_to_match=Wafv2RuleGroupRuleStatementOrStatementStatementXssMatchStatementFieldToMatch( + method=Wafv2RuleGroupRuleStatementOrStatementStatementXssMatchStatementFieldToMatchMethod() + ), + text_transformation=[Wafv2RuleGroupRuleStatementOrStatementStatementXssMatchStatementTextTransformation( + priority=2, + type="NONE" + ) + ] + ) + ) + ] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="rule-2", + sampled_requests_enabled=False + ) + ), Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + block=Wafv2RuleGroupRuleActionBlock() + ), + name="rule-3", + priority=3, + statement=Wafv2RuleGroupRuleStatement( + size_constraint_statement=Wafv2RuleGroupRuleStatementSizeConstraintStatement( + comparison_operator="GT", + field_to_match=Wafv2RuleGroupRuleStatementSizeConstraintStatementFieldToMatch( + single_query_argument=Wafv2RuleGroupRuleStatementSizeConstraintStatementFieldToMatchSingleQueryArgument( + name="username" + ) + ), + size=100, + text_transformation=[Wafv2RuleGroupRuleStatementSizeConstraintStatementTextTransformation( + priority=5, + type="NONE" + ) + ] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="rule-3", + sampled_requests_enabled=False + ) + ), Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + block=Wafv2RuleGroupRuleActionBlock() + ), + name="rule-4", + priority=4, + statement=Wafv2RuleGroupRuleStatement( + or_statement=Wafv2RuleGroupRuleStatementOrStatement( + statement=[Wafv2RuleGroupRuleStatementOrStatementStatement( + ip_set_reference_statement=Wafv2RuleGroupRuleStatementOrStatementStatementIpSetReferenceStatement( + arn=test.arn + ) + ), Wafv2RuleGroupRuleStatementOrStatementStatement( + regex_pattern_set_reference_statement=Wafv2RuleGroupRuleStatementOrStatementStatementRegexPatternSetReferenceStatement( + arn=Token.as_string(aws_wafv2_regex_pattern_set_test.arn), + field_to_match=Wafv2RuleGroupRuleStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatch( + single_header=Wafv2RuleGroupRuleStatementOrStatementStatementRegexPatternSetReferenceStatementFieldToMatchSingleHeader( + name="referer" + ) + ), + text_transformation=[Wafv2RuleGroupRuleStatementOrStatementStatementRegexPatternSetReferenceStatementTextTransformation( + priority=2, + type="NONE" + ) + ] + ) + ) + ] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="rule-4", + sampled_requests_enabled=False + ) + ) + ], + scope="REGIONAL", + tags={ + "Code": "123456", + "Name": "example-and-statement" + }, + visibility_config=Wafv2RuleGroupVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-metric-name", + sampled_requests_enabled=False + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `capacity` - (Required, Forces new resource) The web ACL capacity units (WCUs) required for this rule group. See [here](https://docs.aws.amazon.com/waf/latest/APIReference/API_CreateRuleGroup.html#API_CreateRuleGroup_RequestSyntax) for general information and [here](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statements-list.html) for capacity specific information. +* `custom_response_body` - (Optional) Defines custom response bodies that can be referenced by `custom_response` actions. 
See [Custom Response Body](#custom-response-body) below for details.
+* `description` - (Optional) A friendly description of the rule group.
+* `name` - (Required, Forces new resource) A friendly name of the rule group.
+* `rule` - (Optional) The rule blocks used to identify the web requests that you want to `allow`, `block`, or `count`. See [Rules](#rules) below for details.
+* `scope` - (Required, Forces new resource) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider.
+* `tags` - (Optional) A map of key:value pairs to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `visibility_config` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [Visibility Configuration](#visibility-configuration) below for details.
+
+### Custom Response Body
+
+Each `custom_response_body` block supports the following arguments:
+
+* `key` - (Required) A unique key identifying the custom response body. This is referenced by the `custom_response_body_key` argument in the [Custom Response](#custom-response) block.
+* `content` - (Required) The payload of the custom response.
+* `content_type` - (Required) The type of content in the payload that you are defining in the `content` argument. Valid values are `TEXT_PLAIN`, `TEXT_HTML`, or `APPLICATION_JSON`.
+
+### Rules
+
+Each `rule` supports the following arguments:
+
+* `action` - (Required) The action that AWS WAF should take on a web request when it matches the rule's statement. Settings at the `aws_wafv2_web_acl` level can override the rule action setting. See [Action](#action) below for details.
+* `captcha_config` - (Optional) Specifies how AWS WAF should handle CAPTCHA evaluations. See [Captcha Configuration](#captcha-configuration) below for details.
+* `name` - (Required, Forces new resource) A friendly name of the rule.
+* `priority` - (Required) If you define more than one rule in a rule group, AWS WAF evaluates each request against the rules in order based on the value of `priority`. AWS WAF processes rules with lower priority first.
+* `rule_label` - (Optional) Labels to apply to web requests that match the rule match statement. See [Rule Label](#rule-label) below for details.
+* `statement` - (Required) The AWS WAF processing statement for the rule, for example `byte_match_statement` or `geo_match_statement`. See [Statement](#statement) below for details.
+* `visibility_config` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [Visibility Configuration](#visibility-configuration) below for details.
+
+### Action
+
+The `action` block supports the following arguments:
+
+~> **NOTE:** One of `allow`, `block`, or `count` is required when specifying an `action`.
+
+* `allow` - (Optional) Instructs AWS WAF to allow the web request. See [Allow](#allow) below for details.
+* `block` - (Optional) Instructs AWS WAF to block the web request. See [Block](#block) below for details.
+* `captcha` - (Optional) Instructs AWS WAF to run a `CAPTCHA` check against the web request. See [Captcha](#captcha) below for details.
+* `challenge` - (Optional) Instructs AWS WAF to run a check against the request to verify that the request is coming from a legitimate client session. See [Challenge](#challenge) below for details.
+* `count` - (Optional) Instructs AWS WAF to count the web request and allow it. See [Count](#count) below for details.
+
+### Allow
+
+The `allow` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Block
+
+The `block` block supports the following arguments:
+
+* `custom_response` - (Optional) Defines a custom response for the web request. See [Custom Response](#custom-response) below for details.
+
+### Captcha
+
+The `captcha` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Challenge
+
+The `challenge` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Count
+
+The `count` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Custom Request Handling
+
+The `custom_request_handling` block supports the following arguments:
+
+* `insert_header` - (Required) The `insert_header` blocks used to define HTTP headers added to the request. See [Custom HTTP Header](#custom-http-header) below for details.
+
+### Custom Response
+
+The `custom_response` block supports the following arguments:
+
+* `custom_response_body_key` - (Optional) References the response body that you want AWS WAF to return to the web request client. This must reference a `key` defined in a `custom_response_body` block of this resource.
+* `response_code` - (Required) The HTTP status code to return to the client.
+* `response_header` - (Optional) The `response_header` blocks used to define the HTTP response headers added to the response. See [Custom HTTP Header](#custom-http-header) below for details.
+
+### Custom HTTP Header
+
+Each block supports the following arguments. Duplicate header names are not allowed:
+
+* `name` - The name of the custom header. For custom request header insertion, when AWS WAF inserts the header into the request, it prefixes this name with `x-amzn-waf-` to avoid confusion with the headers that are already in the request. For example, for the header name `sample`, AWS WAF inserts the header `x-amzn-waf-sample`.
+* `value` - The value of the custom header.
+
+### Rule Label
+
+Each block supports the following arguments:
+
+* `name` - The label string.
+
+### Statement
+
+The processing guidance for a Rule, used by AWS WAF to determine whether a web request matches the rule. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statements-list.html) for more information.
+
+-> **NOTE:** Although the `statement` block is recursive, currently only 3 levels are supported.
+
+The `statement` block supports the following arguments:
+
+* `and_statement` - (Optional) A logical rule statement used to combine other rule statements with AND logic. See [AND Statement](#and-statement) below for details.
+* `byte_match_statement` - (Optional) A rule statement that defines a string match search for AWS WAF to apply to web requests. See [Byte Match Statement](#byte-match-statement) below for details.
+* `geo_match_statement` - (Optional) A rule statement used to identify web requests based on country of origin. See [GEO Match Statement](#geo-match-statement) below for details.
+* `label_match_statement` - (Optional) A rule statement that defines a string match search against labels that have been added to the web request by rules that have already run in the web ACL. See [Label Match Statement](#label-match-statement) below for details.
+* `ip_set_reference_statement` - (Optional) A rule statement used to detect web requests coming from particular IP addresses or address ranges. See [IP Set Reference Statement](#ip-set-reference-statement) below for details.
+* `not_statement` - (Optional) A logical rule statement used to negate the results of another rule statement. See [NOT Statement](#not-statement) below for details.
+* `or_statement` - (Optional) A logical rule statement used to combine other rule statements with OR logic. See [OR Statement](#or-statement) below for details.
+* `rate_based_statement` - (Optional) A rate-based rule tracks the rate of requests for each originating IP address, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any 5-minute time span. This statement cannot be nested. See [Rate Based Statement](#rate-based-statement) below for details.
+* `regex_match_statement` - (Optional) A rule statement used to search web request components for a match against a single regular expression. See [Regex Match Statement](#regex-match-statement) below for details.
+* `regex_pattern_set_reference_statement` - (Optional) A rule statement used to search web request components for matches with regular expressions. See [Regex Pattern Set Reference Statement](#regex-pattern-set-reference-statement) below for details.
+* `size_constraint_statement` - (Optional) A rule statement that compares a number of bytes against the size of a request component, using a comparison operator, such as greater than (>) or less than (<). See [Size Constraint Statement](#size-constraint-statement) below for more details.
+* `sqli_match_statement` - (Optional) An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. See [SQL Injection Match Statement](#sql-injection-match-statement) below for details.
+* `xss_match_statement` - (Optional) A rule statement that defines a cross-site scripting (XSS) match search for AWS WAF to apply to web requests. See [XSS Match Statement](#xss-match-statement) below for details.
+
+### AND Statement
+
+A logical rule statement used to combine other rule statements with `AND` logic. You provide more than one `statement` within the `and_statement`.
+
+The `and_statement` block supports the following arguments:
+
+* `statement` - (Required) The statements to combine with `AND` logic. You can use any statements that can be nested. See [Statement](#statement) above for details.
+
+### Byte Match Statement
+
+The byte match statement provides the bytes to search for, the location in requests that you want AWS WAF to search, and other settings. The bytes to search for are typically a string that corresponds with ASCII characters.
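+
+For example, the following is a minimal, hand-written sketch (not `cdktf convert` output) of a rule that blocks requests whose lowercased URI path contains `/admin`. The nested class names are assumed to follow the generated binding naming pattern used in the examples above; the exact names come from running `cdktf get`:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+# Provider bindings are generated by running `cdktf get`.
+from imports.aws.wafv2_rule_group import Wafv2RuleGroup
+
+class ByteMatchSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # NOTE: nested class names below are assumed from the generated
+        # naming pattern; the capacity value is illustrative only.
+        Wafv2RuleGroup(self, "byte_match_example",
+            capacity=30,
+            name="byte-match-example",
+            rule=[Wafv2RuleGroupRule(
+                action=Wafv2RuleGroupRuleAction(
+                    block=Wafv2RuleGroupRuleActionBlock()
+                ),
+                name="block-admin-paths",
+                priority=1,
+                statement=Wafv2RuleGroupRuleStatement(
+                    byte_match_statement=Wafv2RuleGroupRuleStatementByteMatchStatement(
+                        field_to_match=Wafv2RuleGroupRuleStatementByteMatchStatementFieldToMatch(
+                            uri_path=Wafv2RuleGroupRuleStatementByteMatchStatementFieldToMatchUriPath()
+                        ),
+                        positional_constraint="CONTAINS",
+                        search_string="/admin",
+                        text_transformation=[Wafv2RuleGroupRuleStatementByteMatchStatementTextTransformation(
+                            priority=0,
+                            type="LOWERCASE"
+                        )
+                        ]
+                    )
+                ),
+                visibility_config=Wafv2RuleGroupRuleVisibilityConfig(
+                    cloudwatch_metrics_enabled=False,
+                    metric_name="block-admin-paths",
+                    sampled_requests_enabled=False
+                )
+            )
+            ],
+            scope="REGIONAL",
+            visibility_config=Wafv2RuleGroupVisibilityConfig(
+                cloudwatch_metrics_enabled=False,
+                metric_name="byte-match-example",
+                sampled_requests_enabled=False
+            )
+        )
+```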
+
+The `byte_match_statement` block supports the following arguments:
+
+* `field_to_match` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details.
+* `positional_constraint` - (Required) The area within the portion of a web request that you want AWS WAF to search for `search_string`. Valid values include the following: `EXACTLY`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CONTAINS_WORD`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchStatement.html) for more information.
+* `search_string` - (Required) A string value that you want AWS WAF to search for. AWS WAF searches only in the part of web requests that you designate for inspection in `field_to_match`. The maximum length of the value is 50 bytes.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection.
+ At least one transformation is required.
+ See [Text Transformation](#text-transformation) below for details.
+
+### GEO Match Statement
+
+The `geo_match_statement` block supports the following arguments:
+
+* `country_codes` - (Required) An array of two-character country codes, for example, [ "US", "CN" ], from the alpha-2 country ISO codes of the ISO 3166 international standard. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchStatement.html) for valid values.
+* `forwarded_ip_config` - (Optional) The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [Forwarded IP Config](#forwarded-ip-config) below for details.
+
+### Label Match Statement
+
+The `label_match_statement` block supports the following arguments:
+
+* `scope` - (Required) Specify whether you want to match using the label name or just the namespace. Valid values are `LABEL` or `NAMESPACE`.
+* `key` - (Required) The string to match against.
+
+### IP Set Reference Statement
+
+A rule statement used to detect web requests coming from particular IP addresses or address ranges. To use this, create an `aws_wafv2_ip_set` that specifies the addresses you want to detect, then use the ARN of that set in this statement.
+
+The `ip_set_reference_statement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the IP Set that this statement references.
+* `ip_set_forwarded_ip_config` - (Optional) The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [IPSet Forwarded IP Config](#ipset-forwarded-ip-config) below for more details.
+
+### NOT Statement
+
+A logical rule statement used to negate the results of another rule statement. You provide one `statement` within the `not_statement`.
+
+The `not_statement` block supports the following arguments:
+
+* `statement` - (Required) The statement to negate. You can use any statement that can be nested. See [Statement](#statement) above for details.
+
+### OR Statement
+
+A logical rule statement used to combine other rule statements with `OR` logic. You provide more than one `statement` within the `or_statement`.
+
+The `or_statement` block supports the following arguments:
+
+* `statement` - (Required) The statements to combine with `OR` logic. You can use any statements that can be nested. See [Statement](#statement) above for details.
+
+### Rate Based Statement
+
+A rate-based rule tracks the rate of requests for each originating IP address, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any 5-minute time span. You can use this to put a temporary block on requests from an IP address that is sending excessive requests. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedStatement.html) for more information.
+
+You can't nest a `rate_based_statement`, for example for use inside a `not_statement` or `or_statement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `rate_based_statement` block supports the following arguments:
+
+* `aggregate_key_type` - (Optional) Setting that indicates how to aggregate the request counts. Valid values include: `CONSTANT`, `FORWARDED_IP` or `IP`. Default: `IP`.
+* `forwarded_ip_config` - (Optional) The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. If `aggregate_key_type` is set to `FORWARDED_IP`, this block is required. See [Forwarded IP Config](#forwarded-ip-config) below for details.
+* `limit` - (Required) The limit on requests per 5-minute period for a single originating IP address.
+* `scope_down_statement` - (Optional) An optional nested statement that narrows the scope of the rate-based statement to matching web requests. This can be any nestable statement, and you can nest statements at any level below this scope-down statement. See [Statement](#statement) above for details. If `aggregate_key_type` is set to `CONSTANT`, this block is required.
+
+### Regex Match Statement
+
+A rule statement used to search web request components for a match against a single regular expression.
+
+The `regex_match_statement` block supports the following arguments:
+
+* `regex_string` - (Required) The string representing the regular expression. Minimum of `1` and maximum of `512` characters.
+* `field_to_match` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection.
+ At least one transformation is required.
+ See [Text Transformation](#text-transformation) below for details.
+
+### Regex Pattern Set Reference Statement
+
+A rule statement used to search web request components for matches with regular expressions. To use this, create an `aws_wafv2_regex_pattern_set` that specifies the expressions that you want to detect, then use the ARN of that set in this statement. A web request matches the pattern set rule statement if the request component matches any of the patterns in the set.
+
+The `regex_pattern_set_reference_statement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the Regex Pattern Set that this statement references.
+* `field_to_match` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection.
+ At least one transformation is required.
+ See [Text Transformation](#text-transformation) below for details.
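+
+As a focused illustration of that create-then-reference linkage, here is a minimal, hand-written sketch (not `cdktf convert` output) that creates a pattern set and references its ARN from a rule. The nested class names are assumed to follow the generated binding naming pattern shown in the Complex example above:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+# Provider bindings are generated by running `cdktf get`.
+from imports.aws.wafv2_regex_pattern_set import Wafv2RegexPatternSet
+from imports.aws.wafv2_rule_group import Wafv2RuleGroup
+
+class RegexReferenceSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # The pattern set is a separate resource; the rule group only
+        # references it by ARN.
+        blocked_agents = Wafv2RegexPatternSet(self, "blocked_agents",
+            name="blocked-agents",
+            regular_expression=[Wafv2RegexPatternSetRegularExpression(
+                regex_string="^badbot/.*"
+            )
+            ],
+            scope="REGIONAL"
+        )
+        # NOTE: nested class names below are assumed from the generated
+        # naming pattern; the capacity value is illustrative only.
+        Wafv2RuleGroup(self, "example",
+            capacity=50,
+            name="regex-reference-example",
+            rule=[Wafv2RuleGroupRule(
+                action=Wafv2RuleGroupRuleAction(
+                    block=Wafv2RuleGroupRuleActionBlock()
+                ),
+                name="block-bad-user-agents",
+                priority=1,
+                statement=Wafv2RuleGroupRuleStatement(
+                    regex_pattern_set_reference_statement=Wafv2RuleGroupRuleStatementRegexPatternSetReferenceStatement(
+                        arn=blocked_agents.arn,
+                        field_to_match=Wafv2RuleGroupRuleStatementRegexPatternSetReferenceStatementFieldToMatch(
+                            single_header=Wafv2RuleGroupRuleStatementRegexPatternSetReferenceStatementFieldToMatchSingleHeader(
+                                name="user-agent"
+                            )
+                        ),
+                        text_transformation=[Wafv2RuleGroupRuleStatementRegexPatternSetReferenceStatementTextTransformation(
+                            priority=0,
+                            type="LOWERCASE"
+                        )
+                        ]
+                    )
+                ),
+                visibility_config=Wafv2RuleGroupRuleVisibilityConfig(
+                    cloudwatch_metrics_enabled=False,
+                    metric_name="block-bad-user-agents",
+                    sampled_requests_enabled=False
+                )
+            )
+            ],
+            scope="REGIONAL",
+            visibility_config=Wafv2RuleGroupVisibilityConfig(
+                cloudwatch_metrics_enabled=False,
+                metric_name="regex-reference-example",
+                sampled_requests_enabled=False
+            )
+        )
+```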
+
+### Size Constraint Statement
+
+A rule statement that uses a comparison operator to compare a number of bytes against the size of a request component. AWS WAFv2 inspects up to the first 8192 bytes (8 KB) of a request body, and when inspecting the request URI path, the slash `/` in the URI counts as one character.
+
+The `size_constraint_statement` block supports the following arguments:
+
+* `comparison_operator` - (Required) The operator to use to compare the request part to the size setting. Valid values include: `EQ`, `NE`, `LE`, `LT`, `GE`, or `GT`.
+* `field_to_match` - (Optional) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details.
+* `size` - (Required) The size, in bytes, to compare to the request part, after any transformations. Valid values are integers between 0 and 21474836480, inclusive.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection.
+ At least one transformation is required.
+ See [Text Transformation](#text-transformation) below for details.
+
+### SQL Injection Match Statement
+
+An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. Later in the process, when you create a web ACL, you specify whether to allow or block requests that appear to contain malicious SQL code.
+
+The `sqli_match_statement` block supports the following arguments:
+
+* `field_to_match` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection.
+ At least one transformation is required.
+ See [Text Transformation](#text-transformation) below for details.
+
+### XSS Match Statement
+
+The XSS match statement provides the location in requests that you want AWS WAF to search and text transformations to use on the search area before AWS WAF searches for character sequences that are likely to be malicious strings.
+
+The `xss_match_statement` block supports the following arguments:
+
+* `field_to_match` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection.
+ At least one transformation is required.
+ See [Text Transformation](#text-transformation) below for details.
+
+### Field to Match
+
+The part of a web request that you want AWS WAF to inspect. Include the single `field_to_match` type that you want to inspect, with additional specifications as needed, according to the type. You specify a single request component in `field_to_match` for each rule statement that requires it. To inspect more than one component of a web request, create a separate rule statement for each component. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-fields.html#waf-rule-statement-request-component) for more details.
+
+The `field_to_match` block supports the following arguments:
+
+~> **NOTE:** Only one of `all_query_arguments`, `body`, `cookies`, `headers`, `json_body`, `method`, `query_string`, `single_header`, `single_query_argument`, or `uri_path` can be specified.
+An empty configuration block `{}` should be used when specifying `all_query_arguments`, `body`, `method`, or `query_string` attributes.
+
+* `all_query_arguments` - (Optional) Inspect all query arguments.
+* `body` - (Optional) Inspect the request body, which immediately follows the request headers.
+* `cookies` - (Optional) Inspect the cookies in the web request. See [Cookies](#cookies) below for details.
+* `headers` - (Optional) Inspect the request headers. See [Headers](#headers) below for details.
+* `json_body` - (Optional) Inspect the request body as JSON. See [JSON Body](#json-body) for details.
+* `method` - (Optional) Inspect the HTTP method. The method indicates the type of operation that the request is asking the origin to perform.
+* `query_string` - (Optional) Inspect the query string. This is the part of a URL that appears after a `?` character, if any.
+* `single_header` - (Optional) Inspect a single header. See [Single Header](#single-header) below for details.
+* `single_query_argument` - (Optional) Inspect a single query argument. See [Single Query Argument](#single-query-argument) below for details.
+* `uri_path` - (Optional) Inspect the request URI path. This is the part of a web request that identifies a resource, for example, `/images/daily-ad.jpg`.
+
+### Forwarded IP Config
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name. If the specified header isn't present in the request, AWS WAFv2 doesn't apply the rule to the web request at all.
+AWS WAFv2 only evaluates the first IP address found in the specified HTTP header.
+
+The `forwarded_ip_config` block supports the following arguments:
+
+* `fallback_behavior` - (Required) The match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `header_name` - (Required) The name of the HTTP header to use for the IP address.
+
+### IPSet Forwarded IP Config
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name.
+
+The `ip_set_forwarded_ip_config` block supports the following arguments:
+
+* `fallback_behavior` - (Required) The match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `header_name` - (Required) The name of the HTTP header to use for the IP address.
+* `position` - (Required) The position in the header to search for the IP address. Valid values include: `FIRST`, `LAST`, or `ANY`. If `ANY` is specified and the header contains more than 10 IP addresses, AWS WAFv2 inspects the last 10.
+
+### Headers
+
+Inspect the request headers.
+
+The `headers` block supports the following arguments:
+
+* `match_pattern` - (Required) The filter to use to identify the subset of headers to inspect in a web request. The `match_pattern` block supports only one of the following arguments:
+    * `all` - An empty configuration block that is used for inspecting all headers.
+    * `included_headers` - An array of strings that will be used for inspecting headers that have a key that matches one of the provided values.
+    * `excluded_headers` - An array of strings that will be used for inspecting headers that do not have a key that matches one of the provided values.
+* `match_scope` - (Required) The parts of the headers to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values include the following: `ALL`, `KEY`, `VALUE`.
+* `oversize_handling` - (Required) Oversize handling tells AWS WAF what to do with a web request when the request component that the rule inspects is over the limits. Valid values include the following: `CONTINUE`, `MATCH`, `NO_MATCH`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-oversize-handling.html) for more information.
+
+### JSON Body
+
+The `json_body` block supports the following arguments:
+
+* `invalid_fallback_behavior` - (Optional) What to do when JSON parsing fails. Defaults to evaluating up to the first parsing failure. Valid values are `EVALUATE_AS_STRING`, `MATCH` and `NO_MATCH`.
+* `match_pattern` - (Required) The patterns to look for in the JSON body. You must specify exactly one setting: either `all` or `included_paths`. See [JsonMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_JsonMatchPattern.html) for details.
+* `match_scope` - (Required) The parts of the JSON to match against using the `match_pattern`. Valid values are `ALL`, `KEY` and `VALUE`.
+* `oversize_handling` - (Optional) What to do if the body is larger than can be inspected. Valid values are `CONTINUE` (default), `MATCH` and `NO_MATCH`.
+
+### Single Header
+
+Inspect a single header. Provide the name of the header to inspect, for example, `User-Agent` or `Referer` (provided as lowercase strings).
+
+The `single_header` block supports the following arguments:
+
+* `name` - (Optional) The name of the header to inspect. This setting must be provided in lowercase characters.
+
+### Single Query Argument
+
+Inspect a single query argument. Provide the name of the query argument to inspect, such as `UserName` or `SalesRegion` (provided as lowercase strings).
+
+The `single_query_argument` block supports the following arguments:
+
+* `name` - (Optional) The name of the query argument to inspect. This setting must be provided in lowercase characters.
+
+### Cookies
+
+Inspect the cookies in the web request. You can specify the parts of the cookies to inspect and you can narrow the set of cookies to inspect by including or excluding specific keys.
+This is used to indicate the web request component to inspect, in the [FieldToMatch](https://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) specification.
+
+The `cookies` block supports the following arguments:
+
+* `match_pattern` - (Required) The filter to use to identify the subset of cookies to inspect in a web request. You must specify exactly one setting: either `all`, `included_cookies` or `excluded_cookies`. More details: [CookieMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_CookieMatchPattern.html)
+* `match_scope` - (Required) The parts of the cookies to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values: `ALL`, `KEY`, `VALUE`
+* `oversize_handling` - (Required) What AWS WAF should do if the cookies of the request are larger than AWS WAF can inspect. AWS WAF does not support inspecting the entire contents of request cookies when they exceed 8 KB (8192 bytes) or 200 total cookies.
+ The underlying host service forwards a maximum of 200 cookies and at most 8 KB of cookie contents to AWS WAF. Valid values: `CONTINUE`, `MATCH`, `NO_MATCH`
+
+### Text Transformation
+
+The `text_transformation` block supports the following arguments:
+
+* `priority` - (Required) The relative processing order for multiple transformations that are defined for a rule statement. AWS WAF processes all transformations, from lowest priority to highest, before inspecting the transformed content.
+* `type` - (Required) The transformation to apply. Refer to the Text Transformation [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_TextTransformation.html) for valid values and more details.
+
+### Visibility Configuration
+
+The `visibility_config` block supports the following arguments:
+
+* `cloudwatch_metrics_enabled` - (Required) A boolean indicating whether the associated resource sends metrics to CloudWatch. For the list of available metrics, see [AWS WAF Metrics](https://docs.aws.amazon.com/waf/latest/developerguide/monitoring-cloudwatch.html#waf-metrics).
+* `metric_name` - (Required, Forces new resource) A friendly name of the CloudWatch metric. The name can contain only alphanumeric characters (A-Z, a-z, 0-9), hyphens (-), and underscores (_), and must be between one and 128 characters long. It can't contain whitespace or metric names reserved for AWS WAF, for example `All` and `Default_Action`.
+* `sampled_requests_enabled` - (Required) A boolean indicating whether AWS WAF should store a sampling of the web requests that match the rules. You can view the sampled requests through the AWS WAF console.
+
+### Captcha Configuration
+
+The `captcha_config` block supports the following arguments:
+
+* `immunity_time_property` - (Optional) Defines custom immunity time. See [Immunity Time Property](#immunity-time-property) below for details.
+
+### Immunity Time Property
+
+The `immunity_time_property` block supports the following arguments:
+
+* `immunity_time` - (Optional) The amount of time, in seconds, that a CAPTCHA or challenge timestamp is considered valid by AWS WAF. The default setting is 300.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF rule group.
+* `arn` - The ARN of the WAF rule group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Rule Group using `ID/name/scope`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+```
+
+Using `terraform import`, import WAFv2 Rule Group using `ID/name/scope`.
For example:
+
+```console
+% terraform import aws_wafv2_rule_group.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/wafv2_web_acl.html.markdown b/website/docs/cdktf/python/r/wafv2_web_acl.html.markdown
new file mode 100644
index 00000000000..cdbc1706906
--- /dev/null
+++ b/website/docs/cdktf/python/r/wafv2_web_acl.html.markdown
@@ -0,0 +1,881 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_web_acl"
+description: |-
+  Creates a WAFv2 Web ACL resource.
+---
+
+
+
+# Resource: aws_wafv2_web_acl
+
+Creates a WAFv2 Web ACL resource.
+
+~> **Note** In `field_to_match` blocks, _e.g._, in `byte_match_statement`, the `body` block includes an optional argument `oversize_handling`. AWS indicates this argument will be required starting February 2023. To avoid configurations breaking when that change happens, treat the `oversize_handling` argument as **required** as soon as possible.
+
+## Example Usage
+
+This resource is based on `aws_wafv2_rule_group`. Check the documentation of the `aws_wafv2_rule_group` resource to see examples of the various available statements.
+
+### Managed Rule
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.wafv2_web_acl import Wafv2WebAcl
+class MyConvertedCode(TerraformStack):
+ def __init__(self, scope, name):
+ super().__init__(scope, name)
+ Wafv2WebAcl(self, "example",
+ default_action=Wafv2WebAclDefaultAction(
+ allow=Wafv2WebAclDefaultActionAllow()
+ ),
+ description="Example of a managed rule.",
+ name="managed-rule-example",
+ rule=[Wafv2WebAclRule(
+ name="rule-1",
+ override_action=Wafv2WebAclRuleOverrideAction(
+ count=Wafv2WebAclRuleOverrideActionCount()
+ ),
+ priority=1,
+ statement=Wafv2WebAclRuleStatement(
+ managed_rule_group_statement=Wafv2WebAclRuleStatementManagedRuleGroupStatement(
+ name="AWSManagedRulesCommonRuleSet",
+ rule_action_override=[Wafv2WebAclRuleStatementManagedRuleGroupStatementRuleActionOverride(
+ action_to_use=Wafv2WebAclRuleStatementManagedRuleGroupStatementRuleActionOverrideActionToUse(
+ count=Wafv2WebAclRuleStatementManagedRuleGroupStatementRuleActionOverrideActionToUseCount()
+ ),
+ name="SizeRestrictions_QUERYSTRING"
+ ), Wafv2WebAclRuleStatementManagedRuleGroupStatementRuleActionOverride(
+ action_to_use=Wafv2WebAclRuleStatementManagedRuleGroupStatementRuleActionOverrideActionToUse(
+ count=Wafv2WebAclRuleStatementManagedRuleGroupStatementRuleActionOverrideActionToUseCount()
+ ),
+ name="NoUserAgent_HEADER"
+ )
+ ],
+ scope_down_statement=Wafv2WebAclRuleStatementManagedRuleGroupStatementScopeDownStatement(
+ geo_match_statement=Wafv2WebAclRuleStatementManagedRuleGroupStatementScopeDownStatementGeoMatchStatement(
+ country_codes=["US", "NL"]
+ )
+ ),
+ vendor_name="AWS"
+ )
+ ),
+ visibility_config=Wafv2WebAclRuleVisibilityConfig(
+ cloudwatch_metrics_enabled=False,
+ metric_name="friendly-rule-metric-name",
+ sampled_requests_enabled=False
+ )
+ )
+ ],
+ scope="REGIONAL",
+ tags={
+ "Tag1": "Value1",
+ "Tag2": "Value2"
+ },
+ token_domains=["mywebsite.com", "myotherwebsite.com"],
+ visibility_config=Wafv2WebAclVisibilityConfig(
+ cloudwatch_metrics_enabled=False,
+ metric_name="friendly-metric-name",
+ sampled_requests_enabled=False
+ )
+ )
+```
+
+### 
Account Takeover Protection + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafv2_web_acl import Wafv2WebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Wafv2WebAcl(self, "atp-example", + default_action=Wafv2WebAclDefaultAction( + allow=Wafv2WebAclDefaultActionAllow() + ), + description="Example of a managed ATP rule.", + name="managed-atp-example", + rule=[Wafv2WebAclRule( + name="atp-rule-1", + override_action=Wafv2WebAclRuleOverrideAction( + count=Wafv2WebAclRuleOverrideActionCount() + ), + priority=1, + statement=Wafv2WebAclRuleStatement( + managed_rule_group_statement=Wafv2WebAclRuleStatementManagedRuleGroupStatement( + managed_rule_group_configs=[Wafv2WebAclRuleStatementManagedRuleGroupStatementManagedRuleGroupConfigs( + aws_managed_rules_atp_rule_set=Wafv2WebAclRuleStatementManagedRuleGroupStatementManagedRuleGroupConfigsAwsManagedRulesAtpRuleSet( + login_path="/api/1/signin", + request_inspection=Wafv2WebAclRuleStatementManagedRuleGroupStatementManagedRuleGroupConfigsAwsManagedRulesAtpRuleSetRequestInspection( + password_field=Wafv2WebAclRuleStatementManagedRuleGroupStatementManagedRuleGroupConfigsAwsManagedRulesAtpRuleSetRequestInspectionPasswordField( + identifier="/password" + ), + payload_type="JSON", + username_field=Wafv2WebAclRuleStatementManagedRuleGroupStatementManagedRuleGroupConfigsAwsManagedRulesAtpRuleSetRequestInspectionUsernameField( + identifier="/email" + ) + ), + response_inspection=Wafv2WebAclRuleStatementManagedRuleGroupStatementManagedRuleGroupConfigsAwsManagedRulesAtpRuleSetResponseInspection( + status_code=Wafv2WebAclRuleStatementManagedRuleGroupStatementManagedRuleGroupConfigsAwsManagedRulesAtpRuleSetResponseInspectionStatusCode( + failure_codes=[Token.as_number("403")], + success_codes=[Token.as_number("200")] + ) + ) + ) + ) + ], + name="AWSManagedRulesATPRuleSet", + vendor_name="AWS" + ) + ), + visibility_config=Wafv2WebAclRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-rule-metric-name", + sampled_requests_enabled=False + ) + ) + ], + scope="CLOUDFRONT", + visibility_config=Wafv2WebAclVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-metric-name", + sampled_requests_enabled=False + ) + ) +``` + +### Rate Based + +Rate-limit US and NL-based clients to 10,000 requests for every 5 minutes. + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.wafv2_web_acl import Wafv2WebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + Wafv2WebAcl(self, "example", + default_action=Wafv2WebAclDefaultAction( + allow=Wafv2WebAclDefaultActionAllow() + ), + description="Example of a Cloudfront rate based statement.", + name="rate-based-example", + rule=[Wafv2WebAclRule( + action=Wafv2WebAclRuleAction( + block=Wafv2WebAclRuleActionBlock() + ), + name="rule-1", + priority=1, + statement=Wafv2WebAclRuleStatement( + rate_based_statement=Wafv2WebAclRuleStatementRateBasedStatement( + aggregate_key_type="IP", + limit=10000, + scope_down_statement=Wafv2WebAclRuleStatementRateBasedStatementScopeDownStatement( + geo_match_statement=Wafv2WebAclRuleStatementRateBasedStatementScopeDownStatementGeoMatchStatement( + country_codes=["US", "NL"] + ) + ) + ) + ), + visibility_config=Wafv2WebAclRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-rule-metric-name", + sampled_requests_enabled=False + ) + ) + ], + scope="CLOUDFRONT", + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + }, + visibility_config=Wafv2WebAclVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-metric-name", + sampled_requests_enabled=False + ) + ) +``` + +### Rule Group Reference + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.wafv2_rule_group import Wafv2RuleGroup +from imports.aws.wafv2_web_acl import Wafv2WebAcl +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Wafv2RuleGroup(self, "example", + capacity=10, + name="example-rule-group", + rule=[Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + count=Wafv2RuleGroupRuleActionCount() + ), + name="rule-1", + priority=1, + statement=Wafv2RuleGroupRuleStatement( + geo_match_statement=Wafv2RuleGroupRuleStatementGeoMatchStatement( + country_codes=["NL"] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-rule-metric-name", + sampled_requests_enabled=False + ) + ), Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + allow=Wafv2RuleGroupRuleActionAllow() + ), + name="rule-to-exclude-a", + priority=10, + statement=Wafv2RuleGroupRuleStatement( + geo_match_statement=Wafv2RuleGroupRuleStatementGeoMatchStatement( + country_codes=["US"] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-rule-metric-name", + sampled_requests_enabled=False + ) + ), Wafv2RuleGroupRule( + action=Wafv2RuleGroupRuleAction( + allow=Wafv2RuleGroupRuleActionAllow() + ), + name="rule-to-exclude-b", + priority=15, + statement=Wafv2RuleGroupRuleStatement( + geo_match_statement=Wafv2RuleGroupRuleStatementGeoMatchStatement( + country_codes=["GB"] + ) + ), + visibility_config=Wafv2RuleGroupRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-rule-metric-name", + sampled_requests_enabled=False + ) + ) + ], + scope="REGIONAL", + visibility_config=Wafv2RuleGroupVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-metric-name", + sampled_requests_enabled=False + ) + ) + Wafv2WebAcl(self, "test", + 
default_action=Wafv2WebAclDefaultAction( + block=Wafv2WebAclDefaultActionBlock() + ), + name="rule-group-example", + rule=[Wafv2WebAclRule( + name="rule-1", + override_action=Wafv2WebAclRuleOverrideAction( + count=Wafv2WebAclRuleOverrideActionCount() + ), + priority=1, + statement=Wafv2WebAclRuleStatement( + rule_group_reference_statement=Wafv2WebAclRuleStatementRuleGroupReferenceStatement( + arn=example.arn, + rule_action_override=[Wafv2WebAclRuleStatementRuleGroupReferenceStatementRuleActionOverride( + action_to_use=Wafv2WebAclRuleStatementRuleGroupReferenceStatementRuleActionOverrideActionToUse( + count=Wafv2WebAclRuleStatementRuleGroupReferenceStatementRuleActionOverrideActionToUseCount() + ), + name="rule-to-exclude-b" + ), Wafv2WebAclRuleStatementRuleGroupReferenceStatementRuleActionOverride( + action_to_use=Wafv2WebAclRuleStatementRuleGroupReferenceStatementRuleActionOverrideActionToUse( + count=Wafv2WebAclRuleStatementRuleGroupReferenceStatementRuleActionOverrideActionToUseCount() + ), + name="rule-to-exclude-a" + ) + ] + ) + ), + visibility_config=Wafv2WebAclRuleVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-rule-metric-name", + sampled_requests_enabled=False + ) + ) + ], + scope="REGIONAL", + tags={ + "Tag1": "Value1", + "Tag2": "Value2" + }, + visibility_config=Wafv2WebAclVisibilityConfig( + cloudwatch_metrics_enabled=False, + metric_name="friendly-metric-name", + sampled_requests_enabled=False + ) + ) +``` + +## Argument Reference + +This resource supports the following arguments: + +* `association_config` - (Optional) Specifies custom configurations for the associations between the web ACL and protected resources. See [`association_config`](#association_config-block) below for details. +* `custom_response_body` - (Optional) Defines custom response bodies that can be referenced by `custom_response` actions. See [`custom_response_body`](#custom_response_body-block) below for details. +* `default_action` - (Required) Action to perform if none of the `rules` contained in the WebACL match. See [`default_action`](#default_action-block) below for details. +* `description` - (Optional) Friendly description of the WebACL. +* `name` - (Required) Friendly name of the WebACL. +* `rule` - (Optional) Rule blocks used to identify the web requests that you want to `allow`, `block`, or `count`. See [`rule`](#rule-block) below for details. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider. +* `tags` - (Optional) Map of key-value pairs to associate with the resource. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `token_domains` - (Optional) Specifies the domains that AWS WAF should accept in a web request token. This enables the use of tokens across multiple protected websites. When AWS WAF provides a token, it uses the domain of the AWS resource that the web ACL is protecting. If you don't specify a list of token domains, AWS WAF accepts tokens only for the domain of the protected resource. With a token domain list, AWS WAF accepts the resource's host domain plus all domains in the token domain list, including their prefixed subdomains. 
+* `visibility_config` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [`visibility_config`](#visibility_config-block) below for details.
+
+### `association_config` Block
+
+The `association_config` block supports the following arguments:
+
+* `request_body` - (Optional) Customizes the request body that your protected resource forwards to AWS WAF for inspection. See [`request_body`](#request_body-block) below for details.
+
+### `custom_response_body` Block
+
+Each `custom_response_body` block supports the following arguments:
+
+* `key` - (Required) Unique key identifying the custom response body. This is referenced by the `custom_response_body_key` argument in the [`custom_response`](#custom_response-block) block.
+* `content` - (Required) Payload of the custom response.
+* `content_type` - (Required) Type of content in the payload that you are defining in the `content` argument. Valid values are `TEXT_PLAIN`, `TEXT_HTML`, or `APPLICATION_JSON`.
+
+### `default_action` Block
+
+The `default_action` block supports the following arguments:
+
+~> **Note** One of `allow` or `block`, expressed as an empty configuration block `{}`, is required when specifying a `default_action`.
+
+* `allow` - (Optional) Specifies that AWS WAF should allow requests by default. See [`allow`](#allow-block) below for details.
+* `block` - (Optional) Specifies that AWS WAF should block requests by default. See [`block`](#block-block) below for details.
+
+### `rule` Block
+
+~> **Note** One of `action` or `override_action` is required when specifying a rule.
+
+Each `rule` supports the following arguments:
+
+* `action` - (Optional) Action that AWS WAF should take on a web request when it matches the rule's statement. This is used only for rules whose **statements do not reference a rule group**. See [`action`](#action-block) for details.
+* `captcha_config` - (Optional) Specifies how AWS WAF should handle CAPTCHA evaluations. See [`captcha_config`](#captcha_config-block) below for details.
+* `name` - (Required) Friendly name of the rule. Note that the provider assumes that rules with names matching this pattern, `^ShieldMitigationRuleGroup___.*`, are AWS-added for [automatic application layer DDoS mitigation activities](https://docs.aws.amazon.com/waf/latest/developerguide/ddos-automatic-app-layer-response-rg.html). Such rules will be ignored by the provider unless you explicitly include them in your configuration (for example, by using the AWS CLI to discover their properties and creating matching configuration). However, since these rules are owned and managed by AWS, you may get permission errors.
+* `override_action` - (Optional) Override action to apply to the rules in a rule group. Used only for rule **statements that reference a rule group**, like `rule_group_reference_statement` and `managed_rule_group_statement`. See [`override_action`](#override_action-block) below for details.
+* `priority` - (Required) If you define more than one Rule in a WebACL, AWS WAF evaluates each request against the `rules` in order based on the value of `priority`. AWS WAF processes rules with lower priority first.
+* `rule_label` - (Optional) Labels to apply to web requests that match the rule match statement. See [`rule_label`](#rule_label-block) below for details.
+* `statement` - (Required) The AWS WAF processing statement for the rule, for example `byte_match_statement` or `geo_match_statement`. See [`statement`](#statement-block) below for details.
+* `visibility_config` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [`visibility_config`](#visibility_config-block) below for details.
+
+### `action` Block
+
+The `action` block supports the following arguments:
+
+~> **Note** One of `allow`, `block`, or `count` is required when specifying an `action`.
+
+* `allow` - (Optional) Instructs AWS WAF to allow the web request. See [`allow`](#allow-block) below for details.
+* `block` - (Optional) Instructs AWS WAF to block the web request. See [`block`](#block-block) below for details.
+* `captcha` - (Optional) Instructs AWS WAF to run a Captcha check against the web request. See [`captcha`](#captcha-block) below for details.
+* `challenge` - (Optional) Instructs AWS WAF to run a check against the request to verify that the request is coming from a legitimate client session. See [`challenge`](#challenge-block) below for details.
+* `count` - (Optional) Instructs AWS WAF to count the web request and allow it. See [`count`](#count-block) below for details.
+
+### `override_action` Block
+
+The `override_action` block supports the following arguments:
+
+~> **Note** One of `count` or `none`, expressed as an empty configuration block `{}`, is required when specifying an `override_action`.
+
+* `count` - (Optional) Override the rule action setting to count (i.e., only count matches). Configured as an empty block `{}`.
+* `none` - (Optional) Don't override the rule action setting. Configured as an empty block `{}`.
+
+### `allow` Block
+
+The `allow` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [`custom_request_handling`](#custom_request_handling-block) below for details.
+
+### `block` Block
+
+The `block` block supports the following arguments:
+
+* `custom_response` - (Optional) Defines a custom response for the web request. See [`custom_response`](#custom_response-block) below for details.
+
+### `captcha` Block
+
+The `captcha` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [`custom_request_handling`](#custom_request_handling-block) below for details.
+
+### `challenge` Block
+
+The `challenge` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [`custom_request_handling`](#custom_request_handling-block) below for details.
+
+### `count` Block
+
+The `count` block supports the following arguments:
+
+* `custom_request_handling` - (Optional) Defines custom handling for the web request. See [`custom_request_handling`](#custom_request_handling-block) below for details.
+
+### `custom_request_handling` Block
+
+The `custom_request_handling` block supports the following arguments:
+
+* `insert_header` - (Required) The `insert_header` blocks used to define HTTP headers added to the request. See [`insert_header`](#insert_header-block) below for details.
+
+### `insert_header` Block
+
+Each `insert_header` block supports the following arguments. Duplicate header names are not allowed:
+
+* `name` - Name of the custom header. For custom request header insertion, when AWS WAF inserts the header into the request, it prefixes this name with `x-amzn-waf-` to avoid confusion with the headers that are already in the request. For example, for the header name `sample`, AWS WAF inserts the header `x-amzn-waf-sample`.
+* `value` - Value of the custom header.
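+
+To make the relationship between an action, `custom_request_handling`, and `insert_header` concrete, here is a minimal, hand-written sketch (not `cdktf convert` output) of a rule that counts requests from a given country and tags them with a custom header. The nested class names are assumed to follow the generated binding naming pattern used elsewhere on this page:
+
+```python
+from constructs import Construct
+from cdktf import TerraformStack
+# Provider bindings are generated by running `cdktf get`.
+from imports.aws.wafv2_web_acl import Wafv2WebAcl
+
+class CustomHandlingSketch(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # NOTE: nested class names below are assumed from the generated
+        # naming pattern; run `cdktf get` for the actual bindings.
+        Wafv2WebAcl(self, "example",
+            default_action=Wafv2WebAclDefaultAction(
+                allow=Wafv2WebAclDefaultActionAllow()
+            ),
+            name="custom-handling-example",
+            rule=[Wafv2WebAclRule(
+                action=Wafv2WebAclRuleAction(
+                    count=Wafv2WebAclRuleActionCount(
+                        custom_request_handling=Wafv2WebAclRuleActionCountCustomRequestHandling(
+                            # AWS WAF forwards this to the origin as the
+                            # header "x-amzn-waf-origin-country".
+                            insert_header=[Wafv2WebAclRuleActionCountCustomRequestHandlingInsertHeader(
+                                name="origin-country",
+                                value="US"
+                            )
+                            ]
+                        )
+                    )
+                ),
+                name="count-us-requests",
+                priority=1,
+                statement=Wafv2WebAclRuleStatement(
+                    geo_match_statement=Wafv2WebAclRuleStatementGeoMatchStatement(
+                        country_codes=["US"]
+                    )
+                ),
+                visibility_config=Wafv2WebAclRuleVisibilityConfig(
+                    cloudwatch_metrics_enabled=False,
+                    metric_name="count-us-requests",
+                    sampled_requests_enabled=False
+                )
+            )
+            ],
+            scope="REGIONAL",
+            visibility_config=Wafv2WebAclVisibilityConfig(
+                cloudwatch_metrics_enabled=False,
+                metric_name="custom-handling-example",
+                sampled_requests_enabled=False
+            )
+        )
+```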
+ +### `custom_response` Block + +The `custom_response` block supports the following arguments: + +* `custom_response_body_key` - (Optional) References the response body that you want AWS WAF to return to the web request client. This must reference a `key` defined in a `custom_response_body` block of this resource. +* `response_code` - (Required) The HTTP status code to return to the client. +* `response_header` - (Optional) The `response_header` blocks used to define the HTTP response headers added to the response. See [`response_header`](#response_header-block) below for details. + +### `response_header` Block + +Each `response_header` block supports the following arguments. Duplicate header names are not allowed: + +* `name` - Name of the custom header. For custom request header insertion, when AWS WAF inserts the header into the request, it prefixes this name `x-amzn-waf-`, to avoid confusion with the headers that are already in the request. For example, for the header name `sample`, AWS WAF inserts the header `x-amzn-waf-sample`. +* `value` - Value of the custom header. + +### `rule_label` Block + +Each block supports the following arguments: + +* `name` - Label string. + +### `statement` Block + +The processing guidance for a Rule, used by AWS WAF to determine whether a web request matches the rule. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statements-list.html) for more information. + +-> **Note** Although the `statement` block is recursive, currently only 3 levels are supported. + +The `statement` block supports the following arguments: + +* `and_statement` - (Optional) Logical rule statement used to combine other rule statements with AND logic. See [`and_statement`](#and_statement-block) below for details. +* `byte_match_statement` - (Optional) Rule statement that defines a string match search for AWS WAF to apply to web requests. See [`byte_match_statement`](#byte_match_statement-block) below for details. +* `geo_match_statement` - (Optional) Rule statement used to identify web requests based on country of origin. See [`geo_match_statement`](#geo_match_statement-block) below for details. +* `ip_set_reference_statement` - (Optional) Rule statement used to detect web requests coming from particular IP addresses or address ranges. See [`ip_set_reference_statement`](#ip_set_reference_statement-block) below for details. +* `label_match_statement` - (Optional) Rule statement that defines a string match search against labels that have been added to the web request by rules that have already run in the web ACL. See [`label_match_statement`](#label_match_statement-block) below for details. +* `managed_rule_group_statement` - (Optional) Rule statement used to run the rules that are defined in a managed rule group. This statement can not be nested. See [`managed_rule_group_statement`](#managed_rule_group_statement-block) below for details. +* `not_statement` - (Optional) Logical rule statement used to negate the results of another rule statement. See [`not_statement`](#not_statement-block) below for details. +* `or_statement` - (Optional) Logical rule statement used to combine other rule statements with OR logic. See [`or_statement`](#or_statement-block) below for details. +* `rate_based_statement` - (Optional) Rate-based rule tracks the rate of requests for each originating `IP address`, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any `5-minute` time span. 
+* `regex_match_statement` - (Optional) Rule statement used to search web request components for a match against a single regular expression. See [`regex_match_statement`](#regex_match_statement-block) below for details.
+* `regex_pattern_set_reference_statement` - (Optional) Rule statement used to search web request components for matches with regular expressions. See [`regex_pattern_set_reference_statement`](#regex_pattern_set_reference_statement-block) below for details.
+* `rule_group_reference_statement` - (Optional) Rule statement used to run the rules that are defined in a WAFv2 Rule Group. See [`rule_group_reference_statement`](#rule_group_reference_statement-block) below for details.
+* `size_constraint_statement` - (Optional) Rule statement that compares a number of bytes against the size of a request component, using a comparison operator, such as greater than (>) or less than (<). See [`size_constraint_statement`](#size_constraint_statement-block) below for more details.
+* `sqli_match_statement` - (Optional) An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. See [`sqli_match_statement`](#sqli_match_statement-block) below for details.
+* `xss_match_statement` - (Optional) Rule statement that defines a cross-site scripting (XSS) match search for AWS WAF to apply to web requests. See [`xss_match_statement`](#xss_match_statement-block) below for details.
+
+### `and_statement` Block
+
+A logical rule statement used to combine other rule statements with `AND` logic. You provide more than one `statement` within the `and_statement`.
+
+The `and_statement` block supports the following arguments:
+
+* `statement` - (Required) Statements to combine with `AND` logic. You can use any statements that can be nested. See [`statement`](#statement-block) above for details.
+
+### `byte_match_statement` Block
+
+The byte match statement provides the bytes to search for, the location in requests that you want AWS WAF to search, and other settings. The bytes to search for are typically a string that corresponds with ASCII characters.
+
+The `byte_match_statement` block supports the following arguments; a short sketch follows the list:
+
+* `field_to_match` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`field_to_match`](#field_to_match-block) below for details.
+* `positional_constraint` - (Required) Area within the portion of a web request that you want AWS WAF to search for `search_string`. Valid values include the following: `EXACTLY`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CONTAINS_WORD`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchStatement.html) for more information.
+* `search_string` - (Required) String value that you want AWS WAF to search for. AWS WAF searches only in the part of web requests that you designate for inspection in `field_to_match`. The maximum length of the value is 50 bytes.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`text_transformation`](#text_transformation-block) below for details.
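+
+Because the `byte_match_statement` arguments interact (the search applies only to the chosen `field_to_match`, after every `text_transformation` has run), a small hedged sketch of just the statement value may help. The struct names are assumed from the generated binding's naming convention, and the variable name is illustrative:
+
+```python
+from imports.aws.wafv2_web_acl import (
+    Wafv2WebAclRuleStatement,
+    Wafv2WebAclRuleStatementByteMatchStatement,
+    Wafv2WebAclRuleStatementByteMatchStatementFieldToMatch,
+    Wafv2WebAclRuleStatementByteMatchStatementFieldToMatchUriPath,
+    Wafv2WebAclRuleStatementByteMatchStatementTextTransformation,
+)
+
+# Match requests whose URI path starts with /admin, compared after lowercasing.
+admin_path_statement = Wafv2WebAclRuleStatement(
+    byte_match_statement=Wafv2WebAclRuleStatementByteMatchStatement(
+        positional_constraint="STARTS_WITH",
+        search_string="/admin",
+        field_to_match=Wafv2WebAclRuleStatementByteMatchStatementFieldToMatch(
+            uri_path=Wafv2WebAclRuleStatementByteMatchStatementFieldToMatchUriPath()
+        ),
+        # Transformations run in priority order before the search_string comparison.
+        text_transformation=[Wafv2WebAclRuleStatementByteMatchStatementTextTransformation(
+            priority=0,
+            type="LOWERCASE"
+        )]
+    )
+)
+```
+
+The resulting value can be assigned to a rule's `statement` argument in the same way as any other statement.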
+
+### `geo_match_statement` Block
+
+The `geo_match_statement` block supports the following arguments:
+
+* `country_codes` - (Required) Array of two-character country codes, for example, [ "US", "CN" ], from the alpha-2 country codes of the ISO 3166 international standard. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchStatement.html) for valid values.
+* `forwarded_ip_config` - (Optional) Configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [`forwarded_ip_config`](#forwarded_ip_config-block) below for details.
+
+### `ip_set_reference_statement` Block
+
+A rule statement used to detect web requests coming from particular IP addresses or address ranges. To use this, create an `aws_wafv2_ip_set` that specifies the addresses you want to detect, then use the ARN of that set in this statement.
+
+The `ip_set_reference_statement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the IP Set that this statement references.
+* `ip_set_forwarded_ip_config` - (Optional) Configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [`ip_set_forwarded_ip_config`](#ip_set_forwarded_ip_config-block) below for more details.
+
+### `label_match_statement` Block
+
+The `label_match_statement` block supports the following arguments:
+
+* `scope` - (Required) Specify whether you want to match using the label name or just the namespace. Valid values are `LABEL` or `NAMESPACE`.
+* `key` - (Required) String to match against.
+
+### `managed_rule_group_statement` Block
+
+A rule statement used to run the rules that are defined in a managed rule group.
+
+You can't nest a `managed_rule_group_statement`, for example for use inside a `not_statement` or `or_statement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `managed_rule_group_statement` block supports the following arguments:
+
+* `name` - (Required) Name of the managed rule group.
+* `rule_action_override` - (Optional) Action settings to use in the place of the rule actions that are configured inside the rule group. You specify one override for each rule whose action you want to change. See [`rule_action_override`](#rule_action_override-block) below for details.
+* `managed_rule_group_configs` - (Optional) Additional information that's used by a managed rule group. Only one rule attribute is allowed in each config. See [`managed_rule_group_configs`](#managed_rule_group_configs-block) for more details.
+* `scope_down_statement` - (Optional) Narrows the scope of the statement to matching web requests. This can be any nestable statement, and you can nest statements at any level below this scope-down statement. See [`statement`](#statement-block) above for details.
+* `vendor_name` - (Required) Name of the managed rule group vendor.
+* `version` - (Optional) Version of the managed rule group, for example `Version_1.0` or `Version_1.1`. To use the default version, omit this argument.
+
+### `not_statement` Block
+
+A logical rule statement used to negate the results of another rule statement. You provide one `statement` within the `not_statement`.
+
+The `not_statement` block supports the following arguments:
+
+* `statement` - (Required) Statement to negate. You can use any statement that can be nested. See [`statement`](#statement-block) above for details.
+
+### `or_statement` Block
+
+A logical rule statement used to combine other rule statements with `OR` logic. You provide more than one `statement` within the `or_statement`.
+
+The `or_statement` block supports the following arguments:
+
+* `statement` - (Required) Statements to combine with `OR` logic. You can use any statements that can be nested. See [`statement`](#statement-block) above for details.
+
+### `rate_based_statement` Block
+
+A rate-based rule tracks the rate of requests for each originating IP address, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any 5-minute time span. You can use this to put a temporary block on requests from an IP address that is sending excessive requests. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedStatement.html) for more information.
+
+You can't nest a `rate_based_statement`, for example for use inside a `not_statement` or `or_statement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `rate_based_statement` block supports the following arguments:
+
+* `aggregate_key_type` - (Optional) Setting that indicates how to aggregate the request counts. Valid values include: `CONSTANT`, `FORWARDED_IP`, or `IP`. Default: `IP`.
+* `forwarded_ip_config` - (Optional) Configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. If `aggregate_key_type` is set to `FORWARDED_IP`, this block is required. See [`forwarded_ip_config`](#forwarded_ip_config-block) below for details.
+* `limit` - (Required) Limit on requests per 5-minute period for a single originating IP address.
+* `scope_down_statement` - (Optional) Nested statement that narrows the scope of the rate-based statement to matching web requests. This can be any nestable statement, and you can nest statements at any level below this scope-down statement. See [`statement`](#statement-block) above for details. If `aggregate_key_type` is set to `CONSTANT`, this block is required.
+
+### `regex_match_statement` Block
+
+A rule statement used to search web request components for a match against a single regular expression.
+
+The `regex_match_statement` block supports the following arguments:
+
+* `regex_string` - (Required) String representing the regular expression. Minimum of `1` and maximum of `512` characters.
+* `field_to_match` - (Required) The part of a web request that you want AWS WAF to inspect. See [`field_to_match`](#field_to_match-block) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`text_transformation`](#text_transformation-block) below for details.
+
+### `regex_pattern_set_reference_statement` Block
+
+A rule statement used to search web request components for matches with regular expressions. To use this, create an `aws_wafv2_regex_pattern_set` that specifies the expressions that you want to detect, then use the ARN of that set in this statement. A web request matches the pattern set rule statement if the request component matches any of the patterns in the set.
+
+The `regex_pattern_set_reference_statement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the Regex Pattern Set that this statement references.
+* `field_to_match` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`field_to_match`](#field_to_match-block) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`text_transformation`](#text_transformation-block) below for details.
+
+### `rule_group_reference_statement` Block
+
+A rule statement used to run the rules that are defined in a WAFv2 Rule Group or `aws_wafv2_rule_group` resource.
+
+You can't nest a `rule_group_reference_statement`, for example for use inside a `not_statement` or `or_statement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `rule_group_reference_statement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the `aws_wafv2_rule_group` resource.
+* `rule_action_override` - (Optional) Action settings to use in the place of the rule actions that are configured inside the rule group. You specify one override for each rule whose action you want to change. See [`rule_action_override`](#rule_action_override-block) below for details.
+
+### `size_constraint_statement` Block
+
+A rule statement that uses a comparison operator to compare a number of bytes against the size of a request component. AWS WAFv2 inspects up to the first 8192 bytes (8 KB) of a request body, and when inspecting the request URI path, the slash `/` in the URI counts as one character.
+
+The `size_constraint_statement` block supports the following arguments:
+
+* `comparison_operator` - (Required) Operator to use to compare the request part to the size setting. Valid values include: `EQ`, `NE`, `LE`, `LT`, `GE`, or `GT`.
+* `field_to_match` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`field_to_match`](#field_to_match-block) below for details.
+* `size` - (Required) Size, in bytes, to compare to the request part, after any transformations. Valid values are integers between 0 and 21474836480, inclusive.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`text_transformation`](#text_transformation-block) below for details.
+
+### `sqli_match_statement` Block
+
+An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. Later in the process, when you create a web ACL, you specify whether to allow or block requests that appear to contain malicious SQL code.
+
+The `sqli_match_statement` block supports the following arguments; a short sketch follows the list:
+
+* `field_to_match` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`field_to_match`](#field_to_match-block) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`text_transformation`](#text_transformation-block) below for details.
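+
+As a hedged sketch of the `sqli_match_statement` and its required `text_transformation` list (struct names again assumed from the generated binding's naming convention, variable name illustrative):
+
+```python
+from imports.aws.wafv2_web_acl import (
+    Wafv2WebAclRuleStatement,
+    Wafv2WebAclRuleStatementSqliMatchStatement,
+    Wafv2WebAclRuleStatementSqliMatchStatementFieldToMatch,
+    Wafv2WebAclRuleStatementSqliMatchStatementFieldToMatchQueryString,
+    Wafv2WebAclRuleStatementSqliMatchStatementTextTransformation,
+)
+
+# Inspect the URL-decoded query string for SQL injection patterns.
+sqli_statement = Wafv2WebAclRuleStatement(
+    sqli_match_statement=Wafv2WebAclRuleStatementSqliMatchStatement(
+        field_to_match=Wafv2WebAclRuleStatementSqliMatchStatementFieldToMatch(
+            query_string=Wafv2WebAclRuleStatementSqliMatchStatementFieldToMatchQueryString()
+        ),
+        text_transformation=[Wafv2WebAclRuleStatementSqliMatchStatementTextTransformation(
+            priority=0,
+            type="URL_DECODE"
+        )]
+    )
+)
+```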
+
+### `xss_match_statement` Block
+
+The XSS match statement provides the location in requests that you want AWS WAF to search and text transformations to use on the search area before AWS WAF searches for character sequences that are likely to be malicious strings.
+
+The `xss_match_statement` block supports the following arguments:
+
+* `field_to_match` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`field_to_match`](#field_to_match-block) below for details.
+* `text_transformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`text_transformation`](#text_transformation-block) below for details.
+
+### `rule_action_override` Block
+
+The `rule_action_override` block supports the following arguments:
+
+* `action_to_use` - (Required) Override action to use, in place of the configured action of the rule in the rule group. See [`action`](#action-block) for details.
+* `name` - (Required) Name of the rule to override. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/aws-managed-rule-groups-list.html) for a list of names in the appropriate rule group in use.
+
+### `managed_rule_group_configs` Block
+
+The `managed_rule_group_configs` block supports the following arguments:
+
+* `aws_managed_rules_bot_control_rule_set` - (Optional) Additional configuration for using the Bot Control managed rule group. Use this to specify the inspection level that you want to use. See [`aws_managed_rules_bot_control_rule_set`](#aws_managed_rules_bot_control_rule_set-block) for more details.
+* `aws_managed_rules_atp_rule_set` - (Optional) Additional configuration for using the Account Takeover Protection managed rule group. Use this to specify information such as the sign-in page of your application and the type of content to accept or reject from the client.
+* `login_path` - (Optional, **Deprecated**) The path of the login endpoint for your application.
+* `password_field` - (Optional, **Deprecated**) Details about your login page password field. See [`password_field`](#password_field-block) for more details.
+* `payload_type` - (Optional, **Deprecated**) The payload type for your login endpoint, either JSON or form encoded.
+* `username_field` - (Optional, **Deprecated**) Details about your login page username field. See [`username_field`](#username_field-block) for more details.
+
+### `aws_managed_rules_bot_control_rule_set` Block
+
+* `inspection_level` - (Optional) The inspection level to use for the Bot Control rule group.
+
+### `aws_managed_rules_atp_rule_set` Block
+
+* `login_path` - (Required) The path of the login endpoint for your application.
+* `request_inspection` - (Optional) The criteria for inspecting login requests, used by the ATP rule group to validate credentials usage. See [`request_inspection`](#request_inspection-block) for more details.
+* `response_inspection` - (Optional) The criteria for inspecting responses to login requests, used by the ATP rule group to track login failure rates. Note that response inspection is available only on web ACLs that protect CloudFront distributions. See [`response_inspection`](#response_inspection-block) for more details.
+
+### `request_inspection` Block
+
+* `payload_type` - (Required) The payload type for your login endpoint, either JSON or form encoded.
+* `username_field` - (Required) Details about your login page username field. See [`username_field`](#username_field-block) for more details.
+* `password_field` - (Required) Details about your login page password field. See [`password_field`](#password_field-block) for more details.
+
+### `password_field` Block
+
+* `identifier` - (Optional) The name of the password field.
+
+### `username_field` Block
+
+* `identifier` - (Optional) The name of the username field.
+
+### `response_inspection` Block
+
+* `body_contains` - (Optional) Configures inspection of the response body. See [`body_contains`](#body_contains-block) for more details.
+* `header` - (Optional) Configures inspection of the response header. See [`header`](#header-block) for more details.
+* `json` - (Optional) Configures inspection of the response JSON. See [`json`](#json-block) for more details.
+* `status_code` - (Optional) Configures inspection of the response status code. See [`status_code`](#status_code-block) for more details.
+
+### `body_contains` Block
+
+* `success_strings` - (Required) Strings in the body of the response that indicate a successful login attempt.
+* `failure_strings` - (Required) Strings in the body of the response that indicate a failed login attempt.
+
+### `header` Block
+
+* `name` - (Required) The name of the header to match against. The name must be an exact match, including case.
+* `success_values` - (Required) Values in the response header with the specified name that indicate a successful login attempt.
+* `failure_values` - (Required) Values in the response header with the specified name that indicate a failed login attempt.
+
+### `json` Block
+
+* `identifier` - (Required) The identifier for the value to match against in the JSON.
+* `success_strings` - (Required) Strings in the body of the response that indicate a successful login attempt.
+* `failure_strings` - (Required) Strings in the body of the response that indicate a failed login attempt.
+
+### `status_code` Block
+
+* `success_codes` - (Required) Status codes in the response that indicate a successful login attempt.
+* `failure_codes` - (Required) Status codes in the response that indicate a failed login attempt.
+
+### `field_to_match` Block
+
+The part of a web request that you want AWS WAF to inspect. Include the single `field_to_match` type that you want to inspect, with additional specifications as needed, according to the type. You specify a single request component in `field_to_match` for each rule statement that requires it. To inspect more than one component of a web request, create a separate rule statement for each component. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-fields.html#waf-rule-statement-request-component) for more details.
+
+The `field_to_match` block supports the following arguments:
+
+~> **Note** Only one of `all_query_arguments`, `body`, `cookies`, `headers`, `json_body`, `method`, `query_string`, `single_header`, `single_query_argument`, or `uri_path` can be specified. An empty configuration block `{}` should be used when specifying the `all_query_arguments`, `method`, or `query_string` attributes.
+
+* `all_query_arguments` - (Optional) Inspect all query arguments.
+* `body` - (Optional) Inspect the request body, which immediately follows the request headers. See [`body`](#body-block) below for details.
+* `cookies` - (Optional) Inspect the cookies in the web request. See [`cookies`](#cookies-block) below for details.
+* `headers` - (Optional) Inspect the request headers. See [`headers`](#headers-block) below for details.
+* `json_body` - (Optional) Inspect the request body as JSON. See [`json_body`](#json_body-block) for details.
+* `method` - (Optional) Inspect the HTTP method. The method indicates the type of operation that the request is asking the origin to perform.
+* `query_string` - (Optional) Inspect the query string. This is the part of a URL that appears after a `?` character, if any.
+* `single_header` - (Optional) Inspect a single header. See [`single_header`](#single_header-block) below for details.
+* `single_query_argument` - (Optional) Inspect a single query argument. See [`single_query_argument`](#single_query_argument-block) below for details.
+* `uri_path` - (Optional) Inspect the request URI path. This is the part of a web request that identifies a resource, for example, `/images/daily-ad.jpg`.
+
+### `forwarded_ip_config` Block
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name. If the specified header isn't present in the request, AWS WAFv2 doesn't apply the rule to the web request at all. AWS WAFv2 only evaluates the first IP address found in the specified HTTP header.
+
+The `forwarded_ip_config` block supports the following arguments:
+
+* `fallback_behavior` - (Required) Match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `header_name` - (Required) Name of the HTTP header to use for the IP address.
+
+### `ip_set_forwarded_ip_config` Block
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name.
+
+The `ip_set_forwarded_ip_config` block supports the following arguments:
+
+* `fallback_behavior` - (Required) Match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `header_name` - (Required) Name of the HTTP header to use for the IP address.
+* `position` - (Required) Position in the header to search for the IP address. Valid values include: `FIRST`, `LAST`, or `ANY`. If `ANY` is specified and the header contains more than 10 IP addresses, AWS WAFv2 inspects the last 10.
+
+### `headers` Block
+
+Inspect the request headers.
+
+The `headers` block supports the following arguments:
+
+* `match_pattern` - (Required) The filter to use to identify the subset of headers to inspect in a web request. The `match_pattern` block supports only one of the following arguments:
+    * `all` - An empty configuration block that is used for inspecting all headers.
+    * `included_headers` - An array of strings that will be used for inspecting headers that have a key that matches one of the provided values.
+    * `excluded_headers` - An array of strings that will be used for inspecting headers that do not have a key that matches one of the provided values.
+* `match_scope` - (Required) The parts of the headers to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values include the following: `ALL`, `KEY`, `VALUE`.
+* `oversize_handling` - (Required) Oversize handling tells AWS WAF what to do with a web request when the request component that the rule inspects is over the limits. Valid values include the following: `CONTINUE`, `MATCH`, `NO_MATCH`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-oversize-handling.html) for more information.
+
+### `json_body` Block
+
+The `json_body` block supports the following arguments:
+
+* `invalid_fallback_behavior` - (Optional) What to do when JSON parsing fails. Defaults to evaluating up to the first parsing failure. Valid values are `EVALUATE_AS_STRING`, `MATCH` and `NO_MATCH`.
+* `match_pattern` - (Required) The patterns to look for in the JSON body. You must specify exactly one setting: either `all` or `included_paths`. See [JsonMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_JsonMatchPattern.html) for details.
+* `match_scope` - (Required) The parts of the JSON to match against using the `match_pattern`. Valid values are `ALL`, `KEY` and `VALUE`.
+* `oversize_handling` - (Optional) What to do if the body is larger than can be inspected. Valid values are `CONTINUE` (default), `MATCH` and `NO_MATCH`.
+
+### `single_header` Block
+
+Inspect a single header. Provide the name of the header to inspect, for example, `User-Agent` or `Referer` (provided as lowercase strings).
+
+The `single_header` block supports the following arguments:
+
+* `name` - (Optional) Name of the header to inspect. This setting must be provided in lowercase characters.
+
+### `single_query_argument` Block
+
+Inspect a single query argument. Provide the name of the query argument to inspect, such as `UserName` or `SalesRegion` (provided as lowercase strings).
+
+The `single_query_argument` block supports the following arguments:
+
+* `name` - (Optional) Name of the query argument to inspect. This setting must be provided in lowercase characters.
+
+### `body` Block
+
+The `body` block supports the following arguments:
+
+* `oversize_handling` - (Optional) What WAF should do if the body is larger than WAF can inspect. WAF does not support inspecting the entire contents of the body of a web request when the body exceeds 8 KB (8192 bytes). Only the first 8 KB of the request body are forwarded to WAF by the underlying host service. Valid values: `CONTINUE`, `MATCH`, `NO_MATCH`.
+
+### `cookies` Block
+
+Inspect the cookies in the web request. You can specify the parts of the cookies to inspect and you can narrow the set of cookies to inspect by including or excluding specific keys. This is used to indicate the web request component to inspect, in the [FieldToMatch](https://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) specification.
+
+The `cookies` block supports the following arguments:
+
+* `match_pattern` - (Required) The filter to use to identify the subset of cookies to inspect in a web request. You must specify exactly one setting: either `all`, `included_cookies` or `excluded_cookies`. More details: [CookieMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_CookieMatchPattern.html)
+* `match_scope` - (Required) The parts of the cookies to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values: `ALL`, `KEY`, `VALUE`
+* `oversize_handling` - (Required) What AWS WAF should do if the cookies of the request are larger than AWS WAF can inspect. AWS WAF does not support inspecting the entire contents of request cookies when they exceed 8 KB (8192 bytes) or 200 total cookies. The underlying host service forwards a maximum of 200 cookies and at most 8 KB of cookie contents to AWS WAF. Valid values: `CONTINUE`, `MATCH`, `NO_MATCH`.
+
+### `text_transformation` Block
+
+The `text_transformation` block supports the following arguments:
+
+* `priority` - (Required) Relative processing order for multiple transformations that are defined for a rule statement. AWS WAF processes all transformations, from lowest priority to highest, before inspecting the transformed content.
+* `type` - (Required) Transformation to apply. Refer to the Text Transformation [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_TextTransformation.html) for more details.
+
+### `visibility_config` Block
+
+The `visibility_config` block supports the following arguments:
+
+* `cloudwatch_metrics_enabled` - (Required) Whether the associated resource sends metrics to CloudWatch. For the list of available metrics, see [AWS WAF Metrics](https://docs.aws.amazon.com/waf/latest/developerguide/monitoring-cloudwatch.html#waf-metrics).
+* `metric_name` - (Required) A friendly name of the CloudWatch metric. The name can contain only alphanumeric characters (A-Z, a-z, 0-9), hyphens (-), and underscores (\_), and must be between one and 128 characters long. It can't contain whitespace or metric names reserved for AWS WAF, for example `All` and `Default_Action`.
+* `sampled_requests_enabled` - (Required) Whether AWS WAF should store a sampling of the web requests that match the rules. You can view the sampled requests through the AWS WAF console.
+
+### `captcha_config` Block
+
+The `captcha_config` block supports the following arguments:
+
+* `immunity_time_property` - (Optional) Defines custom immunity time. See [`immunity_time_property`](#immunity_time_property-block) below for details.
+
+### `immunity_time_property` Block
+
+The `immunity_time_property` block supports the following arguments:
+
+* `immunity_time` - (Optional) The amount of time, in seconds, that a CAPTCHA or challenge timestamp is considered valid by AWS WAF. The default setting is 300.
+
+### `request_body` Block
+
+The `request_body` block supports the following arguments:
+
+* `cloudfront` - (Optional) Customizes the request body that your protected CloudFront distributions forward to AWS WAF for inspection. See [`cloudfront`](#cloudfront-block) below for details.
+
+### `cloudfront` Block
+
+The `cloudfront` block supports the following arguments:
+
+* `default_size_inspection_limit` - (Required) Specifies the maximum size of the web request body component that an associated CloudFront distribution should send to AWS WAF for inspection. This applies to statements in the web ACL that inspect the body or JSON body. Valid values are `KB_16`, `KB_32`, `KB_48` and `KB_64`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the WAF WebACL.
+* `capacity` - Web ACL capacity units (WCUs) currently being used by this web ACL.
+* `id` - The ID of the WAF WebACL.
+* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Web ACLs using `ID/Name/Scope`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WAFv2 Web ACLs using `ID/Name/Scope`. For example:
+
+```console
+% terraform import aws_wafv2_web_acl.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/wafv2_web_acl_association.html.markdown b/website/docs/cdktf/python/r/wafv2_web_acl_association.html.markdown
new file mode 100644
index 00000000000..e58e2a73f48
--- /dev/null
+++ b/website/docs/cdktf/python/r/wafv2_web_acl_association.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_web_acl_association"
+description: |-
+  Creates a WAFv2 Web ACL Association.
+---
+
+
+
+# Resource: aws_wafv2_web_acl_association
+
+Creates a WAFv2 Web ACL Association.
+
+~> **NOTE on associating a WAFv2 Web ACL with a CloudFront distribution:** Do not use this resource to associate a WAFv2 Web ACL with a CloudFront distribution. The [AWS API call backing this resource][1] notes that you should use the [`web_acl_id`][2] property on the [`cloudfront_distribution`][2] instead.
+
+[1]: https://docs.aws.amazon.com/waf/latest/APIReference/API_AssociateWebACL.html
+[2]: /docs/providers/aws/r/cloudfront_distribution.html#web_acl_id
+
+## Example Usage
+
+```terraform
+resource "aws_api_gateway_rest_api" "example" {
+  body = jsonencode({
+    openapi = "3.0.1"
+    info = {
+      title   = "example"
+      version = "1.0"
+    }
+    paths = {
+      "/path1" = {
+        get = {
+          x-amazon-apigateway-integration = {
+            httpMethod           = "GET"
+            payloadFormatVersion = "1.0"
+            type                 = "HTTP_PROXY"
+            uri                  = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+          }
+        }
+      }
+    }
+  })
+
+  name = "example"
+}
+
+resource "aws_api_gateway_deployment" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+
+  triggers = {
+    redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body))
+  }
+
+  lifecycle {
+    create_before_destroy = true
+  }
+}
+
+resource "aws_api_gateway_stage" "example" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "example"
+}
+
+resource "aws_wafv2_web_acl" "example" {
+  name  = "web-acl-association-example"
+  scope = "REGIONAL"
+
+  default_action {
+    allow {}
+  }
+
+  visibility_config {
+    cloudwatch_metrics_enabled = false
+    metric_name                = "friendly-metric-name"
+    sampled_requests_enabled   = false
+  }
+}
+
+resource "aws_wafv2_web_acl_association" "example" {
+  resource_arn = aws_api_gateway_stage.example.arn
+  web_acl_arn  = aws_wafv2_web_acl.example.arn
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resource_arn` - (Required) The Amazon Resource Name (ARN) of the resource to associate with the web ACL. This must be an ARN of an Application Load Balancer, an Amazon API Gateway stage, or an Amazon Cognito User Pool.
+* `web_acl_arn` - (Required) The Amazon Resource Name (ARN) of the Web ACL that you want to associate with the resource.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Web ACL Association using `WEB_ACL_ARN,RESOURCE_ARN`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WAFv2 Web ACL Association using `WEB_ACL_ARN,RESOURCE_ARN`. For example:
+
+```console
+% terraform import aws_wafv2_web_acl_association.example arn:aws:wafv2:...7ce849ea,arn:aws:apigateway:...ages/name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/wafv2_web_acl_logging_configuration.html.markdown b/website/docs/cdktf/python/r/wafv2_web_acl_logging_configuration.html.markdown
new file mode 100644
index 00000000000..95f29b61b21
--- /dev/null
+++ b/website/docs/cdktf/python/r/wafv2_web_acl_logging_configuration.html.markdown
@@ -0,0 +1,254 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_web_acl_logging_configuration"
+description: |-
+  Creates a WAFv2 Web ACL Logging Configuration resource.
+---
+
+
+
+# Resource: aws_wafv2_web_acl_logging_configuration
+
+This resource creates a WAFv2 Web ACL Logging Configuration.
+
+!> **WARNING:** When logging from a WAFv2 Web ACL to a CloudWatch Log Group, the WAFv2 service tries to create or update a generic Log Resource Policy named `AWSWAF-LOGS`. However, if there are a large number of Web ACLs, or if the account frequently creates and deletes Web ACLs, this policy may exceed the maximum policy size, and this resource may then fail to be created. More details about this issue can be found in [this issue](https://github.com/hashicorp/terraform-provider-aws/issues/25296). To prevent this, you can manage a specific resource policy yourself. Please refer to the [example](#with-cloudwatch-log-group-and-managed-cloudwatch-log-resource-policy) below for managing a CloudWatch Log Group with a managed CloudWatch Log Resource Policy.
+
+## Example Usage
+
+### With Redacted Fields
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.wafv2_web_acl_logging_configuration import Wafv2WebAclLoggingConfiguration, Wafv2WebAclLoggingConfigurationRedactedFields, Wafv2WebAclLoggingConfigurationRedactedFieldsSingleHeader
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Wafv2WebAclLoggingConfiguration(self, "example",
+            log_destination_configs=[
+                Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn)
+            ],
+            redacted_fields=[Wafv2WebAclLoggingConfigurationRedactedFields(
+                single_header=Wafv2WebAclLoggingConfigurationRedactedFieldsSingleHeader(
+                    name="user-agent"
+                )
+            )
+            ],
+            resource_arn=Token.as_string(aws_wafv2_web_acl_example.arn)
+        )
+```
+
+### With Logging Filter
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.wafv2_web_acl_logging_configuration import Wafv2WebAclLoggingConfiguration, Wafv2WebAclLoggingConfigurationLoggingFilter, Wafv2WebAclLoggingConfigurationLoggingFilterFilter, Wafv2WebAclLoggingConfigurationLoggingFilterFilterCondition, Wafv2WebAclLoggingConfigurationLoggingFilterFilterConditionActionCondition, Wafv2WebAclLoggingConfigurationLoggingFilterFilterConditionLabelNameCondition
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        Wafv2WebAclLoggingConfiguration(self, "example",
+            log_destination_configs=[
+                Token.as_string(aws_kinesis_firehose_delivery_stream_example.arn)
+            ],
+            logging_filter=Wafv2WebAclLoggingConfigurationLoggingFilter(
+                default_behavior="KEEP",
+                filter=[Wafv2WebAclLoggingConfigurationLoggingFilterFilter(
+                    behavior="DROP",
+                    condition=[Wafv2WebAclLoggingConfigurationLoggingFilterFilterCondition(
+                        action_condition=Wafv2WebAclLoggingConfigurationLoggingFilterFilterConditionActionCondition(
+                            action="COUNT"
+                        )
+                    ), Wafv2WebAclLoggingConfigurationLoggingFilterFilterCondition(
+                        label_name_condition=Wafv2WebAclLoggingConfigurationLoggingFilterFilterConditionLabelNameCondition(
+                            label_name="awswaf:111122223333:rulegroup:testRules:LabelNameZ"
+                        )
+                    )
+                    ],
+                    requirement="MEETS_ALL"
+                ), Wafv2WebAclLoggingConfigurationLoggingFilterFilter(
+                    behavior="KEEP",
+                    condition=[Wafv2WebAclLoggingConfigurationLoggingFilterFilterCondition(
+                        action_condition=Wafv2WebAclLoggingConfigurationLoggingFilterFilterConditionActionCondition(
+                            action="ALLOW"
+                        )
+                    )
+                    ],
+                    requirement="MEETS_ANY"
+                )
+                ]
+            ),
+            resource_arn=Token.as_string(aws_wafv2_web_acl_example.arn)
+        )
+```
+
+### With CloudWatch Log Group and managed CloudWatch Log Resource Policy
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, Fn, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.cloudwatch_log_group import CloudwatchLogGroup
+from imports.aws.cloudwatch_log_resource_policy import CloudwatchLogResourcePolicy
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument, DataAwsIamPolicyDocumentStatement, DataAwsIamPolicyDocumentStatementCondition, DataAwsIamPolicyDocumentStatementPrincipals
+from imports.aws.data_aws_region import DataAwsRegion
+from imports.aws.wafv2_web_acl_logging_configuration import Wafv2WebAclLoggingConfiguration
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        example = CloudwatchLogGroup(self, "example",
+            name="aws-waf-logs-some-uniq-suffix"
+        )
+        aws_wafv2_web_acl_logging_configuration_example = Wafv2WebAclLoggingConfiguration(self, "example_1",
+            log_destination_configs=[example.arn],
+            resource_arn=Token.as_string(aws_wafv2_web_acl_example.arn)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_wafv2_web_acl_logging_configuration_example.override_logical_id("example")
+        current = DataAwsCallerIdentity(self, "current")
+        data_aws_region_current = DataAwsRegion(self, "current_3")
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_region_current.override_logical_id("current")
+        data_aws_iam_policy_document_example = DataAwsIamPolicyDocument(self, "example_4",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["logs:CreateLogStream", "logs:PutLogEvents"],
+                condition=[DataAwsIamPolicyDocumentStatementCondition(
+                    test="ArnLike",
+                    values=["arn:aws:logs:${" + data_aws_region_current.name + "}:${" + current.account_id + "}:*"
+                    ],
+                    variable="aws:SourceArn"
+                ), DataAwsIamPolicyDocumentStatementCondition(
+                    test="StringEquals",
+                    values=[Token.as_string(Fn.tostring(current.account_id))],
+                    variable="aws:SourceAccount"
+                )
+                ],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["delivery.logs.amazonaws.com"],
+                    type="Service"
+                )
+                ],
+                resources=["${" + example.arn + "}:*"]
+            )
+            ],
+            version="2012-10-17"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        data_aws_iam_policy_document_example.override_logical_id("example")
+        aws_cloudwatch_log_resource_policy_example = CloudwatchLogResourcePolicy(self, "example_5",
+            policy_document=Token.as_string(data_aws_iam_policy_document_example.json),
+            policy_name="webacl-policy-uniq-name"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_cloudwatch_log_resource_policy_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `log_destination_configs` - (Required) Amazon Kinesis Data Firehose, CloudWatch Logs log group, or S3 bucket Amazon Resource Names (ARNs) that you want to associate with the web ACL.
+* `logging_filter` - (Optional) Configuration block that specifies which web requests are kept in the logs and which are dropped. It allows filtering based on the rule action and the web request labels applied by matching rules during web ACL evaluation. For more details, refer to the [Logging Filter](#logging-filter) section below.
+* `redacted_fields` - (Optional) Configuration for parts of the request that you want to keep out of the logs. Up to 100 `redacted_fields` blocks are supported. See [Redacted Fields](#redacted-fields) below for more details.
+* `resource_arn` - (Required) Amazon Resource Name (ARN) of the web ACL that you want to associate with `log_destination_configs`.
+
+### Logging Filter
+
+The `logging_filter` block supports the following arguments:
+
+* `default_behavior` - (Required) Default handling for logs that don't match any of the specified filtering conditions. Valid values for `default_behavior` are `KEEP` or `DROP`.
+* `filter` - (Required) Filter(s) that you want to apply to the logs. See [Filter](#filter) below for more details; a minimal sketch follows this section.
+
+### Filter
+
+The `filter` block supports the following arguments:
+
+* `behavior` - (Required) Parameter that determines how to handle logs that meet the conditions and requirements of the filter. The valid values for `behavior` are `KEEP` or `DROP`.
+* `condition` - (Required) Match condition(s) for the filter. See [Condition](#condition) below for more details.
+* `requirement` - (Required) Logic to apply to the filtering conditions. You can specify that a log must match all conditions or at least one condition in order to satisfy the filter. Valid values for `requirement` are `MEETS_ALL` or `MEETS_ANY`.
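+
+To make the filter wiring concrete, here is a minimal sketch that keeps all log records except those whose rule action was `COUNT`. The struct names are taken from the generated `imports.aws.wafv2_web_acl_logging_configuration` bindings used in the examples above; the variable name is illustrative.
+
+```python
+from imports.aws.wafv2_web_acl_logging_configuration import (
+    Wafv2WebAclLoggingConfigurationLoggingFilter,
+    Wafv2WebAclLoggingConfigurationLoggingFilterFilter,
+    Wafv2WebAclLoggingConfigurationLoggingFilterFilterCondition,
+    Wafv2WebAclLoggingConfigurationLoggingFilterFilterConditionActionCondition,
+)
+
+# Keep everything by default, but drop records produced by COUNT actions.
+drop_count_records = Wafv2WebAclLoggingConfigurationLoggingFilter(
+    default_behavior="KEEP",
+    filter=[Wafv2WebAclLoggingConfigurationLoggingFilterFilter(
+        behavior="DROP",
+        requirement="MEETS_ANY",
+        condition=[Wafv2WebAclLoggingConfigurationLoggingFilterFilterCondition(
+            action_condition=Wafv2WebAclLoggingConfigurationLoggingFilterFilterConditionActionCondition(
+                action="COUNT"
+            )
+        )]
+    )]
+)
+```
+
+The value can then be passed as `logging_filter=drop_count_records` when constructing the `Wafv2WebAclLoggingConfiguration` resource.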
+
+### Condition
+
+The `condition` block supports the following arguments:
+
+~> **NOTE:** Either the `action_condition` or `label_name_condition` must be specified.
+
+* `action_condition` - (Optional) Configuration for a single action condition. See [Action Condition](#action-condition) below for more details.
+* `label_name_condition` - (Optional) Condition for a single label name. See [Label Name Condition](#label-name-condition) below for more details.
+
+### Action Condition
+
+The `action_condition` block supports the following argument:
+
+* `action` - (Required) Action setting that a log record must contain in order to meet the condition. Valid values for `action` are `ALLOW`, `BLOCK`, and `COUNT`.
+
+### Label Name Condition
+
+The `label_name_condition` block supports the following argument:
+
+* `label_name` - (Required) Name of the label that a log record must contain in order to meet the condition. It must be a fully qualified label name, which includes a prefix, optional namespaces, and the label name itself. The prefix identifies the rule group or web ACL context of the rule that added the label.
+
+### Redacted Fields
+
+The `redacted_fields` block supports the following arguments:
+
+~> **NOTE:** You can only specify one of the following: `method`, `query_string`, `single_header`, or `uri_path`.
+
+* `method` - (Optional) Configuration block that redacts the HTTP method. It must be specified as an empty configuration block `{}`. The method indicates the type of operation that the request is asking the origin to perform.
+* `query_string` - (Optional) Configuration block that redacts the query string. It must be specified as an empty configuration block `{}`. The query string is the part of a URL that appears after a `?` character, if any.
+* `single_header` - (Optional) Configuration block that redacts a single header. See [Single Header](#single-header) below for details.
+* `uri_path` - (Optional) Configuration block that redacts the request URI path. It must be specified as an empty configuration block `{}`. The URI path is the part of a web request that identifies a resource, such as `/images/daily-ad.jpg`.
+
+### Single Header
+
+To redact a single header, provide the name of the header to be redacted. For example, use `User-Agent` or `Referer` (provided as lowercase strings).
+
+The `single_header` block supports the following arguments:
+
+* `name` - (Optional) Name of the header to redact. This setting must be provided in lowercase characters.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the WAFv2 Web ACL.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Web ACL Logging Configurations using the ARN of the WAFv2 Web ACL. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WAFv2 Web ACL Logging Configurations using the ARN of the WAFv2 Web ACL. For example:
+
+```console
+% terraform import aws_wafv2_web_acl_logging_configuration.example arn:aws:wafv2:us-west-2:123456789012:regional/webacl/test-logs/a1b2c3d4-5678-90ab-cdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/worklink_fleet.html.markdown b/website/docs/cdktf/python/r/worklink_fleet.html.markdown
new file mode 100644
index 00000000000..25afb0d6254
--- /dev/null
+++ b/website/docs/cdktf/python/r/worklink_fleet.html.markdown
@@ -0,0 +1,137 @@
+---
+subcategory: "WorkLink"
+layout: "aws"
+page_title: "AWS: aws_worklink_fleet"
+description: |-
+  Provides an AWS WorkLink Fleet resource.
+---
+
+
+
+# Resource: aws_worklink_fleet
+
+## Example Usage
+
+Basic usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.worklink_fleet import WorklinkFleet
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WorklinkFleet(self, "example",
+            name="terraform-example"
+        )
+```
+
+Network Configuration Usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import property_access, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.worklink_fleet import WorklinkFleet, WorklinkFleetNetwork
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WorklinkFleet(self, "example",
+            name="terraform-example",
+            network=WorklinkFleetNetwork(
+                security_group_ids=[test.id],
+                subnet_ids=[Token.as_string(property_access(aws_subnet_test, ["*", "id"]))],
+                vpc_id=Token.as_string(aws_vpc_test.id)
+            )
+        )
+```
+
+Identity Provider Configuration Usage:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.worklink_fleet import WorklinkFleet, WorklinkFleetIdentityProvider
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WorklinkFleet(self, "test",
+            identity_provider=WorklinkFleetIdentityProvider(
+                saml_metadata=Token.as_string(Fn.file("saml-metadata.xml")),
+                type="SAML"
+            ),
+            name="tf-worklink-fleet"
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A region-unique name for the fleet.
+* `audit_stream_arn` - (Optional) The ARN of the Amazon Kinesis data stream that receives the audit events. The Kinesis data stream name must begin with `"AmazonWorkLink-"`.
+* `device_ca_certificate` - (Optional) The certificate chain, including intermediate certificates and the root certificate authority certificate used to issue device certificates.
+* `identity_provider` - (Optional) Provide this to manage the identity provider configuration for the fleet. Fields documented below.
+* `display_name` - (Optional) The name of the fleet.
+* `network` - (Optional) Provide this to manage the company network configuration for the fleet. Fields documented below.
+* `optimize_for_end_user_location` - (Optional) The option to optimize for better performance by routing traffic through the closest AWS Region to users, which may be outside of your home Region. Defaults to `true`.
+
+**network** requires the following:
+
+~> **NOTE:** `network` cannot be removed without forcing recreation of the fleet (for example, via `terraform taint`).
+
+* `vpc_id` - (Required) The VPC ID with connectivity to associated websites.
+* `subnet_ids` - (Required) A list of subnet IDs used for X-ENI connections from Amazon WorkLink rendering containers.
+* `security_group_ids` - (Required) A list of security group IDs associated with access to the provided subnets.
+
+**identity_provider** requires the following:
+
+~> **NOTE:** `identity_provider` cannot be removed without forcing recreation of the fleet (for example, via `terraform taint`).
+
+* `type` - (Required) The type of identity provider.
+* `saml_metadata` - (Required) The SAML metadata document provided by the customer’s identity provider.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the created WorkLink Fleet.
+* `arn` - The ARN of the created WorkLink Fleet.
+* `company_code` - The identifier used by users to sign in to the Amazon WorkLink app.
+* `created_time` - The time that the fleet was created.
+* `last_updated_time` - The time that the fleet was last updated.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkLink using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WorkLink using the ARN. For example:
+
+```console
+% terraform import aws_worklink_fleet.test arn:aws:worklink::123456789012:fleet/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/worklink_website_certificate_authority_association.html.markdown b/website/docs/cdktf/python/r/worklink_website_certificate_authority_association.html.markdown
new file mode 100644
index 00000000000..25f58d32993
--- /dev/null
+++ b/website/docs/cdktf/python/r/worklink_website_certificate_authority_association.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "WorkLink"
+layout: "aws"
+page_title: "AWS: aws_worklink_website_certificate_authority_association"
+description: |-
+  Provides an AWS WorkLink Website Certificate Authority Association resource.
+---
+
+
+
+# Resource: aws_worklink_website_certificate_authority_association
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Fn, Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.worklink_fleet import WorklinkFleet
+from imports.aws.worklink_website_certificate_authority_association import WorklinkWebsiteCertificateAuthorityAssociation
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WorklinkFleet(self, "example",
+            name="terraform-example"
+        )
+        WorklinkWebsiteCertificateAuthorityAssociation(self, "test",
+            certificate=Token.as_string(Fn.file("certificate.pem")),
+            fleet_arn=Token.as_string(aws_worklink_fleet_test.arn)
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `fleet_arn` - (Required, ForceNew) The ARN of the fleet.
+* `certificate` - (Required, ForceNew) The root certificate of the Certificate Authority.
+* `display_name` - (Optional, ForceNew) The certificate name to display.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `website_ca_id` - A unique identifier for the Certificate Authority.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkLink Website Certificate Authority using `FLEET-ARN,WEBSITE-CA-ID`. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WorkLink Website Certificate Authority using `FLEET-ARN,WEBSITE-CA-ID`. For example:
+
+```console
+% terraform import aws_worklink_website_certificate_authority_association.example arn:aws:worklink::123456789012:fleet/example,abcdefghijk
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/workspaces_connection_alias.html.markdown b/website/docs/cdktf/python/r/workspaces_connection_alias.html.markdown
new file mode 100644
index 00000000000..8b51dd286d3
--- /dev/null
+++ b/website/docs/cdktf/python/r/workspaces_connection_alias.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_connection_alias"
+description: |-
+  Terraform resource for managing an AWS WorkSpaces Connection Alias.
+---
+
+
+
+# Resource: aws_workspaces_connection_alias
+
+Terraform resource for managing an AWS WorkSpaces Connection Alias.
+
+## Example Usage
+
+### Basic Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.workspaces_connection_alias import WorkspacesConnectionAlias
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WorkspacesConnectionAlias(self, "example",
+            connection_string="testdomain.test"
+        )
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `connection_string` - (Required) The connection string specified for the connection alias. The connection string must be in the form of a fully qualified domain name (FQDN), such as www.example.com.
+* `tags` - (Optional) A map of tags assigned to the WorkSpaces Connection Alias. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider level.
If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The identifier of the connection alias. +* `owner_account_id` - The identifier of the Amazon Web Services account that owns the connection alias. +* `state` - The current state of the connection alias. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60m`) +* `update` - (Default `180m`) +* `delete` - (Default `90m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkSpaces Connection Alias using the connection alias ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import WorkSpaces Connection Alias using the connection alias ID. For example: + +```console +% terraform import aws_workspaces_connection_alias.example rft-8012925589 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/workspaces_directory.html.markdown b/website/docs/cdktf/python/r/workspaces_directory.html.markdown new file mode 100644 index 00000000000..d2ad5f56d34 --- /dev/null +++ b/website/docs/cdktf/python/r/workspaces_directory.html.markdown @@ -0,0 +1,233 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_directory" +description: |- + Provides a WorkSpaces directory in AWS WorkSpaces Service. +--- + + + +# Resource: aws_workspaces_directory + +Provides a WorkSpaces directory in AWS WorkSpaces Service. + +~> **NOTE:** AWS WorkSpaces service requires [`workspaces_DefaultRole`](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspaces-access-control.html#create-default-role) IAM role to operate normally. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. 
+# +from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument +from imports.aws.directory_service_directory import DirectoryServiceDirectory +from imports.aws.iam_role import IamRole +from imports.aws.iam_role_policy_attachment import IamRolePolicyAttachment +from imports.aws.subnet import Subnet +from imports.aws.vpc import Vpc +from imports.aws.workspaces_directory import WorkspacesDirectory +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = Vpc(self, "example", + cidr_block="10.0.0.0/16" + ) + workspaces = DataAwsIamPolicyDocument(self, "workspaces", + statement=[DataAwsIamPolicyDocumentStatement( + actions=["sts:AssumeRole"], + principals=[DataAwsIamPolicyDocumentStatementPrincipals( + identifiers=["workspaces.amazonaws.com"], + type="Service" + ) + ] + ) + ] + ) + workspaces_default = IamRole(self, "workspaces_default", + assume_role_policy=Token.as_string(workspaces.json), + name="workspaces_DefaultRole" + ) + workspaces_default_self_service_access = IamRolePolicyAttachment(self, "workspaces_default_self_service_access", + policy_arn="arn:aws:iam::aws:policy/AmazonWorkSpacesSelfServiceAccess", + role=workspaces_default.name + ) + workspaces_default_service_access = IamRolePolicyAttachment(self, "workspaces_default_service_access", + policy_arn="arn:aws:iam::aws:policy/AmazonWorkSpacesServiceAccess", + role=workspaces_default.name + ) + example_a = Subnet(self, "example_a", + availability_zone="us-east-1a", + cidr_block="10.0.0.0/24", + vpc_id=example.id + ) + example_b = Subnet(self, "example_b", + availability_zone="us-east-1b", + cidr_block="10.0.1.0/24", + vpc_id=example.id + ) + example_c = Subnet(self, "example_c", + availability_zone="us-east-1c", + cidr_block="10.0.2.0/24", + vpc_id=example.id + ) + example_d = Subnet(self, "example_d", + availability_zone="us-east-1d", + cidr_block="10.0.3.0/24", + vpc_id=example.id + ) + aws_directory_service_directory_example = DirectoryServiceDirectory(self, "example_9", + name="corp.example.com", + password="#S1ncerely", + size="Small", + vpc_settings=DirectoryServiceDirectoryVpcSettings( + subnet_ids=[example_a.id, example_b.id], + vpc_id=example.id + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. 
+ aws_directory_service_directory_example.override_logical_id("example") + aws_workspaces_directory_example = WorkspacesDirectory(self, "example_10", + depends_on=[workspaces_default_service_access, workspaces_default_self_service_access + ], + directory_id=Token.as_string(aws_directory_service_directory_example.id), + self_service_permissions=WorkspacesDirectorySelfServicePermissions( + change_compute_type=True, + increase_volume_size=True, + rebuild_workspace=True, + restart_workspace=True, + switch_running_mode=True + ), + subnet_ids=[example_c.id, example_d.id], + tags={ + "Example": Token.as_string(True) + }, + workspace_access_properties=WorkspacesDirectoryWorkspaceAccessProperties( + device_type_android="ALLOW", + device_type_chromeos="ALLOW", + device_type_ios="ALLOW", + device_type_linux="DENY", + device_type_osx="ALLOW", + device_type_web="DENY", + device_type_windows="DENY", + device_type_zeroclient="DENY" + ), + workspace_creation_properties=WorkspacesDirectoryWorkspaceCreationProperties( + custom_security_group_id=Token.as_string(aws_security_group_example.id), + default_ou="OU=AWS,DC=Workgroup,DC=Example,DC=com", + enable_internet_access=True, + enable_maintenance_mode=True, + user_enabled_as_local_administrator=True + ) + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_workspaces_directory_example.override_logical_id("example") +``` + +### IP Groups + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import Token, TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.workspaces_directory import WorkspacesDirectory +from imports.aws.workspaces_ip_group import WorkspacesIpGroup +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + example = WorkspacesIpGroup(self, "example", + name="example" + ) + aws_workspaces_directory_example = WorkspacesDirectory(self, "example_1", + directory_id=Token.as_string(aws_directory_service_directory_example.id), + ip_group_ids=[example.id] + ) + # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match. + aws_workspaces_directory_example.override_logical_id("example") +``` + +## Argument Reference + +This resource supports the following arguments: + +* `directory_id` - (Required) The directory identifier for registration in WorkSpaces service. +* `subnet_ids` - (Optional) The identifiers of the subnets where the directory resides. +* `ip_group_ids` - The identifiers of the IP access control groups associated with the directory. +* `tags` – (Optional) A map of tags assigned to the WorkSpaces directory. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `self_service_permissions` – (Optional) Permissions to enable or disable self-service capabilities. Defined below. +* `workspace_access_properties` – (Optional) Specifies which devices and operating systems users can use to access their WorkSpaces. Defined below. +* `workspace_creation_properties` – (Optional) Default properties that are used for creating WorkSpaces. Defined below. 
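+
+As a quick orientation before the per-block reference that follows, here is a minimal sketch in plain HCL (the directory reference is illustrative) showing that only the overridden nested values need to be set:
+
+```terraform
+resource "aws_workspaces_directory" "example" {
+  # Assumes a directory registered elsewhere in the configuration.
+  directory_id = aws_directory_service_directory.example.id
+
+  self_service_permissions {
+    # Only overridden values need to be listed; everything else keeps
+    # the defaults documented below.
+    increase_volume_size = true
+    rebuild_workspace    = true
+  }
+}
+```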
+
+### self_service_permissions
+
+* `change_compute_type` – (Optional) Whether WorkSpaces directory users can change the compute type (bundle) for their workspace. Default `false`.
+* `increase_volume_size` – (Optional) Whether WorkSpaces directory users can increase the volume size of the drives on their workspace. Default `false`.
+* `rebuild_workspace` – (Optional) Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state. Default `false`.
+* `restart_workspace` – (Optional) Whether WorkSpaces directory users can restart their workspace. Default `true`.
+* `switch_running_mode` – (Optional) Whether WorkSpaces directory users can switch the running mode of their workspace. Default `false`.
+
+### workspace_access_properties
+
+* `device_type_android` – (Optional) Indicates whether users can use Android devices to access their WorkSpaces.
+* `device_type_chromeos` – (Optional) Indicates whether users can use Chromebooks to access their WorkSpaces.
+* `device_type_ios` – (Optional) Indicates whether users can use iOS devices to access their WorkSpaces.
+* `device_type_linux` – (Optional) Indicates whether users can use Linux clients to access their WorkSpaces.
+* `device_type_osx` – (Optional) Indicates whether users can use macOS clients to access their WorkSpaces.
+* `device_type_web` – (Optional) Indicates whether users can access their WorkSpaces through a web browser.
+* `device_type_windows` – (Optional) Indicates whether users can use Windows clients to access their WorkSpaces.
+* `device_type_zeroclient` – (Optional) Indicates whether users can use zero client devices to access their WorkSpaces.
+
+### workspace_creation_properties
+
+-> **Note:** Once you have specified `custom_security_group_id` or `default_ou`, there is no way to delete these attributes. If you remove them from the configuration, they will still be present in state.
+
+* `custom_security_group_id` – (Optional) The identifier of your custom security group. It must belong to the same VPC in which the WorkSpaces reside.
+* `default_ou` – (Optional) The default organizational unit (OU) for your WorkSpace directories. It must conform to the `"OU=,DC=,...,DC="` pattern.
+* `enable_internet_access` – (Optional) Indicates whether internet access is enabled for your WorkSpaces.
+* `enable_maintenance_mode` – (Optional) Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html).
+* `user_enabled_as_local_administrator` – (Optional) Indicates whether users are local administrators of their WorkSpaces.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The WorkSpaces directory identifier.
+* `alias` - The directory alias.
+* `customer_user_name` - The user name for the service account.
+* `directory_name` - The name of the directory.
+* `directory_type` - The directory type.
+* `dns_ip_addresses` - The IP addresses of the DNS servers for the directory.
+* `iam_role_id` - The identifier of the IAM role. This is the role that allows Amazon WorkSpaces to make calls to other services, such as Amazon EC2, on your behalf.
+* `ip_group_ids` - The identifiers of the IP access control groups associated with the directory.
+* `registration_code` - The registration code for the directory. This is the code that users enter in their Amazon WorkSpaces client application to connect to the directory.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `workspace_security_group_id` - The identifier of the security group that is assigned to new WorkSpaces.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkSpaces directories using the directory ID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WorkSpaces directories using the directory ID. For example:
+
+```console
+% terraform import aws_workspaces_directory.main d-4444444444
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/workspaces_ip_group.html.markdown b/website/docs/cdktf/python/r/workspaces_ip_group.html.markdown
new file mode 100644
index 00000000000..89e5db010ca
--- /dev/null
+++ b/website/docs/cdktf/python/r/workspaces_ip_group.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_ip_group"
+description: |-
+  Provides an IP access control group in AWS WorkSpaces Service.
+---
+
+
+
+# Resource: aws_workspaces_ip_group
+
+Provides an IP access control group in AWS WorkSpaces Service.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.workspaces_ip_group import WorkspacesIpGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        WorkspacesIpGroup(self, "contractors",
+            description="Contractors IP access control group",
+            name="Contractors",
+            rules=[WorkspacesIpGroupRules(
+                description="NY",
+                source="150.24.14.0/24"
+            ), WorkspacesIpGroupRules(
+                description="LA",
+                source="125.191.14.85/32"
+            ), WorkspacesIpGroupRules(
+                description="STL",
+                source="44.98.100.0/24"
+            )
+            ]
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the IP group.
+* `description` - (Optional) The description of the IP group.
+* `rules` - (Optional) One or more pairs specifying the IP group rule (in CIDR format) from which web requests originate.
+* `tags` – (Optional) A map of tags assigned to the WorkSpaces IP group. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Blocks
+
+### `rules`
+
+#### Arguments
+
+* `source` - (Required) The IP address range, in CIDR notation, e.g., `10.0.0.0/16`.
+* `description` - (Optional) The description.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The IP group identifier.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkSpaces IP groups using their GroupID. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import WorkSpaces IP groups using their GroupID. For example:
+
+```console
+% terraform import aws_workspaces_ip_group.example wsipg-488lrtl3k
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/workspaces_workspace.html.markdown b/website/docs/cdktf/python/r/workspaces_workspace.html.markdown
new file mode 100644
index 00000000000..76e6e93c734
--- /dev/null
+++ b/website/docs/cdktf/python/r/workspaces_workspace.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_workspace"
+description: |-
+  Provides a WorkSpace in AWS WorkSpaces Service.
+---
+
+
+
+# Resource: aws_workspaces_workspace
+
+Provides a WorkSpace in [AWS WorkSpaces](https://docs.aws.amazon.com/workspaces/latest/adminguide/amazon-workspaces.html) Service.
+
+~> **NOTE:** AWS WorkSpaces service requires [`workspaces_DefaultRole`](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspaces-access-control.html#create-default-role) IAM role to operate normally.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_workspaces_bundle import DataAwsWorkspacesBundle
+from imports.aws.workspaces_workspace import WorkspacesWorkspace
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        value_windows10 = DataAwsWorkspacesBundle(self, "value_windows_10",
+            bundle_id="wsb-bh8rsxt14"
+        )
+        WorkspacesWorkspace(self, "example",
+            bundle_id=Token.as_string(value_windows10.id),
+            directory_id=Token.as_string(aws_workspaces_directory_example.id),
+            root_volume_encryption_enabled=True,
+            tags={
+                "Department": "IT"
+            },
+            user_name="john.doe",
+            user_volume_encryption_enabled=True,
+            volume_encryption_key="alias/aws/workspaces",
+            workspace_properties=WorkspacesWorkspaceWorkspaceProperties(
+                compute_type_name="VALUE",
+                root_volume_size_gib=80,
+                running_mode="AUTO_STOP",
+                running_mode_auto_stop_timeout_in_minutes=60,
+                user_volume_size_gib=10
+            )
+        )
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `directory_id` - (Required) The ID of the directory for the WorkSpace.
+* `bundle_id` - (Required) The ID of the bundle for the WorkSpace.
+* `user_name` – (Required) The user name of the user for the WorkSpace. This user name must exist in the directory for the WorkSpace.
+* `root_volume_encryption_enabled` - (Optional) Indicates whether the data stored on the root volume is encrypted.
+* `user_volume_encryption_enabled` – (Optional) Indicates whether the data stored on the user volume is encrypted. +* `volume_encryption_key` – (Optional) The symmetric AWS KMS customer master key (CMK) used to encrypt data stored on your WorkSpace. Amazon WorkSpaces does not support asymmetric CMKs. +* `tags` - (Optional) The tags for the WorkSpace. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `workspace_properties` – (Optional) The WorkSpace properties. + +`workspace_properties` supports the following: + +* `compute_type_name` – (Optional) The compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `VALUE`, `STANDARD`, `PERFORMANCE`, `POWER`, `GRAPHICS`, `POWERPRO`, `GRAPHICSPRO`, `GRAPHICS_G4DN`, and `GRAPHICSPRO_G4DN`. +* `root_volume_size_gib` – (Optional) The size of the root volume. +* `running_mode` – (Optional) The running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `AUTO_STOP` and `ALWAYS_ON`. +* `running_mode_auto_stop_timeout_in_minutes` – (Optional) The time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals. +* `user_volume_size_gib` – (Optional) The size of the user storage. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The workspaces ID. +* `ip_address` - The IP address of the WorkSpace. +* `computer_name` - The name of the WorkSpace, as seen by the operating system. +* `state` - The operational state of the WorkSpace. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30m`) +- `update` - (Default `10m`) +- `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Workspaces using their ID. For example: + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) +``` + +Using `terraform import`, import Workspaces using their ID. For example: + +```console +% terraform import aws_workspaces_workspace.example ws-9z9zmbkhv +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/xray_encryption_config.html.markdown b/website/docs/cdktf/python/r/xray_encryption_config.html.markdown new file mode 100644 index 00000000000..19dc129293d --- /dev/null +++ b/website/docs/cdktf/python/r/xray_encryption_config.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "X-Ray" +layout: "aws" +page_title: "AWS: aws_xray_encryption_config" +description: |- + Creates and manages an AWS XRay Encryption Config. 
+---
+
+
+
+# Resource: aws_xray_encryption_config
+
+Creates and manages an AWS XRay Encryption Config.
+
+~> **NOTE:** Removing this resource from Terraform has no effect on the encryption configuration within X-Ray.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.xray_encryption_config import XrayEncryptionConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        XrayEncryptionConfig(self, "example",
+            type="NONE"
+        )
+```
+
+## Example Usage with KMS Key
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import Token, TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.data_aws_caller_identity import DataAwsCallerIdentity
+from imports.aws.data_aws_iam_policy_document import DataAwsIamPolicyDocument
+from imports.aws.kms_key import KmsKey
+from imports.aws.xray_encryption_config import XrayEncryptionConfig
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        current = DataAwsCallerIdentity(self, "current")
+        example = DataAwsIamPolicyDocument(self, "example",
+            statement=[DataAwsIamPolicyDocumentStatement(
+                actions=["kms:*"],
+                effect="Allow",
+                principals=[DataAwsIamPolicyDocumentStatementPrincipals(
+                    identifiers=["arn:aws:iam::${" + current.account_id + "}:root"],
+                    type="AWS"
+                )
+                ],
+                resources=["*"],
+                sid="Enable IAM User Permissions"
+            )
+            ]
+        )
+        aws_kms_key_example = KmsKey(self, "example_2",
+            deletion_window_in_days=7,
+            description="Some Key",
+            policy=Token.as_string(example.json)
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_kms_key_example.override_logical_id("example")
+        aws_xray_encryption_config_example = XrayEncryptionConfig(self, "example_3",
+            key_id=Token.as_string(aws_kms_key_example.arn),
+            type="KMS"
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        aws_xray_encryption_config_example.override_logical_id("example")
+```
+
+## Argument Reference
+
+* `type` - (Required) The type of encryption. Set to `KMS` to use your own key for encryption. Set to `NONE` for default encryption.
+* `key_id` - (Optional) An AWS KMS customer master key (CMK) ARN.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Region name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import XRay Encryption Config using the region name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import XRay Encryption Config using the region name.
For example:
+
+```console
+% terraform import aws_xray_encryption_config.example us-west-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/python/r/xray_group.html.markdown b/website/docs/cdktf/python/r/xray_group.html.markdown
new file mode 100644
index 00000000000..53a3066a1da
--- /dev/null
+++ b/website/docs/cdktf/python/r/xray_group.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "X-Ray"
+layout: "aws"
+page_title: "AWS: aws_xray_group"
+description: |-
+  Creates and manages an AWS XRay Group.
+---
+
+
+
+# Resource: aws_xray_group
+
+Creates and manages an AWS XRay Group.
+
+## Example Usage
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+#
+# Provider bindings are generated by running `cdktf get`.
+# See https://cdk.tf/provider-generation for more details.
+#
+from imports.aws.xray_group import XrayGroup
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        XrayGroup(self, "example",
+            filter_expression="responsetime > 5",
+            group_name="example",
+            insights_configuration=XrayGroupInsightsConfiguration(
+                insights_enabled=True,
+                notifications_enabled=True
+            )
+        )
+```
+
+## Argument Reference
+
+* `group_name` - (Required) The name of the group.
+* `filter_expression` - (Required) The filter expression defining criteria by which to group traces. More information can be found in the official [docs](https://docs.aws.amazon.com/xray/latest/devguide/xray-console-filters.html).
+* `insights_configuration` - (Optional) Configuration options for enabling insights.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested fields
+
+#### `insights_configuration`
+
+* `insights_enabled` - (Required) Specifies whether insights are enabled.
+* `notifications_enabled` - (Optional) Specifies whether insight notifications are enabled.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Group.
+* `arn` - The ARN of the Group.
+* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import XRay Groups using the ARN. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import XRay Groups using the ARN.
For example: + +```console +% terraform import aws_xray_group.example arn:aws:xray:us-west-2:1234567890:group/example-group/TNGX7SW5U6QY36T4ZMOUA3HVLBYCZTWDIOOXY3CJAXTHSS3YCWUA +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/python/r/xray_sampling_rule.html.markdown b/website/docs/cdktf/python/r/xray_sampling_rule.html.markdown new file mode 100644 index 00000000000..29dfa7c456d --- /dev/null +++ b/website/docs/cdktf/python/r/xray_sampling_rule.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "X-Ray" +layout: "aws" +page_title: "AWS: aws_xray_sampling_rule" +description: |- + Creates and manages an AWS XRay Sampling Rule. +--- + + + +# Resource: aws_xray_sampling_rule + +Creates and manages an AWS XRay Sampling Rule. + +## Example Usage + +```python +# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +from constructs import Construct +from cdktf import TerraformStack +# +# Provider bindings are generated by running `cdktf get`. +# See https://cdk.tf/provider-generation for more details. +# +from imports.aws.xray_sampling_rule import XraySamplingRule +class MyConvertedCode(TerraformStack): + def __init__(self, scope, name): + super().__init__(scope, name) + XraySamplingRule(self, "example", + attributes={ + "Hello": "Tris" + }, + fixed_rate=0.05, + host="*", + http_method="*", + priority=9999, + reservoir_size=1, + resource_arn="*", + rule_name="example", + service_name="*", + service_type="*", + url_path="*", + version=1 + ) +``` + +## Argument Reference + +* `rule_name` - (Required) The name of the sampling rule. +* `resource_arn` - (Required) Matches the ARN of the AWS resource on which the service runs. +* `priority` - (Required) The priority of the sampling rule. +* `fixed_rate` - (Required) The percentage of matching requests to instrument, after the reservoir is exhausted. +* `reservoir_size` - (Required) A fixed number of matching requests to instrument per second, prior to applying the fixed rate. The reservoir is not used directly by services, but applies to all services using the rule collectively. +* `service_name` - (Required) Matches the `name` that the service uses to identify itself in segments. +* `service_type` - (Required) Matches the `origin` that the service uses to identify its type in segments. +* `host` - (Required) Matches the hostname from a request URL. +* `http_method` - (Required) Matches the HTTP method of a request. +* `url_path` - (Required) Matches the path from a request URL. +* `version` - (Required) The version of the sampling rule format (`1` ) +* `attributes` - (Optional) Matches attributes derived from the request. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the sampling rule. +* `arn` - The ARN of the sampling rule. +* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
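+
+Because every matching argument above is required, a narrower rule is written by fixing one dimension and wildcarding the rest. A sketch in plain HCL (the rule name, rates, and paths are illustrative): a lower `priority` value is evaluated before the catch-all example above, the `reservoir_size` guarantees a baseline of traced requests, and `fixed_rate` samples the remainder.
+
+```terraform
+resource "aws_xray_sampling_rule" "checkout" {
+  rule_name      = "checkout-service"
+  priority       = 100  # evaluated before the priority-9999 rule above
+  reservoir_size = 5    # always trace up to 5 matching requests per second
+  fixed_rate     = 0.10 # then sample 10% of the remainder
+  service_name   = "checkout"
+  service_type   = "*"
+  host           = "*"
+  http_method    = "*"
+  url_path       = "/api/checkout/*"
+  resource_arn   = "*"
+  version        = 1
+}
+```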
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import XRay Sampling Rules using the name. For example:
+
+```python
+# Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+from constructs import Construct
+from cdktf import TerraformStack
+class MyConvertedCode(TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+```
+
+Using `terraform import`, import XRay Sampling Rules using the name. For example:
+
+```console
+% terraform import aws_xray_sampling_rule.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/acm_certificate.html.markdown b/website/docs/cdktf/typescript/d/acm_certificate.html.markdown
new file mode 100644
index 00000000000..34b96d4f217
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/acm_certificate.html.markdown
@@ -0,0 +1,70 @@
+---
+subcategory: "ACM (Certificate Manager)"
+layout: "aws"
+page_title: "AWS: aws_acm_certificate"
+description: |-
+  Get information on an Amazon Certificate Manager (ACM) Certificate
+---
+
+
+
+# Data Source: aws_acm_certificate
+
+Use this data source to get the ARN of a certificate in AWS Certificate
+Manager (ACM) so that you can reference it by domain without having to
+hard-code the ARN as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAcmCertificate } from "./.gen/providers/aws/data-aws-acm-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAcmCertificate(this, "amazon_issued", {
+      domain: "tf.example.com",
+      mostRecent: true,
+      types: ["AMAZON_ISSUED"],
+    });
+    new DataAwsAcmCertificate(this, "issued", {
+      domain: "tf.example.com",
+      statuses: ["ISSUED"],
+    });
+    new DataAwsAcmCertificate(this, "rsa_4096", {
+      domain: "tf.example.com",
+      keyTypes: ["RSA_4096"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `domain` - (Required) Domain of the certificate to look up. If no certificate is found with this name, an error will be returned.
+* `keyTypes` - (Optional) List of key algorithms to filter certificates. By default, ACM does not return all certificate types when searching. See the [ACM API Reference](https://docs.aws.amazon.com/acm/latest/APIReference/API_CertificateDetail.html#ACM-Type-CertificateDetail-KeyAlgorithm) for supported key algorithms.
+* `statuses` - (Optional) List of statuses on which to filter the returned list. Valid values are `pendingValidation`, `issued`,
+  `inactive`, `expired`, `validationTimedOut`, `revoked` and `failed`. If no value is specified, only certificates in the `issued` state
+  are returned.
+* `types` - (Optional) List of types on which to filter the returned list. Valid values are `amazonIssued`, `private`, and `imported`.
+* `mostRecent` - (Optional) If set to true, it sorts the certificates matched by previous criteria by the NotBefore field, returning only the most recent one. If set to false, it returns an error if more than one certificate is found. Defaults to false.
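+
+The typical pattern is to resolve the certificate by domain and then consume the `arn` attribute documented below instead of hard-coding it. A minimal sketch in plain HCL (the output is illustrative; the ARN could equally be passed to any resource that accepts an ACM certificate, such as a load balancer listener):
+
+```terraform
+data "aws_acm_certificate" "issued" {
+  domain      = "tf.example.com"
+  statuses    = ["ISSUED"]
+  most_recent = true
+}
+
+output "issued_certificate_arn" {
+  value = data.aws_acm_certificate.issued.arn
+}
+```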
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the found certificate, suitable for referencing in other resources that support ACM certificates.
+* `id` - ARN of the found certificate, suitable for referencing in other resources that support ACM certificates.
+* `status` - Status of the found certificate.
+* `certificate` - ACM-issued certificate.
+* `certificateChain` - Certificates forming the requested ACM-issued certificate's chain of trust. The chain consists of the certificate of the issuing CA and the intermediate certificates of any other subordinate CAs.
+* `tags` - Mapping of tags for the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/acmpca_certificate.html.markdown b/website/docs/cdktf/typescript/d/acmpca_certificate.html.markdown
new file mode 100644
index 00000000000..518dbdc23ea
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/acmpca_certificate.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)"
+layout: "aws"
+page_title: "AWS: aws_acmpca_certificate"
+description: |-
+  Get information on a Certificate issued by an AWS Certificate Manager Private Certificate Authority
+---
+
+
+
+# Data Source: aws_acmpca_certificate
+
+Get information on a Certificate issued by an AWS Certificate Manager Private Certificate Authority.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAcmpcaCertificate } from "./.gen/providers/aws/data-aws-acmpca-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAcmpcaCertificate(this, "example", {
+      arn: "arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012/certificate/1234b4a0d73e2056789bdbe77d5b1a23",
+      certificateAuthorityArn:
+        "arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Required) ARN of the certificate issued by the private certificate authority.
+* `certificateAuthorityArn` - (Required) ARN of the certificate authority.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `certificate` - PEM-encoded certificate value.
+* `certificateChain` - PEM-encoded certificate chain that includes any intermediate certificates and chains up to root CA.
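+
+Both attributes are plain PEM-encoded strings, so they can be passed directly to other resources or surfaced as outputs. A minimal sketch (the output names are illustrative):
+
+```terraform
+output "issued_certificate_pem" {
+  value = data.aws_acmpca_certificate.example.certificate
+}
+
+output "issued_certificate_chain_pem" {
+  value = data.aws_acmpca_certificate.example.certificate_chain
+}
+```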
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/acmpca_certificate_authority.html.markdown b/website/docs/cdktf/typescript/d/acmpca_certificate_authority.html.markdown
new file mode 100644
index 00000000000..d47a093b7b9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/acmpca_certificate_authority.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)"
+layout: "aws"
+page_title: "AWS: aws_acmpca_certificate_authority"
+description: |-
+  Get information on an AWS Certificate Manager Private Certificate Authority
+---
+
+
+
+# Data Source: aws_acmpca_certificate_authority
+
+Get information on an AWS Certificate Manager Private Certificate Authority (ACM PCA Certificate Authority).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAcmpcaCertificateAuthority } from "./.gen/providers/aws/data-aws-acmpca-certificate-authority";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAcmpcaCertificateAuthority(this, "example", {
+      arn: "arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Required) ARN of the certificate authority.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ARN of the certificate authority.
+* `certificate` - Base64-encoded certificate authority (CA) certificate. Only available after the certificate authority certificate has been imported.
+* `certificateChain` - Base64-encoded certificate chain that includes any intermediate certificates and chains up to root on-premises certificate that you used to sign your private CA certificate. The chain does not include your private CA certificate. Only available after the certificate authority certificate has been imported.
+* `certificateSigningRequest` - The base64 PEM-encoded certificate signing request (CSR) for your private CA certificate.
+* `usageMode` - Specifies whether the CA issues general-purpose certificates that typically require a revocation mechanism, or short-lived certificates that may optionally omit revocation because they expire quickly.
+* `notAfter` - Date and time after which the certificate authority is not valid. Only available after the certificate authority certificate has been imported.
+* `notBefore` - Date and time before which the certificate authority is not valid. Only available after the certificate authority certificate has been imported.
+* `revocationConfiguration` - Nested attribute containing revocation configuration.
+    * `revocationConfiguration0CrlConfiguration` - Nested attribute containing configuration of the certificate revocation list (CRL), if any, maintained by the certificate authority.
+        * `revocationConfiguration0CrlConfiguration0CustomCname` - Name inserted into the certificate CRL Distribution Points extension that enables the use of an alias for the CRL distribution point.
+ * `revocationConfiguration0CrlConfiguration0Enabled` - Boolean value that specifies whether certificate revocation lists (CRLs) are enabled. + * `revocationConfiguration0CrlConfiguration0ExpirationInDays` - Number of days until a certificate expires. + * `revocationConfiguration0CrlConfiguration0S3BucketName` - Name of the S3 bucket that contains the CRL. + * `revocationConfiguration0CrlConfiguration0S3ObjectAcl` - Whether the CRL is publicly readable or privately held in the CRL Amazon S3 bucket. + * `revocationConfiguration0OcspConfiguration0Enabled` - Boolean value that specifies whether a custom OCSP responder is enabled. + * `revocationConfiguration0OcspConfiguration0OcspCustomCname` - A CNAME specifying a customized OCSP domain. +* `serial` - Serial number of the certificate authority. Only available after the certificate authority certificate has been imported. +* `status` - Status of the certificate authority. +* `tags` - Key-value map of user-defined tags that are attached to the certificate authority. +* `type` - Type of the certificate authority. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ami.html.markdown b/website/docs/cdktf/typescript/d/ami.html.markdown new file mode 100644 index 00000000000..74f90f7b976 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ami.html.markdown @@ -0,0 +1,142 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_ami" +description: |- + Get information on an Amazon Machine Image (AMI). +--- + + + +# Data Source: aws_ami + +Use this data source to get the ID of a registered AMI for use in other +resources. + +## Example Usage + +```terraform +data "aws_ami" "example" { + executable_users = ["self"] + most_recent = true + name_regex = "^myami-\\d{3}" + owners = ["self"] + + filter { + name = "name" + values = ["myami-*"] + } + + filter { + name = "root-device-type" + values = ["ebs"] + } + + filter { + name = "virtualization-type" + values = ["hvm"] + } +} +``` + +## Argument Reference + +* `owners` - (Optional) List of AMI owners to limit search. Valid values: an AWS account ID, `self` (the current account), or an AWS owner alias (e.g., `amazon`, `awsMarketplace`, `microsoft`). + +* `mostRecent` - (Optional) If more than one result is returned, use the most +recent AMI. + +* `executableUsers` - (Optional) Limit search to users with *explicit* launch permission on + the image. Valid items are the numeric account ID or `self`. + +* `includeDeprecated` - (Optional) If true, all deprecated AMIs are included in the response. If false, no deprecated AMIs are included in the response. If no value is specified, the default value is false. + +* `filter` - (Optional) One or more name/value pairs to filter off of. There are +several valid keys, for a full reference, check out +[describe-images in the AWS CLI reference][1]. + +* `nameRegex` - (Optional) Regex string to apply to the AMI list returned +by AWS. This allows more advanced filtering not supported from the AWS API. This +filtering is done locally on what AWS returns, and could have a performance +impact if the result is large. Combine this with other +options to narrow down the list AWS returns. + +~> **NOTE:** If more or less than a single match is returned by the search, +Terraform will fail. Ensure that your search is specific enough to return +a single AMI ID only, or use `mostRecent` to choose the most recent one. If +you want to match multiple AMIs, use the `awsAmiIds` data source instead. 
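+
+A common way to satisfy the single-match requirement is to combine a narrow `name` filter with `most_recent` and a trusted owner, then feed the result into an instance. A sketch in plain HCL (the owner ID is Canonical's account, as used in the `aws_ami_ids` example elsewhere in these docs; the instance is illustrative):
+
+```terraform
+data "aws_ami" "ubuntu" {
+  most_recent = true
+  owners      = ["099720109477"] # Canonical
+
+  filter {
+    name   = "name"
+    values = ["ubuntu/images/hvm-ssd/ubuntu-jammy-22.04-amd64-server-*"]
+  }
+
+  filter {
+    name   = "virtualization-type"
+    values = ["hvm"]
+  }
+}
+
+resource "aws_instance" "web" {
+  ami           = data.aws_ami.ubuntu.id
+  instance_type = "t3.micro"
+}
+```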
+
+## Attribute Reference
+
+`id` is set to the ID of the found AMI. In addition, the following attributes
+are exported:
+
+~> **NOTE:** Some values are not always set and may not be available for
+interpolation.
+
+* `arn` - ARN of the AMI.
+* `architecture` - OS architecture of the AMI (ie: `i386` or `x86_64`).
+* `bootMode` - Boot mode of the image.
+* `blockDeviceMappings` - Set of objects with block device mappings of the AMI.
+    * `deviceName` - Physical name of the device.
+    * `ebs` - Map containing EBS information, if the device is EBS based. Unlike most object attributes, these are accessed directly (e.g., `ebsVolumeSize` or `ebs["volumeSize"]`) rather than accessed through the first element of a list (e.g., `ebs[0]VolumeSize`).
+        * `deleteOnTermination` - `true` if the EBS volume will be deleted on termination.
+        * `encrypted` - `true` if the EBS volume is encrypted.
+        * `iops` - `0` if the EBS volume is not a provisioned IOPS image, otherwise the supported IOPS count.
+        * `snapshotId` - The ID of the snapshot.
+        * `volumeSize` - The size of the volume, in GiB.
+        * `throughput` - The throughput that the EBS volume supports, in MiB/s.
+        * `volumeType` - The volume type.
+    * `noDevice` - Suppresses the specified device included in the block device mapping of the AMI.
+    * `virtualName` - Virtual device name (for instance stores).
+* `creationDate` - Date and time the image was created.
+* `deprecationTime` - Date and time when the image will be deprecated.
+* `description` - Description of the AMI that was provided during image
+  creation.
+* `hypervisor` - Hypervisor type of the image.
+* `imageId` - ID of the AMI. Should be the same as the resource `id`.
+* `imageLocation` - Location of the AMI.
+* `imageOwnerAlias` - AWS account alias (for example, `amazon`, `self`) or
+  the AWS account ID of the AMI owner.
+* `imageType` - Type of image.
+* `imdsSupport` - Instance Metadata Service (IMDS) support mode for the image. Set to `v2.0` if instances launched from this image enforce IMDSv2.
+* `kernelId` - Kernel associated with the image, if any. Only applicable
+  for machine images.
+* `name` - Name of the AMI that was provided during image creation.
+* `ownerId` - AWS account ID of the image owner.
+* `platform` - Value is Windows for `windows` AMIs; otherwise blank.
+* `productCodes` - Any product codes associated with the AMI.
+    * `productCodes.#ProductCodeId` - The product code.
+    * `productCodes.#ProductCodeType` - The type of product code.
+* `public` - `true` if the image has public launch permissions.
+* `ramdiskId` - RAM disk associated with the image, if any. Only applicable
+  for machine images.
+* `rootDeviceName` - Device name of the root device.
+* `rootDeviceType` - Type of root device (ie: `ebs` or `instanceStore`).
+* `rootSnapshotId` - Snapshot id associated with the root device, if any
+  (only applies to `ebs` root devices).
+* `sriovNetSupport` - Whether enhanced networking is enabled.
+* `state` - Current state of the AMI. If the state is `available`, the image
+  is successfully registered and can be used to launch an instance.
+* `stateReason` - Describes a state change. Fields are `unset` if not available.
+    * `stateReasonCode` - The reason code for the state change.
+    * `stateReasonMessage` - The message for the state change.
+* `tags` - Any tags assigned to the image.
+    * `tags.#Key` - Key name of the tag.
+    * `tags.#Value` - Value of the tag.
+* `tpmSupport` - If the image is configured for NitroTPM support, the value is `v2.0`.
+* `virtualizationType` - Type of virtualization of the AMI (ie: `hvm` or
+  `paravirtual`).
+* `usageOperation` - Operation of the Amazon EC2 instance and the billing code that is associated with the AMI.
+* `platformDetails` - Platform details associated with the billing code of the AMI.
+* `enaSupport` - Whether enhanced networking with ENA is enabled.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ami_ids.html.markdown b/website/docs/cdktf/typescript/d/ami_ids.html.markdown
new file mode 100644
index 00000000000..67b5feecc37
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ami_ids.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_ami_ids"
+description: |-
+  Provides a list of AMI IDs.
+---
+
+
+
+# Data Source: aws_ami_ids
+
+Use this data source to get a list of AMI IDs matching the specified criteria.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAmiIds } from "./.gen/providers/aws/data-aws-ami-ids";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAmiIds(this, "ubuntu", {
+      filter: [
+        {
+          name: "name",
+          values: ["ubuntu/images/ubuntu-*-*-amd64-server-*"],
+        },
+      ],
+      owners: ["099720109477"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `owners` - (Required) List of AMI owners to limit search. At least 1 value must be specified. Valid values: an AWS account ID, `self` (the current account), or an AWS owner alias (e.g., `amazon`, `awsMarketplace`, `microsoft`).
+
+* `executableUsers` - (Optional) Limit search to users with *explicit* launch
+permission on the image. Valid items are the numeric account ID or `self`.
+
+* `filter` - (Optional) One or more name/value pairs to filter off of. There
+are several valid keys, for a full reference, check out
+[describe-images in the AWS CLI reference][1].
+
+* `nameRegex` - (Optional) Regex string to apply to the AMI list returned
+by AWS. This allows more advanced filtering not supported from the AWS API.
+This filtering is done locally on what AWS returns, and could have a performance
+impact if the result is large. Combine this with other
+options to narrow down the list AWS returns.
+
+* `sortAscending` - (Optional) Used to sort AMIs by creation time.
+If no value is specified, the default value is `false`.
+
+* `includeDeprecated` - (Optional) If true, all deprecated AMIs are included in the response.
+If false, no deprecated AMIs are included in the response. If no value is specified, the default value is `false`.
+
+## Attribute Reference
+
+`ids` is set to the list of AMI IDs, sorted by creation time according to `sortAscending`.
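+
+Because the list is sorted by creation time (newest first when `sortAscending` is left at its default of false), the first element is the most recent matching AMI. A sketch in plain HCL (the output is illustrative):
+
+```terraform
+data "aws_ami_ids" "ubuntu" {
+  owners = ["099720109477"]
+
+  filter {
+    name   = "name"
+    values = ["ubuntu/images/ubuntu-*-*-amd64-server-*"]
+  }
+}
+
+output "newest_ubuntu_ami" {
+  # With the default sort_ascending = false the list is newest-first.
+  value = data.aws_ami_ids.ubuntu.ids[0]
+}
+```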
+
+[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/api_gateway_api_key.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_api_key.html.markdown
new file mode 100644
index 00000000000..1bdb39f448b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/api_gateway_api_key.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_api_key"
+description: |-
+  Get information on an API Gateway REST API Key
+---
+
+
+
+# Data Source: aws_api_gateway_api_key
+
+Use this data source to get the name and value of a pre-existing API Key, for
+example to supply credentials for a dependency microservice.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApiGatewayApiKey } from "./.gen/providers/aws/data-aws-api-gateway-api-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApiGatewayApiKey(this, "my_api_key", {
+      id: "ru3mpjgse6",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `id` - (Required) ID of the API Key to look up.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Set to the ID of the API Key.
+* `name` - Set to the name of the API Key.
+* `value` - Set to the value of the API Key.
+* `createdDate` - Date and time when the API Key was created.
+* `lastUpdatedDate` - Date and time when the API Key was last updated.
+* `description` - Description of the API Key.
+* `enabled` - Whether the API Key is enabled.
+* `tags` - Map of tags for the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/api_gateway_authorizer.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_authorizer.html.markdown
new file mode 100644
index 00000000000..f23bc2e544b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/api_gateway_authorizer.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_authorizer"
+description: |-
+  Provides details about a specific API Gateway Authorizer.
+---
+
+
+
+# Data Source: aws_api_gateway_authorizer
+
+Provides details about a specific API Gateway Authorizer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsApiGatewayAuthorizer } from "./.gen/providers/aws/data-aws-api-gateway-authorizer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsApiGatewayAuthorizer(this, "example", { + authorizerId: Token.asString( + propertyAccess(dataAwsApiGatewayAuthorizersExample.ids, ["0"]) + ), + restApiId: Token.asString(awsApiGatewayRestApiExample.id), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `authorizerId` - (Required) Authorizer identifier. +* `restApiId` - (Required) ID of the associated REST API. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the API Gateway Authorizer. +* `authorizerCredentials` - Credentials required for the authorizer. +* `authorizerResultTtlInSeconds` - TTL of cached authorizer results in seconds. +* `authorizerUri` - Authorizer's Uniform Resource Identifier (URI). +* `identitySource` - Source of the identity in an incoming request. +* `identityValidationExpression` - Validation expression for the incoming identity. +* `name` - Name of the authorizer. +* `providerArns` - List of the Amazon Cognito user pool ARNs. +* `type` - Type of the authorizer. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/api_gateway_authorizers.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_authorizers.html.markdown new file mode 100644 index 00000000000..5e7633ab95a --- /dev/null +++ b/website/docs/cdktf/typescript/d/api_gateway_authorizers.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_authorizers" +description: |- + Provides details about multiple API Gateway Authorizers. +--- + + + +# Data Source: aws_api_gateway_authorizers + +Provides details about multiple API Gateway Authorizers. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsApiGatewayAuthorizers } from "./.gen/providers/aws/data-aws-api-gateway-authorizers"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsApiGatewayAuthorizers(this, "example", { + restApiId: Token.asString(awsApiGatewayRestApiExample.id), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `restApiId` - (Required) ID of the associated REST API. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - List of Authorizer identifiers. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/api_gateway_domain_name.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_domain_name.html.markdown new file mode 100644 index 00000000000..60bd9fa6884 --- /dev/null +++ b/website/docs/cdktf/typescript/d/api_gateway_domain_name.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_domain_name" +description: |- + Get information on a custom domain name for use with AWS API Gateway. 
+---
+
+
+
+# Data Source: aws_api_gateway_domain_name
+
+Use this data source to get the custom domain name for use with AWS API Gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApiGatewayDomainName } from "./.gen/providers/aws/data-aws-api-gateway-domain-name";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApiGatewayDomainName(this, "example", {
+      domainName: "api.example.com",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `domainName` - (Required) Fully-qualified domain name to look up. If no domain name is found, an error will be returned.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the found custom domain name.
+* `certificateArn` - ARN for an AWS-managed certificate that is used by edge-optimized endpoint for this domain name.
+* `certificateName` - Name of the certificate that is used by edge-optimized endpoint for this domain name.
+* `certificateUploadDate` - Upload date associated with the domain certificate.
+* `cloudfrontDomainName` - Hostname created by CloudFront to represent the distribution that implements this domain name mapping.
+* `cloudfrontZoneId` - For convenience, the hosted zone ID (`Z2FDTNDATAQYW2`) that can be used to create a Route53 alias record for the distribution.
+* `endpointConfiguration` - List of objects with the endpoint configuration of this domain name.
+    * `types` - List of endpoint types.
+* `regionalCertificateArn` - ARN for an AWS-managed certificate that is used for validating the regional domain name.
+* `regionalCertificateName` - User-friendly name of the certificate that is used by regional endpoint for this domain name.
+* `regionalDomainName` - Hostname for the custom domain's regional endpoint.
+* `regionalZoneId` - Hosted zone ID that can be used to create a Route53 alias record for the regional endpoint.
+* `securityPolicy` - Security policy for the domain name.
+* `tags` - Key-value map of tags for the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/api_gateway_export.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_export.html.markdown
new file mode 100644
index 00000000000..28d8b94c43d
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/api_gateway_export.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_export"
+description: |-
+  Exports a definition of a deployed API Gateway REST API stage
+---
+
+
+
+# Data Source: aws_api_gateway_export
+
+Exports a definition of a deployed API Gateway REST API stage in OpenAPI or Swagger format.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
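+ *
+ * `awsApiGatewayStageExample` below is assumed to be an API Gateway
+ * stage construct declared elsewhere in the stack.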
+ */
+import { DataAwsApiGatewayExport } from "./.gen/providers/aws/data-aws-api-gateway-export";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApiGatewayExport(this, "example", {
+      exportType: "oas30",
+      restApiId: Token.asString(awsApiGatewayStageExample.restApiId),
+      stageName: Token.asString(awsApiGatewayStageExample.stageName),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `exportType` - (Required) Type of export. Acceptable values are `oas30` for OpenAPI 3.0.x and `swagger` for Swagger/OpenAPI 2.0.
+* `restApiId` - (Required) Identifier of the associated REST API.
+* `stageName` - (Required) Name of the Stage that will be exported.
+* `accepts` - (Optional) Content-type of the export. Valid values of `application/json` and `application/yaml` are supported for an `exportType` of `oas30` and `swagger`.
+* `parameters` - (Optional) Key-value map of query string parameters that specify properties of the export. The following parameters are supported: `extensions='integrations'` or `extensions='apigateway'` will export the API with x-amazon-apigateway-integration extensions. `extensions='authorizers'` will export the API with x-amazon-apigateway-authorizer extensions.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The `restApiId:stageName` identifier of the export.
+* `body` - API Spec.
+* `contentType` - Content-type header value in the HTTP response.
+* `contentDisposition` - Content-disposition header value in the HTTP response.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/api_gateway_resource.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_resource.html.markdown
new file mode 100644
index 00000000000..e31e3ae1f44
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/api_gateway_resource.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_resource"
+description: |-
+  Get information on an API Gateway Resource
+---
+
+
+
+# Data Source: aws_api_gateway_resource
+
+Use this data source to get the id of a Resource in API Gateway.
+To fetch the Resource, you must provide the REST API id as well as the full path.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApiGatewayResource } from "./.gen/providers/aws/data-aws-api-gateway-resource";
+import { DataAwsApiGatewayRestApi } from "./.gen/providers/aws/data-aws-api-gateway-rest-api";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const myRestApi = new DataAwsApiGatewayRestApi(this, "my_rest_api", {
+      name: "my-rest-api",
+    });
+    new DataAwsApiGatewayResource(this, "my_resource", {
+      path: "/endpoint/path",
+      restApiId: Token.asString(myRestApi.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `restApiId` - (Required) REST API id that owns the resource. If no REST API is found, an error will be returned.
+* `path` - (Required) Full path of the resource. If no path is found, an error will be returned.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Set to the ID of the found Resource.
+* `parentId` - Set to the ID of the parent Resource.
+* `pathPart` - Set to the path relative to the parent Resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/api_gateway_rest_api.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_rest_api.html.markdown
new file mode 100644
index 00000000000..816077676f3
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/api_gateway_rest_api.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_rest_api"
+description: |-
+  Get information on an API Gateway REST API
+---
+
+
+
+# Data Source: aws_api_gateway_rest_api
+
+Use this data source to get the id and root_resource_id of a REST API in
+API Gateway. To fetch the REST API you must provide a name to match against.
+As there is no unique name constraint on REST APIs this data source will
+error if there is more than one match.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApiGatewayRestApi } from "./.gen/providers/aws/data-aws-api-gateway-rest-api";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApiGatewayRestApi(this, "my_rest_api", {
+      name: "my-rest-api",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the REST API to look up. If no REST API is found with this name, an error will be returned. If multiple REST APIs are found with this name, an error will be returned.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `apiKeySource` - Source of the API key for requests.
+* `arn` - ARN of the REST API.
+* `binaryMediaTypes` - List of binary media types supported by the REST API.
+* `description` - Description of the REST API.
+* `endpointConfiguration` - The endpoint configuration of this RestApi showing the endpoint types of the API.
+* `executionArn` - Execution ARN part to be used in [`lambdaPermission`](/docs/providers/aws/r/lambda_permission.html)'s `sourceArn` when allowing API Gateway to invoke a Lambda function, e.g., `arn:aws:execute-api:eu-west-2:123456789012:z4675bid1j`, which can be concatenated with allowed stage, method and resource path.
+* `id` - Set to the ID of the found REST API.
+* `minimumCompressionSize` - Minimum response size to compress for the REST API.
+* `policy` - JSON formatted policy document that controls access to the API Gateway.
+* `rootResourceId` - Set to the ID of the API Gateway Resource on the found REST API where the route matches '/'.
+* `tags` - Key-value map of resource tags.
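+
+As an illustrative, hand-written sketch (not produced by `cdktf convert`; the
+stack class name is arbitrary and the REST API name is reused from the example
+above), the looked-up identifiers can be surfaced as stack outputs:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsApiGatewayRestApi } from "./.gen/providers/aws/data-aws-api-gateway-rest-api";
+class MyRestApiOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Look up the REST API by name; this errors if zero or several match.
+    const api = new DataAwsApiGatewayRestApi(this, "my_rest_api", {
+      name: "my-rest-api",
+    });
+    // Expose the IDs that other stacks or tooling typically need.
+    new TerraformOutput(this, "rest_api_id", { value: api.id });
+    new TerraformOutput(this, "root_resource_id", {
+      value: api.rootResourceId,
+    });
+  }
+}
+```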
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/api_gateway_sdk.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_sdk.html.markdown
new file mode 100644
index 00000000000..f7b6fd984aa
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/api_gateway_sdk.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_sdk"
+description: |-
+  Gets an API Gateway client SDK
+---
+
+
+
+# Data Source: aws_api_gateway_sdk
+
+Gets a client SDK for an API Gateway REST API stage in a supported language.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApiGatewaySdk } from "./.gen/providers/aws/data-aws-api-gateway-sdk";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApiGatewaySdk(this, "example", {
+      parameters: {
+        artifactId: "example",
+        artifactVersion: "example",
+        groupId: "example",
+        invokerPackage: "example",
+      },
+      restApiId: Token.asString(awsApiGatewayStageExample.restApiId),
+      sdkType: "android",
+      stageName: Token.asString(awsApiGatewayStageExample.stageName),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `restApiId` - (Required) Identifier of the associated REST API.
+* `stageName` - (Required) Name of the Stage that will be exported.
+* `sdkType` - (Required) Language for the generated SDK. Currently `java`, `javascript`, `android`, `objectivec` (for iOS), `swift` (for iOS), and `ruby` are supported.
+* `parameters` - (Optional) Key-value map of query string parameters that specify properties of the SDK, depending on the requested `sdkType`. For SDK Type of `objectivec` or `swift`, a parameter named `classPrefix` is required. For SDK Type of `android`, parameters named `groupId`, `artifactId`, `artifactVersion`, and `invokerPackage` are required. For SDK Type of `java`, parameters named `serviceName` and `javaPackageName` are required.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The `restApiId:stageName` identifier of the SDK.
+* `body` - SDK as a string.
+* `contentType` - Content-type header value in the HTTP response.
+* `contentDisposition` - Content-disposition header value in the HTTP response.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/api_gateway_vpc_link.html.markdown b/website/docs/cdktf/typescript/d/api_gateway_vpc_link.html.markdown
new file mode 100644
index 00000000000..4062f6a09b9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/api_gateway_vpc_link.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_vpc_link"
+description: |-
+  Get information on an API Gateway VPC Link
+---
+
+
+
+# Data Source: aws_api_gateway_vpc_link
+
+Use this data source to get the id of a VPC Link in
+API Gateway. To fetch the VPC Link you must provide a name to match against.
+As there is no unique name constraint on API Gateway VPC Links this data source will
+error if there is more than one match.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
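+ * Note: as described under Argument Reference below, this lookup
+ * errors unless exactly one VPC Link matches the given name.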
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApiGatewayVpcLink } from "./.gen/providers/aws/data-aws-api-gateway-vpc-link";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApiGatewayVpcLink(this, "my_api_gateway_vpc_link", {
+      name: "my-vpc-link",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the API Gateway VPC Link to look up. If no API Gateway VPC Link is found with this name, an error will be returned.
+  If multiple API Gateway VPC Links are found with this name, an error will be returned.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Set to the ID of the found API Gateway VPC Link.
+* `description` - Description of the VPC link.
+* `status` - Status of the VPC link.
+* `statusMessage` - Status message of the VPC link.
+* `targetArns` - List of network load balancer ARNs in the VPC targeted by the VPC link. Currently AWS only supports 1 target.
+* `tags` - Key-value map of resource tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/apigatewayv2_api.html.markdown b/website/docs/cdktf/typescript/d/apigatewayv2_api.html.markdown
new file mode 100644
index 00000000000..9ce494ed6b9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/apigatewayv2_api.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_api"
+description: |-
+  Provides details about a specific Amazon API Gateway Version 2 API.
+---
+
+
+
+# Data Source: aws_apigatewayv2_api
+
+Provides details about a specific Amazon API Gateway Version 2 API.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApigatewayv2Api } from "./.gen/providers/aws/data-aws-apigatewayv2-api";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApigatewayv2Api(this, "example", {
+      apiId: "aabbccddee",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available APIs in the current region.
+The given filters must match exactly one API whose data will be exported as attributes.
+
+This data source supports the following arguments:
+
+* `apiId` - (Required) API identifier.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `apiEndpoint` - URI of the API, of the form `https://{api-id}.execute-api.{region}.amazonaws.com` for HTTP APIs and `wss://{api-id}.execute-api.{region}.amazonaws.com` for WebSocket APIs.
+* `apiKeySelectionExpression` - An [API key selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions).
+Applicable for WebSocket APIs.
+* `arn` - ARN of the API.
+* `corsConfiguration` - Cross-origin resource sharing (CORS) [configuration](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html).
+Applicable for HTTP APIs.
+* `description` - Description of the API.
+* `disableExecuteApiEndpoint` - Whether clients can invoke the API by using the default `execute-api` endpoint.
+* `executionArn` - ARN prefix to be used in an [`awsLambdaPermission`](/docs/providers/aws/r/lambda_permission.html)'s `sourceArn` attribute
+or in an [`awsIamPolicy`](/docs/providers/aws/r/iam_policy.html) to authorize access to the [`@connections` API](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-how-to-call-websocket-api-connections.html).
+See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-control-access-iam.html) for details.
+* `name` - Name of the API.
+* `protocolType` - API protocol.
+* `routeSelectionExpression` - The [route selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-route-selection-expressions) for the API.
+* `tags` - Map of resource tags.
+* `version` - Version identifier for the API.
+
+The `corsConfiguration` object supports the following:
+
+* `allowCredentials` - Whether credentials are included in the CORS request.
+* `allowHeaders` - Set of allowed HTTP headers.
+* `allowMethods` - Set of allowed HTTP methods.
+* `allowOrigins` - Set of allowed origins.
+* `exposeHeaders` - Set of exposed HTTP headers.
+* `maxAge` - Number of seconds that the browser should cache preflight request results.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/apigatewayv2_apis.html.markdown b/website/docs/cdktf/typescript/d/apigatewayv2_apis.html.markdown
new file mode 100644
index 00000000000..963abb5c8b1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/apigatewayv2_apis.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_apis"
+description: |-
+  Provides details about multiple Amazon API Gateway Version 2 APIs.
+---
+
+
+
+# Data Source: aws_apigatewayv2_apis
+
+Provides details about multiple Amazon API Gateway Version 2 APIs.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApigatewayv2Apis } from "./.gen/providers/aws/data-aws-apigatewayv2-apis";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApigatewayv2Apis(this, "example", {
+      protocolType: "HTTP",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Optional) API name.
+* `protocolType` - (Optional) API protocol.
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired APIs.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - Set of API identifiers.
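+
+A hand-written sketch (not generated by `cdktf convert`; the stack class name
+is arbitrary) showing how the resulting set of identifiers can be exposed as a
+stack output, reusing the HTTP filter from the example above:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsApigatewayv2Apis } from "./.gen/providers/aws/data-aws-apigatewayv2-apis";
+class MyHttpApiIds extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Collect the identifiers of every HTTP API in the current region.
+    const apis = new DataAwsApigatewayv2Apis(this, "example", {
+      protocolType: "HTTP",
+    });
+    // `ids` is the set of matching API identifiers.
+    new TerraformOutput(this, "http_api_ids", { value: apis.ids });
+  }
+}
+```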
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/apigatewayv2_export.html.markdown b/website/docs/cdktf/typescript/d/apigatewayv2_export.html.markdown
new file mode 100644
index 00000000000..b89c99d50ef
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/apigatewayv2_export.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_export"
+description: |-
+  Exports a definition of an API in a particular output format and specification.
+---
+
+
+
+# Data Source: aws_apigatewayv2_export
+
+Exports a definition of an API in a particular output format and specification.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsApigatewayv2Export } from "./.gen/providers/aws/data-aws-apigatewayv2-export";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsApigatewayv2Export(this, "test", {
+      apiId: Token.asString(awsApigatewayv2RouteTest.apiId),
+      outputType: "JSON",
+      specification: "OAS30",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:

+* `apiId` - (Required) API identifier.
+* `specification` - (Required) Version of the API specification to use. `oas30`, for OpenAPI 3.0, is the only supported value.
+* `outputType` - (Required) Output type of the exported definition file. Valid values are `json` and `yaml`.
+* `exportVersion` - (Optional) Version of the API Gateway export algorithm. API Gateway uses the latest version by default. Currently, the only supported version is `1.0`.
+* `includeExtensions` - (Optional) Whether to include API Gateway extensions in the exported API definition. API Gateway extensions are included by default.
+* `stageName` - (Optional) Name of the API stage to export. If you don't specify this property, a representation of the latest API configuration is exported.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - API identifier.
+* `body` - Exported API definition.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appconfig_configuration_profile.html.markdown b/website/docs/cdktf/typescript/d/appconfig_configuration_profile.html.markdown
new file mode 100644
index 00000000000..ca0e21cc2fa
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appconfig_configuration_profile.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_configuration_profile"
+description: |-
+  Terraform data source for managing an AWS AppConfig Configuration Profile.
+---
+
+
+
+# Data Source: aws_appconfig_configuration_profile
+
+Provides access to an AppConfig Configuration Profile.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
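+ *
+ * The applicationId and configurationProfileId used below are sample
+ * values; substitute the identifiers of your own AppConfig resources.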
+ */
+import { DataAwsAppconfigConfigurationProfile } from "./.gen/providers/aws/data-aws-appconfig-configuration-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppconfigConfigurationProfile(this, "example", {
+      applicationId: "b5d5gpj",
+      configurationProfileId: "qrbb1c1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `applicationId` - (Required) ID of the AppConfig application to which this configuration profile belongs.
+* `configurationProfileId` - (Required) ID of the Configuration Profile.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Configuration Profile.
+* `description` - Description of the Configuration Profile.
+* `id` - AppConfig Configuration Profile ID and Application ID separated by a colon (`:`).
+* `locationUri` - Location URI of the Configuration Profile.
+* `name` - Name of the Configuration Profile.
+* `retrievalRoleArn` - ARN of an IAM role with permission to access the configuration at the specified `locationUri`.
+* `tags` - Map of tags for the resource.
+* `validator` - Nested list of methods for validating the configuration.
+    * `content` - Either the JSON Schema content or the ARN of an AWS Lambda function.
+    * `type` - Type of validator. Valid values: `JSON_SCHEMA` and `LAMBDA`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appconfig_configuration_profiles.html.markdown b/website/docs/cdktf/typescript/d/appconfig_configuration_profiles.html.markdown
new file mode 100644
index 00000000000..33512681750
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appconfig_configuration_profiles.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_configuration_profiles"
+description: |-
+  Terraform data source for managing AWS AppConfig Configuration Profiles.
+---
+
+
+
+# Data Source: aws_appconfig_configuration_profiles
+
+Provides access to all Configuration Profiles for an AppConfig Application. This will allow you to pass Configuration
+Profile IDs to another resource.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformIterator, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppconfigConfigurationProfile } from "./.gen/providers/aws/data-aws-appconfig-configuration-profile";
+import { DataAwsAppconfigConfigurationProfiles } from "./.gen/providers/aws/data-aws-appconfig-configuration-profiles";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsAppconfigConfigurationProfiles(this, "example", {
+      applicationId: "a1d3rpe",
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g.
a result of a data source
+you need to keep this like it is.*/
+    const exampleForEachIterator = TerraformIterator.fromList(
+      Token.asAny(example.configurationProfileIds)
+    );
+    const dataAwsAppconfigConfigurationProfileExample =
+      new DataAwsAppconfigConfigurationProfile(this, "example_1", {
+        applicationId: Token.asString(awsAppconfigApplicationExample.id),
+        configurationProfileId: Token.asString(exampleForEachIterator.value),
+        forEach: exampleForEachIterator,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsAppconfigConfigurationProfileExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `applicationId` - (Required) ID of the AppConfig Application.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `configurationProfileIds` - Set of Configuration Profile IDs associated with the AppConfig Application.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appconfig_environment.html.markdown b/website/docs/cdktf/typescript/d/appconfig_environment.html.markdown
new file mode 100644
index 00000000000..5035033417b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appconfig_environment.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_environment"
+description: |-
+  Terraform data source for managing an AWS AppConfig Environment.
+---
+
+
+
+# Data Source: aws_appconfig_environment
+
+Provides access to an AppConfig Environment.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppconfigEnvironment } from "./.gen/providers/aws/data-aws-appconfig-environment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppconfigEnvironment(this, "example", {
+      applicationId: "b5d5gpj",
+      environmentId: "qrbb1c1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `applicationId` - (Required) ID of the AppConfig Application to which this Environment belongs.
+* `environmentId` - (Required) ID of the AppConfig Environment.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the environment.
+* `name` - Name of the environment.
+* `description` - Description of the environment.
+* `monitor` - Set of Amazon CloudWatch alarms to monitor during the deployment process.
+    * `alarmArn` - ARN of the Amazon CloudWatch alarm.
+    * `alarmRoleArn` - ARN of an IAM role for AWS AppConfig to monitor.
+* `state` - State of the environment. Possible values are `READY_FOR_DEPLOYMENT`, `DEPLOYING`, `ROLLING_BACK`
+  or `ROLLED_BACK`.
+* `tags` - Map of tags for the resource.
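+
+As a brief hand-written sketch (not generated by `cdktf convert`; the stack
+class name is arbitrary and the sample IDs are reused from the example above),
+the environment's deployment state can be surfaced as an output:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsAppconfigEnvironment } from "./.gen/providers/aws/data-aws-appconfig-environment";
+class MyEnvironmentState extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const env = new DataAwsAppconfigEnvironment(this, "example", {
+      applicationId: "b5d5gpj",
+      environmentId: "qrbb1c1",
+    });
+    // One of READY_FOR_DEPLOYMENT, DEPLOYING, ROLLING_BACK or ROLLED_BACK.
+    new TerraformOutput(this, "environment_state", { value: env.state });
+  }
+}
+```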
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appconfig_environments.html.markdown b/website/docs/cdktf/typescript/d/appconfig_environments.html.markdown
new file mode 100644
index 00000000000..2a96bbb099b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appconfig_environments.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_environments"
+description: |-
+  Terraform data source for managing AWS AppConfig Environments.
+---
+
+
+
+# Data Source: aws_appconfig_environments
+
+Provides access to all Environments for an AppConfig Application. This will allow you to pass Environment IDs to another
+resource.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppconfigEnvironments } from "./.gen/providers/aws/data-aws-appconfig-environments";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppconfigEnvironments(this, "example", {
+      applicationId: "a1d3rpe",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `applicationId` - (Required) ID of the AppConfig Application.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `environmentIds` - Set of Environment IDs associated with this AppConfig Application.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appintegrations_event_integration.html.markdown b/website/docs/cdktf/typescript/d/appintegrations_event_integration.html.markdown
new file mode 100644
index 00000000000..bdeca45b0cf
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appintegrations_event_integration.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "AppIntegrations"
+layout: "aws"
+page_title: "AWS: aws_appintegrations_event_integration"
+description: |-
+  Provides details about an Amazon AppIntegrations Event Integration
+---
+
+
+
+# Data Source: aws_appintegrations_event_integration
+
+Use this data source to get information on an existing AppIntegrations Event Integration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppintegrationsEventIntegration } from "./.gen/providers/aws/data-aws-appintegrations-event-integration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppintegrationsEventIntegration(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) The AppIntegrations Event Integration name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the AppIntegrations Event Integration.
+* `description` - The description of the Event Integration.
+* `eventbridgeBus` - The EventBridge bus.
+* `eventFilter` - A block that defines the configuration information for the event filter. The Event Filter block is documented below.
+* `id` - The identifier of the Event Integration which is the name of the Event Integration.
+* `tags` - Metadata that you can assign to help organize the resources you create.
+
+### Event Filter Attributes
+
+`eventFilter` has the following attributes:
+
+* `source` - The source of the events.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appmesh_gateway_route.html.markdown b/website/docs/cdktf/typescript/d/appmesh_gateway_route.html.markdown
new file mode 100644
index 00000000000..45724e3c60f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appmesh_gateway_route.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_gateway_route"
+description: |-
+  Terraform data source for managing an AWS App Mesh Gateway Route.
+---
+
+
+
+# Data Source: aws_appmesh_gateway_route
+
+The App Mesh Gateway Route data source allows details of an App Mesh Gateway Route to be retrieved by its name, mesh_name, virtual_gateway_name, and optionally the mesh_owner.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppmeshGatewayRoute } from "./.gen/providers/aws/data-aws-appmesh-gateway-route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppmeshGatewayRoute(this, "test", {
+      meshName: "test-mesh",
+      name: "test-route",
+      virtualGatewayName: "test-gateway",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the gateway route.
+* `meshName` - (Required) Name of the service mesh in which the virtual gateway exists.
+* `virtualGatewayName` - (Required) Name of the virtual gateway in which the route exists.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the gateway route.
+* `createdDate` - Creation date of the gateway route.
+* `lastUpdatedDate` - Last update date of the gateway route.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `spec` - Gateway route specification. See the [`awsAppmeshGatewayRoute`](/docs/providers/aws/r/appmesh_gateway_route.html#spec) resource for details.
+* `tags` - Map of tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appmesh_mesh.html.markdown b/website/docs/cdktf/typescript/d/appmesh_mesh.html.markdown
new file mode 100644
index 00000000000..2d5f4f90fd8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appmesh_mesh.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_mesh"
+description: |-
+  Terraform data source for managing an AWS App Mesh Mesh.
+---
+
+
+
+# Data Source: aws_appmesh_mesh
+
+The App Mesh Mesh data source allows details of an App Mesh Mesh to be retrieved by its name and optionally the mesh_owner.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAppmeshMesh } from "./.gen/providers/aws/data-aws-appmesh-mesh"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAppmeshMesh(this, "simple", { + name: "simpleapp", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAppmeshMesh } from "./.gen/providers/aws/data-aws-appmesh-mesh"; +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + new DataAwsAppmeshMesh(this, "simple", { + meshOwner: Token.asString(current.accountId), + name: "simpleapp", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the service mesh. +* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the service mesh. +* `createdDate` - Creation date of the service mesh. +* `lastUpdatedDate` - Last update date of the service mesh. +* `resourceOwner` - Resource owner's AWS account ID. +* `spec` - Service mesh specification. See the [`awsAppmeshMesh`](/docs/providers/aws/r/appmesh_mesh.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/appmesh_route.html.markdown b/website/docs/cdktf/typescript/d/appmesh_route.html.markdown new file mode 100644 index 00000000000..2cf043e8094 --- /dev/null +++ b/website/docs/cdktf/typescript/d/appmesh_route.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_route" +description: |- + Terraform data source for managing an AWS App Mesh Route. +--- + + + +# Data Source: aws_appmesh_route + +The App Mesh Route data source allows details of an App Mesh Route to be retrieved by its name, mesh_name, virtual_router_name, and optionally the mesh_owner. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsAppmeshRoute } from "./.gen/providers/aws/data-aws-appmesh-route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppmeshRoute(this, "test", {
+      meshName: "test-mesh",
+      name: "test-route",
+      virtualRouterName: "test-router",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the route.
+* `meshName` - (Required) Name of the service mesh in which the virtual router exists.
+* `virtualRouterName` - (Required) Name of the virtual router in which the route exists.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the route.
+* `createdDate` - Creation date of the route.
+* `lastUpdatedDate` - Last update date of the route.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `spec` - Route specification. See the [`awsAppmeshRoute`](/docs/providers/aws/r/appmesh_route.html#spec) resource for details.
+* `tags` - Map of tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appmesh_virtual_gateway.html.markdown b/website/docs/cdktf/typescript/d/appmesh_virtual_gateway.html.markdown
new file mode 100644
index 00000000000..ca7d0f48e7e
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appmesh_virtual_gateway.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_gateway"
+description: |-
+  Terraform data source for managing an AWS App Mesh Virtual Gateway.
+---
+
+
+
+# Data Source: aws_appmesh_virtual_gateway
+
+Terraform data source for managing an AWS App Mesh Virtual Gateway.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppmeshVirtualGateway } from "./.gen/providers/aws/data-aws-appmesh-virtual-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppmeshVirtualGateway(this, "example", {
+      meshName: "example-mesh",
+      name: "mesh-gateway",
+    });
+  }
+}
+
+```
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppmeshVirtualGateway } from "./.gen/providers/aws/data-aws-appmesh-virtual-gateway";
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    new DataAwsAppmeshVirtualGateway(this, "test", {
+      meshName: "example-mesh",
+      meshOwner: Token.asString(current.accountId),
+      name: "example.mesh.local",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the virtual gateway.
+* `meshName` - (Required) Name of the service mesh in which the virtual gateway exists.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the virtual gateway.
+* `createdDate` - Creation date of the virtual gateway.
+* `lastUpdatedDate` - Last update date of the virtual gateway.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `spec` - Virtual gateway specification. See the [`awsAppmeshVirtualGateway`](/docs/providers/aws/r/appmesh_virtual_gateway.html#spec) resource for details.
+* `tags` - Map of tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appmesh_virtual_node.html.markdown b/website/docs/cdktf/typescript/d/appmesh_virtual_node.html.markdown
new file mode 100644
index 00000000000..b4e7e4afdd8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appmesh_virtual_node.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_node"
+description: |-
+  Terraform data source for managing an AWS App Mesh Virtual Node.
+---
+
+
+
+# Data Source: aws_appmesh_virtual_node
+
+Terraform data source for managing an AWS App Mesh Virtual Node.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAppmeshVirtualNode } from "./.gen/providers/aws/data-aws-appmesh-virtual-node";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsAppmeshVirtualNode(this, "test", {
+      meshName: "example-mesh",
+      name: "serviceBv1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the virtual node.
+* `meshName` - (Required) Name of the service mesh in which the virtual node exists.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the virtual node.
+* `createdDate` - Creation date of the virtual node.
+* `lastUpdatedDate` - Last update date of the virtual node.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `spec` - Virtual node specification. See the [`awsAppmeshVirtualNode`](/docs/providers/aws/r/appmesh_virtual_node.html#spec) resource for details.
+* `tags` - Map of tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/appmesh_virtual_router.html.markdown b/website/docs/cdktf/typescript/d/appmesh_virtual_router.html.markdown
new file mode 100644
index 00000000000..1ff34b12a26
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/appmesh_virtual_router.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_router"
+description: |-
+  Terraform data source for managing an AWS App Mesh Virtual Router.
+---
+
+
+
+# Data Source: aws_appmesh_virtual_router
+
+The App Mesh Virtual Router data source allows details of an App Mesh Virtual Router to be retrieved by its name and mesh_name.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAppmeshVirtualRouter } from "./.gen/providers/aws/data-aws-appmesh-virtual-router"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAppmeshVirtualRouter(this, "test", { + meshName: "example-mesh-name", + name: "example-router-name", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the virtual router. +* `meshName` - (Required) Name of the mesh in which the virtual router exists + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the virtual router. +* `createdDate` - Creation date of the virtual router. +* `lastUpdatedDate` - Last update date of the virtual router. +* `resourceOwner` - Resource owner's AWS account ID. +* `spec` - Virtual routers specification. See the [`awsAppmeshVirtualRouter`](/docs/providers/aws/r/appmesh_virtual_router.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/appmesh_virtual_service.html.markdown b/website/docs/cdktf/typescript/d/appmesh_virtual_service.html.markdown new file mode 100644 index 00000000000..41124a743e0 --- /dev/null +++ b/website/docs/cdktf/typescript/d/appmesh_virtual_service.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_virtual_service" +description: |- + Terraform data source for managing an AWS App Mesh Virtual Service. +--- + + + +# Data Source: aws_appmesh_virtual_service + +The App Mesh Virtual Service data source allows details of an App Mesh Virtual Service to be retrieved by its name, mesh_name, and optionally the mesh_owner. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAppmeshVirtualService } from "./.gen/providers/aws/data-aws-appmesh-virtual-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAppmeshVirtualService(this, "test", { + meshName: "example-mesh", + name: "example.mesh.local", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
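+ *
+ * Supplying meshOwner (below) scopes the lookup to the mesh owned by
+ * that account; here the current caller's account ID is used.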
+ */ +import { DataAwsAppmeshVirtualService } from "./.gen/providers/aws/data-aws-appmesh-virtual-service"; +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + new DataAwsAppmeshVirtualService(this, "test", { + meshName: "example-mesh", + meshOwner: Token.asString(current.accountId), + name: "example.mesh.local", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the virtual service. +* `meshName` - (Required) Name of the service mesh in which the virtual service exists. +* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the virtual service. +* `createdDate` - Creation date of the virtual service. +* `lastUpdatedDate` - Last update date of the virtual service. +* `resourceOwner` - Resource owner's AWS account ID. +* `spec` - Virtual service specification. See the [`awsAppmeshVirtualService`](/docs/providers/aws/r/appmesh_virtual_service.html#spec) resource for details. +* `tags` - Map of tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/arn.html.markdown b/website/docs/cdktf/typescript/d/arn.html.markdown new file mode 100644 index 00000000000..e6cf61907dc --- /dev/null +++ b/website/docs/cdktf/typescript/d/arn.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_arn" +description: |- + Parses an ARN into its constituent parts. +--- + + + +# Data Source: aws_arn + +Parses an ARN into its constituent parts. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsArn } from "./.gen/providers/aws/data-aws-arn"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsArn(this, "db_instance", { + arn: "arn:aws:rds:eu-west-1:123456789012:db:mysql-db", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Required) ARN to parse. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `partition` - Partition that the resource is in. + +* `service` - The [service namespace](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html#genref-aws-service-namespaces) that identifies the AWS product. + +* `region` - Region the resource resides in. +Note that the ARNs for some resources do not require a region, so this component might be omitted. + +* `account` - The [ID](https://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html) of the AWS account that owns the resource, without the hyphens. + +* `resource` - Content of this part of the ARN varies by service. +It often includes an indicator of the type of resource—for example, an IAM user or Amazon RDS database —followed by a slash (/) or a colon (:), followed by the resource name itself. 
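+
+A hand-written sketch (not generated by `cdktf convert`; the stack class name
+is arbitrary) that parses the same sample ARN and surfaces its components as
+outputs:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsArn } from "./.gen/providers/aws/data-aws-arn";
+class MyArnParts extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const parsed = new DataAwsArn(this, "db_instance", {
+      arn: "arn:aws:rds:eu-west-1:123456789012:db:mysql-db",
+    });
+    // Each component of the ARN is available as its own attribute.
+    new TerraformOutput(this, "service", { value: parsed.service });
+    new TerraformOutput(this, "region", { value: parsed.region });
+    new TerraformOutput(this, "account", { value: parsed.account });
+    new TerraformOutput(this, "resource", { value: parsed.resource });
+  }
+}
+```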
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/auditmanager_control.html.markdown b/website/docs/cdktf/typescript/d/auditmanager_control.html.markdown new file mode 100644 index 00000000000..059c484508e --- /dev/null +++ b/website/docs/cdktf/typescript/d/auditmanager_control.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_control" +description: |- + Terraform data source for managing an AWS Audit Manager Control. +--- + + + +# Data Source: aws_auditmanager_control + +Terraform data source for managing an AWS Audit Manager Control. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAuditmanagerControl } from "./.gen/providers/aws/data-aws-auditmanager-control"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAuditmanagerControl(this, "example", { + name: "1. Risk Management", + type: "Standard", + }); + } +} + +``` + +### With Framework Resource + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AuditmanagerFramework } from "./.gen/providers/aws/auditmanager-framework"; +import { DataAwsAuditmanagerControl } from "./.gen/providers/aws/data-aws-auditmanager-control"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsAuditmanagerControl(this, "example", { + name: "1. Risk Management", + type: "Standard", + }); + const example2 = new DataAwsAuditmanagerControl(this, "example2", { + name: "2. Personnel", + type: "Standard", + }); + const awsAuditmanagerFrameworkExample = new AuditmanagerFramework( + this, + "example_2", + { + controlSets: [ + { + controls: [ + { + id: Token.asString(example.id), + }, + ], + name: "example", + }, + { + controls: [ + { + id: Token.asString(example2.id), + }, + ], + name: "example2", + }, + ], + name: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAuditmanagerFrameworkExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the control. +* `type` - (Required) Type of control. Valid values are `custom` and `standard`. + +## Attribute Reference + +See the [`awsAuditmanagerControl` resource](/docs/providers/aws/r/auditmanager_control.html) for details on the returned attributes - they are identical. 
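+
+Where only the control's identifier is needed (for example to wire into the
+framework resource shown above), a minimal hand-written sketch (not generated
+by `cdktf convert`; the stack class name is arbitrary):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsAuditmanagerControl } from "./.gen/providers/aws/data-aws-auditmanager-control";
+class MyControlId extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const control = new DataAwsAuditmanagerControl(this, "example", {
+      name: "1. Risk Management",
+      type: "Standard",
+    });
+    // The resolved identifier of the looked-up standard control.
+    new TerraformOutput(this, "control_id", { value: control.id });
+  }
+}
+```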
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/auditmanager_framework.html.markdown b/website/docs/cdktf/typescript/d/auditmanager_framework.html.markdown new file mode 100644 index 00000000000..ebfac888377 --- /dev/null +++ b/website/docs/cdktf/typescript/d/auditmanager_framework.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_framework" +description: |- + Terraform data source for managing an AWS Audit Manager Framework. +--- + + + +# Data Source: aws_auditmanager_framework + +Terraform data source for managing an AWS Audit Manager Framework. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAuditmanagerFramework } from "./.gen/providers/aws/data-aws-auditmanager-framework"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAuditmanagerFramework(this, "example", { + frameworkType: "Standard", + name: "Essential Eight", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the framework. +* `frameworkType` - (Required) Type of framework. Valid values are `custom` and `standard`. + +## Attribute Reference + +See the [`awsAuditmanagerFramework` resource](/docs/providers/aws/r/auditmanager_framework.html) for details on the returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/autoscaling_group.html.markdown b/website/docs/cdktf/typescript/d/autoscaling_group.html.markdown new file mode 100644 index 00000000000..73bdaca9507 --- /dev/null +++ b/website/docs/cdktf/typescript/d/autoscaling_group.html.markdown @@ -0,0 +1,147 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_group" +description: |- + Get information on an Amazon EC2 Autoscaling Group. +--- + + + +# Data Source: aws_autoscaling_group + +Use this data source to get information on an existing autoscaling group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAutoscalingGroup } from "./.gen/providers/aws/data-aws-autoscaling-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAutoscalingGroup(this, "foo", { + name: "foo", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) Specify the exact name of the desired autoscaling group. + +## Attribute Reference + +~> **NOTE:** Some values are not always set and may not be available for +interpolation. + +* `arn` - ARN of the Auto Scaling group. +* `availabilityZones` - One or more Availability Zones for the group. +* `defaultCooldown` - Amount of time, in seconds, after a scaling activity completes before another scaling activity can start. +* `desiredCapacity` - Desired size of the group.
+* `desiredCapacityType` - The unit of measurement for the value returned for `desiredCapacity`. +* `enabledMetrics` - List of metrics enabled for collection. +* `healthCheckGracePeriod` - The amount of time, in seconds, that Amazon EC2 Auto Scaling waits before checking the health status of an EC2 instance that has come into service. +* `healthCheckType` - Service to use for the health checks. The valid values are EC2 and ELB. +* `id` - Name of the Auto Scaling Group. +* `launchConfiguration` - The name of the associated launch configuration. +* `launchTemplate` - List of launch templates for the group. + * `id` - ID of the launch template. + * `name` - Name of the launch template. + * `version` - Template version. +* `loadBalancers` - One or more load balancers associated with the group. +* `maxInstanceLifetime` - Maximum amount of time, in seconds, that an instance can be in service. +* `maxSize` - Maximum size of the group. +* `minSize` - Minimum size of the group. +* `mixedInstancesPolicy` - List of mixed instances policy objects for the group. + * `instancesDistribution` - List of instances distribution objects. + * `onDemandAllocationStrategy` - Strategy used when launching on-demand instances. + * `onDemandBaseCapacity` - Absolute minimum amount of desired capacity that must be fulfilled by on-demand instances. + * `spotAllocationStrategy` - Strategy used when launching Spot instances. + * `spotInstancePools` - Number of Spot pools per availability zone to allocate capacity. + * `spotMaxPrice` - Maximum price per unit hour that the user is willing to pay for the Spot instances. + * `launchTemplate` - List of launch templates along with the overrides. + * `launchTemplateSpecification` - List of launch template specification objects. + * `launchTemplateId` - ID of the launch template. + * `launchTemplateName` - Name of the launch template. + * `version` - Template version. + * `override` - List of properties overriding the same properties in the launch template. + * `instanceRequirements` - List of instance requirements objects. + * `acceleratorCount` - List of objects describing the minimum and maximum number of accelerators for an instance type. + * `min` - Minimum. + * `max` - Maximum. + * `acceleratorManufacturers` - List of accelerator manufacturer names. + * `acceleratorNames` - List of accelerator names. + * `acceleratorTotalMemoryMib` - List of objects describing the minimum and maximum total memory of the accelerators. + * `acceleratorTypes` - List of accelerator types. + * `allowedInstanceTypes` - List of instance types to apply the specified attributes against. + * `bareMetal` - Indicates whether bare metal instances are included, excluded, or required. + * `baselineEbsBandwidthMbps` - List of objects describing the minimum and maximum baseline EBS bandwidth (Mbps). + * `min` - Minimum. + * `max` - Maximum. + * `burstablePerformance` - Indicates whether burstable performance instance types are included, excluded, or required. + * `cpuManufacturers` - List of CPU manufacturer names. + * `excludedInstanceTypes` - List of excluded instance types. + * `instanceGenerations` - List of instance generation names. + * `localStorage` - Indicates whether instance types with instance store volumes are included, excluded, or required. + * `localStorageTypes` - List of local storage type names. + * `memoryGibPerVcpu` - List of objects describing the minimum and maximum amount of memory (GiB) per vCPU. + * `min` - Minimum. + * `max` - Maximum.
+ * `memoryMib` - List of objects describing the minimum and maximum amount of memory (MiB). + * `min` - Minimum. + * `max` - Maximum. + * `networkBandwidthGbps` - List of objects describing the minimum and maximum amount of network bandwidth (Gbps). + * `min` - Minimum. + * `max` - Maximum. + * `networkInterfaceCount` - List of objects describing the minimum and maximum amount of network interfaces. + * `min` - Minimum. + * `max` - Maximum. + * `onDemandMaxPricePercentageOverLowestPrice` - Price protection threshold for On-Demand Instances. + * `requireHibernateSupport` - Indicates whether instance types must support On-Demand Instance Hibernation. + * `spotMaxPricePercentageOverLowestPrice` - Price protection threshold for Spot Instances. + * `totalLocalStorageGb` - List of objects describing the minimum and maximum total storage (GB). + * `min` - Minimum. + * `max` - Maximum. + * `vcpuCount` - List of objects describing the minimum and maximum number of vCPUs. + * `min` - Minimum. + * `max` - Maximum. + * `instanceType` - Overriding instance type. + * `launchTemplateSpecification` - List of overriding launch template specification objects. + * `launchTemplateId` - ID of the launch template. + * `launchTemplateName` - Name of the launch template. + * `version` - Template version. + * `weightedCapacity` - Number of capacity units, which gives the instance type a proportional weight to other instance types. +* `name` - Name of the Auto Scaling Group. +* `placementGroup` - Name of the placement group into which to launch your instances, if any. For more information, see [Placement Groups](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/placement-groups.html) in the Amazon Elastic Compute Cloud User Guide. +* `predictedCapacity` - Predicted capacity of the group. +* `serviceLinkedRoleArn` - ARN of the service-linked role that the Auto Scaling group uses to call other AWS services on your behalf. +* `status` - Current state of the group when DeleteAutoScalingGroup is in progress. +* `suspendedProcesses` - List of suspended processes for the Auto Scaling Group. +* `tag` - List of tags for the group. + * `key` - Key. + * `value` - Value. + * `propagateAtLaunch` - Whether the tag is propagated to Amazon EC2 instances launched via this ASG. +* `targetGroupArns` - ARNs of the target groups for your load balancer. +* `terminationPolicies` - The termination policies for the group. +* `trafficSource` - Traffic sources. + * `identifier` - Identifies the traffic source. For Application Load Balancers, Gateway Load Balancers, Network Load Balancers, and VPC Lattice, this will be the Amazon Resource Name (ARN) for a target group in this account and Region. For Classic Load Balancers, this will be the name of the Classic Load Balancer in this account and Region. + * `type` - Traffic source type. +* `vpcZoneIdentifier` - Comma-separated list of subnet IDs, if any, in which the instances in the group are launched. +* `warmPool` - List of warm pool configuration objects. + * `instanceReusePolicy` - List of instance reuse policy objects. + * `reuseOnScaleIn` - Indicates whether instances in the Auto Scaling group can be returned to the warm pool on scale in. + * `maxGroupPreparedPolicy` - Total maximum number of instances that are allowed to be in the warm pool or in any state except Terminated for the Auto Scaling group. + * `minSize` - Minimum number of instances to maintain in the warm pool. + * `poolState` - Instance state to transition to after the lifecycle actions are complete. +* `warmPoolSize` - Current size of the warm pool.
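+ +As a hedged companion sketch (not generated by `cdktf convert`; the stack and output names are illustrative), scalar attributes such as the ARN and capacity values are plain typed getters on the data source: + +```typescript +// Hedged example: read simple attributes off an existing Auto Scaling group. +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +import { DataAwsAutoscalingGroup } from "./.gen/providers/aws/data-aws-autoscaling-group"; +class AsgInfoStack extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new DataAwsAutoscalingGroup(this, "foo", { + name: "foo", + }); + // Scalar attributes come back with their natural types (string/number). + new TerraformOutput(this, "asg_arn", { value: foo.arn }); + new TerraformOutput(this, "asg_desired_capacity", { value: foo.desiredCapacity }); + new TerraformOutput(this, "asg_max_size", { value: foo.maxSize }); + } +} +```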
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/autoscaling_groups.html.markdown b/website/docs/cdktf/typescript/d/autoscaling_groups.html.markdown new file mode 100644 index 00000000000..4b032a78ce0 --- /dev/null +++ b/website/docs/cdktf/typescript/d/autoscaling_groups.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_groups" +description: |- + Provides a list of Autoscaling Groups within a specific region. +--- + + + +# Data Source: aws_autoscaling_groups + +The Autoscaling Groups data source allows access to the list of AWS +ASGs within a specific region. This will allow you to pass a list of AutoScaling Groups to other resources. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingNotification } from "./.gen/providers/aws/autoscaling-notification"; +import { DataAwsAutoscalingGroups } from "./.gen/providers/aws/data-aws-autoscaling-groups"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const groups = new DataAwsAutoscalingGroups(this, "groups", { + filter: [ + { + name: "tag:Team", + values: ["Pets"], + }, + { + name: "tag-key", + values: ["Environment"], + }, + ], + }); + new AutoscalingNotification(this, "slack_notifications", { + groupNames: Token.asList(groups.names), + notifications: [ + "autoscaling:EC2_INSTANCE_LAUNCH", + "autoscaling:EC2_INSTANCE_TERMINATE", + "autoscaling:EC2_INSTANCE_LAUNCH_ERROR", + "autoscaling:EC2_INSTANCE_TERMINATE_ERROR", + ], + topicArn: "TOPIC ARN", + }); + } +} + +``` + +## Argument Reference + +* `names` - (Optional) List of autoscaling group names. +* `filter` - (Optional) Filter used to scope the list e.g., by tags. See [related docs](http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_Filter.html). + * `name` - (Required) Name of the DescribeAutoScalingGroup filter. The recommended values are: `tag-key`, `tag-value`, and `tag:<tag name>`. + * `values` - (Required) Value of the filter. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - List of the Autoscaling Groups ARNs in the current region. +* `id` - AWS Region. +* `names` - List of the Autoscaling Groups in the current region. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/availability_zone.html.markdown b/website/docs/cdktf/typescript/d/availability_zone.html.markdown new file mode 100644 index 00000000000..1e0190fa500 --- /dev/null +++ b/website/docs/cdktf/typescript/d/availability_zone.html.markdown @@ -0,0 +1,146 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_availability_zone" +description: |- + Provides details about a specific availability zone +--- + + + +# Data Source: aws_availability_zone + +`awsAvailabilityZone` provides details about a specific availability zone (AZ) +in the current region. + +This can be used both to validate an availability zone given in a variable +and to split the AZ name into its component parts of an AWS region and an +AZ identifier letter.
The latter may be useful, e.g., for implementing a +consistent subnet numbering scheme across several regions by mapping both +the region and the subnet letter to network numbers. + +This is different from the `awsAvailabilityZones` (plural) data source, +which provides a list of the available zones. + +## Example Usage + +The following example shows how this data source might be used to derive +VPC and subnet CIDR prefixes systematically for an availability zone. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + TerraformVariable, + propertyAccess, + Fn, + Token, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAvailabilityZone } from "./.gen/providers/aws/data-aws-availability-zone"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const azNumber = new TerraformVariable(this, "az_number", { + default: { + a: 1, + b: 2, + c: 3, + d: 4, + e: 5, + f: 6, + }, + }); + const regionNumber = new TerraformVariable(this, "region_number", { + default: { + "ap-northeast-1": 5, + "eu-central-1": 4, + "us-east-1": 1, + "us-west-1": 2, + "us-west-2": 3, + }, + }); + const example = new DataAwsAvailabilityZone(this, "example", { + name: "eu-central-1a", + }); + const awsVpcExample = new Vpc(this, "example_3", { + cidrBlock: Token.asString( + Fn.cidrsubnet( + "10.0.0.0/8", + 4, + Token.asNumber(propertyAccess(regionNumber.value, [example.region])) + ) + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpcExample.overrideLogicalId("example"); + const awsSubnetExample = new Subnet(this, "example_4", { + cidrBlock: Token.asString( + Fn.cidrsubnet( + Token.asString(awsVpcExample.cidrBlock), + 4, + Token.asNumber(propertyAccess(azNumber.value, [example.nameSuffix])) + ) + ), + vpcId: Token.asString(awsVpcExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +availability zones. The given filters must match exactly one availability +zone whose data will be exported as attributes. + +* `allAvailabilityZones` - (Optional) Set to `true` to include all Availability Zones and Local Zones regardless of your opt-in status. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. +* `name` - (Optional) Full name of the availability zone to select. +* `state` - (Optional) Specific availability zone state to require. May be any of `"available"`, `"information"` or `"impaired"`. +* `zoneId` - (Optional) Zone ID of the availability zone to select. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field.
Valid values can be found in the [EC2 DescribeAvailabilityZones API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAvailabilityZones.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `groupName` - For Availability Zones, this is the same value as the Region name. For Local Zones, the name of the associated group, for example `us-west-2-lax-1`. +* `nameSuffix` - Part of the AZ name that appears after the region name, uniquely identifying the AZ within its region. +For Availability Zones this is usually a single letter, for example `a` for the `us-west-2a` zone. +For Local and Wavelength Zones this is a longer string, for example `wl1-sfo-wlz-1` for the `us-west-2-wl1-sfo-wlz-1` zone. +* `networkBorderGroup` - The name of the location from which the address is advertised. +* `optInStatus` - For Availability Zones, this always has the value of `opt-in-not-required`. For Local Zones, this is the opt-in status. The possible values are `opted-in` and `not-opted-in`. +* `parentZoneId` - ID of the zone that handles some of the Local Zone or Wavelength Zone control plane operations, such as API calls. +* `parentZoneName` - Name of the zone that handles some of the Local Zone or Wavelength Zone control plane operations, such as API calls. +* `region` - Region where the selected availability zone resides. This is always the region selected on the provider, since this data source searches only within that region. +* `zoneType` - Type of zone. Values are `availability-zone`, `local-zone`, and `wavelength-zone`. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/availability_zones.html.markdown b/website/docs/cdktf/typescript/d/availability_zones.html.markdown new file mode 100644 index 00000000000..c9fdf001f05 --- /dev/null +++ b/website/docs/cdktf/typescript/d/availability_zones.html.markdown @@ -0,0 +1,153 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_availability_zones" +description: |- + Provides a list of Availability Zones which can be used by an AWS account. +--- + + + +# Data Source: aws_availability_zones + +The Availability Zones data source allows access to the list of AWS +Availability Zones which can be accessed by an AWS account within the region +configured in the provider. + +This is different from the `awsAvailabilityZone` (singular) data source, +which provides some details about a specific availability zone. + +-> When [Local Zones](https://aws.amazon.com/about-aws/global-infrastructure/localzones/) are enabled in a region, by default the API and this data source include both Local Zones and Availability Zones. To return only Availability Zones, see the example section below. + +## Example Usage + +### By State + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { Subnet } from "./.gen/providers/aws/subnet"; +interface MyConfig { + vpcId: any; + vpcId1: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const available = new DataAwsAvailabilityZones(this, "available", { + state: "available", + }); + new Subnet(this, "primary", { + availabilityZone: Token.asString(propertyAccess(available.names, ["0"])), + vpcId: config.vpcId, + }); + new Subnet(this, "secondary", { + availabilityZone: Token.asString(propertyAccess(available.names, ["1"])), + vpcId: config.vpcId1, + }); + } +} + +``` + +### By Filter + +All Local Zones (regardless of opt-in status): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAvailabilityZones(this, "example", { + allAvailabilityZones: true, + filter: [ + { + name: "opt-in-status", + values: ["not-opted-in", "opted-in"], + }, + ], + }); + } +} + +``` + +Only Availability Zones (no Local Zones): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsAvailabilityZones(this, "example", { + filter: [ + { + name: "opt-in-status", + values: ["opt-in-not-required"], + }, + ], + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `allAvailabilityZones` - (Optional) Set to `true` to include all Availability Zones and Local Zones regardless of your opt-in status. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. +* `excludeNames` - (Optional) List of Availability Zone names to exclude. +* `excludeZoneIds` - (Optional) List of Availability Zone IDs to exclude. +* `state` - (Optional) Filters the list of Availability Zones by their +current state. Can be either `"available"`, `"information"`, `"impaired"` or +`"unavailable"`. By default the list includes a complete set of Availability Zones +to which the underlying AWS account has access, regardless of their state. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeAvailabilityZones API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAvailabilityZones.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `groupNames` - A set of the Availability Zone Group names. For Availability Zones, this is the same value as the Region name. For Local Zones, the name of the associated group, for example `us-west-2-lax-1`. +* `id` - Region of the Availability Zones. +* `names` - List of the Availability Zone names available to the account. +* `zoneIds` - List of the Availability Zone IDs available to the account. + +Note that the indexes of Availability Zone names and IDs correspond. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/backup_framework.html.markdown b/website/docs/cdktf/typescript/d/backup_framework.html.markdown new file mode 100644 index 00000000000..a4a382e7596 --- /dev/null +++ b/website/docs/cdktf/typescript/d/backup_framework.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_framework" +description: |- + Provides details about an AWS Backup Framework. +--- + + + +# Data Source: aws_backup_framework + +Use this data source to get information on an existing backup framework. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBackupFramework } from "./.gen/providers/aws/data-aws-backup-framework"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBackupFramework(this, "example", { + name: "tf_example_backup_framework_name", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Backup framework name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the backup framework. +* `control` - One or more control blocks that make up the framework. Each control in the list has a name, input parameters, and scope. Detailed below. +* `creationTime` - Date and time that a framework is created, in Unix format and Coordinated Universal Time (UTC). +* `deploymentStatus` - Deployment status of a framework. The statuses are: `createInProgress` | `updateInProgress` | `deleteInProgress` | `completed` | `failed`. +* `description` - Description of the framework. +* `id` - ID of the framework. +* `status` - Framework consists of one or more controls. Each control governs a resource, such as backup plans, backup selections, backup vaults, or recovery points. You can also turn AWS Config recording on or off for each resource. The statuses are: `active`, `partiallyActive`, `inactive`, `unavailable`. For more information refer to the [AWS documentation for Framework Status](https://docs.aws.amazon.com/aws-backup/latest/devguide/API_DescribeFramework.html#Backup-DescribeFramework-response-FrameworkStatus) +* `tags` - Metadata that helps organize the frameworks you create. + +### Control Attributes + +`control` has the following attributes: + +* `inputParameter` - One or more input parameter blocks.
An example of a control with two parameters is: "backup plan frequency is at least daily and the retention period is at least 1 year". The first parameter is daily. The second parameter is 1 year. Detailed below. +* `name` - Name of a control. +* `scope` - Scope of a control. The control scope defines what the control will evaluate. Three examples of control scopes are: a specific backup plan, all backup plans with a specific tag, or all backup plans. Detailed below. + +### Input Parameter Attributes + +`inputParameter` has the following attributes: + +* `name` - Name of a parameter, for example, BackupPlanFrequency. +* `value` - Value of parameter, for example, hourly. + +### Scope Attributes + +`scope` has the following attributes: + +* `complianceResourceIds` - The ID of the only AWS resource that you want your control scope to contain. +* `complianceResourceTypes` - Describes whether the control scope includes one or more types of resources, such as EFS or RDS. +* `tags` - Tag key-value pair applied to those AWS resources that you want to trigger an evaluation for a rule. A maximum of one key-value pair can be provided. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/backup_plan.html.markdown b/website/docs/cdktf/typescript/d/backup_plan.html.markdown new file mode 100644 index 00000000000..7ffdff2e534 --- /dev/null +++ b/website/docs/cdktf/typescript/d/backup_plan.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_plan" +description: |- + Provides details about an AWS Backup plan. +--- + + + +# Data Source: aws_backup_plan + +Use this data source to get information on an existing backup plan. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBackupPlan } from "./.gen/providers/aws/data-aws-backup-plan"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBackupPlan(this, "example", { + planId: "tf_example_backup_plan_id", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `planId` - (Required) Backup plan ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the backup plan. +* `name` - Display name of a backup plan. +* `tags` - Metadata that you can assign to help organize the plans you create. +* `version` - Unique, randomly generated, Unicode, UTF-8 encoded string that serves as the version ID of the backup plan. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/backup_report_plan.html.markdown b/website/docs/cdktf/typescript/d/backup_report_plan.html.markdown new file mode 100644 index 00000000000..85bf8641455 --- /dev/null +++ b/website/docs/cdktf/typescript/d/backup_report_plan.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_report_plan" +description: |- + Provides details about an AWS Backup Report Plan. +--- + + + +# Data Source: aws_backup_report_plan + +Use this data source to get information on an existing backup report plan. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBackupReportPlan } from "./.gen/providers/aws/data-aws-backup-report-plan"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBackupReportPlan(this, "example", { + name: "tf_example_backup_report_plan_name", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Backup report plan name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the backup report plan. +* `creationTime` - Date and time that a report plan is created, in Unix format and Coordinated Universal Time (UTC). +* `deploymentStatus` - Deployment status of a report plan. The statuses are: `createInProgress` | `updateInProgress` | `deleteInProgress` | `completed`. +* `description` - Description of the report plan. +* `id` - ID of the report plan. +* `reportDeliveryChannel` - An object that contains information about where and how to deliver your reports, specifically your Amazon S3 bucket name, S3 key prefix, and the formats of your reports. Detailed below. +* `reportSetting` - An object that identifies the report template for the report. Reports are built using a report template. Detailed below. +* `tags` - Metadata that you can assign to help organize the report plans you create. + +### Report Delivery Channel Attributes + +`reportDeliveryChannel` has the following attributes: + +* `formats` - List of the format of your reports: CSV, JSON, or both. +* `s3BucketName` - Unique name of the S3 bucket that receives your reports. +* `s3KeyPrefix` - Prefix for where Backup Audit Manager delivers your reports to Amazon S3. The prefix is this part of the following path: s3://your-bucket-name/prefix/Backup/us-west-2/year/month/day/report-name. + +### Report Setting Attributes + +`reportSetting` has the following attributes: + +* `accounts` - (Optional) Specifies the list of accounts a report covers. +* `frameworkArns` - ARNs of the frameworks a report covers. +* `numberOfFrameworks` - Specifies the number of frameworks a report covers. +* `organizationUnits` - (Optional) Specifies the list of Organizational Units a report covers. +* `regions` - (Optional) Specifies the list of regions a report covers. +* `reportTemplate` - Identifies the report template for the report. Reports are built using a report template. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/backup_selection.html.markdown b/website/docs/cdktf/typescript/d/backup_selection.html.markdown new file mode 100644 index 00000000000..4df5016b500 --- /dev/null +++ b/website/docs/cdktf/typescript/d/backup_selection.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_selection" +description: |- + Provides details about an AWS Backup selection. +--- + + + +# Data Source: aws_backup_selection + +Use this data source to get information on an existing backup selection. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBackupSelection } from "./.gen/providers/aws/data-aws-backup-selection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBackupSelection(this, "example", { + planId: Token.asString(dataAwsBackupPlanExample.id), + selectionId: "selection-id-example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `planId` - (Required) Backup plan ID associated with the selection of resources. +* `selectionId` - (Required) Backup selection ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `name` - Display name of a resource selection document. +* `iamRoleArn` - ARN of the IAM role that AWS Backup uses to authenticate when restoring and backing up the target resource. See the [AWS Backup Developer Guide](https://docs.aws.amazon.com/aws-backup/latest/devguide/access-control.html#managed-policies) for additional information about using AWS managed policies or creating custom policies attached to the IAM role. +* `resources` - An array of strings that either contain Amazon Resource Names (ARNs) or match patterns of resources to assign to a backup plan. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/backup_vault.html.markdown b/website/docs/cdktf/typescript/d/backup_vault.html.markdown new file mode 100644 index 00000000000..d3ee1b917e3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/backup_vault.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_vault" +description: |- + Provides details about an AWS Backup vault. +--- + + + +# Data Source: aws_backup_vault + +Use this data source to get information on an existing backup vault. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBackupVault } from "./.gen/providers/aws/data-aws-backup-vault"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBackupVault(this, "example", { + name: "example_backup_vault", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the backup vault. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the vault. +* `kmsKeyArn` - Server-side encryption key that is used to protect your backups. +* `recoveryPoints` - Number of recovery points that are stored in a backup vault. +* `tags` - Metadata that you can assign to help organize the resources that you create.
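+ +A hedged follow-on sketch (not part of the generated docs; the plan name and schedule are invented): the looked-up vault can feed a `BackupPlan` rule's `targetVaultName`: + +```typescript +// Hedged example: reference an existing vault from a new backup plan. +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +import { BackupPlan } from "./.gen/providers/aws/backup-plan"; +import { DataAwsBackupVault } from "./.gen/providers/aws/data-aws-backup-vault"; +class VaultConsumerStack extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsBackupVault(this, "example", { + name: "example_backup_vault", + }); + // Daily rule at 12:00 UTC that writes recovery points into the existing vault. + new BackupPlan(this, "daily", { + name: "tf_example_daily_plan", + rule: [ + { + ruleName: "daily", + schedule: "cron(0 12 * * ? *)", + targetVaultName: example.name, + }, + ], + }); + } +} +```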
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/batch_compute_environment.html.markdown b/website/docs/cdktf/typescript/d/batch_compute_environment.html.markdown new file mode 100644 index 00000000000..e33316a031b --- /dev/null +++ b/website/docs/cdktf/typescript/d/batch_compute_environment.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_compute_environment" +description: |- + Provides details about a batch compute environment +--- + + + +# Data Source: aws_batch_compute_environment + +The Batch Compute Environment data source allows access to details of a specific +compute environment within AWS Batch. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBatchComputeEnvironment } from "./.gen/providers/aws/data-aws-batch-compute-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBatchComputeEnvironment(this, "batch-mongo", { + computeEnvironmentName: "batch-mongo-production", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `computeEnvironmentName` - (Required) Name of the Batch Compute Environment + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the compute environment. +* `ecsClusterArn` - ARN of the underlying Amazon ECS cluster used by the compute environment. +* `serviceRole` - ARN of the IAM role that allows AWS Batch to make calls to other AWS services on your behalf. +* `type` - Type of the compute environment (for example, `managed` or `unmanaged`). +* `status` - Current status of the compute environment (for example, `creating` or `valid`). +* `statusReason` - Short, human-readable string to provide additional details about the current status of the compute environment. +* `state` - State of the compute environment (for example, `enabled` or `disabled`). If the state is `enabled`, then the compute environment accepts jobs from a queue and can scale out automatically based on queues. +* `tags` - Key-value map of resource tags + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/batch_job_queue.html.markdown b/website/docs/cdktf/typescript/d/batch_job_queue.html.markdown new file mode 100644 index 00000000000..99ac18ed5c1 --- /dev/null +++ b/website/docs/cdktf/typescript/d/batch_job_queue.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_job_queue" +description: |- + Provides details about a batch job queue +--- + + + +# Data Source: aws_batch_job_queue + +The Batch Job Queue data source allows access to details of a specific +job queue within AWS Batch. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsBatchJobQueue } from "./.gen/providers/aws/data-aws-batch-job-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBatchJobQueue(this, "test-queue", { + name: "tf-test-batch-job-queue", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the job queue. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the job queue. +* `schedulingPolicyArn` - The ARN of the fair share scheduling policy. If this attribute has a value, the job queue uses a fair share scheduling policy. If this attribute does not have a value, the job queue uses a first in, first out (FIFO) scheduling policy. +* `status` - Current status of the job queue (for example, `creating` or `valid`). +* `statusReason` - Short, human-readable string to provide additional details about the current status + of the job queue. +* `state` - Describes the ability of the queue to accept new jobs (for example, `enabled` or `disabled`). +* `tags` - Key-value map of resource tags +* `priority` - Priority of the job queue. Job queues with a higher priority are evaluated first when + associated with the same compute environment. +* `computeEnvironmentOrder` - The compute environments that are attached to the job queue and the order in + which job placement is preferred. Compute environments are selected for job placement in ascending order. + * `computeEnvironmentOrder.#.order` - The order of the compute environment. + * `computeEnvironmentOrder.#.computeEnvironment` - The ARN of the compute environment. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/batch_scheduling_policy.html.markdown b/website/docs/cdktf/typescript/d/batch_scheduling_policy.html.markdown new file mode 100644 index 00000000000..17cce9ae60f --- /dev/null +++ b/website/docs/cdktf/typescript/d/batch_scheduling_policy.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_scheduling_policy" +description: |- + Provides details about a Batch Scheduling Policy +--- + + + +# Data Source: aws_batch_scheduling_policy + +The Batch Scheduling Policy data source allows access to details of a specific Scheduling Policy within AWS Batch. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBatchSchedulingPolicy } from "./.gen/providers/aws/data-aws-batch-scheduling-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBatchSchedulingPolicy(this, "test", { + arn: "arn:aws:batch:us-east-1:012345678910:scheduling-policy/example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Required) ARN of the scheduling policy. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `fairsharePolicy` - Fairshare policy block specifies the `computeReservation`, `shareDelaySeconds`, and `shareDistribution` of the scheduling policy.
The `fairsharePolicy` block is documented below. +* `name` - Name of the scheduling policy. +* `tags` - Key-value map of resource tags + +A `fairsharePolicy` block supports the following arguments: + +* `computeReservation` - Value used to reserve some of the available maximum vCPU for fair share identifiers that have not yet been used. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html). +* `shareDelaySeconds` - Time period to use to calculate a fair share percentage for each fair share identifier in use, in seconds. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html). +* `shareDistribution` - One or more share distribution blocks which define the weights for the fair share identifiers for the fair share policy. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html). The `shareDistribution` block is documented below. + +A `shareDistribution` block supports the following arguments: + +* `shareIdentifier` - Fair share identifier or fair share identifier prefix. For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html). +* `weightFactor` - Weight factor for the fair share identifier. For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/billing_service_account.html.markdown b/website/docs/cdktf/typescript/d/billing_service_account.html.markdown new file mode 100644 index 00000000000..e56f7094d83 --- /dev/null +++ b/website/docs/cdktf/typescript/d/billing_service_account.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_billing_service_account" +description: |- + Get AWS Billing Service Account +--- + + + +# Data Source: aws_billing_service_account + +Use this data source to get the Account ID of the [AWS Billing and Cost Management Service Account](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/billing-getting-started.html#step-2) so that it can be granted access in an S3 bucket policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsBillingServiceAccount } from "./.gen/providers/aws/data-aws-billing-service-account"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const billingLogs = new S3Bucket(this, "billing_logs", { + bucket: "my-billing-tf-test-bucket", + }); + new S3BucketAcl(this, "billing_logs_acl", { + acl: "private", + bucket: billingLogs.id, + }); + const main = new DataAwsBillingServiceAccount(this, "main", {}); + const allowBillingLogging = new DataAwsIamPolicyDocument( + this, + "allow_billing_logging", + { + statement: [ + { + actions: ["s3:GetBucketAcl", "s3:GetBucketPolicy"], + effect: "Allow", + principals: [ + { + identifiers: [Token.asString(main.arn)], + type: "AWS", + }, + ], + resources: [billingLogs.arn], + }, + { + actions: ["s3:PutObject"], + effect: "Allow", + principals: [ + { + identifiers: [Token.asString(main.arn)], + type: "AWS", + }, + ], + resources: ["${" + billingLogs.arn + "}/*"], + }, + ], + } + ); + const awsS3BucketPolicyAllowBillingLogging = new S3BucketPolicy( + this, + "allow_billing_logging_4", + { + bucket: billingLogs.id, + policy: Token.asString(allowBillingLogging.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyAllowBillingLogging.overrideLogicalId( + "allow_billing_logging" + ); + } +} + +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the AWS billing service account. +* `arn` - ARN of the AWS billing service account. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/budgets_budget.html.markdown b/website/docs/cdktf/typescript/d/budgets_budget.html.markdown new file mode 100644 index 00000000000..525d50f2fe7 --- /dev/null +++ b/website/docs/cdktf/typescript/d/budgets_budget.html.markdown @@ -0,0 +1,152 @@ +--- +subcategory: "Web Services Budgets" +layout: "aws" +page_title: "AWS: aws_budgets_budget" +description: |- + Terraform data source for managing an AWS Web Services Budgets Budget. +--- + + + +# Data Source: aws_budgets_budget + +Terraform data source for managing an AWS Web Services Budgets Budget. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsBudgetsBudget } from "./.gen/providers/aws/data-aws-budgets-budget"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsBudgetsBudget(this, "test", { + name: Token.asString(awsBudgetsBudgetTest.name), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - The name of a budget. Unique within accounts. + +The following arguments are optional: + +* `accountId` - The ID of the target account for the budget. Defaults to the current user's account ID if omitted.
+* `namePrefix` - The prefix of the name of a budget. Unique within accounts. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `autoAdjustData` - Object containing [Auto Adjust Data](#auto-adjust-data) which determines the budget amount for an auto-adjusting budget. +* `budgetExceeded` - Boolean indicating whether this budget has been exceeded. +* `budgetLimit` - The total amount of cost, usage, RI utilization, RI coverage, Savings Plans utilization, or Savings Plans coverage that you want to track with your budget. Contains object [Spend](#spend). +* `budgetType` - Whether this budget tracks monetary cost or usage. +* `calculatedSpend` - The spend objects that are associated with this budget. The [actualSpend](#actual-spend) tracks how much you've used, cost, usage, RI units, or Savings Plans units and the [forecastedSpend](#forecasted-spend) tracks how much you're predicted to spend based on your historical usage profile. +* `costFilter` - A list of [CostFilter](#cost-filter) name/value pairs to apply to the budget. +* `costTypes` - Object containing [CostTypes](#cost-types), the types of cost included in a budget, such as tax and subscriptions. +* `notification` - Object containing [Budget Notifications](#budget-notification). Can be used multiple times to define more than one budget notification. +* `plannedLimit` - Object containing [Planned Budget Limits](#planned-budget-limits). Can be used multiple times to plan more than one budget limit. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation. +* `timePeriodEnd` - The end of the time period covered by the budget. There are no restrictions on the end date. Format: `2017-01-01_12:00`. +* `timePeriodStart` - The start of the time period covered by the budget. If you don't specify a start date, AWS defaults to the start of your chosen time period. The start date must come before the end date. Format: `2017-01-01_12:00`. +* `timeUnit` - The length of time until a budget resets the actual and forecasted spend. Valid values: `monthly`, `quarterly`, `annually`, and `daily`. + +### Actual Spend + +The amount of cost, usage, RI units, or Savings Plans units that you used. Type is [Spend](#spend). + +### Auto Adjust Data + +The parameters that determine the budget amount for an auto-adjusting budget. + +* `autoAdjustType` (Required) - The string that defines whether your budget auto-adjusts based on historical or forecasted data. Valid values: `forecast`, `historical`. +* `historicalOptions` (Optional) - Configuration block of [Historical Options](#historical-options). Required when `autoAdjustType` is `historical`. Configuration block that defines the historical data that your auto-adjusting budget is based on. +* `lastAutoAdjustTime` (Optional) - The last time that your budget was auto-adjusted. + +### Budget Notification + +Valid keys for `notification` parameter. + +* `comparisonOperator` - (Required) Comparison operator to use to evaluate the condition. Can be `lessThan`, `equalTo` or `greaterThan`. +* `threshold` - (Required) Threshold when the notification should be sent. +* `thresholdType` - (Required) What kind of threshold is defined. Can be `percentage` OR `absoluteValue`. +* `notificationType` - (Required) What kind of budget value to notify on. Can be `actual` or `forecasted`. +* `subscriberEmailAddresses` - (Optional) E-Mail addresses to notify.
Either this or `subscriberSnsTopicArns` is required. +* `subscriberSnsTopicArns` - (Optional) SNS topics to notify. Either this or `subscriberEmailAddresses` is required. + +### Cost Filter + +Based on your choice of budget type, you can choose one or more of the available budget filters. + +* `purchaseType` +* `usageTypeGroup` +* `service` +* `operation` +* `usageType` +* `billingEntity` +* `costCategory` +* `linkedAccount` +* `tagKeyValue` +* `legalEntityName` +* `invoicingEntity` +* `az` +* `region` +* `instanceType` + +Refer to [AWS CostFilter documentation](https://docs.aws.amazon.com/cost-management/latest/userguide/budgets-create-filters.html) for further detail. + +### Cost Types + +Valid keys for `costTypes` parameter. + +* `includeCredit` - A boolean value whether to include credits in the cost budget. Defaults to `true`. +* `includeDiscount` - Whether a budget includes discounts. Defaults to `true`. +* `includeOtherSubscription` - A boolean value whether to include other subscription costs in the cost budget. Defaults to `true`. +* `includeRecurring` - A boolean value whether to include recurring costs in the cost budget. Defaults to `true`. +* `includeRefund` - A boolean value whether to include refunds in the cost budget. Defaults to `true`. +* `includeSubscription` - A boolean value whether to include subscriptions in the cost budget. Defaults to `true`. +* `includeSupport` - A boolean value whether to include support costs in the cost budget. Defaults to `true`. +* `includeTax` - A boolean value whether to include tax in the cost budget. Defaults to `true`. +* `includeUpfront` - A boolean value whether to include upfront costs in the cost budget. Defaults to `true`. +* `useAmortized` - Whether a budget uses the amortized rate. Defaults to `false`. +* `useBlended` - A boolean value whether to use blended costs in the cost budget. Defaults to `false`. + +Refer to [AWS CostTypes documentation](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_CostTypes.html) for further detail. + +### Forecasted Spend + +The amount of cost, usage, RI units, or Savings Plans units that you're forecasted to use. +Type is [Spend](#spend). + +### Historical Options + +* `budgetAdjustmentPeriod` (Required) - The number of budget periods included in the moving-average calculation that determines your auto-adjusted budget amount. +* `lookbackAvailablePeriods` (Optional) - The integer that describes how many budget periods in your BudgetAdjustmentPeriod are included in the calculation of your current budget limit. If the first budget period in your BudgetAdjustmentPeriod has no cost data, then that budget period isn't included in the average that determines your budget limit. You can't set your own LookBackAvailablePeriods. The value is automatically calculated from the `budgetAdjustmentPeriod` and your historical cost data. + +### Planned Budget Limits + +Valid keys for `plannedLimit` parameter. + +* `amount` - (Required) The amount of cost or usage being measured for a budget. +* `startTime` - (Required) The start time of the budget limit. Format: `2017-01-01_12:00`. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation. +* `unit` - (Required) The unit of measurement used for the budget forecast, actual spend, or budget threshold, such as dollars or GB.
+
+### Spend
+
+* `amount` - The cost or usage amount that's associated with a budget forecast, actual spend, or budget threshold. Length constraints: minimum length of `1`, maximum length of `2147483647`.
+* `unit` - The unit of measurement that's used for the budget forecast, actual spend, or budget threshold, such as USD or GBP. Length constraints: minimum length of `1`, maximum length of `2147483647`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/caller_identity.html.markdown b/website/docs/cdktf/typescript/d/caller_identity.html.markdown
new file mode 100644
index 00000000000..78478c0353c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/caller_identity.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "STS (Security Token)"
+layout: "aws"
+page_title: "AWS: aws_caller_identity"
+description: |-
+  Get information about the identity of the caller for the provider
+  connection to AWS.
+---
+
+
+
+# Data Source: aws_caller_identity
+
+Use this data source to get access to the effective Account ID, User ID, and ARN in
+which Terraform is authorized.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    new TerraformOutput(this, "account_id", {
+      value: current.accountId,
+    });
+    new TerraformOutput(this, "caller_arn", {
+      value: current.arn,
+    });
+    new TerraformOutput(this, "caller_user", {
+      value: current.userId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `accountId` - AWS Account ID number of the account that owns or contains the calling entity.
+* `arn` - ARN associated with the calling entity.
+* `id` - Account ID number of the account that owns or contains the calling entity.
+* `userId` - Unique identifier of the calling entity.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/canonical_user_id.html.markdown b/website/docs/cdktf/typescript/d/canonical_user_id.html.markdown
new file mode 100644
index 00000000000..ba3ad41afc3
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/canonical_user_id.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_canonical_user_id"
+description: |-
+  Provides the canonical user ID for the AWS account associated with the provider
+  connection to AWS.
+---
+
+
+
+# Data Source: aws_canonical_user_id
+
+The Canonical User ID data source allows access to the [canonical user ID](http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html)
+for the effective account in which Terraform is working.
+
+~> **NOTE:** To use this data source, you must have the `s3:ListAllMyBuckets` permission.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCanonicalUserId } from "./.gen/providers/aws/data-aws-canonical-user-id";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCanonicalUserId(this, "current", {});
+    new TerraformOutput(this, "canonical_user_id", {
+      value: current.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+There are no arguments available for this data source.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Canonical user ID associated with the AWS account.
+
+* `displayName` - Human-friendly name linked to the canonical user ID. The bucket owner's display name. **NOTE:** [This value](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTServiceGET.html) is only included in the response in the US East (N. Virginia), US West (N. California), US West (Oregon), Asia Pacific (Singapore), Asia Pacific (Sydney), Asia Pacific (Tokyo), EU (Ireland), and South America (São Paulo) regions.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ce_cost_category.html.markdown b/website/docs/cdktf/typescript/d/ce_cost_category.html.markdown
new file mode 100644
index 00000000000..a8de3761d86
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ce_cost_category.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_cost_category"
+description: |-
+  Provides details about a specific CostExplorer Cost Category Definition
+---
+
+
+
+# Data Source: aws_ce_cost_category
+
+Provides details about a specific CostExplorer Cost Category.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCeCostCategory } from "./.gen/providers/aws/data-aws-ce-cost-category";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCeCostCategory(this, "example", {
+      costCategoryArn: "costCategoryARN",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `costCategoryArn` - (Required) ARN of the Cost Category.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cost category.
+* `defaultValue` - Default value for the cost category.
+* `effectiveEnd` - Effective end date of your Cost Category.
+* `effectiveStart` - Effective start date of your Cost Category.
+* `id` - Unique ID of the cost category.
+* `rule` - Configuration block for the Cost Category rules used to categorize costs. See below.
+* `ruleVersion` - Rule schema version in this particular Cost Category.
+* `splitChargeRule` - Configuration block for the split charge rules used to allocate your charges between your Cost Category values. See below.
+* `tags` - Resource tags.
+
+### `rule`
+
+* `inheritedValue` - Configuration block for the value the line item is categorized as if the line item contains the matched dimension. See below.
+* `rule` - Configuration block for the `expression` object used to categorize costs. See below.
+* `type` - You can define the CostCategoryRule rule type as either `REGULAR` or `INHERITED_VALUE`.
+* `value` - Default value for the cost category.
+
+### `inheritedValue`
+
+* `dimensionKey` - Key to extract cost category values.
+* `dimensionName` - Name of the dimension that's used to group costs. If you specify `LINKED_ACCOUNT_NAME`, the cost category value is based on account name. If you specify `TAG`, the cost category value will be based on the value of the specified tag key. Valid values are `LINKED_ACCOUNT_NAME` and `TAG`.
+
+### `rule`
+
+* `and` - Return results that match both `dimension` objects.
+* `costCategory` - Configuration block for the filter that's based on `costCategory` values. See below.
+* `dimension` - Configuration block for the specific `dimension` to use for `expression`. See below.
+* `not` - Return results that do not match the `dimension` object.
+* `or` - Return results that match either `dimension` object.
+* `tags` - Configuration block for the specific `tag` to use for `expression`. See below.
+
+### `costCategory`
+
+* `key` - Unique name of the Cost Category.
+* `matchOptions` - Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - Specific value of the Cost Category.
+
+### `dimension`
+
+* `key` - Unique name of the Cost Category.
+* `matchOptions` - Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - Specific value of the Cost Category.
+
+### `tags`
+
+* `key` - Key for the tag.
+* `matchOptions` - Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - Specific value of the Cost Category.
+
+### `splitChargeRule`
+
+* `method` - Method that's used to define how to split your source costs across your targets. Valid values are `FIXED`, `PROPORTIONAL`, and `EVEN`.
+* `parameter` - Configuration block for the parameters for a split charge method. This is only required for the `FIXED` method. See below.
+* `source` - Cost Category value that you want to split.
+* `targets` - Cost Category values that you want to split costs across. These values can't be used as a source in other split charge rules.
+
+### `parameter`
+
+* `type` - Parameter type.
+* `values` - Parameter values.
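+
+As an illustrative sketch of consuming this data source (the stack and output names below are hypothetical, not part of the provider), simple string attributes such as `ruleVersion` and `effectiveStart` can be exported directly:
+
+```typescript
+// Hypothetical consumer of this data source; names are illustrative.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsCeCostCategory } from "./.gen/providers/aws/data-aws-ce-cost-category";
+class CostCategoryLookup extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsCeCostCategory(this, "example", {
+      costCategoryArn: "costCategoryARN",
+    });
+    // Plain string attributes can be passed straight to outputs.
+    new TerraformOutput(this, "rule_version", {
+      value: example.ruleVersion,
+    });
+    new TerraformOutput(this, "effective_start", {
+      value: example.effectiveStart,
+    });
+  }
+}
+```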
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ce_tags.html.markdown b/website/docs/cdktf/typescript/d/ce_tags.html.markdown
new file mode 100644
index 00000000000..1e1f0668d87
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ce_tags.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_tags"
+description: |-
+  Provides details about specific CE Tags
+---
+
+
+
+# Data Source: aws_ce_tags
+
+Provides details about specific CE Tags.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCeTags } from "./.gen/providers/aws/data-aws-ce-tags";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCeTags(this, "test", {
+      timePeriod: {
+        end: "2022-12-01",
+        start: "2021-01-01",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `timePeriod` - (Required) Configuration block for the start and end dates for retrieving the dimension values.
+
+The following arguments are optional:
+
+* `filter` - (Optional) Configuration block for the `expression` object used to categorize costs. See below.
+* `searchString` - (Optional) Value that you want to search for.
+* `sortBy` - (Optional) Configuration block for the value by which you want to sort the data. See below.
+* `tagKey` - (Optional) Key of the tag that you want to return values for.
+
+### `timePeriod`
+
+* `start` - (Required) Beginning of the time period.
+* `end` - (Required) End of the time period.
+
+### `filter`
+
+* `and` - (Optional) Return results that match both `dimension` objects.
+* `costCategory` - (Optional) Configuration block for the filter that's based on `costCategory` values. See below.
+* `dimension` - (Optional) Configuration block for the specific `dimension` to use for `expression`. See below.
+* `not` - (Optional) Return results that do not match the `dimension` object.
+* `or` - (Optional) Return results that match either `dimension` object.
+* `tag` - (Optional) Configuration block for the specific `tag` to use for `expression`. See below.
+
+### `costCategory`
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `dimension`
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `tag`
+
+* `key` - (Optional) Key for the tag.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. MatchOptions is only applicable for actions related to cost category. The default values for MatchOptions are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `sortBy`
+
+* `key` - (Required) Key that's used to sort the data. Valid values are: `BlendedCost`, `UnblendedCost`, `AmortizedCost`, `NetAmortizedCost`, `NetUnblendedCost`, `UsageQuantity`, `NormalizedUsageAmount`.
+* `sortOrder` - (Optional) Order that's used to sort the data. Valid values are: `ASCENDING`, `DESCENDING`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the tag.
+* `tags` - Tags that match your request.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudcontrolapi_resource.html.markdown b/website/docs/cdktf/typescript/d/cloudcontrolapi_resource.html.markdown
new file mode 100644
index 00000000000..7407757f700
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudcontrolapi_resource.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "Cloud Control API"
+layout: "aws"
+page_title: "AWS: aws_cloudcontrolapi_resource"
+description: |-
+  Provides details for a Cloud Control API Resource.
+---
+
+
+
+# Data Source: aws_cloudcontrolapi_resource
+
+Provides details for a Cloud Control API Resource. The reading of these resources is proxied through Cloud Control API handlers to the backend service.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudcontrolapiResource } from "./.gen/providers/aws/data-aws-cloudcontrolapi-resource";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudcontrolapiResource(this, "example", {
+      identifier: "example",
+      typeName: "AWS::ECS::Cluster",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `identifier` - (Required) Identifier of the CloudFormation resource. For example, `vpc-12345678`.
+* `typeName` - (Required) CloudFormation resource type name. For example, `AWS::EC2::VPC`.
+
+The following arguments are optional:
+
+* `roleArn` - (Optional) ARN of the IAM Role to assume for operations.
+* `typeVersionId` - (Optional) Identifier of the CloudFormation resource type version.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `properties` - JSON string matching the CloudFormation resource type schema with current configuration. Underlying attributes can be referenced via the [`jsondecode()` function](https://www.terraform.io/docs/language/functions/jsondecode.html), for example, `jsondecode(dataAwsCloudcontrolapiResourceExampleProperties)["example"]`.
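+
+Because `properties` is a raw JSON string, a cdktf stack typically decodes it before indexing into it. The following is an illustrative sketch, not part of the generated documentation; the output name and the `Arn` key are assumptions about the `AWS::ECS::Cluster` schema:
+
+```typescript
+// Illustrative sketch: decode `properties`, then read a single key.
+import { Construct } from "constructs";
+import { Fn, TerraformOutput, TerraformStack, propertyAccess } from "cdktf";
+import { DataAwsCloudcontrolapiResource } from "./.gen/providers/aws/data-aws-cloudcontrolapi-resource";
+class ClusterProperties extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsCloudcontrolapiResource(this, "example", {
+      identifier: "example",
+      typeName: "AWS::ECS::Cluster",
+    });
+    // Fn.jsondecode turns the JSON string into an object reference;
+    // propertyAccess reads one key from it at apply time.
+    new TerraformOutput(this, "cluster_arn", {
+      value: propertyAccess(Fn.jsondecode(example.properties), ["Arn"]),
+    });
+  }
+}
+```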
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudformation_export.html.markdown b/website/docs/cdktf/typescript/d/cloudformation_export.html.markdown
new file mode 100644
index 00000000000..12bb902ddb9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudformation_export.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_export"
+description: |-
+  Provides metadata of a CloudFormation Export (e.g., Cross Stack References)
+---
+
+
+
+# Data Source: aws_cloudformation_export
+
+The CloudFormation Export data source allows access to stack
+exports specified in the [Output](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/outputs-section-structure.html) section of the CloudFormation template using the optional `Export` property.
+
+-> Note: If you are trying to use a value from a CloudFormation stack in the same Terraform run, use normal interpolation or CloudFormation outputs instead.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudformationExport } from "./.gen/providers/aws/data-aws-cloudformation-export";
+import { Instance } from "./.gen/providers/aws/instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const subnetId = new DataAwsCloudformationExport(this, "subnet_id", {
+      name: "mySubnetIdExportName",
+    });
+    new Instance(this, "web", {
+      ami: "ami-abb07bcb",
+      instanceType: "t2.micro",
+      subnetId: Token.asString(subnetId.value),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the export as it appears in the console or from [list-exports](http://docs.aws.amazon.com/cli/latest/reference/cloudformation/list-exports.html).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `value` - Value from the CloudFormation export identified by the export name found from [list-exports](http://docs.aws.amazon.com/cli/latest/reference/cloudformation/list-exports.html).
+* `exportingStackId` - ARN of the stack that contains the exported output name and value.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudformation_stack.html.markdown b/website/docs/cdktf/typescript/d/cloudformation_stack.html.markdown
new file mode 100644
index 00000000000..88f1e9560dd
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudformation_stack.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_stack"
+description: |-
+  Provides metadata of a CloudFormation stack (e.g., outputs)
+---
+
+
+
+# Data Source: aws_cloudformation_stack
+
+The CloudFormation Stack data source allows access to stack
+outputs and other useful data including the template body.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudformationStack } from "./.gen/providers/aws/data-aws-cloudformation-stack";
+import { Instance } from "./.gen/providers/aws/instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const network = new DataAwsCloudformationStack(this, "network", {
+      name: "my-network-stack",
+    });
+    new Instance(this, "web", {
+      ami: "ami-abb07bcb",
+      instanceType: "t2.micro",
+      subnetId: Token.asString(propertyAccess(network.outputs, ['"SubnetId"'])),
+      tags: {
+        Name: "HelloWorld",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the stack.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `capabilities` - List of capabilities.
+* `description` - Description of the stack.
+* `disableRollback` - Whether the rollback of the stack is disabled when stack creation fails.
+* `notificationArns` - List of SNS topic ARNs to publish stack related events.
+* `outputs` - Map of outputs from the stack.
+* `parameters` - Map of parameters that specify input parameters for the stack.
+* `tags` - Map of tags associated with this stack.
+* `templateBody` - Structure containing the template body.
+* `iamRoleArn` - ARN of the IAM role used to create the stack.
+* `timeoutInMinutes` - Amount of time that can pass before the stack status becomes `CREATE_FAILED`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudformation_type.html.markdown b/website/docs/cdktf/typescript/d/cloudformation_type.html.markdown
new file mode 100644
index 00000000000..321109b9255
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudformation_type.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_type"
+description: |-
+  Provides details about a CloudFormation Type.
+---
+
+
+
+# Data Source: aws_cloudformation_type
+
+Provides details about a CloudFormation Type.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudformationType } from "./.gen/providers/aws/data-aws-cloudformation-type";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudformationType(this, "example", {
+      type: "RESOURCE",
+      typeName: "AWS::Athena::WorkGroup",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Optional) ARN of the CloudFormation Type. For example, `arn:aws:cloudformation:us-west-2::type/resource/AWS-EC2-VPC`.
+* `type` - (Optional) CloudFormation Registry Type. For example, `RESOURCE`.
+* `typeName` - (Optional) CloudFormation Type name. For example, `AWS::EC2::VPC`.
+* `versionId` - (Optional) Identifier of the CloudFormation Type version.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `defaultVersionId` - Identifier of the CloudFormation Type default version.
+* `deprecatedStatus` - Deprecation status of the CloudFormation Type.
+* `description` - Description of the CloudFormation Type.
+* `documentationUrl` - URL of the documentation for the CloudFormation Type.
+* `executionRoleArn` - ARN of the IAM Role used to register the CloudFormation Type.
+* `isDefaultVersion` - Whether the CloudFormation Type version is the default version.
+* `loggingConfig` - List of objects containing logging configuration.
+  * `logGroupName` - Name of the CloudWatch Log Group where CloudFormation sends error logging information when invoking the type's handlers.
+  * `logRoleArn` - ARN of the IAM Role CloudFormation assumes when sending error logging information to CloudWatch Logs.
+* `provisioningType` - Provisioning behavior of the CloudFormation Type.
+* `schema` - JSON document of the CloudFormation Type schema.
+* `sourceUrl` - URL of the source code for the CloudFormation Type.
+* `visibility` - Scope of the CloudFormation Type.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_cache_policy.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_cache_policy.html.markdown
new file mode 100644
index 00000000000..2828ce6df8d
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_cache_policy.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_cache_policy"
+description: |-
+  Use this data source to retrieve information about a CloudFront cache policy.
+---
+
+
+
+# Data Source: aws_cloudfront_cache_policy
+
+Use this data source to retrieve information about a CloudFront cache policy.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontCachePolicy } from "./.gen/providers/aws/data-aws-cloudfront-cache-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontCachePolicy(this, "example", {
+      name: "example-policy",
+    });
+  }
+}
+
+```
+
+### AWS-Managed Policies
+
+AWS managed cache policy names are prefixed with `Managed-`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontCachePolicy } from "./.gen/providers/aws/data-aws-cloudfront-cache-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontCachePolicy(this, "example", {
+      name: "Managed-CachingOptimized",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Optional) Unique name to identify the cache policy.
+* `id` - (Optional) Identifier for the cache policy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `etag` - Current version of the cache policy.
+* `minTtl` - Minimum amount of time, in seconds, that you want objects to stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated.
+* `maxTtl` - Maximum amount of time, in seconds, that objects stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated. +* `defaultTtl` - Default amount of time, in seconds, that you want objects to stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated. +* `comment` - Comment to describe the cache policy. +* `parametersInCacheKeyAndForwardedToOrigin` - The HTTP headers, cookies, and URL query strings to include in the cache key. See [Parameters In Cache Key And Forwarded To Origin](#parameters-in-cache-key-and-forwarded-to-origin) for more information. + +### Parameters In Cache Key And Forwarded To Origin + +* `cookiesConfig` - Object that determines whether any cookies in viewer requests (and if so, which cookies) are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information. +* `headersConfig` - Object that determines whether any HTTP headers (and if so, which headers) are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information. +* `queryStringsConfig` - Object that determines whether any URL query strings in viewer requests (and if so, which query strings) are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Query String Config](#query-string-config) for more information. +* `enableAcceptEncodingBrotli` - A flag that can affect whether the Accept-Encoding HTTP header is included in the cache key and included in requests that CloudFront sends to the origin. +* `enableAcceptEncodingGzip` - A flag that can affect whether the Accept-Encoding HTTP header is included in the cache key and included in requests that CloudFront sends to the origin. + +### Cookies Config + +* `cookieBehavior` - Determines whether any cookies in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allExcept`, `all`. +* `cookies` - Object that contains a list of cookie names. See [Items](#items) for more information. + +### Headers Config + +* `headerBehavior` - Determines whether any HTTP headers are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`. +* `headers` - Object that contains a list of header names. See [Items](#items) for more information. + +### Query String Config + +* `queryStringBehavior` - Determines whether any URL query strings in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allExcept`, `all`. +* `queryStrings` - Object that contains a list of query string names. See [Items](#items) for more information. + +### Items + +* `items` - List of item names (`cookies`, `headers`, or `queryStrings`). 
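+
+To tie this back to a distribution: the `id` exported by this data source is the value a distribution's cache behavior expects as its `cachePolicyId`. A minimal, illustrative sketch (the stack and output names are hypothetical):
+
+```typescript
+// Illustrative sketch: look up a managed policy and expose its id,
+// which a distribution's cache behavior consumes as cachePolicyId.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsCloudfrontCachePolicy } from "./.gen/providers/aws/data-aws-cloudfront-cache-policy";
+class CachePolicyLookup extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const optimized = new DataAwsCloudfrontCachePolicy(this, "optimized", {
+      name: "Managed-CachingOptimized",
+    });
+    new TerraformOutput(this, "cache_policy_id", {
+      value: optimized.id,
+    });
+  }
+}
+```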
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_distribution.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_distribution.html.markdown
new file mode 100644
index 00000000000..16861cf8bb9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_distribution.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_distribution"
+description: |-
+  Provides a CloudFront web distribution data source.
+---
+
+
+
+# Data Source: aws_cloudfront_distribution
+
+Use this data source to retrieve information about a CloudFront distribution.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontDistribution } from "./.gen/providers/aws/data-aws-cloudfront-distribution";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontDistribution(this, "test", {
+      id: "EDFDVBD632BHDS5",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `id` - (Required) Identifier for the distribution. For example: `EDFDVBD632BHDS5`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier for the distribution. For example: `EDFDVBD632BHDS5`.
+
+* `aliases` - List that contains information about CNAMEs (alternate domain names), if any, for this distribution.
+
+* `arn` - ARN (Amazon Resource Name) for the distribution. For example: `arn:aws:cloudfront::123456789012:distribution/EDFDVBD632BHDS5`, where `123456789012` is your AWS account ID.
+
+* `status` - Current status of the distribution. `Deployed` if the
+  distribution's information is fully propagated throughout the Amazon
+  CloudFront system.
+
+* `domainName` - Domain name corresponding to the distribution. For
+  example: `d604721fxaaqy9.cloudfront.net`.
+
+* `lastModifiedTime` - Date and time the distribution was last modified.
+
+* `inProgressValidationBatches` - The number of invalidation batches
+  currently in progress.
+
+* `etag` - Current version of the distribution's information. For example:
+  `E2QWRUHAPOMQZL`.
+
+* `hostedZoneId` - CloudFront Route 53 zone ID that can be used to
+  route an [alias resource record set](https://docs.aws.amazon.com/Route53/latest/APIReference/API_AliasTarget.html) to. This attribute is simply an
+  alias for the zone ID `Z2FDTNDATAQYW2`.
+* `webAclId` - AWS WAF web ACL associated with this distribution.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_function.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_function.html.markdown
new file mode 100644
index 00000000000..10698556eb0
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_function.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_function"
+description: |-
+  Provides a CloudFront Function data source.
+---
+
+
+
+# Data Source: aws_cloudfront_function
+
+Provides information about a CloudFront Function.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { VariableType, TerraformVariable, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontFunction } from "./.gen/providers/aws/data-aws-cloudfront-function";
+interface MyConfig {
+  stage: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const functionName = new TerraformVariable(this, "function_name", {
+      type: VariableType.STRING,
+    });
+    new DataAwsCloudfrontFunction(this, "existing", {
+      name: functionName.stringValue,
+      stage: config.stage,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the CloudFront function.
+* `stage` - (Required) Function’s stage, either `DEVELOPMENT` or `LIVE`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN identifying your CloudFront Function.
+* `code` - Source code of the function.
+* `comment` - Comment.
+* `etag` - ETag hash of the function.
+* `lastModifiedTime` - When this resource was last modified.
+* `runtime` - Identifier of the function's runtime.
+* `status` - Status of the function. Can be `UNPUBLISHED`, `UNASSOCIATED`, or `ASSOCIATED`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_log_delivery_canonical_user_id.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_log_delivery_canonical_user_id.html.markdown
new file mode 100644
index 00000000000..0638d54e986
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_log_delivery_canonical_user_id.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_log_delivery_canonical_user_id"
+description: |-
+  Provides the canonical user ID of the AWS `awslogsdelivery` account for CloudFront bucket logging.
+---
+
+
+
+# Data Source: aws_cloudfront_log_delivery_canonical_user_id
+
+The CloudFront Log Delivery Canonical User ID data source allows access to the [canonical user ID](http://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html) of the AWS `awslogsdelivery` account for CloudFront bucket logging.
+See the [Amazon CloudFront Developer Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/AccessLogs.html) for more information.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontLogDeliveryCanonicalUserId } from "./.gen/providers/aws/data-aws-cloudfront-log-delivery-canonical-user-id";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+interface MyConfig {
+  owner: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    const dataAwsCloudfrontLogDeliveryCanonicalUserIdExample =
+      new DataAwsCloudfrontLogDeliveryCanonicalUserId(this, "example_1", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsCloudfrontLogDeliveryCanonicalUserIdExample.overrideLogicalId(
+      "example"
+    );
+    const awsS3BucketAclExample = new S3BucketAcl(this, "example_2", {
+      accessControlPolicy: {
+        grant: [
+          {
+            grantee: {
+              id: Token.asString(
+                dataAwsCloudfrontLogDeliveryCanonicalUserIdExample.id
+              ),
+              type: "CanonicalUser",
+            },
+            permission: "FULL_CONTROL",
+          },
+        ],
+        owner: config.owner,
+      },
+      bucket: example.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketAclExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `region` - (Optional) Region you'd like the canonical user ID for. By default, fetches the current region.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Canonical user ID for the AWS `awslogsdelivery` account in the region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_origin_access_identities.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_origin_access_identities.html.markdown
new file mode 100644
index 00000000000..2375d263560
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_origin_access_identities.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_identities"
+description: |-
+  Use this data source to retrieve information about a set of Amazon CloudFront origin access identities.
+---
+
+
+
+# Data Source: aws_cloudfront_origin_access_identities
+
+Use this data source to get ARNs, IDs, and S3 canonical user IDs of Amazon CloudFront origin access identities.
+
+## Example Usage
+
+### All origin access identities in the account
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontOriginAccessIdentities } from "./.gen/providers/aws/data-aws-cloudfront-origin-access-identities";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontOriginAccessIdentities(this, "example", {});
+  }
+}
+
+```
+
+### Origin access identities filtered by comment/name
+
+Origin access identities whose comments are `example-comment1`, `example-comment2`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontOriginAccessIdentities } from "./.gen/providers/aws/data-aws-cloudfront-origin-access-identities";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontOriginAccessIdentities(this, "example", {
+      comments: ["example-comment1", "example-comment2"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `comments` (Optional) - Filter origin access identities by comment.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `iamArns` - Set of ARNs of the matched origin access identities.
+* `ids` - Set of IDs of the matched origin access identities.
+* `s3CanonicalUserIds` - Set of S3 canonical user IDs of the matched origin access identities.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_origin_access_identity.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_origin_access_identity.html.markdown
new file mode 100644
index 00000000000..70d7e6af8d3
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_origin_access_identity.html.markdown
@@ -0,0 +1,61 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_identity"
+description: |-
+  Use this data source to retrieve information for an Amazon CloudFront origin access identity.
+---
+
+
+
+# Data Source: aws_cloudfront_origin_access_identity
+
+Use this data source to retrieve information for an Amazon CloudFront origin access identity.
+
+## Example Usage
+
+The following example retrieves information about an existing CloudFront origin access identity.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontOriginAccessIdentity } from "./.gen/providers/aws/data-aws-cloudfront-origin-access-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontOriginAccessIdentity(this, "example", {
+      id: "EDFDVBD632BHDS5",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `id` (Required) - Identifier for the origin access identity. For example: `EDFDVBD632BHDS5`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `callerReference` - Internal value used by CloudFront to allow future
+  updates to the origin access identity.
+* `cloudfrontAccessIdentityPath` - A shortcut to the full path for the
+  origin access identity to use in CloudFront, see below.
+* `comment` - An optional comment for the origin access identity.
+* `etag` - Current version of the origin access identity's information.
+  For example: `E2QWRUHAPOMQZL`.
+* `iamArn` - Pre-generated ARN for use in S3 bucket policies (see below).
+  Example: `arn:aws:iam::cloudfront:user/CloudFront Origin Access Identity
+  E2QWRUHAPOMQZL`.
+* `s3CanonicalUserId` - The Amazon S3 canonical user ID for the origin
+  access identity, which you use when giving the origin access identity read
+  permission to an object in Amazon S3.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_origin_request_policy.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_origin_request_policy.html.markdown
new file mode 100644
index 00000000000..f5943ef2d2e
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_origin_request_policy.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_request_policy"
+description: |-
+  Determines the values that CloudFront includes in requests that it sends to the origin.
+---
+
+
+
+# Data Source: aws_cloudfront_origin_request_policy
+
+Use this data source to retrieve information about a CloudFront origin request policy.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontOriginRequestPolicy } from "./.gen/providers/aws/data-aws-cloudfront-origin-request-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontOriginRequestPolicy(this, "example", {
+      name: "example-policy",
+    });
+  }
+}
+
+```
+
+### AWS-Managed Policies
+
+AWS managed origin request policy names are prefixed with `Managed-`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontOriginRequestPolicy } from "./.gen/providers/aws/data-aws-cloudfront-origin-request-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontOriginRequestPolicy(this, "ua_referer", {
+      name: "Managed-UserAgentRefererHeaders",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Optional) Unique name to identify the origin request policy.
+* `id` - (Optional) Identifier for the origin request policy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `comment` - Comment to describe the origin request policy.
+* `cookiesConfig` - Object that determines whether any cookies in viewer requests (and if so, which cookies) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information.
+* `etag` - Current version of the origin request policy.
+* `headersConfig` - Object that determines whether any HTTP headers (and if so, which headers) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information.
+* `queryStringsConfig` - Object that determines whether any URL query strings in viewer requests (and if so, which query strings) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Query String Config](#query-string-config) for more information.
+
+### Cookies Config
+
+* `cookieBehavior` - Determines whether any cookies in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`.
+* `cookies` - Object that contains a list of cookie names. See [Items](#items) for more information.
+
+### Headers Config
+
+* `headerBehavior` - Determines whether any HTTP headers are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allViewer`, `allViewerAndWhitelistCloudFront`.
+* `headers` - Object that contains a list of header names. See [Items](#items) for more information.
+
+### Query String Config
+
+* `queryStringBehavior` - Determines whether any URL query strings in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`.
+* `queryStrings` - Object that contains a list of query string names. See [Items](#items) for more information.
+
+### Items
+
+* `items` - List of item names (cookies, headers, or query strings).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_realtime_log_config.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_realtime_log_config.html.markdown
new file mode 100644
index 00000000000..b7ac4b84047
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_realtime_log_config.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_realtime_log_config"
+description: |-
+  Provides details about a CloudFront real-time log configuration.
+---
+
+
+
+# Data Source: aws_cloudfront_realtime_log_config
+
+Provides details about a CloudFront real-time log configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontRealtimeLogConfig } from "./.gen/providers/aws/data-aws-cloudfront-realtime-log-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontRealtimeLogConfig(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Unique name to identify this real-time log configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN (Amazon Resource Name) of the CloudFront real-time log configuration.
+* `endpoint` - Amazon Kinesis data streams where real-time log data is sent.
+* `fields` - Fields that are included in each real-time log record. See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-fields) for supported values.
+* `samplingRate` - Sampling rate for this real-time log configuration. The sampling rate determines the percentage of viewer requests that are represented in the real-time log data. An integer between `1` and `100`, inclusive.
+
+The `endpoint` object supports the following:
+
+* `kinesisStreamConfig` - Amazon Kinesis data stream configuration.
+* `streamType` - Type of data stream where real-time log data is sent. The only valid value is `Kinesis`.
+
+The `kinesisStreamConfig` object supports the following:
+
+* `roleArn` - ARN of an [IAM role](iam_role.html) that CloudFront can use to send real-time log data to the Kinesis data stream.
+See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-iam-role) for more information.
+* `streamArn` - ARN of the [Kinesis data stream](kinesis_stream.html).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudfront_response_headers_policy.html.markdown b/website/docs/cdktf/typescript/d/cloudfront_response_headers_policy.html.markdown
new file mode 100644
index 00000000000..eeae6e886b6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudfront_response_headers_policy.html.markdown
@@ -0,0 +1,148 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_response_headers_policy"
+description: |-
+  Use this data source to retrieve information about a CloudFront response headers policy.
+---
+
+
+
+# Data Source: aws_cloudfront_response_headers_policy
+
+Use this data source to retrieve information about a CloudFront response headers policy.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontResponseHeadersPolicy } from "./.gen/providers/aws/data-aws-cloudfront-response-headers-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontResponseHeadersPolicy(this, "example", {
+      name: "example-policy",
+    });
+  }
+}
+
+```
+
+### AWS-Managed Policies
+
+AWS managed response header policy names are prefixed with `Managed-`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudfrontResponseHeadersPolicy } from "./.gen/providers/aws/data-aws-cloudfront-response-headers-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudfrontResponseHeadersPolicy(this, "example", {
+      name: "Managed-SimpleCORS",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Optional) Unique name to identify the response headers policy.
+* `id` - (Optional) Identifier for the response headers policy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `comment` - Comment to describe the response headers policy. The comment cannot be longer than 128 characters.
+* `etag` - Current version of the response headers policy.
+* `corsConfig` - Configuration for a set of HTTP response headers that are used for Cross-Origin Resource Sharing (CORS). See [Cors Config](#cors-config) for more information.
+* `customHeadersConfig` - Object that contains an attribute `items` that contains a list of Custom Headers. See [Custom Header](#custom-header) for more information.
+* `removeHeadersConfig` - Object that contains an attribute `items` that contains a list of Remove Headers. See [Remove Header](#remove-header) for more information.
+* `securityHeadersConfig` - A configuration for a set of security-related HTTP response headers. See [Security Headers Config](#security-headers-config) for more information.
+* `serverTimingHeadersConfig` - Configuration for enabling the Server-Timing header in HTTP responses sent from CloudFront. See [Server Timing Headers Config](#server-timing-headers-config) for more information.
+
+### Cors Config
+
+* `accessControlAllowCredentials` - A Boolean value that CloudFront uses as the value for the Access-Control-Allow-Credentials HTTP response header.
+* `accessControlAllowHeaders` - Object that contains an attribute `items` that contains a list of HTTP header names that CloudFront includes as values for the Access-Control-Allow-Headers HTTP response header.
+* `accessControlAllowMethods` - Object that contains an attribute `items` that contains a list of HTTP methods that CloudFront includes as values for the Access-Control-Allow-Methods HTTP response header. Valid values: `GET` | `POST` | `OPTIONS` | `PUT` | `DELETE` | `HEAD` | `ALL`
+* `accessControlAllowOrigins` - Object that contains an attribute `items` that contains a list of origins that CloudFront can use as the value for the Access-Control-Allow-Origin HTTP response header.
+* `accessControlExposeHeaders` - Object that contains an attribute `items` that contains a list of HTTP headers that CloudFront includes as values for the Access-Control-Expose-Headers HTTP response header.
+* `accessControlMaxAgeSec` - A number that CloudFront uses as the value for the Access-Control-Max-Age HTTP response header.
+
+### Custom Header
+
+* `header` - HTTP response header name.
+* `override` - Whether CloudFront overrides a response header with the same name received from the origin with the header specified here.
+* `value` - Value for the HTTP response header.
+
+### Remove Header
+
+* `header` - The HTTP header name.
+
+### Security Headers Config
+
+* `contentSecurityPolicy` - The policy directives and their values that CloudFront includes as values for the Content-Security-Policy HTTP response header. See [Content Security Policy](#content-security-policy) for more information.
See [Content Security Policy](#content-security-policy) for more information. +* `contentTypeOptions` - A setting that determines whether CloudFront includes the X-Content-Type-Options HTTP response header with its value set to nosniff. See [Content Type Options](#content-type-options) for more information. +* `frameOptions` - Setting that determines whether CloudFront includes the X-Frame-Options HTTP response header and the header’s value. See [Frame Options](#frame-options) for more information. +* `referrerPolicy` - Setting that determines whether CloudFront includes the Referrer-Policy HTTP response header and the header’s value. See [Referrer Policy](#referrer-policy) for more information. +* `strictTransportSecurity` - Settings that determine whether CloudFront includes the Strict-Transport-Security HTTP response header and the header’s value. See [Strict Transport Security](#strict-transport-security) for more information. +* `xssProtection` - Settings that determine whether CloudFront includes the X-XSS-Protection HTTP response header and the header’s value. See [XSS Protection](#xss-protection) for more information. + +### Content Security Policy + +* `contentSecurityPolicy` - The policy directives and their values that CloudFront includes as values for the Content-Security-Policy HTTP response header. +* `override` - Whether CloudFront overrides the Content-Security-Policy HTTP response header received from the origin with the one specified in this response headers policy. + +### Content Type Options + +* `override` - Whether CloudFront overrides the X-Content-Type-Options HTTP response header received from the origin with the one specified in this response headers policy. + +### Frame Options + +* `frameOption` - Value of the X-Frame-Options HTTP response header. Valid values: `deny` | `sameorigin` +* `override` - Whether CloudFront overrides the X-Frame-Options HTTP response header received from the origin with the one specified in this response headers policy. + +### Referrer Policy + +* `referrerPolicy` - Value of the Referrer-Policy HTTP response header. Valid Values: `noReferrer` | `noReferrerWhenDowngrade` | `origin` | `originWhenCrossOrigin` | `sameOrigin` | `strictOrigin` | `strictOriginWhenCrossOrigin` | `unsafeUrl` +* `override` - Whether CloudFront overrides the Referrer-Policy HTTP response header received from the origin with the one specified in this response headers policy. + +### Strict Transport Security + +* `accessControlMaxAgeSec` - A number that CloudFront uses as the value for the max-age directive in the Strict-Transport-Security HTTP response header. +* `includeSubdomains` - Whether CloudFront includes the includeSubDomains directive in the Strict-Transport-Security HTTP response header. +* `override` - Whether CloudFront overrides the Strict-Transport-Security HTTP response header received from the origin with the one specified in this response headers policy. +* `preload` - Whether CloudFront includes the preload directive in the Strict-Transport-Security HTTP response header. + +### XSS Protection + +* `modeBlock` - Whether CloudFront includes the mode=block directive in the X-XSS-Protection header. +* `override` - Whether CloudFront overrides the X-XSS-Protection HTTP response header received from the origin with the one specified in this response headers policy. +* `protection` - Boolean value that determines the value of the X-XSS-Protection HTTP response header. When this setting is true, the value of the X-XSS-Protection header is 1. 
When this setting is false, the value of the X-XSS-Protection header is 0.
+* `reportUri` - Whether CloudFront sets a reporting URI in the X-XSS-Protection header.
+
+### Server Timing Headers Config
+
+* `enabled` - Whether CloudFront adds the `serverTiming` header to HTTP responses that it sends in response to requests that match a cache behavior that's associated with this response headers policy.
+* `samplingRate` - Number 0–100 (inclusive) that specifies the percentage of responses that you want CloudFront to add the Server-Timing header to.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cloudhsm_v2_cluster.html.markdown b/website/docs/cdktf/typescript/d/cloudhsm_v2_cluster.html.markdown
new file mode 100644
index 00000000000..f9a4ec415e1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cloudhsm_v2_cluster.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "CloudHSM"
+layout: "aws"
+page_title: "AWS: aws_cloudhsm_v2_cluster"
+description: |-
+  Get information on a CloudHSM v2 cluster.
+---
+
+
+
+# Data Source: aws_cloudhsm_v2_cluster
+
+Use this data source to get information about a CloudHSM v2 cluster.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCloudhsmV2Cluster } from "./.gen/providers/aws/data-aws-cloudhsm-v2-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCloudhsmV2Cluster(this, "cluster", {
+      clusterId: "cluster-testclusterid",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `clusterId` - (Required) ID of the CloudHSM v2 cluster.
+* `clusterState` - (Optional) State of the cluster to be found.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `vpcId` - ID of the VPC that the CloudHSM cluster resides in.
+* `securityGroupId` - ID of the security group associated with the CloudHSM cluster.
+* `subnetIds` - IDs of the subnets in which the cluster operates.
+* `clusterCertificates` - The list of cluster certificates.
+    * `clusterCertificates0ClusterCertificate` - The cluster certificate issued (signed) by the issuing certificate authority (CA) of the cluster's owner.
+    * `clusterCertificates0ClusterCsr` - The certificate signing request (CSR). Available only in the `UNINITIALIZED` state.
+    * `clusterCertificates0AwsHardwareCertificate` - The HSM hardware certificate issued (signed) by AWS CloudHSM.
+    * `clusterCertificates0HsmCertificate` - The HSM certificate issued (signed) by the HSM hardware.
+    * `clusterCertificates0ManufacturerHardwareCertificate` - The HSM hardware certificate issued (signed) by the hardware manufacturer.
+The number of available cluster certificates may vary depending on the state of the cluster.
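+
+The exported attributes can feed other constructs directly. A minimal, hypothetical sketch (the stack and output names are illustrative) that surfaces the cluster's security group for reuse:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsCloudhsmV2Cluster } from "./.gen/providers/aws/data-aws-cloudhsm-v2-cluster";
+class ClusterLookup extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const cluster = new DataAwsCloudhsmV2Cluster(this, "cluster", {
+      clusterId: "cluster-testclusterid",
+    });
+    // Client instances must be attached to this security group to reach the HSMs.
+    new TerraformOutput(this, "hsm_security_group_id", {
+      value: cluster.securityGroupId,
+    });
+  }
+}
+```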
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cloudtrail_service_account.html.markdown b/website/docs/cdktf/typescript/d/cloudtrail_service_account.html.markdown new file mode 100644 index 00000000000..44d0cdb1890 --- /dev/null +++ b/website/docs/cdktf/typescript/d/cloudtrail_service_account.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "CloudTrail" +layout: "aws" +page_title: "AWS: aws_cloudtrail_service_account" +description: |- + Get AWS CloudTrail Service Account ID for storing trail data in S3. +--- + + + +# Data Source: aws_cloudtrail_service_account + +Use this data source to get the Account ID of the [AWS CloudTrail Service Account](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-supported-regions.html) +in a given region for the purpose of allowing CloudTrail to store trail data in S3. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCloudtrailServiceAccount } from "./.gen/providers/aws/data-aws-cloudtrail-service-account"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "tf-cloudtrail-logging-test-bucket", + forceDestroy: true, + }); + const main = new DataAwsCloudtrailServiceAccount(this, "main", {}); + const allowCloudtrailLogging = new DataAwsIamPolicyDocument( + this, + "allow_cloudtrail_logging", + { + statement: [ + { + actions: ["s3:PutObject"], + effect: "Allow", + principals: [ + { + identifiers: [Token.asString(main.arn)], + type: "AWS", + }, + ], + resources: ["${" + bucket.arn + "}/*"], + sid: "Put bucket policy needed for trails", + }, + { + actions: ["s3:GetBucketAcl"], + effect: "Allow", + principals: [ + { + identifiers: [Token.asString(main.arn)], + type: "AWS", + }, + ], + resources: [bucket.arn], + sid: "Get bucket policy needed for trails", + }, + ], + } + ); + const awsS3BucketPolicyAllowCloudtrailLogging = new S3BucketPolicy( + this, + "allow_cloudtrail_logging_3", + { + bucket: bucket.id, + policy: Token.asString(allowCloudtrailLogging.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyAllowCloudtrailLogging.overrideLogicalId( + "allow_cloudtrail_logging" + ); + } +} + +``` + +## Argument Reference + +* `region` - (Optional) Name of the region whose AWS CloudTrail account ID is desired. +Defaults to the region from the AWS provider configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the AWS CloudTrail service account in the selected region. +* `arn` - ARN of the AWS CloudTrail service account in the selected region. 
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cloudwatch_event_bus.html.markdown b/website/docs/cdktf/typescript/d/cloudwatch_event_bus.html.markdown new file mode 100644 index 00000000000..56e54fde052 --- /dev/null +++ b/website/docs/cdktf/typescript/d/cloudwatch_event_bus.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_bus" +description: |- + Get information on an EventBridge (Cloudwatch) Event Bus. +--- + + + +# Data Source: aws_cloudwatch_event_bus + +This data source can be used to fetch information about a specific +EventBridge event bus. Use this data source to compute the ARN of +an event bus, given the name of the bus. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCloudwatchEventBus } from "./.gen/providers/aws/data-aws-cloudwatch-event-bus"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCloudwatchEventBus(this, "example", { + name: "example-bus-name", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) Friendly EventBridge event bus name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cloudwatch_event_connection.html.markdown b/website/docs/cdktf/typescript/d/cloudwatch_event_connection.html.markdown new file mode 100644 index 00000000000..c0708742d54 --- /dev/null +++ b/website/docs/cdktf/typescript/d/cloudwatch_event_connection.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_connection" +description: |- + Provides an EventBridge connection data source. +--- + + + +# Data Source: aws_cloudwatch_event_connection + +Use this data source to retrieve information about an EventBridge connection. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCloudwatchEventConnection } from "./.gen/providers/aws/data-aws-cloudwatch-event-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCloudwatchEventConnection(this, "test", { + name: "test", + }); + } +} + +``` + +## Argument Reference + +* `name` - Name of the connection. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `name` - Name of the connection. + +* `arn` - ARN (Amazon Resource Name) for the connection. + +* `secretArn` - ARN (Amazon Resource Name) for the secret created from the authorization parameters specified for the connection. + +* `authorizationType` - Type of authorization to use to connect. One of `apiKey`,`basic`,`oauthClientCredentials`. 
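+
+The `secretArn` attribute is typically used to let a consumer read the connection's credentials. A minimal sketch, assuming a connection named `test` (the policy document name is illustrative):
+
+```typescript
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsCloudwatchEventConnection } from "./.gen/providers/aws/data-aws-cloudwatch-event-connection";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+class ConnectionSecretPolicy extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const connection = new DataAwsCloudwatchEventConnection(this, "test", {
+      name: "test",
+    });
+    // Grant read access to the Secrets Manager secret backing the connection.
+    new DataAwsIamPolicyDocument(this, "read_connection_secret", {
+      statement: [
+        {
+          actions: ["secretsmanager:GetSecretValue"],
+          resources: [Token.asString(connection.secretArn)],
+        },
+      ],
+    });
+  }
+}
+```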
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cloudwatch_event_source.html.markdown b/website/docs/cdktf/typescript/d/cloudwatch_event_source.html.markdown new file mode 100644 index 00000000000..b8214e450ff --- /dev/null +++ b/website/docs/cdktf/typescript/d/cloudwatch_event_source.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_source" +description: |- + Get information on an EventBridge (Cloudwatch) Event Source. +--- + + + +# Data Source: aws_cloudwatch_event_source + +Use this data source to get information about an EventBridge Partner Event Source. This data source will only return one partner event source. An error will be returned if multiple sources match the same name prefix. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCloudwatchEventSource } from "./.gen/providers/aws/data-aws-cloudwatch-event-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCloudwatchEventSource(this, "examplepartner", { + namePrefix: "aws.partner/examplepartner.com", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `namePrefix` - (Optional) Specifying this limits the results to only those partner event sources with names that start with the specified prefix + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the partner event source +* `createdBy` - Name of the SaaS partner that created the event source +* `name` - Name of the event source +* `state` - State of the event source (`active` or `pending`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cloudwatch_log_data_protection_policy_document.html.markdown b/website/docs/cdktf/typescript/d/cloudwatch_log_data_protection_policy_document.html.markdown new file mode 100644 index 00000000000..c0b4726ef5c --- /dev/null +++ b/website/docs/cdktf/typescript/d/cloudwatch_log_data_protection_policy_document.html.markdown @@ -0,0 +1,148 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_data_protection_policy_document" +description: |- + Generates a CloudWatch Log Group Data Protection Policy document in JSON format +--- + + + +# Data Source: aws_cloudwatch_log_data_protection_policy_document + +Generates a CloudWatch Log Group Data Protection Policy document in JSON format for use with the `awsCloudwatchLogDataProtectionPolicy` resource. + +-> For more information about data protection policies, see the [Help protect sensitive log data with masking](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/mask-sensitive-log-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { CloudwatchLogDataProtectionPolicy } from "./.gen/providers/aws/cloudwatch-log-data-protection-policy";
+import { DataAwsCloudwatchLogDataProtectionPolicyDocument } from "./.gen/providers/aws/data-aws-cloudwatch-log-data-protection-policy-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsCloudwatchLogDataProtectionPolicyDocument(
+      this,
+      "example",
+      {
+        name: "Example",
+        statement: [
+          {
+            dataIdentifiers: [
+              "arn:aws:dataprotection::aws:data-identifier/EmailAddress",
+              "arn:aws:dataprotection::aws:data-identifier/DriversLicense-US",
+            ],
+            operation: {
+              audit: {
+                findingsDestination: {
+                  cloudwatchLogs: {
+                    logGroup: audit.name,
+                  },
+                  firehose: {
+                    deliveryStream: Token.asString(
+                      awsKinesisFirehoseDeliveryStreamAudit.name
+                    ),
+                  },
+                  s3: {
+                    bucket: Token.asString(awsS3BucketAudit.bucket),
+                  },
+                },
+              },
+            },
+            sid: "Audit",
+          },
+          {
+            dataIdentifiers: [
+              "arn:aws:dataprotection::aws:data-identifier/EmailAddress",
+              "arn:aws:dataprotection::aws:data-identifier/DriversLicense-US",
+            ],
+            operation: {
+              deidentify: {
+                maskConfig: {},
+              },
+            },
+            sid: "Deidentify",
+          },
+        ],
+      }
+    );
+    const awsCloudwatchLogDataProtectionPolicyExample =
+      new CloudwatchLogDataProtectionPolicy(this, "example_1", {
+        logGroupName: Token.asString(awsCloudwatchLogGroupExample.name),
+        policyDocument: Token.asString(example.json),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogDataProtectionPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the data protection policy document.
+* `statement` - (Required) Configures the data protection policy.
+
+-> There must be exactly two statements: the first with an `audit` operation, and the second with a `deidentify` operation.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the data protection policy document.
+* `version` - (Optional) Version of the data protection policy document.
+
+### statement Configuration Block
+
+* `dataIdentifiers` - (Required) Set of at least one sensitive data identifier that you want to mask. Read more in [Types of data that you can protect](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/protect-sensitive-log-data-types.html).
+* `operation` - (Required) Configures the data protection operation applied by this statement.
+* `sid` - (Optional) Name of this statement.
+
+#### operation Configuration Block
+
+* `audit` - (Optional) Configures the detection of sensitive data.
+* `deidentify` - (Optional) Configures the masking of sensitive data.
+
+-> Every policy statement must specify exactly one operation.
+
+##### audit Configuration Block
+
+* `findingsDestination` - (Required) Configures destinations to send audit findings to.
+
+##### findings_destination Configuration Block
+
+* `cloudwatchLogs` - (Optional) Configures CloudWatch Logs as a findings destination.
+* `firehose` - (Optional) Configures Kinesis Firehose as a findings destination.
+* `s3` - (Optional) Configures S3 as a findings destination.
+
+###### cloudwatch_logs Configuration Block
+
+* `logGroup` - (Required) Name of the CloudWatch Log Group to send findings to.
+
+###### firehose Configuration Block
+
+* `deliveryStream` - (Required) Name of the Kinesis Firehose Delivery Stream to send findings to.
+ +###### s3 Configuration Block + +* `bucket` - (Required) Name of the S3 Bucket to send findings to. + +##### deidentify Configuration Block + +* `maskConfig` - (Required) An empty object that configures masking. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `json` - Standard JSON policy document rendered based on the arguments above. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cloudwatch_log_group.html.markdown b/website/docs/cdktf/typescript/d/cloudwatch_log_group.html.markdown new file mode 100644 index 00000000000..2c7009d0b26 --- /dev/null +++ b/website/docs/cdktf/typescript/d/cloudwatch_log_group.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_group" +description: |- + Get information on a Cloudwatch Log Group. +--- + + + +# Data Source: aws_cloudwatch_log_group + +Use this data source to get information about an AWS Cloudwatch Log Group + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCloudwatchLogGroup } from "./.gen/providers/aws/data-aws-cloudwatch-log-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCloudwatchLogGroup(this, "example", { + name: "MyImportantLogs", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the Cloudwatch log group + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Cloudwatch log group. Any `:*` suffix added by the API, denoting all CloudWatch Log Streams under the CloudWatch Log Group, is removed for greater compatibility with other AWS services that do not accept the suffix. +* `creationTime` - Creation time of the log group, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. +* `retentionInDays` - Number of days log events retained in the specified log group. +* `kmsKeyId` - ARN of the KMS Key to use when encrypting log data. +* `tags` - Map of tags to assign to the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cloudwatch_log_groups.html.markdown b/website/docs/cdktf/typescript/d/cloudwatch_log_groups.html.markdown new file mode 100644 index 00000000000..049e85bac1f --- /dev/null +++ b/website/docs/cdktf/typescript/d/cloudwatch_log_groups.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_groups" +description: |- + Get list of Cloudwatch Log Groups. +--- + + + +# Data Source: aws_cloudwatch_log_groups + +Use this data source to get a list of AWS Cloudwatch Log Groups + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsCloudwatchLogGroups } from "./.gen/providers/aws/data-aws-cloudwatch-log-groups"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCloudwatchLogGroups(this, "example", { + logGroupNamePrefix: "/MyImportantLogs", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `logGroupNamePrefix` - (Optional) Group prefix of the Cloudwatch log groups to list + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the Cloudwatch log groups +* `logGroupNames` - Set of names of the Cloudwatch log groups + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/codeartifact_authorization_token.html.markdown b/website/docs/cdktf/typescript/d/codeartifact_authorization_token.html.markdown new file mode 100644 index 00000000000..040b9c296b1 --- /dev/null +++ b/website/docs/cdktf/typescript/d/codeartifact_authorization_token.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "CodeArtifact" +layout: "aws" +page_title: "AWS: aws_codeartifact_authorization_token" +description: |- + Provides details about a CodeArtifact Authorization Token +--- + + + +# Data Source: aws_codeartifact_authorization_token + +The CodeArtifact Authorization Token data source generates a temporary authentication token for accessing repositories in a CodeArtifact domain. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCodeartifactAuthorizationToken } from "./.gen/providers/aws/data-aws-codeartifact-authorization-token"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCodeartifactAuthorizationToken(this, "test", { + domain: Token.asString(awsCodeartifactDomainTest.domain), + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `domain` - (Required) Name of the domain that is in scope for the generated authorization token. +* `domainOwner` - (Optional) Account number of the AWS account that owns the domain. +* `durationSeconds` - (Optional) Time, in seconds, that the generated authorization token is valid. Valid values are `0` and between `900` and `43200`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `authorizationToken` - Temporary authorization token. +* `expiration` - Time in UTC RFC3339 format when the authorization token expires. 
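+
+Because the token is a credential, mark it as sensitive if you expose it as a stack output. A minimal sketch (the domain and output names are illustrative):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsCodeartifactAuthorizationToken } from "./.gen/providers/aws/data-aws-codeartifact-authorization-token";
+class TokenLookup extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const token = new DataAwsCodeartifactAuthorizationToken(this, "auth", {
+      domain: "example-domain",
+    });
+    // 'sensitive' keeps the token out of plain-text CLI output.
+    new TerraformOutput(this, "codeartifact_token", {
+      sensitive: true,
+      value: token.authorizationToken,
+    });
+  }
+}
+```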
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/codeartifact_repository_endpoint.html.markdown b/website/docs/cdktf/typescript/d/codeartifact_repository_endpoint.html.markdown new file mode 100644 index 00000000000..96a43817cb2 --- /dev/null +++ b/website/docs/cdktf/typescript/d/codeartifact_repository_endpoint.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "CodeArtifact" +layout: "aws" +page_title: "AWS: aws_codeartifact_repository_endpoint" +description: |- + Provides details about a CodeArtifact Repository Endpoint +--- + + + +# Data Source: aws_codeartifact_repository_endpoint + +The CodeArtifact Repository Endpoint data source returns the endpoint of a repository for a specific package format. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCodeartifactRepositoryEndpoint } from "./.gen/providers/aws/data-aws-codeartifact-repository-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCodeartifactRepositoryEndpoint(this, "test", { + domain: Token.asString(awsCodeartifactDomainTest.domain), + format: "npm", + repository: Token.asString(awsCodeartifactRepositoryTest.repository), + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `domain` - (Required) Name of the domain that contains the repository. +* `repository` - (Required) Name of the repository. +* `format` - (Required) Which endpoint of a repository to return. A repository has one endpoint for each package format: `npm`, `pypi`, `maven`, and `nuget`. +* `domainOwner` - (Optional) Account number of the AWS account that owns the domain. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `repositoryEndpoint` - URL of the returned endpoint. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/codecommit_approval_rule_template.html.markdown b/website/docs/cdktf/typescript/d/codecommit_approval_rule_template.html.markdown new file mode 100644 index 00000000000..15cd446b5c8 --- /dev/null +++ b/website/docs/cdktf/typescript/d/codecommit_approval_rule_template.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "CodeCommit" +layout: "aws" +page_title: "AWS: aws_codecommit_approval_rule_template" +description: |- + Provides details about a specific CodeCommit Approval Rule Template. +--- + + + +# Data Source: aws_codecommit_approval_rule_template + +Provides details about a specific CodeCommit Approval Rule Template. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsCodecommitApprovalRuleTemplate } from "./.gen/providers/aws/data-aws-codecommit-approval-rule-template";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCodecommitApprovalRuleTemplate(this, "example", {
+      name: "MyExampleApprovalRuleTemplate",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name for the approval rule template. This needs to be less than 100 characters.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `approvalRuleTemplateId` - The ID of the approval rule template.
+* `content` - Content of the approval rule template.
+* `creationDate` - Date the approval rule template was created, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `description` - Description of the approval rule template.
+* `lastModifiedDate` - Date the approval rule template was most recently changed, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `lastModifiedUser` - ARN of the user who made the most recent changes to the approval rule template.
+* `ruleContentSha256` - SHA-256 hash signature for the content of the approval rule template.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/codecommit_repository.html.markdown b/website/docs/cdktf/typescript/d/codecommit_repository.html.markdown
new file mode 100644
index 00000000000..9b621c25388
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/codecommit_repository.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "CodeCommit"
+layout: "aws"
+page_title: "AWS: aws_codecommit_repository"
+description: |-
+  Provides details about a CodeCommit repository.
+---
+
+
+
+# Data Source: aws_codecommit_repository
+
+The CodeCommit Repository data source allows the ARN, repository ID, and the HTTPS and SSH clone URLs to be retrieved for a CodeCommit repository.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCodecommitRepository } from "./.gen/providers/aws/data-aws-codecommit-repository";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCodecommitRepository(this, "test", {
+      repositoryName: "MyTestRepository",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `repositoryName` - (Required) Name for the repository. This needs to be less than 100 characters.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `repositoryId` - ID of the repository.
+* `arn` - ARN of the repository.
+* `cloneUrlHttp` - URL to use for cloning the repository over HTTPS.
+* `cloneUrlSsh` - URL to use for cloning the repository over SSH.
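+
+The clone URLs are commonly handed to tooling outside Terraform. A minimal sketch (the output name is illustrative):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsCodecommitRepository } from "./.gen/providers/aws/data-aws-codecommit-repository";
+class RepositoryLookup extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const repo = new DataAwsCodecommitRepository(this, "test", {
+      repositoryName: "MyTestRepository",
+    });
+    // Expose the HTTPS clone URL, e.g. for CI configuration.
+    new TerraformOutput(this, "clone_url_http", {
+      value: repo.cloneUrlHttp,
+    });
+  }
+}
+```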
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/codestarconnections_connection.html.markdown b/website/docs/cdktf/typescript/d/codestarconnections_connection.html.markdown new file mode 100644 index 00000000000..2d02fee1ff3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/codestarconnections_connection.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "CodeStar Connections" +layout: "aws" +page_title: "AWS: aws_codestarconnections_connection" +description: |- + Provides details about CodeStar Connection +--- + + + +# Data Source: aws_codestarconnections_connection + +Provides details about CodeStar Connection. + +## Example Usage + +### By ARN + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCodestarconnectionsConnection } from "./.gen/providers/aws/data-aws-codestarconnections-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCodestarconnectionsConnection(this, "example", { + arn: Token.asString(awsCodestarconnectionsConnectionExample.arn), + }); + } +} + +``` + +### By Name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCodestarconnectionsConnection } from "./.gen/providers/aws/data-aws-codestarconnections-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCodestarconnectionsConnection(this, "example", { + name: Token.asString(awsCodestarconnectionsConnectionExample.name), + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) CodeStar Connection ARN. +* `name` - (Optional) CodeStar Connection name. + +~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `connectionStatus` - CodeStar Connection status. Possible values are `pending`, `available` and `error`. +* `id` - CodeStar Connection ARN. +* `hostArn` - ARN of the host associated with the connection. +* `name` - Name of the CodeStar Connection. The name is unique in the calling AWS account. +* `providerType` - Name of the external provider where your third-party code repository is configured. Possible values are `bitbucket` and `gitHub`. For connections to a GitHub Enterprise Server instance, you must create an [aws_codestarconnections_host](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codestarconnections_host) resource and use `hostArn` instead. +* `tags` - Map of key-value resource tags to associate with the resource. 
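+
+A connection created through the API starts out in `pending` status until the handshake is completed in the AWS console, so surfacing `connectionStatus` can be useful. A minimal sketch (the connection name is illustrative):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsCodestarconnectionsConnection } from "./.gen/providers/aws/data-aws-codestarconnections-connection";
+class ConnectionStatus extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const connection = new DataAwsCodestarconnectionsConnection(this, "example", {
+      name: "example-connection",
+    });
+    // 'pending' means the connection still needs to be authorized by a human.
+    new TerraformOutput(this, "connection_status", {
+      value: connection.connectionStatus,
+    });
+  }
+}
+```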
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cognito_user_pool_client.markdown b/website/docs/cdktf/typescript/d/cognito_user_pool_client.markdown
new file mode 100644
index 00000000000..b8c02db6180
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cognito_user_pool_client.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_client"
+description: |-
+  Provides details about a Cognito User Pool Client
+---
+
+
+
+# Data Source: aws_cognito_user_pool_client
+
+Provides details about a Cognito User Pool Client.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCognitoUserPoolClient } from "./.gen/providers/aws/data-aws-cognito-user-pool-client";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCognitoUserPoolClient(this, "client", {
+      clientId: "38fjsnc484p94kpqsnet7mpld0",
+      userPoolId: "us-west-2_aaaaaaaaa",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `clientId` - (Required) Client ID of the user pool client.
+* `userPoolId` - (Required) User pool the client belongs to.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `accessTokenValidity` - (Optional) Time limit, between 5 minutes and 1 day, after which the access token is no longer valid and cannot be used. This value will be overridden if you have entered a value in `tokenValidityUnits`.
+* `allowedOauthFlowsUserPoolClient` - (Optional) Whether the client is allowed to follow the OAuth protocol when interacting with Cognito user pools.
+* `allowedOauthFlows` - (Optional) List of allowed OAuth flows (code, implicit, client_credentials).
+* `allowedOauthScopes` - (Optional) List of allowed OAuth scopes (phone, email, openid, profile, and aws.cognito.signin.user.admin).
+* `analyticsConfiguration` - (Optional) Configuration block for Amazon Pinpoint analytics for collecting metrics for this user pool. [Detailed below](#analytics_configuration).
+* `callbackUrls` - (Optional) List of allowed callback URLs for the identity providers.
+* `clientSecret` - Client secret of the user pool client.
+* `defaultRedirectUri` - (Optional) Default redirect URI. Must be in the list of callback URLs.
+* `enableTokenRevocation` - (Optional) Enables or disables token revocation.
+* `explicitAuthFlows` - (Optional) List of authentication flows (ADMIN_NO_SRP_AUTH, CUSTOM_AUTH_FLOW_ONLY, USER_PASSWORD_AUTH, ALLOW_ADMIN_USER_PASSWORD_AUTH, ALLOW_CUSTOM_AUTH, ALLOW_USER_PASSWORD_AUTH, ALLOW_USER_SRP_AUTH, ALLOW_REFRESH_TOKEN_AUTH).
+* `generateSecret` - (Optional) Whether an application secret should be generated.
+* `idTokenValidity` - (Optional) Time limit, between 5 minutes and 1 day, after which the ID token is no longer valid and cannot be used. This value will be overridden if you have entered a value in `tokenValidityUnits`.
+* `logoutUrls` - (Optional) List of allowed logout URLs for the identity providers.
+* `preventUserExistenceErrors` - (Optional) Choose which errors and responses are returned by Cognito APIs during authentication, account confirmation, and password recovery when the user does not exist in the user pool. When set to `enabled` and the user does not exist, authentication returns an error indicating either the username or password was incorrect, and account confirmation and password recovery return a response indicating a code was sent to a simulated destination. When set to `legacy`, those APIs will return a `userNotFoundException` exception if the user does not exist in the user pool.
+* `readAttributes` - (Optional) List of user pool attributes the application client can read from.
+* `refreshTokenValidity` - (Optional) Time limit in days refresh tokens are valid for.
+* `supportedIdentityProviders` - (Optional) List of provider names for the identity providers that are supported on this client. Uses the `providerName` attribute of `awsCognitoIdentityProvider` resource(s), or the equivalent string(s).
+* `tokenValidityUnits` - (Optional) Configuration block for the units in which the validity times are represented. [Detailed below](#token_validity_units).
+* `writeAttributes` - (Optional) List of user pool attributes the application client can write to.
+
+### analytics_configuration
+
+Either `applicationArn` or `applicationId` is required.
+
+* `applicationArn` - (Optional) Application ARN for an Amazon Pinpoint application. Conflicts with `externalId` and `roleArn`.
+* `applicationId` - (Optional) Application ID for an Amazon Pinpoint application.
+* `externalId` - (Optional) ID for the Analytics Configuration. Conflicts with `applicationArn`.
+* `roleArn` - (Optional) ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics. Conflicts with `applicationArn`.
+* `userDataShared` - (Optional) If set to `true`, Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
+
+### token_validity_units
+
+Valid values for the following arguments are: `seconds`, `minutes`, `hours` or `days`.
+
+* `accessToken` - (Optional) Time unit for the value in `accessTokenValidity`, defaults to `hours`.
+* `idToken` - (Optional) Time unit for the value in `idTokenValidity`, defaults to `hours`.
+* `refreshToken` - (Optional) Time unit for the value in `refreshTokenValidity`, defaults to `days`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cognito_user_pool_clients.markdown b/website/docs/cdktf/typescript/d/cognito_user_pool_clients.markdown
new file mode 100644
index 00000000000..6ab27b3b812
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cognito_user_pool_clients.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_clients"
+description: |-
+  Get list of Cognito user pool clients connected to a user pool.
+---
+
+
+
+# Data Source: aws_cognito_user_pool_clients
+
+Use this data source to get a list of Cognito user pool clients for a Cognito IdP user pool.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsCognitoUserPoolClients } from "./.gen/providers/aws/data-aws-cognito-user-pool-clients"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCognitoUserPoolClients(this, "main", { + userPoolId: Token.asString(awsCognitoUserPoolMain.id), + }); + } +} + +``` + +## Argument Reference + +* `userPoolId` - (Required) Cognito user pool ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `clientIds` - List of Cognito user pool client IDs. +* `clientNames` - List of Cognito user pool client names. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cognito_user_pool_signing_certificate.markdown b/website/docs/cdktf/typescript/d/cognito_user_pool_signing_certificate.markdown new file mode 100644 index 00000000000..ebea4f6fee0 --- /dev/null +++ b/website/docs/cdktf/typescript/d/cognito_user_pool_signing_certificate.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_pool_signing_certificate" +description: |- + Get signing certificate of user pool +--- + + + +# Data Source: aws_cognito_user_pool_signing_certificate + +Use this data source to get the signing certificate for a Cognito IdP user pool. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCognitoUserPoolSigningCertificate } from "./.gen/providers/aws/data-aws-cognito-user-pool-signing-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsCognitoUserPoolSigningCertificate(this, "sc", { + userPoolId: myPool.id, + }); + } +} + +``` + +## Argument Reference + +* `userPoolId` - (Required) Cognito user pool ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `certificate` - Certificate string + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/cognito_user_pools.markdown b/website/docs/cdktf/typescript/d/cognito_user_pools.markdown new file mode 100644 index 00000000000..3d582b81d64 --- /dev/null +++ b/website/docs/cdktf/typescript/d/cognito_user_pools.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_pools" +description: |- + Get list of cognito user pools. +--- + + + +# Data Source: aws_cognito_user_pools + +Use this data source to get a list of cognito user pools. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ApiGatewayAuthorizer } from "./.gen/providers/aws/api-gateway-authorizer"; +import { DataAwsApiGatewayRestApi } from "./.gen/providers/aws/data-aws-api-gateway-rest-api"; +import { DataAwsCognitoUserPools } from "./.gen/providers/aws/data-aws-cognito-user-pools"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const selected = new DataAwsApiGatewayRestApi(this, "selected", { + name: apiGatewayName.stringValue, + }); + const dataAwsCognitoUserPoolsSelected = new DataAwsCognitoUserPools( + this, + "selected_1", + { + name: cognitoUserPoolName.stringValue, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsCognitoUserPoolsSelected.overrideLogicalId("selected"); + new ApiGatewayAuthorizer(this, "cognito", { + name: "cognito", + providerArns: Token.asList(dataAwsCognitoUserPoolsSelected.arns), + restApiId: Token.asString(selected.id), + type: "COGNITO_USER_POOLS", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) Name of the cognito user pools. Name is not a unique attribute for cognito user pool, so multiple pools might be returned with given name. If the pool name is expected to be unique, you can reference the pool id via `tolist(dataAwsCognitoUserPoolsSelectedIds)[0]` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - Set of cognito user pool ids. +* `arns` - Set of cognito user pool Amazon Resource Names (ARNs). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/connect_bot_association.markdown b/website/docs/cdktf/typescript/d/connect_bot_association.markdown new file mode 100644 index 00000000000..7c636afb7ce --- /dev/null +++ b/website/docs/cdktf/typescript/d/connect_bot_association.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_bot_association" +description: |- + Provides details about a specific Lex (V1) Bot associated with an Amazon Connect instance +--- + + + +# Data Source: aws_connect_bot_association + +Provides details about a specific Lex (V1) Bot associated with an Amazon Connect instance. + +## Example Usage + +### By name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsConnectBotAssociation } from "./.gen/providers/aws/data-aws-connect-bot-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsConnectBotAssociation(this, "example", { + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + lexBot: { + name: "Test", + }, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `instanceId` - (Required) Identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance. +* `lexBot` - (Required) Configuration information of an Amazon Lex (V1) bot. Detailed below. + +### lex_bot + +The `lexBot` configuration block supports the following: + +* `name` - (Required) Name of the Amazon Lex (V1) bot. +* `lexRegion` - (Optional) Region that the Amazon Lex (V1) bot was created in. 
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/connect_contact_flow.html.markdown b/website/docs/cdktf/typescript/d/connect_contact_flow.html.markdown new file mode 100644 index 00000000000..e63d73cae97 --- /dev/null +++ b/website/docs/cdktf/typescript/d/connect_contact_flow.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_contact_flow" +description: |- + Provides details about a specific Amazon Connect Contact Flow. +--- + + + +# Data Source: aws_connect_contact_flow + +Provides details about a specific Amazon Connect Contact Flow. + +## Example Usage + +By name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsConnectContactFlow } from "./.gen/providers/aws/data-aws-connect-contact-flow"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsConnectContactFlow(this, "test", { + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "Test", + }); + } +} + +``` + +By contact_flow_id + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsConnectContactFlow } from "./.gen/providers/aws/data-aws-connect-contact-flow"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsConnectContactFlow(this, "test", { + contactFlowId: "cccccccc-bbbb-cccc-dddd-111111111111", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + }); + } +} + +``` + +## Argument Reference + +~> **NOTE:** `instanceId` and one of either `name` or `contactFlowId` is required. + +This argument supports the following arguments: + +* `contactFlowId` - (Optional) Returns information on a specific Contact Flow by contact flow id +* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance +* `name` - (Optional) Returns information on a specific Contact Flow by name + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Contact Flow. +* `content` - Logic of the Contact Flow. +* `description` - Description of the Contact Flow. +* `tags` - Tags to assign to the Contact Flow. +* `type` - Type of Contact Flow. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/connect_contact_flow_module.html.markdown b/website/docs/cdktf/typescript/d/connect_contact_flow_module.html.markdown new file mode 100644 index 00000000000..f3a3ffa5f87 --- /dev/null +++ b/website/docs/cdktf/typescript/d/connect_contact_flow_module.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_contact_flow_module" +description: |- + Provides details about a specific Amazon Connect Contact Flow Module. +--- + + + +# Data Source: aws_connect_contact_flow_module + +Provides details about a specific Amazon Connect Contact Flow Module. 
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectContactFlowModule } from "./.gen/providers/aws/data-aws-connect-contact-flow-module";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectContactFlowModule(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "example",
+    });
+  }
+}
+
+```
+
+By `contactFlowModuleId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectContactFlowModule } from "./.gen/providers/aws/data-aws-connect-contact-flow-module";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectContactFlowModule(this, "example", {
+      contactFlowModuleId: "cccccccc-bbbb-cccc-dddd-111111111111",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and one of either `name` or `contactFlowModuleId` is required.
+
+This data source supports the following arguments:
+
+* `contactFlowModuleId` - (Optional) Returns information on a specific Contact Flow Module by contact flow module id
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance
+* `name` - (Optional) Returns information on a specific Contact Flow Module by name
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Contact Flow Module.
+* `content` - Logic of the Contact Flow Module.
+* `description` - Description of the Contact Flow Module.
+* `tags` - Map of tags to assign to the Contact Flow Module.
+* `state` - State of the Contact Flow Module. Values are either `active` or `archived`.
+* `status` - Status of the Contact Flow Module. Values are either `published` or `saved`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_hours_of_operation.html.markdown b/website/docs/cdktf/typescript/d/connect_hours_of_operation.html.markdown
new file mode 100644
index 00000000000..c76175f9d81
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_hours_of_operation.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_hours_of_operation"
+description: |-
+  Provides details about a specific Amazon Connect Hours of Operation.
+---
+
+
+
+# Data Source: aws_connect_hours_of_operation
+
+Provides details about a specific Amazon Connect Hours of Operation.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectHoursOfOperation } from "./.gen/providers/aws/data-aws-connect-hours-of-operation";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectHoursOfOperation(this, "test", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Test",
+    });
+  }
+}
+
+```
+
+By `hoursOfOperationId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectHoursOfOperation } from "./.gen/providers/aws/data-aws-connect-hours-of-operation";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectHoursOfOperation(this, "test", {
+      hoursOfOperationId: "cccccccc-bbbb-cccc-dddd-111111111111",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `hoursOfOperationId` are required.
+
+This data source supports the following arguments:
+
+* `hoursOfOperationId` - (Optional) Returns information on a specific Hours of Operation by hours of operation id.
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific Hours of Operation by name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Hours of Operation.
+* `config` - Configuration information for the hours of operation: day, start time, and end time. Config blocks are documented below.
+* `description` - Description of the Hours of Operation.
+* `hoursOfOperationId` - The identifier for the hours of operation.
+* `instanceId` - Identifier of the hosting Amazon Connect Instance.
+* `name` - Name of the Hours of Operation.
+* `tags` - Map of tags assigned to the Hours of Operation.
+* `timeZone` - Time zone of the Hours of Operation.
+
+A `config` block supports the following arguments:
+
+* `day` - Day that the hours of operation applies to.
+* `endTime` - End time block specifies the time that your contact center closes. The `endTime` block is documented below.
+* `startTime` - Start time block specifies the time that your contact center opens. The `startTime` block is documented below.
+
+An `endTime` block supports the following arguments:
+
+* `hours` - Hour of closing.
+* `minutes` - Minute of closing.
+
+A `startTime` block supports the following arguments:
+
+* `hours` - Hour of opening.
+* `minutes` - Minute of opening.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_instance.html.markdown b/website/docs/cdktf/typescript/d/connect_instance.html.markdown
new file mode 100644
index 00000000000..33f8d10509c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_instance.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_instance"
+description: |-
+  Provides details about a specific Connect Instance.
+---
+
+
+# Data Source: aws_connect_instance
+
+Provides details about a specific Amazon Connect Instance.
+
+## Example Usage
+
+By `instanceAlias`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectInstance } from "./.gen/providers/aws/data-aws-connect-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectInstance(this, "foo", {
+      instanceAlias: "foo",
+    });
+  }
+}
+
+```
+
+By `instanceId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectInstance } from "./.gen/providers/aws/data-aws-connect-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectInstance(this, "foo", {
+      instanceId: "97afc98d-101a-ba98-ab97-ae114fc115ec",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** Either `instanceId` or `instanceAlias` is required.
+
+This data source supports the following arguments:
+
+* `instanceId` - (Optional) Returns information on a specific Connect instance by id.
+* `instanceAlias` - (Optional) Returns information on a specific Connect instance by alias.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `createdTime` - When the instance was created.
+* `arn` - ARN of the instance.
+* `identityManagementType` - The identity management type attached to the instance.
+* `inboundCallsEnabled` - Whether inbound calls are enabled.
+* `outboundCallsEnabled` - Whether outbound calls are enabled.
+* `earlyMediaEnabled` - Whether early media for outbound calls is enabled.
+* `contactFlowLogsEnabled` - Whether contact flow logs are enabled.
+* `contactLensEnabled` - Whether Contact Lens is enabled.
+* `autoResolveBestVoices` - Whether auto-resolve best voices is enabled.
+* `multiPartyConferenceEnabled` - Whether multi-party calls/conferencing is enabled.
+* `useCustomTtsVoices` - Whether custom TTS voices are enabled.
+* `status` - State of the instance.
+* `serviceRole` - Service role of the instance.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_instance_storage_config.html.markdown b/website/docs/cdktf/typescript/d/connect_instance_storage_config.html.markdown
new file mode 100644
index 00000000000..953ee085e51
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_instance_storage_config.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_instance_storage_config"
+description: |-
+  Provides details about a specific Amazon Connect Instance Storage Config.
+---
+
+
+# Data Source: aws_connect_instance_storage_config
+
+Provides details about a specific Amazon Connect Instance Storage Config.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectInstanceStorageConfig } from "./.gen/providers/aws/data-aws-connect-instance-storage-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectInstanceStorageConfig(this, "example", {
+      associationId:
+        "1234567890123456789012345678901234567890123456789012345678901234",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      resourceType: "CONTACT_TRACE_RECORDS",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `associationId` - (Required) The existing association identifier that uniquely identifies the resource type and storage config for the given instance ID.
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `resourceType` - (Required) A valid resource type. Valid Values: `CHAT_TRANSCRIPTS` | `CALL_RECORDINGS` | `SCHEDULED_REPORTS` | `MEDIA_STREAMS` | `CONTACT_TRACE_RECORDS` | `AGENT_EVENTS` | `REAL_TIME_CONTACT_ANALYSIS_SEGMENTS`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the hosting Amazon Connect Instance, `associationId`, and `resourceType` separated by a colon (`:`).
+* `storageConfig` - Specifies the storage configuration options for the Connect Instance. [Documented below](#storage_config).
+
+### `storageConfig`
+
+The `storageConfig` configuration block supports the following arguments:
+
+* `kinesisFirehoseConfig` - A block that specifies the configuration of the Kinesis Firehose delivery stream. [Documented below](#kinesis_firehose_config).
+* `kinesisStreamConfig` - A block that specifies the configuration of the Kinesis data stream. [Documented below](#kinesis_stream_config).
+* `kinesisVideoStreamConfig` - A block that specifies the configuration of the Kinesis video stream. [Documented below](#kinesis_video_stream_config).
+* `s3Config` - A block that specifies the configuration of the S3 bucket. [Documented below](#s3_config).
+* `storageType` - A valid storage type. Valid Values: `S3` | `KINESIS_VIDEO_STREAM` | `KINESIS_STREAM` | `KINESIS_FIREHOSE`.
+
+#### `kinesisFirehoseConfig`
+
+The `kinesisFirehoseConfig` configuration block supports the following arguments:
+
+* `firehoseArn` - The Amazon Resource Name (ARN) of the delivery stream.
+
+#### `kinesisStreamConfig`
+
+The `kinesisStreamConfig` configuration block supports the following arguments:
+
+* `streamArn` - The Amazon Resource Name (ARN) of the data stream.
+
+#### `kinesisVideoStreamConfig`
+
+The `kinesisVideoStreamConfig` configuration block supports the following arguments:
+
+* `encryptionConfig` - The encryption configuration. [Documented below](#encryption_config).
+* `prefix` - The prefix of the video stream. Minimum length of `1`. Maximum length of `128`. When read from the state, the value returned is `Connect-Contact` since the API appends additional details to the `prefix`.
+* `retentionPeriodHours` - The number of hours to retain the data in a data store associated with the stream. Minimum value of `0`. Maximum value of `87600`. A value of `0` indicates that the stream does not persist data.
+
+#### `s3Config`
+
+The `s3Config` configuration block supports the following arguments:
+
+* `bucketName` - The S3 bucket name.
+* `bucketPrefix` - The S3 bucket prefix.
+* `encryptionConfig` - The encryption configuration. [Documented below](#encryption_config).
+
+#### `encryptionConfig`
+
+The `encryptionConfig` configuration block supports the following arguments:
+
+* `encryptionType` - The type of encryption. Valid Values: `KMS`.
+* `keyId` - The full ARN of the encryption key. Be sure to provide the full ARN of the encryption key, not just the ID.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_lambda_function_association.markdown b/website/docs/cdktf/typescript/d/connect_lambda_function_association.markdown
new file mode 100644
index 00000000000..4546ad3bf11
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_lambda_function_association.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_lambda_function_association"
+description: |-
+  Provides details about a specific Connect Lambda Function Association.
+---
+
+
+# Data Source: aws_connect_lambda_function_association
+
+Provides details about a specific Connect Lambda Function Association.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectLambdaFunctionAssociation } from "./.gen/providers/aws/data-aws-connect-lambda-function-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectLambdaFunctionAssociation(this, "example", {
+      functionArn: "arn:aws:lambda:us-west-2:123456789123:function:abcdefg",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `functionArn` - (Required) ARN of the Lambda Function, omitting any version or alias qualifier.
+* `instanceId` - (Required) Identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_prompt.html.markdown b/website/docs/cdktf/typescript/d/connect_prompt.html.markdown
new file mode 100644
index 00000000000..957f35c65f5
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_prompt.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_prompt"
+description: |-
+  Provides details about a specific Amazon Connect Prompt.
+---
+
+
+# Data Source: aws_connect_prompt
+
+Provides details about a specific Amazon Connect Prompt.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
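+ * Note: "Beep.wav" below is assumed to be one of the sample prompts that
+ * Amazon Connect provisions in an instance by default; substitute the name
+ * of a prompt that exists in your instance.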
+ */
+import { DataAwsConnectPrompt } from "./.gen/providers/aws/data-aws-connect-prompt";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectPrompt(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Beep.wav",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Required) Returns information on a specific Prompt by name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Prompt.
+* `promptId` - Identifier for the prompt.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_queue.markdown b/website/docs/cdktf/typescript/d/connect_queue.markdown
new file mode 100644
index 00000000000..e98b7082cea
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_queue.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_queue"
+description: |-
+  Provides details about a specific Amazon Connect Queue.
+---
+
+
+# Data Source: aws_connect_queue
+
+Provides details about a specific Amazon Connect Queue.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectQueue } from "./.gen/providers/aws/data-aws-connect-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectQueue(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+  }
+}
+
+```
+
+By `queueId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectQueue } from "./.gen/providers/aws/data-aws-connect-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectQueue(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      queueId: "cccccccc-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `queueId` are required.
+
+This data source supports the following arguments:
+
+* `queueId` - (Optional) Returns information on a specific Queue by Queue id.
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific Queue by name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Queue.
+* `description` - Description of the Queue.
+* `hoursOfOperationId` - Specifies the identifier of the Hours of Operation.
+* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Queue separated by a colon (`:`).
+* `maxContacts` - Maximum number of contacts that can be in the queue before it is considered full. Minimum value of 0.
+* `outboundCallerConfig` - A block that defines the outbound caller ID name, number, and outbound whisper flow. The Outbound Caller Config block is documented below.
+* `queueId` - Identifier for the Queue.
+* `status` - Status of the Queue. Valid values are `ENABLED` and `DISABLED`.
+* `tags` - Map of tags assigned to the Queue.
+
+An `outboundCallerConfig` block supports the following arguments:
+
+* `outboundCallerIdName` - Specifies the caller ID name.
+* `outboundCallerIdNumberId` - Specifies the caller ID number.
+* `outboundFlowId` - Outbound whisper flow to be used during an outbound call.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_quick_connect.markdown b/website/docs/cdktf/typescript/d/connect_quick_connect.markdown
new file mode 100644
index 00000000000..8d7cb972349
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_quick_connect.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_quick_connect"
+description: |-
+  Provides details about a specific Amazon Connect Quick Connect.
+---
+
+
+# Data Source: aws_connect_quick_connect
+
+Provides details about a specific Amazon Connect Quick Connect.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectQuickConnect } from "./.gen/providers/aws/data-aws-connect-quick-connect";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectQuickConnect(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+  }
+}
+
+```
+
+By `quickConnectId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectQuickConnect } from "./.gen/providers/aws/data-aws-connect-quick-connect";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectQuickConnect(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      quickConnectId: "cccccccc-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `quickConnectId` are required.
+
+This data source supports the following arguments:
+
+* `quickConnectId` - (Optional) Returns information on a specific Quick Connect by Quick Connect id.
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific Quick Connect by name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Quick Connect.
+* `description` - Description of the Quick Connect.
+* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Quick Connect separated by a colon (`:`).
+* `quickConnectConfig` - A block that defines the configuration information for the Quick Connect: `quickConnectType` and one of `phoneConfig`, `queueConfig`, `userConfig`. The Quick Connect Config block is documented below.
+* `quickConnectId` - Identifier for the Quick Connect.
+* `tags` - Map of tags assigned to the Quick Connect.
+
+A `quickConnectConfig` block contains the following arguments:
+
+* `quickConnectType` - Configuration type of the Quick Connect. Valid values are `PHONE_NUMBER`, `QUEUE`, `USER`.
+* `phoneConfig` - Phone configuration of the Quick Connect. This is returned only if `quickConnectType` is `PHONE_NUMBER`. The `phoneConfig` block is documented below.
+* `queueConfig` - Queue configuration of the Quick Connect. This is returned only if `quickConnectType` is `QUEUE`. The `queueConfig` block is documented below.
+* `userConfig` - User configuration of the Quick Connect. This is returned only if `quickConnectType` is `USER`. The `userConfig` block is documented below.
+
+A `phoneConfig` block contains the following arguments:
+
+* `phoneNumber` - Phone number in E.164 format.
+
+A `queueConfig` block contains the following arguments:
+
+* `contactFlowId` - Identifier of the contact flow.
+* `queueId` - Identifier for the queue.
+
+A `userConfig` block contains the following arguments:
+
+* `contactFlowId` - Identifier of the contact flow.
+* `userId` - Identifier for the user.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_routing_profile.html.markdown b/website/docs/cdktf/typescript/d/connect_routing_profile.html.markdown
new file mode 100644
index 00000000000..a7bfb1a5474
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_routing_profile.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_routing_profile"
+description: |-
+  Provides details about a specific Amazon Connect Routing Profile.
+---
+
+
+# Data Source: aws_connect_routing_profile
+
+Provides details about a specific Amazon Connect Routing Profile.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectRoutingProfile } from "./.gen/providers/aws/data-aws-connect-routing-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectRoutingProfile(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+  }
+}
+
+```
+
+By `routingProfileId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectRoutingProfile } from "./.gen/providers/aws/data-aws-connect-routing-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectRoutingProfile(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      routingProfileId: "cccccccc-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `routingProfileId` are required.
+
+This data source supports the following arguments:
+
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific Routing Profile by name.
+* `routingProfileId` - (Optional) Returns information on a specific Routing Profile by Routing Profile id.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Routing Profile.
+* `defaultOutboundQueueId` - Specifies the default outbound queue for the Routing Profile.
+* `description` - Description of the Routing Profile.
+* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Routing Profile separated by a colon (`:`).
+* `mediaConcurrencies` - One or more `mediaConcurrencies` blocks that specify the channels that agents can handle in the Contact Control Panel (CCP) for this Routing Profile. The `mediaConcurrencies` block is documented below.
+* `queueConfigs` - One or more `queueConfigs` blocks that specify the inbound queues associated with the routing profile. If no queue is added, the agent can only make outbound calls. The `queueConfigs` block is documented below.
+* `tags` - Map of tags assigned to the Routing Profile.
+
+A `mediaConcurrencies` block supports the following attributes:
+
+* `channel` - Channels that agents can handle in the Contact Control Panel (CCP). Valid values are `VOICE`, `CHAT`, `TASK`.
+* `concurrency` - Number of contacts an agent can have on a channel simultaneously. Valid Range for `VOICE`: Minimum value of 1. Maximum value of 1. Valid Range for `CHAT`: Minimum value of 1. Maximum value of 10. Valid Range for `TASK`: Minimum value of 1. Maximum value of 10.
+
+A `queueConfigs` block supports the following attributes:
+
+* `channel` - Channels agents can handle in the Contact Control Panel (CCP) for this routing profile. Valid values are `VOICE`, `CHAT`, `TASK`.
+* `delay` - Delay, in seconds, that a contact should be in the queue before they are routed to an available agent.
+* `priority` - Order in which contacts are to be handled for the queue.
+* `queueArn` - ARN for the queue.
+* `queueId` - Identifier for the queue.
+* `queueName` - Name for the queue.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_security_profile.html.markdown b/website/docs/cdktf/typescript/d/connect_security_profile.html.markdown
new file mode 100644
index 00000000000..3a7aded2a13
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_security_profile.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_security_profile"
+description: |-
+  Provides details about a specific Amazon Connect Security Profile.
+---
+
+
+# Data Source: aws_connect_security_profile
+
+Provides details about a specific Amazon Connect Security Profile.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectSecurityProfile } from "./.gen/providers/aws/data-aws-connect-security-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectSecurityProfile(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+  }
+}
+
+```
+
+By `securityProfileId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectSecurityProfile } from "./.gen/providers/aws/data-aws-connect-security-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectSecurityProfile(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      securityProfileId: "cccccccc-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `securityProfileId` are required.
+
+This data source supports the following arguments:
+
+* `securityProfileId` - (Optional) Returns information on a specific Security Profile by Security Profile id.
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific Security Profile by name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Security Profile.
+* `description` - Description of the Security Profile.
+* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the Security Profile separated by a colon (`:`).
+* `organizationResourceId` - The organization resource identifier for the security profile.
+* `permissions` - List of permissions assigned to the security profile.
+* `tags` - Map of tags assigned to the Security Profile.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_user.html.markdown b/website/docs/cdktf/typescript/d/connect_user.html.markdown
new file mode 100644
index 00000000000..25f30601f9c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_user.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_user"
+description: |-
+  Provides details about a specific Amazon Connect User.
+---
+
+
+# Data Source: aws_connect_user
+
+Provides details about a specific Amazon Connect User.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectUser } from "./.gen/providers/aws/data-aws-connect-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectUser(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+  }
+}
+
+```
+
+By `userId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectUser } from "./.gen/providers/aws/data-aws-connect-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectUser(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      userId: "cccccccc-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `userId` are required.
+
+This data source supports the following arguments:
+
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific User by name.
+* `userId` - (Optional) Returns information on a specific User by User id.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the User.
+* `directoryUserId` - The identifier of the user account in the directory used for identity management.
+* `hierarchyGroupId` - The identifier of the hierarchy group for the user.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the User separated by a colon (`:`).
+* `identityInfo` - A block that contains information about the identity of the user. [Documented below](#identity_info).
+* `instanceId` - Specifies the identifier of the hosting Amazon Connect Instance.
+* `phoneConfig` - A block that contains information about the phone settings for the user. [Documented below](#phone_config).
+* `routingProfileId` - The identifier of the routing profile for the user.
+* `securityProfileIds` - A list of identifiers for the security profiles for the user.
+* `tags` - A map of tags assigned to the User.
+
+### `identityInfo`
+
+An `identityInfo` block supports the following attributes:
+
+* `email` - The email address.
+* `firstName` - The first name.
+* `lastName` - The last name.
+
+### `phoneConfig`
+
+A `phoneConfig` block supports the following attributes:
+
+* `afterContactWorkTimeLimit` - The After Call Work (ACW) timeout setting, in seconds.
+* `autoAccept` - When Auto-Accept Call is enabled for an available agent, the agent connects to contacts automatically.
+* `deskPhoneNumber` - The phone number for the user's desk phone.
+* `phoneType` - The phone type. Valid values are `DESK_PHONE` and `SOFT_PHONE`.
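+
+The exported attributes are tokens that can be wired into outputs or other resources. A minimal sketch (not generated output) that builds on the "By `name`" lookup above, using the standard generated bindings:
+
+```typescript
+// Sketch: surface a looked-up user's ARN and routing profile as outputs.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsConnectUser } from "./.gen/providers/aws/data-aws-connect-user";
+class MyUserOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsConnectUser(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+    // Attribute values resolve at plan/apply time, like any Terraform token.
+    new TerraformOutput(this, "user_arn", { value: example.arn });
+    new TerraformOutput(this, "routing_profile_id", {
+      value: example.routingProfileId,
+    });
+  }
+}
+
+```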
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_user_hierarchy_group.html.markdown b/website/docs/cdktf/typescript/d/connect_user_hierarchy_group.html.markdown
new file mode 100644
index 00000000000..faed21ef62f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_user_hierarchy_group.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_user_hierarchy_group"
+description: |-
+  Provides details about a specific Amazon Connect User Hierarchy Group.
+---
+
+
+# Data Source: aws_connect_user_hierarchy_group
+
+Provides details about a specific Amazon Connect User Hierarchy Group.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectUserHierarchyGroup } from "./.gen/providers/aws/data-aws-connect-user-hierarchy-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectUserHierarchyGroup(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+  }
+}
+
+```
+
+By `hierarchyGroupId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectUserHierarchyGroup } from "./.gen/providers/aws/data-aws-connect-user-hierarchy-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectUserHierarchyGroup(this, "example", {
+      hierarchyGroupId: "cccccccc-bbbb-cccc-dddd-111111111111",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `hierarchyGroupId` are required.
+
+This data source supports the following arguments:
+
+* `hierarchyGroupId` - (Optional) Returns information on a specific hierarchy group by hierarchy group id.
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific hierarchy group by name.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the hierarchy group.
+* `hierarchyPath` - Block that contains information about the levels in the hierarchy group. The `hierarchyPath` block is documented below.
+* `levelId` - Identifier of the level in the hierarchy group.
+* `id` - Identifier of the hosting Amazon Connect Instance and identifier of the hierarchy group separated by a colon (`:`).
+* `tags` - Map of tags assigned to the hierarchy group.
+
+A `hierarchyPath` block supports the following attributes:
+
+* `levelOne` - Details of level one. See below.
+* `levelTwo` - Details of level two. See below.
+* `levelThree` - Details of level three. See below.
+* `levelFour` - Details of level four. See below.
+* `levelFive` - Details of level five. See below.
+
+A level block supports the following attributes:
+
+* `arn` - ARN of the hierarchy group.
+* `id` - The identifier of the hierarchy group.
+* `name` - Name of the hierarchy group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_user_hierarchy_structure.html.markdown b/website/docs/cdktf/typescript/d/connect_user_hierarchy_structure.html.markdown
new file mode 100644
index 00000000000..c0981f8a590
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_user_hierarchy_structure.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_user_hierarchy_structure"
+description: |-
+  Provides details about a specific Amazon Connect User Hierarchy Structure.
+---
+
+
+# Data Source: aws_connect_user_hierarchy_structure
+
+Provides details about a specific Amazon Connect User Hierarchy Structure.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectUserHierarchyStructure } from "./.gen/providers/aws/data-aws-connect-user-hierarchy-structure";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectUserHierarchyStructure(this, "test", {
+      instanceId: Token.asString(awsConnectInstanceTest.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `hierarchyStructure` - Block that defines the hierarchy structure's levels. The `hierarchyStructure` block is documented below.
+
+A `hierarchyStructure` block supports the following attributes:
+
+* `levelOne` - Details of level one. See below.
+* `levelTwo` - Details of level two. See below.
+* `levelThree` - Details of level three. See below.
+* `levelFour` - Details of level four. See below.
+* `levelFive` - Details of level five. See below.
+
+Each level block supports the following attributes:
+
+* `arn` - ARN of the hierarchy level.
+* `id` - The identifier of the hierarchy level.
+* `name` - Name of the user hierarchy level. Must not be more than 50 characters.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/connect_vocabulary.html.markdown b/website/docs/cdktf/typescript/d/connect_vocabulary.html.markdown
new file mode 100644
index 00000000000..b96d1ef5248
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/connect_vocabulary.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_vocabulary"
+description: |-
+  Provides details about a specific Amazon Connect Vocabulary.
+---
+
+
+# Data Source: aws_connect_vocabulary
+
+Provides details about a specific Amazon Connect Vocabulary.
+
+## Example Usage
+
+By `name`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectVocabulary } from "./.gen/providers/aws/data-aws-connect-vocabulary";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectVocabulary(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+  }
+}
+
+```
+
+By `vocabularyId`
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsConnectVocabulary } from "./.gen/providers/aws/data-aws-connect-vocabulary";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsConnectVocabulary(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      vocabularyId: "cccccccc-bbbb-cccc-dddd-111111111111",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** `instanceId` and either `name` or `vocabularyId` are required.
+
+This data source supports the following arguments:

+* `instanceId` - (Required) Reference to the hosting Amazon Connect Instance.
+* `name` - (Optional) Returns information on a specific Vocabulary by name.
+* `vocabularyId` - (Optional) Returns information on a specific Vocabulary by Vocabulary id.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Vocabulary.
+* `content` - The content of the custom vocabulary in plain-text format with a table of values. Each row in the table represents a word or a phrase, described with Phrase, IPA, SoundsLike, and DisplayAs fields. Separate the fields with TAB characters. For more information, see [Create a custom vocabulary using a table](https://docs.aws.amazon.com/transcribe/latest/dg/custom-vocabulary.html#create-vocabulary-table).
+* `failureReason` - The reason why the custom vocabulary was not created.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the vocabulary
+separated by a colon (`:`).
+* `languageCode` - The language code of the vocabulary entries. For a list of languages and their corresponding language codes, see [What is Amazon Transcribe?](https://docs.aws.amazon.com/transcribe/latest/dg/transcribe-whatis.html). Valid Values are `ar-AE`, `de-CH`, `de-DE`, `en-AB`, `en-AU`, `en-GB`, `en-IE`, `en-IN`, `en-US`, `en-WL`, `es-ES`, `es-US`, `fr-CA`, `fr-FR`, `hi-IN`, `it-IT`, `ja-JP`, `ko-KR`, `pt-BR`, `pt-PT`, `zh-CN`.
+* `lastModifiedTime` - The timestamp when the custom vocabulary was last modified.
+* `state` - The current state of the custom vocabulary. Valid values are `CREATION_IN_PROGRESS`, `ACTIVE`, `CREATION_FAILED`, `DELETE_IN_PROGRESS`.
+* `tags` - Map of tags assigned to the Vocabulary.
+* `vocabularyId` - The identifier of the custom vocabulary.
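+
+Since a vocabulary can fail to create asynchronously, `state` and `failureReason` are useful diagnostics. A minimal sketch (not generated output) that surfaces them, assuming the "By `name`" lookup above:
+
+```typescript
+// Sketch: expose the vocabulary's state and any creation failure reason.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsConnectVocabulary } from "./.gen/providers/aws/data-aws-connect-vocabulary";
+class MyVocabularyOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsConnectVocabulary(this, "example", {
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example",
+    });
+    // `state` should be ACTIVE for a usable vocabulary; `failureReason`
+    // is only populated when creation failed.
+    new TerraformOutput(this, "vocabulary_state", { value: example.state });
+    new TerraformOutput(this, "vocabulary_failure_reason", {
+      value: example.failureReason,
+    });
+  }
+}
+
+```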
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/controltower_controls.html.markdown b/website/docs/cdktf/typescript/d/controltower_controls.html.markdown
new file mode 100644
index 00000000000..75fcd4f35cc
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/controltower_controls.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "Control Tower"
+layout: "aws"
+page_title: "AWS: aws_controltower_controls"
+description: |-
+  List of Control Tower controls applied to an OU.
+---
+
+
+# Data Source: aws_controltower_controls
+
+List of Control Tower controls applied to an OU.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsControltowerControls } from "./.gen/providers/aws/data-aws-controltower-controls";
+import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization";
+import { DataAwsOrganizationsOrganizationalUnits } from "./.gen/providers/aws/data-aws-organizations-organizational-units";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const thisVar = new DataAwsOrganizationsOrganization(this, "this", {});
+    const dataAwsOrganizationsOrganizationalUnitsThis =
+      new DataAwsOrganizationsOrganizationalUnits(this, "this_1", {
+        parentId: Token.asString(propertyAccess(thisVar.roots, ["0", "id"])),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsOrganizationsOrganizationalUnitsThis.overrideLogicalId("this");
+    const dataAwsControltowerControlsThis = new DataAwsControltowerControls(
+      this,
+      "this_2",
+      {
+        targetIdentifier: Token.asString(
+          propertyAccess(
+            "${[ for x in ${" +
+              dataAwsOrganizationsOrganizationalUnitsThis.children +
+              '} : x.arn if x.name == "Security"]}',
+            ["0"]
+          )
+        ),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsControltowerControlsThis.overrideLogicalId("this");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `targetIdentifier` - (Required) The ARN of the organizational unit.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `enabledControls` - List of all the ARNs for the controls applied to the `targetIdentifier`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/cur_report_definition.html.markdown b/website/docs/cdktf/typescript/d/cur_report_definition.html.markdown
new file mode 100644
index 00000000000..1e96ffa7fe2
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/cur_report_definition.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "Cost and Usage Report"
+layout: "aws"
+page_title: "AWS: aws_cur_report_definition"
+description: |-
+  Get information on an AWS Cost and Usage Report Definition.
+---
+
+
+# Data Source: aws_cur_report_definition
+
+Use this data source to get information on an AWS Cost and Usage Report Definition.
+
+~> **NOTE:** The AWS Cost and Usage Report service is only available in `us-east-1` currently.
+
+~> **NOTE:** If AWS Organizations is enabled, only the master account can use this data source.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCurReportDefinition } from "./.gen/providers/aws/data-aws-cur-report-definition";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsCurReportDefinition(this, "report_definition", {
+      reportName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `reportName` - (Required) Name of the report definition to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `timeUnit` - Frequency on which report data are measured and displayed.
+* `format` - Preferred format for report.
+* `compression` - Preferred compression format for report.
+* `additionalSchemaElements` - A list of schema elements.
+* `s3Bucket` - Name of customer S3 bucket.
+* `s3Prefix` - Preferred report path prefix.
+* `s3Region` - Region of customer S3 bucket.
+* `additionalArtifacts` - A list of additional artifacts.
+* `refreshClosedReports` - If true, reports are updated after they have been finalized.
+* `reportVersioning` - Whether each report is overwritten or delivered in addition to the previous versions.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/customer_gateway.html.markdown b/website/docs/cdktf/typescript/d/customer_gateway.html.markdown
new file mode 100644
index 00000000000..351cfe66f11
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/customer_gateway.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "VPN (Site-to-Site)"
+layout: "aws"
+page_title: "AWS: aws_customer_gateway"
+description: |-
+  Get an existing AWS Customer Gateway.
+---
+
+
+# Data Source: aws_customer_gateway
+
+Get an existing AWS Customer Gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCustomerGateway } from "./.gen/providers/aws/data-aws-customer-gateway";
+import { VpnConnection } from "./.gen/providers/aws/vpn-connection";
+import { VpnGateway } from "./.gen/providers/aws/vpn-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new VpnGateway(this, "main", {
+      amazonSideAsn: Token.asString(7224),
+      vpcId: Token.asString(awsVpcMain.id),
+    });
+    const foo = new DataAwsCustomerGateway(this, "foo", {
+      filter: [
+        {
+          name: "tag:Name",
+          values: ["foo-prod"],
+        },
+      ],
+    });
+    new VpnConnection(this, "transit", {
+      customerGatewayId: Token.asString(foo.id),
+      staticRoutesOnly: false,
+      type: Token.asString(foo.type),
+      vpnGatewayId: main.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Optional) ID of the gateway.
+* `filter` - (Optional) One or more [name-value pairs][dcg-filters] to filter by.
+
+[dcg-filters]: https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeCustomerGateways.html
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the customer gateway.
+* `bgpAsn` - Gateway's Border Gateway Protocol (BGP) Autonomous System Number (ASN).
+* `certificateArn` - ARN for the customer gateway certificate.
+* `deviceName` - Name for the customer gateway device.
+* `ipAddress` - IP address of the gateway's Internet-routable external interface.
+* `tags` - Map of key-value pairs assigned to the gateway.
+* `type` - Type of customer gateway. The only type AWS supports at this time is `ipsec.1`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/datapipeline_pipeline.html.markdown b/website/docs/cdktf/typescript/d/datapipeline_pipeline.html.markdown
new file mode 100644
index 00000000000..28e65653ae9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/datapipeline_pipeline.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline"
+description: |-
+  Provides details about a specific DataPipeline.
+---
+
+
+# Data Source: aws_datapipeline_pipeline
+
+Provides details about a specific DataPipeline Pipeline.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDatapipelinePipeline } from "./.gen/providers/aws/data-aws-datapipeline-pipeline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDatapipelinePipeline(this, "example", {
+      pipelineId: "pipelineID",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `pipelineId` - (Required) ID of the pipeline.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` - Name of Pipeline.
+* `description` - Description of Pipeline.
+* `tags` - Map of tags assigned to the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/datapipeline_pipeline_definition.html.markdown b/website/docs/cdktf/typescript/d/datapipeline_pipeline_definition.html.markdown
new file mode 100644
index 00000000000..1ebda985031
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/datapipeline_pipeline_definition.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline_definition"
+description: |-
+  Provides details about a specific DataPipeline Definition.
+---
+
+
+# Data Source: aws_datapipeline_pipeline_definition
+
+Provides details about a specific DataPipeline Pipeline Definition.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDatapipelinePipelineDefinition } from "./.gen/providers/aws/data-aws-datapipeline-pipeline-definition";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDatapipelinePipelineDefinition(this, "example", {
+      pipelineId: "pipelineID",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `pipelineId` - (Required) ID of the pipeline.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `parameterObject` - Parameter objects used in the pipeline definition. See below.
+* `parameterValue` - Parameter values used in the pipeline definition. See below.
+* `pipelineObject` - Objects defined in the pipeline. See below.
+
+### `parameterObject`
+
+* `attributes` - Attributes of the parameter object. See below.
+* `id` - ID of the parameter object.
+
+### `attributes`
+
+* `key` - Field identifier.
+* `stringValue` - Field value, expressed as a String.
+
+### `parameterValue`
+
+* `id` - ID of the parameter value.
+* `stringValue` - Field value, expressed as a String.
+
+### `pipelineObject`
+
+* `field` - Key-value pairs that define the properties of the object. See below.
+* `id` - ID of the object.
+* `name` - Name of the object.
+
+### `field`
+
+* `key` - Field identifier.
+* `refValue` - Field value, expressed as the identifier of another object.
+* `stringValue` - Field value, expressed as a String.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/db_cluster_snapshot.html.markdown b/website/docs/cdktf/typescript/d/db_cluster_snapshot.html.markdown
new file mode 100644
index 00000000000..9cfc740e6fb
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/db_cluster_snapshot.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_cluster_snapshot"
+description: |-
+  Get information on a DB Cluster Snapshot.
+---
+
+
+# Data Source: aws_db_cluster_snapshot
+
+Use this data source to get information about a DB Cluster Snapshot for use when provisioning DB clusters.
+
+~> **NOTE:** This data source does not apply to snapshots created on DB Instances.
+See the [`aws_db_snapshot` data source](/docs/providers/aws/d/db_snapshot.html) for DB Instance snapshots.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
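+ * Note: the lifecycle.ignoreChanges entry on snapshotIdentifier below keeps
+ * Terraform from replacing the cluster when a newer final snapshot changes
+ * this data source's result on a later plan.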
+ */
+import { DataAwsDbClusterSnapshot } from "./.gen/providers/aws/data-aws-db-cluster-snapshot";
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance";
+interface MyConfig {
+  engine: any;
+  engine1: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const developmentFinalSnapshot = new DataAwsDbClusterSnapshot(
+      this,
+      "development_final_snapshot",
+      {
+        dbClusterIdentifier: "development_cluster",
+        mostRecent: true,
+      }
+    );
+    const aurora = new RdsCluster(this, "aurora", {
+      clusterIdentifier: "development_cluster",
+      dbSubnetGroupName: "my_db_subnet_group",
+      lifecycle: {
+        ignoreChanges: ["snapshot_identifier"],
+      },
+      snapshotIdentifier: Token.asString(developmentFinalSnapshot.id),
+      engine: config.engine,
+    });
+    const awsRdsClusterInstanceAurora = new RdsClusterInstance(
+      this,
+      "aurora_2",
+      {
+        clusterIdentifier: aurora.id,
+        dbSubnetGroupName: "my_db_subnet_group",
+        instanceClass: "db.t2.small",
+        engine: config.engine1,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsClusterInstanceAurora.overrideLogicalId("aurora");
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `mostRecent` - (Optional) If more than one result is returned, use the most recent Snapshot.
+
+* `dbClusterIdentifier` - (Optional) Returns the list of snapshots created by the specific db_cluster.
+
+* `dbClusterSnapshotIdentifier` - (Optional) Returns information on a specific snapshot_id.
+
+* `snapshotType` - (Optional) Type of snapshots to be returned. If you don't specify a SnapshotType
+value, then both automated and manual DB cluster snapshots are returned. Shared and public DB Cluster Snapshots are not
+included in the returned results by default. Possible values are `automated`, `manual`, `shared`, `public` and `awsbackup`.
+
+* `includeShared` - (Optional) Set this value to true to include shared manual DB Cluster Snapshots from other
+AWS accounts that this AWS account has been given permission to copy or restore, otherwise set this value to false.
+The default is `false`.
+
+* `includePublic` - (Optional) Set this value to true to include manual DB Cluster Snapshots that are public and can be
+copied or restored by any AWS account, otherwise set this value to false. The default is `false`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `allocatedStorage` - Allocated storage size in gigabytes (GB).
+* `availabilityZones` - List of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in.
+* `dbClusterIdentifier` - Specifies the DB cluster identifier of the DB cluster that this DB cluster snapshot was created from.
+* `dbClusterSnapshotArn` - The ARN for the DB Cluster Snapshot.
+* `engineVersion` - Version of the database engine for this DB cluster snapshot.
+* `engine` - Name of the database engine.
+* `id` - Snapshot ID.
+* `kmsKeyId` - If storage_encrypted is true, the AWS KMS key identifier for the encrypted DB cluster snapshot.
+* `licenseModel` - License model information for the restored DB cluster.
+* `port` - Port that the DB cluster was listening on at the time of the snapshot.
+* `snapshotCreateTime` - Time when the snapshot was taken, in Universal Coordinated Time (UTC). +* `sourceDbClusterSnapshotIdentifier` - DB Cluster Snapshot ARN that the DB Cluster Snapshot was copied from. It only has value in case of cross customer or cross region copy. +* `status` - Status of this DB Cluster Snapshot. +* `storageEncrypted` - Whether the DB cluster snapshot is encrypted. +* `vpcId` - VPC ID associated with the DB cluster snapshot. +* `tags` - Map of tags for the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/db_event_categories.html.markdown b/website/docs/cdktf/typescript/d/db_event_categories.html.markdown new file mode 100644 index 00000000000..08dd6aed603 --- /dev/null +++ b/website/docs/cdktf/typescript/d/db_event_categories.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_event_categories" +description: |- + Provides a list of DB Event Categories which can be used to pass values into DB Event Subscription. +--- + + + +# Data Source: aws_db_event_categories + +## Example Usage + +List the event categories of all the RDS resources. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDbEventCategories } from "./.gen/providers/aws/data-aws-db-event-categories"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsDbEventCategories(this, "example", {}); + const cdktfTerraformOutputExample = new TerraformOutput(this, "example_1", { + value: example.eventCategories, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + cdktfTerraformOutputExample.overrideLogicalId("example"); + } +} + +``` + +List the event categories specific to the RDS resource `dbSnapshot`. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDbEventCategories } from "./.gen/providers/aws/data-aws-db-event-categories"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsDbEventCategories(this, "example", { + sourceType: "db-snapshot", + }); + const cdktfTerraformOutputExample = new TerraformOutput(this, "example_1", { + value: example.eventCategories, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + cdktfTerraformOutputExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `sourceType` - (Optional) Type of source that will be generating the events. Valid options are db-instance, db-security-group, db-parameter-group, db-snapshot, db-cluster or db-cluster-snapshot. 
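+
+As the description above notes, these categories are typically passed into a DB Event Subscription. A minimal sketch, assuming the generated `DbEventSubscription` binding and an SNS topic ARN you already manage (the subscription name and `exampleTopicArn` parameter are illustrative):
+
+```typescript
+// Sketch: feed the looked-up categories into an aws_db_event_subscription.
+// `exampleTopicArn` is a placeholder for an SNS topic ARN you provide.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsDbEventCategories } from "./.gen/providers/aws/data-aws-db-event-categories";
+import { DbEventSubscription } from "./.gen/providers/aws/db-event-subscription";
+class EventCategoriesSketch extends TerraformStack {
+  constructor(scope: Construct, name: string, exampleTopicArn: string) {
+    super(scope, name);
+    // Look up only the categories that apply to DB snapshots.
+    const snapshotCategories = new DataAwsDbEventCategories(this, "snapshot", {
+      sourceType: "db-snapshot",
+    });
+    // Subscribe the SNS topic to every snapshot-related event category.
+    new DbEventSubscription(this, "example", {
+      name: "example-subscription",
+      snsTopic: exampleTopicArn,
+      sourceType: "db-snapshot",
+      eventCategories: snapshotCategories.eventCategories,
+    });
+  }
+}
+```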
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `eventCategories` - List of the event categories. +* `id` - Region of the event categories. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/db_instance.html.markdown b/website/docs/cdktf/typescript/d/db_instance.html.markdown new file mode 100644 index 00000000000..ecdd825c404 --- /dev/null +++ b/website/docs/cdktf/typescript/d/db_instance.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_instance" +description: |- + Get information on an RDS Database Instance. +--- + + + +# Data Source: aws_db_instance + +Use this data source to get information about an RDS instance + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDbInstance } from "./.gen/providers/aws/data-aws-db-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDbInstance(this, "database", { + dbInstanceIdentifier: "my-test-database", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `dbInstanceIdentifier` - (Required) Name of the RDS instance + +## Attribute Reference + +~> **NOTE:** The `port` field may be empty while an Aurora cluster is still in the process of being created. This can occur if the cluster was initiated with the [AWS CLI `createDbCluster`](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-cluster.html) command, but no DB instance has yet been added to it. + +This data source exports the following attributes in addition to the arguments above: + +* `address` - Hostname of the RDS instance. See also `endpoint` and `port`. +* `allocatedStorage` - Allocated storage size specified in gigabytes. +* `autoMinorVersionUpgrade` - Indicates that minor version patches are applied automatically. +* `availabilityZone` - Name of the Availability Zone the DB instance is located in. +* `backupRetentionPeriod` - Specifies the number of days for which automatic DB snapshots are retained. +* `dbClusterIdentifier` - If the DB instance is a member of a DB cluster, contains the name of the DB cluster that the DB instance is a member of. +* `dbInstanceArn` - ARN for the DB instance. +* `dbInstanceClass` - Contains the name of the compute and memory capacity class of the DB instance. +* `dbName` - Contains the name of the initial database of this instance that was provided at create time, if one was specified when the DB instance was created. This same name is returned for the life of the DB instance. +* `dbParameterGroups` - Provides the list of DB parameter groups applied to this DB instance. +* `dbSubnetGroup` - Name of the subnet group associated with the DB instance. +* `dbInstancePort` - Port that the DB instance listens on. +* `enabledCloudwatchLogsExports` - List of log types to export to cloudwatch. +* `endpoint` - Connection endpoint in `address:port` format. +* `engine` - Provides the name of the database engine to be used for this DB instance. +* `engineVersion` - Database engine version. 
+* `hostedZoneId` - Canonical hosted zone ID of the DB instance (to be used in a Route 53 Alias record). +* `iops` - Provisioned IOPS (I/O operations per second) value. +* `kmsKeyId` - If StorageEncrypted is true, the KMS key identifier for the encrypted DB instance. +* `licenseModel` - License model information for this DB instance. +* `masterUsername` - Contains the master username for the DB instance. +* `masterUserSecret` - Provides the master user secret. Only available when `manageMasterUserPassword` is set to true. [Documented below](#master_user_secret). +* `maxAllocatedStorage` - The upper limit to which Amazon RDS can automatically scale the storage of the DB instance. +* `monitoringInterval` - Interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. +* `monitoringRoleArn` - ARN for the IAM role that permits RDS to send Enhanced Monitoring metrics to CloudWatch Logs. +* `multiAz` - If the DB instance is a Multi-AZ deployment. +* `networkType` - Network type of the DB instance. +* `optionGroupMemberships` - Provides the list of option group memberships for this DB instance. +* `port` - Database endpoint port, primarily used by an Aurora DB cluster. For a conventional RDS DB instance, the `dbInstancePort` is typically the preferred choice. +* `preferredBackupWindow` - Specifies the daily time range during which automated backups are created. +* `preferredMaintenanceWindow` - Specifies the weekly time range during which system maintenance can occur in UTC. +* `publiclyAccessible` - Accessibility options for the DB instance. +* `resourceId` - RDS Resource ID of this instance. +* `storageEncrypted` - Whether the DB instance is encrypted. +* `storageThroughput` - Storage throughput value for the DB instance. +* `storageType` - Storage type associated with DB instance. +* `timezone` - Time zone of the DB instance. +* `vpcSecurityGroups` - Provides a list of VPC security group elements that the DB instance belongs to. +* `replicateSourceDb` - Identifier of the source DB that this is a replica of. +* `caCertIdentifier` - Identifier of the CA certificate for the DB instance. + +### master_user_secret + +The `masterUserSecret` configuration block supports the following attributes: + +* `kmsKeyId` - The Amazon Web Services KMS key identifier that is used to encrypt the secret. +* `secretArn` - The Amazon Resource Name (ARN) of the secret. +* `secretStatus` - The status of the secret. Valid Values: `creating` | `active` | `rotating` | `impaired`. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/db_instances.html.markdown b/website/docs/cdktf/typescript/d/db_instances.html.markdown new file mode 100644 index 00000000000..745b455aa2c --- /dev/null +++ b/website/docs/cdktf/typescript/d/db_instances.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_instances" +description: |- + Terraform data source for listing RDS Database Instances. +--- + + + +# Data Source: aws_db_instances + +Terraform data source for listing RDS Database Instances. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsDbInstances } from "./.gen/providers/aws/data-aws-db-instances";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDbInstances(this, "example", {
+      filter: [
+        {
+          name: "db-instance-id",
+          values: ["my-database-id"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [RDS DescribeDBInstances API Reference](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBInstances.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `instanceArns` - ARNs of the matched RDS instances.
+* `instanceIdentifiers` - Identifiers of the matched RDS instances.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/db_proxy.html.markdown b/website/docs/cdktf/typescript/d/db_proxy.html.markdown
new file mode 100644
index 00000000000..83b6666bd94
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/db_proxy.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_proxy"
+description: |-
+  Get information on a DB Proxy.
+---
+
+
+
+# Data Source: aws_db_proxy
+
+Use this data source to get information about a DB Proxy.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDbProxy } from "./.gen/providers/aws/data-aws-db-proxy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDbProxy(this, "proxy", {
+      name: "my-test-db-proxy",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the DB proxy.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the DB Proxy.
+* `auth` - Configuration(s) with authorization mechanisms to connect to the associated instance or cluster.
+* `debugLogging` - Whether the proxy includes detailed information about SQL statements in its logs.
+* `endpoint` - Endpoint that you can use to connect to the DB proxy.
+* `engineFamily` - Kinds of databases that the proxy can connect to.
+* `idleClientTimeout` - Number of seconds a connection to the proxy can have no activity before the proxy drops the client connection.
+* `requireTls` - Whether Transport Layer Security (TLS) encryption is required for connections to the proxy.
+* `roleArn` - ARN for the IAM role that the proxy uses to access AWS Secrets Manager.
+* `vpcId` - Provides the VPC ID of the DB proxy.
+* `vpcSecurityGroupIds` - Provides a list of VPC security groups that the proxy belongs to.
+* `vpcSubnetIds` - EC2 subnet IDs for the proxy.
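+
+Since all of these attributes are computed, a common pattern is to surface the connection endpoint as a stack output for application configuration. A minimal sketch (the output name is illustrative):
+
+```typescript
+// Sketch: expose the DB proxy endpoint looked up above as a stack output.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsDbProxy } from "./.gen/providers/aws/data-aws-db-proxy";
+class ProxyEndpointSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const proxy = new DataAwsDbProxy(this, "proxy", {
+      name: "my-test-db-proxy",
+    });
+    // Applications can read this output instead of hard-coding the endpoint.
+    new TerraformOutput(this, "proxy_endpoint", {
+      value: proxy.endpoint,
+    });
+  }
+}
+```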
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/db_snapshot.html.markdown b/website/docs/cdktf/typescript/d/db_snapshot.html.markdown
new file mode 100644
index 00000000000..d3d53b4f74c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/db_snapshot.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_snapshot"
+description: |-
+  Get information on a DB Snapshot.
+---
+
+
+
+# Data Source: aws_db_snapshot
+
+Use this data source to get information about a DB Snapshot for use when provisioning DB instances.
+
+~> **NOTE:** This data source does not apply to snapshots created on Aurora DB clusters.
+See the [`awsDbClusterSnapshot` data source](/docs/providers/aws/d/db_cluster_snapshot.html) for DB Cluster snapshots.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDbSnapshot } from "./.gen/providers/aws/data-aws-db-snapshot";
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const prod = new DbInstance(this, "prod", {
+      allocatedStorage: 10,
+      dbName: "mydb",
+      dbSubnetGroupName: "my_database_subnet_group",
+      engine: "mysql",
+      engineVersion: "5.6.17",
+      instanceClass: "db.t2.micro",
+      parameterGroupName: "default.mysql5.6",
+      password: "bar",
+      username: "foo",
+    });
+    const latestProdSnapshot = new DataAwsDbSnapshot(
+      this,
+      "latest_prod_snapshot",
+      {
+        dbInstanceIdentifier: prod.identifier,
+        mostRecent: true,
+      }
+    );
+    new DbInstance(this, "dev", {
+      dbName: "mydbdev",
+      instanceClass: "db.t2.micro",
+      lifecycle: {
+        ignoreChanges: ["snapshot_identifier"],
+      },
+      snapshotIdentifier: Token.asString(latestProdSnapshot.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** One of either `dbInstanceIdentifier` or `dbSnapshotIdentifier` is required.
+
+This data source supports the following arguments:
+
+* `mostRecent` - (Optional) If more than one result is returned, use the most
+recent Snapshot.
+
+* `dbInstanceIdentifier` - (Optional) Returns the list of snapshots created by the specific db_instance.
+
+* `dbSnapshotIdentifier` - (Optional) Returns information on a specific snapshot_id.
+
+* `snapshotType` - (Optional) Type of snapshots to be returned. If you don't specify a SnapshotType
+value, then both automated and manual snapshots are returned. Shared and public DB snapshots are not
+included in the returned results by default. Possible values are `automated`, `manual`, `shared`, `public` and `awsbackup`.
+
+* `includeShared` - (Optional) Set this value to true to include shared manual DB snapshots from other
+AWS accounts that this AWS account has been given permission to copy or restore, otherwise set this value to false.
+The default is `false`.
+
+* `includePublic` - (Optional) Set this value to true to include manual DB snapshots that are public and can be
+copied or restored by any AWS account, otherwise set this value to false. The default is `false`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Snapshot ID.
+* `allocatedStorage` - Allocated storage size in gigabytes (GB). +* `availabilityZone` - Name of the Availability Zone the DB instance was located in at the time of the DB snapshot. +* `dbSnapshotArn` - ARN for the DB snapshot. +* `encrypted` - Whether the DB snapshot is encrypted. +* `engine` - Name of the database engine. +* `engineVersion` - Version of the database engine. +* `iops` - Provisioned IOPS (I/O operations per second) value of the DB instance at the time of the snapshot. +* `kmsKeyId` - ARN for the KMS encryption key. +* `licenseModel` - License model information for the restored DB instance. +* `optionGroupName` - Provides the option group name for the DB snapshot. +* `sourceDbSnapshotIdentifier` - DB snapshot ARN that the DB snapshot was copied from. It only has value in case of cross customer or cross region copy. +* `sourceRegion` - Region that the DB snapshot was created in or copied from. +* `status` - Status of this DB snapshot. +* `storageType` - Storage type associated with DB snapshot. +* `vpcId` - ID of the VPC associated with the DB snapshot. +* `snapshotCreateTime` - Provides the time when the snapshot was taken, in Universal Coordinated Time (UTC). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/db_subnet_group.html.markdown b/website/docs/cdktf/typescript/d/db_subnet_group.html.markdown new file mode 100644 index 00000000000..618510fe7fe --- /dev/null +++ b/website/docs/cdktf/typescript/d/db_subnet_group.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_subnet_group" +description: |- + Get information on an RDS Database Subnet Group. +--- + + + +# Data Source: aws_db_subnet_group + +Use this data source to get information about an RDS subnet group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDbSubnetGroup } from "./.gen/providers/aws/data-aws-db-subnet-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDbSubnetGroup(this, "database", { + name: "my-test-database-subnet-group", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the RDS database subnet group. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the DB subnet group. +* `description` - Provides the description of the DB subnet group. +* `status` - Provides the status of the DB subnet group. +* `subnetIds` - Contains a list of subnet identifiers. +* `supportedNetworkTypes` - The network type of the DB subnet group. +* `vpcId` - Provides the VPC ID of the DB subnet group. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/default_tags.markdown b/website/docs/cdktf/typescript/d/default_tags.markdown new file mode 100644 index 00000000000..6de9751d4f8 --- /dev/null +++ b/website/docs/cdktf/typescript/d/default_tags.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_default_tags" +description: |- + Access the default tags configured on the provider. 
+--- + + + +# Data Source: aws_default_tags + +Use this data source to get the default tags configured on the provider. + +With this data source, you can apply default tags to resources not _directly_ managed by a Terraform resource, such as the instances underneath an Auto Scaling group or the volumes created for an EC2 instance. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDefaultTags } from "./.gen/providers/aws/data-aws-default-tags"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDefaultTags(this, "example", {}); + } +} + +``` + +### Dynamically Apply Default Tags to Auto Scaling Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformIterator, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { DataAwsDefaultTags } from "./.gen/providers/aws/data-aws-default-tags"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +interface MyConfig { + maxSize: any; + minSize: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new AwsProvider(this, "aws", { + defaultTags: [ + { + tags: { + Environment: "Test", + Name: "Provider Tag", + }, + }, + ], + }); + const example = new DataAwsDefaultTags(this, "example", {}); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleDynamicIterator0 = TerraformIterator.fromList( + Token.asAny(example.tags) + ); + const awsAutoscalingGroupExample = new AutoscalingGroup(this, "example_2", { + tag: exampleDynamicIterator0.dynamic({ + key: exampleDynamicIterator0.key, + propagate_at_launch: true, + value: exampleDynamicIterator0.value, + }), + maxSize: config.maxSize, + minSize: config.minSize, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAutoscalingGroupExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This data source has no arguments. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `tags` - Blocks of default tags set on the provider. See details below. + +### tags + +* `key` - Key name of the tag (i.e., `tags.#Key`). +* `value` - Value of the tag (i.e., `tags.#Value`). 
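+
+For debugging tag inheritance, the resolved default tags can also be surfaced directly as a stack output. A minimal sketch (the output name is illustrative):
+
+```typescript
+// Sketch: print the provider-level default tags at apply time.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsDefaultTags } from "./.gen/providers/aws/data-aws-default-tags";
+class DefaultTagsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const defaults = new DataAwsDefaultTags(this, "defaults", {});
+    // Shows exactly which key/value pairs the provider will stamp on resources.
+    new TerraformOutput(this, "default_tags", {
+      value: defaults.tags,
+    });
+  }
+}
+```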
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/directory_service_directory.html.markdown b/website/docs/cdktf/typescript/d/directory_service_directory.html.markdown
new file mode 100644
index 00000000000..c7237c6f150
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/directory_service_directory.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_directory"
+description: |-
+  AWS Directory Service Directory
+---
+
+
+
+# Data Source: aws_directory_service_directory
+
+Get attributes of an AWS Directory Service directory (SimpleAD, Managed AD, or AD Connector). It is especially useful for referencing an AWS Managed AD or on-premises AD in an AD Connector configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDirectoryServiceDirectory } from "./.gen/providers/aws/data-aws-directory-service-directory";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDirectoryServiceDirectory(this, "example", {
+      directoryId: main.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `directoryId` - (Required) ID of the directory.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `type` - Directory type (`SimpleAD`, `ADConnector` or `MicrosoftAD`).
+* `edition` - (for `MicrosoftAD`) Microsoft AD edition (`Standard` or `Enterprise`).
+* `name` - Fully qualified name for the directory/connector.
+* `password` - Password for the directory administrator or connector user.
+* `size` - (for `SimpleAD` and `ADConnector`) Size of the directory/connector (`Small` or `Large`).
+* `alias` - Alias for the directory/connector, such as `d-991708b282.awsapps.com`.
+* `description` - Textual description for the directory/connector.
+* `shortName` - Short name of the directory/connector, such as `corp`.
+* `enableSso` - Directory/connector single-sign on status.
+* `accessUrl` - Access URL for the directory/connector, such as `http://alias.awsapps.com`.
+* `dnsIpAddresses` - List of IP addresses of the DNS servers for the directory/connector.
+* `securityGroupId` - ID of the security group created by the directory/connector.
+* `tags` - A map of tags assigned to the directory/connector.
+
+`vpcSettings` (for `SimpleAD` and `MicrosoftAD`) is also exported with the following attributes:
+
+* `subnetIds` - Identifiers of the subnets for the directory servers (2 subnets in 2 different AZs).
+* `vpcId` - ID of the VPC that the directory is in.
+
+`connectSettings` (for `ADConnector`) is also exported with the following attributes:
+
+* `connectIps` - IP addresses of the AD Connector servers.
+* `customerUsername` - Username corresponding to the password provided.
+* `customerDnsIps` - DNS IP addresses of the domain to connect to.
+* `subnetIds` - Identifiers of the subnets for the connector servers (2 subnets in 2 different AZs).
+* `vpcId` - ID of the VPC that the connector is in.
+
+`radiusSettings` is also exported with the following attributes:
+
+* `authenticationProtocol` - The protocol specified for your RADIUS endpoints.
+* `displayLabel` - Display label.
+* `radiusPort` - Port that your RADIUS server is using for communications.
+* `radiusRetries` - Maximum number of times that communication with the RADIUS server is attempted.
+* `radiusServers` - Set of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.
+* `radiusTimeout` - Amount of time, in seconds, to wait for the RADIUS server to respond.
+* `useSameUsername` - Not currently used.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/dms_certificate.html.markdown b/website/docs/cdktf/typescript/d/dms_certificate.html.markdown
new file mode 100644
index 00000000000..0af63ba0504
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/dms_certificate.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "DMS (Database Migration)"
+layout: "aws"
+page_title: "AWS: aws_dms_certificate"
+description: |-
+  Terraform data source for managing an AWS DMS (Database Migration) Certificate.
+---
+
+
+
+# Data Source: aws_dms_certificate
+
+Terraform data source for managing an AWS DMS (Database Migration) Certificate.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDmsCertificate } from "./.gen/providers/aws/data-aws-dms-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDmsCertificate(this, "example", {
+      certificateId: test.certificateId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `certificateId` - (Required) A customer-assigned name for the certificate. Identifiers must begin with a letter and must contain only ASCII letters, digits, and hyphens. They can't end with a hyphen or contain two consecutive hyphens.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `certificateCreationDate` - The date that the certificate was created.
+* `certificatePem` - The contents of a .pem file, which contains an X.509 certificate.
+* `certificateOwner` - The owner of the certificate.
+* `certificateArn` - The Amazon Resource Name (ARN) for the certificate.
+* `certificateWallet` - The location of an imported Oracle Wallet certificate for use with SSL.
+* `keyLength` - The key length of the cryptographic algorithm being used.
+* `signingAlgorithm` - The algorithm for the certificate.
+* `validFromDate` - The beginning date that the certificate is valid.
+* `validToDate` - The final date that the certificate is valid.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/dms_endpoint.html.markdown b/website/docs/cdktf/typescript/d/dms_endpoint.html.markdown
new file mode 100644
index 00000000000..39e4a75cd20
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/dms_endpoint.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "DMS (Database Migration)"
+layout: "aws"
+page_title: "AWS: aws_dms_endpoint"
+description: |-
+  Terraform data source for managing an AWS DMS (Database Migration) Endpoint.
+---
+
+
+
+# Data Source: aws_dms_endpoint
+
+Terraform data source for managing an AWS DMS (Database Migration) Endpoint.
+ +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDmsEndpoint } from "./.gen/providers/aws/data-aws-dms-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDmsEndpoint(this, "test", { + endpointId: "test_id", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `endpointId` - (Required) Database endpoint identifier. Identifiers must contain from 1 to 255 alphanumeric characters or hyphens, begin with a letter, contain only ASCII letters, digits, and hyphens, not end with a hyphen, and not contain two consecutive hyphens. + +## Attribute Reference + +See the [`awsDmsEndpoint` resource](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/dms_endpoint) for details on the returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/dms_replication_instance.html.markdown b/website/docs/cdktf/typescript/d/dms_replication_instance.html.markdown new file mode 100644 index 00000000000..603b4ca4ee2 --- /dev/null +++ b/website/docs/cdktf/typescript/d/dms_replication_instance.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_instance" +description: |- + Terraform data source for managing an AWS DMS (Database Migration) Replication Instance. +--- + + + +# Data Source: aws_dms_replication_instance + +Terraform data source for managing an AWS DMS (Database Migration) Replication Instance. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDmsReplicationInstance } from "./.gen/providers/aws/data-aws-dms-replication-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDmsReplicationInstance(this, "test", { + replicationInstanceId: Token.asString( + awsDmsReplicationInstanceTest.replicationInstanceId + ), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `replicationInstanceId` - (Required) The replication instance identifier. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `allocatedStorage` - The amount of storage (in gigabytes) to be initially allocated for the replication instance. +* `autoMinorVersionUpgrade` - Indicates that minor engine upgrades will be applied automatically to the replication instance during the maintenance window. +* `availabilityZone` - The EC2 Availability Zone that the replication instance will be created in. +* `engineVersion` - The engine version number of the replication instance. +* `kmsKeyArn` - The Amazon Resource Name (ARN) for the KMS key used to encrypt the connection parameters. +* `multiAz` - Specifies if the replication instance is a multi-az deployment. 
+* `preferredMaintenanceWindow` - The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC). +* `publiclyAccessible` - Specifies the accessibility options for the replication instance. A value of true represents an instance with a public IP address. A value of false represents an instance with a private IP address. +* `replicationInstanceArn` - The Amazon Resource Name (ARN) of the replication instance. +* `replicationInstanceClass` - The compute and memory capacity of the replication instance as specified by the replication instance class. See [AWS DMS User Guide](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_ReplicationInstance.Types.html) for information on instance classes. +* `replicationInstancePrivateIps` - A list of the private IP addresses of the replication instance. +* `replicationInstancePublicIps` - A list of the public IP addresses of the replication instance. +* `replicationSubnetGroupId` - A subnet group to associate with the replication instance. +* `vpcSecurityGroupIds` - A set of VPC security group IDs that are used with the replication instance. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/dms_replication_subnet_group.html.markdown b/website/docs/cdktf/typescript/d/dms_replication_subnet_group.html.markdown new file mode 100644 index 00000000000..c5704de205a --- /dev/null +++ b/website/docs/cdktf/typescript/d/dms_replication_subnet_group.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_subnet_group" +description: |- + Terraform data source for managing an AWS DMS (Database Migration) Replication Subnet Group. +--- + + + +# Data Source: aws_dms_replication_subnet_group + +Terraform data source for managing an AWS DMS (Database Migration) Replication Subnet Group. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDmsReplicationSubnetGroup } from "./.gen/providers/aws/data-aws-dms-replication-subnet-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDmsReplicationSubnetGroup(this, "test", { + replicationSubnetGroupId: Token.asString( + awsDmsReplicationSubnetGroupTest.replicationSubnetGroupId + ), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `replicationSubnetGroupId` - (Required) Name for the replication subnet group. This value is stored as a lowercase string. It must contain no more than 255 alphanumeric characters, periods, spaces, underscores, or hyphens and cannot be `default`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `replicationSubnetGroupDescription` - Description for the subnet group. +* `subnetIds` - List of at least 2 EC2 subnet IDs for the subnet group. The subnets must cover at least 2 availability zones. +* `vpcId` - The ID of the VPC the subnet group is in. 
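+
+The looked-up group can be fed straight into a replication instance. A minimal sketch, assuming the generated `DmsReplicationInstance` binding (the identifiers and instance class are illustrative):
+
+```typescript
+// Sketch: reuse an existing subnet group when declaring a DMS replication instance.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsDmsReplicationSubnetGroup } from "./.gen/providers/aws/data-aws-dms-replication-subnet-group";
+import { DmsReplicationInstance } from "./.gen/providers/aws/dms-replication-instance";
+class ReplicationInstanceSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const group = new DataAwsDmsReplicationSubnetGroup(this, "group", {
+      replicationSubnetGroupId: "my-subnet-group",
+    });
+    // Place the new replication instance in the subnets of the existing group.
+    new DmsReplicationInstance(this, "instance", {
+      replicationInstanceClass: "dms.t3.micro",
+      replicationInstanceId: "example-instance",
+      replicationSubnetGroupId: group.replicationSubnetGroupId,
+    });
+  }
+}
+```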
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/dms_replication_task.html.markdown b/website/docs/cdktf/typescript/d/dms_replication_task.html.markdown new file mode 100644 index 00000000000..24555ec0b9d --- /dev/null +++ b/website/docs/cdktf/typescript/d/dms_replication_task.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_task" +description: |- + Terraform data source for managing an AWS DMS (Database Migration) Replication Task. +--- + + + +# Data Source: aws_dms_replication_task + +Terraform data source for managing an AWS DMS (Database Migration) Replication Task. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDmsReplicationTask } from "./.gen/providers/aws/data-aws-dms-replication-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDmsReplicationTask(this, "test", { + replicationTaskId: Token.asString( + awsDmsReplicationTaskTest.replicationTaskId + ), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `replicationTaskId` - (Required) The replication task identifier. + + - Must contain from 1 to 255 alphanumeric characters or hyphens. + - First character must be a letter. + - Cannot end with a hyphen. + - Cannot contain two consecutive hyphens. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `cdcStartPosition` - (Conflicts with `cdcStartTime`) Indicates when you want a change data capture (CDC) operation to start. The value can be in date, checkpoint, or LSN/SCN format depending on the source engine. For more information, see [Determining a CDC native start point](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Task.CDC.html#CHAP_Task.CDC.StartPoint.Native). +* `cdcStartTime` - (Conflicts with `cdcStartPosition`) The Unix timestamp integer for the start of the Change Data Capture (CDC) operation. +* `migrationType` - The migration type. Can be one of `full-load | cdc | full-load-and-cdc`. +* `replicationInstanceArn` - The Amazon Resource Name (ARN) of the replication instance. +* `replicationTaskSettings` - An escaped JSON string that contains the task settings. For a complete list of task settings, see [Task Settings for AWS Database Migration Service Tasks](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TaskSettings.html). +* `sourceEndpointArn` - The Amazon Resource Name (ARN) string that uniquely identifies the source endpoint. +* `startReplicationTask` - Whether to run or stop the replication task. +* `status` - Replication Task status. +* `tableMappings` - An escaped JSON string that contains the table mappings. For information on table mapping see [Using Table Mapping with an AWS Database Migration Service Task to Select and Filter Data](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TableMapping.html) +* `targetEndpointArn` - The Amazon Resource Name (ARN) string that uniquely identifies the target endpoint. +* `replicationTaskArn` - The Amazon Resource Name (ARN) for the replication task. 
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/docdb_engine_version.markdown b/website/docs/cdktf/typescript/d/docdb_engine_version.markdown
new file mode 100644
index 00000000000..867cd2f94bb
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/docdb_engine_version.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_engine_version"
+description: |-
+  Information about a DocumentDB engine version.
+---
+
+
+
+# Data Source: aws_docdb_engine_version
+
+Information about a DocumentDB engine version.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDocdbEngineVersion } from "./.gen/providers/aws/data-aws-docdb-engine-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDocdbEngineVersion(this, "test", {
+      version: "3.6.0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine` - (Optional) DB engine. (Default: `docdb`)
+* `parameterGroupFamily` - (Optional) Name of a specific DB parameter group family. An example parameter group family is `docdb3.6`.
+* `preferredVersions` - (Optional) Ordered list of preferred engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+* `version` - (Optional) Version of the DB engine. For example, `3.6.0`. If neither `version` nor `preferredVersions` is configured, the data source returns the AWS-defined default version for the engine.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `engineDescription` - Description of the database engine.
+* `exportableLogTypes` - Set of log types that the database engine has available for export to CloudWatch Logs.
+* `supportsLogExportsToCloudwatch` - Indicates whether the engine version supports exporting the log types specified by `exportableLogTypes` to CloudWatch Logs.
+* `validUpgradeTargets` - A set of engine versions that this database engine version can be upgraded to.
+* `versionDescription` - Description of the database engine version.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/docdb_orderable_db_instance.markdown b/website/docs/cdktf/typescript/d/docdb_orderable_db_instance.markdown
new file mode 100644
index 00000000000..e27eb6795e9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/docdb_orderable_db_instance.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_orderable_db_instance"
+description: |-
+  Information about DocumentDB orderable DB instances.
+---
+
+
+
+# Data Source: aws_docdb_orderable_db_instance
+
+Information about DocumentDB orderable DB instances.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDocdbOrderableDbInstance } from "./.gen/providers/aws/data-aws-docdb-orderable-db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDocdbOrderableDbInstance(this, "test", {
+      engine: "docdb",
+      engineVersion: "3.6.0",
+      licenseModel: "na",
+      preferredInstanceClasses: ["db.r5.large", "db.r4.large", "db.t3.medium"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine` - (Optional) DB engine. Default: `docdb`
+* `engineVersion` - (Optional) Version of the DB engine.
+* `instanceClass` - (Optional) DB instance class. Examples of classes are `db.r5.12xlarge`, `db.r5.24xlarge`, `db.r5.2xlarge`, `db.r5.4xlarge`, `db.r5.large`, `db.r5.xlarge`, and `db.t3.medium`. (Conflicts with `preferredInstanceClasses`.)
+* `licenseModel` - (Optional) License model. Default: `na`
+* `preferredInstanceClasses` - (Optional) Ordered list of preferred DocumentDB DB instance classes. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. (Conflicts with `instanceClass`.)
+* `vpc` - (Optional) Enable this option to show only VPC offerings.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availabilityZones` - Availability zones where the instance is available.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/dx_connection.html.markdown b/website/docs/cdktf/typescript/d/dx_connection.html.markdown
new file mode 100644
index 00000000000..0d3c07c6e31
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/dx_connection.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_connection"
+description: |-
+  Retrieve information about a Direct Connect Connection.
+---
+
+
+
+# Data Source: aws_dx_connection
+
+Retrieve information about a Direct Connect Connection.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDxConnection } from "./.gen/providers/aws/data-aws-dx-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDxConnection(this, "example", {
+      name: "tf-dx-connection",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the connection to retrieve.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the connection.
+* `awsDevice` - Direct Connect endpoint on which the physical connection terminates.
+* `bandwidth` - Bandwidth of the connection.
+* `id` - ID of the connection.
+* `location` - AWS Direct Connect location where the connection is located.
+* `ownerAccountId` - ID of the AWS account that owns the connection.
+* `partnerName` - The name of the AWS Direct Connect service provider associated with the connection. +* `providerName` - Name of the service provider associated with the connection. +* `tags` - Map of tags for the resource. +* `vlanId` - The VLAN ID. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/dx_gateway.html.markdown b/website/docs/cdktf/typescript/d/dx_gateway.html.markdown new file mode 100644 index 00000000000..95ff6a45478 --- /dev/null +++ b/website/docs/cdktf/typescript/d/dx_gateway.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_gateway" +description: |- + Retrieve information about a Direct Connect Gateway +--- + + + +# Data Source: aws_dx_gateway + +Retrieve information about a Direct Connect Gateway. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDxGateway } from "./.gen/providers/aws/data-aws-dx-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDxGateway(this, "example", { + name: "example", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) Name of the gateway to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `amazonSideAsn` - ASN on the Amazon side of the connection. +* `id` - ID of the gateway. +* `ownerAccountId` - AWS Account ID of the gateway. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/dx_location.html.markdown b/website/docs/cdktf/typescript/d/dx_location.html.markdown new file mode 100644 index 00000000000..b44734f1b74 --- /dev/null +++ b/website/docs/cdktf/typescript/d/dx_location.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_location" +description: |- + Retrieve information about a specific AWS Direct Connect location in the current AWS Region. +--- + + + +# Data Source: aws_dx_location + +Retrieve information about a specific AWS Direct Connect location in the current AWS Region. +These are the locations that can be specified when configuring [`awsDxConnection`](/docs/providers/aws/r/dx_connection.html) or [`awsDxLag`](/docs/providers/aws/r/dx_lag.html) resources. + +~> **Note:** This data source is different from the [`awsDxLocations`](/docs/providers/aws/d/dx_locations.html) data source which retrieves information about all the AWS Direct Connect locations in the current AWS Region. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDxLocation } from "./.gen/providers/aws/data-aws-dx-location"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDxLocation(this, "example", { + locationCode: "CS32A-24FL", + }); + } +} + +``` + +## Argument Reference + +* `locationCode` - (Required) Code for the location to retrieve. 
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `availableMacsecPortSpeeds` - The available MAC Security (MACsec) port speeds for the location. +* `availablePortSpeeds` - The available port speeds for the location. +* `availableProviders` - Names of the service providers for the location. +* `locationName` - Name of the location. This includes the name of the colocation partner and the physical site of the building. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/dx_locations.html.markdown b/website/docs/cdktf/typescript/d/dx_locations.html.markdown new file mode 100644 index 00000000000..b20b2036938 --- /dev/null +++ b/website/docs/cdktf/typescript/d/dx_locations.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_locations" +description: |- + Retrieve information about the AWS Direct Connect locations in the current AWS Region. +--- + + + +# Data Source: aws_dx_locations + +Retrieve information about the AWS Direct Connect locations in the current AWS Region. +These are the locations that can be specified when configuring [`awsDxConnection`](/docs/providers/aws/r/dx_connection.html) or [`awsDxLag`](/docs/providers/aws/r/dx_lag.html) resources. + +~> **Note:** This data source is different from the [`awsDxLocation`](/docs/providers/aws/d/dx_location.html) data source which retrieves information about a specific AWS Direct Connect location in the current AWS Region. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDxLocations } from "./.gen/providers/aws/data-aws-dx-locations"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsDxLocations(this, "available", {}); + } +} + +``` + +## Argument Reference + +There are no arguments available for this data source. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `locationCodes` - Code for the locations. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/dx_router_configuration.html.markdown b/website/docs/cdktf/typescript/d/dx_router_configuration.html.markdown new file mode 100644 index 00000000000..49814d4066e --- /dev/null +++ b/website/docs/cdktf/typescript/d/dx_router_configuration.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_router_configuration" +description: |- + Terraform data source for managing an AWS Direct Connect Router Configuration. +--- + + + +# Data Source: aws_dx_router_configuration + +Terraform data source for retrieving Router Configuration instructions for a given AWS Direct Connect Virtual Interface and Router Type. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsDxRouterConfiguration } from "./.gen/providers/aws/data-aws-dx-router-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDxRouterConfiguration(this, "example", {
+      routerTypeIdentifier: "CiscoSystemsInc-2900SeriesRouters-IOS124",
+      virtualInterfaceId: "dxvif-abcde123",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `virtualInterfaceId` - (Required) ID of the Direct Connect Virtual Interface.
+* `routerTypeIdentifier` - (Required) ID of the Router Type. For example: `CiscoSystemsInc-2900SeriesRouters-IOS124`
+
+There is currently no AWS API to retrieve the full list of `routerTypeIdentifier` values. Here is a list of known `routerType` objects that can be used:
+
+```json
+{
+    "routerTypes": [
+        {"platform":"2900 Series Routers","routerTypeIdentifier":"CiscoSystemsInc-2900SeriesRouters-IOS124","software":"IOS 12.4+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-router-cisco-generic.xslt","xsltTemplateNameForMacSec":""},
+        {"platform":"3700 Series Routers","routerTypeIdentifier":"CiscoSystemsInc-3700SeriesRouters-IOS124","software":"IOS 12.4+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-router-cisco-generic.xslt","xsltTemplateNameForMacSec":""},
+        {"platform":"7200 Series Routers","routerTypeIdentifier":"CiscoSystemsInc-7200SeriesRouters-IOS124","software":"IOS 12.4+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-router-cisco-generic.xslt","xsltTemplateNameForMacSec":""},
+        {"platform":"Nexus 7000 Series Switches","routerTypeIdentifier":"CiscoSystemsInc-Nexus7000SeriesSwitches-NXOS51","software":"NX-OS 5.1+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-switch-cisco-nexus-generic.xslt","xsltTemplateNameForMacSec":""},
+        {"platform":"Nexus 9K+ Series Switches","routerTypeIdentifier":"CiscoSystemsInc-Nexus9KSeriesSwitches-NXOS93","software":"NX-OS 9.3+","vendor":"Cisco Systems, Inc.","xsltTemplateName":"customer-switch-cisco-nexus-generic.xslt","xsltTemplateNameForMacSec":"customer-switch-cisco-nexus-generic-macsec.xslt"},
+        {"platform":"M/MX Series Routers","routerTypeIdentifier":"JuniperNetworksInc-MMXSeriesRouters-JunOS95","software":"JunOS 9.5+","vendor":"Juniper Networks, Inc.","xsltTemplateName":"customer-router-juniper-generic.xslt","xsltTemplateNameForMacSec":"customer-router-juniper-generic-macsec.xslt"},
+        {"platform":"SRX Series Routers","routerTypeIdentifier":"JuniperNetworksInc-SRXSeriesRouters-JunOS95","software":"JunOS 9.5+","vendor":"Juniper Networks, Inc.","xsltTemplateName":"customer-router-juniper-generic.xslt","xsltTemplateNameForMacSec":""},
+        {"platform":"T Series Routers","routerTypeIdentifier":"JuniperNetworksInc-TSeriesRouters-JunOS95","software":"JunOS 9.5+","vendor":"Juniper Networks, Inc.","xsltTemplateName":"customer-router-juniper-generic.xslt","xsltTemplateNameForMacSec":""},
+        {"platform":"PA-3000+ and 5000+ series","routerTypeIdentifier":"PaloAltoNetworks-PA3000and5000series-PANOS803","software":"PAN-OS 8.0.3+","vendor":"Palo Alto Networks","xsltTemplateName":"customer-router-palo-alto-generic.xslt","xsltTemplateNameForMacSec":""}]
+}
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `customerRouterConfig` - Instructions for configuring your router
+* `router` - Block of the router type details
+
+A `router` block supports the following attributes:
+
+* `platform` - Router platform
+* `routerTypeIdentifier` - Router type identifier
+* `software` - Router operating system
+* `vendor` - Router vendor
+* `xsltTemplateName` - Router XSLT Template Name
+* `xsltTemplateNameForMacSec` - Router XSLT Template Name for MacSec
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/dynamodb_table.html.markdown b/website/docs/cdktf/typescript/d/dynamodb_table.html.markdown
new file mode 100644
index 00000000000..1af295e67fe
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/dynamodb_table.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table"
+description: |-
+  Provides a DynamoDB table data source.
+---
+
+
+
+# Data Source: aws_dynamodb_table
+
+Provides information about a DynamoDB table.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDynamodbTable } from "./.gen/providers/aws/data-aws-dynamodb-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDynamodbTable(this, "tableName", {
+      name: "tableName",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the DynamoDB table.
+
+## Attribute Reference
+
+See the [DynamoDB Table Resource](/docs/providers/aws/r/dynamodb_table.html) for details on the
+returned attributes - they are identical.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/dynamodb_table_item.html.markdown b/website/docs/cdktf/typescript/d/dynamodb_table_item.html.markdown
new file mode 100644
index 00000000000..195b0099d94
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/dynamodb_table_item.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table_item"
+description: |-
+  Terraform data source for retrieving a value from an AWS DynamoDB table.
+---
+
+
+
+# Data Source: aws_dynamodb_table_item
+
+Terraform data source for retrieving a value from an AWS DynamoDB table.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDynamodbTableItem } from "./.gen/providers/aws/data-aws-dynamodb-table-item";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsDynamodbTableItem(this, "test", {
+      dependsOn: [example],
+      expressionAttributeNames: {
+        "#P": "Percentile",
+      },
+      key: '{\n\t"hashKey": {"S": "example"}\n}\n\n',
+      projectionExpression: "#P",
+      tableName: Token.asString(awsDynamodbTableExample.name),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `tableName` - (Required) The name of the table containing the requested item.
+* `key` - (Required) A map of attribute names to AttributeValue objects, representing the primary key of the item to retrieve.
+ For the primary key, you must provide all of the attributes. For example, with a simple primary key, you only need to provide a value for the partition key. For a composite primary key, you must provide values for both the partition key and the sort key.
+
+The following arguments are optional:
+
+* `expressionAttributeNames` - (Optional) One or more substitution tokens for attribute names in an expression. Use the `#` character in an expression to dereference an attribute name.
+* `projectionExpression` - (Optional) A string that identifies one or more attributes to retrieve from the table. These attributes can include scalars, sets, or elements of a JSON document. The attributes in the expression must be separated by commas.
+If no attribute names are specified, then all attributes are returned. If any of the requested attributes are not found, they do not appear in the result.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `item` - JSON representation of a map of attribute names to [AttributeValue](https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html) objects, as specified by ProjectionExpression.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ebs_default_kms_key.html.markdown b/website/docs/cdktf/typescript/d/ebs_default_kms_key.html.markdown
new file mode 100644
index 00000000000..4479f01d094
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ebs_default_kms_key.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_default_kms_key"
+description: |-
+  Provides metadata about the KMS key set for EBS default encryption
+---
+
+
+
+# Data Source: aws_ebs_default_kms_key
+
+Use this data source to get the default EBS encryption KMS key in the current region.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsEbsDefaultKmsKey } from "./.gen/providers/aws/data-aws-ebs-default-kms-key";
+import { EbsVolume } from "./.gen/providers/aws/ebs-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsEbsDefaultKmsKey(this, "current", {});
+    new EbsVolume(this, "example", {
+      availabilityZone: "us-west-2a",
+      encrypted: true,
+      kmsKeyId: Token.asString(current.keyArn),
+    });
+  }
+}
+
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `keyArn` - ARN of the default KMS key used to encrypt an EBS volume in this region when no key is specified in an API call that creates the volume and encryption by default is enabled.
+* `id` - Region of the default KMS Key.
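+
+If you only need to inspect the result, the key ARN can be surfaced as a stack output. The following is a hand-written sketch rather than `cdktf convert` output; the output name is illustrative:
+
+```typescript
+// A hand-written sketch, not 'cdktf convert' output; bindings assumed from `cdktf get`.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsEbsDefaultKmsKey } from "./.gen/providers/aws/data-aws-ebs-default-kms-key";
+class DefaultKeySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsEbsDefaultKmsKey(this, "current", {});
+    // Print the region's default EBS encryption key ARN after `cdktf deploy`.
+    new TerraformOutput(this, "default_ebs_kms_key_arn", {
+      value: current.keyArn,
+    });
+  }
+}
+```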
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ebs_encryption_by_default.html.markdown b/website/docs/cdktf/typescript/d/ebs_encryption_by_default.html.markdown new file mode 100644 index 00000000000..a32fd055eff --- /dev/null +++ b/website/docs/cdktf/typescript/d/ebs_encryption_by_default.html.markdown @@ -0,0 +1,48 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_encryption_by_default" +description: |- + Checks whether default EBS encryption is enabled for your AWS account in the current AWS region. +--- + + + +# Data Source: aws_ebs_encryption_by_default + +Provides a way to check whether default EBS encryption is enabled for your AWS account in the current AWS region. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEbsEncryptionByDefault } from "./.gen/providers/aws/data-aws-ebs-encryption-by-default"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEbsEncryptionByDefault(this, "current", {}); + } +} + +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `enabled` - Whether or not default EBS encryption is enabled. Returns as `true` or `false`. +* `id` - Region of default EBS encryption. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ebs_snapshot.html.markdown b/website/docs/cdktf/typescript/d/ebs_snapshot.html.markdown new file mode 100644 index 00000000000..306a1be70c1 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ebs_snapshot.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_snapshot" +description: |- + Get information on an EBS Snapshot. +--- + + + +# Data Source: aws_ebs_snapshot + +Use this data source to get information about an EBS Snapshot for use when provisioning EBS Volumes + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEbsSnapshot } from "./.gen/providers/aws/data-aws-ebs-snapshot"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEbsSnapshot(this, "ebs_volume", { + filter: [ + { + name: "volume-size", + values: ["40"], + }, + { + name: "tag:Name", + values: ["Example"], + }, + ], + mostRecent: true, + owners: ["self"], + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `mostRecent` - (Optional) If more than one result is returned, use the most recent snapshot. + +* `owners` - (Optional) Returns the snapshots owned by the specified owner id. 
Multiple owners can be specified.
+
+* `snapshotIds` - (Optional) Returns information on a specific snapshot_id.
+
+* `restorableByUserIds` - (Optional) One or more AWS account IDs that can create volumes from the snapshot.
+
+* `filter` - (Optional) One or more name/value pairs to filter off of. There are
+several valid keys, for a full reference, check out
+[describe-snapshots in the AWS CLI reference][1].
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the EBS Snapshot.
+* `id` - Snapshot ID (e.g., snap-59fcb34e).
+* `snapshotId` - Snapshot ID (e.g., snap-59fcb34e).
+* `description` - Description for the snapshot.
+* `ownerId` - AWS account ID of the EBS snapshot owner.
+* `ownerAlias` - Value from an Amazon-maintained list (`amazon`, `awsMarketplace`, `microsoft`) of snapshot owners.
+* `volumeId` - Volume ID (e.g., vol-59fcb34e).
+* `encrypted` - Whether the snapshot is encrypted.
+* `volumeSize` - Size of the drive in GiBs.
+* `kmsKeyId` - ARN for the KMS encryption key.
+* `dataEncryptionKeyId` - The data encryption key identifier for the snapshot.
+* `state` - Snapshot state.
+* `storageTier` - Storage tier in which the snapshot is stored.
+* `outpostArn` - ARN of the Outpost on which the snapshot is stored.
+* `tags` - Map of tags for the resource.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20M`)
+
+[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-snapshots.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ebs_snapshot_ids.html.markdown b/website/docs/cdktf/typescript/d/ebs_snapshot_ids.html.markdown
new file mode 100644
index 00000000000..71c1e8d6612
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ebs_snapshot_ids.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_snapshot_ids"
+description: |-
+  Provides a list of EBS snapshot IDs.
+---
+
+
+
+# Data Source: aws_ebs_snapshot_ids
+
+Use this data source to get a list of EBS Snapshot IDs matching the specified
+criteria.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsEbsSnapshotIds } from "./.gen/providers/aws/data-aws-ebs-snapshot-ids";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsEbsSnapshotIds(this, "ebs_volumes", {
+      filter: [
+        {
+          name: "volume-size",
+          values: ["40"],
+        },
+        {
+          name: "tag:Name",
+          values: ["Example"],
+        },
+      ],
+      owners: ["self"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `owners` - (Optional) Returns the snapshots owned by the specified owner id. Multiple owners can be specified.
+
+* `restorableByUserIds` - (Optional) One or more AWS account IDs that can create volumes from the snapshot.
+
+* `filter` - (Optional) One or more name/value pairs to filter off of. There are
+several valid keys, for a full reference, check out
+[describe-snapshots in the AWS CLI reference][1].
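+
+Because the matching IDs are only known at apply time, fanning out to per-snapshot lookups has to stay in the Terraform context. The following hand-written sketch (not `cdktf convert` output) mirrors the iterator pattern used in the `awsEbsVolumes` example elsewhere in these docs:
+
+```typescript
+// A hand-written sketch, not 'cdktf convert' output; bindings assumed from `cdktf get`.
+import { Construct } from "constructs";
+import { Token, TerraformIterator, TerraformStack } from "cdktf";
+import { DataAwsEbsSnapshot } from "./.gen/providers/aws/data-aws-ebs-snapshot";
+import { DataAwsEbsSnapshotIds } from "./.gen/providers/aws/data-aws-ebs-snapshot-ids";
+class SnapshotIdsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const all = new DataAwsEbsSnapshotIds(this, "all", {
+      owners: ["self"],
+    });
+    // One DataAwsEbsSnapshot lookup per ID returned by the list above.
+    const eachId = TerraformIterator.fromList(Token.asAny(all.ids));
+    new DataAwsEbsSnapshot(this, "each", {
+      forEach: eachId,
+      filter: [
+        {
+          name: "snapshot-id",
+          values: [Token.asString(eachId.value)],
+        },
+      ],
+    });
+  }
+}
+```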
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - Set of EBS snapshot IDs, sorted by creation time in descending order.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20M`)
+
+[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-snapshots.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ebs_volume.html.markdown b/website/docs/cdktf/typescript/d/ebs_volume.html.markdown
new file mode 100644
index 00000000000..7bc29805a97
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ebs_volume.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_volume"
+description: |-
+  Get information on an EBS volume.
+---
+
+
+
+# Data Source: aws_ebs_volume
+
+Use this data source to get information about an EBS volume for use in other
+resources.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsEbsVolume } from "./.gen/providers/aws/data-aws-ebs-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsEbsVolume(this, "ebs_volume", {
+      filter: [
+        {
+          name: "volume-type",
+          values: ["gp2"],
+        },
+        {
+          name: "tag:Name",
+          values: ["Example"],
+        },
+      ],
+      mostRecent: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `mostRecent` - (Optional) If more than one result is returned, use the most
+recent Volume.
+* `filter` - (Optional) One or more name/value pairs to filter off of. There are
+several valid keys, for a full reference, check out
+[describe-volumes in the AWS CLI reference][1].
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Volume ID (e.g., vol-59fcb34e).
+* `volumeId` - Volume ID (e.g., vol-59fcb34e).
+* `arn` - Volume ARN (e.g., arn:aws:ec2:us-east-1:0123456789012:volume/vol-59fcb34e).
+* `availabilityZone` - AZ where the EBS volume exists.
+* `encrypted` - Whether the disk is encrypted.
+* `iops` - Amount of IOPS for the disk.
+* `multiAttachEnabled` - Whether Amazon EBS Multi-Attach is enabled.
+* `size` - Size of the drive in GiBs.
+* `snapshotId` - Snapshot ID the EBS volume is based off.
+* `outpostArn` - ARN of the Outpost.
+* `volumeType` - Type of EBS volume.
+* `kmsKeyId` - ARN for the KMS encryption key.
+* `tags` - Map of tags for the resource.
+* `throughput` - Throughput that the volume supports, in MiB/s.
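+
+A matched volume's ID can seed other resources, for example an on-demand snapshot. The following is a hand-written sketch rather than `cdktf convert` output; the construct names are illustrative, and the `EbsSnapshot` binding path is assumed to follow the generated-provider convention:
+
+```typescript
+// A hand-written sketch, not 'cdktf convert' output; bindings assumed from `cdktf get`.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsEbsVolume } from "./.gen/providers/aws/data-aws-ebs-volume";
+import { EbsSnapshot } from "./.gen/providers/aws/ebs-snapshot";
+class VolumeSnapshotSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const volume = new DataAwsEbsVolume(this, "volume", {
+      filter: [
+        {
+          name: "tag:Name",
+          values: ["Example"],
+        },
+      ],
+      mostRecent: true,
+    });
+    // Snapshot whichever volume the lookup resolved to at apply time.
+    new EbsSnapshot(this, "backup", {
+      volumeId: Token.asString(volume.id),
+    });
+  }
+}
+```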
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + +[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-volumes.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ebs_volumes.html.markdown b/website/docs/cdktf/typescript/d/ebs_volumes.html.markdown new file mode 100644 index 00000000000..07c4af86349 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ebs_volumes.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_volumes" +description: |- + Provides identifying information for EBS volumes matching given criteria +--- + + + +# Data Source: aws_ebs_volumes + +`awsEbsVolumes` provides identifying information for EBS volumes matching given criteria. + +This data source can be useful for getting a list of volume IDs with (for example) matching tags. + +## Example Usage + +The following demonstrates obtaining a map of availability zone to EBS volume ID for volumes with a given tag value. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Token, + TerraformIterator, + TerraformOutput, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEbsVolume } from "./.gen/providers/aws/data-aws-ebs-volume"; +import { DataAwsEbsVolumes } from "./.gen/providers/aws/data-aws-ebs-volumes"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsEbsVolumes(this, "example", { + tags: { + VolumeSet: "TestVolumeSet", + }, + }); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny(example.ids) + ); + const dataAwsEbsVolumeExample = new DataAwsEbsVolume(this, "example_1", { + filter: [ + { + name: "volume-id", + values: [Token.asString(exampleForEachIterator.value)], + }, + ], + forEach: exampleForEachIterator, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsEbsVolumeExample.overrideLogicalId("example"); + new TerraformOutput(this, "availability_zone_to_volume_id", { + value: + "${{ for s in ${" + + dataAwsEbsVolumeExample.fqn + + "} : s.id => s.availability_zone}}", + }); + } +} + +``` + +## Argument Reference + +* `filter` - (Optional) Custom filter block as described below. + +* `tags` - (Optional) Map of tags, each pair of which must exactly match + a pair on the desired volumes. + +More complex filters can be expressed using one or more `filter` sub-blocks, +which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVolumes.html). 
+ For example, if matching against the `size` filter, use: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEbsVolumes } from "./.gen/providers/aws/data-aws-ebs-volumes"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEbsVolumes(this, "ten_or_twenty_gb_volumes", { + filter: [ + { + name: "size", + values: ["10", "20"], + }, + ], + }); + } +} + +``` + +* `values` - (Required) Set of values that are accepted for the given field. + EBS Volume IDs will be selected if any one of the given values match. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `ids` - Set of all the EBS Volume IDs found. This data source will fail if + no volumes match the provided criteria. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_client_vpn_endpoint.html.markdown b/website/docs/cdktf/typescript/d/ec2_client_vpn_endpoint.html.markdown index 2abe74edef4..f066b5f0cd1 100644 --- a/website/docs/cdktf/typescript/d/ec2_client_vpn_endpoint.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_client_vpn_endpoint.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `clientVpnEndpointId` - (Optional) ID of the Client VPN endpoint. * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. @@ -82,7 +82,7 @@ The following arguments are required: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Client VPN endpoint. * `authenticationOptions` - Information about the authentication method used by the Client VPN endpoint. @@ -108,4 +108,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_coip_pool.html.markdown b/website/docs/cdktf/typescript/d/ec2_coip_pool.html.markdown index 40a365b9e6a..d62f9636157 100644 --- a/website/docs/cdktf/typescript/d/ec2_coip_pool.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_coip_pool.html.markdown @@ -65,14 +65,14 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A COIP Pool will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating any fields that are not included in the configuration with the data for the selected COIP Pool. 
-In addition, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the COIP pool * `poolCidrs` - Set of CIDR blocks in pool @@ -83,4 +83,4 @@ In addition, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_coip_pools.html.markdown b/website/docs/cdktf/typescript/d/ec2_coip_pools.html.markdown index b476cb5d3cc..72f5efe46e3 100644 --- a/website/docs/cdktf/typescript/d/ec2_coip_pools.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_coip_pools.html.markdown @@ -55,7 +55,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A COIP Pool will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `poolIds` - Set of COIP Pool Identifiers @@ -66,4 +68,4 @@ which take the following arguments: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_host.html.markdown b/website/docs/cdktf/typescript/d/ec2_host.html.markdown index 2b93aebeacc..9f712e339ba 100644 --- a/website/docs/cdktf/typescript/d/ec2_host.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_host.html.markdown @@ -85,12 +85,13 @@ The following arguments are required: * `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeHosts.html). * `values` - (Required) Set of values that are accepted for the given field. A host will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference -In addition to the attributes above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - ID of the Dedicated Host. * `arn` - ARN of the Dedicated Host. +* `assetId` - The ID of the Outpost hardware asset on which the Dedicated Host is allocated. * `autoPlacement` - Whether auto-placement is on or off. * `availabilityZone` - Availability Zone of the Dedicated Host. * `cores` - Number of cores on the Dedicated Host. @@ -108,4 +109,4 @@ In addition to the attributes above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_instance_type.html.markdown b/website/docs/cdktf/typescript/d/ec2_instance_type.html.markdown index 95e3e29cf1b..68d160d4984 100644 --- a/website/docs/cdktf/typescript/d/ec2_instance_type.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_instance_type.html.markdown @@ -37,13 +37,13 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following argument is supported: +This data source supports the following arguments: * `instanceType` - (Required) Instance ## Attribute Reference -In addition to the argument above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: ~> **NOTE:** Not all attributes are set for every instance type. 
@@ -113,4 +113,4 @@ In addition to the argument above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_instance_type_offering.html.markdown b/website/docs/cdktf/typescript/d/ec2_instance_type_offering.html.markdown index 62b279a6406..2713287ec0a 100644 --- a/website/docs/cdktf/typescript/d/ec2_instance_type_offering.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_instance_type_offering.html.markdown @@ -42,7 +42,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypeOfferings.html) for supported filters. Detailed below. * `locationType` - (Optional) Location type. Defaults to `region`. Valid values: `availabilityZone`, `availabilityZoneId`, and `region`. @@ -55,7 +55,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Instance Type. * `instanceType` - EC2 Instance Type. @@ -66,4 +66,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_instance_type_offerings.html.markdown b/website/docs/cdktf/typescript/d/ec2_instance_type_offerings.html.markdown index e0edc26503e..3fb7dccd9cc 100644 --- a/website/docs/cdktf/typescript/d/ec2_instance_type_offerings.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_instance_type_offerings.html.markdown @@ -46,7 +46,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypeOfferings.html) for supported filters. Detailed below. * `locationType` - (Optional) Location type. Defaults to `region`. Valid values: `availabilityZone`, `availabilityZoneId`, and `region`. @@ -58,7 +58,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `instanceTypes` - List of EC2 Instance Types. 
@@ -73,4 +73,4 @@ Note that the indexes of Instance Type Offering instance types, locations and lo - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_instance_types.html.markdown b/website/docs/cdktf/typescript/d/ec2_instance_types.html.markdown index b95729406c7..07d76a7878f 100644 --- a/website/docs/cdktf/typescript/d/ec2_instance_types.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_instance_types.html.markdown @@ -53,7 +53,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. See the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstanceTypes.html) for supported filters. Detailed below. @@ -64,7 +64,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `instanceTypes` - List of EC2 Instance Types. @@ -75,4 +75,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_local_gateway.html.markdown b/website/docs/cdktf/typescript/d/ec2_local_gateway.html.markdown index 9cb74a6e239..7f6c4a074ab 100644 --- a/website/docs/cdktf/typescript/d/ec2_local_gateway.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_local_gateway.html.markdown @@ -64,7 +64,7 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Local Gateway will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating @@ -83,4 +83,4 @@ The following attributes are additionally exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_local_gateway_route_tables.html.markdown b/website/docs/cdktf/typescript/d/ec2_local_gateway_route_tables.html.markdown index ceba3054adf..89cd8baf19d 100644 --- a/website/docs/cdktf/typescript/d/ec2_local_gateway_route_tables.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_local_gateway_route_tables.html.markdown @@ -55,7 +55,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Local Gateway Route Table will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. 
* `ids` - Set of Local Gateway Route Table identifiers @@ -66,4 +68,4 @@ which take the following arguments: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface.html.markdown b/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface.html.markdown index 092a8e610eb..44660c3ab2c 100644 --- a/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface.html.markdown @@ -61,7 +61,7 @@ The `filter` configuration block supports the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `localAddress` - Local address. * `localBgpAsn` - Border Gateway Protocol (BGP) Autonomous System Number (ASN) of the EC2 Local Gateway. @@ -76,4 +76,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_group.html.markdown b/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_group.html.markdown index 0852e802c13..e073aa27f05 100644 --- a/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_group.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_group.html.markdown @@ -52,7 +52,7 @@ The `filter` configuration block supports the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `localGatewayVirtualInterfaceIds` - Set of EC2 Local Gateway Virtual Interface identifiers. @@ -62,4 +62,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_groups.html.markdown b/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_groups.html.markdown index 0485b303200..20f2725b753 100644 --- a/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_groups.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_local_gateway_virtual_interface_groups.html.markdown @@ -48,7 +48,7 @@ The `filter` configuration block supports the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of EC2 Local Gateway Virtual Interface Group identifiers. 
@@ -60,4 +60,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_local_gateways.html.markdown b/website/docs/cdktf/typescript/d/ec2_local_gateways.html.markdown index bf6b64815f7..4787cd62a79 100644 --- a/website/docs/cdktf/typescript/d/ec2_local_gateways.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_local_gateways.html.markdown @@ -59,7 +59,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Local Gateway will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of all the Local Gateway identifiers @@ -70,4 +72,4 @@ which take the following arguments: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_managed_prefix_list.html.markdown b/website/docs/cdktf/typescript/d/ec2_managed_prefix_list.html.markdown index af2c52d4249..4287e770bcb 100644 --- a/website/docs/cdktf/typescript/d/ec2_managed_prefix_list.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_managed_prefix_list.html.markdown @@ -78,14 +78,14 @@ whose data will be exported as attributes. ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [DescribeManagedPrefixLists](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeManagedPrefixLists.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - ID of the selected prefix list. * `arn` - ARN of the selected prefix list. @@ -102,4 +102,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_managed_prefix_lists.html.markdown b/website/docs/cdktf/typescript/d/ec2_managed_prefix_lists.html.markdown index 47713987443..217ec11f96c 100644 --- a/website/docs/cdktf/typescript/d/ec2_managed_prefix_lists.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_managed_prefix_lists.html.markdown @@ -78,7 +78,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A managed prefix list will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - List of all the managed prefix list ids found. 
@@ -89,4 +91,4 @@ which take the following arguments: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_network_insights_analysis.html.markdown b/website/docs/cdktf/typescript/d/ec2_network_insights_analysis.html.markdown index a2c6b062835..64ce737a247 100644 --- a/website/docs/cdktf/typescript/d/ec2_network_insights_analysis.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_network_insights_analysis.html.markdown @@ -47,14 +47,14 @@ whose data will be exported as attributes. ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [`describeNetworkInsightsAnalyses`](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkInsightsAnalyses.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `alternatePathHints` - Potential intermediate components of a feasible path. * `arn` - ARN of the selected Network Insights Analysis. @@ -69,4 +69,4 @@ In addition to all arguments above, the following attributes are exported: * `statusMessage` - Message to provide more context when the `status` is `failed`. * `warningMessage` - Warning message. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_network_insights_path.html.markdown b/website/docs/cdktf/typescript/d/ec2_network_insights_path.html.markdown index 960426cf86c..a3cd4f9c862 100644 --- a/website/docs/cdktf/typescript/d/ec2_network_insights_path.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_network_insights_path.html.markdown @@ -47,14 +47,14 @@ whose data will be exported as attributes. ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [`describeNetworkInsightsPaths`](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkInsightsPaths.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the selected Network Insights Path. * `destination` - AWS resource that is the destination of the path. @@ -65,4 +65,4 @@ In addition to all arguments above, the following attributes are exported: * `sourceIp` - IP address of the AWS resource that is the source of the path. * `tags` - Map of tags assigned to the resource. 
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_public_ipv4_pool.html.markdown b/website/docs/cdktf/typescript/d/ec2_public_ipv4_pool.html.markdown index 64f1e067fe7..febada73a69 100644 --- a/website/docs/cdktf/typescript/d/ec2_public_ipv4_pool.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_public_ipv4_pool.html.markdown @@ -42,9 +42,9 @@ The following arguments are required: * `poolId` - (Required) AWS resource IDs of a public IPv4 pool (as a string) for which this data source will fetch detailed information. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `description` - Description of the pool, if any. * `networkBorderGroup` - Name of the location from which the address pool is advertised. @@ -57,4 +57,4 @@ In addition to all arguments above, the following attributes are exported: * `totalAddressCount` - Total number of addresses in the pool. * `totalAvailableAddressCount` - Total number of available addresses in the pool. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_public_ipv4_pools.html.markdown b/website/docs/cdktf/typescript/d/ec2_public_ipv4_pools.html.markdown index c31a10c9b57..889fceb6810 100644 --- a/website/docs/cdktf/typescript/d/ec2_public_ipv4_pools.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_public_ipv4_pools.html.markdown @@ -74,8 +74,10 @@ which take the following arguments: * `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribePublicIpv4Pools.html). * `values` - (Required) Set of values that are accepted for the given field. Pool IDs will be selected if any one of the given values match. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `poolIds` - List of all the pool IDs found. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_serial_console_access.html.markdown b/website/docs/cdktf/typescript/d/ec2_serial_console_access.html.markdown index 341248b8335..22d9f002fea 100644 --- a/website/docs/cdktf/typescript/d/ec2_serial_console_access.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_serial_console_access.html.markdown @@ -32,9 +32,9 @@ class MyConvertedCode extends TerraformStack { ``` -## Attributes Reference +## Attribute Reference -The following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `enabled` - Whether or not serial console access is enabled. Returns as `true` or `false`. * `id` - Region of serial console access. 
@@ -45,4 +45,4 @@ The following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_spot_price.html.markdown b/website/docs/cdktf/typescript/d/ec2_spot_price.html.markdown index 9eb299a0919..72fd58b11c5 100644 --- a/website/docs/cdktf/typescript/d/ec2_spot_price.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_spot_price.html.markdown @@ -43,7 +43,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `instanceType` - (Optional) Type of instance for which to query Spot Price information. * `availabilityZone` - (Optional) Availability zone in which to query Spot price information. @@ -56,7 +56,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `spotPrice` - Most recent Spot Price value for the given instance type and AZ. @@ -68,4 +68,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway.html.markdown index 1de06305310..47031765f69 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway. @@ -77,7 +77,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `amazonSideAsn` - Private Autonomous System Number (ASN) for the Amazon side of a BGP session * `arn` - EC2 Transit Gateway ARN @@ -101,4 +101,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachment.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachment.html.markdown index 781551024ca..06923cc494a 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachment.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachment.html.markdown @@ -45,7 +45,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transitGatewayAttachmentId` - (Optional) ID of the attachment. 
@@ -57,7 +57,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the attachment. * `associationState` - The state of the association (see [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_TransitGatewayAttachmentAssociation.html) for valid values). @@ -70,4 +70,4 @@ In addition to all arguments above, the following attributes are exported: * `transitGatewayId` - ID of the transit gateway. * `transitGatewayOwnerId` - The ID of the AWS account that owns the transit gateway. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachments.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachments.html.markdown index 499796a0021..2063e526f0b 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachments.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_attachments.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. @@ -76,7 +76,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `ids` A list of all attachments ids matching the filter. You can retrieve more information about the attachment using the [aws_ec2_transit_gateway_attachment][2] data source, searching by identifier. @@ -89,4 +89,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect.html.markdown index 434fdc4048e..151fd7e0167 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transitGatewayConnectId` - (Optional) Identifier of the EC2 Transit Gateway Connect. 
@@ -77,7 +77,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `protocol` - Tunnel protocol * `tags` - Key-value tags for the EC2 Transit Gateway Connect @@ -90,4 +90,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect_peer.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect_peer.html.markdown index 1587b77af25..e8b21f33de6 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect_peer.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_connect_peer.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transitGatewayConnectPeerId` - (Optional) Identifier of the EC2 Transit Gateway Connect Peer. @@ -77,7 +77,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Connect Peer ARN * `bgpAsn` - BGP ASN number assigned customer device @@ -95,4 +95,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown index 50b08244685..05dfb939a82 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_dx_gateway_attachment.html.markdown @@ -39,7 +39,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `transitGatewayId` - (Optional) Identifier of the EC2 Transit Gateway. * `dxGatewayId` - (Optional) Identifier of the Direct Connect Gateway. @@ -48,14 +48,14 @@ The following arguments are supported: ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html). * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. 
## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tags` - Key-value tags for the EC2 Transit Gateway Attachment @@ -66,4 +66,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_multicast_domain.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_multicast_domain.html.markdown index 378f52ac1a4..3101b2232b8 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_multicast_domain.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_multicast_domain.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `transitGatewayMulticastDomainId` - (Optional) Identifier of the EC2 Transit Gateway Multicast Domain. @@ -81,7 +81,7 @@ The following arguments are required: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Domain identifier. * `arn` - EC2 Transit Gateway Multicast Domain ARN. @@ -107,4 +107,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_peering_attachment.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_peering_attachment.html.markdown index 954c749aa56..5c9f1959b8d 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_peering_attachment.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_peering_attachment.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway Peering Attachment. 
@@ -82,7 +82,7 @@ which take the following arguments: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `peerAccountId` - Identifier of the peer AWS account * `peerRegion` - Identifier of the peer AWS region @@ -95,4 +95,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table.html.markdown index 5b3fcef0fae..3412c1db9f2 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table.html.markdown @@ -69,7 +69,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway Route Table. @@ -81,7 +81,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Route Table ARN. * `defaultAssociationRouteTable` - Boolean whether this is the default association route table for the EC2 Transit Gateway @@ -96,4 +96,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_associations.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_associations.html.markdown index 582b8cdb955..83e8b23ac85 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_associations.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_associations.html.markdown @@ -57,11 +57,11 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Transit Gateway Route Table will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of Transit Gateway Route Table Association identifiers. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_propagations.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_propagations.html.markdown index 8679e45595d..0aa1cd9fb35 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_propagations.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_table_propagations.html.markdown @@ -57,11 +57,11 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Transit Gateway Route Table will be selected if any one of the given values matches. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of Transit Gateway Route Table Association identifiers. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_tables.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_tables.html.markdown index 3376b09ca92..fb1b8ae329e 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_tables.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_route_tables.html.markdown @@ -45,7 +45,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) Custom filter block as described below. @@ -61,9 +61,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A Transit Gateway Route Table will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - Set of Transit Gateway Route Table identifiers. @@ -74,4 +74,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachment.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachment.html.markdown index 207e34f7946..9f01a1e1f7a 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachment.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachment.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-values filters. Detailed below. * `id` - (Optional) Identifier of the EC2 Transit Gateway VPC Attachment. @@ -77,7 +77,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `applianceModeSupport` - Whether Appliance Mode support is enabled. * `dnsSupport` - Whether DNS support is enabled. 
@@ -95,4 +95,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachments.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachments.html.markdown index 4c22c0955b3..081e63cb744 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachments.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpc_attachments.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `filter` - (Optional) One or more configuration blocks containing name-value filters. Detailed below. @@ -76,7 +76,7 @@ The following arguments are supported: ## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `ids` - A list of all attachment IDs matching the filter. You can retrieve more information about the attachment using the [aws_ec2_transit_gateway_vpc_attachment][2] data source, searching by identifier. @@ -89,4 +89,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpn_attachment.html.markdown b/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpn_attachment.html.markdown index fe77c101499..6451646621d 100644 --- a/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpn_attachment.html.markdown +++ b/website/docs/cdktf/typescript/d/ec2_transit_gateway_vpn_attachment.html.markdown @@ -68,7 +68,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `transitGatewayId` - (Optional) Identifier of the EC2 Transit Gateway. * `vpnConnectionId` - (Optional) Identifier of the EC2 VPN Connection. @@ -77,14 +77,14 @@ The following arguments are supported: ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeTransitGatewayAttachments API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeTransitGatewayAttachments.html). * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway VPN Attachment identifier * `tags` - Key-value tags for the EC2 Transit Gateway VPN Attachment @@ -95,4 +95,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecr_authorization_token.html.markdown b/website/docs/cdktf/typescript/d/ecr_authorization_token.html.markdown new file mode 100644 index 00000000000..229568c6306 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecr_authorization_token.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_authorization_token" +description: |- + Provides details about an ECR Authorization Token +--- + + + +# Data Source: aws_ecr_authorization_token + +The ECR Authorization Token data source allows the authorization token, proxy endpoint, token expiration date, user name and password to be retrieved for an ECR repository. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcrAuthorizationToken } from "./.gen/providers/aws/data-aws-ecr-authorization-token"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcrAuthorizationToken(this, "token", {}); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `registryId` - (Optional) AWS account ID of the ECR Repository. If not specified the default account is assumed. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `authorizationToken` - Temporary IAM authentication credentials to access the ECR repository encoded in base64 in the form of `userName:password`. +* `expiresAt` - Time in UTC RFC3339 format when the authorization token expires. +* `id` - Region of the authorization token. +* `password` - Password decoded from the authorization token. +* `proxyEndpoint` - Registry URL to use in the docker login command. +* `userName` - User name decoded from the authorization token. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecr_image.html.markdown b/website/docs/cdktf/typescript/d/ecr_image.html.markdown new file mode 100644 index 00000000000..66d88b87da1 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecr_image.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_image" +description: |- + Provides details about an ECR Image +--- + + + +# Data Source: aws_ecr_image + +The ECR Image data source allows the details of an image with a particular tag or digest to be retrieved. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcrImage } from "./.gen/providers/aws/data-aws-ecr-image"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcrImage(this, "service_image", { + imageTag: "latest", + repositoryName: "my/service", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `registryId` - (Optional) ID of the Registry where the repository resides. +* `repositoryName` - (Required) Name of the ECR Repository. +* `imageDigest` - (Optional) Sha256 digest of the image manifest. At least one of `imageDigest`, `imageTag`, or `mostRecent` must be specified. +* `imageTag` - (Optional) Tag associated with this image. At least one of `imageDigest`, `imageTag`, or `mostRecent` must be specified. +* `mostRecent` - (Optional) Return the most recently pushed image. At least one of `imageDigest`, `imageTag`, or `mostRecent` must be specified. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - SHA256 digest of the image manifest. +* `imagePushedAt` - Date and time, expressed as a unix timestamp, at which the current image was pushed to the repository. +* `imageSizeInBytes` - Size, in bytes, of the image in the repository. +* `imageTags` - List of tags associated with this image. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecr_pull_through_cache_rule.html.markdown b/website/docs/cdktf/typescript/d/ecr_pull_through_cache_rule.html.markdown new file mode 100644 index 00000000000..424eb79a4bc --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecr_pull_through_cache_rule.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_pull_through_cache_rule" +description: |- + Provides details about an ECR Pull Through Cache Rule +--- + + + +# Data Source: aws_ecr_pull_through_cache_rule + +The ECR Pull Through Cache Rule data source allows the upstream registry URL and registry ID to be retrieved for a Pull Through Cache Rule. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcrPullThroughCacheRule } from "./.gen/providers/aws/data-aws-ecr-pull-through-cache-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcrPullThroughCacheRule(this, "ecr_public", { + ecrRepositoryPrefix: "ecr-public", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +- `ecrRepositoryPrefix` - (Required) The repository name prefix to use when caching images from the source registry. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +- `id` - The repository name prefix. +- `upstreamRegistryUrl` - The registry URL of the upstream public registry to use as the source. +- `registryId` - The registry ID where the repository was created. 
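As a usage sketch (the construct and output names here are our own, not part of the generated docs), the retrieved upstream URL can be surfaced with a `TerraformOutput`:

```typescript
// Minimal sketch: look up the rule from the example above and expose its
// upstream registry URL as a stack output.
import { Construct } from "constructs";
import { TerraformOutput, TerraformStack } from "cdktf";
import { DataAwsEcrPullThroughCacheRule } from "./.gen/providers/aws/data-aws-ecr-pull-through-cache-rule";

class UpstreamUrlExample extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const rule = new DataAwsEcrPullThroughCacheRule(this, "ecr_public", {
      ecrRepositoryPrefix: "ecr-public",
    });
    // Expose the upstream registry URL so other tooling can consume it.
    new TerraformOutput(this, "upstream_registry_url", {
      value: rule.upstreamRegistryUrl,
    });
  }
}
```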
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecr_repository.html.markdown b/website/docs/cdktf/typescript/d/ecr_repository.html.markdown new file mode 100644 index 00000000000..6280108087f --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecr_repository.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_repository" +description: |- + Provides details about an ECR Repository +--- + + + +# Data Source: aws_ecr_repository + +The ECR Repository data source allows the ARN, Repository URI and Registry ID to be retrieved for an ECR repository. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcrRepository } from "./.gen/providers/aws/data-aws-ecr-repository"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcrRepository(this, "service", { + name: "ecr-repository", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the ECR Repository. +* `registryId` - (Optional) Registry ID where the repository was created. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Full ARN of the repository. +* `encryptionConfiguration` - Encryption configuration for the repository. See [Encryption Configuration](#encryption-configuration) below. +* `imageScanningConfiguration` - Configuration block that defines image scanning configuration for the repository. See [Image Scanning Configuration](#image-scanning-configuration) below. +* `imageTagMutability` - The tag mutability setting for the repository. +* `mostRecentImageTags` - List of image tags associated with the most recently pushed image in the repository. +* `repositoryUrl` - URL of the repository (in the form `aws_account_id.dkr.ecr.region.amazonaws.com/repositoryName`). +* `tags` - Map of tags assigned to the resource. + +### Encryption Configuration + +* `encryptionType` - Encryption type to use for the repository, either `AES256` or `KMS`. +* `kmsKey` - If `encryptionType` is `KMS`, the ARN of the KMS key used. + +### Image Scanning Configuration + +* `scanOnPush` - Whether images are scanned after being pushed to the repository. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecrpublic_authorization_token.html.markdown b/website/docs/cdktf/typescript/d/ecrpublic_authorization_token.html.markdown new file mode 100644 index 00000000000..8f8162ef7f9 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecrpublic_authorization_token.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "ECR Public" +layout: "aws" +page_title: "AWS: aws_ecrpublic_authorization_token" +description: |- + Provides details about a Public ECR Authorization Token +--- + + + +# Data Source: aws_ecrpublic_authorization_token + +The Public ECR Authorization Token data source allows the authorization token, token expiration date, user name, and password to be retrieved for a Public ECR repository. + +~> **NOTE:** This data source can only be used in the `us-east-1` region.
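One way to satisfy the region requirement from a stack otherwise deployed elsewhere is an aliased provider pinned to `us-east-1`. The sketch below is illustrative only (the alias and construct IDs are arbitrary choices, not part of the generated documentation):

```typescript
// Minimal sketch: pin the data source to us-east-1 with an aliased provider.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { AwsProvider } from "./.gen/providers/aws/provider";
import { DataAwsEcrpublicAuthorizationToken } from "./.gen/providers/aws/data-aws-ecrpublic-authorization-token";

class RegionPinnedToken extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    // Alias name is arbitrary; only the region matters here.
    const usEast1 = new AwsProvider(this, "aws_us_east_1", {
      alias: "us_east_1",
      region: "us-east-1",
    });
    // Read the token through the us-east-1 provider explicitly.
    new DataAwsEcrpublicAuthorizationToken(this, "token", {
      provider: usEast1,
    });
  }
}
```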
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcrpublicAuthorizationToken } from "./.gen/providers/aws/data-aws-ecrpublic-authorization-token"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcrpublicAuthorizationToken(this, "token", {}); + } +} + +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `authorizationToken` - Temporary IAM authentication credentials to access the ECR repository encoded in base64 in the form of `userName:password`. +* `expiresAt` - Time in UTC RFC3339 format when the authorization token expires. +* `id` - Region of the authorization token. +* `password` - Password decoded from the authorization token. +* `userName` - User name decoded from the authorization token. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecs_cluster.html.markdown b/website/docs/cdktf/typescript/d/ecs_cluster.html.markdown new file mode 100644 index 00000000000..a09436f5f1b --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecs_cluster.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_cluster" +description: |- + Provides details about an ecs cluster +--- + + + +# Data Source: aws_ecs_cluster + +The ECS Cluster data source allows access to details of a specific +cluster within an AWS ECS service. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsEcsCluster } from "./.gen/providers/aws/data-aws-ecs-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcsCluster(this, "ecs-mongo", { + clusterName: "ecs-mongo-production", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `clusterName` - (Required) Name of the ECS Cluster + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the ECS Cluster +* `status` - Status of the ECS Cluster +* `pendingTasksCount` - Number of pending tasks for the ECS Cluster +* `runningTasksCount` - Number of running tasks for the ECS Cluster +* `registeredContainerInstancesCount` - The number of registered container instances for the ECS Cluster +* `serviceConnectDefaults` - The default Service Connect namespace +* `setting` - Settings associated with the ECS Cluster +* `tags` - Key-value map of resource tags + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecs_container_definition.html.markdown b/website/docs/cdktf/typescript/d/ecs_container_definition.html.markdown new file mode 100644 index 00000000000..95f06924838 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecs_container_definition.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_container_definition" +description: |- + Provides details about a single container within an ecs task definition +--- + + + +# Data Source: aws_ecs_container_definition + +The ECS container definition data source allows access to details of +a specific container within an AWS ECS service. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcsContainerDefinition } from "./.gen/providers/aws/data-aws-ecs-container-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcsContainerDefinition(this, "ecs-mongo", { + containerName: "mongodb", + taskDefinition: mongo.id, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `taskDefinition` - (Required) ARN of the task definition which contains the container +* `containerName` - (Required) Name of the container definition + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `image` - Docker image in use, including the digest +* `imageDigest` - Digest of the docker image in use +* `cpu` - CPU limit for this container definition +* `memory` - Memory limit for this container definition +* `memoryReservation` - Soft limit (in MiB) of memory to reserve for the container. 
When system memory is under contention, Docker attempts to keep the container memory to this soft limit +* `environment` - Environment in use +* `disableNetworking` - Indicator if networking is disabled +* `dockerLabels` - Set docker labels + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecs_service.html.markdown b/website/docs/cdktf/typescript/d/ecs_service.html.markdown new file mode 100644 index 00000000000..a981a02ae6d --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecs_service.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_service" +description: |- + Provides details about an ecs service +--- + + + +# Data Source: aws_ecs_service + +The ECS Service data source allows access to details of a specific +Service within an AWS ECS Cluster. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcsService } from "./.gen/providers/aws/data-aws-ecs-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcsService(this, "example", { + clusterArn: Token.asString(dataAwsEcsClusterExample.arn), + serviceName: "example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `serviceName` - (Required) Name of the ECS Service +* `clusterArn` - (Required) ARN of the ECS Cluster + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the ECS Service +* `desiredCount` - Number of tasks for the ECS Service +* `launchType` - Launch type for the ECS Service +* `schedulingStrategy` - Scheduling strategy for the ECS Service +* `taskDefinition` - Family for the latest ACTIVE revision or full ARN of the task definition. +* `tags` - Resource tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecs_task_definition.html.markdown b/website/docs/cdktf/typescript/d/ecs_task_definition.html.markdown new file mode 100644 index 00000000000..02b40cc8f59 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecs_task_definition.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_task_definition" +description: |- + Provides details about an ecs task definition +--- + + + +# Data Source: aws_ecs_task_definition + +The ECS task definition data source allows access to details of +a specific AWS ECS task definition. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsEcsTaskDefinition } from "./.gen/providers/aws/data-aws-ecs-task-definition"; +import { EcsCluster } from "./.gen/providers/aws/ecs-cluster"; +import { EcsService } from "./.gen/providers/aws/ecs-service"; +import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new EcsCluster(this, "foo", { + name: "foo", + }); + const mongo = new EcsTaskDefinition(this, "mongo", { + containerDefinitions: + '[\n {\n "cpu": 128,\n "environment": [{\n "name": "SECRET",\n "value": "KEY"\n }],\n "essential": true,\n "image": "mongo:latest",\n "memory": 128,\n "memoryReservation": 64,\n "name": "mongodb"\n }\n]\n\n', + family: "mongodb", + }); + const dataAwsEcsTaskDefinitionMongo = new DataAwsEcsTaskDefinition( + this, + "mongo_2", + { + taskDefinition: mongo.family, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsEcsTaskDefinitionMongo.overrideLogicalId("mongo"); + const awsEcsServiceMongo = new EcsService(this, "mongo_3", { + cluster: foo.id, + desiredCount: 2, + name: "mongo", + taskDefinition: Token.asString(dataAwsEcsTaskDefinitionMongo.arn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcsServiceMongo.overrideLogicalId("mongo"); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `taskDefinition` - (Required) Family for the latest ACTIVE revision, family and revision (family:revision) for a specific revision in the family, or the ARN of the task definition to access. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ARN of the task definition. +* `arn` - ARN of the task definition. +* `arnWithoutRevision` - ARN of the Task Definition with the trailing `revision` removed. This may be useful for situations where the latest task definition is always desired. If a revision isn't specified, the latest ACTIVE revision is used. See the [AWS documentation](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_StartTask.html#ECS-StartTask-request-taskDefinition) for details. +* `executionRoleArn` - ARN of the task execution role that the Amazon ECS container agent and the Docker daemon can assume. +* `family` - Family of this task definition. +* `networkMode` - Docker networking mode to use for the containers in this task. +* `revision` - Revision of this task definition. +* `status` - Status of this task definition. +* `taskRoleArn` - ARN of the IAM role that containers in this task can assume. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ecs_task_execution.html.markdown b/website/docs/cdktf/typescript/d/ecs_task_execution.html.markdown new file mode 100644 index 00000000000..279d16dc8c3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ecs_task_execution.html.markdown @@ -0,0 +1,141 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_task_execution" +description: |- + Terraform data source for managing an AWS ECS (Elastic Container) Task Execution. +--- + + + +# Data Source: aws_ecs_task_execution + +Terraform data source for managing an AWS ECS (Elastic Container) Task Execution.
This data source calls the [RunTask](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html) API, allowing execution of one-time tasks that don't fit a standard resource lifecycle. See the [feature request issue](https://github.com/hashicorp/terraform-provider-aws/issues/1703) for additional context. + +~> **NOTE on plan operations:** This data source calls the `RunTask` API on every read operation, which means new task(s) may be created from a `terraform plan` command if all attributes are known. Placing this functionality behind a data source is an intentional trade-off to enable use cases requiring a one-time task execution without relying on [provisioners](https://developer.hashicorp.com/terraform/language/resources/provisioners/syntax). Caution should be taken to ensure the data source is only executed once, or that the resulting tasks can safely run in parallel. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEcsTaskExecution } from "./.gen/providers/aws/data-aws-ecs-task-execution"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEcsTaskExecution(this, "example", { + cluster: Token.asString(awsEcsClusterExample.id), + desiredCount: 1, + launchType: "FARGATE", + networkConfiguration: { + assignPublicIp: false, + securityGroups: [Token.asString(awsSecurityGroupExample.id)], + subnets: Token.asList(propertyAccess(awsSubnetExample, ["*", "id"])), + }, + taskDefinition: Token.asString(awsEcsTaskDefinitionExample.arn), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `cluster` - (Required) Short name or full Amazon Resource Name (ARN) of the cluster to run the task on. +* `taskDefinition` - (Required) The `family` and `revision` (`family:revision`) or full ARN of the task definition to run. If a revision isn't specified, the latest `ACTIVE` revision is used. + +The following arguments are optional: + +* `capacityProviderStrategy` - (Optional) Set of capacity provider strategies to use for the cluster. See below. +* `desiredCount` - (Optional) Number of instantiations of the specified task to place on your cluster. You can specify up to 10 tasks for each call. +* `enableEcsManagedTags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the tasks within the service. +* `enableExecuteCommand` - (Optional) Specifies whether to enable Amazon ECS Exec for the tasks within the service. +* `group` - (Optional) Name of the task group to associate with the task. The default value is the family name of the task definition. +* `launchType` - (Optional) Launch type on which to run your service. Valid values are `EC2`, `FARGATE`, and `EXTERNAL`. +* `networkConfiguration` - (Optional) Network configuration for the service. This parameter is required for task definitions that use the `awsvpc` network mode to receive their own Elastic Network Interface, and it is not supported for other network modes. See below. +* `overrides` - (Optional) A list of container overrides that specify the name of a container in the specified task definition and the overrides it should receive.
+* `placementConstraints` - (Optional) An array of placement constraint objects to use for the task. You can specify up to 10 constraints for each task. See below. +* `placementStrategy` - (Optional) The placement strategy objects to use for the task. You can specify a maximum of 5 strategy rules for each task. See below. +* `platformVersion` - (Optional) The platform version the task uses. A platform version is only specified for tasks hosted on Fargate. If one isn't specified, the `LATEST` platform version is used. +* `propagateTags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags aren't propagated. An error will be received if you specify the `SERVICE` option when running a task. Valid values are `TASK_DEFINITION` or `NONE`. +* `referenceId` - (Optional) The reference ID to use for the task. +* `startedBy` - (Optional) An optional tag specified when a task is started. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### capacity_provider_strategy + +* `capacityProvider` - (Required) Name of the capacity provider. +* `base` - (Optional) The number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Defaults to `0`. +* `weight` - (Optional) The relative percentage of the total number of launched tasks that should use the specified capacity provider. The `weight` value is taken into consideration after the `base` count of tasks has been satisfied. Defaults to `0`. + +### network_configuration + +* `subnets` - (Required) Subnets associated with the task or service. +* `securityGroups` - (Optional) Security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used. +* `assignPublicIp` - (Optional) Assign a public IP address to the ENI (Fargate launch type only). Valid values are `true` or `false`. Default `false`. + +For more information, see the [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html) documentation. + +### overrides + +* `containerOverrides` - (Optional) One or more container overrides that are sent to a task. See below. +* `cpu` - (Optional) The CPU override for the task. +* `executionRoleArn` - (Optional) Amazon Resource Name (ARN) of the task execution role override for the task. +* `inferenceAcceleratorOverrides` - (Optional) Elastic Inference accelerator override for the task. See below. +* `memory` - (Optional) The memory override for the task. +* `taskRoleArn` - (Optional) Amazon Resource Name (ARN) of the role that containers in this task can assume. + +### container_overrides + +* `command` - (Optional) The command to send to the container that overrides the default command from the Docker image or the task definition. +* `cpu` - (Optional) The number of cpu units reserved for the container, instead of the default value from the task definition. +* `environment` - (Optional) The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. See below.
+* `memory` - (Optional) The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. +* `memoryReservation` - (Optional) The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. +* `name` - (Optional) The name of the container that receives the override. This parameter is required if any override is specified. +* `resourceRequirements` - (Optional) The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. See below. + +### environment + +* `key` - (Required) The name of the key-value pair. For environment variables, this is the name of the environment variable. +* `value` - (Required) The value of the key-value pair. For environment variables, this is the value of the environment variable. + +### resource_requirements + +* `type` - (Required) The type of resource to assign to a container. Valid values are `GPU` or `InferenceAccelerator`. +* `value` - (Required) The value for the specified resource type. If the `GPU` type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. If the `InferenceAccelerator` type is used, the value matches the `deviceName` for an InferenceAccelerator specified in a task definition. + +### inference_accelerator_overrides + +* `deviceName` - (Optional) The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition. +* `deviceType` - (Optional) The Elastic Inference accelerator type to use. + +### placement_constraints + +* `expression` - (Optional) A cluster query language expression to apply to the constraint. The expression can have a maximum length of 2000 characters. You can't specify an expression if the constraint type is `distinctInstance`. +* `type` - (Optional) The type of constraint. Valid values are `distinctInstance` or `memberOf`. Use `distinctInstance` to ensure that each task in a particular group is running on a different container instance. Use `memberOf` to restrict the selection to a group of valid candidates. + +### placement_strategy + +* `field` - (Optional) The field to apply the placement strategy against. +* `type` - (Optional) The type of placement strategy. Valid values are `random`, `spread`, and `binpack`. + +For more information, see the [Placement Strategy](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_PlacementStrategy.html) documentation. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `taskArns` - A list of the provisioned task ARNs. +* `id` - The unique identifier, which is a comma-delimited string joining the `cluster` and `taskDefinition` attributes.
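Because the task run happens at read time, it is often useful to surface what was started. A minimal sketch (the cluster and task definition identifiers are placeholders, and the output name is our own choice) could expose the ARNs as an output:

```typescript
// Minimal sketch: run a task once per read and expose its ARNs.
import { Construct } from "constructs";
import { TerraformOutput, TerraformStack } from "cdktf";
import { DataAwsEcsTaskExecution } from "./.gen/providers/aws/data-aws-ecs-task-execution";

class TaskArnsExample extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    // Identifiers below are placeholders for resources defined elsewhere.
    const run = new DataAwsEcsTaskExecution(this, "example", {
      cluster: "example-cluster",
      taskDefinition: "example-family:1",
    });
    // Surface the ARNs of the started task(s).
    new TerraformOutput(this, "task_arns", {
      value: run.taskArns,
    });
  }
}
```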
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/efs_access_point.html.markdown b/website/docs/cdktf/typescript/d/efs_access_point.html.markdown new file mode 100644 index 00000000000..1694bb54b67 --- /dev/null +++ b/website/docs/cdktf/typescript/d/efs_access_point.html.markdown @@ -0,0 +1,63 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_access_point" +description: |- + Provides an Elastic File System (EFS) Access Point data source. +--- + + + +# Data Source: aws_efs_access_point + +Provides information about an Elastic File System (EFS) Access Point. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEfsAccessPoint } from "./.gen/providers/aws/data-aws-efs-access-point"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEfsAccessPoint(this, "test", { + accessPointId: "fsap-12345678", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `accessPointId` - (Required) ID that identifies the access point. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the access point. +* `arn` - Amazon Resource Name of the access point. +* `fileSystemArn` - Amazon Resource Name of the file system. +* `fileSystemId` - ID of the file system for which the access point is intended. +* `posixUser` - Single element list containing operating system user and group applied to all file system requests made using the access point. + * `gid` - Group ID + * `secondaryGids` - Secondary group IDs + * `uid` - User ID +* `rootDirectory` - Single element list containing information on the directory on the Amazon EFS file system that the access point provides access to. + * `creationInfo` - Single element list containing information on the creation permissions of the directory + * `ownerGid` - POSIX owner group ID + * `ownerUid` - POSIX owner user ID + * `permissions` - POSIX permissions mode + * `path` - Path exposed as the root directory +* `tags` - Key-value mapping of resource tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/efs_access_points.html.markdown b/website/docs/cdktf/typescript/d/efs_access_points.html.markdown new file mode 100644 index 00000000000..f96d79dd1cf --- /dev/null +++ b/website/docs/cdktf/typescript/d/efs_access_points.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_access_points" +description: |- + Provides information about multiple Elastic File System (EFS) Access Points. +--- + + + +# Data Source: aws_efs_access_points + +Provides information about multiple Elastic File System (EFS) Access Points. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsEfsAccessPoints } from "./.gen/providers/aws/data-aws-efs-access-points"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEfsAccessPoints(this, "test", { + fileSystemId: "fs-12345678", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `fileSystemId` - (Required) EFS File System identifier. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of Amazon Resource Names (ARNs). +* `id` - EFS File System identifier. +* `ids` - Set of identifiers. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/efs_file_system.html.markdown b/website/docs/cdktf/typescript/d/efs_file_system.html.markdown new file mode 100644 index 00000000000..8d6b24b3652 --- /dev/null +++ b/website/docs/cdktf/typescript/d/efs_file_system.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_file_system" +description: |- + Provides an Elastic File System (EFS) File System data source. +--- + + + +# Data Source: aws_efs_file_system + +Provides information about an Elastic File System (EFS) File System. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { VariableType, TerraformVariable, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEfsFileSystem } from "./.gen/providers/aws/data-aws-efs-file-system"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const fileSystemId = new TerraformVariable(this, "file_system_id", { + default: "", + type: VariableType.STRING, + }); + new DataAwsEfsFileSystem(this, "by_id", { + fileSystemId: fileSystemId.stringValue, + }); + new DataAwsEfsFileSystem(this, "by_tag", { + tags: { + Environment: "dev", + }, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `fileSystemId` - (Optional) ID that identifies the file system (e.g., fs-ccfc0d65). +* `creationToken` - (Optional) Restricts the list to the file system with this creation token. +* `tags` - (Optional) Restricts the list to the file system with these tags. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name of the file system. +* `availabilityZoneName` - The Availability Zone name in which the file system's One Zone storage classes exist. +* `availabilityZoneId` - The identifier of the Availability Zone in which the file system's One Zone storage classes exist. +* `dnsName` - DNS name for the filesystem per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html). +* `encrypted` - Whether EFS is encrypted. +* `kmsKeyId` - ARN for the KMS encryption key. +* `lifecyclePolicy` - File system [lifecycle policy](https://docs.aws.amazon.com/efs/latest/ug/API_LifecyclePolicy.html) object. +* `performanceMode` - File system performance mode. 
+* `provisionedThroughputInMibps` - The throughput, measured in MiB/s, that you want to provision for the file system. +* `tags` - A map of tags assigned to the file system. +* `throughputMode` - Throughput mode for the file system. +* `sizeInBytes` - Current byte count used by the file system. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/efs_mount_target.html.markdown b/website/docs/cdktf/typescript/d/efs_mount_target.html.markdown new file mode 100644 index 00000000000..8a8ea5c1c1d --- /dev/null +++ b/website/docs/cdktf/typescript/d/efs_mount_target.html.markdown @@ -0,0 +1,66 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_mount_target" +description: |- + Provides an Elastic File System (EFS) Mount Target data source. +--- + + + +# Data Source: aws_efs_mount_target + +Provides information about an Elastic File System (EFS) Mount Target. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { VariableType, TerraformVariable, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEfsMountTarget } from "./.gen/providers/aws/data-aws-efs-mount-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const mountTargetId = new TerraformVariable(this, "mount_target_id", { + default: "", + type: VariableType.STRING, + }); + new DataAwsEfsMountTarget(this, "by_id", { + mountTargetId: mountTargetId.stringValue, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `accessPointId` - (Optional) ID or ARN of the access point whose mount target you want to find. It must be included if a `fileSystemId` and `mountTargetId` are not included. +* `fileSystemId` - (Optional) ID or ARN of the file system whose mount target you want to find. It must be included if an `accessPointId` and `mountTargetId` are not included. +* `mountTargetId` - (Optional) ID or ARN of the mount target that you want to find. It must be included in your request if an `accessPointId` and `fileSystemId` are not included. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `fileSystemArn` - Amazon Resource Name of the file system for which the mount target is intended. +* `subnetId` - ID of the mount target's subnet. +* `ipAddress` - Address at which the file system may be mounted via the mount target. +* `securityGroups` - List of VPC security group IDs attached to the mount target. +* `dnsName` - DNS name for the EFS file system. +* `mountTargetDnsName` - The DNS name for the given subnet/AZ per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html). +* `networkInterfaceId` - The ID of the network interface that Amazon EFS created when it created the mount target. +* `availabilityZoneName` - The name of the Availability Zone (AZ) that the mount target resides in. +* `availabilityZoneId` - The unique and consistent identifier of the Availability Zone (AZ) that the mount target resides in.
+* `ownerId` - AWS account ID that owns the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/eip.html.markdown b/website/docs/cdktf/typescript/d/eip.html.markdown new file mode 100644 index 00000000000..5d78c511ec5 --- /dev/null +++ b/website/docs/cdktf/typescript/d/eip.html.markdown @@ -0,0 +1,151 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_eip" +description: |- + Provides details about a specific Elastic IP +--- + + + +# Data Source: aws_eip + +`awsEip` provides details about a specific Elastic IP. + +## Example Usage + +### Search By Allocation ID (VPC only) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEip } from "./.gen/providers/aws/data-aws-eip"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEip(this, "by_allocation_id", { + id: "eipalloc-12345678", + }); + } +} + +``` + +### Search By Filters (EC2-Classic or VPC) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEip } from "./.gen/providers/aws/data-aws-eip"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEip(this, "by_filter", { + filter: [ + { + name: "tag:Name", + values: ["exampleNameTagValue"], + }, + ], + }); + } +} + +``` + +### Search By Public IP (EC2-Classic or VPC) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEip } from "./.gen/providers/aws/data-aws-eip"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEip(this, "by_public_ip", { + publicIp: "1.2.3.4", + }); + } +} + +``` + +### Search By Tags (EC2-Classic or VPC) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsEip } from "./.gen/providers/aws/data-aws-eip"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsEip(this, "by_tags", { + tags: { + Name: "exampleNameTagValue", + }, + }); + } +} + +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +Elastic IPs in the current region. The given filters must match exactly one +Elastic IP whose data will be exported as attributes. + +* `filter` - (Optional) One or more name/value pairs to use as filters. 
There are several valid keys; for a full reference, check out the [EC2 API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAddresses.html).
+* `id` - (Optional) Allocation ID of the specific VPC EIP to retrieve. If a classic EIP is required, do NOT set `id`, only set `publicIp`.
+* `publicIp` - (Optional) Public IP of the specific EIP to retrieve.
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Elastic IP.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `associationId` - ID representing the association of the address with an instance in a VPC.
+* `domain` - Whether the address is for use in EC2-Classic (standard) or in a VPC (vpc).
+* `id` - If VPC Elastic IP, the allocation identifier. If EC2-Classic Elastic IP, the public IP address.
+* `instanceId` - ID of the instance that the address is associated with (if any).
+* `networkInterfaceId` - The ID of the network interface.
+* `networkInterfaceOwnerId` - The ID of the AWS account that owns the network interface.
+* `privateIp` - Private IP address associated with the Elastic IP address.
+* `privateDns` - Private DNS associated with the Elastic IP address.
+* `publicIp` - Public IP address of Elastic IP.
+* `publicDns` - Public DNS associated with the Elastic IP address.
+* `publicIpv4Pool` - ID of an address pool.
+* `carrierIp` - Carrier IP address.
+* `customerOwnedIpv4Pool` - The ID of a Customer Owned IP Pool. For more on customer-owned IP addresses, check out the [Customer-owned IP addresses guide](https://docs.aws.amazon.com/outposts/latest/userguide/outposts-networking-components.html#ip-addressing).
+* `customerOwnedIp` - Customer Owned IP.
+* `tags` - Key-value map of tags associated with Elastic IP.
+
+~> **Note:** The data source computes the `publicDns` and `privateDns` attributes according to the [VPC DNS Guide](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-dns.html#vpc-dns-hostnames) as they are not available with the EC2 API.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/eips.html.markdown b/website/docs/cdktf/typescript/d/eips.html.markdown
new file mode 100644
index 00000000000..9c91ed6b2a0
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/eips.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_eips"
+description: |-
+  Provides a list of Elastic IPs in a region
+---
+
+
+
+# Data Source: aws_eips
+
+Provides a list of Elastic IPs in a region.
+
+## Example Usage
+
+The following shows outputting all Elastic IPs with a specific tag value.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsEips } from "./.gen/providers/aws/data-aws-eips";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsEips(this, "example", {
+      tags: {
+        Env: "dev",
+      },
+    });
+    new TerraformOutput(this, "allocation_ids", {
+      value: example.allocationIds,
+    });
+    new TerraformOutput(this, "public_ips", {
+      value: example.publicIps,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Elastic IPs.
+
+More complex filters can be expressed using one or more `filter` sub-blocks, which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeAddresses.html).
+* `values` - (Required) Set of values that are accepted for the given field. An Elastic IP will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `allocationIds` - List of all the allocation IDs for addresses for use with EC2-VPC.
+* `publicIps` - List of all the Elastic IP addresses.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/eks_addon.html.markdown b/website/docs/cdktf/typescript/d/eks_addon.html.markdown
index c4498df4438..7b7209806d6 100644
--- a/website/docs/cdktf/typescript/d/eks_addon.html.markdown
+++ b/website/docs/cdktf/typescript/d/eks_addon.html.markdown
@@ -44,9 +44,9 @@ class MyConvertedCode extends TerraformStack {
   the names returned by [list-addon](https://docs.aws.amazon.com/cli/latest/reference/eks/list-addons.html).
 * `clusterName` – (Required) Name of the EKS Cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`).
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This data source exports the following attributes in addition to the arguments above:
 
 * `arn` - ARN of the EKS add-on.
 * `addonVersion` - Version of EKS add-on.
@@ -57,4 +57,4 @@ In addition to all arguments above, the following attributes are exported:
 * `createdAt` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the EKS add-on was created.
 * `modifiedAt` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the EKS add-on was updated.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/eks_addon_version.html.markdown b/website/docs/cdktf/typescript/d/eks_addon_version.html.markdown
index 3d61368609a..237d43a575b 100644
--- a/website/docs/cdktf/typescript/d/eks_addon_version.html.markdown
+++ b/website/docs/cdktf/typescript/d/eks_addon_version.html.markdown
@@ -63,11 +63,11 @@ class MyConvertedCode extends TerraformStack {
 * `kubernetesVersion` – (Required) Version of the EKS Cluster. Must be between 1-100 characters in length.
Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`). * `mostRecent` - (Optional) Determines if the most recent or default version of the addon should be returned. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `id` - Name of the add-on * `version` - Version of the EKS add-on. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/eks_cluster.html.markdown b/website/docs/cdktf/typescript/d/eks_cluster.html.markdown index b03dd208f08..eebbfef42e6 100644 --- a/website/docs/cdktf/typescript/d/eks_cluster.html.markdown +++ b/website/docs/cdktf/typescript/d/eks_cluster.html.markdown @@ -47,7 +47,9 @@ class MyConvertedCode extends TerraformStack { * `name` - (Required) Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`). -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - Name of the cluster * `arn` - ARN of the cluster. @@ -83,4 +85,4 @@ class MyConvertedCode extends TerraformStack { * `subnetIds` – List of subnet IDs * `vpcId` – The VPC associated with your cluster. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/eks_cluster_auth.html.markdown b/website/docs/cdktf/typescript/d/eks_cluster_auth.html.markdown index 02b2c1dc198..d6b82e1d523 100644 --- a/website/docs/cdktf/typescript/d/eks_cluster_auth.html.markdown +++ b/website/docs/cdktf/typescript/d/eks_cluster_auth.html.markdown @@ -67,9 +67,11 @@ class MyConvertedCode extends TerraformStack { * `name` - (Required) Name of the cluster -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - Name of the cluster. * `token` - Token to use to authenticate with the cluster. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/eks_clusters.html.markdown b/website/docs/cdktf/typescript/d/eks_clusters.html.markdown index 6acf8ae3d22..555cb3d0170 100644 --- a/website/docs/cdktf/typescript/d/eks_clusters.html.markdown +++ b/website/docs/cdktf/typescript/d/eks_clusters.html.markdown @@ -46,9 +46,11 @@ class MyConvertedCode extends TerraformStack { ``` -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `names` - Set of EKS clusters names - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/eks_node_group.html.markdown b/website/docs/cdktf/typescript/d/eks_node_group.html.markdown index a5da5a46199..6addcd5b50f 100644 --- a/website/docs/cdktf/typescript/d/eks_node_group.html.markdown +++ b/website/docs/cdktf/typescript/d/eks_node_group.html.markdown @@ -40,7 +40,9 @@ class MyConvertedCode extends TerraformStack { * `clusterName` - (Required) Name of the cluster. * `nodeGroupName` - (Required) Name of the node group. 
-## Attributes Reference
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
 
 * `id` - EKS Cluster name and EKS Node Group name separated by a colon (`:`).
 * `amiType` - Type of Amazon Machine Image (AMI) associated with the EKS Node Group.
@@ -75,4 +77,4 @@ class MyConvertedCode extends TerraformStack {
 * `tags` - Key-value map of resource tags.
 * `version` – Kubernetes version.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/eks_node_groups.html.markdown b/website/docs/cdktf/typescript/d/eks_node_groups.html.markdown
index acf428aea20..11c51e984ef 100644
--- a/website/docs/cdktf/typescript/d/eks_node_groups.html.markdown
+++ b/website/docs/cdktf/typescript/d/eks_node_groups.html.markdown
@@ -57,9 +57,11 @@ class MyConvertedCode extends TerraformStack {
 
 * `clusterName` - (Required) Name of the cluster.
 
-## Attributes Reference
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
 
 * `id` - Cluster name.
 * `names` - Set of all node group names in an EKS Cluster.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elastic_beanstalk_application.html.markdown b/website/docs/cdktf/typescript/d/elastic_beanstalk_application.html.markdown
new file mode 100644
index 00000000000..f2c8bd425c9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elastic_beanstalk_application.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_application"
+description: |-
+  Retrieve information about an Elastic Beanstalk Application
+---
+
+
+
+# Data Source: aws_elastic_beanstalk_application
+
+Retrieve information about an Elastic Beanstalk Application.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElasticBeanstalkApplication } from "./.gen/providers/aws/data-aws-elastic-beanstalk-application";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsElasticBeanstalkApplication(this, "example", {
+      name: "example",
+    });
+    new TerraformOutput(this, "arn", {
+      value: example.arn,
+    });
+    new TerraformOutput(this, "description", {
+      value: example.description,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the application
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the application
+* `arn` - ARN of the application.
+* `description` - Short description of the application
+
+Application version lifecycle (`appversionLifecycle`) is a nested attribute containing the following (see the sketch below):
+
+* `serviceRole` - ARN of an IAM service role under which the application version is deleted. Elastic Beanstalk must have permission to assume this role.
+* `maxCount` - Maximum number of application versions to retain.
+* `maxAgeInDays` - Number of days to retain an application version.
+* `deleteSourceFromS3` - Specifies whether to delete a version's source bundle from S3 when the application version is deleted.
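+
+As a rough illustration, the nested attribute can be read through the generated bindings and surfaced as an output. This is a minimal sketch, not part of the generated example above, and it assumes the bindings expose `appversionLifecycle` as an indexable list:
+
+```typescript
+// Hypothetical sketch: `example` is the DataAwsElasticBeanstalkApplication
+// instance from the Example Usage above; `appversionLifecycle` is assumed to
+// be exposed as a list of the nested attributes documented here.
+new TerraformOutput(this, "appversion_max_count", {
+  value: example.appversionLifecycle.get(0).maxCount,
+});
+```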
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elastic_beanstalk_hosted_zone.html.markdown b/website/docs/cdktf/typescript/d/elastic_beanstalk_hosted_zone.html.markdown
new file mode 100644
index 00000000000..773ca873865
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elastic_beanstalk_hosted_zone.html.markdown
@@ -0,0 +1,47 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_hosted_zone"
+description: |-
+  Get an elastic beanstalk hosted zone.
+---
+
+
+
+# Data Source: aws_elastic_beanstalk_hosted_zone
+
+Use this data source to get the ID of an [elastic beanstalk hosted zone](http://docs.aws.amazon.com/general/latest/gr/rande.html#elasticbeanstalk_region).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElasticBeanstalkHostedZone } from "./.gen/providers/aws/data-aws-elastic-beanstalk-hosted-zone";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsElasticBeanstalkHostedZone(this, "current", {});
+  }
+}
+
+```
+
+## Argument Reference
+
+* `region` - (Optional) Region you'd like the zone for. By default, fetches the current region.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the hosted zone.
+
+* `region` - Region of the hosted zone.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elastic_beanstalk_solution_stack.html.markdown b/website/docs/cdktf/typescript/d/elastic_beanstalk_solution_stack.html.markdown
new file mode 100644
index 00000000000..fe0648fe1d8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elastic_beanstalk_solution_stack.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_solution_stack"
+description: |-
+  Get an elastic beanstalk solution stack.
+---
+
+
+
+# Data Source: aws_elastic_beanstalk_solution_stack
+
+Use this data source to get the name of an Elastic Beanstalk solution stack.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElasticBeanstalkSolutionStack } from "./.gen/providers/aws/data-aws-elastic-beanstalk-solution-stack";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsElasticBeanstalkSolutionStack(this, "multi_docker", {
+      mostRecent: true,
+      nameRegex: "^64bit Amazon Linux (.*) Multi-container Docker (.*)$",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `mostRecent` - (Optional) If more than one result is returned, use the most
+recent solution stack.
+
+* `nameRegex` - Regex string to apply to the solution stack list returned
+by AWS. See [Elastic Beanstalk Supported Platforms][beanstalk-platforms] from
+AWS documentation for reference solution stack names.
+
+~> **NOTE:** If more or fewer than a single match is returned by the search,
+Terraform will fail. Ensure that your search is specific enough to return
+a single solution stack, or use `mostRecent` to choose the most recent one.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` - Name of the solution stack.
+
+[beanstalk-platforms]: http://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html "AWS Elastic Beanstalk Supported Platforms documentation"
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elasticache_cluster.html.markdown b/website/docs/cdktf/typescript/d/elasticache_cluster.html.markdown
new file mode 100644
index 00000000000..14063880ed3
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elasticache_cluster.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_cluster"
+description: |-
+  Get information on an ElastiCache Cluster resource.
+---
+
+
+
+# Data Source: aws_elasticache_cluster
+
+Use this data source to get information about an ElastiCache Cluster.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElasticacheCluster } from "./.gen/providers/aws/data-aws-elasticache-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsElasticacheCluster(this, "my_cluster", {
+      clusterId: "my-cluster-id",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `clusterId` – (Required) Group identifier.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `nodeType` – The cluster node type.
+* `numCacheNodes` – The number of cache nodes that the cache cluster has.
+* `engine` – Name of the cache engine.
+* `engineVersion` – Version number of the cache engine.
+* `ipDiscovery` - The IP version advertised in the discovery protocol.
+* `networkType` - The IP versions for cache cluster connections.
+* `subnetGroupName` – Name of the subnet group associated to the cache cluster.
+* `securityGroupIds` – List of VPC security groups associated with the cache cluster.
+* `parameterGroupName` – Name of the parameter group associated with this cache cluster.
+* `replicationGroupId` - The replication group to which this cache cluster belongs.
+* `logDeliveryConfiguration` - Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) delivery settings.
+* `maintenanceWindow` – Specifies the weekly time range for when maintenance
+on the cache cluster is performed.
+* `snapshotWindow` - Daily time range (in UTC) during which ElastiCache will
+begin taking a daily snapshot of the cache cluster.
+* `snapshotRetentionLimit` - The number of days for which ElastiCache will
+retain automatic cache cluster snapshots before deleting them.
+* `availabilityZone` - Availability Zone for the cache cluster.
+* `notificationTopicArn` – An ARN of an
+SNS topic that ElastiCache notifications get sent to.
+* `port` – The port number on which each of the cache nodes will
+accept connections.
+* `configurationEndpoint` - (Memcached only) Configuration endpoint to allow host discovery.
+* `clusterAddress` - (Memcached only) DNS name of the cache cluster without the port appended.
+* `preferredOutpostArn` - The outpost ARN in which the cache cluster was created, if it was created in an outpost.
+* `cacheNodes` - List of node objects including `id`, `address`, `port`, `availabilityZone` and `outpostArn`.
+  Referenceable, e.g., as `${dataAwsElasticacheClusterBarCacheNodes0Address}`
+* `tags` - Tags assigned to the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elasticache_replication_group.html.markdown b/website/docs/cdktf/typescript/d/elasticache_replication_group.html.markdown
new file mode 100644
index 00000000000..e413f6e6229
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elasticache_replication_group.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_replication_group"
+description: |-
+  Get information on an ElastiCache Replication Group resource.
+---
+
+
+
+# Data Source: aws_elasticache_replication_group
+
+Use this data source to get information about an ElastiCache Replication Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElasticacheReplicationGroup } from "./.gen/providers/aws/data-aws-elasticache-replication-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsElasticacheReplicationGroup(this, "bar", {
+      replicationGroupId: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `replicationGroupId` – (Required) Identifier for the replication group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `description` - Description of the replication group.
+* `arn` - ARN of the created ElastiCache Replication Group.
+* `authTokenEnabled` - Whether an AuthToken (password) is enabled.
+* `automaticFailoverEnabled` - Whether a read-only replica will be automatically promoted to read/write primary if the existing primary fails.
+* `nodeType` – The cluster node type.
+* `numCacheClusters` – The number of cache clusters that the replication group has.
+* `numNodeGroups` - Number of node groups (shards) for the replication group.
+* `memberClusters` - Identifiers of all the nodes that are part of this replication group.
+* `multiAzEnabled` - Whether Multi-AZ Support is enabled for the replication group.
+* `replicasPerNodeGroup` - Number of replica nodes in each node group.
+* `logDeliveryConfiguration` - Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) delivery settings.
+* `snapshotWindow` - Daily time range (in UTC) during which ElastiCache begins taking a daily snapshot of your node group (shard).
+* `snapshotRetentionLimit` - The number of days for which ElastiCache retains automatic cache cluster snapshots before deleting them.
+* `port` – The port number on which the configuration endpoint will accept connections.
+* `configurationEndpointAddress` - The configuration endpoint address to allow host discovery.
+* `primaryEndpointAddress` - The endpoint of the primary node in this node group (shard).
+* `readerEndpointAddress` - The endpoint of the reader node in this node group (shard).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elasticache_subnet_group.html.markdown b/website/docs/cdktf/typescript/d/elasticache_subnet_group.html.markdown
new file mode 100644
index 00000000000..7b317992cf9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elasticache_subnet_group.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_subnet_group"
+description: |-
+  Provides information about an ElastiCache Subnet Group.
+---
+
+
+
+# Data Source: aws_elasticache_subnet_group
+
+Provides information about an ElastiCache Subnet Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElasticacheSubnetGroup } from "./.gen/providers/aws/data-aws-elasticache-subnet-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsElasticacheSubnetGroup(this, "example", {
+      name: "my-subnet-group",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the subnet group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the subnet group.
+* `arn` - ARN of the subnet group.
+* `description` - Description of the subnet group.
+* `subnetIds` - Set of VPC Subnet IDs of the subnet group.
+* `tags` - Map of tags assigned to the subnet group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elasticache_user.html.markdown b/website/docs/cdktf/typescript/d/elasticache_user.html.markdown
new file mode 100644
index 00000000000..0510ec498d5
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elasticache_user.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_user"
+description: |-
+  Get information on an ElastiCache User resource.
+---
+
+
+
+# Data Source: aws_elasticache_user
+
+Use this data source to get information about an ElastiCache User.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElasticacheUser } from "./.gen/providers/aws/data-aws-elasticache-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsElasticacheUser(this, "bar", {
+      userId: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `userId` – (Required) Identifier for the user.
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `userId` - Identifier for the user. +* `userName` - User name of the user. +* `accessString` - String for what access a user possesses within the associated ElastiCache replication groups or clusters. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/elasticsearch_domain.html.markdown b/website/docs/cdktf/typescript/d/elasticsearch_domain.html.markdown new file mode 100644 index 00000000000..f1ac83548d3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/elasticsearch_domain.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "Elasticsearch" +layout: "aws" +page_title: "AWS: aws_elasticsearch_domain" +description: |- + Get information on an Elasticsearch Domain resource. +--- + + + +# Data Source: aws_elasticsearch_domain + +Use this data source to get information about an Elasticsearch Domain + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsElasticsearchDomain } from "./.gen/providers/aws/data-aws-elasticsearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsElasticsearchDomain(this, "my_domain", { + domainName: "my-domain-name", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `domainName` – (Required) Name of the domain. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accessPolicies` – The policy document attached to the domain. +* `advancedOptions` - Key-value string pairs to specify advanced configuration options. +* `advancedSecurityOptions` - Status of the Elasticsearch domain's advanced security options. The block consists of the following attributes: + * `enabled` - Whether advanced security is enabled. + * `internalUserDatabaseEnabled` - Whether the internal user database is enabled. +* `arn` – The ARN of the domain. +* `autoTuneOptions` - Configuration of the Auto-Tune options of the domain. + * `desiredState` - The Auto-Tune desired state for the domain. + * `maintenanceSchedule` - A list of the nested configurations for the Auto-Tune maintenance windows of the domain. + * `startAt` - Date and time at which the Auto-Tune maintenance schedule starts in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). + * `duration` - Configuration block for the duration of the Auto-Tune maintenance window. + * `value` - Duration of an Auto-Tune maintenance window. + * `unit` - Unit of time. + * `cronExpressionForRecurrence` - Cron expression for an Auto-Tune maintenance schedule. + * `rollbackOnDisable` - Whether the domain is set to roll back to default Auto-Tune settings when disabling Auto-Tune. +* `clusterConfig` - Cluster configuration of the domain. + * `coldStorageOptions` - Configuration block containing cold storage configuration. + * `enabled` - Indicates cold storage is enabled. + * `instanceType` - Instance type of data nodes in the cluster. + * `instanceCount` - Number of instances in the cluster. + * `dedicatedMasterEnabled` - Indicates whether dedicated master nodes are enabled for the cluster. 
+    * `dedicatedMasterType` - Instance type of the dedicated master nodes in the cluster.
+    * `dedicatedMasterCount` - Number of dedicated master nodes in the cluster.
+    * `zoneAwarenessEnabled` - Indicates whether zone awareness is enabled.
+    * `zoneAwarenessConfig` - Configuration block containing zone awareness settings.
+        * `availabilityZoneCount` - Number of availability zones used.
+    * `warmEnabled` - Whether warm storage is enabled.
+    * `warmCount` - The number of warm nodes in the cluster.
+    * `warmType` - The instance type for the Elasticsearch cluster's warm nodes.
+* `cognitoOptions` - Domain Amazon Cognito Authentication options for Kibana.
+    * `enabled` - Whether Amazon Cognito Authentication is enabled.
+    * `userPoolId` - The Cognito User pool used by the domain.
+    * `identityPoolId` - The Cognito Identity pool used by the domain.
+    * `roleArn` - The IAM Role with the AmazonESCognitoAccess policy attached.
+* `created` – Status of the creation of the domain.
+* `deleted` – Status of the deletion of the domain.
+* `domainId` – Unique identifier for the domain.
+* `ebsOptions` - EBS Options for the instances in the domain.
+    * `ebsEnabled` - Whether EBS volumes are attached to data nodes in the domain.
+    * `throughput` - The throughput (in MiB/s) of the EBS volumes attached to data nodes.
+    * `volumeType` - The type of EBS volumes attached to data nodes.
+    * `volumeSize` - The size of EBS volumes attached to data nodes (in GB).
+    * `iops` - The baseline input/output (I/O) performance of EBS volumes attached to data nodes.
+* `elasticsearchVersion` – Elasticsearch version for the domain.
+* `encryptionAtRest` - Domain encryption at rest related options.
+    * `enabled` - Whether encryption at rest is enabled in the domain.
+    * `kmsKeyId` - The KMS key ID used to encrypt data at rest.
+* `endpoint` – Domain-specific endpoint used to submit index, search, and data upload requests.
+* `kibanaEndpoint` - Domain-specific endpoint used to access the Kibana application.
+* `logPublishingOptions` - Domain log publishing related options.
+    * `logType` - The type of Elasticsearch log being published.
+    * `cloudwatchLogGroupArn` - The CloudWatch Log Group where the logs are published.
+    * `enabled` - Whether log publishing is enabled.
+* `nodeToNodeEncryption` - Domain in transit encryption related options.
+    * `enabled` - Whether node to node encryption is enabled.
+* `processing` – Status of a configuration change in the domain.
+* `snapshotOptions` – Domain snapshot related options.
+    * `automatedSnapshotStartHour` - Hour during which the service takes an automated daily snapshot of the indices in the domain.
+* `tags` - Tags assigned to the domain.
+* `vpcOptions` - VPC Options for private Elasticsearch domains.
+    * `availabilityZones` - The availability zones used by the domain.
+    * `securityGroupIds` - The security groups used by the domain.
+    * `subnetIds` - The subnets used by the domain.
+    * `vpcId` - The VPC used by the domain.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elb.html.markdown b/website/docs/cdktf/typescript/d/elb.html.markdown
new file mode 100644
index 00000000000..2f089503f85
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elb.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_elb"
+description: |-
+  Provides a classic Elastic Load Balancer data source.
+---
+
+
+
+# Data Source: aws_elb
+
+Provides information about a "classic" Elastic Load Balancer (ELB).
+See [LB Data Source](/docs/providers/aws/d/lb.html) if you are looking for "v2"
+Application Load Balancer (ALB) or Network Load Balancer (NLB).
+
+This data source can prove useful when a module accepts an LB as an input
+variable and needs to determine, for example, the security groups associated
+with it.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { VariableType, TerraformVariable, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElb } from "./.gen/providers/aws/data-aws-elb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const lbName = new TerraformVariable(this, "lb_name", {
+      default: "",
+      type: VariableType.STRING,
+    });
+    new DataAwsElb(this, "test", {
+      name: lbName.stringValue,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Unique name of the load balancer.
+
+## Attribute Reference
+
+See the [ELB Resource](/docs/providers/aws/r/elb.html) for details on the
+returned attributes - they are identical.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elb_hosted_zone_id.html.markdown b/website/docs/cdktf/typescript/d/elb_hosted_zone_id.html.markdown
new file mode 100644
index 00000000000..700a8d1c548
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elb_hosted_zone_id.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_elb_hosted_zone_id"
+description: |-
+  Get AWS Elastic Load Balancing Hosted Zone Id
+---
+
+
+
+# Data Source: aws_elb_hosted_zone_id
+
+Use this data source to get the HostedZoneId of the AWS Elastic Load Balancing service
+in a given region for the purpose of using it in an AWS Route53 Alias record.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElbHostedZoneId } from "./.gen/providers/aws/data-aws-elb-hosted-zone-id";
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new DataAwsElbHostedZoneId(this, "main", {});
+    // Note: `awsElbMain` and `primary` refer to an aws_elb resource and a
+    // Route53 zone defined elsewhere in the original configuration; they are
+    // not part of this converted snippet.
+    new Route53Record(this, "www", {
+      alias: {
+        evaluateTargetHealth: true,
+        name: Token.asString(awsElbMain.dnsName),
+        zoneId: Token.asString(main.id),
+      },
+      name: "example.com",
+      type: "A",
+      zoneId: primary.zoneId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `region` - (Optional) Name of the region whose AWS ELB HostedZoneId is desired.
+  Defaults to the region from the AWS provider configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the AWS ELB HostedZoneId in the selected region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/elb_service_account.html.markdown b/website/docs/cdktf/typescript/d/elb_service_account.html.markdown
new file mode 100644
index 00000000000..6e711e465f2
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/elb_service_account.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_elb_service_account"
+description: |-
+  Get AWS Elastic Load Balancing Service Account
+---
+
+
+
+# Data Source: aws_elb_service_account
+
+Use this data source to get the Account ID of the [AWS Elastic Load Balancing Service Account](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-access-logs.html#attach-bucket-policy)
+in a given region for the purpose of referencing it in an S3 bucket policy.
+
+~> **Note:** For AWS Regions opened since Jakarta (`apSoutheast3`) in December 2021, AWS [documents that](https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/enable-access-logs.html#attach-bucket-policy) a [service principal name](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-services) should be used instead of an AWS account ID in any relevant IAM policy. See the sketch after the argument list below.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsElbServiceAccount } from "./.gen/providers/aws/data-aws-elb-service-account";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { Elb } from "./.gen/providers/aws/elb";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const elbLogs = new S3Bucket(this, "elb_logs", {
+      bucket: "my-elb-tf-test-bucket",
+    });
+    new S3BucketAcl(this, "elb_logs_acl", {
+      acl: "private",
+      bucket: elbLogs.id,
+    });
+    const main = new DataAwsElbServiceAccount(this, "main", {});
+    const allowElbLogging = new DataAwsIamPolicyDocument(
+      this,
+      "allow_elb_logging",
+      {
+        statement: [
+          {
+            actions: ["s3:PutObject"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: [Token.asString(main.arn)],
+                type: "AWS",
+              },
+            ],
+            resources: ["${" + elbLogs.arn + "}/AWSLogs/*"],
+          },
+        ],
+      }
+    );
+    new Elb(this, "bar", {
+      accessLogs: {
+        bucket: elbLogs.id,
+        interval: 5,
+      },
+      availabilityZones: ["us-west-2a"],
+      listener: [
+        {
+          instancePort: 8000,
+          instanceProtocol: "http",
+          lbPort: 80,
+          lbProtocol: "http",
+        },
+      ],
+      name: "my-foobar-terraform-elb",
+    });
+    const awsS3BucketPolicyAllowElbLogging = new S3BucketPolicy(
+      this,
+      "allow_elb_logging_5",
+      {
+        bucket: elbLogs.id,
+        policy: Token.asString(allowElbLogging.json),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketPolicyAllowElbLogging.overrideLogicalId("allow_elb_logging");
+  }
+}
+
+```
+
+## Argument Reference
+
+* `region` - (Optional) Name of the region whose AWS ELB account ID is desired.
+  Defaults to the region from the AWS provider configuration.
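+
+For the newer regions mentioned in the note above, a policy statement would name the ELB log-delivery service principal instead of this data source's account ARN. A minimal sketch, assuming the `logdelivery.elasticloadbalancing.amazonaws.com` principal documented by AWS applies to your region (hypothetical, not part of the generated example):
+
+```typescript
+// Hypothetical sketch for regions opened since December 2021: grant the ELB
+// log-delivery service principal write access instead of the account ARN.
+// `elbLogs` is the S3 bucket from the Example Usage above.
+new DataAwsIamPolicyDocument(this, "allow_elb_logging_via_principal", {
+  statement: [
+    {
+      actions: ["s3:PutObject"],
+      effect: "Allow",
+      principals: [
+        {
+          identifiers: ["logdelivery.elasticloadbalancing.amazonaws.com"],
+          type: "Service",
+        },
+      ],
+      resources: ["${" + elbLogs.arn + "}/AWSLogs/*"],
+    },
+  ],
+});
+```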
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the AWS ELB service account in the selected region.
+* `arn` - ARN of the AWS ELB service account in the selected region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/emr_release_labels.markdown b/website/docs/cdktf/typescript/d/emr_release_labels.markdown
new file mode 100644
index 00000000000..12b16cf6f1a
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/emr_release_labels.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "EMR"
+layout: "aws"
+page_title: "AWS: aws_emr_release_labels"
+description: |-
+  Retrieve information about EMR Release Labels
+---
+
+
+
+# Data Source: aws_emr_release_labels
+
+Retrieve information about EMR Release Labels.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsEmrReleaseLabels } from "./.gen/providers/aws/data-aws-emr-release-labels";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsEmrReleaseLabels(this, "example", {
+      filters: {
+        application: "spark@2.1.0",
+        prefix: "emr-5",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filters` – (Optional) Filters the results of the request. Prefix specifies the prefix of release labels to return. Application specifies the application (with/without version) of release labels to return. See [Filters](#filters).
+
+### Filters
+
+* `application` - (Optional) Release label application filter. For example, `spark@2.1.0` or `spark`.
+* `prefix` - (Optional) Release label version prefix filter. For example, `emr-5`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `releaseLabels` - Returned release labels.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/emrcontainers_virtual_cluster.markdown b/website/docs/cdktf/typescript/d/emrcontainers_virtual_cluster.markdown
new file mode 100644
index 00000000000..17b3f029cd0
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/emrcontainers_virtual_cluster.markdown
@@ -0,0 +1,64 @@
+---
+subcategory: "EMR Containers"
+layout: "aws"
+page_title: "AWS: aws_emrcontainers_virtual_cluster"
+description: |-
+  Retrieve information about an EMR Containers (EMR on EKS) Virtual Cluster
+---
+
+
+
+# Data Source: aws_emrcontainers_virtual_cluster
+
+Retrieve information about an EMR Containers (EMR on EKS) Virtual Cluster.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsEmrcontainersVirtualCluster } from "./.gen/providers/aws/data-aws-emrcontainers-virtual-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsEmrcontainersVirtualCluster(this, "example", {
+      virtualClusterId: "example id",
+    });
+    new TerraformOutput(this, "arn", {
+      value: example.arn,
+    });
+    new TerraformOutput(this, "name", {
+      value: example.name,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `virtualClusterId` - (Required) ID of the cluster.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the cluster.
+* `name` - Name of the cluster.
+* `arn` - ARN of the cluster.
+* `containerProvider` - Nested attribute containing information about the underlying container provider (EKS cluster) for your EMR Containers cluster.
+    * `id` - The name of the container provider that is running your EMR Containers cluster
+    * `info` - Nested list containing information about the configuration of the container provider
+        * `eksInfo` - Nested list containing EKS-specific information about the cluster where the EMR Containers cluster is running
+            * `namespace` - The namespace where the EMR Containers cluster is running
+    * `type` - The type of the container provider
+* `createdAt` - Unix epoch time stamp in seconds for when the cluster was created.
+* `state` - Status of the virtual cluster. One of `running`, `terminating`, `terminated`, `arrested`.
+* `tags` - Key-value mapping of resource tags.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/fsx_openzfs_snapshot.html.markdown b/website/docs/cdktf/typescript/d/fsx_openzfs_snapshot.html.markdown
new file mode 100644
index 00000000000..fa8c407f340
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/fsx_openzfs_snapshot.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_openzfs_snapshot"
+description: |-
+  Get information on an Amazon FSx for OpenZFS snapshot.
+---
+
+
+
+# Data Source: aws_fsx_openzfs_snapshot
+
+Use this data source to get information about an Amazon FSx for OpenZFS Snapshot for use when provisioning new Volumes (see the sketch after the argument list below).
+
+## Example Usage
+
+### Root volume Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsFsxOpenzfsSnapshot } from "./.gen/providers/aws/data-aws-fsx-openzfs-snapshot";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsFsxOpenzfsSnapshot(this, "example", {
+      filter: [
+        {
+          name: "volume-id",
+          values: ["fsvol-073a32b6098a73feb"],
+        },
+      ],
+      mostRecent: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `mostRecent` - (Optional) If more than one result is returned, use the most recent snapshot.
+
+* `snapshotIds` - (Optional) Returns information on a specific snapshot_id.
+
+* `filter` - (Optional) One or more name/value pairs to filter by. The
+supported names are `file-system-id` or `volume-id`.
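+
+As referenced in the intro above, the snapshot can seed a new volume. A minimal sketch, assuming the data source from the Example Usage above were captured in a const `example`, and that the `aws_fsx_openzfs_volume` resource's `originSnapshot` block is used to clone from the snapshot ARN (hypothetical names and IDs):
+
+```typescript
+// Hypothetical sketch: clone a new OpenZFS volume from the looked-up snapshot.
+// `FsxOpenzfsVolume` comes from "./.gen/providers/aws/fsx-openzfs-volume";
+// `parentVolumeId` would point at an existing volume in the file system.
+new FsxOpenzfsVolume(this, "cloned", {
+  name: "cloned-volume",
+  originSnapshot: {
+    copyStrategy: "CLONE",
+    snapshotArn: example.arn,
+  },
+  parentVolumeId: "fsvol-0123456789abcdef0",
+});
+```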
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the snapshot.
+* `creationTime` - Time that the resource was created.
+* `id` - Identifier of the snapshot, e.g., `fsvolsnap-12345678`.
+* `name` - Name of the snapshot.
+* `snapshotId` - ID of the snapshot.
+* `tags` - List of Tag values, with a maximum of 50 elements.
+* `volumeId` - ID of the volume that the snapshot is of.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/fsx_windows_file_system.html.markdown b/website/docs/cdktf/typescript/d/fsx_windows_file_system.html.markdown
new file mode 100644
index 00000000000..836b329aa4f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/fsx_windows_file_system.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_windows_file_system"
+description: |-
+  Retrieve information on FSx Windows File System.
+---
+
+
+
+# Data Source: aws_fsx_windows_file_system
+
+Retrieve information on FSx Windows File System.
+
+## Example Usage
+
+### Basic Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsFsxWindowsFileSystem } from "./.gen/providers/aws/data-aws-fsx-windows-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsFsxWindowsFileSystem(this, "example", {
+      id: "fs-12345678",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `id` - (Required) Identifier of the file system (e.g. `fs-12345678`).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `activeDirectoryId` - The ID for the Microsoft Active Directory instance that the file system is joined to.
+* `aliases` - An array of DNS alias names associated with the Amazon FSx file system.
+* `arn` - Amazon Resource Name of the file system.
+* `auditLogConfiguration` - The configuration that Amazon FSx for Windows File Server uses to audit and log user accesses of files, folders, and file shares on the Amazon FSx for Windows File Server file system.
+* `automaticBackupRetentionDays` - The number of days to retain automatic backups.
+* `copyTagsToBackups` - A boolean flag indicating whether tags on the file system should be copied to backups.
+* `dailyAutomaticBackupStartTime` - The preferred time (in `hh:mm` format) to take daily automatic backups, in the UTC time zone.
+* `deploymentType` - The file system deployment type.
+* `dnsName` - DNS name for the file system (e.g. `fs-12345678.corp.example.com`).
+* `id` - Identifier of the file system (e.g. `fs-12345678`).
+* `kmsKeyId` - ARN for the KMS Key to encrypt the file system at rest.
+* `ownerId` - AWS account identifier that created the file system.
+* `preferredSubnetId` - Specifies the subnet in which you want the preferred file server to be located.
+* `preferredFileServerIp` - The IP address of the primary, or preferred, file server.
+* `storageCapacity` - The storage capacity of the file system in gibibytes (GiB).
+* `storageType` - The type of storage the file system is using. If set to `ssd`, the file system uses solid state drive storage.
If set to `hdd`, the file system uses hard disk drive storage. +* `subnetIds` - Specifies the IDs of the subnets that the file system is accessible from. +* `tags` - The tags to associate with the file system. +* `throughputCapacity` - Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `8` and maximum of `2048`. +* `vpcId` - The ID of the primary virtual private cloud (VPC) for the file system. +* `weeklyMaintenanceStartTime` - The preferred start time (in `d:hh:mm` format) to perform weekly maintenance, in the UTC time zone. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/globalaccelerator_accelerator.html.markdown b/website/docs/cdktf/typescript/d/globalaccelerator_accelerator.html.markdown new file mode 100644 index 00000000000..8bad8ed275e --- /dev/null +++ b/website/docs/cdktf/typescript/d/globalaccelerator_accelerator.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_accelerator" +description: |- + Provides a Global Accelerator accelerator data source. +--- + + + +# Data Source: aws_globalaccelerator_accelerator + +Provides information about a Global Accelerator accelerator. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { VariableType, TerraformVariable, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGlobalacceleratorAccelerator } from "./.gen/providers/aws/data-aws-globalaccelerator-accelerator"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const acceleratorArn = new TerraformVariable(this, "accelerator_arn", { + default: "", + type: VariableType.STRING, + }); + const acceleratorName = new TerraformVariable(this, "accelerator_name", { + default: "", + type: VariableType.STRING, + }); + new DataAwsGlobalacceleratorAccelerator(this, "example", { + arn: acceleratorArn.stringValue, + name: acceleratorName.stringValue, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) Full ARN of the Global Accelerator. +* `name` - (Optional) Unique name of the Global Accelerator. + +~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. + +## Attribute Reference + +See the [`awsGlobalacceleratorAccelerator` resource](/docs/providers/aws/r/globalaccelerator_accelerator.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/globalaccelerator_custom_routing_accelerator.html.markdown b/website/docs/cdktf/typescript/d/globalaccelerator_custom_routing_accelerator.html.markdown new file mode 100644 index 00000000000..e82e5039017 --- /dev/null +++ b/website/docs/cdktf/typescript/d/globalaccelerator_custom_routing_accelerator.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_custom_routing_accelerator" +description: |- + Provides a Global Accelerator custom routing accelerator data source. 
+--- + + + +# Data Source: aws_globalaccelerator_custom_routing_accelerator + +Provides information about a Global Accelerator custom routing accelerator. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { VariableType, TerraformVariable, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGlobalacceleratorCustomRoutingAccelerator } from "./.gen/providers/aws/data-aws-globalaccelerator-custom-routing-accelerator"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const acceleratorArn = new TerraformVariable(this, "accelerator_arn", { + default: "", + type: VariableType.STRING, + }); + const acceleratorName = new TerraformVariable(this, "accelerator_name", { + default: "", + type: VariableType.STRING, + }); + new DataAwsGlobalacceleratorCustomRoutingAccelerator(this, "example", { + arn: acceleratorArn.stringValue, + name: acceleratorName.stringValue, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) Full ARN of the custom routing accelerator. +* `name` - (Optional) Unique name of the custom routing accelerator. + +~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. + +## Attribute Reference + +See the [`awsGlobalacceleratorCustomRoutingAccelerator` resource](/docs/providers/aws/r/globalaccelerator_custom_routing_accelerator.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/glue_catalog_table.html.markdown b/website/docs/cdktf/typescript/d/glue_catalog_table.html.markdown new file mode 100644 index 00000000000..25ce8bb3703 --- /dev/null +++ b/website/docs/cdktf/typescript/d/glue_catalog_table.html.markdown @@ -0,0 +1,135 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_catalog_table" +description: |- + Get information on AWS Glue Data Catalog Table +--- + + + +# Data Source: aws_glue_catalog_table + +This data source can be used to fetch information about an AWS Glue Data Catalog Table. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGlueCatalogTable } from "./.gen/providers/aws/data-aws-glue-catalog-table"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsGlueCatalogTable(this, "example", { + databaseName: "MyCatalogDatabase", + name: "MyCatalogTable", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the table. +* `databaseName` - (Required) Name of the metadata database where the table metadata resides. +* `catalogId` - (Optional) ID of the Glue Catalog and database where the table metadata resides. 
If omitted, this defaults to the current AWS Account ID. +* `queryAsOfTime` - (Optional) The time as of when to read the table contents. If not set, the most recent transaction commit time will be used. Cannot be specified along with `transactionId`. Specified in RFC 3339 format, e.g. `2006-01-02T15:04:05Z07:00`. +* `transactionId` - (Optional) The transaction ID at which to read the table contents. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Catalog ID, Database name, and Table name. +* `arn` - The ARN of the Glue Table. +* `description` - Description of the table. +* `owner` - Owner of the table. +* `parameters` - Properties associated with this table, as a list of key-value pairs. +* `partitionIndex` - Configuration block for a maximum of 3 partition indexes. See [`partitionIndex`](#partition_index) below. +* `partitionKeys` - Configuration block of columns by which the table is partitioned. Only primitive types are supported as partition keys. See [`partitionKeys`](#partition_keys) below. +* `retention` - Retention time for this table. +* `storageDescriptor` - Configuration block for information about the physical storage of this table. For more information, refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-tables.html#aws-glue-api-catalog-tables-StorageDescriptor). See [`storageDescriptor`](#storage_descriptor) below. +* `tableType` - Type of this table (EXTERNAL_TABLE, VIRTUAL_VIEW, etc.). While optional, some Athena DDL queries such as `ALTER TABLE` and `SHOW CREATE TABLE` will fail if this argument is empty. +* `targetTable` - Configuration block of a target table for resource linking. See [`targetTable`](#target_table) below. +* `viewExpandedText` - If the table is a view, the expanded text of the view; otherwise null. +* `viewOriginalText` - If the table is a view, the original text of the view; otherwise null. + +### partition_index + +* `indexName` - Name of the partition index. +* `keys` - Keys for the partition index. + +### partition_keys + +* `comment` - Free-form text comment. +* `name` - Name of the Partition Key. +* `type` - Datatype of data in the Partition Key. + +### storage_descriptor + +* `bucketColumns` - List of reducer grouping columns, clustering columns, and bucketing columns in the table. +* `columns` - Configuration block for columns in the table. See [`columns`](#columns) below. +* `compressed` - Whether the data in the table is compressed. +* `inputFormat` - Input format: SequenceFileInputFormat (binary), or TextInputFormat, or a custom format. +* `location` - Physical location of the table. By default, this takes the form of the warehouse location, followed by the database location in the warehouse, followed by the table name. +* `numberOfBuckets` - Must be specified if the table contains any dimension columns. +* `outputFormat` - Output format: SequenceFileOutputFormat (binary), or IgnoreKeyTextOutputFormat, or a custom format. +* `parameters` - User-supplied properties in key-value form. +* `schemaReference` - Object that references a schema stored in the AWS Glue Schema Registry. See [`schemaReference`](#schema_reference) below. +* `serDeInfo` - Configuration block for serialization and deserialization ("SerDe") information. See [`serDeInfo`](#ser_de_info) below. +* `skewedInfo` - Configuration block with information about values that appear very frequently in a column (skewed values). See [`skewedInfo`](#skewed_info) below.
+* `sortColumns` - Configuration block for the sort order of each bucket in the table. See [`sortColumns`](#sort_columns) below. +* `storedAsSubDirectories` - Whether the table data is stored in subdirectories. + +#### columns + +* `comment` - Free-form text comment. +* `name` - Name of the Column. +* `parameters` - Key-value pairs defining properties associated with the column. +* `type` - Datatype of data in the Column. + +#### schema_reference + +* `schemaId` - Configuration block that contains schema identity fields. See [`schemaId`](#schema_id) below. +* `schemaVersionId` - Unique ID assigned to a version of the schema. +* `schemaVersionNumber` - Version number of the schema. + +##### schema_id + +* `registryName` - Name of the schema registry that contains the schema. +* `schemaArn` - ARN of the schema. +* `schemaName` - Name of the schema. + +#### ser_de_info + +* `name` - Name of the SerDe. +* `parameters` - Map of initialization parameters for the SerDe, in key-value form. +* `serializationLibrary` - Usually the class that implements the SerDe. An example is `org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe`. + +#### sort_columns + +* `column` - Name of the column. +* `sortOrder` - Whether the column is sorted in ascending (`1`) or descending order (`0`). + +#### skewed_info + +* `skewedColumnNames` - List of names of columns that contain skewed values. +* `skewedColumnValueLocationMaps` - Map of skewed values to the columns that contain them. +* `skewedColumnValues` - List of values that appear so frequently as to be considered skewed. + +### target_table + +* `catalogId` - ID of the Data Catalog in which the table resides. +* `databaseName` - Name of the catalog database that contains the target table. +* `name` - Name of the target table. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/glue_connection.html.markdown b/website/docs/cdktf/typescript/d/glue_connection.html.markdown new file mode 100644 index 00000000000..0bda781be33 --- /dev/null +++ b/website/docs/cdktf/typescript/d/glue_connection.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_connection" +description: |- + Get information on an AWS Glue Connection +--- + + + +# Data Source: aws_glue_connection + +This data source can be used to fetch information about a specific Glue Connection. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGlueConnection } from "./.gen/providers/aws/data-aws-glue-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsGlueConnection(this, "example", { + id: "123456789123:connection", + }); + } +} + +``` + +## Argument Reference + +* `id` - (Required) Concatenation of the catalog ID and connection name. For example, if your account ID is +`123456789123` and the connection name is `conn`, then the ID is `123456789123:conn`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Glue Connection. +* `catalogId` - Catalog ID of the Glue Connection. +* `connectionType` - Type of Glue Connection. +* `description` - Description of the connection.
+* `matchCriteria` - A list of criteria that can be used in selecting this connection. +* `name` - Name of the Glue Connection. +* `physicalConnectionRequirements` - A map of physical connection requirements, such as VPC and SecurityGroup. +* `tags` - Tags assigned to the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/glue_data_catalog_encryption_settings.html.markdown b/website/docs/cdktf/typescript/d/glue_data_catalog_encryption_settings.html.markdown new file mode 100644 index 00000000000..4b5b801e47c --- /dev/null +++ b/website/docs/cdktf/typescript/d/glue_data_catalog_encryption_settings.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_data_catalog_encryption_settings" +description: |- + Get information on AWS Glue Data Catalog Encryption Settings +--- + + + +# Data Source: aws_glue_data_catalog_encryption_settings + +This data source can be used to fetch information about AWS Glue Data Catalog Encryption Settings. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGlueDataCatalogEncryptionSettings } from "./.gen/providers/aws/data-aws-glue-data-catalog-encryption-settings"; +interface MyConfig { + catalogId: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new DataAwsGlueDataCatalogEncryptionSettings(this, "example", { + id: "123456789123", + catalogId: config.catalogId, + }); + } +} + +``` + +## Argument Reference + +* `catalogId` - (Required) ID of the Data Catalog. This is typically the AWS account ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `dataCatalogEncryptionSettings` - The security configuration to set. See [Data Catalog Encryption Settings](#data_catalog_encryption_settings). +* `id` - The ID of the Data Catalog to set the security configuration for. + +### data_catalog_encryption_settings + +* `connectionPasswordEncryption` - When connection password protection is enabled, the Data Catalog uses a customer-provided key to encrypt the password as part of CreateConnection or UpdateConnection and store it in the ENCRYPTED_PASSWORD field in the connection properties. You can enable catalog encryption or only password encryption. See [Connection Password Encryption](#connection_password_encryption). +* `encryptionAtRest` - Encryption-at-rest configuration for the Data Catalog. See [Encryption At Rest](#encryption_at_rest). + +### connection_password_encryption + +* `returnConnectionPasswordEncrypted` - When set to `true`, passwords remain encrypted in the responses of GetConnection and GetConnections. This encryption takes effect independently of the catalog encryption. +* `awsKmsKeyId` - KMS key ARN that is used to encrypt the connection password. + +### encryption_at_rest + +* `catalogEncryptionMode` - The encryption-at-rest mode for encrypting Data Catalog data. +* `sseAwsKmsKeyId` - ARN of the AWS KMS key to use for encryption at rest.
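+
+As a usage sketch, the nested settings above can be surfaced as stack outputs. The `.get(0)` accessors below follow the usual cdktf codegen pattern for computed complex lists and are an assumption here, not taken from the documentation above; verify them against your generated bindings.
+
+```typescript
+// A minimal sketch, assuming the generated bindings expose
+// `dataCatalogEncryptionSettings` and `encryptionAtRest` as indexable
+// computed lists (the usual cdktf codegen pattern).
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsGlueDataCatalogEncryptionSettings } from "./.gen/providers/aws/data-aws-glue-data-catalog-encryption-settings";
+class EncryptionSettingsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const settings = new DataAwsGlueDataCatalogEncryptionSettings(
+      this,
+      "example",
+      {
+        catalogId: "123456789123",
+      }
+    );
+    // Surface the encryption-at-rest KMS key ARN for inspection.
+    new TerraformOutput(this, "sse_kms_key_arn", {
+      value: settings.dataCatalogEncryptionSettings
+        .get(0)
+        .encryptionAtRest.get(0).sseAwsKmsKeyId,
+    });
+  }
+}
+```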
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/glue_script.html.markdown b/website/docs/cdktf/typescript/d/glue_script.html.markdown new file mode 100644 index 00000000000..6588d50505d --- /dev/null +++ b/website/docs/cdktf/typescript/d/glue_script.html.markdown @@ -0,0 +1,254 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_script" +description: |- + Generate Glue script from Directed Acyclic Graph +--- + + + +# Data Source: aws_glue_script + +Use this data source to generate a Glue script from a Directed Acyclic Graph (DAG). + +## Example Usage + +### Generate Python Script + +```terraform +data "aws_glue_script" "example" { + language = "PYTHON" + + dag_edge { + source = "datasource0" + target = "applymapping1" + } + + dag_edge { + source = "applymapping1" + target = "selectfields2" + } + + dag_edge { + source = "selectfields2" + target = "resolvechoice3" + } + + dag_edge { + source = "resolvechoice3" + target = "datasink4" + } + + dag_node { + id = "datasource0" + node_type = "DataSource" + + args { + name = "database" + value = "\"${aws_glue_catalog_database.source.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.source.name}\"" + } + } + + dag_node { + id = "applymapping1" + node_type = "ApplyMapping" + + args { + name = "mapping" + value = "[(\"column1\", \"string\", \"column1\", \"string\")]" + } + } + + dag_node { + id = "selectfields2" + node_type = "SelectFields" + + args { + name = "paths" + value = "[\"column1\"]" + } + } + + dag_node { + id = "resolvechoice3" + node_type = "ResolveChoice" + + args { + name = "choice" + value = "\"MATCH_CATALOG\"" + } + + args { + name = "database" + value = "\"${aws_glue_catalog_database.destination.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.destination.name}\"" + } + } + + dag_node { + id = "datasink4" + node_type = "DataSink" + + args { + name = "database" + value = "\"${aws_glue_catalog_database.destination.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.destination.name}\"" + } + } +} + +output "python_script" { + value = data.aws_glue_script.example.python_script +} +``` + +### Generate Scala Code + +```terraform +data "aws_glue_script" "example" { + language = "SCALA" + + dag_edge { + source = "datasource0" + target = "applymapping1" + } + + dag_edge { + source = "applymapping1" + target = "selectfields2" + } + + dag_edge { + source = "selectfields2" + target = "resolvechoice3" + } + + dag_edge { + source = "resolvechoice3" + target = "datasink4" + } + + dag_node { + id = "datasource0" + node_type = "DataSource" + + args { + name = "database" + value = "\"${aws_glue_catalog_database.source.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.source.name}\"" + } + } + + dag_node { + id = "applymapping1" + node_type = "ApplyMapping" + + args { + name = "mappings" + value = "[(\"column1\", \"string\", \"column1\", \"string\")]" + } + } + + dag_node { + id = "selectfields2" + node_type = "SelectFields" + + args { + name = "paths" + value = "[\"column1\"]" + } + } + + dag_node { + id = "resolvechoice3" + node_type = "ResolveChoice" + + args { + name = "choice" + value = "\"MATCH_CATALOG\"" + } + + args { + name = "database" + value = "\"${aws_glue_catalog_database.destination.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.destination.name}\"" + } + } + + dag_node { + id = "datasink4" + node_type = "DataSink" + + 
args { + name = "database" + value = "\"${aws_glue_catalog_database.destination.name}\"" + } + + args { + name = "table_name" + value = "\"${aws_glue_catalog_table.destination.name}\"" + } + } +} + +output "scala_code" { + value = data.aws_glue_script.example.scala_code +} +``` + +## Argument Reference + +* `dagEdge` - (Required) List of the edges in the DAG. Defined below. +* `dagNode` - (Required) List of the nodes in the DAG. Defined below. +* `language` - (Optional) Programming language of the resulting code from the DAG. Defaults to `PYTHON`. Valid values are `PYTHON` and `SCALA`. + +### dag_edge Argument Reference + +* `source` - (Required) ID of the node at which the edge starts. +* `target` - (Required) ID of the node at which the edge ends. +* `targetParameter` - (Optional) Target of the edge. + +### dag_node Argument Reference + +* `args` - (Required) Nested configuration block for an argument or property of a node. Defined below. +* `id` - (Required) Node identifier that is unique within the node's graph. +* `nodeType` - (Required) Type of the node. +* `lineNumber` - (Optional) Line number of the node. + +#### args Argument Reference + +* `name` - (Required) Name of the argument or property. +* `value` - (Required) Value of the argument or property. +* `param` - (Optional) Boolean if the value is used as a parameter. Defaults to `false`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `pythonScript` - Python script generated from the DAG when the `language` argument is set to `PYTHON`. +* `scalaCode` - Scala code generated from the DAG when the `language` argument is set to `SCALA`. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/grafana_workspace.html.markdown b/website/docs/cdktf/typescript/d/grafana_workspace.html.markdown new file mode 100644 index 00000000000..b6f38eacc73 --- /dev/null +++ b/website/docs/cdktf/typescript/d/grafana_workspace.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "Managed Grafana" +layout: "aws" +page_title: "AWS: aws_grafana_workspace" +description: |- + Gets information on an Amazon Managed Grafana workspace. +--- + + + +# Data Source: aws_grafana_workspace + +Provides an Amazon Managed Grafana workspace data source. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGrafanaWorkspace } from "./.gen/providers/aws/data-aws-grafana-workspace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsGrafanaWorkspace(this, "example", { + workspaceId: "g-2054c75a02", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `workspaceId` - (Required) Grafana workspace ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accountAccessType` - Type of account access for the workspace. Valid values are `CURRENT_ACCOUNT` and `ORGANIZATION`. If `ORGANIZATION` is specified, then `organizationalUnits` must also be present. +* `authenticationProviders` - Authentication providers for the workspace.
Valid values are `AWS_SSO`, `SAML`, or both. +* `arn` - ARN of the Grafana workspace. +* `createdDate` - Creation date of the Grafana workspace. +* `dataSources` - Data sources for the workspace. +* `description` - Workspace description. +* `endpoint` - Endpoint of the Grafana workspace. +* `grafanaVersion` - Version of Grafana running on the workspace. +* `lastUpdatedDate` - Last updated date of the Grafana workspace. +* `name` - Grafana workspace name. +* `notificationDestinations` - The notification destinations. +* `organizationRoleName` - The role name that the workspace uses to access resources through Amazon Organizations. +* `organizationalUnits` - The Amazon Organizations organizational units that the workspace is authorized to use data sources from. +* `permissionType` - Permission type of the workspace. +* `roleArn` - IAM role ARN that the workspace assumes. +* `stackSetName` - AWS CloudFormation stack set name that provisions IAM roles to be used by the workspace. +* `status` - Status of the Grafana workspace. +* `tags` - Tags assigned to the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/guardduty_detector.html.markdown b/website/docs/cdktf/typescript/d/guardduty_detector.html.markdown new file mode 100644 index 00000000000..e79359554dc --- /dev/null +++ b/website/docs/cdktf/typescript/d/guardduty_detector.html.markdown @@ -0,0 +1,47 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_detector" +description: |- + Retrieve information about a GuardDuty detector. +--- + + + +# Data Source: aws_guardduty_detector + +Retrieve information about a GuardDuty detector. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGuarddutyDetector } from "./.gen/providers/aws/data-aws-guardduty-detector"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsGuarddutyDetector(this, "example", {}); + } +} + +``` + +## Argument Reference + +* `id` - (Optional) ID of the detector. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `findingPublishingFrequency` - The frequency of notifications sent about subsequent finding occurrences. +* `serviceRoleArn` - Service-linked role that grants GuardDuty access to the resources in the AWS account. +* `status` - Current status of the detector. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/guardduty_finding_ids.html.markdown b/website/docs/cdktf/typescript/d/guardduty_finding_ids.html.markdown new file mode 100644 index 00000000000..ae3f101c7d4 --- /dev/null +++ b/website/docs/cdktf/typescript/d/guardduty_finding_ids.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_finding_ids" +description: |- + Terraform data source for retrieving AWS GuardDuty finding IDs. +--- + + + +# Data Source: aws_guardduty_finding_ids + +Terraform data source for retrieving AWS GuardDuty finding IDs.
+ +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsGuarddutyFindingIds } from "./.gen/providers/aws/data-aws-guardduty-finding-ids"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsGuarddutyFindingIds(this, "example", { + detectorId: Token.asString(awsGuarddutyDetectorExample.id), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `detectorId` - (Required) ID of the GuardDuty detector. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `hasFindings` - Indicates whether findings are present for the specified detector. +* `findingIds` - A list of finding IDs for the specified detector. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_access_keys.html.markdown b/website/docs/cdktf/typescript/d/iam_access_keys.html.markdown new file mode 100644 index 00000000000..0d76d1a0eed --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_access_keys.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_access_keys" +description: |- + Get information on IAM access keys associated with the specified IAM user. +--- + + + +# Data Source: aws_iam_access_keys + +This data source can be used to fetch information about IAM access keys of a +specific IAM user. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamAccessKeys } from "./.gen/providers/aws/data-aws-iam-access-keys"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamAccessKeys(this, "example", { + user: "an_example_user_name", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `user` - (Required) Name of the IAM user associated with the access keys. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accessKeys` - List of the IAM access keys associated with the specified user. See below. + +The elements of the `accessKeys` are exported with the following attributes: + +* `accessKeyId` - Access key ID. +* `createDate` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the access key was created. +* `status` - Access key status. Possible values are `Active` and `Inactive`.
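+
+As a usage sketch, individual keys in the `accessKeys` list can be referenced through the generated list accessor. The `.get(0)` call below follows the usual cdktf codegen pattern for computed complex lists and is an assumption here, not taken from the documentation above.
+
+```typescript
+// A minimal sketch, assuming the generated binding exposes `accessKeys`
+// as an indexable computed list (the standard cdktf codegen pattern).
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsIamAccessKeys } from "./.gen/providers/aws/data-aws-iam-access-keys";
+class AccessKeysSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const keys = new DataAwsIamAccessKeys(this, "example", {
+      user: "an_example_user_name",
+    });
+    // Inspect the first access key associated with the user.
+    new TerraformOutput(this, "first_access_key_id", {
+      value: keys.accessKeys.get(0).accessKeyId,
+    });
+    new TerraformOutput(this, "first_access_key_status", {
+      value: keys.accessKeys.get(0).status,
+    });
+  }
+}
+```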
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_account_alias.html.markdown b/website/docs/cdktf/typescript/d/iam_account_alias.html.markdown new file mode 100644 index 00000000000..f4bc00a15c5 --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_account_alias.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_account_alias" +description: |- + Provides the account alias for the AWS account associated with the provider + connection to AWS. +--- + + + +# Data Source: aws_iam_account_alias + +The IAM Account Alias data source allows access to the account alias +for the effective account in which Terraform is working. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamAccountAlias } from "./.gen/providers/aws/data-aws-iam-account-alias"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsIamAccountAlias(this, "current", {}); + new TerraformOutput(this, "account_alias", { + value: current.accountAlias, + }); + } +} + +``` + +## Argument Reference + +There are no arguments available for this data source. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accountAlias` - Alias associated with the AWS account. +* `id` - Alias associated with the AWS account. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_group.html.markdown b/website/docs/cdktf/typescript/d/iam_group.html.markdown new file mode 100644 index 00000000000..ffeb313b955 --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_group.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group" +description: |- + Get information on an Amazon IAM group +--- + + + +# Data Source: aws_iam_group + +This data source can be used to fetch information about a specific +IAM group. By using this data source, you can reference IAM group +properties without having to hard code ARNs as input. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamGroup } from "./.gen/providers/aws/data-aws-iam-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamGroup(this, "example", { + groupName: "an_example_group_name", + }); + } +} + +``` + +## Argument Reference + +* `groupName` - (Required) Friendly IAM group name to match. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Group ARN. +* `groupId` - Stable and unique string identifying the group. +* `path` - Path to the group. +* `users` - List of objects containing group member information. See below. + +### `users` + +* `arn` - User ARN. +* `path` - Path to the IAM user.
+* `userId` - Stable and unique string identifying the IAM user. +* `userName` - Name of the IAM user. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_instance_profile.html.markdown b/website/docs/cdktf/typescript/d/iam_instance_profile.html.markdown new file mode 100644 index 00000000000..10dcd9837d6 --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_instance_profile.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_instance_profile" +description: |- + Get information on an Amazon IAM Instance Profile +--- + + + +# Data Source: aws_iam_instance_profile + +This data source can be used to fetch information about a specific +IAM instance profile. By using this data source, you can reference IAM +instance profile properties without having to hard code ARNs as input. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamInstanceProfile } from "./.gen/providers/aws/data-aws-iam-instance-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamInstanceProfile(this, "example", { + name: "an_example_instance_profile_name", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) Friendly IAM instance profile name to match. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN. +* `createDate` - String representation of the date the instance profile was created. +* `path` - Path to the instance profile. +* `roleArn` - Role ARN associated with this instance profile. +* `roleId` - Role ID associated with this instance profile. +* `roleName` - Role name associated with this instance profile. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_instance_profiles.html.markdown b/website/docs/cdktf/typescript/d/iam_instance_profiles.html.markdown new file mode 100644 index 00000000000..645a2f45377 --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_instance_profiles.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_instance_profiles" +description: |- + Get information on Amazon IAM Instance Profiles from an IAM role +--- + + + +# Data Source: aws_iam_instance_profiles + +This data source can be used to fetch information about all +IAM instance profiles under a role. By using this data source, you can reference IAM +instance profile properties without having to hard code ARNs as input. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsIamInstanceProfiles } from "./.gen/providers/aws/data-aws-iam-instance-profiles"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamInstanceProfiles(this, "example", { + roleName: "an_example_iam_role_name", + }); + } +} + +``` + +## Argument Reference + +* `roleName` - (Required) IAM role name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of instance profiles. +* `names` - Set of IAM instance profile names. +* `paths` - Set of IAM instance profile paths. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_openid_connect_provider.html.markdown b/website/docs/cdktf/typescript/d/iam_openid_connect_provider.html.markdown new file mode 100644 index 00000000000..0c63bec4d54 --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_openid_connect_provider.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_openid_connect_provider" +description: |- + Get information on an Amazon IAM OpenID Connect provider. +--- + + + +# Data Source: aws_iam_openid_connect_provider + +This data source can be used to fetch information about a specific +IAM OpenID Connect provider. By using this data source, you can retrieve +the resource information by either its `arn` or `url`. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamOpenidConnectProvider } from "./.gen/providers/aws/data-aws-iam-openid-connect-provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamOpenidConnectProvider(this, "example", { + arn: "arn:aws:iam::123456789012:oidc-provider/accounts.google.com", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamOpenidConnectProvider } from "./.gen/providers/aws/data-aws-iam-openid-connect-provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamOpenidConnectProvider(this, "example", { + url: "https://accounts.google.com", + }); + } +} + +``` + +## Argument Reference + +* `arn` - (Optional) ARN of the OpenID Connect provider. +* `url` - (Optional) URL of the OpenID Connect provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `clientIdList` - List of client IDs (also known as audiences). When a mobile or web app registers with an OpenID Connect provider, they establish a value that identifies the application. (This is the value that's sent as the client_id parameter on OAuth requests.) +* `thumbprintList` - List of server certificate thumbprints for the OpenID Connect (OIDC) identity provider's server certificate(s).
+* `tags` - Map of resource tags for the IAM OIDC provider. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_policy.html.markdown b/website/docs/cdktf/typescript/d/iam_policy.html.markdown new file mode 100644 index 00000000000..c8c4ec1215a --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_policy.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_policy" +description: |- + Get information on an Amazon IAM policy +--- + + + +# Data Source: aws_iam_policy + +This data source can be used to fetch information about a specific +IAM policy. + +## Example Usage + +### By ARN + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicy } from "./.gen/providers/aws/data-aws-iam-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamPolicy(this, "example", { + arn: "arn:aws:iam::123456789012:policy/UsersManageOwnCredentials", + }); + } +} + +``` + +### By Name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicy } from "./.gen/providers/aws/data-aws-iam-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamPolicy(this, "example", { + name: "test_policy", + }); + } +} + +``` + +## Argument Reference + +* `arn` - (Optional) ARN of the IAM policy. + Conflicts with `name` and `pathPrefix`. +* `name` - (Optional) Name of the IAM policy. + Conflicts with `arn`. +* `pathPrefix` - (Optional) Prefix of the path to the IAM policy. + Defaults to a slash (`/`). + Conflicts with `arn`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the policy. +* `path` - Path to the policy. +* `description` - Description of the policy. +* `policy` - Policy document of the policy. +* `policyId` - Policy's ID. +* `tags` - Key-value mapping of tags for the IAM Policy. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_policy_document.html.markdown b/website/docs/cdktf/typescript/d/iam_policy_document.html.markdown new file mode 100644 index 00000000000..0af46e45832 --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_policy_document.html.markdown @@ -0,0 +1,641 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_policy_document" +description: |- + Generates an IAM policy document in JSON format +--- + + + +# Data Source: aws_iam_policy_document + +Generates an IAM policy document in JSON format for use with resources that expect policy documents such as [`awsIamPolicy`](/docs/providers/aws/r/iam_policy.html). + +Using this data source to generate policy documents is *optional*.
It is also valid to use literal JSON strings in your configuration or to use the `file` interpolation function to read a raw JSON policy document from a file. + +~> **NOTE:** AWS's IAM policy document syntax allows for replacement of [policy variables](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_variables.html) within a statement using `${...}`-style notation, which conflicts with Terraform's interpolation syntax. In order to use AWS policy variables with this data source, use `&{...}` notation for interpolations that should be processed by AWS rather than by Terraform. + +-> For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Example Usage + +### Basic Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: ["s3:ListAllMyBuckets", "s3:GetBucketLocation"], + resources: ["arn:aws:s3:::*"], + sid: "1", + }, + { + actions: ["s3:ListBucket"], + condition: [ + { + test: "StringLike", + values: ["", "home/", "home/&{aws:username}/"], + variable: "s3:prefix", + }, + ], + resources: ["arn:aws:s3:::${" + s3BucketName.value + "}"], + }, + { + actions: ["s3:*"], + resources: [ + "arn:aws:s3:::${" + s3BucketName.value + "}/home/&{aws:username}", + "arn:aws:s3:::${" + s3BucketName.value + "}/home/&{aws:username}/*", + ], + }, + ], + }); + const awsIamPolicyExample = new IamPolicy(this, "example_1", { + name: "example_policy", + path: "/", + policy: Token.asString(example.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamPolicyExample.overrideLogicalId("example"); + } +} + +``` + +### Example Multiple Condition Keys and Values + +You can specify a [condition with multiple keys and values](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_multi-value-conditions.html) by supplying multiple `condition` blocks with the same `test` value, but differing `variable` and `values` values. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamPolicyDocument( + this, + "example_multiple_condition_keys_and_values", + { + statement: [ + { + actions: ["kms:Decrypt", "kms:GenerateDataKey"], + condition: [ + { + test: "ForAnyValue:StringEquals", + values: ["pi"], + variable: "kms:EncryptionContext:service", + }, + { + test: "ForAnyValue:StringEquals", + values: ["rds"], + variable: "kms:EncryptionContext:aws:pi:service", + }, + { + test: "ForAnyValue:StringEquals", + values: [ + "db-AAAAABBBBBCCCCCDDDDDEEEEE", + "db-EEEEEDDDDDCCCCCBBBBBAAAAA", + ], + variable: "kms:EncryptionContext:aws:rds:db-id", + }, + ], + resources: ["*"], + }, + ], + } + ); + } +} + +``` + +`dataAwsIamPolicyDocumentExampleMultipleConditionKeysAndValuesJson` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": [ + "kms:GenerateDataKey", + "kms:Decrypt" + ], + "Resource": "*", + "Condition": { + "ForAnyValue:StringEquals": { + "kms:EncryptionContext:aws:pi:service": "rds", + "kms:EncryptionContext:aws:rds:db-id": [ + "db-AAAAABBBBBCCCCCDDDDDEEEEE", + "db-EEEEEDDDDDCCCCCBBBBBAAAAA" + ], + "kms:EncryptionContext:service": "pi" + } + } + } + ] +} +``` + +### Example Assume-Role Policy with Multiple Principals + +You can specify multiple principal blocks with different types. You can also use this data source to generate an assume-role policy. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamPolicyDocument( + this, + "event_stream_bucket_role_assume_role_policy", + { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["firehose.amazonaws.com"], + type: "Service", + }, + { + identifiers: [trustedRoleArn.stringValue], + type: "AWS", + }, + { + identifiers: [ + "arn:aws:iam::${" + + accountId.value + + "}:saml-provider/${" + + providerName.value + + "}", + "cognito-identity.amazonaws.com", + ], + type: "Federated", + }, + ], + }, + ], + } + ); + } +} + +``` + +### Example Using A Source Document + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const source = new DataAwsIamPolicyDocument(this, "source", { + statement: [ + { + actions: ["ec2:*"], + resources: ["*"], + }, + { + actions: ["s3:*"], + resources: ["*"], + sid: "SidToOverride", + }, + ], + }); + new DataAwsIamPolicyDocument(this, "source_document_example", { + sourcePolicyDocuments: [Token.asString(source.json)], + statement: [ + { + actions: ["s3:*"], + resources: ["arn:aws:s3:::somebucket", "arn:aws:s3:::somebucket/*"], + sid: "SidToOverride", + }, + ], + }); + } +} + +``` + +`dataAwsIamPolicyDocumentSourceDocumentExampleJson` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*" + }, + { + "Sid": "SidToOverride", + "Effect": "Allow", + "Action": "s3:*", + "Resource": [ + "arn:aws:s3:::somebucket/*", + "arn:aws:s3:::somebucket" + ] + } + ] +} +``` + +### Example Using An Override Document + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const override = new DataAwsIamPolicyDocument(this, "override", { + statement: [ + { + actions: ["s3:*"], + resources: ["*"], + sid: "SidToOverride", + }, + ], + }); + new DataAwsIamPolicyDocument(this, "override_policy_document_example", { + overridePolicyDocuments: [Token.asString(override.json)], + statement: [ + { + actions: ["ec2:*"], + resources: ["*"], + }, + { + actions: ["s3:*"], + resources: ["arn:aws:s3:::somebucket", "arn:aws:s3:::somebucket/*"], + sid: "SidToOverride", + }, + ], + }); + } +} + +``` + +`dataAwsIamPolicyDocumentOverridePolicyDocumentExampleJson` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*" + }, + { + "Sid": "SidToOverride", + "Effect": "Allow", + "Action": "s3:*", + "Resource": "*" + } + ] +} +``` + +### Example with Both Source and Override Documents + +You can also combine `sourcePolicyDocuments` and `overridePolicyDocuments` in the same document. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const override = new DataAwsIamPolicyDocument(this, "override", { + statement: [ + { + actions: ["s3:GetObject"], + resources: ["*"], + sid: "OverridePlaceholder", + }, + ], + }); + const source = new DataAwsIamPolicyDocument(this, "source", { + statement: [ + { + actions: ["ec2:DescribeAccountAttributes"], + resources: ["*"], + sid: "OverridePlaceholder", + }, + ], + }); + new DataAwsIamPolicyDocument(this, "politik", { + overridePolicyDocuments: [Token.asString(override.json)], + sourcePolicyDocuments: [Token.asString(source.json)], + }); + } +} + +``` + +`dataAwsIamPolicyDocumentPolitikJson` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "OverridePlaceholder", + "Effect": "Allow", + "Action": "s3:GetObject", + "Resource": "*" + } + ] +} +``` + +### Example of Merging Source Documents + +Multiple documents can be combined using the `sourcePolicyDocuments` or `overridePolicyDocuments` attributes. `sourcePolicyDocuments` requires that all documents have unique Sids, while `overridePolicyDocuments` will iteratively override matching Sids. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const sourceOne = new DataAwsIamPolicyDocument(this, "source_one", { + statement: [ + { + actions: ["ec2:*"], + resources: ["*"], + }, + { + actions: ["s3:*"], + resources: ["*"], + sid: "UniqueSidOne", + }, + ], + }); + const sourceTwo = new DataAwsIamPolicyDocument(this, "source_two", { + statement: [ + { + actions: ["iam:*"], + resources: ["*"], + sid: "UniqueSidTwo", + }, + { + actions: ["lambda:*"], + resources: ["*"], + }, + ], + }); + new DataAwsIamPolicyDocument(this, "combined", { + sourcePolicyDocuments: [ + Token.asString(sourceOne.json), + Token.asString(sourceTwo.json), + ], + }); + } +} + +``` + +`dataAwsIamPolicyDocumentCombinedJson` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*" + }, + { + "Sid": "UniqueSidOne", + "Effect": "Allow", + "Action": "s3:*", + "Resource": "*" + }, + { + "Sid": "UniqueSidTwo", + "Effect": "Allow", + "Action": "iam:*", + "Resource": "*" + }, + { + "Sid": "", + "Effect": "Allow", + "Action": "lambda:*", + "Resource": "*" + } + ] +} +``` + +### Example of Merging Override Documents + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const policyOne = new DataAwsIamPolicyDocument(this, "policy_one", { + statement: [ + { + actions: ["s3:*"], + effect: "Allow", + resources: ["*"], + sid: "OverridePlaceHolderOne", + }, + ], + }); + const policyThree = new DataAwsIamPolicyDocument(this, "policy_three", { + statement: [ + { + actions: ["logs:*"], + effect: "Deny", + resources: ["*"], + sid: "OverridePlaceHolderOne", + }, + ], + }); + const policyTwo = new DataAwsIamPolicyDocument(this, "policy_two", { + statement: [ + { + actions: ["ec2:*"], + effect: "Allow", + resources: ["*"], + }, + { + actions: ["iam:*"], + effect: "Allow", + resources: ["*"], + sid: "OverridePlaceHolderTwo", + }, + ], + }); + new DataAwsIamPolicyDocument(this, "combined", { + overridePolicyDocuments: [ + Token.asString(policyOne.json), + Token.asString(policyTwo.json), + Token.asString(policyThree.json), + ], + statement: [ + { + actions: ["*"], + effect: "Deny", + resources: ["*"], + sid: "OverridePlaceHolderTwo", + }, + ], + }); + } +} + +``` + +`dataAwsIamPolicyDocumentCombinedJson` will evaluate to: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "OverridePlaceHolderTwo", + "Effect": "Allow", + "Action": "iam:*", + "Resource": "*" + }, + { + "Sid": "OverridePlaceHolderOne", + "Effect": "Deny", + "Action": "logs:*", + "Resource": "*" + }, + { + "Sid": "", + "Effect": "Allow", + "Action": "ec2:*", + "Resource": "*" + } + ] +} +``` + +## Argument Reference + +The following arguments are optional: + +~> **NOTE:** Statements without a `sid` cannot be overridden. In other words, a statement without a `sid` from `sourcePolicyDocuments` cannot be overridden by statements from `overridePolicyDocuments`. + +* `overridePolicyDocuments` (Optional) - List of IAM policy documents that are merged together into the exported document. In merging, statements with non-blank `sid`s will override statements with the same `sid` from earlier documents in the list. Statements with non-blank `sid`s will also override statements with the same `sid` from `sourcePolicyDocuments`. Non-overriding statements will be added to the exported document. +* `policyId` (Optional) - ID for the policy document. +* `sourcePolicyDocuments` (Optional) - List of IAM policy documents that are merged together into the exported document. Statements defined in `sourcePolicyDocuments` must have unique `sid`s. Statements with the same `sid` from `overridePolicyDocuments` will override source statements. +* `statement` (Optional) - Configuration block for a policy statement. Detailed below. +* `version` (Optional) - IAM policy document version. Valid values are `2008-10-17` and `2012-10-17`. Defaults to `2012-10-17`. For more information, see the [AWS IAM User Guide](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_version.html). + +### `statement` + +The following arguments are optional: + +* `actions` (Optional) - List of actions that this statement either allows or denies. For example, `["ec2:RunInstances", "s3:*"]`. +* `condition` (Optional) - Configuration block for a condition. Detailed below. +* `effect` (Optional) - Whether this statement allows or denies the given actions. Valid values are `Allow` and `Deny`. Defaults to `Allow`. +* `notActions` (Optional) - List of actions that this statement does *not* apply to.
Use to apply a policy statement to all actions *except* those listed. +* `notPrincipals` (Optional) - Like `principals` except these are principals that the statement does *not* apply to. +* `notResources` (Optional) - List of resource ARNs that this statement does *not* apply to. Use to apply a policy statement to all resources *except* those listed. Conflicts with `resources`. +* `principals` (Optional) - Configuration block for principals. Detailed below. +* `resources` (Optional) - List of resource ARNs that this statement applies to. This is required by AWS if used for an IAM policy. Conflicts with `notResources`. +* `sid` (Optional) - Sid (statement ID) is an identifier for a policy statement. + +### `condition` + +A `condition` constrains whether a statement applies in a particular situation. Conditions can be specific to an AWS service. When using multiple `condition` blocks, they must *all* evaluate to true for the policy statement to apply. In other words, AWS evaluates the conditions as though with an "AND" boolean operation. + +The following arguments are required: + +* `test` (Required) Name of the [IAM condition operator](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_condition_operators.html) to evaluate. +* `values` (Required) Values to evaluate the condition against. If multiple values are provided, the condition matches if at least one of them applies. That is, AWS evaluates multiple values as though using an "OR" boolean operation. +* `variable` (Required) Name of a [Context Variable](http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#AvailableKeys) to apply the condition to. Context variables may either be standard AWS variables starting with `aws:` or service-specific variables prefixed with the service name. + +### `principals` and `notPrincipals` + +The `principals` and `notPrincipals` arguments define to whom a statement applies or does not apply, respectively. + +~> **NOTE:** Even though the [IAM Documentation](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html) states that `"Principal": "*"` and `"Principal": {"AWS": "*"}` are equivalent, those principal elements have different behavior in some situations, e.g., IAM Role Trust Policy. To have Terraform render JSON containing `"Principal": "*"`, use `type = "*"` and `identifiers = ["*"]`. To have Terraform render JSON containing `"Principal": {"AWS": "*"}`, use `type = "AWS"` and `identifiers = ["*"]`. + +-> For more information about AWS principals, refer to the [AWS Identity and Access Management User Guide: AWS JSON policy elements: Principal](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html). + +The following arguments are required: + +* `identifiers` (Required) List of identifiers for principals. When `type` is `AWS`, these are IAM principal ARNs, e.g., `arn:aws:iam::12345678901:role/yakRole`. When `type` is `Service`, these are AWS Service roles, e.g., `lambda.amazonaws.com`. When `type` is `Federated`, these are web identity users or SAML provider ARNs, e.g., `accounts.google.com` or `arn:aws:iam::12345678901:saml-provider/yakSamlProvider`. When `type` is `CanonicalUser`, these are [canonical user IDs](https://docs.aws.amazon.com/general/latest/gr/acct-identifiers.html#FindingCanonicalId), e.g., `79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be`. +* `type` (Required) Type of principal.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `json` - Standard JSON policy document rendered based on the arguments above.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iam_principal_policy_simulation.html.markdown b/website/docs/cdktf/typescript/d/iam_principal_policy_simulation.html.markdown
new file mode 100644
index 00000000000..ecd5a713115
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iam_principal_policy_simulation.html.markdown
@@ -0,0 +1,268 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_principal_policy_simulation"
+description: |-
+  Runs a simulation of the IAM policies of a particular principal against a given hypothetical request.
+---
+
+
+
+# Data Source: aws_iam_principal_policy_simulation
+
+Runs a simulation of the IAM policies of a particular principal against a given hypothetical request.
+
+You can use this data source in conjunction with
+[Preconditions and Postconditions](https://www.terraform.io/language/expressions/custom-conditions#preconditions-and-postconditions) so that your configuration can test either whether it should have sufficient access to do its own work, or whether policies your configuration declares itself are sufficient for their intended use elsewhere.
+
+-> **Note:** Correctly using this data source requires familiarity with various details of AWS Identity and Access Management, and how various AWS services integrate with it. For general information on the AWS IAM policy simulator, see [Testing IAM policies with the IAM policy simulator](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_testing-policies.html). This data source wraps the `iam:SimulatePrincipalPolicy` API action described on that page.
+
+## Example Usage
+
+### Self Access-checking Example
+
+The following example raises an error if the credentials passed to the AWS provider do not have access to perform the three actions `s3:GetObject`, `s3:PutObject`, and `s3:DeleteObject` on the S3 bucket with the given ARN. It combines `awsIamPrincipalPolicySimulation` with the core Terraform postconditions feature.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformSelf, Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPrincipalPolicySimulation } from "./.gen/providers/aws/data-aws-iam-principal-policy-simulation"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + new DataAwsIamPrincipalPolicySimulation(this, "s3_object_access", { + actionNames: ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"], + lifecycle: { + postcondition: [ + { + condition: TerraformSelf.getAny("all_allowed"), + errorMessage: + " Given AWS credentials do not have sufficient access to manage " + + Token.asString( + Fn.join( + ", ", + Token.asList(TerraformSelf.getAny("resource_arns")) + ) + ) + + ".\n\n", + }, + ], + }, + policySourceArn: Token.asString(current.arn), + resourceArns: ["arn:aws:s3:::my-test-bucket"], + }); + } +} + +``` + +If you intend to use this data source to quickly raise an error when the given credentials are insufficient then you must use [`dependsOn`](https://www.terraform.io/language/meta-arguments/depends_on) inside any resource which would require those credentials, to ensure that the policy check will run first: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object"; +interface MyConfig { + key: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new S3BucketObject(this, "example", { + bucket: "my-test-bucket", + dependsOn: [s3ObjectAccess], + key: config.key, + }); + } +} + +``` + +### Testing the Effect of a Declared Policy + +The following example declares an S3 bucket and a user that should have access to the bucket, and then uses `awsIamPrincipalPolicySimulation` to verify that the user does indeed have access to perform needed operations against the bucket. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformSelf, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPrincipalPolicySimulation } from "./.gen/providers/aws/data-aws-iam-principal-policy-simulation"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +import { IamUserPolicy } from "./.gen/providers/aws/iam-user-policy"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new IamUser(this, "example", { + name: "example", + }); + const awsS3BucketExample = new S3Bucket(this, "example_1", { + bucket: "my-test-bucket", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsS3BucketExample.overrideLogicalId("example");
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const s3Access = new IamUserPolicy(this, "s3_access", {
+      name: "example_s3_access",
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: "s3:GetObject",
+              Effect: "Allow",
+              Resource: awsS3BucketExample.arn,
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+      user: example.name,
+    });
+    const accountAccess = new S3BucketPolicy(this, "account_access", {
+      bucket: Token.asString(awsS3BucketExample.bucket),
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: "s3:*",
+              Effect: "Allow",
+              Principal: {
+                AWS: current.accountId,
+              },
+              Resource: [
+                awsS3BucketExample.arn,
+                "${" + awsS3BucketExample.arn + "}/*",
+              ],
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+    });
+    new DataAwsIamPrincipalPolicySimulation(this, "s3_object_access", {
+      actionNames: ["s3:GetObject"],
+      dependsOn: [s3Access],
+      lifecycle: {
+        postcondition: [
+          {
+            condition: TerraformSelf.getAny("all_allowed"),
+            errorMessage:
+              " " +
+              Token.asString(TerraformSelf.getAny("policy_source_arn")) +
+              " does not have the expected access to " +
+              Token.asString(
+                Fn.join(
+                  ", ",
+                  Token.asList(TerraformSelf.getAny("resource_arns"))
+                )
+              ) +
+              ".\n\n",
+          },
+        ],
+      },
+      policySourceArn: example.arn,
+      resourceArns: [Token.asString(awsS3BucketExample.arn)],
+      resourcePolicyJson: accountAccess.policy,
+    });
+  }
+}
+
+```
+
+When using `awsIamPrincipalPolicySimulation` to test the effect of a policy declared elsewhere in the same configuration, it's important to use [`dependsOn`](https://www.terraform.io/language/meta-arguments/depends_on) to make sure that the needed policy has been fully created or updated before running the simulation.
+
+## Argument Reference
+
+The following arguments are required for any principal policy simulation:
+
+* `actionNames` (Required) - A set of IAM action names to run simulations for. Each entry in this set adds an additional hypothetical request to the simulation.
+
+  Action names consist of a service prefix and an action verb separated by a colon, such as `s3:GetObject`. Refer to [Actions, resources, and condition keys for AWS services](https://docs.aws.amazon.com/service-authorization/latest/reference/reference_policies_actions-resources-contextkeys.html) to see the full set of possible IAM action names across all AWS services.
+
+* `policySourceArn` (Required) - The [ARN](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html) of the IAM user, group, or role whose policies will be included in the simulation.
+
+You must closely match the form of the real service request you are simulating in order to achieve a realistic result. You can use the following additional arguments to specify other characteristics of the simulated requests:
+
+* `callerArn` (Optional) - The ARN of a user that will appear as the "caller" of the simulated requests. If you do not specify `callerArn` then the simulation will use the `policySourceArn` instead, if it contains a user ARN.
+
+* `context` (Optional) - Each [`context` block](#context-block-arguments) defines an entry in the table of additional context keys in the simulated request.
+
+  IAM uses context keys for both custom conditions and for interpolating dynamic request-specific values into policy values.
If you use policies that include those features then you will need to provide suitable example values for those keys to achieve a realistic simulation.
+
+* `additionalPoliciesJson` (Optional) - A set of additional principal policy documents to include in the simulation. The simulator will behave as if each of these policies were associated with the object specified in `policySourceArn`, allowing you to test the effect of hypothetical policies not yet created.
+
+* `permissionsBoundaryPoliciesJson` (Optional) - A set of [permissions boundary policy documents](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_boundaries.html) to include in the simulation.
+
+* `resourceArns` (Optional) - A set of ARNs of resources to include in the simulation.
+
+  This argument is important for actions that have either required or optional resource types listed in [Actions, resources, and condition keys for AWS services](https://docs.aws.amazon.com/service-authorization/latest/reference/reference_policies_actions-resources-contextkeys.html), and you must provide ARNs that identify AWS objects of the appropriate types for the chosen actions.
+
+  The policy simulator only automatically loads policies associated with the `policySourceArn`, so if your given resources have their own resource-level policy then you'll also need to provide that explicitly using the `resourcePolicyJson` argument to achieve a realistic simulation.
+
+* `resourceHandlingOption` (Optional) - Specifies a special simulation type to run. Some EC2 actions require special simulation behaviors and a particular set of resource ARNs to achieve a realistic result.
+
+  For more details, see the `ResourceHandlingOption` request parameter for [the underlying `iam:SimulatePrincipalPolicy` action](https://docs.aws.amazon.com/IAM/latest/APIReference/API_SimulatePrincipalPolicy.html).
+
+* `resourceOwnerAccountId` (Optional) - An AWS account ID to use for any resource ARN in `resourceArns` that doesn't include its own AWS account ID. If unspecified, the simulator will use the account ID from the `callerArn` argument as a placeholder.
+
+* `resourcePolicyJson` (Optional) - An IAM policy document representing the resource-level policy of all of the resources specified in `resourceArns`.
+
+  The policy simulator cannot automatically load policies that are associated with individual resources, as described in the documentation for `resourceArns` above.
+
+### `context` block arguments
+
+The following arguments are all required in each `context` block; a short sketch follows this list:
+
+* `key` (Required) - The context _condition key_ to set.
+
+  If you have policies containing `condition` elements or using dynamic interpolations then you will need to provide suitable values for each condition key your policies use. See [Actions, resources, and condition keys for AWS services](https://docs.aws.amazon.com/service-authorization/latest/reference/reference_policies_actions-resources-contextkeys.html) to find the various condition keys that are normally provided for real requests to each action of each AWS service.
+
+* `type` (Required) - An IAM value type that determines how the policy simulator will interpret the strings given in `values`.
+
+  For more information, see the `ContextKeyType` field of [`ContextEntry`](https://docs.aws.amazon.com/IAM/latest/APIReference/API_ContextEntry.html) in the underlying API.
+
+* `values` (Required) - A set of one or more values for this context entry.
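+
+As an illustration, the following minimal sketch (the bucket ARN and tag key are placeholders, not part of the examples above) supplies a condition key to the simulator through a `context` block:
+
+```typescript
+// A minimal sketch, assuming provider bindings generated by `cdktf get`.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPrincipalPolicySimulation } from "./.gen/providers/aws/data-aws-iam-principal-policy-simulation";
+class ContextSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    // Simulate a GetObject request as though the resource carried a tag.
+    new DataAwsIamPrincipalPolicySimulation(this, "tagged_access", {
+      actionNames: ["s3:GetObject"],
+      policySourceArn: Token.asString(current.arn),
+      resourceArns: ["arn:aws:s3:::example-bucket/example-object"], // placeholder
+      context: [
+        {
+          key: "aws:ResourceTag/Environment", // placeholder condition key
+          type: "string",
+          values: ["production"],
+        },
+      ],
+    });
+  }
+}
+```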
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `allAllowed` - `true` if all of the simulation results have decision "allowed", or `false` otherwise.
+
+  This is a convenient shorthand for the common case of requiring that all of the simulated requests passed in a postcondition associated with the data source. If you need to describe a more granular condition, use the `results` attribute instead.
+
+* `results` - A set of result objects, one for each of the simulated requests, with the following nested attributes:
+
+    * `actionName` - The name of the single IAM action used for this particular request.
+
+    * `decision` - The raw decision determined from all of the policies in scope; either "allowed", "explicitDeny", or "implicitDeny".
+
+    * `allowed` - `true` if `decision` is "allowed", and `false` otherwise.
+
+    * `decisionDetails` - A map of arbitrary metadata entries returned by the policy simulator for this request.
+
+    * `resourceArn` - ARN of the resource that was used for this particular request. When you specify multiple actions and multiple resource ARNs, that causes a separate policy request for each combination of unique action and resource.
+
+    * `matchedStatements` - A nested set of objects describing which policies contained statements that were relevant to this simulation request. Each object has attributes `sourcePolicyId` and `sourcePolicyType` to identify one of the policies.
+
+    * `missingContextKeys` - A set of context keys (or condition keys) that were needed by some of the policies contributing to this result but not specified using a `context` block in the configuration. Missing or incorrect context keys will typically cause a simulated request to be disallowed.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iam_role.html.markdown b/website/docs/cdktf/typescript/d/iam_role.html.markdown
new file mode 100644
index 00000000000..072917e291f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iam_role.html.markdown
@@ -0,0 +1,64 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_role"
+description: |-
+  Get information on an Amazon IAM role
+---
+
+
+
+# Data Source: aws_iam_role
+
+This data source can be used to fetch information about a specific IAM role. By using this data source, you can reference IAM role properties without having to hard code ARNs as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamRole } from "./.gen/providers/aws/data-aws-iam-role";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsIamRole(this, "example", {
+      name: "an_example_role_name",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly IAM role name to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Friendly IAM role name to match.
+* `arn` - ARN of the role.
+* `assumeRolePolicy` - Policy document associated with the role.
+* `createDate` - Creation date of the role in RFC 3339 format.
+* `description` - Description for the role.
+* `maxSessionDuration` - Maximum session duration. +* `path` - Path to the role. +* `permissionsBoundary` - The ARN of the policy that is used to set the permissions boundary for the role. +* `roleLastUsed` - Contains information about the last time that an IAM role was used. See [`roleLastUsed`](#role_last_used) for details. +* `uniqueId` - Stable and unique string identifying the role. +* `tags` - Tags attached to the role. + +### role_last_used + +* `region` - The name of the AWS Region in which the role was last used. +* `lastUsedDate` - The date and time, in RFC 3339 format, that the role was last used. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_roles.html.markdown b/website/docs/cdktf/typescript/d/iam_roles.html.markdown new file mode 100644 index 00000000000..8e07cc2f3ad --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_roles.html.markdown @@ -0,0 +1,176 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_roles" +description: |- + Get information about a set of IAM Roles. +--- + + + +# Data Source: aws_iam_roles + +Use this data source to get the ARNs and Names of IAM Roles. + +## Example Usage + +### All roles in an account + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamRoles } from "./.gen/providers/aws/data-aws-iam-roles"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamRoles(this, "roles", {}); + } +} + +``` + +### Roles filtered by name regex + +Roles whose role-name contains `project` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamRoles } from "./.gen/providers/aws/data-aws-iam-roles"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamRoles(this, "roles", { + nameRegex: ".*project.*", + }); + } +} + +``` + +### Roles filtered by path prefix + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamRoles } from "./.gen/providers/aws/data-aws-iam-roles"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamRoles(this, "roles", { + pathPrefix: "/custom-path", + }); + } +} + +``` + +### Roles provisioned by AWS SSO + +Roles in the account filtered by path prefix + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamRoles } from "./.gen/providers/aws/data-aws-iam-roles"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamRoles(this, "roles", { + pathPrefix: "/aws-reserved/sso.amazonaws.com/", + }); + } +} + +``` + +Specific role in the account filtered by name regex and path prefix + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamRoles } from "./.gen/providers/aws/data-aws-iam-roles"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamRoles(this, "roles", { + nameRegex: "AWSReservedSSO_permission_set_name_.*", + pathPrefix: "/aws-reserved/sso.amazonaws.com/", + }); + } +} + +``` + +### Role ARNs with paths removed + +For services like Amazon EKS that do not permit a path in the role ARN when used in a cluster's configuration map + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamRoles } from "./.gen/providers/aws/data-aws-iam-roles"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const roles = new DataAwsIamRoles(this, "roles", { + pathPrefix: "/aws-reserved/sso.amazonaws.com/", + }); + new TerraformOutput(this, "arns", { + value: + "${[ for parts in ${" + + ("${[ for arn in ${" + roles.arns + '} : split("/", arn)]}') + + '} : format("%s/%s", parts[0], element(parts, length(parts) - 1))]}', + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `nameRegex` - (Optional) Regex string to apply to the IAM roles list returned by AWS. This allows more advanced filtering not supported from the AWS API. This filtering is done locally on what AWS returns, and could have a performance impact if the result is large. Combine this with other options to narrow down the list AWS returns. +* `pathPrefix` - (Optional) Path prefix for filtering the results. For example, the prefix `/application_abc/component_xyz/` gets all roles whose path starts with `/application_abc/component_xyz/`. If it is not included, it defaults to a slash (`/`), listing all roles. For more details, check out [list-roles in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched IAM roles. +* `names` - Set of Names of the matched IAM roles. 
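+
+As a further hedged sketch (the output name is arbitrary), the `arns` set can also be fed into Terraform functions, for example to count how many roles matched the filters:
+
+```typescript
+// A minimal sketch, assuming provider bindings generated by `cdktf get`.
+import { Construct } from "constructs";
+import { Fn, TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsIamRoles } from "./.gen/providers/aws/data-aws-iam-roles";
+class RoleCountSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const roles = new DataAwsIamRoles(this, "roles", {
+      pathPrefix: "/aws-reserved/sso.amazonaws.com/",
+    });
+    // Export the number of matched roles using the Terraform length() function.
+    new TerraformOutput(this, "role_count", {
+      value: Fn.lengthOf(roles.arns),
+    });
+  }
+}
+```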
+
+[1]: https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/list-roles.html
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iam_saml_provider.html.markdown b/website/docs/cdktf/typescript/d/iam_saml_provider.html.markdown
new file mode 100644
index 00000000000..dbf4088db07
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iam_saml_provider.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_saml_provider"
+description: |-
+  Get information on an IAM SAML provider.
+---
+
+
+
+# Data Source: aws_saml_provider
+
+This data source can be used to fetch information about a specific IAM SAML provider. This will allow you to easily retrieve the metadata document of an existing SAML provider.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamSamlProvider } from "./.gen/providers/aws/data-aws-iam-saml-provider";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsIamSamlProvider(this, "example", {
+      arn: "arn:aws:iam::123456789:saml-provider/myprovider",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `arn` - (Required) ARN assigned by AWS for the provider.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `createDate` - Creation date of the SAML provider in RFC1123 format, e.g. `Mon, 02 Jan 2006 15:04:05 MST`.
+* `name` - Name of the provider.
+* `samlMetadataDocument` - The XML document generated by an identity provider that supports SAML 2.0.
+* `tags` - Tags attached to the SAML provider.
+* `validUntil` - Expiration date and time for the SAML provider in RFC1123 format, e.g. `Mon, 02 Jan 2007 15:04:05 MST`.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iam_server_certificate.html.markdown b/website/docs/cdktf/typescript/d/iam_server_certificate.html.markdown
new file mode 100644
index 00000000000..6bf6304f8d8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iam_server_certificate.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_server_certificate"
+description: |-
+  Get information about a server certificate
+---
+
+
+
+# Data Source: aws_iam_server_certificate
+
+Use this data source to look up information about IAM Server Certificates.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamServerCertificate } from "./.gen/providers/aws/data-aws-iam-server-certificate";
+import { Elb } from "./.gen/providers/aws/elb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const myDomain = new DataAwsIamServerCertificate(this, "my-domain", {
+      latest: true,
+      namePrefix: "my-domain.org",
+    });
+    new Elb(this, "elb", {
+      listener: [
+        {
+          instancePort: 8000,
+          instanceProtocol: "https",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId: Token.asString(myDomain.arn),
+        },
+      ],
+      name: "my-domain-elb",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `namePrefix` - Prefix of the certificate name to filter by.
+* `pathPrefix` - Prefix of the certificate path to filter by.
+* `name` - Exact name of the certificate to look up.
+* `latest` - Sort results by expiration date, returning the certificate whose expiration date is furthest in the future.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` is set to the unique id of the IAM Server Certificate
+* `arn` is set to the ARN of the IAM Server Certificate
+* `path` is set to the path of the IAM Server Certificate
+* `expirationDate` is set to the expiration date of the IAM Server Certificate
+* `uploadDate` is the date when the server certificate was uploaded
+* `certificateBody` is the public key certificate (PEM-encoded). This is useful when [configuring back-end instance authentication](http://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-create-https-ssl-load-balancer.html) policy for load balancer
+* `certificateChain` is the public key certificate chain (PEM-encoded), if it exists; empty otherwise
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an IAM server certificate using `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an IAM server certificate using `name`. For example:
+
+```console
+% terraform import aws_iam_server_certificate.example example
+```
+
+Import will read in the certificate body, certificate chain (if it exists), ID, name, path, and ARN. It will not retrieve the private key, which is not available through the AWS API.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iam_session_context.markdown b/website/docs/cdktf/typescript/d/iam_session_context.markdown
new file mode 100644
index 00000000000..e4f8ab476dd
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iam_session_context.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_session_context"
+description: |-
+  Get information on the IAM source role of an STS assumed role
+---
+
+
+
+# Data Source: aws_iam_session_context
+
+This data source provides information on the IAM source role of an STS assumed role. For non-role ARNs, this data source simply passes the ARN through in `issuerArn`.
+
+For some AWS resources, multiple types of principals are allowed in the same argument (e.g., IAM users and IAM roles). However, these arguments often do not allow assumed-role (i.e., STS, temporary credential) principals.
Given an STS ARN, this data source provides the ARN for the source IAM role.
+
+## Example Usage
+
+### Basic Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamSessionContext } from "./.gen/providers/aws/data-aws-iam-session-context";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsIamSessionContext(this, "example", {
+      arn: "arn:aws:sts::123456789012:assumed-role/Audien-Heaven/MatyNoyes",
+    });
+  }
+}
+
+```
+
+### Find the Terraform Runner's Source Role
+
+Combined with `awsCallerIdentity`, you can get the current user's source IAM role ARN (`issuerArn`) if you're using an assumed role. If you're not using an assumed role, the caller's ARN (e.g., an IAM user's ARN) will simply be passed through. In environments where both IAM users and individuals using assumed roles need to apply the same configuration, this data source lets one configuration serve both seamlessly.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamSessionContext } from "./.gen/providers/aws/data-aws-iam-session-context";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    new DataAwsIamSessionContext(this, "example", {
+      arn: Token.asString(current.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `arn` - (Required) ARN for an assumed role.
+
+~> If `arn` is a non-role ARN, Terraform gives no error and `issuerArn` will be equal to the `arn` value. For STS assumed-role ARNs, Terraform gives an error if the identified IAM role does not exist.
+
+## Attribute Reference
+
+~> With the exception of `issuerArn`, the attributes will not be populated unless the `arn` corresponds to an STS assumed role.
+
+* `issuerArn` - IAM source role ARN if `arn` corresponds to an STS assumed role. Otherwise, `issuerArn` is equal to `arn`.
+* `issuerId` - Unique identifier of the IAM role that issues the STS assumed role.
+* `issuerName` - Name of the source role. Only available if `arn` corresponds to an STS assumed role.
+* `sessionName` - Name of the STS session. Only available if `arn` corresponds to an STS assumed role.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iam_user.html.markdown b/website/docs/cdktf/typescript/d/iam_user.html.markdown
new file mode 100644
index 00000000000..3032a0d302b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iam_user.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user"
+description: |-
+  Get information on an Amazon IAM user
+---
+
+
+
+# Data Source: aws_iam_user
+
+This data source can be used to fetch information about a specific IAM user.
By using this data source, you can reference IAM user properties without having to hard code ARNs or unique IDs as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamUser } from "./.gen/providers/aws/data-aws-iam-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsIamUser(this, "example", {
+      userName: "an_example_user_name",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `userName` - (Required) Friendly IAM user name to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN assigned by AWS for this user.
+* `path` - Path in which this user was created.
+* `permissionsBoundary` - The ARN of the policy that is used to set the permissions boundary for the user.
+* `userId` - Unique ID assigned by AWS for this user.
+* `userName` - Name associated with this user.
+* `tags` - Map of key-value pairs associated with the user.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iam_user_ssh_key.html.markdown b/website/docs/cdktf/typescript/d/iam_user_ssh_key.html.markdown
new file mode 100644
index 00000000000..afc76ec81dd
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iam_user_ssh_key.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user_ssh_key"
+description: |-
+  Get information on an SSH public key associated with the specified IAM user.
+---
+
+
+
+# Data Source: aws_iam_user_ssh_key
+
+Use this data source to get information about an SSH public key associated with the specified IAM user.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamUserSshKey } from "./.gen/providers/aws/data-aws-iam-user-ssh-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsIamUserSshKey(this, "example", {
+      encoding: "SSH",
+      sshPublicKeyId: "APKARUZ32GUTKIGARLXE",
+      username: "test-user",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `encoding` - (Required) Specifies the public key encoding format to use in the response. To retrieve the public key in ssh-rsa format, use `SSH`. To retrieve the public key in PEM format, use `PEM`.
+* `sshPublicKeyId` - (Required) Unique identifier for the SSH public key.
+* `username` - (Required) Name of the IAM user associated with the SSH public key.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `fingerprint` - MD5 message digest of the SSH public key.
+* `publicKey` - SSH public key.
+* `status` - Status of the SSH public key. Active means that the key can be used for authentication with a CodeCommit repository. Inactive means that the key cannot be used.
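+
+As a short usage sketch (reusing the placeholder values from the example above; the output name is arbitrary), the key's attributes can be exported once the data source is declared:
+
+```typescript
+// A minimal sketch, assuming provider bindings generated by `cdktf get`.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsIamUserSshKey } from "./.gen/providers/aws/data-aws-iam-user-ssh-key";
+class SshKeySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const key = new DataAwsIamUserSshKey(this, "example", {
+      encoding: "SSH",
+      sshPublicKeyId: "APKARUZ32GUTKIGARLXE",
+      username: "test-user",
+    });
+    // Export the MD5 fingerprint of the retrieved public key.
+    new TerraformOutput(this, "fingerprint", {
+      value: key.fingerprint,
+    });
+  }
+}
+```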
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/iam_users.html.markdown b/website/docs/cdktf/typescript/d/iam_users.html.markdown new file mode 100644 index 00000000000..f3c087f1d49 --- /dev/null +++ b/website/docs/cdktf/typescript/d/iam_users.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_users" +description: |- + Get information about a set of IAM Users. +--- + + + +# Data Source: aws_iam_users + +Use this data source to get the ARNs and Names of IAM Users. + +## Example Usage + +### All users in an account + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamUsers } from "./.gen/providers/aws/data-aws-iam-users"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamUsers(this, "users", {}); + } +} + +``` + +### Users filtered by name regex + +Users whose username contains `abc` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamUsers } from "./.gen/providers/aws/data-aws-iam-users"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamUsers(this, "users", { + nameRegex: ".*abc.*", + }); + } +} + +``` + +### Users filtered by path prefix + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamUsers } from "./.gen/providers/aws/data-aws-iam-users"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsIamUsers(this, "users", { + pathPrefix: "/custom-path", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `nameRegex` - (Optional) Regex string to apply to the IAM users list returned by AWS. This allows more advanced filtering not supported from the AWS API. This filtering is done locally on what AWS returns, and could have a performance impact if the result is large. Combine this with other options to narrow down the list AWS returns. +* `pathPrefix` - (Optional) Path prefix for filtering the results. For example, the prefix `/division_abc/subdivision_xyz/` gets all users whose path starts with `/division_abc/subdivision_xyz/`. If it is not included, it defaults to a slash (`/`), listing all users. For more details, check out [list-users in the AWS CLI reference][1]. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched IAM users. +* `names` - Set of Names of the matched IAM users. 
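+
+Because `nameRegex` filtering happens locally on what AWS returns, the two arguments can be combined so that `pathPrefix` narrows the list first; a minimal sketch reusing the placeholder values from the examples above:
+
+```typescript
+// A minimal sketch, assuming provider bindings generated by `cdktf get`.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsIamUsers } from "./.gen/providers/aws/data-aws-iam-users";
+class CombinedFiltersSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Let AWS filter by path first, then filter the result by name locally.
+    new DataAwsIamUsers(this, "users", {
+      nameRegex: ".*abc.*",
+      pathPrefix: "/custom-path",
+    });
+  }
+}
+```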
+ +[1]: https://awscli.amazonaws.com/v2/documentation/api/latest/reference/iam/list-users.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/identitystore_group.html.markdown b/website/docs/cdktf/typescript/d/identitystore_group.html.markdown new file mode 100644 index 00000000000..2960f47a3b0 --- /dev/null +++ b/website/docs/cdktf/typescript/d/identitystore_group.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "SSO Identity Store" +layout: "aws" +page_title: "AWS: aws_identitystore_group" +description: |- + Get information on an Identity Store Group +--- + + + +# Data Source: aws_identitystore_group + +Use this data source to get an Identity Store Group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Fn, + propertyAccess, + Token, + TerraformOutput, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIdentitystoreGroup } from "./.gen/providers/aws/data-aws-identitystore-group"; +import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsSsoadminInstances(this, "example", {}); + const dataAwsIdentitystoreGroupExample = new DataAwsIdentitystoreGroup( + this, + "example_1", + { + alternateIdentifier: { + uniqueAttribute: { + attributePath: "DisplayName", + attributeValue: "ExampleGroup", + }, + }, + identityStoreId: Token.asString( + propertyAccess(Fn.tolist(example.identityStoreIds), ["0"]) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIdentitystoreGroupExample.overrideLogicalId("example"); + new TerraformOutput(this, "group_id", { + value: dataAwsIdentitystoreGroupExample.groupId, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `identityStoreId` - (Required) Identity Store ID associated with the Single Sign-On Instance. + +The following arguments are optional: + +* `alternateIdentifier` (Optional) A unique identifier for the group that is not the primary identifier. Conflicts with `groupId`. Detailed below. +* `groupId` - (Optional) The identifier for a group in the Identity Store. + +-> Exactly one of the above arguments must be provided. + +### `alternateIdentifier` Configuration Block + +The `alternateIdentifier` configuration block supports the following arguments: + +* `externalId` - (Optional) Configuration block for filtering by the identifier issued by an external identity provider. Detailed below. +* `uniqueAttribute` - (Optional) An entity attribute that's unique to a specific entity. Detailed below. + +-> Exactly one of the above arguments must be provided. + +### `externalId` Configuration Block + +The `externalId` configuration block supports the following arguments: + +* `id` - (Required) The identifier issued to this resource by an external identity provider. +* `issuer` - (Required) The issuer for an external identifier. + +### `uniqueAttribute` Configuration Block + +The `uniqueAttribute` configuration block supports the following arguments: + +* `attributePath` - (Required) Attribute path that is used to specify which attribute name to search. 
For example: `DisplayName`. Refer to the [Group data type](https://docs.aws.amazon.com/singlesignon/latest/IdentityStoreAPIReference/API_Group.html).
+* `attributeValue` - (Required) Value for an attribute.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the group in the Identity Store.
+* `description` - Description of the specified group.
+* `displayName` - Group's display name value.
+* `externalIds` - List of identifiers issued to this resource by an external identity provider.
+    * `id` - The identifier issued to this resource by an external identity provider.
+    * `issuer` - The issuer for an external identifier.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/identitystore_user.html.markdown b/website/docs/cdktf/typescript/d/identitystore_user.html.markdown
new file mode 100644
index 00000000000..65aa994a359
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/identitystore_user.html.markdown
@@ -0,0 +1,140 @@
+---
+subcategory: "SSO Identity Store"
+layout: "aws"
+page_title: "AWS: aws_identitystore_user"
+description: |-
+  Get information on an Identity Store User
+---
+
+
+
+# Data Source: aws_identitystore_user
+
+Use this data source to get an Identity Store User.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  Fn,
+  propertyAccess,
+  Token,
+  TerraformOutput,
+  TerraformStack,
+} from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIdentitystoreUser } from "./.gen/providers/aws/data-aws-identitystore-user";
+import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsSsoadminInstances(this, "example", {});
+    const dataAwsIdentitystoreUserExample = new DataAwsIdentitystoreUser(
+      this,
+      "example_1",
+      {
+        alternateIdentifier: {
+          uniqueAttribute: {
+            attributePath: "UserName",
+            attributeValue: "ExampleUser",
+          },
+        },
+        identityStoreId: Token.asString(
+          propertyAccess(Fn.tolist(example.identityStoreIds), ["0"])
+        ),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIdentitystoreUserExample.overrideLogicalId("example");
+    new TerraformOutput(this, "user_id", {
+      value: dataAwsIdentitystoreUserExample.userId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `identityStoreId` - (Required) Identity Store ID associated with the Single Sign-On Instance.
+
+The following arguments are optional:
+
+* `alternateIdentifier` (Optional) A unique identifier for a user or group that is not the primary identifier. Conflicts with `userId`. Detailed below.
+* `userId` - (Optional) The identifier for a user in the Identity Store.
+
+-> Exactly one of the above arguments must be provided.
+
+### `alternateIdentifier` Configuration Block
+
+The `alternateIdentifier` configuration block supports the following arguments:
+
+* `externalId` - (Optional) Configuration block for filtering by the identifier issued by an external identity provider. Detailed below.
+* `uniqueAttribute` - (Optional) An entity attribute that's unique to a specific entity. Detailed below.
+
+-> Exactly one of the above arguments must be provided.
+
+### `externalId` Configuration Block
+
+The `externalId` configuration block supports the following arguments (a sketch of an external-ID lookup follows the attribute list at the end of this page):
+
+* `id` - (Required) The identifier issued to this resource by an external identity provider.
+* `issuer` - (Required) The issuer for an external identifier.
+
+### `uniqueAttribute` Configuration Block
+
+The `uniqueAttribute` configuration block supports the following arguments:
+
+* `attributePath` - (Required) Attribute path that is used to specify which attribute name to search. For example: `UserName`. Refer to the [User data type](https://docs.aws.amazon.com/singlesignon/latest/IdentityStoreAPIReference/API_User.html).
+* `attributeValue` - (Required) Value for an attribute.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the user in the Identity Store.
+* `addresses` - List of details about the user's address.
+    * `country` - The country that this address is in.
+    * `formatted` - The name that is typically displayed when the address is shown for display.
+    * `locality` - The address locality.
+    * `postalCode` - The postal code of the address.
+    * `primary` - When `true`, this is the primary address associated with the user.
+    * `region` - The region of the address.
+    * `streetAddress` - The street of the address.
+    * `type` - The type of address.
+* `displayName` - The name that is typically displayed when the user is referenced.
+* `emails` - List of details about the user's email.
+    * `primary` - When `true`, this is the primary email associated with the user.
+    * `type` - The type of email.
+    * `value` - The email address. This value must be unique across the identity store.
+* `externalIds` - List of identifiers issued to this resource by an external identity provider.
+    * `id` - The identifier issued to this resource by an external identity provider.
+    * `issuer` - The issuer for an external identifier.
+* `locale` - The user's geographical region or location.
+* `name` - Details about the user's full name.
+    * `familyName` - The family name of the user.
+    * `formatted` - The name that is typically displayed when the name is shown for display.
+    * `givenName` - The given name of the user.
+    * `honorificPrefix` - The honorific prefix of the user.
+    * `honorificSuffix` - The honorific suffix of the user.
+    * `middleName` - The middle name of the user.
+* `nickname` - An alternate name for the user.
+* `phoneNumbers` - List of details about the user's phone number.
+    * `primary` - When `true`, this is the primary phone number associated with the user.
+    * `type` - The type of phone number.
+    * `value` - The user's phone number.
+* `preferredLanguage` - The preferred language of the user.
+* `profileUrl` - A URL that may be associated with the user.
+* `timezone` - The user's time zone.
+* `title` - The user's title.
+* `userName` - User's user name value.
+* `userType` - The user type.
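+
+The `externalId` alternative described above has no example on this page; the following minimal sketch shows such a lookup, where the `id` and `issuer` values are hypothetical placeholders:
+
+```typescript
+// A minimal sketch, assuming provider bindings generated by `cdktf get`.
+import { Construct } from "constructs";
+import { Fn, propertyAccess, Token, TerraformStack } from "cdktf";
+import { DataAwsIdentitystoreUser } from "./.gen/providers/aws/data-aws-identitystore-user";
+import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances";
+class ExternalIdSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const instances = new DataAwsSsoadminInstances(this, "instances", {});
+    new DataAwsIdentitystoreUser(this, "by_external_id", {
+      alternateIdentifier: {
+        externalId: {
+          id: "00u1abcd2efGHIJKL345", // hypothetical external identifier
+          issuer: "https://idp.example.com", // hypothetical issuer
+        },
+      },
+      identityStoreId: Token.asString(
+        propertyAccess(Fn.tolist(instances.identityStoreIds), ["0"])
+      ),
+    });
+  }
+}
+```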
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/imagebuilder_component.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_component.html.markdown new file mode 100644 index 00000000000..9f5773666aa --- /dev/null +++ b/website/docs/cdktf/typescript/d/imagebuilder_component.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_component" +description: |- + Provides details about an Image Builder Component +--- + + + +# Data Source: aws_imagebuilder_component + +Provides details about an Image Builder Component. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsImagebuilderComponent } from "./.gen/providers/aws/data-aws-imagebuilder-component"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsImagebuilderComponent(this, "example", { + arn: "arn:aws:imagebuilder:us-west-2:aws:component/amazon-cloudwatch-agent-linux/1.0.0", + }); + } +} + +``` + +## Argument Reference + +* `arn` - (Required) ARN of the component. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `changeDescription` - Change description of the component. +* `data` - Data of the component. +* `dateCreated` - Date the component was created. +* `description` - Description of the component. +* `encrypted` - Encryption status of the component. +* `kmsKeyId` - ARN of the Key Management Service (KMS) Key used to encrypt the component. +* `name` - Name of the component. +* `owner` - Owner of the component. +* `platform` - Platform of the component. +* `supportedOsVersions` - Operating Systems (OSes) supported by the component. +* `tags` - Key-value map of resource tags for the component. +* `type` - Type of the component. +* `version` - Version of the component. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/imagebuilder_components.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_components.html.markdown new file mode 100644 index 00000000000..a66fe21c321 --- /dev/null +++ b/website/docs/cdktf/typescript/d/imagebuilder_components.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_components" +description: |- + Get information on Image Builder Components. +--- + + + +# Data Source: aws_imagebuilder_components + +Use this data source to get the ARNs and names of Image Builder Components matching the specified criteria. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsImagebuilderComponents } from "./.gen/providers/aws/data-aws-imagebuilder-components";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderComponents(this, "example", {
+      filter: [
+        {
+          name: "platform",
+          values: ["Linux"],
+        },
+      ],
+      owner: "Self",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `owner` - (Optional) Owner of the components. Valid values are `Self`, `Shared` and `Amazon`. Defaults to `Self`.
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListComponents API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListComponents.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of ARNs of the matched Image Builder Components.
+* `names` - Set of names of the matched Image Builder Components.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/imagebuilder_container_recipe.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_container_recipe.html.markdown
new file mode 100644
index 00000000000..4b8f9535da4
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/imagebuilder_container_recipe.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_container_recipe"
+description: |-
+  Provides details about an Image Builder Container Recipe
+---
+
+
+
+# Data Source: aws_imagebuilder_container_recipe
+
+Provides details about an Image Builder Container Recipe.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderContainerRecipe } from "./.gen/providers/aws/data-aws-imagebuilder-container-recipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderContainerRecipe(this, "example", {
+      arn: "arn:aws:imagebuilder:us-east-1:aws:container-recipe/example/1.0.0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) ARN of the container recipe.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `component` - List of objects with components for the container recipe.
+    * `componentArn` - ARN of the Image Builder Component.
+    * `parameter` - Set of parameters that are used to configure the component.
+        * `name` - Name of the component parameter.
+        * `value` - Value of the component parameter.
+* `containerType` - Type of the container.
+* `dateCreated` - Date the container recipe was created.
+* `description` - Description of the container recipe.
+* `dockerfileTemplateData` - Dockerfile template used to build the image.
+* `encrypted` - Flag that indicates if the target container is encrypted. +* `instanceConfiguration` - List of objects with instance configurations for building and testing container images. + * `blockDeviceMapping` - Set of objects with block device mappings for the instance configuration. + * `deviceName` - Name of the device. For example, `/dev/sda` or `/dev/xvdb`. + * `ebs` - Single list of object with Elastic Block Storage (EBS) block device mapping settings. + * `deleteOnTermination` - Whether to delete the volume on termination. Defaults to unset, which is the value inherited from the parent image. + * `encrypted` - Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image. + * `iops` - Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume. + * `kmsKeyId` - ARN of the Key Management Service (KMS) Key for encryption. + * `snapshotId` - Identifier of the EC2 Volume Snapshot. + * `throughput` - For GP3 volumes only. The throughput in MiB/s that the volume supports. + * `volumeSize` - Size of the volume, in GiB. + * `volumeType` - Type of the volume. For example, `gp2` or `io2`. + * `noDevice` - Whether to remove a mapping from the parent image. + * `virtualName` - Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0. + * `image` - AMI ID of the base image for container build and test instance. +* `kmsKeyId` - KMS key used to encrypt the container image. +* `name` - Name of the container recipe. +* `owner` - Owner of the container recipe. +* `parentImage` - Base image for the container recipe. +* `platform` - Platform of the container recipe. +* `tags` - Key-value map of resource tags for the container recipe. +* `targetRepository` - Destination repository for the container image. + * `repositoryName` - Name of the container repository where the output container image is stored. The name is prefixed by the repository location. + * `service` - Service in which this image is registered. +* `version` - Version of the container recipe. +* `workingDirectory` - Working directory used during build and test workflows. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/imagebuilder_container_recipes.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_container_recipes.html.markdown new file mode 100644 index 00000000000..e1beeaab975 --- /dev/null +++ b/website/docs/cdktf/typescript/d/imagebuilder_container_recipes.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_container_recipes" +description: |- + Get information on Image Builder Container Recipes. +--- + + + +# Data Source: aws_imagebuilder_container_recipes + +Use this data source to get the ARNs and names of Image Builder Container Recipes matching the specified criteria. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsImagebuilderContainerRecipes } from "./.gen/providers/aws/data-aws-imagebuilder-container-recipes"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsImagebuilderContainerRecipes(this, "example", { + filter: [ + { + name: "platform", + values: ["Linux"], + }, + ], + owner: "Self", + }); + } +} + +``` + +## Argument Reference + +* `owner` - (Optional) Owner of the container recipes. Valid values are `self`, `shared` and `amazon`. Defaults to `self`. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListContainerRecipes API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListContainerRecipes.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Container Recipes. +* `names` - Set of names of the matched Image Builder Container Recipes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/imagebuilder_distribution_configuration.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_distribution_configuration.html.markdown new file mode 100644 index 00000000000..483909df04d --- /dev/null +++ b/website/docs/cdktf/typescript/d/imagebuilder_distribution_configuration.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_distribution_configuration" +description: |- + Provides details about an Image Builder Distribution Configuration +--- + + + +# Data Source: aws_imagebuilder_distribution_configuration + +Provides details about an Image Builder Distribution Configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsImagebuilderDistributionConfiguration } from "./.gen/providers/aws/data-aws-imagebuilder-distribution-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsImagebuilderDistributionConfiguration(this, "example", { + arn: "arn:aws:imagebuilder:us-west-2:aws:distribution-configuration/example", + }); + } +} + +``` + +## Argument Reference + +* `arn` - (Required) ARN of the distribution configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `dateCreated` - Date the distribution configuration was created. +* `dateUpdated` - Date the distribution configuration was updated. +* `description` - Description of the distribution configuration. +* `distribution` - Set of distributions. + * `amiDistributionConfiguration` - Nested list of AMI distribution configuration. + * `amiTags` - Key-value map of tags to apply to distributed AMI. + * `description` - Description to apply to distributed AMI. 
+ * `kmsKeyId` - ARN of Key Management Service (KMS) Key to encrypt AMI. + * `launchPermission` - Nested list of EC2 launch permissions. + * `organizationArns` - Set of AWS Organization ARNs. + * `organizationalUnitArns` - Set of AWS Organizational Unit ARNs. + * `userGroups` - Set of EC2 launch permission user groups. + * `userIds` - Set of AWS Account identifiers. + * `targetAccountIds` - Set of target AWS Account identifiers. + * `containerDistributionConfiguration` - Nested list of container distribution configurations. + * `containerTags` - Set of tags that are attached to the container distribution configuration. + * `description` - Description of the container distribution configuration. + * `targetRepository` - Set of destination repositories for the container distribution configuration. + * `repositoryName` - Name of the container repository where the output container image is stored. + * `service` - Service in which the image is registered. + * `fastLaunchConfiguration` - Nested list of Windows faster-launching configurations to use for AMI distribution. + * `accountId` - The owner account ID for the fast-launch enabled Windows AMI. + * `enabled` - A Boolean that represents the current state of faster launching for the Windows AMI. + * `launchTemplate` - Nested list of launch templates that the fast-launch enabled Windows AMI uses when it launches Windows instances to create pre-provisioned snapshots. + * `launchTemplateId` - The ID of the launch template to use for faster launching for a Windows AMI. + * `launchTemplateName` - The name of the launch template to use for faster launching for a Windows AMI. + * `launchTemplateVersion` - The version of the launch template to use for faster launching for a Windows AMI. + * `maxParallelLaunches` - The maximum number of parallel instances that are launched for creating resources. + * `snapshotConfiguration` - Nested list of configurations for managing the number of snapshots that are created from pre-provisioned instances for the Windows AMI when faster launching is enabled. + * `targetResourceCount` - The number of pre-provisioned snapshots to keep on hand for a fast-launch enabled Windows AMI. + * `launchTemplateConfiguration` - Nested list of launch template configurations. + * `default` - Whether the specified Amazon EC2 launch template is set as the default launch template. + * `launchTemplateId` - ID of the Amazon EC2 launch template. + * `accountId` - The account ID that this configuration applies to. + * `licenseConfigurationArns` - Set of Amazon Resource Names (ARNs) of License Manager License Configurations. + * `region` - AWS Region of distribution. +* `name` - Name of the distribution configuration. +* `tags` - Key-value map of resource tags for the distribution configuration. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/imagebuilder_distribution_configurations.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_distribution_configurations.html.markdown new file mode 100644 index 00000000000..b0419190067 --- /dev/null +++ b/website/docs/cdktf/typescript/d/imagebuilder_distribution_configurations.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_distribution_configurations" +description: |- + Get information on Image Builder Distribution Configurations. 
+---
+
+
+
+# Data Source: aws_imagebuilder_distribution_configurations
+
+Use this data source to get the ARNs and names of Image Builder Distribution Configurations matching the specified criteria.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderDistributionConfigurations } from "./.gen/providers/aws/data-aws-imagebuilder-distribution-configurations";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderDistributionConfigurations(this, "example", {
+      filter: [
+        {
+          name: "name",
+          values: ["example"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListDistributionConfigurations API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListDistributionConfigurations.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of ARNs of the matched Image Builder Distribution Configurations.
+* `names` - Set of names of the matched Image Builder Distribution Configurations.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/imagebuilder_image.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_image.html.markdown
new file mode 100644
index 00000000000..17564186455
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/imagebuilder_image.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_image"
+description: |-
+  Provides details about an Image Builder Image
+---
+
+
+
+# Data Source: aws_imagebuilder_image
+
+Provides details about an Image Builder Image.
+
+## Example Usage
+
+### Latest
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderImage } from "./.gen/providers/aws/data-aws-imagebuilder-image";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderImage(this, "example", {
+      arn: "arn:aws:imagebuilder:us-west-2:aws:image/amazon-linux-2-x86/x.x.x",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `arn` - (Required) ARN of the image. The suffix can either be specified with wildcards (`x.x.x`) to fetch the latest build version or a full build version (e.g., `2020.11.26/1`) to fetch an exact version.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `buildVersionArn` - Build version ARN of the image.
This will always have the `#.#.#/#` suffix.
+* `containerRecipeArn` - ARN of the container recipe.
+* `dateCreated` - Date the image was created.
+* `distributionConfigurationArn` - ARN of the Image Builder Distribution Configuration.
+* `enhancedImageMetadataEnabled` - Whether additional information about the image being created is collected.
+* `imageRecipeArn` - ARN of the image recipe.
+* `imageTestsConfiguration` - Single-element list of objects with image tests configuration.
+    * `imageTestsEnabled` - Whether image tests are enabled.
+    * `timeoutMinutes` - Number of minutes before image tests time out.
+* `infrastructureConfigurationArn` - ARN of the Image Builder Infrastructure Configuration.
+* `name` - Name of the image.
+* `platform` - Platform of the image.
+* `osVersion` - Operating System version of the image.
+* `outputResources` - List of objects with resources created by the image.
+    * `amis` - Set of objects with each Amazon Machine Image (AMI) created.
+        * `accountId` - Account identifier of the AMI.
+        * `description` - Description of the AMI.
+        * `image` - Identifier of the AMI.
+        * `name` - Name of the AMI.
+        * `region` - Region of the AMI.
+    * `containers` - Set of objects with each container image created and stored in the output repository.
+        * `imageUris` - Set of URIs for created containers.
+        * `region` - Region of the container image.
+* `tags` - Key-value map of resource tags for the image.
+* `version` - Version of the image.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/imagebuilder_image_pipeline.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_image_pipeline.html.markdown
new file mode 100644
index 00000000000..aa2fee7b21d
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/imagebuilder_image_pipeline.html.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_image_pipeline"
+description: |-
+  Provides details about an Image Builder Image Pipeline
+---
+
+
+
+# Data Source: aws_imagebuilder_image_pipeline
+
+Provides details about an Image Builder Image Pipeline.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderImagePipeline } from "./.gen/providers/aws/data-aws-imagebuilder-image-pipeline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderImagePipeline(this, "example", {
+      arn: "arn:aws:imagebuilder:us-west-2:aws:image-pipeline/example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `arn` - (Required) ARN of the image pipeline.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `containerRecipeArn` - ARN of the container recipe.
+* `dateCreated` - Date the image pipeline was created.
+* `dateLastRun` - Date the image pipeline was last run.
+* `dateNextRun` - Date the image pipeline will run next.
+* `dateUpdated` - Date the image pipeline was updated.
+* `description` - Description of the image pipeline.
+* `distributionConfigurationArn` - ARN of the Image Builder Distribution Configuration.
+* `enhancedImageMetadataEnabled` - Whether additional information about the image being created is collected.
+* `imageRecipeArn` - ARN of the image recipe.
+* `imageTestsConfiguration` - Single-element list of objects with image tests configuration.
+    * `imageTestsEnabled` - Whether image tests are enabled.
+    * `timeoutMinutes` - Number of minutes before image tests time out.
+* `infrastructureConfigurationArn` - ARN of the Image Builder Infrastructure Configuration.
+* `name` - Name of the image pipeline.
+* `platform` - Platform of the image pipeline.
+* `schedule` - Single-element list of objects with schedule settings.
+    * `pipelineExecutionStartCondition` - Condition when the pipeline should trigger a new image build.
+    * `scheduleExpression` - Cron expression of how often the pipeline start condition is evaluated.
+* `status` - Status of the image pipeline.
+* `tags` - Key-value map of resource tags for the image pipeline.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/imagebuilder_image_pipelines.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_image_pipelines.html.markdown
new file mode 100644
index 00000000000..e6eb3e2b62b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/imagebuilder_image_pipelines.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_image_pipelines"
+description: |-
+  Get information on Image Builder Image Pipelines.
+---
+
+
+
+# Data Source: aws_imagebuilder_image_pipelines
+
+Use this data source to get the ARNs and names of Image Builder Image Pipelines matching the specified criteria.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderImagePipelines } from "./.gen/providers/aws/data-aws-imagebuilder-image-pipelines";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderImagePipelines(this, "example", {
+      filter: [
+        {
+          name: "name",
+          values: ["example"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListImagePipelines API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListImagePipelines.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of ARNs of the matched Image Builder Image Pipelines.
+* `names` - Set of names of the matched Image Builder Image Pipelines.
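+
+For example, a minimal sketch (the stack and output names are illustrative, not generated by `cdktf convert`) that exposes the matched ARNs as a stack output:
+
+```typescript
+// Sketch: export the ARNs of all pipelines whose name matches the filter.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsImagebuilderImagePipelines } from "./.gen/providers/aws/data-aws-imagebuilder-image-pipelines";
+class PipelineArnsStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const pipelines = new DataAwsImagebuilderImagePipelines(this, "example", {
+      filter: [
+        {
+          name: "name",
+          values: ["example"],
+        },
+      ],
+    });
+    // `arns` is an unordered set of strings resolved at plan/apply time.
+    new TerraformOutput(this, "pipeline_arns", {
+      value: pipelines.arns,
+    });
+  }
+}
+```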
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/imagebuilder_image_recipe.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_image_recipe.html.markdown
new file mode 100644
index 00000000000..657fb21dd7a
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/imagebuilder_image_recipe.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_image_recipe"
+description: |-
+  Provides details about an Image Builder Image Recipe
+---
+
+
+
+# Data Source: aws_imagebuilder_image_recipe
+
+Provides details about an Image Builder Image Recipe.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderImageRecipe } from "./.gen/providers/aws/data-aws-imagebuilder-image-recipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderImageRecipe(this, "example", {
+      arn: "arn:aws:imagebuilder:us-east-1:aws:image-recipe/example/1.0.0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) ARN of the image recipe.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `blockDeviceMapping` - Set of objects with block device mappings for the image recipe.
+    * `deviceName` - Name of the device. For example, `/dev/sda` or `/dev/xvdb`.
+    * `ebs` - Single-element list of objects with Elastic Block Storage (EBS) block device mapping settings.
+        * `deleteOnTermination` - Whether to delete the volume on termination. Defaults to unset, which is the value inherited from the parent image.
+        * `encrypted` - Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image.
+        * `iops` - Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume.
+        * `kmsKeyId` - ARN of the Key Management Service (KMS) Key for encryption.
+        * `snapshotId` - Identifier of the EC2 Volume Snapshot.
+        * `throughput` - For GP3 volumes only. The throughput in MiB/s that the volume supports.
+        * `volumeSize` - Size of the volume, in GiB.
+        * `volumeType` - Type of the volume. For example, `gp2` or `io2`.
+    * `noDevice` - Whether to remove a mapping from the parent image.
+    * `virtualName` - Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0.
+* `component` - List of objects with components for the image recipe.
+    * `componentArn` - ARN of the Image Builder Component.
+    * `parameter` - Set of parameters that are used to configure the component.
+        * `name` - Name of the component parameter.
+        * `value` - Value of the component parameter.
+* `dateCreated` - Date the image recipe was created.
+* `description` - Description of the image recipe.
+* `name` - Name of the image recipe.
+* `owner` - Owner of the image recipe.
+* `parentImage` - Base image of the image recipe.
+* `platform` - Platform of the image recipe.
+* `tags` - Key-value map of resource tags for the image recipe.
+* `userDataBase64` - Base64 encoded contents of user data. Commands or a command script to run when the build instance is launched.
+* `version` - Version of the image recipe. +* `workingDirectory` - Working directory used during build and test workflows. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/imagebuilder_image_recipes.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_image_recipes.html.markdown new file mode 100644 index 00000000000..a0558b66928 --- /dev/null +++ b/website/docs/cdktf/typescript/d/imagebuilder_image_recipes.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_recipes" +description: |- + Get information on Image Builder Image Recipes. +--- + + + +# Data Source: aws_imagebuilder_image_recipes + +Use this data source to get the ARNs and names of Image Builder Image Recipes matching the specified criteria. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsImagebuilderImageRecipes } from "./.gen/providers/aws/data-aws-imagebuilder-image-recipes"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsImagebuilderImageRecipes(this, "example", { + filter: [ + { + name: "platform", + values: ["Linux"], + }, + ], + owner: "Self", + }); + } +} + +``` + +## Argument Reference + +* `owner` - (Optional) Owner of the image recipes. Valid values are `self`, `shared` and `amazon`. Defaults to `self`. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListImageRecipes API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListImageRecipes.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Image Builder Image Recipes. +* `names` - Set of names of the matched Image Builder Image Recipes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/imagebuilder_infrastructure_configuration.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_infrastructure_configuration.html.markdown new file mode 100644 index 00000000000..d08236b4489 --- /dev/null +++ b/website/docs/cdktf/typescript/d/imagebuilder_infrastructure_configuration.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_infrastructure_configuration" +description: |- + Provides details about an Image Builder Infrastructure Configuration +--- + + + +# Data Source: aws_imagebuilder_infrastructure_configuration + +Provides details about an Image Builder Infrastructure Configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderInfrastructureConfiguration } from "./.gen/providers/aws/data-aws-imagebuilder-infrastructure-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderInfrastructureConfiguration(this, "example", {
+      arn: "arn:aws:imagebuilder:us-west-2:aws:infrastructure-configuration/example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) ARN of the infrastructure configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `dateCreated` - Date the infrastructure configuration was created.
+* `dateUpdated` - Date the infrastructure configuration was updated.
+* `description` - Description of the infrastructure configuration.
+* `instanceMetadataOptions` - Nested list of instance metadata options for the HTTP requests that pipeline builds use to launch EC2 build and test instances.
+    * `httpPutResponseHopLimit` - Number of hops that an instance can traverse to reach its destination.
+    * `httpTokens` - Whether a signed token is required for instance metadata retrieval requests.
+* `instanceProfileName` - Name of the IAM Instance Profile associated with the configuration.
+* `instanceTypes` - Set of EC2 Instance Types associated with the configuration.
+* `keyPair` - Name of the EC2 Key Pair associated with the configuration.
+* `logging` - Nested list of logging settings.
+    * `s3Logs` - Nested list of S3 logs settings.
+        * `s3BucketName` - Name of the S3 Bucket for logging.
+        * `s3KeyPrefix` - Key prefix for S3 Bucket logging.
+* `name` - Name of the infrastructure configuration.
+* `resourceTags` - Key-value map of resource tags for the infrastructure created by the infrastructure configuration.
+* `securityGroupIds` - Set of EC2 Security Group identifiers associated with the configuration.
+* `snsTopicArn` - ARN of the SNS Topic associated with the configuration.
+* `subnetId` - Identifier of the EC2 Subnet associated with the configuration.
+* `tags` - Key-value map of resource tags for the infrastructure configuration.
+* `terminateInstanceOnFailure` - Whether instances are terminated on failure.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/imagebuilder_infrastructure_configurations.html.markdown b/website/docs/cdktf/typescript/d/imagebuilder_infrastructure_configurations.html.markdown
new file mode 100644
index 00000000000..24eb4dca35d
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/imagebuilder_infrastructure_configurations.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_infrastructure_configurations"
+description: |-
+  Get information on Image Builder Infrastructure Configurations.
+---
+
+
+
+# Data Source: aws_imagebuilder_infrastructure_configurations
+
+Use this data source to get the ARNs and names of Image Builder Infrastructure Configurations matching the specified criteria.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsImagebuilderInfrastructureConfigurations } from "./.gen/providers/aws/data-aws-imagebuilder-infrastructure-configurations";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsImagebuilderInfrastructureConfigurations(this, "example", {
+      filter: [
+        {
+          name: "name",
+          values: ["example"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [Image Builder ListInfrastructureConfigurations API Reference](https://docs.aws.amazon.com/imagebuilder/latest/APIReference/API_ListInfrastructureConfigurations.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of ARNs of the matched Image Builder Infrastructure Configurations.
+* `names` - Set of names of the matched Image Builder Infrastructure Configurations.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/inspector_rules_packages.html.markdown b/website/docs/cdktf/typescript/d/inspector_rules_packages.html.markdown
new file mode 100644
index 00000000000..94f0b2bc3d3
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/inspector_rules_packages.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Inspector Classic"
+layout: "aws"
+page_title: "AWS: aws_inspector_rules_packages"
+description: |-
+  Provides a list of Amazon Inspector Classic Rules Packages which can be used by Amazon Inspector Classic.
+---
+
+
+
+# Data Source: aws_inspector_rules_packages
+
+The Amazon Inspector Classic Rules Packages data source allows access to the list of AWS
+Inspector Rules Packages which can be used by Amazon Inspector Classic within the region
+configured in the provider.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsInspectorRulesPackages } from "./.gen/providers/aws/data-aws-inspector-rules-packages";
+import { InspectorAssessmentTarget } from "./.gen/providers/aws/inspector-assessment-target";
+import { InspectorAssessmentTemplate } from "./.gen/providers/aws/inspector-assessment-template";
+import { InspectorResourceGroup } from "./.gen/providers/aws/inspector-resource-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const group = new InspectorResourceGroup(this, "group", {
+      tags: {
+        test: "test",
+      },
+    });
+    const rules = new DataAwsInspectorRulesPackages(this, "rules", {});
+    const assessment = new InspectorAssessmentTarget(this, "assessment", {
+      name: "test",
+      resourceGroupArn: group.arn,
+    });
+    const awsInspectorAssessmentTemplateAssessment =
+      new InspectorAssessmentTemplate(this, "assessment_3", {
+        duration: Token.asNumber("60"),
+        name: "Test",
+        rulesPackageArns: Token.asList(rules.arns),
+        targetArn: assessment.arn,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsInspectorAssessmentTemplateAssessment.overrideLogicalId("assessment");
+  }
+}
+
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `arns` - List of the Amazon Inspector Classic Rules Packages ARNs available in the AWS region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/instance.html.markdown b/website/docs/cdktf/typescript/d/instance.html.markdown
new file mode 100644
index 00000000000..b1e84eb2abf
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/instance.html.markdown
@@ -0,0 +1,155 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_instance"
+description: |-
+  Get information on an Amazon EC2 Instance.
+---
+
+
+
+# Data Source: aws_instance
+
+Use this data source to get the ID of an Amazon EC2 Instance for use in other resources.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsInstance } from "./.gen/providers/aws/data-aws-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsInstance(this, "foo", {
+      filter: [
+        {
+          name: "image-id",
+          values: ["ami-xxxxxxxx"],
+        },
+        {
+          name: "tag:Name",
+          values: ["instance-name-tag"],
+        },
+      ],
+      instanceId: "i-instanceid",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `instanceId` - (Optional) Specify the exact Instance ID with which to populate the data source.
+
+* `instanceTags` - (Optional) Map of tags, each pair of which must
+exactly match a pair on the desired Instance.
+
+* `filter` - (Optional) One or more name/value pairs to use as filters. There are
+several valid keys; for a full reference, check out
+[describe-instances in the AWS CLI reference][1].
+
+* `getPasswordData` - (Optional) If true, wait for password data to become available and retrieve it. Useful for getting the administrator password for instances running Microsoft Windows. The password data is exported to the `passwordData` attribute.
See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information.
+
+* `getUserData` - (Optional) Retrieve Base64 encoded User Data contents into the `userDataBase64` attribute. A SHA-1 hash of the User Data contents will always be present in the `userData` attribute. Defaults to `false`.
+
+~> **NOTE:** At least one of `filter`, `instanceTags`, or `instanceId` must be specified.
+
+~> **NOTE:** If anything other than a single match is returned by the search,
+Terraform will fail. Ensure that your search is specific enough to return
+a single Instance ID only.
+
+## Attribute Reference
+
+`id` is set to the ID of the found Instance. In addition, the following attributes
+are exported:
+
+~> **NOTE:** Some values are not always set and may not be available for
+interpolation.
+
+* `ami` - ID of the AMI used to launch the instance.
+* `arn` - ARN of the instance.
+* `associatePublicIpAddress` - Whether the Instance is associated with a public IP address (Boolean).
+* `availabilityZone` - Availability zone of the Instance.
+* `creditSpecification` - Credit specification of the Instance.
+* `disableApiStop` - Whether [EC2 Instance Stop Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Stop_Start.html#Using_StopProtection) is enabled (Boolean).
+* `disableApiTermination` - Whether [EC2 Instance Termination Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingDisableAPITermination) is enabled (Boolean).
+* `ebsBlockDevice` - EBS block device mappings of the Instance.
+    * `deleteOnTermination` - If the EBS volume will be deleted on termination.
+    * `deviceName` - Physical name of the device.
+    * `encrypted` - If the EBS volume is encrypted.
+    * `iops` - `0` if the EBS volume is not a provisioned IOPS image, otherwise the supported IOPS count.
+    * `kmsKeyArn` - ARN of KMS Key, if EBS volume is encrypted.
+    * `snapshotId` - ID of the snapshot.
+    * `throughput` - Throughput of the volume, in MiB/s.
+    * `volumeSize` - Size of the volume, in GiB.
+    * `volumeType` - Volume type.
+* `ebsOptimized` - Whether the Instance is EBS optimized (Boolean).
+* `enclaveOptions` - Enclave options of the instance.
+    * `enabled` - Whether Nitro Enclaves are enabled.
+* `ephemeralBlockDevice` - Ephemeral block device mappings of the Instance.
+    * `deviceName` - Physical name of the device.
+    * `noDevice` - Whether the specified device included in the device mapping was suppressed (Boolean).
+    * `virtualName` - Virtual device name.
+* `hostId` - ID of the dedicated host the instance will be assigned to.
+* `hostResourceGroupArn` - ARN of the host resource group the instance is associated with.
+* `iamInstanceProfile` - Name of the instance profile associated with the Instance.
+* `instanceState` - State of the instance. One of: `pending`, `running`, `shutting-down`, `terminated`, `stopping`, `stopped`. See [Instance Lifecycle](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-lifecycle.html) for more information.
+* `instanceType` - Type of the Instance.
+* `ipv6Addresses` - IPv6 addresses associated to the Instance, if applicable. **NOTE**: Unlike the IPv4 address, this doesn't change if you attach an EIP to the instance.
+* `keyName` - Key name of the Instance.
+* `maintenanceOptions` - Maintenance and recovery options for the instance.
+    * `autoRecovery` - Automatic recovery behavior of the instance.
+* `metadataOptions` - Metadata options of the Instance.
+    * `httpEndpoint` - State of the metadata service: `enabled`, `disabled`.
+    * `httpTokens` - If session tokens are required: `optional`, `required`.
+    * `httpPutResponseHopLimit` - Desired HTTP PUT response hop limit for instance metadata requests.
+    * `instanceMetadataTags` - If access to instance tags is allowed from the metadata service: `enabled`, `disabled`.
+* `monitoring` - Whether detailed monitoring is enabled for the Instance (Boolean).
+* `networkInterfaceId` - ID of the network interface that was created with the Instance.
+* `outpostArn` - ARN of the Outpost.
+* `passwordData` - Base-64 encoded encrypted password data for the instance. Useful for getting the administrator password for instances running Microsoft Windows. This attribute is only exported if `getPasswordData` is true. See [GetPasswordData](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_GetPasswordData.html) for more information.
+* `placementGroup` - Placement group of the Instance.
+* `placementPartitionNumber` - Number of the partition the instance is in.
+* `privateDns` - Private DNS name assigned to the Instance. Can only be used inside Amazon EC2, and only available if you've enabled DNS hostnames for your VPC.
+* `privateDnsNameOptions` - Options for the instance hostname.
+    * `enableResourceNameDnsAaaaRecord` - Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records.
+    * `enableResourceNameDnsARecord` - Indicates whether to respond to DNS queries for instance hostnames with DNS A records.
+    * `hostnameType` - Type of hostname for EC2 instances.
+* `privateIp` - Private IP address assigned to the Instance.
+* `publicDns` - Public DNS name assigned to the Instance. For EC2-VPC, this is only available if you've enabled DNS hostnames for your VPC.
+* `publicIp` - Public IP address assigned to the Instance, if applicable. **NOTE**: If you are using an [`awsEip`](/docs/providers/aws/r/eip.html) with your instance, you should refer to the EIP's address directly and not use `publicIp`, as this field will change after the EIP is attached.
+* `rootBlockDevice` - Root block device mappings of the Instance.
+    * `deviceName` - Physical name of the device.
+    * `deleteOnTermination` - If the root block device will be deleted on termination.
+    * `encrypted` - If the EBS volume is encrypted.
+    * `iops` - `0` if the volume is not a provisioned IOPS image, otherwise the supported IOPS count.
+    * `kmsKeyArn` - ARN of KMS Key, if EBS volume is encrypted.
+    * `throughput` - Throughput of the volume, in MiB/s.
+    * `volumeSize` - Size of the volume, in GiB.
+    * `volumeType` - Type of the volume.
+* `secondaryPrivateIps` - Secondary private IPv4 addresses assigned to the instance's primary network interface (eth0) in a VPC.
+* `securityGroups` - Associated security groups.
+* `sourceDestCheck` - Whether the network interface performs source/destination checking (Boolean).
+* `subnetId` - VPC subnet ID.
+* `tags` - Map of tags assigned to the Instance.
+* `tenancy` - Tenancy of the instance: `dedicated`, `default`, `host`.
+* `userData` - SHA-1 hash of User Data supplied to the Instance.
+* `userDataBase64` - Base64 encoded contents of User Data supplied to the Instance. Valid UTF-8 contents can be decoded with the [`base64Decode` function](https://www.terraform.io/docs/configuration/functions/base64decode.html). This attribute is only exported if `getUserData` is true.
+* `vpcSecurityGroupIds` - Associated security groups in a non-default VPC.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/instances.html.markdown b/website/docs/cdktf/typescript/d/instances.html.markdown
new file mode 100644
index 00000000000..d7af7fc22ac
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/instances.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_instances"
+description: |-
+  Get information on Amazon EC2 instances.
+---
+
+
+
+# Data Source: aws_instances
+
+Use this data source to get IDs or IPs of Amazon EC2 instances to be referenced elsewhere,
+e.g., to allow easier migration from another management solution
+or to make it easier for an operator to connect through bastion host(s).
+
+-> **Note:** It's a best practice to expose instance details via [outputs](https://www.terraform.io/docs/configuration/outputs.html)
+and [remote state](https://www.terraform.io/docs/state/remote.html) and
+**use [`terraformRemoteState`](https://www.terraform.io/docs/providers/terraform/d/remote_state.html)
+data source instead** if you manage referenced instances via Terraform.
+
+~> **Note:** It's strongly discouraged to use this data source for querying ephemeral
+instances (e.g., managed via autoscaling group), as the output may change at any time
+and you'd need to re-run `apply` every time an instance comes up or dies.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  Fn,
+  Token,
+  TerraformCount,
+  propertyAccess,
+  TerraformStack,
+} from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsInstances } from "./.gen/providers/aws/data-aws-instances";
+import { Eip } from "./.gen/providers/aws/eip";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new DataAwsInstances(this, "test", {
+      filter: [
+        {
+          name: "instance.group-id",
+          values: ["sg-12345678"],
+        },
+      ],
+      instanceStateNames: ["running", "stopped"],
+      instanceTags: {
+        Role: "HardWorker",
+      },
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const testCount = TerraformCount.of(Token.asNumber(Fn.lengthOf(test.ids)));
+    const awsEipTest = new Eip(this, "test_1", {
+      instance: Token.asString(propertyAccess(test.ids, [testCount.index])),
+      count: testCount,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsEipTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+* `instanceTags` - (Optional) Map of tags, each pair of which must
+exactly match a pair on desired instances.
+
+* `instanceStateNames` - (Optional) List of instance states that should be applicable to the desired instances. The permitted values are: `pending, running, shutting-down, stopped, stopping, terminated`. The default value is `running`.
+
+* `filter` - (Optional) One or more name/value pairs to use as filters. There are
+several valid keys; for a full reference, check out
+[describe-instances in the AWS CLI reference][1].
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - IDs of instances found through the filter.
+* `privateIps` - Private IP addresses of instances found through the filter.
+* `publicIps` - Public IP addresses of instances found through the filter.
+* `ipv6Addresses` - IPv6 addresses of instances found through the filter.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+[1]: http://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/internet_gateway.html.markdown b/website/docs/cdktf/typescript/d/internet_gateway.html.markdown
new file mode 100644
index 00000000000..30c2d8632eb
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/internet_gateway.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_internet_gateway"
+description: |-
+  Provides details about a specific Internet Gateway
+---
+
+
+
+# Data Source: aws_internet_gateway
+
+`awsInternetGateway` provides details about a specific Internet Gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformVariable, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsInternetGateway } from "./.gen/providers/aws/data-aws-internet-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const vpcId = new TerraformVariable(this, "vpc_id", {});
+    new DataAwsInternetGateway(this, "default", {
+      filter: [
+        {
+          name: "attachment.vpc-id",
+          values: [vpcId.stringValue],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+Internet Gateway in the current region. The given filters must match exactly one
+Internet Gateway whose data will be exported as attributes.
+
+* `internetGatewayId` - (Optional) ID of the specific Internet Gateway to retrieve.
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired Internet Gateway.
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInternetGateways.html).
+
+* `values` - (Required) Set of values that are accepted for the given field.
+  An Internet Gateway will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Internet Gateway.
+
+All of the argument attributes except the `filter` block are also exported as
+result attributes. This data source will complete the data by populating
+any fields that are not included in the configuration with the data for
+the selected Internet Gateway.
+
+`attachments` are also exported with the following attributes, when relevant.
+Each attachment supports the following:
+
+* `ownerId` - ID of the AWS account that owns the internet gateway.
+* `state` - Current state of the attachment between the gateway and the VPC. Present only if a VPC is attached.
+* `vpcId` - ID of an attached VPC.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/iot_endpoint.html.markdown b/website/docs/cdktf/typescript/d/iot_endpoint.html.markdown
new file mode 100644
index 00000000000..111a1f8e82e
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/iot_endpoint.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_endpoint"
+description: |-
+  Get the unique IoT endpoint
+---
+
+
+
+# Data Source: aws_iot_endpoint
+
+Returns a unique endpoint specific to the AWS account making the call.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIotEndpoint } from "./.gen/providers/aws/data-aws-iot-endpoint";
+import { Pod } from "./.gen/providers/kubernetes/pod";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: kubernetes.
+    For a more precise conversion please use the --provider flag in convert.*/
+    const example = new DataAwsIotEndpoint(this, "example", {});
+    new Pod(this, "agent", {
+      metadata: [
+        {
+          name: "my-device",
+        },
+      ],
+      spec: [
+        {
+          container: [
+            {
+              env: [
+                {
+                  name: "IOT_ENDPOINT",
+                  value: example.endpointAddress,
+                },
+              ],
+              image: "gcr.io/my-project/image-name",
+              name: "image-name",
+            },
+          ],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `endpointType` - (Optional) Endpoint type. Valid values: `iot:credentialProvider`, `iot:data`, `iot:dataAts`, `iot:jobs`.
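+
+For example, a minimal sketch (the stack and output names are illustrative) that pins the ATS data endpoint explicitly and exports its address. Note that in code the provider expects the raw API value `iot:Data-ATS` rather than the camelized form shown above:
+
+```typescript
+// Sketch: request the ATS data endpoint and expose it as a stack output.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsIotEndpoint } from "./.gen/providers/aws/data-aws-iot-endpoint";
+class IotEndpointStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ats = new DataAwsIotEndpoint(this, "ats", {
+      endpointType: "iot:Data-ATS",
+    });
+    // Resolves to something like IDENTIFIER-ats.iot.REGION.amazonaws.com.
+    new TerraformOutput(this, "iot_endpoint_address", {
+      value: ats.endpointAddress,
+    });
+  }
+}
+```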
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `endpointAddress` - Endpoint based on `endpointType`:
+    * No `endpointType`: Either `iot:data` or `iot:dataAts` [depending on region](https://aws.amazon.com/blogs/iot/aws-iot-core-ats-endpoints/)
+    * `iot:credentialProvider`: `IDENTIFIER.credentials.iot.REGION.amazonaws.com`
+    * `iot:data`: `IDENTIFIER.iot.REGION.amazonaws.com`
+    * `iot:dataAts`: `IDENTIFIER-ats.iot.REGION.amazonaws.com`
+    * `iot:jobs`: `IDENTIFIER.jobs.iot.REGION.amazonaws.com`
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ip_ranges.html.markdown b/website/docs/cdktf/typescript/d/ip_ranges.html.markdown
new file mode 100644
index 00000000000..b80d8cf22e6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ip_ranges.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Meta Data Sources"
+layout: "aws"
+page_title: "AWS: aws_ip_ranges"
+description: |-
+  Get information on AWS IP ranges.
+---
+
+
+
+# Data Source: aws_ip_ranges
+
+Use this data source to get the IP ranges of various AWS products and services. For more information about the contents of this data source and required JSON syntax if referencing a custom URL, see the [AWS IP Address Ranges documentation][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIpRanges } from "./.gen/providers/aws/data-aws-ip-ranges";
+import { SecurityGroup } from "./.gen/providers/aws/security-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const europeanEc2 = new DataAwsIpRanges(this, "european_ec2", {
+      regions: ["eu-west-1", "eu-central-1"],
+      services: ["ec2"],
+    });
+    new SecurityGroup(this, "from_europe", {
+      ingress: [
+        {
+          cidrBlocks: Token.asList(europeanEc2.cidrBlocks),
+          fromPort: Token.asNumber("443"),
+          ipv6CidrBlocks: Token.asList(europeanEc2.ipv6CidrBlocks),
+          protocol: "tcp",
+          toPort: Token.asNumber("443"),
+        },
+      ],
+      name: "from_europe",
+      tags: {
+        CreateDate: Token.asString(europeanEc2.createDate),
+        SyncToken: Token.asString(europeanEc2.syncToken),
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `regions` - (Optional) Filter IP ranges by regions (or include all regions, if
+omitted). Valid items are `global` (for `cloudfront`) as well as all AWS regions
+(e.g., `eu-central-1`)
+
+* `services` - (Required) Filter IP ranges by services. Valid items are `amazon`
+(for amazon.com), `amazonConnect`, `apiGateway`, `cloud9`, `cloudfront`,
+`codebuild`, `dynamodb`, `ec2`, `ec2InstanceConnect`, `globalaccelerator`,
+`route53`, `route53Healthchecks`, `s3` and `workspacesGateways`. See the
+[`service` attribute][2] documentation for other possible values.
+
+~> **NOTE:** If the specified combination of regions and services does not yield any
+CIDR blocks, Terraform will fail.
+
+* `url` - (Optional) Custom URL for source JSON file. Syntax must match [AWS IP Address Ranges documentation][1]. Defaults to `https://ip-ranges.amazonaws.com/ip-ranges.json`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `cidrBlocks` - Lexically ordered list of CIDR blocks.
+* `ipv6CidrBlocks` - Lexically ordered list of IPv6 CIDR blocks.
+* `createDate` - Publication time of the IP ranges (e.g., `2016-08-03-23-46-05`).
+* `syncToken` - Publication time of the IP ranges, in Unix epoch time format
+  (e.g., `1470267965`).
+
+[1]: https://docs.aws.amazon.com/general/latest/gr/aws-ip-ranges.html
+[2]: https://docs.aws.amazon.com/general/latest/gr/aws-ip-ranges.html#aws-ip-syntax
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ivs_stream_key.html.markdown b/website/docs/cdktf/typescript/d/ivs_stream_key.html.markdown
new file mode 100644
index 00000000000..fbe1410bd9d
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ivs_stream_key.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "IVS (Interactive Video)"
+layout: "aws"
+page_title: "AWS: aws_ivs_stream_key"
+description: |-
+  Terraform data source for managing an AWS IVS (Interactive Video) Stream Key.
+---
+
+
+
+# Data Source: aws_ivs_stream_key
+
+Terraform data source for managing an AWS IVS (Interactive Video) Stream Key.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIvsStreamKey } from "./.gen/providers/aws/data-aws-ivs-stream-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsIvsStreamKey(this, "example", {
+      channelArn: "arn:aws:ivs:us-west-2:326937407773:channel/0Y1lcs4U7jk5",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `channelArn` - (Required) ARN of the Channel.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Stream Key.
+* `tags` - Map of tags assigned to the resource.
+* `value` - Stream Key value.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kendra_experience.html.markdown b/website/docs/cdktf/typescript/d/kendra_experience.html.markdown
new file mode 100644
index 00000000000..9d1651166e8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kendra_experience.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_experience"
+description: |-
+  Provides details about a specific Amazon Kendra Experience.
+---
+
+
+
+# Data Source: aws_kendra_experience
+
+Provides details about a specific Amazon Kendra Experience.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKendraExperience } from "./.gen/providers/aws/data-aws-kendra-experience";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKendraExperience(this, "example", {
+      experienceId: "87654321-1234-4321-4321-321987654321",
+      indexId: "12345678-1234-1234-1234-123456789123",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `experienceId` - (Required) Identifier of the Experience.
+* `indexId` - (Required) Identifier of the index that contains the Experience.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Experience.
+* `configuration` - Block that specifies the configuration information for your Amazon Kendra Experience. This includes `contentSourceConfiguration`, which specifies the data source IDs and/or FAQ IDs, and `userIdentityConfiguration`, which specifies the user or group information to grant access to your Amazon Kendra Experience. Documented below.
+* `createdAt` - Unix datetime that the Experience was created.
+* `description` - Description of the Experience.
+* `endpoints` - Shows the endpoint URLs for your Amazon Kendra Experiences. The URLs are unique and fully hosted by AWS. Documented below.
+* `errorMessage` - Reason your Amazon Kendra Experience could not be processed properly.
+* `id` - Unique identifiers of the Experience and index separated by a slash (`/`).
+* `name` - Name of the Experience.
+* `roleArn` - Shows the ARN of a role with permission to access `query` API, `querySuggestions` API, `submitFeedback` API, and AWS SSO that stores your user and group information.
+* `status` - Current processing status of your Amazon Kendra Experience. When the status is `ACTIVE`, your Amazon Kendra Experience is ready to use. When the status is `FAILED`, the `errorMessage` field contains the reason that this failed.
+* `updatedAt` - Date and time that the Experience was last updated.
+
+The `configuration` block supports the following attributes:
+
+* `contentSourceConfiguration` - The identifiers of your data sources and FAQs. This is the content you want to use for your Amazon Kendra Experience. Documented below.
+* `userIdentityConfiguration` - The AWS SSO field name that contains the identifiers of your users, such as their emails. Documented below.
+
+The `contentSourceConfiguration` block supports the following attributes:
+
+* `dataSourceIds` - Identifiers of the data sources you want to use for your Amazon Kendra Experience.
+* `directPutContent` - Whether to use documents you indexed directly using the `BatchPutDocument` API.
+* `faqIds` - Identifiers of the FAQs that you want to use for your Amazon Kendra Experience.
+
+The `userIdentityConfiguration` block supports the following attributes:
+
+* `identityAttributeName` - The AWS SSO field name that contains the identifiers of your users, such as their emails.
+
+The `endpoints` block supports the following attributes:
+
+* `endpoint` - Endpoint of your Amazon Kendra Experience.
+* `endpointType` - Type of endpoint for your Amazon Kendra Experience.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kendra_faq.html.markdown b/website/docs/cdktf/typescript/d/kendra_faq.html.markdown
new file mode 100644
index 00000000000..43760d593c7
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kendra_faq.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_faq"
+description: |-
+  Provides details about a specific Amazon Kendra Faq.
+---
+
+
+
+# Data Source: aws_kendra_faq
+
+Provides details about a specific Amazon Kendra Faq.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKendraFaq } from "./.gen/providers/aws/data-aws-kendra-faq";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKendraFaq(this, "test", {
+      faqId: "87654321-1234-4321-4321-321987654321",
+      indexId: "12345678-1234-1234-1234-123456789123",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `faqId` - (Required) Identifier of the FAQ.
+* `indexId` - (Required) Identifier of the index that contains the FAQ.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the FAQ.
+* `createdAt` - Unix datetime that the FAQ was created.
+* `description` - Description of the FAQ.
+* `errorMessage` - When the `status` field value is `FAILED`, this contains a message that explains why.
+* `fileFormat` - File format used by the input files for the FAQ. Valid values are `CSV`, `CSV_WITH_HEADER`, `JSON`.
+* `id` - Unique identifiers of the FAQ and index separated by a slash (`/`).
+* `languageCode` - Code for a language. This shows a supported language for the FAQ document. For more information on supported languages, including their codes, see [Adding documents in languages other than English](https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html).
+* `name` - Name of the FAQ.
+* `roleArn` - ARN of a role with permission to access the S3 bucket that contains the FAQs. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html).
+* `s3Path` - S3 location of the FAQ input data. Detailed below.
+* `status` - Status of the FAQ. It is ready to use when the status is `ACTIVE`.
+* `updatedAt` - Date and time that the FAQ was last updated.
+* `tags` - Metadata that helps organize the FAQs you create.
+
+The `s3Path` configuration block supports the following attributes:
+
+* `bucket` - Name of the S3 bucket that contains the file.
+* `key` - Name of the file.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kendra_index.html.markdown b/website/docs/cdktf/typescript/d/kendra_index.html.markdown
new file mode 100644
index 00000000000..c4e60535be1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kendra_index.html.markdown
@@ -0,0 +1,135 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_index"
+description: |-
+  Provides details about a specific Amazon Kendra Index.
+---
+
+
+
+# Data Source: aws_kendra_index
+
+Provides details about a specific Amazon Kendra Index.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKendraIndex } from "./.gen/providers/aws/data-aws-kendra-index";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKendraIndex(this, "example", {
+      id: "12345678-1234-1234-1234-123456789123",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Required) Returns information on a specific Index by id.
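+
+As a hedged illustration (the output names here are invented for this sketch, and the attribute getters are assumed to follow the camelCase names documented in the next section), the looked-up index can be consumed directly, for example as stack outputs:
+
+```typescript
+// Hypothetical sketch, not generated by 'cdktf convert': export a few
+// attributes of the looked-up index as stack outputs.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsKendraIndex } from "./.gen/providers/aws/data-aws-kendra-index";
+class KendraIndexOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsKendraIndex(this, "example", {
+      id: "12345678-1234-1234-1234-123456789123",
+    });
+    // Surface the index ARN and current status for quick inspection.
+    new TerraformOutput(this, "kendra_index_arn", { value: example.arn });
+    new TerraformOutput(this, "kendra_index_status", { value: example.status });
+  }
+}
+```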
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Index.
+* `capacityUnits` - Block that sets the number of additional document storage and query capacity units that should be used by the index. Documented below.
+* `createdAt` - Unix datetime that the index was created.
+* `description` - Description of the Index.
+* `documentMetadataConfigurationUpdates` - One or more blocks that specify the configuration settings for any metadata applied to the documents in the index. Documented below.
+* `edition` - Amazon Kendra edition for the index.
+* `errorMessage` - When the Status field value is `FAILED`, this contains a message that explains why.
+* `id` - Identifier of the Index.
+* `indexStatistics` - Block that provides information about the number of FAQ questions and answers and the number of text documents indexed. Documented below.
+* `name` - Name of the Index.
+* `roleArn` - An AWS Identity and Access Management (IAM) role that gives Amazon Kendra permissions to access your Amazon CloudWatch logs and metrics. This is also the role you use when you call the `BatchPutDocument` API to index documents from an Amazon S3 bucket.
+* `serverSideEncryptionConfiguration` - A block that specifies the identifier of the AWS KMS customer managed key (CMK) that's used to encrypt data indexed by Amazon Kendra. Amazon Kendra doesn't support asymmetric CMKs. Documented below.
+* `status` - Current status of the index. When the value is `ACTIVE`, the index is ready for use. If the Status field value is `FAILED`, the `errorMessage` field contains a message that explains why.
+* `updatedAt` - Unix datetime that the index was last updated.
+* `userContextPolicy` - User context policy. Valid values are `ATTRIBUTE_FILTER` or `USER_TOKEN`. For more information, refer to [UserContextPolicy](https://docs.aws.amazon.com/kendra/latest/APIReference/API_CreateIndex.html#kendra-CreateIndex-request-UserContextPolicy).
+* `userGroupResolutionConfiguration` - A block that enables fetching access levels of groups and users from an AWS Single Sign-On identity source. Documented below.
+* `userTokenConfigurations` - A block that specifies the user token configuration. Documented below.
+* `tags` - Metadata that helps organize the Indices you create.
+
+A `capacityUnits` block supports the following attributes:
+
+* `queryCapacityUnits` - The amount of extra query capacity for an index and GetQuerySuggestions capacity. For more information, refer to [QueryCapacityUnits](https://docs.aws.amazon.com/kendra/latest/APIReference/API_CapacityUnitsConfiguration.html#Kendra-Type-CapacityUnitsConfiguration-QueryCapacityUnits).
+* `storageCapacityUnits` - The amount of extra storage capacity for an index. A single capacity unit provides 30 GB of storage space or 100,000 documents, whichever is reached first. Minimum value of 0.
+
+A `documentMetadataConfigurationUpdates` block supports the following attributes:
+
+* `name` - Name of the index field. Minimum length of 1. Maximum length of 30.
+* `relevance` - Block that provides manual tuning parameters to determine how the field affects the search results. Documented below.
+* `search` - Block that provides information about how the field is used during a search. Documented below.
+* `type` - Data type of the index field. Valid values are `STRING_VALUE`, `STRING_LIST_VALUE`, `LONG_VALUE`, `DATE_VALUE`.
+
+A `relevance` block supports the following attributes:
+
+* `duration` - Time period that the boost applies to. For more information, refer to [Duration](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-Duration).
+* `freshness` - How "fresh" a document is. For more information, refer to [Freshness](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-Freshness).
+* `importance` - Relative importance of the field in the search. Larger numbers provide more of a boost than smaller numbers. Minimum value of 1. Maximum value of 10.
+* `rankOrder` - Determines how values should be interpreted. For more information, refer to [RankOrder](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-RankOrder).
+* `valuesImportanceMap` - A list of values that should be given a different boost when they appear in the result list. For more information, refer to [ValueImportanceMap](https://docs.aws.amazon.com/kendra/latest/APIReference/API_Relevance.html#Kendra-Type-Relevance-ValueImportanceMap).
+
+A `search` block supports the following attributes:
+
+* `displayable` - Determines whether the field is returned in the query response. The default is `true`.
+* `facetable` - Whether the field can be used to create search facets, a count of results for each value in the field. The default is `false`.
+* `searchable` - Determines whether the field is used in the search. If the Searchable field is true, you can use relevance tuning to manually tune how Amazon Kendra weights the field in the search. The default is `true` for `string` fields and `false` for `number` and `date` fields.
+* `sortable` - Determines whether the field can be used to sort the results of a query. If you specify sorting on a field that does not have Sortable set to true, Amazon Kendra returns an exception. The default is `false`.
+
+An `indexStatistics` block supports the following attributes:
+
+* `faqStatistics` - Block that specifies the number of question and answer topics in the index. Documented below.
+* `textDocumentStatistics` - A block that specifies the number of text documents indexed.
+
+A `faqStatistics` block supports the following attributes:
+
+* `indexedQuestionAnswersCount` - The total number of FAQ questions and answers contained in the index.
+
+A `textDocumentStatistics` block supports the following attributes:
+
+* `indexedTextBytes` - Total size, in bytes, of the indexed documents.
+* `indexedTextDocumentsCount` - The number of text documents indexed.
+
+A `serverSideEncryptionConfiguration` block supports the following attributes:
+
+* `kmsKeyId` - Identifier of the AWS KMS customer master key (CMK). Amazon Kendra doesn't support asymmetric CMKs.
+
+A `userGroupResolutionConfiguration` block supports the following attributes:
+
+* `userGroupResolutionMode` - The identity store provider (mode) you want to use to fetch access levels of groups and users. AWS Single Sign-On is currently the only available mode. Your users and groups must exist in an AWS SSO identity source in order to use this mode. Valid values are `AWS_SSO` or `NONE`.
+
+A `userTokenConfigurations` block supports the following attributes:
+
+* `jsonTokenTypeConfiguration` - A block that specifies the information about the JSON token type configuration.
+* `jwtTokenTypeConfiguration` - A block that specifies the information about the JWT token type configuration.
+
+A `jsonTokenTypeConfiguration` block supports the following attributes:
+
+* `groupAttributeField` - The group attribute field.
+* `userNameAttributeField` - The user name attribute field.
+
+A `jwtTokenTypeConfiguration` block supports the following attributes:
+
+* `claimRegex` - Regular expression that identifies the claim.
+* `groupAttributeField` - The group attribute field.
+* `issuer` - Issuer of the token.
+* `keyLocation` - Location of the key. Valid values are `URL` or `SECRET_MANAGER`.
+* `secretsManagerArn` - ARN of the secret.
+* `url` - Signing key URL.
+* `userNameAttributeField` - The user name attribute field.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kendra_query_suggestions_block_list.html.markdown b/website/docs/cdktf/typescript/d/kendra_query_suggestions_block_list.html.markdown
new file mode 100644
index 00000000000..9ac8147ee17
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kendra_query_suggestions_block_list.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_query_suggestions_block_list"
+description: |-
+  Provides details about a specific Amazon Kendra block list used for query suggestions for an index.
+---
+
+
+
+# Data Source: aws_kendra_query_suggestions_block_list
+
+Provides details about a specific Amazon Kendra block list used for query suggestions for an index.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKendraQuerySuggestionsBlockList } from "./.gen/providers/aws/data-aws-kendra-query-suggestions-block-list";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKendraQuerySuggestionsBlockList(this, "example", {
+      indexId: "12345678-1234-1234-1234-123456789123",
+      querySuggestionsBlockListId: "87654321-1234-4321-4321-321987654321",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `indexId` - (Required) Identifier of the index that contains the block list.
+* `querySuggestionsBlockListId` - (Required) Identifier of the block list.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the block list.
+* `createdAt` - Date-time a block list was created.
+* `description` - Description for the block list.
+* `errorMessage` - Error message containing details if there are issues processing the block list.
+* `fileSizeBytes` - Current size of the block list text file in S3.
+* `id` - Unique identifiers of the block list and index separated by a slash (`/`).
+* `itemCount` - Current number of valid, non-empty words or phrases in the block list text file.
+* `name` - Name of the block list.
+* `roleArn` - ARN of a role with permission to access the S3 bucket that contains the block list. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html).
+* `sourceS3Path` - S3 location of the block list input data. Detailed below.
+* `status` - Current status of the block list. When the value is `ACTIVE`, the block list is ready for use.
+* `updatedAt` - Date and time that the block list was last updated.
+* `tags` - Metadata that helps organize the block list you create.
+
+The `sourceS3Path` configuration block supports the following attributes:
+
+* `bucket` - Name of the S3 bucket that contains the file.
+* `key` - Name of the file.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kendra_thesaurus.html.markdown b/website/docs/cdktf/typescript/d/kendra_thesaurus.html.markdown
new file mode 100644
index 00000000000..b8fb75c2801
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kendra_thesaurus.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_thesaurus"
+description: |-
+  Provides details about a specific Amazon Kendra Thesaurus.
+---
+
+
+
+# Data Source: aws_kendra_thesaurus
+
+Provides details about a specific Amazon Kendra Thesaurus.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKendraThesaurus } from "./.gen/providers/aws/data-aws-kendra-thesaurus";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKendraThesaurus(this, "example", {
+      indexId: "12345678-1234-1234-1234-123456789123",
+      thesaurusId: "87654321-1234-4321-4321-321987654321",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `indexId` - (Required) Identifier of the index that contains the Thesaurus.
+* `thesaurusId` - (Required) Identifier of the Thesaurus.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Thesaurus.
+* `createdAt` - Unix datetime that the Thesaurus was created.
+* `description` - Description of the Thesaurus.
+* `errorMessage` - When the `status` field value is `FAILED`, this contains a message that explains why.
+* `fileSizeBytes` - Size of the Thesaurus file in bytes.
+* `id` - Unique identifiers of the Thesaurus and index separated by a slash (`/`).
+* `name` - Name of the Thesaurus.
+* `roleArn` - ARN of a role with permission to access the S3 bucket that contains the Thesaurus. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html).
+* `sourceS3Path` - S3 location of the Thesaurus input data. Detailed below.
+* `status` - Status of the Thesaurus. It is ready to use when the status is `ACTIVE`.
+* `synonymRuleCount` - Number of synonym rules in the Thesaurus file.
+* `termCount` - Number of unique terms in the Thesaurus file. For example, with the synonyms `a,b,c` and `a=>d`, the term count would be 4.
+* `updatedAt` - Date and time that the Thesaurus was last updated.
+* `tags` - Metadata that helps organize the Thesaurus you create.
+
+The `sourceS3Path` configuration block supports the following attributes:
+
+* `bucket` - Name of the S3 bucket that contains the file.
+* `key` - Name of the file.
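+
+As a sketch (assuming the generated bindings expose `sourceS3Path` through the usual cdktf complex-list accessor shape, `get(index)`), the nested S3 location can be read like this:
+
+```typescript
+// Hypothetical sketch, not generated by 'cdktf convert': surface the S3
+// location of the thesaurus input file from the data source above.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsKendraThesaurus } from "./.gen/providers/aws/data-aws-kendra-thesaurus";
+class ThesaurusLocation extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsKendraThesaurus(this, "example", {
+      indexId: "12345678-1234-1234-1234-123456789123",
+      thesaurusId: "87654321-1234-4321-4321-321987654321",
+    });
+    // sourceS3Path is a single-element block; index 0 holds its attributes.
+    new TerraformOutput(this, "thesaurus_bucket", {
+      value: example.sourceS3Path.get(0).bucket,
+    });
+  }
+}
+```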
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/key_pair.html.markdown b/website/docs/cdktf/typescript/d/key_pair.html.markdown
new file mode 100644
index 00000000000..57d99a82b66
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/key_pair.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_key_pair"
+description: |-
+  Provides details about a specific EC2 Key Pair.
+---
+
+
+
+# Data Source: aws_key_pair
+
+Use this data source to get information about a specific EC2 Key Pair.
+
+## Example Usage
+
+The following example shows how to get an EC2 Key Pair, including the public key material, from its name.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKeyPair } from "./.gen/providers/aws/data-aws-key-pair";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsKeyPair(this, "example", {
+      filter: [
+        {
+          name: "tag:Component",
+          values: ["web"],
+        },
+      ],
+      includePublicKey: true,
+      keyName: "test",
+    });
+    new TerraformOutput(this, "fingerprint", {
+      value: example.fingerprint,
+    });
+    new TerraformOutput(this, "id", {
+      value: example.id,
+    });
+    new TerraformOutput(this, "name", {
+      value: example.keyName,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+Key Pairs. The given filters must match exactly one Key Pair
+whose data will be exported as attributes.
+
+* `keyPairId` - (Optional) Key Pair ID.
+* `keyName` - (Optional) Key Pair name.
+* `includePublicKey` - (Optional) Whether to include the public key material in the response.
+* `filter` - (Optional) Custom filter block as described below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeKeyPairs API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeKeyPairs.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the Key Pair.
+* `arn` - ARN of the Key Pair.
+* `createTime` - Timestamp for when the key pair was created in ISO 8601 format.
+* `fingerprint` - SHA-1 digest of the DER encoded private key.
+* `keyType` - Type of key pair.
+* `publicKey` - Public key material.
+* `tags` - Any tags assigned to the Key Pair.
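+
+A common follow-on, sketched below with assumed placeholder values (the `Instance` arguments are illustrative and not part of this data source), is to launch an EC2 instance with the discovered key:
+
+```typescript
+// Hypothetical sketch, not generated by 'cdktf convert': reference the
+// looked-up key pair from an EC2 instance.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsKeyPair } from "./.gen/providers/aws/data-aws-key-pair";
+import { Instance } from "./.gen/providers/aws/instance";
+class InstanceWithKeyPair extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsKeyPair(this, "example", {
+      keyName: "test",
+    });
+    new Instance(this, "web", {
+      ami: "ami-12345678", // placeholder AMI ID
+      instanceType: "t3.micro",
+      keyName: example.keyName,
+    });
+  }
+}
+```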
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kinesis_firehose_delivery_stream.html.markdown b/website/docs/cdktf/typescript/d/kinesis_firehose_delivery_stream.html.markdown
new file mode 100644
index 00000000000..6c74eed5d13
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kinesis_firehose_delivery_stream.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "Kinesis Firehose"
+layout: "aws"
+page_title: "AWS: aws_kinesis_firehose_delivery_stream"
+description: |-
+  Provides an AWS Kinesis Firehose Delivery Stream data source.
+---
+
+
+
+# Data Source: aws_kinesis_firehose_delivery_stream
+
+Use this data source to get information about a Kinesis Firehose Delivery Stream for use in other resources.
+
+For more details, see the [Amazon Kinesis Firehose Documentation][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKinesisFirehoseDeliveryStream } from "./.gen/providers/aws/data-aws-kinesis-firehose-delivery-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKinesisFirehoseDeliveryStream(this, "stream", {
+      name: "stream-name",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the Kinesis Firehose Delivery Stream.
+
+## Attribute Reference
+
+`id` is set to the ARN of the Kinesis Firehose Delivery Stream. In addition, the following attributes
+are exported:
+
+* `arn` - ARN of the Kinesis Firehose Delivery Stream (same as `id`).
+
+[1]: https://aws.amazon.com/documentation/firehose/
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kinesis_stream.html.markdown b/website/docs/cdktf/typescript/d/kinesis_stream.html.markdown
new file mode 100644
index 00000000000..5cc953620dc
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kinesis_stream.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Kinesis"
+layout: "aws"
+page_title: "AWS: aws_kinesis_stream"
+description: |-
+  Provides a Kinesis Stream data source.
+---
+
+
+
+# Data Source: aws_kinesis_stream
+
+Use this data source to get information about a Kinesis Stream for use in other
+resources.
+
+For more details, see the [Amazon Kinesis Documentation][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKinesisStream } from "./.gen/providers/aws/data-aws-kinesis-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKinesisStream(this, "stream", {
+      name: "stream-name",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the Kinesis Stream.
+
+## Attribute Reference
+
+`id` is set to the ARN of the Kinesis Stream. In addition, the following attributes
+are exported:
+
+* `arn` - ARN of the Kinesis Stream (same as id).
+* `name` - Name of the Kinesis Stream.
+* `creationTimestamp` - Approximate UNIX timestamp that the stream was created.
+* `status` - Current status of the stream. The stream status is one of CREATING, DELETING, ACTIVE, or UPDATING.
+* `retentionPeriod` - Length of time (in hours) data records are accessible after they are added to the stream.
+* `openShards` - List of shard ids in the OPEN state. See [Shard State][2] for more.
+* `closedShards` - List of shard ids in the CLOSED state. See [Shard State][2] for more.
+* `shardLevelMetrics` - List of shard-level CloudWatch metrics which are enabled for the stream. See [Monitoring with CloudWatch][3] for more.
+* `streamModeDetails` - [Capacity mode][4] of the data stream. Detailed below.
+* `tags` - Map of tags assigned to the stream.
+
+### stream_mode_details Configuration Block
+
+* `streamMode` - Capacity mode of the stream. Either `ON_DEMAND` or `PROVISIONED`.
+
+[1]: https://aws.amazon.com/documentation/kinesis/
+[2]: https://docs.aws.amazon.com/streams/latest/dev/kinesis-using-sdk-java-after-resharding.html#kinesis-using-sdk-java-resharding-data-routing
+[3]: https://docs.aws.amazon.com/streams/latest/dev/monitoring-with-cloudwatch.html
+[4]: https://docs.aws.amazon.com/streams/latest/dev/how-do-i-size-a-stream.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kinesis_stream_consumer.html.markdown b/website/docs/cdktf/typescript/d/kinesis_stream_consumer.html.markdown
new file mode 100644
index 00000000000..8c7556211b4
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kinesis_stream_consumer.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "Kinesis"
+layout: "aws"
+page_title: "AWS: aws_kinesis_stream_consumer"
+description: |-
+  Provides details about a Kinesis Stream Consumer.
+---
+
+
+
+# Data Source: aws_kinesis_stream_consumer
+
+Provides details about a Kinesis Stream Consumer.
+
+For more details, see the [Amazon Kinesis Stream Consumer Documentation][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKinesisStreamConsumer } from "./.gen/providers/aws/data-aws-kinesis-stream-consumer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKinesisStreamConsumer(this, "example", {
+      name: "example-consumer",
+      streamArn: Token.asString(awsKinesisStreamExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `arn` - (Optional) ARN of the stream consumer.
+* `name` - (Optional) Name of the stream consumer.
+* `streamArn` - (Required) ARN of the data stream the consumer is registered with.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `creationTimestamp` - Approximate timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of when the stream consumer was created.
+* `id` - ARN of the stream consumer.
+* `status` - Current status of the stream consumer.
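+
+For example, a minimal self-contained sketch (stream and consumer names are placeholders) that resolves the consumer through its stream and exports the ARN:
+
+```typescript
+// Hypothetical sketch, not generated by 'cdktf convert': look up a stream,
+// then the consumer registered with it, and export the consumer ARN.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsKinesisStream } from "./.gen/providers/aws/data-aws-kinesis-stream";
+import { DataAwsKinesisStreamConsumer } from "./.gen/providers/aws/data-aws-kinesis-stream-consumer";
+class ConsumerLookup extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const stream = new DataAwsKinesisStream(this, "stream", {
+      name: "stream-name",
+    });
+    const consumer = new DataAwsKinesisStreamConsumer(this, "consumer", {
+      name: "example-consumer",
+      streamArn: stream.arn,
+    });
+    new TerraformOutput(this, "consumer_arn", { value: consumer.arn });
+  }
+}
+```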
+
+[1]: https://docs.aws.amazon.com/streams/latest/dev/amazon-kinesis-consumers.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kms_alias.html.markdown b/website/docs/cdktf/typescript/d/kms_alias.html.markdown
new file mode 100644
index 00000000000..31fe1620168
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kms_alias.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_alias"
+description: |-
+  Get information on an AWS Key Management Service (KMS) Alias
+---
+
+
+
+# Data Source: aws_kms_alias
+
+Use this data source to get the ARN of a KMS key alias.
+By using this data source, you can reference the key alias
+without having to hard code the ARN as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKmsAlias } from "./.gen/providers/aws/data-aws-kms-alias";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKmsAlias(this, "s3", {
+      name: "alias/aws/s3",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Display name of the alias. The name must start with the word "alias" followed by a forward slash (alias/).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the key alias.
+* `id` - Amazon Resource Name (ARN) of the key alias.
+* `targetKeyId` - Key identifier pointed to by the alias.
+* `targetKeyArn` - ARN pointed to by the alias.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kms_ciphertext.html.markdown b/website/docs/cdktf/typescript/d/kms_ciphertext.html.markdown
new file mode 100644
index 00000000000..6617a7b598b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kms_ciphertext.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_ciphertext"
+description: |-
+  Provides ciphertext encrypted using a KMS key
+---
+
+
+
+# Data Source: aws_kms_ciphertext
+
+The KMS ciphertext data source allows you to encrypt plaintext into ciphertext
+by using an AWS KMS customer master key. The value returned by this data source
+changes every apply. For a stable ciphertext value, see the [`awsKmsCiphertext`
+resource](/docs/providers/aws/r/kms_ciphertext.html).
+
+~> **Note:** All arguments, including the plaintext, will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKmsCiphertext } from "./.gen/providers/aws/data-aws-kms-ciphertext";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const oauthConfig = new KmsKey(this, "oauth_config", {
+      description: "oauth config",
+      isEnabled: true,
+    });
+    new DataAwsKmsCiphertext(this, "oauth", {
+      keyId: oauthConfig.keyId,
+      plaintext:
+        '{\n "client_id": "e587dbae22222f55da22",\n "client_secret": "8289575d00000ace55e1815ec13673955721b8a5"\n}\n\n',
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `plaintext` - (Required) Data to be encrypted. Note that this may show up in logs, and it will be stored in the state file.
+* `keyId` - (Required) Globally unique key ID for the customer master key.
+* `context` - (Optional) An optional mapping that makes up the encryption context.
+
+## Attribute Reference
+
+All of the argument attributes are also exported as result attributes.
+
+* `id` - Globally unique key ID for the customer master key.
+* `ciphertextBlob` - Base64 encoded ciphertext.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kms_custom_key_store.html.markdown b/website/docs/cdktf/typescript/d/kms_custom_key_store.html.markdown
new file mode 100644
index 00000000000..bfa5f4d23a6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kms_custom_key_store.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_custom_key_store"
+description: |-
+  Get information on an AWS Key Management Service (KMS) Custom Key Store
+---
+
+
+
+# Data Source: aws_kms_custom_key_store
+
+Use this data source to get the metadata of a KMS custom key store.
+By using this data source, you can reference the custom key store
+without having to hard code the ID as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKmsCustomKeyStore } from "./.gen/providers/aws/data-aws-kms-custom-key-store";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKmsCustomKeyStore(this, "keystore", {
+      customKeyStoreName: "my_cloudhsm",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `customKeyStoreId` - (Optional) The ID for the custom key store.
+* `customKeyStoreName` - (Optional) The user-specified friendly name for the custom key store.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The ID for the custom key store.
+* `cloudhsmClusterId` - ID for the CloudHSM cluster that is associated with the custom key store.
+* `connectionState` - Indicates whether the custom key store is connected to its CloudHSM cluster.
+* `creationDate` - The date and time when the custom key store was created.
+* `trustAnchorCertificate` - The trust anchor certificate of the associated CloudHSM cluster.
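+
+For instance, a minimal sketch (the output name is invented for illustration) that exposes the store's connection state:
+
+```typescript
+// Hypothetical sketch, not generated by 'cdktf convert': check whether the
+// custom key store found above is connected to its CloudHSM cluster.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsKmsCustomKeyStore } from "./.gen/providers/aws/data-aws-kms-custom-key-store";
+class KeystoreState extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const keystore = new DataAwsKmsCustomKeyStore(this, "keystore", {
+      customKeyStoreName: "my_cloudhsm",
+    });
+    new TerraformOutput(this, "keystore_connection_state", {
+      value: keystore.connectionState,
+    });
+  }
+}
+```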
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kms_key.html.markdown b/website/docs/cdktf/typescript/d/kms_key.html.markdown
new file mode 100644
index 00000000000..e9180210c1c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kms_key.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_key"
+description: |-
+  Get information on an AWS Key Management Service (KMS) Key
+---
+
+
+
+# aws_kms_key
+
+Use this data source to get detailed information about
+the specified KMS Key with flexible key ID input.
+This can be useful to reference a key alias
+without having to hard code the ARN as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKmsKey } from "./.gen/providers/aws/data-aws-kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKmsKey(this, "by_alias", {
+      keyId: "alias/my-key",
+    });
+    new DataAwsKmsKey(this, "by_alias_arn", {
+      keyId: "arn:aws:kms:us-east-1:111122223333:alias/my-key",
+    });
+    new DataAwsKmsKey(this, "by_id", {
+      keyId: "1234abcd-12ab-34cd-56ef-1234567890ab",
+    });
+    new DataAwsKmsKey(this, "by_key_arn", {
+      keyId:
+        "arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `keyId` - (Required) Key identifier which can be one of the following formats:
+    * Key ID. E.g: `1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Key ARN. E.g.: `arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Alias name. E.g.: `alias/my-key`
+    * Alias ARN. E.g.: `arn:aws:kms:us-east-1:111122223333:alias/my-key`
+* `grantTokens` - (Optional) List of grant tokens.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id`: The globally unique identifier for the key
+* `arn`: The ARN of the key
+* `awsAccountId`: The twelve-digit account ID of the AWS account that owns the key
+* `cloudHsmClusterId`: The cluster ID of the AWS CloudHSM cluster that contains the key material for the KMS key.
+* `creationDate`: The date and time when the key was created
+* `customKeyStoreId`: A unique identifier for the custom key store that contains the KMS key.
+* `customerMasterKeySpec`: Specifies whether the key contains a symmetric key or an asymmetric key pair and the encryption algorithms or signing algorithms that the key supports
+* `deletionDate`: The date and time after which AWS KMS deletes the key. This value is present only when `keyState` is `PendingDeletion`, otherwise this value is 0
+* `description`: The description of the key.
+* `enabled`: Specifies whether the key is enabled. When `keyState` is `Enabled` this value is true, otherwise it is false
+* `expirationModel`: Specifies whether the Key's key material expires. This value is present only when `origin` is `EXTERNAL`, otherwise this value is empty
+* `keyManager`: The key's manager
+* `keySpec`: Describes the type of key material in the KMS key.
+* `keyState`: The state of the key
+* `keyUsage`: Specifies the intended use of the key
+* `multiRegion`: Indicates whether the KMS key is a multi-Region (`true`) or regional (`false`) key.
+* `multiRegionConfiguration`: Lists the primary and replica keys in same multi-Region key. Present only when the value of `multiRegion` is `true`.
+* `origin`: When this value is `AWS_KMS`, AWS KMS created the key material. When this value is `EXTERNAL`, the key material was imported from your existing key management infrastructure or the CMK lacks key material
+* `pendingDeletionWindowInDays`: The waiting period before the primary key in a multi-Region key is deleted.
+* `validTo`: The time at which the imported key material expires. This value is present only when `origin` is `EXTERNAL` and its `expirationModel` is `KEY_MATERIAL_EXPIRES`, otherwise this value is 0
+* `xksKeyConfiguration`: Information about the external key that is associated with a KMS key in an external key store.
+
+The `multiRegionConfiguration` object supports the following:
+
+* `multiRegionKeyType`: Indicates whether the KMS key is a `PRIMARY` or `REPLICA` key.
+* `primaryKey`: The key ARN and Region of the primary key. This is the current KMS key if it is the primary key.
+* `replicaKeys`: The key ARNs and Regions of all replica keys. Includes the current KMS key if it is a replica key.
+
+The `primaryKey` and `replicaKeys` objects support the following:
+
+* `arn`: The key ARN of a primary or replica key of a multi-Region key.
+* `region`: The AWS Region of a primary or replica key in a multi-Region key.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kms_public_key.html.markdown b/website/docs/cdktf/typescript/d/kms_public_key.html.markdown
new file mode 100644
index 00000000000..102ae44fd50
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kms_public_key.html.markdown
@@ -0,0 +1,71 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_public_key"
+description: |-
+  Get information on a KMS public key
+---
+
+
+
+# aws_kms_public_key
+
+Use this data source to get the public key of the specified KMS Key with flexible key ID input. This can be useful to reference a key alias without having to hard code the ARN as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKmsPublicKey } from "./.gen/providers/aws/data-aws-kms-public-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsKmsPublicKey(this, "by_alias", {
+      keyId: "alias/my-key",
+    });
+    new DataAwsKmsPublicKey(this, "by_alias_arn", {
+      keyId: "arn:aws:kms:us-east-1:111122223333:alias/my-key",
+    });
+    new DataAwsKmsPublicKey(this, "by_id", {
+      keyId: "1234abcd-12ab-34cd-56ef-1234567890ab",
+    });
+    new DataAwsKmsPublicKey(this, "by_key_arn", {
+      keyId:
+        "arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `keyId` - (Required) Key identifier which can be one of the following formats:
+    * Key ID. E.g - `1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Key ARN. E.g.
 - `arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`
+    * Alias name. E.g. - `alias/my-key`
+    * Alias ARN - E.g. - `arn:aws:kms:us-east-1:111122223333:alias/my-key`
+* `grantTokens` - (Optional) List of grant tokens.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Key ARN of the asymmetric CMK from which the public key was downloaded.
+* `customerMasterKeySpec` - Type of the public key that was downloaded.
+* `encryptionAlgorithms` - Encryption algorithms that AWS KMS supports for this key. Only set when the `keyUsage` of the public key is `ENCRYPT_DECRYPT`.
+* `id` - Key ARN of the asymmetric CMK from which the public key was downloaded.
+* `keyUsage` - Permitted use of the public key. Valid values are `ENCRYPT_DECRYPT` or `SIGN_VERIFY`.
+* `publicKey` - Exported public key. The value is a DER-encoded X.509 public key, also known as SubjectPublicKeyInfo (SPKI), as defined in [RFC 5280](https://tools.ietf.org/html/rfc5280). The value is Base64-encoded.
+* `publicKeyPem` - Exported public key. The value is Privacy Enhanced Mail (PEM) encoded.
+* `signingAlgorithms` - Signing algorithms that AWS KMS supports for this key. Only set when the `keyUsage` of the public key is `SIGN_VERIFY`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kms_secret.html.markdown b/website/docs/cdktf/typescript/d/kms_secret.html.markdown
new file mode 100644
index 00000000000..7aa0c57f415
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kms_secret.html.markdown
@@ -0,0 +1,15 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_secret"
+description: |-
+  Provides secret data encrypted with the KMS service
+---
+
+
+
+# Data Source: aws_kms_secret
+
+!> **WARNING:** This data source was removed in version 2.0.0 of the Terraform AWS Provider. You can migrate existing configurations to the [`awsKmsSecrets` data source](/docs/providers/aws/d/kms_secrets.html) following instructions available in the [Version 2 Upgrade Guide](/docs/providers/aws/guides/version-2-upgrade.html#data-source-aws_kms_secret).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/kms_secrets.html.markdown b/website/docs/cdktf/typescript/d/kms_secrets.html.markdown
new file mode 100644
index 00000000000..e7b09df94de
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/kms_secrets.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_secrets"
+description: |-
+  Decrypt multiple secrets from data encrypted with the AWS KMS service
+---
+
+
+
+# Data Source: aws_kms_secrets
+
+Decrypt multiple secrets from data encrypted with the AWS KMS service.
+
+~> **NOTE:** Using this data provider will allow you to conceal secret data within your resource definitions but does not take care of protecting that data in all Terraform logging and state output. Please take care to secure your secret data beyond just the Terraform configuration.
+
+## Example Usage
+
+If you do not already have a `ciphertextBlob` from encrypting a KMS secret, you can use the below commands to obtain one using the [AWS CLI kms encrypt](https://docs.aws.amazon.com/cli/latest/reference/kms/encrypt.html) command. This requires you to have your AWS CLI set up correctly and replace the `keyId` with your own.
 Alternatively, you can use `--plaintext 'master-password'` (CLIv1) or `--plaintext fileb://<(echo -n 'master-password')` (CLIv2) instead of reading from a file.
+
+-> If you have a newline character at the end of your file, it will be decrypted with this newline character intact. For most use cases this is undesirable and leads to incorrect passwords or invalid values, as well as possible changes in the plan. Be sure to use `echo -n` if necessary.
+
+-> If you are using asymmetric keys, ensure you are using the right encryption algorithm when you encrypt and decrypt, or you will get an `IncorrectKeyException` during the decrypt phase.
+
+```console
+% echo -n 'master-password' > plaintext-password
+% aws kms encrypt --key-id ab123456-c012-4567-890a-deadbeef123 --plaintext fileb://plaintext-password --encryption-context foo=bar --output text --query CiphertextBlob
+AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ==
+% aws kms encrypt --key-id ab123456-c012-4567-890a-deadbeef123 --plaintext fileb://plaintext-password --encryption-algorithm RSAES_OAEP_SHA_256 --output text --query CiphertextBlob
+AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ==
+```
+
+That encrypted output can now be inserted into Terraform configurations without exposing the plaintext secret directly.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsKmsSecrets } from "./.gen/providers/aws/data-aws-kms-secrets";
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new DataAwsKmsSecrets(this, "example", {
+      secret: [
+        {
+          context: {
+            foo: "bar",
+          },
+          name: "master_password",
+          payload:
+            "AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ==",
+        },
+        {
+          name: "master_username",
+          payload:
+            "AQECAHgaPa0J8WadplGCqqVAr4HNvDaFSQ+NaiwIBhmm6qDSFwAAAGIwYAYJKoZIhvcNAQcGoFMwUQIBADBMBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDI+LoLdvYv8l41OhAAIBEIAfx49FFJCLeYrkfMfAw6XlnxP23MmDBdqP8dPp28OoAQ==",
+        },
+      ],
+    });
+    const awsRdsClusterExample = new RdsCluster(this, "example_1", {
+      masterPassword: Token.asString(
+        propertyAccess(example.plaintext, ['"master_password"'])
+      ),
+      masterUsername: Token.asString(
+        propertyAccess(example.plaintext, ['"master_username"'])
+      ),
+      engine: config.engine,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsClusterExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `secret` - (Required) One or more encrypted payload definitions from the KMS service. See the Secret Definitions below.
+
+### Secret Definitions
+
+Each `secret` supports the following arguments:
+
+* `name` - (Required) Name to export this secret under in the attributes.
+* `payload` - (Required) Base64 encoded payload, as returned from a KMS encrypt operation.
+* `context` - (Optional) An optional mapping that makes up the Encryption Context for the secret.
+* `grantTokens` - (Optional) An optional list of Grant Tokens for the secret.
+* `encryptionAlgorithm` - (Optional) The encryption algorithm that will be used to decrypt the ciphertext. This parameter is required only when the ciphertext was encrypted under an asymmetric KMS key. Valid Values: SYMMETRIC_DEFAULT | RSAES_OAEP_SHA_1 | RSAES_OAEP_SHA_256 | SM2PKE
+* `keyId` - (Optional) Specifies the KMS key that AWS KMS uses to decrypt the ciphertext. This parameter is required only when the ciphertext was encrypted under an asymmetric KMS key.
+
+For more information on `context` and `grantTokens`, see the [KMS
+Concepts](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `plaintext` - Map containing each `secret` `name` as the key with its decrypted plaintext value.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lakeformation_data_lake_settings.html.markdown b/website/docs/cdktf/typescript/d/lakeformation_data_lake_settings.html.markdown
new file mode 100644
index 00000000000..11a5685e861
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lakeformation_data_lake_settings.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_data_lake_settings"
+description: |-
+  Get data lake administrators and default database and table permissions
+---
+
+
+
+# Data Source: aws_lakeformation_data_lake_settings
+
+Get Lake Formation principals designated as data lake administrators and lists of principal permission entries for default create database and default create table permissions.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLakeformationDataLakeSettings } from "./.gen/providers/aws/data-aws-lakeformation-data-lake-settings";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLakeformationDataLakeSettings(this, "example", {
+      catalogId: "14916253649",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `catalogId` – (Optional) Identifier for the Data Catalog. By default, the account ID.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `admins` – List of ARNs of AWS Lake Formation principals (IAM users or roles).
+* `createDatabaseDefaultPermissions` - Up to three configuration blocks of principal permissions for default create database permissions. Detailed below.
+* `createTableDefaultPermissions` - Up to three configuration blocks of principal permissions for default create table permissions. Detailed below.
+* `trustedResourceOwners` – List of the resource-owning account IDs that the caller's account can use to share their user access details (user ARNs). +* `allowExternalDataFiltering` - Whether to allow Amazon EMR clusters to access data managed by Lake Formation. +* `externalDataFilteringAllowList` - A list of the account IDs of Amazon Web Services accounts with Amazon EMR clusters that are to perform data filtering. +* `authorizedSessionTagValueList` - Lake Formation relies on a privileged process secured by Amazon EMR or the third party integrator to tag the user's role while assuming it. + +### create_database_default_permissions + +* `permissions` - List of permissions granted to the principal. +* `principal` - Principal who is granted permissions. + +### create_table_default_permissions + +* `permissions` - List of permissions granted to the principal. +* `principal` - Principal who is granted permissions. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/lakeformation_permissions.html.markdown b/website/docs/cdktf/typescript/d/lakeformation_permissions.html.markdown new file mode 100644 index 00000000000..e77a3d50b27 --- /dev/null +++ b/website/docs/cdktf/typescript/d/lakeformation_permissions.html.markdown @@ -0,0 +1,204 @@ +--- +subcategory: "Lake Formation" +layout: "aws" +page_title: "AWS: aws_lakeformation_permissions" +description: |- + Get permissions for a principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3. +--- + + + +# Data Source: aws_lakeformation_permissions + +Get permissions for a principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3. Permissions are granted to a principal, in a Data Catalog, relative to a Lake Formation resource, which includes the Data Catalog, databases, tables, LF-tags, and LF-tag policies. For more information, see [Security and Access Control to Metadata and Data in Lake Formation](https://docs.aws.amazon.com/lake-formation/latest/dg/security-data-access.html). + +~> **NOTE:** This data source deals with explicitly granted permissions. Lake Formation grants implicit permissions to data lake administrators, database creators, and table creators. For more information, see [Implicit Lake Formation Permissions](https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html). + +## Example Usage + +### Permissions For A Lake Formation S3 Resource + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLakeformationPermissions } from "./.gen/providers/aws/data-aws-lakeformation-permissions"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLakeformationPermissions(this, "test", { + dataLocation: { + arn: Token.asString(awsLakeformationResourceTest.arn), + }, + principal: workflowRole.arn, + }); + } +} + +``` + +### Permissions For A Glue Catalog Database + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLakeformationPermissions } from "./.gen/providers/aws/data-aws-lakeformation-permissions"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLakeformationPermissions(this, "test", { + database: { + catalogId: "110376042874", + name: Token.asString(awsGlueCatalogDatabaseTest.name), + }, + principal: workflowRole.arn, + }); + } +} + +``` + +### Permissions For Tag-Based Access Control + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLakeformationPermissions } from "./.gen/providers/aws/data-aws-lakeformation-permissions"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLakeformationPermissions(this, "test", { + lfTagPolicy: { + expression: [ + { + key: "Team", + values: ["Sales"], + }, + { + key: "Environment", + values: ["Dev", "Production"], + }, + ], + resourceType: "DATABASE", + }, + principal: workflowRole.arn, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `principal` – (Required) Principal to be granted the permissions on the resource. Supported principals are IAM users or IAM roles. + +One of the following is required: + +* `catalogResource` - Whether the permissions are to be granted for the Data Catalog. Defaults to `false`. +* `dataLocation` - Configuration block for a data location resource. Detailed below. +* `database` - Configuration block for a database resource. Detailed below. +* `lfTag` - (Optional) Configuration block for an LF-tag resource. Detailed below. +* `lfTagPolicy` - (Optional) Configuration block for an LF-tag policy resource. Detailed below. +* `table` - Configuration block for a table resource. Detailed below. +* `tableWithColumns` - Configuration block for a table with columns resource. Detailed below. + +The following arguments are optional: + +* `catalogId` – (Optional) Identifier for the Data Catalog. By default, the account ID. The Data Catalog is the persistent metadata store. It contains database definitions, table definitions, and other control information to manage your Lake Formation environment. + +### data_location + +The following argument is required: + +* `arn` – (Required) ARN that uniquely identifies the data location resource. + +The following argument is optional: + +* `catalogId` - (Optional) Identifier for the Data Catalog where the location is registered with Lake Formation. By default, it is the account ID of the caller. + +### database + +The following argument is required: + +* `name` – (Required) Name of the database resource. Unique to the Data Catalog. + +The following argument is optional: + +* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. + +### lf_tag + +The following arguments are required: + +* `key` – (Required) Key-name for the tag. +* `values` - (Required) List of possible values an attribute can take. + +The following argument is optional: + +* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. 
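+
+As an illustrative sketch (hand-written, not `cdktf convert` output), a lookup scoped to the `lf_tag` block just described might look as follows; the principal ARN and the `Team` tag are hypothetical placeholders:
+
+```typescript
+// Hypothetical sketch: query the permissions granted on a single LF-tag.
+// The principal ARN and the tag key/values below are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsLakeformationPermissions } from "./.gen/providers/aws/data-aws-lakeformation-permissions";
+class LfTagPermissionsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLakeformationPermissions(this, "by_tag", {
+      principal: "arn:aws:iam::123456789012:role/workflow-role",
+      lfTag: {
+        key: "Team",
+        values: ["Sales"],
+      },
+    });
+  }
+}
+```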
+
+### lf_tag_policy
+
+The following arguments are required:
+
+* `resourceType` – (Required) Resource type for which the tag policy applies. Valid values are `DATABASE` and `TABLE`.
+* `expression` - (Required) List of tag conditions that apply to the resource's tag policy. See [`expression`](#expression) below.
+
+The following argument is optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+#### expression
+
+* `key` – (Required) Key-name of an LF-Tag.
+* `values` - (Required) List of possible values of an LF-Tag.
+
+### table
+
+The following argument is required:
+
+* `databaseName` – (Required) Name of the database for the table. Unique to a Data Catalog.
+
+The following arguments are optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+* `name` - (Optional) Name of the table. At least one of `name` or `wildcard` is required.
+* `wildcard` - (Optional) Whether to use a wildcard representing every table under a database. At least one of `name` or `wildcard` is required. Defaults to `false`.
+
+### table_with_columns
+
+The following arguments are required:
+
+* `databaseName` – (Required) Name of the database for the table with columns resource. Unique to the Data Catalog.
+* `name` – (Required) Name of the table resource.
+
+The following arguments are optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+* `columnNames` - (Optional) Set of column names for the table. At least one of `columnNames` or `excludedColumnNames` is required.
+* `excludedColumnNames` - (Optional) Set of column names for the table to exclude. At least one of `columnNames` or `excludedColumnNames` is required.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `permissions` – List of permissions granted to the principal. For details on permissions, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+* `permissionsWithGrantOption` - Subset of `permissions` which the principal can pass.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lakeformation_resource.html.markdown b/website/docs/cdktf/typescript/d/lakeformation_resource.html.markdown
new file mode 100644
index 00000000000..a9d6c4305e1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lakeformation_resource.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_resource"
+description: |-
+  Provides details about a Lake Formation resource.
+---
+
+
+
+# Data Source: aws_lakeformation_resource
+
+Provides details about a Lake Formation resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsLakeformationResource } from "./.gen/providers/aws/data-aws-lakeformation-resource"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLakeformationResource(this, "example", { + arn: "arn:aws:s3:::tf-acc-test-9151654063908211878", + }); + } +} + +``` + +## Argument Reference + +* `arn` – (Required) ARN of the resource, an S3 path. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `lastModified` - Date and time the resource was last modified in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `roleArn` – Role that the resource was registered with. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/lambda_alias.html.markdown b/website/docs/cdktf/typescript/d/lambda_alias.html.markdown index 15eaac6e1ea..2008a7a3ac3 100644 --- a/website/docs/cdktf/typescript/d/lambda_alias.html.markdown +++ b/website/docs/cdktf/typescript/d/lambda_alias.html.markdown @@ -37,18 +37,18 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `functionName` - (Required) Name of the aliased Lambda function. * `name` - (Required) Name of the Lambda alias. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN identifying the Lambda function alias. * `description` - Description of alias. * `functionVersion` - Lambda function version which the alias uses. * `invokeArn` - ARN to be used for invoking Lambda Function from API Gateway - to be used in aws_api_gateway_integration's `uri`. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/lambda_code_signing_config.html.markdown b/website/docs/cdktf/typescript/d/lambda_code_signing_config.html.markdown index 0f388c913f1..186588970cc 100644 --- a/website/docs/cdktf/typescript/d/lambda_code_signing_config.html.markdown +++ b/website/docs/cdktf/typescript/d/lambda_code_signing_config.html.markdown @@ -43,13 +43,13 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `arn` - (Required) ARN of the code signing configuration. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `allowedPublishers` - List of allowed publishers as signing profiles for this code signing configuration. * `configId` - Unique identifier for the code signing configuration. 
@@ -67,4 +67,4 @@ In addition to all arguments above, the following attributes are exported:
 
 [1]: https://docs.aws.amazon.com/lambda/latest/dg/configuration-codesigning.html
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lambda_function.html.markdown b/website/docs/cdktf/typescript/d/lambda_function.html.markdown
index 5f40d9ce902..075132735ae 100644
--- a/website/docs/cdktf/typescript/d/lambda_function.html.markdown
+++ b/website/docs/cdktf/typescript/d/lambda_function.html.markdown
@@ -41,14 +41,14 @@ class MyConvertedCode extends TerraformStack {
 
 ## Argument Reference
 
-The following arguments are supported:
+This data source supports the following arguments:
 
 * `functionName` - (Required) Name of the lambda function.
 * `qualifier` - (Optional) Alias name or version number of the lambda function. E.g., `$latest`, `myAlias`, or `1`. When not included, the data source resolves to the most recent published version; if no published version exists, it resolves to the most recent unpublished version.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This data source exports the following attributes in addition to the arguments above:
 
 * `architectures` - Instruction set architecture for the Lambda function.
 * `arn` - Unqualified (no `:qualifier` or `:version` suffix) ARN identifying your Lambda Function. See also `qualifiedArn`.
@@ -79,4 +79,4 @@ In addition to all arguments above, the following attributes are exported:
 * `version` - The version of the Lambda function returned. If `qualifier` is not set, this will resolve to the most recent published version. If no published version of the function exists, `version` will resolve to `$latest`.
 * `vpcConfig` - VPC configuration associated with your Lambda function.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lambda_function_url.html.markdown b/website/docs/cdktf/typescript/d/lambda_function_url.html.markdown
index 1314595a433..5e8af8f7263 100644
--- a/website/docs/cdktf/typescript/d/lambda_function_url.html.markdown
+++ b/website/docs/cdktf/typescript/d/lambda_function_url.html.markdown
@@ -41,14 +41,14 @@ class MyConvertedCode extends TerraformStack {
 
 ## Argument Reference
 
-The following arguments are supported:
+This data source supports the following arguments:
 
 * `functionName` - (Required) The name (or ARN) of the Lambda function.
 * `qualifier` - (Optional) Alias name or `"$latest"`.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This data source exports the following attributes in addition to the arguments above:
 
 * `authorizationType` - Type of authentication that the function URL uses.
 * `cors` - The [cross-origin resource sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) settings for the function URL. See the [`awsLambdaFunctionUrl` resource](/docs/providers/aws/r/lambda_function_url.html) documentation for more details.
@@ -59,4 +59,4 @@ In addition to all arguments above, the following attributes are exported:
 * `lastModifiedTime` - When the function URL configuration was last updated, in [ISO-8601 format](https://www.w3.org/TR/NOTE-datetime).
 * `urlId` - Generated ID for the endpoint.
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lambda_functions.html.markdown b/website/docs/cdktf/typescript/d/lambda_functions.html.markdown
index 13d01716465..558a645d956 100644
--- a/website/docs/cdktf/typescript/d/lambda_functions.html.markdown
+++ b/website/docs/cdktf/typescript/d/lambda_functions.html.markdown
@@ -36,11 +36,11 @@ class MyConvertedCode extends TerraformStack {
 
 The resource does not support any arguments.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This data source exports the following attributes in addition to the arguments above:
 
 * `functionNames` - A list of Lambda Function names.
 * `functionArns` - A list of Lambda Function ARNs.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lambda_invocation.html.markdown b/website/docs/cdktf/typescript/d/lambda_invocation.html.markdown
index b8fc8b9878d..9d8f9f32dff 100644
--- a/website/docs/cdktf/typescript/d/lambda_invocation.html.markdown
+++ b/website/docs/cdktf/typescript/d/lambda_invocation.html.markdown
@@ -57,8 +57,10 @@ class MyConvertedCode extends TerraformStack {
 
 * `qualifier` - (Optional) Qualifier (a.k.a. version) of the lambda function. Defaults to `$latest`.
 
-## Attributes Reference
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
 
 * `result` - String result of the lambda function invocation.
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lambda_layer_version.html.markdown b/website/docs/cdktf/typescript/d/lambda_layer_version.html.markdown
index e7ca613d9f1..3e04a21f371 100644
--- a/website/docs/cdktf/typescript/d/lambda_layer_version.html.markdown
+++ b/website/docs/cdktf/typescript/d/lambda_layer_version.html.markdown
@@ -41,16 +41,16 @@ class MyConvertedCode extends TerraformStack {
 
 ## Argument Reference
 
-The following arguments are supported:
+This data source supports the following arguments:
 
 * `layerName` - (Required) Name of the lambda layer.
 * `version` - (Optional) Specific layer version. Conflicts with `compatibleRuntime` and `compatibleArchitecture`. If omitted, the latest available layer version will be used.
 * `compatibleRuntime` - (Optional) Specific runtime the layer version must support. Conflicts with `version`. If specified, the latest available layer version supporting the provided runtime will be used.
 * `compatibleArchitecture` - (Optional) Specific architecture the layer version could support. Conflicts with `version`. If specified, the latest available layer version supporting the provided architecture will be used.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This data source exports the following attributes in addition to the arguments above:
 
 * `description` - Description of the specific Lambda Layer version.
 * `licenseInfo` - License info associated with the specific Lambda Layer version.
@@ -68,4 +68,4 @@ In addition to all arguments above, the following attributes are exported:
 [1]: https://docs.aws.amazon.com/lambda/latest/dg/API_GetLayerVersion.html#SSS-GetLayerVersion-response-CompatibleRuntimes
 [2]: https://docs.aws.amazon.com/lambda/latest/dg/API_GetLayerVersion.html#SSS-GetLayerVersion-response-CompatibleArchitectures
 
- 
\ No newline at end of file
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/launch_configuration.html.markdown b/website/docs/cdktf/typescript/d/launch_configuration.html.markdown
new file mode 100644
index 00000000000..da8702e7411
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/launch_configuration.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_launch_configuration"
+description: |-
+  Provides a Launch Configuration data source.
+---
+
+
+
+# Data Source: aws_launch_configuration
+
+Provides information about a Launch Configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLaunchConfiguration } from "./.gen/providers/aws/data-aws-launch-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLaunchConfiguration(this, "ubuntu", {
+      name: "test-launch-config",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the launch configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the launch configuration.
+* `arn` - Amazon Resource Name of the launch configuration.
+* `name` - Name of the launch configuration.
+* `imageId` - EC2 Image ID of the instance.
+* `instanceType` - Instance Type of the instance to launch.
+* `iamInstanceProfile` - The IAM Instance Profile to associate with launched instances.
+* `keyName` - Key Name that should be used for the instance.
+* `metadataOptions` - Metadata options for the instance.
+    * `httpEndpoint` - State of the metadata service: `enabled`, `disabled`.
+    * `httpTokens` - If session tokens are required: `optional`, `required`.
+    * `httpPutResponseHopLimit` - The desired HTTP PUT response hop limit for instance metadata requests.
+* `securityGroups` - List of associated Security Group IDs.
+* `associatePublicIpAddress` - Whether a Public IP address is associated with the instance.
+* `userData` - User Data of the instance.
+* `enableMonitoring` - Whether Detailed Monitoring is Enabled.
+* `ebsOptimized` - Whether the launched EC2 instance will be EBS-optimized.
+* `rootBlockDevice` - Root Block Device of the instance.
+* `ebsBlockDevice` - EBS Block Devices attached to the instance.
+* `ephemeralBlockDevice` - The Ephemeral volumes on the instance.
+* `spotPrice` - Price to use for reserving Spot instances.
+* `placementTenancy` - Tenancy of the instance.
+
+`rootBlockDevice` is exported with the following attributes:
+
+* `deleteOnTermination` - Whether the EBS Volume will be deleted on instance termination.
+* `encrypted` - Whether the volume is Encrypted.
+* `iops` - Provisioned IOPs of the volume.
+* `throughput` - Throughput of the volume.
+* `volumeSize` - Size of the volume. +* `volumeType` - Type of the volume. + +`ebsBlockDevice` is exported with the following attributes: + +* `deleteOnTermination` - Whether the EBS Volume will be deleted on instance termination. +* `deviceName` - Name of the device. +* `encrypted` - Whether the volume is Encrypted. +* `iops` - Provisioned IOPs of the volume. +* `noDevice` - Whether the device in the block device mapping of the AMI is suppressed. +* `snapshotId` - Snapshot ID of the mount. +* `throughput` - Throughput of the volume. +* `volumeSize` - Size of the volume. +* `volumeType` - Type of the volume. + +`ephemeralBlockDevice` is exported with the following attributes: + +* `deviceName` - Name of the device. +* `virtualName` - Virtual Name of the device. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/launch_template.html.markdown b/website/docs/cdktf/typescript/d/launch_template.html.markdown new file mode 100644 index 00000000000..dd5c28f913c --- /dev/null +++ b/website/docs/cdktf/typescript/d/launch_template.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_launch_template" +description: |- + Provides a Launch Template data source. +--- + + + +# Data Source: aws_launch_template + +Provides information about a Launch Template. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLaunchTemplate } from "./.gen/providers/aws/data-aws-launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLaunchTemplate(this, "default", { + name: "my-launch-template", + }); + } +} + +``` + +### Filter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLaunchTemplate } from "./.gen/providers/aws/data-aws-launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLaunchTemplate(this, "test", { + filter: [ + { + name: "launch-template-name", + values: ["some-template"], + }, + ], + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. +* `id` - (Optional) ID of the specific launch template to retrieve. +* `name` - (Optional) Name of the launch template. +* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Launch Template. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeLaunchTemplates API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeLaunchTemplates.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. 
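+
+For comparison, a hand-written sketch (not `cdktf convert` output) of the same lookup expressed through the `tags` argument instead of `filter`; the `Name` tag value is a hypothetical placeholder:
+
+```typescript
+// Hypothetical sketch: select a launch template by tag rather than by name or filter.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsLaunchTemplate } from "./.gen/providers/aws/data-aws-launch-template";
+class LaunchTemplateByTags extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLaunchTemplate(this, "by_tags", {
+      tags: {
+        Name: "my-launch-template",
+      },
+    });
+  }
+}
+```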
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the launch template. + +This resource also exports a full set of attributes corresponding to the arguments of the [`awsLaunchTemplate`](/docs/providers/aws/r/launch_template.html) resource. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/lb.html.markdown b/website/docs/cdktf/typescript/d/lb.html.markdown new file mode 100644 index 00000000000..1a073d0b165 --- /dev/null +++ b/website/docs/cdktf/typescript/d/lb.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb" +description: |- + Provides a Load Balancer data source. +--- + + + +# Data Source: aws_lb + +~> **Note:** `awsAlb` is known as `awsLb`. The functionality is identical. + +Provides information about a Load Balancer. + +This data source can prove useful when a module accepts an LB as an input +variable and needs to, for example, determine the security groups associated +with it, etc. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { VariableType, TerraformVariable, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLb } from "./.gen/providers/aws/data-aws-lb"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const lbArn = new TerraformVariable(this, "lb_arn", { + default: "", + type: VariableType.STRING, + }); + const lbName = new TerraformVariable(this, "lb_name", { + default: "", + type: VariableType.STRING, + }); + new DataAwsLb(this, "test", { + arn: lbArn.stringValue, + name: lbName.stringValue, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Optional) Full ARN of the load balancer. +* `name` - (Optional) Unique name of the load balancer. +* `tags` - (Optional) Mapping of tags, each pair of which must exactly match a pair on the desired load balancer. + +~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. `tags` has lowest precedence. + +## Attribute Reference + +See the [LB Resource](/docs/providers/aws/r/lb.html) for details on the +returned attributes - they are identical. 
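+
+As a hand-written sketch (assuming the attribute names mirror the `awsLb` resource, e.g. `dnsName`), a looked-up attribute can be consumed like any other token:
+
+```typescript
+// Hypothetical sketch: surface the looked-up load balancer's DNS name as a stack output.
+import { Construct } from "constructs";
+import { TerraformStack, TerraformOutput } from "cdktf";
+import { DataAwsLb } from "./.gen/providers/aws/data-aws-lb";
+class LbDnsNameOutput extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const selected = new DataAwsLb(this, "selected", {
+      name: "default-public",
+    });
+    new TerraformOutput(this, "lb_dns_name", {
+      value: selected.dnsName,
+    });
+  }
+}
+```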
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20M`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lb_hosted_zone_id.html.markdown b/website/docs/cdktf/typescript/d/lb_hosted_zone_id.html.markdown
new file mode 100644
index 00000000000..0cc2fceb5d2
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lb_hosted_zone_id.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_hosted_zone_id"
+description: |-
+  Provides AWS Elastic Load Balancing Hosted Zone Id
+---
+
+
+
+# Data Source: aws_lb_hosted_zone_id
+
+Use this data source to get the HostedZoneId of AWS Elastic Load Balancing (ELB) in a given region for use in an AWS Route53 Alias record. Specify the ELB type (`network` or `application`) to return the associated HostedZoneId. Ref: [ELB service endpoints](https://docs.aws.amazon.com/general/latest/gr/elb.html#elb_region)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLbHostedZoneId } from "./.gen/providers/aws/data-aws-lb-hosted-zone-id";
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new DataAwsLbHostedZoneId(this, "main", {});
+    new Route53Record(this, "www", {
+      alias: {
+        evaluateTargetHealth: true,
+        name: Token.asString(awsLbMain.dnsName),
+        zoneId: Token.asString(main.id),
+      },
+      name: "example.com",
+      type: "A",
+      zoneId: primary.zoneId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `region` - (Optional) Name of the region whose AWS ELB HostedZoneId is desired.
+  Defaults to the region from the AWS provider configuration.
+
+* `loadBalancerType` - (Optional) Type of load balancer to look up. Possible values are `application` or `network`. The default value is `application`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the AWS ELB HostedZoneId in the selected region.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lb_listener.html.markdown b/website/docs/cdktf/typescript/d/lb_listener.html.markdown
new file mode 100644
index 00000000000..00889d5a330
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lb_listener.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_listener"
+description: |-
+  Provides a Load Balancer Listener data source.
+---
+
+
+
+# Data Source: aws_lb_listener
+
+~> **Note:** `awsAlbListener` is known as `awsLbListener`. The functionality is identical.
+
+Provides information about a Load Balancer Listener.
+
+This data source can prove useful when a module accepts an LB Listener as an input variable and needs to know the LB it is attached to, or other information specific to the listener in question.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { VariableType, TerraformVariable, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLb } from "./.gen/providers/aws/data-aws-lb";
+import { DataAwsLbListener } from "./.gen/providers/aws/data-aws-lb-listener";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const listenerArn = new TerraformVariable(this, "listener_arn", {
+      type: VariableType.STRING,
+    });
+    const selected = new DataAwsLb(this, "selected", {
+      name: "default-public",
+    });
+    new DataAwsLbListener(this, "listener", {
+      arn: listenerArn.stringValue,
+    });
+    new DataAwsLbListener(this, "selected443", {
+      loadBalancerArn: Token.asString(selected.arn),
+      port: 443,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Optional) ARN of the listener. Required if `loadBalancerArn` and `port` are not set.
+* `loadBalancerArn` - (Optional) ARN of the load balancer. Required if `arn` is not set.
+* `port` - (Optional) Port of the listener. Required if `arn` is not set.
+
+## Attribute Reference
+
+See the [LB Listener Resource](/docs/providers/aws/r/lb_listener.html) for details on the returned attributes - they are identical.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20M`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lb_target_group.html.markdown b/website/docs/cdktf/typescript/d/lb_target_group.html.markdown
new file mode 100644
index 00000000000..3bae5cbb654
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lb_target_group.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_target_group"
+description: |-
+  Provides a Load Balancer Target Group data source.
+---
+
+
+
+# Data Source: aws_lb_target_group
+
+~> **Note:** `awsAlbTargetGroup` is known as `awsLbTargetGroup`. The functionality is identical.
+
+Provides information about a Load Balancer Target Group.
+
+This data source can prove useful when a module accepts an LB Target Group as an
+input variable and needs to know its attributes. It can also be used to get the ARN of
+an LB Target Group for use in other resources, given the LB Target Group name.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { VariableType, TerraformVariable, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLbTargetGroup } from "./.gen/providers/aws/data-aws-lb-target-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const lbTgArn = new TerraformVariable(this, "lb_tg_arn", {
+      default: "",
+      type: VariableType.STRING,
+    });
+    const lbTgName = new TerraformVariable(this, "lb_tg_name", {
+      default: "",
+      type: VariableType.STRING,
+    });
+    new DataAwsLbTargetGroup(this, "test", {
+      arn: lbTgArn.stringValue,
+      name: lbTgName.stringValue,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `arn` - (Optional) Full ARN of the target group.
+* `name` - (Optional) Unique name of the target group.
+* `tags` - (Optional) Mapping of tags, each pair of which must exactly match a pair on the desired target group.
+
+~> **NOTE:** When both `arn` and `name` are specified, `arn` takes precedence. `tags` has the lowest precedence.
+
+## Attribute Reference
+
+See the [LB Target Group Resource](/docs/providers/aws/r/lb_target_group.html) for details
+on the returned attributes - they are identical.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20M`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lbs.html.markdown b/website/docs/cdktf/typescript/d/lbs.html.markdown
new file mode 100644
index 00000000000..a60a8478e81
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lbs.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lbs"
+description: |-
+  Data source for listing AWS ELB (Elastic Load Balancing) Load Balancers.
+---
+
+
+
+# Data Source: aws_lbs
+
+Use this data source to get a list of Load Balancer ARNs matching the specified criteria. Useful for passing to other
+resources.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLbs } from "./.gen/providers/aws/data-aws-lbs";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLbs(this, "example", {
+      tags: {
+        "elbv2.k8s.aws/cluster": "my-cluster",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired Load Balancers.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - Set of Load Balancer ARNs.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lex_bot.html.markdown b/website/docs/cdktf/typescript/d/lex_bot.html.markdown
new file mode 100644
index 00000000000..7226734040e
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lex_bot.html.markdown
@@ -0,0 +1,66 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_bot"
+description: |-
+  Provides details about a specific Lex Bot
+---
+
+
+
+# Data Source: aws_lex_bot
+
+Provides details about a specific Amazon Lex Bot.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLexBot } from "./.gen/providers/aws/data-aws-lex-bot";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLexBot(this, "order_flowers_bot", {
+      name: "OrderFlowers",
+      version: "$LATEST",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the bot. The name is case sensitive.
+* `version` - (Optional) Version or alias of the bot.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the bot.
+* `checksum` - Checksum of the bot used to identify a specific revision of the bot's `$latest` version.
+* `childDirected` - Whether this Amazon Lex Bot is related to a website, program, or other application that is directed or targeted, in whole or in part, to children under age 13 and subject to COPPA.
+* `createdDate` - Date that the bot was created.
+* `description` - Description of the bot.
+* `detectSentiment` - When set to true, user utterances are sent to Amazon Comprehend for sentiment analysis.
+* `enableModelImprovements` - Set to true if natural language understanding improvements are enabled.
+* `failureReason` - If the `status` is `failed`, the reason why the bot failed to build.
+* `idleSessionTtlInSeconds` - The maximum time in seconds that Amazon Lex retains the data gathered in a conversation.
+* `lastUpdatedDate` - Date that the bot was updated.
+* `locale` - Target locale for the bot. Any intent used in the bot must be compatible with the locale of the bot.
+* `name` - Name of the bot, case sensitive.
+* `nluIntentConfidenceThreshold` - The threshold where Amazon Lex will insert the AMAZON.FallbackIntent, AMAZON.KendraSearchIntent, or both when returning alternative intents in a PostContent or PostText response. AMAZON.FallbackIntent and AMAZON.KendraSearchIntent are only inserted if they are configured for the bot.
+* `status` - Status of the bot.
+* `version` - Version of the bot. For a new bot, the version is always `$latest`.
+* `voiceId` - Amazon Polly voice ID that the Amazon Lex Bot uses for voice interactions with the user.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/lex_bot_alias.html.markdown b/website/docs/cdktf/typescript/d/lex_bot_alias.html.markdown
new file mode 100644
index 00000000000..7ef31c534c1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/lex_bot_alias.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_bot_alias"
+description: |-
+  Provides details about a specific Lex Bot Alias
+---
+
+
+
+# Data Source: aws_lex_bot_alias
+
+Provides details about a specific Amazon Lex Bot Alias.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsLexBotAlias } from "./.gen/providers/aws/data-aws-lex-bot-alias"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLexBotAlias(this, "order_flowers_prod", { + botName: "OrderFlowers", + name: "OrderFlowersProd", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `botName` - (Required) Name of the bot. +* `name` - (Required) Name of the bot alias. The name is case sensitive. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the bot alias. +* `botName` - Name of the bot. +* `botVersion` - Version of the bot that the alias points to. +* `checksum` - Checksum of the bot alias. +* `createdDate` - Date that the bot alias was created. +* `description` - Description of the alias. +* `lastUpdatedDate` - Date that the bot alias was updated. When you create a resource, the creation date and the last updated date are the same. +* `name` - Name of the alias. The name is not case sensitive. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/lex_intent.html.markdown b/website/docs/cdktf/typescript/d/lex_intent.html.markdown new file mode 100644 index 00000000000..4bdb9c2d95a --- /dev/null +++ b/website/docs/cdktf/typescript/d/lex_intent.html.markdown @@ -0,0 +1,62 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_intent" +description: |- + Provides details about a specific Amazon Lex Intent +--- + + + +# Data Source: aws_lex_intent + +Provides details about a specific Amazon Lex Intent. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLexIntent } from "./.gen/providers/aws/data-aws-lex-intent"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLexIntent(this, "order_flowers", { + name: "OrderFlowers", + version: "$LATEST", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the intent. The name is case sensitive. +* `version` - (Optional) Version of the intent. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Lex intent. +* `checksum` - Checksum identifying the version of the intent that was created. The checksum is not +included as an argument because the resource will add it automatically when updating the intent. +* `createdDate` - Date when the intent version was created. +* `description` - Description of the intent. +* `lastUpdatedDate` - Date when the $LATEST version of this intent was updated. +* `name` - Name of the intent, not case sensitive. +* `parentIntentSignature` - A unique identifier for the built-in intent to base this +intent on. To find the signature for an intent, see +[Standard Built-in Intents](https://developer.amazon.com/public/solutions/alexa/alexa-skills-kit/docs/built-in-intent-ref/standard-intents) +in the Alexa Skills Kit. +* `version` - Version of the bot. 
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/lex_slot_type.html.markdown b/website/docs/cdktf/typescript/d/lex_slot_type.html.markdown new file mode 100644 index 00000000000..6c4ea45d619 --- /dev/null +++ b/website/docs/cdktf/typescript/d/lex_slot_type.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_slot_type" +description: |- + Provides details about a specific Amazon Lex Slot Type +--- + + + +# Data Source: aws_lex_slot_type + +Provides details about a specific Amazon Lex Slot Type. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLexSlotType } from "./.gen/providers/aws/data-aws-lex-slot-type"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLexSlotType(this, "flower_types", { + name: "FlowerTypes", + version: "1", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the slot type. The name is case sensitive. +* `version` - (Optional) Version of the slot type. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `checksum` - Checksum identifying the version of the slot type that was created. The checksum is +not included as an argument because the resource will add it automatically when updating the slot type. +* `createdDate` - Date when the slot type version was created. +* `description` - Description of the slot type. +* `enumerationValue` - Set of EnumerationValue objects that defines the values that +the slot type can take. Each value can have a set of synonyms, which are additional values that help +train the machine learning model about the values that it resolves for a slot. +* `lastUpdatedDate` - Date when the $LATEST version of this slot type was updated. +* `name` - Name of the slot type. The name is not case sensitive. +* `valueSelectionStrategy` - Determines the slot resolution strategy that Amazon Lex +uses to return slot type values. `originalValue` returns the value entered by the user if the user +value is similar to the slot value. `topResolution` returns the first value in the resolution list +if there is a resolution list for the slot, otherwise null is returned. +* `version` - Version of the slot type. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/licensemanager_grants.html.markdown b/website/docs/cdktf/typescript/d/licensemanager_grants.html.markdown new file mode 100644 index 00000000000..b1038b4a920 --- /dev/null +++ b/website/docs/cdktf/typescript/d/licensemanager_grants.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "License Manager" +layout: "aws" +page_title: "AWS: aws_licensemanager_grants" +description: |- + Get information about a set of license manager grant licenses +--- + + + +# Data Source: aws_licensemanager_grants + +This resource can be used to get a set of license grant ARNs matching a filter. + +## Example Usage + +The following shows getting all license grant ARNs granted to your account. 
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsLicensemanagerGrants } from "./.gen/providers/aws/data-aws-licensemanager-grants";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    new DataAwsLicensemanagerGrants(this, "test", {
+      filter: [
+        {
+          name: "GranteePrincipalARN",
+          values: ["arn:aws:iam::${" + current.accountId + "}:root"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/license-manager/latest/APIReference/API_ListReceivedGrants.html#API_ListReceivedGrants_RequestSyntax).
+  For example, if filtering using `productSku`, use:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLicensemanagerGrants } from "./.gen/providers/aws/data-aws-licensemanager-grants";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLicensemanagerGrants(this, "selected", {
+      filter: [
+        {
+          name: "ProductSKU",
+          values: [""],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+* `values` - (Required) Set of values that are accepted for the given field.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - List of all the license grant ARNs found.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/licensemanager_received_license.html.markdown b/website/docs/cdktf/typescript/d/licensemanager_received_license.html.markdown
new file mode 100644
index 00000000000..c3b26f2e55f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/licensemanager_received_license.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_received_license"
+description: |-
+  Get information about a received License Manager license
+---
+
+
+
+# Data Source: aws_licensemanager_received_license
+
+This data source can be used to get data on a received license using an ARN. This can be helpful for pulling in data on a license from the AWS marketplace and sharing that license with another account.
+
+## Example Usage
+
+The following shows getting the received license data using an ARN.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLicensemanagerReceivedLicense } from "./.gen/providers/aws/data-aws-licensemanager-received-license";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLicensemanagerReceivedLicense(this, "test", {
+      licenseArn:
+        "arn:aws:license-manager::111111111111:license:l-ecbaa94eb71a4830b6d7e49268fecaa0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `licenseArn` - (Required) The ARN of the received license you want data for.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - The received license ARN (same as `licenseArn`).
+* `beneficiary` - Granted license beneficiary. This is in the form of the ARN of the root user of the account.
+* `consumptionConfiguration` - Configuration for consumption of the license. [Detailed below](#consumption_configuration)
+* `createTime` - Creation time of the granted license in RFC 3339 format.
+* `entitlements` - License entitlements. [Detailed below](#entitlements)
+* `homeRegion` - Home Region of the granted license.
+* `issuer` - Granted license issuer. [Detailed below](#issuer)
+* `licenseArn` - Amazon Resource Name (ARN) of the license.
+* `licenseMetadata` - Granted license metadata. This is in the form of a set of all metadata. [Detailed below](#license_metadata)
+* `licenseName` - License name.
+* `productName` - Product name.
+* `productSku` - Product SKU.
+* `receivedMetadata` - Granted license received metadata. [Detailed below](#received_metadata)
+* `status` - Granted license status.
+* `validity` - Date and time range during which the granted license is valid, in ISO8601-UTC format. [Detailed below](#validity)
+* `version` - Version of the granted license.
+
+### consumption_configuration
+
+* `borrowConfiguration` - Details about a borrow configuration. [Detailed below](#borrow_configuration)
+* `provisionalConfiguration` - Details about a provisional configuration. [Detailed below](#provisional_configuration)
+* `renewalFrequency` - Renewal frequency.
+
+#### borrow_configuration
+
+A list with a single map.
+
+* `allowEarlyCheckIn` - Indicates whether early check-ins are allowed.
+* `maxTimeToLiveInMinutes` - Maximum time for the borrow configuration, in minutes.
+
+#### provisional_configuration
+
+A list with a single map.
+
+* `maxTimeToLiveInMinutes` - Maximum time for the provisional configuration, in minutes.
+
+### entitlements
+
+A list with a single map.
+
+* `allowCheckIn` - Indicates whether check-ins are allowed.
+* `maxCount` - Maximum entitlement count. Use if the unit is not None.
+* `name` - Entitlement name.
+* `overage` - Indicates whether overages are allowed.
+* `unit` - Entitlement unit.
+* `value` - Entitlement resource. Use only if the unit is None.
+
+### issuer
+
+A list with a single map.
+
+* `keyFingerprint` - Issuer key fingerprint.
+* `name` - Issuer name.
+* `signKey` - Asymmetric KMS key from AWS Key Management Service. The KMS key must have a key usage of sign and verify, and support the RSASSA-PSS SHA-256 signing algorithm.
+
+### license_metadata
+
+Each metadata item will have the following attributes.
+
+* `name` - The key name.
+* `value` - The value.
+
+### received_metadata
+
+A list with a single map.
+
+* `allowedOperations` - A list of allowed operations.
+* `receivedStatus` - Received status.
+* `receivedStatusReason` - Received status reason.
+
+### validity
+
+A list with a single map.
+
+* `begin` - Start of the validity time range.
+* `end` - End of the validity time range.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/licensemanager_received_licenses.html.markdown b/website/docs/cdktf/typescript/d/licensemanager_received_licenses.html.markdown
new file mode 100644
index 00000000000..5c5583bf537
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/licensemanager_received_licenses.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_received_licenses"
+description: |-
+  Get information about a set of License Manager received licenses
+---
+
+
+
+# Data Source: aws_licensemanager_received_licenses
+
+This data source can be used to get a set of license ARNs matching a filter.
+
+## Example Usage
+
+The following shows getting all license ARNs issued from the AWS marketplace. Providing no filter returns all license ARNs for the entire account.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLicensemanagerReceivedLicenses } from "./.gen/providers/aws/data-aws-licensemanager-received-licenses";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLicensemanagerReceivedLicenses(this, "test", {
+      filter: [
+        {
+          name: "IssuerName",
+          values: ["AWS/Marketplace"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/license-manager/latest/APIReference/API_ListReceivedLicenses.html#API_ListReceivedLicenses_RequestSyntax).
+  For example, if filtering using `productSku`, use:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLicensemanagerReceivedLicenses } from "./.gen/providers/aws/data-aws-licensemanager-received-licenses";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLicensemanagerReceivedLicenses(this, "selected", {
+      filter: [
+        {
+          name: "ProductSKU",
+          values: [""],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+* `values` - (Required) Set of values that are accepted for the given field.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - List of all the license ARNs found.
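+
+A small hand-written follow-on sketch (not `cdktf convert` output): exposing the matched ARNs as a stack output so they can be consumed by other stacks or tooling:
+
+```typescript
+// Hypothetical sketch: output the license ARNs matched by an IssuerName filter.
+import { Construct } from "constructs";
+import { TerraformStack, TerraformOutput } from "cdktf";
+import { DataAwsLicensemanagerReceivedLicenses } from "./.gen/providers/aws/data-aws-licensemanager-received-licenses";
+class ReceivedLicenseArns extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const marketplace = new DataAwsLicensemanagerReceivedLicenses(
+      this,
+      "marketplace",
+      {
+        filter: [
+          {
+            name: "IssuerName",
+            values: ["AWS/Marketplace"],
+          },
+        ],
+      }
+    );
+    new TerraformOutput(this, "license_arns", {
+      value: marketplace.arns,
+    });
+  }
+}
+```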
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/location_geofence_collection.html.markdown b/website/docs/cdktf/typescript/d/location_geofence_collection.html.markdown new file mode 100644 index 00000000000..24aaa69556e --- /dev/null +++ b/website/docs/cdktf/typescript/d/location_geofence_collection.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_geofence_collection" +description: |- + Retrieve information about a Location Service Geofence Collection. +--- + + + +# Data Source: aws_location_geofence_collection + +Retrieve information about a Location Service Geofence Collection. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLocationGeofenceCollection } from "./.gen/providers/aws/data-aws-location-geofence-collection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLocationGeofenceCollection(this, "example", { + collectionName: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `collectionName` - (Required) Name of the geofence collection. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `collectionArn` - ARN for the geofence collection resource. Used when you need to specify a resource across all AWS. +* `createTime` - Timestamp for when the geofence collection resource was created in ISO 8601 format. +* `description` - Optional description of the geofence collection resource. +* `kmsKeyId` - Key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource. +* `tags` - Key-value map of resource tags for the geofence collection. +* `updateTime` - Timestamp for when the geofence collection resource was last updated in ISO 8601 format. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/location_map.html.markdown b/website/docs/cdktf/typescript/d/location_map.html.markdown new file mode 100644 index 00000000000..c3606722aec --- /dev/null +++ b/website/docs/cdktf/typescript/d/location_map.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_map" +description: |- + Retrieve information about a Location Service Map. +--- + + + +# Data Source: aws_location_map + +Retrieve information about a Location Service Map. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsLocationMap } from "./.gen/providers/aws/data-aws-location-map"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsLocationMap(this, "example", { + mapName: "example", + }); + } +} + +``` + +## Argument Reference + +* `mapName` - (Required) Name of the map resource. 
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `configuration` - List of configurations that specify the map tile style selected from a partner data provider.
+    * `style` - The map style selected from an available data provider.
+* `createTime` - Timestamp for when the map resource was created in ISO 8601 format.
+* `description` - Optional description for the map resource.
+* `mapArn` - ARN for the map resource.
+* `tags` - Key-value map of resource tags for the map.
+* `updateTime` - Timestamp for when the map resource was last updated in ISO 8601 format.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/location_place_index.html.markdown b/website/docs/cdktf/typescript/d/location_place_index.html.markdown
new file mode 100644
index 00000000000..63366506927
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/location_place_index.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_place_index"
+description: |-
+  Retrieve information about a Location Service Place Index.
+---
+
+
+
+# Data Source: aws_location_place_index
+
+Retrieve information about a Location Service Place Index.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLocationPlaceIndex } from "./.gen/providers/aws/data-aws-location-place-index";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLocationPlaceIndex(this, "example", {
+      indexName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `indexName` - (Required) Name of the place index resource.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `createTime` - Timestamp for when the place index resource was created in ISO 8601 format.
+* `dataSource` - Data provider of geospatial data.
+* `dataSourceConfiguration` - List of configurations that specify the data storage option for requesting Places.
+* `description` - Optional description for the place index resource.
+* `indexArn` - ARN for the place index resource.
+* `tags` - Key-value map of resource tags for the place index.
+* `updateTime` - Timestamp for when the place index resource was last updated in ISO 8601 format.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/location_route_calculator.html.markdown b/website/docs/cdktf/typescript/d/location_route_calculator.html.markdown
new file mode 100644
index 00000000000..1e74fdfae7f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/location_route_calculator.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_route_calculator"
+description: |-
+  Retrieve information about a Location Service Route Calculator.
+---
+
+
+
+# Data Source: aws_location_route_calculator
+
+Retrieve information about a Location Service Route Calculator.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLocationRouteCalculator } from "./.gen/providers/aws/data-aws-location-route-calculator";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLocationRouteCalculator(this, "example", {
+      calculatorName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `calculatorName` - (Required) Name of the route calculator resource.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `calculatorArn` - ARN for the route calculator resource. Use the ARN when you need to specify a resource across all of AWS.
+* `createTime` - Timestamp for when the route calculator resource was created in ISO 8601 format.
+* `dataSource` - Data provider of traffic and road network data.
+* `description` - Optional description of the route calculator resource.
+* `tags` - Key-value map of resource tags for the route calculator.
+* `updateTime` - Timestamp for when the route calculator resource was last updated in ISO 8601 format.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/location_tracker.html.markdown b/website/docs/cdktf/typescript/d/location_tracker.html.markdown
new file mode 100644
index 00000000000..2d83801e8b5
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/location_tracker.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_tracker"
+description: |-
+  Retrieve information about a Location Service Tracker.
+---
+
+
+
+# Data Source: aws_location_tracker
+
+Retrieve information about a Location Service Tracker.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLocationTracker } from "./.gen/providers/aws/data-aws-location-tracker";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLocationTracker(this, "example", {
+      trackerName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `trackerName` - (Required) Name of the tracker resource.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `createTime` - Timestamp for when the tracker resource was created in ISO 8601 format.
+* `description` - Optional description for the tracker resource.
+* `kmsKeyId` - Key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource.
+* `positionFiltering` - Position filtering method of the tracker resource.
+* `tags` - Key-value map of resource tags for the tracker.
+* `trackerArn` - ARN for the tracker resource. Used when you need to specify a resource across all of AWS.
+* `updateTime` - Timestamp for when the tracker resource was last updated in ISO 8601 format.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/location_tracker_association.html.markdown b/website/docs/cdktf/typescript/d/location_tracker_association.html.markdown
new file mode 100644
index 00000000000..657e18e0b19
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/location_tracker_association.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_tracker_association"
+description: |-
+  Retrieve information about a Location Service Tracker Association.
+---
+
+
+
+# Data Source: aws_location_tracker_association
+
+Retrieve information about a Location Service Tracker Association.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLocationTrackerAssociation } from "./.gen/providers/aws/data-aws-location-tracker-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLocationTrackerAssociation(this, "example", {
+      consumerArn:
+        "arn:aws:geo:region:account-id:geofence-collection/ExampleGeofenceCollectionConsumer",
+      trackerName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `consumerArn` - (Required) ARN of the geofence collection associated with the tracker resource.
+* `trackerName` - (Required) Name of the tracker resource associated with a geofence collection.
+
+## Attribute Reference
+
+This data source exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/location_tracker_associations.html.markdown b/website/docs/cdktf/typescript/d/location_tracker_associations.html.markdown
new file mode 100644
index 00000000000..86ca8e622c6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/location_tracker_associations.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_tracker_associations"
+description: |-
+  Retrieve information about Location Service Tracker Associations.
+---
+
+
+
+# Data Source: aws_location_tracker_associations
+
+Retrieve information about Location Service Tracker Associations.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsLocationTrackerAssociations } from "./.gen/providers/aws/data-aws-location-tracker-associations";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsLocationTrackerAssociations(this, "example", {
+      trackerName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `trackerName` - (Required) Name of the tracker resource associated with a geofence collection.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `consumerArns` - List of geofence collection ARNs associated with the tracker resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/memorydb_acl.html.markdown b/website/docs/cdktf/typescript/d/memorydb_acl.html.markdown
new file mode 100644
index 00000000000..26422a93046
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/memorydb_acl.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_acl"
+description: |-
+  Provides information about a MemoryDB ACL.
+---
+
+
+
+# Data Source: aws_memorydb_acl
+
+Provides information about a MemoryDB ACL.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMemorydbAcl } from "./.gen/providers/aws/data-aws-memorydb-acl";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMemorydbAcl(this, "example", {
+      name: "my-acl",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the ACL.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the ACL.
+* `arn` - ARN of the ACL.
+* `minimumEngineVersion` - The minimum engine version supported by the ACL.
+* `tags` - Map of tags assigned to the ACL.
+* `userNames` - Set of MemoryDB user names included in this ACL.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/memorydb_cluster.html.markdown b/website/docs/cdktf/typescript/d/memorydb_cluster.html.markdown
new file mode 100644
index 00000000000..b23ee0efb6f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/memorydb_cluster.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_cluster"
+description: |-
+  Provides information about a MemoryDB Cluster.
+---
+
+
+
+# Data Source: aws_memorydb_cluster
+
+Provides information about a MemoryDB Cluster.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMemorydbCluster } from "./.gen/providers/aws/data-aws-memorydb-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMemorydbCluster(this, "example", {
+      name: "my-cluster",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the cluster.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Same as `name`.
+* `arn` - ARN of the cluster.
+* `aclName` - Name of the Access Control List associated with the cluster.
+* `autoMinorVersionUpgrade` - True when the cluster allows automatic minor version upgrades.
+* `clusterEndpoint`
+    * `address` - DNS hostname of the cluster configuration endpoint.
+    * `port` - Port number that the cluster configuration endpoint is listening on.
+* `dataTiering` - True when data tiering is enabled.
+* `description` - Description for the cluster.
+* `enginePatchVersion` - Patch version number of the Redis engine used by the cluster.
+* `engineVersion` - Version number of the Redis engine used by the cluster.
+* `finalSnapshotName` - Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
+* `kmsKeyArn` - ARN of the KMS key used to encrypt the cluster at rest.
+* `maintenanceWindow` - Weekly time range during which maintenance on the cluster is performed. Specify as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). Example: `sun:23:00-mon:01:30`.
+* `nodeType` - Compute and memory capacity of the nodes in the cluster.
+* `numReplicasPerShard` - The number of replicas to apply to each shard.
+* `numShards` - Number of shards in the cluster.
+* `parameterGroupName` - The name of the parameter group associated with the cluster.
+* `port` - Port number on which each of the nodes accepts connections.
+* `securityGroupIds` - Set of VPC Security Group IDs associated with this cluster.
+* `shards` - Set of shards in this cluster.
+    * `name` - Name of this shard.
+    * `numNodes` - Number of individual nodes in this shard.
+    * `slots` - Keyspace for this shard. Example: `0-16383`.
+    * `nodes` - Set of nodes in this shard.
+        * `availabilityZone` - The Availability Zone in which the node resides.
+        * `createTime` - The date and time when the node was created. Example: `2022-01-01T21:00:00Z`.
+        * `name` - Name of this node.
+        * `endpoint`
+            * `address` - DNS hostname of the node.
+            * `port` - Port number that this node is listening on.
+* `snapshotRetentionLimit` - The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled.
+* `snapshotWindow` - Daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
+* `snsTopicArn` - ARN of the SNS topic to which cluster notifications are sent.
+* `subnetGroupName` - The name of the subnet group used for the cluster.
+* `tlsEnabled` - When true, in-transit encryption is enabled for the cluster.
+* `tags` - Map of tags assigned to the cluster.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/memorydb_parameter_group.html.markdown b/website/docs/cdktf/typescript/d/memorydb_parameter_group.html.markdown
new file mode 100644
index 00000000000..7f86fe8605c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/memorydb_parameter_group.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_parameter_group"
+description: |-
+  Provides information about a MemoryDB Parameter Group.
+---
+
+
+
+# Data Source: aws_memorydb_parameter_group
+
+Provides information about a MemoryDB Parameter Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMemorydbParameterGroup } from "./.gen/providers/aws/data-aws-memorydb-parameter-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMemorydbParameterGroup(this, "example", {
+      name: "my-parameter-group",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the parameter group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the parameter group.
+* `arn` - ARN of the parameter group.
+* `description` - Description of the parameter group.
+* `family` - Engine version that the parameter group can be used with.
+* `parameter` - Set of user-defined MemoryDB parameters applied by the parameter group.
+    * `name` - Name of the parameter.
+    * `value` - Value of the parameter.
+* `tags` - Map of tags assigned to the parameter group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/memorydb_snapshot.html.markdown b/website/docs/cdktf/typescript/d/memorydb_snapshot.html.markdown
new file mode 100644
index 00000000000..f291a839954
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/memorydb_snapshot.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_snapshot"
+description: |-
+  Provides information about a MemoryDB Snapshot.
+---
+
+
+
+# Data Source: aws_memorydb_snapshot
+
+Provides information about a MemoryDB Snapshot.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMemorydbSnapshot } from "./.gen/providers/aws/data-aws-memorydb-snapshot";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMemorydbSnapshot(this, "example", {
+      name: "my-snapshot",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the snapshot.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the snapshot.
+* `arn` - ARN of the snapshot.
+* `clusterConfiguration` - The configuration of the cluster from which the snapshot was taken.
+    * `description` - Description for the cluster.
+    * `engineVersion` - Version number of the Redis engine used by the cluster.
+    * `maintenanceWindow` - The weekly time range during which maintenance on the cluster is performed.
+    * `name` - Name of the cluster.
+    * `nodeType` - Compute and memory capacity of the nodes in the cluster.
+    * `numShards` - Number of shards in the cluster.
+    * `parameterGroupName` - Name of the parameter group associated with the cluster.
+    * `port` - Port number on which the cluster accepts connections.
+    * `snapshotRetentionLimit` - Number of days for which MemoryDB retains automatic snapshots before deleting them.
+    * `snapshotWindow` - The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of the shard.
+    * `subnetGroupName` - Name of the subnet group used by the cluster.
+    * `topicArn` - ARN of the SNS topic to which cluster notifications are sent.
+    * `vpcId` - The VPC in which the cluster exists.
+* `clusterName` - Name of the MemoryDB cluster that this snapshot was taken from.
+* `kmsKeyArn` - ARN of the KMS key used to encrypt the snapshot at rest.
+* `source` - Whether the snapshot is from an automatic backup (`automated`) or was created manually (`manual`).
+* `tags` - Map of tags assigned to the snapshot.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/memorydb_subnet_group.html.markdown b/website/docs/cdktf/typescript/d/memorydb_subnet_group.html.markdown
new file mode 100644
index 00000000000..52424a4b4a2
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/memorydb_subnet_group.html.markdown
@@ -0,0 +1,54 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_subnet_group"
+description: |-
+  Provides information about a MemoryDB Subnet Group.
+---
+
+
+
+# Data Source: aws_memorydb_subnet_group
+
+Provides information about a MemoryDB Subnet Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMemorydbSubnetGroup } from "./.gen/providers/aws/data-aws-memorydb-subnet-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMemorydbSubnetGroup(this, "example", {
+      name: "my-subnet-group",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the subnet group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the subnet group.
+* `arn` - ARN of the subnet group.
+* `description` - Description of the subnet group.
+* `subnetIds` - Set of VPC Subnet IDs of the subnet group.
+* `vpcId` - VPC in which the subnet group exists.
+* `tags` - Map of tags assigned to the subnet group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/memorydb_user.html.markdown b/website/docs/cdktf/typescript/d/memorydb_user.html.markdown
new file mode 100644
index 00000000000..a0219cc7437
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/memorydb_user.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_user"
+description: |-
+  Provides information about a MemoryDB User.
+---
+
+
+
+# Data Source: aws_memorydb_user
+
+Provides information about a MemoryDB User.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMemorydbUser } from "./.gen/providers/aws/data-aws-memorydb-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMemorydbUser(this, "example", {
+      userName: "my-user",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `userName` - (Required) Name of the user.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the user.
+* `accessString` - Access permissions string used for this user.
+* `arn` - ARN of the user.
+* `authenticationMode` - Denotes the user's authentication properties.
+    * `passwordCount` - The number of passwords belonging to the user.
+    * `type` - Whether the user requires a password to authenticate.
+* `minimumEngineVersion` - The minimum engine version supported for the user.
+* `tags` - Map of tags assigned to the user.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/mq_broker.html.markdown b/website/docs/cdktf/typescript/d/mq_broker.html.markdown
new file mode 100644
index 00000000000..4ec7a4e5047
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/mq_broker.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "MQ"
+layout: "aws"
+page_title: "AWS: aws_mq_broker"
+description: |-
+  Provides an MQ Broker data source.
+---
+
+
+
+# Data Source: aws_mq_broker
+
+Provides information about an MQ Broker.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { VariableType, TerraformVariable, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMqBroker } from "./.gen/providers/aws/data-aws-mq-broker";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const brokerId = new TerraformVariable(this, "broker_id", {
+      default: "",
+      type: VariableType.STRING,
+    });
+    const brokerName = new TerraformVariable(this, "broker_name", {
+      default: "",
+      type: VariableType.STRING,
+    });
+    new DataAwsMqBroker(this, "by_id", {
+      brokerId: brokerId.stringValue,
+    });
+    new DataAwsMqBroker(this, "by_name", {
+      brokerName: brokerName.stringValue,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `brokerId` - (Optional) Unique ID of the MQ broker.
+* `brokerName` - (Optional) Unique name of the MQ broker.
+
+## Attribute Reference
+
+See the [`awsMqBroker` resource](/docs/providers/aws/r/mq_broker.html) for details on the returned attributes.
+They are identical except for the user password, which is not returned when describing a broker.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/mq_broker_instance_type_offerings.markdown b/website/docs/cdktf/typescript/d/mq_broker_instance_type_offerings.markdown
new file mode 100644
index 00000000000..b6db19beda3
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/mq_broker_instance_type_offerings.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "MQ"
+layout: "aws"
+page_title: "AWS: aws_mq_broker_instance_type_offerings"
+description: |-
+  Provides an MQ Broker Instance Type Offerings data source.
+---
+
+
+
+# Data Source: aws_mq_broker_instance_type_offerings
+
+Provides information about MQ Broker Instance Type Offerings.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMqBrokerInstanceTypeOfferings } from "./.gen/providers/aws/data-aws-mq-broker-instance-type-offerings";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMqBrokerInstanceTypeOfferings(this, "all", {
+      engineType: "ACTIVEMQ",
+      hostInstanceType: "mq.m5.large",
+      storageType: "EBS",
+    });
+    new DataAwsMqBrokerInstanceTypeOfferings(this, "empty", {});
+    new DataAwsMqBrokerInstanceTypeOfferings(this, "engine", {
+      engineType: "ACTIVEMQ",
+    });
+    new DataAwsMqBrokerInstanceTypeOfferings(this, "instance", {
+      hostInstanceType: "mq.m5.large",
+    });
+    new DataAwsMqBrokerInstanceTypeOfferings(this, "storage", {
+      storageType: "EBS",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engineType` - (Optional) Filter response by engine type.
+* `hostInstanceType` - (Optional) Filter response by host instance type.
+* `storageType` - (Optional) Filter response by storage type.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `brokerInstanceOptions` - Option for host instance type. See Broker Instance Options below.
+
+### Broker Instance Options
+
+* `availabilityZones` - List of available AZs. See Availability Zones below.
+* `engineType` - Broker's engine type.
+* `hostInstanceType` - Broker's instance type.
+* `storageType` - Broker's storage type.
+* `supportedDeploymentModes` - The list of supported deployment modes.
+* `supportedEngineVersions` - The list of supported engine versions.
+
+### Availability Zones
+
+* `name` - Name of the Availability Zone.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/msk_broker_nodes.html.markdown b/website/docs/cdktf/typescript/d/msk_broker_nodes.html.markdown
new file mode 100644
index 00000000000..5ebefea1f69
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/msk_broker_nodes.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_broker_nodes"
+description: |-
+  Get information on Amazon MSK Broker Nodes
+---
+
+
+
+# Data Source: aws_msk_broker_nodes
+
+Get information on Amazon MSK Broker Nodes.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMskBrokerNodes } from "./.gen/providers/aws/data-aws-msk-broker-nodes";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMskBrokerNodes(this, "example", {
+      clusterArn: Token.asString(awsMskClusterExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `clusterArn` - (Required) ARN of the cluster the nodes belong to.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* [`nodeInfoList`](#nodes) - List of MSK Broker Nodes, sorted by broker ID in ascending order.
+
+### Nodes
+
+* `attachedEniId` - Attached elastic network interface of the broker
+* `brokerId` - ID of the broker
+* `clientSubnet` - Client subnet to which this broker node belongs
+* `clientVpcIpAddress` - The client virtual private cloud (VPC) IP address
+* `endpoints` - Set of endpoints for accessing the broker. This does not include ports
+* `nodeArn` - ARN of the node
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/msk_cluster.html.markdown b/website/docs/cdktf/typescript/d/msk_cluster.html.markdown
new file mode 100644
index 00000000000..34592dc01c1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/msk_cluster.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_cluster"
+description: |-
+  Get information on an Amazon MSK Cluster
+---
+
+
+
+# Data Source: aws_msk_cluster
+
+Get information on an Amazon MSK Cluster.
+
+-> **Note:** This data source returns information on _provisioned_ clusters.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMskCluster } from "./.gen/providers/aws/data-aws-msk-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMskCluster(this, "example", {
+      clusterName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `clusterName` - (Required) Name of the cluster.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the MSK cluster.
+* `bootstrapBrokers` - Comma separated list of one or more hostname:port pairs of Kafka brokers suitable to bootstrap connectivity to the Kafka cluster. Contains a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `PLAINTEXT` or `TLS_PLAINTEXT`. The resource sorts values alphabetically. AWS may not always return all endpoints so this value is not guaranteed to be stable across applies.
+* `bootstrapBrokersPublicSaslIam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9198`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `TLS_PLAINTEXT` or `TLS` and `clientAuthentication0Sasl0Iam` is set to `true` and `brokerNodeGroupInfo0ConnectivityInfo0PublicAccess0Type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersPublicSaslScram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs.
For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9196`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `TLS_PLAINTEXT` or `TLS` and `clientAuthentication0Sasl0Scram` is set to `true` and `brokerNodeGroupInfo0ConnectivityInfo0PublicAccess0Type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersPublicTls` - One or more DNS names (or IP addresses) and TLS port pairs. For example, `b-1-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194,b-2-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194,b-3-public.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9194`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `TLS_PLAINTEXT` or `TLS` and `brokerNodeGroupInfo0ConnectivityInfo0PublicAccess0Type` is set to `SERVICE_PROVIDED_EIPS` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersSaslIam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9098`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `TLS_PLAINTEXT` or `TLS` and `clientAuthentication0Sasl0Iam` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersSaslScram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9096`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `TLS_PLAINTEXT` or `TLS` and `clientAuthentication0Sasl0Scram` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersTls` - One or more DNS names (or IP addresses) and TLS port pairs. For example, `b-1.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094,b-2.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094,b-3.exampleClusterName.abcde.c2.kafka.us-east-1.amazonaws.com:9094`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `TLS_PLAINTEXT` or `TLS`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `kafkaVersion` - Apache Kafka version.
+* `numberOfBrokerNodes` - Number of broker nodes in the cluster.
+* `tags` - Map of key-value pairs assigned to the cluster.
+* `zookeeperConnectString` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster. The returned values are sorted alphabetically. The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies.
+* `zookeeperConnectStringTls` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster via TLS. The returned values are sorted alphabetically. The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/msk_configuration.html.markdown b/website/docs/cdktf/typescript/d/msk_configuration.html.markdown
new file mode 100644
index 00000000000..d36727e627c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/msk_configuration.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_configuration"
+description: |-
+  Get information on an Amazon MSK Configuration
+---
+
+
+
+# Data Source: aws_msk_configuration
+
+Get information on an Amazon MSK Configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMskConfiguration } from "./.gen/providers/aws/data-aws-msk-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMskConfiguration(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the configuration.
+* `latestRevision` - Latest revision of the configuration.
+* `description` - Description of the configuration.
+* `kafkaVersions` - List of Apache Kafka versions which can use this configuration.
+* `serverProperties` - Contents of the server.properties file.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/msk_kafka_version.html.markdown b/website/docs/cdktf/typescript/d/msk_kafka_version.html.markdown
new file mode 100644
index 00000000000..41853bd0266
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/msk_kafka_version.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_kafka_version"
+description: |-
+  Get information on an Amazon MSK Kafka Version
+---
+
+
+
+# Data Source: aws_msk_kafka_version
+
+Get information on an Amazon MSK Kafka Version.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMskKafkaVersion } from "./.gen/providers/aws/data-aws-msk-kafka-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMskKafkaVersion(this, "example", {
+      version: "2.8.0",
+    });
+    new DataAwsMskKafkaVersion(this, "preferred", {
+      preferredVersions: ["2.4.1.1", "2.4.1", "2.2.1"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:

+* `preferredVersions` - (Optional) Ordered list of preferred Kafka versions. The first match in this list will be returned. Either `preferredVersions` or `version` must be set.
+* `version` - (Optional) Version of MSK Kafka. For example, `2.4.1.1` or `2.2.1`. Either `preferredVersions` or `version` must be set.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `status` - Status of the MSK Kafka version, e.g. `ACTIVE` or `DEPRECATED`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/mskconnect_connector.html.markdown b/website/docs/cdktf/typescript/d/mskconnect_connector.html.markdown
new file mode 100644
index 00000000000..67ba966f801
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/mskconnect_connector.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Managed Streaming for Kafka Connect"
+layout: "aws"
+page_title: "AWS: aws_mskconnect_connector"
+description: |-
+  Get information on an Amazon MSK Connect Connector.
+---
+
+
+
+# Data Source: aws_mskconnect_connector
+
+Get information on an Amazon MSK Connect Connector.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMskconnectConnector } from "./.gen/providers/aws/data-aws-mskconnect-connector";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMskconnectConnector(this, "example", {
+      name: "example-mskconnector",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the connector.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the connector.
+* `description` - Summary description of the connector.
+* `version` - Current version of the connector.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/mskconnect_custom_plugin.html.markdown b/website/docs/cdktf/typescript/d/mskconnect_custom_plugin.html.markdown
new file mode 100644
index 00000000000..47acd0f342e
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/mskconnect_custom_plugin.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "Managed Streaming for Kafka Connect"
+layout: "aws"
+page_title: "AWS: aws_mskconnect_custom_plugin"
+description: |-
+  Get information on an Amazon MSK Connect custom plugin.
+---
+
+
+
+# Data Source: aws_mskconnect_custom_plugin
+
+Get information on an Amazon MSK Connect custom plugin.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMskconnectCustomPlugin } from "./.gen/providers/aws/data-aws-mskconnect-custom-plugin";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMskconnectCustomPlugin(this, "example", {
+      name: "example-debezium-1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the custom plugin.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the custom plugin.
+* `description` - Summary description of the custom plugin.
+* `latestRevision` - ID of the latest successfully created revision of the custom plugin.
+* `state` - State of the custom plugin.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/mskconnect_worker_configuration.html.markdown b/website/docs/cdktf/typescript/d/mskconnect_worker_configuration.html.markdown
new file mode 100644
index 00000000000..1f8b7900bca
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/mskconnect_worker_configuration.html.markdown
@@ -0,0 +1,52 @@
+---
+subcategory: "Managed Streaming for Kafka Connect"
+layout: "aws"
+page_title: "AWS: aws_mskconnect_worker_configuration"
+description: |-
+  Get information on an Amazon MSK Connect worker configuration.
+---
+
+
+
+# Data Source: aws_mskconnect_worker_configuration
+
+Get information on an Amazon MSK Connect Worker Configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsMskconnectWorkerConfiguration } from "./.gen/providers/aws/data-aws-mskconnect-worker-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsMskconnectWorkerConfiguration(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the worker configuration.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the worker configuration.
+* `description` - Summary description of the worker configuration.
+* `latestRevision` - ID of the latest successfully created revision of the worker configuration.
+* `propertiesFileContent` - Contents of the connect-distributed.properties file.
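+
+As a hand-written sketch (not produced by `cdktf convert`; the stack and output names are illustrative), the revision and properties file contents can be surfaced as stack outputs, for example to pin a revision when attaching the configuration to a connector:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsMskconnectWorkerConfiguration } from "./.gen/providers/aws/data-aws-mskconnect-worker-configuration";
+class WorkerConfigurationInfo extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsMskconnectWorkerConfiguration(this, "example", {
+      name: "example",
+    });
+    // Revision number of the latest successfully created revision.
+    new TerraformOutput(this, "latest_revision", {
+      value: example.latestRevision,
+    });
+    // Raw contents of the connect-distributed.properties file.
+    new TerraformOutput(this, "properties_file_content", {
+      value: example.propertiesFileContent,
+    });
+  }
+}
+```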
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/nat_gateway.html.markdown b/website/docs/cdktf/typescript/d/nat_gateway.html.markdown
new file mode 100644
index 00000000000..05de152b88c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/nat_gateway.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_nat_gateway"
+description: |-
+  Provides details about a specific VPC NAT Gateway.
+---
+
+
+
+# Data Source: aws_nat_gateway
+
+Provides details about a specific VPC NAT Gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNatGateway } from "./.gen/providers/aws/data-aws-nat-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNatGateway(this, "default", {
+      subnetId: public.id,
+    });
+  }
+}
+
+```
+
+### With tags
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNatGateway } from "./.gen/providers/aws/data-aws-nat-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNatGateway(this, "default", {
+      subnetId: public.id,
+      tags: {
+        Name: "gw NAT",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+NAT Gateways in the current Region. The given filters must match exactly one
+NAT Gateway whose data will be exported as attributes.
+
+* `id` - (Optional) ID of the specific NAT Gateway to retrieve.
+* `subnetId` - (Optional) ID of the subnet that the NAT Gateway resides in.
+* `vpcId` - (Optional) ID of the VPC that the NAT Gateway resides in.
+* `state` - (Optional) State of the NAT Gateway (`pending` | `failed` | `available` | `deleting` | `deleted`).
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired NAT Gateway.
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNatGateways.html).
+* `values` - (Required) Set of values that are accepted for the given field.
+  A NAT Gateway will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+All of the argument attributes except the `filter` block are also exported as
+result attributes. This data source will complete the data by populating
+any fields that are not included in the configuration with the data for
+the selected NAT Gateway.
+
+* `allocationId` - ID of the EIP allocated to the selected NAT Gateway.
+* `associationId` - The association ID of the Elastic IP address that's associated with the NAT Gateway. Only available when `connectivityType` is `public`.
+* `connectivityType` - Connectivity type of the NAT Gateway.
+* `networkInterfaceId` - The ID of the ENI allocated to the selected NAT Gateway.
+* `privateIp` - Private IP address of the selected NAT Gateway.
+* `publicIp` - Public IP (EIP) address of the selected NAT Gateway.
+* `secondaryAllocationIds` - Secondary allocation EIP IDs for the selected NAT Gateway.
+* `secondaryPrivateIpAddressCount` - The number of secondary private IPv4 addresses assigned to the selected NAT Gateway.
+* `secondaryPrivateIpAddresses` - Secondary private IPv4 addresses assigned to the selected NAT Gateway.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/nat_gateways.html.markdown b/website/docs/cdktf/typescript/d/nat_gateways.html.markdown
new file mode 100644
index 00000000000..2ec44820635
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/nat_gateways.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_nat_gateways"
+description: |-
+  Get information on Amazon NAT Gateways.
+---
+
+
+
+# Data Source: aws_nat_gateways
+
+This data source can be useful for getting back a list of NAT Gateway IDs to be referenced elsewhere.
+
+## Example Usage
+
+The following example returns all NAT Gateways in a specified VPC that are marked as available.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  Fn,
+  Token,
+  TerraformCount,
+  propertyAccess,
+  TerraformStack,
+} from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNatGateway } from "./.gen/providers/aws/data-aws-nat-gateway";
+import { DataAwsNatGateways } from "./.gen/providers/aws/data-aws-nat-gateways";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ngws = new DataAwsNatGateways(this, "ngws", {
+      filter: [
+        {
+          name: "state",
+          values: ["available"],
+        },
+      ],
+      vpcId: vpcId.stringValue,
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const ngwCount = TerraformCount.of(Token.asNumber(Fn.lengthOf(ngws.ids)));
+    new DataAwsNatGateway(this, "ngw", {
+      id: Token.asString(propertyAccess(Fn.tolist(ngws.ids), [ngwCount.index])),
+      count: ngwCount,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+* `vpcId` - (Optional) VPC ID that you want to filter from.
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired NAT Gateways.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNatGateways.html).
+* `values` - (Required) Set of values that are accepted for the given field.
+  A NAT Gateway will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the NAT Gateway IDs found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/neptune_engine_version.markdown b/website/docs/cdktf/typescript/d/neptune_engine_version.markdown
new file mode 100644
index 00000000000..27e62150ab8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/neptune_engine_version.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_engine_version"
+description: |-
+  Information about a Neptune engine version.
+---
+
+
+
+# Data Source: aws_neptune_engine_version
+
+Information about a Neptune engine version.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNeptuneEngineVersion } from "./.gen/providers/aws/data-aws-neptune-engine-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNeptuneEngineVersion(this, "test", {
+      preferredVersions: ["1.0.3.0", "1.0.2.2", "1.0.2.1"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine` - (Optional) DB engine. (Default: `neptune`)
+* `parameterGroupFamily` - (Optional) Name of a specific DB parameter group family. An example parameter group family is `neptune1`.
+* `preferredVersions` - (Optional) Ordered list of preferred engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. If neither the `version` nor the `preferredVersions` argument is configured, the data source will return the default version for the engine.
+* `version` - (Optional) Version of the DB engine. For example, `1.0.1.0`, `1.0.2.2`, and `1.0.3.0`. If neither the `version` nor the `preferredVersions` argument is configured, the data source will return the default version for the engine.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `engineDescription` - Description of the database engine.
+* `exportableLogTypes` - Set of log types that the database engine has available for export to CloudWatch Logs.
+* `supportedTimezones` - Set of the time zones supported by this engine.
+* `supportsLogExportsToCloudwatch` - Indicates whether the engine version supports exporting the log types specified by `exportableLogTypes` to CloudWatch Logs.
+* `supportsReadReplica` - Indicates whether the database engine version supports read replicas.
+* `validUpgradeTargets` - Set of engine versions that this database engine version can be upgraded to.
+* `versionDescription` - Description of the database engine version.
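+
+As a hand-written sketch (not produced by `cdktf convert`; the pinned version and names are illustrative), looking up a specific `version` also exposes its upgrade targets, which can help plan engine upgrades:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsNeptuneEngineVersion } from "./.gen/providers/aws/data-aws-neptune-engine-version";
+class NeptuneUpgradeTargets extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Pin an exact engine version instead of using preferredVersions.
+    const pinned = new DataAwsNeptuneEngineVersion(this, "pinned", {
+      version: "1.0.2.2",
+    });
+    // Set of engine versions this version can be upgraded to.
+    new TerraformOutput(this, "valid_upgrade_targets", {
+      value: pinned.validUpgradeTargets,
+    });
+  }
+}
+```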
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/neptune_orderable_db_instance.markdown b/website/docs/cdktf/typescript/d/neptune_orderable_db_instance.markdown
new file mode 100644
index 00000000000..d8cbfcd02b1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/neptune_orderable_db_instance.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_orderable_db_instance"
+description: |-
+  Information about Neptune orderable DB instances.
+---
+
+
+
+# Data Source: aws_neptune_orderable_db_instance
+
+Information about Neptune orderable DB instances.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNeptuneOrderableDbInstance } from "./.gen/providers/aws/data-aws-neptune-orderable-db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNeptuneOrderableDbInstance(this, "test", {
+      engineVersion: "1.0.3.0",
+      preferredInstanceClasses: ["db.r5.large", "db.r4.large", "db.t3.medium"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `engine` - (Optional) DB engine. (Default: `neptune`)
+* `engineVersion` - (Optional) Version of the DB engine. For example, `1.0.1.0`, `1.0.1.2`, `1.0.2.2`, and `1.0.3.0`.
+* `instanceClass` - (Optional) DB instance class. Examples of classes are `db.r5.large`, `db.r5.xlarge`, `db.r4.large`, `db.r5.4xlarge`, `db.r5.12xlarge`, `db.r4.xlarge`, and `db.t3.medium`.
+* `licenseModel` - (Optional) License model. (Default: `amazon-license`)
+* `preferredInstanceClasses` - (Optional) Ordered list of preferred Neptune DB instance classes. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+* `vpc` - (Optional) Enable to show only VPC offerings.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availabilityZones` - Availability zones where the instance is available.
+* `maxIopsPerDbInstance` - Maximum total provisioned IOPS for a DB instance.
+* `maxIopsPerGib` - Maximum provisioned IOPS per GiB for a DB instance.
+* `maxStorageSize` - Maximum storage size for a DB instance.
+* `minIopsPerDbInstance` - Minimum total provisioned IOPS for a DB instance.
+* `minIopsPerGib` - Minimum provisioned IOPS per GiB for a DB instance.
+* `minStorageSize` - Minimum storage size for a DB instance.
+* `multiAzCapable` - Whether a DB instance is Multi-AZ capable.
+* `readReplicaCapable` - Whether a DB instance can have a read replica.
+* `storageType` - Storage type for a DB instance.
+* `supportsEnhancedMonitoring` - Whether a DB instance supports Enhanced Monitoring at intervals from 1 to 60 seconds.
+* `supportsIamDatabaseAuthentication` - Whether a DB instance supports IAM database authentication.
+* `supportsIops` - Whether a DB instance supports provisioned IOPS.
+* `supportsPerformanceInsights` - Whether a DB instance supports Performance Insights.
+* `supportsStorageEncryption` - Whether a DB instance supports encrypted storage.
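+
+Similarly, as a hypothetical usage sketch (the cluster identifier is a placeholder and is assumed to exist elsewhere), the resolved `instanceClass` can size a cluster instance so instance selection follows regional availability:
+
+```typescript
+// A minimal sketch: create a cluster instance with the first orderable class found.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsNeptuneOrderableDbInstance } from "./.gen/providers/aws/data-aws-neptune-orderable-db-instance";
+import { NeptuneClusterInstance } from "./.gen/providers/aws/neptune-cluster-instance";
+class OrderableInstanceSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const orderable = new DataAwsNeptuneOrderableDbInstance(this, "orderable", {
+      engineVersion: "1.0.3.0",
+      preferredInstanceClasses: ["db.r5.large", "db.r4.large", "db.t3.medium"],
+    });
+    new NeptuneClusterInstance(this, "example", {
+      clusterIdentifier: "example-cluster", // assumed to exist elsewhere
+      engine: "neptune",
+      instanceClass: orderable.instanceClass,
+    });
+  }
+}
+```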
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/network_acls.html.markdown b/website/docs/cdktf/typescript/d/network_acls.html.markdown
new file mode 100644
index 00000000000..19638ccb918
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/network_acls.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_acls"
+description: |-
+  Provides a list of network ACL IDs for a VPC
+---
+
+
+
+# Data Source: aws_network_acls
+
+## Example Usage
+
+The following shows outputting all network ACL IDs in a VPC.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkAcls } from "./.gen/providers/aws/data-aws-network-acls";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsNetworkAcls(this, "example", {
+      vpcId: vpcId.stringValue,
+    });
+    const cdktfTerraformOutputExample = new TerraformOutput(this, "example_1", {
+      value: example.ids,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    cdktfTerraformOutputExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+The following example retrieves a list of all network ACL IDs in a VPC with a custom
+tag of `Tier` set to a value of "Private".
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkAcls } from "./.gen/providers/aws/data-aws-network-acls";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkAcls(this, "example", {
+      tags: {
+        Tier: "Private",
+      },
+      vpcId: vpcId.stringValue,
+    });
+  }
+}
+
+```
+
+The following example retrieves the ID of a network ACL in a VPC that is associated
+with a specific subnet.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkAcls } from "./.gen/providers/aws/data-aws-network-acls";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkAcls(this, "example", {
+      filter: [
+        {
+          name: "association.subnet-id",
+          values: [test.id],
+        },
+      ],
+      vpcId: vpcId.stringValue,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `vpcId` - (Optional) VPC ID that you want to filter from.
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired network ACLs.
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkAcls.html).
+
+* `values` - (Required) Set of values that are accepted for the given field.
+  A network ACL will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the network ACL IDs found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/network_interface.html.markdown b/website/docs/cdktf/typescript/d/network_interface.html.markdown
new file mode 100644
index 00000000000..817bd0cf6e8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/network_interface.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_interface"
+description: |-
+  Get information on a Network Interface resource.
+---
+
+
+
+# Data Source: aws_network_interface
+
+Use this data source to get information about a Network Interface.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkInterface } from "./.gen/providers/aws/data-aws-network-interface";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkInterface(this, "bar", {
+      id: "eni-01234567",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `id` - (Optional) Identifier for the network interface.
+* `filter` - (Optional) One or more name/value pairs to filter by. There are several valid keys; for a full reference, check out [describe-network-interfaces](https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-network-interfaces.html) in the AWS CLI reference.
+
+## Attribute Reference
+
+See the [Network Interface resource](/docs/providers/aws/r/network_interface.html) for details on the returned attributes.
+
+Additionally, the following attributes are exported:
+
+* `arn` - ARN of the network interface.
+* `association` - Association information for an Elastic IP address (IPv4) associated with the network interface. See supported fields below.
+* `availabilityZone` - Availability Zone.
+* `description` - Description of the network interface.
+* `interfaceType` - Type of interface.
+* `ipv6Addresses` - List of IPv6 addresses to assign to the ENI.
+* `macAddress` - MAC address.
+* `ownerId` - AWS account ID of the owner of the network interface.
+* `privateDnsName` - Private DNS name.
+* `privateIp` - Private IPv4 address of the network interface within the subnet.
+* `privateIps` - Private IPv4 addresses associated with the network interface.
+* `requesterId` - ID of the entity that launched the instance on your behalf.
+* `securityGroups` - List of security groups for the network interface.
+* `subnetId` - ID of the subnet.
+* `outpostArn` - ARN of the Outpost.
+* `tags` - Any tags assigned to the network interface.
+* `vpcId` - ID of the VPC.
+
+### `association`
+
+* `allocationId` - Allocation ID.
+* `associationId` - Association ID.
+* `carrierIp` - Carrier IP address associated with the network interface. This attribute is only set when the network interface is in a subnet which is associated with a Wavelength Zone.
+* `customerOwnedIp` - Customer-owned IP address.
+* `ipOwnerId` - ID of the Elastic IP address owner.
+* `publicDnsName` - Public DNS name.
+* `publicIp` - Address of the Elastic IP address bound to the network interface.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/network_interfaces.html.markdown b/website/docs/cdktf/typescript/d/network_interfaces.html.markdown
new file mode 100644
index 00000000000..0aea36e68e6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/network_interfaces.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_network_interfaces"
+description: |-
+  Provides a list of network interface IDs
+---
+
+
+
+# Data Source: aws_network_interfaces
+
+## Example Usage
+
+The following shows outputting all network interface IDs in a region.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkInterfaces } from "./.gen/providers/aws/data-aws-network-interfaces";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsNetworkInterfaces(this, "example", {});
+    const cdktfTerraformOutputExample = new TerraformOutput(this, "example_1", {
+      value: example.ids,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    cdktfTerraformOutputExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+The following example retrieves a list of all network interface IDs with a custom tag of `Name` set to a value of `test`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkInterfaces } from "./.gen/providers/aws/data-aws-network-interfaces";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsNetworkInterfaces(this, "example", {
+      tags: {
+        Name: "test",
+      },
+    });
+    new TerraformOutput(this, "example1", {
+      value: example.ids,
+    });
+  }
+}
+
+```
+
+The following example retrieves a list of network interface IDs that are associated
+with a specific subnet.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkInterfaces } from "./.gen/providers/aws/data-aws-network-interfaces";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsNetworkInterfaces(this, "example", {
+      filter: [
+        {
+          name: "subnet-id",
+          values: [test.id],
+        },
+      ],
+    });
+    const cdktfTerraformOutputExample = new TerraformOutput(this, "example_1", {
+      value: example.ids,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    cdktfTerraformOutputExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired network interfaces.
+
+* `filter` - (Optional) Custom filter block as described below.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkInterfaces.html).
+
+* `values` - (Required) Set of values that are accepted for the given field.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the network interface IDs found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/networkfirewall_firewall.html.markdown b/website/docs/cdktf/typescript/d/networkfirewall_firewall.html.markdown
new file mode 100644
index 00000000000..80398310f97
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/networkfirewall_firewall.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "Network Firewall"
+layout: "aws"
+page_title: "AWS: aws_networkfirewall_firewall"
+description: |-
+  Retrieve information about an AWS Network Firewall Firewall resource.
+---
+
+
+
+# Data Source: aws_networkfirewall_firewall
+
+Retrieve information about a firewall.
+
+## Example Usage
+
+### Find firewall by ARN
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkfirewallFirewall } from "./.gen/providers/aws/data-aws-networkfirewall-firewall";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkfirewallFirewall(this, "example", {
+      name: "Test",
+    });
+  }
+}
+
+```
+
+### Find firewall by ARN and Name
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkfirewallFirewall } from "./.gen/providers/aws/data-aws-networkfirewall-firewall";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkfirewallFirewall(this, "example", {
+      arn: arn,
+      name: "Test",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+One or more of the following arguments are required:
+
+* `arn` - ARN of the firewall.
+* `name` - Descriptive name of the firewall.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the firewall.
+* `deleteProtection` - Boolean flag indicating whether it is possible to delete the firewall.
+* `description` - Description of the firewall.
+* `encryptionConfiguration` - AWS Key Management Service (AWS KMS) encryption settings for the firewall.
+    * `keyId` - The ID of the AWS Key Management Service (AWS KMS) customer managed key.
+    * `type` - The type of the AWS Key Management Service (AWS KMS) key used by the firewall.
+* `firewallPolicyArn` - ARN of the VPC Firewall policy.
+* `firewallPolicyChangeProtection` - A boolean flag indicating whether it is possible to change the associated firewall policy.
+* `firewallStatus` - Nested list of information about the current status of the firewall.
+    * `syncStates` - Set of subnets configured for use by the firewall.
+        * `attachment` - Nested list describing the attachment status of the firewall's association with a single VPC subnet.
+            * `endpointId` - The identifier of the firewall endpoint that AWS Network Firewall has instantiated in the subnet. You use this to identify the firewall endpoint in the VPC route tables, when you redirect the VPC traffic through the endpoint.
+            * `subnetId` - The unique identifier of the subnet that you've specified to be used for a firewall endpoint.
+        * `availabilityZone` - The Availability Zone where the subnet is configured.
+    * `capacityUsageSummary` - Aggregated count of all resources used by reference sets in a firewall.
+        * `cidrs` - Capacity usage of CIDR blocks used by IP set references in a firewall.
+            * `availableCidrCount` - Available number of CIDR blocks available for use by the IP set references in a firewall.
+            * `ipSetReferences` - The list of IP set references used by a firewall.
+            * `resolvedCidrCount` - Total number of CIDR blocks used by the IP set references in a firewall.
+            * `utilizedCidrCount` - Number of CIDR blocks used by the IP set references in a firewall.
+    * `configurationSyncStateSummary` - Summary of sync states for all availability zones in which the firewall is configured.
+* `id` - ARN that identifies the firewall.
+* `name` - Descriptive name of the firewall.
+* `subnetChangeProtection` - A boolean flag indicating whether it is possible to change the associated subnet(s). +* `subnetMapping` - Set of configuration blocks describing the public subnets. Each subnet must belong to a different Availability Zone in the VPC. AWS Network Firewall creates a firewall endpoint in each subnet. + * `subnetId` - The unique identifier for the subnet. +* `tags` - Map of resource tags to associate with the resource. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `updateToken` - String token used when updating a firewall. +* `vpcId` - Unique identifier of the VPC where AWS Network Firewall should create the firewall. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkfirewall_firewall_policy.html.markdown b/website/docs/cdktf/typescript/d/networkfirewall_firewall_policy.html.markdown new file mode 100644 index 00000000000..929e82597b5 --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkfirewall_firewall_policy.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_firewall_policy" +description: |- + Retrieve information about a firewall policy. +--- + + + +# Data Source: aws_networkfirewall_firewall_policy + +Retrieve information about a firewall policy. + +## Example Usage + +### Find firewall policy by name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkfirewallFirewallPolicy } from "./.gen/providers/aws/data-aws-networkfirewall-firewall-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkfirewallFirewallPolicy(this, "example", { + name: firewallPolicyName.stringValue, + }); + } +} + +``` + +### Find firewall policy by ARN + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkfirewallFirewallPolicy } from "./.gen/providers/aws/data-aws-networkfirewall-firewall-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkfirewallFirewallPolicy(this, "example", { + arn: firewallPolicyArn.stringValue, + }); + } +} + +``` + +### Find firewall policy by name and ARN + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsNetworkfirewallFirewallPolicy } from "./.gen/providers/aws/data-aws-networkfirewall-firewall-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkfirewallFirewallPolicy(this, "example", { + arn: firewallPolicyArn.stringValue, + name: firewallPolicyName.stringValue, + }); + } +} + +``` + +AWS Network Firewall does not allow multiple firewall policies with the same name to be created in an account. It is possible, however, to have multiple firewall policies available in a single account with identical `name` values but distinct `arn` values, e.g. firewall policies shared via a [Resource Access Manager (RAM) share][1]. In that case specifying `arn`, or `name` and `arn`, is recommended. + +~> **Note:** If there are multiple firewall policies in an account with the same `name`, and `arn` is not specified, the default behavior will return the firewall policy with `name` that was created in the account. + +## Argument Reference + +One or more of the following arguments are required: + +* `arn` - ARN of the firewall policy. +* `name` - Descriptive name of the firewall policy. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `description` - Description of the firewall policy. +* `firewallPolicy` - The [policy][2] for the specified firewall policy. +* `tags` - Key-value tags for the firewall policy. +* `updateToken` - Token used for optimistic locking. + +[1]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/ram_resource_share +[2]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/networkfirewall_firewall_policy + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkfirewall_resource_policy.html.markdown b/website/docs/cdktf/typescript/d/networkfirewall_resource_policy.html.markdown new file mode 100644 index 00000000000..a832cd16b90 --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkfirewall_resource_policy.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_resource_policy" +description: |- + Retrieve information about a Network Firewall resource policy. +--- + + + +# Data Source: aws_networkfirewall_resource_policy + +Retrieve information about a Network Firewall resource policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkfirewallResourcePolicy } from "./.gen/providers/aws/data-aws-networkfirewall-resource-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkfirewallResourcePolicy(this, "example", { + resourceArn: resourcePolicyArn.stringValue, + }); + } +} + +``` + +## Argument Reference + +* `resourceArn` - (Required) The Amazon Resource Name (ARN) that identifies the resource policy. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) that identifies the resource policy. +* `policy` - The [policy][1] for the resource. 
+ +[1]: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/networkfirewall_resource_policy + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkmanager_connection.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_connection.html.markdown new file mode 100644 index 00000000000..3b8eefc36bd --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkmanager_connection.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_connection" +description: |- + Retrieve information about a connection. +--- + + + +# Data Source: aws_networkmanager_connection + +Retrieve information about a connection. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerConnection } from "./.gen/providers/aws/data-aws-networkmanager-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkmanagerConnection(this, "example", { + connectionId: connectionId.stringValue, + globalNetworkId: globalNetworkId.stringValue, + }); + } +} + +``` + +## Argument Reference + +* `globalNetworkId` - (Required) ID of the Global Network of the connection to retrieve. +* `connectionId` - (Required) ID of the specific connection to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the connection. +* `connectedDeviceId` - ID of the second device in the connection. +* `connectedLinkId` - ID of the link for the second device. +* `description` - Description of the connection. +* `deviceId` - ID of the first device in the connection. +* `linkId` - ID of the link for the first device. +* `tags` - Key-value tags for the connection. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkmanager_connections.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_connections.html.markdown new file mode 100644 index 00000000000..42c9b81bb15 --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkmanager_connections.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_connections" +description: |- + Retrieve information about connections. +--- + + + +# Data Source: aws_networkmanager_connections + +Retrieve information about connections. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsNetworkmanagerConnections } from "./.gen/providers/aws/data-aws-networkmanager-connections";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkmanagerConnections(this, "example", {
+      globalNetworkId: globalNetworkId.stringValue,
+      tags: {
+        Env: "test",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `deviceId` - (Optional) ID of the device of the connections to retrieve.
+* `globalNetworkId` - (Required) ID of the Global Network of the connections to retrieve.
+* `tags` - (Optional) Restricts the list to the connections with these tags.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - IDs of the connections.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/networkmanager_core_network_policy_document.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_core_network_policy_document.html.markdown
new file mode 100644
index 00000000000..f8e9efd5900
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/networkmanager_core_network_policy_document.html.markdown
@@ -0,0 +1,281 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_core_network_policy_document"
+description: |-
+  Generates a Core Network policy document in JSON format
+---
+
+
+
+# Data Source: aws_networkmanager_core_network_policy_document
+
+Generates a Core Network policy document in JSON format for use with resources that expect core network policy documents such as `awscc_networkmanager_core_network`. It follows the API definition from the [core-network-policy documentation](https://docs.aws.amazon.com/vpc/latest/cloudwan/cloudwan-policies-json.html).
+
+Using this data source to generate policy documents is *optional*. It is also valid to use literal JSON strings in your configuration or to use the `file` interpolation function to read a raw JSON policy document from a file.
+
+-> For more information about building AWS Core Network policy documents with Terraform, see the [Using AWS & AWSCC Provider Together Guide](/docs/providers/aws/guides/using-aws-with-awscc-provider.html)
+
+## Example Usage
+
+### Basic Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkmanagerCoreNetworkPolicyDocument } from "./.gen/providers/aws/data-aws-networkmanager-core-network-policy-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkmanagerCoreNetworkPolicyDocument(this, "test", {
+      attachmentPolicies: [
+        {
+          action: {
+            associationMethod: "constant",
+            segment: "shared",
+          },
+          conditionLogic: "or",
+          conditions: [
+            {
+              key: "segment",
+              operator: "equals",
+              type: "tag-value",
+              value: "shared",
+            },
+          ],
+          ruleNumber: 100,
+        },
+        {
+          action: {
+            associationMethod: "constant",
+            segment: "prod",
+          },
+          conditionLogic: "or",
+          conditions: [
+            {
+              key: "segment",
+              operator: "equals",
+              type: "tag-value",
+              value: "prod",
+            },
+          ],
+          ruleNumber: 200,
+        },
+      ],
+      coreNetworkConfiguration: [
+        {
+          asnRanges: ["64512-64555"],
+          edgeLocations: [
+            {
+              asn: Token.asString(64512),
+              location: "us-east-1",
+            },
+            {
+              asn: Token.asString(64513),
+              location: "eu-central-1",
+            },
+          ],
+          vpnEcmpSupport: false,
+        },
+      ],
+      segmentActions: [
+        {
+          action: "share",
+          mode: "attachment-route",
+          segment: "shared",
+          shareWith: ["*"],
+        },
+      ],
+      segments: [
+        {
+          description: "Segment for shared services",
+          name: "shared",
+          requireAttachmentAcceptance: true,
+        },
+        {
+          description: "Segment for prod services",
+          name: "prod",
+          requireAttachmentAcceptance: true,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+The `json` attribute of the `test` data source above will evaluate to:
+
+```json
+{
+  "version": "2021.12",
+  "core-network-configuration": {
+    "asn-ranges": [
+      "64512-64555"
+    ],
+    "vpn-ecmp-support": false,
+    "edge-locations": [
+      {
+        "location": "us-east-1",
+        "asn": 64512
+      },
+      {
+        "location": "eu-central-1",
+        "asn": 64513
+      }
+    ]
+  },
+  "segments": [
+    {
+      "name": "shared",
+      "description": "Segment for shared services",
+      "require-attachment-acceptance": true
+    },
+    {
+      "name": "prod",
+      "description": "Segment for prod services",
+      "require-attachment-acceptance": true
+    }
+  ],
+  "attachment-policies": [
+    {
+      "rule-number": 100,
+      "action": {
+        "association-method": "constant",
+        "segment": "shared"
+      },
+      "conditions": [
+        {
+          "type": "tag-value",
+          "operator": "equals",
+          "key": "segment",
+          "value": "shared"
+        }
+      ],
+      "condition-logic": "or"
+    },
+    {
+      "rule-number": 200,
+      "action": {
+        "association-method": "constant",
+        "segment": "prod"
+      },
+      "conditions": [
+        {
+          "type": "tag-value",
+          "operator": "equals",
+          "key": "segment",
+          "value": "prod"
+        }
+      ],
+      "condition-logic": "or"
+    }
+  ],
+  "segment-actions": [
+    {
+      "action": "share",
+      "mode": "attachment-route",
+      "segment": "shared",
+      "share-with": "*"
+    }
+  ]
+}
+```
+
+## Argument Reference
+
+The following arguments are available:
+
+* `attachmentPolicies` (Optional) - In a core network, all attachments use the `attachmentPolicies` block to map an attachment to a segment. Instead of manually associating a segment to each attachment, attachments use tags, and then the tags are used to associate the attachment to the specified segment. Detailed below.
+* `coreNetworkConfiguration` (Required) - The core network configuration section defines the Regions where a core network should operate. For AWS Regions that are defined in the policy, the core network creates a Core Network Edge where you can connect attachments.
+  After it's created, each Core Network Edge is peered with every other defined Region and is configured with consistent segment and routing across all Regions. Regions cannot be removed until the associated attachments are deleted. Detailed below.
+* `segments` (Required) - Block argument that defines the different segments in the network. Here you can provide descriptions, change defaults, and provide explicit Regional operational and route filters. The names defined for each segment are used in the `segmentActions` and `attachmentPolicies` section. Each segment is created, and operates, as a completely separated routing domain. By default, attachments can only communicate with other attachments in the same segment. Detailed below.
+* `segmentActions` (Optional) - A block argument that defines how routing works between segments. By default, attachments can only communicate with other attachments in the same segment. Detailed below.
+
+### `attachmentPolicies`
+
+The following arguments are available:
+
+* `action` (Required) - Action to take when a condition is true. Detailed below.
+* `conditionLogic` (Optional) - Valid values include `and` or `or`. This is a mandatory parameter only if you have more than one condition. The `conditionLogic` applies to all of the conditions for a rule, which also means nested conditions of `and` or `or` are not supported. Use `or` if you want to associate the attachment with the segment by either the segment name or attachment tag value, or by the chosen conditions. Use `and` if you want to associate the attachment with the segment by either the segment name or attachment tag value and by the chosen conditions.
+* `conditions` (Required) - A block argument. Detailed below.
+* `description` (Optional) - A user-defined description that further helps identify the rule.
+* `ruleNumber` (Required) - An integer from `1` to `65535` indicating the rule's order number. Rules are processed in order from the lowest numbered rule to the highest. Rules stop processing when a rule is matched. It's important to make sure that you number your rules in the exact order that you want them processed.
+
+### `action`
+
+The following arguments are available:
+
+* `associationMethod` (Required) - Defines how a segment is mapped. Values can be `constant` or `tag`. `constant` statically defines the segment to associate the attachment to. `tag` uses the value of a tag to dynamically try to map to a segment.
+* `segment` (Optional) - Name of the `segment` to share as defined in the `segments` section. This is used only when the `associationMethod` is `constant`.
+* `tagValueOfKey` (Optional) - Maps the attachment to the value of a known key. This is used when the `associationMethod` is `tag`. For example, a `tag` of `stage = "test"` will map to a segment named `test`. The value must exactly match the name of a segment. This allows you to have many segments, but use only a single rule without having to define multiple nearly identical conditions. This prevents creating many similar conditions that all use the same keys to map to segments.
+* `requireAcceptance` (Optional) - Determines if this mapping should override the segment value for `requireAttachmentAcceptance`. You can only set this to `true`, indicating that this setting applies only to segments that have `requireAttachmentAcceptance` set to `false`.
+  If the segment already has the default `requireAttachmentAcceptance`, you can set this to inherit the segment's acceptance value.
+
+### `conditions`
+
+The conditions block has 4 arguments: `type`, `operator`, `key`, and `value`. Setting or omitting each argument requires a combination of logic based on the value set to `type`. For that reason, please refer to the [AWS documentation](https://docs.aws.amazon.com/vpc/latest/cloudwan/cloudwan-policies-json.html) for complete usage docs.
+
+The following arguments are available:
+
+* `type` (Required) - Valid values include: `account-id`, `any`, `tag-value`, `tag-exists`, `resource-id`, `region`, `attachment-type`.
+* `operator` (Optional) - Valid values include: `equals`, `not-equals`, `contains`, `begins-with`.
+* `key` (Optional) - String value.
+* `value` (Optional) - String value.
+
+### `coreNetworkConfiguration`
+
+The following arguments are available:
+
+* `asnRanges` (Required) - List of strings containing Autonomous System Numbers (ASNs) to assign to Core Network Edges. By default, the core network automatically assigns an ASN for each Core Network Edge but you can optionally define the ASN in the edge-locations for each Region. The ASN uses an array of integer ranges only from `64512` to `65534` and `4200000000` to `4294967294`, expressed as a string like `"64512-65534"`. No other ASN ranges can be used.
+* `insideCidrBlocks` (Optional) - The Classless Inter-Domain Routing (CIDR) block range used to create tunnels for AWS Transit Gateway Connect. The format is a standard AWS CIDR range (for example, `10.0.1.0/24`). You can optionally define the inside CIDR in the Core Network Edges section per Region. The minimum is a `/24` for IPv4 or `/64` for IPv6. You can provide multiple `/24` subnets or a larger CIDR range. If you define a larger CIDR range, new Core Network Edges will be automatically assigned `/24` and `/64` subnets from the larger CIDR. An inside CIDR block is required for attaching Connect attachments to a Core Network Edge.
+* `vpnEcmpSupport` (Optional) - Indicates whether the core network forwards traffic over multiple equal-cost routes using VPN. The value can be either `true` or `false`. The default is `true`.
+* `edgeLocations` (Required) - A block value of AWS Region locations where you're creating Core Network Edges. Detailed below.
+
+### `edgeLocations`
+
+The following arguments are available:
+
+* `location` (Required) - An AWS Region code, such as `us-east-1`.
+* `asn` (Optional) - ASN of the Core Network Edge in an AWS Region. By default, the ASN will be a single integer automatically assigned from `asnRanges`.
+* `insideCidrBlocks` (Optional) - The local CIDR blocks for this Core Network Edge for AWS Transit Gateway Connect attachments. By default, this CIDR block will be one or more optional IPv4 and IPv6 CIDR prefixes auto-assigned from `insideCidrBlocks`.
+
+### `segments`
+
+The following arguments are available:
+
+* `allowFilter` (Optional) - List of strings of segment names that explicitly allow only routes from the segments listed in the array. Use the `allowFilter` setting if a segment has a well-defined group of other segments that connectivity should be restricted to. It is applied after routes have been shared in `segmentActions`. If a segment is listed in `allowFilter`, attachments between the two segments will have routes if they are also shared in the segment-actions area.
+  For example, you might have a segment named "video-producer" that should only ever share routes with a "video-distributor" segment, no matter how many other share statements are created.
+* `denyFilter` (Optional) - An array of segments that disallows routes from the segments listed in the array. It is applied only after routes have been shared in `segmentActions`. If a segment is listed in the `denyFilter`, attachments between the two segments will never have routes shared across them. For example, you might have a "financial" payment segment that should never share routes with a "development" segment, regardless of how many other share statements are created. Adding the payments segment to the deny-filter parameter prevents any shared routes from being created with other segments.
+* `description` (Optional) - A user-defined string describing the segment.
+* `edgeLocations` (Optional) - A list of strings of AWS Region names. Allows you to define a more restrictive set of Regions for a segment. The edge location must be a subset of the locations that are defined for `edgeLocations` in the `coreNetworkConfiguration`.
+* `isolateAttachments` (Optional) - This Boolean setting determines whether attachments on the same segment can communicate with each other. If set to `true`, the only routes available will be either shared routes through the share actions, which are attachments in other segments, or static routes. The default value is `false`. For example, you might have a segment dedicated to "development" that should never allow VPCs to talk to each other, even if they're on the same segment. In this example, you would keep the default parameter of `false`.
+* `name` (Required) - Unique name for a segment. The name is a string used in other parts of the policy document, as well as in the console for metrics and other reference points. Valid characters are a-z and 0-9.
+* `requireAttachmentAcceptance` (Optional) - This Boolean setting determines whether attachment requests are automatically approved or require acceptance. The default is `true`, indicating that attachment requests require acceptance. For example, you might use this setting to allow a "sandbox" segment to allow any attachment request so that a core network or attachment administrator does not need to review and approve attachment requests. In this example, `requireAttachmentAcceptance` is set to `false`.
+
+### `segmentActions`
+
+`segmentActions` have different outcomes based on their `action` argument value. There are 2 valid values for `action`: `create-route` and `share`. The behavior of the arguments below changes depending on the `action` you specify. For more details on their use, see the [AWS documentation](https://docs.aws.amazon.com/vpc/latest/cloudwan/cloudwan-policies-json.html#cloudwan-segment-actions-json). A combined sketch follows the argument list below.
+
+~> **NOTE:** `shareWith` and `shareWithExcept` break from the AWS API specification. The API has 1 argument, `share-with`, and it can accept 3 input types as valid (`"*"`, `["<segment-names>"]`, or `{ "except": ["<segment-names>"] }`). To emulate this behavior, `shareWith` is always a list that accepts `["*"]` as valid for `"*"`, and `shareWithExcept` is a list that accepts `["<segment-names>"]` as valid for `{ "except": ["<segment-names>"] }`. You may only specify one of: `shareWith` or `shareWithExcept`.
+
+The following arguments are available:
+
+* `action` (Required) - Action to take for the chosen segment. Valid values are `create-route` or `share`.
+* `description` (Optional) - A user-defined string describing the segment action.
+* `destinationCidrBlocks` (Optional) - List of strings containing CIDRs. You can define the IPv4 and IPv6 CIDR notation for each AWS Region. For example, `10.1.0.0/16` or `2001:db8::/56`. This is an array of CIDR notation strings.
+* `destinations` (Optional) - A list of strings. Valid values include `["blackhole"]` or a list of attachment IDs.
+* `mode` (Optional) - String. This mode places the attachment and return routes in each of the `shareWith` segments. Valid values include: `attachment-route`.
+* `segment` (Optional) - Name of the segment.
+* `shareWith` (Optional) - A list of segment names to share the segment with, or `["*"]` to share with all segments.
+* `shareWithExcept` (Optional) - A list of segment names to exclude, sharing the segment with all other segments.
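+
+As an illustrative aside (not part of the upstream documentation), the following compact sketch combines a tag-based attachment policy with a `share` segment action that uses `shareWithExcept`. The segment names, tag key, ASN range, and edge location are hypothetical placeholder values:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsNetworkmanagerCoreNetworkPolicyDocument } from "./.gen/providers/aws/data-aws-networkmanager-core-network-policy-document";
+class PolicySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkmanagerCoreNetworkPolicyDocument(this, "sketch", {
+      coreNetworkConfiguration: [
+        {
+          asnRanges: ["64512-64555"],
+          edgeLocations: [{ location: "us-east-1" }],
+        },
+      ],
+      segments: [
+        { name: "shared" },
+        { name: "prod" },
+        { name: "sandbox", requireAttachmentAcceptance: false },
+      ],
+      attachmentPolicies: [
+        {
+          // Map any attachment carrying a "segment" tag to the segment named
+          // by that tag's value (tag-based association).
+          ruleNumber: 100,
+          conditionLogic: "and",
+          conditions: [
+            {
+              key: "segment",
+              type: "tag-exists",
+            },
+          ],
+          action: {
+            associationMethod: "tag",
+            tagValueOfKey: "segment",
+          },
+        },
+      ],
+      segmentActions: [
+        {
+          // Share "shared" with every segment except "sandbox".
+          action: "share",
+          mode: "attachment-route",
+          segment: "shared",
+          shareWithExcept: ["sandbox"],
+        },
+      ],
+    });
+  }
+}
+```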
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `json` - Standard JSON policy document rendered based on the arguments above.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/networkmanager_device.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_device.html.markdown
new file mode 100644
index 00000000000..3a4c10294b2
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/networkmanager_device.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_device"
+description: |-
+  Retrieve information about a device.
+---
+
+
+
+# Data Source: aws_networkmanager_device
+
+Retrieve information about a device.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkmanagerDevice } from "./.gen/providers/aws/data-aws-networkmanager-device";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkmanagerDevice(this, "example", {
+      deviceId: deviceId.stringValue,
+      globalNetworkId: globalNetworkId.stringValue,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `deviceId` - (Required) ID of the device.
+* `globalNetworkId` - (Required) ID of the global network.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the device.
+* `awsLocation` - AWS location of the device. Documented below.
+* `description` - Description of the device.
+* `location` - Location of the device. Documented below.
+* `model` - Model of device.
+* `serialNumber` - Serial number of the device.
+* `siteId` - ID of the site.
+* `tags` - Key-value tags for the device.
+* `type` - Type of device.
+* `vendor` - Vendor of the device.
+
+The `awsLocation` object supports the following:
+
+* `subnetArn` - ARN of the subnet that the device is located in.
+* `zone` - Zone that the device is located in.
+
+The `location` object supports the following:
+
+* `address` - Physical address.
+* `latitude` - Latitude.
+* `longitude` - Longitude.
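+
+As a further illustrative sketch (the global network ID is a placeholder), the plural `aws_networkmanager_devices` data source can enumerate device IDs that this data source then resolves one by one, following the same `TerraformCount` pattern used in the `aws_nat_gateways` example elsewhere in these docs:
+
+```typescript
+// A minimal sketch: list device IDs, then look up each device.
+import { Construct } from "constructs";
+import { Fn, Token, TerraformCount, propertyAccess, TerraformStack } from "cdktf";
+import { DataAwsNetworkmanagerDevice } from "./.gen/providers/aws/data-aws-networkmanager-device";
+import { DataAwsNetworkmanagerDevices } from "./.gen/providers/aws/data-aws-networkmanager-devices";
+class DeviceLookupSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const globalNetworkId = "global-network-0123456789abcdef0"; // hypothetical ID
+    const all = new DataAwsNetworkmanagerDevices(this, "all", {
+      globalNetworkId: globalNetworkId,
+    });
+    // The IDs are only known after apply, so iterate in Terraform via count.
+    const deviceCount = TerraformCount.of(Token.asNumber(Fn.lengthOf(all.ids)));
+    new DataAwsNetworkmanagerDevice(this, "each", {
+      globalNetworkId: globalNetworkId,
+      deviceId: Token.asString(propertyAccess(Fn.tolist(all.ids), [deviceCount.index])),
+      count: deviceCount,
+    });
+  }
+}
+```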
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkmanager_devices.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_devices.html.markdown new file mode 100644 index 00000000000..a63a62a13f6 --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkmanager_devices.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_devices" +description: |- + Retrieve information about devices. +--- + + + +# Data Source: aws_networkmanager_devices + +Retrieve information about devices. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerDevices } from "./.gen/providers/aws/data-aws-networkmanager-devices"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkmanagerDevices(this, "example", { + globalNetworkId: globalNetworkId.stringValue, + tags: { + Env: "test", + }, + }); + } +} + +``` + +## Argument Reference + +* `globalNetworkId` - (Required) ID of the Global Network of the devices to retrieve. +* `siteId` - (Optional) ID of the site of the devices to retrieve. +* `tags` - (Optional) Restricts the list to the devices with these tags. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - IDs of the devices. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkmanager_global_network.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_global_network.html.markdown new file mode 100644 index 00000000000..16d980a264f --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkmanager_global_network.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_global_network" +description: |- + Retrieve information about a global network. +--- + + + +# Data Source: aws_networkmanager_global_network + +Retrieve information about a global network. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerGlobalNetwork } from "./.gen/providers/aws/data-aws-networkmanager-global-network"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkmanagerGlobalNetwork(this, "example", { + globalNetworkId: globalNetworkId.stringValue, + }); + } +} + +``` + +## Argument Reference + +* `globalNetworkId` - (Required) ID of the specific global network to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the global network. +* `description` - Description of the global network. +* `tags` - Map of resource tags. 
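+
+Since this data source requires a known ID, a hedged sketch (the `Env` tag value is illustrative, and exactly one matching network is assumed) can first resolve IDs by tag via `aws_networkmanager_global_networks` and then pick one:
+
+```typescript
+// A minimal sketch: look up a global network by tag, then fetch its details.
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { DataAwsNetworkmanagerGlobalNetwork } from "./.gen/providers/aws/data-aws-networkmanager-global-network";
+import { DataAwsNetworkmanagerGlobalNetworks } from "./.gen/providers/aws/data-aws-networkmanager-global-networks";
+class GlobalNetworkByTagSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const byTag = new DataAwsNetworkmanagerGlobalNetworks(this, "by_tag", {
+      tags: {
+        Env: "test",
+      },
+    });
+    // Assumes exactly one global network carries the tag; element(..., 0) picks it.
+    new DataAwsNetworkmanagerGlobalNetwork(this, "selected", {
+      globalNetworkId: Token.asString(Fn.element(byTag.ids, 0)),
+    });
+  }
+}
+```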
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/networkmanager_global_networks.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_global_networks.html.markdown
new file mode 100644
index 00000000000..9a70cd32027
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/networkmanager_global_networks.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_global_networks"
+description: |-
+  Retrieve information about global networks.
+---
+
+
+
+# Data Source: aws_networkmanager_global_networks
+
+Retrieve information about global networks.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkmanagerGlobalNetworks } from "./.gen/providers/aws/data-aws-networkmanager-global-networks";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkmanagerGlobalNetworks(this, "example", {
+      tags: {
+        Env: "test",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `tags` - (Optional) Restricts the list to the global networks with these tags.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - IDs of the global networks.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/networkmanager_link.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_link.html.markdown
new file mode 100644
index 00000000000..692103f1fe9
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/networkmanager_link.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_link"
+description: |-
+  Retrieve information about a link.
+---
+
+
+
+# Data Source: aws_networkmanager_link
+
+Retrieve information about a link.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkmanagerLink } from "./.gen/providers/aws/data-aws-networkmanager-link";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsNetworkmanagerLink(this, "example", {
+      globalNetworkId: globalNetworkId.stringValue,
+      linkId: linkId.stringValue,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `globalNetworkId` - (Required) ID of the Global Network of the link to retrieve.
+* `linkId` - (Required) ID of the specific link to retrieve.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the link.
+* `bandwidth` - Upload speed and download speed of the link, as documented below.
+* `description` - Description of the link.
+* `providerName` - Provider of the link.
+* `siteId` - ID of the site.
+* `tags` - Key-value tags for the link.
+* `type` - Type of the link.
+
+The `bandwidth` object supports the following:
+
+* `downloadSpeed` - Download speed in Mbps.
+* `uploadSpeed` - Upload speed in Mbps. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkmanager_links.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_links.html.markdown new file mode 100644 index 00000000000..05a7ae6f8f2 --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkmanager_links.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_links" +description: |- + Retrieve information about links. +--- + + + +# Data Source: aws_networkmanager_links + +Retrieve information about links. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerLinks } from "./.gen/providers/aws/data-aws-networkmanager-links"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkmanagerLinks(this, "example", { + globalNetworkId: globalNetworkId.stringValue, + tags: { + Env: "test", + }, + }); + } +} + +``` + +## Argument Reference + +* `globalNetworkId` - (Required) ID of the Global Network of the links to retrieve. +* `providerName` - (Optional) Link provider to retrieve. +* `siteId` - (Optional) ID of the site of the links to retrieve. +* `tags` - (Optional) Restricts the list to the links with these tags. +* `type` - (Optional) Link type to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - IDs of the links. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkmanager_site.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_site.html.markdown new file mode 100644 index 00000000000..5a6b2056a04 --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkmanager_site.html.markdown @@ -0,0 +1,58 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_site" +description: |- + Retrieve information about a site. +--- + + + +# Data Source: aws_networkmanager_site + +Retrieve information about a site. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerSite } from "./.gen/providers/aws/data-aws-networkmanager-site"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkmanagerSite(this, "example", { + globalNetworkId: globalNetworkId.stringValue, + siteId: siteId.stringValue, + }); + } +} + +``` + +## Argument Reference + +* `globalNetworkId` - (Required) ID of the Global Network of the site to retrieve. +* `siteId` - (Required) ID of the specific site to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the site. +* `description` - Description of the site. +* `location` - Site location as documented below. +* `tags` - Key-value tags for the site. 
+ +The `location` object supports the following: + +* `address` - Address of the location. +* `latitude` - Latitude of the location. +* `longitude` - Longitude of the location. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/networkmanager_sites.html.markdown b/website/docs/cdktf/typescript/d/networkmanager_sites.html.markdown new file mode 100644 index 00000000000..0756faf2fd7 --- /dev/null +++ b/website/docs/cdktf/typescript/d/networkmanager_sites.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_sites" +description: |- + Retrieve information about sites. +--- + + + +# Data Source: aws_networkmanager_sites + +Retrieve information about sites. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerSites } from "./.gen/providers/aws/data-aws-networkmanager-sites"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsNetworkmanagerSites(this, "example", { + globalNetworkId: globalNetworkId.stringValue, + tags: { + Env: "test", + }, + }); + } +} + +``` + +## Argument Reference + +* `globalNetworkId` - (Required) ID of the Global Network of the sites to retrieve. +* `tags` - (Optional) Restricts the list to the sites with these tags. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - IDs of the sites. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/oam_link.html.markdown b/website/docs/cdktf/typescript/d/oam_link.html.markdown new file mode 100644 index 00000000000..54880766331 --- /dev/null +++ b/website/docs/cdktf/typescript/d/oam_link.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_link" +description: |- + Terraform data source for managing an AWS CloudWatch Observability Access Manager Link. +--- + + + +# Data Source: aws_oam_link + +Terraform data source for managing an AWS CloudWatch Observability Access Manager Link. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOamLink } from "./.gen/providers/aws/data-aws-oam-link"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOamLink(this, "example", { + linkIdentifier: + "arn:aws:oam:us-west-1:111111111111:link/abcd1234-a123-456a-a12b-a123b456c789", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `linkIdentifier` - (Required) ARN of the link. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the link. +* `label` - Label that is assigned to this link. 
+* `labelTemplate` - Human-readable name used to identify this source account when you are viewing data from it in the monitoring account. +* `linkId` - ID string that AWS generated as part of the link ARN. +* `resourceTypes` - Types of data that the source account shares with the monitoring account. +* `sinkArn` - ARN of the sink that is used for this link. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/oam_links.html.markdown b/website/docs/cdktf/typescript/d/oam_links.html.markdown new file mode 100644 index 00000000000..e8b92fd7433 --- /dev/null +++ b/website/docs/cdktf/typescript/d/oam_links.html.markdown @@ -0,0 +1,43 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_links" +description: |- + Terraform data source for managing AWS CloudWatch Observability Access Manager Links. +--- + + + +# Data Source: aws_oam_links + +Terraform data source for managing AWS CloudWatch Observability Access Manager Links. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOamLinks } from "./.gen/providers/aws/data-aws-oam-links"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOamLinks(this, "example", {}); + } +} + +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the Links. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/oam_sink.html.markdown b/website/docs/cdktf/typescript/d/oam_sink.html.markdown new file mode 100644 index 00000000000..e9b89501398 --- /dev/null +++ b/website/docs/cdktf/typescript/d/oam_sink.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_sink" +description: |- + Terraform data source for managing an AWS CloudWatch Observability Access Manager Sink. +--- + + + +# Data Source: aws_oam_sink + +Terraform data source for managing an AWS CloudWatch Observability Access Manager Sink. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOamSink } from "./.gen/providers/aws/data-aws-oam-sink"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOamSink(this, "example", { + sinkIdentifier: + "arn:aws:oam:us-west-1:111111111111:sink/abcd1234-a123-456a-a12b-a123b456c789", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `sinkIdentifier` - (Required) ARN of the sink. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the sink. +* `name` - Name of the sink. +* `sinkId` - Random ID string that AWS generated as part of the sink ARN. +* `tags` - Tags assigned to the sink. 
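+ +As a hand-written usage sketch (not `cdktf convert` output), the resolved sink attributes can be surfaced as stack outputs; the sink ARN reuses the placeholder value from the example above: + +```typescript +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +import { DataAwsOamSink } from "./.gen/providers/aws/data-aws-oam-sink"; +class OamSinkLookup extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + // Resolve the sink by its ARN (placeholder value). + const sink = new DataAwsOamSink(this, "example", { + sinkIdentifier: + "arn:aws:oam:us-west-1:111111111111:sink/abcd1234-a123-456a-a12b-a123b456c789", + }); + // Surface the sink's name and generated ID for downstream use. + new TerraformOutput(this, "sink_name", { value: sink.name }); + new TerraformOutput(this, "sink_id", { value: sink.sinkId }); + } +} + +```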
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/oam_sinks.html.markdown b/website/docs/cdktf/typescript/d/oam_sinks.html.markdown new file mode 100644 index 00000000000..834964bdb9d --- /dev/null +++ b/website/docs/cdktf/typescript/d/oam_sinks.html.markdown @@ -0,0 +1,43 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_sinks" +description: |- + Terraform data source for managing AWS CloudWatch Observability Access Manager Sinks. +--- + + + +# Data Source: aws_oam_sinks + +Terraform data source for managing AWS CloudWatch Observability Access Manager Sinks. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOamSinks } from "./.gen/providers/aws/data-aws-oam-sinks"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOamSinks(this, "example", {}); + } +} + +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the Sinks. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/opensearch_domain.html.markdown b/website/docs/cdktf/typescript/d/opensearch_domain.html.markdown new file mode 100644 index 00000000000..4337e9253c3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/opensearch_domain.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "OpenSearch" +layout: "aws" +page_title: "AWS: aws_opensearch_domain" +description: |- + Get information on an OpenSearch Domain resource. +--- + + + +# Data Source: aws_opensearch_domain + +Use this data source to get information about an OpenSearch Domain. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOpensearchDomain } from "./.gen/providers/aws/data-aws-opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOpensearchDomain(this, "my_domain", { + domainName: "my-domain-name", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `domainName` – (Required) Name of the domain. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accessPolicies` – Policy document attached to the domain. +* `advancedOptions` - Key-value string pairs to specify advanced configuration options. +* `advancedSecurityOptions` - Status of the OpenSearch domain's advanced security options. The block consists of the following attributes: + * `enabled` - Whether advanced security is enabled. + * `internalUserDatabaseEnabled` - Whether the internal user database is enabled. +* `arn` – ARN of the domain. +* `autoTuneOptions` - Configuration of the Auto-Tune options of the domain. + * `desiredState` - Auto-Tune desired state for the domain. 
+ * `maintenanceSchedule` - A list of the nested configurations for the Auto-Tune maintenance windows of the domain. + * `startAt` - Date and time at which the Auto-Tune maintenance schedule starts in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). + * `duration` - Configuration block for the duration of the Auto-Tune maintenance window. + * `value` - Duration of an Auto-Tune maintenance window. + * `unit` - Unit of time. + * `cronExpressionForRecurrence` - Cron expression for an Auto-Tune maintenance schedule. + * `rollbackOnDisable` - Whether the domain is set to roll back to default Auto-Tune settings when disabling Auto-Tune. +* `clusterConfig` - Cluster configuration of the domain. + * `coldStorageOptions` - Configuration block containing cold storage configuration. + * `enabled` - Indicates cold storage is enabled. + * `instanceType` - Instance type of data nodes in the cluster. + * `instanceCount` - Number of instances in the cluster. + * `dedicatedMasterEnabled` - Indicates whether dedicated master nodes are enabled for the cluster. + * `dedicatedMasterType` - Instance type of the dedicated master nodes in the cluster. + * `dedicatedMasterCount` - Number of dedicated master nodes in the cluster. + * `zoneAwarenessEnabled` - Indicates whether zone awareness is enabled. + * `zoneAwarenessConfig` - Configuration block containing zone awareness settings. + * `availabilityZoneCount` - Number of availability zones used. + * `warmEnabled` - Warm storage is enabled. + * `warmCount` - Number of warm nodes in the cluster. + * `warmType` - Instance type for the OpenSearch cluster's warm nodes. +* `cognitoOptions` - Domain Amazon Cognito Authentication options for Dashboard. + * `enabled` - Whether Amazon Cognito Authentication is enabled. + * `userPoolId` - Cognito User pool used by the domain. + * `identityPoolId` - Cognito Identity pool used by the domain. + * `roleArn` - IAM Role with the AmazonOpenSearchServiceCognitoAccess policy attached. +* `created` – Status of the creation of the domain. +* `deleted` – Status of the deletion of the domain. +* `domainId` – Unique identifier for the domain. +* `ebsOptions` - EBS Options for the instances in the domain. + * `ebsEnabled` - Whether EBS volumes are attached to data nodes in the domain. + * `throughput` - The throughput (in MiB/s) of the EBS volumes attached to data nodes. + * `volumeType` - Type of EBS volumes attached to data nodes. + * `volumeSize` - Size of EBS volumes attached to data nodes (in GB). + * `iops` - Baseline input/output (I/O) performance of EBS volumes attached to data nodes. +* `engineVersion` – OpenSearch version for the domain. +* `encryptionAtRest` - Domain encryption at rest related options. + * `enabled` - Whether encryption at rest is enabled in the domain. + * `kmsKeyId` - KMS key id used to encrypt data at rest. +* `endpoint` – Domain-specific endpoint used to submit index, search, and data upload requests. +* `dashboardEndpoint` - Domain-specific endpoint used to access the [Dashboard application](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/dashboards.html). +* `kibanaEndpoint` - (**Deprecated**) Domain-specific endpoint for kibana without https scheme. Use the `dashboardEndpoint` attribute instead. +* `logPublishingOptions` - Domain log publishing related options. + * `logType` - Type of OpenSearch log being published. + * `cloudwatchLogGroupArn` - CloudWatch Log Group where the logs are published. + * `enabled` - Whether log publishing is enabled. 
+* `nodeToNodeEncryption` - Domain in-transit encryption related options. + * `enabled` - Whether node to node encryption is enabled. +* `offPeakWindowOptions` - Off-peak update window options. + * `enabled` - Whether the off-peak update window is enabled. + * `offPeakWindow` - Configuration block for the off-peak window. + * `windowStartTime` - Start time of the 10-hour window for updates. + * `hours` - Starting hour of the 10-hour window for updates. + * `minutes` - Starting minute of the 10-hour window for updates. +* `processing` – Status of a configuration change in the domain. +* `snapshotOptions` – Domain snapshot related options. + * `automatedSnapshotStartHour` - Hour during which the service takes an automated daily snapshot of the indices in the domain. +* `tags` - Tags assigned to the domain. +* `vpcOptions` - VPC Options for private OpenSearch domains. + * `availabilityZones` - Availability zones used by the domain. + * `securityGroupIds` - Security groups used by the domain. + * `subnetIds` - Subnets used by the domain. + * `vpcId` - VPC used by the domain. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/opensearchserverless_access_policy.html.markdown b/website/docs/cdktf/typescript/d/opensearchserverless_access_policy.html.markdown new file mode 100644 index 00000000000..e4e391f5795 --- /dev/null +++ b/website/docs/cdktf/typescript/d/opensearchserverless_access_policy.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "OpenSearch Serverless" +layout: "aws" +page_title: "AWS: aws_opensearchserverless_access_policy" +description: |- + Terraform data source for managing an AWS OpenSearch Serverless Access Policy. +--- + + + +# Data Source: aws_opensearchserverless_access_policy + +Terraform data source for managing an AWS OpenSearch Serverless Access Policy. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOpensearchserverlessAccessPolicy } from "./.gen/providers/aws/data-aws-opensearchserverless-access-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOpensearchserverlessAccessPolicy(this, "example", { + name: Token.asString(awsOpensearchserverlessAccessPolicyExample.name), + type: Token.asString(awsOpensearchserverlessAccessPolicyExample.type), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the policy. +* `type` - (Required) Type of access policy. Must be `data`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `description` - Description of the policy. Typically used to store information about the permissions defined in the policy. +* `policy` - JSON policy document to use as the content for the new policy. +* `policyVersion` - Version of the policy. 
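+ +For instance, a minimal hand-written sketch (assuming a pre-existing policy named `example-policy`, a hypothetical name) that reads the policy back and exports its document and version: + +```typescript +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +import { DataAwsOpensearchserverlessAccessPolicy } from "./.gen/providers/aws/data-aws-opensearchserverless-access-policy"; +class AccessPolicyLookup extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + // Look up a data access policy by name and type. + const policy = new DataAwsOpensearchserverlessAccessPolicy(this, "example", { + name: "example-policy", + type: "data", + }); + // Export the JSON policy document and its version. + new TerraformOutput(this, "policy_document", { value: policy.policy }); + new TerraformOutput(this, "policy_version", { value: policy.policyVersion }); + } +} + +```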
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/opensearchserverless_collection.html.markdown b/website/docs/cdktf/typescript/d/opensearchserverless_collection.html.markdown new file mode 100644 index 00000000000..5fb63f42fa4 --- /dev/null +++ b/website/docs/cdktf/typescript/d/opensearchserverless_collection.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "OpenSearch Serverless" +layout: "aws" +page_title: "AWS: aws_opensearchserverless_collection" +description: |- + Terraform data source for managing an AWS OpenSearch Serverless Collection. +--- + + + +# Data Source: aws_opensearchserverless_collection + +Terraform data source for managing an AWS OpenSearch Serverless Collection. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOpensearchserverlessCollection } from "./.gen/providers/aws/data-aws-opensearchserverless-collection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOpensearchserverlessCollection(this, "example", { + name: "example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `id` - (Optional) ID of the collection. Either `id` or `name` must be provided. +* `name` - (Optional) Name of the collection. Either `name` or `id` must be provided. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the collection. +* `collectionEndpoint` - Collection-specific endpoint used to submit index, search, and data upload requests to an OpenSearch Serverless collection. +* `createdDate` - Date the Collection was created. +* `dashboardEndpoint` - Collection-specific endpoint used to access OpenSearch Dashboards. +* `description` - Description of the collection. +* `kmsKeyArn` - The ARN of the Amazon Web Services KMS key used to encrypt the collection. +* `lastModifiedDate` - Date the Collection was last modified. +* `tags` - A map of tags to assign to the collection. +* `type` - Type of collection. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/opensearchserverless_security_config.html.markdown b/website/docs/cdktf/typescript/d/opensearchserverless_security_config.html.markdown new file mode 100644 index 00000000000..32d1b44729a --- /dev/null +++ b/website/docs/cdktf/typescript/d/opensearchserverless_security_config.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "OpenSearch Serverless" +layout: "aws" +page_title: "AWS: aws_opensearchserverless_security_config" +description: |- + Terraform data source for managing an AWS OpenSearch Serverless Security Config. +--- + + + +# Data Source: aws_opensearchserverless_security_config + +Terraform data source for managing an AWS OpenSearch Serverless Security Config. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsOpensearchserverlessSecurityConfig } from "./.gen/providers/aws/data-aws-opensearchserverless-security-config"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOpensearchserverlessSecurityConfig(this, "example", { + id: "saml/12345678912/example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `id` - (Required) The unique identifier of the security configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `configVersion` - The version of the security configuration. +* `createdDate` - The date the configuration was created. +* `description` - The description of the security configuration. +* `lastModifiedDate` - The date the configuration was last modified. +* `samlOptions` - SAML options for the security configuration. +* `type` - The type of security configuration. + +### saml_options + +SAML options for the security configuration. + +* `groupAttribute` - Group attribute for this SAML integration. +* `metadata` - The XML IdP metadata file generated from your identity provider. +* `sessionTimeout` - Session timeout, in minutes. Minimum is 5 minutes and maximum is 720 minutes (12 hours). Default is 60 minutes. +* `userAttribute` - User attribute for this SAML integration. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/opensearchserverless_security_policy.html.markdown b/website/docs/cdktf/typescript/d/opensearchserverless_security_policy.html.markdown new file mode 100644 index 00000000000..8a40fb6e8a2 --- /dev/null +++ b/website/docs/cdktf/typescript/d/opensearchserverless_security_policy.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "OpenSearch Serverless" +layout: "aws" +page_title: "AWS: aws_opensearchserverless_security_policy" +description: |- + Get information on an OpenSearch Serverless Security Policy. +--- + + + +# Data Source: aws_opensearchserverless_security_policy + +Use this data source to get information about an AWS OpenSearch Serverless Security Policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/data-aws-opensearchserverless-security-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOpensearchserverlessSecurityPolicy(this, "example", { + name: "example-security-policy", + type: "encryption", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the policy +* `type` - (Required) Type of security policy. One of `encryption` or `network`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `createdDate` - The date the security policy was created. +* `description` - Description of the security policy. +* `lastModifiedDate` - The date the security policy was last modified. +* `policy` - The JSON policy document without any whitespaces. +* `policyVersion` - Version of the policy. 
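+ +A similar hand-written sketch (the policy name is the placeholder value from the example above) showing how the retrieved policy document might be re-used, for example as a stack output: + +```typescript +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +import { DataAwsOpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/data-aws-opensearchserverless-security-policy"; +class SecurityPolicyLookup extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + // Read back an existing encryption security policy by name. + const policy = new DataAwsOpensearchserverlessSecurityPolicy(this, "example", { + name: "example-security-policy", + type: "encryption", + }); + // Export the compact JSON policy document. + new TerraformOutput(this, "security_policy_json", { value: policy.policy }); + } +} + +```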
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/opensearchserverless_vpc_endpoint.html.markdown b/website/docs/cdktf/typescript/d/opensearchserverless_vpc_endpoint.html.markdown new file mode 100644 index 00000000000..be576d867c7 --- /dev/null +++ b/website/docs/cdktf/typescript/d/opensearchserverless_vpc_endpoint.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "OpenSearch Serverless" +layout: "aws" +page_title: "AWS: aws_opensearchserverless_vpc_endpoint" +description: |- + Terraform data source for managing an AWS OpenSearch Serverless VPC Endpoint. +--- + + + +# Data Source: aws_opensearchserverless_vpc_endpoint + +Terraform data source for managing an AWS OpenSearch Serverless VPC Endpoint. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOpensearchserverlessVpcEndpoint } from "./.gen/providers/aws/data-aws-opensearchserverless-vpc-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOpensearchserverlessVpcEndpoint(this, "example", { + vpcEndpointId: "vpce-829a4487959e2a839", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `vpcEndpointId` - (Required) The unique identifier of the endpoint. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `createdDate` - The date the endpoint was created. +* `name` - The name of the endpoint. +* `securityGroupIds` - The IDs of the security groups that define the ports, protocols, and sources for inbound traffic that you are authorizing into your endpoint. +* `subnetIds` - The IDs of the subnets from which you access OpenSearch Serverless. +* `vpcId` - The ID of the VPC from which you access OpenSearch Serverless. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_delegated_administrators.html.markdown b/website/docs/cdktf/typescript/d/organizations_delegated_administrators.html.markdown new file mode 100644 index 00000000000..248afc7cbc0 --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_delegated_administrators.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_delegated_administrators" +description: |- + Get a list of AWS accounts that are designated as delegated administrators in this organization +--- + + + +# Data Source: aws_organizations_delegated_administrators + +Get a list of AWS accounts that are designated as delegated administrators in this organization + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsOrganizationsDelegatedAdministrators } from "./.gen/providers/aws/data-aws-organizations-delegated-administrators"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOrganizationsDelegatedAdministrators(this, "example", { + servicePrincipal: "SERVICE PRINCIPAL", + }); + } +} + +``` + +## Argument Reference + +* `servicePrincipal` - (Optional) Specifies a service principal name. If specified, then the operation lists the delegated administrators only for the specified service. If you don't specify a service principal, the operation lists all delegated administrators for all services in your organization. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `delegatedAdministrators` - The list of delegated administrators in your organization, which have the following attributes: + * `arn` - The ARN of the delegated administrator's account. + * `delegationEnabledDate` - The date when the account was made a delegated administrator. + * `email` - The email address that is associated with the delegated administrator's AWS account. + * `id` - The unique identifier (ID) of the delegated administrator's account. + * `joinedMethod` - The method by which the delegated administrator's account joined the organization. + * `joinedTimestamp` - The date when the delegated administrator's account became a part of the organization. + * `name` - The friendly name of the delegated administrator's account. + * `status` - The status of the delegated administrator's account in the organization. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_delegated_services.html.markdown b/website/docs/cdktf/typescript/d/organizations_delegated_services.html.markdown new file mode 100644 index 00000000000..cde5e434e51 --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_delegated_services.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_delegated_services" +description: |- + Get a list of the AWS services for which the specified account is a delegated administrator +--- + + + +# Data Source: aws_organizations_delegated_services + +Get a list of the AWS services for which the specified account is a delegated administrator + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsDelegatedServices } from "./.gen/providers/aws/data-aws-organizations-delegated-services"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOrganizationsDelegatedServices(this, "example", { + accountId: "AWS ACCOUNT ID", + }); + } +} + +``` + +## Argument Reference + +* `accountId` - (Required) Account ID number of a delegated administrator account in the organization. 
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `delegatedServices` - Services for which the account is a delegated administrator, which have the following attributes: + * `delegationEnabledDate` - The date that the account became a delegated administrator for this service. + * `servicePrincipal` - The name of an AWS service that can request an operation for the specified service. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_organization.html.markdown b/website/docs/cdktf/typescript/d/organizations_organization.html.markdown new file mode 100644 index 00000000000..6ab1392a025 --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_organization.html.markdown @@ -0,0 +1,143 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_organization" +description: |- + Get information about the organization that the user's account belongs to +--- + + + +# Data Source: aws_organizations_organization + +Get information about the organization that the user's account belongs to + +## Example Usage + +### List all account IDs for the organization + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsOrganizationsOrganization(this, "example", {}); + new TerraformOutput(this, "account_ids", { + value: propertyAccess(example.accounts, ["*", "id"]), + }); + } +} + +``` + +### SNS topic that can be interacted by the organization only + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +import { SnsTopicPolicy } from "./.gen/providers/aws/sns-topic-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const snsTopic = new SnsTopic(this, "sns_topic", { + name: "my-sns-topic", + }); + const example = new DataAwsOrganizationsOrganization(this, "example", {}); + const snsTopicPolicy = new DataAwsIamPolicyDocument( + this, + "sns_topic_policy", + { + statement: [ + { + actions: ["SNS:Subscribe", "SNS:Publish"], + condition: [ + { + test: "StringEquals", + values: [Token.asString(example.id)], + variable: "aws:PrincipalOrgID", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [snsTopic.arn], + }, + ], + } + ); + const awsSnsTopicPolicySnsTopicPolicy = new SnsTopicPolicy( + this, + "sns_topic_policy_3", + { + arn: snsTopic.arn, + policy: Token.asString(snsTopicPolicy.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSnsTopicPolicySnsTopicPolicy.overrideLogicalId("sns_topic_policy"); + } +} + +``` + +## Argument Reference + +There are no arguments available for this data source. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the organization. +* `featureSet` - FeatureSet of the organization. +* `id` - ID of the organization. +* `masterAccountArn` - ARN of the account that is designated as the master account for the organization. +* `masterAccountEmail` - The email address that is associated with the AWS account that is designated as the master account for the organization. +* `masterAccountId` - Unique identifier (ID) of the master account of an organization. + +### Master Account or Delegated Administrator Attribute Reference + +If the account is the master account or a delegated administrator for the organization, the following attributes are also exported: + +* `accounts` - List of organization accounts including the master account. For a list excluding the master account, see the `nonMasterAccounts` attribute. All elements have these attributes: + * `arn` - ARN of the account + * `email` - Email of the account + * `id` - Identifier of the account + * `name` - Name of the account + * `status` - Status of the account +* `awsServiceAccessPrincipals` - A list of AWS service principal names that have integration enabled with your organization. Organization must have `featureSet` set to `all`. For additional information, see the [AWS Organizations User Guide](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrate_services.html). +* `enabledPolicyTypes` - A list of Organizations policy types that are enabled in the Organization Root. Organization must have `featureSet` set to `all`. For additional information about valid policy types (e.g., `serviceControlPolicy`), see the [AWS Organizations API Reference](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnablePolicyType.html). +* `nonMasterAccounts` - List of organization accounts excluding the master account. For a list including the master account, see the `accounts` attribute. 
All elements have these attributes: + * `arn` - ARN of the account + * `email` - Email of the account + * `id` - Identifier of the account + * `name` - Name of the account + * `status` - Status of the account +* `roots` - List of organization roots. All elements have these attributes: + * `arn` - ARN of the root + * `id` - Identifier of the root + * `name` - Name of the root + * `policyTypes` - List of policy types enabled for this root. All elements have these attributes: + * `name` - The name of the policy type + * `status` - The status of the policy type as it relates to the associated root + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_organizational_unit_child_accounts.html.markdown b/website/docs/cdktf/typescript/d/organizations_organizational_unit_child_accounts.html.markdown new file mode 100644 index 00000000000..100d4c2dc60 --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_organizational_unit_child_accounts.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_organizational_unit_child_accounts" +description: |- + Get all direct child accounts under a parent organizational unit. This only provides immediate children, not all children. +--- + + + +# Data Source: aws_organizations_organizational_unit_child_accounts + +Get all direct child accounts under a parent organizational unit. This only provides immediate children, not all children. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +import { DataAwsOrganizationsOrganizationalUnitChildAccounts } from "./.gen/providers/aws/data-aws-organizations-organizational-unit-child-accounts"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const org = new DataAwsOrganizationsOrganization(this, "org", {}); + new DataAwsOrganizationsOrganizationalUnitChildAccounts(this, "accounts", { + parentId: Token.asString(propertyAccess(org.roots, ["0", "id"])), + }); + } +} + +``` + +## Argument Reference + +* `parentId` - (Required) The parent ID of the accounts. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accounts` - List of child accounts, which have the following attributes: + * `arn` - The Amazon Resource Name (ARN) of the account. + * `email` - The email address associated with the AWS account. + * `id` - The unique identifier (ID) of the account. + * `name` - The friendly name of the account. + * `status` - The status of the account in the organization. +* `id` - Parent identifier of the organizational units. 
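+ +Since `accounts` is a list of objects, a splat expression (the same `propertyAccess` pattern used in the `aws_organizations_organization` example above) can project out a single field. A hand-written sketch with a hypothetical OU ID: + +```typescript +import { Construct } from "constructs"; +import { TerraformOutput, propertyAccess, TerraformStack } from "cdktf"; +import { DataAwsOrganizationsOrganizationalUnitChildAccounts } from "./.gen/providers/aws/data-aws-organizations-organizational-unit-child-accounts"; +class ChildAccountIds extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + // List the immediate child accounts of a known OU (placeholder ID). + const childAccounts = + new DataAwsOrganizationsOrganizationalUnitChildAccounts(this, "accounts", { + parentId: "ou-1234-abcd1234", + }); + // Output just the account IDs via a splat expression. + new TerraformOutput(this, "child_account_ids", { + value: propertyAccess(childAccounts.accounts, ["*", "id"]), + }); + } +} + +```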
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_organizational_unit_descendant_accounts.html.markdown b/website/docs/cdktf/typescript/d/organizations_organizational_unit_descendant_accounts.html.markdown new file mode 100644 index 00000000000..1609cf021f0 --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_organizational_unit_descendant_accounts.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_organizational_unit_descendant_accounts" +description: |- + Get all child accounts under a parent organizational unit. This provides all children. +--- + + + +# Data Source: aws_organizations_organizational_unit_descendant_accounts + +Get all child accounts under a parent organizational unit. This provides all children. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +import { DataAwsOrganizationsOrganizationalUnitDescendantAccounts } from "./.gen/providers/aws/data-aws-organizations-organizational-unit-descendant-accounts"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const org = new DataAwsOrganizationsOrganization(this, "org", {}); + new DataAwsOrganizationsOrganizationalUnitDescendantAccounts( + this, + "accounts", + { + parentId: Token.asString(propertyAccess(org.roots, ["0", "id"])), + } + ); + } +} + +``` + +## Argument Reference + +* `parentId` - (Required) The parent ID of the accounts. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accounts` - List of child accounts, which have the following attributes: + * `arn` - The Amazon Resource Name (ARN) of the account. + * `email` - The email address associated with the AWS account. + * `id` - The unique identifier (ID) of the account. + * `name` - The friendly name of the account. + * `status` - The status of the account in the organization. +* `id` - Parent identifier of the organizational units. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_organizational_units.html.markdown b/website/docs/cdktf/typescript/d/organizations_organizational_units.html.markdown new file mode 100644 index 00000000000..73c92aa9d2b --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_organizational_units.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_organizational_units" +description: |- + Get all direct child organizational units under a parent organizational unit. This only provides immediate children, not all children. +--- + + + +# Data Source: aws_organizations_organizational_units + +Get all direct child organizational units under a parent organizational unit. This only provides immediate children, not all children. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +import { DataAwsOrganizationsOrganizationalUnits } from "./.gen/providers/aws/data-aws-organizations-organizational-units"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const org = new DataAwsOrganizationsOrganization(this, "org", {}); + new DataAwsOrganizationsOrganizationalUnits(this, "ou", { + parentId: Token.asString(propertyAccess(org.roots, ["0", "id"])), + }); + } +} + +``` + +## Argument Reference + +* `parentId` - (Required) Parent ID of the organizational unit. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `children` - List of child organizational units, which have the following attributes: + * `arn` - ARN of the organizational unit + * `name` - Name of the organizational unit + * `id` - ID of the organizational unit +* `id` - Parent identifier of the organizational units. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_policies.html.markdown b/website/docs/cdktf/typescript/d/organizations_policies.html.markdown new file mode 100644 index 00000000000..4743bd06112 --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_policies.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_policies" +description: |- + Terraform data source for managing AWS Organizations Policies. +--- + + + +# Data Source: aws_organizations_policies + +Terraform data source for managing AWS Organizations Policies. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformIterator, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsPolicies } from "./.gen/providers/aws/data-aws-organizations-policies"; +import { DataAwsOrganizationsPolicy } from "./.gen/providers/aws/data-aws-organizations-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsOrganizationsPolicies(this, "example", { + filter: "SERVICE_CONTROL_POLICY", + }); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. 
a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny(Fn.toset(example.ids)) + ); + const dataAwsOrganizationsPolicyExample = new DataAwsOrganizationsPolicy( + this, + "example_1", + { + policyId: Token.asString(exampleForEachIterator.value), + forEach: exampleForEachIterator, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsOrganizationsPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `filter` - (Required) The type of policies to be returned in the response. Valid values are `SERVICE_CONTROL_POLICY | TAG_POLICY | BACKUP_POLICY | AISERVICES_OPT_OUT_POLICY`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - List of all the policy ids found. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_policies_for_target.html.markdown b/website/docs/cdktf/typescript/d/organizations_policies_for_target.html.markdown new file mode 100644 index 00000000000..907f05e5c67 --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_policies_for_target.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_policies_for_target" +description: |- + Terraform data source for managing AWS Organizations Policies For Target. +--- + + + +# Data Source: aws_organizations_policies_for_target + +Terraform data source for managing AWS Organizations Policies For Target. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + propertyAccess, + Token, + Fn, + TerraformIterator, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +import { DataAwsOrganizationsPoliciesForTarget } from "./.gen/providers/aws/data-aws-organizations-policies-for-target"; +import { DataAwsOrganizationsPolicy } from "./.gen/providers/aws/data-aws-organizations-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsOrganizationsOrganization(this, "example", {}); + const dataAwsOrganizationsPoliciesForTargetExample = + new DataAwsOrganizationsPoliciesForTarget(this, "example_1", { + filter: "SERVICE_CONTROL_POLICY", + targetId: Token.asString(propertyAccess(example.roots, ["0", "id"])), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsOrganizationsPoliciesForTargetExample.overrideLogicalId("example"); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. 
a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny(Fn.toset(dataAwsOrganizationsPoliciesForTargetExample.ids)) + ); + const dataAwsOrganizationsPolicyExample = new DataAwsOrganizationsPolicy( + this, + "example_2", + { + policyId: Token.asString(exampleForEachIterator.value), + forEach: exampleForEachIterator, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsOrganizationsPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `targetId` - (Required) The root (string that begins with "r-" followed by 4-32 lowercase letters or digits), account (12 digit string), or Organizational Unit (string starting with "ou-" followed by 4-32 lowercase letters or digits. This string is followed by a second "-" dash and from 8-32 additional lowercase letters or digits.) +* `filter` - (Required) Must supply one of the 4 different policy filters for a target (SERVICE_CONTROL_POLICY | TAG_POLICY | BACKUP_POLICY | AISERVICES_OPT_OUT_POLICY). + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `ids` - List of all the policy ids found. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_policy.html.markdown b/website/docs/cdktf/typescript/d/organizations_policy.html.markdown new file mode 100644 index 00000000000..a3f3eab59ea --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_policy.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_policy" +description: |- + Terraform data source for managing an AWS Organizations Policy. +--- + + + +# Data Source: aws_organizations_policy + +Terraform data source for managing an AWS Organizations Policy. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +import { DataAwsOrganizationsPoliciesForTarget } from "./.gen/providers/aws/data-aws-organizations-policies-for-target"; +import { DataAwsOrganizationsPolicy } from "./.gen/providers/aws/data-aws-organizations-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const dataAwsOrganizationsOrganizationCurrent = + new DataAwsOrganizationsOrganization(this, "current", {}); + const dataAwsOrganizationsPoliciesForTargetCurrent = + new DataAwsOrganizationsPoliciesForTarget(this, "current_2", { + filter: "SERVICE_CONTROL_POLICY", + targetId: Token.asString( + propertyAccess(dataAwsOrganizationsOrganizationCurrent.roots, [ + "0", + "id", + ]) + ), + }); + // Fetch details for the first policy attached to the organization root. + new DataAwsOrganizationsPolicy(this, "test", { + policyId: Token.asString( + propertyAccess(dataAwsOrganizationsPoliciesForTargetCurrent.ids, ["0"]) + ), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + dataAwsOrganizationsPoliciesForTargetCurrent.overrideLogicalId("current"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `policyId` - (Required) The unique identifier (ID) of the policy that you want more details on. Policy id starts with a "p-" followed by 8-28 lowercase or uppercase letters, digits, and underscores. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of the policy. +* `awsManaged` - Indicates if a policy is an AWS managed policy. +* `content` - The text content of the policy. +* `description` - The description of the policy. +* `name` - The friendly name of the policy. +* `type` - Type of the policy. Valid values are `SERVICE_CONTROL_POLICY | TAG_POLICY | BACKUP_POLICY | AISERVICES_OPT_OUT_POLICY`. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/organizations_resource_tags.html.markdown b/website/docs/cdktf/typescript/d/organizations_resource_tags.html.markdown new file mode 100644 index 00000000000..d9e95b4f32c --- /dev/null +++ b/website/docs/cdktf/typescript/d/organizations_resource_tags.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_resource_tags" +description: |- + Get tags attached to the specified AWS Organizations resource. +--- + + + +# Data Source: aws_organizations_resource_tags + +Get tags attached to the specified AWS Organizations resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOrganizationsResourceTags } from "./.gen/providers/aws/data-aws-organizations-resource-tags"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOrganizationsResourceTags(this, "account", { + resourceId: "123456123846", + }); + } +} + +``` + +## Argument Reference + +* `resourceId` - (Required) ID of the resource with the tags to list. See details below. + +### resource_id + +You can specify any of the following taggable resources. + +* AWS account – specify the account ID number. +* Organizational unit – specify the OU ID that begins with `ou-` and looks similar to: `ou-1a2b-34uvwxyz` +* Root – specify the root ID that begins with `r-` and looks similar to: `r-1a2b` +* Policy – specify the policy ID that begins with `p-` and looks similar to: `p-12abcdefg3` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `tags` - Map of key=value pairs for each tag set on the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_asset.html.markdown b/website/docs/cdktf/typescript/d/outposts_asset.html.markdown new file mode 100644 index 00000000000..093f0d7dc6c --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_asset.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_asset" +description: |- + Information about hardware assets in an Outpost. +--- + + + +# Data Source: aws_outposts_asset + +Information about a specific hardware asset in an Outpost. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformCount, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsAsset } from "./.gen/providers/aws/data-aws-outposts-asset"; +import { DataAwsOutpostsAssets } from "./.gen/providers/aws/data-aws-outposts-assets"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsOutpostsAssets(this, "example", { + arn: Token.asString(dataAwsOutpostsOutpostExample.arn), + }); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleCount = TerraformCount.of( + Token.asNumber(Fn.lengthOf(example.assetIds)) + ); + const dataAwsOutpostsAssetExample = new DataAwsOutpostsAsset( + this, + "example_1", + { + arn: Token.asString(dataAwsOutpostsOutpostExample.arn), + assetId: Token.asString( + Fn.element(example.assetIds, Token.asNumber(exampleCount.index)) + ), + count: exampleCount, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsOutpostsAssetExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `arn` - (Required) Outpost ARN. +* `assetId` - (Required) ID of the asset. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `assetType` - Type of the asset. +* `hostId` - Host ID of the Dedicated Hosts on the asset, if a Dedicated Host is provisioned. +* `rackElevation` - Position of an asset in a rack measured in rack units. +* `rackId` - Rack ID of the asset. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_assets.html.markdown b/website/docs/cdktf/typescript/d/outposts_assets.html.markdown new file mode 100644 index 00000000000..7057df25c79 --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_assets.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_assets" +description: |- + Information about hardware assets in an Outpost. +--- + + + +# Data Source: aws_outposts_assets + +Information about hardware assets in an Outpost. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsOutpostsAssets } from "./.gen/providers/aws/data-aws-outposts-assets"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsAssets(this, "example", { + arn: Token.asString(dataAwsOutpostsOutpostExample.arn), + }); + } +} + +``` + +### With Host ID Filter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsAssets } from "./.gen/providers/aws/data-aws-outposts-assets"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsAssets(this, "example", { + arn: Token.asString(dataAwsOutpostsOutpostExample.arn), + hostIdFilter: ["h-x38g5n0yd2a0ueb61"], + }); + } +} + +``` + +### With Status ID Filter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsAssets } from "./.gen/providers/aws/data-aws-outposts-assets"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsAssets(this, "example", { + arn: Token.asString(dataAwsOutpostsOutpostExample.arn), + statusIdFilter: ["ACTIVE"], + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `arn` - (Required) Outpost ARN. +* `hostIdFilter` - (Optional) Filters by list of Host IDs of a Dedicated Host. +* `statusIdFilter` - (Optional) Filters by list of state status. Valid values: "ACTIVE", "RETIRING". + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `assetIds` - List of all the asset ids found. This data source will fail if none are found. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_outpost.html.markdown b/website/docs/cdktf/typescript/d/outposts_outpost.html.markdown new file mode 100644 index 00000000000..2867a160515 --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_outpost.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_outpost" +description: |- + Provides details about an Outposts Outpost +--- + + + +# Data Source: aws_outposts_outpost + +Provides details about an Outposts Outpost. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsOutpostsOutpost } from "./.gen/providers/aws/data-aws-outposts-outpost"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsOutpost(this, "example", { + name: "example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `id` - (Optional) Identifier of the Outpost. +* `name` - (Optional) Name of the Outpost. +* `arn` - (Optional) ARN. +* `ownerId` - (Optional) AWS Account identifier of the Outpost owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `availabilityZone` - Availability Zone name. +* `availabilityZoneId` - Availability Zone identifier. +* `description` - The description of the Outpost. +* `lifecycleStatus` - The life cycle status. +* `siteArn` - The Amazon Resource Name (ARN) of the site. +* `siteId` - The ID of the site. +* `supportedHardwareType` - The hardware type. +* `tags` - The Outpost tags. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_outpost_instance_type.html.markdown b/website/docs/cdktf/typescript/d/outposts_outpost_instance_type.html.markdown new file mode 100644 index 00000000000..6f825e58451 --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_outpost_instance_type.html.markdown @@ -0,0 +1,61 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_outpost_instance_type" +description: |- + Information about a single Outpost Instance Type. +--- + + + +# Data Source: aws_outposts_outpost_instance_type + +Information about a single Outpost Instance Type. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Instance } from "./.gen/providers/aws/instance"; +import { DataAwsOutpostsOutpostInstanceType } from "./.gen/providers/aws/data-aws-outposts-outpost-instance-type"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsOutpostsOutpostInstanceType(this, "example", { + arn: Token.asString(dataAwsOutpostsOutpostExample.arn), + preferredInstanceTypes: ["m5.large", "m5.4xlarge"], + }); + const awsInstanceExample = new Instance(this, "example_1", { + instanceType: example.instanceType, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsInstanceExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `arn` - (Required) Outpost ARN. + +The following arguments are optional: + +* `instanceType` - (Optional) Desired instance type. Conflicts with `preferredInstanceTypes`. +* `preferredInstanceTypes` - (Optional) Ordered list of preferred instance types. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. Conflicts with `instanceType`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Outpost identifier. 
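As a quick illustration of consuming the result, the following sketch (not generated by `cdktf convert`; the Outpost ARN is a hypothetical placeholder that would normally come from a data source) surfaces the matched instance type as a stack output instead of feeding it straight into an instance:

```typescript
// A minimal sketch, assuming provider bindings were generated with `cdktf get`.
import { Construct } from "constructs";
import { TerraformOutput, TerraformStack } from "cdktf";
import { DataAwsOutpostsOutpostInstanceType } from "./.gen/providers/aws/data-aws-outposts-outpost-instance-type";

class InstanceTypeLookupStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    // Hypothetical Outpost ARN; substitute your own or a data source lookup.
    const selected = new DataAwsOutpostsOutpostInstanceType(this, "selected", {
      arn: "arn:aws:outposts:us-west-2:123456789012:outpost/op-0123456789abcdef0",
      preferredInstanceTypes: ["m5.large", "m5.4xlarge"],
    });
    // Expose the first preferred type the Outpost actually supports.
    new TerraformOutput(this, "instance_type", {
      value: selected.instanceType,
    });
  }
}
```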
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_outpost_instance_types.html.markdown b/website/docs/cdktf/typescript/d/outposts_outpost_instance_types.html.markdown new file mode 100644 index 00000000000..d5f594cfdba --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_outpost_instance_types.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_outpost_instance_types" +description: |- + Information about Outpost Instance Types. +--- + + + +# Data Source: aws_outposts_outpost_instance_types + +Information about Outposts Instance Types. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsOutpostInstanceTypes } from "./.gen/providers/aws/data-aws-outposts-outpost-instance-types"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsOutpostInstanceTypes(this, "example", { + arn: Token.asString(dataAwsOutpostsOutpostExample.arn), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `arn` - (Required) Outpost ARN. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `instanceTypes` - Set of instance types. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_outposts.html.markdown b/website/docs/cdktf/typescript/d/outposts_outposts.html.markdown new file mode 100644 index 00000000000..2a28e39ef6c --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_outposts.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_outposts" +description: |- + Provides details about multiple Outposts +--- + + + +# Data Source: aws_outposts_outposts + +Provides details about multiple Outposts. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsOutposts } from "./.gen/providers/aws/data-aws-outposts-outposts"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsOutposts(this, "example", { + siteId: id, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `availabilityZone` - (Optional) Availability Zone name. +* `availabilityZoneId` - (Optional) Availability Zone identifier. +* `siteId` - (Optional) Site identifier. +* `ownerId` - (Optional) AWS Account identifier of the Outpost owner. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of Amazon Resource Names (ARNs). +* `id` - AWS Region. +* `ids` - Set of identifiers. 
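Because `ids` is only known after the lookup runs, fanning out over it has to stay in the Terraform context. The sketch below (an assumption-laden illustration, reusing the `TerraformIterator` pattern shown in the converted examples earlier in these docs) resolves details for every Outpost the data source returns:

```typescript
// A minimal sketch: look up each Outpost id with for_each inside Terraform.
import { Construct } from "constructs";
import { Fn, TerraformIterator, TerraformStack, Token } from "cdktf";
import { DataAwsOutpostsOutpost } from "./.gen/providers/aws/data-aws-outposts-outpost";
import { DataAwsOutpostsOutposts } from "./.gen/providers/aws/data-aws-outposts-outposts";

class AllOutpostsStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const all = new DataAwsOutpostsOutposts(this, "all", {});
    // The set of ids is a Terraform-side value, so iterate with an iterator.
    const idIterator = TerraformIterator.fromList(Token.asAny(Fn.toset(all.ids)));
    new DataAwsOutpostsOutpost(this, "each", {
      forEach: idIterator,
      id: Token.asString(idIterator.value),
    });
  }
}
```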
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_site.html.markdown b/website/docs/cdktf/typescript/d/outposts_site.html.markdown new file mode 100644 index 00000000000..540e93eada8 --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_site.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_site" +description: |- + Provides details about an Outposts Site +--- + + + +# Data Source: aws_outposts_site + +Provides details about an Outposts Site. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsSite } from "./.gen/providers/aws/data-aws-outposts-site"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsSite(this, "example", { + name: "example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `id` - (Optional) Identifier of the Site. +* `name` - (Optional) Name of the Site. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `accountId` - AWS Account identifier. +* `description` - Description. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/outposts_sites.html.markdown b/website/docs/cdktf/typescript/d/outposts_sites.html.markdown new file mode 100644 index 00000000000..33f7a1c6640 --- /dev/null +++ b/website/docs/cdktf/typescript/d/outposts_sites.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "Outposts" +layout: "aws" +page_title: "AWS: aws_outposts_sites" +description: |- + Provides details about multiple Outposts Sites. +--- + + + +# Data Source: aws_outposts_sites + +Provides details about multiple Outposts Sites. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsSites } from "./.gen/providers/aws/data-aws-outposts-sites"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsOutpostsSites(this, "all", {}); + } +} + +``` + +## Argument Reference + +There are no arguments available for this data source. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - AWS Region. +* `ids` - Set of Outposts Site identifiers. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/partition.html.markdown b/website/docs/cdktf/typescript/d/partition.html.markdown new file mode 100644 index 00000000000..9885a6f7e64 --- /dev/null +++ b/website/docs/cdktf/typescript/d/partition.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_partition" +description: |- + Get AWS partition identifier +--- + + + +# Data Source: aws_partition + +Use this data source to look up information about the current AWS partition in +which Terraform is working. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsPartition(this, "current", {}); + new DataAwsIamPolicyDocument(this, "s3_policy", { + statement: [ + { + actions: ["s3:ListBucket"], + resources: ["arn:${" + current.partition + "}:s3:::my-bucket"], + sid: "1", + }, + ], + }); + } +} + +``` + +## Argument Reference + +There are no arguments available for this data source. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `dnsSuffix` - Base DNS domain name for the current partition (e.g., `amazonawsCom` in AWS Commercial, `amazonawsComCn` in AWS China). +* `id` - Identifier of the current partition (e.g., `aws` in AWS Commercial, `awsCn` in AWS China). +* `partition` - Identifier of the current partition (e.g., `aws` in AWS Commercial, `awsCn` in AWS China). +* `reverseDnsPrefix` - Prefix of service names (e.g., `comAmazonaws` in AWS Commercial, `cnComAmazonaws` in AWS China). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/prefix_list.html.markdown b/website/docs/cdktf/typescript/d/prefix_list.html.markdown new file mode 100644 index 00000000000..f2df5aaf559 --- /dev/null +++ b/website/docs/cdktf/typescript/d/prefix_list.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_prefix_list" +description: |- + Provides details about a specific prefix list +--- + + + +# Data Source: aws_prefix_list + +`awsPrefixList` provides details about a specific AWS prefix list (PL) +in the current region. + +This can be used both to validate a prefix list given in a variable +and to obtain the CIDR blocks (IP address ranges) for the associated +AWS service. The latter may be useful e.g., for adding network ACL +rules. + +The [aws_ec2_managed_prefix_list](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/ec2_managed_prefix_list) data source is normally more appropriate to use given it can return customer-managed prefix list info, as well as additional attributes. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsPrefixList } from "./.gen/providers/aws/data-aws-prefix-list"; +import { NetworkAcl } from "./.gen/providers/aws/network-acl"; +import { NetworkAclRule } from "./.gen/providers/aws/network-acl-rule"; +import { VpcEndpoint } from "./.gen/providers/aws/vpc-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bar = new NetworkAcl(this, "bar", { + vpcId: foo.id, + }); + const privateS3 = new VpcEndpoint(this, "private_s3", { + serviceName: "com.amazonaws.us-west-2.s3", + vpcId: foo.id, + }); + const dataAwsPrefixListPrivateS3 = new DataAwsPrefixList( + this, + "private_s3_2", + { + prefixListId: privateS3.prefixListId, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsPrefixListPrivateS3.overrideLogicalId("private_s3"); + const awsNetworkAclRulePrivateS3 = new NetworkAclRule( + this, + "private_s3_3", + { + cidrBlock: Token.asString( + propertyAccess(dataAwsPrefixListPrivateS3.cidrBlocks, ["0"]) + ), + egress: false, + fromPort: 443, + networkAclId: bar.id, + protocol: "tcp", + ruleAction: "allow", + ruleNumber: 200, + toPort: 443, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkAclRulePrivateS3.overrideLogicalId("private_s3"); + } +} + +``` + +### Filter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsPrefixList } from "./.gen/providers/aws/data-aws-prefix-list"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsPrefixList(this, "test", { + filter: [ + { + name: "prefix-list-id", + values: ["pl-68a54001"], + }, + ], + }); + } +} + +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +prefix lists. The given filters must match exactly one prefix list +whose data will be exported as attributes. + +* `prefixListId` - (Optional) ID of the prefix list to select. +* `name` - (Optional) Name of the prefix list to select. +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +### filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribePrefixLists API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribePrefixLists.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the selected prefix list. +* `name` - Name of the selected prefix list. +* `cidrBlocks` - List of CIDR blocks for the AWS service associated with the prefix list. 
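Beyond feeding rules directly, the resolved attributes can also be exported for inspection. A minimal sketch (assuming the same `pl-68a54001` filter as the example above) that publishes the CIDR blocks as a stack output:

```typescript
// A minimal sketch, not generated by 'cdktf convert'; it simply exports the
// service's published IP ranges so they can be inspected after deploy.
import { Construct } from "constructs";
import { TerraformOutput, TerraformStack } from "cdktf";
import { DataAwsPrefixList } from "./.gen/providers/aws/data-aws-prefix-list";

class PrefixListCidrsStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const pl = new DataAwsPrefixList(this, "s3", {
      filter: [{ name: "prefix-list-id", values: ["pl-68a54001"] }],
    });
    // cidrBlocks resolves to the CIDR ranges associated with the prefix list.
    new TerraformOutput(this, "s3_cidr_blocks", { value: pl.cidrBlocks });
  }
}
```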
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/pricing_product.html.markdown b/website/docs/cdktf/typescript/d/pricing_product.html.markdown new file mode 100644 index 00000000000..f4381578cbe --- /dev/null +++ b/website/docs/cdktf/typescript/d/pricing_product.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "Pricing Calculator" +layout: "aws" +page_title: "AWS: aws_pricing_product" +description: |- + Get information regarding the pricing of an Amazon product +--- + + + +# Data Source: aws_pricing_product + +Use this data source to get the pricing information of all products in AWS. +This data source is only available in a us-east-1 or ap-south-1 provider. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsPricingProduct } from "./.gen/providers/aws/data-aws-pricing-product"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsPricingProduct(this, "example", { + filters: [ + { + field: "instanceType", + value: "c5.xlarge", + }, + { + field: "operatingSystem", + value: "Linux", + }, + { + field: "location", + value: "US East (N. Virginia)", + }, + { + field: "preInstalledSw", + value: "NA", + }, + { + field: "licenseModel", + value: "No License required", + }, + { + field: "tenancy", + value: "Shared", + }, + { + field: "capacitystatus", + value: "Used", + }, + ], + serviceCode: "AmazonEC2", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsPricingProduct } from "./.gen/providers/aws/data-aws-pricing-product"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsPricingProduct(this, "example", { + filters: [ + { + field: "instanceType", + value: "ds1.xlarge", + }, + { + field: "location", + value: "US East (N. Virginia)", + }, + ], + serviceCode: "AmazonRedshift", + }); + } +} + +``` + +## Argument Reference + +* `serviceCode` - (Required) Code of the service. Available service codes can be fetched using the DescribeServices pricing API call. +* `filters` - (Required) List of filters. Passed directly to the API (see GetProducts API reference). These filters must describe a single product; this resource will fail if more than one product is returned by the API. + +### filters + +* `field` - (Required) Product attribute name that you want to filter on. +* `value` - (Required) Product attribute value that you want to filter on. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `result` - Set to the product returned from the API. 
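Because `result` is returned as a raw JSON document, it usually needs decoding before individual fields can be read. A minimal sketch, reusing the Redshift filters from the second example above, that decodes it in the Terraform context and exposes it as an output:

```typescript
// A minimal sketch, not generated by 'cdktf convert'; jsondecode turns the
// JSON string returned by the Pricing API into a Terraform object value.
import { Construct } from "constructs";
import { Fn, TerraformOutput, TerraformStack } from "cdktf";
import { DataAwsPricingProduct } from "./.gen/providers/aws/data-aws-pricing-product";

class PricingResultStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const product = new DataAwsPricingProduct(this, "redshift", {
      serviceCode: "AmazonRedshift",
      filters: [
        { field: "instanceType", value: "ds1.xlarge" },
        { field: "location", value: "US East (N. Virginia)" },
      ],
    });
    // Decode once, then drill into the structure with further expressions.
    new TerraformOutput(this, "pricing_document", {
      value: Fn.jsondecode(product.result),
    });
  }
}
```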
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/prometheus_workspace.html.markdown b/website/docs/cdktf/typescript/d/prometheus_workspace.html.markdown new file mode 100644 index 00000000000..f261d46f4d3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/prometheus_workspace.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "AMP (Managed Prometheus)" +layout: "aws" +page_title: "AWS: aws_prometheus_workspace" +description: |- + Gets information on an Amazon Managed Prometheus workspace. +--- + + + +# Data Source: aws_prometheus_workspace + +Provides an Amazon Managed Prometheus workspace data source. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsPrometheusWorkspace } from "./.gen/providers/aws/data-aws-prometheus-workspace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsPrometheusWorkspace(this, "example", { + workspaceId: "ws-41det8a1-2c67-6a1a-9381-9b83d3d78ef7", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `workspaceId` - (Required) Prometheus workspace ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Prometheus workspace. +* `createdDate` - Creation date of the Prometheus workspace. +* `prometheusEndpoint` - Endpoint of the Prometheus workspace. +* `alias` - Prometheus workspace alias. +* `status` - Status of the Prometheus workspace. +* `tags` - Tags assigned to the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/prometheus_workspaces.html.markdown b/website/docs/cdktf/typescript/d/prometheus_workspaces.html.markdown new file mode 100644 index 00000000000..e3173254e90 --- /dev/null +++ b/website/docs/cdktf/typescript/d/prometheus_workspaces.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "AMP (Managed Prometheus)" +layout: "aws" +page_title: "AWS: aws_prometheus_workspaces" +description: |- + Gets the aliases, ARNs, and workspace IDs of Amazon Prometheus workspaces. +--- + + + +# Data Source: aws_prometheus_workspaces + +Provides the aliases, ARNs, and workspace IDs of Amazon Prometheus workspaces. + +## Example Usage + +The following example returns all of the workspaces in a region: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsPrometheusWorkspaces } from "./.gen/providers/aws/data-aws-prometheus-workspaces"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsPrometheusWorkspaces(this, "example", {}); + } +} + +``` + +The following example filters the workspaces by alias. 
Only the workspaces with +aliases that begin with the value of `aliasPrefix` will be returned: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsPrometheusWorkspaces } from "./.gen/providers/aws/data-aws-prometheus-workspaces"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsPrometheusWorkspaces(this, "example", { + aliasPrefix: "example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `aliasPrefix` - (Optional) Limits results to workspaces with aliases that begin with this value. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `aliases` - List of aliases of the matched Prometheus workspaces. +* `arns` - List of ARNs of the matched Prometheus workspaces. +* `workspaceIds` - List of workspace IDs of the matched Prometheus workspaces. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/qldb_ledger.html.markdown b/website/docs/cdktf/typescript/d/qldb_ledger.html.markdown new file mode 100644 index 00000000000..9caa99d9a29 --- /dev/null +++ b/website/docs/cdktf/typescript/d/qldb_ledger.html.markdown @@ -0,0 +1,46 @@ +--- +subcategory: "QLDB (Quantum Ledger Database)" +layout: "aws" +page_title: "AWS: aws_qldb_ledger" +description: |- + Get information on an Amazon Quantum Ledger Database (QLDB) +--- + + + +# Data Source: aws_qldb_ledger + +Use this data source to fetch information about a Quantum Ledger Database. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsQldbLedger } from "./.gen/providers/aws/data-aws-qldb-ledger"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsQldbLedger(this, "example", { + name: "an_example_ledger", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) Friendly name of the ledger to match. + +## Attribute Reference + +See the [QLDB Ledger Resource](/docs/providers/aws/r/qldb_ledger.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/quicksight_data_set.html.markdown b/website/docs/cdktf/typescript/d/quicksight_data_set.html.markdown new file mode 100644 index 00000000000..fdee2683d00 --- /dev/null +++ b/website/docs/cdktf/typescript/d/quicksight_data_set.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_data_set" +description: |- + Use this data source to fetch information about a QuickSight Data Set. +--- + + + +# Data Source: aws_quicksight_data_set + +Data source for managing a QuickSight Data Set. 
+ +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsQuicksightDataSet } from "./.gen/providers/aws/data-aws-quicksight-data-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsQuicksightDataSet(this, "example", { + dataSetId: "example-id", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `dataSetId` - (Required) Identifier for the data set. + +The following arguments are optional: + +* `awsAccountId` - (Optional) AWS account ID. + +## Attribute Reference + +See the [Data Set Resource](/docs/providers/aws/r/quicksight_data_set.html) for details on the +returned attributes - they are identical. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/quicksight_group.html.markdown b/website/docs/cdktf/typescript/d/quicksight_group.html.markdown new file mode 100644 index 00000000000..db945d138c4 --- /dev/null +++ b/website/docs/cdktf/typescript/d/quicksight_group.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_group" +description: |- + Use this data source to fetch information about a QuickSight Group. +--- + + + +# Data Source: aws_quicksight_group + +This data source can be used to fetch information about a specific +QuickSight group. By using this data source, you can reference QuickSight group +properties without having to hard code ARNs or unique IDs as input. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsQuicksightGroup } from "./.gen/providers/aws/data-aws-quicksight-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsQuicksightGroup(this, "example", { + groupName: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `groupName` - (Required) The name of the group that you want to match. + +The following arguments are optional: + +* `awsAccountId` - (Optional) AWS account ID. +* `namespace` - (Optional) QuickSight namespace. Defaults to `default`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) for the group. +* `description` - The group description. +* `principalId` - The principal ID of the group. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/quicksight_theme.html.markdown b/website/docs/cdktf/typescript/d/quicksight_theme.html.markdown new file mode 100644 index 00000000000..3a0d78696e1 --- /dev/null +++ b/website/docs/cdktf/typescript/d/quicksight_theme.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_theme" +description: |- + Use this data source to fetch information about a QuickSight Theme. 
+--- + + + +# Data Source: aws_quicksight_theme + +Terraform data source for managing an AWS QuickSight Theme. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsQuicksightTheme } from "./.gen/providers/aws/data-aws-quicksight-theme"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsQuicksightTheme(this, "example", { + themeId: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `themeId` - Identifier of the theme. + +The following arguments are optional: + +* `awsAccountId` - AWS account ID. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the theme. +* `baseThemeId` - The ID of the theme that a custom theme will inherit from. All themes inherit from one of the starting themes defined by Amazon QuickSight. +* `configuration` - The theme configuration, which contains the theme display properties. See [configuration](#configuration). +* `createdTime` - The time that the theme was created. +* `id` - A comma-delimited string joining AWS account ID and theme ID. +* `lastUpdatedTime` - The time that the theme was last updated. +* `name` - Display name of the theme. +* `permissions` - A set of resource permissions on the theme. See [permissions](#permissions). +* `status` - The theme creation status. +* `tags` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). +* `versionDescription` - A description of the current theme version being created/updated. +* `versionNumber` - The version number of the theme version. + +### permissions + +* `actions` - List of IAM actions to grant or revoke permissions on. +* `principal` - ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values. + +### configuration + +* `dataColorPalette` - Color properties that apply to chart data colors. See [data_color_palette](#data_color_palette). +* `sheet` - Display options related to sheets. See [sheet](#sheet). +* `typography` - Determines the typography options. See [typography](#typography). +* `uiColorPalette` - Color properties that apply to the UI and to charts, excluding the colors that apply to data. See [ui_color_palette](#ui_color_palette). + +### data_color_palette + +* `colors` - List of hexadecimal codes for the colors. Minimum of 8 items and maximum of 20 items. +* `emptyFillColor` - The hexadecimal code of a color that applies to charts where a lack of data is highlighted. +* `minMaxGradient` - The minimum and maximum hexadecimal codes that describe a color gradient. List of exactly 2 items. + +### sheet + +* `tile` - The display options for tiles. See [tile](#tile). +* `tileLayout` - The layout options for tiles. See [tile_layout](#tile_layout). + +### tile + +* `border` - The border around a tile. See [border](#border). + +### border + +* `show` - The option to enable display of borders for visuals. 
+ +### tile_layout + +* `gutter` - The gutter settings that apply between tiles. See [gutter](#gutter). +* `margin` - The margin settings that apply around the outside edge of sheets. See [margin](#margin). + +### gutter + +* `show` - This Boolean value controls whether to display a gutter space between sheet tiles. + +### margin + +* `show` - This Boolean value controls whether to display sheet margins. + +### typography + +* `fontFamilies` - Determines the list of font families. Maximum number of 5 items. See [font_families](#font_families). + +### font_families + +* `fontFamily` - Font family name. + +### ui_color_palette + +* `accent` - Color (hexadecimal) that applies to selected states and buttons. +* `accentForeground` - Color (hexadecimal) that applies to any text or other elements that appear over the accent color. +* `danger` - Color (hexadecimal) that applies to error messages. +* `dangerForeground` - Color (hexadecimal) that applies to any text or other elements that appear over the error color. +* `dimension` - Color (hexadecimal) that applies to the names of fields that are identified as dimensions. +* `dimensionForeground` - Color (hexadecimal) that applies to any text or other elements that appear over the dimension color. +* `measure` - Color (hexadecimal) that applies to the names of fields that are identified as measures. +* `measureForeground` - Color (hexadecimal) that applies to any text or other elements that appear over the measure color. +* `primaryBackground` - Color (hexadecimal) that applies to visuals and other high emphasis UI. +* `primaryForeground` - Color (hexadecimal) of text and other foreground elements that appear over the primary background regions, such as grid lines, borders, table banding, icons, and so on. +* `secondaryBackground` - Color (hexadecimal) that applies to the sheet background and sheet controls. +* `secondaryForeground` - Color (hexadecimal) that applies to any sheet title, sheet control text, or UI that appears over the secondary background. +* `success` - Color (hexadecimal) that applies to success messages, for example the check mark for a successful download. +* `successForeground` - Color (hexadecimal) that applies to any text or other elements that appear over the success color. +* `warning` - Color (hexadecimal) that applies to warning and informational messages. +* `warningForeground` - Color (hexadecimal) that applies to any text or other elements that appear over the warning color. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/quicksight_user.html.markdown b/website/docs/cdktf/typescript/d/quicksight_user.html.markdown new file mode 100644 index 00000000000..23ee11e8fc1 --- /dev/null +++ b/website/docs/cdktf/typescript/d/quicksight_user.html.markdown @@ -0,0 +1,66 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_user" +description: |- + Use this data source to fetch information about a QuickSight User. +--- + + + +# Data Source: aws_quicksight_user + +This data source can be used to fetch information about a specific +QuickSight user. By using this data source, you can reference QuickSight user +properties without having to hard code ARNs or unique IDs as input. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsQuicksightUser } from "./.gen/providers/aws/data-aws-quicksight-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsQuicksightUser(this, "example", { + userName: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `userName` - (Required) The name of the user that you want to match. + +The following arguments are optional: + +* `awsAccountId` - (Optional) AWS account ID. +* `namespace` - (Optional) QuickSight namespace. Defaults to `default`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `active` - The active status of the user. When you create an Amazon QuickSight user that’s not an IAM user or an Active Directory user, that user is inactive until they sign in and provide a password. +* `arn` - The Amazon Resource Name (ARN) for the user. +* `email` - The user's email address. +* `identityType` - The type of identity authentication used by the user. +* `principalId` - The principal ID of the user. +* `userRole` - The Amazon QuickSight role for the user. The user role can be one of the following: + - `reader`: A user who has read-only access to dashboards. + - `author`: A user who can create data sources, datasets, analyses, and dashboards. + - `admin`: A user who is an author, who can also manage Amazon QuickSight settings. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ram_resource_share.html.markdown b/website/docs/cdktf/typescript/d/ram_resource_share.html.markdown new file mode 100644 index 00000000000..804b85acf7f --- /dev/null +++ b/website/docs/cdktf/typescript/d/ram_resource_share.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "RAM (Resource Access Manager)" +layout: "aws" +page_title: "AWS: aws_ram_resource_share" +description: |- + Retrieve information about a RAM Resource Share +--- + + + +# Data Source: aws_ram_resource_share + +Use the `aws_ram_resource_share` data source to retrieve information about a RAM Resource Share. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRamResourceShare } from "./.gen/providers/aws/data-aws-ram-resource-share"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRamResourceShare(this, "example", { + name: "example", + resourceOwner: "SELF", + }); + } +} + +``` + +## Search by filters + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsRamResourceShare } from "./.gen/providers/aws/data-aws-ram-resource-share"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRamResourceShare(this, "tag_filter", { + filter: [ + { + name: "NameOfTag", + values: ["exampleNameTagValue"], + }, + ], + name: "MyResourceName", + resourceOwner: "SELF", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the resource share to retrieve. +* `resourceOwner` - (Required) Owner of the resource share. Valid values are `SELF` or `OTHER-ACCOUNTS`. +* `resourceShareStatus` - (Optional) Specifies that you want to retrieve details of only those resource shares that have this status. Valid values are `PENDING`, `ACTIVE`, `FAILED`, `DELETING`, and `DELETED`. +* `filter` - (Optional) Filter used to scope the list e.g., by tags. See the [related docs](https://docs.aws.amazon.com/ram/latest/APIReference/API_TagFilter.html). + * `name` - (Required) Name of the tag key to filter on. + * `values` - (Required) Value of the tag key. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the resource share. +* `id` - ARN of the resource share. +* `status` - Status of the RAM share. +* `owningAccountId` - ID of the AWS account that owns the resource share. +* `tags` - Tags attached to the RAM share. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/rds_certificate.html.markdown b/website/docs/cdktf/typescript/d/rds_certificate.html.markdown new file mode 100644 index 00000000000..33690090fa9 --- /dev/null +++ b/website/docs/cdktf/typescript/d/rds_certificate.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_certificate" +description: |- + Information about an RDS Certificate. +--- + + + +# Data Source: aws_rds_certificate + +Information about an RDS Certificate. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRdsCertificate } from "./.gen/providers/aws/data-aws-rds-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRdsCertificate(this, "example", { + latestValidTill: true, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `id` - (Optional) Certificate identifier. For example, `rds-ca-2019`. +* `latestValidTill` - (Optional) When enabled, returns the certificate with the latest `validTill`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the certificate. +* `certificateType` - Type of certificate. For example, `CA`. +* `customerOverride` - Boolean whether there is an override for the default certificate identifier. +* `customerOverrideValidTill` - If there is an override for the default certificate identifier, when the override expires. +* `thumbprint` - Thumbprint of the certificate. 
+* `validFrom` - [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of certificate starting validity date. +* `validTill` - [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of certificate ending validity date. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/rds_cluster.html.markdown b/website/docs/cdktf/typescript/d/rds_cluster.html.markdown new file mode 100644 index 00000000000..33550e9c767 --- /dev/null +++ b/website/docs/cdktf/typescript/d/rds_cluster.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_cluster" +description: |- + Provides an RDS cluster data source. +--- + + + +# Data Source: aws_rds_cluster + +Provides information about an RDS cluster. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRdsCluster } from "./.gen/providers/aws/data-aws-rds-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRdsCluster(this, "clusterName", { + clusterIdentifier: "clusterName", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `clusterIdentifier` - (Required) Cluster identifier of the RDS cluster. + +## Attribute Reference + +See the [RDS Cluster Resource](/docs/providers/aws/r/rds_cluster.html) for details on the +returned attributes - they are identical for all attributes, except the `tagsAll`. If you need to get the tags for this resource, use the attribute `tags` as described below. + +* `tags` - A map of tags assigned to the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/rds_clusters.html.markdown b/website/docs/cdktf/typescript/d/rds_clusters.html.markdown new file mode 100644 index 00000000000..bb516ee3ffa --- /dev/null +++ b/website/docs/cdktf/typescript/d/rds_clusters.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_clusters" +description: |- + Terraform data source for managing an AWS RDS (Relational Database) Clusters. +--- + + + +# Data Source: aws_rds_clusters + +Terraform data source for managing an AWS RDS (Relational Database) Clusters. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRdsClusters } from "./.gen/providers/aws/data-aws-rds-clusters"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRdsClusters(this, "example", { + filter: [ + { + name: "engine", + values: ["aurora-postgresql"], + }, + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are optional: + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. 
+ +### filter Configuration block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [RDS DescribeDBClusters API Reference](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBClusters.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `clusterArns` - Set of cluster ARNs of the matched RDS clusters. +* `clusterIdentifiers` - Set of cluster identifiers of the matched RDS clusters. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/rds_engine_version.markdown b/website/docs/cdktf/typescript/d/rds_engine_version.markdown new file mode 100644 index 00000000000..b3f4350027a --- /dev/null +++ b/website/docs/cdktf/typescript/d/rds_engine_version.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_engine_version" +description: |- + Information about an RDS engine version. +--- + + + +# Data Source: aws_rds_engine_version + +Information about an RDS engine version. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRdsEngineVersion } from "./.gen/providers/aws/data-aws-rds-engine-version"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRdsEngineVersion(this, "test", { + engine: "mysql", + preferredVersions: ["8.0.27", "8.0.26"], + }); + } +} + +``` + +### With `filter` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRdsEngineVersion } from "./.gen/providers/aws/data-aws-rds-engine-version"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRdsEngineVersion(this, "test", { + engine: "aurora-postgresql", + filter: [ + { + name: "engine-mode", + values: ["serverless"], + }, + ], + includeAll: true, + version: "10.14", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `engine` - (Required) DB engine. Engine values include `aurora`, `aurora-mysql`, `aurora-postgresql`, `docdb`, `mariadb`, `mysql`, `neptune`, `oracle-ee`, `oracle-se`, `oracle-se1`, `oracle-se2`, `postgres`, `sqlserver-ee`, `sqlserver-ex`, `sqlserver-se`, and `sqlserver-web`. +* `defaultOnly` - (Optional) When set to `true`, the default version for the specified `engine` or combination of `engine` and major `version` will be returned. Can be used to limit responses to a single version when they would otherwise fail for returning multiple versions. +* `filter` - (Optional) One or more name/value pairs to filter by. 
There are several valid keys; for a full reference, check out [describe-db-engine-versions in the AWS CLI reference](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/describe-db-engine-versions.html).
+* `includeAll` - (Optional) When set to `true`, the specified `version` or member of `preferredVersions` will be returned even if it is `deprecated`. Otherwise, only `available` versions will be returned.
+* `parameterGroupFamily` - (Optional) Name of a specific DB parameter group family. Examples of parameter group families are `mysql8.0`, `mariadb10.4`, and `postgres12`.
+* `preferredVersions` - (Optional) Ordered list of preferred engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned. If neither `version` nor `preferredVersions` is configured, the data source returns the default version for the engine.
+* `version` - (Optional) Version of the DB engine. For example, `5.7.22`, `10.1.34`, and `12.3`. If neither `version` nor `preferredVersions` is configured, the data source returns the default version for the engine.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `defaultCharacterSet` - The default character set for new instances of this engine version.
+* `engineDescription` - Description of the database engine.
+* `exportableLogTypes` - Set of log types that the database engine has available for export to CloudWatch Logs.
+* `status` - Status of the DB engine version, either `available` or `deprecated`.
+* `supportedCharacterSets` - Set of the character sets supported by this engine.
+* `supportedFeatureNames` - Set of features supported by the DB engine.
+* `supportedModes` - Set of the supported DB engine modes.
+* `supportedTimezones` - Set of the time zones supported by this engine.
+* `supportsGlobalDatabases` - Indicates whether you can use Aurora global databases with a specific DB engine version.
+* `supportsLogExportsToCloudwatch` - Indicates whether the engine version supports exporting the log types specified by `exportableLogTypes` to CloudWatch Logs.
+* `supportsParallelQuery` - Indicates whether you can use Aurora parallel query with a specific DB engine version.
+* `supportsReadReplica` - Indicates whether the database engine version supports read replicas.
+* `validUpgradeTargets` - Set of engine versions that this database engine version can be upgraded to.
+* `versionDescription` - Description of the database engine version.
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/rds_orderable_db_instance.markdown b/website/docs/cdktf/typescript/d/rds_orderable_db_instance.markdown
new file mode 100644
index 00000000000..4a3b4b444da
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/rds_orderable_db_instance.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_orderable_db_instance"
+description: |-
+  Information about RDS orderable DB instances.
+---
+
+
+# Data Source: aws_rds_orderable_db_instance
+
+Information about RDS orderable DB instances and valid parameter combinations.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRdsOrderableDbInstance } from "./.gen/providers/aws/data-aws-rds-orderable-db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRdsOrderableDbInstance(this, "test", {
+      engine: "mysql",
+      engineVersion: "5.7.22",
+      licenseModel: "general-public-license",
+      preferredInstanceClasses: ["db.r6.xlarge", "db.m4.large", "db.t3.small"],
+      storageType: "standard",
+    });
+  }
+}
+
+```
+
+Valid parameter combinations can also be found with `preferredEngineVersions` and/or `preferredInstanceClasses`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRdsOrderableDbInstance } from "./.gen/providers/aws/data-aws-rds-orderable-db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRdsOrderableDbInstance(this, "test", {
+      engine: "mysql",
+      licenseModel: "general-public-license",
+      preferredEngineVersions: ["5.6.35", "5.6.41", "5.6.44"],
+      preferredInstanceClasses: ["db.t2.small", "db.t3.medium", "db.t3.large"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `availabilityZoneGroup` - (Optional) Availability zone group.
+* `engine` - (Required) DB engine. Engine values include `aurora`, `aurora-mysql`, `aurora-postgresql`, `docdb`, `mariadb`, `mysql`, `neptune`, `oracle-ee`, `oracle-se`, `oracle-se1`, `oracle-se2`, `postgres`, `sqlserver-ee`, `sqlserver-ex`, `sqlserver-se`, and `sqlserver-web`.
+* `engineVersion` - (Optional) Version of the DB engine. If none is provided, the AWS-defined default version will be used.
+* `instanceClass` - (Optional) DB instance class. Examples of classes are `db.m3.2xlarge`, `db.t2.small`, and `db.m3.medium`.
+* `licenseModel` - (Optional) License model. Examples of license models are `general-public-license`, `bring-your-own-license`, and `amazon-license`.
+* `preferredInstanceClasses` - (Optional) Ordered list of preferred RDS DB instance classes. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+* `preferredEngineVersions` - (Optional) Ordered list of preferred RDS DB instance engine versions. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+* `storageType` - (Optional) Storage types. Examples of storage types are `standard`, `io1`, `gp2`, and `aurora`.
+* `supportsEnhancedMonitoring` - (Optional) Enable this to ensure a DB instance supports Enhanced Monitoring at intervals from 1 to 60 seconds.
+* `supportsGlobalDatabases` - (Optional) Enable this to ensure a DB instance supports Aurora global databases with a specific combination of other DB engine attributes.
+* `supportsIamDatabaseAuthentication` - (Optional) Enable this to ensure a DB instance supports IAM database authentication.
+* `supportsIops` - (Optional) Enable this to ensure a DB instance supports provisioned IOPS.
+* `supportsKerberosAuthentication` - (Optional) Enable this to ensure a DB instance supports Kerberos Authentication.
+* `supportsPerformanceInsights` - (Optional) Enable this to ensure a DB instance supports Performance Insights.
+* `supportsStorageAutoscaling` - (Optional) Enable this to ensure Amazon RDS can automatically scale storage for DB instances that use the specified DB instance class.
+* `supportsStorageEncryption` - (Optional) Enable this to ensure a DB instance supports encrypted storage.
+* `vpc` - (Optional) Boolean that indicates whether to show only VPC or non-VPC offerings.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availabilityZones` - Availability zones where the instance is available.
+* `maxIopsPerDbInstance` - Maximum total provisioned IOPS for a DB instance.
+* `maxIopsPerGib` - Maximum provisioned IOPS per GiB for a DB instance.
+* `maxStorageSize` - Maximum storage size for a DB instance.
+* `minIopsPerDbInstance` - Minimum total provisioned IOPS for a DB instance.
+* `minIopsPerGib` - Minimum provisioned IOPS per GiB for a DB instance.
+* `minStorageSize` - Minimum storage size for a DB instance.
+* `multiAzCapable` - Whether a DB instance is Multi-AZ capable.
+* `outpostCapable` - Whether a DB instance supports RDS on Outposts.
+* `readReplicaCapable` - Whether a DB instance can have a read replica.
+* `supportedEngineModes` - A list of the supported DB engine modes.
+* `supportedNetworkTypes` - The network types supported by the DB instance (`ipv4` or `dual`).
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/rds_reserved_instance_offering.html.markdown b/website/docs/cdktf/typescript/d/rds_reserved_instance_offering.html.markdown
new file mode 100644
index 00000000000..a154c828f5a
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/rds_reserved_instance_offering.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_reserved_instance_offering"
+description: |-
+  Information about a single RDS Reserved Instance Offering.
+---
+
+
+# Data Source: aws_rds_reserved_instance_offering
+
+Information about a single RDS Reserved Instance Offering.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRdsReservedInstanceOffering } from "./.gen/providers/aws/data-aws-rds-reserved-instance-offering";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRdsReservedInstanceOffering(this, "test", {
+      dbInstanceClass: "db.t2.micro",
+      duration: 31536000,
+      multiAz: false,
+      offeringType: "All Upfront",
+      productDescription: "mysql",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `dbInstanceClass` - (Required) DB instance class for the reserved DB instance.
+* `duration` - (Required) Duration of the reservation in years or seconds. Valid values are `1`, `3`, `31536000`, and `94608000`.
+* `multiAz` - (Required) Whether the reservation applies to Multi-AZ deployments.
+* `offeringType` - (Required) Offering type of this reserved DB instance.
Valid values are `No Upfront`, `Partial Upfront`, and `All Upfront`.
+* `productDescription` - (Required) Description of the reserved DB instance.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier for the reservation. Same as `offeringId`.
+* `currencyCode` - Currency code for the reserved DB instance.
+* `fixedPrice` - Fixed price charged for this reserved DB instance.
+* `offeringId` - Unique identifier for the reservation.
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/redshift_cluster.html.markdown b/website/docs/cdktf/typescript/d/redshift_cluster.html.markdown
new file mode 100644
index 00000000000..b62d90f6b15
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/redshift_cluster.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster"
+description: |-
+  Provides details about a specific Redshift cluster
+---
+
+
+# Data Source: aws_redshift_cluster
+
+Provides details about a specific Redshift cluster.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRedshiftCluster } from "./.gen/providers/aws/data-aws-redshift-cluster";
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsRedshiftCluster(this, "example", {
+      clusterIdentifier: "example-cluster",
+    });
+    new KinesisFirehoseDeliveryStream(this, "example_stream", {
+      destination: "redshift",
+      name: "terraform-kinesis-firehose-example-stream",
+      redshiftConfiguration: {
+        clusterJdbcurl:
+          "jdbc:redshift://${" +
+          example.endpoint +
+          "}/${" +
+          example.databaseName +
+          "}",
+        copyOptions: "delimiter '|'",
+        dataTableColumns: "example-col",
+        dataTableName: "example-table",
+        password: "Exampl3Pass",
+        roleArn: firehoseRole.arn,
+        s3Configuration: {
+          bucketArn: bucket.arn,
+          bufferInterval: 400,
+          bufferSize: 10,
+          compressionFormat: "GZIP",
+          roleArn: firehoseRole.arn,
+        },
+        username: "exampleuser",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `clusterIdentifier` - (Required) Cluster identifier
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of cluster.
+* `allowVersionUpgrade` - Whether major version upgrades can be applied during maintenance period
+* `automatedSnapshotRetentionPeriod` - The backup retention period
+* `aquaConfigurationStatus` - The value represents how the cluster is configured to use AQUA.
+* `availabilityZone` - Availability zone of the cluster
+* `availabilityZoneRelocationEnabled` - Indicates whether the cluster is able to be relocated to another availability zone.
+* `bucketName` - Name of the S3 bucket where the log files are to be stored
+* `clusterIdentifier` - Cluster identifier
+* `clusterNodes` - Nodes in the cluster.
Cluster node blocks are documented below.
+* `clusterParameterGroupName` - The name of the parameter group to be associated with this cluster
+* `clusterPublicKey` - Public key for the cluster
+* `clusterRevisionNumber` - The cluster revision number
+* `clusterSubnetGroupName` - The name of a cluster subnet group to be associated with this cluster
+* `clusterType` - Cluster type
+* `clusterNamespaceArn` - The namespace Amazon Resource Name (ARN) of the cluster
+* `databaseName` - Name of the default database in the cluster
+* `defaultIamRoleArn` - The ARN for the IAM role that was set as default for the cluster when the cluster was created.
+* `elasticIp` - Elastic IP of the cluster
+* `enableLogging` - Whether cluster logging is enabled
+* `encrypted` - Whether the cluster data is encrypted
+* `endpoint` - Cluster endpoint
+* `enhancedVpcRouting` - Whether enhanced VPC routing is enabled
+* `iamRoles` - IAM roles associated with the cluster
+* `kmsKeyId` - KMS encryption key associated with the cluster
+* `masterUsername` - Username for the master DB user
+* `nodeType` - Cluster node type
+* `numberOfNodes` - Number of nodes in the cluster
+* `maintenanceTrackName` - The name of the maintenance track for the restored cluster.
+* `manualSnapshotRetentionPeriod` - The default number of days to retain a manual snapshot.
+* `port` - Port the cluster responds on
+* `preferredMaintenanceWindow` - The maintenance window
+* `publiclyAccessible` - Whether the cluster is publicly accessible
+* `s3KeyPrefix` - Folder inside the S3 bucket where the log files are stored
+* `logDestinationType` - The log destination type.
+* `logExports` - Collection of exported log types. Log types include the connection log, user log, and user activity log.
+* `tags` - Tags associated with the cluster
+* `vpcId` - VPC ID associated with the cluster
+* `vpcSecurityGroupIds` - The VPC security group IDs associated with the cluster
+
+Cluster nodes (for `clusterNodes`) support the following attributes:
+
+* `nodeRole` - Whether the node is a leader node or a compute node
+* `privateIpAddress` - Private IP address of a node within a cluster
+* `publicIpAddress` - Public IP address of a node within a cluster
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/redshift_cluster_credentials.html.markdown b/website/docs/cdktf/typescript/d/redshift_cluster_credentials.html.markdown
new file mode 100644
index 00000000000..a3984549f2c
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/redshift_cluster_credentials.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster_credentials"
+description: |-
+  Provides Redshift cluster credentials
+---
+
+
+# Data Source: aws_redshift_cluster_credentials
+
+Provides Redshift cluster temporary credentials.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRedshiftClusterCredentials } from "./.gen/providers/aws/data-aws-redshift-cluster-credentials";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRedshiftClusterCredentials(this, "example", {
+      clusterIdentifier: Token.asString(
+        awsRedshiftClusterExample.clusterIdentifier
+      ),
+      dbUser: Token.asString(awsRedshiftClusterExample.masterUsername),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `autoCreate` - (Optional) Create a database user with the name specified for the user named in `dbUser` if one does not exist.
+* `clusterIdentifier` - (Required) Unique identifier of the cluster that contains the database for which you are requesting credentials.
+* `dbName` - (Optional) Name of a database that `dbUser` is authorized to log on to. If `dbName` is not specified, `dbUser` can log on to any existing database.
+* `dbUser` - (Required) Name of a database user. If a user name matching `dbUser` exists in the database, the temporary user credentials have the same permissions as the existing user. If `dbUser` doesn't exist in the database and `autoCreate` is `true`, a new user is created using the value for `dbUser` with `public` permissions. If a database user matching the value for `dbUser` doesn't exist and `autoCreate` is `false`, then the command succeeds but the connection attempt will fail because the user doesn't exist in the database.
+* `dbGroups` - (Optional) List of the names of existing database groups that the user named in `dbUser` will join for the current session, in addition to any group memberships for an existing user. If not specified, a new user is added only to `public`.
+* `durationSeconds` - (Optional) The number of seconds until the returned temporary password expires. Valid values are between `900` and `3600`. Default value is `900`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `dbPassword` - Temporary password that authorizes the user name returned by `dbUser` to log on to the database `dbName`.
+* `expiration` - Date and time the password in `dbPassword` expires.
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/redshift_orderable_cluster.html.markdown b/website/docs/cdktf/typescript/d/redshift_orderable_cluster.html.markdown
new file mode 100644
index 00000000000..5e8e9b5dcc7
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/redshift_orderable_cluster.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_orderable_cluster"
+description: |-
+  Information about Redshift orderable clusters.
+---
+
+
+# Data Source: aws_redshift_orderable_cluster
+
+Information about Redshift Orderable Clusters and valid parameter combinations.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRedshiftOrderableCluster } from "./.gen/providers/aws/data-aws-redshift-orderable-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRedshiftOrderableCluster(this, "test", {
+      clusterType: "multi-node",
+      preferredNodeTypes: ["dc2.large", "ds2.xlarge"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `clusterType` - (Optional) Redshift Cluster type. E.g., `multi-node` or `single-node`
+* `clusterVersion` - (Optional) Redshift Cluster version. E.g., `1.0`
+* `nodeType` - (Optional) Redshift Cluster node type. E.g., `dc2.8xlarge`
+* `preferredNodeTypes` - (Optional) Ordered list of preferred Redshift Cluster node types. The first match in this list will be returned. If no preferred matches are found and the original search returned more than one result, an error is returned.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `availabilityZones` - List of Availability Zone names where the Redshift Cluster is available.
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/redshift_service_account.html.markdown b/website/docs/cdktf/typescript/d/redshift_service_account.html.markdown
new file mode 100644
index 00000000000..c4ed7bef6fb
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/redshift_service_account.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_service_account"
+description: |-
+  Get AWS Redshift Service Account for storing audit data in S3.
+---
+
+
+# Data Source: aws_redshift_service_account
+
+Use this data source to get the Account ID of the [AWS Redshift Service Account](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging)
+in a given region for the purpose of allowing Redshift to store audit data in S3.
+
+~> **Note:** AWS documentation [states that](https://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-bucket-permissions) a [service principal name](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-services) should be used instead of an AWS account ID in any relevant IAM policy.
+The `awsRedshiftServiceAccount` data source has been deprecated and will be removed in a future version.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsRedshiftServiceAccount } from "./.gen/providers/aws/data-aws-redshift-service-account"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "tf-redshift-logging-test-bucket", + forceDestroy: true, + }); + const main = new DataAwsRedshiftServiceAccount(this, "main", {}); + const allowAuditLogging = new DataAwsIamPolicyDocument( + this, + "allow_audit_logging", + { + statement: [ + { + actions: ["s3:PutObject"], + effect: "Allow", + principals: [ + { + identifiers: [Token.asString(main.arn)], + type: "AWS", + }, + ], + resources: ["${" + bucket.arn + "}/*"], + sid: "Put bucket policy needed for audit logging", + }, + { + actions: ["s3:GetBucketAcl"], + effect: "Allow", + principals: [ + { + identifiers: [Token.asString(main.arn)], + type: "AWS", + }, + ], + resources: Token.asList(dataAwsS3BucketBucket.arn), + sid: "Get bucket policy needed for audit logging", + }, + ], + } + ); + const awsS3BucketPolicyAllowAuditLogging = new S3BucketPolicy( + this, + "allow_audit_logging_3", + { + bucket: bucket.id, + policy: Token.asString(allowAuditLogging.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyAllowAuditLogging.overrideLogicalId("allow_audit_logging"); + } +} + +``` + +## Argument Reference + +* `region` - (Optional) Name of the region whose AWS Redshift account ID is desired. +Defaults to the region from the AWS provider configuration. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the AWS Redshift service account in the selected region. +* `arn` - ARN of the AWS Redshift service account in the selected region. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/redshift_subnet_group.html.markdown b/website/docs/cdktf/typescript/d/redshift_subnet_group.html.markdown new file mode 100644 index 00000000000..6572d12acc3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/redshift_subnet_group.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "Redshift" +layout: "aws" +page_title: "AWS: aws_redshift_subnet_group" +description: |- + Provides details about a specific redshift subnet_group +--- + + + +# Data Source: aws_redshift_subnet_group + +Provides details about a specific redshift subnet group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsRedshiftSubnetGroup } from "./.gen/providers/aws/data-aws-redshift-subnet-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRedshiftSubnetGroup(this, "example", {
+      name: Token.asString(awsRedshiftSubnetGroupExample.name),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the cluster subnet group for which information is requested.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Redshift Subnet Group.
+* `description` - Description of the Redshift Subnet group.
+* `id` - Redshift Subnet group name.
+* `subnetIds` - An array of VPC subnet IDs.
+* `tags` - Tags associated with the Subnet Group
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/redshiftserverless_credentials.html.markdown b/website/docs/cdktf/typescript/d/redshiftserverless_credentials.html.markdown
new file mode 100644
index 00000000000..b9369d1c60a
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/redshiftserverless_credentials.html.markdown
@@ -0,0 +1,55 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_credentials"
+description: |-
+  Provides Redshift Serverless credentials
+---
+
+
+# Data Source: aws_redshiftserverless_credentials
+
+Provides Redshift Serverless temporary credentials for a workgroup.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRedshiftserverlessCredentials } from "./.gen/providers/aws/data-aws-redshiftserverless-credentials";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRedshiftserverlessCredentials(this, "example", {
+      workgroupName: Token.asString(
+        awsRedshiftserverlessWorkgroupExample.workgroupName
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `workgroupName` - (Required) The name of the workgroup associated with the database.
+* `dbName` - (Optional) The name of the database to get temporary authorization to log on to.
+* `durationSeconds` - (Optional) The number of seconds until the returned temporary password expires. The minimum is 900 seconds, and the maximum is 3600 seconds.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `dbPassword` - Temporary password that authorizes the user name returned by `dbUser` to log on to the database `dbName`.
+* `dbUser` - A database user name that is authorized to log on to the database `dbName` using the password `dbPassword`. If the specified `dbUser` exists in the database, the new user name has the same database privileges as the user named in `dbUser`. By default, the user is added to PUBLIC.
+* `expiration` - Date and time the password in `dbPassword` expires.
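+
+The temporary credentials can be consumed elsewhere in a stack, for example as stack outputs. The following is a minimal sketch (not generated by `cdktf convert`); the workgroup name is a hypothetical placeholder, and the password output is marked sensitive because `dbPassword` is a secret:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsRedshiftserverlessCredentials } from "./.gen/providers/aws/data-aws-redshiftserverless-credentials";
+class CredentialsOutputStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const creds = new DataAwsRedshiftserverlessCredentials(this, "example", {
+      workgroupName: "example-workgroup", // hypothetical workgroup name
+    });
+    // dbPassword is a secret, so the output must be marked sensitive.
+    new TerraformOutput(this, "db_password", {
+      value: creds.dbPassword,
+      sensitive: true,
+    });
+    new TerraformOutput(this, "db_user", {
+      value: creds.dbUser,
+    });
+  }
+}
+```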
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/redshiftserverless_namespace.html.markdown b/website/docs/cdktf/typescript/d/redshiftserverless_namespace.html.markdown new file mode 100644 index 00000000000..77dd3444bc3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/redshiftserverless_namespace.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "Redshift Serverless" +layout: "aws" +page_title: "AWS: aws_redshiftserverless_namespace" +description: |- + Terraform data source for managing an AWS Redshift Serverless Namespace. +--- + + + +# Data Source: aws_redshiftserverless_namespace + +Terraform data source for managing an AWS Redshift Serverless Namespace. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRedshiftserverlessNamespace } from "./.gen/providers/aws/data-aws-redshiftserverless-namespace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsRedshiftserverlessNamespace(this, "example", { + namespaceName: "example-namespace", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `namespaceName` - (Required) The name of the namespace. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `adminUsername` - The username of the administrator for the first database created in the namespace. +* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Namespace. +* `dbName` - The name of the first database created in the namespace. +* `defaultIamRoleArn` - The Amazon Resource Name (ARN) of the IAM role to set as a default in the namespace. When specifying `defaultIamRoleArn`, it also must be part of `iamRoles`. +* `iamRoles` - A list of IAM roles to associate with the namespace. +* `kmsKeyId` - The ARN of the Amazon Web Services Key Management Service key used to encrypt your data. +* `logExports` - The types of logs the namespace can export. Available export types are `userlog`, `connectionlog`, and `useractivitylog`. +* `namespaceId` - The Redshift Namespace ID. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/redshiftserverless_workgroup.html.markdown b/website/docs/cdktf/typescript/d/redshiftserverless_workgroup.html.markdown new file mode 100644 index 00000000000..4a996e91725 --- /dev/null +++ b/website/docs/cdktf/typescript/d/redshiftserverless_workgroup.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Redshift Serverless" +layout: "aws" +page_title: "AWS: aws_redshiftserverless_workgroup" +description: |- + Terraform data source for managing an AWS Redshift Serverless Workgroup. +--- + + + +# Data Source: aws_redshiftserverless_workgroup + +Terraform data source for managing an AWS Redshift Serverless Workgroup. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsRedshiftserverlessWorkgroup } from "./.gen/providers/aws/data-aws-redshiftserverless-workgroup";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRedshiftserverlessWorkgroup(this, "example", {
+      workgroupName: Token.asString(
+        awsRedshiftserverlessWorkgroupExample.workgroupName
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `workgroupName` - (Required) The name of the workgroup associated with the database.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Workgroup.
+* `id` - The Redshift Workgroup Name.
+* `endpoint` - The endpoint that is created from the workgroup. See `endpoint` below.
+* `enhancedVpcRouting` - The value that specifies whether to turn on enhanced virtual private cloud (VPC) routing, which forces Amazon Redshift Serverless to route traffic through your VPC instead of over the internet.
+* `publiclyAccessible` - A value that specifies whether the workgroup can be accessed from a public network.
+* `securityGroupIds` - An array of security group IDs to associate with the workgroup.
+* `subnetIds` - An array of VPC subnet IDs to associate with the workgroup. When set, must contain at least three subnets spanning three Availability Zones. A minimum number of IP addresses is required and scales with the Base Capacity. For more information, see the following [AWS document](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-known-issues.html).
+* `workgroupId` - The Redshift Workgroup ID.
+
+### Endpoint
+
+* `address` - The DNS address of the VPC endpoint.
+* `port` - The port that Amazon Redshift Serverless listens on.
+* `vpcEndpoint` - The VPC endpoint of the Redshift Serverless workgroup. See `VPC Endpoint` below.
+
+#### VPC Endpoint
+
+* `vpcEndpointId` - The unique identifier of the VPC endpoint.
+* `vpcId` - The ID of the VPC that the endpoint is associated with.
+* `networkInterface` - The network interfaces of the endpoint. See `Network Interface` below.
+
+##### Network Interface
+
+* `availabilityZone` - The Availability Zone.
+* `networkInterfaceId` - The unique identifier of the network interface.
+* `privateIpAddress` - The IPv4 address of the network interface within the subnet.
+* `subnetId` - The unique identifier of the subnet.
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/region.html.markdown b/website/docs/cdktf/typescript/d/region.html.markdown
new file mode 100644
index 00000000000..690adaaa2b4
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/region.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "Meta Data Sources"
+layout: "aws"
+page_title: "AWS: aws_region"
+description: |-
+  Provides details about a specific service region
+---
+
+
+# Data Source: aws_region
+
+`awsRegion` provides details about a specific AWS region.
+
+As well as validating a given region name, this data source can be used to
+discover the name of the region configured within the provider. The latter
+can be useful in a child module which is inheriting an AWS provider
+configuration from its parent module.
+
+## Example Usage
+
+The following example shows how the data source might be used to obtain
+the name of the AWS region configured on the provider.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRegion(this, "current", {});
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+regions. The given filters must match exactly one region whose data will be
+exported as attributes.
+
+* `name` - (Optional) Full name of the region to select.
+
+* `endpoint` - (Optional) EC2 endpoint of the region to select.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `name` - Name of the selected region.
+
+* `endpoint` - EC2 endpoint for the selected region.
+
+* `description` - Region's description in this format: "Location (Region name)".
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/regions.html.markdown b/website/docs/cdktf/typescript/d/regions.html.markdown
new file mode 100644
index 00000000000..b342ccbd86f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/regions.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "Meta Data Sources"
+layout: "aws"
+page_title: "AWS: aws_regions"
+description: |-
+  Provides information about AWS Regions.
+---
+
+
+# Data Source: aws_regions
+
+Provides information about AWS Regions. Can be used to filter regions, e.g., by opt-in status, or to list only the regions enabled for the current account. To get details like the endpoint and description of each region, the data source can be combined with the [`awsRegion` data source](/docs/providers/aws/d/region.html).
+
+## Example Usage
+
+Enabled AWS Regions:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRegions } from "./.gen/providers/aws/data-aws-regions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRegions(this, "current", {});
+  }
+}
+
+```
+
+All the regions, regardless of availability:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRegions } from "./.gen/providers/aws/data-aws-regions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRegions(this, "current", {
+      allRegions: true,
+    });
+  }
+}
+
+```
+
+To see regions that are filtered by `not-opted-in`, the `allRegions` argument needs to be set to `true` or no results will be returned.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRegions } from "./.gen/providers/aws/data-aws-regions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRegions(this, "current", {
+      allRegions: true,
+      filter: [
+        {
+          name: "opt-in-status",
+          values: ["not-opted-in"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `allRegions` - (Optional) If true, the data source will query all regions regardless of availability.
+
+* `filter` - (Optional) Configuration block(s) to use as filters. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [describe-regions AWS CLI Reference][1].
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the current partition (e.g., `aws` in AWS Commercial, `aws-cn` in AWS China).
+* `names` - Names of regions that meet the criteria.
+
+[1]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-regions.html
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/resourcegroupstaggingapi_resources.html.markdown b/website/docs/cdktf/typescript/d/resourcegroupstaggingapi_resources.html.markdown
new file mode 100644
index 00000000000..898db5b5723
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/resourcegroupstaggingapi_resources.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "Resource Groups Tagging"
+layout: "aws"
+page_title: "AWS: aws_resourcegroupstaggingapi_resources"
+description: |-
+  Provides details about resource tagging.
+---
+
+
+# Data Source: aws_resourcegroupstaggingapi_resources
+
+Provides details about resource tagging.
+
+## Example Usage
+
+### Get All Resource Tag Mappings
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsResourcegroupstaggingapiResources } from "./.gen/providers/aws/data-aws-resourcegroupstaggingapi-resources";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsResourcegroupstaggingapiResources(this, "test", {});
+  }
+}
+
+```
+
+### Filter By Tag Key and Value
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsResourcegroupstaggingapiResources } from "./.gen/providers/aws/data-aws-resourcegroupstaggingapi-resources";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsResourcegroupstaggingapiResources(this, "test", {
+      tagFilter: [
+        {
+          key: "tag-key",
+          values: ["tag-value-1", "tag-value-2"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Filter By Resource Type
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsResourcegroupstaggingapiResources } from "./.gen/providers/aws/data-aws-resourcegroupstaggingapi-resources";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsResourcegroupstaggingapiResources(this, "test", {
+      resourceTypeFilters: ["ec2:instance"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `excludeCompliantResources` - (Optional) Specifies whether to exclude resources that are compliant with the tag policy. You can use this parameter only if the `includeComplianceDetails` argument is also set to `true`.
+* `includeComplianceDetails` - (Optional) Specifies whether to include details regarding the compliance with the effective tag policy.
+* `tagFilter` - (Optional) Specifies a list of Tag Filters (keys and values) to restrict the output to only those resources that have the specified tag and, if included, the specified value. See [Tag Filter](#tag-filter) below. Conflicts with `resourceArnList`.
+* `resourceTypeFilters` - (Optional) Constraints on the resources that you want returned. The format of each resource type is `service:resourceType`. For example, specifying a resource type of `ec2` returns all Amazon EC2 resources (which includes EC2 instances). Specifying a resource type of `ec2:instance` returns only EC2 instances.
+* `resourceArnList` - (Optional) Specifies a list of ARNs of resources for which you want to retrieve tag data. Conflicts with `tagFilter`.
+
+### Tag Filter
+
+If you do specify `tagFilter`, the response returns only those resources that are currently associated with the specified tag.
+If you don't specify a `tagFilter`, the response includes all resources that were ever associated with tags. Resources that currently don't have associated tags are shown with an empty tag set.
+
+A `tagFilter` block supports the following arguments:
+
+* `key` - (Required) One part of a key-value pair that makes up a tag.
+* `values` - (Optional) Optional part of a key-value pair that makes up a tag.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `resourceTagMappingList` - List of objects matching the search criteria.
+  * `complianceDetails` - List of objects with information that shows whether a resource is compliant with the effective tag policy, including details on any noncompliant tag keys.
+    * `complianceStatus` - Whether the resource is compliant.
+    * `keysWithNoncompliantValues` - Set of tag keys with non-compliant tag values.
+    * `nonCompliantKeys` - Set of non-compliant tag keys.
+  * `resourceArn` - ARN of the resource.
+  * `tags` - Map of tags assigned to the resource.
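+
+Because `resourceTagMappingList` is a computed complex list, individual elements are read through the generated accessor methods rather than plain array indexing. The following is a minimal sketch (not generated by `cdktf convert`); it assumes the generated bindings expose the list via `.get()`, as CDKTF complex computed lists typically do:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsResourcegroupstaggingapiResources } from "./.gen/providers/aws/data-aws-resourcegroupstaggingapi-resources";
+class TaggedResourcesStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const tagged = new DataAwsResourcegroupstaggingapiResources(this, "test", {
+      resourceTypeFilters: ["ec2:instance"],
+    });
+    // Complex computed lists are indexed with .get(); the value resolves at apply time.
+    new TerraformOutput(this, "first_resource_arn", {
+      value: tagged.resourceTagMappingList.get(0).resourceArn,
+    });
+  }
+}
+```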
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route.html.markdown b/website/docs/cdktf/typescript/d/route.html.markdown
new file mode 100644
index 00000000000..76b168ef479
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route"
+description: |-
+  Provides details about a specific Route
+---
+
+
+# Data Source: aws_route
+
+`awsRoute` provides details about a specific Route.
+
+This data source can prove useful when finding the resource associated with a CIDR. For example, finding the peering connection associated with a CIDR value.
+
+## Example Usage
+
+The following example shows how one might use a CIDR value to find a network interface ID and use it to create a data source for that network interface.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformVariable, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsNetworkInterface } from "./.gen/providers/aws/data-aws-network-interface";
+import { DataAwsRoute } from "./.gen/providers/aws/data-aws-route";
+import { DataAwsRouteTable } from "./.gen/providers/aws/data-aws-route-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const subnetId = new TerraformVariable(this, "subnet_id", {});
+    const route = new DataAwsRoute(this, "route", {
+      destinationCidrBlock: "10.0.1.0/24",
+      routeTableId: selected.id,
+    });
+    new DataAwsRouteTable(this, "selected", {
+      subnetId: subnetId.stringValue,
+    });
+    new DataAwsNetworkInterface(this, "interface", {
+      id: Token.asString(route.networkInterfaceId),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available Routes in the current region. The given filters must match exactly one Route, whose data will be exported as attributes.
+
+The following arguments are required:
+
+* `routeTableId` - (Required) ID of the specific Route Table containing the Route entry.
+
+The following arguments are optional:
+
+* `carrierGatewayId` - (Optional) EC2 Carrier Gateway ID of the Route belonging to the Route Table.
+* `coreNetworkArn` - (Optional) Core network ARN of the Route belonging to the Route Table.
+* `destinationCidrBlock` - (Optional) CIDR block of the Route belonging to the Route Table.
+* `destinationIpv6CidrBlock` - (Optional) IPv6 CIDR block of the Route belonging to the Route Table.
+* `destinationPrefixListId` - (Optional) ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the Route belonging to the Route Table.
+* `egressOnlyGatewayId` - (Optional) Egress Only Gateway ID of the Route belonging to the Route Table.
+* `gatewayId` - (Optional) Gateway ID of the Route belonging to the Route Table.
+* `instanceId` - (Optional) Instance ID of the Route belonging to the Route Table.
+* `localGatewayId` - (Optional) Local Gateway ID of the Route belonging to the Route Table.
+* `natGatewayId` - (Optional) NAT Gateway ID of the Route belonging to the Route Table.
+* `networkInterfaceId` - (Optional) Network Interface ID of the Route belonging to the Route Table.
+* `transitGatewayId` - (Optional) EC2 Transit Gateway ID of the Route belonging to the Route Table.
+* `vpcPeeringConnectionId` - (Optional) VPC Peering Connection ID of the Route belonging to the Route Table.
+
+## Attribute Reference
+
+All of the argument attributes are also exported as result attributes when there is data available. For example, the `vpcPeeringConnectionId` field will be empty when the route is attached to a Network Interface.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_delegation_set.html.markdown b/website/docs/cdktf/typescript/d/route53_delegation_set.html.markdown
new file mode 100644
index 00000000000..3020e7b6953
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_delegation_set.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_delegation_set"
+description: |-
+  Provides details about a specific Route 53 Delegation Set
+---
+
+
+# Data Source: aws_route53_delegation_set
+
+`awsRoute53DelegationSet` provides details about a specific Route 53 Delegation Set.
+
+This data source allows you to find a list of name servers associated with a specific delegation set.
+
+## Example Usage
+
+The following example shows how to get a delegation set from its ID.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53DelegationSet } from "./.gen/providers/aws/data-aws-route53-delegation-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53DelegationSet(this, "dset", {
+      id: "MQWGHCBFAKEID",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `id` - (Required) Delegation set ID.
+
+The following attributes are additionally exported:
+
+* `arn` - ARN of the Delegation Set.
+* `callerReference` - Caller Reference of the delegation set.
+* `nameServers` - List of DNS name servers for the delegation set.
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_endpoint.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_endpoint.html.markdown
new file mode 100644
index 00000000000..e6095402e3f
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_endpoint.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_endpoint"
+description: |-
+  Provides details about a specific Route 53 Resolver Endpoint
+---
+
+
+# Data Source: aws_route53_resolver_endpoint
+
+`awsRoute53ResolverEndpoint` provides details about a specific Route53 Resolver Endpoint.
+
+This data source allows you to find a list of IP addresses associated with a specific Route53 Resolver Endpoint.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverEndpoint } from "./.gen/providers/aws/data-aws-route53-resolver-endpoint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverEndpoint(this, "example", {
+      resolverEndpointId: "rslvr-in-1abc2345ef678g91h",
+    });
+  }
+}
+
+```
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverEndpoint } from "./.gen/providers/aws/data-aws-route53-resolver-endpoint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverEndpoint(this, "example", {
+      filter: [
+        {
+          name: "NAME",
+          values: ["MyResolverExampleName"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `resolverEndpointId` - (Optional) ID of the Route53 Resolver Endpoint.
+* `filter` - (Optional) One or more name/value pairs to use as filters. There are
+several valid keys; for a full reference, check out
+[Route53resolver Filter value in the AWS API reference][1].
+
+In addition to all arguments above, the following attributes are exported:
+
+* `arn` - Computed ARN of the Route53 Resolver Endpoint.
+* `direction` - Direction of the queries to or from the Resolver Endpoint.
+* `ipAddresses` - List of IP addresses that have been associated with the Resolver Endpoint.
+* `status` - Current status of the Resolver Endpoint.
+* `vpcId` - ID of the Host VPC that the Resolver Endpoint resides in.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_Filter.html
+
 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_firewall_config.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_firewall_config.html.markdown
new file mode 100644
index 00000000000..4a6c3895bcb
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_firewall_config.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_config"
+description: |-
+  Provides details about a specific Route 53 Resolver DNS Firewall config.
+---
+
+
+# Data Source: aws_route53_resolver_firewall_config
+
+`awsRoute53ResolverFirewallConfig` provides details about a specific Route 53 Resolver DNS Firewall config.
+
+This data source allows you to find details about a specific Route 53 Resolver DNS Firewall config.
+
+## Example Usage
+
+The following example shows how to get a firewall config using the VPC ID.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverFirewallConfig } from "./.gen/providers/aws/data-aws-route53-resolver-firewall-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverFirewallConfig(this, "example", {
+      resourceId: "vpc-exampleid",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `resourceId` - (Required) The ID of the VPC from Amazon VPC that the configuration is for.
+
+The following attributes are additionally exported:
+
+* `firewallFailOpen` - Determines how DNS Firewall operates during failures, for example when all traffic that is sent to DNS Firewall fails to receive a reply.
+* `id` - The ID of the firewall configuration.
+* `ownerId` - The Amazon Web Services account ID of the owner of the VPC that this firewall configuration applies to.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_firewall_domain_list.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_firewall_domain_list.html.markdown
new file mode 100644
index 00000000000..5db16615393
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_firewall_domain_list.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_domain_list"
+description: |-
+  Retrieves the specified firewall domain list.
+---
+
+
+
+# Data Source: aws_route53_resolver_firewall_domain_list
+
+`awsRoute53ResolverFirewallDomainList` retrieves the specified firewall domain list.
+
+This data source allows you to retrieve details about a specific Route 53 Resolver DNS Firewall domain list.
+
+## Example Usage
+
+The following example shows how to get a firewall domain list from its ID.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverFirewallDomainList } from "./.gen/providers/aws/data-aws-route53-resolver-firewall-domain-list";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverFirewallDomainList(this, "example", {
+      firewallDomainListId: "rslvr-fdl-example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `firewallDomainListId` - (Required) The ID of the domain list.
+
+The following attributes are additionally exported:
+
+* `arn` - The Amazon Resource Name (ARN) of the firewall domain list.
+* `creationTime` - The date and time that the domain list was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creatorRequestId` - A unique string defined by you to identify the request.
+* `domainCount` - The number of domain names that are specified in the domain list.
+* `name` - The name of the domain list.
+* `managedOwnerName` - The owner of the list, used only for lists that are not managed by you.
+* `modificationTime` - The date and time that the domain list was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `status` - The status of the domain list.
+* `statusMessage` - Additional information about the status of the list, if available.
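+
+As a minimal, hand-written sketch (not produced by `cdktf convert`), the attributes above can be consumed like any other CDKTF token, for example by surfacing them as stack outputs:
+
+```typescript
+// Hand-written sketch: assumes the same generated provider bindings as the
+// example above and simply exposes two documented attributes as outputs.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsRoute53ResolverFirewallDomainList } from "./.gen/providers/aws/data-aws-route53-resolver-firewall-domain-list";
+class MyDomainListOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const list = new DataAwsRoute53ResolverFirewallDomainList(this, "example", {
+      firewallDomainListId: "rslvr-fdl-example",
+    });
+    // domainCount and status are read-only attributes documented above.
+    new TerraformOutput(this, "domain_count", { value: list.domainCount });
+    new TerraformOutput(this, "domain_list_status", { value: list.status });
+  }
+}
+```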
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_firewall_rule_group.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_firewall_rule_group.html.markdown
new file mode 100644
index 00000000000..fc3fff71df0
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_firewall_rule_group.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule_group"
+description: |-
+  Retrieves the specified firewall rule group.
+---
+
+
+
+# Data Source: aws_route53_resolver_firewall_rule_group
+
+`awsRoute53ResolverFirewallRuleGroup` retrieves the specified firewall rule group.
+
+This data source allows you to retrieve details about a specific Route 53 Resolver DNS Firewall rule group.
+
+## Example Usage
+
+The following example shows how to get a firewall rule group from its ID.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverFirewallRuleGroup } from "./.gen/providers/aws/data-aws-route53-resolver-firewall-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverFirewallRuleGroup(this, "example", {
+      firewallRuleGroupId: "rslvr-frg-example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `firewallRuleGroupId` - (Required) The ID of the rule group.
+
+The following attributes are additionally exported:
+
+* `arn` - The ARN (Amazon Resource Name) of the rule group.
+* `creationTime` - The date and time that the rule group was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creatorRequestId` - A unique string defined by you to identify the request.
+* `name` - The name of the rule group.
+* `modificationTime` - The date and time that the rule group was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `ownerId` - The Amazon Web Services account ID for the account that created the rule group. When a rule group is shared with your account, this is the account that has shared the rule group with you.
+* `ruleCount` - The number of rules in the rule group.
+* `shareStatus` - Whether the rule group is shared with other Amazon Web Services accounts, or was shared with the current account by another Amazon Web Services account.
+* `status` - The status of the rule group.
+* `statusMessage` - Additional information about the status of the rule group, if available.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_firewall_rule_group_association.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_firewall_rule_group_association.html.markdown
new file mode 100644
index 00000000000..6826ce4ff8a
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_firewall_rule_group_association.html.markdown
@@ -0,0 +1,60 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule_group_association"
+description: |-
+  Retrieves the specified firewall rule group association.
+---
+
+
+
+# Data Source: aws_route53_resolver_firewall_rule_group_association
+
+`awsRoute53ResolverFirewallRuleGroupAssociation` retrieves the specified firewall rule group association.
+
+This data source allows you to retrieve details about a specific Route 53 Resolver DNS Firewall rule group association.
+
+## Example Usage
+
+The following example shows how to get a firewall rule group association from its ID.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverFirewallRuleGroupAssociation } from "./.gen/providers/aws/data-aws-route53-resolver-firewall-rule-group-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverFirewallRuleGroupAssociation(this, "example", {
+      firewallRuleGroupAssociationId: "rslvr-frgassoc-example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `firewallRuleGroupAssociationId` - (Required) The identifier for the association.
+
+The following attributes are additionally exported:
+
+* `arn` - The Amazon Resource Name (ARN) of the firewall rule group association.
+* `creationTime` - The date and time that the association was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creatorRequestId` - A unique string defined by you to identify the request.
+* `firewallRuleGroupId` - The unique identifier of the firewall rule group.
+* `managedOwnerName` - The owner of the association, used only for associations that are not managed by you.
+* `modificationTime` - The date and time that the association was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `mutationProtection` - If enabled, this setting disallows modification or removal of the association, to help prevent accidentally altering DNS firewall protections.
+* `name` - The name of the association.
+* `priority` - The setting that determines the processing order of the rule group among the rule groups that are associated with a single VPC.
+* `status` - The current status of the association.
+* `statusMessage` - Additional information about the status of the response, if available.
+* `vpcId` - The unique identifier of the VPC that is associated with the rule group.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_firewall_rules.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_firewall_rules.html.markdown
new file mode 100644
index 00000000000..c05fe98278b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_firewall_rules.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rules"
+description: |-
+  Provides details about rules in a specific Route53 Resolver Firewall rule group.
+---
+
+
+
+# Data Source: aws_route53_resolver_firewall_rules
+
+`awsRoute53ResolverFirewallRules` provides details about rules in a specific Route53 Resolver Firewall rule group.
+
+## Example Usage
+
+The following example shows how to get Route53 Resolver Firewall rules based on the ID of the associated firewall rule group.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverFirewallRules } from "./.gen/providers/aws/data-aws-route53-resolver-firewall-rules";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverFirewallRules(this, "example", {
+      firewallRuleGroupId: Token.asString(
+        awsRoute53ResolverFirewallRuleGroupExample.id
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the firewall rules in the given rule group.
+
+* `firewallRuleGroupId` - (Required) The unique identifier of the firewall rule group that you want to retrieve the rules for.
+* `action` - (Optional) The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list.
+* `priority` - (Optional) The setting that determines the processing order of the rules in a rule group.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `firewallRules` - List with information about the firewall rules. See details below.
+
+### firewallRules
+
+* `blockOverrideDnsType` - The DNS record's type.
+* `blockOverrideDomain` - The custom DNS record to send back in response to the query.
+* `blockOverrideTtl` - The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record.
+* `blockResponse` - The way that you want DNS Firewall to block the request.
+* `creationTime` - The date and time that the rule was created, in Unix time format and Coordinated Universal Time (UTC).
+* `creatorRequestId` - A unique string defined by you to identify the request.
+* `firewallDomainListId` - The ID of the domain list that's used in the rule.
+* `modificationTime` - The date and time that the rule was last modified, in Unix time format and Coordinated Universal Time (UTC).
+* `name` - The name of the rule.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_query_log_config.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_query_log_config.html.markdown
new file mode 100644
index 00000000000..cc0b07b44a8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_query_log_config.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_query_log_config"
+description: |-
+  Provides details about a specific Route53 Resolver Query Logging Configuration.
+---
+
+
+
+# Data Source: aws_route53_resolver_query_log_config
+
+`awsRoute53ResolverQueryLogConfig` provides details about a specific Route53 Resolver Query Logging Configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverQueryLogConfig } from "./.gen/providers/aws/data-aws-route53-resolver-query-log-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverQueryLogConfig(this, "example", {
+      resolverQueryLogConfigId: "rqlc-1abc2345ef678g91h",
+    });
+  }
+}
+
+```
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverQueryLogConfig } from "./.gen/providers/aws/data-aws-route53-resolver-query-log-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverQueryLogConfig(this, "example", {
+      filter: [
+        {
+          name: "Name",
+          values: ["shared-query-log-config"],
+        },
+        {
+          name: "ShareStatus",
+          values: ["SHARED_WITH_ME"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `resolverQueryLogConfigId` - (Optional) ID of the Route53 Resolver Query Logging Configuration.
+* `filter` - (Optional) One or more name/value pairs to use as filters. There are
+several valid keys; for a full reference, check out
+[Route53resolver Filter value in the AWS API reference][1].
+
+In addition to all arguments above, the following attributes are exported:
+
+* `id` - The ID for the query logging configuration.
+* `arn` - Computed ARN of the Route53 Resolver Query Logging Configuration.
+* `destinationArn` - The ARN of the resource that you want Resolver to send query logs to: an Amazon S3 bucket, a CloudWatch Logs log group, or a Kinesis Data Firehose delivery stream.
+* `name` - The name of the query logging configuration.
+* `ownerId` - The AWS account ID for the account that created the query logging configuration.
+* `shareStatus` - An indication of whether the query logging configuration is shared with other AWS accounts or was shared with the current account by another AWS account.
+* `tags` - Map of tags assigned to the query logging configuration.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_Filter.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_rule.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_rule.html.markdown
new file mode 100644
index 00000000000..a3a9d492a13
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_rule.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_rule"
+description: |-
+  Provides details about a specific Route53 Resolver rule
+---
+
+
+
+# Data Source: aws_route53_resolver_rule
+
+`awsRoute53ResolverRule` provides details about a specific Route53 Resolver rule.
+
+## Example Usage
+
+The following example shows how to get a Route53 Resolver rule based on its associated domain name and rule type.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverRule } from "./.gen/providers/aws/data-aws-route53-resolver-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverRule(this, "example", {
+      domainName: "subdomain.example.com",
+      ruleType: "SYSTEM",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available resolver rules in the current region.
+The given filters must match exactly one resolver rule whose data will be exported as attributes.
+
+* `domainName` - (Optional) Domain name the desired resolver rule forwards DNS queries for. Conflicts with `resolverRuleId`.
+* `name` - (Optional) Friendly name of the desired resolver rule. Conflicts with `resolverRuleId`.
+* `resolverEndpointId` - (Optional) ID of the outbound resolver endpoint of the desired resolver rule. Conflicts with `resolverRuleId`.
+* `resolverRuleId` - (Optional) ID of the desired resolver rule. Conflicts with `domainName`, `name`, `resolverEndpointId`, and `ruleType`.
+* `ruleType` - (Optional) Rule type of the desired resolver rule. Valid values are `forward`, `system`, and `recursive`. Conflicts with `resolverRuleId`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the resolver rule.
+* `arn` - ARN (Amazon Resource Name) for the resolver rule.
+* `ownerId` - When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
+* `shareStatus` - Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
+Values are `notShared`, `sharedByMe`, or `sharedWithMe`.
+* `tags` - Map of tags assigned to the resolver rule.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_resolver_rules.html.markdown b/website/docs/cdktf/typescript/d/route53_resolver_rules.html.markdown
new file mode 100644
index 00000000000..05bb7cb32d6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_resolver_rules.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_rules"
+description: |-
+  Provides details about a set of Route53 Resolver rules
+---
+
+
+
+# Data Source: aws_route53_resolver_rules
+
+`awsRoute53ResolverRules` provides details about a set of Route53 Resolver rules.
+
+## Example Usage
+
+### Retrieving the default resolver rule
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverRules } from "./.gen/providers/aws/data-aws-route53-resolver-rules";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverRules(this, "example", {
+      ownerId: "Route 53 Resolver",
+      ruleType: "RECURSIVE",
+      shareStatus: "NOT_SHARED",
+    });
+  }
+}
+
+```
+
+### Retrieving forward rules shared with me
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverRules } from "./.gen/providers/aws/data-aws-route53-resolver-rules";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverRules(this, "example", {
+      ruleType: "FORWARD",
+      shareStatus: "SHARED_WITH_ME",
+    });
+  }
+}
+
+```
+
+### Retrieving rules by name regex
+
+Resolver rules whose name contains `abc`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53ResolverRules } from "./.gen/providers/aws/data-aws-route53-resolver-rules";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsRoute53ResolverRules(this, "example", {
+      nameRegex: ".*abc.*",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available resolver rules in the current region.
+
+* `nameRegex` - (Optional) Regex string to filter resolver rule names.
+  The filtering is done locally, so it could have a performance impact if the result is large.
+  This argument should be used along with other arguments to limit the number of results returned.
+* `ownerId` - (Optional) When the desired resolver rules are shared with another AWS account, the account ID of the account that the rules are shared with.
+* `resolverEndpointId` - (Optional) ID of the outbound resolver endpoint for the desired resolver rules.
+* `ruleType` - (Optional) Rule type of the desired resolver rules. Valid values are `forward`, `system`, and `recursive`.
+* `shareStatus` - (Optional) Whether the desired resolver rules are shared and, if so, whether the current account is sharing the rules with another account, or another account is sharing the rules with the current account. Valid values are `notShared`, `sharedByMe`, or `sharedWithMe`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `resolverRuleIds` - IDs of the matched resolver rules.
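+
+Because `resolverRuleIds` is exposed as a plain list token, it can be handed to other constructs or exported directly. The following hand-written sketch (not produced by `cdktf convert`) assumes the forward-rules example above and publishes the matched IDs as a stack output:
+
+```typescript
+// Hand-written sketch: reuses the generated provider bindings from the
+// examples above and exports the matched resolver rule IDs.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsRoute53ResolverRules } from "./.gen/providers/aws/data-aws-route53-resolver-rules";
+class MySharedResolverRules extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const shared = new DataAwsRoute53ResolverRules(this, "example", {
+      ruleType: "FORWARD",
+      shareStatus: "SHARED_WITH_ME",
+    });
+    // resolverRuleIds is the attribute documented above.
+    new TerraformOutput(this, "resolver_rule_ids", {
+      value: shared.resolverRuleIds,
+    });
+  }
+}
+```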
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_traffic_policy_document.html.markdown b/website/docs/cdktf/typescript/d/route53_traffic_policy_document.html.markdown
new file mode 100644
index 00000000000..317f96a3cb3
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_traffic_policy_document.html.markdown
@@ -0,0 +1,251 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_traffic_policy_document"
+description: |-
+  Generates a Route53 traffic policy document in JSON format
+---
+
+
+
+# Data Source: aws_route53_traffic_policy_document
+
+Generates a Route53 traffic policy document in JSON format for use with resources that expect policy documents such as [`awsRoute53TrafficPolicy`](/docs/providers/aws/r/route53_traffic_policy.html).
+
+## Example Usage
+
+### Basic Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { DataAwsRoute53TrafficPolicyDocument } from "./.gen/providers/aws/data-aws-route53-traffic-policy-document";
+import { Route53TrafficPolicy } from "./.gen/providers/aws/route53-traffic-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsRegion(this, "current", {});
+    const example = new DataAwsRoute53TrafficPolicyDocument(this, "example", {
+      endpoint: [
+        {
+          id: "my_elb",
+          type: "elastic-load-balancer",
+          value: "elb-111111.${" + current.name + "}.elb.amazonaws.com",
+        },
+        {
+          id: "site_down_banner",
+          region: Token.asString(current.name),
+          type: "s3-website",
+          value: "www.example.com",
+        },
+      ],
+      recordType: "A",
+      rule: [
+        {
+          id: "site_switch",
+          primary: {
+            endpointReference: "my_elb",
+          },
+          secondary: {
+            endpointReference: "site_down_banner",
+          },
+          type: "failover",
+        },
+      ],
+      startRule: "site_switch",
+    });
+    const awsRoute53TrafficPolicyExample = new Route53TrafficPolicy(
+      this,
+      "example_2",
+      {
+        comment: "example comment",
+        document: Token.asString(example.json),
+        name: "example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53TrafficPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Complex Example
+
+The following example showcases the use of nested rules within the traffic policy document and introduces the `geoproximity` rule type.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53TrafficPolicyDocument } from "./.gen/providers/aws/data-aws-route53-traffic-policy-document";
+import { Route53TrafficPolicy } from "./.gen/providers/aws/route53-traffic-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsRoute53TrafficPolicyDocument(this, "example", {
+      endpoint: [
+        {
+          id: "na_endpoint_a",
+          type: "elastic-load-balancer",
+          value: "elb-111111.us-west-1.elb.amazonaws.com",
+        },
+        {
+          id: "na_endpoint_b",
+          type: "elastic-load-balancer",
+          value: "elb-222222.us-west-1.elb.amazonaws.com",
+        },
+        {
+          id: "eu_endpoint",
+          type: "elastic-load-balancer",
+          value: "elb-333333.eu-west-1.elb.amazonaws.com",
+        },
+        {
+          id: "ap_endpoint",
+          type: "elastic-load-balancer",
+          value: "elb-444444.ap-northeast-2.elb.amazonaws.com",
+        },
+      ],
+      recordType: "A",
+      rule: [
+        {
+          id: "na_rule",
+          primary: {
+            endpointReference: "na_endpoint_a",
+          },
+          secondary: {
+            endpointReference: "na_endpoint_b",
+          },
+          type: "failover",
+        },
+        {
+          geoProximityLocation: [
+            {
+              bias: Token.asString(10),
+              evaluateTargetHealth: true,
+              region: "aws:route53:us-west-1",
+              ruleReference: "na_rule",
+            },
+            {
+              bias: Token.asString(10),
+              endpointReference: "eu_endpoint",
+              evaluateTargetHealth: true,
+              region: "aws:route53:eu-west-1",
+            },
+            {
+              bias: Token.asString(0),
+              endpointReference: "ap_endpoint",
+              evaluateTargetHealth: true,
+              region: "aws:route53:ap-northeast-2",
+            },
+          ],
+          id: "geoproximity_rule",
+          type: "geoproximity",
+        },
+      ],
+      startRule: "geoproximity_rule",
+    });
+    const awsRoute53TrafficPolicyExample = new Route53TrafficPolicy(
+      this,
+      "example_1",
+      {
+        comment: "example comment",
+        document: Token.asString(example.json),
+        name: "example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53TrafficPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `endpoint` (Optional) - Configuration block for the definitions of the endpoints that you want to use in this traffic policy. See below
+* `recordType` (Optional) - DNS type of all of the resource record sets that Amazon Route 53 will create based on this traffic policy.
+* `rule` (Optional) - Configuration block for definitions of the rules that you want to use in this traffic policy. See below
+* `startEndpoint` (Optional) - An endpoint to be used as the starting point for the traffic policy.
+* `startRule` (Optional) - A rule to be used as the starting point for the traffic policy.
+* `version` (Optional) - Version of the traffic policy format.
+
+### `endpoint`
+
+* `id` - (Required) ID of an endpoint you want to assign.
+* `type` - (Optional) Type of the endpoint. Valid values are `value`, `cloudfront`, `elasticLoadBalancer`, and `s3Website`.
+* `region` - (Optional) To route traffic to an Amazon S3 bucket that is configured as a website endpoint, specify the region in which you created the bucket for `region`.
+* `value` - (Optional) Value of the `type`.
+
+### `rule`
+
+* `id` - (Required) ID of a rule you want to assign.
+* `type` - (Optional) Type of the rule.
+* `primary` - (Optional) Configuration block for the settings for the rule or endpoint that you want to route traffic to whenever the corresponding resources are available. Only valid for `failover` type. See below
See below +* `secondary` - (Optional) Configuration block for the rule or endpoint that you want to route traffic to whenever the primary resources are not available. Only valid for `failover` type. See below +* `location` - (Optional) Configuration block for when you add a geolocation rule, you configure your traffic policy to route your traffic based on the geographic location of your users. Only valid for `geo` type. See below +* `geoProximityLocation` - (Optional) Configuration block for when you add a geoproximity rule, you configure Amazon Route 53 to route traffic to your resources based on the geographic location of your resources. Only valid for `geoproximity` type. See below +* `regions` - (Optional) Configuration block for when you add a latency rule, you configure your traffic policy to route your traffic based on the latency (the time delay) between your users and the AWS regions where you've created AWS resources such as ELB load balancers and Amazon S3 buckets. Only valid for `latency` type. See below +* `items` - (Optional) Configuration block for when you add a multivalue answer rule, you configure your traffic policy to route traffic approximately randomly to your healthy resources. Only valid for `multivalue` type. See below + +### `primary` and `secondary` + +* `endpointReference` - (Optional) References to an endpoint. +* `evaluateTargetHealth` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints. +* `healthCheck` - (Optional) If you want to associate a health check with the endpoint or rule. +* `ruleReference` - (Optional) References to a rule. + +### `location` + +* `continent` - (Optional) Value of a continent. +* `country` - (Optional) Value of a country. +* `endpointReference` - (Optional) References to an endpoint. +* `evaluateTargetHealth` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints. +* `healthCheck` - (Optional) If you want to associate a health check with the endpoint or rule. +* `isDefault` - (Optional) Indicates whether this set of values represents the default location. +* `ruleReference` - (Optional) References to a rule. +* `subdivision` - (Optional) Value of a subdivision. + +### `geoProximityLocation` + +* `bias` - (Optional) Specify a value for `bias` if you want to route more traffic to an endpoint from nearby endpoints (positive values) or route less traffic to an endpoint (negative values). +* `endpointReference` - (Optional) References to an endpoint. +* `evaluateTargetHealth` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints. +* `healthCheck` - (Optional) If you want to associate a health check with the endpoint or rule. +* `latitude` - (Optional) Represents the location south (negative) or north (positive) of the equator. Valid values are -90 degrees to 90 degrees. +* `longitude` - (Optional) Represents the location west (negative) or east (positive) of the prime meridian. Valid values are -180 degrees to 180 degrees. +* `region` - (Optional) If your endpoint is an AWS resource, specify the AWS Region that you created the resource in. +* `ruleReference` - (Optional) References to a rule. + +### `region` + +* `endpointReference` - (Optional) References to an endpoint. 
+* `evaluateTargetHealth` - (Optional) Indicates whether you want Amazon Route 53 to evaluate the health of the endpoint and route traffic only to healthy endpoints.
+* `healthCheck` - (Optional) Health check to associate with the endpoint or rule.
+* `region` - (Optional) Region code for the AWS Region that you created the resource in.
+* `ruleReference` - (Optional) Reference to a rule.
+
+### `item`
+
+* `endpointReference` - (Optional) Reference to an endpoint.
+* `healthCheck` - (Optional) Health check to associate with the endpoint or rule.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `json` - Standard JSON policy document rendered based on the arguments above.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route53_zone.html.markdown b/website/docs/cdktf/typescript/d/route53_zone.html.markdown
new file mode 100644
index 00000000000..fd2c822bb03
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route53_zone.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_zone"
+description: |-
+  Provides details about a specific Route 53 Hosted Zone
+---
+
+
+
+# Data Source: aws_route53_zone
+
+`awsRoute53Zone` provides details about a specific Route 53 Hosted Zone.
+
+This data source allows you to find a Hosted Zone ID given a Hosted Zone name and certain search criteria.
+
+## Example Usage
+
+The following example shows how to get a Hosted Zone from its name and, from this data, how to create a Record Set.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53Zone } from "./.gen/providers/aws/data-aws-route53-zone";
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const selected = new DataAwsRoute53Zone(this, "selected", {
+      name: "test.com.",
+      privateZone: true,
+    });
+    new Route53Record(this, "www", {
+      name: "www.${" + selected.name + "}",
+      records: ["10.0.0.1"],
+      ttl: Token.asNumber("300"),
+      type: "A",
+      zoneId: Token.asString(selected.zoneId),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available
+Hosted Zone. Use either `zoneId` or `name`, not both. The given filter must match exactly one
+Hosted Zone. If you use the `name` field for a private Hosted Zone, you must also set `privateZone` to `true`.
+
+* `zoneId` - (Optional) Hosted Zone ID of the desired Hosted Zone.
+
+* `name` - (Optional) Hosted Zone name of the desired Hosted Zone.
+* `privateZone` - (Optional) Used with the `name` field to get a private Hosted Zone.
+* `vpcId` - (Optional) Used with the `name` field to get a private Hosted Zone associated with the given `vpcId` (in this case, `privateZone` is not mandatory).
+* `tags` - (Optional) Used with the `name` field. A map of tags, each pair of which must exactly match a pair on the desired Hosted Zone.
+
+## Attribute Reference
+
+All of the argument attributes are also exported as result attributes.
+This data source will complete the data by populating
+any fields that are not included in the configuration with the data for
+the selected Hosted Zone.
+
+The following attributes are additionally exported:
+
+* `arn` - ARN of the Hosted Zone.
+* `callerReference` - Caller Reference of the Hosted Zone.
+* `comment` - Comment field of the Hosted Zone.
+* `nameServers` - List of DNS name servers for the Hosted Zone.
+* `primaryNameServer` - The Route 53 name server that created the SOA record.
+* `resourceRecordSetCount` - The number of Record Sets in the Hosted Zone.
+* `linkedServicePrincipal` - The service that created the Hosted Zone (e.g., `servicediscovery.amazonaws.com`).
+* `linkedServiceDescription` - The description provided by the service that created the Hosted Zone (e.g., `arn:aws:servicediscovery:us-east-1:1234567890:namespace/ns-xxxxxxxxxxxxxxxx`).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route_table.html.markdown b/website/docs/cdktf/typescript/d/route_table.html.markdown
new file mode 100644
index 00000000000..e4fe8e53e92
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route_table.html.markdown
@@ -0,0 +1,121 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route_table"
+description: |-
+  Provides details about a specific Route Table
+---
+
+
+
+# Data Source: aws_route_table
+
+`awsRouteTable` provides details about a specific Route Table.
+
+This resource can prove useful when a module accepts a Subnet ID as an input variable and needs to, for example, add a route in the Route Table.
+
+## Example Usage
+
+The following example shows how one might accept a Route Table ID as a variable and use this data source to obtain the data necessary to create a route.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformVariable, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRouteTable } from "./.gen/providers/aws/data-aws-route-table";
+import { Route } from "./.gen/providers/aws/route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const subnetId = new TerraformVariable(this, "subnet_id", {});
+    const selected = new DataAwsRouteTable(this, "selected", {
+      subnetId: subnetId.stringValue,
+    });
+    new Route(this, "route", {
+      destinationCidrBlock: "10.0.1.0/22",
+      routeTableId: Token.asString(selected.id),
+      vpcPeeringConnectionId: "pcx-45ff3dc1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available Route Table in the current region. The given filters must match exactly one Route Table whose data will be exported as attributes.
+
+The following arguments are optional:
+
+* `filter` - (Optional) Configuration block. Detailed below.
+* `gatewayId` - (Optional) ID of an Internet Gateway or Virtual Private Gateway which is connected to the Route Table (not exported if not passed as a parameter).
+* `routeTableId` - (Optional) ID of the specific Route Table to retrieve.
+* `subnetId` - (Optional) ID of a Subnet which is connected to the Route Table (not exported if not passed as a parameter).
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired Route Table.
+* `vpcId` - (Optional) ID of the VPC that the desired Route Table belongs to.
+
+### filter
+
+Complex filters can be expressed using one or more `filter` blocks.
+
+The following arguments are required:
+
+* `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeRouteTables.html).
+* `values` - (Required) Set of values that are accepted for the given field. A Route Table will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the route table.
+* `associations` - List of associations with attributes detailed below.
+* `ownerId` - ID of the AWS account that owns the route table.
+* `routes` - List of routes with attributes detailed below.
+
+### routes
+
+When relevant, routes are also exported with the following attributes:
+
+For destinations:
+
+* `cidrBlock` - CIDR block of the route.
+* `destinationPrefixListId` - The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the route.
+* `ipv6CidrBlock` - IPv6 CIDR block of the route.
+
+For targets:
+
+* `carrierGatewayId` - ID of the Carrier Gateway.
+* `coreNetworkArn` - ARN of the core network.
+* `egressOnlyGatewayId` - ID of the Egress Only Internet Gateway.
+* `gatewayId` - Internet Gateway ID.
+* `instanceId` - EC2 instance ID.
+* `localGatewayId` - Local Gateway ID.
+* `natGatewayId` - NAT Gateway ID.
+* `networkInterfaceId` - ID of the elastic network interface (eni) to use.
+* `transitGatewayId` - EC2 Transit Gateway ID.
+* `vpcEndpointId` - VPC Endpoint ID.
+* `vpcPeeringConnectionId` - VPC Peering ID.
+
+### associations
+
+Associations are also exported with the following attributes:
+
+* `gatewayId` - Gateway ID. Only set when associated with an Internet Gateway or Virtual Private Gateway.
+* `main` - Whether the association is due to the main route table.
+* `routeTableAssociationId` - Association ID.
+* `routeTableId` - Route Table ID.
+* `subnetId` - Subnet ID. Only set when associated with a subnet.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/route_tables.html.markdown b/website/docs/cdktf/typescript/d/route_tables.html.markdown
new file mode 100644
index 00000000000..a321ab67d84
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/route_tables.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route_tables"
+description: |-
+  Get information on Amazon route tables.
+---
+
+
+
+# Data Source: aws_route_tables
+
+This data source can be useful for getting back a list of route table IDs to be referenced elsewhere.
+
+## Example Usage
+
+The following example adds a route for a particular CIDR block to every (private
+kops) route table in a specified VPC, directing traffic through a particular VPC peering
+connection.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  Fn,
+  Token,
+  TerraformCount,
+  TerraformVariable,
+  propertyAccess,
+  TerraformStack,
+} from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRouteTables } from "./.gen/providers/aws/data-aws-route-tables";
+import { Route } from "./.gen/providers/aws/route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const vpcId = new TerraformVariable(this, "vpc_id", {});
+    const rts = new DataAwsRouteTables(this, "rts", {
+      filter: [
+        {
+          name: "tag:kubernetes.io/kops/role",
+          values: ["private*"],
+        },
+      ],
+      vpcId: vpcId.stringValue,
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const rCount = TerraformCount.of(Token.asNumber(Fn.lengthOf(rts.ids)));
+    new Route(this, "r", {
+      destinationCidrBlock: "10.0.0.0/22",
+      routeTableId: Token.asString(
+        propertyAccess(Fn.tolist(rts.ids), [rCount.index])
+      ),
+      vpcPeeringConnectionId: "pcx-0e9a7a9ecd137dc54",
+      count: rCount,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+
+* `vpcId` - (Optional) VPC ID that you want to filter from.
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired route tables.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeRouteTables.html).
+
+* `values` - (Required) Set of values that are accepted for the given field.
+  A Route Table will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Region.
+* `ids` - List of all the route table IDs found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/s3_account_public_access_block.html.markdown b/website/docs/cdktf/typescript/d/s3_account_public_access_block.html.markdown
new file mode 100644
index 00000000000..16e04ae1569
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/s3_account_public_access_block.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3_account_public_access_block"
+description: |-
+  Provides S3 account-level Public Access Block Configuration
+---
+
+
+
+# Data Source: aws_s3_account_public_access_block
+
+The S3 account public access block data source returns account-level public access block configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsS3AccountPublicAccessBlock } from "./.gen/providers/aws/data-aws-s3-account-public-access-block";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsS3AccountPublicAccessBlock(this, "example", {});
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `accountId` - (Optional) AWS account ID to configure. Defaults to the automatically determined account ID of the Terraform AWS provider.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - AWS account ID.
+* `blockPublicAcls` - Whether Amazon S3 blocks public ACLs for buckets in this account. Returns `true` or `false`.
+* `blockPublicPolicy` - Whether Amazon S3 blocks public bucket policies for buckets in this account. Returns `true` or `false`.
+* `ignorePublicAcls` - Whether Amazon S3 ignores public ACLs for buckets in this account. Returns `true` or `false`.
+* `restrictPublicBuckets` - Whether Amazon S3 restricts public bucket policies for buckets in this account. Returns `true` or `false`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/s3_bucket.html.markdown b/website/docs/cdktf/typescript/d/s3_bucket.html.markdown
new file mode 100644
index 00000000000..396ebd859c2
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/s3_bucket.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket"
+description: |-
+  Provides details about a specific S3 bucket
+---
+
+
+
+# Data Source: aws_s3_bucket
+
+Provides details about a specific S3 bucket.
+
+This resource may prove useful when setting up a Route53 record or an origin for a CloudFront
+Distribution.
+
+## Example Usage
+
+### Route53 Record
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRoute53Zone } from "./.gen/providers/aws/data-aws-route53-zone";
+import { DataAwsS3Bucket } from "./.gen/providers/aws/data-aws-s3-bucket";
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+interface MyConfig {
+  evaluateTargetHealth: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const testZone = new DataAwsRoute53Zone(this, "test_zone", {
+      name: "test.com.",
+    });
+    const selected = new DataAwsS3Bucket(this, "selected", {
+      bucket: "bucket.test.com",
+    });
+    new Route53Record(this, "example", {
+      alias: {
+        name: Token.asString(selected.websiteDomain),
+        zoneId: Token.asString(selected.hostedZoneId),
+        evaluateTargetHealth: config.evaluateTargetHealth,
+      },
+      name: "bucket",
+      type: "A",
+      zoneId: Token.asString(testZone.id),
+    });
+  }
+}
+
+```
+
+### CloudFront Origin
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution";
+import { DataAwsS3Bucket } from "./.gen/providers/aws/data-aws-s3-bucket";
+interface MyConfig {
+  defaultCacheBehavior: any;
+  enabled: any;
+  restrictions: any;
+  viewerCertificate: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const selected = new DataAwsS3Bucket(this, "selected", {
+      bucket: "a-test-bucket",
+    });
+    new CloudfrontDistribution(this, "test", {
+      origin: [
+        {
+          domainName: Token.asString(selected.bucketDomainName),
+          originId: "s3-selected-bucket",
+        },
+      ],
+      defaultCacheBehavior: config.defaultCacheBehavior,
+      enabled: config.enabled,
+      restrictions: config.restrictions,
+      viewerCertificate: config.viewerCertificate,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the bucket.
+* `arn` - ARN of the bucket. Will be of format `arn:aws:s3:::bucketname`.
+* `bucketDomainName` - Bucket domain name. Will be of format `bucketname.s3.amazonaws.com`.
+* `bucketRegionalDomainName` - The bucket region-specific domain name. The bucket domain name including the region name. Please refer to the [S3 endpoints reference](https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region) for format. Note: AWS CloudFront allows specifying an S3 region-specific endpoint when creating an S3 origin. This will prevent redirect issues from CloudFront to the S3 Origin URL. For more information, see the [Virtual Hosted-Style Requests for Other Regions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#deprecated-global-endpoint) section in the AWS S3 User Guide.
+* `hostedZoneId` - The [Route 53 Hosted Zone ID](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_website_region_endpoints) for this bucket's region.
+* `region` - AWS region this bucket resides in.
+* `websiteEndpoint` - Website endpoint, if the bucket is configured with a website. If not, this will be an empty string.
+* `websiteDomain` - Domain of the website endpoint, if the bucket is configured with a website. If not, this will be an empty string. This is used to create Route 53 alias records.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/s3_bucket_object.html.markdown b/website/docs/cdktf/typescript/d/s3_bucket_object.html.markdown
new file mode 100644
index 00000000000..f51b375f418
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/s3_bucket_object.html.markdown
@@ -0,0 +1,129 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object"
+description: |-
+  Provides metadata and optionally content of an S3 object
+---
+
+
+
+# Data Source: aws_s3_bucket_object
+
+~> **NOTE:** The `awsS3BucketObject` data source is DEPRECATED and will be removed in a future version! Use `awsS3Object` instead, where new features and fixes will be added.
+
+The S3 object data source allows access to the metadata and
+_optionally_ (see below) content of an object stored inside an S3 bucket.
+
+~> **Note:** The content of an object (`body` field) is available only for objects which have a human-readable `contentType` (`text/*` and `application/json`).
+This is to prevent printing unsafe characters and potentially downloading large amounts of data that would be thrown away in favour of metadata.
+
+## Example Usage
+
+The following example retrieves a text object (which must have a `contentType`
+value starting with `text/`) and uses it as the `userData` for an EC2 instance:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsS3BucketObject } from "./.gen/providers/aws/data-aws-s3-bucket-object";
+import { Instance } from "./.gen/providers/aws/instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const bootstrapScript = new DataAwsS3BucketObject(
+      this,
+      "bootstrap_script",
+      {
+        bucket: "ourcorp-deploy-config",
+        key: "ec2-bootstrap-script.sh",
+      }
+    );
+    new Instance(this, "example", {
+      ami: "ami-2757f631",
+      instanceType: "t2.micro",
+      userData: Token.asString(bootstrapScript.body),
+    });
+  }
+}
+
+```
+
+The following, more-complex example retrieves only the metadata for a zip
+file stored in S3, which is then used to pass the most recent `versionId`
+to AWS Lambda for use as a function implementation. More information about
+Lambda functions is available in the documentation for
+[`awsLambdaFunction`](/docs/providers/aws/r/lambda_function.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsS3BucketObject } from "./.gen/providers/aws/data-aws-s3-bucket-object";
+import { LambdaFunction } from "./.gen/providers/aws/lambda-function";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const lambda = new DataAwsS3BucketObject(this, "lambda", {
+      bucket: "ourcorp-lambda-functions",
+      key: "hello-world.zip",
+    });
+    new LambdaFunction(this, "test_lambda", {
+      functionName: "lambda_function_name",
+      handler: "exports.test",
+      role: iamForLambda.arn,
+      s3Bucket: Token.asString(lambda.id),
+      s3Key: Token.asString(lambda.key),
+      s3ObjectVersion: Token.asString(lambda.versionId),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket to read the object from. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified.
+* `key` - (Required) Full path to the object inside the bucket.
+* `versionId` - (Optional) Specific version ID of the object returned (defaults to latest version).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `body` - Object data (see **limitations above** to understand cases in which this field is actually available).
+* `bucketKeyEnabled` - Whether [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) are used for SSE-KMS.
+* `cacheControl` - Caching behavior along the request/reply chain.
+* `contentDisposition` - Presentational information for the object.
+* `contentEncoding` - What content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.
+* `contentLanguage` - Language the content is in.
+* `contentLength` - Size of the body in bytes.
+* `contentType` - Standard MIME type describing the format of the object data.
+* `etag` - [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) generated for the object (an MD5 sum of the object content in case it's not encrypted)
+* `expiration` - If the object expiration is configured (see [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html)), the field includes this header. It includes the expiry-date and rule-id key value pairs providing object expiration information. The value of the rule-id is URL encoded.
+* `expires` - Date and time at which the object is no longer cacheable.
+* `lastModified` - Last modified date of the object in RFC1123 format (e.g., `Mon, 02 Jan 2006 15:04:05 MST`)
+* `metadata` - Map of metadata stored with the object in S3
+* `objectLockLegalHoldStatus` - Indicates whether this object has an active [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds). This field is only returned if you have permission to view an object's legal hold status.
+* `objectLockMode` - Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) currently in place for this object.
+* `objectLockRetainUntilDate` - The date and time when this object's object lock will expire.
+* `serverSideEncryption` - If the object is stored using server-side encryption (KMS or Amazon S3-managed encryption key), this field includes the chosen encryption and algorithm used.
+* `sseKmsKeyId` - If present, specifies the ID of the Key Management Service (KMS) master encryption key that was used for the object.
+* `storageClass` - [Storage class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html) information of the object. Available for all objects except for `standard` storage class objects.
+* `versionId` - Latest version ID of the object returned.
+* `websiteRedirectLocation` - If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.
+* `tags` - Map of tags assigned to the object.
+
+-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/s3_bucket_objects.html.markdown b/website/docs/cdktf/typescript/d/s3_bucket_objects.html.markdown new file mode 100644 index 00000000000..cd63683f6aa --- /dev/null +++ b/website/docs/cdktf/typescript/d/s3_bucket_objects.html.markdown @@ -0,0 +1,79 @@ +---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_objects"
+description: |-
+  Returns keys and metadata of S3 objects
+---
+
+
+
+# Data Source: aws_s3_bucket_objects
+
+~> **NOTE:** The `awsS3BucketObjects` data source is DEPRECATED and will be removed in a future version!
Use `awsS3Objects` instead, where new features and fixes will be added. + +~> **NOTE on `maxKeys`:** Retrieving very large numbers of keys can adversely affect Terraform's performance. + +The objects data source returns keys (i.e., file names) and other metadata about objects in an S3 bucket. + +## Example Usage + +The following example retrieves a list of all object keys in an S3 bucket and creates corresponding Terraform object data sources: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformCount, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsS3BucketObjects } from "./.gen/providers/aws/data-aws-s3-bucket-objects"; +import { DataAwsS3Object } from "./.gen/providers/aws/data-aws-s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myObjects = new DataAwsS3BucketObjects(this, "my_objects", { + bucket: "ourcorp", + }); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const objectInfoCount = TerraformCount.of( + Token.asNumber(Fn.lengthOf(myObjects.keys)) + ); + new DataAwsS3Object(this, "object_info", { + bucket: Token.asString(myObjects.id), + key: Token.asString( + Fn.element(myObjects.keys, Token.asNumber(objectInfoCount.index)) + ), + count: objectInfoCount, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bucket` - (Required) Lists object keys in this S3 bucket. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified +* `prefix` - (Optional) Limits results to object keys with this prefix (Default: none) +* `delimiter` - (Optional) Character used to group keys (Default: none) +* `encodingType` - (Optional) Encodes keys using this method (Default: none; besides none, only "url" can be used) +* `maxKeys` - (Optional) Maximum object keys to return (Default: 1000) +* `startAfter` - (Optional) Returns key names lexicographically after a specific object key in your bucket (Default: none; S3 lists object keys in UTF-8 character encoding in lexicographical order) +* `fetchOwner` - (Optional) Boolean specifying whether to populate the owner list (Default: false) + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `keys` - List of strings representing object keys +* `commonPrefixes` - List of any keys between `prefix` and the next occurrence of `delimiter` (i.e., similar to subdirectories of the `prefix` "directory"); the list is only returned when you specify `delimiter` +* `id` - S3 Bucket. 
+* `owners` - List of strings representing object owner IDs (see `fetchOwner` above) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/s3_bucket_policy.html.markdown b/website/docs/cdktf/typescript/d/s3_bucket_policy.html.markdown new file mode 100644 index 00000000000..44654ed9cad --- /dev/null +++ b/website/docs/cdktf/typescript/d/s3_bucket_policy.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_policy" +description: |- + Provides IAM policy of an S3 bucket +--- + + + +# Data Source: aws_s3_bucket_policy + +The bucket policy data source returns IAM policy of an S3 bucket. + +## Example Usage + +The following example retrieves IAM policy of a specified S3 bucket. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsS3BucketPolicy } from "./.gen/providers/aws/data-aws-s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsS3BucketPolicy(this, "example", { + bucket: "example-bucket-name", + }); + new TerraformOutput(this, "foo", { + value: example.policy, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bucket` - (Required) Bucket name. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `policy` - IAM bucket policy. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/s3_object.html.markdown b/website/docs/cdktf/typescript/d/s3_object.html.markdown new file mode 100644 index 00000000000..b8ee89e871a --- /dev/null +++ b/website/docs/cdktf/typescript/d/s3_object.html.markdown @@ -0,0 +1,123 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_object" +description: |- + Provides metadata and optionally content of an S3 object +--- + + + +# Data Source: aws_s3_object + +The S3 object data source allows access to the metadata and +_optionally_ (see below) content of an object stored inside S3 bucket. + +~> **Note:** The content of an object (`body` field) is available only for objects which have a human-readable `contentType` (`text/*` and `application/json`). This is to prevent printing unsafe characters and potentially downloading large amount of data which would be thrown away in favour of metadata. + +## Example Usage + +The following example retrieves a text object (which must have a `contentType` +value starting with `text/`) and uses it as the `userData` for an EC2 instance: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsS3Object } from "./.gen/providers/aws/data-aws-s3-object"; +import { Instance } from "./.gen/providers/aws/instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bootstrapScript = new DataAwsS3Object(this, "bootstrap_script", { + bucket: "ourcorp-deploy-config", + key: "ec2-bootstrap-script.sh", + }); + new Instance(this, "example", { + ami: "ami-2757f631", + instanceType: "t2.micro", + userData: Token.asString(bootstrapScript.body), + }); + } +} + +``` + +The following, more-complex example retrieves only the metadata for a zip +file stored in S3, which is then used to pass the most recent `versionId` +to AWS Lambda for use as a function implementation. More information about +Lambda functions is available in the documentation for +[`awsLambdaFunction`](/docs/providers/aws/r/lambda_function.html). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsS3Object } from "./.gen/providers/aws/data-aws-s3-object"; +import { LambdaFunction } from "./.gen/providers/aws/lambda-function"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const lambda = new DataAwsS3Object(this, "lambda", { + bucket: "ourcorp-lambda-functions", + key: "hello-world.zip", + }); + new LambdaFunction(this, "test_lambda", { + functionName: "lambda_function_name", + handler: "exports.test", + role: iamForLambda.arn, + s3Bucket: Token.asString(lambda.bucket), + s3Key: Token.asString(lambda.key), + s3ObjectVersion: Token.asString(lambda.versionId), + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bucket` - (Required) Name of the bucket to read the object from. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified +* `key` - (Required) Full path to the object inside the bucket +* `versionId` - (Optional) Specific version ID of the object returned (defaults to latest version) + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `body` - Object data (see **limitations above** to understand cases in which this field is actually available) +* `bucketKeyEnabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS. +* `cacheControl` - Caching behavior along the request/reply chain. +* `contentDisposition` - Presentational information for the object. +* `contentEncoding` - What content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. +* `contentLanguage` - Language the content is in. +* `contentLength` - Size of the body in bytes. +* `contentType` - Standard MIME type describing the format of the object data. 
+* `etag` - [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) generated for the object (an MD5 sum of the object content in case it's not encrypted)
+* `expiration` - If the object expiration is configured (see [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html)), the field includes this header. It includes the expiry-date and rule-id key value pairs providing object expiration information. The value of the rule-id is URL encoded.
+* `expires` - Date and time at which the object is no longer cacheable.
+* `lastModified` - Last modified date of the object in RFC1123 format (e.g., `Mon, 02 Jan 2006 15:04:05 MST`)
+* `metadata` - Map of metadata stored with the object in S3
+* `objectLockLegalHoldStatus` - Indicates whether this object has an active [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds). This field is only returned if you have permission to view an object's legal hold status.
+* `objectLockMode` - Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) currently in place for this object.
+* `objectLockRetainUntilDate` - The date and time when this object's object lock will expire.
+* `serverSideEncryption` - If the object is stored using server-side encryption (KMS or Amazon S3-managed encryption key), this field includes the chosen encryption and algorithm used.
+* `sseKmsKeyId` - If present, specifies the ID of the Key Management Service (KMS) master encryption key that was used for the object.
+* `storageClass` - [Storage class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html) information of the object. Available for all objects except for `standard` storage class objects.
+* `versionId` - Latest version ID of the object returned.
+* `websiteRedirectLocation` - If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.
+* `tags` - Map of tags assigned to the object.
+
+-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/s3_objects.html.markdown b/website/docs/cdktf/typescript/d/s3_objects.html.markdown new file mode 100644 index 00000000000..9abb40584af --- /dev/null +++ b/website/docs/cdktf/typescript/d/s3_objects.html.markdown @@ -0,0 +1,77 @@ +---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_objects"
+description: |-
+  Returns keys and metadata of S3 objects
+---
+
+
+
+# Data Source: aws_s3_objects
+
+~> **NOTE on `maxKeys`:** Retrieving very large numbers of keys can adversely affect Terraform's performance.
+
+The objects data source returns keys (i.e., file names) and other metadata about objects in an S3 bucket.
+
+## Example Usage
+
+The following example retrieves a list of all object keys in an S3 bucket and creates corresponding Terraform object data sources:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformCount, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsS3Object } from "./.gen/providers/aws/data-aws-s3-object";
+import { DataAwsS3Objects } from "./.gen/providers/aws/data-aws-s3-objects";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const myObjects = new DataAwsS3Objects(this, "my_objects", {
+      bucket: "ourcorp",
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const objectInfoCount = TerraformCount.of(
+      Token.asNumber(Fn.lengthOf(myObjects.keys))
+    );
+    new DataAwsS3Object(this, "object_info", {
+      bucket: Token.asString(myObjects.id),
+      key: Token.asString(
+        Fn.element(myObjects.keys, Token.asNumber(objectInfoCount.index))
+      ),
+      count: objectInfoCount,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `bucket` - (Required) Lists object keys in this S3 bucket. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified
+* `prefix` - (Optional) Limits results to object keys with this prefix (Default: none)
+* `delimiter` - (Optional) Character used to group keys (Default: none)
+* `encodingType` - (Optional) Encodes keys using this method (Default: none; besides none, only "url" can be used)
+* `maxKeys` - (Optional) Maximum object keys to return (Default: 1000)
+* `startAfter` - (Optional) Returns key names lexicographically after a specific object key in your bucket (Default: none; S3 lists object keys in UTF-8 character encoding in lexicographical order)
+* `fetchOwner` - (Optional) Boolean specifying whether to populate the owner list (Default: false)
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `keys` - List of strings representing object keys
+* `commonPrefixes` - List of any keys between `prefix` and the next occurrence of `delimiter` (i.e., similar to subdirectories of the `prefix` "directory"); the list is only returned when you specify `delimiter`
+* `id` - S3 Bucket.
+* `owners` - List of strings representing object owner IDs (see `fetchOwner` above)
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/s3control_multi_region_access_point.html.markdown b/website/docs/cdktf/typescript/d/s3control_multi_region_access_point.html.markdown new file mode 100644 index 00000000000..8900d2e5ee9 --- /dev/null +++ b/website/docs/cdktf/typescript/d/s3control_multi_region_access_point.html.markdown @@ -0,0 +1,74 @@ +---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3control_multi_region_access_point"
+description: |-
+  Provides details on an S3 Multi-Region Access Point.
+--- + + + +# Data Source: aws_s3control_multi_region_access_point + +Provides details on a specific S3 Multi-Region Access Point. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsS3ControlMultiRegionAccessPoint } from "./.gen/providers/aws/data-aws-s3-control-multi-region-access-point"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsS3ControlMultiRegionAccessPoint(this, "example", { + name: "example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `accountId` - (Optional) The AWS account ID of the S3 Multi-Region Access Point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `name` - (Required) The name of the Multi-Region Access Point. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `alias` - The alias for the Multi-Region Access Point. +* `arn` - Amazon Resource Name (ARN) of the Multi-Region Access Point. +* `createdAt` - Timestamp when the resource has been created. +* `domainName` - The DNS domain name of the S3 Multi-Region Access Point in the format _`alias`_.accesspoint.s3-global.amazonaws.com. For more information, see the documentation on [Multi-Region Access Point Requests](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPointRequests.html). +* `publicAccessBlock` - Public Access Block of the Multi-Region Access Point. Detailed below. +* `regions` - A collection of the regions and buckets associated with the Multi-Region Access Point. +* `status` - The current status of the Multi-Region Access Point. + +### public_access_block + +* `blockPublicAcls` - Specifies whether Amazon S3 should block public access control lists (ACLs). When set to `true` causes the following behavior: + * PUT Bucket acl and PUT Object acl calls fail if the specified ACL is public. + * PUT Object calls fail if the request includes a public ACL. + * PUT Bucket calls fail if the request includes a public ACL. +* `blockPublicPolicy` - Specifies whether Amazon S3 should block public bucket policies for buckets in this account. When set to `true` causes Amazon S3 to: + * Reject calls to PUT Bucket policy if the specified bucket policy allows public access. +* `ignorePublicAcls` - Specifies whether Amazon S3 should ignore public ACLs for buckets in this account. When set to `true` causes Amazon S3 to: + * Ignore all public ACLs on buckets in this account and any objects that they contain. +* `restrictPublicBuckets` - Specifies whether Amazon S3 should restrict public bucket policies for buckets in this account. When set to `true`: + * Only the bucket owner and AWS Services can access buckets with public policies. + +### regions + +* `bucket` - The name of the bucket. +* `region` - The name of the region. 
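+
+The example above only reads the data source; as a sketch of how the attributes might then be consumed (the `domainName` property name here assumes the generated binding used above), the access point's global DNS name can be exported from the stack:
+
+```typescript
+// A minimal sketch, not generated by 'cdktf convert': exporting the
+// Multi-Region Access Point's DNS name for use outside this stack.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsS3ControlMultiRegionAccessPoint } from "./.gen/providers/aws/data-aws-s3-control-multi-region-access-point";
+class MrapDomainStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsS3ControlMultiRegionAccessPoint(this, "example", {
+      name: "example",
+    });
+    // Resolves at apply time to <alias>.accesspoint.s3-global.amazonaws.com.
+    new TerraformOutput(this, "mrap_domain_name", {
+      value: example.domainName,
+    });
+  }
+}
+```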
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/sagemaker_prebuilt_ecr_image.html.markdown b/website/docs/cdktf/typescript/d/sagemaker_prebuilt_ecr_image.html.markdown new file mode 100644 index 00000000000..72ca74a1622 --- /dev/null +++ b/website/docs/cdktf/typescript/d/sagemaker_prebuilt_ecr_image.html.markdown @@ -0,0 +1,58 @@ +---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_prebuilt_ecr_image"
+description: |-
+  Get information about prebuilt Amazon SageMaker Docker images.
+---
+
+
+
+# Data Source: aws_sagemaker_prebuilt_ecr_image
+
+Get information about prebuilt Amazon SageMaker Docker images.
+
+~> **NOTE:** The AWS provider creates a validly constructed `registryPath` but does not verify that the `registryPath` corresponds to an existing image. For example, using a `registryPath` containing an `imageTag` that does not correspond to a Docker image in the ECR repository will result in an error.
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSagemakerPrebuiltEcrImage } from "./.gen/providers/aws/data-aws-sagemaker-prebuilt-ecr-image";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSagemakerPrebuiltEcrImage(this, "test", {
+      imageTag: "2.2-1.0.11.0",
+      repositoryName: "sagemaker-scikit-learn",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `repositoryName` - (Required) Name of the repository, which is generally the algorithm or library. Values include `blazingtext`, `factorization-machines`, `forecasting-deepar`, `image-classification`, `ipinsights`, `kmeans`, `knn`, `lda`, `linear-learner`, `mxnet-inference-eia`, `mxnet-inference`, `mxnet-training`, `ntm`, `object-detection`, `object2vec`, `pca`, `pytorch-inference-eia`, `pytorch-inference`, `pytorch-training`, `randomcutforest`, `sagemaker-scikit-learn`, `sagemaker-sparkml-serving`, `sagemaker-xgboost`, `semantic-segmentation`, `seq2seq`, `tensorflow-inference-eia`, `tensorflow-inference`, `tensorflow-training`, `huggingface-tensorflow-training`, `huggingface-tensorflow-inference`, `huggingface-pytorch-training`, and `huggingface-pytorch-inference`.
+* `dnsSuffix` - (Optional) DNS suffix to use in the registry path. If not specified, the AWS provider sets it to the DNS suffix for the current region.
+* `imageTag` - (Optional) Image tag for the Docker image. If not specified, the AWS provider sets the value to `1`, which for many repositories indicates the latest version. Some repositories, such as XGBoost, do not support `1` or `latest` and a specific version must be used.
+* `region` (Optional) - Region to use in the registry path. If not specified, the AWS provider sets it to the current region.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `registryId` - Account ID containing the image. For example, `469771592824`.
+* `registryPath` - Docker image URL. For example, `341280168497.dkr.ecr.ca-central-1.amazonaws.com/sagemaker-sparkml-serving:2.4`.
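+
+As a follow-on sketch of consuming `registryPath` (the `SagemakerModel` binding and its `executionRoleArn`/`primaryContainer` inputs are assumptions mirroring the `aws_sagemaker_model` resource, not something this page documents; the role ARN is a placeholder):
+
+```typescript
+// A minimal sketch, not generated by 'cdktf convert': wiring the prebuilt
+// image path into a SageMaker model definition.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsSagemakerPrebuiltEcrImage } from "./.gen/providers/aws/data-aws-sagemaker-prebuilt-ecr-image";
+import { SagemakerModel } from "./.gen/providers/aws/sagemaker-model";
+class PrebuiltImageModelStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const image = new DataAwsSagemakerPrebuiltEcrImage(this, "scikit", {
+      imageTag: "2.2-1.0.11.0",
+      repositoryName: "sagemaker-scikit-learn",
+    });
+    new SagemakerModel(this, "model", {
+      executionRoleArn: "arn:aws:iam::123456789012:role/sagemaker-example", // placeholder
+      primaryContainer: {
+        image: Token.asString(image.registryPath),
+      },
+    });
+  }
+}
+```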
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/secretsmanager_random_password.html.markdown b/website/docs/cdktf/typescript/d/secretsmanager_random_password.html.markdown new file mode 100644 index 00000000000..20a4fb91959 --- /dev/null +++ b/website/docs/cdktf/typescript/d/secretsmanager_random_password.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_random_password" +description: |- + Generate a random password +--- + + + +# Data Source: aws_secretsmanager_random_password + +Generate a random password. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSecretsmanagerRandomPassword } from "./.gen/providers/aws/data-aws-secretsmanager-random-password"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSecretsmanagerRandomPassword(this, "test", { + excludeNumbers: true, + passwordLength: 50, + }); + } +} + +``` + +## Argument Reference + +* `excludeCharacters` - (Optional) String of the characters that you don't want in the password. +* `excludeLowercase` - (Optional) Specifies whether to exclude lowercase letters from the password. +* `excludeNumbers` - (Optional) Specifies whether to exclude numbers from the password. +* `excludePunctuation` - (Optional) Specifies whether to exclude the following punctuation characters from the password: `! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~ .` +* `excludeUppercase` - (Optional) Specifies whether to exclude uppercase letters from the password. +* `includeSpace` - (Optional) Specifies whether to include the space character. +* `passwordLength` - (Optional) Length of the password. +* `requireEachIncludedType` - (Optional) Specifies whether to include at least one upper and lowercase letter, one number, and one punctuation. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `randomPassword` - Random password. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/secretsmanager_secret.html.markdown b/website/docs/cdktf/typescript/d/secretsmanager_secret.html.markdown new file mode 100644 index 00000000000..baf8f6a86f5 --- /dev/null +++ b/website/docs/cdktf/typescript/d/secretsmanager_secret.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret" +description: |- + Retrieve metadata information about a Secrets Manager secret +--- + + + +# Data Source: aws_secretsmanager_secret + +Retrieve metadata information about a Secrets Manager secret. To retrieve a secret value, see the [`awsSecretsmanagerSecretVersion` data source](/docs/providers/aws/d/secretsmanager_secret_version.html). + +## Example Usage + +### ARN + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsSecretsmanagerSecret } from "./.gen/providers/aws/data-aws-secretsmanager-secret"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSecretsmanagerSecret(this, "by-arn", { + arn: "arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456", + }); + } +} + +``` + +### Name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSecretsmanagerSecret } from "./.gen/providers/aws/data-aws-secretsmanager-secret"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSecretsmanagerSecret(this, "by-name", { + name: "example", + }); + } +} + +``` + +## Argument Reference + +* `arn` - (Optional) ARN of the secret to retrieve. +* `name` - (Optional) Name of the secret to retrieve. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the secret. +* `description` - Description of the secret. +* `kmsKeyId` - Key Management Service (KMS) Customer Master Key (CMK) associated with the secret. +* `id` - ARN of the secret. +* `tags` - Tags of the secret. +* `policy` - Resource-based policy document that's attached to the secret. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/secretsmanager_secret_rotation.html.markdown b/website/docs/cdktf/typescript/d/secretsmanager_secret_rotation.html.markdown new file mode 100644 index 00000000000..8240055cade --- /dev/null +++ b/website/docs/cdktf/typescript/d/secretsmanager_secret_rotation.html.markdown @@ -0,0 +1,51 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret_rotation" +description: |- + Retrieve information about a Secrets Manager secret rotation configuration +--- + + + +# Data Source: aws_secretsmanager_secret_rotation + +Retrieve information about a Secrets Manager secret rotation. To retrieve secret metadata, see the [`awsSecretsmanagerSecret` data source](/docs/providers/aws/d/secretsmanager_secret.html). To retrieve a secret value, see the [`awsSecretsmanagerSecretVersion` data source](/docs/providers/aws/d/secretsmanager_secret_version.html). + +## Example Usage + +### Retrieve Secret Rotation Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSecretsmanagerSecretRotation } from "./.gen/providers/aws/data-aws-secretsmanager-secret-rotation"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSecretsmanagerSecretRotation(this, "example", { + secretId: Token.asString(dataAwsSecretsmanagerSecretExample.id), + }); + } +} + +``` + +## Argument Reference + +* `secretId` - (Required) Specifies the secret containing the version that you want to retrieve. You can specify either the ARN or the friendly name of the secret. 
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `rotationEnabled` - Whether rotation is enabled.
+* `rotationLambdaArn` - ARN of the Lambda function used to rotate the secret.
+* `rotationRules` - Rotation rules applied to the secret.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/secretsmanager_secret_version.html.markdown b/website/docs/cdktf/typescript/d/secretsmanager_secret_version.html.markdown new file mode 100644 index 00000000000..b6925f9abbf --- /dev/null +++ b/website/docs/cdktf/typescript/d/secretsmanager_secret_version.html.markdown @@ -0,0 +1,108 @@ +---
+subcategory: "Secrets Manager"
+layout: "aws"
+page_title: "AWS: aws_secretsmanager_secret_version"
+description: |-
+  Retrieve information about a Secrets Manager secret version including its secret value
+---
+
+
+
+# Data Source: aws_secretsmanager_secret_version
+
+Retrieve information about a Secrets Manager secret version, including its secret value. To retrieve secret metadata, see the [`awsSecretsmanagerSecret` data source](/docs/providers/aws/d/secretsmanager_secret.html).
+
+## Example Usage
+
+### Retrieve Current Secret Version
+
+By default, this data source retrieves information based on the `AWSCURRENT` staging label.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSecretsmanagerSecretVersion } from "./.gen/providers/aws/data-aws-secretsmanager-secret-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSecretsmanagerSecretVersion(this, "secret-version", {
+      secretId: Token.asString(example.id),
+    });
+  }
+}
+
+```
+
+### Retrieve Specific Secret Version
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSecretsmanagerSecretVersion } from "./.gen/providers/aws/data-aws-secretsmanager-secret-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSecretsmanagerSecretVersion(this, "by-version-stage", {
+      secretId: Token.asString(example.id),
+      versionStage: "example",
+    });
+  }
+}
+
+```
+
+### Handling Key-Value Secret Strings in JSON
+
+Reading key-value pairs from JSON back into a native Terraform map can be accomplished in Terraform 0.12 and later with the [`jsondecode()` function](https://www.terraform.io/docs/configuration/functions/jsondecode.html):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  TerraformOutput,
+  Fn,
+  Token,
+  propertyAccess,
+  TerraformStack,
+} from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TerraformOutput(this, "example", {
+      value: propertyAccess(
+        Fn.jsondecode(Token.asString(example.secretString)),
+        ['"key1"']
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `secretId` - (Required) Specifies the secret containing the version that you want to retrieve. You can specify either the ARN or the friendly name of the secret.
+* `versionId` - (Optional) Specifies the unique identifier of the version of the secret that you want to retrieve. Overrides `versionStage`.
+* `versionStage` - (Optional) Specifies the secret version that you want to retrieve by the staging label attached to the version. Defaults to `AWSCURRENT`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the secret.
+* `id` - Unique identifier of this version of the secret.
+* `secretString` - Decrypted part of the protected secret information that was originally provided as a string.
+* `secretBinary` - Decrypted part of the protected secret information that was originally provided as a binary.
+* `versionId` - Unique identifier of this version of the secret.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/secretsmanager_secrets.html.markdown b/website/docs/cdktf/typescript/d/secretsmanager_secrets.html.markdown new file mode 100644 index 00000000000..e3e8185e3bc --- /dev/null +++ b/website/docs/cdktf/typescript/d/secretsmanager_secrets.html.markdown @@ -0,0 +1,60 @@ +---
+subcategory: "Secrets Manager"
+layout: "aws"
+page_title: "AWS: aws_secretsmanager_secrets"
+description: |-
+  Get information on Secrets Manager secrets.
+---
+
+
+
+# Data Source: aws_secretsmanager_secrets
+
+Use this data source to get the ARNs and names of Secrets Manager secrets matching the specified criteria.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsSecretsmanagerSecrets } from "./.gen/providers/aws/data-aws-secretsmanager-secrets"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSecretsmanagerSecrets(this, "example", { + filter: [ + { + name: "name", + values: ["example"], + }, + ], + }); + } +} + +``` + +## Argument Reference + +* `filter` - (Optional) Configuration block(s) for filtering. Detailed below. + +## filter Configuration Block + +The `filter` configuration block supports the following arguments: + +* `name` - (Required) Name of the filter field. Valid values can be found in the [Secrets Manager ListSecrets API Reference](https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_ListSecrets.html). +* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of ARNs of the matched Secrets Manager secrets. +* `names` - Set of names of the matched Secrets Manager secrets. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/security_group.html.markdown b/website/docs/cdktf/typescript/d/security_group.html.markdown new file mode 100644 index 00000000000..674e011635f --- /dev/null +++ b/website/docs/cdktf/typescript/d/security_group.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_security_group" +description: |- + Provides details about a specific Security Group +--- + + + +# Data Source: aws_security_group + +`awsSecurityGroup` provides details about a specific Security Group. + +This resource can prove useful when a module accepts a Security Group id as +an input variable and needs to, for example, determine the id of the +VPC that the security group belongs to. + +## Example Usage + +The following example shows how one might accept a Security Group id as a variable +and use this data source to obtain the data necessary to create a subnet. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformVariable, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSecurityGroup } from "./.gen/providers/aws/data-aws-security-group"; +import { Subnet } from "./.gen/providers/aws/subnet"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const securityGroupId = new TerraformVariable( + this, + "security_group_id", + {} + ); + const selected = new DataAwsSecurityGroup(this, "selected", { + id: securityGroupId.stringValue, + }); + new Subnet(this, "subnet", { + cidrBlock: "10.0.1.0/24", + vpcId: Token.asString(selected.vpcId), + }); + } +} + +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available +security group in the current region. The given filters must match exactly one +security group whose data will be exported as attributes. + +* `filter` - (Optional) Custom filter block as described below. 
+ +* `id` - (Optional) Id of the specific security group to retrieve. + +* `name` - (Optional) Name that the desired security group must have. + +* `tags` - (Optional) Map of tags, each pair of which must exactly match + a pair on the desired security group. + +* `vpcId` - (Optional) Id of the VPC that the desired security group belongs to. + +More complex filters can be expressed using one or more `filter` sub-blocks, +which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroups.html). + +* `values` - (Required) Set of values that are accepted for the given field. + A Security Group will be selected if any one of the given values matches. + +## Attribute Reference + +All of the argument attributes except `filter` blocks are also exported as +result attributes. This data source will complete the data by populating +any fields that are not included in the configuration with the data for +the selected Security Group. + +The following fields are also exported: + +* `description` - Description of the security group. +* `arn` - Computed ARN of the security group. + +~> **Note:** The [default security group for a VPC](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_SecurityGroups.html#DefaultSecurityGroup) has the name `default`. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/security_groups.html.markdown b/website/docs/cdktf/typescript/d/security_groups.html.markdown new file mode 100644 index 00000000000..32c7a21b901 --- /dev/null +++ b/website/docs/cdktf/typescript/d/security_groups.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_security_groups" +description: |- + Get information about a set of Security Groups. +--- + + + +# Data Source: aws_security_groups + +Use this data source to get IDs and VPC membership of Security Groups that are created outside of Terraform. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSecurityGroups } from "./.gen/providers/aws/data-aws-security-groups"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSecurityGroups(this, "test", { + tags: { + Application: "k8s", + Environment: "dev", + }, + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsSecurityGroups } from "./.gen/providers/aws/data-aws-security-groups";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSecurityGroups(this, "test", {
+      filter: [
+        {
+          name: "group-name",
+          values: ["*nodes*"],
+        },
+        {
+          name: "vpc-id",
+          values: [vpcId.stringValue],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired security groups.
+* `filter` - (Optional) One or more name/value pairs to use as filters. There are several valid keys; for a full reference, check out [describe-security-groups in the AWS CLI reference][1].
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arns` - ARNs of the matched security groups.
+* `id` - AWS Region.
+* `ids` - IDs of the matched security groups.
+* `vpcIds` - VPC IDs of the matched security groups. The data source's tag or filter *will span VPCs* unless the `vpcId` filter is also used.
+
+[1]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-security-groups.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20M`)
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/serverlessapplicationrepository_application.html.markdown b/website/docs/cdktf/typescript/d/serverlessapplicationrepository_application.html.markdown new file mode 100644 index 00000000000..61eb85e835c --- /dev/null +++ b/website/docs/cdktf/typescript/d/serverlessapplicationrepository_application.html.markdown @@ -0,0 +1,73 @@ +---
+subcategory: "Serverless Application Repository"
+layout: "aws"
+page_title: "AWS: aws_serverlessapplicationrepository_application"
+description: |-
+  Get information on an AWS Serverless Application Repository application
+---
+
+
+
+# Data Source: aws_serverlessapplicationrepository_application
+
+Use this data source to get information about an AWS Serverless Application Repository application. For example, this can be used to determine the required `capabilities` for an application.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsServerlessapplicationrepositoryApplication } from "./.gen/providers/aws/data-aws-serverlessapplicationrepository-application"; +import { ServerlessapplicationrepositoryCloudformationStack } from "./.gen/providers/aws/serverlessapplicationrepository-cloudformation-stack"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsServerlessapplicationrepositoryApplication( + this, + "example", + { + applicationId: + "arn:aws:serverlessrepo:us-east-1:123456789012:applications/ExampleApplication", + } + ); + const awsServerlessapplicationrepositoryCloudformationStackExample = + new ServerlessapplicationrepositoryCloudformationStack( + this, + "example_1", + { + applicationId: Token.asString(example.applicationId), + capabilities: Token.asList(example.requiredCapabilities), + name: "Example", + semanticVersion: Token.asString(example.semanticVersion), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServerlessapplicationrepositoryCloudformationStackExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +* `applicationId` - (Required) ARN of the application. +* `semanticVersion` - (Optional) Requested version of the application. By default, retrieves the latest version. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `applicationId` - ARN of the application. +* `name` - Name of the application. +* `requiredCapabilities` - A list of capabilities describing the permissions needed to deploy the application. +* `sourceCodeUrl` - URL pointing to the source code of the application version. +* `templateUrl` - URL pointing to the Cloud Formation template for the application version. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/service.html.markdown b/website/docs/cdktf/typescript/d/service.html.markdown new file mode 100644 index 00000000000..89413a33d1c --- /dev/null +++ b/website/docs/cdktf/typescript/d/service.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "Meta Data Sources" +layout: "aws" +page_title: "AWS: aws_service" +description: |- + Compose and decompose AWS service DNS names +--- + + + +# Data Source: aws_service + +Use this data source to compose and decompose AWS service DNS names. + +## Example Usage + +### Get Service DNS Name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { DataAwsService } from "./.gen/providers/aws/data-aws-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsRegion(this, "current", {});
+    new DataAwsService(this, "test", {
+      region: Token.asString(current.name),
+      serviceId: "ec2",
+    });
+  }
+}
+
+```
+
+### Use Service Reverse DNS Name to Get Components
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsService } from "./.gen/providers/aws/data-aws-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsService(this, "s3", {
+      reverseDnsName: "cn.com.amazonaws.cn-north-1.s3",
+    });
+  }
+}
+
+```
+
+### Determine Regional Support for a Service
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsService } from "./.gen/providers/aws/data-aws-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsService(this, "s3", {
+      reverseDnsName: "com.amazonaws.us-gov-west-1.waf",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `dnsName` - (Optional) DNS name of the service (_e.g.,_ `rds.us-east-1.amazonaws.com`). One of `dnsName`, `reverseDnsName`, or `serviceId` is required.
+* `partition` - (Optional) Partition corresponding to the region.
+* `region` - (Optional) Region of the service (_e.g.,_ `us-west-2`, `ap-northeast-1`).
+* `reverseDnsName` - (Optional) Reverse DNS name of the service (_e.g.,_ `com.amazonaws.us-west-2.s3`). One of `dnsName`, `reverseDnsName`, or `serviceId` is required.
+* `reverseDnsPrefix` - (Optional) Prefix of the service (_e.g.,_ `com.amazonaws` in AWS Commercial, `cn.com.amazonaws` in AWS China).
+* `serviceId` - (Optional) Service (_e.g.,_ `s3`, `rds`, `ec2`). One of `dnsName`, `reverseDnsName`, or `serviceId` is required.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `supported` - Whether the service is supported in the region's partition. New services may not be listed immediately as supported.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/service_discovery_dns_namespace.html.markdown b/website/docs/cdktf/typescript/d/service_discovery_dns_namespace.html.markdown new file mode 100644 index 00000000000..cace1f1e6f5 --- /dev/null +++ b/website/docs/cdktf/typescript/d/service_discovery_dns_namespace.html.markdown @@ -0,0 +1,53 @@ +---
+subcategory: "Cloud Map"
+layout: "aws"
+page_title: "AWS: aws_service_discovery_dns_namespace"
+description: |-
+  Retrieves information about a Service Discovery private or public DNS namespace.
+---
+
+
+
+# Data Source: aws_service_discovery_dns_namespace
+
+Retrieves information about a Service Discovery private or public DNS namespace.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServiceDiscoveryDnsNamespace } from "./.gen/providers/aws/data-aws-service-discovery-dns-namespace";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServiceDiscoveryDnsNamespace(this, "test", {
+      name: "example.terraform.local",
+      type: "DNS_PRIVATE",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the namespace.
+* `type` - (Required) Type of the namespace. Allowed values are `DNS_PUBLIC` or `DNS_PRIVATE`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the namespace.
+* `description` - Description of the namespace.
+* `id` - Namespace ID.
+* `hostedZone` - ID for the hosted zone that Amazon Route 53 creates when you create a namespace.
+* `tags` - Map of tags for the resource.
+
+ \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/service_discovery_http_namespace.html.markdown b/website/docs/cdktf/typescript/d/service_discovery_http_namespace.html.markdown new file mode 100644 index 00000000000..0cfdf4269ce --- /dev/null +++ b/website/docs/cdktf/typescript/d/service_discovery_http_namespace.html.markdown @@ -0,0 +1,51 @@ +---
+subcategory: "Cloud Map"
+layout: "aws"
+page_title: "AWS: aws_service_discovery_http_namespace"
+description: |-
+  Retrieves information about a Service Discovery HTTP Namespace.
+---
+
+
+
+# Data Source: aws_service_discovery_http_namespace
+
+Retrieves information about a Service Discovery HTTP Namespace.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServiceDiscoveryHttpNamespace } from "./.gen/providers/aws/data-aws-service-discovery-http-namespace";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServiceDiscoveryHttpNamespace(this, "example", {
+      name: "development",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the HTTP namespace.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ID of a namespace.
+* `arn` - ARN that Amazon Route 53 assigns to the namespace when you create it.
+* `description` - Description that you specify for the namespace when you create it.
+* `httpName` - Name of an HTTP namespace.
+* `tags` - Map of tags for the resource.
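+
+As a rough sketch of consuming this data source (the `ServiceDiscoveryService` binding and its `namespaceId` argument are assumptions borrowed from the `aws_service_discovery_service` resource, not part of this page), the namespace ID can be used to register a service in the HTTP namespace:
+
+```typescript
+// A minimal sketch, not generated by 'cdktf convert': registering a
+// Cloud Map service inside the discovered HTTP namespace.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsServiceDiscoveryHttpNamespace } from "./.gen/providers/aws/data-aws-service-discovery-http-namespace";
+import { ServiceDiscoveryService } from "./.gen/providers/aws/service-discovery-service";
+class HttpNamespaceServiceStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ns = new DataAwsServiceDiscoveryHttpNamespace(this, "example", {
+      name: "development",
+    });
+    new ServiceDiscoveryService(this, "svc", {
+      name: "api",
+      namespaceId: Token.asString(ns.id),
+    });
+  }
+}
+```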
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/service_discovery_service.html.markdown b/website/docs/cdktf/typescript/d/service_discovery_service.html.markdown new file mode 100644 index 00000000000..a32089dba62 --- /dev/null +++ b/website/docs/cdktf/typescript/d/service_discovery_service.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_service" +description: |- + Retrieves information about a Service Discovery Service +--- + + + +# Data Source: aws_service_discovery_service + +Retrieves information about a Service Discovery Service. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsServiceDiscoveryService } from "./.gen/providers/aws/data-aws-service-discovery-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsServiceDiscoveryService(this, "test", { + name: "example", + namespaceId: "NAMESPACE_ID_VALUE", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the service. +* `namespaceId` - (Required) ID of the namespace that the service belongs to. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the service. +* `arn` - ARN of the service. +* `description` - Description of the service. +* `dnsConfig` - Complex type that contains information about the resource record sets that you want Amazon Route 53 to create when you register an instance. +* `healthCheckConfig` - Complex type that contains settings for an optional health check. Only for Public DNS namespaces. +* `healthCheckCustomConfig` - A complex type that contains settings for ECS managed health checks. +* `tags` - Map of tags to assign to the service. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tagsAll` - (**Deprecated**) Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### dns_config + +This argument supports the following arguments: + +* `namespaceId` - ID of the namespace to use for DNS configuration. +* `dnsRecords` - An array that contains one DnsRecord object for each resource record set. +* `routingPolicy` - Routing policy that you want to apply to all records that Route 53 creates when you register an instance and specify the service. Valid Values: MULTIVALUE, WEIGHTED + +#### dns_records + +This argument supports the following arguments: + +* `ttl` - Amount of time, in seconds, that you want DNS resolvers to cache the settings for this resource record set. +* `type` - Type of the resource, which indicates the value that Amazon Route 53 returns in response to DNS queries. 
Valid Values: A, AAAA, SRV, CNAME
+
+### health_check_config
+
+This argument supports the following arguments:
+
+* `failureThreshold` - Number of consecutive health checks. Maximum value of 10.
+* `resourcePath` - Path that you want Route 53 to request when performing health checks. Route 53 automatically adds the DNS name for the service. If you don't specify a value, the default value is /.
+* `type` - The type of health check that you want to create, which indicates how Route 53 determines whether an endpoint is healthy. Valid Values: HTTP, HTTPS, TCP
+
+### health_check_custom_config
+
+This argument supports the following arguments:
+
+* `failureThreshold` - The number of 30-second intervals that you want service discovery to wait before it changes the health status of a service instance. Maximum value of 10.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/servicecatalog_constraint.html.markdown b/website/docs/cdktf/typescript/d/servicecatalog_constraint.html.markdown
new file mode 100644
index 00000000000..291beec81d6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/servicecatalog_constraint.html.markdown
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_constraint"
+description: |-
+  Provides information on a Service Catalog Constraint
+---
+
+
+# Data Source: aws_servicecatalog_constraint
+
+Provides information on a Service Catalog Constraint.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServicecatalogConstraint } from "./.gen/providers/aws/data-aws-servicecatalog-constraint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServicecatalogConstraint(this, "example", {
+      acceptLanguage: "en",
+      id: "cons-hrvy0335",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `id` - Constraint identifier.
+
+The following arguments are optional:
+
+* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `description` - Description of the constraint.
+* `owner` - Owner of the constraint.
+* `parameters` - Constraint parameters in JSON format.
+* `portfolioId` - Portfolio identifier.
+* `productId` - Product identifier.
+* `status` - Constraint status.
+* `type` - Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `RESOURCE_UPDATE`, `STACKSET`, and `TEMPLATE`.
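+
+Because `parameters` is returned as a JSON string, it can be decoded with Terraform's `jsondecode` through cdktf's `Fn` helpers before further use. A sketch, reusing the `cons-hrvy0335` constraint from the example above:
+
+```typescript
+import { Construct } from "constructs";
+import { Fn, TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsServicecatalogConstraint } from "./.gen/providers/aws/data-aws-servicecatalog-constraint";
+class ConstraintParametersStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const constraint = new DataAwsServicecatalogConstraint(this, "example", {
+      id: "cons-hrvy0335",
+    });
+    // Decode the JSON parameters string so individual keys can be referenced.
+    new TerraformOutput(this, "constraint_parameters", {
+      value: Fn.jsondecode(constraint.parameters),
+    });
+  }
+}
+```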
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/servicecatalog_launch_paths.html.markdown b/website/docs/cdktf/typescript/d/servicecatalog_launch_paths.html.markdown
new file mode 100644
index 00000000000..7a8b94ef1b8
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/servicecatalog_launch_paths.html.markdown
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_launch_paths"
+description: |-
+  Provides information on Service Catalog Launch Paths
+---
+
+
+# Data Source: aws_servicecatalog_launch_paths
+
+Lists the paths to the specified product. A path is how the user has access to a specified product, and is necessary when provisioning a product. A path also determines the constraints put on the product.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServicecatalogLaunchPaths } from "./.gen/providers/aws/data-aws-servicecatalog-launch-paths";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServicecatalogLaunchPaths(this, "example", {
+      productId: "prod-yakog5pdriver",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `productId` - (Required) Product identifier.
+
+The following arguments are optional:
+
+* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `summaries` - Block with information about the launch path. See details below.
+
+### summaries
+
+* `constraintSummaries` - Block for constraints on the portfolio-product relationship. See details below.
+* `pathId` - Identifier of the product path.
+* `name` - Name of the portfolio to which the path was assigned.
+* `tags` - Tags associated with this product path.
+
+### constraint_summaries
+
+* `description` - Description of the constraint.
+* `type` - Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `STACKSET`, and `TEMPLATE`.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/servicecatalog_portfolio.html.markdown b/website/docs/cdktf/typescript/d/servicecatalog_portfolio.html.markdown
new file mode 100644
index 00000000000..ddab121c083
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/servicecatalog_portfolio.html.markdown
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_portfolio"
+description: |-
+  Provides information for a Service Catalog Portfolio.
+---
+
+
+# Data Source: aws_servicecatalog_portfolio
+
+Provides information for a Service Catalog Portfolio.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServicecatalogPortfolio } from "./.gen/providers/aws/data-aws-servicecatalog-portfolio";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServicecatalogPortfolio(this, "portfolio", {
+      id: "port-07052002",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `id` - (Required) Portfolio identifier.
+
+The following arguments are optional:
+
+* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - Portfolio ARN.
+* `createdTime` - Time the portfolio was created.
+* `description` - Description of the portfolio.
+* `name` - Portfolio name.
+* `providerName` - Name of the person or organization who owns the portfolio.
+* `tags` - Tags applied to the portfolio.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/servicecatalog_portfolio_constraints.html.markdown b/website/docs/cdktf/typescript/d/servicecatalog_portfolio_constraints.html.markdown
new file mode 100644
index 00000000000..441419b74bc
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/servicecatalog_portfolio_constraints.html.markdown
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_portfolio_constraints"
+description: |-
+  Provides information on Service Catalog Portfolio Constraints
+---
+
+
+# Data Source: aws_servicecatalog_portfolio_constraints
+
+Provides information on Service Catalog Portfolio Constraints.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServicecatalogPortfolioConstraints } from "./.gen/providers/aws/data-aws-servicecatalog-portfolio-constraints";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServicecatalogPortfolioConstraints(this, "example", {
+      portfolioId: "port-3lli3b3an",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `portfolioId` - (Required) Portfolio identifier.
+
+The following arguments are optional:
+
+* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+* `productId` - (Optional) Product identifier.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `details` - List of information about the constraints. See details below.
+
+### details
+
+* `constraintId` - Identifier of the constraint.
+* `description` - Description of the constraint.
+* `portfolioId` - Identifier of the portfolio the product resides in. The constraint applies only to the instance of the product that lives within this portfolio.
+* `productId` - Identifier of the product the constraint applies to. A constraint applies to a specific instance of a product within a certain portfolio.
+* `type` - Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `STACKSET`, and `TEMPLATE`.
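+
+The `details` attribute is a computed list; assuming the generated bindings expose it as a complex list that can be indexed with `get()`, a sketch that reuses the portfolio ID from the example above and surfaces the first constraint's type:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsServicecatalogPortfolioConstraints } from "./.gen/providers/aws/data-aws-servicecatalog-portfolio-constraints";
+class FirstConstraintStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const constraints = new DataAwsServicecatalogPortfolioConstraints(
+      this,
+      "example",
+      {
+        portfolioId: "port-3lli3b3an",
+      }
+    );
+    // Index into the computed list and surface the first constraint's type.
+    new TerraformOutput(this, "first_constraint_type", {
+      value: constraints.details.get(0).type,
+    });
+  }
+}
+```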
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/servicecatalog_product.html.markdown b/website/docs/cdktf/typescript/d/servicecatalog_product.html.markdown new file mode 100644 index 00000000000..625141a43dc --- /dev/null +++ b/website/docs/cdktf/typescript/d/servicecatalog_product.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_product" +description: |- + This data source provides information about a Service Catalog product. +--- + + + +# Data Source: aws_servicecatalog_product + +Use this data source to retrieve information about a Service Catalog product. + +~> **NOTE:** A "provisioning artifact" is also known as a "version," and a "distributor" is also known as a "vendor." + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsServicecatalogProduct } from "./.gen/providers/aws/data-aws-servicecatalog-product"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsServicecatalogProduct(this, "example", { + id: "prod-dnigbtea24ste", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `id` - (Required) ID of the product. + +The following arguments are optional: + +* `acceptLanguage` - (Optional) Language code. Valid values are `en` (English), `jp` (Japanese), `zh` (Chinese). The default value is `en`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the product. +* `createdTime` - Time when the product was created. +* `description` - Description of the product. +* `distributor` - Vendor of the product. +* `hasDefaultPath` - Whether the product has a default path. +* `name` - Name of the product. +* `owner` - Owner of the product. +* `status` - Status of the product. +* `supportDescription` - Field that provides support information about the product. +* `supportEmail` - Contact email for product support. +* `supportUrl` - Contact URL for product support. +* `tags` - Tags applied to the product. +* `type` - Type of product. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/servicecatalog_provisioning_artifacts.html.markdown b/website/docs/cdktf/typescript/d/servicecatalog_provisioning_artifacts.html.markdown new file mode 100644 index 00000000000..919e6295ff3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/servicecatalog_provisioning_artifacts.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_provisioning_artifacts" +description: |- + Provides information on Service Catalog Provisioning Artifacts +--- + + + +# Data Source: aws_servicecatalog_provisioning_artifacts + +Lists the provisioning artifacts for the specified product. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsServicecatalogProvisioningArtifacts } from "./.gen/providers/aws/data-aws-servicecatalog-provisioning-artifacts";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServicecatalogProvisioningArtifacts(this, "example", {
+      productId: "prod-yakog5pdriver",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `productId` - (Required) Product identifier.
+
+The following arguments are optional:
+
+* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `provisioningArtifactDetails` - List with information about the provisioning artifacts. See details below.
+
+### provisioning_artifact_details
+
+* `active` - Indicates whether the product version is active.
+* `createdTime` - The UTC time stamp of the creation time.
+* `description` - The description of the provisioning artifact.
+* `guidance` - Information set by the administrator to provide guidance to end users about which provisioning artifacts to use.
+* `id` - The identifier of the provisioning artifact.
+* `name` - The name of the provisioning artifact.
+* `type` - The type of provisioning artifact.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/servicequotas_service.html.markdown b/website/docs/cdktf/typescript/d/servicequotas_service.html.markdown
new file mode 100644
index 00000000000..2c91af86764
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/servicequotas_service.html.markdown
+---
+subcategory: "Service Quotas"
+layout: "aws"
+page_title: "AWS: aws_servicequotas_service"
+description: |-
+  Retrieve information about a Service Quotas Service
+---
+
+
+# Data Source: aws_servicequotas_service
+
+Retrieve information about a Service Quotas Service.
+
+~> **NOTE:** Global quotas apply to all AWS regions, but can only be accessed in `us-east-1` in the Commercial partition or `us-gov-west-1` in the GovCloud partition. In other regions, the AWS API will return the error `The request failed because the specified service does not exist.`
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServicequotasService } from "./.gen/providers/aws/data-aws-servicequotas-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServicequotasService(this, "example", {
+      serviceName: "Amazon Virtual Private Cloud (Amazon VPC)",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `serviceName` - (Required) Service name to lookup within Service Quotas. Available values can be found with the [AWS CLI service-quotas list-services command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-services.html).
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Code of the service.
+* `serviceCode` - Code of the service.
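+
+The exported `serviceCode` is typically fed straight into the `aws_servicequotas_service_quota` data source (documented below). A sketch combining the two, reusing the VPC service from the example above and the `L-F678F1CE` ("VPCs per Region") quota code:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack, Token } from "cdktf";
+import { DataAwsServicequotasService } from "./.gen/providers/aws/data-aws-servicequotas-service";
+import { DataAwsServicequotasServiceQuota } from "./.gen/providers/aws/data-aws-servicequotas-service-quota";
+class VpcQuotaStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const vpc = new DataAwsServicequotasService(this, "vpc", {
+      serviceName: "Amazon Virtual Private Cloud (Amazon VPC)",
+    });
+    // Chain the looked-up service code into a quota lookup.
+    const quota = new DataAwsServicequotasServiceQuota(this, "vpcs_per_region", {
+      serviceCode: Token.asString(vpc.serviceCode),
+      quotaCode: "L-F678F1CE",
+    });
+    new TerraformOutput(this, "vpcs_per_region_value", {
+      value: quota.value,
+    });
+  }
+}
+```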
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/servicequotas_service_quota.html.markdown b/website/docs/cdktf/typescript/d/servicequotas_service_quota.html.markdown
new file mode 100644
index 00000000000..db0fba2c506
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/servicequotas_service_quota.html.markdown
+---
+subcategory: "Service Quotas"
+layout: "aws"
+page_title: "AWS: aws_servicequotas_service_quota"
+description: |-
+  Retrieve information about a Service Quota
+---
+
+
+# Data Source: aws_servicequotas_service_quota
+
+Retrieve information about a Service Quota.
+
+~> **NOTE:** Global quotas apply to all AWS regions, but can only be accessed in `us-east-1` in the Commercial partition or `us-gov-west-1` in the GovCloud partition. In other regions, the AWS API will return the error `The request failed because the specified service does not exist.`
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsServicequotasServiceQuota } from "./.gen/providers/aws/data-aws-servicequotas-service-quota";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsServicequotasServiceQuota(this, "by_quota_code", {
+      quotaCode: "L-F678F1CE",
+      serviceCode: "vpc",
+    });
+    new DataAwsServicequotasServiceQuota(this, "by_quota_name", {
+      quotaName: "VPCs per Region",
+      serviceCode: "vpc",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** Either `quotaCode` or `quotaName` must be configured.
+
+* `serviceCode` - (Required) Service code for the quota. Available values can be found with the [`awsServicequotasService` data source](/docs/providers/aws/d/servicequotas_service.html) or [AWS CLI service-quotas list-services command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-services.html).
+* `quotaCode` - (Optional) Quota code within the service. When configured, the data source directly looks up the service quota. Available values can be found with the [AWS CLI service-quotas list-service-quotas command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-service-quotas.html). One of `quotaCode` or `quotaName` must be specified.
+* `quotaName` - (Optional) Quota name within the service. When configured, the data source searches through all service quotas to find the matching quota name. Available values can be found with the [AWS CLI service-quotas list-service-quotas command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-service-quotas.html). One of `quotaName` or `quotaCode` must be specified.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `adjustable` - Whether the service quota is adjustable.
+* `arn` - ARN of the service quota.
+* `defaultValue` - Default value of the service quota.
+* `globalQuota` - Whether the service quota is global for the AWS account.
+* `id` - ARN of the service quota.
+* `serviceName` - Name of the service.
+* `usageMetric` - Information about the measurement.
+    * `metricDimensions` - The metric dimensions.
+        * `class`
+        * `resource`
+        * `service`
+        * `type`
+    * `metricName` - The name of the metric.
+    * `metricNamespace` - The namespace of the metric.
+    * `metricStatisticRecommendation` - The metric statistic that AWS recommends you use when determining quota usage.
+* `value` - Current value of the service quota.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ses_active_receipt_rule_set.html.markdown b/website/docs/cdktf/typescript/d/ses_active_receipt_rule_set.html.markdown
new file mode 100644
index 00000000000..2398cab5467
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ses_active_receipt_rule_set.html.markdown
+---
+subcategory: "SES (Simple Email)"
+layout: "aws"
+page_title: "AWS: aws_ses_active_receipt_rule_set"
+description: |-
+  Retrieve the active SES receipt rule set
+---
+
+
+# Data Source: aws_ses_active_receipt_rule_set
+
+Retrieve the active SES receipt rule set.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSesActiveReceiptRuleSet } from "./.gen/providers/aws/data-aws-ses-active-receipt-rule-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSesActiveReceiptRuleSet(this, "main", {});
+  }
+}
+
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - SES receipt rule set ARN.
+* `ruleSetName` - Name of the rule set.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ses_domain_identity.markdown b/website/docs/cdktf/typescript/d/ses_domain_identity.markdown
new file mode 100644
index 00000000000..2060b140582
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ses_domain_identity.markdown
+---
+subcategory: "SES (Simple Email)"
+layout: "aws"
+page_title: "AWS: aws_ses_domain_identity"
+description: |-
+  Retrieve the SES domain identity
+---
+
+
+# Data Source: aws_ses_domain_identity
+
+Retrieve the SES domain identity.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSesDomainIdentity } from "./.gen/providers/aws/data-aws-ses-domain-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSesDomainIdentity(this, "example", {
+      domain: "example.com",
+    });
+  }
+}
+
+```
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the domain identity.
+* `domain` - Name of the domain.
+* `verificationToken` - Code that, when added to the domain as a TXT record, signals to SES that the owner of the domain has authorized SES to act on their behalf.
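+
+A common follow-up is publishing the verification token as a TXT record. A hedged sketch using the `aws_route53_record` resource; the binding path and the hosted zone ID are assumptions for illustration:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsSesDomainIdentity } from "./.gen/providers/aws/data-aws-ses-domain-identity";
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+class SesVerificationStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const identity = new DataAwsSesDomainIdentity(this, "example", {
+      domain: "example.com",
+    });
+    // Publish the verification token; the zone ID below is a placeholder.
+    new Route53Record(this, "ses_verification", {
+      zoneId: "Z0123456789EXAMPLE",
+      name: "_amazonses.example.com",
+      type: "TXT",
+      ttl: 600,
+      records: [identity.verificationToken],
+    });
+  }
+}
+```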
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ses_email_identity.markdown b/website/docs/cdktf/typescript/d/ses_email_identity.markdown new file mode 100644 index 00000000000..6deb83eea86 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ses_email_identity.markdown @@ -0,0 +1,44 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_email_identity" +description: |- + Retrieve the active SES email identity +--- + + + +# Data Source: aws_ses_email_identity + +Retrieve the active SES email identity + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSesEmailIdentity } from "./.gen/providers/aws/data-aws-ses-email-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSesEmailIdentity(this, "example", { + email: "awesome@example.com", + }); + } +} + +``` + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the email identity. +* `email` - Email identity. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/sesv2_configuration_set.html.markdown b/website/docs/cdktf/typescript/d/sesv2_configuration_set.html.markdown new file mode 100644 index 00000000000..48bea39aa46 --- /dev/null +++ b/website/docs/cdktf/typescript/d/sesv2_configuration_set.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_configuration_set" +description: |- + Terraform data source for managing an AWS SESv2 (Simple Email V2) Configuration Set. +--- + + + +# Data Source: aws_sesv2_configuration_set + +Terraform data source for managing an AWS SESv2 (Simple Email V2) Configuration Set. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSesv2ConfigurationSet } from "./.gen/providers/aws/data-aws-sesv2-configuration-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSesv2ConfigurationSet(this, "example", { + configurationSetName: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `configurationSetName` - (Required) The name of the configuration set. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `deliveryOptions` - An object that defines the dedicated IP pool that is used to send emails that you send using the configuration set. + * `sendingPoolName` - The name of the dedicated IP pool to associate with the configuration set. + * `tlsPolicy` - Specifies whether messages that use the configuration set are required to use Transport Layer Security (TLS). 
+* `reputationOptions` - An object that defines whether or not Amazon SES collects reputation metrics for the emails that you send that use the configuration set.
+  * `lastFreshStart` - The date and time (in Unix time) when the reputation metrics were last given a fresh start.
+  * `reputationMetricsEnabled` - Specifies whether tracking of reputation metrics is enabled.
+* `sendingOptions` - An object that defines whether or not Amazon SES can send email that you send using the configuration set.
+  * `sendingEnabled` - Specifies whether email sending is enabled.
+* `suppressionOptions` - An object that contains information about the suppression list preferences for your account.
+  * `suppressedReasons` - A list that contains the reasons that email addresses are automatically added to the suppression list for your account.
+* `tags` - Key-value map of resource tags for the configuration set.
+* `trackingOptions` - An object that defines the open and click tracking options for emails that you send using the configuration set.
+  * `customRedirectDomain` - The domain to use for tracking open and click events.
+* `vdmOptions` - An object that contains information about the VDM preferences for your configuration set.
+  * `dashboardOptions` - Specifies additional settings for your VDM configuration as applicable to the Dashboard.
+    * `engagementMetrics` - Specifies the status of your VDM engagement metrics collection.
+  * `guardianOptions` - Specifies additional settings for your VDM configuration as applicable to the Guardian.
+    * `optimizedSharedDelivery` - Specifies the status of your VDM optimized shared delivery.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sesv2_dedicated_ip_pool.html.markdown b/website/docs/cdktf/typescript/d/sesv2_dedicated_ip_pool.html.markdown
new file mode 100644
index 00000000000..5a614a1d527
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sesv2_dedicated_ip_pool.html.markdown
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_dedicated_ip_pool"
+description: |-
+  Terraform data source for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+---
+
+
+# Data Source: aws_sesv2_dedicated_ip_pool
+
+Terraform data source for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSesv2DedicatedIpPool } from "./.gen/providers/aws/data-aws-sesv2-dedicated-ip-pool";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSesv2DedicatedIpPool(this, "example", {
+      poolName: "my-pool",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `poolName` - (Required) Name of the dedicated IP pool.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Dedicated IP Pool.
+* `dedicatedIps` - A list of objects describing the pool's dedicated IPs. See [`dedicatedIps`](#dedicated_ips).
+* `scalingMode` - IP pool scaling mode. Valid values: `STANDARD`, `MANAGED`.
+* `tags` - A map of tags attached to the pool.
+
+### dedicated_ips
+
+* `ip` - IPv4 address.
+* `warmupPercentage` - Indicates how complete the dedicated IP warm-up process is. When this value equals `1`, the address has completed the warm-up process and is ready for use.
+* `warmupStatus` - The warm-up status of a dedicated IP address. Valid values: `IN_PROGRESS`, `DONE`.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sesv2_email_identity.html.markdown b/website/docs/cdktf/typescript/d/sesv2_email_identity.html.markdown
new file mode 100644
index 00000000000..030acac3b7a
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sesv2_email_identity.html.markdown
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_email_identity"
+description: |-
+  Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity.
+---
+
+
+# Data Source: aws_sesv2_email_identity
+
+Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSesv2EmailIdentity } from "./.gen/providers/aws/data-aws-sesv2-email-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSesv2EmailIdentity(this, "example", {
+      emailIdentity: "example.com",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `emailIdentity` - (Required) The name of the email identity.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Email Identity.
+* `dkimSigningAttributes` - A list of objects that contains at most one element with information about the private key and selector used to configure DKIM for the identity with Bring Your Own DKIM (BYODKIM), or the key length used for Easy DKIM.
+  * `currentSigningKeyLength` - [Easy DKIM] The key length of the DKIM key pair in use.
+  * `lastKeyGenerationTimestamp` - [Easy DKIM] The last time a key pair was generated for this identity.
+  * `nextSigningKeyLength` - [Easy DKIM] The key length of the future DKIM key pair to be generated. This can be changed at most once per day.
+  * `signingAttributesOrigin` - A string that indicates how DKIM was configured for the identity. `AWS_SES` indicates that DKIM was configured for the identity by using Easy DKIM. `EXTERNAL` indicates that DKIM was configured for the identity by using Bring Your Own DKIM (BYODKIM).
+  * `status` - Describes whether or not Amazon SES has successfully located the DKIM records in the DNS records for the domain. See the [AWS SES API v2 Reference](https://docs.aws.amazon.com/ses/latest/APIReference-V2/API_DkimAttributes.html#SES-Type-DkimAttributes-Status) for supported statuses.
+  * `tokens` - If you used Easy DKIM to configure DKIM authentication for the domain, then this object contains a set of unique strings that you use to create a set of CNAME records that you add to the DNS configuration for your domain. When Amazon SES detects these records in the DNS configuration for your domain, the DKIM authentication process is complete. If you configured DKIM authentication for the domain by providing your own public-private key pair, then this object contains the selector for the public key.
+* `identityType` - The email identity type. Valid values: `EMAIL_ADDRESS`, `DOMAIN`.
+* `tags` - Key-value mapping of resource tags.
+* `verifiedForSendingStatus` - Specifies whether or not the identity is verified.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sesv2_email_identity_mail_from_attributes.html.markdown b/website/docs/cdktf/typescript/d/sesv2_email_identity_mail_from_attributes.html.markdown
new file mode 100644
index 00000000000..a17b80fe3ce
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sesv2_email_identity_mail_from_attributes.html.markdown
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_email_identity_mail_from_attributes"
+description: |-
+  Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes.
+---
+
+
+# Data Source: aws_sesv2_email_identity_mail_from_attributes
+
+Terraform data source for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSesv2EmailIdentity } from "./.gen/providers/aws/data-aws-sesv2-email-identity";
+import { DataAwsSesv2EmailIdentityMailFromAttributes } from "./.gen/providers/aws/data-aws-sesv2-email-identity-mail-from-attributes";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsSesv2EmailIdentity(this, "example", {
+      emailIdentity: "example.com",
+    });
+    const dataAwsSesv2EmailIdentityMailFromAttributesExample =
+      new DataAwsSesv2EmailIdentityMailFromAttributes(this, "example_1", {
+        emailIdentity: Token.asString(example.emailIdentity),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsSesv2EmailIdentityMailFromAttributesExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `emailIdentity` - (Required) The name of the email identity.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `behaviorOnMxFailure` - The action to take if the required MX record isn't found when you send an email. Valid values: `USE_DEFAULT_VALUE`, `REJECT_MESSAGE`.
+* `mailFromDomain` - The custom MAIL FROM domain that you want the verified identity to use.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sfn_activity.html.markdown b/website/docs/cdktf/typescript/d/sfn_activity.html.markdown
new file mode 100644
index 00000000000..6c694fc07d6
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sfn_activity.html.markdown
+---
+subcategory: "SFN (Step Functions)"
+layout: "aws"
+page_title: "AWS: aws_sfn_activity"
+description: |-
+  Use this data source to get information about a Step Functions Activity.
+--- + + + +# Data Source: aws_sfn_activity + +Provides a Step Functions Activity data source + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSfnActivity } from "./.gen/providers/aws/data-aws-sfn-activity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSfnActivity(this, "sfn_activity", { + name: "my-activity", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Optional) Name that identifies the activity. +* `arn` - (Optional) ARN that identifies the activity. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ARN that identifies the activity. +* `creationDate` - Date the activity was created. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/sfn_alias.html.markdown b/website/docs/cdktf/typescript/d/sfn_alias.html.markdown new file mode 100644 index 00000000000..cdfe3ed748f --- /dev/null +++ b/website/docs/cdktf/typescript/d/sfn_alias.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "SFN (Step Functions)" +layout: "aws" +page_title: "AWS: aws_sfn_alias" +description: |- + Terraform data source for managing an AWS SFN (Step Functions) State Machine Alias. +--- + + + +# Data Source: aws_sfn_alias + +Terraform data source for managing an AWS SFN (Step Functions) State Machine Alias. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSfnAlias } from "./.gen/providers/aws/data-aws-sfn-alias"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSfnAlias(this, "example", { + name: "my_sfn_alias", + statemachineArn: sfnTest.arn, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the State Machine alias. +* `statemachineArn` - (Required) ARN of the State Machine. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN identifying the State Machine alias. +* `creationDate` - Date the state machine Alias was created. +* `description` - Description of state machine alias. 
+* `routingConfiguration` - Routing configuration of the state machine alias.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sfn_state_machine.html.markdown b/website/docs/cdktf/typescript/d/sfn_state_machine.html.markdown
new file mode 100644
index 00000000000..76ce09b8108
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sfn_state_machine.html.markdown
+---
+subcategory: "SFN (Step Functions)"
+layout: "aws"
+page_title: "AWS: aws_sfn_state_machine"
+description: |-
+  Get information on an AWS Step Functions State Machine
+---
+
+
+# Data Source: aws_sfn_state_machine
+
+Use this data source to get the ARN of a State Machine in AWS Step
+Functions (SFN). By using this data source, you can reference a
+state machine without having to hard code the ARNs as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSfnStateMachine } from "./.gen/providers/aws/data-aws-sfn-state-machine";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSfnStateMachine(this, "example", {
+      name: "an_example_sfn_name",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly name of the state machine to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - Set to the ARN of the found state machine, suitable for referencing in other resources that support State Machines.
+* `arn` - Set to the ARN of the state machine.
+* `roleArn` - Set to the role_arn used by the state machine.
+* `definition` - Set to the state machine definition.
+* `creationDate` - Date the state machine was created.
+* `revisionId` - The revision identifier for the state machine.
+* `status` - Set to the current status of the state machine.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sfn_state_machine_versions.html.markdown b/website/docs/cdktf/typescript/d/sfn_state_machine_versions.html.markdown
new file mode 100644
index 00000000000..fda0361bfb1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sfn_state_machine_versions.html.markdown
+---
+subcategory: "SFN (Step Functions)"
+layout: "aws"
+page_title: "AWS: aws_sfn_state_machine_versions"
+description: |-
+  Terraform data source for managing AWS SFN (Step Functions) State Machine Versions.
+---
+
+
+# Data Source: aws_sfn_state_machine_versions
+
+Terraform data source for managing AWS SFN (Step Functions) State Machine Versions.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsSfnStateMachineVersions } from "./.gen/providers/aws/data-aws-sfn-state-machine-versions"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSfnStateMachineVersions(this, "test", { + statemachineArn: Token.asString(awsSfnStateMachineTest.arn), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `statemachineArn` - (Required) ARN of the State Machine. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `statemachineVersions` - ARN List identifying the statemachine versions. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/signer_signing_job.html.markdown b/website/docs/cdktf/typescript/d/signer_signing_job.html.markdown new file mode 100644 index 00000000000..29987a6d5ce --- /dev/null +++ b/website/docs/cdktf/typescript/d/signer_signing_job.html.markdown @@ -0,0 +1,63 @@ +--- +subcategory: "Signer" +layout: "aws" +page_title: "AWS: aws_signer_signing_job" +description: |- + Provides a Signer Signing Job data source. +--- + + + +# Data Source: aws_signer_signing_job + +Provides information about a Signer Signing Job. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSignerSigningJob } from "./.gen/providers/aws/data-aws-signer-signing-job"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSignerSigningJob(this, "build_signing_job", { + jobId: "9ed7e5c3-b8d4-4da0-8459-44e0b068f7ee", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `jobId` - (Required) ID of the signing job on output. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `completedAt` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was completed. +* `createdAt` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was created. +* `jobInvoker` - IAM entity that initiated the signing job. +* `jobOwner` - AWS account ID of the job owner. +* `platformDisplayName` - A human-readable name for the signing platform associated with the signing job. +* `platformId` - Platform to which your signed code image will be distributed. +* `profileName` - Name of the profile that initiated the signing operation. +* `profileVersion` - Version of the signing profile used to initiate the signing job. +* `requestedBy` - IAM principal that requested the signing job. +* `revocationRecord` - Revocation record if the signature generated by the signing job has been revoked. Contains a timestamp and the ID of the IAM entity that revoked the signature. +* `signatureExpiresAt` - The time when the signature of a signing job expires. +* `signedObject` - Name of the S3 bucket where the signed code image is saved by code signing. +* `source` - Object that contains the name of your S3 bucket or your raw code. +* `status` - Status of the signing job. +* `statusReason` - String value that contains the status reason. 
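+
+A small sketch of consuming these attributes, reusing the job ID from the example above; the job status and its reason are surfaced as stack outputs:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsSignerSigningJob } from "./.gen/providers/aws/data-aws-signer-signing-job";
+class SigningJobStatusStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const job = new DataAwsSignerSigningJob(this, "build_signing_job", {
+      jobId: "9ed7e5c3-b8d4-4da0-8459-44e0b068f7ee",
+    });
+    new TerraformOutput(this, "signing_job_status", { value: job.status });
+    new TerraformOutput(this, "signing_job_status_reason", {
+      value: job.statusReason,
+    });
+  }
+}
+```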
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/signer_signing_profile.html.markdown b/website/docs/cdktf/typescript/d/signer_signing_profile.html.markdown new file mode 100644 index 00000000000..7bb68bb28cd --- /dev/null +++ b/website/docs/cdktf/typescript/d/signer_signing_profile.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "Signer" +layout: "aws" +page_title: "AWS: aws_signer_signing_profile" +description: |- + Provides a Signer Signing Profile data source. +--- + + + +# Data Source: aws_signer_signing_profile + +Provides information about a Signer Signing Profile. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSignerSigningProfile } from "./.gen/providers/aws/data-aws-signer-signing-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSignerSigningProfile(this, "production_signing_profile", { + name: "prod_profile_DdW3Mk1foYL88fajut4mTVFGpuwfd4ACO6ANL0D1uIj7lrn8adK", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the target signing profile. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the signing profile. +* `platformDisplayName` - A human-readable name for the signing platform associated with the signing profile. +* `platformId` - ID of the platform that is used by the target signing profile. +* `revocationRecord` - Revocation information for a signing profile. +* `signatureValidityPeriod` - The validity period for a signing job. +* `status` - Status of the target signing profile. +* `tags` - List of tags associated with the signing profile. +* `version` - Current version of the signing profile. +* `versionArn` - Signing profile ARN, including the profile version. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/sns_topic.html.markdown b/website/docs/cdktf/typescript/d/sns_topic.html.markdown new file mode 100644 index 00000000000..0f23df44eb3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/sns_topic.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_topic" +description: |- + Get information on a Amazon Simple Notification Service (SNS) Topic +--- + + + +# Data Source: aws_sns_topic + +Use this data source to get the ARN of a topic in AWS Simple Notification +Service (SNS). By using this data source, you can reference SNS topics +without having to hard code the ARNs as input. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsSnsTopic } from "./.gen/providers/aws/data-aws-sns-topic";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSnsTopic(this, "example", {
+      name: "an_example_topic",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Friendly name of the topic to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the found topic, suitable for referencing in other resources that support SNS topics.
+* `id` - ARN of the found topic, suitable for referencing in other resources that support SNS topics.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sqs_queue.html.markdown b/website/docs/cdktf/typescript/d/sqs_queue.html.markdown
new file mode 100644
index 00000000000..7daf12b6e99
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sqs_queue.html.markdown
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queue"
+description: |-
+  Get information on an Amazon Simple Queue Service (SQS) Queue
+---
+
+
+# Data Source: aws_sqs_queue
+
+Use this data source to get the ARN and URL of a queue in AWS Simple Queue Service (SQS).
+By using this data source, you can reference SQS queues without having to hardcode
+the ARNs as input.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSqsQueue } from "./.gen/providers/aws/data-aws-sqs-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSqsQueue(this, "example", {
+      name: "queue",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the queue to match.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the queue.
+* `url` - URL of the queue.
+* `tags` - Map of tags for the resource.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/sqs_queues.html.markdown b/website/docs/cdktf/typescript/d/sqs_queues.html.markdown
new file mode 100644
index 00000000000..6cc43d3ba88
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/sqs_queues.html.markdown
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queues"
+description: |-
+  Terraform data source for managing AWS SQS (Simple Queue) Queues.
+---
+
+
+# Data Source: aws_sqs_queues
+
+Terraform data source for managing AWS SQS (Simple Queue) Queues.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSqsQueues } from "./.gen/providers/aws/data-aws-sqs-queues";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSqsQueues(this, "example", {
+      queueNamePrefix: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `queueNamePrefix` - (Optional) A string to use for filtering the list results. Only those queues whose name begins with the specified string are returned. Queue URLs and names are case-sensitive.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `queueUrls` - A list of queue URLs.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ssm_document.html.markdown b/website/docs/cdktf/typescript/d/ssm_document.html.markdown
new file mode 100644
index 00000000000..f24cd5a142d
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ssm_document.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_document"
+description: |-
+  Provides an SSM Document data source
+---
+
+
+
+# Data Source: aws_ssm_document
+
+Gets the contents of the specified Systems Manager document.
+
+## Example Usage
+
+To get the contents of the document owned by AWS.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmDocument } from "./.gen/providers/aws/data-aws-ssm-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const foo = new DataAwsSsmDocument(this, "foo", {
+      documentFormat: "YAML",
+      name: "AWS-GatherSoftwareInventory",
+    });
+    new TerraformOutput(this, "content", {
+      value: foo.content,
+    });
+  }
+}
+
+```
+
+To get the contents of a custom document.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmDocument } from "./.gen/providers/aws/data-aws-ssm-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmDocument(this, "test", {
+      documentFormat: "JSON",
+      name: Token.asString(awsSsmDocumentTest.name),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the Systems Manager document.
+* `documentFormat` - (Optional) Returns the document in the specified format. The document format can be `json`, `yaml`, or `text`. JSON is the default format.
+* `documentVersion` - (Optional) Document version for which you want information.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the document. If the document is an AWS managed document, this value will be set to the name of the document instead.
+* `content` - Contents of the document.
+* `documentType` - Type of the document.
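+
+Because `content` comes back as one string in the requested `documentFormat`, it can be decoded further inside Terraform. Below is a sketch (not `cdktf convert` output; the document name and the presence of a top-level `description` field are assumptions for illustration) that reads one field out of a JSON document using the built-in `jsondecode()` and `lookup()` functions through cdktf's `Fn` helpers.
+
+```typescript
+// Sketch: decode a JSON SSM document body and surface one field.
+import { Construct } from "constructs";
+import { Fn, TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsSsmDocument } from "./.gen/providers/aws/data-aws-ssm-document";
+class InspectDocument extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const doc = new DataAwsSsmDocument(this, "doc", {
+      documentFormat: "JSON",
+      name: "AWS-RunShellScript", // assumed document; any JSON document works
+    });
+    new TerraformOutput(this, "document_description", {
+      // jsondecode()/lookup() are evaluated by Terraform, not by TypeScript.
+      value: Fn.lookup(Fn.jsondecode(doc.content), "description", ""),
+    });
+  }
+}
+```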
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ssm_instances.html.markdown b/website/docs/cdktf/typescript/d/ssm_instances.html.markdown
new file mode 100644
index 00000000000..517880cd60b
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ssm_instances.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_instances"
+description: |-
+  Get information on SSM managed instances.
+---
+
+
+
+# Data Source: aws_ssm_instances
+
+Use this data source to get the instance IDs of SSM managed instances.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmInstances } from "./.gen/providers/aws/data-aws-ssm-instances";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmInstances(this, "example", {
+      filter: [
+        {
+          name: "PlatformTypes",
+          values: ["Linux"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [SSM InstanceInformationStringFilter API Reference](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_InstanceInformationStringFilter.html).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - Set of instance IDs of the matched SSM managed instances.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ssm_maintenance_windows.html.markdown b/website/docs/cdktf/typescript/d/ssm_maintenance_windows.html.markdown
new file mode 100644
index 00000000000..c8327038555
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ssm_maintenance_windows.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_maintenance_windows"
+description: |-
+  Get information on SSM maintenance windows.
+---
+
+
+
+# Data Source: aws_ssm_maintenance_windows
+
+Use this data source to get the window IDs of SSM maintenance windows.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmMaintenanceWindows } from "./.gen/providers/aws/data-aws-ssm-maintenance-windows";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmMaintenanceWindows(this, "example", {
+      filter: [
+        {
+          name: "Enabled",
+          values: ["true"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Configuration block(s) for filtering. Detailed below.
+
+### filter Configuration Block
+
+The `filter` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the filter field. Valid values can be found in the [SSM DescribeMaintenanceWindows API Reference](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_DescribeMaintenanceWindows.html#API_DescribeMaintenanceWindows_RequestSyntax).
+* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - List of window IDs of the matched SSM maintenance windows.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ssm_parameter.html.markdown b/website/docs/cdktf/typescript/d/ssm_parameter.html.markdown
new file mode 100644
index 00000000000..f85c258dee1
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ssm_parameter.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_parameter"
+description: |-
+  Provides an SSM Parameter data source
+---
+
+
+
+# Data Source: aws_ssm_parameter
+
+Provides an SSM Parameter data source.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmParameter } from "./.gen/providers/aws/data-aws-ssm-parameter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmParameter(this, "foo", {
+      name: "foo",
+    });
+  }
+}
+
+```
+
+~> **Note:** The unencrypted value of a SecureString will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **Note:** The data source is currently following the behavior of the [SSM API](https://docs.aws.amazon.com/sdk-for-go/api/service/ssm/#Parameter) to return a string value, regardless of parameter type. For type `stringList`, we can use the built-in [split()](https://www.terraform.io/docs/configuration/functions/split.html) function to get values in a list. Example: `split(",", data.aws_ssm_parameter.subnets.value)`
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `name` - (Required) Name of the parameter.
+* `withDecryption` - (Optional) Whether to return decrypted `secureString` value. Defaults to `true`.
+
+## Attribute Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `arn` - ARN of the parameter.
+* `name` - Name of the parameter.
+* `type` - Type of the parameter. Valid types are `string`, `stringList` and `secureString`.
+* `value` - Value of the parameter. This value is always marked as sensitive in the Terraform plan output, regardless of `type`. In Terraform CLI version 0.15 and later, this may require additional configuration handling for certain scenarios. For more information, see the [Terraform v0.15 Upgrade Guide](https://www.terraform.io/upgrade-guides/0-15.html#sensitive-output-values).
+* `insecureValue` - Value of the parameter. **Use caution:** This value is never marked as sensitive.
+* `version` - Version of the parameter.
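+
+As the note above says, a `stringList` parameter is returned as a single comma-separated string. The following is a sketch (not `cdktf convert` output; the parameter name `/example/subnets` is a placeholder) of turning that string into a list with the built-in `split()` function via cdktf's `Fn` helper.
+
+```typescript
+// Sketch: split a StringList parameter value into a Terraform list.
+import { Construct } from "constructs";
+import { Fn, TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsSsmParameter } from "./.gen/providers/aws/data-aws-ssm-parameter";
+class SplitParameter extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const subnets = new DataAwsSsmParameter(this, "subnets", {
+      name: "/example/subnets", // placeholder StringList parameter
+    });
+    new TerraformOutput(this, "subnet_ids", {
+      // `value` is always marked sensitive, so the output must be as well.
+      sensitive: true,
+      value: Fn.split(",", subnets.value),
+    });
+  }
+}
+```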
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ssm_parameters_by_path.html.markdown b/website/docs/cdktf/typescript/d/ssm_parameters_by_path.html.markdown
new file mode 100644
index 00000000000..27f52d2c660
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ssm_parameters_by_path.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_parameters_by_path"
+description: |-
+  Provides SSM Parameters by path
+---
+
+
+
+# Data Source: aws_ssm_parameters_by_path
+
+Provides SSM Parameters by path.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmParametersByPath } from "./.gen/providers/aws/data-aws-ssm-parameters-by-path";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmParametersByPath(this, "foo", {
+      path: "/foo",
+    });
+  }
+}
+
+```
+
+~> **Note:** The unencrypted value of a SecureString will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **Note:** The data source is currently following the behavior of the [SSM API](https://docs.aws.amazon.com/sdk-for-go/api/service/ssm/#Parameter) to return a string value, regardless of parameter type. For type `stringList`, we can use the built-in [split()](https://www.terraform.io/docs/configuration/functions/split.html) function to get values in a list. Example: `split(",", data.aws_ssm_parameter.subnets.value)`
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `path` - (Required) Prefix path of the parameter.
+* `withDecryption` - (Optional) Whether to return decrypted `secureString` value. Defaults to `true`.
+* `recursive` - (Optional) Whether to recursively return parameters under `path`. Defaults to `false`.
+
+## Attribute Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `arns` - ARNs of the parameters.
+* `names` - Names of the parameters.
+* `types` - Types of the parameters.
+* `values` - Values of the parameters.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ssm_patch_baseline.html.markdown b/website/docs/cdktf/typescript/d/ssm_patch_baseline.html.markdown
new file mode 100644
index 00000000000..0c0b29fc3d4
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ssm_patch_baseline.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_patch_baseline"
+description: |-
+  Provides an SSM Patch Baseline data source
+---
+
+
+
+# Data Source: aws_ssm_patch_baseline
+
+Provides an SSM Patch Baseline data source. Useful if you wish to reuse the default baselines provided.
+
+## Example Usage
+
+To retrieve a baseline provided by AWS:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmPatchBaseline } from "./.gen/providers/aws/data-aws-ssm-patch-baseline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmPatchBaseline(this, "centos", {
+      namePrefix: "AWS-",
+      operatingSystem: "CENTOS",
+      owner: "AWS",
+    });
+  }
+}
+
+```
+
+To retrieve a baseline in your account:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmPatchBaseline } from "./.gen/providers/aws/data-aws-ssm-patch-baseline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmPatchBaseline(this, "default_custom", {
+      defaultBaseline: true,
+      namePrefix: "MyCustomBaseline",
+      operatingSystem: "WINDOWS",
+      owner: "Self",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+* `owner` - (Required) Owner of the baseline. Valid values: `all`, `aws`, `self` (the current account).
+* `namePrefix` - (Optional) Filter results by the baseline name prefix.
+* `defaultBaseline` - (Optional) Filters the results against the baseline's `defaultBaseline` field.
+* `operatingSystem` - (Optional) Specified OS for the baseline. Valid values: `amazonLinux`, `amazonLinux2`, `ubuntu`, `redhatEnterpriseLinux`, `suse`, `centos`, `oracleLinux`, `debian`, `macos`, `raspbian` and `rockyLinux`.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `approvedPatches` - List of explicitly approved patches for the baseline.
+* `approvedPatchesComplianceLevel` - The compliance level for approved patches.
+* `approvedPatchesEnableNonSecurity` - Indicates whether the list of approved patches includes non-security updates that should be applied to the instances.
+* `approvalRule` - List of rules used to include patches in the baseline.
+    * `approveAfterDays` - The number of days after the release date of each patch matched by the rule the patch is marked as approved in the patch baseline.
+    * `approveUntilDate` - The cutoff date for auto approval of released patches. Any patches released on or before this date are installed automatically. Date is formatted as `YYYY-MM-DD`. Conflicts with `approveAfterDays`.
+    * `complianceLevel` - The compliance level for patches approved by this rule.
+    * `enableNonSecurity` - Boolean enabling the application of non-security updates.
+    * `patchFilter` - The patch filter group that defines the criteria for the rule.
+        * `key` - The key for the filter.
+        * `values` - The value for the filter.
+* `globalFilter` - Set of global filters used to exclude patches from the baseline.
+    * `key` - The key for the filter.
+    * `values` - The value for the filter.
+* `id` - ID of the baseline.
+* `name` - Name of the baseline.
+* `description` - Description of the baseline.
+* `rejectedPatches` - List of rejected patches.
+* `rejectedPatchesAction` - The action specified to take on patches included in the `rejectedPatches` list.
+* `source` - Information about the patches to use to update the managed nodes, including target operating systems and source repositories.
+    * `configuration` - The value of the yum repo configuration.
+ * `name` - The name specified to identify the patch source. + * `products` - The specific operating system versions a patch repository applies to. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ssmcontacts_contact.html.markdown b/website/docs/cdktf/typescript/d/ssmcontacts_contact.html.markdown new file mode 100644 index 00000000000..cbf802dd416 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ssmcontacts_contact.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_contact" +description: |- + Terraform data source for managing an AWS SSM Contact. +--- + + + +# Data Source: aws_ssmcontacts_contact + +Terraform data source for managing an AWS SSM Contact. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSsmcontactsContact } from "./.gen/providers/aws/data-aws-ssmcontacts-contact"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSsmcontactsContact(this, "example", { + arn: "arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `arn` - (Required) The Amazon Resource Name (ARN) of the contact or escalation plan. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `alias` - A unique and identifiable alias of the contact or escalation plan. +* `type` - The type of contact engaged. A single contact is type `personal` and an escalation plan is type `escalation`. +* `displayName` - Full friendly name of the contact or escalation plan. +* `tags` - Map of tags to assign to the resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ssmcontacts_contact_channel.html.markdown b/website/docs/cdktf/typescript/d/ssmcontacts_contact_channel.html.markdown new file mode 100644 index 00000000000..e353762f8f5 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ssmcontacts_contact_channel.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_contact_channel" +description: |- + Terraform data source for managing an AWS SSM Contacts Contact Channel. +--- + + + +# Data Source: aws_ssmcontacts_contact_channel + +Terraform data source for managing an AWS SSM Contacts Contact Channel. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsSsmcontactsContactChannel } from "./.gen/providers/aws/data-aws-ssmcontacts-contact-channel"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSsmcontactsContactChannel(this, "example", { + arn: "arn:aws:ssm-contacts:us-west-2:123456789012:contact-channel/example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +- `arn` - Amazon Resource Name (ARN) of the contact channel. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +- `activationStatus` - Whether the contact channel is activated. + +- `contactId` - Amazon Resource Name (ARN) of the AWS SSM Contact that the contact channel belongs to. + +- `deliveryAddress` - Details used to engage the contact channel. + +- `name` - Name of the contact channel. + +- `type` - Type of the contact channel. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ssmcontacts_plan.html.markdown b/website/docs/cdktf/typescript/d/ssmcontacts_plan.html.markdown new file mode 100644 index 00000000000..caf0c725e40 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ssmcontacts_plan.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_plan" +description: |- + Terraform data source for managing an AWS SSM Contact Plan. +--- + + + +# Data Source: aws_ssmcontacts_plan + +Terraform data source for managing a Plan of an AWS SSM Contact. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSsmcontactsPlan } from "./.gen/providers/aws/data-aws-ssmcontacts-plan"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsSsmcontactsPlan(this, "test", { + contactId: + "arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `contactId` - (Required) The Amazon Resource Name (ARN) of the contact or escalation plan. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `stage` - List of stages. A contact has an engagement plan with stages that contact specified contact channels. An escalation plan uses stages that contact specified contacts. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ssmincidents_replication_set.html.markdown b/website/docs/cdktf/typescript/d/ssmincidents_replication_set.html.markdown new file mode 100644 index 00000000000..491e3cb9246 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ssmincidents_replication_set.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "SSM Incident Manager Incidents" +layout: "aws" +page_title: "AWS: aws_ssmincidents_replication_set" +description: |- + Terraform data source for managing an incident replication set in AWS Systems Manager Incident Manager. +--- + + + + +# Data Source: aws_ssmincidents_replication_set + +~> **NOTE:** The AWS Region specified by a Terraform provider must always be one of the Regions specified for the replication set. 
+
+Use this Terraform data source to manage a replication set in AWS Systems Manager Incident Manager.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmincidentsReplicationSet } from "./.gen/providers/aws/data-aws-ssmincidents-replication-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmincidentsReplicationSet(this, "example", {});
+  }
+}
+
+```
+
+## Argument Reference
+
+No arguments are required.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the replication set.
+* `tags` - All tags applied to the replication set.
+* `createdBy` - The ARN of the user who created the replication set.
+* `deletionProtected` - If `true`, the last remaining Region in a replication set can’t be deleted.
+* `lastModifiedBy` - The ARN of the user who last modified the replication set.
+* `status` - The overall status of a replication set.
+    * Valid Values: `active` | `creating` | `updating` | `deleting` | `failed`
+
+The `region` configuration block exports the following attributes for each Region:
+
+* `name` - The name of the Region.
+* `kmsKeyArn` - The ARN of the AWS Key Management Service (AWS KMS) encryption key.
+* `status` - The current status of the Region.
+    * Valid Values: `active` | `creating` | `updating` | `deleting` | `failed`
+* `statusMessage` - More information about the status of a Region.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/ssmincidents_response_plan.html.markdown b/website/docs/cdktf/typescript/d/ssmincidents_response_plan.html.markdown
new file mode 100644
index 00000000000..2b619e0f887
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/ssmincidents_response_plan.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "SSM Incident Manager Incidents"
+layout: "aws"
+page_title: "AWS: aws_ssmincidents_response_plan"
+description: |-
+  Terraform data source for managing a response plan in AWS Systems Manager Incident Manager.
+---
+
+
+
+# Data Source: aws_ssmincidents_response_plan
+
+Use this Terraform data source to manage a response plan in AWS Systems Manager Incident Manager.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// The original 'cdktf convert' output instantiated the ssmincidents_response_plan
+// *resource* here by mistake; the data source class is used instead.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsmincidentsResponsePlan } from "./.gen/providers/aws/data-aws-ssmincidents-response-plan";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSsmincidentsResponsePlan(this, "example", {
+      arn: "exampleARN",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the response plan.
+ +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `name` - The name of the response plan. +* `tags` - The tags applied to the response plan. +* `displayName` - The long format of the response plan name. This field can contain spaces. +* `chatChannel` - The Chatbot chat channel used for collaboration during an incident. +* `engagements` - The Amazon Resource Name (ARN) for the contacts and escalation plans that the response plan engages during an incident. + +The `incidentTemplate` configuration block exports the following attributes: + +* `title` - The title of a generated incident. +* `impact` - The impact value of a generated incident. The following values are supported: + * `1` - Severe Impact + * `2` - High Impact + * `3` - Medium Impact + * `4` - Low Impact + * `5` - No Impact +* `dedupeString` - A string used to stop Incident Manager from creating multiple incident records for the same incident. +* `incidentTags` - The tags assigned to an incident template. When an incident starts, Incident Manager assigns the tags specified in the template to the incident. +* `summary` - The summary of an incident. +* `notificationTarget` - The Amazon Simple Notification Service (Amazon SNS) targets that this incident notifies when it is updated. The `notificationTarget` configuration block supports the following argument: + * `snsTopicArn` - The ARN of the Amazon SNS topic. + +The `action` configuration block exports the following attributes: + +* `action` - (Optional) The actions that the response plan starts at the beginning of an incident. + * `ssmAutomation` - The Systems Manager automation document to start as the runbook at the beginning of the incident. The following values are supported: + * `documentName` - The automation document's name. + * `roleArn` - The Amazon Resource Name (ARN) of the role that the automation document assumes when it runs commands. + * `documentVersion` - The version of the automation document to use at runtime. + * `targetAccount` - The account that runs the automation document. This can be in either the management account or an application account. + * `parameter` - The key-value pair parameters used when the automation document runs. The following values are supported: + * `name` - The name of parameter. + * `values` - The values for the associated parameter name. + * `dynamicParameters` - The key-value pair used to resolve dynamic parameter values when processing a Systems Manager Automation runbook. + +The `integration` configuration block exports the following attributes: + +* `integration` - Information about third-party services integrated into the response plan. The following values are supported: + * `pagerduty` - Details about the PagerDuty configuration for a response plan. The following values are supported: + * `name` - The name of the PagerDuty configuration. + * `serviceId` - The ID of the PagerDuty service that the response plan associates with an incident when it launches. + * `secretId` - The ID of the AWS Secrets Manager secret that stores your PagerDuty key — either a General Access REST API Key or User Token REST API Key — and other user credentials. 
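+
+The nested blocks above are exposed as single-element lists, so individual fields can be read with `propertyAccess`, the same pattern the SSO Admin pages in these docs use. This is a sketch only, not `cdktf convert` output; the ARN is a placeholder, and the single-element shape of `incidentTemplate` is an assumption.
+
+```typescript
+// Sketch: read one nested field off the response plan data source.
+import { Construct } from "constructs";
+import { propertyAccess, TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsSsmincidentsResponsePlan } from "./.gen/providers/aws/data-aws-ssmincidents-response-plan";
+class ResponsePlanOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const plan = new DataAwsSsmincidentsResponsePlan(this, "plan", {
+      arn: "exampleARN", // placeholder response plan ARN
+    });
+    new TerraformOutput(this, "incident_title", {
+      // incidentTemplate is indexed like a list before reading its fields.
+      value: propertyAccess(plan.incidentTemplate, ["0", "title"]),
+    });
+  }
+}
+```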
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ssoadmin_instances.html.markdown b/website/docs/cdktf/typescript/d/ssoadmin_instances.html.markdown new file mode 100644 index 00000000000..c57a7dd892c --- /dev/null +++ b/website/docs/cdktf/typescript/d/ssoadmin_instances.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "SSO Admin" +layout: "aws" +page_title: "AWS: aws_ssoadmin_instances" +description: |- + Get information on SSO Instances. +--- + + + +# Data Source: aws_ssoadmin_instances + +Use this data source to get ARNs and Identity Store IDs of Single Sign-On (SSO) Instances. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, Fn, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsSsoadminInstances(this, "example", {}); + new TerraformOutput(this, "arn", { + value: propertyAccess(Fn.tolist(example.arns), ["0"]), + }); + new TerraformOutput(this, "identity_store_id", { + value: propertyAccess(Fn.tolist(example.identityStoreIds), ["0"]), + }); + } +} + +``` + +## Argument Reference + +There are no arguments available for this data source. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arns` - Set of Amazon Resource Names (ARNs) of the SSO Instances. +* `id` - AWS Region. +* `identityStoreIds` - Set of identifiers of the identity stores connected to the SSO Instances. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/ssoadmin_permission_set.html.markdown b/website/docs/cdktf/typescript/d/ssoadmin_permission_set.html.markdown new file mode 100644 index 00000000000..c473eae0da2 --- /dev/null +++ b/website/docs/cdktf/typescript/d/ssoadmin_permission_set.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "SSO Admin" +layout: "aws" +page_title: "AWS: aws_ssoadmin_permission_set" +description: |- + Get information on a Single Sign-On (SSO) Permission Set. +--- + + + +# Data Source: aws_ssoadmin_permission_set + +Use this data source to get a Single Sign-On (SSO) Permission Set. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Fn, + propertyAccess, + Token, + TerraformOutput, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances";
+import { DataAwsSsoadminPermissionSet } from "./.gen/providers/aws/data-aws-ssoadmin-permission-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsSsoadminInstances(this, "example", {});
+    const dataAwsSsoadminPermissionSetExample =
+      new DataAwsSsoadminPermissionSet(this, "example_1", {
+        instanceArn: Token.asString(
+          propertyAccess(Fn.tolist(example.arns), ["0"])
+        ),
+        name: "Example",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsSsoadminPermissionSetExample.overrideLogicalId("example");
+    new TerraformOutput(this, "arn", {
+      value: dataAwsSsoadminPermissionSetExample.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This data source supports the following arguments:
+
+~> **NOTE:** Either `arn` or `name` must be configured.
+
+* `arn` - (Optional) ARN of the permission set.
+* `instanceArn` - (Required) ARN of the SSO Instance associated with the permission set.
+* `name` - (Optional) Name of the SSO Permission Set.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `id` - ARN of the Permission Set.
+* `description` - Description of the Permission Set.
+* `relayState` - Relay state URL used to redirect users within the application during the federation authentication process.
+* `sessionDuration` - Length of time that the application user sessions are valid in the ISO-8601 standard.
+* `tags` - Key-value map of resource tags.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/storagegateway_local_disk.html.markdown b/website/docs/cdktf/typescript/d/storagegateway_local_disk.html.markdown
new file mode 100644
index 00000000000..7f28b4da051
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/storagegateway_local_disk.html.markdown
@@ -0,0 +1,51 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_local_disk"
+description: |-
+  Retrieve information about a Storage Gateway local disk
+---
+
+
+
+# Data Source: aws_storagegateway_local_disk
+
+Retrieve information about a Storage Gateway local disk. The disk identifier is useful for adding the disk as a cache or upload buffer to a gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsStoragegatewayLocalDisk } from "./.gen/providers/aws/data-aws-storagegateway-local-disk";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsStoragegatewayLocalDisk(this, "test", {
+      diskPath: Token.asString(awsVolumeAttachmentTest.deviceName),
+      gatewayArn: Token.asString(awsStoragegatewayGatewayTest.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `gatewayArn` - (Required) ARN of the gateway.
+* `diskNode` - (Optional) Device node of the local disk to retrieve. For example, `/dev/sdb`.
+* `diskPath` - (Optional) Device path of the local disk to retrieve. For example, `/dev/xvdb` or `/dev/nvme1n1`.
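+
+As the introduction notes, the matched disk is typically registered as gateway cache or upload buffer. Below is a sketch (not `cdktf convert` output; the device path and gateway ARN are placeholders) that wires the discovered disk into the `aws_storagegateway_cache` resource.
+
+```typescript
+// Sketch: use the matched local disk as Storage Gateway cache storage.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsStoragegatewayLocalDisk } from "./.gen/providers/aws/data-aws-storagegateway-local-disk";
+import { StoragegatewayCache } from "./.gen/providers/aws/storagegateway-cache";
+class CacheFromLocalDisk extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const gatewayArn =
+      "arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678"; // placeholder
+    const disk = new DataAwsStoragegatewayLocalDisk(this, "disk", {
+      diskPath: "/dev/xvdb", // placeholder device path
+      gatewayArn,
+    });
+    new StoragegatewayCache(this, "cache", {
+      diskId: disk.diskId,
+      gatewayArn,
+    });
+  }
+}
+```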
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `diskId` - Disk identifier, e.g., `pci-0000:03:00.0-scsi-0:0:0:0`
+* `id` - Disk identifier, e.g., `pci-0000:03:00.0-scsi-0:0:0:0`
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/subnet.html.markdown b/website/docs/cdktf/typescript/d/subnet.html.markdown
new file mode 100644
index 00000000000..75895980b84
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/subnet.html.markdown
@@ -0,0 +1,137 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_subnet"
+description: |-
+  Provides details about a specific VPC subnet
+---
+
+
+
+# Data Source: aws_subnet
+
+`awsSubnet` provides details about a specific VPC subnet.
+
+This resource can prove useful when a module accepts a subnet ID as an input variable and needs to, for example, determine the ID of the VPC that the subnet belongs to.
+
+## Example Usage
+
+The following example shows how one might accept a subnet ID as a variable and use this data source to obtain the data necessary to create a security group that allows connections from hosts in that subnet.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformVariable, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSubnet } from "./.gen/providers/aws/data-aws-subnet";
+import { SecurityGroup } from "./.gen/providers/aws/security-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const subnetId = new TerraformVariable(this, "subnet_id", {});
+    const selected = new DataAwsSubnet(this, "selected", {
+      id: subnetId.stringValue,
+    });
+    new SecurityGroup(this, "subnet", {
+      ingress: [
+        {
+          cidrBlocks: [Token.asString(selected.cidrBlock)],
+          fromPort: 80,
+          protocol: "tcp",
+          toPort: 80,
+        },
+      ],
+      vpcId: Token.asString(selected.vpcId),
+    });
+  }
+}
+
+```
+
+### Filter Example
+
+If you want to match against tag `Name`, use:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSubnet } from "./.gen/providers/aws/data-aws-subnet";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSubnet(this, "selected", {
+      filter: [
+        {
+          name: "tag:Name",
+          values: ["yakdriver"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The arguments of this data source act as filters for querying the available subnets in the current region. The given filters must match exactly one subnet whose data will be exported as attributes.
+
+The following arguments are optional:
+
+* `availabilityZone` - (Optional) Availability zone where the subnet must reside.
+* `availabilityZoneId` - (Optional) ID of the Availability Zone for the subnet. This argument is not supported in all regions or partitions.
If necessary, use `availabilityZone` instead. +* `cidrBlock` - (Optional) CIDR block of the desired subnet. +* `defaultForAz` - (Optional) Whether the desired subnet must be the default subnet for its associated availability zone. +* `filter` - (Optional) Configuration block. Detailed below. +* `id` - (Optional) ID of the specific subnet to retrieve. +* `ipv6CidrBlock` - (Optional) IPv6 CIDR block of the desired subnet. +* `state` - (Optional) State that the desired subnet must have. +* `tags` - (Optional) Map of tags, each pair of which must exactly match a pair on the desired subnet. +* `vpcId` - (Optional) ID of the VPC that the desired subnet belongs to. + +### filter + +This block allows for complex filters. You can use one or more `filter` blocks. + +The following arguments are required: + +* `name` - (Required) Name of the field to filter by, as defined by [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html). +* `values` - (Required) Set of values that are accepted for the given field. A subnet will be selected if any one of the given values matches. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the subnet. +* `assignIpv6AddressOnCreation` - Whether an IPv6 address is assigned on creation. +* `availableIpAddressCount` - Available IP addresses of the subnet. +* `customerOwnedIpv4Pool` - Identifier of customer owned IPv4 address pool. +* `enableDns64` - Whether DNS queries made to the Amazon-provided DNS Resolver in this subnet return synthetic IPv6 addresses for IPv4-only destinations. +* `enableLniAtDeviceIndex` - Indicates the device position for local network interfaces in this subnet. For example, 1 indicates local network interfaces in this subnet are the secondary network interface (eth1). A local network interface cannot be the primary network interface (eth0). +* `enableResourceNameDnsAaaaRecordOnLaunch` - Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records. +* `enableResourceNameDnsARecordOnLaunch` - Indicates whether to respond to DNS queries for instance hostnames with DNS A records. +* `ipv6CidrBlockAssociationId` - Association ID of the IPv6 CIDR block. +* `ipv6Native` - Whether this is an IPv6-only subnet. +* `mapCustomerOwnedIpOnLaunch` - Whether customer owned IP addresses are assigned on network interface creation. +* `mapPublicIpOnLaunch` - Whether public IP addresses are assigned on instance launch. +* `outpostArn` - ARN of the Outpost. +* `ownerId` - ID of the AWS account that owns the subnet. +* `privateDnsHostnameTypeOnLaunch` - The type of hostnames assigned to instances in the subnet at launch. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/subnets.html.markdown b/website/docs/cdktf/typescript/d/subnets.html.markdown new file mode 100644 index 00000000000..60891d6783c --- /dev/null +++ b/website/docs/cdktf/typescript/d/subnets.html.markdown @@ -0,0 +1,167 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_subnets" +description: |- + Get information about a set of subnets. +--- + + + +# Data Source: aws_subnets + +This resource can be useful for getting back a set of subnet IDs. 
+
+## Example Usage
+
+The following shows outputting all CIDR blocks for every subnet ID in a VPC.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  Fn,
+  Token,
+  TerraformIterator,
+  TerraformOutput,
+  TerraformStack,
+} from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSubnet } from "./.gen/providers/aws/data-aws-subnet";
+import { DataAwsSubnets } from "./.gen/providers/aws/data-aws-subnets";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsSubnets(this, "example", {
+      filter: [
+        {
+          name: "vpc-id",
+          values: [vpcId.stringValue],
+        },
+      ],
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const exampleForEachIterator = TerraformIterator.fromList(
+      Token.asAny(Fn.toset(example.ids))
+    );
+    const dataAwsSubnetExample = new DataAwsSubnet(this, "example_1", {
+      id: Token.asString(exampleForEachIterator.value),
+      forEach: exampleForEachIterator,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsSubnetExample.overrideLogicalId("example");
+    new TerraformOutput(this, "subnet_cidr_blocks", {
+      value:
+        "${[ for s in ${" + dataAwsSubnetExample.fqn + "} : s.cidr_block]}",
+    });
+  }
+}
+
+```
+
+The following example retrieves a set of all subnets in a VPC with a custom
+tag of `Tier` set to a value of "Private" so that the `awsInstance` resource
+can loop through the subnets, putting instances across availability zones.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformIterator, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSubnets } from "./.gen/providers/aws/data-aws-subnets";
+import { Instance } from "./.gen/providers/aws/instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // "private" is a reserved word in TypeScript modules, so the data source
+    // is held in a variable named privateSubnets instead.
+    const privateSubnets = new DataAwsSubnets(this, "private", {
+      filter: [
+        {
+          name: "vpc-id",
+          values: [vpcId.stringValue],
+        },
+      ],
+      tags: {
+        Tier: "Private",
+      },
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g.
a result of a data source
+    you need to keep this like it is.*/
+    const appForEachIterator = TerraformIterator.fromList(
+      Token.asAny(Fn.toset(privateSubnets.ids))
+    );
+    new Instance(this, "app", {
+      ami: ami.stringValue,
+      instanceType: "t2.micro",
+      subnetId: Token.asString(appForEachIterator.value),
+      forEach: appForEachIterator,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `filter` - (Optional) Custom filter block as described below.
+* `tags` - (Optional) Map of tags, each pair of which must exactly match
+  a pair on the desired subnets.
+
+More complex filters can be expressed using one or more `filter` sub-blocks,
+which take the following arguments:
+
+* `name` - (Required) Name of the field to filter by, as defined by
+  [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html).
+  For example, if matching against tag `Name`, use:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSubnets } from "./.gen/providers/aws/data-aws-subnets";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsSubnets(this, "selected", {
+      filter: [
+        {
+          name: "tag:Name",
+          values: [""],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+* `values` - (Required) Set of values that are accepted for the given field.
+  Subnet IDs will be selected if any one of the given values matches.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `ids` - List of all the subnet IDs found.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `20M`)
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/transfer_server.html.markdown b/website/docs/cdktf/typescript/d/transfer_server.html.markdown
new file mode 100644
index 00000000000..0ef831a7ebe
--- /dev/null
+++ b/website/docs/cdktf/typescript/d/transfer_server.html.markdown
@@ -0,0 +1,59 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_server"
+description: |-
+  Get information on an AWS Transfer Server resource
+---
+
+
+
+# Data Source: aws_transfer_server
+
+Use this data source to get the ARN of an AWS Transfer Server for use in other
+resources.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsTransferServer } from "./.gen/providers/aws/data-aws-transfer-server";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataAwsTransferServer(this, "example", {
+      serverId: "s-1234567",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `serverId` - (Required) ID for an SFTP server.
+
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of Transfer Server.
+* `certificate` - ARN of any certificate.
+* `domain` - The domain of the storage system that is used for file transfers.
+* `endpoint` - Endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`).
+* `endpointType` - Type of endpoint that the server is connected to.
+* `identityProviderType` - The mode of authentication enabled for this service. The default value is `serviceManaged`, which allows you to store and access SFTP user credentials within the service. `apiGateway` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice.
+* `invocationRole` - ARN of the IAM role used to authenticate the user account with an `identityProviderType` of `apiGateway`.
+* `loggingRole` - ARN of an IAM role that allows the service to write your SFTP users’ activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
+* `protocols` - File transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint.
+* `securityPolicyName` - The name of the security policy that is attached to the server.
+* `structuredLoggingDestinations` - A set of ARNs of destinations that will receive structured logs from the transfer server such as CloudWatch Log Group ARNs.
+* `url` - URL of the service endpoint used to authenticate users with an `identityProviderType` of `apiGateway`.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/vpc.html.markdown b/website/docs/cdktf/typescript/d/vpc.html.markdown
index 9fc784e05b6..18701754cc8 100644
--- a/website/docs/cdktf/typescript/d/vpc.html.markdown
+++ b/website/docs/cdktf/typescript/d/vpc.html.markdown
@@ -85,7 +85,7 @@ which take the following arguments:
 * `values` - (Required) Set of values that are accepted for the given field.
   A VPC will be selected if any one of the given values matches.
 
-## Attributes Reference
+## Attribute Reference
 
 All of the argument attributes except `filter` blocks are also exported as
 result attributes. This data source will complete the data by populating
@@ -117,4 +117,4 @@ The following attribute is additionally exported:
 
 - `read` - (Default `20M`)
 
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/d/vpc_dhcp_options.html.markdown b/website/docs/cdktf/typescript/d/vpc_dhcp_options.html.markdown
index ee266a73263..b54b77376a0 100644
--- a/website/docs/cdktf/typescript/d/vpc_dhcp_options.html.markdown
+++ b/website/docs/cdktf/typescript/d/vpc_dhcp_options.html.markdown
@@ -79,7 +79,9 @@ For more information about filtering, see the [EC2 API documentation](https://do
 * `name` - (Required) Name of the field to filter.
 * `values` - (Required) Set of values for filtering.
 
-## Attributes Reference
+## Attribute Reference
+
+This data source exports the following attributes in addition to the arguments above:
 
 * `arn` - ARN of the DHCP Options Set.
* `dhcpOptionsId` - EC2 DHCP Options ID @@ -98,4 +100,4 @@ For more information about filtering, see the [EC2 API documentation](https://do - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_endpoint.html.markdown b/website/docs/cdktf/typescript/d/vpc_endpoint.html.markdown index d0e16aed077..4da2d941231 100644 --- a/website/docs/cdktf/typescript/d/vpc_endpoint.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_endpoint.html.markdown @@ -62,13 +62,14 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC Endpoint will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference In addition to all arguments above except `filter`, the following attributes are exported: * `arn` - ARN of the VPC endpoint. * `cidrBlocks` - List of CIDR blocks for the exposed AWS service. Applicable for endpoints of type `gateway`. -* `dnsEntry` - DNS entries for the VPC Endpoint. Applicable for endpoints of type `interface`. DNS blocks are documented below. +* `dnsEntry` - DNS entries for the VPC Endpoint. Applicable for endpoints of type `interface`. [DNS entry blocks are documented below](#dns_entry-block). +* `dnsOptions` - DNS options for the VPC Endpoint. [DNS options blocks are documented below](#dns_options-block). * `networkInterfaceIds` - One or more network interfaces for the VPC Endpoint. Applicable for endpoints of type `interface`. * `ownerId` - ID of the AWS account that owns the VPC endpoint. * `policy` - Policy document associated with the VPC Endpoint. Applicable for endpoints of type `gateway`. @@ -80,15 +81,24 @@ In addition to all arguments above except `filter`, the following attributes are * `subnetIds` - One or more subnets in which the VPC Endpoint is located. Applicable for endpoints of type `interface`. * `vpcEndpointType` - VPC Endpoint type, `gateway` or `interface`. +### `dnsEntry` Block + DNS blocks (for `dnsEntry`) support the following attributes: * `dnsName` - DNS name. * `hostedZoneId` - ID of the private hosted zone. +### `dnsOptions` Block + +DNS options (for `dnsOptions`) support the following attributes: + +* `dnsRecordIpType` - The DNS records created for the endpoint. +* `privateDnsOnlyForInboundResolverEndpoint` - Indicates whether to enable private DNS only for inbound endpoints. + ## Timeouts [Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_endpoint_service.html.markdown b/website/docs/cdktf/typescript/d/vpc_endpoint_service.html.markdown index 21b644362f5..0a0a0eb47a3 100644 --- a/website/docs/cdktf/typescript/d/vpc_endpoint_service.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_endpoint_service.html.markdown @@ -111,14 +111,14 @@ The given filters must match exactly one VPC endpoint service whose data will be ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the [EC2 DescribeVpcEndpointServices API Reference](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpcEndpointServices.html). 
* `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `acceptanceRequired` - Whether or not VPC endpoint connection requests to the service must be accepted by the service owner - `true` or `false`. * `arn` - ARN of the VPC endpoint service. @@ -138,4 +138,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_ipam_pool.html.markdown b/website/docs/cdktf/typescript/d/vpc_ipam_pool.html.markdown index 47531a1217c..8829e7b9ca9 100644 --- a/website/docs/cdktf/typescript/d/vpc_ipam_pool.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_ipam_pool.html.markdown @@ -72,7 +72,7 @@ VPC whose data will be exported as attributes. * `name` - (Required) The name of the filter. Filter names are case-sensitive. * `values` - (Required) The filter values. Filter values are case-sensitive. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating @@ -103,4 +103,4 @@ The following attribute is additionally exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_ipam_pool_cidrs.html.markdown b/website/docs/cdktf/typescript/d/vpc_ipam_pool_cidrs.html.markdown index e94b15ddbeb..0d336d3b0b3 100644 --- a/website/docs/cdktf/typescript/d/vpc_ipam_pool_cidrs.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_ipam_pool_cidrs.html.markdown @@ -109,7 +109,7 @@ VPC whose data will be exported as attributes. * `ipamPoolId` - ID of the IPAM pool you would like the list of provisioned CIDRs. * `filter` - Custom filter block as described below. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` blocks are also exported as result attributes. This data source will complete the data by populating @@ -131,4 +131,4 @@ The following attribute is additionally exported: - `read` - (Default `1M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_ipam_pools.html.markdown b/website/docs/cdktf/typescript/d/vpc_ipam_pools.html.markdown index 972bdd96dea..fab390f87cf 100644 --- a/website/docs/cdktf/typescript/d/vpc_ipam_pools.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_ipam_pools.html.markdown @@ -59,9 +59,9 @@ IPAM Pools in the current region. * `name` - (Required) The name of the filter. Filter names are case-sensitive. * `values` - (Required) The filter values. Filter values are case-sensitive. -## Attributes Reference +## Attribute Reference -In addition to all of the arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `ipamPools` - List of IPAM pools and their attributes. See below for details @@ -85,4 +85,4 @@ The following attributes are available on each pool entry found. * `sourceIpamPoolId` - ID of the source IPAM pool. * `tags` - Map of tags to assigned to the resource. 
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_ipam_preview_next_cidr.html.markdown b/website/docs/cdktf/typescript/d/vpc_ipam_preview_next_cidr.html.markdown index 0a5cc7761ec..5558a12c2ce 100644 --- a/website/docs/cdktf/typescript/d/vpc_ipam_preview_next_cidr.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_ipam_preview_next_cidr.html.markdown @@ -56,15 +56,15 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This data source supports the following arguments: * `disallowedCidrs` - (Optional) Exclude a particular CIDR range from being returned by the pool. * `ipamPoolId` - (Required) ID of the pool to which you want to assign a CIDR. * `netmaskLength` - (Optional) Netmask length of the CIDR you would like to preview from the IPAM pool. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `cidr` - Previewed CIDR from the pool. * `id` - ID of the preview. @@ -75,4 +75,4 @@ In addition to all arguments above, the following attributes are exported: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_peering_connection.html.markdown b/website/docs/cdktf/typescript/d/vpc_peering_connection.html.markdown index f34b83f8479..1ce9bccb367 100644 --- a/website/docs/cdktf/typescript/d/vpc_peering_connection.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_peering_connection.html.markdown @@ -85,7 +85,7 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC Peering Connection will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` are also exported as result attributes. @@ -99,12 +99,12 @@ All of the argument attributes except `filter` are also exported as result attri * `requester` - Configuration block that describes [VPC Peering Connection] (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC. -#### Accepter and Requester Attributes Reference +#### Accepter and Requester Attribute Reference * `allowRemoteVpcDnsResolution` - Indicates whether a local VPC can resolve public DNS hostnames to private IP addresses when queried from instances in a peer VPC. -#### CIDR block set Attributes Reference +#### CIDR block set Attribute Reference * `cidrBlock` - CIDR block associated to the VPC of the specific VPC Peering Connection. @@ -114,4 +114,4 @@ private IP addresses when queried from instances in a peer VPC. - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_peering_connections.html.markdown b/website/docs/cdktf/typescript/d/vpc_peering_connections.html.markdown index 0885af8b340..ef796365949 100644 --- a/website/docs/cdktf/typescript/d/vpc_peering_connections.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_peering_connections.html.markdown @@ -77,7 +77,7 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC Peering Connection will be selected if any one of the given values matches. 
-## Attributes Reference +## Attribute Reference All of the argument attributes except `filter` are also exported as result attributes. @@ -90,4 +90,4 @@ All of the argument attributes except `filter` are also exported as result attri - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_security_group_rule.html.markdown b/website/docs/cdktf/typescript/d/vpc_security_group_rule.html.markdown index f8bed9ea1f5..2a0943aea8f 100644 --- a/website/docs/cdktf/typescript/d/vpc_security_group_rule.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_security_group_rule.html.markdown @@ -45,14 +45,14 @@ whose data will be exported as attributes. ### filter Configuration Block -The following arguments are supported by the `filter` configuration block: +The `filter` configuration block supports the following arguments: * `name` - (Required) Name of the filter field. Valid values can be found in the EC2 [`describeSecurityGroupRules`](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroupRules.html) API Reference. * `values` - (Required) Set of values that are accepted for the given filter field. Results will be selected if any given value matches. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the security group rule. * `cidrIpv4` - The destination IPv4 CIDR range. @@ -67,4 +67,4 @@ In addition to all arguments above, the following attributes are exported: * `tags` - A map of tags assigned to the resource. * `toPort` - (Optional) The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpc_security_group_rules.html.markdown b/website/docs/cdktf/typescript/d/vpc_security_group_rules.html.markdown index 2c4ea90d1b7..2f190e906ed 100644 --- a/website/docs/cdktf/typescript/d/vpc_security_group_rules.html.markdown +++ b/website/docs/cdktf/typescript/d/vpc_security_group_rules.html.markdown @@ -53,8 +53,10 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. Security group rule IDs will be selected if any one of the given values match. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `ids` - List of all the security group rule IDs found. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpclattice_auth_policy.html.markdown b/website/docs/cdktf/typescript/d/vpclattice_auth_policy.html.markdown index 18289319013..11e643c83d5 100644 --- a/website/docs/cdktf/typescript/d/vpclattice_auth_policy.html.markdown +++ b/website/docs/cdktf/typescript/d/vpclattice_auth_policy.html.markdown @@ -44,11 +44,11 @@ The following arguments are required: * `resourceIdentifier` - (Required) The ID or Amazon Resource Name (ARN) of the service network or service for which the policy is created. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `policy` - The auth policy. 
The policy string in JSON must not contain newlines or blank lines. * `state` - The state of the auth policy. The auth policy is only active when the auth type is set to AWS_IAM. If you provide a policy, then authentication and authorization decisions are made based on this policy and the client's IAM policy. If the Auth type is NONE, then, any auth policy you provide will remain inactive. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpclattice_listener.html.markdown b/website/docs/cdktf/typescript/d/vpclattice_listener.html.markdown index 4fe35677079..bde9642c442 100644 --- a/website/docs/cdktf/typescript/d/vpclattice_listener.html.markdown +++ b/website/docs/cdktf/typescript/d/vpclattice_listener.html.markdown @@ -48,9 +48,9 @@ The following arguments are required: * `serviceIdentifier` - (Required) ID or Amazon Resource Name (ARN) of the service network * `listenerIdentifier` - (Required) ID or Amazon Resource Name (ARN) of the listener -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the listener. * `createdAt` - The date and time that the listener was created. @@ -64,4 +64,4 @@ In addition to all arguments above, the following attributes are exported: * `serviceId` - The ID of the service. * `tags` - List of tags associated with the listener. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpclattice_resource_policy.html.markdown b/website/docs/cdktf/typescript/d/vpclattice_resource_policy.html.markdown index 75623336557..b8cca63e4a1 100644 --- a/website/docs/cdktf/typescript/d/vpclattice_resource_policy.html.markdown +++ b/website/docs/cdktf/typescript/d/vpclattice_resource_policy.html.markdown @@ -42,10 +42,10 @@ The following arguments are required: * `resourceArn` - (Required) Resource ARN of the resource for which a policy is retrieved. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `policy` - JSON-encoded string representation of the applied resource policy. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpclattice_service.html.markdown b/website/docs/cdktf/typescript/d/vpclattice_service.html.markdown index 028e0d423f5..8fafaa4008b 100644 --- a/website/docs/cdktf/typescript/d/vpclattice_service.html.markdown +++ b/website/docs/cdktf/typescript/d/vpclattice_service.html.markdown @@ -45,9 +45,9 @@ The following arguments are required: * `serviceIdentifier` - (Required) ID or Amazon Resource Name (ARN) of the service network -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the service. * `authType` - Type of IAM policy. Either `none` or `awsIam`. @@ -58,4 +58,4 @@ In addition to all arguments above, the following attributes are exported: * `status` - Status of the service. * `tags` - List of tags associated with the service. 
- \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpclattice_service_network.html.markdown b/website/docs/cdktf/typescript/d/vpclattice_service_network.html.markdown index e2372705a8d..0034cb9830c 100644 --- a/website/docs/cdktf/typescript/d/vpclattice_service_network.html.markdown +++ b/website/docs/cdktf/typescript/d/vpclattice_service_network.html.markdown @@ -42,9 +42,9 @@ The following arguments are required: * `serviceNetworkIdentifier` - (Required) Identifier of the network service. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This data source exports the following attributes in addition to the arguments above: * `arn` - ARN of the Service Network. * `authType` - Authentication type for the service network. Either `none` or `awsIam`. @@ -55,4 +55,4 @@ In addition to all arguments above, the following attributes are exported: * `numberOfAssociatedServices` - Number of services associated with this service network. * `numberOfAssociatedVpcs` - Number of VPCs associated with this service network. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpcs.html.markdown b/website/docs/cdktf/typescript/d/vpcs.html.markdown index 769918d06ca..81859f73246 100644 --- a/website/docs/cdktf/typescript/d/vpcs.html.markdown +++ b/website/docs/cdktf/typescript/d/vpcs.html.markdown @@ -122,7 +122,9 @@ which take the following arguments: * `values` - (Required) Set of values that are accepted for the given field. A VPC will be selected if any one of the given values matches. -## Attributes Reference +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: * `id` - AWS Region. * `ids` - List of all the VPC Ids found. @@ -133,4 +135,4 @@ which take the following arguments: - `read` - (Default `20M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/vpn_gateway.html.markdown b/website/docs/cdktf/typescript/d/vpn_gateway.html.markdown new file mode 100644 index 00000000000..84df9c99632 --- /dev/null +++ b/website/docs/cdktf/typescript/d/vpn_gateway.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_gateway" +description: |- + Provides details about a specific VPN gateway. +--- + + + +# Data Source: aws_vpn_gateway + +The VPN Gateway data source provides details about +a specific VPN gateway. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsVpnGateway } from "./.gen/providers/aws/data-aws-vpn-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const selected = new DataAwsVpnGateway(this, "selected", { + filter: [ + { + name: "tag:Name", + values: ["vpn-gw"], + }, + ], + }); + new TerraformOutput(this, "vpn_gateway_id", { + value: selected.id, + }); + } +} + +``` + +## Argument Reference + +The arguments of this data source act as filters for querying the available VPN gateways. 
+The given filters must match exactly one VPN gateway whose data will be exported as attributes. + +* `id` - (Optional) ID of the specific VPN Gateway to retrieve. + +* `state` - (Optional) State of the specific VPN Gateway to retrieve. + +* `availabilityZone` - (Optional) Availability Zone of the specific VPN Gateway to retrieve. + +* `attachedVpcId` - (Optional) ID of a VPC attached to the specific VPN Gateway to retrieve. + +* `filter` - (Optional) Custom filter block as described below. + +* `tags` - (Optional) Map of tags, each pair of which must exactly match + a pair on the desired VPN Gateway. + +* `amazonSideAsn` - (Optional) Autonomous System Number (ASN) for the Amazon side of the specific VPN Gateway to retrieve. + +More complex filters can be expressed using one or more `filter` sub-blocks, +which take the following arguments: + +* `name` - (Required) Name of the field to filter by, as defined by + [the underlying AWS API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpnGateways.html). + +* `values` - (Required) Set of values that are accepted for the given field. + A VPN Gateway will be selected if any one of the given values matches. + +## Attribute Reference + +All of the argument attributes are also exported as result attributes. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/waf_ipset.html.markdown b/website/docs/cdktf/typescript/d/waf_ipset.html.markdown new file mode 100644 index 00000000000..990f4e601fe --- /dev/null +++ b/website/docs/cdktf/typescript/d/waf_ipset.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_ipset" +description: |- + Retrieves an AWS WAF IP set id. +--- + + + +# Data Source: aws_waf_ipset + +`awsWafIpset` Retrieves a WAF IP Set Resource Id. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafIpset } from "./.gen/providers/aws/data-aws-waf-ipset"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafIpset(this, "example", { + name: "tfWAFIPSet", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF IP set. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF IP set. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/waf_rate_based_rule.html.markdown b/website/docs/cdktf/typescript/d/waf_rate_based_rule.html.markdown new file mode 100644 index 00000000000..0f08f7fcc6e --- /dev/null +++ b/website/docs/cdktf/typescript/d/waf_rate_based_rule.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rate_based_rule" +description: |- + Retrieves an AWS WAF rate based rule id. +--- + + + +# Data Source: aws_waf_rate_based_rule + +`awsWafRateBasedRule` Retrieves a WAF Rate Based Rule Resource Id. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafRateBasedRule } from "./.gen/providers/aws/data-aws-waf-rate-based-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafRateBasedRule(this, "example", { + name: "tfWAFRateBasedRule", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF rate based rule. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF rate based rule. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/waf_rule.html.markdown b/website/docs/cdktf/typescript/d/waf_rule.html.markdown new file mode 100644 index 00000000000..72f214c9f6e --- /dev/null +++ b/website/docs/cdktf/typescript/d/waf_rule.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rule" +description: |- + Retrieves an AWS WAF rule id. +--- + + + +# Data Source: aws_waf_rule + +`awsWafRule` Retrieves a WAF Rule Resource Id. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafRule } from "./.gen/providers/aws/data-aws-waf-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafRule(this, "example", { + name: "tfWAFRule", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF rule. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF rule. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/waf_subscribed_rule_group.html.markdown b/website/docs/cdktf/typescript/d/waf_subscribed_rule_group.html.markdown new file mode 100644 index 00000000000..8d59441b8f6 --- /dev/null +++ b/website/docs/cdktf/typescript/d/waf_subscribed_rule_group.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_subscribed_rule_group" +description: |- + Retrieves information about a Managed WAF Rule Group from AWS Marketplace. +--- + + + +# Data Source: aws_waf_subscribed_rule_group + +`awsWafSubscribedRuleGroup` retrieves information about a Managed WAF Rule Group from AWS Marketplace (needs to be subscribed to first). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsWafSubscribedRuleGroup } from "./.gen/providers/aws/data-aws-waf-subscribed-rule-group"; +import { WafWebAcl } from "./.gen/providers/aws/waf-web-acl"; +interface MyConfig { + defaultAction: any; + metricName: any; + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const byMetricName = new DataAwsWafSubscribedRuleGroup( + this, + "by_metric_name", + { + metricName: "F5BotDetectionSignatures", + } + ); + const byName = new DataAwsWafSubscribedRuleGroup(this, "by_name", { + name: "F5 Bot Detection Signatures For AWS WAF", + }); + new WafWebAcl(this, "acl", { + rules: [ + { + priority: 1, + ruleId: Token.asString(byName.id), + type: "GROUP", + }, + { + priority: 2, + ruleId: Token.asString(byMetricName.id), + type: "GROUP", + }, + ], + defaultAction: config.defaultAction, + metricName: config.metricName, + name: config.name, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments, at least one of which must be specified: + +* `name` - (Optional) Name of the WAF rule group. +* `metricName` - (Optional) Metric name of the WAF rule group. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF rule group. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/waf_web_acl.html.markdown b/website/docs/cdktf/typescript/d/waf_web_acl.html.markdown new file mode 100644 index 00000000000..295b3c3c2a3 --- /dev/null +++ b/website/docs/cdktf/typescript/d/waf_web_acl.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_web_acl" +description: |- + Retrieves a WAF Web ACL id. +--- + + + +# Data Source: aws_waf_web_acl + +`awsWafWebAcl` Retrieves a WAF Web ACL Resource Id. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafWebAcl } from "./.gen/providers/aws/data-aws-waf-web-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafWebAcl(this, "example", { + name: "tfWAFWebACL", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF Web ACL. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF Web ACL. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafregional_ipset.html.markdown b/website/docs/cdktf/typescript/d/wafregional_ipset.html.markdown new file mode 100644 index 00000000000..0ae95c50ce5 --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafregional_ipset.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_ipset" +description: |- + Retrieves an AWS WAF Regional IP set id. +--- + + + +# Data Source: aws_wafregional_ipset + +`awsWafregionalIpset` Retrieves a WAF Regional IP Set Resource Id.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafregionalIpset } from "./.gen/providers/aws/data-aws-wafregional-ipset"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafregionalIpset(this, "example", { + name: "tfWAFRegionalIPSet", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF Regional IP set. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF Regional IP set. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafregional_rate_based_rule.html.markdown b/website/docs/cdktf/typescript/d/wafregional_rate_based_rule.html.markdown new file mode 100644 index 00000000000..a048e1eecef --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafregional_rate_based_rule.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_rate_based_rule" +description: |- + Retrieves an AWS WAF Regional rate based rule id. +--- + + + +# Data Source: aws_wafregional_rate_based_rule + +`awsWafregionalRateBasedRule` Retrieves a WAF Regional Rate Based Rule Resource Id. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafregionalRateBasedRule } from "./.gen/providers/aws/data-aws-wafregional-rate-based-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafregionalRateBasedRule(this, "example", { + name: "tfWAFRegionalRateBasedRule", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF Regional rate based rule. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF Regional rate based rule. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafregional_rule.html.markdown b/website/docs/cdktf/typescript/d/wafregional_rule.html.markdown new file mode 100644 index 00000000000..8590a94324d --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafregional_rule.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_rule" +description: |- + Retrieves an AWS WAF Regional rule id. +--- + + + +# Data Source: aws_wafregional_rule + +`awsWafregionalRule` Retrieves a WAF Regional Rule Resource Id. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsWafregionalRule } from "./.gen/providers/aws/data-aws-wafregional-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafregionalRule(this, "example", { + name: "tfWAFRegionalRule", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF Regional rule. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF Regional rule. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafregional_subscribed_rule_group.html.markdown b/website/docs/cdktf/typescript/d/wafregional_subscribed_rule_group.html.markdown new file mode 100644 index 00000000000..32c9479487b --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafregional_subscribed_rule_group.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_subscribed_rule_group" +description: |- + Retrieves information about a Managed WAF Rule Group from AWS Marketplace for use in WAF Regional. +--- + + + +# Data Source: aws_wafregional_subscribed_rule_group + +`awsWafregionalSubscribedRuleGroup` retrieves information about a Managed WAF Rule Group from AWS Marketplace for use in WAF Regional (needs to be subscribed to first). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafregionalSubscribedRuleGroup } from "./.gen/providers/aws/data-aws-wafregional-subscribed-rule-group"; +import { WafregionalWebAcl } from "./.gen/providers/aws/wafregional-web-acl"; +interface MyConfig { + defaultAction: any; + metricName: any; + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const byMetricName = new DataAwsWafregionalSubscribedRuleGroup( + this, + "by_metric_name", + { + metricName: "F5BotDetectionSignatures", + } + ); + const byName = new DataAwsWafregionalSubscribedRuleGroup(this, "by_name", { + name: "F5 Bot Detection Signatures For AWS WAF", + }); + new WafregionalWebAcl(this, "acl", { + rules: [ + { + priority: 1, + ruleId: byName.id, + type: "GROUP", + }, + { + priority: 2, + ruleId: byMetricName.id, + type: "GROUP", + }, + ], + defaultAction: config.defaultAction, + metricName: config.metricName, + name: config.name, + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments, at least one of which must be specified: + +* `name` - (Optional) Name of the WAF rule group. +* `metricName` - (Optional) Metric name of the WAF rule group. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF rule group.
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafregional_web_acl.html.markdown b/website/docs/cdktf/typescript/d/wafregional_web_acl.html.markdown new file mode 100644 index 00000000000..45f1f5dc0d4 --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafregional_web_acl.html.markdown @@ -0,0 +1,49 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_web_acl" +description: |- + Retrieves a WAF Regional Web ACL id. +--- + + + +# Data Source: aws_wafregional_web_acl + +`awsWafregionalWebAcl` Retrieves a WAF Regional Web ACL Resource Id. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafregionalWebAcl } from "./.gen/providers/aws/data-aws-wafregional-web-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafregionalWebAcl(this, "example", { + name: "tfWAFRegionalWebACL", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAF Regional Web ACL. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - ID of the WAF Regional Web ACL. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafv2_ip_set.html.markdown b/website/docs/cdktf/typescript/d/wafv2_ip_set.html.markdown new file mode 100644 index 00000000000..4df2809eeeb --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafv2_ip_set.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_ip_set" +description: |- + Retrieves the summary of a WAFv2 IP Set. +--- + + + +# Data Source: aws_wafv2_ip_set + +Retrieves the summary of a WAFv2 IP Set. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafv2IpSet } from "./.gen/providers/aws/data-aws-wafv2-ip-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafv2IpSet(this, "example", { + name: "some-ip-set", + scope: "REGIONAL", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 IP Set. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `cloudfront` or `regional`. To work with CloudFront, you must also specify the region `usEast1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `addresses` - An array of strings that specifies zero or more IP addresses or blocks of IP addresses in Classless Inter-Domain Routing (CIDR) notation. +* `arn` - ARN of the entity. +* `description` - Description of the set that helps with identification. +* `id` - Unique identifier for the set. 
+* `ipAddressVersion` - IP address version of the set. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafv2_regex_pattern_set.html.markdown b/website/docs/cdktf/typescript/d/wafv2_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..ba765e26caf --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafv2_regex_pattern_set.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_regex_pattern_set" +description: |- + Retrieves the summary of a WAFv2 Regex Pattern Set. +--- + + + +# Data Source: aws_wafv2_regex_pattern_set + +Retrieves the summary of a WAFv2 Regex Pattern Set. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafv2RegexPatternSet } from "./.gen/providers/aws/data-aws-wafv2-regex-pattern-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafv2RegexPatternSet(this, "example", { + name: "some-regex-pattern-set", + scope: "REGIONAL", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 Regex Pattern Set. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `cloudfront` or `regional`. To work with CloudFront, you must also specify the region `usEast1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the entity. +* `description` - Description of the set that helps with identification. +* `id` - Unique identifier for the set. +* `regularExpression` - One or more blocks of regular expression patterns that AWS WAF is searching for. See [Regular Expression](#regular-expression) below for details. + +### Regular Expression + +Each `regularExpression` supports the following argument: + +* `regexString` - (Required) String representing the regular expression, see the AWS WAF [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-regex-pattern-set-creating.html) for more information. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafv2_rule_group.html.markdown b/website/docs/cdktf/typescript/d/wafv2_rule_group.html.markdown new file mode 100644 index 00000000000..2426e14a876 --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafv2_rule_group.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_rule_group" +description: |- + Retrieves the summary of a WAFv2 Rule Group. +--- + + + +# Data Source: aws_wafv2_rule_group + +Retrieves the summary of a WAFv2 Rule Group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsWafv2RuleGroup } from "./.gen/providers/aws/data-aws-wafv2-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafv2RuleGroup(this, "example", { + name: "some-rule-group", + scope: "REGIONAL", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 Rule Group. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `cloudfront` or `regional`. To work with CloudFront, you must also specify the region `usEast1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the entity. +* `description` - Description of the rule group that helps with identification. +* `id` - Unique identifier of the rule group. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/wafv2_web_acl.html.markdown b/website/docs/cdktf/typescript/d/wafv2_web_acl.html.markdown new file mode 100644 index 00000000000..2a38b175802 --- /dev/null +++ b/website/docs/cdktf/typescript/d/wafv2_web_acl.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_web_acl" +description: |- + Retrieves the summary of a WAFv2 Web ACL. +--- + + + +# Data Source: aws_wafv2_web_acl + +Retrieves the summary of a WAFv2 Web ACL. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWafv2WebAcl } from "./.gen/providers/aws/data-aws-wafv2-web-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWafv2WebAcl(this, "example", { + name: "some-web-acl", + scope: "REGIONAL", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `name` - (Required) Name of the WAFv2 Web ACL. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `cloudfront` or `regional`. To work with CloudFront, you must also specify the region `usEast1` (N. Virginia) on the AWS provider. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the entity. +* `description` - Description of the WebACL that helps with identification. +* `id` - Unique identifier of the WebACL. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/workspaces_bundle.html.markdown b/website/docs/cdktf/typescript/d/workspaces_bundle.html.markdown new file mode 100644 index 00000000000..58d0eafa26b --- /dev/null +++ b/website/docs/cdktf/typescript/d/workspaces_bundle.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_bundle" +description: |- + Retrieve information about an AWS WorkSpaces bundle. +--- + + + +# Data Source: aws_workspaces_bundle + +Retrieve information about an AWS WorkSpaces bundle. 
+ +## Example Usage + +### By ID + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWorkspacesBundle } from "./.gen/providers/aws/data-aws-workspaces-bundle"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWorkspacesBundle(this, "example", { + bundleId: "wsb-b0s22j3d7", + }); + } +} + +``` + +### By Owner & Name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWorkspacesBundle } from "./.gen/providers/aws/data-aws-workspaces-bundle"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWorkspacesBundle(this, "example", { + name: "Value with Windows 10 and Office 2016", + owner: "AMAZON", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bundleId` – (Optional) ID of the bundle. +* `owner` – (Optional) Owner of the bundles. Leave it blank to look up bundles that you own. You cannot combine this parameter with `bundleId`. +* `name` – (Optional) Name of the bundle. You cannot combine this parameter with `bundleId`. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `description` – The description of the bundle. +* `bundleId` – The ID of the bundle. +* `name` – The name of the bundle. +* `owner` – The owner of the bundle. +* `computeType` – The compute type. See supported fields below. +* `rootStorage` – The root volume. See supported fields below. +* `userStorage` – The user storage. See supported fields below. + +### `computeType` + +* `name` - Name of the compute type. + +### `rootStorage` + +* `capacity` - Size of the root volume. + +### `userStorage` + +* `capacity` - Size of the user storage. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/workspaces_directory.html.markdown b/website/docs/cdktf/typescript/d/workspaces_directory.html.markdown new file mode 100644 index 00000000000..956d9dc355f --- /dev/null +++ b/website/docs/cdktf/typescript/d/workspaces_directory.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_directory" +description: |- + Retrieve information about an AWS WorkSpaces directory. +--- + + + +# Data Source: aws_workspaces_directory + +Retrieve information about an AWS WorkSpaces directory. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsWorkspacesDirectory } from "./.gen/providers/aws/data-aws-workspaces-directory"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWorkspacesDirectory(this, "example", { + directoryId: "d-9067783251", + }); + } +} + +``` + +## Argument Reference + +* `directoryId` - (Required) Directory identifier for registration in WorkSpaces service. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - WorkSpaces directory identifier. +* `alias` - Directory alias. +* `customerUserName` - User name for the service account. +* `directoryName` - Name of the directory. +* `directoryType` - Directory type. +* `dnsIpAddresses` - IP addresses of the DNS servers for the directory. +* `iamRoleId` - Identifier of the IAM role. This is the role that allows Amazon WorkSpaces to make calls to other services, such as Amazon EC2, on your behalf. +* `ipGroupIds` - Identifiers of the IP access control groups associated with the directory. +* `registrationCode` - Registration code for the directory. This is the code that users enter in their Amazon WorkSpaces client application to connect to the directory. +* `selfServicePermissions` – The permissions to enable or disable self-service capabilities. Defined below. +* `subnetIds` - Identifiers of the subnets where the directory resides. +* `tags` – A map of tags assigned to the WorkSpaces directory. +* `workspaceCreationProperties` – The default properties that are used for creating WorkSpaces. Defined below. +* `workspaceAccessProperties` – Specifies which devices and operating systems users can use to access their WorkSpaces. Defined below. +* `workspaceSecurityGroupId` - The identifier of the security group that is assigned to new WorkSpaces. + +### self_service_permissions + +* `changeComputeType` – Whether WorkSpaces directory users can change the compute type (bundle) for their workspace. +* `increaseVolumeSize` – Whether WorkSpaces directory users can increase the volume size of the drives on their workspace. +* `rebuildWorkspace` – Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state. +* `restartWorkspace` – Whether WorkSpaces directory users can restart their workspace. +* `switchRunningMode` – Whether WorkSpaces directory users can switch the running mode of their workspace. + +### workspace_access_properties + +* `deviceTypeAndroid` – Indicates whether users can use Android devices to access their WorkSpaces. +* `deviceTypeChromeos` – Indicates whether users can use Chromebooks to access their WorkSpaces. +* `deviceTypeIos` – Indicates whether users can use iOS devices to access their WorkSpaces. +* `deviceTypeLinux` – Indicates whether users can use Linux clients to access their WorkSpaces. +* `deviceTypeOsx` – Indicates whether users can use macOS clients to access their WorkSpaces. +* `deviceTypeWeb` – Indicates whether users can access their WorkSpaces through a web browser. +* `deviceTypeWindows` – Indicates whether users can use Windows clients to access their WorkSpaces. +* `deviceTypeZeroclient` – Indicates whether users can use zero client devices to access their WorkSpaces. + +### workspace_creation_properties + +* `customSecurityGroupId` – The identifier of your custom security group.
It must belong to the same VPC in which your WorkSpaces reside. +* `defaultOu` – The default organizational unit (OU) for your WorkSpace directories. +* `enableInternetAccess` – Indicates whether internet access is enabled for your WorkSpaces. +* `enableMaintenanceMode` – Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html). +* `userEnabledAsLocalAdministrator` – Indicates whether users are local administrators of their WorkSpaces. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/workspaces_image.html.markdown b/website/docs/cdktf/typescript/d/workspaces_image.html.markdown new file mode 100644 index 00000000000..377f68093cb --- /dev/null +++ b/website/docs/cdktf/typescript/d/workspaces_image.html.markdown @@ -0,0 +1,53 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_image" +description: |- + Get information about a WorkSpaces image. +--- + + + +# Data Source: aws_workspaces_image + +Use this data source to get information about a WorkSpaces image. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWorkspacesImage } from "./.gen/providers/aws/data-aws-workspaces-image"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWorkspacesImage(this, "example", { + imageId: "wsi-ten5h0y19", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `imageId` – (Required) ID of the image. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `name` – The name of the image. +* `description` – The description of the image. +* `os` – The operating system that the image is running. +* `requiredTenancy` – Specifies whether the image is running on dedicated hardware. When Bring Your Own License (BYOL) is enabled, this value is set to DEDICATED. For more information, see [Bring Your Own Windows Desktop Images](https://docs.aws.amazon.com/workspaces/latest/adminguide/byol-windows-images.html). +* `state` – The status of the image. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/d/workspaces_workspace.html.markdown b/website/docs/cdktf/typescript/d/workspaces_workspace.html.markdown new file mode 100644 index 00000000000..0907dfefe9d --- /dev/null +++ b/website/docs/cdktf/typescript/d/workspaces_workspace.html.markdown @@ -0,0 +1,93 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_workspace" +description: |- + Get information about a WorkSpace in AWS WorkSpaces Service. +--- + + + +# Data Source: aws_workspaces_workspace + +Use this data source to get information about a WorkSpace in the [AWS WorkSpaces](https://docs.aws.amazon.com/workspaces/latest/adminguide/amazon-workspaces.html) service.
+ +## Example Usage + +### Filter By Workspace ID + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWorkspacesWorkspace } from "./.gen/providers/aws/data-aws-workspaces-workspace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWorkspacesWorkspace(this, "example", { + workspaceId: "ws-cj5xcxsz5", + }); + } +} + +``` + +### Filter By Directory ID & User Name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsWorkspacesWorkspace } from "./.gen/providers/aws/data-aws-workspaces-workspace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataAwsWorkspacesWorkspace(this, "example", { + directoryId: "d-9967252f57", + userName: "Example", + }); + } +} + +``` + +## Argument Reference + +This data source supports the following arguments: + +* `bundleId` - (Optional) ID of the bundle for the WorkSpace. +* `directoryId` - (Optional) ID of the directory for the WorkSpace. You have to specify `userName` along with `directoryId`. You cannot combine this parameter with `workspaceId`. +* `rootVolumeEncryptionEnabled` - (Optional) Indicates whether the data stored on the root volume is encrypted. +* `tags` - (Optional) Tags for the WorkSpace. +* `userName` – (Optional) User name of the user for the WorkSpace. This user name must exist in the directory for the WorkSpace. You cannot combine this parameter with `workspaceId`. +* `userVolumeEncryptionEnabled` – (Optional) Indicates whether the data stored on the user volume is encrypted. +* `volumeEncryptionKey` – (Optional) Symmetric AWS KMS customer master key (CMK) used to encrypt data stored on your WorkSpace. Amazon WorkSpaces does not support asymmetric CMKs. +* `workspaceId` - (Optional) ID of the WorkSpace. You cannot combine this parameter with `directoryId`. +* `workspaceProperties` – (Optional) WorkSpace properties. + +`workspaceProperties` supports the following: + +* `computeTypeName` – (Optional) Compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `value`, `standard`, `performance`, `power`, `graphics`, `powerpro` and `graphicspro`. +* `rootVolumeSizeGib` – (Optional) Size of the root volume. +* `runningMode` – (Optional) Running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `autoStop` and `alwaysOn`. +* `runningModeAutoStopTimeoutInMinutes` – (Optional) Time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals. +* `userVolumeSizeGib` – (Optional) Size of the user storage. + +## Attribute Reference + +This data source exports the following attributes in addition to the arguments above: + +* `id` - Workspaces ID. +* `ipAddress` - IP address of the WorkSpace. 
+* `computerName` - Name of the WorkSpace, as seen by the operating system. +* `state` - Operational state of the WorkSpace. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/index.html.markdown b/website/docs/cdktf/typescript/index.html.markdown new file mode 100644 index 00000000000..c3731f24448 --- /dev/null +++ b/website/docs/cdktf/typescript/index.html.markdown @@ -0,0 +1,823 @@ +--- +layout: "aws" +page_title: "Provider: AWS" +description: |- + Use the Amazon Web Services (AWS) provider to interact with the many resources supported by AWS. You must configure the provider with the proper credentials before you can use it. +--- + + + +# AWS Provider + +Use the Amazon Web Services (AWS) provider to interact with the +many resources supported by AWS. You must configure the provider +with the proper credentials before you can use it. + +Use the navigation to the left to read about the available resources. There are currently 1236 resources and 510 data sources available in the provider. + +To learn the basics of Terraform using this provider, follow the +hands-on [get started tutorials](https://learn.hashicorp.com/tutorials/terraform/infrastructure-as-code?in=terraform/aws-get-started&utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS). Interact with AWS services, +including Lambda, RDS, and IAM by following the [AWS services +tutorials](https://learn.hashicorp.com/collections/terraform/aws?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS). + +## Example Usage + +Terraform 0.13 and later: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + region: "us-east-1", + }); + new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + }); + } +} + +``` + +Terraform 0.12 and earlier: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + region: "us-east-1", + }); + new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + }); + } +} + +``` + +## Authentication and Configuration + +Configuration for the AWS Provider can be derived from several sources, +which are applied in the following order: + +1. Parameters in the provider configuration +1. Environment variables +1. Shared credentials files +1. Shared configuration files +1. Container credentials +1. 
Instance profile credentials and region + +This order matches the precedence used by the +[AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence) +and the [AWS SDKs](https://aws.amazon.com/tools/). + +The AWS Provider supports assuming an IAM role, either in +the provider configuration block parameter `assumeRole` +or in [a named profile](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html). + +The AWS Provider supports assuming an IAM role using [web identity federation and OpenID Connect (OIDC)](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html#cli-configure-role-oidc). +This can be configured either using environment variables or in a named profile. + +When using a named profile, the AWS Provider also supports [sourcing credentials from an external process](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sourcing-external.html). + +### Provider Configuration + +!> **Warning:** Hard-coded credentials are not recommended in any Terraform +configuration and risk secret leakage should this file ever be committed to a +public version control system. + +Credentials can be provided by adding an `accessKey`, `secretKey`, and optionally `token` to the `aws` provider block. + +Usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + accessKey: "my-access-key", + region: "us-west-2", + secretKey: "my-secret-key", + }); + } +} + +``` + +Other settings related to authorization can be configured, such as: + +* `profile` +* `sharedConfigFiles` +* `sharedCredentialsFiles` + +### Environment Variables + +Credentials can be provided by using the `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and optionally `AWS_SESSION_TOKEN` environment variables. +The region can be set using the `AWS_REGION` or `AWS_DEFAULT_REGION` environment variables. + +For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", {}); + } +} + +``` + +```console +% export AWS_ACCESS_KEY_ID="anaccesskey" +% export AWS_SECRET_ACCESS_KEY="asecretkey" +% export AWS_REGION="us-west-2" +% terraform plan +``` + +Other environment variables related to authorization are: + +* `AWS_PROFILE` +* `AWS_CONFIG_FILE` +* `AWS_SHARED_CREDENTIALS_FILE` + +### Shared Configuration and Credentials Files + +The AWS Provider can source credentials and other settings from the [shared configuration and credentials files](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html).
+By default, these files are located at `$HOME/.aws/config` and `$HOME/.aws/credentials` on Linux and macOS, +and `"%USERPROFILE%\.aws\config"` and `"%USERPROFILE%\.aws\credentials"` on Windows. + +If no named profile is specified, the `default` profile is used. +Use the `profile` parameter or `AWS_PROFILE` environment variable to specify a named profile. + +The locations of the shared configuration and credentials files can be configured using either +the parameters `sharedConfigFiles` and `sharedCredentialsFiles` +or the environment variables `AWS_CONFIG_FILE` and `AWS_SHARED_CREDENTIALS_FILE`. + +For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + profile: "customprofile", + sharedConfigFiles: ["/Users/tf_user/.aws/conf"], + sharedCredentialsFiles: ["/Users/tf_user/.aws/creds"], + }); + } +} + +``` + +### Container Credentials + +If you're running Terraform on CodeBuild or ECS and have configured an [IAM Task Role](http://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html), Terraform can use the container's Task Role. This support is based on the underlying `AWS_CONTAINER_CREDENTIALS_RELATIVE_URI` and `AWS_CONTAINER_CREDENTIALS_FULL_URI` environment variables being automatically set by those services or manually for advanced usage. + +If you're running Terraform on EKS and have configured [IAM Roles for Service Accounts (IRSA)](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html), Terraform can use the pod's role. This support is based on the underlying `AWS_ROLE_ARN` and `AWS_WEB_IDENTITY_TOKEN_FILE` environment variables being automatically set by Kubernetes or manually for advanced usage. + +### Instance profile credentials and region + +When the AWS Provider is running on an EC2 instance with an IAM Instance Profile set, +the provider can source credentials from the [EC2 Instance Metadata Service](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html#instance-metadata-security-credentials). +Both IMDS v1 and IMDS v2 are supported. + +A custom endpoint for the metadata service can be provided using the `ec2MetadataServiceEndpoint` parameter or the `AWS_EC2_METADATA_SERVICE_ENDPOINT` environment variable. + +### Assuming an IAM Role + +If provided with a role ARN, the AWS Provider will attempt to assume this role +using the supplied credentials. + +Usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + assumeRole: [ + { + externalId: "EXTERNAL_ID", + roleArn: "arn:aws:iam::123456789012:role/ROLE_NAME", + sessionName: "SESSION_NAME", + }, + ], + }); + } +} + +``` + +> **Hands-on:** Try the [Use AssumeRole to Provision AWS Resources Across Accounts](https://learn.hashicorp.com/tutorials/terraform/aws-assumerole) tutorial. + +### Assuming an IAM Role Using A Web Identity + +If provided with a role ARN and a token from a web identity provider, +the AWS Provider will attempt to assume this role using the supplied credentials. + +Usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + assumeRoleWithWebIdentity: [ + { + roleArn: "arn:aws:iam::123456789012:role/ROLE_NAME", + sessionName: "SESSION_NAME", + webIdentityTokenFile: "/Users/tf_user/secrets/web-identity-token", + }, + ], + }); + } +} + +``` + +### Using an External Credentials Process + +To use an [external process to source credentials](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sourcing-external.html), +the process must be configured in a named profile, including the `default` profile. +The profile is configured in a shared configuration file. + +For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
*/ +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + profile: "customprofile", + }); + } +} + +``` + +```ini +[profile customprofile] +credential_process = custom-process --username jdoe +``` + +## AWS Configuration Reference + +|Setting|Provider|[Environment Variable][envvars]|[Shared Config][config]| +|-------|--------|-------------------------------|-----------------------| +|Access Key ID|`accessKey`|`AWS_ACCESS_KEY_ID`|`aws_access_key_id`| +|Secret Access Key|`secretKey`|`AWS_SECRET_ACCESS_KEY`|`aws_secret_access_key`| +|Session Token|`token`|`AWS_SESSION_TOKEN`|`aws_session_token`| +|Region|`region`|`AWS_REGION` or `AWS_DEFAULT_REGION`|`region`| +|Custom CA Bundle |`customCaBundle`|`AWS_CA_BUNDLE`|`ca_bundle`| +|EC2 IMDS Endpoint |`ec2MetadataServiceEndpoint`|`AWS_EC2_METADATA_SERVICE_ENDPOINT`|N/A| +|EC2 IMDS Endpoint Mode|`ec2MetadataServiceEndpointMode`|`AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE`|N/A| +|Disable EC2 IMDS|`skipMetadataApiCheck`|`AWS_EC2_METADATA_DISABLED`|N/A| +|HTTP Proxy|`httpProxy`|`HTTP_PROXY` or `HTTPS_PROXY`|N/A| +|Max Retries|`maxRetries`|`AWS_MAX_ATTEMPTS`|`max_attempts`| +|Profile|`profile`|`AWS_PROFILE` or `AWS_DEFAULT_PROFILE`|N/A| +|Retry Mode|`retryMode`|`AWS_RETRY_MODE`|`retry_mode`| +|Shared Config Files|`sharedConfigFiles`|`AWS_CONFIG_FILE`|N/A| +|Shared Credentials Files|`sharedCredentialsFiles`|`AWS_SHARED_CREDENTIALS_FILE`|N/A| +|Use DualStack Endpoints|`useDualstackEndpoint`|`AWS_USE_DUALSTACK_ENDPOINT`|`use_dualstack_endpoint`| +|Use FIPS Endpoints|`useFipsEndpoint`|`AWS_USE_FIPS_ENDPOINT`|`use_fips_endpoint`| + +### Assume Role Configuration Reference + +Configuration for assuming an IAM role can be done using provider configuration or a named profile in shared configuration files. +In the provider, all parameters for assuming an IAM role are set in the `assumeRole` block. + +See the [assume role documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html) for more information. + +|Setting|Provider|[Environment Variable][envvars]|[Shared Config][config]| +|-------|--------|--------|-----------------------| +|Role ARN|`roleArn`|`AWS_ROLE_ARN`|`role_arn`| +|Duration|`duration`|N/A|`duration_seconds`| +|External ID|`externalId`|N/A|`external_id`| +|Policy|`policy`|N/A|N/A| +|Policy ARNs|`policyArns`|N/A|N/A| +|Session Name|`sessionName`|`AWS_ROLE_SESSION_NAME`|`role_session_name`| +|Source Identity|`sourceIdentity`|N/A|N/A| +|Tags|`tags`|N/A|N/A| +|Transitive Tag Keys|`transitiveTagKeys`|N/A|N/A| + +[envvars]: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html +[config]: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-settings + +### Assume Role with Web Identity Configuration Reference + +Configuration for assuming an IAM role using web identity federation can be done using provider configuration, environment variables, or a named profile in shared configuration files. +In the provider, all parameters for assuming an IAM role are set in the `assumeRoleWithWebIdentity` block. + +See the assume role documentation [section on web identities](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html#cli-configure-role-oidc) for more information.
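+ +For reference, a minimal named profile for web identity federation might look like the following sketch. The profile name, role ARN, token file path, and session name are hypothetical placeholders; the setting names correspond to the Shared Config column in the table below: + +```ini +[profile oidc-example] +role_arn = arn:aws:iam::123456789012:role/ROLE_NAME +web_identity_token_file = /Users/tf_user/secrets/web-identity-token +role_session_name = SESSION_NAME +```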
+ +|Setting|Provider|[Environment Variable][envvars]|[Shared Config][config]| +|-------|--------|--------|-----------------------| +|Role ARN|`roleArn`|`AWS_ROLE_ARN`|`role_arn`| +|Web Identity Token|`webIdentityToken`|N/A|N/A| +|Web Identity Token File|`webIdentityTokenFile`|`AWS_WEB_IDENTITY_TOKEN_FILE`|`web_identity_token_file`| +|Duration|`duration`|N/A|`duration_seconds`| +|Policy|`policy`|N/A|`policy`| +|Policy ARNs|`policyArns`|N/A|`policy_arns`| +|Session Name|`sessionName`|`AWS_ROLE_SESSION_NAME`|`role_session_name`| + +## Custom User-Agent Information + +By default, the underlying AWS client used by the Terraform AWS Provider creates requests with User-Agent headers including information about Terraform and AWS SDK for Go versions. To provide additional information in the User-Agent headers, the `TF_APPEND_USER_AGENT` environment variable can be set and its value will be directly added to HTTP requests. E.g., + +```console +% export TF_APPEND_USER_AGENT="JenkinsAgent/i-12345678 BuildID/1234 (Optional Extra Information)" +``` + +## Argument Reference + +In addition to [generic `provider` arguments](https://www.terraform.io/docs/configuration/providers.html) +(e.g., `alias` and `version`), the following arguments are supported in the AWS + `provider` block: + +* `accessKey` - (Optional) AWS access key. Can also be set with the `AWS_ACCESS_KEY_ID` environment variable, or via a shared credentials file if `profile` is specified. See also `secretKey`. +* `allowedAccountIds` - (Optional) List of allowed AWS account IDs to prevent you from mistakenly using an incorrect one (and potentially end up destroying a live environment). Conflicts with `forbiddenAccountIds`. +* `assumeRole` - (Optional) Configuration block for assuming an IAM role. See the [`assumeRole` Configuration Block](#assume_role-configuration-block) section below. Only one `assumeRole` block may be in the configuration. +* `assumeRoleWithWebIdentity` - (Optional) Configuration block for assuming an IAM role using a web identity. See the [`assumeRoleWithWebIdentity` Configuration Block](#assume_role_with_web_identity-configuration-block) section below. Only one `assumeRoleWithWebIdentity` block may be in the configuration. +* `customCaBundle` - (Optional) File containing custom root and intermediate certificates. + Can also be set using the `AWS_CA_BUNDLE` environment variable. + Setting `ca_bundle` in the shared config file is not supported. +* `defaultTags` - (Optional) Configuration block with resource tag settings to apply across all resources handled by this provider (see the [Terraform multiple provider instances documentation](/docs/configuration/providers.html#alias-multiple-provider-instances) for more information about additional provider configurations). This is designed to replace redundant per-resource `tags` configurations. Provider tags can be overridden with new values, but not excluded from specific resources. To override provider tag values, use the `tags` argument within a resource to configure new tag values for matching keys. See the [`defaultTags`](#default_tags-configuration-block) Configuration Block section below for example usage and available arguments. This functionality is supported in all resources that implement `tags`, with the exception of the `awsAutoscalingGroup` resource. +* `ec2MetadataServiceEndpoint` - (Optional) Address of the EC2 metadata service (IMDS) endpoint to use. Can also be set with the `AWS_EC2_METADATA_SERVICE_ENDPOINT` environment variable.
+* `ec2MetadataServiceEndpointMode` - (Optional) Mode to use in communicating with the metadata service. Valid values are `IPv4` and `IPv6`. Can also be set with the `AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE` environment variable. +* `endpoints` - (Optional) Configuration block for customizing service endpoints. See the [Custom Service Endpoints Guide](/docs/providers/aws/guides/custom-service-endpoints.html) for more information about connecting to alternate AWS endpoints or AWS compatible solutions. See also `useFipsEndpoint`. +* `forbiddenAccountIds` - (Optional) List of forbidden AWS account IDs to prevent you from mistakenly using the wrong one (and potentially end up destroying a live environment). Conflicts with `allowedAccountIds`. +* `httpProxy` - (Optional) Address of an HTTP proxy to use when accessing the AWS API. Can also be set using the `HTTP_PROXY` or `HTTPS_PROXY` environment variables. +* `ignoreTags` - (Optional) Configuration block with resource tag settings to ignore across all resources handled by this provider (except any individual service tag resources such as `awsEc2Tag`) for situations where external systems are managing certain resource tags. Arguments to the configuration block are described below in the `ignoreTags` Configuration Block section. See the [Terraform multiple provider instances documentation](https://www.terraform.io/docs/configuration/providers.html#alias-multiple-provider-configurations) for more information about additional provider configurations. +* `insecure` - (Optional) Whether to explicitly allow the provider to perform "insecure" SSL requests. If omitted, the default value is `false`. +* `maxRetries` - (Optional) Maximum number of times an API call is retried when AWS throttles requests or you experience transient failures. + The delay between the subsequent API calls increases exponentially. + If omitted, the default value is `25`. + Can also be set using the environment variable `AWS_MAX_ATTEMPTS` + and the shared configuration parameter `max_attempts`. +* `profile` - (Optional) AWS profile name as set in the shared configuration and credentials files. + Can also be set using either the environment variables `AWS_PROFILE` or `AWS_DEFAULT_PROFILE`. +* `region` - (Optional) AWS region where the provider will operate. The region must be set. + Can also be set with either the `AWS_REGION` or `AWS_DEFAULT_REGION` environment variables, + or via a shared config file parameter `region` if `profile` is used. + If credentials are retrieved from the EC2 Instance Metadata Service, the region can also be retrieved from the metadata. +* `retryMode` - (Optional) Specifies how retries are attempted. + Valid values are `standard` and `adaptive`. + Can also be configured using the `AWS_RETRY_MODE` environment variable or the shared config file parameter `retry_mode`. +* `s3UsePathStyle` - (Optional) Whether to enable the request to use path-style addressing, i.e., `https://s3.amazonaws.com/bucket/key`. By default, the S3 client will use virtual hosted bucket addressing, `https://bucket.s3.amazonaws.com/key`, when possible. Specific to the Amazon S3 service. +* `secretKey` - (Optional) AWS secret key. Can also be set with the `AWS_SECRET_ACCESS_KEY` environment variable, or via a shared configuration and credentials files if `profile` is used. See also `accessKey`. +* `sharedConfigFiles` - (Optional) List of paths to AWS shared config files. If not set, the default is `[~/.aws/config]`. A single value can also be set with the `AWS_CONFIG_FILE` environment variable.
+* `sharedCredentialsFiles` - (Optional) List of paths to the shared credentials file. If not set and a profile is used, the default value is `[~/.aws/credentials]`. A single value can also be set with the `AWS_SHARED_CREDENTIALS_FILE` environment variable. +* `skipCredentialsValidation` - (Optional) Whether to skip credentials validation via the STS API. This can be useful for testing and for AWS API implementations that do not have STS available. +* `skipMetadataApiCheck` - (Optional) Whether to skip the AWS Metadata API check. Useful for AWS API implementations that do not have a metadata API endpoint. Setting to `true` prevents Terraform from authenticating via the Metadata API. You may need to use other authentication methods like static credentials, configuration variables, or environment variables. +* `skipRegionValidation` - (Optional) Whether to skip validating the region. Useful for AWS-like implementations that use their own region names or to bypass the validation for regions that aren't publicly available yet. +* `skipRequestingAccountId` - (Optional) Whether to skip requesting the account ID. Useful for AWS API implementations that do not have the IAM, STS API, or metadata API. When set to `true` and not determined previously, returns an empty account ID when manually constructing ARN attributes with the following: + - [`awsApiGatewayDeployment` resource](/docs/providers/aws/r/api_gateway_deployment.html) + - [`awsApiGatewayRestApi` resource](/docs/providers/aws/r/api_gateway_rest_api.html) + - [`awsApiGatewayStage` resource](/docs/providers/aws/r/api_gateway_stage.html) + - [`awsApigatewayv2Api` data source](/docs/providers/aws/d/apigatewayv2_api.html) + - [`awsApigatewayv2Api` resource](/docs/providers/aws/r/apigatewayv2_api.html) + - [`awsApigatewayv2Stage` resource](/docs/providers/aws/r/apigatewayv2_stage.html) + - [`awsAppconfigApplication` resource](/docs/providers/aws/r/appconfig_application.html) + - [`awsAppconfigConfigurationProfile` resource](/docs/providers/aws/r/appconfig_configuration_profile.html) + - [`awsAppconfigDeployment` resource](/docs/providers/aws/r/appconfig_deployment.html) + - [`awsAppconfigDeploymentStrategy` resource](/docs/providers/aws/r/appconfig_deployment_strategy.html) + - [`awsAppconfigEnvironment` resource](/docs/providers/aws/r/appconfig_environment.html) + - [`awsAppconfigHostedConfigurationVersion` resource](/docs/providers/aws/r/appconfig_hosted_configuration_version.html) + - [`awsAthenaWorkgroup` resource](/docs/providers/aws/r/athena_workgroup.html) + - [`awsBudgetsBudget` resource](/docs/providers/aws/r/budgets_budget.html) + - [`awsCodedeployApp` resource](/docs/providers/aws/r/codedeploy_app.html) + - [`awsCodedeployDeploymentGroup` resource](/docs/providers/aws/r/codedeploy_deployment_group.html) + - [`awsCognitoIdentityPool` resource](/docs/providers/aws/r/cognito_identity_pool.html) + - [`awsCognitoUserPools` data source](/docs/providers/aws/d/cognito_user_pools.html) + - [`awsDefaultVpcDhcpOptions`](/docs/providers/aws/r/default_vpc_dhcp_options.html) + - [`awsDmsEventSubscription` resource](/docs/providers/aws/r/dms_event_subscription.html) + - [`awsDmsReplicationSubnetGroup` resource](/docs/providers/aws/r/dms_replication_subnet_group.html) + - [`awsDxConnection` resource](/docs/providers/aws/r/dx_connection.html) + - [`awsDxHostedPrivateVirtualInterfaceAccepter` resource](/docs/providers/aws/r/dx_hosted_private_virtual_interface_accepter.html) + - [`awsDxHostedPrivateVirtualInterface`
resource](/docs/providers/aws/r/dx_hosted_private_virtual_interface.html) + - [`awsDxHostedPublicVirtualInterfaceAccepter` resource](/docs/providers/aws/r/dx_hosted_public_virtual_interface_accepter.html) + - [`awsDxHostedPublicVirtualInterface` resource](/docs/providers/aws/r/dx_hosted_public_virtual_interface.html) + - [`awsDxHostedTransitVirtualInterfaceAccepter` resource](/docs/providers/aws/r/dx_hosted_transit_virtual_interface_accepter.html) + - [`awsDxHostedTransitVirtualInterface` resource](/docs/providers/aws/r/dx_hosted_transit_virtual_interface.html) + - [`awsDxLag` resource](/docs/providers/aws/r/dx_lag.html) + - [`awsDxPrivateVirtualInterface` resource](/docs/providers/aws/r/dx_private_virtual_interface.html) + - [`awsDxPublicVirtualInterface` resource](/docs/providers/aws/r/dx_public_virtual_interface.html) + - [`awsDxTransitVirtualInterface` resource](/docs/providers/aws/r/dx_transit_virtual_interface.html) + - [`awsEbsVolume` data source](/docs/providers/aws/d/ebs_volume.html) + - [`awsEc2ClientVpnEndpoint` resource](/docs/providers/aws/r/ec2_client_vpn_endpoint.html) + - [`awsEc2TrafficMirrorFilter` resource](/docs/providers/aws/r/ec2_traffic_mirror_filter.html) + - [`awsEc2TrafficMirrorFilterRule` resource](/docs/providers/aws/r/ec2_traffic_mirror_filter_rule.html) + - [`awsEc2TrafficMirrorSession` resource](/docs/providers/aws/r/ec2_traffic_mirror_session.html) + - [`awsEc2TrafficMirrorTarget` resource](/docs/providers/aws/r/ec2_traffic_mirror_target.html) + - [`awsEc2TransitGatewayRouteTable` data source](/docs/providers/aws/d/ec2_transit_gateway_route_table.html) + - [`awsEc2TransitGatewayRouteTable` resource](/docs/providers/aws/r/ec2_transit_gateway_route_table.html) + - [`awsEcsCapacityProvider` resource (import)](/docs/providers/aws/r/ecs_capacity_provider.html) + - [`awsEcsCluster` resource (import)](/docs/providers/aws/r/ecs_cluster.html) + - [`awsEcsService` resource (import)](/docs/providers/aws/r/ecs_service.html) + - [`awsCustomerGateway` data source](/docs/providers/aws/d/customer_gateway.html) + - [`awsCustomerGateway` resource](/docs/providers/aws/r/customer_gateway.html) + - [`awsEfsAccessPoint` data source](/docs/providers/aws/d/efs_access_point.html) + - [`awsEfsAccessPoint` resource](/docs/providers/aws/r/efs_access_point.html) + - [`awsEfsFileSystem` data source](/docs/providers/aws/d/efs_file_system.html) + - [`awsEfsFileSystem` resource](/docs/providers/aws/r/efs_file_system.html) + - [`awsEfsMountTarget` data source](/docs/providers/aws/d/efs_mount_target.html) + - [`awsEfsMountTarget` resource](/docs/providers/aws/r/efs_mount_target.html) + - [`awsElasticacheCluster` data source](/docs/providers/aws/d/elasticache_cluster.html) + - [`awsElasticacheCluster` resource](/docs/providers/aws/r/elasticache_cluster.html) + - [`awsElb` data source](/docs/providers/aws/d/elb.html) + - [`awsElb` resource](/docs/providers/aws/r/elb.html) + - [`awsFlowLog` resource](/docs/providers/aws/r/flow_log.html) + - [`awsGlueCatalogDatabase` resource](/docs/providers/aws/r/glue_catalog_database.html) + - [`awsGlueCatalogTable` resource](/docs/providers/aws/r/glue_catalog_table.html) + - [`awsGlueConnection` resource](/docs/providers/aws/r/glue_connection.html) + - [`awsGlueCrawler` resource](/docs/providers/aws/r/glue_crawler.html) + - [`awsGlueJob` resource](/docs/providers/aws/r/glue_job.html) + - [`awsGlueMlTransform` resource](/docs/providers/aws/r/glue_ml_transform.html) + - [`awsGlueTrigger` resource](/docs/providers/aws/r/glue_trigger.html) + - 
[`awsGlueUserDefinedFunction` resource](/docs/providers/aws/r/glue_user_defined_function.html) + - [`awsGlueWorkflow` resource](/docs/providers/aws/r/glue_workflow.html) + - [`awsGuarddutyDetector` resource](/docs/providers/aws/r/guardduty_detector.html) + - [`awsGuarddutyIpset` resource](/docs/providers/aws/r/guardduty_ipset.html) + - [`awsGuarddutyThreatintelset` resource](/docs/providers/aws/r/guardduty_threatintelset.html) + - [`awsInstance` data source](/docs/providers/aws/d/instance.html) + - [`awsInstance` resource](/docs/providers/aws/r/instance.html) + - [`awsKeyPair` resource](/docs/providers/aws/r/key_pair.html) + - [`awsLaunchTemplate` data source](/docs/providers/aws/d/launch_template.html) + - [`awsLaunchTemplate` resource](/docs/providers/aws/r/launch_template.html) + - [`awsPlacementGroup` resource](/docs/providers/aws/r/placement_group.html) + - [`awsRedshiftCluster` resource](/docs/providers/aws/r/redshift_cluster.html) + - [`awsRedshiftEventSubscription` resource](/docs/providers/aws/r/redshift_event_subscription.html) + - [`awsRedshiftParameterGroup` resource](/docs/providers/aws/r/redshift_parameter_group.html) + - [`awsRedshiftSnapshotCopyGrant` resource](/docs/providers/aws/r/redshift_snapshot_copy_grant.html) + - [`awsRedshiftSnapshotSchedule` resource](/docs/providers/aws/r/redshift_snapshot_schedule.html) + - [`awsRedshiftSubnetGroup` resource](/docs/providers/aws/r/redshift_subnet_group.html) + - [`awsS3AccountPublicAccessBlock` resource](/docs/providers/aws/r/s3_account_public_access_block.html) + - [`awsSesActiveReceiptRuleSet` resource](/docs/providers/aws/r/ses_active_receipt_rule_set.html) + - [`awsSesConfigurationSet` resource](/docs/providers/aws/r/ses_configuration_set.html) + - [`awsSesDomainIdentityVerification` resource](/docs/providers/aws/r/ses_domain_identity_verification.html) + - [`awsSesDomainIdentity` resource](/docs/providers/aws/r/ses_domain_identity.html) + - [`awsSesEmailIdentity` resource](/docs/providers/aws/r/ses_email_identity.html) + - [`awsSesEventDestination` resource](/docs/providers/aws/r/ses_event_destination.html) + - [`awsSesReceiptFilter` resource](/docs/providers/aws/r/ses_receipt_filter.html) + - [`awsSesReceiptRule` resource](/docs/providers/aws/r/ses_receipt_rule.html) + - [`awsSesTemplate` resource](/docs/providers/aws/r/ses_template.html) + - [`awsSsmDocument` data source](/docs/providers/aws/d/ssm_document.html) + - [`awsSsmDocument` resource](/docs/providers/aws/r/ssm_document.html) + - [`awsSsmParameter` data source](/docs/providers/aws/d/ssm_parameter.html) + - [`awsSsmParameter` resource](/docs/providers/aws/r/ssm_parameter.html) + - [`awsSyntheticsCanary` resource](/docs/providers/aws/r/synthetics_canary.html) + - [`awsVpcEndpointService` data source](/docs/providers/aws/d/vpc_endpoint_service.html) + - [`awsVpcEndpointService` resource](/docs/providers/aws/r/vpc_endpoint_service.html) + - [`awsVpnConnection` resource](/docs/providers/aws/r/vpn_connection.html) + - [`awsVpnGateway` data source](/docs/providers/aws/d/vpn_gateway.html) + - [`awsVpnGateway` resource](/docs/providers/aws/r/vpn_gateway.html) + - [`awsWafGeoMatchSet` resource](/docs/providers/aws/r/waf_geo_match_set.html) + - [`awsWafIpset` resource](/docs/providers/aws/r/waf_ipset.html) + - [`awsWafRateBasedRule` resource](/docs/providers/aws/r/waf_rate_based_rule.html) + - [`awsWafRegexMatchSet` resource](/docs/providers/aws/r/waf_regex_match_set.html) + - [`awsWafRegexPatternSet` resource](/docs/providers/aws/r/waf_regex_pattern_set.html) + - 
[`awsWafregionalIpset` resource](/docs/providers/aws/r/wafregional_ipset.html) + - [`awsWafregionalRateBasedRule` resource](/docs/providers/aws/r/wafregional_rate_based_rule.html) + - [`awsWafregionalRule` resource](/docs/providers/aws/r/wafregional_rule.html) + - [`awsWafregionalRuleGroup` resource](/docs/providers/aws/r/wafregional_rule_group.html) + - [`awsWafregionalWebAcl` resource](/docs/providers/aws/r/wafregional_web_acl.html) + - [`awsWafRule` resource](/docs/providers/aws/r/waf_rule.html) + - [`awsWafRuleGroup` resource](/docs/providers/aws/r/waf_rule_group.html) + - [`awsWafSizeConstraintSet` resource](/docs/providers/aws/r/waf_size_constraint_set.html) + - [`awsWafWebAcl` resource](/docs/providers/aws/r/waf_web_acl.html) + - [`awsWafXssMatchSet` resource](/docs/providers/aws/r/waf_xss_match_set.html) +* `stsRegion` - (Optional) AWS region for STS. If unset, AWS will use the same region for STS as other non-STS operations. +* `token` - (Optional) Session token for validating temporary credentials. Typically provided after successful identity federation or Multi-Factor Authentication (MFA) login. With MFA login, this is the session token provided afterward, not the 6-digit MFA code used to get temporary credentials. Can also be set with the `AWS_SESSION_TOKEN` environment variable. +* `useDualstackEndpoint` - (Optional) Force the provider to resolve endpoints with DualStack capability. Can also be set with the `AWS_USE_DUALSTACK_ENDPOINT` environment variable or in a shared config file (`use_dualstack_endpoint`). +* `useFipsEndpoint` - (Optional) Force the provider to resolve endpoints with FIPS capability. Can also be set with the `AWS_USE_FIPS_ENDPOINT` environment variable or in a shared config file (`use_fips_endpoint`). + +### assume_role Configuration Block + +The `assumeRole` configuration block supports the following arguments: + +* `duration` - (Optional) Duration of the assume role session. You can provide a value from 15 minutes up to the maximum session duration setting for the role. Represented by a string such as `1h`, `2h45m`, or `30m15s`. +* `externalId` - (Optional) External identifier to use when assuming the role. +* `policy` - (Optional) IAM policy in JSON format that further restricts the permissions for the IAM role being assumed. +* `policyArns` - (Optional) Set of Amazon Resource Names (ARNs) of IAM policies that further restrict the permissions for the IAM role being assumed. +* `roleArn` - (Required) ARN of the IAM Role to assume. +* `sessionName` - (Optional) Session name to use when assuming the role. +* `sourceIdentity` - (Optional) Source identity specified by the principal assuming the role. +* `tags` - (Optional) Map of assume role session tags. +* `transitiveTagKeys` - (Optional) Set of assume role session tag keys to pass to any subsequent sessions. + +### assume_role_with_web_identity Configuration Block + +The `assumeRoleWithWebIdentity` configuration block supports the following arguments: + +* `duration` - (Optional) Duration of the assume role session. + You can provide a value from 15 minutes up to the maximum session duration setting for the role. + Represented by a string such as `1h`, `2h45m`, or `30m15s`. +* `policy` - (Optional) IAM policy in JSON format that further restricts the permissions for the IAM role being assumed. +* `policyArns` - (Optional) Set of Amazon Resource Names (ARNs) of IAM policies that further restrict the permissions for the IAM role being assumed. +* `roleArn` - (Required) ARN of the IAM Role to assume.
+ Can also be set with the `AWS_ROLE_ARN` environment variable. +* `sessionName` - (Optional) Session name to use when assuming the role. + Can also be set with the `AWS_ROLE_SESSION_NAME` environment variable. +* `webIdentityToken` - (Optional) Value of a web identity token from an OpenID Connect (OIDC) or OAuth provider. + One of `webIdentityToken` or `webIdentityTokenFile` is required. +* `webIdentityTokenFile` - (Optional) File containing a web identity token from an OpenID Connect (OIDC) or OAuth provider. + One of `webIdentityTokenFile` or `webIdentityToken` is required. + Can also be set with the `AWS_WEB_IDENTITY_TOKEN_FILE` environment variable. + +### default_tags Configuration Block + +> **Hands-on:** Try the [Configure Default Tags for AWS Resources](https://learn.hashicorp.com/tutorials/terraform/aws-default-tags?in=terraform/aws) tutorial. + +Example: Resource with provider default tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + defaultTags: [ + { + tags: { + Environment: "Test", + Name: "Provider Tag", + }, + }, + ], + }); + const example = new Vpc(this, "example", {}); + new TerraformOutput(this, "vpc_all_tags", { + value: example.tagsAll, + }); + new TerraformOutput(this, "vpc_resource_level_tags", { + value: example.tags, + }); + } +} + +``` + +Outputs: + +```console +$ terraform apply +... +Outputs: + +vpc_all_tags = tomap({ + "Environment" = "Test" + "Name" = "Provider Tag" +}) +``` + +Example: Resource with tags and provider default tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + defaultTags: [ + { + tags: { + Environment: "Test", + Name: "Provider Tag", + }, + }, + ], + }); + const example = new Vpc(this, "example", { + tags: { + Owner: "example", + }, + }); + new TerraformOutput(this, "vpc_all_tags", { + value: example.tagsAll, + }); + new TerraformOutput(this, "vpc_resource_level_tags", { + value: example.tags, + }); + } +} + +``` + +Outputs: + +```console +$ terraform apply +... +Outputs: + +vpc_all_tags = tomap({ + "Environment" = "Test" + "Name" = "Provider Tag" + "Owner" = "example" +}) +vpc_resource_level_tags = tomap({ + "Owner" = "example" +}) +``` + +Example: Resource overriding provider default tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + defaultTags: [ + { + tags: { + Environment: "Test", + Name: "Provider Tag", + }, + }, + ], + }); + const example = new Vpc(this, "example", { + tags: { + Environment: "Production", + }, + }); + new TerraformOutput(this, "vpc_all_tags", { + value: example.tagsAll, + }); + new TerraformOutput(this, "vpc_resource_level_tags", { + value: example.tags, + }); + } +} + +``` + +Outputs: + +```console +$ terraform apply +... +Outputs: + +vpc_all_tags = tomap({ + "Environment" = "Production" + "Name" = "Provider Tag" +}) +vpc_resource_level_tags = tomap({ + "Environment" = "Production" +}) +``` + +The `defaultTags` configuration block supports the following argument: + +* `tags` - (Optional) Key-value map of tags to apply to all resources. + +### ignore_tags Configuration Block + +Example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + ignoreTags: [ + { + keys: ["TagKey1"], + }, + ], + }); + } +} + +``` + +The `ignoreTags` configuration block supports the following arguments: + +* `keys` - (Optional) List of exact resource tag keys to ignore across all resources handled by this provider. This configuration prevents Terraform from returning the tag in any `tags` attributes and displaying any configuration difference for the tag value. If any resource configuration still has this tag key configured in the `tags` argument, it will display a perpetual difference until the tag is removed from the argument or [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) is also used. +* `keyPrefixes` - (Optional) List of resource tag key prefixes to ignore across all resources handled by this provider. This configuration prevents Terraform from returning any tag key matching the prefixes in any `tags` attributes and displaying any configuration difference for those tag values. If any resource configuration still has a tag matching one of the prefixes configured in the `tags` argument, it will display a perpetual difference until the tag is removed from the argument or [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) is also used. + +## Getting the Account ID + +If you use either `allowedAccountIds` or `forbiddenAccountIds`, +Terraform uses several approaches to get the actual account ID +in order to compare it with allowed or forbidden IDs. + +Approaches differ per authentication provider: + +* EC2 instance with IAM Instance Profile - [Metadata API](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html) + is always used. Introduced in Terraform `0.6.16`. +* All other providers (environment variable, shared credentials file, ...)
+ will try three approaches in the following order: + * `iam:GetUser` - Typically useful for IAM Users. It also means + that each user needs to be privileged to call `iam:GetUser` for themselves. + * `sts:GetCallerIdentity` - _Should_ work for both IAM Users and federated IAM Roles, + introduced in Terraform `0.6.16`. + * `iam:ListRoles` - This is specifically useful for IdP-federated profiles + which cannot use `iam:GetUser`. It also means that each federated user + needs to be _assuming_ an IAM role which allows `iam:ListRoles`. + Used in Terraform `0.6.16+`. + Before `sts:GetCallerIdentity` was introduced, this was the only way + to get the account ID for a federated account. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/accessanalyzer_analyzer.html.markdown b/website/docs/cdktf/typescript/r/accessanalyzer_analyzer.html.markdown new file mode 100644 index 00000000000..4437784fff3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/accessanalyzer_analyzer.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "IAM Access Analyzer" +layout: "aws" +page_title: "AWS: aws_accessanalyzer_analyzer" +description: |- + Manages an Access Analyzer Analyzer +--- + + + +# Resource: aws_accessanalyzer_analyzer + +Manages an Access Analyzer Analyzer. More information can be found in the [Access Analyzer User Guide](https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html). + +## Example Usage + +### Account Analyzer + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AccessanalyzerAnalyzer } from "./.gen/providers/aws/accessanalyzer-analyzer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AccessanalyzerAnalyzer(this, "example", { + analyzerName: "example", + }); + } +} + +``` + +### Organization Analyzer + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AccessanalyzerAnalyzer } from "./.gen/providers/aws/accessanalyzer-analyzer"; +import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new OrganizationsOrganization(this, "example", { + awsServiceAccessPrincipals: ["access-analyzer.amazonaws.com"], + }); + const awsAccessanalyzerAnalyzerExample = new AccessanalyzerAnalyzer( + this, + "example_1", + { + analyzerName: "example", + dependsOn: [example], + type: "ORGANIZATION", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAccessanalyzerAnalyzerExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `analyzerName` - (Required) Name of the Analyzer. + +The following arguments are optional: + +* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) Type of Analyzer. Valid values are `ACCOUNT` or `ORGANIZATION`. Defaults to `ACCOUNT`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Analyzer. +* `id` - Analyzer name. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Access Analyzer Analyzers using the `analyzerName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Access Analyzer Analyzers using the `analyzerName`. For example: + +```console +% terraform import aws_accessanalyzer_analyzer.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/accessanalyzer_archive_rule.html.markdown b/website/docs/cdktf/typescript/r/accessanalyzer_archive_rule.html.markdown new file mode 100644 index 00000000000..a77cd2af487 --- /dev/null +++ b/website/docs/cdktf/typescript/r/accessanalyzer_archive_rule.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "IAM Access Analyzer" +layout: "aws" +page_title: "AWS: aws_accessanalyzer_archive_rule" +description: |- + Terraform resource for managing an AWS AccessAnalyzer Archive Rule. +--- + + + +# Resource: aws_accessanalyzer_archive_rule + +Terraform resource for managing an AWS AccessAnalyzer Archive Rule. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AccessanalyzerArchiveRule } from "./.gen/providers/aws/accessanalyzer-archive-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AccessanalyzerArchiveRule(this, "example", { + analyzerName: "example-analyzer", + filter: [ + { + criteria: "condition.aws:UserId", + eq: ["userid"], + }, + { + criteria: "error", + exists: Token.asString(true), + }, + { + criteria: "isPublic", + eq: ["false"], + }, + ], + ruleName: "example-rule", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `analyzerName` - (Required) Analyzer name. +* `filter` - (Required) Filter criteria for the archive rule. See [Filter](#filter) for more details. +* `ruleName` - (Required) Rule name. + +### Filter + +**Note:** One comparator must be included with each filter. + +* `criteria` - (Required) Filter criteria. +* `contains` - (Optional) Contains comparator. +* `eq` - (Optional) Equals comparator. +* `exists` - (Optional) Boolean comparator.
+* `neq` - (Optional) Not Equals comparator. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Resource ID in the format: `analyzerName/ruleName`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AccessAnalyzer ArchiveRule using the `analyzerName/ruleName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AccessAnalyzer ArchiveRule using the `analyzerName/ruleName`. For example: + +```console +% terraform import aws_accessanalyzer_archive_rule.example example-analyzer/example-rule +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/account_alternate_contact.html.markdown b/website/docs/cdktf/typescript/r/account_alternate_contact.html.markdown new file mode 100644 index 00000000000..c33c7cba69f --- /dev/null +++ b/website/docs/cdktf/typescript/r/account_alternate_contact.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "Account Management" +layout: "aws" +page_title: "AWS: aws_account_alternate_contact" +description: |- + Manages the specified alternate contact attached to an AWS Account. +--- + + + +# Resource: aws_account_alternate_contact + +Manages the specified alternate contact attached to an AWS Account. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AccountAlternateContact } from "./.gen/providers/aws/account-alternate-contact"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AccountAlternateContact(this, "operations", { + alternateContactType: "OPERATIONS", + emailAddress: "test@example.com", + name: "Example", + phoneNumber: "+1234567890", + title: "Example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) ID of the target account when managing member accounts. Will manage current user's account by default if omitted. +* `alternateContactType` - (Required) Type of the alternate contact. Allowed values are: `BILLING`, `OPERATIONS`, `SECURITY`. +* `emailAddress` - (Required) An email address for the alternate contact. +* `name` - (Required) Name of the alternate contact. +* `phoneNumber` - (Required) Phone number for the alternate contact. +* `title` - (Required) Title for the alternate contact. + +## Attribute Reference + +This resource exports no additional attributes. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5m`) +- `update` - (Default `5m`) +- `delete` - (Default `5m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Alternate Contact for the current or another account using the `alternateContactType`.
For example: + +Import the Alternate Contact for the current account: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Import the Alternate Contact for another account using the `accountId` and `alternateContactType` separated by a forward slash (`/`): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import** the Alternate Contact for the current or another account using the `alternateContactType`. For example: + +Import the Alternate Contact for the current account: + +```console +% terraform import aws_account_alternate_contact.operations OPERATIONS +``` + +Import the Alternate Contact for another account using the `accountId` and `alternateContactType` separated by a forward slash (`/`): + +```console +% terraform import aws_account_alternate_contact.operations 1234567890/OPERATIONS +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/account_primary_contact.html.markdown b/website/docs/cdktf/typescript/r/account_primary_contact.html.markdown new file mode 100644 index 00000000000..8e80a59cc19 --- /dev/null +++ b/website/docs/cdktf/typescript/r/account_primary_contact.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "Account Management" +layout: "aws" +page_title: "AWS: aws_account_primary_contact" +description: |- + Manages the specified primary contact information associated with an AWS Account. +--- + + + +# Resource: aws_account_primary_contact + +Manages the specified primary contact information associated with an AWS Account. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AccountPrimaryContact } from "./.gen/providers/aws/account-primary-contact"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AccountPrimaryContact(this, "test", { + addressLine1: "123 Any Street", + city: "Seattle", + companyName: "Example Corp, Inc.", + countryCode: "US", + districtOrCounty: "King", + fullName: "My Name", + phoneNumber: "+64211111111", + postalCode: "98101", + stateOrRegion: "WA", + websiteUrl: "https://www.examplecorp.com", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) The ID of the target account when managing member accounts. Will manage current user's account by default if omitted. +* `addressLine1` - (Required) The first line of the primary contact address. +* `addressLine2` - (Optional) The second line of the primary contact address, if any. +* `addressLine3` - (Optional) The third line of the primary contact address, if any. +* `city` - (Required) The city of the primary contact address. 
+* `companyName` - (Optional) The name of the company associated with the primary contact information, if any. +* `countryCode` - (Required) The ISO-3166 two-letter country code for the primary contact address. +* `districtOrCounty` - (Optional) The district or county of the primary contact address, if any. +* `fullName` - (Required) The full name of the primary contact address. +* `phoneNumber` - (Required) The phone number of the primary contact information. The number will be validated and, in some countries, checked for activation. +* `postalCode` - (Required) The postal code of the primary contact address. +* `stateOrRegion` - (Optional) The state or region of the primary contact address. This field is required in selected countries. +* `websiteUrl` - (Optional) The URL of the website associated with the primary contact information, if any. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Primary Contact using the `accountId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the Primary Contact using the `accountId`. For example: + +```console +% terraform import aws_account_primary_contact.test 1234567890 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/acm_certificate.html.markdown b/website/docs/cdktf/typescript/r/acm_certificate.html.markdown new file mode 100644 index 00000000000..be6659d51d1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/acm_certificate.html.markdown @@ -0,0 +1,309 @@ +--- +subcategory: "ACM (Certificate Manager)" +layout: "aws" +page_title: "AWS: aws_acm_certificate" +description: |- + Requests and manages a certificate from Amazon Certificate Manager (ACM). +--- + + + +# Resource: aws_acm_certificate + +The ACM certificate resource allows requesting and managing certificates +from the Amazon Certificate Manager. + +ACM certificates can be created in three ways: +Amazon-issued, where AWS provides the certificate authority and automatically manages renewal; +imported certificates, issued by another certificate authority; +and private certificates, issued using an ACM Private Certificate Authority. + +## Amazon-Issued Certificates + +For Amazon-issued certificates, this resource deals with requesting certificates and managing their attributes and life-cycle. +This resource does not deal with validation of a certificate but can provide inputs +for other resources implementing the validation. +It does not wait for a certificate to be issued. +Use a [`awsAcmCertificateValidation`](acm_certificate_validation.html) resource for this. + +Most commonly, this resource is used together with [`awsRoute53Record`](route53_record.html) and +[`awsAcmCertificateValidation`](acm_certificate_validation.html) to request a DNS validated certificate, +deploy the required validation records and wait for validation to complete. + +Domain validation through email is also supported but should be avoided as it requires a manual step outside of Terraform.
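A compact sketch of the request-and-wait workflow described above, assuming the DNS validation record is created out of band (the `awsRoute53Record` step is omitted for brevity; the complete pattern is shown in the examples below and on the [`awsAcmCertificateValidation`](acm_certificate_validation.html) page):

```typescript
// Minimal sketch only: request a DNS-validated certificate, then wait for it
// to be issued. The aws_route53_record that satisfies validation is omitted;
// resource names here are illustrative, not part of the provider API.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { AcmCertificate } from "./.gen/providers/aws/acm-certificate";
import { AcmCertificateValidation } from "./.gen/providers/aws/acm-certificate-validation";

class CertificateSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const cert = new AcmCertificate(this, "cert", {
      domainName: "example.com",
      validationMethod: "DNS",
    });
    // Blocks the apply until ACM reports the certificate as issued.
    new AcmCertificateValidation(this, "certValidation", {
      certificateArn: cert.arn,
    });
  }
}
```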
+ +It's recommended to specify `create_before_destroy = true` in a [lifecycle][1] block to replace a certificate +which is currently in use (e.g., by [`awsLbListener`](lb_listener.html)). + +## Certificates Imported from Other Certificate Authority + +Imported certificates can be used to make certificates created with an external certificate authority available for AWS services. + +As they are not managed by AWS, imported certificates are not eligible for automatic renewal. +New certificate materials can be supplied to an existing imported certificate to update it in place. + +## Private Certificates + +Private certificates are issued by an ACM Private Certificate Authority, which can be created using the resource type [`awsAcmpcaCertificateAuthority`](acmpca_certificate_authority.html). + +Private certificates created using this resource are eligible for managed renewal if they have been exported or associated with another AWS service. +See [managed renewal documentation](https://docs.aws.amazon.com/acm/latest/userguide/managed-renewal.html) for more information. +By default, a certificate is valid for 395 days and the managed renewal process will start 60 days before expiration. +To renew the certificate earlier than 60 days before expiration, configure `earlyRenewalDuration`. + +## Example Usage + +### Create Certificate + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmCertificate } from "./.gen/providers/aws/acm-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AcmCertificate(this, "cert", { + domainName: "example.com", + lifecycle: { + createBeforeDestroy: true, + }, + tags: { + Environment: "test", + }, + validationMethod: "DNS", + }); + } +} + +``` + +### Custom Domain Validation Options + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmCertificate } from "./.gen/providers/aws/acm-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AcmCertificate(this, "cert", { + domainName: "testing.example.com", + validationMethod: "EMAIL", + validationOption: [ + { + domainName: "testing.example.com", + validationDomain: "example.com", + }, + ], + }); + } +} + +``` + +### Existing Certificate Body Import + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { AcmCertificate } from "./.gen/providers/aws/acm-certificate"; +import { PrivateKey } from "./.gen/providers/tls/private-key"; +import { SelfSignedCert } from "./.gen/providers/tls/self-signed-cert"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: tls. + For a more precise conversion please use the --provider flag in convert.*/ + const example = new PrivateKey(this, "example", { + algorithm: "RSA", + }); + const tlsSelfSignedCertExample = new SelfSignedCert(this, "example_1", { + allowed_uses: ["key_encipherment", "digital_signature", "server_auth"], + key_algorithm: "RSA", + private_key_pem: example.privateKeyPem, + subject: [ + { + common_name: "example.com", + organization: "ACME Examples, Inc", + }, + ], + validity_period_hours: 12, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + tlsSelfSignedCertExample.overrideLogicalId("example"); + new AcmCertificate(this, "cert", { + certificateBody: Token.asString(tlsSelfSignedCertExample.certPem), + privateKey: Token.asString(example.privateKeyPem), + }); + } +} + +``` + +### Referencing domain_validation_options With for_each Based Resources + +See the [`awsAcmCertificateValidation` resource](acm_certificate_validation.html) for a full example of performing DNS validation. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Token, + TerraformIterator, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53Record } from "./.gen/providers/aws/route53-record"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny( + "${{ for dvo in ${" + + awsAcmCertificateExample.domainValidationOptions + + "} : dvo.domain_name => {\n name = dvo.resource_record_name\n record = dvo.resource_record_value\n type = dvo.resource_record_type\n }}}" + ) + ); + new Route53Record(this, "example", { + allowOverwrite: true, + name: Token.asString( + propertyAccess(exampleForEachIterator.value, ["name"]) + ), + records: [ + Token.asString( + propertyAccess(exampleForEachIterator.value, ["record"]) + ), + ], + ttl: 60, + type: Token.asString( + propertyAccess(exampleForEachIterator.value, ["type"]) + ), + zoneId: Token.asString(awsRoute53ZoneExample.zoneId), + forEach: exampleForEachIterator, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* Creating an Amazon issued certificate + * `domainName` - (Required) Domain name for which the certificate should be issued + * `subjectAlternativeNames` - (Optional) Set of domains that should be SANs in the issued certificate. 
To remove all elements of a previously configured list, set this value equal to an empty list (`[]`) or use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html) to trigger recreation. + * `validationMethod` - (Optional) Which method to use for validation. `DNS` or `EMAIL` are valid. This parameter must not be set for certificates that were imported into ACM and then into Terraform. + * `keyAlgorithm` - (Optional) Specifies the algorithm of the public and private key pair that your Amazon issued certificate uses to encrypt data. See [ACM Certificate characteristics](https://docs.aws.amazon.com/acm/latest/userguide/acm-certificate.html#algorithms) for more details. + * `options` - (Optional) Configuration block used to set certificate options. Detailed below. + * `validationOption` - (Optional) Configuration block used to specify information about the initial validation of each domain name. Detailed below. +* Importing an existing certificate + * `privateKey` - (Required) Certificate's PEM-formatted private key + * `certificateBody` - (Required) Certificate's PEM-formatted public key + * `certificateChain` - (Optional) Certificate's PEM-formatted chain +* Creating a private CA issued certificate + * `certificateAuthorityArn` - (Required) ARN of an ACM PCA + * `domainName` - (Required) Domain name for which the certificate should be issued. + * `earlyRenewalDuration` - (Optional) Amount of time to start automatic renewal process before expiration. + Has no effect if less than 60 days. + Represented by either + a subset of [RFC 3339 duration](https://www.rfc-editor.org/rfc/rfc3339) supporting years, months, and days (e.g., `P90D`), + or a string such as `2160h`. +* `subjectAlternativeNames` - (Optional) Set of domains that should be SANs in the issued certificate. + To remove all elements of a previously configured list, set this value equal to an empty list (`[]`) + or use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html) to trigger recreation. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## options Configuration Block + +Supported nested arguments for the `options` configuration block: + +* `certificateTransparencyLoggingPreference` - (Optional) Whether certificate details should be added to a certificate transparency log. Valid values are `ENABLED` or `DISABLED`. See https://docs.aws.amazon.com/acm/latest/userguide/acm-concepts.html#concept-transparency for more details. + +## validation_option Configuration Block + +Supported nested arguments for the `validationOption` configuration block: + +* `domainName` - (Required) Fully qualified domain name (FQDN) in the certificate. +* `validationDomain` - (Required) Domain name that you want ACM to use to send you validation emails. This domain name is the suffix of the email addresses that you want ACM to use. This must be the same as the `domainName` value or a superdomain of the `domainName` value. For example, if you request a certificate for `"testing.example.com"`, you can specify `"example.com"` for this value.
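As a brief illustration of the `keyAlgorithm` and `options` arguments described above, a minimal sketch (scaffolding mirrors the generated examples; the domain is a placeholder, and `EC_prime256v1` is one of the key algorithms listed in the ACM documentation linked above):

```typescript
// Sketch: request an ECDSA certificate and opt out of certificate
// transparency logging via the options block. Values are illustrative.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { AcmCertificate } from "./.gen/providers/aws/acm-certificate";

class CertificateOptionsSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new AcmCertificate(this, "cert", {
      domainName: "app.example.com",
      validationMethod: "DNS",
      keyAlgorithm: "EC_prime256v1",
      options: {
        certificateTransparencyLoggingPreference: "DISABLED",
      },
    });
  }
}
```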
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ARN of the certificate +* `arn` - ARN of the certificate +* `domainName` - Domain name for which the certificate is issued +* `domainValidationOptions` - Set of domain validation objects which can be used to complete certificate validation. + Can have more than one element, e.g., if SANs are defined. + Only set if `DNS`-validation was used. +* `notAfter` - Expiration date and time of the certificate. +* `notBefore` - Start of the validity period of the certificate. +* `pendingRenewal` - `true` if a Private certificate eligible for managed renewal is within the `earlyRenewalDuration` period. +* `renewalEligibility` - Whether the certificate is eligible for managed renewal. +* `renewalSummary` - Contains information about the status of ACM's [managed renewal](https://docs.aws.amazon.com/acm/latest/userguide/acm-renewal.html) for the certificate. +* `status` - Status of the certificate. +* `type` - Source of the certificate. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `validationEmails` - List of addresses that received a validation email. Only set if `EMAIL` validation was used. + +Domain validation objects export the following attributes: + +* `domainName` - Domain to be validated +* `resourceRecordName` - The name of the DNS record to create to validate the certificate +* `resourceRecordType` - The type of DNS record to create +* `resourceRecordValue` - The value the DNS record needs to have + +Renewal summary objects export the following attributes: + +* `renewalStatus` - The status of ACM's managed renewal of the certificate +* `renewalStatusReason` - The reason that a renewal request was unsuccessful or is pending + +[1]: https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import certificates using their ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import certificates using their ARN. For example: + +```console +% terraform import aws_acm_certificate.cert arn:aws:acm:eu-central-1:123456789012:certificate/7e7a28d2-163f-4b8f-b9cd-822f96c08d6a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/acm_certificate_validation.html.markdown b/website/docs/cdktf/typescript/r/acm_certificate_validation.html.markdown new file mode 100644 index 00000000000..d31aa3ae524 --- /dev/null +++ b/website/docs/cdktf/typescript/r/acm_certificate_validation.html.markdown @@ -0,0 +1,277 @@ +--- +subcategory: "ACM (Certificate Manager)" +layout: "aws" +page_title: "AWS: aws_acm_certificate_validation" +description: |- + Waits for and checks successful validation of an ACM certificate. +--- + + + +# Resource: aws_acm_certificate_validation + +This resource represents a successful validation of an ACM certificate in concert +with other resources.
+ +Most commonly, this resource is used together with [`awsRoute53Record`](route53_record.html) and +[`awsAcmCertificate`](acm_certificate.html) to request a DNS validated certificate, +deploy the required validation records and wait for validation to complete. + +~> **WARNING:** This resource implements a part of the validation workflow. It does not represent a real-world entity in AWS, therefore changing or deleting this resource on its own has no immediate effect. + +## Example Usage + +### DNS Validation with Route 53 + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Token, + TerraformIterator, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmCertificate } from "./.gen/providers/aws/acm-certificate"; +import { AcmCertificateValidation } from "./.gen/providers/aws/acm-certificate-validation"; +import { DataAwsRoute53Zone } from "./.gen/providers/aws/data-aws-route53-zone"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { Route53Record } from "./.gen/providers/aws/route53-record"; +interface MyConfig { + defaultAction: any; + loadBalancerArn: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new AcmCertificate(this, "example", { + domainName: "example.com", + validationMethod: "DNS", + }); + const dataAwsRoute53ZoneExample = new DataAwsRoute53Zone( + this, + "example_1", + { + name: "example.com", + privateZone: false, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRoute53ZoneExample.overrideLogicalId("example"); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny( + "${{ for dvo in ${" + + example.domainValidationOptions + + "} : dvo.domain_name => {\n name = dvo.resource_record_name\n record = dvo.resource_record_value\n type = dvo.resource_record_type\n }}}" + ) + ); + const awsRoute53RecordExample = new Route53Record(this, "example_2", { + allowOverwrite: true, + name: Token.asString( + propertyAccess(exampleForEachIterator.value, ["name"]) + ), + records: [ + Token.asString( + propertyAccess(exampleForEachIterator.value, ["record"]) + ), + ], + ttl: 60, + type: Token.asString( + propertyAccess(exampleForEachIterator.value, ["type"]) + ), + zoneId: Token.asString(dataAwsRoute53ZoneExample.zoneId), + forEach: exampleForEachIterator, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsRoute53RecordExample.overrideLogicalId("example"); + const awsAcmCertificateValidationExample = new AcmCertificateValidation( + this, + "example_3", + { + certificateArn: example.arn, + validationRecordFqdns: Token.asList( + "${[ for record in ${" + + awsRoute53RecordExample.fqn + + "} : record.fqdn]}" + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmCertificateValidationExample.overrideLogicalId("example"); + const awsLbListenerExample = new LbListener(this, "example_4", { + certificateArn: Token.asString( + awsAcmCertificateValidationExample.certificateArn + ), + defaultAction: config.defaultAction, + loadBalancerArn: config.loadBalancerArn, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerExample.overrideLogicalId("example"); + } +} + +``` + +### Alternative Domains DNS Validation with Route 53 + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Token, + TerraformIterator, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmCertificate } from "./.gen/providers/aws/acm-certificate"; +import { AcmCertificateValidation } from "./.gen/providers/aws/acm-certificate-validation"; +import { DataAwsRoute53Zone } from "./.gen/providers/aws/data-aws-route53-zone"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { Route53Record } from "./.gen/providers/aws/route53-record"; +interface MyConfig { + defaultAction: any; + loadBalancerArn: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new AcmCertificate(this, "example", { + domainName: "example.com", + subjectAlternativeNames: ["www.example.com", "example.org"], + validationMethod: "DNS", + }); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny( + "${{ for dvo in ${" + + example.domainValidationOptions + + '} : dvo.domain_name => {\n name = dvo.resource_record_name\n record = dvo.resource_record_value\n type = dvo.resource_record_type\n zone_id = dvo.domain_name == "example.org" ? 
data.aws_route53_zone.example_org.zone_id : data.aws_route53_zone.example_com.zone_id\n }}}' + ) + ); + const awsRoute53RecordExample = new Route53Record(this, "example_1", { + allowOverwrite: true, + name: Token.asString( + propertyAccess(exampleForEachIterator.value, ["name"]) + ), + records: [ + Token.asString( + propertyAccess(exampleForEachIterator.value, ["record"]) + ), + ], + ttl: 60, + type: Token.asString( + propertyAccess(exampleForEachIterator.value, ["type"]) + ), + zoneId: Token.asString( + propertyAccess(exampleForEachIterator.value, ["zone_id"]) + ), + forEach: exampleForEachIterator, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53RecordExample.overrideLogicalId("example"); + new DataAwsRoute53Zone(this, "example_com", { + name: "example.com", + privateZone: false, + }); + new DataAwsRoute53Zone(this, "example_org", { + name: "example.org", + privateZone: false, + }); + const awsAcmCertificateValidationExample = new AcmCertificateValidation( + this, + "example_4", + { + certificateArn: example.arn, + validationRecordFqdns: Token.asList( + "${[ for record in ${" + + awsRoute53RecordExample.fqn + + "} : record.fqdn]}" + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmCertificateValidationExample.overrideLogicalId("example"); + const awsLbListenerExample = new LbListener(this, "example_5", { + certificateArn: Token.asString( + awsAcmCertificateValidationExample.certificateArn + ), + defaultAction: config.defaultAction, + loadBalancerArn: config.loadBalancerArn, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerExample.overrideLogicalId("example"); + } +} + +``` + +### Email Validation + +In this situation, the resource is simply a waiter for manual email approval of ACM certificates. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmCertificate } from "./.gen/providers/aws/acm-certificate"; +import { AcmCertificateValidation } from "./.gen/providers/aws/acm-certificate-validation"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AcmCertificate(this, "example", { + domainName: "example.com", + validationMethod: "EMAIL", + }); + const awsAcmCertificateValidationExample = new AcmCertificateValidation( + this, + "example_1", + { + certificateArn: example.arn, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmCertificateValidationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificateArn` - (Required) ARN of the certificate that is being validated. +* `validationRecordFqdns` - (Optional) List of FQDNs that implement the validation. Only valid for DNS validation method ACM certificates. 
If this is set, the resource can implement additional sanity checks and has an explicit dependency on the resource that is implementing the validation. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Time at which the certificate was issued + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `75m`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/acmpca_certificate.html.markdown b/website/docs/cdktf/typescript/r/acmpca_certificate.html.markdown new file mode 100644 index 00000000000..451b18336f9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/acmpca_certificate.html.markdown @@ -0,0 +1,139 @@ +--- +subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)" +layout: "aws" +page_title: "AWS: aws_acmpca_certificate" +description: |- + Provides a resource to issue a certificate using AWS Certificate Manager Private Certificate Authority (ACM PCA) +--- + + + +# Resource: aws_acmpca_certificate + +Provides a resource to issue a certificate using AWS Certificate Manager Private Certificate Authority (ACM PCA). + +Certificates created using `awsAcmpcaCertificate` are not eligible for automatic renewal, +and must be replaced instead. +To issue a renewable certificate using an ACM PCA, create a [`awsAcmCertificate`](acm_certificate.html) +with the parameter `certificateAuthorityArn`. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmpcaCertificate } from "./.gen/providers/aws/acmpca-certificate"; +import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority"; +import { CertRequest } from "./.gen/providers/tls/cert-request"; +import { PrivateKey } from "./.gen/providers/tls/private-key"; +interface MyConfig { + certificateAuthorityConfiguration: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: tls.
+ For a more precise conversion please use the --provider flag in convert.*/ + const example = new AcmpcaCertificateAuthority(this, "example", { + permanentDeletionTimeInDays: 7, + private_certificate_configuration: [ + { + key_algorithm: "RSA_4096", + signing_algorithm: "SHA512WITHRSA", + subject: [ + { + common_name: "example.com", + }, + ], + }, + ], + certificateAuthorityConfiguration: + config.certificateAuthorityConfiguration, + }); + const key = new PrivateKey(this, "key", { + algorithm: "RSA", + }); + const csr = new CertRequest(this, "csr", { + key_algorithm: "RSA", + private_key_pem: key.privateKeyPem, + subject: [ + { + common_name: "example", + }, + ], + }); + const awsAcmpcaCertificateExample = new AcmpcaCertificate( + this, + "example_3", + { + certificateAuthorityArn: example.arn, + certificateSigningRequest: Token.asString(csr.certRequestPem), + signingAlgorithm: "SHA256WITHRSA", + validity: { + type: "YEARS", + value: Token.asString(1), + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificateAuthorityArn` - (Required) ARN of the certificate authority. +* `certificateSigningRequest` - (Required) Certificate Signing Request in PEM format. +* `signingAlgorithm` - (Required) Algorithm to use to sign certificate requests. Valid values: `SHA256WITHRSA`, `SHA256WITHECDSA`, `SHA384WITHRSA`, `SHA384WITHECDSA`, `SHA512WITHRSA`, `SHA512WITHECDSA`. +* `validity` - (Required) Configures end of the validity period for the certificate. See [validity block](#validity-block) below. +* `templateArn` - (Optional) Template to use when issuing a certificate. + See [ACM PCA Documentation](https://docs.aws.amazon.com/privateca/latest/userguide/UsingTemplates.html) for more information. +* `apiPassthrough` - (Optional) Specifies X.509 certificate information to be included in the issued certificate. To use with API Passthrough templates. + +### validity block + +* `type` - (Required) Determines how `value` is interpreted. Valid values: `DAYS`, `MONTHS`, `YEARS`, `ABSOLUTE`, `END_DATE`. +* `value` - (Required) If `type` is `DAYS`, `MONTHS`, or `YEARS`, the relative time until the certificate expires. If `type` is `ABSOLUTE`, the date in seconds since the Unix epoch. If `type` is `END_DATE`, the date in RFC 3339 format. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the certificate. +* `certificate` - PEM-encoded certificate value. +* `certificateChain` - PEM-encoded certificate chain that includes any intermediate certificates and chains up to root CA. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ACM PCA Certificates using their ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ACM PCA Certificates using their ARN.
For example: + +```console +% terraform import aws_acmpca_certificate.cert arn:aws:acm-pca:eu-west-1:675225743824:certificate-authority/08319ede-83g9-1400-8f21-c7d12b2b6edb/certificate/a4e9c2aa4bcfab625g1b9136464cd3a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/acmpca_certificate_authority.html.markdown b/website/docs/cdktf/typescript/r/acmpca_certificate_authority.html.markdown new file mode 100644 index 00000000000..3f74c00ecc2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/acmpca_certificate_authority.html.markdown @@ -0,0 +1,253 @@ +--- +subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)" +layout: "aws" +page_title: "AWS: aws_acmpca_certificate_authority" +description: |- + Provides a resource to manage AWS Certificate Manager Private Certificate Authorities +--- + + + +# Resource: aws_acmpca_certificate_authority + +Provides a resource to manage AWS Certificate Manager Private Certificate Authorities (ACM PCA Certificate Authorities). + +~> **NOTE:** Creating this resource will leave the certificate authority in a `pendingCertificate` status, which means it cannot yet issue certificates. To complete this setup, you must fully sign the certificate authority CSR available in the `certificateSigningRequest` attribute. The [`awsAcmpcaCertificateAuthorityCertificate`](/docs/providers/aws/r/acmpca_certificate_authority_certificate.html) resource can be used for this purpose. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AcmpcaCertificateAuthority(this, "example", { + certificateAuthorityConfiguration: { + keyAlgorithm: "RSA_4096", + signingAlgorithm: "SHA512WITHRSA", + subject: { + commonName: "example.com", + }, + }, + permanentDeletionTimeInDays: 7, + }); + } +} + +``` + +### Short-lived certificate + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AcmpcaCertificateAuthority(this, "example", { + certificateAuthorityConfiguration: { + keyAlgorithm: "RSA_4096", + signingAlgorithm: "SHA512WITHRSA", + subject: { + commonName: "example.com", + }, + }, + usageMode: "SHORT_LIVED_CERTIFICATE", + }); + } +} + +``` + +### Enable Certificate Revocation List + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const acmpcaBucketAccess = new DataAwsIamPolicyDocument( + this, + "acmpca_bucket_access", + { + statement: [ + { + actions: [ + "s3:GetBucketAcl", + "s3:GetBucketLocation", + "s3:PutObject", + "s3:PutObjectAcl", + ], + principals: [ + { + identifiers: ["acm-pca.amazonaws.com"], + type: "Service", + }, + ], + resources: [example.arn, "${" + example.arn + "}/*"], + }, + ], + } + ); + const awsS3BucketPolicyExample = new S3BucketPolicy(this, "example_2", { + bucket: example.id, + policy: Token.asString(acmpcaBucketAccess.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyExample.overrideLogicalId("example"); + const awsAcmpcaCertificateAuthorityExample = new AcmpcaCertificateAuthority( + this, + "example_3", + { + certificateAuthorityConfiguration: { + keyAlgorithm: "RSA_4096", + signingAlgorithm: "SHA512WITHRSA", + subject: { + commonName: "example.com", + }, + }, + dependsOn: [awsS3BucketPolicyExample], + revocationConfiguration: { + crlConfiguration: { + customCname: "crl.example.com", + enabled: true, + expirationInDays: 7, + s3BucketName: example.id, + }, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateAuthorityExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificateAuthorityConfiguration` - (Required) Nested argument containing algorithms and certificate subject information. Defined below. +* `enabled` - (Optional) Whether the certificate authority is enabled or disabled. Defaults to `true`. Can only be disabled if the CA is in an `ACTIVE` state. +* `revocationConfiguration` - (Optional) Nested argument containing revocation configuration. Defined below. +* `usageMode` - (Optional) Specifies whether the CA issues general-purpose certificates that typically require a revocation mechanism, or short-lived certificates that may optionally omit revocation because they expire quickly. Short-lived certificate validity is limited to seven days. Defaults to `GENERAL_PURPOSE`. Valid values: `GENERAL_PURPOSE` and `SHORT_LIVED_CERTIFICATE`. +* `tags` - (Optional) Key-value map of user-defined tags that are attached to the certificate authority. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) Type of the certificate authority. Defaults to `SUBORDINATE`. Valid values: `ROOT` and `SUBORDINATE`. +* `keyStorageSecurityStandard` - (Optional) Cryptographic key management compliance standard used for handling CA keys. Defaults to `FIPS_140_2_LEVEL_3_OR_HIGHER`. Valid values: `FIPS_140_2_LEVEL_3_OR_HIGHER` and `FIPS_140_2_LEVEL_2_OR_HIGHER`.
Supported standard for each region can be found in the [Storage and security compliance of AWS Private CA private keys Documentation](https://docs.aws.amazon.com/privateca/latest/userguide/data-protection.html#private-keys). +* `permanentDeletionTimeInDays` - (Optional) Number of days to make a CA restorable after it has been deleted, must be between 7 to 30 days, with default to 30 days. + +### certificate_authority_configuration + +* `keyAlgorithm` - (Required) Type of the public key algorithm and size, in bits, of the key pair that your key pair creates when it issues a certificate. Valid values can be found in the [ACM PCA Documentation](https://docs.aws.amazon.com/privateca/latest/APIReference/API_CertificateAuthorityConfiguration.html). +* `signingAlgorithm` - (Required) Name of the algorithm your private CA uses to sign certificate requests. Valid values can be found in the [ACM PCA Documentation](https://docs.aws.amazon.com/privateca/latest/APIReference/API_CertificateAuthorityConfiguration.html). +* `subject` - (Required) Nested argument that contains X.500 distinguished name information. At least one nested attribute must be specified. + +#### subject + +Contains information about the certificate subject. Identifies the entity that owns or controls the public key in the certificate. The entity can be a user, computer, device, or service. + +* `commonName` - (Optional) Fully qualified domain name (FQDN) associated with the certificate subject. Must be less than or equal to 64 characters in length. +* `country` - (Optional) Two digit code that specifies the country in which the certificate subject located. Must be less than or equal to 2 characters in length. +* `distinguishedNameQualifier` - (Optional) Disambiguating information for the certificate subject. Must be less than or equal to 64 characters in length. +* `generationQualifier` - (Optional) Typically a qualifier appended to the name of an individual. Examples include Jr. for junior, Sr. for senior, and III for third. Must be less than or equal to 3 characters in length. +* `givenName` - (Optional) First name. Must be less than or equal to 16 characters in length. +* `initials` - (Optional) Concatenation that typically contains the first letter of the `givenName`, the first letter of the middle name if one exists, and the first letter of the `surname`. Must be less than or equal to 5 characters in length. +* `locality` - (Optional) Locality (such as a city or town) in which the certificate subject is located. Must be less than or equal to 128 characters in length. +* `organization` - (Optional) Legal name of the organization with which the certificate subject is affiliated. Must be less than or equal to 64 characters in length. +* `organizationalUnit` - (Optional) Subdivision or unit of the organization (such as sales or finance) with which the certificate subject is affiliated. Must be less than or equal to 64 characters in length. +* `pseudonym` - (Optional) Typically a shortened version of a longer `givenName`. For example, Jonathan is often shortened to John. Elizabeth is often shortened to Beth, Liz, or Eliza. Must be less than or equal to 128 characters in length. +* `state` - (Optional) State in which the subject of the certificate is located. Must be less than or equal to 128 characters in length. +* `surname` - (Optional) Family name. In the US and the UK for example, the surname of an individual is ordered last. In Asian cultures the surname is typically ordered first. 
Must be less than or equal to 40 characters in length. +* `title` - (Optional) Title such as Mr. or Ms. which is pre-pended to the name to refer formally to the certificate subject. Must be less than or equal to 64 characters in length. + +### revocation_configuration + +* `crlConfiguration` - (Optional) Nested argument containing configuration of the certificate revocation list (CRL), if any, maintained by the certificate authority. Defined below. +* `ocspConfiguration` - (Optional) Nested argument containing configuration of +the custom OCSP responder endpoint. Defined below. + +#### crl_configuration + +* `customCname` - (Optional) Name inserted into the certificate CRL Distribution Points extension that enables the use of an alias for the CRL distribution point. Use this value if you don't want the name of your S3 bucket to be public. Must be less than or equal to 253 characters in length. +* `enabled` - (Optional) Boolean value that specifies whether certificate revocation lists (CRLs) are enabled. Defaults to `false`. +* `expirationInDays` - (Optional, Required if `enabled` is `true`) Number of days until a certificate expires. Must be between 1 and 5000. +* `s3BucketName` - (Optional, Required if `enabled` is `true`) Name of the S3 bucket that contains the CRL. If you do not provide a value for the `customCname` argument, the name of your S3 bucket is placed into the CRL Distribution Points extension of the issued certificate. You must specify a bucket policy that allows ACM PCA to write the CRL to your bucket. Must be between 3 and 255 characters in length. +* `s3ObjectAcl` - (Optional) Determines whether the CRL will be publicly readable or privately held in the CRL Amazon S3 bucket. Defaults to `PUBLIC_READ`. + +#### ocsp_configuration + +* `enabled` - (Required) Boolean value that specifies whether a custom OCSP responder is enabled. +* `ocspCustomCname` - (Optional) CNAME specifying a customized OCSP domain. Note: The value of the CNAME must not include a protocol prefix such as "http://" or "https://". + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ARN of the certificate authority. +* `arn` - ARN of the certificate authority. +* `certificate` - Base64-encoded certificate authority (CA) certificate. Only available after the certificate authority certificate has been imported. +* `certificateChain` - Base64-encoded certificate chain that includes any intermediate certificates and chains up to root on-premises certificate that you used to sign your private CA certificate. The chain does not include your private CA certificate. Only available after the certificate authority certificate has been imported. +* `certificateSigningRequest` - The base64 PEM-encoded certificate signing request (CSR) for your private CA certificate. +* `notAfter` - Date and time after which the certificate authority is not valid. Only available after the certificate authority certificate has been imported. +* `notBefore` - Date and time before which the certificate authority is not valid. Only available after the certificate authority certificate has been imported. +* `serial` - Serial number of the certificate authority. Only available after the certificate authority certificate has been imported. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
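The `ocspConfiguration` block documented above can be sketched as follows (a hypothetical illustration; the OCSP CNAME is a placeholder and, per the note above, carries no protocol prefix):

```typescript
// Sketch: a private CA that publishes revocation status through a custom
// OCSP responder domain instead of (or in addition to) a CRL.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority";

class OcspCaSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new AcmpcaCertificateAuthority(this, "example", {
      certificateAuthorityConfiguration: {
        keyAlgorithm: "RSA_4096",
        signingAlgorithm: "SHA512WITHRSA",
        subject: {
          commonName: "example.com",
        },
      },
      revocationConfiguration: {
        ocspConfiguration: {
          enabled: true,
          ocspCustomCname: "ocsp.example.com",
        },
      },
    });
  }
}
```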
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAcmpcaCertificateAuthority` using the certificate authority ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAcmpcaCertificateAuthority` using the certificate authority ARN. For example: + +```console +% terraform import aws_acmpca_certificate_authority.example arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/acmpca_certificate_authority_certificate.html.markdown b/website/docs/cdktf/typescript/r/acmpca_certificate_authority_certificate.html.markdown new file mode 100644 index 00000000000..80f719a562c --- /dev/null +++ b/website/docs/cdktf/typescript/r/acmpca_certificate_authority_certificate.html.markdown @@ -0,0 +1,195 @@ +--- +subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)" +layout: "aws" +page_title: "AWS: aws_acmpca_certificate_authority_certificate" +description: |- + Associates a certificate with an AWS Certificate Manager Private Certificate Authority +--- + + + +# Resource: aws_acmpca_certificate_authority_certificate + +Associates a certificate with an AWS Certificate Manager Private Certificate Authority (ACM PCA Certificate Authority). An ACM PCA Certificate Authority is unable to issue certificates until it has a certificate associated with it. A root level ACM PCA Certificate Authority is able to self-sign its own root certificate. + +## Example Usage + +### Self-Signed Root Certificate Authority Certificate + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { AcmpcaCertificate } from "./.gen/providers/aws/acmpca-certificate"; +import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority"; +import { AcmpcaCertificateAuthorityCertificate } from "./.gen/providers/aws/acmpca-certificate-authority-certificate"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AcmpcaCertificateAuthority(this, "example", { + certificateAuthorityConfiguration: { + keyAlgorithm: "RSA_4096", + signingAlgorithm: "SHA512WITHRSA", + subject: { + commonName: "example.com", + }, + }, + type: "ROOT", + }); + const current = new DataAwsPartition(this, "current", {}); + const awsAcmpcaCertificateExample = new AcmpcaCertificate( + this, + "example_2", + { + certificateAuthorityArn: example.arn, + certificateSigningRequest: example.certificateSigningRequest, + signingAlgorithm: "SHA512WITHRSA", + templateArn: + "arn:${" + + current.partition + + "}:acm-pca:::template/RootCACertificate/V1", + validity: { + type: "YEARS", + value: Token.asString(1), + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateExample.overrideLogicalId("example"); + const awsAcmpcaCertificateAuthorityCertificateExample = + new AcmpcaCertificateAuthorityCertificate(this, "example_3", { + certificate: Token.asString(awsAcmpcaCertificateExample.certificate), + certificateAuthorityArn: example.arn, + certificateChain: Token.asString( + awsAcmpcaCertificateExample.certificateChain + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateAuthorityCertificateExample.overrideLogicalId( + "example" + ); + } +} + +``` + +### Certificate for Subordinate Certificate Authority + +Note that the certificate for the subordinate certificate authority must be issued by the root certificate authority using a signing request from the subordinate certificate authority. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AcmpcaCertificate } from "./.gen/providers/aws/acmpca-certificate"; +import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority"; +import { AcmpcaCertificateAuthorityCertificate } from "./.gen/providers/aws/acmpca-certificate-authority-certificate"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +interface MyConfig { + certificateAuthorityArn: any; + certificateSigningRequest: any; + signingAlgorithm: any; + validity: any; + certificateAuthorityConfiguration: any; + certificate: any; + certificateAuthorityArn1: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new AcmpcaCertificate(this, "root", { + certificateAuthorityArn: config.certificateAuthorityArn, + certificateSigningRequest: config.certificateSigningRequest, + signingAlgorithm: config.signingAlgorithm, + validity: config.validity, + }); + const awsAcmpcaCertificateAuthorityRoot = new AcmpcaCertificateAuthority( + this, + "root_1", + { + certificateAuthorityConfiguration: + config.certificateAuthorityConfiguration, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateAuthorityRoot.overrideLogicalId("root"); + const subordinate = new AcmpcaCertificateAuthority(this, "subordinate", { + certificateAuthorityConfiguration: { + keyAlgorithm: "RSA_2048", + signingAlgorithm: "SHA512WITHRSA", + subject: { + commonName: "sub.example.com", + }, + }, + type: "SUBORDINATE", + }); + const awsAcmpcaCertificateAuthorityCertificateRoot = + new AcmpcaCertificateAuthorityCertificate(this, "root_3", { + certificate: config.certificate, + certificateAuthorityArn: config.certificateAuthorityArn1, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateAuthorityCertificateRoot.overrideLogicalId("root"); + const current = new DataAwsPartition(this, "current", {}); + const awsAcmpcaCertificateSubordinate = new AcmpcaCertificate( + this, + "subordinate_5", + { + certificateAuthorityArn: Token.asString( + awsAcmpcaCertificateAuthorityRoot.arn + ), + certificateSigningRequest: subordinate.certificateSigningRequest, + signingAlgorithm: "SHA512WITHRSA", + templateArn: + "arn:${" + + current.partition + + "}:acm-pca:::template/SubordinateCACertificate_PathLen0/V1", + validity: { + type: "YEARS", + value: Token.asString(1), + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateSubordinate.overrideLogicalId("subordinate"); + const awsAcmpcaCertificateAuthorityCertificateSubordinate = + new AcmpcaCertificateAuthorityCertificate(this, "subordinate_6", { + certificate: Token.asString( + awsAcmpcaCertificateSubordinate.certificate + ), + certificateAuthorityArn: subordinate.arn, + certificateChain: Token.asString( + awsAcmpcaCertificateSubordinate.certificateChain + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaCertificateAuthorityCertificateSubordinate.overrideLogicalId( + "subordinate" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificate` - (Required) PEM-encoded certificate for the Certificate Authority. 
+* `certificateAuthorityArn` - (Required) ARN of the Certificate Authority. +* `certificateChain` - (Optional) PEM-encoded certificate chain that includes any intermediate certificates and chains up to root CA. Required for subordinate Certificate Authorities. Not allowed for root Certificate Authorities. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/acmpca_permission.html.markdown b/website/docs/cdktf/typescript/r/acmpca_permission.html.markdown new file mode 100644 index 00000000000..3e128cfa96a --- /dev/null +++ b/website/docs/cdktf/typescript/r/acmpca_permission.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)" +layout: "aws" +page_title: "AWS: aws_acmpca_permission" +description: |- + Provides a resource to manage an AWS Certificate Manager Private Certificate Authorities Permission +--- + + + +# Resource: aws_acmpca_permission + +Provides a resource to manage an AWS Certificate Manager Private Certificate Authorities Permission. +Currently, this is only required in order to allow the ACM service to automatically renew certificates issued by a PCA. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority"; +import { AcmpcaPermission } from "./.gen/providers/aws/acmpca-permission"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AcmpcaCertificateAuthority(this, "example", { + certificateAuthorityConfiguration: { + keyAlgorithm: "RSA_4096", + signingAlgorithm: "SHA512WITHRSA", + subject: { + commonName: "example.com", + }, + }, + }); + const awsAcmpcaPermissionExample = new AcmpcaPermission(this, "example_1", { + actions: ["IssueCertificate", "GetCertificate", "ListPermissions"], + certificateAuthorityArn: example.arn, + principal: "acm.amazonaws.com", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAcmpcaPermissionExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificateAuthorityArn` - (Required) ARN of the CA that grants the permissions. +* `actions` - (Required) Actions that the specified AWS service principal can use. These include `IssueCertificate`, `GetCertificate`, and `ListPermissions`. Note that in order for ACM to automatically renew certificates issued by a PCA, it must be granted permission on all 3 actions, as per the example above. +* `principal` - (Required) AWS service or identity that receives the permission. At this time, the only valid principal is `acm.amazonaws.com`. +* `sourceAccount` - (Optional) ID of the calling account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `policy` - IAM policy that is associated with the permission.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/acmpca_policy.html.markdown b/website/docs/cdktf/typescript/r/acmpca_policy.html.markdown
new file mode 100644
index 00000000000..0a4f41faf58
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/acmpca_policy.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "ACM PCA (Certificate Manager Private Certificate Authority)"
+layout: "aws"
+page_title: "AWS: aws_acmpca_policy"
+description: |-
+  Attaches a resource-based policy to an AWS Certificate Manager Private Certificate Authority (ACM PCA)
+---
+
+
+
+# Resource: aws_acmpca_policy
+
+Attaches a resource-based policy to a private CA.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority";
+import { AcmpcaPolicy } from "./.gen/providers/aws/acmpca-policy";
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // The converted snippet referenced `current` and
+    // `awsAcmpcaCertificateAuthorityExample` without declaring them; the two
+    // declarations below restore them so the example is self-contained.
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const awsAcmpcaCertificateAuthorityExample = new AcmpcaCertificateAuthority(
+      this,
+      "example_2",
+      {
+        certificateAuthorityConfiguration: {
+          keyAlgorithm: "RSA_4096",
+          signingAlgorithm: "SHA512WITHRSA",
+          subject: {
+            commonName: "example.com",
+          },
+        },
+      }
+    );
+    const example = new DataAwsIamPolicyDocument(this, "example", {
+      statement: [
+        {
+          actions: [
+            "acm-pca:DescribeCertificateAuthority",
+            "acm-pca:GetCertificate",
+            "acm-pca:GetCertificateAuthorityCertificate",
+            "acm-pca:ListPermissions",
+            "acm-pca:ListTags",
+          ],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: [Token.asString(current.accountId)],
+              type: "AWS",
+            },
+          ],
+          resources: [Token.asString(awsAcmpcaCertificateAuthorityExample.arn)],
+          sid: "1",
+        },
+        {
+          actions: ["acm-pca:IssueCertificate"],
+          condition: [
+            {
+              test: "StringEquals",
+              values: ["arn:aws:acm-pca:::template/EndEntityCertificate/V1"],
+              variable: "acm-pca:TemplateArn",
+            },
+          ],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: [Token.asString(current.accountId)],
+              type: "AWS",
+            },
+          ],
+          resources: [Token.asString(awsAcmpcaCertificateAuthorityExample.arn)],
+          sid: "2",
+        },
+      ],
+    });
+    const awsAcmpcaPolicyExample = new AcmpcaPolicy(this, "example_1", {
+      policy: Token.asString(example.json),
+      resourceArn: Token.asString(awsAcmpcaCertificateAuthorityExample.arn),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAcmpcaPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceArn` - (Required) ARN of the private CA to associate with the policy.
+* `policy` - (Required) JSON-formatted IAM policy to attach to the specified private CA resource.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAcmpcaPolicy` using the `resourceArn` value. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsAcmpcaPolicy` using the `resourceArn` value.
For example:
+
+```console
+% terraform import aws_acmpca_policy.example arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/12345678-1234-1234-1234-123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ami.html.markdown b/website/docs/cdktf/typescript/r/ami.html.markdown
new file mode 100644
index 00000000000..c4dd686f054
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ami.html.markdown
@@ -0,0 +1,164 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_ami"
+description: |-
+  Creates and manages a custom Amazon Machine Image (AMI).
+---
+
+
+
+# Resource: aws_ami
+
+The AMI resource allows the creation and management of a completely-custom
+*Amazon Machine Image* (AMI).
+
+If you just want to duplicate an existing AMI, possibly copying it to another
+region, it's better to use `awsAmiCopy` instead.
+
+If you just want to share an existing AMI with another AWS account,
+it's better to use `awsAmiLaunchPermission` instead.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Ami } from "./.gen/providers/aws/ami";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Ami(this, "example", {
+      ebsBlockDevice: [
+        {
+          deviceName: "/dev/xvda",
+          snapshotId: "snap-xxxxxxxx",
+          volumeSize: 8,
+        },
+      ],
+      imdsSupport: "v2.0",
+      name: "terraform-example",
+      rootDeviceName: "/dev/xvda",
+      virtualizationType: "hvm",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Region-unique name for the AMI.
+* `bootMode` - (Optional) Boot mode of the AMI. For more information, see [Boot modes](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ami-boot.html) in the Amazon Elastic Compute Cloud User Guide.
+* `deprecationTime` - (Optional) Date and time to deprecate the AMI. If you specified a value for seconds, Amazon EC2 rounds the seconds to the nearest minute. Valid values: [RFC3339 time string](https://tools.ietf.org/html/rfc3339#section-5.8) (`YYYY-MM-DDTHH:MM:SSZ`)
+* `description` - (Optional) Longer, human-readable description for the AMI.
+* `enaSupport` - (Optional) Whether enhanced networking with ENA is enabled. Defaults to `false`.
+* `rootDeviceName` - (Optional) Name of the root device (for example, `/dev/sda1`, or `/dev/xvda`).
+* `virtualizationType` - (Optional) Keyword to choose what virtualization mode created instances
+  will use. Can be either "paravirtual" (the default) or "hvm". The choice of virtualization type
+  changes the set of further arguments that are required, as described below.
+* `architecture` - (Optional) Machine architecture for created instances. Defaults to "x86_64".
+* `ebsBlockDevice` - (Optional) Nested block describing an EBS block device that should be
+  attached to created instances. The structure of this block is described below.
+* `ephemeralBlockDevice` - (Optional) Nested block describing an ephemeral block device that
+  should be attached to created instances. The structure of this block is described below.
+* `tags` - (Optional) Map of tags to assign to the resource.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `tpmSupport` - (Optional) If the image is configured for NitroTPM support, the value is `v2.0`. For more information, see [NitroTPM](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/nitrotpm.html) in the Amazon Elastic Compute Cloud User Guide.
+* `imdsSupport` - (Optional) If EC2 instances started from this image should require the use of the Instance Metadata Service V2 (IMDSv2), set this argument to `v2.0`. For more information, see [Configure instance metadata options for new instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-IMDS-new-instances.html#configure-IMDS-new-instances-ami-configuration).
+
+When `virtualizationType` is "paravirtual" the following additional arguments apply:
+
+* `imageLocation` - (Required) Path to an S3 object containing an image manifest, e.g., created
+  by the `ec2-upload-bundle` command in the EC2 command line tools.
+* `kernelId` - (Required) ID of the kernel image (AKI) that will be used as the paravirtual
+  kernel in created instances.
+* `ramdiskId` - (Optional) ID of an initrd image (ARI) that will be used when booting the
+  created instances.
+
+When `virtualizationType` is "hvm" the following additional arguments apply:
+
+* `sriovNetSupport` - (Optional) When set to "simple" (the default), enables enhanced networking
+  for created instances. No other value is supported at this time.
+
+Nested `ebsBlockDevice` blocks have the following structure:
+
+* `deviceName` - (Required) Path at which the device is exposed to created instances.
+* `deleteOnTermination` - (Optional) Boolean controlling whether the EBS volumes created to
+  support each created instance will be deleted once that instance is terminated.
+* `encrypted` - (Optional) Boolean controlling whether the created EBS volumes will be encrypted. Can't be used with `snapshotId`.
+* `iops` - (Required only when `volumeType` is `io1` or `io2`) Number of I/O operations per second the
+  created volumes will support.
+* `snapshotId` - (Optional) ID of an EBS snapshot that will be used to initialize the created
+  EBS volumes. If set, the `volumeSize` attribute must be at least as large as the referenced
+  snapshot.
+* `throughput` - (Optional) Throughput that the EBS volume supports, in MiB/s. Only valid for `volumeType` of `gp3`.
+* `volumeSize` - (Required unless `snapshotId` is set) Size of created volumes in GiB.
+  If `snapshotId` is set and `volumeSize` is omitted then the volume will have the same size
+  as the selected snapshot.
+* `volumeType` - (Optional) Type of EBS volume to create. Can be `standard`, `gp2`, `gp3`, `io1`, `io2`, `sc1` or `st1` (Default: `standard`).
+* `outpostArn` - (Optional) ARN of the Outpost on which the snapshot is stored.
+
+~> **Note:** You can specify `encrypted` or `snapshotId` but not both.
+
+Nested `ephemeralBlockDevice` blocks have the following structure:
+
+* `deviceName` - (Required) Path at which the device is exposed to created instances.
+* `virtualName` - (Required) Name for the ephemeral device, of the form "ephemeralN" where
+  *N* is a volume number starting from zero.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the AMI.
+* `id` - ID of the created AMI.
+* `ownerId` - AWS account ID of the image owner.
+* `rootSnapshotId` - Snapshot ID for the root volume (for EBS-backed AMIs)
+* `usageOperation` - Operation of the Amazon EC2 instance and the billing code that is associated with the AMI.
+* `platformDetails` - Platform details associated with the billing code of the AMI.
+* `imageOwnerAlias` - AWS account alias (for example, amazon, self) or the AWS account ID of the AMI owner.
+* `imageType` - Type of image.
+* `hypervisor` - Hypervisor type of the image.
+* `platform` - This value is set to windows for Windows AMIs; otherwise, it is blank.
+* `public` - Whether the image has public launch permissions.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `40m`)
+* `update` - (Default `40m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAmi` using the ID of the AMI. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsAmi` using the ID of the AMI. For example:
+
+```console
+% terraform import aws_ami.example ami-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ami_copy.html.markdown b/website/docs/cdktf/typescript/r/ami_copy.html.markdown
new file mode 100644
index 00000000000..5bdda4a59f3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ami_copy.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_ami_copy"
+description: |-
+  Duplicates an existing Amazon Machine Image (AMI)
+---
+
+
+
+# Resource: aws_ami_copy
+
+The "AMI copy" resource allows duplication of an Amazon Machine Image (AMI),
+including cross-region copies.
+
+If the source AMI has associated EBS snapshots, those will also be duplicated
+along with the AMI.
+
+This is useful for taking a single AMI provisioned in one region and making
+it available in another for a multi-region deployment.
+
+Copying an AMI can take several minutes. The creation of this resource will
+block until the new AMI is available for use on new instances.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AmiCopy } from "./.gen/providers/aws/ami-copy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AmiCopy(this, "example", {
+      description: "A copy of ami-xxxxxxxx",
+      name: "terraform-example",
+      sourceAmiId: "ami-xxxxxxxx",
+      sourceAmiRegion: "us-west-1",
+      tags: {
+        Name: "HelloWorld",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Region-unique name for the AMI.
+* `sourceAmiId` - (Required) ID of the AMI to copy. This ID must be valid in the region
+  given by `sourceAmiRegion`.
+* `sourceAmiRegion` - (Required) Region from which the AMI will be copied. This may be the
+  same as the AWS provider region in order to create a copy within the same region.
+* `destinationOutpostArn` - (Optional) ARN of the Outpost to which to copy the AMI.
+  Only specify this parameter when copying an AMI from an AWS Region to an Outpost. The AMI must be in the Region of the destination Outpost.
+* `encrypted` - (Optional) Whether the destination snapshots of the copied image should be encrypted. Defaults to `false`.
+* `kmsKeyId` - (Optional) Full ARN of the KMS Key to use when encrypting the snapshots of an image during a copy operation. If not specified, then the default AWS KMS Key will be used.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+This resource also exposes the full set of arguments from the [`awsAmi`](ami.html) resource.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the AMI.
+* `id` - ID of the created AMI.
+
+This resource also exports a full set of attributes corresponding to the arguments of the
+[`awsAmi`](/docs/providers/aws/r/ami.html) resource, allowing the properties of the created AMI to be used elsewhere in the
+configuration.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `40m`)
+* `update` - (Default `40m`)
+* `delete` - (Default `90m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ami_from_instance.html.markdown b/website/docs/cdktf/typescript/r/ami_from_instance.html.markdown
new file mode 100644
index 00000000000..be2798b7dbb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ami_from_instance.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_ami_from_instance"
+description: |-
+  Creates an Amazon Machine Image (AMI) from an EBS-backed EC2 instance
+---
+
+
+
+# Resource: aws_ami_from_instance
+
+The "AMI from instance" resource allows the creation of an Amazon Machine
+Image (AMI) modelled after an existing EBS-backed EC2 instance.
+
+The created AMI will refer to implicitly-created snapshots of the instance's
+EBS volumes and mimic its assigned block device configuration at the time
+the resource is created.
+
+This resource is best applied to an instance that is stopped when this resource
+is created, so that the contents of the created image are predictable.
When
+applied to an instance that is running, *the instance will be stopped before taking
+the snapshots and then started back up again*, resulting in a period of
+downtime.
+
+Note that the source instance is inspected only at the initial creation of this
+resource. Ongoing updates to the referenced instance will not be propagated into
+the generated AMI. Users may taint or otherwise recreate the resource in order
+to produce a fresh snapshot.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AmiFromInstance } from "./.gen/providers/aws/ami-from-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AmiFromInstance(this, "example", {
+      name: "terraform-example",
+      sourceInstanceId: "i-xxxxxxxx",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Region-unique name for the AMI.
+* `sourceInstanceId` - (Required) ID of the instance to use as the basis of the AMI.
+* `snapshotWithoutReboot` - (Optional) Boolean that overrides the behavior of stopping
+  the instance before snapshotting. This is risky since it may cause a snapshot of an
+  inconsistent filesystem state, but can be used to avoid downtime if the user otherwise
+  guarantees that no filesystem writes will be underway at the time of snapshot.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `40m`)
+* `update` - (Default `40m`)
+* `delete` - (Default `90m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the AMI.
+* `id` - ID of the created AMI.
+
+This resource also exports a full set of attributes corresponding to the arguments of the
+[`awsAmi`](/docs/providers/aws/r/ami.html) resource, allowing the properties of the created AMI to be used elsewhere in the
+configuration.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ami_launch_permission.html.markdown b/website/docs/cdktf/typescript/r/ami_launch_permission.html.markdown
new file mode 100644
index 00000000000..16a921a1e40
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ami_launch_permission.html.markdown
@@ -0,0 +1,126 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_ami_launch_permission"
+description: |-
+  Adds a launch permission to an Amazon Machine Image (AMI).
+---
+
+
+
+# Resource: aws_ami_launch_permission
+
+Adds a launch permission to an Amazon Machine Image (AMI).
+
+## Example Usage
+
+### AWS Account ID
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { AmiLaunchPermission } from "./.gen/providers/aws/ami-launch-permission"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AmiLaunchPermission(this, "example", { + accountId: "123456789012", + imageId: "ami-12345678", + }); + } +} + +``` + +### Public Access + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmiLaunchPermission } from "./.gen/providers/aws/ami-launch-permission"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AmiLaunchPermission(this, "example", { + group: "all", + imageId: "ami-12345678", + }); + } +} + +``` + +### Organization Access + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmiLaunchPermission } from "./.gen/providers/aws/ami-launch-permission"; +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsOrganizationsOrganization(this, "current", {}); + new AmiLaunchPermission(this, "example", { + imageId: "ami-12345678", + organizationArn: Token.asString(current.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) AWS account ID for the launch permission. +* `group` - (Optional) Name of the group for the launch permission. Valid values: `"all"`. +* `imageId` - (Required) ID of the AMI. +* `organizationArn` - (Optional) ARN of an organization for the launch permission. +* `organizationalUnitArn` - (Optional) ARN of an organizational unit for the launch permission. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Launch permission ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AMI Launch Permissions using `[accountId|groupName|organizationArn|organizationalUnitArn]/imageId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AMI Launch Permissions using `[accountId|groupName|organizationArn|organizationalUnitArn]/imageId`. 
For example: + +```console +% terraform import aws_ami_launch_permission.example 123456789012/ami-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/amplify_app.html.markdown b/website/docs/cdktf/typescript/r/amplify_app.html.markdown new file mode 100644 index 00000000000..547abccdb61 --- /dev/null +++ b/website/docs/cdktf/typescript/r/amplify_app.html.markdown @@ -0,0 +1,266 @@ +--- +subcategory: "Amplify" +layout: "aws" +page_title: "AWS: aws_amplify_app" +description: |- + Provides an Amplify App resource. +--- + + + +# Resource: aws_amplify_app + +Provides an Amplify App resource, a fullstack serverless app hosted on the [AWS Amplify Console](https://docs.aws.amazon.com/amplify/latest/userguide/welcome.html). + +~> **Note:** When you create/update an Amplify App from Terraform, you may end up with the error "BadRequestException: You should at least provide one valid token" because of authentication issues. See the section "Repository with Tokens" below. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AmplifyApp(this, "example", { + buildSpec: + "version: 0.1\nfrontend:\n phases:\n preBuild:\n commands:\n - yarn install\n build:\n commands:\n - yarn run build\n artifacts:\n baseDirectory: build\n files:\n - '**/*'\n cache:\n paths:\n - node_modules/**/*\n\n", + customRule: [ + { + source: "/<*>", + status: "404", + target: "/index.html", + }, + ], + environmentVariables: { + ENV: "test", + }, + name: "example", + repository: "https://github.com/example/app", + }); + } +} + +``` + +### Repository with Tokens + +If you create a new Amplify App with the `repository` argument, you also need to set `oauthToken` or `accessToken` for authentication. For GitHub, get a [personal access token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) and set `accessToken` as follows: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AmplifyApp(this, "example", { + accessToken: "...", + name: "example", + repository: "https://github.com/example/app", + }); + } +} + +``` + +You can omit `accessToken` if you import an existing Amplify App created by the Amplify Console (using OAuth for authentication). + +### Auto Branch Creation + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AmplifyApp(this, "example", { + autoBranchCreationConfig: { + enableAutoBuild: true, + }, + autoBranchCreationPatterns: ["*", "*/**"], + enableAutoBranchCreation: true, + name: "example", + }); + } +} + +``` + +### Basic Authorization + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AmplifyApp(this, "example", { + basicAuthCredentials: Token.asString( + Fn.base64encode("username1:password1") + ), + enableBasicAuth: true, + name: "example", + }); + } +} + +``` + +### Rewrites and Redirects + +```terraform +resource "aws_amplify_app" "example" { + name = "example" + + # Reverse Proxy Rewrite for API requests + # https://docs.aws.amazon.com/amplify/latest/userguide/redirects.html#reverse-proxy-rewrite + custom_rule { + source = "/api/<*>" + status = "200" + target = "https://api.example.com/api/<*>" + } + + # Redirects for Single Page Web Apps (SPA) + # https://docs.aws.amazon.com/amplify/latest/userguide/redirects.html#redirects-for-single-page-web-apps-spa + custom_rule { + source = "" + status = "200" + target = "/index.html" + } +} +``` + +### Custom Image + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AmplifyApp(this, "example", { + environmentVariables: { + _CUSTOM_IMAGE: "node:16", + }, + name: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name for an Amplify app. +* `accessToken` - (Optional) Personal access token for a third-party source control system for an Amplify app. The personal access token is used to create a webhook and a read-only deploy key. The token is not stored. +* `autoBranchCreationConfig` - (Optional) Automated branch creation configuration for an Amplify app. An `autoBranchCreationConfig` block is documented below. +* `autoBranchCreationPatterns` - (Optional) Automated branch creation glob patterns for an Amplify app. +* `basicAuthCredentials` - (Optional) Credentials for basic authorization for an Amplify app. +* `buildSpec` - (Optional) The [build specification](https://docs.aws.amazon.com/amplify/latest/userguide/build-settings.html) (build spec) for an Amplify app. +* `customRule` - (Optional) Custom rewrite and redirect rules for an Amplify app. A `customRule` block is documented below. +* `description` - (Optional) Description for an Amplify app. +* `enableAutoBranchCreation` - (Optional) Enables automated branch creation for an Amplify app. 
+* `enableBasicAuth` - (Optional) Enables basic authorization for an Amplify app. This will apply to all branches that are part of this app.
+* `enableBranchAutoBuild` - (Optional) Enables auto-building of branches for the Amplify App.
+* `enableBranchAutoDeletion` - (Optional) Automatically disconnects a branch in the Amplify Console when you delete a branch from your Git repository.
+* `environmentVariables` - (Optional) Environment variables map for an Amplify app.
+* `iamServiceRoleArn` - (Optional) AWS Identity and Access Management (IAM) service role for an Amplify app.
+* `oauthToken` - (Optional) OAuth token for a third-party source control system for an Amplify app. The OAuth token is used to create a webhook and a read-only deploy key. The OAuth token is not stored.
+* `platform` - (Optional) Platform or framework for an Amplify app. Valid values: `WEB`, `WEB_COMPUTE`. Default value: `WEB`.
+* `repository` - (Optional) Repository for an Amplify app.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+An `autoBranchCreationConfig` block supports the following arguments:
+
+* `basicAuthCredentials` - (Optional) Basic authorization credentials for the autocreated branch.
+* `buildSpec` - (Optional) Build specification (build spec) for the autocreated branch.
+* `enableAutoBuild` - (Optional) Enables auto building for the autocreated branch.
+* `enableBasicAuth` - (Optional) Enables basic authorization for the autocreated branch.
+* `enablePerformanceMode` - (Optional) Enables performance mode for the branch.
+* `enablePullRequestPreview` - (Optional) Enables pull request previews for the autocreated branch.
+* `environmentVariables` - (Optional) Environment variables for the autocreated branch.
+* `framework` - (Optional) Framework for the autocreated branch.
+* `pullRequestEnvironmentName` - (Optional) Amplify environment name for the pull request.
+* `stage` - (Optional) Describes the current stage for the autocreated branch. Valid values: `PRODUCTION`, `BETA`, `DEVELOPMENT`, `EXPERIMENTAL`, `PULL_REQUEST`.
+
+A `customRule` block supports the following arguments:
+
+* `condition` - (Optional) Condition for a URL rewrite or redirect rule, such as a country code.
+* `source` - (Required) Source pattern for a URL rewrite or redirect rule.
+* `status` - (Optional) Status code for a URL rewrite or redirect rule. Valid values: `200`, `301`, `302`, `404`, `404-200`.
+* `target` - (Required) Target pattern for a URL rewrite or redirect rule.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Amplify app.
+* `defaultDomain` - Default domain for the Amplify app.
+* `id` - Unique ID of the Amplify app.
+* `productionBranch` - Describes the information about a production branch for an Amplify app. A `productionBranch` block is documented below.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+A `productionBranch` block supports the following attributes:
+
+* `branchName` - Branch name for the production branch.
+* `lastDeployTime` - Last deploy time of the production branch.
+* `status` - Status of the production branch.
+* `thumbnailUrl` - Thumbnail URL for the production branch.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify App using Amplify App ID (appId). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amplify App using Amplify App ID (appId). For example:
+
+```console
+% terraform import aws_amplify_app.example d2ypk4k47z8u6
+```
+
+App ID can be obtained from App ARN (e.g., `arn:aws:amplify:us-east-1:12345678:apps/d2ypk4k47z8u6`).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/amplify_backend_environment.html.markdown b/website/docs/cdktf/typescript/r/amplify_backend_environment.html.markdown
new file mode 100644
index 00000000000..c7cfc900bcd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/amplify_backend_environment.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "Amplify"
+layout: "aws"
+page_title: "AWS: aws_amplify_backend_environment"
+description: |-
+  Provides an Amplify Backend Environment resource.
+---
+
+
+
+# Resource: aws_amplify_backend_environment
+
+Provides an Amplify Backend Environment resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AmplifyApp } from "./.gen/providers/aws/amplify-app";
+import { AmplifyBackendEnvironment } from "./.gen/providers/aws/amplify-backend-environment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new AmplifyApp(this, "example", {
+      name: "example",
+    });
+    const awsAmplifyBackendEnvironmentExample = new AmplifyBackendEnvironment(
+      this,
+      "example_1",
+      {
+        appId: example.id,
+        deploymentArtifacts: "app-example-deployment",
+        environmentName: "example",
+        stackName: "amplify-app-example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAmplifyBackendEnvironmentExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `appId` - (Required) Unique ID for an Amplify app.
+* `environmentName` - (Required) Name for the backend environment.
+* `deploymentArtifacts` - (Optional) Name of deployment artifacts.
+* `stackName` - (Optional) AWS CloudFormation stack name of a backend environment.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN for a backend environment that is part of an Amplify app.
+* `id` - Unique ID of the Amplify backend environment.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify backend environment using `appId` and `environmentName`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amplify backend environment using `appId` and `environmentName`. For example: + +```console +% terraform import aws_amplify_backend_environment.example d2ypk4k47z8u6/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/amplify_branch.html.markdown b/website/docs/cdktf/typescript/r/amplify_branch.html.markdown new file mode 100644 index 00000000000..b672a047554 --- /dev/null +++ b/website/docs/cdktf/typescript/r/amplify_branch.html.markdown @@ -0,0 +1,267 @@ +--- +subcategory: "Amplify" +layout: "aws" +page_title: "AWS: aws_amplify_branch" +description: |- + Provides an Amplify Branch resource. +--- + + + +# Resource: aws_amplify_branch + +Provides an Amplify Branch resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +import { AmplifyBranch } from "./.gen/providers/aws/amplify-branch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AmplifyApp(this, "example", { + name: "app", + }); + new AmplifyBranch(this, "master", { + appId: example.id, + branchName: "master", + environmentVariables: { + REACT_APP_API_SERVER: "https://api.example.com", + }, + framework: "React", + stage: "PRODUCTION", + }); + } +} + +``` + +### Basic Authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +import { AmplifyBranch } from "./.gen/providers/aws/amplify-branch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AmplifyApp(this, "example", { + name: "app", + }); + new AmplifyBranch(this, "master", { + appId: example.id, + basicAuthCredentials: Token.asString( + Fn.base64encode("username:password") + ), + branchName: "master", + enableBasicAuth: true, + }); + } +} + +``` + +### Notifications + +Amplify Console uses EventBridge (formerly known as CloudWatch Events) and SNS for email notifications. To implement the same functionality, you need to set `enableNotification` in a `awsAmplifyBranch` resource, as well as creating an EventBridge Rule, an SNS topic, and SNS subscriptions. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AmplifyApp } from "./.gen/providers/aws/amplify-app";
+import { AmplifyBranch } from "./.gen/providers/aws/amplify-branch";
+import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule";
+import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+import { SnsTopicPolicy } from "./.gen/providers/aws/sns-topic-policy";
+import { SnsTopicSubscription } from "./.gen/providers/aws/sns-topic-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new AmplifyApp(this, "example", {
+      name: "app",
+    });
+    const master = new AmplifyBranch(this, "master", {
+      appId: example.id,
+      branchName: "master",
+      enableNotification: true,
+    });
+    const amplifyAppMaster = new CloudwatchEventRule(
+      this,
+      "amplify_app_master",
+      {
+        description:
+          "AWS Amplify build notifications for: App: ${" +
+          example.id +
+          "} Branch: ${" +
+          master.branchName +
+          "}",
+        eventPattern: Token.asString(
+          Fn.jsonencode({
+            detail: {
+              appId: [example.id],
+              branchName: [master.branchName],
+              jobStatus: ["SUCCEED", "FAILED", "STARTED"],
+            },
+            "detail-type": ["Amplify Deployment Status Change"],
+            source: ["aws.amplify"],
+          })
+        ),
+        name:
+          "amplify-${" +
+          example.id +
+          "}-${" +
+          master.branchName +
+          "}-branch-notification",
+      }
+    );
+    const awsSnsTopicAmplifyAppMaster = new SnsTopic(
+      this,
+      "amplify_app_master_3",
+      {
+        name: "amplify-${" + example.id + "}_${" + master.branchName + "}",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSnsTopicAmplifyAppMaster.overrideLogicalId("amplify_app_master");
+    new SnsTopicSubscription(this, "this", {
+      endpoint: "user@acme.com",
+      protocol: "email",
+      topicArn: Token.asString(awsSnsTopicAmplifyAppMaster.arn),
+    });
+    const dataAwsIamPolicyDocumentAmplifyAppMaster =
+      new DataAwsIamPolicyDocument(this, "amplify_app_master_5", {
+        statement: [
+          {
+            actions: ["SNS:Publish"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["events.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+            resources: [Token.asString(awsSnsTopicAmplifyAppMaster.arn)],
+            sid: "Allow_Publish_Events ${" + master.arn + "}",
+          },
+        ],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentAmplifyAppMaster.overrideLogicalId(
+      "amplify_app_master"
+    );
+    const awsCloudwatchEventTargetAmplifyAppMaster = new CloudwatchEventTarget(
+      this,
+      "amplify_app_master_6",
+      {
+        arn: Token.asString(awsSnsTopicAmplifyAppMaster.arn),
+        inputTransformer: {
+          inputPaths: {
+            appId: "$.detail.appId",
+            branch: "$.detail.branchName",
+            jobId: "$.detail.jobId",
+            region: "$.region",
+            status: "$.detail.jobStatus",
+          },
+          inputTemplate:
+            '\\"Build notification from the AWS Amplify Console for app: https://<branch>.<appId>.amplifyapp.com/. Your build status is <status>. Go to https://console.aws.amazon.com/amplify/home?region=<region>#/<appId>/<branch>/<jobId> to view details on your build. \\"',
+        },
+        rule: amplifyAppMaster.name,
+        targetId: master.branchName,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsCloudwatchEventTargetAmplifyAppMaster.overrideLogicalId(
+      "amplify_app_master"
+    );
+    const awsSnsTopicPolicyAmplifyAppMaster = new SnsTopicPolicy(
+      this,
+      "amplify_app_master_7",
+      {
+        arn: Token.asString(awsSnsTopicAmplifyAppMaster.arn),
+        policy: Token.asString(dataAwsIamPolicyDocumentAmplifyAppMaster.json),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSnsTopicPolicyAmplifyAppMaster.overrideLogicalId("amplify_app_master");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `appId` - (Required) Unique ID for an Amplify app.
+* `branchName` - (Required) Name for the branch.
+* `backendEnvironmentArn` - (Optional) ARN for a backend environment that is part of an Amplify app.
+* `basicAuthCredentials` - (Optional) Basic authorization credentials for the branch.
+* `description` - (Optional) Description for the branch.
+* `displayName` - (Optional) Display name for a branch. This is used as the default domain prefix.
+* `enableAutoBuild` - (Optional) Enables auto building for the branch.
+* `enableBasicAuth` - (Optional) Enables basic authorization for the branch.
+* `enableNotification` - (Optional) Enables notifications for the branch.
+* `enablePerformanceMode` - (Optional) Enables performance mode for the branch.
+* `enablePullRequestPreview` - (Optional) Enables pull request previews for this branch.
+* `environmentVariables` - (Optional) Environment variables for the branch.
+* `framework` - (Optional) Framework for the branch.
+* `pullRequestEnvironmentName` - (Optional) Amplify environment name for the pull request.
+* `stage` - (Optional) Describes the current stage for the branch. Valid values: `PRODUCTION`, `BETA`, `DEVELOPMENT`, `EXPERIMENTAL`, `PULL_REQUEST`.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `ttl` - (Optional) Content Time To Live (TTL) for the website in seconds.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN for the branch.
+* `associatedResources` - A list of custom resources that are linked to this branch.
+* `customDomains` - Custom domains for the branch.
+* `destinationBranch` - Destination branch if the branch is a pull request branch.
+* `sourceBranch` - Source branch if the branch is a pull request branch.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify branch using `appId` and `branchName`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amplify branch using `appId` and `branchName`. For example:
+
+```console
+% terraform import aws_amplify_branch.master d2ypk4k47z8u6/master
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/amplify_domain_association.html.markdown b/website/docs/cdktf/typescript/r/amplify_domain_association.html.markdown
new file mode 100644
index 00000000000..f41413efa9b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/amplify_domain_association.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "Amplify"
+layout: "aws"
+page_title: "AWS: aws_amplify_domain_association"
+description: |-
+  Provides an Amplify Domain Association resource.
+---
+
+
+
+# Resource: aws_amplify_domain_association
+
+Provides an Amplify Domain Association resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AmplifyApp } from "./.gen/providers/aws/amplify-app";
+import { AmplifyBranch } from "./.gen/providers/aws/amplify-branch";
+import { AmplifyDomainAssociation } from "./.gen/providers/aws/amplify-domain-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new AmplifyApp(this, "example", {
+      customRule: [
+        {
+          source: "https://example.com",
+          status: "302",
+          target: "https://www.example.com",
+        },
+      ],
+      name: "app",
+    });
+    const master = new AmplifyBranch(this, "master", {
+      appId: example.id,
+      branchName: "master",
+    });
+    const awsAmplifyDomainAssociationExample = new AmplifyDomainAssociation(
+      this,
+      "example_2",
+      {
+        appId: example.id,
+        domainName: "example.com",
+        subDomain: [
+          {
+            branchName: master.branchName,
+            prefix: "",
+          },
+          {
+            branchName: master.branchName,
+            prefix: "www",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAmplifyDomainAssociationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `appId` - (Required) Unique ID for an Amplify app.
+* `domainName` - (Required) Domain name for the domain association.
+* `enableAutoSubDomain` - (Optional) Enables the automated creation of subdomains for branches (see the sketch after this list).
+* `subDomain` - (Required) Setting for the subdomain. Documented below.
+* `waitForVerification` - (Optional) If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
+
+The `subDomain` configuration block supports the following arguments:
+
+* `branchName` - (Required) Branch name setting for the subdomain.
+* `prefix` - (Required) Prefix setting for the subdomain.
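+
+Because `subDomain` is required and must map at least one prefix to a branch, apps that create branches automatically would otherwise need an explicit mapping per branch. The sketch below is not `cdktf convert` output; the construct names and the `main` branch are illustrative. It combines `enableAutoSubDomain` with automated branch creation so Amplify manages the additional subdomains itself:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AmplifyApp } from "./.gen/providers/aws/amplify-app";
+import { AmplifyDomainAssociation } from "./.gen/providers/aws/amplify-domain-association";
+class AutoSubDomainStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const app = new AmplifyApp(this, "app", {
+      autoBranchCreationPatterns: ["feature/*"],
+      enableAutoBranchCreation: true,
+      name: "app",
+    });
+    new AmplifyDomainAssociation(this, "domain", {
+      appId: app.id,
+      domainName: "example.com",
+      // Let Amplify create subdomains for auto-created branches.
+      enableAutoSubDomain: true,
+      // One explicit mapping is still required; "main" is a hypothetical
+      // branch name for the root of the domain.
+      subDomain: [
+        {
+          branchName: "main",
+          prefix: "",
+        },
+      ],
+    });
+  }
+}
+```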
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the domain association. +* `certificateVerificationDnsRecord` - The DNS record for certificate verification. + +The `subDomain` configuration block exports the following attributes: + +* `dnsRecord` - DNS record for the subdomain. +* `verified` - Verified status of the subdomain. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify domain association using `appId` and `domainName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amplify domain association using `appId` and `domainName`. For example: + +```console +% terraform import aws_amplify_domain_association.app d2ypk4k47z8u6/example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/amplify_webhook.html.markdown b/website/docs/cdktf/typescript/r/amplify_webhook.html.markdown new file mode 100644 index 00000000000..35231f2eff6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/amplify_webhook.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Amplify" +layout: "aws" +page_title: "AWS: aws_amplify_webhook" +description: |- + Provides an Amplify Webhook resource. +--- + + + +# Resource: aws_amplify_webhook + +Provides an Amplify Webhook resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AmplifyApp } from "./.gen/providers/aws/amplify-app"; +import { AmplifyBranch } from "./.gen/providers/aws/amplify-branch"; +import { AmplifyWebhook } from "./.gen/providers/aws/amplify-webhook"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AmplifyApp(this, "example", { + name: "app", + }); + const master = new AmplifyBranch(this, "master", { + appId: example.id, + branchName: "master", + }); + const awsAmplifyWebhookMaster = new AmplifyWebhook(this, "master_2", { + appId: example.id, + branchName: master.branchName, + description: "triggermaster", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAmplifyWebhookMaster.overrideLogicalId("master"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `appId` - (Required) Unique ID for an Amplify app. +* `branchName` - (Required) Name for a branch that is part of the Amplify app. +* `description` - (Optional) Description for a webhook. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN for the webhook. +* `url` - URL of the webhook. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amplify webhook using a webhook ID. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amplify webhook using a webhook ID. For example:
+
+```console
+% terraform import aws_amplify_webhook.master a26b22a0-748b-4b57-b9a0-ae7e601fe4b1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_account.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_account.html.markdown
new file mode 100644
index 00000000000..69bd98a6c74
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_account.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_account"
+description: |-
+  Provides settings for an API Gateway Account.
+---
+
+
+
+# Resource: aws_api_gateway_account
+
+Provides settings for an API Gateway Account. Settings are applied region-wide per `provider` block.
+
+-> **Note:** As there is no API method for deleting account settings or resetting them to defaults, destroying this resource will keep your account settings intact.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApiGatewayAccount } from "./.gen/providers/aws/api-gateway-account";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["apigateway.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const cloudwatch = new DataAwsIamPolicyDocument(this, "cloudwatch", {
+      statement: [
+        {
+          actions: [
+            "logs:CreateLogGroup",
+            "logs:CreateLogStream",
+            "logs:DescribeLogGroups",
+            "logs:DescribeLogStreams",
+            "logs:PutLogEvents",
+            "logs:GetLogEvents",
+            "logs:FilterLogEvents",
+          ],
+          effect: "Allow",
+          resources: ["*"],
+        },
+      ],
+    });
+    const awsIamRoleCloudwatch = new IamRole(this, "cloudwatch_2", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "api_gateway_cloudwatch_global",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleCloudwatch.overrideLogicalId("cloudwatch");
+    const awsIamRolePolicyCloudwatch = new IamRolePolicy(this, "cloudwatch_3", {
+      name: "default",
+      policy: Token.asString(cloudwatch.json),
+      role: Token.asString(awsIamRoleCloudwatch.id),
+    });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + awsIamRolePolicyCloudwatch.overrideLogicalId("cloudwatch"); + new ApiGatewayAccount(this, "demo", { + cloudwatchRoleArn: Token.asString(awsIamRoleCloudwatch.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cloudwatchRoleArn` - (Optional) ARN of an IAM role for CloudWatch (to allow logging & monitoring). See more [in AWS Docs](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-stage-settings.html#how-to-stage-settings-console). Logging & monitoring can be enabled/disabled and otherwise tuned on the API Gateway Stage level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `throttleSettings` - Account-Level throttle settings. See exported fields below. + +`throttleSettings` block exports the following: + +* `burstLimit` - Absolute maximum number of times API Gateway allows the API to be called per second (RPS). +* `rateLimit` - Number of times API Gateway allows the API to be called per second on average (RPS). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway Accounts using the word `apiGatewayAccount`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import API Gateway Accounts using the word `apiGatewayAccount`. For example: + +```console +% terraform import aws_api_gateway_account.demo api-gateway-account +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_api_key.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_api_key.html.markdown new file mode 100644 index 00000000000..22c009d3bed --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_api_key.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_api_key" +description: |- + Provides an API Gateway API Key. +--- + + + +# Resource: aws_api_gateway_api_key + +Provides an API Gateway API Key. + +~> **NOTE:** Since the API Gateway usage plans feature was launched on August 11, 2016, usage plans are now **required** to associate an API key with an API stage. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayApiKey } from "./.gen/providers/aws/api-gateway-api-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApiGatewayApiKey(this, "MyDemoApiKey", { + name: "demo", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the API key. +* `description` - (Optional) API key description. Defaults to "Managed by Terraform". +* `enabled` - (Optional) Whether the API key can be used by callers. Defaults to `true`. +* `value` - (Optional) Value of the API key. 
If specified, the value must be an alphanumeric string between 20 and 128 characters. If not specified, it will be automatically generated by AWS on creation. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the API key +* `createdDate` - Creation date of the API key +* `lastUpdatedDate` - Last update date of the API key +* `value` - Value of the API key +* `arn` - ARN +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway Keys using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import API Gateway Keys using the `id`. For example: + +```console +% terraform import aws_api_gateway_api_key.my_demo_key 8bklk8bl1k3sB38D9B3l0enyWT8c09B30lkq0blk +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_authorizer.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_authorizer.html.markdown new file mode 100644 index 00000000000..9a3f2f54948 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_authorizer.html.markdown @@ -0,0 +1,176 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_authorizer" +description: |- + Provides an API Gateway Authorizer. +--- + + + +# Resource: aws_api_gateway_authorizer + +Provides an API Gateway Authorizer. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ApiGatewayAuthorizer } from "./.gen/providers/aws/api-gateway-authorizer"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { LambdaFunction } from "./.gen/providers/aws/lambda-function"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const demo = new ApiGatewayRestApi(this, "demo", { + name: "auth-demo", + }); + const invocationAssumeRole = new DataAwsIamPolicyDocument( + this, + "invocation_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["apigateway.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const lambdaAssumeRole = new DataAwsIamPolicyDocument( + this, + "lambda_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["lambda.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const invocationRole = new IamRole(this, "invocation_role", { + assumeRolePolicy: Token.asString(invocationAssumeRole.json), + name: "api_gateway_auth_invocation", + path: "/", + }); + const lambda = new IamRole(this, "lambda", { + assumeRolePolicy: Token.asString(lambdaAssumeRole.json), + name: "demo-lambda", + }); + const authorizer = new LambdaFunction(this, "authorizer", { + filename: "lambda-function.zip", + functionName: "api_gateway_authorizer", + handler: "exports.example", + role: lambda.arn, + sourceCodeHash: Token.asString( + Fn.filebase64sha256("lambda-function.zip") + ), + }); + const invocationPolicy = new DataAwsIamPolicyDocument( + this, + "invocation_policy", + { + statement: [ + { + actions: ["lambda:InvokeFunction"], + effect: "Allow", + resources: [authorizer.arn], + }, + ], + } + ); + const awsApiGatewayAuthorizerDemo = new ApiGatewayAuthorizer( + this, + "demo_7", + { + authorizerCredentials: invocationRole.arn, + authorizerUri: authorizer.invokeArn, + name: "demo", + restApiId: demo.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayAuthorizerDemo.overrideLogicalId("demo"); + const awsIamRolePolicyInvocationPolicy = new IamRolePolicy( + this, + "invocation_policy_8", + { + name: "default", + policy: Token.asString(invocationPolicy.json), + role: invocationRole.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyInvocationPolicy.overrideLogicalId("invocation_policy"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `authorizerUri` - (Optional, required for type `token`/`request`) Authorizer's Uniform Resource Identifier (URI). This must be a well-formed Lambda function URI in the form of `arn:aws:apigateway:{region}:lambda:path/{serviceApi}`, + e.g., `arn:aws:apigateway:usWest2:lambda:path/20150331/functions/arn:aws:lambda:usWest2:012345678912:function:myFunction/invocations` +* `name` - (Required) Name of the authorizer +* `restApiId` - (Required) ID of the associated REST API +* `identitySource` - (Optional) Source of the identity in an incoming request. Defaults to `methodRequestHeaderAuthorization`. 
For `request` type, this may be a comma-separated list of values, including headers, query string parameters and stage variables - e.g., `"methodRequestHeaderSomeHeaderName,methodRequestQuerystringSomeQueryStringName,stageVariablesSomeStageVariableName"` +* `type` - (Optional) Type of the authorizer. Possible values are `token` for a Lambda function using a single authorization token submitted in a custom header, `request` for a Lambda function using incoming request parameters, or `cognitoUserPools` for using an Amazon Cognito user pool. Defaults to `token`. +* `authorizerCredentials` - (Optional) Credentials required for the authorizer. To specify an IAM Role for API Gateway to assume, use the IAM Role ARN. +* `authorizerResultTtlInSeconds` - (Optional) TTL of cached authorizer results in seconds. Defaults to `300`. +* `identityValidationExpression` - (Optional) Validation expression for the incoming identity. For `token` type, this value should be a regular expression. The incoming token from the client is matched against this expression, and will proceed if the token matches. If the token doesn't match, the client receives a 401 Unauthorized response. +* `providerArns` - (Optional, required for type `cognitoUserPools`) List of the Amazon Cognito user pool ARNs. Each element is of this format: `arn:aws:cognitoIdp:{region}:{accountId}:userpool/{userPoolId}`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the API Gateway Authorizer +* `id` - Authorizer identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS API Gateway Authorizer using the `restApiId/authorizerId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS API Gateway Authorizer using the `restApiId/authorizerId`. For example: + +```console +% terraform import aws_api_gateway_authorizer.authorizer 12345abcde/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_base_path_mapping.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_base_path_mapping.html.markdown new file mode 100644 index 00000000000..f72722557c3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_base_path_mapping.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_base_path_mapping" +description: |- + Connects a custom domain with a deployed API +--- + + + +# Resource: aws_api_gateway_base_path_mapping + +Connects a custom domain name registered via `awsApiGatewayDomainName` +with a deployed API so that its methods can be called via the +custom domain name. + +## Example Usage + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/apiGatewayRestApiOpenapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). 
+ +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayBasePathMapping } from "./.gen/providers/aws/api-gateway-base-path-mapping"; +import { ApiGatewayDomainName } from "./.gen/providers/aws/api-gateway-domain-name"; +import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ApiGatewayDomainName(this, "example", { + certificateBody: Token.asString( + Fn.file("${path.module}/example.com/example.crt") + ), + certificateChain: Token.asString( + Fn.file("${path.module}/example.com/ca.crt") + ), + certificateName: "example-api", + certificatePrivateKey: Token.asString( + Fn.file("${path.module}/example.com/example.key") + ), + domainName: "example.com", + }); + const awsApiGatewayStageExample = new ApiGatewayStage(this, "example_1", { + deploymentId: Token.asString(awsApiGatewayDeploymentExample.id), + restApiId: Token.asString(awsApiGatewayRestApiExample.id), + stageName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayStageExample.overrideLogicalId("example"); + const awsApiGatewayBasePathMappingExample = new ApiGatewayBasePathMapping( + this, + "example_2", + { + apiId: Token.asString(awsApiGatewayRestApiExample.id), + domainName: example.domainName, + stageName: Token.asString(awsApiGatewayStageExample.stageName), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayBasePathMappingExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domainName` - (Required) Already-registered domain name to connect the API to. +* `apiId` - (Required) ID of the API to connect. +* `stageName` - (Optional) Name of a specific deployment stage to expose at the given path. If omitted, callers may select any stage by including its name as a path element after the base path. +* `basePath` - (Optional) Path segment that must be prepended to the path when accessing the API via this mapping. If omitted, the API is exposed at the root of the given domain. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayBasePathMapping` using the domain name and base path. 
For example: + +For an empty `basePath` or, in other words, a root path (`/`): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +For a non-root `basePath`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayBasePathMapping` using the domain name and base path. For example: + +For an empty `basePath` or, in other words, a root path (`/`): + +```console +% terraform import aws_api_gateway_base_path_mapping.example example.com/ +``` + +For a non-root `basePath`: + +```console +% terraform import aws_api_gateway_base_path_mapping.example example.com/base-path +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_client_certificate.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_client_certificate.html.markdown new file mode 100644 index 00000000000..1fe25587d25 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_client_certificate.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_client_certificate" +description: |- + Provides an API Gateway Client Certificate. +--- + + + +# Resource: aws_api_gateway_client_certificate + +Provides an API Gateway Client Certificate. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayClientCertificate } from "./.gen/providers/aws/api-gateway-client-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApiGatewayClientCertificate(this, "demo", { + description: "My client certificate", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the client certificate. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the client certificate. +* `createdDate` - Date when the client certificate was created. +* `expirationDate` - Date when the client certificate will expire. +* `pemEncodedCertificate` - The PEM-encoded public key of the client certificate. +* `arn` - ARN +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
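A client certificate only takes effect once a stage presents it to backend integrations. As a usage sketch (assuming the generated stage bindings expose `clientCertificateId`, mirroring the provider's `client_certificate_id` argument, and reusing this page's convention of referencing resources defined elsewhere):

```typescript
// Sketch: attaching the client certificate to a stage. The
// `clientCertificateId` property and the referenced deployment/REST API
// constructs are assumptions, mirroring the other examples on this page.
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
import { ApiGatewayClientCertificate } from "./.gen/providers/aws/api-gateway-client-certificate";
import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage";
class ClientCertificateUsageSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const demo = new ApiGatewayClientCertificate(this, "demo", {
      description: "My client certificate",
    });
    // awsApiGatewayDeploymentExample and awsApiGatewayRestApiExample are
    // assumed to be defined elsewhere in the stack.
    new ApiGatewayStage(this, "example", {
      clientCertificateId: demo.id,
      deploymentId: Token.asString(awsApiGatewayDeploymentExample.id),
      restApiId: Token.asString(awsApiGatewayRestApiExample.id),
      stageName: "example",
    });
  }
}
```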
+

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway Client Certificates using the id. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import API Gateway Client Certificates using the id. For example:

```console
% terraform import aws_api_gateway_client_certificate.demo ab1cqe
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_deployment.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_deployment.html.markdown
new file mode 100644
index 00000000000..b140b615a23
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_deployment.html.markdown
@@ -0,0 +1,213 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_deployment"
+description: |-
+  Manages an API Gateway REST Deployment.
+---

# Resource: aws_api_gateway_deployment

Manages an API Gateway REST Deployment. A deployment is a snapshot of the REST API configuration. The deployment can then be published to callable endpoints via the [`awsApiGatewayStage` resource](api_gateway_stage.html) and optionally managed further with the [`awsApiGatewayBasePathMapping` resource](api_gateway_base_path_mapping.html), [`awsApiGatewayDomainName` resource](api_gateway_domain_name.html), and [`awsApiGatewayMethodSettings` resource](api_gateway_method_settings.html). For more information, see the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-deploy-api.html).

To properly capture all REST API configuration in a deployment, this resource must have dependencies on all prior Terraform resources that manage resources/paths, methods, integrations, etc.

* For REST APIs that are configured via OpenAPI specification ([`awsApiGatewayRestApi` resource](api_gateway_rest_api.html) `body` argument), no special dependency setup is needed beyond referencing the `id` attribute of that resource unless additional Terraform resources have further customized the REST API.
* When the REST API configuration involves other Terraform resources ([`awsApiGatewayIntegration` resource](api_gateway_integration.html), etc.), the dependency setup can be done with implicit resource references in the `triggers` argument or explicit resource references using the [resource `dependsOn` meta-argument](https://www.terraform.io/docs/configuration/meta-arguments/depends_on.html). The `triggers` argument should be preferred over `dependsOn`, since `dependsOn` can only capture dependency ordering and will not cause the resource to recreate (redeploy the REST API) with upstream configuration changes.

!> **WARNING:** We recommend using the [`awsApiGatewayStage` resource](api_gateway_stage.html) instead of managing an API Gateway Stage via the `stageName` argument of this resource. When this resource is recreated (REST API redeployment) with the `stageName` configured, the stage is deleted and recreated. This will cause a temporary service interruption, increase Terraform plan differences, and can require a second Terraform apply to recreate any downstream stage configuration such as associated `awsApiGatewayMethodSettings` resources.
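Condensed, the recommended pattern looks like the following sketch (resource names and the REST API body are illustrative; the complete examples below show the full setup):

```typescript
// Sketch of the recommended wiring: a deployment without `stageName`,
// redeployment driven by `triggers`, and a separately managed stage.
import { Construct } from "constructs";
import { Token, Fn, TerraformStack } from "cdktf";
import { ApiGatewayDeployment } from "./.gen/providers/aws/api-gateway-deployment";
import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api";
import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage";
class DeploymentPatternSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const api = new ApiGatewayRestApi(this, "api", { name: "example" });
    const deployment = new ApiGatewayDeployment(this, "deployment", {
      // See the NOTE below on why createBeforeDestroy matters here.
      lifecycle: {
        createBeforeDestroy: true,
      },
      restApiId: api.id,
      // Redeploy whenever the hashed inputs change.
      triggers: {
        redeployment: Token.asString(
          Fn.sha1(Token.asString(Fn.jsonencode(api.body)))
        ),
      },
    });
    new ApiGatewayStage(this, "stage", {
      deploymentId: deployment.id,
      restApiId: api.id,
      stageName: "example",
    });
  }
}
```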
+ +~> **NOTE:** Enable the [resource `lifecycle` configuration block `createBeforeDestroy` argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy) in this resource configuration to properly order redeployments in Terraform. Without enabling `createBeforeDestroy`, API Gateway can return errors such as `BadRequestException: Active stages pointing to this deployment must be moved or deleted` on recreation. + +## Example Usage + +### OpenAPI Specification + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/apiGatewayRestApiOpenapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). + +```terraform +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} +``` + +### Terraform Resources + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayDeployment } from "./.gen/providers/aws/api-gateway-deployment"; +import { ApiGatewayIntegration } from "./.gen/providers/aws/api-gateway-integration"; +import { ApiGatewayMethod } from "./.gen/providers/aws/api-gateway-method"; +import { ApiGatewayResource } from "./.gen/providers/aws/api-gateway-resource"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ApiGatewayRestApi(this, "example", { + name: "example", + }); + const awsApiGatewayResourceExample = new ApiGatewayResource( + this, + "example_1", + { + parentId: example.rootResourceId, + pathPart: "example", + restApiId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayResourceExample.overrideLogicalId("example"); + const awsApiGatewayMethodExample = new ApiGatewayMethod(this, "example_2", { + authorization: "NONE", + httpMethod: "GET", + resourceId: Token.asString(awsApiGatewayResourceExample.id), + restApiId: example.id, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsApiGatewayMethodExample.overrideLogicalId("example"); + const awsApiGatewayIntegrationExample = new ApiGatewayIntegration( + this, + "example_3", + { + httpMethod: Token.asString(awsApiGatewayMethodExample.httpMethod), + resourceId: Token.asString(awsApiGatewayResourceExample.id), + restApiId: example.id, + type: "MOCK", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayIntegrationExample.overrideLogicalId("example"); + const awsApiGatewayDeploymentExample = new ApiGatewayDeployment( + this, + "example_4", + { + lifecycle: { + createBeforeDestroy: true, + }, + restApiId: example.id, + triggers: { + redeployment: Token.asString( + Fn.sha1( + Token.asString( + Fn.jsonencode([ + awsApiGatewayResourceExample.id, + awsApiGatewayMethodExample.id, + awsApiGatewayIntegrationExample.id, + ]) + ) + ) + ), + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayDeploymentExample.overrideLogicalId("example"); + const awsApiGatewayStageExample = new ApiGatewayStage(this, "example_5", { + deploymentId: Token.asString(awsApiGatewayDeploymentExample.id), + restApiId: example.id, + stageName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayStageExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) REST API identifier. +* `description` - (Optional) Description of the deployment +* `stageName` - (Optional) Name of the stage to create with this deployment. If the specified stage already exists, it will be updated to point to the new deployment. We recommend using the [`awsApiGatewayStage` resource](api_gateway_stage.html) instead to manage stages. +* `stageDescription` - (Optional) Description to set on the stage managed by the `stageName` argument. +* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a redeployment. To force a redeployment without changing these keys/values, use the [`replace` option](https://developer.hashicorp.com/terraform/cli/commands/plan#replace-address) with `terraform plan` or `terraform apply`. +* `variables` - (Optional) Map to set on the stage managed by the `stageName` argument. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the deployment +* `invokeUrl` - URL to invoke the API pointing to the stage, + e.g., `https://z4675Bid1JExecuteApiEuWest2AmazonawsCom/prod` +* `executionArn` - Execution ARN to be used in [`lambdaPermission`](/docs/providers/aws/r/lambda_permission.html)'s `sourceArn` + when allowing API Gateway to invoke a Lambda function, + e.g., `arn:aws:executeApi:euWest2:123456789012:z4675Bid1J/prod` +* `createdDate` - Creation date of the deployment + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayDeployment` using `restApiId/deploymentId`. 
For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsApiGatewayDeployment` using `restApiId/deploymentId`. For example:

```console
% terraform import aws_api_gateway_deployment.example aabbccddee/1122334
```

The `stageName`, `stageDescription`, and `variables` arguments cannot be imported. Use the [`awsApiGatewayStage` resource](api_gateway_stage.html) to import and manage stages.

The `triggers` argument cannot be imported.

 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_documentation_part.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_documentation_part.html.markdown
new file mode 100644
index 00000000000..baa3743842e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_documentation_part.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_documentation_part"
+description: |-
+  Provides a resource to manage an API Gateway Documentation Part.
+---

# Resource: aws_api_gateway_documentation_part

Provides a resource to manage an API Gateway Documentation Part.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { ApiGatewayDocumentationPart } from "./.gen/providers/aws/api-gateway-documentation-part";
import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new ApiGatewayRestApi(this, "example", {
      name: "example_api",
    });
    const awsApiGatewayDocumentationPartExample =
      new ApiGatewayDocumentationPart(this, "example_1", {
        location: {
          method: "GET",
          path: "/example",
          type: "METHOD",
        },
        properties: '{\\"description\\":\\"Example description\\"}',
        restApiId: example.id,
      });
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsApiGatewayDocumentationPartExample.overrideLogicalId("example");
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `location` - (Required) Location of the targeted API entity of the to-be-created documentation part. See below.
* `properties` - (Required) Content map of API-specific key-value pairs describing the targeted API entity. The map must be encoded as a JSON string, e.g., "{ \"description\": \"The API does ...\" }". Only Swagger-compliant key-value pairs can be exported and, hence, published. See the sketch below for one way to construct this string without hand-escaping.
* `restApiId` - (Required) ID of the associated Rest API
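Rather than hand-escaping the `properties` JSON as in the generated example above, the string can be built with `JSON.stringify` — a small sketch using the same constructs:

```typescript
// Sketch: constructing the `properties` JSON string without manual escaping.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { ApiGatewayDocumentationPart } from "./.gen/providers/aws/api-gateway-documentation-part";
import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api";
class DocumentationPartSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new ApiGatewayRestApi(this, "example", {
      name: "example_api",
    });
    new ApiGatewayDocumentationPart(this, "method_docs", {
      location: {
        method: "GET",
        path: "/example",
        type: "METHOD",
      },
      // JSON.stringify yields the escaped JSON string the API expects.
      properties: JSON.stringify({ description: "Example description" }),
      restApiId: example.id,
    });
  }
}
```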
### Nested fields

#### `location`

See supported entity types for each field in the [official docs](https://docs.aws.amazon.com/apigateway/api-reference/resource/documentation-part/).

* `method` - (Optional) HTTP verb of a method. The default value is `*` for any method.
* `name` - (Optional) Name of the targeted API entity.
* `path` - (Optional) URL path of the target. The default value is `/` for the root resource.
* `statusCode` - (Optional) HTTP status code of a response. The default value is `*` for any status code.
* `type` - (Required) Type of API entity to which the documentation content applies. E.g., `api`, `method` or `requestBody`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Unique ID of the Documentation Part

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway documentation_parts using `restApiId/docPartId`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import API Gateway documentation_parts using `restApiId/docPartId`. For example:

```console
% terraform import aws_api_gateway_documentation_part.example 5i4e1ko720/3oyy3t
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_documentation_version.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_documentation_version.html.markdown
new file mode 100644
index 00000000000..4a6c18d2a71
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_documentation_version.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_documentation_version"
+description: |-
+  Provides a resource to manage an API Gateway Documentation Version.
+---

# Resource: aws_api_gateway_documentation_version

Provides a resource to manage an API Gateway Documentation Version.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { ApiGatewayDocumentationPart } from "./.gen/providers/aws/api-gateway-documentation-part";
import { ApiGatewayDocumentationVersion } from "./.gen/providers/aws/api-gateway-documentation-version";
import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new ApiGatewayRestApi(this, "example", {
      name: "example_api",
    });
    const awsApiGatewayDocumentationPartExample =
      new ApiGatewayDocumentationPart(this, "example_1", {
        location: {
          type: "API",
        },
        properties: '{\\"description\\":\\"Example\\"}',
        restApiId: example.id,
      });
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsApiGatewayDocumentationPartExample.overrideLogicalId("example");
    const awsApiGatewayDocumentationVersionExample =
      new ApiGatewayDocumentationVersion(this, "example_2", {
        dependsOn: [awsApiGatewayDocumentationPartExample],
        description: "Example description",
        restApiId: example.id,
        version: "example_version",
      });
    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
    awsApiGatewayDocumentationVersionExample.overrideLogicalId("example");
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `version` - (Required) Version identifier of the API documentation snapshot.
* `restApiId` - (Required) ID of the associated Rest API
* `description` - (Optional) Description of the API documentation version.

## Attribute Reference

This resource exports no additional attributes.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway documentation versions using `restApiId/version`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import API Gateway documentation versions using `restApiId/version`. For example:

```console
% terraform import aws_api_gateway_documentation_version.example 5i4e1ko720/example-version
```

 \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_domain_name.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_domain_name.html.markdown
new file mode 100644
index 00000000000..9863f8f82cc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_domain_name.html.markdown
@@ -0,0 +1,287 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_domain_name"
+description: |-
+  Registers a custom domain name for use with AWS API Gateway.
+---

# Resource: aws_api_gateway_domain_name

Registers a custom domain name for use with AWS API Gateway. Additional information about this functionality
can be found in the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-custom-domains.html).

This resource just establishes ownership of and the TLS settings for
a particular domain name. An API can be attached to a particular path
under the registered domain name using
[the `awsApiGatewayBasePathMapping` resource](api_gateway_base_path_mapping.html).

API Gateway domains can be defined as either 'edge-optimized' or 'regional'. In an edge-optimized configuration,
API Gateway internally creates and manages a CloudFront distribution to route requests on the given hostname. In
addition to this resource it's necessary to create a DNS record corresponding to the given domain name which is an alias
(either Route53 alias or traditional CNAME) to the Cloudfront domain name exported in the `cloudfrontDomainName`
attribute.

In a regional configuration, API Gateway does not create a CloudFront distribution to route requests to the API, though
a distribution can be created if needed. In either case, it is necessary to create a DNS record corresponding to the
given domain name which is an alias (either Route53 alias or traditional CNAME) to the regional domain name exported in
the `regionalDomainName` attribute.

~> **Note:** API Gateway requires the use of AWS Certificate Manager (ACM) certificates instead of Identity and Access Management (IAM) certificates in regions that support ACM.
Regions that support ACM can be found in the [Regions and Endpoints Documentation](https://docs.aws.amazon.com/general/latest/gr/rande.html#acm_region). To import an existing private key and certificate into ACM or request an ACM certificate, see the [`awsAcmCertificate` resource](/docs/providers/aws/r/acm_certificate.html).

~> **Note:** The `awsApiGatewayDomainName` resource depends on the `awsAcmCertificateValidation` resource, as only verified certificates can be used. Declare the dependency either explicitly, by adding the `depends_on = [aws_acm_certificate_validation.cert]` meta-argument, or implicitly, by referencing the certificate ARN from the validation resource, where it becomes available after resource creation: `regional_certificate_arn = aws_acm_certificate_validation.cert.certificate_arn`.

~> **Note:** All arguments including the private key will be stored in the raw state as plain-text.
[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).

## Example Usage

An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/apiGatewayRestApiOpenapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi).

### Edge Optimized (ACM Certificate)

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { ApiGatewayDomainName } from "./.gen/providers/aws/api-gateway-domain-name";
import { Route53Record } from "./.gen/providers/aws/route53-record";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new ApiGatewayDomainName(this, "example", {
      certificateArn: Token.asString(
        awsAcmCertificateValidationExample.certificateArn
      ),
      domainName: "api.example.com",
    });
    const awsRoute53RecordExample = new Route53Record(this, "example_1", {
      alias: {
        evaluateTargetHealth: true,
        name: example.cloudfrontDomainName,
        zoneId: example.cloudfrontZoneId,
      },
      name: example.domainName,
      type: "A",
      zoneId: Token.asString(awsRoute53ZoneExample.id),
    });
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsRoute53RecordExample.overrideLogicalId("example");
  }
}

```

### Edge Optimized (IAM Certificate)

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Fn, Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
+ */ +import { ApiGatewayDomainName } from "./.gen/providers/aws/api-gateway-domain-name"; +import { Route53Record } from "./.gen/providers/aws/route53-record"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ApiGatewayDomainName(this, "example", { + certificateBody: Token.asString( + Fn.file("${path.module}/example.com/example.crt") + ), + certificateChain: Token.asString( + Fn.file("${path.module}/example.com/ca.crt") + ), + certificateName: "example-api", + certificatePrivateKey: Token.asString( + Fn.file("${path.module}/example.com/example.key") + ), + domainName: "api.example.com", + }); + const awsRoute53RecordExample = new Route53Record(this, "example_1", { + alias: { + evaluateTargetHealth: true, + name: example.cloudfrontDomainName, + zoneId: example.cloudfrontZoneId, + }, + name: example.domainName, + type: "A", + zoneId: Token.asString(awsRoute53ZoneExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53RecordExample.overrideLogicalId("example"); + } +} + +``` + +### Regional (ACM Certificate) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayDomainName } from "./.gen/providers/aws/api-gateway-domain-name"; +import { Route53Record } from "./.gen/providers/aws/route53-record"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ApiGatewayDomainName(this, "example", { + domainName: "api.example.com", + endpointConfiguration: { + types: ["REGIONAL"], + }, + regionalCertificateArn: Token.asString( + awsAcmCertificateValidationExample.certificateArn + ), + }); + const awsRoute53RecordExample = new Route53Record(this, "example_1", { + alias: { + evaluateTargetHealth: true, + name: example.regionalDomainName, + zoneId: example.regionalZoneId, + }, + name: example.domainName, + type: "A", + zoneId: Token.asString(awsRoute53ZoneExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53RecordExample.overrideLogicalId("example"); + } +} + +``` + +### Regional (IAM Certificate) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ApiGatewayDomainName } from "./.gen/providers/aws/api-gateway-domain-name"; +import { Route53Record } from "./.gen/providers/aws/route53-record"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ApiGatewayDomainName(this, "example", { + certificateBody: Token.asString( + Fn.file("${path.module}/example.com/example.crt") + ), + certificateChain: Token.asString( + Fn.file("${path.module}/example.com/ca.crt") + ), + certificatePrivateKey: Token.asString( + Fn.file("${path.module}/example.com/example.key") + ), + domainName: "api.example.com", + endpointConfiguration: { + types: ["REGIONAL"], + }, + regionalCertificateName: "example-api", + }); + const awsRoute53RecordExample = new Route53Record(this, "example_1", { + alias: { + evaluateTargetHealth: true, + name: example.regionalDomainName, + zoneId: example.regionalZoneId, + }, + name: example.domainName, + type: "A", + zoneId: Token.asString(awsRoute53ZoneExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53RecordExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domainName` - (Required) Fully-qualified domain name to register. +* `endpointConfiguration` - (Optional) Configuration block defining API endpoint information including type. See below. +* `mutualTlsAuthentication` - (Optional) Mutual TLS authentication configuration for the domain name. See below. +* `ownershipVerificationCertificateArn` - (Optional) ARN of the AWS-issued certificate used to validate custom domain ownership (when `certificateArn` is issued via an ACM Private CA or `mutualTlsAuthentication` is configured with an ACM-imported certificate.) +* `securityPolicy` - (Optional) Transport Layer Security (TLS) version + cipher suite for this DomainName. Valid values are `tls10` and `tls12`. Must be configured to perform drift detection. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +When referencing an AWS-managed certificate, the following arguments are supported: + +* `certificateArn` - (Optional) ARN for an AWS-managed certificate. AWS Certificate Manager is the only supported source. Used when an edge-optimized domain name is desired. Conflicts with `certificateName`, `certificateBody`, `certificateChain`, `certificatePrivateKey`, `regionalCertificateArn`, and `regionalCertificateName`. +* `regionalCertificateArn` - (Optional) ARN for an AWS-managed certificate. AWS Certificate Manager is the only supported source. Used when a regional domain name is desired. Conflicts with `certificateArn`, `certificateName`, `certificateBody`, `certificateChain`, and `certificatePrivateKey`. + +When uploading a certificate, the following arguments are supported: + +* `certificateBody` - (Optional) Certificate issued for the domain name being registered, in PEM format. Only valid for `edge` endpoint configuration type. Conflicts with `certificateArn`, `regionalCertificateArn`, and `regionalCertificateName`. 
+* `certificateChain` - (Optional) Certificate for the CA that issued the certificate, along with any intermediate CA certificates required to create an unbroken chain to a certificate trusted by the intended API clients. Only valid for `edge` endpoint configuration type. Conflicts with `certificateArn`, `regionalCertificateArn`, and `regionalCertificateName`. +* `certificateName` - (Optional) Unique name to use when registering this certificate as an IAM server certificate. Conflicts with `certificateArn`, `regionalCertificateArn`, and `regionalCertificateName`. Required if `certificateArn` is not set. +* `certificatePrivateKey` - (Optional) Private key associated with the domain certificate given in `certificateBody`. Only valid for `edge` endpoint configuration type. Conflicts with `certificateArn`, `regionalCertificateArn`, and `regionalCertificateName`. +* `regionalCertificateName` - (Optional) User-friendly name of the certificate that will be used by regional endpoint for this domain name. Conflicts with `certificateArn`, `certificateName`, `certificateBody`, `certificateChain`, and `certificatePrivateKey`. + +### endpoint_configuration + +* `types` - (Required) List of endpoint types. This resource currently only supports managing a single value. Valid values: `edge` or `regional`. If unspecified, defaults to `edge`. Must be declared as `regional` in non-Commercial partitions. Refer to the [documentation](https://docs.aws.amazon.com/apigateway/latest/developerguide/create-regional-api.html) for more information on the difference between edge-optimized and regional APIs. + +### mutual_tls_authentication + +* `truststoreUri` - (Required) Amazon S3 URL that specifies the truststore for mutual TLS authentication, for example, `s3://bucketName/keyName`. The truststore can contain certificates from public or private certificate authorities. To update the truststore, upload a new version to S3, and then update your custom domain name to use the new version. +* `truststoreVersion` - (Optional) Version of the S3 object that contains the truststore. To specify a version, you must have versioning enabled for the S3 bucket. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of domain name. +* `certificateUploadDate` - Upload date associated with the domain certificate. +* `cloudfrontDomainName` - Hostname created by Cloudfront to represent the distribution that implements this domain name mapping. +* `cloudfrontZoneId` - For convenience, the hosted zone ID (`z2Fdtndataqyw2`) that can be used to create a Route53 alias record for the distribution. +* `id` - Internal identifier assigned to this domain name by API Gateway. +* `regionalDomainName` - Hostname for the custom domain's regional endpoint. +* `regionalZoneId` - Hosted zone ID that can be used to create a Route53 alias record for the regional endpoint. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway domain names using their `name`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import API Gateway domain names using their `name`. For example: + +```console +% terraform import aws_api_gateway_domain_name.example dev.example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_gateway_response.markdown b/website/docs/cdktf/typescript/r/api_gateway_gateway_response.markdown new file mode 100644 index 00000000000..c50d0bd1048 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_gateway_response.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_gateway_response" +description: |- + Provides an API Gateway Gateway Response for a REST API Gateway. +--- + + + +# Resource: aws_api_gateway_gateway_response + +Provides an API Gateway Gateway Response for a REST API Gateway. + +## Example Usage + +```terraform +resource "aws_api_gateway_rest_api" "main" { + name = "MyDemoAPI" +} + +resource "aws_api_gateway_gateway_response" "test" { + rest_api_id = aws_api_gateway_rest_api.main.id + status_code = "401" + response_type = "UNAUTHORIZED" + + response_templates = { + "application/json" = "{\"message\":$context.error.messageString}" + } + + response_parameters = { + "gatewayresponse.header.Authorization" = "'Basic'" + } +} +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) String identifier of the associated REST API. +* `responseType` - (Required) Response type of the associated GatewayResponse. +* `statusCode` - (Optional) HTTP status code of the Gateway Response. +* `responseTemplates` - (Optional) Map of templates used to transform the response body. +* `responseParameters` - (Optional) Map of parameters (paths, query strings and headers) of the Gateway Response. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayGatewayResponse` using `restApiId/responseType`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayGatewayResponse` using `restApiId/responseType`. For example: + +```console +% terraform import aws_api_gateway_gateway_response.example 12345abcde/UNAUTHORIZED +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_integration.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_integration.html.markdown new file mode 100644 index 00000000000..67cc045a51e --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_integration.html.markdown @@ -0,0 +1,283 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_integration" +description: |- + Provides an HTTP Method Integration for an API Gateway Integration. 
+--- + + + +# Resource: aws_api_gateway_integration + +Provides an HTTP Method Integration for an API Gateway Integration. + +## Example Usage + +```terraform +resource "aws_api_gateway_rest_api" "MyDemoAPI" { + name = "MyDemoAPI" + description = "This is my API for demonstration purposes" +} + +resource "aws_api_gateway_resource" "MyDemoResource" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + parent_id = aws_api_gateway_rest_api.MyDemoAPI.root_resource_id + path_part = "mydemoresource" +} + +resource "aws_api_gateway_method" "MyDemoMethod" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.MyDemoResource.id + http_method = "GET" + authorization = "NONE" +} + +resource "aws_api_gateway_integration" "MyDemoIntegration" { + rest_api_id = aws_api_gateway_rest_api.MyDemoAPI.id + resource_id = aws_api_gateway_resource.MyDemoResource.id + http_method = aws_api_gateway_method.MyDemoMethod.http_method + type = "MOCK" + cache_key_parameters = ["method.request.path.param"] + cache_namespace = "foobar" + timeout_milliseconds = 29000 + + request_parameters = { + "integration.request.header.X-Authorization" = "'static'" + } + + # Transforms the incoming XML request to JSON + request_templates = { + "application/xml" = < \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_integration_response.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_integration_response.html.markdown new file mode 100644 index 00000000000..45bf873db9c --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_integration_response.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_integration_response" +description: |- + Provides an HTTP Method Integration Response for an API Gateway Resource. +--- + + + +# Resource: aws_api_gateway_integration_response + +Provides an HTTP Method Integration Response for an API Gateway Resource. + +-> **Note:** Depends on having `awsApiGatewayIntegration` inside your rest api. To ensure this +you might need to add an explicit `dependsOn` for clean runs. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ApiGatewayIntegration } from "./.gen/providers/aws/api-gateway-integration"; +import { ApiGatewayIntegrationResponse } from "./.gen/providers/aws/api-gateway-integration-response"; +import { ApiGatewayMethod } from "./.gen/providers/aws/api-gateway-method"; +import { ApiGatewayMethodResponse } from "./.gen/providers/aws/api-gateway-method-response"; +import { ApiGatewayResource } from "./.gen/providers/aws/api-gateway-resource"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myDemoApi = new ApiGatewayRestApi(this, "MyDemoAPI", { + description: "This is my API for demonstration purposes", + name: "MyDemoAPI", + }); + const myDemoResource = new ApiGatewayResource(this, "MyDemoResource", { + parentId: myDemoApi.rootResourceId, + pathPart: "mydemoresource", + restApiId: myDemoApi.id, + }); + const myDemoMethod = new ApiGatewayMethod(this, "MyDemoMethod", { + authorization: "NONE", + httpMethod: "GET", + resourceId: myDemoResource.id, + restApiId: myDemoApi.id, + }); + const response200 = new ApiGatewayMethodResponse(this, "response_200", { + httpMethod: myDemoMethod.httpMethod, + resourceId: myDemoResource.id, + restApiId: myDemoApi.id, + statusCode: "200", + }); + new ApiGatewayIntegration(this, "MyDemoIntegration", { + httpMethod: myDemoMethod.httpMethod, + resourceId: myDemoResource.id, + restApiId: myDemoApi.id, + type: "MOCK", + }); + new ApiGatewayIntegrationResponse(this, "MyDemoIntegrationResponse", { + httpMethod: myDemoMethod.httpMethod, + resourceId: myDemoResource.id, + responseTemplates: { + "application/xml": + '#set($inputRoot = $input.path(\'$\'))\n\n\n $inputRoot.body\n\n\n', + }, + restApiId: myDemoApi.id, + statusCode: response200.statusCode, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `httpMethod` - (Required) HTTP method (`get`, `post`, `put`, `delete`, `head`, `options`, `any`). +* `resourceId` - (Required) API resource ID. +* `restApiId` - (Required) ID of the associated REST API. +* `statusCode` - (Required) HTTP status code. + +The following arguments are optional: + +* `contentHandling` - (Optional) How to handle request payload content type conversions. Supported values are `convertToBinary` and `convertToText`. If this property is not defined, the response payload will be passed through from the integration response to the method response without modification. +* `responseParameters` - (Optional) Map of response parameters that can be read from the backend response. For example: `response_parameters = { "method.response.header.X-Some-Header" = "integration.response.header.X-Some-Other-Header" }`. +* `responseTemplates` - (Optional) Map of templates used to transform the integration response body. +* `selectionPattern` - (Optional) Regular expression pattern used to choose an integration response based on the response from the backend. Omit configuring this to make the integration the default one. If the backend is an `aws` Lambda function, the AWS Lambda function error header is matched. For all other `http` and `aws` backends, the HTTP status code is matched. + +## Attribute Reference + +This resource exports no additional attributes. 
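+
+As an illustration of `selectionPattern`, the hedged sketch below (not `cdktf convert` output) routes backend error payloads to a 400 method response. It reuses `myDemoMethod`, `myDemoResource`, and `myDemoApi` from the example above and assumes a matching 400 `ApiGatewayMethodResponse` has also been declared; the pattern value is illustrative.
+
+```typescript
+// A sketch, assuming the constructs from the example above plus a 400 method response.
+new ApiGatewayIntegrationResponse(this, "error_response", {
+  httpMethod: myDemoMethod.httpMethod,
+  resourceId: myDemoResource.id,
+  restApiId: myDemoApi.id,
+  statusCode: "400",
+  // For AWS Lambda backends, this regex is matched against the function's error message.
+  selectionPattern: ".*Invalid.*",
+});
+```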
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayIntegrationResponse` using `restApiId/resourceId/httpMethod/statusCode`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayIntegrationResponse` using `restApiId/resourceId/httpMethod/statusCode`. For example: + +```console +% terraform import aws_api_gateway_integration_response.example 12345abcde/67890fghij/GET/200 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_method.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_method.html.markdown new file mode 100644 index 00000000000..d730a250adc --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_method.html.markdown @@ -0,0 +1,167 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_method" +description: |- + Provides a HTTP Method for an API Gateway Resource. +--- + + + +# Resource: aws_api_gateway_method + +Provides a HTTP Method for an API Gateway Resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayMethod } from "./.gen/providers/aws/api-gateway-method"; +import { ApiGatewayResource } from "./.gen/providers/aws/api-gateway-resource"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myDemoApi = new ApiGatewayRestApi(this, "MyDemoAPI", { + description: "This is my API for demonstration purposes", + name: "MyDemoAPI", + }); + const myDemoResource = new ApiGatewayResource(this, "MyDemoResource", { + parentId: myDemoApi.rootResourceId, + pathPart: "mydemoresource", + restApiId: myDemoApi.id, + }); + new ApiGatewayMethod(this, "MyDemoMethod", { + authorization: "NONE", + httpMethod: "GET", + resourceId: myDemoResource.id, + restApiId: myDemoApi.id, + }); + } +} + +``` + +## Usage with Cognito User Pool Authorizer + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformVariable, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
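+ * This stack looks up an existing Cognito user pool, attaches a
+ * COGNITO_USER_POOLS authorizer to the API, and guards an ANY method
+ * on a {proxy+} resource with it.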
+ */ +import { ApiGatewayAuthorizer } from "./.gen/providers/aws/api-gateway-authorizer"; +import { ApiGatewayMethod } from "./.gen/providers/aws/api-gateway-method"; +import { ApiGatewayResource } from "./.gen/providers/aws/api-gateway-resource"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +import { DataAwsCognitoUserPools } from "./.gen/providers/aws/data-aws-cognito-user-pools"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const cognitoUserPoolName = new TerraformVariable( + this, + "cognito_user_pool_name", + {} + ); + const thisVar = new ApiGatewayRestApi(this, "this", { + name: "with-authorizer", + }); + const dataAwsCognitoUserPoolsThis = new DataAwsCognitoUserPools( + this, + "this_2", + { + name: cognitoUserPoolName.stringValue, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsCognitoUserPoolsThis.overrideLogicalId("this"); + const awsApiGatewayAuthorizerThis = new ApiGatewayAuthorizer( + this, + "this_3", + { + name: "CognitoUserPoolAuthorizer", + providerArns: Token.asList(dataAwsCognitoUserPoolsThis.arns), + restApiId: thisVar.id, + type: "COGNITO_USER_POOLS", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayAuthorizerThis.overrideLogicalId("this"); + const awsApiGatewayResourceThis = new ApiGatewayResource(this, "this_4", { + parentId: thisVar.rootResourceId, + pathPart: "{proxy+}", + restApiId: thisVar.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayResourceThis.overrideLogicalId("this"); + new ApiGatewayMethod(this, "any", { + authorization: "COGNITO_USER_POOLS", + authorizerId: Token.asString(awsApiGatewayAuthorizerThis.id), + httpMethod: "ANY", + requestParameters: { + "method.request.path.proxy": true, + }, + resourceId: Token.asString(awsApiGatewayResourceThis.id), + restApiId: thisVar.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) ID of the associated REST API +* `resourceId` - (Required) API resource ID +* `httpMethod` - (Required) HTTP Method (`get`, `post`, `put`, `delete`, `head`, `options`, `any`) +* `authorization` - (Required) Type of authorization used for the method (`none`, `custom`, `awsIam`, `cognitoUserPools`) +* `authorizerId` - (Optional) Authorizer id to be used when the authorization is `custom` or `cognitoUserPools` +* `authorizationScopes` - (Optional) Authorization scopes used when the authorization is `cognitoUserPools` +* `apiKeyRequired` - (Optional) Specify if the method requires an API key +* `operationName` - (Optional) Function name that will be given to the method when generating an SDK through API Gateway. If omitted, API Gateway will generate a function name based on the resource path and HTTP verb. +* `requestModels` - (Optional) Map of the API models used for the request's content type + where key is the content type (e.g., `application/json`) + and value is either `error`, `empty` (built-in models) or `awsApiGatewayModel`'s `name`. 
+* `requestValidatorId` - (Optional) ID of an `awsApiGatewayRequestValidator`
+* `requestParameters` - (Optional) Map of request parameters (from the path, query string and headers) that should be passed to the integration. The boolean value indicates whether the parameter is required (`true`) or optional (`false`).
+  For example: `request_parameters = {"method.request.header.X-Some-Header" = true, "method.request.querystring.some-query-param" = true}` would define that the header `X-Some-Header` and the query string `some-query-param` must be provided in the request.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayMethod` using `restApiId/resourceId/httpMethod`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApiGatewayMethod` using `restApiId/resourceId/httpMethod`. For example:
+
+```console
+% terraform import aws_api_gateway_method.example 12345abcde/67890fghij/GET
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_method_response.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_method_response.html.markdown
new file mode 100644
index 00000000000..adba0c1677d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_method_response.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_method_response"
+description: |-
+  Provides an HTTP Method Response for an API Gateway Resource.
+---
+
+
+
+# Resource: aws_api_gateway_method_response
+
+Provides an HTTP Method Response for an API Gateway Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
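+ * Declares a GET method on /mydemoresource, a 200 method response
+ * for it, and a MOCK integration behind it.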
+ */ +import { ApiGatewayIntegration } from "./.gen/providers/aws/api-gateway-integration"; +import { ApiGatewayMethod } from "./.gen/providers/aws/api-gateway-method"; +import { ApiGatewayMethodResponse } from "./.gen/providers/aws/api-gateway-method-response"; +import { ApiGatewayResource } from "./.gen/providers/aws/api-gateway-resource"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myDemoApi = new ApiGatewayRestApi(this, "MyDemoAPI", { + description: "This is my API for demonstration purposes", + name: "MyDemoAPI", + }); + const myDemoResource = new ApiGatewayResource(this, "MyDemoResource", { + parentId: myDemoApi.rootResourceId, + pathPart: "mydemoresource", + restApiId: myDemoApi.id, + }); + const myDemoMethod = new ApiGatewayMethod(this, "MyDemoMethod", { + authorization: "NONE", + httpMethod: "GET", + resourceId: myDemoResource.id, + restApiId: myDemoApi.id, + }); + new ApiGatewayMethodResponse(this, "response_200", { + httpMethod: myDemoMethod.httpMethod, + resourceId: myDemoResource.id, + restApiId: myDemoApi.id, + statusCode: "200", + }); + new ApiGatewayIntegration(this, "MyDemoIntegration", { + httpMethod: myDemoMethod.httpMethod, + resourceId: myDemoResource.id, + restApiId: myDemoApi.id, + type: "MOCK", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) ID of the associated REST API +* `resourceId` - (Required) API resource ID +* `httpMethod` - (Required) HTTP Method (`get`, `post`, `put`, `delete`, `head`, `options`, `any`) +* `statusCode` - (Required) HTTP status code +* `responseModels` - (Optional) Map of the API models used for the response's content type +* `responseParameters` - (Optional) Map of response parameters that can be sent to the caller. + For example: `response_parameters = { "method.response.header.X-Some-Header" = true }` + would define that the header `xSomeHeader` can be provided on the response. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayMethodResponse` using `restApiId/resourceId/httpMethod/statusCode`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayMethodResponse` using `restApiId/resourceId/httpMethod/statusCode`. 
For example: + +```console +% terraform import aws_api_gateway_method_response.example 12345abcde/67890fghij/GET/200 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_method_settings.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_method_settings.html.markdown new file mode 100644 index 00000000000..35239f2ad61 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_method_settings.html.markdown @@ -0,0 +1,257 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_method_settings" +description: |- + Manages API Gateway Stage Method Settings +--- + + + +# Resource: aws_api_gateway_method_settings + +Manages API Gateway Stage Method Settings. For example, CloudWatch logging and metrics. + +~> **NOTE:** We recommend using this resource in conjunction with the [`awsApiGatewayStage` resource](api_gateway_stage.html) instead of a stage managed by the [`awsApiGatewayDeployment` resource](api_gateway_deployment.html) optional `stageName` argument. Stages managed by the `awsApiGatewayDeployment` resource are recreated on redeployment and this resource will require a second apply to recreate the method settings. + +## Example Usage + +### End-to-end + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/apiGatewayRestApiOpenapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). + +### Basic Usage + +```terraform +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} + +resource "aws_api_gateway_method_settings" "all" { + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = aws_api_gateway_stage.example.stage_name + method_path = "*/*" + + settings { + metrics_enabled = true + logging_level = "ERROR" + } +} + +resource "aws_api_gateway_method_settings" "path_specific" { + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = aws_api_gateway_stage.example.stage_name + method_path = "path1/GET" + + settings { + metrics_enabled = true + logging_level = "INFO" + } +} +``` + +### CloudWatch Logging and Tracing + +The AWS Console API Gateway Editor displays multiple options for CloudWatch Logs that don't directly map to the options in the AWS API and Terraform. These examples show the `settings` blocks that are equivalent to the options the AWS Console gives for CloudWatch Logs. + +#### Off + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayMethodSettings } from "./.gen/providers/aws/api-gateway-method-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApiGatewayMethodSettings(this, "path_specific", { + methodPath: "path1/GET", + restApiId: example.id, + settings: { + loggingLevel: "OFF", + }, + stageName: Token.asString(awsApiGatewayStageExample.stageName), + }); + } +} + +``` + +#### Errors Only + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayMethodSettings } from "./.gen/providers/aws/api-gateway-method-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApiGatewayMethodSettings(this, "path_specific", { + methodPath: "path1/GET", + restApiId: example.id, + settings: { + dataTraceEnabled: false, + loggingLevel: "ERROR", + metricsEnabled: true, + }, + stageName: Token.asString(awsApiGatewayStageExample.stageName), + }); + } +} + +``` + +#### Errors and Info Logs + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayMethodSettings } from "./.gen/providers/aws/api-gateway-method-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApiGatewayMethodSettings(this, "path_specific", { + methodPath: "path1/GET", + restApiId: example.id, + settings: { + dataTraceEnabled: false, + loggingLevel: "INFO", + metricsEnabled: true, + }, + stageName: Token.asString(awsApiGatewayStageExample.stageName), + }); + } +} + +``` + +#### Full Request and Response Logs + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayMethodSettings } from "./.gen/providers/aws/api-gateway-method-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApiGatewayMethodSettings(this, "path_specific", { + methodPath: "path1/GET", + restApiId: example.id, + settings: { + dataTraceEnabled: true, + loggingLevel: "INFO", + metricsEnabled: true, + }, + stageName: Token.asString(awsApiGatewayStageExample.stageName), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) ID of the REST API +* `stageName` - (Required) Name of the stage +* `methodPath` - (Required) Method path defined as `{resourcePath}/{httpMethod}` for an individual method override, or `*/*` for overriding all methods in the stage. Ensure to trim any leading forward slashes in the path (e.g., `trimprefix(aws_api_gateway_resource.example.path, "/")`). 
+* `settings` - (Required) Settings block, see below.
+
+### `settings`
+
+* `metricsEnabled` - (Optional) Whether Amazon CloudWatch metrics are enabled for this method.
+* `loggingLevel` - (Optional) Logging level for this method, which affects the log entries pushed to Amazon CloudWatch Logs. The available levels are `OFF`, `ERROR`, and `INFO`.
+* `dataTraceEnabled` - (Optional) Whether data trace logging is enabled for this method, which affects the log entries pushed to Amazon CloudWatch Logs.
+* `throttlingBurstLimit` - (Optional) Throttling burst limit. Default: `-1` (throttling disabled).
+* `throttlingRateLimit` - (Optional) Throttling rate limit. Default: `-1` (throttling disabled).
+* `cachingEnabled` - (Optional) Whether responses should be cached and returned for requests. A cache cluster must be enabled on the stage for responses to be cached.
+* `cacheTtlInSeconds` - (Optional) Time to live (TTL), in seconds, for cached responses. The higher the TTL, the longer the response will be cached.
+* `cacheDataEncrypted` - (Optional) Whether the cached responses are encrypted.
+* `requireAuthorizationForCacheControl` - (Optional) Whether authorization is required for a cache invalidation request.
+* `unauthorizedCacheControlHeaderStrategy` - (Optional) How to handle unauthorized requests for cache invalidation. The available values are `FAIL_WITH_403`, `SUCCEED_WITH_RESPONSE_HEADER`, and `SUCCEED_WITHOUT_RESPONSE_HEADER`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayMethodSettings` using `restApiId/stageName/methodPath`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApiGatewayMethodSettings` using `restApiId/stageName/methodPath`. For example:
+
+```console
+% terraform import aws_api_gateway_method_settings.example 12345abcde/example/test/GET
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_model.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_model.html.markdown
new file mode 100644
index 00000000000..8540e4d1892
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_model.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_model"
+description: |-
+  Provides a Model for a REST API Gateway.
+---
+
+
+
+# Resource: aws_api_gateway_model
+
+Provides a Model for a REST API Gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
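+ * Registers a JSON schema model named "user" for the API's
+ * application/json payloads.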
+ */ +import { ApiGatewayModel } from "./.gen/providers/aws/api-gateway-model"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myDemoApi = new ApiGatewayRestApi(this, "MyDemoAPI", { + description: "This is my API for demonstration purposes", + name: "MyDemoAPI", + }); + new ApiGatewayModel(this, "MyDemoModel", { + contentType: "application/json", + description: "a JSON schema", + name: "user", + restApiId: myDemoApi.id, + schema: Token.asString( + Fn.jsonencode({ + type: "object", + }) + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) ID of the associated REST API +* `name` - (Required) Name of the model +* `description` - (Optional) Description of the model +* `contentType` - (Required) Content type of the model +* `schema` - (Required) Schema of the model in a JSON form + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the model + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayModel` using `restApiId/name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayModel` using `restApiId/name`. For example: + +```console +% terraform import aws_api_gateway_model.example 12345abcde/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_request_validator.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_request_validator.html.markdown new file mode 100644 index 00000000000..c404a605199 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_request_validator.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_request_validator" +description: |- + Manages an API Gateway Request Validator. +--- + + + +# Resource: aws_api_gateway_request_validator + +Manages an API Gateway Request Validator. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayRequestValidator } from "./.gen/providers/aws/api-gateway-request-validator"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApiGatewayRequestValidator(this, "example", { + name: "example", + restApiId: Token.asString(awsApiGatewayRestApiExample.id), + validateRequestBody: true, + validateRequestParameters: true, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the request validator +* `restApiId` - (Required) ID of the associated Rest API +* `validateRequestBody` - (Optional) Boolean whether to validate request body. Defaults to `false`. 
+* `validateRequestParameters` - (Optional) Boolean whether to validate request parameters. Defaults to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique ID of the request validator + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayRequestValidator` using `restApiId/requestValidatorId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayRequestValidator` using `restApiId/requestValidatorId`. For example: + +```console +% terraform import aws_api_gateway_request_validator.example 12345abcde/67890fghij +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_resource.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_resource.html.markdown new file mode 100644 index 00000000000..98a2c889260 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_resource.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_resource" +description: |- + Provides an API Gateway Resource. +--- + + + +# Resource: aws_api_gateway_resource + +Provides an API Gateway Resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayResource } from "./.gen/providers/aws/api-gateway-resource"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myDemoApi = new ApiGatewayRestApi(this, "MyDemoAPI", { + description: "This is my API for demonstration purposes", + name: "MyDemoAPI", + }); + new ApiGatewayResource(this, "MyDemoResource", { + parentId: myDemoApi.rootResourceId, + pathPart: "mydemoresource", + restApiId: myDemoApi.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) ID of the associated REST API +* `parentId` - (Required) ID of the parent API resource +* `pathPart` - (Required) Last path segment of this API resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Resource's identifier. +* `path` - Complete path for this API resource, including all parent paths. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayResource` using `restApiId/resourceId`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayResource` using `restApiId/resourceId`. For example: + +```console +% terraform import aws_api_gateway_resource.example 12345abcde/67890fghij +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_rest_api.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_rest_api.html.markdown new file mode 100644 index 00000000000..13cd15f50c0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_rest_api.html.markdown @@ -0,0 +1,326 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_rest_api" +description: |- + Manages an API Gateway REST API. +--- + + + +# Resource: aws_api_gateway_rest_api + +Manages an API Gateway REST API. The REST API can be configured via [importing an OpenAPI specification](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-import-api.html) in the `body` argument (with other arguments serving as overrides) or via other Terraform resources to manage the resources ([`awsApiGatewayResource` resource](api_gateway_resource.html)), methods ([`awsApiGatewayMethod` resource](api_gateway_method.html)), integrations ([`awsApiGatewayIntegration` resource](api_gateway_integration.html)), etc. of the REST API. Once the REST API is configured, the [`awsApiGatewayDeployment` resource](api_gateway_deployment.html) can be used along with the [`awsApiGatewayStage` resource](api_gateway_stage.html) to publish the REST API. + +-> **Note:** Amazon API Gateway Version 1 resources are used for creating and deploying REST APIs. To create and deploy WebSocket and HTTP APIs, use Amazon API Gateway Version 2 [resources](/docs/providers/aws/r/apigatewayv2_api.html). + +!> **WARN:** When importing Open API Specifications with the `body` argument, by default the API Gateway REST API will be replaced with the Open API Specification thus removing any existing methods, resources, integrations, or endpoints. Endpoint mutations are asynchronous operations, and race conditions with DNS are possible. To overcome this limitation, use the `putRestApiMode` attribute and set it to `merge`. + +## Example Usage + +### OpenAPI Specification + +An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/apiGatewayRestApiOpenapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi). 
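+
+In CDKTF TypeScript, the core of this pattern is serializing the specification and passing it as `body`. The sketch below is hand-written rather than `cdktf convert` output; the stack name and the empty `paths` map are illustrative placeholders, and the full HCL example follows.
+
+```typescript
+// A minimal sketch: define a REST API from an inline OpenAPI document.
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api";
+
+class OpenApiSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApiGatewayRestApi(this, "example", {
+      name: "example",
+      // Fn.jsonencode mirrors jsonencode() in the HCL example below.
+      body: Token.asString(
+        Fn.jsonencode({
+          openapi: "3.0.1",
+          info: { title: "example", version: "1.0" },
+          paths: {},
+        })
+      ),
+    });
+  }
+}
+```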
+ +```terraform +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" + + endpoint_configuration { + types = ["REGIONAL"] + } +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} +``` + +### OpenAPI Specification with Private Endpoints + +Using `putRestApiMode` = `merge` when importing the OpenAPI Specification, the AWS control plane will not delete all existing literal properties that are not explicitly set in the OpenAPI definition. Impacted API Gateway properties: ApiKeySourceType, BinaryMediaTypes, Description, EndpointConfiguration, MinimumCompressionSize, Name, Policy). + +```terraform +data "aws_availability_zones" "available" { + state = "available" + + filter { + name = "opt-in-status" + values = ["opt-in-not-required"] + } +} + +data "aws_region" "current" {} + +resource "aws_vpc" "example" { + cidr_block = "10.0.0.0/16" + enable_dns_support = true + enable_dns_hostnames = true +} + +resource "aws_default_security_group" "example" { + vpc_id = aws_vpc.example.id +} + +resource "aws_subnet" "example" { + availability_zone = data.aws_availability_zones.available.names[0] + cidr_block = cidrsubnet(aws_vpc.example.cidr_block, 8, 0) + vpc_id = aws_vpc.example.id +} + +resource "aws_vpc_endpoint" "example" { + count = 3 + + private_dns_enabled = false + security_group_ids = [aws_default_security_group.example.id] + service_name = "com.amazonaws.${data.aws_region.current.name}.execute-api" + subnet_ids = [aws_subnet.example.id] + vpc_endpoint_type = "Interface" + vpc_id = aws_vpc.example.id +} + +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" + put_rest_api_mode = "merge" + + endpoint_configuration { + types = ["PRIVATE"] + vpc_endpoint_ids = [aws_vpc_endpoint.example[0].id, aws_vpc_endpoint.example[1].id, aws_vpc_endpoint.example[2].id] + } +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} +``` + +### Terraform Resources + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by 
running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayDeployment } from "./.gen/providers/aws/api-gateway-deployment"; +import { ApiGatewayIntegration } from "./.gen/providers/aws/api-gateway-integration"; +import { ApiGatewayMethod } from "./.gen/providers/aws/api-gateway-method"; +import { ApiGatewayResource } from "./.gen/providers/aws/api-gateway-resource"; +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ApiGatewayRestApi(this, "example", { + name: "example", + }); + const awsApiGatewayResourceExample = new ApiGatewayResource( + this, + "example_1", + { + parentId: example.rootResourceId, + pathPart: "example", + restApiId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayResourceExample.overrideLogicalId("example"); + const awsApiGatewayMethodExample = new ApiGatewayMethod(this, "example_2", { + authorization: "NONE", + httpMethod: "GET", + resourceId: Token.asString(awsApiGatewayResourceExample.id), + restApiId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayMethodExample.overrideLogicalId("example"); + const awsApiGatewayIntegrationExample = new ApiGatewayIntegration( + this, + "example_3", + { + httpMethod: Token.asString(awsApiGatewayMethodExample.httpMethod), + resourceId: Token.asString(awsApiGatewayResourceExample.id), + restApiId: example.id, + type: "MOCK", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayIntegrationExample.overrideLogicalId("example"); + const awsApiGatewayDeploymentExample = new ApiGatewayDeployment( + this, + "example_4", + { + lifecycle: { + createBeforeDestroy: true, + }, + restApiId: example.id, + triggers: { + redeployment: Token.asString( + Fn.sha1( + Token.asString( + Fn.jsonencode([ + awsApiGatewayResourceExample.id, + awsApiGatewayMethodExample.id, + awsApiGatewayIntegrationExample.id, + ]) + ) + ) + ), + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayDeploymentExample.overrideLogicalId("example"); + const awsApiGatewayStageExample = new ApiGatewayStage(this, "example_5", { + deploymentId: Token.asString(awsApiGatewayDeploymentExample.id), + restApiId: example.id, + stageName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayStageExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiKeySource` - (Optional) Source of the API key for requests. Valid values are `header` (default) and `authorizer`. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`xAmazonApigatewayApiKeySource` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-api-key-source.html). 
If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value. +* `binaryMediaTypes` - (Optional) List of binary media types supported by the REST API. By default, the REST API supports only UTF-8-encoded text payloads. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`xAmazonApigatewayBinaryMediaTypes` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-binary-media-types.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value. +* `body` - (Optional) OpenAPI specification that defines the set of routes and integrations to create as part of the REST API. This configuration, and any updates to it, will replace all REST API configuration except values overridden in this resource configuration and other resource updates applied after this resource but before any `awsApiGatewayDeployment` creation. More information about REST API OpenAPI support can be found in the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-import-api.html). +* `description` - (Optional) Description of the REST API. If importing an OpenAPI specification via the `body` argument, this corresponds to the `infoDescription` field. If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value. +* `disableExecuteApiEndpoint` - (Optional) Whether clients can invoke your API by using the default execute-api endpoint. By default, clients can invoke your API with the default https://{api_id}.execute-api.{region}.amazonaws.com endpoint. To require that clients use a custom domain name to invoke your API, disable the default endpoint. Defaults to `false`. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`xAmazonApigatewayEndpointConfiguration` extension `disableExecuteApiEndpoint` property](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-endpoint-configuration.html). If the argument value is `true` and is different than the OpenAPI value, the argument value will override the OpenAPI value. +* `endpointConfiguration` - (Optional) Configuration block defining API endpoint configuration including endpoint type. Defined below. +* `minimumCompressionSize` - (Optional) Minimum response size to compress for the REST API. String containing an integer value between `1` and `10485760` (10MB). `1` will disable an existing compression configuration, and all other values will enable compression with the configured size. New resources can simply omit this argument to disable compression, rather than setting the value to `1`. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`xAmazonApigatewayMinimumCompressionSize` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-openapi-minimum-compression-size.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value. +* `name` - (Required) Name of the REST API. If importing an OpenAPI specification via the `body` argument, this corresponds to the `infoTitle` field. If the argument value is different than the OpenAPI value, the argument value will override the OpenAPI value. 
+* `failOnWarnings` - (Optional) Whether warnings encountered while API Gateway is creating or updating the resource should cause the operation to fail. Defaults to `false`.
+* `parameters` - (Optional) Map of customizations for importing the specification in the `body` argument. For example, to exclude DocumentationParts from an imported API, set `ignore` equal to `documentation`. Additional documentation, including other parameters such as `basepath`, can be found in the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-import-api.html).
+* `policy` - (Optional) JSON formatted policy document that controls access to the API Gateway. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). Terraform will only perform drift detection of its value when present in a configuration. We recommend using the [`awsApiGatewayRestApiPolicy` resource](/docs/providers/aws/r/api_gateway_rest_api_policy.html) instead. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`xAmazonApigatewayPolicy` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/openapi-extensions-policy.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `putRestApiMode` - (Optional) Mode of the PutRestApi operation when importing an OpenAPI specification via the `body` argument (create or update operation). Valid values are `merge` and `overwrite`. If unspecified, defaults to `overwrite` (for backwards compatibility). This corresponds to the [`xAmazonApigatewayPutIntegrationMethod` extension](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-put-integration-method.html). If the argument value is provided and is different than the OpenAPI value, the argument value will override the OpenAPI value.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+**Note**: If the `body` argument is provided, the OpenAPI specification will be used to configure the resources, methods and integrations for the Rest API. If this argument is provided, the following resources should not be managed as separate ones, as updates may cause manual resource updates to be overwritten:
+
+* `awsApiGatewayResource`
+* `awsApiGatewayMethod`
+* `awsApiGatewayMethodResponse`
+* `awsApiGatewayMethodSettings`
+* `awsApiGatewayIntegration`
+* `awsApiGatewayIntegrationResponse`
+* `awsApiGatewayGatewayResponse`
+* `awsApiGatewayModel`
+
+### endpoint_configuration
+
+* `types` - (Required) List of endpoint types. This resource currently only supports managing a single value. Valid values: `EDGE`, `REGIONAL` or `PRIVATE`. If unspecified, defaults to `EDGE`. If set to `PRIVATE`, it is recommended to also set `putRestApiMode` to `merge` so that the endpoints and the associated Route53 records are not deleted. Refer to the [documentation](https://docs.aws.amazon.com/apigateway/latest/developerguide/create-regional-api.html) for more information on the difference between edge-optimized and regional APIs.
+* `vpcEndpointIds` - (Optional) Set of VPC Endpoint identifiers. 
It is only supported for the `PRIVATE` endpoint type. If importing an OpenAPI specification via the `body` argument, this corresponds to the [`xAmazonApigatewayEndpointConfiguration` extension `vpcEndpointIds` property](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-swagger-extensions-endpoint-configuration.html). If the argument value is provided and is different than the OpenAPI value, **the argument value will override the OpenAPI value**.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN
+* `createdDate` - Creation date of the REST API
+* `executionArn` - Execution ARN part to be used in [`lambdaPermission`](/docs/providers/aws/r/lambda_permission.html)'s `sourceArn`
+  when allowing API Gateway to invoke a Lambda function,
+  e.g., `arn:aws:execute-api:eu-west-2:123456789012:z4675bid1j`, which can be concatenated with an allowed stage, method and resource path.
+* `id` - ID of the REST API
+* `rootResourceId` - Resource ID of the REST API's root
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayRestApi` using the REST API ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApiGatewayRestApi` using the REST API ID. For example:

+```console
+% terraform import aws_api_gateway_rest_api.example 12345abcde
+```
+
+~> **NOTE:** Resource import does not currently support the `body` attribute.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_rest_api_policy.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_rest_api_policy.html.markdown
new file mode 100644
index 00000000000..f999401a426
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_rest_api_policy.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_rest_api_policy"
+description: |-
+  Provides an API Gateway REST API Policy.
+---
+
+
+
+# Resource: aws_api_gateway_rest_api_policy
+
+Provides an API Gateway REST API Policy.
+
+-> **Note:** Amazon API Gateway Version 1 resources are used for creating and deploying REST APIs. To create and deploy WebSocket and HTTP APIs, use Amazon API Gateway Version 2 [resources](/docs/providers/aws/r/apigatewayv2_api.html).
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
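+ * The policy below allows execute-api:Invoke on this API only for
+ * requests originating from 123.123.123.123/32.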
+ */ +import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api"; +import { ApiGatewayRestApiPolicy } from "./.gen/providers/aws/api-gateway-rest-api-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new ApiGatewayRestApi(this, "test", { + name: "example-rest-api", + }); + const dataAwsIamPolicyDocumentTest = new DataAwsIamPolicyDocument( + this, + "test_1", + { + statement: [ + { + actions: ["execute-api:Invoke"], + condition: [ + { + test: "IpAddress", + values: ["123.123.123.123/32"], + variable: "aws:SourceIp", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [test.executionArn], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentTest.overrideLogicalId("test"); + const awsApiGatewayRestApiPolicyTest = new ApiGatewayRestApiPolicy( + this, + "test_2", + { + policy: Token.asString(dataAwsIamPolicyDocumentTest.json), + restApiId: test.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayRestApiPolicyTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `restApiId` - (Required) ID of the REST API. +* `policy` - (Required) JSON formatted policy document that controls access to the API Gateway. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the REST API + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayRestApiPolicy` using the REST API ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApiGatewayRestApiPolicy` using the REST API ID. For example: + +```console +% terraform import aws_api_gateway_rest_api_policy.example 12345abcde +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/api_gateway_stage.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_stage.html.markdown new file mode 100644 index 00000000000..df78dc772a3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/api_gateway_stage.html.markdown @@ -0,0 +1,202 @@ +--- +subcategory: "API Gateway" +layout: "aws" +page_title: "AWS: aws_api_gateway_stage" +description: |- + Manages an API Gateway Stage. +--- + + + +# Resource: aws_api_gateway_stage + +Manages an API Gateway Stage. A stage is a named reference to a deployment, which can be done via the [`awsApiGatewayDeployment` resource](api_gateway_deployment.html). 
Stages can be optionally managed further with the [`awsApiGatewayBasePathMapping` resource](api_gateway_base_path_mapping.html), [`awsApiGatewayDomainName` resource](api_gateway_domain_name.html), and [`awsApiGatewayMethodSettings` resource](api_gateway_method_settings.html). For more information, see the [API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-stages.html).
+
+## Example Usage
+
+An end-to-end example of a REST API configured with OpenAPI can be found in the [`/examples/apiGatewayRestApiOpenapi` directory within the GitHub repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/api-gateway-rest-api-openapi).
+
+```terraform
+resource "aws_api_gateway_rest_api" "example" {
+  body = jsonencode({
+    openapi = "3.0.1"
+    info = {
+      title   = "example"
+      version = "1.0"
+    }
+    paths = {
+      "/path1" = {
+        get = {
+          x-amazon-apigateway-integration = {
+            httpMethod           = "GET"
+            payloadFormatVersion = "1.0"
+            type                 = "HTTP_PROXY"
+            uri                  = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+          }
+        }
+      }
+    }
+  })
+
+  name = "example"
+}
+
+resource "aws_api_gateway_deployment" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+
+  triggers = {
+    redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body))
+  }
+
+  lifecycle {
+    create_before_destroy = true
+  }
+}
+
+resource "aws_api_gateway_stage" "example" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "example"
+}
+
+resource "aws_api_gateway_method_settings" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+  stage_name  = aws_api_gateway_stage.example.stage_name
+  method_path = "*/*"
+
+  settings {
+    metrics_enabled = true
+    logging_level   = "INFO"
+  }
+}
+```
+
+### Managing the API Logging CloudWatch Log Group
+
+API Gateway provides the ability to [enable CloudWatch API logging](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html). To manage the CloudWatch Log Group when this feature is enabled, the [`awsCloudwatchLogGroup` resource](/docs/providers/aws/r/cloudwatch_log_group.html) can be used where the name matches the API Gateway naming convention. If the CloudWatch Log Group previously exists, import the [`awsCloudwatchLogGroup` resource into Terraform](/docs/providers/aws/r/cloudwatch_log_group.html#import) as a one-time operation. You can recreate the environment without import.
+
+-> The configuration below uses [`dependsOn`](https://www.terraform.io/language/meta-arguments/depends_on) to prevent ordering issues where API Gateway creates the log group automatically, and a variable for naming consistency. Other ordering and naming methodologies may be more appropriate for your environment.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { VariableType, TerraformVariable, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
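+ * The log group name below follows API Gateway's naming convention:
+ * API-Gateway-Execution-Logs_{rest-api-id}/{stage-name}.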
+ */
+import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api";
+import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage";
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+interface MyConfig {
+  name: any;
+  deploymentId: any;
+  restApiId: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const stageName = new TerraformVariable(this, "stage_name", {
+      default: "example",
+      type: VariableType.STRING,
+    });
+    const example = new ApiGatewayRestApi(this, "example", {
+      name: config.name,
+    });
+    const awsCloudwatchLogGroupExample = new CloudwatchLogGroup(
+      this,
+      "example_2",
+      {
+        name:
+          "API-Gateway-Execution-Logs_${" +
+          example.id +
+          "}/${" +
+          stageName.value +
+          "}",
+        retentionInDays: 7,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogGroupExample.overrideLogicalId("example");
+    const awsApiGatewayStageExample = new ApiGatewayStage(this, "example_3", {
+      dependsOn: [awsCloudwatchLogGroupExample],
+      stageName: stageName.stringValue,
+      deploymentId: config.deploymentId,
+      restApiId: config.restApiId,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsApiGatewayStageExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `restApiId` - (Required) ID of the associated REST API.
+* `stageName` - (Required) Name of the stage.
+* `deploymentId` - (Required) ID of the deployment that the stage points to.
+* `accessLogSettings` - (Optional) Enables access logs for the API stage. See [Access Log Settings](#access-log-settings) below.
+* `cacheClusterEnabled` - (Optional) Whether a cache cluster is enabled for the stage.
+* `cacheClusterSize` - (Optional) Size of the cache cluster for the stage, if enabled. Allowed values include `0.5`, `1.6`, `6.1`, `13.5`, `28.4`, `58.2`, `118` and `237`.
+* `canarySettings` - (Optional) Configuration settings of a canary deployment. See [Canary Settings](#canary-settings) below.
+* `clientCertificateId` - (Optional) Identifier of a client certificate for the stage.
+* `description` - (Optional) Description of the stage.
+* `documentationVersion` - (Optional) Version of the associated API documentation.
+* `variables` - (Optional) Map that defines the stage variables.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `xrayTracingEnabled` - (Optional) Whether active tracing with X-Ray is enabled. Defaults to `false`.
+
+### Access Log Settings
+
+* `destinationArn` - (Required) ARN of the CloudWatch Logs log group or Kinesis Data Firehose delivery stream to receive access logs. If you specify a Kinesis Data Firehose delivery stream, the stream name must begin with `amazon-apigateway-`. Automatically removes trailing `:*` if present.
+* `format` - (Required) Formatting and values recorded in the logs. For more information on configuring the log format rules, see the AWS [documentation](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html).
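+
+As a hedged sketch of these two settings (the log group, deployment, and REST API identifiers are assumed to exist elsewhere in the stack; any structured JSON built from API Gateway `$context` variables works as the `format`):
+
+```typescript
+// Sketch only: attach JSON access logging to a stage. The referenced
+// resources are assumptions, mirroring the example above.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage";
+class AccessLogSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApiGatewayStage(this, "logged", {
+      deploymentId: Token.asString(awsApiGatewayDeploymentExample.id),
+      restApiId: Token.asString(awsApiGatewayRestApiExample.id),
+      stageName: "example",
+      accessLogSettings: {
+        destinationArn: Token.asString(awsCloudwatchLogGroupExample.arn),
+        // JSON access log format assembled from API Gateway $context variables.
+        format: JSON.stringify({
+          requestId: "$context.requestId",
+          sourceIp: "$context.identity.sourceIp",
+          httpMethod: "$context.httpMethod",
+          status: "$context.status",
+        }),
+      },
+    });
+  }
+}
+```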
+
+### Canary Settings
+
+* `percentTraffic` - (Optional) Percent (`0.0` - `100.0`) of traffic to divert to the canary deployment.
+* `stageVariableOverrides` - (Optional) Map of overridden stage `variables` (including new variables) for the canary deployment.
+* `useStageCache` - (Optional) Whether the canary deployment uses the stage cache. Defaults to `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN
+* `id` - ID of the stage
+* `invokeUrl` - URL to invoke the API pointing to the stage,
+  e.g., `https://z4675bid1j.execute-api.eu-west-2.amazonaws.com/prod`
+* `executionArn` - Execution ARN to be used in [`lambdaPermission`](/docs/providers/aws/r/lambda_permission.html)'s `sourceArn`
+  when allowing API Gateway to invoke a Lambda function,
+  e.g., `arn:aws:execute-api:eu-west-2:123456789012:z4675bid1j/prod`
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `webAclArn` - ARN of the WebAcl associated with the Stage.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApiGatewayStage` using `restApiId/stageName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApiGatewayStage` using `restApiId/stageName`. For example:
+
+```console
+% terraform import aws_api_gateway_stage.example 12345abcde/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_usage_plan.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_usage_plan.html.markdown
new file mode 100644
index 00000000000..895321d1f10
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_usage_plan.html.markdown
@@ -0,0 +1,167 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_usage_plan"
+description: |-
+  Provides an API Gateway Usage Plan.
+---
+
+
+# Resource: aws_api_gateway_usage_plan
+
+Provides an API Gateway Usage Plan.
+
+## Example Usage
+
+```terraform
+resource "aws_api_gateway_rest_api" "example" {
+  body = jsonencode({
+    openapi = "3.0.1"
+    info = {
+      title   = "example"
+      version = "1.0"
+    }
+    paths = {
+      "/path1" = {
+        get = {
+          x-amazon-apigateway-integration = {
+            httpMethod           = "GET"
+            payloadFormatVersion = "1.0"
+            type                 = "HTTP_PROXY"
+            uri                  = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+          }
+        }
+      }
+    }
+  })
+
+  name = "example"
+}
+
+resource "aws_api_gateway_deployment" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+
+  triggers = {
+    redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body))
+  }
+
+  lifecycle {
+    create_before_destroy = true
+  }
+}
+
+resource "aws_api_gateway_stage" "development" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "development"
+}
+
+resource "aws_api_gateway_stage" "production" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "production"
+}
+
+resource "aws_api_gateway_usage_plan" "example" {
+  name         = "my-usage-plan"
+  description  = "my description"
+  product_code = "MYCODE"
+
+  api_stages {
+    api_id = aws_api_gateway_rest_api.example.id
+    stage  = aws_api_gateway_stage.development.stage_name
+  }
+
+  api_stages {
+    api_id = aws_api_gateway_rest_api.example.id
+    stage  = aws_api_gateway_stage.production.stage_name
+  }
+
+  quota_settings {
+    limit  = 20
+    offset = 2
+    period = "WEEK"
+  }
+
+  throttle_settings {
+    burst_limit = 5
+    rate_limit  = 10
+  }
+}
+```
+
+## Argument Reference
+
+The API Gateway Usage Plan argument layout is a structure composed of several sub-resources; these resources are laid out below.
+
+### Top-Level Arguments
+
+* `name` - (Required) Name of the usage plan.
+* `description` - (Optional) Description of a usage plan.
+* `apiStages` - (Optional) Associated [API stages](#api-stages-arguments) of the usage plan.
+* `quotaSettings` - (Optional) The [quota settings](#quota-settings-arguments) of the usage plan.
+* `throttleSettings` - (Optional) The [throttling limits](#throttling-settings-arguments) of the usage plan.
+* `productCode` - (Optional) AWS Marketplace product identifier to associate with the usage plan as a SaaS product on AWS Marketplace.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Api Stages arguments
+
+* `apiId` (Required) - API ID of the associated API stage in a usage plan.
+* `stage` (Required) - API stage name of the associated API stage in a usage plan.
+* `throttle` - (Optional) The [throttling limits](#throttle) of the usage plan.
+
+##### Throttle
+
+* `path` (Required) - Method to apply the throttle settings for. Specify the path and method, for example `/test/GET`.
+* `burstLimit` (Optional) - The API request burst limit, the maximum rate limit over a time ranging from one to a few seconds, depending upon whether the underlying token bucket is at its full capacity.
+* `rateLimit` (Optional) - The API request steady-state rate limit.
+
+#### Quota Settings Arguments
+
+* `limit` (Optional) - Maximum number of requests that can be made in a given time period.
+* `offset` (Optional) - Number of requests subtracted from the given limit in the initial time period.
+* `period` (Optional) - Time period in which the limit applies. Valid values are "DAY", "WEEK" or "MONTH".
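+
+Read together, the quota in the example above allows `limit - offset = 18` requests in the first `WEEK` and 20 per week thereafter. A minimal, self-contained sketch of just that block (the resource name is illustrative):
+
+```typescript
+// Sketch only: a weekly quota of 20 requests, with 2 subtracted from the
+// first week's allowance (so 18 requests in the initial period).
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ApiGatewayUsagePlan } from "./.gen/providers/aws/api-gateway-usage-plan";
+class QuotaSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApiGatewayUsagePlan(this, "quota_example", {
+      name: "quota-example",
+      quotaSettings: { limit: 20, offset: 2, period: "WEEK" },
+    });
+  }
+}
+```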
+
+#### Throttling Settings Arguments
+
+* `burstLimit` (Optional) - The API request burst limit, the maximum rate limit over a time ranging from one to a few seconds, depending upon whether the underlying token bucket is at its full capacity.
+* `rateLimit` (Optional) - The API request steady-state rate limit.
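+
+Plan-wide `throttleSettings` (above) apply to every stage in the plan, while the per-stage `throttle` block shown earlier is keyed by resource path and method. A hedged sketch combining both; the REST API and stage references are assumptions mirroring the example above, and the `throttle` list shape follows the schema as documented:
+
+```typescript
+// Sketch only: plan-level limits plus a tighter per-method throttle.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { ApiGatewayUsagePlan } from "./.gen/providers/aws/api-gateway-usage-plan";
+class ThrottleSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApiGatewayUsagePlan(this, "throttled", {
+      name: "throttled-usage-plan",
+      apiStages: [
+        {
+          apiId: Token.asString(awsApiGatewayRestApiExample.id),
+          stage: Token.asString(awsApiGatewayStageDevelopment.stageName),
+          // Throttle one path/method harder than the plan-level default.
+          throttle: [{ path: "/test/GET", burstLimit: 5, rateLimit: 10 }],
+        },
+      ],
+      throttleSettings: { burstLimit: 10, rateLimit: 20 },
+    });
+  }
+}
+```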
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the API resource
+* `name` - Name of the usage plan.
+* `description` - Description of a usage plan.
+* `apiStages` - Associated API stages of the usage plan.
+* `quotaSettings` - Quota of the usage plan.
+* `throttleSettings` - Throttling limits of the usage plan.
+* `productCode` - AWS Marketplace product identifier to associate with the usage plan as a SaaS product on AWS Marketplace.
+* `arn` - ARN
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS API Gateway Usage Plan using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AWS API Gateway Usage Plan using the `id`. For example:
+
+```console
+% terraform import aws_api_gateway_usage_plan.myusageplan 12345abcde
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_usage_plan_key.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_usage_plan_key.html.markdown
new file mode 100644
index 00000000000..f9b0a46fa59
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_usage_plan_key.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_usage_plan_key"
+description: |-
+  Provides an API Gateway Usage Plan Key.
+---
+
+
+# Resource: aws_api_gateway_usage_plan_key
+
+Provides an API Gateway Usage Plan Key.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApiGatewayApiKey } from "./.gen/providers/aws/api-gateway-api-key";
+import { ApiGatewayRestApi } from "./.gen/providers/aws/api-gateway-rest-api";
+import { ApiGatewayUsagePlan } from "./.gen/providers/aws/api-gateway-usage-plan";
+import { ApiGatewayUsagePlanKey } from "./.gen/providers/aws/api-gateway-usage-plan-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mykey = new ApiGatewayApiKey(this, "mykey", {
+      name: "my_key",
+    });
+    const test = new ApiGatewayRestApi(this, "test", {
+      name: "MyDemoAPI",
+    });
+    const myusageplan = new ApiGatewayUsagePlan(this, "myusageplan", {
+      apiStages: [
+        {
+          apiId: test.id,
+          // "foo" refers to an ApiGatewayStage defined elsewhere in the stack.
+          stage: foo.stageName,
+        },
+      ],
+      name: "my_usage_plan",
+    });
+    new ApiGatewayUsagePlanKey(this, "main", {
+      keyId: mykey.id,
+      keyType: "API_KEY",
+      usagePlanId: myusageplan.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `keyId` - (Required) Identifier of the API key resource.
+* `keyType` - (Required) Type of the API key resource. Currently, the valid key type is API_KEY.
+* `usagePlanId` - (Required) ID of the usage plan resource to associate the key with.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of a usage plan key.
+* `keyId` - Identifier of the API Gateway key resource.
+* `keyType` - Type of a usage plan key. Currently, the valid key type is API_KEY.
+* `usagePlanId` - ID of the API resource
+* `name` - Name of a usage plan key.
+* `value` - Value of a usage plan key.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS API Gateway Usage Plan Key using the `usagePlanId/usagePlanKeyId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AWS API Gateway Usage Plan Key using the `usagePlanId/usagePlanKeyId`. For example:
+
+```console
+% terraform import aws_api_gateway_usage_plan_key.key 12345abcde/zzz
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/api_gateway_vpc_link.html.markdown b/website/docs/cdktf/typescript/r/api_gateway_vpc_link.html.markdown
new file mode 100644
index 00000000000..ebda9faf88b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/api_gateway_vpc_link.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "API Gateway"
+layout: "aws"
+page_title: "AWS: aws_api_gateway_vpc_link"
+description: |-
+  Provides an API Gateway VPC Link.
+---
+
+
+# Resource: aws_api_gateway_vpc_link
+
+Provides an API Gateway VPC Link.
+
+-> **Note:** Amazon API Gateway Version 1 VPC Links enable private integrations that connect REST APIs to private resources in a VPC.
+To enable private integration for HTTP APIs, use the Amazon API Gateway Version 2 VPC Link [resource](/docs/providers/aws/r/apigatewayv2_vpc_link.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayVpcLink } from "./.gen/providers/aws/api-gateway-vpc-link"; +import { Lb } from "./.gen/providers/aws/lb"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Lb(this, "example", { + internal: true, + loadBalancerType: "network", + name: "example", + subnetMapping: [ + { + subnetId: "12345", + }, + ], + }); + const awsApiGatewayVpcLinkExample = new ApiGatewayVpcLink( + this, + "example_1", + { + description: "example description", + name: "example", + targetArns: [example.arn], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApiGatewayVpcLinkExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name used to label and identify the VPC link. +* `description` - (Optional) Description of the VPC link. +* `targetArns` - (Required, ForceNew) List of network load balancer arns in the VPC targeted by the VPC link. Currently AWS only supports 1 target. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the VpcLink. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import API Gateway VPC Link using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import API Gateway VPC Link using the `id`. For example: + +```console +% terraform import aws_api_gateway_vpc_link.example 12345abcde +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_api.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_api.html.markdown new file mode 100644 index 00000000000..d66065b50ea --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_api.html.markdown @@ -0,0 +1,142 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_api" +description: |- + Manages an Amazon API Gateway Version 2 API. +--- + + + +# Resource: aws_apigatewayv2_api + +Manages an Amazon API Gateway Version 2 API. + +-> **Note:** Amazon API Gateway Version 2 resources are used for creating and deploying WebSocket and HTTP APIs. To create and deploy REST APIs, use Amazon API Gateway Version 1 [resources](/docs/providers/aws/r/api_gateway_rest_api.html). 
+
+## Example Usage
+
+### Basic WebSocket API
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Apigatewayv2Api } from "./.gen/providers/aws/apigatewayv2-api";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Api(this, "example", {
+      name: "example-websocket-api",
+      protocolType: "WEBSOCKET",
+      routeSelectionExpression: "$request.body.action",
+    });
+  }
+}
+
+```
+
+### Basic HTTP API
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Apigatewayv2Api } from "./.gen/providers/aws/apigatewayv2-api";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Api(this, "example", {
+      name: "example-http-api",
+      protocolType: "HTTP",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the API. Must be less than or equal to 128 characters in length.
+* `protocolType` - (Required) API protocol. Valid values: `HTTP`, `WEBSOCKET`.
+* `apiKeySelectionExpression` - (Optional) An [API key selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-apikey-selection-expressions).
+Valid values: `$context.authorizer.usageIdentifierKey`, `$request.header.x-api-key`. Defaults to `$request.header.x-api-key`.
+Applicable for WebSocket APIs.
+* `corsConfiguration` - (Optional) Cross-origin resource sharing (CORS) [configuration](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-cors.html). Applicable for HTTP APIs.
+* `credentialsArn` - (Optional) Part of _quick create_. Specifies any credentials required for the integration. Applicable for HTTP APIs.
+* `description` - (Optional) Description of the API. Must be less than or equal to 1024 characters in length.
+* `disableExecuteApiEndpoint` - (Optional) Whether clients can invoke the API by using the default `execute-api` endpoint.
+By default, clients can invoke the API with the default `{api_id}.execute-api.{region}.amazonaws.com` endpoint.
+To require that clients use a custom domain name to invoke the API, disable the default endpoint.
+* `routeKey` - (Optional) Part of _quick create_. Specifies any [route key](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-routes.html). Applicable for HTTP APIs.
+* `routeSelectionExpression` - (Optional) The [route selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-route-selection-expressions) for the API.
+Defaults to `$request.method $request.path`.
+* `tags` - (Optional) Map of tags to assign to the API. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `target` - (Optional) Part of _quick create_. Quick create produces an API with an integration, a default catch-all route, and a default stage which is configured to automatically deploy changes.
+For HTTP integrations, specify a fully qualified URL. For Lambda integrations, specify a function ARN.
+The type of the integration will be `HTTP_PROXY` or `AWS_PROXY`, respectively. Applicable for HTTP APIs.
+* `body` - (Optional) An OpenAPI specification that defines the set of routes and integrations to create as part of the HTTP API. Supported only for HTTP APIs.
+* `version` - (Optional) Version identifier for the API. Must be between 1 and 64 characters in length.
+* `failOnWarnings` - (Optional) Whether warnings should return an error while API Gateway is creating or updating the resource using an OpenAPI specification. Defaults to `false`. Applicable for HTTP APIs.
+
+__Note__: If the `body` argument is provided, the OpenAPI specification will be used to configure the integrations and route for the HTTP API. If this argument is provided, the following resources should not be managed as separate ones, as updates may cause manual resource updates to be overwritten:
+
+* `awsApigatewayv2Integration`
+* `awsApigatewayv2Route`
+
+Furthermore, the `name`, `description`, `corsConfiguration`, `tags` and `version` fields should be specified in the Terraform configuration and the values will override any values specified in the OpenAPI document.
+
+The `corsConfiguration` object supports the following:
+
+* `allowCredentials` - (Optional) Whether credentials are included in the CORS request.
+* `allowHeaders` - (Optional) Set of allowed HTTP headers.
+* `allowMethods` - (Optional) Set of allowed HTTP methods.
+* `allowOrigins` - (Optional) Set of allowed origins.
+* `exposeHeaders` - (Optional) Set of exposed HTTP headers.
+* `maxAge` - (Optional) Number of seconds that the browser should cache preflight request results.
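+
+To make the `corsConfiguration` block concrete, here is a hedged sketch of an HTTP API with CORS enabled; the values are placeholders, not recommendations:
+
+```typescript
+// Sketch only: an HTTP API that allows simple GET requests from a single
+// origin and caches preflight results for five minutes.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Apigatewayv2Api } from "./.gen/providers/aws/apigatewayv2-api";
+class CorsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Api(this, "cors_example", {
+      name: "example-http-api-with-cors",
+      protocolType: "HTTP",
+      corsConfiguration: {
+        allowCredentials: false,
+        allowHeaders: ["content-type"],
+        allowMethods: ["GET", "OPTIONS"],
+        allowOrigins: ["https://example.com"],
+        maxAge: 300,
+      },
+    });
+  }
+}
+```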
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - API identifier.
+* `apiEndpoint` - URI of the API, of the form `https://{api_id}.execute-api.{region}.amazonaws.com` for HTTP APIs and `wss://{api_id}.execute-api.{region}.amazonaws.com` for WebSocket APIs.
+* `arn` - ARN of the API.
+* `executionArn` - ARN prefix to be used in an [`awsLambdaPermission`](/docs/providers/aws/r/lambda_permission.html)'s `sourceArn` attribute
+or in an [`awsIamPolicy`](/docs/providers/aws/r/iam_policy.html) to authorize access to the [`@connections` API](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-how-to-call-websocket-api-connections.html).
+See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-control-access-iam.html) for details.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2Api` using the API identifier.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2Api` using the API identifier. For example: + +```console +% terraform import aws_apigatewayv2_api.example aabbccddee +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_api_mapping.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_api_mapping.html.markdown new file mode 100644 index 00000000000..bee33cc699f --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_api_mapping.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_api_mapping" +description: |- + Manages an Amazon API Gateway Version 2 API mapping. +--- + + + +# Resource: aws_apigatewayv2_api_mapping + +Manages an Amazon API Gateway Version 2 API mapping. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-custom-domains.html). + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2ApiMapping } from "./.gen/providers/aws/apigatewayv2-api-mapping"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2ApiMapping(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + domainName: Token.asString(awsApigatewayv2DomainNameExample.id), + stage: Token.asString(awsApigatewayv2StageExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) API identifier. +* `domainName` - (Required) Domain name. Use the [`awsApigatewayv2DomainName`](/docs/providers/aws/r/apigatewayv2_domain_name.html) resource to configure a domain name. +* `stage` - (Required) API stage. Use the [`awsApigatewayv2Stage`](/docs/providers/aws/r/apigatewayv2_stage.html) resource to configure an API stage. +* `apiMappingKey` - (Optional) The API mapping key. Refer to [REST API](https://docs.aws.amazon.com/apigateway/latest/developerguide/rest-api-mappings.html), [HTTP API](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-mappings.html) or [WebSocket API](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-mappings.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - API mapping identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2ApiMapping` using the API mapping identifier and domain name. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApigatewayv2ApiMapping` using the API mapping identifier and domain name. For example:
+
+```console
+% terraform import aws_apigatewayv2_api_mapping.example 1122334/ws-api.example.com
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_authorizer.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_authorizer.html.markdown
new file mode 100644
index 00000000000..b5f21f85ccf
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apigatewayv2_authorizer.html.markdown
@@ -0,0 +1,131 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_authorizer"
+description: |-
+  Manages an Amazon API Gateway Version 2 authorizer.
+---
+
+
+# Resource: aws_apigatewayv2_authorizer
+
+Manages an Amazon API Gateway Version 2 authorizer.
+More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html).
+
+## Example Usage
+
+### Basic WebSocket API
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Apigatewayv2Authorizer } from "./.gen/providers/aws/apigatewayv2-authorizer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Authorizer(this, "example", {
+      apiId: Token.asString(awsApigatewayv2ApiExample.id),
+      authorizerType: "REQUEST",
+      authorizerUri: Token.asString(awsLambdaFunctionExample.invokeArn),
+      identitySources: ["route.request.header.Auth"],
+      name: "example-authorizer",
+    });
+  }
+}
+
+```
+
+### Basic HTTP API
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Apigatewayv2Authorizer } from "./.gen/providers/aws/apigatewayv2-authorizer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Authorizer(this, "example", {
+      apiId: Token.asString(awsApigatewayv2ApiExample.id),
+      authorizerPayloadFormatVersion: "2.0",
+      authorizerType: "REQUEST",
+      authorizerUri: Token.asString(awsLambdaFunctionExample.invokeArn),
+      identitySources: ["$request.header.Authorization"],
+      name: "example-authorizer",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `apiId` - (Required) API identifier.
+* `authorizerType` - (Required) Authorizer type. Valid values: `JWT`, `REQUEST`.
+Specify `REQUEST` for a Lambda function using incoming request parameters.
+For HTTP APIs, specify `JWT` to use JSON Web Tokens.
+* `name` - (Required) Name of the authorizer. Must be between 1 and 128 characters in length.
+* `authorizerCredentialsArn` - (Optional) Required credentials as an IAM role for API Gateway to invoke the authorizer.
+Supported only for `REQUEST` authorizers.
+* `authorizerPayloadFormatVersion` - (Optional) Format of the payload sent to an HTTP API Lambda authorizer. Required for HTTP API Lambda authorizers.
+Valid values: `1.0`, `2.0`.
+* `authorizerResultTtlInSeconds` - (Optional) Time to live (TTL) for cached authorizer results, in seconds. If it equals 0, authorization caching is disabled.
+If it is greater than 0, API Gateway caches authorizer responses. The maximum value is 3600, or 1 hour. Defaults to `300`.
+Supported only for HTTP API Lambda authorizers.
+* `authorizerUri` - (Optional) Authorizer's Uniform Resource Identifier (URI).
+For `REQUEST` authorizers this must be a well-formed Lambda function URI, such as the `invokeArn` attribute of the [`awsLambdaFunction`](/docs/providers/aws/r/lambda_function.html) resource.
+Supported only for `REQUEST` authorizers. Must be between 1 and 2048 characters in length.
+* `enableSimpleResponses` - (Optional) Whether a Lambda authorizer returns a response in a simple format. If enabled, the Lambda authorizer can return a boolean value instead of an IAM policy.
+Supported only for HTTP APIs.
+* `identitySources` - (Optional) Identity sources for which authorization is requested.
+For `REQUEST` authorizers the value is a list of one or more mapping expressions of the specified request parameters.
+For `JWT` authorizers the single entry specifies where to extract the JSON Web Token (JWT) from inbound requests.
+* `jwtConfiguration` - (Optional) Configuration of a JWT authorizer. Required for the `JWT` authorizer type.
+Supported only for HTTP APIs.
+
+The `jwtConfiguration` object supports the following:
+
+* `audience` - (Optional) List of the intended recipients of the JWT. A valid JWT must provide an aud that matches at least one entry in this list.
+* `issuer` - (Optional) Base domain of the identity provider that issues JSON Web Tokens, such as the `endpoint` attribute of the [`awsCognitoUserPool`](/docs/providers/aws/r/cognito_user_pool.html) resource.
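+
+For the `JWT` authorizer type, `jwtConfiguration` carries the audience and issuer. A hedged sketch; the API and Cognito user pool references are assumptions, in the style of the examples above:
+
+```typescript
+// Sketch only: a JWT authorizer validating tokens issued by a Cognito user pool.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { Apigatewayv2Authorizer } from "./.gen/providers/aws/apigatewayv2-authorizer";
+class JwtAuthorizerSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Authorizer(this, "jwt_example", {
+      apiId: Token.asString(awsApigatewayv2ApiExample.id),
+      authorizerType: "JWT",
+      identitySources: ["$request.header.Authorization"],
+      name: "example-jwt-authorizer",
+      jwtConfiguration: {
+        audience: ["example"],
+        issuer: "https://" + Token.asString(awsCognitoUserPoolExample.endpoint),
+      },
+    });
+  }
+}
+```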
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Authorizer identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2Authorizer` using the API identifier and authorizer identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApigatewayv2Authorizer` using the API identifier and authorizer identifier.
For example: + +```console +% terraform import aws_apigatewayv2_authorizer.example aabbccddee/1122334 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_deployment.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_deployment.html.markdown new file mode 100644 index 00000000000..483bae86303 --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_deployment.html.markdown @@ -0,0 +1,132 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_deployment" +description: |- + Manages an Amazon API Gateway Version 2 deployment. +--- + + + +# Resource: aws_apigatewayv2_deployment + +Manages an Amazon API Gateway Version 2 deployment. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +-> **Note:** Creating a deployment for an API requires at least one `awsApigatewayv2Route` resource associated with that API. To avoid race conditions when all resources are being created together, you need to add implicit resource references via the `triggers` argument or explicit resource references using the [resource `dependsOn` meta-argument](https://www.terraform.io/docs/configuration/meta-arguments/depends_on.html). + +-> Enable the [resource `lifecycle` configuration block `createBeforeDestroy` argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy) in this resource configuration to properly order redeployments in Terraform. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2Deployment } from "./.gen/providers/aws/apigatewayv2-deployment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2Deployment(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + description: "Example deployment", + lifecycle: { + createBeforeDestroy: true, + }, + }); + } +} + +``` + +### Redeployment Triggers + +-> **NOTE:** This is an optional and Terraform 0.12 (or later) advanced configuration that shows calculating a hash of the API's Terraform resources to determine changes that should trigger a new deployment. This value will change after the first Terraform apply of new resources, triggering an immediate redeployment, however it will stabilize afterwards except for resource changes. The `triggers` map can also be configured in other, more complex ways to fit the environment, avoiding the immediate redeployment issue. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Apigatewayv2Deployment } from "./.gen/providers/aws/apigatewayv2-deployment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2Deployment(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + description: "Example deployment", + lifecycle: { + createBeforeDestroy: true, + }, + triggers: { + redeployment: Token.asString( + Fn.sha1( + Token.asString( + Fn.join( + ",", + Token.asList( + Fn.tolist([ + Fn.jsonencode(awsApigatewayv2IntegrationExample), + Fn.jsonencode(awsApigatewayv2RouteExample), + ]) + ) + ) + ) + ) + ), + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) API identifier. +* `description` - (Optional) Description for the deployment resource. Must be less than or equal to 1024 characters in length. +* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a redeployment. To force a redeployment without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Deployment identifier. +* `autoDeployed` - Whether the deployment was automatically released. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2Deployment` using the API identifier and deployment identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2Deployment` using the API identifier and deployment identifier. For example: + +```console +% terraform import aws_apigatewayv2_deployment.example aabbccddee/1122334 +``` + +The `triggers` argument cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_domain_name.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_domain_name.html.markdown new file mode 100644 index 00000000000..5e5e060ba2b --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_domain_name.html.markdown @@ -0,0 +1,159 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_domain_name" +description: |- + Manages an Amazon API Gateway Version 2 domain name. +--- + + + +# Resource: aws_apigatewayv2_domain_name + +Manages an Amazon API Gateway Version 2 domain name. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-custom-domains.html). + +-> **Note:** This resource establishes ownership of and the TLS settings for +a particular domain name. An API stage can be associated with the domain name using the `awsApigatewayv2ApiMapping` resource. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2DomainName } from "./.gen/providers/aws/apigatewayv2-domain-name"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2DomainName(this, "example", { + domainName: "ws-api.example.com", + domainNameConfiguration: { + certificateArn: Token.asString(awsAcmCertificateExample.arn), + endpointType: "REGIONAL", + securityPolicy: "TLS_1_2", + }, + }); + } +} + +``` + +### Associated Route 53 Resource Record + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2DomainName } from "./.gen/providers/aws/apigatewayv2-domain-name"; +import { Route53Record } from "./.gen/providers/aws/route53-record"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Apigatewayv2DomainName(this, "example", { + domainName: "http-api.example.com", + domainNameConfiguration: { + certificateArn: Token.asString(awsAcmCertificateExample.arn), + endpointType: "REGIONAL", + securityPolicy: "TLS_1_2", + }, + }); + const awsRoute53RecordExample = new Route53Record(this, "example_1", { + alias: { + evaluateTargetHealth: false, + name: Token.asString( + propertyAccess(example.domainNameConfiguration, [ + "0", + "target_domain_name", + ]) + ), + zoneId: Token.asString( + propertyAccess(example.domainNameConfiguration, [ + "0", + "hosted_zone_id", + ]) + ), + }, + name: example.domainName, + type: "A", + zoneId: Token.asString(awsRoute53ZoneExample.zoneId), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53RecordExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domainName` - (Required) Domain name. Must be between 1 and 512 characters in length. +* `domainNameConfiguration` - (Required) Domain name configuration. See below. +* `mutualTlsAuthentication` - (Optional) Mutual TLS authentication configuration for the domain name. +* `tags` - (Optional) Map of tags to assign to the domain name. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### `domainNameConfiguration` + +* `certificateArn` - (Required) ARN of an AWS-managed certificate that will be used by the endpoint for the domain name. AWS Certificate Manager is the only supported source. Use the [`awsAcmCertificate`](/docs/providers/aws/r/acm_certificate.html) resource to configure an ACM certificate. +* `endpointType` - (Required) Endpoint type. Valid values: `regional`. +* `hostedZoneId` - (Computed) Amazon Route 53 Hosted Zone ID of the endpoint. +* `ownershipVerificationCertificateArn` - (Optional) ARN of the AWS-issued certificate used to validate custom domain ownership (when `certificateArn` is issued via an ACM Private CA or `mutualTlsAuthentication` is configured with an ACM-imported certificate.) 
+* `securityPolicy` - (Required) Transport Layer Security (TLS) version of the [security policy](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-custom-domain-tls-version.html) for the domain name. Valid values: `TLS_1_2`.
+* `targetDomainName` - (Computed) Target domain name.
+
+### `mutualTlsAuthentication`
+
+* `truststoreUri` - (Required) Amazon S3 URL that specifies the truststore for mutual TLS authentication, for example, `s3://bucket-name/key-name`. The truststore can contain certificates from public or private certificate authorities. To update the truststore, upload a new version to S3, and then update your custom domain name to use the new version.
+* `truststoreVersion` - (Optional) Version of the S3 object that contains the truststore. To specify a version, you must have versioning enabled for the S3 bucket.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `apiMappingSelectionExpression` - [API mapping selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-mapping-selection-expressions) for the domain name.
+* `arn` - ARN of the domain name.
+* `id` - Domain name identifier.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `60m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2DomainName` using the domain name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApigatewayv2DomainName` using the domain name. For example:
+
+```console
+% terraform import aws_apigatewayv2_domain_name.example ws-api.example.com
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_integration.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_integration.html.markdown
new file mode 100644
index 00000000000..1991045dd5f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apigatewayv2_integration.html.markdown
@@ -0,0 +1,237 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_integration"
+description: |-
+  Manages an Amazon API Gateway Version 2 integration.
+---
+
+
+# Resource: aws_apigatewayv2_integration
+
+Manages an Amazon API Gateway Version 2 integration.
+More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html).
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2Integration } from "./.gen/providers/aws/apigatewayv2-integration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2Integration(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + integrationType: "MOCK", + }); + } +} + +``` + +### Lambda Integration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2Integration } from "./.gen/providers/aws/apigatewayv2-integration"; +import { LambdaFunction } from "./.gen/providers/aws/lambda-function"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new LambdaFunction(this, "example", { + filename: "example.zip", + functionName: "Example", + handler: "index.handler", + role: Token.asString(awsIamRoleExample.arn), + runtime: "nodejs16.x", + }); + const awsApigatewayv2IntegrationExample = new Apigatewayv2Integration( + this, + "example_1", + { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + connectionType: "INTERNET", + contentHandlingStrategy: "CONVERT_TO_TEXT", + description: "Lambda example", + integrationMethod: "POST", + integrationType: "AWS_PROXY", + integrationUri: example.invokeArn, + passthroughBehavior: "WHEN_NO_MATCH", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApigatewayv2IntegrationExample.overrideLogicalId("example"); + } +} + +``` + +### AWS Service Integration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2Integration } from "./.gen/providers/aws/apigatewayv2-integration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2Integration(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + credentialsArn: Token.asString(awsIamRoleExample.arn), + description: "SQS example", + integrationSubtype: "SQS-SendMessage", + integrationType: "AWS_PROXY", + requestParameters: { + MessageBody: "$request.body.message", + QueueUrl: "$request.header.queueUrl", + }, + }); + } +} + +``` + +### Private Integration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { Apigatewayv2Integration } from "./.gen/providers/aws/apigatewayv2-integration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Integration(this, "example", {
+      apiId: Token.asString(awsApigatewayv2ApiExample.id),
+      connectionId: Token.asString(awsApigatewayv2VpcLinkExample.id),
+      connectionType: "VPC_LINK",
+      credentialsArn: Token.asString(awsIamRoleExample.arn),
+      description: "Example with a load balancer",
+      integrationMethod: "ANY",
+      integrationType: "HTTP_PROXY",
+      integrationUri: Token.asString(awsLbListenerExample.arn),
+      requestParameters: {
+        "append:header.authforintegration":
+          "$context.authorizer.authorizerResponse",
+        "overwrite:path": "staticValueForIntegration",
+      },
+      responseParameters: [
+        {
+          mappings: {
+            "append:header.auth": "$context.authorizer.authorizerResponse",
+          },
+          statusCode: Token.asString(403),
+        },
+        {
+          mappings: {
+            "overwrite:statuscode": "204",
+          },
+          statusCode: Token.asString(200),
+        },
+      ],
+      tlsConfig: {
+        serverNameToVerify: "example.com",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `apiId` - (Required) API identifier.
+* `integrationType` - (Required) Integration type of an integration.
+Valid values: `AWS` (supported only for WebSocket APIs), `AWS_PROXY`, `HTTP` (supported only for WebSocket APIs), `HTTP_PROXY`, `MOCK` (supported only for WebSocket APIs). For an HTTP API private integration, use `HTTP_PROXY`.
+* `connectionId` - (Optional) ID of the [VPC link](apigatewayv2_vpc_link.html) for a private integration. Supported only for HTTP APIs. Must be between 1 and 1024 characters in length.
+* `connectionType` - (Optional) Type of the network connection to the integration endpoint. Valid values: `INTERNET`, `VPC_LINK`. Default is `INTERNET`.
+* `contentHandlingStrategy` - (Optional) How to handle response payload content type conversions. Valid values: `CONVERT_TO_BINARY`, `CONVERT_TO_TEXT`. Supported only for WebSocket APIs.
+* `credentialsArn` - (Optional) Credentials required for the integration, if any.
+* `description` - (Optional) Description of the integration.
+* `integrationMethod` - (Optional) Integration's HTTP method. Must be specified if `integrationType` is not `MOCK`.
+* `integrationSubtype` - (Optional) AWS service action to invoke. Supported only for HTTP APIs when `integrationType` is `AWS_PROXY`. See the [AWS service integration reference](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-aws-services-reference.html) documentation for supported values. Must be between 1 and 128 characters in length.
+* `integrationUri` - (Optional) URI of the Lambda function for a Lambda proxy integration, when `integrationType` is `AWS_PROXY`.
+For an `HTTP` integration, specify a fully-qualified URL. For an HTTP API private integration, specify the ARN of an Application Load Balancer listener, Network Load Balancer listener, or AWS Cloud Map service.
+* `passthroughBehavior` - (Optional) Pass-through behavior for incoming requests based on the Content-Type header in the request, and the available mapping templates specified as the `requestTemplates` attribute.
+Valid values: `WHEN_NO_MATCH`, `WHEN_NO_TEMPLATES`, `NEVER`. Default is `WHEN_NO_MATCH`. Supported only for WebSocket APIs.
+* `payloadFormatVersion` - (Optional) The [format of the payload](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.proxy-format) sent to an integration. Valid values: `1.0`, `2.0`. Default is `1.0`.
+* `requestParameters` - (Optional) For WebSocket APIs, a key-value map specifying request parameters that are passed from the method request to the backend.
+For HTTP APIs with a specified `integrationSubtype`, a key-value map specifying parameters that are passed to `AWS_PROXY` integrations.
+For HTTP APIs without a specified `integrationSubtype`, a key-value map specifying how to transform HTTP requests before sending them to the backend.
+See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-parameter-mapping.html) for details.
+* `requestTemplates` - (Optional) Map of [Velocity](https://velocity.apache.org/) templates that are applied on the request payload based on the value of the Content-Type header sent by the client. Supported only for WebSocket APIs.
+* `responseParameters` - (Optional) Mappings to transform the HTTP response from a backend integration before returning the response to clients. Supported only for HTTP APIs.
+* `templateSelectionExpression` - (Optional) The [template selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-template-selection-expressions) for the integration.
+* `timeoutMilliseconds` - (Optional) Custom timeout between 50 and 29,000 milliseconds for WebSocket APIs and between 50 and 30,000 milliseconds for HTTP APIs.
+The default timeout is 29 seconds for WebSocket APIs and 30 seconds for HTTP APIs.
+Terraform will only perform drift detection of its value when present in a configuration.
+* `tlsConfig` - (Optional) TLS configuration for a private integration. Supported only for HTTP APIs.
+
+The `responseParameters` object supports the following:
+
+* `statusCode` - (Required) HTTP status code in the range 200-599.
+* `mappings` - (Required) Key-value map. The key of this map identifies the location of the request parameter to change, and how to change it. The corresponding value specifies the new data for the parameter.
+See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-parameter-mapping.html) for details.
+
+The `tlsConfig` object supports the following:
+
+* `serverNameToVerify` - (Optional) If you specify a server name, API Gateway uses it to verify the hostname on the integration's certificate. The server name is also included in the TLS handshake to support Server Name Indication (SNI) or virtual hosting.
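+
+Tying several of these arguments together, a hedged sketch of an HTTP API Lambda proxy integration; the API and function references are assumptions, as in the examples above:
+
+```typescript
+// Sketch only: AWS_PROXY Lambda integration using payload format 2.0 and a
+// 12-second timeout (within the 50-30,000 ms range for HTTP APIs).
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { Apigatewayv2Integration } from "./.gen/providers/aws/apigatewayv2-integration";
+class LambdaIntegrationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2Integration(this, "lambda_example", {
+      apiId: Token.asString(awsApigatewayv2ApiExample.id),
+      integrationType: "AWS_PROXY",
+      integrationMethod: "POST",
+      integrationUri: Token.asString(awsLambdaFunctionExample.invokeArn),
+      payloadFormatVersion: "2.0",
+      timeoutMilliseconds: 12000,
+    });
+  }
+}
+```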
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Integration identifier.
+* `integrationResponseSelectionExpression` - The [integration response selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-integration-response-selection-expressions) for the integration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2Integration` using the API identifier and integration identifier.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2Integration` using the API identifier and integration identifier. For example: + +```console +% terraform import aws_apigatewayv2_integration.example aabbccddee/1122334 +``` + +-> **Note:** The API Gateway managed integration created as part of [_quick_create_](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-basic-concept.html#apigateway-definition-quick-create) cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_integration_response.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_integration_response.html.markdown new file mode 100644 index 00000000000..eb50d16260c --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_integration_response.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_integration_response" +description: |- + Manages an Amazon API Gateway Version 2 integration response. +--- + + + +# Resource: aws_apigatewayv2_integration_response + +Manages an Amazon API Gateway Version 2 integration response. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2IntegrationResponse } from "./.gen/providers/aws/apigatewayv2-integration-response"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2IntegrationResponse(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + integrationId: Token.asString(awsApigatewayv2IntegrationExample.id), + integrationResponseKey: "/200/", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) API identifier. +* `integrationId` - (Required) Identifier of the [`awsApigatewayv2Integration`](/docs/providers/aws/r/apigatewayv2_integration.html). +* `integrationResponseKey` - (Required) Integration response key. +* `contentHandlingStrategy` - (Optional) How to handle response payload content type conversions. Valid values: `convertToBinary`, `convertToText`. +* `responseTemplates` - (Optional) Map of Velocity templates that are applied on the request payload based on the value of the Content-Type header sent by the client. +* `templateSelectionExpression` - (Optional) The [template selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-template-selection-expressions) for the integration response. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Integration response identifier. 
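+
+Beyond the basic example above, the following sketch (not `cdktf convert` output) shows a response that applies a Velocity response template; the `awsApigatewayv2ApiExample` and `awsApigatewayv2IntegrationExample` references and the template body are illustrative assumptions.
+
+```typescript
+// Hypothetical sketch: assumes existing aws_apigatewayv2_api and
+// aws_apigatewayv2_integration constructs.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { Apigatewayv2IntegrationResponse } from "./.gen/providers/aws/apigatewayv2-integration-response";
+class TemplatedResponseSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Apigatewayv2IntegrationResponse(this, "templated", {
+      apiId: Token.asString(awsApigatewayv2ApiExample.id),
+      integrationId: Token.asString(awsApigatewayv2IntegrationExample.id),
+      integrationResponseKey: "$default",
+      // Pick the response template by the backend's status code.
+      templateSelectionExpression: "$integration.response.statuscode",
+      responseTemplates: {
+        "200": '{"message": "OK"}',
+      },
+    });
+  }
+}
+```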
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2IntegrationResponse` using the API identifier, integration identifier and integration response identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2IntegrationResponse` using the API identifier, integration identifier and integration response identifier. For example: + +```console +% terraform import aws_apigatewayv2_integration_response.example aabbccddee/1122334/998877 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_model.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_model.html.markdown new file mode 100644 index 00000000000..fcf104b3fde --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_model.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_model" +description: |- + Manages an Amazon API Gateway Version 2 model. +--- + + + +# Resource: aws_apigatewayv2_model + +Manages an Amazon API Gateway Version 2 [model](https://docs.aws.amazon.com/apigateway/latest/developerguide/models-mappings.html#models-mappings-models). + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2Model } from "./.gen/providers/aws/apigatewayv2-model"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2Model(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + contentType: "application/json", + name: "example", + schema: Token.asString( + Fn.jsonencode({ + $schema: "http://json-schema.org/draft-04/schema#", + properties: { + id: { + type: "string", + }, + }, + title: "ExampleModel", + type: "object", + }) + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) API identifier. +* `contentType` - (Required) The content-type for the model, for example, `application/json`. Must be between 1 and 256 characters in length. +* `name` - (Required) Name of the model. Must be alphanumeric. Must be between 1 and 128 characters in length. +* `schema` - (Required) Schema for the model. This should be a [JSON schema draft 4](https://tools.ietf.org/html/draft-zyp-json-schema-04) model. Must be less than or equal to 32768 characters in length. +* `description` - (Optional) Description of the model. Must be between 1 and 128 characters in length. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Model identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2Model` using the API identifier and model identifier. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2Model` using the API identifier and model identifier. For example: + +```console +% terraform import aws_apigatewayv2_model.example aabbccddee/1122334 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_route.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_route.html.markdown new file mode 100644 index 00000000000..d4e709dd4fe --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_route.html.markdown @@ -0,0 +1,156 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_route" +description: |- + Manages an Amazon API Gateway Version 2 route. +--- + + + +# Resource: aws_apigatewayv2_route + +Manages an Amazon API Gateway Version 2 route. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/welcome.html) for [WebSocket](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-develop-routes.html) and [HTTP](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-routes.html) APIs. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2Api } from "./.gen/providers/aws/apigatewayv2-api"; +import { Apigatewayv2Route } from "./.gen/providers/aws/apigatewayv2-route"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Apigatewayv2Api(this, "example", { + name: "example-websocket-api", + protocolType: "WEBSOCKET", + routeSelectionExpression: "$request.body.action", + }); + const awsApigatewayv2RouteExample = new Apigatewayv2Route( + this, + "example_1", + { + apiId: example.id, + routeKey: "$default", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApigatewayv2RouteExample.overrideLogicalId("example"); + } +} + +``` + +### HTTP Proxy Integration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { Apigatewayv2Api } from "./.gen/providers/aws/apigatewayv2-api";
+import { Apigatewayv2Integration } from "./.gen/providers/aws/apigatewayv2-integration";
+import { Apigatewayv2Route } from "./.gen/providers/aws/apigatewayv2-route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Apigatewayv2Api(this, "example", {
+      name: "example-http-api",
+      protocolType: "HTTP",
+    });
+    const awsApigatewayv2IntegrationExample = new Apigatewayv2Integration(
+      this,
+      "example_1",
+      {
+        apiId: example.id,
+        integrationMethod: "ANY",
+        integrationType: "HTTP_PROXY",
+        integrationUri: "https://example.com/{proxy}",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsApigatewayv2IntegrationExample.overrideLogicalId("example");
+    const awsApigatewayv2RouteExample = new Apigatewayv2Route(
+      this,
+      "example_2",
+      {
+        apiId: example.id,
+        routeKey: "ANY /example/{proxy+}",
+        target: "integrations/${" + awsApigatewayv2IntegrationExample.id + "}",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsApigatewayv2RouteExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `apiId` - (Required) API identifier.
+* `routeKey` - (Required) Route key for the route. For HTTP APIs, the route key can be either `$default`, or a combination of an HTTP method and resource path, for example, `GET /pets`.
+* `apiKeyRequired` - (Optional) Boolean whether an API key is required for the route. Defaults to `false`. Supported only for WebSocket APIs.
+* `authorizationScopes` - (Optional) Authorization scopes supported by this route. The scopes are used with a JWT authorizer to authorize the method invocation.
+* `authorizationType` - (Optional) Authorization type for the route.
+For WebSocket APIs, valid values are `NONE` for open access, `AWS_IAM` for using AWS IAM permissions, and `CUSTOM` for using a Lambda authorizer.
+For HTTP APIs, valid values are `NONE` for open access, `JWT` for using JSON Web Tokens, `AWS_IAM` for using AWS IAM permissions, and `CUSTOM` for using a Lambda authorizer.
+Defaults to `NONE`.
+* `authorizerId` - (Optional) Identifier of the [`awsApigatewayv2Authorizer`](apigatewayv2_authorizer.html) resource to be associated with this route.
+* `modelSelectionExpression` - (Optional) The [model selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-model-selection-expressions) for the route. Supported only for WebSocket APIs.
+* `operationName` - (Optional) Operation name for the route. Must be between 1 and 64 characters in length.
+* `requestModels` - (Optional) Request models for the route. Supported only for WebSocket APIs.
+* `requestParameter` - (Optional) Request parameters for the route. Supported only for WebSocket APIs.
+* `routeResponseSelectionExpression` - (Optional) The [route response selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-route-response-selection-expressions) for the route. Supported only for WebSocket APIs.
+* `target` - (Optional) Target for the route, of the form `integrations/`*`integrationId`*, where *`integrationId`* is the identifier of an [`awsApigatewayv2Integration`](apigatewayv2_integration.html) resource. + +The `requestParameter` object supports the following: + +* `requestParameterKey` - (Required) Request parameter key. This is a [request data mapping parameter](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-data-mapping.html#websocket-mapping-request-parameters). +* `required` - (Required) Boolean whether or not the parameter is required. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Route identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2Route` using the API identifier and route identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2Route` using the API identifier and route identifier. For example: + +```console +% terraform import aws_apigatewayv2_route.example aabbccddee/1122334 +``` + +-> **Note:** The API Gateway managed route created as part of [_quick_create_](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-basic-concept.html#apigateway-definition-quick-create) cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_route_response.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_route_response.html.markdown new file mode 100644 index 00000000000..66f89dcf7fe --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_route_response.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_route_response" +description: |- + Manages an Amazon API Gateway Version 2 route response. +--- + + + +# Resource: aws_apigatewayv2_route_response + +Manages an Amazon API Gateway Version 2 route response. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2RouteResponse } from "./.gen/providers/aws/apigatewayv2-route-response"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2RouteResponse(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + routeId: Token.asString(awsApigatewayv2RouteExample.id), + routeResponseKey: "$default", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) API identifier. +* `routeId` - (Required) Identifier of the [`awsApigatewayv2Route`](/docs/providers/aws/r/apigatewayv2_route.html). 
+* `routeResponseKey` - (Required) Route response key. +* `modelSelectionExpression` - (Optional) The [model selection expression](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-selection-expressions.html#apigateway-websocket-api-model-selection-expressions) for the route response. +* `responseModels` - (Optional) Response models for the route response. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Route response identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2RouteResponse` using the API identifier, route identifier and route response identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2RouteResponse` using the API identifier, route identifier and route response identifier. For example: + +```console +% terraform import aws_apigatewayv2_route_response.example aabbccddee/1122334/998877 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_stage.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_stage.html.markdown new file mode 100644 index 00000000000..72b5cf3ce76 --- /dev/null +++ b/website/docs/cdktf/typescript/r/apigatewayv2_stage.html.markdown @@ -0,0 +1,125 @@ +--- +subcategory: "API Gateway V2" +layout: "aws" +page_title: "AWS: aws_apigatewayv2_stage" +description: |- + Manages an Amazon API Gateway Version 2 stage. +--- + + + +# Resource: aws_apigatewayv2_stage + +Manages an Amazon API Gateway Version 2 stage. +More information can be found in the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api.html). + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Apigatewayv2Stage } from "./.gen/providers/aws/apigatewayv2-stage"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2Stage(this, "example", { + apiId: Token.asString(awsApigatewayv2ApiExample.id), + name: "example-stage", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `apiId` - (Required) API identifier. +* `name` - (Required) Name of the stage. Must be between 1 and 128 characters in length. + +The following arguments are optional: + +* `accessLogSettings` - (Optional) Settings for logging access in this stage. +Use the [`awsApiGatewayAccount`](/docs/providers/aws/r/api_gateway_account.html) resource to configure [permissions for CloudWatch Logging](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html#set-up-access-logging-permissions). +* `autoDeploy` - (Optional) Whether updates to an API automatically trigger a new deployment. Defaults to `false`. Applicable for HTTP APIs. 
+* `clientCertificateId` - (Optional) Identifier of a client certificate for the stage. Use the [`awsApiGatewayClientCertificate`](/docs/providers/aws/r/api_gateway_client_certificate.html) resource to configure a client certificate.
+Supported only for WebSocket APIs.
+* `defaultRouteSettings` - (Optional) Default route settings for the stage.
+* `deploymentId` - (Optional) Deployment identifier of the stage. Use the [`awsApigatewayv2Deployment`](/docs/providers/aws/r/apigatewayv2_deployment.html) resource to configure a deployment.
+* `description` - (Optional) Description for the stage. Must be less than or equal to 1024 characters in length.
+* `routeSettings` - (Optional) Route settings for the stage.
+* `stageVariables` - (Optional) Map that defines the stage variables for the stage.
+* `tags` - (Optional) Map of tags to assign to the stage. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### access_log_settings
+
+* `destinationArn` - (Required) ARN of the CloudWatch Logs log group to receive access logs. Any trailing `:*` is trimmed from the ARN.
+* `format` - (Required) Single-line [format](https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html#apigateway-cloudwatch-log-formats) of the access logs. Refer to the log settings for [HTTP](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-logging-variables.html) or [WebSocket](https://docs.aws.amazon.com/apigateway/latest/developerguide/websocket-api-logging.html) APIs.
+
+### default_route_settings
+
+* `dataTraceEnabled` - (Optional) Whether data trace logging is enabled for the default route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Defaults to `false`. Supported only for WebSocket APIs.
+* `detailedMetricsEnabled` - (Optional) Whether detailed metrics are enabled for the default route. Defaults to `false`.
+* `loggingLevel` - (Optional) Logging level for the default route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Valid values: `ERROR`, `INFO`, `OFF`. Defaults to `OFF`. Supported only for WebSocket APIs. Terraform will only perform drift detection of its value when present in a configuration.
+* `throttlingBurstLimit` - (Optional) Throttling burst limit for the default route.
+* `throttlingRateLimit` - (Optional) Throttling rate limit for the default route.
+
+### route_settings
+
+* `routeKey` - (Required) Route key.
+* `dataTraceEnabled` - (Optional) Whether data trace logging is enabled for the route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Defaults to `false`. Supported only for WebSocket APIs.
+* `detailedMetricsEnabled` - (Optional) Whether detailed metrics are enabled for the route. Defaults to `false`.
+* `loggingLevel` - (Optional) Logging level for the route. Affects the log entries pushed to Amazon CloudWatch Logs.
+Valid values: `ERROR`, `INFO`, `OFF`. Defaults to `OFF`. Supported only for WebSocket APIs. Terraform will only perform drift detection of its value when present in a configuration.
+* `throttlingBurstLimit` - (Optional) Throttling burst limit for the route.
+* `throttlingRateLimit` - (Optional) Throttling rate limit for the route.
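+
+The settings above are easiest to see together. The following sketch is not `cdktf convert` output: the `awsApigatewayv2ApiExample` reference and the log group name are assumptions, and the JSON log format is one arbitrary selection of `$context` variables.
+
+```typescript
+// Hypothetical sketch: assumes an existing aws_apigatewayv2_api construct.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { Apigatewayv2Stage } from "./.gen/providers/aws/apigatewayv2-stage";
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+class StageWithLoggingSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const logs = new CloudwatchLogGroup(this, "api_logs", {
+      name: "/aws/apigateway/example-http-api",
+      retentionInDays: 30,
+    });
+    new Apigatewayv2Stage(this, "example", {
+      apiId: Token.asString(awsApigatewayv2ApiExample.id),
+      name: "example-stage",
+      autoDeploy: true,
+      accessLogSettings: {
+        destinationArn: logs.arn,
+        // Single-line JSON format built from $context variables.
+        format: JSON.stringify({
+          requestId: "$context.requestId",
+          routeKey: "$context.routeKey",
+          status: "$context.status",
+        }),
+      },
+      defaultRouteSettings: {
+        throttlingBurstLimit: 100,
+        throttlingRateLimit: 50,
+      },
+    });
+  }
+}
+```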
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Stage identifier.
+* `arn` - ARN of the stage.
+* `executionArn` - ARN prefix to be used in an [`awsLambdaPermission`](/docs/providers/aws/r/lambda_permission.html)'s `sourceArn` attribute.
+For WebSocket APIs this attribute can additionally be used in an [`awsIamPolicy`](/docs/providers/aws/r/iam_policy.html) to authorize access to the [`@connections` API](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-how-to-call-websocket-api-connections.html).
+See the [Amazon API Gateway Developer Guide](https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-control-access-iam.html) for details.
+* `invokeUrl` - URL to invoke the API pointing to the stage,
+  e.g., `wss://z4675bid1j.execute-api.eu-west-2.amazonaws.com/example-stage`, or `https://z4675bid1j.execute-api.eu-west-2.amazonaws.com/`
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2Stage` using the API identifier and stage name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsApigatewayv2Stage` using the API identifier and stage name. For example:
+
+```console
+% terraform import aws_apigatewayv2_stage.example aabbccddee/example-stage
+```
+
+-> **Note:** The API Gateway managed stage created as part of [_quick_create_](https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-basic-concept.html#apigateway-definition-quick-create) cannot be imported.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apigatewayv2_vpc_link.html.markdown b/website/docs/cdktf/typescript/r/apigatewayv2_vpc_link.html.markdown
new file mode 100644
index 00000000000..f4a7efb627b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apigatewayv2_vpc_link.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "API Gateway V2"
+layout: "aws"
+page_title: "AWS: aws_apigatewayv2_vpc_link"
+description: |-
+  Manages an Amazon API Gateway Version 2 VPC Link.
+---
+
+
+
+# Resource: aws_apigatewayv2_vpc_link
+
+Manages an Amazon API Gateway Version 2 VPC Link.
+
+-> **Note:** Amazon API Gateway Version 2 VPC Links enable private integrations that connect HTTP APIs to private resources in a VPC.
+To enable private integration for REST APIs, use the Amazon API Gateway Version 1 VPC Link [resource](/docs/providers/aws/r/api_gateway_vpc_link.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { Apigatewayv2VpcLink } from "./.gen/providers/aws/apigatewayv2-vpc-link"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Apigatewayv2VpcLink(this, "example", { + name: "example", + securityGroupIds: [Token.asString(dataAwsSecurityGroupExample.id)], + subnetIds: Token.asList(dataAwsSubnetsExample.ids), + tags: { + Usage: "example", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the VPC Link. Must be between 1 and 128 characters in length. +* `securityGroupIds` - (Required) Security group IDs for the VPC Link. +* `subnetIds` - (Required) Subnet IDs for the VPC Link. +* `tags` - (Optional) Map of tags to assign to the VPC Link. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - VPC Link identifier. +* `arn` - VPC Link ARN. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsApigatewayv2VpcLink` using the VPC Link identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsApigatewayv2VpcLink` using the VPC Link identifier. For example: + +```console +% terraform import aws_apigatewayv2_vpc_link.example aabbccddee +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/app_cookie_stickiness_policy.html.markdown b/website/docs/cdktf/typescript/r/app_cookie_stickiness_policy.html.markdown new file mode 100644 index 00000000000..241bcc02728 --- /dev/null +++ b/website/docs/cdktf/typescript/r/app_cookie_stickiness_policy.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "ELB Classic" +layout: "aws" +page_title: "AWS: aws_app_cookie_stickiness_policy" +description: |- + Provides an application cookie stickiness policy, which allows an ELB to wed its stickiness cookie to a cookie generated by your application. +--- + + + +# Resource: aws_app_cookie_stickiness_policy + +Provides an application cookie stickiness policy, which allows an ELB to wed its sticky cookie's expiration to a cookie generated by your application. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AppCookieStickinessPolicy } from "./.gen/providers/aws/app-cookie-stickiness-policy"; +import { Elb } from "./.gen/providers/aws/elb"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const lb = new Elb(this, "lb", { + availabilityZones: ["us-east-1a"], + listener: [ + { + instancePort: 8000, + instanceProtocol: "http", + lbPort: 80, + lbProtocol: "http", + }, + ], + name: "test-lb", + }); + new AppCookieStickinessPolicy(this, "foo", { + cookieName: "MyAppCookie", + lbPort: 80, + loadBalancer: lb.name, + name: "foo_policy", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the stickiness policy. +* `loadBalancer` - (Required) Name of load balancer to which the policy + should be attached. +* `lbPort` - (Required) Load balancer port to which the policy + should be applied. This must be an active listener on the load +balancer. +* `cookieName` - (Required) Application cookie whose lifetime the ELB's cookie should follow. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the policy. +* `name` - Name of the stickiness policy. +* `loadBalancer` - Name of load balancer to which the policy is attached. +* `lbPort` - Load balancer port to which the policy is applied. +* `cookieName` - Application cookie whose lifetime the ELB's cookie should follow. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import application cookie stickiness policies using the ELB name, port, and policy name separated by colons (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import application cookie stickiness policies using the ELB name, port, and policy name separated by colons (`:`). For example: + +```console +% terraform import aws_app_cookie_stickiness_policy.example my-elb:80:my-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appautoscaling_policy.html.markdown b/website/docs/cdktf/typescript/r/appautoscaling_policy.html.markdown new file mode 100644 index 00000000000..bd225bf0753 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appautoscaling_policy.html.markdown @@ -0,0 +1,490 @@ +--- +subcategory: "Application Auto Scaling" +layout: "aws" +page_title: "AWS: aws_appautoscaling_policy" +description: |- + Provides an Application AutoScaling Policy resource. +--- + + + +# Resource: aws_appautoscaling_policy + +Provides an Application AutoScaling Policy resource. + +## Example Usage + +### DynamoDB Table Autoscaling + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AppautoscalingPolicy } from "./.gen/providers/aws/appautoscaling-policy"; +import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const dynamodbTableReadTarget = new AppautoscalingTarget( + this, + "dynamodb_table_read_target", + { + maxCapacity: 100, + minCapacity: 5, + resourceId: "table/tableName", + scalableDimension: "dynamodb:table:ReadCapacityUnits", + serviceNamespace: "dynamodb", + } + ); + new AppautoscalingPolicy(this, "dynamodb_table_read_policy", { + name: + "DynamoDBReadCapacityUtilization:${" + + dynamodbTableReadTarget.resourceId + + "}", + policyType: "TargetTrackingScaling", + resourceId: dynamodbTableReadTarget.resourceId, + scalableDimension: dynamodbTableReadTarget.scalableDimension, + serviceNamespace: dynamodbTableReadTarget.serviceNamespace, + targetTrackingScalingPolicyConfiguration: { + predefinedMetricSpecification: { + predefinedMetricType: "DynamoDBReadCapacityUtilization", + }, + targetValue: 70, + }, + }); + } +} + +``` + +### ECS Service Autoscaling + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Op, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppautoscalingPolicy } from "./.gen/providers/aws/appautoscaling-policy"; +import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const ecsTarget = new AppautoscalingTarget(this, "ecs_target", { + maxCapacity: 4, + minCapacity: 1, + resourceId: "service/clusterName/serviceName", + scalableDimension: "ecs:service:DesiredCount", + serviceNamespace: "ecs", + }); + new AppautoscalingPolicy(this, "ecs_policy", { + name: "scale-down", + policyType: "StepScaling", + resourceId: ecsTarget.resourceId, + scalableDimension: ecsTarget.scalableDimension, + serviceNamespace: ecsTarget.serviceNamespace, + stepScalingPolicyConfiguration: { + adjustmentType: "ChangeInCapacity", + cooldown: 60, + metricAggregationType: "Maximum", + stepAdjustment: [ + { + metricIntervalUpperBound: Token.asString(0), + scalingAdjustment: Token.asNumber(Op.negate(1)), + }, + ], + }, + }); + } +} + +``` + +### Preserve desired count when updating an autoscaled ECS Service + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EcsService } from "./.gen/providers/aws/ecs-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsService(this, "ecs_service", { + cluster: "clusterName", + desiredCount: 2, + lifecycle: { + ignoreChanges: [desiredCount], + }, + name: "serviceName", + taskDefinition: "taskDefinitionFamily:1", + }); + } +} + +``` + +### Aurora Read Replica Autoscaling + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppautoscalingPolicy } from "./.gen/providers/aws/appautoscaling-policy"; +import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const replicas = new AppautoscalingTarget(this, "replicas", { + maxCapacity: 15, + minCapacity: 1, + resourceId: "cluster:${" + example.id + "}", + scalableDimension: "rds:cluster:ReadReplicaCount", + serviceNamespace: "rds", + }); + const awsAppautoscalingPolicyReplicas = new AppautoscalingPolicy( + this, + "replicas_1", + { + name: "cpu-auto-scaling", + policyType: "TargetTrackingScaling", + resourceId: replicas.resourceId, + scalableDimension: replicas.scalableDimension, + serviceNamespace: replicas.serviceNamespace, + targetTrackingScalingPolicyConfiguration: { + predefinedMetricSpecification: { + predefinedMetricType: "RDSReaderAverageCPUUtilization", + }, + scaleInCooldown: 300, + scaleOutCooldown: 300, + targetValue: 75, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppautoscalingPolicyReplicas.overrideLogicalId("replicas"); + } +} + +``` + +### Create target tracking scaling policy using metric math + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AppautoscalingPolicy } from "./.gen/providers/aws/appautoscaling-policy";
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ecsTarget = new AppautoscalingTarget(this, "ecs_target", {
+      maxCapacity: 4,
+      minCapacity: 1,
+      resourceId: "service/clusterName/serviceName",
+      scalableDimension: "ecs:service:DesiredCount",
+      serviceNamespace: "ecs",
+    });
+    new AppautoscalingPolicy(this, "example", {
+      name: "foo",
+      policyType: "TargetTrackingScaling",
+      resourceId: ecsTarget.resourceId,
+      scalableDimension: ecsTarget.scalableDimension,
+      serviceNamespace: ecsTarget.serviceNamespace,
+      targetTrackingScalingPolicyConfiguration: {
+        customizedMetricSpecification: {
+          metrics: [
+            {
+              id: "m1",
+              label:
+                "Get the queue size (the number of messages waiting to be processed)",
+              metricStat: {
+                metric: {
+                  dimensions: [
+                    {
+                      name: "QueueName",
+                      value: "my-queue",
+                    },
+                  ],
+                  metricName: "ApproximateNumberOfMessagesVisible",
+                  namespace: "AWS/SQS",
+                },
+                stat: "Sum",
+              },
+              returnData: false,
+            },
+            {
+              id: "m2",
+              label:
+                "Get the ECS running task count (the number of currently running tasks)",
+              metricStat: {
+                metric: {
+                  dimensions: [
+                    {
+                      name: "ClusterName",
+                      value: "default",
+                    },
+                    {
+                      name: "ServiceName",
+                      value: "web-app",
+                    },
+                  ],
+                  metricName: "RunningTaskCount",
+                  namespace: "ECS/ContainerInsights",
+                },
+                stat: "Average",
+              },
+              returnData: false,
+            },
+            {
+              expression: "m1 / m2",
+              id: "e1",
+              label: "Calculate the backlog per instance",
+              returnData: true,
+            },
+          ],
+        },
+        targetValue: 100,
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the policy. Must be between 1 and 255 characters in length.
+* `policyType` - (Optional) Policy type. Valid values are `StepScaling` and `TargetTrackingScaling`. Defaults to `StepScaling`. Certain services support only one policy type. For more information see the [Target Tracking Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-target-tracking.html) and [Step Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-step-scaling-policies.html) documentation.
+* `resourceId` - (Required) Resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `resourceId` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html)
+* `scalableDimension` - (Required) Scalable dimension of the scalable target. Documentation can be found in the `scalableDimension` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html)
+* `serviceNamespace` - (Required) AWS service namespace of the scalable target. Documentation can be found in the `serviceNamespace` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html)
+* `stepScalingPolicyConfiguration` - (Optional) Step scaling policy configuration, requires `policy_type = "StepScaling"` (default). See supported fields below.
+* `targetTrackingScalingPolicyConfiguration` - (Optional) Target tracking policy, requires `policy_type = "TargetTrackingScaling"`. See supported fields below.
+
+### step_scaling_policy_configuration
+
+The `stepScalingPolicyConfiguration` configuration block supports the following arguments:
+
+* `adjustmentType` - (Required) Whether the adjustment is an absolute number or a percentage of the current capacity. Valid values are `ChangeInCapacity`, `ExactCapacity`, and `PercentChangeInCapacity`.
+* `cooldown` - (Required) Amount of time, in seconds, after a scaling activity completes and before the next scaling activity can start.
+* `metricAggregationType` - (Optional) Aggregation type for the policy's metrics. Valid values are `Minimum`, `Maximum`, and `Average`. Without a value, AWS will treat the aggregation type as `Average`.
+* `minAdjustmentMagnitude` - (Optional) Minimum number to adjust your scalable dimension as a result of a scaling activity. If the adjustment type is `PercentChangeInCapacity`, the scaling policy changes the scalable dimension of the scalable target by this amount.
+* `stepAdjustment` - (Optional) Set of adjustments that manage scaling. These have the following structure:
+
+  ```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Op, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppautoscalingPolicy } from "./.gen/providers/aws/appautoscaling-policy";
+interface MyConfig {
+  name: any;
+  resourceId: any;
+  scalableDimension: any;
+  serviceNamespace: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new AppautoscalingPolicy(this, "ecs_policy", {
+      stepScalingPolicyConfiguration: {
+        stepAdjustment: [
+          {
+            metricIntervalLowerBound: Token.asString(1),
+            metricIntervalUpperBound: Token.asString(2),
+            scalingAdjustment: Token.asNumber(Op.negate(1)),
+          },
+          {
+            metricIntervalLowerBound: Token.asString(2),
+            metricIntervalUpperBound: Token.asString(3),
+            scalingAdjustment: 1,
+          },
+        ],
+      },
+      name: config.name,
+      resourceId: config.resourceId,
+      scalableDimension: config.scalableDimension,
+      serviceNamespace: config.serviceNamespace,
+    });
+  }
+}
+
+```
+
+* `metricIntervalLowerBound` - (Optional) Lower bound for the difference between the alarm threshold and the CloudWatch metric. Without a value, AWS will treat this bound as negative infinity.
+* `metricIntervalUpperBound` - (Optional) Upper bound for the difference between the alarm threshold and the CloudWatch metric. Without a value, AWS will treat this bound as infinity. The upper bound must be greater than the lower bound.
+* `scalingAdjustment` - (Required) Number of members by which to scale, when the adjustment bounds are breached. A positive value scales up. A negative value scales down.
+
+### target_tracking_scaling_policy_configuration
+
+The `targetTrackingScalingPolicyConfiguration` configuration block supports the following arguments:
+
+* `targetValue` - (Required) Target value for the metric.
+* `disableScaleIn` - (Optional) Whether scale in by the target tracking policy is disabled. If the value is true, scale in is disabled and the target tracking policy won't remove capacity from the scalable resource.
Otherwise, scale in is enabled and the target tracking policy can remove capacity from the scalable resource. The default value is `false`. +* `scaleInCooldown` - (Optional) Amount of time, in seconds, after a scale in activity completes before another scale in activity can start. +* `scaleOutCooldown` - (Optional) Amount of time, in seconds, after a scale out activity completes before another scale out activity can start. +* `customizedMetricSpecification` - (Optional) Custom CloudWatch metric. Documentation can be found at: [AWS Customized Metric Specification](https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_CustomizedMetricSpecification.html). See supported fields below. +* `predefinedMetricSpecification` - (Optional) Predefined metric. See supported fields below. + +### target_tracking_scaling_policy_configuration customized_metric_specification + +Example usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppautoscalingPolicy } from "./.gen/providers/aws/appautoscaling-policy"; +interface MyConfig { + name: any; + resourceId: any; + scalableDimension: any; + serviceNamespace: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new AppautoscalingPolicy(this, "example", { + policyType: "TargetTrackingScaling", + targetTrackingScalingPolicyConfiguration: { + customizedMetricSpecification: { + dimensions: [ + { + name: "MyOptionalMetricDimensionName", + value: "MyOptionalMetricDimensionValue", + }, + ], + metricName: "MyUtilizationMetric", + namespace: "MyNamespace", + statistic: "Average", + unit: "Percent", + }, + targetValue: 40, + }, + name: config.name, + resourceId: config.resourceId, + scalableDimension: config.scalableDimension, + serviceNamespace: config.serviceNamespace, + }); + } +} + +``` + +The `targetTrackingScalingPolicyConfiguration` `customizedMetricSpecification` configuration block supports the following arguments: + +* `dimensions` - (Optional) Configuration block(s) with the dimensions of the metric if the metric was published with dimensions. Detailed below. +* `metricName` - (Optional) Name of the metric. +* `namespace` - (Optional) Namespace of the metric. +* `statistic` - (Optional) Statistic of the metric. Valid values: `average`, `minimum`, `maximum`, `sampleCount`, and `sum`. +* `unit` - (Optional) Unit of the metric. +* `metrics` - (Optional) Metrics to include, as a metric data query. + +### target_tracking_scaling_policy_configuration customized_metric_specification dimensions + +The `targetTrackingScalingPolicyConfiguration` `customizedMetricSpecification` `dimensions` configuration block supports the following arguments: + +* `name` - (Required) Name of the dimension. +* `value` - (Required) Value of the dimension. + +### target_tracking_scaling_policy_configuration customized_metric_specification metrics + +The `targetTrackingScalingPolicyConfiguration` `customizedMetricSpecification` `metrics` configuration block supports the following arguments: + +* `expression` - (Optional) Math expression used on the returned metric. You must specify either `expression` or `metricStat`, but not both. +* `id` - (Required) Short name for the metric used in target tracking scaling policy. 
+* `label` - (Optional) Human-readable label for this metric or expression.
+* `metricStat` - (Optional) Structure that defines CloudWatch metric to be used in target tracking scaling policy. You must specify either `expression` or `metricStat`, but not both.
+* `returnData` - (Optional) Boolean that indicates whether to return the timestamps and raw data values of this metric. Defaults to `true`.
+
+### target_tracking_scaling_policy_configuration customized_metric_specification metrics metric_stat
+
+The `targetTrackingScalingPolicyConfiguration` `customizedMetricSpecification` `metrics` `metricStat` configuration block supports the following arguments:
+
+* `metric` - (Required) Structure that defines the CloudWatch metric to return, including the metric name, namespace, and dimensions.
+* `stat` - (Required) Statistic of the metrics to return.
+* `unit` - (Optional) Unit of the metrics to return.
+
+### target_tracking_scaling_policy_configuration customized_metric_specification metrics metric
+
+The `targetTrackingScalingPolicyConfiguration` `customizedMetricSpecification` `metrics` `metric` configuration block supports the following arguments:
+
+* `dimensions` - (Optional) Dimensions of the metric.
+* `metricName` - (Required) Name of the metric.
+* `namespace` - (Required) Namespace of the metric.
+
+### target_tracking_scaling_policy_configuration customized_metric_specification metrics dimensions
+
+The `targetTrackingScalingPolicyConfiguration` `customizedMetricSpecification` `metrics` `dimensions` configuration block supports the following arguments:
+
+* `name` - (Required) Name of the dimension.
+* `value` - (Required) Value of the dimension.
+
+### target_tracking_scaling_policy_configuration predefined_metric_specification
+
+The `targetTrackingScalingPolicyConfiguration` `predefinedMetricSpecification` configuration block supports the following arguments:
+
+* `predefinedMetricType` - (Required) Metric type.
+* `resourceLabel` - (Optional) Reserved for future use if the `predefinedMetricType` is not `ALBRequestCountPerTarget`. If the `predefinedMetricType` is `ALBRequestCountPerTarget`, you must specify this argument. Documentation can be found at: [AWS Predefined Scaling Metric Specification](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_PredefinedScalingMetricSpecification.html). Must be less than or equal to 1023 characters in length.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `alarmArns` - List of CloudWatch alarm ARNs associated with the scaling policy.
+* `arn` - ARN assigned by AWS to the scaling policy.
+* `name` - Scaling policy's name.
+* `policyType` - Scaling policy's type.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Application AutoScaling Policy using the `serviceNamespace`, `resourceId`, `scalableDimension` and `policyName` separated by `/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Application AutoScaling Policy using the `serviceNamespace`, `resourceId`, `scalableDimension` and `policyName` separated by `/`.
For example: + +```console +% terraform import aws_appautoscaling_policy.test-policy service-namespace/resource-id/scalable-dimension/policy-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appautoscaling_scheduled_action.html.markdown b/website/docs/cdktf/typescript/r/appautoscaling_scheduled_action.html.markdown new file mode 100644 index 00000000000..559fcf58eb2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appautoscaling_scheduled_action.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "Application Auto Scaling" +layout: "aws" +page_title: "AWS: aws_appautoscaling_scheduled_action" +description: |- + Provides an Application AutoScaling ScheduledAction resource. +--- + + + +# Resource: aws_appautoscaling_scheduled_action + +Provides an Application AutoScaling ScheduledAction resource. + +## Example Usage + +### DynamoDB Table Autoscaling + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppautoscalingScheduledAction } from "./.gen/providers/aws/appautoscaling-scheduled-action"; +import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const dynamodb = new AppautoscalingTarget(this, "dynamodb", { + maxCapacity: 100, + minCapacity: 5, + resourceId: "table/tableName", + scalableDimension: "dynamodb:table:ReadCapacityUnits", + serviceNamespace: "dynamodb", + }); + const awsAppautoscalingScheduledActionDynamodb = + new AppautoscalingScheduledAction(this, "dynamodb_1", { + name: "dynamodb", + resourceId: dynamodb.resourceId, + scalableDimension: dynamodb.scalableDimension, + scalableTargetAction: { + maxCapacity: Token.asString(200), + minCapacity: Token.asString(1), + }, + schedule: "at(2006-01-02T15:04:05)", + serviceNamespace: dynamodb.serviceNamespace, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppautoscalingScheduledActionDynamodb.overrideLogicalId("dynamodb"); + } +} + +``` + +### ECS Service Autoscaling + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AppautoscalingScheduledAction } from "./.gen/providers/aws/appautoscaling-scheduled-action";
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ecs = new AppautoscalingTarget(this, "ecs", {
+      maxCapacity: 4,
+      minCapacity: 1,
+      resourceId: "service/clusterName/serviceName",
+      scalableDimension: "ecs:service:DesiredCount",
+      serviceNamespace: "ecs",
+    });
+    const awsAppautoscalingScheduledActionEcs =
+      new AppautoscalingScheduledAction(this, "ecs_1", {
+        name: "ecs",
+        resourceId: ecs.resourceId,
+        scalableDimension: ecs.scalableDimension,
+        scalableTargetAction: {
+          maxCapacity: Token.asString(10),
+          minCapacity: Token.asString(1),
+        },
+        schedule: "at(2006-01-02T15:04:05)",
+        serviceNamespace: ecs.serviceNamespace,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppautoscalingScheduledActionEcs.overrideLogicalId("ecs");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the scheduled action.
+* `serviceNamespace` - (Required) Namespace of the AWS service. Documentation can be found in the `serviceNamespace` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html) Example: `ecs`
+* `resourceId` - (Required) Identifier of the resource associated with the scheduled action. Documentation can be found in the `resourceId` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html)
+* `scalableDimension` - (Required) Scalable dimension. Documentation can be found in the `scalableDimension` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html) Example: `ecs:service:DesiredCount`
+* `scalableTargetAction` - (Required) New minimum and maximum capacity. You can set both values or just one (see the sketch after this list). See [below](#scalable-target-action-arguments)
+* `schedule` - (Required) Schedule for this action. The following formats are supported: At expressions - `at(yyyy-mm-ddThh:mm:ss)`, Rate expressions - `rate(value unit)`, Cron expressions - `cron(fields)`. Times for at expressions and cron expressions are evaluated using the time zone configured in `timezone`. Documentation can be found in the `timezone` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_PutScheduledAction.html)
+* `startTime` - (Optional) Date and time for the scheduled action to start in RFC 3339 format. The timezone is not affected by the setting of `timezone`.
+* `endTime` - (Optional) Date and time for the scheduled action to end in RFC 3339 format. The timezone is not affected by the setting of `timezone`.
+* `timezone` - (Optional) Time zone used when setting a scheduled action by using an at or cron expression. Does not affect timezone for `startTime` and `endTime`. Valid values are the [canonical names of the IANA time zones supported by Joda-Time](https://www.joda.org/joda-time/timezones.html), such as `Etc/GMT+9` or `Pacific/Tahiti`. Default is `UTC`.
+
+### Scalable Target Action Arguments
+
+* `maxCapacity` - (Optional) Maximum capacity. At least one of `maxCapacity` or `minCapacity` must be set.
+* `minCapacity` - (Optional) Minimum capacity. At least one of `minCapacity` or `maxCapacity` must be set.
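+
+To illustrate setting just one value, the sketch below (not `cdktf convert` output) lowers only the minimum capacity of an assumed ECS scalable target each evening; the resource names, cron expression, and time zone are illustrative.
+
+```typescript
+// Hypothetical sketch: scales the "ecs" target's minimum capacity down
+// nightly while leaving maxCapacity unmanaged.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { AppautoscalingScheduledAction } from "./.gen/providers/aws/appautoscaling-scheduled-action";
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class NightlyScaleDownSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ecs = new AppautoscalingTarget(this, "ecs", {
+      maxCapacity: 4,
+      minCapacity: 1,
+      resourceId: "service/clusterName/serviceName",
+      scalableDimension: "ecs:service:DesiredCount",
+      serviceNamespace: "ecs",
+    });
+    new AppautoscalingScheduledAction(this, "nightly_scale_down", {
+      name: "nightly-scale-down",
+      resourceId: ecs.resourceId,
+      scalableDimension: ecs.scalableDimension,
+      serviceNamespace: ecs.serviceNamespace,
+      // Only minCapacity is set; maxCapacity keeps its current value.
+      scalableTargetAction: {
+        minCapacity: Token.asString(1),
+      },
+      // Cron expression evaluated in the configured time zone.
+      schedule: "cron(0 20 * * ? *)",
+      timezone: "America/New_York",
+    });
+  }
+}
+```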
At least one of `maxCapacity` or `minCapacity` must be set.
+* `minCapacity` - (Optional) Minimum capacity. At least one of `minCapacity` or `maxCapacity` must be set.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the scheduled action.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appautoscaling_target.html.markdown b/website/docs/cdktf/typescript/r/appautoscaling_target.html.markdown
new file mode 100644
index 00000000000..51daa2654c5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appautoscaling_target.html.markdown
@@ -0,0 +1,198 @@
+---
+subcategory: "Application Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_appautoscaling_target"
+description: |-
+  Provides an Application AutoScaling ScalableTarget resource.
+---
+
+
+
+# Resource: aws_appautoscaling_target
+
+Provides an Application AutoScaling ScalableTarget resource. To manage policies which get attached to the target, see the [`awsAppautoscalingPolicy` resource](/docs/providers/aws/r/appautoscaling_policy.html).
+
+~> **NOTE:** Scalable targets created before 2023-03-20 may not have an assigned `arn`. These resources cannot use `tags` or participate in `defaultTags`. To prevent `terraform plan` from showing differences that can never be reconciled, use the [`lifecycleIgnoreChanges`](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes) meta-argument. See the example below.
+
+~> **NOTE:** The [Application Auto Scaling service automatically attempts to manage IAM Service-Linked Roles](https://docs.aws.amazon.com/autoscaling/application/userguide/security_iam_service-with-iam.html#security_iam_service-with-iam-roles) when registering certain service namespaces for the first time. To manually manage this role, see the [`awsIamServiceLinkedRole` resource](/docs/providers/aws/r/iam_service_linked_role.html).
+
+## Example Usage
+
+### DynamoDB Table Autoscaling
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppautoscalingTarget(this, "dynamodb_table_read_target", {
+      maxCapacity: 100,
+      minCapacity: 5,
+      resourceId: "table/${" + example.name + "}",
+      scalableDimension: "dynamodb:table:ReadCapacityUnits",
+      serviceNamespace: "dynamodb",
+    });
+  }
+}
+
+```
+
+### DynamoDB Index Autoscaling
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppautoscalingTarget(this, "dynamodb_index_read_target", {
+      maxCapacity: 100,
+      minCapacity: 5,
+      resourceId:
+        "table/${" + example.name + "}/index/${" + indexName.value + "}",
+      scalableDimension: "dynamodb:index:ReadCapacityUnits",
+      serviceNamespace: "dynamodb",
+    });
+  }
+}
+
+```
+
+### ECS Service Autoscaling
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppautoscalingTarget(this, "ecs_target", {
+      maxCapacity: 4,
+      minCapacity: 1,
+      resourceId:
+        "service/${" + example.name + "}/${" + awsEcsServiceExample.name + "}",
+      scalableDimension: "ecs:service:DesiredCount",
+      serviceNamespace: "ecs",
+    });
+  }
+}
+
+```
+
+### Aurora Read Replica Autoscaling
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppautoscalingTarget(this, "replicas", {
+      maxCapacity: 15,
+      minCapacity: 1,
+      resourceId: "cluster:${" + example.id + "}",
+      scalableDimension: "rds:cluster:ReadReplicaCount",
+      serviceNamespace: "rds",
+    });
+  }
+}
+
+```
+
+### Suppressing `tagsAll` Differences For Older Resources
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppautoscalingTarget } from "./.gen/providers/aws/appautoscaling-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppautoscalingTarget(this, "ecs_target", {
+      lifecycle: {
+        ignoreChanges: ["tagsAll"],
+      },
+      maxCapacity: 4,
+      minCapacity: 1,
+      resourceId:
+        "service/${" + example.name + "}/${" + awsEcsServiceExample.name + "}",
+      scalableDimension: "ecs:service:DesiredCount",
+      serviceNamespace: "ecs",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `maxCapacity` - (Required) Max capacity of the scalable target.
+* `minCapacity` - (Required) Min capacity of the scalable target.
+* `resourceId` - (Required) Resource type and unique identifier string for the resource associated with the scaling policy.
Documentation can be found in the `resourceId` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
+* `roleArn` - (Optional) ARN of the IAM role that allows Application AutoScaling to modify your scalable target on your behalf. This defaults to an IAM Service-Linked Role for most services and custom IAM Roles are ignored by the API for those namespaces. See the [AWS Application Auto Scaling documentation](https://docs.aws.amazon.com/autoscaling/application/userguide/security_iam_service-with-iam.html#security_iam_service-with-iam-roles) for more information about how this service interacts with IAM.
+* `scalableDimension` - (Required) Scalable dimension of the scalable target. Documentation can be found in the `scalableDimension` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
+* `serviceNamespace` - (Required) AWS service namespace of the scalable target. Documentation can be found in the `serviceNamespace` parameter at: [AWS Application Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/application/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
+* `tags` - (Optional) Map of tags to assign to the scalable target. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the scalable target.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Application AutoScaling Target using the `serviceNamespace`, `resourceId`, and `scalableDimension` separated by `/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Application AutoScaling Target using the `serviceNamespace`, `resourceId`, and `scalableDimension` separated by `/`. For example:
+
+```console
+% terraform import aws_appautoscaling_target.test-target service-namespace/resource-id/scalable-dimension
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appconfig_application.html.markdown b/website/docs/cdktf/typescript/r/appconfig_application.html.markdown
new file mode 100644
index 00000000000..f324f56baa7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appconfig_application.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_application"
+description: |-
+  Provides an AppConfig Application resource.
+--- + + + +# Resource: aws_appconfig_application + +Provides an AppConfig Application resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppconfigApplication } from "./.gen/providers/aws/appconfig-application"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppconfigApplication(this, "example", { + description: "Example AppConfig Application", + name: "example-application-tf", + tags: { + Type: "AppConfig Application", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name for the application. Must be between 1 and 64 characters in length. +* `description` - (Optional) Description of the application. Can be at most 1024 characters. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig Application. +* `id` - AppConfig application ID. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Applications using their application ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppConfig Applications using their application ID. For example: + +```console +% terraform import aws_appconfig_application.example 71rxuzt +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appconfig_configuration_profile.html.markdown b/website/docs/cdktf/typescript/r/appconfig_configuration_profile.html.markdown new file mode 100644 index 00000000000..5559bcf1991 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appconfig_configuration_profile.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_configuration_profile" +description: |- + Provides an AppConfig Configuration Profile resource. +--- + + + +# Resource: aws_appconfig_configuration_profile + +Provides an AppConfig Configuration Profile resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AppconfigConfigurationProfile } from "./.gen/providers/aws/appconfig-configuration-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppconfigConfigurationProfile(this, "example", {
+      applicationId: Token.asString(awsAppconfigApplicationExample.id),
+      description: "Example Configuration Profile",
+      locationUri: "hosted",
+      name: "example-configuration-profile-tf",
+      tags: {
+        Type: "AppConfig Configuration Profile",
+      },
+      validator: [
+        {
+          content: Token.asString(awsLambdaFunctionExample.arn),
+          type: "LAMBDA",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `applicationId` - (Required, Forces new resource) Application ID. Must be between 4 and 7 characters in length.
+* `locationUri` - (Required, Forces new resource) URI to locate the configuration. You can specify the AWS AppConfig hosted configuration store, Systems Manager (SSM) document, an SSM Parameter Store parameter, or an Amazon S3 object. For the hosted configuration store, specify `hosted`. For an SSM document, specify either the document name in the format `ssm-document://<document_name>` or the ARN. For a parameter, specify either the parameter name in the format `ssm-parameter://<parameter_name>` or the ARN. For an Amazon S3 object, specify the URI in the following format: `s3://<bucket>/<objectKey>`.
+* `name` - (Required) Name for the configuration profile. Must be between 1 and 64 characters in length.
+* `description` - (Optional) Description of the configuration profile. Can be at most 1024 characters.
+* `retrievalRoleArn` - (Optional) ARN of an IAM role with permission to access the configuration at the specified `locationUri`. A retrieval role ARN is not required for configurations stored in the AWS AppConfig `hosted` configuration store. It is required for all other sources that store your configuration.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `type` - (Optional) Type of configurations contained in the profile. Valid values: `AWS.AppConfig.FeatureFlags` and `AWS.Freeform`. Default: `AWS.Freeform`.
+* `validator` - (Optional) Set of methods for validating the configuration. Maximum of 2. See [Validator](#validator) below for more details.
+
+### Validator
+
+The `validator` block supports the following:
+
+* `content` - (Optional, Required when `type` is `LAMBDA`) Either the JSON Schema content or the ARN of an AWS Lambda function.
+* `type` - (Optional) Type of validator. Valid values: `JSON_SCHEMA` and `LAMBDA`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the AppConfig Configuration Profile.
+* `configurationProfileId` - The configuration profile ID.
+* `id` - AppConfig configuration profile ID and application ID separated by a colon (`:`).
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
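+
+The example above attaches a `LAMBDA` validator. For comparison, a minimal sketch of the `JSON_SCHEMA` variant is shown below. It is illustrative only, not `cdktf convert` output: the application reference (`awsAppconfigApplicationExample`) is assumed to exist as in the example above, and the inline schema is a placeholder.
+
+```typescript
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { AppconfigConfigurationProfile } from "./.gen/providers/aws/appconfig-configuration-profile";
+class JsonSchemaValidatorSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppconfigConfigurationProfile(this, "json_schema_example", {
+      // Assumed to be defined elsewhere in the stack, as in the example above.
+      applicationId: Token.asString(awsAppconfigApplicationExample.id),
+      locationUri: "hosted",
+      name: "example-json-schema-profile",
+      validator: [
+        {
+          // For JSON_SCHEMA validators the content is the schema itself,
+          // not a Lambda ARN; this placeholder schema accepts any object.
+          content: Token.asString(
+            Fn.jsonencode({
+              $schema: "http://json-schema.org/draft-04/schema#",
+              type: "object",
+            })
+          ),
+          type: "JSON_SCHEMA",
+        },
+      ],
+    });
+  }
+}
+```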
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Configuration Profiles using the configuration profile ID and application ID separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppConfig Configuration Profiles using the configuration profile ID and application ID separated by a colon (`:`). For example: + +```console +% terraform import aws_appconfig_configuration_profile.example 71abcde:11xxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appconfig_deployment.html.markdown b/website/docs/cdktf/typescript/r/appconfig_deployment.html.markdown new file mode 100644 index 00000000000..425ec7591a6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appconfig_deployment.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_deployment" +description: |- + Provides an AppConfig Deployment resource. +--- + + + +# Resource: aws_appconfig_deployment + +Provides an AppConfig Deployment resource for an [`awsAppconfigApplication` resource](appconfig_application.html.markdown). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppconfigDeployment } from "./.gen/providers/aws/appconfig-deployment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppconfigDeployment(this, "example", { + applicationId: Token.asString(awsAppconfigApplicationExample.id), + configurationProfileId: Token.asString( + awsAppconfigConfigurationProfileExample.configurationProfileId + ), + configurationVersion: Token.asString( + awsAppconfigHostedConfigurationVersionExample.versionNumber + ), + deploymentStrategyId: Token.asString( + awsAppconfigDeploymentStrategyExample.id + ), + description: "My example deployment", + environmentId: Token.asString( + awsAppconfigEnvironmentExample.environmentId + ), + tags: { + Type: "AppConfig Deployment", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required, Forces new resource) Application ID. Must be between 4 and 7 characters in length. +* `configurationProfileId` - (Required, Forces new resource) Configuration profile ID. Must be between 4 and 7 characters in length. +* `configurationVersion` - (Required, Forces new resource) Configuration version to deploy. Can be at most 1024 characters. +* `deploymentStrategyId` - (Required, Forces new resource) Deployment strategy ID or name of a predefined deployment strategy. See [Predefined Deployment Strategies](https://docs.aws.amazon.com/appconfig/latest/userguide/appconfig-creating-deployment-strategy.html#appconfig-creating-deployment-strategy-predefined) for more details. +* `description` - (Optional, Forces new resource) Description of the deployment. 
Can be at most 1024 characters. +* `environmentId` - (Required, Forces new resource) Environment ID. Must be between 4 and 7 characters in length. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AppConfig application ID, environment ID, and deployment number separated by a slash (`/`). +* `arn` - ARN of the AppConfig Deployment. +* `deploymentNumber` - Deployment number. +* `state` - State of the deployment. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Deployments using the application ID, environment ID, and deployment number separated by a slash (`/`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppConfig Deployments using the application ID, environment ID, and deployment number separated by a slash (`/`). For example: + +```console +% terraform import aws_appconfig_deployment.example 71abcde/11xxxxx/1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appconfig_deployment_strategy.html.markdown b/website/docs/cdktf/typescript/r/appconfig_deployment_strategy.html.markdown new file mode 100644 index 00000000000..e3e59f6289c --- /dev/null +++ b/website/docs/cdktf/typescript/r/appconfig_deployment_strategy.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_deployment_strategy" +description: |- + Provides an AppConfig Deployment Strategy resource. +--- + + + +# Resource: aws_appconfig_deployment_strategy + +Provides an AppConfig Deployment Strategy resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AppconfigDeploymentStrategy } from "./.gen/providers/aws/appconfig-deployment-strategy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppconfigDeploymentStrategy(this, "example", {
+      deploymentDurationInMinutes: 3,
+      description: "Example Deployment Strategy",
+      finalBakeTimeInMinutes: 4,
+      growthFactor: 10,
+      growthType: "LINEAR",
+      name: "example-deployment-strategy-tf",
+      replicateTo: "NONE",
+      tags: {
+        Type: "AppConfig Deployment Strategy",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deploymentDurationInMinutes` - (Required) Total amount of time for a deployment to last. Minimum value of 0, maximum value of 1440.
+* `growthFactor` - (Required) Percentage of targets to receive a deployed configuration during each interval. Minimum value of 1.0, maximum value of 100.0.
+* `name` - (Required, Forces new resource) Name for the deployment strategy. Must be between 1 and 64 characters in length.
+* `replicateTo` - (Required, Forces new resource) Where to save the deployment strategy. Valid values: `NONE` and `SSM_DOCUMENT`.
+* `description` - (Optional) Description of the deployment strategy. Can be at most 1024 characters.
+* `finalBakeTimeInMinutes` - (Optional) Amount of time AWS AppConfig monitors for alarms before considering the deployment to be complete and no longer eligible for automatic roll back. Minimum value of 0, maximum value of 1440.
+* `growthType` - (Optional) Algorithm used to define how percentage grows over time. Valid values: `LINEAR` and `EXPONENTIAL`. Defaults to `LINEAR`.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - AppConfig deployment strategy ID.
+* `arn` - ARN of the AppConfig Deployment Strategy.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Deployment Strategies using their deployment strategy ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AppConfig Deployment Strategies using their deployment strategy ID.
For example: + +```console +% terraform import aws_appconfig_deployment_strategy.example 11xxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appconfig_environment.html.markdown b/website/docs/cdktf/typescript/r/appconfig_environment.html.markdown new file mode 100644 index 00000000000..b90cb9ef3c7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appconfig_environment.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_environment" +description: |- + Provides an AppConfig Environment resource. +--- + + + +# Resource: aws_appconfig_environment + +Provides an AppConfig Environment resource for an [`awsAppconfigApplication` resource](appconfig_application.html.markdown). One or more environments can be defined for an application. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppconfigApplication } from "./.gen/providers/aws/appconfig-application"; +import { AppconfigEnvironment } from "./.gen/providers/aws/appconfig-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AppconfigApplication(this, "example", { + description: "Example AppConfig Application", + name: "example-application-tf", + tags: { + Type: "AppConfig Application", + }, + }); + const awsAppconfigEnvironmentExample = new AppconfigEnvironment( + this, + "example_1", + { + applicationId: example.id, + description: "Example AppConfig Environment", + monitor: [ + { + alarmArn: Token.asString(awsCloudwatchMetricAlarmExample.arn), + alarmRoleArn: Token.asString(awsIamRoleExample.arn), + }, + ], + name: "example-environment-tf", + tags: { + Type: "AppConfig Environment", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppconfigEnvironmentExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required, Forces new resource) AppConfig application ID. Must be between 4 and 7 characters in length. +* `name` - (Required) Name for the environment. Must be between 1 and 64 characters in length. +* `description` - (Optional) Description of the environment. Can be at most 1024 characters. +* `monitor` - (Optional) Set of Amazon CloudWatch alarms to monitor during the deployment process. Maximum of 5. See [Monitor](#monitor) below for more details. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Monitor + +The `monitor` block supports the following: + +* `alarmArn` - (Required) ARN of the Amazon CloudWatch alarm. +* `alarmRoleArn` - (Optional) ARN of an IAM role for AWS AppConfig to monitor `alarmArn`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig Environment. 
+* `id` - (**Deprecated**) AppConfig environment ID and application ID separated by a colon (`:`).
+* `environmentId` - AppConfig environment ID.
+* `state` - State of the environment. Possible values are `READY_FOR_DEPLOYMENT`, `DEPLOYING`, `ROLLING_BACK` or `ROLLED_BACK`.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Environments using the environment ID and application ID separated by a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AppConfig Environments using the environment ID and application ID separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_appconfig_environment.example 71abcde:11xxxxx
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appconfig_extension.html.markdown b/website/docs/cdktf/typescript/r/appconfig_extension.html.markdown
new file mode 100644
index 00000000000..99f4a977c66
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appconfig_extension.html.markdown
@@ -0,0 +1,150 @@
+---
+subcategory: "AppConfig"
+layout: "aws"
+page_title: "AWS: aws_appconfig_extension"
+description: |-
+  Provides an AppConfig Extension resource.
+---
+
+
+
+# Resource: aws_appconfig_extension
+
+Provides an AppConfig Extension resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppconfigExtension } from "./.gen/providers/aws/appconfig-extension";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new SnsTopic(this, "test", {
+      name: "test",
+    });
+    const dataAwsIamPolicyDocumentTest = new DataAwsIamPolicyDocument(
+      this,
+      "test_1",
+      {
+        statement: [
+          {
+            actions: ["sts:AssumeRole"],
+            principals: [
+              {
+                identifiers: ["appconfig.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentTest.overrideLogicalId("test");
+    const awsIamRoleTest = new IamRole(this, "test_2", {
+      assumeRolePolicy: Token.asString(dataAwsIamPolicyDocumentTest.json),
+      name: "test",
+    });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsIamRoleTest.overrideLogicalId("test");
+    const awsAppconfigExtensionTest = new AppconfigExtension(this, "test_3", {
+      actionPoint: [
+        {
+          action: [
+            {
+              name: "test",
+              roleArn: Token.asString(awsIamRoleTest.arn),
+              uri: test.arn,
+            },
+          ],
+          point: "ON_DEPLOYMENT_COMPLETE",
+        },
+      ],
+      description: "test description",
+      name: "test",
+      tags: {
+        Type: "AppConfig Extension",
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppconfigExtensionTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name for the extension. Each extension name in your account must be unique. Extension versions use the same name.
+* `description` - (Optional) Information about the extension.
+* `actionPoint` - (Required) The action points defined in the extension. [Detailed below](#actionpoint).
+* `parameter` - (Optional) The parameters accepted by the extension. You specify parameter values when you associate the extension to an AppConfig resource by using the CreateExtensionAssociation API action. For Lambda extension actions, these parameters are included in the Lambda request object. [Detailed below](#parameter).
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `actionPoint`
+
+Defines the actions the extension performs during the AppConfig workflow and at which point those actions are performed. The `actionPoint` configuration block supports the following arguments:
+
+* `point` - (Required) The point at which to perform the defined actions. Valid points are `PRE_CREATE_HOSTED_CONFIGURATION_VERSION`, `PRE_START_DEPLOYMENT`, `ON_DEPLOYMENT_START`, `ON_DEPLOYMENT_STEP`, `ON_DEPLOYMENT_BAKING`, `ON_DEPLOYMENT_COMPLETE`, `ON_DEPLOYMENT_ROLLED_BACK`.
+* `action` - (Required) An action defines the tasks the extension performs during the AppConfig workflow. [Detailed below](#action).
+
+#### `action`
+
+The `action` configuration block supports configuring any number of the following arguments:
+
+* `name` - (Required) The action name.
+* `roleArn` - (Required) An Amazon Resource Name (ARN) for an Identity and Access Management assume role.
+* `uri` - (Required) The extension URI associated with the action point in the extension definition. The URI can be an Amazon Resource Name (ARN) for one of the following: a Lambda function, an Amazon Simple Queue Service queue, an Amazon Simple Notification Service topic, or the Amazon EventBridge default event bus.
+* `description` - (Optional) Information about the action.
+
+#### `parameter`
+
+The `parameter` configuration block supports configuring any number of the following arguments:
+
+* `name` - (Required) The parameter name.
+* `required` - (Required) Determines if a parameter value must be specified in the extension association.
+* `description` - (Optional) Information about the parameter.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the AppConfig Extension.
+* `id` - AppConfig Extension ID.
+* `version` - The version number for the extension.
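+
+The example above defines an action point but no parameters. As a hedged sketch of how a `parameter` block composes with an action point (this is not `cdktf convert` output; the parameter name and description are illustrative, and `awsIamRoleTest` and `test` are the role and topic from the example above), an extension declaring a required parameter might look like:
+
+```typescript
+new AppconfigExtension(this, "parameterized", {
+  actionPoint: [
+    {
+      action: [
+        {
+          name: "notify",
+          roleArn: Token.asString(awsIamRoleTest.arn),
+          uri: test.arn,
+        },
+      ],
+      point: "ON_DEPLOYMENT_COMPLETE",
+    },
+  ],
+  name: "parameterized-extension",
+  // Associations of this extension must supply a value for this parameter.
+  parameter: [
+    {
+      name: "notificationPrefix", // illustrative parameter name
+      description: "Prefix added to deployment notifications.",
+      required: true,
+    },
+  ],
+});
+```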
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Extensions using their extension ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppConfig Extensions using their extension ID. For example: + +```console +% terraform import aws_appconfig_extension.example 71rxuzt +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appconfig_extension_association.html.markdown b/website/docs/cdktf/typescript/r/appconfig_extension_association.html.markdown new file mode 100644 index 00000000000..673425bf550 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appconfig_extension_association.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_extension_association" +description: |- + Associates an AppConfig Extension with a Resource. +--- + + + +# Resource: aws_appconfig_extension_association + +Associates an AppConfig Extension with a Resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppconfigApplication } from "./.gen/providers/aws/appconfig-application"; +import { AppconfigExtension } from "./.gen/providers/aws/appconfig-extension"; +import { AppconfigExtensionAssociation } from "./.gen/providers/aws/appconfig-extension-association"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new AppconfigApplication(this, "test", { + name: "test", + }); + const awsSnsTopicTest = new SnsTopic(this, "test_1", { + name: "test", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSnsTopicTest.overrideLogicalId("test"); + const dataAwsIamPolicyDocumentTest = new DataAwsIamPolicyDocument( + this, + "test_2", + { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["appconfig.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentTest.overrideLogicalId("test"); + const awsIamRoleTest = new IamRole(this, "test_3", { + assumeRolePolicy: Token.asString(dataAwsIamPolicyDocumentTest.json), + name: "test", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamRoleTest.overrideLogicalId("test"); + const awsAppconfigExtensionTest = new AppconfigExtension(this, "test_4", { + actionPoint: [ + { + action: [ + { + name: "test", + roleArn: Token.asString(awsIamRoleTest.arn), + uri: Token.asString(awsSnsTopicTest.arn), + }, + ], + point: "ON_DEPLOYMENT_COMPLETE", + }, + ], + description: "test description", + name: "test", + tags: { + Type: "AppConfig Extension", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppconfigExtensionTest.overrideLogicalId("test"); + const awsAppconfigExtensionAssociationTest = + new AppconfigExtensionAssociation(this, "test_5", { + extensionArn: Token.asString(awsAppconfigExtensionTest.arn), + resourceArn: test.arn, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppconfigExtensionAssociationTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `extensionArn` - (Required) The ARN of the extension defined in the association. +* `resourceArn` - (Optional) The ARN of the application, configuration profile, or environment to associate with the extension. +* `parameters` - (Optional) The parameter names and values defined for the association. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig Extension Association. +* `id` - AppConfig Extension Association ID. +* `extensionVersion` - The version number for the extension defined in the association. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Extension Associations using their extension association ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppConfig Extension Associations using their extension association ID. For example: + +```console +% terraform import aws_appconfig_extension_association.example 71rxuzt +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appconfig_hosted_configuration_version.html.markdown b/website/docs/cdktf/typescript/r/appconfig_hosted_configuration_version.html.markdown new file mode 100644 index 00000000000..caff8580520 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appconfig_hosted_configuration_version.html.markdown @@ -0,0 +1,158 @@ +--- +subcategory: "AppConfig" +layout: "aws" +page_title: "AWS: aws_appconfig_hosted_configuration_version" +description: |- + Provides an AppConfig Hosted Configuration Version resource. +--- + + + +# Resource: aws_appconfig_hosted_configuration_version + +Provides an AppConfig Hosted Configuration Version resource. + +## Example Usage + +### Freeform + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { AppconfigHostedConfigurationVersion } from "./.gen/providers/aws/appconfig-hosted-configuration-version"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppconfigHostedConfigurationVersion(this, "example", { + applicationId: Token.asString(awsAppconfigApplicationExample.id), + configurationProfileId: Token.asString( + awsAppconfigConfigurationProfileExample.configurationProfileId + ), + content: Token.asString( + Fn.jsonencode({ + foo: "bar", + fruit: ["apple", "pear", "orange"], + isThingEnabled: true, + }) + ), + contentType: "application/json", + description: "Example Freeform Hosted Configuration Version", + }); + } +} + +``` + +### Feature Flags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppconfigHostedConfigurationVersion } from "./.gen/providers/aws/appconfig-hosted-configuration-version"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppconfigHostedConfigurationVersion(this, "example", { + applicationId: Token.asString(awsAppconfigApplicationExample.id), + configurationProfileId: Token.asString( + awsAppconfigConfigurationProfileExample.configurationProfileId + ), + content: Token.asString( + Fn.jsonencode({ + flags: { + bar: { + attributes: { + someAttribute: { + constraints: { + required: true, + type: "string", + }, + }, + someOtherAttribute: { + constraints: { + required: true, + type: "number", + }, + }, + }, + name: "bar", + }, + foo: { + _deprecation: { + status: "planned", + }, + name: "foo", + }, + }, + values: { + bar: { + enabled: "true", + someAttribute: "Hello World", + someOtherAttribute: 123, + }, + foo: { + enabled: "true", + }, + }, + version: "1", + }) + ), + contentType: "application/json", + description: "Example Feature Flag Configuration Version", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required, Forces new resource) Application ID. +* `configurationProfileId` - (Required, Forces new resource) Configuration profile ID. +* `content` - (Required, Forces new resource) Content of the configuration or the configuration data. +* `contentType` - (Required, Forces new resource) Standard MIME type describing the format of the configuration content. For more information, see [Content-Type](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.17). +* `description` - (Optional, Forces new resource) Description of the configuration. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the AppConfig hosted configuration version. +* `id` - AppConfig application ID, configuration profile ID, and version number separated by a slash (`/`). +* `versionNumber` - Version number of the hosted configuration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppConfig Hosted Configuration Versions using the application ID, configuration profile ID, and version number separated by a slash (`/`). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppConfig Hosted Configuration Versions using the application ID, configuration profile ID, and version number separated by a slash (`/`). For example: + +```console +% terraform import aws_appconfig_hosted_configuration_version.example 71abcde/11xxxxx/2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appflow_connector_profile.html.markdown b/website/docs/cdktf/typescript/r/appflow_connector_profile.html.markdown new file mode 100644 index 00000000000..04c4511db00 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appflow_connector_profile.html.markdown @@ -0,0 +1,380 @@ +--- +subcategory: "AppFlow" +layout: "aws" +page_title: "AWS: aws_appflow_connector_profile" +description: |- + Provides an AppFlow Connector Profile resource. +--- + + + +# Resource: aws_appflow_connector_profile + +Provides an AppFlow connector profile resource. + +For information about AppFlow flows, see the [Amazon AppFlow API Reference][1]. +For specific information about creating an AppFlow connector profile, see the +[CreateConnectorProfile][2] page in the Amazon AppFlow API Reference. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppflowConnectorProfile } from "./.gen/providers/aws/appflow-connector-profile"; +import { DataAwsIamPolicy } from "./.gen/providers/aws/data-aws-iam-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { RedshiftCluster } from "./.gen/providers/aws/redshift-cluster"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new IamRole(this, "example", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "ec2.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + managedPolicyArns: [Token.asString(test.arn)], + name: "example_role", + }); + const awsRedshiftClusterExample = new RedshiftCluster(this, "example_1", { + clusterIdentifier: "example_cluster", + clusterType: "single-node", + databaseName: "example_db", + masterPassword: "examplePassword123!", + masterUsername: "exampleuser", + nodeType: "dc1.large", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRedshiftClusterExample.overrideLogicalId("example"); + const awsS3BucketExample = new S3Bucket(this, "example_2", { + bucket: "example_bucket", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsS3BucketExample.overrideLogicalId("example");
+    const dataAwsIamPolicyExample = new DataAwsIamPolicy(this, "example_3", {
+      name: "AmazonRedshiftAllCommandsFullAccess",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyExample.overrideLogicalId("example");
+    const awsAppflowConnectorProfileExample = new AppflowConnectorProfile(
+      this,
+      "example_4",
+      {
+        connectionMode: "Public",
+        connectorProfileConfig: {
+          connectorProfileCredentials: {
+            redshift: {
+              password: Token.asString(
+                awsRedshiftClusterExample.masterPassword
+              ),
+              username: Token.asString(
+                awsRedshiftClusterExample.masterUsername
+              ),
+            },
+          },
+          connectorProfileProperties: {
+            redshift: {
+              bucketName: Token.asString(awsS3BucketExample.name),
+              databaseUrl:
+                "jdbc:redshift://${" +
+                awsRedshiftClusterExample.endpoint +
+                "}/${" +
+                awsRedshiftClusterExample.databaseName +
+                "}",
+              roleArn: example.arn,
+            },
+          },
+        },
+        connectorType: "Redshift",
+        name: "example_profile",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppflowConnectorProfileExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The AppFlow connector profile argument layout is a complex structure. The following top-level arguments are supported:
+
+* `name` (Required) - Name of the connector profile. The name is unique for each `connectorProfile` in your AWS account.
+* `connectionMode` (Required) - Indicates the connection mode and specifies whether it is public or private. Private flows use AWS PrivateLink to route data over AWS infrastructure without exposing it to the public internet. One of: `Public`, `Private`.
+* `connectorLabel` (Optional) - The label of the connector. The label is unique for each ConnectorRegistration in your AWS account. Only needed if calling for the `CustomConnector` connector type.
+* `connectorProfileConfig` (Required) - Defines the connector-specific configuration and credentials. See [Connector Profile Config](#connector-profile-config) for more details.
+* `connectorType` (Required) - The type of connector. One of: `Amplitude`, `CustomConnector`, `CustomerProfiles`, `Datadog`, `Dynatrace`, `EventBridge`, `Googleanalytics`, `Honeycode`, `Infornexus`, `LookoutMetrics`, `Marketo`, `Redshift`, `S3`, `Salesforce`, `SAPOData`, `Servicenow`, `Singular`, `Slack`, `Snowflake`, `Trendmicro`, `Upsolver`, `Veeva`, `Zendesk`.
+* `kmsArn` (Optional) - ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
+
+### Connector Profile Config
+
+* `connectorProfileCredentials` (Required) - The connector-specific credentials required by each connector. See [Connector Profile Credentials](#connector-profile-credentials) for more details.
+* `connectorProfileProperties` (Required) - The connector-specific properties of the profile configuration. See [Connector Profile Properties](#connector-profile-properties) for more details.
+
+### Connector Profile Credentials
+
+* `amplitude` (Optional) - The connector-specific credentials required when using Amplitude.
See [Amplitude Connector Profile Credentials](#amplitude-connector-profile-credentials) for more details.
+* `customConnector` (Optional) - The connector-specific profile credentials required when using the custom connector. See [Custom Connector Profile Credentials](#custom-connector-profile-credentials) for more details.
+* `datadog` (Optional) - Connector-specific credentials required when using Datadog. See [Datadog Connector Profile Credentials](#datadog-connector-profile-credentials) for more details.
+* `dynatrace` (Optional) - The connector-specific credentials required when using Dynatrace. See [Dynatrace Connector Profile Credentials](#dynatrace-connector-profile-credentials) for more details.
+* `googleAnalytics` (Optional) - The connector-specific credentials required when using Google Analytics. See [Google Analytics Connector Profile Credentials](#google-analytics-connector-profile-credentials) for more details.
+* `honeycode` (Optional) - The connector-specific credentials required when using Amazon Honeycode. See [Honeycode Connector Profile Credentials](#honeycode-connector-profile-credentials) for more details.
+* `inforNexus` (Optional) - The connector-specific credentials required when using Infor Nexus. See [Infor Nexus Connector Profile Credentials](#infor-nexus-connector-profile-credentials) for more details.
+* `marketo` (Optional) - Connector-specific credentials required when using Marketo. See [Marketo Connector Profile Credentials](#marketo-connector-profile-credentials) for more details.
+* `redshift` (Optional) - Connector-specific credentials required when using Amazon Redshift. See [Redshift Connector Profile Credentials](#redshift-connector-profile-credentials) for more details.
+* `salesforce` (Optional) - The connector-specific credentials required when using Salesforce. See [Salesforce Connector Profile Credentials](#salesforce-connector-profile-credentials) for more details.
+* `sapoData` (Optional) - The connector-specific credentials required when using SAPOData. See [SAPOData Connector Profile Credentials](#sapodata-connector-profile-credentials) for more details.
+* `serviceNow` (Optional) - The connector-specific credentials required when using ServiceNow. See [ServiceNow Connector Profile Credentials](#servicenow-connector-profile-credentials) for more details.
+* `singular` (Optional) - Connector-specific credentials required when using Singular. See [Singular Connector Profile Credentials](#singular-connector-profile-credentials) for more details.
+* `slack` (Optional) - Connector-specific credentials required when using Slack. See [Slack Connector Profile Credentials](#slack-connector-profile-credentials) for more details.
+* `snowflake` (Optional) - The connector-specific credentials required when using Snowflake. See [Snowflake Connector Profile Credentials](#snowflake-connector-profile-credentials) for more details.
+* `trendmicro` (Optional) - The connector-specific credentials required when using Trend Micro. See [Trend Micro Connector Profile Credentials](#trendmicro-connector-profile-credentials) for more details.
+* `veeva` (Optional) - Connector-specific credentials required when using Veeva. See [Veeva Connector Profile Credentials](#veeva-connector-profile-credentials) for more details.
+* `zendesk` (Optional) - Connector-specific credentials required when using Zendesk. See [Zendesk Connector Profile Credentials](#zendesk-connector-profile-credentials) for more details.
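+
+Before the per-connector reference below, a hedged sketch shows how a credentials block and its matching properties block pair up under the same connector key, using Datadog as the example. This is illustrative rather than `cdktf convert` output; the key values and instance URL are placeholders.
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AppflowConnectorProfile } from "./.gen/providers/aws/appflow-connector-profile";
+class DatadogProfileSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppflowConnectorProfile(this, "datadog_example", {
+      connectionMode: "Public",
+      connectorProfileConfig: {
+        // Credentials and properties for a connector live under the same key.
+        connectorProfileCredentials: {
+          datadog: {
+            apiKey: "placeholder-api-key",
+            applicationKey: "placeholder-application-key",
+          },
+        },
+        connectorProfileProperties: {
+          datadog: {
+            instanceUrl: "https://api.datadoghq.com",
+          },
+        },
+      },
+      connectorType: "Datadog",
+      name: "example_datadog_profile",
+    });
+  }
+}
+```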
+ +#### Amplitude Connector Profile Credentials + +* `apiKey` (Required) - Unique alphanumeric identifier used to authenticate a user, developer, or calling program to your API. +* `secretKey` (Required) - The Secret Access Key portion of the credentials. + +#### Custom Connector Profile Credentials + +* `apiKey` (Optional) - API keys required for the authentication of the user. + * `apiKey` (Required) - The API key required for API key authentication. + * `apiSecretKey` (Optional) - The API secret key required for API key authentication. +* `authenticationType` (Required) - The authentication type that the custom connector uses for authenticating while creating a connector profile. One of: `apikey`, `basic`, `custom`, `oauth2`. +* `basic` (Optional) - Basic credentials that are required for the authentication of the user. + * `password` (Required) - The password to use to connect to a resource. + * `username` (Required) - The username to use to connect to a resource. +* `custom` (Optional) - If the connector uses the custom authentication mechanism, this holds the required credentials. + * `credentialsMap` (Optional) - A map that holds custom authentication credentials. + * `customAuthenticationType` (Required) - The custom authentication type that the connector uses. +* `oauth2` (Optional) - OAuth 2.0 credentials required for the authentication of the user. + * `accessToken` (Optional) - The access token used to access the connector on your behalf. + * `clientId` (Optional) - The identifier for the desired client. + * `clientSecret` (Optional) - The client secret used by the OAuth client to authenticate to the authorization server. + * `oauthRequest` (Optional) - Used by select connectors for which the OAuth workflow is supported. See [OAuth Request](#oauth-request) for more details. + * `refreshToken` (Optional) - The refresh token used to refresh an expired access token. + +#### Datadog Connector Profile Credentials + +* `apiKey` (Required) - Unique alphanumeric identifier used to authenticate a user, developer, or calling program to your API. +* `applicationKey` (Required) - Application keys, in conjunction with your API key, give you full access to Datadog’s programmatic API. Application keys are associated with the user account that created them. The application key is used to log all requests made to the API. + +#### Dynatrace Connector Profile Credentials + +* `apiToken` (Required) - The API tokens used by Dynatrace API to authenticate various API calls. + +#### Google Analytics Connector Profile Credentials + +* `accessToken` (Optional) - The credentials used to access protected Google Analytics resources. +* `clientId` (Required) - The identifier for the desired client. +* `clientSecret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server. +* `oauthRequest` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. +* `refreshToken` (Optional) - The credentials used to acquire new access tokens. This is required only for OAuth2 access tokens, and is not required for OAuth1 access tokens. + +#### Honeycode Connector Profile Credentials + +* `accessToken` (Optional) - The credentials used to access protected Amazon Honeycode resources. +* `oauthRequest` (Optional) - Used by select connectors for which the OAuth workflow is supported, such as Salesforce, Google Analytics, Marketo, Zendesk, and Slack. 
See [OAuth Request](#oauth-request) for more details. +* `refreshToken` (Optional) - The credentials used to acquire new access tokens. + +#### Infor Nexus Connector Profile Credentials + +* `accessKeyId` (Required) - The Access Key portion of the credentials. +* `datakey` (Required) - Encryption keys used to encrypt data. +* `secretAccessKey` (Required) - The secret key used to sign requests. +* `userId` (Required) - Identifier for the user. + +#### Marketo Connector Profile Credentials + +* `accessToken` (Optional) - The credentials used to access protected Marketo resources. +* `clientId` (Required) - The identifier for the desired client. +* `clientSecret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server. +* `oauthRequest` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. + +#### Redshift Connector Profile Credentials + +* `password` (Required) - Password that corresponds to the user name. +* `username` (Required) - Name of the user. + +#### Salesforce Connector Profile Credentials + +* `accessToken` (Optional) - The credentials used to access protected Salesforce resources. +* `clientCredentialsArn` (Optional) - The Secrets Manager ARN, which contains the client ID and client secret of the connected app. +* `oauthRequest` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. +* `refreshToken` (Optional) - The credentials used to acquire new access tokens. + +A sketch showing how these OAuth-style credential blocks nest appears after the Snowflake section below. + +#### SAPOData Connector Profile Credentials + +* `basicAuthCredentials` (Optional) - The SAPOData basic authentication credentials. + * `password` (Required) - The password to use to connect to a resource. + * `username` (Required) - The username to use to connect to a resource. +* `oauthCredentials` (Optional) - The SAPOData OAuth type authentication credentials. + * `accessToken` (Optional) - The access token used to access protected SAPOData resources. + * `clientId` (Required) - The identifier for the desired client. + * `clientSecret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server. + * `oauthRequest` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. + * `refreshToken` (Optional) - The refresh token used to refresh an expired access token. + +#### ServiceNow Connector Profile Credentials + +* `password` (Required) - Password that corresponds to the user name. +* `username` (Required) - Name of the user. + +#### Singular Connector Profile Credentials + +* `apiKey` (Required) - Unique alphanumeric identifier used to authenticate a user, developer, or calling program to your API. + +#### Slack Connector Profile Credentials + +* `accessToken` (Optional) - The credentials used to access protected Slack resources. +* `clientId` (Required) - The identifier for the client. +* `clientSecret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server. +* `oauthRequest` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. + +#### Snowflake Connector Profile Credentials + +* `password` (Required) - Password that corresponds to the user name. +* `username` (Required) - Name of the user. 
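+As referenced above, the OAuth-style credential blocks (Google Analytics, Marketo, Salesforce, Slack, and the SAPOData `oauthCredentials` block) all nest the same way. Below is a hedged sketch using Salesforce; the token strings and instance URL are placeholders, and in practice tokens would come from a secret store.
+
+```typescript
+// Sketch of how an OAuth-style credentials block pairs with its
+// properties block inside connectorProfileConfig; values are placeholders.
+const salesforceProfileConfig = {
+  connectorProfileCredentials: {
+    salesforce: {
+      accessToken: "replace-with-access-token", // placeholder
+      refreshToken: "replace-with-refresh-token", // placeholder
+    },
+  },
+  connectorProfileProperties: {
+    salesforce: {
+      instanceUrl: "https://example.my.salesforce.com", // placeholder
+      isSandboxEnvironment: false,
+    },
+  },
+};
+```
+
+In a full stack, this object would be passed as `connectorProfileConfig` on an `AppflowConnectorProfile` with `connectorType: "Salesforce"`.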
+ +#### Trendmicro Connector Profile Credentials + +* `apiSecretKey` (Required) - The Secret Access Key portion of the credentials. + +#### Veeva Connector Profile Credentials + +* `password` (Required) - Password that corresponds to the user name. +* `username` (Required) - Name of the user. + +#### Zendesk Connector Profile Credentials + +* `accessToken` (Optional) - The credentials used to access protected Zendesk resources. +* `clientId` (Required) - The identifier for the desired client. +* `clientSecret` (Required) - The client secret used by the OAuth client to authenticate to the authorization server. +* `oauthRequest` (Optional) - The OAuth requirement needed to request security tokens from the connector endpoint. See [OAuth Request](#oauth-request) for more details. + +##### OAuth Request + +* `authCode` (Optional) - The code provided by the connector when it has been authenticated via the connected app. +* `redirectUri` (Optional) - The URL to which the authentication server redirects the browser after authorization has been granted. + +### Connector Profile Properties + +* `customConnector` (Optional) - The connector-specific profile properties required when using the custom connector. See [Custom Connector Profile Properties](#custom-connector-profile-properties) for more details. +* `datadog` (Optional) - Connector-specific properties required when using Datadog. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `dynatrace` (Optional) - The connector-specific properties required when using Dynatrace. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `inforNexus` (Optional) - The connector-specific properties required when using Infor Nexus. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `marketo` (Optional) - Connector-specific properties required when using Marketo. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `redshift` (Optional) - Connector-specific properties required when using Amazon Redshift. See [Redshift Connector Profile Properties](#redshift-connector-profile-properties) for more details. +* `salesforce` (Optional) - The connector-specific properties required when using Salesforce. See [Salesforce Connector Profile Properties](#salesforce-connector-profile-properties) for more details. +* `sapoData` (Optional) - The connector-specific properties required when using SAPOData. See [SAPOData Connector Profile Properties](#sapodata-connector-profile-properties) for more details. +* `serviceNow` (Optional) - The connector-specific properties required when using ServiceNow. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `slack` (Optional) - Connector-specific properties required when using Slack. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `snowflake` (Optional) - The connector-specific properties required when using Snowflake. See [Snowflake Connector Profile Properties](#snowflake-connector-profile-properties) for more details. +* `veeva` (Optional) - Connector-specific properties required when using Veeva. See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. +* `zendesk` (Optional) - Connector-specific properties required when using Zendesk. 
See [Generic Connector Profile Properties](#generic-connector-profile-properties) for more details. + +#### Custom Connector Profile Properties + +* `oauth2Properties` (Optional) - The OAuth 2.0 properties required for OAuth 2.0 authentication. + * `oauth2GrantType` (Required) - The OAuth 2.0 grant type used by the connector for OAuth 2.0 authentication. One of: `authorizationCode`, `clientCredentials`. + * `tokenUrl` (Required) - The token URL required for OAuth 2.0 authentication. + * `tokenUrlCustomProperties` (Optional) - Associates your token URL with a map of properties that you define. Use this parameter to provide any additional details that the connector requires to authenticate your request. +* `profileProperties` (Optional) - A map of properties that are required to create a profile for the custom connector. + +#### Generic Connector Profile Properties + +Datadog, Dynatrace, Infor Nexus, Marketo, ServiceNow, Slack, Veeva, and Zendesk all support the following attributes: + +* `instanceUrl` (Required) - The location of the connector resource. + +#### Redshift Connector Profile Properties + +* `bucketName` (Required) - A name for the associated Amazon S3 bucket. +* `bucketPrefix` (Optional) - The object key for the destination bucket in which Amazon AppFlow places the files. +* `clusterIdentifier` (Optional) - The unique ID that's assigned to an Amazon Redshift cluster. +* `databaseName` (Optional) - The name of an Amazon Redshift database. +* `databaseUrl` (Required) - The JDBC URL of the Amazon Redshift cluster. +* `dataApiRoleArn` (Optional) - ARN of the IAM role that permits AppFlow to access the database through Data API. +* `roleArn` (Required) - ARN of the IAM role. + +#### Salesforce Connector Profile Properties + +* `instanceUrl` (Optional) - The location of the Salesforce resource. +* `isSandboxEnvironment` (Optional) - Indicates whether the connector profile applies to a sandbox or production environment. + +#### SAPOData Connector Profile Properties + +* `applicationHostUrl` (Required) - The location of the SAPOData resource. +* `applicationServicePath` (Required) - The application path to the catalog service. +* `clientNumber` (Required) - The client number for the client creating the connection. +* `logonLanguage` (Optional) - The logon language of the SAPOData instance. +* `oauthProperties` (Optional) - The SAPOData OAuth properties required for OAuth type authentication. + * `authCodeUrl` (Required) - The authorization code URL required to redirect to the SAP login page to fetch the authorization code for OAuth type authentication. + * `oauthScopes` (Required) - The OAuth scopes required for OAuth type authentication. + * `tokenUrl` (Required) - The token URL required to fetch access/refresh tokens using the authorization code, and to refresh an expired access token using the refresh token. +* `portNumber` (Required) - The port number of the SAPOData instance. +* `privateLinkServiceName` (Optional) - The SAPOData Private Link service name to be used for private data transfers. + +#### Snowflake Connector Profile Properties + +* `accountName` (Optional) - The name of the account. +* `bucketName` (Required) - The name of the Amazon S3 bucket associated with Snowflake. +* `bucketPrefix` (Optional) - The bucket path that refers to the Amazon S3 bucket associated with Snowflake. +* `privateLinkServiceName` (Optional) - The Snowflake Private Link service name to be used for private data transfers. +* `region` (Optional) - AWS Region of the Snowflake account. 
+* `stage` (Required) - Name of the Amazon S3 stage that was created while setting up an Amazon S3 stage in the Snowflake account. This is written in the following format: `<Database>.<Schema>.<Stage Name>`. +* `warehouse` (Required) - The name of the Snowflake warehouse. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the connector profile. +* `credentialsArn` - ARN of the connector profile credentials. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppFlow Connector Profile using the connector profile `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppFlow Connector Profile using the connector profile `arn`. For example: + +```console +% terraform import aws_appflow_connector_profile.profile arn:aws:appflow:us-west-2:123456789012:connectorprofile/example-profile +``` + +[1]: https://docs.aws.amazon.com/appflow/1.0/APIReference/Welcome.html +[2]: https://docs.aws.amazon.com/appflow/1.0/APIReference/API_CreateConnectorProfile.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appflow_flow.html.markdown b/website/docs/cdktf/typescript/r/appflow_flow.html.markdown new file mode 100644 index 00000000000..430257d48af --- /dev/null +++ b/website/docs/cdktf/typescript/r/appflow_flow.html.markdown @@ -0,0 +1,489 @@ +--- +subcategory: "AppFlow" +layout: "aws" +page_title: "AWS: aws_appflow_flow" +description: |- + Provides an AppFlow Flow resource. +--- + + + +# Resource: aws_appflow_flow + +Provides an AppFlow flow resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
 + */ +import { AppflowFlow } from "./.gen/providers/aws/appflow-flow"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const exampleDestination = new S3Bucket(this, "example_destination", { + bucket: "example-destination", + }); + const exampleSource = new S3Bucket(this, "example_source", { + bucket: "example-source", + }); + new S3Object(this, "example", { + bucket: exampleSource.id, + key: "example_source.csv", + source: "example_source.csv", + }); + const dataAwsIamPolicyDocumentExampleDestination = + new DataAwsIamPolicyDocument(this, "example_destination_3", { + statement: [ + { + actions: [ + "s3:PutObject", + "s3:AbortMultipartUpload", + "s3:ListMultipartUploadParts", + "s3:ListBucketMultipartUploads", + "s3:GetBucketAcl", + "s3:PutObjectAcl", + ], + effect: "Allow", + principals: [ + { + identifiers: ["appflow.amazonaws.com"], + type: "Service", + }, + ], + resources: [ + "arn:aws:s3:::example-destination", + "arn:aws:s3:::example-destination/*", + ], + sid: "AllowAppFlowDestinationActions", + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExampleDestination.overrideLogicalId( + "example_destination" + ); + const dataAwsIamPolicyDocumentExampleSource = new DataAwsIamPolicyDocument( + this, + "example_source_4", + { + statement: [ + { + actions: ["s3:ListBucket", "s3:GetObject"], + effect: "Allow", + principals: [ + { + identifiers: ["appflow.amazonaws.com"], + type: "Service", + }, + ], + resources: [ + "arn:aws:s3:::example-source", + "arn:aws:s3:::example-source/*", + ], + sid: "AllowAppFlowSourceActions", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExampleSource.overrideLogicalId("example_source"); + const awsS3BucketPolicyExampleDestination = new S3BucketPolicy( + this, + "example_destination_5", + { + bucket: exampleDestination.id, + policy: Token.asString(dataAwsIamPolicyDocumentExampleDestination.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyExampleDestination.overrideLogicalId( + "example_destination" + ); + const awsS3BucketPolicyExampleSource = new S3BucketPolicy( + this, + "example_source_6", + { + bucket: exampleSource.id, + policy: Token.asString(dataAwsIamPolicyDocumentExampleSource.json), + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyExampleSource.overrideLogicalId("example_source"); + const awsAppflowFlowExample = new AppflowFlow(this, "example_7", { + destinationFlowConfig: [ + { + connectorType: "S3", + destinationConnectorProperties: { + s3: { + bucketName: Token.asString( + awsS3BucketPolicyExampleDestination.bucket + ), + s3OutputFormatConfig: { + prefixConfig: { + prefixType: "PATH", + }, + }, + }, + }, + }, + ], + name: "example", + sourceFlowConfig: { + connectorType: "S3", + sourceConnectorProperties: { + s3: { + bucketName: Token.asString(awsS3BucketPolicyExampleSource.bucket), + bucketPrefix: "example", + }, + }, + }, + task: [ + { + connectorOperator: [ + { + s3: "NO_OP", + }, + ], + destinationField: "exampleField", + sourceFields: ["exampleField"], + taskType: "Map", + }, + ], + triggerConfig: { + triggerType: "OnDemand", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppflowFlowExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the flow. +* `destinationFlowConfig` - (Required) A [Destination Flow Config](#destination-flow-config) that controls how Amazon AppFlow places data in the destination connector. +* `sourceFlowConfig` - (Required) The [Source Flow Config](#source-flow-config) that controls how Amazon AppFlow retrieves data from the source connector. +* `task` - (Required) A [Task](#task) that Amazon AppFlow performs while transferring the data in the flow run. +* `triggerConfig` - (Required) A [Trigger](#trigger-config) that determines how and when the flow runs. +* `description` - (Optional) Description of the flow you want to create. +* `kmsArn` - (Optional) ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### Destination Flow Config + +* `connectorType` - (Required) Type of connector, such as Salesforce, Amplitude, and so on. Valid values are `salesforce`, `singular`, `slack`, `redshift`, `s3`, `marketo`, `googleanalytics`, `zendesk`, `servicenow`, `datadog`, `trendmicro`, `snowflake`, `dynatrace`, `infornexus`, `amplitude`, `veeva`, `eventBridge`, `lookoutMetrics`, `upsolver`, `honeycode`, `customerProfiles`, `sapoData`, and `customConnector`. +* `destinationConnectorProperties` - (Required) This stores the information that is required to query a particular connector. See [Destination Connector Properties](#destination-connector-properties) for more information. +* `apiVersion` - (Optional) API version that the destination connector uses. +* `connectorProfileName` - (Optional) Name of the connector profile. 
This name must be unique for each connector profile in the AWS account. + +#### Destination Connector Properties + +* `customConnector` - (Optional) Properties that are required to query the custom connector. See [Custom Connector Destination Properties](#custom-connector-destination-properties) for more details. +* `customerProfiles` - (Optional) Properties that are required to query Amazon Connect Customer Profiles. See [Customer Profiles Destination Properties](#customer-profiles-destination-properties) for more details. +* `eventBridge` - (Optional) Properties that are required to query Amazon EventBridge. See [Generic Destination Properties](#generic-destination-properties) for more details. +* `honeycode` - (Optional) Properties that are required to query Amazon Honeycode. See [Generic Destination Properties](#generic-destination-properties) for more details. +* `marketo` - (Optional) Properties that are required to query Marketo. See [Generic Destination Properties](#generic-destination-properties) for more details. +* `redshift` - (Optional) Properties that are required to query Amazon Redshift. See [Redshift Destination Properties](#redshift-destination-properties) for more details. +* `s3` - (Optional) Properties that are required to query Amazon S3. See [S3 Destination Properties](#s3-destination-properties) for more details. +* `salesforce` - (Optional) Properties that are required to query Salesforce. See [Salesforce Destination Properties](#salesforce-destination-properties) for more details. +* `sapoData` - (Optional) Properties that are required to query SAPOData. See [SAPOData Destination Properties](#sapodata-destination-properties) for more details. +* `snowflake` - (Optional) Properties that are required to query Snowflake. See [Snowflake Destination Properties](#snowflake-destination-properties) for more details. +* `upsolver` - (Optional) Properties that are required to query Upsolver. See [Upsolver Destination Properties](#upsolver-destination-properties) for more details. +* `zendesk` - (Optional) Properties that are required to query Zendesk. See [Zendesk Destination Properties](#zendesk-destination-properties) for more details. + +##### Generic Destination Properties + +EventBridge, Honeycode, and Marketo destination properties all support the following attributes: + +* `object` - (Required) Object specified in the flow destination. +* `errorHandlingConfig` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details. + +##### Custom Connector Destination Properties + +* `entityName` - (Required) Entity specified in the custom connector as a destination in the flow. +* `customProperties` - (Optional) Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items. +* `errorHandlingConfig` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as a destination. See [Error Handling Config](#error-handling-config) for more details. +* `idFieldNames` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert. +* `writeOperationType` - (Optional) Type of write operation to be performed in the custom connector when it's used as a destination. Valid values are `insert`, `upsert`, `update`, and `delete` (see the sketch following this list). 
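+As referenced above, the following is a hypothetical sketch of a `destinationFlowConfig` entry that targets a custom connector; the profile name, entity name, and `apiVersion` are placeholders, not values from this page.
+
+```typescript
+// Sketch of a destinationFlowConfig entry for a custom connector;
+// all string values are placeholders.
+const customConnectorDestination = {
+  connectorType: "CustomConnector",
+  connectorProfileName: "example_profile", // assumed pre-existing profile
+  apiVersion: "v1", // placeholder
+  destinationConnectorProperties: {
+    customConnector: {
+      entityName: "Contact", // placeholder entity
+      writeOperationType: "UPSERT", // assumed AppFlow API casing
+      idFieldNames: ["id"],
+      errorHandlingConfig: {
+        failOnFirstDestinationError: true,
+      },
+    },
+  },
+};
+```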
 + +##### Customer Profiles Destination Properties + +* `domainName` - (Required) Unique name of the Amazon Connect Customer Profiles domain. +* `objectTypeName` - (Optional) Object specified in the Amazon Connect Customer Profiles flow destination. + +##### Redshift Destination Properties + +* `intermediateBucketName` - (Required) Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift. +* `object` - (Required) Object specified in the Amazon Redshift flow destination. +* `bucketPrefix` - (Optional) Object key for the bucket in which Amazon AppFlow places the destination files. +* `errorHandlingConfig` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details. + +##### S3 Destination Properties + +* `bucketName` - (Required) Amazon S3 bucket name in which Amazon AppFlow places the transferred data. +* `bucketPrefix` - (Optional) Object key for the bucket in which Amazon AppFlow places the destination files. +* `s3OutputFormatConfig` - (Optional) Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See [S3 Output Format Config](#s3-output-format-config) for more details. + +###### S3 Output Format Config + +* `aggregationConfig` - (Optional) Aggregation settings that you can use to customize the output format of your flow data. See [Aggregation Config](#aggregation-config) for more details. +* `fileType` - (Optional) File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are `csv`, `json`, and `parquet`. +* `prefixConfig` - (Optional) Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See [Prefix Config](#prefix-config) for more details. +* `preserveSourceDataTyping` - (Optional, Boolean) Whether the data types from the source system need to be preserved. Only valid for the `parquet` file type. + +##### Salesforce Destination Properties + +* `object` - (Required) Object specified in the flow destination. +* `errorHandlingConfig` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details. +* `idFieldNames` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update or delete. +* `writeOperationType` - (Optional) This specifies the type of write operation to be performed in Salesforce. When the value is `upsert`, then `idFieldNames` is required. Valid values are `insert`, `upsert`, `update`, and `delete`. + +##### SAPOData Destination Properties + +* `objectPath` - (Required) Object path specified in the SAPOData flow destination. +* `errorHandlingConfig` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details. +* `idFieldNames` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update or delete. +* `successResponseHandlingConfig` - (Optional) Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See [Success Response Handling Config](#success-response-handling-config) for more details. 
+* `writeOperation` - (Optional) Possible write operations in the destination connector. If this value is not provided, it defaults to the `insert` operation. Valid values are `insert`, `upsert`, `update`, and `delete`. + +###### Success Response Handling Config + +* `bucketName` - (Optional) Name of the Amazon S3 bucket. +* `bucketPrefix` - (Optional) Amazon S3 bucket prefix. + +##### Snowflake Destination Properties + +* `intermediateBucketName` - (Required) Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake. +* `object` - (Required) Object specified in the Snowflake flow destination. +* `bucketPrefix` - (Optional) Object key for the bucket in which Amazon AppFlow places the destination files. +* `errorHandlingConfig` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details. + +##### Upsolver Destination Properties + +* `bucketName` - (Required) Upsolver Amazon S3 bucket name in which Amazon AppFlow places the transferred data. This must begin with `upsolverAppflow`. +* `bucketPrefix` - (Optional) Object key for the Upsolver Amazon S3 bucket in which Amazon AppFlow places the destination files. +* `s3OutputFormatConfig` - (Optional) Configuration that determines how Amazon AppFlow should format the flow output data when Upsolver is used as the destination. See [Upsolver S3 Output Format Config](#upsolver-s3-output-format-config) for more details. + +###### Upsolver S3 Output Format Config + +* `aggregationConfig` - (Optional) Aggregation settings that you can use to customize the output format of your flow data. See [Aggregation Config](#aggregation-config) for more details. +* `fileType` - (Optional) File type that Amazon AppFlow places in the Upsolver Amazon S3 bucket. Valid values are `csv`, `json`, and `parquet`. +* `prefixConfig` - (Optional) Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See [Prefix Config](#prefix-config) for more details. + +###### Aggregation Config + +* `aggregationType` - (Optional) Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are `none` and `singleFile`. + +###### Prefix Config + +* `prefixFormat` - (Optional) Determines the level of granularity that's included in the prefix. Valid values are `year`, `month`, `day`, `hour`, and `minute`. +* `prefixType` - (Optional) Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are `filename`, `path`, and `pathAndFilename`. + +##### Zendesk Destination Properties + +* `object` - (Required) Object specified in the flow destination. +* `errorHandlingConfig` - (Optional) Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See [Error Handling Config](#error-handling-config) for more details. +* `idFieldNames` - (Optional) Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update or delete. +* `writeOperationType` - (Optional) This specifies the type of write operation to be performed in Zendesk. When the value is `upsert`, then `idFieldNames` is required. Valid values are `insert`, `upsert`, `update`, and `delete`. + +###### Error Handling Config + +* `bucketName` - (Optional) Name of the Amazon S3 bucket. 
+* `bucketPrefix` - (Optional) Amazon S3 bucket prefix. +* `failOnFirstDestinationError` - (Optional, boolean) Whether the flow should fail after the first instance of a failure when attempting to place data in the destination. + +### Source Flow Config + +* `connectorType` - (Required) Type of connector, such as Salesforce, Amplitude, and so on. Valid values are `salesforce`, `singular`, `slack`, `redshift`, `s3`, `marketo`, `googleanalytics`, `zendesk`, `servicenow`, `datadog`, `trendmicro`, `snowflake`, `dynatrace`, `infornexus`, `amplitude`, `veeva`, `eventBridge`, `lookoutMetrics`, `upsolver`, `honeycode`, `customerProfiles`, `sapoData`, and `customConnector`. +* `sourceConnectorProperties` - (Required) Information that is required to query a particular source connector. See [Source Connector Properties](#source-connector-properties) for details. +* `apiVersion` - (Optional) API version that the connector uses. +* `connectorProfileName` - (Optional) Name of the connector profile. This name must be unique for each connector profile in the AWS account. +* `incrementalPullConfig` - (Optional) Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See [Incremental Pull Config](#incremental-pull-config) for more details. + +#### Source Connector Properties + +* `amplitude` - (Optional) Information that is required for querying Amplitude. See [Generic Source Properties](#generic-source-properties) for more details. +* `customConnector` - (Optional) Properties that are applied when the custom connector is being used as a source. See [Custom Connector Source Properties](#custom-connector-source-properties). +* `datadog` - (Optional) Information that is required for querying Datadog. See [Generic Source Properties](#generic-source-properties) for more details. +* `dynatrace` - (Optional) Information that is required for querying Dynatrace. See [Generic Source Properties](#generic-source-properties) for more details. +* `inforNexus` - (Optional) Information that is required for querying Infor Nexus. See [Generic Source Properties](#generic-source-properties) for more details. +* `marketo` - (Optional) Information that is required for querying Marketo. See [Generic Source Properties](#generic-source-properties) for more details. +* `s3` - (Optional) Information that is required for querying Amazon S3. See [S3 Source Properties](#s3-source-properties) for more details. +* `salesforce` - (Optional) Information that is required for querying Salesforce. See [Salesforce Source Properties](#salesforce-source-properties) for more details. +* `sapoData` - (Optional) Information that is required for querying SAPOData as a flow source. See [SAPOData Source Properties](#sapodata-source-properties) for more details. +* `serviceNow` - (Optional) Information that is required for querying ServiceNow. See [Generic Source Properties](#generic-source-properties) for more details. +* `singular` - (Optional) Information that is required for querying Singular. See [Generic Source Properties](#generic-source-properties) for more details. +* `slack` - (Optional) Information that is required for querying Slack. See [Generic Source Properties](#generic-source-properties) for more details. +* `trendMicro` - (Optional) Information that is required for querying Trend Micro. See [Generic Source Properties](#generic-source-properties) for more details. 
+* `veeva` - (Optional) Information that is required for querying Veeva. See [Veeva Source Properties](#veeva-source-properties) for more details. +* `zendesk` - (Optional) Information that is required for querying Zendesk. See [Generic Source Properties](#generic-source-properties) for more details. + +##### Generic Source Properties + +Amplitude, Datadog, Dynatrace, Google Analytics, Infor Nexus, Marketo, ServiceNow, Singular, Slack, Trend Micro, and Zendesk source properties all support the following attributes: + +* `object` - (Required) Object specified in the flow source. + +##### Custom Connector Source Properties + +* `entityName` - (Required) Entity specified in the custom connector as a source in the flow. +* `customProperties` - (Optional) Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items. + +##### S3 Source Properties + +* `bucketName` - (Required) Amazon S3 bucket name where the source files are stored. +* `bucketPrefix` - (Optional) Object key for the Amazon S3 bucket in which the source files are stored. +* `s3InputFormatConfig` - (Optional) When you use Amazon S3 as the source, the format in which you provide the flow input data. See [S3 Input Format Config](#s3-input-format-config) for details. + +###### S3 Input Format Config + +* `s3InputFileType` - (Optional) File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are `csv` and `json`. + +##### Salesforce Source Properties + +* `object` - (Required) Object specified in the Salesforce flow source. +* `enableDynamicFieldUpdate` - (Optional, boolean) Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow. +* `includeDeletedRecords` - (Optional, boolean) Whether Amazon AppFlow includes deleted files in the flow run. + +##### SAPOData Source Properties + +* `objectPath` - (Required) Object path specified in the SAPOData flow source. + +##### Veeva Source Properties + +* `object` - (Required) Object specified in the Veeva flow source. +* `documentType` - (Optional) Document type specified in the Veeva document extract flow. +* `includeAllVersions` - (Optional, boolean) Whether to include all versions of files in the Veeva document extract flow. +* `includeRenditions` - (Optional, boolean) Whether to include file renditions in the Veeva document extract flow. +* `includeSourceFiles` - (Optional, boolean) Whether to include source files in the Veeva document extract flow. + +#### Incremental Pull Config + +* `datetimeTypeFieldName` - (Optional) Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source. + +### Task + +* `sourceFields` - (Required) Source fields to which a particular task is applied. +* `taskType` - (Required) Particular task implementation that Amazon AppFlow performs. Valid values are `arithmetic`, `filter`, `map`, `mapAll`, `mask`, `merge`, `passthrough`, `truncate`, and `validate`. +* `connectorOperator` - (Optional) Operation to be performed on the provided source fields. See [Connector Operator](#connector-operator) for details. +* `destinationField` - (Optional) Field in a destination connector, or a field value against which Amazon AppFlow validates a source field. +* `taskProperties` - (Optional) Map used to store task-related information. The execution service looks for particular information based on the `taskType`. 
Valid keys are `value`, `values`, `dataType`, `upperBound`, `lowerBound`, `sourceDataType`, `destinationDataType`, `validationAction`, `maskValue`, `maskLength`, `truncateLength`, `mathOperationFieldsOrder`, `concatFormat`, `subfieldCategoryMap`, and `excludeSourceFieldsList`. + +#### Connector Operator + +* `amplitude` - (Optional) Operation to be performed on the provided Amplitude source fields. The only valid value is `between`. +* `customConnector` - (Optional) Operators supported by the custom connector. Valid values are `projection`, `lessThan`, `greaterThan`, `contains`, `between`, `lessThanOrEqualTo`, `greaterThanOrEqualTo`, `equalTo`, `notEqualTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `datadog` - (Optional) Operation to be performed on the provided Datadog source fields. Valid values are `projection`, `between`, `equalTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `dynatrace` - (Optional) Operation to be performed on the provided Dynatrace source fields. Valid values are `projection`, `between`, `equalTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `googleAnalytics` - (Optional) Operation to be performed on the provided Google Analytics source fields. Valid values are `projection` and `between`. +* `inforNexus` - (Optional) Operation to be performed on the provided Infor Nexus source fields. Valid values are `projection`, `between`, `equalTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `marketo` - (Optional) Operation to be performed on the provided Marketo source fields. Valid values are `projection`, `between`, `equalTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `s3` - (Optional) Operation to be performed on the provided Amazon S3 source fields. Valid values are `projection`, `lessThan`, `greaterThan`, `between`, `lessThanOrEqualTo`, `greaterThanOrEqualTo`, `equalTo`, `notEqualTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `salesforce` - (Optional) Operation to be performed on the provided Salesforce source fields. Valid values are `projection`, `lessThan`, `greaterThan`, `contains`, `between`, `lessThanOrEqualTo`, `greaterThanOrEqualTo`, `equalTo`, `notEqualTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `sapoData` - (Optional) Operation to be performed on the provided SAPOData source fields. 
Valid values are `projection`, `lessThan`, `greaterThan`, `contains`, `between`, `lessThanOrEqualTo`, `greaterThanOrEqualTo`, `equalTo`, `notEqualTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `serviceNow` - (Optional) Operation to be performed on the provided ServiceNow source fields. Valid values are `projection`, `lessThan`, `greaterThan`, `contains`, `between`, `lessThanOrEqualTo`, `greaterThanOrEqualTo`, `equalTo`, `notEqualTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `singular` - (Optional) Operation to be performed on the provided Singular source fields. Valid values are `projection`, `equalTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `slack` - (Optional) Operation to be performed on the provided Slack source fields. Valid values are `projection`, `lessThan`, `greaterThan`, `between`, `lessThanOrEqualTo`, `greaterThanOrEqualTo`, `equalTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `trendmicro` - (Optional) Operation to be performed on the provided Trend Micro source fields. Valid values are `projection`, `equalTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `veeva` - (Optional) Operation to be performed on the provided Veeva source fields. Valid values are `projection`, `lessThan`, `greaterThan`, `contains`, `between`, `lessThanOrEqualTo`, `greaterThanOrEqualTo`, `equalTo`, `notEqualTo`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. +* `zendesk` - (Optional) Operation to be performed on the provided Zendesk source fields. Valid values are `projection`, `greaterThan`, `addition`, `multiplication`, `division`, `subtraction`, `maskAll`, `maskFirstN`, `maskLastN`, `validateNonNull`, `validateNonZero`, `validateNonNegative`, `validateNumeric`, and `noOp`. + +### Trigger Config + +* `triggerType` - (Required) Type of flow trigger. Valid values are `scheduled`, `event`, and `onDemand`. +* `triggerProperties` - (Optional) Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the `scheduled` trigger type. See [Scheduled Trigger Properties](#scheduled-trigger-properties) for details. + +#### Scheduled Trigger Properties + +The `triggerProperties` block only supports one attribute: `scheduled`, a block which in turn supports the following: + +* `scheduleExpression` - (Required) Scheduling expression that determines the rate at which the schedule will run, for example `rate(5Minutes)`. +* `dataPullMode` - (Optional) Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are `incremental` and `complete`. 
+* `firstExecutionFrom` - (Optional) Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp. +* `scheduleEndTime` - (Optional) Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp. +* `scheduleOffset` - (Optional) Offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000. +* `scheduleStartTime` - (Optional) Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp. +* `timezone` - (Optional) Time zone used when referring to the date and time of a schedule-triggered flow, such as `america/newYork`. + +For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppflowFlow } from "./.gen/providers/aws/appflow-flow"; +interface MyConfig { + triggerType: any; + destinationFlowConfig: any; + name: any; + sourceFlowConfig: any; + task: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new AppflowFlow(this, "example", { + triggerConfig: { + scheduled: [ + { + scheduleExpression: "rate(1minutes)", + }, + ], + triggerType: config.triggerType, + }, + destinationFlowConfig: config.destinationFlowConfig, + name: config.name, + sourceFlowConfig: config.sourceFlowConfig, + task: config.task, + }); + } +} + +``` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Flow's ARN. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppFlow flows using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppFlow flows using the `arn`. For example: + +```console +% terraform import aws_appflow_flow.example arn:aws:appflow:us-west-2:123456789012:flow/example-flow +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appintegrations_data_integration.html.markdown b/website/docs/cdktf/typescript/r/appintegrations_data_integration.html.markdown new file mode 100644 index 00000000000..d5d2a1cfb87 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appintegrations_data_integration.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "AppIntegrations" +layout: "aws" +page_title: "AWS: aws_appintegrations_data_integration" +description: |- + Provides details about a specific Amazon AppIntegrations Data Integration +--- + + + +# Resource: aws_appintegrations_data_integration + +Provides an Amazon AppIntegrations Data Integration resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
 + */ +import { AppintegrationsDataIntegration } from "./.gen/providers/aws/appintegrations-data-integration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppintegrationsDataIntegration(this, "example", { + description: "example", + kmsKey: test.arn, + name: "example", + scheduleConfig: { + firstExecutionFrom: "1439788442681", + object: "Account", + scheduleExpression: "rate(1 hour)", + }, + sourceUri: "Salesforce://AppFlow/example", + tags: { + Key1: "Value1", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Specifies the description of the Data Integration. +* `kmsKey` - (Required) Specifies the KMS key Amazon Resource Name (ARN) for the Data Integration. +* `name` - (Required) Specifies the name of the Data Integration. +* `scheduleConfig` - (Required) A block that defines the name of the data and how often it should be pulled from the source. The Schedule Config block is documented below. +* `sourceUri` - (Required) Specifies the URI of the data source. Create an [AppFlow Connector Profile](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/appflow_connector_profile) and reference the name of the profile in the URL. An example of this value for Salesforce is `Salesforce://AppFlow/example` where `example` is the name of the AppFlow Connector Profile. +* `tags` - (Optional) Tags to apply to the Data Integration. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +A `scheduleConfig` block supports the following arguments: + +* `firstExecutionFrom` - (Required) The start date for objects to import in the first flow run as a Unix epoch timestamp in milliseconds or in ISO-8601 format. This needs to be a time in the past, meaning that the data created or updated before this given date will not be downloaded. +* `object` - (Required) The name of the object to pull from the data source. Examples of objects in Salesforce include `case`, `account`, or `lead`. +* `scheduleExpression` - (Required) How often the data should be pulled from the data source. Examples include `rate(1 hour)`, `rate(3 hours)`, `rate(1 day)`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Data Integration. +* `id` - The identifier of the Data Integration. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon AppIntegrations Data Integrations using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon AppIntegrations Data Integrations using the `id`. 
For example: + +```console +% terraform import aws_appintegrations_data_integration.example 12345678-1234-1234-1234-123456789123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appintegrations_event_integration.html.markdown b/website/docs/cdktf/typescript/r/appintegrations_event_integration.html.markdown new file mode 100644 index 00000000000..1c7a9fbe4ab --- /dev/null +++ b/website/docs/cdktf/typescript/r/appintegrations_event_integration.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "AppIntegrations" +layout: "aws" +page_title: "AWS: aws_appintegrations_event_integration" +description: |- + Provides details about a specific Amazon AppIntegrations Event Integration +--- + + + +# Resource: aws_appintegrations_event_integration + +Provides an Amazon AppIntegrations Event Integration resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppintegrationsEventIntegration } from "./.gen/providers/aws/appintegrations-event-integration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppintegrationsEventIntegration(this, "example", { + description: "Example Description", + eventFilter: { + source: "aws.partner/examplepartner.com", + }, + eventbridgeBus: "default", + name: "example-name", + tags: { + Name: "Example Event Integration", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the Event Integration. +* `eventbridgeBus` - (Required) EventBridge bus. +* `eventFilter` - (Required) Block that defines the configuration information for the event filter. The Event Filter block is documented below. +* `name` - (Required) Name of the Event Integration. +* `tags` - (Optional) Tags to apply to the Event Integration. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +An `eventFilter` block supports the following arguments: + +* `source` - (Required) Source of the events. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Event Integration. +* `id` - Identifier of the Event Integration which is the name of the Event Integration. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon AppIntegrations Event Integrations using the `name`. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amazon AppIntegrations Event Integrations using the `name`. For example:
+
+```console
+% terraform import aws_appintegrations_event_integration.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/applicationinsights_application.html.markdown b/website/docs/cdktf/typescript/r/applicationinsights_application.html.markdown
new file mode 100644
index 00000000000..62e6aae7971
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/applicationinsights_application.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "CloudWatch Application Insights"
+layout: "aws"
+page_title: "AWS: aws_applicationinsights_application"
+description: |-
+  Provides a CloudWatch Application Insights Application resource
+---
+
+
+
+# Resource: aws_applicationinsights_application
+
+Provides an ApplicationInsights Application resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApplicationinsightsApplication } from "./.gen/providers/aws/applicationinsights-application";
+import { ResourcegroupsGroup } from "./.gen/providers/aws/resourcegroups-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new ResourcegroupsGroup(this, "example", {
+      name: "example",
+      resourceQuery: {
+        query: Token.asString(
+          Fn.jsonencode({
+            ResourceTypeFilters: ["AWS::EC2::Instance"],
+            TagFilters: [
+              {
+                Key: "Stage",
+                Values: ["Test"],
+              },
+            ],
+          })
+        ),
+      },
+    });
+    const awsApplicationinsightsApplicationExample =
+      new ApplicationinsightsApplication(this, "example_1", {
+        resourceGroupName: example.name,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsApplicationinsightsApplicationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `resourceGroupName` - (Required) Name of the resource group.
+
+The following arguments are optional:
+
+* `autoConfigEnabled` - (Optional) Indicates whether Application Insights automatically configures unmonitored resources in the resource group.
+* `autoCreate` - (Optional) Configures all of the resources in the resource group by applying the recommended configurations.
+* `cweMonitorEnabled` - (Optional) Indicates whether Application Insights can listen to CloudWatch events for the application resources, such as instance terminated, failed deployment, and others.
+* `groupingType` - (Optional) Application Insights can create applications based on a resource group or on an account. To create an account-based application using all of the resources in the account, set this parameter to `ACCOUNT_BASED`.
+* `opsCenterEnabled` - (Optional) When set to `true`, creates opsItems for any problems detected on an application.
+* `opsItemSnsTopicArn` - (Optional) SNS topic provided to Application Insights that is associated to the created opsItem. Allows you to receive notifications for updates to the opsItem. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Application. +* `id` - Name of the resource group. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ApplicationInsights Applications using the `resourceGroupName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ApplicationInsights Applications using the `resourceGroupName`. For example: + +```console +% terraform import aws_applicationinsights_application.some some-application +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appmesh_gateway_route.html.markdown b/website/docs/cdktf/typescript/r/appmesh_gateway_route.html.markdown new file mode 100644 index 00000000000..44f45aa9b9c --- /dev/null +++ b/website/docs/cdktf/typescript/r/appmesh_gateway_route.html.markdown @@ -0,0 +1,205 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_gateway_route" +description: |- + Provides an AWS App Mesh gateway route resource. +--- + + + +# Resource: aws_appmesh_gateway_route + +Provides an AWS App Mesh gateway route resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppmeshGatewayRoute } from "./.gen/providers/aws/appmesh-gateway-route"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshGatewayRoute(this, "example", { + meshName: "example-service-mesh", + name: "example-gateway-route", + spec: { + httpRoute: { + action: { + target: { + virtualService: { + virtualServiceName: Token.asString( + awsAppmeshVirtualServiceExample.name + ), + }, + }, + }, + match: { + prefix: "/", + }, + }, + }, + tags: { + Environment: "test", + }, + virtualGatewayName: Token.asString(awsAppmeshVirtualGatewayExample.name), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name to use for the gateway route. Must be between 1 and 255 characters in length. +* `meshName` - (Required) Name of the service mesh in which to create the gateway route. 
Must be between 1 and 255 characters in length.
+* `virtualGatewayName` - (Required) Name of the [virtual gateway](/docs/providers/aws/r/appmesh_virtual_gateway.html) to associate the gateway route with. Must be between 1 and 255 characters in length.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to.
+* `spec` - (Required) Gateway route specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `grpcRoute` - (Optional) Specification of a gRPC gateway route.
+* `httpRoute` - (Optional) Specification of an HTTP gateway route.
+* `http2Route` - (Optional) Specification of an HTTP/2 gateway route.
+* `priority` - (Optional) Priority for the gateway route, between `0` and `1000`.
+
+The `grpcRoute`, `httpRoute` and `http2Route` objects support the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `match` - (Required) Criteria for determining a request match.
+
+The `grpcRoute`, `httpRoute` and `http2Route`'s `action` object supports the following:
+
+* `target` - (Required) Target that traffic is routed to when a request matches the gateway route.
+
+The `target` object supports the following:
+
+* `port` - (Optional) The port number that corresponds to the target for Virtual Service provider port. This is required when the provider (router or node) of the Virtual Service has multiple listeners.
+* `virtualService` - (Required) Virtual service gateway route target.
+
+The `virtualService` object supports the following:
+
+* `virtualServiceName` - (Required) Name of the virtual service that traffic is routed to. Must be between 1 and 255 characters in length.
+
+The `httpRoute` and `http2Route`'s `action` object additionally supports the following:
+
+* `rewrite` - (Optional) Gateway route action to rewrite.
+
+The `rewrite` object supports the following:
+
+* `hostname` - (Optional) Host name to rewrite.
+* `path` - (Optional) Exact path to rewrite.
+* `prefix` - (Optional) Specified beginning characters to rewrite.
+
+The `hostname` object supports the following:
+
+* `defaultTargetHostname` - (Required) Default target host name to write to. Valid values: `ENABLED`, `DISABLED`.
+
+The `path` object supports the following:
+
+* `exact` - (Required) Value used to replace matched path.
+
+The `prefix` object supports the following:
+
+* `defaultPrefix` - (Optional) Default prefix used to replace the incoming route prefix when rewritten. Valid values: `ENABLED`, `DISABLED`.
+* `value` - (Optional) Value used to replace the incoming route prefix when rewritten.
+
+The `grpcRoute`'s `match` object supports the following:
+
+* `serviceName` - (Required) Fully qualified domain name for the service to match from the request.
+* `port` - (Optional) The port number to match from the request.
+
+The `httpRoute` and `http2Route`'s `match` object supports the following:
+
+* `header` - (Optional) Client request headers to match on.
+* `hostname` - (Optional) Host name to match on.
+* `path` - (Optional) Client request path to match on.
+* `port` - (Optional) The port number to match from the request.
+* `prefix` - (Optional) Path to match requests with.
This parameter must always start with `/`, which by itself matches all requests to the virtual service name.
+* `queryParameter` - (Optional) Client request query parameters to match on.
+
+The `header` object supports the following:
+
+* `name` - (Required) Name for the HTTP header in the client request that will be matched on.
+* `invert` - (Optional) If `true`, the match is on the opposite of the `match` method and value. Default is `false`.
+* `match` - (Optional) Method and value to match the header value sent with a request. Specify one match method.
+
+The `header`'s `match` object supports the following:
+
+* `exact` - (Optional) Header value sent by the client must match the specified value exactly.
+* `prefix` - (Optional) Header value sent by the client must begin with the specified characters.
+* `port` - (Optional) The port number to match from the request.
+* `range` - (Optional) Object that specifies the range of numbers that the header value sent by the client must be included in.
+* `regex` - (Optional) Header value sent by the client must include the specified characters.
+* `suffix` - (Optional) Header value sent by the client must end with the specified characters.
+
+The `range` object supports the following:
+
+* `end` - (Required) End of the range.
+* `start` - (Required) Start of the range.
+
+The `hostname` object supports the following:
+
+* `exact` - (Optional) Exact host name to match on.
+* `suffix` - (Optional) Specified ending characters of the host name to match on.
+
+The `path` object supports the following:
+
+* `exact` - (Optional) The exact path to match on.
+* `regex` - (Optional) The regex used to match the path.
+
+The `queryParameter` object supports the following:
+
+* `name` - (Required) Name for the query parameter that will be matched on.
+* `match` - (Optional) The query parameter to match on.
+
+The `queryParameter`'s `match` object supports the following:
+
+* `exact` - (Optional) The exact query parameter to match on.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the gateway route.
+* `arn` - ARN of the gateway route.
+* `createdDate` - Creation date of the gateway route.
+* `lastUpdatedDate` - Last update date of the gateway route.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh gateway routes using `meshName` and `virtualGatewayName` together with the gateway route's `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Mesh gateway routes using `meshName` and `virtualGatewayName` together with the gateway route's `name`.
For example:
+
+```console
+% terraform import aws_appmesh_gateway_route.example mesh/gw1/example-gateway-route
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appmesh_mesh.html.markdown b/website/docs/cdktf/typescript/r/appmesh_mesh.html.markdown
new file mode 100644
index 00000000000..e2eaff1c683
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appmesh_mesh.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_mesh"
+description: |-
+  Provides an AWS App Mesh service mesh resource.
+---
+
+
+
+# Resource: aws_appmesh_mesh
+
+Provides an AWS App Mesh service mesh resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppmeshMesh } from "./.gen/providers/aws/appmesh-mesh";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshMesh(this, "simple", {
+      name: "simpleapp",
+    });
+  }
+}
+
+```
+
+### Egress Filter
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppmeshMesh } from "./.gen/providers/aws/appmesh-mesh";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshMesh(this, "simple", {
+      name: "simpleapp",
+      spec: {
+        egressFilter: {
+          type: "ALLOW_ALL",
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the service mesh. Must be between 1 and 255 characters in length.
+* `spec` - (Optional) Service mesh specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `egressFilter` - (Optional) Egress filter rules for the service mesh.
+
+The `egressFilter` object supports the following:
+
+* `type` - (Optional) Egress filter type. By default, the type is `DROP_ALL`.
+Valid values are `ALLOW_ALL` and `DROP_ALL`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the service mesh.
+* `arn` - ARN of the service mesh.
+* `createdDate` - Creation date of the service mesh.
+* `lastUpdatedDate` - Last update date of the service mesh.
+* `meshOwner` - AWS account ID of the service mesh's owner.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
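+
+As a quick illustration of consuming these exported attributes, the hand-written sketch below (not `cdktf convert` output) publishes the mesh ARN as a stack output; the construct and output names are illustrative.
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { AppmeshMesh } from "./.gen/providers/aws/appmesh-mesh";
+class MeshOutputSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mesh = new AppmeshMesh(this, "simple", {
+      name: "simpleapp",
+    });
+    // Attributes such as `arn` and `id` resolve at apply time.
+    new TerraformOutput(this, "mesh_arn", {
+      value: mesh.arn,
+    });
+  }
+}
+```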
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh service meshes using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import App Mesh service meshes using the `name`. For example: + +```console +% terraform import aws_appmesh_mesh.simple simpleapp +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appmesh_route.html.markdown b/website/docs/cdktf/typescript/r/appmesh_route.html.markdown new file mode 100644 index 00000000000..e28b2052edc --- /dev/null +++ b/website/docs/cdktf/typescript/r/appmesh_route.html.markdown @@ -0,0 +1,398 @@ +--- +subcategory: "App Mesh" +layout: "aws" +page_title: "AWS: aws_appmesh_route" +description: |- + Provides an AWS App Mesh route resource. +--- + + + +# Resource: aws_appmesh_route + +Provides an AWS App Mesh route resource. + +## Example Usage + +### HTTP Routing + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppmeshRoute } from "./.gen/providers/aws/appmesh-route"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshRoute(this, "serviceb", { + meshName: simple.id, + name: "serviceB-route", + spec: { + httpRoute: { + action: { + weightedTarget: [ + { + virtualNode: serviceb1.name, + weight: 90, + }, + { + virtualNode: serviceb2.name, + weight: 10, + }, + ], + }, + match: { + prefix: "/", + }, + }, + }, + virtualRouterName: Token.asString(awsAppmeshVirtualRouterServiceb.name), + }); + } +} + +``` + +### HTTP Header Routing + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppmeshRoute } from "./.gen/providers/aws/appmesh-route"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshRoute(this, "serviceb", { + meshName: simple.id, + name: "serviceB-route", + spec: { + httpRoute: { + action: { + weightedTarget: [ + { + virtualNode: Token.asString(awsAppmeshVirtualNodeServiceb.name), + weight: 100, + }, + ], + }, + match: { + header: [ + { + match: { + prefix: "123", + }, + name: "clientRequestId", + }, + ], + method: "POST", + prefix: "/", + scheme: "https", + }, + }, + }, + virtualRouterName: Token.asString(awsAppmeshVirtualRouterServiceb.name), + }); + } +} + +``` + +### Retry Policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AppmeshRoute } from "./.gen/providers/aws/appmesh-route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshRoute(this, "serviceb", {
+      meshName: simple.id,
+      name: "serviceB-route",
+      spec: {
+        httpRoute: {
+          action: {
+            weightedTarget: [
+              {
+                virtualNode: Token.asString(awsAppmeshVirtualNodeServiceb.name),
+                weight: 100,
+              },
+            ],
+          },
+          match: {
+            prefix: "/",
+          },
+          retryPolicy: {
+            httpRetryEvents: ["server-error"],
+            maxRetries: 1,
+            perRetryTimeout: {
+              unit: "s",
+              value: 15,
+            },
+          },
+        },
+      },
+      virtualRouterName: Token.asString(awsAppmeshVirtualRouterServiceb.name),
+    });
+  }
+}
+
+```
+
+### TCP Routing
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppmeshRoute } from "./.gen/providers/aws/appmesh-route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshRoute(this, "serviceb", {
+      meshName: simple.id,
+      name: "serviceB-route",
+      spec: {
+        tcpRoute: {
+          action: {
+            weightedTarget: [
+              {
+                virtualNode: serviceb1.name,
+                weight: 100,
+              },
+            ],
+          },
+        },
+      },
+      virtualRouterName: Token.asString(awsAppmeshVirtualRouterServiceb.name),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the route. Must be between 1 and 255 characters in length.
+* `meshName` - (Required) Name of the service mesh in which to create the route. Must be between 1 and 255 characters in length.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to.
+* `virtualRouterName` - (Required) Name of the virtual router in which to create the route. Must be between 1 and 255 characters in length.
+* `spec` - (Required) Route specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `grpcRoute` - (Optional) GRPC routing information for the route.
+* `http2Route` - (Optional) HTTP/2 routing information for the route.
+* `httpRoute` - (Optional) HTTP routing information for the route.
+* `priority` - (Optional) Priority for the route, between `0` and `1000`.
+Routes are matched based on the specified value, where `0` is the highest priority.
+* `tcpRoute` - (Optional) TCP routing information for the route.
+
+The `grpcRoute` object supports the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `match` - (Required) Criteria for determining a gRPC request match.
+* `retryPolicy` - (Optional) Retry policy.
+* `timeout` - (Optional) Types of timeouts.
+
+The `http2Route` and `httpRoute` objects support the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `match` - (Required) Criteria for determining an HTTP request match.
+* `retryPolicy` - (Optional) Retry policy.
+* `timeout` - (Optional) Types of timeouts.
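+
+To make the shape of these route objects concrete, here is a hand-written sketch (not `cdktf convert` output, and only one of many valid layouts) of an `httpRoute` that combines all four blocks; the mesh, router, and node names are placeholders.
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AppmeshRoute } from "./.gen/providers/aws/appmesh-route";
+class HttpRouteSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshRoute(this, "sketch", {
+      meshName: "example-mesh",
+      name: "example-route",
+      virtualRouterName: "example-router",
+      spec: {
+        priority: 10, // lower values are matched first
+        httpRoute: {
+          match: { prefix: "/" }, // criteria for a request match
+          action: {
+            weightedTarget: [{ virtualNode: "example-node", weight: 100 }],
+          },
+          retryPolicy: {
+            httpRetryEvents: ["server-error"],
+            maxRetries: 2,
+            perRetryTimeout: { unit: "s", value: 5 },
+          },
+          timeout: {
+            perRequest: { unit: "s", value: 30 },
+          },
+        },
+      },
+    });
+  }
+}
+```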
+
+The `tcpRoute` object supports the following:
+
+* `action` - (Required) Action to take if a match is determined.
+* `timeout` - (Optional) Types of timeouts.
+
+The `action` object supports the following:
+
+* `weightedTarget` - (Required) Targets that traffic is routed to when a request matches the route.
+You can specify one or more targets and their relative weights with which to distribute traffic.
+
+The `timeout` object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+
+The `idle` object supports the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `grpcRoute`'s `match` object supports the following:
+
+* `metadata` - (Optional) Data to match from the gRPC request.
+* `methodName` - (Optional) Method name to match from the request. If you specify a name, you must also specify a `serviceName`.
+* `serviceName` - (Optional) Fully qualified domain name for the service to match from the request.
+* `port` - (Optional) The port number to match from the request.
+
+The `metadata` object supports the following:
+
+* `name` - (Required) Name of the route. Must be between 1 and 50 characters in length.
+* `invert` - (Optional) If `true`, the match is on the opposite of the `match` criteria. Default is `false`.
+* `match` - (Optional) Data to match from the request.
+
+The `metadata`'s `match` object supports the following:
+
+* `exact` - (Optional) Value sent by the client must match the specified value exactly. Must be between 1 and 255 characters in length.
+* `prefix` - (Optional) Value sent by the client must begin with the specified characters. Must be between 1 and 255 characters in length.
+* `port` - (Optional) The port number to match from the request.
+* `range` - (Optional) Object that specifies the range of numbers that the value sent by the client must be included in.
+* `regex` - (Optional) Value sent by the client must include the specified characters. Must be between 1 and 255 characters in length.
+* `suffix` - (Optional) Value sent by the client must end with the specified characters. Must be between 1 and 255 characters in length.
+
+The `grpcRoute`'s `retryPolicy` object supports the following:
+
+* `grpcRetryEvents` - (Optional) List of gRPC retry events.
+Valid values: `cancelled`, `deadline-exceeded`, `internal`, `resource-exhausted`, `unavailable`.
+* `httpRetryEvents` - (Optional) List of HTTP retry events.
+Valid values: `client-error` (HTTP status code 409), `gateway-error` (HTTP status codes 502, 503, and 504), `server-error` (HTTP status codes 500, 501, 502, 503, 504, 505, 506, 507, 508, 510, and 511), `stream-error` (retry on refused stream).
+* `maxRetries` - (Required) Maximum number of retries.
+* `perRetryTimeout` - (Required) Per-retry timeout.
+* `tcpRetryEvents` - (Optional) List of TCP retry events. The only valid value is `connection-error`.
+
+The `grpcRoute`'s `timeout` object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `perRequest` - (Optional) Per request timeout.
+
+The `idle` and `perRequest` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
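+
+Assembling the gRPC pieces above, a hand-written sketch of a complete `grpcRoute` block might look as follows (not `cdktf convert` output; the service, metadata, and node names are placeholders):
+
+```typescript
+// Spec fragment only; it would sit under `spec` in an AppmeshRoute.
+const grpcRouteSketch = {
+  grpcRoute: {
+    match: {
+      serviceName: "servicea.simpleapp.local",
+      metadata: [{ name: "x-tenant", match: { exact: "blue" } }],
+    },
+    action: {
+      weightedTarget: [{ virtualNode: "serviceb-node", weight: 100 }],
+    },
+    retryPolicy: {
+      grpcRetryEvents: ["deadline-exceeded", "unavailable"],
+      maxRetries: 3,
+      perRetryTimeout: { unit: "ms", value: 250 },
+    },
+    timeout: {
+      idle: { unit: "s", value: 60 },
+      perRequest: { unit: "s", value: 5 },
+    },
+  },
+};
+```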
+
+The `http2Route` and `httpRoute`'s `match` object supports the following:
+
+* `prefix` - (Optional) Path with which to match requests.
+This parameter must always start with /, which by itself matches all requests to the virtual router service name.
+* `port` - (Optional) The port number to match from the request.
+* `header` - (Optional) Client request headers to match on.
+* `method` - (Optional) Client request header method to match on. Valid values: `GET`, `HEAD`, `POST`, `PUT`, `DELETE`, `CONNECT`, `OPTIONS`, `TRACE`, `PATCH`.
+* `path` - (Optional) Client request path to match on.
+* `queryParameter` - (Optional) Client request query parameters to match on.
+* `scheme` - (Optional) Client request header scheme to match on. Valid values: `http`, `https`.
+
+The `match`'s `path` object supports the following:
+
+* `exact` - (Optional) The exact path to match on.
+* `regex` - (Optional) The regex used to match the path.
+
+The `match`'s `queryParameter` object supports the following:
+
+* `name` - (Required) Name for the query parameter that will be matched on.
+* `match` - (Optional) The query parameter to match on.
+
+The `queryParameter`'s `match` object supports the following:
+
+* `exact` - (Optional) The exact query parameter to match on.
+
+The `http2Route` and `httpRoute`'s `retryPolicy` object supports the following:
+
+* `httpRetryEvents` - (Optional) List of HTTP retry events.
+Valid values: `client-error` (HTTP status code 409), `gateway-error` (HTTP status codes 502, 503, and 504), `server-error` (HTTP status codes 500, 501, 502, 503, 504, 505, 506, 507, 508, 510, and 511), `stream-error` (retry on refused stream).
+* `maxRetries` - (Required) Maximum number of retries.
+* `perRetryTimeout` - (Required) Per-retry timeout.
+* `tcpRetryEvents` - (Optional) List of TCP retry events. The only valid value is `connection-error`.
+
+You must specify at least one value for `httpRetryEvents`, or at least one value for `tcpRetryEvents`.
+
+The `http2Route` and `httpRoute`'s `timeout` object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `perRequest` - (Optional) Per request timeout.
+
+The `idle` and `perRequest` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `perRetryTimeout` object supports the following:
+
+* `unit` - (Required) Retry unit. Valid values: `ms`, `s`.
+* `value` - (Required) Retry value.
+
+The `weightedTarget` object supports the following:
+
+* `virtualNode` - (Required) Virtual node to associate with the weighted target. Must be between 1 and 255 characters in length.
+* `weight` - (Required) Relative weight of the weighted target. An integer between 0 and 100.
+* `port` - (Optional) The targeted port of the weighted object.
+
+The `header` object supports the following:
+
+* `name` - (Required) Name for the HTTP header in the client request that will be matched on.
+* `invert` - (Optional) If `true`, the match is on the opposite of the `match` method and value. Default is `false`.
+* `match` - (Optional) Method and value to match the header value sent with a request. Specify one match method.
+
+The `header`'s `match` object supports the following:
+
+* `exact` - (Optional) Header value sent by the client must match the specified value exactly.
+* `prefix` - (Optional) Header value sent by the client must begin with the specified characters.
+* `port` - (Optional) The port number to match from the request.
+* `range` - (Optional) Object that specifies the range of numbers that the header value sent by the client must be included in.
+* `regex` - (Optional) Header value sent by the client must include the specified characters.
+* `suffix` - (Optional) Header value sent by the client must end with the specified characters.
+
+The `range` object supports the following:
+
+* `end` - (Required) End of the range.
+* `start` - (Required) Start of the range.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the route.
+* `arn` - ARN of the route.
+* `createdDate` - Creation date of the route.
+* `lastUpdatedDate` - Last update date of the route.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual routes using `meshName` and `virtualRouterName` together with the route's `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Mesh virtual routes using `meshName` and `virtualRouterName` together with the route's `name`. For example:
+
+```console
+% terraform import aws_appmesh_route.serviceb simpleapp/serviceB/serviceB-route
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appmesh_virtual_gateway.html.markdown b/website/docs/cdktf/typescript/r/appmesh_virtual_gateway.html.markdown
new file mode 100644
index 00000000000..ff8c2fbe20b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appmesh_virtual_gateway.html.markdown
@@ -0,0 +1,327 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_gateway"
+description: |-
+  Provides an AWS App Mesh virtual gateway resource.
+---
+
+
+
+# Resource: aws_appmesh_virtual_gateway
+
+Provides an AWS App Mesh virtual gateway resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { AppmeshVirtualGateway } from "./.gen/providers/aws/appmesh-virtual-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshVirtualGateway(this, "example", { + meshName: "example-service-mesh", + name: "example-virtual-gateway", + spec: { + listener: [ + { + portMapping: { + port: 8080, + protocol: "http", + }, + }, + ], + }, + tags: { + Environment: "test", + }, + }); + } +} + +``` + +### Access Logs and TLS + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppmeshVirtualGateway } from "./.gen/providers/aws/appmesh-virtual-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshVirtualGateway(this, "example", { + meshName: "example-service-mesh", + name: "example-virtual-gateway", + spec: { + listener: [ + { + portMapping: { + port: 8080, + protocol: "http", + }, + tls: { + certificate: { + acm: { + certificateArn: Token.asString(awsAcmCertificateExample.arn), + }, + }, + mode: "STRICT", + }, + }, + ], + logging: { + accessLog: { + file: { + path: "/var/log/access.log", + }, + }, + }, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name to use for the virtual gateway. Must be between 1 and 255 characters in length. +* `meshName` - (Required) Name of the service mesh in which to create the virtual gateway. Must be between 1 and 255 characters in length. +* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to. +* `spec` - (Required) Virtual gateway specification to apply. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `spec` object supports the following: + +* `listener` - (Required) Listeners that the mesh endpoint is expected to receive inbound traffic from. You can specify one listener. +* `backendDefaults` - (Optional) Defaults for backends. +* `logging` - (Optional) Inbound and outbound access logging information for the virtual gateway. + +The `backendDefaults` object supports the following: + +* `clientPolicy` - (Optional) Default client policy for virtual gateway backends. + +The `clientPolicy` object supports the following: + +* `tls` - (Optional) Transport Layer Security (TLS) client policy. + +The `tls` object supports the following: + +* `certificate` (Optional) Virtual gateway's client's Transport Layer Security (TLS) certificate. +* `enforce` - (Optional) Whether the policy is enforced. Default is `true`. +* `ports` - (Optional) One or more ports that the policy is enforced for. +* `validation` - (Required) TLS validation context. + +The `certificate` object supports the following: + +* `file` - (Optional) Local file certificate. 
+* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate chain for the certificate.
+* `privateKey` - (Required) Private key for a certificate stored on the file system of the mesh endpoint that the proxy is running on.
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subjectAlternativeNames` - (Optional) SANs for a virtual gateway's listener's Transport Layer Security (TLS) validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subjectAlternativeNames` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `acm` - (Optional) TLS validation context trust for an AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificateAuthorityArns` - (Required) One or more ACM ARNs.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate trust chain for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret for a virtual gateway's Transport Layer Security (TLS) Secret Discovery Service validation context trust.
+
+The `listener` object supports the following:
+
+* `portMapping` - (Required) Port mapping information for the listener.
+* `connectionPool` - (Optional) Connection pool information for the listener.
+* `healthCheck` - (Optional) Health check information for the listener.
+* `tls` - (Optional) Transport Layer Security (TLS) properties for the listener.
+
+The `logging` object supports the following:
+
+* `accessLog` - (Optional) Access log configuration for a virtual gateway.
+
+The `accessLog` object supports the following:
+
+* `file` - (Optional) File object to send virtual gateway access logs to.
+
+The `file` object supports the following:
+
+* `format` - (Optional) The specified format for the logs.
+* `path` - (Required) File path to write access logs to. You can use `/dev/stdout` to send access logs to standard out. Must be between 1 and 255 characters in length.
+
+The `format` object supports the following:
+
+* `json` - (Optional) The logging format for JSON.
+* `text` - (Optional) The logging format for text. Must be between 1 and 1000 characters in length.
+
+The `json` object supports the following:
+
+* `key` - (Required) The specified key for the JSON. Must be between 1 and 100 characters in length.
+* `value` - (Required) The specified value for the JSON. Must be between 1 and 100 characters in length.
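+
+Putting the logging pieces together, a hand-written sketch of a JSON-formatted access log might look like this (not `cdktf convert` output; the keys and the Envoy-style format strings are illustrative assumptions):
+
+```typescript
+// Spec fragment only; it would sit under `spec` in an AppmeshVirtualGateway.
+const loggingSketch = {
+  logging: {
+    accessLog: {
+      file: {
+        path: "/dev/stdout",
+        format: {
+          json: [
+            { key: "status", value: "%RESPONSE_CODE%" },
+            { key: "path", value: "%REQ(:PATH)%" },
+          ],
+        },
+      },
+    },
+  },
+};
+```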
+
+The `portMapping` object supports the following:
+
+* `port` - (Required) Port used for the port mapping.
+* `protocol` - (Required) Protocol used for the port mapping. Valid values are `http`, `http2`, `tcp` and `grpc`.
+
+The `connectionPool` object supports the following:
+
+* `grpc` - (Optional) Connection pool information for gRPC listeners.
+* `http` - (Optional) Connection pool information for HTTP listeners.
+* `http2` - (Optional) Connection pool information for HTTP2 listeners.
+
+The `grpc` connection pool object supports the following:
+
+* `maxRequests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `http` connection pool object supports the following:
+
+* `maxConnections` - (Required) Maximum number of outbound TCP connections Envoy can establish concurrently with all hosts in upstream cluster. Minimum value of `1`.
+* `maxPendingRequests` - (Optional) Number of overflowing requests after `maxConnections` Envoy will queue to upstream cluster. Minimum value of `1`.
+
+The `http2` connection pool object supports the following:
+
+* `maxRequests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `healthCheck` object supports the following:
+
+* `healthyThreshold` - (Required) Number of consecutive successful health checks that must occur before declaring listener healthy.
+* `intervalMillis` - (Required) Time period in milliseconds between each health check execution.
+* `protocol` - (Required) Protocol for the health check request. Valid values are `http`, `http2`, and `grpc`.
+* `timeoutMillis` - (Required) Amount of time to wait when receiving a response from the health check, in milliseconds.
+* `unhealthyThreshold` - (Required) Number of consecutive failed health checks that must occur before declaring a virtual gateway unhealthy.
+* `path` - (Optional) Destination path for the health check request. This is only required if the specified protocol is `http` or `http2`.
+* `port` - (Optional) Destination port for the health check request. This port must match the port defined in the `portMapping` for the listener.
+
+The `tls` object supports the following:
+
+* `certificate` - (Required) Listener's TLS certificate.
+* `mode` - (Required) Listener's TLS mode. Valid values: `DISABLED`, `PERMISSIVE`, `STRICT`.
+* `validation` - (Optional) Listener's Transport Layer Security (TLS) validation context.
+
+The `certificate` object supports the following:
+
+* `acm` - (Optional) An AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) Local file certificate.
+* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificateArn` - (Required) ARN for the certificate.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate chain for the certificate. Must be between 1 and 255 characters in length.
+* `privateKey` - (Required) Private key for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
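+
+For contrast with the ACM-based example earlier, a hand-written sketch of a listener terminating TLS with file-based materials could look like this (not `cdktf convert` output; the paths are placeholders):
+
+```typescript
+// Spec fragment only; it would sit under `spec` in an AppmeshVirtualGateway.
+const listenerTlsSketch = {
+  listener: [
+    {
+      portMapping: { port: 8443, protocol: "http" },
+      tls: {
+        mode: "STRICT", // also DISABLED or PERMISSIVE
+        certificate: {
+          file: {
+            certificateChain: "/etc/ssl/certs/cert_chain.pem",
+            privateKey: "/etc/ssl/private/key.pem",
+          },
+        },
+      },
+    },
+  ],
+};
+```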
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subjectAlternativeNames` - (Optional) SANs for a virtual gateway's listener's Transport Layer Security (TLS) validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subjectAlternativeNames` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate trust chain for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret for a virtual gateway's Transport Layer Security (TLS) Secret Discovery Service validation context trust.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the virtual gateway.
+* `arn` - ARN of the virtual gateway.
+* `createdDate` - Creation date of the virtual gateway.
+* `lastUpdatedDate` - Last update date of the virtual gateway.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual gateway using `meshName` together with the virtual gateway's `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Mesh virtual gateway using `meshName` together with the virtual gateway's `name`. For example:
+
+```console
+% terraform import aws_appmesh_virtual_gateway.example mesh/gw1
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appmesh_virtual_node.html.markdown b/website/docs/cdktf/typescript/r/appmesh_virtual_node.html.markdown
new file mode 100644
index 00000000000..ad8fec3b6f7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appmesh_virtual_node.html.markdown
@@ -0,0 +1,542 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_node"
+description: |-
+  Provides an AWS App Mesh virtual node resource.
+---
+
+
+
+# Resource: aws_appmesh_virtual_node
+
+Provides an AWS App Mesh virtual node resource.
+ +## Breaking Changes + +Because of backward incompatible API changes (read [here](https://github.com/awslabs/aws-app-mesh-examples/issues/92)), `awsAppmeshVirtualNode` resource definitions created with provider versions earlier than v2.3.0 will need to be modified: + +* Rename the `serviceName` attribute of the `dns` object to `hostname`. + +* Replace the `backends` attribute of the `spec` object with one or more `backend` configuration blocks, +setting `virtualServiceName` to the name of the service. + +The Terraform state associated with existing resources will automatically be migrated. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppmeshVirtualNode } from "./.gen/providers/aws/appmesh-virtual-node"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshVirtualNode(this, "serviceb1", { + meshName: simple.id, + name: "serviceBv1", + spec: { + backend: [ + { + virtualService: { + virtualServiceName: "servicea.simpleapp.local", + }, + }, + ], + listener: [ + { + portMapping: { + port: 8080, + protocol: "http", + }, + }, + ], + serviceDiscovery: { + dns: { + hostname: "serviceb.simpleapp.local", + }, + }, + }, + }); + } +} + +``` + +### AWS Cloud Map Service Discovery + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppmeshVirtualNode } from "./.gen/providers/aws/appmesh-virtual-node"; +import { ServiceDiscoveryHttpNamespace } from "./.gen/providers/aws/service-discovery-http-namespace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ServiceDiscoveryHttpNamespace(this, "example", { + name: "example-ns", + }); + new AppmeshVirtualNode(this, "serviceb1", { + meshName: simple.id, + name: "serviceBv1", + spec: { + backend: [ + { + virtualService: { + virtualServiceName: "servicea.simpleapp.local", + }, + }, + ], + listener: [ + { + portMapping: { + port: 8080, + protocol: "http", + }, + }, + ], + serviceDiscovery: { + awsCloudMap: { + attributes: { + stack: "blue", + }, + namespaceName: example.name, + serviceName: "serviceb1", + }, + }, + }, + }); + } +} + +``` + +### Listener Health Check + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AppmeshVirtualNode } from "./.gen/providers/aws/appmesh-virtual-node"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshVirtualNode(this, "serviceb1", { + meshName: simple.id, + name: "serviceBv1", + spec: { + backend: [ + { + virtualService: { + virtualServiceName: "servicea.simpleapp.local", + }, + }, + ], + listener: [ + { + healthCheck: { + healthyThreshold: 2, + intervalMillis: 5000, + path: "/ping", + protocol: "http", + timeoutMillis: 2000, + unhealthyThreshold: 2, + }, + portMapping: { + port: 8080, + protocol: "http", + }, + }, + ], + serviceDiscovery: { + dns: { + hostname: "serviceb.simpleapp.local", + }, + }, + }, + }); + } +} + +``` + +### Logging + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppmeshVirtualNode } from "./.gen/providers/aws/appmesh-virtual-node"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppmeshVirtualNode(this, "serviceb1", { + meshName: simple.id, + name: "serviceBv1", + spec: { + backend: [ + { + virtualService: { + virtualServiceName: "servicea.simpleapp.local", + }, + }, + ], + listener: [ + { + portMapping: { + port: 8080, + protocol: "http", + }, + }, + ], + logging: { + accessLog: { + file: { + path: "/dev/stdout", + }, + }, + }, + serviceDiscovery: { + dns: { + hostname: "serviceb.simpleapp.local", + }, + }, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name to use for the virtual node. Must be between 1 and 255 characters in length. +* `meshName` - (Required) Name of the service mesh in which to create the virtual node. Must be between 1 and 255 characters in length. +* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to. +* `spec` - (Required) Virtual node specification to apply. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `spec` object supports the following: + +* `backend` - (Optional) Backends to which the virtual node is expected to send outbound traffic. +* `backendDefaults` - (Optional) Defaults for backends. +* `listener` - (Optional) Listeners from which the virtual node is expected to receive inbound traffic. +* `logging` - (Optional) Inbound and outbound access logging information for the virtual node. +* `serviceDiscovery` - (Optional) Service discovery information for the virtual node. + +The `backend` object supports the following: + +* `virtualService` - (Required) Virtual service to use as a backend for a virtual node. + +The `virtualService` object supports the following: + +* `clientPolicy` - (Optional) Client policy for the backend. +* `virtualServiceName` - (Required) Name of the virtual service that is acting as a virtual node backend. Must be between 1 and 255 characters in length. 
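+
+A hand-written sketch of a backend that attaches a TLS client policy (the `clientPolicy` fields are detailed below; this is not `cdktf convert` output, and the ARN and service name are placeholders):
+
+```typescript
+// Spec fragment only; it would sit under `spec` in an AppmeshVirtualNode.
+const backendSketch = {
+  backend: [
+    {
+      virtualService: {
+        virtualServiceName: "servicea.simpleapp.local",
+        clientPolicy: {
+          tls: {
+            enforce: true,
+            validation: {
+              trust: {
+                acm: {
+                  certificateAuthorityArns: [
+                    "arn:aws:acm-pca:us-east-1:123456789012:certificate-authority/example",
+                  ],
+                },
+              },
+            },
+          },
+        },
+      },
+    },
+  ],
+};
+```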
+
+The `clientPolicy` object supports the following:
+
+* `tls` - (Optional) Transport Layer Security (TLS) client policy.
+
+The `tls` object supports the following:
+
+* `certificate` - (Optional) Virtual node's client's Transport Layer Security (TLS) certificate.
+* `enforce` - (Optional) Whether the policy is enforced. Default is `true`.
+* `ports` - (Optional) One or more ports that the policy is enforced for.
+* `validation` - (Required) TLS validation context.
+
+The `certificate` object supports the following:
+
+* `file` - (Optional) Local file certificate.
+* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate chain for the certificate.
+* `privateKey` - (Required) Private key for a certificate stored on the file system of the mesh endpoint that the proxy is running on.
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subjectAlternativeNames` - (Optional) SANs for a TLS validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subjectAlternativeNames` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `acm` - (Optional) TLS validation context trust for an AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificateAuthorityArns` - (Required) One or more ACM ARNs.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate trust chain for a certificate stored on the file system of the virtual node that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `backendDefaults` object supports the following:
+
+* `clientPolicy` - (Optional) Default client policy for virtual service backends. See above for details.
+
+The `listener` object supports the following:
+
+* `portMapping` - (Required) Port mapping information for the listener.
+* `connectionPool` - (Optional) Connection pool information for the listener.
+* `healthCheck` - (Optional) Health check information for the listener.
+* `outlierDetection` - (Optional) Outlier detection information for the listener.
+* `timeout` - (Optional) Timeouts for different protocols.
+* `tls` - (Optional) Transport Layer Security (TLS) properties for the listener.
+
+The `logging` object supports the following:
+
+* `accessLog` - (Optional) Access log configuration for a virtual node.
+
+The `accessLog` object supports the following:
+
+* `file` - (Optional) File object to send virtual node access logs to.
+
+The `file` object supports the following:
+
+* `format` - (Optional) The specified format for the logs.
+* `path` - (Required) File path to write access logs to. You can use `/dev/stdout` to send access logs to standard out. Must be between 1 and 255 characters in length.
+
+The `format` object supports the following:
+
+* `json` - (Optional) The logging format for JSON.
+* `text` - (Optional) The logging format for text. Must be between 1 and 1000 characters in length.
+
+The `json` object supports the following:
+
+* `key` - (Required) The specified key for the JSON. Must be between 1 and 100 characters in length.
+* `value` - (Required) The specified value for the JSON. Must be between 1 and 100 characters in length.
+
+The `serviceDiscovery` object supports the following:
+
+* `awsCloudMap` - (Optional) Any AWS Cloud Map information for the virtual node.
+* `dns` - (Optional) DNS service name for the virtual node.
+
+The `awsCloudMap` object supports the following:
+
+* `attributes` - (Optional) String map that contains attributes with values that you can use to filter instances by any custom attribute that you specified when you registered the instance. Only instances that match all of the specified key/value pairs will be returned.
+* `namespaceName` - (Required) Name of the AWS Cloud Map namespace to use.
+Use the [`awsServiceDiscoveryHttpNamespace`](/docs/providers/aws/r/service_discovery_http_namespace.html) resource to configure a Cloud Map namespace. Must be between 1 and 1024 characters in length.
+* `serviceName` - (Required) Name of the AWS Cloud Map service to use. Use the [`awsServiceDiscoveryService`](/docs/providers/aws/r/service_discovery_service.html) resource to configure a Cloud Map service. Must be between 1 and 1024 characters in length.
+
+The `dns` object supports the following:
+
+* `hostname` - (Required) DNS host name for your virtual node.
+* `ipPreference` - (Optional) The preferred IP version that this virtual node uses. Valid values: `IPv6_PREFERRED`, `IPv4_PREFERRED`, `IPv4_ONLY`, `IPv6_ONLY`.
+* `responseType` - (Optional) The DNS response type for the virtual node. Valid values: `LOADBALANCER`, `ENDPOINTS`.
+
+The `portMapping` object supports the following:
+
+* `port` - (Required) Port used for the port mapping.
+* `protocol` - (Required) Protocol used for the port mapping. Valid values are `http`, `http2`, `tcp` and `grpc`.
+
+The `connectionPool` object supports the following:
+
+* `grpc` - (Optional) Connection pool information for gRPC listeners.
+* `http` - (Optional) Connection pool information for HTTP listeners.
+* `http2` - (Optional) Connection pool information for HTTP2 listeners.
+* `tcp` - (Optional) Connection pool information for TCP listeners.
+
+The `grpc` connection pool object supports the following:
+
+* `maxRequests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `http` connection pool object supports the following:
+
+* `maxConnections` - (Required) Maximum number of outbound TCP connections Envoy can establish concurrently with all hosts in upstream cluster. Minimum value of `1`.
+* `maxPendingRequests` - (Optional) Number of overflowing requests after `maxConnections` Envoy will queue to upstream cluster. Minimum value of `1`.
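+
+As a concrete sketch of the connection pool arguments (hand-written rather than converter output; the limits are placeholder values, and the `http2` and `tcp` variants below follow the same shape), a listener that caps its HTTP pool looks like this:
+
+```typescript
+// Hand-written sketch (not 'cdktf convert' output); names and limits are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AppmeshMesh } from "./.gen/providers/aws/appmesh-mesh";
+import { AppmeshVirtualNode } from "./.gen/providers/aws/appmesh-virtual-node";
+class ConnectionPoolSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const simple = new AppmeshMesh(this, "simple", { name: "simpleapp" });
+    new AppmeshVirtualNode(this, "serviceb1", {
+      meshName: simple.id,
+      name: "serviceBv1",
+      spec: {
+        listener: [
+          {
+            portMapping: { port: 8080, protocol: "http" },
+            connectionPool: {
+              // Placeholder limits: at most 64 upstream connections and
+              // 16 queued overflow requests.
+              http: { maxConnections: 64, maxPendingRequests: 16 },
+            },
+          },
+        ],
+        serviceDiscovery: {
+          dns: { hostname: "serviceb.simpleapp.local" },
+        },
+      },
+    });
+  }
+}
+```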
+
+The `http2` connection pool object supports the following:
+
+* `maxRequests` - (Required) Maximum number of inflight requests Envoy can concurrently support across hosts in upstream cluster. Minimum value of `1`.
+
+The `tcp` connection pool object supports the following:
+
+* `maxConnections` - (Required) Maximum number of outbound TCP connections Envoy can establish concurrently with all hosts in upstream cluster. Minimum value of `1`.
+
+The `healthCheck` object supports the following:
+
+* `healthyThreshold` - (Required) Number of consecutive successful health checks that must occur before declaring the listener healthy.
+* `intervalMillis` - (Required) Time period in milliseconds between each health check execution.
+* `protocol` - (Required) Protocol for the health check request. Valid values are `http`, `http2`, `tcp` and `grpc`.
+* `timeoutMillis` - (Required) Amount of time to wait when receiving a response from the health check, in milliseconds.
+* `unhealthyThreshold` - (Required) Number of consecutive failed health checks that must occur before declaring a virtual node unhealthy.
+* `path` - (Optional) Destination path for the health check request. This is only required if the specified protocol is `http` or `http2`.
+* `port` - (Optional) Destination port for the health check request. This port must match the port defined in the `portMapping` for the listener.
+
+The `outlierDetection` object supports the following:
+
+* `baseEjectionDuration` - (Required) Base amount of time for which a host is ejected.
+* `interval` - (Required) Time interval between ejection sweep analysis.
+* `maxEjectionPercent` - (Required) Maximum percentage of hosts in load balancing pool for upstream service that can be ejected. Will eject at least one host regardless of the value.
+Minimum value of `0`. Maximum value of `100`.
+* `maxServerErrors` - (Required) Number of consecutive `5xx` errors required for ejection. Minimum value of `1`.
+
+The `baseEjectionDuration` and `interval` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `timeout` object supports the following:
+
+* `grpc` - (Optional) Timeouts for gRPC listeners.
+* `http` - (Optional) Timeouts for HTTP listeners.
+* `http2` - (Optional) Timeouts for HTTP2 listeners.
+* `tcp` - (Optional) Timeouts for TCP listeners.
+
+The `grpc` timeout object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `perRequest` - (Optional) Per request timeout.
+
+The `idle` and `perRequest` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `http` and `http2` timeout objects support the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+* `perRequest` - (Optional) Per request timeout.
+
+The `idle` and `perRequest` objects support the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `tcp` timeout object supports the following:
+
+* `idle` - (Optional) Idle timeout. An idle timeout bounds the amount of time that a connection may be idle.
+
+The `idle` object supports the following:
+
+* `unit` - (Required) Unit of time. Valid values: `ms`, `s`.
+* `value` - (Required) Number of time units. Minimum value of `0`.
+
+The `tls` object supports the following:
+
+* `certificate` - (Required) Listener's TLS certificate.
+* `mode` - (Required) Listener's TLS mode. Valid values: `DISABLED`, `PERMISSIVE`, `STRICT`.
+* `validation` - (Optional) Listener's Transport Layer Security (TLS) validation context.
+
+The `certificate` object supports the following:
+
+* `acm` - (Optional) An AWS Certificate Manager (ACM) certificate.
+* `file` - (Optional) Local file certificate.
+* `sds` - (Optional) A [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `acm` object supports the following:
+
+* `certificateArn` - (Required) ARN for the certificate.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate chain for the certificate. Must be between 1 and 255 characters in length.
+* `privateKey` - (Required) Private key for a certificate stored on the file system of the virtual node that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret requested from the Secret Discovery Service provider representing Transport Layer Security (TLS) materials like a certificate or certificate chain.
+
+The `validation` object supports the following:
+
+* `subjectAlternativeNames` - (Optional) SANs for a TLS validation context.
+* `trust` - (Required) TLS validation context trust.
+
+The `subjectAlternativeNames` object supports the following:
+
+* `match` - (Required) Criteria for determining a SAN's match.
+
+The `match` object supports the following:
+
+* `exact` - (Required) Values sent must match the specified values exactly.
+
+The `trust` object supports the following:
+
+* `file` - (Optional) TLS validation context trust for a local file certificate.
+* `sds` - (Optional) TLS validation context trust for a [Secret Discovery Service](https://www.envoyproxy.io/docs/envoy/latest/configuration/security/secret#secret-discovery-service-sds) certificate.
+
+The `file` object supports the following:
+
+* `certificateChain` - (Required) Certificate trust chain for a certificate stored on the file system of the mesh endpoint that the proxy is running on. Must be between 1 and 255 characters in length.
+
+The `sds` object supports the following:
+
+* `secretName` - (Required) Name of the secret for a virtual node's Transport Layer Security (TLS) Secret Discovery Service validation context trust.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the virtual node.
+* `arn` - ARN of the virtual node.
+* `createdDate` - Creation date of the virtual node.
+* `lastUpdatedDate` - Last update date of the virtual node.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual nodes using `meshName` together with the virtual node's `name`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Mesh virtual nodes using `meshName` together with the virtual node's `name`. For example:
+
+```console
+% terraform import aws_appmesh_virtual_node.serviceb1 simpleapp/serviceBv1
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appmesh_virtual_router.html.markdown b/website/docs/cdktf/typescript/r/appmesh_virtual_router.html.markdown
new file mode 100644
index 00000000000..f0c0c47b9ba
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appmesh_virtual_router.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_router"
+description: |-
+  Provides an AWS App Mesh virtual router resource.
+---
+
+
+
+# Resource: aws_appmesh_virtual_router
+
+Provides an AWS App Mesh virtual router resource.
+
+## Breaking Changes
+
+Because of backward incompatible API changes (read [here](https://github.com/awslabs/aws-app-mesh-examples/issues/92) and [here](https://github.com/awslabs/aws-app-mesh-examples/issues/94)), `awsAppmeshVirtualRouter` resource definitions created with provider versions earlier than v2.3.0 will need to be modified:
+
+* Remove the `serviceNames` attribute from the `spec` argument. AWS has created an `awsAppmeshVirtualService` resource for each service name. Import these resources using `terraform import`.
+
+* Add a `listener` configuration block to the `spec` argument.
+
+The Terraform state associated with existing resources will automatically be migrated.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppmeshVirtualRouter } from "./.gen/providers/aws/appmesh-virtual-router";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshVirtualRouter(this, "serviceb", {
+      meshName: simple.id,
+      name: "serviceB",
+      spec: {
+        listener: [
+          {
+            portMapping: {
+              port: 8080,
+              protocol: "http",
+            },
+          },
+        ],
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the virtual router. Must be between 1 and 255 characters in length.
+* `meshName` - (Required) Name of the service mesh in which to create the virtual router. Must be between 1 and 255 characters in length.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to.
+* `spec` - (Required) Virtual router specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `listener` - (Optional) Listeners that the virtual router is expected to receive inbound traffic from.
+Currently only one listener is supported per virtual router.
+
+The `listener` object supports the following:
+
+* `portMapping` - (Required) Port mapping information for the listener.
+
+The `portMapping` object supports the following:
+
+* `port` - (Required) Port used for the port mapping.
+* `protocol` - (Required) Protocol used for the port mapping. Valid values are `http`, `http2`, `tcp` and `grpc`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the virtual router.
+* `arn` - ARN of the virtual router.
+* `createdDate` - Creation date of the virtual router.
+* `lastUpdatedDate` - Last update date of the virtual router.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual routers using `meshName` together with the virtual router's `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Mesh virtual routers using `meshName` together with the virtual router's `name`. For example:
+
+```console
+% terraform import aws_appmesh_virtual_router.serviceb simpleapp/serviceB
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appmesh_virtual_service.html.markdown b/website/docs/cdktf/typescript/r/appmesh_virtual_service.html.markdown
new file mode 100644
index 00000000000..a058a5f5585
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appmesh_virtual_service.html.markdown
@@ -0,0 +1,139 @@
+---
+subcategory: "App Mesh"
+layout: "aws"
+page_title: "AWS: aws_appmesh_virtual_service"
+description: |-
+  Provides an AWS App Mesh virtual service resource.
+---
+
+
+
+# Resource: aws_appmesh_virtual_service
+
+Provides an AWS App Mesh virtual service resource.
+
+## Example Usage
+
+### Virtual Node Provider
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppmeshVirtualService } from "./.gen/providers/aws/appmesh-virtual-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshVirtualService(this, "servicea", {
+      meshName: simple.id,
+      name: "servicea.simpleapp.local",
+      spec: {
+        provider: {
+          virtualNode: {
+            virtualNodeName: serviceb1.name,
+          },
+        },
+      },
+    });
+  }
+}
+
+```
+
+### Virtual Router Provider
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppmeshVirtualService } from "./.gen/providers/aws/appmesh-virtual-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppmeshVirtualService(this, "servicea", {
+      meshName: simple.id,
+      name: "servicea.simpleapp.local",
+      spec: {
+        provider: {
+          virtualRouter: {
+            virtualRouterName: serviceb.name,
+          },
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name to use for the virtual service. Must be between 1 and 255 characters in length.
+* `meshName` - (Required) Name of the service mesh in which to create the virtual service. Must be between 1 and 255 characters in length.
+* `meshOwner` - (Optional) AWS account ID of the service mesh's owner. Defaults to the account ID the [AWS provider][1] is currently connected to.
+* `spec` - (Required) Virtual service specification to apply.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `spec` object supports the following:
+
+* `provider` - (Optional) App Mesh object that is acting as the provider for a virtual service. You can specify a single virtual node or virtual router.
+
+The `provider` object supports the following:
+
+* `virtualNode` - (Optional) Virtual node associated with a virtual service.
+* `virtualRouter` - (Optional) Virtual router associated with a virtual service.
+
+The `virtualNode` object supports the following:
+
+* `virtualNodeName` - (Required) Name of the virtual node that is acting as a service provider. Must be between 1 and 255 characters in length.
+
+The `virtualRouter` object supports the following:
+
+* `virtualRouterName` - (Required) Name of the virtual router that is acting as a service provider. Must be between 1 and 255 characters in length.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the virtual service.
+* `arn` - ARN of the virtual service.
+* `createdDate` - Creation date of the virtual service.
+* `lastUpdatedDate` - Last update date of the virtual service.
+* `resourceOwner` - Resource owner's AWS account ID.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
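+
+Because the exported `name` doubles as the service's mesh-wide identifier, other mesh resources can reference it directly. The following hand-written sketch (placeholder names, not converter output) wires a provider node, the virtual service, and a consumer node together:
+
+```typescript
+// Hand-written sketch (not 'cdktf convert' output); names are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AppmeshMesh } from "./.gen/providers/aws/appmesh-mesh";
+import { AppmeshVirtualNode } from "./.gen/providers/aws/appmesh-virtual-node";
+import { AppmeshVirtualService } from "./.gen/providers/aws/appmesh-virtual-service";
+class VirtualServiceWiringSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const simple = new AppmeshMesh(this, "simple", { name: "simpleapp" });
+    // Provider node that actually serves the traffic.
+    const serviceb1 = new AppmeshVirtualNode(this, "serviceb1", {
+      meshName: simple.id,
+      name: "serviceBv1",
+      spec: {
+        listener: [{ portMapping: { port: 8080, protocol: "http" } }],
+        serviceDiscovery: { dns: { hostname: "serviceb.simpleapp.local" } },
+      },
+    });
+    const serviceb = new AppmeshVirtualService(this, "serviceb", {
+      meshName: simple.id,
+      name: "serviceb.simpleapp.local",
+      spec: {
+        provider: { virtualNode: { virtualNodeName: serviceb1.name } },
+      },
+    });
+    // Consumer node that lists the virtual service as a backend.
+    new AppmeshVirtualNode(this, "servicea", {
+      meshName: simple.id,
+      name: "serviceA",
+      spec: {
+        backend: [{ virtualService: { virtualServiceName: serviceb.name } }],
+        serviceDiscovery: { dns: { hostname: "servicea.simpleapp.local" } },
+      },
+    });
+  }
+}
+```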
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Mesh virtual services using `meshName` together with the virtual service's `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Mesh virtual services using `meshName` together with the virtual service's `name`. For example:
+
+```console
+% terraform import aws_appmesh_virtual_service.servicea simpleapp/servicea.simpleapp.local
+```
+
+[1]: /docs/providers/aws/index.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apprunner_auto_scaling_configuration_version.html.markdown b/website/docs/cdktf/typescript/r/apprunner_auto_scaling_configuration_version.html.markdown
new file mode 100644
index 00000000000..5da3af55ee1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apprunner_auto_scaling_configuration_version.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_auto_scaling_configuration_version"
+description: |-
+  Manages an App Runner AutoScaling Configuration Version.
+---
+
+
+
+# Resource: aws_apprunner_auto_scaling_configuration_version
+
+Manages an App Runner AutoScaling Configuration Version.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApprunnerAutoScalingConfigurationVersion } from "./.gen/providers/aws/apprunner-auto-scaling-configuration-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApprunnerAutoScalingConfigurationVersion(this, "example", {
+      autoScalingConfigurationName: "example",
+      maxConcurrency: 50,
+      maxSize: 10,
+      minSize: 2,
+      tags: {
+        Name: "example-apprunner-autoscaling",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `autoScalingConfigurationName` - (Required, Forces new resource) Name of the auto scaling configuration.
+* `maxConcurrency` - (Optional, Forces new resource) Maximum number of concurrent requests that you want an instance to process. When the number of concurrent requests goes over this limit, App Runner scales up your service.
+* `maxSize` - (Optional, Forces new resource) Maximum number of instances that App Runner provisions for your service.
+* `minSize` - (Optional, Forces new resource) Minimum number of instances that App Runner provisions for your service.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of this auto scaling configuration version.
+* `autoScalingConfigurationRevision` - The revision of this auto scaling configuration.
+* `latest` - Whether the auto scaling configuration has the highest `autoScalingConfigurationRevision` among all configurations that share the same `autoScalingConfigurationName`.
+* `status` - Current state of the auto scaling configuration. An `INACTIVE` configuration revision has been deleted and can't be used. It is permanently removed some time after deletion.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner AutoScaling Configuration Versions using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Runner AutoScaling Configuration Versions using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_auto_scaling_configuration_version.example "arn:aws:apprunner:us-east-1:1234567890:autoscalingconfiguration/example/1/69bdfe0115224b0db49398b7beb68e0f"
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apprunner_connection.html.markdown b/website/docs/cdktf/typescript/r/apprunner_connection.html.markdown
new file mode 100644
index 00000000000..dbaf5726323
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apprunner_connection.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_connection"
+description: |-
+  Manages an App Runner Connection.
+---
+
+
+
+# Resource: aws_apprunner_connection
+
+Manages an App Runner Connection.
+
+~> **NOTE:** After creation, you must complete the authentication handshake using the App Runner console.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApprunnerConnection } from "./.gen/providers/aws/apprunner-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApprunnerConnection(this, "example", {
+      connectionName: "example",
+      providerType: "GITHUB",
+      tags: {
+        Name: "example-apprunner-connection",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `connectionName` - (Required) Name of the connection.
+* `providerType` - (Required) Source repository provider. Valid values: `GITHUB`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
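+
+The connection's `arn`, exported below, is what an [`awsApprunnerService`](apprunner_service.html) consumes in its `authenticationConfiguration`. A hand-written sketch of that wiring (the repository URL is a placeholder, and the console handshake must be complete before the service can deploy):
+
+```typescript
+// Hand-written sketch (not 'cdktf convert' output); names and URLs are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ApprunnerConnection } from "./.gen/providers/aws/apprunner-connection";
+import { ApprunnerService } from "./.gen/providers/aws/apprunner-service";
+class ConnectionWiringSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new ApprunnerConnection(this, "example", {
+      connectionName: "example",
+      providerType: "GITHUB",
+    });
+    new ApprunnerService(this, "example_service", {
+      serviceName: "example",
+      sourceConfiguration: {
+        authenticationConfiguration: {
+          connectionArn: example.arn,
+        },
+        codeRepository: {
+          // Configuration is read from the repository's apprunner.yaml.
+          codeConfiguration: { configurationSource: "REPOSITORY" },
+          repositoryUrl: "https://github.com/example/my-app",
+          sourceCodeVersion: { type: "BRANCH", value: "main" },
+        },
+      },
+    });
+  }
+}
+```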
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the connection.
+* `status` - Current state of the App Runner connection. When the state is `AVAILABLE`, you can use the connection to create an [`awsApprunnerService` resource](apprunner_service.html).
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Connections using the `connectionName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Runner Connections using the `connectionName`. For example:
+
+```console
+% terraform import aws_apprunner_connection.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apprunner_custom_domain_association.html.markdown b/website/docs/cdktf/typescript/r/apprunner_custom_domain_association.html.markdown
new file mode 100644
index 00000000000..204ea21e7f1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apprunner_custom_domain_association.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_custom_domain_association"
+description: |-
+  Manages an App Runner Custom Domain association.
+---
+
+
+
+# Resource: aws_apprunner_custom_domain_association
+
+Manages an App Runner Custom Domain association.
+
+~> **NOTE:** After creation, you must use the information in the `certificateValidationRecords` attribute to add CNAME records to your Domain Name System (DNS). For each mapped domain name, add a mapping to the target App Runner subdomain (found in the `dnsTarget` attribute) and one or more certificate validation records. App Runner then performs DNS validation to verify that you own or control the domain name you associated. App Runner tracks domain validity in a certificate stored in AWS Certificate Manager (ACM).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApprunnerCustomDomainAssociation } from "./.gen/providers/aws/apprunner-custom-domain-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApprunnerCustomDomainAssociation(this, "example", {
+      domainName: "example.com",
+      serviceArn: Token.asString(awsApprunnerServiceExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `domainName` - (Required) Custom domain endpoint to associate. Specify a base domain e.g., `example.com` or a subdomain e.g., `subdomain.example.com`.
+* `enableWwwSubdomain` - (Optional) Whether to associate the `www` subdomain with the App Runner service in addition to the base domain.
Defaults to `true`.
+* `serviceArn` - (Required) ARN of the App Runner service.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `domainName` and `serviceArn` separated by a comma (`,`).
+* `certificateValidationRecords` - A set of certificate CNAME records used for this domain name. See [Certificate Validation Records](#certificate-validation-records) below for more details.
+* `dnsTarget` - App Runner subdomain of the App Runner service. The custom domain name is mapped to this target name. This attribute is only available if the resource was created (not imported) with Terraform.
+
+### Certificate Validation Records
+
+The configuration block consists of the following arguments:
+
+* `name` - Certificate CNAME record name.
+* `status` - Current state of the certificate CNAME record validation. It should change to `SUCCESS` after App Runner completes validation with your DNS.
+* `type` - Record type, always `CNAME`.
+* `value` - Certificate CNAME record value.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Custom Domain Associations using the `domainName` and `serviceArn` separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Runner Custom Domain Associations using the `domainName` and `serviceArn` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_apprunner_custom_domain_association.example example.com,arn:aws:apprunner:us-east-1:123456789012:service/example-app/8fe1e10304f84fd2b0df550fe98a71fa
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apprunner_observability_configuration.html.markdown b/website/docs/cdktf/typescript/r/apprunner_observability_configuration.html.markdown
new file mode 100644
index 00000000000..c0960f28dc9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apprunner_observability_configuration.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_observability_configuration"
+description: |-
+  Manages an App Runner Observability Configuration.
+---
+
+
+
+# Resource: aws_apprunner_observability_configuration
+
+Manages an App Runner Observability Configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApprunnerObservabilityConfiguration } from "./.gen/providers/aws/apprunner-observability-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApprunnerObservabilityConfiguration(this, "example", {
+      observabilityConfigurationName: "example",
+      tags: {
+        Name: "example-apprunner-observability-configuration",
+      },
+      traceConfiguration: {
+        vendor: "AWSXRAY",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `observabilityConfigurationName` - (Required, Forces new resource) Name of the observability configuration.
+* `traceConfiguration` - (Optional) Configuration of the tracing feature within this observability configuration. If you don't specify it, App Runner doesn't enable tracing. See [Trace Configuration](#trace-configuration) below for more details.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Trace Configuration
+
+The `traceConfiguration` block supports the following argument:
+
+* `vendor` - (Required) Implementation provider chosen for tracing App Runner services. Valid values: `AWSXRAY`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of this observability configuration.
+* `observabilityConfigurationRevision` - The revision of this observability configuration.
+* `latest` - Whether the observability configuration has the highest `observabilityConfigurationRevision` among all configurations that share the same `observabilityConfigurationName`.
+* `status` - Current state of the observability configuration. An `INACTIVE` configuration revision has been deleted and can't be used. It is permanently removed some time after deletion.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Observability Configuration using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Runner Observability Configuration using the `arn`.
For example: + +```console +% terraform import aws_apprunner_observability_configuration.example arn:aws:apprunner:us-east-1:1234567890:observabilityconfiguration/example/1/d75bc7ea55b71e724fe5c23452fe22a1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/apprunner_service.html.markdown b/website/docs/cdktf/typescript/r/apprunner_service.html.markdown new file mode 100644 index 00000000000..c523939658b --- /dev/null +++ b/website/docs/cdktf/typescript/r/apprunner_service.html.markdown @@ -0,0 +1,330 @@ +--- +subcategory: "App Runner" +layout: "aws" +page_title: "AWS: aws_apprunner_service" +description: |- + Manages an App Runner Service. +--- + + + +# Resource: aws_apprunner_service + +Manages an App Runner Service. + +## Example Usage + +### Service with a Code Repository Source + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApprunnerService } from "./.gen/providers/aws/apprunner-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApprunnerService(this, "example", { + networkConfiguration: { + egressConfiguration: { + egressType: "VPC", + vpcConnectorArn: connector.arn, + }, + }, + serviceName: "example", + sourceConfiguration: { + authenticationConfiguration: { + connectionArn: Token.asString(awsApprunnerConnectionExample.arn), + }, + codeRepository: { + codeConfiguration: { + codeConfigurationValues: { + buildCommand: "python setup.py develop", + port: "8000", + runtime: "PYTHON_3", + startCommand: "python runapp.py", + }, + configurationSource: "API", + }, + repositoryUrl: "https://github.com/example/my-example-python-app", + sourceCodeVersion: { + type: "BRANCH", + value: "main", + }, + }, + }, + tags: { + Name: "example-apprunner-service", + }, + }); + } +} + +``` + +### Service with an Image Repository Source + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApprunnerService } from "./.gen/providers/aws/apprunner-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ApprunnerService(this, "example", { + serviceName: "example", + sourceConfiguration: { + autoDeploymentsEnabled: false, + imageRepository: { + imageConfiguration: { + port: "8000", + }, + imageIdentifier: + "public.ecr.aws/aws-containers/hello-app-runner:latest", + imageRepositoryType: "ECR_PUBLIC", + }, + }, + tags: { + Name: "example-apprunner-service", + }, + }); + } +} + +``` + +### Service with Observability Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ApprunnerObservabilityConfiguration } from "./.gen/providers/aws/apprunner-observability-configuration"; +import { ApprunnerService } from "./.gen/providers/aws/apprunner-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ApprunnerObservabilityConfiguration(this, "example", { + observabilityConfigurationName: "example", + traceConfiguration: { + vendor: "AWSXRAY", + }, + }); + const awsApprunnerServiceExample = new ApprunnerService(this, "example_1", { + observabilityConfiguration: { + observabilityConfigurationArn: example.arn, + observabilityEnabled: true, + }, + serviceName: "example", + sourceConfiguration: { + autoDeploymentsEnabled: false, + imageRepository: { + imageConfiguration: { + port: "8000", + }, + imageIdentifier: + "public.ecr.aws/aws-containers/hello-app-runner:latest", + imageRepositoryType: "ECR_PUBLIC", + }, + }, + tags: { + Name: "example-apprunner-service", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsApprunnerServiceExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `serviceName` - (Forces new resource) Name of the service. +* `sourceConfiguration` - The source to deploy to the App Runner service. Can be a code or an image repository. See [Source Configuration](#source-configuration) below for more details. + +The following arguments are optional: + +* `autoScalingConfigurationArn` - ARN of an App Runner automatic scaling configuration resource that you want to associate with your service. If not provided, App Runner associates the latest revision of a default auto scaling configuration. +* `encryptionConfiguration` - (Forces new resource) An optional custom encryption key that App Runner uses to encrypt the copy of your source repository that it maintains and your service logs. By default, App Runner uses an AWS managed CMK. See [Encryption Configuration](#encryption-configuration) below for more details. +* `healthCheckConfiguration` - (Forces new resource) Settings of the health check that AWS App Runner performs to monitor the health of your service. See [Health Check Configuration](#health-check-configuration) below for more details. +* `instanceConfiguration` - The runtime configuration of instances (scaling units) of the App Runner service. See [Instance Configuration](#instance-configuration) below for more details. +* `networkConfiguration` - Configuration settings related to network traffic of the web application that the App Runner service runs. See [Network Configuration](#network-configuration) below for more details. +* `observabilityConfiguration` - The observability configuration of your service. See [Observability Configuration](#observability-configuration) below for more details. +* `tags` - Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Encryption Configuration + +The `encryptionConfiguration` block supports the following argument: + +* `kmsKey` - (Required) ARN of the KMS key used for encryption. 
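+
+A hand-written sketch of `encryptionConfiguration` (not converter output; the inline KMS key is a stand-in for whatever customer managed key you actually use):
+
+```typescript
+// Hand-written sketch (not 'cdktf convert' output); names are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ApprunnerService } from "./.gen/providers/aws/apprunner-service";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class EncryptionSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const key = new KmsKey(this, "example", {
+      description: "Key for App Runner source and log encryption",
+    });
+    new ApprunnerService(this, "example_service", {
+      // Changing the encryption configuration forces a new service.
+      encryptionConfiguration: { kmsKey: key.arn },
+      serviceName: "example",
+      sourceConfiguration: {
+        autoDeploymentsEnabled: false,
+        imageRepository: {
+          imageConfiguration: { port: "8000" },
+          imageIdentifier:
+            "public.ecr.aws/aws-containers/hello-app-runner:latest",
+          imageRepositoryType: "ECR_PUBLIC",
+        },
+      },
+    });
+  }
+}
+```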
+
+### Health Check Configuration
+
+The `healthCheckConfiguration` block supports the following arguments:
+
+* `healthyThreshold` - (Optional) Number of consecutive checks that must succeed before App Runner decides that the service is healthy. Defaults to 1. Minimum value of 1. Maximum value of 20.
+* `interval` - (Optional) Time interval, in seconds, between health checks. Defaults to 5. Minimum value of 1. Maximum value of 20.
+* `path` - (Optional) URL to send requests to for health checks. Defaults to `/`. Minimum length of 0. Maximum length of 51200.
+* `protocol` - (Optional) IP protocol that App Runner uses to perform health checks for your service. Valid values: `TCP`, `HTTP`. Defaults to `TCP`. If you set protocol to `HTTP`, App Runner sends health check requests to the HTTP path specified by `path`.
+* `timeout` - (Optional) Time, in seconds, to wait for a health check response before deciding it failed. Defaults to 2. Minimum value of 1. Maximum value of 20.
+* `unhealthyThreshold` - (Optional) Number of consecutive checks that must fail before App Runner decides that the service is unhealthy. Defaults to 5. Minimum value of 1. Maximum value of 20.
+
+### Instance Configuration
+
+The `instanceConfiguration` block supports the following arguments:
+
+* `cpu` - (Optional) Number of CPU units reserved for each instance of your App Runner service represented as a String. Defaults to `1024`. Valid values: `256|512|1024|2048|4096|(0.25|0.5|1|2|4) vCPU`.
+* `instanceRoleArn` - (Optional) ARN of an IAM role that provides permissions to your App Runner service. These are permissions that your code needs when it calls any AWS APIs.
+* `memory` - (Optional) Amount of memory, in MB or GB, reserved for each instance of your App Runner service. Defaults to `2048`. Valid values: `512|1024|2048|3072|4096|6144|8192|10240|12288|(0.5|1|2|3|4|6|8|10|12) GB`.
+
+### Source Configuration
+
+The `sourceConfiguration` block supports the following arguments:
+
+~> **Note:** Either `codeRepository` or `imageRepository` must be specified (but not both).
+
+* `authenticationConfiguration` - (Optional) Describes resources needed to authenticate access to some source repositories. See [Authentication Configuration](#authentication-configuration) below for more details.
+* `autoDeploymentsEnabled` - (Optional) Whether continuous integration from the source repository is enabled for the App Runner service. If set to `true`, each repository change (source code commit or new image version) starts a deployment. Defaults to `true`.
+* `codeRepository` - (Optional) Description of a source code repository. See [Code Repository](#code-repository) below for more details.
+* `imageRepository` - (Optional) Description of a source image repository. See [Image Repository](#image-repository) below for more details.
+
+### Authentication Configuration
+
+The `authenticationConfiguration` block supports the following arguments:
+
+* `accessRoleArn` - (Optional) ARN of the IAM role that grants the App Runner service access to a source repository. Required for ECR image repositories (but not for ECR Public).
+* `connectionArn` - (Optional) ARN of the App Runner connection that enables the App Runner service to connect to a source repository. Required for GitHub code repositories.
+
+### Network Configuration
+
+The `networkConfiguration` block supports the following arguments:
+
+* `ingressConfiguration` - (Optional) Network configuration settings for inbound network traffic.
See [Ingress Configuration](#ingress-configuration) below for more details.
+* `egressConfiguration` - (Optional) Network configuration settings for outbound message traffic. See [Egress Configuration](#egress-configuration) below for more details.
+* `egressType` - (Optional) Type of egress configuration. Set to `DEFAULT` for access to resources hosted on public networks. Set to `VPC` to associate your service with a custom VPC specified by `vpcConnectorArn`.
+* `vpcConnectorArn` - ARN of the App Runner VPC connector that you want to associate with your App Runner service. Only valid when `egressType = VPC`.
+
+### Ingress Configuration
+
+The `ingressConfiguration` block supports the following argument:
+
+* `isPubliclyAccessible` - (Required) Specifies whether your App Runner service is publicly accessible. Set it to `true` to make the service publicly accessible, or to `false` to make it accessible only from within an Amazon VPC.
+
+### Egress Configuration
+
+The `egressConfiguration` block supports the following arguments:
+
+* `egressType` - The type of egress configuration. Valid values are: `DEFAULT` and `VPC`.
+* `vpcConnectorArn` - The Amazon Resource Name (ARN) of the App Runner VPC connector that you want to associate with your App Runner service. Only valid when `egressType = VPC`.
+
+### Observability Configuration
+
+The `observabilityConfiguration` block supports the following arguments:
+
+* `observabilityEnabled` - (Required) When `true`, an observability configuration resource is associated with the service.
+* `observabilityConfigurationArn` - (Optional) ARN of the observability configuration that is associated with the service. Specified only when `observabilityEnabled` is `true`.
+
+### Code Repository
+
+The `codeRepository` block supports the following arguments:
+
+* `codeConfiguration` - (Optional) Configuration for building and running the service from a source code repository. See [Code Configuration](#code-configuration) below for more details.
+* `repositoryUrl` - (Required) Location of the repository that contains the source code.
+* `sourceCodeVersion` - (Required) Version that should be used within the source code repository. See [Source Code Version](#source-code-version) below for more details.
+
+### Image Repository
+
+The `imageRepository` block supports the following arguments:
+
+* `imageConfiguration` - (Optional) Configuration for running the identified image. See [Image Configuration](#image-configuration) below for more details.
+* `imageIdentifier` - (Required) Identifier of an image. For an image in Amazon Elastic Container Registry (Amazon ECR), this is an image name. For the
+  image name format, see Pulling an image in the Amazon ECR User Guide.
+* `imageRepositoryType` - (Required) Type of the image repository. This reflects the repository provider and whether the repository is private or public. Valid values: `ECR`, `ECR_PUBLIC`.
+
+### Code Configuration
+
+The `codeConfiguration` block supports the following arguments:
+
+* `codeConfigurationValues` - (Optional) Basic configuration for building and running the App Runner service. Use this parameter to quickly launch an App Runner service without providing an apprunner.yaml file in the source code repository (or ignoring the file if it exists). See [Code Configuration Values](#code-configuration-values) below for more details.
+* `configurationSource` - (Required) Source of the App Runner configuration. Valid values: `REPOSITORY`, `API`.
Values are interpreted as follows:
+    * `REPOSITORY` - App Runner reads configuration values from the apprunner.yaml file in the
+    source code repository and ignores the CodeConfigurationValues parameter.
+    * `API` - App Runner uses configuration values provided in the CodeConfigurationValues
+    parameter and ignores the apprunner.yaml file in the source code repository.
+
+### Code Configuration Values
+
+The `codeConfigurationValues` block supports the following arguments:
+
+* `buildCommand` - (Optional) Command App Runner runs to build your application.
+* `port` - (Optional) Port that your application listens to in the container. Defaults to `"8080"`.
+* `runtime` - (Required) Runtime environment type for building and running an App Runner service. Represents a programming language runtime. Valid values: `PYTHON_3`, `NODEJS_12`, `NODEJS_14`, `NODEJS_16`, `CORRETTO_8`, `CORRETTO_11`, `GO_1`, `DOTNET_6`, `PHP_81`, `RUBY_31`.
+* `runtimeEnvironmentSecrets` - (Optional) Secrets and parameters available to your service as environment variables. A map of key/value pairs, where the key is the desired name of the Secret in the environment (i.e. it does not have to match the name of the secret in Secrets Manager or SSM Parameter Store), and the value is the ARN of the secret from AWS Secrets Manager or the ARN of the parameter in AWS SSM Parameter Store.
+* `runtimeEnvironmentVariables` - (Optional) Environment variables available to your running App Runner service. A map of key/value pairs. Keys with a prefix of `AWSAPPRUNNER` are reserved for system use and aren't valid.
+* `startCommand` - (Optional) Command App Runner runs to start your application.
+
+### Image Configuration
+
+The `imageConfiguration` block supports the following arguments:
+
+* `port` - (Optional) Port that your application listens to in the container. Defaults to `"8080"`.
+* `runtimeEnvironmentSecrets` - (Optional) Secrets and parameters available to your service as environment variables. A map of key/value pairs, where the key is the desired name of the Secret in the environment (i.e. it does not have to match the name of the secret in Secrets Manager or SSM Parameter Store), and the value is the ARN of the secret from AWS Secrets Manager or the ARN of the parameter in AWS SSM Parameter Store.
+* `runtimeEnvironmentVariables` - (Optional) Environment variables available to your running App Runner service. A map of key/value pairs. Keys with a prefix of `AWSAPPRUNNER` are reserved for system use and aren't valid.
+* `startCommand` - (Optional) Command App Runner runs to start the application in the source image. If specified, this command overrides the Docker image’s default start command.
+
+### Source Code Version
+
+The `sourceCodeVersion` block supports the following arguments:
+
+* `type` - (Required) Type of version identifier. For a git-based repository, branches represent versions. Valid values: `BRANCH`.
+* `value` - (Required) Source code version. For a git-based repository, a branch name maps to a specific version. App Runner uses the most recent commit to the branch.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the App Runner service.
+* `serviceId` - An alphanumeric ID that App Runner generated for this service. Unique within the AWS Region.
+* `serviceUrl` - Subdomain URL that App Runner generated for this service. You can use this URL to access your service web application.
+* `status` - Current state of the App Runner service.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner Services using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Runner Services using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_service.example arn:aws:apprunner:us-east-1:1234567890:service/example/0a03292a89764e5882c41d8f991c82fe
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apprunner_vpc_connector.html.markdown b/website/docs/cdktf/typescript/r/apprunner_vpc_connector.html.markdown
new file mode 100644
index 00000000000..1651a3fefa2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apprunner_vpc_connector.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_vpc_connector"
+description: |-
+  Manages an App Runner VPC Connector.
+---
+
+
+
+# Resource: aws_apprunner_vpc_connector
+
+Manages an App Runner VPC Connector.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApprunnerVpcConnector } from "./.gen/providers/aws/apprunner-vpc-connector";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApprunnerVpcConnector(this, "connector", {
+      securityGroups: ["sg1", "sg2"],
+      subnets: ["subnet1", "subnet2"],
+      vpcConnectorName: "name",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `vpcConnectorName` - (Required) Name for the VPC connector.
+* `subnets` - (Required) List of IDs of subnets that App Runner should use when it associates your service with a custom Amazon VPC. Specify IDs of subnets of a single Amazon VPC. App Runner determines the Amazon VPC from the subnets you specify.
+* `securityGroups` - List of IDs of security groups that App Runner should use for access to AWS resources under the specified subnets. If not specified, App Runner uses the default security group of the Amazon VPC. The default security group allows all outbound traffic.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the VPC connector.
+* `status` - Current state of the VPC connector. If the status of a connector revision is `INACTIVE`, it was deleted and can't be used.
Inactive connector revisions are permanently removed some time after they are deleted.
+* `vpcConnectorRevision` - The revision of the VPC connector. It's unique among all the active connectors ("Status": "ACTIVE") that share the same Name.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an App Runner VPC connector using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an App Runner VPC connector using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_vpc_connector.example arn:aws:apprunner:us-east-1:1234567890:vpcconnector/example/1/0a03292a89764e5882c41d8f991c82fe
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/apprunner_vpc_ingress_connection.html.markdown b/website/docs/cdktf/typescript/r/apprunner_vpc_ingress_connection.html.markdown
new file mode 100644
index 00000000000..c13f2299c68
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/apprunner_vpc_ingress_connection.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "App Runner"
+layout: "aws"
+page_title: "AWS: aws_apprunner_vpc_ingress_connection"
+description: |-
+  Manages an App Runner VPC Ingress Connection.
+---
+
+
+
+# Resource: aws_apprunner_vpc_ingress_connection
+
+Manages an App Runner VPC Ingress Connection.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ApprunnerVpcIngressConnection } from "./.gen/providers/aws/apprunner-vpc-ingress-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ApprunnerVpcIngressConnection(this, "example", {
+      ingressVpcConfiguration: {
+        vpcEndpointId: apprunner.id,
+        vpcId: defaultVar.id,
+      },
+      name: "example",
+      serviceArn: Token.asString(awsApprunnerServiceExample.arn),
+      tags: {
+        foo: "bar",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) A name for the VPC Ingress Connection resource. It must be unique across all the active VPC Ingress Connections in your AWS account in the AWS Region.
+* `serviceArn` - (Required) The Amazon Resource Name (ARN) for this App Runner service that is used to create the VPC Ingress Connection resource.
+* `ingressVpcConfiguration` - (Required) Specifications for the customer’s Amazon VPC and the related AWS PrivateLink VPC endpoint that are used to create the VPC Ingress Connection resource. See [Ingress VPC Configuration](#ingress-vpc-configuration) below for more details.
+* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Ingress VPC Configuration
+
+The `ingressVpcConfiguration` block supports the following arguments:
+
+* `vpcId` - (Required) The ID of the VPC that is used for the VPC endpoint.
+* `vpcEndpointId` - (Required) The ID of the VPC endpoint that your App Runner service connects to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the VPC Ingress Connection.
+* `domainName` - The domain name associated with the VPC Ingress Connection resource.
+* `status` - The current status of the VPC Ingress Connection.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import App Runner VPC Ingress Connection using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import App Runner VPC Ingress Connection using the `arn`. For example:
+
+```console
+% terraform import aws_apprunner_vpc_ingress_connection.example "arn:aws:apprunner:us-west-2:837424938642:vpcingressconnection/example/b379f86381d74825832c2e82080342fa"
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appstream_directory_config.html.markdown b/website/docs/cdktf/typescript/r/appstream_directory_config.html.markdown
new file mode 100644
index 00000000000..82c54c124c5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appstream_directory_config.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_directory_config"
+description: |-
+  Provides an AppStream Directory Config
+---
+
+
+
+# Resource: aws_appstream_directory_config
+
+Provides an AppStream Directory Config.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppstreamDirectoryConfig } from "./.gen/providers/aws/appstream-directory-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppstreamDirectoryConfig(this, "example", {
+      directoryName: "NAME OF DIRECTORY",
+      organizationalUnitDistinguishedNames: ["DISTINGUISHED NAME"],
+      serviceAccountCredentials: {
+        accountName: "NAME OF ACCOUNT",
+        accountPassword: "PASSWORD OF ACCOUNT",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `directoryName` - (Required) Fully qualified name of the directory.
+* `organizationalUnitDistinguishedNames` - (Required) Distinguished names of the organizational units for computer accounts. +* `serviceAccountCredentials` - (Required) Configuration block for the name of the directory and organizational unit (OU) to use to join the directory config to a Microsoft Active Directory domain. See [`serviceAccountCredentials`](#service_account_credentials) below. + +### `serviceAccountCredentials` + +* `accountName` - (Required) User name of the account. This account must have the following privileges: create computer objects, join computers to the domain, and change/reset the password on descendant computer objects for the organizational units specified. +* `accountPassword` - (Required) Password for the account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier (ID) of the appstream directory config. +* `createdTime` - Date and time, in UTC and extended RFC 3339 format, when the directory config was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppstreamDirectoryConfig` using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppstreamDirectoryConfig` using the id. For example: + +```console +% terraform import aws_appstream_directory_config.example directoryNameExample +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appstream_fleet.html.markdown b/website/docs/cdktf/typescript/r/appstream_fleet.html.markdown new file mode 100644 index 00000000000..9d0dd2308ca --- /dev/null +++ b/website/docs/cdktf/typescript/r/appstream_fleet.html.markdown @@ -0,0 +1,131 @@ +--- +subcategory: "AppStream 2.0" +layout: "aws" +page_title: "AWS: aws_appstream_fleet" +description: |- + Provides an AppStream fleet +--- + + + +# Resource: aws_appstream_fleet + +Provides an AppStream fleet. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppstreamFleet } from "./.gen/providers/aws/appstream-fleet"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppstreamFleet(this, "test_fleet", { + computeCapacity: { + desiredInstances: 1, + }, + description: "test fleet", + displayName: "test-fleet", + enableDefaultInternetAccess: false, + fleetType: "ON_DEMAND", + idleDisconnectTimeoutInSeconds: 60, + imageName: "Amazon-AppStream2-Sample-Image-02-04-2019", + instanceType: "stream.standard.large", + maxUserDurationInSeconds: 600, + name: "test-fleet", + tags: { + TagName: "tag-value", + }, + vpcConfig: { + subnetIds: ["subnet-06e9b13400c225127"], + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `computeCapacity` - (Required) Configuration block for the desired capacity of the fleet. See below. 
+* `instanceType` - (Required) Instance type to use when launching fleet instances.
+* `name` - (Required) Unique name for the fleet.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description to display.
+* `disconnectTimeoutInSeconds` - (Optional) Amount of time that a streaming session remains active after users disconnect.
+* `displayName` - (Optional) Human-readable friendly name for the AppStream fleet.
+* `domainJoinInfo` - (Optional) Configuration block for the name of the directory and organizational unit (OU) to use to join the fleet to a Microsoft Active Directory domain. See below.
+* `enableDefaultInternetAccess` - (Optional) Enables or disables default internet access for the fleet.
+* `fleetType` - (Optional) Fleet type. Valid values are: `onDemand`, `alwaysOn`.
+* `iamRoleArn` - (Optional) ARN of the IAM role to apply to the fleet.
+* `idleDisconnectTimeoutInSeconds` - (Optional) Amount of time that users can be idle (inactive) before they are disconnected from their streaming session and the `disconnectTimeoutInSeconds` time interval begins.
+* `imageName` - (Optional) Name of the image used to create the fleet.
+* `imageArn` - (Optional) ARN of the public, private, or shared image to use.
+* `streamView` - (Optional) AppStream 2.0 view that is displayed to your users when they stream from the fleet. When `app` is specified, only the windows of applications opened by users display. When `desktop` is specified, the standard desktop that is provided by the operating system displays. If not specified, defaults to `app`.
+* `maxUserDurationInSeconds` - (Optional) Maximum amount of time that a streaming session can remain active, in seconds.
+* `vpcConfig` - (Optional) Configuration block for the VPC configuration for the fleet. See below.
+* `tags` - (Optional) Map of tags to attach to AppStream instances.
+
+### `computeCapacity`
+
+* `desiredInstances` - (Required) Desired number of streaming instances.
+
+### `domainJoinInfo`
+
+* `directoryName` - (Optional) Fully qualified name of the directory (for example, corp.example.com).
+* `organizationalUnitDistinguishedName` - (Optional) Distinguished name of the organizational unit for computer accounts.
+
+### `vpcConfig`
+
+* `securityGroupIds` - Identifiers of the security groups for the fleet or image builder.
+* `subnetIds` - Identifiers of the subnets to which a network interface is attached from the fleet instance or image builder instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier (ID) of the appstream fleet.
+* `arn` - ARN of the appstream fleet.
+* `state` - State of the fleet. Can be `starting`, `running`, `stopping`, or `stopped`.
+* `createdTime` - Date and time, in UTC and extended RFC 3339 format, when the fleet was created.
+* `computeCapacity` - Describes the capacity status for a fleet.
+
+### `computeCapacity`
+
+* `available` - Number of currently available instances that can be used to stream sessions.
+* `inUse` - Number of instances in use for streaming.
+* `running` - Total number of simultaneous streaming instances that are running.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppstreamFleet` using the id.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsAppstreamFleet` using the id. For example:
+
+```console
+% terraform import aws_appstream_fleet.example fleetNameExample
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appstream_fleet_stack_association.html.markdown b/website/docs/cdktf/typescript/r/appstream_fleet_stack_association.html.markdown
new file mode 100644
index 00000000000..ca05ca48958
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appstream_fleet_stack_association.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_fleet_stack_association"
+description: |-
+  Manages an AppStream Fleet Stack association.
+---
+
+
+
+# Resource: aws_appstream_fleet_stack_association
+
+Manages an AppStream Fleet Stack association.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppstreamFleet } from "./.gen/providers/aws/appstream-fleet";
+import { AppstreamFleetStackAssociation } from "./.gen/providers/aws/appstream-fleet-stack-association";
+import { AppstreamStack } from "./.gen/providers/aws/appstream-stack";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new AppstreamFleet(this, "example", {
+      computeCapacity: {
+        desiredInstances: 1,
+      },
+      imageName: "Amazon-AppStream2-Sample-Image-02-04-2019",
+      instanceType: "stream.standard.small",
+      name: "NAME",
+    });
+    const awsAppstreamStackExample = new AppstreamStack(this, "example_1", {
+      name: "STACK NAME",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppstreamStackExample.overrideLogicalId("example");
+    const awsAppstreamFleetStackAssociationExample =
+      new AppstreamFleetStackAssociation(this, "example_2", {
+        fleetName: example.name,
+        stackName: Token.asString(awsAppstreamStackExample.name),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppstreamFleetStackAssociationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `fleetName` - (Required) Name of the fleet.
+* `stackName` - (Required) Name of the stack.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the appstream stack fleet association, composed of the `fleetName` and `stackName` separated by a slash (`/`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppStream Stack Fleet Association using the `fleetName` and `stackName` separated by a slash (`/`).
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AppStream Stack Fleet Association using the `fleetName` and `stackName` separated by a slash (`/`). For example: + +```console +% terraform import aws_appstream_fleet_stack_association.example fleetName/stackName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appstream_image_builder.html.markdown b/website/docs/cdktf/typescript/r/appstream_image_builder.html.markdown new file mode 100644 index 00000000000..1951c0d02b5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appstream_image_builder.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "AppStream 2.0" +layout: "aws" +page_title: "AWS: aws_appstream_image_builder" +description: |- + Provides an AppStream image builder +--- + + + +# Resource: aws_appstream_image_builder + +Provides an AppStream image builder. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppstreamImageBuilder } from "./.gen/providers/aws/appstream-image-builder"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppstreamImageBuilder(this, "test_fleet", { + description: "Description of a ImageBuilder", + displayName: "Display name of a ImageBuilder", + enableDefaultInternetAccess: false, + imageName: "AppStream-WinServer2019-10-05-2022", + instanceType: "stream.standard.large", + name: "Name", + tags: { + Name: "Example Image Builder", + }, + vpcConfig: { + subnetIds: [example.id], + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `instanceType` - (Required) Instance type to use when launching the image builder. +* `name` - (Required) Unique name for the image builder. + +The following arguments are optional: + +* `accessEndpoint` - (Optional) Set of interface VPC endpoint (interface endpoint) objects. Maximum of 4. See below. +* `appstreamAgentVersion` - (Optional) Version of the AppStream 2.0 agent to use for this image builder. +* `description` - (Optional) Description to display. +* `displayName` - (Optional) Human-readable friendly name for the AppStream image builder. +* `domainJoinInfo` - (Optional) Configuration block for the name of the directory and organizational unit (OU) to use to join the image builder to a Microsoft Active Directory domain. See below. +* `enableDefaultInternetAccess` - (Optional) Enables or disables default internet access for the image builder. +* `iamRoleArn` - (Optional) ARN of the IAM role to apply to the image builder. +* `imageArn` - (Optional, Required if `imageName` not provided) ARN of the public, private, or shared image to use. +* `imageName` - (Optional, Required if `imageArn` not provided) Name of the image used to create the image builder. +* `vpcConfig` - (Optional) Configuration block for the VPC configuration for the image builder. See below. +* `tags` - (Optional) Map of tags to assign to the instance. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `accessEndpoint`
+
+The `accessEndpoint` block supports the following arguments:
+
+* `endpointType` - (Required) Type of interface endpoint.
+* `vpceId` - (Optional) Identifier (ID) of the VPC in which the interface endpoint is used.
+
+### `domainJoinInfo`
+
+The `domainJoinInfo` block supports the following arguments:
+
+* `directoryName` - (Optional) Fully qualified name of the directory (for example, corp.example.com).
+* `organizationalUnitDistinguishedName` - (Optional) Distinguished name of the organizational unit for computer accounts.
+
+### `vpcConfig`
+
+The `vpcConfig` block supports the following arguments:
+
+* `securityGroupIds` - (Optional) Identifiers of the security groups for the image builder.
+* `subnetIds` - (Optional) Identifiers of the subnets to which a network interface is attached from the image builder instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the appstream image builder.
+* `createdTime` - Date and time, in UTC and extended RFC 3339 format, when the image builder was created.
+* `id` - Name of the image builder.
+* `state` - State of the image builder. Can be: `pending`, `updatingAgent`, `running`, `stopping`, `stopped`, `rebooting`, `snapshotting`, `deleting`, `failed`, `updating`, `pendingQualification`.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppstreamImageBuilder` using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsAppstreamImageBuilder` using the `name`. For example:
+
+```console
+% terraform import aws_appstream_image_builder.example imageBuilderExample
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appstream_stack.html.markdown b/website/docs/cdktf/typescript/r/appstream_stack.html.markdown
new file mode 100644
index 00000000000..8fafe055865
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appstream_stack.html.markdown
@@ -0,0 +1,170 @@
+---
+subcategory: "AppStream 2.0"
+layout: "aws"
+page_title: "AWS: aws_appstream_stack"
+description: |-
+  Provides an AppStream stack
+---
+
+
+
+# Resource: aws_appstream_stack
+
+Provides an AppStream stack.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
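+ *
+ * A descriptive note on the example below: it enables the HOMEFOLDERS
+ * storage connector and sets an explicit permission for each streaming
+ * action. When userSettings is omitted, AWS configures those actions
+ * automatically (see the Argument Reference notes below).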
+ */
+import { AppstreamStack } from "./.gen/providers/aws/appstream-stack";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppstreamStack(this, "example", {
+      applicationSettings: {
+        enabled: true,
+        settingsGroup: "SettingsGroup",
+      },
+      description: "stack description",
+      displayName: "stack display name",
+      feedbackUrl: "http://your-domain/feedback",
+      name: "stack name",
+      redirectUrl: "http://your-domain/redirect",
+      storageConnectors: [
+        {
+          connectorType: "HOMEFOLDERS",
+        },
+      ],
+      tags: {
+        TagName: "TagValue",
+      },
+      userSettings: [
+        {
+          action: "CLIPBOARD_COPY_FROM_LOCAL_DEVICE",
+          permission: "ENABLED",
+        },
+        {
+          action: "CLIPBOARD_COPY_TO_LOCAL_DEVICE",
+          permission: "ENABLED",
+        },
+        {
+          action: "DOMAIN_PASSWORD_SIGNIN",
+          permission: "ENABLED",
+        },
+        {
+          action: "DOMAIN_SMART_CARD_SIGNIN",
+          permission: "DISABLED",
+        },
+        {
+          action: "FILE_DOWNLOAD",
+          permission: "ENABLED",
+        },
+        {
+          action: "FILE_UPLOAD",
+          permission: "ENABLED",
+        },
+        {
+          action: "PRINTING_TO_LOCAL_DEVICE",
+          permission: "ENABLED",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Unique name for the AppStream stack.
+
+The following arguments are optional:
+
+* `accessEndpoints` - (Optional) Set of configuration blocks defining the interface VPC endpoints. Users of the stack can connect to AppStream 2.0 only through the specified endpoints.
+  See [`accessEndpoints`](#access_endpoints) below.
+* `applicationSettings` - (Optional) Settings for application settings persistence.
+  See [`applicationSettings`](#application_settings) below.
+* `description` - (Optional) Description for the AppStream stack.
+* `displayName` - (Optional) Stack name to display.
+* `embedHostDomains` - (Optional) Domains where AppStream 2.0 streaming sessions can be embedded in an iframe. You must approve the domains that you want to host embedded AppStream 2.0 streaming sessions.
+* `feedbackUrl` - (Optional) URL that users are redirected to after they click the Send Feedback link. If no URL is specified, no Send Feedback link is displayed.
+* `redirectUrl` - (Optional) URL that users are redirected to after their streaming session ends.
+* `storageConnectors` - (Optional) Configuration block for the storage connectors to enable.
+  See [`storageConnectors`](#storage_connectors) below.
+* `userSettings` - (Optional) Configuration block for the actions that are enabled or disabled for users during their streaming sessions. If not provided, these settings are configured automatically by AWS. If provided, the Terraform configuration should include a block for each configurable action.
+  See [`userSettings`](#user_settings) below.
+* `streamingExperienceSettings` - (Optional) The streaming protocol you want your stack to prefer. This can be UDP or TCP. Currently, UDP is only supported in the Windows native client.
+  See [`streamingExperienceSettings`](#streaming_experience_settings) below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `accessEndpoints`
+
+* `endpointType` - (Required) Type of the interface endpoint.
+ See the [`accessEndpoint` AWS API documentation](https://docs.aws.amazon.com/appstream2/latest/APIReference/API_AccessEndpoint.html) for valid values. +* `vpceId` - (Optional) ID of the VPC in which the interface endpoint is used. + +### `applicationSettings` + +* `enabled` - (Required) Whether application settings should be persisted. +* `settingsGroup` - (Optional) Name of the settings group. + Required when `enabled` is `true`. + Can be up to 100 characters. + +### `storageConnectors` + +* `connectorType` - (Required) Type of storage connector. + Valid values are `homefolders`, `googleDrive`, or `oneDrive`. +* `domains` - (Optional) Names of the domains for the account. +* `resourceIdentifier` - (Optional) ARN of the storage connector. + +### `userSettings` + +* `action` - (Required) Action that is enabled or disabled. + Valid values are `clipboardCopyFromLocalDevice`, `clipboardCopyToLocalDevice`, `fileUpload`, `fileDownload`, `printingToLocalDevice`, `domainPasswordSignin`, or `domainSmartCardSignin`. +* `permission` - (Required) Whether the action is enabled or disabled. + Valid values are `enabled` or `disabled`. + +### `streamingExperienceSettings` + +* `preferredProtocol` - (Optional) The preferred protocol that you want to use while streaming your application. + Valid values are `tcp` and `udp`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the appstream stack. +* `createdTime` - Date and time, in UTC and extended RFC 3339 format, when the stack was created. +* `id` - Unique ID of the appstream stack. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppstreamStack` using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppstreamStack` using the id. For example: + +```console +% terraform import aws_appstream_stack.example stackID +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appstream_user.html.markdown b/website/docs/cdktf/typescript/r/appstream_user.html.markdown new file mode 100644 index 00000000000..fd5383305b2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appstream_user.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "AppStream 2.0" +layout: "aws" +page_title: "AWS: aws_appstream_user" +description: |- + Provides an AppStream user +--- + + + +# Resource: aws_appstream_user + +Provides an AppStream user. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
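+ *
+ * For USERPOOL users, userName must be the user's email address; the
+ * uppercase values below are placeholders to replace before applying.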
+ */ +import { AppstreamUser } from "./.gen/providers/aws/appstream-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppstreamUser(this, "example", { + authenticationType: "USERPOOL", + firstName: "FIRST NAME", + lastName: "LAST NAME", + userName: "EMAIL", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `authenticationType` - (Required) Authentication type for the user. You must specify USERPOOL. Valid values: `api`, `saml`, `userpool` +* `userName` - (Required) Email address of the user. + +The following arguments are optional: + +* `enabled` - (Optional) Whether the user in the user pool is enabled. +* `firstName` - (Optional) First name, or given name, of the user. +* `lastName` - (Optional) Last name, or surname, of the user. +* `sendEmailNotification` - (Optional) Send an email notification. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the appstream user. +* `createdTime` - Date and time, in UTC and extended RFC 3339 format, when the user was created. +* `id` - Unique ID of the appstream user. +* `status` - Status of the user in the user pool. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppstreamUser` using the `userName` and `authenticationType` separated by a slash (`/`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppstreamUser` using the `userName` and `authenticationType` separated by a slash (`/`). For example: + +```console +% terraform import aws_appstream_user.example UserName/AuthenticationType +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appstream_user_stack_association.html.markdown b/website/docs/cdktf/typescript/r/appstream_user_stack_association.html.markdown new file mode 100644 index 00000000000..d891086620e --- /dev/null +++ b/website/docs/cdktf/typescript/r/appstream_user_stack_association.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "AppStream 2.0" +layout: "aws" +page_title: "AWS: aws_appstream_user_stack_association" +description: |- + Manages an AppStream User Stack association. +--- + + + +# Resource: aws_appstream_user_stack_association + +Manages an AppStream User Stack association. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
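+ *
+ * The association below is created alongside the stack and the user, so it
+ * can reference their name attributes directly instead of hard-coding them.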
+ */
+import { AppstreamStack } from "./.gen/providers/aws/appstream-stack";
+import { AppstreamUser } from "./.gen/providers/aws/appstream-user";
+import { AppstreamUserStackAssociation } from "./.gen/providers/aws/appstream-user-stack-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new AppstreamStack(this, "test", {
+      name: "STACK NAME",
+    });
+    const awsAppstreamUserTest = new AppstreamUser(this, "test_1", {
+      authenticationType: "USERPOOL",
+      userName: "EMAIL",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppstreamUserTest.overrideLogicalId("test");
+    const awsAppstreamUserStackAssociationTest =
+      new AppstreamUserStackAssociation(this, "test_2", {
+        authenticationType: Token.asString(
+          awsAppstreamUserTest.authenticationType
+        ),
+        stackName: test.name,
+        userName: Token.asString(awsAppstreamUserTest.userName),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppstreamUserStackAssociationTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `authenticationType` - (Required) Authentication type for the user.
+* `stackName` - (Required) Name of the stack that is associated with the user.
+* `userName` - (Required) Email address of the user who is associated with the stack.
+
+The following arguments are optional:
+
+* `sendEmailNotification` - (Optional) Whether a welcome email is sent to a user after the user is created in the user pool.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the appstream User Stack association.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppStream User Stack Association using the `userName`, `authenticationType`, and `stackName`, separated by a slash (`/`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AppStream User Stack Association using the `userName`, `authenticationType`, and `stackName`, separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_appstream_user_stack_association.example userName/authenticationType/stackName
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appsync_api_cache.html.markdown b/website/docs/cdktf/typescript/r/appsync_api_cache.html.markdown
new file mode 100644
index 00000000000..637c5bad802
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appsync_api_cache.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "AppSync"
+layout: "aws"
+page_title: "AWS: aws_appsync_api_cache"
+description: |-
+  Provides an AppSync API Cache.
+---
+
+
+
+# Resource: aws_appsync_api_cache
+
+Provides an AppSync API Cache.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncApiCache } from "./.gen/providers/aws/appsync-api-cache"; +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AppsyncGraphqlApi(this, "example", { + authenticationType: "API_KEY", + name: "example", + }); + const awsAppsyncApiCacheExample = new AppsyncApiCache(this, "example_1", { + apiCachingBehavior: "FULL_REQUEST_CACHING", + apiId: example.id, + ttl: 900, + type: "LARGE", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncApiCacheExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) GraphQL API ID. +* `apiCachingBehavior` - (Required) Caching behavior. Valid values are `fullRequestCaching` and `perResolverCaching`. +* `type` - (Required) Cache instance type. Valid values are `small`, `medium`, `large`, `xlarge`, `large2X`, `large4X`, `large8X`, `large12X`, `t2Small`, `t2Medium`, `r4Large`, `r4Xlarge`, `r42Xlarge`, `r44Xlarge`, `r48Xlarge`. +* `ttl` - (Required) TTL in seconds for cache entries. +* `atRestEncryptionEnabled` - (Optional) At-rest encryption flag for cache. You cannot update this setting after creation. +* `transitEncryptionEnabled` - (Optional) Transit encryption flag when connecting to cache. You cannot update this setting after creation. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AppSync API ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppsyncApiCache` using the AppSync API ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppsyncApiCache` using the AppSync API ID. For example: + +```console +% terraform import aws_appsync_api_cache.example xxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appsync_api_key.html.markdown b/website/docs/cdktf/typescript/r/appsync_api_key.html.markdown new file mode 100644 index 00000000000..406fb85a4a1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appsync_api_key.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_api_key" +description: |- + Provides an AppSync API Key. +--- + + + +# Resource: aws_appsync_api_key + +Provides an AppSync API Key. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
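+ * The AppsyncApiKey and AppsyncGraphqlApi classes imported below come from
+ * these bindings; the key created here uses a fixed RFC3339 expiry, and
+ * omitting expires defaults the key to seven days from creation.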
+ * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncApiKey } from "./.gen/providers/aws/appsync-api-key"; +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AppsyncGraphqlApi(this, "example", { + authenticationType: "API_KEY", + name: "example", + }); + const awsAppsyncApiKeyExample = new AppsyncApiKey(this, "example_1", { + apiId: example.id, + expires: "2018-05-03T04:00:00Z", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncApiKeyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) ID of the associated AppSync API +* `description` - (Optional) API key description. Defaults to "Managed by Terraform". +* `expires` - (Optional) RFC3339 string representation of the expiry date. Rounded down to nearest hour. By default, it is 7 days from the date of creation. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - API Key ID (Formatted as ApiId:Key) +* `key` - API key + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppsyncApiKey` using the AppSync API ID and key separated by `:`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppsyncApiKey` using the AppSync API ID and key separated by `:`. For example: + +```console +% terraform import aws_appsync_api_key.example xxxxx:yyyyy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appsync_datasource.html.markdown b/website/docs/cdktf/typescript/r/appsync_datasource.html.markdown new file mode 100644 index 00000000000..47d1c1ba248 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appsync_datasource.html.markdown @@ -0,0 +1,239 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_datasource" +description: |- + Provides an AppSync Data Source. +--- + + + +# Resource: aws_appsync_datasource + +Provides an AppSync Data Source. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
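+ *
+ * The example below creates a DynamoDB table and an IAM role assumable by
+ * appsync.amazonaws.com, then registers the table as an AMAZON_DYNAMODB
+ * data source that uses that role.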
+ */ +import { AppsyncDatasource } from "./.gen/providers/aws/appsync-datasource"; +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AppsyncGraphqlApi(this, "example", { + authenticationType: "API_KEY", + name: "tf_appsync_example", + }); + const awsDynamodbTableExample = new DynamodbTable(this, "example_1", { + attribute: [ + { + name: "UserId", + type: "S", + }, + ], + hashKey: "UserId", + name: "example", + readCapacity: 1, + writeCapacity: 1, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDynamodbTableExample.overrideLogicalId("example"); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["appsync.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_3", + { + statement: [ + { + actions: ["dynamodb:*"], + effect: "Allow", + resources: [Token.asString(awsDynamodbTableExample.arn)], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsIamRoleExample = new IamRole(this, "example_4", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + const awsIamRolePolicyExample = new IamRolePolicy(this, "example_5", { + name: "example", + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + role: Token.asString(awsIamRoleExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyExample.overrideLogicalId("example"); + const awsAppsyncDatasourceExample = new AppsyncDatasource( + this, + "example_6", + { + apiId: example.id, + dynamodbConfig: { + tableName: Token.asString(awsDynamodbTableExample.name), + }, + name: "tf_appsync_example", + serviceRoleArn: Token.asString(awsIamRoleExample.arn), + type: "AMAZON_DYNAMODB", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncDatasourceExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) API ID for the GraphQL API for the data source. +* `name` - (Required) User-supplied name for the data source. +* `type` - (Required) Type of the Data Source. Valid values: `awsLambda`, `amazonDynamodb`, `amazonElasticsearch`, `http`, `none`, `relationalDatabase`, `amazonEventbridge`, `amazonOpensearchService`. +* `description` - (Optional) Description of the data source. 
+* `dynamodbConfig` - (Optional) DynamoDB settings. See [DynamoDB Config](#dynamodb-config)
+* `elasticsearchConfig` - (Optional) Amazon Elasticsearch settings. See [ElasticSearch Config](#elasticsearch-config)
+* `eventBridgeConfig` - (Optional) AWS EventBridge settings. See [Event Bridge Config](#event-bridge-config)
+* `httpConfig` - (Optional) HTTP settings. See [HTTP Config](#http-config)
+* `lambdaConfig` - (Optional) AWS Lambda settings. See [Lambda Config](#lambda-config)
+* `opensearchserviceConfig` - (Optional) Amazon OpenSearch Service settings. See [OpenSearch Service Config](#opensearch-service-config)
+* `relationalDatabaseConfig` - (Optional) AWS RDS settings. See [Relational Database Config](#relational-database-config)
+* `serviceRoleArn` - (Optional) IAM service role ARN for the data source.
+
+### DynamoDB Config
+
+This argument supports the following arguments:
+
+* `tableName` - (Required) Name of the DynamoDB table.
+* `region` - (Optional) AWS region of the DynamoDB table. Defaults to current region.
+* `useCallerCredentials` - (Optional) Set to `true` to use Amazon Cognito credentials with this data source.
+* `deltaSyncConfig` - (Optional) The DeltaSyncConfig for a versioned data source. See [Delta Sync Config](#delta-sync-config)
+* `versioned` - (Optional) Detects Conflict Detection and Resolution with this data source.
+
+### Delta Sync Config
+
+* `baseTableTtl` - (Optional) The number of minutes that an Item is stored in the data source.
+* `deltaSyncTableName` - (Required) The table name.
+* `deltaSyncTableTtl` - (Optional) The number of minutes that a Delta Sync log entry is stored in the Delta Sync table.
+
+### ElasticSearch Config
+
+This argument supports the following arguments:
+
+* `endpoint` - (Required) HTTP endpoint of the Elasticsearch domain.
+* `region` - (Optional) AWS region of Elasticsearch domain. Defaults to current region.
+
+### Event Bridge Config
+
+This argument supports the following arguments:
+
+* `eventBusArn` - (Required) ARN for the EventBridge bus.
+
+### HTTP Config
+
+This argument supports the following arguments:
+
+* `endpoint` - (Required) HTTP URL.
+* `authorizationConfig` - (Optional) Authorization configuration in case the HTTP endpoint requires authorization. See [Authorization Config](#authorization-config).
+
+#### Authorization Config
+
+This argument supports the following arguments:
+
+* `authorizationType` - (Optional) Authorization type that the HTTP endpoint requires. Default value is `awsIam`.
+* `awsIamConfig` - (Optional) Identity and Access Management (IAM) settings. See [AWS IAM Config](#aws-iam-config).
+
+##### AWS IAM Config
+
+This argument supports the following arguments:
+
+* `signingRegion` - (Optional) Signing Amazon Web Services Region for IAM authorization.
+* `signingServiceName` - (Optional) Signing service name for IAM authorization.
+
+### Lambda Config
+
+This argument supports the following arguments:
+
+* `functionArn` - (Required) ARN for the Lambda function.
+
+### OpenSearch Service Config
+
+This argument supports the following arguments:
+
+* `endpoint` - (Required) HTTP endpoint of the OpenSearch domain.
+* `region` - (Optional) AWS region of the OpenSearch domain. Defaults to current region.
+
+### Relational Database Config
+
+This argument supports the following arguments:
+
+* `httpEndpointConfig` - (Required) Amazon RDS HTTP endpoint configuration. See [HTTP Endpoint Config](#http-endpoint-config).
+* `sourceType` - (Optional) Source type for the relational database.
Valid values: `rdsHttpEndpoint`.
+
+#### HTTP Endpoint Config
+
+This argument supports the following arguments:
+
+* `dbClusterIdentifier` - (Required) Amazon RDS cluster identifier.
+* `awsSecretStoreArn` - (Required) AWS secret store ARN for database credentials.
+* `databaseName` - (Optional) Logical database name.
+* `region` - (Optional) AWS Region for RDS HTTP endpoint. Defaults to current region.
+* `schema` - (Optional) Logical schema name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the data source.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppsyncDatasource` using the `apiId`, a hyphen, and `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsAppsyncDatasource` using the `apiId`, a hyphen, and `name`. For example:
+
+```console
+% terraform import aws_appsync_datasource.example abcdef123456-example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appsync_domain_name.html.markdown b/website/docs/cdktf/typescript/r/appsync_domain_name.html.markdown
new file mode 100644
index 00000000000..5d009ad497e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appsync_domain_name.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "AppSync"
+layout: "aws"
+page_title: "AWS: aws_appsync_domain_name"
+description: |-
+  Provides an AppSync Domain Name.
+---
+
+
+
+# Resource: aws_appsync_domain_name
+
+Provides an AppSync Domain Name.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppsyncDomainName } from "./.gen/providers/aws/appsync-domain-name";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppsyncDomainName(this, "example", {
+      certificateArn: Token.asString(awsAcmCertificateExample.arn),
+      domainName: "api.example.com",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificateArn` - (Required) ARN of the certificate. This can be an AWS Certificate Manager (ACM) certificate or an Identity and Access Management (IAM) server certificate. The certificate must reside in us-east-1.
+* `description` - (Optional) A description of the Domain Name.
+* `domainName` - (Required) Domain name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Appsync Domain Name.
+* `appsyncDomainName` - Domain name that AppSync provides.
+* `hostedZoneId` - ID of your Amazon Route 53 hosted zone.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppsyncDomainName` using the AppSync domain name.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppsyncDomainName` using the AppSync domain name. For example: + +```console +% terraform import aws_appsync_domain_name.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appsync_domain_name_api_association.html.markdown b/website/docs/cdktf/typescript/r/appsync_domain_name_api_association.html.markdown new file mode 100644 index 00000000000..e461514ff1f --- /dev/null +++ b/website/docs/cdktf/typescript/r/appsync_domain_name_api_association.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_domain_name_api_association" +description: |- + Provides an AppSync API Association. +--- + + + +# Resource: aws_appsync_domain_name_api_association + +Provides an AppSync API Association. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncDomainNameApiAssociation } from "./.gen/providers/aws/appsync-domain-name-api-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncDomainNameApiAssociation(this, "example", { + apiId: Token.asString(awsAppsyncGraphqlApiExample.id), + domainName: Token.asString(awsAppsyncDomainNameExample.domainName), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) API ID. +* `domainName` - (Required) Appsync domain name. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Appsync domain name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppsyncDomainNameApiAssociation` using the AppSync domain name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppsyncDomainNameApiAssociation` using the AppSync domain name. For example: + +```console +% terraform import aws_appsync_domain_name_api_association.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appsync_function.html.markdown b/website/docs/cdktf/typescript/r/appsync_function.html.markdown new file mode 100644 index 00000000000..3bd57a1ef71 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appsync_function.html.markdown @@ -0,0 +1,163 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_function" +description: |- + Provides an AppSync Function. +--- + + + +# Resource: aws_appsync_function + +Provides an AppSync Function. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncDatasource } from "./.gen/providers/aws/appsync-datasource"; +import { AppsyncFunction } from "./.gen/providers/aws/appsync-function"; +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AppsyncGraphqlApi(this, "example", { + authenticationType: "API_KEY", + name: "example", + schema: + "type Mutation {\n putPost(id: ID!, title: String!): Post\n}\n\ntype Post {\n id: ID!\n title: String!\n}\n\ntype Query {\n singlePost(id: ID!): Post\n}\n\nschema {\n query: Query\n mutation: Mutation\n}\n\n", + }); + const awsAppsyncDatasourceExample = new AppsyncDatasource( + this, + "example_1", + { + apiId: example.id, + httpConfig: { + endpoint: "http://example.com", + }, + name: "example", + type: "HTTP", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncDatasourceExample.overrideLogicalId("example"); + const awsAppsyncFunctionExample = new AppsyncFunction(this, "example_2", { + apiId: example.id, + dataSource: Token.asString(awsAppsyncDatasourceExample.name), + name: "example", + requestMappingTemplate: + '{\n "version": "2018-05-29",\n "method": "GET",\n "resourcePath": "/",\n "params":{\n "headers": $utils.http.copyheaders($ctx.request.headers)\n }\n}\n\n', + responseMappingTemplate: + "#if($ctx.result.statusCode == 200)\n $ctx.result.body\n#else\n $utils.appendError($ctx.result.body, $ctx.result.statusCode)\n#end\n\n", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncFunctionExample.overrideLogicalId("example"); + } +} + +``` + +## Example Usage With Code + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncFunction } from "./.gen/providers/aws/appsync-function"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncFunction(this, "example", { + apiId: Token.asString(awsAppsyncGraphqlApiExample.id), + code: Token.asString(Fn.file("some-code-dir")), + dataSource: Token.asString(awsAppsyncDatasourceExample.name), + name: "example", + runtime: { + name: "APPSYNC_JS", + runtimeVersion: "1.0.0", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `apiId` - (Required) ID of the associated AppSync API. +* `code` - (Optional) The function code that contains the request and response functions. When code is used, the runtime is required. The runtime value must be APPSYNC_JS. +* `dataSource` - (Required) Function data source name. +* `maxBatchSize` - (Optional) Maximum batching size for a resolver. Valid values are between `0` and `2000`. +* `name` - (Required) Function name. 
The function name does not have to be unique. +* `requestMappingTemplate` - (Optional) Function request mapping template. Functions support only the 2018-05-29 version of the request mapping template. +* `responseMappingTemplate` - (Optional) Function response mapping template. +* `description` - (Optional) Function description. +* `runtime` - (Optional) Describes a runtime used by an AWS AppSync pipeline resolver or AWS AppSync function. Specifies the name and version of the runtime to use. Note that if a runtime is specified, code must also be specified. See [Runtime](#runtime). +* `syncConfig` - (Optional) Describes a Sync configuration for a resolver. See [Sync Config](#sync-config). +* `functionVersion` - (Optional) Version of the request mapping template. Currently the supported value is `20180529`. Does not apply when specifying `code`. + +### Runtime + +This argument supports the following arguments: + +* `name` - (Optional) The name of the runtime to use. Currently, the only allowed value is `appsyncJs`. +* `runtimeVersion` - (Optional) The version of the runtime to use. Currently, the only allowed version is `100`. + +### Sync Config + +This argument supports the following arguments: + +* `conflictDetection` - (Optional) Conflict Detection strategy to use. Valid values are `none` and `version`. +* `conflictHandler` - (Optional) Conflict Resolution strategy to perform in the event of a conflict. Valid values are `none`, `optimisticConcurrency`, `automerge`, and `lambda`. +* `lambdaConflictHandlerConfig` - (Optional) Lambda Conflict Handler Config when configuring `lambda` as the Conflict Handler. See [Lambda Conflict Handler Config](#lambda-conflict-handler-config). + +#### Lambda Conflict Handler Config + +This argument supports the following arguments: + +* `lambdaConflictHandlerArn` - (Optional) ARN for the Lambda function to use as the Conflict Handler. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - API Function ID (Formatted as ApiId-FunctionId) +* `arn` - ARN of the Function object. +* `functionId` - Unique ID representing the Function object. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppsyncFunction` using the AppSync API ID and Function ID separated by `-`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAppsyncFunction` using the AppSync API ID and Function ID separated by `-`. For example: + +```console +% terraform import aws_appsync_function.example xxxxx-yyyyy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/appsync_graphql_api.html.markdown b/website/docs/cdktf/typescript/r/appsync_graphql_api.html.markdown new file mode 100644 index 00000000000..3f13bf3eba0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/appsync_graphql_api.html.markdown @@ -0,0 +1,432 @@ +--- +subcategory: "AppSync" +layout: "aws" +page_title: "AWS: aws_appsync_graphql_api" +description: |- + Provides an AppSync GraphQL API. +--- + + + +# Resource: aws_appsync_graphql_api + +Provides an AppSync GraphQL API. 
+ +## Example Usage + +### API Key Authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncGraphqlApi(this, "example", { + authenticationType: "API_KEY", + name: "example", + }); + } +} + +``` + +### AWS IAM Authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncGraphqlApi(this, "example", { + authenticationType: "AWS_IAM", + name: "example", + }); + } +} + +``` + +### AWS Cognito User Pool Authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncGraphqlApi(this, "example", { + authenticationType: "AMAZON_COGNITO_USER_POOLS", + name: "example", + userPoolConfig: { + awsRegion: Token.asString(current.name), + defaultAction: "DENY", + userPoolId: Token.asString(awsCognitoUserPoolExample.id), + }, + }); + } +} + +``` + +### OpenID Connect Authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncGraphqlApi(this, "example", { + authenticationType: "OPENID_CONNECT", + name: "example", + openidConnectConfig: { + issuer: "https://example.com", + }, + }); + } +} + +``` + +### AWS Lambda Authorizer Authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +import { LambdaPermission } from "./.gen/providers/aws/lambda-permission"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AppsyncGraphqlApi(this, "example", { + authenticationType: "AWS_LAMBDA", + lambdaAuthorizerConfig: { + authorizerUri: + "arn:aws:lambda:us-east-1:123456789012:function:custom_lambda_authorizer", + }, + name: "example", + }); + new LambdaPermission(this, "appsync_lambda_authorizer", { + action: "lambda:InvokeFunction", + functionName: "custom_lambda_authorizer", + principal: "appsync.amazonaws.com", + sourceArn: example.arn, + statementId: "appsync_lambda_authorizer", + }); + } +} + +``` + +### With Multiple Authentication Providers + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncGraphqlApi(this, "example", { + additionalAuthenticationProvider: [ + { + authenticationType: "AWS_IAM", + }, + ], + authenticationType: "API_KEY", + name: "example", + }); + } +} + +``` + +### With Schema + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AppsyncGraphqlApi(this, "example", { + authenticationType: "AWS_IAM", + name: "example", + schema: "schema {\n\tquery: Query\n}\ntype Query {\n test: Int\n}\n\n", + }); + } +} + +``` + +### Enabling Logging + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +interface MyConfig { + authenticationType: any; + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["appsync.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const example = new IamRole(this, "example", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "example", + }); + const awsIamRolePolicyAttachmentExample = new IamRolePolicyAttachment( + this, + "example_2", + { + policyArn: + "arn:aws:iam::aws:policy/service-role/AWSAppSyncPushToCloudWatchLogs", + role: example.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentExample.overrideLogicalId("example"); + const awsAppsyncGraphqlApiExample = new AppsyncGraphqlApi( + this, + "example_3", + { + logConfig: { + cloudwatchLogsRoleArn: example.arn, + fieldLogLevel: "ERROR", + }, + authenticationType: config.authenticationType, + name: config.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncGraphqlApiExample.overrideLogicalId("example"); + } +} + +``` + +### Associate Web ACL (v2) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl"; +import { Wafv2WebAclAssociation } from "./.gen/providers/aws/wafv2-web-acl-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new AppsyncGraphqlApi(this, "example", { + authenticationType: "API_KEY", + name: "example", + }); + const awsWafv2WebAclExample = new Wafv2WebAcl(this, "example_1", { + defaultAction: { + allow: {}, + }, + description: "Example of a managed rule.", + name: "managed-rule-example", + rule: [ + { + name: "rule-1", + overrideAction: { + block: [{}], + }, + priority: 1, + statement: { + managedRuleGroupStatement: { + name: "AWSManagedRulesCommonRuleSet", + vendorName: "AWS", + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-rule-metric-name", + sampledRequestsEnabled: false, + }, + }, + ], + scope: "REGIONAL", + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-metric-name", + sampledRequestsEnabled: false, + }, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsWafv2WebAclExample.overrideLogicalId("example"); + const awsWafv2WebAclAssociationExample = new Wafv2WebAclAssociation( + this, + "example_2", + { + resourceArn: example.arn, + webAclArn: Token.asString(awsWafv2WebAclExample.arn), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWafv2WebAclAssociationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `authenticationType` - (Required) Authentication type. Valid values: `apiKey`, `awsIam`, `amazonCognitoUserPools`, `openidConnect`, `awsLambda` +* `name` - (Required) User-supplied name for the GraphqlApi. +* `logConfig` - (Optional) Nested argument containing logging configuration. Defined below. +* `openidConnectConfig` - (Optional) Nested argument containing OpenID Connect configuration. Defined below. +* `userPoolConfig` - (Optional) Amazon Cognito User Pool configuration. Defined below. +* `lambdaAuthorizerConfig` - (Optional) Nested argument containing Lambda authorizer configuration. Defined below. +* `schema` - (Optional) Schema definition, in GraphQL schema language format. Terraform cannot perform drift detection of this configuration. +* `additionalAuthenticationProvider` - (Optional) One or more additional authentication providers for the GraphqlApi. Defined below. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `xrayEnabled` - (Optional) Whether tracing with X-ray is enabled. Defaults to false. +* `visibility` - (Optional) Sets the value of the GraphQL API to public (`global`) or private (`private`). If no value is provided, the visibility will be set to `global` by default. This value cannot be changed once the API has been created. + +### log_config + +This argument supports the following arguments: + +* `cloudwatchLogsRoleArn` - (Required) Amazon Resource Name of the service role that AWS AppSync will assume to publish to Amazon CloudWatch logs in your account. +* `fieldLogLevel` - (Required) Field logging level. Valid values: `all`, `error`, `none`. +* `excludeVerboseContent` - (Optional) Set to TRUE to exclude sections that contain information such as headers, context, and evaluated mapping templates, regardless of logging level. Valid values: `true`, `false`. Default value: `false` + +### additional_authentication_provider + +This argument supports the following arguments: + +* `authenticationType` - (Required) Authentication type. Valid values: `apiKey`, `awsIam`, `amazonCognitoUserPools`, `openidConnect`, `awsLambda` +* `openidConnectConfig` - (Optional) Nested argument containing OpenID Connect configuration. Defined below. +* `userPoolConfig` - (Optional) Amazon Cognito User Pool configuration. Defined below. + +### openid_connect_config + +This argument supports the following arguments: + +* `issuer` - (Required) Issuer for the OpenID Connect configuration. The issuer returned by discovery MUST exactly match the value of iss in the ID Token. +* `authTtl` - (Optional) Number of milliseconds a token is valid after being authenticated. 
+* `clientId` - (Optional) Client identifier of the Relying party at the OpenID identity provider. This identifier is typically obtained when the Relying party is registered with the OpenID identity provider. You can specify a regular expression so that AWS AppSync can validate against multiple client identifiers at a time.
+* `iatTtl` - (Optional) Number of milliseconds a token is valid after being issued to a user.
+
+### user_pool_config
+
+This argument supports the following arguments:
+
+* `defaultAction` - (Required only if Cognito is used as the default auth provider) Action that you want your GraphQL API to take when a request that uses Amazon Cognito User Pool authentication doesn't match the Amazon Cognito User Pool configuration. Valid values: `allow` and `deny`.
+* `userPoolId` - (Required) User pool ID.
+* `appIdClientRegex` - (Optional) Regular expression for validating the incoming Amazon Cognito User Pool app client ID.
+* `awsRegion` - (Optional) AWS region in which the user pool was created.
+
+### lambda_authorizer_config
+
+This argument supports the following arguments:
+
+* `authorizerUri` - (Required) ARN of the Lambda function to be called for authorization. Note: This Lambda function must have a resource-based policy assigned to it to allow `lambda:InvokeFunction` from the service principal `appsync.amazonaws.com`.
+* `authorizerResultTtlInSeconds` - (Optional) Number of seconds a response should be cached for. The default is 5 minutes (300 seconds). The Lambda function can override this by returning a `ttlOverride` key in its response. A value of 0 disables caching of responses. Minimum value of 0. Maximum value of 3600.
+* `identityValidationExpression` - (Optional) Regular expression for validation of tokens before the Lambda function is called.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:

+* `id` - API ID
+* `arn` - ARN
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uris` - Map of URIs associated with the API. E.g., `uris["GRAPHQL"] = https://ID.appsync-api.REGION.amazonaws.com/graphql`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AppSync GraphQL API using the GraphQL API ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AppSync GraphQL API using the GraphQL API ID. For example:
+
+```console
+% terraform import aws_appsync_graphql_api.example 0123456789
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appsync_resolver.html.markdown b/website/docs/cdktf/typescript/r/appsync_resolver.html.markdown
new file mode 100644
index 00000000000..4a7c704ae9f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appsync_resolver.html.markdown
@@ -0,0 +1,182 @@
+---
+subcategory: "AppSync"
+layout: "aws"
+page_title: "AWS: aws_appsync_resolver"
+description: |-
+  Provides an AppSync Resolver.
+---
+
+
+
+# Resource: aws_appsync_resolver
+
+Provides an AppSync Resolver.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AppsyncDatasource } from "./.gen/providers/aws/appsync-datasource"; +import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api"; +import { AppsyncResolver } from "./.gen/providers/aws/appsync-resolver"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new AppsyncGraphqlApi(this, "test", { + authenticationType: "API_KEY", + name: "tf-example", + schema: + "type Mutation {\n\tputPost(id: ID!, title: String!): Post\n}\n\ntype Post {\n\tid: ID!\n\ttitle: String!\n}\n\ntype Query {\n\tsinglePost(id: ID!): Post\n}\n\nschema {\n\tquery: Query\n\tmutation: Mutation\n}\n\n", + }); + new AppsyncResolver(this, "Mutation_pipelineTest", { + apiId: test.id, + field: "pipelineTest", + kind: "PIPELINE", + pipelineConfig: { + functions: [test1.functionId, test2.functionId, test3.functionId], + }, + requestTemplate: "{}", + responseTemplate: "$util.toJson($ctx.result)", + type: "Mutation", + }); + const awsAppsyncDatasourceTest = new AppsyncDatasource(this, "test_2", { + apiId: test.id, + httpConfig: { + endpoint: "http://example.com", + }, + name: "tf_example", + type: "HTTP", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncDatasourceTest.overrideLogicalId("test"); + const awsAppsyncResolverTest = new AppsyncResolver(this, "test_3", { + apiId: test.id, + cachingConfig: { + cachingKeys: ["$context.identity.sub", "$context.arguments.id"], + ttl: 60, + }, + dataSource: Token.asString(awsAppsyncDatasourceTest.name), + field: "singlePost", + requestTemplate: + '{\n "version": "2018-05-29",\n "method": "GET",\n "resourcePath": "/",\n "params":{\n "headers": $utils.http.copyheaders($ctx.request.headers)\n }\n}\n\n', + responseTemplate: + "#if($ctx.result.statusCode == 200)\n $ctx.result.body\n#else\n $utils.appendError($ctx.result.body, $ctx.result.statusCode)\n#end\n\n", + type: "Query", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAppsyncResolverTest.overrideLogicalId("test"); + } +} + +``` + +## Example Usage JS + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AppsyncResolver } from "./.gen/providers/aws/appsync-resolver";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppsyncResolver(this, "example", {
+      apiId: test.id,
+      code: Token.asString(Fn.file("some-code-dir")),
+      field: "pipelineTest",
+      kind: "PIPELINE",
+      pipelineConfig: {
+        functions: [Token.asString(awsAppsyncFunctionTest.functionId)],
+      },
+      runtime: {
+        name: "APPSYNC_JS",
+        runtimeVersion: "1.0.0",
+      },
+      type: "Query",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `apiId` - (Required) API ID for the GraphQL API.
+* `code` - (Optional) The function code that contains the request and response functions. When code is used, the runtime is required. The runtime value must be APPSYNC_JS.
+* `type` - (Required) Type name from the schema defined in the GraphQL API.
+* `field` - (Required) Field name from the schema defined in the GraphQL API.
+* `requestTemplate` - (Optional) Request mapping template for UNIT resolver or 'before mapping template' for PIPELINE resolver. Required for non-Lambda resolvers.
+* `responseTemplate` - (Optional) Response mapping template for UNIT resolver or 'after mapping template' for PIPELINE resolver. Required for non-Lambda resolvers.
+* `dataSource` - (Optional) Data source name.
+* `maxBatchSize` - (Optional) Maximum batching size for a resolver. Valid values are between `0` and `2000`.
+* `kind` - (Optional) Resolver type. Valid values are `unit` and `pipeline`.
+* `syncConfig` - (Optional) Describes a Sync configuration for a resolver. See [Sync Config](#sync-config).
+* `pipelineConfig` - (Optional) The pipeline configuration for the resolver. See [Pipeline Config](#pipeline-config).
+* `cachingConfig` - (Optional) The Caching Config. See [Caching Config](#caching-config).
+* `runtime` - (Optional) Describes a runtime used by an AWS AppSync pipeline resolver or AWS AppSync function. Specifies the name and version of the runtime to use. Note that if a runtime is specified, code must also be specified. See [Runtime](#runtime).
+
+### Caching Config
+
+* `cachingKeys` - (Optional) The caching keys for a resolver that has caching activated. Valid values are entries from the $context.arguments, $context.source, and $context.identity maps.
+* `ttl` - (Optional) The TTL in seconds for a resolver that has caching activated. Valid values are between `1` and `3600` seconds.
+
+### Pipeline Config
+
+* `functions` - (Optional) A list of Function objects.
+
+### Sync Config
+
+* `conflictDetection` - (Optional) Conflict Detection strategy to use. Valid values are `none` and `version`.
+* `conflictHandler` - (Optional) Conflict Resolution strategy to perform in the event of a conflict. Valid values are `none`, `optimisticConcurrency`, `automerge`, and `lambda`.
+* `lambdaConflictHandlerConfig` - (Optional) Lambda Conflict Handler Config when configuring `lambda` as the Conflict Handler. See [Lambda Conflict Handler Config](#lambda-conflict-handler-config).
+
+#### Lambda Conflict Handler Config
+
+* `lambdaConflictHandlerArn` - (Optional) ARN for the Lambda function to use as the Conflict Handler.
+
+### Runtime
+
+* `name` - (Optional) The name of the runtime to use. Currently, the only allowed value is `appsyncJs`.
+* `runtimeVersion` - (Optional) The version of the runtime to use. Currently, the only allowed version is `100`.
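+
+As a hedged, minimal sketch of how the `syncConfig` and `lambdaConflictHandlerConfig` blocks above compose (this is not `cdktf convert` output; the API, data source, and conflict-handler Lambda referenced here are illustrative assumptions), a versioned UNIT resolver with a Lambda conflict handler might look like the following:
+
+```typescript
+// Hypothetical sketch: a UNIT resolver with conflict detection enabled and a
+// Lambda-based conflict handler. `awsAppsyncGraphqlApiExample`,
+// `awsAppsyncDatasourceExample`, and `awsLambdaFunctionExample` are assumed
+// to be defined elsewhere in the stack.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { AppsyncResolver } from "./.gen/providers/aws/appsync-resolver";
+class MySyncConfigSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AppsyncResolver(this, "sync_example", {
+      apiId: Token.asString(awsAppsyncGraphqlApiExample.id),
+      type: "Mutation",
+      field: "putPost",
+      dataSource: Token.asString(awsAppsyncDatasourceExample.name),
+      requestTemplate: "{}",
+      responseTemplate: "$util.toJson($ctx.result)",
+      syncConfig: {
+        conflictDetection: "VERSION",
+        conflictHandler: "LAMBDA",
+        lambdaConflictHandlerConfig: {
+          lambdaConflictHandlerArn: Token.asString(awsLambdaFunctionExample.arn),
+        },
+      },
+    });
+  }
+}
+```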
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAppsyncResolver` using the `apiId`, a hyphen, `type`, a hyphen, and `field`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsAppsyncResolver` using the `apiId`, a hyphen, `type`, a hyphen, and `field`. For example:
+
+```console
+% terraform import aws_appsync_resolver.example abcdef123456-exampleType-exampleField
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/appsync_type.html.markdown b/website/docs/cdktf/typescript/r/appsync_type.html.markdown
new file mode 100644
index 00000000000..f0da5ee8348
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/appsync_type.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "AppSync"
+layout: "aws"
+page_title: "AWS: aws_appsync_type"
+description: |-
+  Provides an AppSync Type.
+---
+
+
+
+# Resource: aws_appsync_type
+
+Provides an AppSync Type.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AppsyncGraphqlApi } from "./.gen/providers/aws/appsync-graphql-api";
+import { AppsyncType } from "./.gen/providers/aws/appsync-type";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new AppsyncGraphqlApi(this, "example", {
+      authenticationType: "API_KEY",
+      name: "example",
+    });
+    const awsAppsyncTypeExample = new AppsyncType(this, "example_1", {
+      apiId: example.id,
+      definition:
+        "type Mutation\n\n{\nputPost(id: ID!,title: String! ): Post\n\n}\n\n",
+      format: "SDL",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAppsyncTypeExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `apiId` - (Required) GraphQL API ID.
+* `format` - (Required) The type format: `sdl` or `json`.
+* `definition` - (Required) The type definition.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the type.
+* `description` - The type description.
+* `id` - The ID is constructed from `apiId:format:name`.
+* `name` - The type name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Appsync Types using the `id`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Appsync Types using the `id`. For example: + +```console +% terraform import aws_appsync_type.example api-id:format:name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/athena_data_catalog.html.markdown b/website/docs/cdktf/typescript/r/athena_data_catalog.html.markdown new file mode 100644 index 00000000000..234e34e5932 --- /dev/null +++ b/website/docs/cdktf/typescript/r/athena_data_catalog.html.markdown @@ -0,0 +1,175 @@ +--- +subcategory: "Athena" +layout: "aws" +page_title: "AWS: aws_athena_data_catalog" +description: |- + Provides an Athena data catalog. +--- + + + +# Resource: aws_athena_data_catalog + +Provides an Athena data catalog. + +More information about Athena and Athena data catalogs can be found in the [Athena User Guide](https://docs.aws.amazon.com/athena/latest/ug/what-is.html). + +-> **Tip:** for a more detailed explanation on the usage of `parameters`, see the [DataCatalog API documentation](https://docs.aws.amazon.com/athena/latest/APIReference/API_DataCatalog.html) + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AthenaDataCatalog } from "./.gen/providers/aws/athena-data-catalog"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AthenaDataCatalog(this, "example", { + description: "Example Athena data catalog", + name: "athena-data-catalog", + parameters: { + function: + "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function", + }, + tags: { + Name: "example-athena-data-catalog", + }, + type: "LAMBDA", + }); + } +} + +``` + +### Hive based Data Catalog + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AthenaDataCatalog } from "./.gen/providers/aws/athena-data-catalog"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AthenaDataCatalog(this, "example", { + description: "Hive based Data Catalog", + name: "hive-data-catalog", + parameters: { + "metadata-function": + "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function", + }, + type: "HIVE", + }); + } +} + +``` + +### Glue based Data Catalog + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AthenaDataCatalog } from "./.gen/providers/aws/athena-data-catalog";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AthenaDataCatalog(this, "example", {
+      description: "Glue based Data Catalog",
+      name: "glue-data-catalog",
+      parameters: {
+        "catalog-id": "123456789012",
+      },
+      type: "GLUE",
+    });
+  }
+}
+
+```
+
+### Lambda based Data Catalog
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AthenaDataCatalog } from "./.gen/providers/aws/athena-data-catalog";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AthenaDataCatalog(this, "example", {
+      description: "Lambda based Data Catalog",
+      name: "lambda-data-catalog",
+      parameters: {
+        "metadata-function":
+          "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function-1",
+        "record-function":
+          "arn:aws:lambda:eu-central-1:123456789012:function:not-important-lambda-function-2",
+      },
+      type: "LAMBDA",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `name` - (Required) Name of the data catalog. The catalog name must be unique for the AWS account and can use a maximum of 128 alphanumeric, underscore, at sign, or hyphen characters.
+- `type` - (Required) Type of data catalog: `lambda` for a federated catalog, `glue` for AWS Glue Catalog, or `hive` for an external hive metastore.
+- `parameters` - (Required) Key-value pairs that specify the Lambda function or functions to use for the data catalog. The mapping used depends on the catalog type.
+- `description` - (Required) Description of the data catalog.
+- `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `id` - Name of the data catalog.
+- `arn` - ARN of the data catalog.
+- `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import data catalogs using their `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import data catalogs using their `name`.
For example:
+
+```console
+% terraform import aws_athena_data_catalog.example example-data-catalog
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/athena_database.html.markdown b/website/docs/cdktf/typescript/r/athena_database.html.markdown
new file mode 100644
index 00000000000..975f9a7decc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/athena_database.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "Athena"
+layout: "aws"
+page_title: "AWS: aws_athena_database"
+description: |-
+  Provides an Athena database.
+---
+
+
+
+# Resource: aws_athena_database
+
+Provides an Athena database.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AthenaDatabase } from "./.gen/providers/aws/athena-database";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    const awsAthenaDatabaseExample = new AthenaDatabase(this, "example_1", {
+      bucket: example.id,
+      name: "database_name",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAthenaDatabaseExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of S3 bucket to save the results of the query execution.
+* `name` - (Required) Name of the database to create.
+* `aclConfiguration` - (Optional) Indicates that an Amazon S3 canned ACL should be set to control ownership of stored query results. See [ACL Configuration](#acl-configuration) below.
+* `comment` - (Optional) Description of the database.
+* `encryptionConfiguration` - (Optional) Encryption key block AWS Athena uses to decrypt the data in S3, such as an AWS Key Management Service (AWS KMS) key. See [Encryption Configuration](#encryption-configuration) below.
+* `expectedBucketOwner` - (Optional) AWS account ID that you expect to be the owner of the Amazon S3 bucket.
+* `forceDestroy` - (Optional, Default: false) Boolean that indicates all tables should be deleted from the database so that the database can be destroyed without error. The tables are *not* recoverable.
+* `properties` - (Optional) Key-value map of custom metadata properties for the database definition.
+
+### ACL Configuration
+
+* `s3AclOption` - (Required) Amazon S3 canned ACL that Athena should specify when storing query results. Valid value is `bucketOwnerFullControl`.
+
+~> **NOTE:** When Athena queries are executed, result files may be created in the specified bucket. Consider using `forceDestroy` on the bucket too in order to avoid any problems when destroying the bucket.
+
+### Encryption Configuration
+
+* `encryptionOption` - (Required) Type of key; one of `sseS3`, `sseKms`, `cseKms`.
+* `kmsKey` - (Optional) KMS key ARN or ID; required for key types `sseKms` and `cseKms`.
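+
+As a hedged, minimal sketch of how the encryption settings above compose (this is not `cdktf convert` output; the `awsS3BucketExample` and `awsKmsKeyExample` references are illustrative assumptions), a database whose query results are encrypted with a KMS-managed key might look like the following:
+
+```typescript
+// Hypothetical sketch: an Athena database using SSE_KMS encryption for its
+// query results. `awsS3BucketExample` and `awsKmsKeyExample` are assumed to
+// be defined elsewhere in the stack.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { AthenaDatabase } from "./.gen/providers/aws/athena-database";
+class MyEncryptedDatabaseSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AthenaDatabase(this, "encrypted", {
+      bucket: Token.asString(awsS3BucketExample.id),
+      name: "encrypted_database",
+      encryptionConfiguration: {
+        encryptionOption: "SSE_KMS",
+        kmsKey: Token.asString(awsKmsKeyExample.arn),
+      },
+    });
+  }
+}
+```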
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Database name
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Athena Databases using their name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Athena Databases using their name. For example:
+
+```console
+% terraform import aws_athena_database.example example
+```
+
+Certain resource arguments, like `encryptionConfiguration` and `bucket`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AthenaDatabase } from "./.gen/providers/aws/athena-database";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AthenaDatabase(this, "example", {
+      bucket: Token.asString(awsS3BucketExample.id),
+      lifecycle: {
+        // ignoreChanges entries are attribute names, passed as strings
+        ignoreChanges: ["bucket"],
+      },
+      name: "database_name",
+    });
+  }
+}
+
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/athena_named_query.html.markdown b/website/docs/cdktf/typescript/r/athena_named_query.html.markdown
new file mode 100644
index 00000000000..4d47ecfe158
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/athena_named_query.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "Athena"
+layout: "aws"
+page_title: "AWS: aws_athena_named_query"
+description: |-
+  Provides an Athena Named Query resource.
+---
+
+
+
+# Resource: aws_athena_named_query
+
+Provides an Athena Named Query resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AthenaDatabase } from "./.gen/providers/aws/athena-database";
+import { AthenaNamedQuery } from "./.gen/providers/aws/athena-named-query";
+import { AthenaWorkgroup } from "./.gen/providers/aws/athena-workgroup";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new KmsKey(this, "test", {
+      deletionWindowInDays: 7,
+      description: "Athena KMS Key",
+    });
+    const hoge = new S3Bucket(this, "hoge", {
+      bucket: "tf-test",
+    });
+    const awsAthenaDatabaseHoge = new AthenaDatabase(this, "hoge_2", {
+      bucket: hoge.id,
+      name: "users",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAthenaDatabaseHoge.overrideLogicalId("hoge");
+    const awsAthenaWorkgroupTest = new AthenaWorkgroup(this, "test_3", {
+      configuration: {
+        resultConfiguration: {
+          encryptionConfiguration: {
+            encryptionOption: "SSE_KMS",
+            kmsKeyArn: test.arn,
+          },
+        },
+      },
+      name: "example",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAthenaWorkgroupTest.overrideLogicalId("test");
+    new AthenaNamedQuery(this, "foo", {
+      database: Token.asString(awsAthenaDatabaseHoge.name),
+      name: "bar",
+      query: "SELECT * FROM ${" + awsAthenaDatabaseHoge.name + "} limit 10;",
+      workgroup: Token.asString(awsAthenaWorkgroupTest.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Plain language name for the query. Maximum length of 128.
+* `workgroup` - (Optional) Workgroup to which the query belongs. Defaults to `primary`.
+* `database` - (Required) Database to which the query belongs.
+* `query` - (Required) Text of the query itself. In other words, all query statements. Maximum length of 262144.
+* `description` - (Optional) Brief explanation of the query. Maximum length of 1024.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique ID of the query.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Athena Named Query using the query ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Athena Named Query using the query ID. For example:
+
+```console
+% terraform import aws_athena_named_query.example 0123456789
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/athena_workgroup.html.markdown b/website/docs/cdktf/typescript/r/athena_workgroup.html.markdown
new file mode 100644
index 00000000000..353e5b8329e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/athena_workgroup.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "Athena"
+layout: "aws"
+page_title: "AWS: aws_athena_workgroup"
+description: |-
+  Manages an Athena Workgroup.
+---
+
+
+
+# Resource: aws_athena_workgroup
+
+Provides an Athena Workgroup.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AthenaWorkgroup } from "./.gen/providers/aws/athena-workgroup";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AthenaWorkgroup(this, "example", {
+      configuration: {
+        enforceWorkgroupConfiguration: true,
+        publishCloudwatchMetricsEnabled: true,
+        resultConfiguration: {
+          encryptionConfiguration: {
+            encryptionOption: "SSE_KMS",
+            kmsKeyArn: Token.asString(awsKmsKeyExample.arn),
+          },
+          outputLocation: "s3://${" + awsS3BucketExample.bucket + "}/output/",
+        },
+      },
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the workgroup.
+* `configuration` - (Optional) Configuration block with various settings for the workgroup. Documented below.
+* `description` - (Optional) Description of the workgroup.
+* `state` - (Optional) State of the workgroup. Valid values are `disabled` or `enabled`. Defaults to `enabled`.
+* `tags` - (Optional) Key-value map of resource tags for the workgroup. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `forceDestroy` - (Optional) Option to delete the workgroup and its contents even if the workgroup contains any named queries.
+
+### Configuration
+
+* `bytesScannedCutoffPerQuery` - (Optional) Integer for the upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan. Must be at least `10485760`.
+* `enforceWorkgroupConfiguration` - (Optional) Boolean whether the settings for the workgroup override client-side settings. For more information, see [Workgroup Settings Override Client-Side Settings](https://docs.aws.amazon.com/athena/latest/ug/workgroups-settings-override.html). Defaults to `true`.
+* `engineVersion` - (Optional) Configuration block for the Athena Engine Versioning. For more information, see [Athena Engine Versioning](https://docs.aws.amazon.com/athena/latest/ug/engine-versions.html). See [Engine Version](#engine-version) below.
+* `executionRole` - (Optional) Role used in a notebook session for accessing the user's resources.
+* `publishCloudwatchMetricsEnabled` - (Optional) Boolean whether Amazon CloudWatch metrics are enabled for the workgroup. Defaults to `true`.
+* `resultConfiguration` - (Optional) Configuration block with result settings. See [Result Configuration](#result-configuration) below.
+* `requesterPaysEnabled` - (Optional) If set to `true`, allows members assigned to a workgroup to reference Amazon S3 Requester Pays buckets in queries. If set to `false`, workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is `false`. For more information about Requester Pays buckets, see [Requester Pays Buckets](https://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html) in the Amazon Simple Storage Service Developer Guide.
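+
+As a hedged, minimal sketch of how these configuration settings compose (this is not `cdktf convert` output; all values are illustrative assumptions), a workgroup that enforces its settings and caps the data scanned per query might look like the following. The `engineVersion` and `resultConfiguration` sub-blocks are documented below:
+
+```typescript
+// Hypothetical sketch: a workgroup that overrides client-side settings and
+// limits each query to roughly 1 GiB of scanned data.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AthenaWorkgroup } from "./.gen/providers/aws/athena-workgroup";
+class MyLimitedWorkgroupSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AthenaWorkgroup(this, "limited", {
+      name: "limited",
+      configuration: {
+        bytesScannedCutoffPerQuery: 1073741824, // minimum allowed is 10485760
+        enforceWorkgroupConfiguration: true,
+        engineVersion: {
+          selectedEngineVersion: "AUTO",
+        },
+        publishCloudwatchMetricsEnabled: true,
+      },
+    });
+  }
+}
+```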
+
+#### Engine Version
+
+* `selectedEngineVersion` - (Optional) Requested engine version. Defaults to `auto`.
+
+#### Result Configuration
+
+* `encryptionConfiguration` - (Optional) Configuration block with encryption settings. See [Encryption Configuration](#encryption-configuration) below.
+* `aclConfiguration` - (Optional) Indicates that an Amazon S3 canned ACL should be set to control ownership of stored query results. See [ACL Configuration](#acl-configuration) below.
+* `expectedBucketOwner` - (Optional) AWS account ID that you expect to be the owner of the Amazon S3 bucket.
+* `outputLocation` - (Optional) Location in Amazon S3 where your query results are stored, such as `s3://path/to/query/bucket/`. For more information, see [Queries and Query Result Files](https://docs.aws.amazon.com/athena/latest/ug/querying.html).
+
+##### ACL Configuration
+
+* `s3AclOption` - (Required) Amazon S3 canned ACL that Athena should specify when storing query results. Valid value is `bucketOwnerFullControl`.
+
+##### Encryption Configuration
+
+* `encryptionOption` - (Required) Whether Amazon S3 server-side encryption with Amazon S3-managed keys (`sseS3`), server-side encryption with KMS-managed keys (`sseKms`), or client-side encryption with KMS-managed keys (`cseKms`) is used. If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted for all queries that run in this workgroup.
+* `kmsKeyArn` - (Optional) For `sseKms` and `cseKms`, this is the KMS key ARN.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the workgroup
+* `configuration` - Configuration block with various settings for the workgroup
+    * `engineVersion` - Configuration block for the Athena Engine Versioning
+        * `effectiveEngineVersion` - The engine version on which the query runs. If `selectedEngineVersion` is set to `auto`, the effective engine version is chosen by Athena.
+* `id` - Workgroup name
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Athena Workgroups using their name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Athena Workgroups using their name.
For example:
+
+```console
+% terraform import aws_athena_workgroup.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/auditmanager_account_registration.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_account_registration.html.markdown
new file mode 100644
index 00000000000..785c16223b8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/auditmanager_account_registration.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "Audit Manager"
+layout: "aws"
+page_title: "AWS: aws_auditmanager_account_registration"
+description: |-
+  Terraform resource for managing AWS Audit Manager Account Registration.
+---
+
+
+
+# Resource: aws_auditmanager_account_registration
+
+Terraform resource for managing AWS Audit Manager Account Registration.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AuditmanagerAccountRegistration } from "./.gen/providers/aws/auditmanager-account-registration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AuditmanagerAccountRegistration(this, "example", {});
+  }
+}
+
+```
+
+### Deregister On Destroy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AuditmanagerAccountRegistration } from "./.gen/providers/aws/auditmanager-account-registration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AuditmanagerAccountRegistration(this, "example", {
+      deregisterOnDestroy: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `delegatedAdminAccount` - (Optional) Identifier for the delegated administrator account.
+* `deregisterOnDestroy` - (Optional) Flag to deregister AuditManager in the account upon destruction. Defaults to `false` (i.e., AuditManager will remain active in the account even if this resource is removed).
+* `kmsKey` - (Optional) KMS key identifier.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier for the account registration. Since registration is applied per AWS region, this will be the active region name (e.g., `us-east-1`).
+* `status` - Status of the account registration request.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Account Registration resources using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Audit Manager Account Registration resources using the `id`.
For example:
+
+```console
+% terraform import aws_auditmanager_account_registration.example us-east-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/auditmanager_assessment.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_assessment.html.markdown
new file mode 100644
index 00000000000..98a399b512b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/auditmanager_assessment.html.markdown
@@ -0,0 +1,135 @@
+---
+subcategory: "Audit Manager"
+layout: "aws"
+page_title: "AWS: aws_auditmanager_assessment"
+description: |-
+  Terraform resource for managing an AWS Audit Manager Assessment.
+---
+
+
+
+# Resource: aws_auditmanager_assessment
+
+Terraform resource for managing an AWS Audit Manager Assessment.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AuditmanagerAssessment } from "./.gen/providers/aws/auditmanager-assessment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AuditmanagerAssessment(this, "test", {
+      assessmentReportsDestination: [
+        {
+          destination: "s3://${" + awsS3BucketTest.id + "}",
+          destinationType: "S3",
+        },
+      ],
+      frameworkId: Token.asString(awsAuditmanagerFrameworkTest.id),
+      name: "example",
+      roles: [
+        {
+          roleArn: Token.asString(awsIamRoleTest.arn),
+          roleType: "PROCESS_OWNER",
+        },
+      ],
+      scope: [
+        {
+          awsAccounts: [
+            {
+              id: Token.asString(current.accountId),
+            },
+          ],
+          awsServices: [
+            {
+              serviceName: "S3",
+            },
+          ],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the assessment.
+* `assessmentReportsDestination` - (Required) Assessment report storage destination configuration. See [`assessmentReportsDestination`](#assessment_reports_destination) below.
+* `frameworkId` - (Required) Unique identifier of the framework the assessment will be created from.
+* `roles` - (Required) List of roles for the assessment. See [`roles`](#roles) below.
+* `scope` - (Required) Amazon Web Services accounts and services that are in scope for the assessment. See [`scope`](#scope) below.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the assessment.
+* `tags` - (Optional) A map of tags to assign to the assessment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### assessment_reports_destination
+
+* `destination` - (Required) Destination of the assessment report. This value must be in the form `s3://{bucketName}`.
+* `destinationType` - (Required) Destination type. Currently, `S3` is the only valid value.
+
+### roles
+
+* `roleArn` - (Required) Amazon Resource Name (ARN) of the IAM role.
+* `roleType` - (Required) Type of customer persona. For assessment creation, type must always be `PROCESS_OWNER`.
+
+### scope
+
+* `awsAccounts` - Amazon Web Services accounts that are in scope for the assessment. See [`awsAccounts`](#aws_accounts) below. 
+* `awsServices` - Amazon Web Services services that are included in the scope of the assessment. See [`awsServices`](#aws_services) below. + +### aws_accounts + +* `id` - (Required) Identifier for the Amazon Web Services account. + +### aws_services + +* `serviceName` - (Required) Name of the Amazon Web Service. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the assessment. +* `id` - Unique identifier for the assessment. +* `rolesAll` - Complete list of all roles with access to the assessment. This includes both roles explicitly configured via the `roles` block, and any roles which have access to all Audit Manager assessments by default. +* `status` - Status of the assessment. Valid values are `active` and `inactive`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Assessments using the assessment `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Audit Manager Assessments using the assessment `id`. For example: + +```console +% terraform import aws_auditmanager_assessment.example abc123-de45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/auditmanager_assessment_delegation.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_assessment_delegation.html.markdown new file mode 100644 index 00000000000..50a18b3defa --- /dev/null +++ b/website/docs/cdktf/typescript/r/auditmanager_assessment_delegation.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_assessment_delegation" +description: |- + Terraform resource for managing an AWS Audit Manager Assessment Delegation. +--- + + + +# Resource: aws_auditmanager_assessment_delegation + +Terraform resource for managing an AWS Audit Manager Assessment Delegation. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AuditmanagerAssessmentDelegation } from "./.gen/providers/aws/auditmanager-assessment-delegation"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AuditmanagerAssessmentDelegation(this, "example", { + assessmentId: Token.asString(awsAuditmanagerAssessmentExample.id), + controlSetId: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + roleType: "RESOURCE_OWNER", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `assessmentId` - (Required) Identifier for the assessment. +* `controlSetId` - (Required) Assessment control set name. This value is the control set name used during assessment creation (not the AWS-generated ID). The `id` suffix on this attribute has been preserved to be consistent with the underlying AWS API. 
+* `roleArn` - (Required) Amazon Resource Name (ARN) of the IAM role.
+* `roleType` - (Required) Type of customer persona. For assessment delegation, type must always be `RESOURCE_OWNER`.
+
+The following arguments are optional:
+
+* `comment` - (Optional) Comment describing the delegation request.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `delegationId` - Unique identifier for the delegation.
+* `id` - Unique identifier for the resource. This is a comma-separated string containing `assessmentId`, `roleArn`, and `controlSetId`.
+* `status` - Status of the delegation.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Assessment Delegation using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Audit Manager Assessment Delegation using the `id`. For example:
+
+```console
+% terraform import aws_auditmanager_assessment_delegation.example abcdef-123456,arn:aws:iam::012345678901:role/example,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/auditmanager_assessment_report.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_assessment_report.html.markdown
new file mode 100644
index 00000000000..e23fe08eba8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/auditmanager_assessment_report.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Audit Manager"
+layout: "aws"
+page_title: "AWS: aws_auditmanager_assessment_report"
+description: |-
+  Terraform resource for managing an AWS Audit Manager Assessment Report.
+---
+
+
+
+# Resource: aws_auditmanager_assessment_report
+
+Terraform resource for managing an AWS Audit Manager Assessment Report.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AuditmanagerAssessmentReport } from "./.gen/providers/aws/auditmanager-assessment-report";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AuditmanagerAssessmentReport(this, "test", {
+      assessmentId: Token.asString(awsAuditmanagerAssessmentTest.id),
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the assessment report.
+* `assessmentId` - (Required) Unique identifier of the assessment to create the report from.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the assessment report.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `author` - Name of the user who created the assessment report.
+* `id` - Unique identifier for the assessment report.
+* `status` - Current status of the specified assessment report. Valid values are `COMPLETE`, `IN_PROGRESS`, and `FAILED`. 
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Assessment Reports using the assessment report `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Audit Manager Assessment Reports using the assessment report `id`. For example: + +```console +% terraform import aws_auditmanager_assessment_report.example abc123-de45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/auditmanager_control.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_control.html.markdown new file mode 100644 index 00000000000..3d371a5f8b4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/auditmanager_control.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_control" +description: |- + Terraform resource for managing an AWS Audit Manager Control. +--- + + + +# Resource: aws_auditmanager_control + +Terraform resource for managing an AWS Audit Manager Control. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AuditmanagerControl } from "./.gen/providers/aws/auditmanager-control"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AuditmanagerControl(this, "example", { + controlMappingSources: [ + { + sourceName: "example", + sourceSetUpOption: "Procedural_Controls_Mapping", + sourceType: "MANUAL", + }, + ], + name: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the control. +* `controlMappingSources` - (Required) Data mapping sources. See [`controlMappingSources`](#control_mapping_sources) below. + +The following arguments are optional: + +* `actionPlanInstructions` - (Optional) Recommended actions to carry out if the control isn't fulfilled. +* `actionPlanTitle` - (Optional) Title of the action plan for remediating the control. +* `description` - (Optional) Description of the control. +* `tags` - (Optional) A map of tags to assign to the control. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `testingInformation` - (Optional) Steps to follow to determine if the control is satisfied. + +### control_mapping_sources + +The following arguments are required: + +* `sourceName` - (Required) Name of the source. +* `sourceSetUpOption` - (Required) The setup option for the data source. This option reflects if the evidence collection is automated or manual. Valid values are `systemControlsMapping` (automated) and `proceduralControlsMapping` (manual). +* `sourceType` - (Required) Type of data source for evidence collection. 
If `sourceSetUpOption` is manual, the only valid value is `MANUAL`. If `sourceSetUpOption` is automated, valid values are `AWS_Cloudtrail`, `AWS_Config`, `AWS_Security_Hub`, or `AWS_API_Call`.
+
+The following arguments are optional:
+
+* `sourceDescription` - (Optional) Description of the source.
+* `sourceFrequency` - (Optional) Frequency of evidence collection. Valid values are `DAILY`, `WEEKLY`, or `MONTHLY`.
+* `sourceKeyword` - (Optional) The keyword to search for in CloudTrail logs, Config rules, Security Hub checks, and Amazon Web Services API names. See [`sourceKeyword`](#source_keyword) below.
+* `troubleshootingText` - (Optional) Instructions for troubleshooting the control.
+
+### source_keyword
+
+The following arguments are required:
+
+* `keywordInputType` - (Required) Input method for the keyword. The only valid value is `SELECT_FROM_LIST`.
+* `keywordValue` - (Required) The value of the keyword that's used when mapping a control data source. For example, this can be a CloudTrail event name, a rule name for Config, a Security Hub control, or the name of an Amazon Web Services API call. See the [Audit Manager supported control data sources documentation](https://docs.aws.amazon.com/audit-manager/latest/userguide/control-data-sources.html) for more information.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the control.
+* `controlMappingSources.*.sourceId` - Unique identifier for the source.
+* `id` - Unique identifier for the control.
+* `type` - Type of control, such as a custom control or a standard control.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Audit Manager Control using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an Audit Manager Control using the `id`. For example:
+
+```console
+% terraform import aws_auditmanager_control.example abc123-de45
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/auditmanager_framework.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_framework.html.markdown
new file mode 100644
index 00000000000..ba74dfe31ca
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/auditmanager_framework.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "Audit Manager"
+layout: "aws"
+page_title: "AWS: aws_auditmanager_framework"
+description: |-
+  Terraform resource for managing an AWS Audit Manager Framework.
+---
+
+
+
+# Resource: aws_auditmanager_framework
+
+Terraform resource for managing an AWS Audit Manager Framework.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
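+ *
+ * The stack below nests an existing custom control
+ * (aws_auditmanager_control.test) into a single control set on the framework.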
+ */ +import { AuditmanagerFramework } from "./.gen/providers/aws/auditmanager-framework"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AuditmanagerFramework(this, "test", { + controlSets: [ + { + controls: [ + { + id: Token.asString(awsAuditmanagerControlTest.id), + }, + ], + name: "example", + }, + ], + name: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the framework. +* `controlSets` - (Required) Control sets that are associated with the framework. See [`controlSets`](#control_sets) below. + +The following arguments are optional: + +* `complianceType` - (Optional) Compliance type that the new custom framework supports, such as `cis` or `hipaa`. +* `description` - (Optional) Description of the framework. +* `tags` - (Optional) A map of tags to assign to the framework. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### control_sets + +* `name` - (Required) Name of the control set. +* `controls` - (Required) List of controls within the control set. See [`controls`](#controls) below. + +### controls + +* `id` - (Required) Unique identifier of the control. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the framework. +* `controlSets[*]Id` - Unique identifier for the framework control set. +* `id` - Unique identifier for the framework. +* `frameworkType` - Framework type, such as a custom framework or a standard framework. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Framework using the framework `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Audit Manager Framework using the framework `id`. For example: + +```console +% terraform import aws_auditmanager_framework.example abc123-de45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/auditmanager_framework_share.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_framework_share.html.markdown new file mode 100644 index 00000000000..dc16fcdaaf2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/auditmanager_framework_share.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_framework_share" +description: |- + Terraform resource for managing an AWS Audit Manager Framework Share. +--- + + + +# Resource: aws_auditmanager_framework_share + +Terraform resource for managing an AWS Audit Manager Framework Share. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
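+ *
+ * The stack below shares the custom framework with account 012345678901
+ * in the us-east-1 region.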
+ */ +import { AuditmanagerFrameworkShare } from "./.gen/providers/aws/auditmanager-framework-share"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AuditmanagerFrameworkShare(this, "example", { + destinationAccount: "012345678901", + destinationRegion: "us-east-1", + frameworkId: Token.asString(awsAuditmanagerFrameworkExample.id), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `destinationAccount` - (Required) Amazon Web Services account of the recipient. +* `destinationRegion` - (Required) Amazon Web Services region of the recipient. +* `frameworkId` - (Required) Unique identifier for the shared custom framework. + +The following arguments are optional: + +* `comment` - (Optional) Comment from the sender about the share request. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier for the share request. +* `status` - Status of the share request. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Framework Share using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Audit Manager Framework Share using the `id`. For example: + +```console +% terraform import aws_auditmanager_framework_share.example abcdef-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/auditmanager_organization_admin_account_registration.html.markdown b/website/docs/cdktf/typescript/r/auditmanager_organization_admin_account_registration.html.markdown new file mode 100644 index 00000000000..6d237d306f1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/auditmanager_organization_admin_account_registration.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "Audit Manager" +layout: "aws" +page_title: "AWS: aws_auditmanager_organization_admin_account_registration" +description: |- + Terraform resource for managing AWS Audit Manager Organization Admin Account Registration. +--- + + + +# Resource: aws_auditmanager_organization_admin_account_registration + +Terraform resource for managing AWS Audit Manager Organization Admin Account Registration. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AuditmanagerOrganizationAdminAccountRegistration } from "./.gen/providers/aws/auditmanager-organization-admin-account-registration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AuditmanagerOrganizationAdminAccountRegistration(this, "example", { + adminAccountId: "012345678901", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `adminAccountId` - (Required) Identifier for the organization administrator account. 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier for the organization administrator account. +* `organizationId` - Identifier for the organization. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Audit Manager Organization Admin Account Registration using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Audit Manager Organization Admin Account Registration using the `id`. For example: + +```console +% terraform import aws_auditmanager_organization_admin_account_registration.example 012345678901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/autoscaling_attachment.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_attachment.html.markdown new file mode 100644 index 00000000000..8370c380ffc --- /dev/null +++ b/website/docs/cdktf/typescript/r/autoscaling_attachment.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_attachment" +description: |- + Terraform resource for managing an AWS Auto Scaling Attachment. +--- + + + +# Resource: aws_autoscaling_attachment + +Attaches a load balancer to an Auto Scaling group. + +~> **NOTE on Auto Scaling Groups, Attachments and Traffic Source Attachments:** Terraform provides standalone Attachment (for attaching Classic Load Balancers and Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target groups) and [Traffic Source Attachment](autoscaling_traffic_source_attachment.html) (for attaching Load Balancers and VPC Lattice target groups) resources and an [Auto Scaling Group](autoscaling_group.html) resource with `loadBalancers`, `targetGroupArns` and `trafficSource` attributes. Do not use the same traffic source in more than one of these resources. Doing so will cause a conflict of attachments. A [`lifecycle` configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) can be used to suppress differences if necessary. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingAttachment } from "./.gen/providers/aws/autoscaling-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AutoscalingAttachment(this, "example", { + autoscalingGroupName: Token.asString(awsAutoscalingGroupExample.id), + elb: Token.asString(awsElbExample.id), + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
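+ *
+ * This variant attaches the Auto Scaling group to an ALB/NLB target group
+ * via `lbTargetGroupArn` instead of a Classic ELB.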
+ */ +import { AutoscalingAttachment } from "./.gen/providers/aws/autoscaling-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AutoscalingAttachment(this, "example", { + autoscalingGroupName: Token.asString(awsAutoscalingGroupExample.id), + lbTargetGroupArn: Token.asString(awsLbTargetGroupExample.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `autoscalingGroupName` - (Required) Name of ASG to associate with the ELB. +* `elb` - (Optional) Name of the ELB. +* `lbTargetGroupArn` - (Optional) ARN of a load balancer target group. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/autoscaling_group.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_group.html.markdown new file mode 100644 index 00000000000..896f920a849 --- /dev/null +++ b/website/docs/cdktf/typescript/r/autoscaling_group.html.markdown @@ -0,0 +1,974 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_group" +description: |- + Provides an Auto Scaling Group resource. +--- + + + +# Resource: aws_autoscaling_group + +Provides an Auto Scaling Group resource. + +-> **Note:** You must specify either `launchConfiguration`, `launchTemplate`, or `mixedInstancesPolicy`. + +~> **NOTE on Auto Scaling Groups, Attachments and Traffic Source Attachments:** Terraform provides standalone [Attachment](autoscaling_attachment.html) (for attaching Classic Load Balancers and Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target groups) and [Traffic Source Attachment](autoscaling_traffic_source_attachment.html) (for attaching Load Balancers and VPC Lattice target groups) resources and an Auto Scaling Group resource with `loadBalancers`, `targetGroupArns` and `trafficSource` attributes. Do not use the same traffic source in more than one of these resources. Doing so will cause a conflict of attachments. A [`lifecycle` configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) can be used to suppress differences if necessary. + +> **Hands-on:** Try the [Manage AWS Auto Scaling Groups](https://learn.hashicorp.com/tutorials/terraform/aws-asg?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
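+ *
+ * This example launches across two subnets, places instances in a cluster
+ * placement group, and registers an EC2_INSTANCE_LAUNCHING lifecycle hook
+ * at group creation time.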
+ */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { PlacementGroup } from "./.gen/providers/aws/placement-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new PlacementGroup(this, "test", { + name: "test", + strategy: "cluster", + }); + new AutoscalingGroup(this, "bar", { + desiredCapacity: 4, + forceDelete: true, + healthCheckGracePeriod: 300, + healthCheckType: "ELB", + initialLifecycleHook: [ + { + defaultResult: "CONTINUE", + heartbeatTimeout: 2000, + lifecycleTransition: "autoscaling:EC2_INSTANCE_LAUNCHING", + name: "foobar", + notificationMetadata: Token.asString( + Fn.jsonencode({ + foo: "bar", + }) + ), + notificationTargetArn: "arn:aws:sqs:us-east-1:444455556666:queue1*", + roleArn: "arn:aws:iam::123456789012:role/S3Access", + }, + ], + launchConfiguration: foobar.name, + maxSize: 5, + minSize: 2, + name: "foobar3-terraform-test", + placementGroup: test.id, + tag: [ + { + key: "foo", + propagateAtLaunch: true, + value: "bar", + }, + { + key: "lorem", + propagateAtLaunch: false, + value: "ipsum", + }, + ], + timeouts: [ + { + delete: "15m", + }, + ], + vpcZoneIdentifier: [example1.id, example2.id], + }); + } +} + +``` + +### With Latest Version Of Launch Template + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foobar = new LaunchTemplate(this, "foobar", { + imageId: "ami-1a2b3c", + instanceType: "t2.micro", + namePrefix: "foobar", + }); + new AutoscalingGroup(this, "bar", { + availabilityZones: ["us-east-1a"], + desiredCapacity: 1, + launchTemplate: { + id: foobar.id, + version: "$Latest", + }, + maxSize: 1, + minSize: 1, + }); + } +} + +``` + +### Mixed Instances Policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new LaunchTemplate(this, "example", { + imageId: Token.asString(dataAwsAmiExample.id), + instanceType: "c5.large", + namePrefix: "example", + }); + const awsAutoscalingGroupExample = new AutoscalingGroup(this, "example_1", { + availabilityZones: ["us-east-1a"], + desiredCapacity: 1, + maxSize: 1, + minSize: 1, + mixedInstancesPolicy: { + launchTemplate: { + launchTemplateSpecification: { + launchTemplateId: example.id, + }, + override: [ + { + instanceType: "c4.large", + weightedCapacity: "3", + }, + { + instanceType: "c3.large", + weightedCapacity: "2", + }, + ], + }, + }, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsAutoscalingGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Mixed Instances Policy with Spot Instances and Capacity Rebalance + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new LaunchTemplate(this, "example", { + imageId: Token.asString(dataAwsAmiExample.id), + instanceType: "c5.large", + namePrefix: "example", + }); + const awsAutoscalingGroupExample = new AutoscalingGroup(this, "example_1", { + capacityRebalance: true, + desiredCapacity: 12, + maxSize: 15, + minSize: 12, + mixedInstancesPolicy: { + instancesDistribution: { + onDemandBaseCapacity: 0, + onDemandPercentageAboveBaseCapacity: 25, + spotAllocationStrategy: "capacity-optimized", + }, + launchTemplate: { + launchTemplateSpecification: { + launchTemplateId: example.id, + }, + override: [ + { + instanceType: "c4.large", + weightedCapacity: "3", + }, + { + instanceType: "c3.large", + weightedCapacity: "2", + }, + ], + }, + }, + vpcZoneIdentifier: [example1.id, example2.id], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAutoscalingGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Mixed Instances Policy with Instance level LaunchTemplateSpecification Overrides + +When using a diverse instance set, some instance types might require a launch template with configuration values unique to that instance type such as a different AMI (Graviton2), architecture specific user data script, different EBS configuration, or different networking configuration. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new LaunchTemplate(this, "example", { + imageId: Token.asString(dataAwsAmiExample.id), + instanceType: "c5.large", + namePrefix: "example", + }); + const example2 = new LaunchTemplate(this, "example2", { + imageId: Token.asString(dataAwsAmiExample2.id), + namePrefix: "example2", + }); + const awsAutoscalingGroupExample = new AutoscalingGroup(this, "example_2", { + availabilityZones: ["us-east-1a"], + desiredCapacity: 1, + maxSize: 1, + minSize: 1, + mixedInstancesPolicy: { + launchTemplate: { + launchTemplateSpecification: { + launchTemplateId: example.id, + }, + override: [ + { + instanceType: "c4.large", + weightedCapacity: "3", + }, + { + instanceType: "c6g.large", + launchTemplateSpecification: { + launchTemplateId: example2.id, + }, + weightedCapacity: "2", + }, + ], + }, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAutoscalingGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Mixed Instances Policy with Attribute-based Instance Type Selection + +As an alternative to manually choosing instance types when creating a mixed instances group, you can specify a set of instance attributes that describe your compute requirements. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new LaunchTemplate(this, "example", { + imageId: Token.asString(dataAwsAmiExample.id), + instanceType: "c5.large", + namePrefix: "example", + }); + const awsAutoscalingGroupExample = new AutoscalingGroup(this, "example_1", { + availabilityZones: ["us-east-1a"], + desiredCapacity: 1, + maxSize: 1, + minSize: 1, + mixedInstancesPolicy: { + launchTemplate: { + launchTemplateSpecification: { + launchTemplateId: example.id, + }, + override: [ + { + instanceRequirements: { + memoryMib: { + min: 1000, + }, + vcpuCount: { + min: 4, + }, + }, + }, + ], + }, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAutoscalingGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Dynamic tagging + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + TerraformVariable, + Token, + TerraformIterator, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
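+ *
+ * The `tag` blocks below are generated from the `extra_tags` list variable
+ * with a TerraformIterator, the CDKTF equivalent of a `dynamic` block.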
+ */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const extraTags = new TerraformVariable(this, "extra_tags", { + default: [ + { + key: "Foo", + propagate_at_launch: true, + value: "Bar", + }, + { + key: "Baz", + propagate_at_launch: true, + value: "Bam", + }, + ], + }); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const testDynamicIterator0 = TerraformIterator.fromList( + Token.asAny(extraTags.value) + ); + new AutoscalingGroup(this, "test", { + launchConfiguration: foobar.name, + maxSize: 5, + minSize: 2, + name: "foobar3-terraform-test", + tag: testDynamicIterator0.dynamic({ + key: propertyAccess(testDynamicIterator0.value, ["key"]), + propagate_at_launch: propertyAccess(testDynamicIterator0.value, [ + "propagate_at_launch", + ]), + value: propertyAccess(testDynamicIterator0.value, ["value"]), + }), + vpcZoneIdentifier: [example1.id, example2.id], + }); + } +} + +``` + +### Automatically refresh all instances after the group is updated + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { DataAwsAmi } from "./.gen/providers/aws/data-aws-ami"; +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsAmi(this, "example", { + filter: [ + { + name: "name", + values: ["amzn-ami-hvm-*-x86_64-gp2"], + }, + ], + mostRecent: true, + owners: ["amazon"], + }); + const awsLaunchTemplateExample = new LaunchTemplate(this, "example_1", { + imageId: Token.asString(example.id), + instanceType: "t3.nano", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLaunchTemplateExample.overrideLogicalId("example"); + const awsAutoscalingGroupExample = new AutoscalingGroup(this, "example_2", { + availabilityZones: ["us-east-1a"], + desiredCapacity: 1, + instanceRefresh: { + preferences: { + minHealthyPercentage: 50, + }, + strategy: "Rolling", + triggers: ["tag"], + }, + launchTemplate: { + id: Token.asString(awsLaunchTemplateExample.id), + version: Token.asString(awsLaunchTemplateExample.latestVersion), + }, + maxSize: 2, + minSize: 1, + tag: [ + { + key: "Key", + propagateAtLaunch: true, + value: "Value", + }, + ], + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsAutoscalingGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Auto Scaling group with Warm Pool + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AutoscalingGroup(this, "example", { + availabilityZones: ["us-east-1a"], + desiredCapacity: 1, + maxSize: 5, + minSize: 1, + warmPool: { + instanceReusePolicy: { + reuseOnScaleIn: true, + }, + maxGroupPreparedCapacity: 10, + minSize: 1, + poolState: "Hibernated", + }, + }); + const awsLaunchTemplateExample = new LaunchTemplate(this, "example_1", { + imageId: Token.asString(dataAwsAmiExample.id), + instanceType: "c5.large", + namePrefix: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLaunchTemplateExample.overrideLogicalId("example"); + } +} + +``` + +### Auto Scaling group with Traffic Sources + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + propertyAccess, + Token, + TerraformIterator, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const testDynamicIterator0 = TerraformIterator.fromList( + Token.asAny(propertyAccess(awsVpclatticeTargetGroupTest, ["*"])) + ); + new AutoscalingGroup(this, "test", { + forceDelete: true, + maxSize: 1, + minSize: 1, + vpcZoneIdentifier: Token.asList(awsSubnetTest.id), + trafficSource: testDynamicIterator0.dynamic({ + identifier: propertyAccess(testDynamicIterator0.value, ["arn"]), + type: "vpc-lattice", + }), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +- `name` - (Optional) Name of the Auto Scaling Group. By default generated by Terraform. Conflicts with `namePrefix`. +- `namePrefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +- `maxSize` - (Required) Maximum size of the Auto Scaling Group. +- `minSize` - (Required) Minimum size of the Auto Scaling Group. + (See also [Waiting for Capacity](#waiting-for-capacity) below.) +- `availabilityZones` - (Optional) A list of Availability Zones where instances in the Auto Scaling group can be created. 
Used for launching into the default VPC subnet in each Availability Zone when not using the `vpcZoneIdentifier` attribute, or for attaching a network interface when an existing network interface ID is specified in a launch template. Conflicts with `vpcZoneIdentifier`.
+- `capacityRebalance` - (Optional) Whether capacity rebalance is enabled. Otherwise, capacity rebalance is disabled.
+- `context` - (Optional) Reserved.
+- `defaultCooldown` - (Optional) Amount of time, in seconds, after a scaling activity completes before another scaling activity can start.
+- `defaultInstanceWarmup` - (Optional) Amount of time, in seconds, until a newly launched instance can contribute to the Amazon CloudWatch metrics. This delay lets an instance finish initializing before Amazon EC2 Auto Scaling aggregates instance metrics, resulting in more reliable usage data. Set this value equal to the amount of time that it takes for resource consumption to become stable after an instance reaches the InService state. (See [Set the default instance warmup for an Auto Scaling group](https://docs.aws.amazon.com/autoscaling/ec2/userguide/ec2-auto-scaling-default-instance-warmup.html))
+- `launchConfiguration` - (Optional) Name of the launch configuration to use.
+- `launchTemplate` - (Optional) Nested argument with Launch template specification to use to launch instances. See [Launch Template](#launch_template) below for more details.
+- `mixedInstancesPolicy` (Optional) Configuration block containing settings to define launch targets for Auto Scaling groups. See [Mixed Instances Policy](#mixed_instances_policy) below for more details.
+- `initialLifecycleHook` - (Optional) One or more
+  [Lifecycle Hooks](http://docs.aws.amazon.com/autoscaling/latest/userguide/lifecycle-hooks.html)
+  to attach to the Auto Scaling Group **before** instances are launched. The
+  syntax is exactly the same as the separate
+  [`awsAutoscalingLifecycleHook`](/docs/providers/aws/r/autoscaling_lifecycle_hook.html)
+  resource, without the `autoscalingGroupName` attribute. Please note that this will only work when creating
+  a new Auto Scaling Group. For all other use cases, please use the `awsAutoscalingLifecycleHook` resource.
+- `healthCheckGracePeriod` - (Optional, Default: 300) Time (in seconds) after instance comes into service before checking health.
+- `healthCheckType` - (Optional) "EC2" or "ELB". Controls how health checking is done.
+- `desiredCapacity` - (Optional) Number of Amazon EC2 instances that
+  should be running in the group. (See also [Waiting for
+  Capacity](#waiting-for-capacity) below.)
+- `desiredCapacityType` - (Optional) The unit of measurement for the value specified for `desiredCapacity`. Supported for attribute-based instance type selection only. Valid values: `"units"`, `"vcpu"`, `"memory-mib"`.
+- `forceDelete` - (Optional) Allows deleting the Auto Scaling Group without waiting
+  for all instances in the pool to terminate. You can force an Auto Scaling Group to delete
+  even if it's in the process of scaling a resource. Normally, Terraform
+  drains all the instances before deleting the group. This bypasses that
+  behavior and potentially leaves resources dangling.
+- `loadBalancers` (Optional) List of elastic load balancer names to add to the Auto Scaling
+  group. Only valid for classic load balancers. For ALBs, use `targetGroupArns` instead. To remove all load balancer attachments, specify an empty list. 
+- `trafficSource` (Optional) Attaches one or more traffic sources to the specified Auto Scaling group.
+- `vpcZoneIdentifier` (Optional) List of subnet IDs to launch resources in. The subnets automatically determine which Availability Zones the group will reside in. Conflicts with `availabilityZones`.
+- `targetGroupArns` (Optional) Set of `awsAlbTargetGroup` ARNs, for use with Application or Network Load Balancing. To remove all target group attachments, specify an empty list.
+- `terminationPolicies` (Optional) List of policies to decide how the instances in the Auto Scaling Group should be terminated. The allowed values are `OldestInstance`, `NewestInstance`, `OldestLaunchConfiguration`, `ClosestToNextInstanceHour`, `OldestLaunchTemplate`, `AllocationStrategy`, `Default`. Additionally, the ARN of a Lambda function can be specified for custom termination policies.
+- `suspendedProcesses` - (Optional) List of processes to suspend for the Auto Scaling Group. The allowed values are `Launch`, `Terminate`, `HealthCheck`, `ReplaceUnhealthy`, `AZRebalance`, `AlarmNotification`, `ScheduledActions`, `AddToLoadBalancer`, `InstanceRefresh`.
+  Note that if you suspend either the `Launch` or `Terminate` process types, it can prevent your Auto Scaling Group from functioning properly.
+- `tag` (Optional) Configuration block(s) containing resource tags. See [Tag](#tag) below for more details.
+- `placementGroup` (Optional) Name of the placement group into which you'll launch your instances, if any.
+- `metricsGranularity` - (Optional) Granularity to associate with the metrics to collect. The only valid value is `1Minute`. Default is `1Minute`.
+- `enabledMetrics` - (Optional) List of metrics to collect. The allowed values are defined by the [underlying AWS API](https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_EnableMetricsCollection.html).
+- `waitForCapacityTimeout` (Default: "10m") Maximum
+  [duration](https://golang.org/pkg/time/#ParseDuration) that Terraform should
+  wait for ASG instances to be healthy before timing out. (See also [Waiting
+  for Capacity](#waiting-for-capacity) below.) Setting this to "0" causes
+  Terraform to skip all Capacity Waiting behavior.
+- `minElbCapacity` - (Optional) Setting this causes Terraform to wait for
+  this number of instances from this Auto Scaling Group to show up healthy in the
+  ELB only on creation. Updates will not wait on ELB instance number changes.
+  (See also [Waiting for Capacity](#waiting-for-capacity) below.)
+- `waitForElbCapacity` - (Optional) Setting this will cause Terraform to wait
+  for exactly this number of healthy instances from this Auto Scaling Group in
+  all attached load balancers on both create and update operations. (Takes
+  precedence over `minElbCapacity` behavior.)
+  (See also [Waiting for Capacity](#waiting-for-capacity) below.)
+- `protectFromScaleIn` (Optional) Whether newly launched instances
+  are automatically protected from termination by Amazon EC2 Auto Scaling when
+  scaling in. For more information about preventing instances from terminating
+  on scale in, see [Using instance scale-in protection](https://docs.aws.amazon.com/autoscaling/ec2/userguide/ec2-auto-scaling-instance-protection.html)
+  in the Amazon EC2 Auto Scaling User Guide. 
+- `serviceLinkedRoleArn` (Optional) ARN of the service-linked role that the ASG will use to call other AWS services.
+- `maxInstanceLifetime` (Optional) Maximum amount of time, in seconds, that an instance can be in service. Values must be either equal to 0 or between 86400 and 31536000 seconds.
+- `instanceRefresh` - (Optional) If this block is configured, start an
+  [Instance Refresh](https://docs.aws.amazon.com/autoscaling/ec2/userguide/asg-instance-refresh.html)
+  when this Auto Scaling Group is updated. Defined [below](#instance_refresh).
+- `warmPool` - (Optional) If this block is configured, add a [Warm Pool](https://docs.aws.amazon.com/autoscaling/ec2/userguide/ec2-auto-scaling-warm-pools.html)
+  to the specified Auto Scaling group. Defined [below](#warm_pool).
+
+### launch_template
+
+~> **NOTE:** Either `id` or `name` must be specified.
+
+The top-level `launchTemplate` block supports the following:
+
+- `id` - (Optional) ID of the launch template. Conflicts with `name`.
+- `name` - (Optional) Name of the launch template. Conflicts with `id`.
+- `version` - (Optional) Template version. Can be a version number, `$Latest`, or `$Default`. (Default: `$Default`.)
+
+### mixed_instances_policy
+
+- `instancesDistribution` - (Optional) Nested argument containing settings on how to mix on-demand and Spot instances in the Auto Scaling group. Defined below.
+- `launchTemplate` - (Required) Nested argument containing launch template settings along with the overrides to specify multiple instance types and weights. Defined below.
+
+#### mixed_instances_policy instances_distribution
+
+This configuration block supports the following:
+
+- `onDemandAllocationStrategy` - (Optional) Strategy to use when launching on-demand instances. Valid values: `prioritized`, `lowest-price`. Default: `prioritized`.
+- `onDemandBaseCapacity` - (Optional) Absolute minimum amount of desired capacity that must be fulfilled by on-demand instances. Default: `0`.
+- `onDemandPercentageAboveBaseCapacity` - (Optional) Percentage split between on-demand and Spot instances above the base on-demand capacity. Default: `100`.
+- `spotAllocationStrategy` - (Optional) How to allocate capacity across the Spot pools. Valid values: `lowest-price`, `capacity-optimized`, `capacity-optimized-prioritized`, and `price-capacity-optimized`. Default: `lowest-price`.
+- `spotInstancePools` - (Optional) Number of Spot pools per availability zone to allocate capacity. EC2 Auto Scaling selects the cheapest Spot pools and evenly allocates Spot capacity across the number of Spot pools that you specify. Only available with `spotAllocationStrategy` set to `lowest-price`. Otherwise it must be set to `0`, if it has been defined before. Default: `2`.
+- `spotMaxPrice` - (Optional) Maximum price per unit hour that the user is willing to pay for the Spot instances. Default: an empty string, which means the on-demand price.
+
+#### mixed_instances_policy launch_template
+
+This configuration block supports the following (see the sketch below for how the pieces fit together):
+
+- `launchTemplateSpecification` - (Required) Nested argument that defines the Launch Template. Defined below.
+- `override` - (Optional) List of nested arguments that allows specifying multiple instance types. This will override the same parameter in the launch template. For on-demand instances, Auto Scaling considers the order of preference of instance types to launch based on the order specified in the overrides list. Defined below. 
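+Putting these blocks together: the following is a hand-written sketch (not `cdktf convert` output; the template, resource names, and sizing values are illustrative) that references a launch template by `launchTemplateName`, splits capacity between on-demand and Spot instances, and weights two instance type overrides:
+
+```typescript
+// Hand-written sketch; resource names and sizing values are illustrative only.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { LaunchTemplate } from "./.gen/providers/aws/launch-template";
+class MixedInstancesSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new LaunchTemplate(this, "example", {
+      imageId: "ami-1a2b3c",
+      instanceType: "c5.large",
+      namePrefix: "example",
+    });
+    new AutoscalingGroup(this, "sketch", {
+      availabilityZones: ["us-east-1a"],
+      maxSize: 4,
+      minSize: 1,
+      mixedInstancesPolicy: {
+        // Keep one on-demand instance, then launch half of any additional
+        // capacity as Spot using the price-capacity-optimized strategy.
+        instancesDistribution: {
+          onDemandBaseCapacity: 1,
+          onDemandPercentageAboveBaseCapacity: 50,
+          spotAllocationStrategy: "price-capacity-optimized",
+        },
+        launchTemplate: {
+          // The template can be referenced by name instead of ID.
+          launchTemplateSpecification: {
+            launchTemplateName: example.name,
+            version: "$Latest",
+          },
+          // Each override adds an instance type with a relative weight.
+          override: [
+            { instanceType: "c5.large", weightedCapacity: "1" },
+            { instanceType: "c5.xlarge", weightedCapacity: "2" },
+          ],
+        },
+      },
+    });
+  }
+}
+
+```
+
+Referencing the template by name and pinning `version: "$Latest"` keeps the group on the newest template version without hard-coding an ID.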
+
+##### mixed_instances_policy launch_template launch_template_specification
+
+~> **NOTE:** Either `launchTemplateId` or `launchTemplateName` must be specified.
+
+This configuration block supports the following:
+
+- `launchTemplateId` - (Optional) ID of the launch template. Conflicts with `launchTemplateName`.
+- `launchTemplateName` - (Optional) Name of the launch template. Conflicts with `launchTemplateId`.
+- `version` - (Optional) Template version. Can be a version number, `$Latest`, or `$Default`. (Default: `$Default`.)
+
+##### mixed_instances_policy launch_template override
+
+This configuration block supports the following:
+
+- `instanceType` - (Optional) Override the instance type in the launch template.
+- `instanceRequirements` - (Optional) Override the instance type in the launch template with instance types that satisfy the requirements.
+- `launchTemplateSpecification` - (Optional) Override the instance launch template specification in the launch template.
+- `weightedCapacity` - (Optional) Number of capacity units, which gives the instance type a proportional weight to other instance types.
+
+###### mixed_instances_policy launch_template override instance_requirements
+
+This configuration block supports the following:
+
+~> **NOTE:** Both `memoryMib.min` and `vcpuCount.min` must be specified.
+
+- `acceleratorCount` - (Optional) Block describing the minimum and maximum number of accelerators (GPUs, FPGAs, or AWS Inferentia chips). Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum. Set to `0` to exclude instance types with accelerators.
+- `acceleratorManufacturers` - (Optional) List of accelerator manufacturer names. Default is any manufacturer.
+
+    ```
+    Valid names:
+      * amazon-web-services
+      * amd
+      * nvidia
+      * xilinx
+    ```
+
+- `acceleratorNames` - (Optional) List of accelerator names. Default is any accelerator.
+
+    ```
+    Valid names:
+      * a100 - NVIDIA A100 GPUs
+      * v100 - NVIDIA V100 GPUs
+      * k80 - NVIDIA K80 GPUs
+      * t4 - NVIDIA T4 GPUs
+      * m60 - NVIDIA M60 GPUs
+      * radeon-pro-v520 - AMD Radeon Pro V520 GPUs
+      * vu9p - Xilinx VU9P FPGAs
+    ```
+
+- `acceleratorTotalMemoryMib` - (Optional) Block describing the minimum and maximum total memory of the accelerators. Default is no minimum or maximum.
+
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+
+- `acceleratorTypes` - (Optional) List of accelerator types. Default is any accelerator type.
+
+    ```
+    Valid types:
+      * fpga
+      * gpu
+      * inference
+    ```
+
+- `allowedInstanceTypes` - (Optional) List of instance types to apply your specified attributes against. All other instance types are ignored, even if they match your specified attributes. You can use strings with one or more wildcards, represented by an asterisk (\*), to allow an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are allowing the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are allowing all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is all instance types.
+
+    ~> **NOTE:** If you specify `allowedInstanceTypes`, you can't specify `excludedInstanceTypes`.
+
+- `bareMetal` - (Optional) Indicate whether bare metal instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+- `baselineEbsBandwidthMbps` - (Optional) Block describing the minimum and maximum baseline EBS bandwidth, in Mbps. Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+- `burstablePerformance` - (Optional) Indicate whether burstable performance instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+- `cpuManufacturers` (Optional) List of CPU manufacturer names. Default is any manufacturer.
+
+    ~> **NOTE:** Don't confuse the CPU hardware manufacturer with the CPU hardware architecture. Instances will be launched with a compatible CPU architecture based on the Amazon Machine Image (AMI) that you specify in your launch template.
+
+    ```
+    Valid names:
+      * amazon-web-services
+      * amd
+      * intel
+    ```
+
+- `excludedInstanceTypes` - (Optional) List of instance types to exclude. You can use strings with one or more wildcards, represented by an asterisk (\*), to exclude an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are excluding the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are excluding all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is no excluded instance types.
+
+    ~> **NOTE:** If you specify `excludedInstanceTypes`, you can't specify `allowedInstanceTypes`.
+
+- `instanceGenerations` - (Optional) List of instance generation names. Default is any generation.
+
+    ```
+    Valid names:
+      * current - Recommended for best performance.
+      * previous - For existing applications optimized for older instance types.
+    ```
+
+- `localStorage` - (Optional) Indicate whether instance types with local storage volumes are `included`, `excluded`, or `required`. Default is `included`.
+- `localStorageTypes` - (Optional) List of local storage type names. Default is any storage type.
+
+    ```
+    Valid names:
+      * hdd - hard disk drive
+      * ssd - solid state drive
+    ```
+
+- `memoryGibPerVcpu` - (Optional) Block describing the minimum and maximum amount of memory (GiB) per vCPU. Default is no minimum or maximum.
+    - `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    - `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+- `memoryMib` - (Required) Block describing the minimum and maximum amount of memory (MiB). Default is no maximum.
+    - `min` - (Required) Minimum.
+    - `max` - (Optional) Maximum.
+- `networkBandwidthGbps` - (Optional) Block describing the minimum and maximum amount of network bandwidth, in gigabits per second (Gbps). Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+- `networkInterfaceCount` - (Optional) Block describing the minimum and maximum number of network interfaces. Default is no minimum or maximum.
+    - `min` - (Optional) Minimum.
+    - `max` - (Optional) Maximum.
+- `onDemandMaxPricePercentageOverLowestPrice` - (Optional) Price protection threshold for On-Demand Instances. This is the maximum you’ll pay for an On-Demand Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, we will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage.
To turn off price protection, specify a high value, such as 999999. Default is 20.
+
+    If you set `desiredCapacityType` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+
+- `requireHibernateSupport` - (Optional) Indicate whether instance types must support On-Demand Instance Hibernation, either `true` or `false`. Default is `false`.
+- `spotMaxPricePercentageOverLowestPrice` - (Optional) Price protection threshold for Spot Instances. This is the maximum you’ll pay for a Spot Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, we will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 100.
+
+    If you set `desiredCapacityType` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+
+- `totalLocalStorageGb` - (Optional) Block describing the minimum and maximum total local storage (GB). Default is no minimum or maximum.
+    - `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    - `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+- `vcpuCount` - (Required) Block describing the minimum and maximum number of vCPUs. Default is no maximum.
+    - `min` - (Required) Minimum.
+    - `max` - (Optional) Maximum.
+
+### tag
+
+The `tag` attribute accepts exactly one tag declaration with the following fields:
+
+- `key` - (Required) Key.
+- `value` - (Required) Value.
+- `propagateAtLaunch` - (Required) Enables propagation of the tag to
+  Amazon EC2 instances launched via this ASG.
+
+To declare multiple tags, additional `tag` blocks can be specified.
+
+~> **NOTE:** Other AWS APIs may automatically add special tags to their associated Auto Scaling Group for management purposes, such as ECS Capacity Providers adding the `AmazonECSManaged` tag. These tags should generally be included in the configuration so that Terraform does not attempt to remove them and so that, if `minSize` was greater than zero on creation, they are applied to any initial EC2 instances in the Auto Scaling Group. If these tags were missing from the Auto Scaling Group configuration on creation, the affected EC2 instances may require manual intervention to add the tags so that they work properly with the other AWS service.
+
+### instance_refresh
+
+This configuration block supports the following:
+
+- `strategy` - (Required) Strategy to use for instance refresh. The only allowed value is `Rolling`. See [StartInstanceRefresh Action](https://docs.aws.amazon.com/autoscaling/ec2/APIReference/API_StartInstanceRefresh.html#API_StartInstanceRefresh_RequestParameters) for more information.
+- `preferences` - (Optional) Override default parameters for Instance Refresh.
+    - `checkpointDelay` - (Optional) Number of seconds to wait after a checkpoint. Defaults to `3600`.
+    - `checkpointPercentages` - (Optional) List of percentages for each checkpoint. Values must be unique and in ascending order. To replace all instances, the final number must be `100`.
+    - `instanceWarmup` - (Optional) Number of seconds until a newly launched instance is configured and ready to use.
Default behavior is to use the Auto Scaling Group's health check grace period.
+    - `minHealthyPercentage` - (Optional) Amount of capacity in the Auto Scaling group that must remain healthy during an instance refresh to allow the operation to continue, as a percentage of the desired capacity of the Auto Scaling group. Defaults to `90`.
+    - `skipMatching` - (Optional) Whether to skip replacing instances that already match the desired configuration. Defaults to `false`.
+    - `autoRollback` - (Optional) Automatically roll back if the instance refresh fails. Defaults to `false`. This option may only be set to `true` when specifying a `launchTemplate` or `mixedInstancesPolicy`.
+- `triggers` - (Optional) Set of additional property names that will trigger an Instance Refresh. A refresh will always be triggered by a change in any of `launchConfiguration`, `launchTemplate`, or `mixedInstancesPolicy`.
+
+~> **NOTE:** A refresh is started when any of the following Auto Scaling Group properties change: `launchConfiguration`, `launchTemplate`, `mixedInstancesPolicy`. Additional properties can be specified in the `triggers` property of `instanceRefresh`.
+
+~> **NOTE:** A refresh will not start when `version = "$Latest"` is configured in the `launchTemplate` block. To trigger the instance refresh when a launch template is changed, configure `version` to use the `latestVersion` attribute of the `awsLaunchTemplate` resource.
+
+~> **NOTE:** Auto Scaling Groups support up to one active instance refresh at a time. When this resource is updated, any existing refresh is cancelled.
+
+~> **NOTE:** Depending on health check settings and group size, an instance refresh may take a long time or fail. This resource does not wait for the instance refresh to complete.
+
+### warm_pool
+
+This configuration block supports the following:
+
+- `instanceReusePolicy` - (Optional) Whether instances in the Auto Scaling group can be returned to the warm pool on scale in. The default is to terminate instances in the Auto Scaling group when the group scales in.
+- `maxGroupPreparedCapacity` - (Optional) Total maximum number of instances that are allowed to be in the warm pool or in any state except `Terminated` for the Auto Scaling group.
+- `minSize` - (Optional) Minimum number of instances to maintain in the warm pool. This helps you to ensure that there is always a certain number of warmed instances available to handle traffic spikes. Defaults to `0` if not specified.
+- `poolState` - (Optional) Sets the instance state to transition to after the lifecycle hooks finish. Valid values are: `Stopped` (default), `Running`, or `Hibernated`.
+
+### traffic_source
+
+- `identifier` - Identifies the traffic source. For Application Load Balancers, Gateway Load Balancers, Network Load Balancers, and VPC Lattice, this will be the Amazon Resource Name (ARN) for a target group in this account and Region. For Classic Load Balancers, this will be the name of the Classic Load Balancer in this account and Region.
+- `type` - Provides additional context for the value of `identifier`. The following are the valid values:
+    - `elb` if `identifier` is the name of a Classic Load Balancer.
+    - `elbv2` if `identifier` is the ARN of an Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target group.
+    - `vpc-lattice` if `identifier` is the ARN of a VPC Lattice target group.
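+
+As an illustration only, attaching a target group directly on the group via `trafficSource` might look like the following sketch. The stack context and the `exampleTargetGroup` reference are placeholders assumed to be defined elsewhere:
+
+```typescript
+// Hypothetical sketch: assumes the imports shown in the other examples on
+// this page, and an aws_lb_target_group named `exampleTargetGroup`
+// defined elsewhere in the same stack.
+new AutoscalingGroup(this, "with_traffic_source", {
+  maxSize: 3,
+  minSize: 1,
+  trafficSource: [
+    {
+      identifier: exampleTargetGroup.arn,
+      type: "elbv2",
+    },
+  ],
+});
+```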
+
+##### instance_reuse_policy
+
+This configuration block supports the following:
+
+- `reuseOnScaleIn` - (Optional) Whether instances in the Auto Scaling group can be returned to the warm pool on scale in.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `id` - Auto Scaling Group ID.
+- `arn` - ARN for this Auto Scaling Group.
+- `availabilityZones` - Availability zones of the Auto Scaling Group.
+- `minSize` - Minimum size of the Auto Scaling Group.
+- `maxSize` - Maximum size of the Auto Scaling Group.
+- `defaultCooldown` - Time between a scaling activity and the succeeding scaling activity.
+- `defaultInstanceWarmup` - The duration of the default instance warmup, in seconds.
+- `name` - Name of the Auto Scaling Group.
+- `healthCheckGracePeriod` - Time after instance comes into service before checking health.
+- `healthCheckType` - "EC2" or "ELB". Controls how health checking is done.
+- `desiredCapacity` - The number of Amazon EC2 instances that should be running in the group.
+- `launchConfiguration` - The launch configuration of the Auto Scaling Group.
+- `predictedCapacity` - Predicted capacity of the group.
+- `vpcZoneIdentifier` - The VPC zone identifier.
+- `warmPoolSize` - Current size of the warm pool.
+
+~> **NOTE:** When using `ELB` as the `healthCheckType`, `healthCheckGracePeriod` is required.
+
+~> **NOTE:** Terraform has two ways you can add lifecycle hooks - via
+the `initialLifecycleHook` attribute from this resource, or via the separate
+[`awsAutoscalingLifecycleHook`](/docs/providers/aws/r/autoscaling_lifecycle_hook.html)
+resource. `initialLifecycleHook` exists here because any lifecycle hooks
+added with `awsAutoscalingLifecycleHook` will not be added until the
+Auto Scaling Group has been created and, depending on your
+[capacity](#waiting-for-capacity) settings, possibly after the initial
+instances have been launched, which can create unintended behavior. If you
+need hooks to run on all instances, add them with `initialLifecycleHook`
+here, but take care to not duplicate these hooks in
+`awsAutoscalingLifecycleHook`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `10m`)
+
+## Waiting for Capacity
+
+A newly created ASG is initially empty and begins to scale to `minSize` (or
+`desiredCapacity`, if specified) by launching instances using the provided
+Launch Configuration. These instances take time to launch and boot.
+
+On ASG update, changes to these values also take time to result in the target
+number of instances providing service.
+
+Terraform provides two mechanisms to help consistently manage ASG scale-up
+time across dependent resources.
+
+#### Waiting for ASG Capacity
+
+The first is the default behavior. Terraform waits after ASG creation for
+`minSize` (or `desiredCapacity`, if specified) healthy instances to show up
+in the ASG before continuing.
+
+If `minSize` or `desiredCapacity` are changed in a subsequent update,
+Terraform will also wait for the correct number of healthy instances before
+continuing.
+
+Terraform considers an instance "healthy" when the ASG reports `HealthStatus:
+"Healthy"` and `LifecycleState: "InService"`. See the [AWS AutoScaling
+Docs](https://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/AutoScalingGroupLifecycle.html)
+for more information on an ASG's lifecycle.
+
+Terraform will wait for healthy instances for up to
+`waitForCapacityTimeout`.
If ASG creation is taking more than a few minutes,
+it's worth investigating for scaling activity errors, which can be caused by
+problems with the selected Launch Configuration.
+
+Setting `waitForCapacityTimeout` to `"0"` disables ASG Capacity waiting.
+
+#### Waiting for ELB Capacity
+
+The second mechanism is optional and affects ASGs with attached ELBs specified
+via the `loadBalancers` attribute or with ALBs specified with `targetGroupArns`.
+
+The `minElbCapacity` parameter causes Terraform to wait for at least the
+requested number of instances to show up `"InService"` in all attached ELBs
+during ASG creation. It has no effect on ASG updates.
+
+If `waitForElbCapacity` is set, Terraform will wait for exactly that number
+of instances to be `"InService"` in all attached ELBs on both creation and
+updates.
+
+These parameters can be used to ensure that service is being provided before
+Terraform moves on. If new instances don't pass the ELB's health checks for any
+reason, the Terraform apply will time out, and the ASG will be marked as
+tainted (i.e., marked to be destroyed in a follow-up run).
+
+As with ASG Capacity, Terraform will wait for up to `waitForCapacityTimeout`
+for the proper number of instances to be healthy.
+
+#### Troubleshooting Capacity Waiting Timeouts
+
+If ASG creation takes more than a few minutes, this could indicate one of a
+number of configuration problems. See the [AWS Docs on Load Balancer
+Troubleshooting](https://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/elb-troubleshooting.html)
+for more information.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Auto Scaling Groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Auto Scaling Groups using the `name`. For example:
+
+```console
+% terraform import aws_autoscaling_group.web web-asg
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/autoscaling_group_tag.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_group_tag.html.markdown
new file mode 100644
index 00000000000..170263d0521
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/autoscaling_group_tag.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_autoscaling_group_tag"
+description: |-
+  Manages an individual Autoscaling Group tag
+---
+
+
+# Resource: aws_autoscaling_group_tag
+
+Manages an individual Autoscaling Group (ASG) tag. This resource should only be used in cases where ASGs are created outside Terraform (e.g., ASGs implicitly created by EKS Node Groups).
+
+~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `awsAutoscalingGroup` and `awsAutoscalingGroupTag` to manage tags of the same ASG will cause a perpetual difference where the `awsAutoscalingGroup` resource will try to remove the tag being added by the `awsAutoscalingGroupTag` resource.
+
+~> **NOTE:** This tagging resource does not use the [provider `ignoreTags` configuration](/docs/providers/aws/index.html#ignore_tags).
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformIterator, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingGroupTagA } from "./.gen/providers/aws/autoscaling-group-tag"; +import { EksNodeGroup } from "./.gen/providers/aws/eks-node-group"; +interface MyConfig { + nodeRoleArn: any; + scalingConfig: any; + subnetIds: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny( + Fn.toset( + "${[ for asg in ${" + + Fn.flatten( + "${[ for resources in ${" + + awsEksNodeGroupExample.resources + + "} : resources.autoscaling_groups]}" + ) + + "} : asg.name]}" + ) + ) + ); + new AutoscalingGroupTagA(this, "example", { + autoscalingGroupName: Token.asString(exampleForEachIterator.value), + tag: { + key: "k8s.io/cluster-autoscaler/node-template/label/eks.amazonaws.com/capacityType", + propagateAtLaunch: false, + value: "SPOT", + }, + forEach: exampleForEachIterator, + }); + const awsEksNodeGroupExample = new EksNodeGroup(this, "example_1", { + clusterName: "example", + nodeGroupName: "example", + nodeRoleArn: config.nodeRoleArn, + scalingConfig: config.scalingConfig, + subnetIds: config.subnetIds, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEksNodeGroupExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `autoscalingGroupName` - (Required) Name of the Autoscaling Group to apply the tag to. +* `tag` - (Required) Tag to create. The `tag` block is documented below. + +The `tag` block supports the following arguments: + +* `key` - (Required) Tag name. +* `value` - (Required) Tag value. +* `propagateAtLaunch` - (Required) Whether to propagate the tags to instances launched by the ASG. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ASG name and key, separated by a comma (`,`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsAutoscalingGroupTag` using the ASG name and key, separated by a comma (`,`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsAutoscalingGroupTag` using the ASG name and key, separated by a comma (`,`). 
For example:
+
+```console
+% terraform import aws_autoscaling_group_tag.example asg-example,k8s.io/cluster-autoscaler/node-template/label/eks.amazonaws.com/capacityType
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/autoscaling_lifecycle_hook.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_lifecycle_hook.html.markdown
new file mode 100644
index 00000000000..232c0f9f1b9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/autoscaling_lifecycle_hook.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_autoscaling_lifecycle_hook"
+description: |-
+  Provides an AutoScaling Lifecycle Hook resource.
+---
+
+
+# Resource: aws_autoscaling_lifecycle_hook
+
+Provides an AutoScaling Lifecycle Hook resource.
+
+~> **NOTE:** Terraform has two ways you can add lifecycle hooks - via
+the `initialLifecycleHook` attribute from the
+[`awsAutoscalingGroup`](/docs/providers/aws/r/autoscaling_group.html)
+resource, or via this one. Hooks added via this resource will not be added
+until the autoscaling group has been created and, depending on your
+[capacity](/docs/providers/aws/r/autoscaling_group.html#waiting-for-capacity)
+settings, possibly after the initial instances have been launched, which can
+create unintended behavior. If you need hooks to run on all instances, add
+them with `initialLifecycleHook` in
+[`awsAutoscalingGroup`](/docs/providers/aws/r/autoscaling_group.html),
+but take care to not duplicate those hooks with this resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { AutoscalingLifecycleHook } from "./.gen/providers/aws/autoscaling-lifecycle-hook";
+interface MyConfig {
+  maxSize: any;
+  minSize: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const foobar = new AutoscalingGroup(this, "foobar", {
+      availabilityZones: ["us-west-2a"],
+      healthCheckType: "EC2",
+      name: "terraform-test-foobar5",
+      tag: [
+        {
+          key: "Foo",
+          propagateAtLaunch: true,
+          value: "foo-bar",
+        },
+      ],
+      terminationPolicies: ["OldestInstance"],
+      maxSize: config.maxSize,
+      minSize: config.minSize,
+    });
+    const awsAutoscalingLifecycleHookFoobar = new AutoscalingLifecycleHook(
+      this,
+      "foobar_1",
+      {
+        autoscalingGroupName: foobar.name,
+        defaultResult: "CONTINUE",
+        heartbeatTimeout: 2000,
+        lifecycleTransition: "autoscaling:EC2_INSTANCE_LAUNCHING",
+        name: "foobar",
+        notificationMetadata: Token.asString(
+          Fn.jsonencode({
+            foo: "bar",
+          })
+        ),
+        notificationTargetArn: "arn:aws:sqs:us-east-1:444455556666:queue1*",
+        roleArn: "arn:aws:iam::123456789012:role/S3Access",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAutoscalingLifecycleHookFoobar.overrideLogicalId("foobar");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the lifecycle hook.
+
+* `autoscalingGroupName` - (Required) Name of the Auto Scaling group to which you want to assign the lifecycle hook.
+* `defaultResult` - (Optional) Defines the action the Auto Scaling group should take when the lifecycle hook timeout elapses or if an unexpected failure occurs. The value for this parameter can be either `CONTINUE` or `ABANDON`. The default value is `ABANDON`.
+* `heartbeatTimeout` - (Optional) Defines the amount of time, in seconds, that can elapse before the lifecycle hook times out. When the lifecycle hook times out, Auto Scaling performs the action defined in the `defaultResult` parameter.
+* `lifecycleTransition` - (Required) Instance state to which you want to attach the lifecycle hook. For a list of lifecycle hook types, see [describe-lifecycle-hook-types](https://docs.aws.amazon.com/cli/latest/reference/autoscaling/describe-lifecycle-hook-types.html#examples).
+* `notificationMetadata` - (Optional) Contains additional information that you want to include any time Auto Scaling sends a message to the notification target.
+* `notificationTargetArn` - (Optional) ARN of the notification target that Auto Scaling will use to notify you when an instance is in the transition state for the lifecycle hook. This ARN target can be either an SQS queue or an SNS topic.
+* `roleArn` - (Optional) ARN of the IAM role that allows the Auto Scaling group to publish to the specified notification target.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AutoScaling Lifecycle Hooks using the `autoscaling_group_name` and `name` separated by `/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AutoScaling Lifecycle Hooks using the `autoscaling_group_name` and `name` separated by `/`. For example:
+
+```console
+% terraform import aws_autoscaling_lifecycle_hook.test-lifecycle-hook asg-name/lifecycle-hook-name
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/autoscaling_notification.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_notification.html.markdown
new file mode 100644
index 00000000000..f879be492e7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/autoscaling_notification.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_autoscaling_notification"
+description: |-
+  Provides an AutoScaling Group with Notification support
+---
+
+
+# Resource: aws_autoscaling_notification
+
+Provides an AutoScaling Group with Notification support, via SNS Topics. Each of
+the `notifications` maps to a [Notification Configuration][2] inside Amazon Web
+Services, and is applied to each AutoScaling Group you supply.
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { AutoscalingNotification } from "./.gen/providers/aws/autoscaling-notification";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+interface MyConfig {
+  maxSize: any;
+  minSize: any;
+  maxSize1: any;
+  minSize1: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const bar = new AutoscalingGroup(this, "bar", {
+      name: "foobar1-terraform-test",
+      maxSize: config.maxSize,
+      minSize: config.minSize,
+    });
+    const foo = new AutoscalingGroup(this, "foo", {
+      name: "barfoo-terraform-test",
+      maxSize: config.maxSize1,
+      minSize: config.minSize1,
+    });
+    const example = new SnsTopic(this, "example", {
+      name: "example-topic",
+    });
+    new AutoscalingNotification(this, "example_notifications", {
+      groupNames: [bar.name, foo.name],
+      notifications: [
+        "autoscaling:EC2_INSTANCE_LAUNCH",
+        "autoscaling:EC2_INSTANCE_TERMINATE",
+        "autoscaling:EC2_INSTANCE_LAUNCH_ERROR",
+        "autoscaling:EC2_INSTANCE_TERMINATE_ERROR",
+      ],
+      topicArn: example.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `groupNames` - (Required) List of AutoScaling Group names.
+* `notifications` - (Required) List of Notification Types that trigger
+notifications. Acceptable values are documented [in the AWS documentation here][1].
+* `topicArn` - (Required) Topic ARN for notifications to be sent through.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `groupNames`
+* `notifications`
+* `topicArn`
+
+[1]: https://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_NotificationConfiguration.html
+[2]: https://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_DescribeNotificationConfigurations.html
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/autoscaling_policy.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_policy.html.markdown
new file mode 100644
index 00000000000..b79007c4116
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/autoscaling_policy.html.markdown
@@ -0,0 +1,557 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_autoscaling_policy"
+description: |-
+  Provides an AutoScaling Scaling Policy resource.
+---
+
+
+# Resource: aws_autoscaling_policy
+
+Provides an AutoScaling Scaling Policy resource.
+
+~> **NOTE:** You may want to omit the `desiredCapacity` attribute from the attached `awsAutoscalingGroup`
+when using autoscaling policies. It's good practice to pick either
+[manual](https://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-manual-scaling.html)
+or [dynamic](https://docs.aws.amazon.com/AutoScaling/latest/DeveloperGuide/as-scale-based-on-demand.html)
+(policy-based) scaling.
+
+> **Hands-on:** Try the [Manage AWS Auto Scaling Groups](https://learn.hashicorp.com/tutorials/terraform/aws-asg?utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { AutoscalingPolicy } from "./.gen/providers/aws/autoscaling-policy";
+import { LaunchConfiguration } from "./.gen/providers/aws/launch-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // A launch configuration for the group below; the AMI ID and instance
+    // type are placeholders.
+    const foo = new LaunchConfiguration(this, "foo", {
+      imageId: "ami-12345678",
+      instanceType: "t2.micro",
+    });
+    const bar = new AutoscalingGroup(this, "bar", {
+      availabilityZones: ["us-east-1a"],
+      forceDelete: true,
+      healthCheckGracePeriod: 300,
+      healthCheckType: "ELB",
+      launchConfiguration: foo.name,
+      maxSize: 5,
+      minSize: 2,
+      name: "foobar3-terraform-test",
+    });
+    new AutoscalingPolicy(this, "bat", {
+      adjustmentType: "ChangeInCapacity",
+      autoscalingGroupName: bar.name,
+      cooldown: 300,
+      name: "foobar3-terraform-test",
+      scalingAdjustment: 4,
+    });
+  }
+}
+
+```
+
+### Create target tracking scaling policy using metric math
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingPolicy } from "./.gen/providers/aws/autoscaling-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AutoscalingPolicy(this, "example", {
+      autoscalingGroupName: "my-test-asg",
+      name: "foo",
+      policyType: "TargetTrackingScaling",
+      targetTrackingConfiguration: {
+        customizedMetricSpecification: {
+          metrics: [
+            {
+              id: "m1",
+              label:
+                "Get the queue size (the number of messages waiting to be processed)",
+              metricStat: {
+                metric: {
+                  dimensions: [
+                    {
+                      name: "QueueName",
+                      value: "my-queue",
+                    },
+                  ],
+                  metricName: "ApproximateNumberOfMessagesVisible",
+                  namespace: "AWS/SQS",
+                },
+                stat: "Sum",
+              },
+              returnData: false,
+            },
+            {
+              id: "m2",
+              label: "Get the group size (the number of InService instances)",
+              metricStat: {
+                metric: {
+                  dimensions: [
+                    {
+                      name: "AutoScalingGroupName",
+                      value: "my-asg",
+                    },
+                  ],
+                  metricName: "GroupInServiceInstances",
+                  namespace: "AWS/AutoScaling",
+                },
+                stat: "Average",
+              },
+              returnData: false,
+            },
+            {
+              expression: "m1 / m2",
+              id: "e1",
+              label: "Calculate the backlog per instance",
+              returnData: true,
+            },
+          ],
+        },
+        targetValue: 100,
+      },
+    });
+  }
+}
+
+```
+
+### Create predictive scaling policy using customized metrics
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingPolicy } from "./.gen/providers/aws/autoscaling-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AutoscalingPolicy(this, "example", {
+      autoscalingGroupName: "my-test-asg",
+      name: "foo",
+      policyType: "PredictiveScaling",
+      predictiveScalingConfiguration: {
+        metricSpecification: {
+          customizedCapacityMetricSpecification: {
+            metricDataQueries: [
+              {
+                expression:
+                  "SUM(SEARCH('{AWS/AutoScaling,AutoScalingGroupName} MetricName=\\\"GroupInServiceInstances\\\" my-test-asg', 'Average', 300))",
+                id: "capacity_sum",
+              },
+            ],
+          },
+          customizedLoadMetricSpecification: {
+            metricDataQueries: [
+              {
+                expression:
+                  "SUM(SEARCH('{AWS/EC2,AutoScalingGroupName} MetricName=\\\"CPUUtilization\\\" my-test-asg', 'Sum', 3600))",
+                id: "load_sum",
+              },
+            ],
+          },
+          customizedScalingMetricSpecification: {
+            metricDataQueries: [
+              {
+                expression:
+                  "SUM(SEARCH('{AWS/AutoScaling,AutoScalingGroupName} MetricName=\\\"GroupInServiceInstances\\\" my-test-asg', 'Average', 300))",
+                id: "capacity_sum",
+                returnData: false,
+              },
+              {
+                expression:
+                  "SUM(SEARCH('{AWS/EC2,AutoScalingGroupName} MetricName=\\\"CPUUtilization\\\" my-test-asg', 'Sum', 300))",
+                id: "load_sum",
+                returnData: false,
+              },
+              {
+                expression:
+                  "load_sum / (capacity_sum * PERIOD(capacity_sum) / 60)",
+                id: "weighted_average",
+              },
+            ],
+          },
+          targetValue: 10,
+        },
+      },
+    });
+  }
+}
+
+```
+
+### Create predictive scaling policy using customized scaling and predefined load metric
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingPolicy } from "./.gen/providers/aws/autoscaling-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AutoscalingPolicy(this, "example", {
+      autoscalingGroupName: "my-test-asg",
+      name: "foo",
+      policyType: "PredictiveScaling",
+      predictiveScalingConfiguration: {
+        metricSpecification: {
+          customizedScalingMetricSpecification: {
+            metricDataQueries: [
+              {
+                id: "scaling",
+                metricStat: {
+                  metric: {
+                    dimensions: [
+                      {
+                        name: "AutoScalingGroupName",
+                        value: "my-test-asg",
+                      },
+                    ],
+                    metricName: "CPUUtilization",
+                    namespace: "AWS/EC2",
+                  },
+                  stat: "Average",
+                },
+              },
+            ],
+          },
+          predefinedLoadMetricSpecification: {
+            predefinedMetricType: "ASGTotalCPUUtilization",
+            resourceLabel: "testLabel",
+          },
+          targetValue: 10,
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required) Name of the policy.
+* `autoscalingGroupName` - (Required) Name of the autoscaling group.
+* `adjustmentType` - (Optional) Whether the adjustment is an absolute number or a percentage of the current capacity. Valid values are `ChangeInCapacity`, `ExactCapacity`, and `PercentChangeInCapacity`.
+* `policyType` - (Optional) Policy type, either "SimpleScaling", "StepScaling", "TargetTrackingScaling", or "PredictiveScaling". If this value isn't provided, AWS will default to "SimpleScaling."
+* `predictiveScalingConfiguration` - (Optional) Predictive scaling policy configuration to use with Amazon EC2 Auto Scaling.
+* `estimatedInstanceWarmup` - (Optional) Estimated time, in seconds, until a newly launched instance will contribute CloudWatch metrics.
Without a value, AWS will default to the group's specified cooldown period.
+* `enabled` - (Optional) Whether the scaling policy is enabled or disabled. Default: `true`.
+
+The following argument is only available to "SimpleScaling" and "StepScaling" type policies:
+
+* `minAdjustmentMagnitude` - (Optional) Minimum value to scale by when `adjustmentType` is set to `PercentChangeInCapacity`.
+
+The following arguments are only available to "SimpleScaling" type policies:
+
+* `cooldown` - (Optional) Amount of time, in seconds, after a scaling activity completes and before the next scaling activity can start.
+* `scalingAdjustment` - (Optional) Number of instances by which to scale. `adjustmentType` determines the interpretation of this number (e.g., as an absolute number or as a percentage of the existing Auto Scaling group size). A positive increment adds to the current capacity and a negative value removes from the current capacity.
+
+The following arguments are only available to "StepScaling" type policies:
+
+* `metricAggregationType` - (Optional) Aggregation type for the policy's metrics. Valid values are "Minimum", "Maximum", and "Average". Without a value, AWS will treat the aggregation type as "Average".
+* `stepAdjustment` - (Optional) Set of adjustments that manage
+group scaling. These have the following structure:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Op, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingPolicy } from "./.gen/providers/aws/autoscaling-policy";
+interface MyConfig {
+  autoscalingGroupName: any;
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new AutoscalingPolicy(this, "example", {
+      stepAdjustment: [
+        {
+          metricIntervalLowerBound: Token.asString(1),
+          metricIntervalUpperBound: Token.asString(2),
+          scalingAdjustment: Token.asNumber(Op.negate(1)),
+        },
+        {
+          metricIntervalLowerBound: Token.asString(2),
+          metricIntervalUpperBound: Token.asString(3),
+          scalingAdjustment: 1,
+        },
+      ],
+      autoscalingGroupName: config.autoscalingGroupName,
+      name: config.name,
+    });
+  }
+}
+
+```
+
+The following fields are available in step adjustments:
+
+* `scalingAdjustment` - (Required) Number of members by which to
+scale, when the adjustment bounds are breached. A positive value scales
+up. A negative value scales down.
+* `metricIntervalLowerBound` - (Optional) Lower bound for the
+difference between the alarm threshold and the CloudWatch metric.
+Without a value, AWS will treat this bound as negative infinity.
+* `metricIntervalUpperBound` - (Optional) Upper bound for the
+difference between the alarm threshold and the CloudWatch metric.
+Without a value, AWS will treat this bound as positive infinity. The upper bound
+must be greater than the lower bound.
+
+Notice the bounds are **relative** to the alarm threshold, meaning that the starting point is not 0%, but the alarm threshold. Check the official [docs](https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-scaling-simple-step.html#as-scaling-steps) for a detailed example.
+
+The following arguments are only available to "TargetTrackingScaling" type policies:
+
+* `targetTrackingConfiguration` - (Optional) Target tracking policy.
These have the following structure:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingPolicy } from "./.gen/providers/aws/autoscaling-policy";
+interface MyConfig {
+  autoscalingGroupName: any;
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new AutoscalingPolicy(this, "example", {
+      targetTrackingConfiguration: {
+        predefinedMetricSpecification: {
+          predefinedMetricType: "ASGAverageCPUUtilization",
+        },
+        targetValue: 40,
+      },
+      autoscalingGroupName: config.autoscalingGroupName,
+      name: config.name,
+    });
+  }
+}
+
+```
+
+The following fields are available in target tracking configuration:
+
+* `predefinedMetricSpecification` - (Optional) Predefined metric. Conflicts with `customizedMetricSpecification`.
+* `customizedMetricSpecification` - (Optional) Customized metric. Conflicts with `predefinedMetricSpecification`.
+* `targetValue` - (Required) Target value for the metric.
+* `disableScaleIn` - (Optional, Default: false) Whether scale-in by the target tracking policy is disabled.
+
+### predefined_metric_specification
+
+This argument supports the following arguments:
+
+* `predefinedMetricType` - (Required) Metric type.
+* `resourceLabel` - (Optional) Identifies the resource associated with the metric type.
+
+### customized_metric_specification
+
+This argument supports the following arguments:
+
+* `metricDimension` - (Optional) Dimensions of the metric.
+* `metricName` - (Optional) Name of the metric.
+* `namespace` - (Optional) Namespace of the metric.
+* `statistic` - (Optional) Statistic of the metric.
+* `unit` - (Optional) Unit of the metric.
+* `metrics` - (Optional) Metrics to include, as a metric data query.
+
+#### metric_dimension
+
+This argument supports the following arguments:
+
+* `name` - (Required) Name of the dimension.
+* `value` - (Required) Value of the dimension.
+
+#### metrics
+
+This argument supports the following arguments:
+
+* `expression` - (Optional) Math expression used on the returned metric. You must specify either `expression` or `metricStat`, but not both.
+* `id` - (Required) Short name for the metric used in the target tracking scaling policy.
+* `label` - (Optional) Human-readable label for this metric or expression.
+* `metricStat` - (Optional) Structure that defines the CloudWatch metric to be used in the target tracking scaling policy. You must specify either `expression` or `metricStat`, but not both.
+* `returnData` - (Optional) Boolean that indicates whether to return the timestamps and raw data values of this metric. Defaults to `true`.
+
+##### metric_stat
+
+This argument supports the following arguments:
+
+* `metric` - (Required) Structure that defines the CloudWatch metric to return, including the metric name, namespace, and dimensions.
+* `stat` - (Required) Statistic of the metrics to return.
+* `unit` - (Optional) Unit of the metrics to return.
+
+##### metric
+
+This argument supports the following arguments:
+
+* `dimensions` - (Optional) Dimensions of the metric.
+* `metricName` - (Required) Name of the metric.
+* `namespace` - (Required) Namespace of the metric.
+
+###### dimensions
+
+This argument supports the following arguments:
+
+* `name` - (Required) Name of the dimension.
+* `value` - (Required) Value of the dimension.
+
+### predictive_scaling_configuration
+
+This argument supports the following arguments:
+
+* `maxCapacityBreachBehavior` - (Optional) Defines the behavior that should be applied if the forecast capacity approaches or exceeds the maximum capacity of the Auto Scaling group. Valid values are `HonorMaxCapacity` or `IncreaseMaxCapacity`. Default is `HonorMaxCapacity`.
+* `maxCapacityBuffer` - (Optional) Size of the capacity buffer to use when the forecast capacity is close to or exceeds the maximum capacity. Valid range is `0` to `100`. If set to `0`, Amazon EC2 Auto Scaling may scale capacity higher than the maximum capacity to equal but not exceed forecast capacity.
+* `metricSpecification` - (Required) This structure includes the metrics and target utilization to use for predictive scaling.
+* `mode` - (Optional) Predictive scaling mode. Valid values are `ForecastAndScale` and `ForecastOnly`. Default is `ForecastOnly`.
+* `schedulingBufferTime` - (Optional) Amount of time, in seconds, by which the instance launch time can be advanced. Minimum is `0`.
+
+#### metric_specification
+
+This argument supports the following arguments:
+
+* `customizedCapacityMetricSpecification` - (Optional) Customized capacity metric specification. The field is only valid when you use `customizedLoadMetricSpecification`.
+* `customizedLoadMetricSpecification` - (Optional) Customized load metric specification.
+* `customizedScalingMetricSpecification` - (Optional) Customized scaling metric specification.
+* `predefinedLoadMetricSpecification` - (Optional) Predefined load metric specification.
+* `predefinedMetricPairSpecification` - (Optional) Metric pair specification from which Amazon EC2 Auto Scaling determines the appropriate scaling metric and load metric to use.
+* `predefinedScalingMetricSpecification` - (Optional) Predefined scaling metric specification.
+
+##### predefined_load_metric_specification
+
+This argument supports the following arguments:
+
+* `predefinedMetricType` - (Required) Metric type. Valid values are `ASGTotalCPUUtilization`, `ASGTotalNetworkIn`, `ASGTotalNetworkOut`, or `ALBTargetGroupRequestCount`.
+* `resourceLabel` - (Required) Label that uniquely identifies a specific Application Load Balancer target group from which to determine the request count served by your Auto Scaling group.
+
+##### predefined_metric_pair_specification
+
+This argument supports the following arguments:
+
+* `predefinedMetricType` - (Required) Which metrics to use. There are two different types of metrics for each metric type: one is a load metric and one is a scaling metric. For example, if the metric type is `ASGCPUUtilization`, the Auto Scaling group's total CPU metric is used as the load metric, and the average CPU metric is used for the scaling metric. Valid values are `ASGCPUUtilization`, `ASGNetworkIn`, `ASGNetworkOut`, or `ALBRequestCount`.
+* `resourceLabel` - (Required) Label that uniquely identifies a specific Application Load Balancer target group from which to determine the request count served by your Auto Scaling group.
+
+##### predefined_scaling_metric_specification
+
+This argument supports the following arguments:
+
+* `predefinedMetricType` - (Required) Describes a scaling metric for a predictive scaling policy.
Valid values are `ASGAverageCPUUtilization`, `ASGAverageNetworkIn`, `ASGAverageNetworkOut`, or `ALBRequestCountPerTarget`.
+* `resourceLabel` - (Required) Label that uniquely identifies a specific Application Load Balancer target group from which to determine the request count served by your Auto Scaling group.
+
+##### customized_scaling_metric_specification
+
+This argument supports the following arguments:
+
+* `metricDataQueries` - (Required) List of up to 10 structures that define the custom scaling metric in a predictive scaling policy.
+
+##### customized_load_metric_specification
+
+This argument supports the following arguments:
+
+* `metricDataQueries` - (Required) List of up to 10 structures that define the custom load metric in a predictive scaling policy.
+
+##### customized_capacity_metric_specification
+
+This argument supports the following arguments:
+
+* `metricDataQueries` - (Required) List of up to 10 structures that define the custom capacity metric in a predictive scaling policy.
+
+##### metric_data_queries
+
+This argument supports the following arguments:
+
+* `expression` - (Optional) Math expression used on the returned metric. You must specify either `expression` or `metricStat`, but not both.
+* `id` - (Required) Short name for the metric used in the predictive scaling policy.
+* `label` - (Optional) Human-readable label for this metric or expression.
+* `metricStat` - (Optional) Structure that defines the CloudWatch metric to be used in the predictive scaling policy. You must specify either `expression` or `metricStat`, but not both.
+* `returnData` - (Optional) Boolean that indicates whether to return the timestamps and raw data values of this metric. Defaults to `true`.
+
+##### metric_stat
+
+This argument supports the following arguments:
+
+* `metric` - (Required) Structure that defines the CloudWatch metric to return, including the metric name, namespace, and dimensions.
+* `stat` - (Required) Statistic of the metrics to return.
+* `unit` - (Optional) Unit of the metrics to return.
+
+##### metric
+
+This argument supports the following arguments:
+
+* `dimensions` - (Optional) Dimensions of the metric.
+* `metricName` - (Required) Name of the metric.
+* `namespace` - (Required) Namespace of the metric.
+
+##### dimensions
+
+This argument supports the following arguments:
+
+* `name` - (Required) Name of the dimension.
+* `value` - (Required) Value of the dimension.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN assigned by AWS to the scaling policy.
+* `name` - Scaling policy's name.
+* `autoscalingGroupName` - The scaling policy's assigned autoscaling group.
+* `adjustmentType` - Scaling policy's adjustment type.
+* `policyType` - Scaling policy's type.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AutoScaling scaling policy using the `autoscaling_group_name` and `name` separated by `/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AutoScaling scaling policy using the `autoscaling_group_name` and `name` separated by `/`.
For example:
+
+```console
+% terraform import aws_autoscaling_policy.test-policy asg-name/policy-name
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/autoscaling_schedule.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_schedule.html.markdown
new file mode 100644
index 00000000000..499fde0308c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/autoscaling_schedule.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_autoscaling_schedule"
+description: |-
+  Provides an AutoScaling Schedule resource.
+---
+
+
+# Resource: aws_autoscaling_schedule
+
+Provides an AutoScaling Schedule resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { AutoscalingSchedule } from "./.gen/providers/aws/autoscaling-schedule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const foobar = new AutoscalingGroup(this, "foobar", {
+      availabilityZones: ["us-west-2a"],
+      forceDelete: true,
+      healthCheckGracePeriod: 300,
+      healthCheckType: "ELB",
+      maxSize: 1,
+      minSize: 1,
+      name: "terraform-test-foobar5",
+      terminationPolicies: ["OldestInstance"],
+    });
+    const awsAutoscalingScheduleFoobar = new AutoscalingSchedule(
+      this,
+      "foobar_1",
+      {
+        autoscalingGroupName: foobar.name,
+        desiredCapacity: 0,
+        endTime: "2016-12-12T06:00:00Z",
+        maxSize: 1,
+        minSize: 0,
+        scheduledActionName: "foobar",
+        startTime: "2016-12-11T18:00:00Z",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAutoscalingScheduleFoobar.overrideLogicalId("foobar");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `autoscalingGroupName` - (Required) The name of the Auto Scaling group.
+* `scheduledActionName` - (Required) The name of this scaling action.
+
+The following arguments are optional:
+
+* `desiredCapacity` - (Optional) The initial capacity of the Auto Scaling group after the scheduled action runs and the capacity it attempts to maintain. Set to `-1` if you don't want to change the desired capacity at the scheduled time. Defaults to `0`.
+* `endTime` - (Optional) The date and time for the recurring schedule to end, in UTC with the format `"YYYY-MM-DDThh:mm:ssZ"` (e.g. `"2021-06-01T00:00:00Z"`).
+* `maxSize` - (Optional) The maximum size of the Auto Scaling group. Set to `-1` if you don't want to change the maximum size at the scheduled time. Defaults to `0`.
+* `minSize` - (Optional) The minimum size of the Auto Scaling group. Set to `-1` if you don't want to change the minimum size at the scheduled time. Defaults to `0`.
+* `recurrence` - (Optional) The recurring schedule for this action, specified using the Unix cron syntax format.
+* `startTime` - (Optional) The date and time for the recurring schedule to start, in UTC with the format `"YYYY-MM-DDThh:mm:ssZ"` (e.g. `"2021-06-01T00:00:00Z"`).
+* `timeZone` - (Optional) Specifies the time zone for a cron expression. Valid values are the canonical names of the IANA time zones (such as `Etc/GMT+9` or `Pacific/Tahiti`). A sketch of a recurring action follows this list.
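+
+As an illustration only, a recurring action that scales the group out on weekday mornings might look like the following sketch. The group name, schedule, and sizes are placeholders, not values from this page:
+
+```typescript
+// Hypothetical sketch: scales out at 08:00 UTC on weekdays; assumes the
+// imports shown in the example above.
+new AutoscalingSchedule(this, "scale_out_weekday_mornings", {
+  autoscalingGroupName: "my-asg",
+  scheduledActionName: "scale-out-weekday-mornings",
+  recurrence: "0 8 * * MON-FRI",
+  timeZone: "Etc/UTC",
+  minSize: 2,
+  maxSize: 10,
+  desiredCapacity: 4,
+});
+```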
+ +~> **NOTE:** When `startTime` and `endTime` are specified with `recurrence` , they form the boundaries of when the recurring action will start and stop. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN assigned by AWS to the autoscaling schedule. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AutoScaling ScheduledAction using the `autoScalingGroupName` and `scheduledActionName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AutoScaling ScheduledAction using the `autoScalingGroupName` and `scheduledActionName`. For example: + +```console +% terraform import aws_autoscaling_schedule.resource-name auto-scaling-group-name/scheduled-action-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/autoscaling_traffic_source_attachment.html.markdown b/website/docs/cdktf/typescript/r/autoscaling_traffic_source_attachment.html.markdown new file mode 100644 index 00000000000..1103de6cac9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/autoscaling_traffic_source_attachment.html.markdown @@ -0,0 +1,65 @@ +--- +subcategory: "Auto Scaling" +layout: "aws" +page_title: "AWS: aws_autoscaling_traffic_source_attachment" +description: |- + Terraform resource for managing an AWS Auto Scaling Traffic Source Attachment. +--- + + + +# Resource: aws_autoscaling_traffic_source_attachment + +Attaches a traffic source to an Auto Scaling group. + +~> **NOTE on Auto Scaling Groups, Attachments and Traffic Source Attachments:** Terraform provides standalone [Attachment](autoscaling_attachment.html) (for attaching Classic Load Balancers and Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target groups) and Traffic Source Attachment (for attaching Load Balancers and VPC Lattice target groups) resources and an [Auto Scaling Group](autoscaling_group.html) resource with `loadBalancers`, `targetGroupArns` and `trafficSource` attributes. Do not use the same traffic source in more than one of these resources. Doing so will cause a conflict of attachments. A [`lifecycle` configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) can be used to suppress differences if necessary. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
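+ * Note: identifiers such as `awsAutoscalingGroupExample` and
+ * `awsLbTargetGroupExample` below are unresolved references emitted by
+ * 'cdktf convert'; they stand in for an Auto Scaling group and an ALB
+ * target group defined elsewhere in the original configuration.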
+ */
+import { AutoscalingTrafficSourceAttachment } from "./.gen/providers/aws/autoscaling-traffic-source-attachment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AutoscalingTrafficSourceAttachment(this, "example", {
+      autoscalingGroupName: Token.asString(awsAutoscalingGroupExample.id),
+      trafficSource: {
+        identifier: Token.asString(awsLbTargetGroupExample.arn),
+        type: "elbv2",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `autoscalingGroupName` - (Required) The name of the Auto Scaling group.
+- `trafficSource` - (Required) The traffic source to attach to the Auto Scaling group. Detailed below.
+
+`trafficSource` supports the following:
+
+- `identifier` - (Required) Identifies the traffic source. For Application Load Balancers, Gateway Load Balancers, Network Load Balancers, and VPC Lattice, this will be the Amazon Resource Name (ARN) for a target group in this account and Region. For Classic Load Balancers, this will be the name of the Classic Load Balancer in this account and Region.
+- `type` - (Required) Provides additional context for the value of `identifier`. Valid values:
+  - `elb` if `identifier` is the name of a Classic Load Balancer.
+  - `elbv2` if `identifier` is the ARN of an Application Load Balancer, Gateway Load Balancer, or Network Load Balancer target group.
+  - `vpc-lattice` if `identifier` is the ARN of a VPC Lattice target group.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/autoscalingplans_scaling_plan.html.markdown b/website/docs/cdktf/typescript/r/autoscalingplans_scaling_plan.html.markdown
new file mode 100644
index 00000000000..0ddadc5c385
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/autoscalingplans_scaling_plan.html.markdown
@@ -0,0 +1,280 @@
+---
+subcategory: "Auto Scaling Plans"
+layout: "aws"
+page_title: "AWS: aws_autoscalingplans_scaling_plan"
+description: |-
+  Manages an AWS Auto Scaling scaling plan.
+---
+
+
+
+# Resource: aws_autoscalingplans_scaling_plan
+
+Manages an AWS Auto Scaling scaling plan.
+More information can be found in the [AWS Auto Scaling User Guide](https://docs.aws.amazon.com/autoscaling/plans/userguide/what-is-aws-auto-scaling.html).
+
+~> **NOTE:** The AWS Auto Scaling service uses an AWS IAM service-linked role to manage predictive scaling of Amazon EC2 Auto Scaling groups. The service attempts to automatically create this role the first time a scaling plan with predictive scaling enabled is created.
+An [`awsIamServiceLinkedRole`](/docs/providers/aws/r/iam_service_linked_role.html) resource can be used to manually manage this role.
+See the [AWS documentation](https://docs.aws.amazon.com/autoscaling/plans/userguide/aws-auto-scaling-service-linked-roles.html#create-service-linked-role-manual) for more details.
+
+## Example Usage
+
+### Basic Dynamic Scaling
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
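+ * Note: `awsLaunchConfigurationExample` below is an unresolved reference
+ * emitted by 'cdktf convert' for a launch configuration defined elsewhere.
+ * The tag keys (including `propagate_at_launch`) are passed through as
+ * plain map keys, which is why they are not camelCased.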
+ */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { AutoscalingplansScalingPlan } from "./.gen/providers/aws/autoscalingplans-scaling-plan"; +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const available = new DataAwsAvailabilityZones(this, "available", {}); + const example = new AutoscalingGroup(this, "example", { + availabilityZones: [ + Token.asString(propertyAccess(available.names, ["0"])), + ], + launchConfiguration: Token.asString(awsLaunchConfigurationExample.name), + maxSize: 3, + minSize: 0, + namePrefix: "example", + tags: [ + { + key: "application", + propagate_at_launch: true, + value: "example", + }, + ], + }); + const awsAutoscalingplansScalingPlanExample = + new AutoscalingplansScalingPlan(this, "example_2", { + applicationSource: { + tagFilter: [ + { + key: "application", + values: ["example"], + }, + ], + }, + name: "example-dynamic-cost-optimization", + scalingInstruction: [ + { + maxCapacity: 3, + minCapacity: 0, + resourceId: Token.asString( + Fn.format("autoScalingGroup/%s", [example.name]) + ), + scalableDimension: "autoscaling:autoScalingGroup:DesiredCapacity", + serviceNamespace: "autoscaling", + targetTrackingConfiguration: [ + { + predefinedScalingMetricSpecification: { + predefinedScalingMetricType: "ASGAverageCPUUtilization", + }, + targetValue: 70, + }, + ], + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAutoscalingplansScalingPlanExample.overrideLogicalId("example"); + } +} + +``` + +### Basic Predictive Scaling + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group"; +import { AutoscalingplansScalingPlan } from "./.gen/providers/aws/autoscalingplans-scaling-plan"; +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const available = new DataAwsAvailabilityZones(this, "available", {}); + const example = new AutoscalingGroup(this, "example", { + availabilityZones: [ + Token.asString(propertyAccess(available.names, ["0"])), + ], + launchConfiguration: Token.asString(awsLaunchConfigurationExample.name), + maxSize: 3, + minSize: 0, + namePrefix: "example", + tags: [ + { + key: "application", + propagate_at_launch: true, + value: "example", + }, + ], + }); + const awsAutoscalingplansScalingPlanExample = + new AutoscalingplansScalingPlan(this, "example_2", { + applicationSource: { + tagFilter: [ + { + key: "application", + values: ["example"], + }, + ], + }, + name: "example-predictive-cost-optimization", + scalingInstruction: [ + { + disableDynamicScaling: true, + maxCapacity: 3, + minCapacity: 0, + predefinedLoadMetricSpecification: { + predefinedLoadMetricType: "ASGTotalCPUUtilization", + }, + predictiveScalingMaxCapacityBehavior: + "SetForecastCapacityToMaxCapacity", + predictiveScalingMode: "ForecastAndScale", + resourceId: Token.asString( + Fn.format("autoScalingGroup/%s", [example.name]) + ), + scalableDimension: "autoscaling:autoScalingGroup:DesiredCapacity", + serviceNamespace: "autoscaling", + targetTrackingConfiguration: [ + { + predefinedScalingMetricSpecification: { + predefinedScalingMetricType: "ASGAverageCPUUtilization", + }, + targetValue: 70, + }, + ], + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAutoscalingplansScalingPlanExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the scaling plan. Names cannot contain vertical bars, colons, or forward slashes. +* `applicationSource` - (Required) CloudFormation stack or set of tags. You can create one scaling plan per application source. +* `scalingInstruction` - (Required) Scaling instructions. More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_ScalingInstruction.html). + +The `applicationSource` object supports the following: + +* `cloudformationStackArn` - (Optional) ARN of a AWS CloudFormation stack. +* `tagFilter` - (Optional) Set of tags. + +The `tagFilter` object supports the following: + +* `key` - (Required) Tag key. +* `values` - (Optional) Tag values. + +The `scalingInstruction` object supports the following: + +* `maxCapacity` - (Required) Maximum capacity of the resource. The exception to this upper limit is if you specify a non-default setting for `predictiveScalingMaxCapacityBehavior`. +* `minCapacity` - (Required) Minimum capacity of the resource. +* `resourceId` - (Required) ID of the resource. This string consists of the resource type and unique identifier. +* `scalableDimension` - (Required) Scalable dimension associated with the resource. 
Valid values: `autoscaling:autoScalingGroup:DesiredCapacity`, `dynamodb:index:ReadCapacityUnits`, `dynamodb:index:WriteCapacityUnits`, `dynamodb:table:ReadCapacityUnits`, `dynamodb:table:WriteCapacityUnits`, `ecs:service:DesiredCount`, `ec2:spot-fleet-request:TargetCapacity`, `rds:cluster:ReadReplicaCount`.
+* `serviceNamespace` - (Required) Namespace of the AWS service. Valid values: `autoscaling`, `dynamodb`, `ecs`, `ec2`, `rds`.
+* `targetTrackingConfiguration` - (Required) Structure that defines new target tracking configurations. Each of these structures includes a specific scaling metric and a target value for the metric, along with various parameters to use with dynamic scaling.
+More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_TargetTrackingConfiguration.html).
+* `customizedLoadMetricSpecification` - (Optional) Customized load metric to use for predictive scaling. You must specify either `customizedLoadMetricSpecification` or `predefinedLoadMetricSpecification` when configuring predictive scaling; a sketch follows the object lists below.
+More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_CustomizedLoadMetricSpecification.html).
+* `disableDynamicScaling` - (Optional) Boolean controlling whether dynamic scaling by AWS Auto Scaling is disabled. Defaults to `false`.
+* `predefinedLoadMetricSpecification` - (Optional) Predefined load metric to use for predictive scaling. You must specify either `predefinedLoadMetricSpecification` or `customizedLoadMetricSpecification` when configuring predictive scaling.
+More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_PredefinedLoadMetricSpecification.html).
+* `predictiveScalingMaxCapacityBehavior` - (Optional) Defines the behavior that should be applied if the forecast capacity approaches or exceeds the maximum capacity specified for the resource.
+Valid values: `SetForecastCapacityToMaxCapacity`, `SetMaxCapacityAboveForecastCapacity`, `SetMaxCapacityToForecastCapacity`.
+* `predictiveScalingMaxCapacityBuffer` - (Optional) Size of the capacity buffer to use when the forecast capacity is close to or exceeds the maximum capacity.
+* `predictiveScalingMode` - (Optional) Predictive scaling mode. Valid values: `ForecastAndScale`, `ForecastOnly`.
+* `scalingPolicyUpdateBehavior` - (Optional) Controls whether a resource's externally created scaling policies are kept or replaced. Valid values: `KeepExternalPolicies`, `ReplaceExternalPolicies`. Defaults to `KeepExternalPolicies`.
+* `scheduledActionBufferTime` - (Optional) Amount of time, in seconds, to buffer the run time of scheduled scaling actions when scaling out.
+
+The `customizedLoadMetricSpecification` object supports the following:
+
+* `metricName` - (Required) Name of the metric.
+* `namespace` - (Required) Namespace of the metric.
+* `statistic` - (Required) Statistic of the metric. Currently, the value must always be `Sum`.
+* `dimensions` - (Optional) Dimensions of the metric.
+* `unit` - (Optional) Unit of the metric.
+
+The `predefinedLoadMetricSpecification` object supports the following:
+
+* `predefinedLoadMetricType` - (Required) Metric type. Valid values: `ALBTargetGroupRequestCount`, `ASGTotalCPUUtilization`, `ASGTotalNetworkIn`, `ASGTotalNetworkOut`.
+* `resourceLabel` - (Optional) Identifies the resource associated with the metric type.
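+
+As a rough sketch of how a customized load metric slots into a `scalingInstruction` (assumptions: an existing Auto Scaling group named `example`, and a hypothetical `MyApp`/`RequestCount` CloudWatch metric; neither comes from the provider docs):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AutoscalingplansScalingPlan } from "./.gen/providers/aws/autoscalingplans-scaling-plan";
+class CustomizedLoadMetricSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AutoscalingplansScalingPlan(this, "custom_load", {
+      applicationSource: {
+        tagFilter: [{ key: "application", values: ["example"] }],
+      },
+      name: "example-customized-load-metric",
+      scalingInstruction: [
+        {
+          // Hypothetical application metric; `Sum` is currently the only
+          // statistic accepted for load metrics.
+          customizedLoadMetricSpecification: {
+            metricName: "RequestCount",
+            namespace: "MyApp",
+            statistic: "Sum",
+          },
+          maxCapacity: 3,
+          minCapacity: 0,
+          predictiveScalingMode: "ForecastOnly",
+          resourceId: "autoScalingGroup/example",
+          scalableDimension: "autoscaling:autoScalingGroup:DesiredCapacity",
+          serviceNamespace: "autoscaling",
+          targetTrackingConfiguration: [
+            {
+              predefinedScalingMetricSpecification: {
+                predefinedScalingMetricType: "ASGAverageCPUUtilization",
+              },
+              targetValue: 70,
+            },
+          ],
+        },
+      ],
+    });
+  }
+}
+```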
+
+The `targetTrackingConfiguration` object supports the following:
+
+* `targetValue` - (Required) Target value for the metric.
+* `customizedScalingMetricSpecification` - (Optional) Customized metric. You can specify either `customizedScalingMetricSpecification` or `predefinedScalingMetricSpecification`.
+More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_CustomizedScalingMetricSpecification.html).
+* `disableScaleIn` - (Optional) Boolean indicating whether scale-in by the target tracking scaling policy is disabled. Defaults to `false`.
+* `predefinedScalingMetricSpecification` - (Optional) Predefined metric. You can specify either `predefinedScalingMetricSpecification` or `customizedScalingMetricSpecification`.
+More details can be found in the [AWS Auto Scaling API Reference](https://docs.aws.amazon.com/autoscaling/plans/APIReference/API_PredefinedScalingMetricSpecification.html).
+* `estimatedInstanceWarmup` - (Optional) Estimated time, in seconds, until a newly launched instance can contribute to the CloudWatch metrics.
+This value is used only if the resource is an Auto Scaling group.
+* `scaleInCooldown` - (Optional) Amount of time, in seconds, after a scale-in activity completes before another scale-in activity can start.
+This value is not used if the scalable resource is an Auto Scaling group.
+* `scaleOutCooldown` - (Optional) Amount of time, in seconds, after a scale-out activity completes before another scale-out activity can start.
+This value is not used if the scalable resource is an Auto Scaling group.
+
+The `customizedScalingMetricSpecification` object supports the following:
+
+* `metricName` - (Required) Name of the metric.
+* `namespace` - (Required) Namespace of the metric.
+* `statistic` - (Required) Statistic of the metric. Valid values: `Average`, `Maximum`, `Minimum`, `SampleCount`, `Sum`.
+* `dimensions` - (Optional) Dimensions of the metric.
+* `unit` - (Optional) Unit of the metric.
+
+The `predefinedScalingMetricSpecification` object supports the following:
+
+* `predefinedScalingMetricType` - (Required) Metric type. Valid values: `ALBRequestCountPerTarget`, `ASGAverageCPUUtilization`, `ASGAverageNetworkIn`, `ASGAverageNetworkOut`, `DynamoDBReadCapacityUtilization`, `DynamoDBWriteCapacityUtilization`, `ECSServiceAverageCPUUtilization`, `ECSServiceAverageMemoryUtilization`, `EC2SpotFleetRequestAverageCPUUtilization`, `EC2SpotFleetRequestAverageNetworkIn`, `EC2SpotFleetRequestAverageNetworkOut`, `RDSReaderAverageCPUUtilization`, `RDSReaderAverageDatabaseConnections`.
+* `resourceLabel` - (Optional) Identifies the resource associated with the metric type.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Scaling plan identifier.
+* `scalingPlanVersion` - The version number of the scaling plan. This value is always `1`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Auto Scaling scaling plans using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Auto Scaling scaling plans using the `name`.
For example: + +```console +% terraform import aws_autoscalingplans_scaling_plan.example MyScale1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_framework.html.markdown b/website/docs/cdktf/typescript/r/backup_framework.html.markdown new file mode 100644 index 00000000000..9195a03da83 --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_framework.html.markdown @@ -0,0 +1,188 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_framework" +description: |- + Provides an AWS Backup Framework resource. +--- + + + +# Resource: aws_backup_framework + +Provides an AWS Backup Framework resource. + +~> **Note:** For the Deployment Status of the Framework to be successful, please turn on resource tracking to enable AWS Config recording to track configuration changes of your backup resources. This can be done from the AWS Console. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupFramework } from "./.gen/providers/aws/backup-framework"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupFramework(this, "Example", { + control: [ + { + inputParameter: [ + { + name: "requiredRetentionDays", + value: "35", + }, + ], + name: "BACKUP_RECOVERY_POINT_MINIMUM_RETENTION_CHECK", + }, + { + inputParameter: [ + { + name: "requiredFrequencyUnit", + value: "hours", + }, + { + name: "requiredRetentionDays", + value: "35", + }, + { + name: "requiredFrequencyValue", + value: "1", + }, + ], + name: "BACKUP_PLAN_MIN_FREQUENCY_AND_MIN_RETENTION_CHECK", + }, + { + name: "BACKUP_RECOVERY_POINT_ENCRYPTED", + }, + { + name: "BACKUP_RESOURCES_PROTECTED_BY_BACKUP_PLAN", + scope: { + complianceResourceTypes: ["EBS"], + }, + }, + { + name: "BACKUP_RECOVERY_POINT_MANUAL_DELETION_DISABLED", + }, + { + inputParameter: [ + { + name: "maxRetentionDays", + value: "100", + }, + { + name: "minRetentionDays", + value: "1", + }, + ], + name: "BACKUP_RESOURCES_PROTECTED_BY_BACKUP_VAULT_LOCK", + scope: { + complianceResourceTypes: ["EBS"], + }, + }, + { + inputParameter: [ + { + name: "recoveryPointAgeUnit", + value: "days", + }, + { + name: "recoveryPointAgeValue", + value: "1", + }, + ], + name: "BACKUP_LAST_RECOVERY_POINT_CREATED", + scope: { + complianceResourceTypes: ["EBS"], + }, + }, + ], + description: "this is an example framework", + name: "exampleFramework", + tags: { + Name: "Example Framework", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `control` - (Required) One or more control blocks that make up the framework. Each control in the list has a name, input parameters, and scope. Detailed below. +* `description` - (Optional) The description of the framework with a maximum of 1,024 characters +* `name` - (Required) The unique name of the framework. The name must be between 1 and 256 characters, starting with a letter, and consisting of letters, numbers, and underscores. +* `tags` - (Optional) Metadata that you can assign to help organize the frameworks you create. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Control Arguments
+
+`control` has the following attributes:
+
+* `inputParameter` - (Optional) One or more input parameter blocks. An example of a control with two parameters is: "backup plan frequency is at least daily and the retention period is at least 1 year". The first parameter is daily. The second parameter is 1 year. Detailed below.
+* `name` - (Required) The name of a control. This name is between 1 and 256 characters.
+* `scope` - (Optional) The scope of a control. The control scope defines what the control will evaluate. Three examples of control scopes are: a specific backup plan, all backup plans with a specific tag, or all backup plans. Detailed below.
+
+### Input Parameter Arguments
+
+`inputParameter` has the following attributes:
+
+* `name` - (Optional) The name of a parameter, for example, BackupPlanFrequency.
+* `value` - (Optional) The value of the parameter, for example, hourly.
+
+### Scope Arguments
+
+`scope` has the following attributes:
+
+* `complianceResourceIds` - (Optional) The ID of the only AWS resource that you want your control scope to contain. Minimum number of 1 item. Maximum number of 100 items.
+* `complianceResourceTypes` - (Optional) Describes whether the control scope includes one or more types of resources, such as EFS or RDS.
+* `tags` - (Optional) The tag key-value pair applied to those AWS resources that you want to trigger an evaluation for a rule. A maximum of one key-value pair can be provided.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the backup framework.
+* `creationTime` - The date and time that a framework is created, in Unix format and Coordinated Universal Time (UTC).
+* `deploymentStatus` - The deployment status of a framework. The statuses are: `CREATE_IN_PROGRESS` | `UPDATE_IN_PROGRESS` | `DELETE_IN_PROGRESS` | `COMPLETED` | `FAILED`.
+* `id` - The id of the backup framework.
+* `status` - A framework consists of one or more controls. Each control governs a resource, such as backup plans, backup selections, backup vaults, or recovery points. You can also turn AWS Config recording on or off for each resource. For more information refer to the [AWS documentation for Framework Status](https://docs.aws.amazon.com/aws-backup/latest/devguide/API_DescribeFramework.html#Backup-DescribeFramework-response-FrameworkStatus)
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `2m`)
+* `update` - (Default `2m`)
+* `delete` - (Default `2m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Framework using the `id` which corresponds to the name of the Backup Framework.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup Framework using the `id` which corresponds to the name of the Backup Framework. For example: + +```console +% terraform import aws_backup_framework.test +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_global_settings.html.markdown b/website/docs/cdktf/typescript/r/backup_global_settings.html.markdown new file mode 100644 index 00000000000..c52a1baa743 --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_global_settings.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_global_settings" +description: |- + Provides an AWS Backup Global Settings resource. +--- + + + +# Resource: aws_backup_global_settings + +Provides an AWS Backup Global Settings resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupGlobalSettings } from "./.gen/providers/aws/backup-global-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupGlobalSettings(this, "test", { + globalSettings: { + isCrossAccountBackupEnabled: "true", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `globalSettings` - (Required) A list of resources along with the opt-in preferences for the account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The AWS Account ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Global Settings using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup Global Settings using the `id`. For example: + +```console +% terraform import aws_backup_global_settings.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_plan.html.markdown b/website/docs/cdktf/typescript/r/backup_plan.html.markdown new file mode 100644 index 00000000000..adfcdf0d45f --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_plan.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_plan" +description: |- + Provides an AWS Backup plan resource. +--- + + + +# Resource: aws_backup_plan + +Provides an AWS Backup plan resource. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupPlan } from "./.gen/providers/aws/backup-plan"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupPlan(this, "example", { + advancedBackupSetting: [ + { + backupOptions: { + WindowsVSS: "enabled", + }, + resourceType: "EC2", + }, + ], + name: "tf_example_backup_plan", + rule: [ + { + lifecycle: { + deleteAfter: 14, + }, + ruleName: "tf_example_backup_rule", + schedule: "cron(0 12 * * ? *)", + targetVaultName: test.name, + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The display name of a backup plan. +* `rule` - (Required) A rule object that specifies a scheduled task that is used to back up a selection of resources. +* `advancedBackupSetting` - (Optional) An object that specifies backup options for each resource type. +* `tags` - (Optional) Metadata that you can assign to help organize the plans you create. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Rule Arguments + +`rule` supports the following attributes: + +* `ruleName` - (Required) An display name for a backup rule. +* `targetVaultName` - (Required) The name of a logical container where backups are stored. +* `schedule` - (Optional) A CRON expression specifying when AWS Backup initiates a backup job. +* `enableContinuousBackup` - (Optional) Enable continuous backups for supported resources. +* `startWindow` - (Optional) The amount of time in minutes before beginning a backup. +* `completionWindow` - (Optional) The amount of time in minutes AWS Backup attempts a backup before canceling the job and returning an error. +* `lifecycle` - (Optional) The lifecycle defines when a protected resource is transitioned to cold storage and when it expires. Fields documented below. +* `recoveryPointTags` - (Optional) Metadata that you can assign to help organize the resources that you create. +* `copyAction` - (Optional) Configuration block(s) with copy operation settings. Detailed below. + +### Lifecycle Arguments + +`lifecycle` supports the following attributes: + +* `coldStorageAfter` - (Optional) Specifies the number of days after creation that a recovery point is moved to cold storage. +* `deleteAfter` - (Optional) Specifies the number of days after creation that a recovery point is deleted. Must be 90 days greater than `coldStorageAfter`. + +### Copy Action Arguments + +`copyAction` supports the following attributes: + +* `lifecycle` - (Optional) The lifecycle defines when a protected resource is copied over to a backup vault and when it expires. Fields documented above. +* `destinationVaultArn` - (Required) An Amazon Resource Name (ARN) that uniquely identifies the destination backup vault for the copied backup. + +### Advanced Backup Setting Arguments + +`advancedBackupSetting` supports the following arguments: + +* `backupOptions` - (Required) Specifies the backup option for a selected resource. 
This option is only available for Windows VSS backup jobs. Set to `{ WindowsVSS = "enabled" }` to enable Windows VSS backup option and create a VSS Windows backup. +* `resourceType` - (Required) The type of AWS resource to be backed up. For VSS Windows backups, the only supported resource type is Amazon EC2. Valid values: `ec2`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the backup plan. +* `arn` - The ARN of the backup plan. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `version` - Unique, randomly generated, Unicode, UTF-8 encoded string that serves as the version ID of the backup plan. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Plan using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup Plan using the `id`. For example: + +```console +% terraform import aws_backup_plan.test +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_region_settings.html.markdown b/website/docs/cdktf/typescript/r/backup_region_settings.html.markdown new file mode 100644 index 00000000000..f741187b466 --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_region_settings.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_region_settings" +description: |- + Provides an AWS Backup Region Settings resource. +--- + + + +# Resource: aws_backup_region_settings + +Provides an AWS Backup Region Settings resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupRegionSettings } from "./.gen/providers/aws/backup-region-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupRegionSettings(this, "test", { + resourceTypeManagementPreference: { + DynamoDB: true, + EFS: true, + }, + resourceTypeOptInPreference: { + Aurora: true, + DocumentDB: true, + DynamoDB: true, + EBS: true, + EC2: true, + EFS: true, + FSx: true, + Neptune: true, + RDS: true, + "Storage Gateway": true, + VirtualMachine: true, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resourceTypeOptInPreference` - (Required) A map of services along with the opt-in preferences for the Region. +* `resourceTypeManagementPreference` - (Optional) A map of services along with the management preferences for the Region. For more information, see the [AWS Documentation](https://docs.aws.amazon.com/aws-backup/latest/devguide/API_UpdateRegionSettings.html#API_UpdateRegionSettings_RequestSyntax). 
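+
+For instance, a minimal sketch that opts DynamoDB out of AWS Backup in the current Region while keeping EBS and EC2 opted in (the service selection is illustrative, not a recommendation):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { BackupRegionSettings } from "./.gen/providers/aws/backup-region-settings";
+class RegionSettingsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new BackupRegionSettings(this, "example", {
+      resourceTypeOptInPreference: {
+        DynamoDB: false, // opted out
+        EBS: true,
+        EC2: true,
+      },
+    });
+  }
+}
+```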
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The AWS region. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Region Settings using the `region`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup Region Settings using the `region`. For example: + +```console +% terraform import aws_backup_region_settings.test us-west-2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_report_plan.html.markdown b/website/docs/cdktf/typescript/r/backup_report_plan.html.markdown new file mode 100644 index 00000000000..ac6be395c66 --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_report_plan.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_report_plan" +description: |- + Provides an AWS Backup Report Plan resource. +--- + + + +# Resource: aws_backup_report_plan + +Provides an AWS Backup Report Plan resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupReportPlan } from "./.gen/providers/aws/backup-report-plan"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupReportPlan(this, "example", { + description: "example description", + name: "example_name", + reportDeliveryChannel: { + formats: ["CSV", "JSON"], + s3BucketName: "example-bucket-name", + }, + reportSetting: { + reportTemplate: "RESTORE_JOB_REPORT", + }, + tags: { + Name: "Example Report Plan", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) The description of the report plan with a maximum of 1,024 characters +* `name` - (Required) The unique name of the report plan. The name must be between 1 and 256 characters, starting with a letter, and consisting of letters, numbers, and underscores. +* `reportDeliveryChannel` - (Required) An object that contains information about where and how to deliver your reports, specifically your Amazon S3 bucket name, S3 key prefix, and the formats of your reports. Detailed below. +* `reportSetting` - (Required) An object that identifies the report template for the report. Reports are built using a report template. Detailed below. +* `tags` - (Optional) Metadata that you can assign to help organize the report plans you create. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Report Delivery Channel Arguments + +`reportDeliveryChannel` supports the following arguments: + +* `formats` - (Optional) A list of the format of your reports: CSV, JSON, or both. 
If not specified, the default format is CSV.
+* `s3BucketName` - (Required) The unique name of the S3 bucket that receives your reports.
+* `s3KeyPrefix` - (Optional) The prefix for where Backup Audit Manager delivers your reports to Amazon S3. The prefix is this part of the following path: s3://your-bucket-name/prefix/Backup/us-west-2/year/month/day/report-name. If not specified, there is no prefix.
+
+### Report Setting Arguments
+
+`reportSetting` supports the following arguments:
+
+* `accounts` - (Optional) Specifies the list of accounts a report covers.
+* `frameworkArns` - (Optional) Specifies the Amazon Resource Names (ARNs) of the frameworks a report covers.
+* `numberOfFrameworks` - (Optional) Specifies the number of frameworks a report covers.
+* `organizationUnits` - (Optional) Specifies the list of Organizational Units a report covers.
+* `regions` - (Optional) Specifies the list of regions a report covers.
+* `reportTemplate` - (Required) Identifies the report template for the report. Reports are built using a report template. The report templates are: `RESOURCE_COMPLIANCE_REPORT` | `CONTROL_COMPLIANCE_REPORT` | `BACKUP_JOB_REPORT` | `COPY_JOB_REPORT` | `RESTORE_JOB_REPORT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the backup report plan.
+* `creationTime` - The date and time that a report plan is created, in Unix format and Coordinated Universal Time (UTC).
+* `deploymentStatus` - The deployment status of a report plan. The statuses are: `CREATE_IN_PROGRESS` | `UPDATE_IN_PROGRESS` | `DELETE_IN_PROGRESS` | `COMPLETED`.
+* `id` - The id of the backup report plan.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup Report Plan using the `id` which corresponds to the name of the Backup Report Plan. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Backup Report Plan using the `id` which corresponds to the name of the Backup Report Plan. For example:
+
+```console
+% terraform import aws_backup_report_plan.test
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/backup_selection.html.markdown b/website/docs/cdktf/typescript/r/backup_selection.html.markdown
new file mode 100644
index 00000000000..b0219e5f2ce
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/backup_selection.html.markdown
@@ -0,0 +1,271 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_selection"
+description: |-
+  Manages selection conditions for AWS Backup plan resources.
+---
+
+
+
+# Resource: aws_backup_selection
+
+Manages selection conditions for AWS Backup plan resources.
+
+## Example Usage
+
+### IAM Role
+
+-> For more information about creating and managing IAM Roles for backups and restores, see the [AWS Backup Developer Guide](https://docs.aws.amazon.com/aws-backup/latest/devguide/iam-service-roles.html).
+ +The below example creates an IAM role with the default managed IAM Policy for allowing AWS Backup to create backups. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupSelection } from "./.gen/providers/aws/backup-selection"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +interface MyConfig { + name: any; + planId: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["backup.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const example = new IamRole(this, "example", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "example", + }); + const awsIamRolePolicyAttachmentExample = new IamRolePolicyAttachment( + this, + "example_2", + { + policyArn: + "arn:aws:iam::aws:policy/service-role/AWSBackupServiceRolePolicyForBackup", + role: example.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentExample.overrideLogicalId("example"); + const awsBackupSelectionExample = new BackupSelection(this, "example_3", { + iamRoleArn: example.arn, + name: config.name, + planId: config.planId, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsBackupSelectionExample.overrideLogicalId("example"); + } +} + +``` + +### Selecting Backups By Tag + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupSelection } from "./.gen/providers/aws/backup-selection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupSelection(this, "example", { + iamRoleArn: Token.asString(awsIamRoleExample.arn), + name: "tf_example_backup_selection", + planId: Token.asString(awsBackupPlanExample.id), + selectionTag: [ + { + key: "foo", + type: "STRINGEQUALS", + value: "bar", + }, + ], + }); + } +} + +``` + +### Selecting Backups By Conditions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { BackupSelection } from "./.gen/providers/aws/backup-selection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupSelection(this, "example", { + condition: [ + { + stringEquals: [ + { + key: "aws:ResourceTag/Component", + value: "rds", + }, + ], + stringLike: [ + { + key: "aws:ResourceTag/Application", + value: "app*", + }, + ], + stringNotEquals: [ + { + key: "aws:ResourceTag/Backup", + value: "false", + }, + ], + stringNotLike: [ + { + key: "aws:ResourceTag/Environment", + value: "test*", + }, + ], + }, + ], + iamRoleArn: Token.asString(awsIamRoleExample.arn), + name: "tf_example_backup_selection", + planId: Token.asString(awsBackupPlanExample.id), + resources: ["*"], + }); + } +} + +``` + +### Selecting Backups By Resource + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupSelection } from "./.gen/providers/aws/backup-selection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupSelection(this, "example", { + iamRoleArn: Token.asString(awsIamRoleExample.arn), + name: "tf_example_backup_selection", + planId: Token.asString(awsBackupPlanExample.id), + resources: [ + Token.asString(awsDbInstanceExample.arn), + Token.asString(awsEbsVolumeExample.arn), + Token.asString(awsEfsFileSystemExample.arn), + ], + }); + } +} + +``` + +### Selecting Backups By Not Resource + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupSelection } from "./.gen/providers/aws/backup-selection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupSelection(this, "example", { + iamRoleArn: Token.asString(awsIamRoleExample.arn), + name: "tf_example_backup_selection", + notResources: [ + Token.asString(awsDbInstanceExample.arn), + Token.asString(awsEbsVolumeExample.arn), + Token.asString(awsEfsFileSystemExample.arn), + ], + planId: Token.asString(awsBackupPlanExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The display name of a resource selection document. +* `planId` - (Required) The backup plan ID to be associated with the selection of resources. +* `iamRoleArn` - (Required) The ARN of the IAM role that AWS Backup uses to authenticate when restoring and backing up the target resource. See the [AWS Backup Developer Guide](https://docs.aws.amazon.com/aws-backup/latest/devguide/access-control.html#managed-policies) for additional information about using AWS managed policies or creating custom policies attached to the IAM role. +* `selectionTag` - (Optional) Tag-based conditions used to specify a set of resources to assign to a backup plan. +* `condition` - (Optional) A list of conditions that you define to assign resources to your backup plans using tags. 
+
+* `resources` - (Optional) An array of strings that either contain Amazon Resource Names (ARNs) or match patterns of resources to assign to a backup plan.
+* `notResources` - (Optional) An array of strings that either contain Amazon Resource Names (ARNs) or match patterns of resources to exclude from a backup plan.
+
+Tag conditions (`selectionTag`) support the following:
+
+* `type` - (Required) An operation, such as `STRINGEQUALS`, that is applied to a key-value pair used to filter resources in a selection.
+* `key` - (Required) The key in a key-value pair.
+* `value` - (Required) The value in a key-value pair.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Backup Selection identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup selection using the `plan_id` and selection `id` separated by `|`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Backup selection using the `plan_id` and selection `id` separated by `|`. For example:
+
+```console
+% terraform import aws_backup_selection.example plan-id|selection-id
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/backup_vault.html.markdown b/website/docs/cdktf/typescript/r/backup_vault.html.markdown
new file mode 100644
index 00000000000..e05b0ed3a07
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/backup_vault.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_vault"
+description: |-
+  Provides an AWS Backup vault resource.
+---
+
+
+
+# Resource: aws_backup_vault
+
+Provides an AWS Backup vault resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { BackupVault } from "./.gen/providers/aws/backup-vault";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new BackupVault(this, "example", {
+      kmsKeyArn: Token.asString(awsKmsKeyExample.arn),
+      name: "example_backup_vault",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `forceDestroy` - (Optional, Default: `false`) A boolean that indicates that all recovery points stored in the vault are deleted so that the vault can be destroyed without error.
+* `kmsKeyArn` - (Optional) The server-side encryption key that is used to protect your backups.
+* `name` - (Required) Name of the backup vault to create.
+* `tags` - (Optional) Metadata that you can assign to help organize the resources that you create. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the vault. +* `arn` - The ARN of the vault. +* `recoveryPoints` - The number of recovery points that are stored in a backup vault. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup vault using the `name`. For example: + +```console +% terraform import aws_backup_vault.test-vault TestVault +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_vault_lock_configuration.html.markdown b/website/docs/cdktf/typescript/r/backup_vault_lock_configuration.html.markdown new file mode 100644 index 00000000000..f6491060028 --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_vault_lock_configuration.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_vault_lock_configuration" +description: |- + Provides an AWS Backup vault lock configuration resource. +--- + + + +# Resource: aws_backup_vault_lock_configuration + +Provides an AWS Backup vault lock configuration resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupVaultLockConfiguration } from "./.gen/providers/aws/backup-vault-lock-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BackupVaultLockConfiguration(this, "test", { + backupVaultName: "example_backup_vault", + changeableForDays: 3, + maxRetentionDays: 1200, + minRetentionDays: 7, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `backupVaultName` - (Required) Name of the backup vault to add a lock configuration for. +* `changeableForDays` - (Optional) The number of days before the lock date. If omitted creates a vault lock in `governance` mode, otherwise it will create a vault lock in `compliance` mode. +* `maxRetentionDays` - (Optional) The maximum retention period that the vault retains its recovery points. +* `minRetentionDays` - (Optional) The minimum retention period that the vault retains its recovery points. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `backupVaultName` - The name of the vault. +* `backupVaultArn` - The ARN of the vault. 
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault lock configuration using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup vault lock configuration using the `name`. For example: + +```console +% terraform import aws_backup_vault_lock_configuration.test TestVault +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_vault_notifications.html.markdown b/website/docs/cdktf/typescript/r/backup_vault_notifications.html.markdown new file mode 100644 index 00000000000..1a998cc8ec9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_vault_notifications.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_vault_notifications" +description: |- + Provides an AWS Backup vault notifications resource. +--- + + + +# Resource: aws_backup_vault_notifications + +Provides an AWS Backup vault notifications resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupVaultNotifications } from "./.gen/providers/aws/backup-vault-notifications"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +import { SnsTopicPolicy } from "./.gen/providers/aws/sns-topic-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new SnsTopic(this, "test", { + name: "backup-vault-events", + }); + const dataAwsIamPolicyDocumentTest = new DataAwsIamPolicyDocument( + this, + "test_1", + { + policyId: "__default_policy_ID", + statement: [ + { + actions: ["SNS:Publish"], + effect: "Allow", + principals: [ + { + identifiers: ["backup.amazonaws.com"], + type: "Service", + }, + ], + resources: [test.arn], + sid: "__default_statement_ID", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentTest.overrideLogicalId("test"); + const awsBackupVaultNotificationsTest = new BackupVaultNotifications( + this, + "test_2", + { + backupVaultEvents: ["BACKUP_JOB_STARTED", "RESTORE_JOB_COMPLETED"], + backupVaultName: "example_backup_vault", + snsTopicArn: test.arn, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsBackupVaultNotificationsTest.overrideLogicalId("test"); + const awsSnsTopicPolicyTest = new SnsTopicPolicy(this, "test_3", { + arn: test.arn, + policy: Token.asString(dataAwsIamPolicyDocumentTest.json), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsSnsTopicPolicyTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `backupVaultName` - (Required) Name of the backup vault to add notifications for. +* `snsTopicArn` - (Required) The Amazon Resource Name (ARN) that specifies the topic for a backup vault’s events +* `backupVaultEvents` - (Required) An array of events that indicate the status of jobs to back up resources to the backup vault. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the vault. +* `backupVaultArn` - The ARN of the vault. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault notifications using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup vault notifications using the `name`. For example: + +```console +% terraform import aws_backup_vault_notifications.test TestVault +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/backup_vault_policy.html.markdown b/website/docs/cdktf/typescript/r/backup_vault_policy.html.markdown new file mode 100644 index 00000000000..fb21d2be96f --- /dev/null +++ b/website/docs/cdktf/typescript/r/backup_vault_policy.html.markdown @@ -0,0 +1,115 @@ +--- +subcategory: "Backup" +layout: "aws" +page_title: "AWS: aws_backup_vault_policy" +description: |- + Provides an AWS Backup vault policy resource. +--- + + + +# Resource: aws_backup_vault_policy + +Provides an AWS Backup vault policy resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BackupVault } from "./.gen/providers/aws/backup-vault"; +import { BackupVaultPolicy } from "./.gen/providers/aws/backup-vault-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new BackupVault(this, "example", { + name: "example", + }); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_1", + { + statement: [ + { + actions: [ + "backup:DescribeBackupVault", + "backup:DeleteBackupVault", + "backup:PutBackupVaultAccessPolicy", + "backup:DeleteBackupVaultAccessPolicy", + "backup:GetBackupVaultAccessPolicy", + "backup:StartBackupJob", + "backup:GetBackupVaultNotifications", + "backup:PutBackupVaultNotifications", + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [example.arn], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsBackupVaultPolicyExample = new BackupVaultPolicy( + this, + "example_2", + { + backupVaultName: example.name, + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsBackupVaultPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `backupVaultName` - (Required) Name of the backup vault to add policy for. +* `policy` - (Required) The backup vault access policy document in JSON format. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the vault. +* `backupVaultArn` - The ARN of the vault. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Backup vault policy using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Backup vault policy using the `name`. For example: + +```console +% terraform import aws_backup_vault_policy.test TestVault +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/batch_compute_environment.html.markdown b/website/docs/cdktf/typescript/r/batch_compute_environment.html.markdown new file mode 100644 index 00000000000..33215eaf471 --- /dev/null +++ b/website/docs/cdktf/typescript/r/batch_compute_environment.html.markdown @@ -0,0 +1,295 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_compute_environment" +description: |- + Creates a AWS Batch compute environment. +--- + + + +# Resource: aws_batch_compute_environment + +Creates a AWS Batch compute environment. Compute environments contain the Amazon ECS container instances that are used to run containerized batch jobs. + +For information about AWS Batch, see [What is AWS Batch?][1] . +For information about compute environment, see [Compute Environments][2] . + +~> **Note:** To prevent a race condition during environment deletion, make sure to set `dependsOn` to the related `awsIamRolePolicyAttachment`; +otherwise, the policy may be destroyed too soon and the compute environment will then get stuck in the `deleting` state, see [Troubleshooting AWS Batch][3] . + +## Example Usage + +### EC2 Type + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { BatchComputeEnvironment } from "./.gen/providers/aws/batch-compute-environment"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamInstanceProfile } from "./.gen/providers/aws/iam-instance-profile"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { PlacementGroup } from "./.gen/providers/aws/placement-group"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const sample = new PlacementGroup(this, "sample", { + name: "sample", + strategy: "cluster", + }); + const awsSecurityGroupSample = new SecurityGroup(this, "sample_1", { + egress: [ + { + cidrBlocks: ["0.0.0.0/0"], + fromPort: 0, + protocol: "-1", + toPort: 0, + }, + ], + name: "aws_batch_compute_environment_security_group", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityGroupSample.overrideLogicalId("sample"); + const awsVpcSample = new Vpc(this, "sample_2", { + cidrBlock: "10.1.0.0/16", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpcSample.overrideLogicalId("sample"); + const batchAssumeRole = new DataAwsIamPolicyDocument( + this, + "batch_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["batch.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const ec2AssumeRole = new DataAwsIamPolicyDocument( + this, + "ec2_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["ec2.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const awsBatchServiceRole = new IamRole(this, "aws_batch_service_role", { + assumeRolePolicy: Token.asString(batchAssumeRole.json), + name: "aws_batch_service_role", + }); + const ecsInstanceRole = new IamRole(this, "ecs_instance_role", { + assumeRolePolicy: Token.asString(ec2AssumeRole.json), + name: "ecs_instance_role", + }); + const awsIamRolePolicyAttachmentAwsBatchServiceRole = + new IamRolePolicyAttachment(this, "aws_batch_service_role_7", { + policyArn: "arn:aws:iam::aws:policy/service-role/AWSBatchServiceRole", + role: awsBatchServiceRole.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentAwsBatchServiceRole.overrideLogicalId( + "aws_batch_service_role" + ); + const awsIamRolePolicyAttachmentEcsInstanceRole = + new IamRolePolicyAttachment(this, "ecs_instance_role_8", { + policyArn: + "arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceforEC2Role", + role: ecsInstanceRole.name, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentEcsInstanceRole.overrideLogicalId( + "ecs_instance_role" + ); + const awsSubnetSample = new Subnet(this, "sample_9", { + cidrBlock: "10.1.1.0/24", + vpcId: Token.asString(awsVpcSample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetSample.overrideLogicalId("sample"); + const awsIamInstanceProfileEcsInstanceRole = new IamInstanceProfile( + this, + "ecs_instance_role_10", + { + name: "ecs_instance_role", + role: ecsInstanceRole.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamInstanceProfileEcsInstanceRole.overrideLogicalId("ecs_instance_role"); + const awsBatchComputeEnvironmentSample = new BatchComputeEnvironment( + this, + "sample_11", + { + computeEnvironmentName: "sample", + computeResources: { + instanceRole: Token.asString( + awsIamInstanceProfileEcsInstanceRole.arn + ), + instanceType: ["c4.large"], + maxVcpus: 16, + minVcpus: 0, + placementGroup: sample.name, + securityGroupIds: [Token.asString(awsSecurityGroupSample.id)], + subnets: [Token.asString(awsSubnetSample.id)], + type: "EC2", + }, + dependsOn: [awsIamRolePolicyAttachmentAwsBatchServiceRole], + serviceRole: awsBatchServiceRole.arn, + type: "MANAGED", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsBatchComputeEnvironmentSample.overrideLogicalId("sample"); + } +} + +``` + +### Fargate Type + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BatchComputeEnvironment } from "./.gen/providers/aws/batch-compute-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BatchComputeEnvironment(this, "sample", { + computeEnvironmentName: "sample", + computeResources: { + maxVcpus: 16, + securityGroupIds: [Token.asString(awsSecurityGroupSample.id)], + subnets: [Token.asString(awsSubnetSample.id)], + type: "FARGATE", + }, + dependsOn: [awsBatchServiceRole], + serviceRole: Token.asString(awsIamRoleAwsBatchServiceRole.arn), + type: "MANAGED", + }); + } +} + +``` + +## Argument Reference + +* `computeEnvironmentName` - (Optional, Forces new resource) The name for your compute environment. Up to 128 letters (uppercase and lowercase), numbers, and underscores are allowed. If omitted, Terraform will assign a random, unique name. +* `computeEnvironmentNamePrefix` - (Optional, Forces new resource) Creates a unique compute environment name beginning with the specified prefix. Conflicts with `computeEnvironmentName`. +* `computeResources` - (Optional) Details of the compute resources managed by the compute environment. This parameter is required for managed compute environments. See details below. +* `eksConfiguration` - (Optional) Details for the Amazon EKS cluster that supports the compute environment. See details below. +* `serviceRole` - (Required) The full Amazon Resource Name (ARN) of the IAM role that allows AWS Batch to make calls to other AWS services on your behalf. 
+* `state` - (Optional) The state of the compute environment. If the state is `ENABLED`, then the compute environment accepts jobs from a queue and can scale out automatically based on queues. Valid items are `ENABLED` or `DISABLED`. Defaults to `ENABLED`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `type` - (Required) The type of the compute environment. Valid items are `MANAGED` or `UNMANAGED`.
+
+### compute_resources
+
+* `allocationStrategy` - (Optional) The allocation strategy to use for the compute resource in case not enough instances of the best fitting instance type can be allocated. Valid items are `BEST_FIT_PROGRESSIVE`, `SPOT_CAPACITY_OPTIMIZED` or `BEST_FIT`. Defaults to `BEST_FIT`. See [AWS docs](https://docs.aws.amazon.com/batch/latest/userguide/allocation-strategies.html) for details. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `bidPercentage` - (Optional) Integer of maximum percentage that a Spot Instance price can be when compared with the On-Demand price for that instance type before instances are launched. For example, if your bid percentage is 20% (`20`), then the Spot price must be below 20% of the current On-Demand price for that EC2 instance. If you leave this field empty, the default value is 100% of the On-Demand price. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `desiredVcpus` - (Optional) The desired number of EC2 vCPUs in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `ec2Configuration` - (Optional) Provides information used to select Amazon Machine Images (AMIs) for EC2 instances in the compute environment. If `ec2Configuration` isn't specified, the default is `ECS_AL2`. This parameter isn't applicable to jobs that are running on Fargate resources, and shouldn't be specified.
+* `ec2KeyPair` - (Optional) The EC2 key pair that is used for instances launched in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `imageId` - (Optional) The Amazon Machine Image (AMI) ID used for instances launched in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified. (Deprecated, use [`ec2Configuration`](#ec2_configuration) `imageIdOverride` instead)
+* `instanceRole` - (Optional) The Amazon ECS instance role applied to Amazon EC2 instances in a compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `instanceType` - (Optional) A list of instance types that may be launched. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `launchTemplate` - (Optional) The launch template to use for your compute resources. See details below. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `maxVcpus` - (Required) The maximum number of EC2 vCPUs that an environment can reach.
+* `minVcpus` - (Optional) The minimum number of EC2 vCPUs that an environment should maintain.
For `EC2` or `SPOT` compute environments, if the parameter is not explicitly defined, a `0` default value will be set. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `placementGroup` - (Optional) The Amazon EC2 placement group to associate with your compute resources.
+* `securityGroupIds` - (Optional) A list of EC2 security groups that are associated with instances launched in the compute environment. This parameter is required for Fargate compute environments.
+* `spotIamFleetRole` - (Optional) The Amazon Resource Name (ARN) of the Amazon EC2 Spot Fleet IAM role applied to a SPOT compute environment. This parameter is required for SPOT compute environments. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `subnets` - (Required) A list of VPC subnets into which the compute resources are launched.
+* `tags` - (Optional) Key-value pair tags to be applied to resources that are launched in the compute environment. This parameter isn't applicable to jobs running on Fargate resources, and shouldn't be specified.
+* `type` - (Required) The type of compute environment. Valid items are `EC2`, `SPOT`, `FARGATE` or `FARGATE_SPOT`.
+
+### ec2_configuration
+
+`ec2Configuration` supports the following:
+
+* `imageIdOverride` - (Optional) The AMI ID used for instances launched in the compute environment that match the image type. This setting overrides the `imageId` argument in the [`computeResources`](#compute_resources) block.
+* `imageType` - (Optional) The image type to match with the instance type to select an AMI. If the `imageIdOverride` parameter isn't specified, then a recent [Amazon ECS-optimized Amazon Linux 2 AMI](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-optimized_AMI.html#al2ami) (`ECS_AL2`) is used.
+
+### launch_template
+
+`launchTemplate` supports the following:
+
+* `launchTemplateId` - (Optional) ID of the launch template. You must specify either the launch template ID or launch template name in the request, but not both.
+* `launchTemplateName` - (Optional) Name of the launch template.
+* `version` - (Optional) The version number of the launch template. Default: The default version of the launch template.
+
+### eks_configuration
+
+`eksConfiguration` supports the following:
+
+* `eksClusterArn` - (Required) The Amazon Resource Name (ARN) of the Amazon EKS cluster.
+* `kubernetesNamespace` - (Required) The namespace of the Amazon EKS cluster. AWS Batch manages pods in this namespace.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the compute environment.
+* `ecsClusterArn` - The Amazon Resource Name (ARN) of the underlying Amazon ECS cluster used by the compute environment.
+* `status` - The current status of the compute environment (for example, CREATING or VALID).
+* `statusReason` - A short, human-readable string to provide additional details about the current status of the compute environment.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Batch compute using the `computeEnvironmentName`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS Batch compute using the `computeEnvironmentName`. For example: + +```console +% terraform import aws_batch_compute_environment.sample sample +``` + +[1]: http://docs.aws.amazon.com/batch/latest/userguide/what-is-batch.html +[2]: http://docs.aws.amazon.com/batch/latest/userguide/compute_environments.html +[3]: http://docs.aws.amazon.com/batch/latest/userguide/troubleshooting.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/batch_job_definition.html.markdown b/website/docs/cdktf/typescript/r/batch_job_definition.html.markdown new file mode 100644 index 00000000000..646ed6f4742 --- /dev/null +++ b/website/docs/cdktf/typescript/r/batch_job_definition.html.markdown @@ -0,0 +1,221 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_job_definition" +description: |- + Provides a Batch Job Definition resource. +--- + + + +# Resource: aws_batch_job_definition + +Provides a Batch Job Definition resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BatchJobDefinition } from "./.gen/providers/aws/batch-job-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BatchJobDefinition(this, "test", { + containerProperties: Token.asString( + Fn.jsonencode({ + command: ["ls", "-la"], + environment: [ + { + name: "VARNAME", + value: "VARVAL", + }, + ], + image: "busybox", + mountPoints: [ + { + containerPath: "/tmp", + readOnly: false, + sourceVolume: "tmp", + }, + ], + resourceRequirements: [ + { + type: "VCPU", + value: "0.25", + }, + { + type: "MEMORY", + value: "512", + }, + ], + ulimits: [ + { + hardLimit: 1024, + name: "nofile", + softLimit: 1024, + }, + ], + volumes: [ + { + host: { + sourcePath: "/tmp", + }, + name: "tmp", + }, + ], + }) + ), + name: "tf_test_batch_job_definition", + type: "container", + }); + } +} + +``` + +### Fargate Platform Capability + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { BatchJobDefinition } from "./.gen/providers/aws/batch-job-definition"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRolePolicy = new DataAwsIamPolicyDocument( + this, + "assume_role_policy", + { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["ecs-tasks.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const ecsTaskExecutionRole = new IamRole(this, "ecs_task_execution_role", { + assumeRolePolicy: Token.asString(assumeRolePolicy.json), + name: "tf_test_batch_exec_role", + }); + new IamRolePolicyAttachment(this, "ecs_task_execution_role_policy", { + policyArn: + "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy", + role: ecsTaskExecutionRole.name, + }); + new BatchJobDefinition(this, "test", { + containerProperties: Token.asString( + Fn.jsonencode({ + command: ["echo", "test"], + executionRoleArn: ecsTaskExecutionRole.arn, + fargatePlatformConfiguration: { + platformVersion: "LATEST", + }, + image: "busybox", + jobRoleArn: "arn:aws:iam::123456789012:role/AWSBatchS3ReadOnly", + resourceRequirements: [ + { + type: "VCPU", + value: "0.25", + }, + { + type: "MEMORY", + value: "512", + }, + ], + }) + ), + name: "tf_test_batch_job_definition", + platformCapabilities: ["FARGATE"], + type: "container", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Specifies the name of the job definition. +* `type` - (Required) The type of job definition. Must be `container`. + +The following arguments are optional: + +* `containerProperties` - (Optional) A valid [container properties](http://docs.aws.amazon.com/batch/latest/APIReference/API_RegisterJobDefinition.html) + provided as a single valid JSON document. This parameter is required if the `type` parameter is `container`. +* `parameters` - (Optional) Specifies the parameter substitution placeholders to set in the job definition. +* `platformCapabilities` - (Optional) The platform capabilities required by the job definition. If no value is specified, it defaults to `ec2`. To run the job on Fargate resources, specify `fargate`. +* `propagateTags` - (Optional) Specifies whether to propagate the tags from the job definition to the corresponding Amazon ECS task. Default is `false`. +* `retryStrategy` - (Optional) Specifies the retry strategy to use for failed jobs that are submitted with this job definition. + Maximum number of `retryStrategy` is `1`. Defined below. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `timeout` - (Optional) Specifies the timeout for jobs so that if a job runs longer, AWS Batch terminates the job. Maximum number of `timeout` is `1`. Defined below. + +### retry_strategy + +* `attempts` - (Optional) The number of times to move a job to the `runnable` status. You may specify between `1` and `10` attempts. 
+* `evaluateOnExit` - (Optional) The [evaluate on exit](#evaluate_on_exit) conditions under which the job should be retried or failed. If this parameter is specified, then the `attempts` parameter must also be specified. You may specify up to 5 configuration blocks. + +#### evaluate_on_exit + +* `action` - (Required) Specifies the action to take if all of the specified conditions are met. The values are not case sensitive. Valid values: `retry`, `exit`. +* `onExitCode` - (Optional) A glob pattern to match against the decimal representation of the exit code returned for a job. +* `onReason` - (Optional) A glob pattern to match against the reason returned for a job. +* `onStatusReason` - (Optional) A glob pattern to match against the status reason returned for a job. + +### timeout + +* `attemptDurationSeconds` - (Optional) The time duration in seconds after which AWS Batch terminates your jobs if they have not finished. The minimum value for the timeout is `60` seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of the job definition. +* `revision` - The revision of the job definition. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Batch Job Definition using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Batch Job Definition using the `arn`. For example: + +```console +% terraform import aws_batch_job_definition.test arn:aws:batch:us-east-1:123456789012:job-definition/sample +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/batch_job_queue.html.markdown b/website/docs/cdktf/typescript/r/batch_job_queue.html.markdown new file mode 100644 index 00000000000..afd6d8b318b --- /dev/null +++ b/website/docs/cdktf/typescript/r/batch_job_queue.html.markdown @@ -0,0 +1,127 @@ +--- +subcategory: "Batch" +layout: "aws" +page_title: "AWS: aws_batch_job_queue" +description: |- + Provides a Batch Job Queue resource. +--- + + + +# Resource: aws_batch_job_queue + +Provides a Batch Job Queue resource. + +## Example Usage + +### Basic Job Queue + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { BatchJobQueue } from "./.gen/providers/aws/batch-job-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BatchJobQueue(this, "test_queue", { + computeEnvironments: [testEnvironment1.arn, testEnvironment2.arn], + name: "tf-test-batch-job-queue", + priority: 1, + state: "ENABLED", + }); + } +} + +``` + +### Job Queue with a fair share scheduling policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BatchJobQueue } from "./.gen/providers/aws/batch-job-queue"; +import { BatchSchedulingPolicy } from "./.gen/providers/aws/batch-scheduling-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new BatchSchedulingPolicy(this, "example", { + fairSharePolicy: { + computeReservation: 1, + shareDecaySeconds: 3600, + shareDistribution: [ + { + shareIdentifier: "A1*", + weightFactor: 0.1, + }, + ], + }, + name: "example", + }); + const awsBatchJobQueueExample = new BatchJobQueue(this, "example_1", { + computeEnvironments: [testEnvironment1.arn, testEnvironment2.arn], + name: "tf-test-batch-job-queue", + priority: 1, + schedulingPolicyArn: example.arn, + state: "ENABLED", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsBatchJobQueueExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Specifies the name of the job queue. +* `computeEnvironments` - (Required) Specifies the set of compute environments + mapped to a job queue and their order. The position of the compute environments + in the list will dictate the order. +* `priority` - (Required) The priority of the job queue. Job queues with a higher priority + are evaluated first when associated with the same compute environment. +* `schedulingPolicyArn` - (Optional) The ARN of the fair share scheduling policy. If this parameter is specified, the job queue uses a fair share scheduling policy. If this parameter isn't specified, the job queue uses a first in, first out (FIFO) scheduling policy. After a job queue is created, you can replace but can't remove the fair share scheduling policy. +* `state` - (Required) The state of the job queue. Must be one of: `enabled` or `disabled` +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of the job queue. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
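+
+Because the position of each entry in `computeEnvironments` dictates placement preference, the list order itself is meaningful configuration. The hand-written sketch below (not produced by `cdktf convert`; the ARNs are placeholders, and in practice you would reference resource attributes such as `testEnvironment1.arn`) makes that ordering explicit:
+
+```typescript
+// A minimal sketch: AWS Batch prefers the first compute environment in
+// the list and only places jobs on later entries when needed.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { BatchJobQueue } from "./.gen/providers/aws/batch-job-queue";
+class OrderedJobQueue extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new BatchJobQueue(this, "ordered_queue", {
+      computeEnvironments: [
+        // evaluated first
+        "arn:aws:batch:us-east-1:123456789012:compute-environment/primary",
+        // evaluated second
+        "arn:aws:batch:us-east-1:123456789012:compute-environment/overflow",
+      ],
+      name: "tf-ordered-batch-job-queue",
+      priority: 1,
+      state: "ENABLED",
+    });
+  }
+}
+```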
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Batch Job Queue using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Batch Job Queue using the `arn`. For example:
+
+```console
+% terraform import aws_batch_job_queue.test_queue arn:aws:batch:us-east-1:123456789012:job-queue/sample
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/batch_scheduling_policy.html.markdown b/website/docs/cdktf/typescript/r/batch_scheduling_policy.html.markdown
new file mode 100644
index 00000000000..9f9e778f967
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/batch_scheduling_policy.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "Batch"
+layout: "aws"
+page_title: "AWS: aws_batch_scheduling_policy"
+description: |-
+  Provides a Batch Scheduling Policy resource.
+---
+
+
+
+# Resource: aws_batch_scheduling_policy
+
+Provides a Batch Scheduling Policy resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { BatchSchedulingPolicy } from "./.gen/providers/aws/batch-scheduling-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new BatchSchedulingPolicy(this, "example", {
+      fairSharePolicy: {
+        computeReservation: 1,
+        shareDecaySeconds: 3600,
+        shareDistribution: [
+          {
+            shareIdentifier: "A1*",
+            weightFactor: 0.1,
+          },
+          {
+            shareIdentifier: "A2",
+            weightFactor: 0.2,
+          },
+        ],
+      },
+      name: "example",
+      tags: {
+        Name: "Example Batch Scheduling Policy",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `fairSharePolicy` - (Optional) A fair share policy block that specifies the `computeReservation`, `shareDecaySeconds`, and `shareDistribution` of the scheduling policy. The `fairSharePolicy` block is documented below.
+* `name` - (Required) Specifies the name of the scheduling policy.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+A `fairSharePolicy` block supports the following arguments:
+
+* `computeReservation` - (Optional) A value used to reserve some of the available maximum vCPU for fair share identifiers that have not yet been used. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html).
+* `shareDecaySeconds` - (Optional) The time period to use to calculate a fair share percentage for each fair share identifier in use, in seconds. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html).
+* `shareDistribution` - (Optional) One or more share distribution blocks which define the weights for the fair share identifiers for the fair share policy. For more information, see [FairsharePolicy](https://docs.aws.amazon.com/batch/latest/APIReference/API_FairsharePolicy.html). The `shareDistribution` block is documented below. + +A `shareDistribution` block supports the following arguments: + +* `shareIdentifier` - (Required) A fair share identifier or fair share identifier prefix. For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html). +* `weightFactor` - (Optional) The weight factor for the fair share identifier. For more information, see [ShareAttributes](https://docs.aws.amazon.com/batch/latest/APIReference/API_ShareAttributes.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of the scheduling policy. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Batch Scheduling Policy using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Batch Scheduling Policy using the `arn`. For example: + +```console +% terraform import aws_batch_scheduling_policy.test_policy arn:aws:batch:us-east-1:123456789012:scheduling-policy/sample +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/budgets_budget.html.markdown b/website/docs/cdktf/typescript/r/budgets_budget.html.markdown new file mode 100644 index 00000000000..48e8df71d2a --- /dev/null +++ b/website/docs/cdktf/typescript/r/budgets_budget.html.markdown @@ -0,0 +1,427 @@ +--- +subcategory: "Web Services Budgets" +layout: "aws" +page_title: "AWS: aws_budgets_budget" +description: |- + Provides a budgets budget resource. +--- + + + +# Resource: aws_budgets_budget + +Provides a budgets budget resource. Budgets use the cost visualisation provided by Cost Explorer to show you the status of your budgets, to provide forecasts of your estimated costs, and to track your AWS usage, including your free tier usage. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new BudgetsBudget(this, "ec2", { + budgetType: "COST", + costFilter: [ + { + name: "Service", + values: ["Amazon Elastic Compute Cloud - Compute"], + }, + ], + limitAmount: "1200", + limitUnit: "USD", + name: "budget-ec2-monthly", + notification: [ + { + comparisonOperator: "GREATER_THAN", + notificationType: "FORECASTED", + subscriberEmailAddresses: ["test@example.com"], + threshold: 100, + thresholdType: "PERCENTAGE", + }, + ], + timePeriodEnd: "2087-06-15_00:00", + timePeriodStart: "2017-07-01_00:00", + timeUnit: "MONTHLY", + }); + } +} + +``` + +Create a budget for *$100*. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget"; +interface MyConfig { + timeUnit: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new BudgetsBudget(this, "cost", { + budgetType: "COST", + limitAmount: "100", + limitUnit: "USD", + timeUnit: config.timeUnit, + }); + } +} + +``` + +Create a budget with planned budget limits. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget"; +interface MyConfig { + budgetType: any; + timeUnit: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new BudgetsBudget(this, "cost", { + plannedLimit: [ + { + amount: "100", + startTime: "2017-07-01_00:00", + unit: "USD", + }, + { + amount: "200", + startTime: "2017-08-01_00:00", + unit: "USD", + }, + ], + budgetType: config.budgetType, + timeUnit: config.timeUnit, + }); + } +} + +``` + +Create a budget for s3 with a limit of *3 GB* of storage. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget"; +interface MyConfig { + timeUnit: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new BudgetsBudget(this, "s3", { + budgetType: "USAGE", + limitAmount: "3", + limitUnit: "GB", + timeUnit: config.timeUnit, + }); + } +} + +``` + +Create a Savings Plan Utilization Budget + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget"; +interface MyConfig { + timeUnit: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new BudgetsBudget(this, "savings_plan_utilization", { + budgetType: "SAVINGS_PLANS_UTILIZATION", + costTypes: { + includeCredit: false, + includeDiscount: false, + includeOtherSubscription: false, + includeRecurring: false, + includeRefund: false, + includeSubscription: true, + includeSupport: false, + includeTax: false, + includeUpfront: false, + useBlended: false, + }, + limitAmount: "100.0", + limitUnit: "PERCENTAGE", + timeUnit: config.timeUnit, + }); + } +} + +``` + +Create a RI Utilization Budget + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget"; +interface MyConfig { + timeUnit: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new BudgetsBudget(this, "ri_utilization", { + budgetType: "RI_UTILIZATION", + costFilter: [ + { + name: "Service", + values: ["Amazon Relational Database Service"], + }, + ], + costTypes: { + includeCredit: false, + includeDiscount: false, + includeOtherSubscription: false, + includeRecurring: false, + includeRefund: false, + includeSubscription: true, + includeSupport: false, + includeTax: false, + includeUpfront: false, + useBlended: false, + }, + limitAmount: "100.0", + limitUnit: "PERCENTAGE", + timeUnit: config.timeUnit, + }); + } +} + +``` + +Create a Cost Filter using Resource Tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget"; +interface MyConfig { + budgetType: any; + timeUnit: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new BudgetsBudget(this, "cost", { + costFilter: [ + { + name: "TagKeyValue", + values: ["TagKey$TagValue"], + }, + ], + budgetType: config.budgetType, + timeUnit: config.timeUnit, + }); + } +} + +``` + +Create a cost_filter using resource tags, obtaining the tag value from a terraform variable + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget";
+interface MyConfig {
+  budgetType: any;
+  timeUnit: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new BudgetsBudget(this, "cost", {
+      costFilter: [
+        {
+          name: "TagKeyValue",
+          values: ["TagKey$${var.TagValue}"],
+        },
+      ],
+      budgetType: config.budgetType,
+      timeUnit: config.timeUnit,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to the [AWS official
+documentation](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/data-type-budget.html).
+
+This resource supports the following arguments:
+
+* `accountId` - (Optional) The ID of the target account for budget. Will use current user's account_id by default if omitted.
+* `autoAdjustData` - (Optional) Object containing [Auto Adjust Data](#auto-adjust-data), which determines the budget amount for an auto-adjusting budget.
+* `name` - (Optional) The name of a budget. Unique within accounts.
+* `namePrefix` - (Optional) The prefix of the name of a budget. Unique within accounts.
+* `budgetType` - (Required) Whether this budget tracks monetary cost or usage.
+* `costFilter` - (Optional) A list of [CostFilter](#cost-filter) name/value pairs to apply to the budget.
+* `costTypes` - (Optional) Object containing [CostTypes](#cost-types), the types of cost included in a budget, such as tax and subscriptions.
+* `limitAmount` - (Required) The amount of cost or usage being measured for a budget.
+* `limitUnit` - (Required) The unit of measurement used for the budget forecast, actual spend, or budget threshold, such as dollars or GB. See [Spend](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/data-type-spend.html) documentation.
+* `timePeriodEnd` - (Optional) The end of the time period covered by the budget. There are no restrictions on the end date. Format: `2017-01-01_12:00`.
+* `timePeriodStart` - (Optional) The start of the time period covered by the budget. If you don't specify a start date, AWS defaults to the start of your chosen time period. The start date must come before the end date. Format: `2017-01-01_12:00`.
+* `timeUnit` - (Required) The length of time until a budget resets the actual and forecasted spend. Valid values: `MONTHLY`, `QUARTERLY`, `ANNUALLY`, and `DAILY`.
+* `notification` - (Optional) Object containing [Budget Notifications](#budget-notification). Can be used multiple times to define more than one budget notification.
+* `plannedLimit` - (Optional) Object containing [Planned Budget Limits](#planned-budget-limits). Can be used multiple times to plan more than one budget limit. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of resource.
+* `arn` - The ARN of the budget.
+
+### Auto Adjust Data
+
+The parameters that determine the budget amount for an auto-adjusting budget.
+
+* `autoAdjustType` - (Required) The string that defines whether your budget auto-adjusts based on historical or forecasted data. Valid values: `FORECAST`, `HISTORICAL`.
+* `historicalOptions` - (Optional) Configuration block of [Historical Options](#historical-options).
Required for `autoAdjustType` of `HISTORICAL`. This block defines the historical data that your auto-adjusting budget is based on.
+* `lastAutoAdjustTime` - (Optional) The last time that your budget was auto-adjusted.
+
+### Historical Options
+
+* `budgetAdjustmentPeriod` - (Required) The number of budget periods included in the moving-average calculation that determines your auto-adjusted budget amount.
+* `lookbackAvailablePeriods` - (Optional) The integer that describes how many budget periods in your `budgetAdjustmentPeriod` are included in the calculation of your current budget limit. If the first budget period in your `budgetAdjustmentPeriod` has no cost data, then that budget period isn’t included in the average that determines your budget limit. You can’t set your own `lookbackAvailablePeriods`; the value is automatically calculated from the `budgetAdjustmentPeriod` and your historical cost data.
+
+### Cost Types
+
+Valid keys for the `costTypes` parameter.
+
+* `includeCredit` - Whether to include credits in the cost budget. Defaults to `true`.
+* `includeDiscount` - Whether a budget includes discounts. Defaults to `true`.
+* `includeOtherSubscription` - Whether to include other subscription costs in the cost budget. Defaults to `true`.
+* `includeRecurring` - Whether to include recurring costs in the cost budget. Defaults to `true`.
+* `includeRefund` - Whether to include refunds in the cost budget. Defaults to `true`.
+* `includeSubscription` - Whether to include subscriptions in the cost budget. Defaults to `true`.
+* `includeSupport` - Whether to include support costs in the cost budget. Defaults to `true`.
+* `includeTax` - Whether to include tax in the cost budget. Defaults to `true`.
+* `includeUpfront` - Whether to include upfront costs in the cost budget. Defaults to `true`.
+* `useAmortized` - Whether a budget uses the amortized rate. Defaults to `false`.
+* `useBlended` - Whether to use blended costs in the cost budget. Defaults to `false`.
+
+Refer to [AWS CostTypes documentation](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_CostTypes.html) for further detail.
+
+### Cost Filter
+
+Based on your choice of budget type, you can choose one or more of the available budget filters.
+
+* `PurchaseType`
+* `UsageTypeGroup`
+* `Service`
+* `Operation`
+* `UsageType`
+* `BillingEntity`
+* `CostCategory`
+* `LinkedAccount`
+* `TagKeyValue`
+* `LegalEntityName`
+* `InvoicingEntity`
+* `AZ`
+* `Region`
+* `InstanceType`
+
+Refer to [AWS CostFilter documentation](https://docs.aws.amazon.com/cost-management/latest/userguide/budgets-create-filters.html) for further detail.
+
+### Budget Notification
+
+Valid keys for the `notification` parameter.
+
+* `comparisonOperator` - (Required) Comparison operator to use to evaluate the condition. Can be `LESS_THAN`, `EQUAL_TO` or `GREATER_THAN`.
+* `threshold` - (Required) Threshold when the notification should be sent.
+* `thresholdType` - (Required) What kind of threshold is defined. Can be `PERCENTAGE` or `ABSOLUTE_VALUE`.
+* `notificationType` - (Required) What kind of budget value to notify on. Can be `ACTUAL` or `FORECASTED`.
+* `subscriberEmailAddresses` - (Optional) Email addresses to notify. Either this or `subscriberSnsTopicArns` is required.
+* `subscriberSnsTopicArns` - (Optional) SNS topics to notify. Either this or `subscriberEmailAddresses` is required.
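+
+As a hand-written illustration of the SNS delivery option above (not produced by `cdktf convert`; the topic ARN is a placeholder, and the topic's access policy must allow `budgets.amazonaws.com` to publish), the sketch below sends an actual-spend alert at 80% of the limit to an SNS topic instead of email subscribers:
+
+```typescript
+// A minimal sketch: a notification delivered via `subscriberSnsTopicArns`
+// rather than `subscriberEmailAddresses`.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget";
+class SnsNotifiedBudget extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new BudgetsBudget(this, "sns_notified", {
+      budgetType: "COST",
+      limitAmount: "500",
+      limitUnit: "USD",
+      name: "budget-with-sns-notification",
+      notification: [
+        {
+          comparisonOperator: "GREATER_THAN",
+          notificationType: "ACTUAL",
+          subscriberSnsTopicArns: [
+            // placeholder topic ARN
+            "arn:aws:sns:us-east-1:123456789012:budget-alarms",
+          ],
+          threshold: 80,
+          thresholdType: "PERCENTAGE",
+        },
+      ],
+      timeUnit: "MONTHLY",
+    });
+  }
+}
+```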
+
+### Planned Budget Limits
+
+Valid keys for the `plannedLimit` parameter.
+
+* `startTime` - (Required) The start time of the budget limit. Format: `2017-01-01_12:00`. See [PlannedBudgetLimits](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_budgets_Budget.html#awscostmanagement-Type-budgets_Budget-PlannedBudgetLimits) documentation.
+* `amount` - (Required) The amount of cost or usage being measured for a budget.
+* `unit` - (Required) The unit of measurement used for the budget forecast, actual spend, or budget threshold, such as dollars or GB. See [Spend](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/data-type-spend.html) documentation.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import budgets using `accountId:budgetName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import budgets using `accountId:budgetName`. For example:
+
+```console
+% terraform import aws_budgets_budget.myBudget 123456789012:myBudget
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/budgets_budget_action.html.markdown b/website/docs/cdktf/typescript/r/budgets_budget_action.html.markdown
new file mode 100644
index 00000000000..f9750409352
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/budgets_budget_action.html.markdown
@@ -0,0 +1,205 @@
+---
+subcategory: "Web Services Budgets"
+layout: "aws"
+page_title: "AWS: aws_budgets_budget_action"
+description: |-
+  Provides a budget action resource.
+---
+
+
+
+# Resource: aws_budgets_budget_action
+
+Provides a budget action resource. Budget actions are cost savings controls that run either automatically on your behalf or by using a workflow approval process.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { BudgetsBudget } from "./.gen/providers/aws/budgets-budget";
+import { BudgetsBudgetAction } from "./.gen/providers/aws/budgets-budget-action";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition";
+import { IamPolicy } from "./.gen/providers/aws/iam-policy";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new BudgetsBudget(this, "example", {
+      budgetType: "USAGE",
+      limitAmount: "10.0",
+      limitUnit: "dollars",
+      name: "example",
+      timePeriodStart: "2006-01-02_15:04",
+      timeUnit: "MONTHLY",
+    });
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_1",
+      {
+        statement: [
+          {
+            actions: ["ec2:Describe*"],
+            effect: "Allow",
+            resources: ["*"],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const current = new DataAwsPartition(this, "current", {});
+    const awsIamPolicyExample = new IamPolicy(this, "example_3", {
+      description: "My example policy",
+      name: "example",
+      policy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamPolicyExample.overrideLogicalId("example");
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["budgets.${" + current.dnsSuffix + "}"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const awsIamRoleExample = new IamRole(this, "example_5", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "example",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    const awsBudgetsBudgetActionExample = new BudgetsBudgetAction(
+      this,
+      "example_6",
+      {
+        actionThreshold: {
+          actionThresholdType: "ABSOLUTE_VALUE",
+          actionThresholdValue: 100,
+        },
+        actionType: "APPLY_IAM_POLICY",
+        approvalModel: "AUTOMATIC",
+        budgetName: example.name,
+        definition: {
+          iamActionDefinition: {
+            policyArn: Token.asString(awsIamPolicyExample.arn),
+            roles: [Token.asString(awsIamRoleExample.name)],
+          },
+        },
+        executionRoleArn: Token.asString(awsIamRoleExample.arn),
+        notificationType: "ACTUAL",
+        subscriber: [
+          {
+            address: "example@example.example",
+            subscriptionType: "EMAIL",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsBudgetsBudgetActionExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accountId` - (Optional) The ID of the target account for budget. Will use current user's account_id by default if omitted.
+* `budgetName` - (Required) The name of a budget.
+* `actionThreshold` - (Required) The trigger threshold of the action. See [Action Threshold](#action-threshold).
+* `actionType` - (Required) The type of action. This defines the type of tasks that can be carried out by this action. This field also determines the format for definition. Valid values are `APPLY_IAM_POLICY`, `APPLY_SCP_POLICY`, and `RUN_SSM_DOCUMENTS`.
+* `approvalModel` - (Required) This specifies if the action needs manual or automatic approval. Valid values are `AUTOMATIC` and `MANUAL`.
+* `definition` - (Required) Specifies all of the type-specific parameters. See [Definition](#definition).
+* `executionRoleArn` - (Required) The role passed for action execution and reversion. Roles and actions must be in the same account.
+* `notificationType` - (Required) The type of a notification. Valid values are `ACTUAL` or `FORECASTED`.
+* `subscriber` - (Required) A list of subscribers. See [Subscriber](#subscriber).
+
+### Action Threshold
+
+* `actionThresholdType` - (Required) The type of threshold for a notification. Valid values are `PERCENTAGE` or `ABSOLUTE_VALUE`.
+* `actionThresholdValue` - (Required) The threshold of a notification.
+
+### Subscriber
+
+* `address` - (Required) The address that AWS sends budget notifications to, either an SNS topic or an email.
+* `subscriptionType` - (Required) The type of notification that AWS sends to a subscriber. Valid values are `SNS` or `EMAIL`.
+
+### Definition
+
+* `iamActionDefinition` - (Optional) The AWS Identity and Access Management (IAM) action definition details. See [IAM Action Definition](#iam-action-definition).
+* `ssmActionDefinition` - (Optional) The AWS Systems Manager (SSM) action definition details. See [SSM Action Definition](#ssm-action-definition).
+* `scpActionDefinition` - (Optional) The service control policies (SCPs) action definition details. See [SCP Action Definition](#scp-action-definition).
+
+#### IAM Action Definition
+
+* `policyArn` - (Required) The Amazon Resource Name (ARN) of the policy to be attached.
+* `groups` - (Optional) A list of groups to be attached. There must be at least one group.
+* `roles` - (Optional) A list of roles to be attached. There must be at least one role.
+* `users` - (Optional) A list of users to be attached. There must be at least one user.
+
+#### SCP Action Definition
+
+* `policyId` - (Required) The policy ID attached.
+* `targetIds` - (Optional) A list of target IDs.
+
+#### SSM Action Definition
+
+* `actionSubType` - (Required) The action subtype. Valid values are `STOP_EC2_INSTANCES` or `STOP_RDS_INSTANCES`.
+* `instanceIds` - (Required) The EC2 and RDS instance IDs.
+* `region` - (Required) The Region to run the SSM document.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `actionId` - The id of the budget action.
+* `id` - ID of resource.
+* `arn` - The ARN of the budget action.
+* `status` - The status of the budget action.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import budget actions using `accountId:actionId:budgetName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import budget actions using `accountId:actionId:budgetName`. For example:
+
+```console
+% terraform import aws_budgets_budget_action.myBudget 123456789012:some-id:myBudget
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ce_anomaly_monitor.html.markdown b/website/docs/cdktf/typescript/r/ce_anomaly_monitor.html.markdown
new file mode 100644
index 00000000000..d92c453e13f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ce_anomaly_monitor.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_anomaly_monitor"
+description: |-
+  Provides a CE Cost Anomaly Monitor
+---
+
+
+
+# Resource: aws_ce_anomaly_monitor
+
+Provides a CE Anomaly Monitor.
+
+## Example Usage
+
+There are two main types of a Cost Anomaly Monitor: `DIMENSIONAL` and `CUSTOM`.
+
+### Dimensional Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CeAnomalyMonitor } from "./.gen/providers/aws/ce-anomaly-monitor";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CeAnomalyMonitor(this, "service_monitor", {
+      monitorDimension: "SERVICE",
+      monitorType: "DIMENSIONAL",
+      name: "AWSServiceMonitor",
+    });
+  }
+}
+
+```
+
+### Custom Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CeAnomalyMonitor } from "./.gen/providers/aws/ce-anomaly-monitor";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CeAnomalyMonitor(this, "test", {
+      monitorSpecification: Token.asString(
+        Fn.jsonencode({
+          // `null` (not the string "null") matches the upstream HCL example;
+          // the converter had rendered these as string literals.
+          And: null,
+          CostCategories: null,
+          Dimensions: null,
+          Not: null,
+          Or: null,
+          Tags: {
+            Key: "CostCenter",
+            MatchOptions: null,
+            Values: ["10000"],
+          },
+        })
+      ),
+      monitorType: "CUSTOM",
+      name: "AWSCustomAnomalyMonitor",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the monitor.
+* `monitorType` - (Required) The type of monitor. Valid values: `DIMENSIONAL` | `CUSTOM`.
+* `monitorDimension` - (Required, if `monitorType` is `DIMENSIONAL`) The dimensions to evaluate. Valid values: `SERVICE`.
+* `monitorSpecification` - (Required, if `monitorType` is `CUSTOM`) A valid JSON representation for the [Expression](https://docs.aws.amazon.com/aws-cost-management/latest/APIReference/API_Expression.html) object.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the anomaly monitor.
+* `id` - Unique ID of the anomaly monitor. Same as `arn`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsCeAnomalyMonitor` using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsCeAnomalyMonitor` using the `id`. 
For example: + +```console +% terraform import aws_ce_anomaly_monitor.example costAnomalyMonitorARN +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ce_anomaly_subscription.html.markdown b/website/docs/cdktf/typescript/r/ce_anomaly_subscription.html.markdown new file mode 100644 index 00000000000..96c2109a250 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ce_anomaly_subscription.html.markdown @@ -0,0 +1,267 @@ +--- +subcategory: "CE (Cost Explorer)" +layout: "aws" +page_title: "AWS: aws_ce_anomaly_subscription" +description: |- + Provides a CE Cost Anomaly Subscription +--- + + + +# Resource: aws_ce_anomaly_subscription + +Provides a CE Anomaly Subscription. + +## Example Usage + +### Basic Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CeAnomalyMonitor } from "./.gen/providers/aws/ce-anomaly-monitor"; +import { CeAnomalySubscription } from "./.gen/providers/aws/ce-anomaly-subscription"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new CeAnomalyMonitor(this, "test", { + monitorDimension: "SERVICE", + monitorType: "DIMENSIONAL", + name: "AWSServiceMonitor", + }); + const awsCeAnomalySubscriptionTest = new CeAnomalySubscription( + this, + "test_1", + { + frequency: "DAILY", + monitorArnList: [test.arn], + name: "DAILYSUBSCRIPTION", + subscriber: [ + { + address: "abc@example.com", + type: "EMAIL", + }, + ], + threshold: 100, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCeAnomalySubscriptionTest.overrideLogicalId("test"); + } +} + +``` + +### Threshold Expression + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CeAnomalySubscription } from "./.gen/providers/aws/ce-anomaly-subscription"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CeAnomalySubscription(this, "test", { + frequency: "DAILY", + monitorArnList: [Token.asString(awsCeAnomalyMonitorTest.arn)], + name: "AWSServiceMonitor", + subscriber: [ + { + address: "abc@example.com", + type: "EMAIL", + }, + ], + thresholdExpression: { + dimension: { + key: "ANOMALY_TOTAL_IMPACT_ABSOLUTE", + matchOptions: ["GREATER_THAN_OR_EQUAL"], + values: ["100.0"], + }, + }, + }); + } +} + +``` + +### SNS Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { CeAnomalyMonitor } from "./.gen/providers/aws/ce-anomaly-monitor";
+import { CeAnomalySubscription } from "./.gen/providers/aws/ce-anomaly-subscription";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+import { SnsTopicPolicy } from "./.gen/providers/aws/sns-topic-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const anomalyMonitor = new CeAnomalyMonitor(this, "anomaly_monitor", {
+      monitorDimension: "SERVICE",
+      monitorType: "DIMENSIONAL",
+      name: "AWSServiceMonitor",
+    });
+    const costAnomalyUpdates = new SnsTopic(this, "cost_anomaly_updates", {
+      name: "CostAnomalyUpdates",
+    });
+    const snsTopicPolicy = new DataAwsIamPolicyDocument(
+      this,
+      "sns_topic_policy",
+      {
+        policyId: "__default_policy_ID",
+        statement: [
+          {
+            actions: ["SNS:Publish"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["costalerts.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+            resources: [costAnomalyUpdates.arn],
+            sid: "AWSAnomalyDetectionSNSPublishingPermissions",
+          },
+          {
+            actions: [
+              "SNS:Subscribe",
+              "SNS:SetTopicAttributes",
+              "SNS:RemovePermission",
+              "SNS:Receive",
+              "SNS:Publish",
+              "SNS:ListSubscriptionsByTopic",
+              "SNS:GetTopicAttributes",
+              "SNS:DeleteTopic",
+              "SNS:AddPermission",
+            ],
+            condition: [
+              {
+                test: "StringEquals",
+                values: [accountId.stringValue],
+                variable: "AWS:SourceOwner",
+              },
+            ],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["*"],
+                type: "AWS",
+              },
+            ],
+            resources: [costAnomalyUpdates.arn],
+            sid: "__default_statement_ID",
+          },
+        ],
+      }
+    );
+    const defaultVar = new SnsTopicPolicy(this, "default", {
+      arn: costAnomalyUpdates.arn,
+      policy: Token.asString(snsTopicPolicy.json),
+    });
+    new CeAnomalySubscription(this, "realtime_subscription", {
+      dependsOn: [defaultVar],
+      frequency: "IMMEDIATE",
+      monitorArnList: [anomalyMonitor.arn],
+      name: "RealtimeAnomalySubscription",
+      subscriber: [
+        {
+          address: costAnomalyUpdates.arn,
+          type: "SNS",
+        },
+      ],
+      threshold: 0,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accountId` - (Optional) The unique identifier for the AWS account in which the anomaly subscription ought to be created.
+* `frequency` - (Required) The frequency that anomaly reports are sent. Valid Values: `DAILY` | `IMMEDIATE` | `WEEKLY`.
+* `monitorArnList` - (Required) A list of cost anomaly monitors.
+* `name` - (Required) The name for the subscription.
+* `subscriber` - (Required) A subscriber configuration. Multiple subscribers can be defined.
+    * `type` - (Required) The type of subscription. Valid Values: `SNS` | `EMAIL`.
+    * `address` - (Required) The address of the subscriber. If type is `SNS`, this will be the ARN of the SNS topic. If type is `EMAIL`, this will be the destination email address.
+* `threshold` - (Optional) The dollar value that triggers a notification if the threshold is exceeded. Deprecated; use `thresholdExpression` instead.
+* `thresholdExpression` - (Optional) An Expression object used to specify the anomalies that you want to generate alerts for. See [Threshold Expression](#threshold-expression); a combined sketch follows this list.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
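+
+As a hedged illustration of combining criteria (this is not one of the generated examples above; the `ANOMALY_TOTAL_IMPACT_PERCENTAGE` key and the placeholder monitor ARN are assumptions), a subscription can `and` an absolute and a percentage impact threshold together:
+
+```typescript
+// A minimal sketch: alert only when an anomaly's total impact is at least
+// $100 AND at least 10%. The monitor ARN below is a placeholder.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CeAnomalySubscription } from "./.gen/providers/aws/ce-anomaly-subscription";
+class CombinedThresholdStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CeAnomalySubscription(this, "combined", {
+      frequency: "DAILY",
+      monitorArnList: ["arn:aws:ce::123456789012:anomalymonitor/example"],
+      name: "CombinedThresholdSubscription",
+      subscriber: [
+        {
+          address: "abc@example.com",
+          type: "EMAIL",
+        },
+      ],
+      thresholdExpression: {
+        // Both nested dimensions must match for an alert to fire.
+        and: [
+          {
+            dimension: {
+              key: "ANOMALY_TOTAL_IMPACT_ABSOLUTE",
+              matchOptions: ["GREATER_THAN_OR_EQUAL"],
+              values: ["100"],
+            },
+          },
+          {
+            dimension: {
+              key: "ANOMALY_TOTAL_IMPACT_PERCENTAGE",
+              matchOptions: ["GREATER_THAN_OR_EQUAL"],
+              values: ["10"],
+            },
+          },
+        ],
+      },
+    });
+  }
+}
+```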
+
+### Threshold Expression
+
+* `and` - (Optional) Return results that match all of the nested [Dimension](#dimension) objects.
+* `costCategory` - (Optional) Configuration block for the filter that's based on Cost Category values. See [Cost Category](#cost-category) below.
+* `dimension` - (Optional) Configuration block for the specific [Dimension](#dimension) to filter on.
+* `not` - (Optional) Return results that do not match the nested [Dimension](#dimension) object.
+* `or` - (Optional) Return results that match any of the nested [Dimension](#dimension) objects.
+* `tags` - (Optional) Configuration block for the specific Tag to filter on. See [Tags](#tags) below.
+
+### Cost Category
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. `MatchOptions` is only applicable for actions related to Cost Category. The default values for `MatchOptions` are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### Dimension
+
+* `key` - (Optional) Name of the dimension.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. The default values for `MatchOptions` are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific values of the dimension.
+
+### Tags
+
+* `key` - (Optional) Key for the tag.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. The default values for `MatchOptions` are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific values of the tag.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the anomaly subscription.
+* `id` - Unique ID of the anomaly subscription. Same as `arn`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsCeAnomalySubscription` using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsCeAnomalySubscription` using the `id`. For example:
+
+```console
+% terraform import aws_ce_anomaly_subscription.example AnomalySubscriptionARN
+```
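+
+If you manage the resource with CDKTF and prefer to express the import in code, newer cdktf releases expose an `importFrom` method on resources; a minimal sketch (assumes cdktf 0.15 or later; the ARN is a placeholder, and the configuration must mirror the existing subscription):
+
+```typescript
+// A hedged sketch using cdktf's importFrom (cdktf >= 0.15 assumed).
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CeAnomalySubscription } from "./.gen/providers/aws/ce-anomaly-subscription";
+class ImportedSubscriptionStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const subscription = new CeAnomalySubscription(this, "example", {
+      frequency: "DAILY",
+      monitorArnList: ["arn:aws:ce::123456789012:anomalymonitor/example"],
+      name: "DAILYSUBSCRIPTION",
+      subscriber: [
+        {
+          address: "abc@example.com",
+          type: "EMAIL",
+        },
+      ],
+    });
+    // Equivalent of `terraform import` for this resource instance.
+    subscription.importFrom("AnomalySubscriptionARN");
+  }
+}
+```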
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ce_cost_allocation_tag.html.markdown b/website/docs/cdktf/typescript/r/ce_cost_allocation_tag.html.markdown
new file mode 100644
index 00000000000..30536b16192
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ce_cost_allocation_tag.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_cost_allocation_tag"
+description: |-
+  Provides a CE Cost Allocation Tag
+---
+
+
+
+# Resource: aws_ce_cost_allocation_tag
+
+Provides a CE Cost Allocation Tag.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CeCostAllocationTag } from "./.gen/providers/aws/ce-cost-allocation-tag";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CeCostAllocationTag(this, "example", {
+      status: "Active",
+      tagKey: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `tagKey` - (Required) The key for the cost allocation tag.
+* `status` - (Required) The status of a cost allocation tag. Valid values are `Active` and `Inactive`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The key for the cost allocation tag.
+* `type` - The type of cost allocation tag.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsCeCostAllocationTag` using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsCeCostAllocationTag` using the `id`. For example:
+
+```console
+% terraform import aws_ce_cost_allocation_tag.example key
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ce_cost_category.html.markdown b/website/docs/cdktf/typescript/r/ce_cost_category.html.markdown
new file mode 100644
index 00000000000..319109b3c78
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ce_cost_category.html.markdown
@@ -0,0 +1,167 @@
+---
+subcategory: "CE (Cost Explorer)"
+layout: "aws"
+page_title: "AWS: aws_ce_cost_category"
+description: |-
+  Provides a CE Cost Category Definition
+---
+
+
+
+# Resource: aws_ce_cost_category
+
+Provides a CE Cost Category.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */
+import { CeCostCategory } from "./.gen/providers/aws/ce-cost-category";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CeCostCategory(this, "test", {
+      name: "NAME",
+      rule: [
+        {
+          rule: {
+            dimension: {
+              key: "LINKED_ACCOUNT_NAME",
+              matchOptions: ["ENDS_WITH"],
+              values: ["-prod"],
+            },
+          },
+          value: "production",
+        },
+        {
+          rule: {
+            dimension: {
+              key: "LINKED_ACCOUNT_NAME",
+              matchOptions: ["ENDS_WITH"],
+              values: ["-stg"],
+            },
+          },
+          value: "staging",
+        },
+        {
+          rule: {
+            dimension: {
+              key: "LINKED_ACCOUNT_NAME",
+              matchOptions: ["ENDS_WITH"],
+              values: ["-dev"],
+            },
+          },
+          value: "testing",
+        },
+      ],
+      ruleVersion: "CostCategoryExpression.v1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Unique name for the Cost Category.
+* `rule` - (Required) Configuration block for the Cost Category rules used to categorize costs. See below.
+* `ruleVersion` - (Required) Rule schema version in this particular Cost Category.
+* `effectiveStart` - (Optional) The Cost Category's effective start date. It can only be a billing start date (first day of the month). If the date isn't provided, it's the first day of the current month. Dates can't be before the previous twelve months, or in the future. For example `2022-11-01T00:00:00Z`.
+
+The following arguments are optional:
+
+* `defaultValue` - (Optional) Default value for the cost category.
+* `splitChargeRule` - (Optional) Configuration block for the split charge rules used to allocate your charges between your Cost Category values. See below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `rule`
+
+* `inheritedValue` - (Optional) Configuration block for the value the line item is categorized as if the line item contains the matched dimension. See below.
+* `rule` - (Optional) Configuration block for the `expression` object used to categorize costs. See below.
+* `type` - (Optional) You can define the CostCategoryRule rule type as either `REGULAR` or `INHERITED_VALUE`.
+* `value` - (Optional) Default value for the cost category.
+
+### `inheritedValue`
+
+* `dimensionKey` - (Optional) Key to extract cost category values.
+* `dimensionName` - (Optional) Name of the dimension that's used to group costs. If you specify `LINKED_ACCOUNT_NAME`, the cost category value is based on account name. If you specify `TAG`, the cost category value will be based on the value of the specified tag key. Valid values are `LINKED_ACCOUNT_NAME` and `TAG`.
+
+### `rule`
+
+* `and` - (Optional) Return results that match all of the nested `dimension` objects. A combined sketch follows this list.
+* `costCategory` - (Optional) Configuration block for the filter that's based on `costCategory` values. See below.
+* `dimension` - (Optional) Configuration block for the specific `dimension` to use for `expression`. See below.
+* `not` - (Optional) Return results that do not match the nested `dimension` object.
+* `or` - (Optional) Return results that match any of the nested `dimension` objects.
+* `tags` - (Optional) Configuration block for the specific `tag` to use for `expression`. See below.
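+
+The nested expression blocks can be combined; as a hedged sketch (the dimension keys and values here are illustrative assumptions, not taken from the example above), a rule can `and` two dimensions together:
+
+```typescript
+// A minimal sketch: categorize costs that are in production accounts AND in
+// a specific region. Dimension keys and values are illustrative assumptions.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CeCostCategory } from "./.gen/providers/aws/ce-cost-category";
+class NestedRuleStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CeCostCategory(this, "nested", {
+      name: "ProdUsEast1",
+      ruleVersion: "CostCategoryExpression.v1",
+      rule: [
+        {
+          value: "production-us-east-1",
+          rule: {
+            // Both nested dimensions must match for the rule to apply.
+            and: [
+              {
+                dimension: {
+                  key: "LINKED_ACCOUNT_NAME",
+                  matchOptions: ["ENDS_WITH"],
+                  values: ["-prod"],
+                },
+              },
+              {
+                dimension: {
+                  key: "REGION",
+                  matchOptions: ["EQUALS"],
+                  values: ["us-east-1"],
+                },
+              },
+            ],
+          },
+        },
+      ],
+    });
+  }
+}
+```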
+
+### `costCategory`
+
+* `key` - (Optional) Unique name of the Cost Category.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. `MatchOptions` is only applicable for actions related to Cost Category. The default values for `MatchOptions` are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific value of the Cost Category.
+
+### `dimension`
+
+* `key` - (Optional) Name of the dimension.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. The default values for `MatchOptions` are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific values of the dimension.
+
+### `tags`
+
+* `key` - (Optional) Key for the tag.
+* `matchOptions` - (Optional) Match options that you can use to filter your results. The default values for `MatchOptions` are `EQUALS` and `CASE_SENSITIVE`. Valid values are: `EQUALS`, `ABSENT`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CASE_SENSITIVE`, `CASE_INSENSITIVE`.
+* `values` - (Optional) Specific values of the tag.
+
+### `splitChargeRule`
+
+* `method` - (Required) Method that's used to define how to split your source costs across your targets. Valid values are `FIXED`, `PROPORTIONAL`, `EVEN`.
+* `parameter` - (Optional) Configuration block for the parameters for a split charge method. This is only required for the `FIXED` method. See below.
+* `source` - (Required) Cost Category value that you want to split.
+* `targets` - (Required) Cost Category values that you want to split costs across. These values can't be used as a source in other split charge rules.
+
+### `parameter`
+
+* `type` - (Optional) Parameter type.
+* `values` - (Optional) Parameter values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cost category.
+* `effectiveEnd` - Effective end date of your Cost Category.
+* `id` - Unique ID of the cost category.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsCeCostCategory` using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsCeCostCategory` using the id. 
For example:
+
+```console
+% terraform import aws_ce_cost_category.example costCategoryARN
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/chime_voice_connector.html.markdown b/website/docs/cdktf/typescript/r/chime_voice_connector.html.markdown
new file mode 100644
index 00000000000..405f9de2b5c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/chime_voice_connector.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Chime"
+layout: "aws"
+page_title: "AWS: aws_chime_voice_connector"
+description: |-
+  Enables you to connect your phone system to the telephone network at a substantial cost savings by using SIP trunking.
+---
+
+
+
+# Resource: aws_chime_voice_connector
+
+Enables you to connect your phone system to the telephone network at a substantial cost savings by using SIP trunking.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ChimeVoiceConnector(this, "test", {
+      awsRegion: "us-east-1",
+      name: "connector-test-1",
+      requireEncryption: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the Amazon Chime Voice Connector.
+* `requireEncryption` - (Required) When enabled, requires encryption for the Amazon Chime Voice Connector.
+
+The following arguments are optional:
+
+* `awsRegion` - (Optional) The AWS Region in which the Amazon Chime Voice Connector is created. Default value: `us-east-1`
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN (Amazon Resource Name) of the Amazon Chime Voice Connector.
+* `outboundHostName` - The outbound host name for the Amazon Chime Voice Connector.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Chime Voice Connector using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a Chime Voice Connector using the name. 
For example:
+
+```console
+% terraform import aws_chime_voice_connector.test example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/chime_voice_connector_group.html.markdown b/website/docs/cdktf/typescript/r/chime_voice_connector_group.html.markdown
new file mode 100644
index 00000000000..d0b8c969ce4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/chime_voice_connector_group.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "Chime"
+layout: "aws"
+page_title: "AWS: aws_chime_voice_connector_group"
+description: |-
+  Creates an Amazon Chime Voice Connector group under the administrator's AWS account.
+---
+
+
+
+# Resource: aws_chime_voice_connector_group
+
+Creates an Amazon Chime Voice Connector group under the administrator's AWS account. You can associate Amazon Chime Voice Connectors with the Amazon Chime Voice Connector group by including VoiceConnectorItems in the request.
+
+You can include Amazon Chime Voice Connectors from different AWS Regions in your group. This creates a fault-tolerant mechanism for fallback in case of availability events.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector";
+import { ChimeVoiceConnectorGroup } from "./.gen/providers/aws/chime-voice-connector-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const vc1 = new ChimeVoiceConnector(this, "vc1", {
+      awsRegion: "us-east-1",
+      name: "connector-test-1",
+      requireEncryption: true,
+    });
+    const vc2 = new ChimeVoiceConnector(this, "vc2", {
+      awsRegion: "us-west-2",
+      name: "connector-test-2",
+      requireEncryption: true,
+    });
+    new ChimeVoiceConnectorGroup(this, "group", {
+      connector: [
+        {
+          priority: 1,
+          voiceConnectorId: vc1.id,
+        },
+        {
+          priority: 3,
+          voiceConnectorId: vc2.id,
+        },
+      ],
+      name: "test-group",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Amazon Chime Voice Connector group.
+* `connector` - (Optional) The Amazon Chime Voice Connectors to route inbound calls to.
+
+### `connector`
+
+For Amazon Chime Voice Connector groups, the Amazon Chime Voice Connectors to which to route inbound calls. Includes priority configuration settings. Limit: 3 VoiceConnectorItems per Amazon Chime Voice Connector group.
+
+* `voiceConnectorId` - (Required) The Amazon Chime Voice Connector ID.
+* `priority` - (Required) The priority associated with the Amazon Chime Voice Connector, with 1 being the highest priority. Higher priority Amazon Chime Voice Connectors are attempted first.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Chime Voice Connector group ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Chime Voice Connector Group using the name. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a Chime Voice Connector Group using the name. For example:
+
+```console
+% terraform import aws_chime_voice_connector_group.default example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/chime_voice_connector_logging.html.markdown b/website/docs/cdktf/typescript/r/chime_voice_connector_logging.html.markdown
new file mode 100644
index 00000000000..e56367a58be
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/chime_voice_connector_logging.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "Chime"
+layout: "aws"
+page_title: "AWS: aws_chime_voice_connector_logging"
+description: |-
+  Adds a logging configuration for the specified Amazon Chime Voice Connector. The logging configuration specifies whether SIP message logs are enabled for sending to Amazon CloudWatch Logs.
+---
+
+
+
+# Resource: aws_chime_voice_connector_logging
+
+Adds a logging configuration for the specified Amazon Chime Voice Connector. The logging configuration specifies whether SIP message logs are enabled for sending to Amazon CloudWatch Logs.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector";
+import { ChimeVoiceConnectorLogging } from "./.gen/providers/aws/chime-voice-connector-logging";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const defaultVar = new ChimeVoiceConnector(this, "default", {
+      name: "vc-name-test",
+      requireEncryption: true,
+    });
+    const awsChimeVoiceConnectorLoggingDefault = new ChimeVoiceConnectorLogging(
+      this,
+      "default_1",
+      {
+        enableMediaMetricLogs: true,
+        enableSipLogs: true,
+        voiceConnectorId: defaultVar.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsChimeVoiceConnectorLoggingDefault.overrideLogicalId("default");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `voiceConnectorId` - (Required) The Amazon Chime Voice Connector ID.
+* `enableSipLogs` - (Optional) When true, enables SIP message logs for sending to Amazon CloudWatch Logs.
+* `enableMediaMetricLogs` - (Optional) When true, enables logging of detailed media metrics for Voice Connectors to Amazon CloudWatch logs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Chime Voice Connector ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Logging using the `voiceConnectorId`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Chime Voice Connector Logging using the `voiceConnectorId`. For example: + +```console +% terraform import aws_chime_voice_connector_logging.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chime_voice_connector_origination.html.markdown b/website/docs/cdktf/typescript/r/chime_voice_connector_origination.html.markdown new file mode 100644 index 00000000000..bc56c9479d1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/chime_voice_connector_origination.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_origination" +description: |- + Enable origination settings to control inbound calling to your SIP infrastructure. +--- + + + +# Resource: aws_chime_voice_connector_origination + +Enable origination settings to control inbound calling to your SIP infrastructure. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector"; +import { ChimeVoiceConnectorOrigination } from "./.gen/providers/aws/chime-voice-connector-origination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new ChimeVoiceConnector(this, "default", { + name: "test", + requireEncryption: true, + }); + const awsChimeVoiceConnectorOriginationDefault = + new ChimeVoiceConnectorOrigination(this, "default_1", { + disabled: false, + route: [ + { + host: "127.0.0.1", + port: 8081, + priority: 1, + protocol: "TCP", + weight: 1, + }, + { + host: "127.0.0.2", + port: 8082, + priority: 2, + protocol: "TCP", + weight: 10, + }, + ], + voiceConnectorId: defaultVar.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsChimeVoiceConnectorOriginationDefault.overrideLogicalId("default"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voiceConnectorId` - (Required) The Amazon Chime Voice Connector ID. +* `route` - (Required) Set of call distribution properties defined for your SIP hosts. See [route](#route) below for more details. Minimum of 1. Maximum of 20. +* `disabled` - (Optional) When origination settings are disabled, inbound calls are not enabled for your Amazon Chime Voice Connector. + +### `route` + +Origination routes define call distribution properties for your SIP hosts to receive inbound calls using your Amazon Chime Voice Connector. Limit: Ten origination routes for each Amazon Chime Voice Connector. + +* `host` - (Required) The FQDN or IP address to contact for origination traffic. +* `port` - (Required) The designated origination route port. Defaults to `5060`. +* `priority` - (Required) The priority associated with the host, with 1 being the highest priority. 
Higher priority hosts are attempted first. +* `protocol` - (Required) The protocol to use for the origination route. Encryption-enabled Amazon Chime Voice Connectors use TCP protocol by default. +* `weight` - (Required) The weight associated with the host. If hosts are equal in priority, calls are redistributed among them based on their relative weight. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Origination using the `voiceConnectorId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Chime Voice Connector Origination using the `voiceConnectorId`. For example: + +```console +% terraform import aws_chime_voice_connector_origination.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chime_voice_connector_streaming.html.markdown b/website/docs/cdktf/typescript/r/chime_voice_connector_streaming.html.markdown new file mode 100644 index 00000000000..5d7afc47e18 --- /dev/null +++ b/website/docs/cdktf/typescript/r/chime_voice_connector_streaming.html.markdown @@ -0,0 +1,186 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_streaming" +description: |- + The streaming configuration associated with an Amazon Chime Voice Connector. Specifies whether media streaming is enabled for sending to Amazon Kinesis, and shows the retention period for the Amazon Kinesis data, in hours. +--- + + + +# Resource: aws_chime_voice_connector_streaming + +Adds a streaming configuration for the specified Amazon Chime Voice Connector. The streaming configuration specifies whether media streaming is enabled for sending to Amazon Kinesis. +It also sets the retention period, in hours, for the Amazon Kinesis data. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector"; +import { ChimeVoiceConnectorStreaming } from "./.gen/providers/aws/chime-voice-connector-streaming"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new ChimeVoiceConnector(this, "default", { + name: "vc-name-test", + requireEncryption: true, + }); + const awsChimeVoiceConnectorStreamingDefault = + new ChimeVoiceConnectorStreaming(this, "default_1", { + dataRetention: 7, + disabled: false, + streamingNotificationTargets: ["SQS"], + voiceConnectorId: defaultVar.id, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsChimeVoiceConnectorStreamingDefault.overrideLogicalId("default"); + } +} + +``` + +### Example Usage With Media Insights + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector"; +import { ChimeVoiceConnectorStreaming } from "./.gen/providers/aws/chime-voice-connector-streaming"; +import { ChimesdkmediapipelinesMediaInsightsPipelineConfiguration } from "./.gen/providers/aws/chimesdkmediapipelines-media-insights-pipeline-configuration"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { KinesisStream } from "./.gen/providers/aws/kinesis-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new ChimeVoiceConnector(this, "default", { + name: "vc-name-test", + requireEncryption: true, + }); + const example = new KinesisStream(this, "example", { + name: "ExampleStream", + shardCount: 2, + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["mediapipelines.chime.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const awsIamRoleExample = new IamRole(this, "example_3", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "ExampleResourceAccessRole", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + const awsChimesdkmediapipelinesMediaInsightsPipelineConfigurationExample = + new ChimesdkmediapipelinesMediaInsightsPipelineConfiguration( + this, + "example_4", + { + elements: [ + { + amazonTranscribeCallAnalyticsProcessorConfiguration: { + languageCode: "en-US", + }, + type: "AmazonTranscribeCallAnalyticsProcessor", + }, + { + kinesisDataStreamSinkConfiguration: { + insightsTarget: example.arn, + }, + type: "KinesisDataStreamSink", + }, + ], + name: "ExampleConfig", + resourceAccessRoleArn: Token.asString(awsIamRoleExample.arn), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsChimesdkmediapipelinesMediaInsightsPipelineConfigurationExample.overrideLogicalId( + "example" + ); + const awsChimeVoiceConnectorStreamingDefault = + new ChimeVoiceConnectorStreaming(this, "default_5", { + dataRetention: 7, + disabled: false, + mediaInsightsConfiguration: { + configurationArn: Token.asString( + awsChimesdkmediapipelinesMediaInsightsPipelineConfigurationExample.arn + ), + disabled: false, + }, + streamingNotificationTargets: ["SQS"], + voiceConnectorId: defaultVar.id, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsChimeVoiceConnectorStreamingDefault.overrideLogicalId("default"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voiceConnectorId` - (Required) The Amazon Chime Voice Connector ID. +* `dataRetention` - (Required) The retention period, in hours, for the Amazon Kinesis data. +* `disabled` - (Optional) When true, media streaming to Amazon Kinesis is turned off. Default: `false` +* `streamingNotificationTargets` - (Optional) The streaming notification targets. Valid Values: `EventBridge | SNS | SQS` +* `mediaInsightsConfiguration` - (Optional) The media insights configuration. See [`mediaInsightsConfiguration`](#media_insights_configuration). + +### media_insights_configuration + +* `disabled` - (Optional) When `true`, the media insights configuration is not enabled. Defaults to `false`. +* `configurationArn` - (Optional) The media insights configuration that will be invoked by the Voice Connector. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Streaming using the `voiceConnectorId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Chime Voice Connector Streaming using the `voiceConnectorId`. For example: + +```console +% terraform import aws_chime_voice_connector_streaming.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chime_voice_connector_termination.html.markdown b/website/docs/cdktf/typescript/r/chime_voice_connector_termination.html.markdown new file mode 100644 index 00000000000..348c90dd2d8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/chime_voice_connector_termination.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_termination" +description: |- + Enable Termination settings to control outbound calling from your SIP infrastructure. +--- + + + +# Resource: aws_chime_voice_connector_termination + +Enable Termination settings to control outbound calling from your SIP infrastructure. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector"; +import { ChimeVoiceConnectorTermination } from "./.gen/providers/aws/chime-voice-connector-termination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new ChimeVoiceConnector(this, "default", { + name: "vc-name-test", + requireEncryption: true, + }); + const awsChimeVoiceConnectorTerminationDefault = + new ChimeVoiceConnectorTermination(this, "default_1", { + callingRegions: ["US", "CA"], + cidrAllowList: ["50.35.78.96/31"], + cpsLimit: 1, + disabled: false, + voiceConnectorId: defaultVar.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsChimeVoiceConnectorTerminationDefault.overrideLogicalId("default"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voiceConnectorId` - (Required) The Amazon Chime Voice Connector ID. +* `cidrAllowList` - (Required) The IP addresses allowed to make calls, in CIDR format. +* `callingRegions` - (Required) The countries to which calls are allowed, in ISO 3166-1 alpha-2 format. +* `disabled` - (Optional) When termination settings are disabled, outbound calls can not be made. +* `defaultPhoneNumber` - (Optional) The default caller ID phone number. +* `cpsLimit` - (Optional) The limit on calls per second. Max value based on account service quota. Default value of `1`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Termination using the `voiceConnectorId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Chime Voice Connector Termination using the `voiceConnectorId`. For example: + +```console +% terraform import aws_chime_voice_connector_termination.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chime_voice_connector_termination_credentials.html.markdown b/website/docs/cdktf/typescript/r/chime_voice_connector_termination_credentials.html.markdown new file mode 100644 index 00000000000..990270767c9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/chime_voice_connector_termination_credentials.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "Chime" +layout: "aws" +page_title: "AWS: aws_chime_voice_connector_termination_credentials" +description: |- + Adds termination SIP credentials for the specified Amazon Chime Voice Connector. +--- + + + +# Resource: aws_chime_voice_connector_termination_credentials + +Adds termination SIP credentials for the specified Amazon Chime Voice Connector. + +~> **Note:** Voice Connector Termination Credentials requires a [Voice Connector Termination](/docs/providers/aws/r/chime_voice_connector_termination.html) to be present. Use of `dependsOn` (as shown below) is recommended to avoid race conditions. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimeVoiceConnector } from "./.gen/providers/aws/chime-voice-connector"; +import { ChimeVoiceConnectorTermination } from "./.gen/providers/aws/chime-voice-connector-termination"; +import { ChimeVoiceConnectorTerminationCredentials } from "./.gen/providers/aws/chime-voice-connector-termination-credentials"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new ChimeVoiceConnector(this, "default", { + name: "test", + requireEncryption: true, + }); + const awsChimeVoiceConnectorTerminationDefault = + new ChimeVoiceConnectorTermination(this, "default_1", { + callingRegions: ["US", "CA"], + cidrAllowList: ["50.35.78.96/31"], + cpsLimit: 1, + disabled: true, + voiceConnectorId: defaultVar.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsChimeVoiceConnectorTerminationDefault.overrideLogicalId("default"); + const awsChimeVoiceConnectorTerminationCredentialsDefault = + new ChimeVoiceConnectorTerminationCredentials(this, "default_2", { + credentials: [ + { + password: "test!", + username: "test", + }, + ], + dependsOn: [awsChimeVoiceConnectorTerminationDefault], + voiceConnectorId: defaultVar.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsChimeVoiceConnectorTerminationCredentialsDefault.overrideLogicalId( + "default" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voiceConnectorId` - (Required) Amazon Chime Voice Connector ID. +* `credentials` - (Required) List of termination SIP credentials. + +### `credentials` + +The SIP credentials used to authenticate requests to your Amazon Chime Voice Connector. + +* `username` - (Required) RFC2617 compliant username associated with the SIP credentials. +* `password` - (Required) RFC2617 compliant password associated with the SIP credentials. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Chime Voice Connector ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime Voice Connector Termination Credentials using the `voiceConnectorId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Chime Voice Connector Termination Credentials using the `voiceConnectorId`. 
For example: + +```console +% terraform import aws_chime_voice_connector_termination_credentials.default abcdef1ghij2klmno3pqr4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chimesdkmediapipelines_media_insights_pipeline_configuration.html.markdown b/website/docs/cdktf/typescript/r/chimesdkmediapipelines_media_insights_pipeline_configuration.html.markdown new file mode 100644 index 00000000000..f029f22d701 --- /dev/null +++ b/website/docs/cdktf/typescript/r/chimesdkmediapipelines_media_insights_pipeline_configuration.html.markdown @@ -0,0 +1,501 @@ +--- +subcategory: "Chime SDK Media Pipelines" +layout: "aws" +page_title: "AWS: aws_chimesdkmediapipelines_media_insights_pipeline_configuration" +description: |- + Terraform resource for managing an AWS Chime SDK Media Pipelines Media Insights Pipeline Configuration. +--- + + + +# Resource: aws_chimesdkmediapipelines_media_insights_pipeline_configuration + +Terraform resource for managing an AWS Chime SDK Media Pipelines Media Insights Pipeline Configuration. +Consult the [Call analytics developer guide](https://docs.aws.amazon.com/chime-sdk/latest/dg/call-analytics.html) for more detailed information about usage. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimesdkmediapipelinesMediaInsightsPipelineConfiguration } from "./.gen/providers/aws/chimesdkmediapipelines-media-insights-pipeline-configuration"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { KinesisStream } from "./.gen/providers/aws/kinesis-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new KinesisStream(this, "example", { + name: "example", + shardCount: 2, + }); + const mediaPipelinesAssumeRole = new DataAwsIamPolicyDocument( + this, + "media_pipelines_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["mediapipelines.chime.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const callAnalyticsRole = new IamRole(this, "call_analytics_role", { + assumeRolePolicy: Token.asString(mediaPipelinesAssumeRole.json), + name: "CallAnalyticsRole", + }); + new ChimesdkmediapipelinesMediaInsightsPipelineConfiguration( + this, + "my_configuration", + { + elements: [ + { + amazonTranscribeCallAnalyticsProcessorConfiguration: { + languageCode: "en-US", + }, + type: "AmazonTranscribeCallAnalyticsProcessor", + }, + { + kinesisDataStreamSinkConfiguration: { + insightsTarget: example.arn, + }, + type: "KinesisDataStreamSink", + }, + ], + name: "MyBasicConfiguration", + resourceAccessRoleArn: callAnalyticsRole.arn, + tags: { + Key1: "Value1", + Key2: "Value2", + }, + } + ); + } +} + +``` + +- The required policies on `callAnalyticsRole` will vary based on the selected processors. See [Call analytics resource access role](https://docs.aws.amazon.com/chime-sdk/latest/dg/ca-resource-access-role.html) for directions on choosing appropriate policies. 
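+
+For instance, with the Kinesis Data Stream sink in the basic example above, `callAnalyticsRole` needs permission to write to the stream. A minimal sketch (the policy name and action list are illustrative assumptions, and `example` / `callAnalyticsRole` refer to the constructs from the basic example; the linked guide is authoritative):
+
+```typescript
+// Hypothetical inline policy granting the resource access role write access
+// to the Kinesis sink; the action list is an assumption, not the full set.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+class CallAnalyticsRolePolicy extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const kinesisSinkAccess = new DataAwsIamPolicyDocument(
+      this,
+      "kinesis_sink_access",
+      {
+        statement: [
+          {
+            actions: ["kinesis:PutRecord", "kinesis:PutRecords"],
+            effect: "Allow",
+            // `example` is the Kinesis stream from the basic example.
+            resources: [example.arn],
+          },
+        ],
+      }
+    );
+    new IamRolePolicy(this, "call_analytics_role_policy", {
+      name: "CallAnalyticsKinesisSink",
+      policy: Token.asString(kinesisSinkAccess.json),
+      // `callAnalyticsRole` is the IAM role from the basic example.
+      role: callAnalyticsRole.id,
+    });
+  }
+}
+```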
+ +### Transcribe Call Analytics processor usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimesdkmediapipelinesMediaInsightsPipelineConfiguration } from "./.gen/providers/aws/chimesdkmediapipelines-media-insights-pipeline-configuration"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const transcribeAssumeRole = new DataAwsIamPolicyDocument( + this, + "transcribe_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["transcribe.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const postCallRole = new IamRole(this, "post_call_role", { + assumeRolePolicy: Token.asString(transcribeAssumeRole.json), + name: "PostCallAccessRole", + }); + new ChimesdkmediapipelinesMediaInsightsPipelineConfiguration( + this, + "my_configuration", + { + elements: [ + { + amazonTranscribeCallAnalyticsProcessorConfiguration: { + callAnalyticsStreamCategories: ["category_1", "category_2"], + contentRedactionType: "PII", + enablePartialResultsStabilization: true, + filterPartialResults: true, + languageCode: "en-US", + languageModelName: "MyLanguageModel", + partialResultsStability: "high", + piiEntityTypes: "ADDRESS,BANK_ACCOUNT_NUMBER", + postCallAnalyticsSettings: { + contentRedactionOutput: "redacted", + dataAccessRoleArn: postCallRole.arn, + outputEncryptionKmsKeyId: "MyKmsKeyId", + outputLocation: "s3://MyBucket", + }, + vocabularyFilterMethod: "mask", + vocabularyFilterName: "MyVocabularyFilter", + vocabularyName: "MyVocabulary", + }, + type: "AmazonTranscribeCallAnalyticsProcessor", + }, + { + kinesisDataStreamSinkConfiguration: { + insightsTarget: example.arn, + }, + type: "KinesisDataStreamSink", + }, + ], + name: "MyCallAnalyticsConfiguration", + resourceAccessRoleArn: Token.asString(awsIamRoleExample.arn), + } + ); + } +} + +``` + +### Real time alerts usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ChimesdkmediapipelinesMediaInsightsPipelineConfiguration } from "./.gen/providers/aws/chimesdkmediapipelines-media-insights-pipeline-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ChimesdkmediapipelinesMediaInsightsPipelineConfiguration( + this, + "my_configuration", + { + elements: [ + { + amazonTranscribeCallAnalyticsProcessorConfiguration: { + languageCode: "en-US", + }, + type: "AmazonTranscribeCallAnalyticsProcessor", + }, + { + kinesisDataStreamSinkConfiguration: { + insightsTarget: example.arn, + }, + type: "KinesisDataStreamSink", + }, + ], + name: "MyRealTimeAlertConfiguration", + realTimeAlertConfiguration: { + disabled: false, + rules: [ + { + issueDetectionConfiguration: { + ruleName: "MyIssueDetectionRule", + }, + type: "IssueDetection", + }, + { + keywordMatchConfiguration: { + keywords: ["keyword1", "keyword2"], + negate: false, + ruleName: "MyKeywordMatchRule", + }, + type: "KeywordMatch", + }, + { + sentimentConfiguration: { + ruleName: "MySentimentRule", + sentimentType: "NEGATIVE", + timePeriod: 60, + }, + type: "Sentiment", + }, + ], + }, + resourceAccessRoleArn: callAnalyticsRole.arn, + } + ); + } +} + +``` + +### Transcribe processor usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimesdkmediapipelinesMediaInsightsPipelineConfiguration } from "./.gen/providers/aws/chimesdkmediapipelines-media-insights-pipeline-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ChimesdkmediapipelinesMediaInsightsPipelineConfiguration( + this, + "my_configuration", + { + elements: [ + { + amazonTranscribeProcessorConfiguration: { + contentIdentificationType: "PII", + enablePartialResultsStabilization: true, + filterPartialResults: true, + languageCode: "en-US", + languageModelName: "MyLanguageModel", + partialResultsStability: "high", + piiEntityTypes: "ADDRESS,BANK_ACCOUNT_NUMBER", + showSpeakerLabel: true, + vocabularyFilterMethod: "mask", + vocabularyFilterName: "MyVocabularyFilter", + vocabularyName: "MyVocabulary", + }, + type: "AmazonTranscribeProcessor", + }, + { + kinesisDataStreamSinkConfiguration: { + insightsTarget: example.arn, + }, + type: "KinesisDataStreamSink", + }, + ], + name: "MyTranscribeConfiguration", + resourceAccessRoleArn: Token.asString(awsIamRoleExample.arn), + } + ); + } +} + +``` + +### Voice analytics processor usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ChimesdkmediapipelinesMediaInsightsPipelineConfiguration } from "./.gen/providers/aws/chimesdkmediapipelines-media-insights-pipeline-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ChimesdkmediapipelinesMediaInsightsPipelineConfiguration( + this, + "my_configuration", + { + elements: [ + { + type: "VoiceAnalyticsProcessor", + voiceAnalyticsProcessorConfiguration: { + speakerSearchStatus: "Enabled", + voiceToneAnalysisStatus: "Enabled", + }, + }, + { + lambdaFunctionSinkConfiguration: { + insightsTarget: + "arn:aws:lambda:us-west-2:1111111111:function:MyFunction", + }, + type: "LambdaFunctionSink", + }, + { + snsTopicSinkConfiguration: { + insightsTarget: "arn:aws:sns:us-west-2:1111111111:topic/MyTopic", + }, + type: "SnsTopicSink", + }, + { + sqsQueueSinkConfiguration: { + insightsTarget: "arn:aws:sqs:us-west-2:1111111111:queue/MyQueue", + }, + type: "SqsQueueSink", + }, + { + kinesisDataStreamSinkConfiguration: { + insightsTarget: test.arn, + }, + type: "KinesisDataStreamSink", + }, + ], + name: "MyVoiceAnalyticsConfiguration", + resourceAccessRoleArn: example.arn, + } + ); + } +} + +``` + +### S3 Recording sink usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimesdkmediapipelinesMediaInsightsPipelineConfiguration } from "./.gen/providers/aws/chimesdkmediapipelines-media-insights-pipeline-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ChimesdkmediapipelinesMediaInsightsPipelineConfiguration( + this, + "my_configuration", + { + elements: [ + { + s3RecordingSinkConfiguration: { + destination: "arn:aws:s3:::MyBucket", + }, + type: "S3RecordingSink", + }, + ], + name: "MyS3RecordingConfiguration", + resourceAccessRoleArn: example.arn, + } + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Configuration name. +* `resourceAccessRoleArn` - (Required) ARN of IAM Role used by service to invoke processors and sinks specified by configuration elements. +* `elements` - (Required) Collection of processors and sinks to transform media and deliver data. +* `realTimeAlertConfiguration` - (Optional) Configuration for real-time alert rules to send EventBridge notifications when certain conditions are met. +* `tags` - (Optional) Key-value map of tags for the resource. + +### Elements + +* `type` - (Required) Element type. +* `amazonTranscribeCallAnalyticsProcessorConfiguration` - (Optional) Configuration for Amazon Transcribe Call Analytics processor. + * `callAnalyticsStreamCategories` - (Optional) Filter for category events to be delivered to insights target. + * `contentIdentificationType` - (Optional) Labels all personally identifiable information (PII) identified in Utterance events. + * `contentRedactionType` - (Optional) Redacts all personally identifiable information (PII) identified in Utterance events. + * `enablePartialResultsStabilization` - (Optional) Enables partial result stabilization in Utterance events. + * `filterPartialResults` - (Optional) Filters partial Utterance events from delivery to the insights target. 
+    * `languageCode` - (Required) Language code for the transcription model.
+    * `languageModelName` - (Optional) Name of custom language model for transcription.
+    * `partialResultsStability` - (Optional) Level of stability to use when partial results stabilization is enabled.
+    * `piiEntityTypes` - (Optional) Types of personally identifiable information (PII) to redact from an Utterance event.
+    * `postCallAnalyticsSettings` - (Optional) Settings for post call analytics.
+        * `contentRedactionOutput` - (Optional) Whether the output should be redacted.
+        * `dataAccessRoleArn` - (Required) ARN of the role used by AWS Transcribe to upload your post call analysis.
+        * `outputEncryptionKmsKeyId` - (Optional) ID of the KMS key used to encrypt the output.
+        * `outputLocation` - (Required) The Amazon S3 location where you want your Call Analytics post-call transcription output stored.
+    * `vocabularyFilterMethod` - (Optional) Method for applying a vocabulary filter to Utterance events.
+    * `vocabularyFilterName` - (Optional) Name of the custom vocabulary filter to use when processing Utterance events.
+    * `vocabularyName` - (Optional) Name of the custom vocabulary to use when processing Utterance events.
+* `amazonTranscribeProcessorConfiguration` - (Optional) Configuration for Amazon Transcribe processor.
+    * `contentIdentificationType` - (Optional) Labels all personally identifiable information (PII) identified in Transcript events.
+    * `contentRedactionType` - (Optional) Redacts all personally identifiable information (PII) identified in Transcript events.
+    * `enablePartialResultsStabilization` - (Optional) Enables partial result stabilization in Transcript events.
+    * `filterPartialResults` - (Optional) Filters partial Utterance events from delivery to the insights target.
+    * `languageCode` - (Required) Language code for the transcription model.
+    * `languageModelName` - (Optional) Name of custom language model for transcription.
+    * `partialResultsStability` - (Optional) Level of stability to use when partial results stabilization is enabled.
+    * `piiEntityTypes` - (Optional) Types of personally identifiable information (PII) to redact from a Transcript event.
+    * `showSpeakerLabel` - (Optional) Enables speaker partitioning (diarization) in your Transcript events.
+    * `vocabularyFilterMethod` - (Optional) Method for applying a vocabulary filter to Transcript events.
+    * `vocabularyFilterName` - (Optional) Name of the custom vocabulary filter to use when processing Transcript events.
+    * `vocabularyName` - (Optional) Name of the custom vocabulary to use when processing Transcript events.
+* `kinesisDataStreamSinkConfiguration` - (Optional) Configuration for Kinesis Data Stream sink.
+    * `insightsTarget` - (Required) Kinesis Data Stream to deliver results.
+* `lambdaFunctionSinkConfiguration` - (Optional) Configuration for Lambda Function sink.
+    * `insightsTarget` - (Required) Lambda Function to deliver results.
+* `snsTopicSinkConfiguration` - (Optional) Configuration for SNS Topic sink.
+    * `insightsTarget` - (Required) SNS topic to deliver results.
+* `sqsQueueSinkConfiguration` - (Optional) Configuration for SQS Queue sink.
+    * `insightsTarget` - (Required) SQS queue to deliver results.
+* `s3RecordingSinkConfiguration` - (Optional) Configuration for S3 recording sink.
+    * `destination` - (Required) S3 URI to deliver recordings.
+* `voiceAnalyticsProcessorConfiguration` - (Optional) Configuration for Voice analytics processor.
+    * `speakerSearchStatus` - (Required) Enable speaker search.
+ * `voiceToneAnalysisStatus` - (Required) Enable voice tone analysis. + +### Real time alert configuration + +* `rules` - (Required) Collection of real time alert rules + * `type` - (Required) Rule type. + * `issueDetectionConfiguration` - (Optional) Configuration for an issue detection rule. + * `ruleName` - (Required) Rule name. + * `keywordMatchConfiguration` - (Optional) Configuration for a keyword match rule. + * `ruleName` - (Required) Rule name. + * `keywords` - (Required) Collection of keywords to match. + * `negate` - (Optional) Negate the rule. + * `sentimentConfiguration` - (Optional) Configuration for a sentiment rule. + * `ruleName` - (Required) Rule name. + * `sentimentType` - (Required) Sentiment type to match. + * `timePeriod` - (Optional) Analysis interval. +* `disabled` - (Optional) Disables real time alert rules. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Media Insights Pipeline Configuration. +* `id` - Unique ID of the Media Insights Pipeline Configuration. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `3M`) +* `update` - (Default `3M`) +* `delete` - (Default `30S`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Chime SDK Media Pipelines Media Insights Pipeline Configuration using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Chime SDK Media Pipelines Media Insights Pipeline Configuration using the `id`. For example: + +```console +% terraform import aws_chimesdkmediapipelines_media_insights_pipeline_configuration.example abcdef123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chimesdkvoice_global_settings.html.markdown b/website/docs/cdktf/typescript/r/chimesdkvoice_global_settings.html.markdown new file mode 100644 index 00000000000..fec519ca859 --- /dev/null +++ b/website/docs/cdktf/typescript/r/chimesdkvoice_global_settings.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Chime SDK Voice" +layout: "aws" +page_title: "AWS: aws_chimesdkvoice_global_settings" +description: |- + Terraform resource for managing Amazon Chime SDK Voice Global Settings. +--- + + + +# Resource: aws_chimesdkvoice_global_settings + +Terraform resource for managing Amazon Chime SDK Voice Global Settings. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ChimesdkvoiceGlobalSettings } from "./.gen/providers/aws/chimesdkvoice-global-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ChimesdkvoiceGlobalSettings(this, "example", { + voiceConnector: { + cdrBucket: "example-bucket-name", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `voiceConnector` - (Required) The Voice Connector settings. See [voice_connector](#voice_connector). + +### `voiceConnector` + +The Amazon Chime SDK Voice Connector settings. Includes any Amazon S3 buckets designated for storing call detail records. + +* `cdrBucket` - (Optional) The S3 bucket that stores the Voice Connector's call detail records. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AWS account ID for which the settings are applied. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Chime SDK Voice Global Settings using the `id` (AWS account ID). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS Chime SDK Voice Global Settings using the `id` (AWS account ID). For example: + +```console +% terraform import aws_chimesdkvoice_global_settings.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chimesdkvoice_sip_media_application.html.markdown b/website/docs/cdktf/typescript/r/chimesdkvoice_sip_media_application.html.markdown new file mode 100644 index 00000000000..e3f069cc07d --- /dev/null +++ b/website/docs/cdktf/typescript/r/chimesdkvoice_sip_media_application.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Chime SDK Voice" +layout: "aws" +page_title: "AWS: aws_chimesdkvoice_sip_media_application" +description: |- + A ChimeSDKVoice SIP Media Application is a managed object that passes values from a SIP rule to a target AWS Lambda function. +--- + + + +# Resource: aws_chimesdkvoice_sip_media_application + +A ChimeSDKVoice SIP Media Application is a managed object that passes values from a SIP rule to a target AWS Lambda function. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ChimesdkvoiceSipMediaApplication } from "./.gen/providers/aws/chimesdkvoice-sip-media-application"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ChimesdkvoiceSipMediaApplication(this, "example", { + awsRegion: "us-east-1", + endpoints: { + lambdaArn: test.arn, + }, + name: "example-sip-media-application", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `awsRegion` - (Required) The AWS Region in which the AWS Chime SDK Voice Sip Media Application is created. 
+* `endpoints` - (Required) List of endpoints (Lambda Amazon Resource Names) specified for the SIP media application. Currently, only one endpoint is supported. See [`endpoints`](#endpoints). +* `name` - (Required) The name of the AWS Chime SDK Voice Sip Media Application. + +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### `endpoints` + +The endpoint assigned to the SIP media application. + +* `lambdaArn` - (Required) Valid Amazon Resource Name (ARN) of the Lambda function, version, or alias. The function must be created in the same AWS Region as the SIP media application. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN (Amazon Resource Name) of the AWS Chime SDK Voice Sip Media Application +* `id` - The SIP media application ID. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a ChimeSDKVoice SIP Media Application using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a ChimeSDKVoice SIP Media Application using the `id`. For example: + +```console +% terraform import aws_chimesdkvoice_sip_media_application.example abcdef123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/chimesdkvoice_sip_rule.html.markdown b/website/docs/cdktf/typescript/r/chimesdkvoice_sip_rule.html.markdown new file mode 100644 index 00000000000..5e25bb7d2a7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/chimesdkvoice_sip_rule.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Chime SDK Voice" +layout: "aws" +page_title: "AWS: aws_chimesdkvoice_sip_rule" +description: |- + A SIP rule associates your SIP media application with a phone number or a Request URI hostname. You can associate a SIP rule with more than one SIP media application. Each application then runs only that rule. +--- + + +# Resource: aws_chimesdkvoice_sip_rule + +A SIP rule associates your SIP media application with a phone number or a Request URI hostname. You can associate a SIP rule with more than one SIP media application. Each application then runs only that rule. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { ChimesdkvoiceSipRule } from "./.gen/providers/aws/chimesdkvoice-sip-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ChimesdkvoiceSipRule(this, "example", {
+      name: "example-sip-rule",
+      targetApplications: [
+        {
+          awsRegion: "us-east-1",
+          priority: 1,
+          sipMediaApplicationId: exampleSma.id,
+        },
+      ],
+      triggerType: "RequestUriHostname",
+      triggerValue: exampleVoiceConnector.outboundHostName,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the SIP rule.
+* `targetApplications` - (Required) List of SIP media applications with priority and AWS Region. Only one SIP application per AWS Region can be used. See [`targetApplications`](#targetapplications).
+* `triggerType` - (Required) The type of trigger assigned to the SIP rule in `triggerValue`. Valid values are `RequestUriHostname` or `ToPhoneNumber`.
+* `triggerValue` - (Required) If `triggerType` is `RequestUriHostname`, the value can be the outbound host name of an Amazon Chime Voice Connector. If `triggerType` is `ToPhoneNumber`, the value can be a customer-owned phone number in E.164 format. The SIP media application specified in the SIP rule is triggered if the request URI in an incoming SIP request matches the `RequestUriHostname` value, or if the "To" header in the incoming SIP request matches the `ToPhoneNumber` value.
+
+The following arguments are optional:
+
+* `disabled` - (Optional) Enables or disables a rule. You must disable rules before you can delete them.
+
+### `targetApplications`
+
+List of SIP media applications with priority and AWS Region. Only one SIP application per AWS Region can be used.
+
+* `awsRegion` - (Required) The AWS Region of the target application.
+* `priority` - (Required) Priority of the SIP media application in the target list.
+* `sipMediaApplicationId` - (Required) The SIP media application ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The SIP rule ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a ChimeSDKVoice SIP Rule using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a ChimeSDKVoice SIP Rule using the `id`. For example:
+
+```console
+% terraform import aws_chimesdkvoice_sip_rule.example abcdef123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/chimesdkvoice_voice_profile_domain.html.markdown b/website/docs/cdktf/typescript/r/chimesdkvoice_voice_profile_domain.html.markdown
new file mode 100644
index 00000000000..51c664101e4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/chimesdkvoice_voice_profile_domain.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "Chime SDK Voice"
+layout: "aws"
+page_title: "AWS: aws_chimesdkvoice_voice_profile_domain"
+description: |-
+  Terraform resource for managing an AWS Chime SDK Voice Profile Domain.
+---
+
+
+
+# Resource: aws_chimesdkvoice_voice_profile_domain
+
+Terraform resource for managing an AWS Chime SDK Voice Profile Domain.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ChimesdkvoiceVoiceProfileDomain } from "./.gen/providers/aws/chimesdkvoice-voice-profile-domain";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new KmsKey(this, "example", {
+      deletionWindowInDays: 7,
+      description: "KMS Key for Voice Profile Domain",
+    });
+    const awsChimesdkvoiceVoiceProfileDomainExample =
+      new ChimesdkvoiceVoiceProfileDomain(this, "example_1", {
+        description: "My Voice Profile Domain",
+        name: "ExampleVoiceProfileDomain",
+        serverSideEncryptionConfiguration: {
+          kmsKeyArn: example.arn,
+        },
+        tags: {
+          key1: "value1",
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsChimesdkvoiceVoiceProfileDomainExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of Voice Profile Domain.
+* `serverSideEncryptionConfiguration` - (Required) Configuration for server side encryption.
+    * `kmsKeyArn` - (Required) ARN for KMS Key.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of Voice Profile Domain.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Voice Profile Domain.
+* `id` - ID of the Voice Profile Domain.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30S`)
+* `update` - (Default `30S`)
+* `delete` - (Default `30S`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Chime SDK Voice Profile Domain using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AWS Chime SDK Voice Profile Domain using the `id`. For example:
+
+```console
+% terraform import aws_chimesdkvoice_voice_profile_domain.example abcdef123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cleanrooms_collaboration.html.markdown b/website/docs/cdktf/typescript/r/cleanrooms_collaboration.html.markdown
new file mode 100644
index 00000000000..baaa993008f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cleanrooms_collaboration.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "Clean Rooms"
+layout: "aws"
+page_title: "AWS: aws_cleanrooms_collaboration"
+description: |-
+  Provides a Clean Rooms Collaboration.
+---
+
+
+
+# Resource: aws_cleanrooms_collaboration
+
+Provides an AWS Clean Rooms collaboration. All members included in the definition will be invited to
+join the collaboration and can create memberships.
+
+## Example Usage
+
+### Collaboration with tags
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CleanroomsCollaboration } from "./.gen/providers/aws/cleanrooms-collaboration";
+interface MyConfig {
+  memberAbilities: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new CleanroomsCollaboration(this, "test_collaboration", {
+      creatorDisplayName: "Creator ",
+      creatorMemberAbilities: ["CAN_QUERY", "CAN_RECEIVE_RESULTS"],
+      dataEncryptionMetadata: {
+        allowClearText: true,
+        allowDuplicates: true,
+        allowJoinsOnColumnsWithDifferentNames: true,
+        preserveNulls: false,
+      },
+      description: "I made this collaboration with terraform!",
+      member: [
+        {
+          accountId: Token.asString(123456789012),
+          displayName: "Other member",
+          memberAbilities: config.memberAbilities,
+        },
+      ],
+      name: "terraform-example-collaboration",
+      queryLogStatus: "DISABLED",
+      tags: {
+        Project: "Terraform",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) - The name of the collaboration. Collaboration names do not need to be unique.
+* `description` - (Required) - A description for a collaboration.
+* `creatorMemberAbilities` - (Required - Forces new resource) - The list of member abilities for the creator of the collaboration. Valid values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_CreateCollaboration.html#API-CreateCollaboration-request-creatorMemberAbilities).
+* `creatorDisplayName` - (Required - Forces new resource) - The name for the member record for the collaboration creator.
+* `queryLogStatus` - (Required - Forces new resource) - Determines if members of the collaboration can enable query logs within their own memberships. Valid values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_CreateCollaboration.html#API-CreateCollaboration-request-queryLogStatus).
+* `dataEncryptionMetadata` - (Required - Forces new resource) - A collection of settings which determine how the [c3r client](https://docs.aws.amazon.com/clean-rooms/latest/userguide/crypto-computing.html) will encrypt data for use within this collaboration.
+* `dataEncryptionMetadataAllowClearText` - (Required - Forces new resource) - Indicates whether encrypted tables can contain cleartext data. This is a boolean field.
+* `dataEncryptionMetadataAllowDuplicates` - (Required - Forces new resource) - Indicates whether Fingerprint columns can contain duplicate entries. This is a boolean field.
+* `dataEncryptionMetadataAllowJoinsOnColumnsWithDifferentNames` - (Required - Forces new resource) - Indicates whether Fingerprint columns can be joined on any other Fingerprint column with a different name. This is a boolean field.
+* `dataEncryptionMetadataPreserveNulls` - (Required - Forces new resource) - Indicates whether NULL values are to be copied as NULL to encrypted tables (true) or cryptographically processed (false).
+* `member` - (Optional - Forces new resource) - Additional members of the collaboration which will be invited to join the collaboration. A concrete sketch follows this list.
+* `memberAccountId` - (Required - Forces new resource) - The account ID for the invited member.
+* `memberDisplayName` - (Required - Forces new resource) - The display name for the invited member.
+* `memberMemberAbilities` - (Required - Forces new resource) - The list of abilities for the invited member. Valid values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_CreateCollaboration.html#API-CreateCollaboration-request-creatorMemberAbilities).
+* `tags` - (Optional) - Key value pairs which tag the collaboration.
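+
+The converted example above takes `memberAbilities` from configuration input. As a concrete illustration (the account ID is a placeholder, and the ability values simply mirror the creator's abilities from the example), the member list could be filled in like this:
+
+```typescript
+// Illustrative only: one invited member that can both run queries and
+// receive results. Use this in place of `member` in the example above.
+const invitedMember = [
+  {
+    accountId: "123456789012",
+    displayName: "Other member",
+    memberAbilities: ["CAN_QUERY", "CAN_RECEIVE_RESULTS"],
+  },
+];
+```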
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The arn of the collaboration
+* `id` - The id of the collaboration
+* `createTime` - The date and time the collaboration was created
+* `member status` - For each member included in the collaboration an additional computed attribute of status is added. These values [may be found here](https://docs.aws.amazon.com/clean-rooms/latest/apireference/API_MemberSummary.html#API-Type-MemberSummary-status)
+* `updatedTime` - The date and time the collaboration was last updated
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `1M`)
+- `update` - (Default `1M`)
+- `delete` - (Default `1M`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloud9_environment_ec2.html.markdown b/website/docs/cdktf/typescript/r/cloud9_environment_ec2.html.markdown
new file mode 100644
index 00000000000..d2a25c9eff1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloud9_environment_ec2.html.markdown
@@ -0,0 +1,155 @@
+---
+subcategory: "Cloud9"
+layout: "aws"
+page_title: "AWS: aws_cloud9_environment_ec2"
+description: |-
+  Provides a Cloud9 EC2 Development Environment.
+---
+
+
+
+# Resource: aws_cloud9_environment_ec2
+
+Provides a Cloud9 EC2 Development Environment.
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Cloud9EnvironmentEc2 } from "./.gen/providers/aws/cloud9-environment-ec2";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Cloud9EnvironmentEc2(this, "example", {
+      instanceType: "t2.micro",
+      name: "example-env",
+    });
+  }
+}
+
+```
+
+Get the URL of the Cloud9 environment after creation:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Cloud9EnvironmentEc2 } from "./.gen/providers/aws/cloud9-environment-ec2";
+import { DataAwsInstance } from "./.gen/providers/aws/data-aws-instance";
+interface MyConfig {
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new Cloud9EnvironmentEc2(this, "example", {
+      instanceType: "t2.micro",
+      name: config.name,
+    });
+    new DataAwsInstance(this, "cloud9_instance", {
+      filter: [
+        {
+          name: "tag:aws:cloud9:environment",
+          values: [example.id],
+        },
+      ],
+    });
+    new TerraformOutput(this, "cloud9_url", {
+      value:
+        "https://${" +
+        region.value +
+        "}.console.aws.amazon.com/cloud9/ide/${" +
+        example.id +
+        "}",
+    });
+  }
+}
+
+```
+
+Allocate a static IP to the Cloud9 environment:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformOutput, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Cloud9EnvironmentEc2 } from "./.gen/providers/aws/cloud9-environment-ec2";
+import { DataAwsInstance } from "./.gen/providers/aws/data-aws-instance";
+import { Eip } from "./.gen/providers/aws/eip";
+interface MyConfig {
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new Cloud9EnvironmentEc2(this, "example", {
+      instanceType: "t2.micro",
+      name: config.name,
+    });
+    const cloud9Instance = new DataAwsInstance(this, "cloud9_instance", {
+      filter: [
+        {
+          name: "tag:aws:cloud9:environment",
+          values: [example.id],
+        },
+      ],
+    });
+    const cloud9Eip = new Eip(this, "cloud9_eip", {
+      domain: "vpc",
+      instance: Token.asString(cloud9Instance.id),
+    });
+    new TerraformOutput(this, "cloud9_public_ip", {
+      value: cloud9Eip.publicIp,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the environment.
+* `instanceType` - (Required) The type of instance to connect to the environment, e.g., `t2.micro`.
+* `automaticStopTimeMinutes` - (Optional) The number of minutes until the running instance is shut down after the environment has last been used.
+* `connectionType` - (Optional) The connection type used for connecting to an Amazon EC2 environment. Valid values are `CONNECT_SSH` and `CONNECT_SSM`. For more information please refer [AWS documentation for Cloud9](https://docs.aws.amazon.com/cloud9/latest/user-guide/ec2-ssm.html). A sketch combining `connectionType` and `imageId` follows this list.
+* `description` - (Optional) The description of the environment.
+* `imageId` - (Optional) The identifier for the Amazon Machine Image (AMI) that's used to create the EC2 instance. Valid values are
+    * `amazonlinux-1-x86_64`
+    * `amazonlinux-2-x86_64`
+    * `ubuntu-18.04-x86_64`
+    * `resolve:ssm:/aws/service/cloud9/amis/amazonlinux-1-x86_64`
+    * `resolve:ssm:/aws/service/cloud9/amis/amazonlinux-2-x86_64`
+    * `resolve:ssm:/aws/service/cloud9/amis/ubuntu-18.04-x86_64`
+* `ownerArn` - (Optional) The ARN of the environment owner. This can be the ARN of any AWS IAM principal. Defaults to the environment's creator.
+* `subnetId` - (Optional) The ID of the subnet in Amazon VPC that AWS Cloud9 will use to communicate with the Amazon EC2 instance.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
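+
+Tying a few of these options together, a hedged sketch of an SSM-connected environment pinned to a specific AMI alias follows. Note that `CONNECT_SSM` additionally requires the instance profile described in the AWS documentation linked above, which this fragment does not create:
+
+```typescript
+// Illustrative only: an auto-hibernating, SSM-connected environment on
+// Amazon Linux 2 that shuts down after 30 idle minutes.
+new Cloud9EnvironmentEc2(this, "pinned", {
+  automaticStopTimeMinutes: 30,
+  connectionType: "CONNECT_SSM",
+  imageId: "resolve:ssm:/aws/service/cloud9/amis/amazonlinux-2-x86_64",
+  instanceType: "t3.small",
+  name: "pinned-env",
+});
+```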
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the environment.
+* `arn` - The ARN of the environment.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `type` - The type of the environment (e.g., `ssh` or `ec2`).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloud9_environment_membership.markdown b/website/docs/cdktf/typescript/r/cloud9_environment_membership.markdown
new file mode 100644
index 00000000000..536ad83da2e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloud9_environment_membership.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "Cloud9"
+layout: "aws"
+page_title: "AWS: aws_cloud9_environment_membership"
+description: |-
+  Provides an environment member to an AWS Cloud9 development environment.
+---
+
+
+
+# Resource: aws_cloud9_environment_membership
+
+Provides an environment member to an AWS Cloud9 development environment.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Cloud9EnvironmentEc2 } from "./.gen/providers/aws/cloud9-environment-ec2";
+import { Cloud9EnvironmentMembership } from "./.gen/providers/aws/cloud9-environment-membership";
+import { IamUser } from "./.gen/providers/aws/iam-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new Cloud9EnvironmentEc2(this, "test", {
+      instanceType: "t2.micro",
+      name: "some-env",
+    });
+    const awsIamUserTest = new IamUser(this, "test_1", {
+      name: "some-user",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamUserTest.overrideLogicalId("test");
+    const awsCloud9EnvironmentMembershipTest = new Cloud9EnvironmentMembership(
+      this,
+      "test_2",
+      {
+        environmentId: test.id,
+        permissions: "read-only",
+        userArn: Token.asString(awsIamUserTest.arn),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloud9EnvironmentMembershipTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `environmentId` - (Required) The ID of the environment that contains the environment member you want to add.
+* `permissions` - (Required) The type of environment member permissions you want to associate with this environment member. Allowed values are `read-only` and `read-write`.
+* `userArn` - (Required) The Amazon Resource Name (ARN) of the environment member you want to add.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the environment membership.
+
+* `userId` - The user ID in AWS Identity and Access Management (AWS IAM) of the environment member.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloud9 environment membership using the `environmentId#userArn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cloud9 environment membership using the `environmentId#userArn`. For example:
+
+```console
+% terraform import aws_cloud9_environment_membership.test environment-id#user-arn
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudcontrolapi_resource.html.markdown b/website/docs/cdktf/typescript/r/cloudcontrolapi_resource.html.markdown
new file mode 100644
index 00000000000..e172cedb93f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudcontrolapi_resource.html.markdown
@@ -0,0 +1,67 @@
+---
+subcategory: "Cloud Control API"
+layout: "aws"
+page_title: "AWS: aws_cloudcontrolapi_resource"
+description: |-
+  Manages a Cloud Control API Resource.
+---
+
+
+
+# Resource: aws_cloudcontrolapi_resource
+
+Manages a Cloud Control API Resource. The configuration and lifecycle handling of these resources is proxied through Cloud Control API handlers to the backend service.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudcontrolapiResource } from "./.gen/providers/aws/cloudcontrolapi-resource";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudcontrolapiResource(this, "example", {
+      desiredState: Token.asString(
+        Fn.jsonencode({
+          ClusterName: "example",
+          Tags: [
+            {
+              Key: "CostCenter",
+              Value: "IT",
+            },
+          ],
+        })
+      ),
+      typeName: "AWS::ECS::Cluster",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `desiredState` - (Required) JSON string matching the CloudFormation resource type schema with desired configuration. Terraform configuration expressions can be converted into JSON using the [`jsonencode()` function](https://www.terraform.io/docs/language/functions/jsonencode.html).
+* `typeName` - (Required) CloudFormation resource type name. For example, `AWS::EC2::VPC`.
+
+The following arguments are optional:
+
+* `roleArn` - (Optional) Amazon Resource Name (ARN) of the IAM Role to assume for operations.
+* `schema` - (Optional) JSON string of the CloudFormation resource type schema which is used for plan time validation where possible. Automatically fetched if not provided. In large scale environments with multiple resources using the same `typeName`, it is recommended to fetch the schema once via the [`awsCloudformationType` data source](/docs/providers/aws/d/cloudformation_type.html) and use this argument to reduce `DescribeType` API operation throttling. This value is marked sensitive only to prevent large plan differences from showing.
+* `typeVersionId` - (Optional) Identifier of the CloudFormation resource type version.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `properties` - JSON string matching the CloudFormation resource type schema with current configuration. Underlying attributes can be referenced via the [`jsondecode()` function](https://www.terraform.io/docs/language/functions/jsondecode.html), for example, `jsondecode(dataAwsCloudcontrolapiResourceExampleProperties)["example"]`. A CDKTF sketch of the same pattern follows below.
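+
+Where a single property is needed inside the same stack, the decode can stay in Terraform. A minimal sketch follows; the output name and the `Arn` key are our assumptions, `example` is the resource from the usage example above, and `Fn` plus `TerraformOutput` must be added to the `cdktf` import:
+
+```typescript
+// Both functions are evaluated by Terraform at apply time, so the values
+// remain tokens during synthesis. Place this inside the stack constructor,
+// after `example` has been declared.
+new TerraformOutput(this, "cluster_arn", {
+  // jsondecode() yields a Terraform object; lookup() reads one key from it,
+  // falling back to an empty string if the key is absent.
+  value: Fn.lookup(Fn.jsondecode(example.properties), "Arn", ""),
+});
+```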
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudformation_stack.html.markdown b/website/docs/cdktf/typescript/r/cloudformation_stack.html.markdown
new file mode 100644
index 00000000000..e5172214176
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudformation_stack.html.markdown
@@ -0,0 +1,129 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_stack"
+description: |-
+  Provides a CloudFormation Stack resource.
+---
+
+
+
+# Resource: aws_cloudformation_stack
+
+Provides a CloudFormation Stack resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudformationStack } from "./.gen/providers/aws/cloudformation-stack";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudformationStack(this, "network", {
+      name: "networking-stack",
+      parameters: {
+        VPCCidr: "10.0.0.0/16",
+      },
+      templateBody: Token.asString(
+        Fn.jsonencode({
+          Parameters: {
+            VPCCidr: {
+              Default: "10.0.0.0/16",
+              Description:
+                "Enter the CIDR block for the VPC. Default is 10.0.0.0/16.",
+              Type: "String",
+            },
+          },
+          Resources: {
+            myVpc: {
+              Properties: {
+                CidrBlock: {
+                  Ref: "VPCCidr",
+                },
+                Tags: [
+                  {
+                    Key: "Name",
+                    Value: "Primary_CF_VPC",
+                  },
+                ],
+              },
+              Type: "AWS::EC2::VPC",
+            },
+          },
+        })
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Stack name.
+* `templateBody` - (Optional) Structure containing the template body (max size: 51,200 bytes).
+* `templateUrl` - (Optional) Location of a file containing the template body (max size: 460,800 bytes).
+* `capabilities` - (Optional) A list of capabilities.
+  Valid values: `CAPABILITY_IAM`, `CAPABILITY_NAMED_IAM`, or `CAPABILITY_AUTO_EXPAND`
+* `disableRollback` - (Optional) Set to true to disable rollback of the stack if stack creation failed.
+  Conflicts with `onFailure`.
+* `notificationArns` - (Optional) A list of SNS topic ARNs to publish stack related events.
+* `onFailure` - (Optional) Action to be taken if stack creation fails. This must be
+  one of: `DO_NOTHING`, `ROLLBACK`, or `DELETE`. Conflicts with `disableRollback`.
+* `parameters` - (Optional) A map of Parameter structures that specify input parameters for the stack.
+* `policyBody` - (Optional) Structure containing the stack policy body.
+  Conflicts w/ `policyUrl`.
+* `policyUrl` - (Optional) Location of a file containing the stack policy.
+  Conflicts w/ `policyBody`.
+* `tags` - (Optional) Map of resource tags to associate with this stack.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `iamRoleArn` - (Optional) The ARN of an IAM role that AWS CloudFormation assumes to create the stack. If you don't specify a value, AWS CloudFormation uses the role that was previously associated with the stack. If no role is available, AWS CloudFormation uses a temporary session that is generated from your user credentials.
+* `timeoutInMinutes` - (Optional) The amount of time that can pass before the stack status becomes `CREATE_FAILED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A unique identifier of the stack.
+* `outputs` - A map of outputs from the stack.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30M`)
+- `update` - (Default `30M`)
+- `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFormation Stacks using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFormation Stacks using the `name`. For example:
+
+```console
+% terraform import aws_cloudformation_stack.stack networking-stack
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudformation_stack_set.html.markdown b/website/docs/cdktf/typescript/r/cloudformation_stack_set.html.markdown
new file mode 100644
index 00000000000..626bb567486
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudformation_stack_set.html.markdown
@@ -0,0 +1,207 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_stack_set"
+description: |-
+  Manages a CloudFormation StackSet.
+---
+
+
+
+# Resource: aws_cloudformation_stack_set
+
+Manages a CloudFormation StackSet. StackSets allow CloudFormation templates to be easily deployed across multiple accounts and regions via StackSet Instances ([`awsCloudformationStackSetInstance` resource](/docs/providers/aws/r/cloudformation_stack_set_instance.html)). Additional information about StackSets can be found in the [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/what-is-cfnstacksets.html).
+
+~> **NOTE:** All template parameters, including those with a `default`, must be configured or ignored with the `lifecycle` configuration block `ignoreChanges` argument.
+
+~> **NOTE:** All `NoEcho` template parameters must be ignored with the `lifecycle` configuration block `ignoreChanges` argument.
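+
+For instance, ignoring the whole `parameters` map could look like the following sketch (illustrative only; `adminRole` and `templateJson` stand in for the role and template built in the example below, and narrower expressions such as `parameters["Password"]` are also accepted by `ignoreChanges`):
+
+```typescript
+// Keep Terraform from perpetually diffing against parameter values that
+// CloudFormation masks (NoEcho) or fills in from template defaults.
+new CloudformationStackSet(this, "ignored_params", {
+  name: "ignored-params-example",
+  administrationRoleArn: adminRole.arn,
+  templateBody: templateJson,
+  lifecycle: {
+    ignoreChanges: ["parameters"],
+  },
+});
+```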
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudformationStackSet } from "./.gen/providers/aws/cloudformation-stack-set";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const awsCloudFormationStackSetAdministrationRoleAssumeRolePolicy =
+      new DataAwsIamPolicyDocument(
+        this,
+        "AWSCloudFormationStackSetAdministrationRole_assume_role_policy",
+        {
+          statement: [
+            {
+              actions: ["sts:AssumeRole"],
+              effect: "Allow",
+              principals: [
+                {
+                  identifiers: ["cloudformation.amazonaws.com"],
+                  type: "Service",
+                },
+              ],
+            },
+          ],
+        }
+      );
+    const awsCloudFormationStackSetAdministrationRole = new IamRole(
+      this,
+      "AWSCloudFormationStackSetAdministrationRole",
+      {
+        assumeRolePolicy: Token.asString(
+          awsCloudFormationStackSetAdministrationRoleAssumeRolePolicy.json
+        ),
+        name: "AWSCloudFormationStackSetAdministrationRole",
+      }
+    );
+    const example = new CloudformationStackSet(this, "example", {
+      administrationRoleArn: awsCloudFormationStackSetAdministrationRole.arn,
+      name: "example",
+      parameters: {
+        VPCCidr: "10.0.0.0/16",
+      },
+      templateBody: Token.asString(
+        Fn.jsonencode({
+          Parameters: {
+            VPCCidr: {
+              Default: "10.0.0.0/16",
+              Description:
+                "Enter the CIDR block for the VPC. Default is 10.0.0.0/16.",
+              Type: "String",
+            },
+          },
+          Resources: {
+            myVpc: {
+              Properties: {
+                CidrBlock: {
+                  Ref: "VPCCidr",
+                },
+                Tags: [
+                  {
+                    Key: "Name",
+                    Value: "Primary_CF_VPC",
+                  },
+                ],
+              },
+              Type: "AWS::EC2::VPC",
+            },
+          },
+        })
+      ),
+    });
+    const awsCloudFormationStackSetAdministrationRoleExecutionPolicy =
+      new DataAwsIamPolicyDocument(
+        this,
+        "AWSCloudFormationStackSetAdministrationRole_ExecutionPolicy",
+        {
+          statement: [
+            {
+              actions: ["sts:AssumeRole"],
+              effect: "Allow",
+              resources: [
+                "arn:aws:iam::*:role/${" + example.executionRoleName + "}",
+              ],
+            },
+          ],
+        }
+      );
+    const awsIamRolePolicyAwsCloudFormationStackSetAdministrationRoleExecutionPolicy =
+      new IamRolePolicy(
+        this,
+        "AWSCloudFormationStackSetAdministrationRole_ExecutionPolicy_4",
+        {
+          name: "ExecutionPolicy",
+          policy: Token.asString(
+            awsCloudFormationStackSetAdministrationRoleExecutionPolicy.json
+          ),
+          role: awsCloudFormationStackSetAdministrationRole.name,
+        }
+      );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyAwsCloudFormationStackSetAdministrationRoleExecutionPolicy.overrideLogicalId(
+      "AWSCloudFormationStackSetAdministrationRole_ExecutionPolicy"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `administrationRoleArn` - (Optional) Amazon Resource Name (ARN) of the IAM Role in the administrator account. This must be defined when using the `SELF_MANAGED` permission model.
+* `autoDeployment` - (Optional) Configuration block containing the auto-deployment model for your StackSet. This can only be defined when using the `SERVICE_MANAGED` permission model.
+ * `enabled` - (Optional) Whether or not auto-deployment is enabled. + * `retainStacksOnAccountRemoval` - (Optional) Whether or not to retain stacks when the account is removed. +* `name` - (Required) Name of the StackSet. The name must be unique in the region where you create your StackSet. The name can contain only alphanumeric characters (case-sensitive) and hyphens. It must start with an alphabetic character and cannot be longer than 128 characters. +* `capabilities` - (Optional) A list of capabilities. Valid values: `capabilityIam`, `capabilityNamedIam`, `capabilityAutoExpand`. +* `operationPreferences` - (Optional) Preferences for how AWS CloudFormation performs a stack set update. +* `description` - (Optional) Description of the StackSet. +* `executionRoleName` - (Optional) Name of the IAM Role in all target accounts for StackSet operations. Defaults to `awsCloudFormationStackSetExecutionRole` when using the `selfManaged` permission model. This should not be defined when using the `serviceManaged` permission model. +* `managedExecution` - (Optional) Configuration block to allow StackSets to perform non-conflicting operations concurrently and queues conflicting operations. + * `active` - (Optional) When set to true, StackSets performs non-conflicting operations concurrently and queues conflicting operations. After conflicting operations finish, StackSets starts queued operations in request order. Default is false. +* `parameters` - (Optional) Key-value map of input parameters for the StackSet template. All template parameters, including those with a `default`, must be configured or ignored with `lifecycle` configuration block `ignoreChanges` argument. All `noEcho` template parameters must be ignored with the `lifecycle` configuration block `ignoreChanges` argument. +* `permissionModel` - (Optional) Describes how the IAM roles required for your StackSet are created. Valid values: `selfManaged` (default), `serviceManaged`. +* `callAs` - (Optional) Specifies whether you are acting as an account administrator in the organization's management account or as a delegated administrator in a member account. Valid values: `self` (default), `delegatedAdmin`. +* `tags` - (Optional) Key-value map of tags to associate with this StackSet and the Stacks created from it. AWS CloudFormation also propagates these tags to supported resources that are created in the Stacks. A maximum number of 50 tags can be specified. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `templateBody` - (Optional) String containing the CloudFormation template body. Maximum size: 51,200 bytes. Conflicts with `templateUrl`. +* `templateUrl` - (Optional) String containing the location of a file containing the CloudFormation template body. The URL must point to a template that is located in an Amazon S3 bucket. Maximum location file size: 460,800 bytes. Conflicts with `templateBody`. + +### `operationPreferences` Argument Reference + +The `operationPreferences` configuration block supports the following arguments: + +* `failureToleranceCount` - (Optional) The number of accounts, per Region, for which this operation can fail before AWS CloudFormation stops the operation in that Region. 
+* `failureTolerancePercentage` - (Optional) The percentage of accounts, per Region, for which this stack operation can fail before AWS CloudFormation stops the operation in that Region.
+* `maxConcurrentCount` - (Optional) The maximum number of accounts in which to perform this operation at one time.
+* `maxConcurrentPercentage` - (Optional) The maximum percentage of accounts in which to perform this operation at one time.
+* `regionConcurrencyType` - (Optional) The concurrency type of deploying StackSets operations in Regions, either in parallel or one Region at a time. Valid values are `sequential` and `parallel`.
+* `regionOrder` - (Optional) The order of the Regions in which you want to perform the stack operation.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the StackSet.
+* `id` - Name of the StackSet.
+* `stackSetId` - Unique identifier of the StackSet.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFormation StackSets using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFormation StackSets using the `name`. For example:
+
+```console
+% terraform import aws_cloudformation_stack_set.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudformation_stack_set_instance.html.markdown b/website/docs/cdktf/typescript/r/cloudformation_stack_set_instance.html.markdown
new file mode 100644
index 00000000000..8870b46d5a3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudformation_stack_set_instance.html.markdown
@@ -0,0 +1,255 @@
+---
+subcategory: "CloudFormation"
+layout: "aws"
+page_title: "AWS: aws_cloudformation_stack_set_instance"
+description: |-
+  Manages a CloudFormation StackSet Instance.
+---
+
+
+
+# Resource: aws_cloudformation_stack_set_instance
+
+Manages a CloudFormation StackSet Instance. Instances are managed in the account and region of the StackSet after the target account permissions have been configured. Additional information about StackSets can be found in the [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/what-is-cfnstacksets.html).
+
+~> **NOTE:** All target accounts must have an IAM Role created that matches the name of the execution role configured in the StackSet (the `executionRoleName` argument in the `awsCloudformationStackSet` resource) in a trust relationship with the administrative account or administration IAM Role. The execution role must have appropriate permissions to manage resources defined in the template along with those required for StackSets to operate.
See the [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-prereqs.html) for more details. + +~> **NOTE:** To retain the Stack during Terraform resource destroy, ensure `retain_stack = true` has been successfully applied into the Terraform state first. This must be completed _before_ an apply that would destroy the resource. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudformationStackSetInstance } from "./.gen/providers/aws/cloudformation-stack-set-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudformationStackSetInstance(this, "example", { + accountId: "123456789012", + region: "us-east-1", + stackSetName: Token.asString(awsCloudformationStackSetExample.name), + }); + } +} + +``` + +### Example IAM Setup in Target Account + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const awsCloudFormationStackSetExecutionRoleMinimumExecutionPolicy = + new DataAwsIamPolicyDocument( + this, + "AWSCloudFormationStackSetExecutionRole_MinimumExecutionPolicy", + { + statement: [ + { + actions: ["cloudformation:*", "s3:*", "sns:*"], + effect: "Allow", + resources: ["*"], + }, + ], + } + ); + const awsCloudFormationStackSetExecutionRoleAssumeRolePolicy = + new DataAwsIamPolicyDocument( + this, + "AWSCloudFormationStackSetExecutionRole_assume_role_policy", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: [ + awsCloudFormationStackSetAdministrationRole.arn, + ], + type: "AWS", + }, + ], + }, + ], + } + ); + const awsCloudFormationStackSetExecutionRole = new IamRole( + this, + "AWSCloudFormationStackSetExecutionRole", + { + assumeRolePolicy: Token.asString( + awsCloudFormationStackSetExecutionRoleAssumeRolePolicy.json + ), + name: "AWSCloudFormationStackSetExecutionRole", + } + ); + const awsIamRolePolicyAwsCloudFormationStackSetExecutionRoleMinimumExecutionPolicy = + new IamRolePolicy( + this, + "AWSCloudFormationStackSetExecutionRole_MinimumExecutionPolicy_3", + { + name: "MinimumExecutionPolicy", + policy: Token.asString( + awsCloudFormationStackSetExecutionRoleMinimumExecutionPolicy.json + ), + role: awsCloudFormationStackSetExecutionRole.name, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAwsCloudFormationStackSetExecutionRoleMinimumExecutionPolicy.overrideLogicalId( + "AWSCloudFormationStackSetExecutionRole_MinimumExecutionPolicy" + ); + } +} + +``` + +### Example Deployment across Organizations account + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudformationStackSetInstance } from "./.gen/providers/aws/cloudformation-stack-set-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudformationStackSetInstance(this, "example", { + deploymentTargets: { + organizationalUnitIds: [ + Token.asString( + propertyAccess(awsOrganizationsOrganizationExample.roots, [ + "0", + "id", + ]) + ), + ], + }, + region: "us-east-1", + stackSetName: Token.asString(awsCloudformationStackSetExample.name), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stackSetName` - (Required) Name of the StackSet. +* `accountId` - (Optional) Target AWS Account ID to create a Stack based on the StackSet. Defaults to current account. +* `deploymentTargets` - (Optional) The AWS Organizations accounts to which StackSets deploys. StackSets doesn't deploy stack instances to the organization management account, even if the organization management account is in your organization or in an OU in your organization. Drift detection is not possible for this argument. See [deployment_targets](#deployment_targets-argument-reference) below. +* `parameterOverrides` - (Optional) Key-value map of input parameters to override from the StackSet for this Instance. +* `region` - (Optional) Target AWS Region to create a Stack based on the StackSet. Defaults to current region. +* `retainStack` - (Optional) During Terraform resource destroy, remove Instance from StackSet while keeping the Stack and its associated resources. Must be enabled in Terraform state _before_ destroy operation to take effect. You cannot reassociate a retained Stack or add an existing, saved Stack to a new StackSet. Defaults to `false`. +* `callAs` - (Optional) Specifies whether you are acting as an account administrator in the organization's management account or as a delegated administrator in a member account. Valid values: `self` (default), `delegatedAdmin`. +* `operationPreferences` - (Optional) Preferences for how AWS CloudFormation performs a stack set operation. + +### `deploymentTargets` Argument Reference + +The `deploymentTargets` configuration block supports the following arguments: + +* `organizationalUnitIds` - (Optional) The organization root ID or organizational unit (OU) IDs to which StackSets deploys. + +### `operationPreferences` Argument Reference + +The `operationPreferences` configuration block supports the following arguments: + +* `failureToleranceCount` - (Optional) The number of accounts, per Region, for which this operation can fail before AWS CloudFormation stops the operation in that Region. +* `failureTolerancePercentage` - (Optional) The percentage of accounts, per Region, for which this stack operation can fail before AWS CloudFormation stops the operation in that Region. 
+* `maxConcurrentCount` - (Optional) The maximum number of accounts in which to perform this operation at one time.
+* `maxConcurrentPercentage` - (Optional) The maximum percentage of accounts in which to perform this operation at one time.
+* `regionConcurrencyType` - (Optional) The concurrency type of deploying StackSets operations in Regions, either in parallel or one Region at a time. Valid values are `sequential` and `parallel`.
+* `regionOrder` - (Optional) The order of the Regions in which you want to perform the stack operation.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier for the resource. If `deploymentTargets` is set, this is a comma-delimited string combining stack set name, organizational unit IDs (`/`-delimited), and region (i.e., `mystack,ou123/ou456,usEast1`). Otherwise, this is a comma-delimited string combining stack set name, AWS account ID, and region (i.e., `mystack,123456789012,usEast1`).
+* `organizationalUnitId` - The organization root ID or organizational unit (OU) ID in which the stack is deployed.
+* `stackId` - Stack identifier.
+* `stackInstanceSummaries` - List of stack instances created from an organizational unit deployment target. This will only be populated when `deploymentTargets` is set. See [`stackInstanceSummaries`](#stack_instance_summaries-attribute-reference).
+
+### `stackInstanceSummaries` Attribute Reference
+
+* `accountId` - AWS account ID in which the stack is deployed.
+* `organizationalUnitId` - Organizational unit ID in which the stack is deployed.
+* `stackId` - Stack identifier.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFormation StackSet Instances that target an AWS Account ID using the StackSet name, target AWS account ID, and target AWS region separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import CloudFormation StackSet Instances that target AWS Organizational Units using the StackSet name, a slash (`/`) separated list of organizational unit IDs, and target AWS region separated by commas (`,`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFormation StackSet Instances that target an AWS Account ID using the StackSet name, target AWS account ID, and target AWS region separated by commas (`,`).
For example: + +```console +% terraform import aws_cloudformation_stack_set_instance.example example,123456789012,us-east-1 +``` + +Import CloudFormation StackSet Instances that target AWS Organizational Units using the StackSet name, a slash (`/`) separated list of organizational unit IDs, and target AWS region separated by commas (`,`): + +```console +% terraform import aws_cloudformation_stack_set_instance.example example,ou-sdas-123123123/ou-sdas-789789789,us-east-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudformation_type.html.markdown b/website/docs/cdktf/typescript/r/cloudformation_type.html.markdown new file mode 100644 index 00000000000..acb86aa6d8c --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudformation_type.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "CloudFormation" +layout: "aws" +page_title: "AWS: aws_cloudformation_type" +description: |- + Manages a version of a CloudFormation Type. +--- + + + +# Resource: aws_cloudformation_type + +Manages a version of a CloudFormation Type. + +~> **NOTE:** The destroy operation of this resource marks the version as deprecated. If this was the only `live` version, the type is marked as deprecated. Enable the [resource `lifecycle` configuration block `createBeforeDestroy` argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy) in this resource configuration to properly order redeployments in Terraform. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudformationType } from "./.gen/providers/aws/cloudformation-type"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudformationType(this, "example", { + lifecycle: { + createBeforeDestroy: true, + }, + loggingConfig: { + logGroupName: Token.asString(awsCloudwatchLogGroupExample.name), + logRoleArn: Token.asString(awsIamRoleExample.arn), + }, + schemaHandlerPackage: + "s3://${" + + awsS3ObjectExample.bucket + + "}/${" + + awsS3ObjectExample.key + + "}", + type: "RESOURCE", + typeName: "ExampleCompany::ExampleService::ExampleResource", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `executionRoleArn` - (Optional) Amazon Resource Name (ARN) of the IAM Role for CloudFormation to assume when invoking the extension. If your extension calls AWS APIs in any of its handlers, you must create an IAM execution role that includes the necessary permissions to call those AWS APIs, and provision that execution role in your account. When CloudFormation needs to invoke the extension handler, CloudFormation assumes this execution role to create a temporary session token, which it then passes to the extension handler, thereby supplying your extension with the appropriate credentials. +* `loggingConfig` - (Optional) Configuration block containing logging configuration. +* `schemaHandlerPackage` - (Required) URL to the S3 bucket containing the extension project package that contains the necessary files for the extension you want to register. Must begin with `s3://` or `https://`. For example, `s3://exampleBucket/exampleObject`. +* `type` - (Optional) CloudFormation Registry Type. 
For example, `resource` or `module`.
+* `typeName` - (Optional) CloudFormation Type name. For example, `exampleCompany::exampleService::exampleResource`.
+
+### logging_config
+
+The `loggingConfig` configuration block supports the following arguments:
+
+* `logGroupName` - (Required) Name of the CloudWatch Log Group where CloudFormation sends error logging information when invoking the type's handlers.
+* `logRoleArn` - (Required) Amazon Resource Name (ARN) of the IAM Role CloudFormation assumes when sending error logging information to CloudWatch Logs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the CloudFormation Type version. See also `typeArn`.
+* `defaultVersionId` - Identifier of the CloudFormation Type default version.
+* `deprecatedStatus` - Deprecation status of the version.
+* `description` - Description of the version.
+* `documentationUrl` - URL of the documentation for the CloudFormation Type.
+* `isDefaultVersion` - Whether the CloudFormation Type version is the default version.
+* `provisioningType` - Provisioning behavior of the CloudFormation Type.
+* `schema` - JSON document of the CloudFormation Type schema.
+* `sourceUrl` - URL of the source code for the CloudFormation Type.
+* `typeArn` - Amazon Resource Name (ARN) of the CloudFormation Type. See also `arn`.
+* `versionId` - Identifier of the CloudFormation Type version.
+* `visibility` - Scope of the CloudFormation Type.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsCloudformationType` using the type version Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsCloudformationType` using the type version Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_cloudformation_type.example arn:aws:cloudformation:us-east-1:123456789012:type/resource/ExampleCompany-ExampleService-ExampleType/1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_cache_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_cache_policy.html.markdown
new file mode 100644
index 00000000000..19ddb02ddc8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_cache_policy.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_cache_policy"
+description: |-
+  Use the `awsCloudfrontCachePolicy` resource to manage cache policies for CloudFront distributions. This resource allows you to attach cache policies to cache behaviors, which determine the values included in the cache key, such as HTTP headers, cookies, and URL query strings. CloudFront uses the cache key to locate cached objects and return them to viewers. Additionally, the cache policy sets the default, minimum, and maximum time to live (TTL) values for objects in the CloudFront cache.
+---
+
+
+
+# Resource: aws_cloudfront_cache_policy
+
+## Example Usage
+
+Use the `awsCloudfrontCachePolicy` resource to create a cache policy for CloudFront.
+ +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudfrontCachePolicy } from "./.gen/providers/aws/cloudfront-cache-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudfrontCachePolicy(this, "example", { + comment: "test comment", + defaultTtl: 50, + maxTtl: 100, + minTtl: 1, + name: "example-policy", + parametersInCacheKeyAndForwardedToOrigin: { + cookiesConfig: { + cookieBehavior: "whitelist", + cookies: { + items: ["example"], + }, + }, + headersConfig: { + headerBehavior: "whitelist", + headers: { + items: ["example"], + }, + }, + queryStringsConfig: { + queryStringBehavior: "whitelist", + queryStrings: { + items: ["example"], + }, + }, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Unique name used to identify the cache policy. +* `minTtl` - (Required) Minimum amount of time, in seconds, that objects should remain in the CloudFront cache before a new request is sent to the origin to check for updates. +* `maxTtl` - (Optional) Maximum amount of time, in seconds, that objects stay in the CloudFront cache before CloudFront sends another request to the origin to see if the object has been updated. +* `defaultTtl` - (Optional) Amount of time, in seconds, that objects are allowed to remain in the CloudFront cache before CloudFront sends a new request to the origin server to check if the object has been updated. +* `comment` - (Optional) Description for the cache policy. +* `parametersInCacheKeyAndForwardedToOrigin` - (Required) Configuration for including HTTP headers, cookies, and URL query strings in the cache key. For more information, refer to the [Parameters In Cache Key And Forwarded To Origin](#parameters-in-cache-key-and-forwarded-to-origin) section. + +### Parameters In Cache Key And Forwarded To Origin + +* `cookiesConfig` - (Required) Whether any cookies in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information. +* `headersConfig` - (Required) Whether any HTTP headers are included in the cache key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information. +* `queryStringsConfig` - (Required) Whether any URL query strings in viewer requests are included in the cache key. It also automatically includes these query strings in requests that CloudFront sends to the origin. Please refer to the [Query String Config](#query-string-config) for more information. +* `enableAcceptEncodingBrotli` - (Optional) Flag determines whether the Accept-Encoding HTTP header is included in the cache key and in requests that CloudFront sends to the origin. +* `enableAcceptEncodingGzip` - (Optional) Whether the Accept-Encoding HTTP header is included in the cache key and in requests sent to the origin by CloudFront. + +### Cookies Config + +* `cookieBehavior` - (Required) Whether any cookies in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. 
Valid values for `cookieBehavior` are `none`, `whitelist`, `allExcept`, and `all`.
+* `cookies` - (Optional) Object that contains a list of cookie names. See [Items](#items) for more information.
+
+### Headers Config
+
+* `headerBehavior` - (Required) Whether any HTTP headers are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values for `headerBehavior` are `none` and `whitelist`.
+* `headers` - (Optional) Object that contains a list of header names. See [Items](#items) for more information.
+
+### Query String Config
+
+* `queryStringBehavior` - (Required) Whether URL query strings in viewer requests are included in the cache key and automatically included in requests that CloudFront sends to the origin. Valid values for `queryStringBehavior` are `none`, `whitelist`, `allExcept`, and `all`.
+* `queryStrings` - (Optional) Configuration parameter that contains a list of query string names. See [Items](#items) for more information.
+
+### Items
+
+* `items` - (Required) List of item names, such as cookies, headers, or query strings.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - Current version of the cache policy.
+* `id` - Identifier for the cache policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront cache policies using the `id` of the cache policy. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront cache policies using the `id` of the cache policy. For example:
+
+```console
+% terraform import aws_cloudfront_cache_policy.policy 658327ea-f89d-4fab-a63d-7e88639e58f6
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_distribution.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_distribution.html.markdown
new file mode 100644
index 00000000000..7c74932c7cd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_distribution.html.markdown
@@ -0,0 +1,597 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_distribution"
+description: |-
+  Provides a CloudFront web distribution resource.
+---
+
+
+
+# Resource: aws_cloudfront_distribution
+
+Creates an Amazon CloudFront web distribution.
+
+For information about CloudFront distributions, see the [Amazon CloudFront Developer Guide][1]. For specific information about creating CloudFront web distributions, see the [POST Distribution][2] page in the Amazon CloudFront API Reference.
+
+~> **NOTE:** CloudFront distributions take about 15 minutes to reach a deployed state after creation or modification. During this time, deletion of the resource is blocked. If you need to delete a distribution that is enabled and you do not want to wait, use the `retainOnDelete` flag.
+
+## Example Usage
+
+The example below creates a CloudFront distribution with an S3 origin.
+ +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const s3OriginId = "myS3Origin"; + const b = new S3Bucket(this, "b", { + bucket: "mybucket", + tags: { + Name: "My bucket", + }, + }); + new S3BucketAcl(this, "b_acl", { + acl: "private", + bucket: b.id, + }); + new CloudfrontDistribution(this, "s3_distribution", { + aliases: ["mysite.example.com", "yoursite.example.com"], + comment: "Some comment", + defaultCacheBehavior: { + allowedMethods: [ + "DELETE", + "GET", + "HEAD", + "OPTIONS", + "PATCH", + "POST", + "PUT", + ], + cachedMethods: ["GET", "HEAD"], + defaultTtl: 3600, + forwardedValues: { + cookies: { + forward: "none", + }, + queryString: false, + }, + maxTtl: 86400, + minTtl: 0, + targetOriginId: s3OriginId, + viewerProtocolPolicy: "allow-all", + }, + defaultRootObject: "index.html", + enabled: true, + isIpv6Enabled: true, + loggingConfig: { + bucket: "mylogs.s3.amazonaws.com", + includeCookies: false, + prefix: "myprefix", + }, + orderedCacheBehavior: [ + { + allowedMethods: ["GET", "HEAD", "OPTIONS"], + cachedMethods: ["GET", "HEAD", "OPTIONS"], + compress: true, + defaultTtl: 86400, + forwardedValues: { + cookies: { + forward: "none", + }, + headers: ["Origin"], + queryString: false, + }, + maxTtl: 31536000, + minTtl: 0, + pathPattern: "/content/immutable/*", + targetOriginId: s3OriginId, + viewerProtocolPolicy: "redirect-to-https", + }, + { + allowedMethods: ["GET", "HEAD", "OPTIONS"], + cachedMethods: ["GET", "HEAD"], + compress: true, + defaultTtl: 3600, + forwardedValues: { + cookies: { + forward: "none", + }, + queryString: false, + }, + maxTtl: 86400, + minTtl: 0, + pathPattern: "/content/*", + targetOriginId: s3OriginId, + viewerProtocolPolicy: "redirect-to-https", + }, + ], + origin: [ + { + domainName: b.bucketRegionalDomainName, + originAccessControlId: defaultVar.id, + originId: s3OriginId, + }, + ], + priceClass: "PriceClass_200", + restrictions: { + geoRestriction: { + locations: ["US", "CA", "GB", "DE"], + restrictionType: "whitelist", + }, + }, + tags: { + Environment: "production", + }, + viewerCertificate: { + cloudfrontDefaultCertificate: true, + }, + }); + } +} + +``` + +The example below creates a CloudFront distribution with an origin group for failover routing: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution"; +interface MyConfig { + allowedMethods: any; + cachedMethods: any; + viewerProtocolPolicy: any; + enabled: any; + restrictions: any; + viewerCertificate: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new CloudfrontDistribution(this, "s3_distribution", { + defaultCacheBehavior: { + targetOriginId: "groupS3", + allowedMethods: config.allowedMethods, + cachedMethods: config.cachedMethods, + viewerProtocolPolicy: config.viewerProtocolPolicy, + }, + origin: [ + { + domainName: primary.bucketRegionalDomainName, + originId: "primaryS3", + s3OriginConfig: { + originAccessIdentity: defaultVar.cloudfrontAccessIdentityPath, + }, + }, + { + domainName: failover.bucketRegionalDomainName, + originId: "failoverS3", + s3OriginConfig: { + originAccessIdentity: defaultVar.cloudfrontAccessIdentityPath, + }, + }, + ], + originGroup: [ + { + failoverCriteria: { + statusCodes: [403, 404, 500, 502], + }, + member: [ + { + originId: "primaryS3", + }, + { + originId: "failoverS3", + }, + ], + originId: "groupS3", + }, + ], + enabled: config.enabled, + restrictions: config.restrictions, + viewerCertificate: config.viewerCertificate, + }); + } +} + +``` + +CloudFront distribution using [managed policies](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/using-managed-cache-policies.html) (ex: CachingDisabled): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution"; +interface MyConfig { + cachedMethods: any; + viewerProtocolPolicy: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const s3OriginId = "myS3Origin"; + new CloudfrontDistribution(this, "s3_distribution", { + comment: "Some comment", + defaultCacheBehavior: { + allowedMethods: ["GET", "HEAD", "OPTIONS"], + cachePolicyId: "4135ea2d-6df8-44a3-9df3-4b5a84be39ad", + path_pattern: "/content/*", + targetOriginId: s3OriginId, + cachedMethods: config.cachedMethods, + viewerProtocolPolicy: config.viewerProtocolPolicy, + }, + defaultRootObject: "index.html", + enabled: true, + isIpv6Enabled: true, + origin: [ + { + domainName: primary.bucketRegionalDomainName, + originId: "myS3Origin", + s3OriginConfig: { + originAccessIdentity: defaultVar.cloudfrontAccessIdentityPath, + }, + }, + ], + restrictions: { + geoRestriction: { + locations: ["US", "CA", "GB", "DE"], + restrictionType: "whitelist", + }, + }, + viewerCertificate: { + cloudfrontDefaultCertificate: true, + }, + }); + } +} + +``` + +## Argument Reference + +The CloudFront distribution argument layout is a complex structure composed of several sub-resources - these resources are laid out below. + +### Top-Level Arguments + +* `aliases` (Optional) - Extra CNAMEs (alternate domain names), if any, for this distribution. +* `comment` (Optional) - Any comments you want to include about the distribution. +* `customErrorResponse` (Optional) - One or more [custom error response](#custom-error-response-arguments) elements (multiples allowed). 
+* `defaultCacheBehavior` (Required) - [Default cache behavior](#default-cache-behavior-arguments) for this distribution (maximum one). Requires either `cachePolicyId` (preferred) or `forwardedValues` (deprecated) to be set.
+* `defaultRootObject` (Optional) - Object that you want CloudFront to return (for example, index.html) when an end user requests the root URL.
+* `enabled` (Required) - Whether the distribution is enabled to accept end user requests for content.
+* `isIpv6Enabled` (Optional) - Whether IPv6 is enabled for the distribution.
+* `httpVersion` (Optional) - Maximum HTTP version to support on the distribution. Allowed values are `http11`, `http2`, `http2And3` and `http3`. The default is `http2`.
+* `loggingConfig` (Optional) - The [logging configuration](#logging-config-arguments) that controls how logs are written to your distribution (maximum one).
+* `orderedCacheBehavior` (Optional) - Ordered list of [cache behaviors](#cache-behavior-arguments) for this distribution. List from top to bottom in order of precedence. The topmost cache behavior will have precedence 0.
+* `origin` (Required) - One or more [origins](#origin-arguments) for this distribution (multiples allowed).
+* `originGroup` (Optional) - One or more [origin groups](#origin-group-arguments) for this distribution (multiples allowed).
+* `priceClass` (Optional) - Price class for this distribution. One of `priceClassAll`, `priceClass200`, `priceClass100`.
+* `restrictions` (Required) - The [restriction configuration](#restrictions-arguments) for this distribution (maximum one).
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `viewerCertificate` (Required) - The [SSL configuration](#viewer-certificate-arguments) for this distribution (maximum one).
+* `webAclId` (Optional) - Unique identifier that specifies the AWS WAF web ACL, if any, to associate with this distribution. To specify a web ACL created using the latest version of AWS WAF (WAFv2), use the ACL ARN, for example `awsWafv2WebAclExampleArn`. To specify a web ACL created using AWS WAF Classic, use the ACL ID, for example `awsWafWebAclExampleId`. The WAF Web ACL must exist in the WAF Global (CloudFront) region and the credentials configuring this argument must have `waf:getWebAcl` permissions assigned.
+* `retainOnDelete` (Optional) - Disables the distribution instead of deleting it when destroying the resource through Terraform. If this is set, the distribution needs to be deleted manually afterwards. Default: `false`.
+* `waitForDeployment` (Optional) - If enabled, the resource will wait for the distribution status to change from `inProgress` to `deployed`. Setting this to `false` will skip the process. Default: `true`.
+
+#### Cache Behavior Arguments
+
+~> **NOTE:** To achieve the setting of 'Use origin cache headers' without a linked cache policy, use the following TTL values: `minTtl` = 0, `maxTtl` = 31536000, `defaultTtl` = 86400. See [this issue](https://github.com/hashicorp/terraform-provider-aws/issues/19382) for additional context.
+
+* `allowedMethods` (Required) - Controls which HTTP methods CloudFront processes and forwards to your Amazon S3 bucket or your custom origin.
+* `cachedMethods` (Required) - Controls whether CloudFront caches the response to requests using the specified HTTP methods. +* `cachePolicyId` (Optional) - Unique identifier of the cache policy that is attached to the cache behavior. If configuring the `defaultCacheBehavior` either `cachePolicyId` or `forwardedValues` must be set. +* `compress` (Optional) - Whether you want CloudFront to automatically compress content for web requests that include `Accept-Encoding: gzip` in the request header (default: `false`). +* `defaultTtl` (Optional) - Default amount of time (in seconds) that an object is in a CloudFront cache before CloudFront forwards another request in the absence of an `Cache-Control max-age` or `expires` header. +* `fieldLevelEncryptionId` (Optional) - Field level encryption configuration ID. +* `forwardedValues` (Optional, **Deprecated** use `cachePolicyId` or `origin_request_policy_id ` instead) - The [forwarded values configuration](#forwarded-values-arguments) that specifies how CloudFront handles query strings, cookies and headers (maximum one). +* `lambdaFunctionAssociation` (Optional) - A [config block](#lambda-function-association) that triggers a lambda function with specific actions (maximum 4). +* `functionAssociation` (Optional) - A [config block](#function-association) that triggers a cloudfront function with specific actions (maximum 2). +* `maxTtl` (Optional) - Maximum amount of time (in seconds) that an object is in a CloudFront cache before CloudFront forwards another request to your origin to determine whether the object has been updated. Only effective in the presence of `Cache-Control max-age`, `Cache-Control s-maxage`, and `expires` headers. +* `minTtl` (Optional) - Minimum amount of time that you want objects to stay in CloudFront caches before CloudFront queries your origin to see whether the object has been updated. Defaults to 0 seconds. +* `originRequestPolicyId` (Optional) - Unique identifier of the origin request policy that is attached to the behavior. +* `pathPattern` (Required) - Pattern (for example, `images/*Jpg`) that specifies which requests you want this cache behavior to apply to. +* `realtimeLogConfigArn` (Optional) - ARN of the [real-time log configuration](cloudfront_realtime_log_config.html) that is attached to this cache behavior. +* `responseHeadersPolicyId` (Optional) - Identifier for a response headers policy. +* `smoothStreaming` (Optional) - Indicates whether you want to distribute media files in Microsoft Smooth Streaming format using the origin that is associated with this cache behavior. +* `targetOriginId` (Required) - Value of ID for the origin that you want CloudFront to route requests to when a request matches the path pattern either for a cache behavior or for the default cache behavior. +* `trustedKeyGroups` (Optional) - List of key group IDs that CloudFront can use to validate signed URLs or signed cookies. See the [CloudFront User Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-trusted-signers.html) for more information about this feature. +* `trustedSigners` (Optional) - List of AWS account IDs (or `self`) that you want to allow to create signed URLs for private content. See the [CloudFront User Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-trusted-signers.html) for more information about this feature. 
+* `viewerProtocolPolicy` (Required) - Use this element to specify the protocol that users can use to access the files in the origin specified by TargetOriginId when a request matches the path pattern in PathPattern. One of `allowAll`, `httpsOnly`, or `redirectToHttps`. + +##### Forwarded Values Arguments + +* `cookies` (Required) - The [forwarded values cookies](#cookies-arguments) that specifies how CloudFront handles cookies (maximum one). +* `headers` (Optional) - Headers, if any, that you want CloudFront to vary upon for this cache behavior. Specify `*` to include all headers. +* `queryString` (Required) - Indicates whether you want CloudFront to forward query strings to the origin that is associated with this cache behavior. +* `queryStringCacheKeys` (Optional) - When specified, along with a value of `true` for `queryString`, all query strings are forwarded, however only the query string keys listed in this argument are cached. When omitted with a value of `true` for `queryString`, all query string keys are cached. + +##### Lambda Function Association + +Lambda@Edge allows you to associate an AWS Lambda Function with a predefined +event. You can associate a single function per event type. See [What is +Lambda@Edge](http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/what-is-lambda-at-edge.html) +for more information. + +Example configuration: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution"; +interface MyConfig { + allowedMethods: any; + cachedMethods: any; + pathPattern: any; + targetOriginId: any; + viewerProtocolPolicy: any; + defaultCacheBehavior: any; + enabled: any; + origin: any; + restrictions: any; + viewerCertificate: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new CloudfrontDistribution(this, "example", { + orderedCacheBehavior: [ + { + lambdaFunctionAssociation: [ + { + eventType: "viewer-request", + includeBody: false, + lambdaArn: Token.asString(awsLambdaFunctionExample.qualifiedArn), + }, + ], + allowedMethods: config.allowedMethods, + cachedMethods: config.cachedMethods, + pathPattern: config.pathPattern, + targetOriginId: config.targetOriginId, + viewerProtocolPolicy: config.viewerProtocolPolicy, + }, + ], + defaultCacheBehavior: config.defaultCacheBehavior, + enabled: config.enabled, + origin: config.origin, + restrictions: config.restrictions, + viewerCertificate: config.viewerCertificate, + }); + } +} + +``` + +* `eventType` (Required) - Specific event to trigger this function. Valid values: `viewerRequest`, `originRequest`, `viewerResponse`, `originResponse`. +* `lambdaArn` (Required) - ARN of the Lambda function. +* `includeBody` (Optional) - When set to true it exposes the request body to the lambda function. Defaults to false. Valid values: `true`, `false`. + +##### Function Association + +With CloudFront Functions in Amazon CloudFront, you can write lightweight functions in JavaScript for high-scale, latency-sensitive CDN customizations. You can associate a single function per event type. 
See [CloudFront Functions](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/cloudfront-functions.html) +for more information. + +Example configuration: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution"; +interface MyConfig { + allowedMethods: any; + cachedMethods: any; + pathPattern: any; + targetOriginId: any; + viewerProtocolPolicy: any; + defaultCacheBehavior: any; + enabled: any; + origin: any; + restrictions: any; + viewerCertificate: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new CloudfrontDistribution(this, "example", { + orderedCacheBehavior: [ + { + functionAssociation: [ + { + eventType: "viewer-request", + functionArn: Token.asString(awsCloudfrontFunctionExample.arn), + }, + ], + allowedMethods: config.allowedMethods, + cachedMethods: config.cachedMethods, + pathPattern: config.pathPattern, + targetOriginId: config.targetOriginId, + viewerProtocolPolicy: config.viewerProtocolPolicy, + }, + ], + defaultCacheBehavior: config.defaultCacheBehavior, + enabled: config.enabled, + origin: config.origin, + restrictions: config.restrictions, + viewerCertificate: config.viewerCertificate, + }); + } +} + +``` + +* `eventType` (Required) - Specific event to trigger this function. Valid values: `viewerRequest` or `viewerResponse`. +* `functionArn` (Required) - ARN of the CloudFront function. + +##### Cookies Arguments + +* `forward` (Required) - Whether you want CloudFront to forward cookies to the origin that is associated with this cache behavior. You can specify `all`, `none` or `whitelist`. If `whitelist`, you must include the subsequent `whitelistedNames`. +* `whitelistedNames` (Optional) - If you have specified `whitelist` to `forward`, the whitelisted cookies that you want CloudFront to forward to your origin. + +#### Custom Error Response Arguments + +* `errorCachingMinTtl` (Optional) - Minimum amount of time you want HTTP error codes to stay in CloudFront caches before CloudFront queries your origin to see whether the object has been updated. +* `errorCode` (Required) - 4xx or 5xx HTTP status code that you want to customize. +* `responseCode` (Optional) - HTTP status code that you want CloudFront to return with the custom error page to the viewer. +* `responsePagePath` (Optional) - Path of the custom error page (for example, `/custom404Html`). + +#### Default Cache Behavior Arguments + +The arguments for `defaultCacheBehavior` are the same as for +[`orderedCacheBehavior`](#cache-behavior-arguments), except for the `pathPattern` +argument should not be specified. + +#### Logging Config Arguments + +* `bucket` (Required) - Amazon S3 bucket to store the access logs in, for example, `myawslogbucketS3AmazonawsCom`. +* `includeCookies` (Optional) - Whether to include cookies in access logs (default: `false`). +* `prefix` (Optional) - Prefix to the access log filenames for this distribution, for example, `myprefix/`. + +#### Origin Arguments + +* `connectionAttempts` (Optional) - Number of times that CloudFront attempts to connect to the origin. Must be between 1-3. Defaults to 3. 
+* `connectionTimeout` (Optional) - Number of seconds that CloudFront waits when trying to establish a connection to the origin. Must be between 1-10. Defaults to 10. +* `customOriginConfig` - The [CloudFront custom origin](#custom-origin-config-arguments) configuration information. If an S3 origin is required, use `originAccessControlId` or `s3OriginConfig` instead. +* `domainName` (Required) - DNS domain name of either the S3 bucket, or web site of your custom origin. +* `customHeader` (Optional) - One or more sub-resources with `name` and `value` parameters that specify header data that will be sent to the origin (multiples allowed). +* `originAccessControlId` (Optional) - Unique identifier of a [CloudFront origin access control][8] for this origin. +* `originId` (Required) - Unique identifier for the origin. +* `originPath` (Optional) - Optional element that causes CloudFront to request your content from a directory in your Amazon S3 bucket or your custom origin. +* `originShield` - The [CloudFront Origin Shield](#origin-shield-arguments) configuration information. Using Origin Shield can help reduce the load on your origin. For more information, see [Using Origin Shield](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/origin-shield.html) in the Amazon CloudFront Developer Guide. +* `s3OriginConfig` - The [CloudFront S3 origin](#s3-origin-config-arguments) configuration information. If a custom origin is required, use `customOriginConfig` instead. + +##### Custom Origin Config Arguments + +* `httpPort` (Required) - HTTP port the custom origin listens on. +* `httpsPort` (Required) - HTTPS port the custom origin listens on. +* `originProtocolPolicy` (Required) - Origin protocol policy to apply to your origin. One of `httpOnly`, `httpsOnly`, or `matchViewer`. +* `originSslProtocols` (Required) - SSL/TLS protocols that you want CloudFront to use when communicating with your origin over HTTPS. A list of one or more of `ssLv3`, `tlSv1`, `tlSv11`, and `tlSv12`. +* `originKeepaliveTimeout` - (Optional) The Custom KeepAlive timeout, in seconds. By default, AWS enforces an upper limit of `60`. But you can request an [increase](http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/RequestAndResponseBehaviorCustomOrigin.html#request-custom-request-timeout). Defaults to `5`. +* `originReadTimeout` - (Optional) The Custom Read timeout, in seconds. By default, AWS enforces an upper limit of `60`. But you can request an [increase](http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/RequestAndResponseBehaviorCustomOrigin.html#request-custom-request-timeout). Defaults to `30`. + +##### Origin Shield Arguments + +* `enabled` (Required) - Whether Origin Shield is enabled. +* `originShieldRegion` (Required) - AWS Region for Origin Shield. To specify a region, use the region code, not the region name. For example, specify the US East (Ohio) region as us-east-2. + +##### S3 Origin Config Arguments + +* `originAccessIdentity` (Required) - The [CloudFront origin access identity][5] to associate with the origin. + +#### Origin Group Arguments + +* `originId` (Required) - Unique identifier for the origin group. +* `failoverCriteria` (Required) - The [failover criteria](#failover-criteria-arguments) for when to failover to the secondary origin. +* `member` (Required) - Ordered [member](#member-arguments) configuration blocks assigned to the origin group, where the first member is the primary origin. You must specify two members. 
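+
+Because the full examples on this page use S3 origins, here is a minimal, hedged sketch of the Custom Origin Config arguments described above. The domain name, origin ID, and the choice of managed cache policy ID (the same `CachingOptimized` ID shown in the cache policy import example) are assumptions, not part of the generated examples:
+
+```typescript
+// Hedged sketch: a distribution in front of a custom HTTPS origin such as a
+// load balancer. "app.example.com" and the origin ID are hypothetical; the
+// cachePolicyId is assumed to be the managed CachingOptimized policy.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution";
+
+class CustomOriginSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontDistribution(this, "custom_origin", {
+      enabled: true,
+      origin: [
+        {
+          domainName: "app.example.com",
+          originId: "myCustomOrigin",
+          customOriginConfig: {
+            httpPort: 80,
+            httpsPort: 443,
+            originProtocolPolicy: "https-only",
+            originSslProtocols: ["TLSv1.2"],
+          },
+        },
+      ],
+      defaultCacheBehavior: {
+        allowedMethods: ["GET", "HEAD"],
+        cachedMethods: ["GET", "HEAD"],
+        targetOriginId: "myCustomOrigin",
+        viewerProtocolPolicy: "redirect-to-https",
+        cachePolicyId: "658327ea-f89d-4fab-a63d-7e88639e58f6",
+      },
+      restrictions: {
+        geoRestriction: {
+          locations: [],
+          restrictionType: "none",
+        },
+      },
+      viewerCertificate: {
+        cloudfrontDefaultCertificate: true,
+      },
+    });
+  }
+}
+```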
+ +##### Failover Criteria Arguments + +* `statusCodes` (Required) - List of HTTP status codes for the origin group. + +##### Member Arguments + +* `originId` (Required) - Unique identifier of the member origin. + +#### Restrictions Arguments + +The `restrictions` sub-resource takes another single sub-resource named `geoRestriction` (see the example for usage). + +The arguments of `geoRestriction` are: + +* `locations` (Required) - [ISO 3166-1-alpha-2 codes][4] for which you want CloudFront either to distribute your content (`whitelist`) or not distribute your content (`blacklist`). If the type is specified as `none` an empty array can be used. +* `restrictionType` (Required) - Method that you want to use to restrict distribution of your content by country: `none`, `whitelist`, or `blacklist`. + +#### Viewer Certificate Arguments + +* `acmCertificateArn` - ARN of the [AWS Certificate Manager][6] certificate that you wish to use with this distribution. Specify this, `cloudfrontDefaultCertificate`, or `iamCertificateId`. The ACM certificate must be in US-EAST-1. +* `cloudfrontDefaultCertificate` - `true` if you want viewers to use HTTPS to request your objects and you're using the CloudFront domain name for your distribution. Specify this, `acmCertificateArn`, or `iamCertificateId`. +* `iamCertificateId` - IAM certificate identifier of the custom viewer certificate for this distribution if you are using a custom domain. Specify this, `acmCertificateArn`, or `cloudfrontDefaultCertificate`. +* `minimumProtocolVersion` - Minimum version of the SSL protocol that you want CloudFront to use for HTTPS connections. Can only be set if `cloudfront_default_certificate = false`. See all possible values in [this](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/secure-connections-supported-viewer-protocols-ciphers.html) table under "Security policy." Some examples include: `tlSv122019` and `tlSv122021`. Default: `tlSv1`. **NOTE**: If you are using a custom certificate (specified with `acmCertificateArn` or `iamCertificateId`), and have specified `sniOnly` in `sslSupportMethod`, `tlSv1` or later must be specified. If you have specified `vip` in `sslSupportMethod`, only `ssLv3` or `tlSv1` can be specified. If you have specified `cloudfrontDefaultCertificate`, `tlSv1` must be specified. +* `sslSupportMethod` - How you want CloudFront to serve HTTPS requests. One of `vip` or `sniOnly`. Required if you specify `acmCertificateArn` or `iamCertificateId`. **NOTE:** `vip` causes CloudFront to use a dedicated IP address and may incur extra charges. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier for the distribution. For example: `edfdvbd632Bhds5`. +* `arn` - ARN for the distribution. For example: `arn:aws:cloudfront::123456789012:distribution/edfdvbd632Bhds5`, where `123456789012` is your AWS account ID. +* `callerReference` - Internal value used by CloudFront to allow future updates to the distribution configuration. +* `status` - Current status of the distribution. `deployed` if the distribution's information is fully propagated throughout the Amazon CloudFront system. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
+* `trustedKeyGroups` - List of nested attributes for active trusted key groups, if the distribution is set up to serve private content with signed URLs.
+    * `enabled` - `true` if any of the key groups have public keys that CloudFront can use to verify the signatures of signed URLs and signed cookies.
+    * `items` - List of nested attributes for each key group.
+        * `keyGroupId` - ID of the key group that contains the public keys.
+        * `keyPairIds` - Set of CloudFront key pair IDs.
+* `trustedSigners` - List of nested attributes for active trusted signers, if the distribution is set up to serve private content with signed URLs.
+    * `enabled` - `true` if any of the AWS accounts listed as trusted signers have active CloudFront key pairs.
+    * `items` - List of nested attributes for each trusted signer.
+        * `awsAccountNumber` - AWS account ID or `self`.
+        * `keyPairIds` - Set of active CloudFront key pairs associated with the signer account.
+* `domainName` - Domain name corresponding to the distribution. For example: `d604721fxaaqy9.cloudfront.net`.
+* `lastModifiedTime` - Date and time the distribution was last modified.
+* `inProgressValidationBatches` - Number of invalidation batches currently in progress.
+* `etag` - Current version of the distribution's information. For example: `E2QWRUHAPOMQZL`.
+* `hostedZoneId` - CloudFront Route 53 zone ID that can be used to route an [Alias Resource Record Set][7] to. This attribute is simply an alias for the zone ID `Z2FDTNDATAQYW2`.
+
+[1]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Introduction.html
+[2]: https://docs.aws.amazon.com/cloudfront/latest/APIReference/API_CreateDistribution.html
+[3]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-restricting-access-to-s3.html
+[4]: http://www.iso.org/iso/country_codes/iso_3166_code_lists/country_names_and_code_elements.htm
+[5]: /docs/providers/aws/r/cloudfront_origin_access_identity.html
+[6]: https://aws.amazon.com/certificate-manager/
+[7]: http://docs.aws.amazon.com/Route53/latest/APIReference/CreateAliasRRSAPI.html
+[8]: /docs/providers/aws/r/cloudfront_origin_access_control.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Distributions using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront Distributions using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_distribution.distribution E74FTE3EXAMPLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_field_level_encryption_config.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_field_level_encryption_config.html.markdown
new file mode 100644
index 00000000000..ac0f8b050d1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_field_level_encryption_config.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_field_level_encryption_config"
+description: |-
+  Provides a CloudFront Field-level Encryption Config resource.
+---
+
+
+
+# Resource: aws_cloudfront_field_level_encryption_config
+
+Provides a CloudFront Field-level Encryption Config resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontFieldLevelEncryptionConfig } from "./.gen/providers/aws/cloudfront-field-level-encryption-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontFieldLevelEncryptionConfig(this, "test", {
+      comment: "test comment",
+      contentTypeProfileConfig: {
+        contentTypeProfiles: {
+          items: [
+            {
+              contentType: "application/x-www-form-urlencoded",
+              format: "URLEncoded",
+            },
+          ],
+        },
+        forwardWhenContentTypeIsUnknown: true,
+      },
+      queryArgProfileConfig: {
+        forwardWhenQueryArgProfileIsUnknown: true,
+        queryArgProfiles: {
+          items: [
+            {
+              profileId: Token.asString(
+                awsCloudfrontFieldLevelEncryptionProfileTest.id
+              ),
+              queryArg: "Arg1",
+            },
+          ],
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `comment` - (Optional) An optional comment about the Field Level Encryption Config.
+* `contentTypeProfileConfig` - (Required) [Content Type Profile Config](#content-type-profile-config) specifies when to forward content if a content type isn't recognized, and the profiles to use by default in a request when a query argument doesn't specify a profile to use.
+* `queryArgProfileConfig` - (Required) [Query Arg Profile Config](#query-arg-profile-config) that specifies when to forward content if a profile isn't found and the profile that can be provided as a query argument in a request.
+
+### Content Type Profile Config
+
+* `forwardWhenContentTypeIsUnknown` - (Required) Specifies what to do when an unknown content type is provided for the profile. If true, content is forwarded without being encrypted when the content type is unknown. If false (the default), an error is returned when the content type is unknown.
+* `contentTypeProfiles` - (Required) Object that contains an attribute `items` that contains the list of configurations for a field-level encryption content type-profile. See [Content Type Profile](#content-type-profile).
+
+### Content Type Profile
+
+* `contentType` - (Required) The content type for a field-level encryption content type-profile mapping. Valid value is `application/x-www-form-urlencoded`.
+* `format` - (Required) The format for a field-level encryption content type-profile mapping. Valid value is `URLEncoded`.
+* `profileId` - (Optional) The profile ID for a field-level encryption content type-profile mapping.
+
+### Query Arg Profile Config
+
+* `forwardWhenQueryArgProfileIsUnknown` - (Required) Flag to set if you want a request to be forwarded to the origin even if the profile specified by the field-level encryption query argument, `fle-profile`, is unknown.
+* `queryArgProfiles` - (Optional) Object that contains an attribute `items` that contains the list of profiles specified for query argument-profile mapping for field-level encryption. See [Query Arg Profile](#query-arg-profile), and the sketch below for wiring a managed profile's ID into this mapping.
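+
+For illustration, the following sketch (hand-written, not produced by `cdktf convert`) shows one way to supply `profileId`: referencing a field-level encryption profile managed in the same stack, instead of the escape-hatch token used in the generated example above. The profile name, query argument, field pattern, and placeholder public key ID are assumptions:
+
+```typescript
+// A minimal sketch: the profile's id attribute feeds the query
+// argument-profile mapping of the config.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CloudfrontFieldLevelEncryptionConfig } from "./.gen/providers/aws/cloudfront-field-level-encryption-config";
+import { CloudfrontFieldLevelEncryptionProfile } from "./.gen/providers/aws/cloudfront-field-level-encryption-profile";
+
+class FleSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Hypothetical profile; publicKeyId would normally come from an
+    // aws_cloudfront_public_key resource in the same stack.
+    const profile = new CloudfrontFieldLevelEncryptionProfile(this, "profile", {
+      name: "example-profile",
+      encryptionEntities: {
+        items: [
+          {
+            publicKeyId: "K2EXAMPLEPUBLICKEY", // placeholder ID
+            providerId: "example-provider",
+            fieldPatterns: { items: ["CreditCard"] },
+          },
+        ],
+      },
+    });
+    new CloudfrontFieldLevelEncryptionConfig(this, "config", {
+      contentTypeProfileConfig: {
+        forwardWhenContentTypeIsUnknown: true,
+        contentTypeProfiles: {
+          items: [
+            {
+              contentType: "application/x-www-form-urlencoded",
+              format: "URLEncoded",
+            },
+          ],
+        },
+      },
+      queryArgProfileConfig: {
+        forwardWhenQueryArgProfileIsUnknown: false,
+        queryArgProfiles: {
+          // fle-profile=CreditCardProfile in a request selects this mapping
+          items: [{ profileId: profile.id, queryArg: "CreditCardProfile" }],
+        },
+      },
+    });
+  }
+}
+```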
+
+### Query Arg Profile
+
+* `profileId` - (Required) ID of the profile to use for field-level encryption query argument-profile mapping.
+* `queryArg` - (Required) Query argument for field-level encryption query argument-profile mapping.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `callerReference` - Internal value used by CloudFront to allow future updates to the Field Level Encryption Config.
+* `etag` - The current version of the Field Level Encryption Config. For example: `E2QWRUHAPOMQZL`.
+* `id` - The identifier for the Field Level Encryption Config. For example: `K3D5EWEUDCCXON`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Field Level Encryption Config using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cloudfront Field Level Encryption Config using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_field_level_encryption_config.config E74FTE3AEXAMPLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_field_level_encryption_profile.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_field_level_encryption_profile.html.markdown
new file mode 100644
index 00000000000..cab4b0f91d9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_field_level_encryption_profile.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_field_level_encryption_profile"
+description: |-
+  Provides a CloudFront Field-level Encryption Profile resource.
+---
+
+
+
+# Resource: aws_cloudfront_field_level_encryption_profile
+
+Provides a CloudFront Field-level Encryption Profile resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontFieldLevelEncryptionProfile } from "./.gen/providers/aws/cloudfront-field-level-encryption-profile";
+import { CloudfrontPublicKey } from "./.gen/providers/aws/cloudfront-public-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudfrontPublicKey(this, "example", {
+      comment: "test public key",
+      encodedKey: Token.asString(Fn.file("public_key.pem")),
+      name: "test_key",
+    });
+    new CloudfrontFieldLevelEncryptionProfile(this, "test", {
+      comment: "test comment",
+      encryptionEntities: {
+        items: [
+          {
+            fieldPatterns: {
+              items: ["DateOfBirth"],
+            },
+            providerId: "test provider",
+            publicKeyId: example.id,
+          },
+        ],
+      },
+      name: "test profile",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Field Level Encryption Profile.
+* `comment` - (Optional) An optional comment about the Field Level Encryption Profile.
+* `encryptionEntities` - (Required) The [encryption entities](#encryption-entities) config block for field-level encryption profiles that contains an attribute `items` which includes the encryption key and field pattern specifications.
+
+### Encryption Entities
+
+* `publicKeyId` - (Required) The public key associated with a set of field-level encryption patterns, to be used when encrypting the fields that match the patterns.
+* `providerId` - (Required) The provider associated with the public key being used for encryption.
+* `fieldPatterns` - (Required) Object that contains an attribute `items` that contains the list of field patterns in a field-level encryption content type profile that specify the fields you want to be encrypted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `callerReference` - Internal value used by CloudFront to allow future updates to the Field Level Encryption Profile.
+* `etag` - The current version of the Field Level Encryption Profile. For example: `E2QWRUHAPOMQZL`.
+* `id` - The identifier for the Field Level Encryption Profile. For example: `K3D5EWEUDCCXON`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Field Level Encryption Profile using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cloudfront Field Level Encryption Profile using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_field_level_encryption_profile.profile K3D5EWEUDCCXON
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_function.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_function.html.markdown
new file mode 100644
index 00000000000..5d0a8b0393b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_function.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_function"
+description: |-
+  Provides a CloudFront Function resource. With CloudFront Functions in Amazon CloudFront, you can write lightweight functions in JavaScript for high-scale, latency-sensitive CDN customizations.
+---
+
+
+
+# Resource: aws_cloudfront_function
+
+Provides a CloudFront Function resource. With CloudFront Functions in Amazon CloudFront, you can write lightweight functions in JavaScript for high-scale, latency-sensitive CDN customizations.
+
+See [CloudFront Functions](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/cloudfront-functions.html).
+
+~> **NOTE:** You cannot delete a function if it’s associated with a cache behavior. First, update your distributions to remove the function association from all cache behaviors, then delete the function.
+
+## Example Usage
+
+### Basic Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontFunction } from "./.gen/providers/aws/cloudfront-function";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontFunction(this, "test", {
+      code: Token.asString(Fn.file("${path.module}/function.js")),
+      comment: "my function",
+      name: "test",
+      publish: true,
+      runtime: "cloudfront-js-1.0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Unique name for your CloudFront Function.
+* `code` - (Required) Source code of the function.
+* `runtime` - (Required) Identifier of the function's runtime. Currently only `cloudfront-js-1.0` is valid.
+
+The following arguments are optional:
+
+* `comment` - (Optional) Comment.
+* `publish` - (Optional) Whether to publish creation/change as Live CloudFront Function Version. Defaults to `true`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) identifying your CloudFront Function.
+* `etag` - ETag hash of the function. This is the value for the `development` stage of the function.
+* `liveStageEtag` - ETag hash of any `live` stage of the function.
+* `status` - Status of the function. Can be `UNPUBLISHED`, `UNASSOCIATED` or `ASSOCIATED`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Functions using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront Functions using the `name`. For example:
+
+```console
+% terraform import aws_cloudfront_function.test my_test_function
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_key_group.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_key_group.html.markdown
new file mode 100644
index 00000000000..207e040083e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_key_group.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_key_group"
+description: |-
+  Provides a CloudFront key group.
+---
+
+
+
+# Resource: aws_cloudfront_key_group
+
+## Example Usage
+
+The following example creates a CloudFront key group.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontKeyGroup } from "./.gen/providers/aws/cloudfront-key-group";
+import { CloudfrontPublicKey } from "./.gen/providers/aws/cloudfront-public-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudfrontPublicKey(this, "example", {
+      comment: "example public key",
+      encodedKey: Token.asString(Fn.file("public_key.pem")),
+      name: "example-key",
+    });
+    const awsCloudfrontKeyGroupExample = new CloudfrontKeyGroup(
+      this,
+      "example_1",
+      {
+        comment: "example key group",
+        items: [example.id],
+        name: "example-key-group",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudfrontKeyGroupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `comment` - (Optional) A comment to describe the key group.
+* `items` - (Required) A list of the identifiers of the public keys in the key group.
+* `name` - (Required) A name to identify the key group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - The identifier for this version of the key group.
+* `id` - The identifier for the key group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Key Group using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront Key Group using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_key_group.example 4b4f2r1c-315d-5c2e-f093-216t50jed10f
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_monitoring_subscription.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_monitoring_subscription.html.markdown
new file mode 100644
index 00000000000..9f32d227c57
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_monitoring_subscription.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_monitoring_subscription"
+description: |-
+  Provides a CloudFront monitoring subscription resource.
+---
+
+
+
+# Resource: aws_cloudfront_monitoring_subscription
+
+Provides a CloudFront monitoring subscription resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontMonitoringSubscription } from "./.gen/providers/aws/cloudfront-monitoring-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontMonitoringSubscription(this, "example", {
+      distributionId: Token.asString(awsCloudfrontDistributionExample.id),
+      monitoringSubscription: {
+        realtimeMetricsSubscriptionConfig: {
+          realtimeMetricsSubscriptionStatus: "Enabled",
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `distributionId` - (Required) The ID of the distribution that you are enabling metrics for.
+* `monitoringSubscription` - (Required) A monitoring subscription. This structure contains information about whether additional CloudWatch metrics are enabled for a given CloudFront distribution. See below.
+
+### monitoring_subscription
+
+* `realtimeMetricsSubscriptionConfig` - (Required) A subscription configuration for additional CloudWatch metrics. See below.
+
+### realtime_metrics_subscription_config
+
+* `realtimeMetricsSubscriptionStatus` - (Required) A flag that indicates whether additional CloudWatch metrics are enabled for a given CloudFront distribution. Valid values are `Enabled` and `Disabled`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the CloudFront monitoring subscription, which corresponds to the `distributionId`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront monitoring subscription using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront monitoring subscription using the id. For example:
+
+```console
+% terraform import aws_cloudfront_monitoring_subscription.example E3QYSUHO4VYRGB
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_origin_access_control.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_origin_access_control.html.markdown
new file mode 100644
index 00000000000..fcaf99cc8dd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_origin_access_control.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_control"
+description: |-
+  Terraform resource for managing an AWS CloudFront Origin Access Control.
+---
+
+
+
+# Resource: aws_cloudfront_origin_access_control
+
+Manages an AWS CloudFront Origin Access Control, which is used by CloudFront Distributions with an Amazon S3 bucket as the origin.
+
+Read more about Origin Access Control in the [CloudFront Developer Guide](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-restricting-access-to-s3.html).
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontOriginAccessControl } from "./.gen/providers/aws/cloudfront-origin-access-control";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontOriginAccessControl(this, "example", {
+      description: "Example Policy",
+      name: "example",
+      originAccessControlOriginType: "s3",
+      signingBehavior: "always",
+      signingProtocol: "sigv4",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that identifies the Origin Access Control.
+* `description` - (Optional) The description of the Origin Access Control. Defaults to "Managed by Terraform" if omitted.
+* `originAccessControlOriginType` - (Required) The type of origin that this Origin Access Control is for. Valid values are `s3` and `mediastore`.
+* `signingBehavior` - (Required) Specifies which requests CloudFront signs. Specify `always` for the most common use case. Allowed values: `always`, `never`, and `no-override`.
+* `signingProtocol` - (Required) Determines how CloudFront signs (authenticates) requests. The only valid value is `sigv4`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier of this Origin Access Control.
+* `etag` - The current version of this Origin Access Control.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Origin Access Control using the `id`. For example:

+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront Origin Access Control using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_origin_access_control.example E327GJI25M56DG
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_origin_access_identity.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_origin_access_identity.html.markdown
new file mode 100644
index 00000000000..8dbd6f8db1e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_origin_access_identity.html.markdown
@@ -0,0 +1,186 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_access_identity"
+description: |-
+  Provides a CloudFront origin access identity.
+---
+
+
+
+# Resource: aws_cloudfront_origin_access_identity
+
+Creates an Amazon CloudFront origin access identity.
+
+For information about CloudFront distributions, see the
+[Amazon CloudFront Developer Guide][1]. For more information on generating
+origin access identities, see
+[Using an Origin Access Identity to Restrict Access to Your Amazon S3 Content][2].
+
+## Example Usage
+
+The following example creates a CloudFront origin access identity.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontOriginAccessIdentity } from "./.gen/providers/aws/cloudfront-origin-access-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontOriginAccessIdentity(this, "example", {
+      comment: "Some comment",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `comment` (Optional) - An optional comment for the origin access identity.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier for the distribution. For example: `EDFDVBD632BHDS5`.
+* `callerReference` - Internal value used by CloudFront to allow future
+  updates to the origin access identity.
+* `cloudfrontAccessIdentityPath` - A shortcut to the full path for the
+  origin access identity to use in CloudFront, see below.
+* `etag` - The current version of the origin access identity's information.
+  For example: `E2QWRUHAPOMQZL`.
+* `iamArn` - A pre-generated ARN for use in S3 bucket policies (see below).
+  Example: `arn:aws:iam::cloudfront:user/CloudFront Origin Access Identity
+  E2QWRUHAPOMQZL`.
+* `s3CanonicalUserId` - The Amazon S3 canonical user ID for the origin
+  access identity, which you use when giving the origin access identity read
+  permission to an object in Amazon S3.
+
+## Using With CloudFront
+
+Normally, when referencing an origin access identity in CloudFront, you need to
+prefix the ID with the `origin-access-identity/cloudfront/` special path.
+The `cloudfrontAccessIdentityPath` allows this to be circumvented.
+The below snippet demonstrates use with the `s3OriginConfig` structure for the
+[`awsCloudfrontDistribution`][3] resource:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontDistribution } from "./.gen/providers/aws/cloudfront-distribution";
+interface MyConfig {
+  domainName: any;
+  originId: any;
+  defaultCacheBehavior: any;
+  enabled: any;
+  restrictions: any;
+  viewerCertificate: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new CloudfrontDistribution(this, "example", {
+      origin: [
+        {
+          s3OriginConfig: {
+            originAccessIdentity: Token.asString(
+              awsCloudfrontOriginAccessIdentityExample.cloudfrontAccessIdentityPath
+            ),
+          },
+          domainName: config.domainName,
+          originId: config.originId,
+        },
+      ],
+      defaultCacheBehavior: config.defaultCacheBehavior,
+      enabled: config.enabled,
+      restrictions: config.restrictions,
+      viewerCertificate: config.viewerCertificate,
+    });
+  }
+}
+
+```
+
+### Updating your bucket policy
+
+Note that the AWS API may translate the `s3CanonicalUserId` `CanonicalUser`
+principal into an `AWS` IAM ARN principal when supplied in an
+[`awsS3Bucket`][4] bucket policy, causing spurious diffs in Terraform. If
+you see this behaviour, use the `iamArn` instead:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const s3Policy = new DataAwsIamPolicyDocument(this, "s3_policy", {
+      statement: [
+        {
+          actions: ["s3:GetObject"],
+          principals: [
+            {
+              identifiers: [example.iamArn],
+              type: "AWS",
+            },
+          ],
+          resources: ["${" + awsS3BucketExample.arn + "}/*"],
+        },
+      ],
+    });
+    new S3BucketPolicy(this, "example", {
+      bucket: Token.asString(awsS3BucketExample.id),
+      policy: Token.asString(s3Policy.json),
+    });
+  }
+}
+
+```
+
+[1]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Introduction.html
+[2]: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-restricting-access-to-s3.html
+[3]: /docs/providers/aws/r/cloudfront_distribution.html
+[4]: /docs/providers/aws/r/s3_bucket.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Origin Access Identities using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cloudfront Origin Access Identities using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_origin_access_identity.origin_access E74FTE3AEXAMPLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_origin_request_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_origin_request_policy.html.markdown
new file mode 100644
index 00000000000..45367e49658
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_origin_request_policy.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_origin_request_policy"
+description: |-
+  Determines the values that CloudFront includes in requests that it sends to the origin.
+---
+
+
+
+# Resource: aws_cloudfront_origin_request_policy
+
+## Example Usage
+
+The following example creates a CloudFront origin request policy.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontOriginRequestPolicy } from "./.gen/providers/aws/cloudfront-origin-request-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontOriginRequestPolicy(this, "example", {
+      comment: "example comment",
+      cookiesConfig: {
+        cookieBehavior: "whitelist",
+        cookies: {
+          items: ["example"],
+        },
+      },
+      headersConfig: {
+        headerBehavior: "whitelist",
+        headers: {
+          items: ["example"],
+        },
+      },
+      name: "example-policy",
+      queryStringsConfig: {
+        queryStringBehavior: "whitelist",
+        queryStrings: {
+          items: ["example"],
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Unique name to identify the origin request policy.
+* `comment` - (Optional) Comment to describe the origin request policy.
+* `cookiesConfig` - (Required) Object that determines whether any cookies in viewer requests (and if so, which cookies) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Cookies Config](#cookies-config) for more information.
+* `headersConfig` - (Required) Object that determines whether any HTTP headers (and if so, which headers) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Headers Config](#headers-config) for more information.
+* `queryStringsConfig` - (Required) Object that determines whether any URL query strings in viewer requests (and if so, which query strings) are included in the origin request key and automatically included in requests that CloudFront sends to the origin. See [Query String Config](#query-string-config) for more information.
+
+### Cookies Config
+
+* `cookieBehavior` - (Required) Determines whether any cookies in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`, `allExcept`.
+* `cookies` - (Optional) Object that contains a list of cookie names. See [Items](#items) for more information.
+
+### Headers Config
+
+* `headerBehavior` - (Required) Determines whether any HTTP headers are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `allViewer`, `allViewerAndWhitelistCloudFront`, `allExcept`.
+* `headers` - (Optional) Object that contains a list of header names. See [Items](#items) for more information.
+
+### Query String Config
+
+* `queryStringBehavior` - (Required) Determines whether any URL query strings in viewer requests are included in the origin request key and automatically included in requests that CloudFront sends to the origin. Valid values are `none`, `whitelist`, `all`, `allExcept`.
+* `queryStrings` - (Optional) Object that contains a list of query string names. See [Items](#items) for more information.
+
+### Items
+
+* `items` - (Required) List of item names (cookies, headers, or query strings).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - The current version of the origin request policy.
+* `id` - The identifier for the origin request policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Origin Request Policies using the `id`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cloudfront Origin Request Policies using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_origin_request_policy.policy ccca32ef-dce3-4df3-80df-1bd3000bc4d3
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_public_key.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_public_key.html.markdown
new file mode 100644
index 00000000000..03c83cab59c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_public_key.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_public_key"
+description: |-
+  Provides a CloudFront Public Key which you add to CloudFront to use with features like field-level encryption.
+---
+
+
+
+# Resource: aws_cloudfront_public_key
+
+## Example Usage
+
+The following example creates a CloudFront public key.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontPublicKey } from "./.gen/providers/aws/cloudfront-public-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontPublicKey(this, "example", {
+      comment: "test public key",
+      encodedKey: Token.asString(Fn.file("public_key.pem")),
+      name: "test_key",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `comment` - (Optional) An optional comment about the public key.
+* `encodedKey` - (Required) The encoded public key that you want to add to CloudFront to use with features like field-level encryption.
+* `name` - (Optional) The name for the public key. By default, generated by Terraform.
+* `namePrefix` - (Optional) The name prefix for the public key. Conflicts with `name`.
+
+**NOTE:** When setting the `encodedKey` value, the string must end with a newline. Otherwise, successive runs of Terraform will attempt to recreate the `awsCloudfrontPublicKey` resource.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `callerReference` - Internal value used by CloudFront to allow future updates to the public key configuration.
+* `etag` - The current version of the public key. For example: `E2QWRUHAPOMQZL`.
+* `id` - The identifier for the public key. For example: `K3D5EWEUDCCXON`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront Public Key using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront Public Key using the `id`.
For example: + +```console +% terraform import aws_cloudfront_public_key.example K3D5EWEUDCCXON +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudfront_realtime_log_config.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_realtime_log_config.html.markdown new file mode 100644 index 00000000000..7a3b0a33752 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudfront_realtime_log_config.html.markdown @@ -0,0 +1,143 @@ +--- +subcategory: "CloudFront" +layout: "aws" +page_title: "AWS: aws_cloudfront_realtime_log_config" +description: |- + Provides a CloudFront real-time log configuration resource. +--- + + + +# Resource: aws_cloudfront_realtime_log_config + +Provides a CloudFront real-time log configuration resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudfrontRealtimeLogConfig } from "./.gen/providers/aws/cloudfront-realtime-log-config"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["cloudfront.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: [ + "kinesis:DescribeStreamSummary", + "kinesis:DescribeStream", + "kinesis:PutRecord", + "kinesis:PutRecords", + ], + effect: "Allow", + resources: [Token.asString(awsKinesisStreamExample.arn)], + }, + ], + }); + const awsIamRoleExample = new IamRole(this, "example_2", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "cloudfront-realtime-log-config-example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + const awsIamRolePolicyExample = new IamRolePolicy(this, "example_3", { + name: "cloudfront-realtime-log-config-example", + policy: Token.asString(example.json), + role: Token.asString(awsIamRoleExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyExample.overrideLogicalId("example"); + const awsCloudfrontRealtimeLogConfigExample = + new CloudfrontRealtimeLogConfig(this, "example_4", { + dependsOn: [awsIamRolePolicyExample], + endpoint: { + kinesisStreamConfig: { + roleArn: Token.asString(awsIamRoleExample.arn), + streamArn: Token.asString(awsKinesisStreamExample.arn), + }, + streamType: "Kinesis", + }, + fields: ["timestamp", "c-ip"], + name: "example", + samplingRate: 75, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsCloudfrontRealtimeLogConfigExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `endpoint` - (Required) The Amazon Kinesis data streams where real-time log data is sent.
+* `fields` - (Required) The fields that are included in each real-time log record. See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-fields) for supported values.
+* `name` - (Required) The unique name to identify this real-time log configuration.
+* `samplingRate` - (Required) The sampling rate for this real-time log configuration. The sampling rate determines the percentage of viewer requests that are represented in the real-time log data. An integer between `1` and `100`, inclusive.
+
+The `endpoint` object supports the following:
+
+* `kinesisStreamConfig` - (Required) The Amazon Kinesis data stream configuration.
+* `streamType` - (Required) The type of data stream where real-time log data is sent. The only valid value is `Kinesis`.
+
+The `kinesisStreamConfig` object supports the following:
+
+* `roleArn` - (Required) The ARN of an [IAM role](iam_role.html) that CloudFront can use to send real-time log data to the Kinesis data stream.
+See the [AWS documentation](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/real-time-logs.html#understand-real-time-log-config-iam-role) for more information.
+* `streamArn` - (Required) The ARN of the [Kinesis data stream](kinesis_stream.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the CloudFront real-time log configuration.
+* `arn` - The ARN (Amazon Resource Name) of the CloudFront real-time log configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudFront real-time log configurations using the ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudFront real-time log configurations using the ARN. For example:
+
+```console
+% terraform import aws_cloudfront_realtime_log_config.example arn:aws:cloudfront::111122223333:realtime-log-config/ExampleNameForRealtimeLogConfig
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudfront_response_headers_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudfront_response_headers_policy.html.markdown
new file mode 100644
index 00000000000..b0108f7e358
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudfront_response_headers_policy.html.markdown
@@ -0,0 +1,235 @@
+---
+subcategory: "CloudFront"
+layout: "aws"
+page_title: "AWS: aws_cloudfront_response_headers_policy"
+description: |-
+  Provides a CloudFront response headers policy resource.
+---
+
+
+
+# Resource: aws_cloudfront_response_headers_policy
+
+Provides a CloudFront response headers policy resource.
+A response headers policy contains information about a set of HTTP response headers and their values.
+After you create a response headers policy, you can use its ID to attach it to one or more cache behaviors in a CloudFront distribution.
+When it’s attached to a cache behavior, CloudFront adds the headers in the policy to every response that it sends for requests that match the cache behavior.
+
+## Example Usage
+
+The example below creates a CloudFront response headers policy.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontResponseHeadersPolicy } from "./.gen/providers/aws/cloudfront-response-headers-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontResponseHeadersPolicy(this, "example", {
+      comment: "test comment",
+      corsConfig: {
+        accessControlAllowCredentials: true,
+        accessControlAllowHeaders: {
+          items: ["test"],
+        },
+        accessControlAllowMethods: {
+          items: ["GET"],
+        },
+        accessControlAllowOrigins: {
+          items: ["test.example.com"],
+        },
+        originOverride: true,
+      },
+      name: "example-policy",
+    });
+  }
+}
+
+```
+
+The example below creates a CloudFront response headers policy with a custom headers config.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontResponseHeadersPolicy } from "./.gen/providers/aws/cloudfront-response-headers-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontResponseHeadersPolicy(this, "example", {
+      customHeadersConfig: {
+        items: [
+          {
+            header: "X-Permitted-Cross-Domain-Policies",
+            override: true,
+            value: "none",
+          },
+          {
+            header: "X-Test",
+            override: true,
+            value: "none",
+          },
+        ],
+      },
+      name: "example-headers-policy",
+    });
+  }
+}
+
+```
+
+The example below creates a CloudFront response headers policy with a custom headers config and server timing headers config.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudfrontResponseHeadersPolicy } from "./.gen/providers/aws/cloudfront-response-headers-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontResponseHeadersPolicy(this, "example", {
+      customHeadersConfig: {
+        items: [
+          {
+            header: "X-Permitted-Cross-Domain-Policies",
+            override: true,
+            value: "none",
+          },
+        ],
+      },
+      name: "example-headers-policy",
+      serverTimingHeadersConfig: {
+        enabled: true,
+        samplingRate: 50,
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A unique name to identify the response headers policy.
+* `comment` - (Optional) A comment to describe the response headers policy. The comment cannot be longer than 128 characters.
+* `corsConfig` - (Optional) A configuration for a set of HTTP response headers that are used for Cross-Origin Resource Sharing (CORS). See [Cors Config](#cors-config) for more information.
+* `customHeadersConfig` - (Optional) Object that contains an attribute `items` that contains a list of custom headers. See [Custom Header](#custom-header) for more information.
+* `removeHeadersConfig` - (Optional) A configuration for a set of HTTP headers to remove from the HTTP response. Object that contains an attribute `items` that contains a list of headers. See [Remove Header](#remove-header) for more information.
+* `securityHeadersConfig` - (Optional) A configuration for a set of security-related HTTP response headers. See [Security Headers Config](#security-headers-config) for more information.
+* `serverTimingHeadersConfig` - (Optional) A configuration for enabling the `Server-Timing` header in HTTP responses sent from CloudFront. See [Server Timing Headers Config](#server-timing-headers-config) for more information.
+
+### Cors Config
+
+* `accessControlAllowCredentials` - (Required) A Boolean value that CloudFront uses as the value for the `Access-Control-Allow-Credentials` HTTP response header.
+* `accessControlAllowHeaders` - (Required) Object that contains an attribute `items` that contains a list of HTTP header names that CloudFront includes as values for the `Access-Control-Allow-Headers` HTTP response header.
+* `accessControlAllowMethods` - (Required) Object that contains an attribute `items` that contains a list of HTTP methods that CloudFront includes as values for the `Access-Control-Allow-Methods` HTTP response header. Valid values: `GET` | `POST` | `OPTIONS` | `PUT` | `DELETE` | `HEAD` | `ALL`
+* `accessControlAllowOrigins` - (Required) Object that contains an attribute `items` that contains a list of origins that CloudFront can use as the value for the `Access-Control-Allow-Origin` HTTP response header.
+* `accessControlExposeHeaders` - (Optional) Object that contains an attribute `items` that contains a list of HTTP headers that CloudFront includes as values for the `Access-Control-Expose-Headers` HTTP response header.
+* `accessControlMaxAgeSec` - (Optional) A number that CloudFront uses as the value for the `Access-Control-Max-Age` HTTP response header.
+* `originOverride` - (Required) A Boolean value that determines how CloudFront behaves for the HTTP response header.
+
+### Custom Header
+
+* `header` - (Required) The HTTP response header name.
+* `override` - (Required) Whether CloudFront overrides a response header with the same name received from the origin with the header specified here.
+* `value` - (Required) The value for the HTTP response header.
+
+### Remove Header
+
+* `header` - (Required) The HTTP header name.
+
+### Security Headers Config
+
+* `contentSecurityPolicy` - (Optional) The policy directives and their values that CloudFront includes as values for the `Content-Security-Policy` HTTP response header. See [Content Security Policy](#content-security-policy) for more information.
+* `contentTypeOptions` - (Optional) Determines whether CloudFront includes the `X-Content-Type-Options` HTTP response header with its value set to `nosniff`. See [Content Type Options](#content-type-options) for more information.
+* `frameOptions` - (Optional) Determines whether CloudFront includes the `X-Frame-Options` HTTP response header and the header’s value. See [Frame Options](#frame-options) for more information.
+* `referrerPolicy` - (Optional) Determines whether CloudFront includes the `Referrer-Policy` HTTP response header and the header’s value. See [Referrer Policy](#referrer-policy) for more information.
+* `strictTransportSecurity` - (Optional) Determines whether CloudFront includes the `Strict-Transport-Security` HTTP response header and the header’s value. See [Strict Transport Security](#strict-transport-security) for more information.
+* `xssProtection` - (Optional) Determines whether CloudFront includes the `X-XSS-Protection` HTTP response header and the header’s value. See [XSS Protection](#xss-protection) for more information.
+
+### Content Security Policy
+
+* `contentSecurityPolicy` - (Required) The policy directives and their values that CloudFront includes as values for the `Content-Security-Policy` HTTP response header.
+* `override` - (Required) Whether CloudFront overrides the `Content-Security-Policy` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Content Type Options
+
+* `override` - (Required) Whether CloudFront overrides the `X-Content-Type-Options` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Frame Options
+
+* `frameOption` - (Required) The value of the `X-Frame-Options` HTTP response header. Valid values: `DENY` | `SAMEORIGIN`
+* `override` - (Required) Whether CloudFront overrides the `X-Frame-Options` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Referrer Policy
+
+* `referrerPolicy` - (Required) The value of the `Referrer-Policy` HTTP response header. Valid values: `no-referrer` | `no-referrer-when-downgrade` | `origin` | `origin-when-cross-origin` | `same-origin` | `strict-origin` | `strict-origin-when-cross-origin` | `unsafe-url`
+* `override` - (Required) Whether CloudFront overrides the `Referrer-Policy` HTTP response header received from the origin with the one specified in this response headers policy.
+
+### Strict Transport Security
+
+* `accessControlMaxAgeSec` - (Required) A number that CloudFront uses as the value for the `max-age` directive in the `Strict-Transport-Security` HTTP response header.
+* `includeSubdomains` - (Optional) Whether CloudFront includes the `includeSubDomains` directive in the `Strict-Transport-Security` HTTP response header.
+* `override` - (Required) Whether CloudFront overrides the `Strict-Transport-Security` HTTP response header received from the origin with the one specified in this response headers policy.
+* `preload` - (Optional) Whether CloudFront includes the `preload` directive in the `Strict-Transport-Security` HTTP response header.
+
+### XSS Protection
+
+* `modeBlock` - (Optional) Whether CloudFront includes the `mode=block` directive in the `X-XSS-Protection` header.
+* `override` - (Required) Whether CloudFront overrides the `X-XSS-Protection` HTTP response header received from the origin with the one specified in this response headers policy.
+* `protection` - (Required) A Boolean value that determines the value of the `X-XSS-Protection` HTTP response header. When this setting is `true`, the value of the `X-XSS-Protection` header is `1`. When this setting is `false`, the value of the `X-XSS-Protection` header is `0`.
+* `reportUri` - (Optional) A reporting URI, which CloudFront uses as the value of the `report` directive in the `X-XSS-Protection` header. You cannot specify a `reportUri` when `modeBlock` is `true`.
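+
+Because none of the examples above exercise `securityHeadersConfig`, the following hand-written sketch (not produced by `cdktf convert`) shows the security-related blocks documented above combined in one policy; the policy name and header values are illustrative only:
+
+```typescript
+// A minimal sketch of a security headers policy.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CloudfrontResponseHeadersPolicy } from "./.gen/providers/aws/cloudfront-response-headers-policy";
+
+class SecurityHeadersSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudfrontResponseHeadersPolicy(this, "security_headers", {
+      name: "example-security-headers-policy", // illustrative name
+      securityHeadersConfig: {
+        contentTypeOptions: {
+          override: true, // always send X-Content-Type-Options: nosniff
+        },
+        frameOptions: {
+          frameOption: "DENY",
+          override: true,
+        },
+        referrerPolicy: {
+          referrerPolicy: "same-origin",
+          override: true,
+        },
+        strictTransportSecurity: {
+          accessControlMaxAgeSec: 63072000, // two years, in seconds
+          includeSubdomains: true,
+          preload: true,
+          override: true,
+        },
+        xssProtection: {
+          modeBlock: true,
+          protection: true,
+          override: true,
+        },
+      },
+    });
+  }
+}
+```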
+
+### Server Timing Headers Config
+
+* `enabled` - (Required) Whether CloudFront adds the `Server-Timing` header to HTTP responses that it sends in response to requests that match a cache behavior that's associated with this response headers policy.
+* `samplingRate` - (Required) A number 0–100 (inclusive) that specifies the percentage of responses that you want CloudFront to add the `Server-Timing` header to. Valid range: 0.0 to 100.0.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - The current version of the response headers policy.
+* `id` - The identifier for the response headers policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudfront Response Headers Policies using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cloudfront Response Headers Policies using the `id`. For example:
+
+```console
+% terraform import aws_cloudfront_response_headers_policy.policy 658327ea-f89d-4fab-a63d-7e88639e58f9
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudhsm_v2_cluster.html.markdown b/website/docs/cdktf/typescript/r/cloudhsm_v2_cluster.html.markdown
new file mode 100644
index 00000000000..d43c6dbf434
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudhsm_v2_cluster.html.markdown
@@ -0,0 +1,144 @@
+---
+subcategory: "CloudHSM"
+layout: "aws"
+page_title: "AWS: aws_cloudhsm_v2_cluster"
+description: |-
+  Provides a CloudHSM v2 resource.
+---
+
+
+# Resource: aws_cloudhsm_v2_cluster
+
+Creates an Amazon CloudHSM v2 cluster.
+
+For information about CloudHSM v2, see the
+[AWS CloudHSM User Guide][1] and the [Amazon
+CloudHSM API Reference][2].
+
+~> **NOTE:** A CloudHSM Cluster can take several minutes to set up.
+No attribute other than `tags` can be updated in place.
+If you need to delete a cluster, you have to remove its HSM modules first.
+To initialize the cluster, you have to add an HSM instance to the cluster, then sign the CSR and upload it.
+
+## Example Usage
+
+The following example creates a CloudHSM cluster.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  Token,
+  TerraformCount,
+  Fn,
+  propertyAccess,
+  TerraformStack,
+} from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudhsmV2Cluster } from "./.gen/providers/aws/cloudhsm-v2-cluster";
+import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+import { Subnet } from "./.gen/providers/aws/subnet";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AwsProvider(this, "aws", {
+      region: awsRegion.stringValue,
+    });
+    const cloudhsmV2Vpc = new Vpc(this, "cloudhsm_v2_vpc", {
+      cidrBlock: "10.0.0.0/16",
+      tags: {
+        Name: "example-aws_cloudhsm_v2_cluster",
+      },
+    });
+    const available = new DataAwsAvailabilityZones(this, "available", {});
+    /*In most cases loops should be handled in the programming language context and
+not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+you need to keep this like it is.*/
+    const cloudhsmV2SubnetsCount = TerraformCount.of(Token.asNumber("2"));
+    const cloudhsmV2Subnets = new Subnet(this, "cloudhsm_v2_subnets", {
+      availabilityZone: Token.asString(
+        Fn.element(
+          available.names,
+          Token.asNumber(cloudhsmV2SubnetsCount.index)
+        )
+      ),
+      cidrBlock: Token.asString(
+        Fn.element(subnets.value, Token.asNumber(cloudhsmV2SubnetsCount.index))
+      ),
+      mapPublicIpOnLaunch: false,
+      tags: {
+        Name: "example-aws_cloudhsm_v2_cluster",
+      },
+      vpcId: cloudhsmV2Vpc.id,
+      count: cloudhsmV2SubnetsCount,
+    });
+    new CloudhsmV2Cluster(this, "cloudhsm_v2_cluster", {
+      hsmType: "hsm1.medium",
+      subnetIds: Token.asList(propertyAccess(cloudhsmV2Subnets, ["*", "id"])),
+      tags: {
+        Name: "example-aws_cloudhsm_v2_cluster",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `sourceBackupIdentifier` - (Optional) ID of the CloudHSM v2 cluster backup to be restored.
+* `hsmType` - (Required) The type of HSM module in the cluster. Currently, only `hsm1.medium` is supported.
+* `subnetIds` - (Required) The IDs of subnets in which the cluster will operate.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `clusterId` - The id of the CloudHSM cluster.
+* `clusterState` - The state of the CloudHSM cluster.
+* `vpcId` - The id of the VPC that the CloudHSM cluster resides in.
+* `securityGroupId` - The ID of the security group associated with the CloudHSM cluster.
+* `clusterCertificates` - The list of cluster certificates.
+  * `clusterCertificates0ClusterCertificate` - The cluster certificate issued (signed) by the issuing certificate authority (CA) of the cluster's owner.
+  * `clusterCertificates0ClusterCsr` - The certificate signing request (CSR). Available only in the `UNINITIALIZED` state after an HSM instance is added to the cluster.
+  * `clusterCertificates0AwsHardwareCertificate` - The HSM hardware certificate issued (signed) by AWS CloudHSM.
+  * `clusterCertificates0HsmCertificate` - The HSM certificate issued (signed) by the HSM hardware.
+  * `clusterCertificates0ManufacturerHardwareCertificate` - The HSM hardware certificate issued (signed) by the hardware manufacturer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+[1]: https://docs.aws.amazon.com/cloudhsm/latest/userguide/introduction.html
+[2]: https://docs.aws.amazon.com/cloudhsm/latest/APIReference/Welcome.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudHSM v2 Clusters using the cluster `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudHSM v2 Clusters using the cluster `id`. For example:
+
+```console
+% terraform import aws_cloudhsm_v2_cluster.test_cluster cluster-aeb282a201
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudhsm_v2_hsm.html.markdown b/website/docs/cdktf/typescript/r/cloudhsm_v2_hsm.html.markdown
new file mode 100644
index 00000000000..d837c6d174a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudhsm_v2_hsm.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "CloudHSM"
+layout: "aws"
+page_title: "AWS: aws_cloudhsm_v2_hsm"
+description: |-
+  Provides a CloudHSM v2 HSM module resource.
+---
+
+
+# Resource: aws_cloudhsm_v2_hsm
+
+Creates an HSM module in an Amazon CloudHSM v2 cluster.
+
+## Example Usage
+
+The following example creates an HSM module in a CloudHSM cluster.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudhsmV2Hsm } from "./.gen/providers/aws/cloudhsm-v2-hsm";
+import { DataAwsCloudhsmV2Cluster } from "./.gen/providers/aws/data-aws-cloudhsm-v2-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const cluster = new DataAwsCloudhsmV2Cluster(this, "cluster", {
+      clusterId: cloudhsmClusterId.stringValue,
+    });
+    new CloudhsmV2Hsm(this, "cloudhsm_v2_hsm", {
+      clusterId: Token.asString(cluster.clusterId),
+      subnetId: Token.asString(propertyAccess(cluster.subnetIds, ["0"])),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+~> **NOTE:** Either `subnetId` or `availabilityZone` must be specified.
+
+* `clusterId` - (Required) The ID of the CloudHSM v2 cluster to which the HSM will be added.
+* `subnetId` - (Optional) The ID of the subnet in which the HSM module will be located. Conflicts with `availabilityZone`.
+* `availabilityZone` - (Optional) The ID of the AZ in which the HSM module will be located. Conflicts with `subnetId`.
+* `ipAddress` - (Optional) The IP address of the HSM module. Must be within the CIDR of the selected subnet.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `hsmId` - The id of the HSM module.
+* `hsmState` - The state of the HSM module.
+* `hsmEniId` - The id of the ENI allocated for the HSM module.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import HSM modules using their HSM ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import HSM modules using their HSM ID. For example:
+
+```console
+% terraform import aws_cloudhsm_v2_hsm.bar hsm-quo8dahtaca
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudsearch_domain.html.markdown b/website/docs/cdktf/typescript/r/cloudsearch_domain.html.markdown
new file mode 100644
index 00000000000..3870e00871b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudsearch_domain.html.markdown
@@ -0,0 +1,141 @@
+---
+subcategory: "CloudSearch"
+layout: "aws"
+page_title: "AWS: aws_cloudsearch_domain"
+description: |-
+  Provides a CloudSearch domain resource.
+---
+
+
+# Resource: aws_cloudsearch_domain
+
+Provides a CloudSearch domain resource.
+
+Terraform waits for the domain to become `active` when applying a configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudsearchDomain } from "./.gen/providers/aws/cloudsearch-domain";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudsearchDomain(this, "example", {
+      indexField: [
+        {
+          analysisScheme: "_en_default_",
+          highlight: false,
+          name: "headline",
+          return: true,
+          search: true,
+          sort: true,
+          type: "text",
+        },
+        {
+          facet: true,
+          name: "price",
+          return: true,
+          search: true,
+          sort: true,
+          sourceFields: "headline",
+          type: "double",
+        },
+      ],
+      name: "example-domain",
+      scalingParameters: {
+        desiredInstanceType: "search.medium",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `endpointOptions` - (Optional) Domain endpoint options. Documented below.
+* `indexField` - (Optional) The index fields for documents added to the domain. Documented below.
+* `multiAz` - (Optional) Whether or not to maintain extra instances for the domain in a second Availability Zone to ensure high availability.
+* `name` - (Required) The name of the CloudSearch domain.
+* `scalingParameters` - (Optional) Domain scaling parameters. Documented below.
+
+### endpoint_options
+
+This configuration block supports the following attributes:
+
+* `enforceHttps` - (Optional) Enables or disables the requirement that all requests to the domain arrive over HTTPS.
+* `tlsSecurityPolicy` - (Optional) The minimum required TLS version. See the [AWS documentation](https://docs.aws.amazon.com/cloudsearch/latest/developerguide/API_DomainEndpointOptions.html) for valid values.
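+
+For example, requiring HTTPS with a minimum TLS version on the domain endpoint might look like the following hand-written sketch (not `cdktf convert` output; it reuses the `CloudsearchDomain` binding shown above, and the policy name is one of the values listed in the linked AWS documentation):
+
+```typescript
+// Hand-written sketch: enforce HTTPS and a minimum TLS version on the
+// domain endpoint, using the same .gen import as the example above.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CloudsearchDomain } from "./.gen/providers/aws/cloudsearch-domain";
+class EndpointOptionsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudsearchDomain(this, "secure_example", {
+      name: "secure-example-domain",
+      endpointOptions: {
+        enforceHttps: true,
+        tlsSecurityPolicy: "Policy-Min-TLS-1-2-2019-07",
+      },
+    });
+  }
+}
+
+```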
+
+### scaling_parameters
+
+This configuration block supports the following attributes:
+
+* `desiredInstanceType` - (Optional) The instance type that you want to preconfigure for your domain. See the [AWS documentation](https://docs.aws.amazon.com/cloudsearch/latest/developerguide/API_ScalingParameters.html) for valid values.
+* `desiredPartitionCount` - (Optional) The number of partitions you want to preconfigure for your domain. Only valid when you select `search.2xlarge` as the instance type.
+* `desiredReplicationCount` - (Optional) The number of replicas you want to preconfigure for each index partition.
+
+### index_field
+
+This configuration block supports the following attributes:

+* `name` - (Required) A unique name for the field. Field names must begin with a letter and be at least 3 and no more than 64 characters long. The allowed characters are: `a`-`z` (lower-case letters), `0`-`9`, and `_` (underscore). The name `score` is reserved and cannot be used as a field name.
+* `type` - (Required) The field type. Valid values: `date`, `date-array`, `double`, `double-array`, `int`, `int-array`, `literal`, `literal-array`, `text`, `text-array`.
+* `analysisScheme` - (Optional) The analysis scheme you want to use for a `text` field. The analysis scheme specifies the language-specific text processing options that are used during indexing.
+* `defaultValue` - (Optional) The default value for the field. This value is used when no value is specified for the field in the document data.
+* `facet` - (Optional) Whether facet information can be returned for the field.
+* `highlight` - (Optional) Whether highlights can be returned for the field.
+* `return` - (Optional) Whether the contents of the field can be returned in the search results.
+* `search` - (Optional) Whether the field is searchable.
+* `sort` - (Optional) Whether the field can be used to sort the search results.
+* `sourceFields` - (Optional) A comma-separated list of source fields to map to the field. Specifying a source field copies data from one field to another, enabling you to use the same source data in different ways by configuring different options for the fields.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The domain's ARN.
+* `documentServiceEndpoint` - The service endpoint for updating documents in a search domain.
+* `domainId` - An internally generated unique identifier for the domain.
+* `searchServiceEndpoint` - The service endpoint for requesting search results from a search domain.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudSearch Domains using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudSearch Domains using the `name`. For example:
+
+```console
+% terraform import aws_cloudsearch_domain.example example-domain
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudsearch_domain_service_access_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudsearch_domain_service_access_policy.html.markdown
new file mode 100644
index 00000000000..7f04461bd14
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudsearch_domain_service_access_policy.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "CloudSearch"
+layout: "aws"
+page_title: "AWS: aws_cloudsearch_domain_service_access_policy"
+description: |-
+  Provides a CloudSearch domain service access policy resource.
+---
+
+
+# Resource: aws_cloudsearch_domain_service_access_policy
+
+Provides a CloudSearch domain service access policy resource.
+
+Terraform waits for the domain service access policy to become `active` when applying a configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudsearchDomain } from "./.gen/providers/aws/cloudsearch-domain";
+import { CloudsearchDomainServiceAccessPolicy } from "./.gen/providers/aws/cloudsearch-domain-service-access-policy";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudsearchDomain(this, "example", {
+      name: "example-domain",
+    });
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_1",
+      {
+        statement: [
+          {
+            actions: ["cloudsearch:search", "cloudsearch:document"],
+            condition: [
+              {
+                test: "IpAddress",
+                values: ["192.0.2.0/32"],
+                variable: "aws:SourceIp",
+              },
+            ],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["*"],
+                type: "*",
+              },
+            ],
+            sid: "search_only",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsCloudsearchDomainServiceAccessPolicyExample =
+      new CloudsearchDomainServiceAccessPolicy(this, "example_2", {
+        accessPolicy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+        domainName: example.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudsearchDomainServiceAccessPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accessPolicy` - (Required) The access rules you want to configure. These rules replace any existing rules. See the [AWS documentation](https://docs.aws.amazon.com/cloudsearch/latest/developerguide/configuring-access.html) for details.
+* `domainName` - (Required) The CloudSearch domain name the policy applies to.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `20m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudSearch domain service access policies using the domain name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudSearch domain service access policies using the domain name. For example:
+
+```console
+% terraform import aws_cloudsearch_domain_service_access_policy.example example-domain
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudtrail.html.markdown b/website/docs/cdktf/typescript/r/cloudtrail.html.markdown
new file mode 100644
index 00000000000..8a32cb7b481
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudtrail.html.markdown
@@ -0,0 +1,562 @@
+---
+subcategory: "CloudTrail"
+layout: "aws"
+page_title: "AWS: aws_cloudtrail"
+description: |-
+  Provides a CloudTrail resource.
+---
+
+
+# Resource: aws_cloudtrail
+
+Provides a CloudTrail resource.
+
+-> **Tip:** For a multi-region trail, this resource must be in the home region of the trail.
+
+-> **Tip:** For an organization trail, this resource must be in the master account of the organization.
+
+## Example Usage
+
+### Basic
+
+Enable CloudTrail to capture all compatible management events in the region.
+For capturing events from services like IAM, `includeGlobalServiceEvents` must be enabled.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Cloudtrail } from "./.gen/providers/aws/cloudtrail";
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition";
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "tf-test-trail",
+      forceDestroy: true,
+    });
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsPartitionCurrent = new DataAwsPartition(this, "current_2", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsPartitionCurrent.overrideLogicalId("current");
+    const dataAwsRegionCurrent = new DataAwsRegion(this, "current_3", {});
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const awsCloudtrailExample = new Cloudtrail(this, "example_4", { + includeGlobalServiceEvents: false, + name: "example", + s3BucketName: example.id, + s3KeyPrefix: "prefix", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudtrailExample.overrideLogicalId("example"); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_5", + { + statement: [ + { + actions: ["s3:GetBucketAcl"], + condition: [ + { + test: "StringEquals", + values: [ + "arn:${" + + dataAwsPartitionCurrent.partition + + "}:cloudtrail:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:trail/example", + ], + variable: "aws:SourceArn", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["cloudtrail.amazonaws.com"], + type: "Service", + }, + ], + resources: [example.arn], + sid: "AWSCloudTrailAclCheck", + }, + { + actions: ["s3:PutObject"], + condition: [ + { + test: "StringEquals", + values: ["bucket-owner-full-control"], + variable: "s3:x-amz-acl", + }, + { + test: "StringEquals", + values: [ + "arn:${" + + dataAwsPartitionCurrent.partition + + "}:cloudtrail:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:trail/example", + ], + variable: "aws:SourceArn", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["cloudtrail.amazonaws.com"], + type: "Service", + }, + ], + resources: [ + "${" + + example.arn + + "}/prefix/AWSLogs/${" + + current.accountId + + "}/*", + ], + sid: "AWSCloudTrailWrite", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsS3BucketPolicyExample = new S3BucketPolicy(this, "example_6", { + bucket: example.id, + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyExample.overrideLogicalId("example"); + } +} + +``` + +### Data Event Logging + +CloudTrail can log [Data Events](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html) for certain services such as S3 objects and Lambda function invocations. Additional information about data event configuration can be found in the following links: + +* [CloudTrail API DataResource documentation](https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_DataResource.html) (for basic event selector). +* [CloudTrail API AdvancedFieldSelector documentation](https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_AdvancedFieldSelector.html) (for advanced event selector). + +#### Logging All Lambda Function Invocations By Using Basic Event Selectors + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Cloudtrail } from "./.gen/providers/aws/cloudtrail"; +interface MyConfig { + name: any; + s3BucketName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new Cloudtrail(this, "example", { + eventSelector: [ + { + dataResource: [ + { + type: "AWS::Lambda::Function", + values: ["arn:aws:lambda"], + }, + ], + includeManagementEvents: true, + readWriteType: "All", + }, + ], + name: config.name, + s3BucketName: config.s3BucketName, + }); + } +} + +``` + +#### Logging All S3 Object Events By Using Basic Event Selectors + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Cloudtrail } from "./.gen/providers/aws/cloudtrail"; +interface MyConfig { + name: any; + s3BucketName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new Cloudtrail(this, "example", { + eventSelector: [ + { + dataResource: [ + { + type: "AWS::S3::Object", + values: ["arn:aws:s3"], + }, + ], + includeManagementEvents: true, + readWriteType: "All", + }, + ], + name: config.name, + s3BucketName: config.s3BucketName, + }); + } +} + +``` + +#### Logging Individual S3 Bucket Events By Using Basic Event Selectors + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Cloudtrail } from "./.gen/providers/aws/cloudtrail"; +import { DataAwsS3Bucket } from "./.gen/providers/aws/data-aws-s3-bucket"; +interface MyConfig { + name: any; + s3BucketName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const importantBucket = new DataAwsS3Bucket(this, "important-bucket", { + bucket: "important-bucket", + }); + new Cloudtrail(this, "example", { + eventSelector: [ + { + dataResource: [ + { + type: "AWS::S3::Object", + values: ["${" + importantBucket.arn + "}/"], + }, + ], + includeManagementEvents: true, + readWriteType: "All", + }, + ], + name: config.name, + s3BucketName: config.s3BucketName, + }); + } +} + +``` + +#### Logging All S3 Object Events Except For Two S3 Buckets By Using Advanced Event Selectors + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Cloudtrail } from "./.gen/providers/aws/cloudtrail"; +import { DataAwsS3Bucket } from "./.gen/providers/aws/data-aws-s3-bucket"; +interface MyConfig { + name: any; + s3BucketName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const notImportantBucket1 = new DataAwsS3Bucket( + this, + "not-important-bucket-1", + { + bucket: "not-important-bucket-1", + } + ); + const notImportantBucket2 = new DataAwsS3Bucket( + this, + "not-important-bucket-2", + { + bucket: "not-important-bucket-2", + } + ); + new Cloudtrail(this, "example", { + advancedEventSelector: [ + { + fieldSelector: [ + { + equalTo: ["Data"], + field: "eventCategory", + }, + { + field: "resources.ARN", + notStartsWith: [ + "${" + notImportantBucket1.arn + "}/", + "${" + notImportantBucket2.arn + "}/", + ], + }, + { + equalTo: ["AWS::S3::Object"], + field: "resources.type", + }, + ], + name: "Log all S3 objects events except for two S3 buckets", + }, + { + fieldSelector: [ + { + equalTo: ["Management"], + field: "eventCategory", + }, + ], + name: "Log readOnly and writeOnly management events", + }, + ], + name: config.name, + s3BucketName: config.s3BucketName, + }); + } +} + +``` + +#### Logging Individual S3 Buckets And Specific Event Names By Using Advanced Event Selectors + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Cloudtrail } from "./.gen/providers/aws/cloudtrail"; +import { DataAwsS3Bucket } from "./.gen/providers/aws/data-aws-s3-bucket"; +interface MyConfig { + name: any; + s3BucketName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const importantBucket1 = new DataAwsS3Bucket(this, "important-bucket-1", { + bucket: "important-bucket-1", + }); + const importantBucket2 = new DataAwsS3Bucket(this, "important-bucket-2", { + bucket: "important-bucket-2", + }); + const importantBucket3 = new DataAwsS3Bucket(this, "important-bucket-3", { + bucket: "important-bucket-3", + }); + new Cloudtrail(this, "example", { + advancedEventSelector: [ + { + fieldSelector: [ + { + equalTo: ["Data"], + field: "eventCategory", + }, + { + equalTo: ["PutObject", "DeleteObject"], + field: "eventName", + }, + { + field: "resources.ARN", + startsWith: [ + "${" + importantBucket1.arn + "}/", + "${" + importantBucket2.arn + "}/", + ], + }, + { + equalTo: ["false"], + field: "readOnly", + }, + { + equalTo: ["AWS::S3::Object"], + field: "resources.type", + }, + ], + name: "Log PutObject and DeleteObject events for two S3 buckets", + }, + { + fieldSelector: [ + { + equalTo: ["Data"], + field: "eventCategory", + }, + { + field: "eventName", + startsWith: ["Delete"], + }, + { + equalTo: ["${" + importantBucket3.arn + "}/important-prefix"], + field: "resources.ARN", + }, + { + equalTo: ["false"], + field: "readOnly", + }, + { + equalTo: ["AWS::S3::Object"], + field: "resources.type", + }, + ], + name: "Log Delete* events for one S3 bucket", + }, + ], + name: config.name, + s3BucketName: config.s3BucketName, + }); + } +} + +``` + +#### Sending Events to CloudWatch Logs + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { 
Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Cloudtrail } from "./.gen/providers/aws/cloudtrail"; +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +interface MyConfig { + name: any; + s3BucketName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "Example", + }); + const awsCloudtrailExample = new Cloudtrail(this, "example_1", { + cloudWatchLogsGroupArn: "${" + example.arn + "}:*", + name: config.name, + s3BucketName: config.s3BucketName, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudtrailExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the trail. +* `s3BucketName` - (Required) Name of the S3 bucket designated for publishing log files. + +The following arguments are optional: + +* `advancedEventSelector` - (Optional) Specifies an advanced event selector for enabling data event logging. Fields documented below. Conflicts with `eventSelector`. +* `cloudWatchLogsGroupArn` - (Optional) Log group name using an ARN that represents the log group to which CloudTrail logs will be delivered. Note that CloudTrail requires the Log Stream wildcard. +* `cloudWatchLogsRoleArn` - (Optional) Role for the CloudWatch Logs endpoint to assume to write to a user’s log group. +* `enableLogFileValidation` - (Optional) Whether log file integrity validation is enabled. Defaults to `false`. +* `enableLogging` - (Optional) Enables logging for the trail. Defaults to `true`. Setting this to `false` will pause logging. +* `eventSelector` - (Optional) Specifies an event selector for enabling data event logging. Fields documented below. Please note the [CloudTrail limits](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/WhatIsCloudTrail-Limits.html) when configuring these. Conflicts with `advancedEventSelector`. +* `includeGlobalServiceEvents` - (Optional) Whether the trail is publishing events from global services such as IAM to the log files. Defaults to `true`. +* `insightSelector` - (Optional) Configuration block for identifying unusual operational activity. See details below. +* `isMultiRegionTrail` - (Optional) Whether the trail is created in the current region or in all regions. Defaults to `false`. +* `isOrganizationTrail` - (Optional) Whether the trail is an AWS Organizations trail. Organization trails log events for the master account and all member accounts. Can only be created in the organization master account. Defaults to `false`. +* `kmsKeyId` - (Optional) KMS key ARN to use to encrypt the logs delivered by CloudTrail. +* `s3KeyPrefix` - (Optional) S3 key prefix that follows the name of the bucket you have designated for log file delivery. +* `snsTopicName` - (Optional) Name of the Amazon SNS topic defined for notification of log file delivery. +* `tags` - (Optional) Map of tags to assign to the trail. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### event_selector
+
+* `dataResource` - (Optional) Configuration block for data events. See details below.
+* `excludeManagementEventSources` (Optional) - A set of event sources to exclude. Valid values include: `kms.amazonaws.com` and `rdsdata.amazonaws.com`. `includeManagementEvents` must be set to `true` to allow this.
+* `includeManagementEvents` - (Optional) Whether to include management events for your trail. Defaults to `true`.
+* `readWriteType` - (Optional) Type of events to log. Valid values are `ReadOnly`, `WriteOnly`, `All`. Default value is `All`.
+
+#### data_resource
+
+* `type` - (Required) Resource type in which you want to log data events. You can specify only the following values: "AWS::S3::Object", "AWS::Lambda::Function" and "AWS::DynamoDB::Table".
+* `values` - (Required) List of ARN strings or partial ARN strings to specify selectors for data audit events over data resources. ARN list is specific to single-valued `type`. For example, `arn:aws:s3:::<bucket name>/` for all objects in a bucket, `arn:aws:s3:::<bucket name>/key` for specific objects, `arn:aws:lambda` for all lambda events within an account, `arn:aws:lambda:<region>:<account number>:function:<function name>` for a specific Lambda function, `arn:aws:dynamodb` for all DDB events for all tables within an account, or `arn:aws:dynamodb:<region>:<account number>:table/<table name>` for a specific DynamoDB table.
+
+### insight_selector
+
+* `insightType` - (Optional) Type of insights to log on a trail. Valid values are: `ApiCallRateInsight` and `ApiErrorRateInsight`.
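+
+Insight selectors slot into the same resource. The following is a hand-written sketch (not `cdktf convert` output) that enables both Insights types on a trail, reusing the `Cloudtrail` binding imported in the examples above; the bucket name is a placeholder:
+
+```typescript
+// Hand-written sketch: enable both CloudTrail Insights types on a trail.
+// Assumes the Cloudtrail .gen binding used in the examples above; the S3
+// bucket (and its policy) are assumed to exist already.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Cloudtrail } from "./.gen/providers/aws/cloudtrail";
+class InsightsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Cloudtrail(this, "example", {
+      name: "example-trail",
+      s3BucketName: "example-trail-bucket",
+      insightSelector: [
+        { insightType: "ApiCallRateInsight" },
+        { insightType: "ApiErrorRateInsight" },
+      ],
+    });
+  }
+}
+
+```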
+
+### Advanced Event Selector Arguments
+
+* `fieldSelector` (Required) - Specifies the selector statements in an advanced event selector. Fields documented below.
+* `name` (Optional) - Name of the advanced event selector.
+
+#### Field Selector Arguments
+
+* `field` (Required) - Field in an event record on which to filter events to be logged. You can specify only the following values: `readOnly`, `eventSource`, `eventName`, `eventCategory`, `resources.type`, `resources.ARN`.
+* `endsWith` (Optional) - A list of values that includes events that match the last few characters of the event record field specified as the value of `field`.
+* `equals` (Optional) - A list of values that includes events that match the exact value of the event record field specified as the value of `field`. This is the only valid operator that you can use with the `readOnly`, `eventCategory`, and `resources.type` fields.
+* `notEndsWith` (Optional) - A list of values that excludes events that match the last few characters of the event record field specified as the value of `field`.
+* `notEquals` (Optional) - A list of values that excludes events that match the exact value of the event record field specified as the value of `field`.
+* `notStartsWith` (Optional) - A list of values that excludes events that match the first few characters of the event record field specified as the value of `field`.
+* `startsWith` (Optional) - A list of values that includes events that match the first few characters of the event record field specified as the value of `field`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the trail.
+* `homeRegion` - Region in which the trail was created.
+* `id` - Name of the trail.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudtrails using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cloudtrails using the `name`. For example:
+
+```console
+% terraform import aws_cloudtrail.sample my-sample-trail
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudtrail_event_data_store.html.markdown b/website/docs/cdktf/typescript/r/cloudtrail_event_data_store.html.markdown
new file mode 100644
index 00000000000..ecb50aeb2a8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudtrail_event_data_store.html.markdown
@@ -0,0 +1,165 @@
+---
+subcategory: "CloudTrail"
+layout: "aws"
+page_title: "AWS: aws_cloudtrail_event_data_store"
+description: |-
+  Provides a CloudTrail Event Data Store resource.
+---
+
+
+# Resource: aws_cloudtrail_event_data_store
+
+Provides a CloudTrail Event Data Store.
+
+More information about event data stores can be found in the [Event Data Store User Guide](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/query-event-data-store.html).
+
+-> **Tip:** For an organization event data store, you must create this resource in the management account.
+
+## Example Usage
+
+### Basic
+
+The simplest event data store configuration requires only the `name` attribute to be set. The event data store will automatically capture all management events. To capture management events from all regions, `multiRegionEnabled` must be `true`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudtrailEventDataStore } from "./.gen/providers/aws/cloudtrail-event-data-store";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudtrailEventDataStore(this, "example", {
+      name: "example-event-data-store",
+    });
+  }
+}
+
+```
+
+### Data Event Logging
+
+CloudTrail can log [Data Events](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html) for certain services such as S3 bucket objects and Lambda function invocations. Additional information about data event configuration can be found in the following links:
+
+- [CloudTrail API AdvancedFieldSelector documentation](https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_AdvancedFieldSelector.html)
+
+#### Log all DynamoDB PutEvent actions for a specific DynamoDB table
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudtrailEventDataStore } from "./.gen/providers/aws/cloudtrail-event-data-store";
+import { DataAwsDynamodbTable } from "./.gen/providers/aws/data-aws-dynamodb-table";
+interface MyConfig {
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const table = new DataAwsDynamodbTable(this, "table", {
+      name: "not-important-dynamodb-table",
+    });
+    new CloudtrailEventDataStore(this, "example", {
+      advancedEventSelector: [
+        {
+          fieldSelector: [
+            {
+              equalTo: ["Data"],
+              field: "eventCategory",
+            },
+            {
+              equalTo: ["AWS::DynamoDB::Table"],
+              field: "resources.type",
+            },
+            {
+              equalTo: ["PutItem"],
+              field: "eventName",
+            },
+            {
+              equalTo: [Token.asString(table.arn)],
+              field: "resources.ARN",
+            },
+          ],
+          name: "Log all DynamoDB PutEvent actions for a specific DynamoDB table",
+        },
+      ],
+      name: config.name,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `name` - (Required) The name of the event data store.
+- `advancedEventSelector` - (Optional) The advanced event selectors to use to select the events for the data store.
For more information about how to use advanced event selectors, see [Log events by using advanced event selectors](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html#creating-data-event-selectors-advanced) in the CloudTrail User Guide.
+- `multiRegionEnabled` - (Optional) Specifies whether the event data store includes events from all regions, or only from the region in which the event data store is created. Default: `true`.
+- `organizationEnabled` - (Optional) Specifies whether an event data store collects events logged for an organization in AWS Organizations. Default: `false`.
+- `retentionPeriod` - (Optional) The retention period of the event data store, in days. You can set a retention period of up to 2555 days, the equivalent of seven years. Default: `2555`.
+- `kmsKeyId` - (Optional) Specifies the AWS KMS key ID to use to encrypt the events delivered by CloudTrail. The value can be an alias name prefixed by alias/, a fully specified ARN to an alias, a fully specified ARN to a key, or a globally unique identifier.
+- `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+- `terminationProtectionEnabled` - (Optional) Specifies whether termination protection is enabled for the event data store. If termination protection is enabled, you cannot delete the event data store until termination protection is disabled. Default: `true`.
+
+### Advanced Event Selector Arguments
+
+`advancedEventSelector` supports the following arguments:
+
+- `name` (Optional) - Specifies the name of the advanced event selector.
+- `fieldSelector` (Required) - Specifies the selector statements in an advanced event selector. Fields documented below.
+
+#### Field Selector Arguments
+
+`fieldSelector` supports the following arguments:
+
+- `field` (Required) - Specifies a field in an event record on which to filter events to be logged. You can specify only the following values: `readOnly`, `eventSource`, `eventName`, `eventCategory`, `resources.type`, `resources.ARN`.
+- `equals` (Optional) - A list of values that includes events that match the exact value of the event record field specified as the value of `field`. This is the only valid operator that you can use with the `readOnly`, `eventCategory`, and `resources.type` fields.
+- `notEquals` (Optional) - A list of values that excludes events that match the exact value of the event record field specified as the value of `field`.
+- `startsWith` (Optional) - A list of values that includes events that match the first few characters of the event record field specified as the value of `field`.
+- `notStartsWith` (Optional) - A list of values that excludes events that match the first few characters of the event record field specified as the value of `field`.
+- `endsWith` (Optional) - A list of values that includes events that match the last few characters of the event record field specified as the value of `field`.
+- `notEndsWith` (Optional) - A list of values that excludes events that match the last few characters of the event record field specified as the value of `field`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - ARN of the event data store.
+- `id` - Name of the event data store.
+- `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import event data stores using their `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import event data stores using their `arn`. For example:
+
+```console
+% terraform import aws_cloudtrail_event_data_store.example arn:aws:cloudtrail:us-east-1:123456789123:eventdatastore/22333815-4414-412c-b155-dd254033gfhf
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_composite_alarm.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_composite_alarm.html.markdown
new file mode 100644
index 00000000000..db222d97d6f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_composite_alarm.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "CloudWatch"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_composite_alarm"
+description: |-
+  Provides a CloudWatch Composite Alarm resource.
+---
+
+
+# Resource: aws_cloudwatch_composite_alarm
+
+Provides a CloudWatch Composite Alarm resource.
+
+~> **NOTE:** An alarm (composite or metric) cannot be destroyed when there are other composite alarms depending on it. This can lead to a cyclical dependency on update, as Terraform will unsuccessfully attempt to destroy alarms before updating the rule. Consider using `dependsOn`, references to alarm names, and two-stage updates.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchCompositeAlarm } from "./.gen/providers/aws/cloudwatch-composite-alarm";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudwatchCompositeAlarm(this, "example", {
+      alarmActions: Token.asList(awsSnsTopicExample.arn),
+      alarmDescription: "This is a composite alarm!",
+      alarmName: "example-composite-alarm",
+      alarmRule:
+        "ALARM(${" +
+        alpha.alarmName +
+        "}) OR\nALARM(${" +
+        bravo.alarmName +
+        "})\n\n",
+      okActions: Token.asList(awsSnsTopicExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `actionsEnabled` - (Optional, Forces new resource) Indicates whether actions should be executed during any changes to the alarm state of the composite alarm. Defaults to `true`.
+* `alarmActions` - (Optional) The set of actions to execute when this alarm transitions to the `ALARM` state from any other state. Each action is specified as an ARN. Up to 5 actions are allowed.
+* `alarmDescription` - (Optional) The description for the composite alarm.
+* `alarmName` - (Required) The name for the composite alarm. This name must be unique within the region.
+* `alarmRule` - (Required) An expression that specifies which other alarms are to be evaluated to determine this composite alarm's state. For syntax, see [Creating a Composite Alarm](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Create_Composite_Alarm.html). The maximum length is 10240 characters.
+* `insufficientDataActions` - (Optional) The set of actions to execute when this alarm transitions to the `INSUFFICIENT_DATA` state from any other state. Each action is specified as an ARN. Up to 5 actions are allowed.
+* `okActions` - (Optional) The set of actions to execute when this alarm transitions to an `OK` state from any other state. Each action is specified as an ARN. Up to 5 actions are allowed.
+* `tags` - (Optional) A map of tags to associate with the alarm. Up to 50 tags are allowed. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the composite alarm.
+* `id` - The ID of the composite alarm resource, which is equivalent to its `alarmName`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a CloudWatch Composite Alarm using the `alarmName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a CloudWatch Composite Alarm using the `alarmName`. For example:
+
+```console
+% terraform import aws_cloudwatch_composite_alarm.test my-alarm
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_dashboard.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_dashboard.html.markdown
new file mode 100644
index 00000000000..7aea2c592d7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_dashboard.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "CloudWatch"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_dashboard"
+description: |-
+  Provides a CloudWatch Dashboard resource.
+---
+
+
+# Resource: aws_cloudwatch_dashboard
+
+Provides a CloudWatch Dashboard resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { CloudwatchDashboard } from "./.gen/providers/aws/cloudwatch-dashboard"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchDashboard(this, "main", { + dashboardBody: Token.asString( + Fn.jsonencode({ + widgets: [ + { + height: 6, + properties: { + metrics: [ + ["AWS/EC2", "CPUUtilization", "InstanceId", "i-012345"], + ], + period: 300, + region: "us-east-1", + stat: "Average", + title: "EC2 Instance CPU", + }, + type: "metric", + width: 12, + x: 0, + y: 0, + }, + { + height: 3, + properties: { + markdown: "Hello world", + }, + type: "text", + width: 3, + x: 0, + y: 7, + }, + ], + }) + ), + dashboardName: "my-dashboard", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dashboardName` - (Required) The name of the dashboard. +* `dashboardBody` - (Required) The detailed information about the dashboard, including what widgets are included and their location on the dashboard. You can read more about the body structure in the [documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/CloudWatch-Dashboard-Body-Structure.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `dashboardArn` - The Amazon Resource Name (ARN) of the dashboard. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch dashboards using the `dashboardName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch dashboards using the `dashboardName`. For example: + +```console +% terraform import aws_cloudwatch_dashboard.sample dashboard_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_api_destination.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_api_destination.html.markdown new file mode 100644 index 00000000000..31beaf4ddb9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_event_api_destination.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_api_destination" +description: |- + Provides an EventBridge event API Destination resource. +--- + + + +# Resource: aws_cloudwatch_event_api_destination + +Provides an EventBridge event API Destination resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { CloudwatchEventApiDestination } from "./.gen/providers/aws/cloudwatch-event-api-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchEventApiDestination(this, "test", { + connectionArn: Token.asString(awsCloudwatchEventConnectionTest.arn), + description: "An API Destination", + httpMethod: "POST", + invocationEndpoint: "https://api.destination.com/endpoint", + invocationRateLimitPerSecond: 20, + name: "api-destination", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the new API Destination. The name must be unique for your account. Maximum of 64 characters consisting of numbers, lower/upper case letters, .,-,_. +* `description` - (Optional) The description of the new API Destination. Maximum of 512 characters. +* `invocationEndpoint` - (Required) URL endpoint to invoke as a target. This could be a valid endpoint generated by a partner service. You can include "*" as path parameters wildcards to be set from the Target HttpParameters. +* `httpMethod` - (Required) Select the HTTP method used for the invocation endpoint, such as GET, POST, PUT, etc. +* `invocationRateLimitPerSecond` - (Optional) Enter the maximum number of invocations per second to allow for this destination. Enter a value greater than 0 (default 300). +* `connectionArn` - (Required) ARN of the EventBridge Connection to use for the API Destination. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the event API Destination. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge API Destinations using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EventBridge API Destinations using the `name`. For example: + +```console +% terraform import aws_cloudwatch_event_api_destination.test api-destination +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_archive.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_archive.html.markdown new file mode 100644 index 00000000000..08415d85096 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_event_archive.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_archive" +description: |- + Provides an EventBridge event archive resource. +--- + + + +# Resource: aws_cloudwatch_event_archive + +Provides an EventBridge event archive resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
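+ * The archive below captures every event that reaches the `orders` bus created
+ * in the same stack: no eventPattern means no filtering, and with no
+ * retentionDays set, events are kept indefinitely.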
+ */ +import { CloudwatchEventArchive } from "./.gen/providers/aws/cloudwatch-event-archive"; +import { CloudwatchEventBus } from "./.gen/providers/aws/cloudwatch-event-bus"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const order = new CloudwatchEventBus(this, "order", { + name: "orders", + }); + const awsCloudwatchEventArchiveOrder = new CloudwatchEventArchive( + this, + "order_1", + { + eventSourceArn: order.arn, + name: "order-archive", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventArchiveOrder.overrideLogicalId("order"); + } +} + +``` + +## Example all optional arguments + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventArchive } from "./.gen/providers/aws/cloudwatch-event-archive"; +import { CloudwatchEventBus } from "./.gen/providers/aws/cloudwatch-event-bus"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const order = new CloudwatchEventBus(this, "order", { + name: "orders", + }); + const awsCloudwatchEventArchiveOrder = new CloudwatchEventArchive( + this, + "order_1", + { + description: "Archived events from order service", + eventPattern: Token.asString( + Fn.jsonencode({ + source: ["company.team.order"], + }) + ), + eventSourceArn: order.arn, + name: "order-archive", + retentionDays: 7, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventArchiveOrder.overrideLogicalId("order"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the new event archive. The archive name cannot exceed 48 characters. +* `eventSourceArn` - (Required) Event bus source ARN from where these events should be archived. +* `description` - (Optional) The description of the new event archive. +* `eventPattern` - (Optional) Instructs the new event archive to only capture events matched by this pattern. By default, it attempts to archive every event received in the `eventSourceArn`. +* `retentionDays` - (Optional) The maximum number of days to retain events in the new event archive. By default, it archives indefinitely. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the event archive. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an EventBridge archive using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an EventBridge archive using the `name`. 
For example: + +```console +% terraform import aws_cloudwatch_event_archive.imported_event_archive order-archive +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_bus.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_bus.html.markdown new file mode 100644 index 00000000000..fe6904df279 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_event_bus.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_bus" +description: |- + Provides an EventBridge event bus resource. +--- + + + +# Resource: aws_cloudwatch_event_bus + +Provides an EventBridge event bus resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventBus } from "./.gen/providers/aws/cloudwatch-event-bus"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchEventBus(this, "messenger", { + name: "chat-messages", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventBus } from "./.gen/providers/aws/cloudwatch-event-bus"; +import { DataAwsCloudwatchEventSource } from "./.gen/providers/aws/data-aws-cloudwatch-event-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplepartner = new DataAwsCloudwatchEventSource( + this, + "examplepartner", + { + namePrefix: "aws.partner/examplepartner.com", + } + ); + const awsCloudwatchEventBusExamplepartner = new CloudwatchEventBus( + this, + "examplepartner_1", + { + eventSourceName: Token.asString(examplepartner.name), + name: Token.asString(examplepartner.name), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventBusExamplepartner.overrideLogicalId("examplepartner"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the new event bus. The names of custom event buses can't contain the / character. To create a partner event bus, ensure the `name` matches the `eventSourceName`. +* `eventSourceName` (Optional) The partner event source that the new event bus will be matched with. Must match `name`. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the event bus. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge event buses using the `name` (which can also be a partner event source name). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EventBridge event buses using the `name` (which can also be a partner event source name). For example: + +```console +% terraform import aws_cloudwatch_event_bus.messenger chat-messages +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_bus_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_bus_policy.html.markdown new file mode 100644 index 00000000000..32f5a677277 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_event_bus_policy.html.markdown @@ -0,0 +1,241 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_bus_policy" +description: |- + Provides a resource to create an EventBridge policy to support cross-account events. +--- + + + +# Resource: aws_cloudwatch_event_bus_policy + +Provides a resource to create an EventBridge resource policy to support cross-account events. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +~> **Note:** The EventBridge bus policy resource (`awsCloudwatchEventBusPolicy`) is incompatible with the EventBridge permission resource (`awsCloudwatchEventPermission`) and will overwrite permissions. + +## Example Usage + +### Account Access + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventBusPolicy } from "./.gen/providers/aws/cloudwatch-event-bus-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new DataAwsIamPolicyDocument(this, "test", { + statement: [ + { + actions: ["events:PutEvents"], + effect: "Allow", + principals: [ + { + identifiers: ["123456789012"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:events:eu-west-1:123456789012:event-bus/default", + ], + sid: "DevAccountAccess", + }, + ], + }); + const awsCloudwatchEventBusPolicyTest = new CloudwatchEventBusPolicy( + this, + "test_1", + { + eventBusName: Token.asString(awsCloudwatchEventBusTest.name), + policy: Token.asString(test.json), + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsCloudwatchEventBusPolicyTest.overrideLogicalId("test"); + } +} + +``` + +### Organization Access + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventBusPolicy } from "./.gen/providers/aws/cloudwatch-event-bus-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new DataAwsIamPolicyDocument(this, "test", { + statement: [ + { + actions: [ + "events:DescribeRule", + "events:ListRules", + "events:ListTargetsByRule", + "events:ListTagsForResource", + ], + condition: [ + { + test: "StringEquals", + values: [example.id], + variable: "aws:PrincipalOrgID", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:events:eu-west-1:123456789012:rule/*", + "arn:aws:events:eu-west-1:123456789012:event-bus/default", + ], + sid: "OrganizationAccess", + }, + ], + }); + const awsCloudwatchEventBusPolicyTest = new CloudwatchEventBusPolicy( + this, + "test_1", + { + eventBusName: Token.asString(awsCloudwatchEventBusTest.name), + policy: Token.asString(test.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventBusPolicyTest.overrideLogicalId("test"); + } +} + +``` + +### Multiple Statements + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventBusPolicy } from "./.gen/providers/aws/cloudwatch-event-bus-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new DataAwsIamPolicyDocument(this, "test", { + statement: [ + { + actions: ["events:PutEvents"], + effect: "Allow", + principals: [ + { + identifiers: ["123456789012"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:events:eu-west-1:123456789012:event-bus/default", + ], + sid: "DevAccountAccess", + }, + { + actions: [ + "events:DescribeRule", + "events:ListRules", + "events:ListTargetsByRule", + "events:ListTagsForResource", + ], + condition: [ + { + test: "StringEquals", + values: [example.id], + variable: "aws:PrincipalOrgID", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:events:eu-west-1:123456789012:rule/*", + "arn:aws:events:eu-west-1:123456789012:event-bus/default", + ], + sid: "OrganizationAccess", + }, + ], + }); + const awsCloudwatchEventBusPolicyTest = new CloudwatchEventBusPolicy( + this, + "test_1", + { + eventBusName: Token.asString(awsCloudwatchEventBusTest.name), + policy: Token.asString(test.json), + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsCloudwatchEventBusPolicyTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The text of the policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `eventBusName` - (Optional) The name of the event bus to set the permissions on. + If you omit this, the permissions are set on the `default` event bus. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the EventBridge event bus. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an EventBridge policy using the `eventBusName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an EventBridge policy using the `eventBusName`. For example: + +```console +% terraform import aws_cloudwatch_event_bus_policy.DevAccountAccess example-event-bus +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_connection.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_connection.html.markdown new file mode 100644 index 00000000000..56811aee6f6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_event_connection.html.markdown @@ -0,0 +1,273 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_connection" +description: |- + Provides an EventBridge connection resource. +--- + + + +# Resource: aws_cloudwatch_event_connection + +Provides an EventBridge connection resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventConnection } from "./.gen/providers/aws/cloudwatch-event-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchEventConnection(this, "test", { + authParameters: { + apiKey: { + key: "x-signature", + value: "1234", + }, + }, + authorizationType: "API_KEY", + description: "A connection description", + name: "ngrok-connection", + }); + } +} + +``` + +## Example Usage Basic Authorization + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
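+ * With API_KEY authorization, EventBridge stores the key's value in AWS
+ * Secrets Manager and adds the configured header (here `x-signature`) to each
+ * request the connection authorizes.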
+ */ +import { CloudwatchEventConnection } from "./.gen/providers/aws/cloudwatch-event-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchEventConnection(this, "test", { + authParameters: { + basic: { + password: "Pass1234!", + username: "user", + }, + }, + authorizationType: "BASIC", + description: "A connection description", + name: "ngrok-connection", + }); + } +} + +``` + +## Example Usage OAuth Authorization + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventConnection } from "./.gen/providers/aws/cloudwatch-event-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchEventConnection(this, "test", { + authParameters: { + oauth: { + authorizationEndpoint: "https://auth.url.com/endpoint", + clientParameters: { + clientId: "1234567890", + clientSecret: "Pass1234!", + }, + httpMethod: "GET", + oauthHttpParameters: { + body: [ + { + isValueSecret: false, + key: "body-parameter-key", + value: "body-parameter-value", + }, + ], + header: [ + { + isValueSecret: false, + key: "header-parameter-key", + value: "header-parameter-value", + }, + ], + queryString: [ + { + isValueSecret: false, + key: "query-string-parameter-key", + value: "query-string-parameter-value", + }, + ], + }, + }, + }, + authorizationType: "OAUTH_CLIENT_CREDENTIALS", + description: "A connection description", + name: "ngrok-connection", + }); + } +} + +``` + +## Example Usage Invocation Http Parameters + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventConnection } from "./.gen/providers/aws/cloudwatch-event-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchEventConnection(this, "test", { + authParameters: { + basic: { + password: "Pass1234!", + username: "user", + }, + invocationHttpParameters: { + body: [ + { + isValueSecret: false, + key: "body-parameter-key", + value: "body-parameter-value", + }, + { + isValueSecret: true, + key: "body-parameter-key2", + value: "body-parameter-value2", + }, + ], + header: [ + { + isValueSecret: false, + key: "header-parameter-key", + value: "header-parameter-value", + }, + ], + queryString: [ + { + isValueSecret: false, + key: "query-string-parameter-key", + value: "query-string-parameter-value", + }, + ], + }, + }, + authorizationType: "BASIC", + description: "A connection description", + name: "ngrok-connection", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the new connection. Maximum of 64 characters consisting of numbers, lower/upper case letters, .,-,_. +* `description` - (Optional) Enter a description for the connection. Maximum of 512 characters. +* `authorizationType` - (Required) Choose the type of authorization to use for the connection. 
One of `apiKey`, `basic`, `oauthClientCredentials`.
+* `authParameters` - (Required) Parameters used for authorization. A maximum of 1 is allowed. Documented below.
+* `invocationHttpParameters` - (Optional) Invocation Http Parameters are additional credentials used to sign each Invocation of the ApiDestination created from this Connection. If the ApiDestination Rule Target has additional HttpParameters, the values will be merged together, with the Connection Invocation Http Parameters taking precedence. Secret values are stored and managed by AWS Secrets Manager. A maximum of 1 is allowed. Documented below.
+
+`authParameters` supports the following:
+
+* `apiKey` - (Optional) Parameters used for API_KEY authorization. An API key to include in the header for each authentication request. A maximum of 1 is allowed. Conflicts with `basic` and `oauth`. Documented below.
+* `basic` - (Optional) Parameters used for BASIC authorization. A maximum of 1 is allowed. Conflicts with `apiKey` and `oauth`. Documented below.
+* `oauth` - (Optional) Parameters used for OAUTH_CLIENT_CREDENTIALS authorization. A maximum of 1 is allowed. Conflicts with `basic` and `apiKey`. Documented below.
+
+`apiKey` supports the following:
+
+* `key` - (Required) Header Name.
+* `value` - (Required) Header Value. Created and stored in AWS Secrets Manager.
+
+`basic` supports the following:
+
+* `username` - (Required) A username for the authorization.
+* `password` - (Required) A password for the authorization. Created and stored in AWS Secrets Manager.
+
+`oauth` supports the following:
+
+* `authorizationEndpoint` - (Required) The URL to the authorization endpoint.
+* `httpMethod` - (Required) The HTTP method to use for the authorization request.
+* `clientParameters` - (Required) Contains the client parameters for OAuth authorization. Contains the following two parameters.
+    * `clientId` - (Required) The client ID for the credentials to use for authorization. Created and stored in AWS Secrets Manager.
+    * `clientSecret` - (Required) The client secret for the credentials to use for authorization. Created and stored in AWS Secrets Manager.
+* `oauthHttpParameters` - (Required) OAuth Http Parameters are additional credentials used to sign the request to the authorization endpoint to exchange the OAuth Client information for an access token. Secret values are stored and managed by AWS Secrets Manager. A maximum of 1 is allowed. Documented below.
+
+`invocationHttpParameters` and `oauthHttpParameters` support the following:
+
+* `body` - (Optional) Contains additional body string parameters for the connection. You can include up to 100 additional body string parameters per request. Each additional parameter counts towards the event payload size, which cannot exceed 64 KB. Each parameter can contain the following:
+    * `key` - (Required) The key for the parameter.
+    * `value` - (Required) The value associated with the key. Created and stored in AWS Secrets Manager if the value is secret.
+    * `isValueSecret` - (Optional) Specifies whether the value is secret.
+
+* `header` - (Optional) Contains additional header parameters for the connection. You can include up to 100 additional header parameters per request. Each additional parameter counts towards the event payload size, which cannot exceed 64 KB. Each parameter can contain the following:
+    * `key` - (Required) The key for the parameter.
+    * `value` - (Required) The value associated with the key. Created and stored in AWS Secrets Manager if the value is secret.
+    * `isValueSecret` - (Optional) Specifies whether the value is secret.
+
+* `queryString` - (Optional) Contains additional query string parameters for the connection. You can include up to 100 additional query string parameters per request. Each additional parameter counts towards the event payload size, which cannot exceed 64 KB. Each parameter can contain the following:
+    * `key` - (Required) The key for the parameter.
+    * `value` - (Required) The value associated with the key. Created and stored in AWS Secrets Manager if the value is secret.
+    * `isValueSecret` - (Optional) Specifies whether the value is secret.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the connection.
+* `secretArn` - The Amazon Resource Name (ARN) of the secret created from the authorization parameters specified for the connection.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge connections using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EventBridge connections using the `name`. For example:
+
+```console
+% terraform import aws_cloudwatch_event_connection.test ngrok-connection
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_endpoint.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_endpoint.html.markdown
new file mode 100644
index 00000000000..0d4112d58f2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_event_endpoint.html.markdown
@@ -0,0 +1,126 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_endpoint"
+description: |-
+  Provides a resource to create an EventBridge Global Endpoint.
+---
+
+
+
+# Resource: aws_cloudwatch_event_endpoint
+
+Provides a resource to create an EventBridge Global Endpoint.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchEventEndpoint } from "./.gen/providers/aws/cloudwatch-event-endpoint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudwatchEventEndpoint(this, "this", {
+      eventBus: [
+        {
+          eventBusArn: primary.arn,
+        },
+        {
+          eventBusArn: secondary.arn,
+        },
+      ],
+      name: "global-endpoint",
+      replicationConfig: {
+        state: "DISABLED",
+      },
+      roleArn: replication.arn,
+      routingConfig: {
+        failoverConfig: {
+          primary: {
+            healthCheck: Token.asString(awsRoute53HealthCheckPrimary.arn),
+          },
+          secondary: {
+            route: "us-east-2",
+          },
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) A description of the global endpoint.
+* `eventBus` - (Required) The event buses to use. The names of the event buses must be identical in each Region. Exactly two event buses are required. Documented below.
+* `name` - (Required) The name of the global endpoint.
+* `replicationConfig` - (Optional) Parameters used for replication. Documented below.
+* `roleArn` - (Optional) The ARN of the IAM role used for replication between event buses.
+* `routingConfig` - (Required) Parameters used for routing, including the health check and secondary Region. Documented below.
+
+`eventBus` supports the following:
+
+* `eventBusArn` - (Required) The ARN of the event bus the endpoint is associated with.
+
+`replicationConfig` supports the following:
+
+* `state` - (Optional) The state of event replication. Valid values: `enabled`, `disabled`. The default state is `enabled`, which means you must supply a `roleArn`. If you don't have a `roleArn` or you don't want event replication enabled, set `state` to `disabled`.
+
+`routingConfig` supports the following:
+
+* `failoverConfig` - (Required) Parameters used for failover. This includes what triggers failover and what happens when it's triggered. Documented below.
+
+`failoverConfig` supports the following:
+
+* `primary` - (Required) Parameters used for the primary Region. Documented below.
+* `secondary` - (Required) Parameters used for the secondary Region, the Region that events are routed to when failover is triggered or event replication is enabled. Documented below.
+
+`primary` supports the following:
+
+* `healthCheck` - (Required) The ARN of the health check used by the endpoint to determine whether failover is triggered.
+
+`secondary` supports the following:
+
+* `route` - (Required) The name of the secondary Region.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the endpoint that was created.
+* `endpointUrl` - The URL of the endpoint that was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Global Endpoints using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EventBridge Global Endpoints using the `name`. For example:
+
+```console
+% terraform import aws_cloudwatch_event_endpoint.imported_endpoint example-endpoint
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_permission.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_permission.html.markdown
new file mode 100644
index 00000000000..6a31d39a356
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_event_permission.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_permission"
+description: |-
+  Provides a resource to create an EventBridge permission to support cross-account events in the current account default event bus.
+---
+
+
+
+# Resource: aws_cloudwatch_event_permission
+
+Provides a resource to create an EventBridge permission to support cross-account events in the current account default event bus.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+~> **Note:** The EventBridge bus policy resource (`awsCloudwatchEventBusPolicy`) is incompatible with the EventBridge permission resource (`awsCloudwatchEventPermission`) and will overwrite permissions.
+
+## Example Usage
+
+### Account Access
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchEventPermission } from "./.gen/providers/aws/cloudwatch-event-permission";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudwatchEventPermission(this, "DevAccountAccess", {
+      principal: "123456789012",
+      statementId: "DevAccountAccess",
+    });
+  }
+}
+
+```
+
+### Organization Access
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchEventPermission } from "./.gen/providers/aws/cloudwatch-event-permission";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudwatchEventPermission(this, "OrganizationAccess", {
+      condition: {
+        key: "aws:PrincipalOrgID",
+        type: "StringEquals",
+        value: example.id,
+      },
+      principal: "*",
+      statementId: "OrganizationAccess",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `principal` - (Required) The 12-digit AWS account ID that you are permitting to put events to your default event bus. Specify `*` to permit any account to put events to your default event bus, optionally limited by `condition`.
+* `statementId` - (Required) An identifier string for the external account that you are granting permissions to.
+* `action` - (Optional) The action that you are enabling the other account to perform. Defaults to `events:PutEvents`.
+* `condition` - (Optional) Configuration block to limit the event bus permissions you are granting to only accounts that fulfill the condition. Specified below.
+* `eventBusName` - (Optional) The name of the event bus to set the permissions on.
+  If you omit this, the permissions are set on the `default` event bus.
+
+### condition
+
+* `key` - (Required) Key for the condition. Valid values: `aws:PrincipalOrgID`.
+* `type` - (Required) Type of condition. Valid values: `StringEquals`.
+* `value` - (Required) Value for the key.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The statement ID of the EventBridge permission.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge permissions using the `eventBusName/statementId` (if you omit `eventBusName`, the `default` event bus will be used).
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EventBridge permissions using the `eventBusName/statementId` (if you omit `eventBusName`, the `default` event bus will be used). For example:
+
+```console
+% terraform import aws_cloudwatch_event_permission.DevAccountAccess example-event-bus/DevAccountAccess
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_rule.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_rule.html.markdown
new file mode 100644
index 00000000000..f6062c247d5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_event_rule.html.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "EventBridge"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_event_rule"
+description: |-
+  Provides an EventBridge Rule resource.
+---
+
+
+
+# Resource: aws_cloudwatch_event_rule
+
+Provides an EventBridge Rule resource.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```terraform
+resource "aws_cloudwatch_event_rule" "console" {
+  name        = "capture-aws-sign-in"
+  description = "Capture each AWS Console Sign In"
+
+  event_pattern = jsonencode({
+    detail-type = [
+      "AWS Console Sign In via CloudTrail"
+    ]
+  })
+}
+
+resource "aws_cloudwatch_event_target" "sns" {
+  rule      = aws_cloudwatch_event_rule.console.name
+  target_id = "SendToSNS"
+  arn       = aws_sns_topic.aws_logins.arn
+}
+
+resource "aws_sns_topic" "aws_logins" {
+  name = "aws-console-logins"
+}
+
+resource "aws_sns_topic_policy" "default" {
+  arn    = aws_sns_topic.aws_logins.arn
+  policy = data.aws_iam_policy_document.sns_topic_policy.json
+}
+
+data "aws_iam_policy_document" "sns_topic_policy" {
+  statement {
+    effect  = "Allow"
+    actions = ["SNS:Publish"]
+
+    principals {
+      type        = "Service"
+      identifiers = ["events.amazonaws.com"]
+    }
+
+    resources = [aws_sns_topic.aws_logins.arn]
+  }
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the rule. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `scheduleExpression` - (Optional) The scheduling expression. For example, `cron(0 20 * * ? *)` or `rate(5 minutes)`. At least one of `scheduleExpression` or `eventPattern` is required. Can only be used on the default event bus. For more information, refer to the AWS documentation [Schedule Expressions for Rules](https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html).
+* `eventBusName` - (Optional) The name or ARN of the event bus to associate with this rule.
+  If you omit this, the `default` event bus is used.
+* `eventPattern` - (Optional) The event pattern described as a JSON object. At least one of `scheduleExpression` or `eventPattern` is required. See full documentation of [Events and Event Patterns in EventBridge](https://docs.aws.amazon.com/eventbridge/latest/userguide/eventbridge-and-event-patterns.html) for details.
+* `description` - (Optional) The description of the rule.
+* `roleArn` - (Optional) The Amazon Resource Name (ARN) associated with the role that is used for target invocation. +* `isEnabled` - (Optional) Whether the rule should be enabled (defaults to `true`). +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the rule. +* `arn` - The Amazon Resource Name (ARN) of the rule. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Rules using the `eventBusName/ruleName` (if you omit `eventBusName`, the `default` event bus will be used). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EventBridge Rules using the `eventBusName/ruleName` (if you omit `eventBusName`, the `default` event bus will be used). For example: + +```console +% terraform import aws_cloudwatch_event_rule.console example-event-bus/capture-console-sign-in +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_event_target.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_event_target.html.markdown new file mode 100644 index 00000000000..bc517d30434 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_event_target.html.markdown @@ -0,0 +1,824 @@ +--- +subcategory: "EventBridge" +layout: "aws" +page_title: "AWS: aws_cloudwatch_event_target" +description: |- + Provides an EventBridge Target resource. +--- + + + +# Resource: aws_cloudwatch_event_target + +Provides an EventBridge Target resource. + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. 
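+
+A target attaches to a rule and, at a minimum, names that rule and the ARN to invoke when the rule fires. A minimal sketch in CDKTF TypeScript (the rule name and topic ARN below are illustrative, not taken from the examples that follow):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target";
+
+class MinimalTargetStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Deliver matched events from an existing rule to an SNS topic.
+    new CloudwatchEventTarget(this, "to_sns", {
+      rule: "capture-aws-sign-in", // name of an existing aws_cloudwatch_event_rule
+      arn: "arn:aws:sns:us-east-1:123456789012:aws-console-logins", // ARN to invoke
+    });
+  }
+}
+```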
+ +## Example Usage + +### Kinesis Usage + +```terraform +resource "aws_cloudwatch_event_target" "yada" { + target_id = "Yada" + rule = aws_cloudwatch_event_rule.console.name + arn = aws_kinesis_stream.test_stream.arn + + run_command_targets { + key = "tag:Name" + values = ["FooBar"] + } + + run_command_targets { + key = "InstanceIds" + values = ["i-162058cd308bffec2"] + } +} + +resource "aws_cloudwatch_event_rule" "console" { + name = "capture-ec2-scaling-events" + description = "Capture all EC2 scaling events" + + event_pattern = jsonencode({ + source = [ + "aws.autoscaling" + ] + + detail-type = [ + "EC2 Instance Launch Successful", + "EC2 Instance Terminate Successful", + "EC2 Instance Launch Unsuccessful", + "EC2 Instance Terminate Unsuccessful" + ] + }) +} + +resource "aws_kinesis_stream" "test_stream" { + name = "terraform-kinesis-test" + shard_count = 1 +} +``` + +### SSM Document Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule"; +import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { SsmDocument } from "./.gen/providers/aws/ssm-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const stopInstances = new CloudwatchEventRule(this, "stop_instances", { + description: "Stop instances nightly", + name: "StopInstance", + scheduleExpression: "cron(0 0 * * ? *)", + }); + const stopInstance = new SsmDocument(this, "stop_instance", { + content: Token.asString( + Fn.jsonencode({ + description: "Stop an instance", + parameters: {}, + runtimeConfig: { + "aws:runShellScript": { + properties: [ + { + id: "0.aws:runShellScript", + runCommand: ["halt"], + }, + ], + }, + }, + schemaVersion: "1.2", + }) + ), + documentType: "Command", + name: "stop_instance", + }); + const ssmLifecycle = new DataAwsIamPolicyDocument(this, "ssm_lifecycle", { + statement: [ + { + actions: ["ssm:SendCommand"], + condition: [ + { + test: "StringEquals", + values: ["*"], + variable: "ec2:ResourceTag/Terminate", + }, + ], + effect: "Allow", + resources: ["arn:aws:ec2:eu-west-1:1234567890:instance/*"], + }, + { + actions: ["ssm:SendCommand"], + effect: "Allow", + resources: [stopInstance.arn], + }, + ], + }); + const ssmLifecycleTrust = new DataAwsIamPolicyDocument( + this, + "ssm_lifecycle_trust", + { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["events.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const awsIamPolicySsmLifecycle = new IamPolicy(this, "ssm_lifecycle_4", { + name: "SSMLifecycle", + policy: Token.asString(ssmLifecycle.json), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamPolicySsmLifecycle.overrideLogicalId("ssm_lifecycle"); + const awsIamRoleSsmLifecycle = new IamRole(this, "ssm_lifecycle_5", { + assumeRolePolicy: Token.asString(ssmLifecycleTrust.json), + name: "SSMLifecycle", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleSsmLifecycle.overrideLogicalId("ssm_lifecycle"); + const awsIamRolePolicyAttachmentSsmLifecycle = new IamRolePolicyAttachment( + this, + "ssm_lifecycle_6", + { + policyArn: Token.asString(awsIamPolicySsmLifecycle.arn), + role: Token.asString(awsIamRoleSsmLifecycle.name), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentSsmLifecycle.overrideLogicalId("ssm_lifecycle"); + const awsCloudwatchEventTargetStopInstances = new CloudwatchEventTarget( + this, + "stop_instances_7", + { + arn: stopInstance.arn, + roleArn: Token.asString(awsIamRoleSsmLifecycle.arn), + rule: stopInstances.name, + runCommandTargets: [ + { + key: "tag:Terminate", + values: ["midnight"], + }, + ], + targetId: "StopInstance", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventTargetStopInstances.overrideLogicalId("stop_instances"); + } +} + +``` + +### RunCommand Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule"; +import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const stopInstances = new CloudwatchEventRule(this, "stop_instances", { + description: "Stop instances nightly", + name: "StopInstance", + scheduleExpression: "cron(0 0 * * ? *)", + }); + const awsCloudwatchEventTargetStopInstances = new CloudwatchEventTarget( + this, + "stop_instances_1", + { + arn: + "arn:aws:ssm:${" + awsRegion.value + "}::document/AWS-RunShellScript", + input: '{\\"commands\\":[\\"halt\\"]}', + roleArn: ssmLifecycle.arn, + rule: stopInstances.name, + runCommandTargets: [ + { + key: "tag:Terminate", + values: ["midnight"], + }, + ], + targetId: "StopInstance", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventTargetStopInstances.overrideLogicalId("stop_instances"); + } +} + +``` + +### ECS Run Task with Role and Task Override Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
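+ * Note: `taskName`, `clusterName`, and `everyHour` below are converter
+ * references to an ECS task definition, an ECS cluster, and an EventBridge
+ * rule assumed to be defined elsewhere in the stack.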
+ */ +import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["events.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const ecsEventsRunTaskWithAnyRole = new DataAwsIamPolicyDocument( + this, + "ecs_events_run_task_with_any_role", + { + statement: [ + { + actions: ["iam:PassRole"], + effect: "Allow", + resources: ["*"], + }, + { + actions: ["ecs:RunTask"], + effect: "Allow", + resources: [ + Token.asString(Fn.replace(taskName.arn, "/:\\\\d+$/", ":*")), + ], + }, + ], + } + ); + const ecsEvents = new IamRole(this, "ecs_events", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "ecs_events", + }); + const awsIamRolePolicyEcsEventsRunTaskWithAnyRole = new IamRolePolicy( + this, + "ecs_events_run_task_with_any_role_3", + { + name: "ecs_events_run_task_with_any_role", + policy: Token.asString(ecsEventsRunTaskWithAnyRole.json), + role: ecsEvents.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyEcsEventsRunTaskWithAnyRole.overrideLogicalId( + "ecs_events_run_task_with_any_role" + ); + new CloudwatchEventTarget(this, "ecs_scheduled_task", { + arn: clusterName.arn, + ecsTarget: { + taskCount: 1, + taskDefinitionArn: taskName.arn, + }, + input: Token.asString( + Fn.jsonencode({ + containerOverrides: [ + { + command: ["bin/console", "scheduled-task"], + name: "name-of-container-to-override", + }, + ], + }) + ), + roleArn: ecsEvents.arn, + rule: everyHour.name, + targetId: "run-scheduled-task-every-hour", + }); + } +} + +``` + +### API Gateway target + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ApiGatewayDeployment } from "./.gen/providers/aws/api-gateway-deployment"; +import { ApiGatewayStage } from "./.gen/providers/aws/api-gateway-stage"; +import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule"; +import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target"; +interface MyConfig { + stageName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new ApiGatewayDeployment(this, "example", { + restApiId: Token.asString(awsApiGatewayRestApiExample.id), + }); + const awsApiGatewayStageExample = new ApiGatewayStage(this, "example_1", { + deploymentId: example.id, + restApiId: Token.asString(awsApiGatewayRestApiExample.id), + stageName: config.stageName, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsApiGatewayStageExample.overrideLogicalId("example"); + const awsCloudwatchEventRuleExample = new CloudwatchEventRule( + this, + "example_2", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventRuleExample.overrideLogicalId("example"); + const awsCloudwatchEventTargetExample = new CloudwatchEventTarget( + this, + "example_3", + { + arn: "${" + awsApiGatewayStageExample.executionArn + "}/GET", + httpTarget: { + headerParameters: { + Env: "Test", + }, + queryStringParameters: { + Body: "$.detail.body", + }, + }, + rule: Token.asString(awsCloudwatchEventRuleExample.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchEventTargetExample.overrideLogicalId("example"); + } +} + +``` + +### Cross-Account Event Bus target + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule"; +import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const stopInstances = new CloudwatchEventRule(this, "stop_instances", { + description: "Stop instances nightly", + name: "StopInstance", + scheduleExpression: "cron(0 0 * * ? *)", + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["events.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const eventBusInvokeRemoteEventBus = new DataAwsIamPolicyDocument( + this, + "event_bus_invoke_remote_event_bus", + { + statement: [ + { + actions: ["events:PutEvents"], + effect: "Allow", + resources: [ + "arn:aws:events:eu-west-1:1234567890:event-bus/My-Event-Bus", + ], + }, + ], + } + ); + const awsIamPolicyEventBusInvokeRemoteEventBus = new IamPolicy( + this, + "event_bus_invoke_remote_event_bus_3", + { + name: "event_bus_invoke_remote_event_bus", + policy: Token.asString(eventBusInvokeRemoteEventBus.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamPolicyEventBusInvokeRemoteEventBus.overrideLogicalId( + "event_bus_invoke_remote_event_bus" + ); + const awsIamRoleEventBusInvokeRemoteEventBus = new IamRole( + this, + "event_bus_invoke_remote_event_bus_4", + { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "event-bus-invoke-remote-event-bus", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsIamRoleEventBusInvokeRemoteEventBus.overrideLogicalId(
+      "event_bus_invoke_remote_event_bus"
+    );
+    const awsIamRolePolicyAttachmentEventBusInvokeRemoteEventBus =
+      new IamRolePolicyAttachment(this, "event_bus_invoke_remote_event_bus_5", {
+        policyArn: Token.asString(awsIamPolicyEventBusInvokeRemoteEventBus.arn),
+        role: Token.asString(awsIamRoleEventBusInvokeRemoteEventBus.name),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyAttachmentEventBusInvokeRemoteEventBus.overrideLogicalId(
+      "event_bus_invoke_remote_event_bus"
+    );
+    const awsCloudwatchEventTargetStopInstances = new CloudwatchEventTarget(
+      this,
+      "stop_instances_6",
+      {
+        arn: "arn:aws:events:eu-west-1:1234567890:event-bus/My-Event-Bus",
+        roleArn: Token.asString(awsIamRoleEventBusInvokeRemoteEventBus.arn),
+        rule: stopInstances.name,
+        targetId: "StopInstance",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchEventTargetStopInstances.overrideLogicalId("stop_instances");
+  }
+}
+
+```
+
+### Input Transformer Usage - JSON Object
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule";
+import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudwatchEventRule(this, "example", {});
+    const awsCloudwatchEventTargetExample = new CloudwatchEventTarget(
+      this,
+      "example_1",
+      {
+        arn: Token.asString(awsLambdaFunctionExample.arn),
+        inputTransformer: {
+          inputPaths: {
+            instance: "$.detail.instance",
+            status: "$.detail.status",
+          },
+          inputTemplate:
+            '{\n "instance_id": <instance>,\n "instance_status": <status>\n}\n\n',
+        },
+        rule: example.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchEventTargetExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Input Transformer Usage - Simple String
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
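+ *
+ * The example below extracts $.detail.instance and $.detail.status from the
+ * matched event and substitutes them into the <instance> and <status>
+ * placeholders of a plain quoted string.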
+ */
+import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule";
+import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudwatchEventRule(this, "example", {});
+    const awsCloudwatchEventTargetExample = new CloudwatchEventTarget(
+      this,
+      "example_1",
+      {
+        arn: Token.asString(awsLambdaFunctionExample.arn),
+        inputTransformer: {
+          inputPaths: {
+            instance: "$.detail.instance",
+            status: "$.detail.status",
+          },
+          inputTemplate: '\\"<instance> is in state <status>\\"',
+        },
+        rule: example.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchEventTargetExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Cloudwatch Log Group Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchEventRule } from "./.gen/providers/aws/cloudwatch-event-rule";
+import { CloudwatchEventTarget } from "./.gen/providers/aws/cloudwatch-event-target";
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudwatchEventRule(this, "example", {
+      description: "GuardDuty Findings",
+      eventPattern: Token.asString(
+        Fn.jsonencode({
+          source: ["aws.guardduty"],
+        })
+      ),
+      name: "guard-duty_event_rule",
+      tags: {
+        Environment: "example",
+      },
+    });
+    const awsCloudwatchLogGroupExample = new CloudwatchLogGroup(
+      this,
+      "example_1",
+      {
+        name: "/aws/events/guardduty/logs",
+        retentionInDays: 1,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogGroupExample.overrideLogicalId("example");
+    const exampleLogPolicy = new DataAwsIamPolicyDocument(
+      this,
+      "example_log_policy",
+      {
+        statement: [
+          {
+            actions: ["logs:CreateLogStream"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: [
+                  "events.amazonaws.com",
+                  "delivery.logs.amazonaws.com",
+                ],
+                type: "Service",
+              },
+            ],
+            resources: ["${" + awsCloudwatchLogGroupExample.arn + "}:*"],
+          },
+          {
+            actions: ["logs:PutLogEvents"],
+            condition: [
+              {
+                test: "ArnEquals",
+                values: [example.arn],
+                variable: "aws:SourceArn",
+              },
+            ],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: [
+                  "events.amazonaws.com",
+                  "delivery.logs.amazonaws.com",
+                ],
+                type: "Service",
+              },
+            ],
+            resources: ["${" + awsCloudwatchLogGroupExample.arn + "}:*:*"],
+          },
+        ],
+      }
+    );
+    const awsCloudwatchEventTargetExample = new CloudwatchEventTarget(
+      this,
+      "example_3",
+      {
+        arn: Token.asString(awsCloudwatchLogGroupExample.arn),
+        rule: example.name,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsCloudwatchEventTargetExample.overrideLogicalId("example");
+    const awsCloudwatchLogResourcePolicyExample =
+      new CloudwatchLogResourcePolicy(this, "example_4", {
+        policyDocument: Token.asString(exampleLogPolicy.json),
+        policyName: "guardduty-log-publishing-policy",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogResourcePolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+-> **Note:** To allow an EventBridge rule to invoke your AWS Lambda function or
+   SNS topic, you must set up the right permissions
+   using [`awsLambdaPermission`](/docs/providers/aws/r/lambda_permission.html)
+   or [`awsSnsTopicPolicy`](/docs/providers/aws/r/sns_topic.html#policy).
+   For more information, see the [EventBridge documentation](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-use-resource-based.html).
+
+The following arguments are required:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the target.
+* `rule` - (Required) The name of the rule you want to add targets to.
+
+The following arguments are optional:
+
+* `batchTarget` - (Optional) Parameters used when you are using the rule to invoke an AWS Batch job. Documented below. A maximum of 1 is allowed.
+* `deadLetterConfig` - (Optional) Parameters used when you are providing a dead letter config. Documented below. A maximum of 1 is allowed.
+* `ecsTarget` - (Optional) Parameters used when you are using the rule to invoke an Amazon ECS task. Documented below. A maximum of 1 is allowed.
+* `eventBusName` - (Optional) The name or ARN of the event bus to associate with the rule.
+  If you omit this, the `default` event bus is used.
+* `httpTarget` - (Optional) Parameters used when you are using the rule to invoke an API Gateway REST endpoint. Documented below. A maximum of 1 is allowed.
+* `input` - (Optional) Valid JSON text passed to the target. Conflicts with `inputPath` and `inputTransformer`.
+* `inputPath` - (Optional) The value of the [JSONPath](http://goessner.net/articles/JsonPath/) that is used for extracting part of the matched event when passing it to the target. Conflicts with `input` and `inputTransformer`.
+* `inputTransformer` - (Optional) Parameters used when you are providing a custom input to a target based on certain event data. Conflicts with `input` and `inputPath`.
+* `kinesisTarget` - (Optional) Parameters used when you are using the rule to invoke an Amazon Kinesis stream. Documented below. A maximum of 1 is allowed.
+* `roleArn` - (Optional) The Amazon Resource Name (ARN) of the IAM role to be used for this target when the rule is triggered. Required if `ecsTarget` is used, or if the target in `arn` is an EC2 instance, a Kinesis data stream, a Step Functions state machine, or an event bus in a different account or Region.
+* `runCommandTargets` - (Optional) Parameters used when you are using the rule to invoke Amazon EC2 Run Command. Documented below. A maximum of 5 are allowed.
+* `redshiftTarget` - (Optional) Parameters used when you are using the rule to invoke an Amazon Redshift statement. Documented below. A maximum of 1 is allowed.
+* `retryPolicy` - (Optional) Parameters used when you are providing retry policies. Documented below. A maximum of 1 is allowed.
+* `sqsTarget` - (Optional) Parameters used when you are using the rule to invoke an Amazon SQS queue. Documented below. A maximum of 1 is allowed.
+* `targetId` - (Optional) The unique target assignment ID. If missing, a random, unique ID will be generated.
+
+### batch_target
+
+* `jobDefinition` - (Required) The ARN or name of the job definition to use if the event target is an AWS Batch job. This job definition must already exist.
+* `jobName` - (Required) The name to use for this execution of the job, if the target is an AWS Batch job.
+* `arraySize` - (Optional) The size of the array, if this is an array batch job. Valid values are integers between 2 and 10,000.
+* `jobAttempts` - (Optional) The number of times to attempt to retry if the job fails. Valid values are 1 to 10.
+
+### capacity_provider_strategy
+
+* `capacityProvider` - (Required) Short name of the capacity provider.
+* `weight` - (Required) The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
+* `base` - (Optional) The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Defaults to `0`.
+
+### dead_letter_config
+
+* `arn` - (Optional) ARN of the SQS queue specified as the target for the dead-letter queue.
+
+### ecs_target
+
+* `taskDefinitionArn` - (Required) The ARN of the task definition to use if the event target is an Amazon ECS cluster.
+* `capacityProviderStrategy` - (Optional) The capacity provider strategy to use for the task. If a `capacityProviderStrategy` is specified, the `launchType` parameter must be omitted. If no `capacityProviderStrategy` or `launchType` is specified, the default capacity provider strategy for the cluster is used. Can be one or more. See below.
+* `enableEcsManagedTags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the task.
+* `enableExecuteCommand` - (Optional) Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
+* `group` - (Optional) Specifies an ECS task group for the task. The maximum length is 255 characters.
+* `launchType` - (Optional) Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. Valid values include: `ec2`, `external`, or `fargate`.
+* `networkConfiguration` - (Optional) Use this if the ECS task uses the awsvpc network mode. This specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. Required if `launchType` is `fargate` because the awsvpc mode is required for Fargate tasks.
+* `orderedPlacementStrategy` - (Optional) An array of placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
+* `placementConstraint` - (Optional) An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). See below.
+* `platformVersion` - (Optional) Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as `1.1.0`. This is used only if LaunchType is FARGATE.
For more information about valid platform versions, see [AWS Fargate Platform Versions](http://docs.aws.amazon.com/AmazonECS/latest/developerguide/platform_versions.html).
+* `propagateTags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. The only valid value is `taskDefinition`.
+* `taskCount` - (Optional) The number of tasks to create based on the TaskDefinition. Defaults to `1`.
+* `tags` - (Optional) A map of tags to assign to ECS resources.
+
+### http_target
+
+* `headerParameters` - (Optional) Enables you to specify HTTP headers to add to the request.
+* `pathParameterValues` - (Optional) The list of values that correspond sequentially to any path variables in your endpoint ARN (for example `arn:aws:execute-api:us-east-1:123456:myapi/*/POST/pets/*`).
+* `queryStringParameters` - (Optional) Represents keys/values of query string parameters that are appended to the invoked endpoint.
+
+### input_transformer
+
+* `inputTemplate` - (Required) Template to customize data sent to the target. Must be valid JSON. To send a string value, the string value must include double quotes. Values must be escaped for both JSON and Terraform, e.g., `"\"Your string goes here.\\nA new line.\""`
+* `inputPaths` - (Optional) Key value pairs specified in the form of JSONPath (for example, time = $.time)
+    * You can have as many as 100 key-value pairs.
+    * You must use JSON dot notation, not bracket notation.
+    * The keys can't start with "AWS".
+
+### kinesis_target
+
+* `partitionKeyPath` - (Optional) The JSON path to be extracted from the event and used as the partition key.
+
+### network_configuration
+
+* `subnets` - (Required) The subnets associated with the task or service.
+* `securityGroups` - (Optional) The security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used.
+* `assignPublicIp` - (Optional) Assign a public IP address to the ENI (Fargate launch type only). Valid values are `true` or `false`. Defaults to `false`.
+
+For more information, see [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html).
+
+### ordered_placement_strategy
+
+* `type` - (Required) Type of placement strategy. The only valid values at this time are `binpack`, `random` and `spread`.
+* `field` - (Optional) The field to apply the placement strategy against. For the `spread` placement strategy, valid values are `instanceId` (or `host`, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as `attribute:ecs.availability-zone`. For the `binpack` placement strategy, valid values are `cpu` and `memory`. For the `random` placement strategy, this field is not used. For more information, see [Amazon ECS task placement strategies](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-placement-strategies.html).
+
+### placement_constraint
+
+* `type` - (Required) Type of constraint. The only valid values at this time are `memberOf` and `distinctInstance`.
+* `expression` - (Optional) Cluster Query Language expression to apply to the constraint. Does not need to be specified for the `distinctInstance` type.
For more information, see [Cluster Query Language in the Amazon EC2 Container Service Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html).
+
+### redshift_target
+
+* `database` - (Required) The name of the database.
+* `dbUser` - (Optional) The database user name.
+* `secretsManagerArn` - (Optional) The name or ARN of the secret that enables access to the database.
+* `sql` - (Optional) The SQL statement text to run.
+* `statementName` - (Optional) The name of the SQL statement.
+* `withEvent` - (Optional) Indicates whether to send an event back to EventBridge after the SQL statement runs.
+
+### retry_policy
+
+* `maximumEventAgeInSeconds` - (Optional) The age in seconds to continue to make retry attempts.
+* `maximumRetryAttempts` - (Optional) The maximum number of retry attempts to make before the request fails.
+
+### run_command_targets
+
+* `key` - (Required) Can be either `tag:tagKey` or `instanceIds`.
+* `values` - (Required) If Key is `tag:tagKey`, Values is a list of tag values. If Key is `instanceIds`, Values is a list of Amazon EC2 instance IDs.
+
+### sqs_target
+
+* `messageGroupId` - (Optional) The FIFO message group ID to use as the target.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Targets using `eventBusName/ruleName/targetId` (if you omit `eventBusName`, the `default` event bus will be used). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EventBridge Targets using `eventBusName/ruleName/targetId` (if you omit `eventBusName`, the `default` event bus will be used). For example:
+
+```console
+% terraform import aws_cloudwatch_event_target.test-event-target rule-name/target-id
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_data_protection_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_data_protection_policy.html.markdown
new file mode 100644
index 00000000000..3f525656256
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_log_data_protection_policy.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "CloudWatch Logs"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_log_data_protection_policy"
+description: |-
+  Provides a CloudWatch Log Data Protection Policy resource.
+---
+
+
+# Resource: aws_cloudwatch_log_data_protection_policy
+
+Provides a CloudWatch Log Data Protection Policy resource.
+
+Read more about protecting sensitive user data in the [User Guide](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/mask-sensitive-log-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
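+ *
+ * The policy below first audits email addresses, sending findings to an S3
+ * bucket, and then masks (de-identifies) them in the log events.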
+ */
+import { CloudwatchLogDataProtectionPolicy } from "./.gen/providers/aws/cloudwatch-log-data-protection-policy";
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudwatchLogGroup(this, "example", {
+      name: "example",
+    });
+    const awsS3BucketExample = new S3Bucket(this, "example_1", {
+      bucket: "example",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketExample.overrideLogicalId("example");
+    const awsCloudwatchLogDataProtectionPolicyExample =
+      new CloudwatchLogDataProtectionPolicy(this, "example_2", {
+        logGroupName: example.name,
+        policyDocument: Token.asString(
+          Fn.jsonencode({
+            Name: "Example",
+            Statement: [
+              {
+                DataIdentifier: [
+                  "arn:aws:dataprotection::aws:data-identifier/EmailAddress",
+                ],
+                Operation: {
+                  Audit: {
+                    FindingsDestination: {
+                      S3: {
+                        Bucket: awsS3BucketExample.bucket,
+                      },
+                    },
+                  },
+                },
+                Sid: "Audit",
+              },
+              {
+                DataIdentifier: [
+                  "arn:aws:dataprotection::aws:data-identifier/EmailAddress",
+                ],
+                Operation: {
+                  Deidentify: {
+                    MaskConfig: {},
+                  },
+                },
+                Sid: "Redact",
+              },
+            ],
+            Version: "2021-06-01",
+          })
+        ),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogDataProtectionPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `logGroupName` - (Required) The name of the log group to attach the data protection policy to.
+* `policyDocument` - (Required) Specifies the data protection policy in JSON. Read more at [Data protection policy syntax](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/mask-sensitive-log-data-start.html#mask-sensitive-log-data-policysyntax).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import this resource using the `logGroupName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import this resource using the `logGroupName`. For example:
+
+```console
+% terraform import aws_cloudwatch_log_data_protection_policy.example my-log-group
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_destination.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_destination.html.markdown
new file mode 100644
index 00000000000..7fc36b128da
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_log_destination.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "CloudWatch Logs"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_log_destination"
+description: |-
+  Provides a CloudWatch Logs destination.
+---
+
+
+# Resource: aws_cloudwatch_log_destination
+
+Provides a CloudWatch Logs destination resource.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogDestination } from "./.gen/providers/aws/cloudwatch-log-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchLogDestination(this, "test_destination", { + name: "test_destination", + roleArn: iamForCloudwatch.arn, + targetArn: kinesisForCloudwatch.arn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A name for the log destination. +* `roleArn` - (Required) The ARN of an IAM role that grants Amazon CloudWatch Logs permissions to put data into the target. +* `targetArn` - (Required) The ARN of the target Amazon Kinesis stream resource for the destination. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) specifying the log destination. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Logs destinations using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch Logs destinations using the `name`. For example: + +```console +% terraform import aws_cloudwatch_log_destination.test_destination test_destination +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_destination_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_destination_policy.html.markdown new file mode 100644 index 00000000000..deda1f1d703 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_log_destination_policy.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_destination_policy" +description: |- + Provides a CloudWatch Logs destination policy. +--- + + + +# Resource: aws_cloudwatch_log_destination_policy + +Provides a CloudWatch Logs destination policy resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
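+ *
+ * The policy below allows account 123456789012 to create subscription
+ * filters against the destination defined alongside it.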
+ */
+import { CloudwatchLogDestination } from "./.gen/providers/aws/cloudwatch-log-destination";
+import { CloudwatchLogDestinationPolicy } from "./.gen/providers/aws/cloudwatch-log-destination-policy";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const testDestination = new CloudwatchLogDestination(
+      this,
+      "test_destination",
+      {
+        name: "test_destination",
+        roleArn: iamForCloudwatch.arn,
+        targetArn: kinesisForCloudwatch.arn,
+      }
+    );
+    const testDestinationPolicy = new DataAwsIamPolicyDocument(
+      this,
+      "test_destination_policy",
+      {
+        statement: [
+          {
+            actions: ["logs:PutSubscriptionFilter"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["123456789012"],
+                type: "AWS",
+              },
+            ],
+            resources: [testDestination.arn],
+          },
+        ],
+      }
+    );
+    const awsCloudwatchLogDestinationPolicyTestDestinationPolicy =
+      new CloudwatchLogDestinationPolicy(this, "test_destination_policy_2", {
+        accessPolicy: Token.asString(testDestinationPolicy.json),
+        destinationName: testDestination.name,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogDestinationPolicyTestDestinationPolicy.overrideLogicalId(
+      "test_destination_policy"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `destinationName` - (Required) The name of the log destination to assign this policy to.
+* `accessPolicy` - (Required) The policy document. This is a JSON-formatted string.
+* `forceUpdate` - (Optional) Specify true if you are updating an existing destination policy to grant permission to an organization ID instead of granting permission to individual AWS accounts.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Logs destination policies using the `destinationName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch Logs destination policies using the `destinationName`. For example:
+
+```console
+% terraform import aws_cloudwatch_log_destination_policy.test_destination_policy test_destination
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_group.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_group.html.markdown
new file mode 100644
index 00000000000..9b9bd7691b3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_log_group.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "CloudWatch Logs"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_log_group"
+description: |-
+  Provides a CloudWatch Log Group resource.
+---
+
+
+# Resource: aws_cloudwatch_log_group
+
+Provides a CloudWatch Log Group resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchLogGroup(this, "yada", { + name: "Yada", + tags: { + Application: "serviceA", + Environment: "production", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional, Forces new resource) The name of the log group. If omitted, Terraform will assign a random, unique name. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `skipDestroy` - (Optional) Set to true if you do not wish the log group (and any logs it may contain) to be deleted at destroy time, and instead just remove the log group from the Terraform state. +* `retentionInDays` - (Optional) Specifies the number of days + you want to retain log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1096, 1827, 2192, 2557, 2922, 3288, 3653, and 0. + If you select 0, the events in the log group are always retained and never expire. +* `kmsKeyId` - (Optional) The ARN of the KMS Key to use when encrypting log data. Please note, after the AWS KMS CMK is disassociated from the log group, +AWS CloudWatch Logs stops encrypting newly ingested data for the log group. All previously ingested data remains encrypted, and AWS CloudWatch Logs requires +permissions for the CMK whenever the encrypted data is requested. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) specifying the log group. Any `:*` suffix added by the API, denoting all CloudWatch Log Streams under the CloudWatch Log Group, is removed for greater compatibility with other AWS services that do not accept the suffix. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudwatch Log Groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Cloudwatch Log Groups using the `name`. 
For example:
+
+```console
+% terraform import aws_cloudwatch_log_group.test_group yada
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_metric_filter.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_metric_filter.html.markdown
new file mode 100644
index 00000000000..30af69432de
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_log_metric_filter.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "CloudWatch Logs"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_log_metric_filter"
+description: |-
+  Provides a CloudWatch Log Metric Filter resource.
+---
+
+
+# Resource: aws_cloudwatch_log_metric_filter
+
+Provides a CloudWatch Log Metric Filter resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { CloudwatchLogMetricFilter } from "./.gen/providers/aws/cloudwatch-log-metric-filter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const dada = new CloudwatchLogGroup(this, "dada", {
+      name: "MyApp/access.log",
+    });
+    new CloudwatchLogMetricFilter(this, "yada", {
+      logGroupName: dada.name,
+      metricTransformation: {
+        name: "EventCount",
+        namespace: "YourNamespace",
+        value: "1",
+      },
+      name: "MyAppAccessCount",
+      pattern: "",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name for the metric filter.
+* `pattern` - (Required) A valid [CloudWatch Logs filter pattern](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/FilterAndPatternSyntax.html) for extracting metric data out of ingested log events.
+* `logGroupName` - (Required) The name of the log group to associate the metric filter with.
+* `metricTransformation` - (Required) A block defining the collection of information needed to define how metric data gets emitted. See below.
+
+The `metricTransformation` block supports the following arguments:
+
+* `name` - (Required) The name of the CloudWatch metric to which the monitored log information should be published (e.g., `errorCount`).
+* `namespace` - (Required) The destination namespace of the CloudWatch metric.
+* `value` - (Required) What to publish to the metric. For example, if you're counting the occurrences of a particular term like "Error", the value will be "1" for each occurrence. If you're counting the bytes transferred, the published value will be the value in the log event.
+* `defaultValue` - (Optional) The value to emit when a filter pattern does not match a log event. Conflicts with `dimensions`.
+* `dimensions` - (Optional) Map of fields to use as dimensions for the metric. Up to 3 dimensions are allowed. Conflicts with `defaultValue`. See the sketch after the Attribute Reference below.
+* `unit` - (Optional) The unit to assign to the metric. If you omit this, the unit is set as `none`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the metric filter.
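+
+As an illustration of `dimensions` (a hand-written sketch, not output of `cdktf convert`; it reuses the `dada` log group from the example above and assumes JSON log events with a hypothetical `errorCode` field):
+
+```typescript
+// Sketch: emit an ErrorCount metric with an ErrorCode dimension; the
+// dimension value is the JSONPath of the field extracted from the event.
+new CloudwatchLogMetricFilter(this, "errors", {
+  logGroupName: dada.name,
+  metricTransformation: {
+    dimensions: {
+      ErrorCode: "$.errorCode",
+    },
+    name: "ErrorCount",
+    namespace: "YourNamespace",
+    value: "1",
+  },
+  name: "MyAppErrorCount",
+  pattern: '{ $.errorCode = "*" }',
+});
+```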
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Log Metric Filter using the `logGroupName:name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch Log Metric Filter using the `logGroupName:name`. For example: + +```console +% terraform import aws_cloudwatch_log_metric_filter.test /aws/lambda/function:test +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_resource_policy.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_resource_policy.html.markdown new file mode 100644 index 00000000000..5be80481755 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_log_resource_policy.html.markdown @@ -0,0 +1,154 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_resource_policy" +description: |- + Provides a resource to manage a CloudWatch log resource policy +--- + + + +# Resource: aws_cloudwatch_log_resource_policy + +Provides a resource to manage a CloudWatch log resource policy. + +## Example Usage + +### Elasticsearch Log Publishing + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const elasticsearchLogPublishingPolicy = new DataAwsIamPolicyDocument( + this, + "elasticsearch-log-publishing-policy", + { + statement: [ + { + actions: [ + "logs:CreateLogStream", + "logs:PutLogEvents", + "logs:PutLogEventsBatch", + ], + principals: [ + { + identifiers: ["es.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:logs:*"], + }, + ], + } + ); + const awsCloudwatchLogResourcePolicyElasticsearchLogPublishingPolicy = + new CloudwatchLogResourcePolicy( + this, + "elasticsearch-log-publishing-policy_1", + { + policyDocument: Token.asString(elasticsearchLogPublishingPolicy.json), + policyName: "elasticsearch-log-publishing-policy", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchLogResourcePolicyElasticsearchLogPublishingPolicy.overrideLogicalId( + "elasticsearch-log-publishing-policy" + ); + } +} + +``` + +### Route53 Query Logging + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
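+ *
+ * The policy below lets Route 53 create log streams and put query logs into
+ * log groups under /aws/route53/.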
+ */ +import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const route53QueryLoggingPolicy = new DataAwsIamPolicyDocument( + this, + "route53-query-logging-policy", + { + statement: [ + { + actions: ["logs:CreateLogStream", "logs:PutLogEvents"], + principals: [ + { + identifiers: ["route53.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:logs:*:*:log-group:/aws/route53/*"], + }, + ], + } + ); + const awsCloudwatchLogResourcePolicyRoute53QueryLoggingPolicy = + new CloudwatchLogResourcePolicy(this, "route53-query-logging-policy_1", { + policyDocument: Token.asString(route53QueryLoggingPolicy.json), + policyName: "route53-query-logging-policy", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchLogResourcePolicyRoute53QueryLoggingPolicy.overrideLogicalId( + "route53-query-logging-policy" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policyDocument` - (Required) Details of the resource policy, including the identity of the principal that is enabled to put logs to this account. This is formatted as a JSON string. Maximum length of 5120 characters. +* `policyName` - (Required) Name of the resource policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the CloudWatch log resource policy + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch log resource policies using the policy name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch log resource policies using the policy name. For example: + +```console +% terraform import aws_cloudwatch_log_resource_policy.MyPolicy MyPolicy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_stream.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_stream.html.markdown new file mode 100644 index 00000000000..c337700b390 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_log_stream.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_stream" +description: |- + Provides a CloudWatch Log Stream resource. +--- + + + +# Resource: aws_cloudwatch_log_stream + +Provides a CloudWatch Log Stream resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
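+ *
+ * Creates a log stream named SampleLogStream1234 in the Yada log group
+ * defined alongside it.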
+ */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { CloudwatchLogStream } from "./.gen/providers/aws/cloudwatch-log-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const yada = new CloudwatchLogGroup(this, "yada", { + name: "Yada", + }); + new CloudwatchLogStream(this, "foo", { + logGroupName: yada.name, + name: "SampleLogStream1234", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the log stream. Must not be longer than 512 characters and must not contain `:` +* `logGroupName` - (Required) The name of the log group under which the log stream is to be created. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) specifying the log stream. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cloudwatch Log Stream using the stream's `logGroupName` and `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Cloudwatch Log Stream using the stream's `logGroupName` and `name`. For example: + +```console +% terraform import aws_cloudwatch_log_stream.foo Yada:SampleLogStream1234 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_log_subscription_filter.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_log_subscription_filter.html.markdown new file mode 100644 index 00000000000..aee81511138 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_log_subscription_filter.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_log_subscription_filter" +description: |- + Provides a CloudWatch Logs subscription filter. +--- + + + +# Resource: aws_cloudwatch_log_subscription_filter + +Provides a CloudWatch Logs subscription filter resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogSubscriptionFilter } from "./.gen/providers/aws/cloudwatch-log-subscription-filter"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchLogSubscriptionFilter(this, "test_lambdafunction_logfilter", { + destinationArn: testLogstream.arn, + distribution: "Random", + filterPattern: "logtype test", + logGroupName: "/aws/lambda/example_lambda_name", + name: "test_lambdafunction_logfilter", + roleArn: iamForLambda.arn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A name for the subscription filter +* `destinationArn` - (Required) The ARN of the destination to deliver matching log events to. Kinesis stream or Lambda function ARN. 
+* `filterPattern` - (Required) A valid CloudWatch Logs filter pattern for subscribing to a filtered stream of log events. Use empty string `""` to match everything. For more information, see the [Amazon CloudWatch Logs User Guide](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html).
+* `logGroupName` - (Required) The name of the log group to associate the subscription filter with.
+* `roleArn` - (Optional) The ARN of an IAM role that grants Amazon CloudWatch Logs permissions to deliver ingested log events to the destination. If you use Lambda as a destination, omit this argument and use the `awsLambdaPermission` resource to grant access from CloudWatch Logs to the destination Lambda function.
+* `distribution` - (Optional) The method used to distribute log data to the destination. By default, log data is grouped by log stream, but the grouping can be set to random for a more even distribution. This property is only applicable when the destination is an Amazon Kinesis stream. Valid values are "Random" and "ByLogStream".
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Logs subscription filter using the log group name and subscription filter name separated by `|`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch Logs subscription filter using the log group name and subscription filter name separated by `|`. For example:
+
+```console
+% terraform import aws_cloudwatch_log_subscription_filter.test_lambdafunction_logfilter /aws/lambda/example_lambda_name|test_lambdafunction_logfilter
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_metric_alarm.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_metric_alarm.html.markdown
new file mode 100644
index 00000000000..684f1d0e271
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_metric_alarm.html.markdown
@@ -0,0 +1,341 @@
+---
+subcategory: "CloudWatch"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_metric_alarm"
+description: |-
+  Provides a CloudWatch Metric Alarm resource.
+---
+
+
+# Resource: aws_cloudwatch_metric_alarm
+
+Provides a CloudWatch Metric Alarm resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
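+ *
+ * Alarms when average CPUUtilization is at or above 80 percent for two
+ * consecutive 120-second periods.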
+ */ +import { CloudwatchMetricAlarm } from "./.gen/providers/aws/cloudwatch-metric-alarm"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchMetricAlarm(this, "foobar", { + alarmDescription: "This metric monitors ec2 cpu utilization", + alarmName: "terraform-test-foobar5", + comparisonOperator: "GreaterThanOrEqualToThreshold", + evaluationPeriods: 2, + insufficientDataActions: [], + metricName: "CPUUtilization", + namespace: "AWS/EC2", + period: 120, + statistic: "Average", + threshold: 80, + }); + } +} + +``` + +## Example in Conjunction with Scaling Policies + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AutoscalingPolicy } from "./.gen/providers/aws/autoscaling-policy"; +import { CloudwatchMetricAlarm } from "./.gen/providers/aws/cloudwatch-metric-alarm"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bat = new AutoscalingPolicy(this, "bat", { + adjustmentType: "ChangeInCapacity", + autoscalingGroupName: bar.name, + cooldown: 300, + name: "foobar3-terraform-test", + scalingAdjustment: 4, + }); + const awsCloudwatchMetricAlarmBat = new CloudwatchMetricAlarm( + this, + "bat_1", + { + alarmActions: [bat.arn], + alarmDescription: "This metric monitors ec2 cpu utilization", + alarmName: "terraform-test-foobar5", + comparisonOperator: "GreaterThanOrEqualToThreshold", + dimensions: { + AutoScalingGroupName: bar.name, + }, + evaluationPeriods: 2, + metricName: "CPUUtilization", + namespace: "AWS/EC2", + period: 120, + statistic: "Average", + threshold: 80, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchMetricAlarmBat.overrideLogicalId("bat"); + } +} + +``` + +## Example with an Expression + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
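+ *
+ * Computes the request error rate as m2/m1*100 (HTTP 5XX count divided by
+ * request count) and alarms when it reaches 10 percent.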
+ */ +import { CloudwatchMetricAlarm } from "./.gen/providers/aws/cloudwatch-metric-alarm"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchMetricAlarm(this, "foobar", { + alarmDescription: "Request error rate has exceeded 10%", + alarmName: "terraform-test-foobar", + comparisonOperator: "GreaterThanOrEqualToThreshold", + evaluationPeriods: 2, + insufficientDataActions: [], + metricQuery: [ + { + expression: "m2/m1*100", + id: "e1", + label: "Error Rate", + returnData: Token.asBoolean("true"), + }, + { + id: "m1", + metric: { + dimensions: { + LoadBalancer: "app/web", + }, + metricName: "RequestCount", + namespace: "AWS/ApplicationELB", + period: 120, + stat: "Sum", + unit: "Count", + }, + }, + { + id: "m2", + metric: { + dimensions: { + LoadBalancer: "app/web", + }, + metricName: "HTTPCode_ELB_5XX_Count", + namespace: "AWS/ApplicationELB", + period: 120, + stat: "Sum", + unit: "Count", + }, + }, + ], + threshold: 10, + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchMetricAlarm } from "./.gen/providers/aws/cloudwatch-metric-alarm"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchMetricAlarm(this, "xx_anomaly_detection", { + alarmDescription: "This metric monitors ec2 cpu utilization", + alarmName: "terraform-test-foobar", + comparisonOperator: "GreaterThanUpperThreshold", + evaluationPeriods: 2, + insufficientDataActions: [], + metricQuery: [ + { + expression: "ANOMALY_DETECTION_BAND(m1)", + id: "e1", + label: "CPUUtilization (Expected)", + returnData: Token.asBoolean("true"), + }, + { + id: "m1", + metric: { + dimensions: { + InstanceId: "i-abc123", + }, + metricName: "CPUUtilization", + namespace: "AWS/EC2", + period: 120, + stat: "Average", + unit: "Count", + }, + returnData: Token.asBoolean("true"), + }, + ], + thresholdMetricId: "e1", + }); + } +} + +``` + +## Example of monitoring Healthy Hosts on NLB using Target Group and NLB + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchMetricAlarm } from "./.gen/providers/aws/cloudwatch-metric-alarm"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchMetricAlarm(this, "nlb_healthyhosts", { + actionsEnabled: Token.asBoolean("true"), + alarmActions: [sns.arn], + alarmDescription: "Number of healthy nodes in Target Group", + alarmName: "alarmname", + comparisonOperator: "LessThanThreshold", + dimensions: { + LoadBalancer: lb.arnSuffix, + TargetGroup: lbTg.arnSuffix, + }, + evaluationPeriods: 1, + metricName: "HealthyHostCount", + namespace: "AWS/NetworkELB", + okActions: [sns.arn], + period: 60, + statistic: "Average", + threshold: logstashServersCount.numberValue, + }); + } +} + +``` + +~> **NOTE:** You cannot create a metric alarm consisting of both `statistic` and `extendedStatistic` parameters. 
+You must choose one or the other.
+
+## Argument Reference
+
+See the [related part of the AWS Docs](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_PutMetricAlarm.html) for details about valid values.
+
+This resource supports the following arguments:
+
+* `alarmName` - (Required) The descriptive name for the alarm. This name must be unique within the user's AWS account.
+* `comparisonOperator` - (Required) The arithmetic operation to use when comparing the specified Statistic and Threshold. The specified Statistic value is used as the first operand. One of the following is supported: `greaterThanOrEqualToThreshold`, `greaterThanThreshold`, `lessThanThreshold`, `lessThanOrEqualToThreshold`. Additionally, the values `lessThanLowerOrGreaterThanUpperThreshold`, `lessThanLowerThreshold`, and `greaterThanUpperThreshold` are used only for alarms based on anomaly detection models.
+* `evaluationPeriods` - (Required) The number of periods over which data is compared to the specified threshold.
+* `metricName` - (Optional) The name for the alarm's associated metric. See docs for [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
+* `namespace` - (Optional) The namespace for the alarm's associated metric. See docs for the [list of namespaces](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/aws-namespaces.html) and [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
+* `period` - (Optional) The period in seconds over which the specified `statistic` is applied. Valid values are `10`, `30`, or any multiple of `60`.
+* `statistic` - (Optional) The statistic to apply to the alarm's associated metric. One of the following is supported: `sampleCount`, `average`, `sum`, `minimum`, `maximum`.
+* `threshold` - (Optional) The value against which the specified statistic is compared. This parameter is required for alarms based on static thresholds, but should not be used for alarms based on anomaly detection models.
+* `thresholdMetricId` - (Optional) If this is an alarm based on an anomaly detection model, make this value match the ID of the ANOMALY_DETECTION_BAND function.
+* `actionsEnabled` - (Optional) Indicates whether or not actions should be executed during any changes to the alarm's state. Defaults to `true`.
+* `alarmActions` - (Optional) The list of actions to execute when this alarm transitions into an ALARM state from any other state. Each action is specified as an Amazon Resource Name (ARN).
+* `alarmDescription` - (Optional) The description for the alarm.
+* `datapointsToAlarm` - (Optional) The number of datapoints that must be breaching to trigger the alarm.
+* `dimensions` - (Optional) The dimensions for the alarm's associated metric. For the list of available dimensions, see the AWS documentation [here](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
+* `insufficientDataActions` - (Optional) The list of actions to execute when this alarm transitions into an INSUFFICIENT_DATA state from any other state. Each action is specified as an Amazon Resource Name (ARN).
+* `okActions` - (Optional) The list of actions to execute when this alarm transitions into an OK state from any other state. Each action is specified as an Amazon Resource Name (ARN).
+* `unit` - (Optional) The unit for the alarm's associated metric.
+* `extendedStatistic` - (Optional) The percentile statistic for the metric associated with the alarm. Specify a value between p0.0 and p100.
+* `treatMissingData` - (Optional) Sets how this alarm is to handle missing data points. The following values are supported: `missing`, `ignore`, `breaching` and `notBreaching`. Defaults to `missing`.
+* `evaluateLowSampleCountPercentiles` - (Optional) Used only for alarms based on percentiles.
+  If you specify `ignore`, the alarm state will not change during periods with too few data points to be statistically significant.
+  If you specify `evaluate` or omit this parameter, the alarm will always be evaluated and possibly change state no matter how many data points are available.
+  The following values are supported: `ignore` and `evaluate`.
+* `metricQuery` - (Optional) Enables you to create an alarm based on a metric math expression. You may specify at most 20.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+~> **NOTE:** If you specify at least one `metricQuery`, you may not specify a `metricName`, `namespace`, `period` or `statistic`. If you do not specify a `metricQuery`, you must specify each of these (although you may use `extendedStatistic` instead of `statistic`).
+
+### Nested fields
+
+#### `metricQuery`
+
+* `id` - (Required) A short name used to tie this object to the results in the response. If you are performing math expressions on this set of data, this name represents that data and can serve as a variable in the mathematical expression. The valid characters are letters, numbers, and underscore. The first character must be a lowercase letter.
+* `accountId` - (Optional) The ID of the account where the metrics are located, if this is a cross-account alarm.
+* `expression` - (Optional) The math expression to be performed on the returned data, if this object is performing a math expression. This expression can use the id of the other metrics to refer to those metrics, and can also use the id of other expressions to use the result of those expressions. For more information about metric math expressions, see Metric Math Syntax and Functions in the [Amazon CloudWatch User Guide](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/using-metric-math.html#metric-math-syntax).
+* `label` - (Optional) A human-readable label for this metric or expression. This is especially useful if this is an expression, so that you know what the value represents.
+* `metric` - (Optional) The metric to be returned, along with statistics, period, and units. Use this parameter only if this object is retrieving a metric and not performing a math expression on returned data.
+* `period` - (Optional) Granularity in seconds of returned data points.
+  For metrics with regular resolution, valid values are any multiple of `60`.
+  For high-resolution metrics, valid values are `1`, `5`, `10`, `30`, or any multiple of `60`.
+* `returnData` - (Optional) Set to `true` on exactly one `metricQuery` block; that query's result is used as the alarm.
+
+~> **NOTE:** You must specify either `metric` or `expression`, but not both.
+
+#### `metric`
+
+* `dimensions` - (Optional) The dimensions for this metric.
For the list of available dimensions see the AWS documentation [here](http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
+* `metricName` - (Required) The name for this metric.
+  See docs for [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
+* `namespace` - (Required) The namespace for this metric. See docs for the [list of namespaces](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/aws-namespaces.html).
+  See docs for [supported metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html).
+* `period` - (Required) Granularity in seconds of returned data points.
+  For metrics with regular resolution, valid values are any multiple of `60`.
+  For high-resolution metrics, valid values are `1`, `5`, `10`, `30`, or any multiple of `60`.
+* `stat` - (Required) The statistic to apply to this metric.
+  See docs for [supported statistics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html).
+* `unit` - (Optional) The unit for this metric.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the CloudWatch Metric Alarm.
+* `id` - The ID of the CloudWatch Metric Alarm.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Metric Alarm using the `alarmName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch Metric Alarm using the `alarmName`. For example:
+
+```console
+% terraform import aws_cloudwatch_metric_alarm.test alarm-12345
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cloudwatch_metric_stream.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_metric_stream.html.markdown
new file mode 100644
index 00000000000..c70aa9b67fe
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cloudwatch_metric_stream.html.markdown
@@ -0,0 +1,295 @@
+---
+subcategory: "CloudWatch"
+layout: "aws"
+page_title: "AWS: aws_cloudwatch_metric_stream"
+description: |-
+  Provides a CloudWatch Metric Stream resource.
+---
+
+
+# Resource: aws_cloudwatch_metric_stream
+
+Provides a CloudWatch Metric Stream resource.
+
+## Example Usage
+
+### Filters
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { CloudwatchMetricStream } from "./.gen/providers/aws/cloudwatch-metric-stream"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "metric-stream-test-bucket", + }); + new S3BucketAcl(this, "bucket_acl", { + acl: "private", + bucket: bucket.id, + }); + const firehoseAssumeRole = new DataAwsIamPolicyDocument( + this, + "firehose_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["firehose.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const firehoseToS3 = new DataAwsIamPolicyDocument(this, "firehose_to_s3", { + statement: [ + { + actions: [ + "s3:AbortMultipartUpload", + "s3:GetBucketLocation", + "s3:GetObject", + "s3:ListBucket", + "s3:ListBucketMultipartUploads", + "s3:PutObject", + ], + effect: "Allow", + resources: [bucket.arn, "${" + bucket.arn + "}/*"], + }, + ], + }); + const streamsAssumeRole = new DataAwsIamPolicyDocument( + this, + "streams_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["streams.metrics.cloudwatch.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const awsIamRoleFirehoseToS3 = new IamRole(this, "firehose_to_s3_5", { + assumeRolePolicy: Token.asString(firehoseAssumeRole.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleFirehoseToS3.overrideLogicalId("firehose_to_s3"); + const metricStreamToFirehose = new IamRole( + this, + "metric_stream_to_firehose", + { + assumeRolePolicy: Token.asString(streamsAssumeRole.json), + name: "metric_stream_to_firehose_role", + } + ); + const awsIamRolePolicyFirehoseToS3 = new IamRolePolicy( + this, + "firehose_to_s3_7", + { + name: "default", + policy: Token.asString(firehoseToS3.json), + role: Token.asString(awsIamRoleFirehoseToS3.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyFirehoseToS3.overrideLogicalId("firehose_to_s3"); + const s3Stream = new KinesisFirehoseDeliveryStream(this, "s3_stream", { + destination: "extended_s3", + extendedS3Configuration: { + bucketArn: bucket.arn, + roleArn: Token.asString(awsIamRoleFirehoseToS3.arn), + }, + name: "metric-stream-test-stream", + }); + const dataAwsIamPolicyDocumentMetricStreamToFirehose = + new DataAwsIamPolicyDocument(this, "metric_stream_to_firehose_9", { + statement: [ + { + actions: ["firehose:PutRecord", "firehose:PutRecordBatch"], + effect: "Allow", + resources: [s3Stream.arn], + }, + ], + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentMetricStreamToFirehose.overrideLogicalId(
+      "metric_stream_to_firehose"
+    );
+    new CloudwatchMetricStream(this, "main", {
+      firehoseArn: s3Stream.arn,
+      includeFilter: [
+        {
+          metricNames: ["CPUUtilization", "NetworkOut"],
+          namespace: "AWS/EC2",
+        },
+        {
+          metricNames: [],
+          namespace: "AWS/EBS",
+        },
+      ],
+      name: "my-metric-stream",
+      outputFormat: "json",
+      roleArn: metricStreamToFirehose.arn,
+    });
+    const awsIamRolePolicyMetricStreamToFirehose = new IamRolePolicy(
+      this,
+      "metric_stream_to_firehose_11",
+      {
+        name: "default",
+        policy: Token.asString(
+          dataAwsIamPolicyDocumentMetricStreamToFirehose.json
+        ),
+        role: metricStreamToFirehose.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyMetricStreamToFirehose.overrideLogicalId(
+      "metric_stream_to_firehose"
+    );
+  }
+}
+
+```
+
+### Additional Statistics
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchMetricStream } from "./.gen/providers/aws/cloudwatch-metric-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CloudwatchMetricStream(this, "main", {
+      firehoseArn: s3Stream.arn,
+      name: "my-metric-stream",
+      outputFormat: "json",
+      roleArn: metricStreamToFirehose.arn,
+      statisticsConfiguration: [
+        {
+          additionalStatistics: ["p1", "tm99"],
+          includeMetric: [
+            {
+              metricName: "CPUUtilization",
+              namespace: "AWS/EC2",
+            },
+          ],
+        },
+        {
+          additionalStatistics: ["TS(50.5:)"],
+          includeMetric: [
+            {
+              metricName: "CPUUtilization",
+              namespace: "AWS/EC2",
+            },
+          ],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `firehoseArn` - (Required) ARN of the Amazon Kinesis Firehose delivery stream to use for this metric stream.
+* `roleArn` - (Required) ARN of the IAM role that this metric stream will use to access Amazon Kinesis Firehose resources. For more information about role permissions, see [Trust between CloudWatch and Kinesis Data Firehose](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-metric-streams-trustpolicy.html).
+* `outputFormat` - (Required) Output format for the stream. Possible values are `json` and `opentelemetry07`. For more information about output formats, see [Metric streams output formats](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-metric-streams-formats.html).
+
+The following arguments are optional:
+
+* `excludeFilter` - (Optional) List of exclusive metric filters. If you specify this parameter, the stream sends metrics from all metric namespaces except for the namespaces and the conditional metric names that you specify here. If you don't specify metric names or provide empty metric names, the whole metric namespace is excluded. Conflicts with `includeFilter`.
+* `includeFilter` - (Optional) List of inclusive metric filters. If you specify this parameter, the stream sends only the conditional metric names from the metric namespaces that you specify here. If you don't specify metric names or provide empty metric names, the whole metric namespace is included.
Conflicts with `excludeFilter`.
+* `name` - (Optional, Forces new resource) Friendly name of the metric stream. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique friendly name beginning with the specified prefix. Conflicts with `name`.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `statisticsConfiguration` - (Optional) For each entry in this array, you specify one or more metrics and the list of additional statistics to stream for those metrics. The additional statistics that you can stream depend on the stream's `outputFormat`. If the `outputFormat` is `json`, you can stream any additional statistic that is supported by CloudWatch, listed in [CloudWatch statistics definitions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html). If the `outputFormat` is `opentelemetry07`, you can stream percentile statistics (p99 etc.). See details below.
+* `includeLinkedAccountsMetrics` - (Optional) If you are creating a metric stream in a monitoring account, specify `true` to include metrics from source accounts that are linked to this monitoring account in the metric stream. The default is `false`. For more information about linking accounts, see [CloudWatch cross-account observability](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Unified-Cross-Account.html).
+
+### Nested Fields
+
+#### `excludeFilter`
+
+* `namespace` - (Required) Name of the metric namespace in the filter.
+* `metricNames` - (Optional) An array that defines the metrics you want to exclude for this metric namespace.
+
+#### `includeFilter`
+
+* `namespace` - (Required) Name of the metric namespace in the filter.
+* `metricNames` - (Optional) An array that defines the metrics you want to include for this metric namespace.
+
+#### `statisticsConfiguration`
+
+* `additionalStatistics` - (Required) The additional statistics to stream for the metrics listed in `includeMetric`.
+* `includeMetric` - (Required) An array that defines the metrics that are to have additional statistics streamed. See details below.
+
+#### `includeMetric`
+
+* `metricName` - (Required) The name of the metric.
+* `namespace` - (Required) The namespace of the metric.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the metric stream.
+* `creationDate` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the metric stream was created.
+* `lastUpdateDate` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the metric stream was last updated.
+* `state` - State of the metric stream. Possible values are `running` and `stopped`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch metric streams using the `name`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch metric streams using the `name`. For example: + +```console +% terraform import aws_cloudwatch_metric_stream.sample sample-stream-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cloudwatch_query_definition.html.markdown b/website/docs/cdktf/typescript/r/cloudwatch_query_definition.html.markdown new file mode 100644 index 00000000000..a559f95b475 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cloudwatch_query_definition.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "CloudWatch Logs" +layout: "aws" +page_title: "AWS: aws_cloudwatch_query_definition" +description: |- + Provides a CloudWatch Logs query definition resource. +--- + + + +# Resource: aws_cloudwatch_query_definition + +Provides a CloudWatch Logs query definition resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchQueryDefinition } from "./.gen/providers/aws/cloudwatch-query-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CloudwatchQueryDefinition(this, "example", { + logGroupNames: ["/aws/logGroup1", "/aws/logGroup2"], + name: "custom_query", + queryString: + "fields @timestamp, @message\n| sort @timestamp desc\n| limit 25\n\n", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the query. +* `queryString` - (Required) The query to save. You can read more about CloudWatch Logs Query Syntax in the [documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CWL_QuerySyntax.html). +* `logGroupNames` - (Optional) Specific log groups to use with the query. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `queryDefinitionId` - The query definition ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch query definitions using the query definition ARN. The ARN can be found on the "Edit Query" page for the query in the AWS Console. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch query definitions using the query definition ARN. The ARN can be found on the "Edit Query" page for the query in the AWS Console. 
For example:
+
+```console
+% terraform import aws_cloudwatch_query_definition.example arn:aws:logs:us-west-2:123456789012:query-definition:269951d7-6f75-496d-9d7b-6b7a5486bdbd
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codeartifact_domain.html.markdown b/website/docs/cdktf/typescript/r/codeartifact_domain.html.markdown
new file mode 100644
index 00000000000..714ba2092fb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codeartifact_domain.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "CodeArtifact"
+layout: "aws"
+page_title: "AWS: aws_codeartifact_domain"
+description: |-
+  Provides a CodeArtifact Domain resource.
+---
+
+
+# Resource: aws_codeartifact_domain
+
+Provides a CodeArtifact Domain Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodeartifactDomain } from "./.gen/providers/aws/codeartifact-domain";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodeartifactDomain(this, "example", {
+      domain: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain` - (Required) The name of the domain to create. All domain names in an AWS Region that are in the same AWS account must be unique. The domain name is used as the prefix in DNS hostnames. Do not use sensitive information in a domain name because it is publicly discoverable.
+* `encryptionKey` - (Optional) The encryption key for the domain, specified as the Amazon Resource Name (ARN) of a KMS key. This key is used to encrypt content stored in the domain. If this element is absent, the default `aws/codeartifact` AWS KMS master key is used.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Domain.
+* `arn` - The ARN of the Domain.
+* `owner` - The AWS account ID that owns the domain.
+* `repositoryCount` - The number of repositories in the domain.
+* `createdTime` - A timestamp that represents the date and time the domain was created in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `assetSizeBytes` - The total size of all assets in the domain.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Domain using the CodeArtifact Domain ARN.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeArtifact Domain using the CodeArtifact Domain ARN. For example:
+
+```console
+% terraform import aws_codeartifact_domain.example arn:aws:codeartifact:us-west-2:012345678912:domain/tf-acc-test-8593714120730241305
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codeartifact_domain_permissions_policy.html.markdown b/website/docs/cdktf/typescript/r/codeartifact_domain_permissions_policy.html.markdown
new file mode 100644
index 00000000000..f20d8045521
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codeartifact_domain_permissions_policy.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "CodeArtifact"
+layout: "aws"
+page_title: "AWS: aws_codeartifact_domain_permissions_policy"
+description: |-
+  Provides a CodeArtifact Domain Permissions Policy resource.
+---
+
+
+# Resource: aws_codeartifact_domain_permissions_policy
+
+Provides a CodeArtifact Domain Permissions Policy Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodeartifactDomain } from "./.gen/providers/aws/codeartifact-domain";
+import { CodeartifactDomainPermissionsPolicy } from "./.gen/providers/aws/codeartifact-domain-permissions-policy";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new KmsKey(this, "example", {
+      description: "domain key",
+    });
+    const awsCodeartifactDomainExample = new CodeartifactDomain(
+      this,
+      "example_1",
+      {
+        domain: "example",
+        encryptionKey: example.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodeartifactDomainExample.overrideLogicalId("example");
+    const test = new DataAwsIamPolicyDocument(this, "test", {
+      statement: [
+        {
+          actions: ["codeartifact:CreateRepository"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["*"],
+              type: "*",
+            },
+          ],
+          resources: [Token.asString(awsCodeartifactDomainExample.arn)],
+        },
+      ],
+    });
+    const awsCodeartifactDomainPermissionsPolicyTest =
+      new CodeartifactDomainPermissionsPolicy(this, "test_3", {
+        domain: Token.asString(awsCodeartifactDomainExample.domain),
+        policyDocument: Token.asString(test.json),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodeartifactDomainPermissionsPolicyTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain` - (Required) The name of the domain on which to set the resource policy.
+* `policyDocument` - (Required) A JSON policy string to be set as the access control resource policy on the provided domain.
+* `domainOwner` - (Optional) The account number of the AWS account that owns the domain.
+* `policyRevision` - (Optional) The current revision of the resource policy to be set. This revision is used for optimistic locking, which prevents others from overwriting your changes to the domain's resource policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the domain.
+* `resourceArn` - The ARN of the resource associated with the resource policy.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Domain Permissions Policies using the CodeArtifact Domain ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeArtifact Domain Permissions Policies using the CodeArtifact Domain ARN. For example:
+
+```console
+% terraform import aws_codeartifact_domain_permissions_policy.example arn:aws:codeartifact:us-west-2:012345678912:domain/tf-acc-test-1928056699409417367
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codeartifact_repository.html.markdown b/website/docs/cdktf/typescript/r/codeartifact_repository.html.markdown
new file mode 100644
index 00000000000..16c75b436af
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codeartifact_repository.html.markdown
@@ -0,0 +1,166 @@
+---
+subcategory: "CodeArtifact"
+layout: "aws"
+page_title: "AWS: aws_codeartifact_repository"
+description: |-
+  Provides a CodeArtifact Repository resource.
+---
+
+
+# Resource: aws_codeartifact_repository
+
+Provides a CodeArtifact Repository Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodeartifactDomain } from "./.gen/providers/aws/codeartifact-domain";
+import { CodeartifactRepository } from "./.gen/providers/aws/codeartifact-repository";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new KmsKey(this, "example", {
+      description: "domain key",
+    });
+    const awsCodeartifactDomainExample = new CodeartifactDomain(
+      this,
+      "example_1",
+      {
+        domain: "example",
+        encryptionKey: example.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodeartifactDomainExample.overrideLogicalId("example");
+    new CodeartifactRepository(this, "test", {
+      domain: Token.asString(awsCodeartifactDomainExample.domain),
+      repository: "example",
+    });
+  }
+}
+
+```
+
+## Example Usage with upstream repository
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { CodeartifactRepository } from "./.gen/providers/aws/codeartifact-repository"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const upstream = new CodeartifactRepository(this, "upstream", { + domain: test.domain, + repository: "upstream", + }); + new CodeartifactRepository(this, "test", { + domain: example.domain, + repository: "example", + upstream: [ + { + repositoryName: upstream.repository, + }, + ], + }); + } +} + +``` + +## Example Usage with external connection + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CodeartifactRepository } from "./.gen/providers/aws/codeartifact-repository"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CodeartifactRepository(this, "test", { + domain: example.domain, + externalConnections: { + externalConnectionName: "public:npmjs", + }, + repository: "example", + }); + new CodeartifactRepository(this, "upstream", { + domain: Token.asString(awsCodeartifactDomainTest.domain), + repository: "upstream", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) The domain that contains the created repository. +* `repository` - (Required) The name of the repository to create. +* `domainOwner` - (Optional) The account number of the AWS account that owns the domain. +* `description` - (Optional) The description of the repository. +* `upstream` - (Optional) A list of upstream repositories to associate with the repository. The order of the upstream repositories in the list determines their priority order when AWS CodeArtifact looks for a requested package version. see [Upstream](#upstream) +* `externalConnections` - An array of external connections associated with the repository. Only one external connection can be set per repository. see [External Connections](#external-connections). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Upstream + +* `repositoryName` - (Required) The name of an upstream repository. + +### External Connections + +* `externalConnectionName` - (Required) The name of the external connection associated with a repository. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the repository. +* `arn` - The ARN of the repository. +* `administratorAccount` - The account number of the AWS account that manages the repository. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Repository using the CodeArtifact Repository ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeArtifact Repository using the CodeArtifact Repository ARN. For example:
+
+```console
+% terraform import aws_codeartifact_repository.example arn:aws:codeartifact:us-west-2:012345678912:repository/tf-acc-test-6968272603913957763/tf-acc-test-6968272603913957763
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codeartifact_repository_permissions_policy.html.markdown b/website/docs/cdktf/typescript/r/codeartifact_repository_permissions_policy.html.markdown
new file mode 100644
index 00000000000..a73b39de9cb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codeartifact_repository_permissions_policy.html.markdown
@@ -0,0 +1,131 @@
+---
+subcategory: "CodeArtifact"
+layout: "aws"
+page_title: "AWS: aws_codeartifact_repository_permissions_policy"
+description: |-
+  Provides a CodeArtifact Repository Permissions Policy resource.
+---
+
+
+# Resource: aws_codeartifact_repository_permissions_policy
+
+Provides a CodeArtifact Repository Permissions Policy Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodeartifactDomain } from "./.gen/providers/aws/codeartifact-domain";
+import { CodeartifactRepository } from "./.gen/providers/aws/codeartifact-repository";
+import { CodeartifactRepositoryPermissionsPolicy } from "./.gen/providers/aws/codeartifact-repository-permissions-policy";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new KmsKey(this, "example", {
+      description: "domain key",
+    });
+    const awsCodeartifactDomainExample = new CodeartifactDomain(
+      this,
+      "example_1",
+      {
+        domain: "example",
+        encryptionKey: example.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodeartifactDomainExample.overrideLogicalId("example");
+    const awsCodeartifactRepositoryExample = new CodeartifactRepository(
+      this,
+      "example_2",
+      {
+        domain: Token.asString(awsCodeartifactDomainExample.domain),
+        repository: "example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + awsCodeartifactRepositoryExample.overrideLogicalId("example"); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_3", + { + statement: [ + { + actions: ["codeartifact:ReadFromRepository"], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "*", + }, + ], + resources: [Token.asString(awsCodeartifactRepositoryExample.arn)], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsCodeartifactRepositoryPermissionsPolicyExample = + new CodeartifactRepositoryPermissionsPolicy(this, "example_4", { + domain: Token.asString(awsCodeartifactDomainExample.domain), + policyDocument: Token.asString(dataAwsIamPolicyDocumentExample.json), + repository: Token.asString(awsCodeartifactRepositoryExample.repository), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCodeartifactRepositoryPermissionsPolicyExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repository` - (Required) The name of the repository to set the resource policy on. +* `domain` - (Required) The name of the domain on which to set the resource policy. +* `policyDocument` - (Required) A JSON policy string to be set as the access control resource policy on the provided domain. +* `domainOwner` - (Optional) The account number of the AWS account that owns the domain. +* `policyRevision` - (Optional) The current revision of the resource policy to be set. This revision is used for optimistic locking, which prevents others from overwriting your changes to the domain's resource policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the resource associated with the resource policy. +* `resourceArn` - The ARN of the resource associated with the resource policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeArtifact Repository Permissions Policies using the CodeArtifact Repository ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CodeArtifact Repository Permissions Policies using the CodeArtifact Repository ARN. For example: + +```console +% terraform import aws_codeartifact_repository_permissions_policy.example arn:aws:codeartifact:us-west-2:012345678912:repository/tf-acc-test-6968272603913957763/tf-acc-test-6968272603913957763 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/codebuild_project.html.markdown b/website/docs/cdktf/typescript/r/codebuild_project.html.markdown new file mode 100644 index 00000000000..0573c1fcf04 --- /dev/null +++ b/website/docs/cdktf/typescript/r/codebuild_project.html.markdown @@ -0,0 +1,441 @@ +--- +subcategory: "CodeBuild" +layout: "aws" +page_title: "AWS: aws_codebuild_project" +description: |- + Provides a CodeBuild Project resource. 
+--- + + + +# Resource: aws_codebuild_project + +Provides a CodeBuild Project resource. See also the [`awsCodebuildWebhook` resource](/docs/providers/aws/r/codebuild_webhook.html), which manages the webhook to the source (e.g., the "rebuild every time a code change is pushed" option in the CodeBuild web console). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CodebuildProject } from "./.gen/providers/aws/codebuild-project"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_1", { + acl: "private", + bucket: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["codebuild.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_3", + { + statement: [ + { + actions: [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents", + ], + effect: "Allow", + resources: ["*"], + }, + { + actions: [ + "ec2:CreateNetworkInterface", + "ec2:DescribeDhcpOptions", + "ec2:DescribeNetworkInterfaces", + "ec2:DeleteNetworkInterface", + "ec2:DescribeSubnets", + "ec2:DescribeSecurityGroups", + "ec2:DescribeVpcs", + ], + effect: "Allow", + resources: ["*"], + }, + { + actions: ["ec2:CreateNetworkInterfacePermission"], + condition: [ + { + test: "StringEquals", + values: [example1.arn, example2.arn], + variable: "ec2:Subnet", + }, + { + test: "StringEquals", + values: ["codebuild.amazonaws.com"], + variable: "ec2:AuthorizedService", + }, + ], + effect: "Allow", + resources: [ + "arn:aws:ec2:us-east-1:123456789012:network-interface/*", + ], + }, + { + actions: ["s3:*"], + effect: "Allow", + resources: [example.arn, "${" + example.arn + "}/*"], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsIamRoleExample = new IamRole(this, "example_4", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "example", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    const awsIamRolePolicyExample = new IamRolePolicy(this, "example_5", {
+      policy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+      role: Token.asString(awsIamRoleExample.name),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyExample.overrideLogicalId("example");
+    const awsCodebuildProjectExample = new CodebuildProject(this, "example_6", {
+      artifacts: {
+        type: "NO_ARTIFACTS",
+      },
+      buildTimeout: Token.asNumber("5"),
+      cache: {
+        location: example.bucket,
+        type: "S3",
+      },
+      description: "test_codebuild_project",
+      environment: {
+        computeType: "BUILD_GENERAL1_SMALL",
+        environmentVariable: [
+          {
+            name: "SOME_KEY1",
+            value: "SOME_VALUE1",
+          },
+          {
+            name: "SOME_KEY2",
+            type: "PARAMETER_STORE",
+            value: "SOME_VALUE2",
+          },
+        ],
+        image: "aws/codebuild/amazonlinux2-x86_64-standard:4.0",
+        imagePullCredentialsType: "CODEBUILD",
+        type: "LINUX_CONTAINER",
+      },
+      logsConfig: {
+        cloudwatchLogs: {
+          groupName: "log-group",
+          streamName: "log-stream",
+        },
+        s3Logs: {
+          location: "${" + example.id + "}/build-log",
+          status: "ENABLED",
+        },
+      },
+      name: "test-project",
+      serviceRole: Token.asString(awsIamRoleExample.arn),
+      source: {
+        gitCloneDepth: 1,
+        gitSubmodulesConfig: {
+          fetchSubmodules: true,
+        },
+        location: "https://github.com/mitchellh/packer.git",
+        type: "GITHUB",
+      },
+      sourceVersion: "master",
+      tags: {
+        Environment: "Test",
+      },
+      vpcConfig: {
+        securityGroupIds: [
+          Token.asString(awsSecurityGroupExample1.id),
+          Token.asString(awsSecurityGroupExample2.id),
+        ],
+        subnets: [example1.id, example2.id],
+        vpcId: Token.asString(awsVpcExample.id),
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodebuildProjectExample.overrideLogicalId("example");
+    new CodebuildProject(this, "project-with-cache", {
+      artifacts: {
+        type: "NO_ARTIFACTS",
+      },
+      buildTimeout: Token.asNumber("5"),
+      cache: {
+        modes: ["LOCAL_DOCKER_LAYER_CACHE", "LOCAL_SOURCE_CACHE"],
+        type: "LOCAL",
+      },
+      description: "test_codebuild_project_cache",
+      environment: {
+        computeType: "BUILD_GENERAL1_SMALL",
+        environmentVariable: [
+          {
+            name: "SOME_KEY1",
+            value: "SOME_VALUE1",
+          },
+        ],
+        image: "aws/codebuild/amazonlinux2-x86_64-standard:4.0",
+        imagePullCredentialsType: "CODEBUILD",
+        type: "LINUX_CONTAINER",
+      },
+      name: "test-project-cache",
+      queuedTimeout: Token.asNumber("5"),
+      serviceRole: Token.asString(awsIamRoleExample.arn),
+      source: {
+        gitCloneDepth: 1,
+        location: "https://github.com/mitchellh/packer.git",
+        type: "GITHUB",
+      },
+      tags: {
+        Environment: "Test",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `artifacts` - (Required) Configuration block. Detailed below.
+* `environment` - (Required) Configuration block. Detailed below.
+* `name` - (Required) Project's name.
+* `serviceRole` - (Required) Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.
+* `source` - (Required) Configuration block. Detailed below.
+
+The following arguments are optional:
+
+* `badgeEnabled` - (Optional) Generates a publicly-accessible URL for the project's build badge.
Available as `badgeUrl` attribute when enabled.
+* `buildBatchConfig` - (Optional) Defines the batch build options for the project.
+* `buildTimeout` - (Optional) Number of minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait until timing out any related build that does not get marked as completed. The default is 60 minutes.
+* `cache` - (Optional) Configuration block. Detailed below.
+* `concurrentBuildLimit` - (Optional) Specify a maximum number of concurrent builds for the project. The value specified must be greater than 0 and less than the account concurrent running builds limit.
+* `description` - (Optional) Short description of the project.
+* `fileSystemLocations` - (Optional) A set of file system locations to mount inside the build. File system locations are documented below.
+* `encryptionKey` - (Optional) AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build project's build output artifacts.
+* `logsConfig` - (Optional) Configuration block. Detailed below.
+* `projectVisibility` - (Optional) Specifies the visibility of the project's builds. Possible values are: `publicRead` and `private`. Default value is `private`.
+* `resourceAccessRole` - (Optional) The ARN of the IAM role that enables CodeBuild to access the CloudWatch Logs and Amazon S3 artifacts for the project's builds.
+* `queuedTimeout` - (Optional) Number of minutes, from 5 to 480 (8 hours), a build is allowed to be queued before it times out. The default is 8 hours.
+* `secondaryArtifacts` - (Optional) Configuration block. Detailed below.
+* `secondarySources` - (Optional) Configuration block. Detailed below.
+* `secondarySourceVersion` - (Optional) Configuration block. Detailed below.
+* `sourceVersion` - (Optional) Version of the build input to be built for this project. If not specified, the latest version is used.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcConfig` - (Optional) Configuration block. Detailed below.
+
+### artifacts
+
+* `artifactIdentifier` - (Optional) Artifact identifier. Must be the same specified inside the AWS CodeBuild build specification.
+* `bucketOwnerAccess` - (Optional) Specifies the bucket owner's access for objects that another account uploads to their Amazon S3 bucket. By default, only the account that uploads the objects to the bucket has access to these objects. This property allows you to give the bucket owner access to these objects. Valid values are `none`, `readOnly`, and `full`. Your CodeBuild service role must have the `s3:putBucketAcl` permission. This permission allows CodeBuild to modify the access control list for the bucket.
+* `encryptionDisabled` - (Optional) Whether to disable encrypting output artifacts. If `type` is set to `noArtifacts`, this value is ignored. Defaults to `false`.
+* `location` - (Optional) Information about the build output artifact location. If `type` is set to `codepipeline` or `noArtifacts`, this value is ignored. If `type` is set to `s3`, this is the name of the output bucket.
+* `name` - (Optional) Name of the project. If `type` is set to `s3`, this is the name of the output artifact object.
+* `namespaceType` - (Optional) Namespace to use in storing build artifacts. If `type` is set to `s3`, then valid values are `buildId` or `none`.
+* `overrideArtifactName` - (Optional) Whether a name specified in the build specification overrides the artifact name.
+* `packaging` - (Optional) Type of build output artifact to create. If `type` is set to `s3`, valid values are `none` and `zip`.
+* `path` - (Optional) If `type` is set to `s3`, this is the path to the output artifact.
+* `type` - (Required) Build output artifact's type. Valid values: `codepipeline`, `noArtifacts`, `s3`.
+
+### build_batch_config
+
+* `combineArtifacts` - (Optional) Specifies if the build artifacts for the batch build should be combined into a single artifact location.
+* `restrictions` - (Optional) Configuration block specifying the restrictions for the batch build. Detailed below.
+* `serviceRole` - (Required) Specifies the service role ARN for the batch build project.
+* `timeoutInMins` - (Optional) Specifies the maximum amount of time, in minutes, that the batch build must be completed in.
+
+#### build_batch_config: restrictions
+
+* `computeTypesAllowed` - (Optional) An array of strings that specify the compute types that are allowed for the batch build. See [Build environment compute types](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html) in the AWS CodeBuild User Guide for these values.
+* `maximumBuildsAllowed` - (Optional) Specifies the maximum number of builds allowed.
+
+### cache
+
+* `location` - (Required when cache type is `s3`) Location where the AWS CodeBuild project stores cached resources. For type `s3`, the value must be a valid S3 bucket name/prefix.
+* `modes` - (Required when cache type is `local`) Specifies settings that AWS CodeBuild uses to store and reuse build dependencies. Valid values: `localSourceCache`, `localDockerLayerCache`, `localCustomCache`.
+* `type` - (Optional) Type of storage that will be used for the AWS CodeBuild project cache. Valid values: `noCache`, `local`, `s3`. Defaults to `noCache`.
+
+### environment
+
+* `certificate` - (Optional) ARN of the S3 bucket, path prefix and object key that contains the PEM-encoded certificate.
+* `computeType` - (Required) Information about the compute resources the build project will use. Valid values: `buildGeneral1Small`, `buildGeneral1Medium`, `buildGeneral1Large`, `buildGeneral12Xlarge`. `buildGeneral1Small` is only valid if `type` is set to `linuxContainer`. When `type` is set to `linuxGpuContainer`, `computeType` must be `buildGeneral1Large`.
+* `environmentVariable` - (Optional) Configuration block. Detailed below.
+* `imagePullCredentialsType` - (Optional) Type of credentials AWS CodeBuild uses to pull images in your build. Valid values: `codebuild`, `serviceRole`. When you use a cross-account or private registry image, you must use SERVICE_ROLE credentials. When you use an AWS CodeBuild curated image, you must use CodeBuild credentials. Defaults to `codebuild`.
+* `image` - (Required) Docker image to use for this build project. Valid values include [Docker images provided by CodeBuild](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-available.html) (e.g., `aws/codebuild/amazonlinux2-x86_64-standard:4.0`), [Docker Hub images](https://hub.docker.com/) (e.g., `hashicorp/terraform:latest`), and full Docker repository URIs such as those for ECR (e.g., `137112412989.dkr.ecr.us-west-2.amazonaws.com/amazonlinux:latest`).
+* `privilegedMode` - (Optional) Whether to enable running the Docker daemon inside a Docker container. Defaults to `false`.
+* `registryCredential` - (Optional) Configuration block. Detailed below.
+* `type` - (Required) Type of build environment to use for related builds. Valid values: `linuxContainer`, `linuxGpuContainer`, `windowsContainer` (deprecated), `windowsServer2019Container`, `armContainer`. For additional information, see the [CodeBuild User Guide](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html).
+
+#### environment: environment_variable
+
+* `name` - (Required) Environment variable's name or key.
+* `type` - (Optional) Type of environment variable. Valid values: `parameterStore`, `plaintext`, `secretsManager`.
+* `value` - (Required) Environment variable's value.
+
+#### environment: registry_credential
+
+Credentials for access to a private Docker registry.
+
+* `credential` - (Required) ARN or name of credentials created using AWS Secrets Manager.
+* `credentialProvider` - (Required) Service that created the credentials to access a private Docker registry. Valid value: `secretsManager` (AWS Secrets Manager).
+
+### file_system_locations
+
+See [ProjectFileSystemLocation](https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ProjectFileSystemLocation.html) for more details of the fields.
+
+* `identifier` - (Optional) The name used to access a file system created by Amazon EFS. CodeBuild creates an environment variable by appending the identifier in all capital letters to CODEBUILD\_. For example, if you specify my-efs for identifier, a new environment variable is created named CODEBUILD_MY-EFS.
+* `location` - (Optional) A string that specifies the location of the file system created by Amazon EFS. Its format is `efsDnsName:/directoryPath`.
+* `mountOptions` - (Optional) The mount options for a file system created by AWS EFS.
+* `mountPoint` - (Optional) The location in the container where you mount the file system.
+* `type` - (Optional) The type of the file system. The one supported type is `efs`.
+
+### logs_config
+
+* `cloudwatchLogs` - (Optional) Configuration block. Detailed below.
+* `s3Logs` - (Optional) Configuration block. Detailed below.
+
+#### logs_config: cloudwatch_logs
+
+* `groupName` - (Optional) Group name of the logs in CloudWatch Logs.
+* `status` - (Optional) Current status of logs in CloudWatch Logs for a build project. Valid values: `enabled`, `disabled`. Defaults to `enabled`.
+* `streamName` - (Optional) Stream name of the logs in CloudWatch Logs.
+
+#### logs_config: s3_logs
+
+* `encryptionDisabled` - (Optional) Whether to disable encrypting S3 logs. Defaults to `false`.
+* `location` - (Optional) Name of the S3 bucket and the path prefix for S3 logs. Must be set if status is `enabled`, otherwise it must be empty.
+* `status` - (Optional) Current status of logs in S3 for a build project. Valid values: `enabled`, `disabled`. Defaults to `disabled`.
+* `bucketOwnerAccess` - (Optional) Specifies the bucket owner's access for objects that another account uploads to their Amazon S3 bucket. By default, only the account that uploads the objects to the bucket has access to these objects. This property allows you to give the bucket owner access to these objects. Valid values are `none`, `readOnly`, and `full`. Your CodeBuild service role must have the `s3:putBucketAcl` permission. This permission allows CodeBuild to modify the access control list for the bucket.
+
+### secondary_artifacts
+
+* `artifactIdentifier` - (Required) Artifact identifier. Must be the same specified inside the AWS CodeBuild build specification.
+* `bucketOwnerAccess` - (Optional) Specifies the bucket owner's access for objects that another account uploads to their Amazon S3 bucket. By default, only the account that uploads the objects to the bucket has access to these objects. This property allows you to give the bucket owner access to these objects. Valid values are `none`, `readOnly`, and `full`. The CodeBuild service role must have the `s3:PutBucketAcl` permission. This permission allows CodeBuild to modify the access control list for the bucket.
+* `encryptionDisabled` - (Optional) Whether to disable encrypting output artifacts. If `type` is set to `noArtifacts`, this value is ignored. Defaults to `false`.
+* `location` - (Optional) Information about the build output artifact location. If `type` is set to `codepipeline` or `noArtifacts`, this value is ignored if specified. If `type` is set to `s3`, this is the name of the output bucket. If `path` is not specified, `location` can specify the path of the output artifact in the output bucket.
+* `name` - (Optional) Name of the project. If `type` is set to `codepipeline` or `noArtifacts`, this value is ignored if specified. If `type` is set to `s3`, this is the name of the output artifact object.
+* `namespaceType` - (Optional) Namespace to use in storing build artifacts. If `type` is set to `codepipeline` or `noArtifacts`, this value is ignored if specified. If `type` is set to `s3`, valid values are `buildId` or `none`.
+* `overrideArtifactName` - (Optional) Whether a name specified in the build specification overrides the artifact name.
+* `packaging` - (Optional) Type of build output artifact to create. If `type` is set to `codepipeline` or `noArtifacts`, this value is ignored if specified. If `type` is set to `s3`, valid values are `none` or `zip`.
+* `path` - (Optional) Along with `namespaceType` and `name`, the pattern that AWS CodeBuild uses to name and store the output artifact. If `type` is set to `codepipeline` or `noArtifacts`, this value is ignored if specified. If `type` is set to `s3`, this is the path to the output artifact.
+* `type` - (Required) Build output artifact's type. Valid values: `codepipeline`, `noArtifacts`, and `s3`.
+
+### secondary_sources
+
+* `buildspec` - (Optional) The build spec declaration to use for this build project's related builds. This must be set when `type` is `noSource`. It can either be a path to a file residing in the repository to be built or a local file path leveraging the `file()` built-in.
+* `gitCloneDepth` - (Optional) Truncate git history to this many commits. Use `0` for a full checkout, which is required to run commands like `git branch --show-current`. See [AWS CodePipeline User Guide: Tutorial: Use full clone with a GitHub pipeline source](https://docs.aws.amazon.com/codepipeline/latest/userguide/tutorials-github-gitclone.html) for details.
+* `gitSubmodulesConfig` - (Optional) Configuration block. Detailed below.
+* `insecureSsl` - (Optional) Ignore SSL warnings when connecting to source control.
+* `location` - (Optional) Location of the source code from Git or S3.
+* `reportBuildStatus` - (Optional) Whether to report the status of a build's start and finish to your source provider. This option is only valid when your source provider is `github`, `bitbucket`, or `githubEnterprise`.
+* `buildStatusConfig` - (Optional) Configuration block that contains information that defines how the build project reports the build status to the source provider.
 This option is only used when the source provider is `github`, `githubEnterprise`, or `bitbucket`. `buildStatusConfig` blocks are documented below.
+* `sourceIdentifier` - (Required) An identifier for this project source. The identifier can only contain alphanumeric characters and underscores, and must be less than 128 characters in length.
+* `type` - (Required) Type of repository that contains the source code to be built. Valid values: `codecommit`, `codepipeline`, `github`, `githubEnterprise`, `bitbucket`, or `s3`.
+
+#### secondary_sources: git_submodules_config
+
+This block is only valid when the `type` is `codecommit`, `github`, or `githubEnterprise`.
+
+* `fetchSubmodules` - (Required) Whether to fetch Git submodules for the AWS CodeBuild build project.
+
+#### secondary_sources: build_status_config
+
+* `context` - (Optional) Specifies the context of the build status CodeBuild sends to the source provider. The usage of this parameter depends on the source provider.
+* `targetUrl` - (Optional) Specifies the target URL of the build status CodeBuild sends to the source provider. The usage of this parameter depends on the source provider.
+
+### secondary_source_version
+
+* `sourceIdentifier` - (Required) An identifier for a source in the build project.
+* `sourceVersion` - (Required) The source version for the corresponding source identifier. See [AWS docs](https://docs.aws.amazon.com/codebuild/latest/APIReference/API_ProjectSourceVersion.html#CodeBuild-Type-ProjectSourceVersion-sourceVersion) for more details.
+
+### source
+
+* `buildspec` - (Optional) Build specification to use for this build project's related builds. This must be set when `type` is `noSource`.
+* `gitCloneDepth` - (Optional) Truncate git history to this many commits. Use `0` for a full checkout, which is required to run commands like `git branch --show-current`. See [AWS CodePipeline User Guide: Tutorial: Use full clone with a GitHub pipeline source](https://docs.aws.amazon.com/codepipeline/latest/userguide/tutorials-github-gitclone.html) for details.
+* `gitSubmodulesConfig` - (Optional) Configuration block. Detailed below.
+* `insecureSsl` - (Optional) Ignore SSL warnings when connecting to source control.
+* `location` - (Optional) Location of the source code from Git or S3.
+* `reportBuildStatus` - (Optional) Whether to report the status of a build's start and finish to your source provider. This option is only valid when the `type` is `bitbucket` or `github`.
+* `buildStatusConfig` - (Optional) Configuration block that contains information that defines how the build project reports the build status to the source provider. This option is only used when the source provider is `github`, `githubEnterprise`, or `bitbucket`. `buildStatusConfig` blocks are documented below.
+* `type` - (Required) Type of repository that contains the source code to be built. Valid values: `codecommit`, `codepipeline`, `github`, `githubEnterprise`, `bitbucket`, `s3`, `noSource`.
+
+#### source: git_submodules_config
+
+This block is only valid when the `type` is `codecommit`, `github`, or `githubEnterprise`.
+
+* `fetchSubmodules` - (Required) Whether to fetch Git submodules for the AWS CodeBuild build project.
+
+#### source: build_status_config
+
+* `context` - (Optional) Specifies the context of the build status CodeBuild sends to the source provider. The usage of this parameter depends on the source provider.
+* `targetUrl` - (Optional) Specifies the target URL of the build status CodeBuild sends to the source provider.
 The usage of this parameter depends on the source provider.
+
+### vpc_config
+
+* `securityGroupIds` - (Required) Security group IDs to assign to running builds.
+* `subnets` - (Required) Subnet IDs within which to run builds.
+* `vpcId` - (Required) ID of the VPC within which to run builds.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the CodeBuild project.
+* `badgeUrl` - URL of the build badge when `badgeEnabled` is enabled.
+* `id` - Name (if imported via `name`) or ARN (if created via Terraform or imported via ARN) of the CodeBuild project.
+* `publicProjectAlias` - The project identifier used with the public build APIs.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Project using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeBuild Project using the `name`. For example:
+
+```console
+% terraform import aws_codebuild_project.name project-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codebuild_report_group.html.markdown b/website/docs/cdktf/typescript/r/codebuild_report_group.html.markdown
new file mode 100644
index 00000000000..b0168b6c131
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codebuild_report_group.html.markdown
@@ -0,0 +1,146 @@
+---
+subcategory: "CodeBuild"
+layout: "aws"
+page_title: "AWS: aws_codebuild_report_group"
+description: |-
+  Provides a CodeBuild Report Group resource.
+---
+
+
+# Resource: aws_codebuild_report_group
+
+Provides a CodeBuild Report Group Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodebuildReportGroup } from "./.gen/providers/aws/codebuild-report-group";
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "my-test",
+    });
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_2",
+      {
+        statement: [
+          {
+            actions: ["kms:*"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["arn:aws:iam::${" + current.accountId + "}:root"],
+                type: "AWS",
+              },
+            ],
+            resources: ["*"],
+            sid: "Enable IAM User Permissions",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsKmsKeyExample = new KmsKey(this, "example_3", {
+      deletionWindowInDays: 7,
+      description: "my test kms key",
+      policy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKmsKeyExample.overrideLogicalId("example");
+    const awsCodebuildReportGroupExample = new CodebuildReportGroup(
+      this,
+      "example_4",
+      {
+        exportConfig: {
+          s3Destination: {
+            bucket: example.id,
+            encryptionDisabled: false,
+            encryptionKey: Token.asString(awsKmsKeyExample.arn),
+            packaging: "NONE",
+            path: "/some",
+          },
+          type: "S3",
+        },
+        name: "my test report group",
+        type: "TEST",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodebuildReportGroupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of a Report Group.
+* `type` - (Required) The type of the Report Group. Valid values are `test` and `codeCoverage`.
+* `exportConfig` - (Required) Information about the destination where the raw data of this Report Group is exported. See [Export Config](#export-config) documented below.
+* `deleteReports` - (Optional) If `true`, deletes any reports that belong to a report group before deleting the report group. If `false`, you must delete any reports in the report group before deleting it. Default value is `false`.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Export Config
+
+* `type` - (Required) The export configuration type. Valid values are `s3` and `noExport`.
+* `s3Destination` - (Required) Contains information about the S3 bucket where the run of a report is exported. See [S3 Destination](#s3-destination) documented below.
+
+#### S3 Destination
+
+* `bucket` - (Required) The name of the S3 bucket where the raw data of a report are exported.
+* `encryptionKey` - (Required) The encryption key for the report's encrypted raw data, specified as a KMS key ARN.
+* `encryptionDisabled` - (Optional) A boolean value that specifies if the results of a report are encrypted.
+  **Note: the API does not currently allow setting encryption as disabled**
+* `packaging` - (Optional) The type of build output artifact to create. Valid values are: `none` (default) and `zip`.
+* `path` - (Optional) The path to the exported report's raw data results.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Report Group.
+* `arn` - The ARN of the Report Group.
+* `created` - The date and time this Report Group was created.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Report Group using the CodeBuild Report Group ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeBuild Report Group using the CodeBuild Report Group ARN. For example:
+
+```console
+% terraform import aws_codebuild_report_group.example arn:aws:codebuild:us-west-2:123456789:report-group/report-group-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codebuild_resource_policy.html.markdown b/website/docs/cdktf/typescript/r/codebuild_resource_policy.html.markdown
new file mode 100644
index 00000000000..119b8010651
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codebuild_resource_policy.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "CodeBuild"
+layout: "aws"
+page_title: "AWS: aws_codebuild_resource_policy"
+description: |-
+  Provides a CodeBuild Resource Policy resource.
+---
+
+
+# Resource: aws_codebuild_resource_policy
+
+Provides a CodeBuild Resource Policy Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodebuildReportGroup } from "./.gen/providers/aws/codebuild-report-group";
+import { CodebuildResourcePolicy } from "./.gen/providers/aws/codebuild-resource-policy";
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CodebuildReportGroup(this, "example", {
+      exportConfig: {
+        type: "NO_EXPORT",
+      },
+      name: "example",
+      type: "TEST",
+    });
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsPartitionCurrent = new DataAwsPartition(this, "current_2", {});
+    /*This allows the Terraform resource name to match the original name.
 You can remove the call if you don't need them to match.*/
+    dataAwsPartitionCurrent.overrideLogicalId("current");
+    const awsCodebuildResourcePolicyExample = new CodebuildResourcePolicy(
+      this,
+      "example_3",
+      {
+        policy: Token.asString(
+          Fn.jsonencode({
+            Id: "default",
+            Statement: [
+              {
+                Action: [
+                  "codebuild:BatchGetReportGroups",
+                  "codebuild:BatchGetReports",
+                  "codebuild:ListReportsForReportGroup",
+                  "codebuild:DescribeTestCases",
+                ],
+                Effect: "Allow",
+                Principal: {
+                  AWS:
+                    "arn:${" +
+                    dataAwsPartitionCurrent.partition +
+                    "}:iam::${" +
+                    current.accountId +
+                    "}:root",
+                },
+                Resource: example.arn,
+                Sid: "default",
+              },
+            ],
+            Version: "2012-10-17",
+          })
+        ),
+        resourceArn: example.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodebuildResourcePolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceArn` - (Required) The ARN of the Project or ReportGroup resource you want to associate with a resource policy.
+* `policy` - (Required) A JSON-formatted resource policy. For more information, see [Sharing a Project](https://docs.aws.amazon.com/codebuild/latest/userguide/project-sharing.html#project-sharing-share) and [Sharing a Report Group](https://docs.aws.amazon.com/codebuild/latest/userguide/report-groups-sharing.html#report-groups-sharing-share).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the resource.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Resource Policy using the CodeBuild Resource Policy ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeBuild Resource Policy using the CodeBuild Resource Policy ARN. For example:
+
+```console
+% terraform import aws_codebuild_resource_policy.example arn:aws:codebuild:us-west-2:123456789:report-group/report-group-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codebuild_source_credential.html.markdown b/website/docs/cdktf/typescript/r/codebuild_source_credential.html.markdown
new file mode 100644
index 00000000000..50b00447c92
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codebuild_source_credential.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "CodeBuild"
+layout: "aws"
+page_title: "AWS: aws_codebuild_source_credential"
+description: |-
+  Provides a CodeBuild Source Credential resource.
+---
+
+
+# Resource: aws_codebuild_source_credential
+
+Provides a CodeBuild Source Credentials Resource.
+
+~> **NOTE:**
+[CodeBuild only allows a single credential per given server type in a given region](https://docs.aws.amazon.com/cdk/api/v2/docs/aws-cdk-lib.aws_codebuild.GitHubSourceCredentials.html). Therefore, when you define `awsCodebuildSourceCredential`, any [`awsCodebuildProject` resource](/docs/providers/aws/r/codebuild_project.html) defined in the same module will use it.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodebuildSourceCredential } from "./.gen/providers/aws/codebuild-source-credential";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodebuildSourceCredential(this, "example", {
+      authType: "PERSONAL_ACCESS_TOKEN",
+      serverType: "GITHUB",
+      token: "example",
+    });
+  }
+}
+
+```
+
+### Bitbucket Server Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodebuildSourceCredential } from "./.gen/providers/aws/codebuild-source-credential";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodebuildSourceCredential(this, "example", {
+      authType: "BASIC_AUTH",
+      serverType: "BITBUCKET",
+      token: "example",
+      userName: "test-user",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `authType` - (Required) The type of authentication used to connect to a GitHub, GitHub Enterprise, or Bitbucket repository. An OAUTH connection is not supported by the API.
+* `serverType` - (Required) The source provider used for this project.
+* `token` - (Required) For `github` or `githubEnterprise`, this is the personal access token. For `bitbucket`, this is the app password.
+* `userName` - (Optional) The Bitbucket username when the `authType` is `basicAuth`. This parameter is not valid for other types of source providers or connections.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Source Credential.
+* `arn` - The ARN of the Source Credential.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Source Credential using the CodeBuild Source Credential ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeBuild Source Credential using the CodeBuild Source Credential ARN. For example:
+
+```console
+% terraform import aws_codebuild_source_credential.example arn:aws:codebuild:us-west-2:123456789:token:github
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codebuild_webhook.html.markdown b/website/docs/cdktf/typescript/r/codebuild_webhook.html.markdown
new file mode 100644
index 00000000000..2df1fd85548
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codebuild_webhook.html.markdown
@@ -0,0 +1,161 @@
+---
+subcategory: "CodeBuild"
+layout: "aws"
+page_title: "AWS: aws_codebuild_webhook"
+description: |-
+  Provides a CodeBuild Webhook resource.
+---
+
+
+# Resource: aws_codebuild_webhook
+
+Manages a CodeBuild webhook, which is an endpoint accepted by the CodeBuild service to trigger builds from source code repositories. Depending on the source type of the CodeBuild project, the CodeBuild service may also automatically create and delete the actual repository webhook.
+
+## Example Usage
+
+### Bitbucket and GitHub
+
+When working with [Bitbucket](https://bitbucket.org) and [GitHub](https://github.com) source CodeBuild webhooks, the CodeBuild service will automatically create (on `awsCodebuildWebhook` resource creation) and delete (on `awsCodebuildWebhook` resource deletion) the Bitbucket/GitHub repository webhook using its granted OAuth permissions. This behavior cannot be controlled by Terraform.
+
+~> **Note:** The AWS account that Terraform uses to create this resource *must* have authorized CodeBuild to access Bitbucket/GitHub's OAuth API in each applicable region. This is a manual step that must be done *before* creating webhooks with this resource. If OAuth is not configured, AWS will return an error similar to `ResourceNotFoundException: Could not find access token for server type github`. More information can be found in the CodeBuild User Guide for [Bitbucket](https://docs.aws.amazon.com/codebuild/latest/userguide/sample-bitbucket-pull-request.html) and [GitHub](https://docs.aws.amazon.com/codebuild/latest/userguide/sample-github-pull-request.html).
+
+~> **Note:** Further managing the automatically created Bitbucket/GitHub webhook with the `bitbucketHook`/`githubRepositoryWebhook` resource is only possible by importing that resource after creation of the `awsCodebuildWebhook` resource. The CodeBuild API never provides the `secret` attribute for the `awsCodebuildWebhook` resource in this scenario.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodebuildWebhook } from "./.gen/providers/aws/codebuild-webhook";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodebuildWebhook(this, "example", {
+      buildType: "BUILD",
+      filterGroup: [
+        {
+          filter: [
+            {
+              pattern: "PUSH",
+              type: "EVENT",
+            },
+            {
+              pattern: "master",
+              type: "BASE_REF",
+            },
+          ],
+        },
+      ],
+      projectName: Token.asString(awsCodebuildProjectExample.name),
+    });
+  }
+}
+
+```
+
+### GitHub Enterprise
+
+When working with [GitHub Enterprise](https://enterprise.github.com/) source CodeBuild webhooks, the GHE repository webhook must be separately managed (e.g., manually or with the `githubRepositoryWebhook` resource).
+
+More information about creating webhooks with GitHub Enterprise can be found in the [CodeBuild User Guide](https://docs.aws.amazon.com/codebuild/latest/userguide/sample-github-enterprise.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodebuildWebhook } from "./.gen/providers/aws/codebuild-webhook";
+import { RepositoryWebhook } from "./.gen/providers/github/repository-webhook";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: github.
+    For a more precise conversion please use the --provider flag in convert.*/
+    const example = new CodebuildWebhook(this, "example", {
+      projectName: Token.asString(awsCodebuildProjectExample.name),
+    });
+    const githubRepositoryWebhookExample = new RepositoryWebhook(
+      this,
+      "example_1",
+      {
+        active: true,
+        configuration: [
+          {
+            content_type: "json",
+            insecure_ssl: false,
+            secret: example.secret,
+            url: example.payloadUrl,
+          },
+        ],
+        events: ["push"],
+        name: "example",
+        repository: githubRepositoryExample.name,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    githubRepositoryWebhookExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `projectName` - (Required) The name of the build project.
+* `buildType` - (Optional) The type of build this webhook will trigger. Valid values for this parameter are: `build`, `buildBatch`.
+* `branchFilter` - (Optional) A regular expression used to determine which branches get built. By default, all branches are built. We recommend using `filterGroup` over `branchFilter`.
+* `filterGroup` - (Optional) Information about the webhook's trigger. Filter group blocks are documented below.
+
+`filterGroup` supports the following:
+
+* `filter` - (Required) A webhook filter for the group. Filter blocks are documented below.
+
+`filter` supports the following:
+
+* `type` - (Required) The webhook filter group's type. Valid values for this parameter are: `event`, `baseRef`, `headRef`, `actorAccountId`, `filePath`, `commitMessage`. At least one filter group must specify `event` as its type.
+* `pattern` - (Required) For a filter that uses `event` type, a comma-separated string that specifies one or more events: `push`, `pullRequestCreated`, `pullRequestUpdated`, `pullRequestReopened`. `pullRequestMerged` works with GitHub & GitHub Enterprise only. For a filter that uses any of the other filter types, a regular expression.
+* `excludeMatchedPattern` - (Optional) If set to `true`, the specified filter does *not* trigger a build. Defaults to `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the build project.
+* `payloadUrl` - The CodeBuild endpoint where webhook events are sent.
+* `secret` - The secret token of the associated repository. Not returned by the CodeBuild API for all source types.
+* `url` - The URL to the webhook.
+
+~> **Note:** The `secret` attribute is only set on resource creation, so if the secret is manually rotated, Terraform will not pick up the change on subsequent runs. In that case, the webhook resource should be tainted and re-created to get the secret back in sync.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeBuild Webhooks using the CodeBuild Project name.
 For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeBuild Webhooks using the CodeBuild Project name. For example:
+
+```console
+% terraform import aws_codebuild_webhook.example MyProjectName
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codecommit_approval_rule_template.html.markdown b/website/docs/cdktf/typescript/r/codecommit_approval_rule_template.html.markdown
new file mode 100644
index 00000000000..7bd4d575d82
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codecommit_approval_rule_template.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "CodeCommit"
+layout: "aws"
+page_title: "AWS: aws_codecommit_approval_rule_template"
+description: |-
+  Provides a CodeCommit Approval Rule Template Resource.
+---
+
+
+# Resource: aws_codecommit_approval_rule_template
+
+Provides a CodeCommit Approval Rule Template Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodecommitApprovalRuleTemplate } from "./.gen/providers/aws/codecommit-approval-rule-template";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodecommitApprovalRuleTemplate(this, "example", {
+      content: Token.asString(
+        Fn.jsonencode({
+          DestinationReferences: ["refs/heads/master"],
+          Statements: [
+            {
+              ApprovalPoolMembers: [
+                "arn:aws:sts::123456789012:assumed-role/CodeCommitReview/*",
+              ],
+              NumberOfApprovalsNeeded: 2,
+              Type: "Approvers",
+            },
+          ],
+          Version: "2018-11-08",
+        })
+      ),
+      description: "This is an example approval rule template",
+      name: "MyExampleApprovalRuleTemplate",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `content` - (Required) The content of the approval rule template. Maximum of 3000 characters.
+* `name` - (Required) The name for the approval rule template. Maximum of 100 characters.
+* `description` - (Optional) The description of the approval rule template. Maximum of 1000 characters.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `approvalRuleTemplateId` - The ID of the approval rule template.
+* `creationDate` - The date the approval rule template was created, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `lastModifiedDate` - The date the approval rule template was most recently changed, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `lastModifiedUser` - The Amazon Resource Name (ARN) of the user who made the most recent changes to the approval rule template.
+* `ruleContentSha256` - The SHA-256 hash signature for the content of the approval rule template.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeCommit approval rule templates using the `name`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CodeCommit approval rule templates using the `name`. For example: + +```console +% terraform import aws_codecommit_approval_rule_template.imported ExistingApprovalRuleTemplateName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/codecommit_approval_rule_template_association.html.markdown b/website/docs/cdktf/typescript/r/codecommit_approval_rule_template_association.html.markdown new file mode 100644 index 00000000000..53d5dc417e8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/codecommit_approval_rule_template_association.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "CodeCommit" +layout: "aws" +page_title: "AWS: aws_codecommit_approval_rule_template_association" +description: |- + Associates a CodeCommit Approval Rule Template with a Repository. +--- + + + +# Resource: aws_codecommit_approval_rule_template_association + +Associates a CodeCommit Approval Rule Template with a Repository. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CodecommitApprovalRuleTemplateAssociation } from "./.gen/providers/aws/codecommit-approval-rule-template-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CodecommitApprovalRuleTemplateAssociation(this, "example", { + approvalRuleTemplateName: Token.asString( + awsCodecommitApprovalRuleTemplateExample.name + ), + repositoryName: Token.asString( + awsCodecommitRepositoryExample.repositoryName + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `approvalRuleTemplateName` - (Required) The name for the approval rule template. +* `repositoryName` - (Required) The name of the repository that you want to associate with the template. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the approval rule template and name of the repository, separated by a comma (`,`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeCommit approval rule template associations using the `approvalRuleTemplateName` and `repositoryName` separated by a comma (`,`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CodeCommit approval rule template associations using the `approvalRuleTemplateName` and `repositoryName` separated by a comma (`,`). 
 For example:
+
+```console
+% terraform import aws_codecommit_approval_rule_template_association.example approver-rule-for-example,MyExampleRepo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codecommit_repository.html.markdown b/website/docs/cdktf/typescript/r/codecommit_repository.html.markdown
new file mode 100644
index 00000000000..1376b2da3ff
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codecommit_repository.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "CodeCommit"
+layout: "aws"
+page_title: "AWS: aws_codecommit_repository"
+description: |-
+  Provides a CodeCommit Repository Resource.
+---
+
+
+# Resource: aws_codecommit_repository
+
+Provides a CodeCommit Repository Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodecommitRepository } from "./.gen/providers/aws/codecommit-repository";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodecommitRepository(this, "test", {
+      description: "This is the Sample App Repository",
+      repositoryName: "MyTestRepository",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `repositoryName` - (Required) The name for the repository. This needs to be less than 100 characters.
+* `description` - (Optional) The description of the repository. This needs to be less than 1000 characters.
+* `defaultBranch` - (Optional) The default branch of the repository. The branch specified here needs to exist.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `repositoryId` - The ID of the repository.
+* `arn` - The ARN of the repository.
+* `cloneUrlHttp` - The URL to use for cloning the repository over HTTPS.
+* `cloneUrlSsh` - The URL to use for cloning the repository over SSH.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeCommit repository using the repository name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeCommit repository using the repository name.
 For example:
+
+```console
+% terraform import aws_codecommit_repository.imported ExistingRepo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codecommit_trigger.html.markdown b/website/docs/cdktf/typescript/r/codecommit_trigger.html.markdown
new file mode 100644
index 00000000000..2479f294768
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codecommit_trigger.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "CodeCommit"
+layout: "aws"
+page_title: "AWS: aws_codecommit_trigger"
+description: |-
+  Provides a CodeCommit Trigger Resource.
+---
+
+
+# Resource: aws_codecommit_trigger
+
+Provides a CodeCommit Trigger Resource.
+
+~> **NOTE:** Terraform currently can create only one trigger per repository, even if multiple `awsCodecommitTrigger` resources are defined. Moreover, creating triggers with Terraform will delete all other triggers in the repository (including manually created triggers).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodecommitRepository } from "./.gen/providers/aws/codecommit-repository";
+import { CodecommitTrigger } from "./.gen/providers/aws/codecommit-trigger";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new CodecommitRepository(this, "test", {
+      repositoryName: "test",
+    });
+    const awsCodecommitTriggerTest = new CodecommitTrigger(this, "test_1", {
+      repositoryName: test.repositoryName,
+      trigger: [
+        {
+          destinationArn: Token.asString(awsSnsTopicTest.arn),
+          events: ["all"],
+          name: "all",
+        },
+      ],
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodecommitTriggerTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `repositoryName` - (Required) The name for the repository. This needs to be less than 100 characters.
+* `name` - (Required) The name of the trigger.
+* `destinationArn` - (Required) The ARN of the resource that is the target for a trigger. For example, the ARN of a topic in Amazon Simple Notification Service (SNS).
+* `customData` - (Optional) Any custom data associated with the trigger that will be included in the information sent to the target of the trigger.
+* `branches` - (Optional) The branches that will be included in the trigger configuration. If no branches are specified, the trigger will apply to all branches.
+* `events` - (Required) The repository events that will cause the trigger to run actions in another service, such as sending a notification through Amazon Simple Notification Service (SNS). If no events are specified, the trigger will run for all repository events. Event types include: `all`, `updateReference`, `createReference`, `deleteReference`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `configurationId` - System-generated unique identifier.
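+
+As a hedged illustration (not part of the upstream example above), `branches` and `customData` can be combined to notify only on changes to a single branch. The sketch below assumes the `test` repository from the Example Usage section and a hypothetical `awsSnsTopicDeployments` SNS topic defined elsewhere in the stack:
+
+```typescript
+// Sketch only: restrict the trigger to the "main" branch and attach custom data.
+// `test` is the CodecommitRepository above; `awsSnsTopicDeployments` is an
+// assumed SNS topic reference, not defined in this page.
+new CodecommitTrigger(this, "main_only", {
+  repositoryName: test.repositoryName,
+  trigger: [
+    {
+      branches: ["main"],
+      customData: "notify-deploy-channel",
+      destinationArn: Token.asString(awsSnsTopicDeployments.arn),
+      events: ["updateReference"],
+      name: "main-updates",
+    },
+  ],
+});
+```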
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codedeploy_app.html.markdown b/website/docs/cdktf/typescript/r/codedeploy_app.html.markdown
new file mode 100644
index 00000000000..21b8601d6b2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codedeploy_app.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "CodeDeploy"
+layout: "aws"
+page_title: "AWS: aws_codedeploy_app"
+description: |-
+  Provides a CodeDeploy application.
+---
+
+
+# Resource: aws_codedeploy_app
+
+Provides a CodeDeploy application to be used as a basis for deployments.
+
+## Example Usage
+
+### ECS Application
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodedeployApp } from "./.gen/providers/aws/codedeploy-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodedeployApp(this, "example", {
+      computePlatform: "ECS",
+      name: "example",
+    });
+  }
+}
+
+```
+
+### Lambda Application
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodedeployApp } from "./.gen/providers/aws/codedeploy-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodedeployApp(this, "example", {
+      computePlatform: "Lambda",
+      name: "example",
+    });
+  }
+}
+
+```
+
+### Server Application
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodedeployApp } from "./.gen/providers/aws/codedeploy-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodedeployApp(this, "example", {
+      computePlatform: "Server",
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the application.
+* `computePlatform` - (Optional) The compute platform can be `ecs`, `lambda`, or `server`. Default is `server`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the CodeDeploy application.
+* `applicationId` - The application ID.
+* `id` - Amazon's assigned ID for the application.
+* `name` - The application's name.
+* `githubAccountName` - The name for a connection to a GitHub account.
+* `linkedToGithub` - Whether the user has authenticated with GitHub for the specified application.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeDeploy Applications using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeDeploy Applications using the `name`. For example:
+
+```console
+% terraform import aws_codedeploy_app.example my-application
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codedeploy_deployment_config.html.markdown b/website/docs/cdktf/typescript/r/codedeploy_deployment_config.html.markdown
new file mode 100644
index 00000000000..2e4afce1946
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codedeploy_deployment_config.html.markdown
@@ -0,0 +1,191 @@
+---
+subcategory: "CodeDeploy"
+layout: "aws"
+page_title: "AWS: aws_codedeploy_deployment_config"
+description: |-
+  Provides a CodeDeploy deployment config.
+---
+
+
+# Resource: aws_codedeploy_deployment_config
+
+Provides a CodeDeploy deployment config for an application.
+
+## Example Usage
+
+### Server Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodedeployDeploymentConfig } from "./.gen/providers/aws/codedeploy-deployment-config";
+import { CodedeployDeploymentGroup } from "./.gen/providers/aws/codedeploy-deployment-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const foo = new CodedeployDeploymentConfig(this, "foo", {
+      deploymentConfigName: "test-deployment-config",
+      minimumHealthyHosts: {
+        type: "HOST_COUNT",
+        value: 2,
+      },
+    });
+    const awsCodedeployDeploymentGroupFoo = new CodedeployDeploymentGroup(
+      this,
+      "foo_1",
+      {
+        alarmConfiguration: {
+          alarms: ["my-alarm-name"],
+          enabled: true,
+        },
+        appName: fooApp.name,
+        autoRollbackConfiguration: {
+          enabled: true,
+          events: ["DEPLOYMENT_FAILURE"],
+        },
+        deploymentConfigName: foo.id,
+        deploymentGroupName: "bar",
+        ec2TagFilter: [
+          {
+            key: "filterkey",
+            type: "KEY_AND_VALUE",
+            value: "filtervalue",
+          },
+        ],
+        serviceRoleArn: fooRole.arn,
+        triggerConfiguration: [
+          {
+            triggerEvents: ["DeploymentFailure"],
+            triggerName: "foo-trigger",
+            triggerTargetArn: "foo-topic-arn",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodedeployDeploymentGroupFoo.overrideLogicalId("foo");
+  }
+}
+
+```
+
+### Lambda Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodedeployDeploymentConfig } from "./.gen/providers/aws/codedeploy-deployment-config";
+import { CodedeployDeploymentGroup } from "./.gen/providers/aws/codedeploy-deployment-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const foo = new CodedeployDeploymentConfig(this, "foo", {
+      computePlatform: "Lambda",
+      deploymentConfigName: "test-deployment-config",
+      trafficRoutingConfig: {
+        timeBasedLinear: {
+          interval: 10,
+          percentage: 10,
+        },
+        type: "TimeBasedLinear",
+      },
+    });
+    const awsCodedeployDeploymentGroupFoo = new CodedeployDeploymentGroup(
+      this,
+      "foo_1",
+      {
+        alarmConfiguration: {
+          alarms: ["my-alarm-name"],
+          enabled: true,
+        },
+        appName: fooApp.name,
+        autoRollbackConfiguration: {
+          enabled: true,
+          events: ["DEPLOYMENT_STOP_ON_ALARM"],
+        },
+        deploymentConfigName: foo.id,
+        deploymentGroupName: "bar",
+        serviceRoleArn: fooRole.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodedeployDeploymentGroupFoo.overrideLogicalId("foo");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deploymentConfigName` - (Required) The name of the deployment config.
+* `computePlatform` - (Optional) The compute platform can be `server`, `lambda`, or `ecs`. Default is `server`.
+* `minimumHealthyHosts` - (Optional) A `minimumHealthyHosts` block, documented below. Required for the `server` compute platform.
+* `trafficRoutingConfig` - (Optional) A `trafficRoutingConfig` block, documented below.
+
+The `minimumHealthyHosts` block supports the following:
+
+* `type` - (Required) The type can either be `fleetPercent` or `hostCount`.
+* `value` - (Required) When the type is `fleetPercent`, the value represents the minimum number of healthy instances as
+a percentage of the total number of instances in the deployment. If you specify `fleetPercent`, at the start of the
+deployment, AWS CodeDeploy converts the percentage to the equivalent number of instances and rounds up fractional instances.
+When the type is `hostCount`, the value represents the minimum number of healthy instances as an absolute value.
+
+The `trafficRoutingConfig` block supports the following:
+
+* `type` - (Optional) Type of traffic routing config. One of `timeBasedCanary`, `timeBasedLinear`, `allAtOnce`.
+* `timeBasedCanary` - (Optional) The time-based canary configuration information. If `type` is `timeBasedLinear`, use `timeBasedLinear` instead.
+* `timeBasedLinear` - (Optional) The time-based linear configuration information. If `type` is `timeBasedCanary`, use `timeBasedCanary` instead.
+
+The `timeBasedCanary` block supports the following:
+
+* `interval` - (Optional) The number of minutes between the first and second traffic shifts of a `timeBasedCanary` deployment.
+* `percentage` - (Optional) The percentage of traffic to shift in the first increment of a `timeBasedCanary` deployment.
+
+The `timeBasedLinear` block supports the following:
+
+* `interval` - (Optional) The number of minutes between each incremental traffic shift of a `timeBasedLinear` deployment.
+* `percentage` - (Optional) The percentage of traffic that is shifted at the start of each increment of a `timeBasedLinear` deployment.
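+
+For contrast with the `timeBasedLinear` example above, the following is a minimal sketch (not from the original page) of a `timeBasedCanary` configuration; the construct boilerplate matches the Lambda Usage example, and the `canary-deployment-config` name is an assumed placeholder:
+
+```typescript
+// Sketch only: shift 10% of traffic first, wait 15 minutes, then shift the rest.
+// Assumes the same imports and stack context as the Lambda Usage example above.
+new CodedeployDeploymentConfig(this, "canary", {
+  computePlatform: "Lambda",
+  deploymentConfigName: "canary-deployment-config",
+  trafficRoutingConfig: {
+    timeBasedCanary: {
+      interval: 15,
+      percentage: 10,
+    },
+    type: "TimeBasedCanary",
+  },
+});
+```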
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The deployment group's config name.
+* `deploymentConfigId` - The AWS-assigned deployment config ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeDeploy Deployment Configurations using the `deploymentConfigName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeDeploy Deployment Configurations using the `deploymentConfigName`. For example:
+
+```console
+% terraform import aws_codedeploy_deployment_config.example my-deployment-config
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codedeploy_deployment_group.html.markdown b/website/docs/cdktf/typescript/r/codedeploy_deployment_group.html.markdown
new file mode 100644
index 00000000000..3a9598a91ba
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codedeploy_deployment_group.html.markdown
@@ -0,0 +1,442 @@
+---
+subcategory: "CodeDeploy"
+layout: "aws"
+page_title: "AWS: aws_codedeploy_deployment_group"
+description: |-
+  Provides a CodeDeploy deployment group.
+---
+
+
+# Resource: aws_codedeploy_deployment_group
+
+Provides a CodeDeploy Deployment Group for a CodeDeploy Application.
+
+~> **NOTE on blue/green deployments:** When using `greenFleetProvisioningOption` with the `copyAutoScalingGroup` action, CodeDeploy will create a new ASG with a different name. This ASG is _not_ managed by Terraform and will conflict with existing configuration and state. You may want to use a different approach to managing deployments that involve multiple ASGs, such as `discoverExisting` with separate blue and green ASGs.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodedeployApp } from "./.gen/providers/aws/codedeploy-app";
+import { CodedeployDeploymentGroup } from "./.gen/providers/aws/codedeploy-deployment-group";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CodedeployApp(this, "example", {
+      name: "example-app",
+    });
+    const awsSnsTopicExample = new SnsTopic(this, "example_1", {
+      name: "example-topic",
+    });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + awsSnsTopicExample.overrideLogicalId("example"); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["codedeploy.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const awsIamRoleExample = new IamRole(this, "example_3", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "example-role", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + new IamRolePolicyAttachment(this, "AWSCodeDeployRole", { + policyArn: "arn:aws:iam::aws:policy/service-role/AWSCodeDeployRole", + role: Token.asString(awsIamRoleExample.name), + }); + const awsCodedeployDeploymentGroupExample = new CodedeployDeploymentGroup( + this, + "example_5", + { + alarmConfiguration: { + alarms: ["my-alarm-name"], + enabled: true, + }, + appName: example.name, + autoRollbackConfiguration: { + enabled: true, + events: ["DEPLOYMENT_FAILURE"], + }, + deploymentGroupName: "example-group", + ec2TagSet: [ + { + ec2TagFilter: [ + { + key: "filterkey1", + type: "KEY_AND_VALUE", + value: "filtervalue", + }, + { + key: "filterkey2", + type: "KEY_AND_VALUE", + value: "filtervalue", + }, + ], + }, + ], + serviceRoleArn: Token.asString(awsIamRoleExample.arn), + triggerConfiguration: [ + { + triggerEvents: ["DeploymentFailure"], + triggerName: "example-trigger", + triggerTargetArn: Token.asString(awsSnsTopicExample.arn), + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCodedeployDeploymentGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Blue Green Deployments with ECS + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { CodedeployApp } from "./.gen/providers/aws/codedeploy-app"; +import { CodedeployDeploymentGroup } from "./.gen/providers/aws/codedeploy-deployment-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CodedeployApp(this, "example", { + computePlatform: "ECS", + name: "example", + }); + const awsCodedeployDeploymentGroupExample = new CodedeployDeploymentGroup( + this, + "example_1", + { + appName: example.name, + autoRollbackConfiguration: { + enabled: true, + events: ["DEPLOYMENT_FAILURE"], + }, + blueGreenDeploymentConfig: { + deploymentReadyOption: { + actionOnTimeout: "CONTINUE_DEPLOYMENT", + }, + terminateBlueInstancesOnDeploymentSuccess: { + action: "TERMINATE", + terminationWaitTimeInMinutes: 5, + }, + }, + deploymentConfigName: "CodeDeployDefault.ECSAllAtOnce", + deploymentGroupName: "example", + deploymentStyle: { + deploymentOption: "WITH_TRAFFIC_CONTROL", + deploymentType: "BLUE_GREEN", + }, + ecsService: { + clusterName: Token.asString(awsEcsClusterExample.name), + serviceName: Token.asString(awsEcsServiceExample.name), + }, + loadBalancerInfo: { + targetGroupPairInfo: { + prodTrafficRoute: { + listenerArns: [Token.asString(awsLbListenerExample.arn)], + }, + targetGroup: [ + { + name: blue.name, + }, + { + name: green.name, + }, + ], + }, + }, + serviceRoleArn: Token.asString(awsIamRoleExample.arn), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCodedeployDeploymentGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Blue Green Deployments with Servers and Classic ELB + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CodedeployApp } from "./.gen/providers/aws/codedeploy-app"; +import { CodedeployDeploymentGroup } from "./.gen/providers/aws/codedeploy-deployment-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CodedeployApp(this, "example", { + name: "example-app", + }); + const awsCodedeployDeploymentGroupExample = new CodedeployDeploymentGroup( + this, + "example_1", + { + appName: example.name, + blueGreenDeploymentConfig: { + deploymentReadyOption: { + actionOnTimeout: "STOP_DEPLOYMENT", + waitTimeInMinutes: 60, + }, + greenFleetProvisioningOption: { + action: "DISCOVER_EXISTING", + }, + terminateBlueInstancesOnDeploymentSuccess: { + action: "KEEP_ALIVE", + }, + }, + deploymentGroupName: "example-group", + deploymentStyle: { + deploymentOption: "WITH_TRAFFIC_CONTROL", + deploymentType: "BLUE_GREEN", + }, + loadBalancerInfo: { + elbInfo: [ + { + name: Token.asString(awsElbExample.name), + }, + ], + }, + serviceRoleArn: Token.asString(awsIamRoleExample.arn), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCodedeployDeploymentGroupExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `appName` - (Required) The name of the application. +* `deploymentGroupName` - (Required) The name of the deployment group. 
+* `serviceRoleArn` - (Required) The service role ARN that allows deployments.
+* `alarmConfiguration` - (Optional) Configuration block of alarms associated with the deployment group (documented below).
+* `autoRollbackConfiguration` - (Optional) Configuration block of the automatic rollback configuration associated with the deployment group (documented below).
+* `autoscalingGroups` - (Optional) Autoscaling groups associated with the deployment group.
+* `blueGreenDeploymentConfig` - (Optional) Configuration block of the blue/green deployment options for a deployment group (documented below).
+* `deploymentConfigName` - (Optional) The name of the group's deployment config. The default is `CodeDeployDefault.OneAtATime`.
+* `deploymentStyle` - (Optional) Configuration block of the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer (documented below).
+* `ec2TagFilter` - (Optional) Tag filters associated with the deployment group. See the AWS docs for details.
+* `ec2TagSet` - (Optional) Configuration block(s) of tag filters associated with the deployment group, which are also referred to as tag groups (documented below). See the AWS docs for details.
+* `ecsService` - (Optional) Configuration block(s) of the ECS services for a deployment group (documented below).
+* `loadBalancerInfo` - (Optional) Single configuration block of the load balancer to use in a blue/green deployment (documented below).
+* `onPremisesInstanceTagFilter` - (Optional) On-premises instance tag filters associated with the group. See the AWS docs for details.
+* `triggerConfiguration` - (Optional) Configuration block(s) of the triggers for the deployment group (documented below).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### alarm_configuration Argument Reference
+
+You can configure a deployment to stop when a CloudWatch alarm detects that a metric has fallen below or exceeded a defined threshold. `alarmConfiguration` supports the following:
+
+* `alarms` - (Optional) A list of alarms configured for the deployment group. _A maximum of 10 alarms can be added to a deployment group_.
+* `enabled` - (Optional) Indicates whether the alarm configuration is enabled. This option is useful when you want to temporarily deactivate alarm monitoring for a deployment group without having to add the same alarms again later.
+* `ignorePollAlarmFailure` - (Optional) Indicates whether a deployment should continue if information about the current state of alarms cannot be retrieved from CloudWatch. The default value is `false`.
+  * `true`: The deployment will proceed even if alarm status information can't be retrieved.
+  * `false`: The deployment will stop if alarm status information can't be retrieved.
+
+_Only one `alarmConfiguration` is allowed_.
+
+### auto_rollback_configuration Argument Reference
+
+You can configure a deployment group to automatically roll back when a deployment fails or when a monitoring threshold you specify is met. In this case, the last known good version of an application revision is deployed.
`autoRollbackConfiguration` supports the following:
+
+* `enabled` - (Optional) Indicates whether a defined automatic rollback configuration is currently enabled for this deployment group. If you enable automatic rollback, you must specify at least one event type.
+* `events` - (Optional) The event type or types that trigger a rollback. Supported types are `DEPLOYMENT_FAILURE` and `DEPLOYMENT_STOP_ON_ALARM`.
+
+_Only one `autoRollbackConfiguration` is allowed_.
+
+### blue_green_deployment_config Argument Reference
+
+You can configure options for a blue/green deployment. `blueGreenDeploymentConfig` supports the following:
+
+* `deploymentReadyOption` - (Optional) Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment (documented below).
+* `greenFleetProvisioningOption` - (Optional) Information about how instances are provisioned for a replacement environment in a blue/green deployment (documented below).
+* `terminateBlueInstancesOnDeploymentSuccess` - (Optional) Information about whether to terminate instances in the original fleet during a blue/green deployment (documented below).
+
+_Only one `blueGreenDeploymentConfig` is allowed_.
+
+You can configure how traffic is rerouted to instances in a replacement environment in a blue/green deployment. `deploymentReadyOption` supports the following:
+
+* `actionOnTimeout` - (Optional) When to reroute traffic from an original environment to a replacement environment in a blue/green deployment.
+  * `CONTINUE_DEPLOYMENT`: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment.
+  * `STOP_DEPLOYMENT`: Do not register new instances with the load balancer unless traffic is rerouted manually. If traffic is not rerouted manually before the end of the specified wait period, the deployment status is changed to Stopped.
+* `waitTimeInMinutes` - (Optional) The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the `STOP_DEPLOYMENT` option for `actionOnTimeout`.
+
+You can configure how instances will be added to the replacement environment in a blue/green deployment. `greenFleetProvisioningOption` supports the following:
+
+* `action` - (Optional) The method used to add instances to a replacement environment.
+  * `DISCOVER_EXISTING`: Use instances that already exist or will be created manually.
+  * `COPY_AUTO_SCALING_GROUP`: Use settings from a specified Auto Scaling group to define and create instances in a new Auto Scaling group. _Exactly one Auto Scaling group must be specified_ when selecting `COPY_AUTO_SCALING_GROUP`. Use `autoscalingGroups` to specify the Auto Scaling group.
+
+You can configure how instances in the original environment are terminated when a blue/green deployment is successful. `terminateBlueInstancesOnDeploymentSuccess` supports the following:
+
+* `action` - (Optional) The action to take on instances in the original environment after a successful blue/green deployment.
+  * `TERMINATE`: Instances are terminated after a specified wait time.
+  * `KEEP_ALIVE`: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
+* `terminationWaitTimeInMinutes` - (Optional) The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment.
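+
+Both examples on this page provision the replacement fleet with `DISCOVER_EXISTING`. A minimal sketch of the `COPY_AUTO_SCALING_GROUP` path is shown below; the app name, group name, role ARN, Auto Scaling group name, and load balancer name are illustrative placeholders, and the note at the top of this page about the copied ASG living outside Terraform's state applies:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CodedeployDeploymentGroup } from "./.gen/providers/aws/codedeploy-deployment-group";
+
+class CopyAsgDeploymentGroupSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodedeployDeploymentGroup(this, "example", {
+      appName: "example-app",
+      deploymentGroupName: "example-group",
+      serviceRoleArn: "arn:aws:iam::123456789012:role/example-codedeploy-role",
+      // Exactly one Auto Scaling group is copied to provision the green fleet.
+      autoscalingGroups: ["example-asg"],
+      blueGreenDeploymentConfig: {
+        greenFleetProvisioningOption: {
+          action: "COPY_AUTO_SCALING_GROUP",
+        },
+        deploymentReadyOption: {
+          actionOnTimeout: "CONTINUE_DEPLOYMENT",
+        },
+        terminateBlueInstancesOnDeploymentSuccess: {
+          action: "TERMINATE",
+          terminationWaitTimeInMinutes: 5,
+        },
+      },
+      deploymentStyle: {
+        deploymentOption: "WITH_TRAFFIC_CONTROL",
+        deploymentType: "BLUE_GREEN",
+      },
+      loadBalancerInfo: {
+        elbInfo: [{ name: "example-elb" }],
+      },
+    });
+  }
+}
+```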
+
+### deployment_style Argument Reference
+
+You can configure the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer. `deploymentStyle` supports the following:
+
+* `deploymentOption` - (Optional) Indicates whether to route deployment traffic behind a load balancer. Valid values are `WITH_TRAFFIC_CONTROL` or `WITHOUT_TRAFFIC_CONTROL`. Default is `WITHOUT_TRAFFIC_CONTROL`.
+* `deploymentType` - (Optional) Indicates whether to run an in-place deployment or a blue/green deployment. Valid values are `IN_PLACE` or `BLUE_GREEN`. Default is `IN_PLACE`.
+
+_Only one `deploymentStyle` is allowed_.
+
+### ec2_tag_filter Argument Reference
+
+The `ec2TagFilter` configuration block supports the following:
+
+* `key` - (Optional) The key of the tag filter.
+* `type` - (Optional) The type of the tag filter, either `KEY_ONLY`, `VALUE_ONLY`, or `KEY_AND_VALUE`.
+* `value` - (Optional) The value of the tag filter.
+
+Multiple occurrences of `ec2TagFilter` are allowed, where any instance that matches at least one of the tag filters is selected.
+
+### ec2_tag_set Argument Reference
+
+You can form a tag group by putting a set of tag filters into `ec2TagSet`. If multiple tag groups are specified, any instance that matches at least one tag filter of every tag group is selected.
+
+### ecs_service Argument Reference
+
+Each `ecsService` configuration block supports the following:
+
+* `clusterName` - (Required) The name of the ECS cluster.
+* `serviceName` - (Required) The name of the ECS service.
+
+### load_balancer_info Argument Reference
+
+You can configure the load balancer to use in a deployment. `loadBalancerInfo` supports the following:
+
+* `elbInfo` - (Optional) The Classic Elastic Load Balancer to use in a deployment. Conflicts with `targetGroupInfo` and `targetGroupPairInfo`.
+* `targetGroupInfo` - (Optional) The (Application/Network Load Balancer) target group to use in a deployment. Conflicts with `elbInfo` and `targetGroupPairInfo`.
+* `targetGroupPairInfo` - (Optional) The (Application/Network Load Balancer) target group pair to use in a deployment. Conflicts with `elbInfo` and `targetGroupInfo`.
+
+#### load_balancer_info elb_info Argument Reference
+
+The `elbInfo` configuration block supports the following:
+
+* `name` - (Optional) The name of the load balancer that will be used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment completes.
+
+#### load_balancer_info target_group_info Argument Reference
+
+The `targetGroupInfo` configuration block supports the following:
+
+* `name` - (Optional) The name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment completes.
+
+#### load_balancer_info target_group_pair_info Argument Reference
+
+The `targetGroupPairInfo` configuration block supports the following:
+
+* `prodTrafficRoute` - (Required) Configuration block for the production traffic route (documented below).
+* `targetGroup` - (Required) Configuration blocks for a target group within a target group pair (documented below).
+* `testTrafficRoute` - (Optional) Configuration block for the test traffic route (documented below).
+
+##### load_balancer_info target_group_pair_info prod_traffic_route Argument Reference
+
+The `prodTrafficRoute` configuration block supports the following:
+
+* `listenerArns` - (Required) List of Amazon Resource Names (ARNs) of the load balancer listeners. Must contain exactly one listener ARN.
+
+##### load_balancer_info target_group_pair_info target_group Argument Reference
+
+The `targetGroup` configuration block supports the following:
+
+* `name` - (Required) Name of the target group.
+
+##### load_balancer_info target_group_pair_info test_traffic_route Argument Reference
+
+The `testTrafficRoute` configuration block supports the following:
+
+* `listenerArns` - (Required) List of Amazon Resource Names (ARNs) of the load balancer listeners.
+
+### on_premises_instance_tag_filter Argument Reference
+
+The `onPremisesInstanceTagFilter` configuration block supports the following:
+
+* `key` - (Optional) The key of the tag filter.
+* `type` - (Optional) The type of the tag filter, either `KEY_ONLY`, `VALUE_ONLY`, or `KEY_AND_VALUE`.
+* `value` - (Optional) The value of the tag filter.
+
+### trigger_configuration Argument Reference
+
+Add triggers to a Deployment Group to receive notifications about events related to deployments or instances in the group. Notifications are sent to subscribers of the SNS topic associated with the trigger. _CodeDeploy must have permission to publish to the topic from this deployment group_. `triggerConfiguration` supports the following:
+
+* `triggerEvents` - (Required) The event type or types for which notifications are triggered. Supported values include `DeploymentStart`, `DeploymentSuccess`, `DeploymentFailure`, `DeploymentStop`, `DeploymentRollback`, `InstanceStart`, `InstanceSuccess`, and `InstanceFailure`. See [the CodeDeploy documentation][1] for all possible values.
+* `triggerName` - (Required) The name of the notification trigger.
+* `triggerTargetArn` - (Required) The ARN of the SNS topic through which notifications are sent.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the CodeDeploy deployment group.
+* `id` - Application name and deployment group name.
+* `computePlatform` - The destination platform type for the deployment.
+* `deploymentGroupId` - The ID of the CodeDeploy deployment group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeDeploy Deployment Groups using `appName`, a colon, and `deploymentGroupName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeDeploy Deployment Groups using `appName`, a colon, and `deploymentGroupName`.
For example:
+
+```console
+% terraform import aws_codedeploy_deployment_group.example my-application:my-deployment-group
+```
+
+[1]: http://docs.aws.amazon.com/codedeploy/latest/userguide/monitoring-sns-event-notifications-create-trigger.html
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codegurureviewer_repository_association.html.markdown b/website/docs/cdktf/typescript/r/codegurureviewer_repository_association.html.markdown
new file mode 100644
index 00000000000..e030e3b048b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codegurureviewer_repository_association.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "CodeGuru Reviewer"
+layout: "aws"
+page_title: "AWS: aws_codegurureviewer_repository_association"
+description: |-
+  Terraform resource for managing an AWS CodeGuru Reviewer Repository Association.
+---
+
+
+# Resource: aws_codegurureviewer_repository_association
+
+Terraform resource for managing an AWS CodeGuru Reviewer Repository Association.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodecommitRepository } from "./.gen/providers/aws/codecommit-repository";
+import { CodegurureviewerRepositoryAssociation } from "./.gen/providers/aws/codegurureviewer-repository-association";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CodecommitRepository(this, "example", {
+      lifecycle: {
+        ignoreChanges: ["codeguruReviewer"],
+      },
+      repositoryName: "example-repo",
+    });
+    const awsKmsKeyExample = new KmsKey(this, "example_1", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKmsKeyExample.overrideLogicalId("example");
+    const awsCodegurureviewerRepositoryAssociationExample =
+      new CodegurureviewerRepositoryAssociation(this, "example_2", {
+        kmsKeyDetails: {
+          encryptionOption: "CUSTOMER_MANAGED_CMK",
+          kmsKeyId: Token.asString(awsKmsKeyExample.keyId),
+        },
+        repository: {
+          codecommit: {
+            name: example.repositoryName,
+          },
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodegurureviewerRepositoryAssociationExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `repository` - (Required) An object describing the repository to associate. Valid values: `bitbucket`, `codecommit`, `githubEnterpriseServer`, or `s3Bucket`. Block is documented below. Note: for repositories that leverage CodeStar connections (e.g., `bitbucket`, `githubEnterpriseServer`) the connection must be in `AVAILABLE` status prior to creating this resource.
+
+The following arguments are optional:
+
+* `kmsKeyDetails` - (Optional) An object describing the KMS key to associate. Block is documented below.
+
+## repository
+
+This configuration block supports the following:
+
+### bitbucket
+
+* `connectionArn` - (Required) The Amazon Resource Name (ARN) of an AWS CodeStar Connections connection.
+* `name` - (Required) The name of the third-party source repository.
+* `owner` - (Required) The username for the account that owns the repository.
+
+### codecommit
+
+* `name` - (Required) The name of the AWS CodeCommit repository.
+
+### github_enterprise_server
+
+* `connectionArn` - (Required) The Amazon Resource Name (ARN) of an AWS CodeStar Connections connection.
+* `name` - (Required) The name of the third-party source repository.
+* `owner` - (Required) The username for the account that owns the repository.
+
+### s3_bucket
+
+* `bucketName` - (Required) The name of the S3 bucket used for associating a new S3 repository. Note: The name must begin with the prefix `codeguru-reviewer`.
+* `name` - (Required) The name of the repository in the S3 bucket.
+
+## kms_key_details
+
+This configuration block supports the following:
+
+* `encryptionOption` - (Optional) The encryption option for a repository association. It is either owned by AWS Key Management Service (KMS) (`AWS_OWNED_CMK`) or customer managed (`CUSTOMER_MANAGED_CMK`).
+* `kmsKeyId` - (Optional) The ID of the AWS KMS key that is associated with a repository association.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) identifying the repository association.
+* `associationId` - The ID of the repository association.
+* `connectionArn` - The Amazon Resource Name (ARN) of an AWS CodeStar Connections connection.
+* `id` - The Amazon Resource Name (ARN) identifying the repository association.
+* `name` - The name of the repository.
+* `owner` - The owner of the repository.
+* `providerType` - The provider type of the repository association.
+* `state` - The state of the repository association.
+* `stateReason` - A description of why the repository association is in the current state.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codepipeline.markdown b/website/docs/cdktf/typescript/r/codepipeline.markdown
new file mode 100644
index 00000000000..53411c5d426
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codepipeline.markdown
@@ -0,0 +1,261 @@
+---
+subcategory: "CodePipeline"
+layout: "aws"
+page_title: "AWS: aws_codepipeline"
+description: |-
+  Provides a CodePipeline
+---
+
+
+# Resource: aws_codepipeline
+
+Provides a CodePipeline.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { Codepipeline } from "./.gen/providers/aws/codepipeline"; +import { CodestarconnectionsConnection } from "./.gen/providers/aws/codestarconnections-connection"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsKmsAlias } from "./.gen/providers/aws/data-aws-kms-alias"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CodestarconnectionsConnection(this, "example", { + name: "example-connection", + providerType: "GitHub", + }); + const codepipelineBucket = new S3Bucket(this, "codepipeline_bucket", { + bucket: "test-bucket", + }); + new S3BucketAcl(this, "codepipeline_bucket_acl", { + acl: "private", + bucket: codepipelineBucket.id, + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["codepipeline.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const codepipelinePolicy = new DataAwsIamPolicyDocument( + this, + "codepipeline_policy", + { + statement: [ + { + actions: [ + "s3:GetObject", + "s3:GetObjectVersion", + "s3:GetBucketVersioning", + "s3:PutObjectAcl", + "s3:PutObject", + ], + effect: "Allow", + resources: [ + codepipelineBucket.arn, + "${" + codepipelineBucket.arn + "}/*", + ], + }, + { + actions: ["codestar-connections:UseConnection"], + effect: "Allow", + resources: [example.arn], + }, + { + actions: ["codebuild:BatchGetBuilds", "codebuild:StartBuild"], + effect: "Allow", + resources: ["*"], + }, + ], + } + ); + const s3Kmskey = new DataAwsKmsAlias(this, "s3kmskey", { + name: "alias/myKmsKey", + }); + const codepipelineRole = new IamRole(this, "codepipeline_role", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "test-role", + }); + const awsIamRolePolicyCodepipelinePolicy = new IamRolePolicy( + this, + "codepipeline_policy_7", + { + name: "codepipeline_policy", + policy: Token.asString(codepipelinePolicy.json), + role: codepipelineRole.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamRolePolicyCodepipelinePolicy.overrideLogicalId("codepipeline_policy"); + new Codepipeline(this, "codepipeline", { + artifactStore: [ + { + encryptionKey: { + id: Token.asString(s3Kmskey.arn), + type: "KMS", + }, + location: codepipelineBucket.bucket, + type: "S3", + }, + ], + name: "tf-test-pipeline", + roleArn: codepipelineRole.arn, + stage: [ + { + action: [ + { + category: "Source", + configuration: { + BranchName: "main", + ConnectionArn: example.arn, + FullRepositoryId: "my-organization/example", + }, + name: "Source", + outputArtifacts: ["source_output"], + owner: "AWS", + provider: "CodeStarSourceConnection", + version: "1", + }, + ], + name: "Source", + }, + { + action: [ + { + category: "Build", + configuration: { + ProjectName: "test", + }, + inputArtifacts: ["source_output"], + name: "Build", + outputArtifacts: ["build_output"], + owner: "AWS", + provider: "CodeBuild", + version: "1", + }, + ], + name: "Build", + }, + { + action: [ + { + category: "Deploy", + configuration: { + ActionMode: "REPLACE_ON_FAILURE", + Capabilities: "CAPABILITY_AUTO_EXPAND,CAPABILITY_IAM", + OutputFileName: "CreateStackOutput.json", + StackName: "MyStack", + TemplatePath: "build_output::sam-templated.yaml", + }, + inputArtifacts: ["build_output"], + name: "Deploy", + owner: "AWS", + provider: "CloudFormation", + version: "1", + }, + ], + name: "Deploy", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the pipeline. +* `roleArn` - (Required) A service role Amazon Resource Name (ARN) that grants AWS CodePipeline permission to make calls to AWS services on your behalf. +* `artifactStore` (Required) One or more artifact_store blocks. Artifact stores are documented below. +* `stage` (Minimum of at least two `stage` blocks is required) A stage block. Stages are documented below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +An `artifactStore` block supports the following arguments: + +* `location` - (Required) The location where AWS CodePipeline stores artifacts for a pipeline; currently only `s3` is supported. +* `type` - (Required) The type of the artifact store, such as Amazon S3 +* `encryptionKey` - (Optional) The encryption key block AWS CodePipeline uses to encrypt the data in the artifact store, such as an AWS Key Management Service (AWS KMS) key. If you don't specify a key, AWS CodePipeline uses the default key for Amazon Simple Storage Service (Amazon S3). An `encryptionKey` block is documented below. +* `region` - (Optional) The region where the artifact store is located. Required for a cross-region CodePipeline, do not provide for a single-region CodePipeline. + +An `encryptionKey` block supports the following arguments: + +* `id` - (Required) The KMS key ARN or ID +* `type` - (Required) The type of key; currently only `kms` is supported + +A `stage` block supports the following arguments: + +* `name` - (Required) The name of the stage. +* `action` - (Required) The action(s) to include in the stage. 
Defined as an `action` block below.
+
+An `action` block supports the following arguments:
+
+* `category` - (Required) A category defines what kind of action can be taken in the stage, and constrains the provider type for the action. Possible values are `Approval`, `Build`, `Deploy`, `Invoke`, `Source`, and `Test`.
+* `owner` - (Required) The creator of the action being called. Possible values are `AWS`, `Custom`, and `ThirdParty`.
+* `name` - (Required) The action declaration's name.
+* `provider` - (Required) The provider of the service being called by the action. Valid providers are determined by the action category. Provider names are listed in the [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
+* `version` - (Required) A string that identifies the action type.
+* `configuration` - (Optional) A map of the action declaration's configuration. Configuration options for action types and providers can be found in the [Pipeline Structure Reference](http://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#action-requirements) and [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
+* `inputArtifacts` - (Optional) A list of artifact names to be worked on.
+* `outputArtifacts` - (Optional) A list of artifact names to output. Output artifact names must be unique within a pipeline.
+* `roleArn` - (Optional) The ARN of the IAM service role that will perform the declared action. This is assumed through the `roleArn` of the pipeline.
+* `runOrder` - (Optional) The order in which actions are run.
+* `region` - (Optional) The region in which to run the action.
+* `namespace` - (Optional) The namespace all output variables will be accessed from.
+
+~> **Note:** The input artifact of an action must exactly match the output artifact declared in a preceding action, but the consuming action does not have to immediately follow the action that provided the output artifact. Actions in parallel can declare different output artifacts, which are in turn consumed by different following actions.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CodePipeline ID.
+* `arn` - The CodePipeline ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodePipelines using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodePipelines using the name.
For example:
+
+```console
+% terraform import aws_codepipeline.foo example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codepipeline_custom_action_type.html.markdown b/website/docs/cdktf/typescript/r/codepipeline_custom_action_type.html.markdown
new file mode 100644
index 00000000000..b3aaa79d207
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codepipeline_custom_action_type.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "CodePipeline"
+layout: "aws"
+page_title: "AWS: aws_codepipeline_custom_action_type"
+description: |-
+  Provides a CodePipeline CustomActionType.
+---
+
+
+# Resource: aws_codepipeline_custom_action_type
+
+Provides a CodePipeline CustomActionType.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodepipelineCustomActionType } from "./.gen/providers/aws/codepipeline-custom-action-type";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodepipelineCustomActionType(this, "example", {
+      category: "Build",
+      inputArtifactDetails: {
+        maximumCount: 1,
+        minimumCount: 0,
+      },
+      outputArtifactDetails: {
+        maximumCount: 1,
+        minimumCount: 0,
+      },
+      providerName: "example",
+      version: "1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `category` - (Required) The category of the custom action. Valid values: `Source`, `Build`, `Deploy`, `Test`, `Invoke`, `Approval`
+* `configurationProperty` - (Optional) The configuration properties for the custom action. Max 10 items.
+
+The `configurationProperty` object supports the following:
+
+* `description` - (Optional) The description of the action configuration property.
+* `key` - (Required) Whether the configuration property is a key.
+* `name` - (Required) The name of the action configuration property.
+* `queryable` - (Optional) Indicates that the property will be used in conjunction with PollForJobs.
+* `required` - (Required) Whether the configuration property is a required value.
+* `secret` - (Required) Whether the configuration property is secret.
+* `type` - (Optional) The type of the configuration property. Valid values: `String`, `Number`, `Boolean`
+
+* `inputArtifactDetails` - (Required) The details of the input artifact for the action.
+
+The `inputArtifactDetails` object supports the following:
+
+* `maximumCount` - (Required) The maximum number of artifacts allowed for the action type. Min: 0, Max: 5
+* `minimumCount` - (Required) The minimum number of artifacts allowed for the action type. Min: 0, Max: 5
+
+* `outputArtifactDetails` - (Required) The details of the output artifact of the action.
+
+The `outputArtifactDetails` object supports the following:
+
+* `maximumCount` - (Required) The maximum number of artifacts allowed for the action type. Min: 0, Max: 5
+* `minimumCount` - (Required) The minimum number of artifacts allowed for the action type. Min: 0, Max: 5
+
+* `providerName` - (Required) The provider of the service used in the custom action.
+* `settings` - (Optional) The settings for an action type.
+
+The `settings` object supports the following:
+
+* `entityUrlTemplate` - (Optional) The URL returned to the AWS CodePipeline console that provides a deep link to the resources of the external system.
+* `executionUrlTemplate` - (Optional) The URL returned to the AWS CodePipeline console that contains a link to the top-level landing page for the external system.
+* `revisionUrlTemplate` - (Optional) The URL returned to the AWS CodePipeline console that contains a link to the page where customers can update or change the configuration of the external action.
+* `thirdPartyConfigurationUrl` - (Optional) The URL of a sign-up page where users can sign up for an external service and perform initial configuration of the action provided by that service.
+
+* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `version` - (Required) The version identifier of the custom action.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Composed of category, provider, and version. For example, `Build:terraform:1`.
+* `arn` - The action ARN.
+* `owner` - The creator of the action being called.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodePipeline CustomActionType using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodePipeline CustomActionType using the `id`. For example:
+
+```console
+% terraform import aws_codepipeline_custom_action_type.example Build:terraform:1
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codepipeline_webhook.markdown b/website/docs/cdktf/typescript/r/codepipeline_webhook.markdown
new file mode 100644
index 00000000000..0b631a24f51
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codepipeline_webhook.markdown
@@ -0,0 +1,174 @@
+---
+subcategory: "CodePipeline"
+layout: "aws"
+page_title: "AWS: aws_codepipeline_webhook"
+description: |-
+  Provides a CodePipeline Webhook
+---
+
+
+# Resource: aws_codepipeline_webhook
+
+Provides a CodePipeline Webhook.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Codepipeline } from "./.gen/providers/aws/codepipeline";
+import { CodepipelineWebhook } from "./.gen/providers/aws/codepipeline-webhook";
+import { RepositoryWebhook } from "./.gen/providers/github/repository-webhook";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: github.
+    For a more precise conversion please use the --provider flag in convert.*/
+    const webhookSecret = "super-secret";
+    const bar = new Codepipeline(this, "bar", {
+      artifactStore: [
+        {
+          encryptionKey: {
+            id: Token.asString(s3Kmskey.arn),
+            type: "KMS",
+          },
+          location: Token.asString(awsS3BucketBar.bucket),
+          type: "S3",
+        },
+      ],
+      name: "tf-test-pipeline",
+      roleArn: Token.asString(awsIamRoleBar.arn),
+      stage: [
+        {
+          action: [
+            {
+              category: "Source",
+              configuration: {
+                Branch: "master",
+                Owner: "my-organization",
+                Repo: "test",
+              },
+              name: "Source",
+              outputArtifacts: ["test"],
+              owner: "ThirdParty",
+              provider: "GitHub",
+              version: "1",
+            },
+          ],
+          name: "Source",
+        },
+        {
+          action: [
+            {
+              category: "Build",
+              configuration: {
+                ProjectName: "test",
+              },
+              inputArtifacts: ["test"],
+              name: "Build",
+              owner: "AWS",
+              provider: "CodeBuild",
+              version: "1",
+            },
+          ],
+          name: "Build",
+        },
+      ],
+    });
+    const awsCodepipelineWebhookBar = new CodepipelineWebhook(this, "bar_1", {
+      authentication: "GITHUB_HMAC",
+      authenticationConfiguration: {
+        secretToken: webhookSecret,
+      },
+      filter: [
+        {
+          jsonPath: "$.ref",
+          matchEquals: "refs/heads/{Branch}",
+        },
+      ],
+      name: "test-webhook-github-bar",
+      targetAction: "Source",
+      targetPipeline: bar.name,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodepipelineWebhookBar.overrideLogicalId("bar");
+    const githubRepositoryWebhookBar = new RepositoryWebhook(this, "bar_2", {
+      configuration: [
+        {
+          content_type: "json",
+          insecure_ssl: true,
+          secret: webhookSecret,
+          url: awsCodepipelineWebhookBar.url,
+        },
+      ],
+      events: ["push"],
+      name: "web",
+      repository: repo.name,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    githubRepositoryWebhookBar.overrideLogicalId("bar");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the webhook.
+* `authentication` - (Required) The type of authentication to use. One of `IP`, `GITHUB_HMAC`, or `UNAUTHENTICATED` (an `IP` sketch follows this list).
+* `authenticationConfiguration` - (Optional) An `auth` block. Required for `IP` and `GITHUB_HMAC`. Auth blocks are documented below.
+* `filter` - (Required) One or more `filter` blocks. Filter blocks are documented below.
+* `targetAction` - (Required) The name of the action in a pipeline you want to connect to the webhook. The action must be from the source (first) stage of the pipeline.
+* `targetPipeline` - (Required) The name of the pipeline.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
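+
+The example above authenticates with `GITHUB_HMAC`. For `IP` authentication, the `authenticationConfiguration` block carries `allowedIpRange` instead of `secretToken`; a minimal sketch follows (the webhook name, pipeline name, and CIDR range are illustrative placeholders):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CodepipelineWebhook } from "./.gen/providers/aws/codepipeline-webhook";
+
+class IpWebhookSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodepipelineWebhook(this, "ip_webhook", {
+      name: "test-webhook-ip",
+      authentication: "IP",
+      authenticationConfiguration: {
+        // Only callers inside this CIDR block may trigger the webhook.
+        allowedIpRange: "10.0.0.0/24",
+      },
+      filter: [
+        {
+          jsonPath: "$.ref",
+          matchEquals: "refs/heads/{Branch}",
+        },
+      ],
+      targetAction: "Source",
+      targetPipeline: "tf-test-pipeline",
+    });
+  }
+}
+```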
+
+An `authenticationConfiguration` block supports the following arguments:
+
+* `secretToken` - (Optional) The shared secret for the GitHub repository webhook. Set this as `secret` in your `githubRepositoryWebhook`'s `configuration` block. Required for `GITHUB_HMAC`.
+* `allowedIpRange` - (Optional) A valid CIDR block for `IP` filtering. Required for `IP`.
+
+A `filter` block supports the following arguments:
+
+* `jsonPath` - (Required) The [JSON path](https://github.com/json-path/JsonPath) to filter on.
+* `matchEquals` - (Required) The value to match on (e.g., `refs/heads/{branch}`). See [AWS docs](https://docs.aws.amazon.com/codepipeline/latest/APIReference/API_WebhookFilterRule.html) for details.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The CodePipeline webhook's ARN.
+* `id` - The CodePipeline webhook's ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `url` - The CodePipeline webhook's URL. POST events to this endpoint to trigger the target.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodePipeline Webhooks using their ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodePipeline Webhooks using their ARN. For example:
+
+```console
+% terraform import aws_codepipeline_webhook.example arn:aws:codepipeline:us-west-2:123456789012:webhook:example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codestarconnections_connection.markdown b/website/docs/cdktf/typescript/r/codestarconnections_connection.markdown
new file mode 100644
index 00000000000..4078a0afb28
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codestarconnections_connection.markdown
@@ -0,0 +1,151 @@
+---
+subcategory: "CodeStar Connections"
+layout: "aws"
+page_title: "AWS: aws_codestarconnections_connection"
+description: |-
+  Provides a CodeStar Connection
+---
+
+
+# Resource: aws_codestarconnections_connection
+
+Provides a CodeStar Connection.
+
+~> **NOTE:** The `awsCodestarconnectionsConnection` resource is created in the state `PENDING`. Authentication with the connection provider must be completed in the AWS Console. See the [AWS documentation](https://docs.aws.amazon.com/dtconsole/latest/userguide/connections-update.html) for details.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Codepipeline } from "./.gen/providers/aws/codepipeline";
+import { CodestarconnectionsConnection } from "./.gen/providers/aws/codestarconnections-connection";
+interface MyConfig {
+  location: any;
+  type: any;
+  category: any;
+  name: any;
+  owner: any;
+  provider: any;
+  version: any;
+  category1: any;
+  name1: any;
+  owner1: any;
+  provider1: any;
+  version1: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new CodestarconnectionsConnection(this, "example", {
+      name: "example-connection",
+      providerType: "Bitbucket",
+    });
+    const awsCodepipelineExample = new Codepipeline(this, "example_1", {
+      artifactStore: [
+        {
+          location: config.location,
+          type: config.type,
+        },
+      ],
+      name: "tf-test-pipeline",
+      roleArn: codepipelineRole.arn,
+      stage: [
+        {
+          action: [
+            {
+              category: "Source",
+              configuration: {
+                BranchName: "main",
+                ConnectionArn: example.arn,
+                FullRepositoryId: "my-organization/test",
+              },
+              name: "Source",
+              outputArtifacts: ["source_output"],
+              owner: "AWS",
+              provider: "CodeStarSourceConnection",
+              version: "1",
+            },
+          ],
+          name: "Source",
+        },
+        {
+          action: [
+            {
+              category: config.category,
+              name: config.name,
+              owner: config.owner,
+              provider: config.provider,
+              version: config.version,
+            },
+          ],
+          name: "Build",
+        },
+        {
+          action: [
+            {
+              category: config.category1,
+              name: config.name1,
+              owner: config.owner1,
+              provider: config.provider1,
+              version: config.version1,
+            },
+          ],
+          name: "Deploy",
+        },
+      ],
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCodepipelineExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the connection to be created. The name must be unique in the calling AWS account. Changing `name` will create a new resource.
+* `providerType` - (Optional) The name of the external provider where your third-party code repository is configured. Valid values are `Bitbucket`, `GitHub`, or `GitHubEnterpriseServer`. Changing `providerType` will create a new resource. Conflicts with `hostArn`.
+* `hostArn` - (Optional) The Amazon Resource Name (ARN) of the host associated with the connection. Conflicts with `providerType`.
+* `tags` - (Optional) Map of key-value resource tags to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CodeStar connection ARN.
+* `arn` - The CodeStar connection ARN.
+* `connectionStatus` - The CodeStar connection status. Possible values are `PENDING`, `AVAILABLE`, and `ERROR`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeStar connections using the ARN.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CodeStar connections using the ARN. For example: + +```console +% terraform import aws_codestarconnections_connection.test-connection arn:aws:codestar-connections:us-west-1:0123456789:connection/79d4d357-a2ee-41e4-b350-2fe39ae59448 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/codestarconnections_host.markdown b/website/docs/cdktf/typescript/r/codestarconnections_host.markdown new file mode 100644 index 00000000000..693a6b359e5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/codestarconnections_host.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "CodeStar Connections" +layout: "aws" +page_title: "AWS: aws_codestarconnections_host" +description: |- + Provides a CodeStar Host +--- + + + +# Resource: aws_codestarconnections_host + +Provides a CodeStar Host. + +~> **NOTE:** The `awsCodestarconnectionsHost` resource is created in the state `pending`. Authentication with the host provider must be completed in the AWS Console. For more information visit [Set up a pending host](https://docs.aws.amazon.com/dtconsole/latest/userguide/connections-host-setup.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CodestarconnectionsHost } from "./.gen/providers/aws/codestarconnections-host"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CodestarconnectionsHost(this, "example", { + name: "example-host", + providerEndpoint: "https://example.com", + providerType: "GitHubEnterpriseServer", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the host to be created. The name must be unique in the calling AWS account. +* `providerEndpoint` - (Required) The endpoint of the infrastructure to be represented by the host after it is created. +* `providerType` - (Required) The name of the external provider where your third-party code repository is configured. +* `vpcConfiguration` - (Optional) The VPC configuration to be provisioned for the host. A VPC must be configured, and the infrastructure to be represented by the host must already be connected to the VPC. + +A `vpcConfiguration` block supports the following arguments: + +* `securityGroupIds` - (Required) ID of the security group or security groups associated with the Amazon VPC connected to the infrastructure where your provider type is installed. +* `subnetIds` - (Required) The ID of the subnet or subnets associated with the Amazon VPC connected to the infrastructure where your provider type is installed. +* `tlsCertificate` - (Optional) The value of the Transport Layer Security (TLS) certificate associated with the infrastructure where your provider type is installed. +* `vpcId` - (Required) The ID of the Amazon VPC connected to the infrastructure where your provider type is installed. 
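+
+The example above creates a host without VPC connectivity. When the provider infrastructure is reachable only from inside a VPC, add the `vpcConfiguration` block described above; a minimal sketch (all IDs are illustrative placeholders):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CodestarconnectionsHost } from "./.gen/providers/aws/codestarconnections-host";
+
+class VpcHostSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CodestarconnectionsHost(this, "example_vpc", {
+      name: "example-vpc-host",
+      providerEndpoint: "https://example.com",
+      providerType: "GitHubEnterpriseServer",
+      vpcConfiguration: {
+        // The host reaches the GitHub Enterprise Server instance through this VPC.
+        securityGroupIds: ["sg-0123456789abcdef0"],
+        subnetIds: ["subnet-0123456789abcdef0"],
+        vpcId: "vpc-0123456789abcdef0",
+      },
+    });
+  }
+}
+```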
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CodeStar Host ARN.
+* `arn` - The CodeStar Host ARN.
+* `status` - The CodeStar Host status. Possible values are `PENDING`, `AVAILABLE`, `VPC_CONFIG_DELETING`, `VPC_CONFIG_INITIALIZING`, and `VPC_CONFIG_FAILED_INITIALIZATION`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeStar Host using the ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CodeStar Host using the ARN. For example:
+
+```console
+% terraform import aws_codestarconnections_host.example-host arn:aws:codestar-connections:us-west-1:0123456789:host/79d4d357-a2ee-41e4-b350-2fe39ae59448
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/codestarnotifications_notification_rule.markdown b/website/docs/cdktf/typescript/r/codestarnotifications_notification_rule.markdown
new file mode 100644
index 00000000000..f820066e0be
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/codestarnotifications_notification_rule.markdown
@@ -0,0 +1,121 @@
+---
+subcategory: "CodeStar Notifications"
+layout: "aws"
+page_title: "AWS: aws_codestarnotifications_notification_rule"
+description: |-
+  Provides a CodeStar Notifications Rule
+---
+
+
+# Resource: aws_codestarnotifications_notification_rule
+
+Provides a CodeStar Notifications Rule.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CodecommitRepository } from "./.gen/providers/aws/codecommit-repository";
+import { CodestarnotificationsNotificationRule } from "./.gen/providers/aws/codestarnotifications-notification-rule";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+import { SnsTopicPolicy } from "./.gen/providers/aws/sns-topic-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const code = new CodecommitRepository(this, "code", {
+      repositoryName: "example-code-repo",
+    });
+    const notif = new SnsTopic(this, "notif", {
+      name: "notification",
+    });
+    const notifAccess = new DataAwsIamPolicyDocument(this, "notif_access", {
+      statement: [
+        {
+          actions: ["sns:Publish"],
+          principals: [
+            {
+              identifiers: ["codestar-notifications.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+          resources: [notif.arn],
+        },
+      ],
+    });
+    new CodestarnotificationsNotificationRule(this, "commits", {
+      detailType: "BASIC",
+      eventTypeIds: ["codecommit-repository-comments-on-commits"],
+      name: "example-code-repo-commits",
+      resource: code.arn,
+      target: [
+        {
+          address: notif.arn,
+        },
+      ],
+    });
+    new SnsTopicPolicy(this, "default", {
+      arn: notif.arn,
+      policy: Token.asString(notifAccess.json),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `detailType` - (Required) The level of detail to include in the notifications for this resource. Possible values are `basic` and `full`.
+* `eventTypeIds` - (Required) A list of event types associated with this notification rule.
+  For a list of allowed events, see [here](https://docs.aws.amazon.com/codestar-notifications/latest/userguide/concepts.html#concepts-api).
+* `name` - (Required) The name of the notification rule.
+* `resource` - (Required) The ARN of the resource to associate with the notification rule.
+* `status` - (Optional) The status of the notification rule. Possible values are `enabled` and `disabled`; the default is `enabled`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `target` - (Optional) Configuration blocks containing notification target information. Can be specified multiple times. At least one target must be specified on creation.
+
+A `target` block supports the following arguments:
+
+* `address` - (Required) The ARN of the notification rule target. For example, an SNS topic ARN.
+* `type` - (Optional) The type of the notification target. Default value is `sns`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CodeStar notification rule ARN.
+* `arn` - The CodeStar notification rule ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CodeStar notification rule using the ARN.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CodeStar notification rule using the ARN. For example: + +```console +% terraform import aws_codestarnotifications_notification_rule.foo arn:aws:codestar-notifications:us-west-1:0123456789:notificationrule/2cdc68a3-8f7c-4893-b6a5-45b362bd4f2b +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_identity_pool.markdown b/website/docs/cdktf/typescript/r/cognito_identity_pool.markdown new file mode 100644 index 00000000000..bc59442ad14 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_identity_pool.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "Cognito Identity" +layout: "aws" +page_title: "AWS: aws_cognito_identity_pool" +description: |- + Provides an AWS Cognito Identity Pool. +--- + + + +# Resource: aws_cognito_identity_pool + +Provides an AWS Cognito Identity Pool. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoIdentityPool } from "./.gen/providers/aws/cognito-identity-pool"; +import { IamSamlProvider } from "./.gen/providers/aws/iam-saml-provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new IamSamlProvider(this, "default", { + name: "my-saml-provider", + samlMetadataDocument: Token.asString(Fn.file("saml-metadata.xml")), + }); + new CognitoIdentityPool(this, "main", { + allowClassicFlow: false, + allowUnauthenticatedIdentities: false, + cognitoIdentityProviders: [ + { + clientId: "6lhlkkfbfb4q5kpp90urffae", + providerName: + "cognito-idp.us-east-1.amazonaws.com/us-east-1_Tv0493apJ", + serverSideTokenCheck: false, + }, + { + clientId: "7kodkvfqfb4qfkp39eurffae", + providerName: + "cognito-idp.us-east-1.amazonaws.com/eu-west-1_Zr231apJu", + serverSideTokenCheck: false, + }, + ], + identityPoolName: "identity pool", + openidConnectProviderArns: [ + "arn:aws:iam::123456789012:oidc-provider/id.example.com", + ], + samlProviderArns: [defaultVar.arn], + supportedLoginProviders: { + "accounts.google.com": "123456789012.apps.googleusercontent.com", + "graph.facebook.com": "7346241598935552", + }, + }); + } +} + +``` + +## Argument Reference + +The Cognito Identity Pool argument layout is a structure composed of several sub-resources - these resources are laid out below. + +* `identityPoolName` (Required) - The Cognito Identity Pool name. +* `allowUnauthenticatedIdentities` (Required) - Whether the identity pool supports unauthenticated logins or not. +* `allowClassicFlow` (Optional) - Enables or disables the classic / basic authentication flow. Default is `false`. +* `developerProviderName` (Optional) - The "domain" by which Cognito will refer to your users. This name acts as a placeholder that allows your +backend and the Cognito service to communicate about the developer provider. 
+* `cognitoIdentityProviders` (Optional) - An array of [Amazon Cognito Identity user pools](#cognito-identity-providers) and their client IDs.
+* `openidConnectProviderArns` (Optional) - Set of OpenID Connect provider ARNs.
+* `samlProviderArns` (Optional) - An array of Amazon Resource Names (ARNs) of the SAML provider for your identity.
+* `supportedLoginProviders` (Optional) - Key-value pairs mapping provider names to provider app IDs.
+* `tags` - (Optional) A map of tags to assign to the Identity Pool. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Cognito Identity Providers
+
+* `clientId` (Optional) - The client ID for the Amazon Cognito Identity User Pool.
+* `providerName` (Optional) - The provider name for an Amazon Cognito Identity User Pool.
+* `serverSideTokenCheck` (Optional) - Whether server-side token validation is enabled for the identity provider’s token or not.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - An identity pool ID, e.g. `usWest2:1A2345678901234B5CdeF6789G01H2I3`.
+* `arn` - The ARN of the identity pool.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Identity Pool using its ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cognito Identity Pool using its ID. For example:
+
+```console
+% terraform import aws_cognito_identity_pool.mypool us-west-2:1a234567-8901-234b-5cde-f6789g01h2i3
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cognito_identity_pool_provider_principal_tag.markdown b/website/docs/cdktf/typescript/r/cognito_identity_pool_provider_principal_tag.markdown
new file mode 100644
index 00000000000..0a0a492e2f6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cognito_identity_pool_provider_principal_tag.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "Cognito Identity"
+layout: "aws"
+page_title: "AWS: aws_cognito_identity_pool_provider_principal_tag"
+description: |-
+  Provides an AWS Cognito Identity Principal Mapping.
+---
+
+
+
+# Resource: aws_cognito_identity_pool_provider_principal_tag
+
+Provides an AWS Cognito Identity Principal Mapping.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CognitoIdentityPool } from "./.gen/providers/aws/cognito-identity-pool";
+import { CognitoIdentityPoolProviderPrincipalTag } from "./.gen/providers/aws/cognito-identity-pool-provider-principal-tag";
+import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
+import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CognitoUserPool(this, "example", {
+      autoVerifiedAttributes: ["email"],
+      name: "user pool",
+    });
+    const awsCognitoUserPoolClientExample = new CognitoUserPoolClient(
+      this,
+      "example_1",
+      {
+        name: "client",
+        supportedIdentityProviders: Token.asList(
+          Fn.compact(Token.asList(["COGNITO"]))
+        ),
+        userPoolId: example.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCognitoUserPoolClientExample.overrideLogicalId("example");
+    const awsCognitoIdentityPoolExample = new CognitoIdentityPool(
+      this,
+      "example_2",
+      {
+        allowUnauthenticatedIdentities: false,
+        cognitoIdentityProviders: [
+          {
+            clientId: Token.asString(awsCognitoUserPoolClientExample.id),
+            providerName: example.endpoint,
+            serverSideTokenCheck: false,
+          },
+        ],
+        identityPoolName: "identity pool",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCognitoIdentityPoolExample.overrideLogicalId("example");
+    const awsCognitoIdentityPoolProviderPrincipalTagExample =
+      new CognitoIdentityPoolProviderPrincipalTag(this, "example_3", {
+        identityPoolId: Token.asString(awsCognitoIdentityPoolExample.id),
+        identityProviderName: example.endpoint,
+        principalTags: {
+          test: "value",
+        },
+        useDefaults: false,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCognitoIdentityPoolProviderPrincipalTagExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `identityPoolId` (Required) - An identity pool ID.
+* `identityProviderName` (Required) - The name of the identity provider.
+* `principalTags`: (Optional: []) - String-to-string map of variables.
+* `useDefaults`: (Optional: true) - Use default (username and clientID) attribute mappings.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Identity Pool Provider Principal Tags using the Identity Pool ID and provider name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cognito Identity Pool Provider Principal Tags using the Identity Pool ID and provider name.
For example: + +```console +% terraform import aws_cognito_identity_pool_provider_principal_tag.example us-west-2_abc123:CorpAD +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_identity_pool_roles_attachment.markdown b/website/docs/cdktf/typescript/r/cognito_identity_pool_roles_attachment.markdown new file mode 100644 index 00000000000..fcc4582c340 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_identity_pool_roles_attachment.markdown @@ -0,0 +1,179 @@ +--- +subcategory: "Cognito Identity" +layout: "aws" +page_title: "AWS: aws_cognito_identity_pool_roles_attachment" +description: |- + Provides an AWS Cognito Identity Pool Roles Attachment. +--- + + + +# Resource: aws_cognito_identity_pool_roles_attachment + +Provides an AWS Cognito Identity Pool Roles Attachment. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoIdentityPool } from "./.gen/providers/aws/cognito-identity-pool"; +import { CognitoIdentityPoolRolesAttachment } from "./.gen/providers/aws/cognito-identity-pool-roles-attachment"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new CognitoIdentityPool(this, "main", { + allowUnauthenticatedIdentities: false, + identityPoolName: "identity pool", + supportedLoginProviders: { + "graph.facebook.com": "7346241598935555", + }, + }); + const authenticated = new DataAwsIamPolicyDocument(this, "authenticated", { + statement: [ + { + actions: ["sts:AssumeRoleWithWebIdentity"], + condition: [ + { + test: "StringEquals", + values: [main.id], + variable: "cognito-identity.amazonaws.com:aud", + }, + { + test: "ForAnyValue:StringLike", + values: ["authenticated"], + variable: "cognito-identity.amazonaws.com:amr", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["cognito-identity.amazonaws.com"], + type: "Federated", + }, + ], + }, + ], + }); + const authenticatedRolePolicy = new DataAwsIamPolicyDocument( + this, + "authenticated_role_policy", + { + statement: [ + { + actions: [ + "mobileanalytics:PutEvents", + "cognito-sync:*", + "cognito-identity:*", + ], + effect: "Allow", + resources: ["*"], + }, + ], + } + ); + const awsIamRoleAuthenticated = new IamRole(this, "authenticated_3", { + assumeRolePolicy: Token.asString(authenticated.json), + name: "cognito_authenticated", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleAuthenticated.overrideLogicalId("authenticated"); + const awsIamRolePolicyAuthenticated = new IamRolePolicy( + this, + "authenticated_4", + { + name: "authenticated_policy", + policy: Token.asString(authenticatedRolePolicy.json), + role: Token.asString(awsIamRoleAuthenticated.id), + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyAuthenticated.overrideLogicalId("authenticated");
+    const awsCognitoIdentityPoolRolesAttachmentMain =
+      new CognitoIdentityPoolRolesAttachment(this, "main_5", {
+        identityPoolId: main.id,
+        roleMapping: [
+          {
+            ambiguousRoleResolution: "AuthenticatedRole",
+            identityProvider: "graph.facebook.com",
+            mappingRule: [
+              {
+                claim: "isAdmin",
+                matchType: "Equals",
+                roleArn: Token.asString(awsIamRoleAuthenticated.arn),
+                value: "paid",
+              },
+            ],
+            type: "Rules",
+          },
+        ],
+        roles: {
+          authenticated: Token.asString(awsIamRoleAuthenticated.arn),
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCognitoIdentityPoolRolesAttachmentMain.overrideLogicalId("main");
+  }
+}
+
+```
+
+## Argument Reference
+
+The Cognito Identity Pool Roles Attachment argument layout is a structure composed of several sub-resources - these resources are laid out below.
+
+* `identityPoolId` (Required) - An identity pool ID in the format `REGION:GUID`.
+* `roleMapping` (Optional) - A list of [Role Mappings](#role-mappings).
+* `roles` (Required) - The map of roles associated with this pool. For a given role, the key will be either "authenticated" or "unauthenticated" and the value will be the Role ARN.
+
+#### Role Mappings
+
+* `identityProvider` (Required) - A string identifying the identity provider, for example, "graph.facebook.com" or "cognito-idp.us-east-1.amazonaws.com/us-east-1_abcdefghi:app_client_id". Depends on the `cognitoIdentityProviders` set on the `awsCognitoIdentityPool` resource or an `awsCognitoIdentityProvider` resource.
+* `ambiguousRoleResolution` (Optional) - Specifies the action to be taken if either no rules match the claim value for the Rules type, or there is no cognito:preferred_role claim and there are multiple cognito:roles matches for the Token type. Required if you specify `Token` or `Rules` as the `type`.
+* `mappingRule` (Optional) - The [Rules Configuration](#rules-configuration) to be used for mapping users to roles. You can specify up to 25 rules per identity provider. Rules are evaluated in order. The first one to match specifies the role.
+* `type` (Required) - The role mapping type.
+
+#### Rules Configuration
+
+* `claim` (Required) - The claim name that must be present in the token, for example, "isAdmin" or "paid".
+* `matchType` (Required) - The match condition that specifies how closely the claim value in the IdP token must match `value`.
+* `roleArn` (Required) - The role ARN.
+* `value` (Required) - A brief string that the claim must match, for example, "paid" or "yes".
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identity pool ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Identity Pool Roles Attachment using the Identity Pool ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cognito Identity Pool Roles Attachment using the Identity Pool ID.
For example: + +```console +% terraform import aws_cognito_identity_pool_roles_attachment.example us-west-2:b64805ad-cb56-40ba-9ffc-f5d8207e6d42 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_identity_provider.html.markdown b/website/docs/cdktf/typescript/r/cognito_identity_provider.html.markdown new file mode 100644 index 00000000000..3dcfa9bdcf5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_identity_provider.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_identity_provider" +side_bar_current: "docs-aws-resource-cognito-identity-provider" +description: |- + Provides a Cognito User Identity Provider resource. +--- + + + +# Resource: aws_cognito_identity_provider + +Provides a Cognito User Identity Provider resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoIdentityProvider } from "./.gen/providers/aws/cognito-identity-provider"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoUserPool(this, "example", { + autoVerifiedAttributes: ["email"], + name: "example-pool", + }); + new CognitoIdentityProvider(this, "example_provider", { + attributeMapping: { + email: "email", + username: "sub", + }, + providerDetails: { + authorize_scopes: "email", + client_id: "your client_id", + client_secret: "your client_secret", + }, + providerName: "Google", + providerType: "Google", + userPoolId: example.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `userPoolId` (Required) - The user pool id +* `providerName` (Required) - The provider name +* `providerType` (Required) - The provider type. [See AWS API for valid values](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_CreateIdentityProvider.html#CognitoUserPools-CreateIdentityProvider-request-ProviderType) +* `attributeMapping` (Optional) - The map of attribute mapping of user pool attributes. [AttributeMapping in AWS API documentation](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_CreateIdentityProvider.html#CognitoUserPools-CreateIdentityProvider-request-AttributeMapping) +* `idpIdentifiers` (Optional) - The list of identity providers. +* `providerDetails` (Optional) - The map of identity details, such as access token + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsCognitoIdentityProvider` resources using their User Pool ID and Provider Name. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsCognitoIdentityProvider` resources using their User Pool ID and Provider Name. For example:
+
+```console
+% terraform import aws_cognito_identity_provider.example us-west-2_abc123:CorpAD
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cognito_managed_user_pool_client.html.markdown b/website/docs/cdktf/typescript/r/cognito_managed_user_pool_client.html.markdown
new file mode 100644
index 00000000000..1b1a37a7802
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cognito_managed_user_pool_client.html.markdown
@@ -0,0 +1,206 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_managed_user_pool_client"
+description: |-
+  Use the `awsCognitoManagedUserPoolClient` resource to manage a Cognito User Pool Client. This resource is created by another service.
+---
+
+
+
+# Resource: aws_cognito_managed_user_pool_client
+
+Use the `awsCognitoManagedUserPoolClient` resource to manage a Cognito User Pool Client.
+
+**This resource is advanced** and has special caveats to consider before use. Please read this document completely before using the resource.
+
+Use the `awsCognitoManagedUserPoolClient` resource to manage a Cognito User Pool Client that is automatically created by an AWS service. For instance, when [configuring an OpenSearch Domain to use Cognito authentication](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/cognito-auth.html), the OpenSearch service creates the User Pool Client during setup and removes it when it is no longer required. As a result, the `awsCognitoManagedUserPoolClient` resource does not create or delete this resource, but instead assumes management of it.
+
+Use the [`awsCognitoUserPoolClient`](cognito_user_pool_client.html) resource to manage Cognito User Pool Clients for normal use cases.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { CognitoIdentityPool } from "./.gen/providers/aws/cognito-identity-pool"; +import { CognitoManagedUserPoolClient } from "./.gen/providers/aws/cognito-managed-user-pool-client"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoIdentityPool(this, "example", { + identityPoolName: "example", + lifecycle: { + ignoreChanges: [cognitoIdentityProviders], + }, + }); + const awsCognitoUserPoolExample = new CognitoUserPool(this, "example_1", { + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolExample.overrideLogicalId("example"); + const current = new DataAwsPartition(this, "current", {}); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_3", + { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["es.${" + current.dnsSuffix + "}"], + type: "Service", + }, + ], + sid: "", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsIamRoleExample = new IamRole(this, "example_4", { + assumeRolePolicy: Token.asString(dataAwsIamPolicyDocumentExample.json), + name: "example-role", + path: "/service-role/", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + const awsIamRolePolicyAttachmentExample = new IamRolePolicyAttachment( + this, + "example_5", + { + policyArn: + "arn:${" + + current.partition + + "}:iam::aws:policy/AmazonESCognitoAccess", + role: Token.asString(awsIamRoleExample.name), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentExample.overrideLogicalId("example"); + const awsOpensearchDomainExample = new OpensearchDomain(this, "example_6", { + cognitoOptions: { + enabled: true, + identityPoolId: example.id, + roleArn: Token.asString(awsIamRoleExample.arn), + userPoolId: Token.asString(awsCognitoUserPoolExample.id), + }, + dependsOn: [ + awsCognitoUserPoolDomainExample, + awsIamRolePolicyAttachmentExample, + ], + domainName: "example", + ebsOptions: { + ebsEnabled: true, + volumeSize: 10, + }, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsOpensearchDomainExample.overrideLogicalId("example"); + const awsCognitoManagedUserPoolClientExample = + new CognitoManagedUserPoolClient(this, "example_7", { + dependsOn: [awsOpensearchDomainExample], + namePrefix: "AmazonOpenSearchService-example", + userPoolId: Token.asString(awsCognitoUserPoolExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoManagedUserPoolClientExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `userPoolId` - (Required) User pool that the client belongs to. +* `namePattern` - (Required, one of `namePattern` or `namePrefix`) Regular expression that matches the name of the desired User Pool Client. It must only match one User Pool Client. +* `namePrefix` - (Required, one of `namePrefix` or `namePattern`) String that matches the beginning of the name of the desired User Pool Client. It must match only one User Pool Client. + +The following arguments are optional: + +* `accessTokenValidity` - (Optional) Time limit, between 5 minutes and 1 day, after which the access token is no longer valid and cannot be used. By default, the unit is hours. The unit can be overridden by a value in `tokenValidityUnitsAccessToken`. +* `allowedOauthFlowsUserPoolClient` - (Optional) Whether the client is allowed to use the OAuth protocol when interacting with Cognito user pools. +* `allowedOauthFlows` - (Optional) List of allowed OAuth flows, including code, implicit, and client_credentials. +* `allowedOauthScopes` - (Optional) List of allowed OAuth scopes, including phone, email, openid, profile, and aws.cognito.signin.user.admin. +* `analyticsConfiguration` - (Optional) Configuration block for Amazon Pinpoint analytics that collects metrics for this user pool. See [details below](#analytics_configuration). +* `authSessionValidity` - (Optional) Duration, in minutes, of the session token created by Amazon Cognito for each API request in an authentication flow. The session token must be responded to by the native user of the user pool before it expires. Valid values for `authSessionValidity` are between `3` and `15`, with a default value of `3`. +* `callbackUrls` - (Optional) List of allowed callback URLs for the identity providers. +* `defaultRedirectUri` - (Optional) Default redirect URI and must be included in the list of callback URLs. +* `enableTokenRevocation` - (Optional) Enables or disables token revocation. +* `enablePropagateAdditionalUserContextData` - (Optional) Enables the propagation of additional user context data. +* `explicitAuthFlows` - (Optional) List of authentication flows. The available options include ADMIN_NO_SRP_AUTH, CUSTOM_AUTH_FLOW_ONLY, USER_PASSWORD_AUTH, ALLOW_ADMIN_USER_PASSWORD_AUTH, ALLOW_CUSTOM_AUTH, ALLOW_USER_PASSWORD_AUTH, ALLOW_USER_SRP_AUTH, and ALLOW_REFRESH_TOKEN_AUTH. +* `generateSecret` - (Optional) Boolean flag indicating whether an application secret should be generated. +* `idTokenValidity` - (Optional) Time limit, between 5 minutes and 1 day, after which the ID token is no longer valid and cannot be used. By default, the unit is hours. The unit can be overridden by a value in `tokenValidityUnitsIdToken`. +* `logoutUrls` - (Optional) List of allowed logout URLs for the identity providers. 
+* `preventUserExistenceErrors` - (Optional) Setting that determines the errors and responses returned by Cognito APIs when a user does not exist in the user pool during authentication, account confirmation, and password recovery.
+* `readAttributes` - (Optional) List of user pool attributes that the application client can read from.
+* `refreshTokenValidity` - (Optional) Time limit, between 60 minutes and 10 years, after which the refresh token is no longer valid and cannot be used. By default, the unit is days. The unit can be overridden by a value in `tokenValidityUnitsRefreshToken`.
+* `supportedIdentityProviders` - (Optional) List of provider names for the identity providers that are supported on this client. It uses the `providerName` attribute of the `awsCognitoIdentityProvider` resource(s), or the equivalent string(s).
+* `tokenValidityUnits` - (Optional) Configuration block for representing the validity times in units. [Detailed below](#token_validity_units).
+* `writeAttributes` - (Optional) List of user pool attributes that the application client can write to.
+
+### analytics_configuration
+
+Either `applicationArn` or `applicationId` is required for this configuration block.
+
+* `applicationArn` - (Optional) Application ARN for an Amazon Pinpoint application. It conflicts with `externalId` and `roleArn`.
+* `applicationId` - (Optional) Unique identifier for an Amazon Pinpoint application.
+* `externalId` - (Optional) ID for the Analytics Configuration. It conflicts with `applicationArn`.
+* `roleArn` - (Optional) ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics. It conflicts with `applicationArn`.
+* `userDataShared` - (Optional) If `userDataShared` is set to `true`, Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
+
+### token_validity_units
+
+Valid values for the following arguments are: `seconds`, `minutes`, `hours`, or `days`.
+
+* `accessToken` - (Optional) Time unit for the value in `accessTokenValidity` and defaults to `hours`.
+* `idToken` - (Optional) Time unit for the value in `idTokenValidity` and defaults to `hours`.
+* `refreshToken` - (Optional) Time unit for the value in `refreshTokenValidity` and defaults to `days`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `clientSecret` - Client secret of the user pool client.
+* `id` - Unique identifier for the user pool client.
+* `name` - Name of the user pool client.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool Clients using the `id` of the Cognito User Pool and the `id` of the Cognito User Pool Client. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cognito User Pool Clients using the `id` of the Cognito User Pool and the `id` of the Cognito User Pool Client.
For example: + +```console +% terraform import aws_cognito_managed_user_pool_client.client us-west-2_abc123/3ho4ek12345678909nh3fmhpko +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_resource_server.html.markdown b/website/docs/cdktf/typescript/r/cognito_resource_server.html.markdown new file mode 100644 index 00000000000..b76d0b6200c --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_resource_server.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_resource_server" +side_bar_current: "docs-aws-resource-cognito-resource-server" +description: |- + Provides a Cognito Resource Server. +--- + + + +# Resource: aws_cognito_resource_server + +Provides a Cognito Resource Server. + +## Example Usage + +### Create a basic resource server + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoResourceServer } from "./.gen/providers/aws/cognito-resource-server"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const pool = new CognitoUserPool(this, "pool", { + name: "pool", + }); + new CognitoResourceServer(this, "resource", { + identifier: "https://example.com", + name: "example", + userPoolId: pool.id, + }); + } +} + +``` + +### Create a resource server with sample-scope + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoResourceServer } from "./.gen/providers/aws/cognito-resource-server"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const pool = new CognitoUserPool(this, "pool", { + name: "pool", + }); + new CognitoResourceServer(this, "resource", { + identifier: "https://example.com", + name: "example", + scope: [ + { + scopeDescription: "a Sample Scope Description", + scopeName: "sample-scope", + }, + ], + userPoolId: pool.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `identifier` - (Required) An identifier for the resource server. +* `name` - (Required) A name for the resource server. +* `scope` - (Optional) A list of [Authorization Scope](#authorization-scope). + +### Authorization Scope + +* `scopeName` - (Required) The scope name. +* `scopeDescription` - (Required) The scope description. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `scopeIdentifiers` - A list of all scopes configured for this resource server in the format identifier/scope_name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsCognitoResourceServer` using their User Pool ID and Identifier. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsCognitoResourceServer` using their User Pool ID and Identifier. For example: + +```console +% terraform import aws_cognito_resource_server.example "us-west-2_abc123|https://example.com" +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_risk_configuration.html.markdown b/website/docs/cdktf/typescript/r/cognito_risk_configuration.html.markdown new file mode 100644 index 00000000000..384eb8fba6d --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_risk_configuration.html.markdown @@ -0,0 +1,151 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_risk_configuration" +description: |- + Provides a Cognito Risk Configuration resource. +--- + + + +# Resource: aws_cognito_risk_configuration + +Provides a Cognito Risk Configuration resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoRiskConfiguration } from "./.gen/providers/aws/cognito-risk-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CognitoRiskConfiguration(this, "example", { + riskExceptionConfiguration: { + blockedIpRangeList: ["10.10.10.10/32"], + }, + userPoolId: Token.asString(awsCognitoUserPoolExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `userPoolId` - (Required) The user pool ID. +* `clientId` - (Optional) The app client ID. When the client ID is not provided, the same risk configuration is applied to all the clients in the User Pool. +* `accountTakeoverRiskConfiguration` - (Optional) The account takeover risk configuration. See details below. +* `compromisedCredentialsRiskConfiguration` - (Optional) The compromised credentials risk configuration. See details below. +* `riskExceptionConfiguration` - (Optional) The configuration to override the risk decision. See details below. + +### account_takeover_risk_configuration + +* `notifyConfiguration` - (Required) The notify configuration used to construct email notifications. See details below. +* `actions` - (Required) Account takeover risk configuration actions. See details below. + +#### notify_configuration + +* `blockEmail` - (Optional) Email template used when a detected risk event is blocked. See notify email type below. +* `mfaEmail` - (Optional) The multi-factor authentication (MFA) email template used when MFA is challenged as part of a detected risk. See notify email type below. +* `noActionEmail` - (Optional) The email template used when a detected risk event is allowed. See notify email type below. +* `from` - (Optional) The email address that is sending the email. The address must be either individually verified with Amazon Simple Email Service, or from a domain that has been verified with Amazon SES. 
+* `replyTo` - (Optional) The destination to which the recipient of the email should reply.
+* `sourceArn` - (Required) The Amazon Resource Name (ARN) of the identity that is associated with the sending authorization policy. This identity permits Amazon Cognito to send for the email address specified in the `from` parameter.
+
+##### notify email type
+
+* `htmlBody` - (Required) The email HTML body.
+* `subject` - (Required) The email subject.
+* `textBody` - (Required) The email text body.
+
+#### actions
+
+* `highAction` - (Optional) Action to take for a high risk. See action block below.
+* `lowAction` - (Optional) Action to take for a low risk. See action block below.
+* `mediumAction` - (Optional) Action to take for a medium risk. See action block below.
+
+##### action
+
+* `eventAction` - (Required) The action to take in response to the account takeover action. Valid values are `block`, `mfaIfConfigured`, `mfaRequired` and `noAction`.
+* `notify` - (Required) Whether to send a notification.
+
+### compromised_credentials_risk_configuration
+
+* `eventFilter` - (Optional) Perform the action for these events. By default, the action is performed for all events if no event filter is specified. Valid values are `signIn`, `passwordChange`, and `signUp`.
+* `actions` - (Required) The compromised credentials risk configuration actions. See details below.
+
+#### actions
+
+* `eventAction` - (Optional) The event action. Valid values are `block` or `noAction`.
+
+### risk_exception_configuration
+
+* `blockedIpRangeList` - (Optional) Overrides the risk decision to always block the pre-authentication requests.
+  The IP range is in CIDR notation, a compact representation of an IP address and its routing prefix.
+  Can contain a maximum of 200 items.
+* `skippedIpRangeList` - (Optional) Risk detection isn't performed on the IP addresses in this range list.
+  The IP range is in CIDR notation.
+  Can contain a maximum of 200 items.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The user pool ID, or the user pool ID and Client ID separated by a `:` if the configuration is client-specific.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito Risk Configurations using the user pool ID, or the user pool ID and Client ID separated by a `:`. For example:
+
+Import using the user pool ID:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import using the user pool ID and Client ID separated by a `:`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Cognito Risk Configurations using the user pool ID, or the user pool ID and Client ID separated by a `:`.
For example: + +Import using the user pool ID: + +```console +% terraform import aws_cognito_risk_configuration.main example +``` + +Import using the user pool ID and Client ID separated by a `:`: + +```console +% terraform import aws_cognito_risk_configuration.main example:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_user.html.markdown b/website/docs/cdktf/typescript/r/cognito_user.html.markdown new file mode 100644 index 00000000000..dded8329db2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_user.html.markdown @@ -0,0 +1,149 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user" +description: |- + Provides a Cognito User resource. +--- + + + +# Resource: aws_cognito_user + +Provides a Cognito User Resource. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUser } from "./.gen/providers/aws/cognito-user"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoUserPool(this, "example", { + name: "MyExamplePool", + }); + const awsCognitoUserExample = new CognitoUser(this, "example_1", { + userPoolId: example.id, + username: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserExample.overrideLogicalId("example"); + } +} + +``` + +### Setting user attributes + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUser } from "./.gen/providers/aws/cognito-user"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoUserPool(this, "example", { + name: "mypool", + schema: [ + { + attributeDataType: "Boolean", + developerOnlyAttribute: false, + mutable: false, + name: "terraform", + required: false, + }, + { + attributeDataType: "String", + developerOnlyAttribute: false, + mutable: false, + name: "foo", + required: false, + stringAttributeConstraints: {}, + }, + ], + }); + const awsCognitoUserExample = new CognitoUser(this, "example_1", { + attributes: { + email: "no-reply@hashicorp.com", + email_verified: Token.asString(true), + foo: "bar", + terraform: Token.asString(true), + }, + userPoolId: example.id, + username: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `userPoolId` - (Required) The user pool ID for the user pool where the user will be created. +* `username` - (Required) The username for the user. 
Must be unique within the user pool. Must be a UTF-8 string between 1 and 128 characters. After the user is created, the username cannot be changed.
+
+The following arguments are optional:
+
+* `attributes` - (Optional) A map that contains user attributes and attribute values to be set for the user.
+* `clientMetadata` - (Optional) A map of custom key-value pairs that you can provide as input for any custom workflows that user creation triggers. Amazon Cognito does not store the `clientMetadata` value. This data is available only to Lambda triggers that are assigned to a user pool to support custom workflows. If your user pool configuration does not include triggers, the ClientMetadata parameter serves no purpose. For more information, see [Customizing User Pool Workflows with Lambda Triggers](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html).
+* `desiredDeliveryMediums` - (Optional) A list of mediums through which the welcome message will be sent. Allowed values are `email` and `sms`. If it is provided, make sure you have also specified the `email` attribute for the `email` medium and `phoneNumber` for the `sms` medium. More than one value can be specified. Amazon Cognito does not store the `desiredDeliveryMediums` value. Defaults to `["sms"]`.
+* `enabled` - (Optional) Specifies whether the user should be enabled after creation. The welcome message will be sent regardless of the `enabled` value. The behavior can be changed with the `messageAction` argument. Defaults to `true`.
+* `forceAliasCreation` - (Optional) If this parameter is set to `true` and the `phoneNumber` or `email` address specified in the `attributes` parameter already exists as an alias with a different user, Amazon Cognito will migrate the alias from the previous user to the newly created user. The previous user will no longer be able to log in using that alias. Amazon Cognito does not store the `forceAliasCreation` value. Defaults to `false`.
+* `messageAction` - (Optional) Set to `resend` to resend the invitation message to a user that already exists and reset the expiration limit on the user's account. Set to `suppress` to suppress sending the message. Only one value can be specified. Amazon Cognito does not store the `messageAction` value.
+* `password` - (Optional) The user's permanent password. This password must conform to the password policy specified by the user pool the user belongs to. The welcome message always contains only the `temporaryPassword` value. You can suppress sending the welcome message with the `messageAction` argument. Amazon Cognito does not store the `password` value. Conflicts with `temporaryPassword`.
+* `temporaryPassword` - (Optional) The user's temporary password. Conflicts with `password`.
+* `validationData` - (Optional) The user's validation data. This is an array of name-value pairs that contain user attributes and attribute values that you can use for custom validation, such as restricting the types of user accounts that can be registered. Amazon Cognito does not store the `validationData` value. For more information, see [Customizing User Pool Workflows with Lambda Triggers](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html).
+
+~> **NOTE:** Clearing `password` or `temporaryPassword` does not reset the user's password in Cognito.
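+
+To make the interplay of these invitation-related arguments concrete, here is a short, hand-written sketch (not `cdktf convert` output, and not taken from the provider docs) that invites a user by email with a temporary password. The pool name, username, email address, and password are hypothetical placeholders.
+
+```typescript
+// Hand-written sketch: inviting a user by email with a temporary password.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CognitoUser } from "./.gen/providers/aws/cognito-user";
+import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
+class InvitedUserSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const pool = new CognitoUserPool(this, "pool", {
+      autoVerifiedAttributes: ["email"],
+      name: "invite-pool",
+    });
+    new CognitoUser(this, "invited", {
+      // The welcome message is delivered by email, so the email attribute must be set.
+      attributes: {
+        email: "invited-user@example.com",
+      },
+      // "EMAIL" is the AWS API value, rendered as `email` in the argument list above.
+      desiredDeliveryMediums: ["EMAIL"],
+      // Conflicts with password; the user must change it at first sign-in.
+      temporaryPassword: "TemporaryPassword123!",
+      userPoolId: pool.id,
+      username: "invited-user",
+    });
+  }
+}
+```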
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `status` - current user status. +* `sub` - unique user id that is never reassignable to another user. +* `mfaPreference` - user's settings regarding MFA settings and preferences. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User using the `userPoolId`/`name` attributes concatenated. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Cognito User using the `userPoolId`/`name` attributes concatenated. For example: + +```console +% terraform import aws_cognito_user.user us-east-1_vG78M4goG/user +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_user_group.html.markdown b/website/docs/cdktf/typescript/r/cognito_user_group.html.markdown new file mode 100644 index 00000000000..78a65fe5a41 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_user_group.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_group" +description: |- + Provides a Cognito User Group resource. +--- + + + +# Resource: aws_cognito_user_group + +Provides a Cognito User Group resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserGroup } from "./.gen/providers/aws/cognito-user-group"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new CognitoUserPool(this, "main", { + name: "identity pool", + }); + const groupRole = new DataAwsIamPolicyDocument(this, "group_role", { + statement: [ + { + actions: ["sts:AssumeRoleWithWebIdentity"], + condition: [ + { + test: "StringEquals", + values: ["us-east-1:12345678-dead-beef-cafe-123456790ab"], + variable: "cognito-identity.amazonaws.com:aud", + }, + { + test: "ForAnyValue:StringLike", + values: ["authenticated"], + variable: "cognito-identity.amazonaws.com:amr", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["cognito-identity.amazonaws.com"], + type: "Federated", + }, + ], + }, + ], + }); + const awsIamRoleGroupRole = new IamRole(this, "group_role_2", { + assumeRolePolicy: Token.asString(groupRole.json), + name: "user-group-role", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamRoleGroupRole.overrideLogicalId("group_role"); + const awsCognitoUserGroupMain = new CognitoUserGroup(this, "main_3", { + description: "Managed by Terraform", + name: "user-group", + precedence: 42, + roleArn: Token.asString(awsIamRoleGroupRole.arn), + userPoolId: main.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserGroupMain.overrideLogicalId("main"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the user group. +* `userPoolId` - (Required) The user pool ID. +* `description` - (Optional) The description of the user group. +* `precedence` - (Optional) The precedence of the user group. +* `roleArn` - (Optional) The ARN of the IAM role to be associated with the user group. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Groups using the `userPoolId`/`name` attributes concatenated. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Cognito User Groups using the `userPoolId`/`name` attributes concatenated. For example: + +```console +% terraform import aws_cognito_user_group.group us-east-1_vG78M4goG/user-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_user_in_group.html.markdown b/website/docs/cdktf/typescript/r/cognito_user_in_group.html.markdown new file mode 100644 index 00000000000..9e2e33ce53b --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_user_in_group.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_in_group" +description: |- + Adds the specified user to the specified group. +--- + + + +# Resource: aws_cognito_user_in_group + +Adds the specified user to the specified group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { CognitoUser } from "./.gen/providers/aws/cognito-user"; +import { CognitoUserGroup } from "./.gen/providers/aws/cognito-user-group"; +import { CognitoUserInGroup } from "./.gen/providers/aws/cognito-user-in-group"; +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoUserPool(this, "example", { + name: "example", + passwordPolicy: { + minimumLength: 6, + requireNumbers: false, + requireSymbols: false, + requireUppercase: false, + temporaryPasswordValidityDays: 7, + }, + }); + const awsCognitoUserExample = new CognitoUser(this, "example_1", { + userPoolId: example.id, + username: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserExample.overrideLogicalId("example"); + const awsCognitoUserGroupExample = new CognitoUserGroup(this, "example_2", { + name: "example", + userPoolId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserGroupExample.overrideLogicalId("example"); + const awsCognitoUserInGroupExample = new CognitoUserInGroup( + this, + "example_3", + { + groupName: Token.asString(awsCognitoUserGroupExample.name), + userPoolId: example.id, + username: Token.asString(awsCognitoUserExample.username), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserInGroupExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `userPoolId` - (Required) The user pool ID of the user and group. +* `groupName` - (Required) The name of the group to which the user is to be added. +* `username` - (Required) The username of the user to be added to the group. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_user_pool.markdown b/website/docs/cdktf/typescript/r/cognito_user_pool.markdown new file mode 100644 index 00000000000..19dd870dd30 --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_user_pool.markdown @@ -0,0 +1,336 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_pool" +description: |- + Provides a Cognito User Pool resource. +--- + + + +# Resource: aws_cognito_user_pool + +Provides a Cognito User Pool resource. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CognitoUserPool(this, "pool", { + name: "mypool", + }); + } +} + +``` + +### Enabling SMS and Software Token Multi-Factor Authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +interface MyConfig { + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new CognitoUserPool(this, "example", { + mfaConfiguration: "ON", + smsAuthenticationMessage: "Your code is {####}", + smsConfiguration: { + externalId: "example", + snsCallerArn: Token.asString(awsIamRoleExample.arn), + snsRegion: "us-east-1", + }, + softwareTokenMfaConfiguration: { + enabled: true, + }, + name: config.name, + }); + } +} + +``` + +### Using Account Recovery Setting + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new CognitoUserPool(this, "test", { + accountRecoverySetting: { + recoveryMechanism: [ + { + name: "verified_email", + priority: 1, + }, + { + name: "verified_phone_number", + priority: 2, + }, + ], + }, + name: "mypool", + }); + } +} + +``` + +## Argument Reference + +The following argument is required: + +* `name` - (Required) Name of the user pool. + +The following arguments are optional: + +* `accountRecoverySetting` - (Optional) Configuration block to define which verified available method a user can use to recover their forgotten password. [Detailed below](#account_recovery_setting). +* `adminCreateUserConfig` - (Optional) Configuration block for creating a new user profile. [Detailed below](#admin_create_user_config). +* `aliasAttributes` - (Optional) Attributes supported as an alias for this user pool. Valid values: `phoneNumber`, `email`, or `preferredUsername`. Conflicts with `usernameAttributes`. +* `autoVerifiedAttributes` - (Optional) Attributes to be auto-verified. Valid values: `email`, `phoneNumber`. +* `deletionProtection` - (Optional) When active, DeletionProtection prevents accidental deletion of your user pool. Before you can delete a user pool that you have protected against deletion, you must deactivate this feature. Valid values are `active` and `inactive`, Default value is `inactive`. +* `deviceConfiguration` - (Optional) Configuration block for the user pool's device tracking. [Detailed below](#device_configuration). +* `emailConfiguration` - (Optional) Configuration block for configuring email. [Detailed below](#email_configuration). +* `emailVerificationMessage` - (Optional) String representing the email verification message. 
Conflicts with `verificationMessageTemplate` configuration block `emailMessage` argument.
+* `emailVerificationSubject` - (Optional) String representing the email verification subject. Conflicts with `verificationMessageTemplate` configuration block `emailSubject` argument.
+* `lambdaConfig` - (Optional) Configuration block for the AWS Lambda triggers associated with the user pool. [Detailed below](#lambda_config).
+* `mfaConfiguration` - (Optional) Multi-Factor Authentication (MFA) configuration for the User Pool. Defaults to `off`. Valid values are `off` (MFA tokens are not required), `on` (MFA is required for all users to sign in; requires at least one of `smsConfiguration` or `softwareTokenMfaConfiguration` to be configured), or `optional` (MFA will be required only for individual users who have MFA enabled; requires at least one of `smsConfiguration` or `softwareTokenMfaConfiguration` to be configured).
+* `passwordPolicy` - (Optional) Configuration block for information about the user pool password policy. [Detailed below](#password_policy).
+* `schema` - (Optional) Configuration block for the schema attributes of a user pool. [Detailed below](#schema). Schema attributes from the [standard attribute set](https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-settings-attributes.html#cognito-user-pools-standard-attributes) only need to be specified if they are different from the default configuration. Attributes can be added, but not modified or removed. Maximum of 50 attributes.
+* `smsAuthenticationMessage` - (Optional) String representing the SMS authentication message. The message must contain the `{####}` placeholder, which will be replaced with the code.
+* `smsConfiguration` - (Optional) Configuration block for Short Message Service (SMS) settings. [Detailed below](#sms_configuration). These settings apply to SMS user verification and SMS Multi-Factor Authentication (MFA). Due to Cognito API restrictions, the SMS configuration cannot be removed without recreating the Cognito User Pool. For user data safety, this resource will ignore the removal of this configuration by disabling drift detection. To force resource recreation after this configuration has been applied, see the [`taint` command](https://www.terraform.io/docs/commands/taint.html).
+* `smsVerificationMessage` - (Optional) String representing the SMS verification message. Conflicts with `verificationMessageTemplate` configuration block `smsMessage` argument.
+* `softwareTokenMfaConfiguration` - (Optional) Configuration block for software token Multi-Factor Authentication (MFA) settings. [Detailed below](#software_token_mfa_configuration).
+* `tags` - (Optional) Map of tags to assign to the User Pool. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `userAttributeUpdateSettings` - (Optional) Configuration block for user attribute update settings. [Detailed below](#user_attribute_update_settings).
+* `userPoolAddOns` - (Optional) Configuration block for user pool add-ons to enable user pool advanced security mode features. [Detailed below](#user_pool_add_ons).
+* `usernameAttributes` - (Optional) Whether email addresses or phone numbers can be specified as usernames when a user signs up. Conflicts with `aliasAttributes`.
+* `usernameConfiguration` - (Optional) Configuration block for username configuration. [Detailed below](#username_configuration).
+* `verificationMessageTemplate` - (Optional) Configuration block for verification message templates. [Detailed below](#verification_message_template).
+
+### account_recovery_setting
+
+* `recoveryMechanism` - (Optional) List of Account Recovery Options of the following structure:
+    * `name` - (Required) Recovery method for a user. Can be of the following: `verifiedEmail`, `verifiedPhoneNumber`, and `adminOnly`.
+    * `priority` - (Required) Positive integer specifying priority of a method with 1 being the highest priority.
+
+### admin_create_user_config
+
+* `allowAdminCreateUserOnly` - (Optional) Set to True if only the administrator is allowed to create user profiles. Set to False if users can sign themselves up via an app.
+* `inviteMessageTemplate` - (Optional) Invite message template structure. [Detailed below](#invite_message_template).
+
+#### invite_message_template
+
+* `emailMessage` - (Optional) Message template for email messages. Must contain `{username}` and `{####}` placeholders, for username and temporary password, respectively.
+* `emailSubject` - (Optional) Subject line for email messages.
+* `smsMessage` - (Optional) Message template for SMS messages. Must contain `{username}` and `{####}` placeholders, for username and temporary password, respectively.
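+
+As a hand-written sketch (not produced by `cdktf convert`; the stack name, pool name, and message text are illustrative), the two blocks above combine like this to create an admin-only pool with customized invitations:
+
```typescript
// Hypothetical example: self sign-up disabled, custom invite templates.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
class AdminOnlyPoolStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new CognitoUserPool(this, "admin_only", {
      name: "admin-only-pool",
      adminCreateUserConfig: {
        allowAdminCreateUserOnly: true,
        inviteMessageTemplate: {
          emailSubject: "Your temporary credentials",
          // Both {username} and {####} placeholders are required.
          emailMessage: "Hello {username}, your temporary password is {####}.",
          smsMessage: "Hello {username}, your temporary password is {####}.",
        },
      },
    });
  }
}
```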
+
+### device_configuration
+
+* `challengeRequiredOnNewDevice` - (Optional) Whether a challenge is required on a new device. Only applicable to a new device.
+* `deviceOnlyRememberedOnUserPrompt` - (Optional) Whether a device is only remembered on user prompt. `false` equates to "Always" remember, `true` is "User Opt In," and not using a `deviceConfiguration` block is "No."
+
+### email_configuration
+
+* `configurationSet` - (Optional) Email configuration set name from SES.
+* `emailSendingAccount` - (Optional) Email delivery method to use. `cognitoDefault` for the default email functionality built into Cognito or `developer` to use your Amazon SES configuration. Required to be `developer` if `fromEmailAddress` is set.
+* `fromEmailAddress` - (Optional) Sender’s email address or sender’s display name with their email address (e.g., `john@exampleCom`, `John Smith <john@exampleCom>` or `\"John Smith Ph.D.\" <john@exampleCom>`). Escaped double quotes are required around display names that contain certain characters as specified in [RFC 5322](https://tools.ietf.org/html/rfc5322).
+* `replyToEmailAddress` - (Optional) REPLY-TO email address.
+* `sourceArn` - (Optional) ARN of the SES verified email identity to use. Required if `emailSendingAccount` is set to `developer`.
+
+### lambda_config
+
+* `createAuthChallenge` - (Optional) ARN of the Lambda creating an authentication challenge.
+* `customMessage` - (Optional) Custom Message AWS Lambda trigger.
+* `defineAuthChallenge` - (Optional) Defines the authentication challenge.
+* `postAuthentication` - (Optional) Post-authentication AWS Lambda trigger.
+* `postConfirmation` - (Optional) Post-confirmation AWS Lambda trigger.
+* `preAuthentication` - (Optional) Pre-authentication AWS Lambda trigger.
+* `preSignUp` - (Optional) Pre-registration AWS Lambda trigger.
+* `preTokenGeneration` - (Optional) Allows customizing identity token claims before token generation.
+* `userMigration` - (Optional) User migration Lambda config type.
+* `verifyAuthChallengeResponse` - (Optional) Verifies the authentication challenge response.
+* `kmsKeyId` - (Optional) The Amazon Resource Name of the Key Management Service customer master key that Amazon Cognito uses to encrypt codes and temporary passwords sent to CustomEmailSender and CustomSMSSender.
+* `customEmailSender` - (Optional) A custom email sender AWS Lambda trigger. See [custom_email_sender](#custom_email_sender) below.
+* `customSmsSender` - (Optional) A custom SMS sender AWS Lambda trigger. See [custom_sms_sender](#custom_sms_sender) below.
+
+#### custom_email_sender
+
+* `lambdaArn` - (Required) The Lambda Amazon Resource Name of the Lambda function that Amazon Cognito triggers to send email notifications to users.
+* `lambdaVersion` - (Required) The Lambda version represents the signature of the "request" attribute in the "event" information Amazon Cognito passes to your custom email Lambda function. The only supported value is `v10`.
+
+#### custom_sms_sender
+
+* `lambdaArn` - (Required) The Lambda Amazon Resource Name of the Lambda function that Amazon Cognito triggers to send SMS notifications to users.
+* `lambdaVersion` - (Required) The Lambda version represents the signature of the "request" attribute in the "event" information Amazon Cognito passes to your custom SMS Lambda function. The only supported value is `v10`.
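+
+As a hand-written sketch (not produced by `cdktf convert`; the Lambda and KMS ARNs are placeholders, and the version string shown is the raw AWS API value for the documented Lambda version), a custom email sender wires into `lambdaConfig` like this:
+
```typescript
// Hypothetical example: Cognito invokes this Lambda to deliver email,
// after encrypting codes and temporary passwords with the given KMS key.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
class CustomSenderPoolStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new CognitoUserPool(this, "pool", {
      name: "custom-sender-pool",
      lambdaConfig: {
        // Required when a custom sender is configured.
        kmsKeyId:
          "arn:aws:kms:us-east-1:123456789012:key/11111111-2222-3333-4444-555555555555",
        customEmailSender: {
          lambdaArn:
            "arn:aws:lambda:us-east-1:123456789012:function:email-sender",
          lambdaVersion: "V1_0", // the only version currently supported
        },
      },
    });
  }
}
```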
+
+### password_policy
+
+* `minimumLength` - (Optional) Minimum length of the password policy that you have set.
+* `requireLowercase` - (Optional) Whether you have required users to use at least one lowercase letter in their password.
+* `requireNumbers` - (Optional) Whether you have required users to use at least one number in their password.
+* `requireSymbols` - (Optional) Whether you have required users to use at least one symbol in their password.
+* `requireUppercase` - (Optional) Whether you have required users to use at least one uppercase letter in their password.
+* `temporaryPasswordValidityDays` - (Optional) In the password policy you have set, the number of days a temporary password is valid. If the user does not sign in during this time, their password will need to be reset by an administrator.
+
+### schema
+
+~> **NOTE:** When defining an `attributeDataType` of `string` or `number`, the respective attribute constraints configuration block (e.g., `stringAttributeConstraints` or `numberAttributeConstraints`) is **required** to prevent recreation of the Terraform resource. This requirement is true for both standard (e.g., name, email) and custom schema attributes.
+
+* `attributeDataType` - (Required) Attribute data type. Must be one of `boolean`, `number`, `string`, `dateTime`.
+* `developerOnlyAttribute` - (Optional) Whether the attribute type is developer only.
+* `mutable` - (Optional) Whether the attribute can be changed once it has been created.
+* `name` - (Required) Name of the attribute.
+* `numberAttributeConstraints` - (Required when `attributeDataType` is `number`) Configuration block for the constraints for an attribute of the number type. [Detailed below](#number_attribute_constraints).
+* `required` - (Optional) Whether a user pool attribute is required. If the attribute is required and the user does not provide a value, registration or sign-in will fail.
+* `stringAttributeConstraints` - (Required when `attributeDataType` is `string`) Constraints for an attribute of the string type. [Detailed below](#string_attribute_constraints).
+
+#### schema: Defaults for Standard Attributes
+
+The [standard attributes](https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-settings-attributes.html#cognito-user-pools-standard-attributes) have the following defaults. Note that attributes which match the default values are not stored in Terraform state when importing.
+
```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
interface MyConfig {
  name: any;
}
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string, config: MyConfig) {
    super(scope, name);
    new CognitoUserPool(this, "example", {
      schema: [
        {
          attributeDataType: "",
          developerOnlyAttribute: false,
          mutable: true,
          name: "",
          required: false,
          stringAttributeConstraints: {
            maxLength: Token.asString(2048),
            minLength: Token.asString(0),
          },
        },
      ],
      name: config.name,
    });
  }
}

```
+
+#### number_attribute_constraints
+
+* `maxValue` - (Optional) Maximum value of an attribute that is of the number data type.
+* `minValue` - (Optional) Minimum value of an attribute that is of the number data type.
+
+#### string_attribute_constraints
+
+* `maxLength` - (Optional) Maximum length of an attribute value of the string type.
+* `minLength` - (Optional) Minimum length of an attribute value of the string type.
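+
+For a custom attribute, the same rule applies. Here is a hand-written sketch (not produced by `cdktf convert`; the attribute name and bounds are illustrative) of a `Number` attribute with its required constraints block:
+
```typescript
// Hypothetical example: one custom Number attribute. Cognito exposes it to
// clients as "custom:loyalty_points".
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
class CustomSchemaPoolStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new CognitoUserPool(this, "pool", {
      name: "custom-schema-pool",
      schema: [
        {
          name: "loyalty_points",
          attributeDataType: "Number",
          developerOnlyAttribute: false,
          mutable: true,
          required: false, // custom attributes cannot be required
          // Set explicitly, per the note above, to avoid resource recreation.
          numberAttributeConstraints: {
            minValue: "0",
            maxValue: "100000",
          },
        },
      ],
    });
  }
}
```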
+
+### sms_configuration
+
+* `externalId` - (Required) External ID used in IAM role trust relationships. For more information about using external IDs, see [How to Use an External ID When Granting Access to Your AWS Resources to a Third Party](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html).
+* `snsCallerArn` - (Required) ARN of the Amazon SNS caller. This is usually the IAM role that you've given Cognito permission to assume.
+* `snsRegion` - (Optional) The AWS Region to use with Amazon SNS integration. You can choose the same Region as your user pool, or a supported Legacy Amazon SNS alternate Region. Amazon Cognito resources in the Asia Pacific (Seoul) AWS Region must use your Amazon SNS configuration in the Asia Pacific (Tokyo) Region. For more information, see [SMS message settings for Amazon Cognito user pools](https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-sms-settings.html).
+
+### software_token_mfa_configuration
+
+The following arguments are required in the `softwareTokenMfaConfiguration` configuration block:
+
+* `enabled` - (Required) Boolean whether to enable software token Multi-Factor Authentication (MFA) tokens, such as Time-based One-Time Password (TOTP). To disable software token MFA when `smsConfiguration` is not present, the `mfaConfiguration` argument must be set to `off` and the `softwareTokenMfaConfiguration` configuration block must be fully removed.
+
+### user_attribute_update_settings
+
+* `attributesRequireVerificationBeforeUpdate` - (Required) A list of attributes requiring verification before update. If set, the provided value(s) must also be set in `autoVerifiedAttributes`. Valid values: `email`, `phoneNumber`.
+
+### user_pool_add_ons
+
+* `advancedSecurityMode` - (Required) Mode for advanced security, must be one of `off`, `audit` or `enforced`.
+
+### username_configuration
+
+* `caseSensitive` - (Required) Whether username case sensitivity will be applied for all users in the user pool through Cognito APIs.
+
+### verification_message_template
+
+* `defaultEmailOption` - (Optional) Default email option. Must be either `confirmWithCode` or `confirmWithLink`. Defaults to `confirmWithCode`.
+* `emailMessage` - (Optional) Email message template. Must contain the `{####}` placeholder. Conflicts with `emailVerificationMessage` argument.
+* `emailMessageByLink` - (Optional) Email message template for sending a confirmation link to the user. It must contain the `{##Click Here##}` placeholder.
+* `emailSubject` - (Optional) Subject line for the email message template. Conflicts with `emailVerificationSubject` argument.
+* `emailSubjectByLink` - (Optional) Subject line for the email message template for sending a confirmation link to the user.
+* `smsMessage` - (Optional) SMS message template. Must contain the `{####}` placeholder. Conflicts with `smsVerificationMessage` argument.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the user pool.
+* `creationDate` - Date the user pool was created.
+* `customDomain` - A custom domain name that you provide to Amazon Cognito. This parameter applies only if you use a custom domain to host the sign-up and sign-in pages for your application. For example: `authExampleCom`.
+* `domain` - Holds the domain prefix if the user pool has a domain associated with it.
+* `endpoint` - Endpoint name of the user pool. Example format: `cognitoIdpRegionAmazonawsCom/xxxxYyyyy`.
+* `estimatedNumberOfUsers` - A number estimating the size of the user pool.
+* `id` - ID of the user pool.
+* `lastModifiedDate` - Date the user pool was last modified.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pools using the `id`. For example:
+
```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```
+
+Using `terraform import`, import Cognito User Pools using the `id`. For example:
+
```console
% terraform import aws_cognito_user_pool.pool us-west-2_abc123
```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cognito_user_pool_client.html.markdown b/website/docs/cdktf/typescript/r/cognito_user_pool_client.html.markdown
new file mode 100644
index 00000000000..19d7a8bf6b0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cognito_user_pool_client.html.markdown
@@ -0,0 +1,294 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_client"
+description: |-
+  Provides a Cognito User Pool Client resource.
+---
+
+
+
+# Resource: aws_cognito_user_pool_client
+
+Provides a Cognito User Pool Client resource.
+
+To manage a User Pool Client created by another service, such as when [configuring an OpenSearch Domain to use Cognito authentication](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/cognito-auth.html),
+use the [`awsCognitoManagedUserPoolClient` resource](cognito_managed_user_pool_client.html) instead.
+ +## Example Usage + +### Create a basic user pool client + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const pool = new CognitoUserPool(this, "pool", { + name: "pool", + }); + new CognitoUserPoolClient(this, "client", { + name: "client", + userPoolId: pool.id, + }); + } +} + +``` + +### Create a user pool client with no SRP authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const pool = new CognitoUserPool(this, "pool", { + name: "pool", + }); + new CognitoUserPoolClient(this, "client", { + explicitAuthFlows: ["ADMIN_NO_SRP_AUTH"], + generateSecret: true, + name: "client", + userPoolId: pool.id, + }); + } +} + +``` + +### Create a user pool client with pinpoint analytics + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { PinpointApp } from "./.gen/providers/aws/pinpoint-app"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new CognitoUserPool(this, "test", { + name: "pool", + }); + const awsPinpointAppTest = new PinpointApp(this, "test_1", { + name: "pinpoint", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsPinpointAppTest.overrideLogicalId("test"); + const current = new DataAwsCallerIdentity(this, "current", {}); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["cognito-idp.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const dataAwsIamPolicyDocumentTest = new DataAwsIamPolicyDocument( + this, + "test_4", + { + statement: [ + { + actions: [ + "mobiletargeting:UpdateEndpoint", + "mobiletargeting:PutEvents", + ], + effect: "Allow", + resources: [ + "arn:aws:mobiletargeting:*:${" + + current.accountId + + "}:apps/${" + + awsPinpointAppTest.applicationId + + "}*", + ], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentTest.overrideLogicalId("test"); + const awsIamRoleTest = new IamRole(this, "test_5", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "role", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleTest.overrideLogicalId("test"); + const awsIamRolePolicyTest = new IamRolePolicy(this, "test_6", { + name: "role_policy", + policy: Token.asString(dataAwsIamPolicyDocumentTest.json), + role: Token.asString(awsIamRoleTest.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyTest.overrideLogicalId("test"); + const awsCognitoUserPoolClientTest = new CognitoUserPoolClient( + this, + "test_7", + { + analyticsConfiguration: [ + { + applicationId: Token.asString(awsPinpointAppTest.applicationId), + externalId: "some_id", + roleArn: Token.asString(awsIamRoleTest.arn), + userDataShared: true, + }, + ], + name: "pool_client", + userPoolId: test.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolClientTest.overrideLogicalId("test"); + } +} + +``` + +### Create a user pool client with Cognito as the identity provider + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const pool = new CognitoUserPool(this, "pool", { + name: "pool", + }); + new CognitoUserPoolClient(this, "userpool_client", { + allowedOauthFlows: ["code", "implicit"], + allowedOauthFlowsUserPoolClient: true, + allowedOauthScopes: ["email", "openid"], + callbackUrls: ["https://example.com"], + name: "client", + supportedIdentityProviders: ["COGNITO"], + userPoolId: pool.id, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the application client. +* `userPoolId` - (Required) User pool the client belongs to. 
+
+The following arguments are optional:
+
+* `accessTokenValidity` - (Optional) Time limit, between 5 minutes and 1 day, after which the access token is no longer valid and cannot be used.
+  By default, the unit is hours.
+  The unit can be overridden by a value in `tokenValidityUnitsAccessToken`.
+* `allowedOauthFlowsUserPoolClient` - (Optional) Whether the client is allowed to follow the OAuth protocol when interacting with Cognito user pools.
+* `allowedOauthFlows` - (Optional) List of allowed OAuth flows (code, implicit, client_credentials).
+* `allowedOauthScopes` - (Optional) List of allowed OAuth scopes (phone, email, openid, profile, and aws.cognito.signin.user.admin).
+* `analyticsConfiguration` - (Optional) Configuration block for Amazon Pinpoint analytics for collecting metrics for this user pool. [Detailed below](#analytics_configuration).
+* `authSessionValidity` - (Optional) Amazon Cognito creates a session token for each API request in an authentication flow. AuthSessionValidity is the duration, in minutes, of that session token. Your user pool native user must respond to each authentication challenge before the session expires. Valid values between `3` and `15`. Default value is `3`.
+* `callbackUrls` - (Optional) List of allowed callback URLs for the identity providers.
+* `defaultRedirectUri` - (Optional) Default redirect URI. Must be in the list of callback URLs.
+* `enableTokenRevocation` - (Optional) Enables or disables token revocation.
+* `enablePropagateAdditionalUserContextData` - (Optional) Activates the propagation of additional user context data.
+* `explicitAuthFlows` - (Optional) List of authentication flows (ADMIN_NO_SRP_AUTH, CUSTOM_AUTH_FLOW_ONLY, USER_PASSWORD_AUTH, ALLOW_ADMIN_USER_PASSWORD_AUTH, ALLOW_CUSTOM_AUTH, ALLOW_USER_PASSWORD_AUTH, ALLOW_USER_SRP_AUTH, ALLOW_REFRESH_TOKEN_AUTH).
+* `generateSecret` - (Optional) Whether an application secret should be generated.
+* `idTokenValidity` - (Optional) Time limit, between 5 minutes and 1 day, after which the ID token is no longer valid and cannot be used.
+  By default, the unit is hours.
+  The unit can be overridden by a value in `tokenValidityUnitsIdToken`.
+* `logoutUrls` - (Optional) List of allowed logout URLs for the identity providers.
+* `preventUserExistenceErrors` - (Optional) Choose which errors and responses are returned by Cognito APIs during authentication, account confirmation, and password recovery when the user does not exist in the user pool. When set to `enabled` and the user does not exist, authentication returns an error indicating either the username or password was incorrect, and account confirmation and password recovery return a response indicating a code was sent to a simulated destination. When set to `legacy`, those APIs will return a `userNotFoundException` exception if the user does not exist in the user pool.
+* `readAttributes` - (Optional) List of user pool attributes the application client can read from.
+* `refreshTokenValidity` - (Optional) Time limit, between 60 minutes and 10 years, after which the refresh token is no longer valid and cannot be used.
+  By default, the unit is days.
+  The unit can be overridden by a value in `tokenValidityUnitsRefreshToken`.
+* `supportedIdentityProviders` - (Optional) List of provider names for the identity providers that are supported on this client. Uses the `providerName` attribute of `awsCognitoIdentityProvider` resource(s), or the equivalent string(s).
+* `tokenValidityUnits` - (Optional) Configuration block for the units in which the validity times are represented. [Detailed below](#token_validity_units).
+* `writeAttributes` - (Optional) List of user pool attributes the application client can write to.
+
+### analytics_configuration
+
+Either `applicationArn` or `applicationId` is required.
+
+* `applicationArn` - (Optional) Application ARN for an Amazon Pinpoint application. Conflicts with `externalId` and `roleArn`.
+* `applicationId` - (Optional) Application ID for an Amazon Pinpoint application.
+* `externalId` - (Optional) ID for the Analytics Configuration. Conflicts with `applicationArn`.
+* `roleArn` - (Optional) ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics. Conflicts with `applicationArn`.
+* `userDataShared` - (Optional) If set to `true`, Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
+
+### token_validity_units
+
+Valid values for the following arguments are: `seconds`, `minutes`, `hours` or `days`.
+
+* `accessToken` - (Optional) Time unit for the value in `accessTokenValidity`, defaults to `hours`.
+* `idToken` - (Optional) Time unit for the value in `idTokenValidity`, defaults to `hours`.
+* `refreshToken` - (Optional) Time unit for the value in `refreshTokenValidity`, defaults to `days`.
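+
+As a hand-written sketch (not produced by `cdktf convert`; names and durations are illustrative), the validity arguments and their units interact like this: the client below issues access and ID tokens that expire after 30 minutes and refresh tokens that expire after 30 days:
+
```typescript
// Hypothetical example: token lifetimes expressed in explicit units
// instead of the default hours/days.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client";
class ShortLivedTokensStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const pool = new CognitoUserPool(this, "pool", { name: "pool" });
    new CognitoUserPoolClient(this, "client", {
      name: "client",
      userPoolId: pool.id,
      accessTokenValidity: 30, // 30 minutes, because of the unit below
      idTokenValidity: 30,
      refreshTokenValidity: 30, // 30 days
      tokenValidityUnits: {
        accessToken: "minutes",
        idToken: "minutes",
        refreshToken: "days",
      },
    });
  }
}
```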
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `clientSecret` - Client secret of the user pool client.
+* `id` - ID of the user pool client.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool Clients using the `id` of the Cognito User Pool, and the `id` of the Cognito User Pool Client. For example:
+
```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```
+
+Using `terraform import`, import Cognito User Pool Clients using the `id` of the Cognito User Pool, and the `id` of the Cognito User Pool Client. For example:
+
```console
% terraform import aws_cognito_user_pool_client.client us-west-2_abc123/3ho4ek12345678909nh3fmhpko
```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cognito_user_pool_domain.html.markdown b/website/docs/cdktf/typescript/r/cognito_user_pool_domain.html.markdown
new file mode 100644
index 00000000000..3d8beed3497
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cognito_user_pool_domain.html.markdown
@@ -0,0 +1,134 @@
+---
+subcategory: "Cognito IDP (Identity Provider)"
+layout: "aws"
+page_title: "AWS: aws_cognito_user_pool_domain"
+description: |-
+  Provides a Cognito User Pool Domain resource.
+---
+
+
+
+# Resource: aws_cognito_user_pool_domain
+
+Provides a Cognito User Pool Domain resource.
+
+## Example Usage
+
+### Amazon Cognito domain
+
```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
import { CognitoUserPoolDomain } from "./.gen/providers/aws/cognito-user-pool-domain";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new CognitoUserPool(this, "example", {
      name: "example-pool",
    });
    new CognitoUserPoolDomain(this, "main", {
      domain: "example-domain",
      userPoolId: example.id,
    });
  }
}

```
+
+### Custom Cognito domain
+
```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool";
import { CognitoUserPoolDomain } from "./.gen/providers/aws/cognito-user-pool-domain";
import { DataAwsRoute53Zone } from "./.gen/providers/aws/data-aws-route53-zone";
import { Route53Record } from "./.gen/providers/aws/route53-record";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new CognitoUserPool(this, "example", {
      name: "example-pool",
    });
    const main = new CognitoUserPoolDomain(this, "main", {
      certificateArn: cert.arn,
      domain: "example-domain",
      userPoolId: example.id,
    });
    const dataAwsRoute53ZoneExample = new DataAwsRoute53Zone(
      this,
      "example_2",
      {
        name: "example.com",
      }
    );
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    dataAwsRoute53ZoneExample.overrideLogicalId("example");
    new Route53Record(this, "auth-cognito-A", {
      alias: {
        evaluateTargetHealth: false,
        name: main.cloudfrontDistribution,
        zoneId: main.cloudfrontDistributionZoneId,
      },
      name: main.domain,
      type: "A",
      zoneId: Token.asString(dataAwsRoute53ZoneExample.zoneId),
    });
  }
}

```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domain` - (Required) For custom domains, this is the fully-qualified domain name, such as auth.example.com. For Amazon Cognito prefix domains, this is the prefix alone, such as auth.
+* `userPoolId` - (Required) The user pool ID.
+* `certificateArn` - (Optional) The ARN of an ISSUED ACM certificate in us-east-1 for a custom domain.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `awsAccountId` - The AWS account ID for the user pool owner.
+* `cloudfrontDistribution` - The Amazon CloudFront endpoint (e.g. `dpp0Gtxikpq3YCloudfrontNet`) that you use as the target of the alias that you set up with your Domain Name Service (DNS) provider.
+* `cloudfrontDistributionArn` - The ARN of the CloudFront distribution. This is required to generate the ALIAS `awsRoute53Record`.
+* `cloudfrontDistributionZoneId` - The Route 53 hosted zone ID of the CloudFront distribution.
+* `s3Bucket` - The S3 bucket where the static files for this domain are stored.
+* `version` - The app version.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool Domains using the `domain`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Cognito User Pool Domains using the `domain`. For example: + +```console +% terraform import aws_cognito_user_pool_domain.main auth.example.org +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/cognito_user_pool_ui_customization.html.markdown b/website/docs/cdktf/typescript/r/cognito_user_pool_ui_customization.html.markdown new file mode 100644 index 00000000000..53d15223aba --- /dev/null +++ b/website/docs/cdktf/typescript/r/cognito_user_pool_ui_customization.html.markdown @@ -0,0 +1,155 @@ +--- +subcategory: "Cognito IDP (Identity Provider)" +layout: "aws" +page_title: "AWS: aws_cognito_user_pool_ui_customization" +description: |- + Provides a Cognito User Pool UI Customization resource. +--- + + + +# Resource: aws_cognito_user_pool_ui_customization + +Provides a Cognito User Pool UI Customization resource. + +~> **Note:** To use this resource, the user pool must have a domain associated with it. For more information, see the Amazon Cognito Developer Guide on [Customizing the Built-in Sign-In and Sign-up Webpages](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-app-ui-customization.html). + +## Example Usage + +### UI customization settings for a single client + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +import { CognitoUserPoolDomain } from "./.gen/providers/aws/cognito-user-pool-domain"; +import { CognitoUserPoolUiCustomization } from "./.gen/providers/aws/cognito-user-pool-ui-customization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoUserPool(this, "example", { + name: "example", + }); + const awsCognitoUserPoolClientExample = new CognitoUserPoolClient( + this, + "example_1", + { + name: "example", + userPoolId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolClientExample.overrideLogicalId("example"); + const awsCognitoUserPoolDomainExample = new CognitoUserPoolDomain( + this, + "example_2", + { + domain: "example", + userPoolId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolDomainExample.overrideLogicalId("example"); + const awsCognitoUserPoolUiCustomizationExample = + new CognitoUserPoolUiCustomization(this, "example_3", { + clientId: Token.asString(awsCognitoUserPoolClientExample.id), + css: ".label-customizable {font-weight: 400;}", + imageFile: Token.asString(Fn.filebase64("logo.png")), + userPoolId: Token.asString(awsCognitoUserPoolDomainExample.userPoolId), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolUiCustomizationExample.overrideLogicalId("example"); + } +} + +``` + +### UI customization settings for all clients + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolDomain } from "./.gen/providers/aws/cognito-user-pool-domain"; +import { CognitoUserPoolUiCustomization } from "./.gen/providers/aws/cognito-user-pool-ui-customization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoUserPool(this, "example", { + name: "example", + }); + const awsCognitoUserPoolDomainExample = new CognitoUserPoolDomain( + this, + "example_1", + { + domain: "example", + userPoolId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolDomainExample.overrideLogicalId("example"); + const awsCognitoUserPoolUiCustomizationExample = + new CognitoUserPoolUiCustomization(this, "example_2", { + css: ".label-customizable {font-weight: 400;}", + imageFile: Token.asString(Fn.filebase64("logo.png")), + userPoolId: Token.asString(awsCognitoUserPoolDomainExample.userPoolId), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolUiCustomizationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `clientId` (Optional) The client ID for the client app. Defaults to `all`. If `all` is specified, the `css` and/or `imageFile` settings will be used for every client that has no UI customization set previously. +* `css` (Optional) - The CSS values in the UI customization, provided as a String. At least one of `css` or `imageFile` is required. +* `imageFile` (Optional) - The uploaded logo image for the UI customization, provided as a base64-encoded String. Drift detection is not possible for this argument. At least one of `css` or `imageFile` is required. +* `userPoolId` (Required) - The user pool ID for the user pool. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `creationDate` - The creation date in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) for the UI customization. +* `cssVersion` - The CSS version number. +* `imageUrl` - The logo image URL for the UI customization. 
+* `lastModifiedDate` - The last-modified date in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) for the UI customization. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Cognito User Pool UI Customizations using the `userPoolId` and `clientId` separated by `,`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Cognito User Pool UI Customizations using the `userPoolId` and `clientId` separated by `,`. For example: + +```console +% terraform import aws_cognito_user_pool_ui_customization.example us-west-2_ZCTarbt5C,12bu4fuk3mlgqa2rtrujgp6egq +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/comprehend_document_classifier.html.markdown b/website/docs/cdktf/typescript/r/comprehend_document_classifier.html.markdown new file mode 100644 index 00000000000..11640d155eb --- /dev/null +++ b/website/docs/cdktf/typescript/r/comprehend_document_classifier.html.markdown @@ -0,0 +1,174 @@ +--- +subcategory: "Comprehend" +layout: "aws" +page_title: "AWS: aws_comprehend_document_classifier" +description: |- + Terraform resource for managing an AWS Comprehend Document Classifier. +--- + + + +# Resource: aws_comprehend_document_classifier + +Terraform resource for managing an AWS Comprehend Document Classifier. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ComprehendDocumentClassifier } from "./.gen/providers/aws/comprehend-document-classifier"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +interface MyConfig { + bucket: any; + key: any; + bucket1: any; + key1: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const documents = new S3Object(this, "documents", { + bucket: config.bucket, + key: config.key, + }); + new S3Object(this, "entities", { + bucket: config.bucket1, + key: config.key1, + }); + new ComprehendDocumentClassifier(this, "example", { + dataAccessRoleArn: Token.asString(awsIamRoleExample.arn), + dependsOn: [awsIamRolePolicyExample], + inputDataConfig: { + s3Uri: "s3://${" + test.bucket + "}/${" + documents.id + "}", + }, + languageCode: "en", + name: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `dataAccessRoleArn` - (Required) The ARN for an IAM Role which allows Comprehend to read the training and testing data. +* `inputDataConfig` - (Required) Configuration for the training and testing data. + See the [`inputDataConfig` Configuration Block](#input_data_config-configuration-block) section below. +* `languageCode` - (Required) Two-letter language code for the language. + One of `en`, `es`, `fr`, `it`, `de`, or `pt`. +* `name` - (Required) Name for the Document Classifier. + Has a maximum length of 63 characters. 
Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+
+The following arguments are optional:
+
+* `mode` - (Optional, Default: `multiClass`) The document classification mode.
+  One of `multiClass` or `multiLabel`.
+  `multiClass` is also known as "Single Label" in the AWS Console.
+* `modelKmsKeyId` - (Optional) KMS Key used to encrypt trained Document Classifiers.
+  Can be a KMS Key ID or a KMS Key ARN.
+* `outputDataConfig` - (Optional) Configuration for the output results of training.
+  See the [`outputDataConfig` Configuration Block](#output_data_config-configuration-block) section below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` Configuration Block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `versionName` - (Optional) Name for the version of the Document Classifier.
+  Each version must have a unique name within the Document Classifier.
+  If omitted, Terraform will assign a random, unique version name.
+  If explicitly set to `""`, no version name will be set.
+  Has a maximum length of 63 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `versionNamePrefix`.
+* `versionNamePrefix` - (Optional) Creates a unique version name beginning with the specified prefix.
+  Has a maximum length of 37 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `versionName`.
+* `volumeKmsKeyId` - (Optional) KMS Key used to encrypt storage volumes during job processing.
+  Can be a KMS Key ID or a KMS Key ARN.
+* `vpcConfig` - (Optional) Configuration parameters for VPC to contain Document Classifier resources.
+  See the [`vpcConfig` Configuration Block](#vpc_config-configuration-block) section below.
+
+### `inputDataConfig` Configuration Block
+
+* `augmentedManifests` - (Optional) List of training datasets produced by Amazon SageMaker Ground Truth.
+  Used if `dataFormat` is `augmentedManifest`.
+  See the [`augmentedManifests` Configuration Block](#augmented_manifests-configuration-block) section below.
+* `dataFormat` - (Optional, Default: `comprehendCsv`) The format for the training data.
+  One of `comprehendCsv` or `augmentedManifest`.
+* `labelDelimiter` - (Optional) Delimiter between labels when training a multi-label classifier.
+  Valid values are `|`, `~`, `!`, `@`, `#`, `$`, `%`, `^`, `*`, `-`, `_`, `+`, `=`, `\`, `:`, `;`, `>`, `?`, `/`, `<space>`, and `<tab>`.
+  Default is `|`.
+* `s3Uri` - (Optional) Location of training documents.
+  Used if `dataFormat` is `comprehendCsv`.
+* `testS3Uri` - (Optional) Location of test documents.
+
+### `augmentedManifests` Configuration Block
+
+* `annotationDataS3Uri` - (Optional) Location of annotation files.
+* `attributeNames` - (Required) The JSON attribute that contains the annotations for the training documents.
+* `documentType` - (Optional, Default: `plainTextDocument`) Type of augmented manifest.
+  One of `plainTextDocument` or `semiStructuredDocument`.
+* `s3Uri` - (Required) Location of augmented manifest file.
+* `sourceDocumentsS3Uri` - (Optional) Location of source PDF files.
+* `split` - (Optional, Default: `train`) Purpose of data in augmented manifest.
+  One of `train` or `test`.
+
+### `outputDataConfig` Configuration Block
+
+* `kmsKeyId` - (Optional) KMS Key used to encrypt the output documents.
+  Can be a KMS Key ID, a KMS Key ARN, a KMS Alias name, or a KMS Alias ARN.
+* `outputS3Uri` - (Computed) Full path for the output documents.
+* `s3Uri` - (Required) Destination path for the output documents.
+  The full path to the output file will be returned in `outputS3Uri`.
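+
+As a hand-written sketch (not produced by `cdktf convert`; the bucket names, prefix, and role ARN are placeholders), training output can be directed to an S3 prefix and encrypted with a KMS alias:
+
```typescript
// Hypothetical example: classifier whose training results land under
// s3://results-bucket/classifier-output/, encrypted with a KMS alias.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { ComprehendDocumentClassifier } from "./.gen/providers/aws/comprehend-document-classifier";
class ClassifierWithOutputStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new ComprehendDocumentClassifier(this, "example", {
      name: "example",
      languageCode: "en",
      dataAccessRoleArn: "arn:aws:iam::123456789012:role/comprehend-access",
      inputDataConfig: {
        s3Uri: "s3://training-bucket/documents.csv",
      },
      outputDataConfig: {
        s3Uri: "s3://results-bucket/classifier-output/",
        kmsKeyId: "alias/comprehend-output",
      },
    });
  }
}
```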
+
+### `vpcConfig` Configuration Block
+
+* `securityGroupIds` - (Required) List of security group IDs.
+* `subnets` - (Required) List of VPC subnets.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Document Classifier version.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+`awsComprehendDocumentClassifier` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+* `create` - (Optional, Default: `60M`)
+* `update` - (Optional, Default: `60M`)
+* `delete` - (Optional, Default: `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Comprehend Document Classifier using the ARN. For example:
+
```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```
+
+Using `terraform import`, import Comprehend Document Classifier using the ARN. For example:
+
```console
% terraform import aws_comprehend_document_classifier.example arn:aws:comprehend:us-west-2:123456789012:document_classifier/example
```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/comprehend_entity_recognizer.html.markdown b/website/docs/cdktf/typescript/r/comprehend_entity_recognizer.html.markdown
new file mode 100644
index 00000000000..c71f5424ad6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/comprehend_entity_recognizer.html.markdown
@@ -0,0 +1,205 @@
+---
+subcategory: "Comprehend"
+layout: "aws"
+page_title: "AWS: aws_comprehend_entity_recognizer"
+description: |-
+  Terraform resource for managing an AWS Comprehend Entity Recognizer.
+---
+
+
+
+# Resource: aws_comprehend_entity_recognizer
+
+Terraform resource for managing an AWS Comprehend Entity Recognizer.
+
+## Example Usage
+
+### Basic Usage
+
```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
+ */
+import { ComprehendEntityRecognizer } from "./.gen/providers/aws/comprehend-entity-recognizer";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+interface MyConfig {
+  bucket: any;
+  key: any;
+  bucket1: any;
+  key1: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const documents = new S3Object(this, "documents", {
+      bucket: config.bucket,
+      key: config.key,
+    });
+    const entities = new S3Object(this, "entities", {
+      bucket: config.bucket1,
+      key: config.key1,
+    });
+    new ComprehendEntityRecognizer(this, "example", {
+      dataAccessRoleArn: Token.asString(awsIamRoleExample.arn),
+      dependsOn: [awsIamRolePolicyExample],
+      inputDataConfig: {
+        documents: {
+          s3Uri:
+            "s3://${" +
+            awsS3BucketDocuments.bucket +
+            "}/${" +
+            documents.id +
+            "}",
+        },
+        entityList: {
+          s3Uri:
+            "s3://${" + awsS3BucketEntities.bucket + "}/${" + entities.id + "}",
+        },
+        entityTypes: [
+          {
+            type: "ENTITY_1",
+          },
+          {
+            type: "ENTITY_2",
+          },
+        ],
+      },
+      languageCode: "en",
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `dataAccessRoleArn` - (Required) The ARN for an IAM Role which allows Comprehend to read the training and testing data.
+* `inputDataConfig` - (Required) Configuration for the training and testing data.
+  See the [`inputDataConfig` Configuration Block](#input_data_config-configuration-block) section below.
+* `languageCode` - (Required) Two-letter language code for the language.
+  One of `en`, `es`, `fr`, `it`, `de`, or `pt`.
+* `name` - (Required) Name for the Entity Recognizer.
+  Has a maximum length of 63 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+
+The following arguments are optional:
+
+* `modelKmsKeyId` - (Optional) The ID or ARN of a KMS Key used to encrypt trained Entity Recognizers.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` Configuration Block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `versionName` - (Optional) Name for the version of the Entity Recognizer.
+  Each version must have a unique name within the Entity Recognizer.
+  If omitted, Terraform will assign a random, unique version name.
+  If explicitly set to `""`, no version name will be set.
+  Has a maximum length of 63 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `versionNamePrefix`.
+* `versionNamePrefix` - (Optional) Creates a unique version name beginning with the specified prefix.
+  Has a maximum length of 37 characters.
+  Can contain upper- and lower-case letters, numbers, and hyphen (`-`).
+  Conflicts with `versionName`.
+* `volumeKmsKeyId` - (Optional) ID or ARN of a KMS Key used to encrypt storage volumes during job processing.
+* `vpcConfig` - (Optional) Configuration parameters for VPC to contain Entity Recognizer resources.
+  See the [`vpcConfig` Configuration Block](#vpc_config-configuration-block) section below.
+
+### `inputDataConfig` Configuration Block
+
+* `annotations` - (Optional) Specifies location of the document annotation data.
+  See the [`annotations` Configuration Block](#annotations-configuration-block) section below.
+  One of `annotations` or `entityList` is required.
+* `augmentedManifests` - (Optional) List of training datasets produced by Amazon SageMaker Ground Truth.
+  Used if `dataFormat` is `augmentedManifest`.
+  See the [`augmentedManifests` Configuration Block](#augmented_manifests-configuration-block) section below.
+* `dataFormat` - (Optional, Default: `comprehendCsv`) The format for the training data.
+  One of `comprehendCsv` or `augmentedManifest`.
+* `documents` - (Optional) Specifies a collection of training documents.
+  Used if `dataFormat` is `comprehendCsv`.
+  See the [`documents` Configuration Block](#documents-configuration-block) section below.
+* `entityList` - (Optional) Specifies location of the entity list data.
+  See the [`entityList` Configuration Block](#entity_list-configuration-block) section below.
+  One of `entityList` or `annotations` is required.
+* `entityTypes` - (Required) Set of entity types to be recognized.
+  Has a maximum of 25 items.
+  See the [`entityTypes` Configuration Block](#entity_types-configuration-block) section below.
+
+### `annotations` Configuration Block
+
+* `s3Uri` - (Required) Location of training annotations.
+* `testS3Uri` - (Optional) Location of test annotations.
+
+### `augmentedManifests` Configuration Block
+
+* `annotationDataS3Uri` - (Optional) Location of annotation files.
+* `attributeNames` - (Required) The JSON attribute that contains the annotations for the training documents.
+* `documentType` - (Optional, Default: `plainTextDocument`) Type of augmented manifest.
+  One of `plainTextDocument` or `semiStructuredDocument`.
+* `s3Uri` - (Required) Location of augmented manifest file.
+* `sourceDocumentsS3Uri` - (Optional) Location of source PDF files.
+* `split` - (Optional, Default: `train`) Purpose of data in augmented manifest.
+  One of `train` or `test`.
+
+### `documents` Configuration Block
+
+* `inputFormat` - (Optional, Default: `oneDocPerLine`) Specifies how the input files should be processed.
+  One of `oneDocPerLine` or `oneDocPerFile`.
+* `s3Uri` - (Required) Location of training documents.
+* `testS3Uri` - (Optional) Location of test documents.
+
+### `entityList` Configuration Block
+
+* `s3Uri` - (Required) Location of entity list.
+
+### `entityTypes` Configuration Block
+
+* `type` - (Required) An entity type to be matched by the Entity Recognizer.
+  Cannot contain a newline (`\n`), carriage return (`\r`), or tab (`\t`).
+
+### `vpcConfig` Configuration Block
+
+* `securityGroupIds` - (Required) List of security group IDs.
+* `subnets` - (Required) List of VPC subnets.
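+
+As a rough sketch of the `annotations` variant described above (the S3 locations and role ARN are hypothetical placeholders, not generated output), `annotations` takes the place of `entityList` inside `inputDataConfig`:
+
+```typescript
+// Hypothetical sketch - S3 locations and the role ARN are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ComprehendEntityRecognizer } from "./.gen/providers/aws/comprehend-entity-recognizer";
+class MySketchStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ComprehendEntityRecognizer(this, "sketch", {
+      name: "example-sketch",
+      dataAccessRoleArn: "arn:aws:iam::123456789012:role/comprehend-access", // placeholder
+      languageCode: "en",
+      inputDataConfig: {
+        // Exactly one of `annotations` or `entityList` is required.
+        annotations: {
+          s3Uri: "s3://example-training-bucket/annotations.csv", // placeholder
+        },
+        documents: {
+          s3Uri: "s3://example-training-bucket/documents", // placeholder
+        },
+        entityTypes: [{ type: "EXAMPLE_TYPE" }],
+      },
+    });
+  }
+}
+```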
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Entity Recognizer version.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+`awsComprehendEntityRecognizer` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+* `create` - (Optional, Default: `60M`)
+* `update` - (Optional, Default: `60M`)
+* `delete` - (Optional, Default: `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Comprehend Entity Recognizer using the ARN.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Comprehend Entity Recognizer using the ARN. For example: + +```console +% terraform import aws_comprehend_entity_recognizer.example arn:aws:comprehend:us-west-2:123456789012:entity-recognizer/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_aggregate_authorization.markdown b/website/docs/cdktf/typescript/r/config_aggregate_authorization.markdown new file mode 100644 index 00000000000..9a0030a2212 --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_aggregate_authorization.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_aggregate_authorization" +description: |- + Manages an AWS Config Aggregate Authorization. +--- + + + +# Resource: aws_config_aggregate_authorization + +Manages an AWS Config Aggregate Authorization + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigAggregateAuthorization } from "./.gen/providers/aws/config-aggregate-authorization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConfigAggregateAuthorization(this, "example", { + accountId: "123456789012", + region: "eu-west-2", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Required) Account ID +* `region` - (Required) Region +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the authorization +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config aggregate authorizations using `accountId:region`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Config aggregate authorizations using `accountId:region`. 
For example: + +```console +% terraform import aws_config_aggregate_authorization.example 123456789012:us-east-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_config_rule.html.markdown b/website/docs/cdktf/typescript/r/config_config_rule.html.markdown new file mode 100644 index 00000000000..12328976563 --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_config_rule.html.markdown @@ -0,0 +1,261 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_config_rule" +description: |- + Provides an AWS Config Rule. +--- + + + +# Resource: aws_config_config_rule + +Provides an AWS Config Rule. + +~> **Note:** Config Rule requires an existing [Configuration Recorder](/docs/providers/aws/r/config_configuration_recorder.html) to be present. Use of `dependsOn` is recommended (as shown below) to avoid race conditions. + +## Example Usage + +### AWS Managed Rules + +AWS managed rules can be used by setting the source owner to `aws` and the source identifier to the name of the managed rule. More information about AWS managed rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConfigRule } from "./.gen/providers/aws/config-config-rule"; +import { ConfigConfigurationRecorder } from "./.gen/providers/aws/config-configuration-recorder"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["config.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const p = new DataAwsIamPolicyDocument(this, "p", { + statement: [ + { + actions: ["config:Put*"], + effect: "Allow", + resources: ["*"], + }, + ], + }); + const r = new IamRole(this, "r", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "my-awsconfig-role", + }); + const awsIamRolePolicyP = new IamRolePolicy(this, "p_3", { + name: "my-awsconfig-policy", + policy: Token.asString(p.json), + role: r.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyP.overrideLogicalId("p"); + const foo = new ConfigConfigurationRecorder(this, "foo", { + name: "example", + roleArn: r.arn, + }); + const awsConfigConfigRuleR = new ConfigConfigRule(this, "r_5", { + dependsOn: [foo], + name: "example", + source: { + owner: "AWS", + sourceIdentifier: "S3_BUCKET_VERSIONING_ENABLED", + }, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsConfigConfigRuleR.overrideLogicalId("r"); + } +} + +``` + +### Custom Rules + +Custom rules can be used by setting the source owner to `customLambda` and the source identifier to the Amazon Resource Name (ARN) of the Lambda Function. The AWS Config service must have permissions to invoke the Lambda Function, e.g., via the [`awsLambdaPermission` resource](/docs/providers/aws/r/lambda_permission.html). More information about custom rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_develop-rules.html). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConfigRule } from "./.gen/providers/aws/config-config-rule"; +import { ConfigConfigurationRecorder } from "./.gen/providers/aws/config-configuration-recorder"; +import { LambdaFunction } from "./.gen/providers/aws/lambda-function"; +import { LambdaPermission } from "./.gen/providers/aws/lambda-permission"; +interface MyConfig { + roleArn: any; + functionName: any; + role: any; + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new ConfigConfigurationRecorder(this, "example", { + roleArn: config.roleArn, + }); + const awsLambdaFunctionExample = new LambdaFunction(this, "example_1", { + functionName: config.functionName, + role: config.role, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLambdaFunctionExample.overrideLogicalId("example"); + const awsLambdaPermissionExample = new LambdaPermission(this, "example_2", { + action: "lambda:InvokeFunction", + functionName: Token.asString(awsLambdaFunctionExample.arn), + principal: "config.amazonaws.com", + statementId: "AllowExecutionFromConfig", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLambdaPermissionExample.overrideLogicalId("example"); + const awsConfigConfigRuleExample = new ConfigConfigRule(this, "example_3", { + dependsOn: [example, awsLambdaPermissionExample], + source: { + owner: "CUSTOM_LAMBDA", + sourceIdentifier: Token.asString(awsLambdaFunctionExample.arn), + }, + name: config.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigConfigRuleExample.overrideLogicalId("example"); + } +} + +``` + +### Custom Policies + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { ConfigConfigRule } from "./.gen/providers/aws/config-config-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConfigConfigRule(this, "example", {
+      name: "example",
+      source: {
+        customPolicyDetails: {
+          policyRuntime: "guard-2.x.x",
+          policyText:
+            '\t rule tableisactive when\n\t\t resourceType == "AWS::DynamoDB::Table" {\n\t\t configuration.tableStatus == [\'ACTIVE\']\n\t }\n\t \n\t rule checkcompliance when\n\t\t resourceType == "AWS::DynamoDB::Table"\n\t\t tableisactive {\n\t\t\t supplementaryConfiguration.ContinuousBackupsDescription.pointInTimeRecoveryDescription.pointInTimeRecoveryStatus == "ENABLED"\n\t }\n\n',
+        },
+        owner: "CUSTOM_POLICY",
+        sourceDetail: [
+          {
+            messageType: "ConfigurationItemChangeNotification",
+          },
+        ],
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the rule
+* `description` - (Optional) Description of the rule
+* `inputParameters` - (Optional) A string in JSON format that is passed to the AWS Config rule Lambda function.
+* `maximumExecutionFrequency` - (Optional) The maximum frequency with which AWS Config runs evaluations for a rule.
+* `scope` - (Optional) Scope defines which resources can trigger an evaluation for the rule. See [Scope](#scope) Below.
+* `source` - (Required) Source specifies the rule owner, the rule identifier, and the notifications that cause the function to evaluate your AWS resources. See [Source](#source) Below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Scope
+
+Defines which resources can trigger an evaluation for the rule.
+If you do not specify a scope, evaluations are triggered when any resource in the recording group changes.
+
+* `complianceResourceId` - (Optional) The ID of the only AWS resource that you want to trigger an evaluation for the rule. If you specify a resource ID, you must specify one resource type for `complianceResourceTypes`.
+* `complianceResourceTypes` - (Optional) A list of resource types of only those AWS resources that you want to trigger an evaluation for the rule, e.g., `aws::ec2::instance`. You can only specify one type if you also specify a resource ID for `complianceResourceId`. See [relevant part of AWS Docs](http://docs.aws.amazon.com/config/latest/APIReference/API_ResourceIdentifier.html#config-Type-ResourceIdentifier-resourceType) for available types.
+* `tagKey` - (Optional, Required if `tagValue` is specified) The tag key that is applied to only those AWS resources that you want to trigger an evaluation for the rule.
+* `tagValue` - (Optional) The tag value applied to only those AWS resources that you want to trigger an evaluation for the rule.
+
+### Source
+
+Provides the rule owner (AWS or customer), the rule identifier, and the notifications that cause the function to evaluate your AWS resources.
+
+* `owner` - (Required) Indicates whether AWS or the customer owns and manages the AWS Config rule. Valid values are `aws`, `customLambda` or `customPolicy`.
For more information about managed rules, see the [AWS Config Managed Rules documentation](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html). For more information about custom rules, see the [AWS Config Custom Rules documentation](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_develop-rules.html). Custom Lambda Functions require permissions to allow the AWS Config service to invoke them, e.g., via the [`awsLambdaPermission` resource](/docs/providers/aws/r/lambda_permission.html).
+* `sourceIdentifier` - (Optional) For AWS Config managed rules, a predefined identifier, e.g., `iamPasswordPolicy`. For custom Lambda rules, the identifier is the ARN of the Lambda Function, such as `arn:aws:lambda:usEast1:123456789012:function:customRuleName` or the [`arn` attribute of the `awsLambdaFunction` resource](/docs/providers/aws/r/lambda_function.html#arn).
+* `sourceDetail` - (Optional) Provides the source and type of the event that causes AWS Config to evaluate your AWS resources. Only valid if `owner` is `customLambda` or `customPolicy`. See [Source Detail](#source-detail) Below.
+* `customPolicyDetails` - (Optional) Provides the runtime system, policy definition, and whether debug logging is enabled. Required when owner is set to `customPolicy`. See [Custom Policy Details](#custom-policy-details) Below.
+
+#### Source Detail
+
+* `eventSource` - (Optional) The source of the event, such as an AWS service, that triggers AWS Config to evaluate your AWS resources. This defaults to `awsConfig` and is the only valid value.
+* `maximumExecutionFrequency` - (Optional) The frequency that you want AWS Config to run evaluations for a rule that is triggered periodically. If specified, requires `messageType` to be `scheduledNotification`.
+* `messageType` - (Optional) The type of notification that triggers AWS Config to run an evaluation for a rule. You can specify the following notification types:
+    * `configurationItemChangeNotification` - Triggers an evaluation when AWS Config delivers a configuration item as a result of a resource change.
+    * `oversizedConfigurationItemChangeNotification` - Triggers an evaluation when AWS Config delivers an oversized configuration item. AWS Config may generate this notification type when a resource changes and the notification exceeds the maximum size allowed by Amazon SNS.
+    * `scheduledNotification` - Triggers a periodic evaluation at the frequency specified for `maximumExecutionFrequency`.
+    * `configurationSnapshotDeliveryCompleted` - Triggers a periodic evaluation when AWS Config delivers a configuration snapshot.
+
+#### Custom Policy Details
+
+* `enableDebugLogDelivery` - (Optional) The boolean expression for enabling debug logging for your Config Custom Policy rule. The default value is `false`.
+* `policyRuntime` - (Required) The runtime system for your Config Custom Policy rule. Guard is a policy-as-code language that allows you to write policies that are enforced by Config Custom Policy rules. For more information about Guard, see the [Guard GitHub Repository](https://github.com/aws-cloudformation/cloudformation-guard).
+* `policyText` - (Required) The policy definition containing the logic for your Config Custom Policy rule.
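+
+The `scope` and `sourceDetail` blocks above compose as in the following rough sketch of a periodic, scoped custom Lambda rule (the Lambda function ARN is a hypothetical placeholder, and this sketch is not part of the generated examples):
+
+```typescript
+// Hypothetical sketch - the Lambda function ARN is a placeholder.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ConfigConfigRule } from "./.gen/providers/aws/config-config-rule";
+class MySketchStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConfigConfigRule(this, "sketch", {
+      name: "example-sketch",
+      // Evaluate only DynamoDB tables rather than the whole recording group.
+      scope: {
+        complianceResourceTypes: ["AWS::DynamoDB::Table"],
+      },
+      source: {
+        owner: "CUSTOM_LAMBDA",
+        sourceIdentifier:
+          "arn:aws:lambda:us-east-1:123456789012:function:example", // placeholder
+        sourceDetail: [
+          {
+            // Run on a schedule instead of reacting to configuration changes.
+            messageType: "ScheduledNotification",
+            maximumExecutionFrequency: "TwentyFour_Hours",
+          },
+        ],
+      },
+    });
+  }
+}
+```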
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the config rule +* `ruleId` - The ID of the config rule +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Rule using the name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Config Rule using the name. For example: + +```console +% terraform import aws_config_config_rule.foo example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_configuration_aggregator.html.markdown b/website/docs/cdktf/typescript/r/config_configuration_aggregator.html.markdown new file mode 100644 index 00000000000..69466876d0a --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_configuration_aggregator.html.markdown @@ -0,0 +1,165 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_configuration_aggregator" +description: |- + Manages an AWS Config Configuration Aggregator. +--- + + + +# Resource: aws_config_configuration_aggregator + +Manages an AWS Config Configuration Aggregator + +## Example Usage + +### Account Based Aggregation + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConfigurationAggregator } from "./.gen/providers/aws/config-configuration-aggregator"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConfigConfigurationAggregator(this, "account", { + accountAggregationSource: { + accountIds: ["123456789012"], + regions: ["us-west-2"], + }, + name: "example", + }); + } +} + +``` + +### Organization Based Aggregation + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ConfigConfigurationAggregator } from "./.gen/providers/aws/config-configuration-aggregator"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["config.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const organization = new IamRole(this, "organization", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "example", + }); + const awsIamRolePolicyAttachmentOrganization = new IamRolePolicyAttachment( + this, + "organization_2", + { + policyArn: + "arn:aws:iam::aws:policy/service-role/AWSConfigRoleForOrganizations", + role: organization.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentOrganization.overrideLogicalId("organization"); + const awsConfigConfigurationAggregatorOrganization = + new ConfigConfigurationAggregator(this, "organization_3", { + dependsOn: [awsIamRolePolicyAttachmentOrganization], + name: "example", + organizationAggregationSource: { + allRegions: true, + roleArn: organization.arn, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigConfigurationAggregatorOrganization.overrideLogicalId( + "organization" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the configuration aggregator. +* `accountAggregationSource` - (Optional) The account(s) to aggregate config data from as documented below. +* `organizationAggregationSource` - (Optional) The organization to aggregate config data from as documented below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +Either `accountAggregationSource` or `organizationAggregationSource` must be specified. + +### `accountAggregationSource` + +* `accountIds` - (Required) List of 12-digit account IDs of the account(s) being aggregated. +* `allRegions` - (Optional) If true, aggregate existing AWS Config regions and future regions. +* `regions` - (Optional) List of source regions being aggregated. + +Either `regions` or `allRegions` (as true) must be specified. + +### `organizationAggregationSource` + +~> **Note:** If your source type is an organization, you must be signed in to the master account and all features must be enabled in your organization. AWS Config calls EnableAwsServiceAccess API to enable integration between AWS Config and AWS Organizations. + +* `allRegions` - (Optional) If true, aggregate existing AWS Config regions and future regions. +* `regions` - (Optional) List of source regions being aggregated. +* `roleArn` - (Required) ARN of the IAM role used to retrieve AWS Organization details associated with the aggregator account. 
+ +Either `regions` or `allRegions` (as true) must be specified. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the aggregator +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Aggregators using the name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Configuration Aggregators using the name. For example: + +```console +% terraform import aws_config_configuration_aggregator.example foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_configuration_recorder.html.markdown b/website/docs/cdktf/typescript/r/config_configuration_recorder.html.markdown new file mode 100644 index 00000000000..203f02d6901 --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_configuration_recorder.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_configuration_recorder" +description: |- + Provides an AWS Config Configuration Recorder. +--- + + + +# Resource: aws_config_configuration_recorder + +Provides an AWS Config Configuration Recorder. Please note that this resource **does not start** the created recorder automatically. + +~> **Note:** _Starting_ the Configuration Recorder requires a [delivery channel](/docs/providers/aws/r/config_delivery_channel.html) (while delivery channel creation requires Configuration Recorder). This is why [`awsConfigConfigurationRecorderStatus`](/docs/providers/aws/r/config_configuration_recorder_status.html) is a separate resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConfigurationRecorder } from "./.gen/providers/aws/config-configuration-recorder"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["config.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const r = new IamRole(this, "r", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "awsconfig-example", + }); + new ConfigConfigurationRecorder(this, "foo", { + name: "example", + roleArn: r.arn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the recorder. Defaults to `default`. 
Changing it recreates the resource.
+* `roleArn` - (Required) Amazon Resource Name (ARN) of the IAM role. Used to make read or write requests to the delivery channel and to describe the AWS resources associated with the account. See [AWS Docs](http://docs.aws.amazon.com/config/latest/developerguide/iamrole-permissions.html) for more details.
+* `recordingGroup` - (Optional) Recording group - see below.
+
+### `recordingGroup`
+
+* `allSupported` - (Optional) Specifies whether AWS Config records configuration changes for every supported type of regional resource (which includes any new type that will become supported in the future). Conflicts with `resourceTypes`. Defaults to `true`.
+* `exclusionByResourceTypes` - (Optional) An object that specifies how AWS Config excludes resource types from being recorded by the configuration recorder. To use this option, you must set the `useOnly` field of `recordingStrategy` to `exclusionByResourceTypes`. Requires `all_supported = false`. Conflicts with `resourceTypes`.
+* `includeGlobalResourceTypes` - (Optional) Specifies whether AWS Config includes all supported types of _global resources_ with the resources that it records. Requires `all_supported = true`. Conflicts with `resourceTypes`.
+* `recordingStrategy` - (Optional) Recording Strategy - see below.
+* `resourceTypes` - (Optional) A list that specifies the types of AWS resources for which AWS Config records configuration changes (for example, `aws::ec2::instance` or `aws::cloudTrail::trail`). See [relevant part of AWS Docs](http://docs.aws.amazon.com/config/latest/APIReference/API_ResourceIdentifier.html#config-Type-ResourceIdentifier-resourceType) for available types. In order to use this attribute, `allSupported` must be set to `false`.
+
+#### `recordingStrategy`
+
+* `useOnly` - (Optional) The recording strategy for the configuration recorder. See [relevant part of AWS Docs](https://docs.aws.amazon.com/config/latest/APIReference/API_RecordingStrategy.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the recorder
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Recorder using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Configuration Recorder using the name. For example:
+
+```console
+% terraform import aws_config_configuration_recorder.foo example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/config_configuration_recorder_status.html.markdown b/website/docs/cdktf/typescript/r/config_configuration_recorder_status.html.markdown
new file mode 100644
index 00000000000..46d09b24ed9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/config_configuration_recorder_status.html.markdown
@@ -0,0 +1,139 @@
+---
+subcategory: "Config"
+layout: "aws"
+page_title: "AWS: aws_config_configuration_recorder_status"
+description: |-
+  Manages status of an AWS Config Configuration Recorder.
+---
+
+
+
+# Resource: aws_config_configuration_recorder_status
+
+Manages status (recording / stopped) of an AWS Config Configuration Recorder.
+ +~> **Note:** Starting Configuration Recorder requires a [Delivery Channel](/docs/providers/aws/r/config_delivery_channel.html) to be present. Use of `dependsOn` (as shown below) is recommended to avoid race conditions. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConfigurationRecorder } from "./.gen/providers/aws/config-configuration-recorder"; +import { ConfigConfigurationRecorderStatus } from "./.gen/providers/aws/config-configuration-recorder-status"; +import { ConfigDeliveryChannel } from "./.gen/providers/aws/config-delivery-channel"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const b = new S3Bucket(this, "b", { + bucket: "awsconfig-example", + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["config.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const p = new DataAwsIamPolicyDocument(this, "p", { + statement: [ + { + actions: ["s3:*"], + effect: "Allow", + resources: [b.arn, "${" + b.arn + "}/*"], + }, + ], + }); + const foo = new ConfigDeliveryChannel(this, "foo", { + name: "example", + s3BucketName: b.bucket, + }); + const r = new IamRole(this, "r", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "example-awsconfig", + }); + const awsIamRolePolicyP = new IamRolePolicy(this, "p_5", { + name: "awsconfig-example", + policy: Token.asString(p.json), + role: r.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyP.overrideLogicalId("p"); + new IamRolePolicyAttachment(this, "a", { + policyArn: "arn:aws:iam::aws:policy/service-role/AWS_ConfigRole", + role: r.name, + }); + const awsConfigConfigurationRecorderFoo = new ConfigConfigurationRecorder( + this, + "foo_7", + { + name: "example", + roleArn: r.arn, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigConfigurationRecorderFoo.overrideLogicalId("foo"); + const awsConfigConfigurationRecorderStatusFoo = + new ConfigConfigurationRecorderStatus(this, "foo_8", { + dependsOn: [foo], + isEnabled: true, + name: Token.asString(awsConfigConfigurationRecorderFoo.name), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigConfigurationRecorderStatusFoo.overrideLogicalId("foo"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the recorder +* `isEnabled` - (Required) Whether the configuration recorder should be enabled or disabled. 
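+
+Because this resource only manages status, recording can be stopped without destroying the recorder itself; a minimal sketch, assuming the recorder named `example` from the example above:
+
+```typescript
+// Hypothetical sketch - assumes a recorder named "example" already exists, as above.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ConfigConfigurationRecorderStatus } from "./.gen/providers/aws/config-configuration-recorder-status";
+class MySketchStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConfigConfigurationRecorderStatus(this, "paused", {
+      name: "example",
+      // false stops recording; the recorder resource is left in place.
+      isEnabled: false,
+    });
+  }
+}
+```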
+ +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Configuration Recorder Status using the name of the Configuration Recorder. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Configuration Recorder Status using the name of the Configuration Recorder. For example: + +```console +% terraform import aws_config_configuration_recorder_status.foo example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_conformance_pack.html.markdown b/website/docs/cdktf/typescript/r/config_conformance_pack.html.markdown new file mode 100644 index 00000000000..715e2e336ae --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_conformance_pack.html.markdown @@ -0,0 +1,147 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_conformance_pack" +description: |- + Manages a Config Conformance Pack +--- + + + +# Resource: aws_config_conformance_pack + +Manages a Config Conformance Pack. More information about this collection of Config rules and remediation actions can be found in the +[Conformance Packs](https://docs.aws.amazon.com/config/latest/developerguide/conformance-packs.html) documentation. +Sample Conformance Pack templates may be found in the +[AWS Config Rules Repository](https://github.com/awslabs/aws-config-rules/tree/master/aws-config-conformance-packs). + +~> **NOTE:** The account must have a Configuration Recorder with proper IAM permissions before the Conformance Pack will +successfully create or update. See also the +[`awsConfigConfigurationRecorder` resource](/docs/providers/aws/r/config_configuration_recorder.html). + +## Example Usage + +### Template Body + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConformancePack } from "./.gen/providers/aws/config-conformance-pack"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConfigConformancePack(this, "example", { + dependsOn: [awsConfigConfigurationRecorderExample], + inputParameter: [ + { + parameterName: "AccessKeysRotatedParameterMaxAccessKeyAge", + parameterValue: "90", + }, + ], + name: "example", + templateBody: + "Parameters:\n AccessKeysRotatedParameterMaxAccessKeyAge:\n Type: String\nResources:\n IAMPasswordPolicy:\n Properties:\n ConfigRuleName: IAMPasswordPolicy\n Source:\n Owner: AWS\n SourceIdentifier: IAM_PASSWORD_POLICY\n Type: AWS::Config::ConfigRule\n\n", + }); + } +} + +``` + +### Template S3 URI + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ConfigConformancePack } from "./.gen/providers/aws/config-conformance-pack"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const awsS3ObjectExample = new S3Object(this, "example_1", { + bucket: example.id, + content: + "Resources:\n IAMPasswordPolicy:\n Properties:\n ConfigRuleName: IAMPasswordPolicy\n Source:\n Owner: AWS\n SourceIdentifier: IAM_PASSWORD_POLICY\n Type: AWS::Config::ConfigRule\n\n", + key: "example-key", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + const awsConfigConformancePackExample = new ConfigConformancePack( + this, + "example_2", + { + dependsOn: [awsConfigConfigurationRecorderExample], + name: "example", + templateS3Uri: + "s3://${" + example.bucket + "}/${" + awsS3ObjectExample.key + "}", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigConformancePackExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +~> **Note:** If both `templateBody` and `templateS3Uri` are specified, AWS Config uses the `templateS3Uri` and ignores the `templateBody`. + +This argument supports the following arguments: + +* `name` - (Required, Forces new resource) The name of the conformance pack. Must begin with a letter and contain from 1 to 256 alphanumeric characters and hyphens. +* `deliveryS3Bucket` - (Optional) Amazon S3 bucket where AWS Config stores conformance pack templates. Maximum length of 63. +* `deliveryS3KeyPrefix` - (Optional) The prefix for the Amazon S3 bucket. Maximum length of 1024. +* `inputParameter` - (Optional) Set of configuration blocks describing input parameters passed to the conformance pack template. Documented below. When configured, the parameters must also be included in the `templateBody` or in the template stored in Amazon S3 if using `templateS3Uri`. +* `templateBody` - (Optional, required if `templateS3Uri` is not provided) A string containing full conformance pack template body. Maximum length of 51200. Drift detection is not possible with this argument. +* `templateS3Uri` - (Optional, required if `templateBody` is not provided) Location of file, e.g., `s3://bucketname/prefix`, containing the template body. The uri must point to the conformance pack template that is located in an Amazon S3 bucket in the same region as the conformance pack. Maximum length of 1024. Drift detection is not possible with this argument. + +### input_parameter Argument Reference + +The `inputParameter` configuration block supports the following arguments: + +* `parameterName` - (Required) The input key. +* `parameterValue` - (Required) The input value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the conformance pack. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Conformance Packs using the `name`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Config Conformance Packs using the `name`. For example: + +```console +% terraform import aws_config_conformance_pack.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_delivery_channel.html.markdown b/website/docs/cdktf/typescript/r/config_delivery_channel.html.markdown new file mode 100644 index 00000000000..45233a518bd --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_delivery_channel.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_delivery_channel" +description: |- + Provides an AWS Config Delivery Channel. +--- + + + +# Resource: aws_config_delivery_channel + +Provides an AWS Config Delivery Channel. + +~> **Note:** Delivery Channel requires a [Configuration Recorder](/docs/providers/aws/r/config_configuration_recorder.html) to be present. Use of `dependsOn` (as shown below) is recommended to avoid race conditions. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConfigurationRecorder } from "./.gen/providers/aws/config-configuration-recorder"; +import { ConfigDeliveryChannel } from "./.gen/providers/aws/config-delivery-channel"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const b = new S3Bucket(this, "b", { + bucket: "example-awsconfig", + forceDestroy: true, + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["config.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const p = new DataAwsIamPolicyDocument(this, "p", { + statement: [ + { + actions: ["s3:*"], + effect: "Allow", + resources: [b.arn, "${" + b.arn + "}/*"], + }, + ], + }); + const r = new IamRole(this, "r", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "awsconfig-example", + }); + const awsIamRolePolicyP = new IamRolePolicy(this, "p_4", { + name: "awsconfig-example", + policy: Token.asString(p.json), + role: r.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyP.overrideLogicalId("p"); + const foo = new ConfigConfigurationRecorder(this, "foo", { + name: "example", + roleArn: r.arn, + }); + const awsConfigDeliveryChannelFoo = new ConfigDeliveryChannel( + this, + "foo_6", + { + dependsOn: [foo], + name: "example", + s3BucketName: b.bucket, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsConfigDeliveryChannelFoo.overrideLogicalId("foo");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the delivery channel. Defaults to `default`. Changing it recreates the resource.
+* `s3BucketName` - (Required) The name of the S3 bucket used to store the configuration history.
+* `s3KeyPrefix` - (Optional) The prefix for the specified S3 bucket.
+* `s3KmsKeyArn` - (Optional) The ARN of the AWS KMS key used to encrypt objects delivered by AWS Config. Must belong to the same Region as the destination S3 bucket.
+* `snsTopicArn` - (Optional) The ARN of the SNS topic that AWS Config delivers notifications to.
+* `snapshotDeliveryProperties` - (Optional) Options for how AWS Config delivers configuration snapshots. See below.
+
+### `snapshotDeliveryProperties`
+
+* `deliveryFrequency` - (Optional) The frequency with which AWS Config recurringly delivers configuration snapshots, e.g., `oneHour` or `threeHours`. Valid values are listed [here](https://docs.aws.amazon.com/config/latest/APIReference/API_ConfigSnapshotDeliveryProperties.html#API_ConfigSnapshotDeliveryProperties_Contents).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the delivery channel.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Delivery Channel using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Delivery Channel using the name. For example:
+
+```console
+% terraform import aws_config_delivery_channel.foo example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/config_organization_conformance_pack.html.markdown b/website/docs/cdktf/typescript/r/config_organization_conformance_pack.html.markdown
new file mode 100644
index 00000000000..a5e35241787
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/config_organization_conformance_pack.html.markdown
@@ -0,0 +1,166 @@
+---
+subcategory: "Config"
+layout: "aws"
+page_title: "AWS: aws_config_organization_conformance_pack"
+description: |-
+  Manages a Config Organization Conformance Pack
+---
+
+
+
+# Resource: aws_config_organization_conformance_pack
+
+Manages a Config Organization Conformance Pack. More information can be found in the [Managing Conformance Packs Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/conformance-pack-organization-apis.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. Example conformance pack templates may be found in the [AWS Config Rules Repository](https://github.com/awslabs/aws-config-rules/tree/master/aws-config-conformance-packs).
+
+~> **NOTE:** This resource must be created in the Organization master account or a delegated administrator account, and the Organization must have all features enabled.
Every Organization account except those configured in the `excludedAccounts` argument must have a Configuration Recorder with proper IAM permissions before the Organization Conformance Pack will successfully create or update. See also the [`awsConfigConfigurationRecorder` resource](/docs/providers/aws/r/config_configuration_recorder.html). + +## Example Usage + +### Using Template Body + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigOrganizationConformancePack } from "./.gen/providers/aws/config-organization-conformance-pack"; +import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new OrganizationsOrganization(this, "example", { + awsServiceAccessPrincipals: ["config-multiaccountsetup.amazonaws.com"], + featureSet: "ALL", + }); + const awsConfigOrganizationConformancePackExample = + new ConfigOrganizationConformancePack(this, "example_1", { + dependsOn: [awsConfigConfigurationRecorderExample, example], + inputParameter: [ + { + parameterName: "AccessKeysRotatedParameterMaxAccessKeyAge", + parameterValue: "90", + }, + ], + name: "example", + templateBody: + "Parameters:\n AccessKeysRotatedParameterMaxAccessKeyAge:\n Type: String\nResources:\n IAMPasswordPolicy:\n Properties:\n ConfigRuleName: IAMPasswordPolicy\n Source:\n Owner: AWS\n SourceIdentifier: IAM_PASSWORD_POLICY\n Type: AWS::Config::ConfigRule\n\n", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigOrganizationConformancePackExample.overrideLogicalId("example"); + } +} + +``` + +### Using Template S3 URI + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigOrganizationConformancePack } from "./.gen/providers/aws/config-organization-conformance-pack"; +import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new OrganizationsOrganization(this, "example", { + awsServiceAccessPrincipals: ["config-multiaccountsetup.amazonaws.com"], + featureSet: "ALL", + }); + const awsS3BucketExample = new S3Bucket(this, "example_1", { + bucket: "example", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsS3BucketExample.overrideLogicalId("example"); + const awsS3ObjectExample = new S3Object(this, "example_2", { + bucket: Token.asString(awsS3BucketExample.id), + content: + "Resources:\n IAMPasswordPolicy:\n Properties:\n ConfigRuleName: IAMPasswordPolicy\n Source:\n Owner: AWS\n SourceIdentifier: IAM_PASSWORD_POLICY\n Type: AWS::Config::ConfigRule\n\n", + key: "example-key", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + const awsConfigOrganizationConformancePackExample = + new ConfigOrganizationConformancePack(this, "example_3", { + dependsOn: [awsConfigConfigurationRecorderExample, example], + name: "example", + templateS3Uri: + "s3://${" + + awsS3BucketExample.bucket + + "}/${" + + awsS3ObjectExample.key + + "}", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigOrganizationConformancePackExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required, Forces new resource) The name of the organization conformance pack. Must begin with a letter and contain from 1 to 128 alphanumeric characters and hyphens. +* `deliveryS3Bucket` - (Optional) Amazon S3 bucket where AWS Config stores conformance pack templates. Delivery bucket must begin with `awsconfigconforms` prefix. Maximum length of 63. +* `deliveryS3KeyPrefix` - (Optional) The prefix for the Amazon S3 bucket. Maximum length of 1024. +* `excludedAccounts` - (Optional) Set of AWS accounts to be excluded from an organization conformance pack while deploying a conformance pack. Maximum of 1000 accounts. +* `inputParameter` - (Optional) Set of configuration blocks describing input parameters passed to the conformance pack template. Documented below. When configured, the parameters must also be included in the `templateBody` or in the template stored in Amazon S3 if using `templateS3Uri`. +* `templateBody` - (Optional, Conflicts with `templateS3Uri`) A string containing full conformance pack template body. Maximum length of 51200. Drift detection is not possible with this argument. +* `templateS3Uri` - (Optional, Conflicts with `templateBody`) Location of file, e.g., `s3://bucketname/prefix`, containing the template body. The uri must point to the conformance pack template that is located in an Amazon S3 bucket in the same region as the conformance pack. Maximum length of 1024. Drift detection is not possible with this argument. + +### input_parameter Argument Reference + +The `inputParameter` configuration block supports the following arguments: + +* `parameterName` - (Required) The input key. +* `parameterValue` - (Required) The input value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the organization conformance pack. +* `id` - The name of the organization conformance pack. 
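+
+As a brief aside (a hedged sketch, not part of the generated examples above): when the conformance pack template is kept as a local file, cdktf's `Fn.file` (Terraform's `file` function) can load it into `templateBody` instead of embedding the YAML inline. The file path below is hypothetical.
+
+```typescript
+// Hedged sketch: load the conformance pack template from a local file.
+// Assumes ./templates/conformance-pack.yaml exists relative to the working directory.
+import { Construct } from "constructs";
+import { Fn, TerraformStack } from "cdktf";
+import { ConfigOrganizationConformancePack } from "./.gen/providers/aws/config-organization-conformance-pack";
+class TemplateFromFileStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConfigOrganizationConformancePack(this, "example", {
+      name: "example",
+      templateBody: Fn.file("templates/conformance-pack.yaml"),
+    });
+  }
+}
+```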
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `update` - (Default `10M`) +- `delete` - (Default `20M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Organization Conformance Packs using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Config Organization Conformance Packs using the `name`. For example: + +```console +% terraform import aws_config_organization_conformance_pack.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_organization_custom_policy_rule.html.markdown b/website/docs/cdktf/typescript/r/config_organization_custom_policy_rule.html.markdown new file mode 100644 index 00000000000..2e8eb13b150 --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_organization_custom_policy_rule.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_organization_custom_policy_rule" +description: |- + Terraform resource for managing an AWS Config Organization Custom Policy. +--- + + + +# Resource: aws_config_organization_custom_policy_rule + +Manages a Config Organization Custom Policy Rule. More information about these rules can be found in the [Enabling AWS Config Rules Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/config-rule-multi-account-deployment.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. For working with Organization Managed Rules (those invoking an AWS managed rule), see the [`awsConfigOrganizationManagedRule` resource](/docs/providers/aws/r/config_organization_managed_rule.html). + +~> **NOTE:** This resource must be created in the Organization master account and rules will include the master account unless its ID is added to the `excludedAccounts` argument. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { ConfigOrganizationCustomPolicyRule } from "./.gen/providers/aws/config-organization-custom-policy-rule";
+interface MyConfig {
+  triggerTypes: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new ConfigOrganizationCustomPolicyRule(this, "example", {
+      name: "example_rule_name",
+      policyRuntime: "guard-2.x.x",
+      policyText:
+        'let status = [\'ACTIVE\']\n\nrule tableisactive when\n resourceType == "AWS::DynamoDB::Table" {\n configuration.tableStatus == %status\n}\n\nrule checkcompliance when\n resourceType == "AWS::DynamoDB::Table"\n tableisactive {\n let pitr = supplementaryConfiguration.ContinuousBackupsDescription.pointInTimeRecoveryDescription.pointInTimeRecoveryStatus\n %pitr == "ENABLED"\n }\n\n',
+      resourceTypesScope: ["AWS::DynamoDB::Table"],
+      triggerTypes: config.triggerTypes,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) name of the rule
+* `policyText` - (Required) policy definition containing the logic for your organization AWS Config Custom Policy rule
+* `policyRuntime` - (Required) runtime system for your organization AWS Config Custom Policy rules
+* `triggerTypes` - (Required) List of notification types that trigger AWS Config to run an evaluation for the rule. Valid values: `configurationItemChangeNotification`, `oversizedConfigurationItemChangeNotification`
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the rule
+* `debugLogDeliveryAccounts` - (Optional) List of AWS account identifiers for which debug log delivery is enabled for the rule
+* `excludedAccounts` - (Optional) List of AWS account identifiers to exclude from the rule
+* `inputParameters` - (Optional) A string in JSON format that is passed to the AWS Config Rule Lambda Function
+* `maximumExecutionFrequency` - (Optional) Maximum frequency with which AWS Config runs evaluations for a rule, if the rule is triggered at a periodic frequency. Defaults to `twentyFourHours` for periodic frequency triggered rules. Valid values: `oneHour`, `threeHours`, `sixHours`, `twelveHours`, or `twentyFourHours`.
+* `resourceIdScope` - (Optional) Identifier of the AWS resource to evaluate
+* `resourceTypesScope` - (Optional) List of types of AWS resources to evaluate
+* `tagKeyScope` - (Optional, Required if `tagValueScope` is configured) Tag key of AWS resources to evaluate
+* `tagValueScope` - (Optional) Tag value of AWS resources to evaluate
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the rule
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `20M`)
+* `update` - (Default `20M`)
+* `delete` - (Default `20M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Config Organization Custom Policy Rule using the `name` argument.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a Config Organization Custom Policy Rule using the `name` argument. For example: + +```console +% terraform import aws_config_organization_custom_policy_rule.example example_rule_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_organization_custom_rule.html.markdown b/website/docs/cdktf/typescript/r/config_organization_custom_rule.html.markdown new file mode 100644 index 00000000000..27c82847ce4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_organization_custom_rule.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_organization_custom_rule" +description: |- + Manages a Config Organization Custom Rule +--- + + + +# Resource: aws_config_organization_custom_rule + +Manages a Config Organization Custom Rule. More information about these rules can be found in the [Enabling AWS Config Rules Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/config-rule-multi-account-deployment.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. For working with Organization Managed Rules (those invoking an AWS managed rule), see the [`awsConfigOrganizationManagedRule` resource](/docs/providers/aws/r/config_organization_managed_rule.html). + +~> **NOTE:** This resource must be created in the Organization master account and rules will include the master account unless its ID is added to the `excludedAccounts` argument. + +~> **NOTE:** The proper Lambda permission to allow the AWS Config service invoke the Lambda Function must be in place before the rule will successfully create or update. See also the [`awsLambdaPermission` resource](/docs/providers/aws/r/lambda_permission.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigOrganizationCustomRule } from "./.gen/providers/aws/config-organization-custom-rule"; +import { LambdaPermission } from "./.gen/providers/aws/lambda-permission"; +import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new LambdaPermission(this, "example", { + action: "lambda:InvokeFunction", + functionName: Token.asString(awsLambdaFunctionExample.arn), + principal: "config.amazonaws.com", + statementId: "AllowExecutionFromConfig", + }); + const awsOrganizationsOrganizationExample = new OrganizationsOrganization( + this, + "example_1", + { + awsServiceAccessPrincipals: ["config-multiaccountsetup.amazonaws.com"], + featureSet: "ALL", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsOrganizationsOrganizationExample.overrideLogicalId("example"); + const awsConfigOrganizationCustomRuleExample = + new ConfigOrganizationCustomRule(this, "example_2", { + dependsOn: [example, awsOrganizationsOrganizationExample], + lambdaFunctionArn: Token.asString(awsLambdaFunctionExample.arn), + name: "example", + triggerTypes: ["ConfigurationItemChangeNotification"], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigOrganizationCustomRuleExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `lambdaFunctionArn` - (Required) Amazon Resource Name (ARN) of the rule Lambda Function +* `name` - (Required) The name of the rule +* `triggerTypes` - (Required) List of notification types that trigger AWS Config to run an evaluation for the rule. Valid values: `configurationItemChangeNotification`, `oversizedConfigurationItemChangeNotification`, and `scheduledNotification` +* `description` - (Optional) Description of the rule +* `excludedAccounts` - (Optional) List of AWS account identifiers to exclude from the rule +* `inputParameters` - (Optional) A string in JSON format that is passed to the AWS Config Rule Lambda Function +* `maximumExecutionFrequency` - (Optional) The maximum frequency with which AWS Config runs evaluations for a rule, if the rule is triggered at a periodic frequency. Defaults to `twentyFourHours` for periodic frequency triggered rules. Valid values: `oneHour`, `threeHours`, `sixHours`, `twelveHours`, or `twentyFourHours`. +* `resourceIdScope` - (Optional) Identifier of the AWS resource to evaluate +* `resourceTypesScope` - (Optional) List of types of AWS resources to evaluate +* `tagKeyScope` - (Optional, Required if `tagValueScope` is configured) Tag key of AWS resources to evaluate +* `tagValueScope` - (Optional) Tag value of AWS resources to evaluate + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the rule + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5M`) +* `delete` - (Default `5M`) +* `update` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Organization Custom Rules using the name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Config Organization Custom Rules using the name. 
For example: + +```console +% terraform import aws_config_organization_custom_rule.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_organization_managed_rule.html.markdown b/website/docs/cdktf/typescript/r/config_organization_managed_rule.html.markdown new file mode 100644 index 00000000000..be2ff53612b --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_organization_managed_rule.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_organization_managed_rule" +description: |- + Manages a Config Organization Managed Rule +--- + + + +# Resource: aws_config_organization_managed_rule + +Manages a Config Organization Managed Rule. More information about these rules can be found in the [Enabling AWS Config Rules Across all Accounts in Your Organization](https://docs.aws.amazon.com/config/latest/developerguide/config-rule-multi-account-deployment.html) and [AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html) documentation. For working with Organization Custom Rules (those invoking a custom Lambda Function), see the [`awsConfigOrganizationCustomRule` resource](/docs/providers/aws/r/config_organization_custom_rule.html). + +~> **NOTE:** This resource must be created in the Organization master account and rules will include the master account unless its ID is added to the `excludedAccounts` argument. + +~> **NOTE:** Every Organization account except those configured in the `excludedAccounts` argument must have a Configuration Recorder with proper IAM permissions before the rule will successfully create or update. See also the [`awsConfigConfigurationRecorder` resource](/docs/providers/aws/r/config_configuration_recorder.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigOrganizationManagedRule } from "./.gen/providers/aws/config-organization-managed-rule"; +import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new OrganizationsOrganization(this, "example", { + awsServiceAccessPrincipals: ["config-multiaccountsetup.amazonaws.com"], + featureSet: "ALL", + }); + const awsConfigOrganizationManagedRuleExample = + new ConfigOrganizationManagedRule(this, "example_1", { + dependsOn: [example], + name: "example", + ruleIdentifier: "IAM_PASSWORD_POLICY", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigOrganizationManagedRuleExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the rule +* `ruleIdentifier` - (Required) Identifier of an available AWS Config Managed Rule to call. 
For available values, see the [List of AWS Config Managed Rules](https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html) documentation +* `description` - (Optional) Description of the rule +* `excludedAccounts` - (Optional) List of AWS account identifiers to exclude from the rule +* `inputParameters` - (Optional) A string in JSON format that is passed to the AWS Config Rule Lambda Function +* `maximumExecutionFrequency` - (Optional) The maximum frequency with which AWS Config runs evaluations for a rule, if the rule is triggered at a periodic frequency. Defaults to `twentyFourHours` for periodic frequency triggered rules. Valid values: `oneHour`, `threeHours`, `sixHours`, `twelveHours`, or `twentyFourHours`. +* `resourceIdScope` - (Optional) Identifier of the AWS resource to evaluate +* `resourceTypesScope` - (Optional) List of types of AWS resources to evaluate +* `tagKeyScope` - (Optional, Required if `tagValueScope` is configured) Tag key of AWS resources to evaluate +* `tagValueScope` - (Optional) Tag value of AWS resources to evaluate + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the rule + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5M`) +* `delete` - (Default `5M`) +* `update` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Config Organization Managed Rules using the name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Config Organization Managed Rules using the name. For example: + +```console +% terraform import aws_config_organization_managed_rule.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/config_remediation_configuration.html.markdown b/website/docs/cdktf/typescript/r/config_remediation_configuration.html.markdown new file mode 100644 index 00000000000..1e5f1374c7a --- /dev/null +++ b/website/docs/cdktf/typescript/r/config_remediation_configuration.html.markdown @@ -0,0 +1,145 @@ +--- +subcategory: "Config" +layout: "aws" +page_title: "AWS: aws_config_remediation_configuration" +description: |- + Provides an AWS Config Remediation Configuration. +--- + + + +# Resource: aws_config_remediation_configuration + +Provides an AWS Config Remediation Configuration. + +~> **Note:** Config Remediation Configuration requires an existing [Config Rule](/docs/providers/aws/r/config_config_rule.html) to be present. + +## Example Usage + +AWS managed rules can be used by setting the source owner to `aws` and the source identifier to the name of the managed rule. More information about AWS managed rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html). 
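+
+The remediation target is typically an SSM Automation document. As a hedged aside (not part of the original docs), the parameters such a document expects (these feed the `parameter` blocks in the example below) can be inspected with the AWS CLI:
+
+```console
+% aws ssm describe-document --name AWS-EnableS3BucketEncryption --query "Document.Parameters"
+```
+
+The following configuration wires those parameters into a remediation for an AWS managed rule: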
+ +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConfigConfigRule } from "./.gen/providers/aws/config-config-rule"; +import { ConfigRemediationConfiguration } from "./.gen/providers/aws/config-remediation-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const thisVar = new ConfigConfigRule(this, "this", { + name: "example", + source: { + owner: "AWS", + sourceIdentifier: "S3_BUCKET_VERSIONING_ENABLED", + }, + }); + const awsConfigRemediationConfigurationThis = + new ConfigRemediationConfiguration(this, "this_1", { + automatic: true, + configRuleName: thisVar.name, + executionControls: { + ssmControls: { + concurrentExecutionRatePercentage: 25, + errorPercentage: 20, + }, + }, + maximumAutomaticAttempts: 10, + parameter: [ + { + name: "AutomationAssumeRole", + staticValue: "arn:aws:iam::875924563244:role/security_config", + }, + { + name: "BucketName", + resourceValue: "RESOURCE_ID", + }, + { + name: "SSEAlgorithm", + staticValue: "AES256", + }, + ], + resourceType: "AWS::S3::Bucket", + retryAttemptSeconds: 600, + targetId: "AWS-EnableS3BucketEncryption", + targetType: "SSM_DOCUMENT", + targetVersion: "1", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConfigRemediationConfigurationThis.overrideLogicalId("this"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `configRuleName` - (Required) Name of the AWS Config rule. +* `targetId` - (Required) Target ID is the name of the public document. +* `targetType` - (Required) Type of the target. Target executes remediation. For example, SSM document. + +The following arguments are optional: + +* `automatic` - (Optional) Remediation is triggered automatically if `true`. +* `executionControls` - (Optional) Configuration block for execution controls. See below. +* `maximumAutomaticAttempts` - (Optional) Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5. +* `parameter` - (Optional) Can be specified multiple times for each parameter. Each parameter block supports arguments below. +* `resourceType` - (Optional) Type of resource. +* `retryAttemptSeconds` - (Optional) Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds. +* `targetVersion` - (Optional) Version of the target. For example, version of the SSM document + +### `executionControls` + +* `ssmControls` - (Required) Configuration block for SSM controls. See below. + +#### `ssmControls` + +One or both of these values are required. + +* `concurrentExecutionRatePercentage` - (Optional) Maximum percentage of remediation actions allowed to run in parallel on the non-compliant resources for that specific rule. The default value is 10%. +* `errorPercentage` - (Optional) Percentage of errors that are allowed before SSM stops running automations on non-compliant resources for that specific rule. The default is 50%. + +### `parameter` + +The value is either a dynamic (resource) value or a static value. You must select either a dynamic value or a static value. + +* `name` - (Required) Name of the attribute. 
+* `resourceValue` - (Optional) Value is dynamic and changes at run-time. +* `staticValue` - (Optional) Value is static and does not change at run-time. +* `staticValues` - (Optional) List of static values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Config Remediation Configuration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Remediation Configurations using the name config_rule_name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Remediation Configurations using the name config_rule_name. For example: + +```console +% terraform import aws_config_remediation_configuration.this example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_bot_association.markdown b/website/docs/cdktf/typescript/r/connect_bot_association.markdown new file mode 100644 index 00000000000..5f3d700e26c --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_bot_association.markdown @@ -0,0 +1,162 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_bot_association" +description: |- + Associates an Amazon Connect instance to an Amazon Lex (V1) bot +--- + + + +# Resource: aws_connect_bot_association + +Allows the specified Amazon Connect instance to access the specified Amazon Lex (V1) bot. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) and [Add an Amazon Lex bot](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-lex.html). + +~> **NOTE:** This resource only currently supports Amazon Lex (V1) Associations. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectBotAssociation } from "./.gen/providers/aws/connect-bot-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectBotAssociation(this, "example", { + instanceId: Token.asString(awsConnectInstanceExample.id), + lexBot: { + lexRegion: "us-west-2", + name: "Test", + }, + }); + } +} + +``` + +### Including a sample Lex bot + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ConnectBotAssociation } from "./.gen/providers/aws/connect-bot-association"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { LexBot } from "./.gen/providers/aws/lex-bot"; +import { LexIntent } from "./.gen/providers/aws/lex-intent"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new LexIntent(this, "example", { + createVersion: true, + fulfillmentActivity: { + type: "ReturnIntent", + }, + name: "connect_lex_intent", + sampleUtterances: ["I would like to pick up flowers."], + }); + const current = new DataAwsRegion(this, "current", {}); + const awsLexBotExample = new LexBot(this, "example_2", { + abortStatement: { + message: [ + { + content: "Sorry, I am not able to assist at this time.", + contentType: "PlainText", + }, + ], + }, + childDirected: false, + clarificationPrompt: { + maxAttempts: 2, + message: [ + { + content: "I didn't understand you, what would you like to do?", + contentType: "PlainText", + }, + ], + }, + intent: [ + { + intentName: example.name, + intentVersion: "1", + }, + ], + name: "connect_lex_bot", + processBehavior: "BUILD", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLexBotExample.overrideLogicalId("example"); + const awsConnectBotAssociationExample = new ConnectBotAssociation( + this, + "example_3", + { + instanceId: Token.asString(awsConnectInstanceExample.id), + lexBot: { + lexRegion: Token.asString(current.name), + name: Token.asString(awsLexBotExample.name), + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsConnectBotAssociationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instanceId` - (Required) The identifier of the Amazon Connect instance. You can find the instanceId in the ARN of the instance. +* `lexBot` - (Required) Configuration information of an Amazon Lex (V1) bot. Detailed below. + +### lex_bot + +The `lexBot` configuration block supports the following: + +* `name` - (Required) The name of the Amazon Lex (V1) bot. +* `lexRegion` - (Optional) The Region that the Amazon Lex (V1) bot was created in. Defaults to current region. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Connect instance ID, Lex (V1) bot name, and Lex (V1) bot region separated by colons (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsConnectBotAssociation` using the Amazon Connect instance ID, Lex (V1) bot name, and Lex (V1) bot region separated by colons (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsConnectBotAssociation` using the Amazon Connect instance ID, Lex (V1) bot name, and Lex (V1) bot region separated by colons (`:`). 
For example: + +```console +% terraform import aws_connect_bot_association.example aaaaaaaa-bbbb-cccc-dddd-111111111111:Example:us-west-2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_contact_flow.html.markdown b/website/docs/cdktf/typescript/r/connect_contact_flow.html.markdown new file mode 100644 index 00000000000..9046573bf00 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_contact_flow.html.markdown @@ -0,0 +1,164 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_contact_flow" +description: |- + Provides details about a specific Amazon Connect Contact Flow. +--- + + + +# Resource: aws_connect_contact_flow + +Provides an Amazon Connect Contact Flow resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +This resource embeds or references Contact Flows specified in Amazon Connect Contact Flow Language. For more information see +[Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html) + +!> **WARN:** Contact Flows exported from the Console [Contact Flow import/export](https://docs.aws.amazon.com/connect/latest/adminguide/contact-flow-import-export.html) are not in the Amazon Connect Contact Flow Language and can not be used with this resource. Instead, the recommendation is to use the AWS CLI [`describeContactFlow`](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/connect/describe-contact-flow.html). +See [example](#with-external-content) below which uses `jq` to extract the `content` attribute and saves it to a local file. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ConnectContactFlow } from "./.gen/providers/aws/connect-contact-flow"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectContactFlow(this, "test", { + content: Token.asString( + Fn.jsonencode({ + Actions: [ + { + Identifier: "12345678-1234-1234-1234-123456789012", + Parameters: { + Text: "Thanks for calling the sample flow!", + }, + Transitions: { + Conditions: [], + Errors: [], + NextAction: "abcdef-abcd-abcd-abcd-abcdefghijkl", + }, + Type: "MessageParticipant", + }, + { + Identifier: "abcdef-abcd-abcd-abcd-abcdefghijkl", + Parameters: {}, + Transitions: {}, + Type: "DisconnectParticipant", + }, + ], + StartAction: "12345678-1234-1234-1234-123456789012", + Version: "2019-10-30", + }) + ), + description: "Test Contact Flow Description", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "Test", + tags: { + Application: "Terraform", + Method: "Create", + Name: "Test Contact Flow", + }, + type: "CONTACT_FLOW", + }); + } +} + +``` + +### With External Content + +Use the AWS CLI to extract Contact Flow Content: + +```console +% aws connect describe-contact-flow --instance-id 1b3c5d8-1b3c-1b3c-1b3c-1b3c5d81b3c5 --contact-flow-id c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 --region us-west-2 | jq '.ContactFlow.Content | fromjson' > contact_flow.json +``` + +Use the generated file as input: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectContactFlow } from "./.gen/providers/aws/connect-contact-flow"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectContactFlow(this, "test", { + contentHash: Token.asString(Fn.filebase64sha256("contact_flow.json")), + description: "Test Contact Flow Description", + filename: "contact_flow.json", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "Test", + tags: { + Application: "Terraform", + Method: "Create", + Name: "Test Contact Flow", + }, + type: "CONTACT_FLOW", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Optional) Specifies the content of the Contact Flow, provided as a JSON string, written in Amazon Connect Contact Flow Language. If defined, the `filename` argument cannot be used. +* `contentHash` - (Optional) Used to trigger updates. Must be set to a base64-encoded SHA256 hash of the Contact Flow source specified with `filename`. The usual way to set this is filebase64sha256("mycontact_flow.json") (Terraform 0.11.12 and later) or base64sha256(file("mycontact_flow.json")) (Terraform 0.11.11 and earlier), where "mycontact_flow.json" is the local filename of the Contact Flow source. +* `description` - (Optional) Specifies the description of the Contact Flow. +* `filename` - (Optional) The path to the Contact Flow source within the local filesystem. Conflicts with `content`. +* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) Specifies the name of the Contact Flow. +* `tags` - (Optional) Tags to apply to the Contact Flow. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional, Forces new resource) Specifies the type of the Contact Flow. Defaults to `contactFlow`. Allowed Values are: `contactFlow`, `customerQueue`, `customerHold`, `customerWhisper`, `agentHold`, `agentWhisper`, `outboundWhisper`, `agentTransfer`, `queueTransfer`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Contact Flow. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Contact Flow separated by a colon (`:`). +* `contactFlowId` - The identifier of the Contact Flow. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Contact Flows using the `instanceId` and `contactFlowId` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect Contact Flows using the `instanceId` and `contactFlowId` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_contact_flow.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_contact_flow_module.html.markdown b/website/docs/cdktf/typescript/r/connect_contact_flow_module.html.markdown new file mode 100644 index 00000000000..1cbf2a9ae55 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_contact_flow_module.html.markdown @@ -0,0 +1,179 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_contact_flow_module" +description: |- + Provides details about a specific Amazon Connect Contact Flow Module. +--- + + + +# Resource: aws_connect_contact_flow_module + +Provides an Amazon Connect Contact Flow Module resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +This resource embeds or references Contact Flows Modules specified in Amazon Connect Contact Flow Language. For more information see +[Amazon Connect Flow language](https://docs.aws.amazon.com/connect/latest/adminguide/flow-language.html) + +!> **WARN:** Contact Flow Modules exported from the Console [See Contact Flow import/export which is the same for Contact Flow Modules](https://docs.aws.amazon.com/connect/latest/adminguide/contact-flow-import-export.html) are not in the Amazon Connect Contact Flow Language and can not be used with this resource. Instead, the recommendation is to use the AWS CLI [`describeContactFlowModule`](https://docs.aws.amazon.com/cli/latest/reference/connect/describe-contact-flow-module.html). 
+See [example](#with-external-content) below which uses `jq` to extract the `content` attribute and saves it to a local file. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectContactFlowModule } from "./.gen/providers/aws/connect-contact-flow-module"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectContactFlowModule(this, "example", { + content: Token.asString( + Fn.jsonencode({ + Actions: [ + { + Identifier: "12345678-1234-1234-1234-123456789012", + Parameters: { + Text: "Hello contact flow module", + }, + Transitions: { + Conditions: [], + Errors: [], + NextAction: "abcdef-abcd-abcd-abcd-abcdefghijkl", + }, + Type: "MessageParticipant", + }, + { + Identifier: "abcdef-abcd-abcd-abcd-abcdefghijkl", + Parameters: {}, + Transitions: {}, + Type: "DisconnectParticipant", + }, + ], + Settings: { + InputParameters: [], + OutputParameters: [], + Transitions: [ + { + Description: "", + DisplayName: "Success", + ReferenceName: "Success", + }, + { + Description: "", + DisplayName: "Error", + ReferenceName: "Error", + }, + ], + }, + StartAction: "12345678-1234-1234-1234-123456789012", + Version: "2019-10-30", + }) + ), + description: "Example Contact Flow Module Description", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "Example", + tags: { + Application: "Terraform", + Method: "Create", + Name: "Example Contact Flow Module", + }, + }); + } +} + +``` + +### With External Content + +Use the AWS CLI to extract Contact Flow Content: + +```console +% aws connect describe-contact-flow-module --instance-id 1b3c5d8-1b3c-1b3c-1b3c-1b3c5d81b3c5 --contact-flow-module-id c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 --region us-west-2 | jq '.ContactFlowModule.Content | fromjson' > contact_flow_module.json +``` + +Use the generated file as input: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectContactFlowModule } from "./.gen/providers/aws/connect-contact-flow-module"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectContactFlowModule(this, "example", { + contentHash: Token.asString( + Fn.filebase64sha256("contact_flow_module.json") + ), + description: "Example Contact Flow Module Description", + filename: "contact_flow_module.json", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "Example", + tags: { + Application: "Terraform", + Method: "Create", + Name: "Example Contact Flow Module", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Optional) Specifies the content of the Contact Flow Module, provided as a JSON string, written in Amazon Connect Contact Flow Language. If defined, the `filename` argument cannot be used. +* `contentHash` - (Optional) Used to trigger updates. 
Must be set to a base64-encoded SHA256 hash of the Contact Flow Module source specified with `filename`. The usual way to set this is filebase64sha256("contact_flow_module.json") (Terraform 0.11.12 and later) or base64sha256(file("contact_flow_module.json")) (Terraform 0.11.11 and earlier), where "contact_flow_module.json" is the local filename of the Contact Flow Module source. +* `description` - (Optional) Specifies the description of the Contact Flow Module. +* `filename` - (Optional) The path to the Contact Flow Module source within the local filesystem. Conflicts with `content`. +* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) Specifies the name of the Contact Flow Module. +* `tags` - (Optional) Tags to apply to the Contact Flow Module. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Contact Flow Module. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Contact Flow Module separated by a colon (`:`). +* `contactFlowModuleId` - The identifier of the Contact Flow Module. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Contact Flow Modules using the `instanceId` and `contactFlowModuleId` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect Contact Flow Modules using the `instanceId` and `contactFlowModuleId` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_contact_flow_module.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_hours_of_operation.html.markdown b/website/docs/cdktf/typescript/r/connect_hours_of_operation.html.markdown new file mode 100644 index 00000000000..79ca2e0961e --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_hours_of_operation.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_hours_of_operation" +description: |- + Provides details about a specific Amazon Connect Hours of Operation. +--- + + + +# Resource: aws_connect_hours_of_operation + +Provides an Amazon Connect Hours of Operation resource. 
For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectHoursOfOperation } from "./.gen/providers/aws/connect-hours-of-operation";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectHoursOfOperation(this, "test", {
+      config: [
+        {
+          day: "MONDAY",
+          endTime: {
+            hours: 23,
+            minutes: 8,
+          },
+          startTime: {
+            hours: 8,
+            minutes: 0,
+          },
+        },
+        {
+          day: "TUESDAY",
+          endTime: {
+            hours: 21,
+            minutes: 0,
+          },
+          startTime: {
+            hours: 9,
+            minutes: 0,
+          },
+        },
+      ],
+      description: "Monday office hours",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Office Hours",
+      tags: {
+        Name: "Example Hours of Operation",
+      },
+      timeZone: "EST",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `config` - (Required) One or more config blocks which define the configuration information for the hours of operation: day, start time, and end time. Config blocks are documented below.
+* `description` - (Optional) Specifies the description of the Hours of Operation.
+* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `name` - (Required) Specifies the name of the Hours of Operation.
+* `tags` - (Optional) Tags to apply to the Hours of Operation. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `timeZone` - (Required) Specifies the time zone of the Hours of Operation.
+
+A `config` block supports the following arguments:
+
+* `day` - (Required) Specifies the day that the hours of operation applies to.
+* `endTime` - (Required) An end time block specifies the time that your contact center closes. The `endTime` is documented below.
+* `startTime` - (Required) A start time block specifies the time that your contact center opens. The `startTime` is documented below.
+
+An `endTime` block supports the following arguments:
+
+* `hours` - (Required) Specifies the hour of closing.
+* `minutes` - (Required) Specifies the minute of closing.
+
+A `startTime` block supports the following arguments:
+
+* `hours` - (Required) Specifies the hour of opening.
+* `minutes` - (Required) Specifies the minute of opening.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Hours of Operation.
+* `hoursOfOperationId` - The identifier for the hours of operation.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Hours of Operation separated by a colon (`:`).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
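+
+As an illustrative, hypothetical sketch (not from the generated docs): the exported `hoursOfOperationId` is typically consumed by other Connect resources, for example an `aws_connect_queue`. The queue, names, and instance ID below are placeholders.
+
+```typescript
+// Hedged sketch: reference the exported hoursOfOperationId from a Connect queue.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ConnectHoursOfOperation } from "./.gen/providers/aws/connect-hours-of-operation";
+import { ConnectQueue } from "./.gen/providers/aws/connect-queue";
+class HoursWithQueueStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const hours = new ConnectHoursOfOperation(this, "hours", {
+      config: [
+        {
+          day: "MONDAY",
+          endTime: { hours: 17, minutes: 0 },
+          startTime: { hours: 8, minutes: 0 },
+        },
+      ],
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Office Hours",
+      timeZone: "EST",
+    });
+    // The queue opens and closes according to the hours of operation above.
+    new ConnectQueue(this, "queue", {
+      hoursOfOperationId: hours.hoursOfOperationId,
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Office Hours Queue",
+    });
+  }
+}
+```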
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Hours of Operations using the `instanceId` and `hoursOfOperationId` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect Hours of Operations using the `instanceId` and `hoursOfOperationId` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_hours_of_operation.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_instance.html.markdown b/website/docs/cdktf/typescript/r/connect_instance.html.markdown new file mode 100644 index 00000000000..42897279ed8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_instance.html.markdown @@ -0,0 +1,149 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_instance" +description: |- + Provides details about a specific Connect Instance. +--- + + + +# Resource: aws_connect_instance + +Provides an Amazon Connect instance resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +!> **WARN:** Amazon Connect enforces a limit of [100 combined instance creation and deletions every 30 days](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-service-limits.html#feature-limits). For example, if you create 80 instances and delete 20 of them, you must wait 30 days to create or delete another instance. Use care when creating or deleting instances. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectInstance } from "./.gen/providers/aws/connect-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectInstance(this, "test", { + identityManagementType: "CONNECT_MANAGED", + inboundCallsEnabled: true, + instanceAlias: "friendly-name-connect", + outboundCallsEnabled: true, + }); + } +} + +``` + +## Example Usage with Existing Active Directory + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { ConnectInstance } from "./.gen/providers/aws/connect-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectInstance(this, "test", {
+      directoryId: Token.asString(awsDirectoryServiceDirectoryTest.id),
+      identityManagementType: "EXISTING_DIRECTORY",
+      inboundCallsEnabled: true,
+      instanceAlias: "friendly-name-connect",
+      outboundCallsEnabled: true,
+    });
+  }
+}
+
+```
+
+## Example Usage with SAML
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectInstance } from "./.gen/providers/aws/connect-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectInstance(this, "test", {
+      identityManagementType: "SAML",
+      inboundCallsEnabled: true,
+      instanceAlias: "friendly-name-connect",
+      outboundCallsEnabled: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `autoResolveBestVoicesEnabled` - (Optional) Specifies whether auto resolve best voices is enabled. Defaults to `true`.
+* `contactFlowLogsEnabled` - (Optional) Specifies whether contact flow logs are enabled. Defaults to `false`.
+* `contactLensEnabled` - (Optional) Specifies whether contact lens is enabled. Defaults to `true`.
+* `directoryId` - (Optional) The identifier for the directory if `identityManagementType` is `existingDirectory`.
+* `earlyMediaEnabled` - (Optional) Specifies whether early media for outbound calls is enabled. Defaults to `true` if outbound calls are enabled.
+* `identityManagementType` - (Required) Specifies the identity management type attached to the instance. Allowed Values are: `saml`, `connectManaged`, `existingDirectory`.
+* `inboundCallsEnabled` - (Required) Specifies whether inbound calls are enabled.
+* `instanceAlias` - (Optional) Specifies the name of the instance. Required if `directoryId` not specified.
+* `multiPartyConferenceEnabled` - (Optional) Specifies whether multi-party calls/conference is enabled. Defaults to `false`.
+* `outboundCallsEnabled` - (Required) Specifies whether outbound calls are enabled.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the instance.
+* `arn` - Amazon Resource Name (ARN) of the instance.
+* `createdTime` - When the instance was created.
+* `serviceRole` - The service role of the instance.
+* `status` - The state of the instance.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5M`)
+* `delete` - (Default `5M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Connect instances using the `id`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Connect instances using the `id`. For example: + +```console +% terraform import aws_connect_instance.example f1288a1f-6193-445a-b47e-af739b2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_instance_storage_config.html.markdown b/website/docs/cdktf/typescript/r/connect_instance_storage_config.html.markdown new file mode 100644 index 00000000000..bb2f66308a8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_instance_storage_config.html.markdown @@ -0,0 +1,258 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_instance_storage_config" +description: |- + Provides details about a specific Amazon Connect Instance Storage Config. +--- + + + +# Resource: aws_connect_instance_storage_config + +Provides an Amazon Connect Instance Storage Config resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Storage Config Kinesis Firehose Config + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectInstanceStorageConfig } from "./.gen/providers/aws/connect-instance-storage-config"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectInstanceStorageConfig(this, "example", { + instanceId: Token.asString(awsConnectInstanceExample.id), + resourceType: "CONTACT_TRACE_RECORDS", + storageConfig: { + kinesisFirehoseConfig: { + firehoseArn: Token.asString( + awsKinesisFirehoseDeliveryStreamExample.arn + ), + }, + storageType: "KINESIS_FIREHOSE", + }, + }); + } +} + +``` + +### Storage Config Kinesis Stream Config + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectInstanceStorageConfig } from "./.gen/providers/aws/connect-instance-storage-config"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectInstanceStorageConfig(this, "example", { + instanceId: Token.asString(awsConnectInstanceExample.id), + resourceType: "CONTACT_TRACE_RECORDS", + storageConfig: { + kinesisStreamConfig: { + streamArn: Token.asString(awsKinesisStreamExample.arn), + }, + storageType: "KINESIS_STREAM", + }, + }); + } +} + +``` + +### Storage Config Kinesis Video Stream Config + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
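+ * (The `awsConnectInstanceExample` and `awsKmsKeyExample` identifiers below
+ * are assumed to reference an `aws_connect_instance` and an `aws_kms_key`
+ * defined elsewhere in this stack.)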
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectInstanceStorageConfig } from "./.gen/providers/aws/connect-instance-storage-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectInstanceStorageConfig(this, "example", {
+      instanceId: Token.asString(awsConnectInstanceExample.id),
+      resourceType: "MEDIA_STREAMS",
+      storageConfig: {
+        kinesisVideoStreamConfig: {
+          encryptionConfig: {
+            encryptionType: "KMS",
+            keyId: Token.asString(awsKmsKeyExample.arn),
+          },
+          prefix: "example",
+          retentionPeriodHours: 3,
+        },
+        storageType: "KINESIS_VIDEO_STREAM",
+      },
+    });
+  }
+}
+
+```
+
+### Storage Config S3 Config
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectInstanceStorageConfig } from "./.gen/providers/aws/connect-instance-storage-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectInstanceStorageConfig(this, "example", {
+      instanceId: Token.asString(awsConnectInstanceExample.id),
+      resourceType: "CHAT_TRANSCRIPTS",
+      storageConfig: {
+        s3Config: {
+          bucketName: Token.asString(awsS3BucketExample.id),
+          bucketPrefix: "example",
+        },
+        storageType: "S3",
+      },
+    });
+  }
+}
+
+```
+
+### Storage Config S3 Config with Encryption Config
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectInstanceStorageConfig } from "./.gen/providers/aws/connect-instance-storage-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectInstanceStorageConfig(this, "example", {
+      instanceId: Token.asString(awsConnectInstanceExample.id),
+      resourceType: "CHAT_TRANSCRIPTS",
+      storageConfig: {
+        s3Config: {
+          bucketName: Token.asString(awsS3BucketExample.id),
+          bucketPrefix: "example",
+          encryptionConfig: {
+            encryptionType: "KMS",
+            keyId: Token.asString(awsKmsKeyExample.arn),
+          },
+        },
+        storageType: "S3",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `resourceType` - (Required) A valid resource type. Valid Values: `AGENT_EVENTS` | `ATTACHMENTS` | `CALL_RECORDINGS` | `CHAT_TRANSCRIPTS` | `CONTACT_EVALUATIONS` | `CONTACT_TRACE_RECORDS` | `MEDIA_STREAMS` | `REAL_TIME_CONTACT_ANALYSIS_SEGMENTS` | `SCHEDULED_REPORTS`.
+* `storageConfig` - (Required) Specifies the storage configuration options for the Connect Instance. [Documented below](#storage_config).
+
+### `storageConfig`
+
+The `storageConfig` configuration block supports the following arguments:
+
+* `kinesisFirehoseConfig` - (Required if `storageType` is set to `KINESIS_FIREHOSE`) A block that specifies the configuration of the Kinesis Firehose delivery stream. [Documented below](#kinesis_firehose_config).
+* `kinesisStreamConfig` - (Required if `storageType` is set to `KINESIS_STREAM`) A block that specifies the configuration of the Kinesis data stream. [Documented below](#kinesis_stream_config).
+* `kinesisVideoStreamConfig` - (Required if `storageType` is set to `KINESIS_VIDEO_STREAM`) A block that specifies the configuration of the Kinesis video stream. [Documented below](#kinesis_video_stream_config).
+* `s3Config` - (Required if `storageType` is set to `S3`) A block that specifies the configuration of the S3 bucket. [Documented below](#s3_config).
+* `storageType` - (Required) A valid storage type. Valid Values: `S3` | `KINESIS_VIDEO_STREAM` | `KINESIS_STREAM` | `KINESIS_FIREHOSE`.
+
+#### `kinesisFirehoseConfig`
+
+The `kinesisFirehoseConfig` configuration block supports the following arguments:
+
+* `firehoseArn` - (Required) The Amazon Resource Name (ARN) of the delivery stream.
+
+#### `kinesisStreamConfig`
+
+The `kinesisStreamConfig` configuration block supports the following arguments:
+
+* `streamArn` - (Required) The Amazon Resource Name (ARN) of the data stream.
+
+#### `kinesisVideoStreamConfig`
+
+The `kinesisVideoStreamConfig` configuration block supports the following arguments:
+
+* `encryptionConfig` - (Required) The encryption configuration. [Documented below](#encryption_config).
+* `prefix` - (Required) The prefix of the video stream. Minimum length of `1`. Maximum length of `128`. When read from the state, the value returned includes additional details that the API appends to the `prefix`.
+* `retentionPeriodHours` - (Required) The number of hours data is retained in the stream. Kinesis Video Streams retains the data in a data store that is associated with the stream. Minimum value of `0`. Maximum value of `87600`. A value of `0` indicates that the stream does not persist data.
+
+#### `s3Config`
+
+The `s3Config` configuration block supports the following arguments:
+
+* `bucketName` - (Required) The S3 bucket name.
+* `bucketPrefix` - (Required) The S3 bucket prefix.
+* `encryptionConfig` - (Optional) The encryption configuration. [Documented below](#encryption_config).
+
+#### `encryptionConfig`
+
+The `encryptionConfig` configuration block supports the following arguments:
+
+* `encryptionType` - (Required) The type of encryption. Valid Values: `KMS`.
+* `keyId` - (Required) The full ARN of the encryption key. Be sure to provide the full ARN of the encryption key, not just the ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `associationId` - The existing association identifier that uniquely identifies the resource type and storage config for the given instance ID.
+* `id` - The identifier of the hosting Amazon Connect Instance, `associationId`, and `resourceType` separated by a colon (`:`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Instance Storage Configs using the `instanceId`, `associationId`, and `resourceType` separated by a colon (`:`). 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amazon Connect Instance Storage Configs using the `instanceId`, `associationId`, and `resourceType` separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_connect_instance_storage_config.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5:CHAT_TRANSCRIPTS
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/connect_lambda_function_association.markdown b/website/docs/cdktf/typescript/r/connect_lambda_function_association.markdown
new file mode 100644
index 00000000000..679f72e5b28
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/connect_lambda_function_association.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_lambda_function_association"
+description: |-
+  Provides details about a specific Connect Lambda Function Association.
+---
+
+
+
+# Resource: aws_connect_lambda_function_association
+
+Provides an Amazon Connect Lambda Function Association. For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) and [Invoke AWS Lambda functions](https://docs.aws.amazon.com/connect/latest/adminguide/connect-lambda-functions.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectLambdaFunctionAssociation } from "./.gen/providers/aws/connect-lambda-function-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectLambdaFunctionAssociation(this, "example", {
+      functionArn: Token.asString(awsLambdaFunctionExample.arn),
+      instanceId: Token.asString(awsConnectInstanceExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `functionArn` - (Required) Amazon Resource Name (ARN) of the Lambda Function, omitting any version or alias qualifier.
+* `instanceId` - (Required) The identifier of the Amazon Connect instance. You can find the `instanceId` in the ARN of the instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Connect instance ID and Lambda Function ARN separated by a comma (`,`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `aws_connect_lambda_function_association` using the `instanceId` and `functionArn` separated by a comma (`,`). 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `aws_connect_lambda_function_association` using the `instanceId` and `functionArn` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_connect_lambda_function_association.example aaaaaaaa-bbbb-cccc-dddd-111111111111,arn:aws:lambda:us-west-2:123456789123:function:example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/connect_phone_number.html.markdown b/website/docs/cdktf/typescript/r/connect_phone_number.html.markdown
new file mode 100644
index 00000000000..dd2e17056e2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/connect_phone_number.html.markdown
@@ -0,0 +1,153 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_phone_number"
+description: |-
+  Provides details about a specific Amazon Connect Phone Number.
+---
+
+
+
+# Resource: aws_connect_phone_number
+
+Provides an Amazon Connect Phone Number resource. For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectPhoneNumber } from "./.gen/providers/aws/connect-phone-number";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectPhoneNumber(this, "example", {
+      countryCode: "US",
+      tags: {
+        hello: "world",
+      },
+      targetArn: Token.asString(awsConnectInstanceExample.arn),
+      type: "DID",
+    });
+  }
+}
+
+```
+
+### Description
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectPhoneNumber } from "./.gen/providers/aws/connect-phone-number";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectPhoneNumber(this, "example", {
+      countryCode: "US",
+      description: "example description",
+      targetArn: Token.asString(awsConnectInstanceExample.arn),
+      type: "DID",
+    });
+  }
+}
+
+```
+
+### Prefix to filter phone numbers
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
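+ * (Assumes an existing `aws_connect_instance`; the `prefix` below must
+ * include the country code, e.g. `+1` for US numbers.)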
+ */
+import { ConnectPhoneNumber } from "./.gen/providers/aws/connect-phone-number";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectPhoneNumber(this, "example", {
+      countryCode: "US",
+      prefix: "+18005",
+      targetArn: Token.asString(awsConnectInstanceExample.arn),
+      type: "DID",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `countryCode` - (Required, Forces new resource) The ISO country code. For a list of valid values, refer to [PhoneNumberCountryCode](https://docs.aws.amazon.com/connect/latest/APIReference/API_SearchAvailablePhoneNumbers.html#connect-SearchAvailablePhoneNumbers-request-PhoneNumberCountryCode).
+* `description` - (Optional, Forces new resource) The description of the phone number.
+* `prefix` - (Optional, Forces new resource) The prefix of the phone number that is used to filter available phone numbers. If provided, it must contain `+` as part of the country code. Do not specify this argument when importing the resource.
+* `tags` - (Optional) Tags to apply to the Phone Number. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `targetArn` - (Required) The Amazon Resource Name (ARN) of the Amazon Connect instance that the phone number is claimed to.
+* `type` - (Required, Forces new resource) The type of phone number. Valid Values: `TOLL_FREE` | `DID`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the phone number.
+* `phoneNumber` - The phone number. Phone numbers are formatted `[+] [country code] [subscriber number including area code]`.
+* `id` - The identifier of the phone number.
+* `status` - A block that specifies the status of the phone number. [Documented below](#status).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### `status`
+
+The `status` configuration block supports the following attributes:
+
+* `message` - The status message.
+* `status` - The status of the phone number. Valid Values: `CLAIMED` | `IN_PROGRESS` | `FAILED`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `2m`)
+* `update` - (Default `2m`)
+* `delete` - (Default `2m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Phone Numbers using its `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amazon Connect Phone Numbers using its `id`. 
For example: + +```console +% terraform import aws_connect_phone_number.example 12345678-abcd-1234-efgh-9876543210ab +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_queue.html.markdown b/website/docs/cdktf/typescript/r/connect_queue.html.markdown new file mode 100644 index 00000000000..849243b9266 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_queue.html.markdown @@ -0,0 +1,159 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_queue" +description: |- + Provides details about a specific Amazon Connect Queue +--- + + + +# Resource: aws_connect_queue + +Provides an Amazon Connect Queue resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectQueue } from "./.gen/providers/aws/connect-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectQueue(this, "test", { + description: "Example Description", + hoursOfOperationId: "12345678-1234-1234-1234-123456789012", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "Example Name", + tags: { + Name: "Example Queue", + }, + }); + } +} + +``` + +### With Quick Connect IDs + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectQueue } from "./.gen/providers/aws/connect-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectQueue(this, "test", { + description: "Example Description", + hoursOfOperationId: "12345678-1234-1234-1234-123456789012", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "Example Name", + quickConnectIds: ["12345678-abcd-1234-abcd-123456789012"], + tags: { + Name: "Example Queue with Quick Connect IDs", + }, + }); + } +} + +``` + +### With Outbound Caller Config + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
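+ * (The `outboundCallerIdNumberId` and `outboundFlowId` UUIDs below are
+ * placeholders for an existing caller ID number and outbound whisper flow
+ * in the same instance.)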
+ */
+import { ConnectQueue } from "./.gen/providers/aws/connect-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectQueue(this, "test", {
+      description: "Example Description",
+      hoursOfOperationId: "12345678-1234-1234-1234-123456789012",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example Name",
+      outboundCallerConfig: {
+        outboundCallerIdName: "example",
+        outboundCallerIdNumberId: "12345678-abcd-1234-abcd-123456789012",
+        outboundFlowId: "87654321-defg-1234-defg-987654321234",
+      },
+      tags: {
+        Name: "Example Queue with Outbound Caller Config",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) Specifies the description of the Queue.
+* `hoursOfOperationId` - (Required) Specifies the identifier of the Hours of Operation.
+* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `maxContacts` - (Optional) Specifies the maximum number of contacts that can be in the queue before it is considered full. Minimum value of 0.
+* `name` - (Required) Specifies the name of the Queue.
+* `outboundCallerConfig` - (Optional) A block that defines the outbound caller ID name, number, and outbound whisper flow. The Outbound Caller Config block is documented below.
+* `quickConnectIds` - (Optional) Specifies a list of quick connect IDs that determine the quick connects available to agents who are working the queue.
+* `status` - (Optional) Specifies the status of the Queue. Valid values are `ENABLED`, `DISABLED`.
+* `tags` - (Optional) Tags to apply to the Queue. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+An `outboundCallerConfig` block supports the following arguments:
+
+* `outboundCallerIdName` - (Optional) Specifies the caller ID name.
+* `outboundCallerIdNumberId` - (Optional) Specifies the caller ID number.
+* `outboundFlowId` - (Optional) Specifies the outbound whisper flow to be used during an outbound call.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Queue.
+* `queueId` - The identifier for the Queue.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Queue separated by a colon (`:`).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Queues using the `instanceId` and `queueId` separated by a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amazon Connect Queues using the `instanceId` and `queueId` separated by a colon (`:`). 
For example:
+
+```console
+% terraform import aws_connect_queue.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/connect_quick_connect.html.markdown b/website/docs/cdktf/typescript/r/connect_quick_connect.html.markdown
new file mode 100644
index 00000000000..ddaeadbe7ba
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/connect_quick_connect.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_quick_connect"
+description: |-
+  Provides details about a specific Amazon Connect Quick Connect
+---
+
+
+
+# Resource: aws_connect_quick_connect
+
+Provides an Amazon Connect Quick Connect resource. For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectQuickConnect } from "./.gen/providers/aws/connect-quick-connect";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectQuickConnect(this, "test", {
+      description: "quick connect phone number",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example Name",
+      quickConnectConfig: {
+        phoneConfig: [
+          {
+            phoneNumber: "+12345678912",
+          },
+        ],
+        quickConnectType: "PHONE_NUMBER",
+      },
+      tags: {
+        Name: "Example Quick Connect",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) Specifies the description of the Quick Connect.
+* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `name` - (Required) Specifies the name of the Quick Connect.
+* `quickConnectConfig` - (Required) A block that defines the configuration information for the Quick Connect: `quickConnectType` and one of `phoneConfig`, `queueConfig`, `userConfig`. The Quick Connect Config block is documented below.
+* `tags` - (Optional) Tags to apply to the Quick Connect. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+A `quickConnectConfig` block supports the following arguments:
+
+* `quickConnectType` - (Required) Specifies the configuration type of the quick connect. Valid values are `PHONE_NUMBER`, `QUEUE`, `USER`.
+* `phoneConfig` - (Optional) Specifies the phone configuration of the Quick Connect. This is required only if `quickConnectType` is `PHONE_NUMBER`. The `phoneConfig` block is documented below.
+* `queueConfig` - (Optional) Specifies the queue configuration of the Quick Connect. This is required only if `quickConnectType` is `QUEUE`. The `queueConfig` block is documented below (see the sketch after this list).
+* `userConfig` - (Optional) Specifies the user configuration of the Quick Connect. This is required only if `quickConnectType` is `USER`. The `userConfig` block is documented below.
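+
+The queue and user variants follow the same shape as the `PHONE_NUMBER` example above. The following is a minimal hand-written sketch (not `cdktf convert` output): the instance, contact flow, and queue IDs are placeholder UUIDs, and `queueConfig` is assumed to accept a list of blocks just as `phoneConfig` does.
+
+```typescript
+// Hand-written sketch of a queue-type Quick Connect; all IDs are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ConnectQuickConnect } from "./.gen/providers/aws/connect-quick-connect";
+class QueueQuickConnectStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectQuickConnect(this, "queue_example", {
+      description: "quick connect that routes to a queue",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      name: "Example Queue Quick Connect",
+      quickConnectConfig: {
+        // queueConfig is required only when quickConnectType is "QUEUE"
+        queueConfig: [
+          {
+            contactFlowId: "12345678-1234-1234-1234-123456789012",
+            queueId: "12345678-abcd-1234-abcd-123456789012",
+          },
+        ],
+        quickConnectType: "QUEUE",
+      },
+    });
+  }
+}
+```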
+
+A `phoneConfig` block supports the following arguments:
+
+* `phoneNumber` - (Required) Specifies the phone number in E.164 format.
+
+A `queueConfig` block supports the following arguments:
+
+* `contactFlowId` - (Required) Specifies the identifier of the contact flow.
+* `queueId` - (Required) Specifies the identifier for the queue.
+
+A `userConfig` block supports the following arguments:
+
+* `contactFlowId` - (Required) Specifies the identifier of the contact flow.
+* `userId` - (Required) Specifies the identifier for the user.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Quick Connect.
+* `quickConnectId` - The identifier for the Quick Connect.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Quick Connect separated by a colon (`:`).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Quick Connects using the `instanceId` and `quickConnectId` separated by a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amazon Connect Quick Connects using the `instanceId` and `quickConnectId` separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_connect_quick_connect.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/connect_routing_profile.html.markdown b/website/docs/cdktf/typescript/r/connect_routing_profile.html.markdown
new file mode 100644
index 00000000000..642573256e4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/connect_routing_profile.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "Connect"
+layout: "aws"
+page_title: "AWS: aws_connect_routing_profile"
+description: |-
+  Provides details about a specific Amazon Connect Routing Profile.
+---
+
+
+
+# Resource: aws_connect_routing_profile
+
+Provides an Amazon Connect Routing Profile resource. For more information see
+[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
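+ * (The `defaultOutboundQueueId` and `queueId` UUIDs below are placeholders
+ * for queues that must already exist in the instance.)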
+ */
+import { ConnectRoutingProfile } from "./.gen/providers/aws/connect-routing-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectRoutingProfile(this, "example", {
+      defaultOutboundQueueId: "12345678-1234-1234-1234-123456789012",
+      description: "example description",
+      instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111",
+      mediaConcurrencies: [
+        {
+          channel: "VOICE",
+          concurrency: 1,
+        },
+      ],
+      name: "example",
+      queueConfigs: [
+        {
+          channel: "VOICE",
+          delay: 2,
+          priority: 1,
+          queueId: "12345678-1234-1234-1234-123456789012",
+        },
+      ],
+      tags: {
+        Name: "Example Routing Profile",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `defaultOutboundQueueId` - (Required) Specifies the default outbound queue for the Routing Profile.
+* `description` - (Required) Specifies the description of the Routing Profile.
+* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `mediaConcurrencies` - (Required) One or more `mediaConcurrencies` blocks that specify the channels that agents can handle in the Contact Control Panel (CCP) for this Routing Profile. The `mediaConcurrencies` block is documented below.
+* `name` - (Required) Specifies the name of the Routing Profile.
+* `queueConfigs` - (Optional) One or more `queueConfigs` blocks that specify the inbound queues associated with the routing profile. If no queue is added, the agent can only make outbound calls. The `queueConfigs` block is documented below.
+* `tags` - (Optional) Tags to apply to the Routing Profile. If configured with a provider
+[`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+A `mediaConcurrencies` block supports the following arguments:
+
+* `channel` - (Required) Specifies the channels that agents can handle in the Contact Control Panel (CCP). Valid values are `VOICE`, `CHAT`, `TASK`.
+* `concurrency` - (Required) Specifies the number of contacts an agent can have on a channel simultaneously. Valid Range for `VOICE`: Minimum value of 1. Maximum value of 1. Valid Range for `CHAT`: Minimum value of 1. Maximum value of 10. Valid Range for `TASK`: Minimum value of 1. Maximum value of 10.
+
+A `queueConfigs` block supports the following arguments:
+
+* `channel` - (Required) Specifies the channels agents can handle in the Contact Control Panel (CCP) for this routing profile. Valid values are `VOICE`, `CHAT`, `TASK`.
+* `delay` - (Required) Specifies the delay, in seconds, that a contact should be in the queue before they are routed to an available agent.
+* `priority` - (Required) Specifies the order in which contacts are to be handled for the queue.
+* `queueId` - (Required) Specifies the identifier for the queue.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Routing Profile.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Routing Profile separated by a colon (`:`).
+* `queueConfigs` - In addition to the arguments used in the `queueConfigs` argument block, there are additional attributes exported within the `queueConfigs` block. These additional attributes are documented below. 
+* `routingProfileId` - The identifier for the Routing Profile. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +A `queueConfigs` block supports the following attributes in addition to the arguments defined earlier: + +* `queueArn` - ARN for the queue. +* `queueName` - Name for the queue. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Routing Profiles using the `instanceId` and `routingProfileId` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect Routing Profiles using the `instanceId` and `routingProfileId` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_routing_profile.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_security_profile.html.markdown b/website/docs/cdktf/typescript/r/connect_security_profile.html.markdown new file mode 100644 index 00000000000..c13c1ad1cd2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_security_profile.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_security_profile" +description: |- + Provides details about a specific Amazon Connect Security Profile. +--- + + + +# Resource: aws_connect_security_profile + +Provides an Amazon Connect Security Profile resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectSecurityProfile } from "./.gen/providers/aws/connect-security-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectSecurityProfile(this, "example", { + description: "example description", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "example", + permissions: ["BasicAgentAccess", "OutboundCallAccess"], + tags: { + Name: "Example Security Profile", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Specifies the description of the Security Profile. +* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) Specifies the name of the Security Profile. +* `permissions` - (Optional) Specifies a list of permissions assigned to the security profile. +* `tags` - (Optional) Tags to apply to the Security Profile. 
If configured with a provider +[`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Security Profile. +* `organizationResourceId` - The organization resource identifier for the security profile. +* `securityProfileId` - The identifier for the Security Profile. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the Security Profile separated by a colon (`:`). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Security Profiles using the `instanceId` and `securityProfileId` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect Security Profiles using the `instanceId` and `securityProfileId` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_security_profile.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_user.html.markdown b/website/docs/cdktf/typescript/r/connect_user.html.markdown new file mode 100644 index 00000000000..4e70b598f33 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_user.html.markdown @@ -0,0 +1,268 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user" +description: |- + Provides details about a specific Amazon Connect User +--- + + + +# Resource: aws_connect_user + +Provides an Amazon Connect User resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
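+ * (The routing profile and security profile referenced below are assumed to
+ * be `aws_connect_routing_profile` and `aws_connect_security_profile`
+ * resources defined elsewhere in this stack.)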
+ */ +import { ConnectUser } from "./.gen/providers/aws/connect-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectUser(this, "example", { + identityInfo: { + firstName: "example", + lastName: "example2", + }, + instanceId: Token.asString(awsConnectInstanceExample.id), + name: "example", + password: "Password123", + phoneConfig: { + afterContactWorkTimeLimit: 0, + phoneType: "SOFT_PHONE", + }, + routingProfileId: Token.asString( + awsConnectRoutingProfileExample.routingProfileId + ), + securityProfileIds: [ + Token.asString(awsConnectSecurityProfileExample.securityProfileId), + ], + }); + } +} + +``` + +### With hierarchy_group_id + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectUser } from "./.gen/providers/aws/connect-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectUser(this, "example", { + hierarchyGroupId: Token.asString( + awsConnectUserHierarchyGroupExample.hierarchyGroupId + ), + identityInfo: { + firstName: "example", + lastName: "example2", + }, + instanceId: Token.asString(awsConnectInstanceExample.id), + name: "example", + password: "Password123", + phoneConfig: { + afterContactWorkTimeLimit: 0, + phoneType: "SOFT_PHONE", + }, + routingProfileId: Token.asString( + awsConnectRoutingProfileExample.routingProfileId + ), + securityProfileIds: [ + Token.asString(awsConnectSecurityProfileExample.securityProfileId), + ], + }); + } +} + +``` + +### With identity_info filled + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectUser } from "./.gen/providers/aws/connect-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectUser(this, "example", { + identityInfo: { + email: "example@example.com", + firstName: "example", + lastName: "example2", + }, + instanceId: Token.asString(awsConnectInstanceExample.id), + name: "example", + password: "Password123", + phoneConfig: { + afterContactWorkTimeLimit: 0, + phoneType: "SOFT_PHONE", + }, + routingProfileId: Token.asString( + awsConnectRoutingProfileExample.routingProfileId + ), + securityProfileIds: [ + Token.asString(awsConnectSecurityProfileExample.securityProfileId), + ], + }); + } +} + +``` + +### With phone_config phone type as desk phone + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
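+ * (A DESK_PHONE configuration requires `deskPhoneNumber`; see the
+ * `phoneConfig` argument reference below.)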
+ */
+import { ConnectUser } from "./.gen/providers/aws/connect-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectUser(this, "example", {
+      instanceId: Token.asString(awsConnectInstanceExample.id),
+      name: "example",
+      password: "Password123",
+      phoneConfig: {
+        afterContactWorkTimeLimit: 0,
+        autoAccept: false,
+        deskPhoneNumber: "+112345678912",
+        phoneType: "DESK_PHONE",
+      },
+      routingProfileId: Token.asString(
+        awsConnectRoutingProfileExample.routingProfileId
+      ),
+      securityProfileIds: [
+        Token.asString(awsConnectSecurityProfileExample.securityProfileId),
+      ],
+    });
+  }
+}
+
+```
+
+### With multiple Security profile ids specified in security_profile_ids
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ConnectUser } from "./.gen/providers/aws/connect-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ConnectUser(this, "example", {
+      instanceId: Token.asString(awsConnectInstanceExample.id),
+      name: "example",
+      password: "Password123",
+      phoneConfig: {
+        afterContactWorkTimeLimit: 0,
+        phoneType: "SOFT_PHONE",
+      },
+      routingProfileId: Token.asString(
+        awsConnectRoutingProfileExample.routingProfileId
+      ),
+      securityProfileIds: [
+        Token.asString(awsConnectSecurityProfileExample.securityProfileId),
+        example2.securityProfileId,
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `directoryUserId` - (Optional) The identifier of the user account in the directory used for identity management. If Amazon Connect cannot access the directory, you can specify this identifier to authenticate users. If you include the identifier, we assume that Amazon Connect cannot access the directory. Otherwise, the identity information is used to authenticate users from your directory. This parameter is required if you are using an existing directory for identity management in Amazon Connect when Amazon Connect cannot access your directory to authenticate users. If you are using SAML for identity management and include this parameter, an error is returned.
+* `hierarchyGroupId` - (Optional) The identifier of the hierarchy group for the user.
+* `identityInfo` - (Optional) A block that contains information about the identity of the user. Documented below.
+* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance.
+* `name` - (Required) The user name for the account. For instances not using SAML for identity management, the user name can include up to 20 characters. If you are using SAML for identity management, the user name can include up to 64 characters from `[a-zA-Z0-9_-.\@]+`.
+* `password` - (Optional) The password for the user account. A password is required if you are using Amazon Connect for identity management. Otherwise, it is an error to include a password.
+* `phoneConfig` - (Required) A block that contains information about the phone settings for the user. Documented below.
+* `routingProfileId` - (Required) The identifier of the routing profile for the user.
+* `securityProfileIds` - (Required) A list of identifiers for the security profiles for the user. 
Specify a minimum of 1 and maximum of 10 security profile IDs. For more information, see [Best Practices for Security Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/security-profile-best-practices.html) in the Amazon Connect Administrator Guide.
+* `tags` - (Optional) Tags to apply to the user. If configured with a provider
+[`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+An `identityInfo` block supports the following arguments:
+
+* `email` - (Optional) The email address. If you are using SAML for identity management and include this parameter, an error is returned. Note that updates to `email` are supported. The [UpdateUserIdentityInfo API documentation](https://docs.aws.amazon.com/connect/latest/APIReference/API_UpdateUserIdentityInfo.html) strongly recommends limiting who has the ability to invoke `UpdateUserIdentityInfo`. Someone with that ability can change the login credentials of other users by changing their email address. This poses a security risk to your organization: they can change the email address of a user to the attacker's email address, and then reset the password through email. For more information, see [Best Practices for Security Profiles](https://docs.aws.amazon.com/connect/latest/adminguide/security-profile-best-practices.html) in the Amazon Connect Administrator Guide.
+* `firstName` - (Optional) The first name. This is required if you are using Amazon Connect or SAML for identity management. Minimum length of 1. Maximum length of 100.
+* `lastName` - (Optional) The last name. This is required if you are using Amazon Connect or SAML for identity management. Minimum length of 1. Maximum length of 100.
+
+A `phoneConfig` block supports the following arguments:
+
+* `afterContactWorkTimeLimit` - (Optional) The After Call Work (ACW) timeout setting, in seconds. Minimum value of 0.
+* `autoAccept` - (Optional) When Auto-Accept Call is enabled for an available agent, the agent connects to contacts automatically.
+* `deskPhoneNumber` - (Optional) The phone number for the user's desk phone. Required if `phoneType` is set to `DESK_PHONE`.
+* `phoneType` - (Required) The phone type. Valid values are `DESK_PHONE` and `SOFT_PHONE`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the user.
+* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the user
+separated by a colon (`:`).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `userId` - The identifier for the user.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Users using the `instanceId` and `userId` separated by a colon (`:`). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect Users using the `instanceId` and `userId` separated by a colon (`:`). For example: + +```console +% terraform import aws_connect_user.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_user_hierarchy_group.html.markdown b/website/docs/cdktf/typescript/r/connect_user_hierarchy_group.html.markdown new file mode 100644 index 00000000000..043bf6a4fa9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_user_hierarchy_group.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user_hierarchy_group" +description: |- + Provides details about a specific Amazon Connect User Hierarchy Group +--- + + + +# Resource: aws_connect_user_hierarchy_group + +Provides an Amazon Connect User Hierarchy Group resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +~> **NOTE:** The User Hierarchy Structure must be created before creating a User Hierarchy Group. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectUserHierarchyGroup } from "./.gen/providers/aws/connect-user-hierarchy-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectUserHierarchyGroup(this, "example", { + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "example", + tags: { + Name: "Example User Hierarchy Group", + }, + }); + } +} + +``` + +### With a parent group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectUserHierarchyGroup } from "./.gen/providers/aws/connect-user-hierarchy-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const parent = new ConnectUserHierarchyGroup(this, "parent", { + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "parent", + tags: { + Name: "Example User Hierarchy Group Parent", + }, + }); + new ConnectUserHierarchyGroup(this, "child", { + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + name: "child", + parentGroupId: parent.hierarchyGroupId, + tags: { + Name: "Example User Hierarchy Group Child", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `name` - (Required) The name of the user hierarchy group. Must not be more than 100 characters. 
+* `parentGroupId` - (Optional) The identifier for the parent hierarchy group. The user hierarchy is created at level one if the parent group ID is null. +* `tags` - (Optional) Tags to apply to the hierarchy group. If configured with a provider +[`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the hierarchy group. +* `hierarchyGroupId` - The identifier for the hierarchy group. +* `hierarchyPath` - A block that contains information about the levels in the hierarchy group. The `hierarchyPath` block is documented below. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the hierarchy group +separated by a colon (`:`). +* `levelId` - The identifier of the level in the hierarchy group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +A `hierarchyPath` block supports the following attributes: + +* `levelOne` - A block that defines the details of level one. The level block is documented below. +* `levelTwo` - A block that defines the details of level two. The level block is documented below. +* `levelThree` - A block that defines the details of level three. The level block is documented below. +* `levelFour` - A block that defines the details of level four. The level block is documented below. +* `levelFive` - A block that defines the details of level five. The level block is documented below. + +A level block supports the following attributes: + +* `arn` - The Amazon Resource Name (ARN) of the hierarchy group. +* `id` - The identifier of the hierarchy group. +* `name` - The name of the hierarchy group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect User Hierarchy Groups using the `instanceId` and `hierarchyGroupId` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect User Hierarchy Groups using the `instanceId` and `hierarchyGroupId` separated by a colon (`:`). 
For example: + +```console +% terraform import aws_connect_user_hierarchy_group.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_user_hierarchy_structure.html.markdown b/website/docs/cdktf/typescript/r/connect_user_hierarchy_structure.html.markdown new file mode 100644 index 00000000000..9d98adfb93b --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_user_hierarchy_structure.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_user_hierarchy_structure" +description: |- + Provides details about a specific Amazon Connect User Hierarchy Structure +--- + + + +# Resource: aws_connect_user_hierarchy_structure + +Provides an Amazon Connect User Hierarchy Structure resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectUserHierarchyStructure } from "./.gen/providers/aws/connect-user-hierarchy-structure"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectUserHierarchyStructure(this, "example", { + hierarchyStructure: { + levelOne: { + name: "levelone", + }, + }, + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + }); + } +} + +``` + +### With Five Levels + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectUserHierarchyStructure } from "./.gen/providers/aws/connect-user-hierarchy-structure"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectUserHierarchyStructure(this, "example", { + hierarchyStructure: { + levelFive: { + name: "levelfive", + }, + levelFour: { + name: "levelfour", + }, + levelOne: { + name: "levelone", + }, + levelThree: { + name: "levelthree", + }, + levelTwo: { + name: "leveltwo", + }, + }, + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `hierarchyStructure` - (Required) A block that defines the hierarchy structure's levels. The `hierarchyStructure` block is documented below. +* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. + +A `hierarchyStructure` block supports the following arguments: + +* `levelOne` - (Optional) A block that defines the details of level one. The level block is documented below. +* `levelTwo` - (Optional) A block that defines the details of level two. The level block is documented below. +* `levelThree` - (Optional) A block that defines the details of level three. The level block is documented below. +* `levelFour` - (Optional) A block that defines the details of level four. The level block is documented below. 
+* `levelFive` - (Optional) A block that defines the details of level five. The level block is documented below. + +Each level block supports the following arguments: + +* `name` - (Required) The name of the user hierarchy level. Must not be more than 50 characters. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `hierarchyStructure` - In addition to the arguments defined initially, there are attributes added to the levels created. These additional attributes are documented below. +* `id` - The identifier of the hosting Amazon Connect Instance. + +A level block supports the following additional attributes: + +* `arn` - The Amazon Resource Name (ARN) of the hierarchy level. +* `id` - The identifier of the hierarchy level. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect User Hierarchy Structures using the `instanceId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect User Hierarchy Structures using the `instanceId`. For example: + +```console +% terraform import aws_connect_user_hierarchy_structure.example f1288a1f-6193-445a-b47e-af739b2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/connect_vocabulary.html.markdown b/website/docs/cdktf/typescript/r/connect_vocabulary.html.markdown new file mode 100644 index 00000000000..ceeaa975ec7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/connect_vocabulary.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "Connect" +layout: "aws" +page_title: "AWS: aws_connect_vocabulary" +description: |- + Provides details about a specific Amazon Connect Vocabulary +--- + + + +# Resource: aws_connect_vocabulary + +Provides an Amazon Connect Vocabulary resource. For more information see +[Amazon Connect: Getting Started](https://docs.aws.amazon.com/connect/latest/adminguide/amazon-connect-get-started.html) + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ConnectVocabulary } from "./.gen/providers/aws/connect-vocabulary"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ConnectVocabulary(this, "example", { + content: + "Phrase\tIPA\tSoundsLike\tDisplayAs\nLos-Angeles\t\t\tLos Angeles\nF.B.I.\t\u025B f b i a\u026A\t\tFBI\nEtienne\t\teh-tee-en\t\n", + instanceId: "aaaaaaaa-bbbb-cccc-dddd-111111111111", + languageCode: "en-US", + name: "example", + tags: { + Key1: "Value1", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Required) The content of the custom vocabulary in plain-text format with a table of values. Each row in the table represents a word or a phrase, described with Phrase, IPA, SoundsLike, and DisplayAs fields. Separate the fields with TAB characters. 
For more information, see [Create a custom vocabulary using a table](https://docs.aws.amazon.com/transcribe/latest/dg/custom-vocabulary.html#create-vocabulary-table). Minimum length of `1`. Maximum length of `60000`. +* `instanceId` - (Required) Specifies the identifier of the hosting Amazon Connect Instance. +* `languageCode` - (Required) The language code of the vocabulary entries. For a list of languages and their corresponding language codes, see [What is Amazon Transcribe?](https://docs.aws.amazon.com/transcribe/latest/dg/transcribe-whatis.html). Valid Values are `arAe`, `deCh`, `deDe`, `enAb`, `enAu`, `enGb`, `enIe`, `enIn`, `enUs`, `enWl`, `esEs`, `esUs`, `frCa`, `frFr`, `hiIn`, `itIt`, `jaJp`, `koKr`, `ptBr`, `ptPt`, `zhCn`. +* `name` - (Required) A unique name of the custom vocabulary. Must not be more than 140 characters. +* `tags` - (Optional) Tags to apply to the vocabulary. If configured with a provider +[`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5M`) +* `delete` - (Default `100M`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the vocabulary. +* `failureReason` - The reason why the custom vocabulary was not created. +* `id` - The identifier of the hosting Amazon Connect Instance and identifier of the vocabulary +separated by a colon (`:`). +* `lastModifiedTime` - The timestamp when the custom vocabulary was last modified. +* `state` - The current state of the custom vocabulary. Valid values are `creationInProgress`, `active`, `creationFailed`, `deleteInProgress`. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vocabularyId` - The identifier of the custom vocabulary. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Connect Vocabularies using the `instanceId` and `vocabularyId` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Connect Vocabularies using the `instanceId` and `vocabularyId` separated by a colon (`:`). 
For example: + +```console +% terraform import aws_connect_vocabulary.example f1288a1f-6193-445a-b47e-af739b2:c1d4e5f6-1b3c-1b3c-1b3c-c1d4e5f6c1d4e5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/controltower_control.html.markdown b/website/docs/cdktf/typescript/r/controltower_control.html.markdown new file mode 100644 index 00000000000..a4ff0c1c04e --- /dev/null +++ b/website/docs/cdktf/typescript/r/controltower_control.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Control Tower" +layout: "aws" +page_title: "AWS: aws_controltower_control" +description: |- + Allows the application of pre-defined controls to organizational units. +--- + + + +# Resource: aws_controltower_control + +Allows the application of pre-defined controls to organizational units. For more information on usage, please see the +[AWS Control Tower User Guide](https://docs.aws.amazon.com/controltower/latest/userguide/enable-guardrails.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ControltowerControl } from "./.gen/providers/aws/controltower-control"; +import { DataAwsOrganizationsOrganization } from "./.gen/providers/aws/data-aws-organizations-organization"; +import { DataAwsOrganizationsOrganizationalUnits } from "./.gen/providers/aws/data-aws-organizations-organizational-units"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsOrganizationsOrganization(this, "example", {}); + const dataAwsOrganizationsOrganizationalUnitsExample = + new DataAwsOrganizationsOrganizationalUnits(this, "example_1", { + parentId: Token.asString(propertyAccess(example.roots, ["0", "id"])), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsOrganizationsOrganizationalUnitsExample.overrideLogicalId("example"); + const current = new DataAwsRegion(this, "current", {}); + const awsControltowerControlExample = new ControltowerControl( + this, + "example_3", + { + controlIdentifier: + "arn:aws:controltower:${" + + current.name + + "}::control/AWS-GR_EC2_VOLUME_INUSE_CHECK", + targetIdentifier: Token.asString( + propertyAccess( + "${[ for x in ${" + + dataAwsOrganizationsOrganizationalUnitsExample.children + + '} : x.arn if x.name == "Infrastructure"]}', + ["0"] + ) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsControltowerControlExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `controlIdentifier` - (Required) The ARN of the control. Only Strongly recommended and Elective controls are permitted, with the exception of the Region deny guardrail. +* `targetIdentifier` - (Required) The ARN of the organizational unit. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the organizational unit. 
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Control Tower Controls using the `organizationalUnitArn` and `controlIdentifier` separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Control Tower Controls using the `organizationalUnitArn` and `controlIdentifier` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_controltower_control.example arn:aws:organizations::123456789101:ou/o-qqaejywet/ou-qg5o-ufbhdtv3,arn:aws:controltower:us-east-1::control/WTDSMKDKDNLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/cur_report_definition.html.markdown b/website/docs/cdktf/typescript/r/cur_report_definition.html.markdown
new file mode 100644
index 00000000000..8883c5ea83d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/cur_report_definition.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Cost and Usage Report"
+layout: "aws"
+page_title: "AWS: aws_cur_report_definition"
+description: |-
+  Provides a Cost and Usage Report Definition.
+---
+
+
+
+# Resource: aws_cur_report_definition
+
+Manages Cost and Usage Report Definitions.
+
+~> **NOTE:** The AWS Cost and Usage Report service is only available in `usEast1` currently.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CurReportDefinition } from "./.gen/providers/aws/cur-report-definition";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CurReportDefinition(this, "example_cur_report_definition", {
+      additionalArtifacts: ["REDSHIFT", "QUICKSIGHT"],
+      additionalSchemaElements: ["RESOURCES", "SPLIT_COST_ALLOCATION_DATA"],
+      compression: "GZIP",
+      format: "textORcsv",
+      reportName: "example-cur-report-definition",
+      s3Bucket: "example-bucket-name",
+      s3Region: "us-east-1",
+      timeUnit: "HOURLY",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `reportName` - (Required) Unique name for the report. Must start with a number/letter and is case sensitive. Limited to 256 characters.
+* `timeUnit` - (Required) The frequency on which report data are measured and displayed. Valid values are: `daily`, `hourly`, `monthly`.
+* `format` - (Required) Format for report. Valid values are: `textORcsv`, `parquet`. If `parquet` is used, then compression must also be `parquet`.
+* `compression` - (Required) Compression format for report. Valid values are: `gzip`, `zip`, `parquet`. If `parquet` is used, then format must also be `parquet`.
+* `additionalSchemaElements` - (Required) A list of schema elements. Valid values are: `resources`, `splitCostAllocationData`.
+* `s3Bucket` - (Required) Name of the existing S3 bucket to hold generated reports.
+* `s3Prefix` - (Optional) Report path prefix. Limited to 256 characters.
+* `s3Region` - (Required) Region of the existing S3 bucket to hold generated reports.
+* `additionalArtifacts` - (Required) A list of additional artifacts. Valid values are: `redshift`, `quicksight`, `athena`. When `ATHENA` exists within `additional_artifacts`, no other artifact type can be declared and `report_versioning` must be `overwriteReport`; see the sketch after this list.
+* `refreshClosedReports` - (Optional) Set to true to update your reports after they have been finalized if AWS detects charges related to previous months.
+* `reportVersioning` - (Optional) Whether to overwrite the previous version of each report or to deliver the report in addition to the previous versions. Valid values are: `createNewReport` and `overwriteReport`.
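+
+The `ATHENA` constraint above is easiest to see in a concrete configuration. A minimal sketch, assuming it sits inside a `TerraformStack` constructor like the example above (the bucket name and prefix are placeholders, and this block is not part of the generated upstream docs):
+
+```typescript
+// Hedged sketch of an Athena-compatible report definition: with ATHENA,
+// it must be the only additional artifact, format and compression must
+// both be Parquet, and report versioning must overwrite prior versions.
+new CurReportDefinition(this, "athena_cur_report_definition", {
+  additionalArtifacts: ["ATHENA"],
+  additionalSchemaElements: ["RESOURCES"],
+  compression: "Parquet",
+  format: "Parquet",
+  reportName: "athena-cur-report-definition",
+  reportVersioning: "OVERWRITE_REPORT",
+  s3Bucket: "example-bucket-name",
+  s3Prefix: "athena/cur",
+  s3Region: "us-east-1",
+  timeUnit: "HOURLY",
+});
+```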
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) specifying the cur report.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Report Definitions using the `reportName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Report Definitions using the `reportName`. For example:
+
+```console
+% terraform import aws_cur_report_definition.example_cur_report_definition example-cur-report-definition
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/customer_gateway.html.markdown b/website/docs/cdktf/typescript/r/customer_gateway.html.markdown
new file mode 100644
index 00000000000..033160bbfa2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/customer_gateway.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "VPN (Site-to-Site)"
+layout: "aws"
+page_title: "AWS: aws_customer_gateway"
+description: |-
+  Provides a customer gateway inside a VPC. These objects can be
+  connected to VPN gateways via VPN connections, and allow you to
+  establish tunnels between your network and the VPC.
+---
+
+
+
+# Resource: aws_customer_gateway
+
+Provides a customer gateway inside a VPC. These objects can be connected to VPN gateways via VPN connections, and allow you to establish tunnels between your network and the VPC.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CustomerGateway } from "./.gen/providers/aws/customer-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new CustomerGateway(this, "main", {
+      bgpAsn: Token.asString(65000),
+      ipAddress: "172.83.124.10",
+      tags: {
+        Name: "main-customer-gateway",
+      },
+      type: "ipsec.1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bgpAsn` - (Required) The gateway's Border Gateway Protocol (BGP) Autonomous System Number (ASN).
+* `certificateArn` - (Optional) The Amazon Resource Name (ARN) for the customer gateway certificate.
+* `deviceName` - (Optional) A name for the customer gateway device.
+* `ipAddress` - (Optional) The IPv4 address for the customer gateway device's outside interface.
+* `type` - (Required) The type of customer gateway. The only type AWS + supports at this time is "ipsec.1". +* `tags` - (Optional) Tags to apply to the gateway. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The amazon-assigned ID of the gateway. +* `arn` - The ARN of the customer gateway. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Customer Gateways using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Customer Gateways using the `id`. For example: + +```console +% terraform import aws_customer_gateway.main cgw-b4dc3961 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dataexchange_data_set.html.markdown b/website/docs/cdktf/typescript/r/dataexchange_data_set.html.markdown new file mode 100644 index 00000000000..3ad256ee4fd --- /dev/null +++ b/website/docs/cdktf/typescript/r/dataexchange_data_set.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Data Exchange" +layout: "aws" +page_title: "AWS: aws_dataexchange_data_set" +description: |- + Provides a DataExchange DataSet +--- + + + +# Resource: aws_dataexchange_data_set + +Provides a resource to manage AWS Data Exchange DataSets. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataexchangeDataSet } from "./.gen/providers/aws/dataexchange-data-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DataexchangeDataSet(this, "example", { + assetType: "S3_SNAPSHOT", + description: "example", + name: "example", + }); + } +} + +``` + +## Argument Reference + +* `assetType` - (Required) The type of asset that is added to a data set. Valid values are: `s3Snapshot`, `redshiftDataShare`, and `apiGatewayApi`. +* `description` - (Required) A description for the data set. +* `name` - (Required) The name of the data set. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Id of the data set. 
+* `arn` - The Amazon Resource Name of this data set.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DataExchange DataSets using their ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DataExchange DataSets using their ARN. For example:
+
+```console
+% terraform import aws_dataexchange_data_set.example arn:aws:dataexchange:us-west-2:123456789012:data-sets/4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dataexchange_revision.html.markdown b/website/docs/cdktf/typescript/r/dataexchange_revision.html.markdown
new file mode 100644
index 00000000000..ef4e33fe25b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dataexchange_revision.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Data Exchange"
+layout: "aws"
+page_title: "AWS: aws_dataexchange_revision"
+description: |-
+  Provides a DataExchange Revision
+---
+
+
+
+# Resource: aws_dataexchange_revision
+
+Provides a resource to manage AWS Data Exchange Revisions.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataexchangeRevision } from "./.gen/providers/aws/dataexchange-revision";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DataexchangeRevision(this, "example", {
+      dataSetId: Token.asString(awsDataexchangeDataSetExample.id),
+    });
+  }
+}
+
+```
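+
+The `awsDataexchangeDataSetExample` reference above stands in for a data set defined elsewhere in the configuration. A minimal self-contained sketch (resource names are illustrative; this block is not part of the generated upstream docs) wiring a data set and a revision together in one stack:
+
+```typescript
+// Hedged sketch: the revision takes its dataSetId directly from an
+// aws_dataexchange_data_set resource in the same stack.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataexchangeDataSet } from "./.gen/providers/aws/dataexchange-data-set";
+import { DataexchangeRevision } from "./.gen/providers/aws/dataexchange-revision";
+class DataSetWithRevision extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const dataSet = new DataexchangeDataSet(this, "example", {
+      assetType: "S3_SNAPSHOT",
+      description: "example",
+      name: "example",
+    });
+    new DataexchangeRevision(this, "exampleRevision", {
+      dataSetId: dataSet.id,
+    });
+  }
+}
+```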
+
+## Argument Reference
+
+* `dataSetId` - (Required) The ID of the data set.
+* `comment` - (Optional) A comment about the revision.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the data set.
+* `revisionId` - The ID of the revision.
+* `arn` - The Amazon Resource Name of this revision.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DataExchange Revisions using their `dataSetId:revisionId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DataExchange Revisions using their `dataSetId:revisionId`. For example:
+
+```console
+% terraform import aws_dataexchange_revision.example 4fa784c7-ccb4-4dbf-ba4f-02198320daa1:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/datapipeline_pipeline.html.markdown b/website/docs/cdktf/typescript/r/datapipeline_pipeline.html.markdown
new file mode 100644
index 00000000000..b04ddc699dd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/datapipeline_pipeline.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline"
+description: |-
+  Provides an AWS DataPipeline Pipeline.
+---
+
+
+
+# Resource: aws_datapipeline_pipeline
+
+Provides a DataPipeline Pipeline resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DatapipelinePipeline } from "./.gen/providers/aws/datapipeline-pipeline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DatapipelinePipeline(this, "default", {
+      name: "tf-pipeline-default",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the pipeline.
+* `description` - (Optional) The description of the pipeline.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the pipeline.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatapipelinePipeline` using the id (Pipeline ID). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
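+
+The converted block above comes out empty because the HCL `import` block has no direct equivalent in the `cdktf convert` output. On recent cdktf releases the same import can be expressed on the resource itself via `importFrom`; a hedged sketch (not part of the generated upstream docs; the pipeline ID is the placeholder from the CLI example below):
+
+```typescript
+// Hedged sketch: importFrom() emits the equivalent of a Terraform
+// `import` block for this resource (requires a cdktf version with
+// import support).
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DatapipelinePipeline } from "./.gen/providers/aws/datapipeline-pipeline";
+class ImportPipeline extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const pipeline = new DatapipelinePipeline(this, "default", {
+      name: "tf-pipeline-default",
+    });
+    pipeline.importFrom("df-1234567890");
+  }
+}
+```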
+
+Using `terraform import`, import `awsDatapipelinePipeline` using the id (Pipeline ID). For example:
+
+```console
+% terraform import aws_datapipeline_pipeline.default df-1234567890
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/datapipeline_pipeline_definition.html.markdown b/website/docs/cdktf/typescript/r/datapipeline_pipeline_definition.html.markdown
new file mode 100644
index 00000000000..25bfeec7973
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/datapipeline_pipeline_definition.html.markdown
@@ -0,0 +1,164 @@
+---
+subcategory: "Data Pipeline"
+layout: "aws"
+page_title: "AWS: aws_datapipeline_pipeline_definition"
+description: |-
+  Provides a DataPipeline Definition.
+---
+
+
+
+# Resource: aws_datapipeline_pipeline_definition
+
+Provides a DataPipeline Pipeline Definition resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DatapipelinePipeline } from "./.gen/providers/aws/datapipeline-pipeline";
+import { DatapipelinePipelineDefinition } from "./.gen/providers/aws/datapipeline-pipeline-definition";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const defaultVar = new DatapipelinePipeline(this, "default", {
+      name: "tf-pipeline-default",
+    });
+    new DatapipelinePipelineDefinition(this, "example", {
+      pipelineId: defaultVar.id,
+      pipelineObject: [
+        {
+          field: [
+            {
+              key: "workerGroup",
+              stringValue: "workerGroup",
+            },
+          ],
+          id: "Default",
+          name: "Default",
+        },
+        {
+          field: [
+            {
+              key: "startDateTime",
+              stringValue: "2012-12-12T00:00:00",
+            },
+            {
+              key: "type",
+              stringValue: "Schedule",
+            },
+            {
+              key: "period",
+              stringValue: "1 hour",
+            },
+            {
+              key: "endDateTime",
+              stringValue: "2012-12-21T18:00:00",
+            },
+          ],
+          id: "Schedule",
+          name: "Schedule",
+        },
+        {
+          field: [
+            {
+              key: "type",
+              stringValue: "ShellCommandActivity",
+            },
+            {
+              key: "command",
+              stringValue: "echo hello",
+            },
+            {
+              key: "parent",
+              stringValue: "Default",
+            },
+            {
+              key: "schedule",
+              stringValue: "Schedule",
+            },
+          ],
+          id: "SayHello",
+          name: "SayHello",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `pipelineId` - (Required) ID of the pipeline.
+* `pipelineObject` - (Required) Configuration block for the objects that define the pipeline. See below.
+
+The following arguments are optional:
+
+* `parameterObject` - (Optional) Configuration block for the parameter objects used in the pipeline definition. See below.
+* `parameterValue` - (Optional) Configuration block for the parameter values used in the pipeline definition. See below.
+
+### `pipelineObject`
+
+* `field` - (Required) Configuration block for key-value pairs that define the properties of the object. See below.
+* `id` - (Required) ID of the object.
+* `name` - (Required) Name of the object.
+
+### `field`
+
+* `key` - (Required) Field identifier.
+* `refValue` - (Optional) Field value, expressed as the identifier of another object.
+* `stringValue` - (Optional) Field value, expressed as a String.
+
+### `parameterObject`
+
+* `attribute` - (Required) Configuration block for attributes of the parameter object. See below.
+* `id` - (Required) ID of the parameter object.
+
+### `attribute`
+
+* `key` - (Required) Field identifier.
+* `stringValue` - (Required) Field value, expressed as a String. + +### `parameterValue` + +* `id` - (Required) ID of the parameter value. +* `stringValue` - (Required) Field value, expressed as a String. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique ID of the datapipeline definition. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatapipelinePipelineDefinition` using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDatapipelinePipelineDefinition` using the id. For example: + +```console +% terraform import aws_datapipeline_pipeline_definition.example df-1234567890 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_agent.html.markdown b/website/docs/cdktf/typescript/r/datasync_agent.html.markdown new file mode 100644 index 00000000000..a95b6da52cc --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_agent.html.markdown @@ -0,0 +1,142 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_agent" +description: |- + Manages an AWS DataSync Agent in the provider region +--- + + + +# Resource: aws_datasync_agent + +Manages an AWS DataSync Agent deployed on premises. + +~> **NOTE:** One of `activationKey` or `ipAddress` must be provided for resource creation (agent activation). Neither is required for resource import. If using `ipAddress`, Terraform must be able to make an HTTP (port 80) GET request to the specified IP address from where it is running. The agent will turn off that HTTP server after activation. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncAgent } from "./.gen/providers/aws/datasync-agent"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncAgent(this, "example", { + ipAddress: "1.2.3.4", + name: "example", + }); + } +} + +``` + +## Example Usage with VPC Endpoints + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
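+ *
+ * The stack below activates the agent through an interface VPC endpoint:
+ * it looks up the endpoint's first network interface to obtain the private
+ * IP address that is passed to the agent as privateLinkEndpoint. The
+ * security group, subnet, and VPC references are placeholders defined
+ * elsewhere in the configuration.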
+ */ +import { DataAwsNetworkInterface } from "./.gen/providers/aws/data-aws-network-interface"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { DatasyncAgent } from "./.gen/providers/aws/datasync-agent"; +import { VpcEndpoint } from "./.gen/providers/aws/vpc-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsRegion(this, "current", {}); + const example = new VpcEndpoint(this, "example", { + securityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + serviceName: "com.amazonaws.${" + current.name + "}.datasync", + subnetIds: [Token.asString(awsSubnetExample.id)], + vpcEndpointType: "Interface", + vpcId: Token.asString(awsVpcExample.id), + }); + const dataAwsNetworkInterfaceExample = new DataAwsNetworkInterface( + this, + "example_2", + { + id: Token.asString( + propertyAccess(Fn.tolist(example.networkInterfaceIds), ["0"]) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsNetworkInterfaceExample.overrideLogicalId("example"); + const awsDatasyncAgentExample = new DatasyncAgent(this, "example_3", { + ipAddress: "1.2.3.4", + name: "example", + privateLinkEndpoint: Token.asString( + dataAwsNetworkInterfaceExample.privateIp + ), + securityGroupArns: [Token.asString(awsSecurityGroupExample.arn)], + subnetArns: [Token.asString(awsSubnetExample.arn)], + vpcEndpointId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDatasyncAgentExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the DataSync Agent. +* `activationKey` - (Optional) DataSync Agent activation key during resource creation. Conflicts with `ipAddress`. If an `ipAddress` is provided instead, Terraform will retrieve the `activationKey` as part of the resource creation. +* `ipAddress` - (Optional) DataSync Agent IP address to retrieve activation key during resource creation. Conflicts with `activationKey`. DataSync Agent must be accessible on port 80 from where Terraform is running. +* `privateLinkEndpoint` - (Optional) The IP address of the VPC endpoint the agent should connect to when retrieving an activation key during resource creation. Conflicts with `activationKey`. +* `securityGroupArns` - (Optional) The ARNs of the security groups used to protect your data transfer task subnets. +* `subnetArns` - (Optional) The Amazon Resource Names (ARNs) of the subnets in which DataSync will create elastic network interfaces for each data transfer task. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Agent. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpcEndpointId` - (Optional) The ID of the VPC (virtual private cloud) endpoint that the agent has access to. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the DataSync Agent. +* `arn` - Amazon Resource Name (ARN) of the DataSync Agent. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncAgent` using the DataSync Agent Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsDatasyncAgent` using the DataSync Agent Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_datasync_agent.example arn:aws:datasync:us-east-1:123456789012:agent/agent-12345678901234567
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/datasync_location_efs.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_efs.html.markdown
new file mode 100644
index 00000000000..2fc47fc6e7d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/datasync_location_efs.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_efs"
+description: |-
+  Manages an EFS Location within AWS DataSync.
+---
+
+
+
+# Resource: aws_datasync_location_efs
+
+Manages an AWS DataSync EFS Location.
+
+~> **NOTE:** The EFS File System must have a mounted EFS Mount Target before creating this resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DatasyncLocationEfs } from "./.gen/providers/aws/datasync-location-efs";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DatasyncLocationEfs(this, "example", {
+      ec2Config: {
+        securityGroupArns: [Token.asString(awsSecurityGroupExample.arn)],
+        subnetArn: Token.asString(awsSubnetExample.arn),
+      },
+      efsFileSystemArn: Token.asString(awsEfsMountTargetExample.fileSystemArn),
+    });
+  }
+}
+
+```
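+
+When the transfer should go through an EFS access point, the location can also carry an IAM role and require TLS. A hedged sketch inside the same stack shape as above (the `awsEfsAccessPointExample` and `awsIamRoleExample` references are illustrative placeholders, like the references in the generated example):
+
+```typescript
+// Hedged sketch: an EFS location that mounts via an access point,
+// assumes an IAM role, and enforces in-transit encryption.
+new DatasyncLocationEfs(this, "exampleAccessPoint", {
+  accessPointArn: Token.asString(awsEfsAccessPointExample.arn),
+  ec2Config: {
+    securityGroupArns: [Token.asString(awsSecurityGroupExample.arn)],
+    subnetArn: Token.asString(awsSubnetExample.arn),
+  },
+  efsFileSystemArn: Token.asString(awsEfsMountTargetExample.fileSystemArn),
+  fileSystemAccessRoleArn: Token.asString(awsIamRoleExample.arn),
+  inTransitEncryption: "TLS1_2",
+});
+```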
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accessPointArn` - (Optional) Specifies the Amazon Resource Name (ARN) of the access point that DataSync uses to access the Amazon EFS file system.
+* `ec2Config` - (Required) Configuration block containing EC2 configurations for connecting to the EFS File System.
+* `efsFileSystemArn` - (Required) Amazon Resource Name (ARN) of EFS File System.
+* `fileSystemAccessRoleArn` - (Optional) Specifies an Identity and Access Management (IAM) role that DataSync assumes when mounting the Amazon EFS file system.
+* `inTransitEncryption` - (Optional) Specifies whether you want DataSync to use TLS encryption when transferring data to or from your Amazon EFS file system. Valid values are `none` and `tls12`.
+* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination. Default `/`.
+* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### ec2_config Argument Reference
+
+The `ec2Config` configuration block supports the following arguments:
+
+* `securityGroupArns` - (Required) List of Amazon Resource Names (ARNs) of the EC2 Security Groups that are associated with the EFS Mount Target.
+* `subnetArn` - (Required) Amazon Resource Name (ARN) of the EC2 Subnet that is associated with the EFS Mount Target.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the DataSync Location.
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationEfs` using the DataSync Location Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsDatasyncLocationEfs` using the DataSync Location Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_datasync_location_efs.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/datasync_location_fsx_lustre_file_system.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_fsx_lustre_file_system.html.markdown
new file mode 100644
index 00000000000..8f996ad7ab1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/datasync_location_fsx_lustre_file_system.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "DataSync"
+layout: "aws"
+page_title: "AWS: aws_datasync_location_fsx_lustre_file_system"
+description: |-
+  Manages an FSx Lustre Location within AWS DataSync.
+---
+
+
+
+# Resource: aws_datasync_location_fsx_lustre_file_system
+
+Manages an AWS DataSync FSx Lustre Location.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
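+ *
+ * Note: awsFsxLustreFileSystemExample and awsSecurityGroupExample in the
+ * stack below are placeholders for resources defined elsewhere in the
+ * configuration, carried over from the original HCL example.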
+ */ +import { DatasyncLocationFsxLustreFileSystem } from "./.gen/providers/aws/datasync-location-fsx-lustre-file-system"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncLocationFsxLustreFileSystem(this, "example", { + fsxFilesystemArn: Token.asString(awsFsxLustreFileSystemExample.arn), + securityGroupArns: [Token.asString(awsSecurityGroupExample.arn)], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `fsxFilesystemArn` - (Required) The Amazon Resource Name (ARN) for the FSx for Lustre file system. +* `securityGroupArns` - (Optional) The Amazon Resource Names (ARNs) of the security groups that are to use to configure the FSx for Lustre file system. +* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the DataSync Location. +* `arn` - Amazon Resource Name (ARN) of the DataSync Location. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `uri` - The URL of the FSx for Lustre location that was described. +* `creationTime` - The time that the FSx for Lustre location was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationFsxLustreFileSystem` using the `dataSyncArn#fSxLustreArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDatasyncLocationFsxLustreFileSystem` using the `dataSyncArn#fSxLustreArn`. For example: + +```console +% terraform import aws_datasync_location_fsx_lustre_file_system.example arn:aws:datasync:us-west-2:123456789012:location/loc-12345678901234567#arn:aws:fsx:us-west-2:476956259333:file-system/fs-08e04cd442c1bb94a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_location_fsx_openzfs_file_system.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_fsx_openzfs_file_system.html.markdown new file mode 100644 index 00000000000..9894541364a --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_location_fsx_openzfs_file_system.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_fsx_openzfs_file_system" +description: |- + Manages an FSx OpenZfs Location within AWS DataSync. +--- + + + +# Resource: aws_datasync_location_fsx_openzfs_file_system + +Manages an AWS DataSync FSx OpenZfs Location. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncLocationFsxOpenzfsFileSystem } from "./.gen/providers/aws/datasync-location-fsx-openzfs-file-system"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncLocationFsxOpenzfsFileSystem(this, "example", { + fsxFilesystemArn: Token.asString(awsFsxOpenzfsFileSystemExample.arn), + protocol: { + nfs: { + mountOptions: { + version: "AUTOMATIC", + }, + }, + }, + securityGroupArns: [Token.asString(awsSecurityGroupExample.arn)], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `fsxFilesystemArn` - (Required) The Amazon Resource Name (ARN) for the FSx for OpenZfs file system. +* `protocol` - (Required) The type of protocol that DataSync uses to access your file system. See below. +* `securityGroupArns` - (Optional) The Amazon Resource Names (ARNs) of the security groups that are to use to configure the FSx for openzfs file system. +* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination. Must start with `/fsx`. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### protocol + +* `nfs` - (Required) Represents the Network File System (NFS) protocol that DataSync uses to access your FSx for OpenZFS file system. See below. + +### nfs + +* `mountOptions` - (Required) Represents the mount options that are available for DataSync to access an NFS location. See below. + +### mount_options + +* `version` - (Optional) The specific NFS version that you want DataSync to use for mounting your NFS share. Valid values: `automatic`, `nfs3`, `nfs40` and `nfs41`. Default: `automatic` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the DataSync Location. +* `arn` - Amazon Resource Name (ARN) of the DataSync Location. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `uri` - The URL of the FSx for openzfs location that was described. +* `creationTime` - The time that the FSx for openzfs location was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationFsxOpenzfsFileSystem` using the `dataSyncArn#fSxOpenzfsArn`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDatasyncLocationFsxOpenzfsFileSystem` using the `dataSyncArn#fSxOpenzfsArn`. For example: + +```console +% terraform import aws_datasync_location_fsx_openzfs_file_system.example arn:aws:datasync:us-west-2:123456789012:location/loc-12345678901234567#arn:aws:fsx:us-west-2:123456789012:file-system/fs-08e04cd442c1bb94a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_location_fsx_windows_file_system.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_fsx_windows_file_system.html.markdown new file mode 100644 index 00000000000..36eef535c6d --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_location_fsx_windows_file_system.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_fsx_windows_file_system" +description: |- + Manages an FSx Windows Location within AWS DataSync. +--- + + + +# Resource: aws_datasync_location_fsx_windows_file_system + +Manages an AWS DataSync FSx Windows Location. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncLocationFsxWindowsFileSystem } from "./.gen/providers/aws/datasync-location-fsx-windows-file-system"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncLocationFsxWindowsFileSystem(this, "example", { + fsxFilesystemArn: Token.asString(awsFsxWindowsFileSystemExample.arn), + password: "SuperSecretPassw0rd", + securityGroupArns: [Token.asString(awsSecurityGroupExample.arn)], + user: "SomeUser", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `fsxFilesystemArn` - (Required) The Amazon Resource Name (ARN) for the FSx for Windows file system. +* `password` - (Required) The password of the user who has the permissions to access files and folders in the FSx for Windows file system. +* `user` - (Required) The user who has the permissions to access files and folders in the FSx for Windows file system. +* `domain` - (Optional) The name of the Windows domain that the FSx for Windows server belongs to. +* `securityGroupArns` - (Optional) The Amazon Resource Names (ARNs) of the security groups that are to use to configure the FSx for Windows file system. +* `subdirectory` - (Optional) Subdirectory to perform actions as source or destination. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the DataSync Location. 
+* `arn` - Amazon Resource Name (ARN) of the DataSync Location. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `uri` - The URL of the FSx for Windows location that was described. +* `creationTime` - The time that the FSx for Windows location was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationFsxWindowsFileSystem` using the `dataSyncArn#fSxWindowsArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDatasyncLocationFsxWindowsFileSystem` using the `dataSyncArn#fSxWindowsArn`. For example: + +```console +% terraform import aws_datasync_location_fsx_windows_file_system.example arn:aws:datasync:us-west-2:123456789012:location/loc-12345678901234567#arn:aws:fsx:us-west-2:476956259333:file-system/fs-08e04cd442c1bb94a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_location_hdfs.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_hdfs.html.markdown new file mode 100644 index 00000000000..0a094639f7e --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_location_hdfs.html.markdown @@ -0,0 +1,104 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_hdfs" +description: |- + Manages an AWS DataSync HDFS Location +--- + + + +# Resource: aws_datasync_location_hdfs + +Manages an HDFS Location within AWS DataSync. + +~> **NOTE:** The DataSync Agents must be available before creating this resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncLocationHdfs } from "./.gen/providers/aws/datasync-location-hdfs"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncLocationHdfs(this, "example", { + agentArns: [Token.asString(awsDatasyncAgentExample.arn)], + authenticationType: "SIMPLE", + nameNode: [ + { + hostname: Token.asString(awsInstanceExample.privateDns), + port: 80, + }, + ], + simpleUser: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `agentArns` - (Required) A list of DataSync Agent ARNs with which this location will be associated. +* `authenticationType` - (Required) The type of authentication used to determine the identity of the user. Valid values are `simple` and `kerberos`. +* `nameNode` - (Required) The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below. 
+
* `simpleUser` - (Optional) The user name used to identify the client on the host operating system. If `simple` is specified for `authenticationType`, this parameter is required.
* `blockSize` - (Optional) The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
* `replicationFactor` - (Optional) The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
* `kerberosKeytab` - (Optional) The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. If `kerberos` is specified for `authenticationType`, this parameter is required.
* `kerberosKrb5Conf` - (Optional) The krb5.conf file that contains the Kerberos configuration information. If `kerberos` is specified for `authenticationType`, this parameter is required.
* `kerberosPrincipal` - (Optional) The Kerberos principal with access to the files and folders on the HDFS cluster. If `kerberos` is specified for `authenticationType`, this parameter is required.
* `kmsKeyProviderUri` - (Optional) The URI of the HDFS cluster's Key Management Server (KMS).
* `qopConfiguration` - (Optional) The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qopConfiguration` isn't specified, `rpcProtection` and `dataTransferProtection` default to `privacy`. If you set either `rpcProtection` or `dataTransferProtection`, the other parameter assumes the same value. See configuration below.
* `subdirectory` - (Optional) A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### name_node Argument Reference

* `hostname` - (Required) The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
* `port` - (Required) The port that the NameNode uses to listen to client requests.

### qop_configuration Argument Reference

* `dataTransferProtection` - (Optional) The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your `dfs.data.transfer.protection` setting in the `hdfs-site.xml` file on your Hadoop cluster. Valid values are `disabled`, `authentication`, `integrity` and `privacy`.
* `rpcProtection` - (Optional) The RPC protection setting configured on the HDFS cluster. This setting corresponds to your `hadoop.rpc.protection` setting in the `core-site.xml` file on your Hadoop cluster. Valid values are `disabled`, `authentication`, `integrity` and `privacy`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationHdfs` using the Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDatasyncLocationHdfs` using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_datasync_location_hdfs.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_location_nfs.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_nfs.html.markdown new file mode 100644 index 00000000000..2d08f09b189 --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_location_nfs.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_nfs" +description: |- + Manages an AWS DataSync NFS Location +--- + + + +# Resource: aws_datasync_location_nfs + +Manages an NFS Location within AWS DataSync. + +~> **NOTE:** The DataSync Agents must be available before creating this resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncLocationNfs } from "./.gen/providers/aws/datasync-location-nfs"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncLocationNfs(this, "example", { + onPremConfig: { + agentArns: [Token.asString(awsDatasyncAgentExample.arn)], + }, + serverHostname: "nfs.example.com", + subdirectory: "/exported/path", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `mountOptions` - (Optional) Configuration block containing mount options used by DataSync to access the NFS Server. +* `onPremConfig` - (Required) Configuration block containing information for connecting to the NFS File System. +* `serverHostname` - (Required) Specifies the IP address or DNS name of the NFS server. The DataSync Agent(s) use this to mount the NFS server. +* `subdirectory` - (Required) Subdirectory to perform actions as source or destination. Should be exported by the NFS server. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
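
The `mountOptions` and `onPremConfig` blocks described above are plain nested objects in CDKTF. As a hedged sketch, a location that pins the NFS version instead of relying on automatic negotiation could look like the following (the `awsDatasyncAgentExample` agent is assumed to be defined elsewhere in the stack, as in the example above):

```typescript
// A minimal sketch: pin the NFS version via mountOptions rather than
// negotiating it. The referenced agent is assumed to exist elsewhere.
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
import { DatasyncLocationNfs } from "./.gen/providers/aws/datasync-location-nfs";
class PinnedNfsVersion extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DatasyncLocationNfs(this, "pinned", {
      mountOptions: {
        version: "NFS4_1", // instead of the default AUTOMATIC
      },
      onPremConfig: {
        agentArns: [Token.asString(awsDatasyncAgentExample.arn)],
      },
      serverHostname: "nfs.example.com",
      subdirectory: "/exported/path",
    });
  }
}
```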
+

### mount_options Argument Reference

The `mountOptions` configuration block supports the following arguments:

* `version` - (Optional) The specific NFS version that you want DataSync to use for mounting your NFS share. Valid values: `automatic`, `nfs3`, `nfs40` and `nfs41`. Default: `automatic`

### on_prem_config Argument Reference

The `onPremConfig` configuration block supports the following arguments:

* `agentArns` - (Required) List of Amazon Resource Names (ARNs) of the DataSync Agents used to connect to the NFS server.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Amazon Resource Name (ARN) of the DataSync Location.
* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationNfs` using the DataSync Task Amazon Resource Name (ARN). For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsDatasyncLocationNfs` using the DataSync Task Amazon Resource Name (ARN). For example:

```console
% terraform import aws_datasync_location_nfs.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_location_object_storage.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_object_storage.html.markdown new file mode 100644 index 00000000000..7a1833b8416 --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_location_object_storage.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_object_storage" +description: |- + Manages an AWS DataSync Object Storage Location +--- + + 

# Resource: aws_datasync_location_object_storage

Manages an Object Storage Location within AWS DataSync.

~> **NOTE:** The DataSync Agents must be available before creating this resource.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DatasyncLocationObjectStorage } from "./.gen/providers/aws/datasync-location-object-storage";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DatasyncLocationObjectStorage(this, "example", {
      agentArns: [Token.asString(awsDatasyncAgentExample.arn)],
      bucketName: "example",
      serverHostname: "example",
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `agentArns` - (Required) A list of DataSync Agent ARNs with which this location will be associated.
* `accessKey` - (Optional) The access key is used if credentials are required to access the self-managed object storage server. If your object storage requires a user name and password to authenticate, use `accessKey` and `secretKey` to provide the user name and password, respectively.
* `bucketName` - (Required) The bucket on the self-managed object storage server that is used to read data from.
* `secretKey` - (Optional) The secret key is used if credentials are required to access the self-managed object storage server. If your object storage requires a user name and password to authenticate, use `accessKey` and `secretKey` to provide the user name and password, respectively.
* `serverCertificate` - (Optional) Specifies a certificate to authenticate with an object storage system that uses a private or self-signed certificate authority (CA). You must specify a Base64-encoded .pem string. The certificate can be up to 32768 bytes (before Base64 encoding).
* `serverHostname` - (Required) The name of the self-managed object storage server. This value is the IP address or Domain Name Service (DNS) name of the object storage server. An agent uses this host name to mount the object storage server in a network.
* `serverProtocol` - (Optional) The protocol that the object storage server uses to communicate. Valid values are `http` or `https`.
* `serverPort` - (Optional) The port that your self-managed object storage server accepts inbound network traffic on. The server port is set by default to TCP 80 (`http`) or TCP 443 (`https`). You can specify a custom port if your self-managed object storage server requires one.
* `subdirectory` - (Optional) A subdirectory on the object storage server. This subdirectory is used to read data from or write data to the object storage server. If the subdirectory isn't specified, it will default to `/`.
* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
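
As a sketch, a credentialed HTTPS endpoint on a custom port might be configured as follows (the hostname and key values are illustrative placeholders, and the agent is assumed to exist as in the example above):

```typescript
// A hedged sketch: self-managed object storage over HTTPS,
// authenticated with an access/secret key pair (placeholder values).
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
import { DatasyncLocationObjectStorage } from "./.gen/providers/aws/datasync-location-object-storage";
class SecuredObjectStorage extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DatasyncLocationObjectStorage(this, "secured", {
      agentArns: [Token.asString(awsDatasyncAgentExample.arn)],
      bucketName: "example",
      serverHostname: "objects.example.com", // placeholder hostname
      serverProtocol: "HTTPS",
      serverPort: 443,
      accessKey: "EXAMPLEACCESSKEY", // placeholder credential
      secretKey: "example-secret-key", // placeholder credential
    });
  }
}
```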
## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of the DataSync Location.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
* `uri` - The URL of the Object Storage location that was described.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationObjectStorage` using the Amazon Resource Name (ARN). For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsDatasyncLocationObjectStorage` using the Amazon Resource Name (ARN). 
For example: + +```console +% terraform import aws_datasync_location_object_storage.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_location_s3.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_s3.html.markdown new file mode 100644 index 00000000000..ef7f6b6a18a --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_location_s3.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_s3" +description: |- + Manages an AWS DataSync S3 Location +--- + + + +# Resource: aws_datasync_location_s3 + +Manages an S3 Location within AWS DataSync. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncLocationS3 } from "./.gen/providers/aws/datasync-location-s3"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncLocationS3(this, "example", { + s3BucketArn: Token.asString(awsS3BucketExample.arn), + s3Config: { + bucketAccessRoleArn: Token.asString(awsIamRoleExample.arn), + }, + subdirectory: "/example/prefix", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `agentArns` - (Optional) A list of DataSync Agent ARNs with which this location will be associated. +* `s3BucketArn` - (Required) Amazon Resource Name (ARN) of the S3 Bucket. +* `s3Config` - (Required) Configuration block containing information for connecting to S3. +* `s3StorageClass` - (Optional) The Amazon S3 storage class that you want to store your files in when this location is used as a task destination. [Valid values](https://docs.aws.amazon.com/datasync/latest/userguide/create-s3-location.html#using-storage-classes) +* `subdirectory` - (Required) Prefix to perform actions as source or destination. +* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### s3_config Argument Reference + +The `s3Config` configuration block supports the following arguments: + +* `bucketAccessRoleArn` - (Required) ARN of the IAM Role used to connect to the S3 Bucket. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the DataSync Location. +* `arn` - Amazon Resource Name (ARN) of the DataSync Location. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationS3` using the DataSync Task Amazon Resource Name (ARN). 
For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsDatasyncLocationS3` using the DataSync Task Amazon Resource Name (ARN). For example:

```console
% terraform import aws_datasync_location_s3.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_location_smb.html.markdown b/website/docs/cdktf/typescript/r/datasync_location_smb.html.markdown new file mode 100644 index 00000000000..0d6336d07e7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_location_smb.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_location_smb" +description: |- + Manages an AWS DataSync SMB Location +--- + + 

# Resource: aws_datasync_location_smb

Manages an SMB Location within AWS DataSync.

~> **NOTE:** The DataSync Agents must be available before creating this resource.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DatasyncLocationSmb } from "./.gen/providers/aws/datasync-location-smb";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DatasyncLocationSmb(this, "example", {
      agentArns: [Token.asString(awsDatasyncAgentExample.arn)],
      password: "ANotGreatPassword",
      serverHostname: "smb.example.com",
      subdirectory: "/exported/path",
      user: "Guest",
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `agentArns` - (Required) A list of DataSync Agent ARNs with which this location will be associated.
* `domain` - (Optional) The name of the Windows domain the SMB server belongs to.
* `mountOptions` - (Optional) Configuration block containing mount options used by DataSync to access the SMB Server. Can be `automatic`, `smb2`, or `smb3`.
* `password` - (Required) The password of the user who can mount the share and has file permissions in the SMB.
* `serverHostname` - (Required) Specifies the IP address or DNS name of the SMB server. The DataSync Agent(s) use this to mount the SMB share.
* `subdirectory` - (Required) Subdirectory to perform actions as source or destination. Should be shared by the SMB server.
* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
* `user` - (Required) The user who can mount the share and has file and folder permissions in the SMB share.

### mount_options Argument Reference

The `mountOptions` configuration block supports the following arguments:

* `version` - (Optional) The specific SMB version that you want DataSync to use for mounting your SMB share. 
Valid values: `automatic`, `smb2`, and `smb3`. Default: `automatic` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the DataSync Location. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncLocationSmb` using the Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDatasyncLocationSmb` using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_datasync_location_smb.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/datasync_task.html.markdown b/website/docs/cdktf/typescript/r/datasync_task.html.markdown new file mode 100644 index 00000000000..8a6f878a5e6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/datasync_task.html.markdown @@ -0,0 +1,187 @@ +--- +subcategory: "DataSync" +layout: "aws" +page_title: "AWS: aws_datasync_task" +description: |- + Manages an AWS DataSync Task +--- + + + +# Resource: aws_datasync_task + +Manages an AWS DataSync Task, which represents a configuration for synchronization. Starting an execution of these DataSync Tasks (actually synchronizing files) is performed outside of this Terraform resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Op, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncTask } from "./.gen/providers/aws/datasync-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncTask(this, "example", { + destinationLocationArn: destination.arn, + name: "example", + options: { + bytesPerSecond: Token.asNumber(Op.negate(1)), + }, + sourceLocationArn: source.arn, + }); + } +} + +``` + +## Example Usage with Scheduling + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DatasyncTask } from "./.gen/providers/aws/datasync-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DatasyncTask(this, "example", { + destinationLocationArn: destination.arn, + name: "example", + schedule: { + scheduleExpression: "cron(0 12 ? 
* SUN,WED *)",
      },
      sourceLocationArn: source.arn,
    });
  }
}

```

## Example Usage with Filtering

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DatasyncTask } from "./.gen/providers/aws/datasync-task";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DatasyncTask(this, "example", {
      destinationLocationArn: destination.arn,
      excludes: {
        filterType: "SIMPLE_PATTERN",
        value: "/folder1|/folder2",
      },
      includes: {
        filterType: "SIMPLE_PATTERN",
        value: "/folder1|/folder2",
      },
      name: "example",
      sourceLocationArn: source.arn,
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `destinationLocationArn` - (Required) Amazon Resource Name (ARN) of destination DataSync Location.
* `sourceLocationArn` - (Required) Amazon Resource Name (ARN) of source DataSync Location.
* `cloudwatchLogGroupArn` - (Optional) Amazon Resource Name (ARN) of the CloudWatch Log Group that is used to monitor and log events in the sync task.
* `excludes` - (Optional) Filter rules that determine which files to exclude from a task.
* `includes` - (Optional) Filter rules that determine which files to include in a task.
* `name` - (Optional) Name of the DataSync Task.
* `options` - (Optional) Configuration block containing options that control the default behavior when you start an execution of this DataSync Task. For each individual task execution, you can override these options by specifying an overriding configuration in those executions.
* `schedule` - (Optional) Specifies a schedule used to periodically transfer files from a source to a destination location.
* `tags` - (Optional) Key-value pairs of resource tags to assign to the DataSync Task. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### options Argument Reference

~> **NOTE:** If `atime` is set to `bestEffort`, `mtime` must be set to `preserve`. If `atime` is set to `none`, `mtime` must be set to `none`.

The `options` configuration block supports the following arguments:

* `atime` - (Optional) A file metadata that shows the last time a file was accessed (that is when the file was read or written to). If set to `bestEffort`, the DataSync Task attempts to preserve the original (that is, the version before sync `preparing` phase) `atime` attribute on all source files. Valid values: `bestEffort`, `none`. Default: `bestEffort`.
* `bytesPerSecond` - (Optional) Limits the bandwidth utilized. For example, to set a maximum of 1 MB, set this value to `1048576`. Valid values: `-1` or greater. Default: `-1` (unlimited).
* `gid` - (Optional) Group identifier of the file's owners. Valid values: `both`, `intValue`, `name`, `none`. Default: `intValue` (preserve integer value of the ID).
* `logLevel` - (Optional) Determines the type of logs that DataSync publishes to a log stream in the Amazon CloudWatch log group that you provide. Valid values: `off`, `basic`, `transfer`. Default: `off`.
* `mtime` - (Optional) A file metadata that indicates the last time a file was modified (written to) before the sync `preparing` phase. Valid values: `none`, `preserve`. Default: `preserve`.
* `objectTags` - (Optional) Specifies whether object tags are maintained when transferring between object storage systems. If you want your DataSync task to ignore object tags, specify the NONE value. Valid values: `preserve`, `none`. Default value: `preserve`.
* `overwriteMode` - (Optional) Determines whether files at the destination should be overwritten or preserved when copying files. Valid values: `always`, `never`. Default: `always`.
* `posixPermissions` - (Optional) Determines which users or groups can access a file for a specific purpose such as reading, writing, or execution of the file. Valid values: `none`, `preserve`. Default: `preserve`.
* `preserveDeletedFiles` - (Optional) Whether files deleted in the source should be removed or preserved in the destination file system. Valid values: `preserve`, `remove`. Default: `preserve`.
* `preserveDevices` - (Optional) Whether the DataSync Task should preserve the metadata of block and character devices in the source file system, and recreate the files with that device name and metadata on the destination. The DataSync Task can’t sync the actual contents of such devices, because many of the devices are non-terminal and don’t return an end of file (EOF) marker. Valid values: `none`, `preserve`. Default: `none` (ignore special devices).
* `securityDescriptorCopyFlags` - (Optional) Determines which components of the SMB security descriptor are copied from source to destination objects. This value is only used for transfers between SMB and Amazon FSx for Windows File Server locations, or between two Amazon FSx for Windows File Server locations. Valid values: `none`, `ownerDacl`, `ownerDaclSacl`. Default: `ownerDacl`.
* `taskQueueing` - (Optional) Determines whether tasks should be queued before executing the tasks. Valid values: `enabled`, `disabled`. Default: `enabled`.
* `transferMode` - (Optional) Determines whether DataSync transfers only the data and metadata that differ between the source and the destination location, or whether DataSync transfers all the content from the source, without comparing to the destination location. Valid values: `changed`, `all`. Default: `changed`.
* `uid` - (Optional) User identifier of the file's owners. Valid values: `both`, `intValue`, `name`, `none`. Default: `intValue` (preserve integer value of the ID).
* `verifyMode` - (Optional) Whether a data integrity verification should be performed at the end of a task execution after all data and metadata have been transferred. Valid values: `none`, `pointInTimeConsistent`, `onlyFilesTransferred`. Default: `pointInTimeConsistent`.
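
As a hedged sketch of how several of these options combine on one task, the snippet below uses the raw API casing that the converted examples above use (e.g. `BEST_EFFORT`); `source` and `destination` are assumed to be DataSync locations defined elsewhere, as in the earlier examples:

```typescript
// A sketch: preserve timestamps, never overwrite destination files,
// propagate deletions, and verify only the files that were transferred.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { DatasyncTask } from "./.gen/providers/aws/datasync-task";
class TunedTask extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DatasyncTask(this, "tuned", {
      destinationLocationArn: destination.arn,
      sourceLocationArn: source.arn,
      name: "tuned-example",
      options: {
        atime: "BEST_EFFORT", // requires mtime PRESERVE (see NOTE above)
        mtime: "PRESERVE",
        overwriteMode: "NEVER",
        preserveDeletedFiles: "REMOVE",
        verifyMode: "ONLY_FILES_TRANSFERRED",
      },
    });
  }
}
```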
### Schedule

* `scheduleExpression` - (Required) Specifies the schedule you want your task to use for repeated executions. For more information, see [Schedule Expressions for Rules](https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html).

### excludes Argument Reference

* `filterType` - (Optional) The type of filter rule to apply. Valid values: `simplePattern`.
* `value` - (Optional) A single filter string that consists of the patterns to exclude. The patterns are delimited by "|" (that is, a pipe), for example: `/folder1|/folder2`

### includes Argument Reference

* `filterType` - (Optional) The type of filter rule to apply. Valid values: `simplePattern`.
+
* `value` - (Optional) A single filter string that consists of the patterns to include. The patterns are delimited by "|" (that is, a pipe), for example: `/folder1|/folder2`

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Amazon Resource Name (ARN) of the DataSync Task.
* `arn` - Amazon Resource Name (ARN) of the DataSync Task.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

* `create` - (Default `5M`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDatasyncTask` using the DataSync Task Amazon Resource Name (ARN). For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsDatasyncTask` using the DataSync Task Amazon Resource Name (ARN). For example:

```console
% terraform import aws_datasync_task.example arn:aws:datasync:us-east-1:123456789012:task/task-12345678901234567
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dax_cluster.html.markdown b/website/docs/cdktf/typescript/r/dax_cluster.html.markdown new file mode 100644 index 00000000000..253026480e2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dax_cluster.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "DynamoDB Accelerator (DAX)" +layout: "aws" +page_title: "AWS: aws_dax_cluster" +description: |- + Provides a DAX Cluster resource. +--- + + 

# Resource: aws_dax_cluster

Provides a DAX Cluster resource.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DaxCluster } from "./.gen/providers/aws/dax-cluster";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DaxCluster(this, "bar", {
      clusterName: "cluster-example",
      iamRoleArn: Token.asString(example.arn),
      nodeType: "dax.r4.large",
      replicationFactor: 1,
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `clusterEndpointEncryptionType` – (Optional) The type of encryption the
cluster's endpoint should support. Valid values are: `none` and `tls`.
Default value is `none`.

* `clusterName` – (Required) Group identifier. DAX converts this name to
lowercase.

* `iamRoleArn` - (Required) A valid Amazon Resource Name (ARN) that identifies
an IAM role. At runtime, DAX will assume this role and use the role's
permissions to access DynamoDB on your behalf.

* `nodeType` – (Required) The compute and memory capacity of the nodes. 
See
[Nodes][1] for supported node types.

* `replicationFactor` – (Required) The number of nodes in the DAX cluster. A
replication factor of 1 will create a single-node cluster, without any read
replicas.

* `availabilityZones` - (Optional) List of Availability Zones in which the
nodes will be created.

* `description` – (Optional) Description for the cluster.

* `notificationTopicArn` – (Optional) An Amazon Resource Name (ARN) of an
SNS topic to send DAX notifications to. Example:
`arn:aws:sns:us-east-1:012345678999:my_sns_topic`

* `parameterGroupName` – (Optional) Name of the parameter group to associate
with this DAX cluster.

* `maintenanceWindow` – (Optional) Specifies the weekly time range for when
maintenance on the cluster is performed. The format is `ddd:hh24:mi-ddd:hh24:mi`
(24H Clock UTC). The minimum maintenance window is a 60 minute period. Example:
`sun:05:00-sun:09:00`

* `securityGroupIds` – (Optional) One or more VPC security groups associated
with the cluster.

* `serverSideEncryption` - (Optional) Encrypt at rest options.

* `subnetGroupName` – (Optional) Name of the subnet group to be used for the
cluster.

* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

The `serverSideEncryption` object supports the following:

* `enabled` - (Optional) Whether to enable encryption at rest. Defaults to `false`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the DAX cluster.

* `nodes` - List of node objects including `id`, `address`, `port` and
`availabilityZone`. Referenceable e.g., as
`${awsDaxClusterTestNodes0Address}`

* `configurationEndpoint` - The configuration endpoint for this DAX cluster,
consisting of a DNS name and a port number.

* `clusterAddress` - The DNS name of the DAX cluster without the port appended.

* `port` - The port used by the configuration endpoint.

* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `45M`)
- `update` - (Default `45M`)
- `delete` - (Default `90M`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DAX Clusters using the `clusterName`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import DAX Clusters using the `clusterName`. 
For example:

```console
% terraform import aws_dax_cluster.my_cluster my_cluster
```

[1]: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DAX.concepts.cluster.html#DAX.concepts.nodes

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dax_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/dax_parameter_group.html.markdown new file mode 100644 index 00000000000..69346b299fc --- /dev/null +++ b/website/docs/cdktf/typescript/r/dax_parameter_group.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "DynamoDB Accelerator (DAX)" +layout: "aws" +page_title: "AWS: aws_dax_parameter_group" +description: |- + Provides a DAX Parameter Group resource. +--- + + 

# Resource: aws_dax_parameter_group

Provides a DAX Parameter Group resource.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DaxParameterGroup } from "./.gen/providers/aws/dax-parameter-group";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DaxParameterGroup(this, "example", {
      name: "example",
      parameters: [
        {
          name: "query-ttl-millis",
          value: "100000",
        },
        {
          name: "record-ttl-millis",
          value: "100000",
        },
      ],
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `name` – (Required) The name of the parameter group.

* `description` - (Optional, ForceNew) A description of the parameter group.

* `parameters` – (Optional) The parameters of the parameter group.

## parameters

`parameters` supports the following:

* `name` - (Required) The name of the parameter.
* `value` - (Required) The value for the parameter.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name of the parameter group.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DAX Parameter Group using the `name`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import DAX Parameter Group using the `name`. For example:

```console
% terraform import aws_dax_parameter_group.example my_dax_pg
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dax_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/dax_subnet_group.html.markdown new file mode 100644 index 00000000000..1ba92d6d2f4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dax_subnet_group.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "DynamoDB Accelerator (DAX)" +layout: "aws" +page_title: "AWS: aws_dax_subnet_group" +description: |- + Provides a DAX Subnet Group resource. +--- + + 

# Resource: aws_dax_subnet_group

Provides a DAX Subnet Group resource. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DaxSubnetGroup } from "./.gen/providers/aws/dax-subnet-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DaxSubnetGroup(this, "example", { + name: "example", + subnetIds: [example1.id, example2.id], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` – (Required) The name of the subnet group. +* `description` - (Optional) A description of the subnet group. +* `subnetIds` – (Required) A list of VPC subnet IDs for the subnet group. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the subnet group. +* `vpcId` – VPC ID of the subnet group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DAX Subnet Group using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DAX Subnet Group using the `name`. For example: + +```console +% terraform import aws_dax_subnet_group.example my_dax_sg +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/db_cluster_snapshot.html.markdown b/website/docs/cdktf/typescript/r/db_cluster_snapshot.html.markdown new file mode 100644 index 00000000000..a9e3a63ad47 --- /dev/null +++ b/website/docs/cdktf/typescript/r/db_cluster_snapshot.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_cluster_snapshot" +description: |- + Manages an RDS database cluster snapshot. +--- + + + +# Resource: aws_db_cluster_snapshot + +Manages an RDS database cluster snapshot for Aurora clusters. For managing RDS database instance snapshots, see the [`awsDbSnapshot` resource](/docs/providers/aws/r/db_snapshot.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DbClusterSnapshot } from "./.gen/providers/aws/db-cluster-snapshot"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DbClusterSnapshot(this, "example", { + dbClusterIdentifier: Token.asString(awsRdsClusterExample.id), + dbClusterSnapshotIdentifier: "resourcetestsnapshot1234", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dbClusterIdentifier` - (Required) The DB Cluster Identifier from which to take the snapshot. +* `dbClusterSnapshotIdentifier` - (Required) The Identifier for the snapshot. +* `tags` - (Optional) A map of tags to assign to the DB cluster. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `allocatedStorage` - Allocated storage size in gigabytes (GB). +* `availabilityZones` - List of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. +* `dbClusterSnapshotArn` - The Amazon Resource Name (ARN) for the DB Cluster Snapshot. +* `engine` - Name of the database engine. +* `engineVersion` - Version of the database engine for this DB cluster snapshot. +* `kmsKeyId` - If storage_encrypted is true, the AWS KMS key identifier for the encrypted DB cluster snapshot. +* `licenseModel` - License model information for the restored DB cluster. +* `port` - Port that the DB cluster was listening on at the time of the snapshot. +* `sourceDbClusterSnapshotIdentifier` - DB Cluster Snapshot ARN that the DB Cluster Snapshot was copied from. It only has value in case of cross customer or cross region copy. +* `storageEncrypted` - Whether the DB cluster snapshot is encrypted. +* `status` - The status of this DB Cluster Snapshot. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpcId` - The VPC ID associated with the DB cluster snapshot. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `20M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDbClusterSnapshot` using the cluster snapshot identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDbClusterSnapshot` using the cluster snapshot identifier. For example: + +```console +% terraform import aws_db_cluster_snapshot.example my-cluster-snapshot +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/db_event_subscription.html.markdown b/website/docs/cdktf/typescript/r/db_event_subscription.html.markdown new file mode 100644 index 00000000000..6c17d95f1e6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/db_event_subscription.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_event_subscription" +description: |- + Provides a DB event subscription resource. +--- + + + +# Resource: aws_db_event_subscription + +Provides a DB event subscription resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DbEventSubscription } from "./.gen/providers/aws/db-event-subscription"; +import { DbInstance } from "./.gen/providers/aws/db-instance"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new DbInstance(this, "default", { + allocatedStorage: 10, + dbName: "mydb", + dbSubnetGroupName: "my_database_subnet_group", + engine: "mysql", + engineVersion: "5.6.17", + instanceClass: "db.t2.micro", + parameterGroupName: "default.mysql5.6", + password: "bar", + username: "foo", + }); + const awsSnsTopicDefault = new SnsTopic(this, "default_1", { + name: "rds-events", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSnsTopicDefault.overrideLogicalId("default"); + const awsDbEventSubscriptionDefault = new DbEventSubscription( + this, + "default_2", + { + eventCategories: [ + "availability", + "deletion", + "failover", + "failure", + "low storage", + "maintenance", + "notification", + "read replica", + "recovery", + "restoration", + ], + name: "rds-event-sub", + snsTopic: Token.asString(awsSnsTopicDefault.arn), + sourceIds: [defaultVar.identifier], + sourceType: "db-instance", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDbEventSubscriptionDefault.overrideLogicalId("default"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the DB event subscription. By default generated by Terraform. +* `namePrefix` - (Optional) The name of the DB event subscription. Conflicts with `name`. +* `snsTopic` - (Required) The SNS topic to send events to. +* `sourceIds` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a source_type must also be specified. +* `sourceType` - (Optional) The type of source that will be generating the events. Valid options are `dbInstance`, `dbSecurityGroup`, `dbParameterGroup`, `dbSnapshot`, `dbCluster`, `dbClusterSnapshot`, or `dbProxy`. If not set, all sources will be subscribed to. +* `eventCategories` - (Optional) A list of event categories for a SourceType that you want to subscribe to. See http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html or run `aws rds describe-event-categories`. +* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to true. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
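
For example, a narrower subscription that only watches a single DB cluster for failover-related events could look like the following sketch (the SNS topic and the `awsRdsClusterExample` cluster are assumed to be defined elsewhere in the stack):

```typescript
// A hedged sketch: send only failover/failure events for one DB cluster
// to an existing SNS topic. Both referenced resources are assumed to exist.
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
import { DbEventSubscription } from "./.gen/providers/aws/db-event-subscription";
class ClusterFailoverAlerts extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DbEventSubscription(this, "cluster_failover", {
      name: "cluster-failover-sub",
      snsTopic: Token.asString(awsSnsTopicDefault.arn),
      sourceType: "db-cluster",
      sourceIds: [Token.asString(awsRdsClusterExample.id)],
      eventCategories: ["failover", "failure"],
    });
  }
}
```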
+

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name of the RDS event notification subscription
* `arn` - The Amazon Resource Name of the RDS event notification subscription
* `customerAwsId` - The AWS customer account associated with the RDS event notification subscription
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `40M`)
- `delete` - (Default `40M`)
- `update` - (Default `40M`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Event Subscriptions using the `name`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import DB Event Subscriptions using the `name`. For example:

```console
% terraform import aws_db_event_subscription.default rds-event-sub
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/db_instance.html.markdown b/website/docs/cdktf/typescript/r/db_instance.html.markdown new file mode 100644 index 00000000000..da54d42a6b4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/db_instance.html.markdown @@ -0,0 +1,564 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_instance" +description: |- + Provides an RDS instance resource. +--- + + 

# Resource: aws_db_instance

Provides an RDS instance resource. A DB instance is an isolated database
environment in the cloud. A DB instance can contain multiple user-created
databases.

Changes to a DB instance can occur when you manually change a parameter, such as
`allocatedStorage`, and are reflected in the next maintenance window. Because
of this, Terraform may report a difference in its planning phase because a
modification has not yet taken place. You can use the `applyImmediately` flag
to instruct the service to apply the change immediately (see documentation
below).

When upgrading the major version of an engine, `allowMajorVersionUpgrade` must be set to `true`.

~> **Note:** using `applyImmediately` can result in a brief downtime as the server reboots.
See the AWS Docs on [RDS Instance Maintenance][instance-maintenance] for more information.

~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).

-> **Hands-on:** Try the [Manage AWS RDS Instances](https://learn.hashicorp.com/tutorials/terraform/aws-rds) tutorial on HashiCorp Learn.

## RDS Instance Class Types

Amazon RDS supports three types of instance classes: Standard, Memory Optimized, and Burstable Performance. 
+For more information, please read the AWS RDS documentation about [DB Instance Class Types](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html).
+
+## Low-Downtime Updates
+
+By default, RDS applies updates to DB Instances in-place, which can lead to service interruptions.
+Low-downtime updates minimize service interruptions by performing the updates with an [RDS Blue/Green deployment][blue-green] and switching over the instances when complete.
+
+Low-downtime updates are only available for DB Instances using MySQL and MariaDB,
+as other engines are not supported by RDS Blue/Green deployments.
+
+Backups must be enabled to use low-downtime updates.
+
+Enable low-downtime updates by setting `blueGreenUpdate.enabled` to `true`.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbInstance(this, "default", {
+      allocatedStorage: 10,
+      dbName: "mydb",
+      engine: "mysql",
+      engineVersion: "5.7",
+      instanceClass: "db.t3.micro",
+      parameterGroupName: "default.mysql5.7",
+      password: "foobarbaz",
+      skipFinalSnapshot: true,
+      username: "foo",
+    });
+  }
+}
+
+```
+
+### RDS Custom for Oracle Usage with Replica
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
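+ *
+ * Note: the converted example below references a `dbSubnetGroupName`
+ * variable that is not defined in this snippet; it is assumed to be
+ * declared elsewhere in the stack (e.g., the name of an existing
+ * DB subnet group).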
+ */
+import { DataAwsKmsKey } from "./.gen/providers/aws/data-aws-kms-key";
+import { DataAwsRdsOrderableDbInstance } from "./.gen/providers/aws/data-aws-rds-orderable-db-instance";
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const byId = new DataAwsKmsKey(this, "by_id", {
+      keyId: "example-ef278353ceba4a5a97de6784565b9f78",
+    });
+    const customOracle = new DataAwsRdsOrderableDbInstance(
+      this,
+      "custom-oracle",
+      {
+        engine: "custom-oracle-ee",
+        engineVersion: "19.c.ee.002",
+        licenseModel: "bring-your-own-license",
+        preferredInstanceClasses: [
+          "db.r5.24xlarge",
+          "db.r5.16xlarge",
+          "db.r5.12xlarge",
+        ],
+        storageType: "gp3",
+      }
+    );
+    const defaultVar = new DbInstance(this, "default", {
+      allocatedStorage: 50,
+      autoMinorVersionUpgrade: false,
+      backupRetentionPeriod: 7,
+      customIamInstanceProfile: "AWSRDSCustomInstanceProfile",
+      dbSubnetGroupName: dbSubnetGroupName,
+      engine: Token.asString(customOracle.engine),
+      engineVersion: Token.asString(customOracle.engineVersion),
+      identifier: "ee-instance-demo",
+      instanceClass: Token.asString(customOracle.instanceClass),
+      kmsKeyId: Token.asString(byId.arn),
+      licenseModel: Token.asString(customOracle.licenseModel),
+      multiAz: false,
+      password: "avoid-plaintext-passwords",
+      storageEncrypted: true,
+      timeouts: [
+        {
+          create: "3h",
+          delete: "3h",
+          update: "3h",
+        },
+      ],
+      username: "test",
+    });
+    new DbInstance(this, "test-replica", {
+      autoMinorVersionUpgrade: false,
+      backupRetentionPeriod: 7,
+      customIamInstanceProfile: "AWSRDSCustomInstanceProfile",
+      identifier: "ee-instance-replica",
+      instanceClass: Token.asString(customOracle.instanceClass),
+      kmsKeyId: Token.asString(byId.arn),
+      multiAz: false,
+      replicaMode: "mounted",
+      replicateSourceDb: defaultVar.identifier,
+      skipFinalSnapshot: true,
+      storageEncrypted: true,
+      timeouts: [
+        {
+          create: "3h",
+          delete: "3h",
+          update: "3h",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Storage Autoscaling
+
+To enable Storage Autoscaling with instances that support the feature, define the `maxAllocatedStorage` argument higher than the `allocatedStorage` argument. Terraform will automatically hide differences with the `allocatedStorage` argument value if autoscaling occurs.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+interface MyConfig {
+  instanceClass: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new DbInstance(this, "example", {
+      allocatedStorage: 50,
+      maxAllocatedStorage: 100,
+      instanceClass: config.instanceClass,
+    });
+  }
+}
+
+```
+
+### Managed Master Passwords via Secrets Manager, default KMS Key
+
+-> More information about the RDS/Aurora integration with Secrets Manager for managing master user passwords can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+You can specify the `manageMasterUserPassword` attribute to enable managing the master password with Secrets Manager. You can also update an existing instance to use Secrets Manager by specifying the `manageMasterUserPassword` attribute and removing the `password` attribute (removal is required).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbInstance(this, "default", {
+      allocatedStorage: 10,
+      dbName: "mydb",
+      engine: "mysql",
+      engineVersion: "5.7",
+      instanceClass: "db.t3.micro",
+      manageMasterUserPassword: true,
+      parameterGroupName: "default.mysql5.7",
+      username: "foo",
+    });
+  }
+}
+
+```
+
+### Managed Master Passwords via Secrets Manager, specific KMS Key
+
+-> More information about the RDS/Aurora integration with Secrets Manager for managing master user passwords can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+Set the `masterUserSecretKmsKeyId` attribute to use a specific KMS key for the managed secret.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new KmsKey(this, "example", {
+      description: "Example KMS Key",
+    });
+    new DbInstance(this, "default", {
+      allocatedStorage: 10,
+      dbName: "mydb",
+      engine: "mysql",
+      engineVersion: "5.7",
+      instanceClass: "db.t3.micro",
+      manageMasterUserPassword: true,
+      masterUserSecretKmsKeyId: example.keyId,
+      parameterGroupName: "default.mysql5.7",
+      username: "foo",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to the [AWS official
+documentation](http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html).
+
+This resource supports the following arguments:
+
+* `allocatedStorage` - (Required unless a `snapshotIdentifier` or `replicateSourceDb` is provided) The allocated storage in gibibytes. If `maxAllocatedStorage` is configured, this argument represents the initial storage allocation and differences from the configuration will be ignored automatically when Storage Autoscaling occurs. If `replicateSourceDb` is set, the value is ignored during the creation of the instance.
+* `allowMajorVersionUpgrade` - (Optional) Indicates that major version
+upgrades are allowed. Changing this parameter does not result in an outage and
+the change is asynchronously applied as soon as possible.
+* `applyImmediately` - (Optional) Specifies whether any database modifications
+are applied immediately, or during the next maintenance window. Default is
+`false`. See [Amazon RDS Documentation for more
+information.](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.DBInstance.Modifying.html)
+* `autoMinorVersionUpgrade` - (Optional) Indicates that minor engine upgrades
+will be applied automatically to the DB instance during the maintenance window.
+Defaults to `true`.
+* `availabilityZone` - (Optional) The AZ for the RDS instance.
+* `backupRetentionPeriod` - (Optional) The days to retain backups for.
+  Must be between `0` and `35`.
+  Default is `0`.
+  Must be greater than `0` if the database is used as a source for a [Read Replica][instance-replication],
+  uses [low-downtime updates](#low-downtime-updates),
+  or will use [RDS Blue/Green deployments][blue-green].
+* `backupTarget` - (Optional, Forces new resource) Specifies where automated backups and manual snapshots are stored. Possible values are `region` (default) and `outposts`. See [Working with Amazon RDS on AWS Outposts](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-on-outposts.html) for more information.
+* `backupWindow` - (Optional) The daily time range (in UTC) during which automated backups are created if they are enabled.
+  Example: "09:46-10:16". Must not overlap with `maintenanceWindow`.
+* `blueGreenUpdate` - (Optional) Enables low-downtime updates using [RDS Blue/Green deployments][blue-green].
+  See [blue_green_update](#blue_green_update) below.
+* `caCertIdentifier` - (Optional) The identifier of the CA certificate for the DB instance.
+* `characterSetName` - (Optional) The character set name to use for DB
+encoding in Oracle and Microsoft SQL instances (collation). This can't be changed. See [Oracle Character Sets
+Supported in Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.OracleCharacterSets.html)
+or [Server-Level Collation for Microsoft SQL Server](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.SQLServer.CommonDBATasks.Collation.html) for more information.
+* `copyTagsToSnapshot` - (Optional, boolean) Copy all Instance `tags` to snapshots. Default is `false`.
+* `customIamInstanceProfile` - (Optional) The instance profile associated with the underlying Amazon EC2 instance of an RDS Custom DB instance.
+* `dbName` - (Optional) The name of the database to create when the DB instance is created. If this parameter is not specified, no database is created in the DB instance. Note that this does not apply for Oracle or SQL Server engines. See the [AWS documentation](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/rds/create-db-instance.html) for more details on what applies for those engines. If you are providing an Oracle db name, it needs to be in all upper case. Cannot be specified for a replica.
+* `dbSubnetGroupName` - (Optional) Name of [DB subnet group](/docs/providers/aws/r/db_subnet_group.html). DB instance will
+be created in the VPC associated with the DB subnet group. If unspecified, will
+be created in the `default` VPC, or in EC2 Classic, if available. When working
+with read replicas, it should be specified only if the source database
+specifies an instance in another AWS Region. See [DBSubnetGroupName in API
+action CreateDBInstanceReadReplica](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstanceReadReplica.html)
+for additional read replica constraints.
+* `deleteAutomatedBackups` - (Optional) Specifies whether to remove automated backups immediately after the DB instance is deleted. Default is `true`.
+* `deletionProtection` - (Optional) If the DB instance should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `domain` - (Optional) The ID of the Directory Service Active Directory domain to create the instance in.
+* `domainIamRoleName` - (Optional, but required if domain is provided) The name of the IAM role to be used when making API calls to the Directory Service.
+* `enabledCloudwatchLogsExports` - (Optional) Set of log types to enable for exporting to CloudWatch logs. If omitted, no logs will be exported. Valid values (depending on `engine`): MySQL and MariaDB: `audit`, `error`, `general`, `slowquery`. PostgreSQL: `postgresql`, `upgrade`. MSSQL: `agent`, `error`. Oracle: `alert`, `audit`, `listener`, `trace`.
+* `engine` - (Required unless a `snapshotIdentifier` or `replicateSourceDb` is provided) The database engine to use. For supported values, see the Engine parameter in [API action CreateDBInstance](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html). Note that for Amazon Aurora instances the engine must match the [DB cluster](/docs/providers/aws/r/rds_cluster.html)'s engine. For information on the difference between the available Aurora MySQL engines see [Comparison between Aurora MySQL 1 and Aurora MySQL 2](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Updates.20180206.html) in the Amazon RDS User Guide.
+* `engineVersion` - (Optional) The engine version to use. If `autoMinorVersionUpgrade` is enabled, you can provide a prefix of the version such as `5.7` (for `5.7.10`). The actual engine version used is returned in the attribute `engineVersionActual`, see [Attribute Reference](#attribute-reference) below. For supported values, see the EngineVersion parameter in [API action CreateDBInstance](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html). Note that for Amazon Aurora instances the engine version must match the [DB cluster](/docs/providers/aws/r/rds_cluster.html)'s engine version.
+* `finalSnapshotIdentifier` - (Optional) The name of your final DB snapshot
+when this DB instance is deleted. Must be provided if `skipFinalSnapshot` is
+set to `false`. The value must begin with a letter, only contain alphanumeric characters and hyphens, and not end with a hyphen or contain two consecutive hyphens. Must not be provided when deleting a read replica.
+* `iamDatabaseAuthenticationEnabled` - (Optional) Specifies whether mappings of AWS Identity and Access Management (IAM) accounts to database
+accounts are enabled.
+* `identifier` - (Optional) The name of the RDS instance. If omitted, Terraform will assign a random, unique identifier. Required if `restoreToPointInTime` is specified.
+* `identifierPrefix` - (Optional) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `instanceClass` - (Required) The instance type of the RDS instance.
+* `iops` - (Optional) The amount of provisioned IOPS. Setting this implies a
+`storageType` of "io1". Can only be set when `storageType` is `"io1"` or `"gp3"`.
+Cannot be specified for gp3 storage if the `allocatedStorage` value is below a per-`engine` threshold.
+See the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#gp3-storage) for details.
+* `kmsKeyId` - (Optional) The ARN for the KMS encryption key. If creating an
+encrypted replica, set this to the destination KMS ARN.
+* `licenseModel` - (Optional, but required for some DB engines, i.e., Oracle
+SE1) License model information for this DB instance.
+* `maintenanceWindow` - (Optional) The window to perform maintenance in.
+Syntax: "ddd:hh24:mi-ddd:hh24:mi". E.g., "Mon:00:00-Mon:03:00". See [RDS
+Maintenance Window
+docs](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_UpgradeDBInstance.Maintenance.html#AdjustingTheMaintenanceWindow)
+for more information.
+* `manageMasterUserPassword` - (Optional) Set to true to allow RDS to manage the master user password in Secrets Manager. Cannot be set if `password` is provided.
+* `masterUserSecretKmsKeyId` - (Optional) The Amazon Web Services KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. To use a KMS key in a different Amazon Web Services account, specify the key ARN or alias ARN. If not specified, the default KMS key for your Amazon Web Services account is used.
+* `maxAllocatedStorage` - (Optional) When configured, the upper limit to which Amazon RDS can automatically scale the storage of the DB instance. Configuring this will automatically ignore differences to `allocatedStorage`. Must be greater than or equal to `allocatedStorage` or `0` to disable Storage Autoscaling.
+* `monitoringInterval` - (Optional) The interval, in seconds, between points
+when Enhanced Monitoring metrics are collected for the DB instance. To disable
+collecting Enhanced Monitoring metrics, specify 0. The default is 0. Valid
+Values: 0, 1, 5, 10, 15, 30, 60.
+* `monitoringRoleArn` - (Optional) The ARN for the IAM role that permits RDS
+to send enhanced monitoring metrics to CloudWatch Logs. See the [AWS
+Documentation](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Monitoring.html)
+for what IAM permissions are needed to allow Enhanced Monitoring for RDS Instances.
+* `multiAz` - (Optional) Specifies if the RDS instance is multi-AZ.
+* `ncharCharacterSetName` - (Optional, Forces new resource) The national character set used in the NCHAR, NVARCHAR2, and NCLOB data types for Oracle instances. This can't be changed. See [Oracle Character Sets
+Supported in Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.OracleCharacterSets.html).
+* `networkType` - (Optional) The network type of the DB instance. Valid values: `ipv4`, `dual`.
+* `optionGroupName` - (Optional) Name of the DB option group to associate.
+* `parameterGroupName` - (Optional) Name of the DB parameter group to
+associate.
+* `password` - (Required unless `manageMasterUserPassword` is set to true or unless a `snapshotIdentifier` or `replicateSourceDb`
+is provided) Password for the master DB user. Note that this may show up in
+logs, and it will be stored in the state file. Cannot be set if `manageMasterUserPassword` is set to `true`.
+* `performanceInsightsEnabled` - (Optional) Specifies whether Performance Insights are enabled. Defaults to false.
+* `performanceInsightsKmsKeyId` - (Optional) The ARN for the KMS key to encrypt Performance Insights data. When specifying `performanceInsightsKmsKeyId`, `performanceInsightsEnabled` needs to be set to true. Once the KMS key is set, it can never be changed.
+* `performanceInsightsRetentionPeriod` - (Optional) Amount of time in days to retain Performance Insights data.
Valid values are `7`, `731` (2 years) or a multiple of `31`. When specifying `performanceInsightsRetentionPeriod`, `performanceInsightsEnabled` needs to be set to true. Defaults to `7`.
+* `port` - (Optional) The port on which the DB accepts connections.
+* `publiclyAccessible` - (Optional) Bool to control if instance is publicly
+accessible. Default is `false`.
+* `replicaMode` - (Optional) Specifies whether the replica is in either `mounted` or `open-read-only` mode. This attribute
+is only supported by Oracle instances. Oracle replicas operate in `open-read-only` mode unless otherwise specified. See [Working with Oracle Read Replicas](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/oracle-read-replicas.html) for more information.
+* `replicateSourceDb` - (Optional) Specifies that this resource is a Replicate
+database, and to use this value as the source database. This correlates to the
+`identifier` of another Amazon RDS Database to replicate (if replicating within
+a single region) or ARN of the Amazon RDS Database to replicate (if replicating
+cross-region). Note that if you are
+creating a cross-region replica of an encrypted database you will also need to
+specify a `kmsKeyId`. See [DB Instance Replication][instance-replication] and [Working with
+PostgreSQL and MySQL Read Replicas](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ReadRepl.html)
+for more information on using Replication.
+* `restoreToPointInTime` - (Optional, Forces new resource) A configuration block for restoring a DB instance to an arbitrary point in time. Requires the `identifier` argument to be set with the name of the new DB instance to be created. See [Restore To Point In Time](#restore-to-point-in-time) below for details.
+* `s3Import` - (Optional) Restore from a Percona XtraBackup in S3. See [Importing Data into an Amazon RDS MySQL DB Instance](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/MySQL.Procedural.Importing.html).
+* `skipFinalSnapshot` - (Optional) Determines whether a final DB snapshot is
+created before the DB instance is deleted. If true is specified, no DBSnapshot
+is created. If false is specified, a DB snapshot is created before the DB
+instance is deleted, using the value from `finalSnapshotIdentifier`. Default
+is `false`.
+* `snapshotIdentifier` - (Optional) Specifies whether or not to create this
+database from a snapshot. This correlates to the snapshot ID you'd find in the
+RDS console, e.g., `rds:production-2015-06-26-06-05`.
+* `storageEncrypted` - (Optional) Specifies whether the DB instance is
+encrypted. Note that if you are creating a cross-region read replica this field
+is ignored and you should instead declare `kmsKeyId` with a valid ARN. The
+default is `false` if not specified.
+* `storageType` - (Optional) One of "standard" (magnetic), "gp2" (general
+purpose SSD), "gp3" (general purpose SSD that needs `iops` independently)
+or "io1" (provisioned IOPS SSD). The default is "io1" if `iops` is specified,
+"gp2" if not.
+* `storageThroughput` - (Optional) The storage throughput value for the DB instance. Can only be set when `storageType` is `"gp3"`. Cannot be specified if the `allocatedStorage` value is below a per-`engine` threshold. See the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#gp3-storage) for details.
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `timezone` - (Optional) Time zone of the DB instance. `timezone` is currently
+only supported by Microsoft SQL Server. The `timezone` can only be set on
+creation. See [MSSQL User
+Guide](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_SQLServer.html#SQLServer.Concepts.General.TimeZone)
+for more information.
+* `username` - (Required unless a `snapshotIdentifier` or `replicateSourceDb`
+is provided) Username for the master DB user. Cannot be specified for a replica.
+* `vpcSecurityGroupIds` - (Optional) List of VPC security groups to
+associate.
+* `customerOwnedIpEnabled` - (Optional) Indicates whether to enable a customer-owned IP address (CoIP) for an RDS on Outposts DB instance. See [CoIP for RDS on Outposts](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-on-outposts.html#rds-on-outposts.coip) for more information.
+
+~> **NOTE:** Removing the `replicateSourceDb` attribute from an existing RDS
+Replicate database managed by Terraform will promote the database to a fully
+standalone database.
+
+### Restore To Point In Time
+
+-> **Note:** You can restore to any point in time before the source DB instance's `latestRestorableTime` or a point up to the number of days specified in the source DB instance's `backupRetentionPeriod`.
+For more information, please refer to the [Developer Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PIT.html).
+This setting does not apply to the `aurora-mysql` or `aurora-postgresql` DB engines. For Aurora, refer to the [`awsRdsCluster` resource documentation](/docs/providers/aws/r/rds_cluster.html#restore_in_time).
+
+The `restoreToPointInTime` block supports the following arguments:
+
+* `restoreTime` - (Optional) The date and time to restore from. Value must be a time in Universal Coordinated Time (UTC) format and must be before the latest restorable time for the DB instance. Cannot be specified with `useLatestRestorableTime`.
+* `sourceDbInstanceIdentifier` - (Optional) The identifier of the source DB instance from which to restore. Must match the identifier of an existing DB instance. Required if `sourceDbInstanceAutomatedBackupsArn` or `sourceDbiResourceId` is not specified.
+* `sourceDbInstanceAutomatedBackupsArn` - (Optional) The ARN of the automated backup from which to restore. Required if `sourceDbInstanceIdentifier` or `sourceDbiResourceId` is not specified.
+* `sourceDbiResourceId` - (Optional) The resource ID of the source DB instance from which to restore. Required if `sourceDbInstanceIdentifier` or `sourceDbInstanceAutomatedBackupsArn` is not specified.
+* `useLatestRestorableTime` - (Optional) A boolean value that indicates whether the DB instance is restored from the latest backup time. Defaults to `false`. Cannot be specified with `restoreTime`.
+
+### S3 Import Options
+
+Full details on the core parameters and impacts are in the API Docs: [RestoreDBInstanceFromS3](http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_RestoreDBInstanceFromS3.html). A sample configuration:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { DbInstance } from "./.gen/providers/aws/db-instance"; +interface MyConfig { + instanceClass: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new DbInstance(this, "db", { + s3Import: { + bucketName: "mybucket", + bucketPrefix: "backups", + ingestionRole: + "arn:aws:iam::1234567890:role/role-xtrabackup-rds-restore", + sourceEngine: "mysql", + sourceEngineVersion: "5.6", + }, + instanceClass: config.instanceClass, + }); + } +} + +``` + +* `bucketName` - (Required) The bucket name where your backup is stored +* `bucketPrefix` - (Optional) Can be blank, but is the path to your backup +* `ingestionRole` - (Required) Role applied to load the data. +* `sourceEngine` - (Required, as of Feb 2018 only 'mysql' supported) Source engine for the backup +* `sourceEngineVersion` - (Required, as of Feb 2018 only '5.6' supported) Version of the source engine used to make the backup + +This will not recreate the resource if the S3 object changes in some way. It's only used to initialize the database. + +## blue_green_update + +* `enabled` - (Optional) Enables [low-downtime updates](#low-downtime-updates) when `true`. + Default is `false`. + +[instance-replication]: +https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.Replication.html +[instance-maintenance]: +https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_UpgradeDBInstance.Maintenance.html +[blue-green]: +https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/blue-green-deployments.html + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `address` - The hostname of the RDS instance. See also `endpoint` and `port`. +* `arn` - The ARN of the RDS instance. +* `allocatedStorage` - The amount of allocated storage. +* `availabilityZone` - The availability zone of the instance. +* `backupRetentionPeriod` - The backup retention period. +* `backupWindow` - The backup window. +* `caCertIdentifier` - Identifier of the CA certificate for the +DB instance. +* `dbName` - The database name. +* `domain` - The ID of the Directory Service Active Directory domain the instance is joined to +* `domainIamRoleName` - The name of the IAM role to be used when making API calls to the Directory Service. +* `endpoint` - The connection endpoint in `address:port` format. +* `engine` - The database engine. +* `engineVersionActual` - The running version of the database. +* `hostedZoneId` - The canonical hosted zone ID of the DB instance (to be used +in a Route 53 Alias record). +* `id` - RDS DBI resource ID. +* `instanceClass`- The RDS instance class. +* `latestRestorableTime` - The latest time, in UTC [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), to which a database can be restored with point-in-time restore. +* `listenerEndpoint` - Specifies the listener connection endpoint for SQL Server Always On. See [endpoint](#endpoint) below. +* `maintenanceWindow` - The instance maintenance window. +* `masterUserSecret` - A block that specifies the master user secret. Only available when `manageMasterUserPassword` is set to true. [Documented below](#master_user_secret). +* `multiAz` - If the RDS instance is multi AZ enabled. +* `port` - The database port. +* `resourceId` - The RDS Resource ID of this instance. +* `status` - The RDS instance status. +* `storageEncrypted` - Whether the DB instance is encrypted. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `username` - The master username for the database.
+
+On Oracle and Microsoft SQL instances the following is exported additionally:
+
+* `characterSetName` - The character set (collation) used on Oracle and Microsoft SQL instances.
+
+### Endpoint
+
+* `address` - Specifies the DNS address of the DB instance.
+* `hostedZoneId` - Specifies the ID that Amazon Route 53 assigns when you create a hosted zone.
+* `port` - Specifies the port that the database engine is listening on.
+
+### master_user_secret
+
+The `masterUserSecret` configuration block supports the following attributes:
+
+* `kmsKeyId` - The Amazon Web Services KMS key identifier that is used to encrypt the secret.
+* `secretArn` - The Amazon Resource Name (ARN) of the secret.
+* `secretStatus` - The status of the secret. Valid Values: `creating` | `active` | `rotating` | `impaired`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `40m`)
+- `update` - (Default `80m`)
+- `delete` - (Default `60m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Instances using the `identifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DB Instances using the `identifier`. For example:
+
+```console
+% terraform import aws_db_instance.default mydb-rds-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/db_instance_automated_backups_replication.markdown b/website/docs/cdktf/typescript/r/db_instance_automated_backups_replication.markdown
new file mode 100644
index 00000000000..1e0bbf67e0f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/db_instance_automated_backups_replication.markdown
@@ -0,0 +1,168 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_instance_automated_backups_replication"
+description: |-
+  Enables replication of automated backups to a different AWS Region.
+---
+
+
+
+# Resource: aws_db_instance_automated_backups_replication
+
+Manages cross-region replication of automated backups to a different AWS Region. Documentation for cross-region automated backup replication can be found at:
+
+* [Replicating automated backups to another AWS Region](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ReplicateBackups.html)
+
+-> **Note:** This resource has to be created in the destination region.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
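+ *
+ * Note: per the callout above, this resource must be created in the
+ * destination region, so this stack's provider is assumed to target a
+ * region other than the source instance's us-west-2.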
+ */
+import { DbInstanceAutomatedBackupsReplication } from "./.gen/providers/aws/db-instance-automated-backups-replication";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbInstanceAutomatedBackupsReplication(this, "default", {
+      retentionPeriod: 14,
+      sourceDbInstanceArn: "arn:aws:rds:us-west-2:123456789012:db:mydatabase",
+    });
+  }
+}
+
+```
+
+## Encrypting the automated backup with KMS
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstanceAutomatedBackupsReplication } from "./.gen/providers/aws/db-instance-automated-backups-replication";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbInstanceAutomatedBackupsReplication(this, "default", {
+      kmsKeyId:
+        "arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012",
+      sourceDbInstanceArn: "arn:aws:rds:us-west-2:123456789012:db:mydatabase",
+    });
+  }
+}
+
+```
+
+## Example including an RDS DB instance
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+import { DbInstanceAutomatedBackupsReplication } from "./.gen/providers/aws/db-instance-automated-backups-replication";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AwsProvider(this, "aws", {
+      region: "us-east-1",
+    });
+    const replica = new AwsProvider(this, "aws_1", {
+      alias: "replica",
+      region: "us-west-2",
+    });
+    const defaultVar = new DbInstance(this, "default", {
+      allocatedStorage: 10,
+      backupRetentionPeriod: 7,
+      dbName: "mydb",
+      engine: "postgres",
+      engineVersion: "13.4",
+      identifier: "mydb",
+      instanceClass: "db.t3.micro",
+      password: "mustbeeightcharacters",
+      skipFinalSnapshot: true,
+      storageEncrypted: true,
+      username: "masterusername",
+    });
+    const awsKmsKeyDefault = new KmsKey(this, "default_3", {
+      description: "Encryption key for automated backups",
+      provider: replica,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKmsKeyDefault.overrideLogicalId("default");
+    const awsDbInstanceAutomatedBackupsReplicationDefault =
+      new DbInstanceAutomatedBackupsReplication(this, "default_4", {
+        kmsKeyId: Token.asString(awsKmsKeyDefault.arn),
+        provider: replica,
+        sourceDbInstanceArn: defaultVar.arn,
+      });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsDbInstanceAutomatedBackupsReplicationDefault.overrideLogicalId(
+      "default"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `kmsKeyId` - (Optional, Forces new resource) The AWS KMS key identifier for encryption of the replicated automated backups. The KMS key ID is the Amazon Resource Name (ARN) for the KMS encryption key in the destination AWS Region, for example, `arn:aws:kms:us-east-1:123456789012:key/AKIAIOSFODNN7EXAMPLE`.
+* `preSignedUrl` - (Optional, Forces new resource) A URL that contains a [Signature Version 4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html) signed request for the [`StartDBInstanceAutomatedBackupsReplication`](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_StartDBInstanceAutomatedBackupsReplication.html) action to be called in the AWS Region of the source DB instance.
+* `retentionPeriod` - (Optional, Forces new resource) The retention period for the replicated automated backups, defaults to `7`.
+* `sourceDbInstanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the source DB instance for the replicated automated backups, for example, `arn:aws:rds:us-west-2:123456789012:db:mydatabase`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the replicated automated backups.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `75m`)
+- `delete` - (Default `75m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS instance automated backups replication using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RDS instance automated backups replication using the `arn`. For example:
+
+```console
+% terraform import aws_db_instance_automated_backups_replication.default arn:aws:rds:us-east-1:123456789012:auto-backup:ab-faaa2mgdj1vmp4xflr7yhsrmtbtob7ltrzzz2my
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/db_instance_role_association.html.markdown b/website/docs/cdktf/typescript/r/db_instance_role_association.html.markdown
new file mode 100644
index 00000000000..d2011cce186
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/db_instance_role_association.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_instance_role_association"
+description: |-
+  Manages an RDS DB Instance association with an IAM Role.
+---
+
+
+
+# Resource: aws_db_instance_role_association
+
+Manages an RDS DB Instance association with an IAM Role.
Example use cases:
+
+* [Amazon RDS Oracle integration with Amazon S3](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/oracle-s3-integration.html)
+* [Importing Amazon S3 Data into an RDS PostgreSQL DB Instance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PostgreSQL.S3Import.html)
+
+-> To manage the RDS DB Instance IAM Role for [Enhanced Monitoring](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Monitoring.OS.html), see the `awsDbInstance` resource `monitoringRoleArn` argument instead.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstanceRoleAssociation } from "./.gen/providers/aws/db-instance-role-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbInstanceRoleAssociation(this, "example", {
+      dbInstanceIdentifier: Token.asString(awsDbInstanceExample.identifier),
+      featureName: "S3_INTEGRATION",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `dbInstanceIdentifier` - (Required) DB Instance Identifier to associate with the IAM Role.
+* `featureName` - (Required) Name of the feature for association. This can be found in the AWS documentation relevant to the integration, or the full list is available in the `supportedFeatureNames` list returned by [AWS CLI rds describe-db-engine-versions](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-engine-versions.html).
+* `roleArn` - (Required) Amazon Resource Name (ARN) of the IAM Role to associate with the DB Instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - DB Instance Identifier and IAM Role ARN separated by a comma (`,`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDbInstanceRoleAssociation` using the DB Instance Identifier and IAM Role ARN separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsDbInstanceRoleAssociation` using the DB Instance Identifier and IAM Role ARN separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_db_instance_role_association.example my-db-instance,arn:aws:iam::123456789012:role/my-role
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/db_option_group.html.markdown b/website/docs/cdktf/typescript/r/db_option_group.html.markdown
new file mode 100644
index 00000000000..1b061f95a75
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/db_option_group.html.markdown
@@ -0,0 +1,140 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_option_group"
+description: |-
+  Provides an RDS DB option group resource.
+---
+
+
+
+# Resource: aws_db_option_group
+
+Provides an RDS DB option group resource. Documentation of the available options for various RDS engines can be found at:
+
+* [MariaDB Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.MariaDB.Options.html)
+* [Microsoft SQL Server Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.SQLServer.Options.html)
+* [MySQL Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.MySQL.Options.html)
+* [Oracle Options](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.Oracle.Options.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbOptionGroup } from "./.gen/providers/aws/db-option-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbOptionGroup(this, "example", {
+      engineName: "sqlserver-ee",
+      majorEngineVersion: "11.00",
+      name: "option-group-test-terraform",
+      option: [
+        {
+          optionName: "Timezone",
+          optionSettings: [
+            {
+              name: "TIME_ZONE",
+              value: "UTC",
+            },
+          ],
+        },
+        {
+          optionName: "SQLSERVER_BACKUP_RESTORE",
+          optionSettings: [
+            {
+              name: "IAM_ROLE_ARN",
+              value: Token.asString(awsIamRoleExample.arn),
+            },
+          ],
+        },
+        {
+          optionName: "TDE",
+        },
+      ],
+      optionGroupDescription: "Terraform Option Group",
+    });
+  }
+}
+
+```
+
+~> **Note:** Any modifications to the `awsDbOptionGroup` take effect immediately, because the resource defaults to applying changes immediately.
+
+~> **WARNING:** You can perform a destroy on an `awsDbOptionGroup`, as long as it is not associated with any Amazon RDS resource. An option group can be associated with a DB instance, a manual DB snapshot, or an automated DB snapshot.
+
+If you try to delete an option group that is associated with an Amazon RDS resource, an error similar to the following is returned:
+
+> An error occurred (InvalidOptionGroupStateFault) when calling the DeleteOptionGroup operation: The option group 'optionGroupName' cannot be deleted because it is in use.
+
+More information about this can be found [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_WorkingWithOptionGroups.html#USER_WorkingWithOptionGroups.Delete).
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the option group. If omitted, Terraform will assign a random, unique name. Must be lowercase to match how it is stored in AWS.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. Must be lowercase to match how it is stored in AWS.
+* `optionGroupDescription` - (Optional) The description of the option group. Defaults to "Managed by Terraform".
+* `engineName` - (Required) Specifies the name of the engine that this option group should be associated with.
+* `majorEngineVersion` - (Required) Specifies the major version of the engine that this option group should be associated with.
+* `option` - (Optional) A list of Options to apply.
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Option blocks support the following:
+
+* `optionName` - (Required) The Name of the Option (e.g., MEMCACHED).
+* `optionSettings` - (Optional) A list of option settings to apply.
+* `port` - (Optional) The Port number when connecting to the Option (e.g., 11211).
+* `version` - (Optional) The version of the option (e.g., 13.1.0.0).
+* `dbSecurityGroupMemberships` - (Optional) A list of DB Security Groups for which the option is enabled.
+* `vpcSecurityGroupMemberships` - (Optional) A list of VPC Security Groups for which the option is enabled.
+
+Option Settings blocks support the following:
+
+* `name` - (Optional) The Name of the setting.
+* `value` - (Optional) The Value of the setting.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The db option group name.
+* `arn` - The ARN of the db option group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `15m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Option groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DB Option groups using the `name`. For example:
+
+```console
+% terraform import aws_db_option_group.example mysql-option-group
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/db_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/db_parameter_group.html.markdown
new file mode 100644
index 00000000000..9b7910187be
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/db_parameter_group.html.markdown
@@ -0,0 +1,162 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_parameter_group"
+description: |-
+  Provides an RDS DB parameter group resource.
+---
+
+
+
+# Resource: aws_db_parameter_group
+
+Provides an RDS DB parameter group resource.
Documentation of the available parameters for various RDS engines can be found at:
+
+* [Aurora MySQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Reference.html)
+* [Aurora PostgreSQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraPostgreSQL.Reference.html)
+* [MariaDB Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.MariaDB.Parameters.html)
+* [Oracle Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ModifyInstance.Oracle.html#USER_ModifyInstance.Oracle.sqlnet)
+* [PostgreSQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.PostgreSQL.CommonDBATasks.html#Appendix.PostgreSQL.CommonDBATasks.Parameters)
+
+> **Hands-on:** For an example of the `awsDbParameterGroup` in use, follow the [Manage AWS RDS Instances](https://learn.hashicorp.com/tutorials/terraform/aws-rds?in=terraform/aws&utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn.
+
+~> **NOTE:** After applying your changes, you may encounter a perpetual diff in your Terraform plan
+output for a `parameter` whose `value` remains unchanged but whose `applyMethod` is changing
+(e.g., from `immediate` to `pending-reboot`, or `pending-reboot` to `immediate`). If only the
+apply method of a parameter is changing, the AWS API will not register this change. To change
+the `applyMethod` of a parameter, its value must also change.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbParameterGroup } from "./.gen/providers/aws/db-parameter-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbParameterGroup(this, "default", {
+      family: "mysql5.6",
+      name: "rds-pg",
+      parameter: [
+        {
+          name: "character_set_server",
+          value: "utf8",
+        },
+        {
+          name: "character_set_client",
+          value: "utf8",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### `createBeforeDestroy` Lifecycle Configuration
+
+The [`createBeforeDestroy`](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#create_before_destroy)
+lifecycle configuration is necessary for modifications that force re-creation of an existing,
+in-use parameter group. This includes common situations like changing the group `name` or
+bumping the `family` version during a major version upgrade. This configuration will prevent destruction
+of the deposed parameter group while still in use by the database during upgrade.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
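+ *
+ * In the example below, `createBeforeDestroy` on the parameter group,
+ * combined with `applyImmediately` on the instance, lets a replacement
+ * group be created and attached before the old group is destroyed.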
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+import { DbParameterGroup } from "./.gen/providers/aws/db-parameter-group";
+interface MyConfig {
+  instanceClass: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new DbParameterGroup(this, "example", {
+      family: "postgres13",
+      lifecycle: {
+        createBeforeDestroy: true,
+      },
+      name: "my-pg",
+      parameter: [
+        {
+          name: "log_connections",
+          value: "1",
+        },
+      ],
+    });
+    const awsDbInstanceExample = new DbInstance(this, "example_1", {
+      applyImmediately: true,
+      parameterGroupName: example.name,
+      instanceClass: config.instanceClass,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDbInstanceExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the DB parameter group. If omitted, Terraform will assign a random, unique name.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `family` - (Required, Forces new resource) The family of the DB parameter group.
+* `description` - (Optional, Forces new resource) The description of the DB parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of DB parameters to apply. Note that parameters may differ from one family to another. The full list of parameters can be discovered via [`aws rds describe-db-parameters`](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-parameters.html) after initial creation of the group.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the DB parameter.
+* `value` - (Required) The value of the DB parameter.
+* `applyMethod` - (Optional) "immediate" (default), or "pending-reboot". Some
+  engines can't apply some parameters without a reboot, and you will need to
+  specify "pending-reboot" here.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The db parameter group name.
+* `arn` - The ARN of the db parameter group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Parameter groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DB Parameter groups using the `name`.
For example:
+
+```console
+% terraform import aws_db_parameter_group.rds_pg rds-pg
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/db_proxy.html.markdown b/website/docs/cdktf/typescript/r/db_proxy.html.markdown
new file mode 100644
index 00000000000..3550c1a5895
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/db_proxy.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_proxy"
+description: |-
+  Provides an RDS DB proxy resource.
+---
+
+
+
+# Resource: aws_db_proxy
+
+Provides an RDS DB proxy resource. For additional information, see the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbProxy } from "./.gen/providers/aws/db-proxy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DbProxy(this, "example", {
+      auth: [
+        {
+          authScheme: "SECRETS",
+          description: "example",
+          iamAuth: "DISABLED",
+          secretArn: Token.asString(awsSecretsmanagerSecretExample.arn),
+        },
+      ],
+      debugLogging: false,
+      engineFamily: "MYSQL",
+      idleClientTimeout: 1800,
+      name: "example",
+      requireTls: true,
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      tags: {
+        Key: "value",
+        Name: "example",
+      },
+      vpcSecurityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+      vpcSubnetIds: [Token.asString(awsSubnetExample.id)],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The identifier for the proxy. This name must be unique for all proxies owned by your AWS account in the specified AWS Region. An identifier must begin with a letter and must contain only ASCII letters, digits, and hyphens; it can't end with a hyphen or contain two consecutive hyphens.
+* `auth` - (Required) Configuration block(s) with authorization mechanisms to connect to the associated instances or clusters. Described below.
+* `debugLogging` - (Optional) Whether the proxy includes detailed information about SQL statements in its logs. This information helps you to debug issues involving SQL behavior or the performance and scalability of the proxy connections. The debug information includes the text of SQL statements that you submit through the proxy. Thus, only enable this setting when needed for debugging, and only when you have security measures in place to safeguard any sensitive information that appears in the logs.
+* `engineFamily` - (Required, Forces new resource) The kinds of databases that the proxy can connect to. This value determines which database network protocol the proxy recognizes when it interprets network traffic to and from the database. For Aurora MySQL, RDS for MariaDB, and RDS for MySQL databases, specify `MYSQL`. For Aurora PostgreSQL and RDS for PostgreSQL databases, specify `POSTGRESQL`. For RDS for Microsoft SQL Server, specify `SQLSERVER`. Valid values are `MYSQL`, `POSTGRESQL`, and `SQLSERVER`.
+* `idleClientTimeout` - (Optional) The number of seconds that a connection to the proxy can be inactive before the proxy disconnects it.
You can set this value higher or lower than the connection timeout limit for the associated database. +* `requireTls` - (Optional) A Boolean parameter that specifies whether Transport Layer Security (TLS) encryption is required for connections to the proxy. By enabling this setting, you can enforce encrypted TLS connections to the proxy. +* `roleArn` - (Required) The Amazon Resource Name (ARN) of the IAM role that the proxy uses to access secrets in AWS Secrets Manager. +* `vpcSecurityGroupIds` - (Optional) One or more VPC security group IDs to associate with the new proxy. +* `vpcSubnetIds` - (Required) One or more VPC subnet IDs to associate with the new proxy. +* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +`auth` blocks support the following: + +* `authScheme` - (Optional) The type of authentication that the proxy uses for connections from the proxy to the underlying database. One of `secrets`. +* `clientPasswordAuthType` - (Optional) The type of authentication the proxy uses for connections from clients. Valid values are `mysqlNativePassword`, `postgresScramSha256`, `postgresMd5`, and `sqlServerAuthentication`. +* `description` - (Optional) A user-specified description about the authentication used by a proxy to log in as a specific database user. +* `iamAuth` - (Optional) Whether to require or disallow AWS Identity and Access Management (IAM) authentication for connections to the proxy. One of `disabled`, `required`. +* `secretArn` - (Optional) The Amazon Resource Name (ARN) representing the secret that the proxy uses to authenticate to the RDS DB instance or Aurora DB cluster. These secrets are stored within Amazon Secrets Manager. +* `username` - (Optional) The name of the database user to which the proxy connects. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) for the proxy. +* `arn` - The Amazon Resource Name (ARN) for the proxy. +* `endpoint` - The endpoint that you can use to connect to the proxy. You include the endpoint value in the connection string for a database client application. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30M`) +- `update` - (Default `30M`) +- `delete` - (Default `60M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB proxies using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DB proxies using the `name`. 
For example: + +```console +% terraform import aws_db_proxy.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/db_proxy_default_target_group.html.markdown b/website/docs/cdktf/typescript/r/db_proxy_default_target_group.html.markdown new file mode 100644 index 00000000000..384f72d5c21 --- /dev/null +++ b/website/docs/cdktf/typescript/r/db_proxy_default_target_group.html.markdown @@ -0,0 +1,127 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_proxy_default_target_group" +description: |- + Manage an RDS DB proxy default target group resource. +--- + + + +# Resource: aws_db_proxy_default_target_group + +Provides a resource to manage an RDS DB proxy default target group resource. + +The `awsDbProxyDefaultTargetGroup` behaves differently from normal resources, in that Terraform does not _create_ or _destroy_ this resource, since it implicitly exists as part of an RDS DB Proxy. On Terraform resource creation it is automatically imported and on resource destruction, Terraform performs no actions in RDS. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DbProxy } from "./.gen/providers/aws/db-proxy"; +import { DbProxyDefaultTargetGroup } from "./.gen/providers/aws/db-proxy-default-target-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DbProxy(this, "example", { + auth: [ + { + authScheme: "SECRETS", + description: "example", + iamAuth: "DISABLED", + secretArn: Token.asString(awsSecretsmanagerSecretExample.arn), + }, + ], + debugLogging: false, + engineFamily: "MYSQL", + idleClientTimeout: 1800, + name: "example", + requireTls: true, + roleArn: Token.asString(awsIamRoleExample.arn), + tags: { + Key: "value", + Name: "example", + }, + vpcSecurityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + vpcSubnetIds: [Token.asString(awsSubnetExample.id)], + }); + const awsDbProxyDefaultTargetGroupExample = new DbProxyDefaultTargetGroup( + this, + "example_1", + { + connectionPoolConfig: { + connectionBorrowTimeout: 120, + initQuery: "SET x=1, y=2", + maxConnectionsPercent: 100, + maxIdleConnectionsPercent: 50, + sessionPinningFilters: ["EXCLUDE_VARIABLE_SETS"], + }, + dbProxyName: example.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDbProxyDefaultTargetGroupExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dbProxyName` - (Required) Name of the RDS DB Proxy. +* `connectionPoolConfig` - (Optional) The settings that determine the size and behavior of the connection pool for the target group. + +`connectionPoolConfig` blocks support the following: + +* `connectionBorrowTimeout` - (Optional) The number of seconds for a proxy to wait for a connection to become available in the connection pool. Only applies when the proxy has opened its maximum number of connections and all connections are busy with client sessions. +* `initQuery` - (Optional) One or more SQL statements for the proxy to run when opening each new database connection. 
Typically used with `set` statements to make sure that each connection has identical settings such as time zone and character set. This setting is empty by default. For multiple statements, use semicolons as the separator. You can also include multiple variables in a single `set` statement, such as `SET x=1, y=2`. +* `maxConnectionsPercent` - (Optional) The maximum size of the connection pool for each target in a target group. For Aurora MySQL, it is expressed as a percentage of the max_connections setting for the RDS DB instance or Aurora DB cluster used by the target group. +* `maxIdleConnectionsPercent` - (Optional) Controls how actively the proxy closes idle database connections in the connection pool. A high value enables the proxy to leave a high percentage of idle connections open. A low value causes the proxy to close idle client connections and return the underlying database connections to the connection pool. For Aurora MySQL, it is expressed as a percentage of the max_connections setting for the RDS DB instance or Aurora DB cluster used by the target group. +* `sessionPinningFilters` - (Optional) Each item in the list represents a class of SQL operations that normally cause all later statements in a session using a proxy to be pinned to the same underlying database connection. Including an item in the list exempts that class of SQL operations from the pinning behavior. Currently, the only allowed value is `excludeVariableSets`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Name of the RDS DB Proxy. +* `arn` - The Amazon Resource Name (ARN) representing the target group. +* `name` - The name of the default target group. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30M`) +- `update` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB proxy default target groups using the `dbProxyName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DB proxy default target groups using the `dbProxyName`. For example: + +```console +% terraform import aws_db_proxy_default_target_group.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/db_proxy_endpoint.html.markdown b/website/docs/cdktf/typescript/r/db_proxy_endpoint.html.markdown new file mode 100644 index 00000000000..76ae374b8c0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/db_proxy_endpoint.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_proxy_endpoint" +description: |- + Provides an RDS DB proxy endpoint resource. +--- + + + +# Resource: aws_db_proxy_endpoint + +Provides an RDS DB proxy endpoint resource. For additional information, see the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy-endpoints.html). 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DbProxyEndpoint } from "./.gen/providers/aws/db-proxy-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DbProxyEndpoint(this, "example", { + dbProxyEndpointName: "example", + dbProxyName: test.name, + targetRole: "READ_ONLY", + vpcSubnetIds: Token.asList(propertyAccess(awsSubnetTest, ["*", "id"])), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dbProxyEndpointName` - (Required) The identifier for the proxy endpoint. An identifier must begin with a letter and must contain only ASCII letters, digits, and hyphens; it can't end with a hyphen or contain two consecutive hyphens. +* `dbProxyName` - (Required) The name of the DB proxy associated with the DB proxy endpoint that you create. +* `vpcSubnetIds` - (Required) One or more VPC subnet IDs to associate with the new proxy. +* `vpcSecurityGroupIds` - (Optional) One or more VPC security group IDs to associate with the new proxy. +* `targetRole` - (Optional) Indicates whether the DB proxy endpoint can be used for read/write or read-only operations. The default is `readWrite`. Valid values are `readWrite` and `readOnly`. +* `tags` - (Optional) A mapping of tags to assign to the resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the proxy and proxy endpoint separated by `/`, `dbProxyName/dbProxyEndpointName`. +* `arn` - The Amazon Resource Name (ARN) for the proxy endpoint. +* `endpoint` - The endpoint that you can use to connect to the proxy. You include the endpoint value in the connection string for a database client application. +* `isDefault` - Indicates whether this endpoint is the default endpoint for the associated DB proxy. +* `vpcId` - The VPC ID of the DB proxy endpoint. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30M`) +- `update` - (Default `30M`) +- `delete` - (Default `60M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB proxy endpoints using the `dbProxyName/dbProxyEndpointName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DB proxy endpoints using the `dbProxyName/dbProxyEndpointName`. 
For example: + +```console +% terraform import aws_db_proxy_endpoint.example example/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/db_proxy_target.html.markdown b/website/docs/cdktf/typescript/r/db_proxy_target.html.markdown new file mode 100644 index 00000000000..9a0c3cfe83a --- /dev/null +++ b/website/docs/cdktf/typescript/r/db_proxy_target.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_proxy_target" +description: |- + Provides an RDS DB proxy target resource. +--- + + + +# Resource: aws_db_proxy_target + +Provides an RDS DB proxy target resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DbProxy } from "./.gen/providers/aws/db-proxy"; +import { DbProxyDefaultTargetGroup } from "./.gen/providers/aws/db-proxy-default-target-group"; +import { DbProxyTarget } from "./.gen/providers/aws/db-proxy-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DbProxy(this, "example", { + auth: [ + { + authScheme: "SECRETS", + description: "example", + iamAuth: "DISABLED", + secretArn: Token.asString(awsSecretsmanagerSecretExample.arn), + }, + ], + debugLogging: false, + engineFamily: "MYSQL", + idleClientTimeout: 1800, + name: "example", + requireTls: true, + roleArn: Token.asString(awsIamRoleExample.arn), + tags: { + Key: "value", + Name: "example", + }, + vpcSecurityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + vpcSubnetIds: [Token.asString(awsSubnetExample.id)], + }); + const awsDbProxyDefaultTargetGroupExample = new DbProxyDefaultTargetGroup( + this, + "example_1", + { + connectionPoolConfig: { + connectionBorrowTimeout: 120, + initQuery: "SET x=1, y=2", + maxConnectionsPercent: 100, + maxIdleConnectionsPercent: 50, + sessionPinningFilters: ["EXCLUDE_VARIABLE_SETS"], + }, + dbProxyName: example.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDbProxyDefaultTargetGroupExample.overrideLogicalId("example"); + const awsDbProxyTargetExample = new DbProxyTarget(this, "example_2", { + dbInstanceIdentifier: Token.asString(awsDbInstanceExample.identifier), + dbProxyName: example.name, + targetGroupName: Token.asString(awsDbProxyDefaultTargetGroupExample.name), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDbProxyTargetExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dbProxyName` - (Required, Forces new resource) The name of the DB proxy. +* `targetGroupName` - (Required, Forces new resource) The name of the target group. +* `dbInstanceIdentifier` - (Optional, Forces new resource) DB instance identifier. +* `dbClusterIdentifier` - (Optional, Forces new resource) DB cluster identifier. 
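+
+The example above registers a DB instance target. As a minimal, hypothetical sketch (reusing the `example` proxy and default target group from the example above, and assuming a pre-existing `awsRdsClusterExample` Aurora cluster that is not shown there), a cluster-based target would set `dbClusterIdentifier` instead:
+
+```typescript
+// Hypothetical sketch, inside the same stack constructor as the example above:
+// register an Aurora DB cluster, rather than a single DB instance, as the
+// proxy target. `awsRdsClusterExample` is an assumed pre-existing
+// aws_rds_cluster resource.
+new DbProxyTarget(this, "example_cluster", {
+  dbClusterIdentifier: Token.asString(awsRdsClusterExample.clusterIdentifier),
+  dbProxyName: example.name,
+  targetGroupName: Token.asString(awsDbProxyDefaultTargetGroupExample.name),
+});
+```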
+
+**NOTE:** Exactly one of `dbInstanceIdentifier` or `dbClusterIdentifier` must be specified; the two arguments cannot be set together.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `endpoint` - Hostname for the target RDS DB Instance. Only returned for `rdsInstance` type.
+* `id` - Identifier of `dbProxyName`, `targetGroupName`, target type (e.g., `rdsInstance` or `trackedCluster`), and resource identifier separated by forward slashes (`/`).
+* `port` - Port for the target RDS DB Instance or Aurora DB Cluster.
+* `rdsResourceId` - Identifier representing the DB Instance or DB Cluster target.
+* `targetArn` - Amazon Resource Name (ARN) for the DB instance or DB cluster. Currently not returned by the RDS API.
+* `trackedClusterId` - DB Cluster identifier for the DB Instance target. Not returned unless manually importing an `rdsInstance` target that is part of a DB Cluster.
+* `type` - Type of target, e.g., `rdsInstance` or `trackedCluster`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS DB Proxy Targets using the `dbProxyName`, `targetGroupName`, target type (such as `rdsInstance` or `trackedCluster`), and resource identifier separated by forward slashes (`/`). For example:
+
+Instances:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Provisioned Clusters:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RDS DB Proxy Targets using the `dbProxyName`, `targetGroupName`, target type (such as `rdsInstance` or `trackedCluster`), and resource identifier separated by forward slashes (`/`). For example:
+
+Instances:
+
+```console
+% terraform import aws_db_proxy_target.example example-proxy/default/RDS_INSTANCE/example-instance
+```
+
+Provisioned Clusters:
+
+```console
+% terraform import aws_db_proxy_target.example example-proxy/default/TRACKED_CLUSTER/example-cluster
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/db_snapshot.html.markdown b/website/docs/cdktf/typescript/r/db_snapshot.html.markdown
new file mode 100644
index 00000000000..aac5ed746fb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/db_snapshot.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_snapshot"
+description: |-
+  Manages an RDS database instance snapshot.
+---
+
+
+
+# Resource: aws_db_snapshot
+
+Manages an RDS database instance snapshot. For managing RDS database cluster snapshots, see the [`awsDbClusterSnapshot` resource](/docs/providers/aws/r/db_cluster_snapshot.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+import { DbSnapshot } from "./.gen/providers/aws/db-snapshot";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const bar = new DbInstance(this, "bar", {
+      allocatedStorage: 10,
+      backupRetentionPeriod: 0,
+      dbName: "baz",
+      engine: "mysql",
+      engineVersion: "5.6.21",
+      instanceClass: "db.t2.micro",
+      maintenanceWindow: "Fri:09:00-Fri:09:30",
+      parameterGroupName: "default.mysql5.6",
+      password: "barbarbarbar",
+      username: "foo",
+    });
+    new DbSnapshot(this, "test", {
+      dbInstanceIdentifier: bar.identifier,
+      dbSnapshotIdentifier: "testsnapshot1234",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `dbInstanceIdentifier` - (Required) The DB Instance Identifier from which to take the snapshot.
+* `dbSnapshotIdentifier` - (Required) The Identifier for the snapshot.
+* `sharedAccounts` - (Optional) List of AWS account IDs to share the snapshot with. Use `all` to make the snapshot public.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `allocatedStorage` - Specifies the allocated storage size in gigabytes (GB).
+* `availabilityZone` - Specifies the name of the Availability Zone the DB instance was located in at the time of the DB snapshot.
+* `dbSnapshotArn` - The Amazon Resource Name (ARN) for the DB snapshot.
+* `encrypted` - Specifies whether the DB snapshot is encrypted.
+* `engine` - Specifies the name of the database engine.
+* `engineVersion` - Specifies the version of the database engine.
+* `iops` - Specifies the Provisioned IOPS (I/O operations per second) value of the DB instance at the time of the snapshot.
+* `kmsKeyId` - The ARN for the KMS encryption key.
+* `licenseModel` - License model information for the restored DB instance.
+* `optionGroupName` - Provides the option group name for the DB snapshot.
+* `sourceDbSnapshotIdentifier` - The ARN of the DB snapshot that this DB snapshot was copied from. It only has a value in the case of a cross-customer or cross-region copy.
+* `sourceRegion` - The region that the DB snapshot was created in or copied from.
+* `status` - Specifies the status of this DB snapshot.
+* `storageType` - Specifies the storage type associated with DB snapshot.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcId` - Provides the VPC ID associated with the DB snapshot.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDbSnapshot` using the snapshot identifier.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsDbSnapshot` using the snapshot identifier. For example:
+
+```console
+% terraform import aws_db_snapshot.example my-snapshot
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/db_snapshot_copy.html.markdown b/website/docs/cdktf/typescript/r/db_snapshot_copy.html.markdown
new file mode 100644
index 00000000000..b3c4e22c4e8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/db_snapshot_copy.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_db_snapshot_copy"
+description: |-
+  Manages an RDS database instance snapshot copy.
+---
+
+
+
+# Resource: aws_db_snapshot_copy
+
+Manages an RDS database instance snapshot copy. For managing RDS database cluster snapshots, see the [`awsDbClusterSnapshot` resource](/docs/providers/aws/r/db_cluster_snapshot.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+import { DbSnapshot } from "./.gen/providers/aws/db-snapshot";
+import { DbSnapshotCopy } from "./.gen/providers/aws/db-snapshot-copy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DbInstance(this, "example", {
+      allocatedStorage: 10,
+      backupRetentionPeriod: 0,
+      dbName: "baz",
+      engine: "mysql",
+      engineVersion: "5.6.21",
+      instanceClass: "db.t2.micro",
+      maintenanceWindow: "Fri:09:00-Fri:09:30",
+      parameterGroupName: "default.mysql5.6",
+      password: "barbarbarbar",
+      username: "foo",
+    });
+    const awsDbSnapshotExample = new DbSnapshot(this, "example_1", {
+      dbInstanceIdentifier: example.identifier,
+      dbSnapshotIdentifier: "testsnapshot1234",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDbSnapshotExample.overrideLogicalId("example");
+    const awsDbSnapshotCopyExample = new DbSnapshotCopy(this, "example_2", {
+      sourceDbSnapshotIdentifier: Token.asString(
+        awsDbSnapshotExample.dbSnapshotArn
+      ),
+      targetDbSnapshotIdentifier: "testsnapshot1234-copy",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDbSnapshotCopyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `copyTags` - (Optional) Whether to copy existing tags. Defaults to `false`.
+* `destinationRegion` - (Optional) The destination region to place the snapshot copy in.
+* `kmsKeyId` - (Optional) KMS key ID.
+* `optionGroupName` - (Optional) The name of an option group to associate with the copy of the snapshot.
+* `presignedUrl` - (Optional) The URL that contains a Signature Version 4 signed request.
+* `sourceDbSnapshotIdentifier` - (Required) Snapshot identifier of the source snapshot.
+* `targetCustomAvailabilityZone` - (Optional) The external custom Availability Zone. +* `targetDbSnapshotIdentifier` - (Required) The Identifier for the snapshot. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Snapshot Identifier. +* `allocatedStorage` - Specifies the allocated storage size in gigabytes (GB). +* `availabilityZone` - Specifies the name of the Availability Zone the DB instance was located in at the time of the DB snapshot. +* `dbSnapshotArn` - The Amazon Resource Name (ARN) for the DB snapshot. +* `encrypted` - Specifies whether the DB snapshot is encrypted. +* `engine` - Specifies the name of the database engine. +* `engineVersion` - Specifies the version of the database engine. +* `iops` - Specifies the Provisioned IOPS (I/O operations per second) value of the DB instance at the time of the snapshot. +* `kmsKeyId` - The ARN for the KMS encryption key. +* `licenseModel` - License model information for the restored DB instance. +* `optionGroupName` - Provides the option group name for the DB snapshot. +* `sourceDbSnapshotIdentifier` - The DB snapshot Arn that the DB snapshot was copied from. It only has value in case of cross customer or cross region copy. +* `sourceRegion` - The region that the DB snapshot was created in or copied from. +* `storageType` - Specifies the storage type associated with DB snapshot. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpcId` - Provides the VPC ID associated with the DB snapshot. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `20M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDbSnapshotCopy` using the snapshot identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDbSnapshotCopy` using the snapshot identifier. For example: + +```console +% terraform import aws_db_snapshot_copy.example my-snapshot +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/db_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/db_subnet_group.html.markdown new file mode 100644 index 00000000000..7d4d29d40cc --- /dev/null +++ b/website/docs/cdktf/typescript/r/db_subnet_group.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_db_subnet_group" +description: |- + Provides an RDS DB subnet group resource. +--- + + + +# Resource: aws_db_subnet_group + +Provides an RDS DB subnet group resource. 
+ +> **Hands-on:** For an example of the `awsDbSubnetGroup` in use, follow the [Manage AWS RDS Instances](https://learn.hashicorp.com/tutorials/terraform/aws-rds?in=terraform/aws&utm_source=WEBSITE&utm_medium=WEB_IO&utm_offer=ARTICLE_PAGE&utm_content=DOCS) tutorial on HashiCorp Learn. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DbSubnetGroup } from "./.gen/providers/aws/db-subnet-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DbSubnetGroup(this, "default", { + name: "main", + subnetIds: [frontend.id, backend.id], + tags: { + Name: "My DB subnet group", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional, Forces new resource) The name of the DB subnet group. If omitted, Terraform will assign a random, unique name. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `description` - (Optional) The description of the DB subnet group. Defaults to "Managed by Terraform". +* `subnetIds` - (Required) A list of VPC subnet IDs. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The db subnet group name. +* `arn` - The ARN of the db subnet group. +* `supportedNetworkTypes` - The network type of the db subnet group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpcId` - Provides the VPC ID of the DB subnet group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DB Subnet groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DB Subnet groups using the `name`. For example: + +```console +% terraform import aws_db_subnet_group.default production-subnet-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/default_network_acl.html.markdown b/website/docs/cdktf/typescript/r/default_network_acl.html.markdown new file mode 100644 index 00000000000..283ef377351 --- /dev/null +++ b/website/docs/cdktf/typescript/r/default_network_acl.html.markdown @@ -0,0 +1,247 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_network_acl" +description: |- + Manage a default network ACL. 
+--- + + + +# Resource: aws_default_network_acl + +Provides a resource to manage a VPC's default network ACL. This resource can manage the default network ACL of the default or a non-default VPC. + +~> **NOTE:** This is an advanced resource with special caveats. Please read this document in its entirety before using this resource. The `awsDefaultNetworkAcl` behaves differently from normal resources. Terraform does not _create_ this resource but instead attempts to "adopt" it into management. + +Every VPC has a default network ACL that can be managed but not destroyed. When Terraform first adopts the Default Network ACL, it **immediately removes all rules in the ACL**. It then proceeds to create any rules specified in the configuration. This step is required so that only the rules specified in the configuration are created. + +This resource treats its inline rules as absolute; only the rules defined inline are created, and any additions/removals external to this resource will result in diffs being shown. For these reasons, this resource is incompatible with the `awsNetworkAclRule` resource. + +For more information about Network ACLs, see the AWS Documentation on [Network ACLs][aws-network-acls]. + +## Example Usage + +### Basic Example + +The following config gives the Default Network ACL the same rules that AWS includes but pulls the resource under management by Terraform. This means that any ACL rules added or changed will be detected as drift. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Op, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DefaultNetworkAcl } from "./.gen/providers/aws/default-network-acl"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const mainvpc = new Vpc(this, "mainvpc", { + cidrBlock: "10.1.0.0/16", + }); + new DefaultNetworkAcl(this, "default", { + defaultNetworkAclId: mainvpc.defaultNetworkAclId, + egress: [ + { + action: "allow", + cidrBlock: "0.0.0.0/0", + fromPort: 0, + protocol: Token.asString(Op.negate(1)), + ruleNo: 100, + toPort: 0, + }, + ], + ingress: [ + { + action: "allow", + cidrBlock: "0.0.0.0/0", + fromPort: 0, + protocol: Token.asString(Op.negate(1)), + ruleNo: 100, + toPort: 0, + }, + ], + }); + } +} + +``` + +### Example: Deny All Egress Traffic, Allow Ingress + +The following denies all Egress traffic by omitting any `egress` rules, while including the default `ingress` rule to allow all traffic. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Op, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DefaultNetworkAcl } from "./.gen/providers/aws/default-network-acl";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mainvpc = new Vpc(this, "mainvpc", {
+      cidrBlock: "10.1.0.0/16",
+    });
+    new DefaultNetworkAcl(this, "default", {
+      defaultNetworkAclId: mainvpc.defaultNetworkAclId,
+      ingress: [
+        {
+          action: "allow",
+          cidrBlock: Token.asString(awsDefaultVpcMainvpc.cidrBlock),
+          fromPort: 0,
+          protocol: Token.asString(Op.negate(1)),
+          ruleNo: 100,
+          toPort: 0,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Example: Deny All Traffic To Any Subnet In The Default Network ACL
+
+This config denies all traffic in the Default ACL. This can be useful if you want to lock down the VPC to force all resources to assign a non-default ACL.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DefaultNetworkAcl } from "./.gen/providers/aws/default-network-acl";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mainvpc = new Vpc(this, "mainvpc", {
+      cidrBlock: "10.1.0.0/16",
+    });
+    new DefaultNetworkAcl(this, "default", {
+      defaultNetworkAclId: mainvpc.defaultNetworkAclId,
+    });
+  }
+}
+
+```
+
+### Managing Subnets In A Default Network ACL
+
+Within a VPC, all Subnets must be associated with a Network ACL. In order to "delete" the association between a Subnet and a non-default Network ACL, the association is destroyed by replacing it with an association between the Subnet and the Default ACL instead.
+
+When managing the Default Network ACL, you cannot "remove" Subnets. Instead, they must be reassigned to another Network ACL, or the Subnet itself must be destroyed. Because of these requirements, removing the `subnetIds` attribute from the configuration of an `awsDefaultNetworkAcl` resource may result in a recurring plan, until the Subnets are reassigned to another Network ACL or are destroyed.
+
+Because Subnets are by default associated with the Default Network ACL, any non-explicit association will show up as a plan to remove the Subnet. For example, if you have a custom `awsNetworkAcl` with two subnets attached and you remove the `awsNetworkAcl` resource, future plans after the successful destroy will show a diff on the managed `awsDefaultNetworkAcl`, as those two Subnets have been orphaned by the now-destroyed Network ACL and thus adopted by the Default Network ACL. To avoid a recurring plan, they will need to be reassigned, destroyed, or added to the `subnetIds` attribute of the `awsDefaultNetworkAcl` entry.
+
+As an alternative to the above, you can also specify the following lifecycle configuration in your `awsDefaultNetworkAcl` resource:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DefaultNetworkAcl } from "./.gen/providers/aws/default-network-acl";
+interface MyConfig {
+  defaultNetworkAclId: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new DefaultNetworkAcl(this, "default", {
+      lifecycle: {
+        // ignoreChanges entries reference Terraform attribute names, so the
+        // snake_case name is used here rather than a TypeScript identifier.
+        ignoreChanges: ["subnet_ids"],
+      },
+      defaultNetworkAclId: config.defaultNetworkAclId,
+    });
+  }
+}
+
+```
+
+### Removing `awsDefaultNetworkAcl` From Your Configuration
+
+Each AWS VPC comes with a Default Network ACL that cannot be deleted. The `awsDefaultNetworkAcl` allows you to manage this Network ACL, but Terraform cannot destroy it. Removing this resource from your configuration will remove it from your statefile and management, **but will not destroy the Network ACL.** All Subnet associations and ingress or egress rules will be left as they are at the time of removal. You can resume managing them via the AWS Console.
+
+## Argument Reference
+
+The following arguments are required:
+
+* `defaultNetworkAclId` - (Required) Network ACL ID to manage. This attribute is exported from `awsVpc`, or manually found via the AWS Console.
+
+The following arguments are optional:
+
+* `egress` - (Optional) Configuration block for an egress rule. Detailed below.
+* `ingress` - (Optional) Configuration block for an ingress rule. Detailed below.
+* `subnetIds` - (Optional) List of Subnet IDs to apply the ACL to. See the notes above on Managing Subnets in the Default Network ACL.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### egress and ingress
+
+Both the `egress` and `ingress` configuration blocks have the same arguments.
+
+The following arguments are required:
+
+* `action` - (Required) The action to take.
+* `fromPort` - (Required) The from port to match.
+* `protocol` - (Required) The protocol to match. If using the -1 'all' protocol, you must specify a from and to port of 0.
+* `ruleNo` - (Required) The rule number. Used for ordering.
+* `toPort` - (Required) The to port to match.
+
+The following arguments are optional:
+
+* `cidrBlock` - (Optional) The CIDR block to match. This must be a valid network mask.
+* `icmpCode` - (Optional) The ICMP code to be used. Default 0.
+* `icmpType` - (Optional) The ICMP type to be used. Default 0.
+* `ipv6CidrBlock` - (Optional) The IPv6 CIDR block.
+
+-> For more information on ICMP types and codes, see [Internet Control Message Protocol (ICMP) Parameters](https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Default Network ACL
+* `id` - ID of the Default Network ACL
+* `ownerId` - ID of the AWS account that owns the Default Network ACL
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcId` - ID of the associated VPC + +[aws-network-acls]: http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_ACLs.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Default Network ACLs using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Default Network ACLs using the `id`. For example: + +```console +% terraform import aws_default_network_acl.sample acl-7aaabd18 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/default_route_table.html.markdown b/website/docs/cdktf/typescript/r/default_route_table.html.markdown new file mode 100644 index 00000000000..890a217b7e7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/default_route_table.html.markdown @@ -0,0 +1,163 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_route_table" +description: |- + Provides a resource to manage a default route table of a VPC. +--- + + + +# Resource: aws_default_route_table + +Provides a resource to manage a default route table of a VPC. This resource can manage the default route table of the default or a non-default VPC. + +~> **NOTE:** This is an advanced resource with special caveats. Please read this document in its entirety before using this resource. The `awsDefaultRouteTable` resource behaves differently from normal resources. Terraform does not _create_ this resource but instead attempts to "adopt" it into management. **Do not** use both `awsDefaultRouteTable` to manage a default route table **and** `awsMainRouteTableAssociation` with the same VPC due to possible route conflicts. See [aws_main_route_table_association][tf-main-route-table-association] documentation for more details. + +Every VPC has a default route table that can be managed but not destroyed. When Terraform first adopts a default route table, it **immediately removes all defined routes**. It then proceeds to create any routes specified in the configuration. This step is required so that only the routes specified in the configuration exist in the default route table. + +For more information, see the Amazon VPC User Guide on [Route Tables](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html). For information about managing normal route tables in Terraform, see [`awsRouteTable`](/docs/providers/aws/r/route_table.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DefaultRouteTable } from "./.gen/providers/aws/default-route-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DefaultRouteTable(this, "example", {
+      defaultRouteTableId: Token.asString(awsVpcExample.defaultRouteTableId),
+      route: [
+        {
+          cidrBlock: "10.0.1.0/24",
+          gatewayId: Token.asString(awsInternetGatewayExample.id),
+        },
+        {
+          egressOnlyGatewayId: Token.asString(
+            awsEgressOnlyInternetGatewayExample.id
+          ),
+          ipv6CidrBlock: "::/0",
+        },
+      ],
+      tags: {
+        Name: "example",
+      },
+    });
+  }
+}
+
+```
+
+To subsequently remove all managed routes:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DefaultRouteTable } from "./.gen/providers/aws/default-route-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DefaultRouteTable(this, "example", {
+      defaultRouteTableId: Token.asString(awsVpcExample.defaultRouteTableId),
+      route: [],
+      tags: {
+        Name: "example",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `defaultRouteTableId` - (Required) ID of the default route table.
+
+The following arguments are optional:
+
+* `propagatingVgws` - (Optional) List of virtual gateways for propagation.
+* `route` - (Optional) Configuration block of routes. Detailed below. This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). This means that omitting this argument is interpreted as ignoring any existing routes. To remove all managed routes, an empty list should be specified. See the example above.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### route
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+One of the following destination arguments must be supplied:
+
+* `cidrBlock` - (Required) The CIDR block of the route.
+* `ipv6CidrBlock` - (Optional) The IPv6 CIDR block of the route.
+* `destinationPrefixListId` - (Optional) The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the route.
+
+One of the following target arguments must be supplied:
+
+* `coreNetworkArn` - (Optional) The Amazon Resource Name (ARN) of a core network.
+* `egressOnlyGatewayId` - (Optional) Identifier of a VPC Egress Only Internet Gateway.
+* `gatewayId` - (Optional) Identifier of a VPC internet gateway or a virtual private gateway.
+* `instanceId` - (Optional) Identifier of an EC2 instance.
+* `natGatewayId` - (Optional) Identifier of a VPC NAT gateway.
+* `networkInterfaceId` - (Optional) Identifier of an EC2 network interface.
+* `transitGatewayId` - (Optional) Identifier of an EC2 Transit Gateway.
+* `vpcEndpointId` - (Optional) Identifier of a VPC Endpoint. This route must be removed prior to VPC Endpoint deletion.
+* `vpcPeeringConnectionId` - (Optional) Identifier of a VPC peering connection.
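+
+Each `route` block pairs exactly one destination argument with exactly one target argument. As a hypothetical sketch (assuming pre-existing `awsEc2ManagedPrefixListExample` and `awsEc2TransitGatewayExample` resources, which are not part of the examples above), a prefix-list destination routed through a transit gateway could look like:
+
+```typescript
+// Hypothetical sketch: one destination argument (destinationPrefixListId)
+// paired with one target argument (transitGatewayId). Both referenced
+// resources are assumed to exist elsewhere in the stack.
+new DefaultRouteTable(this, "example_tgw", {
+  defaultRouteTableId: Token.asString(awsVpcExample.defaultRouteTableId),
+  route: [
+    {
+      destinationPrefixListId: Token.asString(
+        awsEc2ManagedPrefixListExample.id
+      ),
+      transitGatewayId: Token.asString(awsEc2TransitGatewayExample.id),
+    },
+  ],
+});
+```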
+
+Note that the default route, mapping the VPC's CIDR block to "local", is created implicitly and cannot be specified.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the route table.
+* `arn` - The ARN of the route table.
+* `ownerId` - ID of the AWS account that owns the route table.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcId` - ID of the VPC.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `2M`)
+- `update` - (Default `2M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Default VPC route tables using the `vpcId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Default VPC route tables using the `vpcId`. For example:
+
+```console
+% terraform import aws_default_route_table.example vpc-33cc44dd
+```
+
+[aws-route-tables]: http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Route_Tables.html#Route_Replacing_Main_Table
+[tf-route-tables]: /docs/providers/aws/r/route_table.html
+[tf-main-route-table-association]: /docs/providers/aws/r/main_route_table_association.html
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/default_security_group.html.markdown b/website/docs/cdktf/typescript/r/default_security_group.html.markdown
new file mode 100644
index 00000000000..6994dd9b9c9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/default_security_group.html.markdown
@@ -0,0 +1,167 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_default_security_group"
+description: |-
+  Manage a default security group resource.
+---
+
+
+
+# Resource: aws_default_security_group
+
+Provides a resource to manage a default security group. This resource can manage the default security group of the default or a non-default VPC.
+
+~> **NOTE:** This is an advanced resource with special caveats. Please read this document in its entirety before using this resource. The `awsDefaultSecurityGroup` resource behaves differently from normal resources. Terraform does not _create_ this resource but instead attempts to "adopt" it into management.
+
+When Terraform first begins managing the default security group, it **immediately removes all ingress and egress rules in the Security Group**. It then creates any rules specified in the configuration. This way, only the rules specified in the configuration are created.
+
+This resource treats its inline rules as absolute; only the rules defined inline are created, and any additions/removals external to this resource will result in diffs being shown. For these reasons, this resource is incompatible with the `awsSecurityGroupRule` resource.
+
+For more information about default security groups, see the AWS documentation on [Default Security Groups][aws-default-security-groups].
To manage normal security groups, see the [`awsSecurityGroup`](/docs/providers/aws/r/security_group.html) resource.
+
+## Example Usage
+
+The following config gives the default security group the same rules that AWS provides by default, but pulls the resource under management by Terraform. This means that any ingress or egress rules added or changed will be detected as drift.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Op, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DefaultSecurityGroup } from "./.gen/providers/aws/default-security-group";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mainvpc = new Vpc(this, "mainvpc", {
+      cidrBlock: "10.1.0.0/16",
+    });
+    new DefaultSecurityGroup(this, "default", {
+      egress: [
+        {
+          cidrBlocks: ["0.0.0.0/0"],
+          fromPort: 0,
+          protocol: "-1",
+          toPort: 0,
+        },
+      ],
+      ingress: [
+        {
+          fromPort: 0,
+          protocol: Token.asString(Op.negate(1)),
+          selfAttribute: true,
+          toPort: 0,
+        },
+      ],
+      vpcId: mainvpc.id,
+    });
+  }
+}
+
+```
+
+### Example Config To Deny All Egress Traffic, Allowing Ingress
+
+The following denies all Egress traffic by omitting any `egress` rules, while including the default `ingress` rule to allow all traffic.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Op, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DefaultSecurityGroup } from "./.gen/providers/aws/default-security-group";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mainvpc = new Vpc(this, "mainvpc", {
+      cidrBlock: "10.1.0.0/16",
+    });
+    new DefaultSecurityGroup(this, "default", {
+      ingress: [
+        {
+          fromPort: 0,
+          protocol: Token.asString(Op.negate(1)),
+          selfAttribute: true,
+          toPort: 0,
+        },
+      ],
+      vpcId: mainvpc.id,
+    });
+  }
+}
+
+```
+
+### Removing `awsDefaultSecurityGroup` From Your Configuration
+
+Removing this resource from your configuration will remove it from your statefile and management, but will not destroy the Security Group. All ingress or egress rules will be left as they are at the time of removal. You can resume managing them via the AWS Console.
+
+## Argument Reference
+
+The following arguments are optional:
+
+* `egress` - (Optional, VPC only) Configuration block. Detailed below.
+* `ingress` - (Optional) Configuration block. Detailed below.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcId` - (Optional, Forces new resource) VPC ID. **Note that changing the `vpcId` will _not_ restore any default security group rules that were modified, added, or removed.** It will be left in its current state.
+

### egress and ingress

Both arguments are processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).

Both `egress` and `ingress` objects have the same arguments.

* `cidrBlocks` - (Optional) List of CIDR blocks.
* `description` - (Optional) Description of this rule.
* `fromPort` - (Required) Start port (or ICMP type number if protocol is `icmp`)
* `ipv6CidrBlocks` - (Optional) List of IPv6 CIDR blocks.
* `prefixListIds` - (Optional) List of prefix list IDs (for allowing access to VPC endpoints)
* `protocol` - (Required) Protocol. If you select a protocol of "-1" (semantically equivalent to `all`, which is not a valid value here), you must specify a `fromPort` and `toPort` equal to `0`. If not `icmp`, `tcp`, `udp`, or `-1`, use the [protocol number](https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml).
* `securityGroups` - (Optional) List of security groups. A group name can be used relative to the default VPC. Otherwise, group ID.
* `self` - (Optional) Whether the security group itself will be added as a source to this egress rule.
* `toPort` - (Required) End range port (or ICMP code if protocol is `icmp`).

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - ARN of the security group.
* `description` - Description of the security group.
* `id` - ID of the security group.
* `name` - Name of the security group.
* `ownerId` - Owner ID.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

[aws-default-security-groups]: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-network-security.html#default-security-group

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Groups using the security group `id`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Security Groups using the security group `id`. For example:

```console
% terraform import aws_default_security_group.default_sg sg-903004f8
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/default_subnet.html.markdown b/website/docs/cdktf/typescript/r/default_subnet.html.markdown new file mode 100644 index 00000000000..191dec246bf --- /dev/null +++ b/website/docs/cdktf/typescript/r/default_subnet.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_subnet" +description: |- + Manage a default subnet resource. +--- + + + +

# Resource: aws_default_subnet

Provides a resource to manage a [default subnet](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/default-vpc.html#default-vpc-basics) in the current region.

**This is an advanced resource** and has special caveats to be aware of when using it. Please read this document in its entirety before using this resource.
+ +The `awsDefaultSubnet` resource behaves differently from normal resources in that if a default subnet exists in the specified Availability Zone, Terraform does not _create_ this resource, but instead "adopts" it into management. +If no default subnet exists, Terraform creates a new default subnet. +By default, `terraform destroy` does not delete the default subnet but does remove the resource from Terraform state. +Set the `forceDestroy` argument to `true` to delete the default subnet. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DefaultSubnet } from "./.gen/providers/aws/default-subnet"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DefaultSubnet(this, "default_az1", { + availabilityZone: "us-west-2a", + tags: { + Name: "Default subnet for us-west-2a", + }, + }); + } +} + +``` + +## Argument Reference + +The arguments of an `awsDefaultSubnet` differ slightly from those of [`awsSubnet`](subnet.html): + +* `availabilityZone` is required +* The `availabilityZoneId`, `cidrBlock` and `vpcId` arguments become computed attributes +* The default value for `mapPublicIpOnLaunch` is `true` + +This resource supports the following additional arguments: + +* `forceDestroy` - (Optional) Whether destroying the resource deletes the default subnet. Default: `false` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `availabilityZoneId` - The AZ ID of the subnet +* `cidrBlock` - The IPv4 CIDR block assigned to the subnet +* `vpcId` - The ID of the VPC the subnet is in + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import subnets using the subnet `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import subnets using the subnet `id`. For example: + +```console +% terraform import aws_default_subnet.public_subnet subnet-9d4a7b6c +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/default_vpc.html.markdown b/website/docs/cdktf/typescript/r/default_vpc.html.markdown new file mode 100644 index 00000000000..7ab21d7e63c --- /dev/null +++ b/website/docs/cdktf/typescript/r/default_vpc.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_vpc" +description: |- + Manage a default VPC resource. +--- + + + +# Resource: aws_default_vpc + +Provides a resource to manage the [default AWS VPC](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/default-vpc.html) +in the current AWS Region. + +If you created your AWS account after 2013-12-04 you have a default VPC in each AWS Region. + +**This is an advanced resource** and has special caveats to be aware of when using it. Please read this document in its entirety before using this resource. 
+ +The `awsDefaultVpc` resource behaves differently from normal resources in that if a default VPC exists, Terraform does not _create_ this resource, but instead "adopts" it into management. +If no default VPC exists, Terraform creates a new default VPC, which leads to the implicit creation of [other resources](https://docs.aws.amazon.com/vpc/latest/userguide/default-vpc.html#default-vpc-components). +By default, `terraform destroy` does not delete the default VPC but does remove the resource from Terraform state. +Set the `forceDestroy` argument to `true` to delete the default VPC. + +## Example Usage + +Basic usage with tags: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DefaultVpc } from "./.gen/providers/aws/default-vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DefaultVpc(this, "default", { + tags: { + Name: "Default VPC", + }, + }); + } +} + +``` + +## Argument Reference + +The arguments of an `awsDefaultVpc` differ slightly from those of [`awsVpc`](vpc.html): + +* The `cidrBlock` and `instanceTenancy` arguments become computed attributes +* The default value for `enableDnsHostnames` is `true` + +This resource supports the following additional arguments: + +* `forceDestroy` - (Optional) Whether destroying the resource deletes the default VPC. Default: `false` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `cidrBlock` - The primary IPv4 CIDR block for the VPC +* `instanceTenancy` - The allowed tenancy of instances launched into the VPC + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Default VPCs using the VPC `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Default VPCs using the VPC `id`. For example: + +```console +% terraform import aws_default_vpc.default vpc-a01106c2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/default_vpc_dhcp_options.html.markdown b/website/docs/cdktf/typescript/r/default_vpc_dhcp_options.html.markdown new file mode 100644 index 00000000000..ffbd351727c --- /dev/null +++ b/website/docs/cdktf/typescript/r/default_vpc_dhcp_options.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_default_vpc_dhcp_options" +description: |- + Manage the default VPC DHCP Options resource. +--- + + + +# Resource: aws_default_vpc_dhcp_options + +Provides a resource to manage the [default AWS DHCP Options Set](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html#AmazonDNS) +in the current region. + +Each AWS region comes with a default set of DHCP options. +**This is an advanced resource**, and has special caveats to be aware of when +using it. Please read this document in its entirety before using this resource. 
+

The `awsDefaultVpcDhcpOptions` resource behaves differently from normal resources, in that
Terraform does not _create_ this resource, but instead "adopts" it
into management.

## Example Usage

Basic usage with tags:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DefaultVpcDhcpOptions } from "./.gen/providers/aws/default-vpc-dhcp-options";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DefaultVpcDhcpOptions(this, "default", {
      tags: {
        Name: "Default DHCP Option Set",
      },
    });
  }
}

```

## Argument Reference

The arguments of an `awsDefaultVpcDhcpOptions` differ slightly from `awsVpcDhcpOptions` resources.
Namely, the `domainName`, `domainNameServers` and `ntpServers` arguments are computed.
The following arguments are still supported:

* `netbiosNameServers` - (Optional) List of NETBIOS name servers.
* `netbiosNodeType` - (Optional) The NetBIOS node type (1, 2, 4, or 8). AWS recommends specifying 2 since broadcast and multicast are not supported in their network. For more information about these node types, see [RFC 2132](http://www.ietf.org/rfc/rfc2132.txt).
* `ownerId` - The ID of the AWS account that owns the DHCP options set.
* `tags` - (Optional) A map of tags to assign to the resource.

### Removing `awsDefaultVpcDhcpOptions` from your configuration

The `awsDefaultVpcDhcpOptions` resource allows you to manage a region's default DHCP Options Set,
but Terraform cannot destroy it. Removing this resource from your configuration
will remove it from your statefile and management, but will not destroy the DHCP Options Set.
You can resume managing the DHCP Options Set via the AWS Console.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the DHCP Options Set.
* `arn` - The ARN of the DHCP Options Set.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC DHCP Options using the DHCP Options `id`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import VPC DHCP Options using the DHCP Options `id`. For example:

```console
% terraform import aws_default_vpc_dhcp_options.default_options dopt-d9070ebb
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/detective_graph.html.markdown b/website/docs/cdktf/typescript/r/detective_graph.html.markdown new file mode 100644 index 00000000000..d0a1266e140 --- /dev/null +++ b/website/docs/cdktf/typescript/r/detective_graph.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Detective" +layout: "aws" +page_title: "AWS: aws_detective_graph" +description: |- + Provides a resource to manage an Amazon Detective graph.
+---

# Resource: aws_detective_graph

Provides a resource to manage an [AWS Detective Graph](https://docs.aws.amazon.com/detective/latest/APIReference/API_CreateGraph.html). As an AWS account may own only one Detective graph per region, provisioning multiple Detective graphs requires a separate provider configuration for each graph.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DetectiveGraph } from "./.gen/providers/aws/detective-graph";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DetectiveGraph(this, "example", {
      tags: {
        Name: "example-detective-graph",
      },
    });
  }
}

```

## Argument Reference

The following arguments are optional:

* `tags` - (Optional) A map of tags to assign to the instance. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - ARN of the Detective Graph.
* `graphArn` - ARN of the Detective Graph.
* `createdTime` - Date and time, in UTC and extended RFC 3339 format, when the Amazon Detective Graph was created.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDetectiveGraph` using the ARN. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsDetectiveGraph` using the ARN. For example:

```console
% terraform import aws_detective_graph.example arn:aws:detective:us-east-1:123456789101:graph:231684d34gh74g4bae1dbc7bd807d02d
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/detective_invitation_accepter.html.markdown b/website/docs/cdktf/typescript/r/detective_invitation_accepter.html.markdown new file mode 100644 index 00000000000..94bf0f0fe94 --- /dev/null +++ b/website/docs/cdktf/typescript/r/detective_invitation_accepter.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "Detective" +layout: "aws" +page_title: "AWS: aws_detective_invitation_accepter" +description: |- + Provides a resource to manage an Amazon Detective member invitation accepter. +--- + + + +

# Resource: aws_detective_invitation_accepter

Provides a resource to manage an [Amazon Detective Invitation Accepter](https://docs.aws.amazon.com/detective/latest/APIReference/API_AcceptInvitation.html). Ensure that the accepter is configured to use the AWS account that should _accept_ the invitation from the primary graph owner account.
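
The example below passes `provider: "awsalternate"`, an aliased provider configuration for the member account that accepts the invitation. A minimal sketch of such a pair of provider configurations (not `cdktf convert` output; the region and profile names are illustrative assumptions):

```typescript
// Code sketch: a default provider for the primary (graph owner) account and
// an aliased provider for the member account that accepts the invitation.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { AwsProvider } from "./.gen/providers/aws/provider";
class ProviderSetup extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    // Default provider: the primary (graph owner) account.
    new AwsProvider(this, "aws", {
      region: "us-east-1",
      profile: "graph-owner", // illustrative profile name
    });
    // Aliased provider: the invited member account.
    new AwsProvider(this, "awsalternate", {
      alias: "awsalternate",
      region: "us-east-1",
      profile: "member-account", // illustrative profile name
    });
  }
}
```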
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DetectiveGraph } from "./.gen/providers/aws/detective-graph"; +import { DetectiveInvitationAccepter } from "./.gen/providers/aws/detective-invitation-accepter"; +import { DetectiveMember } from "./.gen/providers/aws/detective-member"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new DetectiveGraph(this, "primary", {}); + const awsDetectiveMemberPrimary = new DetectiveMember(this, "primary_1", { + accountId: "ACCOUNT ID", + emailAddress: "EMAIL", + graphArn: primary.id, + message: "Message of the invite", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDetectiveMemberPrimary.overrideLogicalId("primary"); + new DetectiveInvitationAccepter(this, "member", { + dependsOn: [awsDetectiveMemberPrimary], + graphArn: primary.graphArn, + provider: "awsalternate", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `graphArn` - (Required) ARN of the behavior graph that the member account is accepting the invitation for. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier (ID) of the Detective invitation accepter. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDetectiveInvitationAccepter` using the graph ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDetectiveInvitationAccepter` using the graph ARN. For example: + +```console +% terraform import aws_detective_invitation_accepter.example arn:aws:detective:us-east-1:123456789101:graph:231684d34gh74g4bae1dbc7bd807d02d +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/detective_member.html.markdown b/website/docs/cdktf/typescript/r/detective_member.html.markdown new file mode 100644 index 00000000000..7b3ef8a8e39 --- /dev/null +++ b/website/docs/cdktf/typescript/r/detective_member.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Detective" +layout: "aws" +page_title: "AWS: aws_detective_member" +description: |- + Provides a resource to manage an Amazon Detective member. +--- + + + +# Resource: aws_detective_member + +Provides a resource to manage an [Amazon Detective Member](https://docs.aws.amazon.com/detective/latest/APIReference/API_CreateMembers.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DetectiveGraph } from "./.gen/providers/aws/detective-graph"; +import { DetectiveMember } from "./.gen/providers/aws/detective-member"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DetectiveGraph(this, "example", {}); + const awsDetectiveMemberExample = new DetectiveMember(this, "example_1", { + accountId: "AWS ACCOUNT ID", + disableEmailNotification: true, + emailAddress: "EMAIL", + graphArn: example.id, + message: "Message of the invitation", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDetectiveMemberExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Required) AWS account ID for the account. +* `emailAddress` - (Required) Email address for the account. +* `graphArn` - (Required) ARN of the behavior graph to invite the member accounts to contribute their data to. +* `message` - (Optional) A custom message to include in the invitation. Amazon Detective adds this message to the standard content that it sends for an invitation. +* `disableEmailNotification` - (Optional) If set to true, then the root user of the invited account will _not_ receive an email notification. This notification is in addition to an alert that the root user receives in AWS Personal Health Dashboard. By default, this is set to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Unique identifier (ID) of the Detective. +* `status` - Current membership status of the member account. +* `administratorId` - AWS account ID for the administrator account. +* `volumeUsageInBytes` - Data volume in bytes per day for the member account. +* `invitedTime` - Date and time, in UTC and extended RFC 3339 format, when an Amazon Detective membership invitation was last sent to the account. +* `updatedTime` - Date and time, in UTC and extended RFC 3339 format, of the most recent change to the member account's status. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDetectiveMember` using the ARN of the graph followed by the account ID of the member account. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDetectiveMember` using the ARN of the graph followed by the account ID of the member account. 
For example:

```console
% terraform import aws_detective_member.example arn:aws:detective:us-east-1:123456789101:graph:231684d34gh74g4bae1dbc7bd807d02d/123456789012
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/devicefarm_device_pool.html.markdown b/website/docs/cdktf/typescript/r/devicefarm_device_pool.html.markdown new file mode 100644 index 00000000000..8b2963f0cc1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/devicefarm_device_pool.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_device_pool" +description: |- + Provides a Devicefarm device_pool +--- + + + +

# Resource: aws_devicefarm_device_pool

Provides a resource to manage AWS Device Farm Device Pools.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DevicefarmDevicePool } from "./.gen/providers/aws/devicefarm-device-pool";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DevicefarmDevicePool(this, "example", {
      name: "example",
      projectArn: Token.asString(awsDevicefarmProjectExample.arn),
      rule: [
        {
          attribute: "OS_VERSION",
          operator: "EQUALS",
          value: '\\"AVAILABLE\\"',
        },
      ],
    });
  }
}

```

## Argument Reference

* `name` - (Required) The name of the Device Pool
* `projectArn` - (Required) The ARN of the project for the device pool.
* `rule` - (Required) The device pool's rules. See [Rule](#rule).
* `description` - (Optional) The device pool's description.
* `maxDevices` - (Optional) The number of devices that Device Farm can add to your device pool.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### Rule

* `attribute` - (Optional) The rule's stringified attribute. Valid values are: `APPIUM_VERSION`, `ARN`, `AVAILABILITY`, `FLEET_TYPE`, `FORM_FACTOR`, `INSTANCE_ARN`, `INSTANCE_LABELS`, `MANUFACTURER`, `MODEL`, `OS_VERSION`, `PLATFORM`, `REMOTE_ACCESS_ENABLED`, `REMOTE_DEBUG_ENABLED`.
* `operator` - (Optional) Specifies how Device Farm compares the rule's attribute to the value; see the attribute descriptions for the operators supported by each attribute. Valid values are: `EQUALS`, `NOT_IN`, `IN`, `GREATER_THAN`, `GREATER_THAN_OR_EQUALS`, `LESS_THAN`, `LESS_THAN_OR_EQUALS`, `CONTAINS`.
* `value` - (Optional) The rule's value.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Amazon Resource Name of this Device Pool
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Device Pools using their ARN.
For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import DeviceFarm Device Pools using their ARN. For example:

```console
% terraform import aws_devicefarm_device_pool.example arn:aws:devicefarm:us-west-2:123456789012:devicepool:4fa784c7-ccb4-4dbf-ba4f-02198320daa1/4fa784c7-ccb4-4dbf-ba4f-02198320daa1
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/devicefarm_instance_profile.html.markdown b/website/docs/cdktf/typescript/r/devicefarm_instance_profile.html.markdown new file mode 100644 index 00000000000..24c4b4eb19b --- /dev/null +++ b/website/docs/cdktf/typescript/r/devicefarm_instance_profile.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_instance_profile" +description: |- + Provides a Devicefarm instance profile +--- + + + +

# Resource: aws_devicefarm_instance_profile

Provides a resource to manage AWS Device Farm Instance Profiles.

~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DevicefarmInstanceProfile } from "./.gen/providers/aws/devicefarm-instance-profile";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DevicefarmInstanceProfile(this, "example", {
      name: "example",
    });
  }
}

```

## Argument Reference

* `description` - (Optional) The description of the instance profile.
* `excludeAppPackagesFromCleanup` - (Optional) An array of strings that specifies the list of app packages that should not be cleaned up from the device after a test run.
* `name` - (Required) The name for the instance profile.
* `packageCleanup` - (Optional) When set to `true`, Device Farm removes app packages after a test run. The default value is `false` for private devices.
* `rebootAfterUse` - (Optional) When set to `true`, Device Farm reboots the instance after a test run. The default value is `true`.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Amazon Resource Name of this instance profile.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
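
As a sketch of the optional cleanup arguments described above (not `cdktf convert` output; the profile and package names are illustrative assumptions):

```typescript
// Code sketch: an instance profile that cleans up app packages after each
// run but keeps one pre-installed helper package, and reboots the device.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { DevicefarmInstanceProfile } from "./.gen/providers/aws/devicefarm-instance-profile";
class InstanceProfileStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new DevicefarmInstanceProfile(this, "cleanup", {
      name: "cleanup-profile",
      description: "Removes app packages after a run, except the helper app",
      packageCleanup: true,
      rebootAfterUse: true,
      excludeAppPackagesFromCleanup: ["com.example.helper"], // illustrative package name
    });
  }
}
```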
+

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Instance Profiles using their ARN. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import DeviceFarm Instance Profiles using their ARN. For example:

```console
% terraform import aws_devicefarm_instance_profile.example arn:aws:devicefarm:us-west-2:123456789012:instanceprofile:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/devicefarm_network_profile.html.markdown b/website/docs/cdktf/typescript/r/devicefarm_network_profile.html.markdown new file mode 100644 index 00000000000..a9c41592b84 --- /dev/null +++ b/website/docs/cdktf/typescript/r/devicefarm_network_profile.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_network_profile" +description: |- + Provides a Devicefarm network profile +--- + + + +

# Resource: aws_devicefarm_network_profile

Provides a resource to manage AWS Device Farm Network Profiles.

~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DevicefarmNetworkProfile } from "./.gen/providers/aws/devicefarm-network-profile";
import { DevicefarmProject } from "./.gen/providers/aws/devicefarm-project";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new DevicefarmProject(this, "example", {
      name: "example",
    });
    const awsDevicefarmNetworkProfileExample = new DevicefarmNetworkProfile(
      this,
      "example_1",
      {
        name: "example",
        projectArn: example.arn,
      }
    );
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsDevicefarmNetworkProfileExample.overrideLogicalId("example");
  }
}

```

## Argument Reference

* `description` - (Optional) The description of the network profile.
* `downlinkBandwidthBits` - (Optional) The data throughput rate in bits per second, as an integer from `0` to `104857600`. Default value is `104857600`.
* `downlinkDelayMs` - (Optional) Delay time for all packets to destination in milliseconds as an integer from `0` to `2000`.
* `downlinkJitterMs` - (Optional) Time variation in the delay of received packets in milliseconds as an integer from `0` to `2000`.
* `downlinkLossPercent` - (Optional) Proportion of received packets that fail to arrive from `0` to `100` percent.
* `name` - (Required) The name for the network profile.
* `uplinkBandwidthBits` - (Optional) The data throughput rate in bits per second, as an integer from `0` to `104857600`.
Default value is `104857600`.
* `uplinkDelayMs` - (Optional) Delay time for all packets to destination in milliseconds as an integer from `0` to `2000`.
* `uplinkJitterMs` - (Optional) Time variation in the delay of received packets in milliseconds as an integer from `0` to `2000`.
* `uplinkLossPercent` - (Optional) Proportion of received packets that fail to arrive from `0` to `100` percent.
* `projectArn` - (Required) The ARN of the project for the network profile.
* `type` - (Optional) The type of network profile to create. Valid values are `PRIVATE` and `CURATED`.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Amazon Resource Name of this network profile.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Network Profiles using their ARN. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import DeviceFarm Network Profiles using their ARN. For example:

```console
% terraform import aws_devicefarm_network_profile.example arn:aws:devicefarm:us-west-2:123456789012:networkprofile:4fa784c7-ccb4-4dbf-ba4f-02198320daa1
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/devicefarm_project.html.markdown b/website/docs/cdktf/typescript/r/devicefarm_project.html.markdown new file mode 100644 index 00000000000..501331a3b4c --- /dev/null +++ b/website/docs/cdktf/typescript/r/devicefarm_project.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_project" +description: |- + Provides a Devicefarm project +--- + + + +

# Resource: aws_devicefarm_project

Provides a resource to manage AWS Device Farm Projects.

For more information about Device Farm Projects, see the AWS Documentation on
[Device Farm Projects][aws-get-project].

~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `us-west-2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
+ */ +import { DevicefarmProject } from "./.gen/providers/aws/devicefarm-project"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DevicefarmProject(this, "awesome_devices", { + name: "my-device-farm", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) The name of the project +* `defaultJobTimeoutMinutes` - (Optional) Sets the execution timeout value (in minutes) for a project. All test runs in this project use the specified execution timeout value unless overridden when scheduling a run. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of this project +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +[aws-get-project]: http://docs.aws.amazon.com/devicefarm/latest/APIReference/API_GetProject.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Projects using their ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DeviceFarm Projects using their ARN. For example: + +```console +% terraform import aws_devicefarm_project.example arn:aws:devicefarm:us-west-2:123456789012:project:4fa784c7-ccb4-4dbf-ba4f-02198320daa1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/devicefarm_test_grid_project.html.markdown b/website/docs/cdktf/typescript/r/devicefarm_test_grid_project.html.markdown new file mode 100644 index 00000000000..6a294ac412d --- /dev/null +++ b/website/docs/cdktf/typescript/r/devicefarm_test_grid_project.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_test_grid_project" +description: |- + Provides a Devicefarm test_grid_project +--- + + + +# Resource: aws_devicefarm_test_grid_project + +Provides a resource to manage AWS Device Farm Test Grid Projects. + +~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `usWest2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DevicefarmTestGridProject } from "./.gen/providers/aws/devicefarm-test-grid-project"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DevicefarmTestGridProject(this, "example", { + name: "example", + vpcConfig: { + securityGroupIds: Token.asList( + propertyAccess(awsSecurityGroupExample, ["*", "id"]) + ), + subnetIds: Token.asList(propertyAccess(awsSubnetExample, ["*", "id"])), + vpcId: Token.asString(awsVpcExample.id), + }, + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) The name of the Selenium testing project. +* `description` - (Optional) Human-readable description of the project. +* `vpcConfig` - (Required) The VPC security groups and subnets that are attached to a project. See [VPC Config](#vpc-config) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### VPC Config + +* `securityGroupIds` - (Required) A list of VPC security group IDs in your Amazon VPC. +* `subnetIds` - (Required) A list of VPC subnet IDs in your Amazon VPC. +* `vpcId` - (Required) The ID of the Amazon VPC. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of this Test Grid Project. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Test Grid Projects using their ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DeviceFarm Test Grid Projects using their ARN. For example: + +```console +% terraform import aws_devicefarm_test_grid_project.example arn:aws:devicefarm:us-west-2:123456789012:testgrid-project:4fa784c7-ccb4-4dbf-ba4f-02198320daa1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/devicefarm_upload.html.markdown b/website/docs/cdktf/typescript/r/devicefarm_upload.html.markdown new file mode 100644 index 00000000000..33306ce235d --- /dev/null +++ b/website/docs/cdktf/typescript/r/devicefarm_upload.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Device Farm" +layout: "aws" +page_title: "AWS: aws_devicefarm_upload" +description: |- + Provides a Devicefarm upload +--- + + + +# Resource: aws_devicefarm_upload + +Provides a resource to manage AWS Device Farm Uploads. + +~> **NOTE:** AWS currently has limited regional support for Device Farm (e.g., `usWest2`). See [AWS Device Farm endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/devicefarm.html) for information on supported regions. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DevicefarmProject } from "./.gen/providers/aws/devicefarm-project"; +import { DevicefarmUpload } from "./.gen/providers/aws/devicefarm-upload"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DevicefarmProject(this, "example", { + name: "example", + }); + const awsDevicefarmUploadExample = new DevicefarmUpload(this, "example_1", { + name: "example", + projectArn: example.arn, + type: "APPIUM_JAVA_TESTNG_TEST_SPEC", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDevicefarmUploadExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +* `contentType` - (Optional) The upload's content type (for example, application/octet-stream). +* `name` - (Required) The upload's file name. The name should not contain any forward slashes (/). If you are uploading an iOS app, the file name must end with the .ipa extension. If you are uploading an Android app, the file name must end with the .apk extension. For all others, the file name must end with the .zip file extension. +* `projectArn` - (Required) The ARN of the project for the upload. +* `type` - (Required) The upload's upload type. See [AWS Docs](https://docs.aws.amazon.com/devicefarm/latest/APIReference/API_CreateUpload.html#API_CreateUpload_RequestSyntax) for valid list of values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name of this upload. +* `url` - The presigned Amazon S3 URL that was used to store a file using a PUT request. +* `category` - The upload's category. +* `metadata` - The upload's metadata. For example, for Android, this contains information that is parsed from the manifest and is displayed in the AWS Device Farm console after the associated app is uploaded. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DeviceFarm Uploads using their ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DeviceFarm Uploads using their ARN. 
For example: + +```console +% terraform import aws_devicefarm_upload.example arn:aws:devicefarm:us-west-2:123456789012:upload:4fa784c7-ccb4-4dbf-ba4f-02198320daa1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/directory_service_conditional_forwarder.html.markdown b/website/docs/cdktf/typescript/r/directory_service_conditional_forwarder.html.markdown new file mode 100644 index 00000000000..fda7a7a92cd --- /dev/null +++ b/website/docs/cdktf/typescript/r/directory_service_conditional_forwarder.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Directory Service" +layout: "aws" +page_title: "AWS: aws_directory_service_conditional_forwarder" +description: |- + Provides a conditional forwarder for managed Microsoft AD in AWS Directory Service. +--- + + + +# Resource: aws_directory_service_conditional_forwarder + +Provides a conditional forwarder for managed Microsoft AD in AWS Directory Service. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DirectoryServiceConditionalForwarder } from "./.gen/providers/aws/directory-service-conditional-forwarder"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DirectoryServiceConditionalForwarder(this, "example", { + directoryId: ad.id, + dnsIps: ["8.8.8.8", "8.8.4.4"], + remoteDomainName: "example.com", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `directoryId` - (Required) ID of directory. +* `dnsIps` - (Required) A list of forwarder IP addresses. +* `remoteDomainName` - (Required) The fully qualified domain name of the remote domain for which forwarders will be used. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import conditional forwarders using the directory id and remote_domain_name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import conditional forwarders using the directory id and remote_domain_name. For example: + +```console +% terraform import aws_directory_service_conditional_forwarder.example d-1234567890:example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/directory_service_directory.html.markdown b/website/docs/cdktf/typescript/r/directory_service_directory.html.markdown new file mode 100644 index 00000000000..850b0c083c4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/directory_service_directory.html.markdown @@ -0,0 +1,245 @@ +--- +subcategory: "Directory Service" +layout: "aws" +page_title: "AWS: aws_directory_service_directory" +description: |- + Provides a directory in AWS Directory Service. +--- + + + +# Resource: aws_directory_service_directory + +Provides a Simple or Managed Microsoft directory in AWS Directory Service. 
+ +~> **Note:** All arguments including the password and customer username will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +### SimpleAD + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new Vpc(this, "main", { + cidrBlock: "10.0.0.0/16", + }); + const bar = new Subnet(this, "bar", { + availabilityZone: "us-west-2b", + cidrBlock: "10.0.2.0/24", + vpcId: main.id, + }); + const foo = new Subnet(this, "foo", { + availabilityZone: "us-west-2a", + cidrBlock: "10.0.1.0/24", + vpcId: main.id, + }); + const awsDirectoryServiceDirectoryBar = new DirectoryServiceDirectory( + this, + "bar_3", + { + name: "corp.notexample.com", + password: "SuperSecretPassw0rd", + size: "Small", + tags: { + Project: "foo", + }, + vpcSettings: { + subnetIds: [foo.id, bar.id], + vpcId: main.id, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDirectoryServiceDirectoryBar.overrideLogicalId("bar"); + } +} + +``` + +### Microsoft Active Directory (MicrosoftAD) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new Vpc(this, "main", { + cidrBlock: "10.0.0.0/16", + }); + const bar = new Subnet(this, "bar", { + availabilityZone: "us-west-2b", + cidrBlock: "10.0.2.0/24", + vpcId: main.id, + }); + const foo = new Subnet(this, "foo", { + availabilityZone: "us-west-2a", + cidrBlock: "10.0.1.0/24", + vpcId: main.id, + }); + const awsDirectoryServiceDirectoryBar = new DirectoryServiceDirectory( + this, + "bar_3", + { + edition: "Standard", + name: "corp.notexample.com", + password: "SuperSecretPassw0rd", + tags: { + Project: "foo", + }, + type: "MicrosoftAD", + vpcSettings: { + subnetIds: [foo.id, bar.id], + vpcId: main.id, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDirectoryServiceDirectoryBar.overrideLogicalId("bar"); + } +} + +``` + +### Microsoft Active Directory Connector (ADConnector) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
 * See https://cdk.tf/provider-generation for more details.
 */
import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory";
import { Subnet } from "./.gen/providers/aws/subnet";
import { Vpc } from "./.gen/providers/aws/vpc";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const main = new Vpc(this, "main", {
      cidrBlock: "10.0.0.0/16",
    });
    const bar = new Subnet(this, "bar", {
      availabilityZone: "us-west-2b",
      cidrBlock: "10.0.2.0/24",
      vpcId: main.id,
    });
    const foo = new Subnet(this, "foo", {
      availabilityZone: "us-west-2a",
      cidrBlock: "10.0.1.0/24",
      vpcId: main.id,
    });
    new DirectoryServiceDirectory(this, "connector", {
      connectSettings: {
        customerDnsIps: ["A.B.C.D"],
        customerUsername: "Admin",
        subnetIds: [foo.id, bar.id],
        vpcId: main.id,
      },
      name: "corp.notexample.com",
      password: "SuperSecretPassw0rd",
      size: "Small",
      type: "ADConnector",
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required) The fully qualified name for the directory, such as `corp.example.com`
* `password` - (Required) The password for the directory administrator or connector user.
* `size` - (Optional) (For `SimpleAD` and `ADConnector` types) The size of the directory (`Small` or `Large` are accepted values). `Large` by default.
* `vpcSettings` - (Required for `SimpleAD` and `MicrosoftAD`) VPC related information about the directory. Fields documented below.
* `connectSettings` - (Required for `ADConnector`) Connector related information about the directory. Fields documented below.
* `alias` - (Optional) The alias for the directory (must be unique amongst all aliases in AWS). Required for `enableSso`.
* `description` - (Optional) A textual description for the directory.
* `desiredNumberOfDomainControllers` - (Optional) The number of domain controllers desired in the directory. Minimum value of `2`. Scaling of domain controllers is only supported for `MicrosoftAD` directories.
* `shortName` - (Optional) The short name of the directory, such as `corp`.
* `enableSso` - (Optional) Whether to enable single-sign on for the directory. Requires `alias`. Defaults to `false`.
* `type` (Optional) - The directory type (`SimpleAD`, `ADConnector` or `MicrosoftAD` are accepted values). Defaults to `SimpleAD`.
* `edition` - (Optional, for type `MicrosoftAD` only) The MicrosoftAD edition (`Standard` or `Enterprise`). Defaults to `Enterprise`.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

**vpc_settings** supports the following:

* `subnetIds` - (Required) The identifiers of the subnets for the directory servers (2 subnets in 2 different AZs).
* `vpcId` - (Required) The identifier of the VPC that the directory is in.

**connect_settings** supports the following:

* `customerUsername` - (Required) The username corresponding to the password provided.
* `customerDnsIps` - (Required) The DNS IP addresses of the domain to connect to.
* `subnetIds` - (Required) The identifiers of the subnets for the directory servers (2 subnets in 2 different AZs).
* `vpcId` - (Required) The identifier of the VPC that the directory is in.
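
The DNS servers exported by the directory (`dnsIpAddresses`, listed under the attributes below) are commonly handed to the VPC through DHCP options so instances can resolve the directory's domain. A minimal sketch (not `cdktf convert` output; construct names are illustrative assumptions):

```typescript
// Code sketch: point the VPC's DHCP options at the directory's DNS servers
// so instances in the VPC can resolve the managed domain.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory";
import { Subnet } from "./.gen/providers/aws/subnet";
import { Vpc } from "./.gen/providers/aws/vpc";
import { VpcDhcpOptions } from "./.gen/providers/aws/vpc-dhcp-options";
import { VpcDhcpOptionsAssociation } from "./.gen/providers/aws/vpc-dhcp-options-association";
class DirectoryDnsStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const main = new Vpc(this, "main", { cidrBlock: "10.0.0.0/16" });
    const foo = new Subnet(this, "foo", {
      availabilityZone: "us-west-2a",
      cidrBlock: "10.0.1.0/24",
      vpcId: main.id,
    });
    const bar = new Subnet(this, "bar", {
      availabilityZone: "us-west-2b",
      cidrBlock: "10.0.2.0/24",
      vpcId: main.id,
    });
    const ad = new DirectoryServiceDirectory(this, "ad", {
      name: "corp.notexample.com",
      password: "SuperSecretPassw0rd",
      edition: "Standard",
      type: "MicrosoftAD",
      vpcSettings: { subnetIds: [foo.id, bar.id], vpcId: main.id },
    });
    // Hand the directory's DNS servers to the VPC via DHCP options.
    const dns = new VpcDhcpOptions(this, "ad_dns", {
      domainName: "corp.notexample.com",
      domainNameServers: ad.dnsIpAddresses,
    });
    new VpcDhcpOptionsAssociation(this, "ad_dns_assoc", {
      vpcId: main.id,
      dhcpOptionsId: dns.id,
    });
  }
}
```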
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The directory identifier.
+* `accessUrl` - The access URL for the directory, such as `http://alias.awsapps.com`.
+* `dnsIpAddresses` - A list of IP addresses of the DNS servers for the directory or connector.
+* `securityGroupId` - The ID of the security group created by the directory.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+`connectSettings` (for `ADConnector`) is also exported with the following attributes:
+
+* `connectIps` - The IP addresses of the AD Connector servers.
+
+## Timeouts
+
+`awsDirectoryServiceDirectory` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `create` - (Default `60 minutes`) Used for directory creation
+- `update` - (Default `60 minutes`) Used for directory update
+- `delete` - (Default `60 minutes`) Used for directory deletion
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DirectoryService directories using the directory `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DirectoryService directories using the directory `id`. For example:
+
+```console
+% terraform import aws_directory_service_directory.sample d-926724cf57
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/directory_service_log_subscription.html.markdown b/website/docs/cdktf/typescript/r/directory_service_log_subscription.html.markdown
new file mode 100644
index 00000000000..6ee18a77925
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/directory_service_log_subscription.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_log_subscription"
+description: |-
+  Provides a log subscription for AWS Directory Service that pushes logs to CloudWatch.
+---
+
+
+
+# Resource: aws_directory_service_log_subscription
+
+Provides a log subscription for AWS Directory Service that pushes logs to CloudWatch.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { DirectoryServiceLogSubscription } from "./.gen/providers/aws/directory-service-log-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudwatchLogGroup(this, "example", {
+      name:
+        "/aws/directoryservice/${" +
+        awsDirectoryServiceDirectoryExample.id +
+        "}",
+      retentionInDays: 14,
+    });
+    const awsDirectoryServiceLogSubscriptionExample =
+      new DirectoryServiceLogSubscription(this, "example_1", {
+        directoryId: Token.asString(awsDirectoryServiceDirectoryExample.id),
+        logGroupName: example.name,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDirectoryServiceLogSubscriptionExample.overrideLogicalId("example");
+    const adLogPolicy = new DataAwsIamPolicyDocument(this, "ad-log-policy", {
+      statement: [
+        {
+          actions: ["logs:CreateLogStream", "logs:PutLogEvents"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["ds.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+          resources: ["${" + example.arn + "}:*"],
+        },
+      ],
+    });
+    const awsCloudwatchLogResourcePolicyAdLogPolicy =
+      new CloudwatchLogResourcePolicy(this, "ad-log-policy_3", {
+        policyDocument: Token.asString(adLogPolicy.json),
+        policyName: "ad-log-policy",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogResourcePolicyAdLogPolicy.overrideLogicalId(
+      "ad-log-policy"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `directoryId` - (Required) ID of the directory.
+* `logGroupName` - (Required) Name of the CloudWatch log group to which the logs should be published. The log group must already exist, and the Directory Service principal must have permission to create log streams and publish logs to it. Changing this value deletes the current subscription and creates a new one. A directory can have only one log subscription at a time.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Directory Service Log Subscriptions using the directory ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Directory Service Log Subscriptions using the directory ID. For example:
+
+```console
+% terraform import aws_directory_service_log_subscription.msad d-1234567890
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/directory_service_radius_settings.html.markdown b/website/docs/cdktf/typescript/r/directory_service_radius_settings.html.markdown
new file mode 100644
index 00000000000..76177e627e1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/directory_service_radius_settings.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_radius_settings"
+description: |-
+  Manages a directory's multi-factor authentication (MFA) using a Remote Authentication Dial In User Service (RADIUS) server.
+---
+
+
+
+# Resource: aws_directory_service_radius_settings
+
+Manages a directory's multi-factor authentication (MFA) using a Remote Authentication Dial In User Service (RADIUS) server.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DirectoryServiceRadiusSettings } from "./.gen/providers/aws/directory-service-radius-settings";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DirectoryServiceRadiusSettings(this, "example", {
+      authenticationProtocol: "PAP",
+      directoryId: Token.asString(awsDirectoryServiceDirectoryExample.id),
+      displayLabel: "example",
+      radiusPort: 1812,
+      radiusRetries: 4,
+      radiusServers: ["10.0.1.5"],
+      radiusTimeout: 1,
+      sharedSecret: "12345678",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `authenticationProtocol` - (Optional) The protocol specified for your RADIUS endpoints. Valid values: `PAP`, `CHAP`, `MS-CHAPv1`, `MS-CHAPv2`.
+* `directoryId` - (Required) The identifier of the directory for which you want to manage RADIUS settings.
+* `displayLabel` - (Required) Display label for the RADIUS settings.
+* `radiusPort` - (Required) The port that your RADIUS server is using for communications. Your self-managed network must allow inbound traffic over this port from the AWS Directory Service servers.
+* `radiusRetries` - (Required) The maximum number of times that communication with the RADIUS server is attempted. Minimum value of `0`. Maximum value of `10`.
+* `radiusServers` - (Required) An array of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.
+* `radiusTimeout` - (Required) The amount of time, in seconds, to wait for the RADIUS server to respond. Minimum value of `1`. Maximum value of `50`.
+* `sharedSecret` - (Required) Required for enabling RADIUS on the directory.
+* `useSameUsername` - (Optional) Not currently used.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The directory identifier.
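+
+The generated example above refers to a directory defined elsewhere. As a rough, hand-written sketch (not `cdktf convert` output), the directory and its RADIUS settings can live in the same stack; the VPC, subnet, RADIUS server, and secret values below are placeholders:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory";
+import { DirectoryServiceRadiusSettings } from "./.gen/providers/aws/directory-service-radius-settings";
+class MfaDirectoryStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DirectoryServiceDirectory(this, "example", {
+      name: "corp.notexample.com",
+      password: "SuperSecretPassw0rd",
+      type: "MicrosoftAD",
+      vpcSettings: {
+        subnetIds: ["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"], // placeholders
+        vpcId: "vpc-0123456789abcdef0", // placeholder
+      },
+    });
+    new DirectoryServiceRadiusSettings(this, "mfa", {
+      authenticationProtocol: "PAP",
+      directoryId: example.id,
+      displayLabel: "example",
+      radiusPort: 1812,
+      radiusRetries: 4,
+      radiusServers: ["10.0.1.5"], // placeholder RADIUS server IP
+      radiusTimeout: 1,
+      sharedSecret: "12345678", // placeholder secret
+    });
+  }
+}
+```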
+ +## Timeouts + +`awsDirectoryServiceRadiusSettings` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options: + +- `create` - (Default `30 minutes`) Used for RADIUS settings creation +- `update` - (Default `30 minutes`) Used for RADIUS settings update + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RADIUS settings using the directory ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import RADIUS settings using the directory ID. For example: + +```console +% terraform import aws_directory_service_radius_settings.example d-926724cf57 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/directory_service_region.html.markdown b/website/docs/cdktf/typescript/r/directory_service_region.html.markdown new file mode 100644 index 00000000000..9395409ec52 --- /dev/null +++ b/website/docs/cdktf/typescript/r/directory_service_region.html.markdown @@ -0,0 +1,227 @@ +--- +subcategory: "Directory Service" +layout: "aws" +page_title: "AWS: aws_directory_service_region" +description: |- + Manages a replicated Region and directory for Multi-Region replication. +--- + + + +# Resource: aws_directory_service_region + +Manages a replicated Region and directory for Multi-Region replication. +Multi-Region replication is only supported for the Enterprise Edition of AWS Managed Microsoft AD. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Token, + TerraformCount, + propertyAccess, + Fn, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory"; +import { DirectoryServiceRegion } from "./.gen/providers/aws/directory-service-region"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + region: "us-west-2", + }); + const secondary = new AwsProvider(this, "aws_1", { + alias: "secondary", + region: "us-east-2", + }); + const example = new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + tags: { + Name: "Primary", + }, + }); + const exampleSecondary = new Vpc(this, "example-secondary", { + cidrBlock: "10.1.0.0/16", + provider: secondary, + tags: { + Name: "Secondary", + }, + }); + const available = new DataAwsAvailabilityZones(this, "available", { + filter: [ + { + name: "opt-in-status", + values: ["opt-in-not-required"], + }, + ], + state: "available", + }); + const availableSecondary = new DataAwsAvailabilityZones( + this, + "available-secondary", + { + filter: [ + { + name: "opt-in-status", + values: ["opt-in-not-required"], + }, + ], + provider: secondary, + state: "available", + } + ); + const dataAwsRegionExample = new DataAwsRegion(this, "example_6", { + provider: secondary, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionExample.overrideLogicalId("example"); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleCount = TerraformCount.of(Token.asNumber("2")); + const awsSubnetExample = new Subnet(this, "example_7", { + availabilityZone: Token.asString( + propertyAccess(available.names, [exampleCount.index]) + ), + cidrBlock: Token.asString( + Fn.cidrsubnet(example.cidrBlock, 8, Token.asNumber(exampleCount.index)) + ), + tags: { + Name: "Primary", + }, + vpcId: example.id, + count: exampleCount, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetExample.overrideLogicalId("example"); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. 
a result of a data source + you need to keep this like it is.*/ + const exampleSecondaryCount = TerraformCount.of(Token.asNumber("2")); + const awsSubnetExampleSecondary = new Subnet(this, "example-secondary_8", { + availabilityZone: Token.asString( + propertyAccess(availableSecondary.names, [exampleSecondaryCount.index]) + ), + cidrBlock: Token.asString( + Fn.cidrsubnet( + exampleSecondary.cidrBlock, + 8, + Token.asNumber(exampleSecondaryCount.index) + ) + ), + provider: secondary, + tags: { + Name: "Secondary", + }, + vpcId: exampleSecondary.id, + count: exampleSecondaryCount, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetExampleSecondary.overrideLogicalId("example-secondary"); + const awsDirectoryServiceDirectoryExample = new DirectoryServiceDirectory( + this, + "example_9", + { + name: "example.com", + password: "SuperSecretPassw0rd", + type: "MicrosoftAD", + vpcSettings: { + subnetIds: Token.asList( + propertyAccess(awsSubnetExample, ["*", "id"]) + ), + vpcId: example.id, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDirectoryServiceDirectoryExample.overrideLogicalId("example"); + const awsDirectoryServiceRegionExample = new DirectoryServiceRegion( + this, + "example_10", + { + directoryId: Token.asString(awsDirectoryServiceDirectoryExample.id), + regionName: Token.asString(dataAwsRegionExample.name), + tags: { + Name: "Secondary", + }, + vpcSettings: { + subnetIds: Token.asList( + propertyAccess(awsSubnetExampleSecondary, ["*", "id"]) + ), + vpcId: exampleSecondary.id, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDirectoryServiceRegionExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `desiredNumberOfDomainControllers` - (Optional) The number of domain controllers desired in the replicated directory. Minimum value of `2`. +* `directoryId` - (Required) The identifier of the directory to which you want to add Region replication. +* `regionName` - (Required) The name of the Region where you want to add domain controllers for replication. +* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpcSettings` - (Required) VPC information in the replicated Region. Detailed below. + +### `vpcSettings` + +* `subnetIds` - (Required) The identifiers of the subnets for the directory servers. +* `vpcId` - (Optional) The identifier of the VPC in which to create the directory. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block). 
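+
+The full example above wires together two providers, two VPCs, and subnet loops; reduced to the resource itself, a hand-written sketch (not `cdktf convert` output, with placeholder IDs) can be as small as:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DirectoryServiceRegion } from "./.gen/providers/aws/directory-service-region";
+class RegionReplicationStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DirectoryServiceRegion(this, "secondary", {
+      directoryId: "d-926724cf57", // placeholder: an Enterprise Edition AWS Managed Microsoft AD directory
+      regionName: "us-east-2", // the Region that receives the new domain controllers
+      vpcSettings: {
+        subnetIds: ["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"], // placeholder subnets in us-east-2
+        vpcId: "vpc-0123456789abcdef0", // placeholder VPC in us-east-2
+      },
+    });
+  }
+}
+```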
+
+## Timeouts
+
+`awsDirectoryServiceRegion` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `create` - (Default `180 minutes`) Used for Region addition
+- `update` - (Default `90 minutes`) Used for replicated directory update
+- `delete` - (Default `90 minutes`) Used for Region removal
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Replicated Regions using the directory ID and Region name, separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Replicated Regions using the directory ID and Region name, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_directory_service_region.example d-9267651497,us-east-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/directory_service_shared_directory.html.markdown b/website/docs/cdktf/typescript/r/directory_service_shared_directory.html.markdown
new file mode 100644
index 00000000000..ac399d2d4c2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/directory_service_shared_directory.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_shared_directory"
+description: |-
+  Manages a directory in your account (directory owner) shared with another account (directory consumer).
+---
+
+
+
+# Resource: aws_directory_service_shared_directory
+
+Manages a directory in your account (directory owner) shared with another account (directory consumer).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory";
+import { DirectoryServiceSharedDirectory } from "./.gen/providers/aws/directory-service-shared-directory";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DirectoryServiceDirectory(this, "example", {
+      edition: "Standard",
+      name: "tf-example",
+      password: "SuperSecretPassw0rd",
+      type: "MicrosoftAD",
+      vpcSettings: {
+        subnetIds: Token.asList(propertyAccess(awsSubnetExample, ["*", "id"])),
+        vpcId: Token.asString(awsVpcExample.id),
+      },
+    });
+    const awsDirectoryServiceSharedDirectoryExample =
+      new DirectoryServiceSharedDirectory(this, "example_1", {
+        directoryId: example.id,
+        notes: "You wanna have a catch?",
+        target: {
+          id: Token.asString(receiver.accountId),
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDirectoryServiceSharedDirectoryExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `directoryId` - (Required) Identifier of the Managed Microsoft AD directory that you want to share with other accounts.
+* `target` - (Required) Identifier for the directory consumer account with whom the directory is to be shared. See below.
+
+The following arguments are optional:
+
+* `method` - (Optional) Method used when sharing a directory. Valid values are `ORGANIZATIONS` and `HANDSHAKE`. Default is `HANDSHAKE`.
+* `notes` - (Optional, Sensitive) Message sent by the directory owner to the directory consumer to help the directory consumer administrator determine whether to approve or reject the share invitation.
+
+### `target`
+
+* `id` - (Required) Identifier of the directory consumer account.
+* `type` - (Optional) Type of identifier to be used in the `id` field. Valid value is `ACCOUNT`. Default is `ACCOUNT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the shared directory.
+* `sharedDirectoryId` - Identifier of the directory that is stored in the directory consumer account that corresponds to the shared directory in the owner account.
+
+## Timeouts
+
+`awsDirectoryServiceSharedDirectory` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `delete` - (Default `60 minutes`) Used for shared directory deletion
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Directory Service Shared Directories using the owner directory ID/shared directory ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Directory Service Shared Directories using the owner directory ID/shared directory ID. For example:
+
+```console
+% terraform import aws_directory_service_shared_directory.example d-1234567890/d-9267633ece
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/directory_service_shared_directory_accepter.html.markdown b/website/docs/cdktf/typescript/r/directory_service_shared_directory_accepter.html.markdown
new file mode 100644
index 00000000000..e57ad3be867
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/directory_service_shared_directory_accepter.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_shared_directory_accepter"
+description: |-
+  Accepts a shared directory in a consumer account.
+---
+
+
+
+# Resource: aws_directory_service_shared_directory_accepter
+
+Accepts a shared directory in a consumer account.
+
+~> **NOTE:** Destroying this resource removes the shared directory from the consumer account only.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DirectoryServiceSharedDirectory } from "./.gen/providers/aws/directory-service-shared-directory";
+import { DirectoryServiceSharedDirectoryAccepter } from "./.gen/providers/aws/directory-service-shared-directory-accepter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DirectoryServiceSharedDirectory(this, "example", {
+      directoryId: Token.asString(awsDirectoryServiceDirectoryExample.id),
+      notes: "example",
+      target: {
+        id: Token.asString(receiver.accountId),
+      },
+    });
+    const awsDirectoryServiceSharedDirectoryAccepterExample =
+      new DirectoryServiceSharedDirectoryAccepter(this, "example_1", {
+        provider: "awsalternate",
+        sharedDirectoryId: example.sharedDirectoryId,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDirectoryServiceSharedDirectoryAccepterExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `sharedDirectoryId` - (Required) Identifier of the directory that is stored in the directory consumer account that corresponds to the shared directory in the owner account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the shared directory.
+* `method` - Method used when sharing a directory (either `ORGANIZATIONS` or `HANDSHAKE`).
+* `notes` - Message sent by the directory owner to the directory consumer to help the directory consumer administrator determine whether to approve or reject the share invitation.
+* `ownerAccountId` - Account identifier of the directory owner.
+* `ownerDirectoryId` - Identifier of the Managed Microsoft AD directory from the perspective of the directory owner.
+
+## Timeouts
+
+`awsDirectoryServiceSharedDirectoryAccepter` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options:
+
+- `create` - (Default `60 minutes`) Used for directory creation
+- `delete` - (Default `60 minutes`) Used for directory deletion
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Directory Service Shared Directories using the shared directory ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Directory Service Shared Directories using the shared directory ID. For example:
+
+```console
+% terraform import aws_directory_service_shared_directory_accepter.example d-9267633ece
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/directory_service_trust.html.markdown b/website/docs/cdktf/typescript/r/directory_service_trust.html.markdown
new file mode 100644
index 00000000000..64d5ea666c4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/directory_service_trust.html.markdown
@@ -0,0 +1,199 @@
+---
+subcategory: "Directory Service"
+layout: "aws"
+page_title: "AWS: aws_directory_service_trust"
+description: |-
+  Manages a trust relationship between two Active Directory Directories.
+---
+
+
+
+# Resource: aws_directory_service_trust
+
+Manages a trust relationship between two Active Directory Directories.
+
+The directories may either be both AWS Managed Microsoft AD domains or an AWS Managed Microsoft AD domain and a self-managed Active Directory Domain.
+
+The Trust relationship must be configured on both sides of the relationship.
+If a Trust has only been created on one side, it will be in the state `VerifyFailed`.
+Once the second Trust is created, the first will update to the correct state.
+
+## Example Usage
+
+### Two-Way Trust
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory";
+import { DirectoryServiceTrust } from "./.gen/providers/aws/directory-service-trust";
+interface MyConfig {
+  password: any;
+  password1: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const one = new DirectoryServiceDirectory(this, "one", {
+      name: "one.example.com",
+      type: "MicrosoftAD",
+      password: config.password,
+    });
+    const two = new DirectoryServiceDirectory(this, "two", {
+      name: "two.example.com",
+      type: "MicrosoftAD",
+      password: config.password1,
+    });
+    const awsDirectoryServiceTrustOne = new DirectoryServiceTrust(
+      this,
+      "one_2",
+      {
+        conditionalForwarderIpAddrs: two.dnsIpAddresses,
+        directoryId: one.id,
+        remoteDomainName: two.name,
+        trustDirection: "Two-Way",
+        trustPassword: "Some0therPassword",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDirectoryServiceTrustOne.overrideLogicalId("one");
+    const awsDirectoryServiceTrustTwo = new DirectoryServiceTrust(
+      this,
+      "two_3",
+      {
+        conditionalForwarderIpAddrs: one.dnsIpAddresses,
+        directoryId: two.id,
+        remoteDomainName: one.name,
+        trustDirection: "Two-Way",
+        trustPassword: "Some0therPassword",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDirectoryServiceTrustTwo.overrideLogicalId("two");
+  }
+}
+
+```
+
+### One-Way Trust
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory";
+import { DirectoryServiceTrust } from "./.gen/providers/aws/directory-service-trust";
+interface MyConfig {
+  password: any;
+  password1: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const one = new DirectoryServiceDirectory(this, "one", {
+      name: "one.example.com",
+      type: "MicrosoftAD",
+      password: config.password,
+    });
+    const two = new DirectoryServiceDirectory(this, "two", {
+      name: "two.example.com",
+      type: "MicrosoftAD",
+      password: config.password1,
+    });
+    const awsDirectoryServiceTrustOne = new DirectoryServiceTrust(
+      this,
+      "one_2",
+      {
+        conditionalForwarderIpAddrs: two.dnsIpAddresses,
+        directoryId: one.id,
+        remoteDomainName: two.name,
+        trustDirection: "One-Way: Incoming",
+        trustPassword: "Some0therPassword",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDirectoryServiceTrustOne.overrideLogicalId("one");
+    const awsDirectoryServiceTrustTwo = new DirectoryServiceTrust(
+      this,
+      "two_3",
+      {
+        conditionalForwarderIpAddrs: one.dnsIpAddresses,
+        directoryId: two.id,
+        remoteDomainName: one.name,
+        trustDirection: "One-Way: Outgoing",
+        trustPassword: "Some0therPassword",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDirectoryServiceTrustTwo.overrideLogicalId("two");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `conditionalForwarderIpAddrs` - (Optional) Set of IPv4 addresses for the DNS server associated with the remote Directory.
+  Can contain between 1 and 4 values.
+* `deleteAssociatedConditionalForwarder` - (Optional) Whether to delete the conditional forwarder when deleting the Trust relationship.
+* `directoryId` - (Required) ID of the Directory.
+* `remoteDomainName` - (Required) Fully qualified domain name of the remote Directory.
+* `selectiveAuth` - (Optional) Whether to enable selective authentication.
+  Valid values are `Enabled` and `Disabled`.
+  Default value is `Disabled`.
+* `trustDirection` - (Required) The direction of the Trust relationship.
+  Valid values are `One-Way: Outgoing`, `One-Way: Incoming`, and `Two-Way`.
+* `trustPassword` - (Required) Password for the Trust.
+  Does not need to match the passwords for either Directory.
+  Can contain upper- and lower-case letters, numbers, and punctuation characters.
+  May be up to 128 characters long.
+* `trustType` - (Optional) Type of the Trust relationship.
+  Valid values are `Forest` and `External`.
+  Default value is `Forest`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `createdDateTime` - Date and time when the Trust was created.
+* `id` - The Trust identifier.
+* `lastUpdatedDateTime` - Date and time when the Trust was last updated.
+* `stateLastUpdatedDateTime` - Date and time when the Trust state in `trustState` was last updated.
+* `trustState` - State of the Trust relationship.
+  One of `Created`, `VerifyFailed`, `Verified`, `UpdateFailed`, `Updated`, `Deleted`, or `Failed`.
+* `trustStateReason` - Reason for the Trust state set in `trustState`.
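+
+Because the trust only converges after both sides exist, it can be useful to surface `trustState` as a stack output and re-check it once the remote side has been created. The following is a hand-written sketch (not `cdktf convert` output); the directory ID and DNS IP are placeholders:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DirectoryServiceTrust } from "./.gen/providers/aws/directory-service-trust";
+class TrustStateStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const trust = new DirectoryServiceTrust(this, "one", {
+      conditionalForwarderIpAddrs: ["10.0.0.2"], // placeholder remote DNS IP
+      directoryId: "d-926724cf57", // placeholder directory ID
+      remoteDomainName: "two.example.com",
+      trustDirection: "Two-Way",
+      trustPassword: "Some0therPassword",
+    });
+    // Expected to read VerifyFailed until the matching trust exists on the remote side.
+    new TerraformOutput(this, "trust_state", {
+      value: trust.trustState,
+    });
+  }
+}
+```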
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Trust relationship using the directory ID and remote domain name, separated by a `/`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the Trust relationship using the directory ID and remote domain name, separated by a `/`. For example: + +```console +% terraform import aws_directory_service_trust.example d-926724cf57/directory.example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dlm_lifecycle_policy.markdown b/website/docs/cdktf/typescript/r/dlm_lifecycle_policy.markdown new file mode 100644 index 00000000000..37de5e43afc --- /dev/null +++ b/website/docs/cdktf/typescript/r/dlm_lifecycle_policy.markdown @@ -0,0 +1,401 @@ +--- +subcategory: "DLM (Data Lifecycle Manager)" +layout: "aws" +page_title: "AWS: aws_dlm_lifecycle_policy" +description: |- + Provides a Data Lifecycle Manager (DLM) lifecycle policy for managing snapshots. +--- + + + +# Resource: aws_dlm_lifecycle_policy + +Provides a [Data Lifecycle Manager (DLM) lifecycle policy](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/snapshot-lifecycle.html) for managing snapshots. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DlmLifecyclePolicy } from "./.gen/providers/aws/dlm-lifecycle-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["dlm.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const dlmLifecycle = new DataAwsIamPolicyDocument(this, "dlm_lifecycle", { + statement: [ + { + actions: [ + "ec2:CreateSnapshot", + "ec2:CreateSnapshots", + "ec2:DeleteSnapshot", + "ec2:DescribeInstances", + "ec2:DescribeVolumes", + "ec2:DescribeSnapshots", + ], + effect: "Allow", + resources: ["*"], + }, + { + actions: ["ec2:CreateTags"], + effect: "Allow", + resources: ["arn:aws:ec2:*::snapshot/*"], + }, + ], + }); + const dlmLifecycleRole = new IamRole(this, "dlm_lifecycle_role", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "dlm-lifecycle-role", + }); + const awsIamRolePolicyDlmLifecycle = new IamRolePolicy( + this, + "dlm_lifecycle_3", + { + name: "dlm-lifecycle-policy", + policy: Token.asString(dlmLifecycle.json), + role: dlmLifecycleRole.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyDlmLifecycle.overrideLogicalId("dlm_lifecycle");
+    new DlmLifecyclePolicy(this, "example", {
+      description: "example DLM lifecycle policy",
+      executionRoleArn: dlmLifecycleRole.arn,
+      policyDetails: {
+        resourceTypes: ["VOLUME"],
+        schedule: [
+          {
+            copyTags: false,
+            createRule: {
+              interval: 24,
+              intervalUnit: "HOURS",
+              times: ["23:45"],
+            },
+            name: "2 weeks of daily snapshots",
+            retainRule: {
+              count: 14,
+            },
+            tagsToAdd: {
+              snapshotCreator: "DLM",
+            },
+          },
+        ],
+        targetTags: {
+          snapshot: "true",
+        },
+      },
+      state: "ENABLED",
+    });
+  }
+}
+
+```
+
+### Example Cross-Region Snapshot Copy Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { DlmLifecyclePolicy } from "./.gen/providers/aws/dlm-lifecycle-policy";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const key = new DataAwsIamPolicyDocument(this, "key", {
+      statement: [
+        {
+          actions: ["kms:*"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["arn:aws:iam::${" + current.accountId + "}:root"],
+              type: "AWS",
+            },
+          ],
+          resources: ["*"],
+          sid: "Enable IAM User Permissions",
+        },
+      ],
+    });
+    const dlmCrossRegionCopyCmk = new KmsKey(
+      this,
+      "dlm_cross_region_copy_cmk",
+      {
+        description: "Example Alternate Region KMS Key",
+        policy: Token.asString(key.json),
+        provider: alternate,
+      }
+    );
+    new DlmLifecyclePolicy(this, "example", {
+      description: "example DLM lifecycle policy",
+      executionRoleArn: dlmLifecycleRole.arn,
+      policyDetails: {
+        resourceTypes: ["VOLUME"],
+        schedule: [
+          {
+            copyTags: false,
+            createRule: {
+              interval: 24,
+              intervalUnit: "HOURS",
+              times: ["23:45"],
+            },
+            crossRegionCopyRule: [
+              {
+                cmkArn: dlmCrossRegionCopyCmk.arn,
+                copyTags: true,
+                encrypted: true,
+                retainRule: {
+                  interval: 30,
+                  intervalUnit: "DAYS",
+                },
+                target: "us-west-2",
+              },
+            ],
+            name: "2 weeks of daily snapshots",
+            retainRule: {
+              count: 14,
+            },
+            tagsToAdd: {
+              snapshotCreator: "DLM",
+            },
+          },
+        ],
+        targetTags: {
+          snapshot: "true",
+        },
+      },
+      state: "ENABLED",
+    });
+  }
+}
+
+```
+
+### Example Event Based Policy Usage
+
+```
+data "aws_caller_identity" "current" {}
+
+resource "aws_dlm_lifecycle_policy" "example" {
+  description        = "tf-acc-basic"
+  execution_role_arn = aws_iam_role.example.arn
+
+  policy_details {
+    policy_type = "EVENT_BASED_POLICY"
+
+    action {
+      name = "tf-acc-basic"
+      cross_region_copy {
+        encryption_configuration {}
+        retain_rule {
+          interval      = 15
+          interval_unit = "MONTHS"
+        }
+
+        target = "us-east-1" # replace with your target Region
+      }
+    }
+
+    event_source {
+      type = "MANAGED_CWE"
+      parameters {
+        description_regex = "^.*Created for policy: policy-1234567890abcdef0.*$"
+        event_type        = "shareSnapshot"
+        snapshot_owner    = [data.aws_caller_identity.current.account_id]
+      }
+    }
+  }
+}
+
+data "aws_iam_policy" "example" {
+  name = "AWSDataLifecycleManagerServiceRole"
+}
+
+resource "aws_iam_role_policy_attachment" "example" {
+  role       = aws_iam_role.example.id
+  policy_arn = data.aws_iam_policy.example.arn
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Required) A description for the DLM lifecycle policy.
+* `executionRoleArn` - (Required) The ARN of an IAM role that is able to be assumed by the DLM service.
+* `policyDetails` - (Required) See the [`policyDetails` configuration](#policy-details-arguments) block. Max of 1.
+* `state` - (Optional) Whether the lifecycle policy should be enabled or disabled. `ENABLED` or `DISABLED` are valid values. Defaults to `ENABLED`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Policy Details arguments
+
+* `action` - (Optional) The actions to be performed when the event-based policy is triggered. You can specify only one action per policy. This parameter is required for event-based policies only. If you are creating a snapshot or AMI policy, omit this parameter. See the [`action` configuration](#action-arguments) block.
+* `eventSource` - (Optional) The event that triggers the event-based policy. This parameter is required for event-based policies only. If you are creating a snapshot or AMI policy, omit this parameter. See the [`eventSource` configuration](#event-source-arguments) block.
+* `resourceTypes` - (Optional) A list of resource types that should be targeted by the lifecycle policy. Valid values are `VOLUME` and `INSTANCE`.
+* `resourceLocations` - (Optional) The location of the resources to backup. If the source resources are located in an AWS Region, specify `CLOUD`. If the source resources are located on an Outpost in your account, specify `OUTPOST`. If you specify `OUTPOST`, Amazon Data Lifecycle Manager backs up all resources of the specified type with matching target tags across all of the Outposts in your account. Valid values are `CLOUD` and `OUTPOST`.
+* `policyType` - (Optional) The valid target resource types and actions a policy can manage. Specify `EBS_SNAPSHOT_MANAGEMENT` to create a lifecycle policy that manages the lifecycle of Amazon EBS snapshots. Specify `IMAGE_MANAGEMENT` to create a lifecycle policy that manages the lifecycle of EBS-backed AMIs. Specify `EVENT_BASED_POLICY` to create an event-based policy that performs specific actions when a defined event occurs in your AWS account. Default value is `EBS_SNAPSHOT_MANAGEMENT`.
+* `parameters` - (Optional) A set of optional parameters for snapshot and AMI lifecycle policies. See the [`parameters` configuration](#parameters-arguments) block.
+* `schedule` - (Optional) See the [`schedule` configuration](#schedule-arguments) block.
+* `targetTags` - (Optional) A map of tag keys and their values. Any resources that match the `resourceTypes` and are tagged with _any_ of these tags will be targeted.
+
+~> Note: You cannot have overlapping lifecycle policies that share the same `targetTags`. Terraform is unable to detect this at plan time but it will fail during apply.
+
+#### Action arguments
+
+These correspond to the `action` block in the event-based policy example above; a CDKTF sketch follows this list.
+
+* `crossRegionCopy` - (Optional) The rule for copying shared snapshots across Regions. See the [`crossRegionCopy` configuration](#action-cross-region-copy-rule-arguments) block.
+* `name` - (Optional) A descriptive name for the action.
+
+##### Action Cross Region Copy Rule arguments
+
+* `encryptionConfiguration` - (Required) The encryption settings for the copied snapshot. See the [`encryptionConfiguration`](#encryption-configuration-arguments) block. Max of 1 per action.
+* `retainRule` - (Required) Specifies the retention rule for cross-Region snapshot copies. See the [`retainRule`](#cross-region-copy-rule-retain-rule-arguments) block. Max of 1 per action.
+* `target` - (Required) The target Region or the Amazon Resource Name (ARN) of the target Outpost for the snapshot copies.
+
+###### Encryption Configuration arguments
+
+* `cmkArn` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS key to use for EBS encryption. If this parameter is not specified, the default KMS key for the account is used.
+* `encrypted` - (Required) To encrypt a copy of an unencrypted snapshot when encryption by default is not enabled, enable encryption using this parameter. Copies of encrypted snapshots are encrypted, even if this parameter is false or when encryption by default is not enabled.
+
+#### Event Source arguments
+
+* `parameters` - (Required) Information about the event. See the [`parameters` configuration](#event-source-parameters-arguments) block.
+* `type` - (Required) The source of the event. Currently only managed CloudWatch Events rules are supported. The only valid value is `MANAGED_CWE`.
+
+##### Event Source Parameters arguments
+
+* `descriptionRegex` - (Required) The snapshot description that can trigger the policy. The description pattern is specified using a regular expression. The policy runs only if a snapshot with a description that matches the specified pattern is shared with your account.
+* `eventType` - (Required) The type of event. Currently, only `shareSnapshot` events are supported.
+* `snapshotOwner` - (Required) The IDs of the AWS accounts that can trigger the policy by sharing snapshots with your account. The policy only runs if one of the specified AWS accounts shares a snapshot with your account.
+
+#### Parameters arguments
+
+* `excludeBootVolume` - (Optional) Indicates whether to exclude the root volume from snapshots created using CreateSnapshots. The default is `false`.
+* `noReboot` - (Optional) Applies to AMI lifecycle policies only. Indicates whether targeted instances are rebooted when the lifecycle policy runs. `true` indicates that targeted instances are not rebooted when the policy runs. `false` indicates that target instances are rebooted when the policy runs. The default is `true` (instances are not rebooted).
+
+#### Schedule arguments
+
+* `copyTags` - (Optional) Copy all user-defined tags on a source volume to snapshots of the volume created by this policy.
+* `createRule` - (Required) See the [`createRule`](#create-rule-arguments) block. Max of 1 per schedule.
+* `crossRegionCopyRule` - (Optional) See the [`crossRegionCopyRule`](#cross-region-copy-rule-arguments) block. Max of 3 per schedule.
+* `name` - (Required) A name for the schedule.
+* `deprecateRule` - (Required) See the [`deprecateRule`](#deprecate-rule-arguments) block. Max of 1 per schedule.
+* `fastRestoreRule` - (Required) See the [`fastRestoreRule`](#fast-restore-rule-arguments) block. Max of 1 per schedule.
+* `retainRule` - (Required) See the [`retainRule`](#retain-rule-arguments) block. Max of 1 per schedule.
+* `shareRule` - (Required) See the [`shareRule`](#share-rule-arguments) block. Max of 1 per schedule.
+* `tagsToAdd` - (Optional) A map of tag keys and their values. DLM lifecycle policies will already tag the snapshot with the tags on the volume. This configuration adds extra tags on top of these.
+* `variableTags` - (Optional) A map of tag keys and variable values, where the values are determined when the policy is executed. Only `$(instance-id)` or `$(timestamp)` are valid values. Can only be used when `resourceTypes` is `INSTANCE`.
+
+#### Create Rule arguments
+
+* `cronExpression` - (Optional) The schedule, as a Cron expression. The schedule interval must be between 1 hour and 1 year.
+* `interval` - (Optional) How often this lifecycle policy should be evaluated. `1`, `2`, `3`, `4`, `6`, `8`, `12`, or `24` are valid values.
+* `intervalUnit` - (Optional) The unit for how often the lifecycle policy should be evaluated. `HOURS` is currently the only allowed value and also the default value.
+* `location` - (Optional) Specifies the destination for snapshots created by the policy. To create snapshots in the same Region as the source resource, specify `CLOUD`. To create snapshots on the same Outpost as the source resource, specify `OUTPOST_LOCAL`. If you omit this parameter, `CLOUD` is used by default. If the policy targets resources in an AWS Region, then you must create snapshots in the same Region as the source resource. If the policy targets resources on an Outpost, then you can create snapshots on the same Outpost as the source resource, or in the Region of that Outpost. Valid values are `CLOUD` and `OUTPOST_LOCAL`.
+* `times` - (Optional) A list of times in 24 hour clock format that sets when the lifecycle policy should be evaluated. Max of 1.
+
+#### Deprecate Rule arguments
+
+* `count` - (Optional) Specifies the number of oldest AMIs to deprecate. Must be an integer between `1` and `1000`.
+* `interval` - (Optional) Specifies the period after which to deprecate AMIs created by the schedule. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days.
+* `intervalUnit` - (Optional) The unit of time for time-based retention. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Fast Restore Rule arguments
+
+* `availabilityZones` - (Required) The Availability Zones in which to enable fast snapshot restore.
+* `count` - (Optional) The number of snapshots to be enabled with fast snapshot restore. Must be an integer between `1` and `1000`.
+* `interval` - (Optional) The amount of time to enable fast snapshot restore. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days.
+* `intervalUnit` - (Optional) The unit of time for enabling fast snapshot restore. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Retain Rule arguments
+
+* `count` - (Optional) How many snapshots to keep. Must be an integer between `1` and `1000`.
+* `interval` - (Optional) The amount of time to retain each snapshot. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days.
+* `intervalUnit` - (Optional) The unit of time for time-based retention. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Share Rule arguments
+
+* `targetAccounts` - (Required) The IDs of the AWS accounts with which to share the snapshots.
+* `interval` - (Optional) The period after which snapshots that are shared with other AWS accounts are automatically unshared.
+* `intervalUnit` - (Optional) The unit of time for the automatic unsharing interval. Valid values are `DAYS`, `WEEKS`, `MONTHS`, `YEARS`.
+
+#### Cross Region Copy Rule arguments
+
+* `cmkArn` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS customer master key (CMK) to use for EBS encryption. If this argument is not specified, the default KMS key for the account is used.
+* `copyTags` - (Optional) Whether to copy all user-defined tags from the source snapshot to the cross-region snapshot copy.
+* `deprecateRule` - (Optional) The AMI deprecation rule for cross-Region AMI copies created by the rule. See the [`deprecateRule`](#cross-region-copy-rule-deprecate-rule-arguments) block.
+* `encrypted` - (Required) To encrypt a copy of an unencrypted snapshot if encryption by default is not enabled, enable encryption using this parameter. Copies of encrypted snapshots are encrypted, even if this parameter is false or if encryption by default is not enabled.
+* `retainRule` - (Required) The retention rule that indicates how long snapshot copies are to be retained in the destination Region. See the [`retainRule`](#cross-region-copy-rule-retain-rule-arguments) block. Max of 1 per schedule.
+* `target` - (Required) The target Region or the Amazon Resource Name (ARN) of the target Outpost for the snapshot copies.
+
+#### Cross Region Copy Rule Deprecate Rule arguments
+
+* `interval` - (Required) The period after which to deprecate the cross-Region AMI copies. The period must be less than or equal to the cross-Region AMI copy retention period, and it can't be greater than 10 years. This is equivalent to 120 months, 520 weeks, or 3650 days.
+* `intervalUnit` - (Required) The unit of time in which to measure the `interval`. Valid values: `DAYS`, `WEEKS`, `MONTHS`, or `YEARS`.
+
+#### Cross Region Copy Rule Retain Rule arguments
+
+* `interval` - (Required) The amount of time to retain each snapshot. The maximum is 100 years. This is equivalent to 1200 months, 5200 weeks, or 36500 days.
+* `intervalUnit` - (Required) The unit of time for time-based retention. Valid values: `DAYS`, `WEEKS`, `MONTHS`, or `YEARS`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the DLM Lifecycle Policy.
+* `id` - Identifier of the DLM Lifecycle Policy.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DLM lifecycle policies using their policy ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DLM lifecycle policies using their policy ID.
For example: + +```console +% terraform import aws_dlm_lifecycle_policy.example policy-abcdef12345678901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dms_certificate.html.markdown b/website/docs/cdktf/typescript/r/dms_certificate.html.markdown new file mode 100644 index 00000000000..675b6e3b4c3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dms_certificate.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_certificate" +description: |- + Provides a DMS (Data Migration Service) certificate resource. +--- + + + +# Resource: aws_dms_certificate + +Provides a DMS (Data Migration Service) certificate resource. DMS certificates can be created, deleted, and imported. + +~> **Note:** All arguments including the PEM encoded certificate will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DmsCertificate } from "./.gen/providers/aws/dms-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DmsCertificate(this, "test", { + certificateId: "test-dms-certificate-tf", + certificatePem: "...", + tags: { + Name: "test", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `certificateId` - (Required) The certificate identifier. + + - Must contain from 1 to 255 alphanumeric characters and hyphens. + +* `certificatePem` - (Optional) The contents of the .pem X.509 certificate file for the certificate. Either `certificatePem` or `certificateWallet` must be set. +* `certificateWallet` - (Optional) The contents of the Oracle Wallet certificate for use with SSL, provided as a base64-encoded String. Either `certificatePem` or `certificateWallet` must be set. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `certificateArn` - The Amazon Resource Name (ARN) for the certificate. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import certificates using the `certificateId`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import certificates using the `certificateId`. For example: + +```console +% terraform import aws_dms_certificate.test test-dms-certificate-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dms_endpoint.html.markdown b/website/docs/cdktf/typescript/r/dms_endpoint.html.markdown new file mode 100644 index 00000000000..7d43cce8911 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dms_endpoint.html.markdown @@ -0,0 +1,246 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_endpoint" +description: |- + Provides a DMS (Data Migration Service) endpoint resource. +--- + + + +# Resource: aws_dms_endpoint + +Provides a DMS (Data Migration Service) endpoint resource. DMS endpoints can be created, updated, deleted, and imported. + +~> **Note:** All arguments including the password will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +~> **Note:** The `s3Settings` argument is deprecated, may not be maintained, and will be removed in a future version. Use the [`awsDmsS3Endpoint`](/docs/providers/aws/r/dms_s3_endpoint.html) resource instead. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DmsEndpoint } from "./.gen/providers/aws/dms-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DmsEndpoint(this, "test", { + certificateArn: + "arn:aws:acm:us-east-1:123456789012:certificate/12345678-1234-1234-1234-123456789012", + databaseName: "test", + endpointId: "test-dms-endpoint-tf", + endpointType: "source", + engineName: "aurora", + extraConnectionAttributes: "", + kmsKeyArn: + "arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012", + password: "test", + port: 3306, + serverName: "test", + sslMode: "none", + tags: { + Name: "test", + }, + username: "test", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `endpointId` - (Required) Database endpoint identifier. Identifiers must contain from 1 to 255 alphanumeric characters or hyphens, begin with a letter, contain only ASCII letters, digits, and hyphens, not end with a hyphen, and not contain two consecutive hyphens. +* `endpointType` - (Required) Type of endpoint. Valid values are `source`, `target`. +* `engineName` - (Required) Type of engine for the endpoint. Valid values are `aurora`, `auroraPostgresql`, `azuredb`, `azureSqlManagedInstance`, `db2`, `db2Zos`, `docdb`, `dynamodb`, `elasticsearch`, `kafka`, `kinesis`, `mariadb`, `mongodb`, `mysql`, `opensearch`, `oracle`, `postgres`, `redshift`, `s3`, `sqlserver`, `sybase`. Please note that some of engine names are available only for `target` endpoint type (e.g. `redshift`). 
+
+* `kmsKeyArn` - (Required when `engineName` is `mongodb`, cannot be set when `engineName` is `s3`, optional otherwise) ARN for the KMS key that will be used to encrypt the connection parameters. If you do not specify a value for `kmsKeyArn`, then AWS DMS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region. To encrypt an S3 target with a KMS key, use the parameter `s3SettingsServerSideEncryptionKmsKeyId`. When `engineName` is `redshift`, `kmsKeyArn` is the KMS key for the Redshift target and the parameter `redshiftSettingsServerSideEncryptionKmsKeyId` encrypts the S3 intermediate storage.
+
+The following arguments are optional:
+
+* `certificateArn` - (Optional, Default: empty string) ARN for the certificate.
+* `databaseName` - (Optional) Name of the endpoint database.
+* `elasticsearchSettings` - (Optional) Configuration block for OpenSearch settings. See below.
+* `extraConnectionAttributes` - (Optional) Additional attributes associated with the connection. For available attributes for a `source` endpoint, see [Sources for data migration](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.html). For available attributes for a `target` endpoint, see [Targets for data migration](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.html).
+* `kafkaSettings` - (Optional) Configuration block for Kafka settings. See below.
+* `kinesisSettings` - (Optional) Configuration block for Kinesis settings. See below.
+* `mongodbSettings` - (Optional) Configuration block for MongoDB settings. See below.
+* `password` - (Optional) Password used to log in to the endpoint database.
+* `port` - (Optional) Port used by the endpoint database.
+* `redshiftSettings` - (Optional) Configuration block for Redshift settings. See below.
+* `s3Settings` - (Optional) (**Deprecated**, use the [`awsDmsS3Endpoint`](/docs/providers/aws/r/dms_s3_endpoint.html) resource instead) Configuration block for S3 settings. See below.
+* `secretsManagerAccessRoleArn` - (Optional) ARN of the IAM role that specifies AWS DMS as the trusted entity and has the required permissions to access the value in SecretsManagerSecret.
+* `secretsManagerArn` - (Optional) Full ARN, partial ARN, or friendly name of the SecretsManagerSecret that contains the endpoint connection details. Supported only when `engineName` is `aurora`, `auroraPostgresql`, `mariadb`, `mongodb`, `mysql`, `oracle`, `postgres`, `redshift`, or `sqlserver`.
+* `serverName` - (Optional) Host name of the server.
+* `serviceAccessRole` - (Optional) ARN used by the service access IAM role for DynamoDB endpoints.
+* `sslMode` - (Optional, Default: `none`) SSL mode to use for the connection. Valid values are `none`, `require`, `verifyCa`, and `verifyFull`.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `username` - (Optional) Username used to log in to the endpoint database.
+
+### elasticsearch_settings
+
+-> Additional information can be found in the [Using Amazon OpenSearch Service as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Elasticsearch.html).
+
+* `endpointUri` - (Required) Endpoint for the OpenSearch cluster.
+* `errorRetryDuration` - (Optional) Maximum number of seconds for which DMS retries failed API requests to the OpenSearch cluster. Default is `300`.
+* `fullLoadErrorPercentage` - (Optional) Maximum percentage of records that can fail to be written before a full load operation stops. Default is `10`.
+* `serviceAccessRoleArn` - (Required) ARN of the IAM Role with permissions to write to the OpenSearch cluster.
+
+### kafka_settings
+
+-> Additional information can be found in the [Using Apache Kafka as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Kafka.html).
+
+* `broker` - (Required) Kafka broker location. Specify in the form broker-hostname-or-ip:port.
+* `includeControlDetails` - (Optional) Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
+* `includeNullAndEmpty` - (Optional) Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
+* `includePartitionValue` - (Optional) Shows the partition value within the Kafka message output unless the partition type is `schemaTableType`. Default is `false`.
+* `includeTableAlterOperations` - (Optional) Includes any data definition language (DDL) operations that change the table in the control data, such as `renameTable`, `dropTable`, `addColumn`, `dropColumn`, and `renameColumn`. Default is `false`.
+* `includeTransactionDetails` - (Optional) Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transactionId`, previous `transactionId`, and `transactionRecordId` (the record offset within a transaction). Default is `false`.
+* `messageFormat` - (Optional) Output format for the records created on the endpoint. Message format is `json` (default) or `jsonUnformatted` (a single line with no tab).
+* `messageMaxBytes` - (Optional) Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
+* `noHexPrefix` - (Optional) Set this optional parameter to `true` to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the `noHexPrefix` endpoint setting to enable migration of RAW data type columns without adding the '0x' prefix.
+* `partitionIncludeSchemaTable` - (Optional) Prefixes schema and table names to partition values, when the partition type is `primaryKeyType`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only a limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
+* `saslPassword` - (Optional) Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
+* `saslUsername` - (Optional) Secure username you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
+
+* `securityProtocol` - (Optional) Sets a secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `sslEncryption`, `sslAuthentication`, and `saslSsl`. `saslSsl` requires `saslUsername` and `saslPassword`.
+* `sslCaCertificateArn` - (Optional) ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
+* `sslClientCertificateArn` - (Optional) ARN of the client certificate used to securely connect to a Kafka target endpoint.
+* `sslClientKeyArn` - (Optional) ARN for the client private key used to securely connect to a Kafka target endpoint.
+* `sslClientKeyPassword` - (Optional) Password for the client private key used to securely connect to a Kafka target endpoint.
+* `topic` - (Optional) Kafka topic for migration. Default is `kafkaDefaultTopic`.
+
+### kinesis_settings
+
+-> Additional information can be found in the [Using Amazon Kinesis Data Streams as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Kinesis.html).
+
+* `includeControlDetails` - (Optional) Shows detailed control information for table definition, column definition, and table and column changes in the Kinesis message output. Default is `false`.
+* `includeNullAndEmpty` - (Optional) Include NULL and empty columns in the target. Default is `false`.
+* `includePartitionValue` - (Optional) Shows the partition value within the Kinesis message output, unless the partition type is schema-table-type. Default is `false`.
+* `includeTableAlterOperations` - (Optional) Includes any data definition language (DDL) operations that change the table in the control data. Default is `false`.
+* `includeTransactionDetails` - (Optional) Provides detailed transaction information from the source database. Default is `false`.
+* `messageFormat` - (Optional) Output format for the records created. Default is `json`. Valid values are `json` and `jsonUnformatted` (a single line with no tab).
+* `partitionIncludeSchemaTable` - (Optional) Prefixes schema and table names to partition values, when the partition type is primary-key-type. Default is `false`.
+* `serviceAccessRoleArn` - (Optional) ARN of the IAM Role with permissions to write to the Kinesis data stream.
+* `streamArn` - (Optional) ARN of the Kinesis data stream.
+
+### mongodb_settings
+
+-> Additional information can be found in the [Using MongoDB as a Source for AWS DMS documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.MongoDB.html).
+
+* `authMechanism` - (Optional) Authentication mechanism to access the MongoDB source endpoint. Default is `default`.
+* `authSource` - (Optional) Authentication database name. Not used when `authType` is `no`. Default is `admin`.
+* `authType` - (Optional) Authentication type to access the MongoDB source endpoint. Default is `password`.
+* `docsToInvestigate` - (Optional) Number of documents to preview to determine the document organization. Use this setting when `nestingLevel` is set to `one`. Default is `1000`.
+* `extractDocId` - (Optional) Document ID. Use this setting when `nestingLevel` is set to `none`. Default is `false`.
+* `nestingLevel` - (Optional) Specifies either document or table mode. Default is `none`. Valid values are `one` (table mode) and `none` (document mode).
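+
+As a hedged illustration (hand-written, not output of `cdktf convert`), the sketch below wires the `mongodbSettings` block above into a `source` endpoint. The endpoint identifier, server details, and KMS key ARN are placeholder assumptions; `kmsKeyArn` is included because it is required when `engineName` is `mongodb`:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DmsEndpoint } from "./.gen/providers/aws/dms-endpoint";
+class MongodbSourceSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DmsEndpoint(this, "mongodb_source", {
+      databaseName: "example",
+      endpointId: "example-mongodb-source",
+      endpointType: "source",
+      engineName: "mongodb",
+      // Required for mongodb engines; placeholder key ARN.
+      kmsKeyArn:
+        "arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012",
+      mongodbSettings: {
+        authSource: "admin",
+        authType: "password",
+        nestingLevel: "none",
+      },
+      password: "example",
+      port: 27017,
+      serverName: "mongodb.example.com",
+      username: "example",
+    });
+  }
+}
+```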
+
+### redis_settings
+
+-> Additional information can be found in the [Using Redis as a target for AWS Database Migration Service](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Redis.html).
+
+* `authPassword` - (Optional) The password provided with the `authRole` and `authToken` options of the AuthType setting for a Redis target endpoint.
+* `authType` - (Required) The type of authentication to perform when connecting to a Redis target. Options include `none`, `authToken`, and `authRole`. The `authToken` option requires an `authPassword` value to be provided. The `authRole` option requires `authUserName` and `authPassword` values to be provided.
+* `authUserName` - (Optional) The username provided with the `authRole` option of the AuthType setting for a Redis target endpoint.
+* `serverName` - (Required) Fully qualified domain name of the endpoint.
+* `port` - (Required) Transmission Control Protocol (TCP) port for the endpoint.
+* `sslCaCertificateArn` - (Optional) The Amazon Resource Name (ARN) for the certificate authority (CA) that DMS uses to connect to your Redis target endpoint.
+* `sslSecurityProtocol` - (Optional) SSL security protocol for the connection. Options include `plaintext` and `sslEncryption`; the `plaintext` option doesn't provide Transport Layer Security (TLS) encryption for traffic between endpoint and database. The default is `sslEncryption`.
+
+### redshift_settings
+
+-> Additional information can be found in the [Using Amazon Redshift as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Redshift.html).
+
+* `bucketFolder` - (Optional) Custom S3 Bucket Object prefix for intermediate storage.
+* `bucketName` - (Optional) Custom S3 Bucket name for intermediate storage.
+* `encryptionMode` - (Optional) The server-side encryption mode used to encrypt your intermediate .csv object files copied to S3. Defaults to `sseS3`. Valid values are `sseS3` and `sseKms`.
+* `serverSideEncryptionKmsKeyId` - (Required when `encryptionMode` is `sseKms`, must not be set otherwise) ARN or ID of the KMS key to use when `encryptionMode` is `sseKms`.
+* `serviceAccessRoleArn` - (Optional) Amazon Resource Name (ARN) of the IAM Role with permissions to read from or write to the S3 Bucket for intermediate storage.
+
+### s3_settings
+
+~> **Deprecated:** This argument is deprecated, may not be maintained, and will be removed in a future version. Use the [`awsDmsS3Endpoint`](/docs/providers/aws/r/dms_s3_endpoint.html) resource instead.
+
+-> Additional information can be found in the [Using Amazon S3 as a Source for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.S3.html) and [Using Amazon S3 as a Target for AWS Database Migration Service documentation](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.S3.html).
+
+* `addColumnName` - (Optional) Whether to add column name information to the .csv output file. Default is `false`.
+* `bucketFolder` - (Optional) S3 object prefix.
+* `bucketName` - (Optional) S3 bucket name.
+* `cannedAclForObjects` - (Optional) Predefined (canned) access control list for objects created in an S3 bucket. Valid values include `none`, `private`, `publicRead`, `publicReadWrite`, `authenticatedRead`, `awsExecRead`, `bucketOwnerRead`, and `bucketOwnerFullControl`. Default is `none`.
+* `cdcInsertsAndUpdates` - (Optional) Whether to write insert and update operations to .csv or .parquet output files. Default is `false`.
+
+* `cdcInsertsOnly` - (Optional) Whether to write insert operations to .csv or .parquet output files. Default is `false`.
+* `cdcMaxBatchInterval` - (Optional) Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. Default is `60`.
+* `cdcMinFileSize` - (Optional) Minimum file size condition as defined in kilobytes to output a file to Amazon S3. Default is `32000`. **NOTE:** Previously, this setting was measured in megabytes but now represents kilobytes. Update configurations accordingly.
+* `cdcPath` - (Optional) Folder path of CDC files. For an S3 source, this setting is required if a task captures change data; otherwise, it's optional. If `cdcPath` is set, AWS DMS reads CDC files from this path and replicates the data changes to the target endpoint. Supported in AWS DMS versions 3.4.2 and later.
+* `compressionType` - (Optional) Set to compress target files. Default is `none`. Valid values are `gzip` and `none`.
+* `csvDelimiter` - (Optional) Delimiter used to separate columns in the source files. Default is `,`.
+* `csvNoSupValue` - (Optional) String to use for all columns not included in the supplemental log.
+* `csvNullValue` - (Optional) String to use as null when writing to the target.
+* `csvRowDelimiter` - (Optional) Delimiter used to separate rows in the source files. Default is `\n`.
+* `dataFormat` - (Optional) Output format for the files that AWS DMS uses to create S3 objects. Valid values are `csv` and `parquet`. Default is `csv`.
+* `dataPageSize` - (Optional) Size of one data page in bytes. Default is `1048576` (1 MiB).
+* `datePartitionDelimiter` - (Optional) Date separating delimiter to use during folder partitioning. Valid values are `slash`, `underscore`, `dash`, and `none`. Default is `slash`.
+* `datePartitionEnabled` - (Optional) Partition S3 bucket folders based on transaction commit dates. Default is `false`.
+* `datePartitionSequence` - (Optional) Date format to use during folder partitioning. Use this parameter when `datePartitionEnabled` is set to `true`. Valid values are `yyyymmdd`, `yyyymmddhh`, `yyyymm`, `mmyyyydd`, and `ddmmyyyy`. Default is `yyyymmdd`.
+* `dictPageSizeLimit` - (Optional) Maximum size in bytes of an encoded dictionary page of a column. Default is `1048576` (1 MiB).
+* `enableStatistics` - (Optional) Whether to enable statistics for Parquet pages and row groups. Default is `true`.
+* `encodingType` - (Optional) Type of encoding to use. Valid values are `rleDictionary`, `plain`, and `plainDictionary`. Default is `rleDictionary`.
+* `encryptionMode` - (Optional) Server-side encryption mode that you want to use to encrypt your .csv or .parquet object files copied to S3. Valid values are `sseS3` and `sseKms`. Default is `sseS3`.
+* `externalTableDefinition` - (Optional) JSON document that describes how AWS DMS should interpret the data.
+* `ignoreHeaderRows` - (Optional) When this value is set to `1`, DMS ignores the first row header in a .csv file. Default is `0`.
+* `includeOpForFullLoad` - (Optional) Whether to enable a full load to write INSERT operations to the .csv output files only to indicate how the rows were added to the source database. Default is `false`.
+* `maxFileSize` - (Optional) Maximum size (in KB) of any .csv file to be created while migrating to an S3 target during full load. Valid values are from `1` to `1048576`. Default is `1048576` (1 GB).
+* `parquetTimestampInMillisecond` - (Optional) Specifies the precision of any TIMESTAMP column values written to an S3 object file in .parquet format.
Default is `false`. +* `parquetVersion` - (Optional) Version of the .parquet file format. Default is `parquet10`. Valid values are `parquet10` and `parquet20`. +* `preserveTransactions` - (Optional) Whether DMS saves the transaction order for a CDC load on the S3 target specified by `cdcPath`. Default is `false`. +* `rfc4180` - (Optional) For an S3 source, whether each leading double quotation mark has to be followed by an ending double quotation mark. Default is `true`. +* `rowGroupLength` - (Optional) Number of rows in a row group. Default is `10000`. +* `serverSideEncryptionKmsKeyId` - (Required when `encryptionMode` is `sseKms`, must not be set otherwise) ARN or Id of KMS Key to use when `encryptionMode` is `sseKms`. +* `serviceAccessRoleArn` - (Optional) ARN of the IAM Role with permissions to read from or write to the S3 Bucket. +* `timestampColumnName` - (Optional) Column to add with timestamp information to the endpoint data for an Amazon S3 target. +* `useCsvNoSupValue` - (Optional) Whether to use `csvNoSupValue` for columns not included in the supplemental log. +* `useTaskStartTimeForFullLoadTimestamp` - (Optional) When set to true, uses the task start time as the timestamp column value instead of the time data is written to target. For full load, when set to true, each row of the timestamp column contains the task start time. For CDC loads, each row of the timestamp column contains the transaction commit time. When set to false, the full load timestamp in the timestamp column increments with the time data arrives at the target. Default is `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `endpointArn` - ARN for the endpoint. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoints using the `endpointId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import endpoints using the `endpointId`. For example: + +```console +% terraform import aws_dms_endpoint.test test-dms-endpoint-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dms_event_subscription.html.markdown b/website/docs/cdktf/typescript/r/dms_event_subscription.html.markdown new file mode 100644 index 00000000000..5d86d5fd0b3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dms_event_subscription.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_event_subscription" +description: |- + Provides a DMS (Data Migration Service) event subscription resource. +--- + + + +# Resource: aws_dms_event_subscription + +Provides a DMS (Data Migration Service) event subscription resource. 
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DmsEventSubscription } from "./.gen/providers/aws/dms-event-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DmsEventSubscription(this, "example", {
+      enabled: true,
+      eventCategories: ["creation", "failure"],
+      name: "my-favorite-event-subscription",
+      snsTopicArn: Token.asString(awsSnsTopicExample.arn),
+      sourceIds: [
+        Token.asString(awsDmsReplicationTaskExample.replicationTaskId),
+      ],
+      sourceType: "replication-task",
+      tags: {
+        Name: "example",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the event subscription.
+* `enabled` - (Optional, Default: true) Whether the event subscription should be enabled.
+* `eventCategories` - (Optional) List of event categories to listen for, see `describeEventCategories` for a canonical list.
+* `sourceType` - (Optional, Default: all events) Type of source for events. Valid values: `replicationInstance` or `replicationTask`.
+* `sourceIds` - (Required) IDs of sources to listen to.
+* `snsTopicArn` - (Required) ARN of the SNS topic to send events to.
+* `tags` - (Optional) Map of resource tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the DMS Event Subscription.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `update` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import event subscriptions using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import event subscriptions using the `name`.
For example: + +```console +% terraform import aws_dms_event_subscription.test my-awesome-event-subscription +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dms_replication_instance.html.markdown b/website/docs/cdktf/typescript/r/dms_replication_instance.html.markdown new file mode 100644 index 00000000000..4a409c3f665 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dms_replication_instance.html.markdown @@ -0,0 +1,196 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_instance" +description: |- + Provides a DMS (Data Migration Service) replication instance resource. +--- + + + +# Resource: aws_dms_replication_instance + +Provides a DMS (Data Migration Service) replication instance resource. DMS replication instances can be created, updated, deleted, and imported. + +## Example Usage + +Create required roles and then create a DMS instance, setting the depends_on to the required role policy attachments. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DmsReplicationInstance } from "./.gen/providers/aws/dms-replication-instance"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const dmsAssumeRole = new DataAwsIamPolicyDocument( + this, + "dms_assume_role", + { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["dms.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const dmsAccessForEndpoint = new IamRole(this, "dms-access-for-endpoint", { + assumeRolePolicy: Token.asString(dmsAssumeRole.json), + name: "dms-access-for-endpoint", + }); + const dmsCloudwatchLogsRole = new IamRole( + this, + "dms-cloudwatch-logs-role", + { + assumeRolePolicy: Token.asString(dmsAssumeRole.json), + name: "dms-cloudwatch-logs-role", + } + ); + const dmsVpcRole = new IamRole(this, "dms-vpc-role", { + assumeRolePolicy: Token.asString(dmsAssumeRole.json), + name: "dms-vpc-role", + }); + const dmsAccessForEndpointAmazonDmsRedshiftS3Role = + new IamRolePolicyAttachment( + this, + "dms-access-for-endpoint-AmazonDMSRedshiftS3Role", + { + policyArn: + "arn:aws:iam::aws:policy/service-role/AmazonDMSRedshiftS3Role", + role: dmsAccessForEndpoint.name, + } + ); + const dmsCloudwatchLogsRoleAmazonDmsCloudWatchLogsRole = + new IamRolePolicyAttachment( + this, + "dms-cloudwatch-logs-role-AmazonDMSCloudWatchLogsRole", + { + policyArn: + "arn:aws:iam::aws:policy/service-role/AmazonDMSCloudWatchLogsRole", + role: dmsCloudwatchLogsRole.name, + } + ); + const dmsVpcRoleAmazonDmsvpcManagementRole = new IamRolePolicyAttachment( + this, + "dms-vpc-role-AmazonDMSVPCManagementRole", + { + policyArn: + "arn:aws:iam::aws:policy/service-role/AmazonDMSVPCManagementRole", + role: dmsVpcRole.name, + } + ); + new DmsReplicationInstance(this, "test", { + allocatedStorage: 20, + applyImmediately: true, + autoMinorVersionUpgrade: true, + availabilityZone: "us-west-2c", + dependsOn: [ + 
dmsAccessForEndpointAmazonDmsRedshiftS3Role,
+        dmsCloudwatchLogsRoleAmazonDmsCloudWatchLogsRole,
+        dmsVpcRoleAmazonDmsvpcManagementRole,
+      ],
+      engineVersion: "3.1.4",
+      kmsKeyArn:
+        "arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012",
+      multiAz: false,
+      preferredMaintenanceWindow: "sun:10:30-sun:14:30",
+      publiclyAccessible: true,
+      replicationInstanceClass: "dms.t2.micro",
+      replicationInstanceId: "test-dms-replication-instance-tf",
+      replicationSubnetGroupId: testDmsReplicationSubnetGroupTf.id,
+      tags: {
+        Name: "test",
+      },
+      vpcSecurityGroupIds: ["sg-12345678"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allocatedStorage` - (Optional, Default: 50, Min: 5, Max: 6144) The amount of storage (in gigabytes) to be initially allocated for the replication instance.
+* `allowMajorVersionUpgrade` - (Optional, Default: false) Indicates that major version upgrades are allowed.
+* `applyImmediately` - (Optional, Default: false) Indicates whether the changes should be applied immediately or during the next maintenance window. Only used when updating an existing resource.
+* `autoMinorVersionUpgrade` - (Optional, Default: false) Indicates that minor engine upgrades will be applied automatically to the replication instance during the maintenance window.
+* `availabilityZone` - (Optional) The EC2 Availability Zone that the replication instance will be created in.
+* `engineVersion` - (Optional) The engine version number of the replication instance.
+* `kmsKeyArn` - (Optional) The Amazon Resource Name (ARN) for the KMS key that will be used to encrypt the connection parameters. If you do not specify a value for `kmsKeyArn`, then AWS DMS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region.
+* `multiAz` - (Optional) Specifies if the replication instance is a Multi-AZ deployment. You cannot set the `availabilityZone` parameter if the `multiAz` parameter is set to `true`.
+* `preferredMaintenanceWindow` - (Optional) The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC).
+
+  - Default: A 30-minute window selected at random from an 8-hour block of time per region, occurring on a random day of the week.
+  - Format: `ddd:hh24:mi-ddd:hh24:mi`
+  - Valid Days: `mon, tue, wed, thu, fri, sat, sun`
+  - Constraints: Minimum 30-minute window.
+
+* `publiclyAccessible` - (Optional, Default: false) Specifies the accessibility options for the replication instance. A value of true represents an instance with a public IP address. A value of false represents an instance with a private IP address.
+* `replicationInstanceClass` - (Required) The compute and memory capacity of the replication instance as specified by the replication instance class. See [AWS DMS User Guide](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_ReplicationInstance.Types.html) for available instance sizes and advice on which one to choose.
+* `replicationInstanceId` - (Required) The replication instance identifier. This parameter is stored as a lowercase string.
+
+  - Must contain from 1 to 63 alphanumeric characters or hyphens.
+  - First character must be a letter.
+  - Cannot end with a hyphen.
+  - Cannot contain two consecutive hyphens.
+
+* `replicationSubnetGroupId` - (Optional) A subnet group to associate with the replication instance.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpcSecurityGroupIds` - (Optional) A list of VPC security group IDs to be used with the replication instance. The VPC security groups must work with the VPC containing the replication instance. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `replicationInstanceArn` - The Amazon Resource Name (ARN) of the replication instance. +* `replicationInstancePrivateIps` - A list of the private IP addresses of the replication instance. +* `replicationInstancePublicIps` - A list of the public IP addresses of the replication instance. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `40M`) +- `update` - (Default `30M`) +- `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import replication instances using the `replicationInstanceId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import replication instances using the `replicationInstanceId`. For example: + +```console +% terraform import aws_dms_replication_instance.test test-dms-replication-instance-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dms_replication_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/dms_replication_subnet_group.html.markdown new file mode 100644 index 00000000000..ee77f976d46 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dms_replication_subnet_group.html.markdown @@ -0,0 +1,153 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_subnet_group" +description: |- + Provides a DMS (Data Migration Service) subnet group resource. +--- + + + +# Resource: aws_dms_replication_subnet_group + +Provides a DMS (Data Migration Service) replication subnet group resource. DMS replication subnet groups can be created, updated, deleted, and imported. + +~> **Note:** AWS requires a special IAM role called `dmsVpcRole` when using this resource. See the example below to create it as part of your configuration. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DmsReplicationSubnetGroup } from "./.gen/providers/aws/dms-replication-subnet-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DmsReplicationSubnetGroup(this, "example", { + replicationSubnetGroupDescription: "Example replication subnet group", + replicationSubnetGroupId: "example-dms-replication-subnet-group-tf", + subnetIds: ["subnet-12345678", "subnet-12345679"], + tags: { + Name: "example", + }, + }); + } +} + +``` + +### Creating special IAM role + +If your account does not already include the `dmsVpcRole` IAM role, you will need to create it to allow DMS to manage subnets in the VPC. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DmsReplicationSubnetGroup } from "./.gen/providers/aws/dms-replication-subnet-group"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const dmsVpcRole = new IamRole(this, "dms-vpc-role", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "dms.amazonaws.com", + }, + }, + ], + Version: "2012-10-17", + }) + ), + description: "Allows DMS to manage VPC", + name: "dms-vpc-role", + }); + const example = new IamRolePolicyAttachment(this, "example", { + policyArn: + "arn:aws:iam::aws:policy/service-role/AmazonDMSVPCManagementRole", + role: dmsVpcRole.name, + }); + const awsDmsReplicationSubnetGroupExample = new DmsReplicationSubnetGroup( + this, + "example_2", + { + dependsOn: [example], + replicationSubnetGroupDescription: "Example", + replicationSubnetGroupId: "example-id", + subnetIds: ["subnet-12345678", "subnet-12345679"], + tags: { + Name: "example-id", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDmsReplicationSubnetGroupExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `replicationSubnetGroupDescription` - (Required) Description for the subnet group. +* `replicationSubnetGroupId` - (Required) Name for the replication subnet group. This value is stored as a lowercase string. It must contain no more than 255 alphanumeric characters, periods, spaces, underscores, or hyphens and cannot be `default`. +* `subnetIds` - (Required) List of at least 2 EC2 subnet IDs for the subnet group. The subnets must cover at least 2 availability zones. +* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
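+
+A hedged sketch (hand-written, not `cdktf convert` output) of how the subnet group is typically consumed: a replication instance references it through `replicationSubnetGroupId`. The instance class and identifiers below are placeholder assumptions:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DmsReplicationInstance } from "./.gen/providers/aws/dms-replication-instance";
+import { DmsReplicationSubnetGroup } from "./.gen/providers/aws/dms-replication-subnet-group";
+class SubnetGroupUsageSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const subnetGroup = new DmsReplicationSubnetGroup(this, "example", {
+      replicationSubnetGroupDescription: "Example replication subnet group",
+      replicationSubnetGroupId: "example-dms-replication-subnet-group-tf",
+      subnetIds: ["subnet-12345678", "subnet-12345679"],
+    });
+    // The subnet group's ID doubles as the value for
+    // replicationSubnetGroupId on the replication instance.
+    new DmsReplicationInstance(this, "instance", {
+      replicationInstanceClass: "dms.t3.micro",
+      replicationInstanceId: "example-instance",
+      replicationSubnetGroupId: subnetGroup.id,
+    });
+  }
+}
+```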
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpcId` - The ID of the VPC the subnet group is in. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `15M`) +- `update` - (Default `15M`) +- `delete` - (Default `15M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import replication subnet groups using the `replicationSubnetGroupId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import replication subnet groups using the `replicationSubnetGroupId`. For example: + +```console +% terraform import aws_dms_replication_subnet_group.test test-dms-replication-subnet-group-tf +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dms_replication_task.html.markdown b/website/docs/cdktf/typescript/r/dms_replication_task.html.markdown new file mode 100644 index 00000000000..86d26420b3e --- /dev/null +++ b/website/docs/cdktf/typescript/r/dms_replication_task.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "DMS (Database Migration)" +layout: "aws" +page_title: "AWS: aws_dms_replication_task" +description: |- + Provides a DMS (Data Migration Service) replication task resource. +--- + + + +# Resource: aws_dms_replication_task + +Provides a DMS (Data Migration Service) replication task resource. DMS replication tasks can be created, updated, deleted, and imported. + +~> **NOTE:** Changing most arguments will stop the task if it is running. You can set `startReplicationTask` to resume the task afterwards. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DmsReplicationTask } from "./.gen/providers/aws/dms-replication-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DmsReplicationTask(this, "test", { + cdcStartTime: Token.asString(1484346880), + migrationType: "full-load", + replicationInstanceArn: + testDmsReplicationInstanceTf.replicationInstanceArn, + replicationTaskId: "test-dms-replication-task-tf", + replicationTaskSettings: "...", + sourceEndpointArn: testDmsSourceEndpointTf.endpointArn, + tableMappings: + '{\\"rules\\":[{\\"rule-type\\":\\"selection\\",\\"rule-id\\":\\"1\\",\\"rule-name\\":\\"1\\",\\"object-locator\\":{\\"schema-name\\":\\"%\\",\\"table-name\\":\\"%\\"},\\"rule-action\\":\\"include\\"}]}', + tags: { + Name: "test", + }, + targetEndpointArn: testDmsTargetEndpointTf.endpointArn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cdcStartPosition` - (Optional, Conflicts with `cdcStartTime`) Indicates when you want a change data capture (CDC) operation to start. The value can be in date, checkpoint, or LSN/SCN format depending on the source engine. For more information, see [Determining a CDC native start point](https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Task.CDC.html#CHAP_Task.CDC.StartPoint.Native). +* `cdcStartTime` - (Optional, Conflicts with `cdcStartPosition`) The Unix timestamp integer for the start of the Change Data Capture (CDC) operation. +* `migrationType` - (Required) The migration type. Can be one of `full-load | cdc | full-load-and-cdc`. +* `replicationInstanceArn` - (Required) The Amazon Resource Name (ARN) of the replication instance. +* `replicationTaskId` - (Required) The replication task identifier. + + - Must contain from 1 to 255 alphanumeric characters or hyphens. + - First character must be a letter. + - Cannot end with a hyphen. + - Cannot contain two consecutive hyphens. + +* `replicationTaskSettings` - (Optional) An escaped JSON string that contains the task settings. For a complete list of task settings, see [Task Settings for AWS Database Migration Service Tasks](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TaskSettings.html). +* `sourceEndpointArn` - (Required) The Amazon Resource Name (ARN) string that uniquely identifies the source endpoint. +* `startReplicationTask` - (Optional) Whether to run or stop the replication task. +* `tableMappings` - (Required) An escaped JSON string that contains the table mappings. For information on table mapping see [Using Table Mapping with an AWS Database Migration Service Task to Select and Filter Data](http://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TableMapping.html) +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `targetEndpointArn` - (Required) The Amazon Resource Name (ARN) string that uniquely identifies the target endpoint. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `replicationTaskArn` - The Amazon Resource Name (ARN) for the replication task. +* `status` - Replication Task status. 
+
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import replication tasks using the `replicationTaskId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import replication tasks using the `replicationTaskId`. For example:
+
+```console
+% terraform import aws_dms_replication_task.test test-dms-replication-task-tf
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dms_s3_endpoint.html.markdown b/website/docs/cdktf/typescript/r/dms_s3_endpoint.html.markdown
new file mode 100644
index 00000000000..6f2d4496733
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dms_s3_endpoint.html.markdown
@@ -0,0 +1,213 @@
+---
+subcategory: "DMS (Database Migration)"
+layout: "aws"
+page_title: "AWS: aws_dms_s3_endpoint"
+description: |-
+  Provides a DMS (Data Migration Service) S3 endpoint resource.
+---
+
+
+
+# Resource: aws_dms_s3_endpoint
+
+Provides a DMS (Data Migration Service) S3 endpoint resource. DMS S3 endpoints can be created, updated, deleted, and imported.
+
+~> **Note:** AWS is deprecating `extraConnectionAttributes`, such as used with `awsDmsEndpoint`. This resource is an alternative to `awsDmsEndpoint` and does not use `extraConnectionAttributes`. (AWS currently includes `extraConnectionAttributes` in the raw responses to the AWS Provider requests and so they may be visible in Terraform logs.)
+
+~> **Note:** Some of this resource's arguments have default values that come from the AWS Provider. Other default values are provided by AWS and subject to change without notice. When relying on AWS defaults, the Terraform state will often have a zero value. For example, the AWS Provider does not provide a default for `cdcMaxBatchInterval` but the AWS default is `60` (seconds). However, the Terraform state will show `0` since this is the value returned by AWS when no value is present. Below, we aim to flag the defaults that come from AWS (_e.g._, "AWS default...").
+
+## Example Usage
+
+### Minimal Configuration
+
+This is the minimal configuration for an `awsDmsS3Endpoint`. This endpoint will rely on the AWS Provider and AWS defaults.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DmsS3Endpoint } from "./.gen/providers/aws/dms-s3-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DmsS3Endpoint(this, "example", { + bucketName: "beckut_name", + dependsOn: [awsIamRolePolicyExample], + endpointId: "donnedtipi", + endpointType: "target", + serviceAccessRoleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### Complete Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DmsS3Endpoint } from "./.gen/providers/aws/dms-s3-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DmsS3Endpoint(this, "example", { + addColumnName: true, + addTrailingPaddingCharacter: false, + bucketFolder: "folder", + bucketName: "bucket_name", + cannedAclForObjects: "private", + cdcInsertsAndUpdates: true, + cdcInsertsOnly: false, + cdcMaxBatchInterval: 100, + cdcMinFileSize: 16, + cdcPath: "cdc/path", + compressionType: "GZIP", + csvDelimiter: ";", + csvNoSupValue: "x", + csvNullValue: "?", + csvRowDelimiter: "\\r\\n", + dataFormat: "parquet", + dataPageSize: 1100000, + datePartitionDelimiter: "UNDERSCORE", + datePartitionEnabled: true, + datePartitionSequence: "yyyymmddhh", + datePartitionTimezone: "Asia/Seoul", + dependsOn: [awsIamRolePolicyExample], + dictPageSizeLimit: 1000000, + enableStatistics: false, + encodingType: "plain", + encryptionMode: "SSE_S3", + endpointId: "donnedtipi", + endpointType: "target", + expectedBucketOwner: Token.asString(current.accountId), + externalTableDefinition: "etd", + ignoreHeaderRows: 1, + includeOpForFullLoad: true, + maxFileSize: 1000000, + parquetTimestampInMillisecond: true, + parquetVersion: "parquet-2-0", + preserveTransactions: false, + rfc4180: false, + rowGroupLength: 11000, + serverSideEncryptionKmsKeyId: Token.asString(awsKmsKeyExample.arn), + serviceAccessRoleArn: Token.asString(awsIamRoleExample.arn), + sslMode: "none", + tags: { + Name: "donnedtipi", + Remove: "to-remove", + Update: "to-update", + }, + timestampColumnName: "tx_commit_time", + useCsvNoSupValue: false, + useTaskStartTimeForFullLoadTimestamp: true, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `bucketName` - (Required) S3 bucket name. +* `cdcPath` - (Required for CDC; otherwise, Optional) Folder path of CDC files. If `cdcPath` is set, AWS DMS reads CDC files from this path and replicates the data changes to the target endpoint. Supported in AWS DMS versions 3.4.2 and later. +* `endpointId` - (Required) Database endpoint identifier. Identifiers must contain from 1 to 255 alphanumeric characters or hyphens, begin with a letter, contain only ASCII letters, digits, and hyphens, not end with a hyphen, and not contain two consecutive hyphens. +* `endpointType` - (Required) Type of endpoint. Valid values are `source`, `target`. +* `externalTableDefinition` - (Required for `source` endpoints; otherwise, Optional) JSON document that describes how AWS DMS should interpret the data. +* `serviceAccessRoleArn` - (Required) ARN of the IAM role with permissions to the S3 Bucket. 
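+
+For context, the role passed as `serviceAccessRoleArn` (referenced as `awsIamRoleExample` in the examples above) must both trust DMS and grant access to the bucket. A hedged, hand-written sketch follows; the role name, policy name, and exact S3 action list are assumptions rather than documented requirements:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+class DmsS3RoleSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Role that DMS assumes when it reads from or writes to the bucket.
+    const dmsS3Role = new IamRole(this, "dms_s3_role", {
+      assumeRolePolicy: JSON.stringify({
+        Statement: [
+          {
+            Action: "sts:AssumeRole",
+            Effect: "Allow",
+            Principal: { Service: "dms.amazonaws.com" },
+          },
+        ],
+        Version: "2012-10-17",
+      }),
+      name: "example-dms-s3-role",
+    });
+    // Grant the role access to the bucket; the action list is an
+    // assumption intended to cover full-load and CDC writes.
+    new IamRolePolicy(this, "dms_s3_role_policy", {
+      name: "example-dms-s3-policy",
+      policy: JSON.stringify({
+        Statement: [
+          {
+            Action: [
+              "s3:DeleteObject",
+              "s3:GetBucketLocation",
+              "s3:GetObject",
+              "s3:ListBucket",
+              "s3:PutObject",
+            ],
+            Effect: "Allow",
+            Resource: [
+              "arn:aws:s3:::bucket_name",
+              "arn:aws:s3:::bucket_name/*",
+            ],
+          },
+        ],
+        Version: "2012-10-17",
+      }),
+      role: dmsS3Role.id,
+    });
+  }
+}
+```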
+
+The following arguments are optional:
+
+* `addColumnName` - (Optional) Whether to add column name information to the .csv output file. Default is `false`.
+* `addTrailingPaddingCharacter` - (Optional) Whether to add padding. Default is `false`. (Ignored for source endpoints.)
+* `bucketFolder` - (Optional) S3 object prefix.
+* `cannedAclForObjects` - (Optional) Predefined (canned) access control list for objects created in an S3 bucket. Valid values include `none`, `private`, `publicRead`, `publicReadWrite`, `authenticatedRead`, `awsExecRead`, `bucketOwnerRead`, and `bucketOwnerFullControl`. Default is `none`.
+* `cdcInsertsAndUpdates` - (Optional) Whether to write insert and update operations to .csv or .parquet output files. Default is `false`.
+* `cdcInsertsOnly` - (Optional) Whether to write insert operations to .csv or .parquet output files. Default is `false`.
+* `cdcMaxBatchInterval` - (Optional) Maximum length of the interval, defined in seconds, after which to output a file to Amazon S3. (AWS default is `60`.)
+* `cdcMinFileSize` - (Optional) Minimum file size condition as defined in kilobytes to output a file to Amazon S3. (AWS default is 32000 KB.)
+* `certificateArn` - (Optional, Default: empty string) ARN for the certificate.
+* `compressionType` - (Optional) Set to compress target files. Valid values are `gzip` and `none`. Default is `none`. (Ignored for source endpoints.)
+* `csvDelimiter` - (Optional) Delimiter used to separate columns in the source files. Default is `,`.
+* `csvNoSupValue` - (Optional) Only applies if output files for a CDC load are written in .csv format. If `useCsvNoSupValue` is set to `true`, string to use for all columns not included in the supplemental log. If you do not specify a string value, DMS uses the null value for these columns regardless of `useCsvNoSupValue`. (Ignored for source endpoints.)
+* `csvNullValue` - (Optional) String to use as null when writing to the target. (AWS default is `null`.)
+* `csvRowDelimiter` - (Optional) Delimiter used to separate rows in the source files. Default is newline (_i.e._, `\n`).
+* `dataFormat` - (Optional) Output format for the files that AWS DMS uses to create S3 objects. Valid values are `csv` and `parquet`. (Ignored for source endpoints -- only `csv` is valid.)
+* `dataPageSize` - (Optional) Size of one data page in bytes. (AWS default is 1 MiB, _i.e._, `1048576`.)
+* `datePartitionDelimiter` - (Optional) Date separating delimiter to use during folder partitioning. Valid values are `slash`, `underscore`, `dash`, and `none`. (AWS default is `slash`.) (Ignored for source endpoints.)
+* `datePartitionEnabled` - (Optional) Partition S3 bucket folders based on transaction commit dates. Default is `false`. (Ignored for source endpoints.)
+* `datePartitionSequence` - (Optional) Date format to use during folder partitioning. Use this parameter when `datePartitionEnabled` is set to `true`. Valid values are `yyyymmdd`, `yyyymmddhh`, `yyyymm`, `mmyyyydd`, and `ddmmyyyy`. (AWS default is `yyyymmdd`.) (Ignored for source endpoints.)
+* `datePartitionTimezone` - (Optional) Convert the current UTC time to a timezone. The conversion occurs when a date partition folder is created and a CDC filename is generated. The timezone format is Area/Location (_e.g._, `europe/paris`). Use this when `datePartitionEnabled` is `true`. (Ignored for source endpoints.)
+* `detachTargetOnLobLookupFailureParquet` - (Optional) Undocumented argument for use as directed by AWS Support.
+* `dictPageSizeLimit` - (Optional) Maximum size in bytes of an encoded dictionary page of a column. (AWS default is 1 MiB, _i.e._, `1048576`.)
+* `enableStatistics` - (Optional) Whether to enable statistics for Parquet pages and row groups. Default is `true`.
+* `encodingType` - (Optional) Type of encoding to use. Valid values are `rle-dictionary`, `plain`, and `plain-dictionary`. (AWS default is `rle-dictionary`.)
+* `encryptionMode` - (Optional) Server-side encryption mode that you want to use to encrypt your .csv or .parquet object files copied to S3. Valid values are `SSE_S3` and `SSE_KMS`. (AWS default is `SSE_S3`.) (Ignored for source endpoints -- only `SSE_S3` is valid.)
+* `expectedBucketOwner` - (Optional) Bucket owner to prevent sniping. Value is an AWS account ID.
+* `ignoreHeaderRows` - (Optional, Force New) When this value is set to `1`, DMS ignores the first row header in a .csv file. (AWS default is `0`.)
+* `includeOpForFullLoad` - (Optional) Whether to enable a full load to write INSERT operations to the .csv output files only to indicate how the rows were added to the source database. Default is `false`.
+* `kmsKeyArn` - (Optional) ARN for the KMS key that will be used to encrypt the connection parameters. If you do not specify a value for `kmsKeyArn`, then AWS DMS will use your default encryption key. AWS KMS creates the default encryption key for your AWS account. Your AWS account has a different default encryption key for each AWS region.
+* `maxFileSize` - (Optional) Maximum size (in KB) of any .csv file to be created while migrating to an S3 target during full load. Valid values are from `1` to `1048576`. (AWS default is 1 GB, _i.e._, `1048576`.)
+* `parquetTimestampInMillisecond` - (Optional) Specifies the precision of any TIMESTAMP column values written to an S3 object file in .parquet format. Default is `false`. (Ignored for source endpoints.)
+* `parquetVersion` - (Optional) Version of the .parquet file format. Valid values are `parquet-1-0` and `parquet-2-0`. (AWS default is `parquet-1-0`.) (Ignored for source endpoints.)
+* `preserveTransactions` - (Optional) Whether DMS saves the transaction order for a CDC load on the S3 target specified by `cdcPath`. Default is `false`. (Ignored for source endpoints.)
+* `rfc4180` - (Optional) For an S3 source, whether each leading double quotation mark has to be followed by an ending double quotation mark. Default is `true`.
+* `rowGroupLength` - (Optional) Number of rows in a row group. (AWS default is `10000`.)
+* `serverSideEncryptionKmsKeyId` - (Optional) When `encryptionMode` is `SSE_KMS`, ARN for the AWS KMS key. (Ignored for source endpoints -- only the `SSE_S3` `encryptionMode` is valid.)
+* `sslMode` - (Optional) SSL mode to use for the connection. Valid values are `none`, `require`, `verify-ca`, `verify-full`. (AWS default is `none`.)
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `timestampColumnName` - (Optional) Column to add with timestamp information to the endpoint data for an Amazon S3 target.
+* `useCsvNoSupValue` - (Optional) Whether to use `csvNoSupValue` for columns not included in the supplemental log. (Ignored for source endpoints.)
+* `useTaskStartTimeForFullLoadTimestamp` - (Optional) When set to `true`, uses the task start time as the timestamp column value instead of the time data is written to the target. For a full load, when set to `true`, each row of the timestamp column contains the task start time. For CDC loads, each row of the timestamp column contains the transaction commit time. When set to `false`, the full load timestamp in the timestamp column increments with the time data arrives at the target. Default is `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `endpointArn` - ARN for the endpoint.
+* `engineDisplayName` - Expanded name for the engine name.
+* `externalId` - Can be used for cross-account validation. Use it in another account with `awsDmsS3Endpoint` to create the endpoint cross-account.
+* `status` - Status of the endpoint.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5M`)
+- `delete` - (Default `5M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoints using the `endpointId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import endpoints using the `endpointId`. For example:
+
+```console
+% terraform import aws_dms_s3_endpoint.example example-dms-endpoint-tf
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/docdb_cluster.html.markdown b/website/docs/cdktf/typescript/r/docdb_cluster.html.markdown
new file mode 100644
index 00000000000..6d8294994de
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/docdb_cluster.html.markdown
@@ -0,0 +1,139 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_cluster"
+description: |-
+  Manages a DocumentDB Cluster
+---
+
+
+
+# Resource: aws_docdb_cluster
+
+Manages a DocumentDB Cluster.
+
+Changes to a DocumentDB Cluster can occur when you manually change a
+parameter, such as `port`, and are reflected in the next maintenance
+window. Because of this, Terraform may report a difference in its planning
+phase because a modification has not yet taken place. You can use the
+`applyImmediately` flag to instruct the service to apply the change immediately
+(see documentation below).
+
+~> **Note:** Using `applyImmediately` can result in a brief downtime as the server reboots.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DocdbCluster } from "./.gen/providers/aws/docdb-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DocdbCluster(this, "docdb", {
+      backupRetentionPeriod: 5,
+      clusterIdentifier: "my-docdb-cluster",
+      engine: "docdb",
+      masterPassword: "mustbeeightchars",
+      masterUsername: "foo",
+      preferredBackupWindow: "07:00-09:00",
+      skipFinalSnapshot: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/docdb/create-db-cluster.html).
+
+This resource supports the following arguments:
+
+* `applyImmediately` - (Optional) Specifies whether any cluster modifications
+  are applied immediately, or during the next maintenance window. Default is
+  `false`.
+* `availabilityZones` - (Optional) A list of EC2 Availability Zones that
+  instances in the DB cluster can be created in.
+* `backupRetentionPeriod` - (Optional) The days to retain backups for. Default `1`.
+* `clusterIdentifierPrefix` - (Optional, Forces new resource) Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `clusterIdentifier`.
+* `clusterIdentifier` - (Optional, Forces new resource) The cluster identifier. If omitted, Terraform will assign a random, unique identifier.
+* `dbSubnetGroupName` - (Optional) A DB subnet group to associate with this DB instance.
+* `dbClusterParameterGroupName` - (Optional) A cluster parameter group to associate with the cluster.
+* `deletionProtection` - (Optional) A value that indicates whether the DB cluster has deletion protection enabled. The database can't be deleted when deletion protection is enabled. By default, deletion protection is disabled.
+* `enabledCloudwatchLogsExports` - (Optional) List of log types to export to CloudWatch. If omitted, no logs will be exported.
+  The following log types are supported: `audit`, `profiler`.
+* `engineVersion` - (Optional) The database engine version. Updating this argument results in an outage.
+* `engine` - (Optional) The name of the database engine to be used for this DB cluster. Defaults to `docdb`. Valid values: `docdb`.
+* `finalSnapshotIdentifier` - (Optional) The name of your final DB snapshot
+  when this DB cluster is deleted. If omitted, no final snapshot will be
+  made.
+* `globalClusterIdentifier` - (Optional) The global cluster identifier specified on [`awsDocdbGlobalCluster`](/docs/providers/aws/r/docdb_global_cluster.html).
+* `kmsKeyId` - (Optional) The ARN for the KMS encryption key. When specifying `kmsKeyId`, `storageEncrypted` needs to be set to `true`.
+* `masterPassword` - (Required unless a `snapshotIdentifier` is provided, or unless a `globalClusterIdentifier` is provided and the cluster is the "secondary" cluster of a global database) Password for the master DB user. Note that this may
+  show up in logs, and it will be stored in the state file. Please refer to the DocumentDB Naming Constraints.
+* `masterUsername` - (Required unless a `snapshotIdentifier` is provided, or unless a `globalClusterIdentifier` is provided and the cluster is the "secondary" cluster of a global database) Username for the master DB user.
+* `port` - (Optional) The port on which the DB accepts connections.
+* `preferredBackupWindow` - (Optional) The daily time range during which automated backups are created if automated backups are enabled, using the `backupRetentionPeriod` parameter. Time is specified in UTC.
+  Default: A 30-minute window selected at random from an 8-hour block of time per region, e.g., `04:00-09:00`.
+* `preferredMaintenanceWindow` - (Optional) The weekly time range during which system maintenance can occur, in UTC, e.g., `wed:04:00-wed:04:30`.
+* `skipFinalSnapshot` - (Optional) Determines whether a final DB snapshot is created before the DB cluster is deleted. If `true` is specified, no DB snapshot is created. If `false` is specified, a DB snapshot is created before the DB cluster is deleted, using the value from `finalSnapshotIdentifier`. Default is `false`.
+* `snapshotIdentifier` - (Optional) Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a DB cluster snapshot, or the ARN when specifying a DB snapshot. Automated snapshots **should not** be used for this attribute, unless from a different cluster. Automated snapshots are deleted as part of cluster destruction when the resource is replaced.
+* `storageEncrypted` - (Optional) Specifies whether the DB cluster is encrypted. The default is `false`.
+* `tags` - (Optional) A map of tags to assign to the DB cluster. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcSecurityGroupIds` - (Optional) List of VPC security groups to associate
+  with the Cluster.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of cluster
+* `clusterMembers` – List of DocumentDB Instances that are a part of this cluster
+* `clusterResourceId` - The DocumentDB Cluster Resource ID
+* `endpoint` - The DNS address of the DocumentDB instance
+* `hostedZoneId` - The Route53 Hosted Zone ID of the endpoint
+* `id` - The DocumentDB Cluster Identifier
+* `readerEndpoint` - A read-only endpoint for the DocumentDB cluster, automatically load-balanced across replicas
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `120M`)
+- `update` - (Default `120M`)
+- `delete` - (Default `120M`) - includes any cleanup task during the destroying process.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Clusters using the `clusterIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DocumentDB Clusters using the `clusterIdentifier`.
For example:
+
+```console
+% terraform import aws_docdb_cluster.docdb_cluster docdb-prod-cluster
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/docdb_cluster_instance.html.markdown b/website/docs/cdktf/typescript/r/docdb_cluster_instance.html.markdown
new file mode 100644
index 00000000000..952062caa0d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/docdb_cluster_instance.html.markdown
@@ -0,0 +1,151 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_cluster_instance"
+description: |-
+  Provides a DocumentDB Cluster Resource Instance
+---
+
+
+
+# Resource: aws_docdb_cluster_instance
+
+Provides a DocumentDB Cluster Resource Instance. A Cluster Instance Resource defines
+attributes that are specific to a single instance in a [DocumentDB Cluster][1].
+
+You do not designate a primary and subsequent replicas. Instead, you simply add DocumentDB
+Instances and DocumentDB manages the replication. You can use the [count][3]
+meta-parameter to make multiple instances and join them all to the same DocumentDB
+Cluster, or you may specify different Cluster Instance resources with various
+`instanceClass` sizes.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformCount, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DocdbCluster } from "./.gen/providers/aws/docdb-cluster";
+import { DocdbClusterInstance } from "./.gen/providers/aws/docdb-cluster-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const defaultVar = new DocdbCluster(this, "default", {
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      clusterIdentifier: "docdb-cluster-demo",
+      masterPassword: "barbut8chars",
+      masterUsername: "foo",
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const clusterInstancesCount = TerraformCount.of(Token.asNumber("2"));
+    new DocdbClusterInstance(this, "cluster_instances", {
+      clusterIdentifier: defaultVar.id,
+      identifier: "docdb-cluster-demo-${" + clusterInstancesCount.index + "}",
+      instanceClass: "db.r5.large",
+      count: clusterInstancesCount,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/docdb/create-db-instance.html).
+
+This resource supports the following arguments:
+
+* `applyImmediately` - (Optional) Specifies whether any database modifications
+  are applied immediately, or during the next maintenance window. Default is `false`.
+* `autoMinorVersionUpgrade` - (Optional) This parameter does not apply to Amazon DocumentDB. Amazon DocumentDB does not perform minor version upgrades regardless of the value set (see [docs](https://docs.aws.amazon.com/documentdb/latest/developerguide/API_DBInstance.html)). Default `true`.
+* `availabilityZone` - (Optional, Computed) The EC2 Availability Zone that the DB instance is created in.
See [docs](https://docs.aws.amazon.com/documentdb/latest/developerguide/API_CreateDBInstance.html) about the details.
+* `clusterIdentifier` - (Required) The identifier of the [`awsDocdbCluster`](/docs/providers/aws/r/docdb_cluster.html) in which to launch this instance.
+* `enablePerformanceInsights` - (Optional) A value that indicates whether to enable Performance Insights for the DB Instance. Default `false`. See [docs](https://docs.aws.amazon.com/documentdb/latest/developerguide/performance-insights.html) about the details.
+* `engine` - (Optional) The name of the database engine to be used for the DocumentDB instance. Defaults to `docdb`. Valid values: `docdb`.
+* `identifier` - (Optional, Forces new resource) The identifier for the DocumentDB instance. If omitted, Terraform will assign a random, unique identifier.
+* `identifierPrefix` - (Optional, Forces new resource) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `instanceClass` - (Required) The instance class to use. For details on CPU and memory, see [Scaling for DocumentDB Instances][2].
+  DocumentDB currently supports the below instance classes.
+  Please see [AWS Documentation][4] for complete details.
+  - db.r5.large
+  - db.r5.xlarge
+  - db.r5.2xlarge
+  - db.r5.4xlarge
+  - db.r5.12xlarge
+  - db.r5.24xlarge
+  - db.r4.large
+  - db.r4.xlarge
+  - db.r4.2xlarge
+  - db.r4.4xlarge
+  - db.r4.8xlarge
+  - db.r4.16xlarge
+  - db.t3.medium
+* `performanceInsightsKmsKeyId` - (Optional) The KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. If you do not specify a value for `performanceInsightsKmsKeyId`, then Amazon DocumentDB uses your default KMS key.
+* `preferredMaintenanceWindow` - (Optional) The window to perform maintenance in.
+  Syntax: "ddd:hh24:mi-ddd:hh24:mi". E.g., "Mon:00:00-Mon:03:00".
+* `promotionTier` - (Optional) Default `0`. Failover priority setting at the instance level. A reader with a lower tier has a higher priority to get promoted to writer.
+* `tags` - (Optional) A map of tags to assign to the instance. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of cluster instance
+* `dbSubnetGroupName` - The DB subnet group to associate with this DB instance.
+* `dbiResourceId` - The region-unique, immutable identifier for the DB instance.
+* `endpoint` - The DNS address for this instance. May not be writable.
+* `engineVersion` - The database engine version
+* `kmsKeyId` - The ARN for the KMS encryption key if one is set to the cluster.
+* `port` - The database port
+* `preferredBackupWindow` - The daily time range during which automated backups are created if automated backups are enabled.
+* `storageEncrypted` - Specifies whether the DB cluster is encrypted.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `writer` – Boolean indicating if this instance is writable. `false` indicates this instance is a read replica.
+* `caCertIdentifier` - The identifier of the CA certificate for the DB instance.
+
+[1]: /docs/providers/aws/r/docdb_cluster.html
+[2]: https://docs.aws.amazon.com/documentdb/latest/developerguide/db-cluster-manage-performance.html#db-cluster-manage-scaling-instance
+[3]: https://www.terraform.io/docs/configuration/meta-arguments/count.html
+[4]: https://docs.aws.amazon.com/documentdb/latest/developerguide/db-instance-classes.html#db-instance-class-specs
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `90M`) - includes the time required when restoring from Snapshots
+- `update` - (Default `90M`)
+- `delete` - (Default `90M`) - includes the time required to take snapshots
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Cluster Instances using the `identifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DocumentDB Cluster Instances using the `identifier`. For example:
+
+```console
+% terraform import aws_docdb_cluster_instance.prod_instance_1 aurora-cluster-instance-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/docdb_cluster_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/docdb_cluster_parameter_group.html.markdown
new file mode 100644
index 00000000000..d1432c52862
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/docdb_cluster_parameter_group.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_cluster_parameter_group"
+description: |-
+  Manages a DocumentDB Cluster Parameter Group
+---
+
+
+
+# Resource: aws_docdb_cluster_parameter_group
+
+Manages a DocumentDB Cluster Parameter Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DocdbClusterParameterGroup } from "./.gen/providers/aws/docdb-cluster-parameter-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DocdbClusterParameterGroup(this, "example", {
+      description: "docdb cluster parameter group",
+      family: "docdb3.6",
+      name: "example",
+      parameter: [
+        {
+          name: "tls",
+          value: "enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the DocumentDB cluster parameter group. If omitted, Terraform will assign a random, unique name.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `family` - (Required, Forces new resource) The family of the DocumentDB cluster parameter group.
+* `description` - (Optional, Forces new resource) The description of the DocumentDB cluster parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of DocumentDB parameters to apply.
Setting parameters to system default values may show a difference on imported resources. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +Parameter blocks support the following: + +~> **NOTE:** These arguments take a `string` representation of their values. + +* `name` - (Required) The name of the DocumentDB parameter. +* `value` - (Required) The value of the DocumentDB parameter. +* `applyMethod` - (Optional) Valid values are `immediate` and `pendingReboot`. Defaults to `pendingReboot`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The DocumentDB cluster parameter group name. +* `arn` - The ARN of the DocumentDB cluster parameter group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Cluster Parameter Groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DocumentDB Cluster Parameter Groups using the `name`. For example: + +```console +% terraform import aws_docdb_cluster_parameter_group.cluster_pg production-pg-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/docdb_cluster_snapshot.html.markdown b/website/docs/cdktf/typescript/r/docdb_cluster_snapshot.html.markdown new file mode 100644 index 00000000000..b68f6fcb736 --- /dev/null +++ b/website/docs/cdktf/typescript/r/docdb_cluster_snapshot.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "DocumentDB" +layout: "aws" +page_title: "AWS: aws_docdb_cluster_snapshot" +description: |- + Manages a DocumentDB database cluster snapshot. +--- + + + +# Resource: aws_docdb_cluster_snapshot + +Manages a DocumentDB database cluster snapshot for DocumentDB clusters. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DocdbClusterSnapshot } from "./.gen/providers/aws/docdb-cluster-snapshot"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DocdbClusterSnapshot(this, "example", { + dbClusterIdentifier: Token.asString(awsDocdbClusterExample.id), + dbClusterSnapshotIdentifier: "resourcetestsnapshot1234", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dbClusterIdentifier` - (Required) The DocumentDB Cluster Identifier from which to take the snapshot. 
+* `dbClusterSnapshotIdentifier` - (Required) The identifier for the snapshot.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `availabilityZones` - List of EC2 Availability Zones that instances in the DocumentDB cluster snapshot can be restored in.
+* `dbClusterSnapshotArn` - The Amazon Resource Name (ARN) for the DocumentDB Cluster Snapshot.
+* `engine` - Specifies the name of the database engine.
+* `engineVersion` - Version of the database engine for this DocumentDB cluster snapshot.
+* `kmsKeyId` - If `storageEncrypted` is true, the AWS KMS key identifier for the encrypted DocumentDB cluster snapshot.
+* `port` - Port that the DocumentDB cluster was listening on at the time of the snapshot.
+* `sourceDbClusterSnapshotIdentifier` - The DocumentDB Cluster Snapshot ARN that the DocumentDB Cluster Snapshot was copied from. It is only set in the case of a cross-customer or cross-region copy.
+* `storageEncrypted` - Specifies whether the DocumentDB cluster snapshot is encrypted.
+* `status` - The status of this DocumentDB Cluster Snapshot.
+* `vpcId` - The VPC ID associated with the DocumentDB cluster snapshot.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `20M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDocdbClusterSnapshot` using the cluster snapshot identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsDocdbClusterSnapshot` using the cluster snapshot identifier. For example:
+
+```console
+% terraform import aws_docdb_cluster_snapshot.example my-cluster-snapshot
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/docdb_event_subscription.html.markdown b/website/docs/cdktf/typescript/r/docdb_event_subscription.html.markdown
new file mode 100644
index 00000000000..a54001491cf
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/docdb_event_subscription.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_event_subscription"
+description: |-
+  Provides a DocumentDB event subscription resource.
+---
+
+
+
+# Resource: aws_docdb_event_subscription
+
+Provides a DocumentDB event subscription resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DocdbCluster } from "./.gen/providers/aws/docdb-cluster";
+import { DocdbEventSubscription } from "./.gen/providers/aws/docdb-event-subscription";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DocdbCluster(this, "example", {
+      availabilityZones: [
+        Token.asString(propertyAccess(available.names, ["0"])),
+        Token.asString(propertyAccess(available.names, ["1"])),
+        Token.asString(propertyAccess(available.names, ["2"])),
+      ],
+      clusterIdentifier: "example",
+      masterPassword: "mustbeeightcharacters",
+      masterUsername: "foo",
+      skipFinalSnapshot: true,
+    });
+    const awsSnsTopicExample = new SnsTopic(this, "example_1", {
+      name: "example-events",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSnsTopicExample.overrideLogicalId("example");
+    const awsDocdbEventSubscriptionExample = new DocdbEventSubscription(
+      this,
+      "example_2",
+      {
+        enabled: true,
+        eventCategories: ["creation", "failure"],
+        name: "example",
+        snsTopicArn: Token.asString(awsSnsTopicExample.arn),
+        sourceIds: [example.id],
+        sourceType: "db-cluster",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDocdbEventSubscriptionExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the DocumentDB event subscription. By default generated by Terraform.
+* `namePrefix` - (Optional) The name of the DocumentDB event subscription. Conflicts with `name`.
+* `snsTopicArn` - (Required) The ARN of the SNS topic to send events to.
+* `sourceIds` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a `sourceType` must also be specified.
+* `sourceType` - (Optional) The type of source that will be generating the events. Valid options are `db-instance`, `db-cluster`, `db-parameter-group`, `db-security-group`, `db-cluster-snapshot`. If not set, all sources will be subscribed to.
+* `eventCategories` - (Optional) A list of event categories for a SourceType that you want to subscribe to. See https://docs.aws.amazon.com/documentdb/latest/developerguide/API_Event.html or run `aws docdb describe-event-categories`.
+* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to `true`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
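+
+-> The Example Usage above reads `available.names` without defining it. A minimal sketch of the assumed availability-zones lookup follows; the construct name `available` is an assumption carried over from the original configuration.
+
+```typescript
+// Assumed data source backing `available.names` in the example above.
+import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones";
+
+const available = new DataAwsAvailabilityZones(this, "available", {
+  state: "available", // only zones currently available to this account
+});
+```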
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the DocumentDB event notification subscription
+* `arn` - The Amazon Resource Name of the DocumentDB event notification subscription
+* `customerAwsId` - The AWS customer account associated with the DocumentDB event notification subscription
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `40M`)
+- `delete` - (Default `40M`)
+- `update` - (Default `40M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Event Subscriptions using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DocumentDB Event Subscriptions using the `name`. For example:
+
+```console
+% terraform import aws_docdb_event_subscription.example event-sub
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/docdb_global_cluster.html.markdown b/website/docs/cdktf/typescript/r/docdb_global_cluster.html.markdown
new file mode 100644
index 00000000000..fef999877b6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/docdb_global_cluster.html.markdown
@@ -0,0 +1,216 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_global_cluster"
+description: |-
+  Manages a DocumentDB Global Cluster
+---
+
+
+
+# Resource: aws_docdb_global_cluster
+
+Manages a DocumentDB Global Cluster. A global cluster consists of one primary region and up to five read-only secondary regions. You issue write operations directly to the primary cluster in the primary region and Amazon DocumentDB automatically replicates the data to the secondary regions using dedicated infrastructure.
+
+More information about DocumentDB Global Clusters can be found in the [DocumentDB Developer Guide](https://docs.aws.amazon.com/documentdb/latest/developerguide/global-clusters.html).
+
+## Example Usage
+
+### New DocumentDB Global Cluster
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DocdbCluster } from "./.gen/providers/aws/docdb-cluster"; +import { DocdbClusterInstance } from "./.gen/providers/aws/docdb-cluster-instance"; +import { DocdbGlobalCluster } from "./.gen/providers/aws/docdb-global-cluster"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new AwsProvider(this, "aws", { + alias: "primary", + region: "us-east-2", + }); + const secondary = new AwsProvider(this, "aws_1", { + alias: "secondary", + region: "us-east-1", + }); + const example = new DocdbGlobalCluster(this, "example", { + engine: "docdb", + engineVersion: "4.0.0", + globalClusterIdentifier: "global-test", + }); + const awsDocdbClusterPrimary = new DocdbCluster(this, "primary", { + clusterIdentifier: "test-primary-cluster", + dbSubnetGroupName: "default", + engine: example.engine, + engineVersion: example.engineVersion, + globalClusterIdentifier: example.id, + masterPassword: "somepass123", + masterUsername: "username", + provider: primary, + }); + const awsDocdbClusterSecondary = new DocdbCluster(this, "secondary", { + clusterIdentifier: "test-secondary-cluster", + dbSubnetGroupName: "default", + engine: example.engine, + engineVersion: example.engineVersion, + globalClusterIdentifier: example.id, + provider: secondary, + }); + const awsDocdbClusterInstancePrimary = new DocdbClusterInstance( + this, + "primary_5", + { + clusterIdentifier: Token.asString(awsDocdbClusterPrimary.id), + engine: example.engine, + identifier: "test-primary-cluster-instance", + instanceClass: "db.r5.large", + provider: primary, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDocdbClusterInstancePrimary.overrideLogicalId("primary"); + const awsDocdbClusterInstanceSecondary = new DocdbClusterInstance( + this, + "secondary_6", + { + clusterIdentifier: Token.asString(awsDocdbClusterSecondary.id), + dependsOn: [awsDocdbClusterInstancePrimary], + engine: example.engine, + identifier: "test-secondary-cluster-instance", + instanceClass: "db.r5.large", + provider: secondary, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDocdbClusterInstanceSecondary.overrideLogicalId("secondary"); + } +} + +``` + +### New Global Cluster From Existing DB Cluster + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DocdbCluster } from "./.gen/providers/aws/docdb-cluster"; +import { DocdbGlobalCluster } from "./.gen/providers/aws/docdb-global-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DocdbCluster(this, "example", { + lifecycle: { + ignoreChanges: [globalClusterIdentifier], + }, + }); + const awsDocdbGlobalClusterExample = new DocdbGlobalCluster( + this, + "example_1", + { + globalClusterIdentifier: "example", + sourceDbClusterIdentifier: example.arn, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsDocdbGlobalClusterExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `globalClusterIdentifier` - (Required, Forces new resources) The global cluster identifier.
+* `databaseName` - (Optional, Forces new resources) Name for an automatically created database on cluster creation.
+* `deletionProtection` - (Optional) If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `engine` - (Optional, Forces new resources) Name of the database engine to be used for this DB cluster. Terraform will only perform drift detection if a configuration value is provided. Current valid values: `docdb`. Defaults to `docdb`. Conflicts with `sourceDbClusterIdentifier`.
+* `engineVersion` - (Optional) Engine version of the global database. Upgrading the engine version will result in all cluster members being immediately updated.
+  * **NOTE:** Upgrading major versions is not supported.
+* `sourceDbClusterIdentifier` - (Optional) Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. Terraform cannot perform drift detection of this value.
+* `storageEncrypted` - (Optional, Forces new resources) Specifies whether the DB cluster is encrypted. The default is `false` unless `sourceDbClusterIdentifier` is specified and encrypted. Terraform will only perform drift detection if a configuration value is provided.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Global Cluster Amazon Resource Name (ARN)
+* `globalClusterMembers` - Set of objects containing Global Cluster members.
+  * `dbClusterArn` - Amazon Resource Name (ARN) of member DB Cluster.
+  * `isWriter` - Whether the member is the primary DB Cluster.
+* `globalClusterResourceId` - AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed.
+* `id` - DocumentDB Global Cluster ID.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5M`)
+* `update` - (Default `5M`)
+* `delete` - (Default `5M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDocdbGlobalCluster` using the Global Cluster identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsDocdbGlobalCluster` using the Global Cluster identifier. For example:
+
+```console
+% terraform import aws_docdb_global_cluster.example example
+```
+
+Certain resource arguments, like `sourceDbClusterIdentifier`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference.
To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DocdbGlobalCluster } from "./.gen/providers/aws/docdb-global-cluster";
+interface MyConfig {
+  globalClusterIdentifier: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new DocdbGlobalCluster(this, "example", {
+      lifecycle: {
+        ignoreChanges: [sourceDbClusterIdentifier],
+      },
+      globalClusterIdentifier: config.globalClusterIdentifier,
+    });
+  }
+}
+
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/docdb_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/docdb_subnet_group.html.markdown
new file mode 100644
index 00000000000..3d5d65f5240
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/docdb_subnet_group.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "DocumentDB"
+layout: "aws"
+page_title: "AWS: aws_docdb_subnet_group"
+description: |-
+  Provides a DocumentDB subnet group resource.
+---
+
+
+
+# Resource: aws_docdb_subnet_group
+
+Provides a DocumentDB subnet group resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DocdbSubnetGroup } from "./.gen/providers/aws/docdb-subnet-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DocdbSubnetGroup(this, "default", {
+      name: "main",
+      subnetIds: [frontend.id, backend.id],
+      tags: {
+        Name: "My docdb subnet group",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the DocumentDB subnet group. If omitted, Terraform will assign a random, unique name.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) The description of the DocumentDB subnet group. Defaults to "Managed by Terraform".
+* `subnetIds` - (Required) A list of VPC subnet IDs.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The DocumentDB subnet group name.
+* `arn` - The ARN of the DocumentDB subnet group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
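+
+-> The Example Usage above references `frontend` and `backend` subnets that are not shown. A minimal sketch of what they might look like follows; the VPC ID, CIDR blocks, and Availability Zones are placeholder assumptions.
+
+```typescript
+// Hypothetical subnets backing `frontend.id` and `backend.id` above.
+import { Subnet } from "./.gen/providers/aws/subnet";
+
+const frontend = new Subnet(this, "frontend", {
+  vpcId: "vpc-0123456789abcdef0", // placeholder VPC
+  cidrBlock: "10.0.1.0/24",
+  availabilityZone: "us-west-2a",
+});
+const backend = new Subnet(this, "backend", {
+  vpcId: "vpc-0123456789abcdef0",
+  cidrBlock: "10.0.2.0/24",
+  availabilityZone: "us-west-2b",
+});
+```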
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DocumentDB Subnet groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DocumentDB Subnet groups using the `name`. For example:
+
+```console
+% terraform import aws_docdb_subnet_group.default production-subnet-group
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dx_bgp_peer.html.markdown b/website/docs/cdktf/typescript/r/dx_bgp_peer.html.markdown
new file mode 100644
index 00000000000..2c2592ddcfc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dx_bgp_peer.html.markdown
@@ -0,0 +1,68 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_bgp_peer"
+description: |-
+  Provides a Direct Connect BGP peer resource.
+---
+
+
+
+# Resource: aws_dx_bgp_peer
+
+Provides a Direct Connect BGP peer resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DxBgpPeer } from "./.gen/providers/aws/dx-bgp-peer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DxBgpPeer(this, "peer", {
+      addressFamily: "ipv6",
+      bgpAsn: 65351,
+      virtualInterfaceId: foo.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `addressFamily` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgpAsn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `virtualInterfaceId` - (Required) The ID of the Direct Connect virtual interface on which to create the BGP peer.
+* `amazonAddress` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon.
+Required for IPv4 BGP peers on public virtual interfaces.
+* `bgpAuthKey` - (Optional) The authentication key for BGP configuration.
+* `customerAddress` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic.
+Required for IPv4 BGP peers on public virtual interfaces.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the BGP peer resource.
+* `bgpStatus` - The Up/Down state of the BGP peer.
+* `bgpPeerId` - The ID of the BGP peer.
+* `awsDevice` - The Direct Connect endpoint on which the BGP peer terminates.
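+
+-> The Example Usage above references `foo.id` without defining it. One hypothetical shape for the virtual interface behind it is sketched below; the connection ID, VLAN, ASN, and gateway ID are placeholder assumptions.
+
+```typescript
+// Hypothetical private virtual interface backing `foo.id` in the example above.
+import { DxPrivateVirtualInterface } from "./.gen/providers/aws/dx-private-virtual-interface";
+
+const foo = new DxPrivateVirtualInterface(this, "foo", {
+  connectionId: "dxcon-ffabc123", // placeholder Direct Connect connection ID
+  name: "vif-foo",
+  vlan: 4094,
+  addressFamily: "ipv4",
+  bgpAsn: 65352,
+  vpnGatewayId: "vgw-0123456789abcdef0", // or dxGatewayId for a DX gateway
+});
+```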
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dx_connection.html.markdown b/website/docs/cdktf/typescript/r/dx_connection.html.markdown
new file mode 100644
index 00000000000..64cac9f7cd4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dx_connection.html.markdown
@@ -0,0 +1,148 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_connection"
+description: |-
+  Provides a Direct Connect connection.
+---
+
+
+
+# Resource: aws_dx_connection
+
+Provides a Direct Connect connection.
+
+## Example Usage
+
+### Create a connection
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DxConnection } from "./.gen/providers/aws/dx-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DxConnection(this, "hoge", {
+      bandwidth: "1Gbps",
+      location: "EqDC2",
+      name: "tf-dx-connection",
+    });
+  }
+}
+
+```
+
+### Request a MACsec-capable connection
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DxConnection } from "./.gen/providers/aws/dx-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DxConnection(this, "example", {
+      bandwidth: "10Gbps",
+      location: "EqDA2",
+      name: "tf-dx-connection",
+      requestMacsec: true,
+    });
+  }
+}
+
+```
+
+### Configure encryption mode for MACsec-capable connections
+
+-> **NOTE:** You can only specify the `encryptionMode` argument once the connection is in an `available` state.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DxConnection } from "./.gen/providers/aws/dx-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DxConnection(this, "example", {
+      bandwidth: "10Gbps",
+      encryptionMode: "must_encrypt",
+      location: "EqDC2",
+      name: "tf-dx-connection",
+      requestMacsec: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bandwidth` - (Required) The bandwidth of the connection. Valid values for dedicated connections: 1Gbps, 10Gbps. Valid values for hosted connections: 50Mbps, 100Mbps, 200Mbps, 300Mbps, 400Mbps, 500Mbps, 1Gbps, 2Gbps, 5Gbps, 10Gbps and 100Gbps. Case sensitive.
+* `encryptionMode` - (Optional) The connection MAC Security (MACsec) encryption mode. MAC Security (MACsec) is only available on dedicated connections. Valid values are `no_encrypt`, `should_encrypt`, and `must_encrypt`.
+* `location` - (Required) The AWS Direct Connect location where the connection is located. See [DescribeLocations](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeLocations.html) for the list of AWS Direct Connect locations. Use `locationCode`. +* `name` - (Required) The name of the connection. +* `providerName` - (Optional) The name of the service provider associated with the connection. +* `requestMacsec` - (Optional) Boolean value indicating whether you want the connection to support MAC Security (MACsec). MAC Security (MACsec) is only available on dedicated connections. See [MACsec prerequisites](https://docs.aws.amazon.com/directconnect/latest/UserGuide/direct-connect-mac-sec-getting-started.html#mac-sec-prerequisites) for more information about MAC Security (MACsec) prerequisites. Default value: `false`. + +~> **NOTE:** Changing the value of `requestMacsec` will cause the resource to be destroyed and re-created. + +* `skipDestroy` - (Optional) Set to true if you do not wish the connection to be deleted at destroy time, and instead just removed from the Terraform state. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the connection. +* `awsDevice` - The Direct Connect endpoint on which the physical connection terminates. +* `hasLogicalRedundancy` - Indicates whether the connection supports a secondary BGP peer in the same address family (IPv4/IPv6). +* `id` - The ID of the connection. +* `jumboFrameCapable` - Boolean value representing if jumbo frames have been enabled for this connection. +* `macsecCapable` - Boolean value indicating whether the connection supports MAC Security (MACsec). +* `ownerAccountId` - The ID of the AWS account that owns the connection. +* `partnerName` - The name of the AWS Direct Connect service provider associated with the connection. +* `portEncryptionStatus` - The MAC Security (MACsec) port link status of the connection. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vlanId` - The VLAN ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect connections using the connection `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect connections using the connection `id`. 
For example: + +```console +% terraform import aws_dx_connection.test_connection dxcon-ffre0ec3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_connection_association.html.markdown b/website/docs/cdktf/typescript/r/dx_connection_association.html.markdown new file mode 100644 index 00000000000..812147eb36e --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_connection_association.html.markdown @@ -0,0 +1,69 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_connection_association" +description: |- + Associates a Direct Connect Connection with a LAG. +--- + + + +# Resource: aws_dx_connection_association + +Associates a Direct Connect Connection with a LAG. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxConnection } from "./.gen/providers/aws/dx-connection"; +import { DxConnectionAssociation } from "./.gen/providers/aws/dx-connection-association"; +import { DxLag } from "./.gen/providers/aws/dx-lag"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DxConnection(this, "example", { + bandwidth: "1Gbps", + location: "EqSe2-EQ", + name: "example", + }); + const awsDxLagExample = new DxLag(this, "example_1", { + connectionsBandwidth: "1Gbps", + location: "EqSe2-EQ", + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxLagExample.overrideLogicalId("example"); + const awsDxConnectionAssociationExample = new DxConnectionAssociation( + this, + "example_2", + { + connectionId: example.id, + lagId: Token.asString(awsDxLagExample.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxConnectionAssociationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `connectionId` - (Required) The ID of the connection. +* `lagId` - (Required) The ID of the LAG with which to associate the connection. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_connection_confirmation.html.markdown b/website/docs/cdktf/typescript/r/dx_connection_confirmation.html.markdown new file mode 100644 index 00000000000..57b4cd51734 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_connection_confirmation.html.markdown @@ -0,0 +1,54 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_connection_confirmation" +description: |- + Provides a confirmation of the creation of the specified hosted connection on an interconnect. +--- + + + +# Resource: aws_dx_connection_confirmation + +Provides a confirmation of the creation of the specified hosted connection on an interconnect. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
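+ * (As an aside, `cdktf get` reads the provider requirements in cdktf.json
+ * and writes the generated bindings to the ./.gen directory imported below.)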
+ * See https://cdk.tf/provider-generation for more details. + */ +import { DxConnectionConfirmation } from "./.gen/providers/aws/dx-connection-confirmation"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DxConnectionConfirmation(this, "confirmation", { + connectionId: "dxcon-ffabc123", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `connectionId` - (Required) The ID of the hosted connection. + +### Removing `awsDxConnectionConfirmation` from your configuration + +Removing an `awsDxConnectionConfirmation` resource from your configuration will remove it +from your statefile and management, **but will not destroy the Hosted Connection.** + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the connection. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_gateway.html.markdown b/website/docs/cdktf/typescript/r/dx_gateway.html.markdown new file mode 100644 index 00000000000..3b8160cbd54 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_gateway.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_gateway" +description: |- + Provides a Direct Connect Gateway. +--- + + + +# Resource: aws_dx_gateway + +Provides a Direct Connect Gateway. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxGateway } from "./.gen/providers/aws/dx-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DxGateway(this, "example", { + amazonSideAsn: "64512", + name: "tf-dxg-example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the connection. +* `amazonSideAsn` - (Required) The ASN to be configured on the Amazon side of the connection. The ASN must be in the private range of 64,512 to 65,534 or 4,200,000,000 to 4,294,967,294. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the gateway. +* `ownerAccountId` - AWS Account ID of the gateway. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect Gateways using the gateway `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect Gateways using the gateway `id`. 
For example: + +```console +% terraform import aws_dx_gateway.test abcd1234-dcba-5678-be23-cdef9876ab45 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_gateway_association.html.markdown b/website/docs/cdktf/typescript/r/dx_gateway_association.html.markdown new file mode 100644 index 00000000000..7aa6169a58c --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_gateway_association.html.markdown @@ -0,0 +1,213 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_gateway_association" +description: |- + Associates a Direct Connect Gateway with a VGW or transit gateway. +--- + + + +# Resource: aws_dx_gateway_association + +Associates a Direct Connect Gateway with a VGW or transit gateway. + +To create a cross-account association, create an [`awsDxGatewayAssociationProposal` resource](/docs/providers/aws/r/dx_gateway_association_proposal.html) +in the AWS account that owns the VGW or transit gateway and then accept the proposal in the AWS account that owns the Direct Connect Gateway +by creating an `awsDxGatewayAssociation` resource with the `proposalId` and `associatedGatewayOwnerAccountId` attributes set. + +## Example Usage + +### VPN Gateway Association + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxGateway } from "./.gen/providers/aws/dx-gateway"; +import { DxGatewayAssociation } from "./.gen/providers/aws/dx-gateway-association"; +import { Vpc } from "./.gen/providers/aws/vpc"; +import { VpnGateway } from "./.gen/providers/aws/vpn-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DxGateway(this, "example", { + amazonSideAsn: "64512", + name: "example", + }); + const awsVpcExample = new Vpc(this, "example_1", { + cidrBlock: "10.255.255.0/28", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpcExample.overrideLogicalId("example"); + const awsVpnGatewayExample = new VpnGateway(this, "example_2", { + vpcId: Token.asString(awsVpcExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpnGatewayExample.overrideLogicalId("example"); + const awsDxGatewayAssociationExample = new DxGatewayAssociation( + this, + "example_3", + { + associatedGatewayId: Token.asString(awsVpnGatewayExample.id), + dxGatewayId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxGatewayAssociationExample.overrideLogicalId("example"); + } +} + +``` + +### Transit Gateway Association + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
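+ * (Note: transit gateway associations must specify `allowedPrefixes`, as in
+ * the example below; the CIDR values shown are illustrative only.)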
+ */ +import { DxGateway } from "./.gen/providers/aws/dx-gateway"; +import { DxGatewayAssociation } from "./.gen/providers/aws/dx-gateway-association"; +import { Ec2TransitGateway } from "./.gen/providers/aws/ec2-transit-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DxGateway(this, "example", { + amazonSideAsn: "64512", + name: "example", + }); + const awsEc2TransitGatewayExample = new Ec2TransitGateway( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEc2TransitGatewayExample.overrideLogicalId("example"); + const awsDxGatewayAssociationExample = new DxGatewayAssociation( + this, + "example_2", + { + allowedPrefixes: ["10.255.255.0/30", "10.255.255.8/30"], + associatedGatewayId: Token.asString(awsEc2TransitGatewayExample.id), + dxGatewayId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxGatewayAssociationExample.overrideLogicalId("example"); + } +} + +``` + +### Allowed Prefixes + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxGateway } from "./.gen/providers/aws/dx-gateway"; +import { DxGatewayAssociation } from "./.gen/providers/aws/dx-gateway-association"; +import { Vpc } from "./.gen/providers/aws/vpc"; +import { VpnGateway } from "./.gen/providers/aws/vpn-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DxGateway(this, "example", { + amazonSideAsn: "64512", + name: "example", + }); + const awsVpcExample = new Vpc(this, "example_1", { + cidrBlock: "10.255.255.0/28", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpcExample.overrideLogicalId("example"); + const awsVpnGatewayExample = new VpnGateway(this, "example_2", { + vpcId: Token.asString(awsVpcExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpnGatewayExample.overrideLogicalId("example"); + const awsDxGatewayAssociationExample = new DxGatewayAssociation( + this, + "example_3", + { + allowedPrefixes: ["210.52.109.0/24", "175.45.176.0/22"], + associatedGatewayId: Token.asString(awsVpnGatewayExample.id), + dxGatewayId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxGatewayAssociationExample.overrideLogicalId("example"); + } +} + +``` + +A full example of how to create a VPN Gateway in one AWS account, create a Direct Connect Gateway in a second AWS account, and associate the VPN Gateway with the Direct Connect Gateway via the `awsDxGatewayAssociationProposal` and `awsDxGatewayAssociation` resources can be found in [the `/examples/dxGatewayCrossAccountVgwAssociation` directory within the Github Repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/dx-gateway-cross-account-vgw-association). 
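+
+For orientation, the accepter's side of such a cross-account association reduces to a single resource once the proposal exists. The following is a condensed sketch rather than generated output; the proposal, gateway, and account IDs are placeholder values:
+
+```typescript
+// Sketch only: accept a cross-account association proposal in the account
+// that owns the Direct Connect gateway. All IDs are placeholder values.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DxGatewayAssociation } from "./.gen/providers/aws/dx-gateway-association";
+class CrossAccountAccepterSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DxGatewayAssociation(this, "example", {
+      proposalId: "ac90e981-b718-4364-872d-65478c84fafe",
+      dxGatewayId: "abcd1234-dcba-5678-be23-cdef9876ab45",
+      associatedGatewayOwnerAccountId: "111122223333",
+    });
+  }
+}
+```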
+
+## Argument Reference
+
+~> **NOTE:** `dxGatewayId` and `associatedGatewayId` must be specified for single account Direct Connect gateway associations.
+
+This resource supports the following arguments:
+
+* `dxGatewayId` - (Required) The ID of the Direct Connect gateway.
+* `associatedGatewayId` - (Optional) The ID of the VGW or transit gateway with which to associate the Direct Connect gateway.
+Used for single account Direct Connect gateway associations.
+* `associatedGatewayOwnerAccountId` - (Optional) The ID of the AWS account that owns the VGW or transit gateway with which to associate the Direct Connect gateway.
+Used for cross-account Direct Connect gateway associations.
+* `proposalId` - (Optional) The ID of the Direct Connect gateway association proposal.
+Used for cross-account Direct Connect gateway associations.
+* `allowedPrefixes` - (Optional) VPC prefixes (CIDRs) to advertise to the Direct Connect gateway. Defaults to the CIDR block of the VPC associated with the Virtual Gateway. To enable drift detection, must be configured.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Direct Connect gateway association resource.
+* `associatedGatewayType` - The type of the associated gateway, `transitGateway` or `virtualPrivateGateway`.
+* `dxGatewayAssociationId` - The ID of the Direct Connect gateway association.
+* `dxGatewayOwnerAccountId` - The ID of the AWS account that owns the Direct Connect gateway.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30M`)
+- `update` - (Default `30M`)
+- `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect gateway associations using `dxGatewayId` together with `associatedGatewayId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Direct Connect gateway associations using `dxGatewayId` together with `associatedGatewayId`. For example:
+
+```console
+% terraform import aws_dx_gateway_association.example 345508c3-7215-4aef-9832-07c125d5bd0f/vgw-98765432
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dx_gateway_association_proposal.html.markdown b/website/docs/cdktf/typescript/r/dx_gateway_association_proposal.html.markdown
new file mode 100644
index 00000000000..d0442342ad9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dx_gateway_association_proposal.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "Direct Connect"
+layout: "aws"
+page_title: "AWS: aws_dx_gateway_association_proposal"
+description: |-
+  Manages a Direct Connect Gateway Association Proposal.
+---
+
+
+
+# Resource: aws_dx_gateway_association_proposal
+
+Manages a Direct Connect Gateway Association Proposal, typically for enabling cross-account associations. For single account associations, see the [`awsDxGatewayAssociation` resource](/docs/providers/aws/r/dx_gateway_association.html).
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxGatewayAssociationProposal } from "./.gen/providers/aws/dx-gateway-association-proposal"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DxGatewayAssociationProposal(this, "example", { + associatedGatewayId: Token.asString(awsVpnGatewayExample.id), + dxGatewayId: Token.asString(awsDxGatewayExample.id), + dxGatewayOwnerAccountId: Token.asString( + awsDxGatewayExample.ownerAccountId + ), + }); + } +} + +``` + +A full example of how to create a VPN Gateway in one AWS account, create a Direct Connect Gateway in a second AWS account, and associate the VPN Gateway with the Direct Connect Gateway via the `awsDxGatewayAssociationProposal` and `awsDxGatewayAssociation` resources can be found in [the `/examples/dxGatewayCrossAccountVgwAssociation` directory within the Github Repository](https://github.com/hashicorp/terraform-provider-aws/tree/main/examples/dx-gateway-cross-account-vgw-association). + +## Argument Reference + +This resource supports the following arguments: + +* `associatedGatewayId` - (Required) The ID of the VGW or transit gateway with which to associate the Direct Connect gateway. +* `dxGatewayId` - (Required) Direct Connect Gateway identifier. +* `dxGatewayOwnerAccountId` - (Required) AWS Account identifier of the Direct Connect Gateway's owner. +* `allowedPrefixes` - (Optional) VPC prefixes (CIDRs) to advertise to the Direct Connect gateway. Defaults to the CIDR block of the VPC associated with the Virtual Gateway. To enable drift detection, must be configured. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Direct Connect Gateway Association Proposal identifier. +* `associatedGatewayOwnerAccountId` - The ID of the AWS account that owns the VGW or transit gateway with which to associate the Direct Connect gateway. +* `associatedGatewayType` - The type of the associated gateway, `transitGateway` or `virtualPrivateGateway`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect Gateway Association Proposals using either a proposal ID or proposal ID, Direct Connect Gateway ID and associated gateway ID separated by `/`. 
For example: + +Using a proposal ID: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using a proposal ID, Direct Connect Gateway ID and associated gateway ID separated by `/`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**With `terraform import`**, import Direct Connect Gateway Association Proposals using either a proposal ID or proposal ID, Direct Connect Gateway ID and associated gateway ID separated by `/`. For example: + +Using a proposal ID: + +```console +% terraform import aws_dx_gateway_association_proposal.example ac90e981-b718-4364-872d-65478c84fafe +``` + +Using a proposal ID, Direct Connect Gateway ID and associated gateway ID separated by `/`: + +```console +% terraform import aws_dx_gateway_association_proposal.example ac90e981-b718-4364-872d-65478c84fafe/abcd1234-dcba-5678-be23-cdef9876ab45/vgw-12345678 +``` + +The latter case is useful when a previous proposal has been accepted and deleted by AWS. +The `awsDxGatewayAssociationProposal` resource will then represent a pseudo-proposal for the same Direct Connect Gateway and associated gateway. If no previous proposal is available, use a tool like [`uuidgen`](http://manpages.ubuntu.com/manpages/bionic/man1/uuidgen.1.html) to generate a new random pseudo-proposal ID. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_hosted_connection.html.markdown b/website/docs/cdktf/typescript/r/dx_hosted_connection.html.markdown new file mode 100644 index 00000000000..6c95b941dce --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_hosted_connection.html.markdown @@ -0,0 +1,67 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_connection" +description: |- + Provides a hosted connection on the specified interconnect or a link aggregation group (LAG) of interconnects. Intended for use by AWS Direct Connect Partners only. +--- + + + +# Resource: aws_dx_hosted_connection + +Provides a hosted connection on the specified interconnect or a link aggregation group (LAG) of interconnects. Intended for use by AWS Direct Connect Partners only. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxHostedConnection } from "./.gen/providers/aws/dx-hosted-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DxHostedConnection(this, "hosted", { + bandwidth: "100Mbps", + connectionId: "dxcon-ffabc123", + name: "tf-dx-hosted-connection", + ownerAccountId: "123456789012", + vlan: 1, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the connection. +* `bandwidth` - (Required) The bandwidth of the connection. 
Valid values for dedicated connections: 1Gbps, 10Gbps. Valid values for hosted connections: 50Mbps, 100Mbps, 200Mbps, 300Mbps, 400Mbps, 500Mbps, 1Gbps, 2Gbps, 5Gbps and 10Gbps. Case sensitive. +* `connectionId` - (Required) The ID of the interconnect or LAG. +* `ownerAccountId` - (Required) The ID of the AWS account of the customer for the connection. +* `vlan` - (Required) The dedicated VLAN provisioned to the hosted connection. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the connection. +* `jumboFrameCapable` - Boolean value representing if jumbo frames have been enabled for this connection. +* `hasLogicalRedundancy` - Indicates whether the connection supports a secondary BGP peer in the same address family (IPv4/IPv6). +* `awsDevice` - The Direct Connect endpoint on which the physical connection terminates. +* `state` - The state of the connection. Possible values include: ordering, requested, pending, available, down, deleting, deleted, rejected, unknown. See [AllocateHostedConnection](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_AllocateHostedConnection.html) for a description of each connection state. +* `lagId` - The ID of the LAG. +* `loaIssueTime` - The time of the most recent call to [DescribeLoa](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeLoa.html) for this connection. +* `location` - The location of the connection. +* `partnerName` - The name of the AWS Direct Connect service provider associated with the connection. +* `providerName` - The name of the service provider associated with the connection. +* `region` - The AWS Region where the connection is located. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_hosted_private_virtual_interface.html.markdown b/website/docs/cdktf/typescript/r/dx_hosted_private_virtual_interface.html.markdown new file mode 100644 index 00000000000..4c90f6dda13 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_hosted_private_virtual_interface.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_private_virtual_interface" +description: |- + Provides a Direct Connect hosted private virtual interface resource. +--- + + + +# Resource: aws_dx_hosted_private_virtual_interface + +Provides a Direct Connect hosted private virtual interface resource. This resource represents the allocator's side of the hosted virtual interface. +A hosted virtual interface is a virtual interface that is owned by another AWS account. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
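+ * (The `MyConfig` interface below is emitted by `cdktf convert` for values it
+ * cannot resolve; supply the owner's AWS account ID as `ownerAccountId` when
+ * instantiating the stack.)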
+ */
+import { DxHostedPrivateVirtualInterface } from "./.gen/providers/aws/dx-hosted-private-virtual-interface";
+interface MyConfig {
+  ownerAccountId: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new DxHostedPrivateVirtualInterface(this, "foo", {
+      addressFamily: "ipv4",
+      bgpAsn: 65352,
+      connectionId: "dxcon-zzzzzzzz",
+      name: "vif-foo",
+      vlan: 4094,
+      ownerAccountId: config.ownerAccountId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `addressFamily` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgpAsn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connectionId` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `ownerAccountId` - (Required) The AWS account that will own the new virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazonAddress` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection. The MTU of a virtual private interface can be either `1500` or `9001` (jumbo frames). Default is `1500`.
+* `bgpAuthKey` - (Optional) The authentication key for BGP configuration.
+* `customerAddress` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `jumboFrameCapable` - Indicates whether jumbo frames (9001 MTU) are supported.
+* `awsDevice` - The Direct Connect endpoint on which the virtual interface terminates.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `update` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted private virtual interfaces using the VIF `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Direct Connect hosted private virtual interfaces using the VIF `id`.
For example: + +```console +% terraform import aws_dx_hosted_private_virtual_interface.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_hosted_private_virtual_interface_accepter.html.markdown b/website/docs/cdktf/typescript/r/dx_hosted_private_virtual_interface_accepter.html.markdown new file mode 100644 index 00000000000..3bf8a516f3a --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_hosted_private_virtual_interface_accepter.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_private_virtual_interface_accepter" +description: |- + Provides a resource to manage the accepter's side of a Direct Connect hosted private virtual interface. +--- + + + +# Resource: aws_dx_hosted_private_virtual_interface_accepter + +Provides a resource to manage the accepter's side of a Direct Connect hosted private virtual interface. +This resource accepts ownership of a private virtual interface created by another AWS account. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DxHostedPrivateVirtualInterface } from "./.gen/providers/aws/dx-hosted-private-virtual-interface"; +import { DxHostedPrivateVirtualInterfaceAccepter } from "./.gen/providers/aws/dx-hosted-private-virtual-interface-accepter"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { VpnGateway } from "./.gen/providers/aws/vpn-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", {}); + const accepter = new AwsProvider(this, "aws_1", { + alias: "accepter", + }); + const vpnGw = new VpnGateway(this, "vpn_gw", { + provider: accepter, + }); + const dataAwsCallerIdentityAccepter = new DataAwsCallerIdentity( + this, + "accepter", + { + provider: accepter, + } + ); + const creator = new DxHostedPrivateVirtualInterface(this, "creator", { + addressFamily: "ipv4", + bgpAsn: 65352, + connectionId: "dxcon-zzzzzzzz", + dependsOn: [vpnGw], + name: "vif-foo", + ownerAccountId: Token.asString(dataAwsCallerIdentityAccepter.accountId), + vlan: 4094, + }); + const awsDxHostedPrivateVirtualInterfaceAccepterAccepter = + new DxHostedPrivateVirtualInterfaceAccepter(this, "accepter_5", { + provider: accepter, + tags: { + Side: "Accepter", + }, + virtualInterfaceId: creator.id, + vpnGatewayId: vpnGw.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxHostedPrivateVirtualInterfaceAccepterAccepter.overrideLogicalId( + "accepter" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `virtualInterfaceId` - (Required) The ID of the Direct Connect virtual interface to accept. +* `dxGatewayId` - (Optional) The ID of the Direct Connect gateway to which to connect the virtual interface. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpnGatewayId` - (Optional) The ID of the [virtual private gateway](vpn_gateway.html) to which to connect the virtual interface. + +### Removing `awsDxHostedPrivateVirtualInterfaceAccepter` from your configuration + +AWS allows a Direct Connect hosted private virtual interface to be deleted from either the allocator's or accepter's side. +However, Terraform only allows the Direct Connect hosted private virtual interface to be deleted from the allocator's side +by removing the corresponding `awsDxHostedPrivateVirtualInterface` resource from your configuration. +Removing a `awsDxHostedPrivateVirtualInterfaceAccepter` resource from your configuration will remove it +from your statefile and management, **but will not delete the Direct Connect virtual interface.** + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the virtual interface. +* `arn` - The ARN of the virtual interface. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted private virtual interfaces using the VIF `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect hosted private virtual interfaces using the VIF `id`. For example: + +```console +% terraform import aws_dx_hosted_private_virtual_interface_accepter.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_hosted_public_virtual_interface.html.markdown b/website/docs/cdktf/typescript/r/dx_hosted_public_virtual_interface.html.markdown new file mode 100644 index 00000000000..21e03d47a68 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_hosted_public_virtual_interface.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_public_virtual_interface" +description: |- + Provides a Direct Connect hosted public virtual interface resource. +--- + + + +# Resource: aws_dx_hosted_public_virtual_interface + +Provides a Direct Connect hosted public virtual interface resource. This resource represents the allocator's side of the hosted virtual interface. +A hosted virtual interface is a virtual interface that is owned by another AWS account. 
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DxHostedPublicVirtualInterface } from "./.gen/providers/aws/dx-hosted-public-virtual-interface";
+interface MyConfig {
+  ownerAccountId: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new DxHostedPublicVirtualInterface(this, "foo", {
+      addressFamily: "ipv4",
+      amazonAddress: "175.45.176.2/30",
+      bgpAsn: 65352,
+      connectionId: "dxcon-zzzzzzzz",
+      customerAddress: "175.45.176.1/30",
+      name: "vif-foo",
+      routeFilterPrefixes: ["210.52.109.0/24", "175.45.176.0/22"],
+      vlan: 4094,
+      ownerAccountId: config.ownerAccountId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `addressFamily` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgpAsn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connectionId` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `ownerAccountId` - (Required) The AWS account that will own the new virtual interface.
+* `routeFilterPrefixes` - (Required) A list of routes to be advertised to the AWS network in this region.
+* `vlan` - (Required) The VLAN ID.
+* `amazonAddress` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `bgpAuthKey` - (Optional) The authentication key for BGP configuration.
+* `customerAddress` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `awsDevice` - The Direct Connect endpoint on which the virtual interface terminates.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted public virtual interfaces using the VIF `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Direct Connect hosted public virtual interfaces using the VIF `id`.
For example: + +```console +% terraform import aws_dx_hosted_public_virtual_interface.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_hosted_public_virtual_interface_accepter.html.markdown b/website/docs/cdktf/typescript/r/dx_hosted_public_virtual_interface_accepter.html.markdown new file mode 100644 index 00000000000..160f795cc70 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_hosted_public_virtual_interface_accepter.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_public_virtual_interface_accepter" +description: |- + Provides a resource to manage the accepter's side of a Direct Connect hosted public virtual interface. +--- + + + +# Resource: aws_dx_hosted_public_virtual_interface_accepter + +Provides a resource to manage the accepter's side of a Direct Connect hosted public virtual interface. +This resource accepts ownership of a public virtual interface created by another AWS account. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DxHostedPublicVirtualInterface } from "./.gen/providers/aws/dx-hosted-public-virtual-interface"; +import { DxHostedPublicVirtualInterfaceAccepter } from "./.gen/providers/aws/dx-hosted-public-virtual-interface-accepter"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", {}); + const accepter = new AwsProvider(this, "aws_1", { + alias: "accepter", + }); + const dataAwsCallerIdentityAccepter = new DataAwsCallerIdentity( + this, + "accepter", + { + provider: accepter, + } + ); + const creator = new DxHostedPublicVirtualInterface(this, "creator", { + addressFamily: "ipv4", + amazonAddress: "175.45.176.2/30", + bgpAsn: 65352, + connectionId: "dxcon-zzzzzzzz", + customerAddress: "175.45.176.1/30", + name: "vif-foo", + ownerAccountId: Token.asString(dataAwsCallerIdentityAccepter.accountId), + routeFilterPrefixes: ["210.52.109.0/24", "175.45.176.0/22"], + vlan: 4094, + }); + const awsDxHostedPublicVirtualInterfaceAccepterAccepter = + new DxHostedPublicVirtualInterfaceAccepter(this, "accepter_4", { + provider: accepter, + tags: { + Side: "Accepter", + }, + virtualInterfaceId: creator.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxHostedPublicVirtualInterfaceAccepterAccepter.overrideLogicalId( + "accepter" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `virtualInterfaceId` - (Required) The ID of the Direct Connect virtual interface to accept. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
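+
+When the hosted virtual interface already exists and only its ID is known, the accepter can also be declared on its own. A minimal sketch (the VIF ID is a placeholder value):
+
+```typescript
+// Sketch only: accept an existing hosted public virtual interface by ID.
+// The virtual interface ID is a placeholder value.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DxHostedPublicVirtualInterfaceAccepter } from "./.gen/providers/aws/dx-hosted-public-virtual-interface-accepter";
+class AccepterOnlySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DxHostedPublicVirtualInterfaceAccepter(this, "accepter", {
+      virtualInterfaceId: "dxvif-33cc44dd",
+      tags: {
+        Side: "Accepter",
+      },
+    });
+  }
+}
+```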
+ +### Removing `awsDxHostedPublicVirtualInterfaceAccepter` from your configuration + +AWS allows a Direct Connect hosted public virtual interface to be deleted from either the allocator's or accepter's side. +However, Terraform only allows the Direct Connect hosted public virtual interface to be deleted from the allocator's side +by removing the corresponding `awsDxHostedPublicVirtualInterface` resource from your configuration. +Removing a `awsDxHostedPublicVirtualInterfaceAccepter` resource from your configuration will remove it +from your statefile and management, **but will not delete the Direct Connect virtual interface.** + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the virtual interface. +* `arn` - The ARN of the virtual interface. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted public virtual interfaces using the VIF `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect hosted public virtual interfaces using the VIF `id`. For example: + +```console +% terraform import aws_dx_hosted_public_virtual_interface_accepter.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_hosted_transit_virtual_interface.html.markdown b/website/docs/cdktf/typescript/r/dx_hosted_transit_virtual_interface.html.markdown new file mode 100644 index 00000000000..b2570666ec0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_hosted_transit_virtual_interface.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_transit_virtual_interface" +description: |- + Provides a Direct Connect hosted transit virtual interface resource. +--- + + + +# Resource: aws_dx_hosted_transit_virtual_interface + +Provides a Direct Connect hosted transit virtual interface resource. +This resource represents the allocator's side of the hosted virtual interface. +A hosted virtual interface is a virtual interface that is owned by another AWS account. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DxHostedTransitVirtualInterface } from "./.gen/providers/aws/dx-hosted-transit-virtual-interface";
+interface MyConfig {
+  ownerAccountId: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new DxHostedTransitVirtualInterface(this, "example", {
+      addressFamily: "ipv4",
+      bgpAsn: 65352,
+      connectionId: Token.asString(awsDxConnectionExample.id),
+      name: "tf-transit-vif-example",
+      vlan: 4094,
+      ownerAccountId: config.ownerAccountId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `addressFamily` - (Required) The address family for the BGP peer. `ipv4` or `ipv6`.
+* `bgpAsn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration.
+* `connectionId` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface.
+* `name` - (Required) The name for the virtual interface.
+* `ownerAccountId` - (Required) The AWS account that will own the new virtual interface.
+* `vlan` - (Required) The VLAN ID.
+* `amazonAddress` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers.
+* `bgpAuthKey` - (Optional) The authentication key for BGP configuration.
+* `customerAddress` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers.
+* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection. The MTU of a virtual transit interface can be either `1500` or `8500` (jumbo frames). Default is `1500`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the virtual interface.
+* `arn` - The ARN of the virtual interface.
+* `awsDevice` - The Direct Connect endpoint on which the virtual interface terminates.
+* `jumboFrameCapable` - Indicates whether jumbo frames (8500 MTU) are supported.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `update` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted transit virtual interfaces using the VIF `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Direct Connect hosted transit virtual interfaces using the VIF `id`.
For example: + +```console +% terraform import aws_dx_hosted_transit_virtual_interface.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_hosted_transit_virtual_interface_accepter.html.markdown b/website/docs/cdktf/typescript/r/dx_hosted_transit_virtual_interface_accepter.html.markdown new file mode 100644 index 00000000000..1c9c696090e --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_hosted_transit_virtual_interface_accepter.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_hosted_transit_virtual_interface_accepter" +description: |- + Provides a resource to manage the accepter's side of a Direct Connect hosted transit virtual interface. +--- + + + +# Resource: aws_dx_hosted_transit_virtual_interface_accepter + +Provides a resource to manage the accepter's side of a Direct Connect hosted transit virtual interface. +This resource accepts ownership of a transit virtual interface created by another AWS account. + +-> **NOTE:** AWS allows a Direct Connect hosted transit virtual interface to be deleted from either the allocator's or accepter's side. However, Terraform only allows the Direct Connect hosted transit virtual interface to be deleted from the allocator's side by removing the corresponding `awsDxHostedTransitVirtualInterface` resource from your configuration. Removing a `awsDxHostedTransitVirtualInterfaceAccepter` resource from your configuration will remove it from your statefile and management, **but will not delete the Direct Connect virtual interface.** + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DxGateway } from "./.gen/providers/aws/dx-gateway"; +import { DxHostedTransitVirtualInterface } from "./.gen/providers/aws/dx-hosted-transit-virtual-interface"; +import { DxHostedTransitVirtualInterfaceAccepter } from "./.gen/providers/aws/dx-hosted-transit-virtual-interface-accepter"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", {}); + const accepter = new AwsProvider(this, "aws_1", { + alias: "accepter", + }); + const example = new DxGateway(this, "example", { + amazonSideAsn: Token.asString(64512), + name: "tf-dxg-example", + provider: accepter, + }); + const dataAwsCallerIdentityAccepter = new DataAwsCallerIdentity( + this, + "accepter", + { + provider: accepter, + } + ); + const creator = new DxHostedTransitVirtualInterface(this, "creator", { + addressFamily: "ipv4", + bgpAsn: 65352, + connectionId: "dxcon-zzzzzzzz", + dependsOn: [example], + name: "tf-transit-vif-example", + ownerAccountId: Token.asString(dataAwsCallerIdentityAccepter.accountId), + vlan: 4094, + }); + const awsDxHostedTransitVirtualInterfaceAccepterAccepter = + new DxHostedTransitVirtualInterfaceAccepter(this, "accepter_5", { + dxGatewayId: example.id, + provider: accepter, + tags: { + Side: "Accepter", + }, + virtualInterfaceId: creator.id, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsDxHostedTransitVirtualInterfaceAccepterAccepter.overrideLogicalId( + "accepter" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dxGatewayId` - (Required) The ID of the [Direct Connect gateway](dx_gateway.html) to which to connect the virtual interface. +* `virtualInterfaceId` - (Required) The ID of the Direct Connect virtual interface to accept. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the virtual interface. +* `arn` - The ARN of the virtual interface. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect hosted transit virtual interfaces using the VIF `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect hosted transit virtual interfaces using the VIF `id`. For example: + +```console +% terraform import aws_dx_hosted_transit_virtual_interface_accepter.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_lag.html.markdown b/website/docs/cdktf/typescript/r/dx_lag.html.markdown new file mode 100644 index 00000000000..29c6b16f4c3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_lag.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_lag" +description: |- + Provides a Direct Connect LAG. +--- + + + +# Resource: aws_dx_lag + +Provides a Direct Connect LAG. Connections can be added to the LAG via the [`awsDxConnection`](/docs/providers/aws/r/dx_connection.html) and [`awsDxConnectionAssociation`](/docs/providers/aws/r/dx_connection_association.html) resources. + +~> *NOTE:* When creating a LAG, if no existing connection is specified, Direct Connect will create a connection and Terraform will remove this unmanaged connection during resource creation. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
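+ * (`forceDestroy: true` below permits the LAG's member connections to be
+ * deleted at destroy time; those connections are not recoverable.)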
+ */
+import { DxLag } from "./.gen/providers/aws/dx-lag";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new DxLag(this, "hoge", {
+      connectionsBandwidth: "1Gbps",
+      forceDestroy: true,
+      location: "EqDC2",
+      name: "tf-dx-lag",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the LAG.
+* `connectionsBandwidth` - (Required) The bandwidth of the individual physical connections bundled by the LAG. Valid values: 50Mbps, 100Mbps, 200Mbps, 300Mbps, 400Mbps, 500Mbps, 1Gbps, 2Gbps, 5Gbps, 10Gbps and 100Gbps. Case sensitive.
+* `location` - (Required) The AWS Direct Connect location in which the LAG should be allocated. See [DescribeLocations](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_DescribeLocations.html) for the list of AWS Direct Connect locations. Use `locationCode`.
+* `connectionId` - (Optional) The ID of an existing dedicated connection to migrate to the LAG.
+* `forceDestroy` - (Optional, Default: `false`) A boolean that indicates all connections associated with the LAG should be deleted so that the LAG can be destroyed without error. These objects are *not* recoverable.
+* `providerName` - (Optional) The name of the service provider associated with the LAG.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the LAG.
+* `hasLogicalRedundancy` - Indicates whether the LAG supports a secondary BGP peer in the same address family (IPv4/IPv6).
+* `id` - The ID of the LAG.
+* `jumboFrameCapable` - Indicates whether jumbo frames (9001 MTU) are supported.
+* `ownerAccountId` - The ID of the AWS account that owns the LAG.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect LAGs using the LAG `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Direct Connect LAGs using the LAG `id`.
For example: + +```console +% terraform import aws_dx_lag.test_lag dxlag-fgnsp5rq +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_macsec_key_association.html.markdown b/website/docs/cdktf/typescript/r/dx_macsec_key_association.html.markdown new file mode 100644 index 00000000000..36b856c29b7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_macsec_key_association.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_macsec_key_association" +description: |- + Provides a MAC Security (MACSec) secret key resource for use with Direct Connect. +--- + + + +# Resource: aws_dx_macsec_key_association + +Provides a MAC Security (MACSec) secret key resource for use with Direct Connect. See [MACsec prerequisites](https://docs.aws.amazon.com/directconnect/latest/UserGuide/direct-connect-mac-sec-getting-started.html#mac-sec-prerequisites) for information about MAC Security (MACsec) prerequisites. + +Creating this resource will also create a resource of type [`awsSecretsmanagerSecret`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/secretsmanager_secret) which is managed by Direct Connect. While you can import this resource into your Terraform state, because this secret is managed by Direct Connect, you will not be able to make any modifications to it. See [How AWS Direct Connect uses AWS Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/integrating_how-services-use-secrets_directconnect.html) for details. + +~> **Note:** All arguments including `ckn` and `cak` will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +~> **Note:** The `secretArn` argument can only be used to reference a previously created MACSec key. You cannot associate a Secrets Manager secret created outside of the `awsDxMacsecKeyAssociation` resource. + +## Example Usage + +### Create MACSec key with CKN and CAK + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsDxConnection } from "./.gen/providers/aws/data-aws-dx-connection"; +import { DxMacsecKeyAssociation } from "./.gen/providers/aws/dx-macsec-key-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsDxConnection(this, "example", { + name: "tf-dx-connection", + }); + new DxMacsecKeyAssociation(this, "test", { + cak: "abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789", + ckn: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + connectionId: Token.asString(example.id), + }); + } +} + +``` + +### Create MACSec key with existing Secrets Manager secret + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
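+ *
+ * The example below looks up the Direct Connect-managed Secrets Manager
+ * secret by name and references it through secretArn, instead of
+ * supplying raw ckn/cak values.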
+ */ +import { DataAwsDxConnection } from "./.gen/providers/aws/data-aws-dx-connection"; +import { DataAwsSecretsmanagerSecret } from "./.gen/providers/aws/data-aws-secretsmanager-secret"; +import { DxMacsecKeyAssociation } from "./.gen/providers/aws/dx-macsec-key-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsDxConnection(this, "example", { + name: "tf-dx-connection", + }); + const dataAwsSecretsmanagerSecretExample = new DataAwsSecretsmanagerSecret( + this, + "example_1", + { + name: "directconnect!prod/us-east-1/directconnect/0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsSecretsmanagerSecretExample.overrideLogicalId("example"); + new DxMacsecKeyAssociation(this, "test", { + connectionId: Token.asString(example.id), + secretArn: Token.asString(dataAwsSecretsmanagerSecretExample.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cak` - (Optional) The MAC Security (MACsec) CAK to associate with the dedicated connection. The valid values are 64 hexadecimal characters (0-9, A-E). Required if using `ckn`. +* `ckn` - (Optional) The MAC Security (MACsec) CKN to associate with the dedicated connection. The valid values are 64 hexadecimal characters (0-9, A-E). Required if using `cak`. +* `connectionId` - (Required) The ID of the dedicated Direct Connect connection. The connection must be a dedicated connection in the `available` state. +* `secretArn` - (Optional) The Amazon Resource Name (ARN) of the MAC Security (MACsec) secret key to associate with the dedicated connection. + +~> **Note:** `ckn` and `cak` are mutually exclusive with `secretArn` - these arguments cannot be used together. If you use `ckn` and `cak`, you should not use `secretArn`. If you use the `secretArn` argument to reference an existing MAC Security (MACSec) secret key, you should not use `ckn` or `cak`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the MAC Security (MACSec) secret key resource. +* `startOn` - The date in UTC format that the MAC Security (MACsec) secret key takes effect. +* `state` - The state of the MAC Security (MACsec) secret key. The possible values are: associating, associated, disassociating, disassociated. See [MacSecKey](https://docs.aws.amazon.com/directconnect/latest/APIReference/API_MacSecKey.html#DX-Type-MacSecKey-state) for descriptions of each state. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_private_virtual_interface.html.markdown b/website/docs/cdktf/typescript/r/dx_private_virtual_interface.html.markdown new file mode 100644 index 00000000000..ac51f4fb1bf --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_private_virtual_interface.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_private_virtual_interface" +description: |- + Provides a Direct Connect private virtual interface resource. +--- + + + +# Resource: aws_dx_private_virtual_interface + +Provides a Direct Connect private virtual interface resource. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxPrivateVirtualInterface } from "./.gen/providers/aws/dx-private-virtual-interface"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DxPrivateVirtualInterface(this, "foo", { + addressFamily: "ipv4", + bgpAsn: 65352, + connectionId: "dxcon-zzzzzzzz", + name: "vif-foo", + vlan: 4094, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `addressFamily` - (Required) The address family for the BGP peer. `ipv4 ` or `ipv6`. +* `bgpAsn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration. +* `connectionId` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface. +* `name` - (Required) The name for the virtual interface. +* `vlan` - (Required) The VLAN ID. +* `amazonAddress` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers. +* `bgpAuthKey` - (Optional) The authentication key for BGP configuration. +* `customerAddress` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers. +* `dxGatewayId` - (Optional) The ID of the Direct Connect gateway to which to connect the virtual interface. +* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection. +The MTU of a virtual private interface can be either `1500` or `9001` (jumbo frames). Default is `1500`. +* `sitelinkEnabled` - (Optional) Indicates whether to enable or disable SiteLink. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpnGatewayId` - (Optional) The ID of the [virtual private gateway](vpn_gateway.html) to which to connect the virtual interface. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the virtual interface. +* `arn` - The ARN of the virtual interface. +* `awsDevice` - The Direct Connect endpoint on which the virtual interface terminates. +* `jumboFrameCapable` - Indicates whether jumbo frames (9001 MTU) are supported. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `update` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect private virtual interfaces using the VIF `id`. 
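+
+~> **Note:** The converted TypeScript stub below is empty; `cdktf convert` appears not to translate `import` blocks. In plain Terraform, the block would look something like this sketch, reusing the resource address and VIF ID from the CLI example further down:
+
+```terraform
+import {
+  to = aws_dx_private_virtual_interface.test
+  id = "dxvif-33cc44dd"
+}
+```
+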
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect private virtual interfaces using the VIF `id`. For example: + +```console +% terraform import aws_dx_private_virtual_interface.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_public_virtual_interface.html.markdown b/website/docs/cdktf/typescript/r/dx_public_virtual_interface.html.markdown new file mode 100644 index 00000000000..b0f7b0fd8da --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_public_virtual_interface.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_public_virtual_interface" +description: |- + Provides a Direct Connect public virtual interface resource. +--- + + + +# Resource: aws_dx_public_virtual_interface + +Provides a Direct Connect public virtual interface resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxPublicVirtualInterface } from "./.gen/providers/aws/dx-public-virtual-interface"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DxPublicVirtualInterface(this, "foo", { + addressFamily: "ipv4", + amazonAddress: "175.45.176.2/30", + bgpAsn: 65352, + connectionId: "dxcon-zzzzzzzz", + customerAddress: "175.45.176.1/30", + name: "vif-foo", + routeFilterPrefixes: ["210.52.109.0/24", "175.45.176.0/22"], + vlan: 4094, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `addressFamily` - (Required) The address family for the BGP peer. `ipv4 ` or `ipv6`. +* `bgpAsn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration. +* `connectionId` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface. +* `name` - (Required) The name for the virtual interface. +* `vlan` - (Required) The VLAN ID. +* `amazonAddress` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers. +* `bgpAuthKey` - (Optional) The authentication key for BGP configuration. +* `customerAddress` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers. +* `routeFilterPrefixes` - (Required) A list of routes to be advertised to the AWS network in this region. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the virtual interface. +* `arn` - The ARN of the virtual interface. 
+* `awsDevice` - The Direct Connect endpoint on which the virtual interface terminates. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect public virtual interfaces using the VIF `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect public virtual interfaces using the VIF `id`. For example: + +```console +% terraform import aws_dx_public_virtual_interface.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dx_transit_virtual_interface.html.markdown b/website/docs/cdktf/typescript/r/dx_transit_virtual_interface.html.markdown new file mode 100644 index 00000000000..7f5fac0749c --- /dev/null +++ b/website/docs/cdktf/typescript/r/dx_transit_virtual_interface.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "Direct Connect" +layout: "aws" +page_title: "AWS: aws_dx_transit_virtual_interface" +description: |- + Provides a Direct Connect transit virtual interface resource. +--- + + + +# Resource: aws_dx_transit_virtual_interface + +Provides a Direct Connect transit virtual interface resource. +A transit virtual interface is a VLAN that transports traffic from a [Direct Connect gateway](dx_gateway.html) to one or more [transit gateways](ec2_transit_gateway.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DxGateway } from "./.gen/providers/aws/dx-gateway"; +import { DxTransitVirtualInterface } from "./.gen/providers/aws/dx-transit-virtual-interface"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DxGateway(this, "example", { + amazonSideAsn: Token.asString(64512), + name: "tf-dxg-example", + }); + const awsDxTransitVirtualInterfaceExample = new DxTransitVirtualInterface( + this, + "example_1", + { + addressFamily: "ipv4", + bgpAsn: 65352, + connectionId: Token.asString(awsDxConnectionExample.id), + dxGatewayId: example.id, + name: "tf-transit-vif-example", + vlan: 4094, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxTransitVirtualInterfaceExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `addressFamily` - (Required) The address family for the BGP peer. `ipv4 ` or `ipv6`. 
+* `bgpAsn` - (Required) The autonomous system (AS) number for Border Gateway Protocol (BGP) configuration. +* `connectionId` - (Required) The ID of the Direct Connect connection (or LAG) on which to create the virtual interface. +* `dxGatewayId` - (Required) The ID of the Direct Connect gateway to which to connect the virtual interface. +* `name` - (Required) The name for the virtual interface. +* `vlan` - (Required) The VLAN ID. +* `amazonAddress` - (Optional) The IPv4 CIDR address to use to send traffic to Amazon. Required for IPv4 BGP peers. +* `bgpAuthKey` - (Optional) The authentication key for BGP configuration. +* `customerAddress` - (Optional) The IPv4 CIDR destination address to which Amazon should send traffic. Required for IPv4 BGP peers. +* `mtu` - (Optional) The maximum transmission unit (MTU) is the size, in bytes, of the largest permissible packet that can be passed over the connection. +The MTU of a virtual transit interface can be either `1500` or `8500` (jumbo frames). Default is `1500`. +* `sitelinkEnabled` - (Optional) Indicates whether to enable or disable SiteLink. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the virtual interface. +* `arn` - The ARN of the virtual interface. +* `awsDevice` - The Direct Connect endpoint on which the virtual interface terminates. +* `jumboFrameCapable` - Indicates whether jumbo frames (8500 MTU) are supported. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `update` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Direct Connect transit virtual interfaces using the VIF `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Direct Connect transit virtual interfaces using the VIF `id`. 
For example: + +```console +% terraform import aws_dx_transit_virtual_interface.test dxvif-33cc44dd +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dynamodb_contributor_insights.html.markdown b/website/docs/cdktf/typescript/r/dynamodb_contributor_insights.html.markdown new file mode 100644 index 00000000000..a3e2a15cf5d --- /dev/null +++ b/website/docs/cdktf/typescript/r/dynamodb_contributor_insights.html.markdown @@ -0,0 +1,70 @@ +--- +subcategory: "DynamoDB" +layout: "aws" +page_title: "AWS: aws_dynamodb_contributor_insights" +description: |- + Provides a DynamoDB contributor insights resource +--- + + + +# Resource: aws_dynamodb_contributor_insights + +Provides a DynamoDB contributor insights resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DynamodbContributorInsights } from "./.gen/providers/aws/dynamodb-contributor-insights"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DynamodbContributorInsights(this, "test", { + tableName: "ExampleTableName", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `tableName` - (Required) The name of the table to enable contributor insights +* `indexName` - (Optional) The global secondary index name + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDynamodbContributorInsights` using the format `name:tableName/index:indexName`, followed by the account number. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsDynamodbContributorInsights` using the format `name:tableName/index:indexName`, followed by the account number. For example: + +```console +% terraform import aws_dynamodb_contributor_insights.test name:ExampleTableName/index:ExampleIndexName/123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dynamodb_global_table.html.markdown b/website/docs/cdktf/typescript/r/dynamodb_global_table.html.markdown new file mode 100644 index 00000000000..135a3eaba53 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dynamodb_global_table.html.markdown @@ -0,0 +1,133 @@ +--- +subcategory: "DynamoDB" +layout: "aws" +page_title: "AWS: aws_dynamodb_global_table" +description: |- + Manages DynamoDB Global Tables V1 (version 2017.11.29) +--- + + + +# Resource: aws_dynamodb_global_table + +Manages [DynamoDB Global Tables V1 (version 2017.11.29)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V1.html). These are layered on top of existing DynamoDB Tables. 
+
+~> **NOTE:** To instead manage [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html), use the [`awsDynamodbTable` resource](/docs/providers/aws/r/dynamodb_table.html) `replica` configuration block.
+
+~> **Note:** There are many restrictions before you can properly create DynamoDB Global Tables in multiple regions. See the [AWS DynamoDB Global Table Requirements](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables_reqs_bestpractices.html) for more information.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DynamodbGlobalTable } from "./.gen/providers/aws/dynamodb-global-table";
+import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const usEast1 = new AwsProvider(this, "aws", {
+      alias: "us-east-1",
+      region: "us-east-1",
+    });
+    const usWest2 = new AwsProvider(this, "aws_1", {
+      alias: "us-west-2",
+      region: "us-west-2",
+    });
+    const awsDynamodbTableUsEast1 = new DynamodbTable(this, "us-east-1", {
+      attribute: [
+        {
+          name: "myAttribute",
+          type: "S",
+        },
+      ],
+      hashKey: "myAttribute",
+      name: "myTable",
+      provider: usEast1,
+      readCapacity: 1,
+      streamEnabled: true,
+      streamViewType: "NEW_AND_OLD_IMAGES",
+      writeCapacity: 1,
+    });
+    const awsDynamodbTableUsWest2 = new DynamodbTable(this, "us-west-2", {
+      attribute: [
+        {
+          name: "myAttribute",
+          type: "S",
+        },
+      ],
+      hashKey: "myAttribute",
+      name: "myTable",
+      provider: usWest2,
+      readCapacity: 1,
+      streamEnabled: true,
+      streamViewType: "NEW_AND_OLD_IMAGES",
+      writeCapacity: 1,
+    });
+    new DynamodbGlobalTable(this, "myTable", {
+      dependsOn: [awsDynamodbTableUsEast1, awsDynamodbTableUsWest2],
+      name: "myTable",
+      provider: usEast1,
+      replica: [
+        {
+          regionName: "us-east-1",
+        },
+        {
+          regionName: "us-west-2",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the global table. Must match underlying DynamoDB Table names in all regions.
+* `replica` - (Required) Underlying DynamoDB Table. At least 1 replica must be defined. See below.
+
+### Nested Fields
+
+#### `replica`
+
+* `regionName` - (Required) AWS region name of the replica DynamoDB Table, e.g., `usEast1`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the DynamoDB Global Table.
+* `arn` - The ARN of the DynamoDB Global Table.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB Global Tables using the global table name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DynamoDB Global Tables using the global table name.
For example: + +```console +% terraform import aws_dynamodb_global_table.MyTable MyTable +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dynamodb_kinesis_streaming_destination.html.markdown b/website/docs/cdktf/typescript/r/dynamodb_kinesis_streaming_destination.html.markdown new file mode 100644 index 00000000000..342eff2f031 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dynamodb_kinesis_streaming_destination.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "DynamoDB" +layout: "aws" +page_title: "AWS: aws_dynamodb_kinesis_streaming_destination" +description: |- + Enables a Kinesis streaming destination for a DynamoDB table +--- + + + +# Resource: aws_dynamodb_kinesis_streaming_destination + +Enables a [Kinesis streaming destination](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/kds.html) for data replication of a DynamoDB table. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DynamodbKinesisStreamingDestination } from "./.gen/providers/aws/dynamodb-kinesis-streaming-destination"; +import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table"; +import { KinesisStream } from "./.gen/providers/aws/kinesis-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DynamodbTable(this, "example", { + attribute: [ + { + name: "id", + type: "S", + }, + ], + hashKey: "id", + name: "orders", + }); + const awsKinesisStreamExample = new KinesisStream(this, "example_1", { + name: "order_item_changes", + shardCount: 1, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKinesisStreamExample.overrideLogicalId("example"); + const awsDynamodbKinesisStreamingDestinationExample = + new DynamodbKinesisStreamingDestination(this, "example_2", { + streamArn: Token.asString(awsKinesisStreamExample.arn), + tableName: example.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDynamodbKinesisStreamingDestinationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `streamArn` - (Required) The ARN for a Kinesis data stream. This must exist in the same account and region as the DynamoDB table. + +* `tableName` - (Required) The name of the DynamoDB table. There + can only be one Kinesis streaming destination for a given DynamoDB table. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `tableName` and `streamArn` separated by a comma (`,`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB Kinesis Streaming Destinations using the `tableName` and `streamArn` separated by `,`. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DynamoDB Kinesis Streaming Destinations using the `tableName` and `streamArn` separated by `,`. For example:
+
+```console
+% terraform import aws_dynamodb_kinesis_streaming_destination.example example,arn:aws:kinesis:us-east-1:111122223333:exampleStreamName
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dynamodb_table.html.markdown b/website/docs/cdktf/typescript/r/dynamodb_table.html.markdown
new file mode 100644
index 00000000000..e4e0a43501a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dynamodb_table.html.markdown
@@ -0,0 +1,338 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table"
+description: |-
+  Provides a DynamoDB table resource
+---
+
+
+
+# Resource: aws_dynamodb_table
+
+Provides a DynamoDB table resource.
+
+~> **Note:** We recommend using `lifecycle` [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) for `readCapacity` and/or `writeCapacity` if there's an [autoscaling policy](/docs/providers/aws/r/appautoscaling_policy.html) attached to the table.
+
+~> **Note:** When using [aws_dynamodb_table_replica](/docs/providers/aws/r/dynamodb_table_replica.html) with this resource, use `lifecycle` [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) for `replica`, _e.g._, `lifecycle { ignore_changes = [replica] }`.
+
+## DynamoDB Table attributes
+
+Only define attributes on the table object that are going to be used as:
+
+* Table hash key or range key
+* LSI or GSI hash key or range key
+
+The DynamoDB API expects attribute structure (name and type) to be passed along when creating or updating GSI/LSIs or creating the initial table. In these cases it expects the Hash / Range keys to be provided. Because these get re-used in numerous places (i.e., the table's range key could be a part of one or more GSIs), they are stored on the table object to prevent duplication and increase consistency. If you add attributes here that are not used in these scenarios, it can cause an infinite loop in planning.
+
+## Example Usage
+
+### Basic Example
+
+The following DynamoDB table description models the table and GSI shown in the [AWS SDK example documentation](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GSI.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
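+ *
+ * The GameScores table below is keyed on UserId (hash) and GameTitle
+ * (range), and adds a GameTitleIndex GSI so scores can also be queried
+ * by game title.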
+ */ +import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DynamodbTable(this, "basic-dynamodb-table", { + attribute: [ + { + name: "UserId", + type: "S", + }, + { + name: "GameTitle", + type: "S", + }, + { + name: "TopScore", + type: "N", + }, + ], + billingMode: "PROVISIONED", + globalSecondaryIndex: [ + { + hashKey: "GameTitle", + name: "GameTitleIndex", + nonKeyAttributes: ["UserId"], + projectionType: "INCLUDE", + rangeKey: "TopScore", + readCapacity: 10, + writeCapacity: 10, + }, + ], + hashKey: "UserId", + name: "GameScores", + rangeKey: "GameTitle", + readCapacity: 20, + tags: { + Environment: "production", + Name: "dynamodb-table-1", + }, + ttl: { + attributeName: "TimeToExist", + enabled: false, + }, + writeCapacity: 20, + }); + } +} + +``` + +### Global Tables + +This resource implements support for [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html) via `replica` configuration blocks. For working with [DynamoDB Global Tables V1 (version 2017.11.29)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V1.html), see the [`awsDynamodbGlobalTable` resource](/docs/providers/aws/r/dynamodb_global_table.html). + +~> **Note:** [aws_dynamodb_table_replica](/docs/providers/aws/r/dynamodb_table_replica.html) is an alternate way of configuring Global Tables. Do not use `replica` configuration blocks of `awsDynamodbTable` together with [aws_dynamodb_table_replica](/docs/providers/aws/r/dynamodb_table_replica.html). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new DynamodbTable(this, "example", { + attribute: [ + { + name: "TestTableHashKey", + type: "S", + }, + ], + billingMode: "PAY_PER_REQUEST", + hashKey: "TestTableHashKey", + name: "example", + replica: [ + { + regionName: "us-east-2", + }, + { + regionName: "us-west-2", + }, + ], + streamEnabled: true, + streamViewType: "NEW_AND_OLD_IMAGES", + }); + } +} + +``` + +### Replica Tagging + +You can manage global table replicas' tags in various ways. This example shows using `replica.*PropagateTags` for the first replica and the `awsDynamodbTag` resource for the other. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
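+ *
+ * The example below replicates the table into two other regions: the
+ * replica in the "third" region inherits the global table's tags via
+ * propagateTags, while the replica in the "alternate" region is tagged
+ * explicitly with a DynamodbTag resource whose resourceArn swaps the
+ * current region for the alternate one.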
+ */ +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table"; +import { DynamodbTag } from "./.gen/providers/aws/dynamodb-tag"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { AwsalternateProvider } from "./.gen/providers/awsalternate/provider"; +import { AwsthirdProvider } from "./.gen/providers/awsthird/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: awsalternate, awsthird. + For a more precise conversion please use the --provider flag in convert.*/ + new AwsProvider(this, "aws", { + region: "us-west-2", + }); + new AwsalternateProvider(this, "awsalternate", { + region: "us-east-1", + }); + new AwsthirdProvider(this, "awsthird", { + region: "us-east-2", + }); + const alternate = new DataAwsRegion(this, "alternate", { + provider: "awsalternate", + }); + const current = new DataAwsRegion(this, "current", {}); + const third = new DataAwsRegion(this, "third", { + provider: "awsthird", + }); + const example = new DynamodbTable(this, "example", { + attribute: [ + { + name: "TestTableHashKey", + type: "S", + }, + ], + billingMode: "PAY_PER_REQUEST", + hashKey: "TestTableHashKey", + name: "example-13281", + replica: [ + { + regionName: Token.asString(alternate.name), + }, + { + propagateTags: true, + regionName: Token.asString(third.name), + }, + ], + streamEnabled: true, + streamViewType: "NEW_AND_OLD_IMAGES", + tags: { + Architect: "Eleanor", + Zone: "SW", + }, + }); + const awsDynamodbTagExample = new DynamodbTag(this, "example_7", { + key: "Architect", + resourceArn: Token.asString( + Fn.replace( + example.arn, + Token.asString(current.name), + Token.asString(alternate.name) + ) + ), + value: "Gigi", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDynamodbTagExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +Required arguments: + +* `attribute` - (Required) Set of nested attribute definitions. Only required for `hashKey` and `rangeKey` attributes. See below. +* `hashKey` - (Required, Forces new resource) Attribute to use as the hash (partition) key. Must also be defined as an `attribute`. See below. +* `name` - (Required) Unique within a region name of the table. + +Optional arguments: + +* `billingMode` - (Optional) Controls how you are charged for read and write throughput and how you manage capacity. The valid values are `provisioned` and `payPerRequest`. Defaults to `provisioned`. +* `deletionProtectionEnabled` - (Optional) Enables deletion protection for table. Defaults to `false`. +* `globalSecondaryIndex` - (Optional) Describe a GSI for the table; subject to the normal limits on the number of GSIs, projected attributes, etc. See below. +* `localSecondaryIndex` - (Optional, Forces new resource) Describe an LSI on the table; these can only be allocated _at creation_ so you cannot change this definition after you have created the resource. See below. +* `pointInTimeRecovery` - (Optional) Enable point-in-time recovery options. See below. +* `rangeKey` - (Optional, Forces new resource) Attribute to use as the range (sort) key. Must also be defined as an `attribute`, see below. +* `readCapacity` - (Optional) Number of read units for this table. 
If the `billingMode` is `provisioned`, this field is required.
+* `replica` - (Optional) Configuration block(s) with [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html) replication configurations. See below.
+* `restoreDateTime` - (Optional) Time of the point-in-time recovery point to restore.
+* `restoreSourceName` - (Optional) Name of the table to restore. Must match the name of an existing table.
+* `restoreToLatestTime` - (Optional) If set, restores table to the most recent point-in-time recovery point.
+* `serverSideEncryption` - (Optional) Encryption at rest options. AWS DynamoDB tables are automatically encrypted at rest with an AWS-owned Customer Master Key if this argument isn't specified. See below.
+* `streamEnabled` - (Optional) Whether Streams are enabled.
+* `streamViewType` - (Optional) When an item in the table is modified, StreamViewType determines what information is written to the table's stream. Valid values are `keysOnly`, `newImage`, `oldImage`, `newAndOldImages`.
+* `tableClass` - (Optional) Storage class of the table.
+  Valid values are `standard` and `standardInfrequentAccess`.
+  Default value is `standard`.
+* `tags` - (Optional) A map of tags to populate on the created table. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `ttl` - (Optional) Configuration block for TTL. See below.
+* `writeCapacity` - (Optional) Number of write units for this table. If the `billingMode` is `provisioned`, this field is required.
+
+### `attribute`
+
+* `name` - (Required) Name of the attribute.
+* `type` - (Required) Attribute type. Valid values are `s` (string), `n` (number), `b` (binary).
+
+### `globalSecondaryIndex`
+
+* `hashKey` - (Required) Name of the hash key in the index; must be defined as an attribute in the resource.
+* `name` - (Required) Name of the index.
+* `nonKeyAttributes` - (Optional) Only required with `include` as a projection type; a list of attributes to project into the index. These do not need to be defined as attributes on the table.
+* `projectionType` - (Required) One of `all`, `include` or `keysOnly` where `all` projects every attribute into the index, `keysOnly` projects only the table and index hash and range key attributes into the index, and `include` projects into the index the attributes defined in `nonKeyAttributes` in addition to those that `keysOnly` projects.
+* `rangeKey` - (Optional) Name of the range key; must be defined as an attribute in the resource.
+* `readCapacity` - (Optional) Number of read units for this index. Must be set if `billingMode` is set to `provisioned`.
+* `writeCapacity` - (Optional) Number of write units for this index. Must be set if `billingMode` is set to `provisioned`.
+
+### `localSecondaryIndex`
+
+* `name` - (Required) Name of the index.
+* `nonKeyAttributes` - (Optional) Only required with `include` as a projection type; a list of attributes to project into the index. These do not need to be defined as attributes on the table.
+* `projectionType` - (Required) One of `all`, `include` or `keysOnly` where `all` projects every attribute into the index, `keysOnly` projects only the table and index hash and range key attributes into the index, and `include` projects into the index the attributes defined in `nonKeyAttributes` in addition to those that `keysOnly` projects.
+* `rangeKey` - (Required) Name of the range key.
+
+### `pointInTimeRecovery`
+
+* `enabled` - (Required) Whether to enable point-in-time recovery. It can take 10 minutes to enable for new tables. If the `pointInTimeRecovery` block is not provided, this defaults to `false`.
+
+### `replica`
+
+* `kmsKeyArn` - (Optional, Forces new resource) ARN of the CMK that should be used for the AWS KMS encryption. This argument should only be used if the key is different from the default KMS-managed DynamoDB key, `alias/aws/dynamodb`. **Note:** This attribute will _not_ be populated with the ARN of _default_ keys.
+* `pointInTimeRecovery` - (Optional) Whether to enable Point In Time Recovery for the replica. Default is `false`.
+* `propagateTags` - (Optional) Whether to propagate the global table's tags to a replica. Default is `false`. Changes to tags only move in one direction: from global (source) to replica. In other words, tag drift on a replica will not trigger an update. Tag or replica changes on the global table, whether from drift or configuration changes, are propagated to replicas. Changing from `true` to `false` on a subsequent `apply` means replica tags are left as they were, unmanaged, not deleted.
+* `regionName` - (Required) Region name of the replica.
+
+### `serverSideEncryption`
+
+* `enabled` - (Required) Whether or not to enable encryption at rest using an AWS managed KMS customer master key (CMK). If `enabled` is `false` then server-side encryption is set to AWS-_owned_ key (shown as `default` in the AWS console). Potentially confusingly, if `enabled` is `true` and no `kmsKeyArn` is specified then server-side encryption is set to the _default_ KMS-_managed_ key (shown as `kms` in the AWS console). The [AWS KMS documentation](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html) explains the difference between AWS-_owned_ and KMS-_managed_ keys.
+* `kmsKeyArn` - (Optional) ARN of the CMK that should be used for the AWS KMS encryption. This argument should only be used if the key is different from the default KMS-managed DynamoDB key, `alias/aws/dynamodb`. **Note:** This attribute will _not_ be populated with the ARN of _default_ keys.
+
+### `ttl`
+
+* `enabled` - (Required) Whether TTL is enabled.
+* `attributeName` - (Required) Name of the table attribute to store the TTL timestamp in.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the table.
+* `id` - Name of the table.
+* `replica.*Arn` - ARN of the replica.
+* `replica.*StreamArn` - ARN of the replica Table Stream. Only available when `stream_enabled = true`.
+* `replica.*StreamLabel` - Timestamp, in ISO 8601 format, for the replica stream. Note that this timestamp is not a unique identifier for the stream on its own. However, the combination of AWS customer ID, table name and this field is guaranteed to be unique. It can be used for creating CloudWatch Alarms. Only available when `stream_enabled = true`.
+* `streamArn` - ARN of the Table Stream. Only available when `stream_enabled = true`.
+* `streamLabel` - Timestamp, in ISO 8601 format, for this stream.
Note that this timestamp is not a unique identifier for the stream on its own. However, the combination of AWS customer ID, table name and this field is guaranteed to be unique. It can be used for creating CloudWatch Alarms. Only available when `stream_enabled = true`. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +~> **Note:** There are a variety of default timeouts set internally. If you set a shorter custom timeout than one of the defaults, the custom timeout will not be respected as the longer of the custom or internal default will be used. + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `60M`) +* `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB tables using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DynamoDB tables using the `name`. For example: + +```console +% terraform import aws_dynamodb_table.basic-dynamodb-table GameScores +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/dynamodb_table_item.html.markdown b/website/docs/cdktf/typescript/r/dynamodb_table_item.html.markdown new file mode 100644 index 00000000000..095f39fb493 --- /dev/null +++ b/website/docs/cdktf/typescript/r/dynamodb_table_item.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "DynamoDB" +layout: "aws" +page_title: "AWS: aws_dynamodb_table_item" +description: |- + Provides a DynamoDB table item resource +--- + + + +# Resource: aws_dynamodb_table_item + +Provides a DynamoDB table item resource + +-> **Note:** This resource is not meant to be used for managing large amounts of data in your table, it is not designed to scale. + You should perform **regular backups** of all data in the table, see [AWS docs for more](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/BackupRestore.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
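+ *
+ * The example below creates a provisioned table keyed on exampleHashKey
+ * and writes a single item; the item body uses DynamoDB's typed JSON
+ * wire format ({"S": ...} for strings, {"N": ...} for numbers).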
+ */
+import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table";
+import { DynamodbTableItem } from "./.gen/providers/aws/dynamodb-table-item";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DynamodbTable(this, "example", {
+      attribute: [
+        {
+          name: "exampleHashKey",
+          type: "S",
+        },
+      ],
+      hashKey: "exampleHashKey",
+      name: "example-name",
+      readCapacity: 10,
+      writeCapacity: 10,
+    });
+    const awsDynamodbTableItemExample = new DynamodbTableItem(
+      this,
+      "example_1",
+      {
+        hashKey: example.hashKey,
+        item: '{\n "exampleHashKey": {"S": "something"},\n "one": {"N": "11111"},\n "two": {"N": "22222"},\n "three": {"N": "33333"},\n "four": {"N": "44444"}\n}\n\n',
+        tableName: example.name,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDynamodbTableItemExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **Note:** Names included in `item` are represented internally with everything but letters removed. There is the possibility of collisions if two names, once filtered, are the same. For example, the names `yourNameHere` and `yournamehere` will overlap and cause an error.
+
+This resource supports the following arguments:
+
+* `hashKey` - (Required) Hash key to use for lookups and identification of the item.
+* `item` - (Required) JSON representation of a map of attribute name/value pairs, one for each attribute. Only the primary key attributes are required; you can optionally provide other attribute name-value pairs for the item.
+* `rangeKey` - (Optional) Range key to use for lookups and identification of the item. Required if there is a range key defined in the table.
+* `tableName` - (Required) Name of the table to contain the item.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+You cannot import DynamoDB table items.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dynamodb_table_replica.html.markdown b/website/docs/cdktf/typescript/r/dynamodb_table_replica.html.markdown
new file mode 100644
index 00000000000..9e4ec8972ec
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dynamodb_table_replica.html.markdown
@@ -0,0 +1,136 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_table_replica"
+description: |-
+  Provides a DynamoDB table replica resource
+---
+
+
+
+# Resource: aws_dynamodb_table_replica
+
+Provides a DynamoDB table replica resource for [DynamoDB Global Tables V2 (version 2019.11.21)](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/globaltables.V2.html).
+
+~> **Note:** Use `lifecycle` [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) for `replica` in the associated [aws_dynamodb_table](/docs/providers/aws/r/dynamodb_table.html) configuration.
+
+~> **Note:** Do not use the `replica` configuration block of [aws_dynamodb_table](/docs/providers/aws/r/dynamodb_table.html) together with this resource as the two configuration options are mutually exclusive.
+
+## Example Usage
+
+### Basic Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table";
+import { DynamodbTableReplicaA } from "./.gen/providers/aws/dynamodb-table-replica";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new AwsProvider(this, "aws", {
+      alias: "main",
+      region: "us-west-2",
+    });
+    const alt = new AwsProvider(this, "aws_1", {
+      alias: "alt",
+      region: "us-east-2",
+    });
+    const example = new DynamodbTable(this, "example", {
+      attribute: [
+        {
+          name: "BrodoBaggins",
+          type: "S",
+        },
+      ],
+      billingMode: "PAY_PER_REQUEST",
+      hashKey: "BrodoBaggins",
+      lifecycle: {
+        ignoreChanges: [replica],
+      },
+      name: "TestTable",
+      provider: main,
+      streamEnabled: true,
+      streamViewType: "NEW_AND_OLD_IMAGES",
+    });
+    const awsDynamodbTableReplicaExample = new DynamodbTableReplicaA(
+      this,
+      "example_3",
+      {
+        globalTableArn: example.arn,
+        provider: alt,
+        tags: {
+          Name: "IZPAWS",
+          Pozo: "Amargo",
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsDynamodbTableReplicaExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+Required arguments:
+
+* `globalTableArn` - (Required) ARN of the _main_ or global table which this resource will replicate.
+
+Optional arguments:
+
+* `kmsKeyArn` - (Optional, Forces new resource) ARN of the CMK that should be used for the AWS KMS encryption. This argument should only be used if the key is different from the default KMS-managed DynamoDB key, `alias/aws/dynamodb`. **Note:** This attribute will _not_ be populated with the ARN of _default_ keys.
+* `pointInTimeRecovery` - (Optional) Whether to enable Point In Time Recovery for the replica. Default is `false`.
+* `tableClassOverride` - (Optional, Forces new resource) Storage class of the table replica. Valid values are `standard` and `standardInfrequentAccess`. If not used, the table replica will use the same class as the global table.
+* `tags` - (Optional) Map of tags to populate on the created table. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the table replica.
+* `id` - Name of the table and region of the main global table joined with a colon (_e.g._, `tableName:usEast1`).
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `20M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DynamoDB table replicas using the `tableName:mainRegion`.
For example:
+
+~> **Note:** When importing, use the region where the initial or _main_ global table resides, _not_ the region of the replica.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import DynamoDB table replicas using the `tableName:mainRegion`. For example:
+
+~> **Note:** When importing, use the region where the initial or _main_ global table resides, _not_ the region of the replica.
+
+```console
+% terraform import aws_dynamodb_table_replica.example TestTable:us-west-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/dynamodb_tag.html.markdown b/website/docs/cdktf/typescript/r/dynamodb_tag.html.markdown
new file mode 100644
index 00000000000..a48273887fe
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/dynamodb_tag.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "DynamoDB"
+layout: "aws"
+page_title: "AWS: aws_dynamodb_tag"
+description: |-
+  Manages an individual DynamoDB resource tag
+---
+
+
+
+# Resource: aws_dynamodb_tag
+
+Manages an individual DynamoDB resource tag. This resource should only be used in cases where DynamoDB resources are created outside Terraform (e.g., Table replicas in other regions).
+
+~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `awsDynamodbTable` and `awsDynamodbTag` to manage tags of the same DynamoDB Table in the same region will cause a perpetual difference where the `awsDynamodbTable` resource will try to remove the tag being added by the `awsDynamodbTag` resource.
+
+~> **NOTE:** This tagging resource does not use the [provider `ignoreTags` configuration](/docs/providers/aws/index.html#ignore_tags).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
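+ *
+ * The example below tags a replica table: Fn.replace substitutes the
+ * replica's region into the main table's ARN, and the tag is applied
+ * through the "replica" provider.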
+ */
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { DynamodbTable } from "./.gen/providers/aws/dynamodb-table";
+import { DynamodbTag } from "./.gen/providers/aws/dynamodb-tag";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+interface MyConfig {
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new AwsProvider(this, "aws", {
+      region: "us-west-2",
+    });
+    const replica = new AwsProvider(this, "aws_1", {
+      alias: "replica",
+      region: "us-east-1",
+    });
+    const current = new DataAwsRegion(this, "current", {});
+    const dataAwsRegionReplica = new DataAwsRegion(this, "replica", {
+      provider: replica,
+    });
+    const example = new DynamodbTable(this, "example", {
+      replica: [
+        {
+          regionName: Token.asString(dataAwsRegionReplica.name),
+        },
+      ],
+      name: config.name,
+    });
+    new DynamodbTag(this, "test", {
+      key: "testkey",
+      provider: replica,
+      resourceArn: Token.asString(
+        Fn.replace(
+          example.arn,
+          Token.asString(current.name),
+          Token.asString(dataAwsRegionReplica.name)
+        )
+      ),
+      value: "testvalue",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceArn` - (Required) Amazon Resource Name (ARN) of the DynamoDB resource to tag.
+* `key` - (Required) Tag name.
+* `value` - (Required) Tag value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - DynamoDB resource identifier and key, separated by a comma (`,`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsDynamodbTag` using the DynamoDB resource identifier and key, separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsDynamodbTag` using the DynamoDB resource identifier and key, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_dynamodb_tag.example arn:aws:dynamodb:us-east-1:123456789012:table/example,Name
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ebs_default_kms_key.html.markdown b/website/docs/cdktf/typescript/r/ebs_default_kms_key.html.markdown
new file mode 100644
index 00000000000..b1a58ddeffd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ebs_default_kms_key.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_default_kms_key"
+description: |-
+  Manages the default customer master key (CMK) that your AWS account uses to encrypt EBS volumes.
+---
+
+
+
+# Resource: aws_ebs_default_kms_key
+
+Provides a resource to manage the default customer master key (CMK) that your AWS account uses to encrypt EBS volumes.
+
+Your AWS account has an AWS-managed default CMK that is used for encrypting an EBS volume when no CMK is specified in the API call that creates the volume.
+By using the `awsEbsDefaultKmsKey` resource, you can specify a customer-managed CMK to use in place of the AWS-managed default CMK.
+
+~> **NOTE:** Creating an `awsEbsDefaultKmsKey` resource does not enable default EBS encryption.
Use the [`awsEbsEncryptionByDefault`](ebs_encryption_by_default.html) to enable default EBS encryption. + +~> **NOTE:** Destroying this resource will reset the default CMK to the account's AWS-managed default CMK for EBS. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EbsDefaultKmsKey } from "./.gen/providers/aws/ebs-default-kms-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EbsDefaultKmsKey(this, "example", { + keyArn: Token.asString(awsKmsKeyExample.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `keyArn` - (Required, ForceNew) The ARN of the AWS Key Management Service (AWS KMS) customer master key (CMK) to use to encrypt the EBS volume. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EBS default KMS CMK using the KMS key ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the EBS default KMS CMK using the KMS key ARN. For example: + +```console +% terraform import aws_ebs_default_kms_key.example arn:aws:kms:us-east-1:123456789012:key/abcd-1234 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ebs_encryption_by_default.html.markdown b/website/docs/cdktf/typescript/r/ebs_encryption_by_default.html.markdown new file mode 100644 index 00000000000..90181356cef --- /dev/null +++ b/website/docs/cdktf/typescript/r/ebs_encryption_by_default.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_encryption_by_default" +description: |- + Manages whether default EBS encryption is enabled for your AWS account in the current AWS region. +--- + + + +# Resource: aws_ebs_encryption_by_default + +Provides a resource to manage whether default EBS encryption is enabled for your AWS account in the current AWS region. To manage the default KMS key for the region, see the [`awsEbsDefaultKmsKey` resource](/docs/providers/aws/r/ebs_default_kms_key.html). + +~> **NOTE:** Removing this Terraform resource disables default EBS encryption. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EbsEncryptionByDefault } from "./.gen/providers/aws/ebs-encryption-by-default"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EbsEncryptionByDefault(this, "example", { + enabled: true, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `enabled` - (Optional) Whether or not default EBS encryption is enabled. Valid values are `true` or `false`. Defaults to `true`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the default EBS encryption state. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the default EBS encryption state. For example: + +```console +% terraform import aws_ebs_encryption_by_default.example default +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ebs_snapshot.html.markdown b/website/docs/cdktf/typescript/r/ebs_snapshot.html.markdown new file mode 100644 index 00000000000..e6599a37dbd --- /dev/null +++ b/website/docs/cdktf/typescript/r/ebs_snapshot.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_snapshot" +description: |- + Provides an elastic block storage snapshot resource. +--- + + + +# Resource: aws_ebs_snapshot + +Creates a Snapshot of an EBS Volume. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EbsSnapshot } from "./.gen/providers/aws/ebs-snapshot"; +import { EbsVolume } from "./.gen/providers/aws/ebs-volume"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new EbsVolume(this, "example", { + availabilityZone: "us-west-2a", + size: 40, + tags: { + Name: "HelloWorld", + }, + }); + new EbsSnapshot(this, "example_snapshot", { + tags: { + Name: "HelloWorld_snap", + }, + volumeId: example.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `volumeId` - (Required) The Volume ID of which to make a snapshot. +* `description` - (Optional) A description of what the snapshot is. +* `outpostArn` - (Optional) The Amazon Resource Name (ARN) of the Outpost on which to create a local snapshot. +* `storageTier` - (Optional) The name of the storage tier. Valid values are `archive` and `standard`. Default value is `standard`. +* `permanentRestore` - (Optional) Indicates whether to permanently restore an archived snapshot. +* `temporaryRestoreDays` - (Optional) Specifies the number of days for which to temporarily restore an archived snapshot. Required for temporary restores only. The snapshot will be automatically re-archived after this period. +* `tags` - (Optional) A map of tags to assign to the snapshot. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the EBS Snapshot. +* `id` - The snapshot ID (e.g., snap-59fcb34e). +* `ownerId` - The AWS account ID of the EBS snapshot owner. +* `ownerAlias` - Value from an Amazon-maintained list (`amazon`, `awsMarketplace`, `microsoft`) of snapshot owners. +* `encrypted` - Whether the snapshot is encrypted. +* `volumeSize` - The size of the drive in GiBs. +* `kmsKeyId` - The ARN for the KMS encryption key. +* `dataEncryptionKeyId` - The data encryption key identifier for the snapshot. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EBS Snapshot using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EBS Snapshot using the `id`. For example: + +```console +% terraform import aws_ebs_snapshot.id snap-049df61146c4d7901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ebs_snapshot_copy.html.markdown b/website/docs/cdktf/typescript/r/ebs_snapshot_copy.html.markdown new file mode 100644 index 00000000000..044618d365c --- /dev/null +++ b/website/docs/cdktf/typescript/r/ebs_snapshot_copy.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_snapshot_copy" +description: |- + Duplicates an existing Amazon snapshot +--- + + + +# Resource: aws_ebs_snapshot_copy + +Creates a Snapshot of a snapshot. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { EbsSnapshot } from "./.gen/providers/aws/ebs-snapshot";
+import { EbsSnapshotCopy } from "./.gen/providers/aws/ebs-snapshot-copy";
+import { EbsVolume } from "./.gen/providers/aws/ebs-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new EbsVolume(this, "example", {
+      availabilityZone: "us-west-2a",
+      size: 40,
+      tags: {
+        Name: "HelloWorld",
+      },
+    });
+    const exampleSnapshot = new EbsSnapshot(this, "example_snapshot", {
+      tags: {
+        Name: "HelloWorld_snap",
+      },
+      volumeId: example.id,
+    });
+    new EbsSnapshotCopy(this, "example_copy", {
+      sourceRegion: "us-west-2",
+      sourceSnapshotId: exampleSnapshot.id,
+      tags: {
+        Name: "HelloWorld_copy_snap",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) A description of what the snapshot is.
+* `encrypted` - (Optional) Whether the snapshot is encrypted.
+* `kmsKeyId` - (Optional) The ARN for the KMS encryption key.
+* `sourceSnapshotId` - (Required) The ID of the snapshot to be copied.
+* `sourceRegion` - (Required) The region of the source snapshot.
+* `storageTier` - (Optional) The name of the storage tier. Valid values are `archive` and `standard`. Default value is `standard`.
+* `permanentRestore` - (Optional) Indicates whether to permanently restore an archived snapshot.
+* `temporaryRestoreDays` - (Optional) Specifies the number of days for which to temporarily restore an archived snapshot. Required for temporary restores only. The snapshot will be automatically re-archived after this period.
+* `tags` - (Optional) A map of tags for the snapshot. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the EBS Snapshot.
+* `id` - The snapshot ID (e.g., snap-59fcb34e).
+* `ownerId` - The AWS account ID of the snapshot owner.
+* `ownerAlias` - Value from an Amazon-maintained list (`amazon`, `awsMarketplace`, `microsoft`) of snapshot owners.
+* `volumeSize` - The size of the drive in GiBs.
+* `dataEncryptionKeyId` - The data encryption key identifier for the snapshot.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ebs_snapshot_import.html.markdown b/website/docs/cdktf/typescript/r/ebs_snapshot_import.html.markdown
new file mode 100644
index 00000000000..9f3b4d69eb9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ebs_snapshot_import.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "EBS (EC2)"
+layout: "aws"
+page_title: "AWS: aws_ebs_snapshot_import"
+description: |-
+  Provides an elastic block storage snapshot import resource.
+---
+
+
+
+# Resource: aws_ebs_snapshot_import
+
+Imports a disk image from S3 as a Snapshot.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EbsSnapshotImport } from "./.gen/providers/aws/ebs-snapshot-import"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EbsSnapshotImport(this, "example", { + diskContainer: { + format: "VHD", + userBucket: { + s3Bucket: "disk-images", + s3Key: "source.vhd", + }, + }, + roleName: "disk-image-import", + tags: { + Name: "HelloWorld", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `clientData` - (Optional) The client-specific data. Detailed below. +* `description` - (Optional) The description string for the import snapshot task. +* `diskContainer` - (Required) Information about the disk container. Detailed below. +* `encrypted` - (Optional) Specifies whether the destination snapshot of the imported image should be encrypted. The default KMS key for EBS is used unless you specify a non-default KMS key using KmsKeyId. +* `kmsKeyId` - (Optional) An identifier for the symmetric KMS key to use when creating the encrypted snapshot. This parameter is only required if you want to use a non-default KMS key; if this parameter is not specified, the default KMS key for EBS is used. If a KmsKeyId is specified, the Encrypted flag must also be set. +* `storageTier` - (Optional) The name of the storage tier. Valid values are `archive` and `standard`. Default value is `standard`. +* `permanentRestore` - (Optional) Indicates whether to permanently restore an archived snapshot. +* `temporaryRestoreDays` - (Optional) Specifies the number of days for which to temporarily restore an archived snapshot. Required for temporary restores only. The snapshot will be automatically re-archived after this period. +* `roleName` - (Optional) The name of the IAM Role the VM Import/Export service will assume. This role needs certain permissions. See https://docs.aws.amazon.com/vm-import/latest/userguide/vmie_prereqs.html#vmimport-role. Default: `vmimport` +* `tags` - (Optional) A map of tags to assign to the snapshot. + +### client_data Configuration Block + +* `comment` - (Optional) A user-defined comment about the disk upload. +* `uploadStart` - (Optional) The time that the disk upload starts. +* `uploadEnd` - (Optional) The time that the disk upload ends. +* `uploadSize` - (Optional) The size of the uploaded disk image, in GiB. + +### disk_container Configuration Block + +* `description` - (Optional) The description of the disk image being imported. +* `format` - (Required) The format of the disk image being imported. One of `vhd` or `vmdk`. +* `url` - (Optional) The URL to the Amazon S3-based disk image being imported. It can either be a https URL (https://..) or an Amazon S3 URL (s3://..). One of `url` or `userBucket` must be set. +* `userBucket` - (Optional) The Amazon S3 bucket for the disk image. One of `url` or `userBucket` must be set. Detailed below. + +### user_bucket Configuration Block + +* `s3Bucket` - The name of the Amazon S3 bucket where the disk image is located. +* `s3Key` - The file name of the disk image. 
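+
+As a complement to the blocks above, the following is a minimal sketch (not generated by `cdktf convert`) that addresses the disk image with `url` instead of a `userBucket` block and encrypts the resulting snapshot; the S3 URL, role name, and tag values are illustrative assumptions, not values taken from this page:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { EbsSnapshotImport } from "./.gen/providers/aws/ebs-snapshot-import";
+class UrlImportSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EbsSnapshotImport(this, "example_url", {
+      diskContainer: {
+        format: "VHD",
+        // Assumed S3 location; exactly one of `url` or `userBucket` may be set.
+        url: "s3://disk-images/source.vhd",
+      },
+      // Encrypts with the default EBS KMS key; set kmsKeyId to use a custom key.
+      encrypted: true,
+      roleName: "disk-image-import",
+      tags: {
+        Name: "HelloWorld-url",
+      },
+    });
+  }
+}
+```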
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `60M`) +- `delete` - (Default `10M`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the EBS Snapshot. +* `id` - The snapshot ID (e.g., snap-59fcb34e). +* `ownerId` - The AWS account ID of the EBS snapshot owner. +* `ownerAlias` - Value from an Amazon-maintained list (`amazon`, `awsMarketplace`, `microsoft`) of snapshot owners. +* `volumeSize` - The size of the drive in GiBs. +* `dataEncryptionKeyId` - The data encryption key identifier for the snapshot. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ebs_volume.html.markdown b/website/docs/cdktf/typescript/r/ebs_volume.html.markdown new file mode 100644 index 00000000000..331e29d5536 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ebs_volume.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_ebs_volume" +description: |- + Provides an elastic block storage resource. +--- + + + +# Resource: aws_ebs_volume + +Manages a single EBS volume. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EbsVolume } from "./.gen/providers/aws/ebs-volume"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EbsVolume(this, "example", { + availabilityZone: "us-west-2a", + size: 40, + tags: { + Name: "HelloWorld", + }, + }); + } +} + +``` + +~> **NOTE:** At least one of `size` or `snapshotId` is required when specifying an EBS volume + +## Argument Reference + +This resource supports the following arguments: + +* `availabilityZone` - (Required) The AZ where the EBS volume will exist. +* `encrypted` - (Optional) If true, the disk will be encrypted. +* `finalSnapshot` - (Optional) If true, snapshot will be created before volume deletion. Any tags on the volume will be migrated to the snapshot. By default set to false +* `iops` - (Optional) The amount of IOPS to provision for the disk. Only valid for `type` of `io1`, `io2` or `gp3`. +* `multiAttachEnabled` - (Optional) Specifies whether to enable Amazon EBS Multi-Attach. Multi-Attach is supported on `io1` and `io2` volumes. +* `size` - (Optional) The size of the drive in GiBs. +* `snapshotId` (Optional) A snapshot to base the EBS volume off of. +* `outpostArn` - (Optional) The Amazon Resource Name (ARN) of the Outpost. +* `type` - (Optional) The type of EBS volume. Can be `standard`, `gp2`, `gp3`, `io1`, `io2`, `sc1` or `st1` (Default: `gp2`). +* `kmsKeyId` - (Optional) The ARN for the KMS encryption key. When specifying `kmsKeyId`, `encrypted` needs to be set to true. 
Note: Terraform must be running with credentials which have the `generateDataKeyWithoutPlaintext` permission on the specified KMS key as required by the [EBS KMS CMK volume provisioning process](https://docs.aws.amazon.com/kms/latest/developerguide/services-ebs.html#ebs-cmk) to prevent a volume from being created and almost immediately deleted. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `throughput` - (Optional) The throughput that the volume supports, in MiB/s. Only valid for `type` of `gp3`. + +~> **NOTE:** When changing the `size`, `iops` or `type` of an instance, there are [considerations](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/considerations.html) to be aware of. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The volume ID (e.g., vol-59fcb34e). +* `arn` - The volume ARN (e.g., arn:aws:ec2:us-east-1:0123456789012:volume/vol-59fcb34e). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5M`) +- `update` - (Default `5M`) +- `delete` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EBS Volumes using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EBS Volumes using the `id`. For example: + +```console +% terraform import aws_ebs_volume.id vol-049df61146c4d7901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_availability_zone_group.html.markdown b/website/docs/cdktf/typescript/r/ec2_availability_zone_group.html.markdown index 98a75ed34ee..6a9172dc9eb 100644 --- a/website/docs/cdktf/typescript/r/ec2_availability_zone_group.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_availability_zone_group.html.markdown @@ -44,18 +44,32 @@ The following arguments are required: * `groupName` - (Required) Name of the Availability Zone Group. * `optInStatus` - (Required) Indicates whether to enable or disable Availability Zone Group. Valid values: `optedIn` or `notOptedIn`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - Name of the Availability Zone Group. ## Import -EC2 Availability Zone Groups can be imported using the group name, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EC2 Availability Zone Groups using the group name. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_availability_zone_group.example us-west-2-lax-1 + +Using `terraform import`, import EC2 Availability Zone Groups using the group name. For example: + +```console +% terraform import aws_ec2_availability_zone_group.example us-west-2-lax-1 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_capacity_reservation.html.markdown b/website/docs/cdktf/typescript/r/ec2_capacity_reservation.html.markdown index ce65fd6ccb6..462762c0017 100644 --- a/website/docs/cdktf/typescript/r/ec2_capacity_reservation.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_capacity_reservation.html.markdown @@ -39,7 +39,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `availabilityZone` - (Required) The Availability Zone in which to create the Capacity Reservation. * `ebsOptimized` - (Optional) Indicates whether the Capacity Reservation supports EBS-optimized instances. @@ -55,9 +55,9 @@ The following arguments are supported: * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `tenancy` - (Optional) Indicates the tenancy of the Capacity Reservation. Specify either `default` or `dedicated`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The Capacity Reservation ID. * `ownerId` - The ID of the AWS account that owns the Capacity Reservation. @@ -66,10 +66,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Capacity Reservations can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Capacity Reservations using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_capacity_reservation.web cr-0123456789abcdef0 + +Using `terraform import`, import Capacity Reservations using the `id`. 
For example: + +```console +% terraform import aws_ec2_capacity_reservation.web cr-0123456789abcdef0 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_carrier_gateway.html.markdown b/website/docs/cdktf/typescript/r/ec2_carrier_gateway.html.markdown index 23a41c37e73..aa6b58ed78b 100644 --- a/website/docs/cdktf/typescript/r/ec2_carrier_gateway.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_carrier_gateway.html.markdown @@ -39,14 +39,14 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `vpcId` - (Required) The ID of the VPC to associate with the carrier gateway. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the carrier gateway. * `id` - The ID of the carrier gateway. @@ -55,11 +55,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2CarrierGateway` can be imported using the carrier gateway's ID, -e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2CarrierGateway` using the carrier gateway's ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_carrier_gateway.example cgw-12345 + +Using `terraform import`, import `awsEc2CarrierGateway` using the carrier gateway's ID. For example: + +```console +% terraform import aws_ec2_carrier_gateway.example cgw-12345 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_client_vpn_authorization_rule.html.markdown b/website/docs/cdktf/typescript/r/ec2_client_vpn_authorization_rule.html.markdown index 4e332caa56c..825f1b71ee9 100644 --- a/website/docs/cdktf/typescript/r/ec2_client_vpn_authorization_rule.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_client_vpn_authorization_rule.html.markdown @@ -39,7 +39,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `clientVpnEndpointId` - (Required) The ID of the Client VPN endpoint. * `targetNetworkCidr` - (Required) The IPv4 address range, in CIDR notation, of the network to which the authorization rule applies. @@ -47,9 +47,9 @@ The following arguments are supported: * `authorizeAllGroups` - (Optional) Indicates whether the authorization rule grants access to all clients. One of `accessGroupId` or `authorizeAllGroups` must be set. * `description` - (Optional) A brief description of the authorization rule. -## Attributes Reference +## Attribute Reference -No additional attributes are exported. +This resource exports no additional attributes. 
## Timeouts @@ -60,14 +60,48 @@ No additional attributes are exported. ## Import -AWS Client VPN authorization rules can be imported using the endpoint ID and target network CIDR. If there is a specific group name that is included as well. All values are separated by a `,`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN authorization rules using the endpoint ID and target network CIDR. If there is a specific group name, include that also. All values are separated by a `,`. For example: + +Using the endpoint ID and target network CIDR: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24 + +Using the endpoint ID, target network CIDR, and group name: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + ``` +**Using `terraform import` to import** AWS Client VPN authorization rules using the endpoint ID and target network CIDR. If there is a specific group name, include that also. All values are separated by a `,`. For example: + +Using the endpoint ID and target network CIDR: + +```console +% terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24 ``` -$ terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24,team-a + +Using the endpoint ID, target network CIDR, and group name: + +```console +% terraform import aws_ec2_client_vpn_authorization_rule.example cvpn-endpoint-0ac3a1abbccddd666,10.1.0.0/24,team-a ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_client_vpn_endpoint.html.markdown b/website/docs/cdktf/typescript/r/ec2_client_vpn_endpoint.html.markdown index 49ceee1cde1..2f6cda4760d 100644 --- a/website/docs/cdktf/typescript/r/ec2_client_vpn_endpoint.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_client_vpn_endpoint.html.markdown @@ -50,7 +50,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `authenticationOptions` - (Required) Information about the authentication method to be used to authenticate clients. * `clientCidrBlock` - (Required) The IPv4 address range, in CIDR notation, from which to assign client IP addresses. The address range cannot overlap with the local CIDR of the VPC in which the associated subnet is located, or the routes that you add manually. The address range cannot be changed after the Client VPN endpoint has been created. The CIDR block should be /22 or greater. @@ -97,9 +97,9 @@ One of the following arguments must be supplied: * `cloudwatchLogStream` - (Optional) The name of the CloudWatch Logs log stream to which the connection data is published. * `enabled` - (Required) Indicates whether connection logging is enabled. 
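+
+To show how the `authenticationOptions` and `connectionLogOptions` blocks fit together, here is a minimal sketch (not generated by `cdktf convert`); the certificate ARNs and log group/stream names are placeholders, not values from this page:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { CloudwatchLogStream } from "./.gen/providers/aws/cloudwatch-log-stream";
+import { Ec2ClientVpnEndpoint } from "./.gen/providers/aws/ec2-client-vpn-endpoint";
+class ClientVpnLoggingSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Connection logs are published to a CloudWatch Logs group and stream.
+    const logGroup = new CloudwatchLogGroup(this, "lg", {
+      name: "client-vpn-example", // placeholder name
+    });
+    const logStream = new CloudwatchLogStream(this, "ls", {
+      logGroupName: logGroup.name,
+      name: "client-vpn-connections", // placeholder name
+    });
+    new Ec2ClientVpnEndpoint(this, "example", {
+      description: "terraform-clientvpn-example",
+      serverCertificateArn:
+        "arn:aws:acm:us-west-2:123456789012:certificate/server", // placeholder ARN
+      clientCidrBlock: "10.0.0.0/16",
+      // Mutual-TLS authentication against a root certificate chain in ACM.
+      authenticationOptions: [
+        {
+          type: "certificate-authentication",
+          rootCertificateChainArn:
+            "arn:aws:acm:us-west-2:123456789012:certificate/root", // placeholder ARN
+        },
+      ],
+      connectionLogOptions: {
+        enabled: true,
+        cloudwatchLogGroup: logGroup.name,
+        cloudwatchLogStream: logStream.name,
+      },
+    });
+  }
+}
+```
+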
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Client VPN endpoint. * `dnsName` - The DNS name to be used by clients when establishing their VPN session. @@ -108,10 +108,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -AWS Client VPN endpoints can be imported using the `id` value found via `aws ec2 describe-client-vpn-endpoints`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN endpoints using the `id` value found via `aws ec2 describe-client-vpn-endpoints`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_client_vpn_endpoint.example cvpn-endpoint-0ac3a1abbccddd666 + +Using `terraform import`, import AWS Client VPN endpoints using the `id` value found via `aws ec2 describe-client-vpn-endpoints`. For example: + +```console +% terraform import aws_ec2_client_vpn_endpoint.example cvpn-endpoint-0ac3a1abbccddd666 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_client_vpn_network_association.html.markdown b/website/docs/cdktf/typescript/r/ec2_client_vpn_network_association.html.markdown index fbf0cefea91..f28089cd04a 100644 --- a/website/docs/cdktf/typescript/r/ec2_client_vpn_network_association.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_client_vpn_network_association.html.markdown @@ -38,14 +38,14 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `clientVpnEndpointId` - (Required) The ID of the Client VPN endpoint. * `subnetId` - (Required) The ID of the subnet to associate with the Client VPN endpoint. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The unique ID of the target network association. * `associationId` - The unique ID of the target network association. @@ -60,10 +60,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -AWS Client VPN network associations can be imported using the endpoint ID and the association ID. Values are separated by a `,`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN network associations using the endpoint ID and the association ID. Values are separated by a `,`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_client_vpn_network_association.example cvpn-endpoint-0ac3a1abbccddd666,vpn-assoc-0b8db902465d069ad + +Using `terraform import`, import AWS Client VPN network associations using the endpoint ID and the association ID. Values are separated by a `,`. For example: + +```console +% terraform import aws_ec2_client_vpn_network_association.example cvpn-endpoint-0ac3a1abbccddd666,vpn-assoc-0b8db902465d069ad ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_client_vpn_route.html.markdown b/website/docs/cdktf/typescript/r/ec2_client_vpn_route.html.markdown index 61e4ab40edc..c45a66f03ea 100644 --- a/website/docs/cdktf/typescript/r/ec2_client_vpn_route.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_client_vpn_route.html.markdown @@ -70,16 +70,16 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `clientVpnEndpointId` - (Required) The ID of the Client VPN endpoint. * `destinationCidrBlock` - (Required) The IPv4 address range, in CIDR notation, of the route destination. * `description` - (Optional) A brief description of the route. * `targetVpcSubnetId` - (Required) The ID of the Subnet to route the traffic through. It must already be attached to the Client VPN. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the Client VPN endpoint. * `origin` - Indicates how the Client VPN route was added. Will be `addRoute` for routes created by this resource. @@ -94,10 +94,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -AWS Client VPN routes can be imported using the endpoint ID, target subnet ID, and destination CIDR block. All values are separated by a `,`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Client VPN routes using the endpoint ID, target subnet ID, and destination CIDR block. All values are separated by a `,`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_client_vpn_route.example cvpn-endpoint-1234567890abcdef,subnet-9876543210fedcba,10.1.0.0/24 + +Using `terraform import`, import AWS Client VPN routes using the endpoint ID, target subnet ID, and destination CIDR block. All values are separated by a `,`. 
For example: + +```console +% terraform import aws_ec2_client_vpn_route.example cvpn-endpoint-1234567890abcdef,subnet-9876543210fedcba,10.1.0.0/24 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_fleet.html.markdown b/website/docs/cdktf/typescript/r/ec2_fleet.html.markdown index 5a95b11adf3..6085e10bee8 100644 --- a/website/docs/cdktf/typescript/r/ec2_fleet.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_fleet.html.markdown @@ -47,7 +47,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `context` - (Optional) Reserved. * `excessCapacityTerminationPolicy` - (Optional) Whether running instances should be terminated if the total target capacity of the EC2 Fleet is decreased below the current size of the EC2. Valid values: `noTermination`, `termination`. Defaults to `termination`. Supported only for fleets of type `maintain`. @@ -233,9 +233,9 @@ This configuration block supports the following: * `totalTargetCapacity` - (Required) The number of units to request, filled using `defaultTargetCapacityType`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - Fleet identifier * `arn` - The ARN of the fleet @@ -259,10 +259,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2Fleet` can be imported by using the Fleet identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2Fleet` using the Fleet identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_fleet.example fleet-b9b55d27-c5fc-41ac-a6f3-48fcc91f080c + +Using `terraform import`, import `awsEc2Fleet` using the Fleet identifier. For example: + +```console +% terraform import aws_ec2_fleet.example fleet-b9b55d27-c5fc-41ac-a6f3-48fcc91f080c ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_host.html.markdown b/website/docs/cdktf/typescript/r/ec2_host.html.markdown index 2038d250e73..34048007468 100644 --- a/website/docs/cdktf/typescript/r/ec2_host.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_host.html.markdown @@ -39,8 +39,9 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: +* `assetId` - (Optional) The ID of the Outpost hardware asset on which to allocate the Dedicated Hosts. This parameter is supported only if you specify OutpostArn. If you are allocating the Dedicated Hosts in a Region, omit this parameter. * `autoPlacement` - (Optional) Indicates whether the host accepts any untargeted instance launches that match its instance type configuration, or if it only accepts Host tenancy instance launches that specify its unique host ID. Valid values: `on`, `off`. Default: `on`. * `availabilityZone` - (Required) The Availability Zone in which to allocate the Dedicated Host. 
* `hostRecovery` - (Optional) Indicates whether to enable or disable host recovery for the Dedicated Host. Valid values: `on`, `off`. Default: `off`. @@ -49,9 +50,9 @@ The following arguments are supported: * `outpostArn` - (Optional) The Amazon Resource Name (ARN) of the AWS Outpost on which to allocate the Dedicated Host. * `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the allocated Dedicated Host. This is used to launch an instance onto a specific host. * `arn` - The ARN of the Dedicated Host. @@ -60,10 +61,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Hosts can be imported using the host `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import hosts using the host `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_host.example h-0385a99d0e4b20cbb + +Using `terraform import`, import hosts using the host `id`. For example: + +```console +% terraform import aws_ec2_host.example h-0385a99d0e4b20cbb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_instance_connect_endpoint.html.markdown b/website/docs/cdktf/typescript/r/ec2_instance_connect_endpoint.html.markdown new file mode 100644 index 00000000000..6dd68cacbba --- /dev/null +++ b/website/docs/cdktf/typescript/r/ec2_instance_connect_endpoint.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_ec2_instance_connect_endpoint" +description: |- + Provides an EC2 Instance Connect Endpoint resource. +--- + + + +# Resource: aws_ec2_instance_connect_endpoint + +Manages an EC2 Instance Connect Endpoint. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Ec2InstanceConnectEndpoint } from "./.gen/providers/aws/ec2-instance-connect-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Ec2InstanceConnectEndpoint(this, "example", { + subnetId: Token.asString(awsSubnetExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `preserveClientIp` - (Optional) Indicates whether your client's IP address is preserved as the source. Default: `true`. +* `securityGroupIds` - (Optional) One or more security groups to associate with the endpoint. 
If you don't specify a security group, the default security group for the VPC will be associated with the endpoint. +* `subnetId` - (Required) The ID of the subnet in which to create the EC2 Instance Connect Endpoint. +* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `delete` - (Default `10M`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the EC2 Instance Connect Endpoint. +* `availabilityZone` - The Availability Zone of the EC2 Instance Connect Endpoint. +* `dnsName` - The DNS name of the EC2 Instance Connect Endpoint. +* `fipsDnsName` - The DNS name of the EC2 Instance Connect FIPS Endpoint. +* `networkInterfaceIds` - The IDs of the ENIs that Amazon EC2 automatically created when creating the EC2 Instance Connect Endpoint. +* `ownerId` - The ID of the AWS account that created the EC2 Instance Connect Endpoint. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpcId` - The ID of the VPC in which the EC2 Instance Connect Endpoint was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EC2 Instance Connect Endpoints using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EC2 Instance Connect Endpoints using the `id`. For example: + +```console +% terraform import aws_ec2_instance_connect_endpoint.example eice-012345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_instance_state.html.markdown b/website/docs/cdktf/typescript/r/ec2_instance_state.html.markdown index b5abaf9ed7a..1c99129453b 100644 --- a/website/docs/cdktf/typescript/r/ec2_instance_state.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_instance_state.html.markdown @@ -73,9 +73,9 @@ The following arguments are optional: * `force` - (Optional) Whether to request a forced stop when `state` is `stopped`. Otherwise (_i.e._, `state` is `running`), ignored. When an instance is forced to stop, it does not flush file system caches or file system metadata, and you must subsequently perform file system check and repair. Not recommended for Windows instances. Defaults to `false`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - ID of the instance (matches `instanceId`). 
@@ -89,10 +89,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2InstanceState` can be imported by using the `instanceId` attribute, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2InstanceState` using the `instanceId` attribute. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_instance_state.test i-02cae6557dfcf2f96 + +Using `terraform import`, import `awsEc2InstanceState` using the `instanceId` attribute. For example: + +```console +% terraform import aws_ec2_instance_state.test i-02cae6557dfcf2f96 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_local_gateway_route.html.markdown b/website/docs/cdktf/typescript/r/ec2_local_gateway_route.html.markdown index 13889e9027b..14df7402393 100644 --- a/website/docs/cdktf/typescript/r/ec2_local_gateway_route.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_local_gateway_route.html.markdown @@ -48,18 +48,32 @@ The following arguments are required: * `localGatewayRouteTableId` - (Required) Identifier of EC2 Local Gateway Route Table. * `localGatewayVirtualInterfaceGroupId` - (Required) Identifier of EC2 Local Gateway Virtual Interface Group. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`) ## Import -`awsEc2LocalGatewayRoute` can be imported by using the EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2LocalGatewayRoute` using the EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_local_gateway_route.example lgw-rtb-12345678_172.16.0.0/16 + +Using `terraform import`, import `awsEc2LocalGatewayRoute` using the EC2 Local Gateway Route Table identifier and destination CIDR block separated by underscores (`_`). 
For example: + +```console +% terraform import aws_ec2_local_gateway_route.example lgw-rtb-12345678_172.16.0.0/16 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_local_gateway_route_table_vpc_association.html.markdown b/website/docs/cdktf/typescript/r/ec2_local_gateway_route_table_vpc_association.html.markdown index b828a524155..a00012b8630 100644 --- a/website/docs/cdktf/typescript/r/ec2_local_gateway_route_table_vpc_association.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_local_gateway_route_table_vpc_association.html.markdown @@ -65,19 +65,33 @@ The following arguments are optional: * `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - Identifier of EC2 Local Gateway Route Table VPC Association. * `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). ## Import -`awsEc2LocalGatewayRouteTableVpcAssociation` can be imported by using the Local Gateway Route Table VPC Association identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2LocalGatewayRouteTableVpcAssociation` using the Local Gateway Route Table VPC Association identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_local_gateway_route_table_vpc_association.example lgw-vpc-assoc-1234567890abcdef + +Using `terraform import`, import `awsEc2LocalGatewayRouteTableVpcAssociation` using the Local Gateway Route Table VPC Association identifier. For example: + +```console +% terraform import aws_ec2_local_gateway_route_table_vpc_association.example lgw-vpc-assoc-1234567890abcdef ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_managed_prefix_list.html.markdown b/website/docs/cdktf/typescript/r/ec2_managed_prefix_list.html.markdown index 3962e759304..f1b27250f88 100644 --- a/website/docs/cdktf/typescript/r/ec2_managed_prefix_list.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_managed_prefix_list.html.markdown @@ -65,7 +65,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `addressFamily` - (Required, Forces new resource) Address family (`iPv4` or `iPv6`) of this prefix list. * `entry` - (Optional) Configuration block for prefix list entry. Detailed below. Different entries may have overlapping CIDR blocks, but a particular CIDR should not be duplicated. 
@@ -78,9 +78,9 @@ The following arguments are supported:
* `cidr` - (Required) CIDR block of this entry.
* `description` - (Optional) Description of this entry. Due to API limitations, updating only the description of an existing entry requires temporarily removing and re-adding the entry.
-## Attributes Reference
+## Attribute Reference
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:
* `arn` - ARN of the prefix list.
* `id` - ID of the prefix list.
@@ -90,10 +90,24 @@ In addition to all arguments above, the following attributes are exported:
## Import
-Prefix Lists can be imported using the `id`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Prefix Lists using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
```
-$ terraform import aws_ec2_managed_prefix_list.default pl-0570a1d2d725c16be
+
+Using `terraform import`, import Prefix Lists using the `id`. For example:
+
+```console
+% terraform import aws_ec2_managed_prefix_list.default pl-0570a1d2d725c16be
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ec2_managed_prefix_list_entry.html.markdown b/website/docs/cdktf/typescript/r/ec2_managed_prefix_list_entry.html.markdown
index cdd9b408b5f..1ec5aec3ebb 100644
--- a/website/docs/cdktf/typescript/r/ec2_managed_prefix_list_entry.html.markdown
+++ b/website/docs/cdktf/typescript/r/ec2_managed_prefix_list_entry.html.markdown
@@ -3,30 +3,22 @@ subcategory: "VPC (Virtual Private Cloud)"
layout: "aws"
page_title: "AWS: aws_ec2_managed_prefix_list_entry"
description: |-
-  Provides a managed prefix list entry resource.
+  Use the `awsEc2ManagedPrefixListEntry` resource to manage a managed prefix list entry.
---

# Resource: aws_ec2_managed_prefix_list_entry
-Provides a managed prefix list entry resource.
+Use the `awsEc2ManagedPrefixListEntry` resource to manage a managed prefix list entry.
-~> **NOTE on Managed Prefix Lists and Managed Prefix List Entries:** Terraform
-currently provides both a standalone Managed Prefix List Entry resource (a single entry),
-and a [Managed Prefix List resource](ec2_managed_prefix_list.html) with entries defined
-in-line. At this time you cannot use a Managed Prefix List with in-line rules in
-conjunction with any Managed Prefix List Entry resources. Doing so will cause a conflict
-of entries and will overwrite entries.
+~> **NOTE:** Terraform currently provides two resources for managing Managed Prefix Lists and Managed Prefix List Entries: the standalone [Managed Prefix List Entry resource](ec2_managed_prefix_list_entry.html), which manages a single entry, and the [Managed Prefix List resource](ec2_managed_prefix_list.html), which manages multiple entries defined in-line. You cannot use a Managed Prefix List with in-line rules in conjunction with any Managed Prefix List Entry resources; doing so causes a conflict of entries and entries will be overwritten.
-~> **NOTE on Managed Prefix Lists with many entries:** To improved execution times on larger
-updates, if you plan to create a prefix list with more than 100 entries, it is **recommended**
-that you use the inline `entry` block as part of the [Managed Prefix List resource](ec2_managed_prefix_list.html)
-resource instead.
+~> **NOTE:** To improve execution times on larger updates, use the inline `entry` block as part of the [Managed Prefix List resource](ec2_managed_prefix_list.html) when creating a prefix list with more than 100 entries.

## Example Usage
-Basic usage
+Basic usage.

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
@@ -61,24 +53,38 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference
-The following arguments are supported:
+This resource supports the following arguments:

* `cidr` - (Required) CIDR block of this entry.
-* `description` - (Optional) Description of this entry. Due to API limitations, updating only the description of an entry requires recreating the entry.
+* `description` - (Optional) Description of this entry. Note that, due to API limitations, updating only the description of an entry requires recreating the entry.
* `prefixListId` - (Required) CIDR block of this entry.
-## Attributes Reference
+## Attribute Reference
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - ID of the managed prefix list entry.

## Import
-Prefix List Entries can be imported using the `prefixListId` and `cidr` separated by a `,`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import prefix list entries using `prefixListId` and `cidr` separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
```
-$ terraform import aws_ec2_managed_prefix_list_entry.default pl-0570a1d2d725c16be,10.0.3.0/24
+
+Using `terraform import`, import prefix list entries using `prefixListId` and `cidr` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_ec2_managed_prefix_list_entry.default pl-0570a1d2d725c16be,10.0.3.0/24
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ec2_network_insights_analysis.html.markdown b/website/docs/cdktf/typescript/r/ec2_network_insights_analysis.html.markdown
index 95d59a83a79..bb73a2a5872 100644
--- a/website/docs/cdktf/typescript/r/ec2_network_insights_analysis.html.markdown
+++ b/website/docs/cdktf/typescript/r/ec2_network_insights_analysis.html.markdown
@@ -52,9 +52,9 @@ The following arguments are optional:

* `waitForCompletion` - (Optional) If enabled, the resource will wait for the Network Insights Analysis status to change to `succeeded` or `failed`. Setting this to `false` will skip the process. Default: `true`.
* `tags` - (Optional) Map of tags to assign to the resource.
If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `alternatePathHints` - Potential intermediate components of a feasible path. Described below. * `arn` - ARN of the Network Insights Analysis. @@ -76,10 +76,24 @@ The `alternatePathHints` object supports the following: ## Import -Network Insights Analyses can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Insights Analyses using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_network_insights_analysis.test nia-0462085c957f11a55 + +Using `terraform import`, import Network Insights Analyses using the `id`. For example: + +```console +% terraform import aws_ec2_network_insights_analysis.test nia-0462085c957f11a55 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_network_insights_path.html.markdown b/website/docs/cdktf/typescript/r/ec2_network_insights_path.html.markdown index d609e87ad81..d0801355788 100644 --- a/website/docs/cdktf/typescript/r/ec2_network_insights_path.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_network_insights_path.html.markdown @@ -51,9 +51,9 @@ The following arguments are optional: * `destinationPort` - (Optional) Destination port to analyze access to. * `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the Network Insights Path. * `id` - ID of the Network Insights Path. @@ -61,10 +61,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Network Insights Paths can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Insights Paths using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_network_insights_path.test nip-00edfba169923aefd + +Using `terraform import`, import Network Insights Paths using the `id`. 
For example:
+
+```console
+% terraform import aws_ec2_network_insights_path.test nip-00edfba169923aefd
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ec2_serial_console_access.html.markdown b/website/docs/cdktf/typescript/r/ec2_serial_console_access.html.markdown
index 1edb64f20bc..595cbd9211a 100644
--- a/website/docs/cdktf/typescript/r/ec2_serial_console_access.html.markdown
+++ b/website/docs/cdktf/typescript/r/ec2_serial_console_access.html.markdown
@@ -38,20 +38,34 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference
-The following arguments are supported:
+This resource supports the following arguments:

* `enabled` - (Optional) Whether or not serial console access is enabled. Valid values are `true` or `false`. Defaults to `true`.
-## Attributes Reference
+## Attribute Reference
-No additional attributes are exported.
+This resource exports no additional attributes.

## Import
-Serial console access state can be imported, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import serial console access state. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
```
-$ terraform import aws_ec2_serial_console_access.example default
+
+Using `terraform import`, import serial console access state. For example:
+
+```console
+% terraform import aws_ec2_serial_console_access.example default
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ec2_subnet_cidr_reservation.html.markdown b/website/docs/cdktf/typescript/r/ec2_subnet_cidr_reservation.html.markdown
index 38123cbd029..8b797547d11 100644
--- a/website/docs/cdktf/typescript/r/ec2_subnet_cidr_reservation.html.markdown
+++ b/website/docs/cdktf/typescript/r/ec2_subnet_cidr_reservation.html.markdown
@@ -38,26 +38,40 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference
-The following arguments are supported:
+This resource supports the following arguments:

* `cidrBlock` - (Required) The CIDR block for the reservation.
* `reservationType` - (Required) The type of reservation to create. Valid values: `explicit`, `prefix`
* `subnetId` - (Required) The ID of the subnet to create the reservation for.
* `description` - (Optional) A brief description of the reservation.
-## Attributes Reference
+## Attribute Reference
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - ID of the CIDR reservation.
* `ownerId` - ID of the AWS account that owns this CIDR reservation.

## Import
-Existing CIDR reservations can be imported using `subnetId:reservationId`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import existing CIDR reservations using `subnetId:reservationId`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
```
-$ terraform import aws_ec2_subnet_cidr_reservation.example subnet-01llsxvsxabqiymcz:scr-4mnvz6wb7otksjcs9
+
+Using `terraform import`, import existing CIDR reservations using `subnetId:reservationId`. For example:
+
+```console
+% terraform import aws_ec2_subnet_cidr_reservation.example subnet-01llsxvsxabqiymcz:scr-4mnvz6wb7otksjcs9
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ec2_tag.html.markdown b/website/docs/cdktf/typescript/r/ec2_tag.html.markdown
index 0f21b268a2b..d0db2f09216 100644
--- a/website/docs/cdktf/typescript/r/ec2_tag.html.markdown
+++ b/website/docs/cdktf/typescript/r/ec2_tag.html.markdown
@@ -68,24 +68,38 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference
-The following arguments are supported:
+This resource supports the following arguments:

* `resourceId` - (Required) The ID of the EC2 resource to manage the tag for.
* `key` - (Required) The tag name.
* `value` - (Required) The value of the tag.
-## Attributes Reference
+## Attribute Reference
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `id` - EC2 resource identifier and key, separated by a comma (`,`)

## Import
-`awsEc2Tag` can be imported by using the EC2 resource identifier and key, separated by a comma (`,`), e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2Tag` using the EC2 resource identifier and key, separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
```
-$ terraform import aws_ec2_tag.example tgw-attach-1234567890abcdef,Name
+
+Using `terraform import`, import `awsEc2Tag` using the EC2 resource identifier and key, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_ec2_tag.example tgw-attach-1234567890abcdef,Name
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter.html.markdown b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter.html.markdown
index 0306d02bdd6..823b7df6a1f 100644
--- a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter.html.markdown
+++ b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter.html.markdown
@@ -40,15 +40,15 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference
-The following arguments are supported:
+This resource supports the following arguments:

* `description` - (Optional, Forces new resource) A description of the filter.
* `networkServices` - (Optional) List of amazon network services that should be mirrored. Valid values: `amazonDns`.
* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
-## Attributes Reference
+## Attribute Reference
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `arn` - The ARN of the traffic mirror filter.
* `id` - The name of the filter.
@@ -56,10 +56,24 @@ In addition to all arguments above, the following attributes are exported:

## Import
-Traffic mirror filter can be imported using the `id`, e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror filters using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
```
-$ terraform import aws_ec2_traffic_mirror_filter.foo tmf-0fbb93ddf38198f64
+
+Using `terraform import`, import traffic mirror filters using the `id`. For example:
+
+```console
+% terraform import aws_ec2_traffic_mirror_filter.foo tmf-0fbb93ddf38198f64
```
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter_rule.html.markdown b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter_rule.html.markdown
index b7609d24b4c..8bffd3a8301 100644
--- a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter_rule.html.markdown
+++ b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_filter_rule.html.markdown
@@ -68,7 +68,7 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference
-The following arguments are supported:
+This resource supports the following arguments:

* `description` - (Optional) Description of the traffic mirror filter rule.
* `trafficMirrorFilterId` - (Required) ID of the traffic mirror filter to which this rule should be added
@@ -86,19 +86,33 @@ Traffic mirror port range support following attributes:

* `fromPort` - (Optional) Starting port of the range
* `toPort` - (Optional) Ending port of the range
-## Attributes Reference
+## Attribute Reference
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:

* `arn` - ARN of the traffic mirror filter rule.
* `id` - Name of the traffic mirror filter rule.

## Import
-Traffic mirror rules can be imported using the `trafficMirrorFilterId` and `id` separated by `:` e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror rules using the `trafficMirrorFilterId` and `id` separated by `:`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_traffic_mirror_filter_rule.rule tmf-0fbb93ddf38198f64:tmfr-05a458f06445d0aee + +Using `terraform import`, import traffic mirror rules using the `trafficMirrorFilterId` and `id` separated by `:`. For example: + +```console +% terraform import aws_ec2_traffic_mirror_filter_rule.rule tmf-0fbb93ddf38198f64:tmfr-05a458f06445d0aee ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_session.html.markdown b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_session.html.markdown index 6ee861c3aef..aa641d018b2 100644 --- a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_session.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_session.html.markdown @@ -52,7 +52,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `description` - (Optional) A description of the traffic mirror session. * `networkInterfaceId` - (Required, Forces new) ID of the source network interface. Not all network interfaces are eligible as mirror sources. On EC2 instances only nitro based instances support mirroring. @@ -63,9 +63,9 @@ The following arguments are supported: * `virtualNetworkId` - (Optional) - The VXLAN ID for the Traffic Mirror session. For more information about the VXLAN protocol, see RFC 7348. If you do not specify a VirtualNetworkId, an account-wide unique id is chosen at random. * `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the traffic mirror session. * `id` - The name of the session. @@ -74,10 +74,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Traffic mirror sessions can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror sessions using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_traffic_mirror_session.session tms-0d8aa3ca35897b82e + +Using `terraform import`, import traffic mirror sessions using the `id`. 
For example: + +```console +% terraform import aws_ec2_traffic_mirror_session.session tms-0d8aa3ca35897b82e ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_target.html.markdown b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_target.html.markdown index f329da704aa..a5fc2e142dc 100644 --- a/website/docs/cdktf/typescript/r/ec2_traffic_mirror_target.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_traffic_mirror_target.html.markdown @@ -48,7 +48,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `description` - (Optional, Forces new) A description of the traffic mirror session. * `networkInterfaceId` - (Optional, Forces new) The network interface ID that is associated with the target. @@ -58,9 +58,9 @@ The following arguments are supported: **NOTE:** Either `networkInterfaceId` or `networkLoadBalancerArn` should be specified and both should not be specified together -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the Traffic Mirror target. * `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). @@ -69,10 +69,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Traffic mirror targets can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import traffic mirror targets using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_traffic_mirror_target.target tmt-0c13a005422b86606 + +Using `terraform import`, import traffic mirror targets using the `id`. For example: + +```console +% terraform import aws_ec2_traffic_mirror_target.target tmt-0c13a005422b86606 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway.html.markdown index 68302dc1cbb..1db9d6982dd 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway.html.markdown @@ -36,7 +36,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `amazonSideAsn` - (Optional) Private Autonomous System Number (ASN) for the Amazon side of a BGP session. The range is `64512` to `65534` for 16-bit ASNs and `4200000000` to `4294967294` for 32-bit ASNs. Default value: `64512`. @@ -52,9 +52,9 @@ The following arguments are supported: * `transitGatewayCidrBlocks` - (Optional) One or more IPv4 or IPv6 CIDR blocks for the transit gateway. Must be a size /24 CIDR block or larger for IPv4, or a size /64 CIDR block or larger for IPv6. 
* `vpnEcmpSupport` - (Optional) Whether VPN Equal Cost Multipath Protocol support is enabled. Valid values: `disable`, `enable`. Default value: `enable`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Amazon Resource Name (ARN) * `associationDefaultRouteTableId` - Identifier of the default association route table @@ -73,10 +73,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGateway` can be imported by using the EC2 Transit Gateway identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGateway` using the EC2 Transit Gateway identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway.example tgw-12345678 + +Using `terraform import`, import `awsEc2TransitGateway` using the EC2 Transit Gateway identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway.example tgw-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect.html.markdown index 3cc51991b62..6321b39916d 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect.html.markdown @@ -43,18 +43,18 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: -* `protocol` - (Optional) The tunnel protocol. Valida values: `gre`. Default is `gre`. +* `protocol` - (Optional) The tunnel protocol. Valid values: `gre`. Default is `gre`. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Connect. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `transitGatewayDefaultRouteTableAssociation` - (Optional) Boolean whether the Connect should be associated with the EC2 Transit Gateway association default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`. * `transitGatewayDefaultRouteTablePropagation` - (Optional) Boolean whether the Connect should propagate routes with the EC2 Transit Gateway propagation default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`. * `transitGatewayId` - (Required) Identifier of EC2 Transit Gateway. 
* `transportAttachmentId` - (Required) The underlaying VPC attachment -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). @@ -69,10 +69,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayConnect` can be imported by using the EC2 Transit Gateway Connect identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayConnect` using the EC2 Transit Gateway Connect identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_connect.example tgw-attach-12345678 + +Using `terraform import`, import `awsEc2TransitGatewayConnect` using the EC2 Transit Gateway Connect identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_connect.example tgw-attach-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect_peer.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect_peer.html.markdown index d25dea8798f..d097607060a 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect_peer.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_connect_peer.html.markdown @@ -48,7 +48,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `bgpAsn` - (Optional) The BGP ASN number assigned customer device. If not provided, it will use the same BGP ASN as is associated with Transit Gateway. * `insideCidrBlocks` - (Required) The CIDR block that will be used for addressing within the tunnel. It must contain exactly one IPv4 CIDR block and up to one IPv6 CIDR block. The IPv4 CIDR block must be /29 size and must be within 169.254.0.0/16 range, with exception of: 169.254.0.0/29, 169.254.1.0/29, 169.254.2.0/29, 169.254.3.0/29, 169.254.4.0/29, 169.254.5.0/29, 169.254.169.248/29. The IPv6 CIDR block must be /125 size and must be within fd00::/8. The first IP from each CIDR block is assigned for customer gateway, the second and third is for Transit Gateway (An example: from range 169.254.100.0/29, .1 is assigned to customer gateway and .2 and .3 are assigned to Transit Gateway) @@ -57,9 +57,9 @@ The following arguments are supported: * `transitGatewayAddress` - (Optional) The IP address assigned to Transit Gateway, which will be used as tunnel endpoint. This address must be from associated Transit Gateway CIDR block. The address must be from the same address family as `peerAddress`. 
If not set explicitly, it will be selected from associated Transit Gateway CIDR blocks * `transitGatewayAttachmentId` - (Required) The Transit Gateway Connect -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Connect Peer identifier * `arn` - EC2 Transit Gateway Connect Peer ARN @@ -76,10 +76,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayConnectPeer` can be imported by using the EC2 Transit Gateway Connect Peer identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayConnectPeer` using the EC2 Transit Gateway Connect Peer identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_connect_peer.example tgw-connect-peer-12345678 + +Using `terraform import`, import `awsEc2TransitGatewayConnectPeer` using the EC2 Transit Gateway Connect Peer identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_connect_peer.example tgw-connect-peer-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain.html.markdown index f30859baa6b..89c86d5e1a5 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain.html.markdown @@ -165,7 +165,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayId` - (Required) EC2 Transit Gateway identifier. The EC2 Transit Gateway must have `multicastSupport` enabled. * `autoAcceptSharedAssociations` - (Optional) Whether to automatically accept cross-account subnet associations that are associated with the EC2 Transit Gateway Multicast Domain. Valid values: `disable`, `enable`. Default value: `disable`. @@ -173,9 +173,9 @@ The following arguments are supported: * `staticSourcesSupport` - (Optional) Whether to enable support for statically configuring multicast group sources for the EC2 Transit Gateway Multicast Domain. Valid values: `disable`, `enable`. Default value: `disable`. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Multicast Domain. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Domain identifier. * `arn` - EC2 Transit Gateway Multicast Domain Amazon Resource Name (ARN). 
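
As the multicast domain arguments above note, the EC2 Transit Gateway must be created with `multicastSupport` enabled before a domain can be attached to it. A minimal sketch of that pairing follows; it is not part of the generated docs, and the binding paths assume provider bindings created with `cdktf get`:

```typescript
// Sketch: a transit gateway with multicast enabled, plus a multicast domain
// that allows statically configured sources. Binding paths are assumptions.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { Ec2TransitGateway } from "./.gen/providers/aws/ec2-transit-gateway";
import { Ec2TransitGatewayMulticastDomain } from "./.gen/providers/aws/ec2-transit-gateway-multicast-domain";

class MulticastDomainStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const tgw = new Ec2TransitGateway(this, "example", {
      multicastSupport: "enable",
    });
    new Ec2TransitGatewayMulticastDomain(this, "domain", {
      transitGatewayId: tgw.id,
      staticSourcesSupport: "enable",
    });
  }
}
```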
@@ -191,10 +191,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayMulticastDomain` can be imported by using the EC2 Transit Gateway Multicast Domain identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayMulticastDomain` using the EC2 Transit Gateway Multicast Domain identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -terraform import aws_ec2_transit_gateway_multicast_domain.example tgw-mcast-domain-12345 + +Using `terraform import`, import `awsEc2TransitGatewayMulticastDomain` using the EC2 Transit Gateway Multicast Domain identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_multicast_domain.example tgw-mcast-domain-12345 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain_association.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain_association.html.markdown index 7bc7c0f3cea..ce76b7805bd 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain_association.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_domain_association.html.markdown @@ -67,15 +67,15 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `subnetId` - (Required) The ID of the subnet to associate with the transit gateway multicast domain. * `transitGatewayAttachmentId` - (Required) The ID of the transit gateway attachment. * `transitGatewayMulticastDomainId` - (Required) The ID of the transit gateway multicast domain. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Domain Association identifier. @@ -86,4 +86,4 @@ In addition to all arguments above, the following attributes are exported: - `create` - (Default `10M`) - `delete` - (Default `10M`) - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_member.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_member.html.markdown index c158055f0f7..9940fb9862f 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_member.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_member.html.markdown @@ -41,16 +41,16 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `groupIpAddress` - (Required) The IP address assigned to the transit gateway multicast group. * `networkInterfaceId` - (Required) The group members' network interface ID to register with the transit gateway multicast group. * `transitGatewayMulticastDomainId` - (Required) The ID of the transit gateway multicast domain. 
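
The three required group member arguments above map directly onto the construct's properties. A minimal sketch, with hypothetical placeholder identifiers (in practice these usually come from other resources' attributes, and the binding path assumes `cdktf get` output):

```typescript
// Sketch: registering a multicast group member. The ENI and domain IDs
// below are hypothetical placeholders; reference real resources in practice.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { Ec2TransitGatewayMulticastGroupMember } from "./.gen/providers/aws/ec2-transit-gateway-multicast-group-member";

class GroupMemberStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new Ec2TransitGatewayMulticastGroupMember(this, "example", {
      groupIpAddress: "224.0.0.1",
      networkInterfaceId: "eni-0123456789abcdef0",
      transitGatewayMulticastDomainId: "tgw-mcast-domain-12345",
    });
  }
}
```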
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Group Member identifier. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_source.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_source.html.markdown index 1f3b479b19b..2bf0e113ae9 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_source.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_multicast_group_source.html.markdown @@ -41,16 +41,16 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `groupIpAddress` - (Required) The IP address assigned to the transit gateway multicast group. * `networkInterfaceId` - (Required) The group members' network interface ID to register with the transit gateway multicast group. * `transitGatewayMulticastDomainId` - (Required) The ID of the transit gateway multicast domain. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Multicast Group Member identifier. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment.html.markdown index 82aed464de2..83f9079b8da 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment.html.markdown @@ -73,7 +73,7 @@ A full example of how to create a Transit Gateway in one AWS account, share it w ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `peerAccountId` - (Optional) Account ID of EC2 Transit Gateway to peer with. Defaults to the account ID the [AWS provider][1] is currently connected to. * `peerRegion` - (Required) Region of EC2 Transit Gateway to peer with. @@ -81,21 +81,35 @@ The following arguments are supported: * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Peering Attachment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `transitGatewayId` - (Required) Identifier of EC2 Transit Gateway. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
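
The generated `import` examples throughout this diff ship with empty constructors. Recent cdktf provider bindings also expose a static `generateConfigForImport` helper on each resource class that expresses the same import as code; whether your bindings include it depends on the cdktf version used for generation. A sketch for the peering attachment, under those assumptions:

```typescript
// Sketch: expressing the import as code, roughly equivalent to
// `terraform import aws_ec2_transit_gateway_peering_attachment.example tgw-attach-12345678`.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { Ec2TransitGatewayPeeringAttachment } from "./.gen/providers/aws/ec2-transit-gateway-peering-attachment";

class ImportStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    Ec2TransitGatewayPeeringAttachment.generateConfigForImport(
      this,
      "example",
      "tgw-attach-12345678"
    );
  }
}
```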
## Import -`awsEc2TransitGatewayPeeringAttachment` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayPeeringAttachment` using the EC2 Transit Gateway Attachment identifier. For example: -```sh -terraform import aws_ec2_transit_gateway_peering_attachment.example tgw-attach-12345678 +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsEc2TransitGatewayPeeringAttachment` using the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_peering_attachment.example tgw-attach-12345678 ``` [1]: /docs/providers/aws/index.html - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown index d6f2e056cee..adf8f53c0ad 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_peering_attachment_accepter.html.markdown @@ -43,14 +43,14 @@ A full example of how to create a Transit Gateway in one AWS account, share it w ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayAttachmentId` - (Required) The ID of the EC2 Transit Gateway Peering Attachment to manage. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Peering Attachment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `transitGatewayId` - Identifier of EC2 Transit Gateway. @@ -60,10 +60,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayPeeringAttachmentAccepter` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayPeeringAttachmentAccepter` using the EC2 Transit Gateway Attachment identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_peering_attachment_accepter.example tgw-attach-12345678 + +Using `terraform import`, import `awsEc2TransitGatewayPeeringAttachmentAccepter` using the EC2 Transit Gateway Attachment identifier. 
For example: + +```console +% terraform import aws_ec2_transit_gateway_peering_attachment_accepter.example tgw-attach-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table.html.markdown index c3a4f2c0614..be252a8163c 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table.html.markdown @@ -39,14 +39,14 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayId` - (Required) EC2 Transit Gateway identifier. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Policy Table. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Policy Table Amazon Resource Name (ARN). * `id` - EC2 Transit Gateway Policy Table identifier. @@ -55,10 +55,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayPolicyTable` can be imported by using the EC2 Transit Gateway Policy Table identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayPolicyTable` using the EC2 Transit Gateway Policy Table identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_policy_table.example tgw-rtb-12345678 + +Using `terraform import`, import `awsEc2TransitGatewayPolicyTable` using the EC2 Transit Gateway Policy Table identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_policy_table.example tgw-rtb-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table_association.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table_association.html.markdown index 2666dfc4601..91be2b53db4 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table_association.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_policy_table_association.html.markdown @@ -41,14 +41,14 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayAttachmentId` - (Required) Identifier of EC2 Transit Gateway Attachment. * `transitGatewayPolicyTableId` - (Required) Identifier of EC2 Transit Gateway Policy Table. 
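
The two required policy table association arguments above tie an attachment to a policy table. A minimal sketch follows; the transit gateway and attachment IDs are hypothetical placeholders, and the binding paths assume provider bindings created with `cdktf get`:

```typescript
// Sketch: associating a transit gateway attachment with a policy table.
// The literal IDs are hypothetical placeholders.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { Ec2TransitGatewayPolicyTable } from "./.gen/providers/aws/ec2-transit-gateway-policy-table";
import { Ec2TransitGatewayPolicyTableAssociation } from "./.gen/providers/aws/ec2-transit-gateway-policy-table-association";

class PolicyTableAssociationStack extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const table = new Ec2TransitGatewayPolicyTable(this, "example", {
      transitGatewayId: "tgw-12345678", // hypothetical placeholder
    });
    new Ec2TransitGatewayPolicyTableAssociation(this, "association", {
      transitGatewayAttachmentId: "tgw-attach-12345678", // hypothetical placeholder
      transitGatewayPolicyTableId: table.id,
    });
  }
}
```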
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Policy Table identifier combined with EC2 Transit Gateway Attachment identifier * `resourceId` - Identifier of the resource @@ -56,10 +56,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayPolicyTableAssociation` can be imported by using the EC2 Transit Gateway Policy Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayPolicyTableAssociation` using the EC2 Transit Gateway Policy Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_policy_table_association.example tgw-rtb-12345678_tgw-attach-87654321 + +Using `terraform import`, import `awsEc2TransitGatewayPolicyTableAssociation` using the EC2 Transit Gateway Policy Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_policy_table_association.example tgw-rtb-12345678_tgw-attach-87654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_prefix_list_reference.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_prefix_list_reference.html.markdown index 41334b7583c..37b80f47170 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_prefix_list_reference.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_prefix_list_reference.html.markdown @@ -80,18 +80,32 @@ The following arguments are optional: * `blackhole` - (Optional) Indicates whether to drop traffic that matches the Prefix List. Defaults to `false`. * `transitGatewayAttachmentId` - (Optional) Identifier of EC2 Transit Gateway Attachment. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`) ## Import -`awsEc2TransitGatewayPrefixListReference` can be imported by using the EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayPrefixListReference` using the EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsEc2TransitGatewayPrefixListReference` using the EC2 Transit Gateway Route Table identifier and EC2 Prefix List identifier, separated by an underscore (`_`). For example: ```console -$ terraform import aws_ec2_transit_gateway_prefix_list_reference.example tgw-rtb-12345678_pl-12345678 +% terraform import aws_ec2_transit_gateway_prefix_list_reference.example tgw-rtb-12345678_pl-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route.html.markdown index 540ea2667d9..276b6cd67b0 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route.html.markdown @@ -70,25 +70,39 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `destinationCidrBlock` - (Required) IPv4 or IPv6 RFC1924 CIDR used for destination matches. Routing decisions are based on the most specific match. * `transitGatewayAttachmentId` - (Optional) Identifier of EC2 Transit Gateway Attachment (required if `blackhole` is set to false). * `blackhole` - (Optional) Indicates whether to drop traffic that matches this route (default to `false`). * `transitGatewayRouteTableId` - (Required) Identifier of EC2 Transit Gateway Route Table. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier combined with destination ## Import -`awsEc2TransitGatewayRoute` can be imported by using the EC2 Transit Gateway Route Table, an underscore, and the destination, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayRoute` using the EC2 Transit Gateway Route Table, an underscore, and the destination. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_route.example tgw-rtb-12345678_0.0.0.0/0 + +Using `terraform import`, import `awsEc2TransitGatewayRoute` using the EC2 Transit Gateway Route Table, an underscore, and the destination. 
For example: + +```console +% terraform import aws_ec2_transit_gateway_route.example tgw-rtb-12345678_0.0.0.0/0 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table.html.markdown index 24ff115349a..67ba6f25885 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table.html.markdown @@ -36,14 +36,14 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayId` - (Required) Identifier of EC2 Transit Gateway. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway Route Table. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - EC2 Transit Gateway Route Table Amazon Resource Name (ARN). * `defaultAssociationRouteTable` - Boolean whether this is the default association route table for the EC2 Transit Gateway. @@ -53,10 +53,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayRouteTable` can be imported by using the EC2 Transit Gateway Route Table identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayRouteTable` using the EC2 Transit Gateway Route Table identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_route_table.example tgw-rtb-12345678 + +Using `terraform import`, import `awsEc2TransitGatewayRouteTable` using the EC2 Transit Gateway Route Table identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_route_table.example tgw-rtb-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_association.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_association.html.markdown index bb23e478ba0..e78055a5839 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_association.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_association.html.markdown @@ -41,15 +41,15 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayAttachmentId` - (Required) Identifier of EC2 Transit Gateway Attachment. * `transitGatewayRouteTableId` - (Required) Identifier of EC2 Transit Gateway Route Table. 
* `replaceExistingAssociation` - (Optional) Boolean whether the Gateway Attachment should remove any current Route Table association before associating with the specified Route Table. Default value: `false`. This argument is intended for use with EC2 Transit Gateways shared into the current account, otherwise the `transitGatewayDefaultRouteTableAssociation` argument of the `awsEc2TransitGatewayVpcAttachment` resource should be used. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier combined with EC2 Transit Gateway Attachment identifier * `resourceId` - Identifier of the resource @@ -57,10 +57,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayRouteTableAssociation` can be imported by using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayRouteTableAssociation` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_route_table_association.example tgw-rtb-12345678_tgw-attach-87654321 + +Using `terraform import`, import `awsEc2TransitGatewayRouteTableAssociation` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_route_table_association.example tgw-rtb-12345678_tgw-attach-87654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_propagation.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_propagation.html.markdown index a21b2f43903..85f6372a23f 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_propagation.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_route_table_propagation.html.markdown @@ -41,14 +41,14 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayAttachmentId` - (Required) Identifier of EC2 Transit Gateway Attachment. * `transitGatewayRouteTableId` - (Required) Identifier of EC2 Transit Gateway Route Table. 
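Both identifiers are usually taken from other resources in the same stack rather than hard-coded. A minimal sketch, assuming the generated binding names and a hypothetical Transit Gateway and attachment ID:

```typescript
// Hypothetical sketch: propagate an attachment's routes into a route table.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { Ec2TransitGatewayRouteTable } from "./.gen/providers/aws/ec2-transit-gateway-route-table";
import { Ec2TransitGatewayRouteTablePropagation } from "./.gen/providers/aws/ec2-transit-gateway-route-table-propagation";

class PropagationSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const routeTable = new Ec2TransitGatewayRouteTable(this, "example", {
      transitGatewayId: "tgw-12345678", // hypothetical Transit Gateway ID
    });
    // Routes learned by the attachment are propagated into the table above.
    new Ec2TransitGatewayRouteTablePropagation(this, "propagation", {
      transitGatewayAttachmentId: "tgw-attach-87654321", // hypothetical attachment ID
      transitGatewayRouteTableId: routeTable.id,
    });
  }
}
```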
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Route Table identifier combined with EC2 Transit Gateway Attachment identifier * `resourceId` - Identifier of the resource @@ -56,10 +56,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayRouteTablePropagation` can be imported by using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayRouteTablePropagation` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_route_table_propagation.example tgw-rtb-12345678_tgw-attach-87654321 + +Using `terraform import`, import `awsEc2TransitGatewayRouteTablePropagation` using the EC2 Transit Gateway Route Table identifier, an underscore, and the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_route_table_propagation.example tgw-rtb-12345678_tgw-attach-87654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment.html.markdown index 9d169f1d160..704aa524c84 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment.html.markdown @@ -40,7 +40,7 @@ A full example of how to create a Transit Gateway in one AWS account, share it w ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `subnetIds` - (Required) Identifiers of EC2 Subnets. * `transitGatewayId` - (Required) Identifier of EC2 Transit Gateway. @@ -52,9 +52,9 @@ The following arguments are supported: * `transitGatewayDefaultRouteTableAssociation` - (Optional) Boolean whether the VPC Attachment should be associated with the EC2 Transit Gateway association default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`. * `transitGatewayDefaultRouteTablePropagation` - (Optional) Boolean whether the VPC Attachment should propagate routes with the EC2 Transit Gateway propagation default route table. This cannot be configured or perform drift detection with Resource Access Manager shared EC2 Transit Gateways. Default value: `true`. 
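When associations and propagations are managed explicitly (for example with the `awsEc2TransitGatewayRouteTableAssociation` resource), both defaults are typically disabled on the attachment. A minimal sketch, not from the generated docs; all identifiers are hypothetical placeholders:

```typescript
// Hypothetical sketch: an attachment that opts out of the default route table.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { Ec2TransitGatewayVpcAttachment } from "./.gen/providers/aws/ec2-transit-gateway-vpc-attachment";

class AttachmentSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new Ec2TransitGatewayVpcAttachment(this, "example", {
      subnetIds: ["subnet-12345678"], // hypothetical subnet ID
      transitGatewayId: "tgw-12345678", // hypothetical Transit Gateway ID
      vpcId: "vpc-12345678", // hypothetical VPC ID
      // Disable the defaults so association and propagation can be
      // managed as separate resources.
      transitGatewayDefaultRouteTableAssociation: false,
      transitGatewayDefaultRouteTablePropagation: false,
    });
  }
}
```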
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). @@ -62,10 +62,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayVpcAttachment` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayVpcAttachment` using the EC2 Transit Gateway Attachment identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_vpc_attachment.example tgw-attach-12345678 + +Using `terraform import`, import `awsEc2TransitGatewayVpcAttachment` using the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_vpc_attachment.example tgw-attach-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown b/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown index cdfd6c213d2..ffc2eb3161d 100644 --- a/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown +++ b/website/docs/cdktf/typescript/r/ec2_transit_gateway_vpc_attachment_accepter.html.markdown @@ -49,16 +49,16 @@ A full example of how to create a Transit Gateway in one AWS account, share it w ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `transitGatewayAttachmentId` - (Required) The ID of the EC2 Transit Gateway Attachment to manage. * `transitGatewayDefaultRouteTableAssociation` - (Optional) Boolean whether the VPC Attachment should be associated with the EC2 Transit Gateway association default route table. Default value: `true`. * `transitGatewayDefaultRouteTablePropagation` - (Optional) Boolean whether the VPC Attachment should propagate routes with the EC2 Transit Gateway propagation default route table. Default value: `true`. * `tags` - (Optional) Key-value tags for the EC2 Transit Gateway VPC Attachment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
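In a cross-account setup, the accepter runs in the Transit Gateway owner's account and references the attachment created by the VPC owner. A minimal sketch; the attachment ID is a hypothetical placeholder:

```typescript
// Hypothetical sketch: accept a shared VPC attachment by its identifier.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { Ec2TransitGatewayVpcAttachmentAccepter } from "./.gen/providers/aws/ec2-transit-gateway-vpc-attachment-accepter";

class AccepterSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new Ec2TransitGatewayVpcAttachmentAccepter(this, "example", {
      transitGatewayAttachmentId: "tgw-attach-12345678", // hypothetical cross-account attachment
      tags: {
        Name: "cross-account-attachment",
      },
    });
  }
}
```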
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - EC2 Transit Gateway Attachment identifier * `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). @@ -72,10 +72,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsEc2TransitGatewayVpcAttachmentAccepter` can be imported by using the EC2 Transit Gateway Attachment identifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEc2TransitGatewayVpcAttachmentAccepter` using the EC2 Transit Gateway Attachment identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_ec2_transit_gateway_vpc_attachment_accepter.example tgw-attach-12345678 + +Using `terraform import`, import `awsEc2TransitGatewayVpcAttachmentAccepter` using the EC2 Transit Gateway Attachment identifier. For example: + +```console +% terraform import aws_ec2_transit_gateway_vpc_attachment_accepter.example tgw-attach-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecr_lifecycle_policy.html.markdown b/website/docs/cdktf/typescript/r/ecr_lifecycle_policy.html.markdown new file mode 100644 index 00000000000..cc95c7a3cef --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecr_lifecycle_policy.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_lifecycle_policy" +description: |- + Manages an ECR repository lifecycle policy. +--- + + + +# Resource: aws_ecr_lifecycle_policy + +Manages an ECR repository lifecycle policy. + +~> **NOTE:** Only one `awsEcrLifecyclePolicy` resource can be used with the same ECR repository. To apply multiple rules, they must be combined in the `policy` JSON. + +~> **NOTE:** The AWS ECR API seems to reorder rules based on `rulePriority`. If you define multiple rules that are not sorted in ascending `rulePriority` order in the Terraform code, the resource will be flagged for recreation on every `terraform plan`. + +## Example Usage + +### Policy on untagged image + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EcrLifecyclePolicy } from "./.gen/providers/aws/ecr-lifecycle-policy"; +import { EcrRepository } from "./.gen/providers/aws/ecr-repository"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new EcrRepository(this, "foo", { + name: "bar", + }); + new EcrLifecyclePolicy(this, "foopolicy", { + policy: + '{\n "rules": [\n {\n "rulePriority": 1,\n "description": "Expire images older than 14 days",\n "selection": {\n "tagStatus": "untagged",\n "countType": "sinceImagePushed",\n "countUnit": "days",\n "countNumber": 14\n },\n "action": {\n "type": "expire"\n }\n }\n ]\n}\n\n', + repository: foo.name, + }); + } +} + +``` + +### Policy on tagged image + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcrLifecyclePolicy } from "./.gen/providers/aws/ecr-lifecycle-policy"; +import { EcrRepository } from "./.gen/providers/aws/ecr-repository"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new EcrRepository(this, "foo", { + name: "bar", + }); + new EcrLifecyclePolicy(this, "foopolicy", { + policy: + '{\n "rules": [\n {\n "rulePriority": 1,\n "description": "Keep last 30 images",\n "selection": {\n "tagStatus": "tagged",\n "tagPrefixList": ["v"],\n "countType": "imageCountMoreThan",\n "countNumber": 30\n },\n "action": {\n "type": "expire"\n }\n }\n ]\n}\n\n', + repository: foo.name, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repository` - (Required) Name of the repository to apply the policy. +* `policy` - (Required) The policy document. This is a JSON formatted string. See more details about [Policy Parameters](http://docs.aws.amazon.com/AmazonECR/latest/userguide/LifecyclePolicies.html#lifecycle_policy_parameters) in the official AWS docs. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `repository` - The name of the repository. +* `registryId` - The registry ID where the repository was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Lifecycle Policy using the name of the repository. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Lifecycle Policy using the name of the repository. 
For example: + +```console +% terraform import aws_ecr_lifecycle_policy.example tf-example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecr_pull_through_cache_rule.html.markdown b/website/docs/cdktf/typescript/r/ecr_pull_through_cache_rule.html.markdown new file mode 100644 index 00000000000..1e184ce2751 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecr_pull_through_cache_rule.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_pull_through_cache_rule" +description: |- + Provides an Elastic Container Registry Pull Through Cache Rule. +--- + + + +# Resource: aws_ecr_pull_through_cache_rule + +Provides an Elastic Container Registry Pull Through Cache Rule. + +For more information about pull through cache rules, including the set of supported +upstream repositories, see [Using pull through cache rules](https://docs.aws.amazon.com/AmazonECR/latest/userguide/pull-through-cache.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcrPullThroughCacheRule } from "./.gen/providers/aws/ecr-pull-through-cache-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcrPullThroughCacheRule(this, "example", { + ecrRepositoryPrefix: "ecr-public", + upstreamRegistryUrl: "public.ecr.aws", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `ecrRepositoryPrefix` - (Required, Forces new resource) The repository name prefix to use when caching images from the source registry. +* `upstreamRegistryUrl` - (Required, Forces new resource) The registry URL of the upstream public registry to use as the source. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registryId` - The registry ID where the repository was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a pull-through cache rule using the `ecrRepositoryPrefix`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a pull-through cache rule using the `ecrRepositoryPrefix`. For example: + +```console +% terraform import aws_ecr_pull_through_cache_rule.example ecr-public +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecr_registry_policy.html.markdown b/website/docs/cdktf/typescript/r/ecr_registry_policy.html.markdown new file mode 100644 index 00000000000..8c6715f68a0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecr_registry_policy.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_registry_policy" +description: |- + Provides an Elastic Container Registry Policy. 
+--- + + + +# Resource: aws_ecr_registry_policy + +Provides an Elastic Container Registry Policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { EcrRegistryPolicy } from "./.gen/providers/aws/ecr-registry-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsPartitionCurrent = new DataAwsPartition(this, "current_1", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsPartitionCurrent.overrideLogicalId("current"); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_2", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + new EcrRegistryPolicy(this, "example", { + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ecr:ReplicateImage"], + Effect: "Allow", + Principal: { + AWS: + "arn:${" + + dataAwsPartitionCurrent.partition + + "}:iam::${" + + current.accountId + + "}:root", + }, + Resource: [ + "arn:${" + + dataAwsPartitionCurrent.partition + + "}:ecr:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:repository/*", + ], + Sid: "testpolicy", + }, + ], + Version: "2012-10-17", + }) + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registryId` - The registry ID where the registry was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Registry Policy using the registry id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Registry Policy using the registry id. 
For example: + +```console +% terraform import aws_ecr_registry_policy.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecr_registry_scanning_configuration.html.markdown b/website/docs/cdktf/typescript/r/ecr_registry_scanning_configuration.html.markdown new file mode 100644 index 00000000000..1e0bed5fd3f --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecr_registry_scanning_configuration.html.markdown @@ -0,0 +1,132 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_registry_scanning_configuration" +description: |- + Provides an Elastic Container Registry Scanning Configuration. +--- + + + +# Resource: aws_ecr_registry_scanning_configuration + +Provides an Elastic Container Registry Scanning Configuration. Can't be completely deleted, instead reverts to the default `basic` scanning configuration without rules. + +## Example Usage + +### Basic example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcrRegistryScanningConfiguration } from "./.gen/providers/aws/ecr-registry-scanning-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcrRegistryScanningConfiguration(this, "configuration", { + rule: [ + { + repositoryFilter: [ + { + filter: "example", + filterType: "WILDCARD", + }, + ], + scanFrequency: "CONTINUOUS_SCAN", + }, + ], + scanType: "ENHANCED", + }); + } +} + +``` + +### Multiple rules + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcrRegistryScanningConfiguration } from "./.gen/providers/aws/ecr-registry-scanning-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcrRegistryScanningConfiguration(this, "test", { + rule: [ + { + repositoryFilter: [ + { + filter: "*", + filterType: "WILDCARD", + }, + ], + scanFrequency: "SCAN_ON_PUSH", + }, + { + repositoryFilter: [ + { + filter: "example", + filterType: "WILDCARD", + }, + ], + scanFrequency: "CONTINUOUS_SCAN", + }, + ], + scanType: "ENHANCED", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +- `scanType` - (Required) the scanning type to set for the registry. Can be either `enhanced` or `basic`. +- `rule` - (Optional) One or multiple blocks specifying scanning rules to determine which repository filters are used and at what frequency scanning will occur. See [below for schema](#rule). + +### rule + +- `repositoryFilter` - (Required) One or more repository filter blocks, containing a `filter` (required string filtering repositories, see pattern regex [here](https://docs.aws.amazon.com/AmazonECR/latest/APIReference/API_ScanningRepositoryFilter.html)) and a `filterType` (required string, currently only `wildcard` is supported). +- `scanFrequency` - (Required) The frequency that scans are performed at for a private registry. Can be `scanOnPush`, `continuousScan`, or `manual`. 
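Since the configuration can't be deleted outright, "removing" it amounts to setting the registry back to basic scanning with no rules. A minimal sketch, not from the generated examples; it assumes the binding path produced by `cdktf get`:

```typescript
// Hypothetical sketch: revert the registry to default basic scanning.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { EcrRegistryScanningConfiguration } from "./.gen/providers/aws/ecr-registry-scanning-configuration";

class BasicScanningSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    // No rule blocks: this mirrors the default state the registry
    // reverts to when the resource is destroyed.
    new EcrRegistryScanningConfiguration(this, "basic", {
      scanType: "BASIC",
    });
  }
}
```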
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registryId` - The registry ID the scanning configuration applies to. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Scanning Configurations using the `registryId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Scanning Configurations using the `registryId`. For example: + +```console +% terraform import aws_ecr_registry_scanning_configuration.example 012345678901 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecr_replication_configuration.html.markdown b/website/docs/cdktf/typescript/r/ecr_replication_configuration.html.markdown new file mode 100644 index 00000000000..a40bb3912b7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecr_replication_configuration.html.markdown @@ -0,0 +1,198 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_replication_configuration" +description: |- + Provides an Elastic Container Registry Replication Configuration. +--- + + + +# Resource: aws_ecr_replication_configuration + +Provides an Elastic Container Registry Replication Configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsRegions } from "./.gen/providers/aws/data-aws-regions"; +import { EcrReplicationConfiguration } from "./.gen/providers/aws/ecr-replication-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + const example = new DataAwsRegions(this, "example", {}); + const awsEcrReplicationConfigurationExample = + new EcrReplicationConfiguration(this, "example_2", { + replicationConfiguration: { + rule: [ + { + destination: [ + { + region: Token.asString(propertyAccess(example.names, ["0"])), + registryId: Token.asString(current.accountId), + }, + ], + }, + ], + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcrReplicationConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +## Multiple Region Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsRegions } from "./.gen/providers/aws/data-aws-regions"; +import { EcrReplicationConfiguration } from "./.gen/providers/aws/ecr-replication-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + const example = new DataAwsRegions(this, "example", {}); + const awsEcrReplicationConfigurationExample = + new EcrReplicationConfiguration(this, "example_2", { + replicationConfiguration: { + rule: [ + { + destination: [ + { + region: Token.asString(propertyAccess(example.names, ["0"])), + registryId: Token.asString(current.accountId), + }, + { + region: Token.asString(propertyAccess(example.names, ["1"])), + registryId: Token.asString(current.accountId), + }, + ], + }, + ], + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcrReplicationConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +## Repository Filter Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsRegions } from "./.gen/providers/aws/data-aws-regions"; +import { EcrReplicationConfiguration } from "./.gen/providers/aws/ecr-replication-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + const example = new DataAwsRegions(this, "example", {}); + const awsEcrReplicationConfigurationExample = + new EcrReplicationConfiguration(this, "example_2", { + replicationConfiguration: { + rule: [ + { + destination: [ + { + region: Token.asString(propertyAccess(example.names, ["0"])), + registryId: Token.asString(current.accountId), + }, + ], + repositoryFilter: [ + { + filter: "prod-microservice", + filterType: "PREFIX_MATCH", + }, + ], + }, + ], + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcrReplicationConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `replicationConfiguration` - (Required) Replication configuration for a registry. See [Replication Configuration](#replication-configuration). + +### Replication Configuration + +* `rule` - (Required) The replication rules for a replication configuration. A maximum of 10 are allowed per `replicationConfiguration`. See [Rule](#rule) + +### Rule + +* `destination` - (Required) the details of a replication destination. A maximum of 25 are allowed per `rule`. See [Destination](#destination). +* `repositoryFilter` - (Optional) filters for a replication rule. See [Repository Filter](#repository-filter). + +### Destination + +* `region` - (Required) A Region to replicate to. +* `registryId` - (Required) The account ID of the destination registry to replicate to. 
+ +### Repository Filter + +* `filter` - (Required) The repository filter details. +* `filterType` - (Required) The repository filter type. The only supported value is `prefixMatch`, which is a repository name prefix specified with the filter parameter. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registryId` - The registry ID where the replication configuration was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Replication Configuration using the `registryId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Replication Configuration using the `registryId`. For example: + +```console +% terraform import aws_ecr_replication_configuration.service 012345678912 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecr_repository.html.markdown b/website/docs/cdktf/typescript/r/ecr_repository.html.markdown new file mode 100644 index 00000000000..a309d74e3ac --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecr_repository.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_repository" +description: |- + Provides an Elastic Container Registry Repository. +--- + + + +# Resource: aws_ecr_repository + +Provides an Elastic Container Registry Repository. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcrRepository } from "./.gen/providers/aws/ecr-repository"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcrRepository(this, "foo", { + imageScanningConfiguration: { + scanOnPush: true, + }, + imageTagMutability: "MUTABLE", + name: "bar", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the repository. +* `encryptionConfiguration` - (Optional) Encryption configuration for the repository. See [below for schema](#encryption_configuration). +* `forceDelete` - (Optional) If `true`, will delete the repository even if it contains images. + Defaults to `false`. +* `imageTagMutability` - (Optional) The tag mutability setting for the repository. Must be one of: `mutable` or `immutable`. Defaults to `mutable`. +* `imageScanningConfiguration` - (Optional) Configuration block that defines image scanning configuration for the repository. By default, image scanning must be manually triggered. See the [ECR User Guide](https://docs.aws.amazon.com/AmazonECR/latest/userguide/image-scanning.html) for more information about image scanning. + * `scanOnPush` - (Required) Indicates whether images are scanned after being pushed to the repository (true) or not scanned (false). +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### encryption_configuration + +* `encryptionType` - (Optional) The encryption type to use for the repository. Valid values are `aes256` or `kms`. Defaults to `aes256`. +* `kmsKey` - (Optional) The ARN of the KMS key to use when `encryptionType` is `kms`. If not specified, uses the default AWS managed key for ECR. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Full ARN of the repository. +* `registryId` - The registry ID where the repository was created. +* `repositoryUrl` - The URL of the repository (in the form `awsAccountIdDkrEcrRegionAmazonawsCom/repositoryName`). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `delete` - (Default `20M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Repositories using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Repositories using the `name`. For example: + +```console +% terraform import aws_ecr_repository.service test-service +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecr_repository_policy.html.markdown b/website/docs/cdktf/typescript/r/ecr_repository_policy.html.markdown new file mode 100644 index 00000000000..f77fb092a77 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecr_repository_policy.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "ECR (Elastic Container Registry)" +layout: "aws" +page_title: "AWS: aws_ecr_repository_policy" +description: |- + Provides an Elastic Container Registry Repository Policy. +--- + + + +# Resource: aws_ecr_repository_policy + +Provides an Elastic Container Registry Repository Policy. + +Note that currently only one policy may be applied to a repository. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { EcrRepository } from "./.gen/providers/aws/ecr-repository"; +import { EcrRepositoryPolicy } from "./.gen/providers/aws/ecr-repository-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new EcrRepository(this, "foo", { + name: "bar", + }); + const foopolicy = new DataAwsIamPolicyDocument(this, "foopolicy", { + statement: [ + { + actions: [ + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + "ecr:BatchCheckLayerAvailability", + "ecr:PutImage", + "ecr:InitiateLayerUpload", + "ecr:UploadLayerPart", + "ecr:CompleteLayerUpload", + "ecr:DescribeRepositories", + "ecr:GetRepositoryPolicy", + "ecr:ListImages", + "ecr:DeleteRepository", + "ecr:BatchDeleteImage", + "ecr:SetRepositoryPolicy", + "ecr:DeleteRepositoryPolicy", + ], + effect: "Allow", + principals: [ + { + identifiers: ["123456789012"], + type: "AWS", + }, + ], + sid: "new policy", + }, + ], + }); + const awsEcrRepositoryPolicyFoopolicy = new EcrRepositoryPolicy( + this, + "foopolicy_2", + { + policy: Token.asString(foopolicy.json), + repository: foo.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcrRepositoryPolicyFoopolicy.overrideLogicalId("foopolicy"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repository` - (Required) Name of the repository to apply the policy. +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `repository` - The name of the repository. +* `registryId` - The registry ID where the repository was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Repository Policy using the repository name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Repository Policy using the repository name. For example: + +```console +% terraform import aws_ecr_repository_policy.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecrpublic_repository.html.markdown b/website/docs/cdktf/typescript/r/ecrpublic_repository.html.markdown new file mode 100644 index 00000000000..dcd8a1a66f4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecrpublic_repository.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "ECR Public" +layout: "aws" +page_title: "AWS: aws_ecrpublic_repository" +description: |- + Provides a Public Elastic Container Registry Repository. +--- + + + +# Resource: aws_ecrpublic_repository + +Provides a Public Elastic Container Registry Repository. + +~> **NOTE:** This resource can only be used in the `usEast1` region. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcrpublicRepository } from "./.gen/providers/aws/ecrpublic-repository"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const usEast1 = new AwsProvider(this, "aws", { + alias: "us_east_1", + region: "us-east-1", + }); + new EcrpublicRepository(this, "foo", { + catalogData: { + aboutText: "About Text", + architectures: ["ARM"], + description: "Description", + logoImageBlob: Token.asString(Fn.filebase64("image.png")), // hypothetical logo file path + operatingSystems: ["Linux"], + usageText: "Usage Text", + }, + provider: usEast1, + repositoryName: "bar", + tags: { + env: "production", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repositoryName` - (Required) Name of the repository. +* `catalogData` - (Optional) Catalog data configuration for the repository. See [below for schema](#catalog_data). +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### catalog_data + +* `aboutText` - (Optional) A detailed description of the contents of the repository. It is publicly visible in the Amazon ECR Public Gallery. The text must be in markdown format. +* `architectures` - (Optional) The system architecture that the images in the repository are compatible with. On the Amazon ECR Public Gallery, the following supported architectures will appear as badges on the repository and are used as search filters: `arm`, `ARM 64`, `x86`, `x8664` +* `description` - (Optional) A short description of the contents of the repository. This text appears in both the image details and also when searching for repositories on the Amazon ECR Public Gallery. +* `logoImageBlob` - (Optional) The base64-encoded repository logo payload. (Only visible for verified accounts) Note that drift detection is disabled for this attribute. +* `operatingSystems` - (Optional) The operating systems that the images in the repository are compatible with. On the Amazon ECR Public Gallery, the following supported operating systems will appear as badges on the repository and are used as search filters: `linux`, `windows` +* `usageText` - (Optional) Detailed information on how to use the contents of the repository. It is publicly visible in the Amazon ECR Public Gallery. The usage text provides context, support information, and additional usage details for users of the repository. The text must be in markdown format. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Full ARN of the repository. +* `id` - The repository name. +* `registryId` - The registry ID where the repository was created. +* `repositoryUri` - The URI of the repository. 
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `delete` - (Default `20M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Public Repositories using the `repositoryName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Public Repositories using the `repositoryName`. For example: + +```console +% terraform import aws_ecrpublic_repository.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecrpublic_repository_policy.html.markdown b/website/docs/cdktf/typescript/r/ecrpublic_repository_policy.html.markdown new file mode 100644 index 00000000000..3a7c7ed6c05 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecrpublic_repository_policy.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "ECR Public" +layout: "aws" +page_title: "AWS: aws_ecrpublic_repository_policy" +description: |- + Provides an Elastic Container Registry Public Repository Policy. +--- + + + +# Resource: aws_ecrpublic_repository_policy + +Provides an Elastic Container Registry Public Repository Policy. + +Note that currently only one policy may be applied to a repository. + +~> **NOTE:** This resource can only be used in the `usEast1` region. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { EcrpublicRepository } from "./.gen/providers/aws/ecrpublic-repository"; +import { EcrpublicRepositoryPolicy } from "./.gen/providers/aws/ecrpublic-repository-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new EcrpublicRepository(this, "example", { + repositoryName: "example", + }); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_1", + { + statement: [ + { + actions: [ + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + "ecr:BatchCheckLayerAvailability", + "ecr:PutImage", + "ecr:InitiateLayerUpload", + "ecr:UploadLayerPart", + "ecr:CompleteLayerUpload", + "ecr:DescribeRepositories", + "ecr:GetRepositoryPolicy", + "ecr:ListImages", + "ecr:DeleteRepository", + "ecr:BatchDeleteImage", + "ecr:SetRepositoryPolicy", + "ecr:DeleteRepositoryPolicy", + ], + effect: "Allow", + principals: [ + { + identifiers: ["123456789012"], + type: "AWS", + }, + ], + sid: "new policy", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsEcrpublicRepositoryPolicyExample = new EcrpublicRepositoryPolicy( + this, + "example_2", + { + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + repositoryName: example.repositoryName, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcrpublicRepositoryPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `repositoryName` - (Required) Name of the repository to apply the policy. +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `registryId` - The registry ID where the repository was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECR Public Repository Policy using the repository name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECR Public Repository Policy using the repository name. For example: + +```console +% terraform import aws_ecrpublic_repository_policy.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecs_account_setting_default.html.markdown b/website/docs/cdktf/typescript/r/ecs_account_setting_default.html.markdown new file mode 100644 index 00000000000..b54136d9e73 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecs_account_setting_default.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_account_setting_default" +description: |- + Provides an ECS Default account setting. +--- + + + +# Resource: aws_ecs_account_setting_default + +Provides an ECS default account setting for a specific ECS Resource name within a specific region. More information can be found on the [ECS Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-account-settings.html). + +~> **NOTE:** The AWS API does not delete this resource. When you run `destroy`, the provider will attempt to disable the setting. + +~> **NOTE:** Your AWS account may not support disabling `containerInstanceLongArnFormat`, `serviceLongArnFormat`, and `taskLongArnFormat`. If your account does not support disabling these, "destroying" this resource will not disable the setting nor cause a Terraform error. However, the AWS Provider will log an AWS error: `InvalidParameterException: You can no longer disable Long Arn settings`. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EcsAccountSettingDefault } from "./.gen/providers/aws/ecs-account-setting-default"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsAccountSettingDefault(this, "test", { + name: "taskLongArnFormat", + value: "enabled", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the account setting to set. Valid values are `serviceLongArnFormat`, `taskLongArnFormat`, `containerInstanceLongArnFormat`, `awsvpcTrunking` and `containerInsights`. +* `value` - (Required) State of the setting. Valid values are `enabled` and `disabled`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ARN that identifies the account setting. +* `principalArn` - ARN that identifies the account setting. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Account Setting defaults using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECS Account Setting defaults using the `name`. For example: + +```console +% terraform import aws_ecs_account_setting_default.example taskLongArnFormat +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecs_capacity_provider.html.markdown b/website/docs/cdktf/typescript/r/ecs_capacity_provider.html.markdown new file mode 100644 index 00000000000..742c2230a04 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecs_capacity_provider.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_capacity_provider" +description: |- + Provides an ECS cluster capacity provider. +--- + + + +# Resource: aws_ecs_capacity_provider + +Provides an ECS cluster capacity provider. More information can be found on the [ECS Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html). + +~> **NOTE:** Associating an ECS Capacity Provider to an Auto Scaling Group will automatically add the `amazonEcsManaged` tag to the Auto Scaling Group. This tag should be included in the `awsAutoscalingGroup` resource configuration to prevent Terraform from removing it in subsequent executions as well as ensuring the `amazonEcsManaged` tag is propagated to all EC2 Instances in the Auto Scaling Group if `minSize` is above 0 on creation. Any EC2 Instances in the Auto Scaling Group without this tag must be updated manually, otherwise they may cause unexpected scaling behavior and metrics. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { EcsCapacityProvider } from "./.gen/providers/aws/ecs-capacity-provider";
+interface MyConfig {
+  maxSize: any;
+  minSize: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const test = new AutoscalingGroup(this, "test", {
+      tag: [
+        {
+          key: "AmazonECSManaged",
+          propagateAtLaunch: true,
+          value: Token.asString(true),
+        },
+      ],
+      maxSize: config.maxSize,
+      minSize: config.minSize,
+    });
+    const awsEcsCapacityProviderTest = new EcsCapacityProvider(this, "test_1", {
+      autoScalingGroupProvider: {
+        autoScalingGroupArn: test.arn,
+        managedScaling: {
+          maximumScalingStepSize: 1000,
+          minimumScalingStepSize: 1,
+          status: "ENABLED",
+          targetCapacity: 10,
+        },
+        managedTerminationProtection: "ENABLED",
+      },
+      name: "test",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsEcsCapacityProviderTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `autoScalingGroupProvider` - (Required) Configuration block for the provider for the ECS auto scaling group. Detailed below.
+* `name` - (Required) Name of the capacity provider.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `autoScalingGroupProvider`
+
+* `autoScalingGroupArn` - (Required) ARN of the associated auto scaling group.
+* `managedScaling` - (Optional) Configuration block defining the parameters of the auto scaling. Detailed below.
+* `managedTerminationProtection` - (Optional) Enables or disables container-aware termination of instances in the auto scaling group when scale-in happens. Valid values are `enabled` and `disabled`.
+
+### `managedScaling`
+
+* `instanceWarmupPeriod` - (Optional) Period of time, in seconds, after which a newly launched Amazon EC2 instance can contribute to CloudWatch metrics for the Auto Scaling group. If this parameter is omitted, the default value of 300 seconds is used.
+* `maximumScalingStepSize` - (Optional) Maximum step adjustment size. A number between 1 and 10,000.
+* `minimumScalingStepSize` - (Optional) Minimum step adjustment size. A number between 1 and 10,000.
+* `status` - (Optional) Whether auto scaling is managed by ECS. Valid values are `enabled` and `disabled`.
+* `targetCapacity` - (Optional) Target utilization for the capacity provider. A number between 1 and 100.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN that identifies the capacity provider.
+* `id` - ARN that identifies the capacity provider.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Capacity Providers using the `name`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECS Capacity Providers using the `name`. For example: + +```console +% terraform import aws_ecs_capacity_provider.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecs_cluster.html.markdown b/website/docs/cdktf/typescript/r/ecs_cluster.html.markdown new file mode 100644 index 00000000000..118f721a31c --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecs_cluster.html.markdown @@ -0,0 +1,153 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_cluster" +description: |- + Provides an ECS cluster. +--- + + + +# Resource: aws_ecs_cluster + +Provides an ECS cluster. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcsCluster } from "./.gen/providers/aws/ecs-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsCluster(this, "foo", { + name: "white-hart", + setting: [ + { + name: "containerInsights", + value: "enabled", + }, + ], + }); + } +} + +``` + +### Example with Log Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { EcsCluster } from "./.gen/providers/aws/ecs-cluster"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "example", + }); + const awsKmsKeyExample = new KmsKey(this, "example_1", { + deletionWindowInDays: 7, + description: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKmsKeyExample.overrideLogicalId("example"); + new EcsCluster(this, "test", { + configuration: { + executeCommandConfiguration: { + kmsKeyId: Token.asString(awsKmsKeyExample.arn), + logConfiguration: { + cloudWatchEncryptionEnabled: true, + cloudWatchLogGroupName: example.name, + }, + logging: "OVERRIDE", + }, + }, + name: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `configuration` - (Optional) The execute command configuration for the cluster. Detailed below. +* `name` - (Required) Name of the cluster (up to 255 letters, numbers, hyphens, and underscores) +* `serviceConnectDefaults` - (Optional) Configures a default Service Connect namespace. Detailed below. +* `setting` - (Optional) Configuration block(s) with cluster settings. 
For example, this can be used to enable CloudWatch Container Insights for a cluster. Detailed below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `configuration`
+
+* `executeCommandConfiguration` - (Optional) The details of the execute command configuration. Detailed below.
+
+#### `executeCommandConfiguration`
+
+* `kmsKeyId` - (Optional) The AWS Key Management Service key ID to encrypt the data between the local client and the container.
+* `logConfiguration` - (Optional) The log configuration for the results of the execute command actions. Required when `logging` is `override`. Detailed below.
+* `logging` - (Optional) The log setting to use for redirecting logs for your execute command results. Valid values are `none`, `default`, and `override`.
+
+##### `logConfiguration`
+
+* `cloudWatchEncryptionEnabled` - (Optional) Whether or not to enable encryption on the CloudWatch logs. If not specified, encryption will be disabled.
+* `cloudWatchLogGroupName` - (Optional) The name of the CloudWatch log group to send logs to.
+* `s3BucketName` - (Optional) The name of the S3 bucket to send logs to.
+* `s3BucketEncryptionEnabled` - (Optional) Whether or not to enable encryption on the logs sent to S3. If not specified, encryption will be disabled.
+* `s3KeyPrefix` - (Optional) An optional folder in the S3 bucket to place logs in.
+
+### `setting`
+
+* `name` - (Required) Name of the setting to manage. Valid values: `containerInsights`.
+* `value` - (Required) The value to assign to the setting. Valid values are `enabled` and `disabled`.
+
+### `serviceConnectDefaults`
+
+* `namespace` - (Required) The ARN of the [`awsServiceDiscoveryHttpNamespace`](/docs/providers/aws/r/service_discovery_http_namespace.html) that's used when you create a service and don't specify a Service Connect configuration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN that identifies the cluster.
+* `id` - ARN that identifies the cluster.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS clusters using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ECS clusters using the `name`.
For example: + +```console +% terraform import aws_ecs_cluster.stateless stateless-app +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecs_cluster_capacity_providers.html.markdown b/website/docs/cdktf/typescript/r/ecs_cluster_capacity_providers.html.markdown new file mode 100644 index 00000000000..ac64d76b0c1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecs_cluster_capacity_providers.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_cluster_capacity_providers" +description: |- + Provides an ECS cluster capacity providers resource. +--- + + + +# Resource: aws_ecs_cluster_capacity_providers + +Manages the capacity providers of an ECS Cluster. + +More information about capacity providers can be found in the [ECS User Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcsCluster } from "./.gen/providers/aws/ecs-cluster"; +import { EcsClusterCapacityProviders } from "./.gen/providers/aws/ecs-cluster-capacity-providers"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new EcsCluster(this, "example", { + name: "my-cluster", + }); + const awsEcsClusterCapacityProvidersExample = + new EcsClusterCapacityProviders(this, "example_1", { + capacityProviders: ["FARGATE"], + clusterName: example.name, + defaultCapacityProviderStrategy: [ + { + base: 1, + capacityProvider: "FARGATE", + weight: 100, + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcsClusterCapacityProvidersExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `capacityProviders` - (Optional) Set of names of one or more capacity providers to associate with the cluster. Valid values also include `fargate` and `fargateSpot`. +* `clusterName` - (Required, Forces new resource) Name of the ECS cluster to manage capacity providers for. +* `defaultCapacityProviderStrategy` - (Optional) Set of capacity provider strategies to use by default for the cluster. Detailed below. + +### default_capacity_provider_strategy Configuration Block + +* `capacityProvider` - (Required) Name of the capacity provider. +* `weight` - (Optional) The relative percentage of the total number of launched tasks that should use the specified capacity provider. The `weight` value is taken into consideration after the `base` count of tasks has been satisfied. Defaults to `0`. +* `base` - (Optional) The number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Defaults to `0`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `clusterName`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS cluster capacity providers using the `clusterName` attribute. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ECS cluster capacity providers using the `clusterName` attribute. For example: + +```console +% terraform import aws_ecs_cluster_capacity_providers.example my-cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecs_service.html.markdown b/website/docs/cdktf/typescript/r/ecs_service.html.markdown new file mode 100644 index 00000000000..f063b312c5e --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecs_service.html.markdown @@ -0,0 +1,402 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_service" +description: |- + Provides an ECS service. +--- + + + +# Resource: aws_ecs_service + +-> **Note:** To prevent a race condition during service deletion, make sure to set `dependsOn` to the related `awsIamRolePolicy`; otherwise, the policy may be destroyed too soon and the ECS service will then get stuck in the `draining` state. + +Provides an ECS service - effectively a task that is expected to run until an error occurs or a user terminates it (typically a webserver or a database). + +See [ECS Services section in AWS developer guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs_services.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcsService } from "./.gen/providers/aws/ecs-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsService(this, "mongo", { + cluster: foo.id, + dependsOn: [awsIamRolePolicyFoo], + desiredCount: 3, + iamRole: Token.asString(awsIamRoleFoo.arn), + loadBalancer: [ + { + containerName: "mongo", + containerPort: 8080, + targetGroupArn: Token.asString(awsLbTargetGroupFoo.arn), + }, + ], + name: "mongodb", + orderedPlacementStrategy: [ + { + field: "cpu", + type: "binpack", + }, + ], + placementConstraints: [ + { + expression: + "attribute:ecs.availability-zone in [us-west-2a, us-west-2b]", + type: "memberOf", + }, + ], + taskDefinition: Token.asString(awsEcsTaskDefinitionMongo.arn), + }); + } +} + +``` + +### Ignoring Changes to Desired Count + +You can utilize the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignoreChanges` to create an ECS service with an initial count of running instances, then ignore any changes to that count caused externally (e.g., Application Autoscaling). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
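+ * The lifecycle block below keeps Terraform from reverting external changes to the
+ * desired count (for example, changes made by Application Auto Scaling).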
+ */
+import { EcsService } from "./.gen/providers/aws/ecs-service";
+interface MyConfig {
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new EcsService(this, "example", {
+      desiredCount: 2,
+      lifecycle: {
+        // cdktf passes these strings through to Terraform's ignore_changes,
+        // so use the Terraform attribute name here.
+        ignoreChanges: ["desired_count"],
+      },
+      name: config.name,
+    });
+  }
+}
+
+```
+
+### Daemon Scheduling Strategy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EcsService } from "./.gen/providers/aws/ecs-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsService(this, "bar", {
+      cluster: foo.id,
+      name: "bar",
+      schedulingStrategy: "DAEMON",
+      taskDefinition: Token.asString(awsEcsTaskDefinitionBar.arn),
+    });
+  }
+}
+
+```
+
+### CloudWatch Deployment Alarms
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EcsService } from "./.gen/providers/aws/ecs-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsService(this, "example", {
+      alarms: {
+        alarmNames: [Token.asString(awsCloudwatchMetricAlarmExample.alarmName)],
+        enable: true,
+        rollback: true,
+      },
+      cluster: Token.asString(awsEcsClusterExample.id),
+      name: "example",
+    });
+  }
+}
+
+```
+
+### External Deployment Controller
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EcsService } from "./.gen/providers/aws/ecs-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsService(this, "example", {
+      cluster: Token.asString(awsEcsClusterExample.id),
+      deploymentController: {
+        type: "EXTERNAL",
+      },
+      name: "example",
+    });
+  }
+}
+
+```
+
+### Redeploy Service On Every Apply
+
+The key used with `triggers` is arbitrary.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
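+ * The `triggers` map below forces a new deployment whenever `timestamp()` changes, i.e. on every apply.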
+ */ +import { EcsService } from "./.gen/providers/aws/ecs-service"; +interface MyConfig { + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new EcsService(this, "example", { + forceNewDeployment: true, + triggers: { + redeployment: Token.asString(Fn.timestamp()), + }, + name: config.name, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the service (up to 255 letters, numbers, hyphens, and underscores) + +The following arguments are optional: + +* `alarms` - (Optional) Information about the CloudWatch alarms. [See below](#alarms). +* `capacityProviderStrategy` - (Optional) Capacity provider strategies to use for the service. Can be one or more. These can be updated without destroying and recreating the service only if `force_new_deployment = true` and not changing from 0 `capacityProviderStrategy` blocks to greater than 0, or vice versa. See below. +* `cluster` - (Optional) ARN of an ECS cluster. +* `deploymentCircuitBreaker` - (Optional) Configuration block for deployment circuit breaker. See below. +* `deploymentController` - (Optional) Configuration block for deployment controller configuration. See below. +* `deploymentMaximumPercent` - (Optional) Upper limit (as a percentage of the service's desiredCount) of the number of running tasks that can be running in a service during a deployment. Not valid when using the `daemon` scheduling strategy. +* `deploymentMinimumHealthyPercent` - (Optional) Lower limit (as a percentage of the service's desiredCount) of the number of running tasks that must remain running and healthy in a service during a deployment. +* `desiredCount` - (Optional) Number of instances of the task definition to place and keep running. Defaults to 0. Do not specify if using the `daemon` scheduling strategy. +* `enableEcsManagedTags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the tasks within the service. +* `enableExecuteCommand` - (Optional) Specifies whether to enable Amazon ECS Exec for the tasks within the service. +* `forceNewDeployment` - (Optional) Enable to force a new task deployment of the service. This can be used to update tasks to use a newer Docker image with same image/tag combination (e.g., `myimage:latest`), roll Fargate tasks onto a newer platform version, or immediately deploy `orderedPlacementStrategy` and `placementConstraints` updates. +* `healthCheckGracePeriodSeconds` - (Optional) Seconds to ignore failing load balancer health checks on newly instantiated tasks to prevent premature shutdown, up to 2147483647. Only valid for services configured to use load balancers. +* `iamRole` - (Optional) ARN of the IAM role that allows Amazon ECS to make calls to your load balancer on your behalf. This parameter is required if you are using a load balancer with your service, but only if your task definition does not use the `awsvpc` network mode. If using `awsvpc` network mode, do not specify this role. If your account has already created the Amazon ECS service-linked role, that role is used by default for your service unless you specify a role here. +* `launchType` - (Optional) Launch type on which to run your service. The valid values are `ec2`, `fargate`, and `external`. Defaults to `ec2`. +* `loadBalancer` - (Optional) Configuration block for load balancers. See below. +* `networkConfiguration` - (Optional) Network configuration for the service. 
This parameter is required for task definitions that use the `awsvpc` network mode to receive their own Elastic Network Interface, and it is not supported for other network modes. See below. +* `orderedPlacementStrategy` - (Optional) Service level strategy rules that are taken into consideration during task placement. List from top to bottom in order of precedence. Updates to this configuration will take effect next task deployment unless `forceNewDeployment` is enabled. The maximum number of `orderedPlacementStrategy` blocks is `5`. See below. +* `placementConstraints` - (Optional) Rules that are taken into consideration during task placement. Updates to this configuration will take effect next task deployment unless `forceNewDeployment` is enabled. Maximum number of `placementConstraints` is `10`. See below. +* `platformVersion` - (Optional) Platform version on which to run your service. Only applicable for `launchType` set to `fargate`. Defaults to `latest`. More information about Fargate platform versions can be found in the [AWS ECS User Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/platform_versions.html). +* `propagateTags` - (Optional) Specifies whether to propagate the tags from the task definition or the service to the tasks. The valid values are `service` and `taskDefinition`. +* `schedulingStrategy` - (Optional) Scheduling strategy to use for the service. The valid values are `replica` and `daemon`. Defaults to `replica`. Note that [*Tasks using the Fargate launch type or the `codeDeploy` or `external` deployment controller types don't support the `daemon` scheduling strategy*](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_CreateService.html). +* `serviceConnectConfiguration` - (Optional) The ECS Service Connect configuration for this service to discover and connect to services, and be discovered by, and connected from, other services within a namespace. See below. +* `serviceRegistries` - (Optional) Service discovery registries for the service. The maximum number of `serviceRegistries` blocks is `1`. See below. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `taskDefinition` - (Optional) Family and revision (`family:revision`) or full ARN of the task definition that you want to run in your service. Required unless using the `external` deployment controller. If a revision is not specified, the latest `active` revision is used. +* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger an in-place update (redeployment). Useful with `timestamp()`. See example above. +* `waitForSteadyState` - (Optional) If `true`, Terraform will wait for the service to reach a steady state (like [`aws ecs wait services-stable`](https://docs.aws.amazon.com/cli/latest/reference/ecs/wait/services-stable.html)) before continuing. Default `false`. + +### alarms + +The `alarms` configuration block supports the following: + +* `alarmNames` - (Required) One or more CloudWatch alarm names. +* `enable` - (Required) Determines whether to use the CloudWatch alarm option in the service deployment process. +* `rollback` - (Required) Determines whether to configure Amazon ECS to roll back the service if a service deployment fails. 
If rollback is used, when a service deployment fails, the service is rolled back to the last deployment that completed successfully.
+
+### capacity_provider_strategy
+
+The `capacityProviderStrategy` configuration block supports the following:
+
+* `base` - (Optional) Number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined.
+* `capacityProvider` - (Required) Short name of the capacity provider.
+* `weight` - (Required) Relative percentage of the total number of launched tasks that should use the specified capacity provider.
+
+### deployment_circuit_breaker
+
+The `deploymentCircuitBreaker` configuration block supports the following:
+
+* `enable` - (Required) Whether to enable the deployment circuit breaker logic for the service.
+* `rollback` - (Required) Whether to enable Amazon ECS to roll back the service if a service deployment fails. If rollback is enabled, when a service deployment fails, the service is rolled back to the last deployment that completed successfully.
+
+### deployment_controller
+
+The `deploymentController` configuration block supports the following:
+
+* `type` - (Optional) Type of deployment controller. Valid values: `codeDeploy`, `ecs`, `external`. Default: `ecs`.
+
+### load_balancer
+
+`loadBalancer` supports the following:
+
+* `elbName` - (Required for ELB Classic) Name of the ELB (Classic) to associate with the service.
+* `targetGroupArn` - (Required for ALB/NLB) ARN of the Load Balancer target group to associate with the service.
+* `containerName` - (Required) Name of the container to associate with the load balancer (as it appears in a container definition).
+* `containerPort` - (Required) Port on the container to associate with the load balancer.
+
+-> **Version note:** Multiple `loadBalancer` configuration block support was added in Terraform AWS Provider version 2.22.0. This allows configuration of [ECS service support for multiple target groups](https://aws.amazon.com/about-aws/whats-new/2019/07/amazon-ecs-services-now-support-multiple-load-balancer-target-groups/).
+
+### network_configuration
+
+`networkConfiguration` supports the following:
+
+* `subnets` - (Required) Subnets associated with the task or service.
+* `securityGroups` - (Optional) Security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used.
+* `assignPublicIp` - (Optional) Assign a public IP address to the ENI (Fargate launch type only). Valid values are `true` or `false`. Default `false`.
+
+For more information, see [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html)
+
+### ordered_placement_strategy
+
+`orderedPlacementStrategy` supports the following:
+
+* `type` - (Required) Type of placement strategy. Must be one of: `binpack`, `random`, or `spread`.
+* `field` - (Optional) For the `spread` placement strategy, valid values are `instanceId` (or `host`,
+  which has the same effect), or any platform or custom attribute that is applied to a container instance.
+  For the `binpack` type, valid values are `memory` and `cpu`. For the `random` type, this attribute is not
+  needed. For more information, see [Placement Strategy](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_PlacementStrategy.html).
+
+-> **Note:** for `spread`, `host` and `instanceId` will be normalized, by AWS, to be `instanceId`.
This means the statefile will show `instanceId` but your config will differ if you use `host`.
+
+### placement_constraints
+
+`placementConstraints` supports the following:
+
+* `type` - (Required) Type of constraint. The only valid values at this time are `memberOf` and `distinctInstance`.
+* `expression` - (Optional) Cluster Query Language expression to apply to the constraint. Does not need to be specified for the `distinctInstance` type. For more information, see [Cluster Query Language in the Amazon EC2 Container Service Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html).
+
+### service_registries
+
+`serviceRegistries` supports the following:
+
+* `registryArn` - (Required) ARN of the Service Registry. The currently supported service registry is Amazon Route 53 Auto Naming Service (`awsServiceDiscoveryService`). For more information, see [Service](https://docs.aws.amazon.com/Route53/latest/APIReference/API_autonaming_Service.html)
+* `port` - (Optional) Port value used if your Service Discovery service specified an SRV record.
+* `containerPort` - (Optional) Port value, already specified in the task definition, to be used for your service discovery service.
+* `containerName` - (Optional) Container name value, already specified in the task definition, to be used for your service discovery service.
+
+### service_connect_configuration
+
+`serviceConnectConfiguration` supports the following:
+
+* `enabled` - (Required) Specifies whether to use Service Connect with this service.
+* `logConfiguration` - (Optional) The log configuration for the container. See below.
+* `namespace` - (Optional) The namespace name or ARN of the [`awsServiceDiscoveryHttpNamespace`](/docs/providers/aws/r/service_discovery_http_namespace.html) for use with Service Connect.
+* `service` - (Optional) The list of Service Connect service objects. See below.
+
+### log_configuration
+
+`logConfiguration` supports the following:
+
+* `logDriver` - (Required) The log driver to use for the container.
+* `options` - (Optional) The configuration options to send to the log driver.
+* `secretOption` - (Optional) The secrets to pass to the log configuration. See below.
+
+### secret_option
+
+`secretOption` supports the following:
+
+* `name` - (Required) The name of the secret.
+* `valueFrom` - (Required) The secret to expose to the container. The supported values are either the full ARN of the AWS Secrets Manager secret or the full ARN of the parameter in the SSM Parameter Store.
+
+### service
+
+`service` supports the following:
+
+* `clientAlias` - (Optional) The list of client aliases for this Service Connect service. You use these to assign names that can be used by client applications. The maximum number of client aliases that you can have in this list is 1. See below.
+* `discoveryName` - (Optional) The name of the new AWS Cloud Map service that Amazon ECS creates for this Amazon ECS service.
+* `ingressPortOverride` - (Optional) The port number for the Service Connect proxy to listen on.
+* `portName` - (Required) The name of one of the `portMappings` from all the containers in the task definition of this Amazon ECS service.
+
+### client_alias
+
+`clientAlias` supports the following:
+
+* `dnsName` - (Optional) The name that you use in the applications of client tasks to connect to this service.
+* `port` - (Required) The listening port number for the Service Connect proxy. This port is available inside of all of the tasks within the same namespace.
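+
+The Service Connect blocks above nest as plain objects and arrays in CDKTF. The following hand-written sketch (not `cdktf convert` output) shows a minimal shape; `awsEcsClusterExample` and `awsServiceDiscoveryHttpNamespaceExample` stand in for resources defined elsewhere in the stack, and the exact property types come from your generated provider bindings:
+
+```typescript
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { EcsService } from "./.gen/providers/aws/ecs-service";
+class ServiceConnectSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsService(this, "example", {
+      cluster: Token.asString(awsEcsClusterExample.id),
+      name: "example",
+      serviceConnectConfiguration: {
+        enabled: true,
+        namespace: Token.asString(awsServiceDiscoveryHttpNamespaceExample.arn),
+        service: [
+          {
+            // portName must match a portMappings name in the task definition.
+            portName: "api",
+            clientAlias: [
+              {
+                dnsName: "api.internal",
+                port: 8080,
+              },
+            ],
+          },
+        ],
+      },
+    });
+  }
+}
+```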
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `cluster` - Amazon Resource Name (ARN) of cluster which the service runs on.
+* `desiredCount` - Number of instances of the task definition.
+* `iamRole` - ARN of IAM role used for ELB.
+* `id` - ARN that identifies the service.
+* `name` - Name of the service.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `update` - (Default `20m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS services using the `name` together with the ECS cluster `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ECS services using the `name` together with the ECS cluster `name`. For example:
+
+```console
+% terraform import aws_ecs_service.imported cluster-name/service-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ecs_tag.html.markdown b/website/docs/cdktf/typescript/r/ecs_tag.html.markdown
new file mode 100644
index 00000000000..62b99800c2d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ecs_tag.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "ECS (Elastic Container)"
+layout: "aws"
+page_title: "AWS: aws_ecs_tag"
+description: |-
+  Manages an individual ECS resource tag
+---
+
+
+
+# Resource: aws_ecs_tag
+
+Manages an individual ECS resource tag. This resource should only be used in cases where ECS resources are created outside Terraform (e.g., ECS Clusters implicitly created by Batch Compute Environments).
+
+~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `awsEcsCluster` and `awsEcsTag` to manage tags of the same ECS Cluster will cause a perpetual difference where the `awsEcsCluster` resource will try to remove the tag being added by the `awsEcsTag` resource.
+
+~> **NOTE:** This tagging resource does not use the [provider `ignoreTags` configuration](/docs/providers/aws/index.html#ignore_tags).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
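+ * AWS Batch implicitly creates an ECS cluster for the compute environment below;
+ * the tag is applied to that cluster via `ecsClusterArn`.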
+ */ +import { BatchComputeEnvironment } from "./.gen/providers/aws/batch-compute-environment"; +import { EcsTag } from "./.gen/providers/aws/ecs-tag"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new BatchComputeEnvironment(this, "example", { + computeEnvironmentName: "example", + serviceRole: Token.asString(awsIamRoleExample.arn), + type: "UNMANAGED", + }); + const awsEcsTagExample = new EcsTag(this, "example_1", { + key: "Name", + resourceArn: example.ecsClusterArn, + value: "Hello World", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEcsTagExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resourceArn` - (Required) Amazon Resource Name (ARN) of the ECS resource to tag. +* `key` - (Required) Tag name. +* `value` - (Required) Tag value. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ECS resource identifier and key, separated by a comma (`,`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsEcsTag` using the ECS resource identifier and key, separated by a comma (`,`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsEcsTag` using the ECS resource identifier and key, separated by a comma (`,`). For example: + +```console +% terraform import aws_ecs_tag.example arn:aws:ecs:us-east-1:123456789012:cluster/example,Name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ecs_task_definition.html.markdown b/website/docs/cdktf/typescript/r/ecs_task_definition.html.markdown new file mode 100644 index 00000000000..34352385c1d --- /dev/null +++ b/website/docs/cdktf/typescript/r/ecs_task_definition.html.markdown @@ -0,0 +1,422 @@ +--- +subcategory: "ECS (Elastic Container)" +layout: "aws" +page_title: "AWS: aws_ecs_task_definition" +description: |- + Manages a revision of an ECS task definition. +--- + + + +# Resource: aws_ecs_task_definition + +Manages a revision of an ECS task definition to be used in `awsEcsService`. + +## Example Usage + +### Basic Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
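+ * The container definitions below are supplied inline via `Fn.jsonencode`.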
+ */ +import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsTaskDefinition(this, "service", { + containerDefinitions: Token.asString( + Fn.jsonencode([ + { + cpu: 10, + essential: true, + image: "service-first", + memory: 512, + name: "first", + portMappings: [ + { + containerPort: 80, + hostPort: 80, + }, + ], + }, + { + cpu: 10, + essential: true, + image: "service-second", + memory: 256, + name: "second", + portMappings: [ + { + containerPort: 443, + hostPort: 443, + }, + ], + }, + ]) + ), + family: "service", + placementConstraints: [ + { + expression: + "attribute:ecs.availability-zone in [us-west-2a, us-west-2b]", + type: "memberOf", + }, + ], + volume: [ + { + hostPath: "/ecs/service-storage", + name: "service-storage", + }, + ], + }); + } +} + +``` + +### With AppMesh Proxy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsTaskDefinition(this, "service", { + containerDefinitions: Token.asString( + Fn.file("task-definitions/service.json") + ), + family: "service", + proxyConfiguration: { + containerName: "applicationContainerName", + properties: { + AppPorts: "8080", + EgressIgnoredIPs: "169.254.170.2,169.254.169.254", + IgnoredUID: "1337", + ProxyEgressPort: Token.asString(15001), + ProxyIngressPort: Token.asString(15000), + }, + type: "APPMESH", + }, + }); + } +} + +``` + +### Example Using `dockerVolumeConfiguration` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsTaskDefinition(this, "service", { + containerDefinitions: Token.asString( + Fn.file("task-definitions/service.json") + ), + family: "service", + volume: [ + { + dockerVolumeConfiguration: { + autoprovision: true, + driver: "local", + driverOpts: { + device: "${" + fs.dnsName + "}:/", + o: + "addr=${" + + fs.dnsName + + "},rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport", + type: "nfs", + }, + scope: "shared", + }, + name: "service-storage", + }, + ], + }); + } +} + +``` + +### Example Using `efsVolumeConfiguration` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
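+ * `fs` and `test` refer to an EFS file system and access point assumed to be
+ * defined elsewhere in the stack.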
+ */ +import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsTaskDefinition(this, "service", { + containerDefinitions: Token.asString( + Fn.file("task-definitions/service.json") + ), + family: "service", + volume: [ + { + efsVolumeConfiguration: { + authorizationConfig: { + accessPointId: test.id, + iam: "ENABLED", + }, + fileSystemId: fs.id, + rootDirectory: "/opt/data", + transitEncryption: "ENABLED", + transitEncryptionPort: 2999, + }, + name: "service-storage", + }, + ], + }); + } +} + +``` + +### Example Using `fsxWindowsFileServerVolumeConfiguration` + +```terraform +resource "aws_ecs_task_definition" "service" { + family = "service" + container_definitions = file("task-definitions/service.json") + + volume { + name = "service-storage" + + fsx_windows_file_server_volume_configuration { + file_system_id = aws_fsx_windows_file_system.test.id + root_directory = "\\data" + + authorization_config { + credentials_parameter = aws_secretsmanager_secret_version.test.arn + domain = aws_directory_service_directory.test.name + } + } + } +} + +resource "aws_secretsmanager_secret_version" "test" { + secret_id = aws_secretsmanager_secret.test.id + secret_string = jsonencode({ username : "admin", password : aws_directory_service_directory.test.password }) +} +``` + +### Example Using `containerDefinitions` and `inferenceAccelerator` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EcsTaskDefinition(this, "test", { + containerDefinitions: + '[\n {\n "cpu": 10,\n "command": ["sleep", "10"],\n "entryPoint": ["/"],\n "environment": [\n {"name": "VARNAME", "value": "VARVAL"}\n ],\n "essential": true,\n "image": "jenkins",\n "memory": 128,\n "name": "jenkins",\n "portMappings": [\n {\n "containerPort": 80,\n "hostPort": 8080\n }\n ],\n "resourceRequirements":[\n {\n "type":"InferenceAccelerator",\n "value":"device_1"\n }\n ]\n }\n]\n\n', + family: "test", + inferenceAccelerator: [ + { + deviceName: "device_1", + deviceType: "eia1.medium", + }, + ], + }); + } +} + +``` + +### Example Using `runtimePlatform` and `fargate` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
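+ * Fargate requires the `awsvpc` network mode; the runtime platform below targets
+ * Windows Server 2019 Core on X86_64.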
+ */
+import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsTaskDefinition(this, "test", {
+      containerDefinitions:
+        '[\n  {\n    "name": "iis",\n    "image": "mcr.microsoft.com/windows/servercore/iis",\n    "cpu": 1024,\n    "memory": 2048,\n    "essential": true\n  }\n]\n\n',
+      cpu: Token.asString(1024),
+      family: "test",
+      memory: Token.asString(2048),
+      networkMode: "awsvpc",
+      requiresCompatibilities: ["FARGATE"],
+      runtimePlatform: {
+        cpuArchitecture: "X86_64",
+        operatingSystemFamily: "WINDOWS_SERVER_2019_CORE",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** Proper escaping is required for JSON field values containing quotes (`"`) such as `environment` values. If directly setting the JSON, they should be escaped as `\"` in the JSON, e.g., `"value": "I \"love\" escaped quotes"`. If using a Terraform variable value, they should be escaped as `\\\"` in the variable, e.g., `value = "I \\\"love\\\" escaped quotes"` in the variable and `"value": "${var.myvariable}"` in the JSON.
+
+The following arguments are required:
+
+* `containerDefinitions` - (Required) A list of valid [container definitions](http://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_ContainerDefinition.html) provided as a single valid JSON document. Please note that you should only provide values that are part of the container definition document. For a detailed description of what parameters are available, see the [Task Definition Parameters](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definition_parameters.html) section from the official [Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide).
+* `family` - (Required) A unique name for your task definition.
+
+The following arguments are optional:
+
+* `cpu` - (Optional) Number of cpu units used by the task. If `requiresCompatibilities` includes `fargate`, this field is required.
+* `executionRoleArn` - (Optional) ARN of the task execution role that the Amazon ECS container agent and the Docker daemon can assume.
+* `inferenceAccelerator` - (Optional) Configuration block(s) with Inference Accelerators settings. [Detailed below.](#inference_accelerator)
+* `ipcMode` - (Optional) IPC resource namespace to be used for the containers in the task. The valid values are `host`, `task`, and `none`.
+* `memory` - (Optional) Amount (in MiB) of memory used by the task. If `requiresCompatibilities` includes `fargate`, this field is required.
+* `networkMode` - (Optional) Docker networking mode to use for the containers in the task. Valid values are `none`, `bridge`, `awsvpc`, and `host`.
+* `runtimePlatform` - (Optional) Configuration block for [runtime_platform](#runtime_platform) that containers in your task may use.
+* `pidMode` - (Optional) Process namespace to use for the containers in the task. The valid values are `host` and `task`.
+* `placementConstraints` - (Optional) Configuration block for rules that are taken into consideration during task placement. Maximum number of `placementConstraints` is `10`. [Detailed below](#placement_constraints).
+* `proxyConfiguration` - (Optional) Configuration block for the App Mesh proxy. [Detailed below.](#proxy_configuration)
+* `ephemeralStorage` - (Optional) The amount of ephemeral storage to allocate for the task.
This parameter is used to expand the total amount of ephemeral storage available, beyond the default amount, for tasks hosted on AWS Fargate. See [Ephemeral Storage](#ephemeral_storage). +* `requiresCompatibilities` - (Optional) Set of launch types required by the task. The valid values are `ec2` and `fargate`. +* `skipDestroy` - (Optional) Whether to retain the old revision when the resource is destroyed or replacement is necessary. Default is `false`. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `taskRoleArn` - (Optional) ARN of IAM role that allows your Amazon ECS container task to make calls to other AWS services. +* `volume` - (Optional) Configuration block for [volumes](#volume) that containers in your task may use. Detailed below. + +### volume + +* `dockerVolumeConfiguration` - (Optional) Configuration block to configure a [docker volume](#docker_volume_configuration). Detailed below. +* `efsVolumeConfiguration` - (Optional) Configuration block for an [EFS volume](#efs_volume_configuration). Detailed below. +* `fsxWindowsFileServerVolumeConfiguration` - (Optional) Configuration block for an [FSX Windows File Server volume](#fsx_windows_file_server_volume_configuration). Detailed below. +* `hostPath` - (Optional) Path on the host container instance that is presented to the container. If not set, ECS will create a nonpersistent data volume that starts empty and is deleted after the task has finished. +* `name` - (Required) Name of the volume. This name is referenced in the `sourceVolume` +parameter of container definition in the `mountPoints` section. + +### docker_volume_configuration + +For more information, see [Specifying a Docker volume in your Task Definition Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/docker-volumes.html#specify-volume-config) + +* `autoprovision` - (Optional) If this value is `true`, the Docker volume is created if it does not already exist. *Note*: This field is only used if the scope is `shared`. +* `driverOpts` - (Optional) Map of Docker driver specific options. +* `driver` - (Optional) Docker volume driver to use. The driver value must match the driver name provided by Docker because it is used for task placement. +* `labels` - (Optional) Map of custom metadata to add to your Docker volume. +* `scope` - (Optional) Scope for the Docker volume, which determines its lifecycle, either `task` or `shared`. Docker volumes that are scoped to a `task` are automatically provisioned when the task starts and destroyed when the task stops. Docker volumes that are scoped as `shared` persist after the task stops. + +### efs_volume_configuration + +For more information, see [Specifying an EFS volume in your Task Definition Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/efs-volumes.html#specify-efs-config) + +* `fileSystemId` - (Required) ID of the EFS File System. +* `rootDirectory` - (Optional) Directory within the Amazon EFS file system to mount as the root directory inside the host. If this parameter is omitted, the root of the Amazon EFS volume will be used. Specifying / will have the same effect as omitting this parameter. This argument is ignored when using `authorizationConfig`. 
+* `transitEncryption` - (Optional) Whether or not to enable encryption for Amazon EFS data in transit between the Amazon ECS host and the Amazon EFS server. Transit encryption must be enabled if Amazon EFS IAM authorization is used. Valid values: `enabled`, `disabled`. If this parameter is omitted, the default value of `disabled` is used.
+* `transitEncryptionPort` - (Optional) Port to use for transit encryption. If you do not specify a transit encryption port, it will use the port selection strategy that the Amazon EFS mount helper uses.
+* `authorizationConfig` - (Optional) Configuration block for [authorization](#authorization_config) for the Amazon EFS file system. Detailed below.
+
+### runtime_platform
+
+* `operatingSystemFamily` - (Optional) Required when `requiresCompatibilities` is `fargate`; must be set to a valid option from the [operating system family in the runtime platform](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definition_parameters.html#runtime-platform) setting.
+* `cpuArchitecture` - (Optional) Must be set to either `X86_64` or `ARM64`; see [cpu architecture](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_definition_parameters.html#runtime-platform).
+
+#### authorization_config
+
+* `accessPointId` - (Optional) Access point ID to use. If an access point is specified, the root directory value will be relative to the directory set for the access point. If specified, transit encryption must be enabled in the EFSVolumeConfiguration.
+* `iam` - (Optional) Whether or not to use the Amazon ECS task IAM role defined in a task definition when mounting the Amazon EFS file system. If enabled, transit encryption must be enabled in the EFSVolumeConfiguration. Valid values: `enabled`, `disabled`. If this parameter is omitted, the default value of `disabled` is used.
+
+### fsx_windows_file_server_volume_configuration
+
+For more information, see [Specifying an FSX Windows File Server volume in your Task Definition Developer Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/tutorial-wfsx-volumes.html)
+
+* `fileSystemId` - (Required) The Amazon FSx for Windows File Server file system ID to use.
+* `rootDirectory` - (Required) The directory within the Amazon FSx for Windows File Server file system to mount as the root directory inside the host.
+* `authorizationConfig` - (Required) Configuration block for [authorization](#authorization_config) for the Amazon FSx for Windows File Server file system detailed below.
+
+#### authorization_config
+
+* `credentialsParameter` - (Required) The authorization credential option to use. The authorization credential options can be provided using either the Amazon Resource Name (ARN) of an AWS Secrets Manager secret or AWS Systems Manager Parameter Store parameter. The ARNs refer to the stored credentials.
+* `domain` - (Required) A fully qualified domain name hosted by an AWS Directory Service Managed Microsoft AD (Active Directory) or self-hosted AD on Amazon EC2.
+
+### placement_constraints
+
+* `expression` - (Optional) Cluster Query Language expression to apply to the constraint. For more information, see [Cluster Query Language in the Amazon EC2 Container Service Developer Guide](http://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html).
+* `type` - (Required) Type of constraint. Use `memberOf` to restrict selection to a group of valid candidates. Note that `distinctInstance` is not supported in task definitions.
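+
+The `fsxWindowsFileServerVolumeConfiguration` example earlier on this page is shown in HCL only. A hand-written CDKTF sketch of the same volume (not `cdktf convert` output; the `awsFsxWindowsFileSystemTest`, `awsSecretsmanagerSecretVersionTest`, and `awsDirectoryServiceDirectoryTest` references are assumed to be defined elsewhere in the stack) could look like:
+
+```typescript
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { EcsTaskDefinition } from "./.gen/providers/aws/ecs-task-definition";
+class FsxVolumeSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsTaskDefinition(this, "service", {
+      containerDefinitions: Token.asString(
+        Fn.file("task-definitions/service.json")
+      ),
+      family: "service",
+      volume: [
+        {
+          name: "service-storage",
+          fsxWindowsFileServerVolumeConfiguration: {
+            fileSystemId: Token.asString(awsFsxWindowsFileSystemTest.id),
+            // "\\data" is the literal path \data on the Windows file share.
+            rootDirectory: "\\data",
+            authorizationConfig: {
+              credentialsParameter: Token.asString(
+                awsSecretsmanagerSecretVersionTest.arn
+              ),
+              domain: Token.asString(awsDirectoryServiceDirectoryTest.name),
+            },
+          },
+        },
+      ],
+    });
+  }
+}
+```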
+
+### proxy_configuration
+
+* `containerName` - (Required) Name of the container that will serve as the App Mesh proxy.
+* `properties` - (Required) Set of network configuration parameters to provide the Container Network Interface (CNI) plugin, specified as a key-value mapping.
+* `type` - (Optional) Proxy type. The default and only supported value is `appmesh`.
+
+### ephemeral_storage
+
+* `sizeInGib` - (Required) The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is `21` GiB and the maximum supported value is `200` GiB.
+
+### inference_accelerator
+
+* `deviceName` - (Required) Elastic Inference accelerator device name. The `deviceName` must also be referenced in a container definition as a `ResourceRequirement`.
+* `deviceType` - (Required) Elastic Inference accelerator type to use.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Full ARN of the Task Definition (including both `family` and `revision`).
+* `arnWithoutRevision` - ARN of the Task Definition with the trailing `revision` removed. This may be useful for situations where the latest task definition is always desired. If a revision isn't specified, the latest ACTIVE revision is used. See the [AWS documentation](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_StartTask.html#ECS-StartTask-request-taskDefinition) for details.
+* `revision` - Revision of the task in a particular family.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Task Definitions using their ARNs. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ECS Task Definitions using their ARNs. For example:
+
+```console
+% terraform import aws_ecs_task_definition.example arn:aws:ecs:us-east-1:012345678910:task-definition/mytaskfamily:123
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ecs_task_set.html.markdown b/website/docs/cdktf/typescript/r/ecs_task_set.html.markdown
new file mode 100644
index 00000000000..491d03cde31
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ecs_task_set.html.markdown
@@ -0,0 +1,186 @@
+---
+subcategory: "ECS (Elastic Container)"
+layout: "aws"
+page_title: "AWS: aws_ecs_task_set"
+description: |-
+  Provides an ECS task set.
+---
+
+
+
+# Resource: aws_ecs_task_set
+
+Provides an ECS task set - effectively a task that is expected to run until an error occurs or a user terminates it (typically a web server or a database).
+
+See [ECS Task Set section in AWS developer guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/deployment-type-external.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EcsTaskSet } from "./.gen/providers/aws/ecs-task-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsTaskSet(this, "example", {
+      cluster: Token.asString(awsEcsClusterExample.id),
+      loadBalancer: [
+        {
+          containerName: "mongo",
+          containerPort: 8080,
+          targetGroupArn: Token.asString(awsLbTargetGroupExample.arn),
+        },
+      ],
+      service: Token.asString(awsEcsServiceExample.id),
+      taskDefinition: Token.asString(awsEcsTaskDefinitionExample.arn),
+    });
+  }
+}
+
+```
+
+### Ignoring Changes to Scale
+
+You can utilize the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignoreChanges` to create an ECS task set with an initial `scale`, then ignore any changes to that scale caused externally (e.g., Application Auto Scaling).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EcsTaskSet } from "./.gen/providers/aws/ecs-task-set";
+interface MyConfig {
+  cluster: any;
+  service: any;
+  taskDefinition: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new EcsTaskSet(this, "example", {
+      lifecycle: {
+        ignoreChanges: ["scale"],
+      },
+      scale: {
+        value: 50,
+      },
+      cluster: config.cluster,
+      service: config.service,
+      taskDefinition: config.taskDefinition,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `service` - (Required) The short name or ARN of the ECS service.
+* `cluster` - (Required) The short name or ARN of the cluster that hosts the service to create the task set in.
+* `taskDefinition` - (Required) The family and revision (`family:revision`) or full ARN of the task definition that you want to run in your service.
+
+The following arguments are optional:
+
+* `capacityProviderStrategy` - (Optional) The capacity provider strategy to use for the service. Can be one or more. [Defined below](#capacity_provider_strategy).
+* `externalId` - (Optional) The external ID associated with the task set.
+* `forceDelete` - (Optional) Whether to allow deleting the task set without waiting for scaling down to 0. You can force a task set to delete even if it's in the process of scaling a resource. Normally, Terraform drains all the tasks before deleting the task set. This bypasses that behavior and potentially leaves resources dangling.
+* `launchType` - (Optional) The launch type on which to run your service. The valid values are `ec2`, `fargate`, and `external`. Defaults to `ec2`.
+* `loadBalancer` - (Optional) Details on load balancers that are used with a task set. [Detailed below](#load_balancer).
+* `platformVersion` - (Optional) The platform version on which to run your service. Only applicable for `launchType` set to `fargate`. Defaults to `latest`.
More information about Fargate platform versions can be found in the [AWS ECS User Guide](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/platform_versions.html).
+* `networkConfiguration` - (Optional) The network configuration for the service. This parameter is required for task definitions that use the `awsvpc` network mode to receive their own Elastic Network Interface, and it is not supported for other network modes. [Detailed below](#network_configuration).
+* `scale` - (Optional) A floating-point percentage of the desired number of tasks to place and keep running in the task set. [Detailed below](#scale).
+* `serviceRegistries` - (Optional) The service discovery registries for the service. The maximum number of `serviceRegistries` blocks is `1`. [Detailed below](#service_registries).
+* `tags` - (Optional) A map of tags to assign to the task set. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `waitUntilStable` - (Optional) Whether `terraform` should wait until the task set has reached `steadyState`.
+* `waitUntilStableTimeout` - (Optional) Wait timeout for task set to reach `steadyState`. Valid time units include `ns`, `us` (or `µs`), `ms`, `s`, `m`, and `h`. Default `10M`.
+
+## capacity_provider_strategy
+
+The `capacityProviderStrategy` configuration block supports the following:
+
+* `capacityProvider` - (Required) The short name or full Amazon Resource Name (ARN) of the capacity provider.
+* `weight` - (Required) The relative percentage of the total number of launched tasks that should use the specified capacity provider.
+* `base` - (Optional) The number of tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined.
+
+## load_balancer
+
+The `loadBalancer` configuration block supports the following:
+
+* `containerName` - (Required) The name of the container to associate with the load balancer (as it appears in a container definition).
+* `loadBalancerName` - (Optional, Required for ELB Classic) The name of the ELB (Classic) to associate with the service.
+* `targetGroupArn` - (Optional, Required for ALB/NLB) The ARN of the Load Balancer target group to associate with the service.
+* `containerPort` - (Optional) The port on the container to associate with the load balancer. Defaults to `0` if not specified.
+
+~> **Note:** Specifying multiple `loadBalancer` configurations is still not supported by AWS for ECS task sets.
+
+## network_configuration
+
+The `networkConfiguration` configuration block supports the following:
+
+* `subnets` - (Required) The subnets associated with the task or service. Maximum of 16.
+* `securityGroups` - (Optional) The security groups associated with the task or service. If you do not specify a security group, the default security group for the VPC is used. Maximum of 5.
+* `assignPublicIp` - (Optional) Whether to assign a public IP address to the ENI (`fargate` launch type only). Valid values are `true` or `false`. Default `false`.
+
+For more information, see [Task Networking](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-networking.html).
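+
+As an illustration of the strategy and networking blocks above, here is a minimal hand-written sketch (not produced by `cdktf convert`). The subnet ID is a placeholder, and the cluster, service, and task definition references are assumed to exist elsewhere in the stack.
+
+```typescript
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { EcsTaskSet } from "./.gen/providers/aws/ecs-task-set";
+
+class TaskSetNetworkingSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EcsTaskSet(this, "example", {
+      cluster: Token.asString(awsEcsClusterExample.id),
+      service: Token.asString(awsEcsServiceExample.id),
+      taskDefinition: Token.asString(awsEcsTaskDefinitionExample.arn),
+      // Keep one task on FARGATE and weight the rest toward FARGATE_SPOT.
+      capacityProviderStrategy: [
+        { capacityProvider: "FARGATE", weight: 1, base: 1 },
+        { capacityProvider: "FARGATE_SPOT", weight: 4 },
+      ],
+      // Required because the task definition uses the awsvpc network mode.
+      networkConfiguration: {
+        subnets: ["subnet-12345678"], // placeholder subnet ID
+        assignPublicIp: false,
+      },
+    });
+  }
+}
+```
+
+Note that `capacityProviderStrategy` is used here instead of `launchType`; the two approaches are alternatives for selecting where tasks run.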
+
+## scale
+
+The `scale` configuration block supports the following:
+
+* `unit` - (Optional) The unit of measure for the scale value. Default: `percent`.
+* `value` - (Optional) The value, specified as a percent total of a service's `desiredCount`, to scale the task set. Defaults to `0` if not specified. Accepted values are numbers between 0.0 and 100.0.
+
+## service_registries
+
+The `serviceRegistries` configuration block supports the following:
+
+* `registryArn` - (Required) The ARN of the Service Registry. The currently supported service registry is Amazon Route 53 Auto Naming Service ([`awsServiceDiscoveryService` resource](/docs/providers/aws/r/service_discovery_service.html)). For more information, see [Service](https://docs.aws.amazon.com/Route53/latest/APIReference/API_autonaming_Service.html).
+* `port` - (Optional) The port value used if your Service Discovery service specified an SRV record.
+* `containerPort` - (Optional) The port value, already specified in the task definition, to be used for your service discovery service.
+* `containerName` - (Optional) The container name value, already specified in the task definition, to be used for your service discovery service.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `taskSetId`, `service`, and `cluster` separated by commas (`,`).
+* `arn` - The Amazon Resource Name (ARN) that identifies the task set.
+* `stabilityStatus` - The stability status. This indicates whether the task set has reached a steady state.
+* `status` - The status of the task set.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `taskSetId` - The ID of the task set.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ECS Task Sets using the `taskSetId`, `service`, and `cluster` separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ECS Task Sets using the `taskSetId`, `service`, and `cluster` separated by commas (`,`). For example:
+
+```console
+% terraform import aws_ecs_task_set.example ecs-svc/7177320696926227436,arn:aws:ecs:us-west-2:123456789101:service/example/example-1234567890,arn:aws:ecs:us-west-2:123456789101:cluster/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/efs_access_point.html.markdown b/website/docs/cdktf/typescript/r/efs_access_point.html.markdown
new file mode 100644
index 00000000000..b6271be341b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/efs_access_point.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_access_point"
+description: |-
+  Provides an Elastic File System (EFS) access point.
+---
+
+
+
+# Resource: aws_efs_access_point
+
+Provides an Elastic File System (EFS) access point.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EfsAccessPoint } from "./.gen/providers/aws/efs-access-point";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EfsAccessPoint(this, "test", {
+      fileSystemId: foo.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `fileSystemId` - (Required) ID of the file system for which the access point is intended.
+* `posixUser` - (Optional) Operating system user and group applied to all file system requests made using the access point. [Detailed](#posix_user) below.
+* `rootDirectory` - (Optional) Directory on the Amazon EFS file system that the access point provides access to. [Detailed](#root_directory) below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### posix_user
+
+* `gid` - (Required) POSIX group ID used for all file system operations using this access point.
+* `secondaryGids` - (Optional) Secondary POSIX group IDs used for all file system operations using this access point.
+* `uid` - (Required) POSIX user ID used for all file system operations using this access point.
+
+### root_directory
+
+The access point exposes the specified file system path as the root directory of your file system to applications using the access point. NFS clients using the access point can only access data in the access point's RootDirectory and its subdirectories.
+
+* `creationInfo` - (Optional) POSIX IDs and permissions to apply to the access point's Root Directory. See [Creation Info](#creation_info) below.
+* `path` - (Optional) Path on the EFS file system to expose as the root directory to NFS clients using the access point to access the EFS file system. A path can have up to four subdirectories. If the specified path does not exist, you are required to provide `creationInfo`.
+
+### creation_info
+
+If the `path` specified does not exist, EFS creates the root directory using the `creationInfo` settings when a client connects to an access point.
+
+* `ownerGid` - (Required) POSIX group ID to apply to the `rootDirectory`.
+* `ownerUid` - (Required) POSIX user ID to apply to the `rootDirectory`.
+* `permissions` - (Required) POSIX permissions to apply to the RootDirectory, in the format of an octal number representing the file's mode bits.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the access point.
+* `fileSystemArn` - ARN of the file system.
+* `id` - ID of the access point.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS access points using the `id`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the EFS access points using the `id`. For example: + +```console +% terraform import aws_efs_access_point.test fsap-52a643fb +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/efs_backup_policy.html.markdown b/website/docs/cdktf/typescript/r/efs_backup_policy.html.markdown new file mode 100644 index 00000000000..fbff00fa817 --- /dev/null +++ b/website/docs/cdktf/typescript/r/efs_backup_policy.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_backup_policy" +description: |- + Provides an Elastic File System (EFS) Backup Policy resource. +--- + + + +# Resource: aws_efs_backup_policy + +Provides an Elastic File System (EFS) Backup Policy resource. +Backup policies turn automatic backups on or off for an existing file system. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EfsBackupPolicy } from "./.gen/providers/aws/efs-backup-policy"; +import { EfsFileSystem } from "./.gen/providers/aws/efs-file-system"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const fs = new EfsFileSystem(this, "fs", { + creationToken: "my-product", + }); + new EfsBackupPolicy(this, "policy", { + backupPolicy: { + status: "ENABLED", + }, + fileSystemId: fs.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `fileSystemId` - (Required) The ID of the EFS file system. +* `backupPolicy` - (Required) A backup_policy object (documented below). + +### Backup Policy Arguments + +`backupPolicy` supports the following arguments: + +* `status` - (Required) A status of the backup policy. Valid values: `enabled`, `disabled`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID that identifies the file system (e.g., fs-ccfc0d65). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS backup policies using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the EFS backup policies using the `id`. 
For example: + +```console +% terraform import aws_efs_backup_policy.example fs-6fa144c6 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/efs_file_system.html.markdown b/website/docs/cdktf/typescript/r/efs_file_system.html.markdown new file mode 100644 index 00000000000..13e178992e2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/efs_file_system.html.markdown @@ -0,0 +1,134 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_file_system" +description: |- + Provides an Elastic File System (EFS) File System resource. +--- + + + +# Resource: aws_efs_file_system + +Provides an Elastic File System (EFS) File System resource. + +## Example Usage + +### EFS File System w/ tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EfsFileSystem } from "./.gen/providers/aws/efs-file-system"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EfsFileSystem(this, "foo", { + creationToken: "my-product", + tags: { + Name: "MyProduct", + }, + }); + } +} + +``` + +### Using lifecycle policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EfsFileSystem } from "./.gen/providers/aws/efs-file-system"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EfsFileSystem(this, "foo_with_lifecyle_policy", { + creationToken: "my-product", + lifecyclePolicy: [ + { + transitionToIa: "AFTER_30_DAYS", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `availabilityZoneName` - (Optional) the AWS Availability Zone in which to create the file system. Used to create a file system that uses One Zone storage classes. See [user guide](https://docs.aws.amazon.com/efs/latest/ug/storage-classes.html) for more information. +* `creationToken` - (Optional) A unique name (a maximum of 64 characters are allowed) +used as reference when creating the Elastic File System to ensure idempotent file +system creation. By default generated by Terraform. See [Elastic File System](http://docs.aws.amazon.com/efs/latest/ug/) +user guide for more information. +* `encrypted` - (Optional) If true, the disk will be encrypted. +* `kmsKeyId` - (Optional) The ARN for the KMS encryption key. When specifying kms_key_id, encrypted needs to be set to true. +* `lifecyclePolicy` - (Optional) A file system [lifecycle policy](https://docs.aws.amazon.com/efs/latest/ug/API_LifecyclePolicy.html) object (documented below). +* `performanceMode` - (Optional) The file system performance mode. Can be either `"generalPurpose"` or `"maxIo"` (Default: `"generalPurpose"`). +* `provisionedThroughputInMibps` - (Optional) The throughput, measured in MiB/s, that you want to provision for the file system. Only applicable with `throughputMode` set to `provisioned`. +* `tags` - (Optional) A map of tags to assign to the file system. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `throughputMode` - (Optional) Throughput mode for the file system. Defaults to `bursting`. Valid values: `bursting`, `provisioned`, or `elastic`. When using `provisioned`, also set `provisionedThroughputInMibps`.
+
+### Lifecycle Policy Arguments
+
+`lifecyclePolicy` supports the following arguments:
+
+* `transitionToIa` - (Optional) Indicates how long it takes to transition files to the IA storage class. Valid values: `after1Day`, `after7Days`, `after14Days`, `after30Days`, `after60Days`, or `after90Days`.
+* `transitionToPrimaryStorageClass` - (Optional) Describes the policy used to transition a file from infrequent access storage to primary storage. Valid values: `after1Access`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `availabilityZoneId` - The identifier of the Availability Zone in which the file system's One Zone storage classes exist.
+* `id` - The ID that identifies the file system (e.g., fs-ccfc0d65).
+* `dnsName` - The DNS name for the filesystem per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html).
+* `ownerId` - The AWS account that created the file system. If the file system was created by an IAM user, the parent account to which the user belongs is the owner.
+* `numberOfMountTargets` - The current number of mount targets that the file system has.
+* `sizeInBytes` - The latest known metered size (in bytes) of data stored in the file system; the value is not the exact size that the file system was at any point in time. See [Size In Bytes](#size-in-bytes).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### Size In Bytes
+
+* `value` - The latest known metered size (in bytes) of data stored in the file system.
+* `valueInIa` - The latest known metered size (in bytes) of data stored in the Infrequent Access storage class.
+* `valueInStandard` - The latest known metered size (in bytes) of data stored in the Standard storage class.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS file systems using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import the EFS file systems using the `id`.
For example:
+
+```console
+% terraform import aws_efs_file_system.foo fs-6fa144c6
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/efs_file_system_policy.html.markdown b/website/docs/cdktf/typescript/r/efs_file_system_policy.html.markdown
new file mode 100644
index 00000000000..09d3b0ca0db
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/efs_file_system_policy.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "EFS (Elastic File System)"
+layout: "aws"
+page_title: "AWS: aws_efs_file_system_policy"
+description: |-
+  Provides an Elastic File System (EFS) File System Policy resource.
+---
+
+
+
+# Resource: aws_efs_file_system_policy
+
+Provides an Elastic File System (EFS) File System Policy resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { EfsFileSystem } from "./.gen/providers/aws/efs-file-system";
+import { EfsFileSystemPolicy } from "./.gen/providers/aws/efs-file-system-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const fs = new EfsFileSystem(this, "fs", {
+      creationToken: "my-product",
+    });
+    const policy = new DataAwsIamPolicyDocument(this, "policy", {
+      statement: [
+        {
+          actions: [
+            "elasticfilesystem:ClientMount",
+            "elasticfilesystem:ClientWrite",
+          ],
+          condition: [
+            {
+              test: "Bool",
+              values: ["true"],
+              variable: "aws:SecureTransport",
+            },
+          ],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["*"],
+              type: "AWS",
+            },
+          ],
+          resources: [fs.arn],
+          sid: "ExampleStatement01",
+        },
+      ],
+    });
+    const awsEfsFileSystemPolicyPolicy = new EfsFileSystemPolicy(
+      this,
+      "policy_2",
+      {
+        fileSystemId: fs.id,
+        policy: Token.asString(policy.json),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsEfsFileSystemPolicyPolicy.overrideLogicalId("policy");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `fileSystemId` - (Required) The ID of the EFS file system.
+* `policy` - (Required) The JSON formatted file system policy for the EFS file system. See [Docs](https://docs.aws.amazon.com/efs/latest/ug/access-control-overview.html#access-control-manage-access-intro-resource-policies) for more information.
+
+The following arguments are optional:
+
+* `bypassPolicyLockoutSafetyCheck` - (Optional) A flag to indicate whether to bypass the `awsEfsFileSystemPolicy` lockout safety check. The policy lockout safety check determines whether the policy in the request will lock out, or prevent, the principal that is making the request from making future `putFileSystemPolicy` requests on the file system. Set `bypassPolicyLockoutSafetyCheck` to `true` only when you intend to prevent the principal that is making the request from making subsequent `putFileSystemPolicy` requests on the file system. The default value is `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID that identifies the file system (e.g., fs-ccfc0d65).
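+
+As a hand-written sketch of the lockout safety bypass described above (not produced by `cdktf convert`): the file system ID is a placeholder, and the deny-insecure-transport policy is only an illustrative example.
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { EfsFileSystemPolicy } from "./.gen/providers/aws/efs-file-system-policy";
+
+class LockoutBypassSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EfsFileSystemPolicy(this, "example", {
+      fileSystemId: "fs-ccfc0d65", // placeholder file system ID
+      // Skip the safety check that would reject a policy which locks the
+      // requesting principal out of future putFileSystemPolicy calls.
+      bypassPolicyLockoutSafetyCheck: true,
+      policy: JSON.stringify({
+        Version: "2012-10-17",
+        Statement: [
+          {
+            Sid: "DenyUnsecuredTransport",
+            Effect: "Deny",
+            Principal: { AWS: "*" },
+            Action: "*",
+            Condition: { Bool: { "aws:SecureTransport": "false" } },
+          },
+        ],
+      }),
+    });
+  }
+}
+```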
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS file system policies using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the EFS file system policies using the `id`. For example: + +```console +% terraform import aws_efs_file_system_policy.foo fs-6fa144c6 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/efs_mount_target.html.markdown b/website/docs/cdktf/typescript/r/efs_mount_target.html.markdown new file mode 100644 index 00000000000..b0060e9d7fa --- /dev/null +++ b/website/docs/cdktf/typescript/r/efs_mount_target.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_mount_target" +description: |- + Provides an Elastic File System (EFS) mount target. +--- + + + +# Resource: aws_efs_mount_target + +Provides an Elastic File System (EFS) mount target. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EfsMountTarget } from "./.gen/providers/aws/efs-mount-target"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new Vpc(this, "foo", { + cidrBlock: "10.0.0.0/16", + }); + const alpha = new Subnet(this, "alpha", { + availabilityZone: "us-west-2a", + cidrBlock: "10.0.1.0/24", + vpcId: foo.id, + }); + const awsEfsMountTargetAlpha = new EfsMountTarget(this, "alpha_2", { + fileSystemId: Token.asString(awsEfsFileSystemFoo.id), + subnetId: alpha.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEfsMountTargetAlpha.overrideLogicalId("alpha"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `fileSystemId` - (Required) The ID of the file system for which the mount target is intended. +* `subnetId` - (Required) The ID of the subnet to add the mount target in. +* `ipAddress` - (Optional) The address (within the address range of the specified subnet) at +which the file system may be mounted via the mount target. +* `securityGroups` - (Optional) A list of up to 5 VPC security group IDs (that must +be for the same VPC as subnet specified) in effect for the mount target. + +## Attribute Reference + +~> **Note:** The `dnsName` and `mountTargetDnsName` attributes are only useful if the mount target is in a VPC that has +support for DNS hostnames enabled. See [Using DNS with Your VPC](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-dns.html) +and [VPC resource](/docs/providers/aws/r/vpc.html#enable_dns_hostnames) in Terraform for more information. + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the mount target. 
+* `dnsName` - The DNS name for the EFS file system. +* `mountTargetDnsName` - The DNS name for the given subnet/AZ per [documented convention](http://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html). +* `fileSystemArn` - Amazon Resource Name of the file system. +* `networkInterfaceId` - The ID of the network interface that Amazon EFS created when it created the mount target. +* `availabilityZoneName` - The name of the Availability Zone (AZ) that the mount target resides in. +* `availabilityZoneId` - The unique and consistent identifier of the Availability Zone (AZ) that the mount target resides in. +* `ownerId` - AWS account ID that owns the resource. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the EFS mount targets using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the EFS mount targets using the `id`. For example: + +```console +% terraform import aws_efs_mount_target.alpha fsmt-52a643fb +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/efs_replication_configuration.html.markdown b/website/docs/cdktf/typescript/r/efs_replication_configuration.html.markdown new file mode 100644 index 00000000000..9f88f30cb60 --- /dev/null +++ b/website/docs/cdktf/typescript/r/efs_replication_configuration.html.markdown @@ -0,0 +1,134 @@ +--- +subcategory: "EFS (Elastic File System)" +layout: "aws" +page_title: "AWS: aws_efs_replication_configuration" +description: Provides an Elastic File System (EFS) Replication Configuration. +--- + + + +# Resource: aws_efs_replication_configuration + +Creates a replica of an existing EFS file system in the same or another region. Creating this resource causes the source EFS file system to be replicated to a new read-only destination EFS file system. Deleting this resource will cause the replication from source to destination to stop and the destination file system will no longer be read only. + +~> **NOTE:** Deleting this resource does **not** delete the destination file system that was created. + +## Example Usage + +Will create a replica using regional storage in us-west-2 that will be encrypted by the default EFS KMS key `/aws/elasticfilesystem`. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EfsFileSystem } from "./.gen/providers/aws/efs-file-system"; +import { EfsReplicationConfiguration } from "./.gen/providers/aws/efs-replication-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new EfsFileSystem(this, "example", {}); + const awsEfsReplicationConfigurationExample = + new EfsReplicationConfiguration(this, "example_1", { + destination: { + region: "us-west-2", + }, + sourceFileSystemId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEfsReplicationConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +Replica will be created as One Zone storage in the us-west-2b Availability Zone and encrypted with the specified KMS key. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EfsFileSystem } from "./.gen/providers/aws/efs-file-system"; +import { EfsReplicationConfiguration } from "./.gen/providers/aws/efs-replication-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new EfsFileSystem(this, "example", {}); + const awsEfsReplicationConfigurationExample = + new EfsReplicationConfiguration(this, "example_1", { + destination: { + availabilityZoneName: "us-west-2b", + kmsKeyId: "1234abcd-12ab-34cd-56ef-1234567890ab", + }, + sourceFileSystemId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEfsReplicationConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `sourceFileSystemId` - (Required) The ID of the file system that is to be replicated. +* `destination` - (Required) A destination configuration block (documented below). + +### Destination Arguments + +`destination` supports the following arguments: + +* `availabilityZoneName` - (Optional) The availability zone in which the replica should be created. If specified, the replica will be created with One Zone storage. If omitted, regional storage will be used. +* `kmsKeyId` - (Optional) The Key ID, ARN, alias, or alias ARN of the KMS key that should be used to encrypt the replica file system. If omitted, the default KMS key for EFS `/aws/elasticfilesystem` will be used. +* `region` - (Optional) The region in which the replica should be created. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `creationTime` - When the replication configuration was created. +* `originalSourceFileSystemArn` - The Amazon Resource Name (ARN) of the original source Amazon EFS file system in the replication configuration. +* `sourceFileSystemArn` - The Amazon Resource Name (ARN) of the current source file system in the replication configuration. +* `sourceFileSystemRegion` - The AWS Region in which the source Amazon EFS file system is located. +* `destination[0]FileSystemId` - The fs ID of the replica. +* `destination[0]Status` - The status of the replication. 
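+
+The `destination[0]` attributes are only known after apply. As a minimal hand-written sketch (assuming the generated bindings expose `destination` as a single nested reference), the replica's file system ID can be surfaced with a `TerraformOutput`:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { EfsFileSystem } from "./.gen/providers/aws/efs-file-system";
+import { EfsReplicationConfiguration } from "./.gen/providers/aws/efs-replication-configuration";
+
+class ReplicationOutputsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const source = new EfsFileSystem(this, "source", {});
+    const replication = new EfsReplicationConfiguration(this, "replication", {
+      destination: {
+        region: "us-west-2",
+      },
+      sourceFileSystemId: source.id,
+    });
+    // The replica's file system ID is assigned by EFS during replication setup.
+    new TerraformOutput(this, "replica_file_system_id", {
+      value: replication.destination.fileSystemId,
+    });
+  }
+}
+```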
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10M`) +* `delete` - (Default `20M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EFS Replication Configurations using the file system ID of either the source or destination file system. When importing, the `availabilityZoneName` and `kmsKeyId` attributes must **not** be set in the configuration. The AWS API does not return these values when querying the replication configuration and their presence will therefore show as a diff in a subsequent plan. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EFS Replication Configurations using the file system ID of either the source or destination file system. When importing, the `availabilityZoneName` and `kmsKeyId` attributes must **not** be set in the configuration. The AWS API does not return these values when querying the replication configuration and their presence will therefore show as a diff in a subsequent plan. For example: + +```console +% terraform import aws_efs_replication_configuration.example fs-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/egress_only_internet_gateway.html.markdown b/website/docs/cdktf/typescript/r/egress_only_internet_gateway.html.markdown new file mode 100644 index 00000000000..b9fdaa866e2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/egress_only_internet_gateway.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_egress_only_internet_gateway" +description: |- + Provides a resource to create an egress-only Internet gateway. +--- + + + +# Resource: aws_egress_only_internet_gateway + +[IPv6 only] Creates an egress-only Internet gateway for your VPC. +An egress-only Internet gateway is used to enable outbound communication +over IPv6 from instances in your VPC to the Internet, and prevents hosts +outside of your VPC from initiating an IPv6 connection with your instance. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EgressOnlyInternetGateway } from "./.gen/providers/aws/egress-only-internet-gateway"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Vpc(this, "example", { + assignGeneratedIpv6CidrBlock: true, + cidrBlock: "10.1.0.0/16", + }); + const awsEgressOnlyInternetGatewayExample = new EgressOnlyInternetGateway( + this, + "example_1", + { + tags: { + Name: "main", + }, + vpcId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsEgressOnlyInternetGatewayExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `vpcId` - (Required) The VPC ID to create in. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the egress-only Internet gateway. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Egress-only Internet gateways using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Egress-only Internet gateways using the `id`. For example: + +```console +% terraform import aws_egress_only_internet_gateway.example eigw-015e0e244e24dfe8a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/eip.html.markdown b/website/docs/cdktf/typescript/r/eip.html.markdown new file mode 100644 index 00000000000..3eb89e94181 --- /dev/null +++ b/website/docs/cdktf/typescript/r/eip.html.markdown @@ -0,0 +1,220 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_eip" +description: |- + Provides an Elastic IP resource. +--- + + + +# Resource: aws_eip + +Provides an Elastic IP resource. + +~> **Note:** EIP may require IGW to exist prior to association. Use `dependsOn` to set an explicit dependency on the IGW. + +~> **Note:** Do not use `networkInterface` to associate the EIP to `awsLb` or `awsNatGateway` resources. Instead use the `allocationId` available in those resources to allow AWS to manage the association, otherwise you will see `authFailure` errors. + +## Example Usage + +### Single EIP associated with an instance + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Eip } from "./.gen/providers/aws/eip"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Eip(this, "lb", { + domain: "vpc", + instance: web.id, + }); + } +} + +``` + +### Multiple EIPs associated with a single network interface + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { Eip } from "./.gen/providers/aws/eip"; +import { NetworkInterface } from "./.gen/providers/aws/network-interface"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const multiIp = new NetworkInterface(this, "multi-ip", { + privateIps: ["10.0.0.10", "10.0.0.11"], + subnetId: main.id, + }); + new Eip(this, "one", { + associateWithPrivateIp: "10.0.0.10", + domain: "vpc", + networkInterface: multiIp.id, + }); + new Eip(this, "two", { + associateWithPrivateIp: "10.0.0.11", + domain: "vpc", + networkInterface: multiIp.id, + }); + } +} + +``` + +### Attaching an EIP to an Instance with a pre-assigned private ip (VPC Only) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Eip } from "./.gen/providers/aws/eip"; +import { Instance } from "./.gen/providers/aws/instance"; +import { InternetGateway } from "./.gen/providers/aws/internet-gateway"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new Vpc(this, "default", { + cidrBlock: "10.0.0.0/16", + enableDnsHostnames: true, + }); + const gw = new InternetGateway(this, "gw", { + vpcId: defaultVar.id, + }); + const tfTestSubnet = new Subnet(this, "tf_test_subnet", { + cidrBlock: "10.0.0.0/24", + dependsOn: [gw], + mapPublicIpOnLaunch: true, + vpcId: defaultVar.id, + }); + const foo = new Instance(this, "foo", { + ami: "ami-5189a661", + instanceType: "t2.micro", + privateIp: "10.0.0.12", + subnetId: tfTestSubnet.id, + }); + new Eip(this, "bar", { + associateWithPrivateIp: "10.0.0.12", + dependsOn: [gw], + domain: "vpc", + instance: foo.id, + }); + } +} + +``` + +### Allocating EIP from the BYOIP pool + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Eip } from "./.gen/providers/aws/eip"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Eip(this, "byoip-ip", { + domain: "vpc", + publicIpv4Pool: "ipv4pool-ec2-012345", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `address` - (Optional) IP address from an EC2 BYOIP pool. This option is only available for VPC EIPs. +* `associateWithPrivateIp` - (Optional) User-specified primary or secondary private IP address to associate with the Elastic IP address. If no private IP address is specified, the Elastic IP address is associated with the primary private IP address. +* `customerOwnedIpv4Pool` - (Optional) ID of a customer-owned address pool. For more on customer owned IP addressed check out [Customer-owned IP addresses guide](https://docs.aws.amazon.com/outposts/latest/userguide/outposts-networking-components.html#ip-addressing). +* `domain` - Indicates if this EIP is for use in VPC (`vpc`). 
+* `instance` - (Optional) EC2 instance ID.
+* `networkBorderGroup` - (Optional) Location from which the IP address is advertised. Use this parameter to limit the address to this location.
+* `networkInterface` - (Optional) Network interface ID to associate with.
+* `publicIpv4Pool` - (Optional) EC2 IPv4 address pool identifier or `amazon`.
+  This option is only available for VPC EIPs.
+* `tags` - (Optional) Map of tags to assign to the resource. Tags can only be applied to EIPs in a VPC. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc` - (Optional **Deprecated**) Boolean if the EIP is in a VPC or not. Use `domain` instead.
+  Defaults to `true` unless the region supports EC2-Classic.
+
+~> **NOTE:** You can specify either the `instance` ID or the `networkInterface` ID, but not both. Including both will **not** return an error from the AWS API, but will have undefined behavior. See the relevant [AssociateAddress API Call][1] for more information.
+
+~> **NOTE:** Specifying both `publicIpv4Pool` and `address` will not cause an error, but `address` will be used if both options are defined, as the API only requires one or the other.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `allocationId` - ID that AWS assigns to represent the allocation of the Elastic IP address for use with instances in a VPC.
+* `associationId` - ID representing the association of the address with an instance in a VPC.
+* `carrierIp` - Carrier IP address.
+* `customerOwnedIp` - Customer owned IP.
+* `id` - Contains the EIP allocation ID.
+* `privateDns` - The Private DNS associated with the Elastic IP address (if in VPC).
+* `privateIp` - Contains the private IP address (if in VPC).
+* `publicDns` - Public DNS associated with the Elastic IP address.
+* `publicIp` - Contains the public IP address.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+~> **Note:** The resource computes the `publicDns` and `privateDns` attributes according to the [VPC DNS Guide](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-dns.html#vpc-dns-hostnames) as they are not available with the EC2 API.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `read` - (Default `15M`)
+- `update` - (Default `5M`)
+- `delete` - (Default `3M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EIPs in a VPC using their Allocation ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EIPs in a VPC using their Allocation ID.
For example: + +```console +% terraform import aws_eip.bar eipalloc-00a10e96 +``` + +[1]: https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_AssociateAddress.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/eip_association.html.markdown b/website/docs/cdktf/typescript/r/eip_association.html.markdown new file mode 100644 index 00000000000..e807006e641 --- /dev/null +++ b/website/docs/cdktf/typescript/r/eip_association.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_eip_association" +description: |- + Provides an AWS EIP Association +--- + + + +# Resource: aws_eip_association + +Provides an AWS EIP Association as a top level resource, to associate and +disassociate Elastic IPs from AWS Instances and Network Interfaces. + +~> **NOTE:** Do not use this resource to associate an EIP to `awsLb` or `awsNatGateway` resources. Instead use the `allocationId` available in those resources to allow AWS to manage the association, otherwise you will see `authFailure` errors. + +~> **NOTE:** `awsEipAssociation` is useful in scenarios where EIPs are either +pre-existing or distributed to customers or users and therefore cannot be changed. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Eip } from "./.gen/providers/aws/eip"; +import { EipAssociation } from "./.gen/providers/aws/eip-association"; +import { Instance } from "./.gen/providers/aws/instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Eip(this, "example", { + domain: "vpc", + }); + const web = new Instance(this, "web", { + ami: "ami-21f78e11", + availabilityZone: "us-west-2a", + instanceType: "t2.micro", + tags: { + Name: "HelloWorld", + }, + }); + new EipAssociation(this, "eip_assoc", { + allocationId: example.id, + instanceId: web.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `allocationId` - (Optional) The allocation ID. This is required for EC2-VPC. +* `allowReassociation` - (Optional, Boolean) Whether to allow an Elastic IP to +be re-associated. Defaults to `true` in VPC. +* `instanceId` - (Optional) The ID of the instance. This is required for +EC2-Classic. For EC2-VPC, you can specify either the instance ID or the +network interface ID, but not both. The operation fails if you specify an +instance ID unless exactly one network interface is attached. +* `networkInterfaceId` - (Optional) The ID of the network interface. If the +instance has more than one network interface, you must specify a network +interface ID. +* `privateIpAddress` - (Optional) The primary or secondary private IP address +to associate with the Elastic IP address. If no private IP address is +specified, the Elastic IP address is associated with the primary private IP +address. +* `publicIp` - (Optional) The Elastic IP address. This is required for EC2-Classic. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `associationId` - The ID that represents the association of the Elastic IP +address with an instance. 
+* `allocationId` - As above
+* `instanceId` - As above
+* `networkInterfaceId` - As above
+* `privateIpAddress` - As above
+* `publicIp` - As above
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EIP Associations using their association IDs. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EIP Associations using their association IDs. For example:
+
+```console
+% terraform import aws_eip_association.test eipassoc-ab12c345
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/eks_addon.html.markdown b/website/docs/cdktf/typescript/r/eks_addon.html.markdown
index 2c2be54b2b4..56e7014ae07 100644
--- a/website/docs/cdktf/typescript/r/eks_addon.html.markdown
+++ b/website/docs/cdktf/typescript/r/eks_addon.html.markdown
@@ -262,9 +262,9 @@
 for service accounts on your cluster](https://docs.aws.amazon.com/eks/latest/userguide/enable-iam-roles-for-service-accounts.html)
 in the Amazon EKS User Guide.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:
 
 * `arn` - Amazon Resource Name (ARN) of the EKS add-on.
 * `id` - EKS Cluster name and EKS Addon name separated by a colon (`:`).
@@ -283,10 +283,24 @@ In addition to all arguments above, the following attributes are exported:
 
 ## Import
 
-EKS add-on can be imported using the `clusterName` and `addonName` separated by a colon (`:`), e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS add-on using the `clusterName` and `addonName` separated by a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
 ```
-$ terraform import aws_eks_addon.my_eks_addon my_cluster_name:my_addon_name
+
+Using `terraform import`, import EKS add-on using the `clusterName` and `addonName` separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_eks_addon.my_eks_addon my_cluster_name:my_addon_name
 ```
 
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/eks_cluster.html.markdown b/website/docs/cdktf/typescript/r/eks_cluster.html.markdown
index b7a129b45f6..32f8da00b5d 100644
--- a/website/docs/cdktf/typescript/r/eks_cluster.html.markdown
+++ b/website/docs/cdktf/typescript/r/eks_cluster.html.markdown
@@ -317,14 +317,14 @@ The following arguments are optional:
 
 ### encryption_config
 
-The following arguments are supported in the `encryptionConfig` configuration block:
+The `encryptionConfig` configuration block supports the following arguments:
 
 * `provider` - (Required) Configuration block with provider for encryption. Detailed below.
 * `resources` - (Required) List of strings with resources to be encrypted. Valid values: `secrets`.
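+
+As a hypothetical sketch (not part of the original examples), envelope encryption of Kubernetes secrets might be wired up as follows. The cluster name, role ARN, and subnet IDs are placeholders, and the single-object shape of `encryptionConfig` assumes the generated bindings' usual collapsing of max-one blocks:
+
+```typescript
+// Hedged sketch: enable envelope encryption of Kubernetes secrets with a
+// customer-managed KMS key. Role ARN and subnet IDs are placeholder values.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { EksCluster } from "./.gen/providers/aws/eks-cluster";
+class EncryptedClusterStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const key = new KmsKey(this, "eks", {
+      description: "EKS secrets envelope encryption key",
+    });
+    new EksCluster(this, "example", {
+      name: "example",
+      roleArn: "arn:aws:iam::123456789012:role/eks-cluster-role", // placeholder
+      vpcConfig: {
+        subnetIds: ["subnet-12345678", "subnet-87654321"], // placeholders
+      },
+      encryptionConfig: {
+        provider: {
+          keyArn: key.arn, // CMK used to encrypt the "secrets" resource
+        },
+        resources: ["secrets"],
+      },
+    });
+  }
+}
+```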
#### provider
 
-The following arguments are supported in the `provider` configuration block:
+The `provider` configuration block supports the following arguments:
 
 * `keyArn` - (Required) ARN of the Key Management Service (KMS) customer master key (CMK). The CMK must be symmetric, created in the same region as the cluster, and if the CMK was created in a different account, the user must have access to the CMK. For more information, see [Allowing Users in Other Accounts to Use a CMK in the AWS Key Management Service Developer Guide](https://docs.aws.amazon.com/kms/latest/developerguide/key-policy-modifying-external-accounts.html).
@@ -338,7 +338,7 @@ The following arguments are supported in the `provider` configuration block:
 
 ### kubernetes_network_config
 
-The following arguments are supported in the `kubernetesNetworkConfig` configuration block:
+The `kubernetesNetworkConfig` configuration block supports the following arguments:
 
 * `serviceIpv4Cidr` - (Optional) The CIDR block to assign Kubernetes pod and service IP addresses from. If you don't specify a block, Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. We recommend that you specify a block that does not overlap with resources in other networks that are peered or connected to your VPC. You can only specify a custom CIDR block when you create a cluster; changing this value will force a new cluster to be created. The block must meet the following requirements:
@@ -351,7 +351,7 @@ The following arguments are supported in the `kubernetesNetworkConfig` configura
 
 ### outpost_config
 
-The following arguments are supported in the `outpostConfig` configuration block:
+The `outpostConfig` configuration block supports the following arguments:
 
 * `controlPlaneInstanceType` - (Required) The Amazon EC2 instance type that you want to use for your local Amazon EKS cluster on Outposts. The instance type that you specify is used for all Kubernetes control plane instances. The instance type can't be changed after cluster creation. Choose an instance type based on the number of nodes that your cluster will have. If your cluster will have:
@@ -364,15 +364,15 @@ The following arguments are supported in the `outpostConfig` configuration block
 
 For a list of the available Amazon EC2 instance types, see Compute and storage in AWS Outposts rack features. The control plane is not automatically scaled by Amazon EKS.
 * `controlPlanePlacement` - (Optional) An object representing the placement configuration for all the control plane instances of your local Amazon EKS cluster on AWS Outpost.
-The following arguments are supported in the `controlPlanePlacement` configuration block:
+The `controlPlanePlacement` configuration block supports the following arguments:
 
 * `groupName` - (Required) The name of the placement group for the Kubernetes control plane instances. This setting can't be changed after cluster creation.
 * `outpostArns` - (Required) The ARN of the Outpost that you want to use for your local Amazon EKS cluster on Outposts. This argument is a list of ARNs, but currently only a single Outpost ARN is supported.
 
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:
 
 * `arn` - ARN of the cluster.
 * `certificateAuthority` - Attribute block containing `certificateAuthorityData` for your cluster. Detailed below.
@@ -415,10 +415,24 @@ Note that the `update` timeout is used separately for both `version` and `vpcCon ## Import -EKS Clusters can be imported using the `name`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Clusters using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_eks_cluster.my_cluster my_cluster + +Using `terraform import`, import EKS Clusters using the `name`. For example: + +```console +% terraform import aws_eks_cluster.my_cluster my_cluster ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/eks_fargate_profile.html.markdown b/website/docs/cdktf/typescript/r/eks_fargate_profile.html.markdown index 9a33a402e8f..12ea4296c4f 100644 --- a/website/docs/cdktf/typescript/r/eks_fargate_profile.html.markdown +++ b/website/docs/cdktf/typescript/r/eks_fargate_profile.html.markdown @@ -112,9 +112,9 @@ The following arguments are optional: * `labels` - (Optional) Key-value map of Kubernetes labels for selection. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of the EKS Fargate Profile. * `id` - EKS Cluster name and EKS Fargate Profile name separated by a colon (`:`). @@ -130,10 +130,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -EKS Fargate Profiles can be imported using the `clusterName` and `fargateProfileName` separated by a colon (`:`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Fargate Profiles using the `clusterName` and `fargateProfileName` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_eks_fargate_profile.my_fargate_profile my_cluster:my_fargate_profile + +Using `terraform import`, import EKS Fargate Profiles using the `clusterName` and `fargateProfileName` separated by a colon (`:`). For example: + +```console +% terraform import aws_eks_fargate_profile.my_fargate_profile my_cluster:my_fargate_profile ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/eks_identity_provider_config.html.markdown b/website/docs/cdktf/typescript/r/eks_identity_provider_config.html.markdown index 781fdfaefd7..4bd54f3a739 100644 --- a/website/docs/cdktf/typescript/r/eks_identity_provider_config.html.markdown +++ b/website/docs/cdktf/typescript/r/eks_identity_provider_config.html.markdown @@ -41,7 +41,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `clusterName` – (Required) Name of the EKS Cluster. 
* `oidc` - (Required) Nested attribute containing [OpenID Connect](https://openid.net/connect/) identity provider information for the cluster. Detailed below. @@ -58,9 +58,9 @@ The following arguments are supported: * `usernameClaim` - (Optional) The JWT claim that the provider will use as the username. * `usernamePrefix` - (Optional) A prefix that is prepended to username claims. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of the EKS Identity Provider Configuration. * `id` - EKS Cluster name and EKS Identity Provider Configuration name separated by a colon (`:`). @@ -76,10 +76,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -EKS Identity Provider Configurations can be imported using the `clusterName` and `identityProviderConfigName` separated by a colon (`:`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Identity Provider Configurations using the `clusterName` and `identityProviderConfigName` separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_eks_identity_provider_config.my_identity_provider_config my_cluster:my_identity_provider_config + +Using `terraform import`, import EKS Identity Provider Configurations using the `clusterName` and `identityProviderConfigName` separated by a colon (`:`). For example: + +```console +% terraform import aws_eks_identity_provider_config.my_identity_provider_config my_cluster:my_identity_provider_config ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/eks_node_group.html.markdown b/website/docs/cdktf/typescript/r/eks_node_group.html.markdown index 9ce32ff882d..208f4b12da3 100644 --- a/website/docs/cdktf/typescript/r/eks_node_group.html.markdown +++ b/website/docs/cdktf/typescript/r/eks_node_group.html.markdown @@ -297,9 +297,9 @@ The following arguments are mutually exclusive. * `maxUnavailable` - (Optional) Desired max number of unavailable worker nodes during node group update. * `maxUnavailablePercentage` - (Optional) Desired max percentage of unavailable worker nodes during node group update. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of the EKS Node Group. * `id` - EKS Cluster name and EKS Node Group name separated by a colon (`:`). @@ -320,10 +320,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -EKS Node Groups can be imported using the `clusterName` and `nodeGroupName` separated by a colon (`:`), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Node Groups using the `clusterName` and `nodeGroupName` separated by a colon (`:`). 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
 ```
-$ terraform import aws_eks_node_group.my_node_group my_cluster:my_node_group
+
+Using `terraform import`, import EKS Node Groups using the `clusterName` and `nodeGroupName` separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_eks_node_group.my_node_group my_cluster:my_node_group
 ```
 
- \ No newline at end of file
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elastic_beanstalk_application.html.markdown b/website/docs/cdktf/typescript/r/elastic_beanstalk_application.html.markdown
new file mode 100644
index 00000000000..6726ba495ca
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elastic_beanstalk_application.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_application"
+description: |-
+  Provides an Elastic Beanstalk Application Resource
+---
+
+
+
+# Resource: aws_elastic_beanstalk_application
+
+Provides an Elastic Beanstalk Application Resource. Elastic Beanstalk allows
+you to deploy and manage applications in the AWS cloud without worrying about
+the infrastructure that runs those applications.
+
+This resource creates an application that has one configuration template named
+`default`, and no application versions.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticBeanstalkApplication } from "./.gen/providers/aws/elastic-beanstalk-application";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticBeanstalkApplication(this, "tftest", {
+      appversionLifecycle: {
+        deleteSourceFromS3: true,
+        maxCount: 128,
+        serviceRole: beanstalkService.arn,
+      },
+      description: "tf-test-desc",
+      name: "tf-test-name",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the application. Must be unique within your account.
+* `description` - (Optional) Short description of the application.
+* `tags` - (Optional) Key-value map of tags for the Elastic Beanstalk Application. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Application version lifecycle (`appversionLifecycle`) supports the following settings. Only one of either `maxCount` or `maxAgeInDays` can be provided:
+
+* `serviceRole` - (Required) The ARN of an IAM service role under which the application version is deleted. Elastic Beanstalk must have permission to assume this role.
+* `maxCount` - (Optional) The maximum number of application versions to retain (`max_age_in_days` and `max_count` cannot be enabled simultaneously).
+* `maxAgeInDays` - (Optional) The number of days to retain an application version (`max_age_in_days` and `max_count` cannot be enabled simultaneously).
+* `deleteSourceFromS3` - (Optional) Set to `true` to delete a version's source bundle from S3 when the application version is deleted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN assigned by AWS for this Elastic Beanstalk Application.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Beanstalk Applications using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Elastic Beanstalk Applications using the `name`. For example:
+
+```console
+% terraform import aws_elastic_beanstalk_application.tf_test tf-test-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elastic_beanstalk_application_version.html.markdown b/website/docs/cdktf/typescript/r/elastic_beanstalk_application_version.html.markdown
new file mode 100644
index 00000000000..b4554da93d4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elastic_beanstalk_application_version.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_application_version"
+description: |-
+  Provides an Elastic Beanstalk Application Version Resource
+---
+
+
+
+# Resource: aws_elastic_beanstalk_application_version
+
+Provides an Elastic Beanstalk Application Version Resource. Elastic Beanstalk allows
+you to deploy and manage applications in the AWS cloud without worrying about
+the infrastructure that runs those applications.
+
+This resource creates a Beanstalk Application Version that can be deployed to a Beanstalk
+Environment.
+
+~> **NOTE on Application Version Resource:** When using the Application Version resource with multiple
+[Elastic Beanstalk Environments](elastic_beanstalk_environment.html) it is possible that an error may be returned
+when attempting to delete an Application Version while it is still in use by a different environment.
+To work around this you can either create each environment in a separate AWS account or create your `awsElasticBeanstalkApplicationVersion` resources with unique names within your Elastic Beanstalk Application. For example, `<revision>-<environment>`.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticBeanstalkApplication } from "./.gen/providers/aws/elastic-beanstalk-application";
+import { ElasticBeanstalkApplicationVersion } from "./.gen/providers/aws/elastic-beanstalk-application-version";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticBeanstalkApplication(this, "default", {
+      description: "tf-test-desc",
+      name: "tf-test-name",
+    });
+    const awsS3BucketDefault = new S3Bucket(this, "default_1", {
+      bucket: "tftest.applicationversion.bucket",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketDefault.overrideLogicalId("default");
+    const awsS3ObjectDefault = new S3Object(this, "default_2", {
+      bucket: Token.asString(awsS3BucketDefault.id),
+      key: "beanstalk/go-v1.zip",
+      source: "go-v1.zip",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3ObjectDefault.overrideLogicalId("default");
+    const awsElasticBeanstalkApplicationVersionDefault =
+      new ElasticBeanstalkApplicationVersion(this, "default_3", {
+        application: "tf-test-name",
+        bucket: Token.asString(awsS3BucketDefault.id),
+        description: "application version created by terraform",
+        key: Token.asString(awsS3ObjectDefault.id),
+        name: "tf-test-version-label",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsElasticBeanstalkApplicationVersionDefault.overrideLogicalId("default");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `application` - (Required) Name of the Beanstalk Application the version is associated with.
+* `bucket` - (Required) S3 bucket that contains the Application Version source bundle.
+* `key` - (Required) S3 object that is the Application Version source bundle.
+* `name` - (Required) Unique name for this Application Version.
+
+The following arguments are optional:

+* `description` - (Optional) Short description of the Application Version.
+* `forceDelete` - (Optional) On delete, force an Application Version to be deleted when it may be in use by multiple Elastic Beanstalk Environments.
+* `tags` - (Optional) Key-value map of tags for the Elastic Beanstalk Application Version. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN assigned by AWS for this Elastic Beanstalk Application.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
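+
+The unique-naming workaround from the note above can be sketched as follows; this is illustrative only, and the application, bucket, key, and label literals are placeholders rather than values from the original example:
+
+```typescript
+// Hedged sketch: encode <revision>-<environment> in the version label so that
+// versions owned by different environments never collide, and allow deletion
+// even while another environment still references the version.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ElasticBeanstalkApplicationVersion } from "./.gen/providers/aws/elastic-beanstalk-application-version";
+class UniqueVersionStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticBeanstalkApplicationVersion(this, "staging", {
+      application: "tf-test-name", // placeholder application name
+      bucket: "tftest.applicationversion.bucket", // placeholder bucket
+      key: "beanstalk/go-v1.zip", // placeholder bundle key
+      name: "go-v1-staging", // <revision>-<environment>
+      forceDelete: true,
+    });
+  }
+}
+```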
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elastic_beanstalk_configuration_template.html.markdown b/website/docs/cdktf/typescript/r/elastic_beanstalk_configuration_template.html.markdown
new file mode 100644
index 00000000000..d05f48a93c7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elastic_beanstalk_configuration_template.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Elastic Beanstalk"
+layout: "aws"
+page_title: "AWS: aws_elastic_beanstalk_configuration_template"
+description: |-
+  Provides an Elastic Beanstalk Configuration Template
+---
+
+
+
+# Resource: aws_elastic_beanstalk_configuration_template
+
+Provides an Elastic Beanstalk Configuration Template, which is associated with
+a specific application and is used to deploy different versions of the
+application with the same configuration settings.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticBeanstalkApplication } from "./.gen/providers/aws/elastic-beanstalk-application";
+import { ElasticBeanstalkConfigurationTemplate } from "./.gen/providers/aws/elastic-beanstalk-configuration-template";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const tftest = new ElasticBeanstalkApplication(this, "tftest", {
+      description: "tf-test-desc",
+      name: "tf-test-name",
+    });
+    new ElasticBeanstalkConfigurationTemplate(this, "tf_template", {
+      application: tftest.name,
+      name: "tf-test-template-config",
+      solutionStackName: "64bit Amazon Linux 2015.09 v2.0.8 running Go 1.4",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A unique name for this Template.
+* `application` – (Required) Name of the application to associate with this configuration template.
+* `description` - (Optional) Short description of the Template.
+* `environmentId` – (Optional) The ID of the environment used with this configuration template.
+* `setting` – (Optional) Option settings to configure the new Environment. These
+  override specific values that are set as defaults. The format is detailed
+  below in [Option Settings](#option-settings).
+* `solutionStackName` – (Optional) A solution stack to base your Template
+off of.
Example stacks can be found in the [Amazon API documentation][1] + +## Option Settings + +The `setting` field supports the following format: + +* `namespace` - unique namespace identifying the option's associated AWS resource +* `name` - name of the configuration option +* `value` - value for the configuration option +* `resource` - (Optional) resource name for [scheduled action](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html#command-options-general-autoscalingscheduledaction) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `name` +* `application` +* `description` +* `environmentId` +* `optionSettings` +* `solutionStackName` + +[1]: https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/elastic_beanstalk_environment.html.markdown b/website/docs/cdktf/typescript/r/elastic_beanstalk_environment.html.markdown new file mode 100644 index 00000000000..2d44a1c4d8d --- /dev/null +++ b/website/docs/cdktf/typescript/r/elastic_beanstalk_environment.html.markdown @@ -0,0 +1,185 @@ +--- +subcategory: "Elastic Beanstalk" +layout: "aws" +page_title: "AWS: aws_elastic_beanstalk_environment" +description: |- + Provides an Elastic Beanstalk Environment Resource +--- + + + +# Resource: aws_elastic_beanstalk_environment + +Provides an Elastic Beanstalk Environment Resource. Elastic Beanstalk allows +you to deploy and manage applications in the AWS cloud without worrying about +the infrastructure that runs those applications. + +Environments are often things such as `development`, `integration`, or +`production`. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticBeanstalkApplication } from "./.gen/providers/aws/elastic-beanstalk-application"; +import { ElasticBeanstalkEnvironment } from "./.gen/providers/aws/elastic-beanstalk-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const tftest = new ElasticBeanstalkApplication(this, "tftest", { + description: "tf-test-desc", + name: "tf-test-name", + }); + new ElasticBeanstalkEnvironment(this, "tfenvtest", { + application: tftest.name, + name: "tf-test-name", + solutionStackName: "64bit Amazon Linux 2015.03 v2.0.3 running Go 1.4", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A unique name for this Environment. This name is used + in the application URL +* `application` – (Required) Name of the application that contains the version + to be deployed +* `cnamePrefix` - (Optional) Prefix to use for the fully qualified DNS name of + the Environment. +* `description` - (Optional) Short description of the Environment +* `tier` - (Optional) Elastic Beanstalk Environment tier. Valid values are `worker` + or `webServer`. If tier is left blank `webServer` will be used. +* `setting` – (Optional) Option settings to configure the new Environment. These + override specific values that are set as defaults. 
The format is detailed + below in [Option Settings](#option-settings) +* `solutionStackName` – (Optional) A solution stack to base your environment +off of. Example stacks can be found in the [Amazon API documentation][1] +* `templateName` – (Optional) The name of the Elastic Beanstalk Configuration + template to use in deployment +* `platformArn` – (Optional) The [ARN][2] of the Elastic Beanstalk [Platform][3] + to use in deployment +* `waitForReadyTimeout` - (Default `20M`) The maximum + [duration](https://golang.org/pkg/time/#ParseDuration) that Terraform should + wait for an Elastic Beanstalk Environment to be in a ready state before timing + out. +* `pollInterval` – The time between polling the AWS API to +check if changes have been applied. Use this to adjust the rate of API calls +for any `create` or `update` action. Minimum `10S`, maximum `180S`. Omit this to +use the default behavior, which is an exponential backoff +* `versionLabel` - (Optional) The name of the Elastic Beanstalk Application Version +to use in deployment. +* `tags` - (Optional) A set of tags to apply to the Environment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Option Settings + +Some options can be stack-specific, check [AWS Docs](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html) +for supported options and examples. + +The `setting` and `allSettings` mappings support the following format: + +* `namespace` - unique namespace identifying the option's associated AWS resource +* `name` - name of the configuration option +* `value` - value for the configuration option +* `resource` - (Optional) resource name for [scheduled action](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html#command-options-general-autoscalingscheduledaction) + +### Example With Options + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticBeanstalkApplication } from "./.gen/providers/aws/elastic-beanstalk-application"; +import { ElasticBeanstalkEnvironment } from "./.gen/providers/aws/elastic-beanstalk-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const tftest = new ElasticBeanstalkApplication(this, "tftest", { + description: "tf-test-desc", + name: "tf-test-name", + }); + new ElasticBeanstalkEnvironment(this, "tfenvtest", { + application: tftest.name, + name: "tf-test-name", + setting: [ + { + name: "VPCId", + namespace: "aws:ec2:vpc", + value: "vpc-xxxxxxxx", + }, + { + name: "Subnets", + namespace: "aws:ec2:vpc", + value: "subnet-xxxxxxxx", + }, + ], + solutionStackName: "64bit Amazon Linux 2015.03 v2.0.3 running Go 1.4", + }); + } +} + +``` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the Elastic Beanstalk Environment. +* `name` - Name of the Elastic Beanstalk Environment. +* `description` - Description of the Elastic Beanstalk Environment. +* `tier` - The environment tier specified. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `application` – The Elastic Beanstalk Application specified for this environment.
+* `setting` – Settings specifically set for this Environment.
+* `allSettings` – List of all option settings configured in this Environment. These
+  are a combination of default settings and their overrides from `setting` in
+  the configuration.
+* `cname` - Fully qualified DNS name for this Environment.
+* `autoscalingGroups` - The autoscaling groups used by this Environment.
+* `instances` - Instances used by this Environment.
+* `launchConfigurations` - Launch configurations in use by this Environment.
+* `loadBalancers` - Elastic load balancers in use by this Environment.
+* `queues` - SQS queues in use by this Environment.
+* `triggers` - Autoscaling triggers in use by this Environment.
+* `endpointUrl` - The URL to the Load Balancer for this Environment.
+
+[1]: https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html
+[2]: https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html
+[3]: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-platformarn
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Beanstalk Environments using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Elastic Beanstalk Environments using the `id`. For example:
+
+```console
+% terraform import aws_elastic_beanstalk_environment.prodenv e-rpqsewtp2j
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticache_cluster.html.markdown b/website/docs/cdktf/typescript/r/elasticache_cluster.html.markdown
new file mode 100644
index 00000000000..6053e221d39
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticache_cluster.html.markdown
@@ -0,0 +1,336 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_cluster"
+description: |-
+  Provides an ElastiCache Cluster resource.
+---
+
+
+
+# Resource: aws_elasticache_cluster
+
+Provides an ElastiCache Cluster resource, which manages either a
+[Memcached cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/WhatIs.html), a
+[single-node Redis instance](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/WhatIs.html), or a
+read replica in a Redis (Cluster Mode Enabled) replication group.
+
+For working with Redis (Cluster Mode Enabled) replication groups, see the
+[`awsElasticacheReplicationGroup` resource](/docs/providers/aws/r/elasticache_replication_group.html).
+
+~> **Note:** When you change an attribute, such as `numCacheNodes`, by default
+it is applied in the next maintenance window. Because of this, Terraform may report
+a difference in its planning phase because the actual modification has not yet taken
+place. You can use the `applyImmediately` flag to instruct the service to apply the
+change immediately.
Using `applyImmediately` can result in a brief downtime as the server reboots. +See the AWS Documentation on Modifying an ElastiCache Cache Cluster for +[ElastiCache for Memcached](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/Clusters.Modify.html) or +[ElastiCache for Redis](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Modify.html) +for more information. + +~> **Note:** Any attribute changes that re-create the resource will be applied immediately, regardless of the value of `applyImmediately`. + +## Example Usage + +### Memcached Cluster + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticacheCluster } from "./.gen/providers/aws/elasticache-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticacheCluster(this, "example", { + clusterId: "cluster-example", + engine: "memcached", + nodeType: "cache.m4.large", + numCacheNodes: 2, + parameterGroupName: "default.memcached1.4", + port: 11211, + }); + } +} + +``` + +### Redis Instance + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticacheCluster } from "./.gen/providers/aws/elasticache-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticacheCluster(this, "example", { + clusterId: "cluster-example", + engine: "redis", + engineVersion: "3.2.10", + nodeType: "cache.m4.large", + numCacheNodes: 1, + parameterGroupName: "default.redis3.2", + port: 6379, + }); + } +} + +``` + +### Redis Cluster Mode Disabled Read Replica Instance + +These inherit their settings from the replication group. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticacheCluster } from "./.gen/providers/aws/elasticache-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticacheCluster(this, "replica", { + clusterId: "cluster-example", + replicationGroupId: example.id, + }); + } +} + +``` + +### Redis Log Delivery configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ElasticacheCluster } from "./.gen/providers/aws/elasticache-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticacheCluster(this, "test", { + applyImmediately: true, + clusterId: "mycluster", + engine: "redis", + logDeliveryConfiguration: [ + { + destination: example.name, + destinationType: "cloudwatch-logs", + logFormat: "text", + logType: "slow-log", + }, + { + destination: Token.asString( + awsKinesisFirehoseDeliveryStreamExample.name + ), + destinationType: "kinesis-firehose", + logFormat: "json", + logType: "engine-log", + }, + ], + nodeType: "cache.t3.micro", + numCacheNodes: 1, + port: 6379, + }); + } +} + +``` + +### Elasticache Cluster in Outpost + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsOutpostsOutpost } from "./.gen/providers/aws/data-aws-outposts-outpost"; +import { DataAwsOutpostsOutposts } from "./.gen/providers/aws/data-aws-outposts-outposts"; +import { ElasticacheCluster } from "./.gen/providers/aws/elasticache-cluster"; +import { ElasticacheSubnetGroup } from "./.gen/providers/aws/elasticache-subnet-group"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + }); + const dataAwsOutpostsOutpostsExample = new DataAwsOutpostsOutposts( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsOutpostsOutpostsExample.overrideLogicalId("example"); + const awsSubnetExample = new Subnet(this, "example_2", { + cidrBlock: "10.0.1.0/24", + tags: { + Name: "my-subnet", + }, + vpcId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetExample.overrideLogicalId("example"); + const dataAwsOutpostsOutpostExample = new DataAwsOutpostsOutpost( + this, + "example_3", + { + id: Token.asString( + propertyAccess(Fn.tolist(dataAwsOutpostsOutpostsExample.ids), ["0"]) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsOutpostsOutpostExample.overrideLogicalId("example"); + const awsElasticacheSubnetGroupExample = new ElasticacheSubnetGroup( + this, + "example_4", + { + name: "my-cache-subnet", + subnetIds: [Token.asString(awsSubnetExample.id)], + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsElasticacheSubnetGroupExample.overrideLogicalId("example"); + const awsElasticacheClusterExample = new ElasticacheCluster( + this, + "example_5", + { + clusterId: "cluster-example", + engine: "memcached", + nodeType: "cache.r5.large", + numCacheNodes: 2, + outpostMode: "single-outpost", + parameterGroupName: "default.memcached1.4", + port: 11211, + preferredOutpostArn: Token.asString(dataAwsOutpostsOutpostExample.arn), + subnetGroupName: Token.asString(awsElasticacheSubnetGroupExample.name), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsElasticacheClusterExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `clusterId` – (Required) Group identifier. ElastiCache converts this name to lowercase. Changing this value will re-create the resource. +* `engine` – (Optional, Required if `replicationGroupId` is not specified) Name of the cache engine to be used for this cache cluster. Valid values are `memcached` or `redis`. +* `nodeType` – (Required unless `replicationGroupId` is provided) The instance class used. See AWS documentation for information on [supported node types for Redis](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types for Redis](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html). See AWS documentation for information on [supported node types for Memcached](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types for Memcached](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/nodes-select-size.html). For Memcached, changing this value will re-create the resource. +* `numCacheNodes` – (Required unless `replicationGroupId` is provided) The initial number of cache nodes that the cache cluster will have. For Redis, this value must be 1. For Memcached, this value must be between 1 and 40. If this number is reduced on subsequent runs, the highest numbered nodes will be removed. +* `parameterGroupName` – (Required unless `replicationGroupId` is provided) The name of the parameter group to associate with this cache cluster. + +The following arguments are optional: + +* `applyImmediately` - (Optional) Whether any database modifications are applied immediately, or during the next maintenance window. Default is `false`. See [Amazon ElastiCache Documentation for more information.](https://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_ModifyCacheCluster.html). +* `autoMinorVersionUpgrade` - (Optional) Specifies whether minor version engine upgrades will be applied automatically to the underlying Cache Cluster instances during the maintenance window. + Only supported for engine type `"redis"` and if the engine version is 6 or higher. + Defaults to `true`. +* `availabilityZone` - (Optional) Availability Zone for the cache cluster. If you want to create cache nodes in multi-az, use `preferredAvailabilityZones` instead. Default: System chosen Availability Zone. Changing this value will re-create the resource. +* `azMode` - (Optional, Memcached only) Whether the nodes in this Memcached node group are created in a single Availability Zone or created across multiple Availability Zones in the cluster's region. 
Valid values for this parameter are `single-az` or `cross-az`; the default is `single-az`. If you want to choose `cross-az`, `numCacheNodes` must be greater than `1`.
+* `engineVersion` – (Optional) Version number of the cache engine to be used.
+  If not set, defaults to the latest version.
+  See [Describe Cache Engine Versions](https://docs.aws.amazon.com/cli/latest/reference/elasticache/describe-cache-engine-versions.html) in the AWS Documentation for supported versions.
+  When `engine` is `redis` and the version is 7 or higher, the major and minor version should be set, e.g., `7.2`.
+  When the version is 6, the major and minor version can be set, e.g., `6.2`,
+  or the minor version can be unspecified, which will use the latest version at creation time, e.g., `6.x`.
+  Otherwise, specify the full version desired, e.g., `5.0.6`.
+  The actual engine version used is returned in the attribute `engineVersionActual`, see [Attribute Reference](#attribute-reference) below.
+* `finalSnapshotIdentifier` - (Optional, Redis only) Name of your final cluster snapshot. If omitted, no final snapshot will be made.
+* `ipDiscovery` - (Optional) The IP version to advertise in the discovery protocol. Valid values are `ipv4` or `ipv6`.
+* `logDeliveryConfiguration` - (Optional, Redis only) Specifies the destination and format of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log). See the documentation on [Amazon ElastiCache](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html). See [Log Delivery Configuration](#log-delivery-configuration) below for more details.
+* `maintenanceWindow` – (Optional) Specifies the weekly time range for when maintenance
+on the cache cluster is performed. The format is `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC).
+The minimum maintenance window is a 60 minute period. Example: `sun:05:00-sun:09:00`.
+* `networkType` - (Optional) The IP versions for cache cluster connections. IPv6 is supported with Redis engine `6.2` onward or Memcached version `1.6.6` on all [Nitro system](https://aws.amazon.com/ec2/nitro/) instances. Valid values are `ipv4`, `ipv6` or `dual_stack`.
+* `notificationTopicArn` – (Optional) ARN of an SNS topic to send ElastiCache notifications to. Example: `arn:aws:sns:us-east-1:012345678999:my_sns_topic`.
+* `outpostMode` - (Optional) Specify the outpost mode that will apply to the cache cluster creation. Valid values are `"single-outpost"` and `"cross-outpost"`, however AWS currently only supports `"single-outpost"` mode.
+* `port` – (Optional) The port number on which each of the cache nodes will accept connections. For Memcached the default is 11211, and for Redis the default port is 6379. Cannot be provided with `replicationGroupId`. Changing this value will re-create the resource.
+* `preferredAvailabilityZones` - (Optional, Memcached only) List of the Availability Zones in which cache nodes are created. If you are creating your cluster in an Amazon VPC you can only locate nodes in Availability Zones that are associated with the subnets in the selected subnet group. The number of Availability Zones listed must equal the value of `numCacheNodes`. If you want all the nodes in the same Availability Zone, use `availabilityZone` instead, or repeat the Availability Zone multiple times in the list. Default: System chosen Availability Zones. Detecting drift of existing node availability zone is not currently supported.
Updating this argument by itself to migrate existing node availability zones is not currently supported and will show a perpetual difference.
+* `preferredOutpostArn` - (Optional, Required if `outpostMode` is specified) The outpost ARN in which the cache cluster will be created.
+* `replicationGroupId` - (Optional, Required if `engine` is not specified) ID of the replication group to which this cluster should belong. If this parameter is specified, the cluster is added to the specified replication group as a read replica; otherwise, the cluster is a standalone primary that is not part of any replication group.
+* `securityGroupIds` – (Optional, VPC only) One or more VPC security groups associated with the cache cluster.
+* `snapshotArns` – (Optional, Redis only) Single-element string list containing an Amazon Resource Name (ARN) of a Redis RDB snapshot file stored in Amazon S3. The object name cannot contain any commas. Changing `snapshotArns` forces a new resource.
+* `snapshotName` - (Optional, Redis only) Name of a snapshot from which to restore data into the new node group. Changing `snapshotName` forces a new resource.
+* `snapshotRetentionLimit` - (Optional, Redis only) Number of days for which ElastiCache will retain automatic cache cluster snapshots before deleting them. For example, if you set SnapshotRetentionLimit to 5, then a snapshot that was taken today will be retained for 5 days before being deleted. If the value of SnapshotRetentionLimit is set to zero (0), backups are turned off. Please note that setting a `snapshotRetentionLimit` is not supported on cache.t1.micro cache nodes.
+* `snapshotWindow` - (Optional, Redis only) Daily time range (in UTC) during which ElastiCache will begin taking a daily snapshot of your cache cluster. Example: `05:00-09:00`.
+* `subnetGroupName` – (Optional, VPC only) Name of the subnet group to be used for the cache cluster. Changing this value will re-create the resource.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Log Delivery Configuration
+
+The `logDeliveryConfiguration` block allows the streaming of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) to CloudWatch Logs or Kinesis Data Firehose. Max of 2 blocks.
+
+* `destination` - Name of either the CloudWatch Logs LogGroup or Kinesis Data Firehose resource.
+* `destinationType` - For CloudWatch Logs use `cloudwatch-logs` or for Kinesis Data Firehose use `kinesis-firehose`.
+* `logFormat` - Valid values are `json` or `text`.
+* `logType` - Valid values are `slow-log` or `engine-log`. Max 1 of each.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the created ElastiCache Cluster.
+* `engineVersionActual` - Because ElastiCache pulls the latest minor or patch for a version, this attribute returns the running version of the cache engine.
+* `cacheNodes` - List of node objects including `id`, `address`, `port` and `availabilityZone`.
+* `clusterAddress` - (Memcached only) DNS name of the cache cluster without the port appended.
+* `configurationEndpoint` - (Memcached only) Configuration endpoint to allow host discovery.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `40M`) +- `update` - (Default `80M`) +- `delete` - (Default `40M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Clusters using the `clusterId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ElastiCache Clusters using the `clusterId`. For example: + +```console +% terraform import aws_elasticache_cluster.my_cluster my_cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/elasticache_global_replication_group.html.markdown b/website/docs/cdktf/typescript/r/elasticache_global_replication_group.html.markdown new file mode 100644 index 00000000000..395bc62f929 --- /dev/null +++ b/website/docs/cdktf/typescript/r/elasticache_global_replication_group.html.markdown @@ -0,0 +1,191 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_global_replication_group" +description: |- + Provides an ElastiCache Global Replication Group resource. +--- + + + +# Resource: aws_elasticache_global_replication_group + +Provides an ElastiCache Global Replication Group resource, which manages replication between two or more Replication Groups in different regions. For more information, see the [ElastiCache User Guide](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Redis-Global-Datastore.html). + +## Example Usage + +### Global replication group with one secondary replication group + +The global replication group depends on the primary group existing. Secondary replication groups depend on the global replication group. Terraform dependency management will handle this transparently using resource value references. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ElasticacheGlobalReplicationGroup } from "./.gen/providers/aws/elasticache-global-replication-group"; +import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new ElasticacheReplicationGroup(this, "primary", { + description: "primary replication group", + engine: "redis", + engineVersion: "5.0.6", + nodeType: "cache.m5.large", + numCacheClusters: 1, + replicationGroupId: "example-primary", + }); + const example = new ElasticacheGlobalReplicationGroup(this, "example", { + globalReplicationGroupIdSuffix: "example", + primaryReplicationGroupId: primary.id, + }); + new ElasticacheReplicationGroup(this, "secondary", { + description: "secondary replication group", + globalReplicationGroupId: example.globalReplicationGroupId, + numCacheClusters: 1, + provider: otherRegion, + replicationGroupId: "example-secondary", + }); + } +} + +``` + +### Managing Redis Engine Versions + +The initial Redis version is determined by the version set on the primary replication group. +However, once it is part of a Global Replication Group, +the Global Replication Group manages the version of all member replication groups. + +The member replication groups must have [`lifecycleIgnoreChanges[engineVersion]`](https://www.terraform.io/language/meta-arguments/lifecycle) set, +or Terraform will always return a diff. + +In this example, +the primary replication group will be created with Redis 6.0, +and then upgraded to Redis 6.2 once added to the Global Replication Group. +The secondary replication group will be created with Redis 6.2. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticacheGlobalReplicationGroup } from "./.gen/providers/aws/elasticache-global-replication-group"; +import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new ElasticacheReplicationGroup(this, "primary", { + description: "primary replication group", + engine: "redis", + engineVersion: "6.0", + lifecycle: { + ignoreChanges: [engineVersion], + }, + nodeType: "cache.m5.large", + numCacheClusters: 1, + replicationGroupId: "example-primary", + }); + const example = new ElasticacheGlobalReplicationGroup(this, "example", { + engineVersion: "6.2", + globalReplicationGroupIdSuffix: "example", + primaryReplicationGroupId: primary.id, + }); + new ElasticacheReplicationGroup(this, "secondary", { + description: "secondary replication group", + globalReplicationGroupId: example.globalReplicationGroupId, + lifecycle: { + ignoreChanges: [engineVersion], + }, + numCacheClusters: 1, + provider: otherRegion, + replicationGroupId: "example-secondary", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `automaticFailoverEnabled` - (Optional) Specifies whether read-only replicas will be automatically promoted to read/write primary if the existing primary fails. 
+  When creating, by default the Global Replication Group inherits the automatic failover setting of the primary replication group.
+* `cacheNodeType` - (Optional) The instance class used.
+  See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html)
+  and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html).
+  When creating, by default the Global Replication Group inherits the node type of the primary replication group.
+* `engineVersion` - (Optional) Redis version to use for the Global Replication Group.
+  When creating, by default the Global Replication Group inherits the version of the primary replication group.
+  If a version is specified, the Global Replication Group and all member replication groups will be upgraded to this version.
+  Cannot be downgraded without replacing the Global Replication Group and all member replication groups.
+  When the version is 7 or higher, the major and minor version should be set, e.g., `7.2`.
+  When the version is 6, the major and minor version can be set, e.g., `6.2`,
+  or the minor version can be unspecified, which will use the latest version at creation time, e.g., `6.x`.
+  The actual engine version used is returned in the attribute `engineVersionActual`, see [Attribute Reference](#attribute-reference) below.
+* `globalReplicationGroupIdSuffix` – (Required) The suffix name of a Global Datastore. If `globalReplicationGroupIdSuffix` is changed, creates a new resource.
+* `primaryReplicationGroupId` – (Required) The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primaryReplicationGroupId` is changed, creates a new resource.
+* `globalReplicationGroupDescription` – (Optional) A user-created description for the global replication group.
+* `numNodeGroups` - (Optional) The number of node groups (shards) on the global replication group.
+* `parameterGroupName` - (Optional) An ElastiCache Parameter Group to use for the Global Replication Group.
+  Required when upgrading a major engine version, but will be ignored if left configured after the upgrade is complete.
+  Specifying this parameter without a major version upgrade will fail.
+  Note that ElastiCache creates a copy of this parameter group for each member replication group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the ElastiCache Global Replication Group.
+* `arn` - The ARN of the ElastiCache Global Replication Group.
+* `engineVersionActual` - The full version number of the cache engine running on the members of this global replication group.
+* `atRestEncryptionEnabled` - A flag that indicates whether encryption at rest is enabled.
+* `authTokenEnabled` - A flag that indicates whether AuthToken (password) is enabled.
+* `clusterEnabled` - Indicates whether the Global Datastore is cluster enabled.
+* `engine` - The name of the cache engine to be used for the clusters in this global replication group.
+* `globalReplicationGroupId` - The full ID of the global replication group.
+* `globalNodeGroups` - Set of node groups (shards) on the global replication group.
+  Has the values:
+  * `globalNodeGroupId` - The ID of the global node group.
+  * `slots` - The keyspace for this node group.
+* `transitEncryptionEnabled` - A flag that indicates whether encryption in transit is enabled.
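+
+Because `engineVersionActual` is computed, it is only known after apply. As a minimal, hypothetical sketch (not part of the generated examples above; construct and output names are illustrative), the value can be surfaced as a stack output rather than read at synth time:
+
+```typescript
+// Illustrative sketch only: export the computed engine version of a global
+// replication group. `primary` mirrors the example shown earlier.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { ElasticacheGlobalReplicationGroup } from "./.gen/providers/aws/elasticache-global-replication-group";
+import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group";
+class GlobalGroupOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const primary = new ElasticacheReplicationGroup(this, "primary", {
+      description: "primary replication group",
+      engine: "redis",
+      engineVersion: "6.2",
+      nodeType: "cache.m5.large",
+      numCacheClusters: 1,
+      replicationGroupId: "example-primary",
+    });
+    const example = new ElasticacheGlobalReplicationGroup(this, "example", {
+      globalReplicationGroupIdSuffix: "example",
+      primaryReplicationGroupId: primary.id,
+    });
+    // Computed only after apply, so export it as an output.
+    new TerraformOutput(this, "engine_version_actual", {
+      value: example.engineVersionActual,
+    });
+  }
+}
+```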
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `60m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Global Replication Groups using the `globalReplicationGroupId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ElastiCache Global Replication Groups using the `globalReplicationGroupId`. For example:
+
+```console
+% terraform import aws_elasticache_global_replication_group.my_global_replication_group okuqm-global-replication-group-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticache_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/elasticache_parameter_group.html.markdown
new file mode 100644
index 00000000000..967099f4b06
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticache_parameter_group.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_parameter_group"
+description: |-
+  Provides an ElastiCache parameter group resource.
+---
+
+
+
+# Resource: aws_elasticache_parameter_group
+
+Provides an ElastiCache parameter group resource.
+
+~> **NOTE:** Attempting to remove the `reservedMemory` parameter when `family` is set to `redis2.6` or `redis2.8` may show a perpetual difference in Terraform due to an ElastiCache API limitation. Leave that parameter configured with any value to work around the issue.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheParameterGroup } from "./.gen/providers/aws/elasticache-parameter-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticacheParameterGroup(this, "default", {
+      family: "redis2.8",
+      name: "cache-params",
+      parameter: [
+        {
+          name: "activerehashing",
+          value: "yes",
+        },
+        {
+          name: "min-slaves-to-write",
+          value: "2",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the ElastiCache parameter group.
+* `family` - (Required) The family of the ElastiCache parameter group.
+* `description` - (Optional) The description of the ElastiCache parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of ElastiCache parameters to apply.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the ElastiCache parameter.
+* `value` - (Required) The value of the ElastiCache parameter.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ElastiCache parameter group name.
+* `arn` - The AWS ARN associated with the parameter group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Parameter Groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ElastiCache Parameter Groups using the `name`. For example:
+
+```console
+% terraform import aws_elasticache_parameter_group.default redis-params
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticache_replication_group.html.markdown b/website/docs/cdktf/typescript/r/elasticache_replication_group.html.markdown
new file mode 100644
index 00000000000..f478597c8b2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticache_replication_group.html.markdown
@@ -0,0 +1,342 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_replication_group"
+description: |-
+  Provides an ElastiCache Replication Group resource.
+---
+
+
+
+# Resource: aws_elasticache_replication_group
+
+Provides an ElastiCache Replication Group resource.
+
+For working with a [Memcached cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/WhatIs.html) or a
+[single-node Redis instance (Cluster Mode Disabled)](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/WhatIs.html),
+see the [`awsElasticacheCluster` resource](/docs/providers/aws/r/elasticache_cluster.html).
+
+~> **Note:** When you change an attribute, such as `engineVersion`, by
+default the ElastiCache API applies it in the next maintenance window. Because
+of this, Terraform may report a difference in its planning phase because the
+actual modification has not yet taken place. You can use the
+`applyImmediately` flag to instruct the service to apply the change
+immediately. Using `applyImmediately` can result in a brief downtime as
+servers reboot.
+See the AWS Documentation on
+[Modifying an ElastiCache Cache Cluster](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Modify.html)
+for more information.
+
+~> **Note:** Any attribute changes that re-create the resource will be applied immediately, regardless of the value of `applyImmediately`.
+
+~> **Note:** Be aware of the terminology collision around "cluster" for `awsElasticacheReplicationGroup`. For example, it is possible to create a ["Cluster Mode Disabled [Redis] Cluster"](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.Create.CON.Redis.html). With "Cluster Mode Enabled", the data will be stored in shards (called "node groups").
See [Redis Cluster Configuration](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/cluster-create-determine-requirements.html#redis-cluster-configuration) for a diagram of the differences. To enable cluster mode, use a parameter group that has cluster mode enabled. The default parameter groups provided by AWS end with ".cluster.on", for example `default.redis6.x.cluster.on`.
+
+## Example Usage
+
+### Redis Cluster Mode Disabled
+
+To create a single shard primary with single read replica:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticacheReplicationGroup(this, "example", {
+      automaticFailoverEnabled: true,
+      description: "example description",
+      nodeType: "cache.m4.large",
+      numCacheClusters: 2,
+      parameterGroupName: "default.redis3.2",
+      port: 6379,
+      preferredCacheClusterAzs: ["us-west-2a", "us-west-2b"],
+      replicationGroupId: "tf-rep-group-1",
+    });
+  }
+}
+
+```
+
+You have two options for adjusting the number of replicas:
+
+* Adjusting `numCacheClusters` directly. This will attempt to automatically add or remove replicas, but provides no granular control (e.g., preferred availability zone, cache cluster ID) for the added or removed replicas. This also currently expects cache cluster IDs in the form of `replicationGroupId-00#`.
+* Otherwise, for fine-grained control of the underlying cache clusters, they can be added or removed with the [`awsElasticacheCluster` resource](/docs/providers/aws/r/elasticache_cluster.html) and its `replicationGroupId` attribute. In this situation, you will need to utilize the [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignoreChanges` to prevent perpetual differences during Terraform plan with the `numCacheClusters` attribute.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformCount, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheCluster } from "./.gen/providers/aws/elasticache-cluster";
+import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new ElasticacheReplicationGroup(this, "example", {
+      automaticFailoverEnabled: true,
+      description: "example description",
+      lifecycle: {
+        ignoreChanges: ["num_cache_clusters"],
+      },
+      nodeType: "cache.m4.large",
+      numCacheClusters: 2,
+      parameterGroupName: "default.redis3.2",
+      port: 6379,
+      preferredCacheClusterAzs: ["us-west-2a", "us-west-2b"],
+      replicationGroupId: "tf-rep-group-1",
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop.
If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const replicaCount = TerraformCount.of(Token.asNumber("1"));
+    new ElasticacheCluster(this, "replica", {
+      clusterId: "tf-rep-group-1-${" + replicaCount.index + "}",
+      replicationGroupId: example.id,
+      count: replicaCount,
+    });
+  }
+}
+
+```
+
+### Redis Cluster Mode Enabled
+
+To create two shards with a primary and a single read replica each:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticacheReplicationGroup(this, "baz", {
+      automaticFailoverEnabled: true,
+      description: "example description",
+      nodeType: "cache.t2.small",
+      numNodeGroups: 2,
+      parameterGroupName: "default.redis3.2.cluster.on",
+      port: 6379,
+      replicasPerNodeGroup: 1,
+      replicationGroupId: "tf-redis-cluster",
+    });
+  }
+}
+
+```
+
+### Redis Log Delivery configuration
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticacheReplicationGroup(this, "test", {
+      applyImmediately: true,
+      autoMinorVersionUpgrade: Token.asString(false),
+      description: "test description",
+      logDeliveryConfiguration: [
+        {
+          // `example` refers to a CloudWatch Log Group defined elsewhere.
+          destination: example.name,
+          destinationType: "cloudwatch-logs",
+          logFormat: "text",
+          logType: "slow-log",
+        },
+        {
+          destination: Token.asString(
+            awsKinesisFirehoseDeliveryStreamExample.name
+          ),
+          destinationType: "kinesis-firehose",
+          logFormat: "json",
+          logType: "engine-log",
+        },
+      ],
+      maintenanceWindow: "tue:06:30-tue:07:30",
+      nodeType: "cache.t3.small",
+      port: 6379,
+      replicationGroupId: "myreplicationgroup",
+      snapshotWindow: "01:00-02:00",
+    });
+  }
+}
+
+```
+
+~> **Note:** We currently do not support passing a `primaryClusterId` in order to create the Replication Group.
+
+~> **Note:** Automatic Failover is unavailable for Redis versions earlier than 2.8.6,
+and unavailable on T1 node types. For T2 node types, it is only available on Redis version 3.2.4 or later with cluster mode enabled. See the [High Availability Using Replication Groups](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Replication.html) guide
+for full details on using Replication Groups.
+
+### Creating a secondary replication group for a global replication group
+
+A Global Replication Group can have one or two secondary Replication Groups in different regions. These are added to an existing Global Replication Group.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheGlobalReplicationGroup } from "./.gen/providers/aws/elasticache-global-replication-group";
+import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const primary = new ElasticacheReplicationGroup(this, "primary", {
+      description: "primary replication group",
+      engine: "redis",
+      engineVersion: "5.0.6",
+      nodeType: "cache.m5.large",
+      numCacheClusters: 1,
+      // `otherRegion` refers to an aliased AWS provider for the primary's region.
+      provider: otherRegion,
+      replicationGroupId: "example-primary",
+    });
+    const example = new ElasticacheGlobalReplicationGroup(this, "example", {
+      globalReplicationGroupIdSuffix: "example",
+      primaryReplicationGroupId: primary.id,
+      provider: otherRegion,
+    });
+    new ElasticacheReplicationGroup(this, "secondary", {
+      description: "secondary replication group",
+      globalReplicationGroupId: example.globalReplicationGroupId,
+      numCacheClusters: 1,
+      replicationGroupId: "example-secondary",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `description` – (Required) User-created description for the replication group. Must not be empty.
+* `replicationGroupId` – (Required) Replication group identifier. This parameter is stored as a lowercase string.
+
+The following arguments are optional:
+
+* `applyImmediately` - (Optional) Specifies whether any modifications are applied immediately, or during the next maintenance window. Default is `false`.
+* `atRestEncryptionEnabled` - (Optional) Whether to enable encryption at rest.
+* `authToken` - (Optional) Password used to access a password protected server. Can be specified only if `transit_encryption_enabled = true`.
+* `autoMinorVersionUpgrade` - (Optional) Specifies whether minor version engine upgrades will be applied automatically to the underlying Cache Cluster instances during the maintenance window.
+  Only supported for engine type `"redis"` and if the engine version is 6 or higher.
+  Defaults to `true`.
+* `automaticFailoverEnabled` - (Optional) Specifies whether a read-only replica will be automatically promoted to read/write primary if the existing primary fails. If enabled, `numCacheClusters` must be greater than 1. Must be enabled for Redis (cluster mode enabled) replication groups. Defaults to `false`.
+* `dataTieringEnabled` - (Optional) Enables data tiering. Data tiering is only supported for replication groups using the r6gd node type. This parameter must be set to `true` when using r6gd nodes.
+* `engine` - (Optional) Name of the cache engine to be used for the clusters in this replication group. The only valid value is `redis`.
+* `engineVersion` - (Optional) Version number of the cache engine to be used for the cache clusters in this replication group.
+  If the version is 7 or higher, the major and minor version should be set, e.g., `7.2`.
+  If the version is 6, the major and minor version can be set, e.g., `6.2`,
+  or the minor version can be unspecified, which will use the latest version at creation time, e.g., `6.x`.
+  Otherwise, specify the full version desired, e.g., `5.0.6`.
+  The actual engine version used is returned in the attribute `engineVersionActual`, see [Attribute Reference](#attribute-reference) below.
+* `finalSnapshotIdentifier` - (Optional) The name of your final node group (shard) snapshot. ElastiCache creates the snapshot from the primary node in the cluster. If omitted, no final snapshot will be made.
+* `globalReplicationGroupId` - (Optional) The ID of the global replication group to which this replication group should belong. If this parameter is specified, the replication group is added to the specified global replication group as a secondary replication group; otherwise, the replication group is not part of any global replication group. If `globalReplicationGroupId` is set, the `numNodeGroups` parameter cannot be set.
+* `kmsKeyId` - (Optional) The ARN of the key that you wish to use if encrypting at rest. If not supplied, uses service managed encryption. Can be specified only if `at_rest_encryption_enabled = true`.
+* `logDeliveryConfiguration` - (Optional, Redis only) Specifies the destination and format of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log). See the documentation on [Amazon ElastiCache](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log). See [Log Delivery Configuration](#log-delivery-configuration) below for more details.
+* `maintenanceWindow` – (Optional) Specifies the weekly time range for when maintenance on the cache cluster is performed. The format is `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:05:00-sun:09:00`
+* `multiAzEnabled` - (Optional) Specifies whether to enable Multi-AZ Support for the replication group. If `true`, `automaticFailoverEnabled` must also be enabled. Defaults to `false`.
+* `nodeType` - (Optional) Instance class to be used. See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html). Required unless `globalReplicationGroupId` is set. Cannot be set if `globalReplicationGroupId` is set.
+* `notificationTopicArn` – (Optional) ARN of an SNS topic to send ElastiCache notifications to. Example: `arn:aws:sns:us-east-1:012345678999:my_sns_topic`
+* `numCacheClusters` - (Optional) Number of cache clusters (primary and replicas) this replication group will have. If Multi-AZ is enabled, the value of this parameter must be at least 2. Updates will occur before other modifications. Conflicts with `numNodeGroups`. Defaults to `1`.
+* `numNodeGroups` - (Optional) Number of node groups (shards) for this Redis replication group.
+  Changing this number will trigger a resizing operation before other settings modifications.
+* `parameterGroupName` - (Optional) Name of the parameter group to associate with this replication group. If this argument is omitted, the default cache parameter group for the specified engine is used. To enable "cluster mode", i.e., data sharding, use a parameter group that has the parameter `cluster-enabled` set to true.
+* `port` – (Optional) Port number on which each of the cache nodes will accept connections. For Memcached, the default is 11211; for Redis, the default port is 6379.
+* `preferredCacheClusterAzs` - (Optional) List of EC2 availability zones in which the replication group's cache clusters will be created. The order of the availability zones in the list is considered. The first item in the list will be the primary node. Ignored when updating.
+* `replicasPerNodeGroup` - (Optional) Number of replica nodes in each node group.
+  Changing this number will trigger a resizing operation before other settings modifications.
+  Valid values are 0 to 5.
+* `securityGroupIds` - (Optional) One or more Amazon VPC security groups associated with this replication group. Use this parameter only when you are creating a replication group in an Amazon Virtual Private Cloud.
+* `securityGroupNames` - (Optional) List of cache security group names to associate with this replication group.
+* `snapshotArns` – (Optional) List of ARNs that identify Redis RDB snapshot files stored in Amazon S3. The object names cannot contain any commas.
+* `snapshotName` - (Optional) Name of a snapshot from which to restore data into the new node group. Changing the `snapshotName` forces a new resource.
+* `snapshotRetentionLimit` - (Optional, Redis only) Number of days for which ElastiCache will retain automatic cache cluster snapshots before deleting them. For example, if you set SnapshotRetentionLimit to 5, then a snapshot that was taken today will be retained for 5 days before being deleted. If the value of `snapshotRetentionLimit` is set to zero (0), backups are turned off. Please note that setting a `snapshotRetentionLimit` is not supported on cache.t1.micro cache nodes.
+* `snapshotWindow` - (Optional, Redis only) Daily time range (in UTC) during which ElastiCache will begin taking a daily snapshot of your cache cluster. The minimum snapshot window is a 60 minute period. Example: `05:00-09:00`
+* `subnetGroupName` - (Optional) Name of the cache subnet group to be used for the replication group.
+* `tags` - (Optional) Map of tags to assign to the resource. Adding tags to this resource will add or overwrite any existing tags on the clusters in the replication group and not to the group itself. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `transitEncryptionEnabled` - (Optional) Whether to enable encryption in transit.
+* `userGroupIds` - (Optional) User Group ID to associate with the replication group. Only a maximum of one (1) user group ID is valid. **NOTE:** This argument _is_ a set because the AWS specification allows for multiple IDs. However, in practice, AWS only allows a maximum size of one.
+
+### Log Delivery Configuration
+
+The `logDeliveryConfiguration` block allows the streaming of Redis [SLOWLOG](https://redis.io/commands/slowlog) or Redis [Engine Log](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Log_Delivery.html#Log_contents-engine-log) to CloudWatch Logs or Kinesis Data Firehose. Max of 2 blocks.
+
+* `destination` - Name of either the CloudWatch Logs LogGroup or Kinesis Data Firehose resource.
+* `destinationType` - For CloudWatch Logs use `cloudwatch-logs`, or for Kinesis Data Firehose use `kinesis-firehose`.
+* `logFormat` - Valid values are `json` or `text`.
+* `logType` - Valid values are `slow-log` or `engine-log`. Max 1 of each.
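+
+The constraints above on `multiAzEnabled`, `automaticFailoverEnabled`, and `numCacheClusters` interact: Multi-AZ requires automatic failover, which in turn requires more than one cache cluster. The following minimal sketch (illustrative only, not part of the generated examples; identifiers are hypothetical) shows a configuration satisfying all three:
+
+```typescript
+// Illustrative sketch: a Multi-AZ replication group. `multiAzEnabled`
+// requires `automaticFailoverEnabled`, which in turn requires
+// `numCacheClusters` to be greater than 1.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ElasticacheReplicationGroup } from "./.gen/providers/aws/elasticache-replication-group";
+class MultiAzSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticacheReplicationGroup(this, "multi_az", {
+      automaticFailoverEnabled: true,
+      description: "multi-az replication group",
+      multiAzEnabled: true,
+      nodeType: "cache.t3.small",
+      numCacheClusters: 2,
+      replicationGroupId: "example-multi-az",
+    });
+  }
+}
+```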
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the created ElastiCache Replication Group.
+* `engineVersionActual` - Because ElastiCache pulls the latest minor or patch for a version, this attribute returns the running version of the cache engine.
+* `clusterEnabled` - Indicates if cluster mode is enabled.
+* `configurationEndpointAddress` - Address of the replication group configuration endpoint when cluster mode is enabled.
+* `id` - ID of the ElastiCache Replication Group.
+* `memberClusters` - Identifiers of all the nodes that are part of this replication group.
+* `primaryEndpointAddress` - (Redis only) Address of the endpoint for the primary node in the replication group, if the cluster mode is disabled.
+* `readerEndpointAddress` - (Redis only) Address of the endpoint for the reader node in the replication group, if the cluster mode is disabled.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `delete` - (Default `40m`)
+* `update` - (Default `40m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Replication Groups using the `replicationGroupId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ElastiCache Replication Groups using the `replicationGroupId`. For example:
+
+```console
+% terraform import aws_elasticache_replication_group.my_replication_group replication-group-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticache_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/elasticache_subnet_group.html.markdown
new file mode 100644
index 00000000000..20d388f8375
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticache_subnet_group.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_subnet_group"
+description: |-
+  Provides an ElastiCache Subnet Group resource.
+---
+
+
+
+# Resource: aws_elasticache_subnet_group
+
+Provides an ElastiCache Subnet Group resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { ElasticacheSubnetGroup } from "./.gen/providers/aws/elasticache-subnet-group"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new Vpc(this, "foo", { + cidrBlock: "10.0.0.0/16", + tags: { + Name: "tf-test", + }, + }); + const awsSubnetFoo = new Subnet(this, "foo_1", { + availabilityZone: "us-west-2a", + cidrBlock: "10.0.0.0/24", + tags: { + Name: "tf-test", + }, + vpcId: foo.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetFoo.overrideLogicalId("foo"); + new ElasticacheSubnetGroup(this, "bar", { + name: "tf-test-cache-subnet", + subnetIds: [Token.asString(awsSubnetFoo.id)], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` – (Required) Name for the cache subnet group. ElastiCache converts this name to lowercase. +* `description` – (Optional) Description for the cache subnet group. Defaults to "Managed by Terraform". +* `subnetIds` – (Required) List of VPC Subnet IDs for the cache subnet group +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `description` - The Description of the ElastiCache Subnet Group. +* `name` - The Name of the ElastiCache Subnet Group. +* `subnetIds` - The Subnet IDs of the ElastiCache Subnet Group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache Subnet Groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import ElastiCache Subnet Groups using the `name`. For example: + +```console +% terraform import aws_elasticache_subnet_group.bar tf-test-cache-subnet +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/elasticache_user.html.markdown b/website/docs/cdktf/typescript/r/elasticache_user.html.markdown new file mode 100644 index 00000000000..e55574d4b35 --- /dev/null +++ b/website/docs/cdktf/typescript/r/elasticache_user.html.markdown @@ -0,0 +1,155 @@ +--- +subcategory: "ElastiCache" +layout: "aws" +page_title: "AWS: aws_elasticache_user" +description: |- + Provides an ElastiCache user. +--- + + + +# Resource: aws_elasticache_user + +Provides an ElastiCache user resource. + +~> **Note:** All arguments including the username and passwords will be stored in the raw state as plain-text. 
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticacheUser } from "./.gen/providers/aws/elasticache-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticacheUser(this, "test", { + accessString: + "on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember", + engine: "REDIS", + passwords: ["password123456789"], + userId: "testUserId", + userName: "testUserName", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticacheUser } from "./.gen/providers/aws/elasticache-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticacheUser(this, "test", { + accessString: "on ~* +@all", + authenticationMode: { + type: "iam", + }, + engine: "REDIS", + userId: "testUserId", + userName: "testUserName", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticacheUser } from "./.gen/providers/aws/elasticache-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticacheUser(this, "test", { + accessString: "on ~* +@all", + authenticationMode: { + passwords: ["password1", "password2"], + type: "password", + }, + engine: "REDIS", + userId: "testUserId", + userName: "testUserName", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `accessString` - (Required) Access permissions string used for this user. See [Specifying Permissions Using an Access String](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Clusters.RBAC.html#Access-string) for more details. +* `engine` - (Required) The current supported value is `redis`. +* `userId` - (Required) The ID of the user. +* `userName` - (Required) The username of the user. + +The following arguments are optional: + +* `authenticationMode` - (Optional) Denotes the user's authentication properties. Detailed below. +* `noPasswordRequired` - (Optional) Indicates a password is not required for this user. +* `passwords` - (Optional) Passwords used for this user. You can create up to two passwords for each user. +* `tags` - (Optional) A list of tags to be added to this resource. A tag is a key-value pair. + +### authentication_mode Configuration Block + +* `passwords` - (Optional) Specifies the passwords to use for authentication if `type` is set to `password`. +* `type` - (Required) Specifies the authentication type. 
Possible options are: `password`, `no-password-required` or `iam`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the created ElastiCache User.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `5m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache users using the `userId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ElastiCache users using the `userId`. For example:
+
+```console
+% terraform import aws_elasticache_user.my_user userId1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticache_user_group.html.markdown b/website/docs/cdktf/typescript/r/elasticache_user_group.html.markdown
new file mode 100644
index 00000000000..0dd5ec8b557
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticache_user_group.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_user_group"
+description: |-
+  Provides an ElastiCache user group.
+---
+
+
+
+# Resource: aws_elasticache_user_group
+
+Provides an ElastiCache user group resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheUser } from "./.gen/providers/aws/elasticache-user";
+import { ElasticacheUserGroup } from "./.gen/providers/aws/elasticache-user-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new ElasticacheUser(this, "test", {
+      accessString:
+        "on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember",
+      engine: "REDIS",
+      passwords: ["password123456789"],
+      userId: "testUserId",
+      userName: "default",
+    });
+    const awsElasticacheUserGroupTest = new ElasticacheUserGroup(
+      this,
+      "test_1",
+      {
+        engine: "REDIS",
+        userGroupId: "userGroupId",
+        userIds: [test.userId],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsElasticacheUserGroupTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `engine` - (Required) The current supported value is `redis`.
+* `userGroupId` - (Required) The ID of the user group.
+
+The following arguments are optional:
+
+* `userIds` - (Optional) The list of user IDs that belong to the user group.
+* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The user group identifier.
+* `arn` - The ARN that identifies the user group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache user groups using the `userGroupId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ElastiCache user groups using the `userGroupId`. For example:
+
+```console
+% terraform import aws_elasticache_user_group.my_user_group userGroupId1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticache_user_group_association.html.markdown b/website/docs/cdktf/typescript/r/elasticache_user_group_association.html.markdown
new file mode 100644
index 00000000000..83f6c8c891b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticache_user_group_association.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "ElastiCache"
+layout: "aws"
+page_title: "AWS: aws_elasticache_user_group_association"
+description: |-
+  Associate an ElastiCache user and user group.
+---
+
+
+
+# Resource: aws_elasticache_user_group_association
+
+Associate an existing ElastiCache user and an existing user group.
+
+~> **NOTE:** Terraform will detect changes in the `awsElasticacheUserGroup` since `awsElasticacheUserGroupAssociation` changes the user IDs associated with the user group. You can ignore these changes with the `lifecycle` `ignoreChanges` meta argument as shown in the example.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticacheUser } from "./.gen/providers/aws/elasticache-user";
+import { ElasticacheUserGroup } from "./.gen/providers/aws/elasticache-user-group";
+import { ElasticacheUserGroupAssociation } from "./.gen/providers/aws/elasticache-user-group-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const defaultVar = new ElasticacheUser(this, "default", {
+      accessString:
+        "on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember",
+      engine: "REDIS",
+      passwords: ["password123456789"],
+      userId: "defaultUserID",
+      userName: "default",
+    });
+    const example = new ElasticacheUser(this, "example", {
+      accessString:
+        "on ~app::* -@all +@read +@hash +@bitmap +@geo -setbit -bitfield -hset -hsetnx -hmset -hincrby -hincrbyfloat -hdel -bitop -geoadd -georadius -georadiusbymember",
+      engine: "REDIS",
+      passwords: ["password123456789"],
+      userId: "exampleUserID",
+      userName: "exampleuser",
+    });
+    const awsElasticacheUserGroupExample = new ElasticacheUserGroup(
+      this,
+      "example_2",
+      {
+        engine: "REDIS",
+        lifecycle: {
+          ignoreChanges: ["user_ids"],
+        },
+        userGroupId: "userGroupId",
+        userIds: [defaultVar.userId],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsElasticacheUserGroupExample.overrideLogicalId("example");
+    const awsElasticacheUserGroupAssociationExample =
+      new ElasticacheUserGroupAssociation(this, "example_3", {
+        userGroupId: Token.asString(awsElasticacheUserGroupExample.userGroupId),
+        userId: example.userId,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsElasticacheUserGroupAssociationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `userGroupId` - (Required) ID of the user group.
+* `userId` - (Required) ID of the user to associate with the user group.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ElastiCache user group associations using the `userGroupId` and `userId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ElastiCache user group associations using the `userGroupId` and `userId`. For example:
+
+```console
+% terraform import aws_elasticache_user_group_association.example userGroupId1,userId
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticsearch_domain.html.markdown b/website/docs/cdktf/typescript/r/elasticsearch_domain.html.markdown
new file mode 100644
index 00000000000..0f733612d97
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticsearch_domain.html.markdown
@@ -0,0 +1,449 @@
+---
+subcategory: "Elasticsearch"
+layout: "aws"
+page_title: "AWS: aws_elasticsearch_domain"
+description: |-
+  Terraform resource for managing an AWS Elasticsearch Domain.
+--- + + + +# Resource: aws_elasticsearch_domain + +Manages an AWS Elasticsearch Domain. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElasticsearchDomain(this, "example", { + clusterConfig: { + instanceType: "r4.large.elasticsearch", + }, + domainName: "example", + elasticsearchVersion: "7.10", + tags: { + Domain: "TestDomain", + }, + }); + } +} + +``` + +### Access Policy + +-> See also: [`awsElasticsearchDomainPolicy` resource](/docs/providers/aws/r/elasticsearch_domain_policy.html) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformVariable, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const domain = new TerraformVariable(this, "domain", { + default: "tf-test", + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_2", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + new ElasticsearchDomain(this, "example", { + accessPolicies: + '{\n "Version": "2012-10-17",\n "Statement": [\n {\n "Action": "es:*",\n "Principal": "*",\n "Effect": "Allow",\n "Resource": "arn:aws:es:${' + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:domain/${" + + domain.value + + '}/*",\n "Condition": {\n "IpAddress": {"aws:SourceIp": ["66.193.100.22/32"]}\n }\n }\n ]\n}\n\n', + domainName: domain.stringValue, + }); + } +} + +``` + +### Log Publishing to CloudWatch Logs + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain"; +interface MyConfig { + domainName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "example", + }); + const awsElasticsearchDomainExample = new ElasticsearchDomain( + this, + "example_1", + { + logPublishingOptions: [ + { + cloudwatchLogGroupArn: example.arn, + logType: "INDEX_SLOW_LOGS", + }, + ], + domainName: config.domainName, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsElasticsearchDomainExample.overrideLogicalId("example"); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_2", + { + statement: [ + { + actions: [ + "logs:PutLogEvents", + "logs:PutLogEventsBatch", + "logs:CreateLogStream", + ], + effect: "Allow", + principals: [ + { + identifiers: ["es.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:logs:*"], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsCloudwatchLogResourcePolicyExample = + new CloudwatchLogResourcePolicy(this, "example_3", { + policyDocument: Token.asString(dataAwsIamPolicyDocumentExample.json), + policyName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchLogResourcePolicyExample.overrideLogicalId("example"); + } +} + +``` + +### VPC based ES + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + TerraformVariable, + Token, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { DataAwsSubnets } from "./.gen/providers/aws/data-aws-subnets"; +import { DataAwsVpc } from "./.gen/providers/aws/data-aws-vpc"; +import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain"; +import { IamServiceLinkedRole } from "./.gen/providers/aws/iam-service-linked-role"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. 
+ You can read more about this at https://cdk.tf/variables*/ + const domain = new TerraformVariable(this, "domain", { + default: "tf-test", + }); + const vpc = new TerraformVariable(this, "vpc", {}); + const es = new IamServiceLinkedRole(this, "es", { + awsServiceName: "opensearchservice.amazonaws.com", + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_4", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const selected = new DataAwsVpc(this, "selected", { + tags: { + Name: vpc.stringValue, + }, + }); + const awsSecurityGroupEs = new SecurityGroup(this, "es_6", { + description: "Managed by Terraform", + ingress: [ + { + cidrBlocks: [Token.asString(selected.cidrBlock)], + fromPort: 443, + protocol: "tcp", + toPort: 443, + }, + ], + name: "${" + vpc.value + "}-elasticsearch-${" + domain.value + "}", + vpcId: Token.asString(selected.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityGroupEs.overrideLogicalId("es"); + const dataAwsSubnetsSelected = new DataAwsSubnets(this, "selected_7", { + filter: [ + { + name: "vpc-id", + values: [Token.asString(selected.id)], + }, + ], + tags: { + Tier: "private", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsSubnetsSelected.overrideLogicalId("selected"); + const awsElasticsearchDomainEs = new ElasticsearchDomain(this, "es_8", { + accessPolicies: + '{\n\t"Version": "2012-10-17",\n\t"Statement": [\n\t\t{\n\t\t\t"Action": "es:*",\n\t\t\t"Principal": "*",\n\t\t\t"Effect": "Allow",\n\t\t\t"Resource": "arn:aws:es:${' + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:domain/${" + + domain.value + + '}/*"\n\t\t}\n\t]\n}\n\n', + advancedOptions: { + "rest.action.multi.allow_explicit_index": "true", + }, + clusterConfig: { + instanceType: "m4.large.elasticsearch", + zoneAwarenessEnabled: true, + }, + dependsOn: [es], + domainName: domain.stringValue, + elasticsearchVersion: "6.3", + tags: { + Domain: "TestDomain", + }, + vpcOptions: { + securityGroupIds: [Token.asString(awsSecurityGroupEs.id)], + subnetIds: [ + Token.asString(propertyAccess(dataAwsSubnetsSelected.ids, ["0"])), + Token.asString(propertyAccess(dataAwsSubnetsSelected.ids, ["1"])), + ], + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsElasticsearchDomainEs.overrideLogicalId("es"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `domainName` - (Required) Name of the domain. + +The following arguments are optional: + +* `accessPolicies` - (Optional) IAM policy document specifying the access policies for the domain. +* `advancedOptions` - (Optional) Key-value string pairs to specify advanced configuration options. Note that the values for these configuration options must be strings (wrapped in quotes) or they may be wrong and cause a perpetual diff, causing Terraform to want to recreate your Elasticsearch domain on every apply. +* `advancedSecurityOptions` - (Optional) Configuration block for [fine-grained access control](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/fgac.html). Detailed below. 
+* `autoTuneOptions` - (Optional) Configuration block for the Auto-Tune options of the domain. Detailed below.
+* `clusterConfig` - (Optional) Configuration block for the cluster of the domain. Detailed below.
+* `cognitoOptions` - (Optional) Configuration block for authenticating Kibana with Cognito. Detailed below.
+* `domainEndpointOptions` - (Optional) Configuration block for domain endpoint HTTP(S) related options. Detailed below.
+* `ebsOptions` - (Optional) Configuration block for EBS related options, may be required based on chosen [instance size](https://aws.amazon.com/elasticsearch-service/pricing/). Detailed below.
+* `elasticsearchVersion` - (Optional) Version of Elasticsearch to deploy. Defaults to `1.5`.
+* `encryptAtRest` - (Optional) Configuration block for encrypt at rest options. Only available for [certain instance types](http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-supported-instance-types.html). Detailed below.
+* `logPublishingOptions` - (Optional) Configuration block for publishing slow and application logs to CloudWatch Logs. This block can be declared multiple times, once for each log type, within the same resource. Detailed below.
+* `nodeToNodeEncryption` - (Optional) Configuration block for node-to-node encryption options. Detailed below.
+* `snapshotOptions` - (Optional) Configuration block for snapshot related options. Detailed below. DEPRECATED. For domains running Elasticsearch 5.3 and later, Amazon ES takes hourly automated snapshots, making this setting irrelevant. For domains running earlier versions of Elasticsearch, Amazon ES takes daily automated snapshots.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcOptions` - (Optional) Configuration block for VPC related options. Adding or removing this configuration forces a new resource ([documentation](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-vpc.html#es-vpc-limitations)). Detailed below.
+
+### advanced_security_options
+
+* `enabled` - (Required, Forces new resource) Whether advanced security is enabled.
+* `internalUserDatabaseEnabled` - (Optional, Default: false) Whether the internal user database is enabled. If not set, defaults to `false` by the AWS API.
+* `masterUserOptions` - (Optional) Configuration block for the main user. Detailed below.
+
+#### master_user_options
+
+* `masterUserArn` - (Optional) ARN for the main user. Only specify if `internalUserDatabaseEnabled` is not set or set to `false`.
+* `masterUserName` - (Optional) Main user's username, which is stored in the Amazon Elasticsearch Service domain's internal database. Only specify if `internalUserDatabaseEnabled` is set to `true`.
+* `masterUserPassword` - (Optional) Main user's password, which is stored in the Amazon Elasticsearch Service domain's internal database. Only specify if `internalUserDatabaseEnabled` is set to `true`.
+
+### auto_tune_options
+
+* `desiredState` - (Required) The Auto-Tune desired state for the domain. Valid values: `ENABLED` or `DISABLED`.
+* `maintenanceSchedule` - (Required if `rollbackOnDisable` is set to `DEFAULT_ROLLBACK`) Configuration block for Auto-Tune maintenance windows. Can be specified multiple times for each maintenance window. Detailed below.
+* `rollbackOnDisable` - (Optional) Whether to roll back to default Auto-Tune settings when disabling Auto-Tune. Valid values: `DEFAULT_ROLLBACK` or `NO_ROLLBACK`.
+
+#### maintenance_schedule
+
+* `startAt` - (Required) Date and time at which to start the Auto-Tune maintenance schedule in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `duration` - (Required) Configuration block for the duration of the Auto-Tune maintenance window. Detailed below.
+* `cronExpressionForRecurrence` - (Required) A cron expression specifying the recurrence pattern for an Auto-Tune maintenance schedule.
+
+##### duration
+
+* `value` - (Required) An integer specifying the value of the duration of an Auto-Tune maintenance window.
+* `unit` - (Required) The unit of time specifying the duration of an Auto-Tune maintenance window. Valid values: `HOURS`.
+
+### cluster_config
+
+* `coldStorageOptions` - (Optional) Configuration block containing cold storage configuration. Detailed below.
+* `dedicatedMasterCount` - (Optional) Number of dedicated main nodes in the cluster.
+* `dedicatedMasterEnabled` - (Optional) Whether dedicated main nodes are enabled for the cluster.
+* `dedicatedMasterType` - (Optional) Instance type of the dedicated main nodes in the cluster.
+* `instanceCount` - (Optional) Number of instances in the cluster.
+* `instanceType` - (Optional) Instance type of data nodes in the cluster.
+* `warmCount` - (Optional) Number of warm nodes in the cluster. Valid values are between `2` and `150`. `warmCount` can only be set, and must be set, when `warmEnabled` is set to `true`.
+* `warmEnabled` - (Optional) Whether to enable warm storage.
+* `warmType` - (Optional) Instance type for the Elasticsearch cluster's warm nodes. Valid values are `ultrawarm1.medium.elasticsearch`, `ultrawarm1.large.elasticsearch` and `ultrawarm1.xlarge.elasticsearch`. `warmType` can only be set, and must be set, when `warmEnabled` is set to `true`.
+* `zoneAwarenessConfig` - (Optional) Configuration block containing zone awareness settings. Detailed below.
+* `zoneAwarenessEnabled` - (Optional) Whether zone awareness is enabled, set to `true` for multi-AZ deployment. To enable awareness with three Availability Zones, the `availabilityZoneCount` within the `zoneAwarenessConfig` must be set to `3`.
+
+#### cold_storage_options
+
+* `enabled` - (Optional) Boolean to enable cold storage for an Elasticsearch domain. Defaults to `false`. Master and ultrawarm nodes must be enabled for cold storage.
+
+#### zone_awareness_config
+
+* `availabilityZoneCount` - (Optional) Number of Availability Zones for the domain to use with `zoneAwarenessEnabled`. Defaults to `2`. Valid values: `2` or `3`.
+
+### cognito_options
+
+AWS documentation: [Amazon Cognito Authentication for Kibana](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html)
+
+* `enabled` - (Optional, Default: false) Whether Amazon Cognito authentication with Kibana is enabled or not.
+* `identityPoolId` - (Required) ID of the Cognito Identity Pool to use.
+* `roleArn` - (Required) ARN of the IAM role that has the AmazonESCognitoAccess policy attached.
+* `userPoolId` - (Required) ID of the Cognito User Pool to use.
+
+### domain_endpoint_options
+
+* `customEndpointCertificateArn` - (Optional) ACM certificate ARN for your custom endpoint.
+* `customEndpointEnabled` - (Optional) Whether to enable custom endpoint for the Elasticsearch domain.
+* `customEndpoint` - (Optional) Fully qualified domain for your custom endpoint.
+* `enforceHttps` - (Optional) Whether or not to require HTTPS. Defaults to `true`.
+* `tlsSecurityPolicy` - (Optional) Name of the TLS security policy that needs to be applied to the HTTPS endpoint. Valid values: `Policy-Min-TLS-1-0-2019-07` and `Policy-Min-TLS-1-2-2019-07`. Terraform will only perform drift detection if a configuration value is provided.
+
+### ebs_options
+
+* `ebsEnabled` - (Required) Whether EBS volumes are attached to data nodes in the domain.
+* `iops` - (Optional) Baseline input/output (I/O) performance of EBS volumes attached to data nodes. Applicable only for the GP3 and Provisioned IOPS EBS volume types.
+* `throughput` - (Required if `volumeType` is set to `gp3`) Specifies the throughput (in MiB/s) of the EBS volumes attached to data nodes. Applicable only for the gp3 volume type.
+* `volumeSize` - (Required if `ebsEnabled` is set to `true`) Size of EBS volumes attached to data nodes (in GiB).
+* `volumeType` - (Optional) Type of EBS volumes attached to data nodes.
+
+### encrypt_at_rest
+
+~> **Note:** You can enable `encryptAtRest` _in place_ for an existing, unencrypted domain only if your Elasticsearch version is 6.7 or greater. For lower versions, if you enable `encryptAtRest`, Terraform will recreate the domain, potentially causing data loss. For any version, if you disable `encryptAtRest` for an existing, encrypted domain, Terraform will recreate the domain, potentially causing data loss. If you change the `kmsKeyId`, Terraform will also recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable encryption at rest. If the `encryptAtRest` block is not provided then this defaults to `false`. Enabling encryption on new domains requires `elasticsearchVersion` 5.1 or greater.
+* `kmsKeyId` - (Optional) KMS key ARN to encrypt the Elasticsearch domain with. If not specified then it defaults to using the `aws/es` service KMS key. Note that KMS will accept a KMS key ID but will return the key ARN. To prevent Terraform detecting unwanted changes, use the key ARN instead.
+
+### log_publishing_options
+
+* `cloudwatchLogGroupArn` - (Required) ARN of the CloudWatch log group to which logs need to be published.
+* `enabled` - (Optional, Default: true) Whether given log publishing option is enabled or not.
+* `logType` - (Required) Type of Elasticsearch log. Valid values: `INDEX_SLOW_LOGS`, `SEARCH_SLOW_LOGS`, `ES_APPLICATION_LOGS`, `AUDIT_LOGS`.
+
+### node_to_node_encryption
+
+~> **Note:** You can enable `nodeToNodeEncryption` _in place_ for an existing, unencrypted domain only if your Elasticsearch version is 6.7 or greater. For lower versions, if you enable `nodeToNodeEncryption`, Terraform will recreate the domain, potentially causing data loss. For any version, if you disable `nodeToNodeEncryption` for an existing, node-to-node encrypted domain, Terraform will recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable node-to-node encryption. If the `nodeToNodeEncryption` block is not provided then this defaults to `false`. Enabling node-to-node encryption of a new domain requires an `elasticsearchVersion` of `6.0` or greater.
+
+### snapshot_options
+
+* `automatedSnapshotStartHour` - (Required) Hour during which the service takes an automated daily snapshot of the indices in the domain.
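+
+As a quick illustration of the `encryptAtRest`, `nodeToNodeEncryption`, and `snapshotOptions` blocks described above, the following minimal sketch (not part of the original documentation; the domain name is a placeholder) enables both encryption options on a small domain:
+
+```typescript
+// Hypothetical sketch: a domain with encryption at rest and node-to-node
+// encryption enabled. Assumes provider bindings were generated via `cdktf get`.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain";
+class EncryptedDomainStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElasticsearchDomain(this, "encrypted", {
+      domainName: "tf-encrypted-example",
+      elasticsearchVersion: "6.7", // in-place enablement requires 6.7 or greater
+      encryptAtRest: {
+        enabled: true, // falls back to the aws/es service key when kmsKeyId is omitted
+      },
+      nodeToNodeEncryption: {
+        enabled: true,
+      },
+      snapshotOptions: {
+        automatedSnapshotStartHour: 23,
+      },
+    });
+  }
+}
+```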
+
+### vpc_options
+
+AWS documentation: [VPC Support for Amazon Elasticsearch Service Domains](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-vpc.html)
+
+~> **Note:** You must have created the service-linked role for the Elasticsearch service to use `vpcOptions`. If you need to create the service-linked role at the same time as the Elasticsearch domain then you must use `dependsOn` to make sure that the role is created before the Elasticsearch domain. See the [VPC based ES domain example](#vpc-based-es) above.
+
+-> Security Groups and Subnets referenced in these attributes must all be within the same VPC. This determines what VPC the endpoints are created in.
+
+* `securityGroupIds` - (Optional) List of VPC Security Group IDs to be applied to the Elasticsearch domain endpoints. If omitted, the default Security Group for the VPC will be used.
+* `subnetIds` - (Required) List of VPC Subnet IDs for the Elasticsearch domain endpoints to be created in.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the domain.
+* `domainId` - Unique identifier for the domain.
+* `domainName` - Name of the Elasticsearch domain.
+* `endpoint` - Domain-specific endpoint used to submit index, search, and data upload requests.
+* `kibanaEndpoint` - Domain-specific endpoint for Kibana without the https scheme.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcOptions0AvailabilityZones` - If the domain was created inside a VPC, the names of the availability zones the configured `subnetIds` were created inside.
+* `vpcOptions0VpcId` - If the domain was created inside a VPC, the ID of the VPC.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `60m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elasticsearch domains using the `domainName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Elasticsearch domains using the `domainName`. For example:
+
+```console
+% terraform import aws_elasticsearch_domain.example domain_name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticsearch_domain_policy.html.markdown b/website/docs/cdktf/typescript/r/elasticsearch_domain_policy.html.markdown
new file mode 100644
index 00000000000..f1b06024e49
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticsearch_domain_policy.html.markdown
@@ -0,0 +1,57 @@
+---
+subcategory: "Elasticsearch"
+layout: "aws"
+page_title: "AWS: aws_elasticsearch_domain_policy"
+description: |-
+  Provides an Elasticsearch Domain Policy.
+---
+
+
+
+# Resource: aws_elasticsearch_domain_policy
+
+Allows setting a policy on an Elasticsearch domain while referencing domain attributes (e.g., ARN).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain";
+import { ElasticsearchDomainPolicy } from "./.gen/providers/aws/elasticsearch-domain-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new ElasticsearchDomain(this, "example", {
+      domainName: "tf-test",
+      elasticsearchVersion: "2.3",
+    });
+    new ElasticsearchDomainPolicy(this, "main", {
+      accessPolicies:
+        '{\n "Version": "2012-10-17",\n "Statement": [\n {\n "Action": "es:*",\n "Principal": "*",\n "Effect": "Allow",\n "Condition": {\n "IpAddress": {"aws:SourceIp": "127.0.0.1/32"}\n },\n "Resource": "${' +
+        example.arn +
+        '}/*"\n }\n ]\n}\n\n',
+      domainName: example.domainName,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domainName` - (Required) Name of the domain.
+* `accessPolicies` - (Optional) IAM policy document specifying the access policies for the domain.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elasticsearch_domain_saml_options.html.markdown b/website/docs/cdktf/typescript/r/elasticsearch_domain_saml_options.html.markdown
new file mode 100644
index 00000000000..7550f36782a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elasticsearch_domain_saml_options.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "Elasticsearch"
+layout: "aws"
+page_title: "AWS: aws_elasticsearch_domain_saml_options"
+description: |-
+  Terraform resource for managing SAML authentication options for an AWS Elasticsearch Domain.
+---
+
+
+
+# Resource: aws_elasticsearch_domain_saml_options
+
+Manages SAML authentication options for an AWS Elasticsearch Domain.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain";
+import { ElasticsearchDomainSamlOptions } from "./.gen/providers/aws/elasticsearch-domain-saml-options";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new ElasticsearchDomain(this, "example", {
+      clusterConfig: {
+        instanceType: "r4.large.elasticsearch",
+      },
+      domainName: "example",
+      elasticsearchVersion: "1.5",
+      snapshotOptions: {
+        automatedSnapshotStartHour: 23,
+      },
+      tags: {
+        Domain: "TestDomain",
+      },
+    });
+    const awsElasticsearchDomainSamlOptionsExample =
+      new ElasticsearchDomainSamlOptions(this, "example_1", {
+        domainName: example.domainName,
+        samlOptions: {
+          enabled: true,
+          idp: {
+            entityId: "https://example.com",
+            metadataContent: Token.asString(Fn.file("./saml-metadata.xml")),
+          },
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsElasticsearchDomainSamlOptionsExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `domainName` - (Required) Name of the domain.
+
+The following arguments are optional:
+
+* `samlOptions` - (Optional) The SAML authentication options for an AWS Elasticsearch Domain.
+
+### saml_options
+
+* `enabled` - (Required) Whether SAML authentication is enabled.
+* `idp` - (Optional) Information from your identity provider.
+* `masterBackendRole` - (Optional) This backend role from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `masterUserName` - (Optional) This username from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `rolesKey` - (Optional) Element of the SAML assertion to use for backend roles. Default is `roles`.
+* `sessionTimeoutMinutes` - (Optional) Duration of a session in minutes after a user logs in. Default is 60. Maximum value is 1,440.
+* `subjectKey` - (Optional) Custom SAML attribute to use for user names. Default is an empty string - `""`. This will cause Elasticsearch to use the `NameID` element of the `Subject`, which is the default location for name identifiers in the SAML specification.
+
+#### idp
+
+* `entityId` - (Required) The unique Entity ID of the application in the SAML Identity Provider.
+* `metadataContent` - (Required) The metadata of the SAML application in XML format.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the domain the SAML options are associated with.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elasticsearch Domain SAML options using the `domainName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Elasticsearch Domain SAML options using the `domainName`. For example:
+
+```console
+% terraform import aws_elasticsearch_domain_saml_options.example domain_name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elastictranscoder_pipeline.html.markdown b/website/docs/cdktf/typescript/r/elastictranscoder_pipeline.html.markdown
new file mode 100644
index 00000000000..52909e05c46
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elastictranscoder_pipeline.html.markdown
@@ -0,0 +1,139 @@
+---
+subcategory: "Elastic Transcoder"
+layout: "aws"
+page_title: "AWS: aws_elastictranscoder_pipeline"
+description: |-
+  Provides an Elastic Transcoder pipeline resource.
+---
+
+
+
+# Resource: aws_elastictranscoder_pipeline
+
+Provides an Elastic Transcoder pipeline resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElastictranscoderPipeline } from "./.gen/providers/aws/elastictranscoder-pipeline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElastictranscoderPipeline(this, "bar", {
+      contentConfig: {
+        bucket: contentBucket.id,
+        storageClass: "Standard",
+      },
+      inputBucket: inputBucket.id,
+      name: "aws_elastictranscoder_pipeline_tf_test_",
+      role: testRole.arn,
+      thumbnailConfig: {
+        bucket: thumbBucket.id,
+        storageClass: "Standard",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+See ["Create Pipeline"](http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/create-pipeline.html) in the AWS docs for reference.
+
+This resource supports the following arguments:
+
+* `awsKmsKeyArn` - (Optional) The AWS Key Management Service (AWS KMS) key that you want to use with this pipeline.
+* `contentConfig` - (Optional) The ContentConfig object specifies information about the Amazon S3 bucket in which you want Elastic Transcoder to save transcoded files and playlists. (documented below)
+* `contentConfigPermissions` - (Optional) The permissions for the `contentConfig` object. (documented below)
+* `inputBucket` - (Required) The Amazon S3 bucket in which you saved the media files that you want to transcode and the graphics that you want to use as watermarks.
+* `name` - (Optional, Forces new resource) The name of the pipeline. Maximum 40 characters.
+* `notifications` - (Optional) The Amazon Simple Notification Service (Amazon SNS) topic that you want to notify to report job status. (documented below)
+* `outputBucket` - (Optional) The Amazon S3 bucket in which you want Elastic Transcoder to save the transcoded files.
+* `role` - (Required) The IAM Amazon Resource Name (ARN) for the role that you want Elastic Transcoder to use to transcode jobs for this pipeline.
+* `thumbnailConfig` - (Optional) The ThumbnailConfig object specifies information about the Amazon S3 bucket in which you want Elastic Transcoder to save thumbnail files. (documented below)
+* `thumbnailConfigPermissions` - (Optional) The permissions for the `thumbnailConfig` object. (documented below)
+
+The `contentConfig` object specifies information about the Amazon S3 bucket in which you want Elastic Transcoder to save transcoded files and playlists: which bucket to use, and the storage class that you want to assign to the files.
+If you specify values for `contentConfig`, you must also specify values for `thumbnailConfig`. If you specify values for `contentConfig` and `thumbnailConfig`, omit the `outputBucket` object.
+
+The `contentConfig` object supports the following:
+
+* `bucket` - The Amazon S3 bucket in which you want Elastic Transcoder to save transcoded files and playlists.
+* `storageClass` - The Amazon S3 storage class, `Standard` or `ReducedRedundancy`, that you want Elastic Transcoder to assign to the files and playlists that it stores in your Amazon S3 bucket.
+
+The `contentConfigPermissions` object supports the following:
+
+* `access` - The permission that you want to give to the AWS user that you specified in `contentConfigPermissionsGrantee`. Valid values are `Read`, `ReadAcp`, `WriteAcp` or `FullControl`.
+* `grantee` - The AWS user or group that you want to have access to transcoded files and playlists.
+* `granteeType` - Specify the type of value that appears in the `contentConfigPermissionsGrantee` object. Valid values are `Canonical`, `Email` or `Group`.
+
+The `notifications` object supports the following:
+
+* `completed` - The topic ARN for the Amazon SNS topic that you want to notify when Elastic Transcoder has finished processing a job in this pipeline.
+* `error` - The topic ARN for the Amazon SNS topic that you want to notify when Elastic Transcoder encounters an error condition while processing a job in this pipeline.
+* `progressing` - The topic ARN for the Amazon Simple Notification Service (Amazon SNS) topic that you want to notify when Elastic Transcoder has started to process a job in this pipeline.
+* `warning` - The topic ARN for the Amazon SNS topic that you want to notify when Elastic Transcoder encounters a warning condition while processing a job in this pipeline.
+
+The `thumbnailConfig` object specifies information about the Amazon S3 bucket in which you want Elastic Transcoder to save thumbnail files: which bucket to use, which users you want to have access to the files, the type of access you want users to have, and the storage class that you want to assign to the files. If you specify values for `contentConfig`, you must also specify values for `thumbnailConfig` even if you don't want to create thumbnails. (You control whether to create thumbnails when you create a job. For more information, see ThumbnailPattern in the topic Create Job.) If you specify values for `contentConfig` and `thumbnailConfig`, omit the `outputBucket` object.
+
+The `thumbnailConfig` object supports the following:
+
+* `bucket` - The Amazon S3 bucket in which you want Elastic Transcoder to save thumbnail files.
+* `storageClass` - The Amazon S3 storage class, `Standard` or `ReducedRedundancy`, that you want Elastic Transcoder to assign to the thumbnails that it stores in your Amazon S3 bucket.
+
+The `thumbnailConfigPermissions` object supports the following:
+
+* `access` - The permission that you want to give to the AWS user that you specified in `thumbnailConfigPermissionsGrantee`. Valid values are `Read`, `ReadAcp`, `WriteAcp` or `FullControl`.
+* `grantee` - The AWS user or group that you want to have access to thumbnail files.
+* `granteeType` - Specify the type of value that appears in the `thumbnailConfigPermissionsGrantee` object. Valid values are `Canonical`, `Email` or `Group`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Elastictranscoder pipeline.
+* `arn` - The ARN of the Elastictranscoder pipeline.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Transcoder pipelines using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Elastic Transcoder pipelines using the `id`. For example:
+
+```console
+% terraform import aws_elastictranscoder_pipeline.basic_pipeline 1407981661351-cttk8b
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elastictranscoder_preset.html.markdown b/website/docs/cdktf/typescript/r/elastictranscoder_preset.html.markdown
new file mode 100644
index 00000000000..950a00657b6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elastictranscoder_preset.html.markdown
@@ -0,0 +1,202 @@
+---
+subcategory: "Elastic Transcoder"
+layout: "aws"
+page_title: "AWS: aws_elastictranscoder_preset"
+description: |-
+  Provides an Elastic Transcoder preset resource.
+---
+
+
+
+# Resource: aws_elastictranscoder_preset
+
+Provides an Elastic Transcoder preset resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ElastictranscoderPreset } from "./.gen/providers/aws/elastictranscoder-preset";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElastictranscoderPreset(this, "bar", {
+      audio: {
+        audioPackingMode: "SingleTrack",
+        bitRate: Token.asString(96),
+        channels: Token.asString(2),
+        codec: "AAC",
+        sampleRate: Token.asString(44100),
+      },
+      audioCodecOptions: {
+        profile: "AAC-LC",
+      },
+      container: "mp4",
+      description: "Sample Preset",
+      name: "sample_preset",
+      thumbnails: {
+        format: "png",
+        interval: Token.asString(120),
+        maxHeight: "auto",
+        maxWidth: "auto",
+        paddingPolicy: "Pad",
+        sizingPolicy: "Fit",
+      },
+      video: {
+        bitRate: "1600",
+        codec: "H.264",
+        displayAspectRatio: "16:9",
+        fixedGop: "false",
+        frameRate: "auto",
+        keyframesMaxDist: Token.asString(240),
+        maxFrameRate: "60",
+        maxHeight: "auto",
+        maxWidth: "auto",
+        paddingPolicy: "Pad",
+        sizingPolicy: "Fit",
+      },
+      videoCodecOptions: {
+        ColorSpaceConversionMode: "None",
+        InterlacedMode: "Progressive",
+        Level: "2.2",
+        MaxReferenceFrames: Token.asString(3),
+        Profile: "main",
+      },
+      videoWatermarks: [
+        {
+          horizontalAlign: "Right",
+          horizontalOffset: "10px",
+          id: "Terraform Test",
+          maxHeight: "20%",
+          maxWidth: "20%",
+          opacity: "55.5",
+          sizingPolicy: "ShrinkToFit",
+          target: "Content",
+          verticalAlign: "Bottom",
+          verticalOffset: "10px",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+See ["Create Preset"](http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/create-preset.html) in the AWS docs for reference.
+
+This resource supports the following arguments:
+
+* `audio` - (Optional, Forces new resource) Audio parameters object (documented below).
+* `audioCodecOptions` - (Optional, Forces new resource) Codec options for the audio parameters (documented below)
+* `container` - (Required, Forces new resource) The container type for the output file. Valid values are `flac`, `flv`, `fmp4`, `gif`, `mp3`, `mp4`, `mpg`, `mxf`, `oga`, `ogg`, `ts`, and `webm`.
+* `description` - (Optional, Forces new resource) A description of the preset (maximum 255 characters)
+* `name` - (Optional, Forces new resource) The name of the preset. (maximum 40 characters)
+* `thumbnails` - (Optional, Forces new resource) Thumbnail parameters object (documented below)
+* `video` - (Optional, Forces new resource) Video parameters object (documented below)
+* `videoWatermarks` - (Optional, Forces new resource) Watermark parameters for the video parameters (documented below)
+* `videoCodecOptions` - (Optional, Forces new resource) Codec options for the video parameters
+
+The `audio` object supports the following:
+
+* `audioPackingMode` - The method of organizing audio channels and tracks. Use Audio:Channels to specify the number of channels in your output, and Audio:AudioPackingMode to specify the number of tracks and their relation to the channels. If you do not specify an Audio:AudioPackingMode, Elastic Transcoder uses SingleTrack.
+* `bitRate` - The bit rate of the audio stream in the output file, in kilobits/second. Enter an integer between 64 and 320, inclusive.
+* `channels` - The number of audio channels in the output file
+* `codec` - The audio codec for the output file. Valid values are `AAC`, `flac`, `mp2`, `mp3`, `pcm`, and `vorbis`.
+* `sampleRate` - The sample rate of the audio stream in the output file, in hertz. Valid values are: `auto`, `22050`, `32000`, `44100`, `48000`, `96000`
+
+The `audioCodecOptions` object supports the following:
+
+* `bitDepth` - The bit depth of a sample is how many bits of information are included in the audio samples. Valid values are `16` and `24`. (FLAC/PCM Only)
+* `bitOrder` - The order the bits of a PCM sample are stored in. The supported value is LittleEndian. (PCM Only)
+* `profile` - If you specified AAC for Audio:Codec, choose the AAC profile for the output file.
+* `signed` - Whether audio samples are represented with negative and positive numbers (signed) or only positive numbers (unsigned). The supported value is Signed. (PCM Only)
+
+The `thumbnails` object supports the following:
+
+* `aspectRatio` - The aspect ratio of thumbnails. The following values are valid: auto, 1:1, 4:3, 3:2, 16:9
+* `format` - The format of thumbnails, if any. Valid formats are jpg and png.
+* `interval` - The approximate number of seconds between thumbnails. The value must be an integer. The actual interval can vary by several seconds from one thumbnail to the next.
+* `maxHeight` - The maximum height of thumbnails, in pixels. If you specify auto, Elastic Transcoder uses 1080 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 32 and 3072, inclusive.
+* `maxWidth` - The maximum width of thumbnails, in pixels. If you specify auto, Elastic Transcoder uses 1920 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 32 and 4096, inclusive.
+* `paddingPolicy` - When you set PaddingPolicy to Pad, Elastic Transcoder might add black bars to the top and bottom and/or left and right sides of thumbnails to make the total size of the thumbnails match the values that you specified for thumbnail MaxWidth and MaxHeight settings.
+* `resolution` - The width and height of thumbnail files in pixels, in the format WidthxHeight, where both values are even integers. The values cannot exceed the width and height that you specified in the Video:Resolution object. (To better control resolution and aspect ratio of thumbnails, we recommend that you use the thumbnail values `maxWidth`, `maxHeight`, `sizingPolicy`, and `paddingPolicy` instead of `resolution` and `aspectRatio`. The two groups of settings are mutually exclusive. Do not use them together.)
+* `sizingPolicy` - A value that controls scaling of thumbnails. Valid values are: `Fit`, `Fill`, `Stretch`, `Keep`, `ShrinkToFit`, and `ShrinkToFill`.
+
+The `video` object supports the following:
+
+* `aspectRatio` - The display aspect ratio of the video in the output file. Valid values are: `auto`, `1:1`, `4:3`, `3:2`, `16:9`. (Note: to better control resolution and aspect ratio of output videos, we recommend that you use the values `maxWidth`, `maxHeight`, `sizingPolicy`, `paddingPolicy`, and `displayAspectRatio` instead of `resolution` and `aspectRatio`.)
+* `bitRate` - The bit rate of the video stream in the output file, in kilobits/second. You can configure variable bit rate or constant bit rate encoding.
+* `codec` - The video codec for the output file. Valid values are `gif`, `H.264`, `mpeg2`, `vp8`, and `vp9`.
+* `displayAspectRatio` - The value that Elastic Transcoder adds to the metadata in the output file. If you set DisplayAspectRatio to auto, Elastic Transcoder chooses an aspect ratio that ensures square pixels. If you specify another option, Elastic Transcoder sets that value in the output file.
+* `fixedGop` - Whether to use a fixed value for Video:FixedGOP. Not applicable for containers of type gif. Valid values are true and false. Also known as Fixed Number of Frames Between Keyframes.
+* `frameRate` - The frames per second for the video stream in the output file. The following values are valid: `auto`, `10`, `15`, `23.97`, `24`, `25`, `29.97`, `30`, `50`, `60`.
+* `keyframesMaxDist` - The maximum number of frames between key frames. Not applicable for containers of type gif.
+* `maxFrameRate` - If you specify auto for FrameRate, Elastic Transcoder uses the frame rate of the input video for the frame rate of the output video, up to the maximum frame rate. If you do not specify a MaxFrameRate, Elastic Transcoder will use a default of 30.
+* `maxHeight` - The maximum height of the output video in pixels. If you specify auto, Elastic Transcoder uses 1080 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 96 and 3072, inclusive.
+* `maxWidth` - The maximum width of the output video in pixels. If you specify auto, Elastic Transcoder uses 1920 (Full HD) as the default value. If you specify a numeric value, enter an even integer between 128 and 4096, inclusive.
+* `paddingPolicy` - When you set PaddingPolicy to Pad, Elastic Transcoder might add black bars to the top and bottom and/or left and right sides of the output video to make the total size of the output video match the values that you specified for `maxWidth` and `maxHeight`.
+* `resolution` - The width and height of the video in the output file, in pixels. Valid values are `auto` and `widthxheight`. (see note for `aspectRatio`)
+* `sizingPolicy` - A value that controls scaling of the output video. Valid values are: `Fit`, `Fill`, `Stretch`, `Keep`, `ShrinkToFit`, `ShrinkToFill`.
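+
+Since `video` and `thumbnails` are optional, a preset can also be audio-only. The following sketch (not part of the original documentation; names and values are illustrative) exercises just the `audio` arguments described above:
+
+```typescript
+// Hypothetical audio-only preset: no video or thumbnail blocks are required
+// when the container is an audio format such as mp3.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ElastictranscoderPreset } from "./.gen/providers/aws/elastictranscoder-preset";
+class AudioPresetStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ElastictranscoderPreset(this, "audio_only", {
+      container: "mp3",
+      description: "Audio-only sample preset",
+      name: "audio_only_preset",
+      audio: {
+        audioPackingMode: "SingleTrack",
+        bitRate: "128", // kilobits/second, between 64 and 320
+        channels: "2",
+        codec: "mp3",
+        sampleRate: "44100",
+      },
+    });
+  }
+}
+```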
+
+The `videoWatermarks` object supports the following:
+
+* `horizontalAlign` - The horizontal position of the watermark unless you specify a nonzero value for `horizontalOffset`.
+* `horizontalOffset` - The amount by which you want the horizontal position of the watermark to be offset from the position specified by `horizontalAlign`.
+* `id` - A unique identifier for the settings for one watermark. The value of Id can be up to 40 characters long. You can specify settings for up to four watermarks.
+* `maxHeight` - The maximum height of the watermark.
+* `maxWidth` - The maximum width of the watermark.
+* `opacity` - A percentage that indicates how much you want a watermark to obscure the video in the location where it appears.
+* `sizingPolicy` - A value that controls scaling of the watermark. Valid values are: `Fit`, `Stretch`, `ShrinkToFit`
+* `target` - A value that determines how Elastic Transcoder interprets values that you specified for `videoWatermarksHorizontalOffset`, `videoWatermarksVerticalOffset`, `videoWatermarksMaxWidth`, and `videoWatermarksMaxHeight`. Valid values are `Content` and `Frame`.
+* `verticalAlign` - The vertical position of the watermark unless you specify a nonzero value for `verticalOffset`. Valid values are `Top`, `Bottom`, `Center`.
+* `verticalOffset` - The amount by which you want the vertical position of the watermark to be offset from the position specified by `verticalAlign`
+
+The `videoCodecOptions` map supports the following:
+
+* `profile` - The codec profile that you want to use for the output file. (H.264/VP8 Only)
+* `level` - The H.264 level that you want to use for the output file. Elastic Transcoder supports the following levels: `1`, `1b`, `1.1`, `1.2`, `1.3`, `2`, `2.1`, `2.2`, `3`, `3.1`, `3.2`, `4`, `4.1` (H.264 only)
+* `maxReferenceFrames` - The maximum number of previously decoded frames to use as a reference for decoding future frames. Valid values are integers 0 through 16. (H.264 only)
+* `maxBitRate` - The maximum number of kilobits per second in the output video. Specify a value between 16 and 62,500 inclusive, or `auto`. (Optional, H.264/MPEG2/VP8/VP9 only)
+* `bufferSize` - The maximum number of kilobits in any x seconds of the output video. This window is commonly 10 seconds, the standard segment duration when you're using ts for the container type of the output video. Specify an integer greater than 0. If you specify MaxBitRate and omit BufferSize, Elastic Transcoder sets BufferSize to 10 times the value of MaxBitRate. (Optional, H.264/MPEG2/VP8/VP9 only)
+* `interlacedMode` - The interlace mode for the output video. (Optional, H.264/MPEG2 Only)
+* `colorSpaceConversion` - The color space conversion Elastic Transcoder applies to the output video. Valid values are `None`, `Bt709toBt601`, `Bt601toBt709`, and `Auto`. (Optional, H.264/MPEG2 Only)
+* `chromaSubsampling` - The sampling pattern for the chroma (color) channels of the output video. Valid values are `yuv420p` and `yuv422p`.
+* `loopCount` - The number of times you want the output gif to loop (Gif only)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Elastic Transcoder Preset.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic Transcoder presets using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Elastic Transcoder presets using the `id`. For example:
+
+```console
+% terraform import aws_elastictranscoder_preset.basic_preset 1407981661351-cttk8b
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elb.html.markdown b/website/docs/cdktf/typescript/r/elb.html.markdown
new file mode 100644
index 00000000000..4285751721b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elb.html.markdown
@@ -0,0 +1,186 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_elb"
+description: |-
+  Provides an Elastic Load Balancer resource.
+---
+
+
+
+# Resource: aws_elb
+
+Provides an Elastic Load Balancer resource, also known as a "Classic Load Balancer" after the release of [Application/Network Load Balancers](/docs/providers/aws/r/lb.html).
+
+~> **NOTE on ELB Instances and ELB Attachments:** Terraform currently provides both a standalone [ELB Attachment resource](elb_attachment.html) (describing an instance attached to an ELB), and an ELB resource with `instances` defined in-line. At this time you cannot use an ELB with in-line instances in conjunction with ELB Attachment resources. Doing so will cause a conflict and will overwrite attachments.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Elb(this, "bar", {
+      accessLogs: {
+        bucket: "foo",
+        bucketPrefix: "bar",
+        interval: 60,
+      },
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      connectionDraining: true,
+      connectionDrainingTimeout: 400,
+      crossZoneLoadBalancing: true,
+      healthCheck: {
+        healthyThreshold: 2,
+        interval: 30,
+        target: "HTTP:8000/",
+        timeout: 3,
+        unhealthyThreshold: 2,
+      },
+      idleTimeout: 400,
+      instances: [foo.id],
+      listener: [
+        {
+          instancePort: 8000,
+          instanceProtocol: "http",
+          lbPort: 80,
+          lbProtocol: "http",
+        },
+        {
+          instancePort: 8000,
+          instanceProtocol: "http",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId:
+            "arn:aws:iam::123456789012:server-certificate/certName",
+        },
+      ],
+      name: "foobar-terraform-elb",
+      tags: {
+        Name: "foobar-terraform-elb",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the ELB. By default generated by Terraform.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `accessLogs` - (Optional) An Access Logs block. Access Logs documented below.
+* `availabilityZones` - (Required for an EC2-classic ELB) The AZs to serve traffic in.
+* `securityGroups` - (Optional) A list of security group IDs to assign to the ELB.
+  Only valid if creating an ELB within a VPC.
+* `subnets` - (Required for a VPC ELB) A list of subnet IDs to attach to the ELB. If an update would remove all of the current subnets, it forces a new resource.
+* `instances` - (Optional) A list of instance ids to place in the ELB pool.
+* `internal` - (Optional) If true, ELB will be an internal ELB.
+* `listener` - (Required) A list of listener blocks. Listeners documented below.
+* `healthCheck` - (Optional) A health_check block. Health Check documented below.
+* `crossZoneLoadBalancing` - (Optional) Enable cross-zone load balancing. Default: `true`
+* `idleTimeout` - (Optional) The time in seconds that the connection is allowed to be idle. Default: `60`
+* `connectionDraining` - (Optional) Boolean to enable connection draining. Default: `false`
+* `connectionDrainingTimeout` - (Optional) The time in seconds to allow for connections to drain. Default: `300`
+* `desyncMitigationMode` - (Optional) Determines how the load balancer handles requests that might pose a security risk to an application due to HTTP desync. Valid values are `monitor`, `defensive` (default), `strictest`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Exactly one of `availabilityZones` or `subnets` must be specified: this determines if the ELB exists in a VPC or in EC2-classic.
+
+Access Logs (`accessLogs`) support the following:
+
+* `bucket` - (Required) The S3 bucket name to store the logs in.
+* `bucketPrefix` - (Optional) The S3 bucket prefix. Logs are stored in the root if not configured.
+* `interval` - (Optional) The publishing interval in minutes. Valid values: `5` and `60`. Default: `60`
+* `enabled` - (Optional) Boolean to enable / disable `accessLogs`. Default is `true`
+
+Listeners (`listener`) support the following:
+
+* `instancePort` - (Required) The port on the instance to route to
+* `instanceProtocol` - (Required) The protocol to use to the instance. Valid values are `http`, `https`, `tcp`, or `ssl`
+* `lbPort` - (Required) The port to listen on for the load balancer
+* `lbProtocol` - (Required) The protocol to listen on. Valid values are `http`, `https`, `tcp`, or `ssl`
+* `sslCertificateId` - (Optional) The ARN of an SSL certificate you have uploaded to AWS IAM. **Note ECDSA-specific restrictions below. Only valid when `lbProtocol` is either HTTPS or SSL**
+
+Health Check (`healthCheck`) supports the following:
+
+* `healthyThreshold` - (Required) The number of checks before the instance is declared healthy.
+* `unhealthyThreshold` - (Required) The number of checks before the instance is declared unhealthy.
+* `target` - (Required) The target of the check. Valid pattern is "${PROTOCOL}:${PORT}${PATH}", where PROTOCOL values are:
+    * `http`, `https` - PORT and PATH are required
+    * `tcp`, `ssl` - PORT is required, PATH is not supported
+* `interval` - (Required) The interval between checks.
+* `timeout` - (Required) The length of time before the check times out.
+
+## Note on ECDSA Key Algorithm
+
+If the ARN of the `sslCertificateId` that is pointed to references a certificate that was signed by an ECDSA key, note that ELB only supports the P256 and P384 curves.
+Using a certificate signed by a key using a different curve could produce the error `ERR_SSL_VERSION_OR_CIPHER_MISMATCH` in your browser.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the ELB
+* `arn` - The ARN of the ELB
+* `name` - The name of the ELB
+* `dnsName` - The DNS name of the ELB
+* `instances` - The list of instances in the ELB
+* `sourceSecurityGroup` - The name of the security group that you can use as part of your inbound rules for your load balancer's back-end application instances. Use this for Classic or Default VPC only.
+* `sourceSecurityGroupId` - The ID of the security group that you can use as part of your inbound rules for your load balancer's back-end application instances. Only available on ELBs launched in a VPC.
+* `zoneId` - The canonical hosted zone ID of the ELB (to be used in a Route 53 Alias record)
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import ELBs using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import ELBs using the `name`. For example:
+
+```console
+% terraform import aws_elb.bar elb-production-12345
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/elb_attachment.html.markdown b/website/docs/cdktf/typescript/r/elb_attachment.html.markdown
new file mode 100644
index 00000000000..38d18ba5468
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/elb_attachment.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_elb_attachment"
+description: |-
+  Provides an Elastic Load Balancer Attachment resource.
+---
+
+
+
+# Resource: aws_elb_attachment
+
+Attaches an EC2 instance to an Elastic Load Balancer (ELB). For attaching resources with Application Load Balancer (ALB) or Network Load Balancer (NLB), see the [`awsLbTargetGroupAttachment` resource](/docs/providers/aws/r/lb_target_group_attachment.html).
+
+~> **NOTE on ELB Instances and ELB Attachments:** Terraform currently provides both a standalone ELB Attachment resource (describing an instance attached to an ELB), and an [Elastic Load Balancer resource](elb.html) with `instances` defined in-line. At this time you cannot use an ELB with in-line instances in conjunction with an ELB Attachment resource. Doing so will cause a conflict and will overwrite attachments.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { ElbAttachment } from "./.gen/providers/aws/elb-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ElbAttachment(this, "baz", { + elb: bar.id, + instance: foo.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `elb` - (Required) The name of the ELB. +* `instance` - (Required) Instance ID to place in the ELB pool. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/emr_block_public_access_configuration.html.markdown b/website/docs/cdktf/typescript/r/emr_block_public_access_configuration.html.markdown new file mode 100644 index 00000000000..72a7fdd6411 --- /dev/null +++ b/website/docs/cdktf/typescript/r/emr_block_public_access_configuration.html.markdown @@ -0,0 +1,172 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_block_public_access_configuration" +description: |- + Terraform resource for managing an AWS EMR Block Public Access Configuration. +--- + + + +# Resource: aws_emr_block_public_access_configuration + +Terraform resource for managing an AWS EMR block public access configuration. This region level security configuration restricts the launch of EMR clusters that have associated security groups permitting public access on unspecified ports. See the [EMR Block Public Access Configuration](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-block-public-access.html) documentation for further information. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrBlockPublicAccessConfiguration } from "./.gen/providers/aws/emr-block-public-access-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrBlockPublicAccessConfiguration(this, "example", { + blockPublicSecurityGroupRules: true, + }); + } +} + +``` + +### Default Configuration + +By default, each AWS region is equipped with a block public access configuration that prevents EMR clusters from being launched if they have security group rules permitting public access on any port except for port 22. The default configuration can be managed using this Terraform resource. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrBlockPublicAccessConfiguration } from "./.gen/providers/aws/emr-block-public-access-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrBlockPublicAccessConfiguration(this, "example", { + blockPublicSecurityGroupRules: true, + permittedPublicSecurityGroupRuleRange: [ + { + maxRange: 22, + minRange: 22, + }, + ], + }); + } +} + +``` + +~> **NOTE:** If an `awsEmrBlockPublicAccessConfiguration` Terraform resource is destroyed, the configuration will reset to this default configuration. 
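+
+As a hypothetical variant of the default configuration above (the port choice is illustrative and not taken from the original documentation), a region that also needs to expose a public HTTPS endpoint could additionally permit port 443:
+
+```typescript
+// Hypothetical sketch: keep block public access enabled but allow SSH (22)
+// and HTTPS (443) as permitted public ports.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { EmrBlockPublicAccessConfiguration } from "./.gen/providers/aws/emr-block-public-access-configuration";
+class BpaWithHttpsStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EmrBlockPublicAccessConfiguration(this, "example", {
+      blockPublicSecurityGroupRules: true,
+      permittedPublicSecurityGroupRuleRange: [
+        { maxRange: 22, minRange: 22 },
+        { maxRange: 443, minRange: 443 },
+      ],
+    });
+  }
+}
+```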
+ +### Multiple Permitted Public Security Group Rule Ranges + +The resource permits specification of multiple `permittedPublicSecurityGroupRuleRange` blocks. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrBlockPublicAccessConfiguration } from "./.gen/providers/aws/emr-block-public-access-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrBlockPublicAccessConfiguration(this, "example", { + blockPublicSecurityGroupRules: true, + permittedPublicSecurityGroupRuleRange: [ + { + maxRange: 22, + minRange: 22, + }, + { + maxRange: 101, + minRange: 100, + }, + ], + }); + } +} + +``` + +### Disabling Block Public Access + +To permit EMR clusters to be launched in the configured region regardless of associated security group rules, the Block Public Access feature can be disabled using this Terraform resource. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrBlockPublicAccessConfiguration } from "./.gen/providers/aws/emr-block-public-access-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrBlockPublicAccessConfiguration(this, "example", { + blockPublicSecurityGroupRules: false, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `blockPublicSecurityGroupRules` - (Required) Enable or disable EMR Block Public Access. + +The following arguments are optional: + +* `permittedPublicSecurityGroupRuleRange` - (Optional) Configuration block for defining permitted public security group rule port ranges. Can be defined multiple times per resource. Only valid if `blockPublicSecurityGroupRules` is set to `true`. + +### `permittedPublicSecurityGroupRuleRange` + +This block is used to define a range of TCP ports that should form exceptions to the Block Public Access Configuration. If an attempt is made to launch an EMR cluster in the configured region and account, with `block_public_security_group_rules = true`, the EMR cluster will be permitted to launch even if there are security group rules permitting public access to ports in this range. + +* `minRange` - (Required) The first port in the range of TCP ports. +* `maxRange` - (Required) The final port in the range of TCP ports. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the current EMR Block Public Access Configuration. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the current EMR Block Public Access Configuration. 
For example: + +```console +% terraform import aws_emr_block_public_access_configuration.example current +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/emr_cluster.html.markdown b/website/docs/cdktf/typescript/r/emr_cluster.html.markdown new file mode 100644 index 00000000000..7051e375c1b --- /dev/null +++ b/website/docs/cdktf/typescript/r/emr_cluster.html.markdown @@ -0,0 +1,913 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_cluster" +description: |- + Provides an Elastic MapReduce Cluster +--- + + + +# Resource: aws_emr_cluster + +Provides an Elastic MapReduce Cluster, a web service that makes it easy to process large amounts of data efficiently. See [Amazon Elastic MapReduce Documentation](https://aws.amazon.com/documentation/elastic-mapreduce/) for more information. + +To configure [Instance Groups](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-group-configuration.html#emr-plan-instance-groups) for [task nodes](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-master-core-task-nodes.html#emr-plan-task), see the [`awsEmrInstanceGroup` resource](/docs/providers/aws/r/emr_instance_group.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrCluster } from "./.gen/providers/aws/emr-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrCluster(this, "cluster", { + additionalInfo: + '{\n "instanceAwsClientConfiguration": {\n "proxyPort": 8099,\n "proxyHost": "myproxy.example.com"\n }\n}\n\n', + applications: ["Spark"], + bootstrapAction: [ + { + args: ["instance.isMaster=true", "echo running on master node"], + name: "runif", + path: "s3://elasticmapreduce/bootstrap-actions/run-if", + }, + ], + configurationsJson: + ' [\n {\n "Classification": "hadoop-env",\n "Configurations": [\n {\n "Classification": "export",\n "Properties": {\n "JAVA_HOME": "/usr/lib/jvm/java-1.8.0"\n }\n }\n ],\n "Properties": {}\n },\n {\n "Classification": "spark-env",\n "Configurations": [\n {\n "Classification": "export",\n "Properties": {\n "JAVA_HOME": "/usr/lib/jvm/java-1.8.0"\n }\n }\n ],\n "Properties": {}\n }\n ]\n\n', + coreInstanceGroup: { + autoscalingPolicy: + '{\n"Constraints": {\n "MinCapacity": 1,\n "MaxCapacity": 2\n},\n"Rules": [\n {\n "Name": "ScaleOutMemoryPercentage",\n "Description": "Scale out if YARNMemoryAvailablePercentage is less than 15",\n "Action": {\n "SimpleScalingPolicyConfiguration": {\n "AdjustmentType": "CHANGE_IN_CAPACITY",\n "ScalingAdjustment": 1,\n "CoolDown": 300\n }\n },\n "Trigger": {\n "CloudWatchAlarmDefinition": {\n "ComparisonOperator": "LESS_THAN",\n "EvaluationPeriods": 1,\n "MetricName": "YARNMemoryAvailablePercentage",\n "Namespace": "AWS/ElasticMapReduce",\n "Period": 300,\n "Statistic": "AVERAGE",\n "Threshold": 15.0,\n "Unit": "PERCENT"\n }\n }\n }\n]\n}\n\n', + bidPrice: "0.30", + ebsConfig: [ + { + size: Token.asNumber("40"), + type: "gp2", + volumesPerInstance: 1, + }, + ], + instanceCount: 1, + instanceType: "c4.large", + }, + ebsRootVolumeSize: 100, + ec2Attributes: { + emrManagedMasterSecurityGroup: sg.id, + emrManagedSlaveSecurityGroup: sg.id, + instanceProfile: emrProfile.arn, + subnetId: 
main.id, + }, + keepJobFlowAliveWhenNoSteps: true, + masterInstanceGroup: { + instanceType: "m4.large", + }, + name: "emr-test-arn", + releaseLabel: "emr-4.6.0", + serviceRole: iamEmrServiceRole.arn, + tags: { + env: "env", + role: "rolename", + }, + terminationProtection: false, + }); + } +} + +``` + +The `awsEmrCluster` resource typically requires two IAM roles, one for the EMR Cluster to use as a service role, and another is assigned to every EC2 instance in a cluster and each application process that runs on a cluster assumes this role for permissions to interact with other AWS services. An additional role, the Auto Scaling role, is required if your cluster uses automatic scaling in Amazon EMR. + +The default AWS managed EMR service role is called `emrDefaultRole` with Amazon managed policy `amazonEmrServicePolicyV2` attached. The name of default instance profile role is `emrEc2DefaultRole` with default managed policy `amazonElasticMapReduceforEc2Role` attached, but it is on the path to deprecation and will not be replaced with another default managed policy. You'll need to create and specify an instance profile to replace the deprecated role and default policy. See the [Configure IAM service roles for Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-iam-roles.html) guide for more information on these IAM roles. There is also a fully-bootable example Terraform configuration at the bottom of this page. + +### Instance Fleet + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrCluster } from "./.gen/providers/aws/emr-cluster"; +import { EmrInstanceFleet } from "./.gen/providers/aws/emr-instance-fleet"; +interface MyConfig { + name: any; + releaseLabel: any; + serviceRole: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new EmrCluster(this, "example", { + coreInstanceFleet: { + instanceTypeConfigs: [ + { + bidPriceAsPercentageOfOnDemandPrice: 80, + ebsConfig: [ + { + size: 100, + type: "gp2", + volumesPerInstance: 1, + }, + ], + instanceType: "m3.xlarge", + weightedCapacity: 1, + }, + { + bidPriceAsPercentageOfOnDemandPrice: 100, + ebsConfig: [ + { + size: 100, + type: "gp2", + volumesPerInstance: 1, + }, + ], + instanceType: "m4.xlarge", + weightedCapacity: 1, + }, + { + bidPriceAsPercentageOfOnDemandPrice: 100, + ebsConfig: [ + { + size: 100, + type: "gp2", + volumesPerInstance: 1, + }, + ], + instanceType: "m4.2xlarge", + weightedCapacity: 2, + }, + ], + launchSpecifications: { + spotSpecification: [ + { + allocationStrategy: "capacity-optimized", + blockDurationMinutes: 0, + timeoutAction: "SWITCH_TO_ON_DEMAND", + timeoutDurationMinutes: 10, + }, + ], + }, + name: "core fleet", + targetOnDemandCapacity: 2, + targetSpotCapacity: 2, + }, + masterInstanceFleet: { + instanceTypeConfigs: [ + { + instanceType: "m4.xlarge", + }, + ], + targetOnDemandCapacity: 1, + }, + name: config.name, + releaseLabel: config.releaseLabel, + serviceRole: config.serviceRole, + }); + new EmrInstanceFleet(this, "task", { + clusterId: example.id, + instanceTypeConfigs: [ + { + bidPriceAsPercentageOfOnDemandPrice: 100, + ebsConfig: [ + { + size: 100, + type: "gp2", + volumesPerInstance: 1, + }, + ], + 
instanceType: "m4.xlarge", + weightedCapacity: 1, + }, + { + bidPriceAsPercentageOfOnDemandPrice: 100, + ebsConfig: [ + { + size: 100, + type: "gp2", + volumesPerInstance: 1, + }, + ], + instanceType: "m4.2xlarge", + weightedCapacity: 2, + }, + ], + launchSpecifications: { + spotSpecification: [ + { + allocationStrategy: "capacity-optimized", + blockDurationMinutes: 0, + timeoutAction: "TERMINATE_CLUSTER", + timeoutDurationMinutes: 10, + }, + ], + }, + name: "task fleet", + targetOnDemandCapacity: 1, + targetSpotCapacity: 1, + }); + } +} + +``` + +### Enable Debug Logging + +[Debug logging in EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-debugging.html) is implemented as a step. It is highly recommended that you utilize the [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignoreChanges` if other steps are being managed outside of Terraform. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrCluster } from "./.gen/providers/aws/emr-cluster"; +interface MyConfig { + name: any; + releaseLabel: any; + serviceRole: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new EmrCluster(this, "example", { + lifecycle: { + ignoreChanges: [step], + }, + step: [ + { + actionOnFailure: "TERMINATE_CLUSTER", + hadoopJarStep: [ + { + args: ["state-pusher-script"], + jar: "command-runner.jar", + }, + ], + name: "Setup Hadoop Debugging", + }, + ], + name: config.name, + releaseLabel: config.releaseLabel, + serviceRole: config.serviceRole, + }); + } +} + +``` + +### Multiple Node Master Instance Group + +Available in EMR version 5.23.0 and later, an EMR Cluster can be launched with three master nodes for high availability. Additional information about this functionality and its requirements can be found in the [EMR Management Guide](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-ha.html). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EmrCluster } from "./.gen/providers/aws/emr-cluster"; +import { Subnet } from "./.gen/providers/aws/subnet"; +interface MyConfig { + vpcId: any; + instanceType: any; + instanceProfile: any; + instanceType1: any; + name: any; + serviceRole: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new Subnet(this, "example", { + mapPublicIpOnLaunch: true, + vpcId: config.vpcId, + }); + const awsEmrClusterExample = new EmrCluster(this, "example_1", { + coreInstanceGroup: { + instanceType: config.instanceType, + }, + ec2Attributes: { + subnetId: example.id, + instanceProfile: config.instanceProfile, + }, + masterInstanceGroup: { + instanceCount: 3, + instanceType: config.instanceType1, + }, + releaseLabel: "emr-5.24.1", + terminationProtection: true, + name: config.name, + serviceRole: config.serviceRole, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEmrClusterExample.overrideLogicalId("example"); + } +} + +``` + +### Bootable Cluster + +**NOTE:** This configuration demonstrates a minimal configuration needed to boot an example EMR Cluster. It is not meant to display best practices. As with all examples, use at your own risk. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { EmrCluster } from "./.gen/providers/aws/emr-cluster"; +import { IamInstanceProfile } from "./.gen/providers/aws/iam-instance-profile"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { InternetGateway } from "./.gen/providers/aws/internet-gateway"; +import { MainRouteTableAssociation } from "./.gen/providers/aws/main-route-table-association"; +import { RouteTable } from "./.gen/providers/aws/route-table"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new Vpc(this, "main", { + cidrBlock: "168.31.0.0/16", + enableDnsHostnames: true, + tags: { + name: "emr_test", + }, + }); + const ec2AssumeRole = new DataAwsIamPolicyDocument( + this, + "ec2_assume_role", + { + statement: [ + { + actions: Token.asList("sts:AssumeRole"), + effect: "Allow", + principals: [ + { + identifiers: ["ec2.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const emrAssumeRole = new DataAwsIamPolicyDocument( + this, + "emr_assume_role", + { + statement: [ + { + actions: Token.asList("sts:AssumeRole"), + effect: "Allow", + principals: [ + { + identifiers: ["elasticmapreduce.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const iamEmrProfilePolicy = new DataAwsIamPolicyDocument( + this, + "iam_emr_profile_policy", + { + statement: [ + { + actions: [ + "cloudwatch:*", + "dynamodb:*", + "ec2:Describe*", + "elasticmapreduce:Describe*", + "elasticmapreduce:ListBootstrapActions", + 
"elasticmapreduce:ListClusters", + "elasticmapreduce:ListInstanceGroups", + "elasticmapreduce:ListInstances", + "elasticmapreduce:ListSteps", + "kinesis:CreateStream", + "kinesis:DeleteStream", + "kinesis:DescribeStream", + "kinesis:GetRecords", + "kinesis:GetShardIterator", + "kinesis:MergeShards", + "kinesis:PutRecord", + "kinesis:SplitShard", + "rds:Describe*", + "s3:*", + "sdb:*", + "sns:*", + "sqs:*", + ], + effect: "Allow", + resources: ["*"], + }, + ], + } + ); + const iamEmrServicePolicy = new DataAwsIamPolicyDocument( + this, + "iam_emr_service_policy", + { + statement: [ + { + actions: [ + "ec2:AuthorizeSecurityGroupEgress", + "ec2:AuthorizeSecurityGroupIngress", + "ec2:CancelSpotInstanceRequests", + "ec2:CreateNetworkInterface", + "ec2:CreateSecurityGroup", + "ec2:CreateTags", + "ec2:DeleteNetworkInterface", + "ec2:DeleteSecurityGroup", + "ec2:DeleteTags", + "ec2:DescribeAvailabilityZones", + "ec2:DescribeAccountAttributes", + "ec2:DescribeDhcpOptions", + "ec2:DescribeInstanceStatus", + "ec2:DescribeInstances", + "ec2:DescribeKeyPairs", + "ec2:DescribeNetworkAcls", + "ec2:DescribeNetworkInterfaces", + "ec2:DescribePrefixLists", + "ec2:DescribeRouteTables", + "ec2:DescribeSecurityGroups", + "ec2:DescribeSpotInstanceRequests", + "ec2:DescribeSpotPriceHistory", + "ec2:DescribeSubnets", + "ec2:DescribeVpcAttribute", + "ec2:DescribeVpcEndpoints", + "ec2:DescribeVpcEndpointServices", + "ec2:DescribeVpcs", + "ec2:DetachNetworkInterface", + "ec2:ModifyImageAttribute", + "ec2:ModifyInstanceAttribute", + "ec2:RequestSpotInstances", + "ec2:RevokeSecurityGroupEgress", + "ec2:RunInstances", + "ec2:TerminateInstances", + "ec2:DeleteVolume", + "ec2:DescribeVolumeStatus", + "ec2:DescribeVolumes", + "ec2:DetachVolume", + "iam:GetRole", + "iam:GetRolePolicy", + "iam:ListInstanceProfiles", + "iam:ListRolePolicies", + "iam:PassRole", + "s3:CreateBucket", + "s3:Get*", + "s3:List*", + "sdb:BatchPutAttributes", + "sdb:Select", + "sqs:CreateQueue", + "sqs:Delete*", + "sqs:GetQueue*", + "sqs:PurgeQueue", + "sqs:ReceiveMessage", + ], + effect: "Allow", + resources: ["*"], + }, + ], + } + ); + const iamEmrProfileRole = new IamRole(this, "iam_emr_profile_role", { + assumeRolePolicy: Token.asString(ec2AssumeRole.json), + name: "iam_emr_profile_role", + }); + const iamEmrServiceRole = new IamRole(this, "iam_emr_service_role", { + assumeRolePolicy: Token.asString(emrAssumeRole.json), + name: "iam_emr_service_role", + }); + const awsIamRolePolicyIamEmrProfilePolicy = new IamRolePolicy( + this, + "iam_emr_profile_policy_7", + { + name: "iam_emr_profile_policy", + policy: Token.asString(iamEmrProfilePolicy.json), + role: iamEmrProfileRole.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyIamEmrProfilePolicy.overrideLogicalId( + "iam_emr_profile_policy" + ); + const awsIamRolePolicyIamEmrServicePolicy = new IamRolePolicy( + this, + "iam_emr_service_policy_8", + { + name: "iam_emr_service_policy", + policy: Token.asString(iamEmrServicePolicy.json), + role: iamEmrServiceRole.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamRolePolicyIamEmrServicePolicy.overrideLogicalId( + "iam_emr_service_policy" + ); + const gw = new InternetGateway(this, "gw", { + vpcId: main.id, + }); + const r = new RouteTable(this, "r", { + route: [ + { + cidrBlock: "0.0.0.0/0", + gatewayId: gw.id, + }, + ], + vpcId: main.id, + }); + const awsSubnetMain = new Subnet(this, "main_11", { + cidrBlock: "168.31.0.0/20", + tags: { + name: "emr_test", + }, + vpcId: main.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetMain.overrideLogicalId("main"); + const emrProfile = new IamInstanceProfile(this, "emr_profile", { + name: "emr_profile", + role: iamEmrProfileRole.name, + }); + new MainRouteTableAssociation(this, "a", { + routeTableId: r.id, + vpcId: main.id, + }); + const allowAccess = new SecurityGroup(this, "allow_access", { + dependsOn: [awsSubnetMain], + description: "Allow inbound traffic", + egress: [ + { + cidrBlocks: ["0.0.0.0/0"], + fromPort: 0, + protocol: "-1", + toPort: 0, + }, + ], + ingress: [ + { + cidrBlocks: [main.cidrBlock], + fromPort: 0, + protocol: "-1", + toPort: 0, + }, + ], + lifecycle: { + ignoreChanges: [ingress, egress], + }, + name: "allow_access", + tags: { + name: "emr_test", + }, + vpcId: main.id, + }); + new EmrCluster(this, "cluster", { + applications: ["Spark"], + bootstrapAction: [ + { + args: ["instance.isMaster=true", "echo running on master node"], + name: "runif", + path: "s3://elasticmapreduce/bootstrap-actions/run-if", + }, + ], + configurationsJson: + ' [\n {\n "Classification": "hadoop-env",\n "Configurations": [\n {\n "Classification": "export",\n "Properties": {\n "JAVA_HOME": "/usr/lib/jvm/java-1.8.0"\n }\n }\n ],\n "Properties": {}\n },\n {\n "Classification": "spark-env",\n "Configurations": [\n {\n "Classification": "export",\n "Properties": {\n "JAVA_HOME": "/usr/lib/jvm/java-1.8.0"\n }\n }\n ],\n "Properties": {}\n }\n ]\n\n', + coreInstanceGroup: { + instanceCount: 1, + instanceType: "m5.xlarge", + }, + ec2Attributes: { + emrManagedMasterSecurityGroup: allowAccess.id, + emrManagedSlaveSecurityGroup: allowAccess.id, + instanceProfile: emrProfile.arn, + subnetId: Token.asString(awsSubnetMain.id), + }, + masterInstanceGroup: { + instanceType: "m5.xlarge", + }, + name: "emr-test-arn", + releaseLabel: "emr-4.6.0", + serviceRole: iamEmrServiceRole.arn, + tags: { + dns_zone: "env_zone", + env: "env", + name: "name-env", + role: "rolename", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the job flow. +* `releaseLabel` - (Required) Release label for the Amazon EMR release. +* `serviceRole` - (Required) IAM role that will be assumed by the Amazon EMR service to access AWS resources. + +The following arguments are optional: + +* `additionalInfo` - (Optional) JSON string for selecting additional features such as adding proxy information. Note: Currently there is no API to retrieve the value of this argument after EMR cluster creation from provider, therefore Terraform cannot detect drift from the actual EMR cluster if its value is changed outside Terraform. +* `applications` - (Optional) A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster. 
For a list of applications available for each Amazon EMR release version, see the [Amazon EMR Release Guide](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-release-components.html). +* `autoscalingRole` - (Optional) IAM role for automatic scaling policies. The IAM role provides permissions that the automatic scaling feature requires to launch and terminate EC2 instances in an instance group. +* `autoTerminationPolicy` - (Optional) An auto-termination policy for an Amazon EMR cluster. An auto-termination policy defines the amount of idle time in seconds after which a cluster automatically terminates. See [Auto Termination Policy](#auto_termination_policy) Below. +* `bootstrapAction` - (Optional) Ordered list of bootstrap actions that will be run before Hadoop is started on the cluster nodes. See below. +* `configurations` - (Optional) List of configurations supplied for the EMR cluster you are creating. Supply a configuration object for applications to override their default configuration. See [AWS Documentation](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-configure-apps.html) for more information. +* `configurationsJson` - (Optional) JSON string for supplying list of configurations for the EMR cluster. + +~> **NOTE on `configurationsJson`:** If the `configurations` value is empty then you should skip the `configurations` field instead of providing an empty list as a value, `"Configurations": []`. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrCluster } from "./.gen/providers/aws/emr-cluster"; +interface MyConfig { + name: any; + releaseLabel: any; + serviceRole: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new EmrCluster(this, "cluster", { + configurationsJson: + ' [\n {\n "Classification": "hadoop-env",\n "Configurations": [\n {\n "Classification": "export",\n "Properties": {\n "JAVA_HOME": "/usr/lib/jvm/java-1.8.0"\n }\n }\n ],\n "Properties": {}\n }\n ]\n\n', + name: config.name, + releaseLabel: config.releaseLabel, + serviceRole: config.serviceRole, + }); + } +} + +``` + +* `coreInstanceFleet` - (Optional) Configuration block to use an [Instance Fleet](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-fleet.html) for the core node type. Cannot be specified if any `coreInstanceGroup` configuration blocks are set. Detailed below. +* `coreInstanceGroup` - (Optional) Configuration block to use an [Instance Group](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-group-configuration.html#emr-plan-instance-groups) for the [core node type](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-master-core-task-nodes.html#emr-plan-core). +* `customAmiId` - (Optional) Custom Amazon Linux AMI for the cluster (instead of an EMR-owned AMI). Available in Amazon EMR version 5.7.0 and later. +* `ebsRootVolumeSize` - (Optional) Size in GiB of the EBS root device volume of the Linux AMI that is used for each EC2 instance. Available in Amazon EMR version 4.x and later. +* `ec2Attributes` - (Optional) Attributes for the EC2 instances running the job flow. See below. 
+* `keepJobFlowAliveWhenNoSteps` - (Optional) Whether to keep the cluster alive when it has no steps to run or after all steps are complete (default is `true`).
+* `kerberosAttributes` - (Optional) Kerberos configuration for the cluster. See below.
+* `listStepsStates` - (Optional) List of [step states](https://docs.aws.amazon.com/emr/latest/APIReference/API_StepStatus.html) used to filter returned steps.
+* `logEncryptionKmsKeyId` - (Optional) AWS KMS customer master key (CMK) key ID or ARN used for encrypting log files. This attribute is only available with EMR version 5.30.0 and later, excluding EMR 6.0.0.
+* `logUri` - (Optional) S3 bucket to write the log files of the job flow. If a value is not provided, logs are not created.
+* `masterInstanceFleet` - (Optional) Configuration block to use an [Instance Fleet](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-fleet.html) for the master node type. Cannot be specified if any `masterInstanceGroup` configuration blocks are set. Detailed below.
+* `masterInstanceGroup` - (Optional) Configuration block to use an [Instance Group](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-group-configuration.html#emr-plan-instance-groups) for the [master node type](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-master-core-task-nodes.html#emr-plan-master).
+* `placementGroupConfig` - (Optional) The specified placement group configuration for an Amazon EMR cluster.
+* `scaleDownBehavior` - (Optional) Way that individual Amazon EC2 instances terminate when an automatic scale-in activity occurs or an instance group is resized.
+* `securityConfiguration` - (Optional) Security configuration name to attach to the EMR cluster. Only valid for EMR clusters with `releaseLabel` 4.8.0 or greater.
+* `step` - (Optional) List of steps to run when creating the cluster. See below. It is highly recommended to utilize the [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignoreChanges` if other steps are being managed outside of Terraform. This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+* `stepConcurrencyLevel` - (Optional) Number of steps that can be executed concurrently. You can specify a maximum of 256 steps. Only valid for EMR clusters with `releaseLabel` 5.28.0 or greater (default is 1).
+* `tags` - (Optional) Map of tags to apply to the EMR Cluster. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `terminationProtection` - (Optional) Whether to enable termination protection (default is `false`, except when using multiple master nodes). Before attempting to destroy the resource when termination protection is enabled, this configuration must be applied with its value set to `false`.
+* `visibleToAllUsers` - (Optional) Whether the job flow is visible to all IAM users of the AWS account associated with the job flow. Default value is `true`.
+
+### bootstrap_action
+
+* `args` - (Optional) List of command line arguments to pass to the bootstrap action script.
+* `name` - (Required) Name of the bootstrap action.
+* `path` - (Required) Location of the script to run during a bootstrap action. Can be either a location in Amazon S3 or on a local file system.
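+
+As a compact illustration of the three `bootstrapAction` arguments above, the sketch below wires a single custom bootstrap action into a cluster. The S3 path, script name, and arguments are hypothetical placeholders, and the `MyConfig` values are assumed to be supplied by the caller:
+
+```typescript
+// Illustrative sketch only - bucket, script, and arguments are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { EmrCluster } from "./.gen/providers/aws/emr-cluster";
+interface MyConfig {
+  name: any;
+  releaseLabel: any;
+  serviceRole: any;
+}
+class BootstrapActionSketch extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new EmrCluster(this, "cluster", {
+      bootstrapAction: [
+        {
+          args: ["--with-python", "--retries", "3"],
+          name: "install-deps",
+          path: "s3://my-bucket/bootstrap/install-deps.sh",
+        },
+      ],
+      name: config.name,
+      releaseLabel: config.releaseLabel,
+      serviceRole: config.serviceRole,
+    });
+  }
+}
+
+```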
+ +### auto_termination_policy + +* `idleTimeout` - (Optional) Specifies the amount of idle time in seconds after which the cluster automatically terminates. You can specify a minimum of `60` seconds and a maximum of `604800` seconds (seven days). + +### configurations + +A configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. See [Configuring Applications](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-configure-apps.html). + +* `classification` - (Optional) Classification within a configuration. +* `properties` - (Optional) Map of properties specified within a configuration classification. + +### core_instance_fleet + +* `instanceTypeConfigs` - (Optional) Configuration block for instance fleet. +* `launchSpecifications` - (Optional) Configuration block for launch specification. +* `name` - (Optional) Friendly name given to the instance fleet. +* `targetOnDemandCapacity` - (Optional) The target capacity of On-Demand units for the instance fleet, which determines how many On-Demand instances to provision. +* `targetSpotCapacity` - (Optional) Target capacity of Spot units for the instance fleet, which determines how many Spot instances to provision. + +#### instance_type_configs + +* `bidPrice` - (Optional) Bid price for each EC2 Spot instance type as defined by `instanceType`. Expressed in USD. If neither `bidPrice` nor `bidPriceAsPercentageOfOnDemandPrice` is provided, `bidPriceAsPercentageOfOnDemandPrice` defaults to 100%. +* `bidPriceAsPercentageOfOnDemandPrice` - (Optional) Bid price, as a percentage of On-Demand price, for each EC2 Spot instance as defined by `instanceType`. Expressed as a number (for example, 20 specifies 20%). If neither `bidPrice` nor `bidPriceAsPercentageOfOnDemandPrice` is provided, `bidPriceAsPercentageOfOnDemandPrice` defaults to 100%. +* `configurations` - (Optional) Configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. List of `configuration` blocks. +* `ebsConfig` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. +* `instanceType` - (Required) EC2 instance type, such as m4.xlarge. +* `weightedCapacity` - (Optional) Number of units that a provisioned instance of this type provides toward fulfilling the target capacities defined in `awsEmrInstanceFleet`. + +#### launch_specifications + +* `onDemandSpecification` - (Optional) Configuration block for on demand instances launch specifications. +* `spotSpecification` - (Optional) Configuration block for spot instances launch specifications. + +##### on_demand_specification + +The launch specification for On-Demand instances in the instance fleet, which determines the allocation strategy. +The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x versions. On-Demand instances allocation strategy is available in Amazon EMR version 5.12.1 and later. + +* `allocationStrategy` - (Required) Specifies the strategy to use in launching On-Demand instance fleets. Currently, the only option is `lowestPrice` (the default), which launches the lowest price first. + +##### spot_specification + +The launch specification for Spot instances in the fleet, which determines the defined duration, provisioning timeout behavior, and allocation strategy. 
+ +* `allocationStrategy` - (Required) Specifies the strategy to use in launching Spot instance fleets. Valid values include `capacityOptimized`, `diversified`, `lowestPrice`, `priceCapacityOptimized`. See the [AWS documentation](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-instance-fleet.html#emr-instance-fleet-allocation-strategy) for details on each strategy type. +* `blockDurationMinutes` - (Optional) Defined duration for Spot instances (also known as Spot blocks) in minutes. When specified, the Spot instance does not terminate before the defined duration expires, and defined duration pricing for Spot instances applies. Valid values are 60, 120, 180, 240, 300, or 360. The duration period starts as soon as a Spot instance receives its instance ID. At the end of the duration, Amazon EC2 marks the Spot instance for termination and provides a Spot instance termination notice, which gives the instance a two-minute warning before it terminates. +* `timeoutAction` - (Required) Action to take when TargetSpotCapacity has not been fulfilled when the TimeoutDurationMinutes has expired; that is, when all Spot instances could not be provisioned within the Spot provisioning timeout. Valid values are `terminateCluster` and `switchToOnDemand`. SWITCH_TO_ON_DEMAND specifies that if no Spot instances are available, On-Demand Instances should be provisioned to fulfill any remaining Spot capacity. +* `timeoutDurationMinutes` - (Required) Spot provisioning timeout period in minutes. If Spot instances are not provisioned within this time period, the TimeOutAction is taken. Minimum value is 5 and maximum value is 1440. The timeout applies only during initial provisioning, when the cluster is first created. + +### core_instance_group + +* `autoscalingPolicy` - (Optional) String containing the [EMR Auto Scaling Policy](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-automatic-scaling.html) JSON. +* `bidPrice` - (Optional) Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. +* `ebsConfig` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. +* `instanceCount` - (Optional) Target number of instances for the instance group. Must be at least 1. Defaults to 1. +* `instanceType` - (Required) EC2 instance type for all instances in the instance group. +* `name` - (Optional) Friendly name given to the instance group. + +#### ebs_config + +* `iops` - (Optional) Number of I/O operations per second (IOPS) that the volume supports. +* `size` - (Required) Volume size, in gibibytes (GiB). +* `type` - (Required) Volume type. Valid options are `gp3`, `gp2`, `io1`, `standard`, `st1` and `sc1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html). +* `throughput` - (Optional) The throughput, in mebibyte per second (MiB/s). +* `volumesPerInstance` - (Optional) Number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1). + +### ec2_attributes + +Attributes for the Amazon EC2 instances running the job flow: + +* `additionalMasterSecurityGroups` - (Optional) String containing a comma separated list of additional Amazon EC2 security group IDs for the master node. 
+* `additionalSlaveSecurityGroups` - (Optional) String containing a comma separated list of additional Amazon EC2 security group IDs for the slave nodes.
+* `emrManagedMasterSecurityGroup` - (Optional) Identifier of the Amazon EC2 EMR-Managed security group for the master node.
+* `emrManagedSlaveSecurityGroup` - (Optional) Identifier of the Amazon EC2 EMR-Managed security group for the slave nodes.
+* `instanceProfile` - (Required) Instance Profile for EC2 instances of the cluster; the cluster's EC2 instances assume this role.
+* `keyName` - (Optional) Amazon EC2 key pair that can be used to ssh to the master node as the user called `hadoop`.
+* `serviceAccessSecurityGroup` - (Optional) Identifier of the Amazon EC2 service-access security group - required when the cluster runs on a private subnet.
+* `subnetId` - (Optional) VPC subnet ID where you want the job flow to launch. Cannot specify the `cc1.4xlarge` instance type for nodes of a job flow launched in an Amazon VPC.
+* `subnetIds` - (Optional) List of VPC subnet IDs where you want the job flow to launch. Amazon EMR identifies the best Availability Zone to launch instances according to your fleet specifications.
+
+~> **NOTE on EMR-Managed security groups:** These security groups will have any missing inbound or outbound access rules added and maintained by AWS, to ensure proper communication between instances in a cluster. The EMR service will maintain these rules for groups provided in `emrManagedMasterSecurityGroup` and `emrManagedSlaveSecurityGroup`; attempts to remove the required rules may succeed, only for the EMR service to re-add them in a matter of minutes. This may cause Terraform to fail to destroy an environment that contains an EMR cluster, because the EMR service does not revoke rules added on deletion, leaving a cyclic dependency between the security groups that prevents their deletion. To avoid this, use the `revokeRulesOnDelete` optional attribute for any Security Group used in `emrManagedMasterSecurityGroup` and `emrManagedSlaveSecurityGroup`. See [Amazon EMR-Managed Security Groups](http://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-man-sec-groups.html) for more information about the EMR-managed security group rules.
+
+### kerberos_attributes
+
+* `adDomainJoinPassword` - (Optional) Active Directory password for `adDomainJoinUser`. Terraform cannot perform drift detection of this configuration.
+* `adDomainJoinUser` - (Optional) Required only when establishing a cross-realm trust with an Active Directory domain. A user with sufficient privileges to join resources to the domain. Terraform cannot perform drift detection of this configuration.
+* `crossRealmTrustPrincipalPassword` - (Optional) Required only when establishing a cross-realm trust with a KDC in a different realm. The cross-realm principal password, which must be identical across realms. Terraform cannot perform drift detection of this configuration.
+* `kdcAdminPassword` - (Required) Password used within the cluster for the kadmin service on the cluster-dedicated KDC, which maintains Kerberos principals, password policies, and keytabs for the cluster. Terraform cannot perform drift detection of this configuration.
+* `realm` - (Required) Name of the Kerberos realm to which all nodes in a cluster belong. For example, `ec2.internal`.
+
+### master_instance_fleet
+
+* `instanceTypeConfigs` - (Optional) Configuration block for instance fleet.
+* `launchSpecifications` - (Optional) Configuration block for launch specification.
+* `name` - (Optional) Friendly name given to the instance fleet. +* `targetOnDemandCapacity` - (Optional) Target capacity of On-Demand units for the instance fleet, which determines how many On-Demand instances to provision. +* `targetSpotCapacity` - (Optional) Target capacity of Spot units for the instance fleet, which determines how many Spot instances to provision. + +#### instance_type_configs + +See `instanceTypeConfigs` above, under `coreInstanceFleet`. + +#### launch_specifications + +See `launchSpecifications` above, under `coreInstanceFleet`. + +### master_instance_group + +Supported nested arguments for the `masterInstanceGroup` configuration block: + +* `bidPrice` - (Optional) Bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. +* `ebsConfig` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. +* `instanceCount` - (Optional) Target number of instances for the instance group. Must be 1 or 3. Defaults to 1. Launching with multiple master nodes is only supported in EMR version 5.23.0+, and requires this resource's `coreInstanceGroup` to be configured. Public (Internet accessible) instances must be created in VPC subnets that have [map public IP on launch](/docs/providers/aws/r/subnet.html#map_public_ip_on_launch) enabled. Termination protection is automatically enabled when launched with multiple master nodes and Terraform must have the `termination_protection = false` configuration applied before destroying this resource. +* `instanceType` - (Required) EC2 instance type for all instances in the instance group. +* `name` - (Optional) Friendly name given to the instance group. + +#### ebs_config + +See `ebsConfig` under `coreInstanceGroup` above. + +### step + +This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). + +* `actionOnFailure` - (Required) Action to take if the step fails. Valid values: `terminateJobFlow`, `terminateCluster`, `cancelAndWait`, and `continue` +* `hadoopJarStep` - (Required) JAR file used for the step. See below. +* `name` - (Required) Name of the step. + +#### hadoop_jar_step + +This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). + +* `args` - (Optional) List of command line arguments passed to the JAR file's main function when executed. +* `jar` - (Required) Path to a JAR file run during the step. +* `mainClass` - (Optional) Name of the main class in the specified Java file. If not specified, the JAR file should specify a Main-Class in its manifest file. +* `properties` - (Optional) Key-Value map of Java properties that are set when the step runs. You can use these properties to pass key value pairs to your main function. + +### placement_group_config + +* `instanceRole` - (Required) Role of the instance in the cluster. Valid Values: `master`, `core`, `task`. +* `placementStrategy` - (Optional) EC2 Placement Group strategy associated with instance role. Valid Values: `spread`, `partition`, `cluster`, `none`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `applications` - Applications installed on this cluster. +* `arn`- ARN of the cluster. 
+* `bootstrapAction` - List of bootstrap actions that will be run before Hadoop is started on the cluster nodes. +* `configurations` - List of Configurations supplied to the EMR cluster. +* `coreInstanceGroup0Id` - Core node type Instance Group ID, if using Instance Group for this node type. +* `ec2Attributes` - Provides information about the EC2 instances in a cluster grouped by category: key name, subnet ID, IAM instance profile, and so on. +* `id` - ID of the cluster. +* `logUri` - Path to the Amazon S3 location where logs for this cluster are stored. +* `masterInstanceGroup0Id` - Master node type Instance Group ID, if using Instance Group for this node type. +* `masterPublicDns` - The DNS name of the master node. If the cluster is on a private subnet, this is the private DNS name. On a public subnet, this is the public DNS name. +* `name` - Name of the cluster. +* `releaseLabel` - Release label for the Amazon EMR release. +* `serviceRole` - IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `visibleToAllUsers` - Indicates whether the job flow is visible to all IAM users of the AWS account associated with the job flow. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR clusters using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EMR clusters using the `id`. For example: + +```console +% terraform import aws_emr_cluster.cluster j-123456ABCDEF +``` + +Since the API does not return the actual values for Kerberos configurations, environments with those Terraform configurations will need to use the [`lifecycle` configuration block `ignoreChanges` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) available to all Terraform resources to prevent perpetual differences. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EmrCluster } from "./.gen/providers/aws/emr-cluster"; +interface MyConfig { + name: any; + releaseLabel: any; + serviceRole: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new EmrCluster(this, "example", { + lifecycle: { + ignoreChanges: [kerberosAttributes], + }, + name: config.name, + releaseLabel: config.releaseLabel, + serviceRole: config.serviceRole, + }); + } +} + +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/emr_instance_fleet.html.markdown b/website/docs/cdktf/typescript/r/emr_instance_fleet.html.markdown new file mode 100644 index 00000000000..841eb499145 --- /dev/null +++ b/website/docs/cdktf/typescript/r/emr_instance_fleet.html.markdown @@ -0,0 +1,174 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_instance_fleet" +description: |- + Provides an Elastic MapReduce Cluster Instance Fleet +--- + + + +# Resource: aws_emr_instance_fleet + +Provides an Elastic MapReduce Cluster Instance Fleet configuration. +See [Amazon Elastic MapReduce Documentation](https://aws.amazon.com/documentation/emr/) for more information. + +~> **NOTE:** At this time, Instance Fleets cannot be destroyed through the API nor +web interface. Instance Fleets are destroyed when the EMR Cluster is destroyed. +Terraform will resize any Instance Fleet to zero when destroying the resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrInstanceFleet } from "./.gen/providers/aws/emr-instance-fleet"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrInstanceFleet(this, "task", { + clusterId: cluster.id, + instanceTypeConfigs: [ + { + bidPriceAsPercentageOfOnDemandPrice: 100, + ebsConfig: [ + { + size: 100, + type: "gp2", + volumesPerInstance: 1, + }, + ], + instanceType: "m4.xlarge", + weightedCapacity: 1, + }, + { + bidPriceAsPercentageOfOnDemandPrice: 100, + ebsConfig: [ + { + size: 100, + type: "gp2", + volumesPerInstance: 1, + }, + ], + instanceType: "m4.2xlarge", + weightedCapacity: 2, + }, + ], + launchSpecifications: { + spotSpecification: [ + { + allocationStrategy: "capacity-optimized", + blockDurationMinutes: 0, + timeoutAction: "TERMINATE_CLUSTER", + timeoutDurationMinutes: 10, + }, + ], + }, + name: "task fleet", + targetOnDemandCapacity: 1, + targetSpotCapacity: 1, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `clusterId` - (Required) ID of the EMR Cluster to attach to. Changing this forces a new resource to be created. +* `instanceTypeConfigs` - (Optional) Configuration block for instance fleet +* `launchSpecifications` - (Optional) Configuration block for launch specification +* `targetOnDemandCapacity` - (Optional) The target capacity of On-Demand units for the instance fleet, which determines how many On-Demand instances to provision. +* `targetSpotCapacity` - (Optional) The target capacity of Spot units for the instance fleet, which determines how many Spot instances to provision. +* `name` - (Optional) Friendly name given to the instance fleet. 
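+
+The Example Usage above exercises most of these arguments at once. As a narrower sketch (not converter output; the cluster ID is assumed to be supplied by the caller), an On-Demand-only task fleet needs little more than a cluster ID, one instance type, and a target capacity:
+
+```typescript
+// A sketch: omitting targetSpotCapacity and launchSpecifications
+// keeps the fleet on On-Demand instances only.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { EmrInstanceFleet } from "./.gen/providers/aws/emr-instance-fleet";
+interface FleetConfig {
+  clusterId: any;
+}
+class OnDemandTaskFleetSketch extends TerraformStack {
+  constructor(scope: Construct, name: string, config: FleetConfig) {
+    super(scope, name);
+    new EmrInstanceFleet(this, "task", {
+      clusterId: config.clusterId,
+      instanceTypeConfigs: [
+        {
+          instanceType: "m5.xlarge",
+          weightedCapacity: 1,
+        },
+      ],
+      name: "on-demand task fleet",
+      targetOnDemandCapacity: 2,
+    });
+  }
+}
+
+```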
+ +## instance_type_configs Configuration Block + +* `bidPrice` - (Optional) The bid price for each EC2 Spot instance type as defined by `instanceType`. Expressed in USD. If neither `bidPrice` nor `bidPriceAsPercentageOfOnDemandPrice` is provided, `bidPriceAsPercentageOfOnDemandPrice` defaults to 100%. +* `bidPriceAsPercentageOfOnDemandPrice` - (Optional) The bid price, as a percentage of On-Demand price, for each EC2 Spot instance as defined by `instanceType`. Expressed as a number (for example, 20 specifies 20%). If neither `bidPrice` nor `bidPriceAsPercentageOfOnDemandPrice` is provided, `bidPriceAsPercentageOfOnDemandPrice` defaults to 100%. +* `configurations` - (Optional) A configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. List of `configuration` blocks. +* `ebsConfig` - (Optional) Configuration block(s) for EBS volumes attached to each instance in the instance group. Detailed below. +* `instanceType` - (Required) An EC2 instance type, such as m4.xlarge. +* `weightedCapacity` - (Optional) The number of units that a provisioned instance of this type provides toward fulfilling the target capacities defined in `awsEmrInstanceFleet`. + +## configurations Configuration Block + +A configuration classification that applies when provisioning cluster instances, which can include configurations for applications and software that run on the cluster. See [Configuring Applications](https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-configure-apps.html). + +* `classification` - (Optional) The classification within a configuration. +* `properties` - (Optional) A map of properties specified within a configuration classification + +## ebs_config + +Attributes for the EBS volumes attached to each EC2 instance in the `masterInstanceGroup` and `coreInstanceGroup` configuration blocks: + +* `size` - (Required) The volume size, in gibibytes (GiB). +* `type` - (Required) The volume type. Valid options are `gp2`, `io1`, `standard` and `st1`. See [EBS Volume Types](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSVolumeTypes.html). +* `iops` - (Optional) The number of I/O operations per second (IOPS) that the volume supports +* `volumesPerInstance` - (Optional) The number of EBS volumes with this configuration to attach to each EC2 instance in the instance group (default is 1) + +## launch_specifications Configuration Block + +* `onDemandSpecification` - (Optional) Configuration block for on demand instances launch specifications +* `spotSpecification` - (Optional) Configuration block for spot instances launch specifications + +## on_demand_specification Configuration Block + +The launch specification for On-Demand instances in the instance fleet, which determines the allocation strategy. +The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x versions. On-Demand instances allocation strategy is available in Amazon EMR version 5.12.1 and later. + +* `allocationStrategy` - (Required) Specifies the strategy to use in launching On-Demand instance fleets. Currently, the only option is `lowestPrice` (the default), which launches the lowest price first. + +## spot_specification Configuration Block + +The launch specification for Spot instances in the fleet, which determines the defined duration, provisioning timeout behavior, and allocation strategy. 
+
+* `allocationStrategy` - (Required) Specifies the strategy to use in launching Spot instance fleets. Currently, the only option is `capacityOptimized` (the default), which launches instances from Spot instance pools with optimal capacity for the number of instances that are launching.
+* `blockDurationMinutes` - (Optional) The defined duration for Spot instances (also known as Spot blocks) in minutes. When specified, the Spot instance does not terminate before the defined duration expires, and defined duration pricing for Spot instances applies. Valid values are 60, 120, 180, 240, 300, or 360. The duration period starts as soon as a Spot instance receives its instance ID. At the end of the duration, Amazon EC2 marks the Spot instance for termination and provides a Spot instance termination notice, which gives the instance a two-minute warning before it terminates.
+* `timeoutAction` - (Required) The action to take when TargetSpotCapacity has not been fulfilled when the TimeoutDurationMinutes has expired; that is, when all Spot instances could not be provisioned within the Spot provisioning timeout. Valid values are `terminateCluster` and `switchToOnDemand`. SWITCH_TO_ON_DEMAND specifies that if no Spot instances are available, On-Demand Instances should be provisioned to fulfill any remaining Spot capacity.
+* `timeoutDurationMinutes` - (Required) The spot provisioning timeout period in minutes. If Spot instances are not provisioned within this time period, the TimeOutAction is taken. Minimum value is 5 and maximum value is 1440. The timeout applies only during initial provisioning, when the cluster is first created.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier of the instance fleet.
+
+* `provisionedOnDemandCapacity` - The number of On-Demand units that have been provisioned for the instance fleet to fulfill TargetOnDemandCapacity. This provisioned capacity might be less than or greater than TargetOnDemandCapacity.
+
+* `provisionedSpotCapacity` - The number of Spot units that have been provisioned for this instance fleet to fulfill TargetSpotCapacity. This provisioned capacity might be less than or greater than TargetSpotCapacity.
+
+* `status` - The current status of the instance fleet.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Instance Fleet using the EMR Cluster identifier and Instance Fleet identifier separated by a forward slash (`/`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EMR Instance Fleet using the EMR Cluster identifier and Instance Fleet identifier separated by a forward slash (`/`).
For example: + +```console +% terraform import aws_emr_instance_fleet.example j-123456ABCDEF/if-15EK4O09RZLNR +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/emr_instance_group.html.markdown b/website/docs/cdktf/typescript/r/emr_instance_group.html.markdown new file mode 100644 index 00000000000..49ef077093b --- /dev/null +++ b/website/docs/cdktf/typescript/r/emr_instance_group.html.markdown @@ -0,0 +1,123 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_instance_group" +description: |- + Provides an Elastic MapReduce Cluster Instance Group +--- + + + +# Resource: aws_emr_instance_group + +Provides an Elastic MapReduce Cluster Instance Group configuration. +See [Amazon Elastic MapReduce Documentation](https://aws.amazon.com/documentation/emr/) for more information. + +~> **NOTE:** At this time, Instance Groups cannot be destroyed through the API nor +web interface. Instance Groups are destroyed when the EMR Cluster is destroyed. +Terraform will resize any Instance Group to zero when destroying the resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrInstanceGroup } from "./.gen/providers/aws/emr-instance-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrInstanceGroup(this, "task", { + clusterId: tfTestCluster.id, + instanceCount: 1, + instanceType: "m5.xlarge", + name: "my little instance group", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` (Required) Human friendly name given to the instance group. Changing this forces a new resource to be created. +* `clusterId` (Required) ID of the EMR Cluster to attach to. Changing this forces a new resource to be created. +* `instanceType` (Required) The EC2 instance type for all instances in the instance group. Changing this forces a new resource to be created. +* `instanceCount` (Optional) target number of instances for the instance group. defaults to 0. +* `bidPrice` - (Optional) If set, the bid price for each EC2 instance in the instance group, expressed in USD. By setting this attribute, the instance group is being declared as a Spot Instance, and will implicitly create a Spot request. Leave this blank to use On-Demand Instances. +* `ebsOptimized` (Optional) Indicates whether an Amazon EBS volume is EBS-optimized. Changing this forces a new resource to be created. +* `ebsConfig` (Optional) One or more `ebsConfig` blocks as defined below. Changing this forces a new resource to be created. +* `autoscalingPolicy` - (Optional) The autoscaling policy document. This is a JSON formatted string. See [EMR Auto Scaling](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-automatic-scaling.html) +* `configurationsJson` - (Optional) A JSON string for supplying list of configurations specific to the EMR instance group. Note that this can only be changed when using EMR release 5.21 or later. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EmrInstanceGroup } from "./.gen/providers/aws/emr-instance-group";
+interface MyConfig {
+  clusterId: any;
+  instanceType: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new EmrInstanceGroup(this, "task", {
+      configurationsJson:
+        ' [\n {\n "Classification": "hadoop-env",\n "Configurations": [\n {\n "Classification": "export",\n "Properties": {\n "JAVA_HOME": "/usr/lib/jvm/java-1.8.0"\n }\n }\n ],\n "Properties": {}\n }\n ]\n\n',
+      clusterId: config.clusterId,
+      instanceType: config.instanceType,
+    });
+  }
+}
+
+```
+
+`ebsConfig` supports the following:
+
+* `iops` - (Optional) The number of I/O operations per second (IOPS) that the volume supports.
+* `size` - (Optional) The volume size, in gibibytes (GiB). This can be a number from 1 - 1024. If the volume type is EBS-optimized, the minimum value is 10.
+* `type` - (Optional) The volume type. Valid options are `gp2`, `io1`, and `standard`.
+* `volumesPerInstance` - (Optional) The number of EBS Volumes to attach per instance.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The EMR Instance Group ID.
+* `runningInstanceCount` - The number of instances currently running in this instance group.
+* `status` - The current status of the instance group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR task instance groups using their EMR Cluster ID and Instance Group ID separated by a forward slash (`/`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EMR task instance groups using their EMR Cluster ID and Instance Group ID separated by a forward slash (`/`). For example:
+
+```console
+% terraform import aws_emr_instance_group.task_group j-123456ABCDEF/ig-15EK4O09RZLNR
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/emr_managed_scaling_policy.html.markdown b/website/docs/cdktf/typescript/r/emr_managed_scaling_policy.html.markdown
new file mode 100644
index 00000000000..033d34b5bfc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/emr_managed_scaling_policy.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "EMR"
+layout: "aws"
+page_title: "AWS: aws_emr_managed_scaling_policy"
+description: |-
+  Provides a resource for EMR Managed Scaling policy
+---
+
+
+
+# Resource: aws_emr_managed_scaling_policy
+
+Provides a Managed Scaling policy for EMR Cluster. With Amazon EMR versions 5.30.0 and later (except for Amazon EMR 6.0.0), you can enable EMR managed scaling to automatically increase or decrease the number of instances or units in your cluster based on workload. See [Using EMR Managed Scaling in Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-managed-scaling.html) for more information.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EmrCluster } from "./.gen/providers/aws/emr-cluster";
+import { EmrManagedScalingPolicy } from "./.gen/providers/aws/emr-managed-scaling-policy";
+interface MyConfig {
+  serviceRole: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const sample = new EmrCluster(this, "sample", {
+      coreInstanceGroup: {
+        instanceType: "c4.large",
+      },
+      masterInstanceGroup: {
+        instanceType: "m4.large",
+      },
+      name: "emr-sample-cluster",
+      releaseLabel: "emr-5.30.0",
+      serviceRole: config.serviceRole,
+    });
+    new EmrManagedScalingPolicy(this, "samplepolicy", {
+      clusterId: sample.id,
+      computeLimits: [
+        {
+          maximumCapacityUnits: 10,
+          maximumCoreCapacityUnits: 10,
+          maximumOndemandCapacityUnits: 2,
+          minimumCapacityUnits: 2,
+          unitType: "Instances",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `clusterId` - (Required) ID of the EMR cluster.
+* `computeLimits` - (Required) Configuration block with compute limit settings. Described below.
+
+### compute_limits
+
+* `unitType` - (Required) The unit type used for specifying a managed scaling policy. Valid Values: `InstanceFleetUnits` | `Instances` | `VCPU`
+* `minimumCapacityUnits` - (Required) The lower boundary of EC2 units. It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. Managed scaling activities are not allowed beyond this boundary. The limit only applies to the core and task nodes. The master node cannot be scaled after initial configuration.
+* `maximumCapacityUnits` - (Required) The upper boundary of EC2 units. It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. Managed scaling activities are not allowed beyond this boundary. The limit only applies to the core and task nodes. The master node cannot be scaled after initial configuration.
+* `maximumOndemandCapacityUnits` - (Optional) The upper boundary of On-Demand EC2 units. It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. The On-Demand units are not allowed to scale beyond this boundary. The parameter is used to split capacity allocation between On-Demand and Spot instances.
+* `maximumCoreCapacityUnits` - (Optional) The upper boundary of EC2 units for core node type in a cluster. It is measured through VCPU cores or instances for instance groups and measured through units for instance fleets. The core units are not allowed to scale beyond this boundary. The parameter is used to split capacity allocation between core and task nodes.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Managed Scaling Policies using the EMR Cluster identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EMR Managed Scaling Policies using the EMR Cluster identifier. 
For example: + +```console +% terraform import aws_emr_managed_scaling_policy.example j-123456ABCDEF +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/emr_security_configuration.html.markdown b/website/docs/cdktf/typescript/r/emr_security_configuration.html.markdown new file mode 100644 index 00000000000..2ea491ec8c6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/emr_security_configuration.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "EMR" +layout: "aws" +page_title: "AWS: aws_emr_security_configuration" +description: |- + Provides a resource to manage AWS EMR Security Configurations +--- + + + +# Resource: aws_emr_security_configuration + +Provides a resource to manage AWS EMR Security Configurations + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrSecurityConfiguration } from "./.gen/providers/aws/emr-security-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrSecurityConfiguration(this, "foo", { + configuration: + '{\n "EncryptionConfiguration": {\n "AtRestEncryptionConfiguration": {\n "S3EncryptionConfiguration": {\n "EncryptionMode": "SSE-S3"\n },\n "LocalDiskEncryptionConfiguration": {\n "EncryptionKeyProviderType": "AwsKms",\n "AwsKmsKey": "arn:aws:kms:us-west-2:187416307283:alias/tf_emr_test_key"\n }\n },\n "EnableInTransitEncryption": false,\n "EnableAtRestEncryption": true\n }\n}\n\n', + name: "emrsc_other", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the EMR Security Configuration. By default generated by Terraform. +* `namePrefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +* `configuration` - (Required) A JSON formatted Security Configuration + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the EMR Security Configuration (Same as the `name`) +* `name` - The Name of the EMR Security Configuration +* `configuration` - The JSON formatted Security Configuration +* `creationDate` - Date the Security Configuration was created + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Security Configurations using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EMR Security Configurations using the `name`. 
For example:
+
+```console
+% terraform import aws_emr_security_configuration.sc example-sc-name
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/emr_studio.html.markdown b/website/docs/cdktf/typescript/r/emr_studio.html.markdown
new file mode 100644
index 00000000000..c92996169cc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/emr_studio.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "EMR"
+layout: "aws"
+page_title: "AWS: aws_emr_studio"
+description: |-
+  Provides an Elastic MapReduce Studio
+---
+
+
+
+# Resource: aws_emr_studio
+
+Provides an Elastic MapReduce Studio.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EmrStudio } from "./.gen/providers/aws/emr-studio";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EmrStudio(this, "example", {
+      authMode: "SSO",
+      defaultS3Location: "s3://${" + test.bucket + "}/test",
+      engineSecurityGroupId: Token.asString(awsSecurityGroupTest.id),
+      name: "example",
+      serviceRole: Token.asString(awsIamRoleTest.arn),
+      subnetIds: [Token.asString(awsSubnetTest.id)],
+      userRole: Token.asString(awsIamRoleTest.arn),
+      vpcId: Token.asString(awsVpcTest.id),
+      workspaceSecurityGroupId: Token.asString(awsSecurityGroupTest.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `authMode` - (Required) Specifies whether the Studio authenticates users using IAM or Amazon Web Services SSO. Valid values are `SSO` or `IAM`.
+* `defaultS3Location` - (Required) The Amazon S3 location to back up Amazon EMR Studio Workspaces and notebook files.
+* `name` - (Required) A descriptive name for the Amazon EMR Studio.
+* `engineSecurityGroupId` - (Required) The ID of the Amazon EMR Studio Engine security group. The Engine security group allows inbound network traffic from the Workspace security group, and it must be in the same VPC specified by `vpcId`.
+* `serviceRole` - (Required) The IAM role that the Amazon EMR Studio assumes. The service role provides a way for Amazon EMR Studio to interoperate with other Amazon Web Services services.
+* `subnetIds` - (Required) A list of subnet IDs to associate with the Amazon EMR Studio. A Studio can have a maximum of 5 subnets. The subnets must belong to the VPC specified by `vpcId`. Studio users can create a Workspace in any of the specified subnets.
+* `vpcId` - (Required) The ID of the Amazon Virtual Private Cloud (Amazon VPC) to associate with the Studio.
+* `workspaceSecurityGroupId` - (Required) The ID of the Amazon EMR Studio Workspace security group. The Workspace security group allows outbound network traffic to resources in the Engine security group, and it must be in the same VPC specified by `vpcId`.
+
+The following arguments are optional:
+
+* `description` - (Optional) A detailed description of the Amazon EMR Studio.
+* `idpAuthUrl` - (Optional) The authentication endpoint of your identity provider (IdP). Specify this value when you use IAM authentication and want to let federated users log in to a Studio with the Studio URL and credentials from your IdP. Amazon EMR Studio redirects users to this endpoint to enter credentials. 
+
+* `idpRelayStateParameterName` - (Optional) The name that your identity provider (IdP) uses for its RelayState parameter. For example, RelayState or TargetSource. Specify this value when you use IAM authentication and want to let federated users log in to a Studio using the Studio URL. The RelayState parameter differs by IdP.
+* `tags` - (Optional) List of tags to apply to the EMR Studio. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `userRole` - (Optional) The IAM user role that users and groups assume when logged in to an Amazon EMR Studio. Only specify a User Role when you use Amazon Web Services SSO authentication. The permissions attached to the User Role can be scoped down for each user or group using session policies.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the studio.
+* `url` - The unique access URL of the Amazon EMR Studio.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR studios using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EMR studios using the `id`. For example:
+
+```console
+% terraform import aws_emr_studio.studio es-123456ABCDEF
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/emr_studio_session_mapping.html.markdown b/website/docs/cdktf/typescript/r/emr_studio_session_mapping.html.markdown
new file mode 100644
index 00000000000..d7560f706f3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/emr_studio_session_mapping.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "EMR"
+layout: "aws"
+page_title: "AWS: aws_emr_studio_session_mapping"
+description: |-
+  Provides an Elastic MapReduce Studio Session Mapping
+---
+
+
+
+# Resource: aws_emr_studio_session_mapping
+
+Provides an Elastic MapReduce Studio Session Mapping.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EmrStudioSessionMapping } from "./.gen/providers/aws/emr-studio-session-mapping";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EmrStudioSessionMapping(this, "example", {
+      identityId: "example",
+      identityType: "USER",
+      sessionPolicyArn: Token.asString(awsIamPolicyExample.arn),
+      studioId: Token.asString(awsEmrStudioExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `identityId` - (Optional) The globally unique identifier (GUID) of the user or group from the Amazon Web Services SSO Identity Store.
+* `identityName` - (Optional) The name of the user or group from the Amazon Web Services SSO Identity Store. 
+
+* `identityType` - (Required) Specifies whether the identity to map to the Amazon EMR Studio is a `USER` or a `GROUP`.
+* `sessionPolicyArn` - (Required) The Amazon Resource Name (ARN) for the session policy that will be applied to the user or group. You should specify the ARN for the session policy that you want to apply, not the ARN of your user role.
+* `studioId` - (Required) The ID of the Amazon EMR Studio to which the user or group will be mapped.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Elastic MapReduce Studio Session Mapping.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR studio session mappings using `studioId:identityType:identityId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EMR studio session mappings using `studioId:identityType:identityId`. For example:
+
+```console
+% terraform import aws_emr_studio_session_mapping.example es-xxxxx:USER:xxxxx-xxx-xxx
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/emrcontainers_job_template.markdown b/website/docs/cdktf/typescript/r/emrcontainers_job_template.markdown
new file mode 100644
index 00000000000..b8280832a76
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/emrcontainers_job_template.markdown
@@ -0,0 +1,137 @@
+---
+subcategory: "EMR Containers"
+layout: "aws"
+page_title: "AWS: aws_emrcontainers_job_template"
+description: |-
+  Manages an EMR Containers (EMR on EKS) Job Template
+---
+
+
+
+# Resource: aws_emrcontainers_job_template
+
+Manages an EMR Containers (EMR on EKS) Job Template.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EmrcontainersJobTemplate } from "./.gen/providers/aws/emrcontainers-job-template";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EmrcontainersJobTemplate(this, "example", {
+      jobTemplateData: {
+        executionRoleArn: Token.asString(awsIamRoleExample.arn),
+        jobDriver: {
+          sparkSqlJobDriver: {
+            entryPoint: "default",
+          },
+        },
+        releaseLabel: "emr-6.10.0-latest",
+      },
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `jobTemplateData` - (Required) The job template data, which holds the values used in a StartJobRun API request (see the sketch after this list).
+* `kmsKeyArn` - (Optional) The KMS key ARN used to encrypt the job template.
+* `name` – (Required) The specified name of the job template.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
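+
+To show how `jobTemplateData` composes with the nested blocks documented below, here is a minimal, hypothetical sketch. The role ARN, entry point, log group, and S3 URI are illustrative placeholders, not values defined by this resource:
+
+```typescript
+// A hedged sketch only: the values below are illustrative assumptions.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { EmrcontainersJobTemplate } from "./.gen/providers/aws/emrcontainers-job-template";
+
+class JobTemplateSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EmrcontainersJobTemplate(this, "sketch", {
+      name: "sketch",
+      jobTemplateData: {
+        executionRoleArn: "arn:aws:iam::111122223333:role/example", // placeholder ARN
+        releaseLabel: "emr-6.10.0-latest",
+        jobDriver: {
+          // Exactly one job driver block is set, per the argument reference below.
+          sparkSubmitJobDriver: {
+            entryPoint: "s3://example-bucket/scripts/job.py", // placeholder entry point
+          },
+        },
+        configurationOverrides: {
+          applicationConfiguration: [
+            {
+              classification: "spark-defaults",
+              properties: {
+                "spark.dynamicAllocation.enabled": "false",
+              },
+            },
+          ],
+          monitoringConfiguration: {
+            cloudWatchMonitoringConfiguration: {
+              logGroupName: "/emr-containers/example", // placeholder log group
+            },
+            s3MonitoringConfiguration: {
+              logUri: "s3://example-bucket/logs/", // placeholder URI
+            },
+          },
+        },
+      },
+    });
+  }
+}
+```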
+ +### job_template_data Arguments + +* `configurationOverrides` - (Optional) The configuration settings that are used to override defaults configuration. +* `executionRoleArn` - (Required) The execution role ARN of the job run. +* `jobDriver` - (Required) Specify the driver that the job runs on. Exactly one of the two available job drivers is required, either sparkSqlJobDriver or sparkSubmitJobDriver. +* `jobTags` - (Optional) The tags assigned to jobs started using the job template. +* `releaseLabel` - (Required) The release version of Amazon EMR. + +#### configuration_overrides Arguments + +* `applicationConfiguration` - (Optional) The configurations for the application running by the job run. +* `monitoringConfiguration` - (Optional) The configurations for monitoring. + +##### application_configuration Arguments + +* `classification` - (Required) The classification within a configuration. +* `configurations` - (Optional) A list of additional configurations to apply within a configuration object. +* `properties` - (Optional) A set of properties specified within a configuration classification. + +##### monitoring_configuration Arguments + +* `cloudWatchMonitoringConfiguration` - (Optional) Monitoring configurations for CloudWatch. +* `persistentAppUi` - (Optional) Monitoring configurations for the persistent application UI. +* `s3MonitoringConfiguration` - (Optional) Amazon S3 configuration for monitoring log publishing. + +###### cloud_watch_monitoring_configuration Arguments + +* `logGroupName` - (Required) The name of the log group for log publishing. +* `logStreamNamePrefix` - (Optional) The specified name prefix for log streams. + +###### s3_monitoring_configuration Arguments + +* `logUri` - (Optional) Amazon S3 destination URI for log publishing. + +#### job_driver Arguments + +* `sparkSqlJobDriver` - (Optional) The job driver for job type. +* `sparkSubmitJobDriver` - (Optional) The job driver parameters specified for spark submit. + +##### spark_sql_job_driver Arguments + +* `entryPoint` - (Optional) The SQL file to be executed. +* `sparkSqlParameters` - (Optional) The Spark parameters to be included in the Spark SQL command. + +##### spark_submit_job_driver Arguments + +* `entryPoint` - (Required) The entry point of job application. +* `entryPointArguments` - (Optional) The arguments for job application. +* `sparkSubmitParameters` - (Optional) The Spark submit parameters that are used for job runs. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the job template. +* `id` - The ID of the job template. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS job templates using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EKS job templates using the `id`. 
For example: + +```console +% terraform import aws_emrcontainers_job_template.example a1b2c3d4e5f6g7h8i9j10k11l +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/emrcontainers_virtual_cluster.markdown b/website/docs/cdktf/typescript/r/emrcontainers_virtual_cluster.markdown new file mode 100644 index 00000000000..4bbacedfe3e --- /dev/null +++ b/website/docs/cdktf/typescript/r/emrcontainers_virtual_cluster.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "EMR Containers" +layout: "aws" +page_title: "AWS: aws_emrcontainers_virtual_cluster" +description: |- + Manages an EMR Containers (EMR on EKS) Virtual Cluster +--- + + + +# Resource: aws_emrcontainers_virtual_cluster + +Manages an EMR Containers (EMR on EKS) Virtual Cluster. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrcontainersVirtualCluster } from "./.gen/providers/aws/emrcontainers-virtual-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrcontainersVirtualCluster(this, "example", { + containerProvider: { + id: Token.asString(awsEksClusterExample.name), + info: { + eksInfo: { + namespace: "default", + }, + }, + type: "EKS", + }, + name: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `containerProvider` - (Required) Configuration block for the container provider associated with your cluster. +* `name` – (Required) Name of the virtual cluster. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### container_provider Arguments + +* `id` - The name of the container provider that is running your EMR Containers cluster +* `info` - Nested list containing information about the configuration of the container provider + * `eksInfo` - Nested list containing EKS-specific information about the cluster where the EMR Containers cluster is running + * `namespace` - The namespace where the EMR Containers cluster is running +* `type` - The type of the container provider + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the cluster. +* `id` - The ID of the cluster. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EKS Clusters using the `id`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EKS Clusters using the `id`. For example: + +```console +% terraform import aws_emrcontainers_virtual_cluster.example a1b2c3d4e5f6g7h8i9j10k11l +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/emrserverless_application.markdown b/website/docs/cdktf/typescript/r/emrserverless_application.markdown new file mode 100644 index 00000000000..e353073feeb --- /dev/null +++ b/website/docs/cdktf/typescript/r/emrserverless_application.markdown @@ -0,0 +1,191 @@ +--- +subcategory: "EMR Serverless" +layout: "aws" +page_title: "AWS: aws_emrserverless_application" +description: |- + Manages an EMR Serverless Application +--- + + + +# Resource: aws_emrserverless_application + +Manages an EMR Serverless Application. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrserverlessApplication } from "./.gen/providers/aws/emrserverless-application"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrserverlessApplication(this, "example", { + name: "example", + releaseLabel: "emr-6.6.0", + type: "hive", + }); + } +} + +``` + +### Initial Capacity Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrserverlessApplication } from "./.gen/providers/aws/emrserverless-application"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrserverlessApplication(this, "example", { + initialCapacity: [ + { + initialCapacityConfig: { + workerConfiguration: { + cpu: "2 vCPU", + memory: "10 GB", + }, + workerCount: 1, + }, + initialCapacityType: "HiveDriver", + }, + ], + name: "example", + releaseLabel: "emr-6.6.0", + type: "hive", + }); + } +} + +``` + +### Maximum Capacity Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EmrserverlessApplication } from "./.gen/providers/aws/emrserverless-application"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EmrserverlessApplication(this, "example", { + maximumCapacity: { + cpu: "2 vCPU", + memory: "10 GB", + }, + name: "example", + releaseLabel: "emr-6.6.0", + type: "hive", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `architecture` – (Optional) The CPU architecture of an application. 
Valid values are `ARM64` or `X86_64`. Default value is `X86_64`.
+* `autoStartConfiguration` – (Optional) The configuration for an application to automatically start on job submission.
+* `autoStopConfiguration` – (Optional) The configuration for an application to automatically stop after a certain amount of time being idle.
+* `imageConfiguration` – (Optional) The image configuration applied to all worker types.
+* `initialCapacity` – (Optional) The capacity to initialize when the application is created.
+* `maximumCapacity` – (Optional) The maximum capacity to allocate when the application is created. This is cumulative across all workers at any given point in time, not just when an application is created. No new resources will be created once any one of the defined limits is hit.
+* `name` – (Required) The name of the application.
+* `networkConfiguration` – (Optional) The network configuration for customer VPC connectivity.
+* `releaseLabel` – (Required) The EMR release version associated with the application.
+* `type` – (Required) The type of application you want to start, such as `spark` or `hive`.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### auto_start_configuration Arguments
+
+* `enabled` - (Optional) Enables the application to automatically start on job submission. Defaults to `true`.
+
+### auto_stop_configuration Arguments
+
+* `enabled` - (Optional) Enables the application to automatically stop after a certain amount of time being idle. Defaults to `true`.
+* `idleTimeoutMinutes` - (Optional) The amount of idle time in minutes after which your application will automatically stop. Defaults to `15` minutes.
+
+### initial_capacity Arguments
+
+* `initialCapacityConfig` - (Optional) The initial capacity configuration per worker.
+* `initialCapacityType` - (Required) The worker type for an analytics framework. For Spark applications, the key can either be set to `Driver` or `Executor`. For Hive applications, it can be set to `HiveDriver` or `TezTask`.
+
+### maximum_capacity Arguments
+
+* `cpu` - (Required) The maximum allowed CPU for an application.
+* `disk` - (Optional) The maximum allowed disk for an application.
+* `memory` - (Required) The maximum allowed memory for an application.
+
+### network_configuration Arguments
+
+* `securityGroupIds` - (Optional) The array of security group IDs for customer VPC connectivity.
+* `subnetIds` - (Optional) The array of subnet IDs for customer VPC connectivity.
+
+#### image_configuration Arguments
+
+* `imageUri` - (Required) The image URI.
+
+#### initial_capacity_config Arguments
+
+* `workerConfiguration` - (Optional) The resource configuration of the initial capacity configuration.
+* `workerCount` - (Required) The number of workers in the initial capacity configuration.
+
+##### worker_configuration Arguments
+
+* `cpu` - (Required) The CPU requirements for every worker instance of the worker type.
+* `disk` - (Optional) The disk requirements for every worker instance of the worker type.
+* `memory` - (Required) The memory requirements for every worker instance of the worker type.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the application.
+* `id` - The ID of the application. 
+
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EMR Serverless applications using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EMR Serverless applications using the `id`. For example:
+
+```console
+% terraform import aws_emrserverless_application.example id
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/evidently_feature.html.markdown b/website/docs/cdktf/typescript/r/evidently_feature.html.markdown
new file mode 100644
index 00000000000..b12f8e04441
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/evidently_feature.html.markdown
@@ -0,0 +1,246 @@
+---
+subcategory: "CloudWatch Evidently"
+layout: "aws"
+page_title: "AWS: aws_evidently_feature"
+description: |-
+  Provides a CloudWatch Evidently Feature resource.
+---
+
+
+
+# Resource: aws_evidently_feature
+
+Provides a CloudWatch Evidently Feature resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EvidentlyFeature } from "./.gen/providers/aws/evidently-feature";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlyFeature(this, "example", {
+      description: "example description",
+      name: "example",
+      project: Token.asString(awsEvidentlyProjectExample.name),
+      tags: {
+        Key1: "example Feature",
+      },
+      variations: [
+        {
+          name: "Variation1",
+          value: {
+            stringValue: "example",
+          },
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### With default variation
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EvidentlyFeature } from "./.gen/providers/aws/evidently-feature";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlyFeature(this, "example", {
+      defaultVariation: "Variation2",
+      name: "example",
+      project: Token.asString(awsEvidentlyProjectExample.name),
+      variations: [
+        {
+          name: "Variation1",
+          value: {
+            stringValue: "exampleval1",
+          },
+        },
+        {
+          name: "Variation2",
+          value: {
+            stringValue: "exampleval2",
+          },
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### With entity overrides
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EvidentlyFeature } from "./.gen/providers/aws/evidently-feature";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlyFeature(this, "example", {
+      entityOverrides: {
+        test1: "Variation1",
+      },
+      name: "example",
+      project: Token.asString(awsEvidentlyProjectExample.name),
+      variations: [
+        {
+          name: "Variation1",
+          value: {
+            stringValue: "exampleval1",
+          },
+        },
+        {
+          name: "Variation2",
+          value: {
+            stringValue: "exampleval2",
+          },
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### With evaluation strategy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EvidentlyFeature } from "./.gen/providers/aws/evidently-feature";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlyFeature(this, "example", {
+      entityOverrides: {
+        test1: "Variation1",
+      },
+      evaluationStrategy: "ALL_RULES",
+      name: "example",
+      project: Token.asString(awsEvidentlyProjectExample.name),
+      variations: [
+        {
+          name: "Variation1",
+          value: {
+            stringValue: "exampleval1",
+          },
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `defaultVariation` - (Optional) The name of the variation to use as the default variation. The default variation is served to users who are not allocated to any ongoing launches or experiments of this feature. This variation must also be listed in the `variations` structure. If you omit `defaultVariation`, the first variation listed in the `variations` structure is used as the default variation.
+* `description` - (Optional) Specifies the description of the feature.
+* `entityOverrides` - (Optional) Specify users that should always be served a specific variation of a feature. Each user is specified by a key-value pair. For each key, specify a user by entering their user ID, account ID, or some other identifier. For the value, specify the name of the variation that they are to be served.
+* `evaluationStrategy` - (Optional) Specify `ALL_RULES` to activate the traffic allocation specified by any ongoing launches or experiments. Specify `DEFAULT_VARIATION` to serve the default variation to all users instead.
+* `name` - (Required) The name for the new feature. Minimum length of `1`. Maximum length of `127`.
+* `project` - (Required) The name or ARN of the project that is to contain the new feature.
+* `tags` - (Optional) Tags to apply to the feature. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `variations` - (Required) One or more blocks that contain the configuration of the feature's different variations. [Detailed below](#variations)
+
+### `variations`
+
+The `variations` block supports the following arguments:
+
+* `name` - (Required) The name of the variation. Minimum length of `1`. Maximum length of `127`.
+* `value` - (Required) A block that specifies the value assigned to this variation. 
[Detailed below](#value)
+
+#### `value`
+
+The `value` block supports the following arguments:
+
+~> **NOTE:** You must specify exactly one of `boolValue`, `doubleValue`, `longValue`, `stringValue`.
+
+* `boolValue` - (Optional) If this feature uses the Boolean variation type, this field contains the Boolean value of this variation.
+* `doubleValue` - (Optional) If this feature uses the double variation type, this field contains the double value of this variation.
+* `longValue` - (Optional) If this feature uses the long variation type, this field contains the long value of this variation. Minimum value of `-9007199254740991`. Maximum value of `9007199254740991`.
+* `stringValue` - (Optional) If this feature uses the string variation type, this field contains the string value of this variation. Minimum length of `0`. Maximum length of `512`.
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `2m`)
+* `delete` - (Default `2m`)
+* `update` - (Default `2m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the feature.
+* `createdTime` - The date and time that the feature is created.
+* `evaluationRules` - One or more blocks that define the evaluation rules for the feature. [Detailed below](#evaluation_rules)
+* `id` - The feature `name` and the project `name` or `arn` separated by a colon (`:`).
+* `lastUpdatedTime` - The date and time that the feature was most recently updated.
+* `status` - The current state of the feature. Valid values are `AVAILABLE` and `UPDATING`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `valueType` - Defines the type of value used to define the different feature variations. Valid Values: `STRING`, `LONG`, `DOUBLE`, `BOOLEAN`.
+
+### `evaluationRules`
+
+The `evaluationRules` block supports the following attributes:
+
+* `name` - The name of the experiment or launch.
+* `type` - This value is `aws.evidently.splits` if this is an evaluation rule for a launch, and it is `aws.evidently.onlineab` if this is an evaluation rule for an experiment.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Feature using the feature `name` and `name` or `arn` of the hosting CloudWatch Evidently Project separated by a `:`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch Evidently Feature using the feature `name` and `name` or `arn` of the hosting CloudWatch Evidently Project separated by a `:`. 
For example: + +```console +% terraform import aws_evidently_feature.example exampleFeatureName:arn:aws:evidently:us-east-1:123456789012:project/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/evidently_launch.html.markdown b/website/docs/cdktf/typescript/r/evidently_launch.html.markdown new file mode 100644 index 00000000000..97e22a63ccd --- /dev/null +++ b/website/docs/cdktf/typescript/r/evidently_launch.html.markdown @@ -0,0 +1,498 @@ +--- +subcategory: "CloudWatch Evidently" +layout: "aws" +page_title: "AWS: aws_evidently_launch" +description: |- + Provides a CloudWatch Evidently Launch resource. +--- + + + +# Resource: aws_evidently_launch + +Provides a CloudWatch Evidently Launch resource. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyLaunch } from "./.gen/providers/aws/evidently-launch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyLaunch(this, "example", { + groups: [ + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation1", + variation: "Variation1", + }, + ], + name: "example", + project: Token.asString(awsEvidentlyProjectExample.name), + scheduledSplitsConfig: { + steps: [ + { + groupWeights: { + Variation1: 0, + }, + startTime: "2024-01-07 01:43:59+00:00", + }, + ], + }, + }); + } +} + +``` + +### With description + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyLaunch } from "./.gen/providers/aws/evidently-launch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyLaunch(this, "example", { + description: "example description", + groups: [ + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation1", + variation: "Variation1", + }, + ], + name: "example", + project: Token.asString(awsEvidentlyProjectExample.name), + scheduledSplitsConfig: { + steps: [ + { + groupWeights: { + Variation1: 0, + }, + startTime: "2024-01-07 01:43:59+00:00", + }, + ], + }, + }); + } +} + +``` + +### With multiple groups + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EvidentlyLaunch } from "./.gen/providers/aws/evidently-launch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyLaunch(this, "example", { + groups: [ + { + description: "first-group", + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation1", + variation: "Variation1", + }, + { + description: "second-group", + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation2", + variation: "Variation2", + }, + ], + name: "example", + project: Token.asString(awsEvidentlyProjectExample.name), + scheduledSplitsConfig: { + steps: [ + { + groupWeights: { + Variation1: 0, + Variation2: 0, + }, + startTime: "2024-01-07 01:43:59+00:00", + }, + ], + }, + }); + } +} + +``` + +### With metric_monitors + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyLaunch } from "./.gen/providers/aws/evidently-launch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyLaunch(this, "example", { + groups: [ + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation1", + variation: "Variation1", + }, + ], + metricMonitors: [ + { + metricDefinition: { + entityIdKey: "entity_id_key1", + eventPattern: + '{\\"Price\\":[{\\"numeric\\":[\\">\\",11,\\"<=\\",22]}]}', + name: "name1", + unitLabel: "unit_label1", + valueKey: "value_key1", + }, + }, + { + metricDefinition: { + entityIdKey: "entity_id_key2", + eventPattern: + '{\\"Price\\":[{\\"numeric\\":[\\">\\",9,\\"<=\\",19]}]}', + name: "name2", + unitLabel: "unit_label2", + valueKey: "value_key2", + }, + }, + ], + name: "example", + project: Token.asString(awsEvidentlyProjectExample.name), + scheduledSplitsConfig: { + steps: [ + { + groupWeights: { + Variation1: 0, + }, + startTime: "2024-01-07 01:43:59+00:00", + }, + ], + }, + }); + } +} + +``` + +### With randomization_salt + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyLaunch } from "./.gen/providers/aws/evidently-launch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyLaunch(this, "example", { + groups: [ + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation1", + variation: "Variation1", + }, + ], + name: "example", + project: Token.asString(awsEvidentlyProjectExample.name), + randomizationSalt: "example randomization salt", + scheduledSplitsConfig: { + steps: [ + { + groupWeights: { + Variation1: 0, + }, + startTime: "2024-01-07 01:43:59+00:00", + }, + ], + }, + }); + } +} + +``` + +### With multiple steps + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyLaunch } from "./.gen/providers/aws/evidently-launch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyLaunch(this, "example", { + groups: [ + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation1", + variation: "Variation1", + }, + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation2", + variation: "Variation2", + }, + ], + name: "example", + project: Token.asString(awsEvidentlyProjectExample.name), + scheduledSplitsConfig: { + steps: [ + { + groupWeights: { + Variation1: 15, + Variation2: 10, + }, + startTime: "2024-01-07 01:43:59+00:00", + }, + { + groupWeights: { + Variation1: 20, + Variation2: 25, + }, + startTime: "2024-01-08 01:43:59+00:00", + }, + ], + }, + }); + } +} + +``` + +### With segment overrides + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyLaunch } from "./.gen/providers/aws/evidently-launch"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyLaunch(this, "example", { + groups: [ + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation1", + variation: "Variation1", + }, + { + feature: Token.asString(awsEvidentlyFeatureExample.name), + name: "Variation2", + variation: "Variation2", + }, + ], + name: "example", + project: Token.asString(awsEvidentlyProjectExample.name), + scheduledSplitsConfig: { + steps: [ + { + groupWeights: { + Variation1: 0, + Variation2: 0, + }, + segmentOverrides: [ + { + evaluationOrder: 1, + segment: Token.asString(awsEvidentlySegmentExample.name), + weights: { + Variation2: 10000, + }, + }, + { + evaluationOrder: 2, + segment: Token.asString(awsEvidentlySegmentExample.name), + weights: { + Variation1: 40000, + Variation2: 30000, + }, + }, + ], + startTime: "2024-01-08 01:43:59+00:00", + }, + ], + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Specifies the description of the launch. +* `groups` - (Required) One or up to five blocks that contain the feature and variations that are to be used for the launch. [Detailed below](#groups). +* `metricMonitors` - (Optional) One or up to three blocks that define the metrics that will be used to monitor the launch performance. [Detailed below](#metric_monitors). +* `name` - (Required) The name for the new launch. Minimum length of `1`. Maximum length of `127`. +* `project` - (Required) The name or ARN of the project that is to contain the new launch. +* `randomizationSalt` - (Optional) When Evidently assigns a particular user session to a launch, it must use a randomization ID to determine which variation the user session is served. This randomization ID is a combination of the entity ID and randomizationSalt. If you omit randomizationSalt, Evidently uses the launch name as the randomizationSalt. +* `scheduledSplitsConfig` - (Optional) A block that defines the traffic allocation percentages among the feature variations during each step of the launch. [Detailed below](#scheduled_splits_config). 
+
+* `tags` - (Optional) Tags to apply to the launch. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `groups`
+
+The `groups` block supports the following arguments:
+
+* `description` - (Optional) Specifies the description of the launch group.
+* `feature` - (Required) Specifies the name of the feature that the launch is using.
+* `name` - (Required) Specifies the name of the launch group.
+* `variation` - (Required) Specifies the feature variation to use for this launch group.
+
+### `metricMonitors`
+
+The `metricMonitors` block supports the following arguments:
+
+* `metricDefinition` - (Required) A block that defines the metric. [Detailed below](#metric_definition).
+
+#### `metricDefinition`
+
+The `metricDefinition` block supports the following arguments:
+
+* `entityIdKey` - (Required) Specifies the entity, such as a user or session, that does an action that causes a metric value to be recorded. An example is `userDetailsUserId`.
+* `eventPattern` - (Required) Specifies the EventBridge event pattern that defines how the metric is recorded.
+* `name` - (Required) Specifies the name for the metric.
+* `unitLabel` - (Optional) Specifies a label for the units that the metric is measuring.
+* `valueKey` - (Required) Specifies the value that is tracked to produce the metric.
+
+### `scheduledSplitsConfig`
+
+The `scheduledSplitsConfig` block supports the following arguments:
+
+* `steps` - (Required) One or up to six blocks that define the traffic allocation percentages among the feature variations during each step of the launch. This also defines the start time of each step. [Detailed below](#steps).
+
+#### `steps`
+
+The `steps` block supports the following arguments:
+
+* `groupWeights` - (Required) The traffic allocation percentages among the feature variations during one step of a launch. This is a set of key-value pairs. The keys are variation names. The values represent the percentage of traffic to allocate to that variation during this step. For more information, refer to the [AWS documentation for ScheduledSplitConfig groupWeights](https://docs.aws.amazon.com/cloudwatchevidently/latest/APIReference/API_ScheduledSplitConfig.html).
+* `segmentOverrides` - (Required) One or up to six blocks that specify different traffic splits for one or more audience segments. A segment is a portion of your audience that shares one or more characteristics. Examples could be Chrome browser users, users in Europe, or Firefox browser users in Europe who also fit other criteria that your application collects, such as age. [Detailed below](#segment_overrides).
+* `startTime` - (Required) Specifies the date and time that this step of the launch starts.
+
+##### `segmentOverrides`
+
+* `evaluationOrder` - (Required) Specifies a number indicating the order to use to evaluate segment overrides, if there are more than one. Segment overrides with lower numbers are evaluated first.
+* `segment` - (Required) The name or ARN of the segment to use.
+* `weights` - (Required) The traffic allocation percentages among the feature variations to assign to this segment. This is a set of key-value pairs. The keys are variation names. The values represent the amount of traffic to allocate to that variation for this segment. This is expressed in thousandths of a percent, so a weight of 50000 represents 50% of traffic (see the sketch after this list). 
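+
+As a worked example of that arithmetic: 1% corresponds to 1,000 weight units, so 40% of a segment's traffic is expressed as `40000`. The helper below is an illustrative assumption, not part of the provider API:
+
+```typescript
+// Hypothetical helper that converts a percentage into the thousandths-of-a-percent
+// weight unit described above (e.g., 50% -> 50000).
+const percentToWeight = (percent: number): number => Math.round(percent * 1000);
+
+// Example weights map for a segment override, keyed by variation name.
+const weights = {
+  Variation1: percentToWeight(40), // 40000 = 40% of this segment's traffic
+  Variation2: percentToWeight(30), // 30000 = 30% of this segment's traffic
+};
+```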
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `2M`)
+* `delete` - (Default `2M`)
+* `update` - (Default `2M`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the launch.
+* `createdTime` - The date and time that the launch is created.
+* `execution` - A block that contains information about the start and end times of the launch. [Detailed below](#execution).
+* `id` - The launch `name` and the project `name` or `arn` separated by a colon (`:`).
+* `lastUpdatedTime` - The date and time that the launch was most recently updated.
+* `status` - The current state of the launch. Valid values are `CREATED`, `UPDATING`, `RUNNING`, `COMPLETED`, and `CANCELLED`.
+* `statusReason` - If the launch was stopped, this is the string that was entered by the person who stopped the launch, to explain why it was stopped.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `type` - The type of launch.
+
+### `execution`
+
+The `execution` block supports the following attributes:
+
+* `endedTime` - The date and time that the launch ended.
+* `startedTime` - The date and time that the launch started.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Launch using the `name` of the launch and `name` of the project or `arn` of the hosting CloudWatch Evidently Project separated by a `:`. For example:
+
+Import using the `name` of the launch and `name` of the project separated by a `:`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import using the `name` of the launch and `arn` of the project separated by a `:`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch Evidently Launch using the `name` of the launch and `name` of the project or `arn` of the hosting CloudWatch Evidently Project separated by a `:`.
For example: + +Import using the `name` of the launch and `name` of the project separated by a `:`: + +```console +% terraform import aws_evidently_launch.example exampleLaunchName:exampleProjectName +``` + +Import using the `name` of the launch and `arn` of the project separated by a `:`: + +```console +% terraform import aws_evidently_launch.example exampleLaunchName:arn:aws:evidently:us-east-1:123456789012:project/exampleProjectName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/evidently_project.html.markdown b/website/docs/cdktf/typescript/r/evidently_project.html.markdown new file mode 100644 index 00000000000..b51ecbccfac --- /dev/null +++ b/website/docs/cdktf/typescript/r/evidently_project.html.markdown @@ -0,0 +1,177 @@ +--- +subcategory: "CloudWatch Evidently" +layout: "aws" +page_title: "AWS: aws_evidently_project" +description: |- + Provides a CloudWatch Evidently Project resource. +--- + + + +# Resource: aws_evidently_project + +Provides a CloudWatch Evidently Project resource. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyProject } from "./.gen/providers/aws/evidently-project"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyProject(this, "example", { + description: "Example Description", + name: "Example", + tags: { + Key1: "example Project", + }, + }); + } +} + +``` + +### Store evaluation events in a CloudWatch Log Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EvidentlyProject } from "./.gen/providers/aws/evidently-project"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new EvidentlyProject(this, "example", { + dataDelivery: { + cloudwatchLogs: { + logGroup: "example-log-group-name", + }, + }, + description: "Example Description", + name: "Example", + tags: { + Key1: "example Project", + }, + }); + } +} + +``` + +### Store evaluation events in an S3 bucket + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { EvidentlyProject } from "./.gen/providers/aws/evidently-project";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlyProject(this, "example", {
+      dataDelivery: {
+        s3Destination: {
+          bucket: "example-bucket-name",
+          prefix: "example",
+        },
+      },
+      description: "Example Description",
+      name: "Example",
+      tags: {
+        Key1: "example Project",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `dataDelivery` - (Optional) A block that contains information about where Evidently is to store evaluation events for longer-term storage, if you choose to do so. If you choose not to store these events, Evidently deletes them after using them to produce metrics and other experiment results that you can view. See below.
+* `description` - (Optional) Specifies the description of the project.
+* `name` - (Required) A name for the project.
+* `tags` - (Optional) Tags to apply to the project. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `dataDelivery` block supports the following arguments:
+
+~> **NOTE:** You can't specify both `cloudwatchLogs` and `s3Destination`.
+
+* `cloudwatchLogs` - (Optional) A block that defines the CloudWatch Log Group that stores the evaluation events. See below.
+* `s3Destination` - (Optional) A block that defines the S3 bucket and prefix that stores the evaluation events. See below.
+
+The `cloudwatchLogs` block supports the following arguments:
+
+* `logGroup` - (Optional) The name of the log group where the project stores evaluation events.
+
+The `s3Destination` block supports the following arguments:
+
+* `bucket` - (Optional) The name of the bucket in which Evidently stores evaluation events.
+* `prefix` - (Optional) The bucket prefix in which Evidently stores evaluation events.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `2M`)
+* `delete` - (Default `2M`)
+* `update` - (Default `2M`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `activeExperimentCount` - The number of ongoing experiments currently in the project.
+* `activeLaunchCount` - The number of ongoing launches currently in the project.
+* `arn` - The ARN of the project.
+* `createdTime` - The date and time that the project is created.
+* `experimentCount` - The number of experiments currently in the project. This includes all experiments that have been created and not deleted, whether they are ongoing or not.
+* `featureCount` - The number of features currently in the project.
+* `id` - The ID has the same value as the ARN of the project.
+* `lastUpdatedTime` - The date and time that the project was most recently updated.
+* `launchCount` - The number of launches currently in the project. This includes all launches that have been created and not deleted, whether they are ongoing or not.
+* `status` - The current state of the project. Valid values are `AVAILABLE` and `UPDATING`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
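+
+The exported attributes can be referenced like any other CDKTF token. A minimal sketch (illustrative only, not produced by `cdktf convert`) that surfaces the project's ARN, which `id` mirrors, as a stack output:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { EvidentlyProject } from "./.gen/providers/aws/evidently-project";
+class ProjectArnOutput extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new EvidentlyProject(this, "example", {
+      name: "Example",
+    });
+    // Resources such as a launch accept the project name or ARN.
+    new TerraformOutput(this, "project_arn", { value: example.arn });
+  }
+}
+```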
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Project using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch Evidently Project using the `arn`. For example:
+
+```console
+% terraform import aws_evidently_project.example arn:aws:evidently:us-east-1:123456789012:project/example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/evidently_segment.html.markdown b/website/docs/cdktf/typescript/r/evidently_segment.html.markdown
new file mode 100644
index 00000000000..e32e6f21dd3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/evidently_segment.html.markdown
@@ -0,0 +1,137 @@
+---
+subcategory: "CloudWatch Evidently"
+layout: "aws"
+page_title: "AWS: aws_evidently_segment"
+description: |-
+  Provides a CloudWatch Evidently Segment resource.
+---
+
+
+
+# Resource: aws_evidently_segment
+
+Provides a CloudWatch Evidently Segment resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EvidentlySegment } from "./.gen/providers/aws/evidently-segment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlySegment(this, "example", {
+      name: "example",
+      pattern: '{\\"Price\\":[{\\"numeric\\":[\\">\\",10,\\"<=\\",20]}]}',
+      tags: {
+        Key1: "example Segment",
+      },
+    });
+  }
+}
+
+```
+
+### With JSON object in pattern
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EvidentlySegment } from "./.gen/providers/aws/evidently-segment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlySegment(this, "example", {
+      name: "example",
+      pattern:
+        ' {\n "Price": [\n {\n "numeric": [">",10,"<=",20]\n }\n ]\n }\n\n',
+      tags: {
+        Key1: "example Segment",
+      },
+    });
+  }
+}
+
+```
+
+### With Description
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EvidentlySegment } from "./.gen/providers/aws/evidently-segment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new EvidentlySegment(this, "example", {
+      description: "example",
+      name: "example",
+      pattern: '{\\"Price\\":[{\\"numeric\\":[\\">\\",10,\\"<=\\",20]}]}',
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional, Forces new resource) Specifies the description of the segment.
+* `name` - (Required, Forces new resource) A name for the segment.
+* `pattern` - (Required, Forces new resource) The pattern to use for the segment. For more information about pattern syntax, see [Segment rule pattern syntax](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Evidently-segments.html#CloudWatch-Evidently-segments-syntax).
+* `tags` - (Optional) Tags to apply to the segment. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the segment.
+* `createdTime` - The date and time that the segment is created.
+* `experimentCount` - The number of experiments that this segment is used in. This count includes all current experiments, not just those that are currently running.
+* `id` - The ID has the same value as the ARN of the segment.
+* `lastUpdatedTime` - The date and time that this segment was most recently updated.
+* `launchCount` - The number of launches that this segment is used in. This count includes all current launches, not just those that are currently running.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Evidently Segment using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch Evidently Segment using the `arn`. For example:
+
+```console
+% terraform import aws_evidently_segment.example arn:aws:evidently:us-west-2:123456789012:segment/example
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/finspace_kx_cluster.html.markdown b/website/docs/cdktf/typescript/r/finspace_kx_cluster.html.markdown
new file mode 100644
index 00000000000..0d9fe803699
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/finspace_kx_cluster.html.markdown
@@ -0,0 +1,225 @@
+---
+subcategory: "FinSpace"
+layout: "aws"
+page_title: "AWS: aws_finspace_kx_cluster"
+description: |-
+  Terraform resource for managing an AWS FinSpace Kx Cluster.
+---
+
+
+
+# Resource: aws_finspace_kx_cluster
+
+Terraform resource for managing an AWS FinSpace Kx Cluster.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FinspaceKxCluster } from "./.gen/providers/aws/finspace-kx-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FinspaceKxCluster(this, "example", {
+      availabilityZoneId: "use1-az2",
+      azMode: "SINGLE",
+      cacheStorageConfigurations: [
+        {
+          size: 1200,
+          type: "CACHE_1000",
+        },
+      ],
+      capacityConfiguration: {
+        nodeCount: 2,
+        nodeType: "kx.s.2xlarge",
+      },
+      // `test` and `object` refer to an S3 bucket and object defined elsewhere.
+      code: {
+        s3Bucket: test.id,
+        s3Key: object.key,
+      },
+      database: [
+        {
+          databaseName: Token.asString(awsFinspaceKxDatabaseExample.name),
+          // The converter duplicated this block in snake_case; the cache
+          // configuration belongs here in camelCase, with `dbPaths` as a list.
+          cacheConfigurations: [
+            {
+              cacheType: "CACHE_1000",
+              dbPaths: ["/"],
+            },
+          ],
+        },
+      ],
+      environmentId: Token.asString(awsFinspaceKxEnvironmentExample.id),
+      name: "my-tf-kx-cluster",
+      releaseLabel: "1.0",
+      type: "HDB",
+      vpcConfiguration: {
+        ipAddressType: "IP_V4",
+        securityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+        subnetIds: [Token.asString(awsSubnetExample.id)],
+        vpcId: Token.asString(awsVpcTest.id),
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `azMode` - (Required) The number of availability zones you want to assign per cluster. This can be one of the following:
+    * SINGLE - Assigns one availability zone per cluster.
+    * MULTI - Assigns all the availability zones per cluster.
+* `capacityConfiguration` - (Required) Structure for the metadata of a cluster. Includes information like the CPUs needed, memory of instances, and number of instances. See [capacity_configuration](#capacity_configuration).
+* `environmentId` - (Required) Unique identifier for the KX environment.
+* `name` - (Required) Unique name for the cluster that you want to create.
+* `releaseLabel` - (Required) Version of FinSpace Managed kdb to run.
+* `type` - (Required) Type of KDB database. The following types are available:
+    * HDB - Historical Database. The data is only accessible with read-only permissions from one of the FinSpace managed KX databases mounted to the cluster.
+    * RDB - Realtime Database. This type of database captures all the data from a ticker plant and stores it in memory until the end of day, after which it writes all of its data to a disk and reloads the HDB. This cluster type requires local storage for temporary storage of data during the savedown process. If you specify this field in your request, you must provide the `savedownStorageConfiguration` parameter.
+    * GATEWAY - A gateway cluster allows you to access data across processes in kdb systems. It allows you to create your own routing logic using the initialization scripts and custom code. This type of cluster does not require writable local storage.
+* `vpcConfiguration` - (Required) Configuration details about the network where the Privatelink endpoint of the cluster resides. See [vpc_configuration](#vpc_configuration).
+
+The following arguments are optional:
+
+* `autoScalingConfiguration` - (Optional) Configuration based on which FinSpace will scale in or scale out nodes in your cluster.
See [auto_scaling_configuration](#auto_scaling_configuration).
+* `availabilityZoneId` - (Optional) The availability zone identifiers for the requested regions. Required when `azMode` is set to SINGLE.
+* `cacheStorageConfigurations` - (Optional) Configurations for a read-only cache storage associated with a cluster. This cache will be stored as an FSx Lustre that reads from the S3 store. See [cache_storage_configuration](#cache_storage_configuration).
+* `code` - (Optional) Details of the custom code that you want to use inside a cluster when analyzing data. Consists of the S3 source bucket, location, object version, and the relative path from where the custom code is loaded into the cluster. See [code](#code).
+* `commandLineArguments` - (Optional) List of key-value pairs to make available inside the cluster.
+* `database` - (Optional) KX database that will be available for querying. Defined below.
+* `description` - (Optional) Description of the cluster.
+* `executionRole` - (Optional) An IAM role that defines a set of permissions associated with a cluster. These permissions are assumed when a cluster attempts to access another cluster.
+* `initializationScript` - (Optional) Path to Q program that will be run at launch of a cluster. This is a relative path within the .zip file that contains the custom code, which will be loaded on the cluster. It must include the file name itself. For example, `somedir/init.q`.
+* `savedownStorageConfiguration` - (Optional) Size and type of the temporary storage that is used to hold data during the savedown process. This parameter is required when you choose `type` as RDB. All the data written to this storage space is lost when the cluster node is restarted. See [savedown_storage_configuration](#savedown_storage_configuration).
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### auto_scaling_configuration
+
+The auto_scaling_configuration block supports the following arguments:
+
+* `autoScalingMetric` - (Required) Metric your cluster will track in order to scale in and out. For example, CPU_UTILIZATION_PERCENTAGE is the average CPU usage across all nodes in a cluster.
+* `minNodeCount` - (Required) Lowest number of nodes to scale. Must be at least 1 and less than the `maxNodeCount`. If nodes in the cluster belong to multiple availability zones, then `minNodeCount` must be at least 3.
+* `maxNodeCount` - (Required) Highest number of nodes to scale. Cannot be greater than 5.
+* `metricTarget` - (Required) Desired value of the chosen `autoScalingMetric`. When the metric drops below this value, the cluster will scale in. When the metric goes above this value, the cluster will scale out. Can be set between 0 and 100 percent.
+* `scaleInCooldownSeconds` - (Required) Duration in seconds that FinSpace will wait after a scale in event before initiating another scaling event.
+* `scaleOutCooldownSeconds` - (Required) Duration in seconds that FinSpace will wait after a scale out event before initiating another scaling event.
+
+### capacity_configuration
+
+The capacity_configuration block supports the following arguments:
+
+* `nodeType` - (Required) Determines the hardware of the host computer used for your cluster instance. Each node type offers different memory and storage capabilities.
Choose a node type based on the requirements of the application or software that you plan to run on your instance.
+
+    You can only specify one of the following values:
+    * kx.s.large – The node type with a configuration of 12 GiB memory and 2 vCPUs.
+    * kx.s.xlarge – The node type with a configuration of 27 GiB memory and 4 vCPUs.
+    * kx.s.2xlarge – The node type with a configuration of 54 GiB memory and 8 vCPUs.
+    * kx.s.4xlarge – The node type with a configuration of 108 GiB memory and 16 vCPUs.
+    * kx.s.8xlarge – The node type with a configuration of 216 GiB memory and 32 vCPUs.
+    * kx.s.16xlarge – The node type with a configuration of 432 GiB memory and 64 vCPUs.
+    * kx.s.32xlarge – The node type with a configuration of 864 GiB memory and 128 vCPUs.
+* `nodeCount` - (Required) Number of instances running in a cluster. Must be at least 1 and at most 5.
+
+### cache_storage_configuration
+
+The cache_storage_configuration block supports the following arguments:
+
+* `type` - (Required) Type of cache storage. The valid values are:
+    * CACHE_1000 - This type provides at least 1000 MB/s disk access throughput.
+* `size` - (Required) Size of cache in Gigabytes.
+
+### code
+
+The code block supports the following arguments:
+
+* `s3Bucket` - (Required) Unique name for the S3 bucket.
+* `s3Key` - (Required) Full S3 path (excluding bucket) to the .zip file that contains the code to be loaded onto the cluster when it’s started.
+* `s3ObjectVersion` - (Optional) Version of an S3 Object.
+
+### database
+
+The database block supports the following arguments:
+
+* `databaseName` - (Required) Name of the KX database.
+* `cacheConfigurations` - (Optional) Configuration details for the disk cache to increase performance reading from a KX database mounted to the cluster. See [cache_configurations](#cache_configurations).
+* `changesetId` - (Optional) A unique identifier of the changeset that is associated with the cluster.
+
+#### cache_configurations
+
+The cache_configurations block supports the following arguments:
+
+* `cacheType` - (Required) Type of disk cache.
+* `dbPaths` - (Optional) Paths within the database to cache.
+
+### savedown_storage_configuration
+
+The savedown_storage_configuration block supports the following arguments:
+
+* `type` - (Required) Type of writeable storage space for temporarily storing your savedown data. The valid values are:
+    * SDS01 - This type represents 3000 IOPS and the io2 EBS volume type.
+* `size` - (Required) Size of temporary storage in bytes.
+
+### vpc_configuration
+
+The vpc_configuration block supports the following arguments:
+
+* `vpcId` - (Required) Identifier of the VPC endpoint.
+* `securityGroupIds` - (Required) Unique identifier of the VPC security group applied to the VPC endpoint ENI for the cluster.
+* `subnetIds` - (Required) Identifier of the subnet that the Privatelink VPC endpoint uses to connect to the cluster.
+* `ipAddressType` - (Required) IP address type for cluster network configuration parameters. The following type is available: IP_V4 - IP address version 4.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) identifier of the KX cluster.
+* `createdTimestamp` - Timestamp at which the cluster is created in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `id` - A comma-delimited string joining environment ID and cluster name.
+* `lastModifiedTimestamp` - Last timestamp at which the cluster was updated in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `2M`) +* `delete` - (Default `40M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx Cluster using the `id` (environment ID and cluster name, comma-delimited). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an AWS FinSpace Kx Cluster using the `id` (environment ID and cluster name, comma-delimited). For example: + +```console +% terraform import aws_finspace_kx_cluster.example n3ceo7wqxoxcti5tujqwzs,my-tf-kx-cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/finspace_kx_database.html.markdown b/website/docs/cdktf/typescript/r/finspace_kx_database.html.markdown new file mode 100644 index 00000000000..3ba7267906d --- /dev/null +++ b/website/docs/cdktf/typescript/r/finspace_kx_database.html.markdown @@ -0,0 +1,115 @@ +--- +subcategory: "FinSpace" +layout: "aws" +page_title: "AWS: aws_finspace_kx_database" +description: |- + Terraform resource for managing an AWS FinSpace Kx Database. +--- + + + +# Resource: aws_finspace_kx_database + +Terraform resource for managing an AWS FinSpace Kx Database. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FinspaceKxDatabase } from "./.gen/providers/aws/finspace-kx-database"; +import { FinspaceKxEnvironment } from "./.gen/providers/aws/finspace-kx-environment"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new KmsKey(this, "example", { + deletionWindowInDays: 7, + description: "Example KMS Key", + }); + const awsFinspaceKxEnvironmentExample = new FinspaceKxEnvironment( + this, + "example_1", + { + kmsKeyId: example.arn, + name: "my-tf-kx-environment", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsFinspaceKxEnvironmentExample.overrideLogicalId("example");
+    const awsFinspaceKxDatabaseExample = new FinspaceKxDatabase(
+      this,
+      "example_2",
+      {
+        description: "Example database description",
+        environmentId: Token.asString(awsFinspaceKxEnvironmentExample.id),
+        name: "my-tf-kx-database",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFinspaceKxDatabaseExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `environmentId` - (Required) Unique identifier for the KX environment.
+* `name` - (Required) Name of the KX database.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the KX database.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) identifier of the KX database.
+* `createdTimestamp` - Timestamp at which the database is created in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `id` - A comma-delimited string joining environment ID and database name.
+* `lastModifiedTimestamp` - Last timestamp at which the database was updated in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx Database using the `id` (environment ID and database name, comma-delimited). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an AWS FinSpace Kx Database using the `id` (environment ID and database name, comma-delimited).
For example: + +```console +% terraform import aws_finspace_kx_database.example n3ceo7wqxoxcti5tujqwzs,my-tf-kx-database +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/finspace_kx_environment.html.markdown b/website/docs/cdktf/typescript/r/finspace_kx_environment.html.markdown new file mode 100644 index 00000000000..e96e23d258a --- /dev/null +++ b/website/docs/cdktf/typescript/r/finspace_kx_environment.html.markdown @@ -0,0 +1,167 @@ +--- +subcategory: "FinSpace" +layout: "aws" +page_title: "AWS: aws_finspace_kx_environment" +description: |- + Terraform resource for managing an AWS FinSpace Kx Environment. +--- + + + +# Resource: aws_finspace_kx_environment + +Terraform resource for managing an AWS FinSpace Kx Environment. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FinspaceKxEnvironment } from "./.gen/providers/aws/finspace-kx-environment"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new KmsKey(this, "example", { + deletionWindowInDays: 7, + description: "Sample KMS Key", + }); + const awsFinspaceKxEnvironmentExample = new FinspaceKxEnvironment( + this, + "example_1", + { + kmsKeyId: example.arn, + name: "my-tf-kx-environment", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsFinspaceKxEnvironmentExample.overrideLogicalId("example"); + } +} + +``` + +### With Network Setup + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Ec2TransitGateway } from "./.gen/providers/aws/ec2-transit-gateway"; +import { FinspaceKxEnvironment } from "./.gen/providers/aws/finspace-kx-environment"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Ec2TransitGateway(this, "example", { + description: "example", + }); + const awsKmsKeyExample = new KmsKey(this, "example_1", { + deletionWindowInDays: 7, + description: "Sample KMS Key", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKmsKeyExample.overrideLogicalId("example"); + new FinspaceKxEnvironment(this, "example_env", { + customDnsConfiguration: [ + { + customDnsServerIp: "10.0.0.76", + customDnsServerName: "example.finspace.amazonaws.com", + }, + ], + description: "Environment description", + kmsKeyId: Token.asString(awsKmsKeyExample.arn), + name: "my-tf-kx-environment", + transitGatewayConfiguration: { + routableCidrSpace: "100.64.0.0/26", + transitGatewayId: example.id, + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the KX environment that you want to create. 
+* `kmsKeyId` - (Required) KMS key ID to encrypt your data in the FinSpace environment.
+
+The following arguments are optional:
+
+* `customDnsConfiguration` - (Optional) List of DNS server name and server IP. This is used to set up Route-53 outbound resolvers. Defined below.
+* `description` - (Optional) Description for the KX environment.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `transitGatewayConfiguration` - (Optional) Transit gateway and network configuration that is used to connect the KX environment to an internal network. Defined below.
+
+### custom_dns_configuration
+
+The custom_dns_configuration block supports the following arguments:
+
+* `customDnsServerIp` - (Required) IP address of the DNS server.
+* `customDnsServerName` - (Required) Name of the DNS server.
+
+### transit_gateway_configuration
+
+The transit_gateway_configuration block supports the following arguments:
+
+* `routableCidrSpace` - (Required) Routing CIDR on behalf of the KX environment. It can be any `/26` range in the `100.64.0.0` CIDR space. Once provided, it will be added to the customer's transit gateway routing table so that traffic can be routed to the KX network.
+* `transitGatewayId` - (Required) Identifier of the transit gateway created by the customer to connect outbound traffic from the KX network to your internal network.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) identifier of the KX environment.
+* `availabilityZones` - AWS Availability Zone IDs that this environment is available in. Important when selecting VPC subnets to use in cluster creation.
+* `createdTimestamp` - Timestamp at which the environment is created in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `id` - Unique identifier for the KX environment.
+* `infrastructureAccountId` - Unique identifier for the AWS environment infrastructure account.
+* `lastModifiedTimestamp` - Last timestamp at which the environment was updated in FinSpace. Value determined as epoch time in seconds. For example, the value for Monday, November 1, 2021 12:00:00 PM UTC is specified as 1635768000.
+* `status` - Status of environment creation.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx Environment using the `id`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an AWS FinSpace Kx Environment using the `id`. For example: + +```console +% terraform import aws_finspace_kx_environment.example n3ceo7wqxoxcti5tujqwzs +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/finspace_kx_user.html.markdown b/website/docs/cdktf/typescript/r/finspace_kx_user.html.markdown new file mode 100644 index 00000000000..e1fd89da963 --- /dev/null +++ b/website/docs/cdktf/typescript/r/finspace_kx_user.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "FinSpace" +layout: "aws" +page_title: "AWS: aws_finspace_kx_user" +description: |- + Terraform resource for managing an AWS FinSpace Kx User. +--- + + + +# Resource: aws_finspace_kx_user + +Terraform resource for managing an AWS FinSpace Kx User. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FinspaceKxEnvironment } from "./.gen/providers/aws/finspace-kx-environment"; +import { FinspaceKxUser } from "./.gen/providers/aws/finspace-kx-user"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new IamRole(this, "example", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "ec2.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "example-role", + }); + const awsKmsKeyExample = new KmsKey(this, "example_1", { + deletionWindowInDays: 7, + description: "Example KMS Key", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKmsKeyExample.overrideLogicalId("example"); + const awsFinspaceKxEnvironmentExample = new FinspaceKxEnvironment( + this, + "example_2", + { + kmsKeyId: Token.asString(awsKmsKeyExample.arn), + name: "my-tf-kx-environment", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsFinspaceKxEnvironmentExample.overrideLogicalId("example"); + const awsFinspaceKxUserExample = new FinspaceKxUser(this, "example_3", { + environmentId: Token.asString(awsFinspaceKxEnvironmentExample.id), + iamRole: example.arn, + name: "my-tf-kx-user", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsFinspaceKxUserExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) A unique identifier for the user. +* `environmentId` - (Required) Unique identifier for the KX environment. +* `iamRole` - (Required) IAM role ARN to be associated with the user. 
+ +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) identifier of the KX user. +* `id` - A comma-delimited string joining environment ID and user name. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an AWS FinSpace Kx User using the `id` (environment ID and user name, comma-delimited). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an AWS FinSpace Kx User using the `id` (environment ID and user name, comma-delimited). For example: + +```console +% terraform import aws_finspace_kx_user.example n3ceo7wqxoxcti5tujqwzs,my-tf-kx-user +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/fis_experiment_template.html.markdown b/website/docs/cdktf/typescript/r/fis_experiment_template.html.markdown new file mode 100644 index 00000000000..0646dda7e69 --- /dev/null +++ b/website/docs/cdktf/typescript/r/fis_experiment_template.html.markdown @@ -0,0 +1,178 @@ +--- +subcategory: "FIS (Fault Injection Simulator)" +layout: "aws" +page_title: "AWS: aws_fis_experiment_template" +description: |- + Provides an FIS Experiment Template. +--- + + + +# Resource: aws_fis_experiment_template + +Provides an FIS Experiment Template, which can be used to run an experiment. +An experiment template contains one or more actions to run on specified targets during an experiment. +It also contains the stop conditions that prevent the experiment from going out of bounds. +See [Amazon Fault Injection Simulator](https://docs.aws.amazon.com/fis/index.html) +for more information. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { FisExperimentTemplate } from "./.gen/providers/aws/fis-experiment-template";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FisExperimentTemplate(this, "example", {
+      action: [
+        {
+          actionId: "aws:ec2:terminate-instances",
+          name: "example-action",
+          target: {
+            key: "Instances",
+            value: "example-target",
+          },
+        },
+      ],
+      description: "example",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      stopCondition: [
+        {
+          source: "none",
+        },
+      ],
+      target: [
+        {
+          name: "example-target",
+          resourceTag: [
+            {
+              key: "env",
+              value: "example",
+            },
+          ],
+          resourceType: "aws:ec2:instance",
+          selectionMode: "COUNT(1)",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `action` - (Required) Action to be performed during an experiment. See below.
+* `description` - (Required) Description for the experiment template.
+* `roleArn` - (Required) ARN of an IAM role that grants the AWS FIS service permission to perform service actions on your behalf.
+* `stopCondition` - (Required) When an ongoing experiment should be stopped. See below.
+
+The following arguments are optional:
+
+* `tags` - (Optional) Key-value mapping of tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `target` - (Optional) Target of an action. See below.
+* `logConfiguration` - (Optional) The configuration for experiment logging. See below.
+
+### `action`
+
+* `actionId` - (Required) ID of the action. To find out what actions are supported see [AWS FIS actions reference](https://docs.aws.amazon.com/fis/latest/userguide/fis-actions-reference.html).
+* `name` - (Required) Friendly name of the action.
+* `description` - (Optional) Description of the action.
+* `parameter` - (Optional) Parameter(s) for the action, if applicable. See below.
+* `startAfter` - (Optional) Set of action names that must complete before this action can be executed.
+* `target` - (Optional) Action's target, if applicable. See below.
+
+#### `parameter`
+
+* `key` - (Required) Parameter name.
+* `value` - (Required) Parameter value.
+
+For a list of parameters supported by each action, see [AWS FIS actions reference](https://docs.aws.amazon.com/fis/latest/userguide/fis-actions-reference.html).
+
+#### `target` (`action.*Target`)
+
+* `key` - (Required) Target type. Valid values are `Cluster` (EKS Cluster), `Clusters` (ECS Clusters), `DBInstances` (RDS DB Instances), `Instances` (EC2 Instances), `Nodegroups` (EKS Node groups), `Roles` (IAM Roles), `SpotInstances` (EC2 Spot Instances), `Subnets` (VPC Subnets), `Volumes` (EBS Volumes), `Pods` (EKS Pods), `Tasks` (ECS Tasks). See the [documentation](https://docs.aws.amazon.com/fis/latest/userguide/actions.html#action-targets) for more details.
+* `value` - (Required) Target name, referencing a corresponding target.
+
+### `stopCondition`
+
+* `source` - (Required) Source of the condition. One of `none`, `aws:cloudwatch:alarm`.
+* `value` - (Optional) ARN of the CloudWatch alarm. Required if the source is a CloudWatch alarm.
+
+### `target`
+
+* `name` - (Required) Friendly name given to the target.
+* `resourceType` - (Required) AWS resource type. The resource type must be supported for the specified action.
To find out what resource types are supported, see [Targets for AWS FIS](https://docs.aws.amazon.com/fis/latest/userguide/targets.html#resource-types).
+* `selectionMode` - (Required) Scopes the identified resources. Valid values are `ALL` (all identified resources), `COUNT(n)` (randomly select `n` of the identified resources), `PERCENT(n)` (randomly select `n` percent of the identified resources).
+* `filter` - (Optional) Filter(s) for the target. Filters can be used to select resources based on specific attributes returned by the respective describe action of the resource type. For more information, see [Targets for AWS FIS](https://docs.aws.amazon.com/fis/latest/userguide/targets.html#target-filters). See below.
+* `resourceArns` - (Optional) Set of ARNs of the resources to target with an action. Conflicts with `resourceTag`.
+* `resourceTag` - (Optional) Tag(s) the resources need to have to be considered a valid target for an action. Conflicts with `resourceArns`. See below.
+* `parameters` - (Optional) The resource type parameters.
+
+~> **NOTE:** The `target` configuration block requires either `resourceArns` or `resourceTag`.
+
+#### `filter`
+
+* `path` - (Required) Attribute path for the filter.
+* `values` - (Required) Set of attribute values for the filter.
+
+~> **NOTE:** Values specified in a `filter` are joined with an `or` clause, while values across multiple `filter` blocks are joined with an `and` clause. For more information, see [Targets for AWS FIS](https://docs.aws.amazon.com/fis/latest/userguide/targets.html#target-filters).
+
+#### `resourceTag`
+
+* `key` - (Required) Tag key.
+* `value` - (Required) Tag value.
+
+### `logConfiguration`
+
+* `logSchemaVersion` - (Required) The schema version. See [documentation](https://docs.aws.amazon.com/fis/latest/userguide/monitoring-logging.html#experiment-log-schema) for the list of schema versions.
+* `cloudwatchLogsConfiguration` - (Optional) The configuration for experiment logging to Amazon CloudWatch Logs. See below.
+* `s3Configuration` - (Optional) The configuration for experiment logging to Amazon S3. See below.
+
+#### `cloudwatchLogsConfiguration`
+
+* `logGroupArn` - (Required) The Amazon Resource Name (ARN) of the destination Amazon CloudWatch Logs log group.
+
+#### `s3Configuration`
+
+* `bucketName` - (Required) The name of the destination bucket.
+* `prefix` - (Optional) The bucket prefix.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Experiment Template ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FIS Experiment Templates using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FIS Experiment Templates using the `id`.
For example:
+
+```console
+% terraform import aws_fis_experiment_template.template EXT123AbCdEfGhIjK
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/flow_log.html.markdown b/website/docs/cdktf/typescript/r/flow_log.html.markdown
new file mode 100644
index 00000000000..c2179c7aa64
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/flow_log.html.markdown
@@ -0,0 +1,331 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_flow_log"
+description: |-
+  Provides a VPC/Subnet/ENI Flow Log
+---
+
+
+
+# Resource: aws_flow_log
+
+Provides a VPC/Subnet/ENI/Transit Gateway/Transit Gateway Attachment Flow Log to capture IP traffic for a specific network
+interface, subnet, or VPC. Logs are sent to a CloudWatch Log Group, an S3 bucket, or Amazon Kinesis Data Firehose.
+
+## Example Usage
+
+### CloudWatch Logging
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { FlowLog } from "./.gen/providers/aws/flow-log";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new CloudwatchLogGroup(this, "example", {
+      name: "example",
+    });
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["vpc-flow-logs.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_2",
+      {
+        statement: [
+          {
+            actions: [
+              "logs:CreateLogGroup",
+              "logs:CreateLogStream",
+              "logs:PutLogEvents",
+              "logs:DescribeLogGroups",
+              "logs:DescribeLogStreams",
+            ],
+            effect: "Allow",
+            resources: ["*"],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsIamRoleExample = new IamRole(this, "example_3", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "example",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    const awsIamRolePolicyExample = new IamRolePolicy(this, "example_4", {
+      name: "example",
+      policy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+      role: Token.asString(awsIamRoleExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyExample.overrideLogicalId("example");
+    const awsFlowLogExample = new FlowLog(this, "example_5", {
+      iamRoleArn: Token.asString(awsIamRoleExample.arn),
+      logDestination: example.arn,
+      trafficType: "ALL",
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsFlowLogExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Amazon Kinesis Data Firehose logging
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { FlowLog } from "./.gen/providers/aws/flow-log";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    const awsS3BucketAclExample = new S3BucketAcl(this, "example_1", {
+      acl: "private",
+      bucket: example.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketAclExample.overrideLogicalId("example");
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["firehose.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    // The converter emitted these policy fields at the top level; they belong
+    // inside a `statement` block, as in the CloudWatch example above.
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_3",
+      {
+        statement: [
+          {
+            actions: [
+              "logs:CreateLogDelivery",
+              "logs:DeleteLogDelivery",
+              "logs:ListLogDeliveries",
+              "logs:GetLogDelivery",
+              "firehose:TagDeliveryStream",
+            ],
+            effect: "Allow",
+            resources: ["*"],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsIamRoleExample = new IamRole(this, "example_4", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "firehose_test_role",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    const awsIamRolePolicyExample = new IamRolePolicy(this, "example_5", {
+      name: "test",
+      policy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+      role: Token.asString(awsIamRoleExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyExample.overrideLogicalId("example");
+    const awsKinesisFirehoseDeliveryStreamExample =
+      new KinesisFirehoseDeliveryStream(this, "example_6", {
+        destination: "extended_s3",
+        extendedS3Configuration: {
+          bucketArn: example.arn,
+          roleArn: Token.asString(awsIamRoleExample.arn),
+        },
+        name: "kinesis_firehose_test",
+        tags: {
+          LogDeliveryEnabled: "true",
+        },
+      });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsKinesisFirehoseDeliveryStreamExample.overrideLogicalId("example");
+    const awsFlowLogExample = new FlowLog(this, "example_7", {
+      logDestination: Token.asString(
+        awsKinesisFirehoseDeliveryStreamExample.arn
+      ),
+      logDestinationType: "kinesis-data-firehose",
+      trafficType: "ALL",
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFlowLogExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### S3 Logging
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FlowLog } from "./.gen/providers/aws/flow-log";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    const awsFlowLogExample = new FlowLog(this, "example_1", {
+      logDestination: example.arn,
+      logDestinationType: "s3",
+      trafficType: "ALL",
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFlowLogExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### S3 Logging in Apache Parquet format with per-hour partitions
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FlowLog } from "./.gen/providers/aws/flow-log";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    const awsFlowLogExample = new FlowLog(this, "example_1", {
+      destinationOptions: {
+        fileFormat: "parquet",
+        perHourPartition: true,
+      },
+      logDestination: example.arn,
+      logDestinationType: "s3",
+      trafficType: "ALL",
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFlowLogExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** One of `eniId`, `subnetId`, `transitGatewayId`, `transitGatewayAttachmentId`, or `vpcId` must be specified.
+
+This resource supports the following arguments:
+
+* `trafficType` - (Required) The type of traffic to capture. Valid values: `accept`, `reject`, `all`.
+* `deliverCrossAccountRole` - (Optional) ARN of the IAM role that allows Amazon EC2 to publish flow logs across accounts.
+* `eniId` - (Optional) Elastic Network Interface ID to attach to
+* `iamRoleArn` - (Optional) The ARN for the IAM role that's used to post flow logs to a CloudWatch Logs log group
+* `logDestinationType` - (Optional) The type of the logging destination. Valid values: `cloudWatchLogs`, `s3`, `kinesisDataFirehose`.
Default: `cloudWatchLogs`. +* `logDestination` - (Optional) The ARN of the logging destination. Either `logDestination` or `logGroupName` must be set. +* `logGroupName` - (Optional) **Deprecated:** Use `logDestination` instead. The name of the CloudWatch log group. Either `logGroupName` or `logDestination` must be set. +* `subnetId` - (Optional) Subnet ID to attach to +* `transitGatewayId` - (Optional) Transit Gateway ID to attach to +* `transitGatewayAttachmentId` - (Optional) Transit Gateway Attachment ID to attach to +* `vpcId` - (Optional) VPC ID to attach to +* `logFormat` - (Optional) The fields to include in the flow log record, in the order in which they should appear. +* `maxAggregationInterval` - (Optional) The maximum interval of time + during which a flow of packets is captured and aggregated into a flow + log record. Valid Values: `60` seconds (1 minute) or `600` seconds (10 + minutes). Default: `600`. When `transitGatewayId` or `transitGatewayAttachmentId` is specified, `maxAggregationInterval` *must* be 60 seconds (1 minute). +* `destinationOptions` - (Optional) Describes the destination options for a flow log. More details below. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### destination_options + +Describes the destination options for a flow log. + +* `fileFormat` - (Optional) The format for the flow log. Default value: `plainText`. Valid values: `plainText`, `parquet`. +* `hiveCompatiblePartitions` - (Optional) Indicates whether to use Hive-compatible prefixes for flow logs stored in Amazon S3. Default value: `false`. +* `perHourPartition` - (Optional) Indicates whether to partition the flow log per hour. This reduces the cost and response time for queries. Default value: `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Flow Log ID +* `arn` - The ARN of the Flow Log. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Flow Logs using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Flow Logs using the `id`. 
For example:
+
+```console
+% terraform import aws_flow_log.test_flow_log fl-1a2b3c4d
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fms_admin_account.html.markdown b/website/docs/cdktf/typescript/r/fms_admin_account.html.markdown
new file mode 100644
index 00000000000..aedc0b5af9b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fms_admin_account.html.markdown
@@ -0,0 +1,69 @@
+---
+subcategory: "FMS (Firewall Manager)"
+layout: "aws"
+page_title: "AWS: aws_fms_admin_account"
+description: |-
+  Provides a resource to associate/disassociate an AWS Firewall Manager administrator account
+---
+
+
+
+# Resource: aws_fms_admin_account
+
+Provides a resource to associate/disassociate an AWS Firewall Manager administrator account. This operation must be performed in the `usEast1` region.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FmsAdminAccount } from "./.gen/providers/aws/fms-admin-account";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FmsAdminAccount(this, "example", {});
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accountId` - (Optional) The AWS account ID to associate with AWS Firewall Manager as the AWS Firewall Manager administrator account. This can be an AWS Organizations master account or a member account. Defaults to the current account. Must be configured to perform drift detection.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS account ID of the AWS Firewall Manager administrator account.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Firewall Manager administrator account association using the account ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Firewall Manager administrator account association using the account ID. For example:
+
+```console
+% terraform import aws_fms_admin_account.example 123456789012
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fms_policy.html.markdown b/website/docs/cdktf/typescript/r/fms_policy.html.markdown
new file mode 100644
index 00000000000..2fd3240d2b0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fms_policy.html.markdown
@@ -0,0 +1,153 @@
+---
+subcategory: "FMS (Firewall Manager)"
+layout: "aws"
+page_title: "AWS: aws_fms_policy"
+description: |-
+  Provides a resource to create an AWS Firewall Manager policy
+---
+
+
+
+# Resource: aws_fms_policy
+
+Provides a resource to create an AWS Firewall Manager policy. You need to be using AWS Organizations and have enabled the Firewall Manager administrator account.
+
+~> **NOTE:** Due to limitations with testing, this resource is provided as best effort.
If you find it useful and have the ability to help test or notice issues, consider reaching out to us on [GitHub](https://github.com/hashicorp/terraform-provider-aws).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FmsPolicy } from "./.gen/providers/aws/fms-policy";
+import { WafregionalRuleGroup } from "./.gen/providers/aws/wafregional-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new WafregionalRuleGroup(this, "example", {
+      metricName: "WAFRuleGroupExample",
+      name: "WAF-Rule-Group-Example",
+    });
+    const awsFmsPolicyExample = new FmsPolicy(this, "example_1", {
+      excludeResourceTags: false,
+      name: "FMS-Policy-Example",
+      remediationEnabled: false,
+      resourceType: "AWS::ElasticLoadBalancingV2::LoadBalancer",
+      securityServicePolicyData: {
+        managedServiceData: Token.asString(
+          Fn.jsonencode({
+            defaultAction: {
+              type: "BLOCK",
+            },
+            overrideCustomerWebACLAssociation: false,
+            ruleGroups: [
+              {
+                id: example.id,
+                overrideAction: {
+                  type: "COUNT",
+                },
+              },
+            ],
+            type: "WAF",
+          })
+        ),
+        type: "WAF",
+      },
+      tags: {
+        Name: "example-fms-policy",
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFmsPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required, Forces new resource) The friendly name of the AWS Firewall Manager Policy.
+* `deleteAllPolicyResources` - (Optional) If true, the request will also perform a clean-up process. Defaults to `true`. More information can be found here: [AWS Firewall Manager delete policy](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_DeletePolicy.html)
+* `deleteUnusedFmManagedResources` - (Optional) If true, Firewall Manager will automatically remove protections from resources that leave the policy scope. Defaults to `false`. More information can be found here: [AWS Firewall Manager policy contents](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_Policy.html)
+* `description` - (Optional) The description of the AWS Network Firewall firewall policy.
+* `excludeMap` - (Optional) A map of lists of accounts and OUs to exclude from the policy.
+* `excludeResourceTags` - (Required, Forces new resource) A boolean value; if true, the tags specified in `resourceTags` are not protected by this policy. If set to false and `resourceTags` are populated, resources that contain tags will be protected by this policy.
+* `includeMap` - (Optional) A map of lists of accounts and OUs to include in the policy.
+* `remediationEnabled` - (Required) A boolean value that indicates whether the policy should be automatically applied to resources that already exist in the account.
+* `resourceTags` - (Optional) A map of resource tags that, if present, will filter protections on resources based on `excludeResourceTags`.
+* `resourceType` - (Optional) A resource type to protect. Conflicts with `resourceTypeList`.
See the [FMS API Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_Policy.html#fms-Type-Policy-ResourceType) for more information about supported values.
+* `resourceTypeList` - (Optional) A list of resource types to protect. Conflicts with `resourceType`. See the [FMS API Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_Policy.html#fms-Type-Policy-ResourceType) for more information about supported values. Lists with only one element are not supported; use `resourceType` instead.
+* `securityServicePolicyData` - (Required) The objects to include in Security Service Policy Data. Documented below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## `excludeMap` Configuration Block
+
+* `account` - (Optional) A list of AWS Organization member Accounts that you want to exclude from this AWS FMS Policy.
+* `orgunit` - (Optional) A list of IDs of the AWS Organizational Units that you want to exclude from this AWS FMS Policy. Specifying an OU is the equivalent of specifying all accounts in the OU and in any of its child OUs, including any child OUs and accounts that are added at a later time.
+
+You can specify inclusions or exclusions, but not both. If you specify an `includeMap`, AWS Firewall Manager applies the policy to all accounts specified by the `includeMap`, and does not evaluate any `excludeMap` specifications. If you do not specify an `includeMap`, then Firewall Manager applies the policy to all accounts except for those specified by the `excludeMap`.
+
+## `includeMap` Configuration Block
+
+* `account` - (Optional) A list of AWS Organization member Accounts that you want to include for this AWS FMS Policy.
+* `orgunit` - (Optional) A list of IDs of the AWS Organizational Units that you want to include for this AWS FMS Policy. Specifying an OU is the equivalent of specifying all accounts in the OU and in any of its child OUs, including any child OUs and accounts that are added at a later time.
+
+You can specify inclusions or exclusions, but not both. If you specify an `includeMap`, AWS Firewall Manager applies the policy to all accounts specified by the `includeMap`, and does not evaluate any `excludeMap` specifications. If you do not specify an `includeMap`, then Firewall Manager applies the policy to all accounts except for those specified by the `excludeMap`.
+
+## `securityServicePolicyData` Configuration Block
+
+* `managedServiceData` - (Optional) Details about the service that are specific to the service type, in JSON format. For service type `shieldAdvanced`, this is an empty string. Examples depending on `type` can be found in the [AWS Firewall Manager SecurityServicePolicyData API Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_SecurityServicePolicyData.html).
+* `policyOption` - (Optional) Contains the Network Firewall firewall policy options to configure a centralized deployment model. Documented below.
+* `type` - (Required, Forces new resource) The service that the policy is using to protect the resources.
For the current list of supported types, please refer to the [AWS Firewall Manager SecurityServicePolicyData API Type Reference](https://docs.aws.amazon.com/fms/2018-01-01/APIReference/API_SecurityServicePolicyData.html#fms-Type-SecurityServicePolicyData-Type). + +## `policyOption` Configuration Block + +* `networkFirewallPolicy` - (Optional) Defines the deployment model to use for the firewall policy. Documented below. +* `thirdpartyFirewallPolicy` - (Optional) Defines the policy options for a third-party firewall policy. Documented below. + +## `networkFirewallPolicy` Configuration Block + +* `firewallDeploymentModel` - (Optional) Defines the deployment model to use for the firewall policy. To use a distributed model, remove the `policyOption` section. Valid values are `centralized` and `distributed`. + +## `thirdpartyFirewallPolicy` Configuration Block + +* `firewallDeploymentModel` - (Optional) Defines the deployment model to use for the third-party firewall policy. Valid values are `centralized` and `distributed`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The AWS account ID of the AWS Firewall Manager administrator account. +* `policyUpdateToken` - A unique identifier for each update to the policy. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Firewall Manager policies using the policy ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Firewall Manager policies using the policy ID. For example: + +```console +% terraform import aws_fms_policy.example 5be49585-a7e3-4c49-dde1-a179fe4a619a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/fsx_backup.html.markdown b/website/docs/cdktf/typescript/r/fsx_backup.html.markdown new file mode 100644 index 00000000000..701f05b3136 --- /dev/null +++ b/website/docs/cdktf/typescript/r/fsx_backup.html.markdown @@ -0,0 +1,193 @@ +--- +subcategory: "FSx" +layout: "aws" +page_title: "AWS: aws_fsx_backup" +description: |- + Manages a FSx Backup. +--- + + + +# Resource: aws_fsx_backup + +Provides a FSx Backup resource. + +## Example Usage + +## Lustre Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { FsxBackup } from "./.gen/providers/aws/fsx-backup";
+import { FsxLustreFileSystem } from "./.gen/providers/aws/fsx-lustre-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new FsxLustreFileSystem(this, "example", {
+      deploymentType: "PERSISTENT_1",
+      perUnitStorageThroughput: 50,
+      storageCapacity: 1200,
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+    });
+    const awsFsxBackupExample = new FsxBackup(this, "example_1", {
+      fileSystemId: example.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFsxBackupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Windows Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxBackup } from "./.gen/providers/aws/fsx-backup";
+import { FsxWindowsFileSystem } from "./.gen/providers/aws/fsx-windows-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new FsxWindowsFileSystem(this, "example", {
+      activeDirectoryId: awsDirectoryServiceDirectoryExample.id,
+      skipFinalBackup: true,
+      storageCapacity: 32,
+      subnetIds: [example1.id],
+      throughputCapacity: 8,
+    });
+    const awsFsxBackupExample = new FsxBackup(this, "example_1", {
+      fileSystemId: example.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFsxBackupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## ONTAP Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxBackup } from "./.gen/providers/aws/fsx-backup";
+import { FsxOntapVolume } from "./.gen/providers/aws/fsx-ontap-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new FsxOntapVolume(this, "example", {
+      junctionPath: "/example",
+      name: "example",
+      sizeInMegabytes: 1024,
+      storageEfficiencyEnabled: true,
+      storageVirtualMachineId: test.id,
+    });
+    const awsFsxBackupExample = new FsxBackup(this, "example_1", {
+      volumeId: example.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFsxBackupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## OpenZFS Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxBackup } from "./.gen/providers/aws/fsx-backup";
+import { FsxOpenzfsFileSystem } from "./.gen/providers/aws/fsx-openzfs-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new FsxOpenzfsFileSystem(this, "example", {
+      deploymentType: "SINGLE_AZ_1",
+      storageCapacity: 64,
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+      throughputCapacity: 64,
+    });
+    const awsFsxBackupExample = new FsxBackup(this, "example_1", {
+      fileSystemId: example.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFsxBackupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+Note - Only `fileSystemId` or `volumeId` can be specified. `fileSystemId` is used for Lustre, Windows and OpenZFS file systems; `volumeId` is used for ONTAP volumes.
+
+* `fileSystemId` - (Optional) The ID of the file system to back up. Required if backing up Lustre, Windows or OpenZFS file systems.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. If you have set `copyTagsToBackups` to true, and you specify one or more tags, no existing file system tags are copied from the file system to the backup.
+* `volumeId` - (Optional) The ID of the volume to back up. Required if backing up an ONTAP volume.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the backup.
+* `id` - Identifier of the backup, e.g., `fs12345678`
+* `kmsKeyId` - The ID of the AWS Key Management Service (AWS KMS) key used to encrypt the backup of the Amazon FSx file system's data at rest.
+* `ownerId` - AWS account identifier that created the file system.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `type` - The type of the file system backup.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10M`)
+* `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Backups using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FSx Backups using the `id`.
For example: + +```console +% terraform import aws_fsx_backup.example fs-543ab12b1ca672f33 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/fsx_data_repository_association.html.markdown b/website/docs/cdktf/typescript/r/fsx_data_repository_association.html.markdown new file mode 100644 index 00000000000..68564f7daee --- /dev/null +++ b/website/docs/cdktf/typescript/r/fsx_data_repository_association.html.markdown @@ -0,0 +1,133 @@ +--- +subcategory: "FSx" +layout: "aws" +page_title: "AWS: aws_fsx_data_repository_association" +description: |- + Manages a FSx for Lustre Data Repository Association. +--- + + + +# Resource: aws_fsx_data_repository_association + +Manages a FSx for Lustre Data Repository Association. See [Linking your file system to an S3 bucket](https://docs.aws.amazon.com/fsx/latest/LustreGuide/create-dra-linked-data-repo.html) for more information. + +~> **NOTE:** Data Repository Associations are only compatible with AWS FSx for Lustre File Systems and `persistent2` deployment type. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FsxDataRepositoryAssociation } from "./.gen/providers/aws/fsx-data-repository-association"; +import { FsxLustreFileSystem } from "./.gen/providers/aws/fsx-lustre-file-system"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new FsxLustreFileSystem(this, "example", { + deploymentType: "PERSISTENT_2", + perUnitStorageThroughput: 125, + storageCapacity: 1200, + subnetIds: [Token.asString(awsSubnetExample.id)], + }); + const awsS3BucketExample = new S3Bucket(this, "example_1", { + bucket: "my-bucket", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketExample.overrideLogicalId("example"); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_2", { + acl: "private", + bucket: Token.asString(awsS3BucketExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + const awsFsxDataRepositoryAssociationExample = + new FsxDataRepositoryAssociation(this, "example_3", { + dataRepositoryPath: "s3://${" + awsS3BucketExample.id + "}", + fileSystemId: example.id, + fileSystemPath: "/my-bucket", + s3: { + autoExportPolicy: { + events: ["NEW", "CHANGED", "DELETED"], + }, + autoImportPolicy: { + events: ["NEW", "CHANGED", "DELETED"], + }, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsFsxDataRepositoryAssociationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `batchImportMetaDataOnCreate` - (Optional) Set to true to run an import data repository task to import metadata from the data repository to the file system after the data repository association is created. Defaults to `false`. 
+* `dataRepositoryPath` - (Required) The path to the Amazon S3 data repository that will be linked to the file system. The path must be an S3 bucket or prefix in the format `s3://myBucket/myPrefix/`. This path specifies where in the S3 data repository files will be imported from or exported to. The same S3 bucket cannot be linked more than once to the same file system.
+* `fileSystemId` - (Required) The ID of the Amazon FSx file system on which to create a data repository association.
+* `fileSystemPath` - (Required) A path on the file system that points to a high-level directory (such as `/ns1/`) or subdirectory (such as `/ns1/subdir/`) that will be mapped 1-1 with `dataRepositoryPath`. The leading forward slash in the name is required. Two data repository associations cannot have overlapping file system paths. For example, if a data repository is associated with file system path `/ns1/`, then you cannot link another data repository with file system path `/ns1/ns2`. This path specifies where in your file system files will be exported from or imported to. This file system directory can be linked to only one Amazon S3 bucket, and no other S3 bucket can be linked to the directory.
+* `importedFileChunkSize` - (Optional) For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. The maximum number of disks that a single file can be striped across is limited by the total number of disks that make up the file system.
+* `s3` - (Optional) See the [`s3` configuration](#s3-arguments) block. Max of 1.
+The configuration for an Amazon S3 data repository linked to an Amazon FSx Lustre file system with a data repository association. The configuration defines which file events (new, changed, or deleted files or directories) are automatically imported from the linked data repository to the file system or automatically exported from the file system to the data repository.
+* `deleteDataInFilesystem` - (Optional) Set to true to delete files from the file system upon deleting this data repository association. Defaults to `false`.
+* `tags` - (Optional) A map of tags to assign to the data repository association. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### S3 arguments
+
+* `autoExportPolicy` - (Optional) Specifies the type of updated objects that will be automatically exported from your file system to the linked S3 bucket. See the [`events` configuration](#events-arguments) block.
+* `autoImportPolicy` - (Optional) Specifies the type of updated objects that will be automatically imported from the linked S3 bucket to your file system. See the [`events` configuration](#events-arguments) block.
+
+#### Events arguments
+
+* `events` - (Optional) A list of file event types to automatically export to your linked S3 bucket or import from the linked S3 bucket. Valid values are `new`, `changed`, `deleted`. Max of 3.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `id` - Identifier of the data repository association, e.g., `dra12345678` +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10M`) +* `update` - (Default `10M`) +* `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Data Repository Associations using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import FSx Data Repository Associations using the `id`. For example: + +```console +% terraform import aws_fsx_data_repository_association.example dra-0b1cfaeca11088b10 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/fsx_file_cache.html.markdown b/website/docs/cdktf/typescript/r/fsx_file_cache.html.markdown new file mode 100644 index 00000000000..3e5c9b2a748 --- /dev/null +++ b/website/docs/cdktf/typescript/r/fsx_file_cache.html.markdown @@ -0,0 +1,158 @@ +--- +subcategory: "FSx" +layout: "aws" +page_title: "AWS: aws_fsx_file_cache" +description: |- + Terraform resource for managing an Amazon File Cache cache. +--- + + + +# Resource: aws_fsx_file_cache + +Terraform resource for managing an Amazon File Cache cache. +See the [Create File Cache](https://docs.aws.amazon.com/fsx/latest/APIReference/API_CreateFileCache.html) for more information. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FsxFileCache } from "./.gen/providers/aws/fsx-file-cache"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new FsxFileCache(this, "example", { + dataRepositoryAssociation: [ + { + dataRepositoryPath: "nfs://filer.domain.com", + dataRepositorySubdirectories: ["test", "test2"], + fileCachePath: "/ns1", + nfs: [ + { + dnsIps: ["192.168.0.1", "192.168.0.2"], + version: "NFS3", + }, + ], + }, + ], + fileCacheType: "LUSTRE", + fileCacheTypeVersion: "2.12", + lustreConfiguration: [ + { + deploymentType: "CACHE_1", + metadataConfiguration: [ + { + storageCapacity: 2400, + }, + ], + perUnitStorageThroughput: 1000, + weeklyMaintenanceStartTime: "2:05:00", + }, + ], + storageCapacity: 1200, + subnetIds: [test1.id], + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `fileCacheType` - The type of cache that you're creating. The only supported value is `lustre`. +* `fileCacheTypeVersion` - The version for the type of cache that you're creating. The only supported value is `212`. +* `storageCapacity` - The storage capacity of the cache in gibibytes (GiB). Valid values are `1200` GiB, `2400` GiB, and increments of `2400` GiB. 
+* `subnetIds` - A list of subnet IDs that the cache will be accessible from. You can specify only one subnet ID.
+
+The following arguments are optional:
+
+* `copyTagsToDataRepositoryAssociations` - A boolean flag indicating whether tags for the cache should be copied to data repository associations. This value defaults to false.
+* `dataRepositoryAssociation` - See the [`dataRepositoryAssociation` configuration](#data-repository-association-arguments) block. Max of 8.
+A list of up to 8 configurations for data repository associations (DRAs) to be created during the cache creation. The DRAs link the cache to either an Amazon S3 data repository or a Network File System (NFS) data repository that supports the NFSv3 protocol. The DRA configurations must meet the following requirements: 1) All configurations on the list must be of the same data repository type, either all S3 or all NFS. A cache can't link to different data repository types at the same time. 2) An NFS DRA must link to an NFS file system that supports the NFSv3 protocol. DRA automatic import and automatic export is not supported.
+* `kmsKeyId` - Specifies the ID of the AWS Key Management Service (AWS KMS) key to use for encrypting data on an Amazon File Cache. If a KmsKeyId isn't specified, the Amazon FSx-managed AWS KMS key for your account is used.
+* `lustreConfiguration` - See the [`lustreConfiguration`](#lustre-configuration-arguments) block. Required when `fileCacheType` is `lustre`.
+* `securityGroupIds` - A list of IDs specifying the security groups to apply to all network interfaces created for Amazon File Cache access.
+* `tags` - (Optional) A map of tags to assign to the file cache. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+#### Data Repository Association arguments
+
+The `dataRepositoryAssociation` configuration block supports the following arguments:
+
+* `fileCachePath` - (Required) A path on the cache that points to a high-level directory (such as /ns1/) or subdirectory (such as /ns1/subdir/) that will be mapped 1-1 with DataRepositoryPath. The leading forward slash in the name is required. Two data repository associations cannot have overlapping cache paths. For example, if a data repository is associated with cache path /ns1/, then you cannot link another data repository with cache path /ns1/ns2. This path specifies where in your cache files will be exported from. This cache directory can be linked to only one data repository, and no other data repository can be linked to the directory. Note: The cache path can only be set to root (/) on an NFS DRA when DataRepositorySubdirectories is specified. If you specify root (/) as the cache path, you can create only one DRA on the cache. The cache path cannot be set to root (/) for an S3 DRA.
+* `dataRepositoryPath` - (Optional) The path to the S3 or NFS data repository that links to the cache.
+* `dataRepositorySubdirectories` - (Optional) A list of NFS Exports that will be linked with this data repository association. The Export paths are in the format /exportpath1. To use this parameter, you must configure DataRepositoryPath as the domain name of the NFS file system. The NFS file system domain name in effect is the root of the subdirectories. Note that DataRepositorySubdirectories is not supported for S3 data repositories. Max of 500.
+* `nfs` - (Optional) See the [`nfs` configuration](#nfs-arguments) block.
+
+#### NFS arguments
+
+The `nfs` configuration block supports the following arguments:
+
+* `version` - (Required) The version of the NFS (Network File System) protocol of the NFS data repository. The only supported value is `nfs3`, which indicates that the data repository must support the NFSv3 protocol.
+* `dnsIps` - (Optional) A list of up to 2 IP addresses of DNS servers used to resolve the NFS file system domain name. The provided IP addresses can either be the IP addresses of a DNS forwarder or resolver that the customer manages and runs inside the customer VPC, or the IP addresses of the on-premises DNS servers.
+
+#### Lustre Configuration arguments
+
+The `lustreConfiguration` configuration block supports the following arguments:
+
+* `deploymentType` - (Required) Specifies the cache deployment type. The only supported value is `cache1`.
+* `metadataConfiguration` - (Required) The configuration for a Lustre MDT (Metadata Target) storage volume. See the [`metadataConfiguration`](#metadata-configuration-arguments) block.
+* `perUnitStorageThroughput` - (Required) Provisions the amount of read and write throughput for each 1 tebibyte (TiB) of cache storage capacity, in MB/s/TiB. The only supported value is `1000`.
+* `weeklyMaintenanceStartTime` - (Optional) A recurring weekly time, in the format `d:hh:mm`. `d` is the day of the week, for which `1` represents Monday and `7` represents Sunday. `hh` is the zero-padded hour of the day (0-23), and `mm` is the zero-padded minute of the hour. For example, 1:05:00 specifies maintenance at 5 AM Monday. See the [ISO week date](https://en.wikipedia.org/wiki/ISO_week_date) for more information.
+
+#### Metadata Configuration arguments
+
+The `metadataConfiguration` configuration block supports the following arguments:
+
+* `storageCapacity` - (Required) The storage capacity of the Lustre MDT (Metadata Target) storage volume in gibibytes (GiB). The only supported value is `2400` GiB.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) for the resource.
+* `dataRepositoryAssociationIds` - A list of IDs of data repository associations that are associated with this cache.
+* `dnsName` - The Domain Name System (DNS) name for the cache.
+* `fileCacheId` - The system-generated, unique ID of the cache.
+* `id` - The system-generated, unique ID of the cache.
+* `networkInterfaceIds` - A list of network interface IDs.
+* `vpcId` - The ID of your virtual private cloud (VPC).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon File Cache cache using the resource `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Amazon File Cache cache using the resource `id`.
For example:
+
+```console
+% terraform import aws_fsx_file_cache.example fc-8012925589
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fsx_lustre_file_system.html.markdown b/website/docs/cdktf/typescript/r/fsx_lustre_file_system.html.markdown
new file mode 100644
index 00000000000..838863b854e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fsx_lustre_file_system.html.markdown
@@ -0,0 +1,150 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_lustre_file_system"
+description: |-
+  Manages a FSx Lustre File System.
+---
+
+
+
+# Resource: aws_fsx_lustre_file_system
+
+Manages a FSx Lustre File System. See the [FSx Lustre Guide](https://docs.aws.amazon.com/fsx/latest/LustreGuide/what-is.html) for more information.
+
+~> **NOTE:** `autoImportPolicy`, `exportPath`, `importPath` and `importedFileChunkSize` are not supported with the `persistent2` deployment type. Use `awsFsxDataRepositoryAssociation` instead.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxLustreFileSystem } from "./.gen/providers/aws/fsx-lustre-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxLustreFileSystem(this, "example", {
+      importPath: "s3://${" + awsS3BucketExample.bucket + "}",
+      storageCapacity: 1200,
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `storageCapacity` - (Optional) The storage capacity (GiB) of the file system. Minimum of `1200`. See more details at [Allowed values for Fsx storage capacity](https://docs.aws.amazon.com/fsx/latest/APIReference/API_CreateFileSystem.html#FSx-CreateFileSystem-request-StorageCapacity). Update is allowed only for `scratch2`, `persistent1` and `persistent2` deployment types. See more details at [Fsx Storage Capacity Update](https://docs.aws.amazon.com/fsx/latest/APIReference/API_UpdateFileSystem.html#FSx-UpdateFileSystem-request-StorageCapacity). Required when not creating a filesystem from a backup.
+* `subnetIds` - (Required) A list of IDs for the subnets that the file system will be accessible from. File systems currently support only one subnet. The file server is also launched in that subnet's Availability Zone.
+* `backupId` - (Optional) The ID of the source backup to create the filesystem from.
+* `exportPath` - (Optional) S3 URI (with optional prefix) where the root of your Amazon FSx file system is exported. Can only be specified with `importPath` argument and the path must use the same Amazon S3 bucket as specified in `importPath`. Set equal to `importPath` to overwrite files on export. Defaults to `s3://{IMPORT BUCKET}/FSxLustre{CREATION TIMESTAMP}`. Only supported on `persistent1` deployment types.
+* `importPath` - (Optional) S3 URI (with optional prefix) that you're using as the data repository for your FSx for Lustre file system. For example, `s3://exampleBucket/optionalPrefix/`. Only supported on `persistent1` deployment types.
+* `importedFileChunkSize` - (Optional) For files imported from a data repository, this value determines the stripe count and maximum amount of data per file (in MiB) stored on a single physical disk. Can only be specified with `importPath` argument. Defaults to `1024`. Minimum of `1` and maximum of `512000`. Only supported on `persistent1` deployment types.
+* `securityGroupIds` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `weeklyMaintenanceStartTime` - (Optional) The preferred start time (in `d:hh:mm` format) to perform weekly maintenance, in the UTC time zone.
+* `deploymentType` - (Optional) The filesystem deployment type. One of: `scratch1`, `scratch2`, `persistent1`, `persistent2`.
+* `kmsKeyId` - (Optional) ARN for the KMS Key to encrypt the file system at rest, applicable for `persistent1` and `persistent2` deployment_type. Defaults to an AWS managed KMS Key.
+* `perUnitStorageThroughput` - (Optional) Describes the amount of read and write throughput for each 1 tebibyte of storage, in MB/s/TiB, required for the `persistent1` and `persistent2` deployment_type. Valid values for `persistent1` deployment_type and `ssd` storage_type are 50, 100, 200. Valid values for `persistent1` deployment_type and `hdd` storage_type are 12, 40. Valid values for `persistent2` deployment_type and `ssd` storage_type are 125, 250, 500, 1000.
+* `automaticBackupRetentionDays` - (Optional) The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days. Only valid for `persistent1` and `persistent2` deployment_type.
+* `storageType` - (Optional) The filesystem storage type. Either `ssd` or `hdd`, defaults to `ssd`. `hdd` is only supported on `persistent1` deployment types.
+* `driveCacheType` - (Optional) The type of drive cache used by `persistent1` filesystems that are provisioned with `hdd` storage_type. Required for `hdd` storage_type, set to either `read` or `none`.
+* `dailyAutomaticBackupStartTime` - (Optional) A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Only valid for `persistent1` and `persistent2` deployment_type. Requires `automaticBackupRetentionDays` to be set.
+* `autoImportPolicy` - (Optional) How Amazon FSx keeps your file and directory listings up to date as you add or modify objects in your linked S3 bucket. See [Auto Import Data Repo](https://docs.aws.amazon.com/fsx/latest/LustreGuide/autoimport-data-repo.html) for more details. Only supported on `persistent1` deployment types.
+* `copyTagsToBackups` - (Optional) A boolean flag indicating whether tags for the file system should be copied to backups. Applicable for `persistent1` and `persistent2` deployment_type. The default value is false.
+* `dataCompressionType` - (Optional) Sets the data compression configuration for the file system. Valid values are `lz4` and `none`. Default value is `none`.
Unsetting this value reverts the compression type back to `none`.
+* `fileSystemTypeVersion` - (Optional) Sets the Lustre version for the file system that you're creating. Valid values are `2.10` for `scratch1`, `scratch2` and `persistent1` deployment types; `2.12` is valid for all deployment types.
+* `logConfiguration` - (Optional) The Lustre logging configuration used when creating an Amazon FSx for Lustre file system. When logging is enabled, Lustre logs error and warning events for data repositories associated with your file system to Amazon CloudWatch Logs.
+* `rootSquashConfiguration` - (Optional) The Lustre root squash configuration used when creating an Amazon FSx for Lustre file system. When enabled, root squash restricts root-level access from clients that try to access your file system as a root user. A combined sketch of the `logConfiguration` and `rootSquashConfiguration` blocks is shown after the attribute list below.
+
+### log_configuration
+
+* `destination` - (Optional) The Amazon Resource Name (ARN) that specifies the destination of the logs. The name of the Amazon CloudWatch Logs log group must begin with the `/aws/fsx` prefix. If you do not provide a destination, Amazon FSx will create and use a log stream in the CloudWatch Logs `/aws/fsx/lustre` log group.
+* `level` - (Optional) Sets which data repository events are logged by Amazon FSx. Valid values are `warnOnly`, `failureOnly`, `errorOnly`, `warnError` and `disabled`. Default value is `disabled`.
+
+### root_squash_configuration
+
+* `noSquashNids` - (Optional) When root squash is enabled, you can optionally specify an array of NIDs of clients for which root squash does not apply. A client NID is a Lustre Network Identifier used to uniquely identify a client. You can specify the NID as either a single address or a range of addresses: 1. A single address is described in standard Lustre NID format by specifying the client’s IP address followed by the Lustre network ID (for example, 10.0.1.6@tcp). 2. An address range is described using a dash to separate the range (for example, 10.0.[2-10].[1-255]@tcp).
+* `rootSquash` - (Optional) You enable root squash by setting a user ID (UID) and group ID (GID) for the file system in the format UID:GID (for example, 365534:65534). The UID and GID values can range from 0 to 4294967294.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `dnsName` - DNS name for the file system, e.g., `fs12345678FsxUsWest2AmazonawsCom`
+* `id` - Identifier of the file system, e.g., `fs12345678`
+* `networkInterfaceIds` - Set of Elastic Network Interface identifiers from which the file system is accessible. As explained in the [documentation](https://docs.aws.amazon.com/fsx/latest/LustreGuide/mounting-on-premises.html), the first network interface returned is the primary network interface.
+* `mountName` - The value to be used when mounting the filesystem.
+* `ownerId` - AWS account identifier that created the file system.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcId` - Identifier of the Virtual Private Cloud for the file system.
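+
+As a hedged illustration of the `logConfiguration` and `rootSquashConfiguration` blocks described above, the following sketch wires both into a single file system. It is not output of `cdktf convert`; the stack name, the log group, and the external `awsSubnetExample` reference are assumptions in the style of the generated examples, and the string values use the raw API casing (e.g. `WARN_ERROR` for the `warnError` level listed above), as the generated code examples do for enum-like values:
+
+```typescript
+// Hypothetical sketch - not generated by 'cdktf convert'. It assumes the same
+// ./.gen provider bindings and an externally defined awsSubnetExample, as in
+// the generated examples above.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { FsxLustreFileSystem } from "./.gen/providers/aws/fsx-lustre-file-system";
+class LustreLoggingSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // The destination log group name must begin with the /aws/fsx prefix.
+    const logGroup = new CloudwatchLogGroup(this, "lustre_logs", {
+      name: "/aws/fsx/lustre-example",
+    });
+    new FsxLustreFileSystem(this, "example", {
+      deploymentType: "PERSISTENT_2",
+      perUnitStorageThroughput: 125,
+      storageCapacity: 1200,
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+      // Log WARN and ERROR data repository events to the group above.
+      logConfiguration: {
+        destination: logGroup.arn,
+        level: "WARN_ERROR",
+      },
+      // Squash root access to UID:GID 65534:65534, exempting one client NID.
+      rootSquashConfiguration: {
+        rootSquash: "65534:65534",
+        noSquashNids: ["10.0.1.6@tcp"],
+      },
+    });
+  }
+}
+
+```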
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FSx File Systems using the `id`. For example:
+
+```console
+% terraform import aws_fsx_lustre_file_system.example fs-543ab12b1ca672f33
+```
+
+Certain resource arguments, like `securityGroupIds`, do not have a FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxLustreFileSystem } from "./.gen/providers/aws/fsx-lustre-file-system";
+interface MyConfig {
+  subnetIds: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new FsxLustreFileSystem(this, "example", {
+      lifecycle: {
+        ignoreChanges: [securityGroupIds],
+      },
+      securityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+      subnetIds: config.subnetIds,
+    });
+  }
+}
+
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fsx_ontap_file_system.html.markdown b/website/docs/cdktf/typescript/r/fsx_ontap_file_system.html.markdown
new file mode 100644
index 00000000000..b6feb66ba48
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fsx_ontap_file_system.html.markdown
@@ -0,0 +1,156 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_ontap_file_system"
+description: |-
+  Manages an Amazon FSx for NetApp ONTAP file system.
+---
+
+
+
+# Resource: aws_fsx_ontap_file_system
+
+Manages an Amazon FSx for NetApp ONTAP file system.
+See the [FSx ONTAP User Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/what-is-fsx-ontap.html) for more information.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxOntapFileSystem } from "./.gen/providers/aws/fsx-ontap-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxOntapFileSystem(this, "test", {
+      deploymentType: "MULTI_AZ_1",
+      preferredSubnetId: test1.id,
+      storageCapacity: 1024,
+      subnetIds: [test1.id, test2.id],
+      throughputCapacity: 512,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `storageCapacity` - (Optional) The storage capacity (GiB) of the file system. Valid values are between `1024` and `196608`.
+* `subnetIds` - (Required) A list of IDs for the subnets that the file system will be accessible from. Up to two subnets can be provided.
+* `preferredSubnetId` - (Required) The ID for a subnet. A subnet is a range of IP addresses in your virtual private cloud (VPC).
+* `securityGroupIds` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `weeklyMaintenanceStartTime` - (Optional) The preferred start time (in `d:hh:mm` format) to perform weekly maintenance, in the UTC time zone.
+* `deploymentType` - (Optional) - The filesystem deployment type. Supports `multiAz1` and `singleAz1`.
+* `kmsKeyId` - (Optional) ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
+* `automaticBackupRetentionDays` - (Optional) The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
+* `dailyAutomaticBackupStartTime` - (Optional) A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automaticBackupRetentionDays` to be set.
+* `diskIopsConfiguration` - (Optional) The SSD IOPS configuration for the Amazon FSx for NetApp ONTAP file system. See [Disk Iops Configuration](#disk-iops-configuration) below.
+* `endpointIpAddressRange` - (Optional) Specifies the IP address range in which the endpoints to access your file system will be created. By default, Amazon FSx selects an unused IP address range for you from the 198.19.* range.
+* `storageType` - (Optional) - The filesystem storage type. Defaults to `ssd`.
+* `fsxAdminPassword` - (Optional) The ONTAP administrative password for the fsxadmin user that you can use to administer your file system using the ONTAP CLI and REST API.
+* `routeTableIds` - (Optional) Specifies the VPC route tables in which your file system's endpoints will be created. You should specify all VPC route tables associated with the subnets in which your clients are located. By default, Amazon FSx selects your VPC's default route table.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `throughputCapacity` - (Required) Sets the throughput capacity (in MBps) for the file system that you're creating. Valid values are `128`, `256`, `512`, `1024`, `2048`, and `4096`.
+
+### Disk Iops Configuration
+
+* `iops` - (Optional) - The total number of SSD IOPS provisioned for the file system.
+* `mode` - (Optional) - Specifies whether the number of IOPS for the file system is configured automatically or by the user. Valid values are `automatic` and `userProvisioned`. Default value is `automatic`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `dnsName` - DNS name for the file system, e.g., `fs12345678FsxUsWest2AmazonawsCom`
+* `endpoints` - The endpoints that are used to access data or to manage the file system using the NetApp ONTAP CLI, REST API, or NetApp SnapMirror. See [Endpoints](#endpoints) below.
+* `id` - Identifier of the file system, e.g., `fs12345678`
+* `networkInterfaceIds` - Set of Elastic Network Interface identifiers from which the file system is accessible. The first network interface returned is the primary network interface.
+* `ownerId` - AWS account identifier that created the file system.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcId` - Identifier of the Virtual Private Cloud for the file system.
+
+### Endpoints
+
+* `intercluster` - An endpoint for managing your file system by setting up NetApp SnapMirror with other ONTAP systems. See [Endpoint](#endpoint).
+* `management` - An endpoint for managing your file system using the NetApp ONTAP CLI and NetApp ONTAP API. See [Endpoint](#endpoint).
+
+#### Endpoint
+
+* `dnsName` - The Domain Name Service (DNS) name for the file system. You can mount your file system using its DNS name.
+* `ipAddresses` - IP addresses of the file system endpoint.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60M`)
+* `update` - (Default `60M`)
+* `delete` - (Default `60M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FSx File Systems using the `id`. For example:
+
+```console
+% terraform import aws_fsx_ontap_file_system.example fs-543ab12b1ca672f33
+```
+
+Certain resource arguments, like `securityGroupIds`, do not have a FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { FsxOntapFileSystem } from "./.gen/providers/aws/fsx-ontap-file-system"; +interface MyConfig { + deploymentType: any; + preferredSubnetId: any; + subnetIds: any; + throughputCapacity: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new FsxOntapFileSystem(this, "example", { + lifecycle: { + ignoreChanges: [securityGroupIds], + }, + securityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + deploymentType: config.deploymentType, + preferredSubnetId: config.preferredSubnetId, + subnetIds: config.subnetIds, + throughputCapacity: config.throughputCapacity, + }); + } +} + +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/fsx_ontap_storage_virtual_machine.html.markdown b/website/docs/cdktf/typescript/r/fsx_ontap_storage_virtual_machine.html.markdown new file mode 100644 index 00000000000..5e32ba97718 --- /dev/null +++ b/website/docs/cdktf/typescript/r/fsx_ontap_storage_virtual_machine.html.markdown @@ -0,0 +1,187 @@ +--- +subcategory: "FSx" +layout: "aws" +page_title: "AWS: aws_fsx_ontap_storage_virtual_machine" +description: |- + Manages a FSx Storage Virtual Machine. +--- + + + +# Resource: aws_fsx_ontap_storage_virtual_machine + +Manages a FSx Storage Virtual Machine. +See the [FSx ONTAP User Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/managing-svms.html) for more information. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FsxOntapStorageVirtualMachine } from "./.gen/providers/aws/fsx-ontap-storage-virtual-machine"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new FsxOntapStorageVirtualMachine(this, "test", { + fileSystemId: Token.asString(awsFsxOntapFileSystemTest.id), + name: "test", + }); + } +} + +``` + +### Using a Self-Managed Microsoft Active Directory + +Additional information for using AWS Directory Service with ONTAP File Systems can be found in the [FSx ONTAP Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/self-managed-AD.html). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { FsxOntapStorageVirtualMachine } from "./.gen/providers/aws/fsx-ontap-storage-virtual-machine";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxOntapStorageVirtualMachine(this, "test", {
+      activeDirectoryConfiguration: {
+        netbiosName: "mysvm",
+        selfManagedActiveDirectoryConfiguration: {
+          dnsIps: ["10.0.0.111", "10.0.0.222"],
+          domainName: "corp.example.com",
+          password: "avoid-plaintext-passwords",
+          username: "Admin",
+        },
+      },
+      fileSystemId: Token.asString(awsFsxOntapFileSystemTest.id),
+      name: "mysvm",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `activeDirectoryConfiguration` - (Optional) Configuration block that Amazon FSx uses to join the FSx ONTAP Storage Virtual Machine (SVM) to your Microsoft Active Directory (AD) directory. Detailed below.
+* `fileSystemId` - (Required) The ID of the Amazon FSx ONTAP File System that this SVM will be created on.
+* `name` - (Required) The name of the SVM. You can use a maximum of 47 alphanumeric characters, plus the underscore (_) special character.
+* `rootVolumeSecurityStyle` - (Optional) Specifies the root volume security style. Valid values are `unix`, `ntfs`, and `mixed`. All volumes created under this SVM will inherit the root security style unless the security style is specified on the volume. Default value is `unix`.
+* `tags` - (Optional) A map of tags to assign to the storage virtual machine. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### active_directory_configuration
+
+The `activeDirectoryConfiguration` configuration block supports the following arguments:
+
+* `netbiosName` - (Required) The NetBIOS name of the Active Directory computer object that will be created for your SVM. This is often the same as the SVM name but can be different. AWS limits to 15 characters because of standard NetBIOS naming limits.
+* `selfManagedActiveDirectoryConfiguration` - (Optional) Configuration block that Amazon FSx uses to join the SVM to your self-managed (including on-premises) Microsoft Active Directory (AD) directory.
+
+### self_managed_active_directory_configuration
+
+The `selfManagedActiveDirectoryConfiguration` configuration block supports the following arguments:
+
+* `dnsIps` - (Required) A list of up to three IP addresses of DNS servers or domain controllers in the self-managed AD directory.
+* `domainName` - (Required) The fully qualified domain name of the self-managed AD directory. For example, `corpExampleCom`.
+* `password` - (Required) The password for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `username` - (Required) The user name for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `fileSystemAdministratorsGroup` - (Optional) The name of the domain group whose members are granted administrative privileges for the SVM. The group that you specify must already exist in your domain. Defaults to `Domain Admins`.
+* `organizationalUnitDistinguishedName` - (Optional) The fully qualified distinguished name of the organizational unit within your self-managed AD directory that the SVM will join. For example, `ou=fSx,dc=yourdomain,dc=corp,dc=com`.
Only accepts OU as the direct parent of the SVM. If none is provided, the SVM is created in the default location of your self-managed AD directory. To learn more, see [RFC 2253](https://tools.ietf.org/html/rfc2253).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the storage virtual machine.
+* `endpoints` - The endpoints that are used to access data or to manage the storage virtual machine using the NetApp ONTAP CLI, REST API, or NetApp SnapMirror. See [Endpoints](#endpoints) below.
+* `id` - Identifier of the storage virtual machine, e.g., `svm12345678`
+* `subtype` - Describes the SVM's subtype, e.g. `default`
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uuid` - The SVM's UUID (universally unique identifier).
+
+### Endpoints
+
+* `iscsi` - An endpoint for accessing data on your storage virtual machine via iSCSI protocol. See [Endpoint](#endpoint).
+* `management` - An endpoint for managing your file system using the NetApp ONTAP CLI and NetApp ONTAP API. See [Endpoint](#endpoint).
+* `nfs` - An endpoint for accessing data on your storage virtual machine via NFS protocol. See [Endpoint](#endpoint).
+* `smb` - An endpoint for accessing data on your storage virtual machine via SMB protocol. This is only set if an active_directory_configuration has been set. See [Endpoint](#endpoint).
+
+#### Endpoint
+
+* `dnsName` - The Domain Name Service (DNS) name for the storage virtual machine. You can mount your storage virtual machine using its DNS name.
+* `ipAddresses` - IP addresses of the storage virtual machine endpoint.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `delete` - (Default `30M`)
+* `update` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Storage Virtual Machine using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FSx Storage Virtual Machine using the `id`. For example:
+
+```console
+% terraform import aws_fsx_ontap_storage_virtual_machine.example svm-12345678abcdef123
+```
+
+Certain resource arguments, like `svmAdminPassword` and the `selfManagedActiveDirectoryConfiguration` configuration block `password`, do not have a FSx API method for reading the information after creation. If these arguments are set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FsxOntapStorageVirtualMachine } from "./.gen/providers/aws/fsx-ontap-storage-virtual-machine"; +interface MyConfig { + fileSystemId: any; + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new FsxOntapStorageVirtualMachine(this, "example", { + lifecycle: { + ignoreChanges: [svmAdminPassword], + }, + svmAdminPassword: "avoid-plaintext-passwords", + fileSystemId: config.fileSystemId, + name: config.name, + }); + } +} + +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/fsx_ontap_volume.html.markdown b/website/docs/cdktf/typescript/r/fsx_ontap_volume.html.markdown new file mode 100644 index 00000000000..30836ae8eb5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/fsx_ontap_volume.html.markdown @@ -0,0 +1,143 @@ +--- +subcategory: "FSx" +layout: "aws" +page_title: "AWS: aws_fsx_ontap_volume" +description: |- + Manages a FSx ONTAP Volume. +--- + + + +# Resource: aws_fsx_ontap_volume + +Manages a FSx ONTAP Volume. +See the [FSx ONTAP User Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/managing-volumes.html) for more information. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FsxOntapVolume } from "./.gen/providers/aws/fsx-ontap-volume"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new FsxOntapVolume(this, "test", { + junctionPath: "/test", + name: "test", + sizeInMegabytes: 1024, + storageEfficiencyEnabled: true, + storageVirtualMachineId: Token.asString( + awsFsxOntapStorageVirtualMachineTest.id + ), + }); + } +} + +``` + +### Using Tiering Policy + +Additional information on tiering policy with ONTAP Volumes can be found in the [FSx ONTAP Guide](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/managing-volumes.html). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { FsxOntapVolume } from "./.gen/providers/aws/fsx-ontap-volume"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new FsxOntapVolume(this, "test", { + junctionPath: "/test", + name: "test", + sizeInMegabytes: 1024, + storageEfficiencyEnabled: true, + storageVirtualMachineId: Token.asString( + awsFsxOntapStorageVirtualMachineTest.id + ), + tieringPolicy: { + coolingPeriod: 31, + name: "AUTO", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the Volume. You can use a maximum of 203 alphanumeric characters, plus the underscore (_) special character. 
+* `junctionPath` - (Optional) Specifies the location in the storage virtual machine's namespace where the volume is mounted. The `junctionPath` must have a leading forward slash, such as `/vol3`.
+* `ontapVolumeType` - (Optional) Specifies the type of volume. Valid values are `rw` and `dp`. Default value is `rw`. These can be set by the ONTAP CLI or API. This setting is used as part of migration and replication; see [Migrating to Amazon FSx for NetApp ONTAP](https://docs.aws.amazon.com/fsx/latest/ONTAPGuide/migrating-fsx-ontap.html).
+* `securityStyle` - (Optional) Specifies the volume security style. Valid values are `unix`, `ntfs`, and `mixed`.
+* `sizeInMegabytes` - (Required) Specifies the size of the volume, in megabytes (MB), that you are creating.
+* `skipFinalBackup` - (Optional) When enabled, will skip the default final backup taken when the volume is deleted. This configuration must be applied separately before attempting to delete the resource to have the desired behavior. Defaults to `false`.
+* `storageEfficiencyEnabled` - (Optional) Set to true to enable deduplication, compression, and compaction storage efficiency features on the volume.
+* `storageVirtualMachineId` - (Required) Specifies the storage virtual machine in which to create the volume.
+* `tags` - (Optional) A map of tags to assign to the volume. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### tiering_policy
+
+The `tieringPolicy` configuration block supports the following arguments:
+
+* `name` - (Required) Specifies the tiering policy for the ONTAP volume for moving data to the capacity pool storage. Valid values are `snapshotOnly`, `auto`, `all`, `none`. Default value is `snapshotOnly`.
+* `coolingPeriod` - (Optional) Specifies the number of days that user data in a volume must remain inactive before it is considered "cold" and moved to the capacity pool. Used with `auto` and `snapshotOnly` tiering policies only. Valid values are whole numbers between 2 and 183. Default values are 31 days for `auto` and 2 days for `snapshotOnly`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the volume.
+* `id` - Identifier of the volume, e.g., `fsvol12345678`
+* `fileSystemId` - Describes the file system for the volume, e.g. `fs12345679`
+* `flexcacheEndpointType` - Specifies the FlexCache endpoint type of the volume. Valid values are `none`, `origin`, `cache`. Default value is `none`. These can be set by the ONTAP CLI or API and are used with the FlexCache feature.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uuid` - The Volume's UUID (universally unique identifier).
+* `volumeType` - The type of volume, currently the only valid value is `ontap`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `delete` - (Default `30M`)
+* `update` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx ONTAP volume using the `id`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FSx ONTAP volume using the `id`. For example:
+
+```console
+% terraform import aws_fsx_ontap_volume.example fsvol-12345678abcdef123
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fsx_openzfs_file_system.html.markdown b/website/docs/cdktf/typescript/r/fsx_openzfs_file_system.html.markdown
new file mode 100644
index 00000000000..245c0746d37
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fsx_openzfs_file_system.html.markdown
@@ -0,0 +1,167 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_openzfs_file_system"
+description: |-
+  Manages an Amazon FSx for OpenZFS file system.
+---
+
+
+
+# Resource: aws_fsx_openzfs_file_system
+
+Manages an Amazon FSx for OpenZFS file system.
+See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxOpenzfsFileSystem } from "./.gen/providers/aws/fsx-openzfs-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxOpenzfsFileSystem(this, "test", {
+      deploymentType: "SINGLE_AZ_1",
+      storageCapacity: 64,
+      subnetIds: [test1.id],
+      throughputCapacity: 64,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deploymentType` - (Required) - The filesystem deployment type. Valid values: `singleAz1` and `singleAz2`.
+* `storageCapacity` - (Required) The storage capacity (GiB) of the file system. Valid values are between `64` and `524288`.
+* `subnetIds` - (Required) A list of IDs for the subnets that the file system will be accessible from. Exactly one subnet needs to be provided.
+* `throughputCapacity` - (Required) Throughput (MB/s) of the file system. Valid values depend on `deploymentType`. Must be one of `64`, `128`, `256`, `512`, `1024`, `2048`, `3072`, `4096` for `singleAz1`. Must be one of `160`, `320`, `640`, `1280`, `2560`, `3840`, `5120`, `7680`, `10240` for `singleAz2`.
+* `automaticBackupRetentionDays` - (Optional) The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
+* `backupId` - (Optional) The ID of the source backup to create the filesystem from.
+* `copyTagsToBackups` - (Optional) A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
+* `copyTagsToVolumes` - (Optional) A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
+* `dailyAutomaticBackupStartTime` - (Optional) A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily.
Requires `automaticBackupRetentionDays` to be set.
+* `diskIopsConfiguration` - (Optional) The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See [Disk Iops Configuration](#disk-iops-configuration) below.
+* `kmsKeyId` - (Optional) ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
+* `rootVolumeConfiguration` - (Optional) The configuration for the root volume of the file system. All other volumes are children of the root volume. See [Root Volume Configuration](#root-volume-configuration) below. A combined sketch follows the [Client Configurations](#client-configurations) list below.
+* `securityGroupIds` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `storageType` - (Optional) The filesystem storage type. Only `ssd` is supported.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `weeklyMaintenanceStartTime` - (Optional) The preferred start time (in `d:hh:mm` format) to perform weekly maintenance, in the UTC time zone.
+
+### Disk Iops Configuration
+
+* `iops` - (Optional) - The total number of SSD IOPS provisioned for the file system.
+* `mode` - (Optional) - Specifies whether the number of IOPS for the file system is configured automatically or by the user. Valid values are `automatic` and `userProvisioned`. Default value is `automatic`.
+
+### Root Volume Configuration
+
+* `copyTagsToSnapshots` - (Optional) - A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
+* `dataCompressionType` - (Optional) - Method used to compress the data on the volume. Valid values are `lz4`, `none`, or `zstd`. Child volumes that don't specify a compression option inherit it from the parent volume. This option on the file system applies to the root volume.
+* `nfsExports` - (Optional) - NFS export configuration for the root volume. Exactly 1 item. See [NFS Exports](#nfs-exports) below.
+* `readOnly` - (Optional) - Specifies whether the volume is read-only. Default is false.
+* `recordSizeKib` - (Optional) - Specifies the record size of an OpenZFS root volume, in kibibytes (KiB). Valid values are `4`, `8`, `16`, `32`, `64`, `128`, `256`, `512`, or `1024` KiB. The default is `128` KiB.
+* `userAndGroupQuotas` - (Optional) - Specify how much storage users or groups can use on the volume. Maximum of 100 items. See [User and Group Quotas](#user-and-group-quotas) below.
+
+### NFS Exports
+
+* `clientConfigurations` - (Required) - A list of configuration objects that contain the client and options for mounting the OpenZFS file system. Maximum of 25 items. See [Client Configurations](#client-configurations) below.
+
+### Client Configurations
+
+* `clients` - (Required) - A value that specifies who can mount the file system. You can provide a wildcard character (*), an IP address (0.0.0.0), or a CIDR address (192.0.2.0/24). By default, Amazon FSx uses the wildcard character when specifying the client.
+* `options` - (Required) - The options to use when mounting the file system. Maximum of 20 items. See the [Linux NFS exports man page](https://linux.die.net/man/5/exports) for more information. `crossmnt` and `sync` are used by default.
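+
+Before moving on to quotas, here is a hand-written sketch (not `cdktf convert` output) of how `rootVolumeConfiguration`, `nfsExports`, and `clientConfigurations` nest, plus one quota entry of the kind documented in the next section; the subnet ID, CIDR, and quota values are illustrative assumptions.
+
+```typescript
+// A minimal sketch, assuming a placeholder subnet ID; the nesting mirrors
+// the Root Volume Configuration blocks documented above.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { FsxOpenzfsFileSystem } from "./.gen/providers/aws/fsx-openzfs-file-system";
+class OpenzfsRootVolumeSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxOpenzfsFileSystem(this, "example", {
+      deploymentType: "SINGLE_AZ_1",
+      storageCapacity: 64,
+      subnetIds: ["subnet-0123456789abcdef0"], // placeholder subnet ID
+      throughputCapacity: 64,
+      rootVolumeConfiguration: {
+        dataCompressionType: "ZSTD", // child volumes inherit unless they override
+        nfsExports: {
+          clientConfigurations: [
+            {
+              clients: "10.0.0.0/24", // restrict mounts to one CIDR block
+              options: ["rw", "crossmnt"],
+            },
+          ],
+        },
+        userAndGroupQuotas: [
+          {
+            id: 1001, // illustrative UID
+            storageCapacityQuotaGib: 32,
+            type: "USER",
+          },
+        ],
+      },
+    });
+  }
+}
+```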
+ +### User and Group Quotas + +* `id` - (Required) - The ID of the user or group. Valid values between `0` and `2147483647` +* `storageCapacityQuotaGib` - (Required) - The amount of storage that the user or group can use in gibibytes (GiB). Valid values between `0` and `2147483647` +* `type` - (Required) - A value that specifies whether the quota applies to a user or group. Valid values are `user` or `group`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name of the file system. +* `dnsName` - DNS name for the file system, e.g., `fs12345678FsxUsWest2AmazonawsCom` +* `id` - Identifier of the file system, e.g., `fs12345678` +* `networkInterfaceIds` - Set of Elastic Network Interface identifiers from which the file system is accessible The first network interface returned is the primary network interface. +* `rootVolumeId` - Identifier of the root volume, e.g., `fsvol12345678` +* `ownerId` - AWS account identifier that created the file system. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `vpcId` - Identifier of the Virtual Private Cloud for the file system. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60M`) +* `update` - (Default `60M`) +* `delete` - (Default `60M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import FSx File Systems using the `id`. For example: + +```console +% terraform import aws_fsx_openzfs_file_system.example fs-543ab12b1ca672f33 +``` + +Certain resource arguments, like `securityGroupIds`, do not have a FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To workaround this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { FsxOpenzfsFileSystem } from "./.gen/providers/aws/fsx-openzfs-file-system";
+interface MyConfig {
+  deploymentType: any;
+  subnetIds: any;
+  throughputCapacity: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new FsxOpenzfsFileSystem(this, "example", {
+      lifecycle: {
+        ignoreChanges: [securityGroupIds],
+      },
+      securityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+      deploymentType: config.deploymentType,
+      subnetIds: config.subnetIds,
+      throughputCapacity: config.throughputCapacity,
+    });
+  }
+}
+
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fsx_openzfs_snapshot.html.markdown b/website/docs/cdktf/typescript/r/fsx_openzfs_snapshot.html.markdown
new file mode 100644
index 00000000000..2018cb18e60
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fsx_openzfs_snapshot.html.markdown
@@ -0,0 +1,143 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_openzfs_snapshot"
+description: |-
+  Manages an Amazon FSx for OpenZFS snapshot.
+---
+
+
+
+# Resource: aws_fsx_openzfs_snapshot
+
+Manages an Amazon FSx for OpenZFS snapshot.
+See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
+
+## Example Usage
+
+### Root volume Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxOpenzfsFileSystem } from "./.gen/providers/aws/fsx-openzfs-file-system";
+import { FsxOpenzfsSnapshot } from "./.gen/providers/aws/fsx-openzfs-snapshot";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new FsxOpenzfsFileSystem(this, "example", {
+      deploymentType: "SINGLE_AZ_1",
+      storageCapacity: 64,
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+      throughputCapacity: 64,
+    });
+    const awsFsxOpenzfsSnapshotExample = new FsxOpenzfsSnapshot(
+      this,
+      "example_1",
+      {
+        name: "example",
+        volumeId: example.rootVolumeId,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsFsxOpenzfsSnapshotExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Child volume Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { FsxOpenzfsFileSystem } from "./.gen/providers/aws/fsx-openzfs-file-system"; +import { FsxOpenzfsSnapshot } from "./.gen/providers/aws/fsx-openzfs-snapshot"; +import { FsxOpenzfsVolume } from "./.gen/providers/aws/fsx-openzfs-volume"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new FsxOpenzfsFileSystem(this, "example", { + deploymentType: "SINGLE_AZ_1", + storageCapacity: 64, + subnetIds: [Token.asString(awsSubnetExample.id)], + throughputCapacity: 64, + }); + const awsFsxOpenzfsVolumeExample = new FsxOpenzfsVolume(this, "example_1", { + name: "example", + parentVolumeId: example.rootVolumeId, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsFsxOpenzfsVolumeExample.overrideLogicalId("example"); + const awsFsxOpenzfsSnapshotExample = new FsxOpenzfsSnapshot( + this, + "example_2", + { + name: "example", + volumeId: Token.asString(awsFsxOpenzfsVolumeExample.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsFsxOpenzfsSnapshotExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the Snapshot. You can use a maximum of 203 alphanumeric characters plus either _ or - or : or . for the name. +* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. If you have set `copyTagsToBackups` to true, and you specify one or more tags, no existing file system tags are copied from the file system to the backup. +* `volumeId` - (Optional) The ID of the volume to snapshot. This can be the root volume or a child volume. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name of the snapshot. +* `id` - Identifier of the snapshot, e.g., `fsvolsnap12345678` +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `delete` - (Default `30M`) +* `update` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx OpenZFS snapshot using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import FSx OpenZFS snapshot using the `id`. 
For example:
+
+```console
+% terraform import aws_fsx_openzfs_snapshot.example fsvolsnap-543ab12b1ca672f33
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fsx_openzfs_volume.html.markdown b/website/docs/cdktf/typescript/r/fsx_openzfs_volume.html.markdown
new file mode 100644
index 00000000000..1ae7500cc43
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fsx_openzfs_volume.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_openzfs_volume"
+description: |-
+  Manages an Amazon FSx for OpenZFS volume.
+---
+
+
+
+# Resource: aws_fsx_openzfs_volume
+
+Manages an Amazon FSx for OpenZFS volume.
+See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxOpenzfsVolume } from "./.gen/providers/aws/fsx-openzfs-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxOpenzfsVolume(this, "test", {
+      name: "testvolume",
+      parentVolumeId: Token.asString(awsFsxOpenzfsFileSystemTest.rootVolumeId),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Volume. You can use a maximum of 203 alphanumeric characters, plus the underscore (_) special character.
+* `parentVolumeId` - (Required) The volume ID of the volume that will be the parent of the volume being created. This can be the root volume of an `awsFsxOpenzfsFileSystem` resource (via its `rootVolumeId`) or the `id` of another `awsFsxOpenzfsVolume`.
+* `originSnapshot` - (Optional) The ARN of the source snapshot to create the volume from.
+* `copyTagsToSnapshots` - (Optional) A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
+* `dataCompressionType` - (Optional) Method used to compress the data on the volume. Valid values are `none` or `zstd`. Child volumes that don't specify a compression option inherit it from the parent volume. This option on the file system applies to the root volume.
+* `nfsExports` - (Optional) NFS export configuration for the volume. Exactly 1 item. See [NFS Exports](#nfs-exports) below.
+* `readOnly` - (Optional) Specifies whether the volume is read-only. Default is false.
+* `recordSizeKib` - (Optional) The record size of an OpenZFS volume, in kibibytes (KiB). Valid values are `4`, `8`, `16`, `32`, `64`, `128`, `256`, `512`, or `1024` KiB. The default is `128` KiB.
+* `storageCapacityQuotaGib` - (Optional) The maximum amount of storage in gibibytes (GiB) that the volume can use from its parent.
+* `storageCapacityReservationGib` - (Optional) The amount of storage in gibibytes (GiB) to reserve from the parent volume.
+* `userAndGroupQuotas` - (Optional) - Specify how much storage users or groups can use on the volume. Maximum of 100 items. See [User and Group Quotas](#user-and-group-quotas) below.
+* `tags` - (Optional) A map of tags to assign to the file system.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### NFS Exports
+
+* `clientConfigurations` - (Required) - A list of configuration objects that contain the client and options for mounting the OpenZFS file system. Maximum of 25 items. See [Client Configurations](#client-configurations) below.
+
+### Client Configurations
+
+* `clients` - (Required) - A value that specifies who can mount the file system. You can provide a wildcard character (*), an IP address (0.0.0.0), or a CIDR address (192.0.2.0/24). By default, Amazon FSx uses the wildcard character when specifying the client.
+* `options` - (Required) - The options to use when mounting the file system. Maximum of 20 items. See the [Linux NFS exports man page](https://linux.die.net/man/5/exports) for more information. `crossmnt` and `sync` are used by default.
+
+### User and Group Quotas
+
+* `id` - (Required) - The ID of the user or group. Valid values are between `0` and `2147483647`.
+* `storageCapacityQuotaGib` - (Required) - The amount of storage that the user or group can use in gibibytes (GiB). Valid values are between `0` and `2147483647`.
+* `type` - (Required) - A value that specifies whether the quota applies to a user or group. Valid values are `user` or `group`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the volume.
+* `id` - Identifier of the volume, e.g., `fsvol12345678`
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx Volumes using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FSx Volumes using the `id`. For example:
+
+```console
+% terraform import aws_fsx_openzfs_volume.example fsvol-543ab12b1ca672f33
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/fsx_windows_file_system.html.markdown b/website/docs/cdktf/typescript/r/fsx_windows_file_system.html.markdown
new file mode 100644
index 00000000000..7d779d4d20d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/fsx_windows_file_system.html.markdown
@@ -0,0 +1,199 @@
+---
+subcategory: "FSx"
+layout: "aws"
+page_title: "AWS: aws_fsx_windows_file_system"
+description: |-
+  Manages a FSx Windows File System.
+---
+
+
+
+# Resource: aws_fsx_windows_file_system
+
+Manages a FSx Windows File System. See the [FSx Windows Guide](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/what-is.html) for more information.
+
+~> **NOTE:** Either the `activeDirectoryId` argument or `selfManagedActiveDirectory` configuration block must be specified.
+
+## Example Usage
+
+### Using AWS Directory Service
+
+Additional information for using AWS Directory Service with Windows File Systems can be found in the [FSx Windows Guide](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/fsx-aws-managed-ad.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxWindowsFileSystem } from "./.gen/providers/aws/fsx-windows-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxWindowsFileSystem(this, "example", {
+      activeDirectoryId: Token.asString(awsDirectoryServiceDirectoryExample.id),
+      kmsKeyId: Token.asString(awsKmsKeyExample.arn),
+      storageCapacity: 300,
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+      throughputCapacity: 1024,
+    });
+  }
+}
+
+```
+
+### Using a Self-Managed Microsoft Active Directory
+
+Additional information for using a self-managed Microsoft Active Directory with Windows File Systems can be found in the [FSx Windows Guide](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/self-managed-AD.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxWindowsFileSystem } from "./.gen/providers/aws/fsx-windows-file-system";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxWindowsFileSystem(this, "example", {
+      kmsKeyId: Token.asString(awsKmsKeyExample.arn),
+      selfManagedActiveDirectory: {
+        dnsIps: ["10.0.0.111", "10.0.0.222"],
+        domainName: "corp.example.com",
+        password: "avoid-plaintext-passwords",
+        username: "Admin",
+      },
+      storageCapacity: 300,
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+      throughputCapacity: 1024,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `subnetIds` - (Required) A list of IDs for the subnets that the file system will be accessible from. To specify more than a single subnet, set `deploymentType` to `multiAz1`.
+* `throughputCapacity` - (Required) Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `8` and maximum of `2048`.
+
+The following arguments are optional:
+
+* `activeDirectoryId` - (Optional) The ID for an existing Microsoft Active Directory instance that the file system should join when it's created. Cannot be specified with `selfManagedActiveDirectory`.
+* `aliases` - (Optional) An array of DNS alias names that you want to associate with the Amazon FSx file system. For more information, see [Working with DNS Aliases](https://docs.aws.amazon.com/fsx/latest/WindowsGuide/managing-dns-aliases.html)
+* `auditLogConfiguration` - (Optional) The configuration that Amazon FSx for Windows File Server uses to audit and log user accesses of files, folders, and file shares on the Amazon FSx for Windows File Server file system. See below.
+* `automaticBackupRetentionDays` - (Optional) The number of days to retain automatic backups. Minimum of `0` and maximum of `90`. Defaults to `7`. Set to `0` to disable.
+* `backupId` - (Optional) The ID of the source backup to create the filesystem from.
+* `copyTagsToBackups` - (Optional) A boolean flag indicating whether tags on the file system should be copied to backups. Defaults to `false`.
+* `dailyAutomaticBackupStartTime` - (Optional) The preferred time (in `hh:mm` format) to take daily automatic backups, in the UTC time zone.
+* `deploymentType` - (Optional) Specifies the file system deployment type. Valid values are `multiAz1`, `singleAz1` and `singleAz2`. Default value is `singleAz1`.
+* `kmsKeyId` - (Optional) ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
+* `preferredSubnetId` - (Optional) Specifies the subnet in which you want the preferred file server to be located. Required when deployment type is `multiAz1`.
+* `securityGroupIds` - (Optional) A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
+* `selfManagedActiveDirectory` - (Optional) Configuration block that Amazon FSx uses to join the Windows File Server instance to your self-managed (including on-premises) Microsoft Active Directory (AD) directory. Cannot be specified with `activeDirectoryId`. Detailed below.
+* `skipFinalBackup` - (Optional) When enabled, will skip the default final backup taken when the file system is deleted. This configuration must be applied separately before attempting to delete the resource to have the desired behavior. Defaults to `false`.
+* `tags` - (Optional) A map of tags to assign to the file system. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `storageCapacity` - (Optional) Storage capacity (GiB) of the file system. Minimum of 32 and maximum of 65536. If the storage type is set to `hdd`, the minimum value is 2000. Required when not creating a file system from a backup.
+* `storageType` - (Optional) Specifies the storage type. Valid values are `ssd` and `hdd`. `hdd` is supported on `singleAz2` and `multiAz1` Windows file system deployment types. Default value is `ssd`.
+* `weeklyMaintenanceStartTime` - (Optional) The preferred start time (in `d:hh:mm` format) to perform weekly maintenance, in the UTC time zone.
+
+### self_managed_active_directory
+
+The `selfManagedActiveDirectory` configuration block supports the following arguments:
+
+* `dnsIps` - (Required) A list of up to two IP addresses of DNS servers or domain controllers in the self-managed AD directory. The IP addresses need to be either in the same VPC CIDR range as the file system or in the private IP version 4 (IPv4) address ranges as specified in [RFC 1918](https://tools.ietf.org/html/rfc1918).
+* `domainName` - (Required) The fully qualified domain name of the self-managed AD directory. For example, `corpExampleCom`.
+* `password` - (Required) The password for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `username` - (Required) The user name for the service account on your self-managed AD domain that Amazon FSx will use to join to your AD domain.
+* `fileSystemAdministratorsGroup` - (Optional) The name of the domain group whose members are granted administrative privileges for the file system. Administrative privileges include taking ownership of files and folders, and setting audit controls (audit ACLs) on files and folders. The group that you specify must already exist in your domain. Defaults to `Domain Admins`.
+* `organizationalUnitDistinguishedName` - (Optional) The fully qualified distinguished name of the organizational unit within your self-managed AD directory that the Windows File Server instance will join. For example, `ou=fSx,dc=yourdomain,dc=corp,dc=com`. Only accepts OU as the direct parent of the file system. If none is provided, the FSx file system is created in the default location of your self-managed AD directory. To learn more, see [RFC 2253](https://tools.ietf.org/html/rfc2253).
+
+### audit_log_configuration
+
+* `auditLogDestination` - (Optional) The Amazon Resource Name (ARN) for the destination of the audit logs. The destination can be any Amazon CloudWatch Logs log group ARN or Amazon Kinesis Data Firehose delivery stream ARN. Can be specified when `fileAccessAuditLogLevel` and `fileShareAccessAuditLogLevel` are not set to `disabled`. The name of the Amazon CloudWatch Logs log group must begin with the `/aws/fsx` prefix. The name of the Amazon Kinesis Data Firehose delivery stream must begin with the `awsFsx` prefix. If you do not provide a destination in `auditLogDestination`, Amazon FSx will create and use a log stream in the CloudWatch Logs `/aws/fsx/windows` log group.
+* `fileAccessAuditLogLevel` - (Optional) Sets which attempt type is logged by Amazon FSx for file and folder accesses. Valid values are `successOnly`, `failureOnly`, `successAndFailure`, and `disabled`. Default value is `disabled`.
+* `fileShareAccessAuditLogLevel` - (Optional) Sets which attempt type is logged by Amazon FSx for file share accesses. Valid values are `successOnly`, `failureOnly`, `successAndFailure`, and `disabled`. Default value is `disabled`.
+
+A sketch that wires these audit levels into a file system follows the attribute list below.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name of the file system.
+* `dnsName` - DNS name for the file system, e.g., `fs12345678CorpExampleCom` (domain name matching the Active Directory domain name)
+* `id` - Identifier of the file system (e.g. `fs12345678`).
+* `networkInterfaceIds` - Set of Elastic Network Interface identifiers from which the file system is accessible.
+* `ownerId` - AWS account identifier that created the file system.
+* `preferredFileServerIp` - The IP address of the primary, or preferred, file server.
+* `remoteAdministrationEndpoint` - For `multiAz1` deployment types, use this endpoint when performing administrative tasks on the file system using Amazon FSx Remote PowerShell. For `singleAz1` deployment types, this is the DNS name of the file system.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcId` - Identifier of the Virtual Private Cloud for the file system.
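+
+The audit log levels combine with the rest of the file system arguments as in the following hand-written sketch (not `cdktf convert` output). The directory and subnet IDs are illustrative placeholders, and `auditLogDestination` is omitted so FSx falls back to the default `/aws/fsx/windows` log group described above.
+
+```typescript
+// A minimal sketch, assuming an existing AWS Managed Microsoft AD directory
+// and a subnet; both IDs are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { FsxWindowsFileSystem } from "./.gen/providers/aws/fsx-windows-file-system";
+class WindowsAuditLogSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new FsxWindowsFileSystem(this, "example", {
+      activeDirectoryId: "d-1234567890", // placeholder directory ID
+      storageCapacity: 32,
+      subnetIds: ["subnet-0123456789abcdef0"], // placeholder subnet ID
+      throughputCapacity: 8,
+      auditLogConfiguration: {
+        // Log both successful and failed file/folder access attempts,
+        // but only failures for file share accesses.
+        fileAccessAuditLogLevel: "SUCCESS_AND_FAILURE",
+        fileShareAccessAuditLogLevel: "FAILURE_ONLY",
+      },
+    });
+  }
+}
+```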
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `45m`)
+* `delete` - (Default `30m`)
+* `update` - (Default `45m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import FSx File Systems using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import FSx File Systems using the `id`. For example:
+
+```console
+% terraform import aws_fsx_windows_file_system.example fs-543ab12b1ca672f33
+```
+
+Certain resource arguments, like `securityGroupIds` and the `selfManagedActiveDirectory` configuration block `password`, do not have an FSx API method for reading the information after creation. If these arguments are set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:

```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { FsxWindowsFileSystem } from "./.gen/providers/aws/fsx-windows-file-system";
+interface MyConfig {
+  subnetIds: any;
+  throughputCapacity: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new FsxWindowsFileSystem(this, "example", {
+      lifecycle: {
+        // Attribute names are passed as Terraform (snake_case) strings.
+        ignoreChanges: ["security_group_ids"],
+      },
+      securityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+      subnetIds: config.subnetIds,
+      throughputCapacity: config.throughputCapacity,
+    });
+  }
+}
+
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/gamelift_alias.html.markdown b/website/docs/cdktf/typescript/r/gamelift_alias.html.markdown
new file mode 100644
index 00000000000..c19386d779c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/gamelift_alias.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_alias"
+description: |-
+  Provides a GameLift Alias resource.
+---
+
+
+
+# Resource: aws_gamelift_alias
+
+Provides a GameLift Alias resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GameliftAlias } from "./.gen/providers/aws/gamelift-alias";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GameliftAlias(this, "example", {
+      description: "Example Description",
+      name: "example-alias",
+      routingStrategy: {
+        message: "Example Message",
+        type: "TERMINAL",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the alias.
+* `description` - (Optional) Description of the alias.
+* `routingStrategy` - (Required) Specifies the fleet and/or routing type to use for the alias.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `routingStrategy`
+
+* `fleetId` - (Optional) ID of the GameLift Fleet to point the alias to.
+* `message` - (Optional) Message text to be used with the `TERMINAL` routing strategy.
+* `type` - (Required) Type of routing strategy. E.g., `SIMPLE` or `TERMINAL`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Alias ID.
+* `arn` - Alias ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Aliases using the ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GameLift Aliases using the ID. For example:
+
+```console
+% terraform import aws_gamelift_alias.example <alias-id>
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/gamelift_build.html.markdown b/website/docs/cdktf/typescript/r/gamelift_build.html.markdown
new file mode 100644
index 00000000000..e24c64d49e4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/gamelift_build.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_build"
+description: |-
+  Provides a GameLift Build resource.
+---
+
+
+
+# Resource: aws_gamelift_build
+
+Provides a GameLift Build resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GameliftBuild } from "./.gen/providers/aws/gamelift-build";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GameliftBuild(this, "test", {
+      name: "example-build",
+      operatingSystem: "WINDOWS_2012",
+      storageLocation: {
+        bucket: Token.asString(awsS3BucketTest.id),
+        key: Token.asString(awsS3ObjectTest.key),
+        roleArn: Token.asString(awsIamRoleTest.arn),
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the build.
+* `operatingSystem` - (Required) Operating system that the game server binaries are built to run on. E.g., `WINDOWS_2012`, `AMAZON_LINUX` or `AMAZON_LINUX_2`.
+* `storageLocation` - (Required) Information indicating where your game build files are stored. See below.
+* `version` - (Optional) Version that is associated with this build.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `storageLocation`
+
+* `bucket` - (Required) Name of your S3 bucket.
+* `key` - (Required) Name of the zip file containing your build files.
+* `roleArn` - (Required) ARN of the access role that allows Amazon GameLift to access your S3 bucket.
+* `objectVersion` - (Optional) A specific version of the file. If not set, the latest version of the file is retrieved.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - GameLift Build ID.
+* `arn` - GameLift Build ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Builds using the ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GameLift Builds using the ID. For example:
+
+```console
+% terraform import aws_gamelift_build.example <build-id>
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/gamelift_fleet.html.markdown b/website/docs/cdktf/typescript/r/gamelift_fleet.html.markdown
new file mode 100644
index 00000000000..a673a6e1464
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/gamelift_fleet.html.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_fleet"
+description: |-
+  Provides a GameLift Fleet resource.
+---
+
+
+
+# Resource: aws_gamelift_fleet
+
+Provides a GameLift Fleet resource.
+
+## Example Usage
+
+```terraform
+resource "aws_gamelift_fleet" "example" {
+  build_id          = aws_gamelift_build.example.id
+  ec2_instance_type = "t2.micro"
+  fleet_type        = "ON_DEMAND"
+  name              = "example-fleet-name"
+
+  runtime_configuration {
+    server_process {
+      concurrent_executions = 1
+      launch_path           = "C:\\game\\GomokuServer.exe"
+    }
+  }
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `buildId` - (Optional) ID of the GameLift Build to be deployed on the fleet.
+* `certificateConfiguration` - (Optional) Prompts GameLift to generate a TLS/SSL certificate for the fleet. See [certificate_configuration](#certificate_configuration).
+* `description` - (Optional) Human-readable description of the fleet.
+* `ec2InboundPermission` - (Optional) Range of IP addresses and port settings that permit inbound traffic to access server processes running on the fleet. See below.
+* `ec2InstanceType` - (Required) Name of an EC2 instance type. E.g., `t2.micro`.
+* `fleetType` - (Optional) Type of fleet. This value must be `ON_DEMAND` or `SPOT`. Defaults to `ON_DEMAND`.
+* `instanceRoleArn` - (Optional) ARN of an IAM role that instances in the fleet can assume.
+* `metricGroups` - (Optional) List of names of metric groups to add this fleet to. A metric group tracks metrics across all fleets in the group. Defaults to `default`.
+* `name` - (Required) The name of the fleet.
+* `newGameSessionProtectionPolicy` - (Optional) Game session protection policy to apply to all instances in this fleet. E.g., `FullProtection`. Defaults to `NoProtection`.
+* `resourceCreationLimitPolicy` - (Optional) Policy that limits the number of game sessions an individual player can create over a span of time for this fleet. See below.
+* `runtimeConfiguration` - (Optional) Instructions for launching server processes on each instance in the fleet. See below.
+* `scriptId` - (Optional) ID of the GameLift Script to be deployed on the fleet.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `certificateConfiguration`
+
+* `certificateType` - (Optional) Indicates whether a TLS/SSL certificate is generated for a fleet. Valid values are `DISABLED` and `GENERATED`. Default value is `DISABLED`.
+
+#### `ec2InboundPermission`
+
+* `fromPort` - (Required) Starting value for a range of allowed port numbers.
+* `ipRange` - (Required) Range of allowed IP addresses expressed in CIDR notation. E.g., `000.000.000.000/[subnet mask]` or `0.0.0.0/[subnet mask]`.
+* `protocol` - (Required) Network communication protocol used by the fleet. E.g., `TCP` or `UDP`.
+* `toPort` - (Required) Ending value for a range of allowed port numbers. Port numbers are end-inclusive. This value must be higher than `fromPort`.
+
+#### `resourceCreationLimitPolicy`
+
+* `newGameSessionsPerCreator` - (Optional) Maximum number of game sessions that an individual can create during the policy period.
+* `policyPeriodInMinutes` - (Optional) Time span used in evaluating the resource creation limit policy.
+
+#### `runtimeConfiguration`
+
+* `gameSessionActivationTimeoutSeconds` - (Optional) Maximum amount of time (in seconds) that a game session can remain in status `ACTIVATING`.
+* `maxConcurrentGameSessionActivations` - (Optional) Maximum number of game sessions with status `ACTIVATING` to allow on an instance simultaneously.
+* `serverProcess` - (Optional) Collection of server process configurations that describe which server processes to run on each instance in a fleet. See below.
+
+#### `serverProcess`
+
+* `concurrentExecutions` - (Required) Number of server processes using this configuration to run concurrently on an instance.
+* `launchPath` - (Required) Location of the server executable in a game build. All game builds are installed on instances at the root: for Windows instances `C:\game`, and for Linux instances `/local/game`.
+* `parameters` - (Optional) Optional list of parameters to pass to the server executable on launch.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Fleet ID.
+* `arn` - Fleet ARN.
+* `buildArn` - Build ARN.
+* `operatingSystem` - Operating system of the fleet's computing resources.
+* `scriptArn` - Script ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `70m`)
+* `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Fleets using the ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GameLift Fleets using the ID. For example:
+
+```console
+% terraform import aws_gamelift_fleet.example <fleet-id>
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/gamelift_game_server_group.markdown b/website/docs/cdktf/typescript/r/gamelift_game_server_group.markdown
new file mode 100644
index 00000000000..ba9f9813184
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/gamelift_game_server_group.markdown
@@ -0,0 +1,260 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_game_server_group"
+description: |-
+  Provides a GameLift Game Server Group resource.
+---
+
+
+
+# Resource: aws_gamelift_game_server_group
+
+Provides a GameLift Game Server Group resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { GameliftGameServerGroup } from "./.gen/providers/aws/gamelift-game-server-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GameliftGameServerGroup(this, "example", { + dependsOn: [awsIamRolePolicyAttachmentExample], + gameServerGroupName: "example", + instanceDefinition: [ + { + instanceType: "c5.large", + }, + { + instanceType: "c5a.large", + }, + ], + launchTemplate: { + id: Token.asString(awsLaunchTemplateExample.id), + }, + maxSize: 1, + minSize: 1, + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +Full usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GameliftGameServerGroup } from "./.gen/providers/aws/gamelift-game-server-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GameliftGameServerGroup(this, "example", { + autoScalingPolicy: { + estimatedInstanceWarmup: 60, + targetTrackingConfiguration: { + targetValue: 75, + }, + }, + balancingStrategy: "SPOT_ONLY", + dependsOn: [awsIamRolePolicyAttachmentExample], + gameServerGroupName: "example", + gameServerProtectionPolicy: "FULL_PROTECTION", + instanceDefinition: [ + { + instanceType: "c5.large", + weightedCapacity: "1", + }, + { + instanceType: "c5.2xlarge", + weightedCapacity: "2", + }, + ], + launchTemplate: { + id: Token.asString(awsLaunchTemplateExample.id), + version: "1", + }, + maxSize: 1, + minSize: 1, + roleArn: Token.asString(awsIamRoleExample.arn), + tags: { + Name: "example", + }, + vpcSubnets: ["subnet-12345678", "subnet-23456789"], + }); + } +} + +``` + +### Example IAM Role for GameLift Game Server Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: [ + "autoscaling.amazonaws.com", + "gamelift.amazonaws.com", + ], + type: "Service", + }, + ], + }, + ], + }); + const current = new DataAwsPartition(this, "current", {}); + const example = new IamRole(this, "example", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "gamelift-game-server-group-example", + }); + const awsIamRolePolicyAttachmentExample = new IamRolePolicyAttachment( + this, + "example_3", + { + policyArn: + "arn:${" + + current.partition + + "}:iam::aws:policy/GameLiftGameServerGroupPolicy", + role: example.name, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyAttachmentExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `balancingStrategy` - (Optional) Indicates how GameLift FleetIQ balances the use of Spot Instances and On-Demand Instances.
+  Valid values: `SPOT_ONLY`, `SPOT_PREFERRED`, `ON_DEMAND_ONLY`. Defaults to `SPOT_PREFERRED`.
+* `gameServerGroupName` - (Required) Name of the game server group.
+  This value is used to generate unique ARN identifiers for the EC2 Auto Scaling group and the GameLift FleetIQ game server group.
+* `gameServerProtectionPolicy` - (Optional) Indicates whether instances in the game server group are protected from early termination.
+  Unprotected instances that have active game servers running might be terminated during a scale-down event,
+  causing players to be dropped from the game.
+  Protected instances cannot be terminated while there are active game servers running except in the event
+  of a forced game server group deletion.
+  Valid values: `NO_PROTECTION`, `FULL_PROTECTION`. Defaults to `NO_PROTECTION`.
+* `maxSize` - (Required) The maximum number of instances allowed in the EC2 Auto Scaling group.
+  During automatic scaling events, GameLift FleetIQ and EC2 do not scale up the group above this maximum.
+* `minSize` - (Required) The minimum number of instances allowed in the EC2 Auto Scaling group.
+  During automatic scaling events, GameLift FleetIQ and EC2 do not scale down the group below this minimum.
+* `roleArn` - (Required) ARN for an IAM role that allows Amazon GameLift to access your EC2 Auto Scaling groups.
+* `tags` - (Optional) Key-value map of resource tags.
+* `vpcSubnets` - (Optional) A list of VPC subnets to use with instances in the game server group.
+  By default, all GameLift FleetIQ-supported Availability Zones are used.
+
+### `autoScalingPolicy`
+
+Configuration settings to define a scaling policy for the Auto Scaling group that is optimized for game hosting.
+The scaling policy uses the metric `PercentUtilizedGameServers` to maintain a buffer of idle game servers that
+can immediately accommodate new games and players. A focused sketch follows the `instanceDefinition` section below.
+
+* `estimatedInstanceWarmup` - (Optional) Length of time, in seconds, it takes for a new instance to start
+  new game server processes and register with GameLift FleetIQ.
+  Specifying a warm-up time can be useful, particularly with game servers that take a long time to start up,
+  because it avoids prematurely starting new instances. Defaults to `60`.
+
+#### `targetTrackingConfiguration`
+
+Settings for a target-based scaling policy applied to Auto Scaling group.
+These settings are used to create a target-based policy that tracks the GameLift FleetIQ metric `PercentUtilizedGameServers`
+and specifies a target value for the metric.
+
+* `targetValue` - (Required) Desired value to use with a game server group target-based scaling policy.
+
+### `instanceDefinition`
+
+The EC2 instance types and sizes to use in the Auto Scaling group.
+The instance definitions must specify at least two different instance types that are supported by GameLift FleetIQ.
+
+* `instanceType` - (Required) An EC2 instance type.
+* `weightedCapacity` - (Optional) Instance weighting that indicates how much this instance type contributes
+  to the total capacity of a game server group.
+  Instance weights are used by GameLift FleetIQ to calculate the instance type's cost per unit hour and better identify
+  the most cost-effective options.
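+
+As a focused, hypothetical sketch of the `autoScalingPolicy` and `instanceDefinition` blocks described above: the launch template ID and IAM role ARN are placeholders, and the role is assumed to already have the `GameLiftGameServerGroupPolicy` attachment shown in the earlier example.
+
+```typescript
+// Hypothetical sketch only - identifiers and ARNs are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { GameliftGameServerGroup } from "./.gen/providers/aws/gamelift-game-server-group";
+class ScalingPolicySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GameliftGameServerGroup(this, "example", {
+      gameServerGroupName: "example",
+      roleArn: "arn:aws:iam::123456789012:role/gamelift-example", // placeholder
+      launchTemplate: {
+        id: "lt-0123456789abcdef0", // placeholder
+      },
+      minSize: 1,
+      maxSize: 10,
+      // Track PercentUtilizedGameServers at 75%, i.e. keep roughly a
+      // 25% buffer of idle game servers for incoming sessions.
+      autoScalingPolicy: {
+        estimatedInstanceWarmup: 120, // allow slow-starting processes to register
+        targetTrackingConfiguration: {
+          targetValue: 75,
+        },
+      },
+      // A c5.2xlarge counts for twice the capacity of a c5.large, letting
+      // FleetIQ compare the two on cost per capacity unit.
+      instanceDefinition: [
+        { instanceType: "c5.large", weightedCapacity: "1" },
+        { instanceType: "c5.2xlarge", weightedCapacity: "2" },
+      ],
+    });
+  }
+}
+```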
+
+### `launchTemplate`
+
+The EC2 launch template that contains configuration settings and game server code to be deployed to all instances in the game server group.
+You can specify the template using either the template name or ID.
+
+* `id` - (Optional) A unique identifier for an existing EC2 launch template.
+* `name` - (Optional) A readable identifier for an existing EC2 launch template.
+* `version` - (Optional) The version of the EC2 launch template to use. If none is set, the default is the first version created.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the GameLift Game Server Group.
+* `arn` - The ARN of the GameLift Game Server Group.
+* `autoScalingGroupArn` - The ARN of the created EC2 Auto Scaling group.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Game Server Group using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GameLift Game Server Group using the `name`. For example:
+
+```console
+% terraform import aws_gamelift_game_server_group.example example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/gamelift_game_session_queue.html.markdown b/website/docs/cdktf/typescript/r/gamelift_game_session_queue.html.markdown
new file mode 100644
index 00000000000..814687711f7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/gamelift_game_session_queue.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_game_session_queue"
+description: |-
+  Provides a GameLift Game Session Queue resource.
+---
+
+
+
+# Resource: aws_gamelift_game_session_queue
+
+Provides a GameLift Game Session Queue resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GameliftGameSessionQueue } from "./.gen/providers/aws/gamelift-game-session-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GameliftGameSessionQueue(this, "test", {
+      destinations: [usWest2Fleet.arn, euCentral1Fleet.arn],
+      name: "example-session-queue",
+      notificationTarget: gameSessionQueueNotifications.arn,
+      playerLatencyPolicy: [
+        {
+          maximumIndividualPlayerLatencyMilliseconds: 100,
+          policyDurationSeconds: 5,
+        },
+        {
+          maximumIndividualPlayerLatencyMilliseconds: 200,
+        },
+      ],
+      timeoutInSeconds: 60,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the session queue.
+* `timeoutInSeconds` - (Required) Maximum time a game session request can remain in the queue.
+* `customEventData` - (Optional) Information to be added to all events that are related to this game session queue.
+* `destinations` - (Optional) List of fleet/alias ARNs used by session queue for placing game sessions.
+* `notificationTarget` - (Optional) An SNS topic ARN that is set up to receive game session placement notifications.
+* `playerLatencyPolicy` - (Optional) One or more policies used to choose fleet based on player latency. See below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `playerLatencyPolicy`
+
+* `maximumIndividualPlayerLatencyMilliseconds` - (Required) Maximum latency value that is allowed for any player.
+* `policyDurationSeconds` - (Optional) Length of time that the policy is enforced while placing a new game session. Absence of value for this attribute means that the policy is enforced until the queue times out.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Game Session Queue ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Game Session Queues using their `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GameLift Game Session Queues using their `name`. For example:
+
+```console
+% terraform import aws_gamelift_game_session_queue.example example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/gamelift_script.html.markdown b/website/docs/cdktf/typescript/r/gamelift_script.html.markdown
new file mode 100644
index 00000000000..a5e257164a2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/gamelift_script.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "GameLift"
+layout: "aws"
+page_title: "AWS: aws_gamelift_script"
+description: |-
+  Provides a GameLift Script resource.
+---
+
+
+
+# Resource: aws_gamelift_script
+
+Provides a GameLift Script resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GameliftScript } from "./.gen/providers/aws/gamelift-script";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GameliftScript(this, "example", {
+      name: "example-script",
+      storageLocation: {
+        bucket: Token.asString(awsS3BucketExample.id),
+        key: Token.asString(awsS3ObjectExample.key),
+        roleArn: Token.asString(awsIamRoleExample.arn),
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the script.
+* `storageLocation` - (Optional) Information indicating where your game script files are stored. See below.
+* `version` - (Optional) Version that is associated with this script.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `zipFile` - (Optional) A data object containing your Realtime scripts and dependencies as a zip file. The zip file can have one or multiple files. Maximum size of a zip file is 5 MB.
+
+### Nested Fields
+
+#### `storageLocation`
+
+* `bucket` - (Required) Name of your S3 bucket.
+* `key` - (Required) Name of the zip file containing your script files.
+* `roleArn` - (Required) ARN of the access role that allows Amazon GameLift to access your S3 bucket.
+* `objectVersion` - (Optional) A specific version of the file. If not set, the latest version of the file is retrieved.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - GameLift Script ID.
+* `arn` - GameLift Script ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GameLift Scripts using the ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GameLift Scripts using the ID. For example:
+
+```console
+% terraform import aws_gamelift_script.example <script-id>
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/glacier_vault.html.markdown b/website/docs/cdktf/typescript/r/glacier_vault.html.markdown
new file mode 100644
index 00000000000..56d07a174d9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/glacier_vault.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "S3 Glacier"
+layout: "aws"
+page_title: "AWS: aws_glacier_vault"
+description: |-
+  Provides a Glacier Vault.
+---
+
+
+
+# Resource: aws_glacier_vault
+
+Provides a Glacier Vault resource. You can refer to the [Glacier Developer Guide](https://docs.aws.amazon.com/amazonglacier/latest/dev/working-with-vaults.html) for a full explanation of the Glacier Vault functionality.
+
+~> **NOTE:** When removing a Glacier Vault, the Vault must be empty.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { GlacierVault } from "./.gen/providers/aws/glacier-vault";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const awsSnsTopic = new SnsTopic(this, "aws_sns_topic", {
+      name: "glacier-sns-topic",
+    });
+    const myArchive = new DataAwsIamPolicyDocument(this, "my_archive", {
+      statement: [
+        {
+          actions: ["glacier:InitiateJob", "glacier:GetJobOutput"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["*"],
+              type: "*",
+            },
+          ],
+          resources: [
+            "arn:aws:glacier:eu-west-1:432981146916:vaults/MyArchive",
+          ],
+          sid: "add-read-only-perm",
+        },
+      ],
+    });
+    const awsGlacierVaultMyArchive = new GlacierVault(this, "my_archive_2", {
+      accessPolicy: Token.asString(myArchive.json),
+      name: "MyArchive",
+      notification: {
+        events: ["ArchiveRetrievalCompleted", "InventoryRetrievalCompleted"],
+        snsTopic: awsSnsTopic.arn,
+      },
+      tags: {
+        Test: "MyArchive",
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsGlacierVaultMyArchive.overrideLogicalId("my_archive");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Vault. Names can be between 1 and 255 characters long and the valid characters are a-z, A-Z, 0-9, '_' (underscore), '-' (hyphen), and '.' (period).
+* `accessPolicy` - (Optional) The policy document. This is a JSON formatted string.
+  The heredoc syntax or `file` function is helpful here. Use the [Glacier Developer Guide](https://docs.aws.amazon.com/amazonglacier/latest/dev/vault-access-policy.html) for more information on Glacier Vault Policies.
+* `notification` - (Optional) The notifications for the Vault. Fields documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+**notification** supports the following:
+
+* `events` - (Required) You can configure a vault to publish a notification for `ArchiveRetrievalCompleted` and `InventoryRetrievalCompleted` events.
+* `snsTopic` - (Required) The SNS Topic ARN.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `location` - The URI of the vault that was created.
+* `arn` - The ARN of the vault.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glacier Vaults using the `name`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Glacier Vaults using the `name`. For example:
+
+```console
+% terraform import aws_glacier_vault.archive my_archive
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/glacier_vault_lock.html.markdown b/website/docs/cdktf/typescript/r/glacier_vault_lock.html.markdown
new file mode 100644
index 00000000000..e955b83b56a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/glacier_vault_lock.html.markdown
@@ -0,0 +1,135 @@
+---
+subcategory: "S3 Glacier"
+layout: "aws"
+page_title: "AWS: aws_glacier_vault_lock"
+description: |-
+  Manages a Glacier Vault Lock.
+---
+
+
+
+# Resource: aws_glacier_vault_lock
+
+Manages a Glacier Vault Lock. You can refer to the [Glacier Developer Guide](https://docs.aws.amazon.com/amazonglacier/latest/dev/vault-lock.html) for a full explanation of the Glacier Vault Lock functionality.
+
+~> **NOTE:** This resource allows you to test Glacier Vault Lock policies by setting the `completeLock` argument to `false`. When testing policies in this manner, the Glacier Vault Lock automatically expires after 24 hours and Terraform will show this resource as needing recreation after that time. To permanently apply the policy, set the `completeLock` argument to `true`. When changing `completeLock` to `true`, it is expected the resource will show as recreating.
+
+!> **WARNING:** Once a Glacier Vault Lock is completed, it is immutable. Deletion of the Glacier Vault Lock is not possible and attempting to remove it from Terraform will return an error. Set the `ignoreDeletionError` argument to `true` and apply this configuration before attempting to delete this resource via Terraform or use `terraform state rm` to remove this resource from Terraform management.
+
+## Example Usage
+
+### Testing Glacier Vault Lock Policy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { GlacierVault } from "./.gen/providers/aws/glacier-vault";
+import { GlacierVaultLock } from "./.gen/providers/aws/glacier-vault-lock";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new GlacierVault(this, "example", {
+      name: "example",
+    });
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_1",
+      {
+        statement: [
+          {
+            actions: ["glacier:DeleteArchive"],
+            condition: [
+              {
+                test: "NumericLessThanEquals",
+                values: ["365"],
+                variable: "glacier:ArchiveAgeinDays",
+              },
+            ],
+            effect: "Deny",
+            resources: [example.arn],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsGlacierVaultLockExample = new GlacierVaultLock(this, "example_2", { + completeLock: false, + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + vaultName: example.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlacierVaultLockExample.overrideLogicalId("example"); + } +} + +``` + +### Permanently Applying Glacier Vault Lock Policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlacierVaultLock } from "./.gen/providers/aws/glacier-vault-lock"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlacierVaultLock(this, "example", { + completeLock: true, + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + vaultName: Token.asString(awsGlacierVaultExample.name), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `completeLock` - (Required) Boolean whether to permanently apply this Glacier Lock Policy. Once completed, this cannot be undone. If set to `false`, the Glacier Lock Policy remains in a testing mode for 24 hours. After that time, the Glacier Lock Policy is automatically removed by Glacier and the Terraform resource will show as needing recreation. Changing this from `false` to `true` will show as resource recreation, which is expected. Changing this from `true` to `false` is not possible unless the Glacier Vault is recreated at the same time. +* `policy` - (Required) JSON string containing the IAM policy to apply as the Glacier Vault Lock policy. +* `vaultName` - (Required) The name of the Glacier Vault. +* `ignoreDeletionError` - (Optional) Allow Terraform to ignore the error returned when attempting to delete the Glacier Lock Policy. This can be used to delete or recreate the Glacier Vault via Terraform, for example, if the Glacier Vault Lock policy permits that action. This should only be used in conjunction with `completeLock` being set to `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Glacier Vault name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glacier Vault Locks using the Glacier Vault name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glacier Vault Locks using the Glacier Vault name. 
For example:
+
+```console
+% terraform import aws_glacier_vault_lock.example example-vault
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/globalaccelerator_accelerator.html.markdown b/website/docs/cdktf/typescript/r/globalaccelerator_accelerator.html.markdown
new file mode 100644
index 00000000000..3eb6ce42af1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/globalaccelerator_accelerator.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "Global Accelerator"
+layout: "aws"
+page_title: "AWS: aws_globalaccelerator_accelerator"
+description: |-
+  Provides a Global Accelerator accelerator.
+---
+
+
+
+# Resource: aws_globalaccelerator_accelerator
+
+Creates a Global Accelerator accelerator.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GlobalacceleratorAccelerator } from "./.gen/providers/aws/globalaccelerator-accelerator";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GlobalacceleratorAccelerator(this, "example", {
+      attributes: {
+        flowLogsEnabled: true,
+        flowLogsS3Bucket: "example-bucket",
+        flowLogsS3Prefix: "flow-logs/",
+      },
+      enabled: true,
+      ipAddressType: "IPV4",
+      ipAddresses: ["1.2.3.4"],
+      name: "Example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the accelerator.
+* `ipAddressType` - (Optional) The value for the address type. Defaults to `IPV4`. Valid values: `IPV4`, `DUAL_STACK`.
+* `ipAddresses` - (Optional) The IP addresses to use for BYOIP accelerators. If not specified, the service assigns IP addresses. Valid values: 1 or 2 IPv4 addresses.
+* `enabled` - (Optional) Indicates whether the accelerator is enabled. Defaults to `true`. Valid values: `true`, `false`.
+* `attributes` - (Optional) The attributes of the accelerator. Fields documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+`attributes` supports the following arguments:
+
+* `flowLogsEnabled` - (Optional) Indicates whether flow logs are enabled. Defaults to `false`. Valid values: `true`, `false`.
+* `flowLogsS3Bucket` - (Optional) The name of the Amazon S3 bucket for the flow logs. Required if `flowLogsEnabled` is `true`.
+* `flowLogsS3Prefix` - (Optional) The prefix for the location in the Amazon S3 bucket for the flow logs. Required if `flowLogsEnabled` is `true`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the accelerator.
+* `dnsName` - The DNS name of the accelerator. For example, `a5d53ff5ee6bca4ce.awsglobalaccelerator.com`.
+* `dualStackDnsName` - The Domain Name System (DNS) name that Global Accelerator creates that points to a dual-stack accelerator's four static IP addresses: two IPv4 addresses and two IPv6 addresses. For example, `a1234567890abcdef.dualstack.awsglobalaccelerator.com`.
+* `hostedZoneId` - The Global Accelerator Route 53 zone ID that can be used to
+  route an [Alias Resource Record Set][1] to the Global Accelerator. This attribute
+  is simply an alias for the zone ID `Z2BJ6XQ5FK7U4H`.
+* `ipSets` - IP address set associated with the accelerator.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+**ip_sets** exports the following attributes:
+
+* `ipAddresses` - A list of IP addresses in the IP address set.
+* `ipFamily` - The type of IP addresses included in this IP set.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_AliasTarget.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator accelerators using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Global Accelerator accelerators using the `arn`. For example:
+
+```console
+% terraform import aws_globalaccelerator_accelerator.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_accelerator.html.markdown b/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_accelerator.html.markdown
new file mode 100644
index 00000000000..f644620687c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_accelerator.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "Global Accelerator"
+layout: "aws"
+page_title: "AWS: aws_globalaccelerator_custom_routing_accelerator"
+description: |-
+  Provides a Global Accelerator custom routing accelerator.
+---
+
+
+
+# Resource: aws_globalaccelerator_custom_routing_accelerator
+
+Creates a Global Accelerator custom routing accelerator.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GlobalacceleratorCustomRoutingAccelerator } from "./.gen/providers/aws/globalaccelerator-custom-routing-accelerator";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GlobalacceleratorCustomRoutingAccelerator(this, "example", {
+      attributes: {
+        flowLogsEnabled: true,
+        flowLogsS3Bucket: "example-bucket",
+        flowLogsS3Prefix: "flow-logs/",
+      },
+      enabled: true,
+      ipAddressType: "IPV4",
+      ipAddresses: ["1.2.3.4"],
+      name: "Example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of a custom routing accelerator.
+* `ipAddressType` - (Optional) The IP address type that an accelerator supports. For a custom routing accelerator, the value must be `"IPV4"`.
+* `ipAddresses` - (Optional) The IP addresses to use for BYOIP accelerators. If not specified, the service assigns IP addresses. Valid values: 1 or 2 IPv4 addresses.
+* `enabled` - (Optional) Indicates whether the accelerator is enabled. Defaults to `true`. Valid values: `true`, `false`.
+* `attributes` - (Optional) The attributes of the accelerator. Fields documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+`attributes` supports the following arguments:
+
+* `flowLogsEnabled` - (Optional) Indicates whether flow logs are enabled. Defaults to `false`. Valid values: `true`, `false`.
+* `flowLogsS3Bucket` - (Optional) The name of the Amazon S3 bucket for the flow logs. Required if `flowLogsEnabled` is `true`.
+* `flowLogsS3Prefix` - (Optional) The prefix for the location in the Amazon S3 bucket for the flow logs. Required if `flowLogsEnabled` is `true`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the custom accelerator.
+* `dnsName` - The DNS name of the accelerator. For example, `a5d53ff5ee6bca4ce.awsglobalaccelerator.com`.
+* `hostedZoneId` - The Global Accelerator Route 53 zone ID that can be used to
+  route an [Alias Resource Record Set][1] to the Global Accelerator. This attribute
+  is simply an alias for the zone ID `Z2BJ6XQ5FK7U4H`.
+* `ipSets` - IP address set associated with the accelerator.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+**ip_sets** exports the following attributes:
+
+* `ipAddresses` - A list of IP addresses in the IP address set.
+* `ipFamily` - The type of IP addresses included in this IP set.
+
+[1]: https://docs.aws.amazon.com/Route53/latest/APIReference/API_AliasTarget.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator custom routing accelerators using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Global Accelerator custom routing accelerators using the `arn`.
For example:
+
+```console
+% terraform import aws_globalaccelerator_custom_routing_accelerator.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_endpoint_group.html.markdown b/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_endpoint_group.html.markdown
new file mode 100644
index 00000000000..f3d21ee4b54
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_endpoint_group.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Global Accelerator"
+layout: "aws"
+page_title: "AWS: aws_globalaccelerator_custom_routing_endpoint_group"
+description: |-
+  Provides a Global Accelerator custom routing endpoint group.
+---
+
+
+
+# Resource: aws_globalaccelerator_custom_routing_endpoint_group
+
+Provides a Global Accelerator custom routing endpoint group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GlobalacceleratorCustomRoutingEndpointGroup } from "./.gen/providers/aws/globalaccelerator-custom-routing-endpoint-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GlobalacceleratorCustomRoutingEndpointGroup(this, "example", {
+      destinationConfiguration: [
+        {
+          fromPort: 80,
+          protocols: ["TCP"],
+          toPort: 8080,
+        },
+      ],
+      endpointConfiguration: [
+        {
+          endpointId: Token.asString(awsSubnetExample.id),
+        },
+      ],
+      listenerArn: Token.asString(
+        awsGlobalacceleratorCustomRoutingListenerExample.id
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `listenerArn` - (Required) The Amazon Resource Name (ARN) of the custom routing listener. An end-to-end sketch of the accelerator, listener, and endpoint group chain follows the attribute list below.
+* `destinationConfiguration` - (Required) The port ranges and protocols for all endpoints in a custom routing endpoint group to accept client traffic on. Fields documented below.
+* `endpointConfiguration` - (Optional) The list of endpoint objects. Fields documented below.
+* `endpointGroupRegion` - (Optional) The name of the AWS Region where the custom routing endpoint group is located.
+
+`destinationConfiguration` supports the following arguments:
+
+* `fromPort` - (Required) The first port, inclusive, in the range of ports for the endpoint group that is associated with a custom routing accelerator.
+* `protocols` - (Required) The protocol for the endpoint group that is associated with a custom routing accelerator. The protocol can be either `"TCP"` or `"UDP"`.
+* `toPort` - (Required) The last port, inclusive, in the range of ports for the endpoint group that is associated with a custom routing accelerator.
+
+`endpointConfiguration` supports the following arguments:
+
+* `endpointId` - (Optional) An ID for the endpoint. For custom routing accelerators, this is the virtual private cloud (VPC) subnet ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the custom routing endpoint group.
+* `arn` - The Amazon Resource Name (ARN) of the custom routing endpoint group.
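+
+Because a custom routing endpoint group is always the last link in the accelerator -> listener -> endpoint group chain, the following hypothetical sketch shows all three resources wired together; the subnet ID and port numbers are placeholders, not values from this page.
+
+```typescript
+// Hypothetical sketch only - the subnet ID and ports are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { GlobalacceleratorCustomRoutingAccelerator } from "./.gen/providers/aws/globalaccelerator-custom-routing-accelerator";
+import { GlobalacceleratorCustomRoutingEndpointGroup } from "./.gen/providers/aws/globalaccelerator-custom-routing-endpoint-group";
+import { GlobalacceleratorCustomRoutingListener } from "./.gen/providers/aws/globalaccelerator-custom-routing-listener";
+class CustomRoutingChainSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // 1. The accelerator provides the static client-facing entry points.
+    const accelerator = new GlobalacceleratorCustomRoutingAccelerator(
+      this,
+      "accelerator",
+      {
+        name: "example",
+      }
+    );
+    // 2. The listener reserves the range of ports clients connect to.
+    const listener = new GlobalacceleratorCustomRoutingListener(
+      this,
+      "listener",
+      {
+        acceleratorArn: accelerator.id,
+        portRange: [{ fromPort: 10000, toPort: 10100 }],
+      }
+    );
+    // 3. The endpoint group maps listener traffic onto destination ports
+    //    of instances in the given VPC subnet.
+    new GlobalacceleratorCustomRoutingEndpointGroup(this, "endpoints", {
+      listenerArn: listener.id,
+      destinationConfiguration: [
+        { fromPort: 8080, toPort: 8081, protocols: ["UDP"] },
+      ],
+      endpointConfiguration: [
+        { endpointId: "subnet-0123456789abcdef0" }, // placeholder subnet
+      ],
+    });
+  }
+}
+```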
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator custom routing endpoint groups using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Global Accelerator custom routing endpoint groups using the `id`. For example: + +```console +% terraform import aws_globalaccelerator_custom_routing_endpoint_group.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxx/endpoint-group/xxxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_listener.html.markdown b/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_listener.html.markdown new file mode 100644 index 00000000000..0f35168997f --- /dev/null +++ b/website/docs/cdktf/typescript/r/globalaccelerator_custom_routing_listener.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_custom_routing_listener" +description: |- + Provides a Global Accelerator custom routing listener. +--- + + + +# Resource: aws_globalaccelerator_custom_routing_listener + +Provides a Global Accelerator custom routing listener. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlobalacceleratorCustomRoutingAccelerator } from "./.gen/providers/aws/globalaccelerator-custom-routing-accelerator"; +import { GlobalacceleratorCustomRoutingListener } from "./.gen/providers/aws/globalaccelerator-custom-routing-listener"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new GlobalacceleratorCustomRoutingAccelerator( + this, + "example", + { + attributes: { + flowLogsEnabled: true, + flowLogsS3Bucket: "example-bucket", + flowLogsS3Prefix: "flow-logs/", + }, + enabled: true, + ipAddressType: "IPV4", + name: "Example", + } + ); + const awsGlobalacceleratorCustomRoutingListenerExample = + new GlobalacceleratorCustomRoutingListener(this, "example_1", { + acceleratorArn: example.id, + portRange: [ + { + fromPort: 80, + toPort: 80, + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlobalacceleratorCustomRoutingListenerExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `acceleratorArn` - (Required) The Amazon Resource Name (ARN) of a custom routing accelerator. +* `portRange` - (Optional) The list of port ranges for the connections from clients to the accelerator. Fields documented below. 
+ +`portRange` supports the following arguments: + +* `fromPort` - (Optional) The first port in the range of ports, inclusive. +* `toPort` - (Optional) The last port in the range of ports, inclusive. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the custom routing listener. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator custom routing listeners using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Global Accelerator custom routing listeners using the `id`. For example: + +```console +% terraform import aws_globalaccelerator_custom_routing_listener.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/globalaccelerator_endpoint_group.html.markdown b/website/docs/cdktf/typescript/r/globalaccelerator_endpoint_group.html.markdown new file mode 100644 index 00000000000..8e6d5803ba4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/globalaccelerator_endpoint_group.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_endpoint_group" +description: |- + Provides a Global Accelerator endpoint group. +--- + + + +# Resource: aws_globalaccelerator_endpoint_group + +Provides a Global Accelerator endpoint group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlobalacceleratorEndpointGroup } from "./.gen/providers/aws/globalaccelerator-endpoint-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlobalacceleratorEndpointGroup(this, "example", { + endpointConfiguration: [ + { + endpointId: Token.asString(awsLbExample.arn), + weight: 100, + }, + ], + listenerArn: Token.asString(awsGlobalacceleratorListenerExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `listenerArn` - (Required) The Amazon Resource Name (ARN) of the listener. +* `endpointGroupRegion` (Optional) - The name of the AWS Region where the endpoint group is located. +* `healthCheckIntervalSeconds` - (Optional) The time—10 seconds or 30 seconds—between each health check for an endpoint. The default value is 30. +* `healthCheckPath` - (Optional) If the protocol is HTTP/S, then this specifies the path that is the destination for health check targets. The default value is slash (`/`). 
Terraform will only perform drift detection of its value when present in a configuration.
+* `healthCheckPort` - (Optional) The port that AWS Global Accelerator uses to check the health of endpoints that are part of this endpoint group. The default port is the listener port that this endpoint group is associated with. If the listener port is a list of ports, Global Accelerator uses the first port in the list.
+Terraform will only perform drift detection of its value when present in a configuration.
+* `healthCheckProtocol` - (Optional) The protocol that AWS Global Accelerator uses to check the health of endpoints that are part of this endpoint group. The default value is TCP.
+* `thresholdCount` - (Optional) The number of consecutive health checks required to set the state of a healthy endpoint to unhealthy, or to set an unhealthy endpoint to healthy. The default value is 3.
+* `trafficDialPercentage` - (Optional) The percentage of traffic to send to an AWS Region. Additional traffic is distributed to other endpoint groups for this listener. The default value is 100.
+* `endpointConfiguration` - (Optional) The list of endpoint objects. Fields documented below.
+* `portOverride` - (Optional) Override specific listener ports used to route traffic to endpoints that are part of this endpoint group. Fields documented below.
+
+`endpointConfiguration` supports the following arguments:
+
+* `clientIpPreservationEnabled` - (Optional) Indicates whether client IP address preservation is enabled for an Application Load Balancer endpoint. See the [AWS documentation](https://docs.aws.amazon.com/global-accelerator/latest/dg/preserve-client-ip-address.html) for more details. The default value is `false`.
+**Note:** When client IP address preservation is enabled, the Global Accelerator service creates an EC2 Security Group in the VPC named `GlobalAccelerator` that must be deleted (potentially outside of Terraform) before the VPC will successfully delete. If this EC2 Security Group is not deleted, Terraform will retry the VPC deletion for a few minutes before reporting a `DependencyViolation` error. This cannot be resolved by re-running Terraform.
+* `endpointId` - (Optional) An ID for the endpoint. If the endpoint is a Network Load Balancer or Application Load Balancer, this is the Amazon Resource Name (ARN) of the resource. If the endpoint is an Elastic IP address, this is the Elastic IP address allocation ID.
+* `weight` - (Optional) The weight associated with the endpoint. When you add weights to endpoints, you configure AWS Global Accelerator to route traffic based on proportions that you specify.
+
+`portOverride` supports the following arguments:
+
+* `endpointPort` - (Required) The endpoint port that you want a listener port to be mapped to. This is the port on the endpoint, such as the Application Load Balancer or Amazon EC2 instance.
+* `listenerPort` - (Required) The listener port that you want to map to a specific endpoint port. This is the port on which user traffic arrives at the Global Accelerator.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the endpoint group.
+* `arn` - The Amazon Resource Name (ARN) of the endpoint group.
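+
+As a sketch of how the optional arguments above combine, the following stack configures HTTP health checks, a 50% traffic dial, and a listener-to-endpoint port override for an ALB endpoint. It is illustrative only: `awsGlobalacceleratorListenerExample` and `awsLbExample` stand in for resources defined elsewhere, and the path and port values are arbitrary.
+
+```typescript
+// Hypothetical sketch; resource references and values are assumptions.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { GlobalacceleratorEndpointGroup } from "./.gen/providers/aws/globalaccelerator-endpoint-group";
+class EndpointGroupWithOverrides extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GlobalacceleratorEndpointGroup(this, "example", {
+      listenerArn: Token.asString(awsGlobalacceleratorListenerExample.id),
+      // Probe the endpoint over HTTP on port 8080 every 10 seconds.
+      healthCheckIntervalSeconds: 10,
+      healthCheckPath: "/healthz",
+      healthCheckPort: 8080,
+      healthCheckProtocol: "HTTP",
+      thresholdCount: 3,
+      // Send half of the traffic here; the rest spills to other endpoint groups.
+      trafficDialPercentage: 50,
+      // Route client traffic arriving on listener port 80 to endpoint port 8080.
+      portOverride: [
+        {
+          endpointPort: 8080,
+          listenerPort: 80,
+        },
+      ],
+      endpointConfiguration: [
+        {
+          endpointId: Token.asString(awsLbExample.arn),
+          weight: 100,
+        },
+      ],
+    });
+  }
+}
+```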
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator endpoint groups using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Global Accelerator endpoint groups using the `id`. For example: + +```console +% terraform import aws_globalaccelerator_endpoint_group.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxx/endpoint-group/xxxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/globalaccelerator_listener.html.markdown b/website/docs/cdktf/typescript/r/globalaccelerator_listener.html.markdown new file mode 100644 index 00000000000..0534282398b --- /dev/null +++ b/website/docs/cdktf/typescript/r/globalaccelerator_listener.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "Global Accelerator" +layout: "aws" +page_title: "AWS: aws_globalaccelerator_listener" +description: |- + Provides a Global Accelerator listener. +--- + + + +# Resource: aws_globalaccelerator_listener + +Provides a Global Accelerator listener. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlobalacceleratorAccelerator } from "./.gen/providers/aws/globalaccelerator-accelerator"; +import { GlobalacceleratorListener } from "./.gen/providers/aws/globalaccelerator-listener"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new GlobalacceleratorAccelerator(this, "example", { + attributes: { + flowLogsEnabled: true, + flowLogsS3Bucket: "example-bucket", + flowLogsS3Prefix: "flow-logs/", + }, + enabled: true, + ipAddressType: "IPV4", + name: "Example", + }); + const awsGlobalacceleratorListenerExample = new GlobalacceleratorListener( + this, + "example_1", + { + acceleratorArn: example.id, + clientAffinity: "SOURCE_IP", + portRange: [ + { + fromPort: 80, + toPort: 80, + }, + ], + protocol: "TCP", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlobalacceleratorListenerExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `acceleratorArn` - (Required) The Amazon Resource Name (ARN) of your accelerator. +* `clientAffinity` - (Optional) Direct all requests from a user to the same endpoint. Valid values are `none`, `sourceIp`. Default: `none`. If `none`, Global Accelerator uses the "five-tuple" properties of source IP address, source port, destination IP address, destination port, and protocol to select the hash value. 
If `sourceIp`, Global Accelerator uses the "two-tuple" properties of source (client) IP address and destination IP address to select the hash value. +* `protocol` - (Optional) The protocol for the connections from clients to the accelerator. Valid values are `tcp`, `udp`. +* `portRange` - (Optional) The list of port ranges for the connections from clients to the accelerator. Fields documented below. + +`portRange` supports the following arguments: + +* `fromPort` - (Optional) The first port in the range of ports, inclusive. +* `toPort` - (Optional) The last port in the range of ports, inclusive. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the listener. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Global Accelerator listeners using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Global Accelerator listeners using the `id`. For example: + +```console +% terraform import aws_globalaccelerator_listener.example arn:aws:globalaccelerator::111111111111:accelerator/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/listener/xxxxxxxx +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_catalog_database.html.markdown b/website/docs/cdktf/typescript/r/glue_catalog_database.html.markdown new file mode 100644 index 00000000000..6a42b2730f5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_catalog_database.html.markdown @@ -0,0 +1,125 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_catalog_database" +description: |- + Provides a Glue Catalog Database. +--- + + + +# Resource: aws_glue_catalog_database + +Provides a Glue Catalog Database Resource. You can refer to the [Glue Developer Guide](http://docs.aws.amazon.com/glue/latest/dg/populate-data-catalog.html) for a full explanation of the Glue Data Catalog functionality + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCatalogDatabase } from "./.gen/providers/aws/glue-catalog-database"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCatalogDatabase(this, "aws_glue_catalog_database", { + name: "MyCatalogDatabase", + }); + } +} + +``` + +### Create Table Default Permissions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GlueCatalogDatabase } from "./.gen/providers/aws/glue-catalog-database"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCatalogDatabase(this, "aws_glue_catalog_database", { + createTableDefaultPermission: [ + { + permissions: ["SELECT"], + principal: { + dataLakePrincipalIdentifier: "IAM_ALLOWED_PRINCIPALS", + }, + }, + ], + name: "MyCatalogDatabase", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `catalogId` - (Optional) ID of the Glue Catalog to create the database in. If omitted, this defaults to the AWS Account ID. +* `createTableDefaultPermission` - (Optional) Creates a set of default permissions on the table for principals. See [`createTableDefaultPermission`](#create_table_default_permission) below. +* `description` - (Optional) Description of the database. +* `locationUri` - (Optional) Location of the database (for example, an HDFS path). +* `name` - (Required) Name of the database. The acceptable characters are lowercase letters, numbers, and the underscore character. +* `parameters` - (Optional) List of key-value pairs that define parameters and properties of the database. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `targetDatabase` - (Optional) Configuration block for a target database for resource linking. See [`targetDatabase`](#target_database) below. + +### target_database + +* `catalogId` - (Required) ID of the Data Catalog in which the database resides. +* `databaseName` - (Required) Name of the catalog database. +* `region` - (Optional) Region of the target database. + +### create_table_default_permission + +* `permissions` - (Optional) The permissions that are granted to the principal. +* `principal` - (Optional) The principal who is granted permissions.. See [`principal`](#principal) below. + +#### principal + +* `dataLakePrincipalIdentifier` - (Optional) An identifier for the Lake Formation principal. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Glue Catalog Database. +* `id` - Catalog ID and name of the database. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Catalog Databases using the `catalogId:name`. If you have not set a Catalog ID specify the AWS Account ID that the database is in. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Catalog Databases using the `catalogId:name`. If you have not set a Catalog ID specify the AWS Account ID that the database is in. 
For example: + +```console +% terraform import aws_glue_catalog_database.database 123456789012:my_database +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_catalog_table.html.markdown b/website/docs/cdktf/typescript/r/glue_catalog_table.html.markdown new file mode 100644 index 00000000000..64c22b2f64f --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_catalog_table.html.markdown @@ -0,0 +1,232 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_catalog_table" +description: |- + Provides a Glue Catalog Table. +--- + + + +# Resource: aws_glue_catalog_table + +Provides a Glue Catalog Table Resource. You can refer to the [Glue Developer Guide](http://docs.aws.amazon.com/glue/latest/dg/populate-data-catalog.html) for a full explanation of the Glue Data Catalog functionality. + +## Example Usage + +### Basic Table + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCatalogTable } from "./.gen/providers/aws/glue-catalog-table"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCatalogTable(this, "aws_glue_catalog_table", { + databaseName: "MyCatalogDatabase", + name: "MyCatalogTable", + }); + } +} + +``` + +### Parquet Table for Athena + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCatalogTable } from "./.gen/providers/aws/glue-catalog-table"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCatalogTable(this, "aws_glue_catalog_table", { + databaseName: "MyCatalogDatabase", + name: "MyCatalogTable", + parameters: { + EXTERNAL: "TRUE", + "parquet.compression": "SNAPPY", + }, + storageDescriptor: { + columns: [ + { + name: "my_string", + type: "string", + }, + { + name: "my_double", + type: "double", + }, + { + comment: "", + name: "my_date", + type: "date", + }, + { + comment: "", + name: "my_bigint", + type: "bigint", + }, + { + comment: "", + name: "my_struct", + type: "struct", + }, + ], + inputFormat: + "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat", + location: "s3://my-bucket/event-streams/my-stream", + outputFormat: + "org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat", + serDeInfo: { + name: "my-stream", + parameters: { + "serialization.format": Token.asString(1), + }, + serializationLibrary: + "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", + }, + }, + tableType: "EXTERNAL_TABLE", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the table. For Hive compatibility, this must be entirely lowercase. +* `databaseName` - (Required) Name of the metadata database where the table metadata resides. For Hive compatibility, this must be all lowercase. + +The follow arguments are optional: + +* `catalogId` - (Optional) ID of the Glue Catalog and database to create the table in. 
If omitted, this defaults to the AWS Account ID plus the database name. +* `description` - (Optional) Description of the table. +* `owner` - (Optional) Owner of the table. +* `parameters` - (Optional) Properties associated with this table, as a list of key-value pairs. +* `partitionIndex` - (Optional) Configuration block for a maximum of 3 partition indexes. See [`partitionIndex`](#partition_index) below. +* `partitionKeys` - (Optional) Configuration block of columns by which the table is partitioned. Only primitive types are supported as partition keys. See [`partitionKeys`](#partition_keys) below. +* `retention` - (Optional) Retention time for this table. +* `storageDescriptor` - (Optional) Configuration block for information about the physical storage of this table. For more information, refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-tables.html#aws-glue-api-catalog-tables-StorageDescriptor). See [`storageDescriptor`](#storage_descriptor) below. +* `tableType` - (Optional) Type of this table (EXTERNAL_TABLE, VIRTUAL_VIEW, etc.). While optional, some Athena DDL queries such as `ALTER TABLE` and `SHOW CREATE TABLE` will fail if this argument is empty. +* `targetTable` - (Optional) Configuration block of a target table for resource linking. See [`targetTable`](#target_table) below. +* `viewExpandedText` - (Optional) If the table is a view, the expanded text of the view; otherwise null. +* `viewOriginalText` - (Optional) If the table is a view, the original text of the view; otherwise null. + +### partition_index + +~> **NOTE:** A `partitionIndex` cannot be added to an existing `glueCatalogTable`. +This will destroy and recreate the table, possibly resulting in data loss. +To add an index to an existing table, see the [`gluePartitionIndex` resource](/docs/providers/aws/r/glue_partition_index.html) for configuration details. + +* `indexName` - (Required) Name of the partition index. +* `keys` - (Required) Keys for the partition index. + +### partition_keys + +* `comment` - (Optional) Free-form text comment. +* `name` - (Required) Name of the Partition Key. +* `type` - (Optional) Datatype of data in the Partition Key. + +### storage_descriptor + +* `bucketColumns` - (Optional) List of reducer grouping columns, clustering columns, and bucketing columns in the table. +* `columns` - (Optional) Configuration block for columns in the table. See [`columns`](#columns) below. +* `compressed` - (Optional) Whether the data in the table is compressed. +* `inputFormat` - (Optional) Input format: SequenceFileInputFormat (binary), or TextInputFormat, or a custom format. +* `location` - (Optional) Physical location of the table. By default this takes the form of the warehouse location, followed by the database location in the warehouse, followed by the table name. +* `numberOfBuckets` - (Optional) Must be specified if the table contains any dimension columns. +* `outputFormat` - (Optional) Output format: SequenceFileOutputFormat (binary), or IgnoreKeyTextOutputFormat, or a custom format. +* `parameters` - (Optional) User-supplied properties in key-value form. +* `schemaReference` - (Optional) Object that references a schema stored in the AWS Glue Schema Registry. When creating a table, you can pass an empty list of columns for the schema, and instead use a schema reference. See [Schema Reference](#schema_reference) below. +* `serDeInfo` - (Optional) Configuration block for serialization and deserialization ("SerDe") information. 
See [`serDeInfo`](#ser_de_info) below.
+* `skewedInfo` - (Optional) Configuration block with information about values that appear very frequently in a column (skewed values). See [`skewedInfo`](#skewed_info) below.
+* `sortColumns` - (Optional) Configuration block for the sort order of each bucket in the table. See [`sortColumns`](#sort_columns) below.
+* `storedAsSubDirectories` - (Optional) Whether the table data is stored in subdirectories.
+
+#### columns
+
+* `comment` - (Optional) Free-form text comment.
+* `name` - (Required) Name of the Column.
+* `parameters` - (Optional) Key-value pairs defining properties associated with the column.
+* `type` - (Optional) Datatype of data in the Column.
+
+#### schema_reference
+
+* `schemaId` - (Optional) Configuration block that contains schema identity fields. Either this or the `schemaVersionId` has to be provided. See [`schemaId`](#schema_id) below.
+* `schemaVersionId` - (Optional) Unique ID assigned to a version of the schema. Either this or the `schemaId` has to be provided.
+* `schemaVersionNumber` - (Required) Version number of the schema.
+
+##### schema_id
+
+* `registryName` - (Optional) Name of the schema registry that contains the schema. Must be provided when `schemaName` is specified and conflicts with `schemaArn`.
+* `schemaArn` - (Optional) ARN of the schema. One of `schemaArn` or `schemaName` has to be provided.
+* `schemaName` - (Optional) Name of the schema. One of `schemaArn` or `schemaName` has to be provided.
+
+#### ser_de_info
+
+* `name` - (Optional) Name of the SerDe.
+* `parameters` - (Optional) Map of initialization parameters for the SerDe, in key-value form.
+* `serializationLibrary` - (Optional) Usually the class that implements the SerDe. An example is `org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe`.
+
+#### sort_columns
+
+* `column` - (Required) Name of the column.
+* `sortOrder` - (Required) Whether the column is sorted in ascending (`1`) or descending order (`0`).
+
+#### skewed_info
+
+* `skewedColumnNames` - (Optional) List of names of columns that contain skewed values.
+* `skewedColumnValueLocationMaps` - (Optional) Map of skewed values to the columns that contain them.
+* `skewedColumnValues` - (Optional) List of values that appear so frequently as to be considered skewed.
+
+### target_table
+
+* `catalogId` - (Required) ID of the Data Catalog in which the table resides.
+* `databaseName` - (Required) Name of the catalog database that contains the target table.
+* `name` - (Required) Name of the target table.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the Glue Table.
+* `id` - Catalog ID, database name, and name of the table.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Tables using the catalog ID (usually AWS account ID), database name, and table name. For example:

+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Glue Tables using the catalog ID (usually AWS account ID), database name, and table name.
For example: + +```console +% terraform import aws_glue_catalog_table.MyTable 123456789012:MyDatabase:MyTable +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_classifier.html.markdown b/website/docs/cdktf/typescript/r/glue_classifier.html.markdown new file mode 100644 index 00000000000..144214c99a8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_classifier.html.markdown @@ -0,0 +1,177 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_classifier" +description: |- + Provides an Glue Classifier resource. +--- + + + +# Resource: aws_glue_classifier + +Provides a Glue Classifier resource. + +~> **NOTE:** It is only valid to create one type of classifier (csv, grok, JSON, or XML). Changing classifier types will recreate the classifier. + +## Example Usage + +### Csv Classifier + +```terraform +resource "aws_glue_classifier" "example" { + name = "example" + + csv_classifier { + allow_single_column = false + contains_header = "PRESENT" + delimiter = "," + disable_value_trimming = false + header = ["example1", "example2"] + quote_symbol = "'" + } +} +``` + +### Grok Classifier + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueClassifier } from "./.gen/providers/aws/glue-classifier"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueClassifier(this, "example", { + grokClassifier: { + classification: "example", + grokPattern: "example", + }, + name: "example", + }); + } +} + +``` + +### JSON Classifier + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueClassifier } from "./.gen/providers/aws/glue-classifier"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueClassifier(this, "example", { + jsonClassifier: { + jsonPath: "example", + }, + name: "example", + }); + } +} + +``` + +### XML Classifier + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueClassifier } from "./.gen/providers/aws/glue-classifier"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueClassifier(this, "example", { + name: "example", + xmlClassifier: { + classification: "example", + rowTag: "example", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `csvClassifier` - (Optional) A classifier for Csv content. Defined below. +* `grokClassifier` – (Optional) A classifier that uses grok patterns. Defined below. +* `jsonClassifier` – (Optional) A classifier for JSON content. Defined below. +* `name` – (Required) The name of the classifier. 
+
+* `xmlClassifier` – (Optional) A classifier for XML content. Defined below.
+
+### csv_classifier
+
+* `allowSingleColumn` - (Optional) Enables the processing of files that contain only one column.
+* `containsHeader` - (Optional) Indicates whether the CSV file contains a header. This can be one of "ABSENT", "PRESENT", or "UNKNOWN".
+* `customDatatypeConfigured` - (Optional) Enables the configuration of custom datatypes.
+* `customDatatypes` - (Optional) A list of supported custom datatypes. Valid values are `BINARY`, `BOOLEAN`, `DATE`, `DECIMAL`, `DOUBLE`, `FLOAT`, `INT`, `LONG`, `SHORT`, `STRING`, `TIMESTAMP`.
+* `delimiter` - (Optional) The delimiter used in the Csv to separate columns.
+* `disableValueTrimming` - (Optional) Specifies whether to trim column values.
+* `header` - (Optional) A list of strings representing column names.
+* `quoteSymbol` - (Optional) A custom symbol to denote what combines content into a single column value. It must be different from the column delimiter.
+
+### grok_classifier
+
+* `classification` - (Required) An identifier of the data format that the classifier matches, such as Twitter, JSON, Omniture logs, Amazon CloudWatch Logs, and so on.
+* `customPatterns` - (Optional) Custom grok patterns used by this classifier.
+* `grokPattern` - (Required) The grok pattern used by this classifier.
+
+### json_classifier
+
+* `jsonPath` - (Required) A `jsonPath` string defining the JSON data for the classifier to classify. AWS Glue supports a subset of `jsonPath`, as described in [Writing JsonPath Custom Classifiers](https://docs.aws.amazon.com/glue/latest/dg/custom-classifier.html#custom-classifier-json).
+
+### xml_classifier
+
+* `classification` - (Required) An identifier of the data format that the classifier matches.
+* `rowTag` - (Required) The XML tag designating the element that contains each record in an XML document being parsed. Note that this cannot identify a self-closing element (closed by `/>`). An empty row element that contains only attributes can be parsed as long as it ends with a closing tag (for example, `<row item_a="A" item_b="B"></row>` is okay, but `<row item_a="A" item_b="B" />` is not).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the classifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Classifiers using their name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Glue Classifiers using their name. For example:
+
+```console
+% terraform import aws_glue_classifier.MyClassifier MyClassifier
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/glue_connection.html.markdown b/website/docs/cdktf/typescript/r/glue_connection.html.markdown
new file mode 100644
index 00000000000..df9f62d7399
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/glue_connection.html.markdown
@@ -0,0 +1,162 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_connection"
+description: |-
+  Provides a Glue Connection resource.
+--- + + + +# Resource: aws_glue_connection + +Provides a Glue Connection resource. + +## Example Usage + +### Non-VPC Connection + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueConnection } from "./.gen/providers/aws/glue-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueConnection(this, "example", { + connectionProperties: { + JDBC_CONNECTION_URL: "jdbc:mysql://example.com/exampledatabase", + PASSWORD: "examplepassword", + USERNAME: "exampleusername", + }, + name: "example", + }); + } +} + +``` + +### Non-VPC Connection with secret manager reference + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSecretmanagerSecret } from "./.gen/providers/aws/"; +import { GlueConnection } from "./.gen/providers/aws/glue-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsSecretmanagerSecret(this, "example", { + name: "example-secret", + }); + const awsGlueConnectionExample = new GlueConnection(this, "example_1", { + connectionProperties: { + JDBC_CONNECTION_URL: "jdbc:mysql://example.com/exampledatabase", + SECRET_ID: Token.asString(example.name), + }, + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlueConnectionExample.overrideLogicalId("example"); + } +} + +``` + +### VPC Connection + +For more information, see the [AWS Documentation](https://docs.aws.amazon.com/glue/latest/dg/populate-add-connection.html#connection-JDBC-VPC). + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueConnection } from "./.gen/providers/aws/glue-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueConnection(this, "example", { + connectionProperties: { + JDBC_CONNECTION_URL: + "jdbc:mysql://${" + + awsRdsClusterExample.endpoint + + "}/exampledatabase", + PASSWORD: "examplepassword", + USERNAME: "exampleusername", + }, + name: "example", + physicalConnectionRequirements: { + availabilityZone: Token.asString(awsSubnetExample.availabilityZone), + securityGroupIdList: [Token.asString(awsSecurityGroupExample.id)], + subnetId: Token.asString(awsSubnetExample.id), + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `catalogId` – (Optional) The ID of the Data Catalog in which to create the connection. If none is supplied, the AWS account ID is used by default. 
+* `connectionProperties` – (Optional) A map of key-value pairs used as parameters for this connection.
+* `connectionType` – (Optional) The type of the connection. Supported are: `CUSTOM`, `JDBC`, `KAFKA`, `MARKETPLACE`, `MONGODB`, and `NETWORK`. Defaults to `JDBC`.
+* `description` – (Optional) Description of the connection.
+* `matchCriteria` – (Optional) A list of criteria that can be used in selecting this connection.
+* `name` – (Required) The name of the connection.
+* `physicalConnectionRequirements` - (Optional) A map of physical connection requirements, such as VPC and SecurityGroup. Defined below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### physical_connection_requirements
+
+* `availabilityZone` - (Optional) The availability zone of the connection. This field is redundant and implied by `subnetId`, but is currently an API requirement.
+* `securityGroupIdList` - (Optional) The security group ID list used by the connection.
+* `subnetId` - (Optional) The subnet ID used by the connection.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Catalog ID and name of the connection.
+* `arn` - The ARN of the Glue Connection.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Connections using the `catalogId` (AWS account ID if not custom) and `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Glue Connections using the `catalogId` (AWS account ID if not custom) and `name`. For example:
+
+```console
+% terraform import aws_glue_connection.MyConnection 123456789012:MyConnection
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/glue_crawler.html.markdown b/website/docs/cdktf/typescript/r/glue_crawler.html.markdown
new file mode 100644
index 00000000000..75c87b6d96a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/glue_crawler.html.markdown
@@ -0,0 +1,343 @@
+---
+subcategory: "Glue"
+layout: "aws"
+page_title: "AWS: aws_glue_crawler"
+description: |-
+  Manages a Glue Crawler
+---
+
+
+
+# Resource: aws_glue_crawler
+
+Manages a Glue Crawler. More information can be found in the [AWS Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/add-crawler.html).
+
+## Example Usage
+
+### DynamoDB Target Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { GlueCrawler } from "./.gen/providers/aws/glue-crawler"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCrawler(this, "example", { + databaseName: Token.asString(awsGlueCatalogDatabaseExample.name), + dynamodbTarget: [ + { + path: "table-name", + }, + ], + name: "example", + role: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### JDBC Target Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCrawler } from "./.gen/providers/aws/glue-crawler"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCrawler(this, "example", { + databaseName: Token.asString(awsGlueCatalogDatabaseExample.name), + jdbcTarget: [ + { + connectionName: Token.asString(awsGlueConnectionExample.name), + path: "database-name/%", + }, + ], + name: "example", + role: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### S3 Target Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCrawler } from "./.gen/providers/aws/glue-crawler"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCrawler(this, "example", { + databaseName: Token.asString(awsGlueCatalogDatabaseExample.name), + name: "example", + role: Token.asString(awsIamRoleExample.arn), + s3Target: [ + { + path: "s3://${" + awsS3BucketExample.bucket + "}", + }, + ], + }); + } +} + +``` + +### Catalog Target Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCrawler } from "./.gen/providers/aws/glue-crawler"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCrawler(this, "example", { + catalogTarget: [ + { + databaseName: Token.asString(awsGlueCatalogDatabaseExample.name), + tables: [Token.asString(awsGlueCatalogTableExample.name)], + }, + ], + configuration: + '{\n "Version":1.0,\n "Grouping": {\n "TableGroupingPolicy": "CombineCompatibleSchemas"\n }\n}\n\n', + databaseName: Token.asString(awsGlueCatalogDatabaseExample.name), + name: "example", + role: Token.asString(awsIamRoleExample.arn), + schemaChangePolicy: { + deleteBehavior: "LOG", + }, + }); + } +} + +``` + +### MongoDB Target Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GlueCrawler } from "./.gen/providers/aws/glue-crawler"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCrawler(this, "example", { + databaseName: Token.asString(awsGlueCatalogDatabaseExample.name), + mongodbTarget: [ + { + connectionName: Token.asString(awsGlueConnectionExample.name), + path: "database-name/%", + }, + ], + name: "example", + role: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### Configuration Settings Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCrawler } from "./.gen/providers/aws/glue-crawler"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueCrawler(this, "events_crawler", { + configuration: Token.asString( + Fn.jsonencode({ + CrawlerOutput: { + Partitions: { + AddOrUpdateBehavior: "InheritFromTable", + }, + }, + Grouping: { + TableGroupingPolicy: "CombineCompatibleSchemas", + }, + Version: 1, + }) + ), + databaseName: glueDatabase.name, + name: "events_crawler_${" + environmentName.value + "}", + role: glueRole.arn, + s3Target: [ + { + path: "s3://${" + dataLakeBucket.bucket + "}", + }, + ], + schedule: "cron(0 1 * * ? *)", + tags: Token.asStringMap(tags.value), + }); + } +} + +``` + +## Argument Reference + +~> **NOTE:** Must specify at least one of `dynamodbTarget`, `jdbcTarget`, `s3Target`, `mongodbTarget` or `catalogTarget`. + +This argument supports the following arguments: + +* `databaseName` (Required) Glue database where results are written. +* `name` (Required) Name of the crawler. +* `role` (Required) The IAM role friendly name (including path without leading slash), or ARN of an IAM role, used by the crawler to access other resources. +* `classifiers` (Optional) List of custom classifiers. By default, all AWS classifiers are included in a crawl, but these custom classifiers always override the default classifiers for a given classification. +* `configuration` (Optional) JSON string of configuration information. For more details see [Setting Crawler Configuration Options](https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html). +* `description` (Optional) Description of the crawler. +* `deltaTarget` (Optional) List of nested Delta Lake target arguments. See [Delta Target](#delta-target) below. +* `dynamodbTarget` (Optional) List of nested DynamoDB target arguments. See [Dynamodb Target](#dynamodb-target) below. +* `jdbcTarget` (Optional) List of nested JBDC target arguments. See [JDBC Target](#jdbc-target) below. +* `s3Target` (Optional) List nested Amazon S3 target arguments. See [S3 Target](#s3-target) below. +* `mongodbTarget` (Optional) List nested MongoDB target arguments. See [MongoDB Target](#mongodb-target) below. +* `icebergTarget` (Optional) List nested Iceberg target arguments. See [Iceberg Target](#iceberg-target) below. +* `schedule` (Optional) A cron expression used to specify the schedule. For more information, see [Time-Based Schedules for Jobs and Crawlers](https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html). For example, to run something every day at 12:15 UTC, you would specify: `cron(15 12 * * ? *)`. 
+
+* `schemaChangePolicy` (Optional) Policy for the crawler's update and deletion behavior. See [Schema Change Policy](#schema-change-policy) below.
+* `lakeFormationConfiguration` (Optional) Specifies Lake Formation configuration settings for the crawler. See [Lake Formation Configuration](#lake-formation-configuration) below.
+* `lineageConfiguration` (Optional) Specifies data lineage configuration settings for the crawler. See [Lineage Configuration](#lineage-configuration) below.
+* `recrawlPolicy` (Optional) A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since the last crawler run. See [Recrawl Policy](#recrawl-policy) below.
+* `securityConfiguration` (Optional) The name of the Security Configuration to be used by the crawler.
+* `tablePrefix` (Optional) The table prefix used for catalog tables that are created.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Dynamodb Target
+
+* `path` - (Required) The name of the DynamoDB table to crawl.
+* `scanAll` - (Optional) Indicates whether to scan all the records, or to sample rows from the table. Scanning all the records can take a long time when the table is not a high throughput table. Defaults to `true`.
+* `scanRate` - (Optional) The percentage of the configured read capacity units to use by the AWS Glue crawler. The valid values are null or a value between 0.1 and 1.5.
+
+### JDBC Target
+
+* `connectionName` - (Required) The name of the connection to use to connect to the JDBC target.
+* `path` - (Required) The path of the JDBC target.
+* `exclusions` - (Optional) A list of glob patterns used to exclude from the crawl.
+* `enableAdditionalMetadata` - (Optional) Specify a value of `RAWTYPES` or `COMMENTS` to enable additional metadata in table responses. `RAWTYPES` provides the native-level datatype. `COMMENTS` provides comments associated with a column or table in the database.
+
+### S3 Target
+
+* `path` - (Required) The path to the Amazon S3 target.
+* `connectionName` - (Optional) The name of a connection which allows the crawler to access data in S3 within a VPC.
+* `exclusions` - (Optional) A list of glob patterns used to exclude from the crawl.
+* `sampleSize` - (Optional) Sets the number of files in each leaf folder to be crawled when crawling sample files in a dataset. If not set, all the files are crawled. A valid value is an integer between 1 and 249.
+* `eventQueueArn` - (Optional) The ARN of the SQS queue to receive S3 notifications from.
+* `dlqEventQueueArn` - (Optional) The ARN of the dead-letter SQS queue.
+
+### Catalog Target
+
+* `connectionName` - (Optional) The name of the connection for an Amazon S3-backed Data Catalog table to be a target of the crawl when using a Catalog connection type paired with a `NETWORK` Connection type.
+* `databaseName` - (Required) The name of the Glue database to be synchronized.
+* `tables` - (Required) A list of catalog tables to be synchronized.
+* `eventQueueArn` - (Optional) A valid Amazon SQS ARN.
+* `dlqEventQueueArn` - (Optional) A valid Amazon SQS ARN.
+
+~> **Note:** `deletionBehavior` of catalog target doesn't support `DEPRECATE_IN_DATABASE`.
+
+-> **Note:** `configuration` for catalog target crawlers will have `{ ... "Grouping": { "TableGroupingPolicy": "CombineCompatibleSchemas"} }` by default.
+
+### MongoDB Target
+
+* `connectionName` - (Required) The name of the connection to use to connect to the Amazon DocumentDB or MongoDB target.
+* `path` - (Required) The path of the Amazon DocumentDB or MongoDB target (database/collection).
+* `scanAll` - (Optional) Indicates whether to scan all the records, or to sample rows from the table. Scanning all the records can take a long time when the table is not a high throughput table. Default value is `true`.
+
+### Iceberg Target
+
+* `connectionName` - (Optional) The name of the connection to use to connect to the Iceberg target.
+* `paths` - (Required) One or more Amazon S3 paths that contain Iceberg metadata folders as s3://bucket/prefix.
+* `exclusions` - (Optional) A list of glob patterns used to exclude from the crawl.
+* `maximumTraversalDepth` - (Required) The maximum depth of Amazon S3 paths that the crawler can traverse to discover the Iceberg metadata folder in your Amazon S3 path. Used to limit the crawler run time. Valid values are between `1` and `20`.
+
+### Delta Target
+
+* `connectionName` - (Optional) The name of the connection to use to connect to the Delta table target.
+* `createNativeDeltaTable` (Optional) Specifies whether the crawler will create native tables, to allow integration with query engines that support querying of the Delta transaction log directly.
+* `deltaTables` - (Required) A list of the Amazon S3 paths to the Delta tables.
+* `writeManifest` - (Required) Specifies whether to write the manifest files to the Delta table path.
+
+### Schema Change Policy
+
+* `deleteBehavior` - (Optional) The deletion behavior when the crawler finds a deleted object. Valid values: `LOG`, `DELETE_FROM_DATABASE`, or `DEPRECATE_IN_DATABASE`. Defaults to `DEPRECATE_IN_DATABASE`.
+* `updateBehavior` - (Optional) The update behavior when the crawler finds a changed schema. Valid values: `LOG` or `UPDATE_IN_DATABASE`. Defaults to `UPDATE_IN_DATABASE`.
+
+### Lake Formation Configuration
+
+* `accountId` - (Optional) Required for cross-account crawls. For same-account crawls as the target data, this can be omitted.
+* `useLakeFormationCredentials` - (Optional) Specifies whether to use Lake Formation credentials for the crawler instead of the IAM role credentials.
+
+### Lineage Configuration
+
+* `crawlerLineageSettings` - (Optional) Specifies whether data lineage is enabled for the crawler. Valid values are: `ENABLE` and `DISABLE`. Default value is `DISABLE`.
+
+### Recrawl Policy
+
+* `recrawlBehavior` - (Optional) Specifies whether to crawl the entire dataset again, crawl only folders that were added since the last crawler run, or crawl what S3 notifies the crawler of via SQS. Valid values are: `CRAWL_EVENT_MODE`, `CRAWL_EVERYTHING` and `CRAWL_NEW_FOLDERS_ONLY`. Default value is `CRAWL_EVERYTHING`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Crawler name.
+* `arn` - The ARN of the crawler.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Crawlers using `name`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Crawlers using `name`. For example: + +```console +% terraform import aws_glue_crawler.MyJob MyJob +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_data_catalog_encryption_settings.html.markdown b/website/docs/cdktf/typescript/r/glue_data_catalog_encryption_settings.html.markdown new file mode 100644 index 00000000000..8618608b1a0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_data_catalog_encryption_settings.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_data_catalog_encryption_settings" +description: |- + Provides a Glue Data Catalog Encryption Settings resource. +--- + + + +# Resource: aws_glue_data_catalog_encryption_settings + +Provides a Glue Data Catalog Encryption Settings resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueDataCatalogEncryptionSettings } from "./.gen/providers/aws/glue-data-catalog-encryption-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueDataCatalogEncryptionSettings(this, "example", { + dataCatalogEncryptionSettings: { + connectionPasswordEncryption: { + awsKmsKeyId: test.arn, + returnConnectionPasswordEncrypted: true, + }, + encryptionAtRest: { + catalogEncryptionMode: "SSE-KMS", + sseAwsKmsKeyId: test.arn, + }, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dataCatalogEncryptionSettings` – (Required) The security configuration to set. see [Data Catalog Encryption Settings](#data_catalog_encryption_settings). +* `catalogId` – (Optional) The ID of the Data Catalog to set the security configuration for. If none is provided, the AWS account ID is used by default. + +### data_catalog_encryption_settings + +* `connectionPasswordEncryption` - (Required) When connection password protection is enabled, the Data Catalog uses a customer-provided key to encrypt the password as part of CreateConnection or UpdateConnection and store it in the ENCRYPTED_PASSWORD field in the connection properties. You can enable catalog encryption or only password encryption. see [Connection Password Encryption](#connection_password_encryption). +* `encryptionAtRest` - (Required) Specifies the encryption-at-rest configuration for the Data Catalog. see [Encryption At Rest](#encryption_at_rest). + +### connection_password_encryption + +* `returnConnectionPasswordEncrypted` - (Required) When set to `true`, passwords remain encrypted in the responses of GetConnection and GetConnections. This encryption takes effect independently of the catalog encryption. +* `awsKmsKeyId` - (Optional) A KMS key ARN that is used to encrypt the connection password. 
If connection password protection is enabled, the caller of CreateConnection and UpdateConnection needs at least `kms:encrypt` permission on the specified AWS KMS key, to encrypt passwords before storing them in the Data Catalog. + +### encryption_at_rest + +* `catalogEncryptionMode` - (Required) The encryption-at-rest mode for encrypting Data Catalog data. Valid values are `disabled` and `sseKms`. +* `sseAwsKmsKeyId` - (Optional) The ARN of the AWS KMS key to use for encryption at rest. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Data Catalog to set the security configuration for. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Data Catalog Encryption Settings using `catalogId` (AWS account ID if not custom). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Data Catalog Encryption Settings using `catalogId` (AWS account ID if not custom). For example: + +```console +% terraform import aws_glue_data_catalog_encryption_settings.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_data_quality_ruleset.html.markdown b/website/docs/cdktf/typescript/r/glue_data_quality_ruleset.html.markdown new file mode 100644 index 00000000000..e5083f76c8d --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_data_quality_ruleset.html.markdown @@ -0,0 +1,165 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_data_quality_ruleset" +description: |- + Provides a Glue Data Quality Ruleset. +--- + + + +# Resource: aws_glue_data_quality_ruleset + +Provides a Glue Data Quality Ruleset Resource. You can refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/glue-data-quality.html) for a full explanation of the Glue Data Quality Ruleset functionality. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueDataQualityRuleset } from "./.gen/providers/aws/glue-data-quality-ruleset"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueDataQualityRuleset(this, "example", { + name: "example", + ruleset: 'Rules = [Completeness \\"colA\\" between 0.4 and 0.8]', + }); + } +} + +``` + +### With description + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GlueDataQualityRuleset } from "./.gen/providers/aws/glue-data-quality-ruleset"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueDataQualityRuleset(this, "example", { + description: "example", + name: "example", + ruleset: 'Rules = [Completeness \\"colA\\" between 0.4 and 0.8]', + }); + } +} + +``` + +### With tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueDataQualityRuleset } from "./.gen/providers/aws/glue-data-quality-ruleset"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueDataQualityRuleset(this, "example", { + name: "example", + ruleset: 'Rules = [Completeness \\"colA\\" between 0.4 and 0.8]', + tags: { + hello: "world", + }, + }); + } +} + +``` + +### With target_table + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueDataQualityRuleset } from "./.gen/providers/aws/glue-data-quality-ruleset"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueDataQualityRuleset(this, "example", { + name: "example", + ruleset: 'Rules = [Completeness \\"colA\\" between 0.4 and 0.8]', + targetTable: { + databaseName: Token.asString(awsGlueCatalogDatabaseExample.name), + tableName: Token.asString(awsGlueCatalogTableExample.name), + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the data quality ruleset. +* `name` - (Required, Forces new resource) Name of the data quality ruleset. +* `ruleset` - (Optional) A Data Quality Definition Language (DQDL) ruleset. For more information, see the AWS Glue developer guide. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `targetTable` - (Optional, Forces new resource) A configuration block specifying a target table associated with the data quality ruleset. See [`targetTable`](#target_table) below. + +### target_table + +* `catalogId` - (Optional, Forces new resource) The catalog ID where the AWS Glue table exists. +* `databaseName` - (Required, Forces new resource) Name of the database where the AWS Glue table exists. +* `tableName` - (Required, Forces new resource) Name of the AWS Glue table. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Glue Data Quality Ruleset. +* `createdOn` - The time and date that this data quality ruleset was created. +* `lastModifiedOn` - The time and date that this data quality ruleset was last modified. 
+* `recommendationRunId` - When a ruleset was created from a recommendation run, this run ID is generated to link the two together. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Data Quality Ruleset using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Data Quality Ruleset using the `name`. For example: + +```console +% terraform import aws_glue_data_quality_ruleset.example exampleName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_dev_endpoint.markdown b/website/docs/cdktf/typescript/r/glue_dev_endpoint.markdown new file mode 100644 index 00000000000..5b438349c47 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_dev_endpoint.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_dev_endpoint" +description: |- + Provides a Glue Development Endpoint resource. +--- + + + +# Resource: aws_glue_dev_endpoint + +Provides a Glue Development Endpoint resource. + +## Example Usage + +Basic usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { GlueDevEndpoint } from "./.gen/providers/aws/glue-dev-endpoint"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["glue.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const awsIamRoleExample = new IamRole(this, "example_1", { + assumeRolePolicy: Token.asString(example.json), + name: "AWSGlueServiceRole-foo", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + new IamRolePolicyAttachment(this, "example-AWSGlueServiceRole", { + policyArn: "arn:aws:iam::aws:policy/service-role/AWSGlueServiceRole", + role: Token.asString(awsIamRoleExample.name), + }); + const awsGlueDevEndpointExample = new GlueDevEndpoint(this, "example_3", { + name: "foo", + roleArn: Token.asString(awsIamRoleExample.arn), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsGlueDevEndpointExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `arguments` - (Optional) A map of arguments used to configure the endpoint. +* `extraJarsS3Path` - (Optional) Path to one or more Java Jars in an S3 bucket that should be loaded in this endpoint. +* `extraPythonLibsS3Path` - (Optional) Path(s) to one or more Python libraries in an S3 bucket that should be loaded in this endpoint. Multiple values must be complete paths separated by a comma. +* `glueVersion` - (Optional) Specifies the versions of Python and Apache Spark to use. Defaults to AWS Glue version 0.9. +* `name` - (Required) The name of this endpoint. It must be unique in your account. +* `numberOfNodes` - (Optional) The number of AWS Glue Data Processing Units (DPUs) to allocate to this endpoint. Conflicts with `workerType`. +* `numberOfWorkers` - (Optional) The number of workers of a defined worker type that are allocated to this endpoint. This field is available only when you choose worker type G.1X or G.2X. +* `publicKey` - (Optional) The public key to be used by this endpoint for authentication. +* `publicKeys` - (Optional) A list of public keys to be used by this endpoint for authentication. +* `roleArn` - (Required) The IAM role for this endpoint. +* `securityConfiguration` - (Optional) The name of the Security Configuration structure to be used with this endpoint. +* `securityGroupIds` - (Optional) Security group IDs for the security groups to be used by this endpoint. +* `subnetId` - (Optional) The subnet ID for the new endpoint to use. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `workerType` - (Optional) The type of predefined worker that is allocated to this endpoint. Accepts a value of Standard, G.1X, or G.2X. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the endpoint. +* `name` - The name of the new endpoint. +* `privateAddress` - A private IP address to access the endpoint within a VPC, if this endpoint is created within one. +* `publicAddress` - The public IP address used by this endpoint. The PublicAddress field is present only when you create a non-VPC endpoint. +* `yarnEndpointAddress` - The YARN endpoint address used by this endpoint. +* `zeppelinRemoteSparkInterpreterPort` - The Apache Zeppelin port for the remote Apache Spark interpreter. +* `availabilityZone` - The AWS availability zone where this endpoint is located. +* `vpcId` - The ID of the VPC used by this endpoint. +* `status` - The current status of this endpoint. +* `failureReason` - The reason for a current failure in this endpoint. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Glue Development Endpoint using the `name`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a Glue Development Endpoint using the `name`. For example: + +```console +% terraform import aws_glue_dev_endpoint.example foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_job.html.markdown b/website/docs/cdktf/typescript/r/glue_job.html.markdown new file mode 100644 index 00000000000..d13dbe16412 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_job.html.markdown @@ -0,0 +1,249 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_job" +description: |- + Provides a Glue Job resource. +--- + + + +# Resource: aws_glue_job + +Provides a Glue Job resource. + +-> Glue functionality, such as monitoring and logging of jobs, is typically managed with the `defaultArguments` argument. See the [Special Parameters Used by AWS Glue](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-programming-etl-glue-arguments.html) topic in the Glue developer guide for additional information. + +## Example Usage + +### Python Job + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueJob } from "./.gen/providers/aws/glue-job"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueJob(this, "example", { + command: { + scriptLocation: "s3://${" + awsS3BucketExample.bucket + "}/example.py", + }, + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### Ray Job + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueJob } from "./.gen/providers/aws/glue-job"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueJob(this, "example", { + command: { + name: "glueray", + pythonVersion: "3.9", + runtime: "Ray2.4", + scriptLocation: "s3://${" + awsS3BucketExample.bucket + "}/example.py", + }, + glueVersion: "4.0", + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + workerType: "Z.2X", + }); + } +} + +``` + +### Scala Job + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GlueJob } from "./.gen/providers/aws/glue-job"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueJob(this, "example", { + command: { + scriptLocation: + "s3://${" + awsS3BucketExample.bucket + "}/example.scala", + }, + defaultArguments: { + "--job-language": "scala", + }, + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### Streaming Job + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueJob } from "./.gen/providers/aws/glue-job"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueJob(this, "example", { + command: { + name: "gluestreaming", + scriptLocation: + "s3://${" + awsS3BucketExample.bucket + "}/example.script", + }, + name: "example streaming job", + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### Enabling CloudWatch Logs and Metrics + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { GlueJob } from "./.gen/providers/aws/glue-job"; +interface MyConfig { + command: any; + name: any; + roleArn: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "example", + retentionInDays: 14, + }); + const awsGlueJobExample = new GlueJob(this, "example_1", { + defaultArguments: { + "--continuous-log-logGroup": example.name, + "--enable-continuous-cloudwatch-log": "true", + "--enable-continuous-log-filter": "true", + "--enable-metrics": "", + }, + command: config.command, + name: config.name, + roleArn: config.roleArn, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlueJobExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `command` – (Required) The command of the job. Defined below. +* `connections` – (Optional) The list of connections used for this job. +* `defaultArguments` – (Optional) The map of default arguments for this job. You can specify arguments here that your own job-execution script consumes, as well as arguments that AWS Glue itself consumes. For information about how to specify and consume your own Job arguments, see the [Calling AWS Glue APIs in Python](http://docs.aws.amazon.com/glue/latest/dg/aws-glue-programming-python-calling.html) topic in the developer guide. For information about the key-value pairs that AWS Glue consumes to set up your job, see the [Special Parameters Used by AWS Glue](http://docs.aws.amazon.com/glue/latest/dg/aws-glue-programming-python-glue-arguments.html) topic in the developer guide. 
+* `nonOverridableArguments` – (Optional) Non-overridable arguments for this job, specified as name-value pairs. +* `description` – (Optional) Description of the job. +* `executionProperty` – (Optional) Execution property of the job. Defined below. +* `glueVersion` - (Optional) The version of glue to use, for example "1.0". Ray jobs should set this to 4.0 or greater. For information about available versions, see the [AWS Glue Release Notes](https://docs.aws.amazon.com/glue/latest/dg/release-notes.html). +* `executionClass` - (Optional) Indicates whether the job is run with a standard or flexible execution class. The standard execution class is ideal for time-sensitive workloads that require fast job startup and dedicated resources. Valid values: `flex`, `standard`. +* `maxCapacity` – (Optional) The maximum number of AWS Glue data processing units (DPUs) that can be allocated when this job runs. Required when `pythonshell` is set, accepts either `0.0625` or `1.0`. Use `numberOfWorkers` and `workerType` arguments instead with `glueVersion` `2.0` and above. +* `maxRetries` – (Optional) The maximum number of times to retry this job if it fails. +* `name` – (Required) The name you assign to this job. It must be unique in your account. +* `notificationProperty` - (Optional) Notification property of the job. Defined below. +* `roleArn` – (Required) The ARN of the IAM role associated with this job. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `timeout` – (Optional) The job timeout in minutes. The default is 2880 minutes (48 hours) for `glueetl` and `pythonshell` jobs, and null (unlimited) for `gluestreaming` jobs. +* `securityConfiguration` - (Optional) The name of the Security Configuration to be associated with the job. +* `workerType` - (Optional) The type of predefined worker that is allocated when a job runs. Accepts a value of Standard, G.1X, G.2X, or G.025X for Spark jobs. Accepts the value Z.2X for Ray jobs. + * For the Standard worker type, each worker provides 4 vCPU, 16 GB of memory and a 50 GB disk, and 2 executors per worker. + * For the G.1X worker type, each worker maps to 1 DPU (4 vCPU, 16 GB of memory, 64 GB disk), and provides 1 executor per worker. Recommended for memory-intensive jobs. + * For the G.2X worker type, each worker maps to 2 DPU (8 vCPU, 32 GB of memory, 128 GB disk), and provides 1 executor per worker. Recommended for memory-intensive jobs. + * For the G.025X worker type, each worker maps to 0.25 DPU (2 vCPU, 4 GB of memory, 64 GB disk), and provides 1 executor per worker. Recommended for low volume streaming jobs. Only available for Glue version 3.0. + * For the Z.2X worker type, each worker maps to 2 M-DPU (8 vCPU, 64 GB of memory, 128 GB disk), and provides up to 8 Ray workers based on the autoscaler. +* `numberOfWorkers` - (Optional) The number of workers of a defined workerType that are allocated when a job runs. + +### command Argument Reference + +* `name` - (Optional) The name of the job command. Defaults to `glueetl`. Use `pythonshell` for Python Shell Job Type, `glueray` for Ray Job Type, or `gluestreaming` for Streaming Job Type. `maxCapacity` needs to be set if `pythonshell` is chosen. +* `scriptLocation` - (Required) Specifies the S3 path to a script that executes a job. 
+* `pythonVersion` - (Optional) The Python version being used to execute a Python shell job. Allowed values are 2, 3 or 3.9. Version 3 refers to Python 3.6. +* `runtime` - (Optional) In Ray jobs, runtime is used to specify the versions of Ray, Python and additional libraries available in your environment. This field is not used in other job types. For supported runtime environment values, see [Working with Ray jobs](https://docs.aws.amazon.com/glue/latest/dg/ray-jobs-section.html#author-job-ray-runtimes) in the Glue Developer Guide. + +### execution_property Argument Reference + +* `maxConcurrentRuns` - (Optional) The maximum number of concurrent runs allowed for a job. The default is 1. + +### notification_property Argument Reference + +* `notifyDelayAfter` - (Optional) After a job run starts, the number of minutes to wait before sending a job run delay notification. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Glue Job +* `id` - Job name +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Jobs using `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Jobs using `name`. For example: + +```console +% terraform import aws_glue_job.MyJob MyJob +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_ml_transform.html.markdown b/website/docs/cdktf/typescript/r/glue_ml_transform.html.markdown new file mode 100644 index 00000000000..afabd06a284 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_ml_transform.html.markdown @@ -0,0 +1,202 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_ml_transform" +description: |- + Provides a Glue ML Transform resource. +--- + + + +# Resource: aws_glue_ml_transform + +Provides a Glue ML Transform resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GlueCatalogDatabase } from "./.gen/providers/aws/glue-catalog-database"; +import { GlueCatalogTable } from "./.gen/providers/aws/glue-catalog-table"; +import { GlueMlTransform } from "./.gen/providers/aws/glue-ml-transform"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new GlueCatalogDatabase(this, "test", { + name: "example", + }); + const awsGlueCatalogTableTest = new GlueCatalogTable(this, "test_1", { + databaseName: test.name, + name: "example", + owner: "my_owner", + parameters: { + param1: "param1_val", + }, + partitionKeys: [ + { + comment: "my_column_1_comment", + name: "my_column_1", + type: "int", + }, + { + comment: "my_column_2_comment", + name: "my_column_2", + type: "string", + }, + ], + retention: 1, + storageDescriptor: { + bucketColumns: ["bucket_column_1"], + columns: [ + { + comment: "my_column1_comment", + name: "my_column_1", + type: "int", + }, + { + comment: "my_column2_comment", + name: "my_column_2", + type: "string", + }, + ], + compressed: false, + inputFormat: "SequenceFileInputFormat", + location: "my_location", + numberOfBuckets: 1, + outputFormat: "SequenceFileInputFormat", + parameters: { + param1: "param1_val", + }, + serDeInfo: { + name: "ser_de_name", + parameters: { + param1: "param_val_1", + }, + serializationLibrary: + "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe", + }, + skewedInfo: { + skewedColumnNames: ["my_column_1"], + skewedColumnValueLocationMaps: { + my_column_1: "my_column_1_val_loc_map", + }, + skewedColumnValues: ["skewed_val_1"], + }, + sortColumns: [ + { + column: "my_column_1", + sortOrder: 1, + }, + ], + storedAsSubDirectories: false, + }, + tableType: "VIRTUAL_VIEW", + viewExpandedText: "view_expanded_text_1", + viewOriginalText: "view_original_text_1", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlueCatalogTableTest.overrideLogicalId("test"); + const awsGlueMlTransformTest = new GlueMlTransform(this, "test_2", { + dependsOn: [awsIamRolePolicyAttachmentTest], + inputRecordTables: [ + { + databaseName: Token.asString(awsGlueCatalogTableTest.databaseName), + tableName: Token.asString(awsGlueCatalogTableTest.name), + }, + ], + name: "example", + parameters: { + findMatchesParameters: { + primaryKeyColumnName: "my_column_1", + }, + transformType: "FIND_MATCHES", + }, + roleArn: Token.asString(awsIamRoleTest.arn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlueMlTransformTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` – (Required) The name you assign to this ML Transform. It must be unique in your account. +* `inputRecordTables` - (Required) A list of AWS Glue table definitions used by the transform. see [Input Record Tables](#input_record_tables). +* `parameters` - (Required) The algorithmic parameters that are specific to the transform type used. Conditionally dependent on the transform type. see [Parameters](#parameters). +* `roleArn` – (Required) The ARN of the IAM role associated with this ML Transform. +* `description` – (Optional) Description of the ML Transform. +* `glueVersion` - (Optional) The version of glue to use, for example "1.0". 
For information about available versions, see the [AWS Glue Release Notes](https://docs.aws.amazon.com/glue/latest/dg/release-notes.html). +* `maxCapacity` – (Optional) The number of AWS Glue data processing units (DPUs) that are allocated to task runs for this transform. You can allocate from `2` to `100` DPUs; the default is `10`. `maxCapacity` is a mutually exclusive option with `numberOfWorkers` and `workerType`. +* `maxRetries` – (Optional) The maximum number of times to retry this ML Transform if it fails. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `timeout` – (Optional) The ML Transform timeout in minutes. The default is 2880 minutes (48 hours). +* `workerType` - (Optional) The type of predefined worker that is allocated when an ML Transform runs. Accepts a value of `standard`, `g1X`, or `g2X`. Required with `numberOfWorkers`. +* `numberOfWorkers` - (Optional) The number of workers of a defined `workerType` that are allocated when an ML Transform runs. Required with `workerType`. + +### input_record_tables + +* `databaseName` - (Required) A database name in the AWS Glue Data Catalog. +* `tableName` - (Required) A table name in the AWS Glue Data Catalog. +* `catalogId` - (Optional) A unique identifier for the AWS Glue Data Catalog. +* `connectionName`- (Optional) The name of the connection to the AWS Glue Data Catalog. + +### parameters + +* `transformType` - (Required) The type of machine learning transform. For information about the types of machine learning transforms, see [Creating Machine Learning Transforms](http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html). +* `findMatchesParameters` - (Required) The parameters for the find matches algorithm. see [Find Matches Parameters](#find_matches_parameters). + +#### find_matches_parameters + +* `accuracyCostTradeOff` - (Optional) The value that is selected when tuning your transform for a balance between accuracy and cost. +* `enforceProvidedLabels` - (Optional) The value to switch on or off to force the output to match the provided labels from users. +* `precisionRecallTradeOff` - (Optional) The value selected when tuning your transform for a balance between precision and recall. +* `primaryKeyColumnName` - (Optional) The name of a column that uniquely identifies rows in the source table. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Glue ML Transform. +* `id` - Glue ML Transform ID. +* `labelCount` - The number of labels available for this transform. +* `schema` - The object that represents the schema that this transform accepts. see [Schema](#schema). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### schema + +* `name` - The name of the column. +* `dataType` - The type of data in the column. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue ML Transforms using `id`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue ML Transforms using `id`. For example: + +```console +% terraform import aws_glue_ml_transform.example tfm-c2cafbe83b1c575f49eaca9939220e2fcd58e2d5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_partition.html.markdown b/website/docs/cdktf/typescript/r/glue_partition.html.markdown new file mode 100644 index 00000000000..520b1377656 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_partition.html.markdown @@ -0,0 +1,122 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_partition" +description: |- + Provides a Glue Partition. +--- + + + +# Resource: aws_glue_partition + +Provides a Glue Partition Resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GluePartition } from "./.gen/providers/aws/glue-partition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + // Registers a partition with the value "some-value" on an existing table. + new GluePartition(this, "example", { + databaseName: "some-database", + partitionValues: ["some-value"], + tableName: "some-table", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `databaseName` - (Required) Name of the metadata database where the table metadata resides. For Hive compatibility, this must be all lowercase. +* `partitionValues` - (Required) The values that define the partition. +* `catalogId` - (Optional) ID of the Glue Catalog and database to create the table in. If omitted, this defaults to the AWS Account ID plus the database name. +* `storageDescriptor` - (Optional) A [storage descriptor](#storage_descriptor) object containing information about the physical storage of this table. You can refer to the [Glue Developer Guide](https://docs.aws.amazon.com/glue/latest/dg/aws-glue-api-catalog-tables.html#aws-glue-api-catalog-tables-StorageDescriptor) for a full explanation of this object. +* `parameters` - (Optional) Properties associated with this table, as a list of key-value pairs. + +##### storage_descriptor + +* `columns` - (Optional) A list of the [Columns](#column) in the table. +* `location` - (Optional) The physical location of the table. By default this takes the form of the warehouse location, followed by the database location in the warehouse, followed by the table name. +* `inputFormat` - (Optional) The input format: SequenceFileInputFormat (binary), or TextInputFormat, or a custom format. +* `outputFormat` - (Optional) The output format: SequenceFileOutputFormat (binary), or IgnoreKeyTextOutputFormat, or a custom format. +* `compressed` - (Optional) True if the data in the table is compressed, or False if not. +* `numberOfBuckets` - (Optional) Must be specified if the table contains any dimension columns. 
+* `serDeInfo` - (Optional) [Serialization/deserialization (SerDe)](#ser_de_info) information. +* `bucketColumns` - (Optional) A list of reducer grouping columns, clustering columns, and bucketing columns in the table. +* `sortColumns` - (Optional) A list of [Order](#sort_columns) objects specifying the sort order of each bucket in the table. +* `parameters` - (Optional) User-supplied properties in key-value form. +* `skewedInfo` - (Optional) Information about values that appear very frequently in a column (skewed values). +* `storedAsSubDirectories` - (Optional) True if the table data is stored in subdirectories, or False if not. + +##### column + +* `name` - (Required) The name of the Column. +* `type` - (Optional) The datatype of data in the Column. +* `comment` - (Optional) Free-form text comment. + +##### ser_de_info + +* `name` - (Optional) Name of the SerDe. +* `parameters` - (Optional) A map of initialization parameters for the SerDe, in key-value form. +* `serializationLibrary` - (Optional) Usually the class that implements the SerDe. An example is: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe. + +##### sort_columns + +* `column` - (Required) The name of the column. +* `sortOrder` - (Required) Indicates that the column is sorted in ascending order (== 1) or in descending order (== 0). + +##### skewed_info + +* `skewedColumnNames` - (Optional) A list of names of columns that contain skewed values. +* `skewedColumnValueLocationMaps` - (Optional) A map of skewed values to the columns that contain them. +* `skewedColumnValues` - (Optional) A list of values that appear so frequently as to be considered skewed. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Partition ID. +* `creationTime` - The time at which the partition was created. +* `lastAnalyzedTime` - The last time at which column statistics were computed for this partition. +* `lastAccessedTime` - The last time at which the partition was accessed. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Partitions using the catalog ID (usually AWS account ID), database name, table name and partition values. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Partitions using the catalog ID (usually AWS account ID), database name, table name and partition values. For example: + +```console +% terraform import aws_glue_partition.part 123456789012:MyDatabase:MyTable:val1#val2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_partition_index.html.markdown b/website/docs/cdktf/typescript/r/glue_partition_index.html.markdown new file mode 100644 index 00000000000..b681b629c96 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_partition_index.html.markdown @@ -0,0 +1,171 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_partition_index" +description: |- + Provides a Glue Partition Index. 
+--- + + + +# Resource: aws_glue_partition_index + +Provides a Glue Partition Index resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCatalogDatabase } from "./.gen/providers/aws/glue-catalog-database"; +import { GlueCatalogTable } from "./.gen/providers/aws/glue-catalog-table"; +import { GluePartitionIndex } from "./.gen/providers/aws/glue-partition-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new GlueCatalogDatabase(this, "example", { + name: "example", + }); + const awsGlueCatalogTableExample = new GlueCatalogTable(this, "example_1", { + databaseName: example.name, + name: "example", + owner: "my_owner", + parameters: { + param1: "param1_val", + }, + partitionKeys: [ + { + comment: "my_column_1_comment", + name: "my_column_1", + type: "int", + }, + { + comment: "my_column_2_comment", + name: "my_column_2", + type: "string", + }, + ], + retention: 1, + storageDescriptor: { + bucketColumns: ["bucket_column_1"], + columns: [ + { + comment: "my_column1_comment", + name: "my_column_1", + type: "int", + }, + { + comment: "my_column2_comment", + name: "my_column_2", + type: "string", + }, + ], + compressed: false, + inputFormat: "SequenceFileInputFormat", + location: "my_location", + numberOfBuckets: 1, + outputFormat: "SequenceFileInputFormat", + parameters: { + param1: "param1_val", + }, + serDeInfo: { + name: "ser_de_name", + parameters: { + param1: "param_val_1", + }, + serializationLibrary: + "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe", + }, + skewedInfo: { + skewedColumnNames: ["my_column_1"], + skewedColumnValueLocationMaps: { + my_column_1: "my_column_1_val_loc_map", + }, + skewedColumnValues: ["skewed_val_1"], + }, + sortColumns: [ + { + column: "my_column_1", + sortOrder: 1, + }, + ], + storedAsSubDirectories: false, + }, + tableType: "VIRTUAL_VIEW", + viewExpandedText: "view_expanded_text_1", + viewOriginalText: "view_original_text_1", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlueCatalogTableExample.overrideLogicalId("example"); + const awsGluePartitionIndexExample = new GluePartitionIndex( + this, + "example_2", + { + databaseName: example.name, + partitionIndex: { + indexName: "example", + keys: ["my_column_1", "my_column_2"], + }, + tableName: Token.asString(awsGlueCatalogTableExample.name), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGluePartitionIndexExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `tableName` - (Required) Name of the table. For Hive compatibility, this must be entirely lowercase. +* `databaseName` - (Required) Name of the metadata database where the table metadata resides. For Hive compatibility, this must be all lowercase. +* `partitionIndex` - (Required) Configuration block for a partition index. See [`partitionIndex`](#partition_index) below. +* `catalogId` - (Optional) The catalog ID where the table resides. + +### partition_index + +* `indexName` - (Required) Name of the partition index. 
+* `keys` - (Required) Keys for the partition index. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Catalog ID, database name, table name, and index name. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10m`) +* `delete` - (Default `10m`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Partition Indexes using the catalog ID (usually AWS account ID), database name, table name, and index name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Partition Indexes using the catalog ID (usually AWS account ID), database name, table name, and index name. For example: + +```console +% terraform import aws_glue_partition_index.example 123456789012:MyDatabase:MyTable:index-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_registry.html.markdown b/website/docs/cdktf/typescript/r/glue_registry.html.markdown new file mode 100644 index 00000000000..8adbd545228 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_registry.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_registry" +description: |- + Provides a Glue Registry resource. +--- + + + +# Resource: aws_glue_registry + +Provides a Glue Registry resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueRegistry } from "./.gen/providers/aws/glue-registry"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueRegistry(this, "example", { + registryName: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `registryName` – (Required) The name of the registry. +* `description` – (Optional) A description of the registry. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Glue Registry. +* `id` - Amazon Resource Name (ARN) of Glue Registry. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Registries using `arn`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Registries using `arn`. For example: + +```console +% terraform import aws_glue_registry.example arn:aws:glue:us-west-2:123456789012:registry/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_resource_policy.html.markdown b/website/docs/cdktf/typescript/r/glue_resource_policy.html.markdown new file mode 100644 index 00000000000..292805ce8f8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_resource_policy.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_resource_policy" +description: |- + Provides a resource to configure the AWS Glue resource policy. +--- + + + +# Resource: aws_glue_resource_policy + +Provides a Glue resource policy. Only one can exist per region. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { GlueResourcePolicy } from "./.gen/providers/aws/glue-resource-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsPartitionCurrent = new DataAwsPartition(this, "current_1", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsPartitionCurrent.overrideLogicalId("current"); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_2", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const glueExamplePolicy = new DataAwsIamPolicyDocument( + this, + "glue-example-policy", + { + statement: [ + { + actions: ["glue:CreateTable"], + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [ + "arn:${" + + dataAwsPartitionCurrent.partition + + "}:glue:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:*", + ], + }, + ], + } + ); + new GlueResourcePolicy(this, "example", { + policy: Token.asString(glueExamplePolicy.json), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` – (Required) The policy to be applied to the AWS Glue Data Catalog. +* `enableHybrid` - (Optional) Indicates that you are using both methods to grant cross-account access. Valid values are `true` and `false`. Note that Terraform will not perform drift detection on this field as it is not returned on read. 
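+ +For instance, enabling hybrid mode on the resource from the example above might look like the following hand-written sketch (not `cdktf convert` output); the underlying Glue `PutResourcePolicy` API takes this field as the string `TRUE` or `FALSE`: + +```typescript +// Hypothetical variant of the example above: the same policy document, +// with hybrid mode enabled to indicate both methods of granting +// cross-account access are in use. +new GlueResourcePolicy(this, "example", { + enableHybrid: "TRUE", + policy: Token.asString(glueExamplePolicy.json), +}); +``` 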
+ +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Resource Policy using the account ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Resource Policy using the account ID. For example: + +```console +% terraform import aws_glue_resource_policy.Test 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_schema.html.markdown b/website/docs/cdktf/typescript/r/glue_schema.html.markdown new file mode 100644 index 00000000000..98fe11090b9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_schema.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_schema" +description: |- + Provides a Glue Schema resource. +--- + + + +# Resource: aws_glue_schema + +Provides a Glue Schema resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueSchema } from "./.gen/providers/aws/glue-schema"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueSchema(this, "example", { + compatibility: "NONE", + dataFormat: "AVRO", + registryArn: test.arn, + schemaDefinition: + '{\\"type\\": \\"record\\", \\"name\\": \\"r1\\", \\"fields\\": [ {\\"name\\": \\"f1\\", \\"type\\": \\"int\\"}, {\\"name\\": \\"f2\\", \\"type\\": \\"string\\"} ]}', + schemaName: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `schemaName` – (Required) The name of the schema. +* `registryArn` - (Required) The ARN of the Glue Registry to create the schema in. +* `dataFormat` - (Required) The data format of the schema definition. Valid values are `avro`, `json` and `protobuf`. +* `compatibility` - (Required) The compatibility mode of the schema. Valid values are: `none`, `disabled`, `backward`, `backwardAll`, `forward`, `forwardAll`, `full`, and `fullAll`. +* `schemaDefinition` - (Required) The schema definition using the `dataFormat` setting for `schemaName`. +* `description` – (Optional) A description of the schema. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the schema. +* `id` - Amazon Resource Name (ARN) of the schema. +* `registryName` - The name of the Glue Registry. +* `latestSchemaVersion` - The latest version of the schema associated with the returned schema definition. 
+ +* `nextSchemaVersion` - The next version of the schema associated with the returned schema definition. +* `schemaCheckpoint` - The version number of the checkpoint (the last time the compatibility mode was changed). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Schemas using `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Schemas using `arn`. For example: + +```console +% terraform import aws_glue_schema.example arn:aws:glue:us-west-2:123456789012:schema/example/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_security_configuration.html.markdown b/website/docs/cdktf/typescript/r/glue_security_configuration.html.markdown new file mode 100644 index 00000000000..8c520ed4cdc --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_security_configuration.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_security_configuration" +description: |- + Manages a Glue Security Configuration +--- + + + +# Resource: aws_glue_security_configuration + +Manages a Glue Security Configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueSecurityConfiguration } from "./.gen/providers/aws/glue-security-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueSecurityConfiguration(this, "example", { + encryptionConfiguration: { + cloudwatchEncryption: { + cloudwatchEncryptionMode: "DISABLED", + }, + jobBookmarksEncryption: { + jobBookmarksEncryptionMode: "DISABLED", + }, + s3Encryption: { + kmsKeyArn: Token.asString(dataAwsKmsKeyExample.arn), + s3EncryptionMode: "SSE-KMS", + }, + }, + name: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `encryptionConfiguration` – (Required) Configuration block containing encryption configuration. Detailed below. +* `name` – (Required) Name of the security configuration. + +### encryption_configuration Argument Reference + +* `cloudwatchEncryption` - (Required) A `cloudwatch_encryption` block as described below, which contains encryption configuration for CloudWatch. +* `jobBookmarksEncryption` - (Required) A `job_bookmarks_encryption` block as described below, which contains encryption configuration for job bookmarks. +* `s3Encryption` - (Required) An `s3_encryption` block as described below, which contains encryption configuration for S3 data. A sketch combining all three blocks follows this list.
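A minimal sketch combining the three required blocks, with every encryption mode disabled; this is an assumption-based example rather than one from the provider docs, and `kmsKeyArn` is omitted because it only applies to the KMS-backed modes: + +```typescript +// A hypothetical baseline security configuration with all encryption disabled. +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +import { GlueSecurityConfiguration } from "./.gen/providers/aws/glue-security-configuration"; + +class DisabledEncryptionStack extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueSecurityConfiguration(this, "example", { + name: "example-all-disabled", // assumption: any unique configuration name + encryptionConfiguration: { + cloudwatchEncryption: { cloudwatchEncryptionMode: "DISABLED" }, + jobBookmarksEncryption: { jobBookmarksEncryptionMode: "DISABLED" }, + s3Encryption: { s3EncryptionMode: "DISABLED" }, + }, + }); + } +} +```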
+ +#### cloudwatch_encryption Argument Reference + +* `cloudwatchEncryptionMode` - (Optional) Encryption mode to use for CloudWatch data. Valid values: `disabled`, `sseKms`. Default value: `disabled`. +* `kmsKeyArn` - (Optional) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data. + +#### job_bookmarks_encryption Argument Reference + +* `jobBookmarksEncryptionMode` - (Optional) Encryption mode to use for job bookmarks data. Valid values: `cseKms`, `disabled`. Default value: `disabled`. +* `kmsKeyArn` - (Optional) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data. + +#### s3_encryption Argument Reference + +* `s3EncryptionMode` - (Optional) Encryption mode to use for S3 data. Valid values: `disabled`, `sseKms`, `sseS3`. Default value: `disabled`. +* `kmsKeyArn` - (Optional) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Glue security configuration name + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Security Configurations using `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Security Configurations using `name`. For example: + +```console +% terraform import aws_glue_security_configuration.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_trigger.html.markdown b/website/docs/cdktf/typescript/r/glue_trigger.html.markdown new file mode 100644 index 00000000000..97a50bf9de1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_trigger.html.markdown @@ -0,0 +1,271 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_trigger" +description: |- + Manages a Glue Trigger resource. +--- + + + +# Resource: aws_glue_trigger + +Manages a Glue Trigger resource. + +## Example Usage + +### Conditional Trigger + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueTrigger } from "./.gen/providers/aws/glue-trigger"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueTrigger(this, "example", { + actions: [ + { + jobName: example1.name, + }, + ], + name: "example", + predicate: { + conditions: [ + { + jobName: example2.name, + state: "SUCCEEDED", + }, + ], + }, + type: "CONDITIONAL", + }); + } +} + +``` + +### On-Demand Trigger + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GlueTrigger } from "./.gen/providers/aws/glue-trigger"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueTrigger(this, "example", { + actions: [ + { + jobName: Token.asString(awsGlueJobExample.name), + }, + ], + name: "example", + type: "ON_DEMAND", + }); + } +} + +``` + +### Scheduled Trigger + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueTrigger } from "./.gen/providers/aws/glue-trigger"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueTrigger(this, "example", { + actions: [ + { + jobName: Token.asString(awsGlueJobExample.name), + }, + ], + name: "example", + schedule: "cron(15 12 * * ? *)", + type: "SCHEDULED", + }); + } +} + +``` + +### Conditional Trigger with Crawler Action + +**Note:** Triggers can have both a crawler action and a crawler condition; no combined example is provided here. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueTrigger } from "./.gen/providers/aws/glue-trigger"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueTrigger(this, "example", { + actions: [ + { + crawlerName: example1.name, + }, + ], + name: "example", + predicate: { + conditions: [ + { + jobName: example2.name, + state: "SUCCEEDED", + }, + ], + }, + type: "CONDITIONAL", + }); + } +} + +``` + +### Conditional Trigger with Crawler Condition + +**Note:** Triggers can have both a crawler action and a crawler condition; no combined example is provided here. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueTrigger } from "./.gen/providers/aws/glue-trigger"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueTrigger(this, "example", { + actions: [ + { + jobName: example1.name, + }, + ], + name: "example", + predicate: { + conditions: [ + { + crawlState: "SUCCEEDED", + crawlerName: example2.name, + }, + ], + }, + type: "CONDITIONAL", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `actions` – (Required) List of actions initiated by this trigger when it fires. See [Actions](#actions) below. +* `description` – (Optional) A description of the new trigger. +* `enabled` – (Optional) Start the trigger. Defaults to `true`. +* `name` – (Required) The name of the trigger. +* `predicate` – (Optional) A predicate to specify when the new trigger should fire. Required when trigger type is `conditional`. See [Predicate](#predicate) below. +* `schedule` – (Optional) A cron expression used to specify the schedule.
See [Time-Based Schedules for Jobs and Crawlers](https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html) for more information. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `startOnCreation` – (Optional) Set to true to start `scheduled` and `conditional` triggers when created. True is not supported for `onDemand` triggers. +* `type` – (Required) The type of trigger. Valid values are `conditional`, `event`, `onDemand`, and `scheduled`. +* `workflowName` - (Optional) A workflow with which the trigger should be associated. Every workflow graph (DAG) needs a starting trigger (`onDemand` or `scheduled` type) and can contain multiple additional `conditional` triggers. +* `eventBatchingCondition` - (Optional) Batch condition that must be met (specified number of events received or batch time window expired) before EventBridge event trigger fires. See [Event Batching Condition](#event-batching-condition); a sketch follows that section. + +### Actions + +* `arguments` - (Optional) Arguments to be passed to the job. You can specify arguments here that your own job-execution script consumes, as well as arguments that AWS Glue itself consumes. +* `crawlerName` - (Optional) The name of the crawler to be executed. Conflicts with `jobName`. +* `jobName` - (Optional) The name of a job to be executed. Conflicts with `crawlerName`. +* `timeout` - (Optional) The job run timeout in minutes. It overrides the timeout value of the job. +* `securityConfiguration` - (Optional) The name of the Security Configuration structure to be used with this action. +* `notificationProperty` - (Optional) Specifies configuration properties of a job run notification. See [Notification Property](#notification-property) details below. + +#### Notification Property + +* `notifyDelayAfter` - (Optional) After a job run starts, the number of minutes to wait before sending a job run delay notification. + +### Predicate + +* `conditions` - (Required) A list of the conditions that determine when the trigger will fire. See [Conditions](#conditions). +* `logical` - (Optional) How to handle multiple conditions. Defaults to `and`. Valid values are `and` or `any`. + +#### Conditions + +* `jobName` - (Optional) The name of the job to watch. If this is specified, `state` must also be specified. Conflicts with `crawlerName`. +* `state` - (Optional) The condition job state. Currently, the values supported are `succeeded`, `stopped`, `timeout` and `failed`. If this is specified, `jobName` must also be specified. Conflicts with `crawlerState`. +* `crawlerName` - (Optional) The name of the crawler to watch. If this is specified, `crawlState` must also be specified. Conflicts with `jobName`. +* `crawlState` - (Optional) The condition crawl state. Currently, the values supported are `running`, `succeeded`, `cancelled`, and `failed`. If this is specified, `crawlerName` must also be specified. Conflicts with `state`. +* `logicalOperator` - (Optional) A logical operator. Defaults to `equals`. + +### Event Batching Condition + +* `batchSize` - (Required) Number of events that must be received from Amazon EventBridge before EventBridge event trigger fires. +* `batchWindow` - (Optional) Window of time in seconds after which EventBridge event trigger fires. Window starts when first event is received. Default value is `900`.
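A minimal sketch of an `EVENT` trigger using this block; this is an assumption-based example rather than one from the provider docs, and the workflow and job names are hypothetical: + +```typescript +// A hypothetical EVENT trigger that fires once 50 events arrive or a +// 300-second window elapses, whichever comes first. +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +import { GlueTrigger } from "./.gen/providers/aws/glue-trigger"; + +class EventTriggerStack extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GlueTrigger(this, "example", { + name: "example-event-trigger", + type: "EVENT", + workflowName: "example-workflow", // assumption: an existing Glue workflow + actions: [ + { + jobName: "example-job", // assumption: an existing Glue job + }, + ], + eventBatchingCondition: { + batchSize: 50, + batchWindow: 300, + }, + }); + } +} +```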
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Glue Trigger +* `id` - Trigger name +* `state` - The current state of the trigger. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5M`) +- `delete` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Triggers using `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Triggers using `name`. For example: + +```console +% terraform import aws_glue_trigger.MyTrigger MyTrigger +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_user_defined_function.html.markdown b/website/docs/cdktf/typescript/r/glue_user_defined_function.html.markdown new file mode 100644 index 00000000000..600b63b8040 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_user_defined_function.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_user_defined_function" +description: |- + Provides a Glue User Defined Function. +--- + + + +# Resource: aws_glue_user_defined_function + +Provides a Glue User Defined Function Resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GlueCatalogDatabase } from "./.gen/providers/aws/glue-catalog-database"; +import { GlueUserDefinedFunction } from "./.gen/providers/aws/glue-user-defined-function"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new GlueCatalogDatabase(this, "example", { + name: "my_database", + }); + const awsGlueUserDefinedFunctionExample = new GlueUserDefinedFunction( + this, + "example_1", + { + catalogId: example.catalogId, + className: "class", + databaseName: example.name, + name: "my_func", + ownerName: "owner", + ownerType: "GROUP", + resourceUris: [ + { + resourceType: "ARCHIVE", + uri: "uri", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGlueUserDefinedFunctionExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the function. +* `catalogId` - (Optional) ID of the Glue Catalog to create the function in. If omitted, this defaults to the AWS Account ID. +* `databaseName` - (Required) The name of the Database in which to create the Function.
+ +* `className` - (Required) The Java class that contains the function code. +* `ownerName` - (Required) The owner of the function. +* `ownerType` - (Required) The owner type. Can be one of `user`, `role`, or `group`. +* `resourceUris` - (Optional) The configuration block for Resource URIs. See [resource uris](#resource-uris) below for more details. + +### Resource URIs + +* `resourceType` - (Required) The type of the resource. Can be one of `jar`, `file`, or `archive`. +* `uri` - (Required) The URI for accessing the resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Glue User Defined Function. +* `arn` - The ARN of the Glue User Defined Function. +* `createTime` - The time at which the function was created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue User Defined Functions using the `catalogId:databaseName:functionName`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue User Defined Functions using the `catalogId:databaseName:functionName`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example: + +```console +% terraform import aws_glue_user_defined_function.func 123456789012:my_database:my_func +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/glue_workflow.html.markdown b/website/docs/cdktf/typescript/r/glue_workflow.html.markdown new file mode 100644 index 00000000000..308c0ca1e71 --- /dev/null +++ b/website/docs/cdktf/typescript/r/glue_workflow.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Glue" +layout: "aws" +page_title: "AWS: aws_glue_workflow" +description: |- + Provides a Glue Workflow resource. +--- + + + +# Resource: aws_glue_workflow + +Provides a Glue Workflow resource. +The workflow graph (DAG) can be built using the `awsGlueTrigger` resource. +See the example below for creating a graph with four nodes (two triggers and two jobs). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { GlueTrigger } from "./.gen/providers/aws/glue-trigger"; +import { GlueWorkflow } from "./.gen/providers/aws/glue-workflow"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new GlueWorkflow(this, "example", { + name: "example", + }); + new GlueTrigger(this, "example-inner", { + actions: [ + { + jobName: "another-example-job", + }, + ], + name: "trigger-inner", + predicate: { + conditions: [ + { + jobName: "example-job", + state: "SUCCEEDED", + }, + ], + }, + type: "CONDITIONAL", + workflowName: example.name, + }); + new GlueTrigger(this, "example-start", { + actions: [ + { + jobName: "example-job", + }, + ], + name: "trigger-start", + type: "ON_DEMAND", + workflowName: example.name, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` – (Required) The name you assign to this workflow. +* `defaultRunProperties` – (Optional) A map of default run properties for this workflow. These properties are passed to all jobs associated with the workflow. +* `description` – (Optional) Description of the workflow. +* `maxConcurrentRuns` - (Optional) Prevents exceeding the maximum number of concurrent runs of any of the component jobs. If you leave this parameter blank, there is no limit to the number of concurrent workflow runs. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of Glue Workflow +* `id` - Workflow name +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Glue Workflows using `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Glue Workflows using `name`. For example: + +```console +% terraform import aws_glue_workflow.MyWorkflow MyWorkflow +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/grafana_license_association.html.markdown b/website/docs/cdktf/typescript/r/grafana_license_association.html.markdown new file mode 100644 index 00000000000..8f58a68567f --- /dev/null +++ b/website/docs/cdktf/typescript/r/grafana_license_association.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "Managed Grafana" +layout: "aws" +page_title: "AWS: aws_grafana_license_association" +description: |- + Provides an Amazon Managed Grafana workspace license association resource. +--- + + + +# Resource: aws_grafana_license_association + +Provides an Amazon Managed Grafana workspace license association resource.
+ +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GrafanaLicenseAssociation } from "./.gen/providers/aws/grafana-license-association"; +import { GrafanaWorkspace } from "./.gen/providers/aws/grafana-workspace"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assume = new IamRole(this, "assume", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "grafana.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "grafana-assume", + }); + const example = new GrafanaWorkspace(this, "example", { + accountAccessType: "CURRENT_ACCOUNT", + authenticationProviders: ["SAML"], + permissionType: "SERVICE_MANAGED", + roleArn: assume.arn, + }); + const awsGrafanaLicenseAssociationExample = new GrafanaLicenseAssociation( + this, + "example_2", + { + licenseType: "ENTERPRISE_FREE_TRIAL", + workspaceId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGrafanaLicenseAssociationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `licenseType` - (Required) The type of license for the workspace license association. Valid values are `enterprise` and `enterpriseFreeTrial`. +* `workspaceId` - (Required) The workspace id. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `freeTrialExpiration` - If `licenseType` is set to `enterpriseFreeTrial`, this is the expiration date of the free trial. +* `licenseExpiration` - If `licenseType` is set to `enterprise`, this is the expiration date of the enterprise license. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Grafana workspace license association using the workspace's `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Grafana workspace license association using the workspace's `id`. For example: + +```console +% terraform import aws_grafana_license_association.example g-2054c75a02 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/grafana_role_association.html.markdown b/website/docs/cdktf/typescript/r/grafana_role_association.html.markdown new file mode 100644 index 00000000000..7c5db4c779b --- /dev/null +++ b/website/docs/cdktf/typescript/r/grafana_role_association.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Managed Grafana" +layout: "aws" +page_title: "AWS: aws_grafana_role_association" +description: |- + Provides an Amazon Managed Grafana workspace role association resource. 
+--- + + + +# Resource: aws_grafana_role_association + +Provides an Amazon Managed Grafana workspace role association resource. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GrafanaRoleAssociation } from "./.gen/providers/aws/grafana-role-association"; +import { GrafanaWorkspace } from "./.gen/providers/aws/grafana-workspace"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assume = new IamRole(this, "assume", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "grafana.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "grafana-assume", + }); + const example = new GrafanaWorkspace(this, "example", { + accountAccessType: "CURRENT_ACCOUNT", + authenticationProviders: ["SAML"], + permissionType: "SERVICE_MANAGED", + roleArn: assume.arn, + }); + const awsGrafanaRoleAssociationExample = new GrafanaRoleAssociation( + this, + "example_2", + { + role: "ADMIN", + userIds: ["USER_ID_1", "USER_ID_2"], + workspaceId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsGrafanaRoleAssociationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `role` - (Required) The Grafana role. Valid values can be found [here](https://docs.aws.amazon.com/grafana/latest/APIReference/API_UpdateInstruction.html#ManagedGrafana-Type-UpdateInstruction-role). +* `workspaceId` - (Required) The workspace ID. + +The following arguments are optional: + +* `groupIds` - (Optional) The AWS SSO group IDs to be assigned the role given in `role`. +* `userIds` - (Optional) The AWS SSO user IDs to be assigned the role given in `role`. + +## Attribute Reference + +This resource exports no additional attributes.
+ */ +import { GrafanaWorkspace } from "./.gen/providers/aws/grafana-workspace"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assume = new IamRole(this, "assume", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "grafana.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "grafana-assume", + }); + new GrafanaWorkspace(this, "example", { + accountAccessType: "CURRENT_ACCOUNT", + authenticationProviders: ["SAML"], + permissionType: "SERVICE_MANAGED", + roleArn: assume.arn, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `accountAccessType` - (Required) The type of account access for the workspace. Valid values are `currentAccount` and `organization`. If `organization` is specified, then `organizationalUnits` must also be present. +* `authenticationProviders` - (Required) The authentication providers for the workspace. Valid values are `awsSso`, `saml`, or both. +* `permissionType` - (Required) The permission type of the workspace. If `serviceManaged` is specified, the IAM roles and IAM policy attachments are generated automatically. If `customerManaged` is specified, the IAM roles and IAM policy attachments will not be created. + +The following arguments are optional: + +* `configuration` - (Optional) The configuration string for the workspace that you create. For more information about the format and configuration options available, see [Working in your Grafana workspace](https://docs.aws.amazon.com/grafana/latest/userguide/AMG-configure-workspace.html). +* `dataSources` - (Optional) The data sources for the workspace. Valid values are `amazonOpensearchService`, `athena`, `cloudwatch`, `prometheus`, `redshift`, `sitewise`, `timestream`, `xray`. +* `description` - (Optional) The workspace description. +* `grafanaVersion` - (Optional) Specifies the version of Grafana to support in the new workspace. Supported values are `8.4` and `9.4`. If not specified, defaults to `8.4`. +* `name` - (Optional) The Grafana workspace name. +* `networkAccessControl` - (Optional) Configuration for network access to your workspace. See [Network Access Control](#network-access-control) below. +* `notificationDestinations` - (Optional) The notification destinations. If a data source is specified here, Amazon Managed Grafana will create IAM roles and permissions needed to use these destinations. Must be set to `sns`. +* `organizationRoleName` - (Optional) The role name that the workspace uses to access resources through Amazon Organizations. +* `organizationalUnits` - (Optional) The Amazon Organizations organizational units that the workspace is authorized to use data sources from. +* `roleArn` - (Optional) The IAM role ARN that the workspace assumes. +* `stackSetName` - (Optional) The AWS CloudFormation stack set name that provisions IAM roles to be used by the workspace. +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcConfiguration` - (Optional) The configuration settings for an Amazon VPC that contains data sources for your Grafana workspace to connect to. See [VPC Configuration](#vpc-configuration) below. + +### Network Access Control + +* `prefixListIds` - (Required) - An array of prefix list IDs. +* `vpceIds` - (Required) - An array of Amazon VPC endpoint IDs for the workspace. The only VPC endpoints that can be specified here are interface VPC endpoints for Grafana workspaces (using the com.amazonaws.[region].grafana-workspace service endpoint). Other VPC endpoints will be ignored. + +### VPC Configuration + +* `securityGroupIds` - (Required) - The list of Amazon EC2 security group IDs attached to the Amazon VPC for your Grafana workspace to connect. +* `subnetIds` - (Required) - The list of Amazon EC2 subnet IDs created in the Amazon VPC for your Grafana workspace to connect. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Grafana workspace. +* `endpoint` - The endpoint of the Grafana workspace. +* `grafanaVersion` - The version of Grafana running on the workspace. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Grafana Workspace using the workspace's `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Grafana Workspace using the workspace's `id`. For example: + +```console +% terraform import aws_grafana_workspace.example g-2054c75a02 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/grafana_workspace_api_key.html.markdown b/website/docs/cdktf/typescript/r/grafana_workspace_api_key.html.markdown new file mode 100644 index 00000000000..195d5f4c107 --- /dev/null +++ b/website/docs/cdktf/typescript/r/grafana_workspace_api_key.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "Managed Grafana" +layout: "aws" +page_title: "AWS: aws_grafana_workspace_api_key" +description: |- + Creates a Grafana API key for the workspace. This key can be used to authenticate requests sent to the workspace's HTTP API. +--- + + + +# Resource: aws_grafana_workspace_api_key + +Provides an Amazon Managed Grafana workspace API Key resource. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GrafanaWorkspaceApiKey } from "./.gen/providers/aws/grafana-workspace-api-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GrafanaWorkspaceApiKey(this, "key", { + keyName: "test-key", + keyRole: "VIEWER", + secondsToLive: 3600, + workspaceId: test.id, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +- `keyName` - (Required) Specifies the name of the API key. Key names must be unique to the workspace. +- `keyRole` - (Required) Specifies the permission level of the API key. Valid values are `viewer`, `editor`, or `admin`. +- `secondsToLive` - (Required) Specifies the time in seconds until the API key expires. Keys can be valid for up to 30 days. +- `workspaceId` - (Required) The ID of the workspace that the API key is valid for. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `key` - The key token in JSON format. Use this value as a bearer token to authenticate HTTP requests to the workspace. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/grafana_workspace_saml_configuration.html.markdown b/website/docs/cdktf/typescript/r/grafana_workspace_saml_configuration.html.markdown new file mode 100644 index 00000000000..8b871bf05ec --- /dev/null +++ b/website/docs/cdktf/typescript/r/grafana_workspace_saml_configuration.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "Managed Grafana" +layout: "aws" +page_title: "AWS: aws_grafana_workspace_saml_configuration" +description: |- + Provides an Amazon Managed Grafana workspace SAML configuration resource. +--- + + + +# Resource: aws_grafana_workspace_saml_configuration + +Provides an Amazon Managed Grafana workspace SAML configuration resource. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GrafanaWorkspace } from "./.gen/providers/aws/grafana-workspace"; +import { GrafanaWorkspaceSamlConfiguration } from "./.gen/providers/aws/grafana-workspace-saml-configuration"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assume = new IamRole(this, "assume", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "grafana.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "grafana-assume", + }); + const example = new GrafanaWorkspace(this, "example", { + accountAccessType: "CURRENT_ACCOUNT", + authenticationProviders: ["SAML"], + permissionType: "SERVICE_MANAGED", + roleArn: assume.arn, + }); + const awsGrafanaWorkspaceSamlConfigurationExample = + new GrafanaWorkspaceSamlConfiguration(this, "example_2", { + editorRoleValues: ["editor"], + idpMetadataUrl: "https://my_idp_metadata.url", + workspaceId: example.id, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsGrafanaWorkspaceSamlConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `editorRoleValues` - (Required) The editor role values. +* `workspaceId` - (Required) The workspace id. + +The following arguments are optional: + +* `adminRoleValues` - (Optional) The admin role values. +* `allowedOrganizations` - (Optional) The allowed organizations. +* `emailAssertion` - (Optional) The email assertion. +* `groupsAssertion` - (Optional) The groups assertion. +* `idpMetadataUrl` - (Optional) The IDP Metadata URL. Note that either `idpMetadataUrl` or `idpMetadataXml` (but not both) must be specified. +* `idpMetadataXml` - (Optional) The IDP Metadata XML. Note that either `idpMetadataUrl` or `idpMetadataXml` (but not both) must be specified. +* `loginAssertion` - (Optional) The login assertion. +* `loginValidityDuration` - (Optional) The login validity duration. +* `nameAssertion` - (Optional) The name assertion. +* `orgAssertion` - (Optional) The org assertion. +* `roleAssertion` - (Optional) The role assertion. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `status` - The status of the SAML configuration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Grafana Workspace SAML configuration using the workspace's `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Grafana Workspace SAML configuration using the workspace's `id`. For example: + +```console +% terraform import aws_grafana_workspace_saml_configuration.example g-2054c75a02 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/guardduty_detector.html.markdown b/website/docs/cdktf/typescript/r/guardduty_detector.html.markdown new file mode 100644 index 00000000000..13af0f29983 --- /dev/null +++ b/website/docs/cdktf/typescript/r/guardduty_detector.html.markdown @@ -0,0 +1,151 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_detector" +description: |- + Provides a resource to manage a GuardDuty detector +--- + + + +# Resource: aws_guardduty_detector + +Provides a resource to manage a GuardDuty detector. + +~> **NOTE:** Deleting this resource is equivalent to "disabling" GuardDuty for an AWS region, which removes all existing findings. You can set the `enable` attribute to `false` to instead "suspend" monitoring and feedback reporting while keeping existing data. See the [Suspending or Disabling Amazon GuardDuty documentation](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_suspend-disable.html) for more information. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GuarddutyDetector(this, "MyDetector", { + datasources: { + kubernetes: { + auditLogs: { + enable: false, + }, + }, + malwareProtection: { + scanEc2InstanceWithFindings: { + ebsVolumes: { + enable: true, + }, + }, + }, + s3Logs: { + enable: true, + }, + }, + enable: true, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `enable` - (Optional) Enable monitoring and feedback reporting. Setting to `false` is equivalent to "suspending" GuardDuty. Defaults to `true`. +* `findingPublishingFrequency` - (Optional) Specifies the frequency of notifications sent for subsequent finding occurrences. If the detector is a GuardDuty member account, the value is determined by the GuardDuty primary account and cannot be modified, otherwise defaults to `sixHours`. For standalone and GuardDuty primary accounts, it must be configured in Terraform to enable drift detection. Valid values for standalone and primary accounts: `fifteenMinutes`, `oneHour`, `sixHours`. See [AWS Documentation](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_findings_cloudwatch.html#guardduty_findings_cloudwatch_notification_frequency) for more information. +* `datasources` - (Optional) Describes which data sources will be enabled for the detector. See [Data Sources](#data-sources) below for more details. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Data Sources + +The `datasources` block supports the following: + +* `s3Logs` - (Optional) Configures [S3 protection](https://docs.aws.amazon.com/guardduty/latest/ug/s3-protection.html). + See [S3 Logs](#s3-logs) below for more details. +* `kubernetes` - (Optional) Configures [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html). + See [Kubernetes](#kubernetes) and [Kubernetes Audit Logs](#kubernetes-audit-logs) below for more details. +* `malwareProtection` - (Optional) Configures [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html). + See [Malware Protection](#malware-protection), [Scan EC2 instance with findings](#scan-ec2-instance-with-findings) and [EBS volumes](#ebs-volumes) below for more details. + +### S3 Logs + +The `s3Logs` block supports the following: + +* `enable` - (Required) If true, enables [S3 protection](https://docs.aws.amazon.com/guardduty/latest/ug/s3-protection.html). + Defaults to `true`. + +### Kubernetes + +The `kubernetes` block supports the following: + +* `auditLogs` - (Required) Configures Kubernetes audit logs as a data source for [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html). + See [Kubernetes Audit Logs](#kubernetes-audit-logs) below for more details. + +### Kubernetes Audit Logs + +The `auditLogs` block supports the following: + +* `enable` - (Required) If true, enables Kubernetes audit logs as a data source for [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html). + Defaults to `true`. 
+ +### Malware Protection + +The `malwareProtection` block supports the following: + +* `scanEc2InstanceWithFindings` - (Required) Configure whether [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) is enabled as data source for EC2 instances with findings for the detector. + See [Scan EC2 instance with findings](#scan-ec2-instance-with-findings) below for more details. + +#### Scan EC2 instance with findings + +The `scanEc2InstanceWithFindings` block supports the following: + +* `ebsVolumes` - (Required) Configure whether scanning EBS volumes is enabled as data source for the detector for instances with findings. + See [EBS volumes](#ebs-volumes) below for more details. + +#### EBS volumes + +The `ebsVolumes` block supports the following: + +* `enable` - (Required) If true, enables [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) as data source for the detector. + Defaults to `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `accountId` - The AWS account ID of the GuardDuty detector +* `arn` - Amazon Resource Name (ARN) of the GuardDuty detector +* `id` - The ID of the GuardDuty detector +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty detectors using the detector ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import GuardDuty detectors using the detector ID. For example: + +```console +% terraform import aws_guardduty_detector.MyDetector 00b00fd5aecc0ab60a708659477e9617 +``` + +The ID of the detector can be retrieved via the [AWS CLI](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/guardduty/list-detectors.html) using `aws guardduty list-detectors`. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/guardduty_filter.html.markdown b/website/docs/cdktf/typescript/r/guardduty_filter.html.markdown new file mode 100644 index 00000000000..6c20ce93f8e --- /dev/null +++ b/website/docs/cdktf/typescript/r/guardduty_filter.html.markdown @@ -0,0 +1,115 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_filter" +description: |- + Provides a resource to manage a GuardDuty filter +--- + + + +# Resource: aws_guardduty_filter + +Provides a resource to manage a GuardDuty filter. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details.
+ */ +import { GuarddutyFilter } from "./.gen/providers/aws/guardduty-filter"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new GuarddutyFilter(this, "MyFilter", { + action: "ARCHIVE", + detectorId: example.id, + findingCriteria: { + criterion: [ + { + equalTo: ["eu-west-1"], + field: "region", + }, + { + field: "service.additionalInfo.threatListName", + notEquals: ["some-threat", "another-threat"], + }, + { + field: "updatedAt", + greaterThan: "2020-01-01T00:00:00Z", + lessThan: "2020-02-01T00:00:00Z", + }, + { + field: "severity", + greaterThanOrEqual: "4", + }, + ], + }, + name: "MyFilter", + rank: 1, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `detectorId` - (Required) ID of a GuardDuty detector, attached to your account. +* `name` - (Required) The name of your filter. +* `description` - (Optional) Description of the filter. +* `rank` - (Required) Specifies the position of the filter in the list of current filters. Also specifies the order in which this filter is applied to the findings. +* `action` - (Required) Specifies the action that is to be applied to the findings that match the filter. Can be one of `archive` or `noop`. +* `tags` (Optional) - The tags that you want to add to the Filter resource. A tag consists of a key and a value. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `findingCriteria` (Required) - Represents the criteria to be used in the filter for querying findings. Contains one or more `criterion` blocks, documented [below](#criterion). + +### criterion + +The `criterion` block supports the following: + +* `field` - (Required) The name of the field to be evaluated. The full list of field names can be found in [AWS documentation](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_filter-findings.html#filter_criteria). +* `equalTo` - (Optional) List of string values to be evaluated. +* `notEquals` - (Optional) List of string values to be evaluated. +* `greaterThan` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `greaterThanOrEqual` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `lessThan` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `lessThanOrEqual` - (Optional) A value to be evaluated. Accepts either an integer or a date in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the GuardDuty filter. +* `id` - A compound field, consisting of the ID of the GuardDuty detector and the name of the filter. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty filters using the detector ID and filter's name separated by a colon. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import GuardDuty filters using the detector ID and filter's name separated by a colon. For example: + +```console +% terraform import aws_guardduty_filter.MyFilter 00b00fd5aecc0ab60a708659477e9617:MyFilter +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/guardduty_invite_accepter.html.markdown b/website/docs/cdktf/typescript/r/guardduty_invite_accepter.html.markdown new file mode 100644 index 00000000000..592583e62e3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/guardduty_invite_accepter.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_invite_accepter" +description: |- + Provides a resource to accept a pending GuardDuty invite on creation, ensure the detector has the correct primary account on read, and disassociate with the primary account upon removal. +--- + + + +# Resource: aws_guardduty_invite_accepter + +Provides a resource to accept a pending GuardDuty invite on creation, ensure the detector has the correct primary account on read, and disassociate with the primary account upon removal. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector"; +import { GuarddutyInviteAccepter } from "./.gen/providers/aws/guardduty-invite-accepter"; +import { GuarddutyMember } from "./.gen/providers/aws/guardduty-member"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new AwsProvider(this, "aws", { + alias: "primary", + }); + const member = new AwsProvider(this, "aws_1", { + alias: "member", + }); + const awsGuarddutyDetectorMember = new GuarddutyDetector(this, "member", { + provider: member, + }); + const awsGuarddutyDetectorPrimary = new GuarddutyDetector(this, "primary", { + provider: primary, + }); + const awsGuarddutyMemberMember = new GuarddutyMember(this, "member_4", { + accountId: Token.asString(awsGuarddutyDetectorMember.accountId), + detectorId: Token.asString(awsGuarddutyDetectorPrimary.id), + email: "required@example.com", + invite: true, + provider: primary, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsGuarddutyMemberMember.overrideLogicalId("member");
+    const awsGuarddutyInviteAccepterMember = new GuarddutyInviteAccepter(
+      this,
+      "member_5",
+      {
+        dependsOn: [awsGuarddutyMemberMember],
+        detectorId: Token.asString(awsGuarddutyDetectorMember.id),
+        masterAccountId: Token.asString(awsGuarddutyDetectorPrimary.accountId),
+        provider: member,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsGuarddutyInviteAccepterMember.overrideLogicalId("member");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `detectorId` - (Required) The detector ID of the member GuardDuty account.
+* `masterAccountId` - (Required) AWS account ID for primary account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - GuardDuty member detector ID
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `1m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsGuarddutyInviteAccepter` using the member GuardDuty detector ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsGuarddutyInviteAccepter` using the member GuardDuty detector ID. For example:
+
+```console
+% terraform import aws_guardduty_invite_accepter.member 00b00fd5aecc0ab60a708659477e9617
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/guardduty_ipset.html.markdown b/website/docs/cdktf/typescript/r/guardduty_ipset.html.markdown
new file mode 100644
index 00000000000..88e23c4ee0b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/guardduty_ipset.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "GuardDuty"
+layout: aws
+page_title: 'AWS: aws_guardduty_ipset'
+description: Provides a resource to manage a GuardDuty IPSet
+---
+
+
+
+# Resource: aws_guardduty_ipset
+
+Provides a resource to manage a GuardDuty IPSet.
+
+~> **Note:** Currently in GuardDuty, users from member accounts cannot upload and further manage IPSets. IPSets that are uploaded by the primary account are imposed on GuardDuty functionality in its member accounts. See the [GuardDuty API Documentation](https://docs.aws.amazon.com/guardduty/latest/ug/create-ip-set.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */
+import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector";
+import { GuarddutyIpset } from "./.gen/providers/aws/guardduty-ipset";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const primary = new GuarddutyDetector(this, "primary", {
+      enable: true,
+    });
+    const bucket = new S3Bucket(this, "bucket", {});
+    new S3BucketAcl(this, "bucket_acl", {
+      acl: "private",
+      bucket: bucket.id,
+    });
+    const myIpSet = new S3Object(this, "MyIPSet", {
+      bucket: bucket.id,
+      content: "10.0.0.0/8\n\n",
+      key: "MyIPSet",
+    });
+    new GuarddutyIpset(this, "example", {
+      activate: true,
+      detectorId: primary.id,
+      format: "TXT",
+      location:
+        "https://s3.amazonaws.com/${" +
+        myIpSet.bucket +
+        "}/${" +
+        myIpSet.key +
+        "}",
+      name: "MyIPSet",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `activate` - (Required) Specifies whether GuardDuty is to start using the uploaded IPSet.
+* `detectorId` - (Required) The ID of the GuardDuty detector.
+* `format` - (Required) The format of the file that contains the IPSet. Valid values: `TXT` | `STIX` | `OTX_CSV` | `ALIEN_VAULT` | `PROOF_POINT` | `FIRE_EYE`
+* `location` - (Required) The URI of the file that contains the IPSet.
+* `name` - (Required) The friendly name to identify the IPSet.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the GuardDuty IPSet.
+* `id` - The ID of the GuardDuty IPSet.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty IPSet using the primary GuardDuty detector ID and IPSet ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GuardDuty IPSet using the primary GuardDuty detector ID and IPSet ID. 
For example:
+
+```console
+% terraform import aws_guardduty_ipset.MyIPSet 00b00fd5aecc0ab60a708659477e9617:123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/guardduty_member.html.markdown b/website/docs/cdktf/typescript/r/guardduty_member.html.markdown
new file mode 100644
index 00000000000..7a3e4f16e92
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/guardduty_member.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "GuardDuty"
+layout: "aws"
+page_title: "AWS: aws_guardduty_member"
+description: |-
+  Provides a resource to manage a GuardDuty member
+---
+
+
+
+# Resource: aws_guardduty_member
+
+Provides a resource to manage a GuardDuty member. To accept invitations in member accounts, see the [`awsGuarddutyInviteAccepter` resource](/docs/providers/aws/r/guardduty_invite_accepter.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector";
+import { GuarddutyMember } from "./.gen/providers/aws/guardduty-member";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // An aliased provider for the member account, defined here so the example
+    // compiles; the original converted snippet referenced an undefined `dev`.
+    const dev = new AwsProvider(this, "aws_dev", {
+      alias: "dev",
+    });
+    const member = new GuarddutyDetector(this, "member", {
+      enable: true,
+      provider: dev,
+    });
+    const primary = new GuarddutyDetector(this, "primary", {
+      enable: true,
+    });
+    const awsGuarddutyMemberMember = new GuarddutyMember(this, "member_2", {
+      accountId: member.accountId,
+      detectorId: primary.id,
+      email: "required@example.com",
+      invitationMessage: "please accept guardduty invitation",
+      invite: true,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsGuarddutyMemberMember.overrideLogicalId("member");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accountId` - (Required) AWS account ID for member account.
+* `detectorId` - (Required) The detector ID of the GuardDuty account where you want to create member accounts.
+* `email` - (Required) Email address for member account.
+* `invite` - (Optional) Boolean whether to invite the account to GuardDuty as a member. Defaults to `false`. To detect if an invitation needs to be (re-)sent, the Terraform state value is `true` based on a `relationshipStatus` of `Disabled`, `Enabled`, `Invited`, or `EmailVerificationInProgress`.
+* `invitationMessage` - (Optional) Message for invitation.
+* `disableEmailNotification` - (Optional) Boolean whether an email notification is sent to the accounts. Defaults to `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the GuardDuty member
+* `relationshipStatus` - The status of the relationship between the member account and its primary account. More information can be found in [Amazon GuardDuty API Reference](https://docs.aws.amazon.com/guardduty/latest/ug/get-members.html).
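+
+The `relationshipStatus` attribute is what the `invite` re-send logic described above keys off of. As a minimal hand-written sketch (not part of the upstream documentation), it can be surfaced as a stack output so invitation drift is visible after deploys, assuming the `awsGuarddutyMemberMember` resource from the example above:
+
+```typescript
+// Sketch: expose the member's relationship status as a stack output.
+// `TerraformOutput` must be imported from "cdktf" alongside TerraformStack.
+new TerraformOutput(this, "member_relationship_status", {
+  value: awsGuarddutyMemberMember.relationshipStatus,
+});
+```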
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `1m`)
+- `update` - (Default `1m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty members using the primary GuardDuty detector ID and member AWS account ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GuardDuty members using the primary GuardDuty detector ID and member AWS account ID. For example:
+
+```console
+% terraform import aws_guardduty_member.MyMember 00b00fd5aecc0ab60a708659477e9617:123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/guardduty_organization_admin_account.html.markdown b/website/docs/cdktf/typescript/r/guardduty_organization_admin_account.html.markdown
new file mode 100644
index 00000000000..d1014a33cd6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/guardduty_organization_admin_account.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "GuardDuty"
+layout: "aws"
+page_title: "AWS: aws_guardduty_organization_admin_account"
+description: |-
+  Manages a GuardDuty Organization Admin Account
+---
+
+
+
+# Resource: aws_guardduty_organization_admin_account
+
+Manages a GuardDuty Organization Admin Account. The AWS account utilizing this resource must be an Organizations primary account. More information about Organizations support in GuardDuty can be found in the [GuardDuty User Guide](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_organizations.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector";
+import { GuarddutyOrganizationAdminAccount } from "./.gen/providers/aws/guardduty-organization-admin-account";
+import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new GuarddutyDetector(this, "example", {});
+    const awsOrganizationsOrganizationExample = new OrganizationsOrganization(
+      this,
+      "example_1",
+      {
+        awsServiceAccessPrincipals: ["guardduty.amazonaws.com"],
+        featureSet: "ALL",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsOrganizationsOrganizationExample.overrideLogicalId("example");
+    const awsGuarddutyOrganizationAdminAccountExample =
+      new GuarddutyOrganizationAdminAccount(this, "example_2", {
+        adminAccountId: "123456789012",
+        dependsOn: [awsOrganizationsOrganizationExample],
+      });
+    /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsGuarddutyOrganizationAdminAccountExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `adminAccountId` - (Required) AWS account identifier to designate as a delegated administrator for GuardDuty. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AWS account identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty Organization Admin Account using the AWS account ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import GuardDuty Organization Admin Account using the AWS account ID. For example: + +```console +% terraform import aws_guardduty_organization_admin_account.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/guardduty_organization_configuration.html.markdown b/website/docs/cdktf/typescript/r/guardduty_organization_configuration.html.markdown new file mode 100644 index 00000000000..9a7569caede --- /dev/null +++ b/website/docs/cdktf/typescript/r/guardduty_organization_configuration.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "GuardDuty" +layout: "aws" +page_title: "AWS: aws_guardduty_organization_configuration" +description: |- + Manages the GuardDuty Organization Configuration +--- + + + +# Resource: aws_guardduty_organization_configuration + +Manages the GuardDuty Organization Configuration in the current AWS Region. The AWS account utilizing this resource must have been assigned as a delegated Organization administrator account, e.g., via the [`awsGuarddutyOrganizationAdminAccount` resource](/docs/providers/aws/r/guardduty_organization_admin_account.html). More information about Organizations support in GuardDuty can be found in the [GuardDuty User Guide](https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_organizations.html). + +~> **NOTE:** This is an advanced Terraform resource. Terraform will automatically assume management of the GuardDuty Organization Configuration without import and perform no actions on removal from the Terraform configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector";
+import { GuarddutyOrganizationConfiguration } from "./.gen/providers/aws/guardduty-organization-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new GuarddutyDetector(this, "example", {
+      enable: true,
+    });
+    const awsGuarddutyOrganizationConfigurationExample =
+      new GuarddutyOrganizationConfiguration(this, "example_1", {
+        autoEnableOrganizationMembers: "ALL",
+        datasources: {
+          kubernetes: {
+            auditLogs: {
+              enable: true,
+            },
+          },
+          malwareProtection: {
+            scanEc2InstanceWithFindings: {
+              ebsVolumes: {
+                autoEnable: true,
+              },
+            },
+          },
+          s3Logs: {
+            autoEnable: true,
+          },
+        },
+        detectorId: example.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsGuarddutyOrganizationConfigurationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** One of `autoEnable` or `autoEnableOrganizationMembers` must be specified.
+
+This resource supports the following arguments:
+
+* `autoEnable` - (Optional) *Deprecated:* Use `autoEnableOrganizationMembers` instead. When this setting is enabled, all new accounts that are created in, or added to, the organization are added as member accounts of the organization’s GuardDuty delegated administrator and GuardDuty is enabled in that AWS Region.
+* `autoEnableOrganizationMembers` - (Optional) Indicates the auto-enablement configuration of GuardDuty for the member accounts in the organization. Valid values are `ALL`, `NEW`, `NONE`.
+* `detectorId` - (Required) The detector ID of the GuardDuty account.
+* `datasources` - (Optional) Configuration for the collected datasources.
+
+`datasources` supports the following:
+
+* `s3Logs` - (Optional) Enable S3 Protection automatically for new member accounts.
+* `kubernetes` - (Optional) Enable Kubernetes Audit Logs Monitoring automatically for new member accounts.
+* `malwareProtection` - (Optional) Enable Malware Protection automatically for new member accounts.
+
+### S3 Logs
+
+`s3Logs` block supports the following:
+
+* `autoEnable` - (Optional) Set to `true` if you want S3 data event logs to be automatically enabled for new members of the organization. Default: `false`
+
+### Kubernetes
+
+`kubernetes` block supports the following:
+
+* `auditLogs` - (Required) Enable Kubernetes Audit Logs Monitoring automatically for new member accounts. [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html).
+  See [Kubernetes Audit Logs](#kubernetes-audit-logs) below for more details.
+
+#### Kubernetes Audit Logs
+
+The `auditLogs` block supports the following:
+
+* `enable` - (Required) If true, enables Kubernetes audit logs as a data source for [Kubernetes protection](https://docs.aws.amazon.com/guardduty/latest/ug/kubernetes-protection.html).
+  Defaults to `true`.
+
+### Malware Protection
+
+`malwareProtection` block supports the following:
+
+* `scanEc2InstanceWithFindings` - (Required) Configure whether [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) for EC2 instances with findings should be auto-enabled for new members joining the organization.
+  See [Scan EC2 instance with findings](#scan-ec2-instance-with-findings) below for more details.
+ +#### Scan EC2 instance with findings + +The `scanEc2InstanceWithFindings` block supports the following: + +* `ebsVolumes` - (Required) Configure whether scanning EBS volumes should be auto-enabled for new members joining the organization + See [EBS volumes](#ebs-volumes) below for more details. + +#### EBS volumes + +The `ebsVolumes` block supports the following: + +* `autoEnable` - (Required) If true, enables [Malware Protection](https://docs.aws.amazon.com/guardduty/latest/ug/malware-protection.html) for all new accounts joining the organization. + Defaults to `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the GuardDuty Detector. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty Organization Configurations using the GuardDuty Detector ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import GuardDuty Organization Configurations using the GuardDuty Detector ID. For example: + +```console +% terraform import aws_guardduty_organization_configuration.example 00b00fd5aecc0ab60a708659477e9617 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/guardduty_publishing_destination.html.markdown b/website/docs/cdktf/typescript/r/guardduty_publishing_destination.html.markdown new file mode 100644 index 00000000000..a3618b160ef --- /dev/null +++ b/website/docs/cdktf/typescript/r/guardduty_publishing_destination.html.markdown @@ -0,0 +1,176 @@ +--- +subcategory: "GuardDuty" +layout: aws +page_title: 'AWS: aws_guardduty_publishing_destination' +description: Provides a resource to manage a GuardDuty PublishingDestination +--- + + + +# Resource: aws_guardduty_publishing_destination + +Provides a resource to manage a GuardDuty PublishingDestination. Requires an existing GuardDuty Detector. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector"; +import { GuarddutyPublishingDestination } from "./.gen/providers/aws/guardduty-publishing-destination"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testGd = new GuarddutyDetector(this, "test_gd", { + enable: true, + }); + const gdBucket = new S3Bucket(this, "gd_bucket", { + bucket: "example", + forceDestroy: true, + }); + new S3BucketAcl(this, "gd_bucket_acl", { + acl: "private", + bucket: gdBucket.id, + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const bucketPol = new DataAwsIamPolicyDocument(this, "bucket_pol", { + statement: [ + { + actions: ["s3:PutObject"], + principals: [ + { + identifiers: ["guardduty.amazonaws.com"], + type: "Service", + }, + ], + resources: ["${" + gdBucket.arn + "}/*"], + sid: "Allow PutObject", + }, + { + actions: ["s3:GetBucketLocation"], + principals: [ + { + identifiers: ["guardduty.amazonaws.com"], + type: "Service", + }, + ], + resources: [gdBucket.arn], + sid: "Allow GetBucketLocation", + }, + ], + }); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_5", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const gdBucketPolicy = new S3BucketPolicy(this, "gd_bucket_policy", { + bucket: gdBucket.id, + policy: Token.asString(bucketPol.json), + }); + const kmsPol = new DataAwsIamPolicyDocument(this, "kms_pol", { + statement: [ + { + actions: ["kms:GenerateDataKey"], + principals: [ + { + identifiers: ["guardduty.amazonaws.com"], + type: "Service", + }, + ], + resources: [ + "arn:aws:kms:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:key/*", + ], + sid: "Allow GuardDuty to encrypt findings", + }, + { + actions: ["kms:*"], + principals: [ + { + identifiers: ["arn:aws:iam::${" + current.accountId + "}:root"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:kms:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:key/*", + ], + sid: "Allow all users to modify/delete key (test only)", + }, + ], + }); + const gdKey = new KmsKey(this, "gd_key", { + deletionWindowInDays: 7, + description: "Temporary key for AccTest of TF", + policy: Token.asString(kmsPol.json), + }); + new GuarddutyPublishingDestination(this, "test", { + dependsOn: [gdBucketPolicy], + destinationArn: gdBucket.arn, + detectorId: testGd.id, + kmsKeyArn: gdKey.arn, + }); + } +} + +``` + +~> **Note:** Please do not use this simple example for Bucket-Policy and KMS Key Policy in a production environment. It is much too open for such a use-case. Refer to the AWS documentation here: https://docs.aws.amazon.com/guardduty/latest/ug/guardduty_exportfindings.html + +## Argument Reference + +This resource supports the following arguments: + +* `detectorId` - (Required) The detector ID of the GuardDuty. 
+* `destinationArn` - (Required) The bucket ARN and prefix under which the findings get exported. The bucket ARN is required; the prefix is optional and defaults to `AWSLogs/[account-id]/GuardDuty/[region]/` if not provided.
+* `kmsKeyArn` - (Required) The ARN of the KMS key used to encrypt GuardDuty findings. GuardDuty enforces this to be encrypted.
+* `destinationType` - (Optional) Currently "S3" is the only available destination type, which is also the default value.
+
+~> **Note:** If permissions are missing (the S3 Bucket Policy _or_ the KMS Key permissions), the resource will fail to create. If the permissions are changed after resource creation, the current state can be queried from the AWS API via the "DescribePublishingDestination" call (https://docs.aws.amazon.com/cli/latest/reference/guardduty/describe-publishing-destination.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the GuardDuty PublishingDestination and the detector ID. Format: `detector_id:publishing_destination_id`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty PublishingDestination using the master GuardDuty detector ID and PublishingDestinationID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GuardDuty PublishingDestination using the master GuardDuty detector ID and PublishingDestinationID. For example:
+
+```console
+% terraform import aws_guardduty_publishing_destination.test a4b86f26fa42e7e7cf0d1c333ea77777:a4b86f27a0e464e4a7e0516d242f1234
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/guardduty_threatintelset.html.markdown b/website/docs/cdktf/typescript/r/guardduty_threatintelset.html.markdown
new file mode 100644
index 00000000000..95e81c47d6d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/guardduty_threatintelset.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "GuardDuty"
+layout: aws
+page_title: 'AWS: aws_guardduty_threatintelset'
+description: Provides a resource to manage a GuardDuty ThreatIntelSet
+---
+
+
+
+# Resource: aws_guardduty_threatintelset
+
+Provides a resource to manage a GuardDuty ThreatIntelSet.
+
+~> **Note:** Currently in GuardDuty, users from member accounts cannot upload and further manage ThreatIntelSets. ThreatIntelSets that are uploaded by the primary account are imposed on GuardDuty functionality in its member accounts. See the [GuardDuty API Documentation](https://docs.aws.amazon.com/guardduty/latest/ug/create-threat-intel-set.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */
+import { GuarddutyDetector } from "./.gen/providers/aws/guardduty-detector";
+import { GuarddutyThreatintelset } from "./.gen/providers/aws/guardduty-threatintelset";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const primary = new GuarddutyDetector(this, "primary", {
+      enable: true,
+    });
+    const bucket = new S3Bucket(this, "bucket", {});
+    new S3BucketAcl(this, "bucket_acl", {
+      acl: "private",
+      bucket: bucket.id,
+    });
+    const myThreatIntelSet = new S3Object(this, "MyThreatIntelSet", {
+      acl: "public-read",
+      bucket: bucket.id,
+      content: "10.0.0.0/8\n\n",
+      key: "MyThreatIntelSet",
+    });
+    const awsGuarddutyThreatintelsetMyThreatIntelSet =
+      new GuarddutyThreatintelset(this, "MyThreatIntelSet_4", {
+        activate: true,
+        detectorId: primary.id,
+        format: "TXT",
+        location:
+          "https://s3.amazonaws.com/${" +
+          myThreatIntelSet.bucket +
+          "}/${" +
+          myThreatIntelSet.key +
+          "}",
+        name: "MyThreatIntelSet",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsGuarddutyThreatintelsetMyThreatIntelSet.overrideLogicalId(
+      "MyThreatIntelSet"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `activate` - (Required) Specifies whether GuardDuty is to start using the uploaded ThreatIntelSet.
+* `detectorId` - (Required) The ID of the GuardDuty detector.
+* `format` - (Required) The format of the file that contains the ThreatIntelSet. Valid values: `TXT` | `STIX` | `OTX_CSV` | `ALIEN_VAULT` | `PROOF_POINT` | `FIRE_EYE`
+* `location` - (Required) The URI of the file that contains the ThreatIntelSet.
+* `name` - (Required) The friendly name to identify the ThreatIntelSet.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the GuardDuty ThreatIntelSet.
+* `id` - The ID of the GuardDuty ThreatIntelSet and the detector ID. Format: `detector_id:threat_intel_set_id`
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import GuardDuty ThreatIntelSet using the primary GuardDuty detector ID and ThreatIntelSetID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import GuardDuty ThreatIntelSet using the primary GuardDuty detector ID and ThreatIntelSetID. 
For example: + +```console +% terraform import aws_guardduty_threatintelset.MyThreatIntelSet 00b00fd5aecc0ab60a708659477e9617:123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_access_key.html.markdown b/website/docs/cdktf/typescript/r/iam_access_key.html.markdown new file mode 100644 index 00000000000..150cfd146c4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_access_key.html.markdown @@ -0,0 +1,140 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_access_key" +description: |- + Provides an IAM access key. This is a set of credentials that allow API requests to be made as an IAM user. +--- + + + +# Resource: aws_iam_access_key + +Provides an IAM access key. This is a set of credentials that allow API requests to be made as an IAM user. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamAccessKey } from "./.gen/providers/aws/iam-access-key"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +import { IamUserPolicy } from "./.gen/providers/aws/iam-user-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const lb = new IamUser(this, "lb", { + name: "loadbalancer", + path: "/system/", + }); + const lbRo = new DataAwsIamPolicyDocument(this, "lb_ro", { + statement: [ + { + actions: ["ec2:Describe*"], + effect: "Allow", + resources: ["*"], + }, + ], + }); + const awsIamAccessKeyLb = new IamAccessKey(this, "lb_2", { + pgpKey: "keybase:some_person_that_exists", + user: lb.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamAccessKeyLb.overrideLogicalId("lb"); + const awsIamUserPolicyLbRo = new IamUserPolicy(this, "lb_ro_3", { + name: "test", + policy: Token.asString(lbRo.json), + user: lb.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamUserPolicyLbRo.overrideLogicalId("lb_ro"); + new TerraformOutput(this, "secret", { + value: awsIamAccessKeyLb.encryptedSecret, + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamAccessKey } from "./.gen/providers/aws/iam-access-key"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new IamUser(this, "test", { + name: "test", + path: "/test/", + }); + const awsIamAccessKeyTest = new IamAccessKey(this, "test_1", { + user: test.name, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsIamAccessKeyTest.overrideLogicalId("test");
+    new TerraformOutput(this, "aws_iam_smtp_password_v4", {
+      value: awsIamAccessKeyTest.sesSmtpPasswordV4,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `pgpKey` - (Optional) Either a base-64 encoded PGP public key, or a keybase username in the form `keybase:some_person_that_exists`, for use in the `encryptedSecret` output attribute. If providing a base-64 encoded PGP public key, make sure to provide the "raw" version and not the "armored" one (e.g. avoid passing the `-a`/`--armor` option to `gpg --export`).
+* `status` - (Optional) Access key status to apply. Defaults to `Active`. Valid values are `Active` and `Inactive`.
+* `user` - (Required) IAM user to associate with this access key.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `createDate` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the access key was created.
+* `encryptedSecret` - Encrypted secret, base64 encoded, if `pgpKey` was specified. This attribute is not available for imported resources. The encrypted secret may be decrypted using the command line, for example: `terraform output -raw encrypted_secret | base64 --decode | keybase pgp decrypt`.
+* `encryptedSesSmtpPasswordV4` - Encrypted SES SMTP password, base64 encoded, if `pgpKey` was specified. This attribute is not available for imported resources. The encrypted password may be decrypted using the command line, for example: `terraform output -raw encrypted_ses_smtp_password_v4 | base64 --decode | keybase pgp decrypt`.
+* `id` - Access key ID.
+* `keyFingerprint` - Fingerprint of the PGP key used to encrypt the secret. This attribute is not available for imported resources.
+* `secret` - Secret access key. This attribute is not available for imported resources. Note that this will be written to the state file. If you use this, please protect your backend state file judiciously. Alternatively, you may supply a `pgpKey` instead, which will prevent the secret from being stored in plaintext, at the cost of preventing the use of the secret key in automation.
+* `sesSmtpPasswordV4` - Secret access key converted into an SES SMTP password by applying [AWS's documented Sigv4 conversion algorithm](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/smtp-credentials.html#smtp-credentials-convert). This attribute is not available for imported resources. As SigV4 is region specific, valid Provider regions are `ap-south-1`, `ap-southeast-2`, `eu-central-1`, `eu-west-1`, `us-east-1` and `us-west-2`. See current [AWS SES regions](https://docs.aws.amazon.com/general/latest/gr/rande.html#ses_region).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Access Keys using the identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM Access Keys using the identifier. 
For example: + +```console +% terraform import aws_iam_access_key.example AKIA1234567890 +``` + +Resource attributes such as `encryptedSecret`, `keyFingerprint`, `pgpKey`, `secret`, `sesSmtpPasswordV4`, and `encryptedSesSmtpPasswordV4` are not available for imported resources as this information cannot be read from the IAM API. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_account_alias.html.markdown b/website/docs/cdktf/typescript/r/iam_account_alias.html.markdown new file mode 100644 index 00000000000..3f390de9fb3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_account_alias.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_account_alias" +description: |- + Manages the account alias for the AWS Account. +--- + + + +# Resource: aws_iam_account_alias + +-> **Note:** There is only a single account alias per AWS account. + +Manages the account alias for the AWS Account. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamAccountAlias } from "./.gen/providers/aws/iam-account-alias"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IamAccountAlias(this, "alias", { + accountAlias: "my-account-alias", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountAlias` - (Required) The account alias + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the current Account Alias using the `accountAlias`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the current Account Alias using the `accountAlias`. For example: + +```console +% terraform import aws_iam_account_alias.alias my-account-alias +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_account_password_policy.html.markdown b/website/docs/cdktf/typescript/r/iam_account_password_policy.html.markdown new file mode 100644 index 00000000000..deebdcc988e --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_account_password_policy.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_account_password_policy" +description: |- + Manages Password Policy for the AWS Account. +--- + + + +# Resource: aws_iam_account_password_policy + +-> **Note:** There is only a single policy allowed per AWS account. An existing policy will be lost when using this resource as an effect of this limitation. + +Manages Password Policy for the AWS Account. +See more about [Account Password Policy](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html) +in the official AWS docs. 
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamAccountPasswordPolicy } from "./.gen/providers/aws/iam-account-password-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamAccountPasswordPolicy(this, "strict", {
+      allowUsersToChangePassword: true,
+      minimumPasswordLength: 8,
+      requireLowercaseCharacters: true,
+      requireNumbers: true,
+      requireSymbols: true,
+      requireUppercaseCharacters: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allowUsersToChangePassword` - (Optional) Whether to allow users to change their own password
+* `hardExpiry` - (Optional) Whether users are prevented from setting a new password after their password has expired (i.e., require administrator reset)
+* `maxPasswordAge` - (Optional) The number of days that a user password is valid.
+* `minimumPasswordLength` - (Optional) Minimum length to require for user passwords.
+* `passwordReusePrevention` - (Optional) The number of previous passwords that users are prevented from reusing.
+* `requireLowercaseCharacters` - (Optional) Whether to require lowercase characters for user passwords.
+* `requireNumbers` - (Optional) Whether to require numbers for user passwords.
+* `requireSymbols` - (Optional) Whether to require symbols for user passwords.
+* `requireUppercaseCharacters` - (Optional) Whether to require uppercase characters for user passwords.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `expirePasswords` - Indicates whether passwords in the account expire. Returns `true` if `maxPasswordAge` contains a value greater than `0`. Returns `false` if it is `0` or _not present_.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Account Password Policy using the word `iam-account-password-policy`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM Account Password Policy using the word `iam-account-password-policy`. For example:
+
+```console
+% terraform import aws_iam_account_password_policy.strict iam-account-password-policy
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_group.html.markdown b/website/docs/cdktf/typescript/r/iam_group.html.markdown
new file mode 100644
index 00000000000..a1b1a8dfe49
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_group.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_group"
+description: |-
+  Provides an IAM group.
+---
+
+
+
+# Resource: aws_iam_group
+
+Provides an IAM group. 
+ +~> **NOTE on user management:** Using `awsIamGroupMembership` or `awsIamUserGroupMembership` resources in addition to manually managing user/group membership using the console may lead to configuration drift or conflicts. For this reason, it's recommended to either manage membership entirely with Terraform or entirely within the AWS console. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamGroup } from "./.gen/providers/aws/iam-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IamGroup(this, "developers", { + name: "developers", + path: "/users/", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The group's name. The name must consist of upper and lowercase alphanumeric characters with no spaces. You can also include any of the following characters: `=,.@`. Group names are not distinguished by case. For example, you cannot create groups named both "ADMINS" and "admins". +* `path` - (Optional, default "/") Path in which to create the group. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The group's ID. +* `arn` - The ARN assigned by AWS for this group. +* `name` - The group's name. +* `path` - The path of the group in IAM. +* `uniqueId` - The [unique ID][1] assigned by AWS. + + [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html#GUIDs + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM Groups using the `name`. For example: + +```console +% terraform import aws_iam_group.developers developers +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_group_membership.html.markdown b/website/docs/cdktf/typescript/r/iam_group_membership.html.markdown new file mode 100644 index 00000000000..c18891032ac --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_group_membership.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group_membership" +description: |- + Provides a top level resource to manage IAM Group membership for IAM Users. +--- + + + +# Resource: aws_iam_group_membership + +~> **WARNING:** Multiple aws_iam_group_membership resources with the same group name will produce inconsistent behavior! + +Provides a top level resource to manage IAM Group membership for IAM Users. For +more information on managing IAM Groups or IAM Users, see [IAM Groups][1] or +[IAM Users][2] + +~> **Note:** `awsIamGroupMembership` will conflict with itself if used more than once with the same group. 
To non-exclusively manage the users in a group, see the +[`awsIamUserGroupMembership` resource][3]. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamGroup } from "./.gen/providers/aws/iam-group"; +import { IamGroupMembership } from "./.gen/providers/aws/iam-group-membership"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const group = new IamGroup(this, "group", { + name: "test-group", + }); + const userOne = new IamUser(this, "user_one", { + name: "test-user", + }); + const userTwo = new IamUser(this, "user_two", { + name: "test-user-two", + }); + new IamGroupMembership(this, "team", { + group: group.name, + name: "tf-testing-group-membership", + users: [userOne.name, userTwo.name], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name to identify the Group Membership +* `users` - (Required) A list of IAM User names to associate with the Group +* `group` – (Required) The IAM Group name to attach the list of `users` to + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `name` - The name to identify the Group Membership +* `users` - list of IAM User names +* `group` – IAM Group name + +[1]: /docs/providers/aws/r/iam_group.html +[2]: /docs/providers/aws/r/iam_user.html +[3]: /docs/providers/aws/r/iam_user_group_membership.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_group_policy.html.markdown b/website/docs/cdktf/typescript/r/iam_group_policy.html.markdown new file mode 100644 index 00000000000..e8f91b46670 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_group_policy.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group_policy" +description: |- + Provides an IAM policy attached to a group. +--- + + + +# Resource: aws_iam_group_policy + +Provides an IAM policy attached to a group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamGroup } from "./.gen/providers/aws/iam-group"; +import { IamGroupPolicy } from "./.gen/providers/aws/iam-group-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myDevelopers = new IamGroup(this, "my_developers", { + name: "developers", + path: "/users/", + }); + new IamGroupPolicy(this, "my_developer_policy", { + group: myDevelopers.name, + name: "my_developer_policy", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ec2:Describe*"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The policy document. 
This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) +* `name` - (Optional) The name of the policy. If omitted, Terraform will +assign a random, unique name. +* `namePrefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +* `group` - (Required) The IAM group to attach to the policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The group policy ID. +* `group` - The group to which this policy applies. +* `name` - The name of the policy. +* `policy` - The policy document attached to the group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Group Policies using the `groupName:groupPolicyName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM Group Policies using the `groupName:groupPolicyName`. For example: + +```console +% terraform import aws_iam_group_policy.mypolicy group_of_mypolicy_name:mypolicy_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_group_policy_attachment.markdown b/website/docs/cdktf/typescript/r/iam_group_policy_attachment.markdown new file mode 100644 index 00000000000..852a608e3cc --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_group_policy_attachment.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_group_policy_attachment" +description: |- + Attaches a Managed IAM Policy to an IAM group +--- + + + +# Resource: aws_iam_group_policy_attachment + +Attaches a Managed IAM Policy to an IAM group + +~> **NOTE:** The usage of this resource conflicts with the `awsIamPolicyAttachment` resource and will permanently show a difference if both are defined. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamGroup } from "./.gen/providers/aws/iam-group"; +import { IamGroupPolicyAttachment } from "./.gen/providers/aws/iam-group-policy-attachment"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const group = new IamGroup(this, "group", { + name: "test-group", + }); + const policy = new IamPolicy(this, "policy", { + description: "A test policy", + name: "test-policy", + policy: "{ ... policy JSON ... 
}", + }); + new IamGroupPolicyAttachment(this, "test-attach", { + group: group.name, + policyArn: policy.arn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `group` (Required) - The group the policy should be applied to +* `policyArn` (Required) - The ARN of the policy you want to apply + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM group policy attachments using the group name and policy arn separated by `/`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM group policy attachments using the group name and policy arn separated by `/`. For example: + +```console +% terraform import aws_iam_group_policy_attachment.test-attach test-group/arn:aws:iam::xxxxxxxxxxxx:policy/test-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_instance_profile.html.markdown b/website/docs/cdktf/typescript/r/iam_instance_profile.html.markdown new file mode 100644 index 00000000000..26b1a6ade2e --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_instance_profile.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_instance_profile" +description: |- + Provides an IAM instance profile. +--- + + + +# Resource: aws_iam_instance_profile + +Provides an IAM instance profile. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamInstanceProfile } from "./.gen/providers/aws/iam-instance-profile"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["ec2.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const role = new IamRole(this, "role", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "test_role", + path: "/", + }); + new IamInstanceProfile(this, "test_profile", { + name: "test_profile", + role: role.name, + }); + } +} + +``` + +## Argument Reference + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the instance profile. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`. Can be a string of characters consisting of upper and lowercase alphanumeric characters and these special characters: `_`, `+`, `=`, `,`, `.`, `@`, `-`. Spaces are not allowed. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. 
+* `path` - (Optional, default "/") Path to the instance profile. For more information about paths, see [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) in the IAM User Guide. Can be a string of characters consisting of either a forward slash (`/`) by itself or a string that must begin and end with forward slashes. Can include any ASCII character from the ! (\u0021) through the DEL character (\u007F), including most punctuation characters, digits, and upper and lowercase letters. +* `role` - (Optional) Name of the role to add to the profile. +* `tags` - (Optional) Map of resource tags for the IAM Instance Profile. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN assigned by AWS to the instance profile. +* `createDate` - Creation timestamp of the instance profile. +* `id` - Instance profile's ID. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `uniqueId` - [Unique ID][1] assigned by AWS. + + [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html#GUIDs + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Instance Profiles using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Instance Profiles using the `name`. For example: + +```console +% terraform import aws_iam_instance_profile.test_profile app-instance-profile-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_openid_connect_provider.html.markdown b/website/docs/cdktf/typescript/r/iam_openid_connect_provider.html.markdown new file mode 100644 index 00000000000..4616d6a2dee --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_openid_connect_provider.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_openid_connect_provider" +description: |- + Provides an IAM OpenID Connect provider. +--- + + + +# Resource: aws_iam_openid_connect_provider + +Provides an IAM OpenID Connect provider. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
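+ *
+ * The example below registers accounts.google.com as an OIDC identity
+ * provider, trusting a single OAuth client ID and pinning the provider's
+ * server-certificate thumbprint.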
+ */ +import { IamOpenidConnectProvider } from "./.gen/providers/aws/iam-openid-connect-provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IamOpenidConnectProvider(this, "default", { + clientIdList: [ + "266362248691-342342xasdasdasda-apps.googleusercontent.com", + ], + thumbprintList: ["cf23df2207d99a74fbe169e3eba035e633b65d94"], + url: "https://accounts.google.com", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `url` - (Required) The URL of the identity provider. Corresponds to the _iss_ claim. +* `clientIdList` - (Required) A list of client IDs (also known as audiences). When a mobile or web app registers with an OpenID Connect provider, they establish a value that identifies the application. (This is the value that's sent as the client_id parameter on OAuth requests.) +* `thumbprintList` - (Required) A list of server certificate thumbprints for the OpenID Connect (OIDC) identity provider's server certificate(s). +* `tags` - (Optional) Map of resource tags for the IAM OIDC provider. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS for this provider. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM OpenID Connect Providers using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM OpenID Connect Providers using the `arn`. For example: + +```console +% terraform import aws_iam_openid_connect_provider.default arn:aws:iam::123456789012:oidc-provider/accounts.google.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_policy.html.markdown b/website/docs/cdktf/typescript/r/iam_policy.html.markdown new file mode 100644 index 00000000000..7571e535979 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_policy.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_policy" +description: |- + Provides an IAM policy. +--- + + + +# Resource: aws_iam_policy + +Provides an IAM policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
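+ *
+ * The example below creates a customer-managed policy named test_policy
+ * whose document is built with Fn.jsonencode, allowing ec2:Describe* on
+ * all resources.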
+ */ +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IamPolicy(this, "policy", { + description: "My test policy", + name: "test_policy", + path: "/", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ec2:Describe*"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional, Forces new resource) Description of the IAM policy. +* `name` - (Optional, Forces new resource) The name of the policy. If omitted, Terraform will assign a random, unique name. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `path` - (Optional, default "/") Path in which to create the policy. + See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) for more information. +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) +* `tags` - (Optional) Map of resource tags for the IAM Policy. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN assigned by AWS to this policy. +* `arn` - The ARN assigned by AWS to this policy. +* `description` - The description of the policy. +* `name` - The name of the policy. +* `path` - The path of the policy in IAM. +* `policy` - The policy document. +* `policyId` - The policy's ID. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Policies using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM Policies using the `arn`. 
For example:
+
+```console
+% terraform import aws_iam_policy.administrator arn:aws:iam::123456789012:policy/UsersManageOwnCredentials
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_policy_attachment.html.markdown b/website/docs/cdktf/typescript/r/iam_policy_attachment.html.markdown
new file mode 100644
index 00000000000..2ad15f1458e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_policy_attachment.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_policy_attachment"
+description: |-
+  Attaches a Managed IAM Policy to user(s), role(s), and/or group(s)
+---
+
+
+
+# Resource: aws_iam_policy_attachment
+
+Attaches a Managed IAM Policy to user(s), role(s), and/or group(s).
+
+!> **WARNING:** The `aws_iam_policy_attachment` resource creates **exclusive** attachments of IAM policies. Across the entire AWS account, all of the users/roles/groups to which a single policy is attached must be declared by a single `aws_iam_policy_attachment` resource. This means that any users/roles/groups to which the policy is attached by any other mechanism (including other Terraform resources) will have that attachment revoked by this resource. Consider `awsIamRolePolicyAttachment`, `awsIamUserPolicyAttachment`, or `awsIamGroupPolicyAttachment` instead. These resources do not enforce exclusive attachment of an IAM policy.
+
+~> **NOTE:** The usage of this resource conflicts with the `awsIamGroupPolicyAttachment`, `awsIamRolePolicyAttachment`, and `awsIamUserPolicyAttachment` resources and will permanently show a difference if both are defined.
+
+~> **NOTE:** For a given role, this resource is incompatible with using the [`awsIamRole` resource](/docs/providers/aws/r/iam_role.html) `managedPolicyArns` argument. When using that argument and this resource, both will attempt to manage the role's managed policy attachments and Terraform will show a permanent difference.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
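+ *
+ * The example below creates a user, a group, and a role, builds a managed
+ * policy from an aws_iam_policy_document data source, and then attaches
+ * that single policy to all three entities at once.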
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamGroup } from "./.gen/providers/aws/iam-group";
+import { IamPolicy } from "./.gen/providers/aws/iam-policy";
+import { IamPolicyAttachment } from "./.gen/providers/aws/iam-policy-attachment";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamUser } from "./.gen/providers/aws/iam-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const group = new IamGroup(this, "group", {
+      name: "test-group",
+    });
+    const user = new IamUser(this, "user", {
+      name: "test-user",
+    });
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["ec2.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const policy = new DataAwsIamPolicyDocument(this, "policy", {
+      statement: [
+        {
+          actions: ["ec2:Describe*"],
+          effect: "Allow",
+          resources: ["*"],
+        },
+      ],
+    });
+    const awsIamPolicyPolicy = new IamPolicy(this, "policy_4", {
+      description: "A test policy",
+      name: "test-policy",
+      policy: Token.asString(policy.json),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamPolicyPolicy.overrideLogicalId("policy");
+    const role = new IamRole(this, "role", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "test-role",
+    });
+    new IamPolicyAttachment(this, "test-attach", {
+      groups: [group.name],
+      name: "test-attachment",
+      policyArn: Token.asString(awsIamPolicyPolicy.arn),
+      roles: [role.name],
+      users: [user.name],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` (Required) - The name of the attachment. This cannot be an empty string.
+* `users` (Optional) - The user(s) the policy should be applied to
+* `roles` (Optional) - The role(s) the policy should be applied to
+* `groups` (Optional) - The group(s) the policy should be applied to
+* `policyArn` (Required) - The ARN of the policy you want to apply
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The policy's ID.
+* `name` - The name of the attachment.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_role.html.markdown b/website/docs/cdktf/typescript/r/iam_role.html.markdown
new file mode 100644
index 00000000000..aa733c324fd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_role.html.markdown
@@ -0,0 +1,330 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_role"
+description: |-
+  Provides an IAM role.
+---
+
+
+
+# Resource: aws_iam_role
+
+Provides an IAM role.
+
+~> **NOTE:** If policies are attached to the role via the [`awsIamPolicyAttachment` resource](/docs/providers/aws/r/iam_policy_attachment.html) and you are modifying the role `name` or `path`, the `forceDetachPolicies` argument must be set to `true` and applied before attempting the operation; otherwise you will encounter a `DeleteConflict` error. The [`awsIamRolePolicyAttachment` resource (recommended)](/docs/providers/aws/r/iam_role_policy_attachment.html) does not have this requirement.
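+
+When a role must remain compatible with `awsIamPolicyAttachment`, a minimal hand-written sketch (not `cdktf convert` output, but reusing the same generated provider bindings as the examples below) is to opt the role into force-detaching its policies so renames do not fail:
+
+```typescript
+// Hand-written sketch; assumes the same .gen provider bindings as the
+// generated examples on this page.
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+class ForceDetachSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamRole(this, "renameable_role", {
+      assumeRolePolicy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: "sts:AssumeRole",
+              Effect: "Allow",
+              Principal: { Service: "ec2.amazonaws.com" },
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+      // Detach attached policies before destroy so renaming the role does
+      // not fail with a DeleteConflict error.
+      forceDetachPolicies: true,
+      name: "renameable_role",
+    });
+  }
+}
+```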
+ +~> **NOTE:** If you use this resource's `managedPolicyArns` argument or `inlinePolicy` configuration blocks, this resource will take over exclusive management of the role's respective policy types (e.g., both policy types if both arguments are used). These arguments are incompatible with other ways of managing a role's policies, such as [`awsIamPolicyAttachment`](/docs/providers/aws/r/iam_policy_attachment.html), [`awsIamRolePolicyAttachment`](/docs/providers/aws/r/iam_role_policy_attachment.html), and [`awsIamRolePolicy`](/docs/providers/aws/r/iam_role_policy.html). If you attempt to manage a role's policies by multiple means, you will get resource cycling and/or errors. + +## Example Usage + +### Basic Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IamRole(this, "test_role", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "ec2.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "test_role", + tags: { + "tag-key": "tag-value", + }, + }); + } +} + +``` + +### Example of Using Data Source for Assume Role Policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const instanceAssumeRolePolicy = new DataAwsIamPolicyDocument( + this, + "instance_assume_role_policy", + { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["ec2.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + new IamRole(this, "instance", { + assumeRolePolicy: Token.asString(instanceAssumeRolePolicy.json), + name: "instance_role", + path: "/system/", + }); + } +} + +``` + +### Example of Exclusive Inline Policies + +This example creates an IAM role with two inline IAM policies. If someone adds another inline policy out-of-band, on the next apply, Terraform will remove that policy. If someone deletes these policies out-of-band, Terraform will recreate them. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
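+ *
+ * Note: instanceAssumeRolePolicy below refers to the DataAwsIamPolicyDocument
+ * defined in the assume-role-policy data source example above; it is not
+ * redeclared in this snippet.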
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const inlinePolicy = new DataAwsIamPolicyDocument(this, "inline_policy", {
+      statement: [
+        {
+          actions: ["ec2:DescribeAccountAttributes"],
+          resources: ["*"],
+        },
+      ],
+    });
+    new IamRole(this, "example", {
+      assumeRolePolicy: Token.asString(instanceAssumeRolePolicy.json),
+      inlinePolicy: [
+        {
+          name: "my_inline_policy",
+          policy: Token.asString(
+            Fn.jsonencode({
+              Statement: [
+                {
+                  Action: ["ec2:Describe*"],
+                  Effect: "Allow",
+                  Resource: "*",
+                },
+              ],
+              Version: "2012-10-17",
+            })
+          ),
+        },
+        {
+          name: "policy-8675309",
+          policy: Token.asString(inlinePolicy.json),
+        },
+      ],
+      name: "yak_role",
+    });
+  }
+}
+
+```
+
+### Example of Removing Inline Policies
+
+This example creates an IAM role whose `inlinePolicy` argument contains a single empty element rather than a fully configured `inlinePolicy` block. The result is that if someone were to add an inline policy out-of-band, on the next apply, Terraform will remove that policy.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamRole } from "./.gen/providers/aws/iam-role";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamRole(this, "example", {
+      // instanceAssumeRolePolicy is the data source from the earlier example
+      assumeRolePolicy: Token.asString(instanceAssumeRolePolicy.json),
+      inlinePolicy: [{}],
+      name: "yak_role",
+    });
+  }
+}
+
+```
+
+### Example of Exclusive Managed Policies
+
+This example creates an IAM role and attaches two managed IAM policies. If someone attaches another managed policy out-of-band, on the next apply, Terraform will detach that policy. If someone detaches these policies out-of-band, Terraform will attach them again.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
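+ *
+ * As in the inline-policy example, instanceAssumeRolePolicy refers to the
+ * data source declared earlier; the two IamPolicy resources are attached
+ * exclusively via managedPolicyArns.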
+ */ +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const policyOne = new IamPolicy(this, "policy_one", { + name: "policy-618033", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ec2:Describe*"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + }); + const policyTwo = new IamPolicy(this, "policy_two", { + name: "policy-381966", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["s3:ListAllMyBuckets", "s3:ListBucket", "s3:HeadBucket"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + }); + new IamRole(this, "example", { + assumeRolePolicy: Token.asString(instanceAssumeRolePolicy.json), + managedPolicyArns: [policyOne.arn, policyTwo.arn], + name: "yak_role", + }); + } +} + +``` + +### Example of Removing Managed Policies + +This example creates an IAM role with an empty `managedPolicyArns` argument. If someone attaches a policy out-of-band, on the next apply, Terraform will detach that policy. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamRole } from "./.gen/providers/aws/iam-role"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IamRole(this, "example", { + assumeRolePolicy: Token.asString(instanceAssumeRolePolicy.json), + managedPolicyArns: [], + name: "yak_role", + }); + } +} + +``` + +## Argument Reference + +The following argument is required: + +* `assumeRolePolicy` - (Required) Policy that grants an entity permission to assume the role. + +~> **NOTE:** The `assumeRolePolicy` is very similar to but slightly different than a standard IAM policy and cannot use an `awsIamPolicy` resource. However, it _can_ use an `awsIamPolicyDocument` [data source](/docs/providers/aws/d/iam_policy_document.html). See the example above of how this works. + +The following arguments are optional: + +* `description` - (Optional) Description of the role. +* `forceDetachPolicies` - (Optional) Whether to force detaching any policies the role has before destroying it. Defaults to `false`. +* `inlinePolicy` - (Optional) Configuration block defining an exclusive set of IAM inline policies associated with the IAM role. See below. If no blocks are configured, Terraform will not manage any inline policies in this resource. Configuring one empty block (i.e., `inline_policy {}`) will cause Terraform to remove _all_ inline policies added out of band on `apply`. +* `managedPolicyArns` - (Optional) Set of exclusive IAM managed policy ARNs to attach to the IAM role. If this attribute is not configured, Terraform will ignore policy attachments to this resource. When configured, Terraform will align the role's managed policy attachments with this set by attaching or detaching managed policies. Configuring an empty set (i.e., `managed_policy_arns = []`) will cause Terraform to remove _all_ managed policy attachments. +* `maxSessionDuration` - (Optional) Maximum session duration (in seconds) that you want to set for the specified role. 
If you do not specify a value for this setting, the default maximum of one hour is applied. This setting can have a value from 1 hour to 12 hours. +* `name` - (Optional, Forces new resource) Friendly name of the role. If omitted, Terraform will assign a random, unique name. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) for more information. +* `namePrefix` - (Optional, Forces new resource) Creates a unique friendly name beginning with the specified prefix. Conflicts with `name`. +* `path` - (Optional) Path to the role. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html) for more information. +* `permissionsBoundary` - (Optional) ARN of the policy that is used to set the permissions boundary for the role. +* `tags` - Key-value mapping of tags for the IAM role. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### inline_policy + +This configuration block supports the following: + +~> **NOTE:** Since one empty block (i.e., `inline_policy {}`) is valid syntactically to remove out of band policies on `apply`, `name` and `policy` are technically _optional_. However, they are both _required_ in order to manage actual inline policies. Not including one or the other may not result in Terraform errors but will result in unpredictable and incorrect behavior. + +* `name` - (Required) Name of the role policy. +* `policy` - (Required) Policy document as a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/tutorials/terraform/aws-iam-policy). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) specifying the role. +* `createDate` - Creation date of the IAM role. +* `id` - Name of the role. +* `name` - Name of the role. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `uniqueId` - Stable and unique string identifying the role. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Roles using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM Roles using the `name`. 
For example: + +```console +% terraform import aws_iam_role.developer developer_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_role_policy.html.markdown b/website/docs/cdktf/typescript/r/iam_role_policy.html.markdown new file mode 100644 index 00000000000..4ae522e0457 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_role_policy.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_role_policy" +description: |- + Provides an IAM role policy. +--- + + + +# Resource: aws_iam_role_policy + +Provides an IAM role inline policy. + +~> **NOTE:** For a given role, this resource is incompatible with using the [`awsIamRole` resource](/docs/providers/aws/r/iam_role.html) `inlinePolicy` argument. When using that argument and this resource, both will attempt to manage the role's inline policies and Terraform will show a permanent difference. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testRole = new IamRole(this, "test_role", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "ec2.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "test_role", + }); + new IamRolePolicy(this, "test_policy", { + name: "test_policy", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ec2:Describe*"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + role: testRole.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the role policy. If omitted, Terraform will +assign a random, unique name. +* `namePrefix` - (Optional) Creates a unique name beginning with the specified + prefix. Conflicts with `name`. +* `policy` - (Required) The inline policy document. This is a JSON formatted string. For more information about building IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy) +* `role` - (Required) The name of the IAM role to attach to the policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The role policy ID, in the form of `roleName:rolePolicyName`. +* `name` - The name of the policy. +* `policy` - The policy document attached to the role. +* `role` - The name of the role associated with the policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Role Policies using the `roleName:rolePolicyName`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM Role Policies using the `roleName:rolePolicyName`. For example: + +```console +% terraform import aws_iam_role_policy.mypolicy role_of_mypolicy_name:mypolicy_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_role_policy_attachment.markdown b/website/docs/cdktf/typescript/r/iam_role_policy_attachment.markdown new file mode 100644 index 00000000000..137fa10309c --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_role_policy_attachment.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_role_policy_attachment" +description: |- + Attaches a Managed IAM Policy to an IAM role +--- + + + +# Resource: aws_iam_role_policy_attachment + +Attaches a Managed IAM Policy to an IAM role + +~> **NOTE:** The usage of this resource conflicts with the `awsIamPolicyAttachment` resource and will permanently show a difference if both are defined. + +~> **NOTE:** For a given role, this resource is incompatible with using the [`awsIamRole` resource](/docs/providers/aws/r/iam_role.html) `managedPolicyArns` argument. When using that argument and this resource, both will attempt to manage the role's managed policy attachments and Terraform will show a permanent difference. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["ec2.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const policy = new DataAwsIamPolicyDocument(this, "policy", { + statement: [ + { + actions: ["ec2:Describe*"], + effect: "Allow", + resources: ["*"], + }, + ], + }); + const awsIamPolicyPolicy = new IamPolicy(this, "policy_2", { + description: "A test policy", + name: "test-policy", + policy: Token.asString(policy.json), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamPolicyPolicy.overrideLogicalId("policy"); + const role = new IamRole(this, "role", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "test-role", + }); + new IamRolePolicyAttachment(this, "test-attach", { + policyArn: Token.asString(awsIamPolicyPolicy.arn), + role: role.name, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `role` (Required) - The name of the IAM role to which the policy should be applied +* `policyArn` (Required) - The ARN of the policy you want to apply + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM role policy attachments using the role name and policy arn separated by `/`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM role policy attachments using the role name and policy arn separated by `/`. For example: + +```console +% terraform import aws_iam_role_policy_attachment.test-attach test-role/arn:aws:iam::xxxxxxxxxxxx:policy/test-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_saml_provider.html.markdown b/website/docs/cdktf/typescript/r/iam_saml_provider.html.markdown new file mode 100644 index 00000000000..a8be6cfcfa7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_saml_provider.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_saml_provider" +description: |- + Provides an IAM SAML provider. +--- + + + +# Resource: aws_iam_saml_provider + +Provides an IAM SAML provider. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamSamlProvider } from "./.gen/providers/aws/iam-saml-provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IamSamlProvider(this, "default", { + name: "myprovider", + samlMetadataDocument: Token.asString(Fn.file("saml-metadata.xml")), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the provider to create. +* `samlMetadataDocument` - (Required) An XML document generated by an identity provider that supports SAML 2.0. +* `tags` - (Optional) Map of resource tags for the IAM SAML provider. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS for this provider. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `validUntil` - The expiration date and time for the SAML provider in RFC1123 format, e.g., `Mon, 02 Jan 2006 15:04:05 MST`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM SAML Providers using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM SAML Providers using the `arn`. For example:
+
+```console
+% terraform import aws_iam_saml_provider.default arn:aws:iam::123456789012:saml-provider/SAMLADFS
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_security_token_service_preferences.html.markdown b/website/docs/cdktf/typescript/r/iam_security_token_service_preferences.html.markdown
new file mode 100644
index 00000000000..82262299445
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_security_token_service_preferences.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_security_token_service_preferences"
+description: |-
+  Provides an IAM Security Token Service Preferences resource.
+---
+
+
+
+# Resource: aws_iam_security_token_service_preferences
+
+Provides an IAM Security Token Service Preferences resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamSecurityTokenServicePreferences } from "./.gen/providers/aws/iam-security-token-service-preferences";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamSecurityTokenServicePreferences(this, "example", {
+      globalEndpointTokenVersion: "v2Token",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `globalEndpointTokenVersion` - (Required) The version of the STS global endpoint token. Valid values: `v1Token`, `v2Token`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS Account ID.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_server_certificate.html.markdown b/website/docs/cdktf/typescript/r/iam_server_certificate.html.markdown
new file mode 100644
index 00000000000..dbcf92df443
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_server_certificate.html.markdown
@@ -0,0 +1,187 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_server_certificate"
+description: |-
+  Provides an IAM Server Certificate
+---
+
+
+
+# Resource: aws_iam_server_certificate
+
+Provides an IAM Server Certificate resource to upload Server Certificates.
+Certs uploaded to IAM can easily work with other AWS services such as:
+
+- AWS Elastic Beanstalk
+- Elastic Load Balancing
+- CloudFront
+- AWS OpsWorks
+
+For information about server certificates in IAM, see [Managing Server
+Certificates][2] in AWS Documentation.
+
+~> **Note:** All arguments including the private key will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+**Using certs on file:**
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamServerCertificate } from "./.gen/providers/aws/iam-server-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamServerCertificate(this, "test_cert", {
+      certificateBody: Token.asString(Fn.file("self-ca-cert.pem")),
+      name: "some_test_cert",
+      privateKey: Token.asString(Fn.file("test-key.pem")),
+    });
+  }
+}
+
+```
+
+**Example with cert in-line:**
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamServerCertificate } from "./.gen/providers/aws/iam-server-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamServerCertificate(this, "test_cert_alt", {
+      certificateBody:
+        "-----BEGIN CERTIFICATE-----\n[......] # cert contents\n-----END CERTIFICATE-----\n\n",
+      name: "alt_test_cert",
+      privateKey:
+        "-----BEGIN RSA PRIVATE KEY-----\n[......] # cert contents\n-----END RSA PRIVATE KEY-----\n\n",
+    });
+  }
+}
+
+```
+
+**Use in combination with an AWS ELB resource:**
+
+Some properties of an IAM Server Certificate cannot be updated while it is
+in use. In order for Terraform to effectively manage a Certificate in this situation, it is
+recommended that you use the `namePrefix` attribute and enable the
+`createBeforeDestroy` [lifecycle block][lifecycle]. This will allow Terraform
+to create a new, updated `awsIamServerCertificate` resource and replace it in
+dependent resources before attempting to destroy the old version.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
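+ *
+ * The example below pairs namePrefix with createBeforeDestroy so that a
+ * replacement certificate is created and swapped into the ELB listener
+ * before the old certificate is destroyed.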
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { IamServerCertificate } from "./.gen/providers/aws/iam-server-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const testCert = new IamServerCertificate(this, "test_cert", {
+      certificateBody: Token.asString(Fn.file("self-ca-cert.pem")),
+      lifecycle: {
+        createBeforeDestroy: true,
+      },
+      namePrefix: "example-cert",
+      privateKey: Token.asString(Fn.file("test-key.pem")),
+    });
+    new Elb(this, "ourapp", {
+      availabilityZones: ["us-west-2a"],
+      crossZoneLoadBalancing: true,
+      listener: [
+        {
+          instancePort: 8000,
+          instanceProtocol: "http",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId: testCert.arn,
+        },
+      ],
+      name: "terraform-asg-deployment-example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the Server Certificate. Do not include the
+  path in this value. If omitted, Terraform will assign a random, unique name.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified
+  prefix. Conflicts with `name`.
+* `certificateBody` – (Required) The contents of the public key certificate in
+  PEM-encoded format.
+* `certificateChain` – (Optional) The contents of the certificate chain.
+  This is typically a concatenation of the PEM-encoded public key certificates
+  of the chain.
+* `privateKey` – (Required) The contents of the private key in PEM-encoded format.
+* `path` - (Optional) The IAM path for the server certificate. If it is not
+  included, it defaults to a slash (/). If this certificate is for use with
+  AWS CloudFront, the path must be in the format `/cloudfront/your_path_here`.
+  See [IAM Identifiers][1] for more details on IAM Paths.
+* `tags` - (Optional) Map of resource tags for the server certificate. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+~> **NOTE:** AWS performs behind-the-scenes modifications to some certificate files if they do not adhere to a specific format. These modifications will result in Terraform forever believing that it needs to update the resources since the local and AWS file contents will not match after these modifications occur. In order to prevent this from happening, you must ensure that all your PEM-encoded files use UNIX line-breaks and that `certificateBody` contains only one certificate. All other certificates should go in `certificateChain`. It is common for some Certificate Authorities to issue certificate files that have DOS line-breaks and that are actually multiple certificates concatenated together in order to form a full certificate chain.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) specifying the server certificate.
+* `expiration` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) on which the certificate is set to expire.
+* `id` - The unique Server Certificate name.
+* `name` - The name of the Server Certificate.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uploadDate` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) when the server certificate was uploaded.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Server Certificates using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM Server Certificates using the `name`. For example:
+
+```console
+% terraform import aws_iam_server_certificate.certificate example.com-certificate-until-2018
+```
+
+[1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html
+[2]: https://docs.aws.amazon.com/IAM/latest/UserGuide/ManagingServerCerts.html
+[lifecycle]: /docs/configuration/resources.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_service_linked_role.html.markdown b/website/docs/cdktf/typescript/r/iam_service_linked_role.html.markdown
new file mode 100644
index 00000000000..8216244c168
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_service_linked_role.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_service_linked_role"
+description: |-
+  Provides an IAM service-linked role.
+---
+
+
+
+# Resource: aws_iam_service_linked_role
+
+Provides an [IAM service-linked role](https://docs.aws.amazon.com/IAM/latest/UserGuide/using-service-linked-roles.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamServiceLinkedRole } from "./.gen/providers/aws/iam-service-linked-role";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamServiceLinkedRole(this, "elasticbeanstalk", {
+      awsServiceName: "elasticbeanstalk.amazonaws.com",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `awsServiceName` - (Required, Forces new resource) The AWS service to which this role is attached. You use a string similar to a URL but without the `http://` in front. For example: `elasticbeanstalk.amazonaws.com`. To find the full list of services that support service-linked roles, check [the docs](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_aws-services-that-work-with-iam.html).
+* `customSuffix` - (Optional, forces new resource) Additional string appended to the role name. Not all AWS services support custom suffixes.
+* `description` - (Optional) The description of the role.
+* `tags` - Key-value mapping of tags for the IAM role.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the role. +* `arn` - The Amazon Resource Name (ARN) specifying the role. +* `createDate` - The creation date of the IAM role. +* `name` - The name of the role. +* `path` - The path of the role. +* `uniqueId` - The stable and unique string identifying the role. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM service-linked roles using role ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM service-linked roles using role ARN. For example: + +```console +% terraform import aws_iam_service_linked_role.elasticbeanstalk arn:aws:iam::123456789012:role/aws-service-role/elasticbeanstalk.amazonaws.com/AWSServiceRoleForElasticBeanstalk +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_service_specific_credential.html.markdown b/website/docs/cdktf/typescript/r/iam_service_specific_credential.html.markdown new file mode 100644 index 00000000000..34c0181920e --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_service_specific_credential.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_service_specific_credential" +description: |- + Provides an IAM Service Specific Credential. +--- + + + +# Resource: aws_iam_service_specific_credential + +Provides an IAM Service Specific Credential. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamServiceSpecificCredential } from "./.gen/providers/aws/iam-service-specific-credential"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new IamUser(this, "example", { + name: "example", + }); + const awsIamServiceSpecificCredentialExample = + new IamServiceSpecificCredential(this, "example_1", { + serviceName: "codecommit.amazonaws.com", + userName: example.name, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsIamServiceSpecificCredentialExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `serviceName` - (Required) The name of the AWS service that is to be associated with the credentials. The service you specify here is the only service that can be accessed using these credentials.
+* `userName` - (Required) The name of the IAM user that is to be associated with the credentials. The new service-specific credentials have the same permissions as the associated user except that they can be used only to access the specified service.
+* `status` - (Optional) The status to be assigned to the service-specific credential. Valid values are `active` and `inactive`. Default value is `active`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The combination of `serviceName` and `userName`, in the form `serviceName:userName:serviceSpecificCredentialId`.
+* `servicePassword` - The generated password for the service-specific credential.
+* `serviceUserName` - The generated user name for the service-specific credential. This value is generated by combining the IAM user's name with the ID number of the AWS account, as in `jane@123456789012`.
+* `serviceSpecificCredentialId` - The unique identifier for the service-specific credential.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Service Specific Credentials using the `serviceName:userName:serviceSpecificCredentialId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM Service Specific Credentials using the `serviceName:userName:serviceSpecificCredentialId`. For example:
+
+```console
+% terraform import aws_iam_service_specific_credential.default codecommit.amazonaws.com:example:some-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_signing_certificate.html.markdown b/website/docs/cdktf/typescript/r/iam_signing_certificate.html.markdown
new file mode 100644
index 00000000000..226272b89ca
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_signing_certificate.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_signing_certificate"
+description: |-
+  Provides an IAM Signing Certificate
+---
+
+
+
+# Resource: aws_iam_signing_certificate
+
+Provides an IAM Signing Certificate resource to upload Signing Certificates.
+
+~> **Note:** All arguments including the certificate body will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+**Using certs on file:**
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
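+ *
+ * The example below uploads a signing certificate for the IAM user
+ * "some_test_cert", reading the PEM body from a local file via Fn.file.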
+ */
+import { IamSigningCertificate } from "./.gen/providers/aws/iam-signing-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamSigningCertificate(this, "test_cert", {
+      certificateBody: Token.asString(Fn.file("self-ca-cert.pem")),
+      userName: "some_test_cert",
+    });
+  }
+}
+
+```
+
+**Example with cert in-line:**
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamSigningCertificate } from "./.gen/providers/aws/iam-signing-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamSigningCertificate(this, "test_cert_alt", {
+      certificateBody:
+        "-----BEGIN CERTIFICATE-----\n[......] # cert contents\n-----END CERTIFICATE-----\n\n",
+      userName: "some_test_cert",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificateBody` – (Required) The contents of the signing certificate in PEM-encoded format.
+* `status` – (Optional) The status you want to assign to the certificate. `active` means that the certificate can be used for programmatic calls to Amazon Web Services; `inactive` means that the certificate cannot be used.
+* `userName` – (Required) The name of the user the signing certificate is for.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `certificateId` - The ID for the signing certificate.
+* `id` - The ID in the form `certificateId:userName`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Signing Certificates using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM Signing Certificates using the `id`. For example:
+
+```console
+% terraform import aws_iam_signing_certificate.certificate IDIDIDIDID:user-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_user.html.markdown b/website/docs/cdktf/typescript/r/iam_user.html.markdown
new file mode 100644
index 00000000000..b202e876ae5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_user.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user"
+description: |-
+  Provides an IAM user.
+---
+
+
+
+# Resource: aws_iam_user
+
+Provides an IAM user.
+
+~> **NOTE:** If policies are attached to the user via the [`awsIamPolicyAttachment` resource](/docs/providers/aws/r/iam_policy_attachment.html) and you are modifying the user `name` or `path`, the `forceDestroy` argument must be set to `true` and applied before attempting the operation; otherwise you will encounter a `deleteConflict` error.
The [`awsIamUserPolicyAttachment` resource (recommended)](/docs/providers/aws/r/iam_user_policy_attachment.html) does not have this requirement. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamAccessKey } from "./.gen/providers/aws/iam-access-key"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +import { IamUserPolicy } from "./.gen/providers/aws/iam-user-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const lb = new IamUser(this, "lb", { + name: "loadbalancer", + path: "/system/", + tags: { + "tag-key": "tag-value", + }, + }); + const lbRo = new DataAwsIamPolicyDocument(this, "lb_ro", { + statement: [ + { + actions: ["ec2:Describe*"], + effect: "Allow", + resources: ["*"], + }, + ], + }); + const awsIamAccessKeyLb = new IamAccessKey(this, "lb_2", { + user: lb.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamAccessKeyLb.overrideLogicalId("lb"); + const awsIamUserPolicyLbRo = new IamUserPolicy(this, "lb_ro_3", { + name: "test", + policy: Token.asString(lbRo.json), + user: lb.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamUserPolicyLbRo.overrideLogicalId("lb_ro"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The user's name. The name must consist of upper and lowercase alphanumeric characters with no spaces. You can also include any of the following characters: `=,.@`. User names are not distinguished by case. For example, you cannot create users named both "TESTUSER" and "testuser". +* `path` - (Optional, default "/") Path in which to create the user. +* `permissionsBoundary` - (Optional) The ARN of the policy that is used to set the permissions boundary for the user. +* `forceDestroy` - (Optional, default false) When destroying this user, destroy even if it + has non-Terraform-managed IAM access keys, login profile or MFA devices. Without `forceDestroy` + a user with non-Terraform-managed access keys and login profile will fail to be destroyed. +* `tags` - Key-value map of tags for the IAM user. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS for this user. +* `name` - The user's name. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `uniqueId` - The [unique ID][1] assigned by AWS. 
+ + [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html#GUIDs + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Users using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM Users using the `name`. For example: + +```console +% terraform import aws_iam_user.lb loadbalancer +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_user_group_membership.html.markdown b/website/docs/cdktf/typescript/r/iam_user_group_membership.html.markdown new file mode 100644 index 00000000000..2d87e549ca4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_user_group_membership.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_group_membership" +description: |- + Provides a resource for adding an IAM User to IAM Groups without conflicting + with itself. +--- + + + +# Resource: aws_iam_user_group_membership + +Provides a resource for adding an [IAM User][2] to [IAM Groups][1]. This +resource can be used multiple times with the same user for non-overlapping +groups. + +To exclusively manage the users in a group, see the +[`awsIamGroupMembership` resource][3]. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamGroup } from "./.gen/providers/aws/iam-group"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +import { IamUserGroupMembership } from "./.gen/providers/aws/iam-user-group-membership"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const group1 = new IamGroup(this, "group1", { + name: "group1", + }); + const group2 = new IamGroup(this, "group2", { + name: "group2", + }); + const group3 = new IamGroup(this, "group3", { + name: "group3", + }); + const user1 = new IamUser(this, "user1", { + name: "user1", + }); + new IamUserGroupMembership(this, "example1", { + groups: [group1.name, group2.name], + user: user1.name, + }); + new IamUserGroupMembership(this, "example2", { + groups: [group3.name], + user: user1.name, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user` - (Required) The name of the [IAM User][2] to add to groups +* `groups` - (Required) A list of [IAM Groups][1] to add the user to + +## Attribute Reference + +This resource exports no additional attributes. + +[1]: /docs/providers/aws/r/iam_group.html +[2]: /docs/providers/aws/r/iam_user.html +[3]: /docs/providers/aws/r/iam_group_membership.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM user group membership using the user name and group names separated by `/`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM user group membership using the user name and group names separated by `/`. For example: + +```console +% terraform import aws_iam_user_group_membership.example1 user1/group1/group2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_user_login_profile.html.markdown b/website/docs/cdktf/typescript/r/iam_user_login_profile.html.markdown new file mode 100644 index 00000000000..b6cf2abc904 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_user_login_profile.html.markdown @@ -0,0 +1,125 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_login_profile" +description: |- + Manages an IAM User Login Profile +--- + + + +# Resource: aws_iam_user_login_profile + +Manages an IAM User Login Profile with limited support for password creation during Terraform resource creation. Uses PGP to encrypt the password for safe transport to the user. PGP keys can be obtained from Keybase. + +-> To reset an IAM User login password via Terraform, you can use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html) or change any of the arguments. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamUser } from "./.gen/providers/aws/iam-user"; +import { IamUserLoginProfile } from "./.gen/providers/aws/iam-user-login-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new IamUser(this, "example", { + forceDestroy: true, + name: "example", + path: "/", + }); + const awsIamUserLoginProfileExample = new IamUserLoginProfile( + this, + "example_1", + { + pgpKey: "keybase:some_person_that_exists", + user: example.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamUserLoginProfileExample.overrideLogicalId("example"); + new TerraformOutput(this, "password", { + value: awsIamUserLoginProfileExample.encryptedPassword, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user` - (Required) The IAM user's name. +* `pgpKey` - (Optional) Either a base-64 encoded PGP public key, or a keybase username in the form `keybase:username`. Only applies on resource creation. Drift detection is not possible with this argument. +* `passwordLength` - (Optional) The length of the generated password on resource creation. Only applies on resource creation. Drift detection is not possible with this argument. Default value is `20`. +* `passwordResetRequired` - (Optional) Whether the user should be forced to reset the generated password on resource creation. Only applies on resource creation. 
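+
+As a non-authoritative sketch of the base-64 alternative to a Keybase username: the `pgpKey` argument can also take the base64-encoded contents of a PGP public key file. The file name `public-key.pgp` and the user name below are illustrative, not part of the original examples:
+
+```typescript
+// A sketch, not generated by 'cdktf convert': supplying a raw base64-encoded
+// PGP public key instead of a keybase:username reference.
+import { Construct } from "constructs";
+import { Fn, TerraformStack } from "cdktf";
+import { IamUserLoginProfile } from "./.gen/providers/aws/iam-user-login-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamUserLoginProfile(this, "example", {
+      user: "example",
+      // Fn.filebase64 reads the key file and base64-encodes its contents,
+      // which is the format pgpKey expects for a raw key.
+      pgpKey: Fn.filebase64("public-key.pgp"),
+      passwordLength: 32,
+      passwordResetRequired: true,
+    });
+  }
+}
+```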
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `password` - The plain text password, only available when `pgpKey` is not provided.
+* `keyFingerprint` - The fingerprint of the PGP key used to encrypt the password. Only available if password was handled on Terraform resource creation, not import.
+* `encryptedPassword` - The encrypted password, base64 encoded. Only available if password was handled on Terraform resource creation, not import.
+
+~> **NOTE:** The encrypted password may be decrypted using the command line,
+   for example: `terraform output password | base64 --decode | keybase pgp decrypt`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM User Login Profiles without password information via the IAM User name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM User Login Profiles without password information via the IAM User name. For example:
+
+```console
+% terraform import aws_iam_user_login_profile.example myusername
+```
+
+Since Terraform has no method to read the PGP or password information during import, use the [Terraform resource `lifecycle` configuration block `ignoreChanges` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to ignore them (unless you want to recreate a password). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamUserLoginProfile } from "./.gen/providers/aws/iam-user-login-profile";
+interface MyConfig {
+  user: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new IamUserLoginProfile(this, "example", {
+      lifecycle: {
+        // ignoreChanges takes the Terraform attribute names as strings.
+        ignoreChanges: ["password_length", "password_reset_required", "pgp_key"],
+      },
+      user: config.user,
+    });
+  }
+}
+
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_user_policy.html.markdown b/website/docs/cdktf/typescript/r/iam_user_policy.html.markdown
new file mode 100644
index 00000000000..9f187db4a81
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_user_policy.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_user_policy"
+description: |-
+  Provides an IAM policy attached to a user.
+---
+
+
+
+# Resource: aws_iam_user_policy
+
+Provides an IAM policy attached to a user.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { IamAccessKey } from "./.gen/providers/aws/iam-access-key"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +import { IamUserPolicy } from "./.gen/providers/aws/iam-user-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const lb = new IamUser(this, "lb", { + name: "loadbalancer", + path: "/system/", + }); + new IamUserPolicy(this, "lb_ro", { + name: "test", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ec2:Describe*"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + user: lb.name, + }); + const awsIamAccessKeyLb = new IamAccessKey(this, "lb_2", { + user: lb.name, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamAccessKeyLb.overrideLogicalId("lb"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The policy document. This is a JSON formatted string. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `name` - (Optional) The name of the policy. If omitted, Terraform will assign a random, unique name. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `user` - (Required) IAM user to which to attach this policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The user policy ID, in the form of `userName:userPolicyName`. +* `name` - The name of the policy (always set). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM User Policies using the `userName:userPolicyName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM User Policies using the `userName:userPolicyName`. For example: + +```console +% terraform import aws_iam_user_policy.mypolicy user_of_mypolicy_name:mypolicy_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_user_policy_attachment.markdown b/website/docs/cdktf/typescript/r/iam_user_policy_attachment.markdown new file mode 100644 index 00000000000..6c0e370a837 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_user_policy_attachment.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_policy_attachment" +description: |- + Attaches a Managed IAM Policy to an IAM user +--- + + + +# Resource: aws_iam_user_policy_attachment + +Attaches a Managed IAM Policy to an IAM user + +~> **NOTE:** The usage of this resource conflicts with the `awsIamPolicyAttachment` resource and will permanently show a difference if both are defined. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { IamUser } from "./.gen/providers/aws/iam-user"; +import { IamUserPolicyAttachment } from "./.gen/providers/aws/iam-user-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const policy = new IamPolicy(this, "policy", { + description: "A test policy", + name: "test-policy", + policy: "{ ... policy JSON ... }", + }); + const user = new IamUser(this, "user", { + name: "test-user", + }); + new IamUserPolicyAttachment(this, "test-attach", { + policyArn: policy.arn, + user: user.name, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `user` (Required) - The user the policy should be applied to +* `policyArn` (Required) - The ARN of the policy you want to apply + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM user policy attachments using the user name and policy arn separated by `/`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IAM user policy attachments using the user name and policy arn separated by `/`. For example: + +```console +% terraform import aws_iam_user_policy_attachment.test-attach test-user/arn:aws:iam::xxxxxxxxxxxx:policy/test-policy +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iam_user_ssh_key.html.markdown b/website/docs/cdktf/typescript/r/iam_user_ssh_key.html.markdown new file mode 100644 index 00000000000..726c38178e9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iam_user_ssh_key.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "IAM (Identity & Access Management)" +layout: "aws" +page_title: "AWS: aws_iam_user_ssh_key" +description: |- + Uploads an SSH public key and associates it with the specified IAM user. +--- + + + +# Resource: aws_iam_user_ssh_key + +Uploads an SSH public key and associates it with the specified IAM user. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { IamUser } from "./.gen/providers/aws/iam-user";
+import { IamUserSshKey } from "./.gen/providers/aws/iam-user-ssh-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const user = new IamUser(this, "user", {
+      name: "test-user",
+      path: "/",
+    });
+    const awsIamUserSshKeyUser = new IamUserSshKey(this, "user_1", {
+      encoding: "SSH",
+      publicKey:
+        "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 mytest@mydomain.com",
+      username: user.name,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamUserSshKeyUser.overrideLogicalId("user");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `username` - (Required) The name of the IAM user to associate the SSH public key with.
+* `encoding` - (Required) Specifies the public key encoding format to use in the response. To retrieve the public key in ssh-rsa format, use `SSH`. To retrieve the public key in PEM format, use `PEM`.
+* `publicKey` - (Required) The SSH public key. The public key must be encoded in ssh-rsa format or PEM format.
+* `status` - (Optional) The status to assign to the SSH public key. Active means the key can be used for authentication with an AWS CodeCommit repository. Inactive means the key cannot be used. Default is `active`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `sshPublicKeyId` - The unique identifier for the SSH public key.
+* `fingerprint` - The MD5 message digest of the SSH public key.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSH public keys using the `username`, `sshPublicKeyId`, and `encoding`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSH public keys using the `username`, `sshPublicKeyId`, and `encoding`. For example:
+
+```console
+% terraform import aws_iam_user_ssh_key.user user:APKAJNCNNJICVN7CFKCA:SSH
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iam_virtual_mfa_device.html.markdown b/website/docs/cdktf/typescript/r/iam_virtual_mfa_device.html.markdown
new file mode 100644
index 00000000000..0d264a7f333
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iam_virtual_mfa_device.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "IAM (Identity & Access Management)"
+layout: "aws"
+page_title: "AWS: aws_iam_virtual_mfa_device"
+description: |-
+  Provides an IAM Virtual MFA Device
+---
+
+
+
+# Resource: aws_iam_virtual_mfa_device
+
+Provides an IAM Virtual MFA Device.
+
+~> **Note:** All attributes will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **Note:** A virtual MFA device cannot be directly associated with an IAM User from Terraform.
+  To associate the virtual MFA device with a user and enable it, use the code returned in either `base32StringSeed` or `qrCodePng` to generate TOTP authentication codes.
+  The authentication codes can then be used with the AWS CLI command [`aws iam enable-mfa-device`](https://docs.aws.amazon.com/cli/latest/reference/iam/enable-mfa-device.html) or the AWS API call [`enableMfaDevice`](https://docs.aws.amazon.com/IAM/latest/APIReference/API_EnableMFADevice.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IamVirtualMfaDevice } from "./.gen/providers/aws/iam-virtual-mfa-device";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IamVirtualMfaDevice(this, "example", {
+      virtualMfaDeviceName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `virtualMfaDeviceName` - (Required) The name of the virtual MFA device. Use with path to uniquely identify a virtual MFA device.
+* `path` – (Optional) The path for the virtual MFA device.
+* `tags` - (Optional) Map of resource tags for the virtual MFA device. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) specifying the virtual MFA device.
+* `base32StringSeed` - The base32 seed defined as specified in [RFC3548](https://tools.ietf.org/html/rfc3548.txt). The `base32StringSeed` is base64-encoded.
+* `enableDate` - The date and time when the virtual MFA device was enabled.
+* `qrCodePng` - A QR code PNG image that encodes `otpauth://totp/$virtualMfaDeviceName@$accountName?secret=$base32String`, where `$virtualMfaDeviceName` is one of the create call arguments, `$accountName` is the user name if set (otherwise, the account ID), and `$base32String` is the seed in base32 format.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `userName` - The associated IAM User name if the virtual MFA device is enabled.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IAM Virtual MFA Devices using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IAM Virtual MFA Devices using the `arn`.
For example:
+
+```console
+% terraform import aws_iam_virtual_mfa_device.example arn:aws:iam::123456789012:mfa/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/identitystore_group.html.markdown b/website/docs/cdktf/typescript/r/identitystore_group.html.markdown
new file mode 100644
index 00000000000..6a7c0233d77
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/identitystore_group.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "SSO Identity Store"
+layout: "aws"
+page_title: "AWS: aws_identitystore_group"
+description: |-
+  Terraform resource for managing an AWS IdentityStore Group.
+---
+
+
+
+# Resource: aws_identitystore_group
+
+Terraform resource for managing an AWS IdentityStore Group.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances";
+import { IdentitystoreGroup } from "./.gen/providers/aws/identitystore-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // The SSO instance lookup was implicit in the original snippet; it is
+    // declared here so that `example` resolves.
+    const example = new DataAwsSsoadminInstances(this, "example", {});
+    new IdentitystoreGroup(this, "this", {
+      description: "Example description",
+      displayName: "Example group",
+      identityStoreId: Token.asString(
+        propertyAccess(Fn.tolist(example.identityStoreIds), ["0"])
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `identityStoreId` - (Required) The globally unique identifier for the identity store.
+
+The following arguments are optional:
+
+* `displayName` - (Optional) A string containing the name of the group. This value is commonly displayed when the group is referenced.
+* `description` - (Optional) A string containing the description of the group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `groupId` - The identifier of the newly created group in the identity store.
+* `externalIds` - A list of external IDs that contains the identifiers issued to this resource by an external identity provider. See [External IDs](#external-ids) below.
+
+### External IDs
+
+* `id` - The identifier issued to this resource by an external identity provider.
+* `issuer` - The issuer for an external identifier.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Identity Store Group using the combination `identityStoreId/groupId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an Identity Store Group using the combination `identityStoreId/groupId`.
For example: + +```console +% terraform import aws_identitystore_group.example d-9c6705e95c/b8a1c340-8031-7071-a2fb-7dc540320c30 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/identitystore_group_membership.html.markdown b/website/docs/cdktf/typescript/r/identitystore_group_membership.html.markdown new file mode 100644 index 00000000000..edfe01533cc --- /dev/null +++ b/website/docs/cdktf/typescript/r/identitystore_group_membership.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "SSO Identity Store" +layout: "aws" +page_title: "AWS: aws_identitystore_group_membership" +description: |- + Terraform resource for managing an AWS IdentityStore Group Membership. +--- + + + +# Resource: aws_identitystore_group_membership + +Terraform resource for managing an AWS IdentityStore Group Membership. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances"; +import { IdentitystoreGroup } from "./.gen/providers/aws/identitystore-group"; +import { IdentitystoreGroupMembership } from "./.gen/providers/aws/identitystore-group-membership"; +import { IdentitystoreUser } from "./.gen/providers/aws/identitystore-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsSsoadminInstances(this, "example", {}); + const awsIdentitystoreGroupExample = new IdentitystoreGroup( + this, + "example_1", + { + description: "Some group name", + displayName: "MyGroup", + identityStoreId: Token.asString( + propertyAccess(Fn.tolist(example.identityStoreIds), ["0"]) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIdentitystoreGroupExample.overrideLogicalId("example"); + const awsIdentitystoreUserExample = new IdentitystoreUser( + this, + "example_2", + { + displayName: "John Doe", + identityStoreId: Token.asString( + propertyAccess(Fn.tolist(example.identityStoreIds), ["0"]) + ), + name: { + familyName: "Doe", + givenName: "John", + }, + userName: "john.doe@example.com", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIdentitystoreUserExample.overrideLogicalId("example"); + const awsIdentitystoreGroupMembershipExample = + new IdentitystoreGroupMembership(this, "example_3", { + groupId: Token.asString(awsIdentitystoreGroupExample.groupId), + identityStoreId: Token.asString( + propertyAccess(Fn.tolist(example.identityStoreIds), ["0"]) + ), + memberId: Token.asString(awsIdentitystoreUserExample.userId), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIdentitystoreGroupMembershipExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `memberId` - (Required) The identifier for a user in the Identity Store. +* `groupId` - (Required) The identifier for a group in the Identity Store. 
+* `identityStoreId` - (Required) Identity Store ID associated with the Single Sign-On Instance. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `membershipId` - The identifier of the newly created group membership in the Identity Store. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsIdentitystoreGroupMembership` using the `identityStoreId/membershipId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsIdentitystoreGroupMembership` using the `identityStoreId/membershipId`. For example: + +```console +% terraform import aws_identitystore_group_membership.example d-0000000000/00000000-0000-0000-0000-000000000000 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/identitystore_user.html.markdown b/website/docs/cdktf/typescript/r/identitystore_user.html.markdown new file mode 100644 index 00000000000..af8d030027a --- /dev/null +++ b/website/docs/cdktf/typescript/r/identitystore_user.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "SSO Identity Store" +layout: "aws" +page_title: "AWS: aws_identitystore_user" +description: |- + Terraform resource for managing an AWS Identity Store User. +--- + + + +# Resource: aws_identitystore_user + +This resource manages a User resource within an Identity Store. + +-> **Note:** If you use an external identity provider or Active Directory as your identity source, +use this resource with caution. IAM Identity Center does not support outbound synchronization, +so your identity source does not automatically update with the changes that you make to +users using this resource. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IdentitystoreUser } from "./.gen/providers/aws/identitystore-user"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IdentitystoreUser(this, "example", { + displayName: "John Doe", + emails: { + value: "john@example.com", + }, + identityStoreId: Token.asString( + propertyAccess( + Fn.tolist(dataAwsSsoadminInstancesExample.identityStoreIds), + ["0"] + ) + ), + name: { + familyName: "Doe", + givenName: "John", + }, + userName: "johndoe", + }); + } +} + +``` + +## Argument Reference + +-> Unless specified otherwise, all fields can contain up to 1024 characters of free-form text. + +The following arguments are required: + +* `displayName` - (Required) The name that is typically displayed when the user is referenced. +* `identityStoreId` - (Required, Forces new resource) The globally unique identifier for the identity store that this user is in. +* `name` - (Required) Details about the user's full name. Detailed below. +* `userName` - (Required, Forces new resource) A unique string used to identify the user. 
This value can consist of letters, accented characters, symbols, numbers, and punctuation. This value is specified at the time the user is created and stored as an attribute of the user object in the identity store. The limit is 128 characters.
+
+The following arguments are optional:
+
+* `addresses` - (Optional) Details about the user's address. At most 1 address is allowed. Detailed below.
+* `emails` - (Optional) Details about the user's email. At most 1 email is allowed. Detailed below.
+* `locale` - (Optional) The user's geographical region or location.
+* `nickname` - (Optional) An alternate name for the user.
+* `phoneNumbers` - (Optional) Details about the user's phone number. At most 1 phone number is allowed. Detailed below.
+* `preferredLanguage` - (Optional) The preferred language of the user.
+* `profileUrl` - (Optional) A URL that may be associated with the user.
+* `timezone` - (Optional) The user's time zone.
+* `title` - (Optional) The user's title.
+* `userType` - (Optional) The user type.
+
+### addresses Configuration Block
+
+* `country` - (Optional) The country that this address is in.
+* `formatted` - (Optional) The name that is typically displayed when the address is shown for display.
+* `locality` - (Optional) The address locality.
+* `postalCode` - (Optional) The postal code of the address.
+* `primary` - (Optional) When `true`, this is the primary address associated with the user.
+* `region` - (Optional) The region of the address.
+* `streetAddress` - (Optional) The street of the address.
+* `type` - (Optional) The type of address.
+
+### emails Configuration Block
+
+* `primary` - (Optional) When `true`, this is the primary email associated with the user.
+* `type` - (Optional) The type of email.
+* `value` - (Optional) The email address. This value must be unique across the identity store.
+
+### name Configuration Block
+
+The following arguments are required:
+
+* `familyName` - (Required) The family name of the user.
+* `givenName` - (Required) The given name of the user.
+
+The following arguments are optional:
+
+* `formatted` - (Optional) The name that is typically displayed when the name is shown for display.
+* `honorificPrefix` - (Optional) The honorific prefix of the user.
+* `honorificSuffix` - (Optional) The honorific suffix of the user.
+* `middleName` - (Optional) The middle name of the user.
+
+### phone_numbers Configuration Block
+
+* `primary` - (Optional) When `true`, this is the primary phone number associated with the user.
+* `type` - (Optional) The type of phone number.
+* `value` - (Optional) The user's phone number.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `externalIds` - A list of identifiers issued to this resource by an external identity provider.
+    * `id` - The identifier issued to this resource by an external identity provider.
+    * `issuer` - The issuer for an external identifier.
+* `userId` - The identifier for this user in the identity store.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Identity Store User using the combination `identityStoreId/userId`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an Identity Store User using the combination `identityStoreId/userId`. For example: + +```console +% terraform import aws_identitystore_user.example d-9c6705e95c/065212b4-9061-703b-5876-13a517ae2a7c +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/imagebuilder_component.html.markdown b/website/docs/cdktf/typescript/r/imagebuilder_component.html.markdown new file mode 100644 index 00000000000..3d18c28a203 --- /dev/null +++ b/website/docs/cdktf/typescript/r/imagebuilder_component.html.markdown @@ -0,0 +1,147 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_component" +description: |- + Manage an Image Builder Component +--- + + + +# Resource: aws_imagebuilder_component + +Manages an Image Builder Component. + +## Example Usage + +### Inline Data Document + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ImagebuilderComponent } from "./.gen/providers/aws/imagebuilder-component"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ImagebuilderComponent(this, "example", { + data: Token.asString( + Fn.yamlencode({ + phases: [ + { + name: "build", + steps: [ + { + action: "ExecuteBash", + inputs: { + commands: ["echo 'hello world'"], + }, + name: "example", + onFailure: "Continue", + }, + ], + }, + ], + schemaVersion: 1, + }) + ), + name: "example", + platform: "Linux", + version: "1.0.0", + }); + } +} + +``` + +### URI Document + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ImagebuilderComponent } from "./.gen/providers/aws/imagebuilder-component"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ImagebuilderComponent(this, "example", { + name: "example", + platform: "Linux", + uri: + "s3://${" + + awsS3ObjectExample.bucket + + "}/${" + + awsS3ObjectExample.key + + "}", + version: "1.0.0", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the component. +* `platform` - (Required) Platform of the component. +* `version` - (Required) Version of the component. + +The following attributes are optional: + +* `changeDescription` - (Optional) Change description of the component. +* `data` - (Optional) Inline YAML string with data of the component. Exactly one of `data` and `uri` can be specified. Terraform will only perform drift detection of its value when present in a configuration. +* `description` - (Optional) Description of the component. 
+* `kmsKeyId` - (Optional) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key used to encrypt the component.
+* `skipDestroy` - (Optional) Whether to retain the old version when the resource is destroyed or replacement is necessary. Defaults to `false`.
+* `supportedOsVersions` - (Optional) Set of Operating Systems (OS) supported by the component.
+* `tags` - (Optional) Key-value map of resource tags for the component. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `uri` - (Optional) S3 URI with data of the component. Exactly one of `data` and `uri` can be specified.
+
+~> **NOTE:** Updating `data` or `uri` requires specifying a new `version`. This causes replacement of the resource. The `skipDestroy` argument can be used to retain the old version.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the component.
+* `dateCreated` - Date the component was created.
+* `encrypted` - Encryption status of the component.
+* `owner` - Owner of the component.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `type` - Type of the component.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsImagebuilderComponents` resources using the Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsImagebuilderComponents` resources using the Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_imagebuilder_component.example arn:aws:imagebuilder:us-east-1:123456789012:component/example/1.0.0/1
+```
+
+Certain resource arguments, such as `uri`, cannot be read via the API and imported into Terraform. Terraform will display a difference for these arguments the first run after import if declared in the Terraform configuration for an imported resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/imagebuilder_container_recipe.html.markdown b/website/docs/cdktf/typescript/r/imagebuilder_container_recipe.html.markdown
new file mode 100644
index 00000000000..296b790608f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/imagebuilder_container_recipe.html.markdown
@@ -0,0 +1,167 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_container_recipe"
+description: |-
+  Manage an Image Builder Container Recipe
+---
+
+
+
+# Resource: aws_imagebuilder_container_recipe
+
+Manages an Image Builder Container Recipe.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ImagebuilderContainerRecipe } from "./.gen/providers/aws/imagebuilder-container-recipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderContainerRecipe(this, "example", {
+      component: [
+        {
+          componentArn: Token.asString(awsImagebuilderComponentExample.arn),
+          parameter: [
+            {
+              name: "Parameter1",
+              value: "Value1",
+            },
+            {
+              name: "Parameter2",
+              value: "Value2",
+            },
+          ],
+        },
+      ],
+      containerType: "DOCKER",
+      dockerfileTemplateData:
+        "FROM {{{ imagebuilder:parentImage }}}\n{{{ imagebuilder:environments }}}\n{{{ imagebuilder:components }}}\n\n",
+      name: "example",
+      parentImage:
+        "arn:aws:imagebuilder:eu-central-1:aws:image/amazon-linux-x86-latest/x.x.x",
+      targetRepository: {
+        repositoryName: Token.asString(awsEcrRepositoryExample.name),
+        service: "ECR",
+      },
+      version: "1.0.0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `component` - (Required) Ordered configuration block(s) with components for the container recipe. Detailed below.
+* `containerType` - (Required) The type of the container to create. Valid values: `DOCKER`.
+* `name` - (Required) The name of the container recipe.
+* `parentImage` - (Required) The base image for the container recipe.
+* `targetRepository` - (Required) The destination repository for the container image. Detailed below.
+* `version` - (Required) Version of the container recipe.
+
+The following attributes are optional:
+
+* `description` - (Optional) The description of the container recipe.
+* `dockerfileTemplateData` - (Optional) The Dockerfile template used to build the image as an inline data blob.
+* `dockerfileTemplateUri` - (Optional) The Amazon S3 URI for the Dockerfile that will be used to build the container image.
+* `instanceConfiguration` - (Optional) Configuration block used to configure an instance for building and testing container images. Detailed below.
+* `kmsKeyId` - (Optional) The KMS key used to encrypt the container image.
+* `platformOverride` - (Optional) Specifies the operating system platform when you use a custom base image.
+* `tags` - (Optional) Key-value map of resource tags for the container recipe. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `workingDirectory` - (Optional) The working directory to be used during build and test workflows.
+
+### component
+
+The `component` block supports the following arguments:
+
+* `componentArn` - (Required) Amazon Resource Name (ARN) of the Image Builder Component to associate.
+* `parameter` - (Optional) Configuration block(s) for parameters to configure the component. Detailed below.
+
+### parameter
+
+The following arguments are required:
+
+* `name` - (Required) The name of the component parameter.
+* `value` - (Required) The value for the named component parameter.
+
+### target_repository
+
+The following arguments are required:
+
+* `repositoryName` - (Required) The name of the container repository where the output container image is stored. This name is prefixed by the repository location.
+* `service` - (Required) The service in which this image is registered. Valid values: `ECR`.
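+
+For comparison with the inline `dockerfileTemplateData` example above, the following is a non-authoritative sketch that sources the Dockerfile template from S3 via `dockerfileTemplateUri` instead; the component ARN, bucket, key, and repository name are placeholders:
+
+```typescript
+// A sketch, not generated by 'cdktf convert': same recipe shape, but the
+// Dockerfile template comes from S3. Exactly one of dockerfileTemplateData
+// and dockerfileTemplateUri may be set.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ImagebuilderContainerRecipe } from "./.gen/providers/aws/imagebuilder-container-recipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderContainerRecipe(this, "example_s3", {
+      component: [
+        {
+          // Placeholder component ARN.
+          componentArn:
+            "arn:aws:imagebuilder:eu-central-1:aws:component/update-linux/x.x.x",
+        },
+      ],
+      containerType: "DOCKER",
+      dockerfileTemplateUri: "s3://example-bucket/Dockerfile.template",
+      name: "example-s3",
+      parentImage:
+        "arn:aws:imagebuilder:eu-central-1:aws:image/amazon-linux-x86-latest/x.x.x",
+      targetRepository: {
+        repositoryName: "example",
+        service: "ECR",
+      },
+      version: "1.0.0",
+    });
+  }
+}
+```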
+
+### instance_configuration
+
+The following arguments are optional:
+
+* `blockDeviceMapping` - (Optional) Configuration block(s) with block device mappings for the container recipe. Detailed below.
+* `image` - (Optional) The AMI ID to use as the base image for a container build and test instance. If not specified, Image Builder will use the appropriate ECS-optimized AMI as a base image.
+
+### block_device_mapping
+
+The following arguments are optional:
+
+* `deviceName` - (Optional) Name of the device. For example, `/dev/sda` or `/dev/xvdb`.
+* `ebs` - (Optional) Configuration block with Elastic Block Storage (EBS) block device mapping settings. Detailed below.
+* `noDevice` - (Optional) Set to `true` to remove a mapping from the parent image.
+* `virtualName` - (Optional) Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0.
+
+#### ebs
+
+The following arguments are optional:
+
+* `deleteOnTermination` - (Optional) Whether to delete the volume on termination. Defaults to unset, which is the value inherited from the parent image.
+* `encrypted` - (Optional) Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image.
+* `iops` - (Optional) Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume.
+* `kmsKeyId` - (Optional) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key for encryption.
+* `snapshotId` - (Optional) Identifier of the EC2 Volume Snapshot.
+* `throughput` - (Optional) For GP3 volumes only. The throughput in MiB/s that the volume supports.
+* `volumeSize` - (Optional) Size of the volume, in GiB.
+* `volumeType` - (Optional) Type of the volume. For example, `gp2` or `io2`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the container recipe.
+* `dateCreated` - Date the container recipe was created.
+* `encrypted` - A flag that indicates if the target container is encrypted.
+* `owner` - Owner of the container recipe.
+* `platform` - Platform of the container recipe.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsImagebuilderContainerRecipe` resources using the Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsImagebuilderContainerRecipe` resources using the Amazon Resource Name (ARN).
For example: + +```console +% terraform import aws_imagebuilder_container_recipe.example arn:aws:imagebuilder:us-east-1:123456789012:container-recipe/example/1.0.0 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/imagebuilder_distribution_configuration.html.markdown b/website/docs/cdktf/typescript/r/imagebuilder_distribution_configuration.html.markdown new file mode 100644 index 00000000000..cbb56934e4a --- /dev/null +++ b/website/docs/cdktf/typescript/r/imagebuilder_distribution_configuration.html.markdown @@ -0,0 +1,168 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_distribution_configuration" +description: |- + Manage an Image Builder Distribution Configuration +--- + + + +# Resource: aws_imagebuilder_distribution_configuration + +Manages an Image Builder Distribution Configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ImagebuilderDistributionConfiguration } from "./.gen/providers/aws/imagebuilder-distribution-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ImagebuilderDistributionConfiguration(this, "example", { + distribution: [ + { + amiDistributionConfiguration: { + amiTags: { + CostCenter: "IT", + }, + launchPermission: { + userIds: ["123456789012"], + }, + name: "example-{{ imagebuilder:buildDate }}", + }, + launchTemplateConfiguration: [ + { + launchTemplateId: "lt-0aaa1bcde2ff3456", + }, + ], + region: "us-east-1", + }, + ], + name: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the distribution configuration. +* `distribution` - (Required) One or more configuration blocks with distribution settings. Detailed below. + +The following arguments are optional: + +* `description` - (Optional) Description of the distribution configuration. +* `tags` - (Optional) Key-value map of resource tags for the distribution configuration. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### distribution + +The following arguments are required: + +* `region` - (Required) AWS Region for the distribution. + +The following arguments are optional: + +* `amiDistributionConfiguration` - (Optional) Configuration block with Amazon Machine Image (AMI) distribution settings. Detailed below. +* `containerDistributionConfiguration` - (Optional) Configuration block with container distribution settings. Detailed below. +* `fastLaunchConfiguration` - (Optional) Set of Windows faster-launching configurations to use for AMI distribution. Detailed below. +* `launchTemplateConfiguration` - (Optional) Set of launch template configuration settings that apply to image distribution. Detailed below. +* `licenseConfigurationArns` - (Optional) Set of Amazon Resource Names (ARNs) of License Manager License Configurations. 
+
+### ami_distribution_configuration
+
+The following arguments are optional:
+
+* `amiTags` - (Optional) Key-value map of tags to apply to the distributed AMI.
+* `description` - (Optional) Description to apply to the distributed AMI.
+* `kmsKeyId` - (Optional) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key to encrypt the distributed AMI.
+* `launchPermission` - (Optional) Configuration block of EC2 launch permissions to apply to the distributed AMI. Detailed below.
+* `name` - (Optional) Name to apply to the distributed AMI.
+* `targetAccountIds` - (Optional) Set of AWS Account identifiers to distribute the AMI.
+
+### launch_permission
+
+The following arguments are optional:
+
+* `organizationArns` - (Optional) Set of AWS Organization ARNs to assign.
+* `organizationalUnitArns` - (Optional) Set of AWS Organizational Unit ARNs to assign.
+* `userGroups` - (Optional) Set of EC2 launch permission user groups to assign. Use `all` to distribute a public AMI.
+* `userIds` - (Optional) Set of AWS Account identifiers to assign.
+
+### container_distribution_configuration
+
+* `containerTags` - (Optional) Set of tags that are attached to the container distribution configuration.
+* `description` - (Optional) Description of the container distribution configuration.
+* `targetRepository` - (Required) Configuration block with the destination repository for the container distribution configuration.
+
+### target_repository
+
+* `repositoryName` - (Required) The name of the container repository where the output container image is stored. This name is prefixed by the repository location.
+* `service` - (Required) The service in which this image is registered. Valid values: `ECR`.
+
+### fast_launch_configuration
+
+A sketch of this block follows the subsections below.
+
+* `accountId` - (Required) The owner account ID for the fast-launch enabled Windows AMI.
+* `enabled` - (Required) A Boolean that represents the current state of faster launching for the Windows AMI. Set to `true` to start using Windows faster launching, or `false` to stop using it.
+* `launchTemplate` - (Optional) Configuration block for the launch template that the fast-launch enabled Windows AMI uses when it launches Windows instances to create pre-provisioned snapshots. Detailed below.
+* `maxParallelLaunches` - (Optional) The maximum number of parallel instances that are launched for creating resources.
+* `snapshotConfiguration` - (Optional) Configuration block for managing the number of snapshots that are created from pre-provisioned instances for the Windows AMI when faster launching is enabled. Detailed below.
+
+### launch_template
+
+* `launchTemplateId` - (Optional) The ID of the launch template to use for faster launching for a Windows AMI.
+* `launchTemplateName` - (Optional) The name of the launch template to use for faster launching for a Windows AMI.
+* `launchTemplateVersion` - (Optional) The version of the launch template to use for faster launching for a Windows AMI.
+
+### snapshot_configuration
+
+* `targetResourceCount` - (Optional) The number of pre-provisioned snapshots to keep on hand for a fast-launch enabled Windows AMI.
+
+### launch_template_configuration
+
+* `default` - (Optional) Indicates whether to set the specified Amazon EC2 launch template as the default launch template. Defaults to `true`.
+* `accountId` - The account ID that this configuration applies to.
+* `launchTemplateId` - (Required) The ID of the Amazon EC2 launch template to use.
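+The `fastLaunchConfiguration` block does not appear in the example at the top of this page, so a minimal, hypothetical sketch of one distribution entry follows; the account ID, launch template ID, and counts are placeholder values:
+
+```typescript
+// Hypothetical sketch - the account ID, template ID, and counts are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ImagebuilderDistributionConfiguration } from "./.gen/providers/aws/imagebuilder-distribution-configuration";
+class WindowsFastLaunchExample extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderDistributionConfiguration(this, "windows_fast_launch", {
+      distribution: [
+        {
+          fastLaunchConfiguration: [
+            {
+              accountId: "123456789012",
+              enabled: true,
+              launchTemplate: {
+                launchTemplateId: "lt-0aaa1bcde2ff3456",
+              },
+              // Cap concurrent pre-provisioning launches and keep five
+              // snapshots on hand.
+              maxParallelLaunches: 6,
+              snapshotConfiguration: {
+                targetResourceCount: 5,
+              },
+            },
+          ],
+          region: "us-east-1",
+        },
+      ],
+      name: "windows-fast-launch-example",
+    });
+  }
+}
+```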
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the distribution configuration.
+* `dateCreated` - Date the distribution configuration was created.
+* `dateUpdated` - Date the distribution configuration was updated.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsImagebuilderDistributionConfigurations` resources using the Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsImagebuilderDistributionConfigurations` resources using the Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_imagebuilder_distribution_configuration.example arn:aws:imagebuilder:us-east-1:123456789012:distribution-configuration/example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/imagebuilder_image.html.markdown b/website/docs/cdktf/typescript/r/imagebuilder_image.html.markdown
new file mode 100644
index 00000000000..b9dce491446
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/imagebuilder_image.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_image"
+description: |-
+  Manages an Image Builder Image
+---
+
+
+
+# Resource: aws_imagebuilder_image
+
+Manages an Image Builder Image.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ImagebuilderImage } from "./.gen/providers/aws/imagebuilder-image";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderImage(this, "example", {
+      distributionConfigurationArn: Token.asString(
+        awsImagebuilderDistributionConfigurationExample.arn
+      ),
+      imageRecipeArn: Token.asString(awsImagebuilderImageRecipeExample.arn),
+      infrastructureConfigurationArn: Token.asString(
+        awsImagebuilderInfrastructureConfigurationExample.arn
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `infrastructureConfigurationArn` - (Required) Amazon Resource Name (ARN) of the Image Builder Infrastructure Configuration.
+
+The following arguments are optional:
+
+* `containerRecipeArn` - (Optional) Amazon Resource Name (ARN) of the container recipe.
+* `distributionConfigurationArn` - (Optional) Amazon Resource Name (ARN) of the Image Builder Distribution Configuration.
+* `enhancedImageMetadataEnabled` - (Optional) Whether additional information about the image being created is collected. Defaults to `true`.
+* `imageRecipeArn` - (Optional) Amazon Resource Name (ARN) of the image recipe. +* `imageTestsConfiguration` - (Optional) Configuration block with image tests configuration. Detailed below. +* `tags` - (Optional) Key-value map of resource tags for the Image Builder Image. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### image_tests_configuration + +The following arguments are optional: + +* `imageTestsEnabled` - (Optional) Whether image tests are enabled. Defaults to `true`. +* `timeoutMinutes` - (Optional) Number of minutes before image tests time out. Valid values are between `60` and `1440`. Defaults to `720`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the image. +* `dateCreated` - Date the image was created. +* `platform` - Platform of the image. +* `osVersion` - Operating System version of the image. +* `outputResources` - List of objects with resources created by the image. + * `amis` - Set of objects with each Amazon Machine Image (AMI) created. + * `accountId` - Account identifier of the AMI. + * `description` - Description of the AMI. + * `image` - Identifier of the AMI. + * `name` - Name of the AMI. + * `region` - Region of the AMI. + * `containers` - Set of objects with each container image created and stored in the output repository. + * `imageUris` - Set of URIs for created containers. + * `region` - Region of the container image. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `version` - Version of the image. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsImagebuilderImage` resources using the Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsImagebuilderImage` resources using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_image.example arn:aws:imagebuilder:us-east-1:123456789012:image/example/1.0.0/1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/imagebuilder_image_pipeline.html.markdown b/website/docs/cdktf/typescript/r/imagebuilder_image_pipeline.html.markdown new file mode 100644 index 00000000000..7d6753319af --- /dev/null +++ b/website/docs/cdktf/typescript/r/imagebuilder_image_pipeline.html.markdown @@ -0,0 +1,116 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_pipeline" +description: |- + Manages an Image Builder Image Pipeline +--- + + + +# Resource: aws_imagebuilder_image_pipeline + +Manages an Image Builder Image Pipeline. 
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ImagebuilderImagePipeline } from "./.gen/providers/aws/imagebuilder-image-pipeline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderImagePipeline(this, "example", {
+      imageRecipeArn: Token.asString(awsImagebuilderImageRecipeExample.arn),
+      infrastructureConfigurationArn: Token.asString(
+        awsImagebuilderInfrastructureConfigurationExample.arn
+      ),
+      name: "example",
+      schedule: {
+        scheduleExpression: "cron(0 0 * * ? *)",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `infrastructureConfigurationArn` - (Required) Amazon Resource Name (ARN) of the Image Builder Infrastructure Configuration.
+* `name` - (Required) Name of the image pipeline.
+
+The following arguments are optional:
+
+* `containerRecipeArn` - (Optional) Amazon Resource Name (ARN) of the container recipe.
+* `description` - (Optional) Description of the image pipeline.
+* `distributionConfigurationArn` - (Optional) Amazon Resource Name (ARN) of the Image Builder Distribution Configuration.
+* `enhancedImageMetadataEnabled` - (Optional) Whether additional information about the image being created is collected. Defaults to `true`.
+* `imageRecipeArn` - (Optional) Amazon Resource Name (ARN) of the image recipe.
+* `imageTestsConfiguration` - (Optional) Configuration block with image tests configuration. Detailed below.
+* `schedule` - (Optional) Configuration block with schedule settings. Detailed below.
+* `status` - (Optional) Status of the image pipeline. Valid values are `DISABLED` and `ENABLED`. Defaults to `ENABLED`.
+* `tags` - (Optional) Key-value map of resource tags for the image pipeline. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### image_tests_configuration
+
+The following arguments are optional:
+
+* `imageTestsEnabled` - (Optional) Whether image tests are enabled. Defaults to `true`.
+* `timeoutMinutes` - (Optional) Number of minutes before image tests time out. Valid values are between `60` and `1440`. Defaults to `720`.
+
+### schedule
+
+The following arguments are required:
+
+* `scheduleExpression` - (Required) Cron expression of how often the pipeline start condition is evaluated. For example, `cron(0 0 * * ? *)` is evaluated every day at midnight UTC. Configurations using the five field syntax that was previously accepted by the API, such as `cron(0 0 * * *)`, must be updated to the six field syntax. For more information, see the [Image Builder User Guide](https://docs.aws.amazon.com/imagebuilder/latest/userguide/cron-expressions.html).
+
+The following arguments are optional:
+
+* `pipelineExecutionStartCondition` - (Optional) Condition when the pipeline should trigger a new image build. Valid values are `EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE` and `EXPRESSION_MATCH_ONLY`. Defaults to `EXPRESSION_MATCH_AND_DEPENDENCY_UPDATES_AVAILABLE`.
+* `timezone` - (Optional) The timezone that applies to the scheduling expression.
For example, "Etc/UTC", "America/Los_Angeles" in the [IANA timezone format](https://www.joda.org/joda-time/timezones.html). If not specified this defaults to UTC. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the image pipeline. +* `dateCreated` - Date the image pipeline was created. +* `dateLastRun` - Date the image pipeline was last run. +* `dateNextRun` - Date the image pipeline will run next. +* `dateUpdated` - Date the image pipeline was updated. +* `platform` - Platform of the image pipeline. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsImagebuilderImagePipeline` resources using the Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsImagebuilderImagePipeline` resources using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_image_pipeline.example arn:aws:imagebuilder:us-east-1:123456789012:image-pipeline/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/imagebuilder_image_recipe.html.markdown b/website/docs/cdktf/typescript/r/imagebuilder_image_recipe.html.markdown new file mode 100644 index 00000000000..9944dfabf82 --- /dev/null +++ b/website/docs/cdktf/typescript/r/imagebuilder_image_recipe.html.markdown @@ -0,0 +1,161 @@ +--- +subcategory: "EC2 Image Builder" +layout: "aws" +page_title: "AWS: aws_imagebuilder_image_recipe" +description: |- + Manage an Image Builder Image Recipe +--- + + + +# Resource: aws_imagebuilder_image_recipe + +Manages an Image Builder Image Recipe. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { ImagebuilderImageRecipe } from "./.gen/providers/aws/imagebuilder-image-recipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderImageRecipe(this, "example", {
+      blockDeviceMapping: [
+        {
+          deviceName: "/dev/xvdb",
+          ebs: {
+            deleteOnTermination: Token.asString(true),
+            volumeSize: 100,
+            volumeType: "gp2",
+          },
+        },
+      ],
+      component: [
+        {
+          componentArn: Token.asString(awsImagebuilderComponentExample.arn),
+          parameter: [
+            {
+              name: "Parameter1",
+              value: "Value1",
+            },
+            {
+              name: "Parameter2",
+              value: "Value2",
+            },
+          ],
+        },
+      ],
+      name: "example",
+      parentImage:
+        "arn:${" +
+        current.partition +
+        "}:imagebuilder:${" +
+        dataAwsRegionCurrent.name +
+        "}:aws:image/amazon-linux-2-x86/x.x.x",
+      version: "1.0.0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `component` - Ordered configuration block(s) with components for the image recipe. Detailed below.
+* `name` - Name of the image recipe.
+* `parentImage` - The image recipe uses this image as a base from which to build your customized image. The value can be the base image ARN or an AMI ID.
+* `version` - The semantic version of the image recipe, which specifies the version in the following format, with numeric values in each position to indicate a specific version: major.minor.patch. For example: 1.0.0.
+
+The following arguments are optional:
+
+* `blockDeviceMapping` - Configuration block(s) with block device mappings for the image recipe. Detailed below.
+* `description` - Description of the image recipe.
+* `systemsManagerAgent` - Configuration block for the Systems Manager Agent installed by default by Image Builder. Detailed below.
+* `tags` - Key-value map of resource tags for the image recipe. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `userDataBase64` - Base64 encoded user data. Use this to provide commands or a command script to run when you launch your build instance.
+* `workingDirectory` - The working directory to be used during build and test workflows.
+
+### block_device_mapping
+
+The following arguments are optional:
+
+* `deviceName` - Name of the device. For example, `/dev/sda` or `/dev/xvdb`.
+* `ebs` - Configuration block with Elastic Block Storage (EBS) block device mapping settings. Detailed below.
+* `noDevice` - Set to `true` to remove a mapping from the parent image.
+* `virtualName` - Virtual device name. For example, `ephemeral0`. Instance store volumes are numbered starting from 0.
+
+#### ebs
+
+The following arguments are optional:
+
+* `deleteOnTermination` - Whether to delete the volume on termination. Defaults to unset, which is the value inherited from the parent image.
+* `encrypted` - Whether to encrypt the volume. Defaults to unset, which is the value inherited from the parent image.
+* `iops` - Number of Input/Output (I/O) operations per second to provision for an `io1` or `io2` volume.
+* `kmsKeyId` - Amazon Resource Name (ARN) of the Key Management Service (KMS) Key for encryption.
+* `snapshotId` - Identifier of the EC2 Volume Snapshot.
+* `throughput` - For GP3 volumes only. The throughput in MiB/s that the volume supports.
+* `volumeSize` - Size of the volume, in GiB.
+* `volumeType` - Type of the volume. For example, `gp2` or `io2`.
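+As a hypothetical variation on the example at the top of this page, the sketch below maps a `gp3` volume and sets the `throughput` and `iops` arguments described above; the component ARN and volume values are illustrative placeholders, not defaults:
+
+```typescript
+// Hypothetical sketch - the component ARN and volume values are placeholders.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { ImagebuilderImageRecipe } from "./.gen/providers/aws/imagebuilder-image-recipe";
+class Gp3RecipeExample extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderImageRecipe(this, "gp3_example", {
+      blockDeviceMapping: [
+        {
+          deviceName: "/dev/xvdb",
+          ebs: {
+            // The generated bindings type this field as a string, hence the
+            // token conversion (mirroring the example above).
+            deleteOnTermination: Token.asString(true),
+            iops: 3000,
+            throughput: 125,
+            volumeSize: 100,
+            volumeType: "gp3",
+          },
+        },
+      ],
+      component: [
+        {
+          componentArn:
+            "arn:aws:imagebuilder:eu-central-1:aws:component/update-linux/x.x.x",
+        },
+      ],
+      name: "example-gp3",
+      parentImage:
+        "arn:aws:imagebuilder:eu-central-1:aws:image/amazon-linux-2-x86/x.x.x",
+      version: "1.0.0",
+    });
+  }
+}
+```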
+
+### component
+
+The `component` block supports the following arguments:
+
+* `componentArn` - (Required) Amazon Resource Name (ARN) of the Image Builder Component to associate.
+* `parameter` - (Optional) Configuration block(s) for parameters to configure the component. Detailed below.
+
+### parameter
+
+The following arguments are required:
+
+* `name` - The name of the component parameter.
+* `value` - The value for the named component parameter.
+
+### systems_manager_agent
+
+The following arguments are required:
+
+* `uninstallAfterBuild` - Whether to remove the Systems Manager Agent after the image has been built. Defaults to `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the image recipe.
+* `dateCreated` - Date the image recipe was created.
+* `owner` - Owner of the image recipe.
+* `platform` - Platform of the image recipe.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsImagebuilderImageRecipe` resources using the Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsImagebuilderImageRecipe` resources using the Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_imagebuilder_image_recipe.example arn:aws:imagebuilder:us-east-1:123456789012:image-recipe/example/1.0.0
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/imagebuilder_infrastructure_configuration.html.markdown b/website/docs/cdktf/typescript/r/imagebuilder_infrastructure_configuration.html.markdown
new file mode 100644
index 00000000000..d52bd944a90
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/imagebuilder_infrastructure_configuration.html.markdown
@@ -0,0 +1,130 @@
+---
+subcategory: "EC2 Image Builder"
+layout: "aws"
+page_title: "AWS: aws_imagebuilder_infrastructure_configuration"
+description: |-
+  Manages an Image Builder Infrastructure Configuration
+---
+
+
+
+# Resource: aws_imagebuilder_infrastructure_configuration
+
+Manages an Image Builder Infrastructure Configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ImagebuilderInfrastructureConfiguration } from "./.gen/providers/aws/imagebuilder-infrastructure-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ImagebuilderInfrastructureConfiguration(this, "example", {
+      description: "example description",
+      instanceProfileName: Token.asString(awsIamInstanceProfileExample.name),
+      instanceTypes: ["t2.nano", "t3.micro"],
+      keyPair: Token.asString(awsKeyPairExample.keyName),
+      logging: {
+        s3Logs: {
+          s3BucketName: Token.asString(awsS3BucketExample.bucket),
+          s3KeyPrefix: "logs",
+        },
+      },
+      name: "example",
+      securityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+      snsTopicArn: Token.asString(awsSnsTopicExample.arn),
+      subnetId: main.id,
+      tags: {
+        foo: "bar",
+      },
+      terminateInstanceOnFailure: true,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `instanceProfileName` - (Required) Name of IAM Instance Profile.
+* `name` - (Required) Name for the configuration.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description for the configuration.
+* `instanceMetadataOptions` - (Optional) Configuration block with instance metadata options for the HTTP requests that pipeline builds use to launch EC2 build and test instances. Detailed below.
+* `instanceTypes` - (Optional) Set of EC2 Instance Types.
+* `keyPair` - (Optional) Name of EC2 Key Pair.
+* `logging` - (Optional) Configuration block with logging settings. Detailed below.
+* `resourceTags` - (Optional) Key-value map of resource tags to assign to infrastructure created by the configuration.
+* `securityGroupIds` - (Optional) Set of EC2 Security Group identifiers.
+* `snsTopicArn` - (Optional) Amazon Resource Name (ARN) of SNS Topic.
+* `subnetId` - (Optional) EC2 Subnet identifier. Also requires `securityGroupIds` argument.
+* `tags` - (Optional) Key-value map of resource tags to assign to the configuration. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `terminateInstanceOnFailure` - (Optional) Enable if the instance should be terminated when the pipeline fails. Defaults to `false`.
+
+### instance_metadata_options
+
+The following arguments are optional:
+
+* `httpPutResponseHopLimit` - The number of hops that an instance can traverse to reach its destination.
+* `httpTokens` - Whether a signed token is required for instance metadata retrieval requests. Valid values: `required`, `optional`.
+
+### logging
+
+The following arguments are required:
+
+* `s3Logs` - (Required) Configuration block with S3 logging settings. Detailed below.
+
+### s3_logs
+
+The following arguments are required:
+
+* `s3BucketName` - (Required) Name of the S3 Bucket.
+
+The following arguments are optional:
+
+* `s3KeyPrefix` - (Optional) Prefix to use for S3 logs. Defaults to `/`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the configuration.
+* `arn` - Amazon Resource Name (ARN) of the configuration.
+* `dateCreated` - Date when the configuration was created.
+* `dateUpdated` - Date when the configuration was updated.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsImagebuilderInfrastructureConfiguration` using the Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsImagebuilderInfrastructureConfiguration` using the Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_imagebuilder_infrastructure_configuration.example arn:aws:imagebuilder:us-east-1:123456789012:infrastructure-configuration/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/inspector2_delegated_admin_account.html.markdown b/website/docs/cdktf/typescript/r/inspector2_delegated_admin_account.html.markdown new file mode 100644 index 00000000000..bc189d86611 --- /dev/null +++ b/website/docs/cdktf/typescript/r/inspector2_delegated_admin_account.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Inspector" +layout: "aws" +page_title: "AWS: aws_inspector2_delegated_admin_account" +description: |- + Terraform resource for managing an Amazon Inspector Delegated Admin Account. +--- + + + +# Resource: aws_inspector2_delegated_admin_account + +Terraform resource for managing an Amazon Inspector Delegated Admin Account. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { Inspector2DelegatedAdminAccount } from "./.gen/providers/aws/inspector2-delegated-admin-account"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + new Inspector2DelegatedAdminAccount(this, "example", { + accountId: Token.asString(current.accountId), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `accountId` - (Required) Account to enable as delegated admin account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `relationshipStatus` - Status of this delegated admin account. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `15M`) +* `delete` - (Default `15M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Inspector Delegated Admin Account using the `accountId`. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Inspector Delegated Admin Account using the `accountId`. For example:
+
+```console
+% terraform import aws_inspector2_delegated_admin_account.example 012345678901
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/inspector2_enabler.html.markdown b/website/docs/cdktf/typescript/r/inspector2_enabler.html.markdown
new file mode 100644
index 00000000000..bdef364638d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/inspector2_enabler.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "Inspector"
+layout: "aws"
+page_title: "AWS: aws_inspector2_enabler"
+description: |-
+  Terraform resource for enabling Amazon Inspector resource scans.
+---
+
+
+
+# Resource: aws_inspector2_enabler
+
+Terraform resource for enabling Amazon Inspector resource scans.
+
+This resource must be created in the Organization's Administrator Account.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Inspector2Enabler } from "./.gen/providers/aws/inspector2-enabler";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Inspector2Enabler(this, "example", {
+      accountIds: ["123456789012"],
+      resourceTypes: ["EC2"],
+    });
+  }
+}
+
+```
+
+### For the Calling Account
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { Inspector2Enabler } from "./.gen/providers/aws/inspector2-enabler";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    new Inspector2Enabler(this, "test", {
+      accountIds: [Token.asString(current.accountId)],
+      resourceTypes: ["ECR", "EC2"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `accountIds` - (Required) Set of account IDs.
+  Can contain either the Organization's Administrator Account, or one or more Member Accounts.
+* `resourceTypes` - (Required) Type of resources to scan.
+  Valid values are `EC2`, `ECR`, and `LAMBDA`.
+  At least one item is required.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5M`) +* `update` - (Default `5M`) +* `delete` - (Default `5M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/inspector2_member_association.html.markdown b/website/docs/cdktf/typescript/r/inspector2_member_association.html.markdown new file mode 100644 index 00000000000..3284c4f41f0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/inspector2_member_association.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Inspector" +layout: "aws" +page_title: "AWS: aws_inspector2_member_association" +description: |- + Terraform resource for managing an Amazon Inspector Member Association. +--- + + + +# Resource: aws_inspector2_member_association + +Terraform resource for associating accounts to existing Inspector instances. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Inspector2MemberAssociation } from "./.gen/providers/aws/inspector2-member-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Inspector2MemberAssociation(this, "example", { + accountId: "123456789012", + }); + } +} + +``` + +## Argument Reference + +The following argument is required: + +* `accountId` - (Required) ID of the account to associate + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `delegatedAdminAccountId` - Account ID of the delegated administrator account +* `relationshipStatus` - Status of the member relationship +* `updatedAt` - Date and time of the last update of the relationship + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Amazon Inspector Member Association using the `accountId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Amazon Inspector Member Association using the `accountId`. For example: + +```console +% terraform import aws_inspector2_member_association.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/inspector2_organization_configuration.html.markdown b/website/docs/cdktf/typescript/r/inspector2_organization_configuration.html.markdown new file mode 100644 index 00000000000..270647cb617 --- /dev/null +++ b/website/docs/cdktf/typescript/r/inspector2_organization_configuration.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Inspector" +layout: "aws" +page_title: "AWS: aws_inspector2_organization_configuration" +description: |- + Terraform resource for managing an Amazon Inspector Organization Configuration. +--- + + + +# Resource: aws_inspector2_organization_configuration + +Terraform resource for managing an Amazon Inspector Organization Configuration. 
+ +~> **NOTE:** In order for this resource to work, the account you use must be an Inspector Delegated Admin Account. + +~> **NOTE:** When this resource is deleted, EC2, ECR and Lambda scans will no longer be automatically enabled for new members of your Amazon Inspector organization. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Inspector2OrganizationConfiguration } from "./.gen/providers/aws/inspector2-organization-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Inspector2OrganizationConfiguration(this, "example", { + autoEnable: { + ec2: true, + ecr: false, + lambda: true, + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `autoEnable` - (Required) Configuration block for auto enabling. See below. + +### `autoEnable` + +* `ec2` - (Required) Whether Amazon EC2 scans are automatically enabled for new members of your Amazon Inspector organization. +* `ecr` - (Required) Whether Amazon ECR scans are automatically enabled for new members of your Amazon Inspector organization. +* `lambda` - (Optional) Whether Lambda Function scans are automatically enabled for new members of your Amazon Inspector organization. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `maxAccountLimitReached` - Whether your configuration reached the max account limit. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5M`) +* `update` - (Default `5M`) +* `delete` - (Default `5M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/inspector_assessment_target.html.markdown b/website/docs/cdktf/typescript/r/inspector_assessment_target.html.markdown new file mode 100644 index 00000000000..01f59296e54 --- /dev/null +++ b/website/docs/cdktf/typescript/r/inspector_assessment_target.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "Inspector Classic" +layout: "aws" +page_title: "AWS: aws_inspector_assessment_target" +description: |- + Provides an Inspector Classic Assessment Target. +--- + + + +# Resource: aws_inspector_assessment_target + +Provides an Inspector Classic Assessment Target + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { InspectorAssessmentTarget } from "./.gen/providers/aws/inspector-assessment-target";
+import { InspectorResourceGroup } from "./.gen/providers/aws/inspector-resource-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const bar = new InspectorResourceGroup(this, "bar", {
+      tags: {
+        Env: "bar",
+        Name: "foo",
+      },
+    });
+    new InspectorAssessmentTarget(this, "foo", {
+      name: "assessment target",
+      resourceGroupArn: bar.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the assessment target.
+* `resourceGroupArn` - (Optional) Inspector Resource Group Amazon Resource Name (ARN) stating tags for instance matching. If not specified, all EC2 instances in the current AWS account and region are included in the assessment target.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The target assessment ARN.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Inspector Classic Assessment Targets using their Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Inspector Classic Assessment Targets using their Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_inspector_assessment_target.example arn:aws:inspector:us-east-1:123456789012:target/0-xxxxxxx
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/inspector_assessment_template.html.markdown b/website/docs/cdktf/typescript/r/inspector_assessment_template.html.markdown
new file mode 100644
index 00000000000..b3c2550be62
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/inspector_assessment_template.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Inspector Classic"
+layout: "aws"
+page_title: "AWS: aws_inspector_assessment_template"
+description: |-
+  Provides an Inspector Classic Assessment Template.
+---
+
+
+
+# Resource: aws_inspector_assessment_template
+
+Provides an Inspector Classic Assessment Template
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { InspectorAssessmentTemplate } from "./.gen/providers/aws/inspector-assessment-template";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new InspectorAssessmentTemplate(this, "example", {
+      duration: 3600,
+      eventSubscription: [
+        {
+          event: "ASSESSMENT_RUN_COMPLETED",
+          topicArn: Token.asString(awsSnsTopicExample.arn),
+        },
+      ],
+      name: "example",
+      rulesPackageArns: [
+        "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-9hgA516p",
+        "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-H5hpSawc",
+        "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-JJOtZiqQ",
+        "arn:aws:inspector:us-west-2:758058086616:rulespackage/0-vg5GGHSD",
+      ],
+      targetArn: Token.asString(awsInspectorAssessmentTargetExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the assessment template.
+* `targetArn` - (Required) The assessment target ARN to attach the template to.
+* `duration` - (Required) The duration of the inspector run.
+* `rulesPackageArns` - (Required) The rules to be used during the run.
+* `eventSubscription` - (Optional) A block that enables sending notifications about a specified assessment template event to a designated SNS topic. See [Event Subscriptions](#event-subscriptions) for details.
+* `tags` - (Optional) Key-value map of tags for the Inspector assessment template. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Event Subscriptions
+
+The event subscription configuration block supports the following arguments:
+
+* `event` - (Required) The event for which you want to receive SNS notifications. Valid values are `ASSESSMENT_RUN_STARTED`, `ASSESSMENT_RUN_COMPLETED`, `ASSESSMENT_RUN_STATE_CHANGED`, and `FINDING_REPORTED`.
+* `topicArn` - (Required) The ARN of the SNS topic to which notifications are sent.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The template assessment ARN.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsInspectorAssessmentTemplate` using the template assessment ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsInspectorAssessmentTemplate` using the template assessment ARN.
For example: + +```console +% terraform import aws_inspector_assessment_template.example arn:aws:inspector:us-west-2:123456789012:target/0-9IaAzhGR/template/0-WEcjR8CH +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/inspector_resource_group.html.markdown b/website/docs/cdktf/typescript/r/inspector_resource_group.html.markdown new file mode 100644 index 00000000000..836f4f57a18 --- /dev/null +++ b/website/docs/cdktf/typescript/r/inspector_resource_group.html.markdown @@ -0,0 +1,52 @@ +--- +subcategory: "Inspector Classic" +layout: "aws" +page_title: "AWS: aws_inspector_resource_group" +description: |- + Provides an Amazon Inspector Classic Resource Group. +--- + + + +# Resource: aws_inspector_resource_group + +Provides an Amazon Inspector Classic Resource Group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { InspectorResourceGroup } from "./.gen/providers/aws/inspector-resource-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new InspectorResourceGroup(this, "example", { + tags: { + Env: "bar", + Name: "foo", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `tags` - (Required) Key-value map of tags that are used to select the EC2 instances to be included in an [Amazon Inspector assessment target](/docs/providers/aws/r/inspector_assessment_target.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The resource group ARN. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/instance.html.markdown b/website/docs/cdktf/typescript/r/instance.html.markdown index 21955fee960..000241c0b61 100644 --- a/website/docs/cdktf/typescript/r/instance.html.markdown +++ b/website/docs/cdktf/typescript/r/instance.html.markdown @@ -255,7 +255,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ami` - (Optional) AMI to use for the instance. Required unless `launchTemplate` is specified and the Launch Template specifes an AMI. If an AMI is specified in the Launch Template, setting `ami` will override the AMI specified in the Launch Template. * `associatePublicIpAddress` - (Optional) Whether to associate a public IP address with an instance in a VPC. @@ -479,9 +479,9 @@ The `launchTemplate` block supports the following: * `name` - Name of the launch template. Conflicts with `id`. * `version` - Template version. Can be a specific version number, `$latest` or `$default`. The default value is `$default`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the instance. * `capacityReservationSpecification` - Capacity reservation specification of the instance. 
@@ -518,10 +518,24 @@ For `instanceMarketOptions`, in addition to the arguments above, the following a ## Import -Instances can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import instances using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_instance.web i-12345678 + +Using `terraform import`, import instances using the `id`. For example: + +```console +% terraform import aws_instance.web i-12345678 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/internet_gateway.html.markdown b/website/docs/cdktf/typescript/r/internet_gateway.html.markdown new file mode 100644 index 00000000000..b16b549a86f --- /dev/null +++ b/website/docs/cdktf/typescript/r/internet_gateway.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_internet_gateway" +description: |- + Provides a resource to create a VPC Internet Gateway. +--- + + + +# Resource: aws_internet_gateway + +Provides a resource to create a VPC Internet Gateway. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { InternetGateway } from "./.gen/providers/aws/internet-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new InternetGateway(this, "gw", { + tags: { + Name: "main", + }, + vpcId: main.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `vpcId` - (Optional) The VPC ID to create in. See the [aws_internet_gateway_attachment](internet_gateway_attachment.html) resource for an alternate way to attach an Internet Gateway to a VPC. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +-> **Note:** It's recommended to denote that the AWS Instance or Elastic IP depends on the Internet Gateway. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Instance } from "./.gen/providers/aws/instance"; +import { InternetGateway } from "./.gen/providers/aws/internet-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const gw = new InternetGateway(this, "gw", { + vpcId: main.id, + }); + new Instance(this, "foo", { + dependsOn: [gw], + }); + } +} + +``` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Internet Gateway. +* `arn` - The ARN of the Internet Gateway. +* `ownerId` - The ID of the AWS account that owns the internet gateway. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `20M`) +- `update` - (Default `20M`) +- `delete` - (Default `20M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Internet Gateways using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Internet Gateways using the `id`. For example: + +```console +% terraform import aws_internet_gateway.gw igw-c0a643a9 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/internet_gateway_attachment.html.markdown b/website/docs/cdktf/typescript/r/internet_gateway_attachment.html.markdown new file mode 100644 index 00000000000..d7d2bbdd604 --- /dev/null +++ b/website/docs/cdktf/typescript/r/internet_gateway_attachment.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_internet_gateway_attachment" +description: |- + Provides a resource to create a VPC Internet Gateway Attachment. +--- + + + +# Resource: aws_internet_gateway_attachment + +Provides a resource to create a VPC Internet Gateway Attachment. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { InternetGateway } from "./.gen/providers/aws/internet-gateway"; +import { InternetGatewayAttachment } from "./.gen/providers/aws/internet-gateway-attachment"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new InternetGateway(this, "example", {}); + const awsVpcExample = new Vpc(this, "example_1", { + cidrBlock: "10.1.0.0/16", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsVpcExample.overrideLogicalId("example");
+    const awsInternetGatewayAttachmentExample = new InternetGatewayAttachment(
+      this,
+      "example_2",
+      {
+        internetGatewayId: example.id,
+        vpcId: Token.asString(awsVpcExample.id),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsInternetGatewayAttachmentExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `internetGatewayId` - (Required) The ID of the internet gateway.
+* `vpcId` - (Required) The ID of the VPC.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the VPC and Internet Gateway separated by a colon.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Internet Gateway Attachments using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Internet Gateway Attachments using the `id`. For example:
+
+```console
+% terraform import aws_internet_gateway_attachment.example igw-c0a643a9:vpc-123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/internetmonitor_monitor.html.markdown b/website/docs/cdktf/typescript/r/internetmonitor_monitor.html.markdown
new file mode 100644
index 00000000000..42ad52c47bc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/internetmonitor_monitor.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "CloudWatch Internet Monitor"
+layout: "aws"
+page_title: "AWS: aws_internetmonitor_monitor"
+description: |-
+  Provides a CloudWatch Internet Monitor Monitor resource
+---
+
+
+
+# Resource: aws_internetmonitor_monitor
+
+Provides an Internet Monitor Monitor resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { InternetmonitorMonitor } from "./.gen/providers/aws/internetmonitor-monitor";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new InternetmonitorMonitor(this, "example", {
+      monitorName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `monitorName` - (Required) The name of the monitor.
+
+The following arguments are optional:
+
+* `healthEventsConfig` - (Optional) Health event thresholds. A health event threshold percentage, for performance and availability, determines when Internet Monitor creates a health event when there's an internet issue that affects your application end users. See [Health Events Config](#health-events-config) below.
+* `internetMeasurementsLogDelivery` - (Optional) Publish internet measurements for Internet Monitor to an Amazon S3 bucket in addition to CloudWatch Logs.
+* `maxCityNetworksToMonitor` - (Optional) The maximum number of city-networks to monitor for your resources. A city-network is the location (city) where clients access your application resources from and the network or ASN, such as an internet service provider (ISP), that clients access the resources through. This limit helps control billing costs.
+* `resources` - (Optional) The resources to include in a monitor, which you provide as a set of Amazon Resource Names (ARNs).
+* `status` - (Optional) The status for a monitor. The accepted values for Status with the UpdateMonitor API call are the following: `ACTIVE` and `INACTIVE`.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `trafficPercentageToMonitor` - (Optional) The percentage of the internet-facing traffic for your application that you want to monitor with this monitor.
+
+### Health Events Config
+
+Defines the health event threshold percentages, for performance score and availability score. Amazon CloudWatch Internet Monitor creates a health event when there's an internet issue that affects your application end users where a health score percentage is at or below a set threshold. If you don't set a health event threshold, the default value is 95%.
+
+* `availabilityScoreThreshold` - (Optional) The health event threshold percentage set for availability scores.
+* `performanceScoreThreshold` - (Optional) The health event threshold percentage set for performance scores.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Monitor.
+* `id` - Name of the monitor.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Internet Monitor Monitors using the `monitorName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Internet Monitor Monitors using the `monitorName`. For example:
+
+```console
+% terraform import aws_internetmonitor_monitor.some some-monitor
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_authorizer.html.markdown b/website/docs/cdktf/typescript/r/iot_authorizer.html.markdown
new file mode 100644
index 00000000000..773f67a4e08
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_authorizer.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_authorizer"
+description: |-
+  Creates and manages an AWS IoT Authorizer.
+---
+
+
+
+# Resource: aws_iot_authorizer
+
+Creates and manages an AWS IoT Authorizer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IotAuthorizer } from "./.gen/providers/aws/iot-authorizer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IotAuthorizer(this, "example", {
+      authorizerFunctionArn: Token.asString(awsLambdaFunctionExample.arn),
+      name: "example",
+      signingDisabled: false,
+      status: "ACTIVE",
+      tokenKeyName: "Token-Header",
+      tokenSigningPublicKeys: {
+        Key1: Token.asString(
+          Fn.file("test-fixtures/iot-authorizer-signing-key.pem")
+        ),
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `authorizerFunctionArn` - (Required) The ARN of the authorizer's Lambda function.
+* `enableCachingForHttp` - (Optional) Specifies whether the HTTP caching is enabled or not. Default: `false`.
+* `name` - (Required) The name of the authorizer.
+* `signingDisabled` - (Optional) Specifies whether AWS IoT validates the token signature in an authorization request. Default: `false`.
+* `status` - (Optional) The status of Authorizer request at creation. Valid values: `ACTIVE`, `INACTIVE`. Default: `ACTIVE`.
+* `tokenKeyName` - (Optional) The name of the token key used to extract the token from the HTTP headers. This value is required if signing is enabled in your authorizer.
+* `tokenSigningPublicKeys` - (Optional) The public keys used to verify the digital signature returned by your custom authentication service. This value is required if signing is enabled in your authorizer.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the authorizer.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IOT Authorizers using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IOT Authorizers using the name. For example:
+
+```console
+% terraform import aws_iot_authorizer.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_certificate.html.markdown b/website/docs/cdktf/typescript/r/iot_certificate.html.markdown
new file mode 100644
index 00000000000..fccd191bd30
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_certificate.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_certificate"
+description: |-
+  Creates and manages an AWS IoT certificate.
+---
+
+
+
+# Resource: aws_iot_certificate
+
+Creates and manages an AWS IoT certificate.
+
+## Example Usage
+
+### With CSR
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
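+ *
+ * Note: this example assumes a certificate signing request already exists
+ * at /my/csr.pem on the machine running Terraform.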
+ */ +import { IotCertificate } from "./.gen/providers/aws/iot-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IotCertificate(this, "cert", { + active: true, + csr: Token.asString(Fn.file("/my/csr.pem")), + }); + } +} + +``` + +### Without CSR + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IotCertificate } from "./.gen/providers/aws/iot-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IotCertificate(this, "cert", { + active: true, + }); + } +} + +``` + +### From existing certificate without a CA + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IotCertificate } from "./.gen/providers/aws/iot-certificate"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IotCertificate(this, "cert", { + active: true, + certificatePem: Token.asString(Fn.file("/my/cert.pem")), + }); + } +} + +``` + +## Argument Reference + +* `active` - (Required) Boolean flag to indicate if the certificate should be active +* `csr` - (Optional) The certificate signing request. Review + [CreateCertificateFromCsr](https://docs.aws.amazon.com/iot/latest/apireference/API_CreateCertificateFromCsr.html) + for more information on generating a certificate from a certificate signing request (CSR). + If none is specified both the certificate and keys will be generated, review [CreateKeysAndCertificate](https://docs.aws.amazon.com/iot/latest/apireference/API_CreateKeysAndCertificate.html) + for more information on generating keys and a certificate. +* `certificatePem` - (Optional) The certificate to be registered. If `caPem` is unspecified, review + [RegisterCertificateWithoutCA](https://docs.aws.amazon.com/iot/latest/apireference/API_RegisterCertificateWithoutCA.html). + If `caPem` is specified, review + [RegisterCertificate](https://docs.aws.amazon.com/iot/latest/apireference/API_RegisterCertificate.html) + for more information on registering a certificate. +* `caPem` - (Optional) The CA certificate for the certificate to be registered. If this is set, the CA needs to be registered with AWS IoT beforehand. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The internal ID assigned to this certificate. +* `arn` - The ARN of the created certificate. +* `certificatePem` - The certificate data, in PEM format. +* `publicKey` - When neither CSR nor certificate is provided, the public key. +* `privateKey` - When neither CSR nor certificate is provided, the private key. 
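+
+The generated `certificatePem`, `publicKey`, and `privateKey` values are
+stored in Terraform state. As a minimal sketch (the stack and output names
+here are illustrative, not part of the resource), one way to hand the key
+material to other tooling is through sensitive outputs:
+
+```typescript
+// Sketch: expose an AWS-generated certificate and key via sensitive outputs.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { IotCertificate } from "./.gen/providers/aws/iot-certificate";
+class CertificateOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // No CSR or certificate given, so AWS IoT generates the key pair.
+    const cert = new IotCertificate(this, "cert", { active: true });
+    // `sensitive: true` keeps the values out of plan/apply output.
+    new TerraformOutput(this, "certificate_pem", {
+      value: cert.certificatePem,
+      sensitive: true,
+    });
+    new TerraformOutput(this, "private_key", {
+      value: cert.privateKey,
+      sensitive: true,
+    });
+  }
+}
+```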
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_indexing_configuration.html.markdown b/website/docs/cdktf/typescript/r/iot_indexing_configuration.html.markdown
new file mode 100644
index 00000000000..8251a21414d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_indexing_configuration.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_indexing_configuration"
+description: |-
+  Manages IoT Thing indexing.
+---
+
+
+
+# Resource: aws_iot_indexing_configuration
+
+Manages [IoT Thing indexing](https://docs.aws.amazon.com/iot/latest/developerguide/managing-index.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IotIndexingConfiguration } from "./.gen/providers/aws/iot-indexing-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IotIndexingConfiguration(this, "example", {
+      thingIndexingConfiguration: {
+        customField: [
+          {
+            name: "shadow.desired.power",
+            type: "Boolean",
+          },
+          {
+            name: "attributes.version",
+            type: "Number",
+          },
+          {
+            name: "shadow.name.thing1shadow.desired.DefaultDesired",
+            type: "String",
+          },
+          {
+            name: "deviceDefender.securityProfile1.NUMBER_VALUE_BEHAVIOR.lastViolationValue.number",
+            type: "Number",
+          },
+        ],
+        deviceDefenderIndexingMode: "VIOLATIONS",
+        namedShadowIndexingMode: "ON",
+        thingConnectivityIndexingMode: "STATUS",
+        thingIndexingMode: "REGISTRY_AND_SHADOW",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `thingGroupIndexingConfiguration` - (Optional) Thing group indexing configuration. See below.
+* `thingIndexingConfiguration` - (Optional) Thing indexing configuration. See below.
+
+### thing_group_indexing_configuration
+
+The `thingGroupIndexingConfiguration` configuration block supports the following:
+
+* `customField` - (Optional) A list of thing group fields to index. This list cannot contain any managed fields. See below.
+* `managedField` - (Optional) Contains fields that are indexed and whose types are already known by the Fleet Indexing service. See below.
+* `thingGroupIndexingMode` - (Required) Thing group indexing mode. Valid values: `OFF`, `ON`.
+
+### thing_indexing_configuration
+
+The `thingIndexingConfiguration` configuration block supports the following:
+
+* `customField` - (Optional) Contains custom field names and their data type. See below.
+* `deviceDefenderIndexingMode` - (Optional) Device Defender indexing mode. Valid values: `VIOLATIONS`, `OFF`. Default: `OFF`.
+* `managedField` - (Optional) Contains fields that are indexed and whose types are already known by the Fleet Indexing service. See below.
+* `namedShadowIndexingMode` - (Optional) [Named shadow](https://docs.aws.amazon.com/iot/latest/developerguide/iot-device-shadows.html) indexing mode. Valid values: `ON`, `OFF`. Default: `OFF`.
+* `thingConnectivityIndexingMode` - (Optional) Thing connectivity indexing mode. Valid values: `STATUS`, `OFF`. Default: `OFF`.
+* `thingIndexingMode` - (Required) Thing indexing mode. Valid values: `REGISTRY`, `REGISTRY_AND_SHADOW`, `OFF`.
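+
+The example above exercises thing indexing only. As a rough sketch (assuming
+the same generated provider bindings), thing group indexing can be enabled in
+the same way:
+
+```typescript
+// Sketch: enable thing group indexing with no custom fields.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { IotIndexingConfiguration } from "./.gen/providers/aws/iot-indexing-configuration";
+class ThingGroupIndexing extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IotIndexingConfiguration(this, "example", {
+      thingGroupIndexingConfiguration: {
+        thingGroupIndexingMode: "ON",
+      },
+    });
+  }
+}
+```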
+
+### field
+
+The `customField` and `managedField` configuration blocks support the following:
+
+* `name` - (Optional) The name of the field.
+* `type` - (Optional) The data type of the field. Valid values: `Number`, `String`, `Boolean`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_logging_options.html.markdown b/website/docs/cdktf/typescript/r/iot_logging_options.html.markdown
new file mode 100644
index 00000000000..00d851feb53
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_logging_options.html.markdown
@@ -0,0 +1,48 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_logging_options"
+description: |-
+  Provides a resource to manage default logging options.
+---
+
+
+
+# Resource: aws_iot_logging_options
+
+Provides a resource to manage [default logging options](https://docs.aws.amazon.com/iot/latest/developerguide/configure-logging.html#configure-logging-console).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IotLoggingOptions } from "./.gen/providers/aws/iot-logging-options";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IotLoggingOptions(this, "example", {
+      defaultLogLevel: "WARN",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `defaultLogLevel` - (Optional) The default logging level. Valid values: `DEBUG`, `INFO`, `ERROR`, `WARN`, `DISABLED`.
+* `disableAllLogs` - (Optional) If `true`, all logs are disabled. The default is `false`.
+* `roleArn` - (Required) The ARN of the role that allows IoT to write to CloudWatch Logs.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_policy.html.markdown b/website/docs/cdktf/typescript/r/iot_policy.html.markdown
new file mode 100644
index 00000000000..3f4a102c087
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_policy.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_policy"
+description: |-
+  Provides an IoT policy.
+---
+
+
+
+# Resource: aws_iot_policy
+
+Provides an IoT policy.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IotPolicy } from "./.gen/providers/aws/iot-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IotPolicy(this, "pubsub", {
+      name: "PubSubToAnyTopic",
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: ["iot:*"],
+              Effect: "Allow",
+              Resource: "*",
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the policy.
+* `policy` - (Required) The policy document.
This is a JSON formatted string. Use the [IoT Developer Guide](http://docs.aws.amazon.com/iot/latest/developerguide/iot-policies.html) for more information on IoT Policies. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS to this policy. +* `name` - The name of this policy. +* `defaultVersionId` - The default version of this policy. +* `policy` - The policy document. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT policies using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IoT policies using the `name`. For example: + +```console +% terraform import aws_iot_policy.pubsub PubSubToAnyTopic +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iot_policy_attachment.html.markdown b/website/docs/cdktf/typescript/r/iot_policy_attachment.html.markdown new file mode 100644 index 00000000000..f4507443161 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iot_policy_attachment.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_policy_attachment" +description: |- + Provides an IoT policy attachment. +--- + + + +# Resource: aws_iot_policy_attachment + +Provides an IoT policy attachment. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IotCertificate } from "./.gen/providers/aws/iot-certificate"; +import { IotPolicy } from "./.gen/providers/aws/iot-policy"; +import { IotPolicyAttachment } from "./.gen/providers/aws/iot-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const cert = new IotCertificate(this, "cert", { + active: true, + csr: Token.asString(Fn.file("csr.pem")), + }); + const pubsub = new DataAwsIamPolicyDocument(this, "pubsub", { + statement: [ + { + actions: ["iot:*"], + effect: "Allow", + resources: ["*"], + }, + ], + }); + const awsIotPolicyPubsub = new IotPolicy(this, "pubsub_2", { + name: "PubSubToAnyTopic", + policy: Token.asString(pubsub.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIotPolicyPubsub.overrideLogicalId("pubsub"); + new IotPolicyAttachment(this, "att", { + policy: Token.asString(awsIotPolicyPubsub.name), + target: cert.arn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) The name of the policy to attach. 
+* `target` - (Required) The identity to which the policy is attached. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iot_provisioning_template.html.markdown b/website/docs/cdktf/typescript/r/iot_provisioning_template.html.markdown new file mode 100644 index 00000000000..2940008b746 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iot_provisioning_template.html.markdown @@ -0,0 +1,159 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_provisioning_template" +description: |- + Manages an IoT fleet provisioning template. +--- + + + +# Resource: aws_iot_provisioning_template + +Manages an IoT fleet provisioning template. For more info, see the AWS documentation on [fleet provisioning](https://docs.aws.amazon.com/iot/latest/developerguide/provision-wo-cert.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { IotPolicy } from "./.gen/providers/aws/iot-policy"; +import { IotProvisioningTemplate } from "./.gen/providers/aws/iot-provisioning-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const devicePolicy = new DataAwsIamPolicyDocument(this, "device_policy", { + statement: [ + { + actions: ["iot:Subscribe"], + resources: ["*"], + }, + ], + }); + const iotAssumeRolePolicy = new DataAwsIamPolicyDocument( + this, + "iot_assume_role_policy", + { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["iot.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + } + ); + const iotFleetProvisioning = new IamRole(this, "iot_fleet_provisioning", { + assumeRolePolicy: Token.asString(iotAssumeRolePolicy.json), + name: "IoTProvisioningServiceRole", + path: "/service-role/", + }); + new IamRolePolicyAttachment(this, "iot_fleet_provisioning_registration", { + policyArn: + "arn:aws:iam::aws:policy/service-role/AWSIoTThingsRegistration", + role: iotFleetProvisioning.name, + }); + const awsIotPolicyDevicePolicy = new IotPolicy(this, "device_policy_4", { + name: "DevicePolicy", + policy: Token.asString(devicePolicy.json), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIotPolicyDevicePolicy.overrideLogicalId("device_policy"); + new IotProvisioningTemplate(this, "fleet", { + description: "My provisioning template", + name: "FleetTemplate", + provisioningRoleArn: iotFleetProvisioning.arn, + templateBody: Token.asString( + Fn.jsonencode({ + Parameters: { + SerialNumber: { + Type: "String", + }, + }, + Resources: { + certificate: { + Properties: { + CertificateId: { + Ref: "AWS::IoT::Certificate::Id", + }, + Status: "Active", + }, + Type: "AWS::IoT::Certificate", + }, + policy: { + Properties: { + PolicyName: awsIotPolicyDevicePolicy.name, + }, + Type: "AWS::IoT::Policy", + }, + }, + }) + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the fleet provisioning template. +* `description` - (Optional) The description of the fleet provisioning template. +* `enabled` - (Optional) True to enable the fleet provisioning template, otherwise false. +* `preProvisioningHook` - (Optional) Creates a pre-provisioning hook template. Details below. +* `provisioningRoleArn` - (Required) The role ARN for the role associated with the fleet provisioning template. This IoT role grants permission to provision a device. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `templateBody` - (Required) The JSON formatted contents of the fleet provisioning template. + +### pre_provisioning_hook + +The `preProvisioningHook` configuration block supports the following: + +* `payloadVersion` - (Optional) The version of the payload that was sent to the target function. The only valid (and the default) payload version is `"20200401"`. +* `targetArn` - (Optional) The ARN of the target function. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN that identifies the provisioning template. +* `defaultVersionId` - The default version of the fleet provisioning template. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT fleet provisioning templates using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IoT fleet provisioning templates using the `name`. 
For example:
+
+```console
+% terraform import aws_iot_provisioning_template.fleet FleetProvisioningTemplate
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_role_alias.html.markdown b/website/docs/cdktf/typescript/r/iot_role_alias.html.markdown
new file mode 100644
index 00000000000..0b24b695621
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_role_alias.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_role_alias"
+description: |-
+  Provides an IoT role alias.
+---
+
+
+
+# Resource: aws_iot_role_alias
+
+Provides an IoT role alias.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IotRoleAlias } from "./.gen/providers/aws/iot-role-alias";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["credentials.iot.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const role = new IamRole(this, "role", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "dynamodb-access-role",
+    });
+    new IotRoleAlias(this, "alias", {
+      alias: "Thermostat-dynamodb-access-role-alias",
+      roleArn: role.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `alias` - (Required) The name of the role alias.
+* `roleArn` - (Required) The identity of the role to which the alias refers.
+* `credentialDuration` - (Optional) The duration of the credential, in seconds. If you do not specify a value for this setting, the default maximum of one hour is applied. This setting can have a value from 900 seconds (15 minutes) to 43200 seconds (12 hours).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN assigned by AWS to this role alias.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IOT Role Alias using the alias. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IOT Role Alias using the alias. For example:
+
+```console
+% terraform import aws_iot_role_alias.example myalias
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_thing.html.markdown b/website/docs/cdktf/typescript/r/iot_thing.html.markdown
new file mode 100644
index 00000000000..a6c1134d019
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_thing.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_thing"
+description: |-
+  Creates and manages an AWS IoT Thing.
+--- + + + +# Resource: aws_iot_thing + +Creates and manages an AWS IoT Thing. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IotThing } from "./.gen/providers/aws/iot-thing"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IotThing(this, "example", { + attributes: { + First: "examplevalue", + }, + name: "example", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) The name of the thing. +* `attributes` - (Optional) Map of attributes of the thing. +* `thingTypeName` - (Optional) The thing type name. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `defaultClientId` - The default client ID. +* `version` - The current version of the thing record in the registry. +* `arn` - The ARN of the thing. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IOT Things using the name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IOT Things using the name. For example: + +```console +% terraform import aws_iot_thing.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iot_thing_group.html.markdown b/website/docs/cdktf/typescript/r/iot_thing_group.html.markdown new file mode 100644 index 00000000000..a6242809c03 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iot_thing_group.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing_group" +description: |- + Manages an AWS IoT Thing Group. +--- + + + +# Resource: aws_iot_thing_group + +Manages an AWS IoT Thing Group. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IotThingGroup } from "./.gen/providers/aws/iot-thing-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const parent = new IotThingGroup(this, "parent", { + name: "parent", + }); + new IotThingGroup(this, "example", { + name: "example", + parentGroupName: parent.name, + properties: { + attributePayload: { + attributes: { + One: "11111", + Two: "TwoTwo", + }, + }, + description: "This is my thing group", + }, + tags: { + terraform: "true", + }, + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) The name of the Thing Group. +* `parentGroupName` - (Optional) The name of the parent Thing Group. +* `properties` - (Optional) The Thing Group properties. Defined below. 
+* `tags` - (Optional) Key-value mapping of resource tags.
+
+### properties Reference
+
+* `attributePayload` - (Optional) The Thing Group attributes. Defined below.
+* `description` - (Optional) A description of the Thing Group.
+
+### attribute_payload Reference
+
+* `attributes` - (Optional) Key-value map.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the Thing Group.
+* `id` - The Thing Group ID.
+* `version` - The current version of the Thing Group record in the registry.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT Thing Groups using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IoT Thing Groups using the name. For example:
+
+```console
+% terraform import aws_iot_thing_group.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_thing_group_membership.html.markdown b/website/docs/cdktf/typescript/r/iot_thing_group_membership.html.markdown
new file mode 100644
index 00000000000..21a5e96d1f4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_thing_group_membership.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_thing_group_membership"
+description: |-
+  Adds an IoT Thing to an IoT Thing Group.
+---
+
+
+
+# Resource: aws_iot_thing_group_membership
+
+Adds an IoT Thing to an IoT Thing Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IotThingGroupMembership } from "./.gen/providers/aws/iot-thing-group-membership";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IotThingGroupMembership(this, "example", {
+      overrideDynamicGroup: true,
+      thingGroupName: "example-group",
+      thingName: "example-thing",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `thingName` - (Required) The name of the thing to add to a group.
+* `thingGroupName` - (Required) The name of the group to which you are adding a thing.
+* `overrideDynamicGroup` - (Optional) Override dynamic thing groups with static thing groups when the 10-group limit is reached. If a thing belongs to 10 thing groups, and one or more of those groups are dynamic thing groups, adding a thing to a static group removes the thing from the last dynamic group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The membership ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT Thing Group Membership using the thing group name and thing name.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IoT Thing Group Membership using the thing group name and thing name. For example: + +```console +% terraform import aws_iot_thing_group_membership.example thing_group_name/thing_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iot_thing_principal_attachment.html.markdown b/website/docs/cdktf/typescript/r/iot_thing_principal_attachment.html.markdown new file mode 100644 index 00000000000..af766635da3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iot_thing_principal_attachment.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing_principal_attachment" +description: |- + Provides AWS IoT Thing Principal attachment. +--- + + + +# Resource: aws_iot_thing_principal_attachment + +Attaches Principal to AWS IoT Thing. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IotCertificate } from "./.gen/providers/aws/iot-certificate"; +import { IotThing } from "./.gen/providers/aws/iot-thing"; +import { IotThingPrincipalAttachment } from "./.gen/providers/aws/iot-thing-principal-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const cert = new IotCertificate(this, "cert", { + active: true, + csr: Token.asString(Fn.file("csr.pem")), + }); + const example = new IotThing(this, "example", { + name: "example", + }); + new IotThingPrincipalAttachment(this, "att", { + principal: cert.arn, + thing: example.name, + }); + } +} + +``` + +## Argument Reference + +* `principal` - (Required) The AWS IoT Certificate ARN or Amazon Cognito Identity ID. +* `thing` - (Required) The name of the thing. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/iot_thing_type.html.markdown b/website/docs/cdktf/typescript/r/iot_thing_type.html.markdown new file mode 100644 index 00000000000..a2458fafc23 --- /dev/null +++ b/website/docs/cdktf/typescript/r/iot_thing_type.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "IoT Core" +layout: "aws" +page_title: "AWS: aws_iot_thing_type" +description: |- + Creates and manages an AWS IoT Thing Type. +--- + + + +# Resource: aws_iot_thing_type + +Creates and manages an AWS IoT Thing Type. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
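+ *
+ * The stack below registers a minimal thing type; the optional
+ * `properties` and `deprecated` arguments are described under
+ * Argument Reference.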
+ */
+import { IotThingType } from "./.gen/providers/aws/iot-thing-type";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new IotThingType(this, "foo", {
+      name: "my_iot_thing",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `name` - (Required, Forces New Resource) The name of the thing type.
+* `deprecated` - (Optional, Defaults to false) Whether the thing type is deprecated. If true, no new things can be associated with this type.
+* `properties` - (Optional) Configuration block that can contain the following properties of the thing type:
+    * `description` - (Optional, Forces New Resource) The description of the thing type.
+    * `searchableAttributes` - (Optional, Forces New Resource) A list of searchable thing attribute names.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the created AWS IoT Thing Type.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IOT Thing Types using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IOT Thing Types using the name. For example:
+
+```console
+% terraform import aws_iot_thing_type.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_topic_rule.html.markdown b/website/docs/cdktf/typescript/r/iot_topic_rule.html.markdown
new file mode 100644
index 00000000000..ce89b176e79
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_topic_rule.html.markdown
@@ -0,0 +1,287 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_topic_rule"
+description: |-
+  Creates and manages an AWS IoT topic rule
+---
+
+
+
+# Resource: aws_iot_topic_rule
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
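+ *
+ * In this example, messages matching the rule's SQL statement are published
+ * to one SNS topic, while delivery failures are routed to a second topic
+ * through the rule's `errorAction`.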
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { IotTopicRule } from "./.gen/providers/aws/iot-topic-rule"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const myerrortopic = new SnsTopic(this, "myerrortopic", { + name: "myerrortopic", + }); + const mytopic = new SnsTopic(this, "mytopic", { + name: "mytopic", + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["iot.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const iamPolicyForLambda = new DataAwsIamPolicyDocument( + this, + "iam_policy_for_lambda", + { + statement: [ + { + actions: ["sns:Publish"], + effect: "Allow", + resources: [mytopic.arn], + }, + ], + } + ); + const role = new IamRole(this, "role", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "myrole", + }); + const awsIamRolePolicyIamPolicyForLambda = new IamRolePolicy( + this, + "iam_policy_for_lambda_5", + { + name: "mypolicy", + policy: Token.asString(iamPolicyForLambda.json), + role: role.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyIamPolicyForLambda.overrideLogicalId( + "iam_policy_for_lambda" + ); + new IotTopicRule(this, "rule", { + description: "Example rule", + enabled: true, + errorAction: { + sns: { + messageFormat: "RAW", + roleArn: role.arn, + targetArn: myerrortopic.arn, + }, + }, + name: "MyRule", + sns: [ + { + messageFormat: "RAW", + roleArn: role.arn, + targetArn: mytopic.arn, + }, + ], + sql: "SELECT * FROM 'topic/test'", + sqlVersion: "2016-03-23", + }); + } +} + +``` + +## Argument Reference + +* `name` - (Required) The name of the rule. +* `description` - (Optional) The description of the rule. +* `enabled` - (Required) Specifies whether the rule is enabled. +* `sql` - (Required) The SQL statement used to query the topic. For more information, see AWS IoT SQL Reference (http://docs.aws.amazon.com/iot/latest/developerguide/iot-rules.html#aws-iot-sql-reference) in the AWS IoT Developer Guide. +* `sqlVersion` - (Required) The version of the SQL rules engine to use when evaluating the rule. +* `errorAction` - (Optional) Configuration block with error action to be associated with the rule. See the documentation for `cloudwatchAlarm`, `cloudwatchLogs`, `cloudwatchMetric`, `dynamodb`, `dynamodbv2`, `elasticsearch`, `firehose`, `http`, `iotAnalytics`, `iotEvents`, `kafka`, `kinesis`, `lambda`, `republish`, `s3`, `sns`, `sqs`, `stepFunctions`, `timestream` configuration blocks for further configuration details. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `cloudwatchAlarm` object takes the following arguments: + +* `alarmName` - (Required) The CloudWatch alarm name. +* `roleArn` - (Required) The IAM role ARN that allows access to the CloudWatch alarm. 
+* `stateReason` - (Required) The reason for the alarm change. +* `stateValue` - (Required) The value of the alarm state. Acceptable values are: OK, ALARM, INSUFFICIENT_DATA. + +The `cloudwatchLogs` object takes the following arguments: + +* `logGroupName` - (Required) The CloudWatch log group name. +* `roleArn` - (Required) The IAM role ARN that allows access to the CloudWatch alarm. + +The `cloudwatchMetric` object takes the following arguments: + +* `metricName` - (Required) The CloudWatch metric name. +* `metricNamespace` - (Required) The CloudWatch metric namespace name. +* `metricTimestamp` - (Optional) An optional Unix timestamp (http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#about_timestamp). +* `metricUnit` - (Required) The metric unit (supported units can be found here: http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#Unit) +* `metricValue` - (Required) The CloudWatch metric value. +* `roleArn` - (Required) The IAM role ARN that allows access to the CloudWatch metric. + +The `dynamodb` object takes the following arguments: + +* `hashKeyField` - (Required) The hash key name. +* `hashKeyType` - (Optional) The hash key type. Valid values are "STRING" or "NUMBER". +* `hashKeyValue` - (Required) The hash key value. +* `payloadField` - (Optional) The action payload. +* `rangeKeyField` - (Optional) The range key name. +* `rangeKeyType` - (Optional) The range key type. Valid values are "STRING" or "NUMBER". +* `rangeKeyValue` - (Optional) The range key value. +* `operation` - (Optional) The operation. Valid values are "INSERT", "UPDATE", or "DELETE". +* `roleArn` - (Required) The ARN of the IAM role that grants access to the DynamoDB table. +* `tableName` - (Required) The name of the DynamoDB table. + +The `dynamodbv2` object takes the following arguments: + +* `putItem` - (Required) Configuration block with DynamoDB Table to which the message will be written. Nested arguments below. + * `tableName` - (Required) The name of the DynamoDB table. +* `roleArn` - (Required) The ARN of the IAM role that grants access to the DynamoDB table. + +The `elasticsearch` object takes the following arguments: + +* `endpoint` - (Required) The endpoint of your Elasticsearch domain. +* `id` - (Required) The unique identifier for the document you are storing. +* `index` - (Required) The Elasticsearch index where you want to store your data. +* `roleArn` - (Required) The IAM role ARN that has access to Elasticsearch. +* `type` - (Required) The type of document you are storing. + +The `firehose` object takes the following arguments: + +* `deliveryStreamName` - (Required) The delivery stream name. +* `roleArn` - (Required) The IAM role ARN that grants access to the Amazon Kinesis Firehose stream. +* `separator` - (Optional) A character separator that is used to separate records written to the Firehose stream. Valid values are: '\n' (newline), '\t' (tab), '\r\n' (Windows newline), ',' (comma). +* `batchMode` - (Optional) The payload that contains a JSON array of records will be sent to Kinesis Firehose via a batch call. + +The `http` object takes the following arguments: + +* `url` - (Required) The HTTPS URL. +* `confirmationUrl` - (Optional) The HTTPS URL used to verify ownership of `url`. +* `httpHeader` - (Optional) Custom HTTP header IoT Core should send. It is possible to define more than one custom header. + +The `httpHeader` object takes the following arguments: + +* `key` - (Required) The name of the HTTP header. 
+* `value` - (Required) The value of the HTTP header. + +The `iotAnalytics` object takes the following arguments: + +* `channelName` - (Required) Name of AWS IOT Analytics channel. +* `roleArn` - (Required) The ARN of the IAM role that grants access. +* `batchMode` - (Optional) The payload that contains a JSON array of records will be sent to IoT Analytics via a batch call. + +The `iotEvents` object takes the following arguments: + +* `inputName` - (Required) The name of the AWS IoT Events input. +* `roleArn` - (Required) The ARN of the IAM role that grants access. +* `messageId` - (Optional) Use this to ensure that only one input (message) with a given messageId is processed by an AWS IoT Events detector. +* `batchMode` - (Optional) The payload that contains a JSON array of records will be sent to IoT Events via a batch call. + +The `kafka` object takes the following arguments: + +* `clientProperties` - (Required) Properties of the Apache Kafka producer client. For more info, see the [AWS documentation](https://docs.aws.amazon.com/iot/latest/developerguide/apache-kafka-rule-action.html). +* `destinationArn` - (Required) The ARN of Kafka action's VPC [`awsIotTopicRuleDestination`](iot_topic_rule_destination.html) . +* `key` - (Optional) The Kafka message key. +* `partition` - (Optional) The Kafka message partition. +* `topic` - (Optional) The Kafka topic for messages to be sent to the Kafka broker. + +The `kinesis` object takes the following arguments: + +* `partitionKey` - (Optional) The partition key. +* `roleArn` - (Required) The ARN of the IAM role that grants access to the Amazon Kinesis stream. +* `streamName` - (Required) The name of the Amazon Kinesis stream. + +The `lambda` object takes the following arguments: + +* `functionArn` - (Required) The ARN of the Lambda function. + +The `republish` object takes the following arguments: + +* `roleArn` - (Required) The ARN of the IAM role that grants access. +* `topic` - (Required) The name of the MQTT topic the message should be republished to. +* `qos` - (Optional) The Quality of Service (QoS) level to use when republishing messages. Valid values are 0 or 1. The default value is 0. + +The `s3` object takes the following arguments: + +* `bucketName` - (Required) The Amazon S3 bucket name. +* `cannedAcl` - (Optional) The Amazon S3 canned ACL that controls access to the object identified by the object key. [Valid values](https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl). +* `key` - (Required) The object key. +* `roleArn` - (Required) The ARN of the IAM role that grants access. + +The `sns` object takes the following arguments: + +* `messageFormat` - (Required) The message format of the message to publish. Accepted values are "JSON" and "RAW". +* `roleArn` - (Required) The ARN of the IAM role that grants access. +* `targetArn` - (Required) The ARN of the SNS topic. + +The `sqs` object takes the following arguments: + +* `queueUrl` - (Required) The URL of the Amazon SQS queue. +* `roleArn` - (Required) The ARN of the IAM role that grants access. +* `useBase64` - (Required) Specifies whether to use Base64 encoding. + +The `stepFunctions` object takes the following arguments: + +* `executionNamePrefix` - (Optional) The prefix used to generate, along with a UUID, the unique state machine execution name. +* `stateMachineName` - (Required) The name of the Step Functions state machine whose execution will be started. 
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the topic rule
+* `arn` - The ARN of the topic rule
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT Topic Rules using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IoT Topic Rules using the `name`. For example:
+
+```console
+% terraform import aws_iot_topic_rule.rule rule
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/iot_topic_rule_destination.html.markdown b/website/docs/cdktf/typescript/r/iot_topic_rule_destination.html.markdown
new file mode 100644
index 00000000000..6bc82184528
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/iot_topic_rule_destination.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "IoT Core"
+layout: "aws"
+page_title: "AWS: aws_iot_topic_rule_destination"
+description: |-
+  Creates and manages an AWS IoT topic rule destination
+---
+
+
+
+# Resource: aws_iot_topic_rule_destination
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { IotTopicRuleDestination } from "./.gen/providers/aws/iot-topic-rule-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IotTopicRuleDestination(this, "example", { + vpcConfiguration: { + roleArn: Token.asString(awsIamRoleExample.arn), + securityGroups: [Token.asString(awsSecurityGroupExample.id)], + subnetIds: Token.asList(propertyAccess(awsSubnetExample, ["*", "id"])), + vpcId: Token.asString(awsVpcExample.id), + }, + }); + } +} + +``` + +## Argument Reference + +* `enabled` - (Optional) Whether or not to enable the destination. Default: `true`. +* `vpcConfiguration` - (Required) Configuration of the virtual private cloud (VPC) connection. For more info, see the [AWS documentation](https://docs.aws.amazon.com/iot/latest/developerguide/vpc-rule-action.html). + +The `vpcConfiguration` object takes the following arguments: + +* `roleArn` - (Required) The ARN of a role that has permission to create and attach to elastic network interfaces (ENIs). +* `securityGroups` - (Optional) The security groups of the VPC destination. +* `subnetIds` - (Required) The subnet IDs of the VPC destination. +* `vpcId` - (Required) The ID of the VPC. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the topic rule destination + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IoT topic rule destinations using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IoT topic rule destinations using the `arn`. For example: + +```console +% terraform import aws_iot_topic_rule_destination.example arn:aws:iot:us-west-2:123456789012:ruledestination/vpc/2ce781c8-68a6-4c52-9c62-63fe489ecc60 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ivs_channel.html.markdown b/website/docs/cdktf/typescript/r/ivs_channel.html.markdown new file mode 100644 index 00000000000..e71b553113e --- /dev/null +++ b/website/docs/cdktf/typescript/r/ivs_channel.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "IVS (Interactive Video)" +layout: "aws" +page_title: "AWS: aws_ivs_channel" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Channel. +--- + + + +# Resource: aws_ivs_channel + +Terraform resource for managing an AWS IVS (Interactive Video) Channel. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { IvsChannel } from "./.gen/providers/aws/ivs-channel"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IvsChannel(this, "example", { + name: "channel-1", + }); + } +} + +``` + +## Argument Reference + +The following arguments are optional: + +* `authorized` - (Optional) If `true`, channel is private (enabled for playback authorization). +* `latencyMode` - (Optional) Channel latency mode. Valid values: `normal`, `low`. +* `name` - (Optional) Channel name. +* `recordingConfigurationArn` - (Optional) Recording configuration ARN. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) Channel type, which determines the allowable resolution and bitrate. Valid values: `standard`, `basic`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Channel. +* `ingestEndpoint` - Channel ingest endpoint, part of the definition of an ingest server, used when setting up streaming software. +* `playbackUrl` - Channel playback URL. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `5M`) +* `update` - (Default `5M`) +* `delete` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Channel using the ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IVS (Interactive Video) Channel using the ARN. For example: + +```console +% terraform import aws_ivs_channel.example arn:aws:ivs:us-west-2:326937407773:channel/0Y1lcs4U7jk5 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ivs_playback_key_pair.html.markdown b/website/docs/cdktf/typescript/r/ivs_playback_key_pair.html.markdown new file mode 100644 index 00000000000..433bfdf2286 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ivs_playback_key_pair.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "IVS (Interactive Video)" +layout: "aws" +page_title: "AWS: aws_ivs_playback_key_pair" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Playback Key Pair. +--- + + + +# Resource: aws_ivs_playback_key_pair + +Terraform resource for managing an AWS IVS (Interactive Video) Playback Key Pair. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { IvsPlaybackKeyPair } from "./.gen/providers/aws/ivs-playback-key-pair"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IvsPlaybackKeyPair(this, "example", { + publicKey: Token.asString(Fn.file("./public-key.pem")), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `publicKey` - (Required) Public portion of a customer-generated key pair. Must be an ECDSA public key in PEM format. + +The following arguments are optional: + +* `name` - (Optional) Playback Key Pair name. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Playback Key Pair. +* `fingerprint` - Key-pair identifier. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `5M`) +* `delete` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Playback Key Pair using the ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IVS (Interactive Video) Playback Key Pair using the ARN. For example: + +```console +% terraform import aws_ivs_playback_key_pair.example arn:aws:ivs:us-west-2:326937407773:playback-key/KDJRJNQhiQzA +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ivs_recording_configuration.html.markdown b/website/docs/cdktf/typescript/r/ivs_recording_configuration.html.markdown new file mode 100644 index 00000000000..be1b1a99c04 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ivs_recording_configuration.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "IVS (Interactive Video)" +layout: "aws" +page_title: "AWS: aws_ivs_recording_configuration" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Recording Configuration. +--- + + + +# Resource: aws_ivs_recording_configuration + +Terraform resource for managing an AWS IVS (Interactive Video) Recording Configuration. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { IvsRecordingConfiguration } from "./.gen/providers/aws/ivs-recording-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IvsRecordingConfiguration(this, "example", { + destinationConfiguration: { + s3: { + bucketName: "ivs-stream-archive", + }, + }, + name: "recording_configuration-1", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `destinationConfiguration` - Object containing destination configuration for where recorded video will be stored. + * `s3` - S3 destination configuration where recorded videos will be stored. + * `bucketName` - S3 bucket name where recorded videos will be stored. + +The following arguments are optional: + +* `name` - (Optional) Recording Configuration name. +* `recordingReconnectWindowSeconds` - (Optional) If a broadcast disconnects and then reconnects within the specified interval, the multiple streams will be considered a single broadcast and merged together. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `thumbnailConfiguration` - (Optional) Object containing information to enable/disable the recording of thumbnails for a live session and modify the interval at which thumbnails are generated for the live session. + * `recordingMode` - (Optional) Thumbnail recording mode. Valid values: `disabled`, `interval`. + * `targetIntervalSeconds` (Configurable [and required] only if `recordingMode` is `interval`) - The targeted thumbnail-generation interval in seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Recording Configuration. +* `state` - The current state of the Recording Configuration. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `10M`) +* `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Recording Configuration using the ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IVS (Interactive Video) Recording Configuration using the ARN. 
For example: + +```console +% terraform import aws_ivs_recording_configuration.example arn:aws:ivs:us-west-2:326937407773:recording-configuration/KAk1sHBl2L47 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ivschat_logging_configuration.html.markdown b/website/docs/cdktf/typescript/r/ivschat_logging_configuration.html.markdown new file mode 100644 index 00000000000..a272d124b11 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ivschat_logging_configuration.html.markdown @@ -0,0 +1,217 @@ +--- +subcategory: "IVS (Interactive Video) Chat" +layout: "aws" +page_title: "AWS: aws_ivschat_logging_configuration" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Chat Logging Configuration. +--- + + + +# Resource: aws_ivschat_logging_configuration + +Terraform resource for managing an AWS IVS (Interactive Video) Chat Logging Configuration. + +## Example Usage + +### Basic Usage - Logging to CloudWatch + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { IvschatLoggingConfiguration } from "./.gen/providers/aws/ivschat-logging-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", {}); + const awsIvschatLoggingConfigurationExample = + new IvschatLoggingConfiguration(this, "example_1", { + destinationConfiguration: { + cloudwatchLogs: { + logGroupName: example.name, + }, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIvschatLoggingConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +### Basic Usage - Logging to Kinesis Firehose with Extended S3 + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IvschatLoggingConfiguration } from "./.gen/providers/aws/ivschat-logging-configuration"; +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucketPrefix: "tf-ivschat-logging-bucket", + }); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_1", { + acl: "private", + bucket: example.id, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsS3BucketAclExample.overrideLogicalId("example");
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["firehose.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const awsIamRoleExample = new IamRole(this, "example_3", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "firehose_example_role",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    const awsKinesisFirehoseDeliveryStreamExample =
+      new KinesisFirehoseDeliveryStream(this, "example_4", {
+        destination: "extended_s3",
+        extendedS3Configuration: {
+          bucketArn: example.arn,
+          roleArn: Token.asString(awsIamRoleExample.arn),
+        },
+        name: "terraform-kinesis-firehose-extended-s3-example-stream",
+        tags: {
+          LogDeliveryEnabled: "true",
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKinesisFirehoseDeliveryStreamExample.overrideLogicalId("example");
+    const awsIvschatLoggingConfigurationExample =
+      new IvschatLoggingConfiguration(this, "example_5", {
+        destinationConfiguration: {
+          firehose: {
+            deliveryStreamName: Token.asString(
+              awsKinesisFirehoseDeliveryStreamExample.name
+            ),
+          },
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIvschatLoggingConfigurationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Basic Usage - Logging to S3
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { IvschatLoggingConfiguration } from "./.gen/providers/aws/ivschat-logging-configuration";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "tf-ivschat-logging",
+      forceDestroy: true,
+    });
+    const awsIvschatLoggingConfigurationExample =
+      new IvschatLoggingConfiguration(this, "example_1", {
+        destinationConfiguration: {
+          s3: {
+            bucketName: example.id,
+          },
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIvschatLoggingConfigurationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `destinationConfiguration` - (Required) Object containing destination configuration for where chat activity will be logged. This object must contain exactly one of the following child arguments:
+    * `cloudwatchLogs` - An Amazon CloudWatch Logs destination configuration where chat activity will be logged.
+        * `logGroupName` - Name of the Amazon CloudWatch Logs destination where chat activity will be logged.
+    * `firehose` - An Amazon Kinesis Data Firehose destination configuration where chat activity will be logged.
+ * `deliveryStreamName` - Name of the Amazon Kinesis Firehose delivery stream where chat activity will be logged. + * `s3` - An Amazon S3 destination configuration where chat activity will be logged. + * `bucketName` - Name of the Amazon S3 bucket where chat activity will be logged. + +The following arguments are optional: + +* `name` - (Optional) Logging Configuration name. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Logging Configuration. +* `id` - ID of the Logging Configuration. +* `state` - State of the Logging Configuration. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `5M`) +* `update` - (Default `5M`) +* `delete` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Chat Logging Configuration using the ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import IVS (Interactive Video) Chat Logging Configuration using the ARN. For example: + +```console +% terraform import aws_ivschat_logging_configuration.example arn:aws:ivschat:us-west-2:326937407773:logging-configuration/MMUQc8wcqZmC +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ivschat_room.html.markdown b/website/docs/cdktf/typescript/r/ivschat_room.html.markdown new file mode 100644 index 00000000000..868df251588 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ivschat_room.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "IVS (Interactive Video) Chat" +layout: "aws" +page_title: "AWS: aws_ivschat_room" +description: |- + Terraform resource for managing an AWS IVS (Interactive Video) Chat Room. +--- + + + +# Resource: aws_ivschat_room + +Terraform resource for managing an AWS IVS (Interactive Video) Chat Room. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { IvschatRoom } from "./.gen/providers/aws/ivschat-room"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new IvschatRoom(this, "example", { + name: "tf-room", + }); + } +} + +``` + +## Usage with Logging Configuration to S3 Bucket + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { IvschatLoggingConfiguration } from "./.gen/providers/aws/ivschat-logging-configuration"; +import { IvschatRoom } from "./.gen/providers/aws/ivschat-room"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucketPrefix: "tf-ivschat-logging-bucket-", + forceDestroy: true, + }); + const awsIvschatLoggingConfigurationExample = + new IvschatLoggingConfiguration(this, "example_1", { + destinationConfiguration: { + s3: { + bucketName: example.id, + }, + }, + lifecycle: { + createBeforeDestroy: true, + }, + name: "tf-ivschat-loggingconfiguration", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIvschatLoggingConfigurationExample.overrideLogicalId("example"); + const awsIvschatRoomExample = new IvschatRoom(this, "example_2", { + loggingConfigurationIdentifiers: [ + Token.asString(awsIvschatLoggingConfigurationExample.arn), + ], + name: "tf-ivschat-room", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIvschatRoomExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are optional: + +* `loggingConfigurationIdentifiers` - (Optional) List of Logging Configuration + ARNs to attach to the room. +* `maximumMessageLength` - (Optional) Maximum number of characters in a single + message. Messages are expected to be UTF-8 encoded and this limit applies + specifically to rune/code-point count, not number of bytes. +* `maximumMessageRatePerSecond` - (Optional) Maximum number of messages per + second that can be sent to the room (by all clients). +* `messageReviewHandler` - (Optional) Configuration information for optional + review of messages. + * `fallbackResult` - (Optional) The fallback behavior (whether the message + is allowed or denied) if the handler does not return a valid response, + encounters an error, or times out. Valid values: `allow`, `deny`. + * `uri` - (Optional) ARN of the lambda message review handler function. +* `name` - (Optional) Room name. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Room. 
+* `id` - Room ID
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `5M`)
+* `update` - (Default `5M`)
+* `delete` - (Default `5M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IVS (Interactive Video) Chat Room using the ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import IVS (Interactive Video) Chat Room using the ARN. For example:
+
+```console
+% terraform import aws_ivschat_room.example arn:aws:ivschat:us-west-2:326937407773:room/GoXEXyB4VwHb
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kendra_data_source.html.markdown b/website/docs/cdktf/typescript/r/kendra_data_source.html.markdown
new file mode 100644
index 00000000000..3c4ed4717e9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kendra_data_source.html.markdown
@@ -0,0 +1,695 @@
+---
+subcategory: "Kendra"
+layout: "aws"
+page_title: "AWS: aws_kendra_data_source"
+description: |-
+  Terraform resource for managing an AWS Kendra Data Source.
+---
+
+
+
+# Resource: aws_kendra_data_source
+
+Terraform resource for managing an AWS Kendra Data Source.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KendraDataSource(this, "example", {
+      description: "example",
+      indexId: Token.asString(awsKendraIndexExample.id),
+      languageCode: "en",
+      name: "example",
+      tags: {
+        hello: "world",
+      },
+      type: "CUSTOM",
+    });
+  }
+}
+
+```
+
+### S3 Connector
+
+#### With Schedule
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KendraDataSource(this, "example", {
+      configuration: {
+        s3Configuration: {
+          bucketName: Token.asString(awsS3BucketExample.id),
+        },
+      },
+      indexId: Token.asString(awsKendraIndexExample.id),
+      name: "example",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      schedule: "cron(9 10 1 * ? *)",
+      type: "S3",
+    });
+  }
+}
+
+```
+
+#### With Access Control List
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KendraDataSource(this, "example", {
+      configuration: {
+        s3Configuration: {
+          accessControlListConfiguration: {
+            keyPath: "s3://${" + awsS3BucketExample.id + "}/path-1",
+          },
+          bucketName: Token.asString(awsS3BucketExample.id),
+        },
+      },
+      indexId: Token.asString(awsKendraIndexExample.id),
+      name: "example",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      type: "S3",
+    });
+  }
+}
+
+```
+
+#### With Documents Metadata Configuration
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KendraDataSource(this, "example", {
+      configuration: {
+        s3Configuration: {
+          bucketName: Token.asString(awsS3BucketExample.id),
+          documentsMetadataConfiguration: {
+            s3Prefix: "example",
+          },
+          exclusionPatterns: ["example"],
+          inclusionPatterns: ["hello"],
+          inclusionPrefixes: ["world"],
+        },
+      },
+      indexId: Token.asString(awsKendraIndexExample.id),
+      name: "example",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      type: "S3",
+    });
+  }
+}
+
+```
+
+### Web Crawler Connector
+
+#### With Seed URLs
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KendraDataSource(this, "example", {
+      configuration: {
+        webCrawlerConfiguration: {
+          urls: {
+            seedUrlConfiguration: {
+              seedUrls: ["REPLACE_WITH_YOUR_URL"],
+            },
+          },
+        },
+      },
+      indexId: Token.asString(awsKendraIndexExample.id),
+      name: "example",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      type: "WEBCRAWLER",
+    });
+  }
+}
+
+```
+
+#### With Site Maps
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + urls: { + siteMapsConfiguration: { + siteMaps: ["REPLACE_WITH_YOUR_URL"], + }, + }, + }, + }, + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +#### With Web Crawler Mode + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + urls: { + seedUrlConfiguration: { + seedUrls: ["REPLACE_WITH_YOUR_URL"], + webCrawlerMode: "SUBDOMAINS", + }, + }, + }, + }, + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +#### With Authentication Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + authenticationConfiguration: { + basicAuthentication: [ + { + credentials: Token.asString(awsSecretsmanagerSecretExample.arn), + host: "a.example.com", + port: Token.asNumber("443"), + }, + ], + }, + urls: { + seedUrlConfiguration: { + seedUrls: ["REPLACE_WITH_YOUR_URL"], + }, + }, + }, + }, + dependsOn: [awsSecretsmanagerSecretVersionExample], + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +#### With Crawl Depth + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + crawlDepth: 3, + urls: { + seedUrlConfiguration: { + seedUrls: ["REPLACE_WITH_YOUR_URL"], + }, + }, + }, + }, + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +#### With Max Links Per Page + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + maxLinksPerPage: 100, + urls: { + seedUrlConfiguration: { + seedUrls: ["REPLACE_WITH_YOUR_URL"], + }, + }, + }, + }, + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +#### With Max Urls Per Minute Crawl Rate + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + maxUrlsPerMinuteCrawlRate: 300, + urls: { + seedUrlConfiguration: { + seedUrls: ["REPLACE_WITH_YOUR_URL"], + }, + }, + }, + }, + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +#### With Proxy Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + proxyConfiguration: { + credentials: Token.asString(awsSecretsmanagerSecretExample.arn), + host: "a.example.com", + port: Token.asNumber("443"), + }, + urls: { + seedUrlConfiguration: { + seedUrls: ["REPLACE_WITH_YOUR_URL"], + }, + }, + }, + }, + dependsOn: [awsSecretsmanagerSecretVersionExample], + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +#### With URL Exclusion and Inclusion Patterns + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraDataSource } from "./.gen/providers/aws/kendra-data-source"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraDataSource(this, "example", { + configuration: { + webCrawlerConfiguration: { + urlExclusionPatterns: ["example"], + urlInclusionPatterns: ["hello"], + urls: { + seedUrlConfiguration: { + seedUrls: ["REPLACE_WITH_YOUR_URL"], + }, + }, + }, + }, + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + type: "WEBCRAWLER", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `indexId` - (Required, Forces new resource) The identifier of the index for your Amazon Kendra data source. +* `name` - (Required) A name for your data source connector. +* `roleArn` - (Required, Optional in one scenario) The Amazon Resource Name (ARN) of a role with permission to access the data source connector. For more information, see [IAM roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). You can't specify the `roleArn` parameter when the `type` parameter is set to `custom`. The `roleArn` parameter is required for all other data sources. +* `type` - (Required, Forces new resource) The type of data source repository. For an updated list of values, refer to [Valid Values for Type](https://docs.aws.amazon.com/kendra/latest/dg/API_CreateDataSource.html#Kendra-CreateDataSource-request-Type). + +The following arguments are optional: + +* `configuration` - (Optional) A block with the configuration information to connect to your Data Source repository. You can't specify the `configuration` block when the `type` parameter is set to `custom`. [Detailed below](#configuration-block). +* `customDocumentEnrichmentConfiguration` - (Optional) A block with the configuration information for altering document metadata and content during the document ingestion process. For more information on how to create, modify and delete document metadata, or make other content alterations when you ingest documents into Amazon Kendra, see [Customizing document metadata during the ingestion process](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html). [Detailed below](#custom_document_enrichment_configuration-block). 
+* `description` - (Optional) A description for the Data Source connector.
+* `languageCode` - (Optional) The code for a language. This allows you to support a language for all documents when creating the Data Source connector. English is supported by default. For more information on supported languages, including their codes, see [Adding documents in languages other than English](https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html).
+* `schedule` - (Optional) Sets the frequency for Amazon Kendra to check the documents in your Data Source repository and update the index. If you don't set a schedule, Amazon Kendra will not periodically update the index. You can call the `startDataSourceSyncJob` API to update the index.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### configuration Block
+
+The `configuration` configuration block supports the following arguments:
+
+* `s3Configuration` - (Required if `type` is set to `S3`) A block that provides the configuration information to connect to an Amazon S3 bucket as your data source. [Detailed below](#s3_configuration-block).
+* `webCrawlerConfiguration` - (Required if `type` is set to `WEBCRAWLER`) A block that provides the configuration information required for Amazon Kendra Web Crawler. [Detailed below](#web_crawler_configuration-block).
+
+### s3_configuration Block
+
+The `s3Configuration` configuration block supports the following arguments:
+
+* `accessControlListConfiguration` - (Optional) A block that provides the path to the S3 bucket that contains the user context filtering files for the data source. For the format of the file, see [Access control for S3 data sources](https://docs.aws.amazon.com/kendra/latest/dg/s3-acl.html). [Detailed below](#access_control_list_configuration-block).
+* `bucketName` - (Required) The name of the bucket that contains the documents.
+* `documentsMetadataConfiguration` - (Optional) A block that defines the Document metadata files that contain information such as the document access control information, source URI, document author, and custom attributes. Each metadata file contains metadata about a single document. [Detailed below](#documents_metadata_configuration-block).
+* `exclusionPatterns` - (Optional) A list of glob patterns for documents that should not be indexed. If a document that matches an inclusion prefix or inclusion pattern also matches an exclusion pattern, the document is not indexed. Refer to [Exclusion Patterns for more examples](https://docs.aws.amazon.com/kendra/latest/dg/API_S3DataSourceConfiguration.html#Kendra-Type-S3DataSourceConfiguration-ExclusionPatterns).
+* `inclusionPatterns` - (Optional) A list of glob patterns for documents that should be indexed. If a document that matches an inclusion pattern also matches an exclusion pattern, the document is not indexed. Refer to [Inclusion Patterns for more examples](https://docs.aws.amazon.com/kendra/latest/dg/API_S3DataSourceConfiguration.html#Kendra-Type-S3DataSourceConfiguration-InclusionPatterns).
+* `inclusionPrefixes` - (Optional) A list of S3 prefixes for the documents that should be included in the index.
+
+### access_control_list_configuration Block
+
+The `accessControlListConfiguration` configuration block supports the following arguments:
+
+* `keyPath` - (Optional) Path to the AWS S3 bucket that contains the ACL files.
+
+### documents_metadata_configuration Block
+
+The `documentsMetadataConfiguration` configuration block supports the following arguments:
+
+* `s3Prefix` - (Optional) A prefix used to filter metadata configuration files in the AWS S3 bucket. The S3 bucket might contain multiple metadata files. Use `s3Prefix` to include only the desired metadata files.
+
+### web_crawler_configuration Block
+
+The `webCrawlerConfiguration` configuration block supports the following arguments:
+
+* `authenticationConfiguration` - (Optional) A block with the configuration information required to connect to websites using authentication. You can connect to websites using basic authentication of user name and password. You use a secret in AWS Secrets Manager to store your authentication credentials. You must provide the website host name and port number. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"` and the port is `443`, the standard port for HTTPS. [Detailed below](#authentication_configuration-block).
+* `crawlDepth` - (Optional) Specifies the number of levels in a website that you want to crawl. The first level begins from the website seed or starting point URL. For example, if a website has 3 levels – index level (i.e. seed in this example), sections level, and subsections level – and you are only interested in crawling information up to the sections level (i.e. levels 0-1), you can set your depth to 1. The default crawl depth is set to `2`. Minimum value of `0`. Maximum value of `10`.
+* `maxContentSizePerPageInMegaBytes` - (Optional) The maximum size (in MB) of a webpage or attachment to crawl. Files larger than this size (in MB) are skipped/not crawled. The default maximum size of a webpage or attachment is set to `50` MB. Minimum value of `1.0E-06`. Maximum value of `50`.
+* `maxLinksPerPage` - (Optional) The maximum number of URLs on a webpage to include when crawling a website. This number is per webpage. As a website’s webpages are crawled, any URLs the webpages link to are also crawled. URLs on a webpage are crawled in order of appearance. The default maximum links per page is `100`. Minimum value of `1`. Maximum value of `1000`.
+* `maxUrlsPerMinuteCrawlRate` - (Optional) The maximum number of URLs crawled per website host per minute. The default maximum number of URLs crawled per website host per minute is `300`. Minimum value of `1`. Maximum value of `300`.
+* `proxyConfiguration` - (Optional) Configuration information required to connect to your internal websites via a web proxy. You must provide the website host name and port number. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"` and the port is `443`, the standard port for HTTPS. Web proxy credentials are optional and you can use them to connect to a web proxy server that requires basic authentication. To store web proxy credentials, you use a secret in [AWS Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html). [Detailed below](#proxy_configuration-block).
+* `urlExclusionPatterns` - (Optional) A list of regular expression patterns to exclude certain URLs to crawl. URLs that match the patterns are excluded from the index. URLs that don't match the patterns are included in the index. If a URL matches both an inclusion and exclusion pattern, the exclusion pattern takes precedence and the URL file isn't included in the index. Array Members: Minimum number of `0` items. Maximum number of `100` items. Length Constraints: Minimum length of `1`. Maximum length of `150`.
+* `urlInclusionPatterns` - (Optional) A list of regular expression patterns to include certain URLs to crawl. URLs that match the patterns are included in the index. URLs that don't match the patterns are excluded from the index. If a URL matches both an inclusion and exclusion pattern, the exclusion pattern takes precedence and the URL file isn't included in the index. Array Members: Minimum number of `0` items. Maximum number of `100` items. Length Constraints: Minimum length of `1`. Maximum length of `150`.
+* `urls` - (Required) A block that specifies the seed or starting point URLs of the websites or the sitemap URLs of the websites you want to crawl. You can include website subdomains. You can list up to `100` seed URLs and up to `3` sitemap URLs. You can only crawl websites that use the secure communication protocol, Hypertext Transfer Protocol Secure (HTTPS). If you receive an error when crawling a website, it could be that the website is blocked from crawling. When selecting websites to index, you must adhere to the [Amazon Acceptable Use Policy](https://aws.amazon.com/aup/) and all other Amazon terms. Remember that you must only use Amazon Kendra Web Crawler to index your own webpages, or webpages that you have authorization to index. [Detailed below](#urls-block).
+
+### authentication_configuration Block
+
+The `authenticationConfiguration` configuration block supports the following arguments:
+
+* `basicAuthentication` - (Optional) The list of configuration information that's required to connect to and crawl a website host using basic authentication credentials. The list includes the name and port number of the website host. [Detailed below](#basic_authentication-block).
+
+### basic_authentication Block
+
+The `basicAuthentication` configuration block supports the following arguments:
+
+* `credentials` - (Required) Your secret ARN, which you can create in AWS Secrets Manager. You use a secret if basic authentication credentials are required to connect to a website. The secret stores your user name and password credentials.
+* `host` - (Required) The name of the website host you want to connect to using authentication credentials. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"`.
+* `port` - (Required) The port number of the website host you want to connect to using authentication credentials. For example, the port for `https://a.example.com/page1.html` is `443`, the standard port for HTTPS.
+
+### proxy_configuration Block
+
+The `proxyConfiguration` configuration block supports the following arguments:
+
+* `credentials` - (Optional) Your secret ARN, which you can create in AWS Secrets Manager. The credentials are optional. You use a secret if web proxy credentials are required to connect to a website host. Amazon Kendra currently supports basic authentication to connect to a web proxy server. The secret stores your credentials.
+* `host` - (Required) The name of the website host you want to connect to via a web proxy server. For example, the host name of `https://a.example.com/page1.html` is `"a.example.com"`.
+* `port` - (Required) The port number of the website host you want to connect to via a web proxy server. For example, the port for `https://a.example.com/page1.html` is `443`, the standard port for HTTPS.
+
+### urls Block
+
+The `urls` configuration block supports the following arguments:
+
+* `seedUrlConfiguration` - (Optional) A block that specifies the configuration of the seed or starting point URLs of the websites you want to crawl. You can choose to crawl only the website host names, or the website host names with subdomains, or the website host names with subdomains and other domains that the webpages link to. You can list up to `100` seed URLs. [Detailed below](#seed_url_configuration-block).
+* `siteMapsConfiguration` - (Optional) A block that specifies the configuration of the sitemap URLs of the websites you want to crawl. Only URLs belonging to the same website host names are crawled. You can list up to `3` sitemap URLs. [Detailed below](#site_maps_configuration-block).
+
+### seed_url_configuration Block
+
+The `seedUrlConfiguration` configuration block supports the following arguments:
+
+* `seedUrls` - (Required) The list of seed or starting point URLs of the websites you want to crawl. The list can include a maximum of `100` seed URLs. Array Members: Minimum number of `0` items. Maximum number of `100` items. Length Constraints: Minimum length of `1`. Maximum length of `2048`.
+* `webCrawlerMode` - (Optional) The default mode is set to `hostOnly`. You can choose one of the following modes:
+    * `hostOnly` – crawl only the website host names. For example, if the seed URL is `"abc.example.com"`, then only URLs with host name `"abc.example.com"` are crawled.
+    * `subdomains` – crawl the website host names with subdomains. For example, if the seed URL is `"abc.example.com"`, then `"a.abc.example.com"` and `"b.abc.example.com"` are also crawled.
+    * `everything` – crawl the website host names with subdomains and other domains that the webpages link to.
+
+### site_maps_configuration Block
+
+The `siteMapsConfiguration` configuration block supports the following arguments:
+
+* `siteMaps` - (Required) The list of sitemap URLs of the websites you want to crawl. The list can include a maximum of `3` sitemap URLs.
+
+### custom_document_enrichment_configuration Block
+
+The `customDocumentEnrichmentConfiguration` configuration block supports the following arguments:
+
+* `inlineConfigurations` - (Optional) Configuration information to alter document attributes or metadata fields and content when ingesting documents into Amazon Kendra. Minimum number of `0` items. Maximum number of `100` items. [Detailed below](#inline_configurations-block).
+* `postExtractionHookConfiguration` - (Optional) A block that specifies the configuration information for invoking a Lambda function in AWS Lambda on the structured documents with their metadata and text extracted. You can use a Lambda function to apply advanced logic for creating, modifying, or deleting document metadata and content. For more information, see [Advanced data manipulation](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html#advanced-data-manipulation). [Detailed below](#pre_extraction_hook_configuration-and-post_extraction_hook_configuration-blocks).
+* `preExtractionHookConfiguration` - (Optional) Configuration information for invoking a Lambda function in AWS Lambda on the original or raw documents before extracting their metadata and text. You can use a Lambda function to apply advanced logic for creating, modifying, or deleting document metadata and content. For more information, see [Advanced data manipulation](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html#advanced-data-manipulation). [Detailed below](#pre_extraction_hook_configuration-and-post_extraction_hook_configuration-blocks).
For more information, see [Advanced data manipulation](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html#advanced-data-manipulation). [Detailed below](#pre_extraction_hook_configuration-and-post_extraction_hook_configuration-blocks). +* `roleArn` - (Optional) The Amazon Resource Name (ARN) of a role with permission to run `preExtractionHookConfiguration` and `postExtractionHookConfiguration` for altering document metadata and content during the document ingestion process. For more information, see [IAM roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). + +### inline_configurations Block + +The `inlineConfigurations` configuration block supports the following arguments: + +* `condition` - (Optional) Configuration of the condition used for the target document attribute or metadata field when ingesting documents into Amazon Kendra. See [condition](#condition-block). +* `documentContentDeletion` - (Optional) `true` to delete content if the condition used for the target attribute is met. +* `target` - (Optional) Configuration of the target document attribute or metadata field when ingesting documents into Amazon Kendra. You can also include a value. [Detailed below](#target-block). + +### condition Block + +The `condition` configuration block supports the following arguments: + +* `conditionDocumentAttributeKey` - (Required) The identifier of the document attribute used for the condition. For example, `_source_uri` could be an identifier for the attribute or metadata field that contains source URIs associated with the documents. Amazon Kendra currently does not support `_document_body` as an attribute key used for the condition. +* `conditionOnValue` - (Optional) The value used by the operator. For example, you can specify the value 'financial' for strings in the `_source_uri` field that partially match or contain this value. See [condition_on_value](#condition_on_value-block). +* `operator` - (Required) The condition operator. For example, you can use `Contains` to partially match a string. Valid Values: `GreaterThan` | `GreaterThanOrEquals` | `LessThan` | `LessThanOrEquals` | `Equals` | `NotEquals` | `Contains` | `NotContains` | `Exists` | `NotExists` | `BeginsWith`. + +### target Block + +The `target` configuration block supports the following arguments: + +* `targetDocumentAttributeKey` - (Optional) The identifier of the target document attribute or metadata field. For example, 'Department' could be an identifier for the target attribute or metadata field that includes the department names associated with the documents. +* `targetDocumentAttributeValue` - (Optional) The target value you want to create for the target attribute. For example, 'Finance' could be the target value for the target attribute key 'Department'. See [target_document_attribute_value](#target_document_attribute_value-block). +* `targetDocumentAttributeValueDeletion` - (Optional) `true` to delete the existing target value for your specified target attribute key. You cannot create a target value and set this to `true`. To create a target value (`targetDocumentAttributeValue`), set this to `false`. + +### target_document_attribute_value Block + +The `targetDocumentAttributeValue` configuration block supports the following arguments: + +* `dateValue` - (Optional) A date expressed as an ISO 8601 string. It is important for the time zone to be included in the ISO 8601 date-time format. As of this writing only UTC is supported. For example, `2012-03-25T12:30:10+00:00`. 
+* `longValue` - (Optional) A long integer value. +* `stringListValue` - (Optional) A list of strings. +* `stringValue` - (Optional) A string, such as "department". + +### pre_extraction_hook_configuration and post_extraction_hook_configuration Blocks + +The `preExtractionHookConfiguration` and `postExtractionHookConfiguration` configuration blocks each support the following arguments: + +* `invocationCondition` - (Optional) A block that specifies the condition used for when a Lambda function should be invoked. For example, you can specify a condition that if there are empty date-time values, then Amazon Kendra should invoke a function that inserts the current date-time. See [invocation_condition](#invocation_condition-block). +* `lambdaArn` - (Required) The Amazon Resource Name (ARN) of a Lambda Function that can manipulate your document metadata fields or attributes and content. +* `s3Bucket` - (Required) Stores the original, raw documents or the structured, parsed documents before and after altering them. For more information, see [Data contracts for Lambda functions](https://docs.aws.amazon.com/kendra/latest/dg/custom-document-enrichment.html#cde-data-contracts-lambda). + +### invocation_condition Block + +The `invocationCondition` configuration block supports the following arguments: + +* `conditionDocumentAttributeKey` - (Required) The identifier of the document attribute used for the condition. For example, `_source_uri` could be an identifier for the attribute or metadata field that contains source URIs associated with the documents. Amazon Kendra currently does not support `_document_body` as an attribute key used for the condition. +* `conditionOnValue` - (Optional) The value used by the operator. For example, you can specify the value 'financial' for strings in the `_source_uri` field that partially match or contain this value. See [condition_on_value](#condition_on_value-block). +* `operator` - (Required) The condition operator. For example, you can use `Contains` to partially match a string. Valid Values: `GreaterThan` | `GreaterThanOrEquals` | `LessThan` | `LessThanOrEquals` | `Equals` | `NotEquals` | `Contains` | `NotContains` | `Exists` | `NotExists` | `BeginsWith`. + +### condition_on_value Block + +The `conditionOnValue` configuration block supports the following arguments: + +* `dateValue` - (Optional) A date expressed as an ISO 8601 string. It is important for the time zone to be included in the ISO 8601 date-time format. As of this writing only UTC is supported. For example, `2012-03-25T12:30:10+00:00`. +* `longValue` - (Optional) A long integer value. +* `stringListValue` - (Optional) A list of strings. +* `stringValue` - (Optional) A string, such as "department". + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Data Source. +* `createdAt` - The Unix timestamp of when the Data Source was created. +* `dataSourceId` - The unique identifier of the Data Source. +* `errorMessage` - When the Status field value is `FAILED`, the ErrorMessage field contains a description of the error that caused the Data Source to fail. +* `id` - The unique identifiers of the Data Source and index separated by a slash (`/`). +* `status` - The current status of the Data Source. When the status is `ACTIVE`, the Data Source is ready to use. When the status is `FAILED`, the `errorMessage` field contains the reason that the Data Source failed. +* `updatedAt` - The Unix timestamp of when the Data Source was last updated. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kendra Data Source using the unique identifiers of the data_source and index separated by a slash (`/`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Kendra Data Source using the unique identifiers of the data_source and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_data_source.example 1045d08d-66ef-4882-b3ed-dfb7df183e90/b34dfdf7-1f2b-4704-9581-79e00296845f +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kendra_experience.html.markdown b/website/docs/cdktf/typescript/r/kendra_experience.html.markdown new file mode 100644 index 00000000000..57094573960 --- /dev/null +++ b/website/docs/cdktf/typescript/r/kendra_experience.html.markdown @@ -0,0 +1,132 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_experience" +description: |- + Terraform resource for managing an AWS Kendra Experience. +--- + + + +# Resource: aws_kendra_experience + +Terraform resource for managing an AWS Kendra Experience. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraExperience } from "./.gen/providers/aws/kendra-experience"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraExperience(this, "example", { + configuration: { + contentSourceConfiguration: { + directPutContent: true, + faqIds: [Token.asString(awsKendraFaqExample.faqId)], + }, + userIdentityConfiguration: { + identityAttributeName: + "12345ec453-1546651e-79c4-4554-91fa-00b43ccfa245", + }, + }, + description: "My Kendra Experience", + indexId: Token.asString(awsKendraIndexExample.id), + name: "example", + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +## Argument Reference + +~> **NOTE:** By default, the AWS Kendra API will also set the `configuration.contentSourceConfiguration.directPutContent` parameter to `false` on updates to an existing `awsKendraExperience` resource (e.g. updating the `name`) if it is not already provided. + +The following arguments are required: + +* `indexId` - (Required, Forces new resource) The identifier of the index for your Amazon Kendra experience. +* `name` - (Required) A name for your Amazon Kendra experience. 
+* `roleArn` - (Required) The Amazon Resource Name (ARN) of a role with permission to access `Query API`, `QuerySuggestions API`, `SubmitFeedback API`, and `AWS SSO` that stores your user and group information. For more information, see [IAM roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). + +The following arguments are optional: + +* `description` - (Optional, Forces new resource if removed) A description for your Amazon Kendra experience. +* `configuration` - (Optional) Configuration information for your Amazon Kendra experience. Terraform will only perform drift detection of its value when present in a configuration. [Detailed below](#configuration). + +### `configuration` + +~> **NOTE:** By default, the AWS Kendra API sets the `contentSourceConfiguration.directPutContent` parameter to `false` if it is not provided. + +The `configuration` configuration block supports the following arguments: + +* `contentSourceConfiguration` - (Optional, Required if `userIdentityConfiguration` not provided) The identifiers of your data sources and FAQs. Or, you can specify that you want to use documents indexed via the `BatchPutDocument API`. Terraform will only perform drift detection of its value when present in a configuration. [Detailed below](#content_source_configuration). +* `userIdentityConfiguration` - (Optional, Required if `contentSourceConfiguration` not provided) The AWS SSO field name that contains the identifiers of your users, such as their emails. [Detailed below](#user_identity_configuration). + +### `contentSourceConfiguration` + +The `contentSourceConfiguration` configuration block supports the following arguments: + +* `dataSourceIds` - (Optional) The identifiers of the data sources you want to use for your Amazon Kendra experience. Maximum number of 100 items. +* `directPutContent` - (Optional) Whether to use documents you indexed directly using the `BatchPutDocument API`. Defaults to `false`. +* `faqIds` - (Optional) The identifiers of the FAQs that you want to use for your Amazon Kendra experience. Maximum number of 100 items. + +### `userIdentityConfiguration` + +The `userIdentityConfiguration` configuration block supports the following argument: + +* `identityAttributeName` - (Required) The AWS SSO field name that contains the identifiers of your users, such as their emails. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifiers of the experience and index separated by a slash (`/`). +* `arn` - ARN of the Experience. +* `endpoints` - Shows the endpoint URLs for your Amazon Kendra experiences. The URLs are unique and fully hosted by AWS. + * `endpoint` - The endpoint of your Amazon Kendra experience. + * `endpointType` - The type of endpoint for your Amazon Kendra experience. +* `experienceId` - The unique identifier of the experience. +* `status` - The current processing status of your Amazon Kendra experience. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kendra Experience using the unique identifiers of the experience and index separated by a slash (`/`). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Kendra Experience using the unique identifiers of the experience and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_experience.example 1045d08d-66ef-4882-b3ed-dfb7df183e90/b34dfdf7-1f2b-4704-9581-79e00296845f +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kendra_faq.html.markdown b/website/docs/cdktf/typescript/r/kendra_faq.html.markdown new file mode 100644 index 00000000000..ea8272b40d5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/kendra_faq.html.markdown @@ -0,0 +1,169 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_faq" +description: |- + Terraform resource for managing an AWS Kendra FAQ. +--- + + + +# Resource: aws_kendra_faq + +Terraform resource for managing an AWS Kendra FAQ. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraFaq } from "./.gen/providers/aws/kendra-faq"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraFaq(this, "example", { + indexId: Token.asString(awsKendraIndexExample.id), + name: "Example", + roleArn: Token.asString(awsIamRoleExample.arn), + s3Path: { + bucket: Token.asString(awsS3BucketExample.id), + key: Token.asString(awsS3ObjectExample.key), + }, + tags: { + Name: "Example Kendra Faq", + }, + }); + } +} + +``` + +### With File Format + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraFaq } from "./.gen/providers/aws/kendra-faq"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraFaq(this, "example", { + fileFormat: "CSV", + indexId: Token.asString(awsKendraIndexExample.id), + name: "Example", + roleArn: Token.asString(awsIamRoleExample.arn), + s3Path: { + bucket: Token.asString(awsS3BucketExample.id), + key: Token.asString(awsS3ObjectExample.key), + }, + }); + } +} + +``` + +### With Language Code + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { KendraFaq } from "./.gen/providers/aws/kendra-faq"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraFaq(this, "example", { + indexId: Token.asString(awsKendraIndexExample.id), + languageCode: "en", + name: "Example", + roleArn: Token.asString(awsIamRoleExample.arn), + s3Path: { + bucket: Token.asString(awsS3BucketExample.id), + key: Token.asString(awsS3ObjectExample.key), + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `indexId` - (Required, Forces new resource) The identifier of the index for a FAQ. +* `name` - (Required, Forces new resource) The name that should be associated with the FAQ. +* `roleArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of a role with permission to access the S3 bucket that contains the FAQs. For more information, see [IAM Roles for Amazon Kendra](https://docs.aws.amazon.com/kendra/latest/dg/iam-roles.html). +* `s3Path` - (Required, Forces new resource) The S3 location of the FAQ input data. Detailed below. + +The `s3Path` configuration block supports the following arguments: + +* `bucket` - (Required, Forces new resource) The name of the S3 bucket that contains the file. +* `key` - (Required, Forces new resource) The name of the file. + +The following arguments are optional: + +* `description` - (Optional, Forces new resource) The description for a FAQ. +* `fileFormat` - (Optional, Forces new resource) The file format used by the input files for the FAQ. Valid Values are `CSV`, `CSV_WITH_HEADER`, `JSON`. +* `languageCode` - (Optional, Forces new resource) The code for a language. This shows a supported language for the FAQ document. English is supported by default. For more information on supported languages, including their codes, see [Adding documents in languages other than English](https://docs.aws.amazon.com/kendra/latest/dg/in-adding-languages.html). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the FAQ. +* `createdAt` - The Unix datetime that the FAQ was created. +* `errorMessage` - When the Status field value is `FAILED`, this contains a message that explains why. +* `faqId` - The identifier of the FAQ. +* `id` - The unique identifiers of the FAQ and index separated by a slash (`/`). +* `status` - The status of the FAQ. It is ready to use when the status is `ACTIVE`. +* `updatedAt` - The date and time that the FAQ was last updated. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsKendraFaq` using the unique identifiers of the FAQ and index separated by a slash (`/`). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsKendraFaq` using the unique identifiers of the FAQ and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_faq.example faq-123456780/idx-8012925589 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kendra_index.html.markdown b/website/docs/cdktf/typescript/r/kendra_index.html.markdown new file mode 100644 index 00000000000..bf5e90c7574 --- /dev/null +++ b/website/docs/cdktf/typescript/r/kendra_index.html.markdown @@ -0,0 +1,818 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_index" +description: |- + Provides an Amazon Kendra Index resource. +--- + + + +# Resource: aws_kendra_index + +Provides an Amazon Kendra Index resource. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraIndex } from "./.gen/providers/aws/kendra-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraIndex(this, "example", { + description: "example", + edition: "DEVELOPER_EDITION", + name: "example", + roleArn: thisVar.arn, + tags: { + Key1: "Value1", + }, + }); + } +} + +``` + +### With capacity units + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraIndex } from "./.gen/providers/aws/kendra-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraIndex(this, "example", { + capacityUnits: { + queryCapacityUnits: 2, + storageCapacityUnits: 2, + }, + edition: "DEVELOPER_EDITION", + name: "example", + roleArn: thisVar.arn, + }); + } +} + +``` + +### With server side encryption configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { KendraIndex } from "./.gen/providers/aws/kendra-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraIndex(this, "example", { + name: "example", + roleArn: thisVar.arn, + serverSideEncryptionConfiguration: { + kmsKeyId: Token.asString(dataAwsKmsKeyThis.arn), + }, + }); + } +} + +``` + +### With user group resolution configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraIndex } from "./.gen/providers/aws/kendra-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraIndex(this, "example", { + name: "example", + roleArn: thisVar.arn, + userGroupResolutionConfiguration: { + userGroupResolutionMode: "AWS_SSO", + }, + }); + } +} + +``` + +### With Document Metadata Configuration Updates + +#### Specifying the predefined elements + +Refer to [Amazon Kendra documentation on built-in document fields](https://docs.aws.amazon.com/kendra/latest/dg/hiw-index.html#index-reserved-fields) for more information. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraIndex } from "./.gen/providers/aws/kendra-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraIndex(this, "example", { + documentMetadataConfigurationUpdates: [ + { + name: "_authors", + relevance: { + importance: 1, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: false, + }, + type: "STRING_LIST_VALUE", + }, + { + name: "_category", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_created_at", + relevance: { + duration: "25920000s", + freshness: false, + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "DATE_VALUE", + }, + { + name: "_data_source_id", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_document_title", + relevance: { + importance: 2, + valuesImportanceMap: {}, + }, + search: { + displayable: true, + facetable: false, + searchable: true, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_excerpt_page_number", + relevance: { + importance: 2, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: false, + }, + type: "LONG_VALUE", + }, + { + name: "_faq_id", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_file_type", + relevance: { + 
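// `importance` accepts values 1 through 10; larger values give this field more of a boost in search results +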
importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_language_code", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_last_updated_at", + relevance: { + duration: "25920000s", + freshness: false, + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "DATE_VALUE", + }, + { + name: "_source_uri", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: true, + facetable: false, + searchable: false, + sortable: false, + }, + type: "STRING_VALUE", + }, + { + name: "_tenant_id", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_version", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_view_count", + relevance: { + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "LONG_VALUE", + }, + ], + name: "example", + roleArn: thisVar.arn, + }); + } +} + +``` + +#### Appending additional elements + +The example below shows additional elements with names `example-string-value`, `example-long-value`, `example-string-list-value`, and `example-date-value`, representing the 4 types `STRING_VALUE`, `LONG_VALUE`, `STRING_LIST_VALUE`, and `DATE_VALUE` respectively. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { KendraIndex } from "./.gen/providers/aws/kendra-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraIndex(this, "example", { + documentMetadataConfigurationUpdates: [ + { + name: "_authors", + relevance: { + importance: 1, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: false, + }, + type: "STRING_LIST_VALUE", + }, + { + name: "_category", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_created_at", + relevance: { + duration: "25920000s", + freshness: false, + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "DATE_VALUE", + }, + { + name: "_data_source_id", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_document_title", + relevance: { + importance: 2, + valuesImportanceMap: {}, + }, + search: { + displayable: true, + facetable: false, + searchable: true, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_excerpt_page_number", + relevance: { + importance: 2, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: false, + }, + type: "LONG_VALUE", + }, + { + name: "_faq_id", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_file_type", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_language_code", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_last_updated_at", + relevance: { + duration: "25920000s", + freshness: false, + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "DATE_VALUE", + }, + { + name: "_source_uri", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: true, + facetable: false, + searchable: false, + sortable: false, + }, + type: "STRING_VALUE", + }, + { + name: "_tenant_id", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_version", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "_view_count", + relevance: { + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: false, + facetable: false, + searchable: false, + sortable: true, + }, + type: "LONG_VALUE", + }, + { + name: "example-string-value", + relevance: { + importance: 1, + valuesImportanceMap: {}, + }, + search: { + displayable: true, + facetable: true, + searchable: 
true, + sortable: true, + }, + type: "STRING_VALUE", + }, + { + name: "example-long-value", + relevance: { + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: true, + facetable: true, + searchable: false, + sortable: true, + }, + type: "LONG_VALUE", + }, + { + name: "example-string-list-value", + relevance: { + importance: 1, + }, + search: { + displayable: true, + facetable: true, + searchable: true, + sortable: false, + }, + type: "STRING_LIST_VALUE", + }, + { + name: "example-date-value", + relevance: { + duration: "25920000s", + freshness: false, + importance: 1, + rankOrder: "ASCENDING", + }, + search: { + displayable: true, + facetable: true, + searchable: false, + sortable: false, + }, + type: "DATE_VALUE", + }, + ], + name: "example", + roleArn: thisVar.arn, + }); + } +} + +``` + +### With JSON token type configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraIndex } from "./.gen/providers/aws/kendra-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraIndex(this, "example", { + name: "example", + roleArn: thisVar.arn, + userTokenConfigurations: { + jsonTokenTypeConfiguration: { + groupAttributeField: "groups", + userNameAttributeField: "username", + }, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `capacityUnits` - (Optional) A block that sets the number of additional document storage and query capacity units that should be used by the index. [Detailed below](#capacity_units). +* `description` - (Optional) The description of the Index. +* `documentMetadataConfigurationUpdates` - (Optional) One or more blocks that specify the configuration settings for any metadata applied to the documents in the index. Minimum number of 0 items. Maximum number of 500 items. If specified, you must define all elements, including those that are provided by default. These index fields are documented at [Amazon Kendra Index documentation](https://docs.aws.amazon.com/kendra/latest/dg/hiw-index.html). For an example resource that defines these default index fields, refer to the [default example above](#specifying-the-predefined-elements). For an example resource that appends additional index fields, refer to the [append example above](#appending-additional-elements). All arguments for each block must be specified. Note that blocks cannot be removed since index fields cannot be deleted. This argument is [detailed below](#document_metadata_configuration_updates). +* `edition` - (Optional) The Amazon Kendra edition to use for the index. Choose `DEVELOPER_EDITION` for indexes intended for development, testing, or proof of concept. Use `ENTERPRISE_EDITION` for your production databases. Once you set the edition for an index, it can't be changed. Defaults to `ENTERPRISE_EDITION`. +* `name` - (Required) Specifies the name of the Index. +* `roleArn` - (Required) An AWS Identity and Access Management (IAM) role that gives Amazon Kendra permissions to access your Amazon CloudWatch logs and metrics. This is also the role you use when you call the `BatchPutDocument` API to index documents from an Amazon S3 bucket. 
+* `serverSideEncryptionConfiguration` - (Optional) A block that specifies the identifier of the AWS KMS customer managed key (CMK) that's used to encrypt data indexed by Amazon Kendra. Amazon Kendra doesn't support asymmetric CMKs. [Detailed below](#server_side_encryption_configuration). +* `userContextPolicy` - (Optional) The user context policy. Valid values are `ATTRIBUTE_FILTER` or `USER_TOKEN`. For more information, refer to [UserContextPolicy](https://docs.aws.amazon.com/kendra/latest/APIReference/API_CreateIndex.html#kendra-CreateIndex-request-UserContextPolicy). Defaults to `ATTRIBUTE_FILTER`. +* `userGroupResolutionConfiguration` - (Optional) A block that enables fetching access levels of groups and users from an AWS Single Sign-On identity source. To configure this, see [UserGroupResolutionConfiguration](https://docs.aws.amazon.com/kendra/latest/dg/API_UserGroupResolutionConfiguration.html). [Detailed below](#user_group_resolution_configuration). +* `userTokenConfigurations` - (Optional) A block that specifies the user token configuration. [Detailed below](#user_token_configurations). +* `tags` - (Optional) Tags to apply to the Index. If configured with a provider +[`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### `capacityUnits` + +A `capacityUnits` block supports the following arguments: + +* `queryCapacityUnits` - (Required) The amount of extra query capacity for an index and GetQuerySuggestions capacity. For more information, refer to [QueryCapacityUnits](https://docs.aws.amazon.com/kendra/latest/dg/API_CapacityUnitsConfiguration.html#Kendra-Type-CapacityUnitsConfiguration-QueryCapacityUnits). +* `storageCapacityUnits` - (Required) The amount of extra storage capacity for an index. A single capacity unit provides 30 GB of storage space or 100,000 documents, whichever is reached first. Minimum value of 0. + +### `documentMetadataConfigurationUpdates` + +A `documentMetadataConfigurationUpdates` block supports the following arguments: + +* `name` - (Required) The name of the index field. Minimum length of 1. Maximum length of 30. +* `relevance` - (Required) A block that provides manual tuning parameters to determine how the field affects the search results. [Detailed below](#relevance) +* `search` - (Required) A block that provides information about how the field is used during a search. [Detailed below](#search) +* `type` - (Required) The data type of the index field. Valid values are `STRING_VALUE`, `STRING_LIST_VALUE`, `LONG_VALUE`, `DATE_VALUE`. + +#### `relevance` + +A `relevance` block supports the following attributes: + +* `duration` - (Required if type is `DATE_VALUE`) Specifies the time period that the boost applies to. For more information, refer to [Duration](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-Duration). +* `freshness` - (Required if type is `DATE_VALUE`) Indicates that this field determines how "fresh" a document is. For more information, refer to [Freshness](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-Freshness). +* `importance` - (Required for all types) The relative importance of the field in the search. Larger numbers provide more of a boost than smaller numbers. Minimum value of 1. Maximum value of 10. 
+* `rankOrder` - (Required if type is `DATE_VALUE` or `LONG_VALUE`) Determines how values should be interpreted. For more information, refer to [RankOrder](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-RankOrder). +* `valuesImportanceMap` - (Required if type is `STRING_VALUE`) A list of values that should be given a different boost when they appear in the result list. For more information, refer to [ValueImportanceMap](https://docs.aws.amazon.com/kendra/latest/dg/API_Relevance.html#Kendra-Type-Relevance-ValueImportanceMap). + +#### `search` + +A `search` block supports the following attributes: + +* `displayable` - (Required) Determines whether the field is returned in the query response. The default is `true`. +* `facetable` - (Required) Indicates that the field can be used to create search facets, a count of results for each value in the field. The default is `false`. +* `searchable` - (Required) Determines whether the field is used in the search. If the Searchable field is true, you can use relevance tuning to manually tune how Amazon Kendra weights the field in the search. The default is `true` for `string` fields and `false` for `number` and `date` fields. +* `sortable` - (Required) Determines whether the field can be used to sort the results of a query. If you specify sorting on a field that does not have Sortable set to true, Amazon Kendra returns an exception. The default is `false`. + +### `serverSideEncryptionConfiguration` + +A `serverSideEncryptionConfiguration` block supports the following arguments: + +* `kmsKeyId` - (Optional) The identifier of the AWS KMS customer master key (CMK). Amazon Kendra doesn't support asymmetric CMKs. + +### `userGroupResolutionConfiguration` + +A `userGroupResolutionConfiguration` block supports the following arguments: + +* `userGroupResolutionMode` - (Required) The identity store provider (mode) you want to use to fetch access levels of groups and users. AWS Single Sign-On is currently the only available mode. Your users and groups must exist in an AWS SSO identity source in order to use this mode. Valid Values are `AWS_SSO` or `NONE`. + +### `userTokenConfigurations` + +A `userTokenConfigurations` block supports the following arguments: + +* `jsonTokenTypeConfiguration` - (Optional) A block that specifies the information about the JSON token type configuration. [Detailed below](#json_token_type_configuration). +* `jwtTokenTypeConfiguration` - (Optional) A block that specifies the information about the JWT token type configuration. [Detailed below](#jwt_token_type_configuration). + +#### `jsonTokenTypeConfiguration` + +A `jsonTokenTypeConfiguration` block supports the following arguments: + +* `groupAttributeField` - (Required) The group attribute field. Minimum length of 1. Maximum length of 2048. +* `userNameAttributeField` - (Required) The user name attribute field. Minimum length of 1. Maximum length of 2048. + +#### `jwtTokenTypeConfiguration` + +A `jwtTokenTypeConfiguration` block supports the following arguments: + +* `claimRegex` - (Optional) The regular expression that identifies the claim. Minimum length of 1. Maximum length of 100. +* `groupAttributeField` - (Optional) The group attribute field. Minimum length of 1. Maximum length of 100. +* `issuer` - (Optional) The issuer of the token. Minimum length of 1. Maximum length of 65. +* `keyLocation` - (Required) The location of the key. 
Valid values are `URL` or `SECRET_MANAGER`. +* `secretsManagerArn` - (Optional) The Amazon Resource Name (ARN) of the secret. +* `url` - (Optional) The signing key URL. Valid pattern is `^(https?|ftp|file):\/\/([^\s]*)` +* `userNameAttributeField` - (Optional) The user name attribute field. Minimum length of 1. Maximum length of 100. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `40M`) +* `delete` - (Default `40M`) +* `update` - (Default `40M`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Index. +* `createdAt` - The Unix datetime that the index was created. +* `errorMessage` - When the Status field value is `FAILED`, this contains a message that explains why. +* `id` - The identifier of the Index. +* `indexStatistics` - A block that provides information about the number of FAQ questions and answers and the number of text documents indexed. [Detailed below](#index_statistics). +* `status` - The current status of the index. When the value is `ACTIVE`, the index is ready for use. If the Status field value is `FAILED`, the `errorMessage` field contains a message that explains why. +* `updatedAt` - The Unix datetime that the index was last updated. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +### `indexStatistics` + +An `indexStatistics` block supports the following attributes: + +* `faqStatistics` - A block that specifies the number of question and answer topics in the index. [Detailed below](#faq_statistics). +* `textDocumentStatistics` - A block that specifies the number of text documents indexed. [Detailed below](#text_document_statistics). + +#### `faqStatistics` + +A `faqStatistics` block supports the following attributes: + +* `indexedQuestionAnswersCount` - The total number of FAQ questions and answers contained in the index. + +#### `textDocumentStatistics` + +A `textDocumentStatistics` block supports the following attributes: + +* `indexedTextBytes` - The total size, in bytes, of the indexed documents. +* `indexedTextDocumentsCount` - The number of text documents indexed. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Amazon Kendra Index using its `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an Amazon Kendra Index using its `id`. 
For example: + +```console +% terraform import aws_kendra_index.example 12345678-1234-5678-9123-123456789123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kendra_query_suggestions_block_list.html.markdown b/website/docs/cdktf/typescript/r/kendra_query_suggestions_block_list.html.markdown new file mode 100644 index 00000000000..4c6bd17ef44 --- /dev/null +++ b/website/docs/cdktf/typescript/r/kendra_query_suggestions_block_list.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_query_suggestions_block_list" +description: |- + Terraform resource for managing an AWS Kendra block list used for query suggestions for an index +--- + + + +# Resource: aws_kendra_query_suggestions_block_list + +Use the `awsKendraQuerySuggestionsBlockList` resource to manage an AWS Kendra block list used for query suggestions for an index. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraQuerySuggestionsBlockList } from "./.gen/providers/aws/kendra-query-suggestions-block-list"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraQuerySuggestionsBlockList(this, "example", { + indexId: Token.asString(awsKendraIndexExample.id), + name: "Example", + roleArn: Token.asString(awsIamRoleExample.arn), + sourceS3Path: { + bucket: Token.asString(awsS3BucketExample.id), + key: "example/suggestions.txt", + }, + tags: { + Name: "Example Kendra Index", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `indexId` - (Required, Forces New Resource) Identifier of the index for a block list. +* `name` - (Required) Name for the block list. +* `roleArn` - (Required) IAM (Identity and Access Management) role used to access the block list text file in S3. +* `sourceS3Path` - (Required) S3 path where your block list text file is located. See details below. + +The `sourceS3Path` configuration block supports the following arguments: + +* `bucket` - (Required) Name of the S3 bucket that contains the file. +* `key` - (Required) Name of the file. + +The following arguments are optional: + +* `description` - (Optional) Description for a block list. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block), tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the block list. +* `querySuggestionsBlockListId` - Unique identifier of the block list. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): 
+ +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the `awsKendraQuerySuggestionsBlockList` resource using the unique identifiers of the block list and index separated by a slash (`/`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the `awsKendraQuerySuggestionsBlockList` resource using the unique identifiers of the block list and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_query_suggestions_block_list.example blocklist-123456780/idx-8012925589 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kendra_thesaurus.html.markdown b/website/docs/cdktf/typescript/r/kendra_thesaurus.html.markdown new file mode 100644 index 00000000000..e80ffc0b05a --- /dev/null +++ b/website/docs/cdktf/typescript/r/kendra_thesaurus.html.markdown @@ -0,0 +1,104 @@ +--- +subcategory: "Kendra" +layout: "aws" +page_title: "AWS: aws_kendra_thesaurus" +description: |- + Terraform resource for managing an AWS Kendra Thesaurus. +--- + + + +# Resource: aws_kendra_thesaurus + +Terraform resource for managing an AWS Kendra Thesaurus. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KendraThesaurus } from "./.gen/providers/aws/kendra-thesaurus"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KendraThesaurus(this, "example", { + indexId: Token.asString(awsKendraIndexExample.id), + name: "Example", + roleArn: Token.asString(awsIamRoleExample.arn), + sourceS3Path: { + bucket: Token.asString(awsS3BucketExample.id), + key: Token.asString(awsS3ObjectExample.key), + }, + tags: { + Name: "Example Kendra Thesaurus", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `indexId`- (Required, Forces new resource) The identifier of the index for a thesaurus. +* `name` - (Required) The name for the thesaurus. +* `roleArn` - (Required) The IAM (Identity and Access Management) role used to access the thesaurus file in S3. +* `sourceS3Path` - (Required) The S3 path where your thesaurus file sits in S3. Detailed below. + +The `sourceS3Path` configuration block supports the following arguments: + +* `bucket` - (Required) The name of the S3 bucket that contains the file. +* `key` - (Required) The name of the file. + +The following arguments are optional: + +* `description` - (Optional) The description for a thesaurus. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the thesaurus. +* `id` - The unique identifiers of the thesaurus and index separated by a slash (`/`). +* `status` - The current status of the thesaurus. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsKendraThesaurus` using the unique identifiers of the thesaurus and index separated by a slash (`/`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsKendraThesaurus` using the unique identifiers of the thesaurus and index separated by a slash (`/`). For example: + +```console +% terraform import aws_kendra_thesaurus.example thesaurus-123456780/idx-8012925589 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/key_pair.html.markdown b/website/docs/cdktf/typescript/r/key_pair.html.markdown new file mode 100644 index 00000000000..5ea9a2ed337 --- /dev/null +++ b/website/docs/cdktf/typescript/r/key_pair.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_key_pair" +description: |- + Provides a Key Pair resource. Currently this supports importing an existing key pair but not creating a new key pair. +--- + + + +# Resource: aws_key_pair + +Provides an [EC2 key pair](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html) resource. A key pair is used to control login access to EC2 instances. + +Currently this resource requires an existing user-supplied key pair. This key pair's public key will be registered with AWS to allow logging in to EC2 instances. + +When importing an existing key pair, the public key material may be in any format supported by AWS. Supported formats (per the [AWS documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html#how-to-generate-your-own-key-and-import-it-to-aws)) are: + +* OpenSSH public key format (the format in ~/.ssh/authorized_keys) +* Base64 encoded DER format +* SSH public key file format as specified in RFC 4716 + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { KeyPair } from "./.gen/providers/aws/key-pair"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KeyPair(this, "deployer", { + keyName: "deployer-key", + publicKey: + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 email@example.com", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `keyName` - (Optional) The name for the key pair. If neither `keyName` nor `keyNamePrefix` is provided, Terraform will create a unique key name using the prefix `terraform`. +* `keyNamePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `keyName`. If neither `keyName` nor `keyNamePrefix` is provided, Terraform will create a unique key name using the prefix `terraform`. +* `publicKey` - (Required) The public key material. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The key pair name. +* `arn` - The key pair ARN. +* `keyName` - The key pair name. +* `keyPairId` - The key pair ID. +* `keyType` - The type of key pair. +* `fingerprint` - The MD5 public key fingerprint as specified in section 4 of RFC 4716. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Key Pairs using the `keyName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Key Pairs using the `keyName`. For example: + +```console +% terraform import aws_key_pair.deployer deployer-key +``` + +~> **NOTE:** The AWS API does not include the public key in the response, so `terraform apply` will attempt to replace the key pair. There is currently no supported workaround for this limitation. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/keyspaces_keyspace.html.markdown b/website/docs/cdktf/typescript/r/keyspaces_keyspace.html.markdown new file mode 100644 index 00000000000..2d9ae2a3599 --- /dev/null +++ b/website/docs/cdktf/typescript/r/keyspaces_keyspace.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Keyspaces (for Apache Cassandra)" +layout: "aws" +page_title: "AWS: aws_keyspaces_keyspace" +description: |- + Provides a Keyspaces Keyspace. 
+---
+
+
+
+# Resource: aws_keyspaces_keyspace
+
+Provides a Keyspaces Keyspace.
+
+More information about keyspaces can be found in the [Keyspaces User Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/what-is-keyspaces.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KeyspacesKeyspace } from "./.gen/providers/aws/keyspaces-keyspace";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KeyspacesKeyspace(this, "example", {
+      name: "my_keyspace",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required, Forces new resource) The name of the keyspace to be created.
+
+The following arguments are optional:
+
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the keyspace.
+* `arn` - The ARN of the keyspace.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `1m`)
+- `delete` - (Default `1m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a keyspace using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a keyspace using the `name`. For example:
+
+```console
+% terraform import aws_keyspaces_keyspace.example my_keyspace
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/keyspaces_table.html.markdown b/website/docs/cdktf/typescript/r/keyspaces_table.html.markdown
new file mode 100644
index 00000000000..f2f18d9ed48
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/keyspaces_table.html.markdown
@@ -0,0 +1,161 @@
+---
+subcategory: "Keyspaces (for Apache Cassandra)"
+layout: "aws"
+page_title: "AWS: aws_keyspaces_table"
+description: |-
+  Provides a Keyspaces Table.
+---
+
+
+
+# Resource: aws_keyspaces_table
+
+Provides a Keyspaces Table.
+
+More information about Keyspaces tables can be found in the [Keyspaces Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/working-with-tables.html).
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KeyspacesTable } from "./.gen/providers/aws/keyspaces-table"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KeyspacesTable(this, "example", { + keyspaceName: Token.asString(awsKeyspacesKeyspaceExample.name), + schemaDefinition: { + column: [ + { + name: "Message", + type: "ASCII", + }, + ], + partitionKey: [ + { + name: "Message", + }, + ], + }, + tableName: "my_table", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `keyspaceName` - (Required) The name of the keyspace that the table is going to be created in. +* `tableName` - (Required) The name of the table. + +The following arguments are optional: + +* `capacitySpecification` - (Optional) Specifies the read/write throughput capacity mode for the table. +* `clientSideTimestamps` - (Optional) Enables client-side timestamps for the table. By default, the setting is disabled. +* `comment` - (Optional) A description of the table. +* `defaultTimeToLive` - (Optional) The default Time to Live setting in seconds for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/TTL-how-it-works.html#ttl-howitworks_default_ttl). +* `encryptionSpecification` - (Optional) Specifies how the encryption key for encryption at rest is managed for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/EncryptionAtRest.html). +* `pointInTimeRecovery` - (Optional) Specifies if point-in-time recovery is enabled or disabled for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/PointInTimeRecovery.html). +* `schemaDefinition` - (Optional) Describes the schema of the table. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `ttl` - (Optional) Enables Time to Live custom settings for the table. More information can be found in the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/TTL.html). + +The `capacitySpecification` object takes the following arguments: + +* `readCapacityUnits` - (Optional) The throughput capacity specified for read operations defined in read capacity units (RCUs). +* `throughputMode` - (Optional) The read/write throughput capacity mode for a table. Valid values: `payPerRequest`, `provisioned`. The default value is `payPerRequest`. +* `writeCapacityUnits` - (Optional) The throughput capacity specified for write operations defined in write capacity units (WCUs). + +The `clientSideTimestamps` object takes the following arguments: + +* `status` - (Required) Shows how to enable client-side timestamps settings for the specified table. Valid values: `enabled`. + +The `comment` object takes the following arguments: + +* `message` - (Required) A description of the table. 
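+
+As a hedged illustration only (this block is not generated by `cdktf convert`; the keyspace, table, and column names are hypothetical, and the enum values are written as raw API strings), several of the blocks documented in this section, including the `clusteringKey` and `staticColumn` blocks described below, can be combined like so:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { KeyspacesTable } from "./.gen/providers/aws/keyspaces-table";
+class KeyspacesTableSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KeyspacesTable(this, "orders", {
+      keyspaceName: "my_keyspace",
+      tableName: "orders",
+      // Provisioned throughput instead of the default pay-per-request mode.
+      capacitySpecification: {
+        throughputMode: "PROVISIONED",
+        readCapacityUnits: 5,
+        writeCapacityUnits: 5,
+      },
+      comment: {
+        message: "Orders, partitioned by customer.",
+      },
+      schemaDefinition: {
+        // Every key column must also be declared as a regular column.
+        column: [
+          { name: "customer_id", type: "text" },
+          { name: "order_id", type: "text" },
+          { name: "region", type: "text" },
+        ],
+        partitionKey: [{ name: "customer_id" }],
+        clusteringKey: [{ name: "order_id", orderBy: "ASC" }],
+        staticColumn: [{ name: "region" }],
+      },
+    });
+  }
+}
+```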
+
+The `encryptionSpecification` object takes the following arguments:
+
+* `kmsKeyIdentifier` - (Optional) The Amazon Resource Name (ARN) of the customer managed KMS key.
+* `type` - (Optional) The encryption option specified for the table. Valid values: `awsOwnedKmsKey`, `customerManagedKmsKey`. The default value is `awsOwnedKmsKey`.
+
+The `pointInTimeRecovery` object takes the following arguments:
+
+* `status` - (Optional) Valid values: `enabled`, `disabled`. The default value is `disabled`.
+
+The `schemaDefinition` object takes the following arguments:
+
+* `column` - (Required) The regular columns of the table.
+* `partitionKey` - (Required) The columns that are part of the partition key of the table.
+* `clusteringKey` - (Optional) The columns that are part of the clustering key of the table.
+* `staticColumn` - (Optional) The columns that have been defined as `static`. Static columns store values that are shared by all rows in the same partition.
+
+The `column` object takes the following arguments:
+
+* `name` - (Required) The name of the column.
+* `type` - (Required) The data type of the column. See the [Developer Guide](https://docs.aws.amazon.com/keyspaces/latest/devguide/cql.elements.html#cql.data-types) for a list of available data types.
+
+The `partitionKey` object takes the following arguments:
+
+* `name` - (Required) The name of the partition key column.
+
+The `clusteringKey` object takes the following arguments:
+
+* `name` - (Required) The name of the clustering key column.
+* `orderBy` - (Required) The order modifier. Valid values: `asc`, `desc`.
+
+The `staticColumn` object takes the following arguments:
+
+* `name` - (Required) The name of the static column.
+
+The `ttl` object takes the following arguments:
+
+* `status` - (Optional) Valid values: `enabled`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the table.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `30m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a table using the `keyspaceName` and `tableName` separated by `/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a table using the `keyspaceName` and `tableName` separated by `/`.
For example:
+
+```console
+% terraform import aws_keyspaces_table.example my_keyspace/my_table
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kinesis_analytics_application.html.markdown b/website/docs/cdktf/typescript/r/kinesis_analytics_application.html.markdown
new file mode 100644
index 00000000000..c556b26d64f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kinesis_analytics_application.html.markdown
@@ -0,0 +1,413 @@
+---
+subcategory: "Kinesis Analytics"
+layout: "aws"
+page_title: "AWS: aws_kinesis_analytics_application"
+description: |-
+  Provides an AWS Kinesis Analytics Application
+---
+
+
+
+# Resource: aws_kinesis_analytics_application
+
+Provides a Kinesis Analytics Application resource. Kinesis Analytics is a managed service that
+allows processing and analyzing streaming data using standard SQL.
+
+For more details, see the [Amazon Kinesis Analytics Documentation][1].
+
+-> **Note:** To manage Amazon Kinesis Data Analytics for Apache Flink applications, use the [`awsKinesisanalyticsv2Application`](/docs/providers/aws/r/kinesisanalyticsv2_application.html) resource.
+
+## Example Usage
+
+### Kinesis Stream Input
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KinesisAnalyticsApplication } from "./.gen/providers/aws/kinesis-analytics-application";
+import { KinesisStream } from "./.gen/providers/aws/kinesis-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const testStream = new KinesisStream(this, "test_stream", {
+      name: "terraform-kinesis-test",
+      shardCount: 1,
+    });
+    new KinesisAnalyticsApplication(this, "test_application", {
+      inputs: {
+        kinesisStream: {
+          resourceArn: testStream.arn,
+          roleArn: test.arn,
+        },
+        namePrefix: "test_prefix",
+        parallelism: {
+          count: 1,
+        },
+        schema: {
+          recordColumns: [
+            {
+              mapping: "$.test",
+              name: "test",
+              sqlType: "VARCHAR(8)",
+            },
+          ],
+          recordEncoding: "UTF-8",
+          recordFormat: {
+            mappingParameters: {
+              json: {
+                recordRowPath: "$",
+              },
+            },
+          },
+        },
+      },
+      name: "kinesis-analytics-application-test",
+    });
+  }
+}
+
+```
+
+### Starting An Application
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { CloudwatchLogStream } from "./.gen/providers/aws/cloudwatch-log-stream"; +import { KinesisAnalyticsApplication } from "./.gen/providers/aws/kinesis-analytics-application"; +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +import { KinesisStream } from "./.gen/providers/aws/kinesis-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "analytics", + }); + const awsCloudwatchLogStreamExample = new CloudwatchLogStream( + this, + "example_1", + { + logGroupName: example.name, + name: "example-kinesis-application", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchLogStreamExample.overrideLogicalId("example"); + const awsKinesisFirehoseDeliveryStreamExample = + new KinesisFirehoseDeliveryStream(this, "example_2", { + destination: "extended_s3", + extendedS3Configuration: { + bucketArn: Token.asString(awsS3BucketExample.arn), + roleArn: Token.asString(awsIamRoleExample.arn), + }, + name: "example-kinesis-delivery-stream", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKinesisFirehoseDeliveryStreamExample.overrideLogicalId("example"); + const awsKinesisStreamExample = new KinesisStream(this, "example_3", { + name: "example-kinesis-stream", + shardCount: 1, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKinesisStreamExample.overrideLogicalId("example"); + new KinesisAnalyticsApplication(this, "test", { + cloudwatchLoggingOptions: { + logStreamArn: Token.asString(awsCloudwatchLogStreamExample.arn), + roleArn: Token.asString(awsIamRoleExample.arn), + }, + inputs: { + kinesisStream: { + resourceArn: Token.asString(awsKinesisStreamExample.arn), + roleArn: Token.asString(awsIamRoleExample.arn), + }, + namePrefix: "example_prefix", + schema: { + recordColumns: [ + { + name: "COLUMN_1", + sqlType: "INTEGER", + }, + ], + recordFormat: { + mappingParameters: { + csv: { + recordColumnDelimiter: ",", + recordRowDelimiter: "|", + }, + }, + }, + }, + startingPositionConfiguration: [ + { + startingPosition: "NOW", + }, + ], + }, + name: "example-application", + outputs: [ + { + kinesisFirehose: { + resourceArn: Token.asString( + awsKinesisFirehoseDeliveryStreamExample.arn + ), + roleArn: Token.asString(awsIamRoleExample.arn), + }, + name: "OUTPUT_1", + schema: { + recordFormatType: "CSV", + }, + }, + ], + startApplication: true, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name of the Kinesis Analytics Application. +* `code` - (Optional) SQL Code to transform input data, and generate output. +* `description` - (Optional) Description of the application. +* `cloudwatchLoggingOptions` - (Optional) The CloudWatch log stream options to monitor application errors. +See [CloudWatch Logging Options](#cloudwatch-logging-options) below for more details. +* `inputs` - (Optional) Input configuration of the application. See [Inputs](#inputs) below for more details. +* `outputs` - (Optional) Output destination configuration of the application. 
See [Outputs](#outputs) below for more details.
+* `referenceDataSources` - (Optional) An S3 Reference Data Source for the application.
+See [Reference Data Sources](#reference-data-sources) below for more details.
+* `startApplication` - (Optional) Whether to start or stop the Kinesis Analytics Application. To start an application, an input with a defined `startingPosition` must be configured.
+To modify an application's starting position, first stop the application by setting `startApplication` to `false`, then update `startingPosition` and set `startApplication` to `true`.
+* `tags` - Key-value map of tags for the Kinesis Analytics Application. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### CloudWatch Logging Options
+
+Configure a CloudWatch Log Stream to monitor application errors.
+
+The `cloudwatchLoggingOptions` block supports the following:
+
+* `logStreamArn` - (Required) The ARN of the CloudWatch Log Stream.
+* `roleArn` - (Required) The ARN of the IAM Role used to send application messages.
+
+### Inputs
+
+Configure an Input for the Kinesis Analytics Application. You can only have 1 Input configured.
+
+The `inputs` block supports the following:
+
+* `namePrefix` - (Required) The Name Prefix to use when creating an in-application stream.
+* `schema` - (Required) The Schema format of the data in the streaming source. See [Source Schema](#source-schema) below for more details.
+* `kinesisFirehose` - (Optional) The Kinesis Firehose configuration for the streaming source. Conflicts with `kinesisStream`.
+See [Kinesis Firehose](#kinesis-firehose) below for more details.
+* `kinesisStream` - (Optional) The Kinesis Stream configuration for the streaming source. Conflicts with `kinesisFirehose`.
+See [Kinesis Stream](#kinesis-stream) below for more details.
+* `parallelism` - (Optional) The number of Parallel in-application streams to create.
+See [Parallelism](#parallelism) below for more details.
+* `processingConfiguration` - (Optional) The Processing Configuration to transform records as they are received from the stream.
+See [Processing Configuration](#processing-configuration) below for more details.
+* `startingPositionConfiguration` - (Optional) The point at which the application starts processing records from the streaming source.
+See [Starting Position Configuration](#starting-position-configuration) below for more details.
+
+### Outputs
+
+Configure Output destinations for the Kinesis Analytics Application. You can have a maximum of 3 destinations configured.
+
+The `outputs` block supports the following:
+
+* `name` - (Required) The Name of the in-application stream.
+* `schema` - (Required) The Schema format of the data written to the destination. See [Destination Schema](#destination-schema) below for more details.
+* `kinesisFirehose` - (Optional) The Kinesis Firehose configuration for the destination stream. Conflicts with `kinesisStream`.
+See [Kinesis Firehose](#kinesis-firehose) below for more details.
+* `kinesisStream` - (Optional) The Kinesis Stream configuration for the destination stream. Conflicts with `kinesisFirehose`.
+See [Kinesis Stream](#kinesis-stream) below for more details.
+* `lambda` - (Optional) The Lambda function destination. See [Lambda](#lambda) below for more details, and the sketch following this list.
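+
+As a hedged illustration only (this block is not generated by `cdktf convert`): an application that delivers its output to a Lambda function. The IAM role and Lambda function (`awsIamRoleExample`, `awsLambdaFunctionExample`) are assumed to be defined elsewhere in the configuration, as in the examples above.
+
+```typescript
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { KinesisAnalyticsApplication } from "./.gen/providers/aws/kinesis-analytics-application";
+import { KinesisStream } from "./.gen/providers/aws/kinesis-stream";
+class LambdaOutputSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const exampleStream = new KinesisStream(this, "example_stream", {
+      name: "terraform-kinesis-example",
+      shardCount: 1,
+    });
+    new KinesisAnalyticsApplication(this, "lambda_output", {
+      name: "kinesis-analytics-lambda-output",
+      inputs: {
+        namePrefix: "example_prefix",
+        kinesisStream: {
+          resourceArn: exampleStream.arn,
+          roleArn: Token.asString(awsIamRoleExample.arn),
+        },
+        schema: {
+          recordColumns: [
+            {
+              name: "COLUMN_1",
+              sqlType: "INTEGER",
+            },
+          ],
+          recordFormat: {
+            mappingParameters: {
+              json: {
+                recordRowPath: "$",
+              },
+            },
+          },
+        },
+      },
+      outputs: [
+        {
+          // Deliver the in-application stream to a Lambda function instead of
+          // a Kinesis stream or Firehose delivery stream.
+          name: "OUTPUT_1",
+          schema: {
+            recordFormatType: "JSON",
+          },
+          lambda: {
+            resourceArn: Token.asString(awsLambdaFunctionExample.arn),
+            roleArn: Token.asString(awsIamRoleExample.arn),
+          },
+        },
+      ],
+    });
+  }
+}
+```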
+ +### Reference Data Sources + +Add a Reference Data Source to the Kinesis Analytics Application. You can only have 1 Reference Data Source. + +The `referenceDataSources` block supports the following: + +* `schema` - (Required) The Schema format of the data in the streaming source. See [Source Schema](#source-schema) below for more details. +* `tableName` - (Required) The in-application Table Name. +* `s3` - (Optional) The S3 configuration for the reference data source. See [S3 Reference](#s3-reference) below for more details. + +#### Kinesis Firehose + +Configuration for a Kinesis Firehose delivery stream. + +The `kinesisFirehose` block supports the following: + +* `resourceArn` - (Required) The ARN of the Kinesis Firehose delivery stream. +* `roleArn` - (Required) The ARN of the IAM Role used to access the stream. + +#### Kinesis Stream + +Configuration for a Kinesis Stream. + +The `kinesisStream` block supports the following: + +* `resourceArn` - (Required) The ARN of the Kinesis Stream. +* `roleArn` - (Required) The ARN of the IAM Role used to access the stream. + +#### Destination Schema + +The Schema format of the data in the destination. + +The `schema` block supports the following: + +* `recordFormatType` - (Required) The Format Type of the records on the output stream. Can be `csv` or `json`. + +#### Source Schema + +The Schema format of the data in the streaming source. + +The `schema` block supports the following: + +* `recordColumns` - (Required) The Record Column mapping for the streaming source data element. +See [Record Columns](#record-columns) below for more details. +* `recordFormat` - (Required) The Record Format and mapping information to schematize a record. +See [Record Format](#record-format) below for more details. +* `recordEncoding` - (Optional) The Encoding of the record in the streaming source. + +#### Parallelism + +Configures the number of Parallel in-application streams to create. + +The `parallelism` block supports the following: + +* `count` - (Required) The Count of streams. + +#### Processing Configuration + +The Processing Configuration to transform records as they are received from the stream. + +The `processingConfiguration` block supports the following: + +* `lambda` - (Required) The Lambda function configuration. See [Lambda](#lambda) below for more details. + +#### Lambda + +The Lambda function that pre-processes records in the stream. + +The `lambda` block supports the following: + +* `resourceArn` - (Required) The ARN of the Lambda function. +* `roleArn` - (Required) The ARN of the IAM Role used to access the Lambda function. + +#### Starting Position Configuration + +The point at which the application reads from the streaming source. + +The `startingPositionConfiguration` block supports the following: + +* `startingPosition` - (Required) The starting position on the stream. Valid values: `lastStoppedPoint`, `now`, `trimHorizon`. + +#### Record Columns + +The Column mapping of each data element in the streaming source to the corresponding column in the in-application stream. + +The `recordColumns` block supports the following: + +* `name` - (Required) Name of the column. +* `sqlType` - (Required) The SQL Type of the column. +* `mapping` - (Optional) The Mapping reference to the data element. + +#### Record Format + +The Record Format and relevant mapping information that should be applied to schematize the records on the stream. + +The `recordFormat` block supports the following: + +* `recordFormatType` - (Required) The type of Record Format. 
Can be `csv` or `json`.
+* `mappingParameters` - (Optional) The Mapping Information for the record format.
+See [Mapping Parameters](#mapping-parameters) below for more details.
+
+#### Mapping Parameters
+
+Provides Mapping information specific to the record format on the streaming source.
+
+The `mappingParameters` block supports the following:
+
+* `csv` - (Optional) Mapping information when the record format uses delimiters.
+See [CSV Mapping Parameters](#csv-mapping-parameters) below for more details.
+* `json` - (Optional) Mapping information when JSON is the record format on the streaming source.
+See [JSON Mapping Parameters](#json-mapping-parameters) below for more details.
+
+#### CSV Mapping Parameters
+
+Mapping information when the record format uses delimiters.
+
+The `csv` block supports the following:
+
+* `recordColumnDelimiter` - (Required) The Column Delimiter.
+* `recordRowDelimiter` - (Required) The Row Delimiter.
+
+#### JSON Mapping Parameters
+
+Mapping information when JSON is the record format on the streaming source.
+
+The `json` block supports the following:
+
+* `recordRowPath` - (Required) Path to the top-level parent that contains the records.
+
+#### S3 Reference
+
+Identifies the S3 bucket and object that contains the reference data.
+
+The `s3` block supports the following:
+
+* `bucketArn` - (Required) The S3 Bucket ARN.
+* `fileKey` - (Required) The File Key name containing reference data.
+* `roleArn` - (Required) The IAM Role ARN to read the data.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Kinesis Analytics Application.
+* `arn` - The ARN of the Kinesis Analytics Application.
+* `createTimestamp` - The Timestamp when the application version was created.
+* `lastUpdateTimestamp` - The Timestamp when the application was last updated.
+* `status` - The Status of the application.
+* `version` - The Version of the application.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+[1]: https://docs.aws.amazon.com/kinesisanalytics/latest/dev/what-is.html
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Kinesis Analytics Application using its ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a Kinesis Analytics Application using its ARN.
For example:
+
+```console
+% terraform import aws_kinesis_analytics_application.example arn:aws:kinesisanalytics:us-west-2:1234567890:application/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kinesis_firehose_delivery_stream.html.markdown b/website/docs/cdktf/typescript/r/kinesis_firehose_delivery_stream.html.markdown
new file mode 100644
index 00000000000..de71cc011af
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kinesis_firehose_delivery_stream.html.markdown
@@ -0,0 +1,1011 @@
+---
+subcategory: "Kinesis Firehose"
+layout: "aws"
+page_title: "AWS: aws_kinesis_firehose_delivery_stream"
+description: |-
+  Provides an AWS Kinesis Firehose Delivery Stream
+---
+
+
+
+# Resource: aws_kinesis_firehose_delivery_stream
+
+Provides a Kinesis Firehose Delivery Stream resource. Amazon Kinesis Firehose is a fully managed, elastic service to easily deliver real-time data streams to destinations such as Amazon S3 and Amazon Redshift.
+
+For more details, see the [Amazon Kinesis Firehose Documentation][1].
+
+## Example Usage
+
+### Extended S3 Destination
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+import { LambdaFunction } from "./.gen/providers/aws/lambda-function";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const bucket = new S3Bucket(this, "bucket", {
+      bucket: "tf-test-bucket",
+    });
+    new S3BucketAcl(this, "bucket_acl", {
+      acl: "private",
+      bucket: bucket.id,
+    });
+    const firehoseAssumeRole = new DataAwsIamPolicyDocument(
+      this,
+      "firehose_assume_role",
+      {
+        statement: [
+          {
+            actions: ["sts:AssumeRole"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["firehose.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+          },
+        ],
+      }
+    );
+    const lambdaAssumeRole = new DataAwsIamPolicyDocument(
+      this,
+      "lambda_assume_role",
+      {
+        statement: [
+          {
+            actions: ["sts:AssumeRole"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["lambda.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+          },
+        ],
+      }
+    );
+    const firehoseRole = new IamRole(this, "firehose_role", {
+      assumeRolePolicy: Token.asString(firehoseAssumeRole.json),
+      name: "firehose_test_role",
+    });
+    const lambdaIam = new IamRole(this, "lambda_iam", {
+      assumeRolePolicy: Token.asString(lambdaAssumeRole.json),
+      name: "lambda_iam",
+    });
+    const lambdaProcessor = new LambdaFunction(this, "lambda_processor", {
+      filename: "lambda.zip",
+      functionName: "firehose_lambda_processor",
+      handler: "exports.handler",
+      role: lambdaIam.arn,
+      runtime: "nodejs16.x",
+    });
+    new KinesisFirehoseDeliveryStream(this, "extended_s3_stream", {
+      destination: "extended_s3",
+      extendedS3Configuration: {
+        bucketArn: bucket.arn,
+        processingConfiguration: {
+          enabled: Token.asBoolean("true"),
+          processors: [
+            {
+              parameters: [
+                {
+                  parameterName: "LambdaArn",
+                  parameterValue: "${" +
lambdaProcessor.arn + "}:$LATEST",
+                },
+              ],
+              type: "Lambda",
+            },
+          ],
+        },
+        roleArn: firehoseRole.arn,
+      },
+      name: "terraform-kinesis-firehose-extended-s3-test-stream",
+    });
+  }
+}
+
+```
+
+### Extended S3 Destination with dynamic partitioning
+
+These examples use built-in Firehose functionality, rather than requiring a Lambda function.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KinesisFirehoseDeliveryStream(this, "extended_s3_stream", {
+      destination: "extended_s3",
+      extendedS3Configuration: {
+        bucketArn: bucket.arn,
+        bufferingSize: 64,
+        dynamicPartitioningConfiguration: {
+          enabled: Token.asBoolean("true"),
+        },
+        errorOutputPrefix:
+          "errors/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/!{firehose:error-output-type}/",
+        prefix:
+          "data/customer_id=!{partitionKeyFromQuery:customer_id}/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/",
+        processingConfiguration: {
+          enabled: Token.asBoolean("true"),
+          processors: [
+            {
+              parameters: [
+                {
+                  parameterName: "SubRecordType",
+                  parameterValue: "JSON",
+                },
+              ],
+              type: "RecordDeAggregation",
+            },
+            {
+              type: "AppendDelimiterToRecord",
+            },
+            {
+              parameters: [
+                {
+                  parameterName: "JsonParsingEngine",
+                  parameterValue: "JQ-1.6",
+                },
+                {
+                  parameterName: "MetadataExtractionQuery",
+                  parameterValue: "{customer_id:.customer_id}",
+                },
+              ],
+              type: "MetadataExtraction",
+            },
+          ],
+        },
+        roleArn: firehoseRole.arn,
+      },
+      name: "terraform-kinesis-firehose-extended-s3-test-stream",
+    });
+  }
+}
+
+```
+
+Multiple Dynamic Partitioning Keys (maximum of 50) can be added by comma-separating the `parameterValue`.
+
+The following example adds the Dynamic Partitioning Keys `storeId` and `customerId` to the S3 prefix.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KinesisFirehoseDeliveryStream(this, "extended_s3_stream", { + destination: "extended_s3", + extendedS3Configuration: { + bucketArn: bucket.arn, + bufferingSize: 64, + dynamicPartitioningConfiguration: { + enabled: Token.asBoolean("true"), + }, + errorOutputPrefix: + "errors/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/!{firehose:error-output-type}/", + prefix: + "data/store_id=!{partitionKeyFromQuery:store_id}/customer_id=!{partitionKeyFromQuery:customer_id}/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/", + processingConfiguration: { + enabled: Token.asBoolean("true"), + processors: [ + { + parameters: [ + { + parameterName: "JsonParsingEngine", + parameterValue: "JQ-1.6", + }, + { + parameterName: "MetadataExtractionQuery", + parameterValue: + "{store_id:.store_id,customer_id:.customer_id}", + }, + ], + type: "MetadataExtraction", + }, + ], + }, + roleArn: firehoseRole.arn, + }, + name: "terraform-kinesis-firehose-extended-s3-test-stream", + }); + } +} + +``` + +### Redshift Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +import { RedshiftCluster } from "./.gen/providers/aws/redshift-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testCluster = new RedshiftCluster(this, "test_cluster", { + clusterIdentifier: "tf-redshift-cluster", + clusterType: "single-node", + databaseName: "test", + masterPassword: "T3stPass", + masterUsername: "testuser", + nodeType: "dc1.large", + }); + new KinesisFirehoseDeliveryStream(this, "test_stream", { + destination: "redshift", + name: "terraform-kinesis-firehose-test-stream", + redshiftConfiguration: { + clusterJdbcurl: + "jdbc:redshift://${" + + testCluster.endpoint + + "}/${" + + testCluster.databaseName + + "}", + copyOptions: "delimiter '|'", + dataTableColumns: "test-col", + dataTableName: "test-table", + password: "T3stPass", + roleArn: firehoseRole.arn, + s3BackupConfiguration: { + bucketArn: bucket.arn, + bufferingInterval: 300, + bufferingSize: 15, + compressionFormat: "GZIP", + roleArn: firehoseRole.arn, + }, + s3BackupMode: "Enabled", + s3Configuration: { + bucketArn: bucket.arn, + bufferingInterval: 400, + bufferingSize: 10, + compressionFormat: "GZIP", + roleArn: firehoseRole.arn, + }, + username: "testuser", + }, + }); + } +} + +``` + +### Elasticsearch Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain"; +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testCluster = new ElasticsearchDomain(this, "test_cluster", { + domainName: "firehose-es-test", + }); + new KinesisFirehoseDeliveryStream(this, "test_stream", { + destination: "elasticsearch", + elasticsearchConfiguration: { + domainArn: testCluster.arn, + indexName: "test", + processingConfiguration: { + enabled: Token.asBoolean("true"), + processors: [ + { + parameters: [ + { + parameterName: "LambdaArn", + parameterValue: "${" + lambdaProcessor.arn + "}:$LATEST", + }, + ], + type: "Lambda", + }, + ], + }, + roleArn: firehoseRole.arn, + s3Configuration: { + bucketArn: bucket.arn, + bufferingInterval: 400, + bufferingSize: 10, + compressionFormat: "GZIP", + roleArn: firehoseRole.arn, + }, + typeName: "test", + }, + name: "terraform-kinesis-firehose-test-stream", + }); + } +} + +``` + +### Elasticsearch Destination With VPC + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { ElasticsearchDomain } from "./.gen/providers/aws/elasticsearch-domain"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testCluster = new ElasticsearchDomain(this, "test_cluster", { + clusterConfig: { + instanceCount: 2, + instanceType: "t2.small.elasticsearch", + zoneAwarenessEnabled: true, + }, + domainName: "es-test", + ebsOptions: { + ebsEnabled: true, + volumeSize: 10, + }, + vpcOptions: { + securityGroupIds: [first.id], + subnetIds: [Token.asString(awsSubnetFirst.id), second.id], + }, + }); + const firehoseElasticsearch = new DataAwsIamPolicyDocument( + this, + "firehose-elasticsearch", + { + statement: [ + { + actions: ["es:*"], + effect: "Allow", + resources: [testCluster.arn, "${" + testCluster.arn + "}/*"], + }, + { + actions: [ + "ec2:DescribeVpcs", + "ec2:DescribeVpcAttribute", + "ec2:DescribeSubnets", + "ec2:DescribeSecurityGroups", + "ec2:DescribeNetworkInterfaces", + "ec2:CreateNetworkInterface", + "ec2:CreateNetworkInterfacePermission", + "ec2:DeleteNetworkInterface", + ], + effect: "Allow", + resources: ["*"], + }, + ], + } + ); + const awsIamRolePolicyFirehoseElasticsearch = new IamRolePolicy( + this, + "firehose-elasticsearch_2", + { + name: "elasticsearch", + policy: Token.asString(firehoseElasticsearch.json), + role: firehose.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsIamRolePolicyFirehoseElasticsearch.overrideLogicalId( + "firehose-elasticsearch" + ); + new KinesisFirehoseDeliveryStream(this, "test", { + dependsOn: [awsIamRolePolicyFirehoseElasticsearch], + destination: "elasticsearch", + elasticsearchConfiguration: { + domainArn: testCluster.arn, + indexName: "test", + roleArn: firehose.arn, + s3Configuration: { + bucketArn: bucket.arn, + roleArn: firehose.arn, + }, + typeName: "test", + vpcConfig: { + roleArn: firehose.arn, + securityGroupIds: [first.id], + subnetIds: [Token.asString(awsSubnetFirst.id), second.id], + }, + }, + name: "terraform-kinesis-firehose-es", + }); + } +} + +``` + +### Opensearch Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testCluster = new OpensearchDomain(this, "test_cluster", { + domainName: "firehose-os-test", + }); + new KinesisFirehoseDeliveryStream(this, "test_stream", { + destination: "opensearch", + name: "terraform-kinesis-firehose-test-stream", + opensearchConfiguration: { + domainArn: testCluster.arn, + indexName: "test", + processingConfiguration: { + enabled: Token.asBoolean("true"), + processors: [ + { + parameters: [ + { + parameterName: "LambdaArn", + parameterValue: "${" + lambdaProcessor.arn + "}:$LATEST", + }, + ], + type: "Lambda", + }, + ], + }, + roleArn: firehoseRole.arn, + s3Configuration: { + bucketArn: bucket.arn, + bufferingInterval: 400, + bufferingSize: 10, + compressionFormat: "GZIP", + roleArn: firehoseRole.arn, + }, + }, + }); + } +} + +``` + +### Opensearch Destination With VPC + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testCluster = new OpensearchDomain(this, "test_cluster", { + clusterConfig: { + instanceCount: 2, + instanceType: "m4.large.search", + zoneAwarenessEnabled: true, + }, + domainName: "es-test", + ebsOptions: { + ebsEnabled: true, + volumeSize: 10, + }, + vpcOptions: { + securityGroupIds: [first.id], + subnetIds: [Token.asString(awsSubnetFirst.id), second.id], + }, + }); + const firehoseOpensearch = new IamRolePolicy(this, "firehose-opensearch", { + name: "opensearch", + policy: + '{\n "Version": "2012-10-17",\n "Statement": [\n {\n "Effect": "Allow",\n "Action": [\n "es:*"\n ],\n "Resource": [\n "${' + + testCluster.arn + + '}",\n "${' + + testCluster.arn + + '}/*"\n ]\n },\n {\n "Effect": "Allow",\n "Action": [\n "ec2:DescribeVpcs",\n "ec2:DescribeVpcAttribute",\n "ec2:DescribeSubnets",\n "ec2:DescribeSecurityGroups",\n "ec2:DescribeNetworkInterfaces",\n "ec2:CreateNetworkInterface",\n "ec2:CreateNetworkInterfacePermission",\n "ec2:DeleteNetworkInterface"\n ],\n "Resource": [\n "*"\n ]\n }\n ]\n}\n\n', + role: firehose.id, + }); + new KinesisFirehoseDeliveryStream(this, "test", { + dependsOn: [firehoseOpensearch], + destination: "opensearch", + name: "terraform-kinesis-firehose-os", + opensearchConfiguration: { + domainArn: testCluster.arn, + indexName: "test", + roleArn: firehose.arn, + s3Configuration: { + bucketArn: bucket.arn, + roleArn: firehose.arn, + }, + vpcConfig: { + roleArn: firehose.arn, + securityGroupIds: [first.id], + subnetIds: [Token.asString(awsSubnetFirst.id), second.id], + }, + }, + }); + } +} + +``` + +### Splunk Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KinesisFirehoseDeliveryStream(this, "test_stream", { + destination: "splunk", + name: "terraform-kinesis-firehose-test-stream", + splunkConfiguration: { + hecAcknowledgmentTimeout: 600, + hecEndpoint: "https://http-inputs-mydomain.splunkcloud.com:443", + hecEndpointType: "Event", + hecToken: "51D4DA16-C61B-4F5F-8EC7-ED4301342A4A", + s3BackupMode: "FailedEventsOnly", + s3Configuration: { + bucketArn: bucket.arn, + bufferingInterval: 400, + bufferingSize: 10, + compressionFormat: "GZIP", + roleArn: firehose.arn, + }, + }, + }); + } +} + +``` + +### HTTP Endpoint (e.g., New Relic) Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KinesisFirehoseDeliveryStream(this, "test_stream", {
+      destination: "http_endpoint",
+      httpEndpointConfiguration: {
+        accessKey: "my-key",
+        bufferingInterval: 600,
+        bufferingSize: 15,
+        name: "New Relic",
+        requestConfiguration: {
+          commonAttributes: [
+            {
+              name: "testname",
+              value: "testvalue",
+            },
+            {
+              name: "testname2",
+              value: "testvalue2",
+            },
+          ],
+          contentEncoding: "GZIP",
+        },
+        roleArn: firehose.arn,
+        s3BackupMode: "FailedDataOnly",
+        s3Configuration: {
+          bucketArn: bucket.arn,
+          bufferingInterval: 400,
+          bufferingSize: 10,
+          compressionFormat: "GZIP",
+          roleArn: firehose.arn,
+        },
+        url: "https://aws-api.newrelic.com/firehose/v1",
+      },
+      name: "terraform-kinesis-firehose-test-stream",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name to identify the stream. This is unique to the AWS account and region the Stream is created in. When using for WAF logging, the name must be prefixed with `aws-waf-logs-`. See [AWS Documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-policies.html#waf-policies-logging-config) for more details.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `kinesisSourceConfiguration` - (Optional) Specifies the Kinesis stream that is used as the source of the Firehose delivery stream.
+* `serverSideEncryption` - (Optional) Encryption at rest options.
+Server-side encryption should not be enabled when a kinesis stream is configured as the source of the firehose delivery stream.
+* `destination` - (Required) The destination to which the data is delivered. The only options are `s3` (Deprecated, use `extendedS3` instead), `extendedS3`, `redshift`, `elasticsearch`, `splunk`, `httpEndpoint`, and `opensearch`. More details are given below.
+* `extendedS3Configuration` - (Optional, only Required when `destination` is `extendedS3`) Enhanced configuration options for the S3 destination. More details are given below.
+* `redshiftConfiguration` - (Optional) Configuration options if redshift is the destination.
+Using `redshiftConfiguration` requires the user to also specify an
+`s3Configuration` block. More details are given below.
+* `elasticsearchConfiguration` - (Optional) Configuration options if elasticsearch is the destination. More details are given below.
+* `opensearchConfiguration` - (Optional) Configuration options if opensearch is the destination. More details are given below.
+* `splunkConfiguration` - (Optional) Configuration options if splunk is the destination. More details are given below.
+* `httpEndpointConfiguration` - (Optional) Configuration options if http_endpoint is the destination. Requires the user to also specify an `s3Configuration` block. More details are given below.
+
+The `kinesisSourceConfiguration` object supports the following:
+
+* `kinesisStreamArn` - (Required) The Kinesis stream used as the source of the Firehose delivery stream.
+* `roleArn` - (Required) The ARN of the role that provides access to the source Kinesis stream.
+
+The `serverSideEncryption` object supports the following:
+
+* `enabled` - (Optional) Whether to enable encryption at rest. Default is `false`.
+* `keyType` - (Optional) Type of encryption key. Default is `awsOwnedCmk`. Valid values are `awsOwnedCmk` and `customerManagedCmk`.
+* `keyArn` - (Optional) Amazon Resource Name (ARN) of the encryption key. Required when `keyType` is `customerManagedCmk`.
+
+The `extendedS3Configuration` object supports the same fields from [s3_configuration](#s3-configuration) as well as the following:
+
+* `dataFormatConversionConfiguration` - (Optional) Nested argument for the serializer, deserializer, and schema for converting data from the JSON format to the Parquet or ORC format before writing it to Amazon S3. More details are given below.
+* `processingConfiguration` - (Optional) The data processing configuration. More details are given below.
+* `s3BackupMode` - (Optional) The Amazon S3 backup mode. Valid values are `disabled` and `enabled`. Default value is `disabled`.
+* `s3BackupConfiguration` - (Optional) The configuration for backup in Amazon S3. Required if `s3BackupMode` is `enabled`. Supports the same fields as the `s3Configuration` object.
+* `dynamicPartitioningConfiguration` - (Optional) The configuration for dynamic partitioning. See [Dynamic Partitioning Configuration](#dynamic_partitioning_configuration) below for more details. Required when using dynamic partitioning.
+
+The `redshiftConfiguration` object supports the following:
+
+* `clusterJdbcurl` - (Required) The JDBC URL of the Redshift cluster.
+* `username` - (Required) The username that the firehose delivery stream will assume. It is strongly recommended that the username and password provided are used exclusively for Amazon Kinesis Firehose purposes, and that the permissions for the account are restricted for Amazon Redshift INSERT permissions.
+* `password` - (Required) The password for the username above.
+* `retryDuration` - (Optional) The length of time during which Firehose retries delivery after a failure, starting from the initial request and including the first attempt. The default value is 3600 seconds (60 minutes). Firehose does not retry if the value of DurationInSeconds is 0 (zero) or if the first delivery attempt takes longer than the current value.
+* `roleArn` - (Required) The ARN of the role the stream assumes.
+* `s3Configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3BackupMode` - (Optional) The Amazon S3 backup mode. Valid values are `disabled` and `enabled`. Default value is `disabled`.
+* `s3BackupConfiguration` - (Optional) The configuration for backup in Amazon S3. Required if `s3BackupMode` is `enabled`. Supports the same fields as the `s3Configuration` object.
+* `dataTableName` - (Required) The name of the table in the redshift cluster that the s3 bucket will copy to.
+* `copyOptions` - (Optional) Copy options for copying the data from the s3 intermediate bucket into redshift, for example to change the default delimiter. For valid values, see the [AWS documentation](http://docs.aws.amazon.com/firehose/latest/APIReference/API_CopyCommand.html).
+* `dataTableColumns` - (Optional) The data table columns that will be targeted by the copy command.
+* `cloudwatchLoggingOptions` - (Optional) The CloudWatch Logging Options for the delivery stream.
More details are given below.
+* `processingConfiguration` - (Optional) The data processing configuration. More details are given below.
+
+The `elasticsearchConfiguration` object supports the following:
+
+* `bufferingInterval` - (Optional) Buffer incoming data for the specified period of time, in seconds between 60 and 900, before delivering it to the destination. The default value is 300s.
+* `bufferingSize` - (Optional) Buffer incoming data to the specified size, in MBs between 1 and 100, before delivering it to the destination. The default value is 5MB.
+* `domainArn` - (Optional) The ARN of the Amazon ES domain. The pattern needs to be `arn:.*`. Conflicts with `clusterEndpoint`.
+* `clusterEndpoint` - (Optional) The endpoint to use when communicating with the cluster. Conflicts with `domainArn`.
+* `indexName` - (Required) The Elasticsearch index name.
+* `indexRotationPeriod` - (Optional) The Elasticsearch index rotation period. Index rotation appends a timestamp to the IndexName to facilitate expiration of old data. Valid values are `noRotation`, `oneHour`, `oneDay`, `oneWeek`, and `oneMonth`. The default value is `oneDay`.
+* `retryDuration` - (Optional) After an initial failure to deliver to Amazon Elasticsearch, the total amount of time, in seconds between 0 and 7200, during which Firehose re-attempts delivery (including the first attempt). After this time has elapsed, the failed documents are written to Amazon S3. The default value is 300s. There will be no retry if the value is 0.
+* `roleArn` - (Required) The ARN of the IAM role to be assumed by Firehose for calling the Amazon ES Configuration API and for indexing documents. The IAM role must have permission for `describeElasticsearchDomain`, `describeElasticsearchDomains`, and `describeElasticsearchDomainConfig`. The pattern needs to be `arn:.*`.
+* `s3Configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3BackupMode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `failedDocumentsOnly` and `allDocuments`. Default value is `failedDocumentsOnly`.
+* `typeName` - (Optional) The Elasticsearch type name with maximum length of 100 characters.
+* `cloudwatchLoggingOptions` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `vpcConfig` - (Optional) The VPC configuration for the delivery stream to connect to Elasticsearch associated with the VPC. More details are given below.
+* `processingConfiguration` - (Optional) The data processing configuration. More details are given below.
+
+The `opensearchConfiguration` object supports the following:
+
+* `bufferingInterval` - (Optional) Buffer incoming data for the specified period of time, in seconds between 60 and 900, before delivering it to the destination. The default value is 300s.
+* `bufferingSize` - (Optional) Buffer incoming data to the specified size, in MBs between 1 and 100, before delivering it to the destination. The default value is 5MB.
+* `domainArn` - (Optional) The ARN of the Amazon OpenSearch domain. The pattern needs to be `arn:.*`. Conflicts with `clusterEndpoint`.
+* `clusterEndpoint` - (Optional) The endpoint to use when communicating with the cluster. Conflicts with `domainArn`.
+* `indexName` - (Required) The OpenSearch index name.
+* `indexRotationPeriod` - (Optional) The OpenSearch index rotation period. Index rotation appends a timestamp to the IndexName to facilitate expiration of old data.
Valid values are `noRotation`, `oneHour`, `oneDay`, `oneWeek`, and `oneMonth`. The default value is `oneDay`.
+* `retryDuration` - (Optional) After an initial failure to deliver to Amazon OpenSearch, the total amount of time, in seconds between 0 and 7200, during which Firehose re-attempts delivery (including the first attempt). After this time has elapsed, the failed documents are written to Amazon S3. The default value is 300s. There will be no retry if the value is 0.
+* `roleArn` - (Required) The ARN of the IAM role to be assumed by Firehose for calling the Amazon OpenSearch Configuration API and for indexing documents. The IAM role must have permission for `describeDomain`, `describeDomains`, and `describeDomainConfig`. The pattern needs to be `arn:.*`.
+* `s3Configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3BackupMode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `failedDocumentsOnly` and `allDocuments`. Default value is `failedDocumentsOnly`.
+* `typeName` - (Optional) The Elasticsearch type name with maximum length of 100 characters. Types are deprecated in OpenSearch 1.1, so `typeName` must be empty.
+* `cloudwatchLoggingOptions` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `vpcConfig` - (Optional) The VPC configuration for the delivery stream to connect to OpenSearch associated with the VPC. More details are given below.
+* `processingConfiguration` - (Optional) The data processing configuration. More details are given below.
+
+The `splunkConfiguration` object supports the following:
+
+* `hecAcknowledgmentTimeout` - (Optional) The amount of time, in seconds between 180 and 600, that Kinesis Firehose waits to receive an acknowledgment from Splunk after it sends data.
+* `hecEndpoint` - (Required) The HTTP Event Collector (HEC) endpoint to which Kinesis Firehose sends your data.
+* `hecEndpointType` - (Optional) The HEC endpoint type. Valid values are `raw` or `event`. The default value is `raw`.
+* `hecToken` - (Required) The GUID that you obtain from your Splunk cluster when you create a new HEC endpoint.
+* `s3Configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3BackupMode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `failedEventsOnly` and `allEvents`. Default value is `failedEventsOnly`.
+* `retryDuration` - (Optional) After an initial failure to deliver to Splunk, the total amount of time, in seconds between 0 and 7200, during which Firehose re-attempts delivery (including the first attempt). After this time has elapsed, the failed documents are written to Amazon S3. The default value is 300s. There will be no retry if the value is 0.
+* `cloudwatchLoggingOptions` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `processingConfiguration` - (Optional) The data processing configuration. More details are given below.
+
+The `httpEndpointConfiguration` object supports the following:
+
+* `url` - (Required) The HTTP endpoint URL to which Kinesis Firehose sends your data.
+* `name` - (Optional) The HTTP endpoint name.
+* `accessKey` - (Optional) The access key required for Kinesis Firehose to authenticate with the HTTP endpoint selected as the destination.
+* `roleArn` - (Required) Kinesis Data Firehose uses this IAM role for all the permissions that the delivery stream needs.
The pattern needs to be `arn:.*`.
+* `s3Configuration` - (Required) The S3 Configuration. See [s3_configuration](#s3-configuration) for more details.
+* `s3BackupMode` - (Optional) Defines how documents should be delivered to Amazon S3. Valid values are `failedDataOnly` and `allData`. Default value is `failedDataOnly`.
+* `bufferingSize` - (Optional) Buffer incoming data to the specified size, in MBs, before delivering it to the destination. The default value is 5.
+* `bufferingInterval` - (Optional) Buffer incoming data for the specified period of time, in seconds, before delivering it to the destination. The default value is 300 (5 minutes).
+* `cloudwatchLoggingOptions` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+* `processingConfiguration` - (Optional) The data processing configuration. More details are given below.
+* `requestConfiguration` - (Optional) The request configuration. More details are given below.
+* `retryDuration` - (Optional) Total amount of seconds Firehose spends on retries. This duration starts after the initial attempt fails. It does not include the time periods during which Firehose waits for acknowledgment from the specified destination after each attempt. Valid values are between `0` and `7200`. Default is `300`.
+
+The `cloudwatchLoggingOptions` object supports the following:
+
+* `enabled` - (Optional) Enables or disables logging. Defaults to `false`.
+* `logGroupName` - (Optional) The CloudWatch group name for logging. This value is required if `enabled` is true.
+* `logStreamName` - (Optional) The CloudWatch log stream name for logging. This value is required if `enabled` is true.
+
+The `processingConfiguration` object supports the following:
+
+* `enabled` - (Optional) Enables or disables data processing.
+* `processors` - (Optional) Array of data processors. More details are given below.
+
+The `processors` array objects support the following:
+
+* `type` - (Required) The type of processor. Valid Values: `recordDeAggregation`, `lambda`, `metadataExtraction`, `appendDelimiterToRecord`. Validation is done against [AWS SDK constants](https://docs.aws.amazon.com/sdk-for-go/api/service/firehose/#pkg-constants), so values not explicitly listed may also work.
+* `parameters` - (Optional) Array of processor parameters. More details are given below.
+
+The `parameters` array objects support the following:
+
+* `parameterName` - (Required) Parameter name. Valid Values: `lambdaArn`, `numberOfRetries`, `metadataExtractionQuery`, `jsonParsingEngine`, `roleArn`, `bufferSizeInMBs`, `bufferIntervalInSeconds`, `subRecordType`, `delimiter`. Validation is done against [AWS SDK constants](https://docs.aws.amazon.com/sdk-for-go/api/service/firehose/#pkg-constants), so values not explicitly listed may also work.
+* `parameterValue` - (Required) Parameter value. Must be between 1 and 512 characters in length (inclusive). When providing a Lambda ARN, you should specify the resource version as well.
+
+~> **NOTE:** Parameters with default values, including `numberOfRetries` (default: 3), `roleArn` (default: firehose role ARN), `bufferSizeInMBs` (default: 3), and `bufferIntervalInSeconds` (default: 60), are not stored in Terraform state. To prevent perpetual differences, it is therefore recommended to only include parameters with non-default values. 
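+
+To make the processor blocks above concrete, the following is a minimal, hypothetical sketch of a `processingConfiguration` that runs records through a Lambda transform before delivery. It is shown on an `extendedS3Configuration`, though the block has the same shape under the other destination configurations; the ARNs are placeholders, not values from this page:
+
+```typescript
+// Hypothetical sketch (not produced by 'cdktf convert'); all ARNs are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+class ProcessingSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KinesisFirehoseDeliveryStream(this, "sketch", {
+      name: "sketch-stream",
+      destination: "extended_s3",
+      extendedS3Configuration: {
+        roleArn: "arn:aws:iam::123456789012:role/firehose-role", // placeholder
+        bucketArn: "arn:aws:s3:::placeholder-bucket", // placeholder
+        processingConfiguration: {
+          enabled: true,
+          processors: [
+            {
+              // The raw API value for the `lambda` processor type.
+              type: "Lambda",
+              parameters: [
+                {
+                  parameterName: "LambdaArn",
+                  // Include the function version, as noted above.
+                  parameterValue:
+                    "arn:aws:lambda:us-east-1:123456789012:function:transform:1", // placeholder
+                },
+              ],
+            },
+          ],
+        },
+      },
+    });
+  }
+}
+```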
+
+The `requestConfiguration` object supports the following:
+
+* `contentEncoding` - (Optional) Kinesis Data Firehose uses the content encoding to compress the body of a request before sending the request to the destination. Valid values are `none` and `gzip`. Default value is `none`.
+* `commonAttributes` - (Optional) Describes the metadata sent to the HTTP endpoint destination. More details are given below.
+
+The `commonAttributes` array objects support the following:
+
+* `name` - (Required) The name of the HTTP endpoint common attribute.
+* `value` - (Required) The value of the HTTP endpoint common attribute.
+
+The `vpcConfig` object supports the following:
+
+* `subnetIds` - (Required) A list of subnet IDs to associate with Kinesis Firehose.
+* `securityGroupIds` - (Required) A list of security group IDs to associate with Kinesis Firehose.
+* `roleArn` - (Required) The ARN of the IAM role to be assumed by Firehose for calling the Amazon EC2 configuration API and for creating network interfaces. Make sure the role has the necessary [IAM permissions](https://docs.aws.amazon.com/firehose/latest/dev/controlling-access.html#using-iam-es-vpc).
+
+### data_format_conversion_configuration
+
+~> **NOTE:** Once configured, the data format conversion configuration can only be disabled, in which case the configuration values remain but are not active. It is not currently possible to completely remove the configuration without recreating the resource.
+
+Example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+interface MyConfig {
+  bucketArn: any;
+  roleArn: any;
+  destination: any;
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new KinesisFirehoseDeliveryStream(this, "example", {
+      extendedS3Configuration: {
+        bufferingSize: 128,
+        dataFormatConversionConfiguration: {
+          inputFormatConfiguration: {
+            deserializer: {
+              hiveJsonSerDe: {},
+            },
+          },
+          outputFormatConfiguration: {
+            serializer: {
+              orcSerDe: {},
+            },
+          },
+          schemaConfiguration: {
+            databaseName: Token.asString(
+              awsGlueCatalogTableExample.databaseName
+            ),
+            roleArn: Token.asString(awsIamRoleExample.arn),
+            tableName: Token.asString(awsGlueCatalogTableExample.name),
+          },
+        },
+        bucketArn: config.bucketArn,
+        roleArn: config.roleArn,
+      },
+      destination: config.destination,
+      name: config.name,
+    });
+  }
+}
+
+```
+
+* `inputFormatConfiguration` - (Required) Nested argument that specifies the deserializer that you want Kinesis Data Firehose to use to convert the format of your data from JSON. More details below.
+* `outputFormatConfiguration` - (Required) Nested argument that specifies the serializer that you want Kinesis Data Firehose to use to convert the format of your data to the Parquet or ORC format. More details below.
+* `schemaConfiguration` - (Required) Nested argument that specifies the AWS Glue Data Catalog table that contains the column information. More details below.
+* `enabled` - (Optional) Defaults to `true`. Set it to `false` if you want to disable format conversion while preserving the configuration details. 
+
+#### S3 Configuration
+
+* `roleArn` - (Required) The ARN of the AWS credentials.
+* `bucketArn` - (Required) The ARN of the S3 bucket.
+* `prefix` - (Optional) The "YYYY/MM/DD/HH" time format prefix is automatically used for delivered S3 files. You can specify an extra prefix to be added in front of the time format prefix. Note that if the prefix ends with a slash, it appears as a folder in the S3 bucket.
+* `bufferingSize` - (Optional) Buffer incoming data to the specified size, in MBs, before delivering it to the destination. The default value is 5.
+  We recommend setting SizeInMBs to a value greater than the amount of data you typically ingest into the delivery stream in 10 seconds. For example, if you typically ingest data at 1 MB/sec, set SizeInMBs to be 10 MB or higher.
+* `bufferingInterval` - (Optional) Buffer incoming data for the specified period of time, in seconds, before delivering it to the destination. The default value is 300.
+* `compressionFormat` - (Optional) The compression format. If no value is specified, the default is `uncompressed`. Other supported values are `gzip`, `zip`, `snappy`, and `hadoopSnappy`.
+* `errorOutputPrefix` - (Optional) Prefix added to failed records before writing them to S3. Not currently supported for the `redshift` destination. This prefix appears immediately following the bucket name. For information about how to specify this prefix, see [Custom Prefixes for Amazon S3 Objects](https://docs.aws.amazon.com/firehose/latest/dev/s3-prefixes.html).
+* `kmsKeyArn` - (Optional) Specifies the KMS key ARN the stream will use to encrypt data. If not set, no encryption will
+  be used.
+* `cloudwatchLoggingOptions` - (Optional) The CloudWatch Logging Options for the delivery stream. More details are given below.
+
+#### input_format_configuration
+
+* `deserializer` - (Required) Nested argument that specifies which deserializer to use. You can choose either the Apache Hive JSON SerDe or the OpenX JSON SerDe. More details below.
+
+##### deserializer
+
+~> **NOTE:** One of the deserializers must be configured. If no nested configuration needs to occur, simply declare it as `XXX_json_ser_de = []` or `XXX_json_ser_de {}`.
+
+* `hiveJsonSerDe` - (Optional) Nested argument that specifies the native Hive / HCatalog JsonSerDe. More details below.
+* `openXJsonSerDe` - (Optional) Nested argument that specifies the OpenX SerDe. More details below.
+
+###### hive_json_ser_de
+
+* `timestampFormats` - (Optional) A list of how you want Kinesis Data Firehose to parse the date and time stamps that may be present in your input data JSON. To specify these format strings, follow the pattern syntax of JodaTime's DateTimeFormat format strings. For more information, see [Class DateTimeFormat](https://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html). You can also use the special value `millis` to parse time stamps in epoch milliseconds. If you don't specify a format, Kinesis Data Firehose uses `java.sql.Timestamp::valueOf` by default.
+
+###### open_x_json_ser_de
+
+* `caseInsensitive` - (Optional) When set to `true`, which is the default, Kinesis Data Firehose converts JSON keys to lowercase before deserializing them.
+* `columnToJsonKeyMappings` - (Optional) A map of column names to JSON keys that aren't identical to the column names. This is useful when the JSON contains keys that are Hive keywords. For example, `timestamp` is a Hive keyword. If you have a JSON key named `timestamp`, set this parameter to `{ ts = "timestamp" }` to map this key to a column named `ts`. 
+* `convertDotsInJsonKeysToUnderscores` - (Optional) When set to `true`, specifies that the names of the keys include dots and that you want Kinesis Data Firehose to replace them with underscores. This is useful because Apache Hive does not allow dots in column names. For example, if the JSON contains a key whose name is "a.b", you can define the column name to be "a_b" when using this option. Defaults to `false`.
+
+#### output_format_configuration
+
+* `serializer` - (Required) Nested argument that specifies which serializer to use. You can choose either the ORC SerDe or the Parquet SerDe. More details below.
+
+##### serializer
+
+~> **NOTE:** One of the serializers must be configured. If no nested configuration needs to occur, simply declare it as `XXX_ser_de = []` or `XXX_ser_de {}`.
+
+* `orcSerDe` - (Optional) Nested argument that specifies converting data to the ORC format before storing it in Amazon S3. For more information, see [Apache ORC](https://orc.apache.org/docs/). More details below.
+* `parquetSerDe` - (Optional) Nested argument that specifies converting data to the Parquet format before storing it in Amazon S3. For more information, see [Apache Parquet](https://parquet.apache.org/documentation/latest/). More details below.
+
+###### orc_ser_de
+
+* `blockSizeBytes` - (Optional) The Hadoop Distributed File System (HDFS) block size. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is 256 MiB and the minimum is 64 MiB. Kinesis Data Firehose uses this value for padding calculations.
+* `bloomFilterColumns` - (Optional) A list of column names for which you want Kinesis Data Firehose to create bloom filters.
+* `bloomFilterFalsePositiveProbability` - (Optional) The Bloom filter false positive probability (FPP). The lower the FPP, the bigger the Bloom filter. The default value is `0.05`, the minimum is `0`, and the maximum is `1`.
+* `compression` - (Optional) The compression code to use over data blocks. The default is `snappy`.
+* `dictionaryKeyThreshold` - (Optional) A float that represents the fraction of the total number of non-null rows. To turn off dictionary encoding, set this fraction to a number that is less than the number of distinct keys in a dictionary. To always use dictionary encoding, set this threshold to `1`.
+* `enablePadding` - (Optional) Set this to `true` to indicate that you want stripes to be padded to the HDFS block boundaries. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is `false`.
+* `formatVersion` - (Optional) The version of the file to write. The possible values are `v011` and `v012`. The default is `v012`.
+* `paddingTolerance` - (Optional) A float between 0 and 1 that defines the tolerance for block padding as a decimal fraction of stripe size. The default value is `0.05`, which means 5 percent of stripe size. For the default values of 64 MiB ORC stripes and 256 MiB HDFS blocks, the default block padding tolerance of 5 percent reserves a maximum of 3.2 MiB for padding within the 256 MiB block. In such a case, if the available size within the block is more than 3.2 MiB, a new, smaller stripe is inserted to fit within that space. This ensures that no stripe crosses block boundaries and causes remote reads within a node-local task. Kinesis Data Firehose ignores this parameter when `enablePadding` is `false`.
+* `rowIndexStride` - (Optional) The number of rows between index entries. The default is `10000` and the minimum is `1000`. 
+* `stripeSizeBytes` - (Optional) The number of bytes in each stripe. The default is 64 MiB and the minimum is 8 MiB.
+
+###### parquet_ser_de
+
+* `blockSizeBytes` - (Optional) The Hadoop Distributed File System (HDFS) block size. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is 256 MiB and the minimum is 64 MiB. Kinesis Data Firehose uses this value for padding calculations.
+* `compression` - (Optional) The compression code to use over data blocks. The possible values are `uncompressed`, `snappy`, and `gzip`, with the default being `snappy`. Use `snappy` for higher decompression speed. Use `gzip` if the compression ratio is more important than speed.
+* `enableDictionaryCompression` - (Optional) Indicates whether to enable dictionary compression.
+* `maxPaddingBytes` - (Optional) The maximum amount of padding to apply. This is useful if you intend to copy the data from Amazon S3 to HDFS before querying. The default is `0`.
+* `pageSizeBytes` - (Optional) The Parquet page size. Column chunks are divided into pages. A page is conceptually an indivisible unit (in terms of compression and encoding). The minimum value is 64 KiB and the default is 1 MiB.
+* `writerVersion` - (Optional) Indicates the version of row format to output. The possible values are `v1` and `v2`. The default is `v1`.
+
+#### schema_configuration
+
+* `databaseName` - (Required) Specifies the name of the AWS Glue database that contains the schema for the output data.
+* `roleArn` - (Required) The role that Kinesis Data Firehose can use to access AWS Glue. This role must be in the same account you use for Kinesis Data Firehose. Cross-account roles aren't allowed.
+* `tableName` - (Required) Specifies the AWS Glue table that contains the column information that constitutes your data schema.
+* `catalogId` - (Optional) The ID of the AWS Glue Data Catalog. If you don't supply this, the AWS account ID is used by default.
+* `region` - (Optional) If you don't specify an AWS Region, the default is the current region.
+* `versionId` - (Optional) Specifies the table version for the output data schema. Defaults to `latest`.
+
+#### dynamic_partitioning_configuration
+
+Required when using [dynamic partitioning](https://docs.aws.amazon.com/firehose/latest/dev/dynamic-partitioning.html).
+
+* `enabled` - (Optional) Enables or disables dynamic partitioning. Defaults to `false`.
+* `retryDuration` - (Optional) Total amount of seconds Firehose spends on retries. Valid values are between 0 and 7200. Default is 300.
+
+~> **NOTE:** You can enable dynamic partitioning only when you create a new delivery stream. Once you enable dynamic partitioning on a delivery stream, it cannot be disabled on this delivery stream. Therefore, Terraform will recreate the resource whenever dynamic partitioning is enabled or disabled. A sketch of a delivery stream with dynamic partitioning enabled is shown after the attribute list below.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) specifying the Stream
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
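+
+As referenced in the dynamic partitioning note above, here is a minimal, hypothetical sketch of a delivery stream with dynamic partitioning enabled. The bucket and role ARNs are placeholders, and `customer_id` is an assumed field in the incoming JSON records:
+
+```typescript
+// Hypothetical sketch (not produced by 'cdktf convert'); ARNs and field names are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream";
+class DynamicPartitioningSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KinesisFirehoseDeliveryStream(this, "sketch", {
+      name: "sketch-dynamic-partitioning",
+      destination: "extended_s3",
+      extendedS3Configuration: {
+        roleArn: "arn:aws:iam::123456789012:role/firehose-role", // placeholder
+        bucketArn: "arn:aws:s3:::placeholder-bucket", // placeholder
+        bufferingSize: 64, // dynamic partitioning requires a buffer size of at least 64 MB
+        dynamicPartitioningConfiguration: {
+          enabled: true,
+        },
+        // The partition key referenced in the prefix is produced by the
+        // metadataExtraction processor below.
+        prefix: "customer_id=!{partitionKeyFromQuery:customer_id}/",
+        errorOutputPrefix: "errors/",
+        processingConfiguration: {
+          enabled: true,
+          processors: [
+            {
+              type: "MetadataExtraction",
+              parameters: [
+                {
+                  parameterName: "JsonParsingEngine",
+                  parameterValue: "JQ-1.6",
+                },
+                {
+                  parameterName: "MetadataExtractionQuery",
+                  parameterValue: "{customer_id:.customer_id}", // assumed record field
+                },
+              ],
+            },
+          ],
+        },
+      },
+    });
+  }
+}
+```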
+
+[1]: https://aws.amazon.com/documentation/firehose/
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `10M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Firehose Delivery streams using the stream ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Kinesis Firehose Delivery streams using the stream ARN. For example:
+
+```console
+% terraform import aws_kinesis_firehose_delivery_stream.foo arn:aws:firehose:us-east-1:XXX:deliverystream/example
+```
+
+Note: Import does not work for stream destination `s3`. Consider using `extendedS3` since the `s3` destination is deprecated.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kinesis_stream.html.markdown b/website/docs/cdktf/typescript/r/kinesis_stream.html.markdown
new file mode 100644
index 00000000000..ac37c80d46b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kinesis_stream.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "Kinesis"
+layout: "aws"
+page_title: "AWS: aws_kinesis_stream"
+description: |-
+  Provides an AWS Kinesis Stream
+---
+
+
+
+# Resource: aws_kinesis_stream
+
+Provides a Kinesis Stream resource. Amazon Kinesis is a managed service that
+scales elastically for real-time processing of streaming big data.
+
+For more details, see the [Amazon Kinesis Documentation][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KinesisStream } from "./.gen/providers/aws/kinesis-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KinesisStream(this, "test_stream", {
+      name: "terraform-kinesis-test",
+      retentionPeriod: 48,
+      shardCount: 1,
+      shardLevelMetrics: ["IncomingBytes", "OutgoingBytes"],
+      streamModeDetails: {
+        streamMode: "PROVISIONED",
+      },
+      tags: {
+        Environment: "test",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name to identify the stream. This is unique to the AWS account and region the Stream is created in.
+* `shardCount` – (Optional) The number of shards that the stream will use. If the `streamMode` is `provisioned`, this field is required.
+Amazon has guidelines for specifying the Stream size that should be referenced when creating a Kinesis stream. See [Amazon Kinesis Streams][2] for more.
+* `retentionPeriod` - (Optional) Length of time data records are accessible after they are added to the stream. The maximum value is 8760 hours; the minimum and default value is 24.
+* `shardLevelMetrics` - (Optional) A list of shard-level CloudWatch metrics which can be enabled for the stream. See [Monitoring with CloudWatch][3] for more. 
Note that the value ALL should not be used; instead you should provide an explicit list of metrics you wish to enable. +* `enforceConsumerDeletion` - (Optional) A boolean that indicates all registered consumers should be deregistered from the stream so that the stream can be destroyed without error. The default value is `false`. +* `encryptionType` - (Optional) The encryption type to use. The only acceptable values are `none` or `kms`. The default value is `none`. +* `kmsKeyId` - (Optional) The GUID for the customer-managed KMS key to use for encryption. You can also use a Kinesis-owned master key by specifying the alias `alias/aws/kinesis`. +* `streamModeDetails` - (Optional) Indicates the [capacity mode](https://docs.aws.amazon.com/streams/latest/dev/how-do-i-size-a-stream.html) of the data stream. Detailed below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### stream_mode_details Configuration Block + +* `streamMode` - (Required) Specifies the capacity mode of the stream. Must be either `provisioned` or `onDemand`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique Stream id +* `name` - The unique Stream name +* `shardCount` - The count of Shards for this Stream +* `arn` - The Amazon Resource Name (ARN) specifying the Stream (same as `id`) +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5M`) +- `update` - (Default `120M`) +- `delete` - (Default `120M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Streams using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Kinesis Streams using the `name`. For example: + +```console +% terraform import aws_kinesis_stream.test_stream terraform-kinesis-test +``` + +[1]: https://aws.amazon.com/documentation/kinesis/ +[2]: https://docs.aws.amazon.com/kinesis/latest/dev/amazon-kinesis-streams.html +[3]: https://docs.aws.amazon.com/streams/latest/dev/monitoring-with-cloudwatch.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kinesis_stream_consumer.html.markdown b/website/docs/cdktf/typescript/r/kinesis_stream_consumer.html.markdown new file mode 100644 index 00000000000..a4f52f006df --- /dev/null +++ b/website/docs/cdktf/typescript/r/kinesis_stream_consumer.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "Kinesis" +layout: "aws" +page_title: "AWS: aws_kinesis_stream_consumer" +description: |- + Manages a Kinesis Stream Consumer. 
+---
+
+
+
+# Resource: aws_kinesis_stream_consumer
+
+Provides a resource to manage a Kinesis Stream Consumer.
+
+-> **Note:** You can register up to 20 consumers per stream. A given consumer can only be registered with one stream at a time.
+
+For more details, see the [Amazon Kinesis Stream Consumer Documentation][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KinesisStream } from "./.gen/providers/aws/kinesis-stream";
+import { KinesisStreamConsumer } from "./.gen/providers/aws/kinesis-stream-consumer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new KinesisStream(this, "example", {
+      name: "example-stream",
+      shardCount: 1,
+    });
+    const awsKinesisStreamConsumerExample = new KinesisStreamConsumer(
+      this,
+      "example_1",
+      {
+        name: "example-consumer",
+        streamArn: example.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKinesisStreamConsumerExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required, Forces new resource) Name of the stream consumer.
+* `streamArn` – (Required, Forces new resource) Amazon Resource Name (ARN) of the data stream the consumer is registered with.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the stream consumer.
+* `creationTimestamp` - Approximate timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) of when the stream consumer was created.
+* `id` - Amazon Resource Name (ARN) of the stream consumer.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Stream Consumers using the Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Kinesis Stream Consumers using the Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_kinesis_stream_consumer.example arn:aws:kinesis:us-west-2:123456789012:stream/example/consumer/example:1616044553
+```
+
+[1]: https://docs.aws.amazon.com/streams/latest/dev/amazon-kinesis-consumers.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kinesis_video_stream.html.markdown b/website/docs/cdktf/typescript/r/kinesis_video_stream.html.markdown
new file mode 100644
index 00000000000..63c30271ed2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kinesis_video_stream.html.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "Kinesis Video"
+layout: "aws"
+page_title: "AWS: aws_kinesis_video_stream"
+description: |-
+  Provides an AWS Kinesis Video Stream
+---
+
+
+
+# Resource: aws_kinesis_video_stream
+
+Provides a Kinesis Video Stream resource. 
Amazon Kinesis Video Streams makes it easy to securely stream video from connected devices to AWS for analytics, machine learning (ML), playback, and other processing.
+
+For more details, see the [Amazon Kinesis Documentation][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KinesisVideoStream } from "./.gen/providers/aws/kinesis-video-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KinesisVideoStream(this, "default", {
+      dataRetentionInHours: 1,
+      deviceName: "kinesis-video-device-name",
+      mediaType: "video/h264",
+      name: "terraform-kinesis-video-stream",
+      tags: {
+        Name: "terraform-kinesis-video-stream",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name to identify the stream. This is unique to the
+AWS account and region the Stream is created in.
+* `dataRetentionInHours` – (Optional) The number of hours that you want to retain the data in the stream. Kinesis Video Streams retains the data in a data store that is associated with the stream. The default value is `0`, indicating that the stream does not persist data.
+* `deviceName` - (Optional) The name of the device that is writing to the stream. **In the current implementation, Kinesis Video Streams does not use this name.**
+* `kmsKeyId` - (Optional) The ID of the AWS Key Management Service (AWS KMS) key that you want Kinesis Video Streams to use to encrypt stream data. If no key ID is specified, the default Kinesis Video-managed key (`aws/kinesisvideo`) is used.
+* `mediaType` - (Optional) The media type of the stream. Consumers of the stream can use this information when processing the stream. For more information about media types, see [Media Types][2]. If you choose to specify the MediaType, see [Naming Requirements][3] for guidelines.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique Stream id
+* `arn` - The Amazon Resource Name (ARN) specifying the Stream (same as `id`)
+* `creationTime` - A time stamp that indicates when the stream was created.
+* `version` - The version of the stream.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5M`)
+- `update` - (Default `120M`)
+- `delete` - (Default `120M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Kinesis Video Streams using the `arn`. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Kinesis Video Streams using the `arn`. For example:
+
+```console
+% terraform import aws_kinesis_video_stream.test_stream arn:aws:kinesisvideo:us-west-2:123456789012:stream/terraform-kinesis-test/1554978910975
+```
+
+[1]: https://aws.amazon.com/documentation/kinesis/
+[2]: http://www.iana.org/assignments/media-types/media-types.xhtml
+[3]: https://tools.ietf.org/html/rfc6838#section-4.2
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kinesisanalyticsv2_application.html.markdown b/website/docs/cdktf/typescript/r/kinesisanalyticsv2_application.html.markdown
new file mode 100644
index 00000000000..4743dc4351f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kinesisanalyticsv2_application.html.markdown
@@ -0,0 +1,561 @@
+---
+subcategory: "Kinesis Analytics V2"
+layout: "aws"
+page_title: "AWS: aws_kinesisanalyticsv2_application"
+description: |-
+  Manages a Kinesis Analytics v2 Application.
+---
+
+
+
+# Resource: aws_kinesisanalyticsv2_application
+
+Manages a Kinesis Analytics v2 Application.
+This resource can be used to manage both Kinesis Data Analytics for SQL applications and Kinesis Data Analytics for Apache Flink applications.
+
+-> **Note:** Kinesis Data Analytics for SQL applications created using this resource cannot currently be viewed in the AWS Console. To manage Kinesis Data Analytics for SQL applications that can also be viewed in the AWS Console, use the [`awsKinesisAnalyticsApplication`](/docs/providers/aws/r/kinesis_analytics_application.html) resource.
+
+## Example Usage
+
+### Apache Flink Application
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Kinesisanalyticsv2Application } from "./.gen/providers/aws/kinesisanalyticsv2-application";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example-flink-application",
+    });
+    const awsS3ObjectExample = new S3Object(this, "example_1", {
+      bucket: example.id,
+      key: "example-flink-application",
+      source: "flink-app.jar",
+    });
+    /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + const awsKinesisanalyticsv2ApplicationExample = + new Kinesisanalyticsv2Application(this, "example_2", { + applicationConfiguration: { + applicationCodeConfiguration: { + codeContent: { + s3ContentLocation: { + bucketArn: example.arn, + fileKey: Token.asString(awsS3ObjectExample.key), + }, + }, + codeContentType: "ZIPFILE", + }, + environmentProperties: { + propertyGroup: [ + { + propertyGroupId: "PROPERTY-GROUP-1", + propertyMap: { + Key1: "Value1", + }, + }, + { + propertyGroupId: "PROPERTY-GROUP-2", + propertyMap: { + KeyA: "ValueA", + KeyB: "ValueB", + }, + }, + ], + }, + flinkApplicationConfiguration: { + checkpointConfiguration: { + configurationType: "DEFAULT", + }, + monitoringConfiguration: { + configurationType: "CUSTOM", + logLevel: "DEBUG", + metricsLevel: "TASK", + }, + parallelismConfiguration: { + autoScalingEnabled: true, + configurationType: "CUSTOM", + parallelism: 10, + parallelismPerKpu: 4, + }, + }, + }, + name: "example-flink-application", + runtimeEnvironment: "FLINK-1_8", + serviceExecutionRole: Token.asString(awsIamRoleExample.arn), + tags: { + Environment: "test", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKinesisanalyticsv2ApplicationExample.overrideLogicalId("example"); + } +} + +``` + +### SQL Application + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { CloudwatchLogStream } from "./.gen/providers/aws/cloudwatch-log-stream"; +import { Kinesisanalyticsv2Application } from "./.gen/providers/aws/kinesisanalyticsv2-application"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "example-sql-application", + }); + const awsCloudwatchLogStreamExample = new CloudwatchLogStream( + this, + "example_1", + { + logGroupName: example.name, + name: "example-sql-application", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsCloudwatchLogStreamExample.overrideLogicalId("example"); + const awsKinesisanalyticsv2ApplicationExample = + new Kinesisanalyticsv2Application(this, "example_2", { + applicationConfiguration: { + applicationCodeConfiguration: { + codeContent: { + textContent: "SELECT 1;\n\n", + }, + codeContentType: "PLAINTEXT", + }, + sqlApplicationConfiguration: { + input: { + inputParallelism: { + count: 3, + }, + inputSchema: { + recordColumn: [ + { + mapping: "MAPPING-1", + name: "COLUMN_1", + sqlType: "VARCHAR(8)", + }, + { + name: "COLUMN_2", + sqlType: "DOUBLE", + }, + ], + recordEncoding: "UTF-8", + recordFormat: { + mappingParameters: { + csvMappingParameters: { + recordColumnDelimiter: ",", + recordRowDelimiter: "\n\n", + }, + }, + recordFormatType: "CSV", + }, + }, + kinesisStreamsInput: { + resourceArn: Token.asString(awsKinesisStreamExample.arn), + }, + namePrefix: "PREFIX_1", + }, + output: [ + { + destinationSchema: { + recordFormatType: "JSON", + }, + lambdaOutput: { + resourceArn: Token.asString(awsLambdaFunctionExample.arn), + }, + name: "OUTPUT_1", + }, + { + destinationSchema: { + recordFormatType: "CSV", + }, + kinesisFirehoseOutput: { + resourceArn: Token.asString( + awsKinesisFirehoseDeliveryStreamExample.arn + ), + }, + name: "OUTPUT_2", + }, + ], + referenceDataSource: { + referenceSchema: { + recordColumn: [ + { + name: "COLUMN_1", + sqlType: "INTEGER", + }, + ], + recordFormat: { + mappingParameters: { + jsonMappingParameters: { + recordRowPath: "$", + }, + }, + recordFormatType: "JSON", + }, + }, + s3ReferenceDataSource: { + bucketArn: Token.asString(awsS3BucketExample.arn), + fileKey: "KEY-1", + }, + tableName: "TABLE-1", + }, + }, + }, + cloudwatchLoggingOptions: { + logStreamArn: Token.asString(awsCloudwatchLogStreamExample.arn), + }, + name: "example-sql-application", + runtimeEnvironment: "SQL-1_0", + serviceExecutionRole: Token.asString(awsIamRoleExample.arn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKinesisanalyticsv2ApplicationExample.overrideLogicalId("example"); + } +} + +``` + +### VPC Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Kinesisanalyticsv2Application } from "./.gen/providers/aws/kinesisanalyticsv2-application"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example-flink-application", + }); + const awsS3ObjectExample = new S3Object(this, "example_1", { + bucket: example.id, + key: "example-flink-application", + source: "flink-app.jar", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + const awsKinesisanalyticsv2ApplicationExample = + new Kinesisanalyticsv2Application(this, "example_2", { + applicationConfiguration: { + applicationCodeConfiguration: { + codeContent: { + s3ContentLocation: { + bucketArn: example.arn, + fileKey: Token.asString(awsS3ObjectExample.key), + }, + }, + codeContentType: "ZIPFILE", + }, + vpcConfiguration: { + securityGroupIds: [ + Token.asString( + propertyAccess(awsSecurityGroupExample, ["0", "id"]) + ), + Token.asString( + propertyAccess(awsSecurityGroupExample, ["1", "id"]) + ), + ], + subnetIds: [Token.asString(awsSubnetExample.id)], + }, + }, + name: "example-flink-application", + runtimeEnvironment: "FLINK-1_8", + serviceExecutionRole: Token.asString(awsIamRoleExample.arn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKinesisanalyticsv2ApplicationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the application. +* `runtimeEnvironment` - (Required) The runtime environment for the application. Valid values: `sql10`, `flink16`, `flink18`, `flink111`, `flink113`, `flink115`. +* `serviceExecutionRole` - (Required) The ARN of the [IAM role](/docs/providers/aws/r/iam_role.html) used by the application to access Kinesis data streams, Kinesis Data Firehose delivery streams, Amazon S3 objects, and other external resources. +* `applicationConfiguration` - (Optional) The application's configuration +* `cloudwatchLoggingOptions` - (Optional) A [CloudWatch log stream](/docs/providers/aws/r/cloudwatch_log_stream.html) to monitor application configuration errors. +* `description` - (Optional) A summary description of the application. +* `forceStop` - (Optional) Whether to force stop an unresponsive Flink-based application. +* `startApplication` - (Optional) Whether to start or stop the application. +* `tags` - (Optional) A map of tags to assign to the application. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `applicationConfiguration` object supports the following: + +* `applicationCodeConfiguration` - (Required) The code location and type parameters for the application. +* `applicationSnapshotConfiguration` - (Optional) Describes whether snapshots are enabled for a Flink-based application. +* `environmentProperties` - (Optional) Describes execution properties for a Flink-based application. +* `flinkApplicationConfiguration` - (Optional) The configuration of a Flink-based application. +* `runConfiguration` - (Optional) Describes the starting properties for a Flink-based application. +* `sqlApplicationConfiguration` - (Optional) The configuration of a SQL-based application. +* `vpcConfiguration` - (Optional) The VPC configuration of a Flink-based application. + +The `applicationCodeConfiguration` object supports the following: + +* `codeContentType` - (Required) Specifies whether the code content is in text or zip format. Valid values: `plaintext`, `zipfile`. +* `codeContent` - (Optional) The location and type of the application code. 
+ +The `codeContent` object supports the following: + +* `s3ContentLocation` - (Optional) Information about the Amazon S3 bucket containing the application code. +* `textContent` - (Optional) The text-format code for the application. + +The `s3ContentLocation` object supports the following: + +* `bucketArn` - (Required) The ARN for the S3 bucket containing the application code. +* `fileKey` - (Required) The file key for the object containing the application code. +* `objectVersion` - (Optional) The version of the object containing the application code. + +The `applicationSnapshotConfiguration` object supports the following: + +* `snapshotsEnabled` - (Required) Describes whether snapshots are enabled for a Flink-based Kinesis Data Analytics application. + +The `environmentProperties` object supports the following: + +* `propertyGroup` - (Required) Describes the execution property groups. + +The `propertyGroup` object supports the following: + +* `propertyGroupId` - (Required) The key of the application execution property key-value map. +* `propertyMap` - (Required) Application execution property key-value map. + +The `flinkApplicationConfiguration` object supports the following: + +* `checkpointConfiguration` - (Optional) Describes an application's checkpointing configuration. +* `monitoringConfiguration` - (Optional) Describes configuration parameters for CloudWatch logging for an application. +* `parallelismConfiguration` - (Optional) Describes parameters for how an application executes multiple tasks simultaneously. + +The `checkpointConfiguration` object supports the following: + +* `configurationType` - (Required) Describes whether the application uses Kinesis Data Analytics' default checkpointing behavior. Valid values: `custom`, `default`. Set this attribute to `custom` in order for any specified `checkpointingEnabled`, `checkpointInterval`, or `minPauseBetweenCheckpoints` attribute values to be effective. If this attribute is set to `default`, the application will always use the following values: + * `checkpointing_enabled = true` + * `checkpoint_interval = 60000` + * `min_pause_between_checkpoints = 5000` +* `checkpointingEnabled` - (Optional) Describes whether checkpointing is enabled for a Flink-based Kinesis Data Analytics application. +* `checkpointInterval` - (Optional) Describes the interval in milliseconds between checkpoint operations. +* `minPauseBetweenCheckpoints` - (Optional) Describes the minimum time in milliseconds after a checkpoint operation completes that a new checkpoint operation can start. + +The `monitoringConfiguration` object supports the following: + +* `configurationType` - (Required) Describes whether to use the default CloudWatch logging configuration for an application. Valid values: `custom`, `default`. Set this attribute to `custom` in order for any specified `logLevel` or `metricsLevel` attribute values to be effective. +* `logLevel` - (Optional) Describes the verbosity of the CloudWatch Logs for an application. Valid values: `debug`, `error`, `info`, `warn`. +* `metricsLevel` - (Optional) Describes the granularity of the CloudWatch Logs for an application. Valid values: `application`, `operator`, `parallelism`, `task`. + +The `parallelismConfiguration` object supports the following: + +* `configurationType` - (Required) Describes whether the application uses the default parallelism for the Kinesis Data Analytics service. Valid values: `custom`, `default`. 
Set this attribute to `custom` in order for any specified `autoScalingEnabled`, `parallelism`, or `parallelismPerKpu` attribute values to be effective.
+* `autoScalingEnabled` - (Optional) Describes whether the Kinesis Data Analytics service can increase the parallelism of the application in response to increased throughput.
+* `parallelism` - (Optional) Describes the initial number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform.
+* `parallelismPerKpu` - (Optional) Describes the number of parallel tasks that a Flink-based Kinesis Data Analytics application can perform per Kinesis Processing Unit (KPU) used by the application.
+
+The `runConfiguration` object supports the following:
+
+* `applicationRestoreConfiguration` - (Optional) The restore behavior of a restarting application.
+* `flinkRunConfiguration` - (Optional) The starting parameters for a Flink-based Kinesis Data Analytics application.
+
+The `applicationRestoreConfiguration` object supports the following:
+
+* `applicationRestoreType` - (Required) Specifies how the application should be restored. Valid values: `restoreFromCustomSnapshot`, `restoreFromLatestSnapshot`, `skipRestoreFromSnapshot`.
+* `snapshotName` - (Optional) The identifier of an existing snapshot of application state to use to restart an application. The application uses this value if `restoreFromCustomSnapshot` is specified for `applicationRestoreType`.
+
+The `flinkRunConfiguration` object supports the following:
+
+* `allowNonRestoredState` - (Optional) When restoring from a snapshot, specifies whether the runtime is allowed to skip a state that cannot be mapped to the new program. Default is `false`.
+
+The `sqlApplicationConfiguration` object supports the following:
+
+* `input` - (Optional) The input stream used by the application.
+* `output` - (Optional) The destination streams used by the application.
+* `referenceDataSource` - (Optional) The reference data source used by the application.
+
+The `input` object supports the following:
+
+* `inputSchema` - (Required) Describes the format of the data in the streaming source, and how each data element maps to corresponding columns in the in-application stream that is being created.
+* `namePrefix` - (Required) The name prefix to use when creating an in-application stream.
+* `inputParallelism` - (Optional) Describes the number of in-application streams to create.
+* `inputProcessingConfiguration` - (Optional) The input processing configuration for the input.
+An input processor transforms records as they are received from the stream, before the application's SQL code executes.
+* `inputStartingPositionConfiguration` - (Optional) The point at which the application starts processing records from the streaming source.
+* `kinesisFirehoseInput` - (Optional) If the streaming source is a [Kinesis Data Firehose delivery stream](/docs/providers/aws/r/kinesis_firehose_delivery_stream.html), identifies the delivery stream's ARN.
+* `kinesisStreamsInput` - (Optional) If the streaming source is a [Kinesis data stream](/docs/providers/aws/r/kinesis_stream.html), identifies the stream's Amazon Resource Name (ARN).
+
+The `inputParallelism` object supports the following:
+
+* `count` - (Optional) The number of in-application streams to create. 
+
+The `inputProcessingConfiguration` object supports the following:
+
+* `inputLambdaProcessor` - (Required) Describes the [Lambda function](/docs/providers/aws/r/lambda_function.html) that is used to preprocess the records in the stream before being processed by your application code.
+
+The `inputLambdaProcessor` object supports the following:
+
+* `resourceArn` - (Required) The ARN of the Lambda function that operates on records in the stream.
+
+The `inputSchema` object supports the following:
+
+* `recordColumn` - (Required) Describes the mapping of each data element in the streaming source to the corresponding column in the in-application stream.
+* `recordFormat` - (Required) Specifies the format of the records on the streaming source.
+* `recordEncoding` - (Optional) Specifies the encoding of the records in the streaming source. For example, `utf8`.
+
+The `recordColumn` object supports the following:
+
+* `name` - (Required) The name of the column that is created in the in-application input stream or reference table.
+* `sqlType` - (Required) The type of column created in the in-application input stream or reference table.
+* `mapping` - (Optional) A reference to the data element in the streaming input or the reference data source.
+
+The `recordFormat` object supports the following:
+
+* `mappingParameters` - (Required) Provides additional mapping information specific to the record format (such as JSON, CSV, or record fields delimited by some delimiter) on the streaming source.
+* `recordFormatType` - (Required) The type of record format. Valid values: `csv`, `json`.
+
+The `mappingParameters` object supports the following:
+
+* `csvMappingParameters` - (Optional) Provides additional mapping information when the record format uses delimiters (for example, CSV).
+* `jsonMappingParameters` - (Optional) Provides additional mapping information when JSON is the record format on the streaming source.
+
+The `csvMappingParameters` object supports the following:
+
+* `recordColumnDelimiter` - (Required) The column delimiter. For example, in a CSV format, a comma (`,`) is the typical column delimiter.
+* `recordRowDelimiter` - (Required) The row delimiter. For example, in a CSV format, `\n` is the typical row delimiter.
+
+The `jsonMappingParameters` object supports the following:
+
+* `recordRowPath` - (Required) The path to the top-level parent that contains the records.
+
+The `inputStartingPositionConfiguration` object supports the following:
+
+~> **NOTE:** To modify an application's starting position, first stop the application by setting `startApplication = false`, then update `inputStartingPosition` and set `startApplication = true`.
+
+* `inputStartingPosition` - (Required) The starting position on the stream. Valid values: `lastStoppedPoint`, `now`, `trimHorizon`.
+
+The `kinesisFirehoseInput` object supports the following:
+
+* `resourceArn` - (Required) The ARN of the delivery stream.
+
+The `kinesisStreamsInput` object supports the following:
+
+* `resourceArn` - (Required) The ARN of the input Kinesis data stream to read.
+
+The `output` object supports the following:
+
+* `destinationSchema` - (Required) Describes the data format when records are written to the destination.
+* `name` - (Required) The name of the in-application stream.
+* `kinesisFirehoseOutput` - (Optional) Identifies a [Kinesis Data Firehose delivery stream](/docs/providers/aws/r/kinesis_firehose_delivery_stream.html) as the destination. 
+* `kinesisStreamsOutput` - (Optional) Identifies a [Kinesis data stream](/docs/providers/aws/r/kinesis_stream.html) as the destination. +* `lambdaOutput` - (Optional) Identifies a [Lambda function](/docs/providers/aws/r/lambda_function.html) as the destination. + +The `destinationSchema` object supports the following: + +* `recordFormatType` - (Required) Specifies the format of the records on the output stream. Valid values: `csv`, `json`. + +The `kinesisFirehoseOutput` object supports the following: + +* `resourceArn` - (Required) The ARN of the destination delivery stream to write to. + +The `kinesisStreamsOutput` object supports the following: + +* `resourceArn` - (Required) The ARN of the destination Kinesis data stream to write to. + +The `lambdaOutput` object supports the following: + +* `resourceArn` - (Required) The ARN of the destination Lambda function to write to. + +The `referenceDataSource` object supports the following: + +* `referenceSchema` - (Required) Describes the format of the data in the streaming source, and how each data element maps to corresponding columns created in the in-application stream. +* `s3ReferenceDataSource` - (Required) Identifies the S3 bucket and object that contains the reference data. +* `tableName` - (Required) The name of the in-application table to create. + +The `referenceSchema` object supports the following: + +* `recordColumn` - (Required) Describes the mapping of each data element in the streaming source to the corresponding column in the in-application stream. +* `recordFormat` - (Required) Specifies the format of the records on the streaming source. +* `recordEncoding` - (Optional) Specifies the encoding of the records in the streaming source. For example, `utf8`. + +The `s3ReferenceDataSource` object supports the following: + +* `bucketArn` - (Required) The ARN of the S3 bucket. +* `fileKey` - (Required) The object key name containing the reference data. + +The `vpcConfiguration` object supports the following: + +* `securityGroupIds` - (Required) The [Security Group](/docs/providers/aws/r/security_group.html) IDs used by the VPC configuration. +* `subnetIds` - (Required) The [Subnet](/docs/providers/aws/r/subnet.html) IDs used by the VPC configuration. + +The `cloudwatchLoggingOptions` object supports the following: + +* `logStreamArn` - (Required) The ARN of the CloudWatch log stream to receive application messages. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The application identifier. +* `arn` - The ARN of the application. +* `createTimestamp` - The current timestamp when the application was created. +* `lastUpdateTimestamp` - The current timestamp when the application was last updated. +* `status` - The status of the application. +* `versionId` - The current application version. Kinesis Data Analytics updates the `versionId` each time the application is updated. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `10M`) +- `update` - (Default `10M`) +- `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsKinesisanalyticsv2Application` using the application ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsKinesisanalyticsv2Application` using the application ARN. For example: + +```console +% terraform import aws_kinesisanalyticsv2_application.example arn:aws:kinesisanalytics:us-west-2:123456789012:application/example-sql-application +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kinesisanalyticsv2_application_snapshot.html.markdown b/website/docs/cdktf/typescript/r/kinesisanalyticsv2_application_snapshot.html.markdown new file mode 100644 index 00000000000..036ea7f8933 --- /dev/null +++ b/website/docs/cdktf/typescript/r/kinesisanalyticsv2_application_snapshot.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Kinesis Analytics V2" +layout: "aws" +page_title: "AWS: aws_kinesisanalyticsv2_application_snapshot" +description: |- + Manages a Kinesis Analytics v2 Application Snapshot. +--- + + + +# Resource: aws_kinesisanalyticsv2_application_snapshot + +Manages a Kinesis Analytics v2 Application Snapshot. +Snapshots are the AWS implementation of [Flink Savepoints](https://ci.apache.org/projects/flink/flink-docs-release-1.11/ops/state/savepoints.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Kinesisanalyticsv2ApplicationSnapshot } from "./.gen/providers/aws/kinesisanalyticsv2-application-snapshot"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Kinesisanalyticsv2ApplicationSnapshot(this, "example", { + applicationName: Token.asString( + awsKinesisanalyticsv2ApplicationExample.name + ), + snapshotName: "example-snapshot", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationName` - (Required) The name of an existing [Kinesis Analytics v2 Application](/docs/providers/aws/r/kinesisanalyticsv2_application.html). Note that the application must be running for a snapshot to be created. +* `snapshotName` - (Required) The name of the application snapshot. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The application snapshot identifier. +* `applicationVersionId` - The current application version ID when the snapshot was created. +* `snapshotCreationTimestamp` - The timestamp of the application snapshot. 
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsKinesisanalyticsv2ApplicationSnapshot` using `applicationName` together with `snapshotName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsKinesisanalyticsv2ApplicationSnapshot` using `applicationName` together with `snapshotName`. For example:
+
+```console
+% terraform import aws_kinesisanalyticsv2_application_snapshot.example example-application/example-snapshot
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kms_alias.html.markdown b/website/docs/cdktf/typescript/r/kms_alias.html.markdown
new file mode 100644
index 00000000000..a1164f5512c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kms_alias.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_alias"
+description: |-
+  Provides a display name for a customer master key.
+---
+
+
+
+# Resource: aws_kms_alias
+
+Provides an alias for a KMS customer master key. The AWS console enforces a 1-to-1 mapping between aliases and keys,
+but the API (and hence Terraform) allows you to create as many aliases as
+the [account limits](http://docs.aws.amazon.com/kms/latest/developerguide/limits.html) allow.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KmsAlias } from "./.gen/providers/aws/kms-alias";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const a = new KmsKey(this, "a", {});
+    const awsKmsAliasA = new KmsAlias(this, "a_1", {
+      name: "alias/my-key-alias",
+      targetKeyId: a.keyId,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKmsAliasA.overrideLogicalId("a");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The display name of the alias. The name must start with the word "alias" followed by a forward slash (`alias/`).
+* `namePrefix` - (Optional) Creates a unique alias beginning with the specified prefix.
+The name must start with the word "alias" followed by a forward slash (`alias/`). Conflicts with `name`.
+* `targetKeyId` - (Required) Identifier for the key the alias refers to. Can be either an ARN or a key ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the key alias.
+* `targetKeyArn` - The Amazon Resource Name (ARN) of the target key identifier.
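+
+-> The following is an editorial sketch (not `cdktf convert` output) illustrating `namePrefix` as an alternative to `name`; the key and prefix values are hypothetical.
+
+```typescript
+// Editorial sketch: namePrefix lets Terraform generate a unique alias
+// name such as alias/my-key-20240101... The prefix must begin with "alias/".
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { KmsAlias } from "./.gen/providers/aws/kms-alias";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class AliasPrefixSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const key = new KmsKey(this, "key", {});
+    new KmsAlias(this, "alias", {
+      namePrefix: "alias/my-key-",
+      targetKeyId: key.keyId,
+    });
+  }
+}
+```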
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS aliases using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import KMS aliases using the `name`. For example:
+
+```console
+% terraform import aws_kms_alias.a alias/my-key-alias
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kms_ciphertext.html.markdown b/website/docs/cdktf/typescript/r/kms_ciphertext.html.markdown
new file mode 100644
index 00000000000..460404fb75b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kms_ciphertext.html.markdown
@@ -0,0 +1,64 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_ciphertext"
+description: |-
+  Provides ciphertext encrypted using a KMS key
+---
+
+
+
+# Resource: aws_kms_ciphertext
+
+The KMS ciphertext resource allows you to encrypt plaintext into ciphertext
+by using an AWS KMS customer master key. The value returned by this resource
+is stable across every apply. For a ciphertext value that changes on each apply, see
+the [`awsKmsCiphertext` data source](/docs/providers/aws/d/kms_ciphertext.html).
+
+~> **Note:** All arguments including the plaintext will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KmsCiphertext } from "./.gen/providers/aws/kms-ciphertext";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const oauthConfig = new KmsKey(this, "oauth_config", {
+      description: "oauth config",
+      isEnabled: true,
+    });
+    new KmsCiphertext(this, "oauth", {
+      keyId: oauthConfig.keyId,
+      plaintext:
+        '{\n "client_id": "e587dbae22222f55da22",\n "client_secret": "8289575d00000ace55e1815ec13673955721b8a5"\n}\n\n',
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `plaintext` - (Required) Data to be encrypted. Note that this may show up in logs, and it will be stored in the state file.
+* `keyId` - (Required) Globally unique key ID for the customer master key.
+* `context` - (Optional) An optional mapping that makes up the encryption context.
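+
+-> The following is an editorial sketch (not `cdktf convert` output) showing the optional `context` argument; the key and values are hypothetical. The same encryption context map must be supplied again when the ciphertext is decrypted.
+
+```typescript
+// Editorial sketch: attaching an encryption context to the ciphertext.
+// The context is not secret, but it must match exactly on decryption.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { KmsCiphertext } from "./.gen/providers/aws/kms-ciphertext";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class CiphertextContextSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const key = new KmsKey(this, "key", { description: "example" });
+    new KmsCiphertext(this, "secret", {
+      keyId: key.keyId,
+      plaintext: "super-secret-value",
+      context: { purpose: "example" },
+    });
+  }
+}
+```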
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `ciphertextBlob` - Base64 encoded ciphertext + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kms_custom_key_store.html.markdown b/website/docs/cdktf/typescript/r/kms_custom_key_store.html.markdown new file mode 100644 index 00000000000..e36b3f9dc92 --- /dev/null +++ b/website/docs/cdktf/typescript/r/kms_custom_key_store.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "KMS (Key Management)" +layout: "aws" +page_title: "AWS: aws_kms_custom_key_store" +description: |- + Terraform resource for managing an AWS KMS (Key Management) Custom Key Store. +--- + + + +# Resource: aws_kms_custom_key_store + +Terraform resource for managing an AWS KMS (Key Management) Custom Key Store. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KmsCustomKeyStore } from "./.gen/providers/aws/kms-custom-key-store"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KmsCustomKeyStore(this, "test", { + cloudHsmClusterId: cloudHsmClusterId.stringValue, + customKeyStoreName: "kms-custom-key-store-test", + keyStorePassword: "noplaintextpasswords1", + trustAnchorCertificate: Token.asString(Fn.file("anchor-certificate.crt")), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `cloudHsmClusterId` - (Required) Cluster ID of CloudHSM. +* `customKeyStoreName` - (Required) Unique name for Custom Key Store. +* `keyStorePassword` - (Required) Password for `kmsuser` on CloudHSM. +* `trustAnchorCertificate` - (Required) Customer certificate used for signing on CloudHSM. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Custom Key Store ID + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `15M`) +* `update` - (Default `15M`) +* `delete` - (Default `15M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS (Key Management) Custom Key Store using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import KMS (Key Management) Custom Key Store using the `id`. 
For example: + +```console +% terraform import aws_kms_custom_key_store.example cks-5ebd4ef395a96288e +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kms_external_key.html.markdown b/website/docs/cdktf/typescript/r/kms_external_key.html.markdown new file mode 100644 index 00000000000..d6b7c78433d --- /dev/null +++ b/website/docs/cdktf/typescript/r/kms_external_key.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "KMS (Key Management)" +layout: "aws" +page_title: "AWS: aws_kms_external_key" +description: |- + Manages a single-Region or multi-Region primary KMS key that uses external key material. +--- + + + +# Resource: aws_kms_external_key + +Manages a single-Region or multi-Region primary KMS key that uses external key material. +To instead manage a single-Region or multi-Region primary KMS key where AWS automatically generates and potentially rotates key material, see the [`awsKmsKey` resource](/docs/providers/aws/r/kms_key.html). + +~> **Note:** All arguments including the key material will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KmsExternalKey } from "./.gen/providers/aws/kms-external-key"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new KmsExternalKey(this, "example", { + description: "KMS EXTERNAL for AMI encryption", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bypassPolicyLockoutSafetyCheck` - (Optional) Specifies whether to disable the policy lockout check performed when creating or updating the key's policy. Setting this value to `true` increases the risk that the key becomes unmanageable. For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the AWS Key Management Service Developer Guide. Defaults to `false`. +* `deletionWindowInDays` - (Optional) Duration in days after which the key is deleted after destruction of the resource. Must be between `7` and `30` days. Defaults to `30`. +* `description` - (Optional) Description of the key. +* `enabled` - (Optional) Specifies whether the key is enabled. Keys pending import can only be `false`. Imported keys default to `true` unless expired. +* `keyMaterialBase64` - (Optional) Base64 encoded 256-bit symmetric encryption key material to import. The CMK is permanently associated with this key material. The same key material can be reimported, but you cannot import different key material. +* `multiRegion` - (Optional) Indicates whether the KMS key is a multi-Region (`true`) or regional (`false`) key. Defaults to `false`. +* `policy` - (Optional) A key policy JSON document. If you do not provide a key policy, AWS KMS attaches a default key policy to the CMK. +* `tags` - (Optional) A key-value map of tags to assign to the key. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `validTo` - (Optional) Time at which the imported key material expires. When the key material expires, AWS KMS deletes the key material and the CMK becomes unusable. If not specified, key material does not expire. Valid values: [RFC3339 time string](https://tools.ietf.org/html/rfc3339#section-5.8) (`YYYY-MM-DDTHH:MM:SSZ`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the key.
+* `expirationModel` - Whether the key material expires. Empty when pending key material import, otherwise `KEY_MATERIAL_EXPIRES` or `KEY_MATERIAL_DOES_NOT_EXPIRE`.
+* `id` - The unique identifier for the key.
+* `keyState` - The state of the CMK.
+* `keyUsage` - The cryptographic operations for which you can use the CMK.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS External Keys using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import KMS External Keys using the `id`. For example:
+
+```console
+% terraform import aws_kms_external_key.a arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kms_grant.html.markdown b/website/docs/cdktf/typescript/r/kms_grant.html.markdown
new file mode 100644
index 00000000000..1bfedea7c90
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kms_grant.html.markdown
@@ -0,0 +1,121 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_grant"
+description: |-
+  Provides a resource-based access control mechanism for KMS Customer Master Keys.
+---
+
+
+
+# Resource: aws_kms_grant
+
+Provides a resource-based access control mechanism for a KMS customer master key.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { KmsGrant } from "./.gen/providers/aws/kms-grant";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const a = new KmsKey(this, "a", {});
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: Token.asList("lambda.amazonaws.com"),
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const awsIamRoleA = new IamRole(this, "a_2", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "iam-role-for-grant",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleA.overrideLogicalId("a");
+    const awsKmsGrantA = new KmsGrant(this, "a_3", {
+      constraints: [
+        {
+          encryptionContextEquals: {
+            Department: "Finance",
+          },
+        },
+      ],
+      granteePrincipal: Token.asString(awsIamRoleA.arn),
+      keyId: a.keyId,
+      name: "my-grant",
+      operations: ["Encrypt", "Decrypt", "GenerateDataKey"],
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKmsGrantA.overrideLogicalId("a");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resources) A friendly name for identifying the grant.
+* `keyId` - (Required, Forces new resources) The unique identifier for the customer master key (CMK) that the grant applies to. Specify the key ID or the Amazon Resource Name (ARN) of the CMK. To specify a CMK in a different AWS account, you must use the key ARN.
+* `granteePrincipal` - (Required, Forces new resources) The principal that is given permission to perform the operations that the grant permits in ARN format. Note that due to eventual consistency issues around IAM principals, Terraform's state may not always be refreshed to reflect what is true in AWS.
+* `operations` - (Required, Forces new resources) A list of operations that the grant permits. The permitted values are: `Decrypt`, `Encrypt`, `GenerateDataKey`, `GenerateDataKeyWithoutPlaintext`, `ReEncryptFrom`, `ReEncryptTo`, `Sign`, `Verify`, `GetPublicKey`, `CreateGrant`, `RetireGrant`, `DescribeKey`, `GenerateDataKeyPair`, or `GenerateDataKeyPairWithoutPlaintext`.
+* `retiringPrincipal` - (Optional, Forces new resources) The principal that is given permission to retire the grant by using the RetireGrant operation in ARN format. Note that due to eventual consistency issues around IAM principals, Terraform's state may not always be refreshed to reflect what is true in AWS.
+* `constraints` - (Optional, Forces new resources) A structure that you can use to allow certain operations in the grant only when the desired encryption context is present. For more information about encryption context, see [Encryption Context](http://docs.aws.amazon.com/kms/latest/developerguide/encryption-context.html).
+* `grantCreationTokens` - (Optional, Forces new resources) A list of grant tokens to be used when creating the grant. See [Grant Tokens](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#grant_token) for more information about grant tokens.
+
+* `retireOnDelete` - (Defaults to `false`, Forces new resources) If set to `false` (the default), the grants will be revoked upon deletion; if set to `true`, the grants will try to be retired upon deletion. Note that retiring grants requires special permissions, which is why we default to revoking grants.
+  See [RetireGrant](https://docs.aws.amazon.com/kms/latest/APIReference/API_RetireGrant.html) for more information.
+
+The `constraints` block supports the following arguments:
+
+* `encryptionContextEquals` - (Optional) A list of key-value pairs that must match the encryption context in subsequent cryptographic operation requests. The grant allows the operation only when the encryption context in the request is the same as the encryption context specified in this constraint. Conflicts with `encryptionContextSubset`.
+* `encryptionContextSubset` - (Optional) A list of key-value pairs that must be included in the encryption context of subsequent cryptographic operation requests. The grant allows the cryptographic operation only when the encryption context in the request includes the key-value pairs specified in this constraint, although it can include additional key-value pairs. Conflicts with `encryptionContextEquals`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `grantId` - The unique identifier for the grant.
+* `grantToken` - The grant token for the created grant. For more information, see [Grant Tokens](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#grant_token).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS Grants using the Key ID and Grant ID separated by a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import KMS Grants using the Key ID and Grant ID separated by a colon (`:`). For example:
+
+```console
+% terraform import aws_kms_grant.test 1234abcd-12ab-34cd-56ef-1234567890ab:abcde1237f76e4ba7987489ac329fbfba6ad343d6f7075dbd1ef191f0120514
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kms_key.html.markdown b/website/docs/cdktf/typescript/r/kms_key.html.markdown
new file mode 100644
index 00000000000..a788f2003ee
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kms_key.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_key"
+description: |-
+  Manages a single-Region or multi-Region primary KMS key.
+---
+
+
+
+# Resource: aws_kms_key
+
+Manages a single-Region or multi-Region primary KMS key.
+
+~> **NOTE on KMS Key Policy:** KMS Key Policy can be configured in either the standalone resource [`awsKmsKeyPolicy`](kms_key_policy.html)
+or with the parameter `policy` in this resource.
+Configuring with both will cause inconsistencies and may overwrite configuration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new KmsKey(this, "a", {
+      deletionWindowInDays: 10,
+      description: "KMS key 1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) The description of the key as viewed in AWS console.
+* `keyUsage` - (Optional) Specifies the intended use of the key. Valid values: `ENCRYPT_DECRYPT`, `SIGN_VERIFY`, or `GENERATE_VERIFY_MAC`.
+Defaults to `ENCRYPT_DECRYPT`.
+* `customKeyStoreId` - (Optional) ID of the KMS [Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/create-cmk-keystore.html) where the key will be stored instead of KMS (e.g., CloudHSM).
+* `customerMasterKeySpec` - (Optional) Specifies whether the key contains a symmetric key or an asymmetric key pair and the encryption algorithms or signing algorithms that the key supports.
+Valid values: `SYMMETRIC_DEFAULT`, `RSA_2048`, `RSA_3072`, `RSA_4096`, `HMAC_256`, `ECC_NIST_P256`, `ECC_NIST_P384`, `ECC_NIST_P521`, or `ECC_SECG_P256K1`. Defaults to `SYMMETRIC_DEFAULT`. For help with choosing a key spec, see the [AWS KMS Developer Guide](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-choose.html).
+* `policy` - (Optional) A valid policy JSON document. Although this is a key policy, not an IAM policy, an [`awsIamPolicyDocument`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document), in the form that designates a principal, can be used. For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+~> **NOTE:** All KMS keys must have a key policy. If a key policy is not specified, AWS gives the KMS key a [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) that gives all principals in the owning account unlimited access to all KMS operations for the key. This default key policy effectively delegates all access control to IAM policies and KMS grants.
+
+* `bypassPolicyLockoutSafetyCheck` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check.
+Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately.
+For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_.
+The default value is `false`.
+* `deletionWindowInDays` - (Optional) The waiting period, specified in number of days. After the waiting period ends, AWS KMS deletes the KMS key.
+If you specify a value, it must be between `7` and `30`, inclusive. If you do not specify a value, it defaults to `30`.
+If the KMS key is a multi-Region primary key with replicas, the waiting period begins when the last of its replica keys is deleted. Otherwise, the waiting period begins immediately.
+* `isEnabled` - (Optional) Specifies whether the key is enabled. Defaults to `true`.
+* `enableKeyRotation` - (Optional) Specifies whether [key rotation](http://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html) is enabled. Defaults to `false`.
+* `multiRegion` - (Optional) Indicates whether the KMS key is a multi-Region (`true`) or regional (`false`) key. Defaults to `false`. +* `tags` - (Optional) A map of tags to assign to the object. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the key. +* `keyId` - The globally unique identifier for the key. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS Keys using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import KMS Keys using the `id`. For example: + +```console +% terraform import aws_kms_key.a 1234abcd-12ab-34cd-56ef-1234567890ab +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/kms_key_policy.html.markdown b/website/docs/cdktf/typescript/r/kms_key_policy.html.markdown new file mode 100644 index 00000000000..c5b56626993 --- /dev/null +++ b/website/docs/cdktf/typescript/r/kms_key_policy.html.markdown @@ -0,0 +1,99 @@ +--- +subcategory: "KMS (Key Management)" +layout: "aws" +page_title: "AWS: aws_kms_key_policy" +description: |- + Attaches a policy to a KMS Key. +--- + + + +# Resource: aws_kms_key_policy + +Attaches a policy to a KMS Key. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { KmsKeyPolicy } from "./.gen/providers/aws/kms-key-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new KmsKey(this, "example", { + description: "example", + }); + const awsKmsKeyPolicyExample = new KmsKeyPolicy(this, "example_1", { + keyId: example.id, + policy: Token.asString( + Fn.jsonencode({ + Id: "example", + Statement: [ + { + Action: "kms:*", + Effect: "Allow", + Principal: { + AWS: "*", + }, + Resource: "*", + Sid: "Enable IAM User Permissions", + }, + ], + Version: "2012-10-17", + }) + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKmsKeyPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `keyId` - (Required) The ID of the KMS Key to attach the policy. +* `policy` - (Required) A valid policy JSON document. 
Although this is a key policy, not an IAM policy, an [`awsIamPolicyDocument`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document), in the form that designates a principal, can be used. For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+~> **NOTE:** All KMS keys must have a key policy. If a key policy is not specified, or this resource is destroyed, AWS gives the KMS key a [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) that gives all principals in the owning account unlimited access to all KMS operations for the key. This default key policy effectively delegates all access control to IAM policies and KMS grants.
+
+* `bypassPolicyLockoutSafetyCheck` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check.
+Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately. If this value is set, and the resource is destroyed, a warning will be shown, and the resource will be removed from state.
+For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS Key Policies using the `keyId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import KMS Key Policies using the `keyId`. For example:
+
+```console
+% terraform import aws_kms_key_policy.a 1234abcd-12ab-34cd-56ef-1234567890ab
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kms_replica_external_key.html.markdown b/website/docs/cdktf/typescript/r/kms_replica_external_key.html.markdown
new file mode 100644
index 00000000000..f85e2880cfb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kms_replica_external_key.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_replica_external_key"
+description: |-
+  Manages a KMS multi-Region replica key that uses external key material.
+---
+
+
+
+# Resource: aws_kms_replica_external_key
+
+Manages a KMS multi-Region replica key that uses external key material.
+See the [AWS KMS Developer Guide](https://docs.aws.amazon.com/kms/latest/developerguide/multi-region-keys-import.html) for more information on importing key material into multi-Region keys.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KmsExternalKey } from "./.gen/providers/aws/kms-external-key";
+import { KmsReplicaExternalKey } from "./.gen/providers/aws/kms-replica-external-key";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const primary = new AwsProvider(this, "aws", {
+      alias: "primary",
+      region: "us-east-1",
+    });
+    new AwsProvider(this, "aws_1", {
+      region: "us-west-2",
+    });
+    const awsKmsExternalKeyPrimary = new KmsExternalKey(this, "primary", {
+      deletionWindowInDays: 30,
+      description: "Multi-Region primary key",
+      enabled: true,
+      keyMaterialBase64: "...",
+      multiRegion: true,
+      provider: primary,
+    });
+    new KmsReplicaExternalKey(this, "replica", {
+      deletionWindowInDays: 7,
+      description: "Multi-Region replica key",
+      keyMaterialBase64: "...",
+      primaryKeyArn: Token.asString(awsKmsExternalKeyPrimary.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bypassPolicyLockoutSafetyCheck` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check.
+Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately.
+For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_.
+The default value is `false`.
+* `deletionWindowInDays` - (Optional) The waiting period, specified in number of days. After the waiting period ends, AWS KMS deletes the KMS key.
+If you specify a value, it must be between `7` and `30`, inclusive. If you do not specify a value, it defaults to `30`.
+* `description` - (Optional) A description of the KMS key.
+* `enabled` - (Optional) Specifies whether the replica key is enabled. Disabled KMS keys cannot be used in cryptographic operations. Keys pending import can only be `false`. Imported keys default to `true` unless expired.
+* `keyMaterialBase64` - (Optional) Base64 encoded 256-bit symmetric encryption key material to import. The KMS key is permanently associated with this key material. The same key material can be [reimported](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#reimport-key-material), but you cannot import different key material.
+* `policy` - (Optional) The key policy to attach to the KMS key. If you do not specify a key policy, AWS KMS attaches the [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) to the KMS key.
+For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+* `primaryKeyArn` - (Required) The ARN of the multi-Region primary key to replicate. The primary key must be in a different AWS Region of the same AWS Partition. You can create only one replica of a given primary key in each AWS Region.
+* `tags` - (Optional) A map of tags to assign to the replica key. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `validTo` - (Optional) Time at which the imported key material expires.
When the key material expires, AWS KMS deletes the key material and the key becomes unusable. If not specified, key material does not expire. Valid values: [RFC3339 time string](https://tools.ietf.org/html/rfc3339#section-5.8) (`YYYY-MM-DDTHH:MM:SSZ`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the replica key. The key ARNs of related multi-Region keys differ only in the Region value.
+* `expirationModel` - Whether the key material expires. Empty when pending key material import, otherwise `KEY_MATERIAL_EXPIRES` or `KEY_MATERIAL_DOES_NOT_EXPIRE`.
+* `keyId` - The key ID of the replica key. Related multi-Region keys have the same key ID.
+* `keyState` - The state of the replica key.
+* `keyUsage` - The [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations) for which you can use the KMS key. This is a shared property of multi-Region keys.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS multi-Region replica keys using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import KMS multi-Region replica keys using the `id`. For example:
+
+```console
+% terraform import aws_kms_replica_external_key.example 1234abcd-12ab-34cd-56ef-1234567890ab
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/kms_replica_key.html.markdown b/website/docs/cdktf/typescript/r/kms_replica_key.html.markdown
new file mode 100644
index 00000000000..5c91c1d1a60
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/kms_replica_key.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "KMS (Key Management)"
+layout: "aws"
+page_title: "AWS: aws_kms_replica_key"
+description: |-
+  Manages a KMS multi-Region replica key.
+---
+
+
+
+# Resource: aws_kms_replica_key
+
+Manages a KMS multi-Region replica key.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { KmsReplicaKey } from "./.gen/providers/aws/kms-replica-key"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new AwsProvider(this, "aws", { + alias: "primary", + region: "us-east-1", + }); + new AwsProvider(this, "aws_1", { + region: "us-west-2", + }); + const awsKmsKeyPrimary = new KmsKey(this, "primary", { + deletionWindowInDays: 30, + description: "Multi-Region primary key", + multiRegion: true, + provider: primary, + }); + new KmsReplicaKey(this, "replica", { + deletionWindowInDays: 7, + description: "Multi-Region replica key", + primaryKeyArn: Token.asString(awsKmsKeyPrimary.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bypassPolicyLockoutSafetyCheck` - (Optional) A flag to indicate whether to bypass the key policy lockout safety check. +Setting this value to true increases the risk that the KMS key becomes unmanageable. Do not set this value to true indiscriminately. +For more information, refer to the scenario in the [Default Key Policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default-allow-root-enable-iam) section in the _AWS Key Management Service Developer Guide_. +The default value is `false`. +* `deletionWindowInDays` - (Optional) The waiting period, specified in number of days. After the waiting period ends, AWS KMS deletes the KMS key. +If you specify a value, it must be between `7` and `30`, inclusive. If you do not specify a value, it defaults to `30`. +* `description` - (Optional) A description of the KMS key. +* `enabled` - (Optional) Specifies whether the replica key is enabled. Disabled KMS keys cannot be used in cryptographic operations. The default value is `true`. +* `policy` - (Optional) The key policy to attach to the KMS key. If you do not specify a key policy, AWS KMS attaches the [default key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html#key-policy-default) to the KMS key. +For more information about building policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). +* `primaryKeyArn` - (Required) The ARN of the multi-Region primary key to replicate. The primary key must be in a different AWS Region of the same AWS Partition. You can create only one replica of a given primary key in each AWS Region. +* `tags` - (Optional) A map of tags to assign to the replica key. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the replica key. The key ARNs of related multi-Region keys differ only in the Region value. +* `keyId` - The key ID of the replica key. Related multi-Region keys have the same key ID. +* `keyRotationEnabled` - A Boolean value that specifies whether key rotation is enabled. This is a shared property of multi-Region keys. +* `keySpec` - The type of key material in the KMS key. This is a shared property of multi-Region keys. 
+* `keyUsage` - The [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations) for which you can use the KMS key. This is a shared property of multi-Region keys. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import KMS multi-Region replica keys using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import KMS multi-Region replica keys using the `id`. For example: + +```console +% terraform import aws_kms_replica_key.example 1234abcd-12ab-34cd-56ef-1234567890ab +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lakeformation_data_lake_settings.html.markdown b/website/docs/cdktf/typescript/r/lakeformation_data_lake_settings.html.markdown new file mode 100644 index 00000000000..de7192c2535 --- /dev/null +++ b/website/docs/cdktf/typescript/r/lakeformation_data_lake_settings.html.markdown @@ -0,0 +1,148 @@ +--- +subcategory: "Lake Formation" +layout: "aws" +page_title: "AWS: aws_lakeformation_data_lake_settings" +description: |- + Manages data lake administrators and default database and table permissions +--- + + + +# Resource: aws_lakeformation_data_lake_settings + +Manages Lake Formation principals designated as data lake administrators and lists of principal permission entries for default create database and default create table permissions. + +~> **NOTE:** Lake Formation introduces fine-grained access control for data in your data lake. Part of the changes include the `iamAllowedPrincipals` principal in order to make Lake Formation backwards compatible with existing IAM and Glue permissions. For more information, see [Changing the Default Security Settings for Your Data Lake](https://docs.aws.amazon.com/lake-formation/latest/dg/change-settings.html) and [Upgrading AWS Glue Data Permissions to the AWS Lake Formation Model](https://docs.aws.amazon.com/lake-formation/latest/dg/upgrade-glue-lake-formation.html). + +## Example Usage + +### Data Lake Admins + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { LakeformationDataLakeSettings } from "./.gen/providers/aws/lakeformation-data-lake-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LakeformationDataLakeSettings(this, "example", { + admins: [test.arn, Token.asString(awsIamRoleTest.arn)], + }); + } +} + +``` + +### Create Default Permissions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LakeformationDataLakeSettings } from "./.gen/providers/aws/lakeformation-data-lake-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LakeformationDataLakeSettings(this, "example", { + admins: [test.arn, Token.asString(awsIamRoleTest.arn)], + createDatabaseDefaultPermissions: [ + { + permissions: ["SELECT", "ALTER", "DROP"], + principal: test.arn, + }, + ], + createTableDefaultPermissions: [ + { + permissions: ["ALL"], + principal: Token.asString(awsIamRoleTest.arn), + }, + ], + }); + } +} + +``` + +### Enable EMR access to LakeFormation resources + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LakeformationDataLakeSettings } from "./.gen/providers/aws/lakeformation-data-lake-settings"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LakeformationDataLakeSettings(this, "example", { + admins: [test.arn, Token.asString(awsIamRoleTest.arn)], + allowExternalDataFiltering: true, + authorizedSessionTagValueList: ["Amazon EMR"], + createDatabaseDefaultPermissions: [ + { + permissions: ["SELECT", "ALTER", "DROP"], + principal: test.arn, + }, + ], + createTableDefaultPermissions: [ + { + permissions: ["ALL"], + principal: Token.asString(awsIamRoleTest.arn), + }, + ], + externalDataFilteringAllowList: [ + Token.asString(current.accountId), + Token.asString(thirdParty.accountId), + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are optional: + +* `admins` – (Optional) Set of ARNs of AWS Lake Formation principals (IAM users or roles). +* `catalogId` – (Optional) Identifier for the Data Catalog. By default, the account ID. +* `createDatabaseDefaultPermissions` - (Optional) Up to three configuration blocks of principal permissions for default create database permissions. Detailed below. +* `createTableDefaultPermissions` - (Optional) Up to three configuration blocks of principal permissions for default create table permissions. Detailed below. +* `trustedResourceOwners` – (Optional) List of the resource-owning account IDs that the caller's account can use to share their user access details (user ARNs). +* `allowExternalDataFiltering` - (Optional) Whether to allow Amazon EMR clusters to access data managed by Lake Formation. +* `externalDataFilteringAllowList` - (Optional) A list of the account IDs of Amazon Web Services accounts with Amazon EMR clusters that are to perform data filtering. 
+
+* `authorizedSessionTagValueList` - (Optional) Lake Formation relies on a privileged process secured by Amazon EMR or the third-party integrator to tag the user's role while assuming it.
+
+~> **NOTE:** Although optional, not including `admins`, `createDatabaseDefaultPermissions`, `createTableDefaultPermissions`, and/or `trustedResourceOwners` results in the setting being cleared.
+
+### create_database_default_permissions
+
+The following arguments are optional:
+
+* `permissions` - (Optional) List of permissions that are granted to the principal. Valid values may include `ALL`, `SELECT`, `ALTER`, `DROP`, `DELETE`, `INSERT`, `DESCRIBE`, and `CREATE_TABLE`. For more details, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+* `principal` - (Optional) Principal who is granted permissions. To enforce metadata and underlying data access control only by IAM on new databases and tables set `principal` to `IAM_ALLOWED_PRINCIPALS` and `permissions` to `["ALL"]`.
+
+### create_table_default_permissions
+
+The following arguments are optional:
+
+* `permissions` - (Optional) List of permissions that are granted to the principal. Valid values may include `ALL`, `SELECT`, `ALTER`, `DROP`, `DELETE`, `INSERT`, and `DESCRIBE`. For more details, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+* `principal` - (Optional) Principal who is granted permissions. To enforce metadata and underlying data access control only by IAM on new databases and tables set `principal` to `IAM_ALLOWED_PRINCIPALS` and `permissions` to `["ALL"]`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lakeformation_lf_tag.html.markdown b/website/docs/cdktf/typescript/r/lakeformation_lf_tag.html.markdown
new file mode 100644
index 00000000000..20354eefceb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lakeformation_lf_tag.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_lf_tag"
+description: |-
+  Creates a tag with the specified name and values.
+---
+
+
+
+# Resource: aws_lakeformation_lf_tag
+
+Creates an LF-Tag with the specified name and values. Each key must have at least one value. The maximum number of values permitted is 15.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LakeformationLfTag } from "./.gen/providers/aws/lakeformation-lf-tag";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationLfTag(this, "example", {
+      key: "module",
+      values: ["Orders", "Sales", "Customers"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `catalogId` - (Optional) ID of the Data Catalog to create the tag in. If omitted, this defaults to the AWS Account ID.
+* `key` - (Required) Key-name for the tag.
+* `values` - (Required) List of possible values an attribute can take.
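+
+-> The following is an editorial sketch (not `cdktf convert` output) showing an explicit `catalogId` derived from the caller's account; the tag key and values are hypothetical.
+
+```typescript
+// Editorial sketch: pinning the LF-Tag to the caller's Data Catalog.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { LakeformationLfTag } from "./.gen/providers/aws/lakeformation-lf-tag";
+class LfTagSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    new LakeformationLfTag(this, "environment", {
+      catalogId: Token.asString(current.accountId),
+      key: "environment",
+      values: ["dev", "staging", "production"],
+    });
+  }
+}
+```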
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Catalog ID and key-name of the tag.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lake Formation LF-Tags using the `catalogId:key`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Lake Formation LF-Tags using the `catalogId:key`. If you have not set a Catalog ID, specify the AWS Account ID that the database is in. For example:
+
+```console
+% terraform import aws_lakeformation_lf_tag.example 123456789012:some_key
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lakeformation_permissions.html.markdown b/website/docs/cdktf/typescript/r/lakeformation_permissions.html.markdown
new file mode 100644
index 00000000000..ee8d412c508
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lakeformation_permissions.html.markdown
@@ -0,0 +1,334 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_permissions"
+description: |-
+  Grants permissions to the principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3.
+---
+
+
+
+# Resource: aws_lakeformation_permissions
+
+Grants permissions to the principal to access metadata in the Data Catalog and data organized in underlying data storage such as Amazon S3. Permissions are granted to a principal, in a Data Catalog, relative to a Lake Formation resource, which includes the Data Catalog, databases, tables, LF-tags, and LF-tag policies. For more information, see [Security and Access Control to Metadata and Data in Lake Formation](https://docs.aws.amazon.com/lake-formation/latest/dg/security-data-access.html).
+
+!> **WARNING:** Lake Formation permissions are not in effect by default within AWS. Using this resource will not secure your data and will result in errors if you do not change the security settings for existing resources and the default security settings for new resources. See [Default Behavior and `IAMAllowedPrincipals`](#default-behavior-and-iamallowedprincipals) for additional details.
+
+~> **NOTE:** In general, the `principal` should _NOT_ be a Lake Formation administrator or the entity (e.g., IAM role) that is running Terraform. Administrators have implicit permissions. These should be managed by granting or not granting administrator rights using `awsLakeformationDataLakeSettings`, _not_ with this resource.
+
+## Default Behavior and `IAMAllowedPrincipals`
+
+**_Lake Formation permissions are not in effect by default within AWS._** `IAMAllowedPrincipals` (i.e., the `IAM_ALLOWED_PRINCIPALS` principal) conflicts with individual Lake Formation permissions (i.e., non-`IAMAllowedPrincipals` permissions), will cause unexpected behavior, and may result in errors.
+
+When using Lake Formation, choose ONE of the following options as they are mutually exclusive:
+
+1. 
Use this resource (`awsLakeformationPermissions`), change the default security settings using [`awsLakeformationDataLakeSettings`](/docs/providers/aws/r/lakeformation_data_lake_settings.html), and remove existing `IAMAllowedPrincipals` permissions
+2. Use `IAMAllowedPrincipals` without `awsLakeformationPermissions`
+
+This example shows removing the `IAMAllowedPrincipals` default security settings and making the caller a Lake Formation admin. Since `createDatabaseDefaultPermissions` and `createTableDefaultPermissions` are not set in the [`awsLakeformationDataLakeSettings`](/docs/providers/aws/r/lakeformation_data_lake_settings.html) resource, they are cleared.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamSessionContext } from "./.gen/providers/aws/data-aws-iam-session-context";
+import { LakeformationDataLakeSettings } from "./.gen/providers/aws/lakeformation-data-lake-settings";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsIamSessionContextCurrent = new DataAwsIamSessionContext(
+      this,
+      "current_1",
+      {
+        arn: Token.asString(current.arn),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamSessionContextCurrent.overrideLogicalId("current");
+    new LakeformationDataLakeSettings(this, "test", {
+      admins: [Token.asString(dataAwsIamSessionContextCurrent.issuerArn)],
+    });
+  }
+}
+
+```
+
+To remove existing `IAMAllowedPrincipals` permissions, use the [AWS Lake Formation Console](https://console.aws.amazon.com/lakeformation/) or [AWS CLI](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lakeformation/batch-revoke-permissions.html).
+
+`IAMAllowedPrincipals` is a hook to maintain backwards compatibility with AWS Glue. `IAMAllowedPrincipals` is a pseudo-entity group that acts like a Lake Formation principal. The group includes any IAM users and roles that are allowed access to your Data Catalog resources by your IAM policies.
+
+This is Lake Formation's default behavior:
+
+* Lake Formation grants `Super` permission to `IAMAllowedPrincipals` on all existing AWS Glue Data Catalog resources.
+* Lake Formation enables "Use only IAM access control" for new Data Catalog resources.
+
+For more details, see [Changing the Default Security Settings for Your Data Lake](https://docs.aws.amazon.com/lake-formation/latest/dg/change-settings.html).
+
+### Problem Using `IAMAllowedPrincipals`
+
+AWS does not support combining `IAMAllowedPrincipals` permissions and non-`IAMAllowedPrincipals` permissions. Doing so results in unexpected permissions and behaviors. For example, this configuration grants a user `SELECT` on a column in a table.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { GlueCatalogDatabase } from "./.gen/providers/aws/glue-catalog-database";
+import { GlueCatalogTable } from "./.gen/providers/aws/glue-catalog-table";
+import { LakeformationPermissions } from "./.gen/providers/aws/lakeformation-permissions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Assign the database to a constant so the table below can reference it.
+    const example = new GlueCatalogDatabase(this, "example", {
+      name: "sadabate",
+    });
+    const awsGlueCatalogTableExample = new GlueCatalogTable(this, "example_1", {
+      databaseName: example.name,
+      name: "abelt",
+      storageDescriptor: {
+        columns: [
+          {
+            name: "event",
+            type: "string",
+          },
+        ],
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsGlueCatalogTableExample.overrideLogicalId("example");
+    const awsLakeformationPermissionsExample = new LakeformationPermissions(
+      this,
+      "example_2",
+      {
+        permissions: ["SELECT"],
+        principal: "arn:aws:iam::123456789012:user/SanHolo",
+        tableWithColumns: {
+          columnNames: ["event"],
+          databaseName: Token.asString(awsGlueCatalogTableExample.databaseName),
+          name: Token.asString(awsGlueCatalogTableExample.name),
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLakeformationPermissionsExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+The resulting permissions depend on whether the table had `iamAllowedPrincipals` (IAP) permissions or not.
+
+| Result With IAP | Result Without IAP |
+| ---- | ---- |
+| `SELECT` column wildcard (i.e., all columns) | `SELECT` on `"event"` (as expected) |
+
+## Using Lake Formation Permissions
+
+Lake Formation grants implicit permissions to data lake administrators, database creators, and table creators. These implicit permissions cannot be revoked _per se_. If this resource reads implicit permissions, it will attempt to revoke them, which causes an error when the resource is destroyed.
+
+There are two ways to avoid these errors. First, and the way we recommend, is to avoid using this resource with principals that have implicit permissions. A second, error-prone option is to grant explicit permissions (and `permissionsWithGrantOption`) to "overwrite" a principal's implicit permissions, which you can then revoke with this resource. For more information, see [Implicit Lake Formation Permissions](https://docs.aws.amazon.com/lake-formation/latest/dg/implicit-permissions.html).
+
+If the `principal` is also a data lake administrator, AWS grants implicit permissions that can cause errors using this resource. For example, AWS implicitly grants a `principal`/administrator `permissions` and `permissionsWithGrantOption` of `ALL`, `ALTER`, `DELETE`, `DESCRIBE`, `DROP`, `INSERT`, and `SELECT` on a table. If you use this resource to explicitly grant the `principal`/administrator `permissions` but _not_ `permissionsWithGrantOption` of `ALL`, `ALTER`, `DELETE`, `DESCRIBE`, `DROP`, `INSERT`, and `SELECT` on the table, this resource will read the implicit `permissionsWithGrantOption` and attempt to revoke them when the resource is destroyed. Doing so will cause an `InvalidInputException: No permissions revoked` error because you cannot revoke implicit permissions _per se_. To work around this problem, explicitly grant the `principal`/administrator `permissions` _and_ `permissionsWithGrantOption`, which can then be revoked. 
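+
+As a hypothetical sketch of that workaround (not generated by `cdktf convert`), the following grants both lists explicitly; `adminRole` and the database/table names are assumptions standing in for resources defined elsewhere:
+
+```typescript
+// Hypothetical sketch: grant both permissions and permissionsWithGrantOption
+// so that this resource can cleanly revoke them on destroy.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LakeformationPermissions } from "./.gen/providers/aws/lakeformation-permissions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationPermissions(this, "admin_grant", {
+      permissions: ["ALL", "ALTER", "DELETE", "DESCRIBE", "DROP", "INSERT", "SELECT"],
+      permissionsWithGrantOption: ["ALL", "ALTER", "DELETE", "DESCRIBE", "DROP", "INSERT", "SELECT"],
+      // adminRole is an assumed IAM role (defined elsewhere) that is also a
+      // data lake administrator.
+      principal: adminRole.arn,
+      table: {
+        databaseName: "example_database",
+        name: "example_table",
+      },
+    });
+  }
+}
+```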
Similarly, granting a `principal`/administrator permissions on a table with columns and providing `columnNames` will result in an `InvalidInputException: Permissions modification is invalid` error because you are narrowing the implicit permissions. Instead, set `wildcard` to `true` and remove the `columnNames`.
+
+## Example Usage
+
+### Grant Permissions For A Lake Formation S3 Resource
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LakeformationPermissions } from "./.gen/providers/aws/lakeformation-permissions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationPermissions(this, "example", {
+      dataLocation: {
+        arn: Token.asString(awsLakeformationResourceExample.arn),
+      },
+      permissions: ["ALL"],
+      principal: workflowRole.arn,
+    });
+  }
+}
+
+```
+
+### Grant Permissions For A Glue Catalog Database
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LakeformationPermissions } from "./.gen/providers/aws/lakeformation-permissions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationPermissions(this, "example", {
+      database: {
+        catalogId: "110376042874",
+        name: Token.asString(awsGlueCatalogDatabaseExample.name),
+      },
+      permissions: ["CREATE_TABLE", "ALTER", "DROP"],
+      principal: workflowRole.arn,
+    });
+  }
+}
+
+```
+
+### Grant Permissions Using Tag-Based Access Control
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LakeformationPermissions } from "./.gen/providers/aws/lakeformation-permissions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationPermissions(this, "test", {
+      lfTagPolicy: {
+        expression: [
+          {
+            key: "Team",
+            values: ["Sales"],
+          },
+          {
+            key: "Environment",
+            values: ["Dev", "Production"],
+          },
+        ],
+        resourceType: "DATABASE",
+      },
+      permissions: ["CREATE_TABLE", "ALTER", "DROP"],
+      principal: salesRole.arn,
+    });
+  }
+}
+
+```
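+
+### Grant Permissions For The Data Catalog
+
+The following is an illustrative sketch rather than generated output. It uses the `catalogResource` argument (documented below) to grant database-creation rights at the level of the Data Catalog itself, reusing the assumed `workflowRole` from the examples above:
+
+```typescript
+// Hypothetical sketch: grant CREATE_DATABASE on the account's Data Catalog.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LakeformationPermissions } from "./.gen/providers/aws/lakeformation-permissions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationPermissions(this, "example", {
+      catalogResource: true,
+      permissions: ["CREATE_DATABASE"],
+      // workflowRole is assumed to be an IAM role defined elsewhere.
+      principal: workflowRole.arn,
+    });
+  }
+}
+```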
+
+## Argument Reference
+
+The following arguments are required:
+
+* `permissions` – (Required) List of permissions granted to the principal. Valid values may include `ALL`, `ALTER`, `ASSOCIATE`, `CREATE_DATABASE`, `CREATE_TABLE`, `DATA_LOCATION_ACCESS`, `DELETE`, `DESCRIBE`, `DROP`, `INSERT`, and `SELECT`. For details on each permission, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+* `principal` – (Required) Principal to be granted the permissions on the resource. Supported principals include `iamAllowedPrincipals` (see [Default Behavior and `iamAllowedPrincipals`](#default-behavior-and-iamallowedprincipals) above), IAM roles, users, groups, SAML groups and users, QuickSight groups, OUs, and organizations as well as AWS account IDs for cross-account permissions. For more information, see [Lake Formation Permissions Reference](https://docs.aws.amazon.com/lake-formation/latest/dg/lf-permissions-reference.html).
+
+~> **NOTE:** We highly recommend that the `principal` _NOT_ be a Lake Formation administrator (granted using `awsLakeformationDataLakeSettings`). The entity (e.g., IAM role) running Terraform will most likely need to be a Lake Formation administrator. As such, the entity will have implicit permissions and does not need permissions granted through this resource.
+
+One of the following is required:
+
+* `catalogResource` - (Optional) Whether the permissions are to be granted for the Data Catalog. Defaults to `false`.
+* `dataLocation` - (Optional) Configuration block for a data location resource. Detailed below.
+* `database` - (Optional) Configuration block for a database resource. Detailed below.
+* `lfTag` - (Optional) Configuration block for an LF-tag resource. Detailed below.
+* `lfTagPolicy` - (Optional) Configuration block for an LF-tag policy resource. Detailed below.
+* `table` - (Optional) Configuration block for a table resource. Detailed below.
+* `tableWithColumns` - (Optional) Configuration block for a table with columns resource. Detailed below.
+
+The following arguments are optional:
+
+* `catalogId` – (Optional) Identifier for the Data Catalog. By default, the account ID. The Data Catalog is the persistent metadata store. It contains database definitions, table definitions, and other control information to manage your Lake Formation environment.
+* `permissionsWithGrantOption` - (Optional) Subset of `permissions` which the principal can pass.
+
+### data_location
+
+The following argument is required:
+
+* `arn` – (Required) Amazon Resource Name (ARN) that uniquely identifies the data location resource.
+
+The following argument is optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog where the location is registered with Lake Formation. By default, it is the account ID of the caller.
+
+### database
+
+The following argument is required:
+
+* `name` – (Required) Name of the database resource. Unique to the Data Catalog.
+
+The following argument is optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### lf_tag
+
+The following arguments are required:
+
+* `key` – (Required) The key-name for the tag.
+* `values` - (Required) A list of possible values an attribute can take.
+
+The following argument is optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### lf_tag_policy
+
+The following arguments are required:
+
+* `resourceType` – (Required) The resource type for which the tag policy applies. Valid values are `DATABASE` and `TABLE`.
+* `expression` - (Required) A list of tag condition configuration blocks that apply to the resource's tag policy. See [`expression`](#expression) below.
+
+The following argument is optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+#### expression
+
+* `key` – (Required) The key-name of an LF-Tag. 
+* `values` - (Required) A list of possible values of an LF-Tag.
+
+### table
+
+The following arguments are required:
+
+* `databaseName` – (Required) Name of the database for the table. Unique to a Data Catalog.
+* `name` - (Required, at least one of `name` or `wildcard`) Name of the table.
+* `wildcard` - (Required, at least one of `name` or `wildcard`) Whether to use a wildcard representing every table under a database. Defaults to `false`.
+
+The following arguments are optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### table_with_columns
+
+The following arguments are required:
+
+* `columnNames` - (Required, at least one of `columnNames` or `wildcard`) Set of column names for the table.
+* `databaseName` – (Required) Name of the database for the table with columns resource. Unique to the Data Catalog.
+* `name` – (Required) Name of the table resource.
+* `wildcard` - (Required, at least one of `columnNames` or `wildcard`) Whether to use a column wildcard. If `excludedColumnNames` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference. See the sketch after this list.
+
+The following arguments are optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+* `excludedColumnNames` - (Optional) Set of column names for the table to exclude. If `excludedColumnNames` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference.
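+
+The interaction between `wildcard` and `excludedColumnNames` is easiest to see in code. The following is a hypothetical sketch (not generated by `cdktf convert`); `workflowRole` and the database/table/column names are assumptions:
+
+```typescript
+// Hypothetical sketch: grant SELECT on every column except "ssn".
+// wildcard must be true whenever excludedColumnNames is set.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LakeformationPermissions } from "./.gen/providers/aws/lakeformation-permissions";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationPermissions(this, "example", {
+      permissions: ["SELECT"],
+      principal: workflowRole.arn,
+      tableWithColumns: {
+        databaseName: "example_database",
+        name: "example_table",
+        excludedColumnNames: ["ssn"],
+        wildcard: true,
+      },
+    });
+  }
+}
+```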
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lakeformation_resource.html.markdown b/website/docs/cdktf/typescript/r/lakeformation_resource.html.markdown
new file mode 100644
index 00000000000..eaf7c015603
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lakeformation_resource.html.markdown
@@ -0,0 +1,62 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_resource"
+description: |-
+  Registers a Lake Formation resource as managed by the Data Catalog.
+---
+
+
+
+# Resource: aws_lakeformation_resource
+
+Registers a Lake Formation resource (e.g., S3 bucket) as managed by the Data Catalog. In other words, the S3 path is added to the data lake.
+
+Choose a role that has read/write access to the chosen Amazon S3 path or use the service-linked role. When you register the S3 path, the service-linked role and a new inline policy are created on your behalf. Lake Formation adds the first path to the inline policy and attaches it to the service-linked role. When you register subsequent paths, Lake Formation adds the path to the existing policy.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsS3Bucket } from "./.gen/providers/aws/data-aws-s3-bucket";
+import { LakeformationResource } from "./.gen/providers/aws/lakeformation-resource";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsS3Bucket(this, "example", {
+      bucket: "an-example-bucket",
+    });
+    const awsLakeformationResourceExample = new LakeformationResource(
+      this,
+      "example_1",
+      {
+        arn: Token.asString(example.arn),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLakeformationResourceExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+* `arn` – (Required) Amazon Resource Name (ARN) of the resource, an S3 path.
+* `roleArn` – (Optional) Role that has read/write access to the resource. If not provided, the Lake Formation service-linked role must exist and is used. See the sketch below.
+
+~> **NOTE:** AWS does not support registering an S3 location with an IAM role and subsequently updating the S3 location registration to a service-linked role.
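+
+A hypothetical sketch (not generated by `cdktf convert`) of registering a location with a custom role rather than the service-linked role; `registrationRole` is an assumed IAM role defined elsewhere with read/write access to the bucket:
+
+```typescript
+// Hypothetical sketch: register an S3 location with an explicit role.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { DataAwsS3Bucket } from "./.gen/providers/aws/data-aws-s3-bucket";
+import { LakeformationResource } from "./.gen/providers/aws/lakeformation-resource";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsS3Bucket(this, "example", {
+      bucket: "an-example-bucket",
+    });
+    new LakeformationResource(this, "example_1", {
+      arn: Token.asString(example.arn),
+      roleArn: registrationRole.arn, // assumed IAM role reference
+    });
+  }
+}
+```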
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `lastModified` - The date and time the resource was last modified in [RFC 3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lakeformation_resource_lf_tags.html.markdown b/website/docs/cdktf/typescript/r/lakeformation_resource_lf_tags.html.markdown
new file mode 100644
index 00000000000..3395ce692b9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lakeformation_resource_lf_tags.html.markdown
@@ -0,0 +1,194 @@
+---
+subcategory: "Lake Formation"
+layout: "aws"
+page_title: "AWS: aws_lakeformation_resource_lf_tags"
+description: |-
+  Manages an attachment between one or more LF-tags and an existing Lake Formation resource.
+---
+
+
+
+# Resource: aws_lakeformation_resource_lf_tags
+
+Manages an attachment between one or more existing LF-tags and an existing Lake Formation resource.
+
+## Example Usage
+
+### Database Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LakeformationLfTag } from "./.gen/providers/aws/lakeformation-lf-tag";
+import { LakeformationResourceLfTags } from "./.gen/providers/aws/lakeformation-resource-lf-tags";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new LakeformationLfTag(this, "example", {
+      key: "right",
+      values: [
+        "abbey",
+        "village",
+        "luffield",
+        "woodcote",
+        "copse",
+        "chapel",
+        "stowe",
+        "club",
+      ],
+    });
+    const awsLakeformationResourceLfTagsExample =
+      new LakeformationResourceLfTags(this, "example_1", {
+        database: {
+          name: Token.asString(awsGlueCatalogDatabaseExample.name),
+        },
+        lfTag: [
+          {
+            key: example.key,
+            value: "stowe",
+          },
+        ],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLakeformationResourceLfTagsExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Multiple Tags Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LakeformationLfTag } from "./.gen/providers/aws/lakeformation-lf-tag";
+import { LakeformationResourceLfTags } from "./.gen/providers/aws/lakeformation-resource-lf-tags";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LakeformationLfTag(this, "example", {
+      key: "right",
+      values: [
+        "abbey",
+        "village",
+        "luffield",
+        "woodcote",
+        "copse",
+        "chapel",
+        "stowe",
+        "club",
+      ],
+    });
+    new LakeformationLfTag(this, "example2", {
+      key: "left",
+      values: [
+        "farm",
+        "theloop",
+        "aintree",
+        "brooklands",
+        "maggotts",
+        "becketts",
+        "vale",
+      ],
+    });
+    const awsLakeformationResourceLfTagsExample =
+      new LakeformationResourceLfTags(this, "example_2", {
+        database: {
+          name: Token.asString(awsGlueCatalogDatabaseExample.name),
+        },
+        lfTag: [
+          {
+            key: "right",
+            value: "luffield",
+          },
+          {
+            key: "left",
+            value: "aintree",
+          },
+        ],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLakeformationResourceLfTagsExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `lfTag` – (Required) Set of LF-tags to attach to the resource. See below.
+
+Exactly one of the following is required:
+
+* `database` - (Optional) Configuration block for a database resource. See below.
+* `table` - (Optional) Configuration block for a table resource. See below.
+* `tableWithColumns` - (Optional) Configuration block for a table with columns resource. See below.
+
+The following arguments are optional:
+
+* `catalogId` – (Optional) Identifier for the Data Catalog. By default, the account ID. The Data Catalog is the persistent metadata store. It contains database definitions, table definitions, and other control information to manage your Lake Formation environment.
+
+### lf_tag
+
+The following arguments are required:
+
+* `key` – (Required) Key name for an existing LF-tag.
+* `value` - (Required) Value from the possible values for the LF-tag.
+
+The following argument is optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### database
+
+The following argument is required:
+
+* `name` – (Required) Name of the database resource. Unique to the Data Catalog.
+
+The following argument is optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller.
+
+### table
+
+The following arguments are required:
+
+* `databaseName` – (Required) Name of the database for the table. Unique to a Data Catalog.
+* `name` - (Required, at least one of `name` or `wildcard`) Name of the table.
+* `wildcard` - (Required, at least one of `name` or `wildcard`) Whether to use a wildcard representing every table under a database. Defaults to `false`.
+
+The following arguments are optional:
+
+* `catalogId` - (Optional) Identifier for the Data Catalog. 
By default, it is the account ID of the caller. + +### table_with_columns + +The following arguments are required: + +* `columnNames` - (Required, at least one of `columnNames` or `wildcard`) Set of column names for the table. +* `databaseName` – (Required) Name of the database for the table with columns resource. Unique to the Data Catalog. +* `name` – (Required) Name of the table resource. +* `wildcard` - (Required, at least one of `columnNames` or `wildcard`) Whether to use a column wildcard. If `excludedColumnNames` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference. + +The following arguments are optional: + +* `catalogId` - (Optional) Identifier for the Data Catalog. By default, it is the account ID of the caller. +* `excludedColumnNames` - (Optional) Set of column names for the table to exclude. If `excludedColumnNames` is included, `wildcard` must be set to `true` to avoid Terraform reporting a difference. + +## Attribute Reference + +This resource exports no additional attributes. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_alias.html.markdown b/website/docs/cdktf/typescript/r/lambda_alias.html.markdown index 87698ff3655..096995ad243 100644 --- a/website/docs/cdktf/typescript/r/lambda_alias.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_alias.html.markdown @@ -53,13 +53,13 @@ class MyConvertedCode extends TerraformStack { * `functionVersion` - (Required) Lambda function version for which you are creating the alias. Pattern: `(\$latest|[09]+)`. * `routingConfig` - (Optional) The Lambda alias' route configuration settings. Fields documented below -For **routing_config** the following attributes are supported: +`routingConfig` supports the following arguments: * `additionalVersionWeights` - (Optional) A map that defines the proportion of events that should be sent to different versions of a lambda function. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) identifying your Lambda function alias. * `invokeArn` - The ARN to be used for invoking Lambda Function from API Gateway - to be used in [`awsApiGatewayIntegration`](/docs/providers/aws/r/api_gateway_integration.html)'s `uri` @@ -70,10 +70,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda Function Aliases can be imported using the `functionName/alias`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Function Aliases using the `functionName/alias`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_lambda_alias.test_lambda_alias my_test_lambda_function/my_alias + +Using `terraform import`, import Lambda Function Aliases using the `functionName/alias`. 
For example: + +```console +% terraform import aws_lambda_alias.test_lambda_alias my_test_lambda_function/my_alias ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_code_signing_config.html.markdown b/website/docs/cdktf/typescript/r/lambda_code_signing_config.html.markdown index cf4a30afa61..431bfc42ae5 100644 --- a/website/docs/cdktf/typescript/r/lambda_code_signing_config.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_code_signing_config.html.markdown @@ -56,9 +56,9 @@ The `policies` block supports the following argument: * `untrustedArtifactOnDeployment` - (Required) Code signing configuration policy for deployment validation failure. If you set the policy to Enforce, Lambda blocks the deployment request if code-signing validation checks fail. If you set the policy to Warn, Lambda allows the deployment and creates a CloudWatch log. Valid values: `warn`, `enforce`. Default value: `warn`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the code signing configuration. * `configId` - Unique identifier for the code signing configuration. @@ -68,10 +68,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Code Signing Configs can be imported using their ARN, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Code Signing Configs using their ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_lambda_code_signing_config.imported_csc arn:aws:lambda:us-west-2:123456789012:code-signing-config:csc-0f6c334abcdea4d8b + +Using `terraform import`, import Code Signing Configs using their ARN. For example: + +```console +% terraform import aws_lambda_code_signing_config.imported_csc arn:aws:lambda:us-west-2:123456789012:code-signing-config:csc-0f6c334abcdea4d8b ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_event_source_mapping.html.markdown b/website/docs/cdktf/typescript/r/lambda_event_source_mapping.html.markdown index e196414bb67..8f85dd0fe66 100644 --- a/website/docs/cdktf/typescript/r/lambda_event_source_mapping.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_event_source_mapping.html.markdown @@ -282,7 +282,7 @@ class MyConvertedCode extends TerraformStack { * `maximumRecordAgeInSeconds`: - (Optional) The maximum age of a record that Lambda sends to a function for processing. Only available for stream sources (DynamoDB and Kinesis). Must be either -1 (forever, and the default value) or between 60 and 604800 (inclusive). * `maximumRetryAttempts`: - (Optional) The maximum number of times to retry when the function returns an error. Only available for stream sources (DynamoDB and Kinesis). Minimum and default of -1 (forever), maximum of 10000. * `parallelizationFactor`: - (Optional) The number of batches to process from each shard concurrently. Only available for stream sources (DynamoDB and Kinesis). 
Minimum and default of 1, maximum of 10. -* `queues` - (Optional) The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. A single queue name must be specified. +* `queues` - (Optional) The name of the Amazon MQ broker destination queue to consume. Only available for MQ sources. The list must contain exactly one queue name. * `scalingConfig` - (Optional) Scaling configuration of the event source. Only available for SQS queues. Detailed below. * `selfManagedEventSource`: - (Optional) For Self Managed Kafka sources, the location of the self managed cluster. If set, configuration must also include `sourceAccessConfiguration`. Detailed below. * `selfManagedKafkaEventSourceConfig` - (Optional) Additional configuration block for Self Managed Kafka sources. Incompatible with "event_source_arn" and "amazon_managed_kafka_event_source_config". Detailed below. @@ -335,9 +335,9 @@ class MyConvertedCode extends TerraformStack { * `type` - (Required) The type of this configuration. For Self Managed Kafka you will need to supply blocks for type `vpcSubnet` and `vpcSecurityGroup`. * `uri` - (Required) The URI for this configuration. For type `vpcSubnet` the value should be `subnet:subnetId` where `subnetId` is the value you would find in an aws_subnet resource's id attribute. For type `vpcSecurityGroup` the value should be `securityGroup:securityGroupId` where `securityGroupId` is the value you would find in an aws_security_group resource's id attribute. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `functionArn` - The the ARN of the Lambda function the event source mapping is sending events to. (Note: this is a computed value that differs from `functionName` above.) * `lastModified` - The date this resource was last modified. @@ -351,10 +351,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda event source mappings can be imported using the `uuid` (event source mapping identifier), e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda event source mappings using the `uuid` (event source mapping identifier). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_lambda_event_source_mapping.event_source_mapping 12345kxodurf3443 + +Using `terraform import`, import Lambda event source mappings using the `uuid` (event source mapping identifier). 
For example: + +```console +% terraform import aws_lambda_event_source_mapping.event_source_mapping 12345kxodurf3443 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_function.html.markdown b/website/docs/cdktf/typescript/r/lambda_function.html.markdown index 8b792ff6999..d7b524dff86 100644 --- a/website/docs/cdktf/typescript/r/lambda_function.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_function.html.markdown @@ -76,7 +76,7 @@ class MyConvertedCode extends TerraformStack { functionName: "lambda_function_name", handler: "index.test", role: iamForLambda.arn, - runtime: "nodejs16.x", + runtime: "nodejs18.x", sourceCodeHash: Token.asString(lambda.outputBase64Sha256), }); } @@ -165,7 +165,7 @@ class MyConvertedCode extends TerraformStack { functionName: "lambda_function_name", handler: "index.test", role: iamForLambda.arn, - runtime: "nodejs14.x", + runtime: "nodejs18.x", }); } } @@ -413,9 +413,9 @@ For network connectivity to AWS resources in a VPC, specify a list of security g * `securityGroupIds` - (Required) List of security group IDs associated with the Lambda function. * `subnetIds` - (Required) List of subnet IDs associated with the Lambda function. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) identifying your Lambda Function. * `invokeArn` - ARN to be used for invoking Lambda Function from API Gateway - to be used in [`awsApiGatewayIntegration`](/docs/providers/aws/r/api_gateway_integration.html)'s `uri`. @@ -452,10 +452,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda Functions can be imported using the `functionName`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Functions using the `functionName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_lambda_function.test_lambda my_test_lambda_function + +Using `terraform import`, import Lambda Functions using the `functionName`. For example: + +```console +% terraform import aws_lambda_function.test_lambda my_test_lambda_function ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_function_event_invoke_config.html.markdown b/website/docs/cdktf/typescript/r/lambda_function_event_invoke_config.html.markdown index 69960c6c67b..fa7e04868b8 100644 --- a/website/docs/cdktf/typescript/r/lambda_function_event_invoke_config.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_function_event_invoke_config.html.markdown @@ -173,38 +173,96 @@ The following arguments are required: * `destination` - (Required) Amazon Resource Name (ARN) of the destination resource. See the [Lambda Developer Guide](https://docs.aws.amazon.com/lambda/latest/dg/invocation-async.html#invocation-async-destinations) for acceptable resource types and associated IAM permissions. 
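+
+As a hypothetical sketch (not generated by `cdktf convert`), an on-failure destination could be wired up as follows; the Lambda function `example` and the SQS queue `deadLetter` are assumed to be defined elsewhere in the stack:
+
+```typescript
+// Hypothetical sketch: send records from failed asynchronous invocations
+// to an SQS queue via destinationConfig.onFailure.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { LambdaFunctionEventInvokeConfig } from "./.gen/providers/aws/lambda-function-event-invoke-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LambdaFunctionEventInvokeConfig(this, "example", {
+      functionName: Token.asString(example.functionName),
+      destinationConfig: {
+        onFailure: {
+          destination: Token.asString(deadLetter.arn),
+        },
+      },
+    });
+  }
+}
+```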
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:
 
 * `id` - Fully qualified Lambda Function name or Amazon Resource Name (ARN)
 
 ## Import
 
-Lambda Function Event Invoke Configs can be imported using the fully qualified Function name or Amazon Resource Name (ARN), e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Function Event Invoke Configs using the fully qualified Function name or Amazon Resource Name (ARN). For example:
 
 ARN without qualifier (all versions and aliases):
 
-```
-$ terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
 ```
 
 ARN with qualifier:
 
-```
-$ terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function:production
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
 ```
 
 Name without qualifier (all versions and aliases):
 
-```
-$ terraform import aws_lambda_function_event_invoke_config.example my_function
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
 ```
 
 Name with qualifier:
 
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
 ```
-$ terraform import aws_lambda_function_event_invoke_config.example my_function:production
+
+Using `terraform import`, import Lambda Function Event Invoke Configs using the fully qualified Function name or Amazon Resource Name (ARN). 
For example: + +ARN without qualifier (all versions and aliases): + +```console +% terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function +``` + +ARN with qualifier: + +```console +% terraform import aws_lambda_function_event_invoke_config.example arn:aws:us-east-1:123456789012:function:my_function:production +``` + +Name without qualifier (all versions and aliases): + +```console +% terraform import aws_lambda_function_event_invoke_config.example my_function +``` + +Name with qualifier: + +```console +% terraform import aws_lambda_function_event_invoke_config.example my_function:production ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_function_url.html.markdown b/website/docs/cdktf/typescript/r/lambda_function_url.html.markdown index 93aed718555..17ece5d0143 100644 --- a/website/docs/cdktf/typescript/r/lambda_function_url.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_function_url.html.markdown @@ -69,9 +69,9 @@ This configuration block supports the following attributes: * `exposeHeaders` - (Optional) The HTTP headers in your function response that you want to expose to origins that call the function URL. * `maxAge` - (Optional) The maximum amount of time, in seconds, that web browsers can cache results of a preflight request. By default, this is set to `0`, which means that the browser doesn't cache results. The maximum value is `86400`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `functionArn` - The Amazon Resource Name (ARN) of the function. * `functionUrl` - The HTTP URL endpoint for the function in the format `https://LambdaUrl.OnAws`. @@ -79,10 +79,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda function URLs can be imported using the `functionName` or `functionName/qualifier`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda function URLs using the `functionName` or `functionName/qualifier`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_lambda_function_url.test_lambda_url my_test_lambda_function + +Using `terraform import`, import Lambda function URLs using the `functionName` or `functionName/qualifier`. For example: + +```console +% terraform import aws_lambda_function_url.test_lambda_url my_test_lambda_function ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_invocation.html.markdown b/website/docs/cdktf/typescript/r/lambda_invocation.html.markdown index 7533d94e305..03490938b46 100644 --- a/website/docs/cdktf/typescript/r/lambda_invocation.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_invocation.html.markdown @@ -187,10 +187,10 @@ The following arguments are optional: * `terraformKey` - (Optional) The JSON key used to store lifecycle information in the input JSON payload. Defaults to `tf`. 
This additional key is only included when `lifecycleScope` is set to `crud`. * `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a re-invocation. To force a re-invocation without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html). -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `result` - String result of the lambda function invocation. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_layer_version.html.markdown b/website/docs/cdktf/typescript/r/lambda_layer_version.html.markdown index 11a639fc99e..852ac05bdcb 100644 --- a/website/docs/cdktf/typescript/r/lambda_layer_version.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_layer_version.html.markdown @@ -70,9 +70,9 @@ The following arguments are optional: * `skipDestroy` - (Optional) Whether to retain the old version of a previously deployed Lambda Layer. Default is `false`. When this is not set to `true`, changing any of `compatibleArchitectures`, `compatibleRuntimes`, `description`, `filename`, `layerName`, `licenseInfo`, `s3Bucket`, `s3Key`, `s3ObjectVersion`, or `sourceCodeHash` forces deletion of the existing layer version and creation of a new layer version. * `sourceCodeHash` - (Optional) Used to trigger updates. Must be set to a base64-encoded SHA256 hash of the package file specified with either `filename` or `s3Key`. The usual way to set this is `${filebase64Sha256("fileZip")}` (Terraform 0.11.12 or later) or `${base64Sha256(file("fileZip"))}` (Terraform 0.11.11 and earlier), where "file.zip" is the local filename of the lambda layer source archive. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the Lambda Layer with version. * `createdDate` - Date this resource was created. @@ -89,12 +89,26 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda Layers can be imported using `arn`. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Layers using `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import \ + +Using `terraform import`, import Lambda Layers using `arn`. 
For example: + +```console +% terraform import \ aws_lambda_layer_version.test_layer \ arn:aws:lambda:_REGION_:_ACCOUNT_ID_:layer:_LAYER_NAME_:_LAYER_VERSION_ ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_layer_version_permission.html.markdown b/website/docs/cdktf/typescript/r/lambda_layer_version_permission.html.markdown index 0579fbeb0be..b525514a594 100644 --- a/website/docs/cdktf/typescript/r/lambda_layer_version_permission.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_layer_version_permission.html.markdown @@ -44,7 +44,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `action` - (Required) Action, which will be allowed. `lambda:getLayerVersion` value is suggested by AWS documantation. * `layerName` (Required) The name or ARN of the Lambda Layer, which you want to grant access to. @@ -54,9 +54,9 @@ The following arguments are supported: * `versionNumber` (Required) Version of Lambda Layer, which you want to grant access to. Note: permissions only apply to a single version of a layer. * `skipDestroy` - (Optional) Whether to retain the old version of a previously deployed Lambda Layer. Default is `false`. When this is not set to `true`, changing any of `compatibleArchitectures`, `compatibleRuntimes`, `description`, `filename`, `layerName`, `licenseInfo`, `s3Bucket`, `s3Key`, `s3ObjectVersion`, or `sourceCodeHash` forces deletion of the existing layer version and creation of a new layer version. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The `layerName` and `versionNumber`, separated by a comma (`,`). * `revisionId` - A unique identifier for the current revision of the policy. @@ -64,12 +64,26 @@ In addition to all arguments above, the following attributes are exported: ## Import -Lambda Layer Permissions can be imported using `layerName` and `versionNumber`, separated by a comma (`,`). +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda Layer Permissions using `layerName` and `versionNumber`, separated by a comma (`,`). For example: -```sh -$ terraform import aws_lambda_layer_version_permission.example arn:aws:lambda:us-west-2:123456654321:layer:test_layer1,1 +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Lambda Layer Permissions using `layerName` and `versionNumber`, separated by a comma (`,`). 
For example: + +```console +% terraform import aws_lambda_layer_version_permission.example arn:aws:lambda:us-west-2:123456654321:layer:test_layer1,1 ``` [1]: https://docs.aws.amazon.com/lambda/latest/dg/access-control-resource-based.html#permissions-resource-xaccountlayer - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_permission.html.markdown b/website/docs/cdktf/typescript/r/lambda_permission.html.markdown index bdb1119daea..9cc31e46837 100644 --- a/website/docs/cdktf/typescript/r/lambda_permission.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_permission.html.markdown @@ -328,18 +328,46 @@ class MyConvertedCode extends TerraformStack { [2]: https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-control-access-using-iam-policies-to-invoke-api.html [3]: https://docs.aws.amazon.com/lambda/latest/dg/urls-auth.html -## Attributes Reference +## Attribute Reference -No additional attributes are exported. +This resource exports no additional attributes. ## Import -Lambda permission statements can be imported using function_name/statement_id, with an optional qualifier, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lambda permission statements using function_name/statement_id with an optional qualifier. For example: +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Lambda permission statements using function_name/statement_id with an optional qualifier. For example: + +```console +% terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function/AllowExecutionFromCloudWatch ``` -$ terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function/AllowExecutionFromCloudWatch -$ terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function:qualifier_name/AllowExecutionFromCloudWatch +```console +% terraform import aws_lambda_permission.test_lambda_permission my_test_lambda_function:qualifier_name/AllowExecutionFromCloudWatch ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lambda_provisioned_concurrency_config.html.markdown b/website/docs/cdktf/typescript/r/lambda_provisioned_concurrency_config.html.markdown index a920e635f57..09a54a4eaab 100644 --- a/website/docs/cdktf/typescript/r/lambda_provisioned_concurrency_config.html.markdown +++ b/website/docs/cdktf/typescript/r/lambda_provisioned_concurrency_config.html.markdown @@ -76,9 +76,9 @@ The following arguments are optional: * `skipDestroy` - (Optional) Whether to retain the provisoned concurrency configuration upon destruction. Defaults to `false`. If set to `true`, the resource in simply removed from state instead. 
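+
+A hypothetical sketch (not generated by `cdktf convert`) that combines `skipDestroy` with the other arguments above; the published Lambda alias `example` is assumed to be defined elsewhere:
+
+```typescript
+// Hypothetical sketch: provisioned concurrency on an alias, retained in AWS
+// when the Terraform resource is destroyed.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { LambdaProvisionedConcurrencyConfig } from "./.gen/providers/aws/lambda-provisioned-concurrency-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LambdaProvisionedConcurrencyConfig(this, "example", {
+      functionName: Token.asString(example.functionName),
+      provisionedConcurrentExecutions: 1,
+      qualifier: Token.asString(example.name),
+      // Keep the configuration in AWS on destroy; it is only removed
+      // from Terraform state.
+      skipDestroy: true,
+    });
+  }
+}
+```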
-## Attributes Reference
+## Attribute Reference
 
-In addition to all arguments above, the following attributes are exported:
+This resource exports the following attributes in addition to the arguments above:
 
 * `id` - Lambda Function name and qualifier separated by a comma (`,`).
 
@@ -91,10 +91,24 @@ In addition to all arguments above, the following attributes are exported:
 
 ## Import
 
-A Lambda Provisioned Concurrency Configuration can be imported using the `functionName` and `qualifier` separated by a comma (`,`), e.g.,
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Lambda Provisioned Concurrency Configuration using the `functionName` and `qualifier` separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
 
 ```
-$ terraform import aws_lambda_provisioned_concurrency_config.example my_function,production
+
+Using `terraform import`, import a Lambda Provisioned Concurrency Configuration using the `functionName` and `qualifier` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_lambda_provisioned_concurrency_config.example my_function,production
 ```
 
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/launch_configuration.html.markdown b/website/docs/cdktf/typescript/r/launch_configuration.html.markdown
new file mode 100644
index 00000000000..1f2fad63969
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/launch_configuration.html.markdown
@@ -0,0 +1,297 @@
+---
+subcategory: "Auto Scaling"
+layout: "aws"
+page_title: "AWS: aws_launch_configuration"
+description: |-
+  Provides a resource to create a new launch configuration, used for autoscaling groups.
+---
+
+
+
+# Resource: aws_launch_configuration
+
+Provides a resource to create a new launch configuration, used for autoscaling groups.
+
+!> **WARNING:** The use of launch configurations is discouraged in favour of launch templates. Read more in the [AWS EC2 Documentation](https://docs.aws.amazon.com/autoscaling/ec2/userguide/launch-configurations.html).
+
+-> **Note** When using `awsLaunchConfiguration` with `awsAutoscalingGroup`, it is recommended to use the `namePrefix` (Optional) attribute instead of `name` (Optional). This allows Terraform lifecycle configurations to detect changes to the launch configuration and update the autoscaling group correctly.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsAmi } from "./.gen/providers/aws/data-aws-ami";
+import { LaunchConfiguration } from "./.gen/providers/aws/launch-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ubuntu = new DataAwsAmi(this, "ubuntu", {
+      filter: [
+        {
+          name: "name",
+          values: ["ubuntu/images/hvm-ssd/ubuntu-trusty-14.04-amd64-server-*"],
+        },
+        {
+          name: "virtualization-type",
+          values: ["hvm"],
+        },
+      ],
+      mostRecent: true,
+      owners: ["099720109477"],
+    });
+    new LaunchConfiguration(this, "as_conf", {
+      imageId: Token.asString(ubuntu.id),
+      instanceType: "t2.micro",
+      name: "web_config",
+    });
+  }
+}
+
+```
+
+## Using with AutoScaling Groups
+
+Launch Configurations cannot be updated after creation with the Amazon
+Web Services API. In order to update a Launch Configuration, Terraform will
+destroy the existing resource and create a replacement. In order to effectively
+use a Launch Configuration resource with an [AutoScaling Group resource][1],
+it's recommended to specify `createBeforeDestroy` in a [lifecycle][2] block.
+Either omit the Launch Configuration `name` attribute, or specify a partial name
+with `namePrefix`. Example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { DataAwsAmi } from "./.gen/providers/aws/data-aws-ami";
+import { LaunchConfiguration } from "./.gen/providers/aws/launch-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ubuntu = new DataAwsAmi(this, "ubuntu", {
+      filter: [
+        {
+          name: "name",
+          values: ["ubuntu/images/hvm-ssd/ubuntu-trusty-14.04-amd64-server-*"],
+        },
+        {
+          name: "virtualization-type",
+          values: ["hvm"],
+        },
+      ],
+      mostRecent: true,
+      owners: ["099720109477"],
+    });
+    const asConf = new LaunchConfiguration(this, "as_conf", {
+      imageId: Token.asString(ubuntu.id),
+      instanceType: "t2.micro",
+      lifecycle: {
+        createBeforeDestroy: true,
+      },
+      namePrefix: "terraform-lc-example-",
+    });
+    new AutoscalingGroup(this, "bar", {
+      launchConfiguration: asConf.name,
+      lifecycle: {
+        createBeforeDestroy: true,
+      },
+      maxSize: 2,
+      minSize: 1,
+      name: "terraform-asg-example",
+    });
+  }
+}
+
+```
+
+With this setup Terraform generates a unique name for your Launch
+Configuration and can then update the AutoScaling Group without conflict before
+destroying the previous Launch Configuration.
+
+## Using with Spot Instances
+
+Launch configurations can set the spot instance pricing to be used for the
+Auto Scaling Group to reserve instances. Simply specifying the `spotPrice`
+parameter will set the price on the Launch Configuration, which will attempt to
+reserve your instances at this price. See the [AWS Spot Instance
+documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-spot-instances.html)
+for more information on how to launch [Spot Instances][3] with Terraform.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AutoscalingGroup } from "./.gen/providers/aws/autoscaling-group";
+import { DataAwsAmi } from "./.gen/providers/aws/data-aws-ami";
+import { LaunchConfiguration } from "./.gen/providers/aws/launch-configuration";
+interface MyConfig {
+  maxSize: any;
+  minSize: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const ubuntu = new DataAwsAmi(this, "ubuntu", {
+      filter: [
+        {
+          name: "name",
+          values: ["ubuntu/images/hvm-ssd/ubuntu-trusty-14.04-amd64-server-*"],
+        },
+        {
+          name: "virtualization-type",
+          values: ["hvm"],
+        },
+      ],
+      mostRecent: true,
+      owners: ["099720109477"],
+    });
+    const asConf = new LaunchConfiguration(this, "as_conf", {
+      imageId: Token.asString(ubuntu.id),
+      instanceType: "m4.large",
+      lifecycle: {
+        createBeforeDestroy: true,
+      },
+      spotPrice: "0.001",
+    });
+    new AutoscalingGroup(this, "bar", {
+      launchConfiguration: asConf.name,
+      name: "terraform-asg-example",
+      maxSize: config.maxSize,
+      minSize: config.minSize,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `imageId` - (Required) The EC2 image ID to launch.
+* `instanceType` - (Required) The size of instance to launch.
+
+The following arguments are optional:
+
+* `associatePublicIpAddress` - (Optional) Associate a public ip address with an instance in a VPC.
+* `ebsBlockDevice` - (Optional) Additional EBS block devices to attach to the instance. See [Block Devices](#block-devices) below for details.
+* `ebsOptimized` - (Optional) If true, the launched EC2 instance will be EBS-optimized.
+* `enableMonitoring` - (Optional) Enables/disables detailed monitoring. This is enabled by default.
+* `ephemeralBlockDevice` - (Optional) Customize Ephemeral (also known as "Instance Store") volumes on the instance. See [Block Devices](#block-devices) below for details.
+* `iamInstanceProfile` - (Optional) The name attribute of the IAM instance profile to associate with launched instances.
+* `keyName` - (Optional) The key name that should be used for the instance.
+* `metadataOptions` - (Optional) The metadata options for the instance.
+    * `httpEndpoint` - The state of the metadata service: `enabled`, `disabled`.
+    * `httpTokens` - If session tokens are required: `optional`, `required`.
+    * `httpPutResponseHopLimit` - The desired HTTP PUT response hop limit for instance metadata requests.
+* `name` - (Optional) The name of the launch configuration. If you leave this blank, Terraform will auto-generate a unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `securityGroups` - (Optional) A list of associated security group IDs.
+* `placementTenancy` - (Optional) The tenancy of the instance. Valid values are `default` or `dedicated`, see [AWS's Create Launch Configuration](http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_CreateLaunchConfiguration.html) for more details.
+* `rootBlockDevice` - (Optional) Customize details about the root block device of the instance. See [Block Devices](#block-devices) below for details.
+* `spotPrice` - (Optional; Default: On-demand price) The maximum price to use for reserving spot instances.
+* `userData` - (Optional) The user data to provide when launching the instance. Do not pass gzip-compressed data via this argument; see `userDataBase64` instead.
+* `userDataBase64` - (Optional) Can be used instead of `userData` to pass base64-encoded binary data directly. Use this instead of `userData` whenever the value is not a valid UTF-8 string. For example, gzip-encoded user data must be base64-encoded and passed via this argument to avoid corruption.
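+
+To put `userData` in context, a hypothetical sketch (not generated by `cdktf convert`) reusing the `ubuntu` AMI lookup from the examples above; the script content is illustrative only:
+
+```typescript
+// Hypothetical sketch: plain-text cloud-init script passed via userData.
+// Binary or gzip-compressed payloads should be base64-encoded and passed
+// via userDataBase64 instead.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { LaunchConfiguration } from "./.gen/providers/aws/launch-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LaunchConfiguration(this, "with_user_data", {
+      imageId: Token.asString(ubuntu.id),
+      instanceType: "t2.micro",
+      namePrefix: "terraform-lc-example-",
+      userData: "#!/bin/bash\necho 'hello from user data' > /tmp/hello.txt\n",
+    });
+  }
+}
+```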
+
+## Block devices
+
+Each of the `*BlockDevice` attributes controls a portion of the AWS
+Launch Configuration's "Block Device Mapping". It's a good idea to familiarize yourself with [AWS's Block Device
+Mapping docs](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html)
+to understand the implications of using these attributes.
+
+Each AWS Instance type has a different set of Instance Store block devices
+available for attachment. AWS [publishes a
+list](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#StorageOnInstanceTypes)
+of which ephemeral devices are available on each type. The devices are always
+identified by the `virtualName` in the format `ephemeral{0..N}`.
+
+~> **NOTE:** Changes to `*BlockDevice` configuration of _existing_ resources
+cannot currently be detected by Terraform. After updating the block device
+configuration, resource recreation can be manually triggered by using the
+[`taint` command](https://www.terraform.io/docs/commands/taint.html).
+
+### ebs_block_device
+
+Modifying any of the `ebsBlockDevice` settings requires resource replacement.
+
+* `deviceName` - (Required) The name of the device to mount.
+* `snapshotId` - (Optional) The Snapshot ID to mount.
+* `volumeType` - (Optional) The type of volume. Can be `standard`, `gp2`, `gp3`, `st1`, `sc1` or `io1`.
+* `volumeSize` - (Optional) The size of the volume in gigabytes.
+* `iops` - (Optional) The amount of provisioned
+  [IOPS](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html).
+  This must be set with a `volumeType` of `"io1"`.
+* `throughput` - (Optional) The throughput (MiBps) to provision for a `gp3` volume.
+* `deleteOnTermination` - (Optional) Whether the volume should be destroyed
+  on instance termination (Default: `true`).
+* `encrypted` - (Optional) Whether the volume should be encrypted or not. Defaults to `false`.
+* `noDevice` - (Optional) Whether the device in the block device mapping of the AMI is suppressed.
+
+### ephemeral_block_device
+
+* `deviceName` - (Required) The name of the block device to mount on the instance.
+* `noDevice` - (Optional) Whether the device in the block device mapping of the AMI is suppressed.
+* `virtualName` - (Optional) The [Instance Store Device Name](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#InstanceStoreDeviceNames).
+
+### root_block_device
+
+-> Modifying any of the `rootBlockDevice` settings requires resource replacement.
+
+* `deleteOnTermination` - (Optional) Whether the volume should be destroyed on instance termination. Defaults to `true`.
+* `encrypted` - (Optional) Whether the volume should be encrypted or not. Defaults to `false`.
+* `iops` - (Optional) The amount of provisioned [IOPS](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). This must be set with a `volumeType` of `io1`.
+* `throughput` - (Optional) The throughput (MiBps) to provision for a `gp3` volume.
+* `volumeSize` - (Optional) The size of the volume in gigabytes.
+* `volumeType` - (Optional) The type of volume. Can be `standard`, `gp2`, `gp3`, `st1`, `sc1` or `io1`.
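+
+As an illustrative sketch only (the AMI ID, device names, and sizes below are hypothetical placeholders, not values from this guide), the three block device types can be combined on a single Launch Configuration like this:
+
+```typescript
+// A sketch combining the block device types documented above.
+// The AMI ID, device names, and sizes are hypothetical placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LaunchConfiguration } from "./.gen/providers/aws/launch-configuration";
+class BlockDeviceSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LaunchConfiguration(this, "with_block_devices", {
+      imageId: "ami-12345678", // hypothetical AMI ID
+      instanceType: "m5.large",
+      namePrefix: "terraform-lc-bd-example-",
+      // Grow the root volume and keep it after the instance terminates.
+      rootBlockDevice: {
+        volumeType: "gp3",
+        volumeSize: 50,
+        deleteOnTermination: false,
+      },
+      // Attach an additional encrypted EBS data volume.
+      ebsBlockDevice: [
+        {
+          deviceName: "/dev/sdb",
+          volumeType: "gp3",
+          volumeSize: 100,
+          encrypted: true,
+        },
+      ],
+      // Map the first instance store volume, if the type provides one.
+      ephemeralBlockDevice: [
+        {
+          deviceName: "/dev/sdc",
+          virtualName: "ephemeral0",
+        },
+      ],
+    });
+  }
+}
+```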
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the launch configuration. +* `arn` - The Amazon Resource Name of the launch configuration. +* `name` - The name of the launch configuration. + +[1]: /docs/providers/aws/r/autoscaling_group.html +[2]: https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html +[3]: /docs/providers/aws/r/spot_instance_request.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import launch configurations using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import launch configurations using the `name`. For example: + +```console +% terraform import aws_launch_configuration.as_conf terraform-lg-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/launch_template.html.markdown b/website/docs/cdktf/typescript/r/launch_template.html.markdown new file mode 100644 index 00000000000..5250f78a703 --- /dev/null +++ b/website/docs/cdktf/typescript/r/launch_template.html.markdown @@ -0,0 +1,527 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_launch_template" +description: |- + Provides an EC2 launch template resource. Can be used to create instances or auto scaling groups. +--- + + + +# Resource: aws_launch_template + +Provides an EC2 launch template resource. Can be used to create instances or auto scaling groups. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { LaunchTemplate } from "./.gen/providers/aws/launch-template";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LaunchTemplate(this, "foo", {
+      blockDeviceMappings: [
+        {
+          deviceName: "/dev/sdf",
+          ebs: {
+            volumeSize: 20,
+          },
+        },
+      ],
+      capacityReservationSpecification: {
+        capacityReservationPreference: "open",
+      },
+      cpuOptions: {
+        coreCount: 4,
+        threadsPerCore: 2,
+      },
+      creditSpecification: {
+        cpuCredits: "standard",
+      },
+      disableApiStop: true,
+      disableApiTermination: true,
+      ebsOptimized: Token.asString(true),
+      elasticGpuSpecifications: [
+        {
+          type: "test",
+        },
+      ],
+      elasticInferenceAccelerator: {
+        type: "eia1.medium",
+      },
+      iamInstanceProfile: {
+        name: "test",
+      },
+      imageId: "ami-test",
+      instanceInitiatedShutdownBehavior: "terminate",
+      instanceMarketOptions: {
+        marketType: "spot",
+      },
+      instanceType: "t2.micro",
+      kernelId: "test",
+      keyName: "test",
+      licenseSpecification: [
+        {
+          licenseConfigurationArn:
+            "arn:aws:license-manager:eu-west-1:123456789012:license-configuration:lic-0123456789abcdef0123456789abcdef",
+        },
+      ],
+      metadataOptions: {
+        httpEndpoint: "enabled",
+        httpPutResponseHopLimit: 1,
+        httpTokens: "required",
+        instanceMetadataTags: "enabled",
+      },
+      monitoring: {
+        enabled: true,
+      },
+      name: "foo",
+      networkInterfaces: [
+        {
+          associatePublicIpAddress: Token.asString(true),
+        },
+      ],
+      placement: {
+        availabilityZone: "us-west-2a",
+      },
+      ramDiskId: "test",
+      tagSpecifications: [
+        {
+          resourceType: "instance",
+          tags: {
+            Name: "test",
+          },
+        },
+      ],
+      userData: Token.asString(Fn.filebase64("${path.module}/example.sh")),
+      vpcSecurityGroupIds: ["sg-12345678"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `blockDeviceMappings` - (Optional) Specify volumes to attach to the instance besides the volumes specified by the AMI.
+  See [Block Devices](#block-devices) below for details.
+* `capacityReservationSpecification` - (Optional) Targeting for EC2 capacity reservations. See [Capacity Reservation Specification](#capacity-reservation-specification) below for more details.
+* `cpuOptions` - (Optional) The CPU options for the instance. See [CPU Options](#cpu-options) below for more details.
+* `creditSpecification` - (Optional) Customize the credit specification of the instance. See [Credit
+  Specification](#credit-specification) below for more details.
+* `defaultVersion` - (Optional) Default Version of the launch template.
+* `description` - (Optional) Description of the launch template.
+* `disableApiStop` - (Optional) If `true`, enables [EC2 Instance Stop Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Stop_Start.html#Using_StopProtection).
+* `disableApiTermination` - (Optional) If `true`, enables [EC2 Instance
+  Termination Protection](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#Using_ChangingDisableAPITermination).
+* `ebsOptimized` - (Optional) If `true`, the launched EC2 instance will be EBS-optimized.
+* `elasticGpuSpecifications` - (Optional) The elastic GPU to attach to the instance. See [Elastic GPU](#elastic-gpu)
+  below for more details.
+* `elasticInferenceAccelerator` - (Optional) Configuration block containing an Elastic Inference Accelerator to attach to the instance. See [Elastic Inference Accelerator](#elastic-inference-accelerator) below for more details.
+* `enclaveOptions` - (Optional) Enable Nitro Enclaves on launched instances.
+  See [Enclave Options](#enclave-options) below for more details.
+* `hibernationOptions` - (Optional) The hibernation options for the instance. See [Hibernation Options](#hibernation-options) below for more details.
+* `iamInstanceProfile` - (Optional) The IAM Instance Profile to launch the instance with. See [Instance Profile](#instance-profile)
+  below for more details.
+* `imageId` - (Optional) The AMI from which to launch the instance.
+* `instanceInitiatedShutdownBehavior` - (Optional) Shutdown behavior for the instance. Can be `stop` or `terminate`.
+  (Default: `stop`).
+* `instanceMarketOptions` - (Optional) The market (purchasing) option for the instance. See [Market Options](#market-options)
+  below for details.
+* `instanceRequirements` - (Optional) The attribute requirements for the type of instance. If present then `instanceType` cannot be present.
+* `instanceType` - (Optional) The type of the instance. If present then `instanceRequirements` cannot be present.
+* `kernelId` - (Optional) The kernel ID.
+* `keyName` - (Optional) The key name to use for the instance.
+* `licenseSpecification` - (Optional) A list of license specifications to associate with. See [License Specification](#license-specification) below for more details.
+* `maintenanceOptions` - (Optional) The maintenance options for the instance. See [Maintenance Options](#maintenance-options) below for more details.
+* `metadataOptions` - (Optional) Customize the metadata options for the instance. See [Metadata Options](#metadata-options) below for more details.
+* `monitoring` - (Optional) The monitoring option for the instance. See [Monitoring](#monitoring) below for more details.
+* `name` - (Optional) The name of the launch template. If you leave this blank, Terraform will auto-generate a unique name.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `networkInterfaces` - (Optional) Customize network interfaces to be attached at instance boot time. See [Network
+  Interfaces](#network-interfaces) below for more details.
+* `placement` - (Optional) The placement of the instance. See [Placement](#placement) below for more details.
+* `privateDnsNameOptions` - (Optional) The options for the instance hostname. The default values are inherited from the subnet. See [Private DNS Name Options](#private-dns-name-options) below for more details.
+* `ramDiskId` - (Optional) The ID of the RAM disk.
+* `securityGroupNames` - (Optional) A list of security group names to associate with. If you are creating instances in a VPC, use
+  `vpcSecurityGroupIds` instead.
+* `tagSpecifications` - (Optional) The tags to apply to the resources during launch. See [Tag Specifications](#tag-specifications) below for more details.
+* `tags` - (Optional) A map of tags to assign to the launch template. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `updateDefaultVersion` - (Optional) Whether to update the Default Version on each update. Conflicts with `defaultVersion`.
+* `userData` - (Optional) The base64-encoded user data to provide when launching the instance.
+* `vpcSecurityGroupIds` - (Optional) A list of security group IDs to associate with.
+  Conflicts with `networkInterfaces.securityGroups`.
+
+### Block devices
+
+Configure additional volumes of the instance besides those specified by the AMI. It's a good idea to familiarize yourself with
+  [AWS's Block Device Mapping docs](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html)
+  to understand the implications of using these attributes.
+
+To find configuration details of an existing AMI that you want to override, such as `deviceName`, you can use the [AWS CLI ec2 describe-images command](https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-images.html).
+
+Each `blockDeviceMappings` supports the following:
+
+* `deviceName` - (Required) The name of the device to mount.
+* `ebs` - (Optional) Configure EBS volume properties.
+* `noDevice` - (Optional) Suppresses the specified device included in the AMI's block device mapping.
+* `virtualName` - (Optional) The [Instance Store Device
+  Name](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#InstanceStoreDeviceNames)
+  (e.g., `"ephemeral0"`).
+
+The `ebs` block supports the following:
+
+* `deleteOnTermination` - (Optional) Whether the volume should be destroyed on instance termination.
+  See [Preserving Amazon EBS Volumes on Instance Termination](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/terminating-instances.html#preserving-volumes-on-termination) for more information.
+* `encrypted` - (Optional) Enables [EBS encryption](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html) on the volume.
+  Cannot be used with `snapshotId`.
+* `iops` - (Optional) The amount of provisioned [IOPS](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html).
+  This must be set with a `volumeType` of `"io1/io2/gp3"`.
+* `kmsKeyId` - (Optional) The ARN of the AWS Key Management Service (AWS KMS) customer master key (CMK) to use when creating the encrypted volume.
+  `encrypted` must be set to `true` when this is set.
+* `snapshotId` - (Optional) The Snapshot ID to mount.
+* `throughput` - (Optional) The throughput to provision for a `gp3` volume in MiB/s (specified as an integer, e.g., 500), with a maximum of 1,000 MiB/s.
+* `volumeSize` - (Optional) The size of the volume in gigabytes.
+* `volumeType` - (Optional) The volume type.
+  Can be one of `standard`, `gp2`, `gp3`, `io1`, `io2`, `sc1` or `st1`.
+
+### Capacity Reservation Specification
+
+The `capacityReservationSpecification` block supports the following:
+
+* `capacityReservationPreference` - Indicates the instance's Capacity Reservation preferences. Can be `open` or `none`. (Default `none`).
+* `capacityReservationTarget` - Used to target a specific Capacity Reservation:
+
+The `capacityReservationTarget` block supports the following:
+
+* `capacityReservationId` - The ID of the Capacity Reservation in which to run the instance.
+* `capacityReservationResourceGroupArn` - The ARN of the Capacity Reservation resource group in which to run the instance.
+
+### CPU Options
+
+The `cpuOptions` block supports the following:
+
+* `amdSevSnp` - Indicates whether to enable the instance for AMD SEV-SNP. AMD SEV-SNP is supported with M6a, R6a, and C6a instance types only. Valid values are `enabled` and `disabled`.
+* `coreCount` - The number of CPU cores for the instance.
+* `threadsPerCore` - The number of threads per CPU core.
+  To disable Intel Hyper-Threading Technology for the instance, specify a value of 1.
+  Otherwise, specify the default value of 2.
+
+Both the number of CPU cores and the number of threads per core must be specified. The valid combinations for each instance type can be found in the [CPU Options Documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-optimize-cpu.html?shortFooter=true#cpu-options-supported-instances-values).
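+
+As an illustrative sketch only (the AMI ID and instance type below are hypothetical placeholders, not values from this guide), pinning the CPU topology of a launch template might look like this:
+
+```typescript
+// A sketch of the cpuOptions block documented above.
+// The AMI ID and instance type are hypothetical placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LaunchTemplate } from "./.gen/providers/aws/launch-template";
+class CpuOptionsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LaunchTemplate(this, "cpu_options_example", {
+      namePrefix: "cpu-options-example-",
+      imageId: "ami-12345678",
+      instanceType: "c6a.xlarge", // AMD SEV-SNP requires M6a, R6a, or C6a
+      cpuOptions: {
+        amdSevSnp: "enabled",
+        coreCount: 2,
+        threadsPerCore: 1, // disable SMT on the launched instances
+      },
+    });
+  }
+}
+```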
+
+### Credit Specification
+
+Credit specification can be applied/modified to the EC2 Instance at any time.
+
+The `creditSpecification` block supports the following:
+
+* `cpuCredits` - The credit option for CPU usage.
+  Can be `standard` or `unlimited`.
+  T3 instances are launched as `unlimited` by default.
+  T2 instances are launched as `standard` by default.
+
+### Elastic GPU
+
+Attach an elastic GPU to the instance.
+
+The `elasticGpuSpecifications` block supports the following:
+
+* `type` - The [Elastic GPU Type](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/elastic-gpus.html#elastic-gpus-basics)
+
+### Elastic Inference Accelerator
+
+Attach an Elastic Inference Accelerator to the instance. Additional information about Elastic Inference in EC2 can be found in the [EC2 User Guide](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/elastic-inference.html).
+
+The `elasticInferenceAccelerator` configuration block supports the following:
+
+* `type` - (Required) Accelerator type.
+
+### Enclave Options
+
+The `enclaveOptions` block supports the following:
+
+* `enabled` - If set to `true`, Nitro Enclaves will be enabled on the instance.
+
+For more information, see the documentation on [Nitro Enclaves](https://docs.aws.amazon.com/enclaves/latest/user/nitro-enclave.html).
+
+### Hibernation Options
+
+The `hibernationOptions` block supports the following:
+
+* `configured` - If set to `true`, the launched EC2 instance will have hibernation enabled.
+
+### Instance Profile
+
+The [IAM Instance Profile](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html)
+to attach.
+
+The `iamInstanceProfile` block supports the following:
+
+* `arn` - The Amazon Resource Name (ARN) of the instance profile.
+* `name` - The name of the instance profile.
+
+### Instance Requirements
+
+This configuration block supports the following (see the sketch after this list):
+
+~> **NOTE:** Both `memoryMib.min` and `vcpuCount.min` must be specified.
+
+* `acceleratorCount` - (Optional) Block describing the minimum and maximum number of accelerators (GPUs, FPGAs, or AWS Inferentia chips). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum. Set to `0` to exclude instance types with accelerators.
+* `acceleratorManufacturers` - (Optional) List of accelerator manufacturer names. Default is any manufacturer.
+
+    ```
+    Valid names:
+      * amazon-web-services
+      * amd
+      * nvidia
+      * xilinx
+    ```
+
+* `acceleratorNames` - (Optional) List of accelerator names. Default is any accelerator.
+
+    ```
+    Valid names:
+      * a100 - NVIDIA A100 GPUs
+      * v100 - NVIDIA V100 GPUs
+      * k80 - NVIDIA K80 GPUs
+      * t4 - NVIDIA T4 GPUs
+      * m60 - NVIDIA M60 GPUs
+      * radeon-pro-v520 - AMD Radeon Pro V520 GPUs
+      * vu9p - Xilinx VU9P FPGAs
+    ```
+
+* `acceleratorTotalMemoryMib` - (Optional) Block describing the minimum and maximum total memory of the accelerators. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `acceleratorTypes` - (Optional) List of accelerator types. Default is any accelerator type.
+
+    ```
+    Valid types:
+      * fpga
+      * gpu
+      * inference
+    ```
+
+* `allowedInstanceTypes` - (Optional) List of instance types to apply your specified attributes against. All other instance types are ignored, even if they match your specified attributes. You can use strings with one or more wild cards, represented by an asterisk (\*), to allow an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are allowing the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are allowing all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is all instance types.
+
+    ~> **NOTE:** If you specify `allowedInstanceTypes`, you can't specify `excludedInstanceTypes`.
+
+* `bareMetal` - (Optional) Indicate whether bare metal instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `baselineEbsBandwidthMbps` - (Optional) Block describing the minimum and maximum baseline EBS bandwidth, in Mbps. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `burstablePerformance` - (Optional) Indicate whether burstable performance instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `cpuManufacturers` - (Optional) List of CPU manufacturer names. Default is any manufacturer.
+
+    ~> **NOTE:** Don't confuse the CPU hardware manufacturer with the CPU hardware architecture. Instances will be launched with a compatible CPU architecture based on the Amazon Machine Image (AMI) that you specify in your launch template.
+
+    ```
+    Valid names:
+      * amazon-web-services
+      * amd
+      * intel
+    ```
+
+* `excludedInstanceTypes` - (Optional) List of instance types to exclude. You can use strings with one or more wild cards, represented by an asterisk (\*), to exclude an instance type, size, or generation. The following are examples: `m5.8xlarge`, `c5*.*`, `m5a.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are excluding the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5a.*`, you are excluding all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is no excluded instance types.
+
+    ~> **NOTE:** If you specify `excludedInstanceTypes`, you can't specify `allowedInstanceTypes`.
+
+* `instanceGenerations` - (Optional) List of instance generation names. Default is any generation.
+
+    ```
+    Valid names:
+      * current - Recommended for best performance.
+      * previous - For existing applications optimized for older instance types.
+    ```
+
+* `localStorage` - (Optional) Indicate whether instance types with local storage volumes are `included`, `excluded`, or `required`. Default is `included`.
+* `localStorageTypes` - (Optional) List of local storage type names. Default is any storage type.
+
+    ```
+    Valid names:
+      * hdd - hard disk drive
+      * ssd - solid state drive
+    ```
+
+* `memoryGibPerVcpu` - (Optional) Block describing the minimum and maximum amount of memory (GiB) per vCPU. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+* `memoryMib` - (Required) Block describing the minimum and maximum amount of memory (MiB). Default is no maximum.
+    * `min` - (Required) Minimum.
+    * `max` - (Optional) Maximum.
+* `networkBandwidthGbps` - (Optional) Block describing the minimum and maximum amount of network bandwidth, in gigabits per second (Gbps). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `networkInterfaceCount` - (Optional) Block describing the minimum and maximum number of network interfaces. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `onDemandMaxPricePercentageOverLowestPrice` - (Optional) The price protection threshold for On-Demand Instances. This is the maximum you’ll pay for an On-Demand Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, it will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 20.
+
+    If you set `DesiredCapacityType` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+* `requireHibernateSupport` - (Optional) Indicate whether instance types must support On-Demand Instance Hibernation, either `true` or `false`. Default is `false`.
+* `spotMaxPricePercentageOverLowestPrice` - (Optional) The price protection threshold for Spot Instances. This is the maximum you’ll pay for a Spot Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, it will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 100.
+
+    If you set `DesiredCapacityType` to `vcpu` or `memory-mib`, the price protection threshold is applied based on the per-vCPU or per-memory price instead of the per-instance price.
+* `totalLocalStorageGb` - (Optional) Block describing the minimum and maximum total local storage (GB). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+* `vcpuCount` - (Required) Block describing the minimum and maximum number of vCPUs. Default is no maximum.
+    * `min` - (Required) Minimum.
+    * `max` - (Optional) Maximum.
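+
+As an illustrative sketch only (the AMI ID and the specific minimums below are hypothetical placeholders, not values from this guide), attribute-based instance type selection replaces `instanceType` entirely:
+
+```typescript
+// A sketch of the instanceRequirements block documented above.
+// instanceType must be omitted when instanceRequirements is set.
+// The AMI ID and the requirement values are hypothetical placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LaunchTemplate } from "./.gen/providers/aws/launch-template";
+class InstanceRequirementsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LaunchTemplate(this, "flexible", {
+      namePrefix: "instance-requirements-example-",
+      imageId: "ami-12345678",
+      instanceRequirements: {
+        // memoryMib.min and vcpuCount.min are required.
+        memoryMib: {
+          min: 8192,
+        },
+        vcpuCount: {
+          min: 2,
+          max: 8,
+        },
+        instanceGenerations: ["current"],
+      },
+    });
+  }
+}
+```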
+
+### License Specification
+
+Associate one or more license configurations.
+
+The `licenseSpecification` block supports the following:
+
+* `licenseConfigurationArn` - (Required) ARN of the license configuration.
+
+### Maintenance Options
+
+The `maintenanceOptions` block supports the following:
+
+* `autoRecovery` - (Optional) Disables the automatic recovery behavior of your instance or sets it to default. Can be `"default"` or `"disabled"`. See [Recover your instance](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-recover.html) for more details.
+
+### Market Options
+
+The market (purchasing) option for the instances.
+
+The `instanceMarketOptions` block supports the following:
+
+* `marketType` - The market type. Can be `spot`.
+* `spotOptions` - The options for [Spot Instances](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-spot-instances.html).
+
+The `spotOptions` block supports the following:
+
+* `blockDurationMinutes` - The required duration in minutes. This value must be a multiple of 60.
+* `instanceInterruptionBehavior` - The behavior when a Spot Instance is interrupted. Can be `hibernate`,
+  `stop`, or `terminate`. (Default: `terminate`).
+* `maxPrice` - The maximum hourly price you're willing to pay for the Spot Instances.
+* `spotInstanceType` - The Spot Instance request type. Can be `one-time`, or `persistent`.
+* `validUntil` - The end date of the request.
+
+### Metadata Options
+
+The metadata options for the instances.
+
+The `metadataOptions` block supports the following:
+
+* `httpEndpoint` - (Optional) Whether the metadata service is available. Can be `"enabled"` or `"disabled"`. (Default: `"enabled"`).
+* `httpTokens` - (Optional) Whether or not the metadata service requires session tokens, also referred to as _Instance Metadata Service Version 2 (IMDSv2)_. Can be `"optional"` or `"required"`. (Default: `"optional"`).
+* `httpPutResponseHopLimit` - (Optional) The desired HTTP PUT response hop limit for instance metadata requests. The larger the number, the further instance metadata requests can travel. Can be an integer from `1` to `64`. (Default: `1`).
+* `httpProtocolIpv6` - (Optional) Enables or disables the IPv6 endpoint for the instance metadata service. Can be `"enabled"` or `"disabled"`.
+* `instanceMetadataTags` - (Optional) Enables or disables access to instance tags from the instance metadata service. Can be `"enabled"` or `"disabled"`.
+
+For more information, see the documentation on the [Instance Metadata Service](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html).
+
+### Monitoring
+
+The `monitoring` block supports the following:
+
+* `enabled` - If `true`, the launched EC2 instance will have detailed monitoring enabled.
+
+### Network Interfaces
+
+Attaches one or more [Network Interfaces](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.html) to the instance.
+
+Check the limitations for Auto Scaling groups in the [Creating an Auto Scaling Group Using a Launch Template guide](https://docs.aws.amazon.com/autoscaling/ec2/userguide/create-asg-launch-template.html#limitations).
+
+Each `networkInterfaces` block supports the following:
+
+* `associateCarrierIpAddress` - (Optional) Associate a Carrier IP address with `eth0` for a new network interface.
+  Use this option when you launch an instance in a Wavelength Zone and want to associate a Carrier IP address with the network interface.
+  Boolean value, can be left unset.
+* `associatePublicIpAddress` - (Optional) Associate a public IP address with the network interface.
+  Boolean value, can be left unset.
+* `deleteOnTermination` - (Optional) Whether the network interface should be destroyed on instance termination.
+* `description` - (Optional) Description of the network interface.
+* `deviceIndex` - (Optional) The integer index of the network interface attachment.
+* `interfaceType` - (Optional) The type of network interface. To create an Elastic Fabric Adapter (EFA), specify `efa`.
+* `ipv4PrefixCount` - (Optional) The number of IPv4 prefixes to be automatically assigned to the network interface. Conflicts with `ipv4Prefixes`.
+* `ipv4Prefixes` - (Optional) One or more IPv4 prefixes to be assigned to the network interface.
+  Conflicts with `ipv4PrefixCount`.
+* `ipv6Addresses` - (Optional) One or more specific IPv6 addresses from the IPv6 CIDR block range of your subnet. Conflicts with `ipv6AddressCount`.
+* `ipv6AddressCount` - (Optional) The number of IPv6 addresses to assign to a network interface. Conflicts with `ipv6Addresses`.
+* `ipv6PrefixCount` - (Optional) The number of IPv6 prefixes to be automatically assigned to the network interface. Conflicts with `ipv6Prefixes`.
+* `ipv6Prefixes` - (Optional) One or more IPv6 prefixes to be assigned to the network interface. Conflicts with `ipv6PrefixCount`.
+* `networkInterfaceId` - (Optional) The ID of the network interface to attach.
+* `networkCardIndex` - (Optional) The index of the network card. Some instance types support multiple network cards. The primary network interface must be assigned to network card index 0. The default is network card index 0.
+* `privateIpAddress` - (Optional) The primary private IPv4 address.
+* `ipv4AddressCount` - (Optional) The number of secondary private IPv4 addresses to assign to a network interface. Conflicts with `ipv4Addresses`.
+* `ipv4Addresses` - (Optional) One or more private IPv4 addresses to associate. Conflicts with `ipv4AddressCount`.
+* `securityGroups` - (Optional) A list of security group IDs to associate.
+* `subnetId` - (Optional) The VPC Subnet ID to associate.
+
+### Placement
+
+The [Placement Group](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/placement-groups.html) of the instance.
+
+The `placement` block supports the following:
+
+* `affinity` - (Optional) The affinity setting for an instance on a Dedicated Host.
+* `availabilityZone` - (Optional) The Availability Zone for the instance.
+* `groupName` - (Optional) The name of the placement group for the instance.
+* `hostId` - (Optional) The ID of the Dedicated Host for the instance.
+* `hostResourceGroupArn` - (Optional) The ARN of the Host Resource Group in which to launch instances.
+* `spreadDomain` - (Optional) Reserved for future use.
+* `tenancy` - (Optional) The tenancy of the instance (if the instance is running in a VPC). Can be `default`, `dedicated`, or `host`.
+* `partitionNumber` - (Optional) The number of the partition the instance should launch in. Valid only if the placement group strategy is set to `partition`.
+
+### Private DNS Name Options
+
+The `privateDnsNameOptions` block supports the following:
+
+* `enableResourceNameDnsAaaaRecord` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records.
+* `enableResourceNameDnsARecord` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS A records.
+* `hostnameType` - (Optional) The type of hostname for Amazon EC2 instances. For IPv4-only subnets, an instance DNS name must be based on the instance IPv4 address. For IPv6-native subnets, an instance DNS name must be based on the instance ID. For dual-stack subnets, you can specify whether DNS names use the instance IPv4 address or the instance ID. Valid values: `ip-name` and `resource-name`.
+
+### Tag Specifications
+
+The tags to apply to the resources during launch. You can tag instances, volumes, elastic GPUs and spot instance requests. More information can be found in the [EC2 API documentation](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_LaunchTemplateTagSpecificationRequest.html).
+
+Each `tagSpecifications` block supports the following (see the sketch after this list):
+
+* `resourceType` - (Optional) The type of resource to tag.
+* `tags` - (Optional) A map of tags to assign to the resource.
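+
+As an illustrative sketch only (the AMI ID, instance type, and tag values below are hypothetical placeholders, not values from this guide), tagging both the launched instances and their attached volumes looks like this:
+
+```typescript
+// A sketch of tagSpecifications covering two resource types.
+// The AMI ID, instance type, and tag values are hypothetical placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LaunchTemplate } from "./.gen/providers/aws/launch-template";
+class TagSpecificationsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LaunchTemplate(this, "tagged", {
+      namePrefix: "tag-specifications-example-",
+      imageId: "ami-12345678",
+      instanceType: "t3.micro",
+      tagSpecifications: [
+        {
+          // Tags applied to the launched instances.
+          resourceType: "instance",
+          tags: {
+            Name: "example-instance",
+          },
+        },
+        {
+          // Tags applied to the attached EBS volumes.
+          resourceType: "volume",
+          tags: {
+            Name: "example-volume",
+          },
+        },
+      ],
+    });
+  }
+}
+```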
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the launch template.
+* `id` - The ID of the launch template.
+* `latestVersion` - The latest version of the launch template.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Launch Templates using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Launch Templates using the `id`. For example:
+
+```console
+% terraform import aws_launch_template.web lt-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lb.html.markdown b/website/docs/cdktf/typescript/r/lb.html.markdown
new file mode 100644
index 00000000000..b59c24aa543
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lb.html.markdown
@@ -0,0 +1,243 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb"
+description: |-
+  Provides a Load Balancer resource.
+---
+
+
+
+# Resource: aws_lb
+
+Provides a Load Balancer resource.
+
+~> **Note:** `awsAlb` is known as `awsLb`. The functionality is identical.
+
+## Example Usage
+
+### Application Load Balancer
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Lb } from "./.gen/providers/aws/lb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Lb(this, "test", {
+      accessLogs: {
+        bucket: lbLogs.id,
+        enabled: true,
+        prefix: "test-lb",
+      },
+      enableDeletionProtection: true,
+      internal: false,
+      loadBalancerType: "application",
+      name: "test-lb-tf",
+      securityGroups: [lbSg.id],
+      subnets: Token.asList(
+        "${[ for subnet in ${" + public.fqn + "} : subnet.id]}"
+      ),
+      tags: {
+        Environment: "production",
+      },
+    });
+  }
+}
+
+```
+
+### Network Load Balancer
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Lb } from "./.gen/providers/aws/lb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Lb(this, "test", {
+      enableDeletionProtection: true,
+      internal: false,
+      loadBalancerType: "network",
+      name: "test-lb-tf",
+      subnets: Token.asList(
+        "${[ for subnet in ${" + public.fqn + "} : subnet.id]}"
+      ),
+      tags: {
+        Environment: "production",
+      },
+    });
+  }
+}
+
+```
+
+### Specifying Elastic IPs
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Lb } from "./.gen/providers/aws/lb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Lb(this, "example", {
+      loadBalancerType: "network",
+      name: "example",
+      subnetMapping: [
+        {
+          allocationId: example1.id,
+          subnetId: Token.asString(awsSubnetExample1.id),
+        },
+        {
+          allocationId: example2.id,
+          subnetId: Token.asString(awsSubnetExample2.id),
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Specifying private IP addresses for an internal-facing load balancer
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Lb } from "./.gen/providers/aws/lb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Lb(this, "example", {
+      loadBalancerType: "network",
+      name: "example",
+      subnetMapping: [
+        {
+          privateIpv4Address: "10.0.1.15",
+          subnetId: example1.id,
+        },
+        {
+          privateIpv4Address: "10.0.2.15",
+          subnetId: example2.id,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** Internal LBs can only use `ipv4` as the `ipAddressType`. You can only change to the `dualstack` `ipAddressType` if the selected subnets are IPv6 enabled.
+
+~> **NOTE:** One of either `subnets` or `subnetMapping` is required.
+
+This resource supports the following arguments:
+
+* `accessLogs` - (Optional) An Access Logs block. Access Logs documented below.
+* `customerOwnedIpv4Pool` - (Optional) The ID of the customer-owned IPv4 pool to use for this load balancer.
+* `desyncMitigationMode` - (Optional) Determines how the load balancer handles requests that might pose a security risk to an application due to HTTP desync. Valid values are `monitor`, `defensive` (default), `strictest`.
+* `dropInvalidHeaderFields` - (Optional) Indicates whether HTTP headers with header fields that are not valid are removed by the load balancer (`true`) or routed to targets (`false`). The default is `false`. Elastic Load Balancing requires that message header names contain only alphanumeric characters and hyphens. Only valid for Load Balancers of type `application`.
+* `enableCrossZoneLoadBalancing` - (Optional) If `true`, cross-zone load balancing of the load balancer will be enabled. For `network` and `gateway` type load balancers, this feature is disabled by default (`false`). For `application` load balancers this feature is always enabled (`true`) and cannot be disabled. Defaults to `false`.
+* `enableDeletionProtection` - (Optional) If `true`, deletion of the load balancer will be disabled via the AWS API. This will prevent Terraform from deleting the load balancer. Defaults to `false`.
+* `enableHttp2` - (Optional) Indicates whether HTTP/2 is enabled in `application` load balancers. Defaults to `true`.
+* `enableTlsVersionAndCipherSuiteHeaders` - (Optional) Indicates whether the two headers (`x-amzn-tls-version` and `x-amzn-tls-cipher-suite`), which contain information about the negotiated TLS version and cipher suite, are added to the client request before sending it to the target. Only valid for Load Balancers of type `application`. Defaults to `false`.
+* `enableXffClientPort` - (Optional) Indicates whether the `X-Forwarded-For` header should preserve the source port that the client used to connect to the load balancer in `application` load balancers. Defaults to `false`.
+* `enableWafFailOpen` - (Optional) Indicates whether to allow a WAF-enabled load balancer to route requests to targets if it is unable to forward the request to AWS WAF. Defaults to `false`.
+* `idleTimeout` - (Optional) The time in seconds that the connection is allowed to be idle. Only valid for Load Balancers of type `application`. Default: 60.
+* `internal` - (Optional) If `true`, the LB will be internal. Defaults to `false`.
+* `ipAddressType` - (Optional) The type of IP addresses used by the subnets for your load balancer. The possible values are `ipv4` and `dualstack`.
+* `loadBalancerType` - (Optional) The type of load balancer to create. Possible values are `application`, `gateway`, or `network`. The default value is `application`.
+* `name` - (Optional) The name of the LB. This name must be unique within your AWS account, can have a maximum of 32 characters,
+must contain only alphanumeric characters or hyphens, and must not begin or end with a hyphen. If not specified,
+Terraform will autogenerate a name beginning with `tf-lb`.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `securityGroups` - (Optional) A list of security group IDs to assign to the LB. Only valid for Load Balancers of type `application`.
+* `preserveHostHeader` - (Optional) Indicates whether the Application Load Balancer should preserve the Host header in the HTTP request and send it to the target without any change. Defaults to `false`.
+* `subnetMapping` - (Optional) A subnet mapping block as documented below.
+* `subnets` - (Optional) A list of subnet IDs to attach to the LB. Subnets
+cannot be updated for Load Balancers of type `network`. Changing this value
+for load balancers of type `network` will force a recreation of the resource.
+* `xffHeaderProcessingMode` - (Optional) Determines how the load balancer modifies the `X-Forwarded-For` header in the HTTP request before sending the request to the target. The possible values are `append`, `preserve`, and `remove`. Only valid for Load Balancers of type `application`. The default is `append`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### access_logs
+
+* `bucket` - (Required) The S3 bucket name to store the logs in.
+* `enabled` - (Optional) Boolean to enable / disable `accessLogs`. Defaults to `false`, even when `bucket` is specified.
+* `prefix` - (Optional) The S3 bucket prefix. Logs are stored in the root if not configured.
+
+### subnet_mapping
+
+* `subnetId` - (Required) ID of the subnet to attach to the load balancer. You can specify only one subnet per Availability Zone.
+* `allocationId` - (Optional) The allocation ID of the Elastic IP address for an internet-facing load balancer.
+* `ipv6Address` - (Optional) The IPv6 address. You associate IPv6 CIDR blocks with your VPC and choose the subnets where you launch both internet-facing and internal Application Load Balancers or Network Load Balancers.
+* `privateIpv4Address` - (Optional) The private IPv4 address for an internal load balancer.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the load balancer (matches `id`).
+* `arnSuffix` - The ARN suffix for use with CloudWatch Metrics.
+* `dnsName` - The DNS name of the load balancer.
+* `id` - The ARN of the load balancer (matches `arn`).
+* `subnetMapping.*OutpostId` - ID of the Outpost containing the load balancer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `zoneId` - The canonical hosted zone ID of the load balancer (to be used in a Route 53 Alias record).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import LBs using their ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import LBs using their ARN. For example:
+
+```console
+% terraform import aws_lb.bar arn:aws:elasticloadbalancing:us-west-2:123456789012:loadbalancer/app/my-load-balancer/50dc6c495c0c9188
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lb_cookie_stickiness_policy.html.markdown b/website/docs/cdktf/typescript/r/lb_cookie_stickiness_policy.html.markdown
new file mode 100644
index 00000000000..b1dfda72fe6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lb_cookie_stickiness_policy.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_lb_cookie_stickiness_policy"
+description: |-
+  Provides a load balancer cookie stickiness policy, which allows an ELB to control the sticky session lifetime of the browser.
+---
+
+
+
+# Resource: aws_lb_cookie_stickiness_policy
+
+Provides a load balancer cookie stickiness policy, which allows an ELB to control the sticky session lifetime of the browser.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { Elb } from "./.gen/providers/aws/elb"; +import { LbCookieStickinessPolicy } from "./.gen/providers/aws/lb-cookie-stickiness-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const lb = new Elb(this, "lb", { + availabilityZones: ["us-east-1a"], + listener: [ + { + instancePort: 8000, + instanceProtocol: "http", + lbPort: 80, + lbProtocol: "http", + }, + ], + name: "test-lb", + }); + new LbCookieStickinessPolicy(this, "foo", { + cookieExpirationPeriod: 600, + lbPort: 80, + loadBalancer: lb.id, + name: "foo-policy", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the stickiness policy. +* `loadBalancer` - (Required) The load balancer to which the policy + should be attached. +* `lbPort` - (Required) The load balancer port to which the policy + should be applied. This must be an active listener on the load +balancer. +* `cookieExpirationPeriod` - (Optional) The time period after which + the session cookie should be considered stale, expressed in seconds. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the policy. +* `name` - The name of the stickiness policy. +* `loadBalancer` - The load balancer to which the policy is attached. +* `lbPort` - The load balancer port to which the policy is applied. +* `cookieExpirationPeriod` - The time period after which the session cookie is considered stale, expressed in seconds. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lb_listener.html.markdown b/website/docs/cdktf/typescript/r/lb_listener.html.markdown new file mode 100644 index 00000000000..64d705b15d1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/lb_listener.html.markdown @@ -0,0 +1,499 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb_listener" +description: |- + Provides a Load Balancer Listener resource. +--- + + + +# Resource: aws_lb_listener + +Provides a Load Balancer Listener resource. + +~> **Note:** `awsAlbListener` is known as `awsLbListener`. The functionality is identical. + +## Example Usage + +### Forward Action + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const frontEnd = new Lb(this, "front_end", {}); + const awsLbTargetGroupFrontEnd = new LbTargetGroup(this, "front_end_1", {}); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsLbTargetGroupFrontEnd.overrideLogicalId("front_end"); + const awsLbListenerFrontEnd = new LbListener(this, "front_end_2", { + certificateArn: + "arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4", + defaultAction: [ + { + targetGroupArn: Token.asString(awsLbTargetGroupFrontEnd.arn), + type: "forward", + }, + ], + loadBalancerArn: frontEnd.arn, + port: Token.asNumber("443"), + protocol: "HTTPS", + sslPolicy: "ELBSecurityPolicy-2016-08", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerFrontEnd.overrideLogicalId("front_end"); + } +} + +``` + +To a NLB: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LbListener } from "./.gen/providers/aws/lb-listener"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LbListener(this, "front_end", { + alpnPolicy: "HTTP2Preferred", + certificateArn: + "arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4", + defaultAction: [ + { + targetGroupArn: Token.asString(awsLbTargetGroupFrontEnd.arn), + type: "forward", + }, + ], + loadBalancerArn: Token.asString(awsLbFrontEnd.arn), + port: Token.asNumber("443"), + protocol: "TLS", + }); + } +} + +``` + +### Redirect Action + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const frontEnd = new Lb(this, "front_end", {}); + const awsLbListenerFrontEnd = new LbListener(this, "front_end_1", { + defaultAction: [ + { + redirect: { + port: "443", + protocol: "HTTPS", + statusCode: "HTTP_301", + }, + type: "redirect", + }, + ], + loadBalancerArn: frontEnd.arn, + port: Token.asNumber("80"), + protocol: "HTTP", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerFrontEnd.overrideLogicalId("front_end"); + } +} + +``` + +### Fixed-response Action + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const frontEnd = new Lb(this, "front_end", {}); + const awsLbListenerFrontEnd = new LbListener(this, "front_end_1", { + defaultAction: [ + { + fixedResponse: { + contentType: "text/plain", + messageBody: "Fixed response content", + statusCode: "200", + }, + type: "fixed-response", + }, + ], + loadBalancerArn: frontEnd.arn, + port: Token.asNumber("80"), + protocol: "HTTP", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerFrontEnd.overrideLogicalId("front_end"); + } +} + +``` + +### Authenticate-cognito Action + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +import { CognitoUserPoolDomain } from "./.gen/providers/aws/cognito-user-pool-domain"; +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +interface MyConfig { + name: any; + name1: any; + userPoolId: any; + domain: any; + userPoolId1: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const pool = new CognitoUserPool(this, "pool", { + name: config.name, + }); + const client = new CognitoUserPoolClient(this, "client", { + name: config.name1, + userPoolId: config.userPoolId, + }); + const domain = new CognitoUserPoolDomain(this, "domain", { + domain: config.domain, + userPoolId: config.userPoolId1, + }); + const frontEnd = new Lb(this, "front_end", {}); + const awsLbTargetGroupFrontEnd = new LbTargetGroup(this, "front_end_4", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbTargetGroupFrontEnd.overrideLogicalId("front_end"); + const awsLbListenerFrontEnd = new LbListener(this, "front_end_5", { + defaultAction: [ + { + authenticateCognito: { + userPoolArn: pool.arn, + userPoolClientId: client.id, + userPoolDomain: domain.domain, + }, + type: "authenticate-cognito", + }, + { + targetGroupArn: Token.asString(awsLbTargetGroupFrontEnd.arn), + type: "forward", + }, + ], + loadBalancerArn: frontEnd.arn, + port: Token.asNumber("80"), + protocol: "HTTP", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerFrontEnd.overrideLogicalId("front_end"); + } +} + +``` + +### Authenticate-OIDC Action + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const frontEnd = new Lb(this, "front_end", {}); + const awsLbTargetGroupFrontEnd = new LbTargetGroup(this, "front_end_1", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbTargetGroupFrontEnd.overrideLogicalId("front_end"); + const awsLbListenerFrontEnd = new LbListener(this, "front_end_2", { + defaultAction: [ + { + authenticateOidc: { + authorizationEndpoint: "https://example.com/authorization_endpoint", + clientId: "client_id", + clientSecret: "client_secret", + issuer: "https://example.com", + tokenEndpoint: "https://example.com/token_endpoint", + userInfoEndpoint: "https://example.com/user_info_endpoint", + }, + type: "authenticate-oidc", + }, + { + targetGroupArn: Token.asString(awsLbTargetGroupFrontEnd.arn), + type: "forward", + }, + ], + loadBalancerArn: frontEnd.arn, + port: Token.asNumber("80"), + protocol: "HTTP", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerFrontEnd.overrideLogicalId("front_end"); + } +} + +``` + +### Gateway Load Balancer Listener + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Lb(this, "example", { + loadBalancerType: "gateway", + name: "example", + subnetMapping: [ + { + subnetId: Token.asString(awsSubnetExample.id), + }, + ], + }); + const awsLbTargetGroupExample = new LbTargetGroup(this, "example_1", { + healthCheck: { + port: Token.asString(80), + protocol: "HTTP", + }, + name: "example", + port: 6081, + protocol: "GENEVE", + vpcId: Token.asString(awsVpcExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbTargetGroupExample.overrideLogicalId("example"); + const awsLbListenerExample = new LbListener(this, "example_2", { + defaultAction: [ + { + targetGroupArn: Token.asString(awsLbTargetGroupExample.id), + type: "forward", + }, + ], + loadBalancerArn: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `defaultAction` - (Required) Configuration block for default actions. Detailed below. +* `loadBalancerArn` - (Required, Forces New Resource) ARN of the load balancer. + +The following arguments are optional: + +* `alpnPolicy` - (Optional) Name of the Application-Layer Protocol Negotiation (ALPN) policy. Can be set if `protocol` is `tls`. 
Valid values are `http1Only`, `http2Only`, `http2Optional`, `http2Preferred`, and `none`.
+* `certificateArn` - (Optional) ARN of the default SSL server certificate. Exactly one certificate is required if the protocol is HTTPS. For adding additional SSL certificates, see the [`awsLbListenerCertificate` resource](/docs/providers/aws/r/lb_listener_certificate.html).
+* `port` - (Optional) Port on which the load balancer is listening. Not valid for Gateway Load Balancers.
+* `protocol` - (Optional) Protocol for connections from clients to the load balancer. For Application Load Balancers, valid values are `http` and `https`, with a default of `http`. For Network Load Balancers, valid values are `tcp`, `tls`, `udp`, and `tcpUdp`. Not valid to use `udp` or `tcpUdp` if dual-stack mode is enabled. Not valid for Gateway Load Balancers.
+* `sslPolicy` - (Optional) Name of the SSL Policy for the listener. Required if `protocol` is `https` or `tls`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+~> **NOTE:** Listeners attached to Application Load Balancers must use the `http` or `https` protocol, while listeners attached to Network Load Balancers must use one of the `tcp`, `tls`, `udp`, or `tcpUdp` protocols listed above.
+
+### default_action
+
+The following arguments are required:
+
+* `type` - (Required) Type of routing action. Valid values are `forward`, `redirect`, `fixedResponse`, `authenticateCognito` and `authenticateOidc`.
+
+The following arguments are optional:
+
+* `authenticateCognito` - (Optional) Configuration block for using Amazon Cognito to authenticate users. Specify only when `type` is `authenticateCognito`. Detailed below.
+* `authenticateOidc` - (Optional) Configuration block for an identity provider that is compliant with OpenID Connect (OIDC). Specify only when `type` is `authenticateOidc`. Detailed below.
+* `fixedResponse` - (Optional) Information for creating an action that returns a custom HTTP response. Required if `type` is `fixedResponse`.
+* `forward` - (Optional) Configuration block for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both the `forward` block and the `targetGroupArn` attribute, you can specify only one target group using `forward`, and it must be the same target group specified in `targetGroupArn`. Detailed below.
+* `order` - (Optional) Order for the action. This value is required for rules with multiple actions. The action with the lowest value for order is performed first. Valid values are between `1` and `50000`.
+* `redirect` - (Optional) Configuration block for creating a redirect action. Required if `type` is `redirect`. Detailed below.
+* `targetGroupArn` - (Optional) ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead.
+
+#### authenticate_cognito
+
+The following arguments are required:
+
+* `userPoolArn` - (Required) ARN of the Cognito user pool.
+* `userPoolClientId` - (Required) ID of the Cognito user pool client.
+* `userPoolDomain` - (Required) Domain prefix or fully-qualified domain name of the Cognito user pool.
+
+The following arguments are optional:
+
+* `authenticationRequestExtraParams` - (Optional) Query parameters to include in the redirect request to the authorization endpoint. Max: 10. Detailed below.
+* `onUnauthenticatedRequest` - (Optional) Behavior if the user is not authenticated. Valid values are `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) Set of user claims to be requested from the IdP.
+* `sessionCookieName` - (Optional) Name of the cookie used to maintain session information.
+* `sessionTimeout` - (Optional) Maximum duration of the authentication session, in seconds.
+
+##### authentication_request_extra_params
+
+* `key` - (Required) Key of the query parameter.
+* `value` - (Required) Value of the query parameter.
+
+#### authenticate_oidc
+
+The following arguments are required:
+
+* `authorizationEndpoint` - (Required) Authorization endpoint of the IdP.
+* `clientId` - (Required) OAuth 2.0 client identifier.
+* `clientSecret` - (Required) OAuth 2.0 client secret.
+* `issuer` - (Required) OIDC issuer identifier of the IdP.
+* `tokenEndpoint` - (Required) Token endpoint of the IdP.
+* `userInfoEndpoint` - (Required) User info endpoint of the IdP.
+
+The following arguments are optional:
+
+* `authenticationRequestExtraParams` - (Optional) Query parameters to include in the redirect request to the authorization endpoint. Max: 10.
+* `onUnauthenticatedRequest` - (Optional) Behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) Set of user claims to be requested from the IdP.
+* `sessionCookieName` - (Optional) Name of the cookie used to maintain session information.
+* `sessionTimeout` - (Optional) Maximum duration of the authentication session, in seconds.
+
+#### fixed_response
+
+The following arguments are required:
+
+* `contentType` - (Required) Content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
+
+The following arguments are optional:
+
+* `messageBody` - (Optional) Message body.
+* `statusCode` - (Optional) HTTP response code. Valid values are `2Xx`, `4Xx`, or `5Xx`.
+
+#### forward
+
+The following arguments are required:
+
+* `targetGroup` - (Required) Set of 1-5 target group blocks. Detailed below.
+
+The following arguments are optional:
+
+* `stickiness` - (Optional) Configuration block for target group stickiness for the rule. Detailed below.
+
+##### target_group
+
+The following arguments are required:
+
+* `arn` - (Required) ARN of the target group.
+
+The following arguments are optional:
+
+* `weight` - (Optional) Weight. The range is 0 to 999.
+
+##### stickiness
+
+The following arguments are required:
+
+* `duration` - (Required) Time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
+
+The following arguments are optional:
+
+* `enabled` - (Optional) Whether target group stickiness is enabled. Default is `false`.
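+
+Taken together, the `forward`, `targetGroup`, and `stickiness` blocks above let a single default action split traffic across weighted target groups. The following hand-written sketch (not produced by `cdktf convert`) shows one possible shape; the `blue` and `green` target groups, the 90/10 weights, and the 600-second stickiness duration are illustrative assumptions, not part of the provider documentation.
+
+```typescript
+// Hand-written sketch; names and numeric values are illustrative.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Lb } from "./.gen/providers/aws/lb";
+import { LbListener } from "./.gen/providers/aws/lb-listener";
+import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group";
+class WeightedForwardSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const frontEnd = new Lb(this, "front_end", {});
+    const blue = new LbTargetGroup(this, "blue", {});
+    const green = new LbTargetGroup(this, "green", {});
+    new LbListener(this, "front_end_listener", {
+      defaultAction: [
+        {
+          forward: {
+            // Split traffic 90/10 between the two target groups.
+            targetGroup: [
+              {
+                arn: blue.arn,
+                weight: 90,
+              },
+              {
+                arn: green.arn,
+                weight: 10,
+              },
+            ],
+            // Route a given client to the same target group for 10 minutes.
+            stickiness: {
+              duration: 600,
+              enabled: true,
+            },
+          },
+          type: "forward",
+        },
+      ],
+      loadBalancerArn: frontEnd.arn,
+      port: 80,
+      protocol: "HTTP",
+    });
+  }
+}
+```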
+
+#### redirect
+
+~> **NOTE:** You can reuse URI components using the following reserved keywords: `#{protocol}`, `#{host}`, `#{port}`, `#{path}` (the leading "/" is removed) and `#{query}`.
+
+The following arguments are required:
+
+* `statusCode` - (Required) HTTP redirect code. The redirect is either permanent (`http301`) or temporary (`http302`).
+
+The following arguments are optional:
+
+* `host` - (Optional) Hostname. This component is not percent-encoded. The hostname can contain `#{host}`. Defaults to `#{host}`.
+* `path` - (Optional) Absolute path, starting with the leading "/". This component is not percent-encoded. The path can contain #{host}, #{path}, and #{port}. Defaults to `/#{path}`.
+* `port` - (Optional) Port. Specify a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
+* `protocol` - (Optional) Protocol. Valid values are `http`, `https`, or `#{protocol}`. Defaults to `#{protocol}`.
+* `query` - (Optional) Query parameters, URL-encoded when necessary, but not percent-encoded. Do not include the leading "?". Defaults to `#{query}`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the listener (matches `id`).
+* `id` - ARN of the listener (matches `arn`).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import listeners using their ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import listeners using their ARN. For example:
+
+```console
+% terraform import aws_lb_listener.front_end arn:aws:elasticloadbalancing:us-west-2:187416307283:listener/app/front-end-alb/8e4497da625e2d8a/9ab28ade35828f96
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lb_listener_certificate.html.markdown b/website/docs/cdktf/typescript/r/lb_listener_certificate.html.markdown
new file mode 100644
index 00000000000..64c9584db92
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lb_listener_certificate.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_listener_certificate"
+description: |-
+  Provides a Load Balancer Listener Certificate resource.
+---
+
+
+# Resource: aws_lb_listener_certificate
+
+Provides a Load Balancer Listener Certificate resource.
+
+This resource is for additional certificates and does not replace the default certificate on the listener.
+
+~> **Note:** `awsAlbListenerCertificate` is known as `awsLbListenerCertificate`. The functionality is identical.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { AcmCertificate } from "./.gen/providers/aws/acm-certificate"; +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { LbListenerCertificate } from "./.gen/providers/aws/lb-listener-certificate"; +interface MyConfig { + defaultAction: any; + loadBalancerArn: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new AcmCertificate(this, "example", {}); + new Lb(this, "front_end", {}); + const awsLbListenerFrontEnd = new LbListener(this, "front_end_2", { + defaultAction: config.defaultAction, + loadBalancerArn: config.loadBalancerArn, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerFrontEnd.overrideLogicalId("front_end"); + const awsLbListenerCertificateExample = new LbListenerCertificate( + this, + "example_3", + { + certificateArn: example.arn, + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerCertificateExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `listenerArn` - (Required, Forces New Resource) The ARN of the listener to which to attach the certificate. +* `certificateArn` - (Required, Forces New Resource) The ARN of the certificate to attach to the listener. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `listenerArn` and `certificateArn` separated by a `_`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Listener Certificates using the listener arn and certificate arn, separated by an underscore (`_`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Listener Certificates using the listener arn and certificate arn, separated by an underscore (`_`). For example: + +```console +% terraform import aws_lb_listener_certificate.example arn:aws:elasticloadbalancing:us-west-2:123456789012:listener/app/test/8e4497da625e2d8a/9ab28ade35828f96/67b3d2d36dd7c26b_arn:aws:iam::123456789012:server-certificate/tf-acc-test-6453083910015726063 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lb_listener_rule.html.markdown b/website/docs/cdktf/typescript/r/lb_listener_rule.html.markdown new file mode 100644 index 00000000000..b642d66605f --- /dev/null +++ b/website/docs/cdktf/typescript/r/lb_listener_rule.html.markdown @@ -0,0 +1,374 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb_listener_rule" +description: |- + Provides a Load Balancer Listener Rule resource. +--- + + + +# Resource: aws_lb_listener_rule + +Provides a Load Balancer Listener Rule resource. + +~> **Note:** `awsAlbListenerRule` is known as `awsLbListenerRule`. The functionality is identical. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +import { CognitoUserPoolDomain } from "./.gen/providers/aws/cognito-user-pool-domain"; +import { Lb } from "./.gen/providers/aws/lb"; +import { LbListener } from "./.gen/providers/aws/lb-listener"; +import { LbListenerRule } from "./.gen/providers/aws/lb-listener-rule"; +interface MyConfig { + name: any; + name1: any; + userPoolId: any; + domain: any; + userPoolId1: any; + defaultAction: any; + loadBalancerArn: any; + condition: any; + condition1: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const pool = new CognitoUserPool(this, "pool", { + name: config.name, + }); + const client = new CognitoUserPoolClient(this, "client", { + name: config.name1, + userPoolId: config.userPoolId, + }); + const domain = new CognitoUserPoolDomain(this, "domain", { + domain: config.domain, + userPoolId: config.userPoolId1, + }); + new Lb(this, "front_end", {}); + const awsLbListenerFrontEnd = new LbListener(this, "front_end_4", { + defaultAction: config.defaultAction, + loadBalancerArn: config.loadBalancerArn, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbListenerFrontEnd.overrideLogicalId("front_end"); + new LbListenerRule(this, "admin", { + action: [ + { + authenticateCognito: { + userPoolArn: pool.arn, + userPoolClientId: client.id, + userPoolDomain: domain.domain, + }, + type: "authenticate-cognito", + }, + { + targetGroupArn: static.arn, + type: "forward", + }, + ], + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + condition: config.condition, + }); + new LbListenerRule(this, "health_check", { + action: [ + { + fixedResponse: { + contentType: "text/plain", + messageBody: "HEALTHY", + statusCode: "200", + }, + type: "fixed-response", + }, + ], + condition: [ + { + queryString: [ + { + key: "health", + value: "check", + }, + { + value: "bar", + }, + ], + }, + ], + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + }); + new LbListenerRule(this, "host_based_routing", { + action: [ + { + forward: { + stickiness: { + duration: 600, + enabled: true, + }, + targetGroup: [ + { + arn: main.arn, + weight: 80, + }, + { + arn: canary.arn, + weight: 20, + }, + ], + }, + type: "forward", + }, + ], + condition: [ + { + hostHeader: { + values: ["my-service.*.terraform.io"], + }, + }, + ], + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + priority: 99, + }); + new LbListenerRule(this, "host_based_weighted_routing", { + action: [ + { + targetGroupArn: static.arn, + type: "forward", + }, + ], + condition: [ + { + hostHeader: { + values: ["my-service.*.terraform.io"], + }, + }, + ], + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + priority: 99, + }); + new LbListenerRule(this, "oidc", { + action: [ + { + authenticateOidc: { + authorizationEndpoint: "https://example.com/authorization_endpoint", + clientId: "client_id", + clientSecret: "client_secret", + issuer: "https://example.com", + tokenEndpoint: 
"https://example.com/token_endpoint", + userInfoEndpoint: "https://example.com/user_info_endpoint", + }, + type: "authenticate-oidc", + }, + { + targetGroupArn: static.arn, + type: "forward", + }, + ], + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + condition: config.condition1, + }); + new LbListenerRule(this, "redirect_http_to_https", { + action: [ + { + redirect: { + port: "443", + protocol: "HTTPS", + statusCode: "HTTP_301", + }, + type: "redirect", + }, + ], + condition: [ + { + httpHeader: { + httpHeaderName: "X-Forwarded-For", + values: ["192.168.1.*"], + }, + }, + ], + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + }); + new LbListenerRule(this, "static", { + action: [ + { + targetGroupArn: static.arn, + type: "forward", + }, + ], + condition: [ + { + pathPattern: { + values: ["/static/*"], + }, + }, + { + hostHeader: { + values: ["example.com"], + }, + }, + ], + listenerArn: Token.asString(awsLbListenerFrontEnd.arn), + priority: 100, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `listenerArn` - (Required, Forces New Resource) The ARN of the listener to which to attach the rule. +* `priority` - (Optional) The priority for the rule between `1` and `50000`. Leaving it unset will automatically set the rule with next available priority after currently existing highest rule. A listener can't have multiple rules with the same priority. +* `action` - (Required) An Action block. Action blocks are documented below. +* `condition` - (Required) A Condition block. Multiple condition blocks of different types can be set and all must be satisfied for the rule to match. Condition blocks are documented below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Action Blocks + +Action Blocks (for `action`) support the following: + +* `type` - (Required) The type of routing action. Valid values are `forward`, `redirect`, `fixedResponse`, `authenticateCognito` and `authenticateOidc`. +* `targetGroupArn` - (Optional) The ARN of the Target Group to which to route traffic. Specify only if `type` is `forward` and you want to route to a single target group. To route to one or more target groups, use a `forward` block instead. +* `forward` - (Optional) Information for creating an action that distributes requests among one or more target groups. Specify only if `type` is `forward`. If you specify both `forward` block and `targetGroupArn` attribute, you can specify only one target group using `forward` and it must be the same target group specified in `targetGroupArn`. +* `redirect` - (Optional) Information for creating a redirect action. Required if `type` is `redirect`. +* `fixedResponse` - (Optional) Information for creating an action that returns a custom HTTP response. Required if `type` is `fixedResponse`. +* `authenticateCognito` - (Optional) Information for creating an authenticate action using Cognito. Required if `type` is `authenticateCognito`. +* `authenticateOidc` - (Optional) Information for creating an authenticate action using OIDC. Required if `type` is `authenticateOidc`. + +Forward Blocks (for `forward`) support the following: + +* `targetGroup` - (Required) One or more target groups block. 
+
+* `stickiness` - (Optional) The target group stickiness for the rule.
+
+Target Group Blocks (for `targetGroup`) support the following:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the target group.
+* `weight` - (Optional) The weight. The range is 0 to 999.
+
+Target Group Stickiness Config Blocks (for `stickiness`) support the following:
+
+* `enabled` - (Required) Indicates whether target group stickiness is enabled.
+* `duration` - (Optional) The time period, in seconds, during which requests from a client should be routed to the same target group. The range is 1-604800 seconds (7 days).
+
+Redirect Blocks (for `redirect`) support the following:
+
+~> **NOTE:** You can reuse URI components using the following reserved keywords: `#{protocol}`, `#{host}`, `#{port}`, `#{path}` (the leading "/" is removed) and `#{query}`.
+
+* `host` - (Optional) The hostname. This component is not percent-encoded. The hostname can contain `#{host}`. Defaults to `#{host}`.
+* `path` - (Optional) The absolute path, starting with the leading "/". This component is not percent-encoded. The path can contain #{host}, #{path}, and #{port}. Defaults to `/#{path}`.
+* `port` - (Optional) The port. Specify a value from `1` to `65535` or `#{port}`. Defaults to `#{port}`.
+* `protocol` - (Optional) The protocol. Valid values are `http`, `https`, or `#{protocol}`. Defaults to `#{protocol}`.
+* `query` - (Optional) The query parameters, URL-encoded when necessary, but not percent-encoded. Do not include the leading "?". Defaults to `#{query}`.
+* `statusCode` - (Required) The HTTP redirect code. The redirect is either permanent (`http301`) or temporary (`http302`).
+
+Fixed-response Blocks (for `fixedResponse`) support the following:
+
+* `contentType` - (Required) The content type. Valid values are `text/plain`, `text/css`, `text/html`, `application/javascript` and `application/json`.
+* `messageBody` - (Optional) The message body.
+* `statusCode` - (Optional) The HTTP response code. Valid values are `2Xx`, `4Xx`, or `5Xx`.
+
+Authenticate Cognito Blocks (for `authenticateCognito`) support the following:
+
+* `authenticationRequestExtraParams` - (Optional) The query parameters to include in the redirect request to the authorization endpoint. Max: 10.
+* `onUnauthenticatedRequest` - (Optional) The behavior if the user is not authenticated. Valid values: `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) The set of user claims to be requested from the IdP.
+* `sessionCookieName` - (Optional) The name of the cookie used to maintain session information.
+* `sessionTimeout` - (Optional) The maximum duration of the authentication session, in seconds.
+* `userPoolArn` - (Required) The ARN of the Cognito user pool.
+* `userPoolClientId` - (Required) The ID of the Cognito user pool client.
+* `userPoolDomain` - (Required) The domain prefix or fully-qualified domain name of the Cognito user pool.
+
+Authenticate OIDC Blocks (for `authenticateOidc`) support the following:
+
+* `authenticationRequestExtraParams` - (Optional) The query parameters to include in the redirect request to the authorization endpoint. Max: 10.
+* `authorizationEndpoint` - (Required) The authorization endpoint of the IdP.
+* `clientId` - (Required) The OAuth 2.0 client identifier.
+* `clientSecret` - (Required) The OAuth 2.0 client secret.
+* `issuer` - (Required) The OIDC issuer identifier of the IdP.
+* `onUnauthenticatedRequest` - (Optional) The behavior if the user is not authenticated.
Valid values: `deny`, `allow` and `authenticate`.
+* `scope` - (Optional) The set of user claims to be requested from the IdP.
+* `sessionCookieName` - (Optional) The name of the cookie used to maintain session information.
+* `sessionTimeout` - (Optional) The maximum duration of the authentication session, in seconds.
+* `tokenEndpoint` - (Required) The token endpoint of the IdP.
+* `userInfoEndpoint` - (Required) The user info endpoint of the IdP.
+
+Authentication Request Extra Params Blocks (for `authenticationRequestExtraParams`) support the following:
+
+* `key` - (Required) The key of the query parameter.
+* `value` - (Required) The value of the query parameter.
+
+### Condition Blocks
+
+One or more condition blocks can be set per rule. Most condition types can only be specified once per rule, except for `httpHeader` and `queryString`, which can be specified multiple times.
+
+Condition Blocks (for `condition`) support the following:
+
+* `hostHeader` - (Optional) Contains a single `values` item which is a list of host header patterns to match. The maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied.
+* `httpHeader` - (Optional) HTTP headers to match. [HTTP Header block](#http-header-blocks) fields documented below.
+* `httpRequestMethod` - (Optional) Contains a single `values` item which is a list of HTTP request methods or verbs to match. Maximum size is 40 characters. Only allowed characters are A-Z, hyphen (-) and underscore (\_). Comparison is case sensitive. Wildcards are not supported. Only one needs to match for the condition to be satisfied. AWS recommends that GET and HEAD requests are routed in the same way because the response to a HEAD request may be cached.
+* `pathPattern` - (Optional) Contains a single `values` item which is a list of path patterns to match against the request URL. Maximum size of each pattern is 128 characters. Comparison is case sensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). Only one pattern needs to match for the condition to be satisfied. Path pattern is compared only to the path of the URL, not to its query string. To compare against the query string, use a `queryString` condition.
+* `queryString` - (Optional) Query strings to match. [Query String block](#query-string-blocks) fields documented below.
+* `sourceIp` - (Optional) Contains a single `values` item which is a list of source IP CIDR notations to match. You can use both IPv4 and IPv6 addresses. Wildcards are not supported. Condition is satisfied if the source IP address of the request matches one of the CIDR blocks. Condition is not satisfied by the addresses in the `xForwardedFor` header; use an `httpHeader` condition instead.
+
+~> **NOTE:** Exactly one of `hostHeader`, `httpHeader`, `httpRequestMethod`, `pathPattern`, `queryString` or `sourceIp` must be set per condition.
+
+#### HTTP Header Blocks
+
+HTTP Header Blocks (for `httpHeader`) support the following:
+
+* `httpHeaderName` - (Required) Name of HTTP header to search. The maximum size is 40 characters. Comparison is case insensitive. Only RFC7240 characters are supported. Wildcards are not supported. You cannot use an HTTP header condition to specify the host header; use a `hostHeader` condition instead.
+* `values` - (Required) List of header value patterns to match.
Maximum size of each pattern is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). If the same header appears multiple times in the request, they will be searched in order until a match is found. Only one pattern needs to match for the condition to be satisfied. To require that all of the strings are a match, create one condition block per string.
+
+#### Query String Blocks
+
+Query String Blocks (for `queryString`) support the following:
+
+* `values` - (Required) Query string pairs or values to match. Query String Value blocks documented below. Multiple `values` blocks can be specified; see the example above. Maximum size of each string is 128 characters. Comparison is case insensitive. Wildcard characters supported: * (matches 0 or more characters) and ? (matches exactly 1 character). To search for a literal '\*' or '?' character in a query string, escape the character with a backslash (\\). Only one pair needs to match for the condition to be satisfied.
+
+Query String Value Blocks (for `queryStringValues`) support the following:
+
+* `key` - (Optional) Query string key pattern to match.
+* `value` - (Required) Query string value pattern to match.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the rule (matches `arn`).
+* `arn` - The ARN of the rule (matches `id`).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import rules using their ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import rules using their ARN. For example:
+
+```console
+% terraform import aws_lb_listener_rule.front_end arn:aws:elasticloadbalancing:us-west-2:187416307283:listener-rule/app/test/8e4497da625e2d8a/9ab28ade35828f96/67b3d2d36dd7c26b
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lb_ssl_negotiation_policy.html.markdown b/website/docs/cdktf/typescript/r/lb_ssl_negotiation_policy.html.markdown
new file mode 100644
index 00000000000..7ed1abad0b0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lb_ssl_negotiation_policy.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_lb_ssl_negotiation_policy"
+description: |-
+  Provides a load balancer SSL negotiation policy, which allows an ELB to control which ciphers and protocols are supported during SSL negotiations between a client and a load balancer.
+---
+
+
+# Resource: aws_lb_ssl_negotiation_policy
+
+Provides a load balancer SSL negotiation policy, which allows an ELB to control the ciphers and protocols that are supported during SSL negotiations between a client and a load balancer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { LbSslNegotiationPolicy } from "./.gen/providers/aws/lb-ssl-negotiation-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const lb = new Elb(this, "lb", {
+      availabilityZones: ["us-east-1a"],
+      listener: [
+        {
+          instancePort: 8000,
+          instanceProtocol: "https",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId:
+            "arn:aws:iam::123456789012:server-certificate/certName",
+        },
+      ],
+      name: "test-lb",
+    });
+    new LbSslNegotiationPolicy(this, "foo", {
+      attribute: [
+        {
+          name: "Protocol-TLSv1",
+          value: "false",
+        },
+        {
+          name: "Protocol-TLSv1.1",
+          value: "false",
+        },
+        {
+          name: "Protocol-TLSv1.2",
+          value: "true",
+        },
+        {
+          name: "Server-Defined-Cipher-Order",
+          value: "true",
+        },
+        {
+          name: "ECDHE-RSA-AES128-GCM-SHA256",
+          value: "true",
+        },
+        {
+          name: "AES128-GCM-SHA256",
+          value: "true",
+        },
+        {
+          name: "EDH-RSA-DES-CBC3-SHA",
+          value: "false",
+        },
+      ],
+      lbPort: 443,
+      loadBalancer: lb.id,
+      name: "foo-policy",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the SSL negotiation policy.
+* `loadBalancer` - (Required) The load balancer to which the policy should be attached.
+* `lbPort` - (Required) The load balancer port to which the policy should be applied. This must be an active listener on the load balancer.
+* `attribute` - (Optional) An SSL Negotiation policy attribute. Each has two properties:
+    * `name` - The name of the attribute.
+    * `value` - The value of the attribute.
+* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger a redeployment. To force a redeployment without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html).
+
+To set your attributes, please see the [AWS Elastic Load Balancing Developer Guide](http://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/elb-security-policy-table.html) for a listing of the supported SSL protocols, SSL options, and SSL ciphers.
+
+~> **NOTE:** The AWS documentation references Server Order Preference, which the AWS Elastic Load Balancing API refers to as `serverDefinedCipherOrder`. If you wish to set Server Order Preference, use this value instead.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the policy.
+* `name` - The name of the SSL negotiation policy.
+* `loadBalancer` - The load balancer to which the policy is attached.
+* `lbPort` - The load balancer port to which the policy is applied.
+* `attribute` - The SSL Negotiation policy attributes.
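+
+As an illustration of the `triggers` argument described above, the following hand-written sketch (not produced by `cdktf convert`) re-creates the policy whenever a certificate ARN supplied through a Terraform variable changes. The `cert_arn` variable and the `certificateArn` trigger key are illustrative assumptions; `triggers` accepts any map of strings.
+
+```typescript
+// Hand-written sketch; the variable and trigger key are illustrative.
+import { Construct } from "constructs";
+import { TerraformStack, TerraformVariable } from "cdktf";
+import { Elb } from "./.gen/providers/aws/elb";
+import { LbSslNegotiationPolicy } from "./.gen/providers/aws/lb-ssl-negotiation-policy";
+class TriggeredPolicySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const certArn = new TerraformVariable(this, "cert_arn", {
+      type: "string",
+    });
+    const lb = new Elb(this, "lb", {
+      availabilityZones: ["us-east-1a"],
+      listener: [
+        {
+          instancePort: 8000,
+          instanceProtocol: "https",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId: certArn.stringValue,
+        },
+      ],
+      name: "test-lb",
+    });
+    new LbSslNegotiationPolicy(this, "foo", {
+      attribute: [
+        {
+          name: "Protocol-TLSv1.2",
+          value: "true",
+        },
+      ],
+      lbPort: 443,
+      loadBalancer: lb.id,
+      name: "foo-policy",
+      // Changing the certificate ARN triggers a redeployment of the policy.
+      triggers: {
+        certificateArn: certArn.stringValue,
+      },
+    });
+  }
+}
+```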
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lb_target_group.html.markdown b/website/docs/cdktf/typescript/r/lb_target_group.html.markdown new file mode 100644 index 00000000000..4e174783507 --- /dev/null +++ b/website/docs/cdktf/typescript/r/lb_target_group.html.markdown @@ -0,0 +1,222 @@ +--- +subcategory: "ELB (Elastic Load Balancing)" +layout: "aws" +page_title: "AWS: aws_lb_target_group" +description: |- + Provides a Target Group resource for use with Load Balancers. +--- + + + +# Resource: aws_lb_target_group + +Provides a Target Group resource for use with Load Balancer resources. + +~> **Note:** `awsAlbTargetGroup` is known as `awsLbTargetGroup`. The functionality is identical. + +## Example Usage + +### Instance Target Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new Vpc(this, "main", { + cidrBlock: "10.0.0.0/16", + }); + new LbTargetGroup(this, "test", { + name: "tf-example-lb-tg", + port: 80, + protocol: "HTTP", + vpcId: main.id, + }); + } +} + +``` + +### IP Target Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new Vpc(this, "main", { + cidrBlock: "10.0.0.0/16", + }); + new LbTargetGroup(this, "ip-example", { + name: "tf-example-lb-tg", + port: 80, + protocol: "HTTP", + targetType: "ip", + vpcId: main.id, + }); + } +} + +``` + +### Lambda Target Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LbTargetGroup(this, "lambda-example", { + name: "tf-example-lb-tg", + targetType: "lambda", + }); + } +} + +``` + +### ALB Target Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LbTargetGroup(this, "alb-example", {
+      name: "tf-example-lb-alb-tg",
+      port: 80,
+      protocol: "TCP",
+      targetType: "alb",
+      vpcId: main.id,
+    });
+  }
+}
+
+```
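+
+### Health Check and Stickiness
+
+The `healthCheck` and `stickiness` blocks documented under Argument Reference below can be combined on an HTTP target group. The following hand-written sketch (not produced by `cdktf convert`) shows one possible configuration; every path, threshold, interval, and duration value is illustrative.
+
+```typescript
+// Hand-written sketch; all names and values are illustrative.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class HealthCheckSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new Vpc(this, "main", {
+      cidrBlock: "10.0.0.0/16",
+    });
+    new LbTargetGroup(this, "with-health-check", {
+      healthCheck: {
+        enabled: true,
+        healthyThreshold: 3,
+        interval: 30,
+        matcher: "200-299",
+        path: "/health",
+        timeout: 5,
+        unhealthyThreshold: 3,
+      },
+      name: "tf-example-lb-hc-tg",
+      port: 80,
+      protocol: "HTTP",
+      // Pin clients to the same target via a load balancer cookie for a day.
+      stickiness: {
+        cookieDuration: 86400,
+        enabled: true,
+        type: "lb_cookie",
+      },
+      vpcId: main.id,
+    });
+  }
+}
+```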
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `connectionTermination` - (Optional) Whether to terminate connections at the end of the deregistration timeout on Network Load Balancers. See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#deregistration-delay) for more information. Default is `false`.
+* `deregistrationDelay` - (Optional) Amount of time for Elastic Load Balancing to wait before changing the state of a deregistering target from draining to unused. The range is 0-3600 seconds. The default value is 300 seconds.
+* `healthCheck` - (Optional, Maximum of 1) Health Check configuration block. Detailed below.
+* `lambdaMultiValueHeadersEnabled` - (Optional) Whether the request and response headers exchanged between the load balancer and the Lambda function include arrays of values or strings. Only applies when `targetType` is `lambda`. Default is `false`.
+* `loadBalancingAlgorithmType` - (Optional) Determines how the load balancer selects targets when routing requests. Only applicable for Application Load Balancer Target Groups. The value is `roundRobin` or `leastOutstandingRequests`. The default is `roundRobin`.
+* `loadBalancingCrossZoneEnabled` - (Optional) Indicates whether cross zone load balancing is enabled. The value is `"true"`, `"false"` or `"useLoadBalancerConfiguration"`. The default is `"useLoadBalancerConfiguration"`.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. Cannot be longer than 6 characters.
+* `name` - (Optional, Forces new resource) Name of the target group. If omitted, Terraform will assign a random, unique name. This name must be unique per region per account, can have a maximum of 32 characters, must contain only alphanumeric characters or hyphens, and must not begin or end with a hyphen.
+* `port` - (May be required, Forces new resource) Port on which targets receive traffic, unless overridden when registering a specific target. Required when `targetType` is `instance`, `ip` or `alb`. Does not apply when `targetType` is `lambda`.
+* `preserveClientIp` - (Optional) Whether client IP preservation is enabled. See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#client-ip-preservation) for more information.
+* `protocolVersion` - (Optional, Forces new resource) Only applicable when `protocol` is `http` or `https`. The protocol version. Specify `grpc` to send requests to targets using gRPC. Specify `http2` to send requests to targets using HTTP/2. The default is `http1`, which sends requests to targets using HTTP/1.1.
+* `protocol` - (May be required, Forces new resource) Protocol to use for routing traffic to the targets. Should be one of `geneve`, `http`, `https`, `tcp`, `tcpUdp`, `tls`, or `udp`. Required when `targetType` is `instance`, `ip` or `alb`. Does not apply when `targetType` is `lambda`.
+* `proxyProtocolV2` - (Optional) Whether to enable support for proxy protocol v2 on Network Load Balancers. See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#proxy-protocol) for more information. Default is `false`.
+* `slowStart` - (Optional) Amount of time for targets to warm up before the load balancer sends them a full share of requests. The range is 30-900 seconds or 0 to disable. The default value is 0 seconds.
+* `stickiness` - (Optional, Maximum of 1) Stickiness configuration block. Detailed below.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `targetFailover` - (Optional) Target failover block. Only applicable for Gateway Load Balancer target groups. See [target_failover](#target_failover) for more information.
+* `targetType` - (May be required, Forces new resource) Type of target that you must specify when registering targets with this target group. See [doc](https://docs.aws.amazon.com/elasticloadbalancing/latest/APIReference/API_CreateTargetGroup.html) for supported values. The default is `instance`.
+
+  Note that you can't specify targets for a target group using both instance IDs and IP addresses.
+
+  If the target type is `ip`, specify IP addresses from the subnets of the virtual private cloud (VPC) for the target group, the RFC 1918 range (10.0.0.0/8, 172.16.0.0/12, and 192.168.0.0/16), and the RFC 6598 range (100.64.0.0/10). You can't specify publicly routable IP addresses.
+
+  Network Load Balancers do not support the `lambda` target type.
+
+  Application Load Balancers do not support the `alb` target type.
+* `ipAddressType` - (Optional, Forces new resource) The type of IP addresses used by the target group, only supported when target type is set to `ip`. Possible values are `ipv4` or `ipv6`.
+* `vpcId` - (Optional, Forces new resource) Identifier of the VPC in which to create the target group. Required when `targetType` is `instance`, `ip` or `alb`. Does not apply when `targetType` is `lambda`.
+
+### health_check
+
+~> **Note:** The Health Check parameters you can set vary by the `protocol` of the Target Group. Many parameters cannot be set to custom values for `network` load balancers at this time. See http://docs.aws.amazon.com/elasticloadbalancing/latest/APIReference/API_CreateTargetGroup.html for a complete reference. Keep in mind that health checks produce actual requests to the backend. The underlying function is invoked when `targetType` is set to `lambda`.
+
+* `enabled` - (Optional) Whether health checks are enabled. Defaults to `true`.
+* `healthyThreshold` - (Optional) Number of consecutive health check successes required before considering a target healthy. The range is 2-10. Defaults to 3.
+* `interval` - (Optional) Approximate amount of time, in seconds, between health checks of an individual target. The range is 5-300. For `lambda` target groups, it needs to be greater than the timeout of the underlying `lambda`. Defaults to 30.
+* `matcher` - (May be required) Response codes to use when checking for healthy responses from a target. You can specify multiple values (for example, "200,202" for HTTP(s) or "0,12" for GRPC) or a range of values (for example, "200-299" or "0-99"). Required for HTTP/HTTPS/GRPC ALB. Only applies to Application Load Balancers (i.e., HTTP/HTTPS/GRPC), not Network Load Balancers (i.e., TCP).
+* `path` - (May be required) Destination for the health check request. Required for HTTP/HTTPS ALB and HTTP NLB. Only applies to HTTP/HTTPS.
+* `port` - (Optional) The port the load balancer uses when performing health checks on targets. Default is traffic-port.
+* `protocol` - (Optional) Protocol the load balancer uses when performing health checks on targets. Must be either `tcp`, `http`, or `https`. The TCP protocol is not supported for health checks if the protocol of the target group is HTTP or HTTPS. Defaults to HTTP.
+* `timeout` - (Optional) Amount of time, in seconds, during which no response from a target means a failed health check. The range is 2–120 seconds. For target groups with a protocol of HTTP, the default is 6 seconds. For target groups with a protocol of TCP, TLS or HTTPS, the default is 10 seconds. For target groups with a protocol of GENEVE, the default is 5 seconds. If the target type is lambda, the default is 30 seconds.
+* `unhealthyThreshold` - (Optional) Number of consecutive health check failures required before considering a target unhealthy. The range is 2-10. Defaults to 3.
+
+### stickiness
+
+~> **NOTE:** Currently, an NLB (i.e., protocol of `http` or `https`) can have an invalid `stickiness` block with `type` set to `lbCookie` as long as `enabled` is set to `false`. However, please update your configurations to avoid errors in a future version of the provider: either remove the invalid `stickiness` block or set the `type` to `sourceIp`.
+
+* `cookieDuration` - (Optional) Only used when the type is `lbCookie`. The time period, in seconds, during which requests from a client should be routed to the same target. After this time period expires, the load balancer-generated cookie is considered stale. The range is 1 second to 1 week (604800 seconds). The default value is 1 day (86400 seconds).
+* `cookieName` - (Optional) Name of the application based cookie. AWSALB, AWSALBAPP, and AWSALBTG prefixes are reserved and cannot be used. Only needed when type is `appCookie`.
+* `enabled` - (Optional) Boolean to enable / disable `stickiness`. Default is `true`.
+* `type` - (Required) The type of sticky sessions. The only current possible values are `lbCookie`, `appCookie` for ALBs, `sourceIp` for NLBs, and `sourceIpDestIp`, `sourceIpDestIpProto` for GWLBs.
+
+### target_failover
+
+~> **NOTE:** This block is only applicable for a Gateway Load Balancer (GWLB). The two attributes `onDeregistration` and `onUnhealthy` cannot be set independently. The value you set for both attributes must be the same.
+
+* `onDeregistration` - (Optional) Indicates how the GWLB handles existing flows when a target is deregistered. Possible values are `rebalance` and `noRebalance`. Must match the attribute value set for `onUnhealthy`. Default: `noRebalance`.
+* `onUnhealthy` - (Optional) Indicates how the GWLB handles existing flows when a target is unhealthy. Possible values are `rebalance` and `noRebalance`. Must match the attribute value set for `onDeregistration`. Default: `noRebalance`.
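+
+Because both `targetFailover` attributes must carry the same value, a Gateway Load Balancer target group that rebalances existing flows looks roughly like the following hand-written sketch (not produced by `cdktf convert`); the resource names and the choice of `rebalance` are illustrative assumptions.
+
+```typescript
+// Hand-written sketch; names and values are illustrative.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class GwlbTargetGroupSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new Vpc(this, "main", {
+      cidrBlock: "10.0.0.0/16",
+    });
+    new LbTargetGroup(this, "gwlb", {
+      name: "tf-example-gwlb-tg",
+      port: 6081,
+      protocol: "GENEVE",
+      targetFailover: [
+        {
+          // Both attributes must be set to the same value.
+          onDeregistration: "rebalance",
+          onUnhealthy: "rebalance",
+        },
+      ],
+      vpcId: main.id,
+    });
+  }
+}
+```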
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arnSuffix` - ARN suffix for use with CloudWatch Metrics.
+* `arn` - ARN of the Target Group (matches `id`).
+* `id` - ARN of the Target Group (matches `arn`).
+* `name` - Name of the Target Group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Target Groups using their ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Target Groups using their ARN. For example:
+
+```console
+% terraform import aws_lb_target_group.app_front_end arn:aws:elasticloadbalancing:us-west-2:187416307283:targetgroup/app-front-end/20cfe21448b66314
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lb_target_group_attachment.html.markdown b/website/docs/cdktf/typescript/r/lb_target_group_attachment.html.markdown
new file mode 100644
index 00000000000..5d98be4a839
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lb_target_group_attachment.html.markdown
@@ -0,0 +1,191 @@
+---
+subcategory: "ELB (Elastic Load Balancing)"
+layout: "aws"
+page_title: "AWS: aws_lb_target_group_attachment"
+description: |-
+  Provides the ability to register instances and containers with an LB
+  target group
+---
+
+
+# Resource: aws_lb_target_group_attachment
+
+Provides the ability to register instances and containers with an Application Load Balancer (ALB) or Network Load Balancer (NLB) target group. For attaching resources with Elastic Load Balancer (ELB), see the [`awsElbAttachment` resource](/docs/providers/aws/r/elb_attachment.html).
+
+~> **Note:** `awsAlbTargetGroupAttachment` is known as `awsLbTargetGroupAttachment`. The functionality is identical.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Instance } from "./.gen/providers/aws/instance";
+import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group";
+import { LbTargetGroupAttachment } from "./.gen/providers/aws/lb-target-group-attachment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new Instance(this, "test", {});
+    const awsLbTargetGroupTest = new LbTargetGroup(this, "test_1", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLbTargetGroupTest.overrideLogicalId("test");
+    const awsLbTargetGroupAttachmentTest = new LbTargetGroupAttachment(
+      this,
+      "test_2",
+      {
+        port: 80,
+        targetGroupArn: Token.asString(awsLbTargetGroupTest.arn),
+        targetId: test.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + awsLbTargetGroupAttachmentTest.overrideLogicalId("test"); + } +} + +``` + +### Lambda Target + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LambdaFunction } from "./.gen/providers/aws/lambda-function"; +import { LambdaPermission } from "./.gen/providers/aws/lambda-permission"; +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +import { LbTargetGroupAttachment } from "./.gen/providers/aws/lb-target-group-attachment"; +interface MyConfig { + functionName: any; + role: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const test = new LambdaFunction(this, "test", { + functionName: config.functionName, + role: config.role, + }); + const awsLbTargetGroupTest = new LbTargetGroup(this, "test_1", { + name: "test", + targetType: "lambda", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbTargetGroupTest.overrideLogicalId("test"); + const withLb = new LambdaPermission(this, "with_lb", { + action: "lambda:InvokeFunction", + functionName: test.functionName, + principal: "elasticloadbalancing.amazonaws.com", + sourceArn: Token.asString(awsLbTargetGroupTest.arn), + statementId: "AllowExecutionFromlb", + }); + const awsLbTargetGroupAttachmentTest = new LbTargetGroupAttachment( + this, + "test_3", + { + dependsOn: [withLb], + targetGroupArn: Token.asString(awsLbTargetGroupTest.arn), + targetId: test.arn, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbTargetGroupAttachmentTest.overrideLogicalId("test"); + } +} + +``` + +### Registering Multiple Targets + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + Token, + TerraformCount, + TerraformIterator, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Instance } from "./.gen/providers/aws/instance"; +import { LbTargetGroup } from "./.gen/providers/aws/lb-target-group"; +import { LbTargetGroupAttachment } from "./.gen/providers/aws/lb-target-group-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleCount = TerraformCount.of(Token.asNumber("3")); + const example = new Instance(this, "example", { + count: exampleCount, + }); + const awsLbTargetGroupExample = new LbTargetGroup(this, "example_1", {}); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsLbTargetGroupExample.overrideLogicalId("example"); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleForEachIterator = TerraformIterator.fromList( + Token.asAny("${{ for k, v in ${" + example.fqn + "} : v.id => v}}") + ); + const awsLbTargetGroupAttachmentExample = new LbTargetGroupAttachment( + this, + "example_2", + { + port: 80, + targetGroupArn: Token.asString(awsLbTargetGroupExample.arn), + targetId: Token.asString( + propertyAccess(exampleForEachIterator.value, ["id"]) + ), + forEach: exampleForEachIterator, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLbTargetGroupAttachmentExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `targetGroupArn` - (Required) The ARN of the target group with which to register targets. +* `targetId` (Required) The ID of the target. This is the Instance ID for an instance, or the container ID for an ECS container. If the target type is `ip`, specify an IP address. If the target type is `lambda`, specify the Lambda function ARN. If the target type is `alb`, specify the ALB ARN. + +The following arguments are optional: + +* `availabilityZone` - (Optional) The Availability Zone where the IP address of the target is to be registered. If the private IP address is outside of the VPC scope, this value must be set to `all`. +* `port` - (Optional) The port on which targets receive traffic. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A unique identifier for the attachment. + +## Import + +You cannot import Target Group Attachments. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lex_bot.html.markdown b/website/docs/cdktf/typescript/r/lex_bot.html.markdown new file mode 100644 index 00000000000..cf732794a8c --- /dev/null +++ b/website/docs/cdktf/typescript/r/lex_bot.html.markdown @@ -0,0 +1,174 @@ +--- +subcategory: "Lex Model Building" +layout: "aws" +page_title: "AWS: aws_lex_bot" +description: |- + Provides an Amazon Lex bot resource. +--- + + + +# Resource: aws_lex_bot + +Provides an Amazon Lex Bot resource. For more information see +[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html) + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { LexBot } from "./.gen/providers/aws/lex-bot";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LexBot(this, "order_flowers_bot", {
+      abortStatement: {
+        message: [
+          {
+            content: "Sorry, I am not able to assist at this time",
+            contentType: "PlainText",
+          },
+        ],
+      },
+      childDirected: false,
+      clarificationPrompt: {
+        maxAttempts: 2,
+        message: [
+          {
+            content: "I didn't understand you, what would you like to do?",
+            contentType: "PlainText",
+          },
+        ],
+      },
+      createVersion: false,
+      description: "Bot to order flowers on the behalf of a user",
+      idleSessionTtlInSeconds: 600,
+      intent: [
+        {
+          intentName: "OrderFlowers",
+          intentVersion: "1",
+        },
+      ],
+      locale: "en-US",
+      name: "OrderFlowers",
+      processBehavior: "BUILD",
+      voiceId: "Salli",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `abortStatement` - (Required) The message that Amazon Lex uses to abort a conversation. Attributes are documented under [statement](#statement).
+* `childDirected` - (Required) By specifying true, you confirm that your use of Amazon Lex is related to a website, program, or other application that is directed or targeted, in whole or in part, to children under age 13 and subject to COPPA. For more information see the [Amazon Lex FAQ](https://aws.amazon.com/lex/faqs#data-security) and the [Amazon Lex PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-childDirected).
+* `clarificationPrompt` - (Required) The message that Amazon Lex uses when it doesn't understand the user's request. Attributes are documented under [prompt](#prompt).
+* `createVersion` - (Optional) Determines if a new bot version is created when the initial resource is created and on each update. Defaults to `false`.
+* `description` - (Optional) A description of the bot. Must be less than or equal to 200 characters in length.
+* `detectSentiment` - (Optional) When set to `true`, user utterances are sent to Amazon Comprehend for sentiment analysis. If you don't specify `detectSentiment`, the default is `false`.
+* `enableModelImprovements` - (Optional) Set to `true` to enable access to natural language understanding improvements. When you set the `enableModelImprovements` parameter to `true`, you can use the `nluIntentConfidenceThreshold` parameter to configure confidence scores. For more information, see [Confidence Scores](https://docs.aws.amazon.com/lex/latest/dg/confidence-scores.html). You can only set the `enableModelImprovements` parameter in certain Regions. If you set the parameter to `true`, your bot has access to accuracy improvements. For more information see the [Amazon Lex Bot PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-enableModelImprovements). A short sketch follows this list.
+* `idleSessionTtlInSeconds` - (Optional) The maximum time in seconds that Amazon Lex retains the data gathered in a conversation. Default is `300`. Must be a number between 60 and 86400 (inclusive).
+* `locale` - (Optional) Specifies the target locale for the bot. Any intent used in the bot must be compatible with the locale of the bot. For available locales, see [Amazon Lex Bot PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-locale). Default is `en-US`.
+* `intent` - (Required) A set of Intent objects. Each intent represents a command that a user can express. Attributes are documented under [intent](#intent). Can have up to 250 Intent objects.
+* `name` - (Required) The name of the bot that you want to create, case sensitive. Must be between 2 and 50 characters in length.
+* `nluIntentConfidenceThreshold` - (Optional) Determines the threshold where Amazon Lex will insert the AMAZON.FallbackIntent, AMAZON.KendraSearchIntent, or both when returning alternative intents in a PostContent or PostText response. AMAZON.FallbackIntent and AMAZON.KendraSearchIntent are only inserted if they are configured for the bot. For more information see [Amazon Lex Bot PutBot API Docs](https://docs.aws.amazon.com/lex/latest/dg/API_PutBot.html#lex-PutBot-request-nluIntentConfidenceThreshold). This value requires `enableModelImprovements` to be set to `true`, and the default is `0`. Must be a float between 0 and 1.
+* `processBehavior` - (Optional) If you set the `processBehavior` element to `BUILD`, Amazon Lex builds the bot so that it can be run. If you set the element to `SAVE`, Amazon Lex saves the bot, but doesn't build it. Default is `SAVE`.
+* `voiceId` - (Optional) The Amazon Polly voice ID that you want Amazon Lex to use for voice interactions with the user. The locale configured for the voice must match the locale of the bot. For more information, see [Available Voices](http://docs.aws.amazon.com/polly/latest/dg/voicelist.html) in the Amazon Polly Developer Guide.
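+
+As noted above, the confidence-score settings work as a pair. The following is a minimal,
+hypothetical sketch, not part of the original examples; it reuses the `LexBot` binding
+from the example above, and the threshold value is illustrative:
+
+```typescript
+// Hypothetical sketch: enabling confidence scores. `nluIntentConfidenceThreshold`
+// only takes effect when `enableModelImprovements` is true.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LexBot } from "./.gen/providers/aws/lex-bot";
+class ConfidenceScoresStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LexBot(this, "bot_with_scores", {
+      abortStatement: {
+        message: [
+          { content: "Sorry, I can't help you.", contentType: "PlainText" },
+        ],
+      },
+      childDirected: false,
+      clarificationPrompt: {
+        maxAttempts: 2,
+        message: [
+          { content: "What would you like to do?", contentType: "PlainText" },
+        ],
+      },
+      // Enabling model improvements unlocks the confidence threshold below.
+      enableModelImprovements: true,
+      nluIntentConfidenceThreshold: 0.4,
+      intent: [{ intentName: "OrderFlowers", intentVersion: "1" }],
+      name: "OrderFlowersWithScores",
+    });
+  }
+}
+```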
+
+### intent
+
+Identifies the specific version of an intent.
+
+* `intentName` - (Required) The name of the intent. Must be less than or equal to 100 characters in length.
+* `intentVersion` - (Required) The version of the intent. Must be less than or equal to 64 characters in length.
+
+### message
+
+The message object that provides the message text and its type.
+
+* `content` - (Required) The text of the message.
+* `contentType` - (Required) The content type of the message string.
+* `groupNumber` - (Optional) Identifies the message group that the message belongs to. When a group
+is assigned to a message, Amazon Lex returns one message from each group in the response. A short
+fragment illustrating this appears after the [statement](#statement) section.
+
+### prompt
+
+Obtains information from the user. To define a prompt, provide one or more messages and specify the
+number of attempts to get information from the user. If you provide more than one message, Amazon
+Lex chooses one of the messages to use to prompt the user.
+
+* `maxAttempts` - (Required) The number of times to prompt the user for information.
+* `message` - (Required) A set of messages, each of which provides a message string and its type.
+You can specify the message string in plain text or in Speech Synthesis Markup Language (SSML).
+Attributes are documented under [message](#message).
+* `responseCard` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html).
+
+### statement
+
+A statement is a map with a set of message maps and an optional response card string. Messages
+convey information to the user. At runtime, Amazon Lex selects the message to convey.
+
+* `message` - (Required) A set of messages, each of which provides a message string and its type. You
+can specify the message string in plain text or in Speech Synthesis Markup Language (SSML). Attributes
+are documented under [message](#message).
+* `responseCard` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html).
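+
+As an illustration of the `groupNumber` attribute documented under [message](#message), the
+following hypothetical fragment defines two interchangeable phrasings in the same group;
+Amazon Lex returns one message from each group at runtime (this fragment is not part of the
+original document):
+
+```typescript
+// Hypothetical fragment: both messages belong to group 1, so Amazon Lex
+// picks one of the two phrasings when it aborts the conversation.
+const abortStatement = {
+  message: [
+    {
+      content: "Sorry, I am not able to assist at this time",
+      contentType: "PlainText",
+      groupNumber: 1,
+    },
+    {
+      content: "I can't help with that right now, sorry.",
+      contentType: "PlainText",
+      groupNumber: 1,
+    },
+  ],
+};
+```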
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `checksum` - Checksum identifying the version of the bot that was created. The checksum is not
+included as an argument because the resource will add it automatically when updating the bot.
+* `createdDate` - The date when the bot version was created.
+* `failureReason` - If status is FAILED, Amazon Lex provides the reason that it failed to build the bot.
+* `lastUpdatedDate` - The date when the $LATEST version of this bot was updated.
+* `status` - When you send a request to create or update a bot, Amazon Lex sets the status response
+element to BUILDING. After Amazon Lex builds the bot, it sets status to READY. If Amazon Lex can't
+build the bot, it sets status to FAILED. Amazon Lex returns the reason for the failure in the
+failure_reason response element.
+* `version` - The version of the bot.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import bots using their name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import bots using their name. For example:
+
+```console
+% terraform import aws_lex_bot.order_flowers_bot OrderFlowers
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lex_bot_alias.html.markdown b/website/docs/cdktf/typescript/r/lex_bot_alias.html.markdown
new file mode 100644
index 00000000000..570bc457878
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lex_bot_alias.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_bot_alias"
+description: |-
+  Provides an Amazon Lex Bot Alias resource.
+---
+
+
+
+# Resource: aws_lex_bot_alias
+
+Provides an Amazon Lex Bot Alias resource. For more information see
+[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LexBotAlias } from "./.gen/providers/aws/lex-bot-alias";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LexBotAlias(this, "order_flowers_prod", {
+      botName: "OrderFlowers",
+      botVersion: "1",
+      description: "Production Version of the OrderFlowers Bot.",
+      name: "OrderFlowersProd",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `botName` - (Required) The name of the bot.
+* `botVersion` - (Required) The version of the bot.
+* `conversationLogs` - (Optional) The settings that determine how Amazon Lex uses conversation logs for the alias. Attributes are documented under [conversation_logs](#conversation_logs). A sketch of an alias with logging enabled follows this list.
+* `description` - (Optional) A description of the alias. Must be less than or equal to 200 characters in length.
+* `name` - (Required) The name of the alias. The name is not case sensitive. Must be less than or equal to 100 characters in length.
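+
+A minimal, hypothetical sketch of an alias with text conversation logs; the IAM role and
+log group ARNs are placeholder values, not from the original document:
+
+```typescript
+// Hypothetical sketch: alias that logs text conversations to CloudWatch Logs.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LexBotAlias } from "./.gen/providers/aws/lex-bot-alias";
+class AliasWithLogsStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LexBotAlias(this, "order_flowers_logged", {
+      botName: "OrderFlowers",
+      botVersion: "1",
+      name: "OrderFlowersLogged",
+      conversationLogs: {
+        // Placeholder role that must be able to write to the log group below.
+        iamRoleArn: "arn:aws:iam::123456789012:role/lex-conversation-logs",
+        logSettings: [
+          {
+            destination: "CLOUDWATCH_LOGS",
+            logType: "TEXT",
+            resourceArn:
+              "arn:aws:logs:us-east-1:123456789012:log-group:lex-logs",
+          },
+        ],
+      },
+    });
+  }
+}
+```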
+
+### conversation_logs
+
+Contains information about conversation log settings.
+
+* `iamRoleArn` - (Required) The Amazon Resource Name (ARN) of the IAM role used to write your logs to CloudWatch Logs or an S3 bucket. Must be between 20 and 2048 characters in length.
+* `logSettings` - (Optional) The settings for your conversation logs. You can log text, audio, or both. Attributes are documented under [log_settings](#log_settings).
+
+### log_settings
+
+The settings for conversation logs.
+
+* `destination` - (Required) The destination where logs are delivered. Options are `CLOUDWATCH_LOGS` or `S3`.
+* `kmsKeyArn` - (Optional) The Amazon Resource Name (ARN) of the key used to encrypt audio logs in an S3 bucket. This can only be specified when `destination` is set to `S3`. Must be between 20 and 2048 characters in length.
+* `logType` - (Required) The type of logging that is enabled. Options are `AUDIO` or `TEXT`.
+* `resourceArn` - (Required) The Amazon Resource Name (ARN) of the CloudWatch Logs log group or S3 bucket where the logs are delivered. Must be less than or equal to 2048 characters in length.
+* `resourcePrefix` - (Computed) The prefix of the S3 object key for `AUDIO` logs or the log stream name for `TEXT` logs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the bot alias.
+* `checksum` - Checksum of the bot alias.
+* `createdDate` - The date that the bot alias was created.
+* `lastUpdatedDate` - The date that the bot alias was updated. When you create a resource, the creation date and the last updated date are the same.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `1m`)
+* `update` - (Default `1m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import bot aliases using an ID with the format `botName:botAliasName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import bot aliases using an ID with the format `botName:botAliasName`. For example:
+
+```console
+% terraform import aws_lex_bot_alias.order_flowers_prod OrderFlowers:OrderFlowersProd
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lex_intent.html.markdown b/website/docs/cdktf/typescript/r/lex_intent.html.markdown
new file mode 100644
index 00000000000..39af47fb920
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lex_intent.html.markdown
@@ -0,0 +1,289 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_intent"
+description: |-
+  Provides an Amazon Lex intent resource.
+---
+
+
+
+# Resource: aws_lex_intent
+
+Provides an Amazon Lex Intent resource. For more information see
+[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LexIntent } from "./.gen/providers/aws/lex-intent";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LexIntent(this, "order_flowers_intent", {
+      confirmationPrompt: {
+        maxAttempts: 2,
+        message: [
+          {
+            content:
+              "Okay, your {FlowerType} will be ready for pickup by {PickupTime} on {PickupDate}. Does this sound okay?",
+            contentType: "PlainText",
+          },
+        ],
+      },
+      createVersion: false,
+      description: "Intent to order a bouquet of flowers for pick up",
+      fulfillmentActivity: {
+        type: "ReturnIntent",
+      },
+      name: "OrderFlowers",
+      rejectionStatement: {
+        message: [
+          {
+            content: "Okay, I will not place your order.",
+            contentType: "PlainText",
+          },
+        ],
+      },
+      sampleUtterances: [
+        "I would like to order some flowers",
+        "I would like to pick up flowers",
+      ],
+      slot: [
+        {
+          description: "The type of flowers to pick up",
+          name: "FlowerType",
+          priority: 1,
+          sampleUtterances: ["I would like to order {FlowerType}"],
+          slotConstraint: "Required",
+          slotType: "FlowerTypes",
+          slotTypeVersion: "$$LATEST",
+          valueElicitationPrompt: {
+            maxAttempts: 2,
+            message: [
+              {
+                content: "What type of flowers would you like to order?",
+                contentType: "PlainText",
+              },
+            ],
+          },
+        },
+        {
+          description: "The date to pick up the flowers",
+          name: "PickupDate",
+          priority: 2,
+          sampleUtterances: ["I would like to order {FlowerType}"],
+          slotConstraint: "Required",
+          slotType: "AMAZON.DATE",
+          slotTypeVersion: "$$LATEST",
+          valueElicitationPrompt: {
+            maxAttempts: 2,
+            message: [
+              {
+                content:
+                  "What day do you want the {FlowerType} to be picked up?",
+                contentType: "PlainText",
+              },
+            ],
+          },
+        },
+        {
+          description: "The time to pick up the flowers",
+          name: "PickupTime",
+          priority: 3,
+          sampleUtterances: ["I would like to order {FlowerType}"],
+          slotConstraint: "Required",
+          slotType: "AMAZON.TIME",
+          slotTypeVersion: "$$LATEST",
+          valueElicitationPrompt: {
+            maxAttempts: 2,
+            message: [
+              {
+                content:
+                  "Pick up the {FlowerType} at what time on {PickupDate}?",
+                contentType: "PlainText",
+              },
+            ],
+          },
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `conclusionStatement` - (Optional) The statement that you want Amazon Lex to convey to the user
+after the intent is successfully fulfilled by the Lambda function. This element is relevant only if
+you provide a Lambda function in the `fulfillmentActivity`. If you return the intent to the client
+application, you can't specify this element. The `followUpPrompt` and `conclusionStatement` are
+mutually exclusive. You can specify only one. Attributes are documented under [statement](#statement).
+* `confirmationPrompt` - (Optional) Prompts the user to confirm the intent. This question should
+have a yes or no answer. You must provide both the `rejectionStatement` and `confirmationPrompt`,
+or neither. Attributes are documented under [prompt](#prompt).
+* `createVersion` - (Optional) Determines if a new intent version is created when the initial
+resource is created and on each update. Defaults to `false`.
+* `description` - (Optional) A description of the intent. Must be less than or equal to 200 characters in length.
+* `dialogCodeHook` - (Optional) Specifies a Lambda function to invoke for each user input. You can
+invoke this Lambda function to personalize user interaction. Attributes are documented under [code_hook](#code_hook).
+* `followUpPrompt` - (Optional) Amazon Lex uses this prompt to solicit additional activity after
+fulfilling an intent. For example, after the OrderPizza intent is fulfilled, you might prompt the
+user to order a drink. The `followUpPrompt` field and the `conclusionStatement` field are mutually
+exclusive. You can specify only one. Attributes are documented under [follow_up_prompt](#follow_up_prompt).
+* `fulfillmentActivity` - (Required) Describes how the intent is fulfilled. For example, after a
+user provides all of the information for a pizza order, `fulfillmentActivity` defines how the bot
+places an order with a local pizza store. Attributes are documented under [fulfillment_activity](#fulfillment_activity).
+* `name` - (Required) The name of the intent, not case sensitive. Must be less than or equal to 100 characters in length.
+* `parentIntentSignature` - (Optional) A unique identifier for the built-in intent to base this
+intent on. To find the signature for an intent, see
+[Standard Built-in Intents](https://developer.amazon.com/public/solutions/alexa/alexa-skills-kit/docs/built-in-intent-ref/standard-intents)
+in the Alexa Skills Kit.
+* `rejectionStatement` - (Optional) When the user answers "no" to the question defined in
+`confirmationPrompt`, Amazon Lex responds with this statement to acknowledge that the intent was
+canceled. You must provide both the `rejectionStatement` and the `confirmationPrompt`, or neither.
+Attributes are documented under [statement](#statement).
+* `sampleUtterances` - (Optional) An array of utterances (strings) that a user might say to signal
+the intent. For example, "I want {PizzaSize} pizza", "Order {Quantity} {PizzaSize} pizzas".
+In each utterance, a slot name is enclosed in curly braces. Must have between 1 and 10 items in the list, and each item must be less than or equal to 200 characters in length.
+* `slot` - (Optional) A list of intent slots. At runtime, Amazon Lex elicits required slot values
+from the user using prompts defined in the slots. Attributes are documented under [slot](#slot).
+
+### code_hook
+
+Specifies a Lambda function that verifies requests to a bot or fulfills the user's request to a bot.
+
+* `messageVersion` - (Required) The version of the request-response that you want Amazon Lex to use
+to invoke your Lambda function. For more information, see
+[Using Lambda Functions](https://docs.aws.amazon.com/lex/latest/dg/using-lambda.html). Must be less than or equal to 5 characters in length.
+* `uri` - (Required) The Amazon Resource Name (ARN) of the Lambda function.
+
+### follow_up_prompt
+
+A prompt for additional activity after an intent is fulfilled. For example, after the OrderPizza
+intent is fulfilled, you might prompt the user to find out whether the user wants to order drinks.
+A hypothetical fragment follows this section.
+
+* `prompt` - (Required) Prompts for information from the user. Attributes are documented under [prompt](#prompt).
+* `rejectionStatement` - (Optional) If the user answers "no" to the question defined in the prompt field,
+Amazon Lex responds with this statement to acknowledge that the intent was canceled. Attributes are
+documented below under [statement](#statement).
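+
+The drinks example above, written out as a hypothetical fragment (not part of the original
+document):
+
+```typescript
+// Hypothetical fragment: after fulfilling OrderPizza, offer a drink; the
+// rejection statement acknowledges a "no" answer.
+const followUpPrompt = {
+  prompt: {
+    maxAttempts: 2,
+    message: [
+      {
+        content: "Would you like a drink with your order?",
+        contentType: "PlainText",
+      },
+    ],
+  },
+  rejectionStatement: {
+    message: [
+      {
+        content: "Okay, no drink. Your order is on its way.",
+        contentType: "PlainText",
+      },
+    ],
+  },
+};
+```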
+
+### fulfillment_activity
+
+Describes how the intent is fulfilled after the user provides all of the information required for the intent.
+
+* `type` - (Required) How the intent should be fulfilled, either by running a Lambda function or by
+returning the slot data to the client application. Type can be either `ReturnIntent` or `CodeHook`, as documented [here](https://docs.aws.amazon.com/lex/latest/dg/API_FulfillmentActivity.html).
+* `codeHook` - (Optional) A description of the Lambda function that is run to fulfill the intent.
+Required if type is `CodeHook`. Attributes are documented under [code_hook](#code_hook). A
+hypothetical fragment appears after the [slot](#slot) section below.
+
+### message
+
+The message object that provides the message text and its type.
+
+* `content` - (Required) The text of the message. Must be less than or equal to 1000 characters in length.
+* `contentType` - (Required) The content type of the message string.
+* `groupNumber` - (Optional) Identifies the message group that the message belongs to. When a group
+is assigned to a message, Amazon Lex returns one message from each group in the response. Must be a number between 1 and 5 (inclusive).
+
+### prompt
+
+Obtains information from the user. To define a prompt, provide one or more messages and specify the
+number of attempts to get information from the user. If you provide more than one message, Amazon
+Lex chooses one of the messages to use to prompt the user.
+
+* `maxAttempts` - (Required) The number of times to prompt the user for information. Must be a number between 1 and 5 (inclusive).
+* `message` - (Required) A set of messages, each of which provides a message string and its type.
+You can specify the message string in plain text or in Speech Synthesis Markup Language (SSML).
+Attributes are documented under [message](#message). Must contain between 1 and 15 messages.
+* `responseCard` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html). Must be less than or equal to 50000 characters in length.
+
+### slot
+
+Identifies the version of a specific slot.
+
+* `name` - (Required) The name of the intent slot that you want to create. The name is case sensitive. Must be less than or equal to 100 characters in length.
+* `slotConstraint` - (Required) Specifies whether the slot is required or optional.
+* `description` - (Optional) A description of the slot. Must be less than or equal to 200 characters in length.
+* `priority` - (Optional) Directs Amazon Lex in the order in which to elicit this slot value from the user.
+For example, if the intent has two slots with priorities 1 and 2, AWS Lex first elicits a value for
+the slot with priority 1. If multiple slots share the same priority, the order in which Lex elicits
+values is arbitrary. Must be between 1 and 100.
+* `responseCard` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html). Must be less than or equal to 50000 characters in length.
+* `sampleUtterances` - (Optional) If you know a specific pattern with which users might respond to
+an Amazon Lex request for a slot value, you can provide those utterances to improve accuracy. This
+is optional. In most cases, Amazon Lex is capable of understanding user utterances. Must have between 1 and 10 items in the list, and each item must be less than or equal to 200 characters in length.
+* `slotType` - (Optional) The type of the slot, either a custom slot type that you defined or one of
+the built-in slot types. Must be less than or equal to 100 characters in length.
+* `slotTypeVersion` - (Optional) The version of the slot type. Must be less than or equal to 64 characters in length.
+* `valueElicitationPrompt` - (Optional) The prompt that Amazon Lex uses to elicit the slot value
+from the user. Attributes are documented under [prompt](#prompt).
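+
+The `CodeHook` fulfillment described under [fulfillment_activity](#fulfillment_activity), as
+a hypothetical fragment; the Lambda ARN is a placeholder, not from the original document:
+
+```typescript
+// Hypothetical fragment: fulfill the intent with a Lambda function rather
+// than returning slot data to the client application.
+const fulfillmentActivity = {
+  type: "CodeHook",
+  codeHook: {
+    messageVersion: "1.0",
+    uri: "arn:aws:lambda:us-east-1:123456789012:function:OrderFlowersFulfillment",
+  },
+};
+```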
+
+### statement
+
+A statement is a map with a set of message maps and an optional response card string. Messages
+convey information to the user. At runtime, Amazon Lex selects the message to convey.
+
+* `message` - (Required) A set of messages, each of which provides a message string and its type.
+You can specify the message string in plain text or in Speech Synthesis Markup Language (SSML).
+Attributes are documented under [message](#message). Must contain between 1 and 15 messages.
+* `responseCard` - (Optional) The response card. Amazon Lex will substitute session attributes and
+slot values into the response card. For more information, see
+[Example: Using a Response Card](https://docs.aws.amazon.com/lex/latest/dg/ex-resp-card.html). Must be less than or equal to 50000 characters in length.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the Lex intent.
+* `checksum` - Checksum identifying the version of the intent that was created. The checksum is not
+included as an argument because the resource will add it automatically when updating the intent.
+* `createdDate` - The date when the intent version was created.
+* `lastUpdatedDate` - The date when the $LATEST version of this intent was updated.
+* `version` - The version of the intent.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `1m`)
+* `update` - (Default `1m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import intents using their name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import intents using their name. 
For example:
+
+```console
+% terraform import aws_lex_intent.order_flowers_intent OrderFlowers
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lex_slot_type.html.markdown b/website/docs/cdktf/typescript/r/lex_slot_type.html.markdown
new file mode 100644
index 00000000000..65b2e9e8fce
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lex_slot_type.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "Lex Model Building"
+layout: "aws"
+page_title: "AWS: aws_lex_slot_type"
+description: |-
+  Provides an Amazon Lex Slot Type resource.
+---
+
+
+
+# Resource: aws_lex_slot_type
+
+Provides an Amazon Lex Slot Type resource. For more information see
+[Amazon Lex: How It Works](https://docs.aws.amazon.com/lex/latest/dg/how-it-works.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LexSlotType } from "./.gen/providers/aws/lex-slot-type";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LexSlotType(this, "flower_types", {
+      createVersion: true,
+      description: "Types of flowers to order",
+      enumerationValue: [
+        {
+          synonyms: ["Lirium", "Martagon"],
+          value: "lilies",
+        },
+        {
+          synonyms: ["Eduardoregelia", "Podonix"],
+          value: "tulips",
+        },
+      ],
+      name: "FlowerTypes",
+      valueSelectionStrategy: "ORIGINAL_VALUE",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `enumerationValue` - (Required) A list of EnumerationValue objects that defines the values that
+the slot type can take. Each value can have a list of synonyms, which are additional values that help
+train the machine learning model about the values that it resolves for a slot. Attributes are
+documented under [enumeration_value](#enumeration_value).
+* `name` - (Required) The name of the slot type. The name is not case sensitive. Must be less than or equal to 100 characters in length.
+* `createVersion` - (Optional) Determines if a new slot type version is created when the initial
+resource is created and on each update. Defaults to `false`.
+* `description` - (Optional) A description of the slot type. Must be less than or equal to 200 characters in length.
+* `valueSelectionStrategy` - (Optional) Determines the slot resolution strategy that Amazon Lex
+uses to return slot type values. `ORIGINAL_VALUE` returns the value entered by the user if the user
+value is similar to the slot value. `TOP_RESOLUTION` returns the first value in the resolution list
+if there is a resolution list for the slot, otherwise null is returned. Defaults to `ORIGINAL_VALUE`.
+
+### enumeration_value
+
+Each slot type can have a set of values. Each enumeration value represents a value the slot type
+can take.
+
+For example, a pizza ordering bot could have a slot type that specifies the type of crust that the
+pizza should have. The slot type could include the values: thick, thin, stuffed.
+
+* `synonyms` - (Optional) Additional values related to the slot type value. Each item must be less than or equal to 140 characters in length.
+* `value` - (Required) The value of the slot type. Must be less than or equal to 140 characters in length.
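+
+The crust example above as a hypothetical sketch (not part of the original document; it
+reuses the `LexSlotType` binding from the main example):
+
+```typescript
+// Hypothetical sketch of the pizza-crust slot type described above.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LexSlotType } from "./.gen/providers/aws/lex-slot-type";
+class CrustTypesStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LexSlotType(this, "crust_types", {
+      description: "Type of crust for the pizza",
+      enumerationValue: [
+        { synonyms: ["deep dish"], value: "thick" },
+        { value: "thin" },
+        { value: "stuffed" },
+      ],
+      name: "CrustTypes",
+    });
+  }
+}
+```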
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `1m`)
+* `update` - (Default `1m`)
+* `delete` - (Default `5m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `checksum` - Checksum identifying the version of the slot type that was created. The checksum is
+not included as an argument because the resource will add it automatically when updating the slot type.
+* `createdDate` - The date when the slot type version was created.
+* `lastUpdatedDate` - The date when the `$LATEST` version of this slot type was updated.
+* `version` - The version of the slot type.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import slot types using their name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import slot types using their name. For example:
+
+```console
+% terraform import aws_lex_slot_type.flower_types FlowerTypes
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/licensemanager_association.markdown b/website/docs/cdktf/typescript/r/licensemanager_association.markdown
new file mode 100644
index 00000000000..172204d1024
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/licensemanager_association.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_association"
+description: |-
+  Provides a License Manager association resource.
+---
+
+
+
+# Resource: aws_licensemanager_association
+
+Provides a License Manager association.
+
+~> **Note:** License configurations can also be associated with launch templates by specifying the `licenseSpecifications` block for an `awsLaunchTemplate`.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAmi } from "./.gen/providers/aws/data-aws-ami";
+import { Instance } from "./.gen/providers/aws/instance";
+import { LicensemanagerAssociation } from "./.gen/providers/aws/licensemanager-association";
+import { LicensemanagerLicenseConfiguration } from "./.gen/providers/aws/licensemanager-license-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new LicensemanagerLicenseConfiguration(this, "example", {
+      licenseCountingType: "Instance",
+      name: "Example",
+    });
+    const dataAwsAmiExample = new DataAwsAmi(this, "example_1", {
+      filter: [
+        {
+          name: "name",
+          values: ["amzn-ami-vpc-nat*"],
+        },
+      ],
+      mostRecent: true,
+      owners: ["amazon"],
+    });
+    /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    dataAwsAmiExample.overrideLogicalId("example");
+    const awsInstanceExample = new Instance(this, "example_2", {
+      ami: Token.asString(dataAwsAmiExample.id),
+      instanceType: "t2.micro",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsInstanceExample.overrideLogicalId("example");
+    const awsLicensemanagerAssociationExample = new LicensemanagerAssociation(
+      this,
+      "example_3",
+      {
+        licenseConfigurationArn: example.arn,
+        resourceArn: Token.asString(awsInstanceExample.arn),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLicensemanagerAssociationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `licenseConfigurationArn` - (Required) ARN of the license configuration.
+* `resourceArn` - (Required) ARN of the resource associated with the license configuration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The license configuration ARN.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import License Manager associations using `resourceArn,licenseConfigurationArn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import License Manager associations using `resourceArn,licenseConfigurationArn`. For example:
+
+```console
+% terraform import aws_licensemanager_association.example arn:aws:ec2:eu-west-1:123456789012:image/ami-123456789abcdef01,arn:aws:license-manager:eu-west-1:123456789012:license-configuration:lic-0123456789abcdef0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/licensemanager_grant.html.markdown b/website/docs/cdktf/typescript/r/licensemanager_grant.html.markdown
new file mode 100644
index 00000000000..f8adf9c3d07
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/licensemanager_grant.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_grant"
+description: |-
+  Provides a License Manager grant resource.
+---
+
+
+
+# Resource: aws_licensemanager_grant
+
+Provides a License Manager grant. This allows for sharing licenses with other AWS accounts.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */
+import { LicensemanagerGrant } from "./.gen/providers/aws/licensemanager-grant";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LicensemanagerGrant(this, "test", {
+      allowedOperations: [
+        "ListPurchasedLicenses",
+        "CheckoutLicense",
+        "CheckInLicense",
+        "ExtendConsumptionLicense",
+        "CreateToken",
+      ],
+      homeRegion: "us-east-1",
+      licenseArn: "arn:aws:license-manager::111111111111:license:l-exampleARN",
+      name: "share-license-with-account",
+      principal: "arn:aws:iam::111111111112:root",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:

+* `name` - (Required) The name of the grant.
+* `allowedOperations` - (Required) A list of the allowed operations for the grant. This is a subset of the allowed operations on the license.
+* `licenseArn` - (Required) The ARN of the license to grant.
+* `principal` - (Required) The target account for the grant in the form of the ARN for an account principal of the root user.
+* `homeRegion` - (Required) The home region for the license.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The grant ARN (same as `arn`).
+* `arn` - The grant ARN.
+* `parentArn` - The parent ARN.
+* `status` - The grant status.
+* `version` - The grant version.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLicensemanagerGrant` using the grant ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLicensemanagerGrant` using the grant ARN. For example:
+
+```console
+% terraform import aws_licensemanager_grant.test arn:aws:license-manager::123456789011:grant:g-01d313393d9e443d8664cc054db1e089
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/licensemanager_grant_accepter.html.markdown b/website/docs/cdktf/typescript/r/licensemanager_grant_accepter.html.markdown
new file mode 100644
index 00000000000..79078c10f7d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/licensemanager_grant_accepter.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "License Manager"
+layout: "aws"
+page_title: "AWS: aws_licensemanager_grant_accepter"
+description: |-
+  Accepts a License Manager grant resource.
+---
+
+
+
+# Resource: aws_licensemanager_grant_accepter
+
+Accepts a License Manager grant. This allows for sharing licenses with other AWS accounts.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */ +import { LicensemanagerGrantAccepter } from "./.gen/providers/aws/licensemanager-grant-accepter"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LicensemanagerGrantAccepter(this, "test", { + grantArn: + "arn:aws:license-manager::123456789012:grant:g-1cf9fba4ba2f42dcab11c686c4b4d329", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `grantArn` - (Required) The ARN of the grant to accept. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The grant ARN (Same as `arn`). +* `arn` - The grant ARN. +* `name` - The Name of the grant. +* `allowedOperations` - A list of the allowed operations for the grant. +* `licenseArn` - The ARN of the license for the grant. +* `principal` - The target account for the grant. +* `homeRegion` - The home region for the license. +* `parentArn` - The parent ARN. +* `status` - The grant status. +* `version` - The grant version. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLicensemanagerGrantAccepter` using the grant arn. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsLicensemanagerGrantAccepter` using the grant arn. For example: + +```console +% terraform import aws_licensemanager_grant_accepter.test arn:aws:license-manager::123456789012:grant:g-1cf9fba4ba2f42dcab11c686c4b4d329 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/licensemanager_license_configuration.markdown b/website/docs/cdktf/typescript/r/licensemanager_license_configuration.markdown new file mode 100644 index 00000000000..39a1f9e661e --- /dev/null +++ b/website/docs/cdktf/typescript/r/licensemanager_license_configuration.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "License Manager" +layout: "aws" +page_title: "AWS: aws_licensemanager_license_configuration" +description: |- + Provides a License Manager license configuration resource. +--- + + + +# Resource: aws_licensemanager_license_configuration + +Provides a License Manager license configuration resource. + +~> **Note:** Removing the `licenseCount` attribute is not supported by the License Manager API - use `terraform taint aws_licensemanager_license_configuration.` to recreate the resource instead. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { LicensemanagerLicenseConfiguration } from "./.gen/providers/aws/licensemanager-license-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LicensemanagerLicenseConfiguration(this, "example", {
+      description: "Example",
+      licenseCount: 10,
+      licenseCountHardLimit: true,
+      licenseCountingType: "Socket",
+      licenseRules: ["#minimumSockets=2"],
+      name: "Example",
+      tags: {
+        foo: "barr",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Name of the license configuration.
+* `description` - (Optional) Description of the license configuration.
+* `licenseCount` - (Optional) Number of licenses managed by the license configuration.
+* `licenseCountHardLimit` - (Optional) Sets the number of available licenses as a hard limit.
+* `licenseCountingType` - (Required) Dimension to use to track license inventory. Specify either `vCPU`, `Instance`, `Core`, or `Socket`.
+* `licenseRules` - (Optional) Array of configured License Manager rules.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Rules
+
+License rules should be in the format of `#ruleType=ruleValue`. Supported rule types:
+
+* `minimumVcpus` - Resource must have minimum vCPU count in order to use the license. Default: 1
+* `maximumVcpus` - Resource must have maximum vCPU count in order to use the license. Default: unbounded, limit: 10000
+* `minimumCores` - Resource must have minimum core count in order to use the license. Default: 1
+* `maximumCores` - Resource must have maximum core count in order to use the license. Default: unbounded, limit: 10000
+* `minimumSockets` - Resource must have minimum socket count in order to use the license. Default: 1
+* `maximumSockets` - Resource must have maximum socket count in order to use the license. Default: unbounded, limit: 10000
+* `allowedTenancy` - Defines where the license can be used. If set, restricts license usage to selected tenancies. Specify a comma delimited list of `EC2-Default`, `EC2-DedicatedHost`, `EC2-DedicatedInstance`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The license configuration ARN.
+* `id` - The license configuration ARN.
+* `ownerAccountId` - Account ID of the owner of the license configuration.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import license configurations using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import license configurations using the `id`. 
For example: + +```console +% terraform import aws_licensemanager_license_configuration.example arn:aws:license-manager:eu-west-1:123456789012:license-configuration:lic-0123456789abcdef0123456789abcdef +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lightsail_bucket.html.markdown b/website/docs/cdktf/typescript/r/lightsail_bucket.html.markdown new file mode 100644 index 00000000000..af720bac6d8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/lightsail_bucket.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "Lightsail" +layout: "aws" +page_title: "AWS: aws_lightsail_bucket" +description: |- + Provides a lightsail bucket +--- + + + +# Resource: aws_lightsail_bucket + +Provides a lightsail bucket. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LightsailBucket } from "./.gen/providers/aws/lightsail-bucket"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LightsailBucket(this, "test", { + bundleId: "small_1_0", + name: "mytestbucket", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name for the bucket. +* `bundleId` - (Required) - The ID of the bundle to use for the bucket. A bucket bundle specifies the monthly cost, storage space, and data transfer quota for a bucket. Use the [get-bucket-bundles](https://docs.aws.amazon.com/cli/latest/reference/lightsail/get-bucket-bundles.html) cli command to get a list of bundle IDs that you can specify. +* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name used for this bucket (matches `name`). +* `arn` - The ARN of the lightsail bucket. +* `availabilityZone` - The resource Availability Zone. Follows the format us-east-2a (case-sensitive). +* `createdAt` - The timestamp when the bucket was created. +* `region` - The Amazon Web Services Region name. +* `supportCode` - The support code for the resource. Include this code in your email to support when you have questions about a resource in Lightsail. This code enables our support team to look up your Lightsail information more easily. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider `defaultTags` configuration block. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailBucket` using the `name` attribute. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsLightsailBucket` using the `name` attribute. 
For example:
+
+```console
+% terraform import aws_lightsail_bucket.test example-bucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_bucket_access_key.html.markdown b/website/docs/cdktf/typescript/r/lightsail_bucket_access_key.html.markdown
new file mode 100644
index 00000000000..5b18322f4b5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_bucket_access_key.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_bucket_access_key"
+description: |-
+  Provides a lightsail bucket access key. This is a set of credentials that allow API requests to be made to the lightsail bucket.
+---
+
+
+
+# Resource: aws_lightsail_bucket_access_key
+
+Provides a lightsail bucket access key. This is a set of credentials that allow API requests to be made to the lightsail bucket.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailBucket } from "./.gen/providers/aws/lightsail-bucket";
+import { LightsailBucketAccessKey } from "./.gen/providers/aws/lightsail-bucket-access-key";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailBucket(this, "test", {
+      bundleId: "small_1_0",
+      name: "mytestbucket",
+    });
+    const awsLightsailBucketAccessKeyTest = new LightsailBucketAccessKey(
+      this,
+      "test_1",
+      {
+        bucketName: test.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailBucketAccessKeyTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucketName` - (Required) The name of the bucket that the new access key will belong to, and grant access to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes separated by a `,` to create a unique id: `bucketName`,`accessKeyId`
+* `accessKeyId` - The ID of the access key.
+* `createdAt` - The timestamp when the access key was created.
+* `secretAccessKey` - The secret access key used to sign requests. This attribute is not available for imported resources. Note that this will be written to the state file.
+* `status` - The status of the access key.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailBucketAccessKey` using the `id` attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailBucketAccessKey` using the `id` attribute. 
For example:
+
+```console
+% terraform import aws_lightsail_bucket_access_key.test example-bucket,AKIA47VOQ2KPR7LLRZ6D
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_bucket_resource_access.html.markdown b/website/docs/cdktf/typescript/r/lightsail_bucket_resource_access.html.markdown
new file mode 100644
index 00000000000..fe2ff92dd8c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_bucket_resource_access.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_bucket_resource_access"
+description: |-
+  Grants a Lightsail resource access to a Lightsail bucket.
+---
+
+
+
+# Resource: aws_lightsail_bucket_resource_access
+
+Grants a Lightsail resource (such as an instance) access to a Lightsail bucket.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailBucket } from "./.gen/providers/aws/lightsail-bucket";
+import { LightsailBucketResourceAccess } from "./.gen/providers/aws/lightsail-bucket-resource-access";
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailBucket(this, "test", {
+      bundleId: "small_1_0",
+      name: "mytestbucket",
+    });
+    const awsLightsailInstanceTest = new LightsailInstance(this, "test_2", {
+      availabilityZone: "us-east-1b",
+      blueprintId: "amazon_linux_2",
+      bundleId: "nano_1_0",
+      name: "mytestinstance",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailInstanceTest.overrideLogicalId("test");
+    const awsLightsailBucketResourceAccessTest =
+      new LightsailBucketResourceAccess(this, "test_1", {
+        bucketName: test.id,
+        resourceName: Token.asString(awsLightsailInstanceTest.name),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailBucketResourceAccessTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucketName` - (Required) The name of the bucket to grant access to.
+* `resourceName` - (Required) The name of the resource to be granted bucket access.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:

+* `id` - A combination of attributes separated by a `,` to create a unique id: `bucketName`,`resourceName`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailBucketResourceAccess` using the `id` attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailBucketResourceAccess` using the `id` attribute. 
For example:
+
+```console
+% terraform import aws_lightsail_bucket_resource_access.test example-bucket,example-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_certificate.html.markdown b/website/docs/cdktf/typescript/r/lightsail_certificate.html.markdown
new file mode 100644
index 00000000000..0b4e521fcff
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_certificate.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_certificate"
+description: |-
+  Provides a Lightsail certificate
+---
+
+
+
+# Resource: aws_lightsail_certificate
+
+Provides a Lightsail certificate.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailCertificate } from "./.gen/providers/aws/lightsail-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailCertificate(this, "test", {
+      domainName: "testdomain.com",
+      name: "test",
+      subjectAlternativeNames: ["www.testdomain.com"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Lightsail certificate.
+* `domainName` - (Required) A domain name for which the certificate should be issued.
+* `subjectAlternativeNames` - (Optional) Set of domains that should be SANs in the issued certificate. The `domainName` attribute is automatically added as a Subject Alternative Name.
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Lightsail certificate (matches `name`).
+* `arn` - The ARN of the Lightsail certificate.
+* `createdAt` - The timestamp when the certificate was created.
+* `domainValidationOptions` - Set of domain validation objects which can be used to complete certificate validation. Can have more than one element, e.g., if SANs are defined.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider `defaultTags` configuration block.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailCertificate` using the certificate name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailCertificate` using the certificate name. 
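+The empty class above is what `cdktf convert` currently emits for a Terraform `import` block. As an alternative sketch (not generated by the converter), recent cdktf releases let a stack mark a resource for import in code via the `importFrom` helper on `TerraformResource`; the certificate name below is an assumption for illustration, and the equivalent `terraform import` CLI example still follows.
+
+```typescript
+// Hedged sketch: bring an existing certificate under management instead of
+// creating a new one. Assumes cdktf's `resource.importFrom(id)` helper and
+// an existing certificate named "CertificateName".
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { LightsailCertificate } from "./.gen/providers/aws/lightsail-certificate";
+class MyImportStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const cert = new LightsailCertificate(this, "test", {
+      domainName: "testdomain.com",
+      name: "CertificateName",
+    });
+    // On the next deploy, import the certificate by name rather than create it.
+    cert.importFrom("CertificateName");
+  }
+}
+```
+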
For example: + +```console +% terraform import aws_lightsail_certificate.test CertificateName +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lightsail_container_service.html.markdown b/website/docs/cdktf/typescript/r/lightsail_container_service.html.markdown new file mode 100644 index 00000000000..636fe6d9ee1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/lightsail_container_service.html.markdown @@ -0,0 +1,253 @@ +--- +subcategory: "Lightsail" +layout: "aws" +page_title: "AWS: aws_lightsail_container_service" +description: |- + Provides a resource to manage Lightsail container service +--- + + + +# Resource: aws_lightsail_container_service + +An Amazon Lightsail container service is a highly scalable compute and networking resource on which you can deploy, run, +and manage containers. For more information, see +[Container services in Amazon Lightsail](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-container-services). + +~> **Note:** For more information about the AWS Regions in which you can create Amazon Lightsail container services, +see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail). + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LightsailContainerService } from "./.gen/providers/aws/lightsail-container-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LightsailContainerService(this, "my_container_service", { + isDisabled: false, + name: "container-service-1", + power: "nano", + scale: 1, + tags: { + foo1: "bar1", + foo2: "", + }, + }); + } +} + +``` + +### Public Domain Names + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LightsailContainerService } from "./.gen/providers/aws/lightsail-container-service"; +interface MyConfig { + name: any; + power: any; + scale: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new LightsailContainerService(this, "my_container_service", { + publicDomainNames: { + certificate: [ + { + certificateName: "example-certificate", + domainNames: ["www.example.com"], + }, + ], + }, + name: config.name, + power: config.power, + scale: config.scale, + }); + } +} + +``` + +### Private Registry Access + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
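+ *
+ * Note: this example references an ECR repository resource
+ * (`awsEcrRepositoryDefault`, i.e. an `aws_ecr_repository` named "default")
+ * that is assumed to be defined elsewhere in the stack; `cdktf convert`
+ * did not include it in this snippet.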
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { EcrRepositoryPolicy } from "./.gen/providers/aws/ecr-repository-policy";
+import { LightsailContainerService } from "./.gen/providers/aws/lightsail-container-service";
+interface MyConfig {
+  name: any;
+  power: any;
+  scale: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const defaultVar = new LightsailContainerService(this, "default", {
+      privateRegistryAccess: {
+        ecrImagePullerRole: {
+          isActive: true,
+        },
+      },
+      name: config.name,
+      power: config.power,
+      scale: config.scale,
+    });
+    const dataAwsIamPolicyDocumentDefault = new DataAwsIamPolicyDocument(
+      this,
+      "default_1",
+      {
+        statement: [
+          {
+            actions: ["ecr:BatchGetImage", "ecr:GetDownloadUrlForLayer"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: [
+                  Token.asString(
+                    propertyAccess(defaultVar.privateRegistryAccess, [
+                      "0",
+                      "ecr_image_puller_role",
+                      "0",
+                      "principal_arn",
+                    ])
+                  ),
+                ],
+                type: "AWS",
+              },
+            ],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentDefault.overrideLogicalId("default");
+    const awsEcrRepositoryPolicyDefault = new EcrRepositoryPolicy(
+      this,
+      "default_2",
+      {
+        policy: Token.asString(dataAwsIamPolicyDocumentDefault.json),
+        repository: Token.asString(awsEcrRepositoryDefault.name),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsEcrRepositoryPolicyDefault.overrideLogicalId("default");
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** You must create and validate an SSL/TLS certificate before you can use `publicDomainNames` with your
+container service. For more information, see
+[Enabling and managing custom domains for your Amazon Lightsail container services](https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-creating-container-services-certificates).
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name for the container service. Names must be of length 1 to 63, and be
+  unique within each AWS Region in your Lightsail account.
+* `power` - (Required) The power specification for the container service. The power specifies the amount of memory,
+  the number of vCPUs, and the monthly price of each node of the container service.
+  Possible values: `nano`, `micro`, `small`, `medium`, `large`, `xlarge`.
+* `scale` - (Required) The scale specification for the container service. The scale specifies the allocated compute
+  nodes of the container service.
+* `isDisabled` - (Optional) A Boolean value indicating whether the container service is disabled. Defaults to `false`.
+* `publicDomainNames` - (Optional) The public domain names to use with the container service, such as example.com
+  and www.example.com. You can specify up to four public domain names for a container service. The domain names that you
+  specify are used when you create a deployment with a container configured as the public endpoint of your container
+  service. If you don't specify public domain names, then you can use the default domain of the container service.
+  Defined below. 
+* `privateRegistryAccess` - (Optional) An object to describe the configuration for the container service to access private container image repositories, such as Amazon Elastic Container Registry (Amazon ECR) private repositories. See [Private Registry Access](#private-registry-access) below for more details.
+* `tags` - (Optional) Map of container service tags. If
+  configured with a provider
+  [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block)
+  present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Private Registry Access
+
+The `privateRegistryAccess` block supports the following arguments:
+
+* `ecrImagePullerRole` - (Optional) Describes a request to configure an Amazon Lightsail container service to access private container image repositories, such as Amazon Elastic Container Registry (Amazon ECR) private repositories. See [ECR Image Puller Role](#ecr-image-puller-role) below for more details.
+
+### ECR Image Puller Role
+
+The `ecrImagePullerRole` block supports the following arguments:
+
+* `isActive` - (Optional) A Boolean value that indicates whether to activate the role. The default is `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the container service.
+* `availabilityZone` - The Availability Zone. Follows the format us-east-2a (case-sensitive).
+* `id` - Same as `name`.
+* `powerId` - The ID of the power of the container service.
+* `principalArn` - The principal ARN of the container service. The principal ARN can be used to create a trust
+  relationship between your standard AWS account and your Lightsail container service. This allows you to give your
+  service permission to access resources in your standard AWS account.
+* `privateDomainName` - The private domain name of the container service. The private domain name is accessible only
+  by other resources within the default virtual private cloud (VPC) of your Lightsail account.
+* `regionName` - The AWS Region name.
+* `resourceType` - The Lightsail resource type of the container service (i.e., ContainerService).
+* `state` - The current state of the container service.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider
+  [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `url` - The publicly accessible URL of the container service. If no public endpoint is specified in the
+  currentDeployment, this URL returns a 404 response.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Container Service using the `name`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Lightsail Container Service using the `name`. For example: + +```console +% terraform import aws_lightsail_container_service.my_container_service container-service-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lightsail_container_service_deployment_version.html.markdown b/website/docs/cdktf/typescript/r/lightsail_container_service_deployment_version.html.markdown new file mode 100644 index 00000000000..56861c3a9c2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/lightsail_container_service_deployment_version.html.markdown @@ -0,0 +1,142 @@ +--- +subcategory: "Lightsail" +layout: "aws" +page_title: "AWS: aws_lightsail_container_service_deployment_version" +description: |- + Provides a resource to manage a deployment version for your Amazon Lightsail container service. +--- + + + +# Resource: aws_lightsail_container_service_deployment_version + +Provides a resource to manage a deployment version for your Amazon Lightsail container service. + +~> **NOTE:** The Amazon Lightsail container service must be enabled to create a deployment. + +~> **NOTE:** This resource allows you to manage an Amazon Lightsail container service deployment version but Terraform cannot destroy it. Removing this resource from your configuration will remove it from your statefile and Terraform management. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LightsailContainerServiceDeploymentVersion } from "./.gen/providers/aws/lightsail-container-service-deployment-version"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LightsailContainerServiceDeploymentVersion(this, "example", { + container: [ + { + command: [], + containerName: "hello-world", + environment: { + MY_ENVIRONMENT_VARIABLE: "my_value", + }, + image: "amazon/amazon-lightsail:hello-world", + ports: { + 80: "HTTP", + }, + }, + ], + publicEndpoint: { + containerName: "hello-world", + containerPort: 80, + healthCheck: { + healthyThreshold: 2, + intervalSeconds: 5, + path: "/", + successCodes: "200-499", + timeoutSeconds: 2, + unhealthyThreshold: 2, + }, + }, + serviceName: Token.asString(awsLightsailContainerServiceExample.name), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `serviceName` - (Required) The name for the container service. +* `container` - (Required) A set of configuration blocks that describe the settings of the containers that will be launched on the container service. Maximum of 53. [Detailed below](#container). +* `publicEndpoint` - (Optional) A configuration block that describes the settings of the public endpoint for the container service. [Detailed below](#public_endpoint). 
+
+### `container`
+
+The `container` configuration block supports the following arguments:
+
+* `containerName` - (Required) The name for the container.
+* `image` - (Required) The name of the image used for the container. Container images sourced from your Lightsail container service, that are registered and stored on your service, start with a colon (`:`). For example, `:container-service-1.mystaticwebsite.1`. Container images sourced from a public registry like Docker Hub don't start with a colon. For example, `nginx:latest` or `nginx`.
+* `command` - (Optional) The launch command for the container. A list of strings.
+* `environment` - (Optional) A key-value map of the environment variables of the container.
+* `ports` - (Optional) A key-value map of the open firewall ports of the container. Valid values: `HTTP`, `HTTPS`, `TCP`, `UDP`.
+
+### `publicEndpoint`
+
+The `publicEndpoint` configuration block supports the following arguments:
+
+* `containerName` - (Required) The name of the container for the endpoint.
+* `containerPort` - (Required) The port of the container to which traffic is forwarded.
+* `healthCheck` - (Required) A configuration block that describes the health check configuration of the container. [Detailed below](#health_check).
+
+### `healthCheck`
+
+The `healthCheck` configuration block supports the following arguments:
+
+* `healthyThreshold` - (Optional) The number of consecutive health check successes required before moving the container to the Healthy state. Defaults to 2.
+* `unhealthyThreshold` - (Optional) The number of consecutive health check failures required before moving the container to the Unhealthy state. Defaults to 2.
+* `timeoutSeconds` - (Optional) The amount of time, in seconds, during which no response means a failed health check. You can specify between 2 and 60 seconds. Defaults to 2.
+* `intervalSeconds` - (Optional) The approximate interval, in seconds, between health checks of an individual container. You can specify between 5 and 300 seconds. Defaults to 5.
+* `path` - (Optional) The path on the container on which to perform the health check. Defaults to "/".
+* `successCodes` - (Optional) The HTTP codes to use when checking for a successful response from a container. You can specify values between 200 and 499. Defaults to "200-499".
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `serviceName` and `version` separated by a slash (`/`).
+* `createdAt` - The timestamp when the deployment was created.
+* `state` - The current state of the container service.
+* `version` - The version number of the deployment.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Container Service Deployment Version using the `serviceName` and `version` separated by a slash (`/`). 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Lightsail Container Service Deployment Version using the `serviceName` and `version` separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_lightsail_container_service_deployment_version.example container-service-1/1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_database.html.markdown b/website/docs/cdktf/typescript/r/lightsail_database.html.markdown
new file mode 100644
index 00000000000..847cf49a7a6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_database.html.markdown
@@ -0,0 +1,301 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_database"
+description: |-
+  Provides a Lightsail Database
+---
+
+
+
+# Resource: aws_lightsail_database
+
+Provides a Lightsail Database. Amazon Lightsail is a service to provide easy virtual private servers
+with custom software already set up. See [What is Amazon Lightsail?](https://lightsail.aws.amazon.com/ls/docs/getting-started/article/what-is-amazon-lightsail)
+for more information.
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions, please see ["Regions and Availability Zones"](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services/) for more details.
+
+## Example Usage
+
+### Basic mysql blueprint
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailDatabase } from "./.gen/providers/aws/lightsail-database";
+interface MyConfig {
+  relationalDatabaseName: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new LightsailDatabase(this, "test", {
+      availabilityZone: "us-east-1a",
+      blueprintId: "mysql_8_0",
+      bundleId: "micro_1_0",
+      masterDatabaseName: "testdatabasename",
+      masterPassword: "testdatabasepassword",
+      masterUsername: "test",
+      name: "test",
+      relationalDatabaseName: config.relationalDatabaseName,
+    });
+  }
+}
+
+```
+
+### Basic postgres blueprint
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */
+import { LightsailDatabase } from "./.gen/providers/aws/lightsail-database";
+interface MyConfig {
+  relationalDatabaseName: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new LightsailDatabase(this, "test", {
+      availabilityZone: "us-east-1a",
+      blueprintId: "postgres_12",
+      bundleId: "micro_1_0",
+      masterDatabaseName: "testdatabasename",
+      masterPassword: "testdatabasepassword",
+      masterUsername: "test",
+      name: "test",
+      relationalDatabaseName: config.relationalDatabaseName,
+    });
+  }
+}
+
+```
+
+### Custom backup and maintenance windows
+
+Below is an example that sets a custom backup and maintenance window. Times are specified in UTC. This example will allow daily backups to take place between 16:00 and 16:30 each day. This example also requires any maintenance tasks (anything that would cause an outage, including changing some attributes) to take place on Tuesdays between 17:00 and 17:30. An action taken against this database that would cause an outage will wait until this time window to make the requested changes.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailDatabase } from "./.gen/providers/aws/lightsail-database";
+interface MyConfig {
+  relationalDatabaseName: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new LightsailDatabase(this, "test", {
+      availabilityZone: "us-east-1a",
+      blueprintId: "postgres_12",
+      bundleId: "micro_1_0",
+      masterDatabaseName: "testdatabasename",
+      masterPassword: "testdatabasepassword",
+      masterUsername: "test",
+      name: "test",
+      preferredBackupWindow: "16:00-16:30",
+      preferredMaintenanceWindow: "Tue:17:00-Tue:17:30",
+      relationalDatabaseName: config.relationalDatabaseName,
+    });
+  }
+}
+
+```
+
+### Final Snapshots
+
+To enable creating a final snapshot of your database on deletion, use the `finalSnapshotName` argument to provide a name to be used for the snapshot.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailDatabase } from "./.gen/providers/aws/lightsail-database";
+interface MyConfig {
+  relationalDatabaseName: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new LightsailDatabase(this, "test", {
+      availabilityZone: "us-east-1a",
+      blueprintId: "postgres_12",
+      bundleId: "micro_1_0",
+      finalSnapshotName: "MyFinalSnapshot",
+      masterDatabaseName: "testdatabasename",
+      masterPassword: "testdatabasepassword",
+      masterUsername: "test",
+      name: "test",
+      preferredBackupWindow: "16:00-16:30",
+      preferredMaintenanceWindow: "Tue:17:00-Tue:17:30",
+      relationalDatabaseName: config.relationalDatabaseName,
+    });
+  }
+}
+
+```
+
+### Apply Immediately
+
+To enable applying changes immediately instead of waiting for a maintenance window, use the `applyImmediately` argument. 
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailDatabase } from "./.gen/providers/aws/lightsail-database";
+interface MyConfig {
+  relationalDatabaseName: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new LightsailDatabase(this, "test", {
+      applyImmediately: true,
+      availabilityZone: "us-east-1a",
+      blueprintId: "postgres_12",
+      bundleId: "micro_1_0",
+      masterDatabaseName: "testdatabasename",
+      masterPassword: "testdatabasepassword",
+      masterUsername: "test",
+      name: "test",
+      relationalDatabaseName: config.relationalDatabaseName,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name to use for your new Lightsail database resource. Names must be unique within each AWS Region in your Lightsail account.
+* `availabilityZone` - The Availability Zone in which to create your new database. Use the us-east-2a case-sensitive format.
+* `masterDatabaseName` - (Required) The name of the master database created when the Lightsail database resource is created.
+* `masterPassword` - (Sensitive) The password for the master user of your new database. The password can include any printable ASCII character except `/`, `"`, or `@`.
+* `masterUsername` - The master user name for your new database.
+* `blueprintId` - (Required) The blueprint ID for your new database. A blueprint describes the major engine version of a database. You can get a list of database blueprint IDs by using the AWS CLI command: `aws lightsail get-relational-database-blueprints`
+* `bundleId` - (Required) The bundle ID for your new database. A bundle describes the performance specifications for your database (see list below). You can get a list of database bundle IDs by using the AWS CLI command: `aws lightsail get-relational-database-bundles`.
+* `preferredBackupWindow` - The daily time range during which automated backups are created for your new database if automated backups are enabled. Must be in the hh24:mi-hh24:mi format. Example: `16:00-16:30`. Specified in Coordinated Universal Time (UTC).
+* `preferredMaintenanceWindow` - The weekly time range during which system maintenance can occur on your new database. Must be in the ddd:hh24:mi-ddd:hh24:mi format. Specified in Coordinated Universal Time (UTC). Example: `Tue:17:00-Tue:17:30`
+* `publiclyAccessible` - Specifies the accessibility options for your new database. A value of true specifies a database that is available to resources outside of your Lightsail account. A value of false specifies a database that is available only to your Lightsail resources in the same region as your database.
+* `applyImmediately` - When true, applies changes immediately. When false, applies changes during the preferred maintenance window. Some changes may cause an outage.
+* `backupRetentionEnabled` - When true, enables automated backup retention for your database. When false, disables automated backup retention for your database. Disabling backup retention deletes all automated database backups. Before disabling this, you may want to create a snapshot of your database. 
+* `skipFinalSnapshot` - Determines whether a final database snapshot is created before your database is deleted. If true is specified, no database snapshot is created. If false is specified, a database snapshot is created before your database is deleted. You must specify the final relational database snapshot name parameter if the skip final snapshot parameter is false.
+* `finalSnapshotName` - (Required unless `skipFinalSnapshot = true`) The name of the database snapshot created if skip final snapshot is false, which is the default value for that parameter.
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value.
+
+## Blueprint IDs
+
+A list of all available Lightsail blueprints for relational databases can be obtained with the [aws lightsail get-relational-database-blueprints](https://docs.aws.amazon.com/cli/latest/reference/lightsail/get-relational-database-blueprints.html) AWS CLI command.
+
+### Examples
+
+- `mysql_8_0`
+- `postgres_12`
+
+### Prefix
+
+A Blueprint ID starts with a prefix of the engine type.
+
+### Suffix
+
+A Blueprint ID has a suffix of the engine version.
+
+## Bundles
+
+A list of all available Lightsail bundles for relational databases can be obtained with the [aws lightsail get-relational-database-bundles](https://docs.aws.amazon.com/cli/latest/reference/lightsail/get-relational-database-bundles.html) AWS CLI command.
+
+### Examples
+
+- `small_1_0`
+- `small_ha_1_0`
+- `large_1_0`
+- `large_ha_1_0`
+
+### Prefix
+
+A Bundle ID starts with one of the below size prefixes:
+
+- `micro`
+- `small`
+- `medium`
+- `large`
+
+### Infixes (Optional for HA Database)
+
+A Bundle ID can have the following infix added in order to use the HA option of the selected bundle.
+
+- `ha`
+
+### Suffix
+
+A Bundle ID ends with the following suffix: `1_0`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Lightsail database (matches `arn`).
+* `arn` - The ARN of the Lightsail database (matches `id`).
+* `caCertificateIdentifier` - The certificate associated with the database.
+* `createdAt` - The timestamp when the database was created.
+* `engine` - The database software (for example, MySQL).
+* `engineVersion` - The database engine version (for example, 5.7.23).
+* `cpuCount` - The number of vCPUs for the database.
+* `ramSize` - The amount of RAM in GB for the database.
+* `diskSize` - The size of the disk for the database.
+* `masterEndpointPort` - The master endpoint network port for the database.
+* `masterEndpointAddress` - The master endpoint FQDN for the database.
+* `secondaryAvailabilityZone` - Describes the secondary Availability Zone of a high availability database. The secondary database is used for failover support of a high availability database.
+* `supportCode` - The support code for the database. Include this code in your email to support when you have questions about a database in Lightsail. This code enables our support team to look up your Lightsail information more easily.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Databases using their name. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Lightsail Databases using their name. For example:
+
+```console
+% terraform import aws_lightsail_database.foo 'bar'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_disk.html.markdown b/website/docs/cdktf/typescript/r/lightsail_disk.html.markdown
new file mode 100644
index 00000000000..982bc0e76cc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_disk.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_disk"
+description: |-
+  Provides a Lightsail Disk resource
+---
+
+
+
+# Resource: aws_lightsail_disk
+
+Provides a Lightsail Disk resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones";
+import { LightsailDisk } from "./.gen/providers/aws/lightsail-disk";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const available = new DataAwsAvailabilityZones(this, "available", {
+      filter: [
+        {
+          name: "opt-in-status",
+          values: ["opt-in-not-required"],
+        },
+      ],
+      state: "available",
+    });
+    new LightsailDisk(this, "test", {
+      availabilityZone: Token.asString(propertyAccess(available.names, ["0"])),
+      name: "test",
+      sizeInGb: 8,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Lightsail disk.
+* `sizeInGb` - (Required) The size of the disk in GB.
+* `availabilityZone` - (Required) The Availability Zone in which to create your disk.
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the disk (matches `name`).
+* `arn` - The ARN of the Lightsail disk.
+* `createdAt` - The timestamp when the disk was created.
+* `supportCode` - The support code for the disk. Include this code in your email to support when you have questions about a disk in Lightsail. This code enables our support team to look up your Lightsail information more easily.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailDisk` using the name attribute. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailDisk` using the name attribute. For example:
+
+```console
+% terraform import aws_lightsail_disk.test test
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_disk_attachment.html.markdown b/website/docs/cdktf/typescript/r/lightsail_disk_attachment.html.markdown
new file mode 100644
index 00000000000..a7ba36333a2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_disk_attachment.html.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_disk_attachment"
+description: |-
+  Attaches a Lightsail disk to a Lightsail instance.
+---
+
+
+
+# Resource: aws_lightsail_disk_attachment
+
+Attaches a Lightsail disk to a Lightsail instance.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones";
+import { LightsailDisk } from "./.gen/providers/aws/lightsail-disk";
+import { LightsailDiskAttachment } from "./.gen/providers/aws/lightsail-disk-attachment";
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const available = new DataAwsAvailabilityZones(this, "available", {
+      filter: [
+        {
+          name: "opt-in-status",
+          values: ["opt-in-not-required"],
+        },
+      ],
+      state: "available",
+    });
+    const test = new LightsailDisk(this, "test", {
+      availabilityZone: Token.asString(propertyAccess(available.names, ["0"])),
+      name: "test-disk",
+      sizeInGb: 8,
+    });
+    const awsLightsailInstanceTest = new LightsailInstance(this, "test_2", {
+      availabilityZone: Token.asString(propertyAccess(available.names, ["0"])),
+      blueprintId: "amazon_linux_2",
+      bundleId: "nano_1_0",
+      name: "test-instance",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailInstanceTest.overrideLogicalId("test");
+    const awsLightsailDiskAttachmentTest = new LightsailDiskAttachment(
+      this,
+      "test_3",
+      {
+        diskName: test.name,
+        diskPath: "/dev/xvdf",
+        instanceName: Token.asString(awsLightsailInstanceTest.name),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailDiskAttachmentTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `diskName` - (Required) The name of the Lightsail Disk.
+* `instanceName` - (Required) The name of the Lightsail Instance to attach to.
+* `diskPath` - (Required) The disk path to expose to the instance. 
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes to create a unique id: `diskName`,`instanceName`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailDiskAttachment` using the id attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailDiskAttachment` using the id attribute. For example:
+
+```console
+% terraform import aws_lightsail_disk_attachment.test test-disk,test-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_distribution.html.markdown b/website/docs/cdktf/typescript/r/lightsail_distribution.html.markdown
new file mode 100644
index 00000000000..0f2c1ba2acd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_distribution.html.markdown
@@ -0,0 +1,340 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_distribution"
+description: |-
+  Terraform resource for managing an AWS Lightsail Distribution.
+---
+
+
+
+# Resource: aws_lightsail_distribution
+
+Terraform resource for managing an AWS Lightsail Distribution.
+
+## Example Usage
+
+### Basic Usage
+
+Below is a basic example with a bucket as an origin.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailBucket } from "./.gen/providers/aws/lightsail-bucket";
+import { LightsailDistribution } from "./.gen/providers/aws/lightsail-distribution";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailBucket(this, "test", {
+      bundleId: "small_1_0",
+      name: "test-bucket",
+    });
+    const awsLightsailDistributionTest = new LightsailDistribution(
+      this,
+      "test_1",
+      {
+        bundleId: "small_1_0",
+        cacheBehaviorSettings: {
+          allowedHttpMethods: "GET,HEAD,OPTIONS,PUT,PATCH,POST,DELETE",
+          cachedHttpMethods: "GET,HEAD",
+          defaultTtl: 86400,
+          forwardedCookies: {
+            option: "none",
+          },
+          forwardedHeaders: {
+            option: "default",
+          },
+          forwardedQueryStrings: {
+            option: false,
+          },
+          maximumTtl: 31536000,
+          minimumTtl: 0,
+        },
+        defaultCacheBehavior: {
+          behavior: "cache",
+        },
+        name: "test-distribution",
+        origin: {
+          name: test.name,
+          regionName: test.region,
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailDistributionTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+### Instance Origin Example
+
+Below is an example of an instance as the origin.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones";
+import { LightsailDistribution } from "./.gen/providers/aws/lightsail-distribution";
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+import { LightsailStaticIp } from "./.gen/providers/aws/lightsail-static-ip";
+import { LightsailStaticIpAttachment } from "./.gen/providers/aws/lightsail-static-ip-attachment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailStaticIp(this, "test", {
+      name: "test-static-ip",
+    });
+    const available = new DataAwsAvailabilityZones(this, "available", {
+      filter: [
+        {
+          name: "opt-in-status",
+          values: ["opt-in-not-required"],
+        },
+      ],
+      state: "available",
+    });
+    const awsLightsailInstanceTest = new LightsailInstance(this, "test_2", {
+      availabilityZone: Token.asString(propertyAccess(available.names, ["0"])),
+      blueprintId: "amazon_linux_2",
+      bundleId: "micro_1_0",
+      name: "test-instance",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailInstanceTest.overrideLogicalId("test");
+    const awsLightsailStaticIpAttachmentTest = new LightsailStaticIpAttachment(
+      this,
+      "test_3",
+      {
+        instanceName: Token.asString(awsLightsailInstanceTest.name),
+        staticIpName: test.name,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailStaticIpAttachmentTest.overrideLogicalId("test");
+    const awsLightsailDistributionTest = new LightsailDistribution(
+      this,
+      "test_4",
+      {
+        bundleId: "small_1_0",
+        defaultCacheBehavior: {
+          behavior: "cache",
+        },
+        dependsOn: [awsLightsailStaticIpAttachmentTest],
+        name: "test-distribution",
+        origin: {
+          name: Token.asString(awsLightsailInstanceTest.name),
+          regionName: Token.asString(available.id),
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailDistributionTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+### Load Balancer Origin Example
+
+Below is an example with a load balancer as an origin.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { LightsailDistribution } from "./.gen/providers/aws/lightsail-distribution"; +import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance"; +import { LightsailLb } from "./.gen/providers/aws/lightsail-lb"; +import { LightsailLbAttachment } from "./.gen/providers/aws/lightsail-lb-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new LightsailLb(this, "test", { + healthCheckPath: "/", + instancePort: Token.asNumber("80"), + name: "test-load-balancer", + tags: { + foo: "bar", + }, + }); + const available = new DataAwsAvailabilityZones(this, "available", { + filter: [ + { + name: "opt-in-status", + values: ["opt-in-not-required"], + }, + ], + state: "available", + }); + const awsLightsailInstanceTest = new LightsailInstance(this, "test_2", { + availabilityZone: Token.asString(propertyAccess(available.names, ["0"])), + blueprintId: "amazon_linux_2", + bundleId: "nano_1_0", + name: "test-instance", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLightsailInstanceTest.overrideLogicalId("test"); + const awsLightsailLbAttachmentTest = new LightsailLbAttachment( + this, + "test_3", + { + instanceName: Token.asString(awsLightsailInstanceTest.name), + lbName: test.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLightsailLbAttachmentTest.overrideLogicalId("test"); + const awsLightsailDistributionTest = new LightsailDistribution( + this, + "test_4", + { + bundleId: "small_1_0", + defaultCacheBehavior: { + behavior: "cache", + }, + dependsOn: [awsLightsailLbAttachmentTest], + name: "test-distribution", + origin: { + name: test.name, + regionName: Token.asString(available.id), + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLightsailDistributionTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the distribution. +* `bundleId` - (Required) Bundle ID to use for the distribution. +* `defaultCacheBehavior` - (Required) Object that describes the default cache behavior of the distribution. [Detailed below](#default_cache_behavior) +* `origin` - (Required) Object that describes the origin resource of the distribution, such as a Lightsail instance, bucket, or load balancer. [Detailed below](#origin) +* `cacheBehaviorSettings` - (Required) An object that describes the cache behavior settings of the distribution. [Detailed below](#cache_behavior_settings) + +The following arguments are optional: + +* `cacheBehavior` - (Optional) A set of configuration blocks that describe the per-path cache behavior of the distribution. [Detailed below](#cache_behavior) +* `certificateName` - (Optional) The name of the SSL/TLS certificate attached to the distribution, if any. +* `ipAddressType` - (Optional) The IP address type of the distribution. Default: `dualstack`. +* `isEnabled` - (Optional) Indicates whether the distribution is enabled. Default: `true`. +* `tags` - (Optional) Map of tags for the Lightsail Distribution. 
If
+  configured with a provider
+  [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block)
+  present, tags with matching keys will overwrite those defined at the provider-level.
+
+### default_cache_behavior
+
+* `behavior` - (Required) The cache behavior of the distribution. Valid values: `cache` and `dont-cache`.
+
+### origin
+
+* `name` - (Required) The name of the origin resource. Your origin can be an instance with an attached static IP, a bucket, or a load balancer that has at least one instance attached to it.
+* `protocolPolicy` - (Optional) The protocol that your Amazon Lightsail distribution uses when establishing a connection with your origin to pull content.
+* `regionName` - (Required) The AWS Region name of the origin resource.
+* `resourceType` - (Computed) The resource type of the origin resource (e.g., Instance).
+
+### cache_behavior_settings
+
+* `allowedHttpMethods` - (Optional) The HTTP methods that are processed and forwarded to the distribution's origin.
+* `cachedHttpMethods` - (Optional) The HTTP method responses that are cached by your distribution.
+* `defaultTtl` - (Optional) The default amount of time that objects stay in the distribution's cache before the distribution forwards another request to the origin to determine whether the content has been updated.
+* `forwardedCookies` - (Required) An object that describes the cookies that are forwarded to the origin. Your content is cached based on the cookies that are forwarded. [Detailed below](#forwarded_cookies)
+* `forwardedHeaders` - (Required) An object that describes the headers that are forwarded to the origin. Your content is cached based on the headers that are forwarded. [Detailed below](#forwarded_headers)
+* `forwardedQueryStrings` - (Required) An object that describes the query strings that are forwarded to the origin. Your content is cached based on the query strings that are forwarded. [Detailed below](#forwarded_query_strings)
+* `maximumTtl` - (Optional) The maximum amount of time that objects stay in the distribution's cache before the distribution forwards another request to the origin to determine whether the object has been updated.
+* `minimumTtl` - (Optional) The minimum amount of time that objects stay in the distribution's cache before the distribution forwards another request to the origin to determine whether the object has been updated.
+
+#### forwarded_cookies
+
+* `cookiesAllowList` - (Required) The specific cookies to forward to your distribution's origin.
+* `option` - (Optional) Specifies which cookies to forward to the distribution's origin for a cache behavior: all, none, or allow-list to forward only the cookies specified in the cookiesAllowList parameter.
+
+#### forwarded_headers
+
+* `headersAllowList` - (Required) The specific headers to forward to your distribution's origin.
+* `option` - (Optional) The headers that you want your distribution to forward to your origin and base caching on.
+
+#### forwarded_query_strings
+
+* `option` - (Optional) Indicates whether the distribution forwards and caches based on query strings.
+* `queryStringsAllowedList` - (Required) The specific query strings that the distribution forwards to the origin.
+
+### cache_behavior
+
+* `behavior` - (Required) The cache behavior for the specified path.
+* `path` - (Required) The path to a directory or file to cache, or not cache. 
Use an asterisk symbol to specify wildcard directories (path/to/assets/\*), and file types (\*.html, \*.jpg, \*.js). Directories and file paths are case-sensitive.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `alternativeDomainNames` - The alternate domain names of the distribution.
+* `arn` - The Amazon Resource Name (ARN) of the distribution.
+* `createdAt` - The timestamp when the distribution was created.
+* `domainName` - The domain name of the distribution.
+* `location` - An object that describes the location of the distribution, such as the AWS Region and Availability Zone. [Detailed below](#location)
+* `originPublicDns` - The public DNS of the origin.
+* `resourceType` - The Lightsail resource type (e.g., Distribution).
+* `status` - The status of the distribution.
+* `supportCode` - The support code. Include this code in your email to support when you have questions about your Lightsail distribution. This code enables our support team to look up your Lightsail information more easily.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### location
+
+* `availabilityZone` - The Availability Zone. Follows the format us-east-2a (case-sensitive).
+* `regionName` - The AWS Region name.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Distribution using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Lightsail Distribution using the `id`. For example:
+
+```console
+% terraform import aws_lightsail_distribution.example rft-8012925589
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_domain.html.markdown b/website/docs/cdktf/typescript/r/lightsail_domain.html.markdown
new file mode 100644
index 00000000000..9390375746e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_domain.html.markdown
@@ -0,0 +1,56 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_domain"
+description: |-
+  Provides a Lightsail Domain
+---
+
+
+
+# Resource: aws_lightsail_domain
+
+Creates a domain resource for the specified domain (e.g., example.com).
+You cannot register a new domain name using Lightsail. You must register
+a domain name using Amazon Route 53 or another domain name registrar.
+If you have already registered your domain, you can enter its name in
+this parameter to manage the DNS records for that domain. 
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions; please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailDomain } from "./.gen/providers/aws/lightsail-domain";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailDomain(this, "domain_test", {
+      domainName: "mydomain.com",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domainName` - (Required) The name of the Lightsail domain to manage
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name used for this domain
+* `arn` - The ARN of the Lightsail domain
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_domain_entry.html.markdown b/website/docs/cdktf/typescript/r/lightsail_domain_entry.html.markdown
new file mode 100644
index 00000000000..c0bf04492c8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_domain_entry.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_domain_entry"
+description: |-
+  Provides a Lightsail Domain Entry
+---
+
+
+
+# Resource: aws_lightsail_domain_entry
+
+Creates a domain entry resource.
+
+~> **NOTE on `id`:** In an effort to simplify imports, this resource `id` field has been updated to the standard resource id separator, a comma (`,`). For backward compatibility, the previous separator (underscore `_`) can still be used to read and import existing resources. When state is refreshed, the `id` will be updated to use the new standard separator. The previous separator will be deprecated in a future major release.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailDomain } from "./.gen/providers/aws/lightsail-domain";
+import { LightsailDomainEntry } from "./.gen/providers/aws/lightsail-domain-entry";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailDomain(this, "test", {
+      domainName: "mydomain.com",
+    });
+    const awsLightsailDomainEntryTest = new LightsailDomainEntry(
+      this,
+      "test_1",
+      {
+        domainName: test.domainName,
+        name: "www",
+        target: "127.0.0.1",
+        type: "A",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsLightsailDomainEntryTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domainName` - (Required) The name of the Lightsail domain in which to create the entry
+* `name` - (Required) Name of the entry record
+* `type` - (Required) Type of record
+* `target` - (Required) Target of the domain entry
+* `isAlias` - (Optional) If the entry should be an alias. Defaults to `false`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes to create a unique id: `name`,`domainName`,`type`,`target`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailDomainEntry` using the id attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailDomainEntry` using the id attribute. For example:
+
+```console
+% terraform import aws_lightsail_domain_entry.example www,mydomain.com,A,127.0.0.1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_instance.html.markdown b/website/docs/cdktf/typescript/r/lightsail_instance.html.markdown
new file mode 100644
index 00000000000..7015b929f56
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_instance.html.markdown
@@ -0,0 +1,226 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_instance"
+description: |-
+  Provides a Lightsail Instance
+---
+
+
+
+# Resource: aws_lightsail_instance
+
+Provides a Lightsail Instance. Amazon Lightsail is a service to provide easy virtual private servers
+with custom software already set up. See [What is Amazon Lightsail?](https://lightsail.aws.amazon.com/ls/docs/getting-started/article/what-is-amazon-lightsail)
+for more information.
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions; please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailInstance(this, "gitlab_test", {
+      availabilityZone: "us-east-1b",
+      blueprintId: "amazon_linux_2",
+      bundleId: "nano_1_0",
+      keyPairName: "some_key_name",
+      name: "custom_gitlab",
+      tags: {
+        foo: "bar",
+      },
+    });
+  }
+}
+
+```
+
+### Example With User Data
+
+Lightsail user data is handled differently than EC2 user data. Lightsail user data only accepts a single-line string.
The example below shows installing Apache and creating the index page.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailInstance(this, "custom", {
+      availabilityZone: "us-east-1b",
+      blueprintId: "amazon_linux_2",
+      bundleId: "nano_1_0",
+      name: "custom",
+      userData:
+        "sudo yum install -y httpd && sudo systemctl start httpd && sudo systemctl enable httpd && echo '<h1>Deployed via Terraform</h1>' | sudo tee /var/www/html/index.html",
+    });
+  }
+}
+
+```
+
+### Enable Auto Snapshots
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailInstance(this, "test", {
+      addOn: {
+        snapshotTime: "06:00",
+        status: "Enabled",
+        type: "AutoSnapshot",
+      },
+      availabilityZone: "us-east-1b",
+      blueprintId: "amazon_linux_2",
+      bundleId: "nano_1_0",
+      name: "custom_instance",
+      tags: {
+        foo: "bar",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Lightsail Instance. Names must be unique within each AWS Region in your Lightsail account.
+* `availabilityZone` - (Required) The Availability Zone in which to create your
+instance (see list below)
+* `blueprintId` - (Required) The ID for a virtual private server image. A list of available blueprint IDs can be obtained using the AWS CLI command: `aws lightsail get-blueprints`
+* `bundleId` - (Required) The bundle of specification information (see list below)
+* `keyPairName` - (Optional) The name of your key pair. Created in the
+Lightsail console (cannot use `awsKeyPair` at this time)
+* `userData` - (Optional) Single-line launch script, as a string, to configure the server with additional user data
+* `ipAddressType` - (Optional) The IP address type of the Lightsail Instance. Valid Values: `dualstack` | `ipv4`.
+* `addOn` - (Optional) The add-on configuration for the instance. [Detailed below](#add_on).
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `addOn`
+
+Defines the add-on configuration for the instance. The `addOn` configuration block supports the following arguments:
+
+* `type` - (Required) The add-on type. There is currently only one valid type, `AutoSnapshot`.
+* `snapshotTime` - (Required) The daily time when an automatic snapshot will be created. Must be in HH:00 format (hourly increments) and specified in Coordinated Universal Time (UTC). The snapshot will be automatically created between the time specified and up to 45 minutes after.
+* `status` - (Required) The status of the add-on. Valid Values: `Enabled`, `Disabled`.
+
+## Availability Zones
+
+Lightsail currently supports the following Availability Zones (e.g., `usEast1A`):
+
+- `apNortheast1{a,c,d}`
+- `apNortheast2{a,c}`
+- `apSouth1{a,b}`
+- `apSoutheast1{a,b,c}`
+- `apSoutheast2{a,b,c}`
+- `caCentral1{a,b}`
+- `euCentral1{a,b,c}`
+- `euWest1{a,b,c}`
+- `euWest2{a,b,c}`
+- `euWest3{a,b,c}`
+- `usEast1{a,b,c,d,e,f}`
+- `usEast2{a,b,c}`
+- `usWest2{a,b,c}`
+
+## Bundles
+
+Lightsail currently supports the following Bundle IDs (e.g., an instance in `apNortheast1` would use `small20`):
+
+### Prefix
+
+A Bundle ID starts with one of the below size prefixes:
+
+- `nano`
+- `micro`
+- `small`
+- `medium`
+- `large`
+- `xlarge`
+- `2Xlarge`
+
+### Suffix
+
+A Bundle ID ends with one of the following suffixes depending on Availability Zone:
+
+- ap-northeast-1: `20`
+- ap-northeast-2: `20`
+- ap-south-1: `21`
+- ap-southeast-1: `20`
+- ap-southeast-2: `22`
+- ca-central-1: `20`
+- eu-central-1: `20`
+- eu-west-1: `20`
+- eu-west-2: `20`
+- eu-west-3: `20`
+- us-east-1: `20`
+- us-east-2: `20`
+- us-west-2: `20`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Lightsail instance (matches `arn`).
+* `arn` - The ARN of the Lightsail instance (matches `id`).
+* `createdAt` - The timestamp when the instance was created.
+* `cpuCount` - The number of vCPUs the instance has.
+* `ramSize` - The amount of RAM in GB on the instance (e.g., 1.0).
+* `ipv6Addresses` - List of IPv6 addresses for the Lightsail instance.
+* `privateIpAddress` - The private IP address of the instance.
+* `publicIpAddress` - The public IP address of the instance.
+* `isStaticIp` - A Boolean value indicating whether this instance has a static IP assigned to it.
+* `username` - The user name for connecting to the instance (e.g., ec2-user).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Lightsail Instances using their name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Lightsail Instances using their name. For example:
+
+```console
+% terraform import aws_lightsail_instance.gitlab_test 'custom_gitlab'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_instance_public_ports.html.markdown b/website/docs/cdktf/typescript/r/lightsail_instance_public_ports.html.markdown
new file mode 100644
index 00000000000..058b5176864
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_instance_public_ports.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_instance_public_ports"
+description: |-
+  Provides a Lightsail Instance Public Ports resource
+---
+
+
+
+# Resource: aws_lightsail_instance_public_ports
+
+Opens ports for a specific Amazon Lightsail instance and specifies the IP addresses allowed to connect to the instance through those ports, along with the protocol.
+
+-> See [What is Amazon Lightsail?](https://lightsail.aws.amazon.com/ls/docs/getting-started/article/what-is-amazon-lightsail) for more information.
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions; please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones";
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+import { LightsailInstancePublicPorts } from "./.gen/providers/aws/lightsail-instance-public-ports";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const available = new DataAwsAvailabilityZones(this, "available", {
+      state: "available",
+    });
+    const test = new LightsailInstance(this, "test", {
+      availabilityZone: Token.asString(propertyAccess(available.names, ["0"])),
+      blueprintId: "amazon_linux_2",
+      bundleId: "nano_1_0",
+      name: "yak_sail",
+    });
+    const awsLightsailInstancePublicPortsTest =
+      new LightsailInstancePublicPorts(this, "test_1", {
+        instanceName: test.name,
+        portInfo: [
+          {
+            fromPort: 80,
+            protocol: "tcp",
+            toPort: 80,
+          },
+        ],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailInstancePublicPortsTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `instanceName` - (Required) Name of the Lightsail Instance.
+* `portInfo` - (Required) Configuration block with port information. AWS closes all currently open ports that are not included in the `portInfo`. Detailed below.
+
+### port_info
+
+The following arguments are required:
+
+* `fromPort` - (Required) First port in a range of open ports on an instance.
+* `protocol` - (Required) IP protocol name. Valid values are `tcp`, `all`, `udp`, and `icmp`.
+* `toPort` - (Required) Last port in a range of open ports on an instance.
+
+The following arguments are optional:
+
+* `cidrs` - (Optional) Set of CIDR blocks.
+* `cidrListAliases` - (Optional) Set of CIDR aliases that define access for a preconfigured range of IP addresses.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_key_pair.html.markdown b/website/docs/cdktf/typescript/r/lightsail_key_pair.html.markdown
new file mode 100644
index 00000000000..4a4a7c03c62
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_key_pair.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_key_pair"
+description: |-
+  Provides a Lightsail Key Pair
+---
+
+
+
+# Resource: aws_lightsail_key_pair
+
+Provides a Lightsail Key Pair, for use with Lightsail Instances. These key pairs
+are separate from EC2 Key Pairs, and must be created or imported for use with
+Lightsail.
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions; please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details.
+
+## Example Usage
+
+### Create New Key Pair
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailKeyPair } from "./.gen/providers/aws/lightsail-key-pair";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailKeyPair(this, "lg_key_pair", {
+      name: "lg_key_pair",
+    });
+  }
+}
+
+```
+
+### Create New Key Pair with PGP Encrypted Private Key
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailKeyPair } from "./.gen/providers/aws/lightsail-key-pair";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailKeyPair(this, "lg_key_pair", {
+      name: "lg_key_pair",
+      pgpKey: "keybase:keybaseusername",
+    });
+  }
+}
+
+```
+
+### Existing Public Key Import
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailKeyPair } from "./.gen/providers/aws/lightsail-key-pair";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailKeyPair(this, "lg_key_pair", {
+      name: "importing",
+      publicKey: Token.asString(Fn.file("~/.ssh/id_rsa.pub")),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the Lightsail Key Pair. If omitted, a unique name will be generated by Terraform
+* `pgpKey` – (Optional) An optional PGP key to encrypt the resulting private key material. Only used when creating a new key pair
+* `publicKey` - (Optional) The public key material. This public key will be imported into Lightsail. Only used when importing an existing public key
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+~> **NOTE:** A PGP key is not required; however, it is strongly encouraged. Without a PGP key, the private key material will be stored in state unencrypted. `pgpKey` is ignored if `publicKey` is supplied.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name used for this key pair.
+* `arn` - The ARN of the Lightsail key pair.
+* `encryptedFingerprint` - The MD5 public key fingerprint for the encrypted private key.
+* `encryptedPrivateKey` – The private key material, base64 encoded and encrypted with the given `pgpKey`. This is only populated when creating a new key and `pgpKey` is supplied.
+* `fingerprint` - The MD5 public key fingerprint as specified in section 4 of RFC 4716.
+* `publicKey` - The public key, base64 encoded.
+* `privateKey` - The private key, base64 encoded. This is only populated when creating a new key, and when no `pgpKey` is provided.
+
+## Import
+
+You cannot import Lightsail Key Pairs because the private and public key are only available on initial creation.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_lb.html.markdown b/website/docs/cdktf/typescript/r/lightsail_lb.html.markdown
new file mode 100644
index 00000000000..42347dbd169
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_lb.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_lb"
+description: |-
+  Provides a Lightsail Load Balancer
+---
+
+
+
+# Resource: aws_lightsail_lb
+
+Creates a Lightsail load balancer resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailLb } from "./.gen/providers/aws/lightsail-lb";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailLb(this, "test", {
+      healthCheckPath: "/",
+      instancePort: Token.asNumber("80"),
+      name: "test-load-balancer",
+      tags: {
+        foo: "bar",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Lightsail load balancer.
+* `instancePort` - (Required) The instance port the load balancer will connect to.
+* `healthCheckPath` - (Optional) The health check path of the load balancer. Default value "/".
+* `tags` - (Optional) A map of tags to assign to the resource. To create a key-only tag, use an empty string as the value. If configured with a provider `defaultTags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name used for this load balancer (matches `name`).
+* `arn` - The ARN of the Lightsail load balancer.
+* `createdAt` - The timestamp when the load balancer was created.
+* `dnsName` - The DNS name of the load balancer.
+* `protocol` - The protocol of the load balancer.
+* `publicPorts` - The public ports of the load balancer.
+* `supportCode` - The support code for the load balancer. Include this code in your email to support when you have questions about a load balancer in Lightsail. This code enables our support team to look up your Lightsail information more easily.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
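+
+Computed attributes such as `dnsName` and `arn` resolve only at apply time; one way to consume them is as stack outputs. The following is a minimal hand-written sketch, not `cdktf convert` output; the stack, resource, and output names are illustrative assumptions:
+
+```typescript
+// Hand-written sketch: exporting computed Lightsail load balancer attributes.
+// Stack, resource, and output names are illustrative assumptions.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { LightsailLb } from "./.gen/providers/aws/lightsail-lb";
+class LbOutputsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const lb = new LightsailLb(this, "example", {
+      instancePort: 80,
+      name: "example-load-balancer",
+    });
+    // Both values are unresolved tokens until `terraform apply` runs.
+    new TerraformOutput(this, "lbDnsName", { value: lb.dnsName });
+    new TerraformOutput(this, "lbArn", { value: lb.arn });
+  }
+}
+```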
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailLb` using the name attribute. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsLightsailLb` using the name attribute. For example: + +```console +% terraform import aws_lightsail_lb.test example-load-balancer +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/lightsail_lb_attachment.html.markdown b/website/docs/cdktf/typescript/r/lightsail_lb_attachment.html.markdown new file mode 100644 index 00000000000..d23ff255fd1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/lightsail_lb_attachment.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "Lightsail" +layout: "aws" +page_title: "AWS: aws_lightsail_lb_attachment" +description: |- + Attaches a Lightsail Instance to a Lightsail Load Balancer +--- + + + +# Resource: aws_lightsail_lb_attachment + +Attaches a Lightsail Instance to a Lightsail Load Balancer. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance"; +import { LightsailLb } from "./.gen/providers/aws/lightsail-lb"; +import { LightsailLbAttachment } from "./.gen/providers/aws/lightsail-lb-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new LightsailLb(this, "test", { + healthCheckPath: "/", + instancePort: Token.asNumber("80"), + name: "test-load-balancer", + tags: { + foo: "bar", + }, + }); + const available = new DataAwsAvailabilityZones(this, "available", { + filter: [ + { + name: "opt-in-status", + values: ["opt-in-not-required"], + }, + ], + state: "available", + }); + const awsLightsailInstanceTest = new LightsailInstance(this, "test_2", { + availabilityZone: Token.asString(propertyAccess(available.names, ["0"])), + blueprintId: "amazon_linux_2", + bundleId: "nano_1_0", + name: "test-instance", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLightsailInstanceTest.overrideLogicalId("test"); + const awsLightsailLbAttachmentTest = new LightsailLbAttachment( + this, + "test_3", + { + instanceName: Token.asString(awsLightsailInstanceTest.name), + lbName: test.name, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsLightsailLbAttachmentTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `lbName` - (Required) The name of the Lightsail load balancer. +* `instanceName` - (Required) The name of the instance to attach to the load balancer. 
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes to create a unique id: `lbName`,`instanceName`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailLbAttachment` using the name attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailLbAttachment` using the name attribute. For example:
+
+```console
+% terraform import aws_lightsail_lb_attachment.test example-load-balancer,example-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_lb_certificate.html.markdown b/website/docs/cdktf/typescript/r/lightsail_lb_certificate.html.markdown
new file mode 100644
index 00000000000..0cc5bcc5e51
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_lb_certificate.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_lb_certificate"
+description: |-
+  Provides a Lightsail Load Balancer Certificate
+---
+
+
+
+# Resource: aws_lightsail_lb_certificate
+
+Creates a Lightsail load balancer Certificate resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailLb } from "./.gen/providers/aws/lightsail-lb";
+import { LightsailLbCertificate } from "./.gen/providers/aws/lightsail-lb-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailLb(this, "test", {
+      healthCheckPath: "/",
+      instancePort: Token.asNumber("80"),
+      name: "test-load-balancer",
+      tags: {
+        foo: "bar",
+      },
+    });
+    const awsLightsailLbCertificateTest = new LightsailLbCertificate(
+      this,
+      "test_1",
+      {
+        domainName: "test.com",
+        lbName: test.id,
+        name: "test-load-balancer-certificate",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailLbCertificateTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domainName` - (Required) The domain name (e.g., example.com) for your SSL/TLS certificate.
+* `lbName` - (Required) The load balancer name where you want to create the SSL/TLS certificate.
+* `name` - (Required) The SSL/TLS certificate name.
+* `subjectAlternativeNames` - (Optional) Set of domains that should be SANs in the issued certificate. The `domainName` attribute is automatically added as a Subject Alternative Name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes to create a unique id: `lbName`,`name`
+* `arn` - The ARN of the Lightsail certificate.
+* `createdAt` - The timestamp when the certificate was created.
+* `domainValidationOptions` - Set of domain validation objects which can be used to complete certificate validation. Can have more than one element, e.g., if SANs are defined.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailLbCertificate` using the id attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailLbCertificate` using the id attribute. For example:
+
+```console
+% terraform import aws_lightsail_lb_certificate.test example-load-balancer,example-load-balancer-certificate
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_lb_certificate_attachment.html.markdown b/website/docs/cdktf/typescript/r/lightsail_lb_certificate_attachment.html.markdown
new file mode 100644
index 00000000000..668d1287d30
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_lb_certificate_attachment.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_lb_certificate_attachment"
+description: |-
+  Attaches a Lightsail Load Balancer Certificate to a Lightsail Load Balancer
+---
+
+
+
+# Resource: aws_lightsail_lb_certificate_attachment
+
+Attaches a Lightsail Load Balancer Certificate to a Lightsail Load Balancer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailLb } from "./.gen/providers/aws/lightsail-lb";
+import { LightsailLbCertificate } from "./.gen/providers/aws/lightsail-lb-certificate";
+import { LightsailLbCertificateAttachment } from "./.gen/providers/aws/lightsail-lb-certificate-attachment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailLb(this, "test", {
+      healthCheckPath: "/",
+      instancePort: Token.asNumber("80"),
+      name: "test-load-balancer",
+      tags: {
+        foo: "bar",
+      },
+    });
+    const awsLightsailLbCertificateTest = new LightsailLbCertificate(
+      this,
+      "test_1",
+      {
+        domainName: "test.com",
+        lbName: test.id,
+        name: "test-load-balancer-certificate",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailLbCertificateTest.overrideLogicalId("test");
+    const awsLightsailLbCertificateAttachmentTest =
+      new LightsailLbCertificateAttachment(this, "test_2", {
+        certificateName: Token.asString(awsLightsailLbCertificateTest.name),
+        lbName: test.name,
+      });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsLightsailLbCertificateAttachmentTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `lbName` - (Required) The name of the load balancer to which you want to associate the SSL/TLS certificate.
+* `certificateName` - (Required) The name of your SSL/TLS certificate.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A combination of attributes to create a unique id: `lbName`,`certificateName`
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailLbCertificateAttachment` using the name attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailLbCertificateAttachment` using the name attribute. For example:
+
+```console
+% terraform import aws_lightsail_lb_certificate_attachment.test example-load-balancer,example-certificate
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_lb_https_redirection_policy.html.markdown b/website/docs/cdktf/typescript/r/lightsail_lb_https_redirection_policy.html.markdown
new file mode 100644
index 00000000000..08e8fd4ce55
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_lb_https_redirection_policy.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_lb_https_redirection_policy"
+description: |-
+  Configures Https Redirection for a Lightsail Load Balancer
+---
+
+
+
+# Resource: aws_lightsail_lb_https_redirection_policy
+
+Configures Https Redirection for a Lightsail Load Balancer. A valid certificate must be attached to the load balancer in order to enable HTTPS redirection.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailLb } from "./.gen/providers/aws/lightsail-lb";
+import { LightsailLbCertificate } from "./.gen/providers/aws/lightsail-lb-certificate";
+import { LightsailLbCertificateAttachment } from "./.gen/providers/aws/lightsail-lb-certificate-attachment";
+import { LightsailLbHttpsRedirectionPolicy } from "./.gen/providers/aws/lightsail-lb-https-redirection-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailLb(this, "test", {
+      healthCheckPath: "/",
+      instancePort: Token.asNumber("80"),
+      name: "test-load-balancer",
+      tags: {
+        foo: "bar",
+      },
+    });
+    const awsLightsailLbCertificateTest = new LightsailLbCertificate(
+      this,
+      "test_1",
+      {
+        domainName: "test.com",
+        lbName: test.id,
+        name: "test-load-balancer-certificate",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsLightsailLbCertificateTest.overrideLogicalId("test");
+    const awsLightsailLbCertificateAttachmentTest =
+      new LightsailLbCertificateAttachment(this, "test_2", {
+        certificateName: Token.asString(awsLightsailLbCertificateTest.name),
+        lbName: test.name,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailLbCertificateAttachmentTest.overrideLogicalId("test");
+    const awsLightsailLbHttpsRedirectionPolicyTest =
+      new LightsailLbHttpsRedirectionPolicy(this, "test_3", {
+        enabled: true,
+        lbName: test.name,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailLbHttpsRedirectionPolicyTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `lbName` - (Required) The name of the load balancer on which you want to enable HTTP to HTTPS redirection.
+* `enabled` - (Required) The HTTPS redirection state of the load balancer. `true` to activate HTTP to HTTPS redirection or `false` to deactivate it.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name used for this load balancer (matches `lbName`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailLbHttpsRedirectionPolicy` using the `lbName` attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailLbHttpsRedirectionPolicy` using the `lbName` attribute. For example:
+
+```console
+% terraform import aws_lightsail_lb_https_redirection_policy.test example-load-balancer
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_lb_stickiness_policy.html.markdown b/website/docs/cdktf/typescript/r/lightsail_lb_stickiness_policy.html.markdown
new file mode 100644
index 00000000000..8e5b450fa20
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_lb_stickiness_policy.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_lb_stickiness_policy"
+description: |-
+  Configures Session Stickiness for a Lightsail Load Balancer
+---
+
+
+
+# Resource: aws_lightsail_lb_stickiness_policy
+
+Configures Session Stickiness for a Lightsail Load Balancer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailLb } from "./.gen/providers/aws/lightsail-lb";
+import { LightsailLbStickinessPolicy } from "./.gen/providers/aws/lightsail-lb-stickiness-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailLb(this, "test", {
+      healthCheckPath: "/",
+      instancePort: Token.asNumber("80"),
+      name: "test-load-balancer",
+      tags: {
+        foo: "bar",
+      },
+    });
+    const awsLightsailLbStickinessPolicyTest = new LightsailLbStickinessPolicy(
+      this,
+      "test_1",
+      {
+        cookieDuration: 900,
+        enabled: true,
+        lbName: test.name,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailLbStickinessPolicyTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `lbName` - (Required) The name of the load balancer on which you want to enable session stickiness.
+* `cookieDuration` - (Required) The cookie duration in seconds. This determines the length of the session stickiness.
+* `enabled` - (Required) The session stickiness state of the load balancer. `true` to activate session stickiness or `false` to deactivate it.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name used for this load balancer (matches `lbName`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLightsailLbStickinessPolicy` using the `lbName` attribute. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLightsailLbStickinessPolicy` using the `lbName` attribute. For example:
+
+```console
+% terraform import aws_lightsail_lb_stickiness_policy.test example-load-balancer
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_static_ip.html.markdown b/website/docs/cdktf/typescript/r/lightsail_static_ip.html.markdown
new file mode 100644
index 00000000000..de28dbc773b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_static_ip.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_static_ip"
+description: |-
+  Provides a Lightsail Static IP
+---
+
+
+
+# Resource: aws_lightsail_static_ip
+
+Allocates a static IP address.
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions; please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailStaticIp } from "./.gen/providers/aws/lightsail-static-ip";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LightsailStaticIp(this, "test", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name for the allocated static IP
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the Lightsail static IP
+* `ipAddress` - The allocated static IP address
+* `supportCode` - The support code.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/lightsail_static_ip_attachment.html.markdown b/website/docs/cdktf/typescript/r/lightsail_static_ip_attachment.html.markdown
new file mode 100644
index 00000000000..0f1deb743f8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/lightsail_static_ip_attachment.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "Lightsail"
+layout: "aws"
+page_title: "AWS: aws_lightsail_static_ip_attachment"
+description: |-
+  Provides a Lightsail Static IP Attachment
+---
+
+
+
+# Resource: aws_lightsail_static_ip_attachment
+
+Provides a static IP address attachment, a relationship between a Lightsail static IP and a Lightsail instance.
+
+~> **Note:** Lightsail is currently only supported in a limited number of AWS Regions; please see ["Regions and Availability Zones in Amazon Lightsail"](https://lightsail.aws.amazon.com/ls/docs/overview/article/understanding-regions-and-availability-zones-in-amazon-lightsail) for more details.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LightsailInstance } from "./.gen/providers/aws/lightsail-instance";
+import { LightsailStaticIp } from "./.gen/providers/aws/lightsail-static-ip";
+import { LightsailStaticIpAttachment } from "./.gen/providers/aws/lightsail-static-ip-attachment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new LightsailInstance(this, "test", {
+      availabilityZone: "us-east-1b",
+      blueprintId: "string",
+      bundleId: "string",
+      keyPairName: "some_key_name",
+      name: "example",
+    });
+    const awsLightsailStaticIpTest = new LightsailStaticIp(this, "test_1", {
+      name: "example",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLightsailStaticIpTest.overrideLogicalId("test");
+    const awsLightsailStaticIpAttachmentTest = new LightsailStaticIpAttachment(
+      this,
+      "test_2",
+      {
+        instanceName: test.id,
+        staticIpName: Token.asString(awsLightsailStaticIpTest.id),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsLightsailStaticIpAttachmentTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `staticIpName` - (Required) The name of the allocated static IP
+* `instanceName` - (Required) The name of the Lightsail instance to attach the IP to
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `ipAddress` - The allocated static IP address
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/load_balancer_backend_server_policy.html.markdown b/website/docs/cdktf/typescript/r/load_balancer_backend_server_policy.html.markdown
new file mode 100644
index 00000000000..00423fe9c0c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/load_balancer_backend_server_policy.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_load_balancer_backend_server_policy"
+description: |-
+  Attaches a load balancer policy to an ELB backend server.
+---
+
+
+
+# Resource: aws_load_balancer_backend_server_policy
+
+Attaches a load balancer policy to an ELB backend server.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { LoadBalancerBackendServerPolicy } from "./.gen/providers/aws/load-balancer-backend-server-policy";
+import { LoadBalancerPolicy } from "./.gen/providers/aws/load-balancer-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const wuTang = new Elb(this, "wu-tang", {
+      availabilityZones: ["us-east-1a"],
+      listener: [
+        {
+          instancePort: 443,
+          instanceProtocol: "http",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId:
+            "arn:aws:iam::000000000000:server-certificate/wu-tang.net",
+        },
+      ],
+      name: "wu-tang",
+      tags: {
+        Name: "wu-tang",
+      },
+    });
+    const wuTangCaPubkeyPolicy = new LoadBalancerPolicy(
+      this,
+      "wu-tang-ca-pubkey-policy",
+      {
+        loadBalancerName: wuTang.name,
+        policyAttribute: [
+          {
+            name: "PublicKey",
+            value: Token.asString(Fn.file("wu-tang-pubkey")),
+          },
+        ],
+        policyName: "wu-tang-ca-pubkey-policy",
+        policyTypeName: "PublicKeyPolicyType",
+      }
+    );
+    const wuTangRootCaBackendAuthPolicy = new LoadBalancerPolicy(
+      this,
+      "wu-tang-root-ca-backend-auth-policy",
+      {
+        loadBalancerName: wuTang.name,
+        policyAttribute: [
+          {
+            name: "PublicKeyPolicyName",
+            value: wuTangCaPubkeyPolicy.policyName,
+          },
+        ],
+        policyName: "wu-tang-root-ca-backend-auth-policy",
+        policyTypeName: "BackendServerAuthenticationPolicyType",
+      }
+    );
+    new LoadBalancerBackendServerPolicy(
+      this,
+      "wu-tang-backend-auth-policies-443",
+      {
+        instancePort: 443,
+        loadBalancerName: wuTang.name,
+        policyNames: [wuTangRootCaBackendAuthPolicy.policyName],
+      }
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `loadBalancerName` - (Required) The load balancer to attach the policy to.
+* `policyNames` - (Required) List of Policy Names to apply to the backend server.
+* `instancePort` - (Required) The instance port to apply the policy to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the policy.
+* `loadBalancerName` - The load balancer on which the policy is defined.
+* `instancePort` - The backend port the policies are applied to.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/load_balancer_listener_policy.html.markdown b/website/docs/cdktf/typescript/r/load_balancer_listener_policy.html.markdown
new file mode 100644
index 00000000000..111c87bc2c9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/load_balancer_listener_policy.html.markdown
@@ -0,0 +1,150 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_load_balancer_listener_policy"
+description: |-
+  Attaches a load balancer policy to an ELB Listener.
+---
+
+
+
+# Resource: aws_load_balancer_listener_policy
+
+Attaches a load balancer policy to an ELB Listener.
+
+## Example Usage
+
+### Custom Policy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { LoadBalancerListenerPolicy } from "./.gen/providers/aws/load-balancer-listener-policy";
+import { LoadBalancerPolicy } from "./.gen/providers/aws/load-balancer-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const wuTang = new Elb(this, "wu-tang", {
+      availabilityZones: ["us-east-1a"],
+      listener: [
+        {
+          instancePort: 443,
+          instanceProtocol: "http",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId:
+            "arn:aws:iam::000000000000:server-certificate/wu-tang.net",
+        },
+      ],
+      name: "wu-tang",
+      tags: {
+        Name: "wu-tang",
+      },
+    });
+    const wuTangSsl = new LoadBalancerPolicy(this, "wu-tang-ssl", {
+      loadBalancerName: wuTang.name,
+      policyAttribute: [
+        {
+          name: "ECDHE-ECDSA-AES128-GCM-SHA256",
+          value: "true",
+        },
+        {
+          name: "Protocol-TLSv1.2",
+          value: "true",
+        },
+      ],
+      policyName: "wu-tang-ssl",
+      policyTypeName: "SSLNegotiationPolicyType",
+    });
+    new LoadBalancerListenerPolicy(this, "wu-tang-listener-policies-443", {
+      loadBalancerName: wuTang.name,
+      loadBalancerPort: 443,
+      policyNames: [wuTangSsl.policyName],
+    });
+  }
+}
+
+```
+
+This example shows how to customize the TLS settings of an HTTPS listener.
+
+### AWS Predefined Security Policy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { LoadBalancerListenerPolicy } from "./.gen/providers/aws/load-balancer-listener-policy";
+import { LoadBalancerPolicy } from "./.gen/providers/aws/load-balancer-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const wuTang = new Elb(this, "wu-tang", {
+      availabilityZones: ["us-east-1a"],
+      listener: [
+        {
+          instancePort: 443,
+          instanceProtocol: "http",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId:
+            "arn:aws:iam::000000000000:server-certificate/wu-tang.net",
+        },
+      ],
+      name: "wu-tang",
+      tags: {
+        Name: "wu-tang",
+      },
+    });
+    const wuTangSslTls11 = new LoadBalancerPolicy(this, "wu-tang-ssl-tls-1-1", {
+      loadBalancerName: wuTang.name,
+      policyAttribute: [
+        {
+          name: "Reference-Security-Policy",
+          value: "ELBSecurityPolicy-TLS-1-1-2017-01",
+        },
+      ],
+      policyName: "wu-tang-ssl",
+      policyTypeName: "SSLNegotiationPolicyType",
+    });
+    new LoadBalancerListenerPolicy(this, "wu-tang-listener-policies-443", {
+      loadBalancerName: wuTang.name,
+      loadBalancerPort: 443,
+      policyNames: [wuTangSslTls11.policyName],
+    });
+  }
+}
+
+```
+
+This example shows how to add a [Predefined Security Policy for ELBs](https://docs.aws.amazon.com/elasticloadbalancing/latest/classic/elb-security-policy-table.html).
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `loadBalancerName` - (Required) The load balancer to attach the policy to.
+* `loadBalancerPort` - (Required) The load balancer listener port to apply the policy to.
+* `policyNames` - (Required) List of Policy Names to apply to the listener.
+* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger an update. To force an update without changing these keys/values, use the [`terraform taint` command](https://www.terraform.io/docs/commands/taint.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the policy.
+* `loadBalancerName` - The load balancer on which the policy is defined.
+* `loadBalancerPort` - The load balancer listener port the policies are applied to.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/load_balancer_policy.html.markdown b/website/docs/cdktf/typescript/r/load_balancer_policy.html.markdown
new file mode 100644
index 00000000000..cba9ee52239
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/load_balancer_policy.html.markdown
@@ -0,0 +1,138 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_load_balancer_policy"
+description: |-
+  Provides a load balancer policy, which can be attached to an ELB listener or backend server.
+---
+
+
+
+# Resource: aws_load_balancer_policy
+
+Provides a load balancer policy, which can be attached to an ELB listener or backend server.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { LoadBalancerBackendServerPolicy } from "./.gen/providers/aws/load-balancer-backend-server-policy";
+import { LoadBalancerListenerPolicy } from "./.gen/providers/aws/load-balancer-listener-policy";
+import { LoadBalancerPolicy } from "./.gen/providers/aws/load-balancer-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const wuTang = new Elb(this, "wu-tang", {
+      availabilityZones: ["us-east-1a"],
+      listener: [
+        {
+          instancePort: 443,
+          instanceProtocol: "http",
+          lbPort: 443,
+          lbProtocol: "https",
+          sslCertificateId:
+            "arn:aws:iam::000000000000:server-certificate/wu-tang.net",
+        },
+      ],
+      name: "wu-tang",
+      tags: {
+        Name: "wu-tang",
+      },
+    });
+    // Captured in a variable so the backend auth policy below can reference it.
+    const wuTangCaPubkeyPolicy = new LoadBalancerPolicy(
+      this,
+      "wu-tang-ca-pubkey-policy",
+      {
+        loadBalancerName: wuTang.name,
+        policyAttribute: [
+          {
+            name: "PublicKey",
+            value: Token.asString(Fn.file("wu-tang-pubkey")),
+          },
+        ],
+        policyName: "wu-tang-ca-pubkey-policy",
+        policyTypeName: "PublicKeyPolicyType",
+      }
+    );
+    const wuTangRootCaBackendAuthPolicy = new LoadBalancerPolicy(
+      this,
+      "wu-tang-root-ca-backend-auth-policy",
+      {
+        loadBalancerName: wuTang.name,
+        policyAttribute: [
+          {
+            name: "PublicKeyPolicyName",
+            value: wuTangCaPubkeyPolicy.policyName,
+          },
+        ],
+        policyName: "wu-tang-root-ca-backend-auth-policy",
+        policyTypeName: "BackendServerAuthenticationPolicyType",
+      }
+    );
+    const wuTangSsl = new LoadBalancerPolicy(this, "wu-tang-ssl", {
+      loadBalancerName: wuTang.name,
+      policyAttribute: [
+        {
+          name: "ECDHE-ECDSA-AES128-GCM-SHA256",
+          value: "true",
+        },
+        {
+          name: "Protocol-TLSv1.2",
+          value: "true",
+        },
+      ],
+      policyName: "wu-tang-ssl",
+      policyTypeName: "SSLNegotiationPolicyType",
+    });
+    new LoadBalancerPolicy(this, "wu-tang-ssl-tls-1-1", {
+      loadBalancerName: wuTang.name,
+      policyAttribute: [
+        {
+          name: "Reference-Security-Policy",
+          value: "ELBSecurityPolicy-TLS-1-1-2017-01",
+        },
+      ],
+      // Distinct policy name so it can coexist with the "wu-tang-ssl" policy above.
+      policyName: "wu-tang-ssl-tls-1-1",
+      policyTypeName: "SSLNegotiationPolicyType",
+    });
+    new LoadBalancerBackendServerPolicy(
+      this,
+      "wu-tang-backend-auth-policies-443",
+      {
+        instancePort: 443,
+        loadBalancerName: wuTang.name,
+        policyNames: [wuTangRootCaBackendAuthPolicy.policyName],
+      }
+    );
+    new LoadBalancerListenerPolicy(this, "wu-tang-listener-policies-443", {
+      loadBalancerName: wuTang.name,
+      loadBalancerPort: 443,
+      policyNames: [wuTangSsl.policyName],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `loadBalancerName` - (Required) The load balancer on which the policy is defined.
+* `policyName` - (Required) The name of the load balancer policy.
+* `policyTypeName` - (Required) The policy type.
+* `policyAttribute` - (Optional) Policy attribute to apply to the policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the policy.
+* `policyName` - The name of the policy.
+* `policyTypeName` - The policy type.
+* `loadBalancerName` - The load balancer on which the policy is defined. 
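+
+Which `policyAttribute` names are valid depends entirely on the chosen `policyTypeName`. As a further illustration, the following minimal sketch (not part of the original example) creates a `ProxyProtocolPolicyType` policy that enables the PROXY protocol for backend connections; it assumes `wuTang` is the `Elb` construct defined in the example above:
+
+```typescript
+// A hedged sketch: enable the PROXY protocol via a ProxyProtocolPolicyType policy.
+// `wuTang` is assumed to be the Elb construct from the example above.
+new LoadBalancerPolicy(this, "wu-tang-proxy-protocol", {
+  loadBalancerName: wuTang.name,
+  policyAttribute: [
+    {
+      name: "ProxyProtocol",
+      value: "true",
+    },
+  ],
+  policyName: "wu-tang-proxy-protocol",
+  policyTypeName: "ProxyProtocolPolicyType",
+});
+```
+
+A policy of this type takes effect once its name is attached to a backend server policy via `aws_load_balancer_backend_server_policy`, mirroring how the SSL policy above is attached to a listener.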
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/location_geofence_collection.html.markdown b/website/docs/cdktf/typescript/r/location_geofence_collection.html.markdown new file mode 100644 index 00000000000..25c5c688ca6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/location_geofence_collection.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_geofence_collection" +description: |- + Terraform resource for managing an AWS Location Geofence Collection. +--- + + + +# Resource: aws_location_geofence_collection + +Terraform resource for managing an AWS Location Geofence Collection. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LocationGeofenceCollection } from "./.gen/providers/aws/location-geofence-collection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LocationGeofenceCollection(this, "example", { + collectionName: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `collectionName` - (Required) The name of the geofence collection. + +The following arguments are optional: + +* `description` - (Optional) The optional description for the geofence collection. +* `kmsKeyId` - (Optional) A key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource. +* `tags` - (Optional) Key-value tags for the geofence collection. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `collectionArn` - The Amazon Resource Name (ARN) for the geofence collection resource. Used when you need to specify a resource across all AWS. +* `createTime` - The timestamp for when the geofence collection resource was created in ISO 8601 format. +* `updateTime` - The timestamp for when the geofence collection resource was last updated in ISO 8601 format. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Location Geofence Collection using the `collectionName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Location Geofence Collection using the `collectionName`. 
For example: + +```console +% terraform import aws_location_geofence_collection.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/location_map.html.markdown b/website/docs/cdktf/typescript/r/location_map.html.markdown new file mode 100644 index 00000000000..7bf91875b11 --- /dev/null +++ b/website/docs/cdktf/typescript/r/location_map.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_map" +description: |- + Provides a Location Service Map. +--- + + + +# Resource: aws_location_map + +Provides a Location Service Map. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LocationMap } from "./.gen/providers/aws/location-map"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LocationMap(this, "example", { + configuration: { + style: "VectorHereBerlin", + }, + mapName: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `configuration` - (Required) Configuration block with the map style selected from an available data provider. Detailed below. +* `mapName` - (Required) The name for the map resource. + +The following arguments are optional: + +* `description` - (Optional) An optional description for the map resource. +* `tags` - (Optional) Key-value tags for the map. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### configuration + +The following arguments are required: + +* `style` - (Required) Specifies the map style selected from an available data provider. Valid values can be found in the [Location Service CreateMap API Reference](https://docs.aws.amazon.com/location/latest/APIReference/API_CreateMap.html). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `createTime` - The timestamp for when the map resource was created in ISO 8601 format. +* `mapArn` - The Amazon Resource Name (ARN) for the map resource. Used to specify a resource across all AWS. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `updateTime` - The timestamp for when the map resource was last updated in ISO 8601 format. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLocationMap` resources using the map name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsLocationMap` resources using the map name. 
For example:
+
+```console
+% terraform import aws_location_map.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/location_place_index.html.markdown b/website/docs/cdktf/typescript/r/location_place_index.html.markdown
new file mode 100644
index 00000000000..b29a7557221
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/location_place_index.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_place_index"
+description: |-
+  Provides a Location Service Place Index.
+---
+
+
+
+# Resource: aws_location_place_index
+
+Provides a Location Service Place Index.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LocationPlaceIndex } from "./.gen/providers/aws/location-place-index";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LocationPlaceIndex(this, "example", {
+      dataSource: "Here",
+      indexName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `dataSource` - (Required) Specifies the geospatial data provider for the new place index.
+* `indexName` - (Required) The name of the place index resource.
+
+The following arguments are optional:
+
+* `dataSourceConfiguration` - (Optional) Configuration block with the data storage option chosen for requesting Places. Detailed below.
+* `description` - (Optional) The optional description for the place index resource.
+* `tags` - (Optional) Key-value tags for the place index. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### data_source_configuration
+
+The following arguments are optional:
+
+* `intendedUse` - (Optional) Specifies how the results of an operation will be stored by the caller. Valid values: `singleUse`, `storage`. Default: `singleUse`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `createTime` - The timestamp for when the place index resource was created in ISO 8601 format.
+* `indexArn` - The Amazon Resource Name (ARN) for the place index resource. Used to specify a resource across AWS.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `updateTime` - The timestamp for when the place index resource was last updated in ISO 8601 format.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLocationPlaceIndex` resources using the place index name. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLocationPlaceIndex` resources using the place index name. For example:
+
+```console
+% terraform import aws_location_place_index.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/location_route_calculator.html.markdown b/website/docs/cdktf/typescript/r/location_route_calculator.html.markdown
new file mode 100644
index 00000000000..daabcbb6e54
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/location_route_calculator.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_route_calculator"
+description: |-
+  Provides a Location Service Route Calculator.
+---
+
+
+
+# Resource: aws_location_route_calculator
+
+Provides a Location Service Route Calculator.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LocationRouteCalculator } from "./.gen/providers/aws/location-route-calculator";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new LocationRouteCalculator(this, "example", {
+      calculatorName: "example",
+      dataSource: "Here",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `calculatorName` - (Required) The name of the route calculator resource.
+* `dataSource` - (Required) Specifies the data provider of traffic and road network data.
+
+The following arguments are optional:
+
+* `description` - (Optional) The optional description for the route calculator resource.
+* `tags` - (Optional) Key-value tags for the route calculator. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `calculatorArn` - The Amazon Resource Name (ARN) for the route calculator resource. Use the ARN when you specify a resource across AWS.
+* `createTime` - The timestamp for when the route calculator resource was created in ISO 8601 format.
+* `updateTime` - The timestamp for when the route calculator resource was last updated in ISO 8601 format.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
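+
+Exported attributes such as `calculatorArn` can be consumed elsewhere in the stack. As a minimal sketch (assuming the `LocationRouteCalculator` above is captured in a variable rather than created anonymously), the ARN can be surfaced as a stack output:
+
+```typescript
+// Hedged sketch: capture the construct so its attributes can be referenced.
+const calculator = new LocationRouteCalculator(this, "example", {
+  calculatorName: "example",
+  dataSource: "Here",
+});
+// Expose the calculator ARN as a Terraform output.
+new TerraformOutput(this, "calculator_arn", {
+  value: calculator.calculatorArn,
+});
+```
+
+`TerraformOutput` is imported from the `cdktf` package alongside `TerraformStack`.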
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLocationRouteCalculator` using the route calculator name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsLocationRouteCalculator` using the route calculator name. For example: + +```console +% terraform import aws_location_route_calculator.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/location_tracker.html.markdown b/website/docs/cdktf/typescript/r/location_tracker.html.markdown new file mode 100644 index 00000000000..b15ec451960 --- /dev/null +++ b/website/docs/cdktf/typescript/r/location_tracker.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Location" +layout: "aws" +page_title: "AWS: aws_location_tracker" +description: |- + Provides a Location Service Tracker. +--- + + + +# Resource: aws_location_tracker + +Provides a Location Service Tracker. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LocationTracker } from "./.gen/providers/aws/location-tracker"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new LocationTracker(this, "example", { + trackerName: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `trackerName` - (Required) The name of the tracker resource. + +The following arguments are optional: + +* `description` - (Optional) The optional description for the tracker resource. +* `kmsKeyId` - (Optional) A key identifier for an AWS KMS customer managed key assigned to the Amazon Location resource. +* `positionFiltering` - (Optional) The position filtering method of the tracker resource. Valid values: `timeBased`, `distanceBased`, `accuracyBased`. Default: `timeBased`. +* `tags` - (Optional) Key-value tags for the tracker. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `createTime` - The timestamp for when the tracker resource was created in ISO 8601 format. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `trackerArn` - The Amazon Resource Name (ARN) for the tracker resource. Used when you need to specify a resource across all AWS. 
+
+* `updateTime` - The timestamp for when the tracker resource was last updated in ISO 8601 format.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsLocationTracker` resources using the tracker name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsLocationTracker` resources using the tracker name. For example:
+
+```console
+% terraform import aws_location_tracker.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/location_tracker_association.html.markdown b/website/docs/cdktf/typescript/r/location_tracker_association.html.markdown
new file mode 100644
index 00000000000..6565a8a3a4a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/location_tracker_association.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "Location"
+layout: "aws"
+page_title: "AWS: aws_location_tracker_association"
+description: |-
+  Terraform resource for managing an AWS Location Tracker Association.
+---
+
+
+
+# Resource: aws_location_tracker_association
+
+Terraform resource for managing an AWS Location Tracker Association.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { LocationGeofenceCollection } from "./.gen/providers/aws/location-geofence-collection";
+import { LocationTracker } from "./.gen/providers/aws/location-tracker";
+import { LocationTrackerAssociation } from "./.gen/providers/aws/location-tracker-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new LocationGeofenceCollection(this, "example", {
+      collectionName: "example",
+    });
+    const awsLocationTrackerExample = new LocationTracker(this, "example_1", {
+      trackerName: "example",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLocationTrackerExample.overrideLogicalId("example");
+    const awsLocationTrackerAssociationExample = new LocationTrackerAssociation(
+      this,
+      "example_2",
+      {
+        consumerArn: example.collectionArn,
+        trackerName: Token.asString(awsLocationTrackerExample.trackerName),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsLocationTrackerAssociationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `consumerArn` - (Required) The Amazon Resource Name (ARN) for the geofence collection to be associated with the tracker resource. Used when you need to specify a resource across all AWS.
+* `trackerName` - (Required) The name of the tracker resource to be associated with a geofence collection.
+
+## Attribute Reference
+
+This resource exports no additional attributes. 
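+
+The create and delete timeouts listed in the Timeouts section below can be overridden per resource. A hedged sketch (assuming, as is the usual cdktf codegen pattern, that the generated bindings expose a `timeouts` property on the config object):
+
+```typescript
+// Hedged sketch: widen the association's timeouts beyond the 30-minute defaults.
+// `example` is assumed to be the LocationGeofenceCollection from the example above.
+new LocationTrackerAssociation(this, "example_slow", {
+  consumerArn: example.collectionArn,
+  trackerName: "example",
+  timeouts: {
+    create: "45m",
+    delete: "45m",
+  },
+});
+```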
+ +## Timeouts + +`awsLocationTrackerAssociation` provides the following [Timeouts](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) configuration options: + +* `create` - (Optional, Default: `30M`) +* `delete` - (Optional, Default: `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Location Tracker Association using the `trackerName|consumerArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Location Tracker Association using the `trackerName|consumerArn`. For example: + +```console +% terraform import aws_location_tracker_association.example "tracker_name|consumer_arn" +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/macie2_account.html.markdown b/website/docs/cdktf/typescript/r/macie2_account.html.markdown new file mode 100644 index 00000000000..aa7bf4afd8b --- /dev/null +++ b/website/docs/cdktf/typescript/r/macie2_account.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "Macie" +layout: "aws" +page_title: "AWS: aws_macie2_account" +description: |- + Provides a resource to manage Amazon Macie on an AWS Account. +--- + + + +# Resource: aws_macie2_account + +Provides a resource to manage an [AWS Macie Account](https://docs.aws.amazon.com/macie/latest/APIReference/macie.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Macie2Account } from "./.gen/providers/aws/macie2-account"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Macie2Account(this, "test", { + findingPublishingFrequency: "FIFTEEN_MINUTES", + status: "ENABLED", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `findingPublishingFrequency` - (Optional) Specifies how often to publish updates to policy findings for the account. This includes publishing updates to AWS Security Hub and Amazon EventBridge (formerly called Amazon CloudWatch Events). Valid values are `fifteenMinutes`, `oneHour` or `sixHours`. +* `status` - (Optional) Specifies the status for the account. To enable Amazon Macie and start all Macie activities for the account, set this value to `enabled`. Valid values are `enabled` or `paused`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) of the macie account. +* `serviceRole` - The Amazon Resource Name (ARN) of the service-linked role that allows Macie to monitor and analyze data in AWS resources for the account. +* `createdAt` - The date and time, in UTC and extended RFC 3339 format, when the Amazon Macie account was created. +* `updatedAt` - The date and time, in UTC and extended RFC 3339 format, of the most recent change to the status of the Macie account. 
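+
+Because `status` can be changed in place, Macie can be suspended temporarily without losing its configuration or findings. A minimal sketch (a variation on the example above, not an additional account resource):
+
+```typescript
+// Hedged sketch: pause Macie activities by flipping status on the account resource.
+new Macie2Account(this, "test", {
+  findingPublishingFrequency: "FIFTEEN_MINUTES",
+  status: "PAUSED",
+});
+```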
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2Account` using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsMacie2Account` using the id. For example:
+
+```console
+% terraform import aws_macie2_account.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/macie2_classification_export_configuration.html.markdown b/website/docs/cdktf/typescript/r/macie2_classification_export_configuration.html.markdown
new file mode 100644
index 00000000000..4db2effd486
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/macie2_classification_export_configuration.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_classification_export_configuration"
+description: |-
+  Provides a resource to manage Classification Results - Export Configuration
+---
+
+
+
+# Resource: aws_macie2_classification_export_configuration
+
+Provides a resource to manage an [Amazon Macie Classification Export Configuration](https://docs.aws.amazon.com/macie/latest/APIReference/classification-export-configuration.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Macie2Account } from "./.gen/providers/aws/macie2-account";
+import { Macie2ClassificationExportConfiguration } from "./.gen/providers/aws/macie2-classification-export-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Macie2Account(this, "example", {});
+    const awsMacie2ClassificationExportConfigurationExample =
+      new Macie2ClassificationExportConfiguration(this, "example_1", {
+        dependsOn: [example],
+        s3Destination: {
+          bucketName: Token.asString(awsS3BucketExample.bucket),
+          keyPrefix: "exampleprefix/",
+          kmsKeyArn: Token.asString(awsKmsKeyExample.arn),
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMacie2ClassificationExportConfigurationExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `s3Destination` - (Required) Configuration block for an S3 destination. Defined below.
+
+### s3_destination Configuration Block
+
+The `s3Destination` configuration block supports the following arguments:
+
+* `bucketName` - (Required) The Amazon S3 bucket name in which Amazon Macie exports the data classification results.
+* `keyPrefix` - (Optional) The object key for the bucket in which Amazon Macie exports the data classification results.
+* `kmsKeyArn` - (Required) Amazon Resource Name (ARN) of the KMS key to be used to encrypt the data. 
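+
+The example above references an S3 bucket (`awsS3BucketExample`) and a KMS key (`awsKmsKeyExample`) that are assumed to be defined elsewhere in the stack. A hedged sketch of minimal stand-in definitions (names and values are illustrative only):
+
+```typescript
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+
+// Illustrative stand-ins for the bucket and key the example assumes.
+const awsS3BucketExample = new S3Bucket(this, "example_bucket", {
+  bucket: "example-macie-classification-results",
+});
+const awsKmsKeyExample = new KmsKey(this, "example_key", {
+  description: "Encrypts Macie classification results",
+  deletionWindowInDays: 7,
+});
+```
+
+Note that in practice the KMS key policy must also allow Macie to use the key, as described in the AWS documentation linked below.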
+
+Additional information can be found in [Storing and retaining sensitive data discovery results with Amazon Macie](https://docs.aws.amazon.com/macie/latest/user/discovery-results-repository-s3.html) in the AWS Macie documentation.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2ClassificationExportConfiguration` using the account ID and region. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsMacie2ClassificationExportConfiguration` using the account ID and region. For example:
+
+```console
+% terraform import aws_macie2_classification_export_configuration.example 123456789012:us-west-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/macie2_classification_job.html.markdown b/website/docs/cdktf/typescript/r/macie2_classification_job.html.markdown
new file mode 100644
index 00000000000..5a5f81d3e61
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/macie2_classification_job.html.markdown
@@ -0,0 +1,181 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_classification_job"
+description: |-
+  Provides a resource to manage an AWS Macie Classification Job.
+---
+
+
+
+# Resource: aws_macie2_classification_job
+
+Provides a resource to manage an [AWS Macie Classification Job](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Macie2Account } from "./.gen/providers/aws/macie2-account";
+import { Macie2ClassificationJob } from "./.gen/providers/aws/macie2-classification-job";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new Macie2Account(this, "test", {});
+    const awsMacie2ClassificationJobTest = new Macie2ClassificationJob(
+      this,
+      "test_1",
+      {
+        dependsOn: [test],
+        jobType: "ONE_TIME",
+        name: "NAME OF THE CLASSIFICATION JOB",
+        s3JobDefinition: {
+          bucketDefinitions: [
+            {
+              accountId: "ACCOUNT ID",
+              buckets: ["S3 BUCKET NAME"],
+            },
+          ],
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMacie2ClassificationJobTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `scheduleFrequency` - (Optional) The recurrence pattern for running the job. To run the job only once, don't specify a value for this property and set the value for the `jobType` property to `oneTime`. (documented below)
+* `customDataIdentifierIds` - (Optional) The custom data identifiers to use for data analysis and classification. 
+
+* `samplingPercentage` - (Optional) The sampling depth, as a percentage, to apply when processing objects. This value determines the percentage of eligible objects that the job analyzes. If this value is less than 100, Amazon Macie selects the objects to analyze at random, up to the specified percentage, and analyzes all the data in those objects.
+* `name` - (Optional) A custom name for the job. The name can contain as many as 500 characters. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) A custom description of the job. The description can contain as many as 200 characters.
+* `initialRun` - (Optional) Specifies whether to analyze all existing, eligible objects immediately after the job is created.
+* `jobType` - (Required) The schedule for running the job. Valid values are: `oneTime` - Run the job only once. If you specify this value, don't specify a value for the `scheduleFrequency` property. `scheduled` - Run the job on a daily, weekly, or monthly basis. If you specify this value, use the `scheduleFrequency` property to define the recurrence pattern for the job.
+* `s3JobDefinition` - (Optional) The S3 buckets that contain the objects to analyze, and the scope of that analysis. (documented below)
+* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the job. A job can have a maximum of 50 tags. Each tag consists of a tag key and an associated tag value. The maximum length of a tag key is 128 characters. The maximum length of a tag value is 256 characters.
+* `jobStatus` - (Optional) The status for the job. Valid values are: `cancelled`, `running`, and `userPaused`.
+
+The `scheduleFrequency` object supports the following:
+
+* `dailySchedule` - (Optional) Specifies a daily recurrence pattern for running the job.
+* `weeklySchedule` - (Optional) Specifies a weekly recurrence pattern for running the job.
+* `monthlySchedule` - (Optional) Specifies a monthly recurrence pattern for running the job.
+
+The `s3JobDefinition` object supports the following:
+
+* `bucketCriteria` - (Optional) The property- and tag-based conditions that determine which S3 buckets to include or exclude from the analysis. Conflicts with `bucketDefinitions`. (documented below)
+* `bucketDefinitions` - (Optional) An array of objects, one for each AWS account that owns buckets to analyze. Each object specifies the account ID for an account and one or more buckets to analyze for the account. Conflicts with `bucketCriteria`. (documented below)
+* `scoping` - (Optional) The property- and tag-based conditions that determine which objects to include or exclude from the analysis. (documented below)
+
+### bucket_criteria Configuration Block
+
+The `bucketCriteria` object supports the following:
+
+* `excludes` - (Optional) The property- or tag-based conditions that determine which S3 buckets to exclude from the analysis. (documented below)
+* `includes` - (Optional) The property- or tag-based conditions that determine which S3 buckets to include in the analysis. (documented below)
+
+The `excludes` and `includes` objects support the following:
+
+* `and` - (Optional) An array of conditions, one for each condition that determines which S3 buckets to include or exclude from the job. (documented below)
+
+The `and` object supports the following:
+
+* `simpleCriterion` - (Optional) A property-based condition that defines a property, operator, and one or more values for including or excluding S3 buckets from the job. (documented below)
+* `tagCriterion` - (Optional) A tag-based condition that defines the operator and tag keys or tag key and value pairs for including or excluding S3 buckets from the job. (documented below)
+
+The `simpleCriterion` object supports the following:
+
+* `comparator` - (Required) The operator to use in a condition. Valid combination of values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-jobcomparator)
+* `key` - (Required) The object property to use in the condition. Valid combination of values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-simplecriterionkeyforjob)
+* `values` - (Required) An array that lists the values to use in the condition. Valid combination of values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-simplecriterionforjob)
+
+The `tagCriterion` object supports the following:
+
+* `comparator` - (Required) The operator to use in the condition. Valid combination and values are available in the [AWS Documentation](https://docs.aws.amazon.com/macie/latest/APIReference/jobs.html#jobs-model-jobcomparator)
+* `tagValues` - (Required) The tag key and value pairs to use in the condition. One or more blocks are allowed. (documented below)
+
+The `tagValues` object supports the following:
+
+* `key` - (Required) The tag key.
+* `value` - (Required) The tag value.
+
+### bucket_definitions Configuration Block
+
+The `bucketDefinitions` object supports the following:
+
+* `accountId` - (Required) The unique identifier for the AWS account that owns the buckets.
+* `buckets` - (Required) An array that lists the names of the buckets.
+
+### scoping Configuration Block
+
+The `scoping` object supports the following:
+
+* `excludes` - (Optional) The property- or tag-based conditions that determine which objects to exclude from the analysis. (documented below)
+* `includes` - (Optional) The property- or tag-based conditions that determine which objects to include in the analysis. (documented below)
+
+The `excludes` and `includes` objects support the following:
+
+* `and` - (Optional) An array of conditions, one for each condition that determines which objects to include or exclude from the job. (documented below)
+
+The `and` object supports the following:
+
+* `simpleScopeTerm` - (Optional) A property-based condition that defines a property, operator, and one or more values for including or excluding an object from the job. (documented below)
+* `tagScopeTerm` - (Optional) A tag-based condition that defines the operator and tag keys or tag key and value pairs for including or excluding an object from the job. (documented below)
+
+The `simpleScopeTerm` object supports the following:
+
+* `comparator` - (Optional) The operator to use in a condition. Valid values are: `eq`, `gt`, `gte`, `lt`, `lte`, `ne`, `contains`, `startsWith`
+* `values` - (Optional) An array that lists the values to use in the condition.
+* `key` - (Optional) The object property to use in the condition.
+
+The `tagScopeTerm` object supports the following:
+
+* `comparator` - (Optional) The operator to use in the condition. 
+
+* `tagValues` - (Optional) The tag keys or tag key and value pairs to use in the condition.
+* `key` - (Required) The tag key to use in the condition. The only valid value is `tag`.
+* `target` - (Required) The type of object to apply the condition to. The only valid value is `s3Object`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the macie classification job.
+* `createdAt` - The date and time, in UTC and extended RFC 3339 format, when the job was created.
+* `userPausedDetails` - If the current status of the job is `userPaused`, specifies when the job was paused and when the job or job run will expire and be cancelled if it isn't resumed. This value is present only if the value for `jobStatus` is `userPaused`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2ClassificationJob` using the id. For example:

+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsMacie2ClassificationJob` using the id. For example:
+
+```console
+% terraform import aws_macie2_classification_job.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/macie2_custom_data_identifier.html.markdown b/website/docs/cdktf/typescript/r/macie2_custom_data_identifier.html.markdown
new file mode 100644
index 00000000000..e34f831677a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/macie2_custom_data_identifier.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_custom_data_identifier"
+description: |-
+  Provides a resource to manage an AWS Macie Custom Data Identifier.
+---
+
+
+
+# Resource: aws_macie2_custom_data_identifier
+
+Provides a resource to manage an [AWS Macie Custom Data Identifier](https://docs.aws.amazon.com/macie/latest/APIReference/custom-data-identifiers-id.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Macie2Account } from "./.gen/providers/aws/macie2-account";
+import { Macie2CustomDataIdentifier } from "./.gen/providers/aws/macie2-custom-data-identifier";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Macie must be enabled before the custom data identifier is created.
+    const example = new Macie2Account(this, "example", {});
+    const awsMacie2CustomDataIdentifierExample = new Macie2CustomDataIdentifier(
+      this,
+      "example_1",
+      {
+        dependsOn: [example],
+        description: "DESCRIPTION",
+        ignoreWords: ["ignore"],
+        keywords: ["keyword"],
+        maximumMatchDistance: 10,
+        name: "NAME OF CUSTOM DATA IDENTIFIER",
+        regex: "[0-9]{3}-[0-9]{2}-[0-9]{4}",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsMacie2CustomDataIdentifierExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `regex` - (Optional) The regular expression (regex) that defines the pattern to match. The expression can contain as many as 512 characters. +* `keywords` - (Optional) An array that lists specific character sequences (keywords), one of which must be within proximity (`maximumMatchDistance`) of the regular expression to match. The array can contain as many as 50 keywords. Each keyword can contain 3 - 90 characters. Keywords aren't case sensitive. +* `ignoreWords` - (Optional) An array that lists specific character sequences (ignore words) to exclude from the results. If the text matched by the regular expression is the same as any string in this array, Amazon Macie ignores it. The array can contain as many as 10 ignore words. Each ignore word can contain 4 - 90 characters. Ignore words are case sensitive. +* `name` - (Optional) A custom name for the custom data identifier. The name can contain as many as 128 characters. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`. +* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `description` - (Optional) A custom description of the custom data identifier. The description can contain as many as 512 characters. +* `maximumMatchDistance` - (Optional) The maximum number of characters that can exist between text that matches the regex pattern and the character sequences specified by the keywords array. Macie includes or excludes a result based on the proximity of a keyword to text that matches the regex pattern. The distance can be 1 - 300 characters. The default value is 50. +* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the custom data identifier. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) of the macie custom data identifier. +* `deleted` - Specifies whether the custom data identifier was deleted. If you delete a custom data identifier, Amazon Macie doesn't delete it permanently. Instead, it soft deletes the identifier. +* `createdAt` - The date and time, in UTC and extended RFC 3339 format, when the Amazon Macie account was created. +* `arn` - The Amazon Resource Name (ARN) of the custom data identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2CustomDataIdentifier` using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsMacie2CustomDataIdentifier` using the id. 
For example:
+
+```console
+% terraform import aws_macie2_custom_data_identifier.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/macie2_findings_filter.html.markdown b/website/docs/cdktf/typescript/r/macie2_findings_filter.html.markdown
new file mode 100644
index 00000000000..ebe51b3b4fd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/macie2_findings_filter.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_findings_filter"
+description: |-
+  Provides a resource to manage an Amazon Macie Findings Filter.
+---
+
+
+
+# Resource: aws_macie2_findings_filter
+
+Provides a resource to manage an [Amazon Macie Findings Filter](https://docs.aws.amazon.com/macie/latest/APIReference/findingsfilters-id.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { Macie2Account } from "./.gen/providers/aws/macie2-account";
+import { Macie2FindingsFilter } from "./.gen/providers/aws/macie2-findings-filter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Look up the current region so findings can be filtered by it.
+    const current = new DataAwsRegion(this, "current");
+    const example = new Macie2Account(this, "example", {});
+    new Macie2FindingsFilter(this, "test", {
+      action: "ARCHIVE",
+      dependsOn: [example],
+      description: "DESCRIPTION",
+      findingCriteria: {
+        criterion: [
+          {
+            eq: [Token.asString(current.name)],
+            field: "region",
+          },
+        ],
+      },
+      name: "NAME OF THE FINDINGS FILTER",
+      position: 1,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `findingCriteria` - (Required) The criteria to use to filter findings.
+* `name` - (Optional) A custom name for the filter. The name must contain at least 3 characters and can contain as many as 64 characters. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) A custom description of the filter. The description can contain as many as 512 characters.
+* `action` - (Required) The action to perform on findings that meet the filter criteria (`findingCriteria`). Valid values are: `archive`, which suppresses (automatically archives) the findings, and `noop`, which doesn't perform any action on the findings.
+* `position` - (Optional) The position of the filter in the list of saved filters on the Amazon Macie console. This value also determines the order in which the filter is applied to findings, relative to other filters that are also applied to the findings.
+* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the filter.
+
+The `findingCriteria` object supports the following:
+
+* `criterion` - (Optional) A condition that specifies the property, operator, and one or more values to use to filter the results. (documented below)
+
+The `criterion` object supports the following:
+
+* `field` - (Required) The name of the field to be evaluated.
+* `eqExactMatch` - (Optional) The value for the property exclusively matches (equals an exact match for) all the specified values. If you specify multiple values, Amazon Macie uses AND logic to join the values. 
+* `eq` - (Optional) The value for the property matches (equals) the specified value. If you specify multiple values, Amazon Macie uses OR logic to join the values.
+* `neq` - (Optional) The value for the property doesn't match (doesn't equal) the specified value. If you specify multiple values, Amazon Macie uses OR logic to join the values.
+* `lt` - (Optional) The value for the property is less than the specified value.
+* `lte` - (Optional) The value for the property is less than or equal to the specified value.
+* `gt` - (Optional) The value for the property is greater than the specified value.
+* `gte` - (Optional) The value for the property is greater than or equal to the specified value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the Macie Findings Filter.
+* `arn` - The Amazon Resource Name (ARN) of the Findings Filter.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2FindingsFilter` using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsMacie2FindingsFilter` using the id. For example:
+
+```console
+% terraform import aws_macie2_findings_filter.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/macie2_invitation_accepter.html.markdown b/website/docs/cdktf/typescript/r/macie2_invitation_accepter.html.markdown
new file mode 100644
index 00000000000..41e3cb01b07
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/macie2_invitation_accepter.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_invitation_accepter"
+description: |-
+  Provides a resource to manage an Amazon Macie Invitation Accepter.
+---
+
+
+# Resource: aws_macie2_invitation_accepter
+
+Provides a resource to manage an [Amazon Macie Invitation Accepter](https://docs.aws.amazon.com/macie/latest/APIReference/invitations-accept.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Macie2Account } from "./.gen/providers/aws/macie2-account";
+import { Macie2InvitationAccepter } from "./.gen/providers/aws/macie2-invitation-accepter";
+import { Macie2Member } from "./.gen/providers/aws/macie2-member";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Macie2Account(this, "member", {});
+    const primary = new Macie2Account(this, "primary", {
+      provider: "awsalternate",
+    });
+    const awsMacie2MemberPrimary = new Macie2Member(this, "primary_2", {
+      accountId: "ACCOUNT ID",
+      dependsOn: [primary],
+      email: "EMAIL",
+      invitationMessage: "Message of the invite",
+      invite: true,
+      provider: "awsalternate",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMacie2MemberPrimary.overrideLogicalId("primary");
+    const awsMacie2InvitationAccepterMember = new Macie2InvitationAccepter(
+      this,
+      "member_3",
+      {
+        administratorAccountId: "ADMINISTRATOR ACCOUNT ID",
+        dependsOn: [awsMacie2MemberPrimary],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMacie2InvitationAccepterMember.overrideLogicalId("member");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `administratorAccountId` - (Required) The AWS account ID for the account that sent the invitation.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the Macie Invitation Accepter.
+* `invitationId` - The unique identifier for the invitation.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2InvitationAccepter` using the admin account ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsMacie2InvitationAccepter` using the admin account ID. For example:
+
+```console
+% terraform import aws_macie2_invitation_accepter.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/macie2_member.html.markdown b/website/docs/cdktf/typescript/r/macie2_member.html.markdown
new file mode 100644
index 00000000000..0c011aa8a66
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/macie2_member.html.markdown
@@ -0,0 +1,91 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_member"
+description: |-
+  Provides a resource to manage an Amazon Macie Member.
+---
+
+
+# Resource: aws_macie2_member
+
+Provides a resource to manage an [Amazon Macie Member](https://docs.aws.amazon.com/macie/latest/APIReference/members-id.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Macie2Account } from "./.gen/providers/aws/macie2-account";
+import { Macie2Member } from "./.gen/providers/aws/macie2-member";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Macie2Account(this, "example", {});
+    const awsMacie2MemberExample = new Macie2Member(this, "example_1", {
+      accountId: "AWS ACCOUNT ID",
+      dependsOn: [example],
+      email: "EMAIL",
+      invitationDisableEmailNotification: true,
+      invitationMessage: "Message of the invitation",
+      invite: true,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMacie2MemberExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accountId` - (Required) The AWS account ID for the account.
+* `email` - (Required) The email address for the account.
+* `tags` - (Optional) A map of key-value pairs that specifies the tags to associate with the account in Amazon Macie.
+* `status` - (Optional) Specifies the status for the account. To enable Amazon Macie and start all Macie activities for the account, set this value to `enabled`. Valid values are `enabled` or `paused`.
+* `invite` - (Optional) Send an invitation to a member.
+* `invitationMessage` - (Optional) A custom message to include in the invitation. Amazon Macie adds this message to the standard content that it sends for an invitation.
+* `invitationDisableEmailNotification` - (Optional) Specifies whether to send an email notification to the root user of each account that the invitation will be sent to. This notification is in addition to an alert that the root user receives in AWS Personal Health Dashboard. To send an email notification to the root user of each account, set this value to `true`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the Macie Member.
+* `arn` - The Amazon Resource Name (ARN) of the account.
+* `relationshipStatus` - The current status of the relationship between the account and the administrator account.
+* `administratorAccountId` - The AWS account ID for the administrator account.
+* `invitedAt` - The date and time, in UTC and extended RFC 3339 format, when an Amazon Macie membership invitation was last sent to the account. This value is null if a Macie invitation hasn't been sent to the account.
+* `updatedAt` - The date and time, in UTC and extended RFC 3339 format, of the most recent change to the status of the relationship between the account and the administrator account.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2Member` using the account ID of the member account. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsMacie2Member` using the account ID of the member account. For example:
+
+```console
+% terraform import aws_macie2_member.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/macie2_organization_admin_account.html.markdown b/website/docs/cdktf/typescript/r/macie2_organization_admin_account.html.markdown
new file mode 100644
index 00000000000..8d6ac304b52
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/macie2_organization_admin_account.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Macie"
+layout: "aws"
+page_title: "AWS: aws_macie2_organization_admin_account"
+description: |-
+  Provides a resource to manage an Amazon Macie Organization Admin Account.
+---
+
+
+# Resource: aws_macie2_organization_admin_account
+
+Provides a resource to manage an [Amazon Macie Organization Admin Account](https://docs.aws.amazon.com/macie/latest/APIReference/admin.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Macie2Account } from "./.gen/providers/aws/macie2-account";
+import { Macie2OrganizationAdminAccount } from "./.gen/providers/aws/macie2-organization-admin-account";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Macie2Account(this, "example", {});
+    const awsMacie2OrganizationAdminAccountExample =
+      new Macie2OrganizationAdminAccount(this, "example_1", {
+        adminAccountId: "ID OF THE ADMIN ACCOUNT",
+        dependsOn: [example],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMacie2OrganizationAdminAccountExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `adminAccountId` - (Required) The AWS account ID for the account to designate as the delegated Amazon Macie administrator account for the organization.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the Macie Organization Admin Account.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsMacie2OrganizationAdminAccount` using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsMacie2OrganizationAdminAccount` using the id. For example:
+
+```console
+% terraform import aws_macie2_organization_admin_account.example abcd1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/main_route_table_association.html.markdown b/website/docs/cdktf/typescript/r/main_route_table_association.html.markdown
new file mode 100644
index 00000000000..4e14f0d4635
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/main_route_table_association.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_main_route_table_association"
+description: |-
+  Provides a resource for managing the main routing table of a VPC.
+---
+
+
+# Resource: aws_main_route_table_association
+
+Provides a resource for managing the main routing table of a VPC.
+
+~> **NOTE:** **Do not** use both `awsDefaultRouteTable` to manage a default route table **and** `awsMainRouteTableAssociation` with the same VPC due to possible route conflicts. See [aws_default_route_table][tf-default-route-table] documentation for more details.
+For more information, see the Amazon VPC User Guide on [Route Tables](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html). For information about managing normal route tables in Terraform, see [`awsRouteTable`](/docs/providers/aws/r/route_table.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { MainRouteTableAssociation } from "./.gen/providers/aws/main-route-table-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MainRouteTableAssociation(this, "a", {
+      routeTableId: bar.id,
+      vpcId: foo.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `vpcId` - (Required) The ID of the VPC whose main route table should be set
+* `routeTableId` - (Required) The ID of the Route Table to set as the new
+  main route table for the target VPC
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Route Table Association
+* `originalRouteTableId` - Used internally, see __Notes__ below
+
+## Notes
+
+On VPC creation, the AWS API always creates an initial Main Route Table. This
+resource records the ID of that Route Table under `originalRouteTableId`.
+The "Delete" action for a `mainRouteTableAssociation` consists of resetting
+this original table as the Main Route Table for the VPC. You'll see this
+additional Route Table in the AWS console; it must remain intact in order for
+the `mainRouteTableAssociation` delete to work properly.
+
+[aws-route-tables]: http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Route_Tables.html#Route_Replacing_Main_Table
+[tf-route-tables]: /docs/providers/aws/r/route_table.html
+[tf-default-route-table]: /docs/providers/aws/r/default_route_table.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `2m`)
+- `delete` - (Default `5m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/media_convert_queue.html.markdown b/website/docs/cdktf/typescript/r/media_convert_queue.html.markdown
new file mode 100644
index 00000000000..dbe86b7462a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/media_convert_queue.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "Elemental MediaConvert"
+layout: "aws"
+page_title: "AWS: aws_media_convert_queue"
+description: |-
+  Provides an AWS Elemental MediaConvert Queue.
+---
+
+
+# Resource: aws_media_convert_queue
+
+Provides an AWS Elemental MediaConvert Queue.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { MediaConvertQueue } from "./.gen/providers/aws/media-convert-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MediaConvertQueue(this, "test", {
+      name: "tf-test-queue",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A unique identifier describing the queue.
+* `description` - (Optional) A description of the queue.
+* `pricingPlan` - (Optional) Specifies whether the pricing plan for the queue is on-demand or reserved. Valid values are `onDemand` or `reserved`. Defaults to `onDemand`.
+* `reservationPlanSettings` - (Optional) Details of the pricing plan for a reserved queue. See below.
+* `status` - (Optional) The status of the queue. Valid values are `active` or `paused`. Defaults to `active`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Fields
+
+#### `reservationPlanSettings`
+
+* `commitment` - (Required) The length of the term of your reserved queue pricing plan commitment. Valid value is `oneYear`.
+* `renewalType` - (Required) Specifies whether the term of your reserved queue pricing plan is automatically extended or expires when the term ends. Valid values are `autoRenew` or `expire`.
+* `reservedSlots` - (Required) Specifies the number of reserved transcode slots (RTS) for the queue.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The same as `name`
+* `arn` - The ARN of the queue
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Media Convert Queue using the queue name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Media Convert Queue using the queue name. For example:
+
+```console
+% terraform import aws_media_convert_queue.test tf-test-queue
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/media_package_channel.html.markdown b/website/docs/cdktf/typescript/r/media_package_channel.html.markdown
new file mode 100644
index 00000000000..71d2c83dc12
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/media_package_channel.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Elemental MediaPackage"
+layout: "aws"
+page_title: "AWS: aws_media_package_channel"
+description: |-
+  Provides an AWS Elemental MediaPackage Channel.
+---
+
+
+# Resource: aws_media_package_channel
+
+Provides an AWS Elemental MediaPackage Channel.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { MediaPackageChannel } from "./.gen/providers/aws/media-package-channel";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MediaPackageChannel(this, "kittens", {
+      channelId: "kitten-channel",
+      description: "A channel dedicated to amusing videos of kittens.",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `channelId` - (Required) A unique identifier describing the channel.
+* `description` - (Optional) A description of the channel.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The same as `channelId`
+* `arn` - The ARN of the channel
+* `hlsIngest` - A single item list of HLS ingest information
+    * `ingestEndpoints` - A list of the ingest endpoints
+        * `password` - The password
+        * `url` - The URL
+        * `username` - The username
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Media Package Channels using the channel ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Media Package Channels using the channel ID. For example:
+
+```console
+% terraform import aws_media_package_channel.kittens kitten-channel
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/media_store_container.html.markdown b/website/docs/cdktf/typescript/r/media_store_container.html.markdown
new file mode 100644
index 00000000000..a86c5927672
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/media_store_container.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Elemental MediaStore"
+layout: "aws"
+page_title: "AWS: aws_media_store_container"
+description: |-
+  Provides a MediaStore Container.
+---
+
+
+# Resource: aws_media_store_container
+
+Provides a MediaStore Container.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { MediaStoreContainer } from "./.gen/providers/aws/media-store-container";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MediaStoreContainer(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the container. Must contain alphanumeric characters or underscores.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the container.
+* `endpoint` - The DNS endpoint of the container.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaStore Container using the MediaStore Container Name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MediaStore Container using the MediaStore Container Name. For example:
+
+```console
+% terraform import aws_media_store_container.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/media_store_container_policy.html.markdown b/website/docs/cdktf/typescript/r/media_store_container_policy.html.markdown
new file mode 100644
index 00000000000..7267dfb59d1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/media_store_container_policy.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "Elemental MediaStore"
+layout: "aws"
+page_title: "AWS: aws_media_store_container_policy"
+description: |-
+  Provides a MediaStore Container Policy.
+---
+
+
+# Resource: aws_media_store_container_policy
+
+Provides a MediaStore Container Policy.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { MediaStoreContainer } from "./.gen/providers/aws/media-store-container";
+import { MediaStoreContainerPolicy } from "./.gen/providers/aws/media-store-container-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new MediaStoreContainer(this, "example", {
+      name: "example",
+    });
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsRegionCurrent = new DataAwsRegion(this, "current_2", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsRegionCurrent.overrideLogicalId("current");
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_3",
+      {
+        statement: [
+          {
+            actions: ["mediastore:*"],
+            condition: [
+              {
+                test: "Bool",
+                values: ["true"],
+                variable: "aws:SecureTransport",
+              },
+            ],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["arn:aws:iam::${" + current.accountId + "}:root"],
+                type: "AWS",
+              },
+            ],
+            resources: [
+              "arn:aws:mediastore:${" +
+                dataAwsRegionCurrent.name +
+                "}:${" +
+                current.accountId +
+                "}:container/${" +
+                example.name +
+                "}/*",
+            ],
+            sid: "MediaStoreFullAccess",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsMediaStoreContainerPolicyExample = new MediaStoreContainerPolicy(
+      this,
+      "example_4",
+      {
+        containerName: example.name,
+        policy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMediaStoreContainerPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `containerName` - (Required) The name of the container.
+* `policy` - (Required) The contents of the policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaStore Container Policy using the MediaStore Container Name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MediaStore Container Policy using the MediaStore Container Name. For example:
+
+```console
+% terraform import aws_media_store_container_policy.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/medialive_channel.html.markdown b/website/docs/cdktf/typescript/r/medialive_channel.html.markdown
new file mode 100644
index 00000000000..a329ad675fd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/medialive_channel.html.markdown
@@ -0,0 +1,702 @@
+---
+subcategory: "Elemental MediaLive"
+layout: "aws"
+page_title: "AWS: aws_medialive_channel"
+description: |-
+  Terraform resource for managing an AWS MediaLive Channel.
+---
+
+
+# Resource: aws_medialive_channel
+
+Terraform resource for managing an AWS MediaLive Channel.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { MedialiveChannel } from "./.gen/providers/aws/medialive-channel";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MedialiveChannel(this, "example", {
+      channelClass: "STANDARD",
+      destinations: [
+        {
+          id: "destination",
+          settings: [
+            {
+              url: "s3://${" + main.id + "}/test1",
+            },
+            {
+              url: "s3://${" + main2.id + "}/test2",
+            },
+          ],
+        },
+      ],
+      encoderSettings: {
+        audioDescriptions: [
+          {
+            audioSelectorName: "example audio selector",
+            name: "audio-selector",
+          },
+        ],
+        outputGroups: [
+          {
+            outputGroupSettings: {
+              archiveGroupSettings: [
+                {
+                  destination: {
+                    destinationRefId: "destination",
+                  },
+                },
+              ],
+            },
+            outputs: [
+              {
+                audioDescriptionNames: ["audio-selector"],
+                outputName: "example-name",
+                outputSettings: {
+                  archiveOutputSettings: {
+                    containerSettings: {
+                      m2TsSettings: {
+                        audioBufferModel: "ATSC",
+                        bufferModel: "MULTIPLEX",
+                        rateMode: "CBR",
+                      },
+                    },
+                    extension: "m2ts",
+                    nameModifier: "_1",
+                  },
+                },
+                videoDescriptionName: "example-video",
+              },
+            ],
+          },
+        ],
+        timecodeConfig: {
+          source: "EMBEDDED",
+        },
+        videoDescriptions: [
+          {
+            name: "example-video",
+          },
+        ],
+      },
+      inputAttachments: [
+        {
+          inputAttachmentName: "example-input",
+          inputId: Token.asString(awsMedialiveInputExample.id),
+        },
+      ],
+      inputSpecification: {
+        codec: "AVC",
+        inputResolution: "HD",
+        maximumBitrate: "MAX_20_MBPS",
+      },
+      name: "example-channel",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `channelClass` - (Required) The class for this channel. `STANDARD` for a channel with two encoder pipelines, or `SINGLE_PIPELINE` for a channel with one.
+* `destinations` - (Required) Destinations for channel. See [Destinations](#destinations) for more details.
+* `encoderSettings` - (Required) Encoder settings. See [Encoder Settings](#encoder-settings) for more details.
+* `inputSpecification` - (Required) Specification of network and file inputs for the channel.
+* `name` - (Required) Name of the Channel.
+
+The following arguments are optional:
+
+* `cdiInputSpecification` - (Optional) Specification of CDI inputs for this channel. See [CDI Input Specification](#cdi-input-specification) for more details.
+* `inputAttachments` - (Optional) Input attachments for the channel. See [Input Attachments](#input-attachments) for more details.
+* `logLevel` - (Optional) The log level to write to CloudWatch Logs.
+* `maintenance` - (Optional) Maintenance settings for this channel. See [Maintenance](#maintenance) for more details.
+* `roleArn` - (Optional) The ARN of the IAM role to assume when running the channel.
+* `startChannel` - (Optional) Whether to start/stop channel. Default: `false`
+* `tags` - (Optional) A map of tags to assign to the channel. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc` - (Optional) Settings for the VPC outputs.
+
+### CDI Input Specification
+
+* `resolution` - (Required) Maximum CDI input resolution.
+
+### Destinations
+
+* `id` - (Required) User-specified id. This is used in an output group or an output.
+* `mediaPackageSettings` - (Optional) Destination settings for a MediaPackage output; one destination for both encoders. See [Media Package Settings](#media-package-settings) for more details.
+* `multiplexSettings` - (Optional) Destination settings for a Multiplex output; one destination for both encoders. See [Multiplex Settings](#multiplex-settings) for more details.
+* `settings` - (Optional) Destination settings for a standard output; one destination for each redundant encoder. See [Settings](#settings) for more details.
+
+### Encoder Settings
+
+* `audioDescriptions` - (Required) Audio descriptions for the channel. See [Audio Descriptions](#audio-descriptions) for more details.
+* `outputGroups` - (Required) Output groups for the channel. See [Output Groups](#output-groups) for more details.
+* `timecodeConfig` - (Required) Contains settings used to acquire and adjust timecode information from inputs. See [Timecode Config](#timecode-config) for more details.
+* `videoDescriptions` - (Required) Video Descriptions. See [Video Descriptions](#video-descriptions) for more details.
+* `captionDescriptions` - (Optional) Caption Descriptions. See [Caption Descriptions](#caption-descriptions) for more details.
+* `globalConfiguration` - (Optional) Configuration settings that apply to the event as a whole. See [Global Configuration](#global-configuration) for more details.
+* `motionGraphicsConfiguration` - (Optional) Settings for motion graphics. See [Motion Graphics Configuration](#motion-graphics-configuration) for more details.
+* `nielsenConfiguration` - (Optional) Nielsen configuration settings. See [Nielsen Configuration](#nielsen-configuration) for more details.
+* `availBlanking` - (Optional) Settings for ad avail blanking. See [Avail Blanking](#avail-blanking) for more details.
+
+### Input Attachments
+
+* `inputAttachmentName` - (Optional) User-specified name for the attachment.
+* `inputId` - (Required) The ID of the input.
+* `inputSettings` - (Optional) Settings of an input. See [Input Settings](#input-settings) for more details.
+
+### Input Settings
+
+* `audioSelectors` - (Optional) Used to select the audio stream to decode for inputs that have multiple. See [Audio Selectors](#audio-selectors) for more details.
+* `captionSelectors` - (Optional) Used to select the caption input to use for inputs that have multiple available. See [Caption Selectors](#caption-selectors) for more details.
+* `deblockFilter` - (Optional) Enable or disable the deblock filter when filtering.
+* `denoiseFilter` - (Optional) Enable or disable the denoise filter when filtering.
+* `filterStrength` - (Optional) Adjusts the magnitude of filtering from 1 (minimal) to 5 (strongest).
+* `inputFilter` - (Optional) Turns on the filter for the input.
+* `networkInputSettings` - (Optional) Input settings. See [Network Input Settings](#network-input-settings) for more details.
+* `scte35Pid` - (Optional) PID from which to read SCTE-35 messages.
+* `smpte2038DataPreference` - (Optional) Specifies whether to extract applicable ancillary data from a SMPTE-2038 source in the input.
+* `sourceEndBehavior` - (Optional) Loop input if it is a file.
+
+### Audio Selectors
+
+* `name` - (Required) The name of the audio selector.
+
+### Caption Selectors
+
+* `name` - (Optional) The name of the caption selector.
+* `languageCode` - (Optional) When specified, this field indicates the three-letter language code of the caption track to extract from the source.
+
+### Network Input Settings
+
+* `hlsInputSettings` - (Optional) Specifies HLS input settings when the uri is for a HLS manifest. See [HLS Input Settings](#hls-input-settings) for more details.
+* `serverValidation` - (Optional) Check HTTPS server certificates.
+
+### HLS Input Settings
+
+* `bandwidth` - (Optional) The bitrate is specified in bits per second, as in an HLS manifest.
+* `bufferSegments` - (Optional) Buffer segments.
+* `retries` - (Optional) The number of consecutive times that attempts to read a manifest or segment must fail before the input is considered unavailable.
+* `retryInterval` - (Optional) The number of seconds between retries when an attempt to read a manifest or segment fails.
+* `scte35SourceType` - (Optional) Identifies the source for the SCTE-35 messages that MediaLive will ingest.
+
+### Maintenance
+
+* `maintenanceDay` - (Optional) The day of the week to use for maintenance.
+* `maintenanceStartTime` - (Optional) The hour maintenance will start.
+
+### Media Package Settings
+
+* `channelId` - (Required) ID of the channel in MediaPackage that is the destination for this output group.
+
+### Multiplex Settings
+
+* `multiplexId` - (Required) The ID of the Multiplex that the encoder is providing output to.
+* `programName` - (Optional) The program name of the Multiplex program that the encoder is providing output to.
+
+### Settings
+
+* `passwordParam` - (Optional) Key used to extract the password from EC2 Parameter store.
+* `streamName` - (Optional) Stream name for RTMP destinations (URLs of type rtmp://).
+* `url` - (Optional) A URL specifying a destination.
+* `username` - (Optional) Username for destination.
+
+### Audio Descriptions
+
+* `audioSelectorName` - (Required) The name of the audio selector used as the source for this AudioDescription.
+* `name` - (Required) The name of this audio description.
+* `audioNormalizationSettings` - (Optional) Advanced audio normalization settings. See [Audio Normalization Settings](#audio-normalization-settings) for more details.
+* `audioType` - (Optional) Applies only if audioTypeControl is useConfigured. The values for audioType are defined in ISO-IEC 13818-1.
+* `audioTypeControl` - (Optional) Determines how the audio type is determined.
+* `audioWatermarkSettings` - (Optional) Settings to configure one or more solutions that insert audio watermarks in the audio encode. See [Audio Watermark Settings](#audio-watermark-settings) for more details.
+* `codecSettings` - (Optional) Audio codec settings. See [Audio Codec Settings](#audio-codec-settings) for more details, and the sketch after the Nielsen watermark subsections below.
+
+### Audio Normalization Settings
+
+* `algorithm` - (Optional) Audio normalization algorithm to use. itu17701 conforms to the CALM Act specification, itu17702 to the EBU R-128 specification.
+* `algorithmControl` - (Optional) Algorithm control for the audio description.
+* `targetLkfs` - (Optional) Target LKFS (loudness) to adjust volume to.
+
+### Audio Watermark Settings
+
+* `nielsenWatermarkSettings` - (Optional) Settings to configure Nielsen Watermarks in the audio encode. See [Nielsen Watermark Settings](#nielsen-watermark-settings) for more details.
+
+### Audio Codec Settings
+
+* `aacSettings` - (Optional) Aac Settings. See [AAC Settings](#aac-settings) for more details.
+* `ac3Settings` - (Optional) Ac3 Settings. See [AC3 Settings](#ac3-settings) for more details.
+* `eac3AtmosSettings` - (Optional) Eac3 Atmos Settings. See [EAC3 Atmos Settings](#eac3-atmos-settings) for more details.
+* `eac3Settings` - (Optional) Eac3 Settings. See [EAC3 Settings](#eac3-settings) for more details.
+
+### AAC Settings
+
+* `bitrate` - (Optional) Average bitrate in bits/second.
+* `codingMode` - (Optional) Mono, Stereo, or 5.1 channel layout.
+* `inputType` - (Optional) Set to "broadcasterMixedAd" when input contains pre-mixed main audio + AD (narration) as a stereo pair.
+* `profile` - (Optional) AAC profile.
+* `rateControlMode` - (Optional) The rate control mode.
+* `rawFormat` - (Optional) Sets LATM/LOAS AAC output for raw containers.
+* `sampleRate` - (Optional) Sample rate in Hz.
+* `spec` - (Optional) Use MPEG-2 AAC audio instead of MPEG-4 AAC audio for raw or MPEG-2 Transport Stream containers.
+* `vbrQuality` - (Optional) VBR Quality Level - Only used if rateControlMode is VBR.
+
+### AC3 Settings
+
+* `bitrate` - (Optional) Average bitrate in bits/second.
+* `bitstreamMode` - (Optional) Specifies the bitstream mode (bsmod) for the emitted AC-3 stream.
+* `codingMode` - (Optional) Dolby Digital coding mode.
+* `dialnorm` - (Optional) Sets the dialnorm of the output.
+* `drcProfile` - (Optional) If set to filmStandard, adds dynamic range compression signaling to the output bitstream as defined in the Dolby Digital specification.
+* `lfeFilter` - (Optional) When set to enabled, applies a 120Hz lowpass filter to the LFE channel prior to encoding.
+* `metadataControl` - (Optional) Metadata control.
+
+### EAC3 Atmos Settings
+
+* `bitrate` - (Optional) Average bitrate in bits/second.
+* `codingMode` - (Optional) Dolby Digital Plus with Dolby Atmos coding mode.
+* `dialnorm` - (Optional) Sets the dialnorm for the output.
+* `drcLine` - (Optional) Sets the Dolby dynamic range compression profile.
+* `drcRf` - (Optional) Sets the profile for heavy Dolby dynamic range compression.
+* `heightTrim` - (Optional) Height dimensional trim.
+* `surroundTrim` - (Optional) Surround dimensional trim.
+
+### EAC3 Settings
+
+* `attenuationControl` - (Optional) Sets the attenuation control.
+* `bitrate` - (Optional) Average bitrate in bits/second.
+* `bitstreamMode` - (Optional) Specifies the bitstream mode (bsmod) for the emitted AC-3 stream.
+* `codingMode` - (Optional) Dolby Digital Plus coding mode.
+
+### Nielsen Watermark Settings
+
+* `nielsenCbetSettings` - (Optional) Used to insert watermarks of type Nielsen CBET. See [Nielsen CBET Settings](#nielsen-cbet-settings) for more details.
+* `nielsenDistributionType` - (Optional) Distribution types to assign to the watermarks. Options are `programContent` and `finalDistributor`.
+* `nielsenNaesIiNwSettings` - (Optional) Used to insert watermarks of type Nielsen NAES II (N2) and Nielsen NAES VI (NW). See [Nielsen NAES II NW Settings](#nielsen-naes-ii-nw-settings) for more details.
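+
+The snippet below is a sketch, not part of the generated provider documentation, showing how the audio blocks from the preceding subsections nest together in an `audioDescriptions` entry. The property names follow the generated provider bindings used above; the enum strings and numeric values are illustrative assumptions, so verify them against the MediaLive API before use.
+
+```typescript
+// Hypothetical fragment for illustration only: one `audioDescriptions`
+// entry combining normalization, AAC codec, and Nielsen watermark settings.
+const exampleAudioDescription = {
+  audioSelectorName: "example audio selector", // must match an input audio selector
+  name: "audio-selector", // referenced by `audioDescriptionNames` in an output
+  audioNormalizationSettings: {
+    algorithm: "ITU_1770_2", // assumed enum value (EBU R-128 style measurement)
+    algorithmControl: "CORRECT_AUDIO", // assumed enum value
+    targetLkfs: -23,
+  },
+  codecSettings: {
+    aacSettings: {
+      bitrate: 192000, // average bitrate in bits/second
+      codingMode: "CODING_MODE_2_0", // assumed enum value (stereo)
+      sampleRate: 48000,
+    },
+  },
+  audioWatermarkSettings: {
+    nielsenWatermarkSettings: {
+      nielsenDistributionType: "PROGRAM_CONTENT", // assumed enum value
+    },
+  },
+};
+```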
+
+### Nielsen CBET Settings
+
+* `cbetCheckDigit` - (Required) CBET check digits to use for the watermark.
+* `cbetStepaside` - (Required) Determines the method of CBET insertion mode when prior encoding is detected on the same layer.
+* `csid` - (Required) CBET source ID to use in the watermark.
+
+### Nielsen NAES II NW Settings
+
+* `checkDigit` - (Required) Check digit string for the watermark.
+* `sid` - (Required) The Nielsen Source ID to include in the watermark.
+
+### Output Groups
+
+* `outputGroupSettings` - (Required) Settings associated with the output group. See [Output Group Settings](#output-group-settings) for more details.
+* `outputs` - (Required) List of outputs. See [Outputs](#outputs) for more details.
+* `name` - (Optional) Custom output group name defined by the user.
+
+### Output Group Settings
+
+* `archiveGroupSettings` - (Optional) Archive group settings. See [Archive Group Settings](#archive-group-settings) for more details.
+* `mediaPackageGroupSettings` - (Optional) Media package group settings. See [Media Package Group Settings](#media-package-group-settings) for more details.
+* `multiplexGroupSettings` - (Optional) Multiplex group settings. Attribute can be passed as an empty block.
+* `rtmpGroupSettings` - (Optional) RTMP group settings. See [RTMP Group Settings](#rtmp-group-settings) for more details.
+* `udpGroupSettings` - (Optional) UDP group settings. See [UDP Group Settings](#udp-group-settings) for more details.
+
+### Outputs
+
+* `outputSettings` - (Required) Settings for output. See [Output Settings](#output-settings) for more details.
+* `audioDescriptionNames` - (Optional) The names of the audio descriptions used as audio sources for the output.
+* `captionDescriptionNames` - (Optional) The names of the caption descriptions used as caption sources for the output.
+* `outputName` - (Required) The name used to identify an output.
+* `videoDescriptionName` - (Optional) The name of the video description used as video source for the output.
+
+### Timecode Config
+
+* `source` - (Optional) The source for the timecode that will be associated with the event's outputs.
+* `syncThreshold` - (Optional) Threshold in frames beyond which output timecode is resynchronized to the input timecode.
+
+### Video Descriptions
+
+* `name` - (Required) The name of the video description.
+* `codecSettings` - (Optional) The video codec settings. See [Video Codec Settings](#video-codec-settings) for more details.
+* `height` - (Optional) Output video height in pixels.
+* `respondToAfd` - (Optional) Indicate how to respond to the AFD values that might be in the input video.
+* `scalingBehavior` - (Optional) Behavior on how to scale.
+* `sharpness` - (Optional) Changes the strength of the anti-alias filter used for scaling.
+* `width` - (Optional) Output video width in pixels.
+
+### Video Codec Settings
+
+* `frameCaptureSettings` - (Optional) Frame capture settings. See [Frame Capture Settings](#frame-capture-settings) for more details.
+* `h264Settings` - (Optional) H264 settings. See [H264 Settings](#h264-settings) for more details.
+* `h265Settings` - (Optional) H265 settings. See [H265 Settings](#h265-settings) for more details.
+
+### Frame Capture Settings
+
+* `captureInterval` - (Optional) The frequency at which to capture frames for inclusion in the output.
+* `captureIntervalUnits` - (Optional) Unit for the frame capture interval.
+
+### H264 Settings
+
+* `adaptiveQuantization` - (Optional) Enables or disables adaptive quantization.
+* `afdSignaling` - (Optional) Indicates that AFD values will be written into the output stream.
+* `bitrate` - (Optional) Average bitrate in bits/second. +* `bufFilPct` - (Optional) Percentage of the buffer that should initially be filled. +* `bufSize` - (Optional) Size of buffer in bits. +* `colorMetadata` - (Optional) Includes color space metadata in the output. +* `entropyEncoding` - (Optional) Entropy encoding mode. +* `filterSettings` - (Optional) Filters to apply to an encode. See [H264 Filter Settings](#h264-filter-settings) for more details. +* `fixedAfd` - (Optional) Four bit AFD value to write on all frames of video in the output stream. +* `flicerAq` - (Optional) Makes adjustments within each frame to reduce flicker on the I-frames. +* `forceFieldPictures` - (Optional) Controls whether coding is performed on a field basis or on a frame basis. +* `framerateControl` - (Optional) Indicates how the output video frame rate is specified. +* `framerateDenominator` - (Optional) Framerate denominator. +* `framerateNumerator` - (Optional) Framerate numerator. +* `gopBReference` - (Optional) GOP-B reference. +* `gopClosedCadence` - (Optional) Frequency of closed GOPs. +* `gopNumBFrames` - (Optional) Number of B-frames between reference frames. +* `gopSize` - (Optional) GOP size in units of either frames of seconds per `gopSizeUnits`. +* `gopSizeUnits` - (Optional) Indicates if the `gopSize` is specified in frames or seconds. +* `level` - (Optional) H264 level. +* `lookAheadRateControl` - (Optional) Amount of lookahead. +* `maxBitrate` - (Optional) Set the maximum bitrate in order to accommodate expected spikes in the complexity of the video. +* `minInterval` - (Optional) Min interval. +* `numRefFrames` - (Optional) Number of reference frames to use. +* `parControl` - (Optional) Indicates how the output pixel aspect ratio is specified. +* `parDenominator` - (Optional) Pixel Aspect Ratio denominator. +* `parNumerator` - (Optional) Pixel Aspect Ratio numerator. +* `profile` - (Optional) H264 profile. +* `qualityLevel` - (Optional) Quality level. +* `qvbrQualityLevel` - (Optional) Controls the target quality for the video encode. +* `rateControlMode` - (Optional) Rate control mode. +* `scanType` - (Optional) Sets the scan type of the output. +* `sceneChangeDetect` - (Optional) Scene change detection. +* `slices` - (Optional) Number of slices per picture. +* `softness` - (Optional) Softness. +* `spatialAq` - (Optional) Makes adjustments within each frame based on spatial variation of content complexity. +* `subgopLength` - (Optional) Subgop length. +* `syntax` - (Optional) Produces a bitstream compliant with SMPTE RP-2027. +* `temporalAq` - (Optional) Makes adjustments within each frame based on temporal variation of content complexity. +* `timecodeInsertion` - (Optional) Determines how timecodes should be inserted into the video elementary stream. + +### H264 Filter Settings + +* `temporalFilterSettings` - (Optional) Temporal filter settings. See [Temporal Filter Settings](#temporal-filter-settings) + +### H265 Settings + +* `adaptiveQuantization` - (Optional) Enables or disables adaptive quantization. +* `afdSignaling` - (Optional) Indicates that AFD values will be written into the output stream. +* `alternativeTransferFunction` - (Optional) Whether or not EML should insert an Alternative Transfer Function SEI message. +* `bitrate` - (Required) Average bitrate in bits/second. +* `bufSize` - (Optional) Size of buffer in bits. +* `colorMetadata` - (Optional) Includes color space metadata in the output. +* `colorSpaceSettings` (Optional) Define the color metadata for the output. 
[H265 Color Space Settings](#h265-color-space-settings) for more details. +* `filterSettings` - (Optional) Filters to apply to an encode. See [H265 Filter Settings](#h265-filter-settings) for more details. +* `fixedAfd` - (Optional) Four bit AFD value to write on all frames of video in the output stream. +* `flicerAq` - (Optional) Makes adjustments within each frame to reduce flicker on the I-frames. +* `framerateDenominator` - (Required) Framerate denominator. +* `framerateNumerator` - (Required) Framerate numerator. +* `gopClosedCadence` - (Optional) Frequency of closed GOPs. +* `gopSize` - (Optional) GOP size in units of either frames of seconds per `gopSizeUnits`. +* `gopSizeUnits` - (Optional) Indicates if the `gopSize` is specified in frames or seconds. +* `level` - (Optional) H265 level. +* `lookAheadRateControl` - (Optional) Amount of lookahead. +* `maxBitrate` - (Optional) Set the maximum bitrate in order to accommodate expected spikes in the complexity of the video. +* `minInterval` - (Optional) Min interval. +* `parDenominator` - (Optional) Pixel Aspect Ratio denominator. +* `parNumerator` - (Optional) Pixel Aspect Ratio numerator. +* `profile` - (Optional) H265 profile. +* `qvbrQualityLevel` - (Optional) Controls the target quality for the video encode. +* `rateControlMode` - (Optional) Rate control mode. +* `scanType` - (Optional) Sets the scan type of the output. +* `sceneChangeDetect` - (Optional) Scene change detection. +* `slices` - (Optional) Number of slices per picture. +* `tier` - (Optional) Set the H265 tier in the output. +* `timecodeBurninSettings` - (Optional) Apply a burned in timecode. See [H265 Timecode Burnin Settings](#h265-timecode-burnin-settings) for more details. +* `timecodeInsertion` = (Optional) Determines how timecodes should be inserted into the video elementary stream. + +### H265 Color Space Settings + +* `colorSpacePassthroughSettings` - (Optional) Sets the colorspace metadata to be passed through. +* `dolbyVision81Settings` - (Optional) Set the colorspace to Dolby Vision81. +* `hdr10Settings` - (Optional) Set the colorspace to be HDR10. See [H265 HDR10 Settings](#h265-hdr10-settings) for more details. +* `rec601Settings` - (Optional) Set the colorspace to Rec. 601. +* `rec709Settings` - (Optional) Set the colorspace to Rec. 709. + +### H265 HDR10 Settings + +* `maxCll` - (Optional) Sets the MaxCLL value for HDR10. +* `maxFall` - (Optional) Sets the MaxFALL value for HDR10. + +### H265 Filter Settings + +* `temporalFilterSettings` - (Optional) Temporal filter settings. See [Temporal Filter Settings](#temporal-filter-settings) + +### H265 Timecode Burnin Settings + +* `timecodeBurninFontSize` - (Optional) Sets the size of the burned in timecode. +* `timecodeBurninPosition` - (Optional) Sets the position of the burned in timecode. +* `prefix` - (Optional) Set a prefix on the burned in timecode. + +### Temporal Filter Settings + +* `postFilterSharpening` - (Optional) Post filter sharpening. +* `strength` - (Optional) Filter strength. + +### Caption Descriptions + +* `accessibility` - (Optional) Indicates whether the caption track implements accessibility features such as written descriptions of spoken dialog, music, and sounds. +* `captionSelectorName` - (Required) Specifies which input caption selector to use as a caption source when generating output captions. This field should match a captionSelector name. +* `destinationSettings` - (Optional) Additional settings for captions destination that depend on the destination type. 
See [Destination Settings](#destination-settings) for more details. +* `languageCode` - (Optional) ISO 639-2 three-digit code. +* `languageDescription` - (Optional) Human readable information to indicate captions available for players (eg. English, or Spanish). +* `name` - (Required) Name of the caption description. Used to associate a caption description with an output. Names must be unique within an event. + +### Destination Settings + +* `aribDestinationSettings` - (Optional) Arib Destination Settings. +* `burnInDestinationSettings` - (Optional) Burn In Destination Settings. See [Burn In Destination Settings](#burn-in-destination-settings) for more details. +* `dvbSubDestinationSettings` - (Optional) Dvb Sub Destination Settings. See [Dvb Sub Destination Settings](#dvb-sub-destination-settings) for more details. +* `ebuTtDDestinationSettings` - (Optional) Ebu Tt D Destination Settings. See [Ebu Tt D Destination Settings](#ebu-tt-d-destination-settings) for more details. +* `embeddedDestinationSettings` - (Optional) Embedded Destination Settings. +* `embeddedPlusScte20DestinationSettings` - (Optional) Embedded Plus Scte20 Destination Settings. +* `rtmpCaptionInfoDestinationSettings` - (Optional) Rtmp Caption Info Destination Settings. +* `scte20PlusEmbeddedDestinationSettings` - (Optional) Scte20 Plus Embedded Destination Settings. +* `scte27DestinationSettings` – (Optional) Scte27 Destination Settings. +* `smpteTtDestinationSettings` – (Optional) Smpte Tt Destination Settings. +* `teletextDestinationSettings` – (Optional) Teletext Destination Settings. +* `ttmlDestinationSettings` – (Optional) Ttml Destination Settings. See [Ttml Destination Settings](#ttml-destination-settings) for more details. +* `webvttDestinationSettings` - (Optional) Webvtt Destination Settings. See [Webvtt Destination Settings](#webvtt-destination-settings) for more details. + +### Burn In Destination Settings + +* `alignment` – (Optional) If no explicit xPosition or yPosition is provided, setting alignment to centered will place the captions at the bottom center of the output. Similarly, setting a left alignment will align captions to the bottom left of the output. If x and y positions are given in conjunction with the alignment parameter, the font will be justified (either left or centered) relative to those coordinates. Selecting “smart” justification will left-justify live subtitles and center-justify pre-recorded subtitles. All burn-in and DVB-Sub font settings must match. +* `backgroundColor` – (Optional) Specifies the color of the rectangle behind the captions. All burn-in and DVB-Sub font settings must match. +* `backgroundOpacity` – (Optional) Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent. Leaving this parameter out is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. +* `font` – (Optional) External font file used for caption burn-in. File extension must be ‘ttf’ or ‘tte’. Although the user can select output fonts for many different types of input captions, embedded, STL and teletext sources use a strict grid system. Using external fonts with these caption sources could cause unexpected display of proportional fonts. All burn-in and DVB-Sub font settings must match. See [Font](#font) for more details. +* `fontColor` – (Optional) Specifies the color of the burned-in captions. This option is not valid for source captions that are STL, 608/embedded or teletext. 
These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `fontOpacity` – (Optional) Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent. All burn-in and DVB-Sub font settings must match. +* `fontResolution` – (Optional) Font resolution in DPI (dots per inch); default is 96 dpi. All burn-in and DVB-Sub font settings must match. +* `fontSize` – (Optional) When set to ‘auto’ fontSize will scale depending on the size of the output. Giving a positive integer will specify the exact font size in points. All burn-in and DVB-Sub font settings must match. +* `outlineColor` – (Optional) Specifies font outline color. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `outlineSize` – (Optional) Specifies font outline size in pixels. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `shadowColor` – (Optional) Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub font settings must match. +* `shadowOpacity` – (Optional) Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving this parameter out is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. +* `shadowXOffset` – (Optional) Specifies the horizontal offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels to the left. All burn-in and DVB-Sub font settings must match. +* `shadowYOffset` – (Optional) Specifies the vertical offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels above the text. All burn-in and DVB-Sub font settings must match. +* `teletextGridControl` – (Optional) Controls whether a fixed grid size will be used to generate the output subtitles bitmap. Only applicable for Teletext inputs and DVB-Sub/Burn-in outputs. +* `xPosition` – (Optional) Specifies the horizontal position of the caption relative to the left side of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the left of the output. If no explicit xPosition is provided, the horizontal caption position will be determined by the alignment parameter. All burn-in and DVB-Sub font settings must match. +* `yPosition` – (Optional) Specifies the vertical position of the caption relative to the top of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the top of the output. If no explicit yPosition is provided, the caption will be positioned towards the bottom of the output. All burn-in and DVB-Sub font settings must match. + +### Dvb Sub Destination Settings + +* `alignment` – (Optional) If no explicit xPosition or yPosition is provided, setting alignment to centered will place the captions at the bottom center of the output. Similarly, setting a left alignment will align captions to the bottom left of the output. If x and y positions are given in conjunction with the alignment parameter, the font will be justified (either left or centered) relative to those coordinates. Selecting “smart” justification will left-justify live subtitles and center-justify pre-recorded subtitles. 
This option is not valid for source captions that are STL or 608/embedded. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `backgroundColor` – (Optional) Specifies the color of the rectangle behind the captions. All burn-in and DVB-Sub font settings must match. +* `backgroundOpacity` – (Optional) Specifies the opacity of the background rectangle. 255 is opaque; 0 is transparent. Leaving this parameter blank is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. +* `font` – (Optional) External font file used for caption burn-in. File extension must be ‘ttf’ or ‘tte’. Although the user can select output fonts for many different types of input captions, embedded, STL and teletext sources use a strict grid system. Using external fonts with these caption sources could cause unexpected display of proportional fonts. All burn-in and DVB-Sub font settings must match. See [Font](#font) for more details. +* `fontColor` – (Optional) Specifies the color of the burned-in captions. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `fontOpacity` – (Optional) Specifies the opacity of the burned-in captions. 255 is opaque; 0 is transparent. All burn-in and DVB-Sub font settings must match. +* `fontResolution` – (Optional) Font resolution in DPI (dots per inch); default is 96 dpi. All burn-in and DVB-Sub font settings must match. +* `fontSize` – (Optional) When set to auto fontSize will scale depending on the size of the output. Giving a positive integer will specify the exact font size in points. All burn-in and DVB-Sub font settings must match. +* `outlineColor` – (Optional) Specifies font outline color. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `outlineSize` – (Optional) Specifies font outline size in pixels. This option is not valid for source captions that are either 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `shadowColor` – (Optional) Specifies the color of the shadow cast by the captions. All burn-in and DVB-Sub font settings must match. +* `shadowOpacity` – (Optional) Specifies the opacity of the shadow. 255 is opaque; 0 is transparent. Leaving this parameter blank is equivalent to setting it to 0 (transparent). All burn-in and DVB-Sub font settings must match. +* `shadowXOffset` – (Optional) Specifies the horizontal offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels to the left. All burn-in and DVB-Sub font settings must match. +* `shadowYOffset` – (Optional) Specifies the vertical offset of the shadow relative to the captions in pixels. A value of -2 would result in a shadow offset 2 pixels above the text. All burn-in and DVB-Sub font settings must match. +* `teletextGridControl` – (Optional) Controls whether a fixed grid size will be used to generate the output subtitles bitmap. Only applicable for Teletext inputs and DVB-Sub/Burn-in outputs. +* `xPosition` – (Optional) Specifies the horizontal position of the caption relative to the left side of the output in pixels. 
A value of 10 would result in the captions starting 10 pixels from the left of the output. If no explicit xPosition is provided, the horizontal caption position will be determined by the alignment parameter. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. +* `yPosition` – (Optional) Specifies the vertical position of the caption relative to the top of the output in pixels. A value of 10 would result in the captions starting 10 pixels from the top of the output. If no explicit yPosition is provided, the caption will be positioned towards the bottom of the output. This option is not valid for source captions that are STL, 608/embedded or teletext. These source settings are already pre-defined by the caption stream. All burn-in and DVB-Sub font settings must match. + +### Ebu Tt D Destination Settings + +* `copyrightHolder` – (Optional) Complete this field if you want to include the name of the copyright holder in the copyright tag in the captions metadata. +* `fillLineGap` – (Optional) Specifies how to handle the gap between the lines (in multi-line captions). - enabled: Fill with the captions background color (as specified in the input captions). - disabled: Leave the gap unfilled. +* `fontFamily` – (Optional) Specifies the font family to include in the font data attached to the EBU-TT captions. Valid only if styleControl is set to include. If you leave this field empty, the font family is set to “monospaced”. (If styleControl is set to exclude, the font family is always set to “monospaced”.) You specify only the font family. All other style information (color, bold, position and so on) is copied from the input captions. The size is always set to 100% to allow the downstream player to choose the size. - Enter a list of font families, as a comma-separated list of font names, in order of preference. The name can be a font family (such as “Arial”), or a generic font family (such as “serif”), or “default” (to let the downstream player choose the font). - Leave blank to set the family to “monospace”. +* `styleControl` – (Optional) Specifies the style information (font color, font position, and so on) to include in the font data that is attached to the EBU-TT captions. - include: Take the style information (font color, font position, and so on) from the source captions and include that information in the font data attached to the EBU-TT captions. This option is valid only if the source captions are Embedded or Teletext. - exclude: In the font data attached to the EBU-TT captions, set the font family to “monospaced”. Do not include any other style information. + +### Ttml Destination Settings + +* `styleControl` – (Optional) This field is not currently supported and will not affect the output styling. Leave the default value. + +### Webvtt Destination Settings + +* `styleControl` - (Optional) Controls whether the color and position of the source captions is passed through to the WebVTT output captions. PASSTHROUGH - Valid only if the source captions are EMBEDDED or TELETEXT. NO\_STYLE\_DATA - Don’t pass through the style. The output captions will not contain any font styling information. + +### Font + +* `passwordParam` – (Optional) Key used to extract the password from EC2 Parameter store. +* `uri` – (Required) Path to a file accessible to the live stream. +* `username` – (Optional) Username to be used. 
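+
+Taken together, these blocks describe a single caption destination nested under the channel's `encoderSettings.captionDescriptions`. As a rough, hypothetical sketch only (the property names come from the argument reference above; the enum strings follow the AWS MediaLive API and should be checked against the generated provider bindings), a burn-in destination might be shaped as follows:
+
+```typescript
+// Hypothetical fragment, not a complete channel: property names follow the
+// argument reference above; the values are illustrative.
+const burnInDestinationSettings = {
+  alignment: "CENTERED",
+  backgroundColor: "BLACK",
+  backgroundOpacity: 255, // opaque
+  fontColor: "WHITE",
+  fontOpacity: 255,
+  fontResolution: 96, // DPI
+  fontSize: "auto", // or an exact point size such as "24"
+  outlineColor: "BLACK",
+  outlineSize: 1,
+  teletextGridControl: "FIXED",
+  xPosition: 10, // pixels from the left edge of the output
+  yPosition: 10, // pixels from the top of the output
+};
+```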
+
+### Global Configuration
+
+* `initialAudioGain` – (Optional) Value to set the initial audio gain for the Live Event.
+* `inputEndAction` – (Optional) Indicates the action to take when the current input completes (e.g. end-of-file). When switchAndLoopInputs is configured the encoder will restart at the beginning of the first input. When “none” is configured the encoder will transcode either black, a solid color, or a user-specified slate image per the “Input Loss Behavior” configuration until the next input switch occurs (which is controlled through the Channel Schedule API).
+* `inputLossBehavior` - (Optional) Settings for system actions when input is lost. See [Input Loss Behavior](#input-loss-behavior) for more details.
+* `outputLockingMode` – (Optional) Indicates how MediaLive pipelines are synchronized. PIPELINE\_LOCKING - MediaLive will attempt to synchronize the output of each pipeline to the other. EPOCH\_LOCKING - MediaLive will attempt to synchronize the output of each pipeline to the Unix epoch.
+* `outputTimingSource` – (Optional) Indicates whether the rate of frames emitted by the Live encoder should be paced by its system clock (which optionally may be locked to another source via NTP) or should be locked to the clock of the source that is providing the input stream.
+* `supportLowFramerateInputs` – (Optional) Adjusts video input buffer for streams with very low video framerates. This is commonly set to enabled for music channels with less than one video frame per second.
+
+### Input Loss Behavior
+
+* `passwordParam` – (Optional) Key used to extract the password from EC2 Parameter store.
+* `uri` – (Required) Path to a file accessible to the live stream.
+* `username` – (Optional) Username to be used.
+
+### Motion Graphics Configuration
+
+* `motionGraphicsInsertion` – (Optional) Motion Graphics Insertion.
+* `motionGraphicsSettings` – (Required) Motion Graphics Settings. See [Motion Graphics Settings](#motion-graphics-settings) for more details.
+
+### Motion Graphics Settings
+
+* `htmlMotionGraphicsSettings` – (Optional) Html Motion Graphics Settings.
+
+### Nielsen Configuration
+
+* `distributorId` – (Optional) Enter the Distributor ID assigned to your organization by Nielsen.
+* `nielsenPcmToId3Tagging` – (Optional) Enables Nielsen PCM to ID3 tagging.
+
+### Avail Blanking
+
+* `availBlankingImage` - (Optional) Blanking image to be used. See [Avail Blanking Image](#avail-blanking-image) for more details.
+* `state` - (Optional) When set to enabled, causes video, audio and captions to be blanked when insertion metadata is added.
+
+### Avail Blanking Image
+
+* `uri` - (Required) Path to a file accessible to the live stream.
+* `passwordParam` - (Optional) Key used to extract the password from EC2 Parameter store.
+* `username` - (Optional) Username to be used.
+
+### Archive Group Settings
+
+* `destination` - (Required) A directory and base filename where archive files should be written. See [Destination](#destination) for more details.
+* `archiveCdnSettings` - (Optional) Parameters that control the interactions with the CDN. See [Archive CDN Settings](#archive-cdn-settings) for more details.
+* `rolloverInterval` - (Optional) Number of seconds to write to an archive file before closing it and starting a new one.
+
+### Media Package Group Settings
+
+* `destination` - (Required) A directory and base filename where archive files should be written. See [Destination](#destination) for more details.
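+
+As a rough, hypothetical sketch of how these group settings nest (the property names come from the argument reference; the `destinationRefId` value, the canned ACL string, and the exact list-vs-object nesting are assumptions to verify against the generated bindings), an archive output group under `encoderSettings.outputGroups` might look like this:
+
+```typescript
+// Hypothetical fragment, not a complete channel definition.
+const archiveOutputGroup = {
+  outputGroupSettings: {
+    archiveGroupSettings: [
+      {
+        // Must match the refId of one of the channel-level destination blocks.
+        destination: { destinationRefId: "archive-destination" },
+        archiveCdnSettings: {
+          archiveS3Settings: { cannedAcl: "BUCKET_OWNER_FULL_CONTROL" },
+        },
+        rolloverInterval: 300, // seconds written per archive file
+      },
+    ],
+  },
+};
+```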
+
+### RTMP Group Settings
+
+* `adMarkers` - (Optional) The ad marker type for this output group.
+* `authenticationScheme` - (Optional) Authentication scheme to use when connecting with CDN.
+* `cacheFullBehavior` - (Optional) Controls behavior when content cache fills up.
+* `cacheLength` - (Optional) Cache length in seconds; used to calculate buffer size.
+* `captionData` - (Optional) Controls the types of data that pass to onCaptionInfo outputs.
+* `inputLossAction` - (Optional) Controls the behavior of the RTMP group if input becomes unavailable.
+* `restartDelay` - (Optional) Number of seconds to wait until a restart is initiated.
+
+### UDP Group Settings
+
+* `inputLossAction` - (Optional) Specifies behavior of last resort when input video is lost.
+* `timedMetadataId3Frame` - (Optional) Indicates ID3 frame that has the timecode.
+* `timedMetadataId3Period` - (Optional) Timed metadata interval in seconds.
+
+### Destination
+
+* `destinationRefId` - (Required) Reference ID for the destination.
+
+### Archive CDN Settings
+
+* `archiveS3Settings` - (Optional) Archive S3 Settings. See [Archive S3 Settings](#archive-s3-settings) for more details.
+
+### Archive S3 Settings
+
+* `cannedAcl` - (Optional) Specify the canned ACL to apply to each S3 request.
+
+### Output Settings
+
+* `archiveOutputSettings` - (Optional) Archive output settings. See [Archive Output Settings](#archive-output-settings) for more details.
+* `mediaPackageOutputSettings` - (Optional) Media package output settings. This can be set as an empty block.
+* `multiplexOutputSettings` - (Optional) Multiplex output settings. See [Multiplex Output Settings](#multiplex-output-settings) for more details.
+* `rtmpOutputSettings` - (Optional) RTMP output settings. See [RTMP Output Settings](#rtmp-output-settings) for more details.
+* `udpOutputSettings` - (Optional) UDP output settings. See [UDP Output Settings](#udp-output-settings) for more details.
+
+### Archive Output Settings
+
+* `containerSettings` - (Required) Settings specific to the container type of the file. See [Container Settings](#container-settings) for more details.
+* `extension` - (Optional) Output file extension.
+* `nameModifier` - (Optional) String concatenated to the end of the destination filename. Required for multiple outputs of the same type.
+
+### Multiplex Output Settings
+
+* `destination` - (Required) Destination is a multiplex. See [Destination](#destination) for more details.
+
+### RTMP Output Settings
+
+* `destination` - (Required) The RTMP endpoint excluding the stream name. See [Destination](#destination) for more details.
+* `certificateMode` - (Optional) Setting to allow self-signed or verified RTMP certificates.
+* `connectionRetryInterval` - (Optional) Number of seconds to wait before retrying connection to the flash media server if the connection is lost.
+* `numRetries` - (Optional) Number of retry attempts.
+
+### Container Settings
+
+* `m2TsSettings` - (Optional) M2ts Settings. See [M2ts Settings](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-medialive-channel-m2tssettings.html) for more details.
+* `rawSettings` - (Optional) Raw Settings. This can be set as an empty block.
+
+### UDP Output Settings
+
+* `containerSettings` - (Required) UDP container settings. See [Container Settings](#container-settings) for more details.
+* `destination` - (Required) Destination address and port number for RTP or UDP packets. See [Destination](#destination) for more details.
+* `bufferMsec` - (Optional) UDP output buffering in milliseconds. +* `fecOutputSetting` - (Optional) Settings for enabling and adjusting Forward Error Correction on UDP outputs. See [FEC Output Settings](#fec-output-settings) for more details. + +### FEC Output Settings + +* `columnDepth` - (Optional) The height of the FEC protection matrix. +* `includeFec` - (Optional) Enables column only or column and row based FEC. +* `rowLength` - (Optional) The width of the FEC protection matrix. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Channel. +* `channelId` - ID of the Channel. + +## Timeouts + +[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts): + +* `create` - (Default `15M`) +* `update` - (Default `15M`) +* `delete` - (Default `15M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive Channel using the `channelId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MediaLive Channel using the `channelId`. For example: + +```console +% terraform import aws_medialive_channel.example 1234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/medialive_input.html.markdown b/website/docs/cdktf/typescript/r/medialive_input.html.markdown new file mode 100644 index 00000000000..ad89ebd4cef --- /dev/null +++ b/website/docs/cdktf/typescript/r/medialive_input.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_input" +description: |- + Terraform resource for managing an AWS MediaLive Input. +--- + + + +# Resource: aws_medialive_input + +Terraform resource for managing an AWS MediaLive Input. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MedialiveInput } from "./.gen/providers/aws/medialive-input"; +import { MedialiveInputSecurityGroup } from "./.gen/providers/aws/medialive-input-security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new MedialiveInputSecurityGroup(this, "example", { + tags: { + ENVIRONMENT: "prod", + }, + whitelistRules: [ + { + cidr: "10.0.0.8/32", + }, + ], + }); + const awsMedialiveInputExample = new MedialiveInput(this, "example_1", { + inputSecurityGroups: [example.id], + name: "example-input", + tags: { + ENVIRONMENT: "prod", + }, + type: "UDP_PUSH", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsMedialiveInputExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the input. 
+* `inputSecurityGroups` - (Required) List of input security groups.
+* `type` - (Required) The input type. `UDP_PUSH` (used in the example above) is one of the types AWS Elemental MediaLive supports; see the AWS Elemental MediaLive documentation for the full list.
+
+The following arguments are optional:
+
+* `destinations` - (Optional) Destination settings for PUSH type inputs. See [Destinations](#destinations) for more details.
+* `inputDevices` - (Optional) Settings for the devices. See [Input Devices](#input-devices) for more details.
+* `mediaConnectFlows` - (Optional) A list of the MediaConnect Flows. See [Media Connect Flows](#media-connect-flows) for more details.
+* `roleArn` - (Optional) The ARN of the role this input assumes during and after creation.
+* `sources` - (Optional) The source URLs for a PULL-type input. See [Sources](#sources) for more details.
+* `tags` - (Optional) A map of tags to assign to the Input. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc` - (Optional) Settings for a private VPC Input. See [VPC](#vpc) for more details.
+
+### Destinations
+
+* `streamName` - A unique name for the location the RTMP stream is being pushed to.
+
+### Input Devices
+
+* `id` - The unique ID for the device.
+
+### Media Connect Flows
+
+* `flowArn` - The ARN of the MediaConnect Flow.
+
+### Sources
+
+* `passwordParam` - The key used to extract the password from EC2 Parameter store.
+* `url` - The URL where the stream is pulled from.
+* `username` - The username for the input source.
+
+### VPC
+
+* `subnetIds` - A list of 2 VPC subnet IDs from the same VPC.
+* `securityGroupIds` - A list of up to 5 EC2 VPC security group IDs to attach to the Input.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Input.
+* `attachedChannels` - Channels attached to Input.
+* `inputClass` - The input class.
+* `inputPartnerIds` - A list of IDs for all Inputs which are partners of this one.
+* `inputSourceType` - Source type of the input.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive Input using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MediaLive Input using the `id`.
For example: + +```console +% terraform import aws_medialive_input.example 12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/medialive_input_security_group.html.markdown b/website/docs/cdktf/typescript/r/medialive_input_security_group.html.markdown new file mode 100644 index 00000000000..740b8b2bf5c --- /dev/null +++ b/website/docs/cdktf/typescript/r/medialive_input_security_group.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_input_security_group" +description: |- + Terraform resource for managing an AWS MediaLive InputSecurityGroup. +--- + + + +# Resource: aws_medialive_input_security_group + +Terraform resource for managing an AWS MediaLive InputSecurityGroup. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MedialiveInputSecurityGroup } from "./.gen/providers/aws/medialive-input-security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MedialiveInputSecurityGroup(this, "example", { + tags: { + ENVIRONMENT: "prod", + }, + whitelistRules: [ + { + cidr: "10.0.0.8/32", + }, + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `whitelistRules` - (Required) Whitelist rules. See [Whitelist Rules](#whitelist-rules) for more details. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the InputSecurityGroup. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Whitelist Rules + +* `cidr` (Required) - The IPv4 CIDR that's whitelisted. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - InputSecurityGroup Id. +* `arn` - ARN of the InputSecurityGroup. +* `inputs` - The list of inputs currently using this InputSecurityGroup. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `5M`) +* `update` - (Default `5M`) +* `delete` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive InputSecurityGroup using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MediaLive InputSecurityGroup using the `id`. 
For example: + +```console +% terraform import aws_medialive_input_security_group.example 123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/medialive_multiplex.html.markdown b/website/docs/cdktf/typescript/r/medialive_multiplex.html.markdown new file mode 100644 index 00000000000..0b90d7fc834 --- /dev/null +++ b/website/docs/cdktf/typescript/r/medialive_multiplex.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_multiplex" +description: |- + Terraform resource for managing an AWS MediaLive Multiplex. +--- + + + +# Resource: aws_medialive_multiplex + +Terraform resource for managing an AWS MediaLive Multiplex. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { MedialiveMultiplex } from "./.gen/providers/aws/medialive-multiplex"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const available = new DataAwsAvailabilityZones(this, "available", { + state: "available", + }); + new MedialiveMultiplex(this, "example", { + availabilityZones: [ + Token.asString(propertyAccess(available.names, ["0"])), + Token.asString(propertyAccess(available.names, ["1"])), + ], + multiplexSettings: { + maximumVideoBufferDelayMilliseconds: 1000, + transportStreamBitrate: 1000000, + transportStreamId: 1, + transportStreamReservedBitrate: 1, + }, + name: "example-multiplex-changed", + startMultiplex: true, + tags: { + tag1: "value1", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `availabilityZones` - (Required) A list of availability zones. You must specify exactly two. +* `multiplexSettings`- (Required) Multiplex settings. See [Multiplex Settings](#multiplex-settings) for more details. +* `name` - (Required) name of Multiplex. + +The following arguments are optional: + +* `startMultiplex` - (Optional) Whether to start the Multiplex. Defaults to `false`. +* `tags` - (Optional) A map of tags to assign to the Multiplex. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Multiplex Settings + +* `transportStreamBitrate` - (Required) Transport stream bit rate. +* `transportStreamId` - (Required) Unique ID for each multiplex. +* `transportStreamReservedBitrate` - (Optional) Transport stream reserved bit rate. +* `maximumVideoBufferDelayMilliseconds` - (Optional) Maximum video buffer delay. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Multiplex. 
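+
+Like any cdktf resource attribute, the exported values are tokens that resolve at apply time. A minimal, hypothetical sketch (trimmed to the required arguments; names and values are illustrative):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { MedialiveMultiplex } from "./.gen/providers/aws/medialive-multiplex";
+
+class MultiplexArnSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new MedialiveMultiplex(this, "example", {
+      availabilityZones: ["us-west-2a", "us-west-2b"],
+      multiplexSettings: {
+        transportStreamBitrate: 1000000,
+        transportStreamId: 1,
+      },
+      name: "example-multiplex",
+    });
+    // The ARN is only known after apply, so expose it as a stack output.
+    new TerraformOutput(this, "multiplex_arn", { value: example.arn });
+  }
+}
+```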
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive Multiplex using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MediaLive Multiplex using the `id`. For example: + +```console +% terraform import aws_medialive_multiplex.example 12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/medialive_multiplex_program.html.markdown b/website/docs/cdktf/typescript/r/medialive_multiplex_program.html.markdown new file mode 100644 index 00000000000..b9033c7a3f1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/medialive_multiplex_program.html.markdown @@ -0,0 +1,141 @@ +--- +subcategory: "Elemental MediaLive" +layout: "aws" +page_title: "AWS: aws_medialive_multiplex_program" +description: |- + Terraform resource for managing an AWS MediaLive MultiplexProgram. +--- + + + +# Resource: aws_medialive_multiplex_program + +Terraform resource for managing an AWS MediaLive MultiplexProgram. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { MedialiveMultiplex } from "./.gen/providers/aws/medialive-multiplex"; +import { MedialiveMultiplexProgram } from "./.gen/providers/aws/medialive-multiplex-program"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const available = new DataAwsAvailabilityZones(this, "available", { + state: "available", + }); + const example = new MedialiveMultiplex(this, "example", { + availabilityZones: [ + Token.asString(propertyAccess(available.names, ["0"])), + Token.asString(propertyAccess(available.names, ["1"])), + ], + multiplexSettings: { + maximumVideoBufferDelayMilliseconds: 1000, + transportStreamBitrate: 1000000, + transportStreamId: 1, + transportStreamReservedBitrate: 1, + }, + name: "example-multiplex-changed", + startMultiplex: true, + tags: { + tag1: "value1", + }, + }); + const awsMedialiveMultiplexProgramExample = new MedialiveMultiplexProgram( + this, + "example_2", + { + multiplexId: example.id, + multiplexProgramSettings: [ + { + preferredChannelPipeline: "CURRENTLY_ACTIVE", + programNumber: 1, + videoSettings: [ + { + constantBitrate: 100000, + }, + ], + }, + ], + programName: "example_program", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsMedialiveMultiplexProgramExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `multiplexId` - (Required) Multiplex ID.
+* `programName` - (Required) Unique program name.
+* `multiplexProgramSettings` - (Required) MultiplexProgram settings. See [Multiplex Program Settings](#multiplex-program-settings) for more details.
+
+### Multiplex Program Settings
+
+* `programNumber` - (Required) Unique program number.
+* `preferredChannelPipeline` - (Required) Enum for preferred channel pipeline. Options are `CURRENTLY_ACTIVE`, `PIPELINE_0`, or `PIPELINE_1`.
+* `serviceDescriptor` - (Optional) Service Descriptor. See [Service Descriptor](#service-descriptor) for more details.
+* `videoSettings` - (Optional) Video settings. See [Video Settings](#video-settings) for more details.
+
+### Service Descriptor
+
+* `providerName` - (Required) Unique provider name.
+* `serviceName` - (Required) Unique service name.
+
+### Video Settings
+
+* `constantBitrate` - (Optional) Constant bitrate value.
+* `statmuxSettings` - (Optional) Statmux settings. See [Statmux Settings](#statmux-settings) for more details.
+
+### Statmux Settings
+
+* `minimumBitrate` - (Optional) Minimum bitrate.
+* `maximumBitrate` - (Optional) Maximum bitrate.
+* `priority` - (Optional) Priority value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the MultiplexProgram.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MediaLive MultiplexProgram using the `id`, or a combination of "`programName`/`multiplexId`". For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MediaLive MultiplexProgram using the `id`, or a combination of "`programName`/`multiplexId`". For example:
+
+```console
+% terraform import aws_medialive_multiplex_program.example example_program/1234567
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/memorydb_acl.html.markdown b/website/docs/cdktf/typescript/r/memorydb_acl.html.markdown
new file mode 100644
index 00000000000..45f403297da
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/memorydb_acl.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_acl"
+description: |-
+  Provides a MemoryDB ACL.
+---
+
+
+
+# Resource: aws_memorydb_acl
+
+Provides a MemoryDB ACL.
+
+More information about users and ACLs can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/clusters.acls.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { MemorydbAcl } from "./.gen/providers/aws/memorydb-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MemorydbAcl(this, "example", { + name: "my-acl", + userNames: ["my-user-1", "my-user-2"], + }); + } +} + +``` + +## Argument Reference + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the ACL. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `userNames` - (Optional) Set of MemoryDB user names to be included in this ACL. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `name`. +* `arn` - The ARN of the ACL. +* `minimumEngineVersion` - The minimum engine version supported by the ACL. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an ACL using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import an ACL using the `name`. For example: + +```console +% terraform import aws_memorydb_acl.example my-acl +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/memorydb_cluster.html.markdown b/website/docs/cdktf/typescript/r/memorydb_cluster.html.markdown new file mode 100644 index 00000000000..d1e504dc158 --- /dev/null +++ b/website/docs/cdktf/typescript/r/memorydb_cluster.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_cluster" +description: |- + Provides a MemoryDB Cluster. +--- + + + +# Resource: aws_memorydb_cluster + +Provides a MemoryDB Cluster. + +More information about MemoryDB can be found in the [Developer Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/what-is-memorydb-for-redis.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { MemorydbCluster } from "./.gen/providers/aws/memorydb-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MemorydbCluster(this, "example", {
+      aclName: "open-access",
+      name: "my-cluster",
+      nodeType: "db.t4g.small",
+      numShards: 2,
+      securityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+      snapshotRetentionLimit: 7,
+      subnetGroupName: Token.asString(awsMemorydbSubnetGroupExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `aclName` - (Required) The name of the Access Control List to associate with the cluster.
+* `nodeType` - (Required) The compute and memory capacity of the nodes in the cluster. See AWS documentation on [supported node types](https://docs.aws.amazon.com/memorydb/latest/devguide/nodes.supportedtypes.html) as well as [vertical scaling](https://docs.aws.amazon.com/memorydb/latest/devguide/cluster-vertical-scaling.html).
+
+The following arguments are optional:
+
+* `autoMinorVersionUpgrade` - (Optional, Forces new resource) When set to `true`, the cluster will automatically receive minor engine version upgrades after launch. Defaults to `true`.
+* `dataTiering` - (Optional, Forces new resource) Enables data tiering. This option is not supported by all instance types. For more information, see [Data tiering](https://docs.aws.amazon.com/memorydb/latest/devguide/data-tiering.html).
+* `description` - (Optional) Description for the cluster. Defaults to `"Managed by Terraform"`.
+* `engineVersion` - (Optional) Version number of the Redis engine to be used for the cluster. Downgrades are not supported.
+* `finalSnapshotName` - (Optional) Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
+* `kmsKeyArn` - (Optional, Forces new resource) ARN of the KMS key used to encrypt the cluster at rest.
+* `maintenanceWindow` - (Optional) Specifies the weekly time range during which maintenance on the cluster is performed. Specify as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:23:00-mon:01:30`.
+* `name` - (Optional, Forces new resource) Name of the cluster. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `numReplicasPerShard` - (Optional) The number of replicas to apply to each shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard).
+* `numShards` - (Optional) The number of shards in the cluster. Defaults to `1`.
+* `parameterGroupName` - (Optional) The name of the parameter group associated with the cluster.
+* `port` - (Optional, Forces new resource) The port number on which each of the nodes accepts connections. Defaults to `6379`.
+* `securityGroupIds` - (Optional) Set of VPC Security Group IDs to associate with this cluster.
+* `snapshotArns` - (Optional, Forces new resource) List of ARNs that uniquely identify RDB snapshot files stored in S3. The snapshot files will be used to populate the new cluster. Object names in the ARNs cannot contain any commas.
+* `snapshotName` - (Optional, Forces new resource) The name of a snapshot from which to restore data into the new cluster.
+* `snapshotRetentionLimit` - (Optional) The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled. Defaults to `0`.
+* `snapshotWindow` - (Optional) The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
+* `snsTopicArn` - (Optional) ARN of the SNS topic to which cluster notifications are sent.
+* `subnetGroupName` - (Optional, Forces new resource) The name of the subnet group to be used for the cluster. Defaults to a subnet group consisting of default VPC subnets.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `tlsEnabled` - (Optional, Forces new resource) A flag to enable in-transit encryption on the cluster. When set to `false`, the `aclName` must be `open-access`. Defaults to `true`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Same as `name`.
+* `arn` - The ARN of the cluster.
+* `clusterEndpoint`
+    * `address` - DNS hostname of the cluster configuration endpoint.
+    * `port` - Port number that the cluster configuration endpoint is listening on.
+* `enginePatchVersion` - Patch version number of the Redis engine used by the cluster.
+* `shards` - Set of shards in this cluster.
+    * `name` - Name of this shard.
+    * `numNodes` - Number of individual nodes in this shard.
+    * `slots` - Keyspace for this shard. Example: `0-16383`.
+    * `nodes` - Set of nodes in this shard.
+        * `availabilityZone` - The Availability Zone in which the node resides.
+        * `createTime` - The date and time when the node was created. Example: `2022-01-01T21:00:00Z`.
+        * `name` - Name of this node.
+        * `endpoint`
+            * `address` - DNS hostname of the node.
+            * `port` - Port number that this node is listening on.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `120M`)
+- `update` - (Default `120M`)
+- `delete` - (Default `120M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a cluster using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a cluster using the `name`.
For example: + +```console +% terraform import aws_memorydb_cluster.example my-cluster +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/memorydb_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/memorydb_parameter_group.html.markdown new file mode 100644 index 00000000000..cbd70c0ceb9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/memorydb_parameter_group.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_parameter_group" +description: |- + Provides a MemoryDB Parameter Group. +--- + + + +# Resource: aws_memorydb_parameter_group + +Provides a MemoryDB Parameter Group. + +More information about parameter groups can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/parametergroups.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MemorydbParameterGroup } from "./.gen/providers/aws/memorydb-parameter-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MemorydbParameterGroup(this, "example", { + family: "memorydb_redis6", + name: "my-parameter-group", + parameter: [ + { + name: "activedefrag", + value: "yes", + }, + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `family` - (Required, Forces new resource) The engine version that the parameter group can be used with. + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the parameter group. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `description` - (Optional, Forces new resource) Description for the parameter group. Defaults to `"Managed by Terraform"`. +* `parameter` - (Optional) Set of MemoryDB parameters to apply. Any parameters not specified will fall back to their family defaults. Detailed below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### parameter Configuration Block + +* `name` - (Required) The name of the parameter. +* `value` - (Required) The value of the parameter. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `name`. +* `arn` - The ARN of the parameter group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a parameter group using the `name`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a parameter group using the `name`. For example: + +```console +% terraform import aws_memorydb_parameter_group.example my-parameter-group +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/memorydb_snapshot.html.markdown b/website/docs/cdktf/typescript/r/memorydb_snapshot.html.markdown new file mode 100644 index 00000000000..0b165cc96ed --- /dev/null +++ b/website/docs/cdktf/typescript/r/memorydb_snapshot.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_snapshot" +description: |- + Provides a MemoryDB Snapshot. +--- + + + +# Resource: aws_memorydb_snapshot + +Provides a MemoryDB Snapshot. + +More information about snapshot and restore can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/snapshots.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MemorydbSnapshot } from "./.gen/providers/aws/memorydb-snapshot"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MemorydbSnapshot(this, "example", { + clusterName: Token.asString(awsMemorydbClusterExample.name), + name: "my-snapshot", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `clusterName` - (Required, Forces new resource) Name of the MemoryDB cluster to take a snapshot of. +* `name` - (Optional, Forces new resource) Name of the snapshot. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `kmsKeyArn` - (Optional, Forces new resource) ARN of the KMS key used to encrypt the snapshot at rest. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the snapshot. +* `arn` - The ARN of the snapshot. +* `clusterConfiguration` - The configuration of the cluster from which the snapshot was taken. + * `description` - Description for the cluster. + * `engineVersion` - Version number of the Redis engine used by the cluster. + * `maintenanceWindow` - The weekly time range during which maintenance on the cluster is performed. + * `name` - Name of the cluster. + * `nodeType` - Compute and memory capacity of the nodes in the cluster. + * `numShards` - Number of shards in the cluster. + * `parameterGroupName` - Name of the parameter group associated with the cluster. 
+ * `port` - Port number on which the cluster accepts connections. + * `snapshotRetentionLimit` - Number of days for which MemoryDB retains automatic snapshots before deleting them. + * `snapshotWindow` - The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of the shard. + * `subnetGroupName` - Name of the subnet group used by the cluster. + * `topicArn` - ARN of the SNS topic to which cluster notifications are sent. + * `vpcId` - The VPC in which the cluster exists. +* `source` - Indicates whether the snapshot is from an automatic backup (`automated`) or was created manually (`manual`). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `120M`) +- `delete` - (Default `120M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a snapshot using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a snapshot using the `name`. For example: + +```console +% terraform import aws_memorydb_snapshot.example my-snapshot +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/memorydb_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/memorydb_subnet_group.html.markdown new file mode 100644 index 00000000000..ed6448bb36b --- /dev/null +++ b/website/docs/cdktf/typescript/r/memorydb_subnet_group.html.markdown @@ -0,0 +1,102 @@ +--- +subcategory: "MemoryDB for Redis" +layout: "aws" +page_title: "AWS: aws_memorydb_subnet_group" +description: |- + Provides a MemoryDB Subnet Group. +--- + + + +# Resource: aws_memorydb_subnet_group + +Provides a MemoryDB Subnet Group. + +More information about subnet groups can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/subnetgroups.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MemorydbSubnetGroup } from "./.gen/providers/aws/memorydb-subnet-group"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + }); + const awsSubnetExample = new Subnet(this, "example_1", { + availabilityZone: "us-west-2a", + cidrBlock: "10.0.0.0/24", + vpcId: example.id, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsSubnetExample.overrideLogicalId("example");
+    const awsMemorydbSubnetGroupExample = new MemorydbSubnetGroup(
+      this,
+      "example_2",
+      {
+        name: "my-subnet-group",
+        subnetIds: [Token.asString(awsSubnetExample.id)],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMemorydbSubnetGroupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `subnetIds` - (Required) Set of VPC Subnet IDs for the subnet group. At least one subnet must be provided.
+
+The following arguments are optional:
+
+* `name` - (Optional, Forces new resource) Name of the subnet group. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `description` - (Optional) Description for the subnet group. Defaults to `"Managed by Terraform"`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the subnet group.
+* `arn` - The ARN of the subnet group.
+* `vpcId` - The VPC in which the subnet group exists.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a subnet group using its `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a subnet group using its `name`. For example:
+
+```console
+% terraform import aws_memorydb_subnet_group.example my-subnet-group
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/memorydb_user.html.markdown b/website/docs/cdktf/typescript/r/memorydb_user.html.markdown
new file mode 100644
index 00000000000..646f731dc30
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/memorydb_user.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "MemoryDB for Redis"
+layout: "aws"
+page_title: "AWS: aws_memorydb_user"
+description: |-
+  Provides a MemoryDB User.
+---
+
+
+
+# Resource: aws_memorydb_user
+
+Provides a MemoryDB User.
+
+More information about users and ACLs can be found in the [MemoryDB User Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/clusters.acls.html).
+
+~> **Note:** All arguments including the username and passwords will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MemorydbUser } from "./.gen/providers/aws/memorydb-user"; +import { Password } from "./.gen/providers/random/password"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: random. + For a more precise conversion please use the --provider flag in convert.*/ + const example = new Password(this, "example", { + length: 16, + }); + const awsMemorydbUserExample = new MemorydbUser(this, "example_1", { + accessString: "on ~* &* +@all", + authenticationMode: { + passwords: [Token.asString(example.result)], + type: "password", + }, + userName: "my-user", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsMemorydbUserExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `accessString` - (Required) The access permissions string used for this user. +* `authenticationMode` - (Required) Denotes the user's authentication properties. Detailed below. +* `userName` - (Required, Forces new resource) Name of the MemoryDB user. Up to 40 characters. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### authentication_mode Configuration Block + +* `passwords` - (Required) The set of passwords used for authentication. You can create up to two passwords for each user. +* `type` - (Required) Indicates whether the user requires a password to authenticate. Must be set to `password`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Same as `userName`. +* `arn` - The ARN of the user. +* `minimumEngineVersion` - The minimum engine version supported for the user. +* `authenticationMode` configuration block + * `passwordCount` - The number of passwords belonging to the user. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a user using the `userName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a user using the `userName`. 
For example:
+
+```console
+% terraform import aws_memorydb_user.example my-user
+```
+
+The `passwords` are not available for imported resources, as this information cannot be read back from the MemoryDB API.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/mq_broker.html.markdown b/website/docs/cdktf/typescript/r/mq_broker.html.markdown
new file mode 100644
index 00000000000..635765e7c67
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/mq_broker.html.markdown
@@ -0,0 +1,232 @@
+---
+subcategory: "MQ"
+layout: "aws"
+page_title: "AWS: aws_mq_broker"
+description: |-
+  Provides an MQ Broker Resource
+---
+
+
+# Resource: aws_mq_broker
+
+Provides an Amazon MQ broker resource. This resource also manages users for the broker.
+
+-> For more information on Amazon MQ, see [Amazon MQ documentation](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/welcome.html).
+
+~> **NOTE:** Amazon MQ currently places limits on **RabbitMQ** brokers. For example, a RabbitMQ broker cannot have: instances with an associated IP address of an ENI attached to the broker, an associated LDAP server to authenticate and authorize broker connections, storage type `efs`, audit logging, or `configuration` blocks. Although this resource allows you to create RabbitMQ users, RabbitMQ users cannot have console access or groups. Also, Amazon MQ does not return information about RabbitMQ users so drift detection is not possible.
+
+~> **NOTE:** Changes to an MQ Broker can occur when you change a parameter, such as `configuration` or `user`, and are reflected in the next maintenance window. Because of this, Terraform may report a difference in its planning phase because a modification has not yet taken place. You can use the `applyImmediately` flag to instruct the service to apply the change immediately (see documentation below). Using `applyImmediately` can result in a brief downtime as the broker reboots.
+
+~> **NOTE:** All arguments including the username and password will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+### Basic Example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { MqBroker } from "./.gen/providers/aws/mq-broker";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MqBroker(this, "example", {
+      brokerName: "example",
+      configuration: {
+        id: test.id,
+        revision: test.latestRevision,
+      },
+      engineType: "ActiveMQ",
+      engineVersion: "5.15.9",
+      hostInstanceType: "mq.t2.micro",
+      securityGroups: [Token.asString(awsSecurityGroupTest.id)],
+      user: [
+        {
+          password: "MindTheGap",
+          username: "ExampleUser",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### High-throughput Optimized Example
+
+This example shows the use of EBS storage for high-throughput optimized performance.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
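+ * This variant assumes the same externally defined configuration (`test`) and
+ * security group (`awsSecurityGroupTest`) as the basic example above; only the
+ * instance type and storage type change.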
+ */ +import { MqBroker } from "./.gen/providers/aws/mq-broker"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MqBroker(this, "example", { + brokerName: "example", + configuration: { + id: test.id, + revision: test.latestRevision, + }, + engineType: "ActiveMQ", + engineVersion: "5.15.9", + hostInstanceType: "mq.m5.large", + securityGroups: [Token.asString(awsSecurityGroupTest.id)], + storageType: "ebs", + user: [ + { + password: "MindTheGap", + username: "ExampleUser", + }, + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `brokerName` - (Required) Name of the broker. +* `engineType` - (Required) Type of broker engine. Valid values are `activeMq` and `rabbitMq`. +* `engineVersion` - (Required) Version of the broker engine. See the [AmazonMQ Broker Engine docs](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/broker-engine.html) for supported versions. For example, `5150`. +* `hostInstanceType` - (Required) Broker's instance type. For example, `mqT3Micro`, `mqM5Large`. +* `user` - (Required) Configuration block for broker users. For `engineType` of `rabbitMq`, Amazon MQ does not return broker users preventing this resource from making user updates and drift detection. Detailed below. + +The following arguments are optional: + +* `applyImmediately` - (Optional) Specifies whether any broker modifications are applied immediately, or during the next maintenance window. Default is `false`. +* `authenticationStrategy` - (Optional) Authentication strategy used to secure the broker. Valid values are `simple` and `ldap`. `ldap` is not supported for `engineType` `rabbitMq`. +* `autoMinorVersionUpgrade` - (Optional) Whether to automatically upgrade to new minor versions of brokers as Amazon MQ makes releases available. +* `configuration` - (Optional) Configuration block for broker configuration. Applies to `engineType` of `activeMq` only. Detailed below. +* `deploymentMode` - (Optional) Deployment mode of the broker. Valid values are `singleInstance`, `activeStandbyMultiAz`, and `clusterMultiAz`. Default is `singleInstance`. +* `encryptionOptions` - (Optional) Configuration block containing encryption options. Detailed below. +* `ldapServerMetadata` - (Optional) Configuration block for the LDAP server used to authenticate and authorize connections to the broker. Not supported for `engineType` `rabbitMq`. Detailed below. (Currently, AWS may not process changes to LDAP server metadata.) +* `logs` - (Optional) Configuration block for the logging configuration of the broker. Detailed below. +* `maintenanceWindowStartTime` - (Optional) Configuration block for the maintenance window start time. Detailed below. +* `publiclyAccessible` - (Optional) Whether to enable connections from applications outside of the VPC that hosts the broker's subnets. +* `securityGroups` - (Optional) List of security group IDs assigned to the broker. +* `storageType` - (Optional) Storage type of the broker. For `engineType` `activeMq`, the valid values are `efs` and `ebs`, and the AWS-default is `efs`. For `engineType` `rabbitMq`, only `ebs` is supported. When using `ebs`, only the `mqM5` broker instance type family is supported. +* `subnetIds` - (Optional) List of subnet IDs in which to launch the broker. A `singleInstance` deployment requires one subnet. An `activeStandbyMultiAz` deployment requires multiple subnets. +* `tags` - (Optional) Map of tags to assign to the broker. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### configuration
+
+The following arguments are optional:
+
+* `id` - (Optional) The Configuration ID.
+* `revision` - (Optional) Revision of the Configuration.
+
+### encryption_options
+
+The following arguments are optional:
+
+* `kmsKeyId` - (Optional) Amazon Resource Name (ARN) of Key Management Service (KMS) Customer Master Key (CMK) to use for encryption at rest. Requires setting `useAwsOwnedKey` to `false`. To perform drift detection when AWS-managed CMKs or customer-managed CMKs are in use, this value must be configured.
+* `useAwsOwnedKey` - (Optional) Whether to enable an AWS-owned KMS CMK that is not in your account. Defaults to `true`. Setting to `false` without configuring `kmsKeyId` will create an AWS-managed CMK aliased to `aws/mq` in your account.
+
+### ldap_server_metadata
+
+The following arguments are optional:
+
+* `hosts` - (Optional) List of a fully qualified domain name of the LDAP server and an optional failover server.
+* `roleBase` - (Optional) Fully qualified name of the directory to search for a user’s groups.
+* `roleName` - (Optional) Specifies the LDAP attribute that identifies the group name attribute in the object returned from the group membership query.
+* `roleSearchMatching` - (Optional) Search criteria for groups.
+* `roleSearchSubtree` - (Optional) Whether the directory search scope is the entire sub-tree.
+* `serviceAccountPassword` - (Optional) Service account password.
+* `serviceAccountUsername` - (Optional) Service account username.
+* `userBase` - (Optional) Fully qualified name of the directory where you want to search for users.
+* `userRoleName` - (Optional) Specifies the name of the LDAP attribute for the user group membership.
+* `userSearchMatching` - (Optional) Search criteria for users.
+* `userSearchSubtree` - (Optional) Whether the directory search scope is the entire sub-tree.
+
+### logs
+
+The following arguments are optional:
+
+* `audit` - (Optional) Enables audit logging. Auditing is only possible for `engineType` of `activeMq`. User management actions made using JMX or the ActiveMQ Web Console are logged. Defaults to `false`.
+* `general` - (Optional) Enables general logging via CloudWatch. Defaults to `false`.
+
+### maintenance_window_start_time
+
+The following arguments are required:
+
+* `dayOfWeek` - (Required) Day of the week, e.g., `monday`, `tuesday`, or `wednesday`.
+* `timeOfDay` - (Required) Time, in 24-hour format, e.g., `02:00`.
+* `timeZone` - (Required) Time zone in either the Country/City format or the UTC offset format, e.g., `cet`.
+
+### user
+
+* `consoleAccess` - (Optional) Whether to enable access to the [ActiveMQ Web Console](http://activemq.apache.org/web-console.html) for the user. Applies to `engineType` of `activeMq` only.
+* `groups` - (Optional) List of groups (20 maximum) to which the ActiveMQ user belongs. Applies to `engineType` of `activeMq` only.
+* `password` - (Required) Password of the user. It must be 12 to 250 characters long, contain at least 4 unique characters, and must not contain commas.
+* `replicationUser` - (Optional) Whether to set the replication user. Defaults to `false`.
+* `username` - (Required) Username of the user.
+
+~> **NOTE:** AWS currently does not support updating RabbitMQ users.
Updates to users can only be made in the RabbitMQ UI.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the broker.
+* `id` - Unique ID that Amazon MQ generates for the broker.
+* `instances` - List of information about allocated brokers (both active & standby).
+    * `instances0ConsoleUrl` - The URL of the [ActiveMQ Web Console](http://activemq.apache.org/web-console.html) or the [RabbitMQ Management UI](https://www.rabbitmq.com/management.html#external-monitoring) depending on `engineType`.
+    * `instances0IpAddress` - IP Address of the broker.
+    * `instances0Endpoints` - Broker's wire-level protocol endpoints, in the following order and format, referenceable e.g. as `instances0Endpoints0` (SSL):
+        * For `activeMq`:
+            * `ssl://brokerIdMqUsWest2AmazonawsCom:61617`
+            * `amqp+ssl://brokerIdMqUsWest2AmazonawsCom:5671`
+            * `stomp+ssl://brokerIdMqUsWest2AmazonawsCom:61614`
+            * `mqtt+ssl://brokerIdMqUsWest2AmazonawsCom:8883`
+            * `wss://brokerIdMqUsWest2AmazonawsCom:61619`
+        * For `rabbitMq`:
+            * `amqps://brokerIdMqUsWest2AmazonawsCom:5671`
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30M`)
+* `update` - (Default `30M`)
+* `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MQ Brokers using their broker ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MQ Brokers using their broker ID. For example:
+
+```console
+% terraform import aws_mq_broker.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/mq_configuration.html.markdown b/website/docs/cdktf/typescript/r/mq_configuration.html.markdown
new file mode 100644
index 00000000000..b79885cea68
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/mq_configuration.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "MQ"
+layout: "aws"
+page_title: "AWS: aws_mq_configuration"
+description: |-
+  Provides an MQ Configuration Resource
+---
+
+
+# Resource: aws_mq_configuration
+
+Provides an MQ Configuration Resource.
+
+For more information on Amazon MQ, see [Amazon MQ documentation](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/welcome.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
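+ * The `data` property below carries the broker's XML configuration; see the
+ * Amazon MQ documentation linked under `data` for the supported parameters.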
+ */
+import { MqConfiguration } from "./.gen/providers/aws/mq-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MqConfiguration(this, "example", {
+      data: '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n<broker xmlns="http://activemq.apache.org/schema/core">\n  <plugins>\n    <forcePersistencyModeBrokerPlugin persistent="true"/>\n    <statisticsBrokerPlugin/>\n    <timeStampingBrokerPlugin ttlCeiling="86400000" zeroExpirationOverride="86400000"/>\n  </plugins>\n</broker>\n',
+      description: "Example Configuration",
+      engineType: "ActiveMQ",
+      engineVersion: "5.15.0",
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `data` - (Required) Broker configuration in XML format. See [official docs](https://docs.aws.amazon.com/amazon-mq/latest/developer-guide/amazon-mq-broker-configuration-parameters.html) for supported parameters and format of the XML.
+* `engineType` - (Required) Type of broker engine. Valid values are `activeMq` and `rabbitMq`.
+* `engineVersion` - (Required) Version of the broker engine.
+* `name` - (Required) Name of the configuration.
+
+The following arguments are optional:
+
+* `authenticationStrategy` - (Optional) Authentication strategy associated with the configuration. Valid values are `simple` and `ldap`. `ldap` is not supported for `engineType` `rabbitMq`.
+* `description` - (Optional) Description of the configuration.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the configuration.
+* `id` - Unique ID that Amazon MQ generates for the configuration.
+* `latestRevision` - Latest revision of the configuration.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MQ Configurations using the configuration ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MQ Configurations using the configuration ID. For example:
+
+```console
+% terraform import aws_mq_configuration.example c-0187d1eb-88c8-475a-9b79-16ef5a10c94f
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/msk_cluster.html.markdown b/website/docs/cdktf/typescript/r/msk_cluster.html.markdown
new file mode 100644
index 00000000000..f9d62977800
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/msk_cluster.html.markdown
@@ -0,0 +1,375 @@
+---
+subcategory: "Managed Streaming for Kafka"
+layout: "aws"
+page_title: "AWS: aws_msk_cluster"
+description: |-
+  Terraform resource for managing an AWS Managed Streaming for Kafka cluster.
+---
+
+
+# Resource: aws_msk_cluster
+
+Manages an Amazon MSK cluster.
+
+-> **Note:** This resource manages _provisioned_ clusters. To manage a _serverless_ Amazon MSK cluster, use the [`awsMskServerlessCluster`](/docs/providers/aws/r/msk_serverless_cluster.html) resource.
+ +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformOutput, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { KinesisFirehoseDeliveryStream } from "./.gen/providers/aws/kinesis-firehose-delivery-stream"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { MskCluster } from "./.gen/providers/aws/msk-cluster"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new CloudwatchLogGroup(this, "test", { + name: "msk_broker_logs", + }); + const kms = new KmsKey(this, "kms", { + description: "example", + }); + const bucket = new S3Bucket(this, "bucket", { + bucket: "msk-broker-logs-bucket", + }); + new S3BucketAcl(this, "bucket_acl", { + acl: "private", + bucket: bucket.id, + }); + const vpc = new Vpc(this, "vpc", { + cidrBlock: "192.168.0.0/22", + }); + const azs = new DataAwsAvailabilityZones(this, "azs", { + state: "available", + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["firehose.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const firehoseRole = new IamRole(this, "firehose_role", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "firehose_test_role", + }); + const testStream = new KinesisFirehoseDeliveryStream(this, "test_stream", { + destination: "extended_s3", + extendedS3Configuration: { + bucketArn: bucket.arn, + roleArn: firehoseRole.arn, + }, + lifecycle: { + ignoreChanges: ["logDeliveryEnabled"], + }, + name: "terraform-kinesis-firehose-msk-broker-logs-stream", + tags: { + LogDeliveryEnabled: "placeholder", + }, + }); + const sg = new SecurityGroup(this, "sg", { + vpcId: vpc.id, + }); + const subnetAz1 = new Subnet(this, "subnet_az1", { + availabilityZone: Token.asString(propertyAccess(azs.names, ["0"])), + cidrBlock: "192.168.0.0/24", + vpcId: vpc.id, + }); + const subnetAz2 = new Subnet(this, "subnet_az2", { + availabilityZone: Token.asString(propertyAccess(azs.names, ["1"])), + cidrBlock: "192.168.1.0/24", + vpcId: vpc.id, + }); + const subnetAz3 = new Subnet(this, "subnet_az3", { + availabilityZone: Token.asString(propertyAccess(azs.names, ["2"])), + cidrBlock: "192.168.2.0/24", + vpcId: vpc.id, + }); + const example = new MskCluster(this, "example", { + brokerNodeGroupInfo: { + clientSubnets: [subnetAz1.id, subnetAz2.id, subnetAz3.id], + instanceType: "kafka.m5.large", + securityGroups: [sg.id], + storageInfo: { + ebsStorageInfo: { + volumeSize: 1000, + }, + }, + }, + clusterName: "example", + encryptionInfo: { + 
encryptionAtRestKmsKeyArn: kms.arn, + }, + kafkaVersion: "3.2.0", + loggingInfo: { + brokerLogs: { + cloudwatchLogs: { + enabled: true, + logGroup: test.name, + }, + firehose: { + deliveryStream: testStream.name, + enabled: true, + }, + s3: { + bucket: bucket.id, + enabled: true, + prefix: "logs/msk-", + }, + }, + }, + numberOfBrokerNodes: 3, + openMonitoring: { + prometheus: { + jmxExporter: { + enabledInBroker: true, + }, + nodeExporter: { + enabledInBroker: true, + }, + }, + }, + tags: { + foo: "bar", + }, + }); + new TerraformOutput(this, "bootstrap_brokers_tls", { + value: example.bootstrapBrokersTls, + description: "TLS connection host:port pairs", + }); + new TerraformOutput(this, "zookeeper_connect_string", { + value: example.zookeeperConnectString, + }); + } +} + +``` + +### With volume_throughput argument + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MskCluster } from "./.gen/providers/aws/msk-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MskCluster(this, "example", { + brokerNodeGroupInfo: { + clientSubnets: [subnetAz1.id, subnetAz2.id, subnetAz3.id], + instanceType: "kafka.m5.4xlarge", + securityGroups: [sg.id], + storageInfo: { + ebsStorageInfo: { + provisionedThroughput: { + enabled: true, + volumeThroughput: 250, + }, + volumeSize: 1000, + }, + }, + }, + clusterName: "example", + kafkaVersion: "2.7.1", + numberOfBrokerNodes: 3, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `brokerNodeGroupInfo` - (Required) Configuration block for the broker nodes of the Kafka cluster. +* `clusterName` - (Required) Name of the MSK cluster. +* `kafkaVersion` - (Required) Specify the desired Kafka software version. +* `numberOfBrokerNodes` - (Required) The desired total number of broker nodes in the kafka cluster. It must be a multiple of the number of specified client subnets. +* `clientAuthentication` - (Optional) Configuration block for specifying a client authentication. See below. +* `configurationInfo` - (Optional) Configuration block for specifying a MSK Configuration to attach to Kafka brokers. See below. +* `encryptionInfo` - (Optional) Configuration block for specifying encryption. See below. +* `enhancedMonitoring` - (Optional) Specify the desired enhanced MSK CloudWatch monitoring level. See [Monitoring Amazon MSK with Amazon CloudWatch](https://docs.aws.amazon.com/msk/latest/developerguide/monitoring.html) +* `openMonitoring` - (Optional) Configuration block for JMX and Node monitoring for the MSK cluster. See below. +* `loggingInfo` - (Optional) Configuration block for streaming broker logs to Cloudwatch/S3/Kinesis Firehose. See below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
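+
+-> The optional blocks above compose with the required arguments. As a hand-written sketch (not `cdktf convert` output), the following shows `clientAuthentication` and `configurationInfo` used together; the subnet and security group IDs are placeholders for existing network resources:
+
+```typescript
+// Hand-written sketch: attaches SASL/IAM client authentication and a custom
+// MSK configuration to a cluster. Placeholder IDs stand in for real resources.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { MskCluster } from "./.gen/providers/aws/msk-cluster";
+import { MskConfiguration } from "./.gen/providers/aws/msk-configuration";
+class MskWithIamAuth extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Custom server properties, attached to the cluster via configurationInfo.
+    const config = new MskConfiguration(this, "config", {
+      kafkaVersions: ["3.2.0"],
+      name: "example-config",
+      serverProperties: "auto.create.topics.enable = false\n",
+    });
+    new MskCluster(this, "example", {
+      brokerNodeGroupInfo: {
+        clientSubnets: ["subnet-111", "subnet-222", "subnet-333"], // placeholders
+        instanceType: "kafka.m5.large",
+        securityGroups: ["sg-123456"], // placeholder
+      },
+      // Enable SASL/IAM client authentication (see client_authentication below).
+      clientAuthentication: {
+        sasl: {
+          iam: true,
+        },
+      },
+      // Attach the configuration created above at its latest revision.
+      configurationInfo: {
+        arn: config.arn,
+        revision: config.latestRevision,
+      },
+      clusterName: "example",
+      kafkaVersion: "3.2.0",
+      numberOfBrokerNodes: 3,
+    });
+  }
+}
+```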
+
+### broker_node_group_info Argument Reference
+
+* `clientSubnets` - (Required) A list of subnets to connect to in client VPC ([documentation](https://docs.aws.amazon.com/msk/1.0/apireference/clusters.html#clusters-prop-brokernodegroupinfo-clientsubnets)).
+* `instanceType` - (Required) Specify the instance type to use for the Kafka brokers, e.g., `kafka.m5.large` ([Pricing info](https://aws.amazon.com/msk/pricing/)).
+* `securityGroups` - (Required) A list of the security groups to associate with the elastic network interfaces to control who can communicate with the cluster.
+* `azDistribution` - (Optional) The distribution of broker nodes across availability zones ([documentation](https://docs.aws.amazon.com/msk/1.0/apireference/clusters.html#clusters-model-brokerazdistribution)). Currently the only valid value is `default`.
+* `connectivityInfo` - (Optional) Information about the cluster access configuration. See below. For security reasons, you can't turn on public access while creating an MSK cluster. However, you can update an existing cluster to make it publicly accessible. You can also create a new cluster and then update it to make it publicly accessible ([documentation](https://docs.aws.amazon.com/msk/latest/developerguide/public-access.html)).
+* `storageInfo` - (Optional) A block that contains information about storage volumes attached to MSK broker nodes. See below.
+
+### broker_node_group_info connectivity_info Argument Reference
+
+* `publicAccess` - (Optional) Access control settings for brokers. See below.
+
+### connectivity_info public_access Argument Reference
+
+* `type` - (Optional) Public access type. Valid values: `disabled`, `serviceProvidedEips`.
+
+### broker_node_group_info storage_info Argument Reference
+
+* `ebsStorageInfo` - (Optional) A block that contains EBS volume information. See below.
+
+### storage_info ebs_storage_info Argument Reference
+
+* `provisionedThroughput` - (Optional) A block that contains EBS volume provisioned throughput information. To provision storage throughput, you must choose broker type kafka.m5.4xlarge or larger. See below.
+* `volumeSize` - (Optional) The size in GiB of the EBS volume for the data drive on each broker node. Minimum value of `1` and maximum value of `16384`.
+
+### ebs_storage_info provisioned_throughput Argument Reference
+
+* `enabled` - (Optional) Controls whether provisioned throughput is enabled or not. Default value: `false`.
+* `volumeThroughput` - (Optional) Throughput value of the EBS volumes for the data drive on each Kafka broker node in MiB per second. The minimum value is `250`. The maximum value varies by broker type. Refer to the [documentation on throughput bottlenecks](https://docs.aws.amazon.com/msk/latest/developerguide/msk-provision-throughput.html#throughput-bottlenecks) for the valid maximum values.
+
+### client_authentication Argument Reference
+
+* `sasl` - (Optional) Configuration block for specifying SASL client authentication. See below.
+* `tls` - (Optional) Configuration block for specifying TLS client authentication. See below.
+* `unauthenticated` - (Optional) Enables unauthenticated access.
+
+#### client_authentication sasl Argument Reference
+
+* `iam` - (Optional) Enables IAM client authentication. Defaults to `false`.
+* `scram` - (Optional) Enables SCRAM client authentication via AWS Secrets Manager. Defaults to `false`.
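+
+-> For instance, SASL/SCRAM is enabled with a one-line block (a hand-written fragment, not `cdktf convert` output); the Secrets Manager secrets themselves are attached separately via the [`awsMskScramSecretAssociation`](/docs/providers/aws/r/msk_scram_secret_association.html) resource:
+
+```typescript
+// Hand-written fragment: a clientAuthentication value enabling SCRAM. The
+// secrets are associated with the cluster by aws_msk_scram_secret_association.
+const clientAuthentication = {
+  sasl: {
+    scram: true,
+  },
+};
+```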
+ +#### client_authentication tls Argument Reference + +* `certificateAuthorityArns` - (Optional) List of ACM Certificate Authority Amazon Resource Names (ARNs). + +### configuration_info Argument Reference + +* `arn` - (Required) Amazon Resource Name (ARN) of the MSK Configuration to use in the cluster. +* `revision` - (Required) Revision of the MSK Configuration to use in the cluster. + +### encryption_info Argument Reference + +* `encryptionInTransit` - (Optional) Configuration block to specify encryption in transit. See below. +* `encryptionAtRestKmsKeyArn` - (Optional) You may specify a KMS key short ID or ARN (it will always output an ARN) to use for encrypting your data at rest. If no key is specified, an AWS managed KMS ('aws/msk' managed service) key will be used for encrypting the data at rest. + +#### encryption_info encryption_in_transit Argument Reference + +* `clientBroker` - (Optional) Encryption setting for data in transit between clients and brokers. Valid values: `tls`, `tlsPlaintext`, and `plaintext`. Default value is `tls`. +* `inCluster` - (Optional) Whether data communication among broker nodes is encrypted. Default value: `true`. + +#### open_monitoring Argument Reference + +* `prometheus` - (Required) Configuration block for Prometheus settings for open monitoring. See below. + +#### open_monitoring prometheus Argument Reference + +* `jmxExporter` - (Optional) Configuration block for JMX Exporter. See below. +* `nodeExporter` - (Optional) Configuration block for Node Exporter. See below. + +#### open_monitoring prometheus jmx_exporter Argument Reference + +* `enabledInBroker` - (Required) Indicates whether you want to enable or disable the JMX Exporter. + +#### open_monitoring prometheus node_exporter Argument Reference + +* `enabledInBroker` - (Required) Indicates whether you want to enable or disable the Node Exporter. + +#### logging_info Argument Reference + +* `brokerLogs` - (Required) Configuration block for Broker Logs settings for logging info. See below. + +#### logging_info broker_logs cloudwatch_logs Argument Reference + +* `enabled` - (Optional) Indicates whether you want to enable or disable streaming broker logs to Cloudwatch Logs. +* `logGroup` - (Optional) Name of the Cloudwatch Log Group to deliver logs to. + +#### logging_info broker_logs firehose Argument Reference + +* `enabled` - (Optional) Indicates whether you want to enable or disable streaming broker logs to Kinesis Data Firehose. +* `deliveryStream` - (Optional) Name of the Kinesis Data Firehose delivery stream to deliver logs to. + +#### logging_info broker_logs s3 Argument Reference + +* `enabled` - (Optional) Indicates whether you want to enable or disable streaming broker logs to S3. +* `bucket` - (Optional) Name of the S3 bucket to deliver logs to. +* `prefix` - (Optional) Prefix to append to the folder name. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the MSK cluster. +* `bootstrapBrokers` - Comma separated list of one or more hostname:port pairs of kafka brokers suitable to bootstrap connectivity to the kafka cluster. Contains a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `plaintext` or `tlsPlaintext`. The resource sorts values alphabetically. AWS may not always return all endpoints so this value is not guaranteed to be stable across applies. +* `bootstrapBrokersPublicSaslIam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. 
For example, `b1PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9198,b2PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9198,b3PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9198`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `tlsPlaintext` or `tls` and `clientAuthentication0Sasl0Iam` is set to `true` and `brokerNodeGroupInfo0ConnectivityInfo0PublicAccess0Type` is set to `serviceProvidedEips` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersPublicSaslScram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs. For example, `b1PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9196,b2PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9196,b3PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9196`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `tlsPlaintext` or `tls` and `clientAuthentication0Sasl0Scram` is set to `true` and `brokerNodeGroupInfo0ConnectivityInfo0PublicAccess0Type` is set to `serviceProvidedEips` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersPublicTls` - One or more DNS names (or IP addresses) and TLS port pairs. For example, `b1PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9194,b2PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9194,b3PublicExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9194`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `tlsPlaintext` or `tls` and `brokerNodeGroupInfo0ConnectivityInfo0PublicAccess0Type` is set to `serviceProvidedEips` and the cluster fulfills all other requirements for public access. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersSaslIam` - One or more DNS names (or IP addresses) and SASL IAM port pairs. For example, `b1ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9098,b2ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9098,b3ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9098`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `tlsPlaintext` or `tls` and `clientAuthentication0Sasl0Iam` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersSaslScram` - One or more DNS names (or IP addresses) and SASL SCRAM port pairs. For example, `b1ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9096,b2ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9096,b3ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9096`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `tlsPlaintext` or `tls` and `clientAuthentication0Sasl0Scram` is set to `true`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies.
+* `bootstrapBrokersTls` - One or more DNS names (or IP addresses) and TLS port pairs.
For example, `b1ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9094,b2ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9094,b3ExampleClusterNameAbcdeC2KafkaUsEast1AmazonawsCom:9094`. This attribute will have a value if `encryptionInfo0EncryptionInTransit0ClientBroker` is set to `tlsPlaintext` or `tls`. The resource sorts the list alphabetically. AWS may not always return all endpoints so the values may not be stable across applies. +* `currentVersion` - Current version of the MSK Cluster used for updates, e.g., `k13V1Ib3Viyzzh` +* `encryptionInfo0EncryptionAtRestKmsKeyArn` - The ARN of the KMS key used for encryption at rest of the broker data volumes. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `storageMode` - Controls storage mode for supported storage tiers. Valid values are: `local` or `tiered`. +* `zookeeperConnectString` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster. The returned values are sorted alphabetically. The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies. +* `zookeeperConnectStringTls` - A comma separated list of one or more hostname:port pairs to use to connect to the Apache Zookeeper cluster via TLS. The returned values are sorted alphabetically. The AWS API may not return all endpoints, so this value is not guaranteed to be stable across applies. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `120M`) +* `update` - (Default `120M`) +Note that the `update` timeout is used separately for `storageInfo`, `instanceType`, `numberOfBrokerNodes`, `configurationInfo`, `kafkaVersion` and monitoring and logging update timeouts. +* `delete` - (Default `120M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK clusters using the cluster `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MSK clusters using the cluster `arn`. For example: + +```console +% terraform import aws_msk_cluster.example arn:aws:kafka:us-west-2:123456789012:cluster/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/msk_configuration.html.markdown b/website/docs/cdktf/typescript/r/msk_configuration.html.markdown new file mode 100644 index 00000000000..0e9e2503bd3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/msk_configuration.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "Managed Streaming for Kafka" +layout: "aws" +page_title: "AWS: aws_msk_configuration" +description: |- + Terraform resource for managing an Amazon Managed Streaming for Kafka configuration +--- + + + +# Resource: aws_msk_configuration + +Manages an Amazon Managed Streaming for Kafka configuration. More information can be found on the [MSK Developer Guide](https://docs.aws.amazon.com/msk/latest/developerguide/msk-configuration.html). 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MskConfiguration } from "./.gen/providers/aws/msk-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MskConfiguration(this, "example", { + kafkaVersions: ["2.1.0"], + name: "example", + serverProperties: + "auto.create.topics.enable = true\ndelete.topic.enable = true\n\n", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `serverProperties` - (Required) Contents of the server.properties file. Supported properties are documented in the [MSK Developer Guide](https://docs.aws.amazon.com/msk/latest/developerguide/msk-configuration-properties.html). +* `kafkaVersions` - (Required) List of Apache Kafka versions which can use this configuration. +* `name` - (Required) Name of the configuration. +* `description` - (Optional) Description of the configuration. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the configuration. +* `latestRevision` - Latest revision of the configuration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK configurations using the configuration ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MSK configurations using the configuration ARN. For example: + +```console +% terraform import aws_msk_configuration.example arn:aws:kafka:us-west-2:123456789012:configuration/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/msk_scram_secret_association.html.markdown b/website/docs/cdktf/typescript/r/msk_scram_secret_association.html.markdown new file mode 100644 index 00000000000..f750059ff94 --- /dev/null +++ b/website/docs/cdktf/typescript/r/msk_scram_secret_association.html.markdown @@ -0,0 +1,171 @@ +--- +subcategory: "Managed Streaming for Kafka" +layout: "aws" +page_title: "AWS: aws_msk_scram_secret_association" +description: |- + Associates SCRAM secrets with a Managed Streaming for Kafka (MSK) cluster. +--- + + + +# Resource: aws_msk_scram_secret_association + +Associates SCRAM secrets stored in the Secrets Manager service with a Managed Streaming for Kafka (MSK) cluster. + +-> **Note:** The following assumes the MSK cluster has SASL/SCRAM authentication enabled. See below for example usage or refer to the [Username/Password Authentication](https://docs.aws.amazon.com/msk/latest/developerguide/msk-password.html) section of the MSK Developer Guide for more details. 
+
+To set up username and password authentication for a cluster, create an [`awsSecretsmanagerSecret` resource](/docs/providers/aws/r/secretsmanager_secret.html) and associate
+a username and password with the secret using an [`awsSecretsmanagerSecretVersion` resource](/docs/providers/aws/r/secretsmanager_secret_version.html). When creating a secret for the cluster,
+the `name` must have the prefix `amazonMsk` and you must either use an existing custom AWS KMS key or create a new
+custom AWS KMS key for your secret with the [`awsKmsKey` resource](/docs/providers/aws/r/kms_key.html). Note that Kafka requires a policy on the `awsSecretsmanagerSecret`
+resource in order to read it. This policy is attached automatically when the `awsMskScramSecretAssociation` is used;
+however, the policy is not managed by Terraform and will therefore present a diff on plan/apply. For that reason, you must use the [`awsSecretsmanagerSecretPolicy`
+resource](/docs/providers/aws/r/secretsmanager_secret_policy.html) as shown below to keep the Terraform state clean after the secret is created and associated with the cluster.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { MskCluster } from "./.gen/providers/aws/msk-cluster";
+import { MskScramSecretAssociation } from "./.gen/providers/aws/msk-scram-secret-association";
+import { SecretsmanagerSecret } from "./.gen/providers/aws/secretsmanager-secret";
+import { SecretsmanagerSecretPolicy } from "./.gen/providers/aws/secretsmanager-secret-policy";
+import { SecretsmanagerSecretVersion } from "./.gen/providers/aws/secretsmanager-secret-version";
+interface MyConfig {
+  brokerNodeGroupInfo: any;
+  kafkaVersion: any;
+  numberOfBrokerNodes: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new KmsKey(this, "example", {
+      description: "Example Key for MSK Cluster Scram Secret Association",
+    });
+    const awsMskClusterExample = new MskCluster(this, "example_1", {
+      clientAuthentication: {
+        sasl: {
+          scram: true,
+        },
+      },
+      clusterName: "example",
+      brokerNodeGroupInfo: config.brokerNodeGroupInfo,
+      kafkaVersion: config.kafkaVersion,
+      numberOfBrokerNodes: config.numberOfBrokerNodes,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMskClusterExample.overrideLogicalId("example");
+    const awsSecretsmanagerSecretExample = new SecretsmanagerSecret(
+      this,
+      "example_2",
+      {
+        kmsKeyId: example.keyId,
+        name: "AmazonMSK_example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + awsSecretsmanagerSecretExample.overrideLogicalId("example"); + const awsSecretsmanagerSecretVersionExample = + new SecretsmanagerSecretVersion(this, "example_3", { + secretId: Token.asString(awsSecretsmanagerSecretExample.id), + secretString: Token.asString( + Fn.jsonencode({ + password: "pass", + username: "user", + }) + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecretsmanagerSecretVersionExample.overrideLogicalId("example"); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_4", + { + statement: [ + { + actions: ["secretsmanager:getSecretValue"], + effect: "Allow", + principals: [ + { + identifiers: ["kafka.amazonaws.com"], + type: "Service", + }, + ], + resources: [Token.asString(awsSecretsmanagerSecretExample.arn)], + sid: "AWSKafkaResourcePolicy", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsMskScramSecretAssociationExample = new MskScramSecretAssociation( + this, + "example_5", + { + clusterArn: Token.asString(awsMskClusterExample.arn), + dependsOn: [awsSecretsmanagerSecretVersionExample], + secretArnList: [Token.asString(awsSecretsmanagerSecretExample.arn)], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsMskScramSecretAssociationExample.overrideLogicalId("example"); + const awsSecretsmanagerSecretPolicyExample = new SecretsmanagerSecretPolicy( + this, + "example_6", + { + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + secretArn: Token.asString(awsSecretsmanagerSecretExample.arn), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecretsmanagerSecretPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `clusterArn` - (Required, Forces new resource) Amazon Resource Name (ARN) of the MSK cluster. +* `secretArnList` - (Required) List of AWS Secrets Manager secret ARNs. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the MSK cluster. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK SCRAM Secret Associations using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MSK SCRAM Secret Associations using the `id`. 
For example: + +```console +% terraform import aws_msk_scram_secret_association.example arn:aws:kafka:us-west-2:123456789012:cluster/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/msk_serverless_cluster.html.markdown b/website/docs/cdktf/typescript/r/msk_serverless_cluster.html.markdown new file mode 100644 index 00000000000..e9d98639d11 --- /dev/null +++ b/website/docs/cdktf/typescript/r/msk_serverless_cluster.html.markdown @@ -0,0 +1,116 @@ +--- +subcategory: "Managed Streaming for Kafka" +layout: "aws" +page_title: "AWS: aws_msk_serverless_cluster" +description: |- + Terraform resource for managing an Amazon MSK Serverless cluster. +--- + + + +# Resource: aws_msk_serverless_cluster + +Manages an Amazon MSK Serverless cluster. + +-> **Note:** To manage a _provisioned_ Amazon MSK cluster, use the [`awsMskCluster`](/docs/providers/aws/r/msk_cluster.html) resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MskServerlessCluster } from "./.gen/providers/aws/msk-serverless-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MskServerlessCluster(this, "example", { + clientAuthentication: { + sasl: { + iam: { + enabled: true, + }, + }, + }, + clusterName: "Example", + vpcConfig: [ + { + securityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + subnetIds: Token.asList( + propertyAccess(awsSubnetExample, ["*", "id"]) + ), + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `clientAuthentication` - (Required) Specifies client authentication information for the serverless cluster. See below. +* `clusterName` - (Required) The name of the serverless cluster. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `vpcConfig` - (Required) VPC configuration information. See below. + +### client_authentication Argument Reference + +* `sasl` - (Required) Details for client authentication using SASL. See below. + +### sasl Argument Reference + +* `iam` - (Required) Details for client authentication using IAM. See below. + +### iam Argument Reference + +* `enabled` - (Required) Whether SASL/IAM authentication is enabled or not. + +### vpc_config Argument Reference + +* `securityGroupIds` - (Optional) Specifies up to five security groups that control inbound and outbound traffic for the serverless cluster. +* `subnetIds` - (Required) A list of subnets in at least two different Availability Zones that host your client applications. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the serverless cluster. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `120M`) +* `delete` - (Default `120M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK serverless clusters using the cluster `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MSK serverless clusters using the cluster `arn`. For example: + +```console +% terraform import aws_msk_serverless_cluster.example arn:aws:kafka:us-west-2:123456789012:cluster/example/279c0212-d057-4dba-9aa9-1c4e5a25bfc7-3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/mskconnect_connector.html.markdown b/website/docs/cdktf/typescript/r/mskconnect_connector.html.markdown new file mode 100644 index 00000000000..dc98e77f981 --- /dev/null +++ b/website/docs/cdktf/typescript/r/mskconnect_connector.html.markdown @@ -0,0 +1,229 @@ +--- +subcategory: "Managed Streaming for Kafka Connect" +layout: "aws" +page_title: "AWS: aws_mskconnect_connector" +description: |- + Provides an Amazon MSK Connect Connector resource. +--- + + + +# Resource: aws_mskconnect_connector + +Provides an Amazon MSK Connect Connector resource. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
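+ * This example assumes an existing MSK cluster (`awsMskClusterExample`),
+ * security group, subnets (`example1`..`example3`), custom plugin, and IAM
+ * role are defined elsewhere in the stack.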
+ */ +import { MskconnectConnector } from "./.gen/providers/aws/mskconnect-connector"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MskconnectConnector(this, "example", { + capacity: { + autoscaling: { + maxWorkerCount: 2, + mcuCount: 1, + minWorkerCount: 1, + scaleInPolicy: { + cpuUtilizationPercentage: 20, + }, + scaleOutPolicy: { + cpuUtilizationPercentage: 80, + }, + }, + }, + connectorConfiguration: { + "connector.class": + "com.github.jcustenborder.kafka.connect.simulator.SimulatorSinkConnector", + "tasks.max": "1", + topics: "example", + }, + kafkaCluster: { + apacheKafkaCluster: { + bootstrapServers: Token.asString( + awsMskClusterExample.bootstrapBrokersTls + ), + vpc: { + securityGroups: [Token.asString(awsSecurityGroupExample.id)], + subnets: [example1.id, example2.id, example3.id], + }, + }, + }, + kafkaClusterClientAuthentication: { + authenticationType: "NONE", + }, + kafkaClusterEncryptionInTransit: { + encryptionType: "TLS", + }, + kafkaconnectVersion: "2.7.1", + name: "example", + plugin: [ + { + customPlugin: { + arn: Token.asString(awsMskconnectCustomPluginExample.arn), + revision: Token.asNumber( + awsMskconnectCustomPluginExample.latestRevision + ), + }, + }, + ], + serviceExecutionRoleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `capacity` - (Required) Information about the capacity allocated to the connector. See below. +* `connectorConfiguration` - (Required) A map of keys to values that represent the configuration for the connector. +* `description` - (Optional) A summary description of the connector. +* `kafkaCluster` - (Required) Specifies which Apache Kafka cluster to connect to. See below. +* `kafkaClusterClientAuthentication` - (Required) Details of the client authentication used by the Apache Kafka cluster. See below. +* `kafkaClusterEncryptionInTransit` - (Required) Details of encryption in transit to the Apache Kafka cluster. See below. +* `kafkaconnectVersion` - (Required) The version of Kafka Connect. It has to be compatible with both the Apache Kafka cluster's version and the plugins. +* `logDelivery` - (Optional) Details about log delivery. See below. +* `name` - (Required) The name of the connector. +* `plugin` - (Required) Specifies which plugins to use for the connector. See below. +* `serviceExecutionRoleArn` - (Required) The Amazon Resource Name (ARN) of the IAM role used by the connector to access the Amazon Web Services resources that it needs. The types of resources depends on the logic of the connector. For example, a connector that has Amazon S3 as a destination must have permissions that allow it to write to the S3 destination bucket. +* `workerConfiguration` - (Optional) Specifies which worker configuration to use with the connector. See below. + +### capacity Configuration Block + +* `autoscaling` - (Optional) Information about the auto scaling parameters for the connector. See below. +* `provisionedCapacity` - (Optional) Details about a fixed capacity allocated to a connector. See below. + +### autoscaling Configuration Block + +* `maxWorkerCount` - (Required) The maximum number of workers allocated to the connector. +* `mcuCount` - (Optional) The number of microcontroller units (MCUs) allocated to each connector worker. Valid values: `1`, `2`, `4`, `8`. The default value is `1`. 
+* `minWorkerCount` - (Required) The minimum number of workers allocated to the connector.
+* `scaleInPolicy` - (Optional) The scale-in policy for the connector. See below.
+* `scaleOutPolicy` - (Optional) The scale-out policy for the connector. See below.
+
+### scale_in_policy Configuration Block
+
+* `cpuUtilizationPercentage` - (Required) Specifies the CPU utilization percentage threshold at which you want connector scale in to be triggered.
+
+### scale_out_policy Configuration Block
+
+* `cpuUtilizationPercentage` - (Required) The CPU utilization percentage threshold at which you want connector scale out to be triggered.
+
+### provisioned_capacity Configuration Block
+
+* `mcuCount` - (Optional) The number of microcontroller units (MCUs) allocated to each connector worker. Valid values: `1`, `2`, `4`, `8`. The default value is `1`.
+* `workerCount` - (Required) The number of workers that are allocated to the connector.
+
+### kafka_cluster Configuration Block
+
+* `apacheKafkaCluster` - (Required) The Apache Kafka cluster to which the connector is connected.
+
+### apache_kafka_cluster Configuration Block
+
+* `bootstrapServers` - (Required) The bootstrap servers of the cluster.
+* `vpc` - (Required) Details of an Amazon VPC which has network connectivity to the Apache Kafka cluster.
+
+### vpc Configuration Block
+
+* `securityGroups` - (Required) The security groups for the connector.
+* `subnets` - (Required) The subnets for the connector.
+
+### kafka_cluster_client_authentication Configuration Block
+
+* `authenticationType` - (Optional) The type of client authentication used to connect to the Apache Kafka cluster. Valid values: `iam`, `none`. A value of `none` means that no client authentication is used. The default value is `none`.
+
+### kafka_cluster_encryption_in_transit Configuration Block
+
+* `encryptionType` - (Optional) The type of encryption in transit to the Apache Kafka cluster. Valid values: `plaintext`, `tls`. The default value is `plaintext`.
+
+### log_delivery Configuration Block
+
+* `workerLogDelivery` - (Required) The workers can send worker logs to different destination types. This configuration specifies the details of these destinations. See below.
+
+### worker_log_delivery Configuration Block
+
+* `cloudwatchLogs` - (Optional) Details about delivering logs to Amazon CloudWatch Logs. See below.
+* `firehose` - (Optional) Details about delivering logs to Amazon Kinesis Data Firehose. See below.
+* `s3` - (Optional) Details about delivering logs to Amazon S3. See below.
+
+### cloudwatch_logs Configuration Block
+
+* `enabled` - (Optional) Whether log delivery to Amazon CloudWatch Logs is enabled.
+* `logGroup` - (Required) The name of the CloudWatch log group that is the destination for log delivery.
+
+### firehose Configuration Block
+
+* `deliveryStream` - (Optional) The name of the Kinesis Data Firehose delivery stream that is the destination for log delivery.
+* `enabled` - (Required) Specifies whether connector logs get delivered to Amazon Kinesis Data Firehose.
+
+### s3 Configuration Block
+
+* `bucket` - (Optional) The name of the S3 bucket that is the destination for log delivery.
+* `enabled` - (Required) Specifies whether connector logs get sent to the specified Amazon S3 destination.
+* `prefix` - (Optional) The S3 prefix that is the destination for log delivery.
+
+### plugin Configuration Block
+
+* `customPlugin` - (Required) Details about a custom plugin. See below.
+ +### custom_plugin Configuration Block + +* `arn` - (Required) The Amazon Resource Name (ARN) of the custom plugin. +* `revision` - (Required) The revision of the custom plugin. + +### worker_configuration Configuration Block + +* `arn` - (Required) The Amazon Resource Name (ARN) of the worker configuration. +* `revision` - (Required) The revision of the worker configuration. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the connector. +* `version` - The current version of the connector. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `20M`) +* `update` - (Default `20M`) +* `delete` - (Default `10M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK Connect Connector using the connector's `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MSK Connect Connector using the connector's `arn`. For example: + +```console +% terraform import aws_mskconnect_connector.example 'arn:aws:kafkaconnect:eu-central-1:123456789012:connector/example/264edee4-17a3-412e-bd76-6681cfc93805-3' +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/mskconnect_custom_plugin.html.markdown b/website/docs/cdktf/typescript/r/mskconnect_custom_plugin.html.markdown new file mode 100644 index 00000000000..6d3dd8c7c67 --- /dev/null +++ b/website/docs/cdktf/typescript/r/mskconnect_custom_plugin.html.markdown @@ -0,0 +1,123 @@ +--- +subcategory: "Managed Streaming for Kafka Connect" +layout: "aws" +page_title: "AWS: aws_mskconnect_custom_plugin" +description: |- + Provides an Amazon MSK Connect custom plugin resource. +--- + + + +# Resource: aws_mskconnect_custom_plugin + +Provides an Amazon MSK Connect Custom Plugin Resource. + +## Example Usage + +### Basic configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MskconnectCustomPlugin } from "./.gen/providers/aws/mskconnect-custom-plugin"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const awsS3ObjectExample = new S3Object(this, "example_1", { + bucket: example.id, + key: "debezium.zip", + source: "debezium.zip", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsS3ObjectExample.overrideLogicalId("example");
+    const awsMskconnectCustomPluginExample = new MskconnectCustomPlugin(
+      this,
+      "example_2",
+      {
+        contentType: "ZIP",
+        location: {
+          s3: {
+            bucketArn: example.arn,
+            fileKey: Token.asString(awsS3ObjectExample.key),
+          },
+        },
+        name: "debezium-example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsMskconnectCustomPluginExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the custom plugin.
+* `contentType` - (Required) The type of the plugin file. Allowed values are `ZIP` and `JAR`.
+* `location` - (Required) Information about the location of a custom plugin. See below.
+
+The following arguments are optional:
+
+* `description` - (Optional) A summary description of the custom plugin.
+
+### location Argument Reference
+
+* `s3` - (Required) Information about the plugin file stored in Amazon S3. See below.
+
+#### location s3 Argument Reference
+
+* `bucketArn` - (Required) The Amazon Resource Name (ARN) of an S3 bucket.
+* `fileKey` - (Required) The file key for an object in an S3 bucket.
+* `objectVersion` - (Optional) The version of an object in an S3 bucket.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the custom plugin.
+* `latestRevision` - An ID of the latest successfully created revision of the custom plugin.
+* `state` - The state of the custom plugin.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10M`)
+* `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK Connect Custom Plugin using the plugin's `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MSK Connect Custom Plugin using the plugin's `arn`. For example:
+
+```console
+% terraform import aws_mskconnect_custom_plugin.example 'arn:aws:kafkaconnect:eu-central-1:123456789012:custom-plugin/debezium-example/abcdefgh-1234-5678-9abc-defghijklmno-4'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/mskconnect_worker_configuration.html.markdown b/website/docs/cdktf/typescript/r/mskconnect_worker_configuration.html.markdown
new file mode 100644
index 00000000000..b6c8ada43e3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/mskconnect_worker_configuration.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Managed Streaming for Kafka Connect"
+layout: "aws"
+page_title: "AWS: aws_mskconnect_worker_configuration"
+description: |-
+  Provides an Amazon MSK Connect worker configuration resource.
+---
+
+
+
+# Resource: aws_mskconnect_worker_configuration
+
+Provides an Amazon MSK Connect Worker Configuration Resource.
+
+## Example Usage
+
+### Basic configuration
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { MskconnectWorkerConfiguration } from "./.gen/providers/aws/mskconnect-worker-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MskconnectWorkerConfiguration(this, "example", {
+      name: "example",
+      propertiesFileContent:
+        "key.converter=org.apache.kafka.connect.storage.StringConverter\nvalue.converter=org.apache.kafka.connect.storage.StringConverter\n\n",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) The name of the worker configuration.
+* `propertiesFileContent` - (Required) Contents of the connect-distributed.properties file. The value can be either base64 encoded or in raw format.
+
+The following arguments are optional:
+
+* `description` - (Optional) A summary description of the worker configuration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the worker configuration.
+* `latestRevision` - An ID of the latest successfully created revision of the worker configuration.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MSK Connect Worker Configuration using the worker configuration's `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MSK Connect Worker Configuration using the worker configuration's `arn`. For example:
+
+```console
+% terraform import aws_mskconnect_worker_configuration.example 'arn:aws:kafkaconnect:eu-central-1:123456789012:worker-configuration/example/8848493b-7fcc-478c-a646-4a52634e3378-4'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/mwaa_environment.html.markdown b/website/docs/cdktf/typescript/r/mwaa_environment.html.markdown
new file mode 100644
index 00000000000..b81ed746302
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/mwaa_environment.html.markdown
@@ -0,0 +1,260 @@
+---
+subcategory: "MWAA (Managed Workflows for Apache Airflow)"
+layout: "aws"
+page_title: "AWS: aws_mwaa_environment"
+description: |-
+  Creates an MWAA Environment
+---
+
+
+
+# Resource: aws_mwaa_environment
+
+Creates an MWAA Environment resource.
+
+## Example Usage
+
+An MWAA Environment requires an IAM role (`awsIamRole`), two subnets in the private zone (`awsSubnet`) and a versioned S3 bucket (`awsS3Bucket`).
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
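+ * Note: awsIamRoleExample, awsSecurityGroupExample, awsS3BucketExample and
+ * `private` (the private subnets) reference resources defined elsewhere in
+ * the stack and are not shown here.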
+ */ +import { MwaaEnvironment } from "./.gen/providers/aws/mwaa-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MwaaEnvironment(this, "example", { + dagS3Path: "dags/", + executionRoleArn: Token.asString(awsIamRoleExample.arn), + name: "example", + networkConfiguration: { + securityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + subnetIds: Token.asList(propertyAccess(private, ["*", "id"])), + }, + sourceBucketArn: Token.asString(awsS3BucketExample.arn), + }); + } +} + +``` + +### Example with Airflow configuration options + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MwaaEnvironment } from "./.gen/providers/aws/mwaa-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MwaaEnvironment(this, "example", { + airflowConfigurationOptions: { + "core.default_task_retries": Token.asString(16), + "core.parallelism": Token.asString(1), + }, + dagS3Path: "dags/", + executionRoleArn: Token.asString(awsIamRoleExample.arn), + name: "example", + networkConfiguration: { + securityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + subnetIds: Token.asList(propertyAccess(private, ["*", "id"])), + }, + sourceBucketArn: Token.asString(awsS3BucketExample.arn), + }); + } +} + +``` + +### Example with logging configurations + +Note that Airflow task logs are enabled by default with the `info` log level. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { MwaaEnvironment } from "./.gen/providers/aws/mwaa-environment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new MwaaEnvironment(this, "example", { + dagS3Path: "dags/", + executionRoleArn: Token.asString(awsIamRoleExample.arn), + loggingConfiguration: { + dagProcessingLogs: { + enabled: true, + logLevel: "DEBUG", + }, + schedulerLogs: { + enabled: true, + logLevel: "INFO", + }, + taskLogs: { + enabled: true, + logLevel: "WARNING", + }, + webserverLogs: { + enabled: true, + logLevel: "ERROR", + }, + workerLogs: { + enabled: true, + logLevel: "CRITICAL", + }, + }, + name: "example", + networkConfiguration: { + securityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + subnetIds: Token.asList(propertyAccess(private, ["*", "id"])), + }, + sourceBucketArn: Token.asString(awsS3BucketExample.arn), + }); + } +} + +``` + +### Example with tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
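+ * Tags set on the environment merge with any provider-level `defaultTags`;
+ * matching keys defined here take precedence.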
 */
+import { MwaaEnvironment } from "./.gen/providers/aws/mwaa-environment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new MwaaEnvironment(this, "example", {
+      dagS3Path: "dags/",
+      executionRoleArn: Token.asString(awsIamRoleExample.arn),
+      name: "example",
+      networkConfiguration: {
+        securityGroupIds: [Token.asString(awsSecurityGroupExample.id)],
+        subnetIds: Token.asList(propertyAccess(private, ["*", "id"])),
+      },
+      sourceBucketArn: Token.asString(awsS3BucketExample.arn),
+      tags: {
+        Environment: "production",
+        Name: "example",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `airflowConfigurationOptions` - (Optional) The `airflowConfigurationOptions` parameter specifies Airflow override options. Check the [Official documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-env-variables.html#configuring-env-variables-reference) for all possible configuration options.
+* `airflowVersion` - (Optional) Airflow version of your environment. Will be set by default to the latest version that MWAA supports.
+* `dagS3Path` - (Required) The relative path to the DAG folder on your Amazon S3 storage bucket. For example, dags. For more information, see [Importing DAGs on Amazon MWAA](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import.html).
+* `environmentClass` - (Optional) Environment class for the cluster. Possible options are `mw1Small`, `mw1Medium`, `mw1Large`. Will be set by default to `mw1Small`. Please check the [AWS Pricing](https://aws.amazon.com/de/managed-workflows-for-apache-airflow/pricing/) for more information about the environment classes.
+* `executionRoleArn` - (Required) The Amazon Resource Name (ARN) of the task execution role that Amazon MWAA and its environment can assume. Check the [official AWS documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/mwaa-create-role.html) for the detailed role specification.
+* `kmsKey` - (Optional) The Amazon Resource Name (ARN) of your KMS key that you want to use for encryption. Will be set to the ARN of the managed KMS key `aws/airflow` by default. Please check the [Official Documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/custom-keys-certs.html) for more information.
+* `loggingConfiguration` - (Optional) The Apache Airflow logs you want to send to Amazon CloudWatch Logs.
+* `maxWorkers` - (Optional) The maximum number of workers that can be automatically scaled up. Value needs to be between `1` and `25`. Will be `10` by default.
+* `minWorkers` - (Optional) The minimum number of workers that you want to run in your environment. Will be `1` by default.
+* `name` - (Required) The name of the Apache Airflow Environment.
+* `networkConfiguration` - (Required) Specifies the network configuration for your Apache Airflow Environment. This includes two private subnets as well as security groups for the Airflow environment. Each subnet requires an internet connection, otherwise the deployment will fail. See [Network configuration](#network-configuration) below for details.
+* `pluginsS3ObjectVersion` - (Optional) The plugins.zip file version you want to use.
+* `pluginsS3Path` - (Optional) The relative path to the plugins.zip file on your Amazon S3 storage bucket. For example, plugins.zip. If a relative path is provided in the request, then `pluginsS3ObjectVersion` is required.
+  For more information, see [Importing DAGs on Amazon MWAA](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import.html).
+* `requirementsS3ObjectVersion` - (Optional) The requirements.txt file version you want to use.
+* `requirementsS3Path` - (Optional) The relative path to the requirements.txt file on your Amazon S3 storage bucket. For example, requirements.txt. If a relative path is provided in the request, then `requirementsS3ObjectVersion` is required. For more information, see [Importing DAGs on Amazon MWAA](https://docs.aws.amazon.com/mwaa/latest/userguide/configuring-dag-import.html).
+* `schedulers` - (Optional) The number of schedulers that you want to run in your environment. v2.0.2 and above accepts `2` - `5`, default `2`. v1.10.12 accepts `1`.
+* `sourceBucketArn` - (Required) The Amazon Resource Name (ARN) of your Amazon S3 storage bucket. For example, arn:aws:s3:::airflow-mybucketname.
+* `startupScriptS3ObjectVersion` - (Optional) The version of the startup shell script you want to use. You must specify the version ID that Amazon S3 assigns to the file every time you update the script.
+* `startupScriptS3Path` - (Optional) The relative path to the script hosted in your bucket. The script runs as your environment starts before starting the Apache Airflow process. Use this script to install dependencies, modify configuration options, and set environment variables. See [Using a startup script](https://docs.aws.amazon.com/mwaa/latest/userguide/using-startup-script.html). Supported for environment versions 2.x and later.
+* `webserverAccessMode` - (Optional) Specifies whether the webserver should be accessible over the internet or via your specified VPC. Possible options: `privateOnly` (default) and `publicOnly`.
+* `weeklyMaintenanceWindowStart` - (Optional) Specifies the start date for the weekly maintenance window.
+* `tags` - (Optional) A map of resource tags to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Logging configurations
+
+The `loggingConfiguration` block supports the following arguments.
+
+* `dagProcessingLogs` - (Optional) Log configuration options for processing DAGs. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
+* `schedulerLogs` - (Optional) Log configuration options for the schedulers. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
+* `taskLogs` - (Optional) Log configuration options for DAG tasks. See [Module logging configuration](#module-logging-configuration) for more information. Enabled by default with `info` log level.
+* `webserverLogs` - (Optional) Log configuration options for the webservers. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
+* `workerLogs` - (Optional) Log configuration options for the workers. See [Module logging configuration](#module-logging-configuration) for more information. Disabled by default.
+
+### Module logging configuration
+
+A configuration block to use for logging with respect to the various Apache Airflow services: DagProcessingLogs, SchedulerLogs, TaskLogs, WebserverLogs, and WorkerLogs. It supports the following arguments.
+
+* `enabled` - (Required) Whether to enable log collection for the service.
+* `logLevel` - (Optional) Logging level. Valid values: `critical`, `error`, `warning`, `info`, `debug`. Will be `info` by default.
+
+### Network configuration
+
+The `networkConfiguration` block supports the following arguments. More information about the required subnet and security group settings can be found in the [official AWS documentation](https://docs.aws.amazon.com/mwaa/latest/userguide/vpc-create.html).
+
+* `securityGroupIds` - (Required) Security group IDs for the environment. At least one of the security groups needs to allow MWAA resources to talk to each other, otherwise MWAA cannot be provisioned.
+* `subnetIds` - (Required) The private subnet IDs in which the environment should be created. MWAA requires two subnets.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the MWAA Environment.
+* `createdAt` - The Created At date of the MWAA Environment.
+* `loggingConfiguration[0].<logConfigurationType>[0].cloudWatchLogGroupArn` - Provides the ARN for the CloudWatch group where the logs will be published.
+* `serviceRoleArn` - The Service Role ARN of the Amazon MWAA Environment.
+* `status` - The status of the Amazon MWAA Environment.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `webserverUrl` - The webserver URL of the MWAA Environment.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `120M`)
+- `update` - (Default `90M`)
+- `delete` - (Default `90M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MWAA Environment using `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import MWAA Environment using `name`. For example:
+
+```console
+% terraform import aws_mwaa_environment.example MyAirflowEnvironment
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/nat_gateway.html.markdown b/website/docs/cdktf/typescript/r/nat_gateway.html.markdown
new file mode 100644
index 00000000000..6d6bbc6e6d5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/nat_gateway.html.markdown
@@ -0,0 +1,169 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_nat_gateway"
+description: |-
+  Provides a resource to create a VPC NAT Gateway.
+---
+
+
+
+# Resource: aws_nat_gateway
+
+Provides a resource to create a VPC NAT Gateway.
+
+## Example Usage
+
+### Public NAT
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
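+ * awsEipExample, awsInternetGatewayExample and awsSubnetExample reference
+ * resources defined elsewhere in the stack; the explicit dependsOn makes
+ * sure the Internet Gateway exists before the NAT Gateway is created.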
 */
+import { NatGateway } from "./.gen/providers/aws/nat-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NatGateway(this, "example", {
+      allocationId: Token.asString(awsEipExample.id),
+      dependsOn: [awsInternetGatewayExample],
+      subnetId: Token.asString(awsSubnetExample.id),
+      tags: {
+        Name: "gw NAT",
+      },
+    });
+  }
+}
+
+```
+
+### Public NAT with Secondary Private IP Addresses
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NatGateway } from "./.gen/providers/aws/nat-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NatGateway(this, "example", {
+      allocationId: Token.asString(awsEipExample.id),
+      secondaryAllocationIds: [secondary.id],
+      secondaryPrivateIpAddresses: ["10.0.1.5"],
+      subnetId: Token.asString(awsSubnetExample.id),
+    });
+  }
+}
+
+```
+
+### Private NAT
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NatGateway } from "./.gen/providers/aws/nat-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NatGateway(this, "example", {
+      connectivityType: "private",
+      subnetId: Token.asString(awsSubnetExample.id),
+    });
+  }
+}
+
+```
+
+### Private NAT with Secondary Private IP Addresses
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NatGateway } from "./.gen/providers/aws/nat-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NatGateway(this, "example", {
+      connectivityType: "private",
+      secondaryPrivateIpAddressCount: 7,
+      subnetId: Token.asString(awsSubnetExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allocationId` - (Optional) The Allocation ID of the Elastic IP address for the NAT Gateway. Required for `connectivityType` of `public`.
+* `connectivityType` - (Optional) Connectivity type for the NAT Gateway. Valid values are `private` and `public`. Defaults to `public`.
+* `privateIp` - (Optional) The private IPv4 address to assign to the NAT Gateway. If you don't provide an address, a private IPv4 address will be automatically assigned.
+* `subnetId` - (Required) The Subnet ID of the subnet in which to place the NAT Gateway.
+* `secondaryAllocationIds` - (Optional) A list of secondary allocation EIP IDs for this NAT Gateway.
+* `secondaryPrivateIpAddressCount` - (Optional) [Private NAT Gateway only] The number of secondary private IPv4 addresses you want to assign to the NAT Gateway.
+* `secondaryPrivateIpAddresses` - (Optional) A list of secondary private IPv4 addresses to assign to the NAT Gateway.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `associationId` - The association ID of the Elastic IP address that's associated with the NAT Gateway. Only available when `connectivityType` is `public`.
+* `id` - The ID of the NAT Gateway.
+* `networkInterfaceId` - The ID of the network interface associated with the NAT Gateway.
+* `publicIp` - The Elastic IP address associated with the NAT Gateway.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `update` - (Default `10M`)
+- `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import NAT Gateways using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import NAT Gateways using the `id`. For example:
+
+```console
+% terraform import aws_nat_gateway.private_gw nat-05dba92075d71c408
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/neptune_cluster.html.markdown b/website/docs/cdktf/typescript/r/neptune_cluster.html.markdown
new file mode 100644
index 00000000000..eb6f2d0e756
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/neptune_cluster.html.markdown
@@ -0,0 +1,179 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_cluster"
+description: |-
+  Provides a Neptune Cluster Resource
+---
+
+
+
+# Resource: aws_neptune_cluster
+
+Provides a Neptune Cluster Resource. A Cluster Resource defines attributes that are
+applied to the entire cluster of Neptune Cluster Instances.
+
+Changes to a Neptune Cluster can occur when you manually change a
+parameter, such as `backupRetentionPeriod`, and are reflected in the next maintenance
+window. Because of this, Terraform may report a difference in its planning
+phase because a modification has not yet taken place. You can use the
+`applyImmediately` flag to instruct the service to apply the change immediately
+(see documentation below).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
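+ * applyImmediately below applies cluster modifications right away instead
+ * of waiting for the next maintenance window, and skipFinalSnapshot skips
+ * the final snapshot when the cluster is destroyed.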
 */
+import { NeptuneCluster } from "./.gen/providers/aws/neptune-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NeptuneCluster(this, "default", {
+      applyImmediately: true,
+      backupRetentionPeriod: 5,
+      clusterIdentifier: "neptune-cluster-demo",
+      engine: "neptune",
+      iamDatabaseAuthenticationEnabled: true,
+      preferredBackupWindow: "07:00-09:00",
+      skipFinalSnapshot: true,
+    });
+  }
+}
+
+```
+
+~> **Note:** AWS Neptune does not support user name/password–based access control.
+See the AWS [Docs](https://docs.aws.amazon.com/neptune/latest/userguide/limits.html) for more information.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allowMajorVersionUpgrade` - (Optional) Specifies whether upgrades between different major versions are allowed. You must set it to `true` when providing an `engineVersion` parameter that uses a different major version than the DB cluster's current version. Default is `false`.
+* `applyImmediately` - (Optional) Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`.
+* `availabilityZones` - (Optional) A list of EC2 Availability Zones that instances in the Neptune cluster can be created in.
+* `backupRetentionPeriod` - (Optional) The days to retain backups for. Default `1`.
+* `clusterIdentifier` - (Optional, Forces new resource) The cluster identifier. If omitted, Terraform will assign a random, unique identifier.
+* `clusterIdentifierPrefix` - (Optional, Forces new resource) Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `clusterIdentifier`.
+* `copyTagsToSnapshot` - (Optional) If set to true, tags are copied to any snapshot of the DB cluster that is created.
+* `enableCloudwatchLogsExports` - (Optional) A list of the log types this DB cluster is configured to export to CloudWatch Logs. Currently only supports `audit`.
+* `engine` - (Optional) The name of the database engine to be used for this Neptune cluster. Defaults to `neptune`.
+* `engineVersion` - (Optional) The database engine version.
+* `finalSnapshotIdentifier` - (Optional) The name of your final Neptune snapshot when this Neptune cluster is deleted. If omitted, no final snapshot will be made.
+* `globalClusterIdentifier` - (Optional) The global cluster identifier specified on [`awsNeptuneGlobalCluster`](/docs/providers/aws/r/neptune_global_cluster.html).
+* `iamRoles` - (Optional) A list of ARNs for the IAM roles to associate to the Neptune Cluster.
+* `iamDatabaseAuthenticationEnabled` - (Optional) Specifies whether or not mappings of AWS Identity and Access Management (IAM) accounts to database accounts are enabled.
+* `kmsKeyArn` - (Optional) The ARN for the KMS encryption key. When specifying `kmsKeyArn`, `storageEncrypted` needs to be set to true.
+* `neptuneSubnetGroupName` - (Optional) A Neptune subnet group to associate with this Neptune instance.
+* `neptuneClusterParameterGroupName` - (Optional) A cluster parameter group to associate with the cluster.
+* `neptuneInstanceParameterGroupName` - (Optional) The name of the DB parameter group to apply to all instances of the DB cluster.
+* `preferredBackupWindow` - (Optional) The daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter. Time in UTC.
+  Default: A 30-minute window selected at random from an 8-hour block of time per region. E.g., 04:00-09:00.
+* `preferredMaintenanceWindow` - (Optional) The weekly time range during which system maintenance can occur, in UTC, e.g., wed:04:00-wed:04:30.
+* `port` - (Optional) The port on which Neptune accepts connections. Default is `8182`.
+* `replicationSourceIdentifier` - (Optional) ARN of a source Neptune cluster or Neptune instance if this Neptune cluster is to be created as a Read Replica.
+* `skipFinalSnapshot` - (Optional) Determines whether a final Neptune snapshot is created before the Neptune cluster is deleted. If true is specified, no Neptune snapshot is created. If false is specified, a Neptune snapshot is created before the Neptune cluster is deleted, using the value from `finalSnapshotIdentifier`. Default is `false`.
+* `snapshotIdentifier` - (Optional) Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a Neptune cluster snapshot, or the ARN when specifying a Neptune snapshot. Automated snapshots **should not** be used for this attribute, unless from a different cluster. Automated snapshots are deleted as part of cluster destruction when the resource is replaced.
+* `storageEncrypted` - (Optional) Specifies whether the Neptune cluster is encrypted. The default is `false` if not specified.
+* `tags` - (Optional) A map of tags to assign to the Neptune cluster. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcSecurityGroupIds` - (Optional) List of VPC security groups to associate with the Cluster.
+* `deletionProtection` - (Optional) A value that indicates whether the DB cluster has deletion protection enabled. The database can't be deleted when deletion protection is enabled. By default, deletion protection is disabled.
+* `serverlessV2ScalingConfiguration` - (Optional) If set, create the Neptune cluster as a serverless one. See [Serverless](#serverless) for example block attributes.
+
+### Serverless
+
+**Neptune serverless has some limitations. Please see the [limitations on the AWS documentation](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-serverless.html#neptune-serverless-limitations) before jumping into Neptune Serverless.**
+
+Neptune serverless requires the `engineVersion` attribute to be `1.2.0.1` or above. You also need to provide a cluster parameter group compatible with the family `neptune1.2`. In the example below, the default cluster parameter group is used.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
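+ * Serverless requires engineVersion 1.2.0.1 or above, a cluster parameter
+ * group from the neptune1.2 family, and the special db.serverless instance
+ * class on the cluster's instances.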
 */
+import { NeptuneCluster } from "./.gen/providers/aws/neptune-cluster";
+import { NeptuneClusterInstance } from "./.gen/providers/aws/neptune-cluster-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new NeptuneCluster(this, "example", {
+      applyImmediately: true,
+      clusterIdentifier: "neptune-cluster-development",
+      engine: "neptune",
+      engineVersion: "1.2.0.1",
+      neptuneClusterParameterGroupName: "default.neptune1.2",
+      serverlessV2ScalingConfiguration: {},
+      skipFinalSnapshot: true,
+    });
+    const awsNeptuneClusterInstanceExample = new NeptuneClusterInstance(
+      this,
+      "example_1",
+      {
+        clusterIdentifier: example.clusterIdentifier,
+        instanceClass: "db.serverless",
+        neptuneParameterGroupName: "default.neptune1.2",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNeptuneClusterInstanceExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+* `minCapacity`: (default: **2.5**) The minimum Neptune Capacity Units (NCUs) for this cluster. Must be greater than or equal to **1**. See [AWS Documentation](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-serverless-capacity-scaling.html) for more details.
+* `maxCapacity`: (default: **128**) The maximum Neptune Capacity Units (NCUs) for this cluster. Must be less than or equal to **128**. See [AWS Documentation](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-serverless-capacity-scaling.html) for more details.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Neptune Cluster Amazon Resource Name (ARN)
+* `clusterResourceId` - The Neptune Cluster Resource ID
+* `clusterMembers` – List of Neptune Instances that are a part of this cluster
+* `endpoint` - The DNS address of the Neptune instance
+* `hostedZoneId` - The Route53 Hosted Zone ID of the endpoint
+* `id` - The Neptune Cluster Identifier
+* `readerEndpoint` - A read-only endpoint for the Neptune cluster, automatically load-balanced across replicas
+* `status` - The Neptune instance status
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `120M`)
+- `update` - (Default `120M`)
+- `delete` - (Default `120M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNeptuneCluster` using the cluster identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNeptuneCluster` using the cluster identifier. For example:
+
+```console
+% terraform import aws_neptune_cluster.example my-cluster
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/neptune_cluster_endpoint.html.markdown b/website/docs/cdktf/typescript/r/neptune_cluster_endpoint.html.markdown
new file mode 100644
index 00000000000..cbf21d5d48c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/neptune_cluster_endpoint.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_cluster_endpoint"
+description: |-
+  Provides a Neptune Cluster Endpoint Resource
+---
+
+
+
+# Resource: aws_neptune_cluster_endpoint
+
+Provides a Neptune Cluster Endpoint Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NeptuneClusterEndpoint } from "./.gen/providers/aws/neptune-cluster-endpoint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NeptuneClusterEndpoint(this, "example", {
+      clusterEndpointIdentifier: "example",
+      clusterIdentifier: test.clusterIdentifier,
+      endpointType: "READER",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `clusterIdentifier` - (Required, Forces new resource) The DB cluster identifier of the DB cluster associated with the endpoint.
+* `clusterEndpointIdentifier` - (Required, Forces new resource) The identifier of the endpoint.
+* `endpointType` - (Required) The type of the endpoint. One of: `READER`, `WRITER`, `ANY`.
+* `excludedMembers` - (Optional) List of DB instance identifiers that aren't part of the custom endpoint group. All other eligible instances are reachable through the custom endpoint. Only relevant if the list of static members is empty.
+* `staticMembers` - (Optional) List of DB instance identifiers that are part of the custom endpoint group.
+* `tags` - (Optional) A map of tags to assign to the Neptune cluster. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Neptune Cluster Endpoint Amazon Resource Name (ARN).
+* `endpoint` - The DNS address of the endpoint.
+* `id` - The Neptune Cluster Endpoint Identifier.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNeptuneClusterEndpoint` using the `clusterIdentifier:endpointIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNeptuneClusterEndpoint` using the `clusterIdentifier:endpointIdentifier`. For example:
+
+```console
+% terraform import aws_neptune_cluster_endpoint.example my-cluster:my-endpoint
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/neptune_cluster_instance.html.markdown b/website/docs/cdktf/typescript/r/neptune_cluster_instance.html.markdown
new file mode 100644
index 00000000000..d77f37f6ec8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/neptune_cluster_instance.html.markdown
@@ -0,0 +1,131 @@
+---
+subcategory: "Neptune"
+layout: "aws"
+page_title: "AWS: aws_neptune_cluster_instance"
+description: |-
+  Provides a Neptune Cluster Instance Resource
+---
+
+
+
+# Resource: aws_neptune_cluster_instance
+
+A Cluster Instance Resource defines attributes that are specific to a single instance in a Neptune Cluster.
+
+You can simply add Neptune instances and Neptune manages the replication. You can use the [count][1]
+meta-parameter to make multiple instances and join them all to the same Neptune Cluster, or you may specify different Cluster Instance resources with various `instanceClass` sizes.
+
+## Example Usage
+
+The following example will create a Neptune cluster with two Neptune instances (one writer and one reader).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformCount, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NeptuneCluster } from "./.gen/providers/aws/neptune-cluster";
+import { NeptuneClusterInstance } from "./.gen/providers/aws/neptune-cluster-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const defaultVar = new NeptuneCluster(this, "default", {
+      applyImmediately: true,
+      backupRetentionPeriod: 5,
+      clusterIdentifier: "neptune-cluster-demo",
+      engine: "neptune",
+      iamDatabaseAuthenticationEnabled: true,
+      preferredBackupWindow: "07:00-09:00",
+      skipFinalSnapshot: true,
+    });
+    /*In most cases loops should be handled in the programming language context and
+not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+you need to keep this like it is.*/
+    const exampleCount = TerraformCount.of(Token.asNumber("2"));
+    new NeptuneClusterInstance(this, "example", {
+      applyImmediately: true,
+      clusterIdentifier: defaultVar.id,
+      engine: "neptune",
+      instanceClass: "db.r4.large",
+      count: exampleCount,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `applyImmediately` - (Optional) Specifies whether any instance modifications
+  are applied immediately, or during the next maintenance window. Default is `false`.
+* `autoMinorVersionUpgrade` - (Optional) Indicates that minor engine upgrades will be applied automatically to the instance during the maintenance window. Default is `true`.
+* `availabilityZone` - (Optional) The EC2 Availability Zone that the neptune instance is created in.
+* `clusterIdentifier` - (Required) The identifier of the [`awsNeptuneCluster`](/docs/providers/aws/r/neptune_cluster.html) in which to launch this instance.
+* `engine` - (Optional) The name of the database engine to be used for the neptune instance. Defaults to `neptune`. Valid Values: `neptune`.
+* `engineVersion` - (Optional) The neptune engine version.
+* `identifier` - (Optional, Forces new resource) The identifier for the neptune instance. If omitted, Terraform will assign a random, unique identifier.
+* `identifierPrefix` - (Optional, Forces new resource) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `instanceClass` - (Required) The instance class to use.
+* `neptuneSubnetGroupName` - (Required if `publicly_accessible = false`, Optional otherwise) A subnet group to associate with this neptune instance. **NOTE:** This must match the `neptuneSubnetGroupName` of the attached [`awsNeptuneCluster`](/docs/providers/aws/r/neptune_cluster.html).
+* `neptuneParameterGroupName` - (Optional) The name of the neptune parameter group to associate with this instance.
+* `port` - (Optional) The port on which the DB accepts connections. Defaults to `8182`.
+* `preferredBackupWindow` - (Optional) The daily time range during which automated backups are created if automated backups are enabled. E.g., "04:00-09:00".
+* `preferredMaintenanceWindow` - (Optional) The window to perform maintenance in.
+  Syntax: "ddd:hh24:mi-ddd:hh24:mi". E.g., "Mon:00:00-Mon:03:00".
+* `promotionTier` - (Optional) Default 0. Failover Priority setting on instance level. The reader with the lower tier has the higher priority to get promoted to writer.
+* `publiclyAccessible` - (Optional) Bool to control if the instance is publicly accessible. Default is `false`.
+* `tags` - (Optional) A map of tags to assign to the instance. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `address` - The hostname of the instance. See also `endpoint` and `port`.
+* `arn` - Amazon Resource Name (ARN) of the neptune instance.
+* `dbiResourceId` - The region-unique, immutable identifier for the neptune instance.
+* `endpoint` - The connection endpoint in `address:port` format.
+* `id` - The Instance identifier.
+* `kmsKeyArn` - The ARN for the KMS encryption key if one is set to the neptune cluster.
+* `storageEncrypted` - Specifies whether the neptune cluster is encrypted.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `writer` – Boolean indicating if this instance is writable. `false` indicates this instance is a read replica.
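+
+To read these attributes elsewhere in a stack, reference them on the construct. A minimal sketch, assuming a single `NeptuneClusterInstance` held in a variable named `example` (created without `count`):
+
+```typescript
+import { TerraformOutput } from "cdktf";
+
+// Expose the instance connection details as stack outputs.
+new TerraformOutput(this, "neptune_instance_endpoint", {
+  value: example.endpoint, // `address:port` of the instance
+});
+new TerraformOutput(this, "neptune_instance_writer", {
+  value: example.writer, // false indicates a read replica
+});
+```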
+ +[1]: https://www.terraform.io/docs/configuration/meta-arguments/count.html + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `90M`) +- `update` - (Default `90M`) +- `delete` - (Default `90M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNeptuneClusterInstance` using the instance identifier. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNeptuneClusterInstance` using the instance identifier. For example: + +```console +% terraform import aws_neptune_cluster_instance.example my-instance +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/neptune_cluster_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/neptune_cluster_parameter_group.html.markdown new file mode 100644 index 00000000000..041d8e12a80 --- /dev/null +++ b/website/docs/cdktf/typescript/r/neptune_cluster_parameter_group.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_cluster_parameter_group" +description: |- + Manages a Neptune Cluster Parameter Group +--- + + + +# Resource: aws_neptune_cluster_parameter_group + +Manages a Neptune Cluster Parameter Group + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NeptuneClusterParameterGroup } from "./.gen/providers/aws/neptune-cluster-parameter-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NeptuneClusterParameterGroup(this, "example", { + description: "neptune cluster parameter group", + family: "neptune1", + name: "example", + parameter: [ + { + name: "neptune_enable_audit_log", + value: Token.asString(1), + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional, Forces new resource) The name of the neptune cluster parameter group. If omitted, Terraform will assign a random, unique name. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `family` - (Required) The family of the neptune cluster parameter group. +* `description` - (Optional) The description of the neptune cluster parameter group. Defaults to "Managed by Terraform". +* `parameter` - (Optional) A list of neptune parameters to apply. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +Parameter blocks support the following: + +* `name` - (Required) The name of the neptune parameter. 
+* `value` - (Required) The value of the neptune parameter. +* `applyMethod` - (Optional) Valid values are `immediate` and `pendingReboot`. Defaults to `pendingReboot`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The neptune cluster parameter group name. +* `arn` - The ARN of the neptune cluster parameter group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Neptune Cluster Parameter Groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Neptune Cluster Parameter Groups using the `name`. For example: + +```console +% terraform import aws_neptune_cluster_parameter_group.cluster_pg production-pg-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/neptune_cluster_snapshot.html.markdown b/website/docs/cdktf/typescript/r/neptune_cluster_snapshot.html.markdown new file mode 100644 index 00000000000..b00d72d55b6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/neptune_cluster_snapshot.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_cluster_snapshot" +description: |- + Manages a Neptune database cluster snapshot. +--- + + + +# Resource: aws_neptune_cluster_snapshot + +Manages a Neptune database cluster snapshot. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NeptuneClusterSnapshot } from "./.gen/providers/aws/neptune-cluster-snapshot"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NeptuneClusterSnapshot(this, "example", { + dbClusterIdentifier: Token.asString(awsNeptuneClusterExample.id), + dbClusterSnapshotIdentifier: "resourcetestsnapshot1234", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `dbClusterIdentifier` - (Required) The DB Cluster Identifier from which to take the snapshot. +* `dbClusterSnapshotIdentifier` - (Required) The Identifier for the snapshot. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `allocatedStorage` - Specifies the allocated storage size in gigabytes (GB). +* `availabilityZones` - List of EC2 Availability Zones that instances in the DB cluster snapshot can be restored in. +* `dbClusterSnapshotArn` - The Amazon Resource Name (ARN) for the DB Cluster Snapshot. +* `engine` - Specifies the name of the database engine. +* `engineVersion` - Version of the database engine for this DB cluster snapshot. 
+
* `kmsKeyId` - If `storageEncrypted` is true, the AWS KMS key identifier for the encrypted DB cluster snapshot.
* `licenseModel` - License model information for the restored DB cluster.
* `port` - Port that the DB cluster was listening on at the time of the snapshot.
* `sourceDbClusterSnapshotIdentifier` - The ARN of the DB Cluster Snapshot that this DB Cluster Snapshot was copied from. It only has a value in the case of a cross-customer or cross-region copy.
* `storageEncrypted` - Specifies whether the DB cluster snapshot is encrypted.
* `status` - The status of this DB Cluster Snapshot.
* `vpcId` - The VPC ID associated with the DB cluster snapshot.

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

* `create` - (Default `20M`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNeptuneClusterSnapshot` using the cluster snapshot identifier. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsNeptuneClusterSnapshot` using the cluster snapshot identifier. For example:

```console
% terraform import aws_neptune_cluster_snapshot.example my-cluster-snapshot
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/neptune_event_subscription.html.markdown b/website/docs/cdktf/typescript/r/neptune_event_subscription.html.markdown new file mode 100644 index 00000000000..f3d0fe98dd7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/neptune_event_subscription.html.markdown @@ -0,0 +1,136 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_event_subscription" +description: |- + Provides a Neptune event subscription resource. +--- + + + +# Resource: aws_neptune_event_subscription

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details. 
+ */ +import { NeptuneCluster } from "./.gen/providers/aws/neptune-cluster"; +import { NeptuneClusterInstance } from "./.gen/providers/aws/neptune-cluster-instance"; +import { NeptuneEventSubscription } from "./.gen/providers/aws/neptune-event-subscription"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new NeptuneCluster(this, "default", { + applyImmediately: Token.asBoolean("true"), + backupRetentionPeriod: 5, + clusterIdentifier: "neptune-cluster-demo", + engine: "neptune", + iamDatabaseAuthenticationEnabled: Token.asBoolean("true"), + preferredBackupWindow: "07:00-09:00", + skipFinalSnapshot: true, + }); + const example = new NeptuneClusterInstance(this, "example", { + applyImmediately: Token.asBoolean("true"), + clusterIdentifier: defaultVar.id, + engine: "neptune", + instanceClass: "db.r4.large", + }); + const awsSnsTopicDefault = new SnsTopic(this, "default_2", { + name: "neptune-events", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSnsTopicDefault.overrideLogicalId("default"); + const awsNeptuneEventSubscriptionDefault = new NeptuneEventSubscription( + this, + "default_3", + { + eventCategories: [ + "maintenance", + "availability", + "creation", + "backup", + "restoration", + "recovery", + "deletion", + "failover", + "failure", + "notification", + "configuration change", + "read replica", + ], + name: "neptune-event-sub", + snsTopicArn: Token.asString(awsSnsTopicDefault.arn), + sourceIds: [example.id], + sourceType: "db-instance", + tags: { + env: "test", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNeptuneEventSubscriptionDefault.overrideLogicalId("default"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to true. +* `eventCategories` - (Optional) A list of event categories for a `sourceType` that you want to subscribe to. Run `aws neptune describe-event-categories` to find all the event categories. +* `name` - (Optional) The name of the Neptune event subscription. By default generated by Terraform. +* `namePrefix` - (Optional) The name of the Neptune event subscription. Conflicts with `name`. +* `snsTopicArn` - (Required) The ARN of the SNS topic to send events to. +* `sourceIds` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a `sourceType` must also be specified. +* `sourceType` - (Optional) The type of source that will be generating the events. Valid options are `dbInstance`, `dbSecurityGroup`, `dbParameterGroup`, `dbSnapshot`, `dbCluster` or `dbClusterSnapshot`. If not set, all sources will be subscribed to. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name of the Neptune event notification subscription.
* `arn` - The Amazon Resource Name of the Neptune event notification subscription.
* `customerAwsId` - The AWS customer account associated with the Neptune event notification subscription.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

- `create` - (Default `40M`)
- `delete` - (Default `40M`)
- `update` - (Default `40M`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNeptuneEventSubscription` using the event subscription name. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsNeptuneEventSubscription` using the event subscription name. For example:

```console
% terraform import aws_neptune_event_subscription.example my-event-subscription
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/neptune_global_cluster.html.markdown b/website/docs/cdktf/typescript/r/neptune_global_cluster.html.markdown new file mode 100644 index 00000000000..f9b610b0d70 --- /dev/null +++ b/website/docs/cdktf/typescript/r/neptune_global_cluster.html.markdown @@ -0,0 +1,217 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_global_cluster" +description: |- + Provides a Neptune Global Cluster Resource +--- + + + +# Resource: aws_neptune_global_cluster

Manages a Neptune Global Cluster. A global cluster consists of one primary region and up to five read-only secondary regions. You issue write operations directly to the primary cluster in the primary region and Amazon Neptune automatically replicates the data to the secondary regions using dedicated infrastructure.

More information about Neptune Global Clusters can be found in the [Neptune User Guide](https://docs.aws.amazon.com/neptune/latest/userguide/neptune-global-database.html).

## Example Usage

### New Neptune Global Cluster

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details. 
+ */ +import { NeptuneCluster } from "./.gen/providers/aws/neptune-cluster"; +import { NeptuneClusterInstance } from "./.gen/providers/aws/neptune-cluster-instance"; +import { NeptuneGlobalCluster } from "./.gen/providers/aws/neptune-global-cluster"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new AwsProvider(this, "aws", { + alias: "primary", + region: "us-east-2", + }); + const secondary = new AwsProvider(this, "aws_1", { + alias: "secondary", + region: "us-east-1", + }); + const example = new NeptuneGlobalCluster(this, "example", { + engine: "neptune", + engineVersion: "1.2.0.0", + globalClusterIdentifier: "global-test", + }); + const awsNeptuneClusterPrimary = new NeptuneCluster(this, "primary", { + clusterIdentifier: "test-primary-cluster", + engine: example.engine, + engineVersion: example.engineVersion, + globalClusterIdentifier: example.id, + neptuneSubnetGroupName: "default", + provider: primary, + }); + const awsNeptuneClusterSecondary = new NeptuneCluster(this, "secondary", { + clusterIdentifier: "test-secondary-cluster", + engine: example.engine, + engineVersion: example.engineVersion, + globalClusterIdentifier: example.id, + neptuneSubnetGroupName: "default", + provider: secondary, + }); + const awsNeptuneClusterInstancePrimary = new NeptuneClusterInstance( + this, + "primary_5", + { + clusterIdentifier: Token.asString(awsNeptuneClusterPrimary.id), + engine: example.engine, + engineVersion: example.engineVersion, + identifier: "test-primary-cluster-instance", + instanceClass: "db.r5.large", + neptuneSubnetGroupName: "default", + provider: primary, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNeptuneClusterInstancePrimary.overrideLogicalId("primary"); + const awsNeptuneClusterInstanceSecondary = new NeptuneClusterInstance( + this, + "secondary_6", + { + clusterIdentifier: Token.asString(awsNeptuneClusterSecondary.id), + dependsOn: [awsNeptuneClusterInstancePrimary], + engine: example.engine, + engineVersion: example.engineVersion, + identifier: "test-secondary-cluster-instance", + instanceClass: "db.r5.large", + neptuneSubnetGroupName: "default", + provider: secondary, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNeptuneClusterInstanceSecondary.overrideLogicalId("secondary"); + } +} + +``` + +### New Global Cluster From Existing DB Cluster + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
import { NeptuneCluster } from "./.gen/providers/aws/neptune-cluster";
import { NeptuneGlobalCluster } from "./.gen/providers/aws/neptune-global-cluster";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new NeptuneCluster(this, "example", {
      lifecycle: {
        ignoreChanges: [globalClusterIdentifier],
      },
    });
    const awsNeptuneGlobalClusterExample = new NeptuneGlobalCluster(
      this,
      "example_1",
      {
        globalClusterIdentifier: "example",
        sourceDbClusterIdentifier: example.arn,
      }
    );
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsNeptuneGlobalClusterExample.overrideLogicalId("example");
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `globalClusterIdentifier` - (Required, Forces new resources) The global cluster identifier.
* `deletionProtection` - (Optional) If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
* `engine` - (Optional, Forces new resources) Name of the database engine to be used for this DB cluster. Terraform will only perform drift detection if a configuration value is provided. Valid values: `neptune`. Conflicts with `sourceDbClusterIdentifier`.
* `engineVersion` - (Optional) Engine version of the global database. Upgrading the engine version will result in all cluster members being immediately updated.
    * **NOTE:** Upgrading major versions is not supported.
* `sourceDbClusterIdentifier` - (Optional) Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. Terraform cannot perform drift detection of this value.
* `storageEncrypted` - (Optional, Forces new resources) Specifies whether the DB cluster is encrypted. The default is `false` unless `sourceDbClusterIdentifier` is specified and encrypted. Terraform will only perform drift detection if a configuration value is provided.

### Timeouts

The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts) for certain actions:

* `create` - (Defaults to 5 mins) Used when creating the Global Cluster
* `update` - (Defaults to 120 mins) Used when updating the Global Cluster members (time is per member)
* `delete` - (Defaults to 5 mins) Used when deleting the Global Cluster members (time is per member)
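The following is an editorial sketch (not generated by `cdktf convert`) of overriding these timeouts. It assumes the generated `NeptuneGlobalCluster` bindings expose a `timeouts` property, as cdktf provider bindings typically do for resources that define timeouts; the duration values are illustrative:

```typescript
// Editorial sketch - assumes a `timeouts` property on the generated bindings;
// the duration strings below are illustrative, not the defaults.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { NeptuneGlobalCluster } from "./.gen/providers/aws/neptune-global-cluster";
class GlobalClusterWithTimeouts extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NeptuneGlobalCluster(this, "example", {
      engine: "neptune",
      globalClusterIdentifier: "global-test",
      timeouts: {
        create: "10m",
        update: "150m", // applied per Global Cluster member
        delete: "10m",
      },
    });
  }
}
```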
## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Global Cluster Amazon Resource Name (ARN)
* `globalClusterMembers` - Set of objects containing Global Cluster members.
    * `dbClusterArn` - Amazon Resource Name (ARN) of member DB Cluster.
    * `isWriter` - Whether the member is the primary DB Cluster.
* `globalClusterResourceId` - AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed.
* `id` - Neptune Global Cluster identifier.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNeptuneGlobalCluster` using the Global Cluster identifier. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsNeptuneGlobalCluster` using the Global Cluster identifier. For example:

```console
% terraform import aws_neptune_global_cluster.example example
```

Certain resource arguments, like `sourceDbClusterIdentifier`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { NeptuneGlobalCluster } from "./.gen/providers/aws/neptune-global-cluster";
interface MyConfig {
  globalClusterIdentifier: any;
}
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string, config: MyConfig) {
    super(scope, name);
    new NeptuneGlobalCluster(this, "example", {
      lifecycle: {
        ignoreChanges: [sourceDbClusterIdentifier],
      },
      globalClusterIdentifier: config.globalClusterIdentifier,
    });
  }
}

```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/neptune_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/neptune_parameter_group.html.markdown new file mode 100644 index 00000000000..879cb61c483 --- /dev/null +++ b/website/docs/cdktf/typescript/r/neptune_parameter_group.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_parameter_group" +description: |- + Manages a Neptune Parameter Group +--- + + + +# Resource: aws_neptune_parameter_group

Manages a Neptune Parameter Group

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { NeptuneParameterGroup } from "./.gen/providers/aws/neptune-parameter-group";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NeptuneParameterGroup(this, "example", {
      family: "neptune1",
      name: "example",
      parameter: [
        {
          name: "neptune_query_timeout",
          value: "25",
        },
      ],
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `name` - (Required, Forces new resource) The name of the Neptune parameter group.
* `family` - (Required) The family of the Neptune parameter group.
* `description` - (Optional) The description of the Neptune parameter group. Defaults to "Managed by Terraform".
* `parameter` - (Optional) A list of Neptune parameters to apply. 
+
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

Parameter blocks support the following:

* `name` - (Required) The name of the Neptune parameter.
* `value` - (Required) The value of the Neptune parameter.
* `applyMethod` - (Optional) The apply method of the Neptune parameter. Valid values are `immediate` and `pendingReboot`. Defaults to `pendingReboot`.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The Neptune parameter group name.
* `arn` - The Neptune parameter group Amazon Resource Name (ARN).
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Neptune Parameter Groups using the `name`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Neptune Parameter Groups using the `name`. For example:

```console
% terraform import aws_neptune_parameter_group.some_pg some-pg
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/neptune_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/neptune_subnet_group.html.markdown new file mode 100644 index 00000000000..24ae172d852 --- /dev/null +++ b/website/docs/cdktf/typescript/r/neptune_subnet_group.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Neptune" +layout: "aws" +page_title: "AWS: aws_neptune_subnet_group" +description: |- + Provides a Neptune subnet group resource. +--- + + + +# Resource: aws_neptune_subnet_group

Provides a Neptune subnet group resource.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { NeptuneSubnetGroup } from "./.gen/providers/aws/neptune-subnet-group";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NeptuneSubnetGroup(this, "default", {
      name: "main",
      subnetIds: [frontend.id, backend.id],
      tags: {
        Name: "My neptune subnet group",
      },
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `name` - (Optional, Forces new resource) The name of the neptune subnet group. If omitted, Terraform will assign a random, unique name.
* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
* `description` - (Optional) The description of the neptune subnet group. Defaults to "Managed by Terraform". 
+
* `subnetIds` - (Required) A list of VPC subnet IDs.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The neptune subnet group name.
* `arn` - The ARN of the neptune subnet group.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Neptune Subnet groups using the `name`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Neptune Subnet groups using the `name`. For example:

```console
% terraform import aws_neptune_subnet_group.default production-subnet-group
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/network_acl.html.markdown b/website/docs/cdktf/typescript/r/network_acl.html.markdown new file mode 100644 index 00000000000..6a5e10e9a84 --- /dev/null +++ b/website/docs/cdktf/typescript/r/network_acl.html.markdown @@ -0,0 +1,134 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_network_acl" +description: |- + Provides a network ACL resource. +--- + + + +# Resource: aws_network_acl

Provides a network ACL resource. You might set up network ACLs with rules similar
to your security groups in order to add an additional layer of security to your VPC.

~> **NOTE on Network ACLs and Network ACL Rules:** Terraform currently
provides both a standalone [Network ACL Rule](network_acl_rule.html) resource and a Network ACL resource with rules
defined in-line. At this time you cannot use a Network ACL with in-line rules
in conjunction with any Network ACL Rule resources. Doing so will cause
a conflict of rule settings and will overwrite rules.

~> **NOTE on Network ACLs and Network ACL Associations:** Terraform provides both a standalone [network ACL association](network_acl_association.html)
resource and a network ACL resource with a `subnetIds` attribute. Do not use the same subnet ID in both a network ACL
resource and a network ACL association resource. Doing so will cause a conflict of associations and will overwrite the association.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details. 
+ */ +import { NetworkAcl } from "./.gen/providers/aws/network-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkAcl(this, "main", { + egress: [ + { + action: "allow", + cidrBlock: "10.3.0.0/18", + fromPort: 443, + protocol: "tcp", + ruleNo: 200, + toPort: 443, + }, + ], + ingress: [ + { + action: "allow", + cidrBlock: "10.3.0.0/18", + fromPort: 80, + protocol: "tcp", + ruleNo: 100, + toPort: 80, + }, + ], + tags: { + Name: "main", + }, + vpcId: Token.asString(awsVpcMain.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `vpcId` - (Required) The ID of the associated VPC. +* `subnetIds` - (Optional) A list of Subnet IDs to apply the ACL to +* `ingress` - (Optional) Specifies an ingress rule. Parameters defined below. + This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). +* `egress` - (Optional) Specifies an egress rule. Parameters defined below. + This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### egress and ingress + +Both arguments are processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). + +Both `egress` and `ingress` support the following keys: + +* `fromPort` - (Required) The from port to match. +* `toPort` - (Required) The to port to match. +* `ruleNo` - (Required) The rule number. Used for ordering. +* `action` - (Required) The action to take. +* `protocol` - (Required) The protocol to match. If using the -1 'all' +protocol, you must specify a from and to port of 0. +* `cidrBlock` - (Optional) The CIDR block to match. This must be a +valid network mask. +* `ipv6CidrBlock` - (Optional) The IPv6 CIDR block. +* `icmpType` - (Optional) The ICMP type to be used. Default 0. +* `icmpCode` - (Optional) The ICMP type code to be used. Default 0. + +~> Note: For more information on ICMP types and codes, see here: https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the network ACL +* `arn` - The ARN of the network ACL +* `ownerId` - The ID of the AWS account that owns the network ACL. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network ACLs using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Network ACLs using the `id`. 
For example:

```console
% terraform import aws_network_acl.main acl-7aaabd18
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/network_acl_association.html.markdown b/website/docs/cdktf/typescript/r/network_acl_association.html.markdown new file mode 100644 index 00000000000..924602ec2f7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/network_acl_association.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_network_acl_association" +description: |- + Provides a network ACL association resource. +--- + + + +# Resource: aws_network_acl_association

Provides a network ACL association resource which allows you to associate your network ACL with any subnet(s).

~> **NOTE on Network ACLs and Network ACL Associations:** Terraform provides both a standalone network ACL association resource
and a [network ACL](network_acl.html) resource with a `subnetIds` attribute. Do not use the same subnet ID in both a network ACL
resource and a network ACL association resource. Doing so will cause a conflict of associations and will overwrite the association.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { NetworkAclAssociation } from "./.gen/providers/aws/network-acl-association";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NetworkAclAssociation(this, "main", {
      networkAclId: Token.asString(awsNetworkAclMain.id),
      subnetId: Token.asString(awsSubnetMain.id),
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `networkAclId` - (Required) The ID of the network ACL.
* `subnetId` - (Required) The ID of the associated Subnet.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the network ACL association

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/network_acl_rule.html.markdown b/website/docs/cdktf/typescript/r/network_acl_rule.html.markdown new file mode 100644 index 00000000000..5faecf7d1d5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/network_acl_rule.html.markdown @@ -0,0 +1,134 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_network_acl_rule" +description: |- + Provides a network ACL Rule resource. +--- + + + +# Resource: aws_network_acl_rule

Creates an entry (a rule) in a network ACL with the specified rule number.

~> **NOTE on Network ACLs and Network ACL Rules:** Terraform currently
provides both a standalone Network ACL Rule resource and a [Network ACL](network_acl.html) resource with rules
defined in-line. At this time you cannot use a Network ACL with in-line rules
in conjunction with any Network ACL Rule resources. Doing so will cause
a conflict of rule settings and will overwrite rules.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details.
 */
import { NetworkAcl } from "./.gen/providers/aws/network-acl";
import { NetworkAclRule } from "./.gen/providers/aws/network-acl-rule";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const bar = new NetworkAcl(this, "bar", {
      vpcId: foo.id,
    });
    const awsNetworkAclRuleBar = new NetworkAclRule(this, "bar_1", {
      cidrBlock: foo.cidrBlock,
      egress: false,
      fromPort: 22,
      networkAclId: bar.id,
      protocol: "tcp",
      ruleAction: "allow",
      ruleNumber: 200,
      toPort: 22,
    });
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsNetworkAclRuleBar.overrideLogicalId("bar");
  }
}

```

~> **Note:** One of either `cidrBlock` or `ipv6CidrBlock` is required.

## Argument Reference

This resource supports the following arguments:

* `networkAclId` - (Required) The ID of the network ACL.
* `ruleNumber` - (Required) The rule number for the entry (for example, 100). ACL entries are processed in ascending order by rule number.
* `egress` - (Optional, bool) Indicates whether this is an egress rule (rule is applied to traffic leaving the subnet). Default `false`.
* `protocol` - (Required) The protocol. A value of -1 means all protocols.
* `ruleAction` - (Required) Indicates whether to allow or deny the traffic that matches the rule. Accepted values: `allow` | `deny`
* `cidrBlock` - (Optional) The network range to allow or deny, in CIDR notation (for example 172.16.0.0/24 ).
* `ipv6CidrBlock` - (Optional) The IPv6 CIDR block to allow or deny.
* `fromPort` - (Optional) The from port to match.
* `toPort` - (Optional) The to port to match.
* `icmpType` - (Optional) ICMP protocol: The ICMP type. Required if specifying ICMP for the protocol. E.g., -1
* `icmpCode` - (Optional) ICMP protocol: The ICMP code. Required if specifying ICMP for the protocol. E.g., -1

~> **NOTE:** If the value of `protocol` is `-1` or `all`, the `fromPort` and `toPort` values will be ignored and the rule will apply to all ports.

~> **NOTE:** If the value of `icmpType` is `-1` (which results in a wildcard ICMP type), the `icmpCode` must also be set to `-1` (wildcard ICMP code).

~> Note: For more information on ICMP types and codes, see here: https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml
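An editorial sketch (not generated by `cdktf convert`) of a wildcard ICMP rule following these notes; the `awsNetworkAclMain` reference, rule number, and CIDR block are assumed for illustration:

```typescript
// Editorial sketch - allow all ICMP traffic. `awsNetworkAclMain` is assumed
// to be an existing network ACL; rule number and CIDR block are illustrative.
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
import { NetworkAclRule } from "./.gen/providers/aws/network-acl-rule";
class IcmpRuleSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NetworkAclRule(this, "allow_all_icmp", {
      networkAclId: Token.asString(awsNetworkAclMain.id),
      ruleNumber: 150,
      egress: false,
      protocol: "icmp",
      ruleAction: "allow",
      cidrBlock: "10.3.0.0/18",
      icmpType: -1, // wildcard ICMP type...
      icmpCode: -1, // ...requires a wildcard ICMP code as well
    });
  }
}
```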
## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the network ACL Rule

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import individual rules using `networkAclId:ruleNumber:protocol:egress`, where `protocol` can be a decimal (such as "6") or string (such as "tcp") value. For example:

**NOTE:** If importing a rule previously provisioned by Terraform, the `protocol` must be the input value used at creation time. For more information on protocol numbers and keywords, see here: https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml. 
+
Using the protocol's string value:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using the protocol's decimal value:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

**Using `terraform import` to import** individual rules using `networkAclId:ruleNumber:protocol:egress`, where `protocol` can be a decimal (such as "6") or string (such as "tcp") value. For example:

Using the protocol's string value:

```console
% terraform import aws_network_acl_rule.my_rule acl-7aaabd18:100:tcp:false
```

Using the protocol's decimal value:

```console
% terraform import aws_network_acl_rule.my_rule acl-7aaabd18:100:6:false
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/network_interface.markdown b/website/docs/cdktf/typescript/r/network_interface.markdown new file mode 100644 index 00000000000..635f6714491 --- /dev/null +++ b/website/docs/cdktf/typescript/r/network_interface.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_network_interface" +description: |- + Provides an Elastic network interface (ENI) resource. +--- + + + +# Resource: aws_network_interface

Provides an Elastic network interface (ENI) resource.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { NetworkInterface } from "./.gen/providers/aws/network-interface";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NetworkInterface(this, "test", {
      attachment: [
        {
          deviceIndex: 1,
          instance: Token.asString(awsInstanceTest.id),
        },
      ],
      privateIps: ["10.0.0.50"],
      securityGroups: [web.id],
      subnetId: publicA.id,
    });
  }
}

```

### Example of Managing Multiple IPs on a Network Interface

By default, private IPs are managed through the `privateIps` and `privateIpsCount` arguments which manage IPs as a set of IPs that are configured without regard to order. For a new network interface, the same primary IP address is consistently selected from a given set of addresses, regardless of the order provided. However, modifications of the set of addresses of an existing interface will not alter the current primary IP address unless it has been removed from the set.

In order to manage the private IPs as a sequentially ordered list, configure `privateIpListEnabled` to `true` and use `privateIpList` to manage the IPs, as sketched below. This will disable the `privateIps` and `privateIpsCount` settings, which must be removed from the config file but are still exported. Note that changing the first address of `privateIpList`, which is the primary, always requires a new interface. 
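As an editorial sketch (not generated by `cdktf convert`), list-managed IPs look like the following; the `publicA` subnet reference follows the example above, and the addresses are illustrative:

```typescript
// Editorial sketch - sequentially ordered private IPs. The first address in
// privateIpList is the primary; changing it forces a new interface.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { NetworkInterface } from "./.gen/providers/aws/network-interface";
class OrderedIpsSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NetworkInterface(this, "ordered", {
      subnetId: publicA.id,
      privateIpListEnabled: true,
      privateIpList: ["10.0.0.50", "10.0.0.51", "10.0.0.52"],
    });
  }
}
```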
+

If you are managing a specific set or list of IPs, instead of just using `privateIpsCount`, this is a potential workflow for also leveraging `privateIpsCount` to have AWS automatically assign additional IP addresses:

1. Comment out `privateIps`, `privateIpList`, `privateIpListEnabled` in your configuration
2. Set the desired `privateIpsCount` (count of the number of secondaries, the primary is not included)
3. Apply to assign the extra IPs
4. Remove `privateIpsCount` and restore your settings from the first step
5. Add the new IPs to your current settings
6. Apply again to update the stored state

This process can also be used to remove IP addresses in addition to the option of manually removing them. Adding IP addresses manually is more difficult because it requires knowledge of which addresses are available.

## Argument Reference

The following arguments are required:

* `subnetId` - (Required) Subnet ID to create the ENI in.

The following arguments are optional:

* `attachment` - (Optional) Configuration block to define the attachment of the ENI. See [Attachment](#attachment) below for more details.
* `description` - (Optional) Description for the network interface.
* `interfaceType` - (Optional) Type of network interface to create. Set to `efa` for Elastic Fabric Adapter. Changing `interfaceType` will cause the resource to be destroyed and re-created.
* `ipv4PrefixCount` - (Optional) Number of IPv4 prefixes that AWS automatically assigns to the network interface.
* `ipv4Prefixes` - (Optional) One or more IPv4 prefixes assigned to the network interface.
* `ipv6AddressCount` - (Optional) Number of IPv6 addresses to assign to a network interface. You can't use this option if specifying specific `ipv6Addresses`. If your subnet has the AssignIpv6AddressOnCreation attribute set to `true`, you can specify `0` to override this setting.
* `ipv6AddressListEnabled` - (Optional) Whether `ipv6AddressList` is allowed and controls the IPs to assign to the ENI and `ipv6Addresses` and `ipv6AddressCount` become read-only. Default false.
* `ipv6AddressList` - (Optional) List of IPv6 addresses to assign to the ENI in sequential order.
* `ipv6Addresses` - (Optional) One or more specific IPv6 addresses from the IPv6 CIDR block range of your subnet. Addresses are assigned without regard to order. You can't use this option if you're specifying `ipv6AddressCount`.
* `ipv6PrefixCount` - (Optional) Number of IPv6 prefixes that AWS automatically assigns to the network interface.
* `ipv6Prefixes` - (Optional) One or more IPv6 prefixes assigned to the network interface.
* `privateIpList` - (Optional) List of private IPs to assign to the ENI in sequential order. Requires setting `privateIpListEnabled` to `true`.
* `privateIpListEnabled` - (Optional) Whether `privateIpList` is allowed and controls the IPs to assign to the ENI and `privateIps` and `privateIpsCount` become read-only. Default false.
* `privateIps` - (Optional) List of private IPs to assign to the ENI without regard to order.
* `privateIpsCount` - (Optional) Number of secondary private IPs to assign to the ENI. The total number of private IPs will be 1 + `privateIpsCount`, as a primary private IP will be assigned to an ENI by default.
* `securityGroups` - (Optional) List of security group IDs to assign to the ENI.
* `sourceDestCheck` - (Optional) Whether to enable source destination checking for the ENI. Default true.
* `tags` - (Optional) Map of tags to assign to the resource. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### Attachment

The `attachment` block supports the following:

* `instance` - (Required) ID of the instance to attach to.
* `deviceIndex` - (Required) Integer to define the device's index.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - ARN of the network interface.
* `id` - ID of the network interface.
* `macAddress` - MAC address of the network interface.
* `ownerId` - AWS account ID of the owner of the network interface.
* `privateDnsName` - Private DNS name of the network interface (IPv4).
* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Interfaces using the `id`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Network Interfaces using the `id`. For example:

```console
% terraform import aws_network_interface.test eni-e5aa89a3
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/network_interface_attachment.html.markdown b/website/docs/cdktf/typescript/r/network_interface_attachment.html.markdown new file mode 100644 index 00000000000..65b3f766942 --- /dev/null +++ b/website/docs/cdktf/typescript/r/network_interface_attachment.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_network_interface_attachment" +description: |- + Attaches an Elastic network interface (ENI) to an EC2 instance. +--- + + + +# Resource: aws_network_interface_attachment

Attaches an Elastic network interface (ENI) to an EC2 instance.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { NetworkInterfaceAttachmentA } from "./.gen/providers/aws/network-interface-attachment";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NetworkInterfaceAttachmentA(this, "test", {
      deviceIndex: 0,
      instanceId: Token.asString(awsInstanceTest.id),
      networkInterfaceId: Token.asString(awsNetworkInterfaceTest.id),
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `instanceId` - (Required) Instance ID to attach.
* `networkInterfaceId` - (Required) ENI ID to attach.
* `deviceIndex` - (Required) Network interface index (int). 
+

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `instanceId` - Instance ID.
* `networkInterfaceId` - Network interface ID.
* `attachmentId` - The ENI Attachment ID.
* `status` - The status of the Network Interface Attachment.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Elastic network interface (ENI) Attachments using their Attachment ID. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Elastic network interface (ENI) Attachments using their Attachment ID. For example:

```console
% terraform import aws_network_interface_attachment.secondary_nic eni-attach-0a33842b4ec347c4c
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/network_interface_sg_attachment.html.markdown b/website/docs/cdktf/typescript/r/network_interface_sg_attachment.html.markdown new file mode 100644 index 00000000000..f74c170c7ca --- /dev/null +++ b/website/docs/cdktf/typescript/r/network_interface_sg_attachment.html.markdown @@ -0,0 +1,147 @@ +--- +subcategory: "VPC (Virtual Private Cloud)" +layout: "aws" +page_title: "AWS: aws_network_interface_sg_attachment" +description: |- + Associates a security group with a network interface. +--- + + + +# Resource: aws_network_interface_sg_attachment

This resource attaches a security group to an Elastic Network Interface (ENI).
It can be used to attach a security group to any existing ENI, be it a
secondary ENI or one attached as the primary interface on an instance.

~> **NOTE on instances, interfaces, and security groups:** Terraform currently
provides the capability to assign security groups via the [`awsInstance`][1]
and the [`awsNetworkInterface`][2] resources. Using this resource in
conjunction with security groups provided in-line in those resources will cause
conflicts, and will lead to spurious diffs and undefined behavior - please use
one or the other.

[1]: /docs/providers/aws/r/instance.html
[2]: /docs/providers/aws/r/network_interface.html

## Example Usage

The following provides a very basic example of setting up an instance (provided
by `instance`) in the default security group, creating a security group
(provided by `sg`) and then attaching the security group to the instance's
primary network interface via the `awsNetworkInterfaceSgAttachment` resource,
named `sgAttachment`:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsAmi } from "./.gen/providers/aws/data-aws-ami"; +import { Instance } from "./.gen/providers/aws/instance"; +import { NetworkInterfaceSgAttachment } from "./.gen/providers/aws/network-interface-sg-attachment"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const sg = new SecurityGroup(this, "sg", { + tags: { + type: "terraform-test-security-group", + }, + }); + const ami = new DataAwsAmi(this, "ami", { + filter: [ + { + name: "name", + values: ["amzn-ami-hvm-*"], + }, + ], + mostRecent: true, + owners: ["amazon"], + }); + const instance = new Instance(this, "instance", { + ami: Token.asString(ami.id), + instanceType: "t2.micro", + tags: { + type: "terraform-test-instance", + }, + }); + new NetworkInterfaceSgAttachment(this, "sg_attachment", { + networkInterfaceId: instance.primaryNetworkInterfaceId, + securityGroupId: sg.id, + }); + } +} + +``` + +In this example, `instance` is provided by the `awsInstance` data source, +fetching an external instance, possibly not managed by Terraform. +`sgAttachment` then attaches to the output instance's `networkInterfaceId`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsInstance } from "./.gen/providers/aws/data-aws-instance"; +import { NetworkInterfaceSgAttachment } from "./.gen/providers/aws/network-interface-sg-attachment"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const sg = new SecurityGroup(this, "sg", { + tags: { + type: "terraform-test-security-group", + }, + }); + const instance = new DataAwsInstance(this, "instance", { + instanceId: "i-1234567890abcdef0", + }); + new NetworkInterfaceSgAttachment(this, "sg_attachment", { + networkInterfaceId: Token.asString(instance.networkInterfaceId), + securityGroupId: sg.id, + }); + } +} + +``` + +## Argument Reference + +* `securityGroupId` - (Required) The ID of the security group. +* `networkInterfaceId` - (Required) The ID of the network interface to attach to. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Interface Security Group attachments using the associated network interface ID and security group ID, separated by an underscore (`_`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Network Interface Security Group attachments using the associated network interface ID and security group ID, separated by an underscore (`_`). 
For example:

```console
% terraform import aws_network_interface_sg_attachment.sg_attachment eni-1234567890abcdef0_sg-1234567890abcdef0
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkfirewall_firewall.html.markdown b/website/docs/cdktf/typescript/r/networkfirewall_firewall.html.markdown new file mode 100644 index 00000000000..76931d680a9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkfirewall_firewall.html.markdown @@ -0,0 +1,129 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_firewall" +description: |- + Provides an AWS Network Firewall Firewall resource. +--- + + + +# Resource: aws_networkfirewall_firewall

Provides an AWS Network Firewall Firewall Resource

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { NetworkfirewallFirewall } from "./.gen/providers/aws/networkfirewall-firewall";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NetworkfirewallFirewall(this, "example", {
      firewallPolicyArn: Token.asString(
        awsNetworkfirewallFirewallPolicyExample.arn
      ),
      name: "example",
      subnetMapping: [
        {
          subnetId: Token.asString(awsSubnetExample.id),
        },
      ],
      tags: {
        Tag1: "Value1",
        Tag2: "Value2",
      },
      vpcId: Token.asString(awsVpcExample.id),
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `deleteProtection` - (Optional) A boolean flag indicating whether it is possible to delete the firewall. Defaults to `false`.

* `description` - (Optional) A friendly description of the firewall.

* `encryptionConfiguration` - (Optional) KMS encryption configuration settings. See [Encryption Configuration](#encryption-configuration) below for details.

* `firewallPolicyArn` - (Required) The Amazon Resource Name (ARN) of the VPC Firewall policy.

* `firewallPolicyChangeProtection` - (Optional) A boolean flag indicating whether it is possible to change the associated firewall policy. Defaults to `false`.

* `name` - (Required, Forces new resource) A friendly name of the firewall.

* `subnetChangeProtection` - (Optional) A boolean flag indicating whether it is possible to change the associated subnet(s). Defaults to `false`.

* `subnetMapping` - (Required) Set of configuration blocks describing the public subnets. Each subnet must belong to a different Availability Zone in the VPC. AWS Network Firewall creates a firewall endpoint in each subnet. See [Subnet Mapping](#subnet-mapping) below for details.

* `tags` - (Optional) Map of resource tags to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

* `vpcId` - (Required, Forces new resource) The unique identifier of the VPC where AWS Network Firewall should create the firewall.

### Encryption Configuration

`encryptionConfiguration` settings for customer managed KMS keys. Remove this block to use the default AWS-managed KMS encryption (rather than setting `type` to `awsOwnedKmsKey`).

* `keyId` - (Optional) The ID of the customer managed key. You can use any of the [key identifiers](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id) that KMS supports, unless you're using a key that's managed by another account. If you're using a key managed by another account, then specify the key ARN.
* `type` - (Required) The type of AWS KMS key to use for encryption of your Network Firewall resources. Valid values are `customerKms` and `awsOwnedKmsKey`.
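As an editorial sketch (not generated by `cdktf convert`), a firewall encrypted with a customer managed key might be configured as follows; the `awsKmsKeyExample` reference is assumed, and `CUSTOMER_KMS` is the underlying API value that the list above renders as `customerKms`:

```typescript
// Editorial sketch - customer managed KMS encryption. The KMS key, policy,
// VPC, and subnet references are assumed to exist as in the example above.
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
import { NetworkfirewallFirewall } from "./.gen/providers/aws/networkfirewall-firewall";
class EncryptedFirewallSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new NetworkfirewallFirewall(this, "example_encrypted", {
      name: "example-encrypted",
      firewallPolicyArn: Token.asString(
        awsNetworkfirewallFirewallPolicyExample.arn
      ),
      vpcId: Token.asString(awsVpcExample.id),
      subnetMapping: [{ subnetId: Token.asString(awsSubnetExample.id) }],
      encryptionConfiguration: {
        keyId: Token.asString(awsKmsKeyExample.arn),
        type: "CUSTOMER_KMS",
      },
    });
  }
}
```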
### Subnet Mapping

The `subnetMapping` block supports the following arguments:

* `ipAddressType` - (Optional) The subnet's IP address type. Valid values: `"dualstack"`, `"ipv4"`.
* `subnetId` - (Required) The unique identifier for the subnet.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The Amazon Resource Name (ARN) that identifies the firewall.

* `arn` - The Amazon Resource Name (ARN) that identifies the firewall.

* `firewallStatus` - Nested list of information about the current status of the firewall.
    * `syncStates` - Set of subnets configured for use by the firewall.
        * `attachment` - Nested list describing the attachment status of the firewall's association with a single VPC subnet.
            * `endpointId` - The identifier of the firewall endpoint that AWS Network Firewall has instantiated in the subnet. You use this to identify the firewall endpoint in the VPC route tables, when you redirect the VPC traffic through the endpoint.
            * `subnetId` - The unique identifier of the subnet that you've specified to be used for a firewall endpoint.
            * `availabilityZone` - The Availability Zone where the subnet is configured.

* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

* `updateToken` - A string token used when updating a firewall.

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Firewalls using their `arn`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Network Firewall Firewalls using their `arn`. For example:

```console
% terraform import aws_networkfirewall_firewall.example arn:aws:network-firewall:us-west-1:123456789012:firewall/example
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkfirewall_firewall_policy.html.markdown b/website/docs/cdktf/typescript/r/networkfirewall_firewall_policy.html.markdown new file mode 100644 index 00000000000..585596d5158 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkfirewall_firewall_policy.html.markdown @@ -0,0 +1,290 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_firewall_policy" +description: |- + Provides an AWS Network Firewall Policy resource. 
+---
+
+
+
+# Resource: aws_networkfirewall_firewall_policy
+
+Provides an AWS Network Firewall Firewall Policy Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkfirewallFirewallPolicy } from "./.gen/providers/aws/networkfirewall-firewall-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallFirewallPolicy(this, "example", {
+      firewallPolicy: {
+        statelessDefaultActions: ["aws:pass"],
+        statelessFragmentDefaultActions: ["aws:drop"],
+        statelessRuleGroupReference: [
+          {
+            priority: 1,
+            resourceArn: Token.asString(awsNetworkfirewallRuleGroupExample.arn),
+          },
+        ],
+      },
+      name: "example",
+      tags: {
+        Tag1: "Value1",
+        Tag2: "Value2",
+      },
+    });
+  }
+}
+
+```
+
+## Policy with a HOME_NET Override
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkfirewallFirewallPolicy } from "./.gen/providers/aws/networkfirewall-firewall-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallFirewallPolicy(this, "example", {
+      firewallPolicy: {
+        policyVariables: {
+          ruleVariables: [
+            {
+              ipSet: {
+                definition: ["10.0.0.0/16", "10.1.0.0/24"],
+              },
+              key: "HOME_NET",
+            },
+          ],
+        },
+        statelessDefaultActions: ["aws:pass"],
+        statelessFragmentDefaultActions: ["aws:drop"],
+        statelessRuleGroupReference: [
+          {
+            priority: 1,
+            resourceArn: Token.asString(awsNetworkfirewallRuleGroupExample.arn),
+          },
+        ],
+      },
+      name: "example",
+      tags: {
+        Tag1: "Value1",
+        Tag2: "Value2",
+      },
+    });
+  }
+}
+
+```
+
+## Policy with a Custom Action for Stateless Inspection
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkfirewallFirewallPolicy } from "./.gen/providers/aws/networkfirewall-firewall-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallFirewallPolicy(this, "test", {
+      firewallPolicy: {
+        statelessCustomAction: [
+          {
+            actionDefinition: {
+              publishMetricAction: {
+                dimension: [
+                  {
+                    value: "1",
+                  },
+                ],
+              },
+            },
+            actionName: "ExampleCustomAction",
+          },
+        ],
+        statelessDefaultActions: ["aws:pass", "ExampleCustomAction"],
+        statelessFragmentDefaultActions: ["aws:drop"],
+      },
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) A friendly description of the firewall policy.
+
+* `encryptionConfiguration` - (Optional) KMS encryption configuration settings. See [Encryption Configuration](#encryption-configuration) below for details.
+
+* `firewallPolicy` - (Required) A configuration block describing the rule groups and policy actions to use in the firewall policy. See [Firewall Policy](#firewall-policy) below for details.
+
+* `name` - (Required, Forces new resource) A friendly name of the firewall policy.
+
+* `tags` - (Optional) Map of resource tags to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Encryption Configuration
+
+`encryptionConfiguration` settings for customer managed KMS keys. Remove this block to use the default AWS-managed KMS encryption (rather than setting `type` to `AWS_OWNED_KMS_KEY`).
+
+* `keyId` - (Optional) The ID of the customer managed key. You can use any of the [key identifiers](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id) that KMS supports, unless you're using a key that's managed by another account. If you're using a key managed by another account, then specify the key ARN.
+* `type` - (Required) The type of AWS KMS key to use for encryption of your Network Firewall resources. Valid values are `CUSTOMER_KMS` and `AWS_OWNED_KMS_KEY`.
+
+### Firewall Policy
+
+The `firewallPolicy` block supports the following arguments:
+
+* `policyVariables` - (Optional) Contains variables that you can use to override default Suricata settings in your firewall policy. See [Rule Variables](#rule-variables) for details.
+
+* `statefulDefaultActions` - (Optional) Set of actions to take on a packet if it does not match any stateful rules in the policy. This can only be specified if the policy has a `statefulEngineOptions` block with a `ruleOrder` value of `STRICT_ORDER`. You can specify one of `aws:drop_strict` or `aws:drop_established` (or neither), as well as any combination of `aws:alert_strict` and `aws:alert_established`.
+
+* `statefulEngineOptions` - (Optional) A configuration block that defines options on how the policy handles stateful rules. See [Stateful Engine Options](#stateful-engine-options) below for details.
+
+* `statefulRuleGroupReference` - (Optional) Set of configuration blocks containing references to the stateful rule groups that are used in the policy. See [Stateful Rule Group Reference](#stateful-rule-group-reference) below for details.
+
+* `statelessCustomAction` - (Optional) Set of configuration blocks describing the custom action definitions that are available for use in the firewall policy's `statelessDefaultActions`. See [Stateless Custom Action](#stateless-custom-action) below for details.
+
+* `statelessDefaultActions` - (Required) Set of actions to take on a packet if it does not match any of the stateless rules in the policy. You must specify one of the standard actions: `aws:drop`, `aws:pass`, or `aws:forward_to_sfe`.
+In addition, you can specify custom actions that are compatible with your standard action choice. If you want non-matching packets to be forwarded for stateful inspection, specify `aws:forward_to_sfe`.
+
+* `statelessFragmentDefaultActions` - (Required) Set of actions to take on a fragmented packet if it does not match any of the stateless rules in the policy. You must specify one of the standard actions: `aws:drop`, `aws:pass`, or `aws:forward_to_sfe`.
+In addition, you can specify custom actions that are compatible with your standard action choice.
If you want non-matching packets to be forwarded for stateful inspection, specify `aws:forward_to_sfe`.
+
+* `statelessRuleGroupReference` - (Optional) Set of configuration blocks containing references to the stateless rule groups that are used in the policy. See [Stateless Rule Group Reference](#stateless-rule-group-reference) below for details.
+
+### Rule Variables
+
+The `ruleVariables` block supports the following arguments:
+
+* `key` - (Required) An alphanumeric string to identify the `ipSet`. Valid values: `HOME_NET`.
+
+* `ipSet` - (Required) A configuration block that defines a set of IP addresses. See [IP Set](#ip-set) below for details.
+
+### IP Set
+
+The `ipSet` block supports the following argument:
+
+* `definition` - (Required) Set of IPv4 or IPv6 addresses in CIDR notation to use for the Suricata `HOME_NET` variable.
+
+### Stateful Engine Options
+
+The `statefulEngineOptions` block supports the following arguments:
+
+~> **NOTE:** If the `STRICT_ORDER` rule order is specified, this firewall policy can only reference stateful rule groups that utilize `STRICT_ORDER`.
+
+* `ruleOrder` - Indicates how to manage the order of stateful rule evaluation for the policy. Default value: `DEFAULT_ACTION_ORDER`. Valid values: `DEFAULT_ACTION_ORDER`, `STRICT_ORDER`.
+
+* `streamExceptionPolicy` - Describes how to treat traffic which has broken midstream. Default value: `DROP`. Valid values: `DROP`, `CONTINUE`, `REJECT`.
+
+### Stateful Rule Group Reference
+
+The `statefulRuleGroupReference` block supports the following arguments:
+
+* `priority` - (Optional) An integer setting that indicates the order in which to apply the stateful rule groups in a single policy. This argument must be specified if the policy has a `statefulEngineOptions` block with a `ruleOrder` value of `STRICT_ORDER`. AWS Network Firewall applies each stateful rule group to a packet starting with the group that has the lowest priority setting. A minimal strict-order sketch is shown after the [Action Definition](#action-definition) section below.
+
+* `resourceArn` - (Required) The Amazon Resource Name (ARN) of the stateful rule group.
+
+* `override` - (Optional) Configuration block for override values.
+
+#### Override
+
+* `action` - (Optional) The action that changes the rule group from `DROP` to `ALERT`. This only applies to managed rule groups.
+
+### Stateless Custom Action
+
+The `statelessCustomAction` block supports the following arguments:
+
+* `actionDefinition` - (Required) A configuration block describing the custom action associated with the `actionName`. See [Action Definition](#action-definition) below for details.
+
+* `actionName` - (Required, Forces new resource) A friendly name of the custom action.
+
+### Stateless Rule Group Reference
+
+The `statelessRuleGroupReference` block supports the following arguments:
+
+* `priority` - (Required) An integer setting that indicates the order in which to run the stateless rule groups in a single policy. AWS Network Firewall applies each stateless rule group to a packet starting with the group that has the lowest priority setting.
+
+* `resourceArn` - (Required) The Amazon Resource Name (ARN) of the stateless rule group.
+
+### Action Definition
+
+The `actionDefinition` block supports the following argument:
+
+* `publishMetricAction` - (Required) A configuration block describing the stateless inspection criteria that publishes the specified metrics to Amazon CloudWatch for the matching packet. You can pair this custom action with any of the standard stateless rule actions. See [Publish Metric Action](#publish-metric-action) below for details.
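+
+The interaction between `statefulEngineOptions` and a prioritized `statefulRuleGroupReference` is easiest to see in context. The following is a minimal, hypothetical sketch (not one of the generated examples above): a fragment for a `TerraformStack` constructor that assumes the same imports as the examples, plus a `STRICT_ORDER` stateful rule group `awsNetworkfirewallRuleGroupExample` defined elsewhere in the stack.
+
+```typescript
+// Hypothetical sketch: a strict-order policy that evaluates one stateful
+// rule group at priority 1. `awsNetworkfirewallRuleGroupExample` is assumed
+// to be a STRICT_ORDER stateful rule group defined elsewhere.
+new NetworkfirewallFirewallPolicy(this, "strict_order_sketch", {
+  firewallPolicy: {
+    statefulDefaultActions: ["aws:drop_strict"],
+    statefulEngineOptions: {
+      ruleOrder: "STRICT_ORDER",
+    },
+    statefulRuleGroupReference: [
+      {
+        priority: 1,
+        resourceArn: Token.asString(awsNetworkfirewallRuleGroupExample.arn),
+      },
+    ],
+    statelessDefaultActions: ["aws:forward_to_sfe"],
+    statelessFragmentDefaultActions: ["aws:forward_to_sfe"],
+  },
+  name: "strict-order-sketch",
+});
+```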
+ +### Publish Metric Action + +The `publishMetricAction` block supports the following argument: + +* `dimension` - (Required) Set of configuration blocks describing dimension settings to use for Amazon CloudWatch custom metrics. See [Dimension](#dimension) below for more details. + +### Dimension + +The `dimension` block supports the following argument: + +* `value` - (Required) The string value to use in the custom metric dimension. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) that identifies the firewall policy. + +* `arn` - The Amazon Resource Name (ARN) that identifies the firewall policy. + +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +* `updateToken` - A string token used when updating a firewall policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Policies using their `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Network Firewall Policies using their `arn`. For example: + +```console +% terraform import aws_networkfirewall_firewall_policy.example arn:aws:network-firewall:us-west-1:123456789012:firewall-policy/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkfirewall_logging_configuration.html.markdown b/website/docs/cdktf/typescript/r/networkfirewall_logging_configuration.html.markdown new file mode 100644 index 00000000000..92dad612e70 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkfirewall_logging_configuration.html.markdown @@ -0,0 +1,174 @@ +--- +subcategory: "Network Firewall" +layout: "aws" +page_title: "AWS: aws_networkfirewall_logging_configuration" +description: |- + Provides an AWS Network Firewall Logging Configuration resource. +--- + + + +# Resource: aws_networkfirewall_logging_configuration + +Provides an AWS Network Firewall Logging Configuration Resource + +## Example Usage + +### Logging to S3 + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { NetworkfirewallLoggingConfiguration } from "./.gen/providers/aws/networkfirewall-logging-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallLoggingConfiguration(this, "example", {
+      firewallArn: Token.asString(awsNetworkfirewallFirewallExample.arn),
+      loggingConfiguration: {
+        logDestinationConfig: [
+          {
+            logDestination: {
+              bucketName: Token.asString(awsS3BucketExample.bucket),
+              prefix: "/example",
+            },
+            logDestinationType: "S3",
+            logType: "FLOW",
+          },
+        ],
+      },
+    });
+  }
+}
+
+```
+
+### Logging to CloudWatch
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkfirewallLoggingConfiguration } from "./.gen/providers/aws/networkfirewall-logging-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallLoggingConfiguration(this, "example", {
+      firewallArn: Token.asString(awsNetworkfirewallFirewallExample.arn),
+      loggingConfiguration: {
+        logDestinationConfig: [
+          {
+            logDestination: {
+              logGroup: Token.asString(awsCloudwatchLogGroupExample.name),
+            },
+            logDestinationType: "CloudWatchLogs",
+            logType: "ALERT",
+          },
+        ],
+      },
+    });
+  }
+}
+
+```
+
+### Logging to Kinesis Data Firehose
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkfirewallLoggingConfiguration } from "./.gen/providers/aws/networkfirewall-logging-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallLoggingConfiguration(this, "example", {
+      firewallArn: Token.asString(awsNetworkfirewallFirewallExample.arn),
+      loggingConfiguration: {
+        logDestinationConfig: [
+          {
+            logDestination: {
+              deliveryStream: Token.asString(
+                awsKinesisFirehoseDeliveryStreamExample.name
+              ),
+            },
+            logDestinationType: "KinesisDataFirehose",
+            logType: "ALERT",
+          },
+        ],
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `firewallArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Network Firewall firewall.
+
+* `loggingConfiguration` - (Required) A configuration block describing how AWS Network Firewall performs logging for a firewall. See [Logging Configuration](#logging-configuration) below for details.
+
+### Logging Configuration
+
+The `loggingConfiguration` block supports the following arguments:
+
+* `logDestinationConfig` - (Required) Set of configuration blocks describing the logging details for a firewall. See [Log Destination Config](#log-destination-config) below for details. At most two blocks can be specified: one for `FLOW` logs and one for `ALERT` logs.
+
+### Log Destination Config
+
+The `logDestinationConfig` block supports the following arguments:
+
+* `logDestination` - (Required) A map describing the logging destination for the chosen `logDestinationType`.
+    * For an Amazon S3 bucket, specify the key `bucketName` with the name of the bucket and optionally specify the key `prefix` with a path.
+    * For a CloudWatch log group, specify the key `logGroup` with the name of the CloudWatch log group.
+    * For a Kinesis Data Firehose delivery stream, specify the key `deliveryStream` with the name of the delivery stream.
+
+* `logDestinationType` - (Required) The location to send logs to. Valid values: `S3`, `CloudWatchLogs`, `KinesisDataFirehose`.
+
+* `logType` - (Required) The type of log to send. Valid values: `ALERT` or `FLOW`. Alert logs report traffic that matches a `statefulRule` with an action setting that sends a log message. Flow logs are standard network traffic flow logs.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the associated firewall.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Logging Configurations using the `firewallArn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Network Firewall Logging Configurations using the `firewallArn`. For example:
+
+```console
+% terraform import aws_networkfirewall_logging_configuration.example arn:aws:network-firewall:us-west-1:123456789012:firewall/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkfirewall_resource_policy.html.markdown b/website/docs/cdktf/typescript/r/networkfirewall_resource_policy.html.markdown
new file mode 100644
index 00000000000..de21954a20e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkfirewall_resource_policy.html.markdown
@@ -0,0 +1,136 @@
+---
+subcategory: "Network Firewall"
+layout: "aws"
+page_title: "AWS: aws_networkfirewall_resource_policy"
+description: |-
+  Provides an AWS Network Firewall Resource Policy resource.
+---
+
+
+
+# Resource: aws_networkfirewall_resource_policy
+
+Provides an AWS Network Firewall Resource Policy Resource for a rule group or firewall policy.
+
+## Example Usage
+
+### For a Firewall Policy resource
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { NetworkfirewallResourcePolicy } from "./.gen/providers/aws/networkfirewall-resource-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkfirewallResourcePolicy(this, "example", { + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: [ + "network-firewall:ListFirewallPolicies", + "network-firewall:CreateFirewall", + "network-firewall:UpdateFirewall", + "network-firewall:AssociateFirewallPolicy", + ], + Effect: "Allow", + Principal: { + AWS: "arn:aws:iam::123456789012:root", + }, + Resource: awsNetworkfirewallFirewallPolicyExample.arn, + }, + ], + Version: "2012-10-17", + }) + ), + resourceArn: Token.asString(awsNetworkfirewallFirewallPolicyExample.arn), + }); + } +} + +``` + +### For a Rule Group resource + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkfirewallResourcePolicy } from "./.gen/providers/aws/networkfirewall-resource-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkfirewallResourcePolicy(this, "example", { + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: [ + "network-firewall:ListRuleGroups", + "network-firewall:CreateFirewallPolicy", + "network-firewall:UpdateFirewallPolicy", + ], + Effect: "Allow", + Principal: { + AWS: "arn:aws:iam::123456789012:root", + }, + Resource: awsNetworkfirewallRuleGroupExample.arn, + }, + ], + Version: "2012-10-17", + }) + ), + resourceArn: Token.asString(awsNetworkfirewallRuleGroupExample.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policy` - (Required) JSON formatted policy document that controls access to the Network Firewall resource. The policy must be provided **without whitespaces**. We recommend using [jsonencode](https://www.terraform.io/docs/configuration/functions/jsonencode.html) for formatting as seen in the examples above. For more details, including available policy statement Actions, see the [Policy](https://docs.aws.amazon.com/network-firewall/latest/APIReference/API_PutResourcePolicy.html#API_PutResourcePolicy_RequestSyntax) parameter in the AWS API documentation. + +* `resourceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the rule group or firewall policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the rule group or firewall policy associated with the resource policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Resource Policies using the `resourceArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Network Firewall Resource Policies using the `resourceArn`. 
For example:
+
+```console
+% terraform import aws_networkfirewall_resource_policy.example arn:aws:network-firewall:us-west-1:123456789012:stateful-rulegroup/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkfirewall_rule_group.html.markdown b/website/docs/cdktf/typescript/r/networkfirewall_rule_group.html.markdown
new file mode 100644
index 00000000000..fbc88d084d1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkfirewall_rule_group.html.markdown
@@ -0,0 +1,680 @@
+---
+subcategory: "Network Firewall"
+layout: "aws"
+page_title: "AWS: aws_networkfirewall_rule_group"
+description: |-
+  Provides an AWS Network Firewall Rule Group resource.
+---
+
+
+
+# Resource: aws_networkfirewall_rule_group
+
+Provides an AWS Network Firewall Rule Group Resource
+
+## Example Usage
+
+### Stateful Inspection for denying access to a domain
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkfirewallRuleGroup } from "./.gen/providers/aws/networkfirewall-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallRuleGroup(this, "example", {
+      capacity: 100,
+      name: "example",
+      ruleGroup: {
+        rulesSource: {
+          rulesSourceList: {
+            generatedRulesType: "DENYLIST",
+            targetTypes: ["HTTP_HOST"],
+            targets: ["test.example.com"],
+          },
+        },
+      },
+      tags: {
+        Tag1: "Value1",
+        Tag2: "Value2",
+      },
+      type: "STATEFUL",
+    });
+  }
+}
+
+```
+
+### Stateful Inspection for permitting packets from a source IP address
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformIterator, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkfirewallRuleGroup } from "./.gen/providers/aws/networkfirewall-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ips = ["1.1.1.1/32", "1.0.0.1/32"];
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g.
a result of a data source + you need to keep this like it is.*/ + const exampleDynamicIterator0 = TerraformIterator.fromList( + Token.asAny(ips) + ); + new NetworkfirewallRuleGroup(this, "example", { + capacity: 50, + description: "Permits http traffic from source", + name: "example", + ruleGroup: { + rulesSource: { + statefulRule: exampleDynamicIterator0.dynamic({ + action: "PASS", + header: [ + { + destination: "ANY", + destination_port: "ANY", + direction: "ANY", + protocol: "HTTP", + source: exampleDynamicIterator0.value, + source_port: "ANY", + }, + ], + rule_option: [ + { + keyword: "sid", + settings: ["1"], + }, + ], + }), + }, + }, + tags: { + Name: "permit HTTP from source", + }, + type: "STATEFUL", + }); + } +} + +``` + +### Stateful Inspection for blocking packets from going to an intended destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkfirewallRuleGroup } from "./.gen/providers/aws/networkfirewall-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkfirewallRuleGroup(this, "example", { + capacity: 100, + name: "example", + ruleGroup: { + rulesSource: { + statefulRule: [ + { + action: "DROP", + header: { + destination: "124.1.1.24/32", + destinationPort: Token.asString(53), + direction: "ANY", + protocol: "TCP", + source: "1.2.3.4/32", + sourcePort: Token.asString(53), + }, + ruleOption: [ + { + keyword: "sid", + settings: ["1"], + }, + ], + }, + ], + }, + }, + tags: { + Tag1: "Value1", + Tag2: "Value2", + }, + type: "STATEFUL", + }); + } +} + +``` + +### Stateful Inspection from rules specifications defined in Suricata flat format + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkfirewallRuleGroup } from "./.gen/providers/aws/networkfirewall-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkfirewallRuleGroup(this, "example", { + capacity: 100, + name: "example", + rules: Token.asString(Fn.file("example.rules")), + tags: { + Tag1: "Value1", + Tag2: "Value2", + }, + type: "STATEFUL", + }); + } +} + +``` + +### Stateful Inspection from rule group specifications using rule variables and Suricata format rules + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { NetworkfirewallRuleGroup } from "./.gen/providers/aws/networkfirewall-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkfirewallRuleGroup(this, "example", { + capacity: 100, + name: "example", + ruleGroup: { + ruleVariables: { + ipSets: [ + { + ipSet: { + definition: ["10.0.0.0/16", "10.0.1.0/24", "192.168.0.0/16"], + }, + key: "WEBSERVERS_HOSTS", + }, + { + ipSet: { + definition: ["1.2.3.4/32"], + }, + key: "EXTERNAL_HOST", + }, + ], + portSets: [ + { + key: "HTTP_PORTS", + portSet: { + definition: ["443", "80"], + }, + }, + ], + }, + rulesSource: { + rulesString: Token.asString(Fn.file("suricata_rules_file")), + }, + }, + tags: { + Tag1: "Value1", + Tag2: "Value2", + }, + type: "STATEFUL", + }); + } +} + +``` + +### Stateless Inspection with a Custom Action + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkfirewallRuleGroup } from "./.gen/providers/aws/networkfirewall-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkfirewallRuleGroup(this, "example", { + capacity: 100, + description: "Stateless Rate Limiting Rule", + name: "example", + ruleGroup: { + rulesSource: { + statelessRulesAndCustomActions: { + customAction: [ + { + actionDefinition: { + publishMetricAction: { + dimension: [ + { + value: "2", + }, + ], + }, + }, + actionName: "ExampleMetricsAction", + }, + ], + statelessRule: [ + { + priority: 1, + ruleDefinition: { + actions: ["aws:pass", "ExampleMetricsAction"], + matchAttributes: { + destination: [ + { + addressDefinition: "124.1.1.5/32", + }, + ], + destinationPort: [ + { + fromPort: 443, + toPort: 443, + }, + ], + protocols: [6], + source: [ + { + addressDefinition: "1.2.3.4/32", + }, + ], + sourcePort: [ + { + fromPort: 443, + toPort: 443, + }, + ], + tcpFlag: [ + { + flags: ["SYN"], + masks: ["SYN", "ACK"], + }, + ], + }, + }, + }, + ], + }, + }, + }, + tags: { + Tag1: "Value1", + Tag2: "Value2", + }, + type: "STATELESS", + }); + } +} + +``` + +### IP Set References to the Rule Group + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { NetworkfirewallRuleGroup } from "./.gen/providers/aws/networkfirewall-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkfirewallRuleGroup(this, "example", {
+      capacity: 100,
+      name: "example",
+      ruleGroup: {
+        referenceSets: {
+          ipSetReferences: [
+            {
+              ipSetReference: [
+                {
+                  referenceArn: thisVar.arn,
+                },
+              ],
+              key: "example",
+            },
+          ],
+        },
+        rulesSource: {
+          rulesSourceList: {
+            generatedRulesType: "DENYLIST",
+            targetTypes: ["HTTP_HOST"],
+            targets: ["test.example.com"],
+          },
+        },
+      },
+      tags: {
+        Tag1: "Value1",
+        Tag2: "Value2",
+      },
+      type: "STATEFUL",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `capacity` - (Required, Forces new resource) The maximum number of operating resources that this rule group can use. For a stateless rule group, the capacity required is the sum of the capacity requirements of the individual rules. For a stateful rule group, the minimum capacity required is the number of individual rules.
+
+* `description` - (Optional) A friendly description of the rule group.
+
+* `encryptionConfiguration` - (Optional) KMS encryption configuration settings. See [Encryption Configuration](#encryption-configuration) below for details.
+
+* `name` - (Required, Forces new resource) A friendly name of the rule group.
+
+* `ruleGroup` - (Optional) A configuration block that defines the rule group rules. Required unless `rules` is specified. See [Rule Group](#rule-group) below for details.
+
+* `rules` - (Optional) The stateful rule group rules specifications in Suricata file format, with one rule per line. Use this to import your existing Suricata compatible rule groups. Required unless `ruleGroup` is specified.
+
+* `tags` - (Optional) A map of key:value pairs to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+* `type` - (Required) Whether the rule group is stateless (containing stateless rules) or stateful (containing stateful rules). Valid values include: `STATEFUL` or `STATELESS`.
+
+### Encryption Configuration
+
+`encryptionConfiguration` settings for customer managed KMS keys. Remove this block to use the default AWS-managed KMS encryption (rather than setting `type` to `AWS_OWNED_KMS_KEY`).
+
+* `keyId` - (Optional) The ID of the customer managed key. You can use any of the [key identifiers](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#key-id) that KMS supports, unless you're using a key that's managed by another account. If you're using a key managed by another account, then specify the key ARN.
+* `type` - (Required) The type of AWS KMS key to use for encryption of your Network Firewall resources. Valid values are `CUSTOMER_KMS` and `AWS_OWNED_KMS_KEY`.
+
+### Rule Group
+
+The `ruleGroup` block supports the following arguments:
+
+* `referenceSets` - (Optional) A configuration block that defines the IP Set References for the rule group. See [Reference Sets](#reference-sets) below for details. Note that a `ruleGroup` can contain a maximum of 5 `referenceSets`. See the [AWS documentation](https://docs.aws.amazon.com/network-firewall/latest/developerguide/rule-groups-ip-set-references.html#rule-groups-ip-set-reference-limits) for details.
+
+* `ruleVariables` - (Optional) A configuration block that defines additional settings available to use in the rules defined in the rule group. Can only be specified for **stateful** rule groups. See [Rule Variables](#rule-variables) below for details.
+
+* `rulesSource` - (Required) A configuration block that defines the stateful or stateless rules for the rule group. See [Rules Source](#rules-source) below for details.
+
+* `statefulRuleOptions` - (Optional) A configuration block that defines stateful rule options for the rule group. See [Stateful Rule Options](#stateful-rule-options) below for details.
+
+### Reference Sets
+
+The `referenceSets` block supports the following arguments:
+
+* `ipSetReference` - (Optional) Set of configuration blocks that define the IP Reference information. See [IP Set Reference](#ip-set-reference) below for details.
+
+### Rule Variables
+
+The `ruleVariables` block supports the following arguments:
+
+* `ipSets` - (Optional) Set of configuration blocks that define IP address information. See [IP Sets](#ip-sets) below for details.
+
+* `portSets` - (Optional) Set of configuration blocks that define port range information. See [Port Sets](#port-sets) below for details.
+
+### IP Sets
+
+The `ipSets` block supports the following arguments:
+
+* `key` - (Required) A unique alphanumeric string to identify the `ipSet`.
+
+* `ipSet` - (Required) A configuration block that defines a set of IP addresses. See [IP Set](#ip-set) below for details.
+
+### IP Set
+
+The `ipSet` configuration block supports the following argument:
+
+* `definition` - (Required) Set of IP addresses and address ranges, in CIDR notation.
+
+### IP Set Reference
+
+The `ipSetReference` configuration block supports the following arguments:
+
+* `key` - (Required) A unique alphanumeric string to identify the `ipSet`.
+
+* `referenceArn` - (Required) Set of Managed Prefix IP ARN(s).
+
+### Port Sets
+
+The `portSets` block supports the following arguments:
+
+* `key` - (Required) A unique alphanumeric string to identify the `portSet`.
+
+* `portSet` - (Required) A configuration block that defines a set of port ranges. See [Port Set](#port-set) below for details.
+
+### Port Set
+
+The `portSet` configuration block supports the following argument:
+
+* `definition` - (Required) Set of port ranges.
+
+### Rules Source
+
+The `rulesSource` block supports the following arguments:
+
+~> **NOTE:** Only one of `rulesSourceList`, `rulesString`, `statefulRule`, or `statelessRulesAndCustomActions` must be specified.
+
+* `rulesSourceList` - (Optional) A configuration block containing **stateful** inspection criteria for a domain list rule group. See [Rules Source List](#rules-source-list) below for details.
+
+* `rulesString` - (Optional) The fully qualified name of a file in an S3 bucket that contains Suricata compatible intrusion prevention system (IPS) rules, or the Suricata rules as a string. These rules contain **stateful** inspection criteria and the action to take for traffic that matches the criteria. A short inline-string sketch is shown after this section.
+
+* `statefulRule` - (Optional) Set of configuration blocks containing **stateful** inspection criteria for 5-tuple rules to be used together in a rule group. See [Stateful Rule](#stateful-rule) below for details.
+
+* `statelessRulesAndCustomActions` - (Optional) A configuration block containing **stateless** inspection criteria for a stateless rule group. See [Stateless Rules and Custom Actions](#stateless-rules-and-custom-actions) below for details.
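+
+Because `rulesString` accepts the rules directly, a stateful rule group does not have to read them from a file with `Fn.file()` as in the Suricata example above. The following is a minimal, hypothetical sketch (not one of the generated examples): a fragment for a `TerraformStack` constructor that assumes the same `NetworkfirewallRuleGroup` import as the examples above.
+
+```typescript
+// Hypothetical sketch: a Suricata-compatible rule passed inline as a string
+// instead of being read from a file with Fn.file().
+new NetworkfirewallRuleGroup(this, "inline_rules_sketch", {
+  capacity: 100,
+  name: "inline-rules-sketch",
+  ruleGroup: {
+    rulesSource: {
+      rulesString:
+        'pass tcp any any -> any 80 (msg:"permit plain HTTP"; sid:1; rev:1;)',
+    },
+  },
+  type: "STATEFUL",
+});
+```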
+
+### Stateful Rule Options
+
+The `statefulRuleOptions` block supports the following argument:
+
+~> **NOTE:** If the `STRICT_ORDER` rule order is specified, this rule group can only be referenced in firewall policies that also utilize `STRICT_ORDER`. `STRICT_ORDER` can only be specified when using a `rulesSource` of `rulesString` or `statefulRule`.
+
+* `ruleOrder` - (Required) Indicates how to manage the order of the rule evaluation for the rule group. Default value: `DEFAULT_ACTION_ORDER`. Valid values: `DEFAULT_ACTION_ORDER`, `STRICT_ORDER`.
+
+### Rules Source List
+
+The `rulesSourceList` block supports the following arguments:
+
+* `generatedRulesType` - (Required) String value to specify whether domains in the target list are allowed or denied access. Valid values: `ALLOWLIST`, `DENYLIST`.
+
+* `targetTypes` - (Required) Set of types of domain specifications that are provided in the `targets` argument. Valid values: `HTTP_HOST`, `TLS_SNI`.
+
+* `targets` - (Required) Set of domains that you want to inspect for in your traffic flows.
+
+### Stateful Rule
+
+The `statefulRule` block supports the following arguments:
+
+* `action` - (Required) Action to take with packets in a traffic flow when the flow matches the stateful rule criteria. For all actions, AWS Network Firewall performs the specified action and discontinues stateful inspection of the traffic flow. Valid values: `ALERT`, `DROP` or `PASS`.
+
+* `header` - (Required) A configuration block containing the stateful 5-tuple inspection criteria for the rule, used to inspect traffic flows. See [Header](#header) below for details.
+
+* `ruleOption` - (Required) Set of configuration blocks containing additional settings for a stateful rule. See [Rule Option](#rule-option) below for details.
+
+### Stateless Rules and Custom Actions
+
+The `statelessRulesAndCustomActions` block supports the following arguments:
+
+* `customAction` - (Optional) Set of configuration blocks containing custom action definitions that are available for use by the set of `statelessRule` blocks. See [Custom Action](#custom-action) below for details.
+
+* `statelessRule` - (Required) Set of configuration blocks containing the stateless rules for use in the stateless rule group. See [Stateless Rule](#stateless-rule) below for details.
+
+### Header
+
+The `header` block supports the following arguments:
+
+* `destination` - (Required) The destination IP address or address range to inspect for, in CIDR notation. To match with any address, specify `ANY`.
+
+* `destinationPort` - (Required) The destination port to inspect for. To match with any port, specify `ANY`.
+
+* `direction` - (Required) The direction of traffic flow to inspect. Valid values: `ANY` or `FORWARD`.
+
+* `protocol` - (Required) The protocol to inspect. Valid values: `IP`, `TCP`, `UDP`, `ICMP`, `HTTP`, `FTP`, `TLS`, `SMB`, `DNS`, `DCERPC`, `SSH`, `SMTP`, `IMAP`, `MSN`, `KRB5`, `IKEV2`, `TFTP`, `NTP`, `DHCP`.
+
+* `source` - (Required) The source IP address or address range to inspect for, in CIDR notation. To match with any address, specify `ANY`.
+
+* `sourcePort` - (Required) The source port to inspect for. To match with any port, specify `ANY`.
+
+### Rule Option
+
+The `ruleOption` block supports the following arguments:
+
+* `keyword` - (Required) Keyword defined by open source detection systems like Snort or Suricata for stateful rule inspection.
+See [Snort General Rule Options](http://manual-snort-org.s3-website-us-east-1.amazonaws.com/node31.html) or [Suricata Rule Options](https://suricata.readthedocs.io/en/suricata-5.0.1/rules/intro.html#rule-options) for more details.
+* `settings` - (Optional) Set of strings for additional settings to use in stateful rule inspection.
+
+### Custom Action
+
+The `customAction` block supports the following arguments:
+
+* `actionDefinition` - (Required) A configuration block describing the custom action associated with the `actionName`. See [Action Definition](#action-definition) below for details.
+
+* `actionName` - (Required, Forces new resource) A friendly name of the custom action.
+
+### Stateless Rule
+
+The `statelessRule` block supports the following arguments:
+
+* `priority` - (Required) A setting that indicates the order in which to run this rule relative to all of the rules that are defined for a stateless rule group. AWS Network Firewall evaluates the rules in a rule group starting with the lowest priority setting.
+
+* `ruleDefinition` - (Required) A configuration block defining the stateless 5-tuple packet inspection criteria and the action to take on a packet that matches the criteria. See [Rule Definition](#rule-definition) below for details.
+
+### Rule Definition
+
+The `ruleDefinition` block supports the following arguments:
+
+* `actions` - (Required) Set of actions to take on a packet that matches one of the stateless rule definition's `matchAttributes`. For every rule you must specify one standard action, and you can add custom actions. Standard actions include: `aws:pass`, `aws:drop`, `aws:forward_to_sfe`.
+
+* `matchAttributes` - (Required) A configuration block containing criteria for AWS Network Firewall to use to inspect an individual packet in stateless rule inspection. See [Match Attributes](#match-attributes) below for details.
+
+### Match Attributes
+
+The `matchAttributes` block supports the following arguments:
+
+* `destination` - (Optional) Set of configuration blocks describing the destination IP address and address ranges to inspect for, in CIDR notation. If not specified, this matches with any destination address. See [Destination](#destination) below for details.
+
+* `destinationPort` - (Optional) Set of configuration blocks describing the destination ports to inspect for. If not specified, this matches with any destination port. See [Destination Port](#destination-port) below for details.
+
+* `protocols` - (Optional) Set of protocols to inspect for, specified using each protocol's IANA-assigned internet protocol number. If not specified, this matches with any protocol.
+
+* `source` - (Optional) Set of configuration blocks describing the source IP address and address ranges to inspect for, in CIDR notation. If not specified, this matches with any source address. See [Source](#source) below for details.
+
+* `sourcePort` - (Optional) Set of configuration blocks describing the source ports to inspect for. If not specified, this matches with any source port. See [Source Port](#source-port) below for details.
+
+* `tcpFlag` - (Optional) Set of configuration blocks containing the TCP flags and masks to inspect for. If not specified, this matches with any settings.
+
+### Action Definition
+
+The `actionDefinition` block supports the following argument:
+
+* `publishMetricAction` - (Required) A configuration block describing the stateless inspection criteria that publishes the specified metrics to Amazon CloudWatch for the matching packet.
You can pair this custom action with any of the standard stateless rule actions. See [Publish Metric Action](#publish-metric-action) below for details.
+
+### Publish Metric Action
+
+The `publishMetricAction` block supports the following argument:
+
+* `dimension` - (Required) Set of configuration blocks containing the dimension settings to use for Amazon CloudWatch custom metrics. See [Dimension](#dimension) below for details.
+
+### Dimension
+
+The `dimension` block supports the following argument:
+
+* `value` - (Required) The value to use in the custom metric dimension.
+
+### Destination
+
+The `destination` block supports the following argument:
+
+* `addressDefinition` - (Required) An IP address or a block of IP addresses in CIDR notation. AWS Network Firewall supports all address ranges for IPv4.
+
+### Destination Port
+
+The `destinationPort` block supports the following arguments:
+
+* `fromPort` - (Required) The lower limit of the port range. This must be less than or equal to the `toPort`.
+
+* `toPort` - (Optional) The upper limit of the port range. This must be greater than or equal to the `fromPort`.
+
+### Source
+
+The `source` block supports the following argument:
+
+* `addressDefinition` - (Required) An IP address or a block of IP addresses in CIDR notation. AWS Network Firewall supports all address ranges for IPv4.
+
+### Source Port
+
+The `sourcePort` block supports the following arguments:
+
+* `fromPort` - (Required) The lower limit of the port range. This must be less than or equal to the `toPort`.
+
+* `toPort` - (Optional) The upper limit of the port range. This must be greater than or equal to the `fromPort`.
+
+### TCP Flag
+
+The `tcpFlag` block supports the following arguments:
+
+* `flags` - (Required) Set of flags to look for in a packet. This setting can only specify values that are also specified in `masks`.
+Valid values: `FIN`, `SYN`, `RST`, `PSH`, `ACK`, `URG`, `ECE`, `CWR`.
+
+* `masks` - (Optional) Set of flags to consider in the inspection. To inspect all flags, leave this empty.
+Valid values: `FIN`, `SYN`, `RST`, `PSH`, `ACK`, `URG`, `ECE`, `CWR`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) that identifies the rule group.
+
+* `arn` - The Amazon Resource Name (ARN) that identifies the rule group.
+
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+* `updateToken` - A string token used when updating the rule group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Network Firewall Rule Groups using their `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Network Firewall Rule Groups using their `arn`.
For example:
+
+```console
+% terraform import aws_networkfirewall_rule_group.example arn:aws:network-firewall:us-west-1:123456789012:stateful-rulegroup/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkmanager_attachment_accepter.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_attachment_accepter.html.markdown
new file mode 100644
index 00000000000..3724759cf9e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkmanager_attachment_accepter.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_attachment_accepter"
+description: |-
+  Terraform resource for managing an AWS NetworkManager Attachment Accepter.
+---
+
+
+
+# Resource: aws_networkmanager_attachment_accepter
+
+Terraform resource for managing an AWS NetworkManager Attachment Accepter.
+
+## Example Usage
+
+### Example with VPC attachment
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkmanagerAttachmentAccepter } from "./.gen/providers/aws/networkmanager-attachment-accepter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkmanagerAttachmentAccepter(this, "test", {
+      attachmentId: vpc.id,
+      attachmentType: vpc.attachmentType,
+    });
+  }
+}
+
+```
+
+### Example with site-to-site VPN attachment
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkmanagerAttachmentAccepter } from "./.gen/providers/aws/networkmanager-attachment-accepter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkmanagerAttachmentAccepter(this, "test", {
+      attachmentId: vpn.id,
+      attachmentType: vpn.attachmentType,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+- `attachmentId` - (Required) The ID of the attachment.
+- `attachmentType` - (Required) The type of attachment. Valid values can be found in the [AWS Documentation](https://docs.aws.amazon.com/networkmanager/latest/APIReference/API_ListAttachments.html#API_ListAttachments_RequestSyntax).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `attachmentPolicyRuleNumber` - The policy rule number associated with the attachment.
+- `coreNetworkArn` - The ARN of a core network.
+- `coreNetworkId` - The ID of a core network.
+- `edgeLocation` - The Region where the edge is located.
+- `ownerAccountId` - The ID of the attachment account owner.
+- `resourceArn` - The attachment resource ARN.
+- `segmentName` - The name of the segment attachment.
+- `state` - The state of the attachment.
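+
+For a fuller picture of how the accepter is wired up, the following hypothetical sketch (not one of the generated examples) derives both arguments from a `NetworkmanagerVpcAttachment` created in the same stack, mirroring the pattern used in the Connect attachment documentation below. It is a fragment for a `TerraformStack` constructor; the `NetworkmanagerVpcAttachment`, `Token`, and `propertyAccess` imports, plus `awsccNetworkmanagerCoreNetworkExample`, `awsSubnetExample`, and `awsVpcExample`, are assumed to exist as in the other examples.
+
+```typescript
+// Hypothetical sketch: accept a VPC attachment created in the same stack by
+// referencing its id and type directly.
+const example = new NetworkmanagerVpcAttachment(this, "example", {
+  coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id),
+  subnetArns: Token.asList(propertyAccess(awsSubnetExample, ["*", "arn"])),
+  vpcArn: Token.asString(awsVpcExample.arn),
+});
+new NetworkmanagerAttachmentAccepter(this, "example_accepter", {
+  attachmentId: example.id,
+  attachmentType: example.attachmentType,
+});
+```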
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_connect_attachment.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_connect_attachment.html.markdown new file mode 100644 index 00000000000..453bf1c9401 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_connect_attachment.html.markdown @@ -0,0 +1,158 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_connect_attachment" +description: |- + Terraform resource for managing an AWS NetworkManager ConnectAttachment. +--- + + + +# Resource: aws_networkmanager_connect_attachment + +Terraform resource for managing an AWS NetworkManager ConnectAttachment. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerConnectAttachment } from "./.gen/providers/aws/networkmanager-connect-attachment"; +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerVpcAttachment(this, "example", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id), + subnetArns: Token.asList(propertyAccess(awsSubnetExample, ["*", "arn"])), + vpcArn: Token.asString(awsVpcExample.arn), + }); + const awsNetworkmanagerConnectAttachmentExample = + new NetworkmanagerConnectAttachment(this, "example_1", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id), + edgeLocation: example.edgeLocation, + options: { + protocol: "GRE", + }, + transportAttachmentId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerConnectAttachmentExample.overrideLogicalId("example"); + } +} + +``` + +### Usage with attachment accepter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerAttachmentAccepter } from "./.gen/providers/aws/networkmanager-attachment-accepter"; +import { NetworkmanagerConnectAttachment } from "./.gen/providers/aws/networkmanager-connect-attachment"; +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerVpcAttachment(this, "example", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id), + subnetArns: Token.asList(propertyAccess(awsSubnetExample, ["*", "arn"])), + vpcArn: Token.asString(awsVpcExample.arn), + }); + const awsNetworkmanagerAttachmentAccepterExample = + new NetworkmanagerAttachmentAccepter(this, "example_1", { + attachmentId: example.id, + attachmentType: example.attachmentType, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerAttachmentAccepterExample.overrideLogicalId("example");
+    const awsNetworkmanagerConnectAttachmentExample =
+      new NetworkmanagerConnectAttachment(this, "example_2", {
+        coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id),
+        dependsOn: [awsNetworkmanagerAttachmentAccepterExample],
+        edgeLocation: example.edgeLocation,
+        options: {
+          protocol: "GRE",
+        },
+        transportAttachmentId: example.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerConnectAttachmentExample.overrideLogicalId("example");
+    new NetworkmanagerAttachmentAccepter(this, "example2", {
+      attachmentId: Token.asString(
+        awsNetworkmanagerConnectAttachmentExample.id
+      ),
+      attachmentType: Token.asString(
+        awsNetworkmanagerConnectAttachmentExample.attachmentType
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+- `coreNetworkId` - (Required) The ID of a core network where you want to create the attachment.
+- `transportAttachmentId` - (Required) The ID of the attachment between the two connections.
+- `edgeLocation` - (Required) The Region where the edge is located.
+- `options` - (Required) Options for creating an attachment.
+
+The following arguments are optional:
+
+- `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - The ARN of the attachment.
+- `attachmentPolicyRuleNumber` - The policy rule number associated with the attachment.
+- `attachmentType` - The type of attachment.
+- `coreNetworkArn` - The ARN of a core network.
+- `coreNetworkId` - The ID of a core network.
+- `edgeLocation` - The Region where the edge is located.
+- `id` - The ID of the attachment.
+- `ownerAccountId` - The ID of the attachment account owner.
+- `resourceArn` - The attachment resource ARN.
+- `segmentName` - The name of the segment attachment.
+- `state` - The state of the attachment.
+- `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerConnectAttachment` using the attachment ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNetworkmanagerConnectAttachment` using the attachment ID.
For example: + +```console +% terraform import aws_networkmanager_connect_attachment.example attachment-0f8fa60d2238d1bd8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_connect_peer.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_connect_peer.html.markdown new file mode 100644 index 00000000000..704092e3530 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_connect_peer.html.markdown @@ -0,0 +1,189 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_connect_peer" +description: |- + Terraform resource for managing an AWS NetworkManager Connect Peer. +--- + + + +# Resource: aws_networkmanager_connect_peer + +Terraform resource for managing an AWS NetworkManager Connect Peer. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerConnectAttachment } from "./.gen/providers/aws/networkmanager-connect-attachment"; +import { NetworkmanagerConnectPeer } from "./.gen/providers/aws/networkmanager-connect-peer"; +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerVpcAttachment(this, "example", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id), + subnetArns: Token.asList(propertyAccess(awsSubnetExample, ["*", "arn"])), + vpcArn: Token.asString(awsVpcExample.arn), + }); + const awsNetworkmanagerConnectAttachmentExample = + new NetworkmanagerConnectAttachment(this, "example_1", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id), + edgeLocation: example.edgeLocation, + options: { + protocol: "GRE", + }, + transportAttachmentId: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerConnectAttachmentExample.overrideLogicalId("example"); + const awsNetworkmanagerConnectPeerExample = new NetworkmanagerConnectPeer( + this, + "example_2", + { + bgpOptions: { + peerAsn: 65000, + }, + connectAttachmentId: Token.asString( + awsNetworkmanagerConnectAttachmentExample.id + ), + insideCidrBlocks: ["172.16.0.0/16"], + peerAddress: "127.0.0.1", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerConnectPeerExample.overrideLogicalId("example"); + } +} + +``` + +### Usage with attachment accepter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
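+ *
+ * The VPC attachment below serves as the transport for the Connect
+ * attachment. Attachments must be accepted (unless the core network policy
+ * auto-accepts them) before dependent resources can be created, which is
+ * what the accepters and dependsOn references in this example model.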
+ */
+import { NetworkmanagerAttachmentAccepter } from "./.gen/providers/aws/networkmanager-attachment-accepter";
+import { NetworkmanagerConnectAttachment } from "./.gen/providers/aws/networkmanager-connect-attachment";
+import { NetworkmanagerConnectPeer } from "./.gen/providers/aws/networkmanager-connect-peer";
+import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new NetworkmanagerVpcAttachment(this, "example", {
+      coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id),
+      subnetArns: Token.asList(propertyAccess(awsSubnetExample, ["*", "arn"])),
+      vpcArn: Token.asString(awsVpcExample.arn),
+    });
+    const awsNetworkmanagerAttachmentAccepterExample =
+      new NetworkmanagerAttachmentAccepter(this, "example_1", {
+        attachmentId: example.id,
+        attachmentType: example.attachmentType,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerAttachmentAccepterExample.overrideLogicalId("example");
+    const awsNetworkmanagerConnectAttachmentExample =
+      new NetworkmanagerConnectAttachment(this, "example_2", {
+        coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id),
+        dependsOn: [awsNetworkmanagerAttachmentAccepterExample],
+        edgeLocation: example.edgeLocation,
+        options: {
+          protocol: "GRE",
+        },
+        transportAttachmentId: example.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerConnectAttachmentExample.overrideLogicalId("example");
+    const example2 = new NetworkmanagerAttachmentAccepter(this, "example2", {
+      attachmentId: Token.asString(
+        awsNetworkmanagerConnectAttachmentExample.id
+      ),
+      attachmentType: Token.asString(
+        awsNetworkmanagerConnectAttachmentExample.attachmentType
+      ),
+    });
+    const awsNetworkmanagerConnectPeerExample = new NetworkmanagerConnectPeer(
+      this,
+      "example_3",
+      {
+        bgpOptions: {
+          peerAsn: 65500,
+        },
+        connectAttachmentId: Token.asString(
+          awsNetworkmanagerConnectAttachmentExample.id
+        ),
+        dependsOn: [example2],
+        insideCidrBlocks: ["172.16.0.0/16"],
+        peerAddress: "127.0.0.1",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerConnectPeerExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+- `connectAttachmentId` - (Required) The ID of the Connect attachment.
+- `insideCidrBlocks` - (Required) The inside IP addresses used for BGP peering.
+- `peerAddress` - (Required) The Connect peer address.
+
+The following arguments are optional:
+
+- `bgpOptions` - (Optional) The Connect peer BGP options.
+- `coreNetworkAddress` - (Optional) A Connect peer core network address.
+- `tags` - (Optional) Key-value tags for the Connect peer. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - The ARN of the Connect peer.
+- `configuration` - The configuration of the Connect peer.
+- `coreNetworkId` - The ID of a core network.
+- `edgeLocation` - The Region where the peer is located.
+- `id` - The ID of the Connect peer.
+- `state` - The state of the Connect peer.
+- `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerConnectPeer` using the connect peer ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNetworkmanagerConnectPeer` using the connect peer ID. For example:
+
+```console
+% terraform import aws_networkmanager_connect_peer.example connect-peer-061f3e96275db1acc
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkmanager_connection.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_connection.html.markdown
new file mode 100644
index 00000000000..f4a2edebb91
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkmanager_connection.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_connection"
+description: |-
+  Creates a connection between two devices.
+---
+
+
+
+# Resource: aws_networkmanager_connection
+
+Creates a connection between two devices.
+Each device can be a physical or virtual appliance that connects to a third-party appliance in a VPC, or a physical appliance that connects to another physical appliance in an on-premises network.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkmanagerConnection } from "./.gen/providers/aws/networkmanager-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkmanagerConnection(this, "example", {
+      connectedDeviceId: example2.id,
+      deviceId: example1.id,
+      globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `connectedDeviceId` - (Required) The ID of the second device in the connection.
+* `connectedLinkId` - (Optional) The ID of the link for the second device.
+* `description` - (Optional) A description of the connection.
+* `deviceId` - (Required) The ID of the first device in the connection.
+* `globalNetworkId` - (Required) The ID of the global network.
+* `linkId` - (Optional) The ID of the link for the first device.
+* `tags` - (Optional) Key-value tags for the connection.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the connection. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerConnection` using the connection ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerConnection` using the connection ARN. For example: + +```console +% terraform import aws_networkmanager_connection.example arn:aws:networkmanager::123456789012:device/global-network-0d47f6t230mz46dy4/connection-07f6fd08867abc123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_core_network.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_core_network.html.markdown new file mode 100644 index 00000000000..e5b7913d861 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_core_network.html.markdown @@ -0,0 +1,384 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_core_network" +description: |- + Provides a core network resource. +--- + + + +# Resource: aws_networkmanager_core_network + +Provides a core network resource. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerCoreNetwork(this, "example", { + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + }); + } +} + +``` + +### With description + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
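+ *
+ * awsNetworkmanagerGlobalNetworkExample is assumed to be an
+ * aws_networkmanager_global_network declared elsewhere in the stack.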
+ */ +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerCoreNetwork(this, "example", { + description: "example", + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + }); + } +} + +``` + +### With tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerCoreNetwork(this, "example", { + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + tags: { + hello: "world", + }, + }); + } +} + +``` + +### With VPC Attachment (Single Region) + +The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `createBasePolicy` argument to `true` if your core network does not currently have any `live` policies (e.g. this is the first `terraform apply` with the core network resource), since a `live` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `live` policy, you may exclude the `createBasePolicy` argument. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerCoreNetworkPolicyDocument } from "./.gen/providers/aws/data-aws-networkmanager-core-network-policy-document"; +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +import { NetworkmanagerCoreNetworkPolicyAttachment } from "./.gen/providers/aws/networkmanager-core-network-policy-attachment"; +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerGlobalNetwork(this, "example", {}); + const awsNetworkmanagerCoreNetworkExample = new NetworkmanagerCoreNetwork( + this, + "example_1", + { + createBasePolicy: true, + globalNetworkId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerCoreNetworkExample.overrideLogicalId("example");
+    const awsNetworkmanagerVpcAttachmentExample =
+      new NetworkmanagerVpcAttachment(this, "example_2", {
+        coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id),
+        subnetArns: Token.asList(
+          propertyAccess(awsSubnetExample, ["*", "arn"])
+        ),
+        vpcArn: Token.asString(awsVpcExample.arn),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerVpcAttachmentExample.overrideLogicalId("example");
+    const dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample =
+      new DataAwsNetworkmanagerCoreNetworkPolicyDocument(this, "example_3", {
+        coreNetworkConfiguration: [
+          {
+            asnRanges: ["65022-65534"],
+            edgeLocations: [
+              {
+                location: "us-west-2",
+              },
+            ],
+          },
+        ],
+        segmentActions: [
+          {
+            action: "create-route",
+            destinationCidrBlocks: ["0.0.0.0/0"],
+            destinations: [
+              Token.asString(awsNetworkmanagerVpcAttachmentExample.id),
+            ],
+            segment: "segment",
+          },
+        ],
+        segments: [
+          {
+            name: "segment",
+          },
+        ],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.overrideLogicalId(
+      "example"
+    );
+    const awsNetworkmanagerCoreNetworkPolicyAttachmentExample =
+      new NetworkmanagerCoreNetworkPolicyAttachment(this, "example_4", {
+        coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id),
+        policyDocument: Token.asString(
+          dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.json
+        ),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerCoreNetworkPolicyAttachmentExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+### With VPC Attachment (Multi-Region)
+
+The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `createBasePolicy` argument of the [`awsNetworkmanagerCoreNetwork` resource](/docs/providers/aws/r/networkmanager_core_network.html) to `true` if your core network does not currently have any `live` policies (e.g. this is the first `terraform apply` with the core network resource), since a `live` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `live` policy, you may exclude the `createBasePolicy` argument. For multi-region in a core network that does not yet have a `live` policy, pass a list of regions to the `awsNetworkmanagerCoreNetwork` `basePolicyRegions` argument. In the example below, `us-west-2` and `us-east-1` are specified in the base policy.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
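+ *
+ * The us-east-1 VPC attachment below sets provider: "alternate", which
+ * assumes an aliased AWS provider configured for that region elsewhere in
+ * the stack; cdktf convert renders the provider alias as a plain string.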
+ */ +import { DataAwsNetworkmanagerCoreNetworkPolicyDocument } from "./.gen/providers/aws/data-aws-networkmanager-core-network-policy-document"; +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +import { NetworkmanagerCoreNetworkPolicyAttachment } from "./.gen/providers/aws/networkmanager-core-network-policy-attachment"; +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerGlobalNetwork(this, "example", {}); + const awsNetworkmanagerCoreNetworkExample = new NetworkmanagerCoreNetwork( + this, + "example_1", + { + basePolicyRegions: ["us-west-2", "us-east-1"], + createBasePolicy: true, + globalNetworkId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerCoreNetworkExample.overrideLogicalId("example"); + const exampleUsEast1 = new NetworkmanagerVpcAttachment( + this, + "example_us_east_1", + { + coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id), + provider: "alternate", + subnetArns: Token.asList( + propertyAccess(awsSubnetExampleUsEast1, ["*", "arn"]) + ), + vpcArn: Token.asString(awsVpcExampleUsEast1.arn), + } + ); + const exampleUsWest2 = new NetworkmanagerVpcAttachment( + this, + "example_us_west_2", + { + coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id), + subnetArns: Token.asList( + propertyAccess(awsSubnetExampleUsWest2, ["*", "arn"]) + ), + vpcArn: Token.asString(awsVpcExampleUsWest2.arn), + } + ); + const dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample = + new DataAwsNetworkmanagerCoreNetworkPolicyDocument(this, "example_4", { + coreNetworkConfiguration: [ + { + asnRanges: ["65022-65534"], + edgeLocations: [ + { + location: "us-west-2", + }, + { + location: "us-east-1", + }, + ], + }, + ], + segmentActions: [ + { + action: "create-route", + destinationCidrBlocks: ["10.0.0.0/16"], + destinations: [exampleUsWest2.id], + segment: "segment", + }, + { + action: "create-route", + destinationCidrBlocks: ["10.1.0.0/16"], + destinations: [exampleUsEast1.id], + segment: "segment", + }, + ], + segments: [ + { + name: "segment", + }, + { + name: "segment2", + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.overrideLogicalId( + "example" + ); + const awsNetworkmanagerCoreNetworkPolicyAttachmentExample = + new NetworkmanagerCoreNetworkPolicyAttachment(this, "example_5", { + coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id), + policyDocument: Token.asString( + dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.json + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerCoreNetworkPolicyAttachmentExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the Core Network. 
+* `basePolicyRegion` - (Optional, **Deprecated** use the `basePolicyRegions` argument instead) The base policy created by setting the `createBasePolicy` argument to `true` requires a region to be set in the `location` key of the `edgeLocations` block. If `basePolicyRegion` is not specified, the region used in the base policy defaults to the region specified in the `provider` block.
+* `basePolicyRegions` - (Optional) A list of regions to add to the base policy. The base policy created by setting the `createBasePolicy` argument to `true` requires one or more regions to be set in the `location` key of the `edgeLocations` block. If `basePolicyRegions` is not specified, the region used in the base policy defaults to the region specified in the `provider` block.
+* `createBasePolicy` - (Optional) Specifies whether to create a base policy when a core network is created or updated. A base policy is created and set to `live` to allow attachments to the core network (e.g. VPC Attachments) before applying a policy document provided using the [`awsNetworkmanagerCoreNetworkPolicyAttachment` resource](/docs/providers/aws/r/networkmanager_core_network_policy_attachment.html). This base policy is needed if your core network does not have any `live` policies, your policy document has static routes pointing to VPC attachments, and you want to attach your VPCs to the core network before applying the desired policy document. Valid values are `true` or `false`. Example Terraform snippets can be found above [for VPC Attachment in a single region](#with-vpc-attachment-single-region) and [for VPC Attachment multi-region](#with-vpc-attachment-multi-region). An example base policy is shown below. This base policy is overridden with the policy that you specify in the [`awsNetworkmanagerCoreNetworkPolicyAttachment` resource](/docs/providers/aws/r/networkmanager_core_network_policy_attachment.html).
+
+```json
+{
+  "version": "2021.12",
+  "core-network-configuration": {
+    "asn-ranges": [
+      "64512-65534"
+    ],
+    "vpn-ecmp-support": false,
+    "edge-locations": [
+      {
+        "location": "us-east-1"
+      }
+    ]
+  },
+  "segments": [
+    {
+      "name": "segment",
+      "description": "base-policy",
+      "isolate-attachments": false,
+      "require-attachment-acceptance": false
+    }
+  ]
+}
+```
+
+* `globalNetworkId` - (Required) The ID of the global network that a core network will be a part of.
+* `tags` - (Optional) Key-value tags for the Core Network. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `delete` - (Default `30m`)
+* `update` - (Default `30m`)
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Core Network Amazon Resource Name (ARN).
+* `createdAt` - Timestamp when a core network was created.
+* `edges` - One or more blocks detailing the edges within a core network. [Detailed below](#edges).
+* `id` - Core Network ID.
+* `segments` - One or more blocks detailing the segments within a core network. [Detailed below](#segments).
+* `state` - Current state of a core network.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### `edges`
+
+The `edges` configuration block supports the following arguments:
+
+* `asn` - ASN of a core network edge.
+* `edgeLocation` - Region where a core network edge is located.
+* `insideCidrBlocks` - Inside IP addresses used for core network edges.
+
+### `segments`
+
+The `segments` configuration block supports the following arguments:
+
+* `edgeLocations` - Regions where the edges are located.
+* `name` - Name of a core network segment.
+* `sharedSegments` - Shared segments of a core network.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerCoreNetwork` using the core network ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNetworkmanagerCoreNetwork` using the core network ID. For example:
+
+```console
+% terraform import aws_networkmanager_core_network.example core-network-0d47f6t230mz46dy4
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkmanager_core_network_policy_attachment.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_core_network_policy_attachment.html.markdown
new file mode 100644
index 00000000000..f6dae9cf4d2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkmanager_core_network_policy_attachment.html.markdown
@@ -0,0 +1,296 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_core_network_policy_attachment"
+description: |-
+  Provides a Core Network Policy Attachment resource.
+---
+
+
+
+# Resource: aws_networkmanager_core_network_policy_attachment
+
+Provides a Core Network Policy Attachment resource. This applies a Core Network Policy to an existing Core Network and executes the change set, which deploys changes globally based on the policy submitted (sets the policy to `live`).
+
+~> **NOTE:** Deleting this resource will not delete the current policy defined in this resource. Deleting this resource will also not revert the current `live` policy to the previous version.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
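+ *
+ * awsNetworkmanagerGlobalNetworkExample and
+ * dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample are assumed to be a
+ * global network and a core network policy document declared elsewhere in
+ * the stack, as in the fuller examples below.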
+ */ +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +import { NetworkmanagerCoreNetworkPolicyAttachment } from "./.gen/providers/aws/networkmanager-core-network-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerCoreNetwork(this, "example", { + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + }); + const awsNetworkmanagerCoreNetworkPolicyAttachmentExample = + new NetworkmanagerCoreNetworkPolicyAttachment(this, "example_1", { + coreNetworkId: example.id, + policyDocument: Token.asString( + dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.json + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerCoreNetworkPolicyAttachmentExample.overrideLogicalId( + "example" + ); + } +} + +``` + +### With VPC Attachment (Single Region) + +The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `createBasePolicy` argument of the [`awsNetworkmanagerCoreNetwork` resource](/docs/providers/aws/r/networkmanager_core_network.html) to `true` if your core network does not currently have any `live` policies (e.g. this is the first `terraform apply` with the core network resource), since a `live` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `live` policy, you may exclude the `createBasePolicy` argument. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsNetworkmanagerCoreNetworkPolicyDocument } from "./.gen/providers/aws/data-aws-networkmanager-core-network-policy-document"; +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +import { NetworkmanagerCoreNetworkPolicyAttachment } from "./.gen/providers/aws/networkmanager-core-network-policy-attachment"; +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerGlobalNetwork(this, "example", {}); + const awsNetworkmanagerCoreNetworkExample = new NetworkmanagerCoreNetwork( + this, + "example_1", + { + createBasePolicy: true, + globalNetworkId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerCoreNetworkExample.overrideLogicalId("example");
+    const awsNetworkmanagerVpcAttachmentExample =
+      new NetworkmanagerVpcAttachment(this, "example_2", {
+        coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id),
+        subnetArns: Token.asList(
+          propertyAccess(awsSubnetExample, ["*", "arn"])
+        ),
+        vpcArn: Token.asString(awsVpcExample.arn),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerVpcAttachmentExample.overrideLogicalId("example");
+    const dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample =
+      new DataAwsNetworkmanagerCoreNetworkPolicyDocument(this, "example_3", {
+        coreNetworkConfiguration: [
+          {
+            asnRanges: ["65022-65534"],
+            edgeLocations: [
+              {
+                location: "us-west-2",
+              },
+            ],
+          },
+        ],
+        segmentActions: [
+          {
+            action: "create-route",
+            destinationCidrBlocks: ["0.0.0.0/0"],
+            destinations: [
+              Token.asString(awsNetworkmanagerVpcAttachmentExample.id),
+            ],
+            segment: "segment",
+          },
+        ],
+        segments: [
+          {
+            name: "segment",
+          },
+        ],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.overrideLogicalId(
+      "example"
+    );
+    const awsNetworkmanagerCoreNetworkPolicyAttachmentExample =
+      new NetworkmanagerCoreNetworkPolicyAttachment(this, "example_4", {
+        coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id),
+        policyDocument: Token.asString(
+          dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.json
+        ),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerCoreNetworkPolicyAttachmentExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+### With VPC Attachment (Multi-Region)
+
+The example below illustrates the scenario where your policy document has static routes pointing to VPC attachments and you want to attach your VPCs to the core network before applying the desired policy document. Set the `createBasePolicy` argument of the [`awsNetworkmanagerCoreNetwork` resource](/docs/providers/aws/r/networkmanager_core_network.html) to `true` if your core network does not currently have any `live` policies (e.g. this is the first `terraform apply` with the core network resource), since a `live` policy is required before VPCs can be attached to the core network. Otherwise, if your core network already has a `live` policy, you may exclude the `createBasePolicy` argument. For multi-region in a core network that does not yet have a `live` policy, pass a list of regions to the `awsNetworkmanagerCoreNetwork` `basePolicyRegions` argument. In the example below, `us-west-2` and `us-east-1` are specified in the base policy.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
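+ *
+ * As in the single-region example, the referenced aws* names are assumed to
+ * be declared elsewhere in the stack; the us-east-1 attachment's
+ * provider: "alternate" assumes an aliased AWS provider configured for that
+ * region.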
+ */ +import { DataAwsNetworkmanagerCoreNetworkPolicyDocument } from "./.gen/providers/aws/data-aws-networkmanager-core-network-policy-document"; +import { NetworkmanagerCoreNetwork } from "./.gen/providers/aws/networkmanager-core-network"; +import { NetworkmanagerCoreNetworkPolicyAttachment } from "./.gen/providers/aws/networkmanager-core-network-policy-attachment"; +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerGlobalNetwork(this, "example", {}); + const awsNetworkmanagerCoreNetworkExample = new NetworkmanagerCoreNetwork( + this, + "example_1", + { + basePolicyRegions: ["us-west-2", "us-east-1"], + createBasePolicy: true, + globalNetworkId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerCoreNetworkExample.overrideLogicalId("example"); + const exampleUsEast1 = new NetworkmanagerVpcAttachment( + this, + "example_us_east_1", + { + coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id), + provider: "alternate", + subnetArns: Token.asList( + propertyAccess(awsSubnetExampleUsEast1, ["*", "arn"]) + ), + vpcArn: Token.asString(awsVpcExampleUsEast1.arn), + } + ); + const exampleUsWest2 = new NetworkmanagerVpcAttachment( + this, + "example_us_west_2", + { + coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id), + subnetArns: Token.asList( + propertyAccess(awsSubnetExampleUsWest2, ["*", "arn"]) + ), + vpcArn: Token.asString(awsVpcExampleUsWest2.arn), + } + ); + const dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample = + new DataAwsNetworkmanagerCoreNetworkPolicyDocument(this, "example_4", { + coreNetworkConfiguration: [ + { + asnRanges: ["65022-65534"], + edgeLocations: [ + { + location: "us-west-2", + }, + { + location: "us-east-1", + }, + ], + }, + ], + segmentActions: [ + { + action: "create-route", + destinationCidrBlocks: ["10.0.0.0/16"], + destinations: [exampleUsWest2.id], + segment: "segment", + }, + { + action: "create-route", + destinationCidrBlocks: ["10.1.0.0/16"], + destinations: [exampleUsEast1.id], + segment: "segment", + }, + ], + segments: [ + { + name: "segment", + }, + { + name: "segment2", + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.overrideLogicalId( + "example" + ); + const awsNetworkmanagerCoreNetworkPolicyAttachmentExample = + new NetworkmanagerCoreNetworkPolicyAttachment(this, "example_5", { + coreNetworkId: Token.asString(awsNetworkmanagerCoreNetworkExample.id), + policyDocument: Token.asString( + dataAwsNetworkmanagerCoreNetworkPolicyDocumentExample.json + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerCoreNetworkPolicyAttachmentExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `coreNetworkId` - (Required) The ID of the core network that a policy will be attached to and made `live`. +* `policyDocument` - (Required) Policy document for creating a core network. 
Note that updating this argument will result in the new policy document version being set as the `latest` and `live` policy document. Refer to the [Core network policies documentation](https://docs.aws.amazon.com/network-manager/latest/cloudwan/cloudwan-policy-change-sets.html) for more information.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `30m`). If this is the first time attaching a policy to a core network, this timeout value is also used as the `create` timeout value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `state` - Current state of a core network.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerCoreNetworkPolicyAttachment` using the core network ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNetworkmanagerCoreNetworkPolicyAttachment` using the core network ID. For example:
+
+```console
+% terraform import aws_networkmanager_core_network_policy_attachment.example core-network-0d47f6t230mz46dy4
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkmanager_customer_gateway_association.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_customer_gateway_association.html.markdown
new file mode 100644
index 00000000000..8b1be4d86ea
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkmanager_customer_gateway_association.html.markdown
@@ -0,0 +1,151 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_customer_gateway_association"
+description: |-
+  Associates a customer gateway with a device and, optionally, with a link.
+---
+
+
+
+# Resource: aws_networkmanager_customer_gateway_association
+
+Associates a customer gateway with a device and, optionally, with a link.
+If you specify a link, it must be associated with the specified device.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
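+ *
+ * The dependsOn chain below mirrors the required ordering: the transit
+ * gateway registration waits for the VPN connection, and the customer
+ * gateway association waits for the registration, so the transit gateway is
+ * registered with the global network before the association is created.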
+ */ +import { CustomerGateway } from "./.gen/providers/aws/customer-gateway"; +import { Ec2TransitGateway } from "./.gen/providers/aws/ec2-transit-gateway"; +import { NetworkmanagerCustomerGatewayAssociation } from "./.gen/providers/aws/networkmanager-customer-gateway-association"; +import { NetworkmanagerDevice } from "./.gen/providers/aws/networkmanager-device"; +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerSite } from "./.gen/providers/aws/networkmanager-site"; +import { NetworkmanagerTransitGatewayRegistration } from "./.gen/providers/aws/networkmanager-transit-gateway-registration"; +import { VpnConnection } from "./.gen/providers/aws/vpn-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CustomerGateway(this, "example", { + bgpAsn: Token.asString(65000), + ipAddress: "172.83.124.10", + type: "ipsec.1", + }); + const awsEc2TransitGatewayExample = new Ec2TransitGateway( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEc2TransitGatewayExample.overrideLogicalId("example"); + const awsNetworkmanagerGlobalNetworkExample = + new NetworkmanagerGlobalNetwork(this, "example_2", { + description: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerGlobalNetworkExample.overrideLogicalId("example"); + const awsNetworkmanagerSiteExample = new NetworkmanagerSite( + this, + "example_3", + { + globalNetworkId: Token.asString( + awsNetworkmanagerGlobalNetworkExample.id + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerSiteExample.overrideLogicalId("example"); + const awsVpnConnectionExample = new VpnConnection(this, "example_4", { + customerGatewayId: example.id, + staticRoutesOnly: true, + transitGatewayId: Token.asString(awsEc2TransitGatewayExample.id), + type: example.type, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpnConnectionExample.overrideLogicalId("example"); + const awsNetworkmanagerDeviceExample = new NetworkmanagerDevice( + this, + "example_5", + { + globalNetworkId: Token.asString( + awsNetworkmanagerGlobalNetworkExample.id + ), + siteId: Token.asString(awsNetworkmanagerSiteExample.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerDeviceExample.overrideLogicalId("example"); + const awsNetworkmanagerTransitGatewayRegistrationExample = + new NetworkmanagerTransitGatewayRegistration(this, "example_6", { + dependsOn: [awsVpnConnectionExample], + globalNetworkId: Token.asString( + awsNetworkmanagerGlobalNetworkExample.id + ), + transitGatewayArn: Token.asString(awsEc2TransitGatewayExample.arn), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsNetworkmanagerTransitGatewayRegistrationExample.overrideLogicalId( + "example" + ); + const awsNetworkmanagerCustomerGatewayAssociationExample = + new NetworkmanagerCustomerGatewayAssociation(this, "example_7", { + customerGatewayArn: example.arn, + dependsOn: [awsNetworkmanagerTransitGatewayRegistrationExample], + deviceId: Token.asString(awsNetworkmanagerDeviceExample.id), + globalNetworkId: Token.asString( + awsNetworkmanagerGlobalNetworkExample.id + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerCustomerGatewayAssociationExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `customerGatewayArn` - (Required) The Amazon Resource Name (ARN) of the customer gateway. +* `deviceId` - (Required) The ID of the device. +* `globalNetworkId` - (Required) The ID of the global network. +* `linkId` - (Optional) The ID of the link. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerCustomerGatewayAssociation` using the global network ID and customer gateway ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerCustomerGatewayAssociation` using the global network ID and customer gateway ARN. For example: + +```console +% terraform import aws_networkmanager_customer_gateway_association.example global-network-0d47f6t230mz46dy4,arn:aws:ec2:us-west-2:123456789012:customer-gateway/cgw-123abc05e04123abc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_device.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_device.html.markdown new file mode 100644 index 00000000000..318883ad652 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_device.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_device" +description: |- + Creates a device in a global network. +--- + + + +# Resource: aws_networkmanager_device + +Creates a device in a global network. If you specify both a site ID and a location, +the location of the site is used for visualization in the Network Manager console. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
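+ *
+ * awsNetworkmanagerGlobalNetworkExample and awsNetworkmanagerSiteExample are
+ * assumed to be an aws_networkmanager_global_network and an
+ * aws_networkmanager_site declared elsewhere in the stack.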
+ */ +import { NetworkmanagerDevice } from "./.gen/providers/aws/networkmanager-device"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerDevice(this, "example", { + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + siteId: Token.asString(awsNetworkmanagerSiteExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `awsLocation` - (Optional) The AWS location of the device. Documented below. +* `description` - (Optional) A description of the device. +* `globalNetworkId` - (Required) The ID of the global network. +* `location` - (Optional) The location of the device. Documented below. +* `model` - (Optional) The model of device. +* `serialNumber` - (Optional) The serial number of the device. +* `siteId` - (Optional) The ID of the site. +* `tags` - (Optional) Key-value tags for the device. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) The type of device. +* `vendor` - (Optional) The vendor of the device. + +The `awsLocation` object supports the following: + +* `subnetArn` - (Optional) The Amazon Resource Name (ARN) of the subnet that the device is located in. +* `zone` - (Optional) The Zone that the device is located in. Specify the ID of an Availability Zone, Local Zone, Wavelength Zone, or an Outpost. + +The `location` object supports the following: + +* `address` - (Optional) The physical address. +* `latitude` - (Optional) The latitude. +* `longitude` - (Optional) The longitude. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the device. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerDevice` using the device ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerDevice` using the device ARN. For example: + +```console +% terraform import aws_networkmanager_device.example arn:aws:networkmanager::123456789012:device/global-network-0d47f6t230mz46dy4/device-07f6fd08867abc123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_global_network.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_global_network.html.markdown new file mode 100644 index 00000000000..d31f348c0aa --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_global_network.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_global_network" +description: |- + Provides a global network resource. 
+--- + + + +# Resource: aws_networkmanager_global_network + +Provides a global network resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerGlobalNetwork(this, "example", { + description: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `description` - (Optional) Description of the Global Network. +* `tags` - (Optional) Key-value tags for the Global Network. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Global Network Amazon Resource Name (ARN) +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerGlobalNetwork` using the global network ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerGlobalNetwork` using the global network ID. For example: + +```console +% terraform import aws_networkmanager_global_network.example global-network-0d47f6t230mz46dy4 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_link.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_link.html.markdown new file mode 100644 index 00000000000..7e97400ce1e --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_link.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_link" +description: |- + Creates a link for a site. +--- + + + +# Resource: aws_networkmanager_link + +Creates a link for a site. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
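+ *
+ * bandwidth is expressed in Mbps, and providerName names the link's service
+ * provider (a carrier such as "MegaCorp" here), not a Terraform provider.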
+ */ +import { NetworkmanagerLink } from "./.gen/providers/aws/networkmanager-link"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerLink(this, "example", { + bandwidth: { + downloadSpeed: 50, + uploadSpeed: 10, + }, + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + providerName: "MegaCorp", + siteId: Token.asString(awsNetworkmanagerSiteExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bandwidth` - (Required) The upload speed and download speed in Mbps. Documented below. +* `description` - (Optional) A description of the link. +* `globalNetworkId` - (Required) The ID of the global network. +* `providerName` - (Optional) The provider of the link. +* `siteId` - (Required) The ID of the site. +* `tags` - (Optional) Key-value tags for the link. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `type` - (Optional) The type of the link. + +The `bandwidth` object supports the following: + +* `downloadSpeed` - (Optional) Download speed in Mbps. +* `uploadSpeed` - (Optional) Upload speed in Mbps. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Link Amazon Resource Name (ARN). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerLink` using the link ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerLink` using the link ARN. For example: + +```console +% terraform import aws_networkmanager_link.example arn:aws:networkmanager::123456789012:link/global-network-0d47f6t230mz46dy4/link-444555aaabbb11223 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_link_association.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_link_association.html.markdown new file mode 100644 index 00000000000..b0efade014a --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_link_association.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_link_association" +description: |- + Associates a link to a device. +--- + + + +# Resource: aws_networkmanager_link_association + +Associates a link to a device. +A device can be associated to multiple links and a link can be associated to multiple devices. +The device and link must be in the same global network and the same site. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerLinkAssociation } from "./.gen/providers/aws/networkmanager-link-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerLinkAssociation(this, "example", { + deviceId: Token.asString(awsNetworkmanagerDeviceExample.id), + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + linkId: Token.asString(awsNetworkmanagerLinkExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `deviceId` - (Required) The ID of the device. +* `globalNetworkId` - (Required) The ID of the global network. +* `linkId` - (Required) The ID of the link. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerLinkAssociation` using the global network ID, link ID and device ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerLinkAssociation` using the global network ID, link ID and device ID. For example: + +```console +% terraform import aws_networkmanager_link_association.example global-network-0d47f6t230mz46dy4,link-444555aaabbb11223,device-07f6fd08867abc123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_site.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_site.html.markdown new file mode 100644 index 00000000000..77c4ec571ca --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_site.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_site" +description: |- + Creates a site in a global network. +--- + + + +# Resource: aws_networkmanager_site + +Creates a site in a global network. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerSite } from "./.gen/providers/aws/networkmanager-site"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new NetworkmanagerGlobalNetwork(this, "example", {}); + const awsNetworkmanagerSiteExample = new NetworkmanagerSite( + this, + "example_1", + { + globalNetworkId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsNetworkmanagerSiteExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `globalNetworkId` - (Required) The ID of the Global Network to create the site in.
+* `description` - (Optional) Description of the Site.
+* `location` - (Optional) The site location as documented below.
+* `tags` - (Optional) Key-value tags for the Site. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `location` object supports the following:
+
+* `address` - (Optional) Address of the location.
+* `latitude` - (Optional) Latitude of the location.
+* `longitude` - (Optional) Longitude of the location.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Site Amazon Resource Name (ARN).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerSite` using the site ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNetworkmanagerSite` using the site ARN. For example:
+
+```console
+% terraform import aws_networkmanager_site.example arn:aws:networkmanager::123456789012:site/global-network-0d47f6t230mz46dy4/site-444555aaabbb11223
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkmanager_site_to_site_vpn_attachment.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_site_to_site_vpn_attachment.html.markdown
new file mode 100644
index 00000000000..f157328733f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkmanager_site_to_site_vpn_attachment.html.markdown
@@ -0,0 +1,231 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_site_to_site_vpn_attachment"
+description: |-
+  Terraform resource for managing an AWS NetworkManager Site-to-Site VPN Attachment.
+---
+
+
+
+# Resource: aws_networkmanager_site_to_site_vpn_attachment
+
+Terraform resource for managing an AWS NetworkManager Site-to-Site VPN Attachment.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
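+ * Note: awsccNetworkmanagerCoreNetworkExample and awsVpnConnectionExample below
+ * are assumed to be a core network (from the awscc provider) and a VPN
+ * connection declared elsewhere; the Full Usage example shows both in full.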
+ */ +import { NetworkmanagerSiteToSiteVpnAttachment } from "./.gen/providers/aws/networkmanager-site-to-site-vpn-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerSiteToSiteVpnAttachment(this, "example", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id), + vpnConnectionArn: Token.asString(awsVpnConnectionExample.arn), + }); + } +} + +``` + +### Full Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CustomerGateway } from "./.gen/providers/aws/customer-gateway"; +import { DataAwsNetworkmanagerCoreNetworkPolicyDocument } from "./.gen/providers/aws/data-aws-networkmanager-core-network-policy-document"; +import { NetworkmanagerAttachmentAccepter } from "./.gen/providers/aws/networkmanager-attachment-accepter"; +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerSiteToSiteVpnAttachment } from "./.gen/providers/aws/networkmanager-site-to-site-vpn-attachment"; +import { VpnConnection } from "./.gen/providers/aws/vpn-connection"; +import { NetworkmanagerCoreNetwork } from "./.gen/providers/awscc/networkmanager-core-network"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*The following providers are missing schema information and might need manual adjustments to synthesize correctly: awscc. + For a more precise conversion please use the --provider flag in convert.*/ + const test = new CustomerGateway(this, "test", { + bgpAsn: Token.asString(65000), + ipAddress: "172.0.0.1", + type: "ipsec.1", + }); + const awsNetworkmanagerGlobalNetworkTest = new NetworkmanagerGlobalNetwork( + this, + "test_1", + { + tags: { + Name: "test", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerGlobalNetworkTest.overrideLogicalId("test"); + const awsVpnConnectionTest = new VpnConnection(this, "test_2", { + customerGatewayId: test.id, + tags: { + Name: "test", + }, + type: "ipsec.1", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsVpnConnectionTest.overrideLogicalId("test"); + const dataAwsNetworkmanagerCoreNetworkPolicyDocumentTest = + new DataAwsNetworkmanagerCoreNetworkPolicyDocument(this, "test_3", { + attachmentPolicies: [ + { + action: { + associationMethod: "constant", + segment: "shared", + }, + conditionLogic: "or", + conditions: [ + { + key: "segment", + operator: "equals", + type: "tag-value", + value: "shared", + }, + ], + ruleNumber: 1, + }, + ], + coreNetworkConfiguration: [ + { + asnRanges: ["64512-64555"], + edgeLocations: [ + { + asn: Token.asString(64512), + location: Token.asString(current.name), + }, + ], + vpnEcmpSupport: false, + }, + ], + segmentActions: [ + { + action: "share", + mode: "attachment-route", + segment: "shared", + shareWith: ["*"], + }, + ], + segments: [ + { + description: "SegmentForSharedServices", + name: "shared", + requireAttachmentAcceptance: true, + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsNetworkmanagerCoreNetworkPolicyDocumentTest.overrideLogicalId( + "test" + ); + const awsccNetworkmanagerCoreNetworkTest = new NetworkmanagerCoreNetwork( + this, + "test_4", + { + global_network_id: awsNetworkmanagerGlobalNetworkTest.id, + policy_document: Fn.jsonencode( + Fn.jsondecode( + Token.asString( + dataAwsNetworkmanagerCoreNetworkPolicyDocumentTest.json + ) + ) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsccNetworkmanagerCoreNetworkTest.overrideLogicalId("test"); + const awsNetworkmanagerSiteToSiteVpnAttachmentTest = + new NetworkmanagerSiteToSiteVpnAttachment(this, "test_5", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkTest.id), + tags: { + segment: "shared", + }, + vpnConnectionArn: Token.asString(awsVpnConnectionTest.arn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerSiteToSiteVpnAttachmentTest.overrideLogicalId("test"); + const awsNetworkmanagerAttachmentAccepterTest = + new NetworkmanagerAttachmentAccepter(this, "test_6", { + attachmentId: Token.asString( + awsNetworkmanagerSiteToSiteVpnAttachmentTest.id + ), + attachmentType: Token.asString( + awsNetworkmanagerSiteToSiteVpnAttachmentTest.attachmentType + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerAttachmentAccepterTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +- `coreNetworkId` - (Required) The ID of a core network for the VPN attachment. +- `vpnConnectionArn` - (Required) The ARN of the site-to-site VPN connection. + +The following arguments are optional: + +- `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +- `arn` - The ARN of the attachment. +- `attachmentPolicyRuleNumber` - The policy rule number associated with the attachment. 
+- `attachmentType` - The type of attachment. +- `coreNetworkArn` - The ARN of a core network. +- `coreNetworkId` - The ID of a core network +- `edgeLocation` - The Region where the edge is located. +- `id` - The ID of the attachment. +- `ownerAccountId` - The ID of the attachment account owner. +- `resourceArn` - The attachment resource ARN. +- `segmentName` - The name of the segment attachment. +- `state` - The state of the attachment. +- `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerSiteToSiteVpnAttachment` using the attachment ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerSiteToSiteVpnAttachment` using the attachment ID. For example: + +```console +% terraform import aws_networkmanager_site_to_site_vpn_attachment.example attachment-0f8fa60d2238d1bd8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_connect_peer_association.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_connect_peer_association.html.markdown new file mode 100644 index 00000000000..86c87813bbe --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_connect_peer_association.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_transit_gateway_connect_peer_association" +description: |- + Associates a transit gateway Connect peer with a device, and optionally, with a link. +--- + + + +# Resource: aws_networkmanager_transit_gateway_connect_peer_association + +Associates a transit gateway Connect peer with a device, and optionally, with a link. +If you specify a link, it must be associated with the specified device. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerTransitGatewayConnectPeerAssociation } from "./.gen/providers/aws/networkmanager-transit-gateway-connect-peer-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerTransitGatewayConnectPeerAssociation(this, "example", { + deviceId: Token.asString(awsNetworkmanagerDeviceExample.id), + globalNetworkId: Token.asString(awsNetworkmanagerGlobalNetworkExample.id), + transitGatewayConnectPeerArn: Token.asString( + awsEc2TransitGatewayConnectPeerExample.arn + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `deviceId` - (Required) The ID of the device. +* `globalNetworkId` - (Required) The ID of the global network. +* `linkId` - (Optional) The ID of the link. 
+* `transitGatewayConnectPeerArn` - (Required) The Amazon Resource Name (ARN) of the Connect peer.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerTransitGatewayConnectPeerAssociation` using the global network ID and Connect peer ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsNetworkmanagerTransitGatewayConnectPeerAssociation` using the global network ID and Connect peer ARN. For example:
+
+```console
+% terraform import aws_networkmanager_transit_gateway_connect_peer_association.example global-network-0d47f6t230mz46dy4,arn:aws:ec2:us-west-2:123456789012:transit-gateway-connect-peer/tgw-connect-peer-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_peering.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_peering.html.markdown
new file mode 100644
index 00000000000..3df75b8cbba
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_peering.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Network Manager"
+layout: "aws"
+page_title: "AWS: aws_networkmanager_transit_gateway_peering"
+description: |-
+  Creates a peering connection between an AWS Cloud WAN core network and an AWS Transit Gateway.
+---
+
+
+
+# Resource: aws_networkmanager_transit_gateway_peering
+
+Creates a peering connection between an AWS Cloud WAN core network and an AWS Transit Gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { NetworkmanagerTransitGatewayPeering } from "./.gen/providers/aws/networkmanager-transit-gateway-peering";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new NetworkmanagerTransitGatewayPeering(this, "example", {
+      coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id),
+      transitGatewayArn: Token.asString(awsEc2TransitGatewayExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `coreNetworkId` - (Required) The ID of a core network.
+* `tags` - (Optional) Key-value tags for the peering. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `transitGatewayArn` - (Required) The ARN of the transit gateway for the peering request.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Peering Amazon Resource Name (ARN).
+* `coreNetworkArn` - The ARN of the core network.
+* `edgeLocation` - The edge location for the peer.
+* `id` - Peering ID.
+* `ownerAccountId` - The ID of the account owner. +* `peeringType` - The type of peering. This will be `transitGateway`. +* `resourceArn` - The resource ARN of the peer. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `transitGatewayPeeringAttachmentId` - The ID of the transit gateway peering attachment. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerTransitGatewayPeering` using the peering ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerTransitGatewayPeering` using the peering ID. For example: + +```console +% terraform import aws_networkmanager_transit_gateway_peering.example peering-444555aaabbb11223 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_registration.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_registration.html.markdown new file mode 100644 index 00000000000..997ec17701b --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_registration.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_transit_gateway_registration" +description: |- + Registers a transit gateway to a global network. +--- + + + +# Resource: aws_networkmanager_transit_gateway_registration + +Registers a transit gateway to a global network. The transit gateway can be in any AWS Region, +but it must be owned by the same AWS account that owns the global network. +You cannot register a transit gateway in more than one global network. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Ec2TransitGateway } from "./.gen/providers/aws/ec2-transit-gateway"; +import { NetworkmanagerGlobalNetwork } from "./.gen/providers/aws/networkmanager-global-network"; +import { NetworkmanagerTransitGatewayRegistration } from "./.gen/providers/aws/networkmanager-transit-gateway-registration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Ec2TransitGateway(this, "example", {}); + const awsNetworkmanagerGlobalNetworkExample = + new NetworkmanagerGlobalNetwork(this, "example_1", { + description: "example", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsNetworkmanagerGlobalNetworkExample.overrideLogicalId("example"); + const awsNetworkmanagerTransitGatewayRegistrationExample = + new NetworkmanagerTransitGatewayRegistration(this, "example_2", { + globalNetworkId: Token.asString( + awsNetworkmanagerGlobalNetworkExample.id + ), + transitGatewayArn: example.arn, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsNetworkmanagerTransitGatewayRegistrationExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `globalNetworkId` - (Required) The ID of the Global Network to register to. +* `transitGatewayArn` - (Required) The ARN of the Transit Gateway to register. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerTransitGatewayRegistration` using the global network ID and transit gateway ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerTransitGatewayRegistration` using the global network ID and transit gateway ARN. For example: + +```console +% terraform import aws_networkmanager_transit_gateway_registration.example global-network-0d47f6t230mz46dy4,arn:aws:ec2:us-west-2:123456789012:transit-gateway/tgw-123abc05e04123abc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_route_table_attachment.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_route_table_attachment.html.markdown new file mode 100644 index 00000000000..e8a4ddd38e0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_transit_gateway_route_table_attachment.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_transit_gateway_route_table_attachment" +description: |- + Creates a transit gateway route table attachment. +--- + + + +# Resource: aws_networkmanager_transit_gateway_route_table_attachment + +Creates a transit gateway route table attachment. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
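+ * Note: awsNetworkmanagerTransitGatewayPeeringExample and
+ * awsEc2TransitGatewayRouteTableExample below are assumed to be a transit
+ * gateway peering and a transit gateway route table declared elsewhere.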
+ */ +import { NetworkmanagerTransitGatewayRouteTableAttachment } from "./.gen/providers/aws/networkmanager-transit-gateway-route-table-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerTransitGatewayRouteTableAttachment(this, "example", { + peeringId: Token.asString( + awsNetworkmanagerTransitGatewayPeeringExample.id + ), + transitGatewayRouteTableArn: Token.asString( + awsEc2TransitGatewayRouteTableExample.arn + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `peeringId` - (Required) The ID of the peer for the attachment. +* `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `transitGatewayRouteTableArn` - (Required) The ARN of the transit gateway route table for the attachment. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Attachment Amazon Resource Name (ARN). +* `attachmentPolicyRuleNumber` - The policy rule number associated with the attachment. +* `attachmentType` - The type of attachment. +* `coreNetworkArn` - The ARN of the core network. +* `coreNetworkId` - The ID of the core network. +* `edgeLocation` - The edge location for the peer. +* `id` - The ID of the attachment. +* `ownerAccountId` - The ID of the attachment account owner. +* `resourceArn` - The attachment resource ARN. +* `segmentName` - The name of the segment attachment. +* `state` - The state of the attachment. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerTransitGatewayRouteTableAttachment` using the attachment ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerTransitGatewayRouteTableAttachment` using the attachment ID. For example: + +```console +% terraform import aws_networkmanager_transit_gateway_route_table_attachment.example attachment-0f8fa60d2238d1bd8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/networkmanager_vpc_attachment.html.markdown b/website/docs/cdktf/typescript/r/networkmanager_vpc_attachment.html.markdown new file mode 100644 index 00000000000..495255831e2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/networkmanager_vpc_attachment.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "Network Manager" +layout: "aws" +page_title: "AWS: aws_networkmanager_vpc_attachment" +description: |- + Terraform resource for managing an AWS NetworkManager VpcAttachment. +--- + + + +# Resource: aws_networkmanager_vpc_attachment + +Terraform resource for managing an AWS NetworkManager VpcAttachment. 
+ +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { NetworkmanagerVpcAttachment } from "./.gen/providers/aws/networkmanager-vpc-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new NetworkmanagerVpcAttachment(this, "example", { + coreNetworkId: Token.asString(awsccNetworkmanagerCoreNetworkExample.id), + subnetArns: [Token.asString(awsSubnetExample.arn)], + vpcArn: Token.asString(awsVpcExample.arn), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `coreNetworkId` - (Required) The ID of a core network for the VPC attachment. +* `subnetArns` - (Required) The subnet ARN of the VPC attachment. +* `vpcArn` - (Required) The ARN of the VPC. + +The following arguments are optional: + +* `options` - (Optional) Options for the VPC attachment. +* `tags` - (Optional) Key-value tags for the attachment. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### options + +* `applianceModeSupport` - (Optional) Indicates whether appliance mode is supported. If enabled, traffic flow between a source and destination use the same Availability Zone for the VPC attachment for the lifetime of that flow. +* `ipv6Support` - (Optional) Indicates whether IPv6 is supported. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the attachment. +* `attachmentPolicyRuleNumber` - The policy rule number associated with the attachment. +* `attachmentType` - The type of attachment. +* `coreNetworkArn` - The ARN of a core network. +* `edgeLocation` - The Region where the edge is located. +* `id` - The ID of the attachment. +* `ownerAccountId` - The ID of the attachment account owner. +* `resourceArn` - The attachment resource ARN. +* `segmentName` - The name of the segment attachment. +* `state` - The state of the attachment. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsNetworkmanagerVpcAttachment` using the attachment ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsNetworkmanagerVpcAttachment` using the attachment ID. 
For example: + +```console +% terraform import aws_networkmanager_vpc_attachment.example attachment-0f8fa60d2238d1bd8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/oam_link.html.markdown b/website/docs/cdktf/typescript/r/oam_link.html.markdown new file mode 100644 index 00000000000..768d736e133 --- /dev/null +++ b/website/docs/cdktf/typescript/r/oam_link.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_link" +description: |- + Terraform resource for managing an AWS CloudWatch Observability Access Manager Link. +--- + + + +# Resource: aws_oam_link + +Terraform resource for managing an AWS CloudWatch Observability Access Manager Link. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OamLink } from "./.gen/providers/aws/oam-link"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OamLink(this, "example", { + labelTemplate: "$AccountName", + resourceTypes: ["AWS::CloudWatch::Metric"], + sinkIdentifier: test.id, + tags: { + Env: "prod", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `labelTemplate` - (Required) Human-readable name to use to identify this source account when you are viewing data from it in the monitoring account. +* `resourceTypes` - (Required) Types of data that the source account shares with the monitoring account. +* `sinkIdentifier` - (Required) Identifier of the sink to use to create this link. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the link. +* `label` - Label that is assigned to this link. +* `linkId` - ID string that AWS generated as part of the link ARN. +* `sinkArn` - ARN of the sink that is used for this link. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1M`) +* `update` - (Default `1M`) +* `delete` - (Default `1M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Observability Access Manager Link using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch Observability Access Manager Link using the `arn`. 
For example: + +```console +% terraform import aws_oam_link.example arn:aws:oam:us-west-2:123456789012:link/link-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/oam_sink.html.markdown b/website/docs/cdktf/typescript/r/oam_sink.html.markdown new file mode 100644 index 00000000000..bc301cfa029 --- /dev/null +++ b/website/docs/cdktf/typescript/r/oam_sink.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_sink" +description: |- + Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink. +--- + + + +# Resource: aws_oam_sink + +Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OamSink } from "./.gen/providers/aws/oam-sink"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OamSink(this, "example", { + name: "ExampleSink", + tags: { + Env: "prod", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name for the sink. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Sink. +* `sinkId` - ID string that AWS generated as part of the sink ARN. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1M`) +* `update` - (Default `1M`) +* `delete` - (Default `1M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Observability Access Manager Sink using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch Observability Access Manager Sink using the `arn`. 
For example: + +```console +% terraform import aws_oam_sink.example arn:aws:oam:us-west-2:123456789012:sink/sink-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/oam_sink_policy.html.markdown b/website/docs/cdktf/typescript/r/oam_sink_policy.html.markdown new file mode 100644 index 00000000000..70337adf5bf --- /dev/null +++ b/website/docs/cdktf/typescript/r/oam_sink_policy.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "CloudWatch Observability Access Manager" +layout: "aws" +page_title: "AWS: aws_oam_sink_policy" +description: |- + Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink Policy. +--- + + + +# Resource: aws_oam_sink_policy + +Terraform resource for managing an AWS CloudWatch Observability Access Manager Sink Policy. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OamSink } from "./.gen/providers/aws/oam-sink"; +import { OamSinkPolicy } from "./.gen/providers/aws/oam-sink-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new OamSink(this, "example", { + name: "ExampleSink", + }); + const awsOamSinkPolicyExample = new OamSinkPolicy(this, "example_1", { + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["oam:CreateLink", "oam:UpdateLink"], + Condition: { + "ForAllValues:StringEquals": { + "oam:ResourceTypes": [ + "AWS::CloudWatch::Metric", + "AWS::Logs::LogGroup", + ], + }, + }, + Effect: "Allow", + Principal: { + AWS: ["1111111111111", "222222222222"], + }, + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + sinkIdentifier: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsOamSinkPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `sinkIdentifier` - (Required) ARN of the sink to attach this policy to. +* `policy` - (Required) JSON policy to use. If you are updating an existing policy, the entire existing policy is replaced by what you specify here. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Sink. +* `sinkId` - ID string that AWS generated as part of the sink ARN. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `1M`) +* `update` - (Default `1M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Observability Access Manager Sink Policy using the `sinkIdentifier`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch Observability Access Manager Sink Policy using the `sinkIdentifier`. For example: + +```console +% terraform import aws_oam_sink_policy.example arn:aws:oam:us-west-2:123456789012:sink/sink-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opensearch_domain.html.markdown b/website/docs/cdktf/typescript/r/opensearch_domain.html.markdown new file mode 100644 index 00000000000..78be4c08bbb --- /dev/null +++ b/website/docs/cdktf/typescript/r/opensearch_domain.html.markdown @@ -0,0 +1,632 @@ +--- +subcategory: "OpenSearch" +layout: "aws" +page_title: "AWS: aws_opensearch_domain" +description: |- + Terraform resource for managing an AWS OpenSearch Domain. +--- + + + +# Resource: aws_opensearch_domain + +Manages an Amazon OpenSearch Domain. + +## Elasticsearch vs. OpenSearch + +Amazon OpenSearch Service is the successor to Amazon Elasticsearch Service and supports OpenSearch and legacy Elasticsearch OSS (up to 7.10, the final open source version of the software). + +OpenSearch Domain configurations are similar in many ways to Elasticsearch Domain configurations. However, there are important differences including these: + +* OpenSearch has `engineVersion` while Elasticsearch has `elasticsearchVersion` +* Versions are specified differently - _e.g._, `elasticsearch710` with OpenSearch vs. `710` for Elasticsearch. +* `instanceType` argument values end in `search` for OpenSearch vs. `elasticsearch` for Elasticsearch (_e.g._, `t2MicroSearch` vs. `t2MicroElasticsearch`). +* The AWS-managed service-linked role for OpenSearch is called `awsServiceRoleForAmazonOpenSearchService` instead of `awsServiceRoleForAmazonElasticsearchService` for Elasticsearch. + +There are also some potentially unexpected similarities in configurations: + +* ARNs for both are prefaced with `arn:aws:es:`. +* Both OpenSearch and Elasticsearch use assume role policies that refer to the `principal` `service` as `esAmazonawsCom`. +* IAM policy actions, such as those you will find in `accessPolicies`, are prefaced with `es:` for both. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
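+ * Note the engineVersion format: "Elasticsearch_X.Y" selects legacy
+ * Elasticsearch (as below), while "OpenSearch_X.Y" selects OpenSearch.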
+ */ +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchDomain(this, "example", { + clusterConfig: { + instanceType: "r4.large.search", + }, + domainName: "example", + engineVersion: "Elasticsearch_7.10", + tags: { + Domain: "TestDomain", + }, + }); + } +} + +``` + +### Access Policy + +-> See also: [`awsOpensearchDomainPolicy` resource](/docs/providers/aws/r/opensearch_domain_policy.html) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformVariable, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const domain = new TerraformVariable(this, "domain", { + default: "tf-test", + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_2", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: ["es:*"], + condition: [ + { + test: "IpAddress", + values: ["66.193.100.22/32"], + variable: "aws:SourceIp", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "*", + }, + ], + resources: [ + "arn:aws:es:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:domain/${" + + domain.value + + "}/*", + ], + }, + ], + }); + const awsOpensearchDomainExample = new OpensearchDomain(this, "example_4", { + accessPolicies: Token.asString(example.json), + domainName: domain.stringValue, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsOpensearchDomainExample.overrideLogicalId("example"); + } +} + +``` + +### Log publishing to CloudWatch Logs + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
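+ * Note: the CloudwatchLogResourcePolicy declared at the end of this example
+ * is what authorizes the es.amazonaws.com service principal to write to the
+ * log group; log delivery will not work without it.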
+ */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +interface MyConfig { + domainName: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "example", + }); + const awsOpensearchDomainExample = new OpensearchDomain(this, "example_1", { + logPublishingOptions: [ + { + cloudwatchLogGroupArn: example.arn, + logType: "INDEX_SLOW_LOGS", + }, + ], + domainName: config.domainName, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsOpensearchDomainExample.overrideLogicalId("example"); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_2", + { + statement: [ + { + actions: [ + "logs:PutLogEvents", + "logs:PutLogEventsBatch", + "logs:CreateLogStream", + ], + effect: "Allow", + principals: [ + { + identifiers: ["es.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:logs:*"], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsCloudwatchLogResourcePolicyExample = + new CloudwatchLogResourcePolicy(this, "example_3", { + policyDocument: Token.asString(dataAwsIamPolicyDocumentExample.json), + policyName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchLogResourcePolicyExample.overrideLogicalId("example"); + } +} + +``` + +### VPC based OpenSearch + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + TerraformVariable, + Token, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { DataAwsSubnets } from "./.gen/providers/aws/data-aws-subnets"; +import { DataAwsVpc } from "./.gen/providers/aws/data-aws-vpc"; +import { IamServiceLinkedRole } from "./.gen/providers/aws/iam-service-linked-role"; +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. 
+ You can read more about this at https://cdk.tf/variables*/ + const domain = new TerraformVariable(this, "domain", { + default: "tf-test", + }); + const vpc = new TerraformVariable(this, "vpc", {}); + const example = new IamServiceLinkedRole(this, "example", { + awsServiceName: "opensearchservice.amazonaws.com", + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_4", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const dataAwsVpcExample = new DataAwsVpc(this, "example_5", { + tags: { + Name: vpc.stringValue, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsVpcExample.overrideLogicalId("example"); + const awsSecurityGroupExample = new SecurityGroup(this, "example_6", { + description: "Managed by Terraform", + ingress: [ + { + cidrBlocks: [Token.asString(dataAwsVpcExample.cidrBlock)], + fromPort: 443, + protocol: "tcp", + toPort: 443, + }, + ], + name: "${" + vpc.value + "}-opensearch-${" + domain.value + "}", + vpcId: Token.asString(dataAwsVpcExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityGroupExample.overrideLogicalId("example"); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_7", + { + statement: [ + { + actions: ["es:*"], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "*", + }, + ], + resources: [ + "arn:aws:es:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:domain/${" + + domain.value + + "}/*", + ], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const dataAwsSubnetsExample = new DataAwsSubnets(this, "example_8", { + filter: [ + { + name: "vpc-id", + values: [Token.asString(dataAwsVpcExample.id)], + }, + ], + tags: { + Tier: "private", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsSubnetsExample.overrideLogicalId("example"); + const awsOpensearchDomainExample = new OpensearchDomain(this, "example_9", { + accessPolicies: Token.asString(dataAwsIamPolicyDocumentExample.json), + advancedOptions: { + "rest.action.multi.allow_explicit_index": "true", + }, + clusterConfig: { + instanceType: "m4.large.search", + zoneAwarenessEnabled: true, + }, + dependsOn: [example], + domainName: domain.stringValue, + engineVersion: "OpenSearch_1.0", + tags: { + Domain: "TestDomain", + }, + vpcOptions: { + securityGroupIds: [Token.asString(awsSecurityGroupExample.id)], + subnetIds: [ + Token.asString(propertyAccess(dataAwsSubnetsExample.ids, ["0"])), + Token.asString(propertyAccess(dataAwsSubnetsExample.ids, ["1"])), + ], + }, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsOpensearchDomainExample.overrideLogicalId("example"); + } +} + +``` + +### Enabling fine-grained access control on an existing domain + +This example shows two configurations: one to create a domain without fine-grained access control and the second to modify the domain to enable fine-grained access control. For more information, see [Enabling fine-grained access control](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac.html). + +#### First apply + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchDomain(this, "example", { + advancedSecurityOptions: { + anonymousAuthEnabled: true, + enabled: false, + internalUserDatabaseEnabled: true, + masterUserOptions: { + masterUserName: "example", + masterUserPassword: "Barbarbarbar1!", + }, + }, + clusterConfig: { + instanceType: "r5.large.search", + }, + domainEndpointOptions: { + enforceHttps: true, + tlsSecurityPolicy: "Policy-Min-TLS-1-2-2019-07", + }, + domainName: "ggkitty", + ebsOptions: { + ebsEnabled: true, + volumeSize: 10, + }, + encryptAtRest: { + enabled: true, + }, + engineVersion: "Elasticsearch_7.1", + nodeToNodeEncryption: { + enabled: true, + }, + }); + } +} + +``` + +#### Second apply + +Notice that the only change is `advancedSecurityOptions0Enabled` is now set to `true`. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchDomain(this, "example", { + advancedSecurityOptions: { + anonymousAuthEnabled: true, + enabled: true, + internalUserDatabaseEnabled: true, + masterUserOptions: { + masterUserName: "example", + masterUserPassword: "Barbarbarbar1!", + }, + }, + clusterConfig: { + instanceType: "r5.large.search", + }, + domainEndpointOptions: { + enforceHttps: true, + tlsSecurityPolicy: "Policy-Min-TLS-1-2-2019-07", + }, + domainName: "ggkitty", + ebsOptions: { + ebsEnabled: true, + volumeSize: 10, + }, + encryptAtRest: { + enabled: true, + }, + engineVersion: "Elasticsearch_7.1", + nodeToNodeEncryption: { + enabled: true, + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `domainName` - (Required) Name of the domain. + +The following arguments are optional: + +* `accessPolicies` - (Optional) IAM policy document specifying the access policies for the domain. +* `advancedOptions` - (Optional) Key-value string pairs to specify advanced configuration options. Note that the values for these configuration options must be strings (wrapped in quotes) or they may be wrong and cause a perpetual diff, causing Terraform to want to recreate your OpenSearch domain on every apply. 
+* `advancedSecurityOptions` - (Optional) Configuration block for [fine-grained access control](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac.html). Detailed below.
+* `autoTuneOptions` - (Optional) Configuration block for the Auto-Tune options of the domain. Detailed below.
+* `clusterConfig` - (Optional) Configuration block for the cluster of the domain. Detailed below.
+* `cognitoOptions` - (Optional) Configuration block for authenticating Dashboard with Cognito. Detailed below.
+* `domainEndpointOptions` - (Optional) Configuration block for domain endpoint HTTP(S) related options. Detailed below.
+* `ebsOptions` - (Optional) Configuration block for EBS related options, may be required based on chosen [instance size](https://aws.amazon.com/opensearch-service/pricing/). Detailed below.
+* `engineVersion` - (Optional) Either `Elasticsearch_X.Y` or `OpenSearch_X.Y` to specify the engine version for the Amazon OpenSearch Service domain. For example, `OpenSearch_1.0` or `Elasticsearch_7.9`.
+  See [Creating and managing Amazon OpenSearch Service domains](http://docs.aws.amazon.com/opensearch-service/latest/developerguide/createupdatedomains.html#createdomains).
+  Defaults to the latest version of OpenSearch.
+* `encryptAtRest` - (Optional) Configuration block for encrypt at rest options. Only available for [certain instance types](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/encryption-at-rest.html). Detailed below.
+* `logPublishingOptions` - (Optional) Configuration block for publishing slow and application logs to CloudWatch Logs. This block can be declared multiple times, for each log_type, within the same resource. Detailed below.
+* `nodeToNodeEncryption` - (Optional) Configuration block for node-to-node encryption options. Detailed below.
+* `snapshotOptions` - (Optional) Configuration block for snapshot related options. Detailed below. DEPRECATED. For domains running OpenSearch 5.3 and later, Amazon OpenSearch takes hourly automated snapshots, making this setting irrelevant. For domains running earlier versions, OpenSearch takes daily automated snapshots.
+* `tags` - (Optional) Map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcOptions` - (Optional) Configuration block for VPC related options. Adding or removing this configuration forces a new resource ([documentation](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/vpc.html)). Detailed below.
+* `offPeakWindowOptions` - (Optional) Configuration to add Off Peak update options. ([documentation](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/off-peak.html)). Detailed below.
+
+### advanced_security_options
+
+* `anonymousAuthEnabled` - (Optional) Whether Anonymous auth is enabled. Enables fine-grained access control on an existing domain. Ignored unless `advancedSecurityOptions` are enabled. _Can only be enabled on an existing domain._
+* `enabled` - (Required, Forces new resource when changing from `true` to `false`) Whether advanced security is enabled.
+* `internalUserDatabaseEnabled` - (Optional) Whether the internal user database is enabled. Default is `false`.
+* `masterUserOptions` - (Optional) Configuration block for the main user. Detailed below.
+
+#### master_user_options
+
+* `masterUserArn` - (Optional) ARN for the main user. Only specify if `internalUserDatabaseEnabled` is not set or set to `false`.
+* `masterUserName` - (Optional) Main user's username, which is stored in the Amazon OpenSearch Service domain's internal database. Only specify if `internalUserDatabaseEnabled` is set to `true`.
+* `masterUserPassword` - (Optional) Main user's password, which is stored in the Amazon OpenSearch Service domain's internal database. Only specify if `internalUserDatabaseEnabled` is set to `true`.
+
+### auto_tune_options
+
+* `desiredState` - (Required) Auto-Tune desired state for the domain. Valid values: `ENABLED` or `DISABLED`.
+* `maintenanceSchedule` - (Required if `rollbackOnDisable` is set to `DEFAULT_ROLLBACK`) Configuration block for Auto-Tune maintenance windows. Can be specified multiple times for each maintenance window. Detailed below.
+* `rollbackOnDisable` - (Optional) Whether to roll back to default Auto-Tune settings when disabling Auto-Tune. Valid values: `DEFAULT_ROLLBACK` or `NO_ROLLBACK`.
+
+#### maintenance_schedule
+
+* `startAt` - (Required) Date and time at which to start the Auto-Tune maintenance schedule in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `duration` - (Required) Configuration block for the duration of the Auto-Tune maintenance window. Detailed below.
+* `cronExpressionForRecurrence` - (Required) A cron expression specifying the recurrence pattern for an Auto-Tune maintenance schedule.
+
+##### duration
+
+* `value` - (Required) An integer specifying the value of the duration of an Auto-Tune maintenance window.
+* `unit` - (Required) Unit of time specifying the duration of an Auto-Tune maintenance window. Valid values: `HOURS`.
+
+### cluster_config
+
+* `coldStorageOptions` - (Optional) Configuration block containing cold storage configuration. Detailed below.
+* `dedicatedMasterCount` - (Optional) Number of dedicated main nodes in the cluster.
+* `dedicatedMasterEnabled` - (Optional) Whether dedicated main nodes are enabled for the cluster.
+* `dedicatedMasterType` - (Optional) Instance type of the dedicated main nodes in the cluster.
+* `instanceCount` - (Optional) Number of instances in the cluster.
+* `instanceType` - (Optional) Instance type of data nodes in the cluster.
+* `warmCount` - (Optional) Number of warm nodes in the cluster. Valid values are between `2` and `150`. `warmCount` can only be set, and must be set, when `warmEnabled` is `true`.
+* `warmEnabled` - (Optional) Whether to enable warm storage.
+* `warmType` - (Optional) Instance type for the OpenSearch cluster's warm nodes. Valid values are `ultrawarm1.medium.search`, `ultrawarm1.large.search` and `ultrawarm1.xlarge.search`. `warmType` can only be set, and must be set, when `warmEnabled` is `true`.
+* `zoneAwarenessConfig` - (Optional) Configuration block containing zone awareness settings. Detailed below.
+* `zoneAwarenessEnabled` - (Optional) Whether zone awareness is enabled, set to `true` for multi-az deployment. To enable awareness with three Availability Zones, the `availabilityZoneCount` within the `zoneAwarenessConfig` must be set to `3`.
+
+#### cold_storage_options
+
+* `enabled` - (Optional) Boolean to enable cold storage for an OpenSearch domain. Defaults to `false`. Master and ultrawarm nodes must be enabled for cold storage.
+
+#### zone_awareness_config
+
+* `availabilityZoneCount` - (Optional) Number of Availability Zones for the domain to use with `zoneAwarenessEnabled`. Defaults to `2`.
Valid values: `2` or `3`.
+
+### cognito_options
+
+AWS documentation: [Amazon Cognito Authentication for Dashboard](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/es-cognito-auth.html)
+
+* `enabled` - (Optional) Whether Amazon Cognito authentication with Dashboard is enabled. Default is `false`.
+* `identityPoolId` - (Required) ID of the Cognito Identity Pool to use.
+* `roleArn` - (Required) ARN of the IAM role that has the AmazonOpenSearchServiceCognitoAccess policy attached.
+* `userPoolId` - (Required) ID of the Cognito User Pool to use.
+
+### domain_endpoint_options
+
+* `customEndpointCertificateArn` - (Optional) ACM certificate ARN for your custom endpoint.
+* `customEndpointEnabled` - (Optional) Whether to enable custom endpoint for the OpenSearch domain.
+* `customEndpoint` - (Optional) Fully qualified domain for your custom endpoint.
+* `enforceHttps` - (Optional) Whether to require HTTPS. Defaults to `true`.
+* `tlsSecurityPolicy` - (Optional) Name of the TLS security policy that needs to be applied to the HTTPS endpoint. Valid values: `Policy-Min-TLS-1-0-2019-07` and `Policy-Min-TLS-1-2-2019-07`. Terraform will only perform drift detection if a configuration value is provided.
+
+### ebs_options
+
+* `ebsEnabled` - (Required) Whether EBS volumes are attached to data nodes in the domain.
+* `iops` - (Optional) Baseline input/output (I/O) performance of EBS volumes attached to data nodes. Applicable only for the GP3 and Provisioned IOPS EBS volume types.
+* `throughput` - (Required if `volumeType` is set to `gp3`) Specifies the throughput (in MiB/s) of the EBS volumes attached to data nodes. Applicable only for the gp3 volume type.
+* `volumeSize` - (Required if `ebsEnabled` is set to `true`) Size of EBS volumes attached to data nodes (in GiB).
+* `volumeType` - (Optional) Type of EBS volumes attached to data nodes.
+
+### encrypt_at_rest
+
+~> **Note:** You can enable `encryptAtRest` _in place_ for an existing, unencrypted domain only if you are using OpenSearch or your Elasticsearch version is 6.7 or greater. For other versions, if you enable `encryptAtRest`, Terraform will recreate the domain, potentially causing data loss. For any version, if you disable `encryptAtRest` for an existing, encrypted domain, Terraform will recreate the domain, potentially causing data loss. If you change the `kmsKeyId`, Terraform will also recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable encryption at rest. If the `encryptAtRest` block is not provided then this defaults to `false`. Enabling encryption on new domains requires an `engineVersion` of `OpenSearch_X.Y` or `Elasticsearch_5.1` or greater.
+* `kmsKeyId` - (Optional) KMS key ARN to encrypt the domain with. If not specified then it defaults to using the `aws/es` service KMS key. Note that KMS will accept a KMS key ID but will return the key ARN. To prevent Terraform detecting unwanted changes, use the key ARN instead.
+
+### log_publishing_options
+
+* `cloudwatchLogGroupArn` - (Required) ARN of the CloudWatch log group to which the logs are published.
+* `enabled` - (Optional, Default: true) Whether given log publishing option is enabled or not.
+* `logType` - (Required) Type of OpenSearch log. Valid values: `INDEX_SLOW_LOGS`, `SEARCH_SLOW_LOGS`, `ES_APPLICATION_LOGS`, `AUDIT_LOGS`.
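+
+Because `logPublishingOptions` can be declared once per log type, a domain that publishes several log types simply repeats the block. The following is a minimal hand-written sketch (not `cdktf convert` output) wiring one log type to a CloudWatch log group; the log group name is an illustrative assumption, and the CloudWatch Logs resource policy that allows the OpenSearch service to write to the group is omitted for brevity:
+
+```typescript
+// Sketch: publish index slow logs to a CloudWatch log group.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain";
+class LogPublishingSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Illustrative log group name, not taken from this document.
+    const slowLogs = new CloudwatchLogGroup(this, "slow_logs", {
+      name: "/aws/opensearch/example/index-slow",
+    });
+    new OpensearchDomain(this, "example", {
+      domainName: "example",
+      logPublishingOptions: [
+        {
+          cloudwatchLogGroupArn: Token.asString(slowLogs.arn),
+          logType: "INDEX_SLOW_LOGS",
+        },
+      ],
+    });
+  }
+}
+```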
+
+### node_to_node_encryption
+
+~> **Note:** You can enable `nodeToNodeEncryption` _in place_ for an existing, unencrypted domain only if you are using OpenSearch or your Elasticsearch version is 6.7 or greater. For other versions, if you enable `nodeToNodeEncryption`, Terraform will recreate the domain, potentially causing data loss. For any version, if you disable `nodeToNodeEncryption` for an existing, node-to-node encrypted domain, Terraform will recreate the domain, potentially causing data loss.
+
+* `enabled` - (Required) Whether to enable node-to-node encryption. If the `nodeToNodeEncryption` block is not provided then this defaults to `false`. Enabling node-to-node encryption of a new domain requires an `engineVersion` of `OpenSearch_X.Y` or `Elasticsearch_6.0` or greater.
+
+### snapshot_options
+
+* `automatedSnapshotStartHour` - (Required) Hour during which the service takes an automated daily snapshot of the indices in the domain.
+
+### vpc_options
+
+AWS documentation: [VPC Support for Amazon OpenSearch Service Domains](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/es-vpc.html)
+
+~> **Note:** You must have created the service linked role for the OpenSearch service to use `vpcOptions`. If you need to create the service linked role at the same time as the OpenSearch domain then you must use `dependsOn` to make sure that the role is created before the OpenSearch domain. See the [VPC based ES domain example](#vpc-based-opensearch) above.
+
+-> Security Groups and Subnets referenced in these attributes must all be within the same VPC. This determines what VPC the endpoints are created in.
+
+* `securityGroupIds` - (Optional) List of VPC Security Group IDs to be applied to the OpenSearch domain endpoints. If omitted, the default Security Group for the VPC will be used.
+* `subnetIds` - (Required) List of VPC Subnet IDs for the OpenSearch domain endpoints to be created in.
+
+### off_peak_window_options
+
+AWS documentation: [Off Peak Hours Support for Amazon OpenSearch Service Domains](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/off-peak.html)
+
+* `enabled` - (Optional) Whether to enable the off-peak update window.
+* `offPeakWindow` - (Optional) Configuration block for the off-peak window.
+    * `windowStartTime` - (Optional) Start time of the 10-hour window for updates.
+        * `hours` - (Required) Starting hour of the 10-hour window for updates.
+        * `minutes` - (Required) Starting minute of the 10-hour window for updates.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the domain.
+* `domainId` - Unique identifier for the domain.
+* `domainName` - Name of the OpenSearch domain.
+* `endpoint` - Domain-specific endpoint used to submit index, search, and data upload requests.
+* `dashboardEndpoint` - Domain-specific endpoint for Dashboard without the HTTPS scheme.
+* `kibanaEndpoint` - (**Deprecated**) Domain-specific endpoint for Kibana without the HTTPS scheme. Use the `dashboardEndpoint` attribute instead.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `vpcOptions0AvailabilityZones` - If the domain was created inside a VPC, the names of the availability zones the configured `subnetIds` were created inside.
+* `vpcOptions0VpcId` - If the domain was created inside a VPC, the ID of the VPC.
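+
+Attributes can be referenced from other constructs in the stack like any other token. As a small hand-written sketch (assuming `example` is an `OpensearchDomain` instance as in the examples above), the domain endpoint can be exposed as a stack output:
+
+```typescript
+// Sketch: expose the domain endpoint as a stack output.
+import { TerraformOutput } from "cdktf";
+
+// Inside the stack constructor, after the OpensearchDomain named "example":
+new TerraformOutput(this, "domain_endpoint", {
+  value: example.endpoint,
+});
+```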
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearch domains using the `domainName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpenSearch domains using the `domainName`. For example:
+
+```console
+% terraform import aws_opensearch_domain.example domain_name
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearch_domain_policy.html.markdown b/website/docs/cdktf/typescript/r/opensearch_domain_policy.html.markdown
new file mode 100644
index 00000000000..302f5797d1a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearch_domain_policy.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_domain_policy"
+description: |-
+  Provides an OpenSearch Domain Policy.
+---
+
+
+
+# Resource: aws_opensearch_domain_policy
+
+Allows setting policy to an OpenSearch domain while referencing domain attributes (e.g., ARN).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain";
+import { OpensearchDomainPolicy } from "./.gen/providers/aws/opensearch-domain-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new OpensearchDomain(this, "example", {
+      domainName: "tf-test",
+      engineVersion: "OpenSearch_1.1",
+    });
+    const main = new DataAwsIamPolicyDocument(this, "main", {
+      statement: [
+        {
+          actions: ["es:*"],
+          condition: [
+            {
+              test: "IpAddress",
+              values: ["127.0.0.1/32"],
+              variable: "aws:SourceIp",
+            },
+          ],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["*"],
+              type: "*",
+            },
+          ],
+          resources: ["${" + example.arn + "}/*"],
+        },
+      ],
+    });
+    const awsOpensearchDomainPolicyMain = new OpensearchDomainPolicy(
+      this,
+      "main_2",
+      {
+        accessPolicies: Token.asString(main.json),
+        domainName: example.domainName,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsOpensearchDomainPolicyMain.overrideLogicalId("main");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accessPolicies` - (Optional) IAM policy document specifying the access policies for the domain.
+* `domainName` - (Required) Name of the domain.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearch_domain_saml_options.html.markdown b/website/docs/cdktf/typescript/r/opensearch_domain_saml_options.html.markdown
new file mode 100644
index 00000000000..e286503fa66
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearch_domain_saml_options.html.markdown
@@ -0,0 +1,123 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_domain_saml_options"
+description: |-
+  Terraform resource for managing SAML authentication options for an AWS OpenSearch Domain.
+---
+
+
+
+# Resource: aws_opensearch_domain_saml_options
+
+Manages SAML authentication options for an AWS OpenSearch Domain.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpensearchDomain } from "./.gen/providers/aws/opensearch-domain";
+import { OpensearchDomainSamlOptions } from "./.gen/providers/aws/opensearch-domain-saml-options";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new OpensearchDomain(this, "example", {
+      clusterConfig: {
+        instanceType: "r4.large.search",
+      },
+      domainName: "example",
+      engineVersion: "OpenSearch_1.1",
+      snapshotOptions: {
+        automatedSnapshotStartHour: 23,
+      },
+      tags: {
+        Domain: "TestDomain",
+      },
+    });
+    const awsOpensearchDomainSamlOptionsExample =
+      new OpensearchDomainSamlOptions(this, "example_1", {
+        domainName: example.domainName,
+        samlOptions: {
+          enabled: true,
+          idp: {
+            entityId: "https://example.com",
+            metadataContent: Token.asString(Fn.file("./saml-metadata.xml")),
+          },
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsOpensearchDomainSamlOptionsExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `domainName` - (Required) Name of the domain.
+
+The following arguments are optional:
+
+* `samlOptions` - (Optional) SAML authentication options for an AWS OpenSearch Domain.
+
+### saml_options
+
+* `enabled` - (Required) Whether SAML authentication is enabled.
+* `idp` - (Optional) Information from your identity provider.
+* `masterBackendRole` - (Optional) This backend role from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `masterUserName` - (Optional) This username from the SAML IdP receives full permissions to the cluster, equivalent to a new master user.
+* `rolesKey` - (Optional) Element of the SAML assertion to use for backend roles. Default is `roles`.
+* `sessionTimeoutMinutes` - (Optional) Duration of a session in minutes after a user logs in. Default is 60. Maximum value is 1,440.
+* `subjectKey` - (Optional) Element of the SAML assertion to use for the username. Default is `NameID`.
+
+#### idp
+
+* `entityId` - (Required) Unique Entity ID of the application in the SAML Identity Provider.
+* `metadataContent` - (Required) Metadata of the SAML application in XML format.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the domain the SAML options are associated with.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearch domain SAML options using the `domainName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpenSearch domain SAML options using the `domainName`. For example:
+
+```console
+% terraform import aws_opensearch_domain_saml_options.example domain_name
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearch_inbound_connection_accepter.html.markdown b/website/docs/cdktf/typescript/r/opensearch_inbound_connection_accepter.html.markdown
new file mode 100644
index 00000000000..357c4f18529
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearch_inbound_connection_accepter.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_inbound_connection_accepter"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Inbound Connection Accepter.
+---
+
+
+
+# Resource: aws_opensearch_inbound_connection_accepter
+
+Manages an [AWS OpenSearch Inbound Connection Accepter](https://docs.aws.amazon.com/opensearch-service/latest/APIReference/API_AcceptInboundConnection.html). If connecting domains from different AWS accounts, ensure that the accepter is configured to use the AWS account where the _remote_ OpenSearch domain exists.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { OpensearchInboundConnectionAccepter } from "./.gen/providers/aws/opensearch-inbound-connection-accepter";
+import { OpensearchOutboundConnection } from "./.gen/providers/aws/opensearch-outbound-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsRegionCurrent = new DataAwsRegion(this, "current_1", {});
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    dataAwsRegionCurrent.overrideLogicalId("current");
+    /*localDomain and remoteDomain refer to aws_opensearch_domain resources defined elsewhere in the configuration.*/
+    const foo = new OpensearchOutboundConnection(this, "foo", {
+      connectionAlias: "outbound_connection",
+      localDomainInfo: {
+        domainName: localDomain.domainName,
+        ownerId: Token.asString(current.accountId),
+        region: Token.asString(dataAwsRegionCurrent.name),
+      },
+      remoteDomainInfo: {
+        domainName: remoteDomain.domainName,
+        ownerId: Token.asString(current.accountId),
+        region: Token.asString(dataAwsRegionCurrent.name),
+      },
+    });
+    const awsOpensearchInboundConnectionAccepterFoo =
+      new OpensearchInboundConnectionAccepter(this, "foo_3", {
+        connectionId: foo.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsOpensearchInboundConnectionAccepterFoo.overrideLogicalId("foo");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `connectionId` - (Required, Forces new resource) Specifies the ID of the connection to accept.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the connection to accept.
+* `connectionStatus` - Status of the connection request.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS OpenSearch Inbound Connection Accepters using the Inbound Connection ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AWS OpenSearch Inbound Connection Accepters using the Inbound Connection ID. For example:
+
+```console
+% terraform import aws_opensearch_inbound_connection_accepter.foo connection-id
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearch_outbound_connection.html.markdown b/website/docs/cdktf/typescript/r/opensearch_outbound_connection.html.markdown
new file mode 100644
index 00000000000..02db2c715be
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearch_outbound_connection.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "OpenSearch"
+layout: "aws"
+page_title: "AWS: aws_opensearch_outbound_connection"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Outbound Connection.
+---
+
+
+
+# Resource: aws_opensearch_outbound_connection
+
+Manages an AWS OpenSearch Outbound Connection.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { OpensearchOutboundConnection } from "./.gen/providers/aws/opensearch-outbound-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsRegionCurrent = new DataAwsRegion(this, "current_1", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsRegionCurrent.overrideLogicalId("current");
+    /*localDomain and remoteDomain refer to aws_opensearch_domain resources defined elsewhere in the configuration.*/
+    new OpensearchOutboundConnection(this, "foo", {
+      connectionAlias: "outbound_connection",
+      localDomainInfo: {
+        domainName: localDomain.domainName,
+        ownerId: Token.asString(current.accountId),
+        region: Token.asString(dataAwsRegionCurrent.name),
+      },
+      remoteDomainInfo: {
+        domainName: remoteDomain.domainName,
+        ownerId: Token.asString(current.accountId),
+        region: Token.asString(dataAwsRegionCurrent.name),
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `connectionAlias` - (Required, Forces new resource) Specifies the connection alias that will be used by the customer for this connection.
+* `localDomainInfo` - (Required, Forces new resource) Configuration block for the local OpenSearch domain.
+* `remoteDomainInfo` - (Required, Forces new resource) Configuration block for the remote OpenSearch domain.
+
+### local_domain_info
+
+* `ownerId` - (Required, Forces new resource) The Account ID of the owner of the local domain.
+* `domainName` - (Required, Forces new resource) The name of the local domain.
+* `region` - (Required, Forces new resource) The region of the local domain.
+
+### remote_domain_info
+
+* `ownerId` - (Required, Forces new resource) The Account ID of the owner of the remote domain.
+* `domainName` - (Required, Forces new resource) The name of the remote domain.
+* `region` - (Required, Forces new resource) The region of the remote domain.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the connection.
+* `connectionStatus` - Status of the connection request.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS OpenSearch Outbound Connections using the Outbound Connection ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AWS OpenSearch Outbound Connections using the Outbound Connection ID.
For example: + +```console +% terraform import aws_opensearch_outbound_connection.foo connection-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opensearchserverless_access_policy.html.markdown b/website/docs/cdktf/typescript/r/opensearchserverless_access_policy.html.markdown new file mode 100644 index 00000000000..5a057844de9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/opensearchserverless_access_policy.html.markdown @@ -0,0 +1,193 @@ +--- +subcategory: "OpenSearch Serverless" +layout: "aws" +page_title: "AWS: aws_opensearchserverless_access_policy" +description: |- + Terraform resource for managing an AWS OpenSearch Serverless Access Policy. +--- + + + +# Resource: aws_opensearchserverless_access_policy + +Terraform resource for managing an AWS OpenSearch Serverless Access Policy. See AWS documentation for [data access policies](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-data-access.html) and [supported data access policy permissions](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-data-access.html#serverless-data-supported-permissions). + +## Example Usage + +### Grant all collection and index permissions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { OpensearchserverlessAccessPolicy } from "./.gen/providers/aws/opensearchserverless-access-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + new OpensearchserverlessAccessPolicy(this, "example", { + description: "read and write permissions", + name: "example", + policy: Token.asString( + Fn.jsonencode([ + { + Principal: [current.arn], + Rules: [ + { + Permission: ["aoss:*"], + Resource: ["index/example-collection/*"], + ResourceType: "index", + }, + { + Permission: ["aoss:*"], + Resource: ["collection/example-collection"], + ResourceType: "collection", + }, + ], + }, + ]) + ), + type: "data", + }); + } +} + +``` + +### Grant read-only collection and index permissions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { OpensearchserverlessAccessPolicy } from "./.gen/providers/aws/opensearchserverless-access-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + new OpensearchserverlessAccessPolicy(this, "example", { + description: "read-only permissions", + name: "example", + policy: Token.asString( + Fn.jsonencode([ + { + Principal: [current.arn], + Rules: [ + { + Permission: ["aoss:DescribeIndex", "aoss:ReadDocument"], + Resource: ["index/example-collection/*"], + ResourceType: "index", + }, + { + Permission: ["aoss:DescribeCollectionItems"], + Resource: ["collection/example-collection"], + ResourceType: "collection", + }, + ], + }, + ]) + ), + type: "data", + }); + } +} + +``` + +### Grant SAML identity permissions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpensearchserverlessAccessPolicy } from "./.gen/providers/aws/opensearchserverless-access-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchserverlessAccessPolicy(this, "example", { + description: "saml permissions", + name: "example", + policy: Token.asString( + Fn.jsonencode([ + { + Principal: [ + "saml/123456789012/myprovider/user/Annie", + "saml/123456789012/anotherprovider/group/Accounting", + ], + Rules: [ + { + Permission: ["aoss:*"], + Resource: ["index/example-collection/*"], + ResourceType: "index", + }, + { + Permission: ["aoss:*"], + Resource: ["collection/example-collection"], + ResourceType: "collection", + }, + ], + }, + ]) + ), + type: "data", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the policy. +* `policy` - (Required) JSON policy document to use as the content for the new policy +* `type` - (Required) Type of access policy. Must be `data`. + +The following arguments are optional: + +* `description` - (Optional) Description of the policy. Typically used to store information about the permissions defined in the policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `policyVersion` - Version of the policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Access Policy using the `name` and `type` arguments separated by a slash (`/`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import OpenSearchServerless Access Policy using the `name` and `type` arguments separated by a slash (`/`). 
For example:
+
+```console
+% terraform import aws_opensearchserverless_access_policy.example example/data
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearchserverless_collection.html.markdown b/website/docs/cdktf/typescript/r/opensearchserverless_collection.html.markdown
new file mode 100644
index 00000000000..37c54979050
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearchserverless_collection.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_collection"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless Collection.
+---
+
+
+
+# Resource: aws_opensearchserverless_collection
+
+Terraform resource for managing an AWS OpenSearch Serverless Collection.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpensearchserverlessCollection } from "./.gen/providers/aws/opensearchserverless-collection";
+import { OpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/opensearchserverless-security-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new OpensearchserverlessSecurityPolicy(this, "example", {
+      name: "example",
+      policy: Token.asString(
+        Fn.jsonencode({
+          AWSOwnedKey: true,
+          Rules: [
+            {
+              Resource: ["collection/example"],
+              ResourceType: "collection",
+            },
+          ],
+        })
+      ),
+      type: "encryption",
+    });
+    const awsOpensearchserverlessCollectionExample =
+      new OpensearchserverlessCollection(this, "example_1", {
+        dependsOn: [example],
+        name: "example",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsOpensearchserverlessCollectionExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the collection.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the collection.
+* `tags` - (Optional) A map of tags to assign to the collection. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `type` - (Optional) Type of collection. One of `SEARCH` or `TIMESERIES`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the collection.
+* `collectionEndpoint` - Collection-specific endpoint used to submit index, search, and data upload requests to an OpenSearch Serverless collection.
+* `dashboardEndpoint` - Collection-specific endpoint used to access OpenSearch Dashboards.
+* `kmsKeyArn` - The ARN of the Amazon Web Services KMS key used to encrypt the collection.
+* `id` - Unique identifier for the collection.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Collection using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpenSearchServerless Collection using the `id`. For example:
+
+```console
+% terraform import aws_opensearchserverless_collection.example example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearchserverless_security_config.html.markdown b/website/docs/cdktf/typescript/r/opensearchserverless_security_config.html.markdown
new file mode 100644
index 00000000000..5d49aef15e7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearchserverless_security_config.html.markdown
@@ -0,0 +1,92 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_security_config"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless Security Config.
+---
+
+
+
+# Resource: aws_opensearchserverless_security_config
+
+Terraform resource for managing an AWS OpenSearch Serverless Security Config.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpensearchserverlessSecurityConfig } from "./.gen/providers/aws/opensearchserverless-security-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpensearchserverlessSecurityConfig(this, "example", {
+      name: "example",
+      samlOptions: [
+        {
+          metadata: Token.asString(Fn.file("${path.module}/idp-metadata.xml")),
+        },
+      ],
+      type: "saml",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required, Forces new resource) Name of the policy.
+* `samlOptions` - (Required) Configuration block for SAML options.
+* `type` - (Required) Type of configuration. Must be `saml`.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the security configuration.
+
+### saml_options
+
+* `groupAttribute` - (Optional) Group attribute for this SAML integration.
+* `metadata` - (Required) The XML IdP metadata file generated from your identity provider.
+* `sessionTimeout` - (Optional) Session timeout, in minutes. Minimum is 5 minutes and maximum is 720 minutes (12 hours). Default is 60 minutes.
+* `userAttribute` - (Optional) User attribute for this SAML integration.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `configVersion` - Version of the configuration.
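+
+As a hand-written variant of the Basic Usage example (not `cdktf convert` output), the optional `saml_options` arguments above can be combined as follows; the group attribute and timeout values are illustrative assumptions:
+
+```typescript
+// Sketch: SAML security config with optional session and group settings.
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { OpensearchserverlessSecurityConfig } from "./.gen/providers/aws/opensearchserverless-security-config";
+class SamlOptionsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpensearchserverlessSecurityConfig(this, "example", {
+      name: "example",
+      samlOptions: [
+        {
+          metadata: Token.asString(Fn.file("${path.module}/idp-metadata.xml")),
+          groupAttribute: "group", // illustrative IdP attribute name
+          sessionTimeout: 120, // minutes; must be between 5 and 720
+        },
+      ],
+      type: "saml",
+    });
+  }
+}
+```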
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Security Config using the `name` argument prefixed with the string `saml/accountId/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpenSearchServerless Security Config using the `name` argument prefixed with the string `saml/accountId/`. For example:
+
+```console
+% terraform import aws_opensearchserverless_security_config.example saml/123456789012/example
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearchserverless_security_policy.html.markdown b/website/docs/cdktf/typescript/r/opensearchserverless_security_policy.html.markdown
new file mode 100644
index 00000000000..cd82c712e7b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearchserverless_security_policy.html.markdown
@@ -0,0 +1,309 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_security_policy"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless Security Policy.
+---
+
+
+
+# Resource: aws_opensearchserverless_security_policy
+
+Terraform resource for managing an AWS OpenSearch Serverless Security Policy. See AWS documentation for [encryption policies](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-encryption.html#serverless-encryption-policies) and [network policies](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/serverless-network.html#serverless-network-policies).
+
+## Example Usage
+
+### Encryption Security Policy
+
+#### Applies to a single collection
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/opensearchserverless-security-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpensearchserverlessSecurityPolicy(this, "example", {
+      description: "encryption security policy for example-collection",
+      name: "example",
+      policy: Token.asString(
+        Fn.jsonencode({
+          AWSOwnedKey: true,
+          Rules: [
+            {
+              Resource: ["collection/example-collection"],
+              ResourceType: "collection",
+            },
+          ],
+        })
+      ),
+      type: "encryption",
+    });
+  }
+}
+
+```
+
+#### Applies to multiple collections
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { OpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/opensearchserverless-security-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchserverlessSecurityPolicy(this, "example", { + description: + 'encryption security policy for collections that begin with \\"example\\"', + name: "example", + policy: Token.asString( + Fn.jsonencode({ + AWSOwnedKey: true, + Rules: [ + { + Resource: ["collection/example*"], + ResourceType: "collection", + }, + ], + }) + ), + type: "encryption", + }); + } +} + +``` + +#### Using a customer managed key + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/opensearchserverless-security-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchserverlessSecurityPolicy(this, "example", { + description: "encryption security policy using customer KMS key", + name: "example", + policy: Token.asString( + Fn.jsonencode({ + AWSOwnedKey: false, + KmsARN: + "arn:aws:kms:us-east-1:123456789012:key/93fd6da4-a317-4c17-bfe9-382b5d988b36", + Rules: [ + { + Resource: ["collection/customer-managed-key-collection"], + ResourceType: "collection", + }, + ], + }) + ), + type: "encryption", + }); + } +} + +``` + +### Network Security Policy + +#### Allow public access to the collection endpoint and the Dashboards endpoint + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/opensearchserverless-security-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchserverlessSecurityPolicy(this, "example", { + description: "Public access", + name: "example", + policy: Token.asString( + Fn.jsonencode([ + { + AllowFromPublic: true, + Description: + "Public access to collection and Dashboards endpoint for example collection", + Rules: [ + { + Resource: ["collection/example-collection"], + ResourceType: "collection", + }, + { + Resource: ["collection/example-collection"], + ResourceType: "dashboard", + }, + ], + }, + ]) + ), + type: "network", + }); + } +} + +``` + +#### Allow VPC access to the collection endpoint and the Dashboards endpoint + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { OpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/opensearchserverless-security-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchserverlessSecurityPolicy(this, "example", { + description: "VPC access", + name: "example", + policy: Token.asString( + Fn.jsonencode([ + { + AllowFromPublic: false, + Description: + "VPC access to collection and Dashboards endpoint for example collection", + Rules: [ + { + Resource: ["collection/example-collection"], + ResourceType: "collection", + }, + { + Resource: ["collection/example-collection"], + ResourceType: "dashboard", + }, + ], + SourceVPCEs: ["vpce-050f79086ee71ac05"], + }, + ]) + ), + type: "network", + }); + } +} + +``` + +#### Mixed access for different collections + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpensearchserverlessSecurityPolicy } from "./.gen/providers/aws/opensearchserverless-security-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpensearchserverlessSecurityPolicy(this, "example", { + description: "Mixed access for marketing and sales", + name: "example", + policy: Token.asString( + Fn.jsonencode([ + { + AllowFromPublic: false, + Description: "Marketing access", + Rules: [ + { + Resource: ["collection/marketing*"], + ResourceType: "collection", + }, + { + Resource: ["collection/marketing*"], + ResourceType: "dashboard", + }, + ], + SourceVPCEs: ["vpce-050f79086ee71ac05"], + }, + { + AllowFromPublic: true, + Description: "Sales access", + Rules: [ + { + Resource: ["collection/finance"], + ResourceType: "collection", + }, + ], + }, + ]) + ), + type: "network", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the policy. +* `policy` - (Required) JSON policy document to use as the content for the new policy +* `type` - (Required) Type of security policy. One of `encryption` or `network`. + +The following arguments are optional: + +* `description` - (Optional) Description of the policy. Typically used to store information about the permissions defined in the policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `policyVersion` - Version of the policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless Security Policy using the `name` and `type` arguments separated by a slash (`/`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import OpenSearchServerless Security Policy using the `name` and `type` arguments separated by a slash (`/`). 
For example:
+
+```console
+% terraform import aws_opensearchserverless_security_policy.example example/encryption
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opensearchserverless_vpc_endpoint.html.markdown b/website/docs/cdktf/typescript/r/opensearchserverless_vpc_endpoint.html.markdown
new file mode 100644
index 00000000000..88788dce38d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opensearchserverless_vpc_endpoint.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "OpenSearch Serverless"
+layout: "aws"
+page_title: "AWS: aws_opensearchserverless_vpc_endpoint"
+description: |-
+  Terraform resource for managing an AWS OpenSearch Serverless VPC Endpoint.
+---
+
+
+
+# Resource: aws_opensearchserverless_vpc_endpoint
+
+Terraform resource for managing an AWS OpenSearch Serverless VPC Endpoint.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpensearchserverlessVpcEndpoint } from "./.gen/providers/aws/opensearchserverless-vpc-endpoint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpensearchserverlessVpcEndpoint(this, "example", {
+      name: "myendpoint",
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the interface endpoint.
+* `subnetIds` - (Required) One or more subnet IDs from which you'll access OpenSearch Serverless. Up to 6 subnets can be provided.
+* `vpcId` - (Required) ID of the VPC from which you'll access OpenSearch Serverless.
+
+The following arguments are optional:
+
+* `securityGroupIds` - (Optional) One or more security groups that define the ports, protocols, and sources for inbound traffic that you are authorizing into your endpoint. Up to 5 security groups can be provided.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Unique identifier of the VPC endpoint.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpenSearchServerless VPC endpoints using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpenSearchServerless VPC endpoints using the `id`.
For example:
+
+```console
+% terraform import aws_opensearchserverless_vpc_endpoint.example vpce-8012925589
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_application.html.markdown b/website/docs/cdktf/typescript/r/opsworks_application.html.markdown
new file mode 100644
index 00000000000..1a98901655e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_application.html.markdown
@@ -0,0 +1,137 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_application"
+description: |-
+  Provides an OpsWorks application resource.
+---
+
+
+
+# Resource: aws_opsworks_application
+
+Provides an OpsWorks application resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksApplication } from "./.gen/providers/aws/opsworks-application";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksApplication(this, "foo-app", {
+      appSource: [
+        {
+          revision: "master",
+          type: "git",
+          url: "https://github.com/example.git",
+        },
+      ],
+      autoBundleOnDeploy: Token.asString(true),
+      description: "This is a Rails application",
+      documentRoot: "public",
+      domains: ["example.com", "sub.example.com"],
+      enableSsl: true,
+      environment: [
+        {
+          key: "key",
+          secure: false,
+          value: "value",
+        },
+      ],
+      name: "foobar application",
+      railsEnv: "staging",
+      shortName: "foobar",
+      sslConfiguration: [
+        {
+          certificate: Token.asString(Fn.file("./foobar.crt")),
+          privateKey: Token.asString(Fn.file("./foobar.key")),
+        },
+      ],
+      stackId: main.id,
+      type: "rails",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A human-readable name for the application.
+* `shortName` - (Required) A short, machine-readable name for the application. This can only be defined on resource creation and is ignored on resource update.
+* `stackId` - (Required) ID of the stack the application will belong to.
+* `type` - (Required) OpsWorks application type. One of `aws-flow-ruby`, `java`, `rails`, `php`, `nodejs`, `static` or `other`.
+* `description` - (Optional) A description of the app.
+* `environment` - (Optional) Object to define environment variables. Object is described below.
+* `enableSsl` - (Optional) Whether to enable SSL for the app. This must be set in order for the `privateKey`, `certificate` and `chain` arguments of the `sslConfiguration` block to take effect.
+* `sslConfiguration` - (Optional) The SSL configuration of the app. Object is described below.
+* `appSource` - (Optional) SCM configuration of the app as described below.
+* `dataSourceArn` - (Optional) The data source's ARN.
+* `dataSourceType` - (Optional) The data source's type, one of `AutoSelectOpsworksMysqlInstance`, `OpsworksMysqlInstance`, or `RdsDbInstance`.
+* `dataSourceDatabaseName` - (Optional) The database name.
+* `domains` - (Optional) A list of virtual host aliases.
+* `documentRoot` - (Optional) Subfolder for the document root for application of type `rails`.
+* `autoBundleOnDeploy` - (Optional) Run bundle install when deploying for application of type `rails`.
+* `railsEnv` - (Required if `type` = `rails`) The name of the Rails environment for application of type `rails`. +* `awsFlowRubySettings` - (Optional) Specify activity and workflow workers for your app using the aws-flow gem. + +An `appSource` block supports the following arguments (can only be defined once per resource): + +* `type` - (Required) The type of source to use. For example, "archive". +* `url` - (Required) The URL where the app resource can be found. +* `username` - (Optional) Username to use when authenticating to the source. +* `password` - (Optional) Password to use when authenticating to the source. Terraform cannot perform drift detection of this configuration. +* `sshKey` - (Optional) SSH key to use when authenticating to the source. Terraform cannot perform drift detection of this configuration. +* `revision` - (Optional) For sources that are version-aware, the revision to use. + +An `environment` block supports the following arguments: + +* `key` - (Required) Variable name. +* `value` - (Required) Variable value. +* `secure` - (Optional) Set visibility of the variable value to `true` or `false`. + +A `sslConfiguration` block supports the following arguments (can only be defined once per resource): + +* `privateKey` - (Required) The private key; the contents of the certificate's domain.key file. +* `certificate` - (Required) The contents of the certificate's domain.crt file. +* `chain` - (Optional) Can be used to specify an intermediate certificate authority key or client authentication. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the application. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Opsworks Application using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Opsworks Application using the `id`. For example: + +```console +% terraform import aws_opsworks_application.test +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_custom_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_custom_layer.html.markdown new file mode 100644 index 00000000000..b2472a1d937 --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_custom_layer.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_custom_layer" +description: |- + Provides an OpsWorks custom layer resource. +--- + + + +# Resource: aws_opsworks_custom_layer + +Provides an OpsWorks custom layer resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { OpsworksCustomLayer } from "./.gen/providers/aws/opsworks-custom-layer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksCustomLayer(this, "custlayer", {
+      name: "My Awesome Custom Layer",
+      shortName: "awesome",
+      stackId: main.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A human-readable name for the layer.
+* `shortName` - (Required) A short, machine-readable name for the layer, which will be used to identify it in the Chef node JSON.
+* `stackId` - (Required) ID of the stack the layer will belong to.
+* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `cloudwatchConfiguration` - (Optional) Configures CloudWatch Logs for the layer's instances. See [Cloudwatch Configuration](#cloudwatch-configuration).
+* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `autoHealing` - (Optional) Whether to enable auto-healing for the layer.
+* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer.
+* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `loadBasedAutoScaling` - (Optional) Load-based auto scaling configuration. See [Load Based AutoScaling](#load-based-autoscaling).
+* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances.
+* `ebsVolume` - (Optional) Will create an EBS volume and connect it to the layer's instances. See [EBS Volume](#ebs-volume).
+* `customJson` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `customConfigureRecipes`
+* `customDeployRecipes`
+* `customSetupRecipes`
+* `customShutdownRecipes`
+* `customUndeployRecipes`
+
+### EBS Volume
+
+* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `numberOfDisks` - (Required) The number of disks to use for the EBS volume.
+* `raidLevel` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+* `encrypted` - (Optional) Encrypt the volume.
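+
+The following is a hedged, illustrative sketch (not part of the upstream documentation) of how an `ebsVolume` block might look in CDKTF; the `main` stack reference is carried over from the example above, and the mount point and sizes are assumptions:
+
+```typescript
+// Hypothetical sketch: attach two 100 GB gp2 volumes, striped as RAID 0,
+// to the custom layer's instances. All values are placeholders.
+new OpsworksCustomLayer(this, "custlayer_with_ebs", {
+  name: "My Custom Layer With EBS",
+  shortName: "awesome-ebs",
+  stackId: main.id,
+  ebsVolume: [
+    {
+      mountPoint: "/data", // assumed mount path on the instances
+      size: 100, // gigabytes per volume
+      numberOfDisks: 2,
+      raidLevel: "0",
+      type: "gp2",
+      encrypted: true,
+    },
+  ],
+});
+```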
+
+### Cloudwatch Configuration
+
+* `enabled` - (Optional) Whether CloudWatch Logs is enabled for the layer's instances.
+* `logStreams` - (Optional) A block that specifies how OpsWorks streams logs to CloudWatch Logs. See [Log Streams](#log-streams).
+
+#### Log Streams
+
+* `file` - (Required) Specifies log files that you want to push to CloudWatch Logs. File can point to a specific file or multiple files (by using wild card characters such as /var/log/system.log*).
+* `logGroupName` - (Required) Specifies the destination log group. A log group is created automatically if it doesn't already exist.
+* `batchCount` - (Optional) Specifies the max number of log events in a batch, up to `10000`. The default value is `1000`.
+* `batchSize` - (Optional) Specifies the maximum size of log events in a batch, in bytes, up to `1048576` bytes. The default value is `32768` bytes.
+* `bufferDuration` - (Optional) Specifies the time duration for the batching of log events. The minimum value is `5000` and the default value is `5000`.
+* `datetimeFormat` - (Optional) Specifies how the timestamp is extracted from logs. For more information, see the [CloudWatch Logs Agent Reference](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/AgentReference.html).
+* `encoding` - (Optional) Specifies the encoding of the log file so that the file can be read correctly. The default is `utf8`.
+* `fileFingerprintLines` - (Optional) Specifies the range of lines for identifying a file. The valid values are one number, or two dash-delimited numbers, such as `1` or `2-5`. The default value is `1`.
+* `initialPosition` - (Optional) Specifies where to start to read data (`start_of_file` or `end_of_file`). The default is `start_of_file`.
+* `multilineStartPattern` - (Optional) Specifies the pattern for identifying the start of a log message.
+* `timeZone` - (Optional) Specifies the time zone of log event time stamps.
+
+### Load Based Autoscaling
+
+* `downscaling` - (Optional) The downscaling settings, as defined below, used for load-based autoscaling.
+* `enable` - (Optional) Whether load-based auto scaling is enabled for the layer.
+* `upscaling` - (Optional) The upscaling settings, as defined below, used for load-based autoscaling.
+
+The `downscaling` and `upscaling` blocks support the following arguments. Though the three thresholds are optional, at least one threshold must be set when using load-based autoscaling:
+
+* `alarms` - (Optional) Custom CloudWatch auto scaling alarms, to be used as thresholds. This parameter takes a list of up to five alarm names, which are case sensitive and must be in the same region as the stack.
+* `cpuThreshold` - (Optional) The CPU utilization threshold, as a percent of the available CPU. A value of -1 disables the threshold.
+* `ignoreMetricsTime` - (Optional) The amount of time (in minutes) after a scaling event occurs that AWS OpsWorks Stacks should ignore metrics and suppress additional scaling events.
+* `instanceCount` - (Optional) The number of instances to add or remove when the load exceeds a threshold.
+* `loadThreshold` - (Optional) The load threshold. A value of -1 disables the threshold.
+* `memoryThreshold` - (Optional) The memory utilization threshold, as a percent of the available memory. A value of -1 disables the threshold.
+* `thresholdsWaitTime` - (Optional) The amount of time, in minutes, that the load must exceed a threshold before more instances are added or removed.
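+
+As a hedged illustration (not from the upstream documentation), `cloudwatchConfiguration` and `loadBasedAutoScaling` might be combined on a custom layer as follows; the log file path, log group name, thresholds, and instance counts are assumptions:
+
+```typescript
+// Hypothetical sketch: stream syslog to CloudWatch Logs and scale on CPU.
+new OpsworksCustomLayer(this, "custlayer_autoscaled", {
+  name: "My Autoscaled Custom Layer",
+  shortName: "autoscaled",
+  stackId: main.id,
+  cloudwatchConfiguration: {
+    enabled: true,
+    logStreams: [
+      {
+        file: "/var/log/system.log*", // wildcards are supported
+        logGroupName: "opsworks-custom-layer", // created automatically if missing
+      },
+    ],
+  },
+  loadBasedAutoScaling: {
+    enable: true,
+    upscaling: {
+      cpuThreshold: 80, // add instances above 80% CPU
+      instanceCount: 2,
+    },
+    downscaling: {
+      cpuThreshold: 20, // remove instances below 20% CPU
+      instanceCount: 1,
+    },
+  },
+});
+```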
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks Custom Layers using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpsWorks Custom Layers using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_custom_layer.bar 00000000-0000-0000-0000-000000000000
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_ecs_cluster_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_ecs_cluster_layer.html.markdown
new file mode 100644
index 00000000000..d843ccf923c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_ecs_cluster_layer.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_ecs_cluster_layer"
+description: |-
+  Provides an OpsWorks ECS Cluster layer resource.
+---
+
+
+
+# Resource: aws_opsworks_ecs_cluster_layer
+
+Provides an OpsWorks ECS Cluster layer resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksEcsClusterLayer } from "./.gen/providers/aws/opsworks-ecs-cluster-layer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksEcsClusterLayer(this, "example", {
+      ecsClusterArn: Token.asString(awsEcsClusterExample.arn),
+      stackId: Token.asString(awsOpsworksStackExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stackId` - (Required) ID of the stack the layer will belong to.
+* `ecsClusterArn` - (Required) The ECS Cluster ARN of the layer.
+* `name` - (Optional) A human-readable name for the layer.
+* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `autoHealing` - (Optional) Whether to enable auto-healing for the layer.
+* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances. +* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `customJson` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A mapping of tags to assign to the resource. + +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `customConfigureRecipes` +* `customDeployRecipes` +* `customSetupRecipes` +* `customShutdownRecipes` +* `customUndeployRecipes` + +An `ebsVolume` block supports the following arguments: + +* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `numberOfDisks` - (Required) The number of disks to use for the EBS volume. +* `raidLevel` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_ganglia_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_ganglia_layer.html.markdown new file mode 100644 index 00000000000..54bb0c69c9b --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_ganglia_layer.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_ganglia_layer" +description: |- + Provides an OpsWorks Ganglia layer resource. +--- + + + +# Resource: aws_opsworks_ganglia_layer + +Provides an OpsWorks Ganglia layer resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpsworksGangliaLayer } from "./.gen/providers/aws/opsworks-ganglia-layer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpsworksGangliaLayer(this, "monitor", { + password: "foobarbaz", + stackId: main.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stackId` - (Required) ID of the stack the layer will belong to. +* `password` - (Required) The password to use for Ganglia. +* `name` - (Optional) A human-readable name for the layer. +* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. 
+* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `autoHealing` - (Optional) Whether to enable auto-healing for the layer.
+* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer.
+* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `url` - (Optional) The URL path to use for Ganglia. Defaults to "/ganglia".
+* `username` - (Optional) The username to use for Ganglia. Defaults to "opsworks".
+* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances.
+* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `customJson` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `customConfigureRecipes`
+* `customDeployRecipes`
+* `customSetupRecipes`
+* `customShutdownRecipes`
+* `customUndeployRecipes`
+
+An `ebsVolume` block supports the following arguments:
+
+* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `numberOfDisks` - (Required) The number of disks to use for the EBS volume.
+* `raidLevel` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
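+
+For reference, a hedged sketch (not from the upstream documentation) of wiring the lifecycle recipe lists described above into the Ganglia layer; the recipe names are placeholders:
+
+```typescript
+// Hypothetical sketch: run custom Chef recipes at lifecycle events.
+// Assumes custom cookbooks are enabled on the stack referenced by `main`.
+new OpsworksGangliaLayer(this, "monitor_with_recipes", {
+  password: "foobarbaz",
+  stackId: main.id,
+  customSetupRecipes: ["monitoring::setup"], // placeholder recipe names
+  customConfigureRecipes: ["monitoring::configure"],
+  customShutdownRecipes: ["monitoring::drain"],
+});
+```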
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_haproxy_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_haproxy_layer.html.markdown new file mode 100644 index 00000000000..fc774db88e1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_haproxy_layer.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_haproxy_layer" +description: |- + Provides an OpsWorks HAProxy layer resource. +--- + + + +# Resource: aws_opsworks_haproxy_layer + +Provides an OpsWorks haproxy layer resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpsworksHaproxyLayer } from "./.gen/providers/aws/opsworks-haproxy-layer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpsworksHaproxyLayer(this, "lb", { + stackId: main.id, + statsPassword: "foobarbaz", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stackId` - (Required) ID of the stack the layer will belong to. +* `statsPassword` - (Required) The password to use for HAProxy stats. +* `name` - (Optional) A human-readable name for the layer. +* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. +* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `autoHealing` - (Optional) Whether to enable auto-healing for the layer. +* `healthcheckMethod` - (Optional) HTTP method to use for instance healthchecks. Defaults to "OPTIONS". +* `healthcheckUrl` - (Optional) URL path to use for instance healthchecks. Defaults to "/". +* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots. +* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `statsEnabled` - (Optional) Whether to enable HAProxy stats. +* `statsUrl` - (Optional) The HAProxy stats URL. Defaults to "/haproxy?stats". +* `statsUser` - (Optional) The username for HAProxy stats. Defaults to "opsworks". +* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances. +* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `customJson` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `customConfigureRecipes`
+* `customDeployRecipes`
+* `customSetupRecipes`
+* `customShutdownRecipes`
+* `customUndeployRecipes`
+
+An `ebsVolume` block supports the following arguments:
+
+* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `numberOfDisks` - (Required) The number of disks to use for the EBS volume.
+* `raidLevel` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_instance.html.markdown b/website/docs/cdktf/typescript/r/opsworks_instance.html.markdown
new file mode 100644
index 00000000000..432ea8ba052
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_instance.html.markdown
@@ -0,0 +1,174 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_instance"
+description: |-
+  Provides an OpsWorks instance resource.
+---
+
+
+
+# Resource: aws_opsworks_instance
+
+Provides an OpsWorks instance resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksInstance } from "./.gen/providers/aws/opsworks-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksInstance(this, "my-instance", {
+      instanceType: "t2.micro",
+      layerIds: [myLayer.id],
+      os: "Amazon Linux 2015.09",
+      stackId: main.id,
+      state: "stopped",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `layerIds` - (Required) List of the layers the instance will belong to.
+* `stackId` - (Required) Identifier of the stack the instance will belong to.
+
+The following arguments are optional:
+
+* `agentVersion` - (Optional) OpsWorks agent to install. Default is `inherit`.
+* `amiId` - (Optional) AMI to use for the instance. If an AMI is specified, `os` must be `custom`.
+* `architecture` - (Optional) Machine architecture for created instances. Valid values are `x86_64` or `i386`. The default is `x86_64`.
+* `autoScalingType` - (Optional) Creates load-based or time-based instances. Valid values are `load`, `timer`.
+* `availabilityZone` - (Optional) Name of the availability zone where instances will be created by default.
+* `deleteEbs` - (Optional) Whether to delete EBS volume on deletion. Default is `true`.
+* `deleteEip` - (Optional) Whether to delete the Elastic IP on deletion.
+* `ebsBlockDevice` - (Optional) Configuration block for additional EBS block devices to attach to the instance. See [Block Devices](#block-devices) below.
+* `ebsOptimized` - (Optional) Whether the launched EC2 instance will be EBS-optimized.
+* `ecsClusterArn` - (Optional) ECS cluster's ARN for container instances.
+* `elasticIp` - (Optional) Instance Elastic IP address.
+* `ephemeralBlockDevice` - (Optional) Configuration block for ephemeral (also known as "Instance Store") volumes on the instance. See [Block Devices](#block-devices) below.
+* `hostname` - (Optional) Instance's host name.
+* `infrastructureClass` - (Optional) For registered instances, infrastructure class: ec2 or on-premises.
+* `installUpdatesOnBoot` - (Optional) Controls whether to install OS and package updates when the instance boots. Default is `true`.
+* `instanceProfileArn` - (Optional) ARN of the instance's IAM profile.
+* `instanceType` - (Optional) Type of instance to start.
+* `os` - (Optional) Name of operating system that will be installed.
+* `rootBlockDevice` - (Optional) Configuration block for the root block device of the instance. See [Block Devices](#block-devices) below.
+* `rootDeviceType` - (Optional) Name of the type of root device instances will have by default. Valid values are `ebs` or `instance-store`.
+* `sshKeyName` - (Optional) Name of the SSH keypair that instances will have by default.
+* `state` - (Optional) Desired state of the instance. Valid values are `running` or `stopped`.
+* `subnetId` - (Optional) Subnet ID to attach to.
+* `tenancy` - (Optional) Instance tenancy to use. Valid values are `default`, `dedicated` or `host`.
+* `virtualizationType` - (Optional) Keyword to choose what virtualization mode created instances will use. Valid values are `paravirtual` or `hvm`.
+
+## Block devices
+
+Each of the `*BlockDevice` attributes controls a portion of the AWS
+Instance's "Block Device Mapping". It's a good idea to familiarize yourself with [AWS's Block Device
+Mapping docs](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html)
+to understand the implications of using these attributes.
+
+### `ebsBlockDevice`
+
+* `deleteOnTermination` - (Optional) Whether the volume should be destroyed on instance termination. Default is `true`.
+* `deviceName` - (Required) Name of the device to mount.
+* `iops` - (Optional) Amount of provisioned [IOPS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). This must be set with a `volumeType` of `io1`.
+* `snapshotId` - (Optional) Snapshot ID to mount.
+* `volumeSize` - (Optional) Size of the volume in gigabytes.
+* `volumeType` - (Optional) Type of volume. Valid values are `standard`, `gp2`, or `io1`. Default is `standard`.
+
+Modifying any `ebsBlockDevice` currently requires resource replacement.
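+
+As a hedged illustration (not from the upstream documentation), an `ebsBlockDevice` entry might be configured as follows; the device name and volume size are assumptions, and `myLayer`/`main` are carried over from the example above:
+
+```typescript
+// Hypothetical sketch: instance with an additional 100 GB gp2 EBS volume.
+new OpsworksInstance(this, "my-instance-with-ebs", {
+  instanceType: "t2.micro",
+  layerIds: [myLayer.id],
+  stackId: main.id,
+  ebsBlockDevice: [
+    {
+      deviceName: "/dev/sdf", // assumed device name
+      volumeSize: 100, // gigabytes
+      volumeType: "gp2",
+      deleteOnTermination: true,
+    },
+  ],
+});
+```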
+### `ephemeralBlockDevice`
+
+* `deviceName` - Name of the block device to mount on the instance.
+* `virtualName` - The [Instance Store Device Name](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#InstanceStoreDeviceNames) (e.g., `ephemeral0`).
+
+Each AWS Instance type has a different set of Instance Store block devices
+available for attachment. AWS [publishes a
+list](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html#StorageOnInstanceTypes)
+of which ephemeral devices are available on each type. The devices are always
+identified by the `virtualName` in the format `ephemeral{0..N}`.
+
+### `rootBlockDevice`
+
+* `deleteOnTermination` - (Optional) Whether the volume should be destroyed on instance termination. Default is `true`.
+* `iops` - (Optional) Amount of provisioned [IOPS](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-io-characteristics.html). This must be set with a `volumeType` of `io1`.
+* `volumeSize` - (Optional) Size of the volume in gigabytes.
+* `volumeType` - (Optional) Type of volume. Valid values are `standard`, `gp2`, or `io1`. Default is `standard`.
+
+Modifying any of the `rootBlockDevice` settings requires resource
+replacement.
+
+~> **NOTE:** Currently, changes to `*BlockDevice` configuration of _existing_
+resources cannot be automatically detected by Terraform. After making updates
+to block device configuration, resource recreation can be manually triggered by
+using the [`taint` command](https://www.terraform.io/docs/commands/taint.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `createdAt` - Time that the instance was created.
+* `ec2InstanceId` - EC2 instance ID.
+* `id` - ID of the OpsWorks instance.
+* `lastServiceErrorId` - ID of the last service error.
+* `platform` - Instance's platform.
+* `privateDns` - Private DNS name assigned to the instance. Can only be used inside the Amazon EC2, and only available if you've enabled DNS hostnames for your VPC.
+* `privateIp` - Private IP address assigned to the instance.
+* `publicDns` - Public DNS name assigned to the instance. For EC2-VPC, this is only available if you've enabled DNS hostnames for your VPC.
+* `publicIp` - Public IP address assigned to the instance, if applicable.
+* `registeredBy` - For registered instances, who performed the registration.
+* `reportedAgentVersion` - Instance's reported AWS OpsWorks Stacks agent version.
+* `reportedOsFamily` - For registered instances, the reported operating system family.
+* `reportedOsName` - For registered instances, the reported operating system name.
+* `reportedOsVersion` - For registered instances, the reported operating system version.
+* `rootDeviceVolumeId` - Root device volume ID.
+* `securityGroupIds` - Associated security groups.
+* `sshHostDsaKeyFingerprint` - SSH host key's DSA fingerprint.
+* `sshHostRsaKeyFingerprint` - SSH host key's RSA fingerprint.
+* `status` - Instance status. Will be one of `booting`, `connection_lost`, `online`, `pending`, `rebooting`, `requested`, `running_setup`, `setup_failed`, `shutting_down`, `start_failed`, `stop_failed`, `stopped`, `stopping`, `terminated`, or `terminating`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+- `update` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Opsworks Instances using the instance `id`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Opsworks Instances using the instance `id`. For example: + +```console +% terraform import aws_opsworks_instance.my_instance 4d6d1710-ded9-42a1-b08e-b043ad7af1e2 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_java_app_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_java_app_layer.html.markdown new file mode 100644 index 00000000000..9f5c28e0a74 --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_java_app_layer.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_java_app_layer" +description: |- + Provides an OpsWorks Java application layer resource. +--- + + + +# Resource: aws_opsworks_java_app_layer + +Provides an OpsWorks Java application layer resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpsworksJavaAppLayer } from "./.gen/providers/aws/opsworks-java-app-layer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpsworksJavaAppLayer(this, "app", { + stackId: main.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stackId` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `appServer` - (Optional) Keyword for the application container to use. Defaults to "tomcat". +* `appServerVersion` - (Optional) Version of the selected application container to use. Defaults to "7". +* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. +* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `autoHealing` - (Optional) Whether to enable auto-healing for the layer. +* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots. +* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `jvmType` - (Optional) Keyword for the type of JVM to use. Defaults to `openjdk`. +* `jvmOptions` - (Optional) Options to set for the JVM. +* `jvmVersion` - (Optional) Version of JVM to use. Defaults to "7". +* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances. 
+* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances. +* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `customJson` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `customConfigureRecipes` +* `customDeployRecipes` +* `customSetupRecipes` +* `customShutdownRecipes` +* `customUndeployRecipes` + +An `ebsVolume` block supports the following arguments: + +* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `numberOfDisks` - (Required) The number of disks to use for the EBS volume. +* `raidLevel` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_memcached_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_memcached_layer.html.markdown new file mode 100644 index 00000000000..93afdd2a9eb --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_memcached_layer.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_memcached_layer" +description: |- + Provides an OpsWorks memcached layer resource. +--- + + + +# Resource: aws_opsworks_memcached_layer + +Provides an OpsWorks memcached layer resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpsworksMemcachedLayer } from "./.gen/providers/aws/opsworks-memcached-layer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpsworksMemcachedLayer(this, "cache", { + stackId: main.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stackId` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `allocatedMemory` - (Optional) Amount of memory to allocate for the cache on each instance, in megabytes. Defaults to 512MB. 
+* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. +* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `autoHealing` - (Optional) Whether to enable auto-healing for the layer. +* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots. +* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances. +* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `customJson` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `customConfigureRecipes` +* `customDeployRecipes` +* `customSetupRecipes` +* `customShutdownRecipes` +* `customUndeployRecipes` + +An `ebsVolume` block supports the following arguments: + +* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `numberOfDisks` - (Required) The number of disks to use for the EBS volume. +* `raidLevel` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_mysql_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_mysql_layer.html.markdown new file mode 100644 index 00000000000..c21857e3a0a --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_mysql_layer.html.markdown @@ -0,0 +1,90 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_mysql_layer" +description: |- + Provides an OpsWorks MySQL layer resource. 
+--- + + + +# Resource: aws_opsworks_mysql_layer + +Provides an OpsWorks MySQL layer resource. + +~> **Note:** All arguments including the root password will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpsworksMysqlLayer } from "./.gen/providers/aws/opsworks-mysql-layer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpsworksMysqlLayer(this, "db", { + stackId: main.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stackId` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. +* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `autoHealing` - (Optional) Whether to enable auto-healing for the layer. +* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots. +* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `rootPassword` - (Optional) Root password to use for MySQL. +* `rootPasswordOnAllInstances` - (Optional) Whether to set the root user password to all instances in the stack so they can access the instances in this layer. +* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances. +* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `customJson` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+ +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `customConfigureRecipes` +* `customDeployRecipes` +* `customSetupRecipes` +* `customShutdownRecipes` +* `customUndeployRecipes` + +An `ebsVolume` block supports the following arguments: + +* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `numberOfDisks` - (Required) The number of disks to use for the EBS volume. +* `raidLevel` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_nodejs_app_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_nodejs_app_layer.html.markdown new file mode 100644 index 00000000000..9556fa84e69 --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_nodejs_app_layer.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_nodejs_app_layer" +description: |- + Provides an OpsWorks NodeJS application layer resource. +--- + + + +# Resource: aws_opsworks_nodejs_app_layer + +Provides an OpsWorks NodeJS application layer resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OpsworksNodejsAppLayer } from "./.gen/providers/aws/opsworks-nodejs-app-layer"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OpsworksNodejsAppLayer(this, "app", { + stackId: main.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `stackId` - (Required) ID of the stack the layer will belong to. +* `name` - (Optional) A human-readable name for the layer. +* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances. +* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances. +* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances. +* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances. +* `autoHealing` - (Optional) Whether to enable auto-healing for the layer. +* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots. 
+* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event. +* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer +* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining. +* `nodejsVersion` - (Optional) The version of NodeJS to use. Defaults to "0.10.38". +* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances. +* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances. +* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. +* `customJson` - (Optional) Custom JSON attributes to apply to the layer. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The following extra optional arguments, all lists of Chef recipe names, allow +custom Chef recipes to be applied to layer instances at the five different +lifecycle events, if custom cookbooks are enabled on the layer's stack: + +* `customConfigureRecipes` +* `customDeployRecipes` +* `customSetupRecipes` +* `customShutdownRecipes` +* `customUndeployRecipes` + +An `ebsVolume` block supports the following arguments: + +* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances. +* `size` - (Required) The size of the volume in gigabytes. +* `numberOfDisks` - (Required) The number of disks to use for the EBS volume. +* `raidLevel` - (Required) The RAID level to use for the volume. +* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`. +* `iops` - (Optional) For PIOPS volumes, the IOPS per disk. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id of the layer. +* `arn` - The Amazon Resource Name(ARN) of the layer. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/opsworks_permission.html.markdown b/website/docs/cdktf/typescript/r/opsworks_permission.html.markdown new file mode 100644 index 00000000000..b339513f2c7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/opsworks_permission.html.markdown @@ -0,0 +1,57 @@ +--- +subcategory: "OpsWorks" +layout: "aws" +page_title: "AWS: aws_opsworks_permission" +description: |- + Provides an OpsWorks permission resource. +--- + + + +# Resource: aws_opsworks_permission + +Provides an OpsWorks permission resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { OpsworksPermission } from "./.gen/providers/aws/opsworks-permission";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksPermission(this, "my_stack_permission", {
+      allowSsh: true,
+      allowSudo: true,
+      level: "iam_only",
+      stackId: stack.id,
+      userArn: user.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `allowSsh` - (Optional) Whether the user is allowed to use SSH to communicate with the instance.
+* `allowSudo` - (Optional) Whether the user is allowed to use sudo to elevate privileges.
+* `userArn` - (Required) The user's IAM ARN to set permissions for.
+* `level` - (Optional) The user's permission level. Must be one of `deny`, `show`, `deploy`, `manage`, or `iam_only`.
+* `stackId` - (Required) The stack to set the permissions for.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The computed id of the permission. Please note that this is only used internally to identify the permission. This value is not used in AWS.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_php_app_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_php_app_layer.html.markdown
new file mode 100644
index 00000000000..cc520be5618
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_php_app_layer.html.markdown
@@ -0,0 +1,107 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_php_app_layer"
+description: |-
+  Provides an OpsWorks PHP application layer resource.
+---
+
+
+
+# Resource: aws_opsworks_php_app_layer
+
+Provides an OpsWorks PHP application layer resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksPhpAppLayer } from "./.gen/providers/aws/opsworks-php-app-layer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksPhpAppLayer(this, "app", {
+      stackId: main.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stackId` - (Required) ID of the stack the layer will belong to.
+* `name` - (Optional) A human-readable name for the layer.
+* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `autoHealing` - (Optional) Whether to enable auto-healing for the layer.
+* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer.
+* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances.
+* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `customJson` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `customConfigureRecipes`
+* `customDeployRecipes`
+* `customSetupRecipes`
+* `customShutdownRecipes`
+* `customUndeployRecipes`
+
+An `ebsVolume` block supports the following arguments:
+
+* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `numberOfDisks` - (Required) The number of disks to use for the EBS volume.
+* `raidLevel` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks PHP Application Layers using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpsWorks PHP Application Layers using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_php_app_layer.bar 00000000-0000-0000-0000-000000000000
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_rails_app_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_rails_app_layer.html.markdown
new file mode 100644
index 00000000000..a71ece2c595
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_rails_app_layer.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_rails_app_layer"
+description: |-
+  Provides an OpsWorks Ruby on Rails application layer resource.
+---
+
+
+
+# Resource: aws_opsworks_rails_app_layer
+
+Provides an OpsWorks Ruby on Rails application layer resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksRailsAppLayer } from "./.gen/providers/aws/opsworks-rails-app-layer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksRailsAppLayer(this, "app", {
+      stackId: main.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stackId` - (Required) ID of the stack the layer will belong to.
+* `name` - (Optional) A human-readable name for the layer.
+* `appServer` - (Optional) Keyword for the app server to use. Defaults to "apache_passenger".
+* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `bundlerVersion` - (Optional) When OpsWorks is managing Bundler, which version to use. Defaults to "1.5.3".
+* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `autoHealing` - (Optional) Whether to enable auto-healing for the layer.
+* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer.
+* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `manageBundler` - (Optional) Whether OpsWorks should manage Bundler. On by default.
+* `passengerVersion` - (Optional) The version of Passenger to use. Defaults to "4.0.46".
+* `rubyVersion` - (Optional) The version of Ruby to use. Defaults to "2.0.0".
+* `rubygemsVersion` - (Optional) The version of RubyGems to use. Defaults to "2.2.2".
+* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances.
+* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances.
+* `customJson` - (Optional) Custom JSON attributes to apply to the layer.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
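+
+For instance, a layer that pins the Ruby toolchain might look like the following minimal sketch; the version values simply restate the documented defaults above, and `main` is assumed to be an existing `aws_opsworks_stack` as in the example above:
+
+```typescript
+// A minimal sketch (assumed values): pin the Ruby toolchain for the layer.
+new OpsworksRailsAppLayer(this, "app_pinned", {
+  stackId: main.id, // existing stack, as in the example above
+  appServer: "apache_passenger", // documented default
+  rubyVersion: "2.0.0", // documented default
+  bundlerVersion: "1.5.3", // documented default
+  manageBundler: true,
+});
+```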
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `customConfigureRecipes`
+* `customDeployRecipes`
+* `customSetupRecipes`
+* `customShutdownRecipes`
+* `customUndeployRecipes`
+
+An `ebsVolume` block supports the following arguments:
+
+* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `numberOfDisks` - (Required) The number of disks to use for the EBS volume.
+* `raidLevel` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_rds_db_instance.html.markdown b/website/docs/cdktf/typescript/r/opsworks_rds_db_instance.html.markdown
new file mode 100644
index 00000000000..c14bf491c95
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_rds_db_instance.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_rds_db_instance"
+description: |-
+  Provides an OpsWorks RDS DB Instance resource.
+---
+
+
+
+# Resource: aws_opsworks_rds_db_instance
+
+Provides an OpsWorks RDS DB Instance resource.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksRdsDbInstance } from "./.gen/providers/aws/opsworks-rds-db-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksRdsDbInstance(this, "my_instance", {
+      dbPassword: "somePass",
+      dbUser: "someUser",
+      rdsDbInstanceArn: Token.asString(awsDbInstanceMyInstance.arn),
+      stackId: myStack.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stackId` - (Required) The stack to register a db instance for. Changing this will force a new resource.
+* `rdsDbInstanceArn` - (Required) The db instance to register for this stack. Changing this will force a new resource.
+* `dbUser` - (Required) A db username.
+* `dbPassword` - (Required) A db password.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The computed id. Please note that this is only used internally to identify the stack <-> instance relation. This value is not used in AWS.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_stack.html.markdown b/website/docs/cdktf/typescript/r/opsworks_stack.html.markdown
new file mode 100644
index 00000000000..be62ba7fbf0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_stack.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_stack"
+description: |-
+  Provides an OpsWorks stack resource.
+---
+
+
+
+# Resource: aws_opsworks_stack
+
+Provides an OpsWorks stack resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksStack } from "./.gen/providers/aws/opsworks-stack";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksStack(this, "main", {
+      customJson: '{\n "foobar": {\n "version": "1.0.0"\n }\n}\n\n',
+      defaultInstanceProfileArn: opsworks.arn,
+      name: "awesome-stack",
+      region: "us-west-1",
+      serviceRoleArn: Token.asString(awsIamRoleOpsworks.arn),
+      tags: {
+        Name: "foobar-terraform-stack",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the stack.
+* `region` - (Required) The name of the region where the stack will exist.
+* `serviceRoleArn` - (Required) The ARN of an IAM role that the OpsWorks service will act as.
+* `defaultInstanceProfileArn` - (Required) The ARN of an IAM Instance Profile that created instances will have by default.
+* `agentVersion` - (Optional) If set to `"latest"`, OpsWorks will automatically install the latest version.
+* `berkshelfVersion` - (Optional) If `manageBerkshelf` is enabled, the version of Berkshelf to use.
+* `color` - (Optional) Color to paint next to the stack's resources in the OpsWorks console.
+* `configurationManagerName` - (Optional) Name of the configuration manager to use. Defaults to "Chef".
+* `configurationManagerVersion` - (Optional) Version of the configuration manager to use. Defaults to "11.4".
+* `customCookbooksSource` - (Optional) When `useCustomCookbooks` is set, provide this sub-object as described below.
+* `customJson` - (Optional) User defined JSON passed to "Chef". Use a "here doc" for multiline JSON.
+* `defaultAvailabilityZone` - (Optional) Name of the availability zone where instances will be created by default.
+  Cannot be set when `vpcId` is set.
+* `defaultOs` - (Optional) Name of OS that will be installed on instances by default.
+* `defaultRootDeviceType` - (Optional) Name of the type of root device instances will have by default.
+* `defaultSshKeyName` - (Optional) Name of the SSH keypair that instances will have by default.
+* `defaultSubnetId` - (Optional) ID of the subnet in which instances will be created by default.
+  Required if `vpcId` is set to a VPC other than the default VPC, and forbidden if it isn't.
+* `hostnameTheme` - (Optional) Keyword representing the naming scheme that will be used for instance hostnames within this stack.
+* `manageBerkshelf` - (Optional) Boolean value controlling whether OpsWorks will run Berkshelf for this stack.
+* `tags` - (Optional) A map of tags to assign to the resource.
+  If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `useCustomCookbooks` - (Optional) Boolean value controlling whether the custom cookbook settings are enabled.
+* `useOpsworksSecurityGroups` - (Optional) Boolean value controlling whether the standard OpsWorks security groups apply to created instances.
+* `vpcId` - (Optional) ID of the VPC that this stack belongs to.
+  Defaults to the region's default VPC.
+
+The `customCookbooksSource` block supports the following arguments:
+
+* `type` - (Required) The type of source to use. For example, "archive".
+* `url` - (Required) The URL where the cookbooks resource can be found.
+* `username` - (Optional) Username to use when authenticating to the source.
+* `password` - (Optional) Password to use when authenticating to the source. Terraform cannot perform drift detection of this configuration.
+* `sshKey` - (Optional) SSH key to use when authenticating to the source. Terraform cannot perform drift detection of this configuration.
+* `revision` - (Optional) For sources that are version-aware, the revision to use.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the stack.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks stacks using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpsWorks stacks using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_stack.bar 00000000-0000-0000-0000-000000000000
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_static_web_layer.html.markdown b/website/docs/cdktf/typescript/r/opsworks_static_web_layer.html.markdown
new file mode 100644
index 00000000000..74081482473
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_static_web_layer.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_static_web_layer"
+description: |-
+  Provides an OpsWorks static web server layer resource.
+---
+
+
+
+# Resource: aws_opsworks_static_web_layer
+
+Provides an OpsWorks static web server layer resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksStaticWebLayer } from "./.gen/providers/aws/opsworks-static-web-layer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksStaticWebLayer(this, "web", {
+      stackId: main.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `stackId` - (Required) ID of the stack the layer will belong to.
+* `name` - (Optional) A human-readable name for the layer.
+* `autoAssignElasticIps` - (Optional) Whether to automatically assign an elastic IP address to the layer's instances.
+* `autoAssignPublicIps` - (Optional) For stacks belonging to a VPC, whether to automatically assign a public IP address to each of the layer's instances.
+* `customInstanceProfileArn` - (Optional) The ARN of an IAM profile that will be used for the layer's instances.
+* `customSecurityGroupIds` - (Optional) Ids for a set of security groups to apply to the layer's instances.
+* `autoHealing` - (Optional) Whether to enable auto-healing for the layer.
+* `installUpdatesOnBoot` - (Optional) Whether to install OS and package updates on each instance when it boots.
+* `instanceShutdownTimeout` - (Optional) The time, in seconds, that OpsWorks will wait for Chef to complete after triggering the Shutdown event.
+* `elasticLoadBalancer` - (Optional) Name of an Elastic Load Balancer to attach to this layer.
+* `drainElbOnShutdown` - (Optional) Whether to enable Elastic Load Balancing connection draining.
+* `systemPackages` - (Optional) Names of a set of system packages to install on the layer's instances.
+* `useEbsOptimizedInstances` - (Optional) Whether to use EBS-optimized instances.
+* `ebsVolume` - (Optional) `ebsVolume` blocks, as described below, will each create an EBS volume and connect it to the layer's instances. A usage sketch follows the attribute list below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following extra optional arguments, all lists of Chef recipe names, allow
+custom Chef recipes to be applied to layer instances at the five different
+lifecycle events, if custom cookbooks are enabled on the layer's stack:
+
+* `customConfigureRecipes`
+* `customDeployRecipes`
+* `customSetupRecipes`
+* `customShutdownRecipes`
+* `customUndeployRecipes`
+
+An `ebsVolume` block supports the following arguments:
+
+* `mountPoint` - (Required) The path to mount the EBS volume on the layer's instances.
+* `size` - (Required) The size of the volume in gigabytes.
+* `numberOfDisks` - (Required) The number of disks to use for the EBS volume.
+* `raidLevel` - (Required) The RAID level to use for the volume.
+* `type` - (Optional) The type of volume to create. This may be `standard` (the default), `io1` or `gp2`.
+* `iops` - (Optional) For PIOPS volumes, the IOPS per disk.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The id of the layer.
+* `arn` - The Amazon Resource Name (ARN) of the layer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
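+
+As an illustration of the `ebsVolume` block described above, the following minimal sketch attaches striped `gp2` volumes to each of the layer's instances; the mount point, sizes, and disk count are assumed example values, and `main` is an existing `aws_opsworks_stack` as in the example above:
+
+```typescript
+// A minimal sketch (assumed values): mount EBS storage on each instance in the layer.
+new OpsworksStaticWebLayer(this, "web_with_volume", {
+  stackId: main.id,
+  ebsVolume: [
+    {
+      mountPoint: "/data", // assumed example path
+      size: 100, // gigabytes
+      numberOfDisks: 2,
+      raidLevel: "0", // stripe across the two disks
+      type: "gp2",
+    },
+  ],
+});
```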
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import OpsWorks static web server Layers using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import OpsWorks static web server Layers using the `id`. For example:
+
+```console
+% terraform import aws_opsworks_static_web_layer.bar 00000000-0000-0000-0000-000000000000
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/opsworks_user_profile.html.markdown b/website/docs/cdktf/typescript/r/opsworks_user_profile.html.markdown
new file mode 100644
index 00000000000..1308faac9b9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/opsworks_user_profile.html.markdown
@@ -0,0 +1,53 @@
+---
+subcategory: "OpsWorks"
+layout: "aws"
+page_title: "AWS: aws_opsworks_user_profile"
+description: |-
+  Provides an OpsWorks User Profile resource.
+---
+
+
+
+# Resource: aws_opsworks_user_profile
+
+Provides an OpsWorks User Profile resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OpsworksUserProfile } from "./.gen/providers/aws/opsworks-user-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OpsworksUserProfile(this, "my_profile", {
+      sshUsername: "my_user",
+      userArn: user.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `userArn` - (Required) The user's IAM ARN.
+* `allowSelfManagement` - (Optional) Whether users can specify their own SSH public key through the My Settings page.
+* `sshUsername` - (Required) The SSH username with which this user wants to log in.
+* `sshPublicKey` - (Optional) The user's public key.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Same value as `userArn`.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/organizations_account.html.markdown b/website/docs/cdktf/typescript/r/organizations_account.html.markdown
new file mode 100644
index 00000000000..399cba9fda0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/organizations_account.html.markdown
@@ -0,0 +1,129 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_account"
+description: |-
+  Provides a resource to create a member account in the current AWS Organization.
+---
+
+
+
+# Resource: aws_organizations_account
+
+Provides a resource to create a member account in the current organization.
+
+~> **Note:** Account management must be done from the organization's root account.
+
+~> **Note:** By default, deleting this Terraform resource will only remove an AWS account from an organization. You must set the `closeOnDeletion` flag to true to close the account. It is worth noting that quotas are enforced when using the `closeOnDeletion` argument, which can produce a [CLOSE_ACCOUNT_QUOTA_EXCEEDED](https://docs.aws.amazon.com/organizations/latest/APIReference/API_CloseAccount.html) error, and require you to close the account manually.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OrganizationsAccount } from "./.gen/providers/aws/organizations-account";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OrganizationsAccount(this, "account", {
+      email: "john@doe.org",
+      name: "my_new_account",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `email` - (Required) Email address of the owner to assign to the new member account. This email address must not already be associated with another AWS account.
+* `name` - (Required) Friendly name for the member account.
+
+The following arguments are optional:
+
+* `closeOnDeletion` - (Optional) If true, a deletion event will close the account. Otherwise, it will only remove the account from the organization. This is not supported for GovCloud accounts.
+* `createGovcloud` - (Optional) Whether to also create a GovCloud account. The GovCloud account is tied to the main (commercial) account this resource creates. If `true`, the GovCloud account ID is available in the `govcloudId` attribute. The only way to manage the GovCloud account with Terraform is to subsequently import the account using this resource.
+* `iamUserAccessToBilling` - (Optional) If set to `allow`, the new account enables IAM users and roles to access account billing information if they have the required permissions. If set to `deny`, then only the root user (and no roles) of the new account can access account billing information. If this is unset, the AWS API will default this to `allow`. If the resource is created and this option is changed, it will try to recreate the account.
+* `parentId` - (Optional) Parent Organizational Unit ID or Root ID for the account. Defaults to the Organization default Root ID. A configuration must be present for this argument to perform drift detection.
+* `roleName` - (Optional) The name of an IAM role that Organizations automatically preconfigures in the new member account. This role trusts the root account, allowing users in the root account to assume the role, as permitted by the root account administrator. The role has administrator permissions in the new member account. The Organizations API provides no method for reading this information after account creation, so Terraform cannot perform drift detection on its value and will always show a difference for a configured value after import unless [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) is used.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
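+
+Putting the optional arguments together, a member account that is closed (rather than only removed) when the resource is destroyed might look like the following minimal sketch; the email, OU ID, and role name are assumed example values:
+
+```typescript
+// A minimal sketch (assumed values): a member account that is closed on deletion.
+new OrganizationsAccount(this, "dev_account", {
+  email: "dev@example.com", // must not already be associated with an AWS account
+  name: "dev",
+  closeOnDeletion: true, // close the account instead of only removing it
+  parentId: "ou-abcd-12345678", // assumed example Organizational Unit ID
+  roleName: "OrganizationAccountAccessRole", // assumed example role name
+});
+```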
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN for this account.
+* `govcloudId` - ID for a GovCloud account created with the account.
+* `id` - The AWS account ID.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the AWS member account using the `accountId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import the AWS member account using the `accountId`. For example:
+
+```console
+% terraform import aws_organizations_account.my_account 111111111111
+```
+
+Certain resource arguments, like `roleName`, do not have an Organizations API method for reading the information after account creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OrganizationsAccount } from "./.gen/providers/aws/organizations-account";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OrganizationsAccount(this, "account", {
+      email: "john@doe.org",
+      lifecycle: {
+        ignoreChanges: [roleName],
+      },
+      name: "my_new_account",
+      roleName: "myOrganizationRole",
+    });
+  }
+}
+
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/organizations_delegated_administrator.html.markdown b/website/docs/cdktf/typescript/r/organizations_delegated_administrator.html.markdown
new file mode 100644
index 00000000000..b95fe36c006
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/organizations_delegated_administrator.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_delegated_administrator"
+description: |-
+  Provides a resource to manage an AWS Organizations Delegated Administrator.
+---
+
+
+
+# Resource: aws_organizations_delegated_administrator
+
+Provides a resource to manage an [AWS Organizations Delegated Administrator](https://docs.aws.amazon.com/organizations/latest/APIReference/API_RegisterDelegatedAdministrator.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { OrganizationsDelegatedAdministrator } from "./.gen/providers/aws/organizations-delegated-administrator";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new OrganizationsDelegatedAdministrator(this, "example", {
+      accountId: "123456789012",
+      servicePrincipal: "principal",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accountId` - (Required) The account ID number of the member account in the organization to register as a delegated administrator.
+* `servicePrincipal` - (Required) The service principal of the AWS service for which you want to make the member account a delegated administrator.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The unique identifier (ID) of the delegated administrator.
+* `arn` - The Amazon Resource Name (ARN) of the delegated administrator's account.
+* `delegationEnabledDate` - The date when the account was made a delegated administrator.
+* `email` - The email address that is associated with the delegated administrator's AWS account.
+* `joinedMethod` - The method by which the delegated administrator's account joined the organization.
+* `joinedTimestamp` - The date when the delegated administrator's account became a part of the organization.
+* `name` - The friendly name of the delegated administrator's account.
+* `status` - The status of the delegated administrator's account in the organization.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsOrganizationsDelegatedAdministrator` using the account ID and its service principal. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsOrganizationsDelegatedAdministrator` using the account ID and its service principal. For example:
+
+```console
+% terraform import aws_organizations_delegated_administrator.example 123456789012/config.amazonaws.com
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/organizations_organization.html.markdown b/website/docs/cdktf/typescript/r/organizations_organization.html.markdown
new file mode 100644
index 00000000000..f0b6af8fd91
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/organizations_organization.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "Organizations"
+layout: "aws"
+page_title: "AWS: aws_organizations_organization"
+description: |-
+  Provides a resource to create an organization.
+---
+
+
+
+# Resource: aws_organizations_organization
+
+Provides a resource to create an organization.
+
+!> **WARNING:** When migrating from a `featureSet` of `consolidatedBilling` to `all`, the Organization account owner will receive an email stating the following: "You started the process to enable all features for your AWS organization. As part of that process, all member accounts that joined your organization by invitation must approve the change. You don’t need approval from member accounts that you directly created from within your AWS organization."
After all member accounts have accepted the invitation, the Organization account owner must then finalize the changes via the [AWS Console](https://console.aws.amazon.com/organizations/home#/organization/settings/migration-progress). Until these steps are performed, Terraform will perpetually show a difference, and the `describeOrganization` API will continue to show the `featureSet` as `consolidatedBilling`. See the [AWS Organizations documentation](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html) for more information. + +!> **WARNING:** [Warning from the AWS Docs](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnableAWSServiceAccess.html): "We recommend that you enable integration between AWS Organizations and the specified AWS service by using the console or commands that are provided by the specified service. Doing so ensures that the service is aware that it can create the resources that are required for the integration. How the service creates those resources in the organization's accounts depends on that service. For more information, see the documentation for the other AWS service." + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OrganizationsOrganization(this, "org", { + awsServiceAccessPrincipals: [ + "cloudtrail.amazonaws.com", + "config.amazonaws.com", + ], + featureSet: "ALL", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `awsServiceAccessPrincipals` - (Optional) List of AWS service principal names for which you want to enable integration with your organization. This is typically in the form of a URL, such as service-abbreviation.amazonaws.com. Organization must have `featureSet` set to `all`. Some services do not support enablement via this endpoint, see [warning in aws docs](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnableAWSServiceAccess.html). +* `enabledPolicyTypes` - (Optional) List of Organizations policy types to enable in the Organization Root. Organization must have `featureSet` set to `all`. For additional information about valid policy types (e.g., `aiservicesOptOutPolicy`, `backupPolicy`, `serviceControlPolicy`, and `tagPolicy`), see the [AWS Organizations API Reference](https://docs.aws.amazon.com/organizations/latest/APIReference/API_EnablePolicyType.html). +* `featureSet` - (Optional) Specify "ALL" (default) or "CONSOLIDATED_BILLING". + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `accounts` - List of organization accounts including the master account. For a list excluding the master account, see the `nonMasterAccounts` attribute. 
All elements have these attributes: + * `arn` - ARN of the account + * `email` - Email of the account + * `id` - Identifier of the account + * `name` - Name of the account + * `status` - Current status of the account +* `arn` - ARN of the organization +* `id` - Identifier of the organization +* `masterAccountArn` - ARN of the master account +* `masterAccountEmail` - Email address of the master account +* `masterAccountId` - Identifier of the master account +* `nonMasterAccounts` - List of organization accounts excluding the master account. For a list including the master account, see the `accounts` attribute. All elements have these attributes: + * `arn` - ARN of the account + * `email` - Email of the account + * `id` - Identifier of the account + * `name` - Name of the account + * `status` - Current status of the account +* `roots` - List of organization roots. All elements have these attributes: + * `arn` - ARN of the root + * `id` - Identifier of the root + * `name` - Name of the root + * `policyTypes` - List of policy types enabled for this root. All elements have these attributes: + * `name` - The name of the policy type + * `status` - The status of the policy type as it relates to the associated root + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the AWS organization using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the AWS organization using the `id`. For example: + +```console +% terraform import aws_organizations_organization.my_org o-1234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/organizations_organizational_unit.html.markdown b/website/docs/cdktf/typescript/r/organizations_organizational_unit.html.markdown new file mode 100644 index 00000000000..b74636293de --- /dev/null +++ b/website/docs/cdktf/typescript/r/organizations_organizational_unit.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_organizational_unit" +description: |- + Provides a resource to create an organizational unit. +--- + + + +# Resource: aws_organizations_organizational_unit + +Provides a resource to create an organizational unit. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { OrganizationsOrganizationalUnit } from "./.gen/providers/aws/organizations-organizational-unit"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OrganizationsOrganizationalUnit(this, "example", { + name: "example", + parentId: Token.asString( + propertyAccess(awsOrganizationsOrganizationExample.roots, ["0", "id"]) + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - The name for the organizational unit +* `parentId` - ID of the parent organizational unit, which may be the root +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `accounts` - List of child accounts for this Organizational Unit. Does not return account information for child Organizational Units. All elements have these attributes: + * `arn` - ARN of the account + * `email` - Email of the account + * `id` - Identifier of the account + * `name` - Name of the account +* `arn` - ARN of the organizational unit +* `id` - Identifier of the organization unit +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Organizations Organizational Units using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS Organizations Organizational Units using the `id`. For example: + +```console +% terraform import aws_organizations_organizational_unit.example ou-1234567 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/organizations_policy.html.markdown b/website/docs/cdktf/typescript/r/organizations_policy.html.markdown new file mode 100644 index 00000000000..667e292fd7f --- /dev/null +++ b/website/docs/cdktf/typescript/r/organizations_policy.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_policy" +description: |- + Provides a resource to manage an AWS Organizations policy. +--- + + + +# Resource: aws_organizations_policy + +Provides a resource to manage an [AWS Organizations policy](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { OrganizationsPolicy } from "./.gen/providers/aws/organizations-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: ["*"], + effect: "Allow", + resources: ["*"], + }, + ], + }); + const awsOrganizationsPolicyExample = new OrganizationsPolicy( + this, + "example_1", + { + content: Token.asString(example.json), + name: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsOrganizationsPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Required) The policy content to add to the new policy. For example, if you create a [service control policy (SCP)](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_scp.html), this string must be JSON text that specifies the permissions that admins in attached accounts can delegate to their users, groups, and roles. For more information about the SCP syntax, see the [Service Control Policy Syntax documentation](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_reference_scp-syntax.html) and for more information on the Tag Policy syntax, see the [Tag Policy Syntax documentation](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_example-tag-policies.html). +* `name` - (Required) The friendly name to assign to the policy. +* `description` - (Optional) A description to assign to the policy. +* `skipDestroy` - (Optional) If set to `true`, destroy will **not** delete the policy and instead just remove the resource from state. This can be useful in situations where the policies (and the associated attachment) must be preserved to meet the AWS minimum requirement of 1 attached policy. +* `type` - (Optional) The type of policy to create. Valid values are `aiservicesOptOutPolicy`, `backupPolicy`, `serviceControlPolicy` (SCP), and `tagPolicy`. Defaults to `serviceControlPolicy`. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) of the policy. +* `arn` - Amazon Resource Name (ARN) of the policy. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsOrganizationsPolicy` using the policy ID. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsOrganizationsPolicy` using the policy ID. For example: + +```console +% terraform import aws_organizations_policy.example p-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/organizations_policy_attachment.html.markdown b/website/docs/cdktf/typescript/r/organizations_policy_attachment.html.markdown new file mode 100644 index 00000000000..e1a2f6059ea --- /dev/null +++ b/website/docs/cdktf/typescript/r/organizations_policy_attachment.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_policy_attachment" +description: |- + Provides a resource to attach an AWS Organizations policy to an organization account, root, or unit. +--- + + + +# Resource: aws_organizations_policy_attachment + +Provides a resource to attach an AWS Organizations policy to an organization account, root, or unit. + +## Example Usage + +### Organization Account + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OrganizationsPolicyAttachment } from "./.gen/providers/aws/organizations-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OrganizationsPolicyAttachment(this, "account", { + policyId: example.id, + targetId: "123456789012", + }); + } +} + +``` + +### Organization Root + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OrganizationsPolicyAttachment } from "./.gen/providers/aws/organizations-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OrganizationsPolicyAttachment(this, "root", { + policyId: example.id, + targetId: Token.asString( + propertyAccess(awsOrganizationsOrganizationExample.roots, ["0", "id"]) + ), + }); + } +} + +``` + +### Organization Unit + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { OrganizationsPolicyAttachment } from "./.gen/providers/aws/organizations-policy-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OrganizationsPolicyAttachment(this, "unit", { + policyId: example.id, + targetId: Token.asString(awsOrganizationsOrganizationalUnitExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `policyId` - (Required) The unique identifier (ID) of the policy that you want to attach to the target. +* `targetId` - (Required) The unique identifier (ID) of the root, organizational unit, or account number that you want to attach the policy to. +* `skipDestroy` - (Optional) If set to `true`, destroy will **not** detach the policy and instead just remove the resource from state. This can be useful in situations where the attachment must be preserved to meet the AWS minimum requirement of 1 attached policy. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsOrganizationsPolicyAttachment` using the target ID and policy ID. For example: + +With an account target: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsOrganizationsPolicyAttachment` using the target ID and policy ID. For example: + +With an account target: + +```console +% terraform import aws_organizations_policy_attachment.account 123456789012:p-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/organizations_resource_policy.html.markdown b/website/docs/cdktf/typescript/r/organizations_resource_policy.html.markdown new file mode 100644 index 00000000000..51e475d7b30 --- /dev/null +++ b/website/docs/cdktf/typescript/r/organizations_resource_policy.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Organizations" +layout: "aws" +page_title: "AWS: aws_organizations_resource_policy" +description: |- + Provides a resource to manage an AWS Organizations resource policy. +--- + + + +# Resource: aws_organizations_resource_policy + +Provides a resource to manage a resource-based delegation policy that can be used to delegate policy management for AWS Organizations to specified member accounts to perform policy actions that are by default available only to the management account. See the [_AWS Organizations User Guide_](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_delegate_policies.html) for more information. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { OrganizationsResourcePolicy } from "./.gen/providers/aws/organizations-resource-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new OrganizationsResourcePolicy(this, "example", { + content: + '{\n "Version": "2012-10-17",\n "Statement": [\n {\n "Sid": "DelegatingNecessaryDescribeListActions",\n "Effect": "Allow",\n "Principal": {\n "AWS": "arn:aws:iam::123456789012:root"\n },\n "Action": [\n "organizations:DescribeOrganization",\n "organizations:DescribeOrganizationalUnit",\n "organizations:DescribeAccount",\n "organizations:DescribePolicy",\n "organizations:DescribeEffectivePolicy",\n "organizations:ListRoots",\n "organizations:ListOrganizationalUnitsForParent",\n "organizations:ListParents",\n "organizations:ListChildren",\n "organizations:ListAccounts",\n "organizations:ListAccountsForParent",\n "organizations:ListPolicies",\n "organizations:ListPoliciesForTarget",\n "organizations:ListTargetsForPolicy",\n "organizations:ListTagsForResource"\n ],\n "Resource": "*"\n }\n ]\n}\n\n', + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `content` - (Required) Content for the resource policy. The text must be correctly formatted JSON that complies with the syntax for the resource policy's type. See the [_AWS Organizations User Guide_](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_delegate_examples.html) for examples. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the resource policy. +* `id` - The unique identifier (ID) of the resource policy. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsOrganizationsResourcePolicy` using the resource policy ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsOrganizationsResourcePolicy` using the resource policy ID. For example: + +```console +% terraform import aws_organizations_resource_policy.example rp-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pinpoint_adm_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_adm_channel.markdown new file mode 100644 index 00000000000..26dde53b5d5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/pinpoint_adm_channel.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_adm_channel" +description: |- + Provides a Pinpoint ADM Channel resource. 
+--- + + + +# Resource: aws_pinpoint_adm_channel + +Provides a Pinpoint ADM (Amazon Device Messaging) Channel resource. + +~> **Note:** All arguments including the Client ID and Client Secret will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { PinpointAdmChannel } from "./.gen/providers/aws/pinpoint-adm-channel"; +import { PinpointApp } from "./.gen/providers/aws/pinpoint-app"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const app = new PinpointApp(this, "app", {}); + new PinpointAdmChannel(this, "channel", { + applicationId: app.applicationId, + clientId: "", + clientSecret: "", + enabled: true, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required) The application ID. +* `clientId` - (Required) Client ID (part of OAuth Credentials) obtained via Amazon Developer Account. +* `clientSecret` - (Required) Client Secret (part of OAuth Credentials) obtained via Amazon Developer Account. +* `enabled` - (Optional) Specifies whether to enable the channel. Defaults to `true`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint ADM Channel using the `applicationId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Pinpoint ADM Channel using the `applicationId`. For example: + +```console +% terraform import aws_pinpoint_adm_channel.channel application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pinpoint_apns_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_apns_channel.markdown new file mode 100644 index 00000000000..b0cec26c08a --- /dev/null +++ b/website/docs/cdktf/typescript/r/pinpoint_apns_channel.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_apns_channel" +description: |- + Provides a Pinpoint APNs Channel resource. +--- + + + +# Resource: aws_pinpoint_apns_channel + +Provides a Pinpoint APNs Channel resource. + +~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { PinpointApnsChannel } from "./.gen/providers/aws/pinpoint-apns-channel";
+import { PinpointApp } from "./.gen/providers/aws/pinpoint-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const app = new PinpointApp(this, "app", {});
+    new PinpointApnsChannel(this, "apns", {
+      applicationId: app.applicationId,
+      certificate: Token.asString(Fn.file("./certificate.pem")),
+      privateKey: Token.asString(Fn.file("./private_key.key")),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `applicationId` - (Required) The application ID.
+* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`.
+* `defaultAuthenticationMethod` - (Optional) The default authentication method used for APNs.
+  __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console.
+  You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK.
+  If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type.
+
+One of the following sets of credentials is also required.
+
+If you choose to use __Certificate credentials__, you will have to provide:
+
+* `certificate` - (Required) The PEM-encoded TLS certificate from Apple.
+* `privateKey` - (Required) The certificate private key file (i.e., the `.key` file).
+
+If you choose to use __Key credentials__, you will have to provide:
+
+* `bundleId` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app.
+* `teamId` - (Required) The ID assigned to your Apple developer account team. This value is provided on the Membership page.
+* `tokenKey` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key.
+* `tokenKeyId` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint APNs Channel using the `applicationId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Pinpoint APNs Channel using the `applicationId`. For example:
+
+```console
+% terraform import aws_pinpoint_apns_channel.apns application-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/pinpoint_apns_sandbox_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_apns_sandbox_channel.markdown
new file mode 100644
index 00000000000..75b471a259c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/pinpoint_apns_sandbox_channel.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Pinpoint"
+layout: "aws"
+page_title: "AWS: aws_pinpoint_apns_sandbox_channel"
+description: |-
+  Provides a Pinpoint APNs Sandbox Channel resource.
+---
+
+
+
+# Resource: aws_pinpoint_apns_sandbox_channel
+
+Provides a Pinpoint APNs Sandbox Channel resource.
+
+~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PinpointApnsSandboxChannel } from "./.gen/providers/aws/pinpoint-apns-sandbox-channel";
+import { PinpointApp } from "./.gen/providers/aws/pinpoint-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const app = new PinpointApp(this, "app", {});
+    new PinpointApnsSandboxChannel(this, "apns_sandbox", {
+      applicationId: app.applicationId,
+      certificate: Token.asString(Fn.file("./certificate.pem")),
+      privateKey: Token.asString(Fn.file("./private_key.key")),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `applicationId` - (Required) The application ID.
+* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`.
+* `defaultAuthenticationMethod` - (Optional) The default authentication method used for APNs Sandbox.
+  __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console.
+  You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK.
+  If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type.
+
+One of the following sets of credentials is also required.
+
+If you choose to use __Certificate credentials__, you will have to provide:
+
+* `certificate` - (Required) The PEM-encoded TLS certificate from Apple.
+* `privateKey` - (Required) The certificate private key file (i.e., the `.key` file).
+
+If you choose to use __Key credentials__, you will have to provide:
+
+* `bundleId` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app.
+* `teamId` - (Required) The ID assigned to your Apple developer account team. This value is provided on the Membership page.
+* `tokenKey` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key.
+* `tokenKeyId` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint APNs Sandbox Channel using the `applicationId`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Pinpoint APNs Sandbox Channel using the `applicationId`. For example:
+
+```console
+% terraform import aws_pinpoint_apns_sandbox_channel.apns_sandbox application-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/pinpoint_apns_voip_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_apns_voip_channel.markdown
new file mode 100644
index 00000000000..a49f9b592ad
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/pinpoint_apns_voip_channel.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Pinpoint"
+layout: "aws"
+page_title: "AWS: aws_pinpoint_apns_voip_channel"
+description: |-
+  Provides a Pinpoint APNs VoIP Channel resource.
+---
+
+
+
+# Resource: aws_pinpoint_apns_voip_channel
+
+Provides a Pinpoint APNs VoIP Channel resource.
+
+~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PinpointApnsVoipChannel } from "./.gen/providers/aws/pinpoint-apns-voip-channel";
+import { PinpointApp } from "./.gen/providers/aws/pinpoint-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const app = new PinpointApp(this, "app", {});
+    new PinpointApnsVoipChannel(this, "apns_voip", {
+      applicationId: app.applicationId,
+      certificate: Token.asString(Fn.file("./certificate.pem")),
+      privateKey: Token.asString(Fn.file("./private_key.key")),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `applicationId` - (Required) The application ID.
+* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`.
+* `defaultAuthenticationMethod` - (Optional) The default authentication method used for APNs.
+  __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console.
+  You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK.
+  If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type.
+
+One of the following sets of credentials is also required.
+
+If you choose to use __Certificate credentials__, you will have to provide:
+
+* `certificate` - (Required) The PEM-encoded TLS certificate from Apple.
+* `privateKey` - (Required) The certificate private key file (i.e., the `.key` file).
+
+If you choose to use __Key credentials__, you will have to provide:
+
+* `bundleId` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app.
+* `teamId` - (Required) The ID assigned to your Apple developer account team.
This value is provided on the Membership page.
+* `tokenKey` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key.
+* `tokenKeyId` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint APNs VoIP Channel using the `applicationId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Pinpoint APNs VoIP Channel using the `applicationId`. For example:
+
+```console
+% terraform import aws_pinpoint_apns_voip_channel.apns_voip application-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/pinpoint_apns_voip_sandbox_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_apns_voip_sandbox_channel.markdown
new file mode 100644
index 00000000000..cb73dd3f437
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/pinpoint_apns_voip_sandbox_channel.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Pinpoint"
+layout: "aws"
+page_title: "AWS: aws_pinpoint_apns_voip_sandbox_channel"
+description: |-
+  Provides a Pinpoint APNs VoIP Sandbox Channel resource.
+---
+
+
+
+# Resource: aws_pinpoint_apns_voip_sandbox_channel
+
+Provides a Pinpoint APNs VoIP Sandbox Channel resource.
+
+~> **Note:** All arguments, including certificates and tokens, will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PinpointApnsVoipSandboxChannel } from "./.gen/providers/aws/pinpoint-apns-voip-sandbox-channel";
+import { PinpointApp } from "./.gen/providers/aws/pinpoint-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const app = new PinpointApp(this, "app", {});
+    new PinpointApnsVoipSandboxChannel(this, "apns_voip_sandbox", {
+      applicationId: app.applicationId,
+      certificate: Token.asString(Fn.file("./certificate.pem")),
+      privateKey: Token.asString(Fn.file("./private_key.key")),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `applicationId` - (Required) The application ID.
+* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`.
+* `defaultAuthenticationMethod` - (Optional) The default authentication method used for APNs.
+  __NOTE__: Amazon Pinpoint uses this default for every APNs push notification that you send using the console.
+  You can override the default when you send a message programmatically using the Amazon Pinpoint API, the AWS CLI, or an AWS SDK.
+  If your default authentication type fails, Amazon Pinpoint doesn't attempt to use the other authentication type.
+
+One of the following sets of credentials is also required.
+
+If you choose to use __Certificate credentials__, you will have to provide:
+
+* `certificate` - (Required) The PEM-encoded TLS certificate from Apple.
+* `privateKey` - (Required) The certificate private key file (i.e., the `.key` file).
+
+If you choose to use __Key credentials__, you will have to provide:
+
+* `bundleId` - (Required) The ID assigned to your iOS app. To find this value, choose Certificates, IDs & Profiles, choose App IDs in the Identifiers section, and choose your app.
+* `teamId` - (Required) The ID assigned to your Apple developer account team. This value is provided on the Membership page.
+* `tokenKey` - (Required) The `.p8` file that you download from your Apple developer account when you create an authentication key.
+* `tokenKeyId` - (Required) The ID assigned to your signing key. To find this value, choose Certificates, IDs & Profiles, and choose your key in the Keys section.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint APNs VoIP Sandbox Channel using the `applicationId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Pinpoint APNs VoIP Sandbox Channel using the `applicationId`. For example:
+
+```console
+% terraform import aws_pinpoint_apns_voip_sandbox_channel.apns_voip_sandbox application-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/pinpoint_app.markdown b/website/docs/cdktf/typescript/r/pinpoint_app.markdown
new file mode 100644
index 00000000000..e2a86508799
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/pinpoint_app.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "Pinpoint"
+layout: "aws"
+page_title: "AWS: aws_pinpoint_app"
+description: |-
+  Provides a Pinpoint App resource.
+---
+
+
+
+# Resource: aws_pinpoint_app
+
+Provides a Pinpoint App resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PinpointApp } from "./.gen/providers/aws/pinpoint-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new PinpointApp(this, "example", {
+      limits: {
+        maximumDuration: 600,
+      },
+      name: "test-app",
+      quietTime: {
+        end: "06:00",
+        start: "00:00",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The application name. By default generated by Terraform.
+* `namePrefix` - (Optional) Creates a unique application name beginning with the specified prefix. Conflicts with `name`.
+* `campaignHook` - (Optional) Specifies settings for invoking an AWS Lambda function that customizes a segment for a campaign.
+* `limits` - (Optional) The default campaign limits for the app.
These limits apply to each campaign for the app, unless the campaign overrides the default with limits of its own.
+* `quietTime` - (Optional) The default quiet time for the app. Each campaign for this app sends no messages during this time unless the campaign overrides the default with a quiet time of its own.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+`campaignHook` supports the following:
+
+* `lambdaFunctionName` - (Optional) Lambda function name or ARN to be called for delivery. Conflicts with `webUrl`.
+* `mode` - (Required if `lambdaFunctionName` or `webUrl` is provided) What mode Lambda should be invoked in. Valid values for this parameter are `delivery`, `filter`.
+* `webUrl` - (Optional) Web URL to call for hook. If the URL has authentication specified, it will be added as authentication to the request. Conflicts with `lambdaFunctionName`.
+
+`limits` supports the following:
+
+* `daily` - (Optional) The maximum number of messages that the campaign can send daily.
+* `maximumDuration` - (Optional) The length of time (in seconds) that the campaign can run before it ends and message deliveries stop. This duration begins at the scheduled start time for the campaign. The minimum value is 60.
+* `messagesPerSecond` - (Optional) The number of messages that the campaign can send per second. The minimum value is 50, and the maximum is 20000.
+* `total` - (Optional) The maximum total number of messages that the campaign can send.
+
+`quietTime` supports the following:

+* `end` - (Optional) The default end time for quiet time in ISO 8601 format. Required if `start` is set.
+* `start` - (Optional) The default start time for quiet time in ISO 8601 format. Required if `end` is set.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `applicationId` - The Application ID of the Pinpoint App.
+* `arn` - Amazon Resource Name (ARN) of the Pinpoint application.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint App using the `applicationId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Pinpoint App using the `applicationId`.
For example: + +```console +% terraform import aws_pinpoint_app.name application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pinpoint_baidu_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_baidu_channel.markdown new file mode 100644 index 00000000000..a8f84ebfbbc --- /dev/null +++ b/website/docs/cdktf/typescript/r/pinpoint_baidu_channel.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_baidu_channel" +description: |- + Provides a Pinpoint Baidu Channel resource. +--- + + + +# Resource: aws_pinpoint_baidu_channel + +Provides a Pinpoint Baidu Channel resource. + +~> **Note:** All arguments including the Api Key and Secret Key will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { PinpointApp } from "./.gen/providers/aws/pinpoint-app"; +import { PinpointBaiduChannel } from "./.gen/providers/aws/pinpoint-baidu-channel"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const app = new PinpointApp(this, "app", {}); + new PinpointBaiduChannel(this, "channel", { + apiKey: "", + applicationId: app.applicationId, + secretKey: "", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required) The application ID. +* `enabled` - (Optional) Specifies whether to enable the channel. Defaults to `true`. +* `apiKey` - (Required) Platform credential API key from Baidu. +* `secretKey` - (Required) Platform credential Secret key from Baidu. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint Baidu Channel using the `applicationId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Pinpoint Baidu Channel using the `applicationId`. For example: + +```console +% terraform import aws_pinpoint_baidu_channel.channel application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pinpoint_email_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_email_channel.markdown new file mode 100644 index 00000000000..90bf48079ba --- /dev/null +++ b/website/docs/cdktf/typescript/r/pinpoint_email_channel.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_email_channel" +description: |- + Provides a Pinpoint Email Channel resource. +--- + + + +# Resource: aws_pinpoint_email_channel + +Provides a Pinpoint Email Channel resource. 
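+
+For orientation before the fuller generated example below: the channel itself needs only an application, a `fromAddress`, and the ARN of an SES identity that has already been verified. The following is a minimal sketch; the identity ARN and addresses are placeholders, not working values:
+
+```typescript
+// Minimal sketch of aws_pinpoint_email_channel. The identity ARN and the
+// from address below are placeholders for values from your own account.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { PinpointApp } from "./.gen/providers/aws/pinpoint-app";
+import { PinpointEmailChannel } from "./.gen/providers/aws/pinpoint-email-channel";
+class MinimalEmailChannelStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const app = new PinpointApp(this, "app", {});
+    new PinpointEmailChannel(this, "email", {
+      applicationId: app.applicationId,
+      fromAddress: "user@example.com",
+      // ARN of an SES identity you have already verified (placeholder).
+      identity: "arn:aws:ses:us-east-1:123456789012:identity/example.com",
+    });
+  }
+}
+```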
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+import { PinpointApp } from "./.gen/providers/aws/pinpoint-app";
+import { PinpointEmailChannel } from "./.gen/providers/aws/pinpoint-email-channel";
+import { SesDomainIdentity } from "./.gen/providers/aws/ses-domain-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const app = new PinpointApp(this, "app", {});
+    const identity = new SesDomainIdentity(this, "identity", {
+      domain: "example.com",
+    });
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["pinpoint.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const rolePolicy = new DataAwsIamPolicyDocument(this, "role_policy", {
+      statement: [
+        {
+          actions: ["mobileanalytics:PutEvents", "mobileanalytics:PutItems"],
+          effect: "Allow",
+          resources: ["*"],
+        },
+      ],
+    });
+    const role = new IamRole(this, "role", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+    });
+    const awsIamRolePolicyRolePolicy = new IamRolePolicy(
+      this,
+      "role_policy_5",
+      {
+        name: "role_policy",
+        policy: Token.asString(rolePolicy.json),
+        role: role.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyRolePolicy.overrideLogicalId("role_policy");
+    new PinpointEmailChannel(this, "email", {
+      applicationId: app.applicationId,
+      fromAddress: "user@example.com",
+      roleArn: role.arn,
+      identity: identity.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `applicationId` - (Required) The application ID.
+* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`.
+* `configurationSet` - (Optional) The ARN of the Amazon SES configuration set that you want to apply to messages that you send through the channel.
+* `fromAddress` - (Required) The email address used to send emails from. You can use a plain email address (`user@example.com`) or a friendly address (`User <user@example.com>`). This field must comply with [RFC 5322](https://www.ietf.org/rfc/rfc5322.txt).
+* `identity` - (Required) The ARN of an identity verified with SES.
+* `roleArn` - (Optional) The ARN of an IAM Role used to submit events to Mobile Analytics' event ingestion service.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `messagesPerSecond` - Messages per second that can be sent.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint Email Channel using the `applicationId`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Pinpoint Email Channel using the `applicationId`. For example: + +```console +% terraform import aws_pinpoint_email_channel.email application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pinpoint_event_stream.markdown b/website/docs/cdktf/typescript/r/pinpoint_event_stream.markdown new file mode 100644 index 00000000000..3c1de95ef4f --- /dev/null +++ b/website/docs/cdktf/typescript/r/pinpoint_event_stream.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_event_stream" +description: |- + Provides a Pinpoint Event Stream resource. +--- + + + +# Resource: aws_pinpoint_event_stream + +Provides a Pinpoint Event Stream resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy"; +import { KinesisStream } from "./.gen/providers/aws/kinesis-stream"; +import { PinpointApp } from "./.gen/providers/aws/pinpoint-app"; +import { PinpointEventStream } from "./.gen/providers/aws/pinpoint-event-stream"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testStream = new KinesisStream(this, "test_stream", { + name: "pinpoint-kinesis-test", + shardCount: 1, + }); + const app = new PinpointApp(this, "app", {}); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["pinpoint.us-east-1.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const testRolePolicy = new DataAwsIamPolicyDocument( + this, + "test_role_policy", + { + statement: [ + { + actions: ["kinesis:PutRecords", "kinesis:DescribeStream"], + effect: "Allow", + resources: ["arn:aws:kinesis:us-east-1:*:*/*"], + }, + ], + } + ); + const testRole = new IamRole(this, "test_role", { + assumeRolePolicy: Token.asString(assumeRole.json), + }); + const awsIamRolePolicyTestRolePolicy = new IamRolePolicy( + this, + "test_role_policy_5", + { + name: "test_policy", + policy: Token.asString(testRolePolicy.json), + role: testRole.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyTestRolePolicy.overrideLogicalId("test_role_policy"); + new PinpointEventStream(this, "stream", { + applicationId: app.applicationId, + destinationStreamArn: testStream.arn, + roleArn: testRole.arn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required) The application ID. 
+* `destinationStreamArn` - (Required) The Amazon Resource Name (ARN) of the Amazon Kinesis stream or Firehose delivery stream to which you want to publish events. +* `roleArn` - (Required) The IAM role that authorizes Amazon Pinpoint to publish events to the stream in your account. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint Event Stream using the `applicationId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Pinpoint Event Stream using the `applicationId`. For example: + +```console +% terraform import aws_pinpoint_event_stream.stream application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pinpoint_gcm_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_gcm_channel.markdown new file mode 100644 index 00000000000..9b3e6face6c --- /dev/null +++ b/website/docs/cdktf/typescript/r/pinpoint_gcm_channel.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_gcm_channel" +description: |- + Provides a Pinpoint GCM Channel resource. +--- + + + +# Resource: aws_pinpoint_gcm_channel + +Provides a Pinpoint GCM Channel resource. + +~> **Note:** Api Key argument will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { PinpointApp } from "./.gen/providers/aws/pinpoint-app"; +import { PinpointGcmChannel } from "./.gen/providers/aws/pinpoint-gcm-channel"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const app = new PinpointApp(this, "app", {}); + new PinpointGcmChannel(this, "gcm", { + apiKey: "api_key", + applicationId: app.applicationId, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required) The application ID. +* `apiKey` - (Required) Platform credential API key from Google. +* `enabled` - (Optional) Whether the channel is enabled or disabled. Defaults to `true`. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Pinpoint GCM Channel using the `applicationId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Pinpoint GCM Channel using the `applicationId`. 
For example: + +```console +% terraform import aws_pinpoint_gcm_channel.gcm application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pinpoint_sms_channel.markdown b/website/docs/cdktf/typescript/r/pinpoint_sms_channel.markdown new file mode 100644 index 00000000000..d3bc8617d79 --- /dev/null +++ b/website/docs/cdktf/typescript/r/pinpoint_sms_channel.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Pinpoint" +layout: "aws" +page_title: "AWS: aws_pinpoint_sms_channel" +description: |- + Use the `awsPinpointSmsChannel` resource to manage Pinpoint SMS Channels. +--- + + + +# Resource: aws_pinpoint_sms_channel + +Use the `awsPinpointSmsChannel` resource to manage Pinpoint SMS Channels. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { PinpointApp } from "./.gen/providers/aws/pinpoint-app"; +import { PinpointSmsChannel } from "./.gen/providers/aws/pinpoint-sms-channel"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const app = new PinpointApp(this, "app", {}); + new PinpointSmsChannel(this, "sms", { + applicationId: app.applicationId, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `applicationId` - (Required) ID of the application. +* `enabled` - (Optional) Whether the channel is enabled or disabled. By default, it is set to `true`. +* `senderId` - (Optional) Identifier of the sender for your messages. +* `shortCode` - (Optional) Short Code registered with the phone provider. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `promotionalMessagesPerSecond` - Maximum number of promotional messages that can be sent per second. +* `transactionalMessagesPerSecond` - Maximum number of transactional messages per second that can be sent. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Pinpoint SMS Channel using the `applicationId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the Pinpoint SMS Channel using the `applicationId`. For example: + +```console +% terraform import aws_pinpoint_sms_channel.sms application-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/pipes_pipe.html.markdown b/website/docs/cdktf/typescript/r/pipes_pipe.html.markdown new file mode 100644 index 00000000000..5201f1adbde --- /dev/null +++ b/website/docs/cdktf/typescript/r/pipes_pipe.html.markdown @@ -0,0 +1,618 @@ +--- +subcategory: "EventBridge Pipes" +layout: "aws" +page_title: "AWS: aws_pipes_pipe" +description: |- + Terraform resource for managing an AWS EventBridge Pipes Pipe. +--- + + + +# Resource: aws_pipes_pipe + +Terraform resource for managing an AWS EventBridge Pipes Pipe. 
+
+You can find out more about EventBridge Pipes in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes.html).
+
+EventBridge Pipes are very configurable, and may require IAM permissions to work correctly. More information on the configuration options and IAM permissions can be found in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes.html).
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+import { PipesPipe } from "./.gen/providers/aws/pipes-pipe";
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const source = new SqsQueue(this, "source", {});
+    const target = new SqsQueue(this, "target", {});
+    const main = new DataAwsCallerIdentity(this, "main", {});
+    const test = new IamRole(this, "test", {
+      assumeRolePolicy: Token.asString(
+        Fn.jsonencode({
+          Statement: {
+            Action: "sts:AssumeRole",
+            Condition: {
+              StringEquals: {
+                "aws:SourceAccount": main.accountId,
+              },
+            },
+            Effect: "Allow",
+            Principal: {
+              Service: "pipes.amazonaws.com",
+            },
+          },
+          Version: "2012-10-17",
+        })
+      ),
+    });
+    const awsIamRolePolicySource = new IamRolePolicy(this, "source_4", {
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: [
+                "sqs:DeleteMessage",
+                "sqs:GetQueueAttributes",
+                "sqs:ReceiveMessage",
+              ],
+              Effect: "Allow",
+              Resource: [source.arn],
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+      role: test.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicySource.overrideLogicalId("source");
+    const awsIamRolePolicyTarget = new IamRolePolicy(this, "target_5", {
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: ["sqs:SendMessage"],
+              Effect: "Allow",
+              Resource: [target.arn],
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+      role: test.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyTarget.overrideLogicalId("target");
+    new PipesPipe(this, "example", {
+      dependsOn: [awsIamRolePolicySource, awsIamRolePolicyTarget],
+      name: "example-pipe",
+      roleArn: test.arn,
+      source: source.arn,
+      target: target.arn,
+    });
+  }
+}
+
+```
+
+### Enrichment Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PipesPipe } from "./.gen/providers/aws/pipes-pipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new PipesPipe(this, "example", {
+      enrichment: Token.asString(awsCloudwatchEventApiDestinationExample.arn),
+      enrichmentParameters: {
+        httpParameters: {
+          headerParameters: {
+            "example-header": "example-value",
+            "second-example-header": "second-example-value",
+          },
+          pathParameterValues: ["example-path-param"],
+          queryStringParameters: {
+            "example-query-string": "example-value",
+            "second-example-query-string": "second-example-value",
+          },
+        },
+      },
+      name: "example-pipe",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      source: source.arn,
+      target: target.arn,
+    });
+  }
+}
+
+```
+
+### Filter Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PipesPipe } from "./.gen/providers/aws/pipes-pipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new PipesPipe(this, "example", {
+      name: "example-pipe",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      source: source.arn,
+      sourceParameters: {
+        filterCriteria: {
+          filter: [
+            {
+              pattern: Token.asString(
+                Fn.jsonencode({
+                  source: ["event-source"],
+                })
+              ),
+            },
+          ],
+        },
+      },
+      target: target.arn,
+    });
+  }
+}
+
+```
+
+### SQS Source and Target Configuration Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PipesPipe } from "./.gen/providers/aws/pipes-pipe";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new PipesPipe(this, "example", {
+      name: "example-pipe",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      source: source.arn,
+      sourceParameters: {
+        sqsQueueParameters: {
+          batchSize: 1,
+          maximumBatchingWindowInSeconds: 2,
+        },
+      },
+      target: target.arn,
+      targetParameters: {
+        sqsQueueParameters: {
+          messageDeduplicationId: "example-dedupe",
+          messageGroupId: "example-group",
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `roleArn` - (Required) ARN of the role that allows the pipe to send data to the target.
+* `source` - (Required) Source resource of the pipe (typically an ARN).
+* `target` - (Required) Target resource of the pipe (typically an ARN).
+
+The following arguments are optional:
+
+* `description` - (Optional) A description of the pipe. At most 512 characters.
+* `desiredState` - (Optional) The state the pipe should be in. One of: `RUNNING`, `STOPPED`.
+* `enrichment` - (Optional) Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes.html#pipes-enrichment).
+* `enrichmentParameters` - (Optional) Parameters to configure enrichment for your pipe. Detailed below.
+* `name` - (Optional) Name of the pipe. If omitted, Terraform will assign a random, unique name.
Conflicts with `namePrefix`.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `sourceParameters` - (Optional) Parameters to configure a source for the pipe. Detailed below.
+* `targetParameters` - (Optional) Parameters to configure a target for your pipe. Detailed below.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### enrichment_parameters Configuration Block
+
+You can find out more about EventBridge Pipes Enrichment in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/pipes-enrichment.html).
+
+* `inputTemplate` - (Optional) Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
+* `httpParameters` - (Optional) Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
+
+#### enrichment_parameters.http_parameters Configuration Block
+
+* `headerParameters` - (Optional) Key-value mapping of the headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
+* `pathParameterValues` - (Optional) The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
+* `queryStringParameters` - (Optional) Key-value mapping of the query strings that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
+
+### source_parameters Configuration Block
+
+You can find out more about EventBridge Pipes Sources in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes-event-source.html).
+
+* `activemqBrokerParameters` - (Optional) The parameters for using an Active MQ broker as a source. Detailed below.
+* `dynamodbStreamParameters` - (Optional) The parameters for using a DynamoDB stream as a source. Detailed below.
+* `filterCriteria` - (Optional) The collection of event patterns used to [filter events](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes-event-filtering.html). Detailed below.
+* `kinesisStreamParameters` - (Optional) The parameters for using a Kinesis stream as a source. Detailed below.
+* `managedStreamingKafkaParameters` - (Optional) The parameters for using an MSK stream as a source. Detailed below.
+* `rabbitmqBrokerParameters` - (Optional) The parameters for using a Rabbit MQ broker as a source. Detailed below.
+* `selfManagedKafkaParameters` - (Optional) The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
+* `sqsQueueParameters` - (Optional) The parameters for using an Amazon SQS stream as a source. Detailed below.
+
+#### source_parameters.filter_criteria Configuration Block
+
+* `filter` - (Optional) An array of up to 5 event patterns. Detailed below; a short sketch follows this list.
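+
+As a sketch of how multiple patterns combine (up to five per pipe; an event is forwarded when it matches at least one of them), with placeholder ARNs and an SQS-style `body` filter:
+
+```typescript
+// Sketch only: the role and queue ARNs are placeholders. For static
+// patterns, plain JSON.stringify works in place of Fn.jsonencode.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { PipesPipe } from "./.gen/providers/aws/pipes-pipe";
+class FilteredPipeStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new PipesPipe(this, "filtered", {
+      name: "filtered-pipe",
+      roleArn: "arn:aws:iam::123456789012:role/pipe-role",
+      source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
+      target: "arn:aws:sqs:us-east-1:123456789012:target-queue",
+      sourceParameters: {
+        filterCriteria: {
+          filter: [
+            // Each entry holds one pattern; matching any one forwards the event.
+            { pattern: JSON.stringify({ body: { state: ["FAILED"] } }) },
+            { pattern: JSON.stringify({ body: { state: ["TIMED_OUT"] } }) },
+          ],
+        },
+      },
+    });
+  }
+}
+```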
+
+##### source_parameters.filter_criteria.filter Configuration Block
+
+* `pattern` - (Required) The event pattern. At most 4096 characters.
+
+#### source_parameters.activemq_broker_parameters Configuration Block
+
+* `batchSize` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `credentials` - (Required) The credentials needed to access the resource. Detailed below.
+* `maximumBatchingWindowInSeconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `queueName` - (Required) The name of the destination queue to consume. Maximum length of 1000.
+
+##### source_parameters.activemq_broker_parameters.credentials Configuration Block
+
+* `basicAuth` - (Required) The ARN of the Secrets Manager secret containing the basic auth credentials.
+
+#### source_parameters.dynamodb_stream_parameters Configuration Block
+
+* `batchSize` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `deadLetterConfig` - (Optional) Define the target queue to send dead-letter queue events to. Detailed below.
+* `maximumBatchingWindowInSeconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `maximumRecordAgeInSeconds` - (Optional) Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
+* `maximumRetryAttempts` - (Optional) Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
+* `onPartialBatchItemFailure` - (Optional) Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
+* `parallelizationFactor` - (Optional) The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
+* `startingPosition` - (Optional) The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
+
+##### source_parameters.dynamodb_stream_parameters.dead_letter_config Configuration Block
+
+* `arn` - (Optional) The ARN of the Amazon SQS queue specified as the target for the dead-letter queue.
+
+#### source_parameters.kinesis_stream_parameters Configuration Block
+
+* `batchSize` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `deadLetterConfig` - (Optional) Define the target queue to send dead-letter queue events to. Detailed below.
+* `maximumBatchingWindowInSeconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `maximumRecordAgeInSeconds` - (Optional) Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
+* `maximumRetryAttempts` - (Optional) Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
+* `onPartialBatchItemFailure` - (Optional) Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
+* `parallelizationFactor` - (Optional) The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
+* `startingPosition` - (Required) The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
+* `startingPositionTimestamp` - (Optional) With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
+
+##### source_parameters.kinesis_stream_parameters.dead_letter_config Configuration Block
+
+* `arn` - (Optional) The ARN of the Amazon SQS queue specified as the target for the dead-letter queue.
+
+#### source_parameters.managed_streaming_kafka_parameters Configuration Block
+
+* `batchSize` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `consumerGroupId` - (Optional) The name of the destination queue to consume. Maximum length of 200.
+* `credentials` - (Optional) The credentials needed to access the resource. Detailed below.
+* `maximumBatchingWindowInSeconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `startingPosition` - (Optional) The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
+* `topicName` - (Required) The name of the topic that the pipe will read from. Maximum length of 249.
+
+##### source_parameters.managed_streaming_kafka_parameters.credentials Configuration Block
+
+* `clientCertificateTlsAuth` - (Optional) The ARN of the Secrets Manager secret containing the credentials.
+* `saslScram512Auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials.
+
+#### source_parameters.rabbitmq_broker_parameters Configuration Block
+
+* `batchSize` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `credentials` - (Required) The credentials needed to access the resource. Detailed below.
+* `maximumBatchingWindowInSeconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `queueName` - (Required) The name of the destination queue to consume. Maximum length of 1000.
+* `virtualHost` - (Optional) The name of the virtual host associated with the source broker. Maximum length of 200.
+
+##### source_parameters.rabbitmq_broker_parameters.credentials Configuration Block
+
+* `basicAuth` - (Required) The ARN of the Secrets Manager secret containing the credentials.
+
+#### source_parameters.self_managed_kafka_parameters Configuration Block
+
+* `additionalBootstrapServers` - (Optional) An array of server URLs. Maximum number of 2 items, each of maximum length 300.
+* `batchSize` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `consumerGroupId` - (Optional) The name of the destination queue to consume. Maximum length of 200.
+* `credentials` - (Optional) The credentials needed to access the resource. Detailed below.
+* `maximumBatchingWindowInSeconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+* `serverRootCaCertificate` - (Optional) The ARN of the Secrets Manager secret used for certification.
+* `startingPosition` - (Optional) The position in a stream from which to start reading.
+* `topicName` - (Required) The name of the topic that the pipe will read from. Maximum length of 249.
+* `vpc` - (Optional) This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
+
+##### source_parameters.self_managed_kafka_parameters.credentials Configuration Block
+
+* `basicAuth` - (Optional) The ARN of the Secrets Manager secret containing the credentials.
+* `clientCertificateTlsAuth` - (Optional) The ARN of the Secrets Manager secret containing the credentials.
+* `saslScram256Auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials.
+* `saslScram512Auth` - (Optional) The ARN of the Secrets Manager secret containing the credentials.
+
+##### source_parameters.self_managed_kafka_parameters.vpc Configuration Block
+
+* `securityGroups` - (Optional) List of security groups associated with the stream. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
+* `subnets` - (Optional) List of the subnets associated with the stream. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
+
+#### source_parameters.sqs_queue_parameters Configuration Block
+
+* `batchSize` - (Optional) The maximum number of records to include in each batch. Maximum value of 10000.
+* `maximumBatchingWindowInSeconds` - (Optional) The maximum length of time to wait for events. Maximum value of 300.
+
+### target_parameters Configuration Block
+
+You can find out more about EventBridge Pipes Targets in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes-event-target.html). A combined source/target configuration sketch follows this list.
+
+* `batchJobParameters` - (Optional) The parameters for using an AWS Batch job as a target. Detailed below.
+* `cloudwatchLogsParameters` - (Optional) The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
+* `ecsTaskParameters` - (Optional) The parameters for using an Amazon ECS task as a target. Detailed below.
+* `eventbridgeEventBusParameters` - (Optional) The parameters for using an EventBridge event bus as a target. Detailed below.
+* `httpParameters` - (Optional) These are custom parameters to be used when the target is an API Gateway REST API or EventBridge ApiDestination. Detailed below.
+* `inputTemplate` - (Optional) Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
+* `kinesisStreamParameters` - (Optional) The parameters for using a Kinesis stream as a target. Detailed below.
+* `lambdaFunctionParameters` - (Optional) The parameters for using a Lambda function as a target. Detailed below.
+* `redshiftDataParameters` - (Optional) These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
+* `sagemakerPipelineParameters` - (Optional) The parameters for using a SageMaker pipeline as a target. Detailed below.
+* `sqsQueueParameters` - (Optional) The parameters for using an Amazon SQS queue as a target. Detailed below.
+* `stepFunctionStateMachineParameters` - (Optional) The parameters for using a Step Functions state machine as a target. Detailed below.
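+
+The following is a minimal sketch, not produced by `cdktf convert`, showing how a source filter and a target block compose on one pipe. The IAM role ARN, queue names, and event pattern are illustrative placeholders.
+
+```typescript
+// Hedged sketch: assumes provider bindings generated by `cdktf get` and an
+// existing IAM role that EventBridge Pipes is allowed to assume.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+import { PipesPipe } from "./.gen/providers/aws/pipes-pipe";
+class PipeSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const source = new SqsQueue(this, "source", { name: "pipe-source" });
+    const target = new SqsQueue(this, "target", {
+      contentBasedDeduplication: true,
+      fifoQueue: true,
+      name: "pipe-target.fifo",
+    });
+    new PipesPipe(this, "example", {
+      name: "example-pipe",
+      roleArn: "arn:aws:iam::123456789012:role/example", // placeholder ARN
+      source: source.arn,
+      target: target.arn,
+      sourceParameters: {
+        // filter_criteria.filter: each pattern is at most 4096 characters
+        filterCriteria: {
+          filter: [{ pattern: JSON.stringify({ body: { state: ["OK"] } }) }],
+        },
+        // sqs_queue_parameters: batch up to 10 records, wait up to 30 seconds
+        sqsQueueParameters: {
+          batchSize: 10,
+          maximumBatchingWindowInSeconds: 30,
+        },
+      },
+      targetParameters: {
+        // messageGroupId applies because the target queue is FIFO
+        sqsQueueParameters: { messageGroupId: "example" },
+      },
+    });
+  }
+}
+```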
+
+#### target_parameters.batch_job_parameters Configuration Block
+
+* `arrayProperties` - (Optional) The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
+* `containerOverrides` - (Optional) The overrides that are sent to a container. Detailed below.
+* `dependsOn` - (Optional) A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
+* `jobDefinition` - (Required) The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
+* `jobName` - (Required) The name of the job. It can be up to 128 letters long.
+* `parameters` - (Optional) Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
+* `retryStrategy` - (Optional) The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
+
+##### target_parameters.batch_job_parameters.array_properties Configuration Block
+
+* `size` - (Optional) The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
+
+##### target_parameters.batch_job_parameters.container_overrides Configuration Block
+
+* `command` - (Optional) List of commands to send to the container that overrides the default command from the Docker image or the task definition.
+* `environment` - (Optional) The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with `AWS_BATCH`; this naming convention is reserved for variables that AWS Batch sets. Detailed below.
+* `instanceType` - (Optional) The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
+* `resourceRequirement` - (Optional) The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below.
+
+###### target_parameters.batch_job_parameters.container_overrides.environment Configuration Block
+
+* `name` - (Optional) The name of the key-value pair. For environment variables, this is the name of the environment variable.
+* `value` - (Optional) The value of the key-value pair. For environment variables, this is the value of the environment variable.
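+
+As a hedged fragment (the interface name follows cdktf's generated naming convention and is assumed, not taken from this page), a Batch target with an environment override might look like:
+
+```typescript
+// Assumed generated type from ./.gen/providers/aws/pipes-pipe; the job
+// definition and job name below are placeholders.
+import { PipesPipeTargetParameters } from "./.gen/providers/aws/pipes-pipe";
+
+const batchJobTarget: PipesPipeTargetParameters = {
+  batchJobParameters: {
+    jobDefinition: "example-job-def", // name, name:revision, or ARN
+    jobName: "example-job",
+    containerOverrides: {
+      // Names beginning with AWS_BATCH are reserved by AWS Batch.
+      environment: [{ name: "STAGE", value: "production" }],
+    },
+  },
+};
+```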
+
+###### target_parameters.batch_job_parameters.container_overrides.resource_requirement Configuration Block
+
+* `type` - (Optional) The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
+* `value` - (Optional) The quantity of the specified resource to reserve for the container. [The values vary based on the type specified](https://docs.aws.amazon.com/eventbridge/latest/pipes-reference/API_BatchResourceRequirement.html).
+
+##### target_parameters.batch_job_parameters.depends_on Configuration Block
+
+* `jobId` - (Optional) The job ID of the AWS Batch job that's associated with this dependency.
+* `type` - (Optional) The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
+
+##### target_parameters.batch_job_parameters.retry_strategy Configuration Block
+
+* `attempts` - (Optional) The number of times to move a job to the RUNNABLE status. If the value of `attempts` is greater than one, the job is retried on failure up to that many times. Maximum value of 10.
+
+#### target_parameters.cloudwatch_logs_parameters Configuration Block
+
+* `logStreamName` - (Optional) The name of the log stream.
+* `timestamp` - (Optional) The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g., `$.detail.timestamp`.
+
+#### target_parameters.ecs_task_parameters Configuration Block
+
+* `capacityProviderStrategy` - (Optional) List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
+* `enableEcsManagedTags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
+* `enableExecuteCommand` - (Optional) Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
+* `group` - (Optional) Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
+* `launchType` - (Optional) Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
+* `networkConfiguration` - (Optional) Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
+* `overrides` - (Optional) The overrides that are associated with a task. Detailed below.
+* `placementConstraint` - (Optional) An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
+* `placementStrategy` - (Optional) The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
Detailed below. +* `platformVersion` - (Optional) Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE. +* `propagateTags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION +* `referenceId` - (Optional) The reference ID to use for the task. Maximum length of 1,024. +* `tags` - (Optional) Key-value map of tags that you apply to the task to help you categorize and organize them. +* `taskCount` - (Optional) The number of tasks to create based on TaskDefinition. The default is 1. +* `taskDefinitionArn` - (Optional) The ARN of the task definition to use if the event target is an Amazon ECS task. + +##### target_parameters.ecs_task_parameters.capacity_provider_strategy Configuration Block + +* `base` - (Optional) The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000. +* `capacityProvider` - (Optional) The short name of the capacity provider. Maximum value of 255. +* `weight` - (Optional) The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000. + +##### target_parameters.ecs_task_parameters.network_configuration Configuration Block + +* `awsVpcConfiguration` - (Optional) Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below. + +###### target_parameters.ecs_task_parameters.network_configuration.aws_vpc_configuration Configuration Block + +* `assignPublicIp` - (Optional) Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED. +* `securityGroups` - (Optional) Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used. +* `subnets` - (Optional) Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets. + +##### target_parameters.ecs_task_parameters.overrides Configuration Block + +* `containerOverride` - (Optional) One or more container overrides that are sent to a task. Detailed below. +* `cpu` - (Optional) The cpu override for the task. +* `ephemeralStorage` - (Optional) The ephemeral storage setting override for the task. Detailed below. +* `executionRoleArn` - (Optional) The Amazon Resource Name (ARN) of the task execution IAM role override for the task. +* `inferenceAcceleratorOverride` - (Optional) List of Elastic Inference accelerator overrides for the task. Detailed below. +* `memory` - (Optional) The memory override for the task. 
+* `taskRoleArn` - (Optional) The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. + +###### target_parameters.ecs_task_parameters.overrides.container_override Configuration Block + +* `command` - (Optional) List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name. +* `cpu` - (Optional) The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name. +* `environment` - (Optional) The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below. +* `environmentFile` - (Optional) A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below. +* `memory` - (Optional) The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name. +* `memoryReservation` - (Optional) The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name. +* `name` - (Optional) The name of the container that receives the override. This parameter is required if any override is specified. +* `resourceRequirement` - (Optional) The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below. + +###### target_parameters.ecs_task_parameters.overrides.container_override.environment Configuration Block + +* `name` - (Optional) The name of the key-value pair. For environment variables, this is the name of the environment variable. +* `value` - (Optional) The value of the key-value pair. For environment variables, this is the value of the environment variable. + +###### target_parameters.ecs_task_parameters.overrides.container_override.environment_file Configuration Block + +* `type` - (Optional) The file type to use. The only supported value is s3. +* `value` - (Optional) The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file. + +###### target_parameters.ecs_task_parameters.overrides.container_override.resource_requirement Configuration Block + +* `type` - (Optional) The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator. +* `value` - (Optional) The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on. If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition. 
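+
+Putting the override blocks together, a hedged fragment (the interface name follows cdktf's generated naming and is assumed) for a Fargate task target could look like:
+
+```typescript
+// Assumed generated type from ./.gen/providers/aws/pipes-pipe; the task
+// definition ARN, subnet ID, and container name are placeholders.
+import { PipesPipeTargetParametersEcsTaskParameters } from "./.gen/providers/aws/pipes-pipe";
+
+const ecsTaskTarget: PipesPipeTargetParametersEcsTaskParameters = {
+  taskDefinitionArn:
+    "arn:aws:ecs:us-east-1:123456789012:task-definition/example:1",
+  launchType: "FARGATE",
+  networkConfiguration: {
+    // awsvpc networking is required for Fargate tasks
+    awsVpcConfiguration: {
+      assignPublicIp: "DISABLED",
+      subnets: ["subnet-0123456789abcdef0"],
+    },
+  },
+  overrides: {
+    containerOverride: [
+      {
+        name: "app", // required whenever any other override is set
+        environment: [{ name: "PIPE_SOURCE", value: "dynamodb" }],
+        memory: 512, // hard limit in MiB
+      },
+    ],
+  },
+};
+```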
+
+###### target_parameters.ecs_task_parameters.overrides.ephemeral_storage Configuration Block
+
+* `sizeInGib` - (Required) The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
+
+###### target_parameters.ecs_task_parameters.overrides.inference_accelerator_override Configuration Block
+
+* `deviceName` - (Optional) The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
+* `deviceType` - (Optional) The Elastic Inference accelerator type to use.
+
+##### target_parameters.ecs_task_parameters.placement_constraint Configuration Block
+
+* `expression` - (Optional) A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
+* `type` - (Optional) The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
+
+##### target_parameters.ecs_task_parameters.placement_strategy Configuration Block
+
+* `field` - (Optional) The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
+* `type` - (Optional) The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
+
+#### target_parameters.eventbridge_event_bus_parameters Configuration Block
+
+* `detailType` - (Optional) A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
+* `endpointId` - (Optional) The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
+* `resources` - (Optional) List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
+* `source` - (Optional) The source of the event. Maximum length of 256.
+* `time` - (Optional) The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event, e.g., `$.detail.timestamp`.
+
+#### target_parameters.http_parameters Configuration Block
+
+* `headerParameters` - (Optional) Key-value mapping of the headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination. Detailed below.
+* `pathParameterValues` - (Optional) The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
+* `queryStringParameters` - (Optional) Key-value mapping of the query strings that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination. Detailed below.
+
+#### target_parameters.kinesis_stream_parameters Configuration Block
+
+* `partitionKey` - (Required) Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
+
+#### target_parameters.lambda_function_parameters Configuration Block
+
+* `invocationType` - (Optional) Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
+
+#### target_parameters.redshift_data_parameters Configuration Block
+
+* `database` - (Required) The name of the database. Required when authenticating using temporary credentials.
+* `dbUser` - (Optional) The database user name. Required when authenticating using temporary credentials.
+* `secretManagerArn` - (Optional) The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
+* `sqls` - (Optional) List of SQL statement texts to run, each with a maximum length of 100,000.
+* `statementName` - (Optional) The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
+* `withEvent` - (Optional) Indicates whether to send an event back to EventBridge after the SQL statement runs.
+
+#### target_parameters.sagemaker_pipeline_parameters Configuration Block
+
+* `pipelineParameter` - (Optional) List of Parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
+
+##### target_parameters.sagemaker_pipeline_parameters.pipeline_parameter Configuration Block
+
+* `name` - (Optional) Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
+* `value` - (Optional) Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
+
+#### target_parameters.sqs_queue_parameters Configuration Block
+
+* `messageDeduplicationId` - (Optional) This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
+* `messageGroupId` - (Optional) The FIFO message group ID to use as the target.
+
+#### target_parameters.step_function_state_machine_parameters Configuration Block
+
+* `invocationType` - (Optional) Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of this pipe.
+* `id` - Same as `name`.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `30M`) +* `update` - (Default `30M`) +* `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import pipes using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import pipes using the `name`. For example: + +```console +% terraform import aws_pipes_pipe.example my-pipe +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/placement_group.html.markdown b/website/docs/cdktf/typescript/r/placement_group.html.markdown new file mode 100644 index 00000000000..71dfbeabd92 --- /dev/null +++ b/website/docs/cdktf/typescript/r/placement_group.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_placement_group" +description: |- + Provides an EC2 placement group. +--- + + + +# Resource: aws_placement_group + +Provides an EC2 placement group. Read more about placement groups +in [AWS Docs](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/placement-groups.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { PlacementGroup } from "./.gen/providers/aws/placement-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new PlacementGroup(this, "web", { + name: "hunky-dory-pg", + strategy: "cluster", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the placement group. +* `partitionCount` - (Optional) The number of partitions to create in the + placement group. Can only be specified when the `strategy` is set to + `partition`. Valid values are 1 - 7 (default is `2`). +* `spreadLevel` - (Optional) Determines how placement groups spread instances. Can only be used + when the `strategy` is set to `spread`. Can be `host` or `rack`. `host` can only be used for Outpost placement groups. Defaults to `rack`. +* `strategy` - (Required) The placement strategy. Can be `cluster`, `partition` or `spread`. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the placement group. +* `id` - The name of the placement group. +* `placementGroupId` - The ID of the placement group. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import placement groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import placement groups using the `name`. For example:
+
+```console
+% terraform import aws_placement_group.prod_pg production-placement-group
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/prometheus_alert_manager_definition.html.markdown b/website/docs/cdktf/typescript/r/prometheus_alert_manager_definition.html.markdown
new file mode 100644
index 00000000000..8d99f98ed49
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/prometheus_alert_manager_definition.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "AMP (Managed Prometheus)"
+layout: "aws"
+page_title: "AWS: aws_prometheus_alert_manager_definition"
+description: |-
+  Manages an Amazon Managed Service for Prometheus (AMP) Alert Manager Definition
+---
+
+
+
+# Resource: aws_prometheus_alert_manager_definition
+
+Manages an Amazon Managed Service for Prometheus (AMP) Alert Manager Definition.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PrometheusAlertManagerDefinition } from "./.gen/providers/aws/prometheus-alert-manager-definition";
+import { PrometheusWorkspace } from "./.gen/providers/aws/prometheus-workspace";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const demo = new PrometheusWorkspace(this, "demo", {});
+    const awsPrometheusAlertManagerDefinitionDemo =
+      new PrometheusAlertManagerDefinition(this, "demo_1", {
+        definition:
+          "alertmanager_config: |\n  route:\n    receiver: 'default'\n  receivers:\n    - name: 'default'\n\n",
+        workspaceId: demo.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsPrometheusAlertManagerDefinitionDemo.overrideLogicalId("demo");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `workspaceId` - (Required) ID of the prometheus workspace the alert manager definition should be linked to.
+* `definition` - (Required) The alert manager definition that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-alert-manager.html).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the prometheus alert manager definition using the workspace identifier.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import the prometheus alert manager definition using the workspace identifier. For example:
+
+```console
+% terraform import aws_prometheus_alert_manager_definition.demo ws-C6DCB907-F2D7-4D96-957B-66691F865D8B
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/prometheus_rule_group_namespace.html.markdown b/website/docs/cdktf/typescript/r/prometheus_rule_group_namespace.html.markdown
new file mode 100644
index 00000000000..25feb8fff2f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/prometheus_rule_group_namespace.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "AMP (Managed Prometheus)"
+layout: "aws"
+page_title: "AWS: aws_prometheus_rule_group_namespace"
+description: |-
+  Manages an Amazon Managed Service for Prometheus (AMP) Rule Group Namespace
+---
+
+
+
+# Resource: aws_prometheus_rule_group_namespace
+
+Manages an Amazon Managed Service for Prometheus (AMP) Rule Group Namespace.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { PrometheusRuleGroupNamespace } from "./.gen/providers/aws/prometheus-rule-group-namespace";
+import { PrometheusWorkspace } from "./.gen/providers/aws/prometheus-workspace";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const demo = new PrometheusWorkspace(this, "demo", {});
+    const awsPrometheusRuleGroupNamespaceDemo =
+      new PrometheusRuleGroupNamespace(this, "demo_1", {
+        data: "groups:\n  - name: test\n    rules:\n    - record: metric:recording_rule\n      expr: avg(rate(container_cpu_usage_seconds_total[5m]))\n\n",
+        name: "rules",
+        workspaceId: demo.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsPrometheusRuleGroupNamespaceDemo.overrideLogicalId("demo");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the rule group namespace.
+* `workspaceId` - (Required) ID of the prometheus workspace the rule group namespace should be linked to.
+* `data` - (Required) The rule group namespace data that you want to be applied. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-Ruler.html).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the prometheus rule group namespace using the arn.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import the prometheus rule group namespace using the arn. For example: + +```console +% terraform import aws_prometheus_rule_group_namespace.demo arn:aws:aps:us-west-2:123456789012:rulegroupsnamespace/IDstring/namespace_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/prometheus_workspace.html.markdown b/website/docs/cdktf/typescript/r/prometheus_workspace.html.markdown new file mode 100644 index 00000000000..2854eb78849 --- /dev/null +++ b/website/docs/cdktf/typescript/r/prometheus_workspace.html.markdown @@ -0,0 +1,119 @@ +--- +subcategory: "AMP (Managed Prometheus)" +layout: "aws" +page_title: "AWS: aws_prometheus_workspace" +description: |- + Manages an Amazon Managed Service for Prometheus (AMP) Workspace +--- + + + +# Resource: aws_prometheus_workspace + +Manages an Amazon Managed Service for Prometheus (AMP) Workspace. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { PrometheusWorkspace } from "./.gen/providers/aws/prometheus-workspace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new PrometheusWorkspace(this, "example", { + alias: "example", + tags: { + Environment: "production", + }, + }); + } +} + +``` + +### CloudWatch Logging + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { PrometheusWorkspace } from "./.gen/providers/aws/prometheus-workspace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "example", + }); + const awsPrometheusWorkspaceExample = new PrometheusWorkspace( + this, + "example_1", + { + loggingConfiguration: { + logGroupArn: "${" + example.arn + "}:*", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsPrometheusWorkspaceExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `alias` - (Optional) The alias of the prometheus workspace. See more [in AWS Docs](https://docs.aws.amazon.com/prometheus/latest/userguide/AMP-onboard-create-workspace.html). +* `loggingConfiguration` - (Optional) Logging configuration for the workspace. See [Logging Configuration](#logging-configuration) below for details. +* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Logging Configuration
+
+The `loggingConfiguration` block supports the following arguments:
+
+* `logGroupArn` - (Required) The ARN of the CloudWatch log group to which the vended log data will be published. This log group must exist.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the workspace.
+* `id` - Identifier of the workspace.
+* `prometheusEndpoint` - Prometheus endpoint available for this workspace.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AMP Workspaces using the identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AMP Workspaces using the identifier. For example:
+
+```console
+% terraform import aws_prometheus_workspace.demo ws-C6DCB907-F2D7-4D96-957B-66691F865D8B
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/proxy_protocol_policy.html.markdown b/website/docs/cdktf/typescript/r/proxy_protocol_policy.html.markdown
new file mode 100644
index 00000000000..fb066fa0398
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/proxy_protocol_policy.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "ELB Classic"
+layout: "aws"
+page_title: "AWS: aws_proxy_protocol_policy"
+description: |-
+  Provides a proxy protocol policy, which allows an ELB to carry client connection information to a backend.
+---
+
+
+
+# Resource: aws_proxy_protocol_policy
+
+Provides a proxy protocol policy, which allows an ELB to carry client connection information to a backend.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { ProxyProtocolPolicy } from "./.gen/providers/aws/proxy-protocol-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const lb = new Elb(this, "lb", {
+      availabilityZones: ["us-east-1a"],
+      listener: [
+        {
+          instancePort: 25,
+          instanceProtocol: "tcp",
+          lbPort: 25,
+          lbProtocol: "tcp",
+        },
+        {
+          instancePort: 587,
+          instanceProtocol: "tcp",
+          lbPort: 587,
+          lbProtocol: "tcp",
+        },
+      ],
+      name: "test-lb",
+    });
+    new ProxyProtocolPolicy(this, "smtp", {
+      instancePorts: ["25", "587"],
+      loadBalancer: lb.name,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `loadBalancer` - (Required) The load balancer to which the policy
+  should be attached.
+* `instancePorts` - (Required) List of instance ports to which the policy
+  should be applied. This can be specified if the protocol is SSL or TCP.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the policy.
+* `loadBalancer` - The load balancer to which the policy is attached.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/qldb_ledger.html.markdown b/website/docs/cdktf/typescript/r/qldb_ledger.html.markdown
new file mode 100644
index 00000000000..5fca86d62f5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/qldb_ledger.html.markdown
@@ -0,0 +1,87 @@
+---
+subcategory: "QLDB (Quantum Ledger Database)"
+layout: "aws"
+page_title: "AWS: aws_qldb_ledger"
+description: |-
+  Provides a QLDB Ledger resource.
+---
+
+
+
+# Resource: aws_qldb_ledger
+
+Provides an AWS Quantum Ledger Database (QLDB) ledger resource.
+
+~> **NOTE:** Deletion protection is enabled by default. To successfully delete this resource via Terraform, `deletion_protection = false` must be applied before attempting deletion.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QldbLedger } from "./.gen/providers/aws/qldb-ledger";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QldbLedger(this, "sample-ledger", {
+      name: "sample-ledger",
+      permissionsMode: "STANDARD",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deletionProtection` - (Optional) The deletion protection for the QLDB Ledger instance. By default it is `true`. To delete this resource via Terraform, this value must be configured to `false` and applied first before attempting deletion.
+* `kmsKey` - (Optional) The key in AWS Key Management Service (AWS KMS) to use for encryption of data at rest in the ledger. For more information, see the [AWS documentation](https://docs.aws.amazon.com/qldb/latest/developerguide/encryption-at-rest.html). Valid values are `"AWS_OWNED_KMS_KEY"` to use an AWS KMS key that is owned and managed by AWS on your behalf, or the ARN of a valid symmetric customer managed KMS key.
+* `name` - (Optional) The friendly name for the QLDB Ledger instance. By default generated by Terraform.
+* `permissionsMode` - (Required) The permissions mode for the QLDB ledger instance.
Specify either `ALLOW_ALL` or `STANDARD`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the QLDB Ledger.
+* `arn` - The ARN of the QLDB Ledger.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10M`)
+- `delete` - (Default `10M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QLDB Ledgers using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QLDB Ledgers using the `name`. For example:
+
+```console
+% terraform import aws_qldb_ledger.sample-ledger sample-ledger
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/qldb_stream.html.markdown b/website/docs/cdktf/typescript/r/qldb_stream.html.markdown
new file mode 100644
index 00000000000..d5ad920a935
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/qldb_stream.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "QLDB (Quantum Ledger Database)"
+layout: "aws"
+page_title: "AWS: aws_qldb_stream"
+description: |-
+  Provides a QLDB Stream resource.
+---
+
+
+
+# Resource: aws_qldb_stream
+
+Provides an AWS Quantum Ledger Database (QLDB) Stream resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QldbStream } from "./.gen/providers/aws/qldb-stream";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QldbStream(this, "example", {
+      inclusiveStartTime: "2021-01-01T00:00:00Z",
+      kinesisConfiguration: {
+        aggregationEnabled: false,
+        streamArn:
+          "arn:aws:kinesis:us-east-1:xxxxxxxxxxxx:stream/example-kinesis-stream",
+      },
+      ledgerName: "existing-ledger-name",
+      roleArn: "sample-role-arn",
+      streamName: "sample-ledger-stream",
+      tags: {
+        example: "tag",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `exclusiveEndTime` - (Optional) The exclusive date and time that specifies when the stream ends. If you don't define this parameter, the stream runs indefinitely until you cancel it. It must be in ISO 8601 date and time format and in Universal Coordinated Time (UTC). For example: `"2019-06-13T21:36:34Z"`.
+* `inclusiveStartTime` - (Required) The inclusive start date and time from which to start streaming journal data. This parameter must be in ISO 8601 date and time format and in Universal Coordinated Time (UTC). For example: `"2019-06-13T21:36:34Z"`. This cannot be in the future and must be before `exclusiveEndTime`. If you provide a value that is before the ledger's `creationDateTime`, QLDB effectively defaults it to the ledger's `creationDateTime`.
+* `kinesisConfiguration` - (Required) The configuration settings of the Kinesis Data Streams destination for your stream request. Documented below.
+* `ledgerName` - (Required) The name of the QLDB ledger.
+* `roleArn` - (Required) The Amazon Resource Name (ARN) of the IAM role that grants QLDB permissions for a journal stream to write data records to a Kinesis Data Streams resource.
+* `streamName` - (Required) The name that you want to assign to the QLDB journal stream. User-defined names can help identify and indicate the purpose of a stream. Your stream name must be unique among other active streams for a given ledger. Stream names have the same naming constraints as ledger names, as defined in the [Amazon QLDB Developer Guide](https://docs.aws.amazon.com/qldb/latest/developerguide/limits.html#limits.naming).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### kinesis_configuration
+
+The `kinesisConfiguration` block supports the following arguments:
+
+* `aggregationEnabled` - (Optional) Enables QLDB to publish multiple data records in a single Kinesis Data Streams record, increasing the number of records sent per API call. Default: `true`.
+* `streamArn` - (Required) The Amazon Resource Name (ARN) of the Kinesis Data Streams resource.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the QLDB Stream.
+* `arn` - The ARN of the QLDB Stream.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `8M`)
+- `delete` - (Default `5M`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_account_subscription.html.markdown b/website/docs/cdktf/typescript/r/quicksight_account_subscription.html.markdown
new file mode 100644
index 00000000000..7101183dfd8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_account_subscription.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_account_subscription"
+description: |-
+  Terraform resource for managing an AWS QuickSight Account Subscription.
+---
+
+
+
+# Resource: aws_quicksight_account_subscription
+
+Terraform resource for managing an AWS QuickSight Account Subscription.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightAccountSubscription } from "./.gen/providers/aws/quicksight-account-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightAccountSubscription(this, "subscription", {
+      accountName: "quicksight-terraform",
+      authenticationMethod: "IAM_AND_QUICKSIGHT",
+      edition: "ENTERPRISE",
+      notificationEmail: "notification@email.com",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `accountName` - (Required) Name of your Amazon QuickSight account. This name is unique over all of AWS, and it appears only when users sign in.
+* `authenticationMethod` - (Required) Method that you want to use to authenticate your Amazon QuickSight account. Currently, the valid values for this parameter are `IAM_AND_QUICKSIGHT`, `IAM_ONLY`, and `ACTIVE_DIRECTORY`.
+* `edition` - (Required) Edition of Amazon QuickSight that you want your account to have. Currently, you can choose from `STANDARD`, `ENTERPRISE` or `ENTERPRISE_AND_Q`.
+* `notificationEmail` - (Required) Email address that you want Amazon QuickSight to send notifications to regarding your Amazon QuickSight account or Amazon QuickSight subscription.
+
+The following arguments are optional (an Active Directory sketch follows this list):
+
+* `activeDirectoryName` - (Optional) Name of your Active Directory. This field is required if `ACTIVE_DIRECTORY` is the selected authentication method of the new Amazon QuickSight account.
+* `adminGroup` - (Optional) Admin group associated with your Active Directory. This field is required if `ACTIVE_DIRECTORY` is the selected authentication method of the new Amazon QuickSight account.
+* `authorGroup` - (Optional) Author group associated with your Active Directory.
+* `awsAccountId` - (Optional) AWS account ID hosting the QuickSight account. Defaults to the provider's account.
+* `contactNumber` - (Optional) A 10-digit phone number for the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `directoryId` - (Optional) Active Directory ID that is associated with your Amazon QuickSight account.
+* `emailAddress` - (Optional) Email address of the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `firstName` - (Optional) First name of the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `lastName` - (Optional) Last name of the author of the Amazon QuickSight account to use for future communications. This field is required if `ENTERPRISE_AND_Q` is the selected edition of the new Amazon QuickSight account.
+* `readerGroup` - (Optional) Reader group associated with your Active Directory.
+* `realm` - (Optional) Realm of the Active Directory that is associated with your Amazon QuickSight account.
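+
+Because the Active Directory arguments travel together, a hedged sketch (not produced by `cdktf convert`; the directory ID, group names, and realm are illustrative placeholders) of an `ACTIVE_DIRECTORY` subscription follows:
+
+```typescript
+// Assumes provider bindings generated by `cdktf get`; every identifier
+// below is a placeholder rather than a value from this page.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { QuicksightAccountSubscription } from "./.gen/providers/aws/quicksight-account-subscription";
+class AdSubscriptionSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightAccountSubscription(this, "subscription", {
+      accountName: "quicksight-ad-example",
+      activeDirectoryName: "corp.example.com",
+      adminGroup: ["quicksight-admins"],
+      authenticationMethod: "ACTIVE_DIRECTORY",
+      authorGroup: ["quicksight-authors"],
+      directoryId: "d-1234567890",
+      edition: "ENTERPRISE",
+      notificationEmail: "notification@example.com",
+      readerGroup: ["quicksight-readers"],
+      realm: "CORP.EXAMPLE.COM",
+    });
+  }
+}
+```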
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `accountSubscriptionStatus` - Status of the Amazon QuickSight account's subscription. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10M`) +* `delete` - (Default `10M`) + +## Import + +You cannot import this resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/quicksight_analysis.html.markdown b/website/docs/cdktf/typescript/r/quicksight_analysis.html.markdown new file mode 100644 index 00000000000..a7a8888db04 --- /dev/null +++ b/website/docs/cdktf/typescript/r/quicksight_analysis.html.markdown @@ -0,0 +1,224 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_analysis" +description: |- + Manages a QuickSight Analysis. +--- + + + +# Resource: aws_quicksight_analysis + +Resource for managing a QuickSight Analysis. + +## Example Usage + +### From Source Template + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightAnalysis } from "./.gen/providers/aws/quicksight-analysis"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightAnalysis(this, "example", { + analysisId: "example-id", + name: "example-name", + sourceEntity: { + sourceTemplate: { + arn: source.arn, + dataSetReferences: [ + { + dataSetArn: dataset.arn, + dataSetPlaceholder: "1", + }, + ], + }, + }, + }); + } +} + +``` + +### With Definition + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightAnalysis } from "./.gen/providers/aws/quicksight-analysis"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightAnalysis(this, "example", { + analysisId: "example-id", + definition: { + dataSetIdentifiersDeclarations: [ + { + dataSetArn: dataset.arn, + identifier: "1", + }, + ], + sheets: [ + { + sheetId: "Example1", + title: "Example", + visuals: [ + { + lineChartVisual: { + chartConfiguration: { + fieldWells: { + lineChartAggregatedFieldWells: { + category: [ + { + categoricalDimensionField: { + column: { + columnName: "Column1", + dataSetIdentifier: "1", + }, + fieldId: "1", + }, + }, + ], + values: [ + { + categoricalMeasureField: { + aggregationFunction: "COUNT", + column: { + columnName: "Column1", + dataSetIdentifier: "1", + }, + fieldId: "2", + }, + }, + ], + }, + }, + }, + title: { + formatText: { + plainText: "Line Chart Example", + }, + }, + visualId: "LineChart", + }, + }, + ], + }, + ], + }, + name: "example-name", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `analysisId` - (Required, Forces new resource) Identifier for the analysis. +* `name` - (Required) Display name for the analysis. 
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional, Forces new resource) AWS account ID.
+* `definition` - (Optional) A detailed analysis definition. Only one of `definition` or `sourceEntity` should be configured. See [definition](#definition).
+* `parameters` - (Optional) The parameters for the creation of the analysis, which you want to use to override the default settings. An analysis can have any type of parameters, and some parameters might accept multiple values. See [parameters](#parameters).
+* `permissions` - (Optional) A set of resource permissions on the analysis. Maximum of 64 items. See [permissions](#permissions).
+* `recoveryWindowInDays` - (Optional) A value that specifies the number of days that Amazon QuickSight waits before it deletes the analysis. Use `0` to force deletion without recovery. Minimum value of `7`. Maximum value of `30`. Defaults to `30`.
+* `sourceEntity` - (Optional) The entity that you are using as a source when you create the analysis (template). Only one of `definition` or `sourceEntity` should be configured. See [source_entity](#source_entity).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `themeArn` - (Optional) The Amazon Resource Name (ARN) of the theme that is being used for this analysis. The theme ARN must exist in the same AWS account where you create the analysis.
+
+### permissions
+
+* `actions` - (Required) List of IAM actions to grant or revoke permissions on.
+* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values.
+
+### source_entity
+
+* `sourceTemplate` - (Optional) The source template. See [source_template](#source_template).
+
+### source_template
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the resource.
+* `dataSetReferences` - (Required) List of dataset references. See [data_set_references](#data_set_references).
+
+### data_set_references
+
+* `dataSetArn` - (Required) Dataset Amazon Resource Name (ARN).
+* `dataSetPlaceholder` - (Required) Dataset placeholder.
+
+### parameters
+
+* `dateTimeParameters` - (Optional) A list of parameters that have a data type of date-time. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DateTimeParameter.html).
+* `decimalParameters` - (Optional) A list of parameters that have a data type of decimal. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DecimalParameter.html).
+* `integerParameters` - (Optional) A list of parameters that have a data type of integer. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_IntegerParameter.html).
+* `stringParameters` - (Optional) A list of parameters that have a data type of string. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_StringParameter.html).
+
+### definition
+
+* `dataSetIdentifiersDeclarations` - (Required) A list of dataset identifier declarations.
With this mapping, you can use dataset identifiers instead of dataset Amazon Resource Names (ARNs) throughout the analysis sub-structures. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DataSetIdentifierDeclaration.html).
+* `analysisDefaults` - (Optional) The configuration for default analysis settings. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_AnalysisDefaults.html).
+* `calculatedFields` - (Optional) A list of calculated field definitions for the analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CalculatedField.html).
+* `columnConfigurations` - (Optional) A list of analysis-level column configurations. Column configurations are used to set default formatting for a column that's used throughout an analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnConfiguration.html).
+* `filterGroups` - (Optional) A list of filter definitions for an analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_FilterGroup.html). For more information, see [Filtering Data](https://docs.aws.amazon.com/quicksight/latest/user/filtering-visual-data.html) in the Amazon QuickSight User Guide.
+* `parametersDeclarations` - (Optional) A list of parameter declarations for an analysis. Parameters are named variables that can transfer a value for use by an action or an object. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ParameterDeclaration.html). For more information, see [Parameters in Amazon QuickSight](https://docs.aws.amazon.com/quicksight/latest/user/parameters-in-quicksight.html) in the Amazon QuickSight User Guide.
+* `sheets` - (Optional) A list of sheet definitions for an analysis. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_SheetDefinition.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the analysis.
+* `createdTime` - The time that the analysis was created.
+* `id` - A comma-delimited string joining AWS account ID and analysis ID.
+* `lastUpdatedTime` - The time that the analysis was last updated.
+* `status` - The analysis creation status.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Analysis using the AWS account ID and analysis ID separated by a comma (`,`).
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a QuickSight Analysis using the AWS account ID and analysis ID separated by a comma (`,`). For example: + +```console +% terraform import aws_quicksight_analysis.example 123456789012,example-id +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/quicksight_dashboard.html.markdown b/website/docs/cdktf/typescript/r/quicksight_dashboard.html.markdown new file mode 100644 index 00000000000..b6474e8ddc2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/quicksight_dashboard.html.markdown @@ -0,0 +1,282 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_dashboard" +description: |- + Manages a QuickSight Dashboard. +--- + + + +# Resource: aws_quicksight_dashboard + +Resource for managing a QuickSight Dashboard. + +## Example Usage + +### From Source Template + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightDashboard } from "./.gen/providers/aws/quicksight-dashboard"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightDashboard(this, "example", { + dashboardId: "example-id", + name: "example-name", + sourceEntity: { + sourceTemplate: { + arn: source.arn, + dataSetReferences: [ + { + dataSetArn: dataset.arn, + dataSetPlaceholder: "1", + }, + ], + }, + }, + versionDescription: "version", + }); + } +} + +``` + +### With Definition + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { QuicksightDashboard } from "./.gen/providers/aws/quicksight-dashboard"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightDashboard(this, "example", { + dashboardId: "example-id", + definition: { + dataSetIdentifiersDeclarations: [ + { + dataSetArn: dataset.arn, + identifier: "1", + }, + ], + sheets: [ + { + sheetId: "Example1", + title: "Example", + visuals: [ + { + lineChartVisual: { + chartConfiguration: { + fieldWells: { + lineChartAggregatedFieldWells: { + category: [ + { + categoricalDimensionField: { + column: { + columnName: "Column1", + dataSetIdentifier: "1", + }, + fieldId: "1", + }, + }, + ], + values: [ + { + categoricalMeasureField: { + aggregationFunction: "COUNT", + column: { + columnName: "Column1", + dataSetIdentifier: "1", + }, + fieldId: "2", + }, + }, + ], + }, + }, + }, + title: { + formatText: { + plainText: "Line Chart Example", + }, + }, + visualId: "LineChart", + }, + }, + ], + }, + ], + }, + name: "example-name", + versionDescription: "version", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `dashboardId` - (Required, Forces new resource) Identifier for the dashboard. +* `name` - (Required) Display name for the dashboard. +* `versionDescription` - (Required) A description of the current dashboard version being created/updated. + +The following arguments are optional: + +* `awsAccountId` - (Optional, Forces new resource) AWS account ID. +* `dashboardPublishOptions` - (Optional) Options for publishing the dashboard. See [dashboard_publish_options](#dashboard_publish_options). +* `definition` - (Optional) A detailed dashboard definition. Only one of `definition` or `sourceEntity` should be configured. See [definition](#definition). +* `parameters` - (Optional) The parameters for the creation of the dashboard, which you want to use to override the default settings. A dashboard can have any type of parameters, and some parameters might accept multiple values. See [parameters](#parameters). +* `permissions` - (Optional) A set of resource permissions on the dashboard. Maximum of 64 items. See [permissions](#permissions). +* `sourceEntity` - (Optional) The entity that you are using as a source when you create the dashboard (template). Only one of `definition` or `sourceEntity` should be configured. See [source_entity](#source_entity). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `themeArn` - (Optional) The Amazon Resource Name (ARN) of the theme that is being used for this dashboard. The theme ARN must exist in the same AWS account where you create the dashboard. + +### permissions + +* `actions` - (Required) List of IAM actions to grant or revoke permissions on. +* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values. + +### source_entity + +* `sourceTemplate` - (Optional) The source template. See [source_template](#source_template). + +### source_template + +* `arn` - (Required) The Amazon Resource Name (ARN) of the resource. +* `dataSetReferences` - (Required) List of dataset references. See [data_set_references](#data_set_references). 
+
+### data_set_references
+
+* `dataSetArn` - (Required) Dataset Amazon Resource Name (ARN).
+* `dataSetPlaceholder` - (Required) Dataset placeholder.
+
+### dashboard_publish_options
+
+* `adHocFilteringOption` - (Optional) Ad hoc (one-time) filtering option. See [ad_hoc_filtering_option](#ad_hoc_filtering_option).
+* `dataPointDrillUpDownOption` - (Optional) The drill-down options of data points in a dashboard. See [data_point_drill_up_down_option](#data_point_drill_up_down_option).
+* `dataPointMenuLabelOption` - (Optional) The data point menu label options of a dashboard. See [data_point_menu_label_option](#data_point_menu_label_option).
+* `dataPointTooltipOption` - (Optional) The data point tooltip options of a dashboard. See [data_point_tooltip_option](#data_point_tooltip_option).
+* `exportToCsvOption` - (Optional) Export to .csv option. See [export_to_csv_option](#export_to_csv_option).
+* `exportWithHiddenFieldsOption` - (Optional) Determines if hidden fields are exported with a dashboard. See [export_with_hidden_fields_option](#export_with_hidden_fields_option).
+* `sheetControlsOption` - (Optional) Sheet controls option. See [sheet_controls_option](#sheet_controls_option).
+* `sheetLayoutElementMaximizationOption` - (Optional) The sheet layout maximization options of a dashboard. See [sheet_layout_element_maximization_option](#sheet_layout_element_maximization_option).
+* `visualAxisSortOption` - (Optional) The axis sort options of a dashboard. See [visual_axis_sort_option](#visual_axis_sort_option).
+* `visualMenuOption` - (Optional) The menu options of a visual in a dashboard. See [visual_menu_option](#visual_menu_option).
+
+### ad_hoc_filtering_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### data_point_drill_up_down_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### data_point_menu_label_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### data_point_tooltip_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### export_to_csv_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### export_with_hidden_fields_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### sheet_controls_option
+
+* `visibilityState` - (Optional) Visibility state. Possible values: EXPANDED, COLLAPSED.
+
+### sheet_layout_element_maximization_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### visual_axis_sort_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### visual_menu_option
+
+* `availabilityStatus` - (Optional) Availability status. Possible values: ENABLED, DISABLED.
+
+### parameters
+
+* `dateTimeParameters` - (Optional) A list of parameters that have a data type of date-time. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DateTimeParameter.html).
+* `decimalParameters` - (Optional) A list of parameters that have a data type of decimal. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DecimalParameter.html).
+* `integerParameters` - (Optional) A list of parameters that have a data type of integer.
See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_IntegerParameter.html).
+* `stringParameters` - (Optional) A list of parameters that have a data type of string. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_StringParameter.html).
+
+### definition
+
+* `dataSetIdentifiersDeclarations` - (Required) A list of dataset identifier declarations. With this mapping, you can use dataset identifiers instead of dataset Amazon Resource Names (ARNs) throughout the dashboard's sub-structures. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DataSetIdentifierDeclaration.html).
+* `analysisDefaults` - (Optional) The configuration for default analysis settings. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_AnalysisDefaults.html).
+* `calculatedFields` - (Optional) A list of calculated field definitions for the dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CalculatedField.html).
+* `columnConfigurations` - (Optional) A list of dashboard-level column configurations. Column configurations are used to set default formatting for a column that's used throughout a dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnConfiguration.html).
+* `filterGroups` - (Optional) A list of filter definitions for a dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_FilterGroup.html). For more information, see [Filtering Data](https://docs.aws.amazon.com/quicksight/latest/user/filtering-visual-data.html) in the Amazon QuickSight User Guide.
+* `parametersDeclarations` - (Optional) A list of parameter declarations for a dashboard. Parameters are named variables that can transfer a value for use by an action or an object. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ParameterDeclaration.html). For more information, see [Parameters in Amazon QuickSight](https://docs.aws.amazon.com/quicksight/latest/user/parameters-in-quicksight.html) in the Amazon QuickSight User Guide.
+* `sheets` - (Optional) A list of sheet definitions for a dashboard. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_SheetDefinition.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the dashboard.
+* `createdTime` - The time that the dashboard was created.
+* `id` - A comma-delimited string joining AWS account ID and dashboard ID.
+* `lastUpdatedTime` - The time that the dashboard was last updated.
+* `sourceEntityArn` - Amazon Resource Name (ARN) of a template that was used to create this dashboard.
+* `status` - The dashboard creation status.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `versionNumber` - The version number of the dashboard version.
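+
+The `dashboardPublishOptions` sub-blocks documented above are all single-attribute toggles; only their nesting is easy to miss. The following is a hand-written, hypothetical sketch, not `cdktf convert` output, that disables ad hoc filtering and CSV export; the template and dataset ARNs are placeholders:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { QuicksightDashboard } from "./.gen/providers/aws/quicksight-dashboard";
+class PublishOptionsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightDashboard(this, "example", {
+      dashboardId: "example-id",
+      name: "example-name",
+      versionDescription: "version",
+      sourceEntity: {
+        sourceTemplate: {
+          // Placeholder ARNs for illustration only.
+          arn: "arn:aws:quicksight:us-east-1:123456789012:template/example",
+          dataSetReferences: [
+            {
+              dataSetArn:
+                "arn:aws:quicksight:us-east-1:123456789012:dataset/example",
+              dataSetPlaceholder: "1",
+            },
+          ],
+        },
+      },
+      dashboardPublishOptions: {
+        // Each option block carries a single availabilityStatus attribute.
+        adHocFilteringOption: {
+          availabilityStatus: "DISABLED",
+        },
+        exportToCsvOption: {
+          availabilityStatus: "DISABLED",
+        },
+      },
+    });
+  }
+}
+```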
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Dashboard using the AWS account ID and dashboard ID separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a QuickSight Dashboard using the AWS account ID and dashboard ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_dashboard.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_data_set.html.markdown b/website/docs/cdktf/typescript/r/quicksight_data_set.html.markdown
new file mode 100644
index 00000000000..1ed9b4e8553
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_data_set.html.markdown
@@ -0,0 +1,516 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_data_set"
+description: |-
+  Manages a QuickSight Data Set.
+---
+
+
+
+# Resource: aws_quicksight_data_set
+
+Resource for managing a QuickSight Data Set.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightDataSet } from "./.gen/providers/aws/quicksight-data-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightDataSet(this, "example", {
+      dataSetId: "example-id",
+      importMode: "SPICE",
+      name: "example-name",
+      physicalTableMap: [
+        {
+          physicalTableMapId: "example-id",
+          s3Source: {
+            dataSourceArn: Token.asString(awsQuicksightDataSourceExample.arn),
+            inputColumns: [
+              {
+                name: "Column1",
+                type: "STRING",
+              },
+            ],
+            uploadSettings: {
+              format: "JSON",
+            },
+          },
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### With Column Level Permission Rules
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { QuicksightDataSet } from "./.gen/providers/aws/quicksight-data-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightDataSet(this, "example", { + columnLevelPermissionRules: [ + { + columnNames: ["Column1"], + principals: [Token.asString(awsQuicksightUserExample.arn)], + }, + ], + dataSetId: "example-id", + importMode: "SPICE", + name: "example-name", + physicalTableMap: [ + { + physicalTableMapId: "example-id", + s3Source: { + dataSourceArn: Token.asString(awsQuicksightDataSourceExample.arn), + inputColumns: [ + { + name: "Column1", + type: "STRING", + }, + ], + uploadSettings: { + format: "JSON", + }, + }, + }, + ], + }); + } +} + +``` + +### With Field Folders + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightDataSet } from "./.gen/providers/aws/quicksight-data-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightDataSet(this, "example", { + dataSetId: "example-id", + fieldFolders: [ + { + columns: ["Column1"], + description: "example description", + fieldFoldersId: "example-id", + }, + ], + importMode: "SPICE", + name: "example-name", + physicalTableMap: [ + { + physicalTableMapId: "example-id", + s3Source: { + dataSourceArn: Token.asString(awsQuicksightDataSourceExample.arn), + inputColumns: [ + { + name: "Column1", + type: "STRING", + }, + ], + uploadSettings: { + format: "JSON", + }, + }, + }, + ], + }); + } +} + +``` + +### With Permissions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightDataSet } from "./.gen/providers/aws/quicksight-data-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightDataSet(this, "example", { + dataSetId: "example-id", + importMode: "SPICE", + name: "example-name", + permissions: [ + { + actions: [ + "quicksight:DescribeDataSet", + "quicksight:DescribeDataSetPermissions", + "quicksight:PassDataSet", + "quicksight:DescribeIngestion", + "quicksight:ListIngestions", + ], + principal: Token.asString(awsQuicksightUserExample.arn), + }, + ], + physicalTableMap: [ + { + physicalTableMapId: "example-id", + s3Source: { + dataSourceArn: Token.asString(awsQuicksightDataSourceExample.arn), + inputColumns: [ + { + name: "Column1", + type: "STRING", + }, + ], + uploadSettings: { + format: "JSON", + }, + }, + }, + ], + }); + } +} + +``` + +### With Row Level Permission Tag Configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { QuicksightDataSet } from "./.gen/providers/aws/quicksight-data-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightDataSet(this, "example", { + dataSetId: "example-id", + importMode: "SPICE", + name: "example-name", + physicalTableMap: [ + { + physicalTableMapId: "example-id", + s3Source: { + dataSourceArn: Token.asString(awsQuicksightDataSourceExample.arn), + inputColumns: [ + { + name: "Column1", + type: "STRING", + }, + ], + uploadSettings: { + format: "JSON", + }, + }, + }, + ], + rowLevelPermissionTagConfiguration: { + status: "ENABLED", + tagRules: [ + { + columnName: "Column1", + matchAllValue: "*", + tagKey: "tagkey", + tagMultiValueDelimiter: ",", + }, + ], + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `dataSetId` - (Required, Forces new resource) Identifier for the data set. +* `importMode` - (Required) Indicates whether you want to import the data into SPICE. Valid values are `spice` and `directQuery`. +* `name` - (Required) Display name for the dataset. +* `physicalTableMap` - (Required) Declares the physical tables that are available in the underlying data sources. See [physical_table_map](#physical_table_map). + +The following arguments are optional: + +* `awsAccountId` - (Optional, Forces new resource) AWS account ID. +* `columnGroups` - (Optional) Groupings of columns that work together in certain Amazon QuickSight features. Currently, only geospatial hierarchy is supported. See [column_groups](#column_groups). +* `columnLevelPermissionRules` - (Optional) A set of 1 or more definitions of a [ColumnLevelPermissionRule](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnLevelPermissionRule.html). See [column_level_permission_rules](#column_level_permission_rules). +* `dataSetUsageConfiguration` - (Optional) The usage configuration to apply to child datasets that reference this dataset as a source. See [data_set_usage_configuration](#data_set_usage_configuration). +* `fieldFolders` - (Optional) The folder that contains fields and nested subfolders for your dataset. See [field_folders](#field_folders). +* `logicalTableMap` - (Optional) Configures the combination and transformation of the data from the physical tables. Maximum of 1 entry. See [logical_table_map](#logical_table_map). +* `permissions` - (Optional) A set of resource permissions on the data source. Maximum of 64 items. See [permissions](#permissions). +* `rowLevelPermissionDataSet` - (Optional) The row-level security configuration for the data that you want to create. See [row_level_permission_data_set](#row_level_permission_data_set). +* `rowLevelPermissionTagConfiguration` - (Optional) The configuration of tags on a dataset to set row-level security. Row-level security tags are currently supported for anonymous embedding only. See [row_level_permission_tag_configuration](#row_level_permission_tag_configuration). +* `refreshProperties` - (Optional) The refresh properties for the data set. **NOTE**: Only valid when `importMode` is set to `spice`. See [refresh_properties](#refresh_properties). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
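+
+Because `refreshProperties` wraps several single-purpose blocks (see [refresh_properties](#refresh_properties) below), a sketch of the nesting may help. This is hand-written and hypothetical, not `cdktf convert` output; it assumes a SPICE dataset with a timestamp column named `created_at` and uses a placeholder data source ARN:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { QuicksightDataSet } from "./.gen/providers/aws/quicksight-data-set";
+class RefreshPropertiesSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightDataSet(this, "example", {
+      dataSetId: "example-id",
+      importMode: "SPICE", // refreshProperties is only valid with SPICE
+      name: "example-name",
+      physicalTableMap: [
+        {
+          physicalTableMapId: "example-id",
+          s3Source: {
+            // Placeholder ARN for illustration only.
+            dataSourceArn:
+              "arn:aws:quicksight:us-east-1:123456789012:datasource/example",
+            inputColumns: [
+              {
+                name: "created_at",
+                type: "DATETIME",
+              },
+            ],
+            uploadSettings: {
+              format: "JSON",
+            },
+          },
+        },
+      ],
+      refreshProperties: {
+        refreshConfiguration: {
+          incrementalRefresh: {
+            // Re-ingest only the most recent day of data on each refresh.
+            lookbackWindow: {
+              columnName: "created_at",
+              size: 1,
+              sizeUnit: "DAY",
+            },
+          },
+        },
+      },
+    });
+  }
+}
+```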
+
+### physical_table_map
+
+For a `physicalTableMap` item to be valid, only one of `customSql`, `relationalTable`, or `s3Source` should be configured.
+
+* `physicalTableMapId` - (Required) Key of the physical table map.
+* `customSql` - (Optional) A physical table type built from the results of the custom SQL query. See [custom_sql](#custom_sql).
+* `relationalTable` - (Optional) A physical table type for relational data sources. See [relational_table](#relational_table).
+* `s3Source` - (Optional) A physical table type for an S3 data source. See [s3_source](#s3_source).
+
+### custom_sql
+
+* `dataSourceArn` - (Required) ARN of the data source.
+* `name` - (Required) Display name for the SQL query result.
+* `sqlQuery` - (Required) SQL query.
+* `columns` - (Optional) Column schema from the SQL query result set. See [columns](#columns).
+
+### columns
+
+* `name` - (Required) Name of this column in the underlying data source.
+* `type` - (Required) Data type of the column.
+
+### relational_table
+
+* `dataSourceArn` - (Required) ARN of the data source.
+* `inputColumns` - (Required) Column schema of the table. See [input_columns](#input_columns).
+* `name` - (Required) Name of the relational table.
+* `catalog` - (Optional) Catalog associated with the table.
+* `schema` - (Optional) Schema name. This name applies to certain relational database engines.
+
+### input_columns
+
+* `name` - (Required) Name of this column in the underlying data source.
+* `type` - (Required) Data type of the column.
+
+### s3_source
+
+* `dataSourceArn` - (Required) ARN of the data source.
+* `inputColumns` - (Required) Column schema of the table. See [input_columns](#input_columns).
+* `uploadSettings` - (Required) Information about the format for the S3 source file or files. See [upload_settings](#upload_settings).
+
+### upload_settings
+
+* `containsHeader` - (Optional) Whether the file has a header row, or the files each have a header row.
+* `delimiter` - (Optional) Delimiter between values in the file.
+* `format` - (Optional) File format. Valid values are `csv`, `tsv`, `clf`, `elf`, `xlsx`, and `json`.
+* `startFromRow` - (Optional) A row number to start reading data from.
+* `textQualifier` - (Optional) Text qualifier. Valid values are `doubleQuote` and `singleQuote`.
+
+### column_groups
+
+* `geoSpatialColumnGroup` - (Optional) Geospatial column group that denotes a hierarchy. See [geo_spatial_column_group](#geo_spatial_column_group).
+
+### geo_spatial_column_group
+
+* `columns` - (Required) Columns in this hierarchy.
+* `countryCode` - (Required) Country code. Valid values are `us`.
+* `name` - (Required) A display name for the hierarchy.
+
+### column_level_permission_rules
+
+* `columnNames` - (Optional) An array of column names.
+* `principals` - (Optional) An array of ARNs for Amazon QuickSight users or groups.
+
+### data_set_usage_configuration
+
+* `disableUseAsDirectQuerySource` - (Optional) Controls whether a child dataset of a direct query can use this dataset as a source.
+* `disableUseAsImportedSource` - (Optional) Controls whether a child dataset that's stored in QuickSight can use this dataset as a source.
+
+### field_folders
+
+* `fieldFoldersId` - (Required) Key of the field folder map.
+* `columns` - (Optional) An array of column names to add to the folder. A column can only be in one folder.
+* `description` - (Optional) Field folder description.
+
+### logical_table_map
+
+* `alias` - (Required) A display name for the logical table.
+* `logicalTableMapId` - (Required) Key of the logical table map. +* `dataTransforms` - (Optional) Transform operations that act on this logical table. For this structure to be valid, only one of the attributes can be non-null. See [data_transforms](#data_transforms). +* `source` - (Optional) Source of this logical table. See [source](#source). + +### data_transforms + +* `castColumnTypeOperation` - (Optional) A transform operation that casts a column to a different type. See [cast_column_type_operation](#cast_column_type_operation). +* `createColumnsOperation` - (Optional) An operation that creates calculated columns. Columns created in one such operation form a lexical closure. See [create_columns_operation](#create_columns_operation). +* `filterOperation` - (Optional) An operation that filters rows based on some condition. See [filter_operation](#filter_operation). +* `projectOperation` - (Optional) An operation that projects columns. Operations that come after a projection can only refer to projected columns. See [project_operation](#project_operation). +* `renameColumnOperation` - (Optional) An operation that renames a column. See [rename_column_operation](#rename_column_operation). +* `tagColumnOperation` - (Optional) An operation that tags a column with additional information. See [tag_column_operation](#tag_column_operation). +* `untagColumnOperation` - (Optional) A transform operation that removes tags associated with a column. See [untag_column_operation](#untag_column_operation). + +### cast_column_type_operation + +* `columnName` - (Required) Column name. +* `newColumnType` - (Required) New column data type. Valid values are `string`, `integer`, `decimal`, `datetime`. +* `format` - (Optional) When casting a column from string to datetime type, you can supply a string in a format supported by Amazon QuickSight to denote the source data format. + +### create_columns_operation + +* `columns` - (Required) Calculated columns to create. See [columns](#columns-1). + +### columns + +* `columnId` - (Required) A unique ID to identify a calculated column. During a dataset update, if the column ID of a calculated column matches that of an existing calculated column, Amazon QuickSight preserves the existing calculated column. +* `columnName` - (Required) Column name. +* `expression` - (Required) An expression that defines the calculated column. + +### filter_operation + +* `conditionExpression` - (Required) An expression that must evaluate to a Boolean value. Rows for which the expression evaluates to true are kept in the dataset. + +### project_operation + +* `projectedColumns` - (Required) Projected columns. + +### rename_column_operation + +* `columnName` - (Required) Column to be renamed. +* `newColumnName` - (Required) New name for the column. + +### tag_column_operation + +* `columnName` - (Required) Column name. +* `tags` - (Required) The dataset column tag, currently only used for geospatial type tagging. See [tags](#tags). + +### tags + +* `columnDescription` - (Optional) A description for a column. See [column_description](#column_description). +* `columnGeographicRole` - (Optional) A geospatial role for a column. Valid values are `country`, `state`, `county`, `city`, `postcode`, `longitude`, and `latitude`. + +### column_description + +* `text` - (Optional) The text of a description for a column. + +### untag_column_operation + +* `columnName` - (Required) Column name. +* `tagNames` - (Required) The column tags to remove from this column. 
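+
+Putting the transform operations together: a `logicalTableMap` entry pairs a [source](#source) (documented next) with an ordered list of `dataTransforms`. The following hand-written, hypothetical sketch, not `cdktf convert` output, renames a column and then projects only the renamed column, using a placeholder data source ARN:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { QuicksightDataSet } from "./.gen/providers/aws/quicksight-data-set";
+class LogicalTableSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightDataSet(this, "example", {
+      dataSetId: "example-id",
+      importMode: "SPICE",
+      name: "example-name",
+      physicalTableMap: [
+        {
+          physicalTableMapId: "example-id",
+          s3Source: {
+            // Placeholder ARN for illustration only.
+            dataSourceArn:
+              "arn:aws:quicksight:us-east-1:123456789012:datasource/example",
+            inputColumns: [
+              {
+                name: "Column1",
+                type: "STRING",
+              },
+            ],
+            uploadSettings: {
+              format: "JSON",
+            },
+          },
+        },
+      ],
+      logicalTableMap: [
+        {
+          logicalTableMapId: "example-logical-id",
+          alias: "RenamedTable",
+          source: {
+            physicalTableId: "example-id",
+          },
+          dataTransforms: [
+            // Each entry carries exactly one operation; they apply in order.
+            {
+              renameColumnOperation: {
+                columnName: "Column1",
+                newColumnName: "FriendlyName",
+              },
+            },
+            {
+              projectOperation: {
+                projectedColumns: ["FriendlyName"],
+              },
+            },
+          ],
+        },
+      ],
+    });
+  }
+}
+```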
+ +### source + +* `dataSetArn` - (Optional) ARN of the parent data set. +* `joinInstruction` - (Optional) Specifies the result of a join of two logical tables. See [join_instruction](#join_instruction). +* `physicalTableId` - (Optional) Physical table ID. + +### join_instruction + +* `leftOperand` - (Required) Operand on the left side of a join. +* `onClause` - (Required) Join instructions provided in the ON clause of a join. +* `rightOperand` - (Required) Operand on the right side of a join. +* `type` - (Required) Type of join. Valid values are `inner`, `outer`, `left`, and `right`. +* `leftJoinKeyProperties` - (Optional) Join key properties of the left operand. See [left_join_key_properties](#left_join_key_properties). +* `rightJoinKeyProperties` - (Optional) Join key properties of the right operand. See [right_join_key_properties](#right_join_key_properties). + +### left_join_key_properties + +* `uniqueKey` - (Optional) A value that indicates that a row in a table is uniquely identified by the columns in a join key. This is used by Amazon QuickSight to optimize query performance. + +### right_join_key_properties + +* `uniqueKey` - (Optional) A value that indicates that a row in a table is uniquely identified by the columns in a join key. This is used by Amazon QuickSight to optimize query performance. + +### permissions + +* `actions` - (Required) List of IAM actions to grant or revoke permissions on. +* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values. + +### row_level_permission_data_set + +* `arn` - (Required) ARN of the dataset that contains permissions for RLS. +* `permissionPolicy` - (Required) Type of permissions to use when interpreting the permissions for RLS. Valid values are `grantAccess` and `denyAccess`. +* `formatVersion` - (Optional) User or group rules associated with the dataset that contains permissions for RLS. +* `namespace` - (Optional) Namespace associated with the dataset that contains permissions for RLS. +* `status` - (Optional) Status of the row-level security permission dataset. If enabled, the status is `enabled`. If disabled, the status is `disabled`. + +### row_level_permission_tag_configuration + +* `tagRules` - (Required) A set of rules associated with row-level security, such as the tag names and columns that they are assigned to. See [tag_rules](#tag_rules). +* `status` - (Optional) The status of row-level security tags. If enabled, the status is `enabled`. If disabled, the status is `disabled`. + +### refresh_properties + +* `refreshConfiguration` - (Required) The refresh configuration for the data set. See [refresh_configuration](#refresh_configuration). + +### refresh_configuration + +* `incrementalRefresh` - (Required) The incremental refresh for the data set. See [incremental_refresh](#incremental_refresh). + +### incremental_refresh + +* `lookbackWindow` - (Required) The lookback window setup for an incremental refresh configuration. See [lookback_window](#lookback_window). + +### lookback_window + +* `columnName` - (Required) The name of the lookback window column. +* `size` - (Required) The lookback window column size. +* `sizeUnit` - (Required) The size unit that is used for the lookback window column. Valid values for this structure are `hour`, `day`, and `week`. + +### tag_rules + +* `columnName` - (Required) Column name that a tag key is assigned to. 
+* `tagKey` - (Required) Unique key for a tag.
+* `matchAllValue` - (Optional) A string that you want to use to filter by all the values in a column in the dataset and don’t want to list the values one by one.
+* `tagMultiValueDelimiter` - (Optional) A string that you want to use to delimit the values when you pass the values at run time.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the data set.
+* `id` - A comma-delimited string joining AWS account ID and data set ID.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Data Set using the AWS account ID and data set ID separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a QuickSight Data Set using the AWS account ID and data set ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_data_set.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_data_source.html.markdown b/website/docs/cdktf/typescript/r/quicksight_data_source.html.markdown
new file mode 100644
index 00000000000..bb819646a5c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_data_source.html.markdown
@@ -0,0 +1,254 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_data_source"
+description: |-
+  Manages a QuickSight Data Source.
+---
+
+
+
+# Resource: aws_quicksight_data_source
+
+Resource for managing a QuickSight Data Source.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightDataSource } from "./.gen/providers/aws/quicksight-data-source";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightDataSource(this, "default", {
+      dataSourceId: "example-id",
+      name: "My Cool Data in S3",
+      parameters: {
+        s3: {
+          manifestFileLocation: {
+            bucket: "my-bucket",
+            key: "path/to/manifest.json",
+          },
+        },
+      },
+      type: "S3",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `dataSourceId` - (Required, Forces new resource) An identifier for the data source.
+* `name` - (Required) A name for the data source, maximum of 128 characters.
+* `parameters` - (Required) The [parameters](#parameters-argument-reference) used to connect to this data source (exactly one).
+* `type` - (Required) The type of the data source.
See the [AWS Documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CreateDataSource.html#QS-CreateDataSource-request-Type) for the complete list of valid values.
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional, Forces new resource) The ID for the AWS account that the data source is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `credentials` - (Optional) The credentials Amazon QuickSight uses to connect to your underlying source. Currently, only credentials based on user name and password are supported. See [Credentials](#credentials-argument-reference) below for more details.
+* `permission` - (Optional) A set of resource permissions on the data source. Maximum of 64 items. See [Permission](#permission-argument-reference) below for more details.
+* `sslProperties` - (Optional) Secure Socket Layer (SSL) properties that apply when Amazon QuickSight connects to your underlying source. See [SSL Properties](#ssl_properties-argument-reference) below for more details.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcConnectionProperties` - (Optional) Use this parameter only when you want Amazon QuickSight to use a VPC connection when connecting to your underlying source. See [VPC Connection Properties](#vpc_connection_properties-argument-reference) below for more details.
+
+### credentials Argument Reference
+
+* `copySourceArn` (Optional, Conflicts with `credentialPair`) - The Amazon Resource Name (ARN) of a data source that has the credential pair that you want to use.
+When the value is not null, the `credentialPair` from the data source in the ARN is used.
+* `credentialPair` (Optional, Conflicts with `copySourceArn`) - Credential pair. See [Credential Pair](#credential_pair-argument-reference) below for more details.
+
+### credential_pair Argument Reference
+
+* `password` - (Required) Password, maximum length of 1024 characters.
+* `username` - (Required) User name, maximum length of 64 characters.
+
+### parameters Argument Reference
+
+To specify data source connection parameters, exactly one of the following sub-objects must be provided.
+
+* `amazonElasticsearch` - (Optional) [Parameters](#amazon_elasticsearch-argument-reference) for connecting to Amazon Elasticsearch.
+* `athena` - (Optional) [Parameters](#athena-argument-reference) for connecting to Athena.
+* `aurora` - (Optional) [Parameters](#aurora-argument-reference) for connecting to Aurora MySQL.
+* `auroraPostgresql` - (Optional) [Parameters](#aurora_postgresql-argument-reference) for connecting to Aurora Postgresql.
+* `awsIotAnalytics` - (Optional) [Parameters](#aws_iot_analytics-argument-reference) for connecting to AWS IoT Analytics.
+* `jira` - (Optional) [Parameters](#jira-argument-reference) for connecting to Jira.
+* `mariaDb` - (Optional) [Parameters](#maria_db-argument-reference) for connecting to MariaDB.
+* `mysql` - (Optional) [Parameters](#mysql-argument-reference) for connecting to MySQL.
+* `oracle` - (Optional) [Parameters](#oracle-argument-reference) for connecting to Oracle.
+* `postgresql` - (Optional) [Parameters](#postgresql-argument-reference) for connecting to Postgresql.
+* `presto` - (Optional) [Parameters](#presto-argument-reference) for connecting to Presto.
+* `rds` - (Optional) [Parameters](#rds-argument-reference) for connecting to RDS.
+* `redshift` - (Optional) [Parameters](#redshift-argument-reference) for connecting to Redshift.
+* `s3` - (Optional) [Parameters](#s3-argument-reference) for connecting to S3.
+* `serviceNow` - (Optional) [Parameters](#service_now-argument-reference) for connecting to ServiceNow.
+* `snowflake` - (Optional) [Parameters](#snowflake-argument-reference) for connecting to Snowflake.
+* `spark` - (Optional) [Parameters](#spark-argument-reference) for connecting to Spark.
+* `sqlServer` - (Optional) [Parameters](#sql_server-argument-reference) for connecting to SQL Server.
+* `teradata` - (Optional) [Parameters](#teradata-argument-reference) for connecting to Teradata.
+* `twitter` - (Optional) [Parameters](#twitter-argument-reference) for connecting to Twitter.
+
+### permission Argument Reference
+
+* `actions` - (Required) Set of IAM actions to grant or revoke permissions on. Max of 16 items.
+* `principal` - (Required) The Amazon Resource Name (ARN) of the principal.
+
+### ssl_properties Argument Reference
+
+* `disableSsl` - (Required) A Boolean option to control whether SSL should be disabled.
+
+### vpc_connection_properties Argument Reference
+
+* `vpcConnectionArn` - (Required) The Amazon Resource Name (ARN) for the VPC connection.
+
+### amazon_elasticsearch Argument Reference
+
+* `domain` - (Required) The OpenSearch domain.
+
+### athena Argument Reference
+
+* `workGroup` - (Optional) The work-group to which to connect.
+
+### aurora Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### aurora_postgresql Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### aws_iot_analytics Argument Reference
+
+* `dataSetName` - (Required) The name of the data set to which to connect.
+
+### jira Argument Reference
+
+* `siteBaseUrl` - (Required) The base URL of the Jira instance's site to which to connect.
+
+### maria_db Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### mysql Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### oracle Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### postgresql Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### presto Argument Reference
+
+* `catalog` - (Required) The catalog to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### rds Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `instanceId` - (Optional) The instance ID to which to connect.
+
+### redshift Argument Reference
+
+* `clusterId` - (Optional, Required if `host` and `port` are not provided) The ID of the cluster to which to connect.
+* `database` - (Required) The database to which to connect.
+* `host` - (Optional, Required if `clusterId` is not provided) The host to which to connect.
+* `port` - (Optional, Required if `clusterId` is not provided) The port to which to connect.
+
+### s3 Argument Reference
+
+* `manifestFileLocation` - (Required) An [object containing the S3 location](#manifest_file_location-argument-reference) of the S3 manifest file.
+
+### manifest_file_location Argument Reference
+
+* `bucket` - (Required) The name of the bucket that contains the manifest file.
+* `key` - (Required) The key of the manifest file within the bucket.
+
+### service_now Argument Reference
+
+* `siteBaseUrl` - (Required) The base URL of the ServiceNow instance's site to which to connect.
+
+### snowflake Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `warehouse` - (Required) The warehouse to which to connect.
+
+### spark Argument Reference
+
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### sql_server Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### teradata Argument Reference
+
+* `database` - (Required) The database to which to connect.
+* `host` - (Required) The host to which to connect.
+* `port` - (Required) The port to which to connect.
+
+### twitter Argument Reference
+
+* `maxRows` - (Required) The maximum number of rows to query.
+* `query` - (Required) The Twitter query to retrieve the data.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the data source.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight data source using the AWS account ID and data source ID separated by a slash (`/`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a QuickSight data source using the AWS account ID and data source ID separated by a slash (`/`). For example:
+
+```console
+% terraform import aws_quicksight_data_source.example 123456789123/my-data-source-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_folder.html.markdown b/website/docs/cdktf/typescript/r/quicksight_folder.html.markdown
new file mode 100644
index 00000000000..669d9e1eca0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_folder.html.markdown
@@ -0,0 +1,168 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_folder"
+description: |-
+  Manages a QuickSight Folder.
+--- + + + +# Resource: aws_quicksight_folder + +Resource for managing a QuickSight Folder. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightFolder } from "./.gen/providers/aws/quicksight-folder"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightFolder(this, "example", { + folderId: "example-id", + name: "example-name", + }); + } +} + +``` + +### With Permissions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightFolder } from "./.gen/providers/aws/quicksight-folder"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightFolder(this, "example", { + folderId: "example-id", + name: "example-name", + permissions: [ + { + actions: [ + "quicksight:CreateFolder", + "quicksight:DescribeFolder", + "quicksight:UpdateFolder", + "quicksight:DeleteFolder", + "quicksight:CreateFolderMembership", + "quicksight:DeleteFolderMembership", + "quicksight:DescribeFolderPermissions", + "quicksight:UpdateFolderPermissions", + ], + principal: Token.asString(awsQuicksightUserExample.arn), + }, + ], + }); + } +} + +``` + +### With Parent Folder + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightFolder } from "./.gen/providers/aws/quicksight-folder"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const parent = new QuicksightFolder(this, "parent", { + folderId: "parent-id", + name: "parent-name", + }); + new QuicksightFolder(this, "example", { + folderId: "example-id", + name: "example-name", + parentFolderArn: parent.arn, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `folderId` - (Required, Forces new resource) Identifier for the folder. +* `name` - (Required) Display name for the folder. + +The following arguments are optional: + +* `awsAccountId` - (Optional, Forces new resource) AWS account ID. +* `folderType` - (Optional) The type of folder. By default, it is `shared`. Valid values are: `shared`. +* `parentFolderArn` - (Optional) The Amazon Resource Name (ARN) for the parent folder. If not set, creates a root-level folder. +* `permissions` - (Optional) A set of resource permissions on the folder. Maximum of 64 items. See [permissions](#permissions). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+
+### permissions
+
+* `actions` - (Required) List of IAM actions to grant or revoke permissions on.
+* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the folder.
+* `createdTime` - The time that the folder was created.
+* `folderPath` - An array of ancestor ARN strings for the folder. Empty for root-level folders.
+* `id` - A comma-delimited string joining AWS account ID and folder ID.
+* `lastUpdatedTime` - The time that the folder was last updated.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `read` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight folder using the AWS account ID and folder ID separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a QuickSight folder using the AWS account ID and folder ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_folder.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_folder_membership.html.markdown b/website/docs/cdktf/typescript/r/quicksight_folder_membership.html.markdown
new file mode 100644
index 00000000000..0c0a8b1e69d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_folder_membership.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_folder_membership"
+description: |-
+  Terraform resource for managing an AWS QuickSight Folder Membership.
+---
+
+
+# Resource: aws_quicksight_folder_membership
+
+Terraform resource for managing an AWS QuickSight Folder Membership.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
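+ *
+ * Note: `awsQuicksightFolderExample` and `awsQuicksightDataSetExample` in the
+ * example below are placeholder references emitted by `cdktf convert` for a
+ * folder and dataset defined elsewhere in the original configuration.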
+ */
+import { QuicksightFolderMembership } from "./.gen/providers/aws/quicksight-folder-membership";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightFolderMembership(this, "example", {
+      folderId: Token.asString(awsQuicksightFolderExample.folderId),
+      memberId: Token.asString(awsQuicksightDataSetExample.dataSetId),
+      memberType: "DATASET",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `folderId` - (Required, Forces new resource) Identifier for the folder.
+* `memberId` - (Required, Forces new resource) ID of the asset (the dashboard, analysis, or dataset).
+* `memberType` - (Required, Forces new resource) Type of the member. Valid values are `ANALYSIS`, `DASHBOARD`, and `DATASET`.
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional, Forces new resource) AWS account ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A comma-delimited string joining AWS account ID, folder ID, member type, and member ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Folder Membership using the AWS account ID, folder ID, member type, and member ID separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QuickSight Folder Membership using the AWS account ID, folder ID, member type, and member ID separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_folder_membership.example 123456789012,example-folder,DATASET,example-dataset
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_group.html.markdown b/website/docs/cdktf/typescript/r/quicksight_group.html.markdown
new file mode 100644
index 00000000000..dcd9d9d815d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_group.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_group"
+description: |-
+  Manages a QuickSight Group.
+---
+
+
+# Resource: aws_quicksight_group
+
+Resource for managing a QuickSight Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightGroup } from "./.gen/providers/aws/quicksight-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightGroup(this, "example", {
+      groupName: "tf-example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `groupName` - (Required) A name for the group.
+* `awsAccountId` - (Optional) The ID for the AWS account that the group is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `description` - (Optional) A description for the group.
+* `namespace` - (Optional) The namespace. Currently, you should set this to `default`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Group using the AWS account ID, namespace, and group name separated by `/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QuickSight Group using the AWS account ID, namespace, and group name separated by `/`. For example:
+
+```console
+% terraform import aws_quicksight_group.example 123456789123/default/tf-example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_group_membership.html.markdown b/website/docs/cdktf/typescript/r/quicksight_group_membership.html.markdown
new file mode 100644
index 00000000000..8415454262b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_group_membership.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_group_membership"
+description: |-
+  Manages a QuickSight Group Membership.
+---
+
+
+# Resource: aws_quicksight_group_membership
+
+Resource for managing a QuickSight Group Membership.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightGroupMembership } from "./.gen/providers/aws/quicksight-group-membership";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightGroupMembership(this, "example", {
+      groupName: "all-access-users",
+      memberName: "john_smith",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `groupName` - (Required) The name of the group in which the member will be added.
+* `memberName` - (Required) The name of the member to add to the group.
+* `awsAccountId` - (Optional) The ID for the AWS account that the group is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `namespace` - (Optional) The namespace that you want the user to be a part of. Defaults to `default`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Group membership using the AWS account ID, namespace, group name, and member name separated by `/`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QuickSight Group membership using the AWS account ID, namespace, group name, and member name separated by `/`. For example:
+
+```console
+% terraform import aws_quicksight_group_membership.example 123456789123/default/all-access-users/john_smith
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_iam_policy_assignment.html.markdown b/website/docs/cdktf/typescript/r/quicksight_iam_policy_assignment.html.markdown
new file mode 100644
index 00000000000..8ebde4b34ed
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_iam_policy_assignment.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_iam_policy_assignment"
+description: |-
+  Terraform resource for managing an AWS QuickSight IAM Policy Assignment.
+---
+
+
+# Resource: aws_quicksight_iam_policy_assignment
+
+Terraform resource for managing an AWS QuickSight IAM Policy Assignment.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightIamPolicyAssignment } from "./.gen/providers/aws/quicksight-iam-policy-assignment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightIamPolicyAssignment(this, "example", {
+      assignmentName: "example",
+      assignmentStatus: "ENABLED",
+      identities: [
+        {
+          user: [Token.asString(awsQuicksightUserExample.userName)],
+        },
+      ],
+      policyArn: Token.asString(awsIamPolicyExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `assignmentName` - (Required) Name of the assignment.
+* `assignmentStatus` - (Required) Status of the assignment. Valid values are `ENABLED`, `DISABLED`, and `DRAFT`.
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional) AWS account ID.
+* `identities` - (Optional) Amazon QuickSight users, groups, or both to assign the policy to. See [`identities`](#identities).
+* `namespace` - (Optional) Namespace that contains the assignment. Defaults to `default`.
+* `policyArn` - (Optional) ARN of the IAM policy to apply to the Amazon QuickSight users and groups specified in this assignment.
+
+### identities
+
+* `groups` - (Optional) Array of QuickSight group names to assign the policy to.
+* `user` - (Optional) Array of QuickSight user names to assign the policy to.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `assignmentId` - Assignment ID.
+* `id` - A comma-delimited string joining AWS account ID, namespace, and assignment name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight IAM Policy Assignment using the AWS account ID, namespace, and assignment name separated by commas (`,`).
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QuickSight IAM Policy Assignment using the AWS account ID, namespace, and assignment name separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_iam_policy_assignment.example 123456789012,default,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_ingestion.html.markdown b/website/docs/cdktf/typescript/r/quicksight_ingestion.html.markdown
new file mode 100644
index 00000000000..28c48411431
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_ingestion.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_ingestion"
+description: |-
+  Terraform resource for managing an AWS QuickSight Ingestion.
+---
+
+
+# Resource: aws_quicksight_ingestion
+
+Terraform resource for managing an AWS QuickSight Ingestion.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightIngestion } from "./.gen/providers/aws/quicksight-ingestion";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightIngestion(this, "example", {
+      dataSetId: Token.asString(awsQuicksightDataSetExample.dataSetId),
+      ingestionId: "example-id",
+      ingestionType: "FULL_REFRESH",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `dataSetId` - (Required) ID of the dataset used in the ingestion.
+* `ingestionId` - (Required) ID for the ingestion.
+* `ingestionType` - (Required) Type of ingestion to be created. Valid values are `INCREMENTAL_REFRESH` and `FULL_REFRESH`.
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional) AWS account ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Ingestion.
+* `id` - A comma-delimited string joining AWS account ID, data set ID, and ingestion ID.
+* `ingestionStatus` - Ingestion status.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Ingestion using the AWS account ID, data set ID, and ingestion ID separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QuickSight Ingestion using the AWS account ID, data set ID, and ingestion ID separated by commas (`,`).
For example:
+
+```console
+% terraform import aws_quicksight_ingestion.example 123456789012,example-dataset-id,example-ingestion-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_namespace.html.markdown b/website/docs/cdktf/typescript/r/quicksight_namespace.html.markdown
new file mode 100644
index 00000000000..dadb59b15c9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_namespace.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_namespace"
+description: |-
+  Terraform resource for managing an AWS QuickSight Namespace.
+---
+
+
+# Resource: aws_quicksight_namespace
+
+Terraform resource for managing an AWS QuickSight Namespace.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightNamespace } from "./.gen/providers/aws/quicksight-namespace";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightNamespace(this, "example", {
+      namespace: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `namespace` - (Required) Name of the namespace.
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional) AWS account ID.
+* `identityStore` - (Optional) User identity directory type. Defaults to `QUICKSIGHT`, the only current valid value.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Namespace.
+* `capacityRegion` - Namespace AWS Region.
+* `creationStatus` - Creation status of the namespace.
+* `id` - A comma-delimited string joining AWS account ID and namespace.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `2m`)
+* `delete` - (Default `2m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Namespace using the AWS account ID and namespace separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QuickSight Namespace using the AWS account ID and namespace separated by commas (`,`).
For example:
+
+```console
+% terraform import aws_quicksight_namespace.example 123456789012,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_refresh_schedule.html.markdown b/website/docs/cdktf/typescript/r/quicksight_refresh_schedule.html.markdown
new file mode 100644
index 00000000000..673b0cbca8e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_refresh_schedule.html.markdown
@@ -0,0 +1,189 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_refresh_schedule"
+description: |-
+  Manages a QuickSight Refresh Schedule.
+---
+
+
+# Resource: aws_quicksight_refresh_schedule
+
+Resource for managing a QuickSight Refresh Schedule.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightRefreshSchedule } from "./.gen/providers/aws/quicksight-refresh-schedule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightRefreshSchedule(this, "example", {
+      dataSetId: "dataset-id",
+      schedule: [
+        {
+          refreshType: "FULL_REFRESH",
+          scheduleFrequency: [
+            {
+              interval: "HOURLY",
+            },
+          ],
+        },
+      ],
+      scheduleId: "schedule-id",
+    });
+  }
+}
+
+```
+
+### With Weekly Refresh
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightRefreshSchedule } from "./.gen/providers/aws/quicksight-refresh-schedule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightRefreshSchedule(this, "example", {
+      dataSetId: "dataset-id",
+      schedule: [
+        {
+          refreshType: "INCREMENTAL_REFRESH",
+          scheduleFrequency: [
+            {
+              interval: "WEEKLY",
+              refreshOnDay: [
+                {
+                  dayOfWeek: "MONDAY",
+                },
+              ],
+              timeOfTheDay: "01:00",
+              timezone: "Europe/London",
+            },
+          ],
+        },
+      ],
+      scheduleId: "schedule-id",
+    });
+  }
+}
+
+```
+
+### With Monthly Refresh
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightRefreshSchedule } from "./.gen/providers/aws/quicksight-refresh-schedule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightRefreshSchedule(this, "example", {
+      dataSetId: "dataset-id",
+      schedule: [
+        {
+          refreshType: "INCREMENTAL_REFRESH",
+          scheduleFrequency: [
+            {
+              interval: "MONTHLY",
+              refreshOnDay: [
+                {
+                  dayOfMonth: "1",
+                },
+              ],
+              timeOfTheDay: "01:00",
+              timezone: "Europe/London",
+            },
+          ],
+        },
+      ],
+      scheduleId: "schedule-id",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `dataSetId` - (Required, Forces new resource) The ID of the dataset.
+* `scheduleId` - (Required, Forces new resource) The ID of the refresh schedule.
+* `schedule` - (Required) The [refresh schedule](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_RefreshSchedule.html). See [schedule](#schedule).
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional, Forces new resource) AWS account ID.
+
+### schedule
+
+* `refreshType` - (Required) The type of refresh that the dataset undergoes. Valid values are `INCREMENTAL_REFRESH` and `FULL_REFRESH`.
+* `startAfterDateTime` - (Optional) Time after which the refresh schedule can be started, expressed in `YYYY-MM-DDTHH:MM:SS` format.
+* `scheduleFrequency` - (Optional) The configuration of the [schedule frequency](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_RefreshFrequency.html). See [schedule_frequency](#schedule_frequency).
+
+### schedule_frequency
+
+* `interval` - (Required) The interval between scheduled refreshes. Valid values are `MINUTE15`, `MINUTE30`, `HOURLY`, `DAILY`, `WEEKLY` and `MONTHLY`.
+* `timeOfTheDay` - (Optional) The time of day that you want the dataset to refresh. This value is expressed in `HH:MM` format. This field is not required for schedules that refresh hourly.
+* `timezone` - (Optional) The timezone that you want the refresh schedule to use.
+* `refreshOnDay` - (Optional) The [refresh on entity](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ScheduleRefreshOnEntity.html) configuration for weekly or monthly schedules. See [refresh_on_day](#refresh_on_day).
+
+### refresh_on_day
+
+* `dayOfMonth` - (Optional) The day of the month that you want to schedule refresh on.
+* `dayOfWeek` - (Optional) The day of the week that you want to schedule a refresh on. Valid values are `SUNDAY`, `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY` and `SATURDAY`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the refresh schedule.
+* `id` - A comma-delimited string joining AWS account ID, data set ID, and refresh schedule ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Refresh Schedule using the AWS account ID, data set ID, and schedule ID separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a QuickSight Refresh Schedule using the AWS account ID, data set ID, and schedule ID separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_refresh_schedule.example 123456789012,dataset-id,schedule-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_template.html.markdown b/website/docs/cdktf/typescript/r/quicksight_template.html.markdown
new file mode 100644
index 00000000000..df4c03a1a95
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_template.html.markdown
@@ -0,0 +1,226 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_template"
+description: |-
+  Manages a QuickSight Template.
+---
+
+
+# Resource: aws_quicksight_template
+
+Resource for managing a QuickSight Template.
+ +## Example Usage + +### From Source Template + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightTemplate } from "./.gen/providers/aws/quicksight-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightTemplate(this, "example", { + name: "example-name", + sourceEntity: { + sourceTemplate: { + arn: source.arn, + }, + }, + templateId: "example-id", + versionDescription: "version", + }); + } +} + +``` + +### With Definition + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightTemplate } from "./.gen/providers/aws/quicksight-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightTemplate(this, "example", { + definition: { + dataSetConfiguration: [ + { + dataSetSchema: { + columnSchemaList: [ + { + dataType: "STRING", + name: "Column1", + }, + { + dataType: "INTEGER", + name: "Column2", + }, + ], + }, + placeholder: "1", + }, + ], + sheets: [ + { + sheetId: "Test1", + title: "Test", + visuals: [ + { + barChartVisual: { + chartConfiguration: { + fieldWells: { + barChartAggregatedFieldWells: { + category: [ + { + categoricalDimensionField: { + column: { + columnName: "Column1", + dataSetIdentifier: "1", + }, + fieldId: "1", + }, + }, + ], + values: [ + { + numericalMeasureField: { + aggregationFunction: { + simpleNumericalAggregation: "SUM", + }, + column: { + columnName: "Column2", + dataSetIdentifier: "1", + }, + fieldId: "2", + }, + }, + ], + }, + }, + }, + visualId: "BarChart", + }, + }, + ], + }, + ], + }, + name: "example-name", + templateId: "example-id", + versionDescription: "version", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `templateId` - (Required, Forces new resource) Identifier for the template. +* `name` - (Required) Display name for the template. +* `versionDescription` - (Required) A description of the current template version being created/updated. + +The following arguments are optional: + +* `awsAccountId` - (Optional, Forces new resource) AWS account ID. +* `definition` - (Optional) A detailed template definition. Only one of `definition` or `sourceEntity` should be configured. See [definition](#definition). +* `permissions` - (Optional) A set of resource permissions on the template. Maximum of 64 items. See [permissions](#permissions). +* `sourceEntity` - (Optional) The entity that you are using as a source when you create the template (analysis or template). Only one of `definition` or `sourceEntity` should be configured. See [source_entity](#source_entity). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+
+### permissions
+
+* `actions` - (Required) List of IAM actions to grant or revoke permissions on.
+* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values.
+
+### source_entity
+
+* `sourceAnalysis` - (Optional) The source analysis, if the template is based on an analysis. Only one of `sourceAnalysis` or `sourceTemplate` should be configured. See [source_analysis](#source_analysis) and the sketch following the [definition](#definition) section below.
+* `sourceTemplate` - (Optional) The source template, if the template is based on another template. Only one of `sourceAnalysis` or `sourceTemplate` should be configured. See [source_template](#source_template).
+
+### source_analysis
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the resource.
+* `dataSetReferences` - (Required) A list of dataset references used as placeholders in the template. See [data_set_references](#data_set_references).
+
+### data_set_references
+
+* `dataSetArn` - (Required) Dataset Amazon Resource Name (ARN).
+* `dataSetPlaceholder` - (Required) Dataset placeholder.
+
+### source_template
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the resource.
+
+### definition
+
+* `dataSetConfiguration` - (Required) A list of dataset configurations. These configurations define the required columns for each dataset used within a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_DataSetConfiguration.html).
+* `analysisDefaults` - (Optional) The configuration for default analysis settings. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_AnalysisDefaults.html).
+* `calculatedFields` - (Optional) A list of calculated field definitions for the template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CalculatedField.html).
+* `columnConfigurations` - (Optional) A list of template-level column configurations. Column configurations are used to set default formatting for a column that's used throughout a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ColumnConfiguration.html).
+* `filterGroups` - (Optional) A list of filter definitions for a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_FilterGroup.html). For more information, see [Filtering Data](https://docs.aws.amazon.com/quicksight/latest/user/filtering-visual-data.html) in Amazon QuickSight User Guide.
+* `parametersDeclarations` - (Optional) A list of parameter declarations for a template. Parameters are named variables that can transfer a value for use by an action or an object. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ParameterDeclaration.html). For more information, see [Parameters in Amazon QuickSight](https://docs.aws.amazon.com/quicksight/latest/user/parameters-in-quicksight.html) in the Amazon QuickSight User Guide.
+* `sheets` - (Optional) A list of sheet definitions for a template. See [AWS API Documentation for complete description](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_SheetDefinition.html).
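+
+As a supplement to the sections above, the following is a minimal sketch (not produced by `cdktf convert`) of creating a template from an existing analysis via `sourceEntity` and `sourceAnalysis`. The analysis and dataset ARNs are hypothetical placeholders, not values from a real account.
+
+```typescript
+// A sketch: template created from an existing analysis. All ARNs and IDs
+// below are hypothetical placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { QuicksightTemplate } from "./.gen/providers/aws/quicksight-template";
+class SourceAnalysisSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightTemplate(this, "from_analysis", {
+      name: "example-from-analysis",
+      templateId: "example-from-analysis-id",
+      versionDescription: "initial",
+      sourceEntity: {
+        sourceAnalysis: {
+          arn: "arn:aws:quicksight:us-east-1:123456789012:analysis/example-analysis-id",
+          // Map each dataset used by the analysis to a placeholder name.
+          dataSetReferences: [
+            {
+              dataSetArn:
+                "arn:aws:quicksight:us-east-1:123456789012:dataset/example-dataset-id",
+              dataSetPlaceholder: "1",
+            },
+          ],
+        },
+      },
+    });
+  }
+}
+
+```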
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the template.
+* `createdTime` - The time that the template was created.
+* `id` - A comma-delimited string joining AWS account ID and template ID.
+* `lastUpdatedTime` - The time that the template was last updated.
+* `sourceEntityArn` - Amazon Resource Name (ARN) of an analysis or template that was used to create this template.
+* `status` - The template creation status.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `versionNumber` - The version number of the template version.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Template using the AWS account ID and template ID separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a QuickSight Template using the AWS account ID and template ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_template.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_template_alias.html.markdown b/website/docs/cdktf/typescript/r/quicksight_template_alias.html.markdown
new file mode 100644
index 00000000000..64aad18615d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_template_alias.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_template_alias"
+description: |-
+  Terraform resource for managing an AWS QuickSight Template Alias.
+---
+
+
+# Resource: aws_quicksight_template_alias
+
+Terraform resource for managing an AWS QuickSight Template Alias.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightTemplateAlias } from "./.gen/providers/aws/quicksight-template-alias";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightTemplateAlias(this, "example", {
+      aliasName: "example-alias",
+      templateId: test.templateId,
+      templateVersionNumber: test.versionNumber,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `aliasName` - (Required, Forces new resource) Display name of the template alias.
+* `templateId` - (Required, Forces new resource) ID of the template.
+* `templateVersionNumber` - (Required) Version number of the template.
+ +The following arguments are optional: + +* `awsAccountId` - (Optional, Forces new resource) AWS account ID. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the template alias. +* `id` - A comma-delimited string joining AWS account ID, template ID, and alias name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight Template Alias using the AWS account ID, template ID, and alias name separated by a comma (`,`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import QuickSight Template Alias using the AWS account ID, template ID, and alias name separated by a comma (`,`). For example: + +```console +% terraform import aws_quicksight_template_alias.example 123456789012,example-id,example-alias +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/quicksight_theme.html.markdown b/website/docs/cdktf/typescript/r/quicksight_theme.html.markdown new file mode 100644 index 00000000000..868487749bb --- /dev/null +++ b/website/docs/cdktf/typescript/r/quicksight_theme.html.markdown @@ -0,0 +1,188 @@ +--- +subcategory: "QuickSight" +layout: "aws" +page_title: "AWS: aws_quicksight_theme" +description: |- + Manages a QuickSight Theme. +--- + + + +# Resource: aws_quicksight_theme + +Resource for managing a QuickSight Theme. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { QuicksightTheme } from "./.gen/providers/aws/quicksight-theme"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new QuicksightTheme(this, "example", { + baseThemeId: "MIDNIGHT", + configuration: { + dataColorPalette: { + colors: [ + "#FFFFFF", + "#111111", + "#222222", + "#333333", + "#444444", + "#555555", + "#666666", + "#777777", + "#888888", + "#999999", + ], + emptyFillColor: "#FFFFFF", + minMaxGradient: ["#FFFFFF", "#111111"], + }, + }, + name: "example", + themeId: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `themeId` - (Required, Forces new resource) Identifier of the theme. +* `baseThemeId` - (Required) The ID of the theme that a custom theme will inherit from. All themes inherit from one of the starting themes defined by Amazon QuickSight. For a list of the starting themes, use ListThemes or choose Themes from within an analysis. +* `name` - (Required) Display name of the theme. +* `configuration` - (Required) The theme configuration, which contains the theme display properties. See [configuration](#configuration). + +The following arguments are optional: + +* `awsAccountId` - (Optional, Forces new resource) AWS account ID. +* `permissions` - (Optional) A set of resource permissions on the theme. Maximum of 64 items. See [permissions](#permissions). 
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `versionDescription` - (Optional) A description of the current theme version being created/updated. + +### permissions + +* `actions` - (Required) List of IAM actions to grant or revoke permissions on. +* `principal` - (Required) ARN of the principal. See the [ResourcePermission documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_ResourcePermission.html) for the applicable ARN values. + +### configuration + +* `dataColorPalette` - (Optional) Color properties that apply to chart data colors. See [data_color_palette](#data_color_palette). +* `sheet` - (Optional) Display options related to sheets. See [sheet](#sheet). +* `typography` - (Optional) Determines the typography options. See [typography](#typography). +* `uiColorPalette` - (Optional) Color properties that apply to the UI and to charts, excluding the colors that apply to data. See [ui_color_palette](#ui_color_palette). + +### data_color_palette + +* `colors` - (Optional) List of hexadecimal codes for the colors. Minimum of 8 items and maximum of 20 items. +* `emptyFillColor` - (Optional) The hexadecimal code of a color that applies to charts where a lack of data is highlighted. +* `minMaxGradient` - (Optional) The minimum and maximum hexadecimal codes that describe a color gradient. List of exactly 2 items. + +### sheet + +* `tile` - (Optional) The display options for tiles. See [tile](#tile). +* `tileLayout` - (Optional) The layout options for tiles. See [tile_layout](#tile_layout). + +### tile + +* `border` - (Optional) The border around a tile. See [border](#border). + +### border + +* `show` - (Optional) The option to enable display of borders for visuals. + +### tile_layout + +* `gutter` - (Optional) The gutter settings that apply between tiles. See [gutter](#gutter). +* `margin` - (Optional) The margin settings that apply around the outside edge of sheets. See [margin](#margin). + +### gutter + +* `show` - (Optional) This Boolean value controls whether to display a gutter space between sheet tiles. + +### margin + +* `show` - (Optional) This Boolean value controls whether to display sheet margins. + +### typography + +* `fontFamilies` - (Optional) Determines the list of font families. Maximum number of 5 items. See [font_families](#font_families). + +### font_families + +* `fontFamily` - (Optional) Font family name. + +### ui_color_palette + +* `accent` - (Optional) Color (hexadecimal) that applies to selected states and buttons. +* `accentForeground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the accent color. +* `danger` - (Optional) Color (hexadecimal) that applies to error messages. +* `dangerForeground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the error color. +* `dimension` - (Optional) Color (hexadecimal) that applies to the names of fields that are identified as dimensions. +* `dimensionForeground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the dimension color. +* `measure` - (Optional) Color (hexadecimal) that applies to the names of fields that are identified as measures. 
+* `measureForeground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the measure color.
+* `primaryBackground` - (Optional) Color (hexadecimal) that applies to visuals and other high emphasis UI.
+* `primaryForeground` - (Optional) Color (hexadecimal) of text and other foreground elements that appear over the primary background regions, such as grid lines, borders, table banding, icons, and so on.
+* `secondaryBackground` - (Optional) Color (hexadecimal) that applies to the sheet background and sheet controls.
+* `secondaryForeground` - (Optional) Color (hexadecimal) that applies to any sheet title, sheet control text, or UI that appears over the secondary background.
+* `success` - (Optional) Color (hexadecimal) that applies to success messages, for example the check mark for a successful download.
+* `successForeground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the success color.
+* `warning` - (Optional) Color (hexadecimal) that applies to warning and informational messages.
+* `warningForeground` - (Optional) Color (hexadecimal) that applies to any text or other elements that appear over the warning color.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the theme.
+* `createdTime` - The time that the theme was created.
+* `id` - A comma-delimited string joining AWS account ID and theme ID.
+* `lastUpdatedTime` - The time that the theme was last updated.
+* `status` - The theme creation status.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+* `versionNumber` - The version number of the theme version.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a QuickSight Theme using the AWS account ID and theme ID separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a QuickSight Theme using the AWS account ID and theme ID separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_quicksight_theme.example 123456789012,example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_user.html.markdown b/website/docs/cdktf/typescript/r/quicksight_user.html.markdown
new file mode 100644
index 00000000000..16f8e4d2d9d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_user.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_user"
+description: |-
+  Manages a QuickSight User.
+---
+
+
+# Resource: aws_quicksight_user
+
+Resource for managing a QuickSight User.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { QuicksightUser } from "./.gen/providers/aws/quicksight-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new QuicksightUser(this, "example", {
+      email: "author@example.com",
+      iamArn: "arn:aws:iam::123456789012:user/Example",
+      identityType: "IAM",
+      namespace: "foo",
+      sessionName: "an-author",
+      userRole: "AUTHOR",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `email` - (Required) The email address of the user that you want to register.
+* `identityType` - (Required) Amazon QuickSight supports several ways of managing the identity of users. This parameter accepts either `IAM` or `QUICKSIGHT`. If `IAM` is specified, the `iamArn` must also be specified.
+* `userRole` - (Required) The Amazon QuickSight role of the user. The user role can be one of the following: `READER`, `AUTHOR`, or `ADMIN`.
+* `userName` - (Optional) The Amazon QuickSight user name that you want to create for the user you are registering. Only valid for registering a user with `identityType` set to `QUICKSIGHT`.
+* `awsAccountId` - (Optional) The ID for the AWS account that the user is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
+* `iamArn` - (Optional) The ARN of the IAM user or role that you are registering with Amazon QuickSight.
+* `namespace` - (Optional) The Amazon QuickSight namespace to create the user in. Defaults to `default`.
+* `sessionName` - (Optional) The name of the IAM session to use when assuming roles that can embed QuickSight dashboards. Only valid for registering users using an assumed IAM role. Additionally, if registering multiple users using the same IAM role, each user needs to have a unique session name.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the user.
+
+## Import
+
+You cannot import this resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/quicksight_vpc_connection.html.markdown b/website/docs/cdktf/typescript/r/quicksight_vpc_connection.html.markdown
new file mode 100644
index 00000000000..b3a5792ec15
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/quicksight_vpc_connection.html.markdown
@@ -0,0 +1,138 @@
+---
+subcategory: "QuickSight"
+layout: "aws"
+page_title: "AWS: aws_quicksight_vpc_connection"
+description: |-
+  Terraform resource for managing an AWS QuickSight VPC Connection.
+---
+
+
+# Resource: aws_quicksight_vpc_connection
+
+Terraform resource for managing an AWS QuickSight VPC Connection.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
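+ *
+ * The IAM role created below grants QuickSight the EC2 network-interface
+ * permissions it needs to establish the VPC connection; the security group
+ * and subnet IDs are placeholder values.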
+ */
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { QuicksightVpcConnection } from "./.gen/providers/aws/quicksight-vpc-connection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const vpcConnectionRole = new IamRole(this, "vpc_connection_role", {
+      assumeRolePolicy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: "sts:AssumeRole",
+              Effect: "Allow",
+              Principal: {
+                Service: "quicksight.amazonaws.com",
+              },
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+      inlinePolicy: [
+        {
+          name: "QuickSightVPCConnectionRolePolicy",
+          policy: Token.asString(
+            Fn.jsonencode({
+              Statement: [
+                {
+                  Action: [
+                    "ec2:CreateNetworkInterface",
+                    "ec2:ModifyNetworkInterfaceAttribute",
+                    "ec2:DeleteNetworkInterface",
+                    "ec2:DescribeSubnets",
+                    "ec2:DescribeSecurityGroups",
+                  ],
+                  Effect: "Allow",
+                  Resource: ["*"],
+                },
+              ],
+              Version: "2012-10-17",
+            })
+          ),
+        },
+      ],
+    });
+    new QuicksightVpcConnection(this, "example", {
+      name: "Example Connection",
+      roleArn: vpcConnectionRole.arn,
+      securityGroupIds: ["sg-00000000000000000"],
+      subnetIds: ["subnet-00000000000000000", "subnet-00000000000000001"],
+      vpcConnectionId: "example-connection-id",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `vpcConnectionId` - (Required) The ID of the VPC connection.
+* `name` - (Required) The display name for the VPC connection.
+* `roleArn` - (Required) The IAM role to associate with the VPC connection.
+* `securityGroupIds` - (Required) A list of security group IDs for the VPC connection.
+* `subnetIds` - (Required) A list of subnet IDs for the VPC connection.
+
+The following arguments are optional:
+
+* `awsAccountId` - (Optional) AWS account ID.
+* `dnsResolvers` - (Optional) A list of IP addresses of DNS resolver endpoints for the VPC connection.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the VPC connection.
+* `availabilityStatus` - The availability status of the VPC connection. Valid values are `AVAILABLE`, `UNAVAILABLE` or `PARTIALLY_AVAILABLE`.
+* `id` - A comma-delimited string joining AWS account ID and VPC connection ID.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import QuickSight VPC connection using the AWS account ID and VPC connection ID separated by commas (`,`).
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import QuickSight VPC connection using the AWS account ID and VPC connection ID separated by commas (`,`). For example:
+
+```console
+% terraform import aws_quicksight_vpc_connection.example 123456789012,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ram_principal_association.markdown b/website/docs/cdktf/typescript/r/ram_principal_association.markdown
new file mode 100644
index 00000000000..2cf355df9aa
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ram_principal_association.markdown
@@ -0,0 +1,122 @@
+---
+subcategory: "RAM (Resource Access Manager)"
+layout: "aws"
+page_title: "AWS: aws_ram_principal_association"
+description: |-
+  Provides a Resource Access Manager (RAM) principal association.
+---
+
+
+# Resource: aws_ram_principal_association
+
+Provides a Resource Access Manager (RAM) principal association. Depending on whether [RAM Sharing with AWS Organizations is enabled](https://docs.aws.amazon.com/ram/latest/userguide/getting-started-sharing.html#getting-started-sharing-orgs), the RAM behavior with different principal types changes.
+
+When RAM Sharing with AWS Organizations is enabled:
+
+- For AWS Account ID, Organization, and Organizational Unit principals within the same AWS Organization, no resource share invitation is sent and resources become available automatically after creating the association.
+- For AWS Account ID principals outside the AWS Organization, a resource share invitation is sent and must be accepted before resources become available. See the [`awsRamResourceShareAccepter` resource](/docs/providers/aws/r/ram_resource_share_accepter.html) to accept these invitations.
+
+When RAM Sharing with AWS Organizations is not enabled:
+
+- Organization and Organizational Unit principals cannot be used.
+- For AWS Account ID principals, a resource share invitation is sent and must be accepted before resources become available. See the [`awsRamResourceShareAccepter` resource](/docs/providers/aws/r/ram_resource_share_accepter.html) to accept these invitations.
+
+## Example Usage
+
+### AWS Account ID
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RamPrincipalAssociation } from "./.gen/providers/aws/ram-principal-association";
+import { RamResourceShare } from "./.gen/providers/aws/ram-resource-share";
+interface MyConfig {
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new RamResourceShare(this, "example", {
+      allowExternalPrincipals: true,
+      name: config.name,
+    });
+    const awsRamPrincipalAssociationExample = new RamPrincipalAssociation(
+      this,
+      "example_1",
+      {
+        principal: "111111111111",
+        resourceShareArn: example.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsRamPrincipalAssociationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### AWS Organization
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RamPrincipalAssociation } from "./.gen/providers/aws/ram-principal-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RamPrincipalAssociation(this, "example", {
+      principal: Token.asString(awsOrganizationsOrganizationExample.arn),
+      resourceShareArn: Token.asString(awsRamResourceShareExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `principal` - (Required) The principal to associate with the resource share. Possible values are an AWS account ID, an AWS Organizations Organization ARN, or an AWS Organizations Organization Unit ARN.
+* `resourceShareArn` - (Required) The Amazon Resource Name (ARN) of the resource share.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the Resource Share and the principal, separated by a comma.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RAM Principal Associations using their Resource Share ARN and the `principal` separated by a comma. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RAM Principal Associations using their Resource Share ARN and the `principal` separated by a comma. For example:
+
+```console
+% terraform import aws_ram_principal_association.example arn:aws:ram:eu-west-1:123456789012:resource-share/73da1ab9-b94a-4ba3-8eb4-45917f7f4b12,123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ram_resource_association.html.markdown b/website/docs/cdktf/typescript/r/ram_resource_association.html.markdown
new file mode 100644
index 00000000000..c91dd712a85
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ram_resource_association.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "RAM (Resource Access Manager)"
+layout: "aws"
+page_title: "AWS: aws_ram_resource_association"
+description: |-
+  Manages a Resource Access Manager (RAM) Resource Association.
+---
+
+
+
+# Resource: aws_ram_resource_association
+
+Manages a Resource Access Manager (RAM) Resource Association.
+
+~> **NOTE:** Certain AWS resources (e.g., EC2 Subnets) can only be shared in an AWS account that is a member of an AWS Organizations organization with organization-wide Resource Access Manager functionality enabled. See the [Resource Access Manager User Guide](https://docs.aws.amazon.com/ram/latest/userguide/what-is.html) and AWS service specific documentation for additional information.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RamResourceAssociation } from "./.gen/providers/aws/ram-resource-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RamResourceAssociation(this, "example", {
+      resourceArn: Token.asString(awsSubnetExample.arn),
+      resourceShareArn: Token.asString(awsRamResourceShareExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceArn` - (Required) Amazon Resource Name (ARN) of the resource to associate with the RAM Resource Share.
+* `resourceShareArn` - (Required) Amazon Resource Name (ARN) of the RAM Resource Share.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the resource share.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RAM Resource Associations using their Resource Share ARN and Resource ARN separated by a comma. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RAM Resource Associations using their Resource Share ARN and Resource ARN separated by a comma. For example:
+
+```console
+% terraform import aws_ram_resource_association.example arn:aws:ram:eu-west-1:123456789012:resource-share/73da1ab9-b94a-4ba3-8eb4-45917f7f4b12,arn:aws:ec2:eu-west-1:123456789012:subnet/subnet-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ram_resource_share.markdown b/website/docs/cdktf/typescript/r/ram_resource_share.markdown
new file mode 100644
index 00000000000..ed9d8051a6b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ram_resource_share.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "RAM (Resource Access Manager)"
+layout: "aws"
+page_title: "AWS: aws_ram_resource_share"
+description: |-
+  Manages a Resource Access Manager (RAM) Resource Share.
+---
+
+
+
+# Resource: aws_ram_resource_share
+
+Manages a Resource Access Manager (RAM) Resource Share. To associate principals with the share, see the [`awsRamPrincipalAssociation` resource](/docs/providers/aws/r/ram_principal_association.html). To associate resources with the share, see the [`awsRamResourceAssociation` resource](/docs/providers/aws/r/ram_resource_association.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RamResourceShare } from "./.gen/providers/aws/ram-resource-share";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RamResourceShare(this, "example", {
+      allowExternalPrincipals: true,
+      name: "example",
+      tags: {
+        Environment: "Production",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the resource share.
+* `allowExternalPrincipals` - (Optional) Indicates whether principals outside your organization can be associated with a resource share.
+* `permissionArns` - (Optional) Specifies the Amazon Resource Names (ARNs) of the RAM permission to associate with the resource share. If you do not specify an ARN for the permission, RAM automatically attaches the default version of the permission for each resource type. You can associate only one permission with each resource type included in the resource share.
+* `tags` - (Optional) A map of tags to assign to the resource share. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the resource share.
+* `id` - The Amazon Resource Name (ARN) of the resource share.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import resource shares using the `arn` of the resource share. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import resource shares using the `arn` of the resource share. For example:
+
+```console
+% terraform import aws_ram_resource_share.example arn:aws:ram:eu-west-1:123456789012:resource-share/73da1ab9-b94a-4ba3-8eb4-45917f7f4b12
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ram_resource_share_accepter.markdown b/website/docs/cdktf/typescript/r/ram_resource_share_accepter.markdown
new file mode 100644
index 00000000000..831301024f2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ram_resource_share_accepter.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "RAM (Resource Access Manager)"
+layout: "aws"
+page_title: "AWS: aws_ram_resource_share_accepter"
+description: |-
+  Manages accepting a Resource Access Manager (RAM) Resource Share invitation.
+---
+
+
+
+# Resource: aws_ram_resource_share_accepter
+
+Manages accepting a Resource Access Manager (RAM) Resource Share invitation. From a _receiver_ AWS account, accept an invitation to share resources that were shared by a _sender_ AWS account. To create a resource share in the _sender_, see the [`awsRamResourceShare` resource](/docs/providers/aws/r/ram_resource_share.html).
+
+~> **Note:** If both AWS accounts are in the same Organization and [RAM Sharing with AWS Organizations is enabled](https://docs.aws.amazon.com/ram/latest/userguide/getting-started-sharing.html#getting-started-sharing-orgs), this resource is not necessary as RAM Resource Share invitations are not used.
+
+## Example Usage
+
+This configuration provides an example of using multiple Terraform AWS providers to configure two different AWS accounts. In the _sender_ account, the configuration creates an `awsRamResourceShare` and uses a data source in the _receiver_ account to create an `awsRamPrincipalAssociation` resource with the _receiver's_ account ID. In the _receiver_ account, the configuration accepts the invitation to share resources with the `awsRamResourceShareAccepter`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+import { RamPrincipalAssociation } from "./.gen/providers/aws/ram-principal-association";
+import { RamResourceShare } from "./.gen/providers/aws/ram-resource-share";
+import { RamResourceShareAccepter } from "./.gen/providers/aws/ram-resource-share-accepter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AwsProvider(this, "aws", {
+      profile: "profile2",
+    });
+    const alternate = new AwsProvider(this, "aws_1", {
+      alias: "alternate",
+      profile: "profile1",
+    });
+    const senderShare = new RamResourceShare(this, "sender_share", {
+      allowExternalPrincipals: true,
+      name: "tf-test-resource-share",
+      provider: alternate,
+      tags: {
+        Name: "tf-test-resource-share",
+      },
+    });
+    const receiver = new DataAwsCallerIdentity(this, "receiver", {});
+    const senderInvite = new RamPrincipalAssociation(this, "sender_invite", {
+      principal: Token.asString(receiver.accountId),
+      provider: alternate,
+      resourceShareArn: senderShare.arn,
+    });
+    new RamResourceShareAccepter(this, "receiver_accept", {
+      shareArn: senderInvite.resourceShareArn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `shareArn` - (Required) The ARN of the resource share.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `invitationArn` - The ARN of the resource share invitation.
+* `shareId` - The ID of the resource share as displayed in the console.
+* `status` - The status of the resource share (ACTIVE, PENDING, FAILED, DELETING, DELETED).
+* `receiverAccountId` - The account ID of the receiver account which accepts the invitation.
+* `senderAccountId` - The account ID of the sender account which submits the invitation.
+* `shareName` - The name of the resource share.
+* `resources` - A list of the resource ARNs shared via the resource share.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import resource share accepters using the resource share ARN.
+For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import resource share accepters using the resource share ARN. For example:
+
+```console
+% terraform import aws_ram_resource_share_accepter.example arn:aws:ram:us-east-1:123456789012:resource-share/c4b56393-e8d9-89d9-6dc9-883752de4767
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rbin_rule.html.markdown b/website/docs/cdktf/typescript/r/rbin_rule.html.markdown
new file mode 100644
index 00000000000..52c205a0752
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rbin_rule.html.markdown
@@ -0,0 +1,127 @@
+---
+subcategory: "Recycle Bin (RBin)"
+layout: "aws"
+page_title: "AWS: aws_rbin_rule"
+description: |-
+  Terraform resource for managing an AWS RBin Rule.
+---
+
+
+
+# Resource: aws_rbin_rule
+
+Terraform resource for managing an AWS RBin Rule.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RbinRule } from "./.gen/providers/aws/rbin-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RbinRule(this, "example", {
+      description: "example_rule",
+      resourceTags: [
+        {
+          resourceTagKey: "tag_key",
+          resourceTagValue: "tag_value",
+        },
+      ],
+      resourceType: "EBS_SNAPSHOT",
+      retentionPeriod: {
+        retentionPeriodUnit: "DAYS",
+        retentionPeriodValue: 10,
+      },
+      tags: {
+        test_tag_key: "test_tag_value",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `resourceType` - (Required) The resource type to be retained by the retention rule. Valid values are `ebsSnapshot` and `ec2Image`.
+* `retentionPeriod` - (Required) Information about the retention period for which the retention rule is to retain resources. See [`retentionPeriod`](#retention_period) below.
+
+The following arguments are optional:
+
+* `description` - (Optional) The retention rule description.
+* `resourceTags` - (Optional) Specifies the resource tags to use to identify resources that are to be retained by a tag-level retention rule. See [`resourceTags`](#resource_tags) below.
+* `lockConfiguration` - (Optional) Information about the retention rule lock configuration. See [`lockConfiguration`](#lock_configuration) below.
+
+### retention_period
+
+The following arguments are required:
+
+* `retentionPeriodUnit` - (Required) The unit of time in which the retention period is measured. Currently, only DAYS is supported.
+* `retentionPeriodValue` - (Required) The period value for which the retention rule is to retain resources. The period is measured using the unit specified for RetentionPeriodUnit.
+
+### resource_tags
+
+The following argument is required:
+
+* `resourceTagKey` - (Required) The tag key.
+
+The following argument is optional:
+
+* `resourceTagValue` - (Optional) The tag value.
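+
+The arguments above can be combined with the lock settings documented in the next two sections. The following is a hedged sketch, not part of the generated documentation: the nested property names follow the `lockConfiguration` and `unlockDelay` blocks described below, the values are placeholders, and it assumes (per the AWS Recycle Bin documentation) that locks are used with Region-level rules, i.e. rules without `resourceTags`:
+
+```typescript
+// Sketch only: a locked, Region-level rule. Property names assume the
+// `lockConfiguration`/`unlockDelay` schema described below; values are
+// placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { RbinRule } from "./.gen/providers/aws/rbin-rule";
+
+class MyLockedRuleStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RbinRule(this, "locked_example", {
+      description: "locked_rule",
+      resourceType: "EBS_SNAPSHOT",
+      retentionPeriod: {
+        retentionPeriodUnit: "DAYS",
+        retentionPeriodValue: 14,
+      },
+      lockConfiguration: {
+        unlockDelay: {
+          unlockDelayUnit: "DAYS", // days are currently the only unit
+          unlockDelayValue: 7,
+        },
+      },
+    });
+  }
+}
+```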
+
+### lock_configuration
+
+The following argument is required:
+
+* `unlockDelay` - (Required) Information about the retention rule unlock delay. See [`unlockDelay`](#unlock_delay) below.
+
+### unlock_delay
+
+The following arguments are required:
+
+* `unlockDelayUnit` - (Required) The unit of time in which to measure the unlock delay. Currently, the unlock delay can be measured only in days.
+* `unlockDelayValue` - (Required) The unlock delay period, measured in the unit specified for UnlockDelayUnit.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - (String) ID of the Rule.
+* `lockEndTime` - (Timestamp) The date and time at which the unlock delay is set to expire. Only returned for retention rules that have been unlocked and that are still within the unlock delay period.
+* `lockState` - (Optional) The lock state of the retention rules to list. Only retention rules with the specified lock state are returned. Valid values are `locked`, `pendingUnlock`, `unlocked`.
+* `status` - (String) The state of the retention rule. Only retention rules that are in the `available` state retain resources. Valid values include `pending` and `available`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RBin Rule using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RBin Rule using the `id`. For example:
+
+```console
+% terraform import aws_rbin_rule.example examplerule
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rds_cluster.html.markdown b/website/docs/cdktf/typescript/r/rds_cluster.html.markdown
new file mode 100644
index 00000000000..a3c292fdfad
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rds_cluster.html.markdown
@@ -0,0 +1,650 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster"
+description: |-
+  Manages an RDS Aurora Cluster
+---
+
+
+
+# Resource: aws_rds_cluster
+
+Manages an [RDS Aurora Cluster][2]. To manage cluster instances that inherit configuration from the cluster (when not running the cluster in `serverless` engine mode), see the [`awsRdsClusterInstance` resource](/docs/providers/aws/r/rds_cluster_instance.html). To manage non-Aurora databases (e.g., MySQL, PostgreSQL, SQL Server, etc.), see the [`awsDbInstance` resource](/docs/providers/aws/r/db_instance.html).
+
+For information on the difference between the available Aurora MySQL engines, see [Comparison between Aurora MySQL 1 and Aurora MySQL 2](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Updates.20180206.html) in the Amazon RDS User Guide.
+
+Changes to an RDS Cluster can occur when you manually change a parameter, such as `port`, and are reflected in the next maintenance window. Because of this, Terraform may report a difference in its planning phase because a modification has not yet taken place. You can use the `applyImmediately` flag to instruct the service to apply the change immediately (see documentation below).
+
+~> **Note:** using `applyImmediately` can result in a brief downtime as the server reboots.
See the AWS Docs on [RDS Maintenance][4] for more information.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **NOTE on RDS Clusters and RDS Cluster Role Associations:** Terraform provides both a standalone [RDS Cluster Role Association](rds_cluster_role_association.html) (an association between an RDS Cluster and a single IAM Role) and an RDS Cluster resource with `iamRoles` attributes. Use one resource or the other to associate IAM Roles and RDS Clusters. Not doing so will cause a conflict of associations and will result in the association being overwritten.
+
+## Example Usage
+
+### Aurora MySQL 2.x (MySQL 5.7)
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RdsCluster(this, "default", {
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      backupRetentionPeriod: 5,
+      clusterIdentifier: "aurora-cluster-demo",
+      databaseName: "mydb",
+      engine: "aurora-mysql",
+      engineVersion: "5.7.mysql_aurora.2.03.2",
+      masterPassword: "bar",
+      masterUsername: "foo",
+      preferredBackupWindow: "07:00-09:00",
+    });
+  }
+}
+
+```
+
+### Aurora MySQL 1.x (MySQL 5.6)
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RdsCluster(this, "default", {
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      backupRetentionPeriod: 5,
+      clusterIdentifier: "aurora-cluster-demo",
+      databaseName: "mydb",
+      masterPassword: "bar",
+      masterUsername: "foo",
+      preferredBackupWindow: "07:00-09:00",
+      engine: config.engine,
+    });
+  }
+}
+
+```
+
+### Aurora with PostgreSQL engine
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RdsCluster(this, "postgresql", {
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      backupRetentionPeriod: 5,
+      clusterIdentifier: "aurora-cluster-demo",
+      databaseName: "mydb",
+      engine: "aurora-postgresql",
+      masterPassword: "bar",
+      masterUsername: "foo",
+      preferredBackupWindow: "07:00-09:00",
+    });
+  }
+}
+
+```
+
+### Aurora Multi-Master Cluster
+
+-> More information about Aurora Multi-Master Clusters can be found in the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-multi-master.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RdsCluster(this, "example", {
+      clusterIdentifier: "example",
+      dbSubnetGroupName: Token.asString(awsDbSubnetGroupExample.name),
+      engineMode: "multimaster",
+      masterPassword: "barbarbarbar",
+      masterUsername: "foo",
+      skipFinalSnapshot: true,
+      engine: config.engine,
+    });
+  }
+}
+
+```
+
+### RDS Multi-AZ Cluster
+
+-> More information about RDS Multi-AZ Clusters can be found in the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/multi-az-db-clusters-concepts.html).
+
+To create a Multi-AZ RDS cluster, you must additionally specify the `engine`, `storageType`, `allocatedStorage`, `iops` and `dbClusterInstanceClass` attributes.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RdsCluster(this, "example", {
+      allocatedStorage: 100,
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      clusterIdentifier: "example",
+      dbClusterInstanceClass: "db.r6gd.xlarge",
+      engine: "mysql",
+      iops: 1000,
+      masterPassword: "mustbeeightcharacters",
+      masterUsername: "test",
+      storageType: "io1",
+    });
+  }
+}
+
+```
+
+### RDS Serverless v2 Cluster
+
+-> More information about RDS Serverless v2 Clusters can be found in the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless-v2.html).
+
+To create a Serverless v2 RDS cluster, you must additionally specify the `engineMode` and `serverlessv2ScalingConfiguration` attributes. An `awsRdsClusterInstance` resource must also be added to the cluster with the `instanceClass` attribute specified.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new RdsCluster(this, "example", {
+      clusterIdentifier: "example",
+      databaseName: "test",
+      engine: "aurora-postgresql",
+      engineMode: "provisioned",
+      engineVersion: "13.6",
+      masterPassword: "must_be_eight_characters",
+      masterUsername: "test",
+      serverlessv2ScalingConfiguration: {
+        maxCapacity: 1,
+        minCapacity: 0.5,
+      },
+    });
+    const awsRdsClusterInstanceExample = new RdsClusterInstance(
+      this,
+      "example_1",
+      {
+        clusterIdentifier: example.id,
+        engine: example.engine,
+        engineVersion: example.engineVersion,
+        instanceClass: "db.serverless",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsClusterInstanceExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### RDS/Aurora Managed Master Passwords via Secrets Manager, default KMS Key
+
+-> More information about how RDS/Aurora integrates with Secrets Manager to manage master user passwords for your DB clusters can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+You can specify the `manageMasterUserPassword` attribute to enable managing the master password with Secrets Manager. You can also update an existing cluster to use Secrets Manager by specifying the `manageMasterUserPassword` attribute and removing the `masterPassword` attribute (removal is required).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RdsCluster(this, "test", {
+      clusterIdentifier: "example",
+      databaseName: "test",
+      manageMasterUserPassword: true,
+      masterUsername: "test",
+      engine: config.engine,
+    });
+  }
+}
+
+```
+
+### RDS/Aurora Managed Master Passwords via Secrets Manager, specific KMS Key
+
+-> More information about how RDS/Aurora integrates with Secrets Manager to manage master user passwords for your DB clusters can be found in the [RDS User Guide](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-rds-integration-aws-secrets-manager/) and [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/rds-secrets-manager.html).
+
+You can specify the `masterUserSecretKmsKeyId` attribute to specify a specific KMS Key.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new KmsKey(this, "example", {
+      description: "Example KMS Key",
+    });
+    new RdsCluster(this, "test", {
+      clusterIdentifier: "example",
+      databaseName: "test",
+      manageMasterUserPassword: true,
+      masterUserSecretKmsKeyId: example.keyId,
+      masterUsername: "test",
+      engine: config.engine,
+    });
+  }
+}
+
+```
+
+### Global Cluster Restored From Snapshot
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsDbClusterSnapshot } from "./.gen/providers/aws/data-aws-db-cluster-snapshot";
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+import { RdsGlobalCluster } from "./.gen/providers/aws/rds-global-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsDbClusterSnapshot(this, "example", {
+      dbClusterIdentifier: "example-original-cluster",
+      mostRecent: true,
+    });
+    const awsRdsClusterExample = new RdsCluster(this, "example_1", {
+      clusterIdentifier: "example",
+      engine: "aurora",
+      engineVersion: "5.6.mysql_aurora.1.22.4",
+      lifecycle: {
+        // Attribute names as they appear in the synthesized configuration.
+        ignoreChanges: ["snapshot_identifier", "global_cluster_identifier"],
+      },
+      snapshotIdentifier: Token.asString(example.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsClusterExample.overrideLogicalId("example");
+    const awsRdsGlobalClusterExample = new RdsGlobalCluster(this, "example_2", {
+      forceDestroy: true,
+      globalClusterIdentifier: "example",
+      sourceDbClusterIdentifier: Token.asString(awsRdsClusterExample.arn),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsGlobalClusterExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to the AWS official documentation:
+
+* [create-db-cluster](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-cluster.html)
+* [modify-db-cluster](https://docs.aws.amazon.com/cli/latest/reference/rds/modify-db-cluster.html)
+
+This resource supports the following arguments:
+
+* `allocatedStorage` - (Optional) (Required for Multi-AZ DB cluster) The amount of storage in gibibytes (GiB) to allocate to each DB instance in the Multi-AZ DB cluster.
+* `allowMajorVersionUpgrade` - (Optional) Enable to allow major engine version upgrades when changing engine versions. Defaults to `false`.
+* `applyImmediately` - (Optional) Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`. See [Amazon RDS Documentation for more information.](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.DBInstance.Modifying.html)
+* `availabilityZones` - (Optional) List of EC2 Availability Zones for the DB cluster storage where DB cluster instances can be created.
RDS automatically assigns 3 AZs if less than 3 AZs are configured, which will show as a difference requiring resource recreation next Terraform apply. We recommend specifying 3 AZs or using [the `lifecycle` configuration block `ignoreChanges` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) if necessary. A maximum of 3 AZs can be configured.
+* `backtrackWindow` - (Optional) Target backtrack window, in seconds. Only available for `aurora` and `auroraMysql` engines currently. To disable backtracking, set this value to `0`. Defaults to `0`. Must be between `0` and `259200` (72 hours).
+* `backupRetentionPeriod` - (Optional) Days to retain backups for. Default `1`.
+* `clusterIdentifierPrefix` - (Optional, Forces new resource) Creates a unique cluster identifier beginning with the specified prefix. Conflicts with `clusterIdentifier`.
+* `clusterIdentifier` - (Optional, Forces new resources) The cluster identifier. If omitted, Terraform will assign a random, unique identifier.
+* `copyTagsToSnapshot` – (Optional, boolean) Copy all Cluster `tags` to snapshots. Default is `false`.
+* `databaseName` - (Optional) Name for an automatically created database on cluster creation. There are different naming restrictions per database engine: [RDS Naming Constraints][5]
+* `dbClusterInstanceClass` - (Optional) (Required for Multi-AZ DB cluster) The compute and memory capacity of each DB instance in the Multi-AZ DB cluster, for example db.m6g.xlarge. Not all DB instance classes are available in all AWS Regions, or for all database engines. For the full list of DB instance classes and availability for your engine, see [DB instance class](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html) in the Amazon RDS User Guide.
+* `dbInstanceParameterGroupName` - (Optional) Instance parameter group to associate with all instances of the DB cluster. The `dbInstanceParameterGroupName` parameter is only valid in combination with the `allowMajorVersionUpgrade` parameter.
+* `dbSubnetGroupName` - (Optional) DB subnet group to associate with this DB instance. **NOTE:** This must match the `dbSubnetGroupName` specified on every [`awsRdsClusterInstance`](/docs/providers/aws/r/rds_cluster_instance.html) in the cluster.
+* `deletionProtection` - (Optional) If the DB instance should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `enableGlobalWriteForwarding` - (Optional) Whether cluster should forward writes to an associated global cluster. Applied to secondary clusters to enable them to forward writes to an [`awsRdsGlobalCluster`](/docs/providers/aws/r/rds_global_cluster.html)'s primary cluster. See the [Aurora Userguide documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database-write-forwarding.html) for more information.
+* `enableHttpEndpoint` - (Optional) Enable HTTP endpoint (data API). Only valid when `engineMode` is set to `serverless`.
+* `enabledCloudwatchLogsExports` - (Optional) Set of log types to export to cloudwatch. If omitted, no logs will be exported. The following log types are supported: `audit`, `error`, `general`, `slowquery`, `postgresql` (PostgreSQL).
+* `engineMode` - (Optional) Database engine mode. Valid values: `global` (only valid for Aurora MySQL 1.21 and earlier), `multimaster`, `parallelquery`, `provisioned`, `serverless`. Defaults to: `provisioned`.
See the [RDS User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/aurora-serverless.html) for limitations when using `serverless`.
+* `engineVersion` - (Optional) Database engine version. Updating this argument results in an outage. See the [Aurora MySQL](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Updates.html) and [Aurora Postgres](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraPostgreSQL.Updates.html) documentation for your configured engine to determine this value, or run `aws rds describe-db-engine-versions`. For example with Aurora MySQL 2, a potential value for this argument is `5.7.mysql_aurora.2.03.2`. The value can contain a partial version where supported by the API. The actual engine version used is returned in the attribute `engineVersionActual`, see [Attribute Reference](#attribute-reference) below.
+* `engine` - (Required) Name of the database engine to be used for this DB cluster. Valid Values: `auroraMysql`, `auroraPostgresql`, `mysql`, `postgres`. (Note that `mysql` and `postgres` are Multi-AZ RDS clusters).
+* `finalSnapshotIdentifier` - (Optional) Name of your final DB snapshot when this DB cluster is deleted. If omitted, no final snapshot will be made.
+* `globalClusterIdentifier` - (Optional) Global cluster identifier specified on [`awsRdsGlobalCluster`](/docs/providers/aws/r/rds_global_cluster.html).
+* `iamDatabaseAuthenticationEnabled` - (Optional) Specifies whether or not mappings of AWS Identity and Access Management (IAM) accounts to database accounts are enabled. Please see [AWS Documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/UsingWithRDS.IAMDBAuth.html) for availability and limitations.
+* `iamRoles` - (Optional) List of ARNs for the IAM roles to associate to the RDS Cluster.
+* `iops` - (Optional) Amount of Provisioned IOPS (input/output operations per second) to be initially allocated for each DB instance in the Multi-AZ DB cluster. For information about valid Iops values, see [Amazon RDS Provisioned IOPS storage to improve performance](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#USER_PIOPS) in the Amazon RDS User Guide. (This setting is required to create a Multi-AZ DB cluster). Must be a multiple between .5 and 50 of the storage amount for the DB cluster.
+* `kmsKeyId` - (Optional) ARN for the KMS encryption key. When specifying `kmsKeyId`, `storageEncrypted` needs to be set to true.
+* `manageMasterUserPassword` - (Optional) Set to true to allow RDS to manage the master user password in Secrets Manager. Cannot be set if `masterPassword` is provided.
+* `masterPassword` - (Required unless `manageMasterUserPassword` is set to true or unless a `snapshotIdentifier` or `replicationSourceIdentifier` is provided or unless a `globalClusterIdentifier` is provided when the cluster is the "secondary" cluster of a global database) Password for the master DB user. Note that this may show up in logs, and it will be stored in the state file. Please refer to the [RDS Naming Constraints][5]. Cannot be set if `manageMasterUserPassword` is set to `true`.
+* `masterUserSecretKmsKeyId` - (Optional) Amazon Web Services KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. To use a KMS key in a different Amazon Web Services account, specify the key ARN or alias ARN. If not specified, the default KMS key for your Amazon Web Services account is used.
+* `masterUsername` - (Required unless a `snapshotIdentifier` or `replicationSourceIdentifier` is provided or unless a `globalClusterIdentifier` is provided when the cluster is the "secondary" cluster of a global database) Username for the master DB user. Please refer to the [RDS Naming Constraints][5]. This argument does not support in-place updates and cannot be changed during a restore from snapshot.
+* `networkType` - (Optional) Network type of the cluster. Valid values: `ipv4`, `dual`.
+* `port` - (Optional) Port on which the DB accepts connections.
+* `preferredBackupWindow` - (Optional) Daily time range during which automated backups are created if automated backups are enabled using the BackupRetentionPeriod parameter. Time in UTC. Default: A 30-minute window selected at random from an 8-hour block of time per region. E.g., 04:00-09:00.
+* `preferredMaintenanceWindow` - (Optional) Weekly time range during which system maintenance can occur, in UTC, e.g., wed:04:00-wed:04:30.
+* `replicationSourceIdentifier` - (Optional) ARN of a source DB cluster or DB instance if this DB cluster is to be created as a Read Replica. If DB Cluster is part of a Global Cluster, use the [`lifecycle` configuration block `ignoreChanges` argument](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to prevent Terraform from showing differences for this argument instead of configuring this value.
+* `restoreToPointInTime` - (Optional) Nested attribute for [point in time restore](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_PIT.html). More details below.
+* `scalingConfiguration` - (Optional) Nested attribute with scaling properties. Only valid when `engineMode` is set to `serverless`. More details below.
+* `serverlessv2ScalingConfiguration` - (Optional) Nested attribute with scaling properties for ServerlessV2. Only valid when `engineMode` is set to `provisioned`. More details below.
+* `skipFinalSnapshot` - (Optional) Determines whether a final DB snapshot is created before the DB cluster is deleted. If true is specified, no DB snapshot is created. If false is specified, a DB snapshot is created before the DB cluster is deleted, using the value from `finalSnapshotIdentifier`. Default is `false`.
+* `snapshotIdentifier` - (Optional) Specifies whether or not to create this cluster from a snapshot. You can use either the name or ARN when specifying a DB cluster snapshot, or the ARN when specifying a DB snapshot. Conflicts with `globalClusterIdentifier`. Clusters cannot be restored from snapshot **and** joined to an existing global cluster in a single operation. See the [AWS documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database-getting-started.html#aurora-global-database.use-snapshot) or the [Global Cluster Restored From Snapshot example](#global-cluster-restored-from-snapshot) for instructions on building a global cluster starting with a snapshot.
+* `sourceRegion` - (Optional) The source region for an encrypted replica DB cluster.
+* `storageEncrypted` - (Optional) Specifies whether the DB cluster is encrypted. The default is `false` for `provisioned` `engineMode` and `true` for `serverless` `engineMode`. When restoring an unencrypted `snapshotIdentifier`, the `kmsKeyId` argument must be provided to encrypt the restored cluster. Terraform will only perform drift detection if a configuration value is provided.
+* `storageType` - (Optional) (Required for Multi-AZ DB clusters) (Forces new for Multi-AZ DB clusters) Specifies the storage type to be associated with the DB cluster. For Aurora DB clusters, `storageType` modifications can be done in-place. For Multi-AZ DB Clusters, the `iops` argument must also be set. Valid values are: `""`, `auroraIopt1` (Aurora DB Clusters); `io1` (Multi-AZ DB Clusters). Default: `""` (Aurora DB Clusters); `io1` (Multi-AZ DB Clusters).
+* `tags` - (Optional) A map of tags to assign to the DB cluster. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpcSecurityGroupIds` - (Optional) List of VPC security groups to associate with the Cluster.
+
+### S3 Import Options
+
+Full details on the core parameters and impacts are in the API Docs: [RestoreDBClusterFromS3](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_RestoreDBClusterFromS3.html). Requires that the S3 bucket be in the same region as the RDS cluster you're trying to create. Sample:
+
+~> **NOTE:** RDS Aurora Serverless does not support loading data from S3, so it's not possible to directly use `engineMode` set to `serverless` with `s3Import`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RdsCluster(this, "db", {
+      engine: "aurora",
+      s3Import: {
+        bucketName: "mybucket",
+        bucketPrefix: "backups",
+        ingestionRole: "arn:aws:iam::1234567890:role/role-xtrabackup-rds-restore",
+        sourceEngine: "mysql",
+        sourceEngineVersion: "5.6",
+      },
+    });
+  }
+}
+
+```
+
+* `bucketName` - (Required) Bucket name where your backup is stored
+* `bucketPrefix` - (Optional) Can be blank, but is the path to your backup
+* `ingestionRole` - (Required) Role applied to load the data.
+* `sourceEngine` - (Required) Source engine for the backup
+* `sourceEngineVersion` - (Required) Version of the source engine used to make the backup
+
+This will not recreate the resource if the S3 object changes in some way. It's only used to initialize the database. This currently only works with the aurora engine. See AWS for currently supported engines and options. See [Aurora S3 Migration Docs](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Migrating.ExtMySQL.html#AuroraMySQL.Migrating.ExtMySQL.S3).
+
+### restore_to_point_in_time Argument Reference
+
+~> **NOTE:** The DB cluster is created from the source DB cluster with the same configuration as the original DB cluster, except that the new DB cluster is created with the default DB security group. Thus, the following arguments should only be specified with the source DB cluster's respective values: `databaseName`, `masterUsername`, `storageEncrypted`, `replicationSourceIdentifier`, and `sourceRegion`.
+
+Example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RdsCluster(this, "example-clone", {
+      restoreToPointInTime: {
+        restoreType: "copy-on-write",
+        sourceClusterIdentifier: "example",
+        useLatestRestorableTime: true,
+      },
+      engine: config.engine,
+    });
+  }
+}
+
+```
+
+* `sourceClusterIdentifier` - (Required) Identifier of the source database cluster from which to restore.
+* `restoreType` - (Optional) Type of restore to be performed. Valid options are `fullCopy` (default) and `copyOnWrite`.
+* `useLatestRestorableTime` - (Optional) Set to true to restore the database cluster to the latest restorable backup time. Defaults to false. Conflicts with `restoreToTime`.
+* `restoreToTime` - (Optional) Date and time in UTC format to restore the database cluster to. Conflicts with `useLatestRestorableTime`.
+
+### scaling_configuration Argument Reference
+
+~> **NOTE:** `scalingConfiguration` configuration is only valid when `engineMode` is set to `serverless`.
+
+Example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RdsCluster(this, "example", {
+      engineMode: "serverless",
+      scalingConfiguration: {
+        autoPause: true,
+        maxCapacity: 256,
+        minCapacity: 2,
+        secondsUntilAutoPause: 300,
+        timeoutAction: "ForceApplyCapacityChange",
+      },
+      engine: config.engine,
+    });
+  }
+}
+
+```
+
+* `autoPause` - (Optional) Whether to enable automatic pause. A DB cluster can be paused only when it's idle (it has no connections). If a DB cluster is paused for more than seven days, the DB cluster might be backed up with a snapshot. In this case, the DB cluster is restored when there is a request to connect to it. Defaults to `true`.
+* `maxCapacity` - (Optional) Maximum capacity for an Aurora DB cluster in `serverless` DB engine mode. The maximum capacity must be greater than or equal to the minimum capacity. Valid Aurora MySQL capacity values are `1`, `2`, `4`, `8`, `16`, `32`, `64`, `128`, `256`. Valid Aurora PostgreSQL capacity values are (`2`, `4`, `8`, `16`, `32`, `64`, `192`, and `384`). Defaults to `16`.
+* `minCapacity` - (Optional) Minimum capacity for an Aurora DB cluster in `serverless` DB engine mode. The minimum capacity must be less than or equal to the maximum capacity. Valid Aurora MySQL capacity values are `1`, `2`, `4`, `8`, `16`, `32`, `64`, `128`, `256`. Valid Aurora PostgreSQL capacity values are (`2`, `4`, `8`, `16`, `32`, `64`, `192`, and `384`). Defaults to `1`.
+* `secondsUntilAutoPause` - (Optional) Time, in seconds, before an Aurora DB cluster in serverless mode is paused.
Valid values are `300` through `86400`. Defaults to `300`.
+* `timeoutAction` - (Optional) Action to take when the timeout is reached. Valid values: `forceApplyCapacityChange`, `rollbackCapacityChange`. Defaults to `rollbackCapacityChange`. See [documentation](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless.how-it-works.html#aurora-serverless.how-it-works.timeout-action).
+
+### serverlessv2_scaling_configuration Argument Reference
+
+~> **NOTE:** `serverlessv2ScalingConfiguration` configuration is only valid when `engineMode` is set to `provisioned`.
+
+Example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RdsCluster(this, "example", {
+      serverlessv2ScalingConfiguration: {
+        maxCapacity: 128,
+        minCapacity: 0.5,
+      },
+      engine: config.engine,
+    });
+  }
+}
+
+```
+
+* `maxCapacity` - (Required) Maximum capacity for an Aurora DB cluster in `provisioned` DB engine mode. The maximum capacity must be greater than or equal to the minimum capacity. Valid capacity values are in a range of `0.5` up to `128` in steps of `0.5`.
+* `minCapacity` - (Required) Minimum capacity for an Aurora DB cluster in `provisioned` DB engine mode. The minimum capacity must be less than or equal to the maximum capacity. Valid capacity values are in a range of `0.5` up to `128` in steps of `0.5`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of cluster
+* `id` - RDS Cluster Identifier
+* `clusterIdentifier` - RDS Cluster Identifier
+* `clusterResourceId` - RDS Cluster Resource ID
+* `clusterMembers` – List of RDS Instances that are a part of this cluster
+* `availabilityZones` - Availability zone of the instance
+* `backupRetentionPeriod` - Backup retention period
+* `preferredBackupWindow` - Daily time range during which the backups happen
+* `preferredMaintenanceWindow` - Maintenance window
+* `endpoint` - DNS address of the RDS instance
+* `readerEndpoint` - Read-only endpoint for the Aurora cluster, automatically load-balanced across replicas
+* `engine` - Database engine
+* `engineVersionActual` - Running version of the database.
+* `databaseName` - Database name
+* `port` - Database port
+* `masterUsername` - Master username for the database
+* `masterUserSecret` - Block that specifies the master user secret. Only available when `manageMasterUserPassword` is set to true. [Documented below](#master_user_secret).
+* `storageEncrypted` - Specifies whether the DB cluster is encrypted
+* `replicationSourceIdentifier` - ARN of the source DB cluster or DB instance if this DB cluster is created as a Read Replica.
+* `hostedZoneId` - Route53 Hosted Zone ID of the endpoint
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
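+
+The `masterUserSecret` block is documented in the next section. As a hedged sketch of consuming it (this assumes cdktf's usual accessors for computed nested blocks, where the attribute is exposed as an indexable list), you could surface the secret's ARN as a stack output:
+
+```typescript
+// Sketch only: `masterUserSecret.get(0)` assumes cdktf's generated
+// computed-list accessors on RdsCluster; all other values are placeholders.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+
+class MySecretOutputStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const cluster = new RdsCluster(this, "example", {
+      clusterIdentifier: "example",
+      engine: "aurora-mysql",
+      manageMasterUserPassword: true,
+      masterUsername: "test",
+    });
+    // ARN of the Secrets Manager secret that stores the managed password.
+    new TerraformOutput(this, "master_user_secret_arn", {
+      value: cluster.masterUserSecret.get(0).secretArn,
+    });
+  }
+}
+```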
+
+[1]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Overview.Replication.html
+[2]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Aurora.html
+[3]: /docs/providers/aws/r/rds_cluster_instance.html
+[4]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_UpgradeDBInstance.Maintenance.html
+[5]: http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Limits.html#RDS_Limits.Constraints
+
+### master_user_secret
+
+The `masterUserSecret` configuration block supports the following attributes:
+
+* `kmsKeyId` - Amazon Web Services KMS key identifier that is used to encrypt the secret.
+* `secretArn` - Amazon Resource Name (ARN) of the secret.
+* `secretStatus` - Status of the secret. Valid Values: `creating` | `active` | `rotating` | `impaired`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `120m`)
+- `update` - (Default `120m`)
+- `delete` - (Default `120m`) Includes any cleanup task during the destroying process.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Clusters using the `clusterIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RDS Clusters using the `clusterIdentifier`. For example:
+
+```console
+% terraform import aws_rds_cluster.aurora_cluster aurora-prod-cluster
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rds_cluster_activity_stream.html.markdown b/website/docs/cdktf/typescript/r/rds_cluster_activity_stream.html.markdown
new file mode 100644
index 00000000000..0cdf60ce99f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rds_cluster_activity_stream.html.markdown
@@ -0,0 +1,130 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster_activity_stream"
+description: |-
+  Manages RDS Aurora Cluster Database Activity Streams
+---
+
+
+
+# Resource: aws_rds_cluster_activity_stream
+
+Manages RDS Aurora Cluster Database Activity Streams.
+
+Database Activity Streams have some limits and requirements; refer to the [Monitoring Amazon Aurora using Database Activity Streams][1] documentation for detailed limitations and requirements.
+
+~> **Note:** This resource always calls the RDS [`startActivityStream`][2] API with the `applyImmediately` parameter set to `true`. This is because Terraform needs the activity stream to be started in order to get the associated attributes.
+
+~> **Note:** This resource depends on having at least one `awsRdsClusterInstance` created. To avoid race conditions when all resources are being created together, add an explicit resource reference using the [resource `dependsOn` meta-argument](https://www.terraform.io/docs/configuration/resources.html#depends_on-explicit-resource-dependencies).
+ +~> **Note:** This resource is available in all regions except the following: `cnNorth1`, `cnNorthwest1`, `usGovEast1`, `usGovWest1` + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { RdsCluster } from "./.gen/providers/aws/rds-cluster"; +import { RdsClusterActivityStream } from "./.gen/providers/aws/rds-cluster-activity-stream"; +import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new KmsKey(this, "default", { + description: "AWS KMS Key to encrypt Database Activity Stream", + }); + const awsRdsClusterDefault = new RdsCluster(this, "default_1", { + availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"], + clusterIdentifier: "aurora-cluster-demo", + databaseName: "mydb", + engine: "aurora-postgresql", + engineVersion: "13.4", + masterPassword: "mustbeeightcharaters", + masterUsername: "foo", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRdsClusterDefault.overrideLogicalId("default"); + const awsRdsClusterInstanceDefault = new RdsClusterInstance( + this, + "default_2", + { + clusterIdentifier: Token.asString( + awsRdsClusterDefault.clusterIdentifier + ), + engine: Token.asString(awsRdsClusterDefault.engine), + identifier: "aurora-instance-demo", + instanceClass: "db.r6g.large", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRdsClusterInstanceDefault.overrideLogicalId("default"); + const awsRdsClusterActivityStreamDefault = new RdsClusterActivityStream( + this, + "default_3", + { + dependsOn: [awsRdsClusterInstanceDefault], + kmsKeyId: defaultVar.keyId, + mode: "async", + resourceArn: Token.asString(awsRdsClusterDefault.arn), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRdsClusterActivityStreamDefault.overrideLogicalId("default"); + } +} + +``` + +## Argument Reference + +For more detailed documentation about each argument, refer to +the [AWS official documentation][3]. + +This argument supports the following arguments: + +* `resourceArn` - (Required, Forces new resources) The Amazon Resource Name (ARN) of the DB cluster. +* `mode` - (Required, Forces new resources) Specifies the mode of the database activity stream. Database events such as a change or access generate an activity stream event. The database session can handle these events either synchronously or asynchronously. One of: `sync`, `async`. +* `kmsKeyId` - (Required, Forces new resources) The AWS KMS key identifier for encrypting messages in the database activity stream. The AWS KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. +* `engineNativeAuditFieldsIncluded` - (Optional, Forces new resources) Specifies whether the database activity stream includes engine-native audit fields. This option only applies to an Oracle DB instance. By default, no engine-native audit fields are included. 
Defaults to `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Name (ARN) of the DB cluster.
+* `kinesisStreamName` - The name of the Amazon Kinesis data stream to be used for the database activity stream.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Aurora Cluster Database Activity Streams using the `resourceArn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RDS Aurora Cluster Database Activity Streams using the `resourceArn`. For example:
+
+```console
+% terraform import aws_rds_cluster_activity_stream.default arn:aws:rds:us-west-2:123456789012:cluster:aurora-cluster-demo
+```
+
+[1]: https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/DBActivityStreams.html
+[2]: https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_StartActivityStream.html
+[3]: https://docs.aws.amazon.com/cli/latest/reference/rds/start-activity-stream.html
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rds_cluster_endpoint.html.markdown b/website/docs/cdktf/typescript/r/rds_cluster_endpoint.html.markdown
new file mode 100644
index 00000000000..daeb6d1e4f5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rds_cluster_endpoint.html.markdown
@@ -0,0 +1,133 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster_endpoint"
+description: |-
+  Manages an RDS Aurora Cluster Endpoint
+---
+
+
+
+# Resource: aws_rds_cluster_endpoint
+
+Manages an RDS Aurora Cluster Endpoint.
+For more information, see the [User Guide][1].
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
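+ *
+ * This example creates a three-instance Aurora cluster and two custom
+ * READER endpoints: "eligible" excludes two members, while "static"
+ * pins an explicit member list.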
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+import { RdsClusterEndpoint } from "./.gen/providers/aws/rds-cluster-endpoint";
+import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const defaultVar = new RdsCluster(this, "default", {
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      backupRetentionPeriod: 5,
+      clusterIdentifier: "aurora-cluster-demo",
+      databaseName: "mydb",
+      masterPassword: "bar",
+      masterUsername: "foo",
+      preferredBackupWindow: "07:00-09:00",
+      engine: config.engine,
+    });
+    const test1 = new RdsClusterInstance(this, "test1", {
+      applyImmediately: true,
+      clusterIdentifier: defaultVar.id,
+      engine: defaultVar.engine,
+      engineVersion: defaultVar.engineVersion,
+      identifier: "test1",
+      instanceClass: "db.t2.small",
+    });
+    const test2 = new RdsClusterInstance(this, "test2", {
+      applyImmediately: true,
+      clusterIdentifier: defaultVar.id,
+      engine: defaultVar.engine,
+      engineVersion: defaultVar.engineVersion,
+      identifier: "test2",
+      instanceClass: "db.t2.small",
+    });
+    const test3 = new RdsClusterInstance(this, "test3", {
+      applyImmediately: true,
+      clusterIdentifier: defaultVar.id,
+      engine: defaultVar.engine,
+      engineVersion: defaultVar.engineVersion,
+      identifier: "test3",
+      instanceClass: "db.t2.small",
+    });
+    new RdsClusterEndpoint(this, "eligible", {
+      clusterEndpointIdentifier: "reader",
+      clusterIdentifier: defaultVar.id,
+      customEndpointType: "READER",
+      excludedMembers: [test1.id, test2.id],
+    });
+    new RdsClusterEndpoint(this, "static", {
+      clusterEndpointIdentifier: "static",
+      clusterIdentifier: defaultVar.id,
+      customEndpointType: "READER",
+      staticMembers: [test1.id, test3.id],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-cluster-endpoint.html).
+
+This argument supports the following arguments:
+
+* `clusterIdentifier` - (Required, Forces new resources) The cluster identifier.
+* `clusterEndpointIdentifier` - (Required, Forces new resources) The identifier to use for the new endpoint. This parameter is stored as a lowercase string.
+* `customEndpointType` - (Required) The type of the endpoint. One of: `READER`, `ANY`.
+* `staticMembers` - (Optional) List of DB instance identifiers that are part of the custom endpoint group. Conflicts with `excludedMembers`.
+* `excludedMembers` - (Optional) List of DB instance identifiers that aren't part of the custom endpoint group. All other eligible instances are reachable through the custom endpoint. Only relevant if the list of static members is empty. Conflicts with `staticMembers`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of cluster +* `id` - The RDS Cluster Endpoint Identifier +* `endpoint` - A custom endpoint for the Aurora cluster +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Clusters Endpoint using the `clusterEndpointIdentifier`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import RDS Clusters Endpoint using the `clusterEndpointIdentifier`. For example: + +```console +% terraform import aws_rds_cluster_endpoint.custom_reader aurora-prod-cluster-custom-reader +``` + +[1]: https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/Aurora.Overview.Endpoints.html#Aurora.Endpoints.Cluster + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/rds_cluster_instance.html.markdown b/website/docs/cdktf/typescript/r/rds_cluster_instance.html.markdown new file mode 100644 index 00000000000..ef13180b80e --- /dev/null +++ b/website/docs/cdktf/typescript/r/rds_cluster_instance.html.markdown @@ -0,0 +1,164 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_cluster_instance" +description: |- + Provides an RDS Cluster Resource Instance +--- + + + +# Resource: aws_rds_cluster_instance + +Provides an RDS Cluster Instance Resource. A Cluster Instance Resource defines +attributes that are specific to a single instance in a [RDS Cluster][3], +specifically running Amazon Aurora. + +Unlike other RDS resources that support replication, with Amazon Aurora you do +not designate a primary and subsequent replicas. Instead, you simply add RDS +Instances and Aurora manages the replication. You can use the [count][5] +meta-parameter to make multiple instances and join them all to the same RDS +Cluster, or you may specify different Cluster Instance resources with various +`instanceClass` sizes. + +For more information on Amazon Aurora, see [Aurora on Amazon RDS][2] in the Amazon RDS User Guide. + +~> **NOTE:** Deletion Protection from the RDS service can only be enabled at the cluster level, not for individual cluster instances. You can still add the [`preventDestroy` lifecycle behavior](https://www.terraform.io/language/meta-arguments/lifecycle#prevent_destroy) to your Terraform resource configuration if you desire protection from accidental deletion. + +~> **NOTE:** `aurora` is no longer a valid `engine` because of [Amazon Aurora's MySQL-Compatible Edition version 1 end of life](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/Aurora.MySQL56.EOL.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformCount, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const defaultVar = new RdsCluster(this, "default", {
+      availabilityZones: ["us-west-2a", "us-west-2b", "us-west-2c"],
+      clusterIdentifier: "aurora-cluster-demo",
+      databaseName: "mydb",
+      masterPassword: "barbut8chars",
+      masterUsername: "foo",
+      engine: config.engine,
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const clusterInstancesCount = TerraformCount.of(Token.asNumber("2"));
+    new RdsClusterInstance(this, "cluster_instances", {
+      clusterIdentifier: defaultVar.id,
+      engine: defaultVar.engine,
+      engineVersion: defaultVar.engineVersion,
+      identifier: "aurora-cluster-demo-${" + clusterInstancesCount.index + "}",
+      instanceClass: "db.r4.large",
+      count: clusterInstancesCount,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to
+the [AWS official documentation](https://docs.aws.amazon.com/cli/latest/reference/rds/create-db-instance.html).
+
+This argument supports the following arguments:
+
+* `applyImmediately` - (Optional) Specifies whether any database modifications are applied immediately, or during the next maintenance window. Default is `false`.
+* `autoMinorVersionUpgrade` - (Optional) Indicates that minor engine upgrades will be applied automatically to the DB instance during the maintenance window. Default `true`.
+* `availabilityZone` - (Optional, Computed, Forces new resource) EC2 Availability Zone that the DB instance is created in. See [docs](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html) about the details.
+* `caCertIdentifier` - (Optional) Identifier of the CA certificate for the DB instance.
+* `clusterIdentifier` - (Required, Forces new resource) Identifier of the [`awsRdsCluster`](/docs/providers/aws/r/rds_cluster.html) in which to launch this instance.
+* `copyTagsToSnapshot` – (Optional, boolean) Indicates whether to copy all of the user-defined tags from the DB instance to snapshots of the DB instance. Default `false`.
+* `dbParameterGroupName` - (Optional) Name of the DB parameter group to associate with this instance.
+* `dbSubnetGroupName` - (Required if `publicly_accessible = false`, Optional otherwise, Forces new resource) DB subnet group to associate with this DB instance. **NOTE:** This must match the `dbSubnetGroupName` of the attached [`awsRdsCluster`](/docs/providers/aws/r/rds_cluster.html).
+* `engineVersion` - (Optional) Database engine version.
+* `engine` - (Required, Forces new resource) Name of the database engine to be used for the RDS instance. Valid Values: `auroraMysql`, `auroraPostgresql`, `mysql`, `postgres`.
+* `identifierPrefix` - (Optional, Forces new resource) Creates a unique identifier beginning with the specified prefix. Conflicts with `identifier`.
+* `identifier` - (Optional, Forces new resource) Identifier for the RDS instance; if omitted, Terraform will assign a random, unique identifier.
+* `instanceClass` - (Required) Instance class to use. For details on CPU and memory, see [Scaling Aurora DB Instances][4]. Aurora uses `db.*` instance classes/types. Please see [AWS Documentation][7] for currently available instance classes and complete details.
+* `monitoringInterval` - (Optional) Interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance. To disable collecting Enhanced Monitoring metrics, specify 0. The default is 0. Valid Values: 0, 1, 5, 10, 15, 30, 60.
+* `monitoringRoleArn` - (Optional) ARN for the IAM role that permits RDS to send enhanced monitoring metrics to CloudWatch Logs. See the [AWS Documentation](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Monitoring.html) for the IAM permissions needed to allow Enhanced Monitoring for RDS Instances.
+* `performanceInsightsEnabled` - (Optional) Specifies whether Performance Insights is enabled or not.
+* `performanceInsightsKmsKeyId` - (Optional) ARN for the KMS key to encrypt Performance Insights data. When specifying `performanceInsightsKmsKeyId`, `performanceInsightsEnabled` needs to be set to true.
+* `performanceInsightsRetentionPeriod` - (Optional) Amount of time in days to retain Performance Insights data. Valid values are `7`, `731` (2 years) or a multiple of `31`. When specifying `performanceInsightsRetentionPeriod`, `performanceInsightsEnabled` needs to be set to true. Defaults to `7`.
+* `preferredBackupWindow` - (Optional) Daily time range during which automated backups are created if automated backups are enabled. E.g., "04:00-09:00". **NOTE:** If `preferredBackupWindow` is set at the cluster level, this argument **must** be omitted.
+* `preferredMaintenanceWindow` - (Optional) Window to perform maintenance in. Syntax: "ddd:hh24:mi-ddd:hh24:mi". E.g., "Mon:00:00-Mon:03:00".
+* `promotionTier` - (Optional) Default `0`. Failover priority setting at the instance level. A reader with a lower tier has a higher priority to be promoted to writer.
+* `publiclyAccessible` - (Optional) Bool to control if instance is publicly accessible. Default `false`. See the documentation on [Creating DB Instances][6] for more details on controlling this property.
+* `tags` - (Optional) Map of tags to assign to the instance. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of cluster instance
+* `clusterIdentifier` - RDS Cluster Identifier
+* `identifier` - Instance identifier
+* `id` - Instance identifier
+* `writer` – Boolean indicating if this instance is writable. `false` indicates this instance is a read replica.
+* `availabilityZone` - Availability zone of the instance
+* `endpoint` - DNS address for this instance. May not be writable
+* `engine` - Database engine
+* `engineVersionActual` - Database engine version
+* `port` - Database port
+* `storageEncrypted` - Specifies whether the DB cluster is encrypted.
+* `kmsKeyId` - ARN for the KMS encryption key, if one is set for the cluster.
+* `networkType` - Network type of the DB instance.
+* `dbiResourceId` - Region-unique, immutable identifier for the DB instance.
+* `performanceInsightsEnabled` - Specifies whether Performance Insights is enabled or not.
+* `performanceInsightsKmsKeyId` - ARN for the KMS encryption key used by Performance Insights.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+[2]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Aurora.html
+[3]: /docs/providers/aws/r/rds_cluster.html
+[4]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Aurora.Managing.html
+[5]: https://www.terraform.io/docs/configuration/meta-arguments/count.html
+[6]: https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html
+[7]: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `90m`)
+- `update` - (Default `90m`)
+- `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Cluster Instances using the `identifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RDS Cluster Instances using the `identifier`. For example:
+
+```console
+% terraform import aws_rds_cluster_instance.prod_instance_1 aurora-cluster-instance-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rds_cluster_parameter_group.markdown b/website/docs/cdktf/typescript/r/rds_cluster_parameter_group.markdown
new file mode 100644
index 00000000000..3f9f5e15c42
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rds_cluster_parameter_group.markdown
@@ -0,0 +1,101 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster_parameter_group"
+description: |-
+  Provides an RDS DB cluster parameter group resource.
+---
+
+
+
+# Resource: aws_rds_cluster_parameter_group
+
+Provides an RDS DB cluster parameter group resource. Documentation of the available parameters for various Aurora engines can be found at:
+
+* [Aurora MySQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraMySQL.Reference.html)
+* [Aurora PostgreSQL Parameters](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/AuroraPostgreSQL.Reference.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
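+ *
+ * This example pins the server and client character sets for an
+ * "aurora5.6" family cluster parameter group.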
+ */
+import { RdsClusterParameterGroup } from "./.gen/providers/aws/rds-cluster-parameter-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RdsClusterParameterGroup(this, "default", {
+      description: "RDS default cluster parameter group",
+      family: "aurora5.6",
+      name: "rds-cluster-pg",
+      parameter: [
+        {
+          name: "character_set_server",
+          value: "utf8",
+        },
+        {
+          name: "character_set_client",
+          value: "utf8",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the DB cluster parameter group. If omitted, Terraform will assign a random, unique name.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `family` - (Required) The family of the DB cluster parameter group.
+* `description` - (Optional) The description of the DB cluster parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of DB parameters to apply. Note that parameters may differ from one family to another. The full list of all parameters can be discovered via [`aws rds describe-db-cluster-parameters`](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-cluster-parameters.html) after initial creation of the group.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the DB parameter.
+* `value` - (Required) The value of the DB parameter.
+* `applyMethod` - (Optional) "immediate" (default), or "pending-reboot". Some
+  engines can't apply some parameters without a reboot, and you will need to
+  specify "pending-reboot" here.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The DB cluster parameter group name.
+* `arn` - The ARN of the DB cluster parameter group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS Cluster Parameter Groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RDS Cluster Parameter Groups using the `name`.
For example:
+
+```console
+% terraform import aws_rds_cluster_parameter_group.cluster_pg production-pg-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rds_cluster_role_association.html.markdown b/website/docs/cdktf/typescript/r/rds_cluster_role_association.html.markdown
new file mode 100644
index 00000000000..f52941355fa
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rds_cluster_role_association.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_cluster_role_association"
+description: |-
+  Manages an RDS DB Cluster association with an IAM Role.
+---
+
+
+
+# Resource: aws_rds_cluster_role_association
+
+Manages an RDS DB Cluster association with an IAM Role. Example use cases:
+
+* [Creating an IAM Role to Allow Amazon Aurora to Access AWS Services](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Integrating.Authorizing.IAM.CreateRole.html)
+* [Importing Amazon S3 Data into an RDS PostgreSQL DB Cluster](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PostgreSQL.S3Import.html)
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsClusterRoleAssociation } from "./.gen/providers/aws/rds-cluster-role-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RdsClusterRoleAssociation(this, "example", {
+      dbClusterIdentifier: Token.asString(awsRdsClusterExample.id),
+      featureName: "S3_INTEGRATION",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `dbClusterIdentifier` - (Required) DB Cluster Identifier to associate with the IAM Role.
+* `featureName` - (Required) Name of the feature for association. This can be found in the AWS documentation relevant to the integration, or in the full `supportedFeatureNames` list returned by [AWS CLI rds describe-db-engine-versions](https://docs.aws.amazon.com/cli/latest/reference/rds/describe-db-engine-versions.html).
+* `roleArn` - (Required) Amazon Resource Name (ARN) of the IAM Role to associate with the DB Cluster.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - DB Cluster Identifier and IAM Role ARN separated by a comma (`,`)
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsRdsClusterRoleAssociation` using the DB Cluster Identifier and IAM Role ARN separated by a comma (`,`).
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsRdsClusterRoleAssociation` using the DB Cluster Identifier and IAM Role ARN separated by a comma (`,`). For example: + +```console +% terraform import aws_rds_cluster_role_association.example my-db-cluster,arn:aws:iam::123456789012:role/my-role +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/rds_export_task.html.markdown b/website/docs/cdktf/typescript/r/rds_export_task.html.markdown new file mode 100644 index 00000000000..ccaec09c791 --- /dev/null +++ b/website/docs/cdktf/typescript/r/rds_export_task.html.markdown @@ -0,0 +1,226 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_export_task" +description: |- + Terraform resource for managing an AWS RDS (Relational Database) Export Task. +--- + + + +# Resource: aws_rds_export_task + +Terraform resource for managing an AWS RDS (Relational Database) Export Task. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RdsExportTask } from "./.gen/providers/aws/rds-export-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new RdsExportTask(this, "example", { + exportTaskIdentifier: "example", + iamRoleArn: Token.asString(awsIamRoleExample.arn), + kmsKeyId: Token.asString(awsKmsKeyExample.arn), + s3BucketName: Token.asString(awsS3BucketExample.id), + sourceArn: Token.asString(awsDbSnapshotExample.dbSnapshotArn), + }); + } +} + +``` + +### Complete Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
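+ *
+ * This complete example provisions everything the export task depends on:
+ * a DB instance and snapshot, an IAM role and policy that can write to the
+ * bucket, a KMS key for encryption, and the target S3 bucket.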
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DbInstance } from "./.gen/providers/aws/db-instance"; +import { DbSnapshot } from "./.gen/providers/aws/db-snapshot"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { RdsExportTask } from "./.gen/providers/aws/rds-export-task"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DbInstance(this, "example", { + allocatedStorage: 10, + dbName: "test", + engine: "mysql", + engineVersion: "5.7", + identifier: "example", + instanceClass: "db.t3.micro", + parameterGroupName: "default.mysql5.7", + password: "foobarbaz", + skipFinalSnapshot: true, + username: "foo", + }); + const awsDbSnapshotExample = new DbSnapshot(this, "example_1", { + dbInstanceIdentifier: example.identifier, + dbSnapshotIdentifier: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDbSnapshotExample.overrideLogicalId("example"); + const awsIamRoleExample = new IamRole(this, "example_2", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "sts:AssumeRole", + Effect: "Allow", + Principal: { + Service: "export.rds.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + const awsKmsKeyExample = new KmsKey(this, "example_3", { + deletionWindowInDays: 10, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKmsKeyExample.overrideLogicalId("example"); + const awsS3BucketExample = new S3Bucket(this, "example_4", { + bucket: "example", + forceDestroy: true, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketExample.overrideLogicalId("example"); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_5", { + acl: "private", + bucket: Token.asString(awsS3BucketExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_6", + { + statement: [ + { + actions: ["s3:ListAllMyBuckets"], + resources: ["*"], + }, + { + actions: ["s3:GetBucketLocation", "s3:ListBucket"], + resources: [Token.asString(awsS3BucketExample.arn)], + }, + { + actions: ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"], + resources: ["${" + awsS3BucketExample.arn + "}/*"], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsIamPolicyExample = new IamPolicy(this, "example_7", { + name: "example", + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamPolicyExample.overrideLogicalId("example"); + const awsIamRolePolicyAttachmentExample = new IamRolePolicyAttachment( + this, + "example_8", + { + policyArn: Token.asString(awsIamPolicyExample.arn), + role: Token.asString(awsIamRoleExample.name), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentExample.overrideLogicalId("example"); + const awsRdsExportTaskExample = new RdsExportTask(this, "example_9", { + exportOnly: ["database"], + exportTaskIdentifier: "example", + iamRoleArn: Token.asString(awsIamRoleExample.arn), + kmsKeyId: Token.asString(awsKmsKeyExample.arn), + s3BucketName: Token.asString(awsS3BucketExample.id), + s3Prefix: "my_prefix/example", + sourceArn: Token.asString(awsDbSnapshotExample.dbSnapshotArn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRdsExportTaskExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `exportTaskIdentifier` - (Required) Unique identifier for the snapshot export task. +* `iamRoleArn` - (Required) ARN of the IAM role to use for writing to the Amazon S3 bucket. +* `kmsKeyId` - (Required) ID of the Amazon Web Services KMS key to use to encrypt the snapshot. +* `s3BucketName` - (Required) Name of the Amazon S3 bucket to export the snapshot to. +* `sourceArn` - (Required) Amazon Resource Name (ARN) of the snapshot to export. + +The following arguments are optional: + +* `exportOnly` - (Optional) Data to be exported from the snapshot. If this parameter is not provided, all the snapshot data is exported. Valid values are documented in the [AWS StartExportTask API documentation](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_StartExportTask.html#API_StartExportTask_RequestParameters). +* `s3Prefix` - (Optional) Amazon S3 bucket prefix to use as the file name and path of the exported snapshot. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `failureCause` - Reason the export failed, if it failed. +* `id` - Unique identifier for the snapshot export task (same value as `exportTaskIdentifier`). +* `percentProgress` - Progress of the snapshot export task as a percentage. +* `snapshotTime` - Time that the snapshot was created. +* `sourceType` - Type of source for the export. +* `status` - Status of the export task. +* `taskEndTime` - Time that the snapshot export task completed. +* `taskStartTime` - Time that the snapshot export task started. +* `warningMessage` - Warning about the snapshot export task, if any. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a RDS (Relational Database) Export Task using the `exportTaskIdentifier`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import a RDS (Relational Database) Export Task using the `exportTaskIdentifier`. For example: + +```console +% terraform import aws_rds_export_task.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/rds_global_cluster.html.markdown b/website/docs/cdktf/typescript/r/rds_global_cluster.html.markdown new file mode 100644 index 00000000000..aac73ff0a4b --- /dev/null +++ b/website/docs/cdktf/typescript/r/rds_global_cluster.html.markdown @@ -0,0 +1,360 @@ +--- +subcategory: "RDS (Relational Database)" +layout: "aws" +page_title: "AWS: aws_rds_global_cluster" +description: |- + Manages an RDS Global Cluster +--- + + + +# Resource: aws_rds_global_cluster + +Manages an RDS Global Cluster, which is an Aurora global database spread across multiple regions. The global database contains a single primary cluster with read-write capability, and a read-only secondary cluster that receives data from the primary cluster through high-speed replication performed by the Aurora storage subsystem. + +More information about Aurora global databases can be found in the [Aurora User Guide](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html#aurora-global-database-creating). + +## Example Usage + +### New MySQL Global Cluster + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RdsCluster } from "./.gen/providers/aws/rds-cluster"; +import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance"; +import { RdsGlobalCluster } from "./.gen/providers/aws/rds-global-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new RdsGlobalCluster(this, "example", { + databaseName: "example_db", + engine: "aurora", + engineVersion: "5.6.mysql_aurora.1.22.2", + globalClusterIdentifier: "global-test", + }); + const primary = new RdsCluster(this, "primary", { + clusterIdentifier: "test-primary-cluster", + databaseName: "example_db", + dbSubnetGroupName: "default", + engine: example.engine, + engineVersion: example.engineVersion, + globalClusterIdentifier: example.id, + masterPassword: "somepass123", + masterUsername: "username", + provider: awsPrimary, + }); + const awsRdsClusterInstancePrimary = new RdsClusterInstance( + this, + "primary_2", + { + clusterIdentifier: primary.id, + dbSubnetGroupName: "default", + engine: example.engine, + engineVersion: example.engineVersion, + identifier: "test-primary-cluster-instance", + instanceClass: "db.r4.large", + provider: awsPrimary, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsRdsClusterInstancePrimary.overrideLogicalId("primary"); + const secondary = new RdsCluster(this, "secondary", { + clusterIdentifier: "test-secondary-cluster", + dbSubnetGroupName: "default", + dependsOn: [awsRdsClusterInstancePrimary], + engine: example.engine, + engineVersion: example.engineVersion, + globalClusterIdentifier: example.id, + provider: awsSecondary, + }); + const awsRdsClusterInstanceSecondary = new RdsClusterInstance( + this, + "secondary_4", + { + clusterIdentifier: secondary.id, + dbSubnetGroupName: "default", + engine: example.engine, + engineVersion: example.engineVersion, + identifier: "test-secondary-cluster-instance", + instanceClass: "db.r4.large", + provider: awsSecondary, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRdsClusterInstanceSecondary.overrideLogicalId("secondary"); + } +} + +``` + +### New PostgreSQL Global Cluster + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { RdsCluster } from "./.gen/providers/aws/rds-cluster"; +import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance"; +import { RdsGlobalCluster } from "./.gen/providers/aws/rds-global-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primary = new AwsProvider(this, "aws", { + alias: "primary", + region: "us-east-2", + }); + const secondary = new AwsProvider(this, "aws_1", { + alias: "secondary", + region: "us-east-1", + }); + const example = new RdsGlobalCluster(this, "example", { + databaseName: "example_db", + engine: "aurora-postgresql", + engineVersion: "11.9", + globalClusterIdentifier: "global-test", + }); + const awsRdsClusterPrimary = new RdsCluster(this, "primary", { + clusterIdentifier: "test-primary-cluster", + databaseName: "example_db", + dbSubnetGroupName: "default", + engine: example.engine, + engineVersion: example.engineVersion, + globalClusterIdentifier: example.id, + masterPassword: "somepass123", + masterUsername: "username", + provider: primary, + }); + const awsRdsClusterInstancePrimary = new RdsClusterInstance( + this, + "primary_4", + { + clusterIdentifier: Token.asString(awsRdsClusterPrimary.id), + dbSubnetGroupName: "default", + engine: example.engine, + engineVersion: example.engineVersion, + identifier: "test-primary-cluster-instance", + instanceClass: "db.r4.large", + provider: primary, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsRdsClusterInstancePrimary.overrideLogicalId("primary");
+    const awsRdsClusterSecondary = new RdsCluster(this, "secondary", {
+      clusterIdentifier: "test-secondary-cluster",
+      dbSubnetGroupName: "default",
+      dependsOn: [awsRdsClusterInstancePrimary],
+      engine: example.engine,
+      engineVersion: example.engineVersion,
+      globalClusterIdentifier: example.id,
+      provider: secondary,
+      skipFinalSnapshot: true,
+    });
+    const awsRdsClusterInstanceSecondary = new RdsClusterInstance(
+      this,
+      "secondary_6",
+      {
+        clusterIdentifier: Token.asString(awsRdsClusterSecondary.id),
+        dbSubnetGroupName: "default",
+        engine: example.engine,
+        engineVersion: example.engineVersion,
+        identifier: "test-secondary-cluster-instance",
+        instanceClass: "db.r4.large",
+        provider: secondary,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsClusterInstanceSecondary.overrideLogicalId("secondary");
+  }
+}
+
+```
+
+### New Global Cluster From Existing DB Cluster
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+import { RdsGlobalCluster } from "./.gen/providers/aws/rds-global-cluster";
+interface MyConfig {
+  engine: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const example = new RdsCluster(this, "example", {
+      lifecycle: {
+        // CDKTF takes the Terraform attribute name as a string here.
+        ignoreChanges: ["global_cluster_identifier"],
+      },
+      engine: config.engine,
+    });
+    const awsRdsGlobalClusterExample = new RdsGlobalCluster(this, "example_1", {
+      forceDestroy: true,
+      globalClusterIdentifier: "example",
+      sourceDbClusterIdentifier: example.arn,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsGlobalClusterExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Upgrading Engine Versions
+
+When you upgrade the version of an `awsRdsGlobalCluster`, Terraform will attempt an in-place upgrade of the engine versions of all associated clusters. Since the `awsRdsCluster` resource is being updated through the `awsRdsGlobalCluster`, you are likely to get an error (`Provider produced inconsistent final plan`). To avoid this, use the `lifecycle` meta-argument with `ignoreChanges`, as shown below on the `awsRdsCluster`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
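+ *
+ * The primary cluster below ignores drift on engine_version so that the
+ * upgrade flows in from the global cluster resource.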
+ */
+import { RdsCluster } from "./.gen/providers/aws/rds-cluster";
+import { RdsClusterInstance } from "./.gen/providers/aws/rds-cluster-instance";
+import { RdsGlobalCluster } from "./.gen/providers/aws/rds-global-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new RdsGlobalCluster(this, "example", {
+      engine: "aurora-mysql",
+      engineVersion: "5.7.mysql_aurora.2.07.5",
+      globalClusterIdentifier: "kyivkharkiv",
+    });
+    const primary = new RdsCluster(this, "primary", {
+      allowMajorVersionUpgrade: true,
+      applyImmediately: true,
+      clusterIdentifier: "odessadnipro",
+      databaseName: "totoro",
+      engine: example.engine,
+      engineVersion: example.engineVersion,
+      globalClusterIdentifier: example.id,
+      lifecycle: {
+        // CDKTF takes the Terraform attribute name as a string here.
+        ignoreChanges: ["engine_version"],
+      },
+      masterPassword: "satsukimae",
+      masterUsername: "maesatsuki",
+      skipFinalSnapshot: true,
+    });
+    const awsRdsClusterInstancePrimary = new RdsClusterInstance(
+      this,
+      "primary_2",
+      {
+        applyImmediately: true,
+        clusterIdentifier: primary.id,
+        engine: primary.engine,
+        engineVersion: primary.engineVersion,
+        identifier: "donetsklviv",
+        instanceClass: "db.r4.large",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRdsClusterInstancePrimary.overrideLogicalId("primary");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `globalClusterIdentifier` - (Required, Forces new resources) Global cluster identifier.
+* `databaseName` - (Optional, Forces new resources) Name for an automatically created database on cluster creation.
+* `deletionProtection` - (Optional) If the Global Cluster should have deletion protection enabled. The database can't be deleted when this value is set to `true`. The default is `false`.
+* `engine` - (Optional, Forces new resources) Name of the database engine to be used for this DB cluster. Terraform will only perform drift detection if a configuration value is provided. Valid values: `aurora`, `auroraMysql`, `auroraPostgresql`. Defaults to `aurora`. Conflicts with `sourceDbClusterIdentifier`.
+* `engineVersion` - (Optional) Engine version of the Aurora global database. The `engine`, `engineVersion`, and `instanceClass` (on the `awsRdsClusterInstance`) must together support global databases. See [Using Amazon Aurora global databases](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html) for more information. When you upgrade the engine version, Terraform upgrades the cluster members as well. **NOTE:** To avoid an `inconsistent final plan` error while upgrading, use the `lifecycle` meta-argument with `ignoreChanges` for `engineVersion` on the associated `awsRdsCluster` resource, as shown in the [Upgrading Engine Versions](#upgrading-engine-versions) example above.
+* `forceDestroy` - (Optional) Enable to remove DB Cluster members from Global Cluster on destroy. Required with `sourceDbClusterIdentifier`.
+* `sourceDbClusterIdentifier` - (Optional) Amazon Resource Name (ARN) to use as the primary DB Cluster of the Global Cluster on creation. Terraform cannot perform drift detection of this value.
+* `storageEncrypted` - (Optional, Forces new resources) Specifies whether the DB cluster is encrypted. The default is `false` unless `sourceDbClusterIdentifier` is specified and encrypted. Terraform will only perform drift detection if a configuration value is provided.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - RDS Global Cluster Amazon Resource Name (ARN)
+* `globalClusterMembers` - Set of objects containing Global Cluster members.
+    * `dbClusterArn` - Amazon Resource Name (ARN) of member DB Cluster
+    * `isWriter` - Whether the member is the primary DB Cluster
+* `globalClusterResourceId` - AWS Region-unique, immutable identifier for the global database cluster. This identifier is found in AWS CloudTrail log entries whenever the AWS KMS key for the DB cluster is accessed.
+* `id` - RDS Global Cluster identifier
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `update` - (Default `90m`)
+- `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsRdsGlobalCluster` using the RDS Global Cluster identifier. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsRdsGlobalCluster` using the RDS Global Cluster identifier. For example:
+
+```console
+% terraform import aws_rds_global_cluster.example example
+```
+
+Certain resource arguments, like `forceDestroy`, only exist within Terraform. If the argument is set in the Terraform configuration on an imported resource, Terraform will show a difference on the first plan after import to update the state value. This change is safe to apply immediately so the state matches the desired configuration.
+
+Certain resource arguments, like `sourceDbClusterIdentifier`, do not have an API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RdsGlobalCluster } from "./.gen/providers/aws/rds-global-cluster";
+interface MyConfig {
+  globalClusterIdentifier: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RdsGlobalCluster(this, "example", {
+      lifecycle: {
+        // CDKTF takes the Terraform attribute name as a string here.
+        ignoreChanges: ["source_db_cluster_identifier"],
+      },
+      globalClusterIdentifier: config.globalClusterIdentifier,
+    });
+  }
+}
+
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rds_reserved_instance.html.markdown b/website/docs/cdktf/typescript/r/rds_reserved_instance.html.markdown
new file mode 100644
index 00000000000..a85de6f0411
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rds_reserved_instance.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "RDS (Relational Database)"
+layout: "aws"
+page_title: "AWS: aws_rds_reserved_instance"
+description: |-
+  Manages an RDS DB Reserved Instance
+---
+
+
+
+# Resource: aws_rds_reserved_instance
+
+Manages an RDS DB Reserved Instance.
+
+~> **NOTE:** Once created, a reservation is valid for the `duration` of the provided `offeringId` and cannot be deleted. Performing a `destroy` will only remove the resource from state. For more information see [RDS Reserved Instances Documentation](https://aws.amazon.com/rds/reserved-instances/) and [PurchaseReservedDBInstancesOffering](https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_PurchaseReservedDBInstancesOffering.html).
+
+~> **NOTE:** Due to the expense of testing this resource, we provide it as best effort. If you find it useful and have the ability to help test or notice issues, consider reaching out to us on [GitHub](https://github.com/hashicorp/terraform-provider-aws).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRdsReservedInstanceOffering } from "./.gen/providers/aws/data-aws-rds-reserved-instance-offering";
+import { RdsReservedInstance } from "./.gen/providers/aws/rds-reserved-instance";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new DataAwsRdsReservedInstanceOffering(this, "test", {
+      dbInstanceClass: "db.t2.micro",
+      duration: 31536000,
+      multiAz: false,
+      offeringType: "All Upfront",
+      productDescription: "mysql",
+    });
+    new RdsReservedInstance(this, "example", {
+      instanceCount: 3,
+      offeringId: Token.asString(test.offeringId),
+      reservationId: "optionalCustomReservationID",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `offeringId` - (Required) ID of the Reserved DB instance offering to purchase. To determine an `offeringId`, see the `awsRdsReservedInstanceOffering` data source.
+
+The following arguments are optional:
+
+* `instanceCount` - (Optional) Number of instances to reserve. Default value is `1`.
+* `reservationId` - (Optional) Customer-specified identifier to track this reservation.
+* `tags` - (Optional) Map of tags to assign to the DB reservation. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN for the reserved DB instance.
+* `id` - Unique identifier for the reservation. Same as `reservationId`.
+* `currencyCode` - Currency code for the reserved DB instance.
+* `duration` - Duration of the reservation in seconds.
+* `fixedPrice` - Fixed price charged for this reserved DB instance.
+* `dbInstanceClass` - DB instance class for the reserved DB instance.
+* `leaseId` - Unique identifier for the lease associated with the reserved DB instance. Amazon Web Services Support might request the lease ID for an issue related to a reserved DB instance.
+* `multiAz` - Whether the reservation applies to Multi-AZ deployments.
+* `offeringType` - Offering type of this reserved DB instance.
+* `productDescription` - Description of the reserved DB instance.
+* `recurringCharges` - Recurring price charged to run this reserved DB instance.
+* `startTime` - Time the reservation started.
+* `state` - State of the reserved DB instance.
+* `usagePrice` - Hourly price charged for this reserved DB instance.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `1m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import RDS DB Instance Reservations using the `instanceId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import RDS DB Instance Reservations using the `instanceId`. For example:
+
+```console
+% terraform import aws_rds_reserved_instance.reservation_instance CustomReservationID
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_authentication_profile.html.markdown b/website/docs/cdktf/typescript/r/redshift_authentication_profile.html.markdown
new file mode 100644
index 00000000000..28e65fd35ad
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_authentication_profile.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_authentication_profile"
+description: |-
+  Creates a Redshift authentication profile
+---
+
+
+# Resource: aws_redshift_authentication_profile
+
+Creates a Redshift authentication profile.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftAuthenticationProfile } from "./.gen/providers/aws/redshift-authentication-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftAuthenticationProfile(this, "example", {
+      authenticationProfileContent: Token.asString(
+        Fn.jsonencode({
+          AllowDBUserOverride: "1",
+          App_ID: "example",
+          Client_ID: "ExampleClientID",
+        })
+      ),
+      authenticationProfileName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `authenticationProfileName` - (Required, Forces new resource) The name of the authentication profile.
+* `authenticationProfileContent` - (Required) The content of the authentication profile in JSON format. The maximum length of the JSON string is determined by a quota for your account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the authentication profile.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Authentication Profiles by `authenticationProfileName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Authentication Profiles by `authenticationProfileName`. For example:
+
+```console
+% terraform import aws_redshift_authentication_profile.test example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_cluster.html.markdown b/website/docs/cdktf/typescript/r/redshift_cluster.html.markdown
new file mode 100644
index 00000000000..8a359551c82
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_cluster.html.markdown
@@ -0,0 +1,181 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster"
+description: |-
+  Provides a Redshift Cluster resource.
+---
+
+
+# Resource: aws_redshift_cluster
+
+Provides a Redshift Cluster resource.
+
+~> **Note:** All arguments including the username and password will be stored in the raw state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+~> **NOTE:** A Redshift cluster's default IAM role can be managed both by this resource's `defaultIamRoleArn` argument and the [`awsRedshiftClusterIamRoles`](redshift_cluster_iam_roles.html) resource's `defaultIamRoleArn` argument. Do not configure different values for both arguments. Doing so will cause a conflict of default IAM roles.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftCluster } from "./.gen/providers/aws/redshift-cluster";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftCluster(this, "example", {
+      clusterIdentifier: "tf-redshift-cluster",
+      clusterType: "single-node",
+      databaseName: "mydb",
+      masterPassword: "Mustbe8characters",
+      masterUsername: "exampleuser",
+      nodeType: "dc1.large",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+For more detailed documentation about each argument, refer to the [AWS official documentation](http://docs.aws.amazon.com/cli/latest/reference/redshift/index.html#cli-aws-redshift).
+
+This resource supports the following arguments:
+
+* `clusterIdentifier` - (Required) The Cluster Identifier. Must be a lower case string.
+* `databaseName` - (Optional) The name of the first database to be created when the cluster is created. If you do not provide a name, Amazon Redshift will create a default database called `dev`.
+* `defaultIamRoleArn` - (Optional) The Amazon Resource Name (ARN) for the IAM role that was set as default for the cluster when the cluster was created.
+* `nodeType` - (Required) The node type to be provisioned for the cluster.
+* `clusterType` - (Optional) The cluster type to use. Either `single-node` or `multi-node`.
+* `masterPassword` - (Required unless a `snapshotIdentifier` is provided) Password for the master DB user. Note that this may show up in logs, and it will be stored in the state file. The password must contain at least 8 characters and contain at least one uppercase letter, one lowercase letter, and one number.
+* `masterUsername` - (Required unless a `snapshotIdentifier` is provided) Username for the master DB user.
+* `vpcSecurityGroupIds` - (Optional) A list of Virtual Private Cloud (VPC) security groups to be associated with the cluster.
+* `clusterSubnetGroupName` - (Optional) The name of a cluster subnet group to be associated with this cluster. If this parameter is not provided, the resulting cluster will be deployed outside of a virtual private cloud (VPC).
+* `availabilityZone` - (Optional) The EC2 Availability Zone (AZ) in which you want Amazon Redshift to provision the cluster. For example, if you have several EC2 instances running in a specific Availability Zone, then you might want the cluster to be provisioned in the same zone in order to decrease network latency. Can only be changed if `availabilityZoneRelocationEnabled` is `true`.
+* `availabilityZoneRelocationEnabled` - (Optional) If `true`, the cluster can be relocated to another availability zone, either automatically by AWS or when requested. Default is `false`. Available for use on clusters from the RA3 instance family.
+* `preferredMaintenanceWindow` - (Optional) The weekly time range (in UTC) during which automated cluster maintenance can occur. Format: ddd:hh24:mi-ddd:hh24:mi
+* `clusterParameterGroupName` - (Optional) The name of the parameter group to be associated with this cluster.
+* `automatedSnapshotRetentionPeriod` - (Optional) The number of days that automated snapshots are retained. If the value is 0, automated snapshots are disabled. Even if automated snapshots are disabled, you can still create manual snapshots when you want with create-cluster-snapshot. Default is 1.
+* `port` - (Optional) The port number on which the cluster accepts incoming connections. Valid values are between `1115` and `65535`. The cluster is accessible only via the JDBC and ODBC connection strings.
+  Part of the connection string requires the port on which the cluster will listen for incoming connections. The default port is `5439`.
+* `clusterVersion` - (Optional) The version of the Amazon Redshift engine software that you want to deploy on the cluster. The version selected runs on all the nodes in the cluster.
+* `allowVersionUpgrade` - (Optional) If `true`, major version upgrades can be applied during the maintenance window to the Amazon Redshift engine that is running on the cluster. Default is `true`.
+* `applyImmediately` - (Optional) Specifies whether any cluster modifications are applied immediately, or during the next maintenance window. Default is `false`.
+* `aquaConfigurationStatus` - (Optional, **Deprecated**) The value represents how the cluster is configured to use AQUA (Advanced Query Accelerator) after the cluster is restored. No longer supported by the AWS API. Always returns `auto`.
+* `numberOfNodes` - (Optional) The number of compute nodes in the cluster. This parameter is required when the ClusterType parameter is specified as multi-node. Default is 1.
+* `publiclyAccessible` - (Optional) If `true`, the cluster can be accessed from a public network. Default is `true`.
+* `encrypted` - (Optional) If `true`, the data in the cluster is encrypted at rest.
+* `enhancedVpcRouting` - (Optional) If `true`, enhanced VPC routing is enabled.
+* `kmsKeyId` - (Optional) The ARN for the KMS encryption key. When specifying `kmsKeyId`, `encrypted` needs to be set to `true`.
+* `elasticIp` - (Optional) The Elastic IP (EIP) address for the cluster.
+* `skipFinalSnapshot` - (Optional) Determines whether a final snapshot of the cluster is created before Amazon Redshift deletes the cluster. If `true`, a final cluster snapshot is not created. If `false`, a final cluster snapshot is created before the cluster is deleted. Default is `false`.
+* `finalSnapshotIdentifier` - (Optional) The identifier of the final snapshot that is to be created immediately before deleting the cluster. If this parameter is provided, `skipFinalSnapshot` must be `false`.
+* `snapshotIdentifier` - (Optional) The name of the snapshot from which to create the new cluster.
+* `snapshotClusterIdentifier` - (Optional) The name of the cluster the source snapshot was created from.
+* `ownerAccount` - (Optional) The AWS customer account used to create or copy the snapshot. Required if you are restoring a snapshot you do not own, optional if you own the snapshot.
+* `iamRoles` - (Optional) A list of IAM Role ARNs to associate with the cluster. A maximum of 10 can be associated with the cluster at any time.
+* `logging` - (Optional) Logging, documented below.
+* `maintenanceTrackName` - (Optional) The name of the maintenance track for the restored cluster. When you take a snapshot, the snapshot inherits the MaintenanceTrack value from the cluster. The snapshot might be on a different track than the cluster that was the source for the snapshot. For example, suppose that you take a snapshot of a cluster that is on the current track and then change the cluster to be on the trailing track. In this case, the snapshot and the source cluster are on different tracks. Default value is `current`.
+* `manualSnapshotRetentionPeriod` - (Optional) The default number of days to retain a manual snapshot. If the value is `-1`, the snapshot is retained indefinitely. This setting doesn't change the retention period of existing snapshots. Valid values are between `-1` and `3653`. Default value is `-1`.
+* `snapshotCopy` - (Optional) Configuration of automatic copy of snapshots from one region to another. Documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Blocks
+
+#### `logging`
+
+* `enable` - (Required) Enables logging information such as queries and connection attempts, for the specified Amazon Redshift cluster.
+* `bucketName` - (Optional, required when `enable` is `true` and `logDestinationType` is `s3`) The name of an existing S3 bucket where the log files are to be stored. Must be in the same region as the cluster, and the cluster must have read bucket and put object permissions. For more information on the permissions required for the bucket, please read the AWS [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/db-auditing.html#db-auditing-enable-logging).
+* `s3KeyPrefix` - (Optional) The prefix applied to the log file names.
+* `logDestinationType` - (Optional) The log destination type. An enum with possible values of `s3` and `cloudwatch`.
+* `logExports` - (Optional) The collection of exported log types. Log types include the connection log, user log and user activity log. Required when `logDestinationType` is `cloudwatch`. Valid log types are `connectionlog`, `userlog`, and `useractivitylog`.
+
+#### `snapshotCopy`
+
+* `destinationRegion` - (Required) The destination region that you want to copy snapshots to.
+* `retentionPeriod` - (Optional) The number of days to retain automated snapshots in the destination region after they are copied from the source region. Defaults to `7`.
+* `grantName` - (Optional) The name of the snapshot copy grant to use when snapshots of an AWS KMS-encrypted cluster are copied to the destination region.
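+
+The nested blocks compose with the top-level arguments. A minimal sketch, assuming the bucket already exists with the permissions described above; the bucket name and destination region are illustrative placeholders:
+
+```typescript
+new RedshiftCluster(this, "example_with_blocks", {
+  clusterIdentifier: "tf-redshift-cluster",
+  clusterType: "single-node",
+  databaseName: "mydb",
+  masterPassword: "Mustbe8characters",
+  masterUsername: "exampleuser",
+  nodeType: "dc1.large",
+  // Audit logs are delivered to an existing, correctly permissioned bucket.
+  logging: {
+    enable: true,
+    logDestinationType: "s3",
+    bucketName: "example-audit-logs", // placeholder bucket name
+    s3KeyPrefix: "redshift/",
+  },
+  // Automated snapshots are copied to a second region.
+  snapshotCopy: {
+    destinationRegion: "us-east-2",
+    retentionPeriod: 7,
+  },
+});
+```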
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the cluster.
+* `id` - The Redshift Cluster ID.
+* `clusterIdentifier` - The Cluster Identifier.
+* `clusterType` - The cluster type.
+* `nodeType` - The type of nodes in the cluster.
+* `databaseName` - The name of the default database in the Cluster.
+* `availabilityZone` - The availability zone of the Cluster.
+* `automatedSnapshotRetentionPeriod` - The backup retention period.
+* `preferredMaintenanceWindow` - The backup window.
+* `endpoint` - The connection endpoint.
+* `encrypted` - Whether the data in the cluster is encrypted.
+* `vpcSecurityGroupIds` - The VPC security group IDs associated with the cluster.
+* `dnsName` - The DNS name of the cluster.
+* `port` - The port the cluster responds on.
+* `clusterVersion` - The version of Redshift engine software.
+* `clusterParameterGroupName` - The name of the parameter group to be associated with this cluster.
+* `clusterSubnetGroupName` - The name of a cluster subnet group to be associated with this cluster.
+* `clusterPublicKey` - The public key for the cluster.
+* `clusterRevisionNumber` - The specific revision number of the database in the cluster.
+* `clusterNodes` - The nodes in the cluster. Cluster node blocks are documented below.
+* `clusterNamespaceArn` - The namespace Amazon Resource Name (ARN) of the cluster.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+Cluster nodes (for `clusterNodes`) support the following attributes:
+
+* `nodeRole` - Whether the node is a leader node or a compute node.
+* `privateIpAddress` - The private IP address of a node within a cluster.
+* `publicIpAddress` - The public IP address of a node within a cluster.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `75m`)
+- `update` - (Default `75m`)
+- `delete` - (Default `40m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Clusters using the `clusterIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Clusters using the `clusterIdentifier`. For example:
+
+```console
+% terraform import aws_redshift_cluster.myprodcluster tf-redshift-cluster-12345
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_cluster_iam_roles.html.markdown b/website/docs/cdktf/typescript/r/redshift_cluster_iam_roles.html.markdown
new file mode 100644
index 00000000000..1319d3c36a3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_cluster_iam_roles.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster_iam_roles"
+description: |-
+  Provides a Redshift Cluster IAM Roles resource.
+---
+
+
+# Resource: aws_redshift_cluster_iam_roles
+
+Provides a Redshift Cluster IAM Roles resource.
+
+~> **NOTE:** A Redshift cluster's default IAM role can be managed both by this resource's `defaultIamRoleArn` argument and the [`awsRedshiftCluster`](redshift_cluster.html) resource's `defaultIamRoleArn` argument. Do not configure different values for both arguments. Doing so will cause a conflict of default IAM roles.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftClusterIamRoles } from "./.gen/providers/aws/redshift-cluster-iam-roles";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftClusterIamRoles(this, "example", {
+      clusterIdentifier: Token.asString(
+        awsRedshiftClusterExample.clusterIdentifier
+      ),
+      iamRoleArns: [Token.asString(awsIamRoleExample.arn)],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `clusterIdentifier` - (Required) The identifier of the Redshift cluster to manage IAM roles for.
+* `iamRoleArns` - (Optional) A list of IAM Role ARNs to associate with the cluster. A maximum of 10 can be associated with the cluster at any time.
+* `defaultIamRoleArn` - (Optional) The Amazon Resource Name (ARN) for the IAM role that was set as default for the cluster when the cluster was created.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Redshift Cluster ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Cluster IAM Roles using the `clusterIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Cluster IAM Roles using the `clusterIdentifier`. For example:
+
+```console
+% terraform import aws_redshift_cluster_iam_roles.examplegroup1 example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_cluster_snapshot.html.markdown b/website/docs/cdktf/typescript/r/redshift_cluster_snapshot.html.markdown
new file mode 100644
index 00000000000..af18a1132bf
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_cluster_snapshot.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_cluster_snapshot"
+description: |-
+  Creates a Redshift cluster snapshot
+---
+
+
+# Resource: aws_redshift_cluster_snapshot
+
+Creates a Redshift cluster snapshot.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftClusterSnapshot } from "./.gen/providers/aws/redshift-cluster-snapshot";
+interface MyConfig {
+  clusterIdentifier: any;
+  snapshotIdentifier: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RedshiftClusterSnapshot(this, "example", {
+      clusterIdentifier: config.clusterIdentifier,
+      snapshotIdentifier: config.snapshotIdentifier,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `clusterIdentifier` - (Required, Forces new resource) The cluster identifier for which you want a snapshot.
+* `snapshotIdentifier` - (Required, Forces new resource) A unique identifier for the snapshot that you are requesting. This identifier must be unique for all snapshots within the Amazon Web Services account.
+* `manualSnapshotRetentionPeriod` - (Optional) The number of days that a manual snapshot is retained. If the value is `-1`, the manual snapshot is retained indefinitely. Valid values are `-1` and between `1` and `3653`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the snapshot.
+* `id` - A unique identifier for the snapshot that you are requesting. This identifier must be unique for all snapshots within the Amazon Web Services account.
+* `kmsKeyId` - The Key Management Service (KMS) key ID of the encryption key that was used to encrypt data in the cluster from which the snapshot was taken.
+* `ownerAccount` - For manual snapshots, the Amazon Web Services account used to create or copy the snapshot. For automatic snapshots, the owner of the cluster. The owner can perform all snapshot actions, such as sharing a manual snapshot.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Cluster Snapshots using `snapshotIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Cluster Snapshots using `snapshotIdentifier`. For example:
+
+```console
+% terraform import aws_redshift_cluster_snapshot.test example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_endpoint_access.html.markdown b/website/docs/cdktf/typescript/r/redshift_endpoint_access.html.markdown
new file mode 100644
index 00000000000..3e089913815
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_endpoint_access.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_endpoint_access"
+description: |-
+  Provides a Redshift Endpoint Access resource.
+---
+
+
+# Resource: aws_redshift_endpoint_access
+
+Creates a new Amazon Redshift endpoint access.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftEndpointAccess } from "./.gen/providers/aws/redshift-endpoint-access";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftEndpointAccess(this, "example", {
+      clusterIdentifier: Token.asString(
+        awsRedshiftClusterExample.clusterIdentifier
+      ),
+      endpointName: "example",
+      subnetGroupName: Token.asString(awsRedshiftSubnetGroupExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `clusterIdentifier` - (Required) The cluster identifier of the cluster to access.
+* `endpointName` - (Required) The Redshift-managed VPC endpoint name.
+* `resourceOwner` - (Optional) The Amazon Web Services account ID of the owner of the cluster. This is only required if the cluster is in another Amazon Web Services account.
+* `subnetGroupName` - (Required) The subnet group from which Amazon Redshift chooses the subnet to deploy the endpoint.
+* `vpcSecurityGroupIds` - (Optional) The security groups that define the ports, protocols, and sources for inbound traffic that you are authorizing into your endpoint.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `address` - The DNS address of the endpoint.
+* `id` - The Redshift-managed VPC endpoint name.
+* `port` - The port number on which the cluster accepts incoming connections.
+* `vpcEndpoint` - The connection endpoint for connecting to an Amazon Redshift cluster through the proxy. See details below.
+
+### VPC Endpoint
+
+* `networkInterface` - One or more network interfaces of the endpoint. Also known as an interface endpoint. See details below.
+* `vpcEndpointId` - The connection endpoint ID for connecting an Amazon Redshift cluster through the proxy.
+* `vpcId` - The VPC identifier that the endpoint is associated with.
+
+### Network Interface
+
+* `availabilityZone` - The Availability Zone.
+* `networkInterfaceId` - The network interface identifier.
+* `privateIpAddress` - The IPv4 address of the network interface within the subnet.
+* `subnetId` - The subnet identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift endpoint access using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift endpoint access using the `name`. For example:
+
+```console
+% terraform import aws_redshift_endpoint_access.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_endpoint_authorization.html.markdown b/website/docs/cdktf/typescript/r/redshift_endpoint_authorization.html.markdown
new file mode 100644
index 00000000000..83cb53c6056
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_endpoint_authorization.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_endpoint_authorization"
+description: |-
+  Provides a Redshift Endpoint Authorization resource.
+---
+
+
+# Resource: aws_redshift_endpoint_authorization
+
+Creates a new Amazon Redshift endpoint authorization.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftEndpointAuthorization } from "./.gen/providers/aws/redshift-endpoint-authorization";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftEndpointAuthorization(this, "example", {
+      account: "01234567910",
+      clusterIdentifier: Token.asString(
+        awsRedshiftClusterExample.clusterIdentifier
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `account` - (Required) The Amazon Web Services account ID to grant access to.
+* `clusterIdentifier` - (Required) The cluster identifier of the cluster to grant access to.
+* `forceDelete` - (Optional) Indicates whether to force the revoke action. If `true`, the Redshift-managed VPC endpoints associated with the endpoint authorization are also deleted. Default value is `false`.
+* `vpcIds` - (Optional) The virtual private cloud (VPC) identifiers to grant access to. If none are specified, all VPCs in the shared account are allowed.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `allowedAllVpcs` - Indicates whether all VPCs in the grantee account are allowed access to the cluster.
+* `id` - The identifier of the Redshift Endpoint Authorization, `account`, and `clusterIdentifier` separated by a colon (`:`).
+* `endpointCount` - The number of Redshift-managed VPC endpoints created for the authorization.
+* `grantee` - The Amazon Web Services account ID of the grantee of the cluster.
+* `grantor` - The Amazon Web Services account ID of the cluster owner.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift endpoint authorization using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift endpoint authorization using the `id`. For example:
+
+```console
+% terraform import aws_redshift_endpoint_authorization.example 01234567910:cluster-example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_event_subscription.html.markdown b/website/docs/cdktf/typescript/r/redshift_event_subscription.html.markdown
new file mode 100644
index 00000000000..7593e11876d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_event_subscription.html.markdown
@@ -0,0 +1,115 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_event_subscription"
+description: |-
+  Provides a Redshift event subscription resource.
+---
+
+
+# Resource: aws_redshift_event_subscription
+
+Provides a Redshift event subscription resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftCluster } from "./.gen/providers/aws/redshift-cluster";
+import { RedshiftEventSubscription } from "./.gen/providers/aws/redshift-event-subscription";
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+interface MyConfig {
+  nodeType: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    const defaultVar = new RedshiftCluster(this, "default", {
+      clusterIdentifier: "default",
+      databaseName: "default",
+      nodeType: config.nodeType,
+    });
+    const awsSnsTopicDefault = new SnsTopic(this, "default_1", {
+      name: "redshift-events",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSnsTopicDefault.overrideLogicalId("default");
+    const awsRedshiftEventSubscriptionDefault = new RedshiftEventSubscription(
+      this,
+      "default_2",
+      {
+        eventCategories: [
+          "configuration",
+          "management",
+          "monitoring",
+          "security",
+        ],
+        name: "redshift-event-sub",
+        severity: "INFO",
+        snsTopicArn: Token.asString(awsSnsTopicDefault.arn),
+        sourceIds: [defaultVar.id],
+        sourceType: "cluster",
+        tags: {
+          Name: "default",
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRedshiftEventSubscriptionDefault.overrideLogicalId("default");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Redshift event subscription.
+* `snsTopicArn` - (Required) The ARN of the SNS topic to send events to.
+* `sourceIds` - (Optional) A list of identifiers of the event sources for which events will be returned. If not specified, then all sources are included in the response. If specified, a `sourceType` must also be specified.
+* `sourceType` - (Optional) The type of source that will be generating the events. Valid options are `cluster`, `cluster-parameter-group`, `cluster-security-group`, `cluster-snapshot`, or `scheduled-action`. If not set, all sources will be subscribed to.
+* `severity` - (Optional) The event severity to be published by the notification subscription. Valid options are `info` or `error`. Defaults to `info`.
+* `eventCategories` - (Optional) A list of event categories for a SourceType that you want to subscribe to. See https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-event-notifications.html or run `aws redshift describe-event-categories`.
+* `enabled` - (Optional) A boolean flag to enable/disable the subscription. Defaults to `true`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
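+
+Omitting both `sourceIds` and `sourceType` subscribes to the chosen event categories for every Redshift source in the account. A minimal sketch, reusing the `awsSnsTopicDefault` construct from the example above; the subscription name is illustrative:
+
+```typescript
+new RedshiftEventSubscription(this, "all_sources", {
+  name: "redshift-all-sources", // illustrative name
+  snsTopicArn: Token.asString(awsSnsTopicDefault.arn),
+  // No sourceIds/sourceType: events from all sources are delivered.
+});
+```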
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift event notification subscription.
+* `id` - The name of the Redshift event notification subscription.
+* `customerAwsId` - The AWS customer account associated with the Redshift event notification subscription.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Event Subscriptions using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Event Subscriptions using the `name`. For example:
+
+```console
+% terraform import aws_redshift_event_subscription.default redshift-event-sub
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_hsm_client_certificate.html.markdown b/website/docs/cdktf/typescript/r/redshift_hsm_client_certificate.html.markdown
new file mode 100644
index 00000000000..3d82b6a688e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_hsm_client_certificate.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_hsm_client_certificate"
+description: |-
+  Creates an HSM client certificate that an Amazon Redshift cluster will use to connect to the client's HSM in order to store and retrieve the keys used to encrypt the cluster databases.
+---
+
+
+# Resource: aws_redshift_hsm_client_certificate
+
+Creates an HSM client certificate that an Amazon Redshift cluster will use to connect to the client's HSM in order to store and retrieve the keys used to encrypt the cluster databases.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftHsmClientCertificate } from "./.gen/providers/aws/redshift-hsm-client-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftHsmClientCertificate(this, "example", {
+      hsmClientCertificateIdentifier: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `hsmClientCertificateIdentifier` - (Required, Forces new resource) The identifier of the HSM client certificate.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
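+
+The certificate's public key (the `hsmClientCertificatePublicKey` attribute below) must be registered with the HSM before a cluster can use it. A minimal sketch exporting it as a stack output so it can be retrieved after deploy; it assumes the certificate construct above is captured in a variable:
+
+```typescript
+// Assumes: import { TerraformOutput } from "cdktf";
+// and: const cert = new RedshiftHsmClientCertificate(this, "example", { ... });
+new TerraformOutput(this, "hsm_client_certificate_public_key", {
+  value: cert.hsmClientCertificatePublicKey,
+});
+```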
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the HSM client certificate.
+* `hsmClientCertificatePublicKey` - The public key that the Amazon Redshift cluster will use to connect to the HSM. You must register the public key in the HSM.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift HSM Client Certificates using `hsmClientCertificateIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift HSM Client Certificates using `hsmClientCertificateIdentifier`. For example:
+
+```console
+% terraform import aws_redshift_hsm_client_certificate.test example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_hsm_configuration.html.markdown b/website/docs/cdktf/typescript/r/redshift_hsm_configuration.html.markdown
new file mode 100644
index 00000000000..177e962788d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_hsm_configuration.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_hsm_configuration"
+description: |-
+  Creates an HSM configuration that contains the information required by an Amazon Redshift cluster to store and use database encryption keys in a Hardware Security Module (HSM).
+---
+
+
+# Resource: aws_redshift_hsm_configuration
+
+Creates an HSM configuration that contains the information required by an Amazon Redshift cluster to store and use database encryption keys in a Hardware Security Module (HSM).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftHsmConfiguration } from "./.gen/providers/aws/redshift-hsm-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftHsmConfiguration(this, "example", {
+      description: "example",
+      hsmConfigurationIdentifier: "example",
+      hsmIpAddress: "10.0.0.1",
+      hsmPartitionName: "aws",
+      hsmPartitionPassword: "example",
+      hsmServerPublicCertificate: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Required, Forces new resource) A text description of the HSM configuration to be created.
+* `hsmConfigurationIdentifier` - (Required, Forces new resource) The identifier to be assigned to the new Amazon Redshift HSM configuration.
+* `hsmIpAddress` - (Required, Forces new resource) The IP address that the Amazon Redshift cluster must use to access the HSM.
+* `hsmPartitionName` - (Required, Forces new resource) The name of the partition in the HSM where the Amazon Redshift clusters will store their database encryption keys.
+* `hsmPartitionPassword` - (Required, Forces new resource) The password required to access the HSM partition.
+* `hsmServerPublicCertificate` - (Required, Forces new resource) The HSM's public certificate file. When using Cloud HSM, the file name is server.pem.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the HSM configuration.
+* `hsmConfigurationPublicKey` - The public key that the Amazon Redshift cluster will use to connect to the HSM. You must register the public key in the HSM.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift HSM Configurations using `hsmConfigurationIdentifier`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift HSM Configurations using `hsmConfigurationIdentifier`. For example:
+
+```console
+% terraform import aws_redshift_hsm_configuration.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_parameter_group.html.markdown b/website/docs/cdktf/typescript/r/redshift_parameter_group.html.markdown
new file mode 100644
index 00000000000..6abf4722a5d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_parameter_group.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_parameter_group"
+description: |-
+  Provides a Redshift Cluster parameter group resource.
+---
+
+
+# Resource: aws_redshift_parameter_group
+
+Provides a Redshift Cluster parameter group resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftParameterGroup } from "./.gen/providers/aws/redshift-parameter-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftParameterGroup(this, "bar", {
+      family: "redshift-1.0",
+      name: "parameter-group-test-terraform",
+      parameter: [
+        {
+          name: "require_ssl",
+          value: "true",
+        },
+        {
+          name: "query_group",
+          value: "example",
+        },
+        {
+          name: "enable_user_activity_logging",
+          value: "true",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Redshift parameter group.
+* `family` - (Required) The family of the Redshift parameter group.
+* `description` - (Optional) The description of the Redshift parameter group. Defaults to "Managed by Terraform".
+* `parameter` - (Optional) A list of Redshift parameters to apply.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+Parameter blocks support the following:
+
+* `name` - (Required) The name of the Redshift parameter.
+* `value` - (Required) The value of the Redshift parameter.
+
+You can read more about the parameters that Redshift supports in the [documentation](http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-parameter-groups.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the parameter group.
+* `id` - The Redshift parameter group name.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Parameter Groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Parameter Groups using the `name`. For example:
+
+```console
+% terraform import aws_redshift_parameter_group.paramgroup1 parameter-group-test-terraform
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_partner.html.markdown b/website/docs/cdktf/typescript/r/redshift_partner.html.markdown
new file mode 100644
index 00000000000..020faf9a2cc
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_partner.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_partner"
+description: |-
+  Provides a Redshift Partner resource.
+---
+
+
+# Resource: aws_redshift_partner
+
+Creates a new Amazon Redshift Partner Integration.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftPartner } from "./.gen/providers/aws/redshift-partner";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftPartner(this, "example", {
+      accountId: Token.asString(1234567910),
+      clusterIdentifier: Token.asString(awsRedshiftClusterExample.id),
+      databaseName: Token.asString(awsRedshiftClusterExample.databaseName),
+      partnerName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accountId` - (Required) The Amazon Web Services account ID that owns the cluster.
+* `clusterIdentifier` - (Required) The cluster identifier of the cluster that receives data from the partner.
+* `databaseName` - (Required) The name of the database that receives data from the partner.
+* `partnerName` - (Required) The name of the partner that is authorized to send data.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the Redshift partner, `accountId`, `clusterIdentifier`, `databaseName`, and `partnerName` separated by a colon (`:`).
+* `status` - The partner integration status.
+* `statusMessage` - The status message provided by the partner.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Partner integrations using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Partner integrations using the `id`. For example:
+
+```console
+% terraform import aws_redshift_partner.example 01234567910:cluster-example-id:example:example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_scheduled_action.html.markdown b/website/docs/cdktf/typescript/r/redshift_scheduled_action.html.markdown
new file mode 100644
index 00000000000..7f154c397cb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_scheduled_action.html.markdown
@@ -0,0 +1,200 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_scheduled_action"
+description: |-
+  Provides a Redshift Scheduled Action resource.
+---
+
+
+# Resource: aws_redshift_scheduled_action
+
+Provides a Redshift Scheduled Action resource.
+
+## Example Usage
+
+### Pause Cluster Action
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamPolicy } from "./.gen/providers/aws/iam-policy";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment";
+import { RedshiftScheduledAction } from "./.gen/providers/aws/redshift-scheduled-action";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["scheduler.redshift.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const example = new DataAwsIamPolicyDocument(this, "example", {
+      statement: [
+        {
+          actions: [
+            "redshift:PauseCluster",
+            "redshift:ResumeCluster",
+            "redshift:ResizeCluster",
+          ],
+          effect: "Allow",
+          resources: ["*"],
+        },
+      ],
+    });
+    const awsIamPolicyExample = new IamPolicy(this, "example_2", {
+      name: "redshift_scheduled_action",
+      policy: Token.asString(example.json),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamPolicyExample.overrideLogicalId("example");
+    const awsIamRoleExample = new IamRole(this, "example_3", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "redshift_scheduled_action",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    const awsIamRolePolicyAttachmentExample = new IamRolePolicyAttachment(
+      this,
+      "example_4",
+      {
+        policyArn: Token.asString(awsIamPolicyExample.arn),
+        role: Token.asString(awsIamRoleExample.name),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyAttachmentExample.overrideLogicalId("example");
+    const awsRedshiftScheduledActionExample = new RedshiftScheduledAction(
+      this,
+      "example_5",
+      {
+        iamRole: Token.asString(awsIamRoleExample.arn),
+        name: "tf-redshift-scheduled-action",
+        schedule: "cron(00 23 * * ? *)",
+        targetAction: {
+          pauseCluster: {
+            clusterIdentifier: "tf-redshift001",
+          },
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRedshiftScheduledActionExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Resize Cluster Action
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftScheduledAction } from "./.gen/providers/aws/redshift-scheduled-action";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftScheduledAction(this, "example", {
+      iamRole: Token.asString(awsIamRoleExample.arn),
+      name: "tf-redshift-scheduled-action",
+      schedule: "cron(00 23 * * ? *)",
*)", + targetAction: { + resizeCluster: { + clusterIdentifier: "tf-redshift001", + clusterType: "multi-node", + nodeType: "dc1.large", + numberOfNodes: 2, + }, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The scheduled action name. +* `description` - (Optional) The description of the scheduled action. +* `enable` - (Optional) Whether to enable the scheduled action. Default is `true` . +* `startTime` - (Optional) The start time in UTC when the schedule is active, in UTC RFC3339 format(for example, YYYY-MM-DDTHH:MM:SSZ). +* `endTime` - (Optional) The end time in UTC when the schedule is active, in UTC RFC3339 format(for example, YYYY-MM-DDTHH:MM:SSZ). +* `schedule` - (Required) The schedule of action. The schedule is defined format of "at expression" or "cron expression", for example `at(20160304T17:27:00)` or `cron(0 10 ? * MON *)`. See [Scheduled Action](https://docs.aws.amazon.com/redshift/latest/APIReference/API_ScheduledAction.html) for more information. +* `iamRole` - (Required) The IAM role to assume to run the scheduled action. +* `targetAction` - (Required) Target action. Documented below. + +### Nested Blocks + +#### `targetAction` + +* `pauseCluster` - (Optional) An action that runs a `pauseCluster` API operation. Documented below. +* `resizeCluster` - (Optional) An action that runs a `resizeCluster` API operation. Documented below. +* `resumeCluster` - (Optional) An action that runs a `resumeCluster` API operation. Documented below. + +### `pauseCluster` + +* `clusterIdentifier` - (Required) The identifier of the cluster to be paused. + +### `resizeCluster` + +* `clusterIdentifier` - (Required) The unique identifier for the cluster to resize. +* `classic` - (Optional) A boolean value indicating whether the resize operation is using the classic resize process. Default: `false`. +* `clusterType` - (Optional) The new cluster type for the specified cluster. +* `nodeType` - (Optional) The new node type for the nodes you are adding. +* `numberOfNodes` - (Optional) The new number of nodes for the cluster. + +### `resumeCluster` + +* `clusterIdentifier` - (Required) The identifier of the cluster to be resumed. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Redshift Scheduled Action name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Scheduled Action using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Redshift Scheduled Action using the `name`. 
For example: + +```console +% terraform import aws_redshift_scheduled_action.example tf-redshift-scheduled-action +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/redshift_snapshot_copy_grant.html.markdown b/website/docs/cdktf/typescript/r/redshift_snapshot_copy_grant.html.markdown new file mode 100644 index 00000000000..a1b3f1ef1bb --- /dev/null +++ b/website/docs/cdktf/typescript/r/redshift_snapshot_copy_grant.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "Redshift" +layout: "aws" +page_title: "AWS: aws_redshift_snapshot_copy_grant" +description: |- + Creates a snapshot copy grant that allows AWS Redshift to encrypt copied snapshots with a customer master key from AWS KMS in a destination region. +--- + + + +# Resource: aws_redshift_snapshot_copy_grant + +Creates a snapshot copy grant that allows AWS Redshift to encrypt copied snapshots with a customer master key from AWS KMS in a destination region. + +Note that the grant must exist in the destination region, and not in the region of the cluster. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RedshiftCluster } from "./.gen/providers/aws/redshift-cluster"; +import { RedshiftSnapshotCopyGrant } from "./.gen/providers/aws/redshift-snapshot-copy-grant"; +interface MyConfig { + clusterIdentifier: any; + nodeType: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const test = new RedshiftSnapshotCopyGrant(this, "test", { + snapshotCopyGrantName: "my-grant", + }); + const awsRedshiftClusterTest = new RedshiftCluster(this, "test_1", { + snapshotCopy: { + destinationRegion: "us-east-2", + grantName: test.snapshotCopyGrantName, + }, + clusterIdentifier: config.clusterIdentifier, + nodeType: config.nodeType, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRedshiftClusterTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `snapshotCopyGrantName` - (Required, Forces new resource) A friendly name for identifying the grant. +* `kmsKeyId` - (Optional, Forces new resource) The unique identifier for the customer master key (CMK) that the grant applies to. Specify the key ID or the Amazon Resource Name (ARN) of the CMK. To specify a CMK in a different AWS account, you must use the key ARN. If not specified, the default key is used. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of snapshot copy grant +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
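+
+Because `kmsKeyId` is optional and falls back to the account's default KMS key, a grant for a specific customer managed key can be sketched as follows. This is a hedged illustration rather than generated output: the `KmsKey` binding path and all names are assumptions, and the snippet is meant to sit inside a stack constructor like the example above.
+
+```typescript
+// A minimal sketch, assuming the KmsKey binding has been generated by
+// `cdktf get` at "./.gen/providers/aws/kms-key". The grant encrypts
+// copied snapshots with this customer managed key instead of the
+// account's default key.
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+
+const copyKey = new KmsKey(this, "copy_key", {
+  description: "Key for cross-region Redshift snapshot copies",
+});
+new RedshiftSnapshotCopyGrant(this, "with_cmk", {
+  kmsKeyId: copyKey.keyId,
+  snapshotCopyGrantName: "my-grant-with-cmk",
+});
+```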
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Snapshot Copy Grants by name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Redshift Snapshot Copy Grants by name. For example: + +```console +% terraform import aws_redshift_snapshot_copy_grant.test my-grant +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/redshift_snapshot_schedule.html.markdown b/website/docs/cdktf/typescript/r/redshift_snapshot_schedule.html.markdown new file mode 100644 index 00000000000..4ac3f5894bd --- /dev/null +++ b/website/docs/cdktf/typescript/r/redshift_snapshot_schedule.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Redshift" +layout: "aws" +page_title: "AWS: aws_redshift_snapshot_schedule" +description: |- + Provides an Redshift Snapshot Schedule resource. +--- + + + +# Resource: aws_redshift_snapshot_schedule + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RedshiftSnapshotSchedule } from "./.gen/providers/aws/redshift-snapshot-schedule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new RedshiftSnapshotSchedule(this, "default", { + definitions: ["rate(12 hours)"], + identifier: "tf-redshift-snapshot-schedule", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `identifier` - (Optional, Forces new resource) The snapshot schedule identifier. If omitted, Terraform will assign a random, unique identifier. +* `identifierPrefix` - (Optional, Forces new resource) Creates a unique +identifier beginning with the specified prefix. Conflicts with `identifier`. +* `description` - (Optional) The description of the snapshot schedule. +* `definitions` - (Optional) The definition of the snapshot schedule. The definition is made up of schedule expressions, for example `cron(30 12 *)` or `rate(12 hours)`. +* `forceDestroy` - (Optional) Whether to destroy all associated clusters with this snapshot schedule on deletion. Must be enabled and applied before attempting deletion. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Redshift Snapshot Schedule. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
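+
+The `definitions` list accepts several schedule expressions at once, mixing `rate(...)` and the three-field `cron(...)` form shown above. A minimal sketch, assuming it runs inside a stack constructor like the example above (resource and identifier names are illustrative):
+
+```typescript
+// A hedged sketch: snapshots every 8 hours and also at 12:30 UTC daily.
+// forceDestroy lets Terraform detach any clusters still associated with
+// the schedule before deleting it.
+new RedshiftSnapshotSchedule(this, "mixed", {
+  definitions: ["rate(8 hours)", "cron(30 12 *)"],
+  forceDestroy: true,
+  identifierPrefix: "tf-snap-",
+});
+```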
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Snapshot Schedule using the `identifier`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Redshift Snapshot Schedule using the `identifier`. For example: + +```console +% terraform import aws_redshift_snapshot_schedule.default tf-redshift-snapshot-schedule +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/redshift_snapshot_schedule_association.html.markdown b/website/docs/cdktf/typescript/r/redshift_snapshot_schedule_association.html.markdown new file mode 100644 index 00000000000..bebf91ee139 --- /dev/null +++ b/website/docs/cdktf/typescript/r/redshift_snapshot_schedule_association.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "Redshift" +layout: "aws" +page_title: "AWS: aws_redshift_snapshot_schedule_association" +description: |- + Provides an Association Redshift Cluster and Snapshot Schedule resource. +--- + + + +# Resource: aws_redshift_snapshot_schedule_association + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RedshiftCluster } from "./.gen/providers/aws/redshift-cluster"; +import { RedshiftSnapshotSchedule } from "./.gen/providers/aws/redshift-snapshot-schedule"; +import { RedshiftSnapshotScheduleAssociation } from "./.gen/providers/aws/redshift-snapshot-schedule-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const defaultVar = new RedshiftCluster(this, "default", { + clusterIdentifier: "tf-redshift-cluster", + clusterType: "single-node", + databaseName: "mydb", + masterPassword: "Mustbe8characters", + masterUsername: "foo", + nodeType: "dc1.large", + }); + const awsRedshiftSnapshotScheduleDefault = new RedshiftSnapshotSchedule( + this, + "default_1", + { + definitions: ["rate(12 hours)"], + identifier: "tf-redshift-snapshot-schedule", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRedshiftSnapshotScheduleDefault.overrideLogicalId("default"); + const awsRedshiftSnapshotScheduleAssociationDefault = + new RedshiftSnapshotScheduleAssociation(this, "default_2", { + clusterIdentifier: defaultVar.id, + scheduleIdentifier: Token.asString( + awsRedshiftSnapshotScheduleDefault.id + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRedshiftSnapshotScheduleAssociationDefault.overrideLogicalId("default"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `clusterIdentifier` - (Required, Forces new resource) The cluster identifier. +* `scheduleIdentifier` - (Required, Forces new resource) The snapshot schedule identifier. 
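+
+Since the resource holds exactly one cluster/schedule pair and both arguments force a new resource, attaching one schedule to several clusters means one association resource per cluster. A minimal sketch, assuming hypothetical `clusterA`, `clusterB`, and `schedule` resources defined elsewhere in the stack:
+
+```typescript
+// A hedged sketch: fan one snapshot schedule out to several clusters.
+// clusterA, clusterB, and schedule are assumed, not part of this doc.
+const clusters = [clusterA, clusterB];
+clusters.forEach((cluster, i) => {
+  new RedshiftSnapshotScheduleAssociation(this, `assoc_${i}`, {
+    clusterIdentifier: cluster.id,
+    scheduleIdentifier: schedule.id,
+  });
+});
+```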
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Snapshot Schedule Association using the `<cluster-identifier>/<schedule-identifier>`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Snapshot Schedule Association using the `<cluster-identifier>/<schedule-identifier>`. For example:
+
+```console
+% terraform import aws_redshift_snapshot_schedule_association.default tf-redshift-cluster/tf-redshift-snapshot-schedule
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_subnet_group.html.markdown b/website/docs/cdktf/typescript/r/redshift_subnet_group.html.markdown
new file mode 100644
index 00000000000..c9c8f026931
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_subnet_group.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_subnet_group"
+description: |-
+  Provides a Redshift Subnet Group resource.
+---
+
+
+
+# Resource: aws_redshift_subnet_group
+
+Creates a new Amazon Redshift subnet group. You must provide a list of one or more subnets in your existing Amazon Virtual Private Cloud (Amazon VPC) when creating an Amazon Redshift subnet group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftSubnetGroup } from "./.gen/providers/aws/redshift-subnet-group";
+import { Subnet } from "./.gen/providers/aws/subnet";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const foo = new Vpc(this, "foo", {
+      cidrBlock: "10.1.0.0/16",
+    });
+    const bar = new Subnet(this, "bar", {
+      availabilityZone: "us-west-2b",
+      cidrBlock: "10.1.2.0/24",
+      tags: {
+        Name: "tf-dbsubnet-test-2",
+      },
+      vpcId: foo.id,
+    });
+    const awsSubnetFoo = new Subnet(this, "foo_2", {
+      availabilityZone: "us-west-2a",
+      cidrBlock: "10.1.1.0/24",
+      tags: {
+        Name: "tf-dbsubnet-test-1",
+      },
+      vpcId: foo.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSubnetFoo.overrideLogicalId("foo");
+    const awsRedshiftSubnetGroupFoo = new RedshiftSubnetGroup(this, "foo_3", {
+      name: "foo",
+      subnetIds: [Token.asString(awsSubnetFoo.id), bar.id],
+      tags: {
+        environment: "Production",
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRedshiftSubnetGroupFoo.overrideLogicalId("foo");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the Redshift Subnet group.
+* `description` - (Optional) The description of the Redshift Subnet group. Defaults to "Managed by Terraform".
+* `subnetIds` - (Required) An array of VPC subnet IDs.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Subnet group.
+* `id` - The Redshift Subnet group ID.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift subnet groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift subnet groups using the `name`. For example:
+
+```console
+% terraform import aws_redshift_subnet_group.testgroup1 test-cluster-subnet-group
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshift_usage_limit.html.markdown b/website/docs/cdktf/typescript/r/redshift_usage_limit.html.markdown
new file mode 100644
index 00000000000..e1e291aeb91
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshift_usage_limit.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Redshift"
+layout: "aws"
+page_title: "AWS: aws_redshift_usage_limit"
+description: |-
+  Provides a Redshift Usage Limit resource.
+---
+
+
+
+# Resource: aws_redshift_usage_limit
+
+Creates a new Amazon Redshift Usage Limit.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftUsageLimit } from "./.gen/providers/aws/redshift-usage-limit";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftUsageLimit(this, "example", {
+      amount: 60,
+      clusterIdentifier: Token.asString(awsRedshiftClusterExample.id),
+      featureType: "concurrency-scaling",
+      limitType: "time",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `amount` - (Required) The limit amount. If time-based, this amount is in minutes. If data-based, this amount is in terabytes (TB). The value must be a positive number.
+* `breachAction` - (Optional) The action that Amazon Redshift takes when the limit is reached. The default is `log`. Valid values are `log`, `emit-metric`, and `disable`.
+* `clusterIdentifier` - (Required) The identifier of the cluster that you want to limit the usage of.
+* `featureType` - (Required) The Amazon Redshift feature that you want to limit. Valid values are `spectrum`, `concurrency-scaling`, and `cross-region-datasharing`.
+* `limitType` - (Required) The type of limit. Depending on the feature type, this can be based on a time duration or data size. If FeatureType is `spectrum`, then LimitType must be `data-scanned`. If FeatureType is `concurrency-scaling`, then LimitType must be `time`. If FeatureType is `cross-region-datasharing`, then LimitType must be `data-scanned`. Valid values are `data-scanned` and `time`.
+* `period` - (Optional) The time period that the amount applies to. A weekly period begins on Sunday. The default is `monthly`. Valid values are `daily`, `weekly`, and `monthly`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
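+
+To illustrate how `featureType` and `limitType` pair up, here is a minimal sketch of a data-based Spectrum limit, assuming (as in the example above) that `awsRedshiftClusterExample` is defined elsewhere in the stack:
+
+```typescript
+// A hedged sketch: caps Redshift Spectrum scanning at 2 TB per week and
+// only emits a CloudWatch metric when the limit is breached.
+new RedshiftUsageLimit(this, "spectrum_weekly", {
+  amount: 2, // terabytes, because this limit type is data-based
+  breachAction: "emit-metric",
+  clusterIdentifier: Token.asString(awsRedshiftClusterExample.id),
+  featureType: "spectrum",
+  limitType: "data-scanned",
+  period: "weekly",
+});
+```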
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Usage Limit.
+* `id` - The Redshift Usage Limit ID.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift usage limits using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift usage limits using the `id`. For example:
+
+```console
+% terraform import aws_redshift_usage_limit.example example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshiftdata_statement.html.markdown b/website/docs/cdktf/typescript/r/redshiftdata_statement.html.markdown
new file mode 100644
index 00000000000..5335d2eb255
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshiftdata_statement.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "Redshift Data"
+layout: "aws"
+page_title: "AWS: aws_redshiftdata_statement"
+description: |-
+  Provides a Redshift Data Statement execution resource.
+---
+
+
+
+# Resource: aws_redshiftdata_statement
+
+Executes a Redshift Data Statement.
+
+## Example Usage
+
+### cluster_identifier
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { RedshiftdataStatement } from "./.gen/providers/aws/redshiftdata-statement"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new RedshiftdataStatement(this, "example", { + clusterIdentifier: Token.asString( + awsRedshiftClusterExample.clusterIdentifier + ), + database: Token.asString(awsRedshiftClusterExample.databaseName), + dbUser: Token.asString(awsRedshiftClusterExample.masterUsername), + sql: "CREATE GROUP group_name;", + }); + } +} + +``` + +### workgroup_name + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RedshiftdataStatement } from "./.gen/providers/aws/redshiftdata-statement"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new RedshiftdataStatement(this, "example", { + database: "dev", + sql: "CREATE GROUP group_name;", + workgroupName: Token.asString( + awsRedshiftserverlessWorkgroupExample.workgroupName + ), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `database` - (Required) The name of the database. +* `sql` - (Required) The SQL statement text to run. + +The following arguments are optional: + +* `clusterIdentifier` - (Optional) The cluster identifier. This parameter is required when connecting to a cluster and authenticating using either Secrets Manager or temporary credentials. +* `dbUser` - (Optional) The database user name. +* `secretArn` - (Optional) The name or ARN of the secret that enables access to the database. +* `statementName` - (Optional) The name of the SQL statement. You can name the SQL statement when you create it to identify the query. +* `withEvent` - (Optional) A value that indicates whether to send an event to the Amazon EventBridge event bus after the SQL statement runs. +* `workgroupName` - (Optional) The serverless workgroup name. This parameter is required when connecting to a serverless workgroup and authenticating using either Secrets Manager or temporary credentials. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Redshift Data Statement ID. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Data Statements using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Redshift Data Statements using the `id`. 
For example:
+
+```console
+% terraform import aws_redshiftdata_statement.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshiftserverless_endpoint_access.html.markdown b/website/docs/cdktf/typescript/r/redshiftserverless_endpoint_access.html.markdown
new file mode 100644
index 00000000000..ea5c5865ad6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshiftserverless_endpoint_access.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_endpoint_access"
+description: |-
+  Provides a Redshift Serverless Endpoint Access resource.
+---
+
+
+
+# Resource: aws_redshiftserverless_endpoint_access
+
+Creates a new Amazon Redshift Serverless Endpoint Access.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftserverlessEndpointAccess } from "./.gen/providers/aws/redshiftserverless-endpoint-access";
+interface MyConfig {
+  subnetIds: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new RedshiftserverlessEndpointAccess(this, "example", {
+      endpointName: "example",
+      workgroupName: "example",
+      subnetIds: config.subnetIds,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `endpointName` - (Required) The name of the endpoint.
+* `subnetIds` - (Required) An array of VPC subnet IDs to associate with the endpoint.
+* `vpcSecurityGroupIds` - (Optional) An array of security group IDs to associate with the workgroup.
+* `workgroupName` - (Required) The name of the workgroup.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Endpoint Access.
+* `id` - The Redshift Endpoint Access Name.
+* `address` - The DNS address of the VPC endpoint.
+* `port` - The port that Amazon Redshift Serverless listens on.
+* `vpcEndpoint` - The VPC endpoint for the Redshift Serverless workgroup. See `VPC Endpoint` below.
+
+#### VPC Endpoint
+
+* `vpcEndpointId` - The unique identifier of the VPC endpoint.
+* `vpcId` - The unique identifier of the VPC that the endpoint is associated with.
+* `networkInterface` - The network interfaces of the endpoint. See `Network Interface` below.
+
+##### Network Interface
+
+* `availabilityZone` - The Availability Zone.
+* `networkInterfaceId` - The unique identifier of the network interface.
+* `privateIpAddress` - The IPv4 address of the network interface within the subnet.
+* `subnetId` - The unique identifier of the subnet.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Endpoint Access using the `endpointName`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Redshift Serverless Endpoint Access using the `endpointName`. For example: + +```console +% terraform import aws_redshiftserverless_endpoint_access.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/redshiftserverless_namespace.html.markdown b/website/docs/cdktf/typescript/r/redshiftserverless_namespace.html.markdown new file mode 100644 index 00000000000..1857a056529 --- /dev/null +++ b/website/docs/cdktf/typescript/r/redshiftserverless_namespace.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "Redshift Serverless" +layout: "aws" +page_title: "AWS: aws_redshiftserverless_namespace" +description: |- + Provides a Redshift Serverless Namespace resource. +--- + + + +# Resource: aws_redshiftserverless_namespace + +Creates a new Amazon Redshift Serverless Namespace. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RedshiftserverlessNamespace } from "./.gen/providers/aws/redshiftserverless-namespace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new RedshiftserverlessNamespace(this, "example", { + namespaceName: "concurrency-scaling", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `adminUserPassword` - (Optional) The password of the administrator for the first database created in the namespace. +* `adminUsername` - (Optional) The username of the administrator for the first database created in the namespace. +* `dbName` - (Optional) The name of the first database created in the namespace. +* `defaultIamRoleArn` - (Optional) The Amazon Resource Name (ARN) of the IAM role to set as a default in the namespace. When specifying `defaultIamRoleArn`, it also must be part of `iamRoles`. +* `iamRoles` - (Optional) A list of IAM roles to associate with the namespace. +* `kmsKeyId` - (Optional) The ARN of the Amazon Web Services Key Management Service key used to encrypt your data. +* `logExports` - (Optional) The types of logs the namespace can export. Available export types are `userlog`, `connectionlog`, and `useractivitylog`. +* `namespaceName` - (Required) The name of the namespace. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Namespace. +* `id` - The Redshift Namespace Name. +* `namespaceId` - The Redshift Namespace ID. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Namespaces using the `namespaceName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Redshift Serverless Namespaces using the `namespaceName`. For example: + +```console +% terraform import aws_redshiftserverless_namespace.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/redshiftserverless_resource_policy.html.markdown b/website/docs/cdktf/typescript/r/redshiftserverless_resource_policy.html.markdown new file mode 100644 index 00000000000..e5e771f173d --- /dev/null +++ b/website/docs/cdktf/typescript/r/redshiftserverless_resource_policy.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "Redshift Serverless" +layout: "aws" +page_title: "AWS: aws_redshiftserverless_resource_policy" +description: |- + Provides a Redshift Serverless Resource Policy resource. +--- + + + +# Resource: aws_redshiftserverless_resource_policy + +Creates a new Amazon Redshift Serverless Resource Policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RedshiftserverlessResourcePolicy } from "./.gen/providers/aws/redshiftserverless-resource-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new RedshiftserverlessResourcePolicy(this, "example", { + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["redshift-serverless:RestoreFromSnapshot"], + Effect: "Allow", + Principal: { + AWS: ["12345678901"], + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + resourceArn: Token.asString(awsRedshiftserverlessSnapshotExample.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resourceArn` - (Required) The Amazon Resource Name (ARN) of the account to create or update a resource policy for. +* `policy` - (Required) The policy to create or update. For example, the following policy grants a user authorization to restore a snapshot. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the account to create or update a resource policy for. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Resource Policies using the `resourceArn`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Redshift Serverless Resource Policies using the `resourceArn`. For example: + +```console +% terraform import aws_redshiftserverless_resource_policy.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/redshiftserverless_snapshot.html.markdown b/website/docs/cdktf/typescript/r/redshiftserverless_snapshot.html.markdown new file mode 100644 index 00000000000..354679bdb79 --- /dev/null +++ b/website/docs/cdktf/typescript/r/redshiftserverless_snapshot.html.markdown @@ -0,0 +1,83 @@ +--- +subcategory: "Redshift Serverless" +layout: "aws" +page_title: "AWS: aws_redshiftserverless_snapshot" +description: |- + Provides a Redshift Serverless Snapshot resource. +--- + + + +# Resource: aws_redshiftserverless_snapshot + +Creates a new Amazon Redshift Serverless Snapshot. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { RedshiftserverlessSnapshot } from "./.gen/providers/aws/redshiftserverless-snapshot"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new RedshiftserverlessSnapshot(this, "example", { + namespaceName: Token.asString( + awsRedshiftserverlessWorkgroupExample.namespaceName + ), + snapshotName: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `namespaceName` - (Required) The namespace to create a snapshot for. +* `snapshotName` - (Required) The name of the snapshot. +* `retentionPeriod` - (Optional) How long to retain the created snapshot. Default value is `1`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `accountsWithProvisionedRestoreAccess` - All of the Amazon Web Services accounts that have access to restore a snapshot to a provisioned cluster. +* `accountsWithRestoreAccess` - All of the Amazon Web Services accounts that have access to restore a snapshot to a namespace. +* `adminUsername` - The username of the database within a snapshot. +* `arn` - The Amazon Resource Name (ARN) of the snapshot. +* `id` - The name of the snapshot. +* `kmsKeyId` - The unique identifier of the KMS key used to encrypt the snapshot. +* `namespaceArn` - The Amazon Resource Name (ARN) of the namespace the snapshot was created from. +* `ownerAccount` - The owner Amazon Web Services; account of the snapshot. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Snapshots using the `snapshotName`. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Serverless Snapshots using the `snapshotName`. For example:
+
+```console
+% terraform import aws_redshiftserverless_snapshot.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshiftserverless_usage_limit.html.markdown b/website/docs/cdktf/typescript/r/redshiftserverless_usage_limit.html.markdown
new file mode 100644
index 00000000000..452bae0f24b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshiftserverless_usage_limit.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_usage_limit"
+description: |-
+  Provides a Redshift Serverless Usage Limit resource.
+---
+
+
+
+# Resource: aws_redshiftserverless_usage_limit
+
+Creates a new Amazon Redshift Serverless Usage Limit.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftserverlessUsageLimit } from "./.gen/providers/aws/redshiftserverless-usage-limit";
+import { RedshiftserverlessWorkgroup } from "./.gen/providers/aws/redshiftserverless-workgroup";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new RedshiftserverlessWorkgroup(this, "example", {
+      namespaceName: Token.asString(
+        awsRedshiftserverlessNamespaceExample.namespaceName
+      ),
+      workgroupName: "example",
+    });
+    const awsRedshiftserverlessUsageLimitExample =
+      new RedshiftserverlessUsageLimit(this, "example_1", {
+        amount: 60,
+        resourceArn: example.arn,
+        usageType: "serverless-compute",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRedshiftserverlessUsageLimitExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `amount` - (Required) The limit amount. If time-based, this amount is in Redshift Processing Units (RPU) consumed per hour. If data-based, this amount is in terabytes (TB) of data transferred between Regions in cross-account sharing. The value must be a positive number.
+* `breachAction` - (Optional) The action that Amazon Redshift Serverless takes when the limit is reached. Valid values are `log`, `emit-metric`, and `deactivate`. The default is `log`.
+* `period` - (Optional) The time period that the amount applies to. A weekly period begins on Sunday. Valid values are `daily`, `weekly`, and `monthly`. The default is `monthly`.
+* `resourceArn` - (Required) The Amazon Resource Name (ARN) of the Amazon Redshift Serverless resource to create the usage limit for.
+* `usageType` - (Required) The type of Amazon Redshift Serverless usage to create a usage limit for. Valid values are `serverless-compute` or `cross-region-datasharing`.
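+
+For the data-sharing variant, the same resource shape applies with a data-based amount. A minimal sketch, assuming the `example` workgroup from the example above (the logical name and values here are illustrative):
+
+```typescript
+// A hedged sketch: caps cross-Region data sharing at 1 TB per day and
+// deactivates the feature when the limit is breached.
+new RedshiftserverlessUsageLimit(this, "datasharing_daily", {
+  amount: 1, // terabytes transferred between Regions
+  breachAction: "deactivate",
+  period: "daily",
+  resourceArn: example.arn,
+  usageType: "cross-region-datasharing",
+});
+```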
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Usage Limit.
+* `id` - The Redshift Usage Limit ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Usage Limits using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Serverless Usage Limits using the `id`. For example:
+
+```console
+% terraform import aws_redshiftserverless_usage_limit.example example-id
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/redshiftserverless_workgroup.html.markdown b/website/docs/cdktf/typescript/r/redshiftserverless_workgroup.html.markdown
new file mode 100644
index 00000000000..2c88838fdc6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/redshiftserverless_workgroup.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "Redshift Serverless"
+layout: "aws"
+page_title: "AWS: aws_redshiftserverless_workgroup"
+description: |-
+  Provides a Redshift Serverless Workgroup resource.
+---
+
+
+
+# Resource: aws_redshiftserverless_workgroup
+
+Creates a new Amazon Redshift Serverless Workgroup.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RedshiftserverlessWorkgroup } from "./.gen/providers/aws/redshiftserverless-workgroup";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RedshiftserverlessWorkgroup(this, "example", {
+      namespaceName: "concurrency-scaling",
+      workgroupName: "concurrency-scaling",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `namespaceName` - (Required) The name of the namespace.
+* `workgroupName` - (Required) The name of the workgroup.
+
+The following arguments are optional:
+
+* `baseCapacity` - (Optional) The base data warehouse capacity of the workgroup in Redshift Processing Units (RPUs).
+* `configParameter` - (Optional) An array of parameters to set for more control over a serverless database. See `Config Parameter` below, and the sketch that follows this list.
+* `enhancedVpcRouting` - (Optional) The value that specifies whether to turn on enhanced virtual private cloud (VPC) routing, which forces Amazon Redshift Serverless to route traffic through your VPC instead of over the internet.
+* `publiclyAccessible` - (Optional) A value that specifies whether the workgroup can be accessed from a public network.
+* `securityGroupIds` - (Optional) An array of security group IDs to associate with the workgroup.
+* `subnetIds` - (Optional) An array of VPC subnet IDs to associate with the workgroup. When set, must contain at least three subnets spanning three Availability Zones. A minimum number of IP addresses is required and scales with the Base Capacity. For more information, see the following [AWS document](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-known-issues.html).
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
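+
+Because `configParameter` entries are free-form key/value pairs (the valid keys are listed under `Config Parameter` below), tuning query limits looks like the following minimal sketch, assuming it runs inside a stack constructor like the example above and that the namespace already exists:
+
+```typescript
+// A hedged sketch: a workgroup with a fixed base capacity and two
+// serverless config parameters set through configParameter blocks.
+new RedshiftserverlessWorkgroup(this, "tuned", {
+  baseCapacity: 32, // RPUs
+  configParameter: [
+    { parameterKey: "max_query_execution_time", parameterValue: "3600" },
+    { parameterKey: "enable_user_activity_logging", parameterValue: "true" },
+  ],
+  namespaceName: "concurrency-scaling",
+  workgroupName: "tuned-workgroup",
+});
+```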
+
+### Config Parameter
+
+* `parameterKey` - (Required) The key of the parameter. The options are `auto_mv`, `datestyle`, `enable_case_sensitive_identifier`, `enable_user_activity_logging`, `query_group`, `search_path` and [query monitoring metrics](https://docs.aws.amazon.com/redshift/latest/dg/cm-c-wlm-query-monitoring-rules.html#cm-c-wlm-query-monitoring-metrics-serverless) that let you define performance boundaries: `max_query_cpu_time`, `max_query_blocks_read`, `max_scan_row_count`, `max_query_execution_time`, `max_query_queue_time`, `max_query_cpu_usage_percent`, `max_query_temp_blocks_to_disk`, `max_join_row_count` and `max_nested_loop_join_row_count`.
+* `parameterValue` - (Required) The value of the parameter to set.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Redshift Serverless Workgroup.
+* `id` - The Redshift Workgroup Name.
+* `workgroupId` - The Redshift Workgroup ID.
+* `endpoint` - The endpoint that is created from the workgroup. See `Endpoint` below.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### Endpoint
+
+* `address` - The DNS address of the VPC endpoint.
+* `port` - The port that Amazon Redshift Serverless listens on.
+* `vpcEndpoint` - The VPC endpoint for the Redshift Serverless workgroup. See `VPC Endpoint` below.
+
+#### VPC Endpoint
+
+* `vpcEndpointId` - The unique identifier of the VPC endpoint.
+* `vpcId` - The unique identifier of the VPC that the endpoint is associated with.
+* `networkInterface` - The network interfaces of the endpoint. See `Network Interface` below.
+
+##### Network Interface
+
+* `availabilityZone` - The Availability Zone.
+* `networkInterfaceId` - The unique identifier of the network interface.
+* `privateIpAddress` - The IPv4 address of the network interface within the subnet.
+* `subnetId` - The unique identifier of the subnet.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `update` - (Default `20m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Redshift Serverless Workgroups using the `workgroupName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Redshift Serverless Workgroups using the `workgroupName`.
For example: + +```console +% terraform import aws_redshiftserverless_workgroup.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/resourceexplorer2_index.html.markdown b/website/docs/cdktf/typescript/r/resourceexplorer2_index.html.markdown new file mode 100644 index 00000000000..ebb12a7cf8d --- /dev/null +++ b/website/docs/cdktf/typescript/r/resourceexplorer2_index.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Resource Explorer" +layout: "aws" +page_title: "AWS: aws_resourceexplorer2_index" +description: |- + Provides a resource to manage a Resource Explorer index in the current AWS Region. +--- + + + +# Resource: aws_resourceexplorer2_index + +Provides a resource to manage a Resource Explorer index in the current AWS Region. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Resourceexplorer2Index } from "./.gen/providers/aws/resourceexplorer2-index"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Resourceexplorer2Index(this, "example", { + type: "LOCAL", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `type` - (Required) The type of the index. Valid values: `aggregator`, `local`. To understand the difference between `local` and `aggregator`, see the [_AWS Resource Explorer User Guide_](https://docs.aws.amazon.com/resource-explorer/latest/userguide/manage-aggregator-region.html). +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `2H`) +- `update` - (Default `2H`) +- `delete` - (Default `10M`) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Resource Explorer index. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Resource Explorer indexes using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Resource Explorer indexes using the `arn`. 
For example: + +```console +% terraform import aws_resourceexplorer2_index.example arn:aws:resource-explorer-2:us-east-1:123456789012:index/6047ac4e-207e-4487-9bcf-cb53bb0ff5cc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/resourceexplorer2_view.html.markdown b/website/docs/cdktf/typescript/r/resourceexplorer2_view.html.markdown new file mode 100644 index 00000000000..4b2e426bbc7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/resourceexplorer2_view.html.markdown @@ -0,0 +1,109 @@ +--- +subcategory: "Resource Explorer" +layout: "aws" +page_title: "AWS: aws_resourceexplorer2_view" +description: |- + Provides a resource to manage a Resource Explorer view. +--- + + + +# Resource: aws_resourceexplorer2_view + +Provides a resource to manage a Resource Explorer view. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Resourceexplorer2Index } from "./.gen/providers/aws/resourceexplorer2-index"; +import { Resourceexplorer2View } from "./.gen/providers/aws/resourceexplorer2-view"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Resourceexplorer2Index(this, "example", { + type: "LOCAL", + }); + const awsResourceexplorer2ViewExample = new Resourceexplorer2View( + this, + "example_1", + { + dependsOn: [example], + filters: [ + { + filterString: "resourcetype:ec2:instance", + }, + ], + includedProperty: [ + { + name: "tags", + }, + ], + name: "exampleview", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsResourceexplorer2ViewExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `defaultView` - (Optional) Specifies whether the view is the [_default view_](https://docs.aws.amazon.com/resource-explorer/latest/userguide/manage-views-about.html#manage-views-about-default) for the AWS Region. Default: `false`. +* `filters` - (Optional) Specifies which resources are included in the results of queries made using this view. See [Filters](#filters) below for more details. +* `includedProperty` - (Optional) Optional fields to be included in search results from this view. See [Included Properties](#included-properties) below for more details. +* `name` - (Required) The name of the view. The name must be no more than 64 characters long, and can include letters, digits, and the dash (-) character. The name must be unique within its AWS Region. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Filters + +The `filters` block supports the following: + +* `filterString` - (Required) The string that contains the search keywords, prefixes, and operators to control the results that can be returned by a search operation. For more details, see [Search query syntax](https://docs.aws.amazon.com/resource-explorer/latest/userguide/using-search-query-syntax.html). 
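+
+Filter terms in a single `filterString` combine with an implicit AND. A minimal sketch of a default view scoped to one resource type in one Region, assuming the `example` index from the example above (the view name and filter values are illustrative):
+
+```typescript
+// A hedged sketch: a Region-scoped default view. The filter narrows
+// results to EC2 instances in us-west-2, per the Search query syntax
+// guide linked above. The dependsOn mirrors the earlier example.
+new Resourceexplorer2View(this, "scoped", {
+  defaultView: true,
+  dependsOn: [example],
+  filters: [
+    {
+      filterString: "resourcetype:ec2:instance region:us-west-2",
+    },
+  ],
+  name: "scopedview",
+});
+```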
+
+### Included Properties
+
+The `includedProperty` block supports the following:
+
+* `name` - (Required) The name of the property that is included in this view. Valid values: `tags`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Resource Explorer view.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Resource Explorer views using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Resource Explorer views using the `arn`. For example:
+
+```console
+% terraform import aws_resourceexplorer2_view.example arn:aws:resource-explorer-2:us-west-2:123456789012:view/exampleview/e0914f6c-6c27-4b47-b5d4-6b28381a2421
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/resourcegroups_group.html.markdown b/website/docs/cdktf/typescript/r/resourcegroups_group.html.markdown
new file mode 100644
index 00000000000..ef3a012b0d0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/resourcegroups_group.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Resource Groups"
+layout: "aws"
+page_title: "AWS: aws_resourcegroups_group"
+description: |-
+  Provides a Resource Group.
+---
+
+
+
+# Resource: aws_resourcegroups_group
+
+Provides a Resource Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ResourcegroupsGroup } from "./.gen/providers/aws/resourcegroups-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ResourcegroupsGroup(this, "test", {
+      name: "test-group",
+      resourceQuery: {
+        query:
+          '{\n "ResourceTypeFilters": [\n "AWS::EC2::Instance"\n ],\n "TagFilters": [\n {\n "Key": "Stage",\n "Values": ["Test"]\n }\n ]\n}\n\n',
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The resource group's name. A resource group name can have a maximum of 127 characters, including letters, numbers, hyphens, dots, and underscores. The name cannot start with `AWS` or `aws`.
+* `configuration` - (Optional) A configuration associates the resource group with an AWS service and specifies how the service can interact with the resources in the group. See below for details.
+* `description` - (Optional) A description of the resource group.
+* `resourceQuery` - (Required) A `resourceQuery` block. Resource queries are documented below.
+* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +The `resourceQuery` block supports the following arguments: + +* `query` - (Required) The resource query as a JSON string. +* `type` - (Required) The type of the resource query. Defaults to `tagFilters10`. + +The `configuration` block supports the following arguments: + +* `type` - (Required) Specifies the type of group configuration item. +* `parameters` - (Optional) A collection of parameters for this group configuration item. See below for details. + +The `parameters` block supports the following arguments: + +* `name` - (Required) The name of the group configuration parameter. +* `values` - (Optional) The value or values to be used for the specified parameter. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN assigned by AWS for this resource group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import resource groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import resource groups using the `name`. For example: + +```console +% terraform import aws_resourcegroups_group.foo resource-group-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/resourcegroups_resource.html.markdown b/website/docs/cdktf/typescript/r/resourcegroups_resource.html.markdown new file mode 100644 index 00000000000..1f84f243fe1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/resourcegroups_resource.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Resource Groups" +layout: "aws" +page_title: "AWS: aws_resourcegroups_resource" +description: |- + Terraform resource for managing an AWS Resource Groups Resource. +--- + + + +# Resource: aws_resourcegroups_resource + +Terraform resource for managing an AWS Resource Groups Resource. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { Ec2Host } from "./.gen/providers/aws/ec2-host";
+import { ResourcegroupsGroup } from "./.gen/providers/aws/resourcegroups-group";
+import { ResourcegroupsResource } from "./.gen/providers/aws/resourcegroups-resource";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Ec2Host(this, "example", {
+      autoPlacement: "on",
+      availabilityZone: "us-east-1a",
+      hostRecovery: "off",
+      instanceFamily: "t3",
+    });
+    const awsResourcegroupsGroupExample = new ResourcegroupsGroup(
+      this,
+      "example_1",
+      {
+        name: "example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsResourcegroupsGroupExample.overrideLogicalId("example");
+    const awsResourcegroupsResourceExample = new ResourcegroupsResource(
+      this,
+      "example_2",
+      {
+        groupArn: Token.asString(awsResourcegroupsGroupExample.arn),
+        resourceArn: example.arn,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsResourcegroupsResourceExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `groupArn` - (Required) The name or the ARN of the resource group to add resources to.
+* `resourceArn` - (Required) The ARN of the resource to be added to the group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `resourceType` - The resource type of a resource, such as `AWS::EC2::Instance`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rolesanywhere_profile.html.markdown b/website/docs/cdktf/typescript/r/rolesanywhere_profile.html.markdown
new file mode 100644
index 00000000000..31a793b5891
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rolesanywhere_profile.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "Roles Anywhere"
+layout: "aws"
+page_title: "AWS: aws_rolesanywhere_profile"
+description: |-
+  Provides a Roles Anywhere Profile resource
+---
+
+
+
+# Resource: aws_rolesanywhere_profile
+
+Terraform resource for managing a Roles Anywhere Profile.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { RolesanywhereProfile } from "./.gen/providers/aws/rolesanywhere-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new IamRole(this, "test", { + assumeRolePolicy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: [ + "sts:AssumeRole", + "sts:TagSession", + "sts:SetSourceIdentity", + ], + Effect: "Allow", + Principal: { + Service: "rolesanywhere.amazonaws.com", + }, + Sid: "", + }, + ], + Version: "2012-10-17", + }) + ), + name: "test", + path: "/", + }); + const awsRolesanywhereProfileTest = new RolesanywhereProfile( + this, + "test_1", + { + name: "example", + roleArns: [test.arn], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRolesanywhereProfileTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `durationSeconds` - (Optional) The number of seconds the vended session credentials are valid for. Defaults to 3600. +* `enabled` - (Optional) Whether or not the Profile is enabled. +* `managedPolicyArns` - (Optional) A list of managed policy ARNs that apply to the vended session credentials. +* `name` - (Required) The name of the Profile. +* `requireInstanceProperties` - (Optional) Specifies whether instance properties are required in [CreateSession](https://docs.aws.amazon.com/rolesanywhere/latest/APIReference/API_CreateSession.html) requests with this profile. +* `roleArns` - (Required) A list of IAM roles that this profile can assume +* `sessionPolicy` - (Optional) A session policy that applies to the trust boundary of the vended session credentials. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Profile +* `id` - The Profile ID. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsRolesanywhereProfile` using its `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsRolesanywhereProfile` using its `id`. 
For example:
+
+```console
+% terraform import aws_rolesanywhere_profile.example db138a85-8925-4f9f-a409-08231233cacf
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rolesanywhere_trust_anchor.html.markdown b/website/docs/cdktf/typescript/r/rolesanywhere_trust_anchor.html.markdown
new file mode 100644
index 00000000000..2a35fa0e1e4
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rolesanywhere_trust_anchor.html.markdown
@@ -0,0 +1,138 @@
+---
+subcategory: "Roles Anywhere"
+layout: "aws"
+page_title: "AWS: aws_rolesanywhere_trust_anchor"
+description: |-
+  Provides a Roles Anywhere Trust Anchor resource
+---
+
+
+
+# Resource: aws_rolesanywhere_trust_anchor
+
+Terraform resource for managing a Roles Anywhere Trust Anchor.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { AcmpcaCertificate } from "./.gen/providers/aws/acmpca-certificate";
+import { AcmpcaCertificateAuthority } from "./.gen/providers/aws/acmpca-certificate-authority";
+import { AcmpcaCertificateAuthorityCertificate } from "./.gen/providers/aws/acmpca-certificate-authority-certificate";
+import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition";
+import { RolesanywhereTrustAnchor } from "./.gen/providers/aws/rolesanywhere-trust-anchor";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new AcmpcaCertificateAuthority(this, "example", {
+      certificateAuthorityConfiguration: {
+        keyAlgorithm: "RSA_4096",
+        signingAlgorithm: "SHA512WITHRSA",
+        subject: {
+          commonName: "example.com",
+        },
+      },
+      permanentDeletionTimeInDays: 7,
+      type: "ROOT",
+    });
+    const current = new DataAwsPartition(this, "current", {});
+    const awsAcmpcaCertificateTest = new AcmpcaCertificate(this, "test_4", {
+      certificateAuthorityArn: example.arn,
+      certificateSigningRequest: example.certificateSigningRequest,
+      signingAlgorithm: "SHA512WITHRSA",
+      templateArn:
+        "arn:${" +
+        current.partition +
+        "}:acm-pca:::template/RootCACertificate/V1",
+      validity: {
+        type: "YEARS",
+        value: Token.asString(1),
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAcmpcaCertificateTest.overrideLogicalId("test");
+    const awsAcmpcaCertificateAuthorityCertificateExample =
+      new AcmpcaCertificateAuthorityCertificate(this, "example_1", {
+        certificate: Token.asString(awsAcmpcaCertificateTest.certificate),
+        certificateAuthorityArn: example.arn,
+        certificateChain: Token.asString(
+          awsAcmpcaCertificateTest.certificateChain
+        ),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsAcmpcaCertificateAuthorityCertificateExample.overrideLogicalId(
+      "example"
+    );
+    new RolesanywhereTrustAnchor(this, "test", {
+      dependsOn: [awsAcmpcaCertificateAuthorityCertificateExample],
+      name: "example",
+      source: {
+        sourceData: {
+          acmPcaArn: example.arn,
+        },
+        sourceType: "AWS_ACM_PCA",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `enabled` - (Optional) Whether or not the Trust Anchor should be enabled.
+
+* `name` - (Required) The name of the Trust Anchor.
+* `source` - (Required) The source of trust, documented below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested Blocks
+
+#### `source`
+
+* `sourceData` - (Required) The data denoting the source of trust, documented below.
+* `sourceType` - (Required) The type of the source of trust. Must be either `awsAcmPca` or `certificateBundle`.
+
+#### `sourceData`
+
+* `acmPcaArn` - (Optional, required when `sourceType` is `awsAcmPca`) The ARN of an ACM Private Certificate Authority.
+* `x509CertificateData` - (Optional, required when `sourceType` is `certificateBundle`) The x509 certificate data of the trust anchor, in PEM format.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Trust Anchor.
+* `id` - The Trust Anchor ID.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsRolesanywhereTrustAnchor` using its `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsRolesanywhereTrustAnchor` using its `id`. For example:
+
+```console
+% terraform import aws_rolesanywhere_trust_anchor.example 92b2fbbb-984d-41a3-a765-e3cbdb69ebb1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route.html.markdown b/website/docs/cdktf/typescript/r/route.html.markdown
new file mode 100644
index 00000000000..aa33fa112ba
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route.html.markdown
@@ -0,0 +1,190 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route"
+description: |-
+  Provides a resource to create a routing entry in a VPC routing table.
+---
+
+
+
+# Resource: aws_route
+
+Provides a resource to create a routing table entry (a route) in a VPC routing table.
+
+~> **NOTE on Route Tables and Routes:** Terraform currently provides both a standalone Route resource and a [Route Table](route_table.html) resource with routes defined in-line. At this time you cannot use a Route Table with in-line routes in conjunction with any Route resources. Doing so will cause a conflict of rule settings and will overwrite rules.
+
+~> **NOTE on `gatewayId` attribute:** The AWS API is very forgiving with the resource ID passed in the `gatewayId` attribute. For example, an `awsRoute` resource can be created with an [`awsNatGateway`](nat_gateway.html) or [`awsEgressOnlyInternetGateway`](egress_only_internet_gateway.html) ID specified for the `gatewayId` attribute.
Specifying anything other than an [`awsInternetGateway`](internet_gateway.html) or [`awsVpnGateway`](vpn_gateway.html) ID will lead to Terraform reporting a permanent diff between your configuration and recorded state, as the AWS API returns the more-specific attribute. If you are experiencing constant diffs with an `awsRoute` resource, the first thing to check is that the correct attribute is being specified.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route } from "./.gen/providers/aws/route";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route(this, "r", {
+      dependsOn: [testing],
+      destinationCidrBlock: "10.0.1.0/22",
+      routeTableId: "rtb-4fbb3ac4",
+      vpcPeeringConnectionId: "pcx-45ff3dc1",
+    });
+  }
+}
+
+```
+
+## Example IPv6 Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { EgressOnlyInternetGateway } from "./.gen/providers/aws/egress-only-internet-gateway";
+import { Route } from "./.gen/providers/aws/route";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const vpc = new Vpc(this, "vpc", {
+      assignGeneratedIpv6CidrBlock: true,
+      cidrBlock: "10.1.0.0/16",
+    });
+    const egress = new EgressOnlyInternetGateway(this, "egress", {
+      vpcId: vpc.id,
+    });
+    new Route(this, "r", {
+      destinationIpv6CidrBlock: "::/0",
+      egressOnlyGatewayId: egress.id,
+      routeTableId: "rtb-4fbb3ac4",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `routeTableId` - (Required) The ID of the routing table.
+
+One of the following destination arguments must be supplied:
+
+* `destinationCidrBlock` - (Optional) The destination CIDR block.
+* `destinationIpv6CidrBlock` - (Optional) The destination IPv6 CIDR block.
+* `destinationPrefixListId` - (Optional) The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination.
+
+One of the following target arguments must be supplied:
+
+* `carrierGatewayId` - (Optional) Identifier of a carrier gateway. This attribute can only be used when the VPC contains a subnet which is associated with a Wavelength Zone.
+* `coreNetworkArn` - (Optional) The Amazon Resource Name (ARN) of a core network.
+* `egressOnlyGatewayId` - (Optional) Identifier of a VPC Egress Only Internet Gateway.
+* `gatewayId` - (Optional) Identifier of a VPC internet gateway or a virtual private gateway. Specify `local` when updating a previously [imported](#import) local route.
+* `natGatewayId` - (Optional) Identifier of a VPC NAT gateway.
+* `localGatewayId` - (Optional) Identifier of an Outpost local gateway.
+* `networkInterfaceId` - (Optional) Identifier of an EC2 network interface.
+* `transitGatewayId` - (Optional) Identifier of an EC2 Transit Gateway.
+* `vpcEndpointId` - (Optional) Identifier of a VPC Endpoint.
+
+* `vpcPeeringConnectionId` - (Optional) Identifier of a VPC peering connection.
+
+Note that the default route, mapping the VPC's CIDR block to "local", is created implicitly and cannot be specified.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+~> **NOTE:** Only the arguments that are configured (one of the above) will be exported as an attribute once the resource is created.
+
+* `id` - Route identifier computed from the routing table identifier and route destination.
+* `instanceId` - Identifier of an EC2 instance.
+* `instanceOwnerId` - The AWS account ID of the owner of the EC2 instance.
+* `origin` - How the route was created - `createRouteTable`, `createRoute` or `enableVgwRoutePropagation`.
+* `state` - The state of the route - `active` or `blackhole`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `2m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import individual routes using `ROUTETABLEID_DESTINATION`. Import [local routes](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html#RouteTables) using the VPC's IPv4 or IPv6 CIDR blocks. For example:
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv4 destination CIDR of `10.42.0.0/16`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv6 destination CIDR of `2620:0:2d0:200::8/125`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import a route in route table `rtb-656C65616E6F72` with a managed prefix list destination of `pl-0570a1d2d725c16be`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import individual routes using `ROUTETABLEID_DESTINATION`. Import [local routes](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Route_Tables.html#RouteTables) using the VPC's IPv4 or IPv6 CIDR blocks.
For example:
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv4 destination CIDR of `10.42.0.0/16`:
+
+```console
+% terraform import aws_route.my_route rtb-656C65616E6F72_10.42.0.0/16
+```
+
+Import a route in route table `rtb-656C65616E6F72` with an IPv6 destination CIDR of `2620:0:2d0:200::8/125`:
+
+```console
+% terraform import aws_route.my_route rtb-656C65616E6F72_2620:0:2d0:200::8/125
+```
+
+Import a route in route table `rtb-656C65616E6F72` with a managed prefix list destination of `pl-0570a1d2d725c16be`:
+
+```console
+% terraform import aws_route.my_route rtb-656C65616E6F72_pl-0570a1d2d725c16be
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_cidr_collection.html.markdown b/website/docs/cdktf/typescript/r/route53_cidr_collection.html.markdown
new file mode 100644
index 00000000000..092593a63c5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_cidr_collection.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_cidr_collection"
+description: |-
+  Provides a Route53 CIDR collection resource.
+---
+
+
+
+# Resource: aws_route53_cidr_collection
+
+Provides a Route53 CIDR collection resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53CidrCollection } from "./.gen/providers/aws/route53-cidr-collection";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53CidrCollection(this, "example", {
+      name: "collection-1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) Unique name for the CIDR collection.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the CIDR collection.
+* `id` - The CIDR collection ID.
+* `version` - The latest version of the CIDR collection.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CIDR collections using their ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CIDR collections using their ID. For example:
+
+```console
+% terraform import aws_route53_cidr_collection.example 9ac32814-3e67-0932-6048-8d779cc6f511
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_cidr_location.html.markdown b/website/docs/cdktf/typescript/r/route53_cidr_location.html.markdown
new file mode 100644
index 00000000000..7c3f4f47385
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_cidr_location.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_cidr_location"
+description: |-
+  Provides a Route53 CIDR location resource.
+---
+
+
+
+# Resource: aws_route53_cidr_location
+
+Provides a Route53 CIDR location resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53CidrCollection } from "./.gen/providers/aws/route53-cidr-collection";
+import { Route53CidrLocation } from "./.gen/providers/aws/route53-cidr-location";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Route53CidrCollection(this, "example", {
+      name: "collection-1",
+    });
+    const awsRoute53CidrLocationExample = new Route53CidrLocation(
+      this,
+      "example_1",
+      {
+        cidrBlocks: ["200.5.3.0/24", "200.6.3.0/24"],
+        cidrCollectionId: example.id,
+        name: "office",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53CidrLocationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `cidrBlocks` - (Required) CIDR blocks for the location.
+* `cidrCollectionId` - (Required) The ID of the CIDR collection to update.
+* `name` - (Required) Name for the CIDR location.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The CIDR location ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CIDR locations using the CIDR collection ID and location name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import CIDR locations using the CIDR collection ID and location name. For example:
+
+```console
+% terraform import aws_route53_cidr_location.example 9ac32814-3e67-0932-6048-8d779cc6f511,office
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_delegation_set.html.markdown b/website/docs/cdktf/typescript/r/route53_delegation_set.html.markdown
new file mode 100644
index 00000000000..145ac230011
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_delegation_set.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_delegation_set"
+description: |-
+  Provides a Route53 Delegation Set resource.
+---
+
+
+
+# Resource: aws_route53_delegation_set
+
+Provides a [Route53 Delegation Set](https://docs.aws.amazon.com/Route53/latest/APIReference/API-actions-by-function.html#actions-by-function-reusable-delegation-sets) resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { Route53DelegationSet } from "./.gen/providers/aws/route53-delegation-set"; +import { Route53Zone } from "./.gen/providers/aws/route53-zone"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const main = new Route53DelegationSet(this, "main", { + referenceName: "DynDNS", + }); + new Route53Zone(this, "primary", { + delegationSetId: main.id, + name: "hashicorp.com", + }); + new Route53Zone(this, "secondary", { + delegationSetId: main.id, + name: "terraform.io", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `referenceName` - (Optional) This is a reference name used in Caller Reference + (helpful for identifying single delegation set amongst others) + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Delegation Set. +* `id` - The delegation set ID +* `nameServers` - A list of authoritative name servers for the hosted zone + (effectively a list of NS records). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Delegation Sets using the delegation set `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 Delegation Sets using the delegation set `id`. For example: + +```console +% terraform import aws_route53_delegation_set.set1 N1PA6795SAMPLE +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53_health_check.html.markdown b/website/docs/cdktf/typescript/r/route53_health_check.html.markdown new file mode 100644 index 00000000000..b8b90547fbe --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53_health_check.html.markdown @@ -0,0 +1,202 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_health_check" +description: |- + Provides a Route53 health check. +--- + + +# Resource: aws_route53_health_check + +Provides a Route53 health check. + +## Example Usage + +### Connectivity and HTTP Status Code Check + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53HealthCheck } from "./.gen/providers/aws/route53-health-check"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53HealthCheck(this, "example", { + failureThreshold: Token.asNumber("5"), + fqdn: "example.com", + port: 80, + requestInterval: Token.asNumber("30"), + resourcePath: "/", + tags: { + Name: "tf-test-health-check", + }, + type: "HTTP", + }); + } +} + +``` + +### Connectivity and String Matching Check + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { Route53HealthCheck } from "./.gen/providers/aws/route53-health-check"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53HealthCheck(this, "example", { + failureThreshold: Token.asNumber("5"), + fqdn: "example.com", + port: 443, + requestInterval: Token.asNumber("30"), + resourcePath: "/", + searchString: "example", + type: "HTTPS_STR_MATCH", + }); + } +} + +``` + +### Aggregate Check + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53HealthCheck } from "./.gen/providers/aws/route53-health-check"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53HealthCheck(this, "parent", { + childHealthThreshold: 1, + childHealthchecks: [child.id], + tags: { + Name: "tf-test-calculated-health-check", + }, + type: "CALCULATED", + }); + } +} + +``` + +### CloudWatch Alarm Check + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchMetricAlarm } from "./.gen/providers/aws/cloudwatch-metric-alarm"; +import { Route53HealthCheck } from "./.gen/providers/aws/route53-health-check"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foobar = new CloudwatchMetricAlarm(this, "foobar", { + alarmDescription: "This metric monitors ec2 cpu utilization", + alarmName: "terraform-test-foobar5", + comparisonOperator: "GreaterThanOrEqualToThreshold", + evaluationPeriods: Token.asNumber("2"), + metricName: "CPUUtilization", + namespace: "AWS/EC2", + period: Token.asNumber("120"), + statistic: "Average", + threshold: Token.asNumber("80"), + }); + new Route53HealthCheck(this, "foo", { + cloudwatchAlarmName: foobar.alarmName, + cloudwatchAlarmRegion: "us-west-2", + insufficientDataHealthStatus: "Healthy", + type: "CLOUDWATCH_METRIC", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +~> **Note:** At least one of either `fqdn` or `ipAddress` must be specified. + +* `referenceName` - (Optional) This is a reference name used in Caller Reference + (helpful for identifying single health_check set amongst others) +* `fqdn` - (Optional) The fully qualified domain name of the endpoint to be checked. If a value is set for `ipAddress`, the value set for `fqdn` will be passed in the `host` header. +* `ipAddress` - (Optional) The IP address of the endpoint to be checked. +* `port` - (Optional) The port of the endpoint to be checked. +* `type` - (Required) The protocol to use when performing health checks. Valid values are `http`, `https`, `httpStrMatch`, `httpsStrMatch`, `tcp`, `calculated`, `cloudwatchMetric` and `recoveryControl`. +* `failureThreshold` - (Optional) The number of consecutive health checks that an endpoint must pass or fail. 
+* `requestInterval` - (Required) The number of seconds between the time that Amazon Route 53 gets a response from your endpoint and the time that it sends the next health-check request.
+* `resourcePath` - (Optional) The path that you want Amazon Route 53 to request when performing health checks.
+* `searchString` - (Optional) String searched in the first 5120 bytes of the response body for the check to be considered healthy. Only valid with `httpStrMatch` and `httpsStrMatch`.
+* `measureLatency` - (Optional) A Boolean value that indicates whether you want Route 53 to measure the latency between health checkers in multiple AWS regions and your endpoint and to display CloudWatch latency graphs in the Route 53 console.
+* `invertHealthcheck` - (Optional) A boolean value that indicates whether the status of health check should be inverted. For example, if a health check is healthy but Inverted is True, then Route 53 considers the health check to be unhealthy.
+* `disabled` - (Optional) A boolean value that stops Route 53 from performing health checks. When set to true, Route 53 will do the following depending on the type of health check:
+    * For health checks that check the health of endpoints, Route 53 stops submitting requests to your application, server, or other resource.
+    * For calculated health checks, Route 53 stops aggregating the status of the referenced health checks.
+    * For health checks that monitor CloudWatch alarms, Route 53 stops monitoring the corresponding CloudWatch metrics.
+
+  ~> **Note:** After you disable a health check, Route 53 considers the status of the health check to always be healthy. If you configured DNS failover, Route 53 continues to route traffic to the corresponding resources. If you want to stop routing traffic to a resource, change the value of `invertHealthcheck`.
+* `enableSni` - (Optional) A boolean value that indicates whether Route53 should send the `fqdn` to the endpoint when performing the health check. This defaults to AWS' defaults: when the `type` is "HTTPS", `enableSni` defaults to `true`; when `type` is anything else, `enableSni` defaults to `false`.
+* `childHealthchecks` - (Optional) For a specified parent health check, a list of HealthCheckId values for the associated child health checks.
+* `childHealthThreshold` - (Optional) The minimum number of child health checks that must be healthy for Route 53 to consider the parent health check to be healthy. Valid values are integers between 0 and 256, inclusive.
+* `cloudwatchAlarmName` - (Optional) The name of the CloudWatch alarm.
+* `cloudwatchAlarmRegion` - (Optional) The CloudWatchRegion that the CloudWatch alarm was created in.
+* `insufficientDataHealthStatus` - (Optional) The status of the health check when CloudWatch has insufficient data about the state of associated alarm. Valid values are `healthy`, `unhealthy` and `lastKnownStatus`.
+* `regions` - (Optional) A list of AWS regions that you want Amazon Route 53 health checkers to check the specified endpoint from.
+* `routingControlArn` - (Optional) The Amazon Resource Name (ARN) for the Route 53 Application Recovery Controller routing control. This is used when health check type is `recoveryControl`.
+* `tags` - (Optional) A map of tags to assign to the health check. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
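+
+To tie several of these arguments together, here is a hand-written sketch (not produced by `cdktf convert`) of an HTTPS check that sends SNI, measures latency, and restricts the checker regions; the endpoint name and resource path are placeholder assumptions:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Route53HealthCheck } from "./.gen/providers/aws/route53-health-check";
+
+class SniHealthCheckSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53HealthCheck(this, "sni_check", {
+      type: "HTTPS",
+      fqdn: "example.com", // placeholder endpoint; sent as SNI because enableSni is true
+      port: 443,
+      resourcePath: "/healthz", // placeholder path
+      enableSni: true, // explicit here, although HTTPS checks default to true
+      measureLatency: true,
+      failureThreshold: 3,
+      requestInterval: 30,
+      // Route 53 requires at least three checker regions when the list is customized.
+      regions: ["us-east-1", "eu-west-1", "ap-southeast-1"],
+    });
+  }
+}
+```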
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Health Check. +* `id` - The id of the health check +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Health Checks using the health check `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 Health Checks using the health check `id`. For example: + +```console +% terraform import aws_route53_health_check.http_check abcdef11-2222-3333-4444-555555fedcba +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53_hosted_zone_dnssec.html.markdown b/website/docs/cdktf/typescript/r/route53_hosted_zone_dnssec.html.markdown new file mode 100644 index 00000000000..e03c7d306e9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53_hosted_zone_dnssec.html.markdown @@ -0,0 +1,146 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_hosted_zone_dnssec" +description: |- + Manages Route 53 Hosted Zone DNSSEC +--- + + + +# Resource: aws_route53_hosted_zone_dnssec + +Manages Route 53 Hosted Zone Domain Name System Security Extensions (DNSSEC). For more information about managing DNSSEC in Route 53, see the [Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec.html). + +!> **WARNING:** If you disable DNSSEC signing for your hosted zone before the DNS changes have propagated, your domain could become unavailable on the internet. When you remove the DS records, you must wait until the longest TTL for the DS records that you remove has expired before you complete the step to disable DNSSEC signing. Please refer to the [Route 53 Developer Guide - Disable DNSSEC](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec-disable.html) for a detailed breakdown on the steps required to disable DNSSEC safely for a hosted zone. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Route53HostedZoneDnssec } from "./.gen/providers/aws/route53-hosted-zone-dnssec"; +import { Route53KeySigningKey } from "./.gen/providers/aws/route53-key-signing-key"; +import { Route53Zone } from "./.gen/providers/aws/route53-zone"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + region: "us-east-1", + }); + const example = new Route53Zone(this, "example", { + name: "example.com", + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const awsKmsKeyExample = new KmsKey(this, "example_3", { + customerMasterKeySpec: "ECC_NIST_P256", + deletionWindowInDays: 7, + keyUsage: "SIGN_VERIFY", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: [ + "kms:DescribeKey", + "kms:GetPublicKey", + "kms:Sign", + "kms:Verify", + ], + Effect: "Allow", + Principal: { + Service: "dnssec-route53.amazonaws.com", + }, + Resource: "*", + Sid: "Allow Route 53 DNSSEC Service", + }, + { + Action: "kms:*", + Effect: "Allow", + Principal: { + AWS: "arn:aws:iam::${" + current.accountId + "}:root", + }, + Resource: "*", + Sid: "Enable IAM User Permissions", + }, + ], + Version: "2012-10-17", + }) + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsKmsKeyExample.overrideLogicalId("example"); + const awsRoute53KeySigningKeyExample = new Route53KeySigningKey( + this, + "example_4", + { + hostedZoneId: example.id, + keyManagementServiceArn: Token.asString(awsKmsKeyExample.arn), + name: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53KeySigningKeyExample.overrideLogicalId("example"); + const awsRoute53HostedZoneDnssecExample = new Route53HostedZoneDnssec( + this, + "example_5", + { + dependsOn: [awsRoute53KeySigningKeyExample], + hostedZoneId: Token.asString( + awsRoute53KeySigningKeyExample.hostedZoneId + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53HostedZoneDnssecExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `hostedZoneId` - (Required) Identifier of the Route 53 Hosted Zone. + +The following arguments are optional: + +* `signingStatus` - (Optional) Hosted Zone signing status. Valid values: `signing`, `notSigning`. Defaults to `signing`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Route 53 Hosted Zone identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsRoute53HostedZoneDnssec` resources using the Route 53 Hosted Zone identifier. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsRoute53HostedZoneDnssec` resources using the Route 53 Hosted Zone identifier. For example:
+
+```console
+% terraform import aws_route53_hosted_zone_dnssec.example Z1D633PJN98FT9
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_key_signing_key.html.markdown b/website/docs/cdktf/typescript/r/route53_key_signing_key.html.markdown
new file mode 100644
index 00000000000..5002623b56e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_key_signing_key.html.markdown
@@ -0,0 +1,167 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_key_signing_key"
+description: |-
+  Manages a Route 53 Key Signing Key
+---
+
+
+
+# Resource: aws_route53_key_signing_key
+
+Manages a Route 53 Key Signing Key. To manage Domain Name System Security Extensions (DNSSEC) for a Hosted Zone, see the [`awsRoute53HostedZoneDnssec` resource](route53_hosted_zone_dnssec.html). For more information about managing DNSSEC in Route 53, see the [Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+import { Route53HostedZoneDnssec } from "./.gen/providers/aws/route53-hosted-zone-dnssec";
+import { Route53KeySigningKey } from "./.gen/providers/aws/route53-key-signing-key";
+import { Route53Zone } from "./.gen/providers/aws/route53-zone";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AwsProvider(this, "aws", {
+      region: "us-east-1",
+    });
+    const awsRoute53ZoneExample = new Route53Zone(this, "example_2", {
+      name: "example.com",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ZoneExample.overrideLogicalId("example");
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const awsKmsKeyExample = new KmsKey(this, "example_4", {
+      customerMasterKeySpec: "ECC_NIST_P256",
+      deletionWindowInDays: 7,
+      keyUsage: "SIGN_VERIFY",
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: ["kms:DescribeKey", "kms:GetPublicKey", "kms:Sign"],
+              Condition: {
+                ArnLike: {
+                  "aws:SourceArn": "arn:aws:route53:::hostedzone/*",
+                },
+                StringEquals: {
+                  "aws:SourceAccount": current.accountId,
+                },
+              },
+              Effect: "Allow",
+              Principal: {
+                Service: "dnssec-route53.amazonaws.com",
+              },
+              Resource: "*",
+              Sid: "Allow Route 53 DNSSEC Service",
+            },
+            {
+              Action: "kms:CreateGrant",
+              Condition: {
+                Bool: {
+                  "kms:GrantIsForAWSResource": "true",
+                },
+              },
+              Effect: "Allow",
+              Principal: {
+                Service: "dnssec-route53.amazonaws.com",
+              },
+              Resource: "*",
+              Sid: "Allow Route 53 DNSSEC Service to CreateGrant",
+            },
+            {
+              Action: "kms:*",
+              Effect: "Allow",
+              Principal: {
+                AWS: "arn:aws:iam::${" + current.accountId + "}:root",
+              },
+              Resource: "*",
+              Sid: "Enable IAM User Permissions",
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKmsKeyExample.overrideLogicalId("example");
+    const example = new Route53KeySigningKey(this, "example", {
+      hostedZoneId: awsRoute53ZoneExample.id,
+      keyManagementServiceArn: Token.asString(awsKmsKeyExample.arn),
+      name: "example",
+    });
+    const awsRoute53HostedZoneDnssecExample = new Route53HostedZoneDnssec(
+      this,
+      "example_5",
+      {
+        dependsOn: [example],
+        hostedZoneId: example.hostedZoneId,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53HostedZoneDnssecExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `hostedZoneId` - (Required) Identifier of the Route 53 Hosted Zone.
+* `keyManagementServiceArn` - (Required) Amazon Resource Name (ARN) of the Key Management Service (KMS) Key. This must be unique for each key-signing key (KSK) in a single hosted zone. This key must be in the `us-east-1` Region and meet certain requirements, which are described in the [Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-configuring-dnssec-cmk-requirements.html) and [Route 53 API Reference](https://docs.aws.amazon.com/Route53/latest/APIReference/API_CreateKeySigningKey.html).
+* `name` - (Required) Name of the key-signing key (KSK). Must be unique for each key-signing key in the same hosted zone.
+
+The following arguments are optional:
+
+* `status` - (Optional) Status of the key-signing key (KSK). Valid values: `active`, `inactive`. Defaults to `active`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `digestAlgorithmMnemonic` - A string used to represent the delegation signer digest algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.3](https://tools.ietf.org/html/rfc8624#section-3.3).
+* `digestAlgorithmType` - An integer used to represent the delegation signer digest algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.3](https://tools.ietf.org/html/rfc8624#section-3.3).
+* `digestValue` - A cryptographic digest of a DNSKEY resource record (RR).
DNSKEY records are used to publish the public key that resolvers can use to verify DNSSEC signatures that are used to secure certain kinds of information provided by the DNS system.
+* `dnskeyRecord` - A string that represents a DNSKEY record.
+* `dsRecord` - A string that represents a delegation signer (DS) record.
+* `flag` - An integer that specifies how the key is used. For key-signing key (KSK), this value is always 257.
+* `id` - Route 53 Hosted Zone identifier and KMS Key identifier, separated by a comma (`,`).
+* `keyTag` - An integer used to identify the DNSSEC record for the domain name. The process used to calculate the value is described in [RFC-4034 Appendix B](https://tools.ietf.org/rfc/rfc4034.txt).
+* `publicKey` - The public key, represented as a Base64 encoding, as required by [RFC-4034 Page 5](https://tools.ietf.org/rfc/rfc4034.txt).
+* `signingAlgorithmMnemonic` - A string used to represent the signing algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.1](https://tools.ietf.org/html/rfc8624#section-3.1).
+* `signingAlgorithmType` - An integer used to represent the signing algorithm. This value must follow the guidelines provided by [RFC-8624 Section 3.1](https://tools.ietf.org/html/rfc8624#section-3.1).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsRoute53KeySigningKey` resources using the Route 53 Hosted Zone identifier and KMS Key identifier, separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsRoute53KeySigningKey` resources using the Route 53 Hosted Zone identifier and KMS Key identifier, separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_route53_key_signing_key.example Z1D633PJN98FT9,example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_query_log.html.markdown b/website/docs/cdktf/typescript/r/route53_query_log.html.markdown
new file mode 100644
index 00000000000..751f0eb713e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_query_log.html.markdown
@@ -0,0 +1,136 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_query_log"
+description: |-
+  Provides a Route53 query logging configuration resource.
+---
+
+
+
+# Resource: aws_route53_query_log
+
+Provides a Route53 query logging configuration resource.
+
+~> **NOTE:** There are restrictions on the configuration of query logging. Notably,
+the CloudWatch log group must be in the `us-east-1` region,
+a permissive CloudWatch log resource policy must be in place, and
+the Route53 hosted zone must be public.
+See [Configuring Logging for DNS Queries](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/query-logs.html?console_help=true#query-logs-configuring) for additional details.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Route53QueryLog } from "./.gen/providers/aws/route53-query-log"; +import { Route53Zone } from "./.gen/providers/aws/route53-zone"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const usEast1 = new AwsProvider(this, "aws", { + alias: "us-east-1", + region: "us-east-1", + }); + const exampleCom = new Route53Zone(this, "example_com", { + name: "example.com", + }); + const route53QueryLoggingPolicy = new DataAwsIamPolicyDocument( + this, + "route53-query-logging-policy", + { + statement: [ + { + actions: ["logs:CreateLogStream", "logs:PutLogEvents"], + principals: [ + { + identifiers: ["route53.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:logs:*:*:log-group:/aws/route53/*"], + }, + ], + } + ); + const awsRoute53ExampleCom = new CloudwatchLogGroup( + this, + "aws_route53_example_com", + { + name: "/aws/route53/${" + exampleCom.name + "}", + provider: usEast1, + retentionInDays: 30, + } + ); + const awsCloudwatchLogResourcePolicyRoute53QueryLoggingPolicy = + new CloudwatchLogResourcePolicy(this, "route53-query-logging-policy_4", { + policyDocument: Token.asString(route53QueryLoggingPolicy.json), + policyName: "route53-query-logging-policy", + provider: usEast1, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCloudwatchLogResourcePolicyRoute53QueryLoggingPolicy.overrideLogicalId( + "route53-query-logging-policy" + ); + const awsRoute53QueryLogExampleCom = new Route53QueryLog( + this, + "example_com_5", + { + cloudwatchLogGroupArn: awsRoute53ExampleCom.arn, + dependsOn: [awsCloudwatchLogResourcePolicyRoute53QueryLoggingPolicy], + zoneId: exampleCom.zoneId, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53QueryLogExampleCom.overrideLogicalId("example_com"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `cloudwatchLogGroupArn` - (Required) CloudWatch log group ARN to send query logs. +* `zoneId` - (Required) Route53 hosted zone ID to enable query logs. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) of the Query Logging Config. +* `id` - The query logging configuration ID + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 query logging configurations using their ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 query logging configurations using their ID. 
For example:
+
+```console
+% terraform import aws_route53_query_log.example_com xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_record.html.markdown b/website/docs/cdktf/typescript/r/route53_record.html.markdown
new file mode 100644
index 00000000000..04732578259
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_record.html.markdown
@@ -0,0 +1,291 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_record"
+description: |-
+  Provides a Route53 record resource.
+---
+
+
+# Resource: aws_route53_record
+
+Provides a Route53 record resource.
+
+## Example Usage
+
+### Simple routing policy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53Record(this, "www", {
+      name: "www.example.com",
+      records: [lb.publicIp],
+      ttl: 300,
+      type: "A",
+      zoneId: primary.zoneId,
+    });
+  }
+}
+
+```
+
+### Weighted routing policy
+
+Other routing policies are configured similarly. See [Amazon Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html) for details.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53Record(this, "www-dev", {
+      name: "www",
+      records: ["dev.example.com"],
+      setIdentifier: "dev",
+      ttl: 5,
+      type: "CNAME",
+      weightedRoutingPolicy: {
+        weight: 10,
+      },
+      zoneId: primary.zoneId,
+    });
+    new Route53Record(this, "www-live", {
+      name: "www",
+      records: ["live.example.com"],
+      setIdentifier: "live",
+      ttl: 5,
+      type: "CNAME",
+      weightedRoutingPolicy: {
+        weight: 90,
+      },
+      zoneId: primary.zoneId,
+    });
+  }
+}
+
+```
+
+### Alias record
+
+See [related part of Amazon Route 53 Developer Guide](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resource-record-sets-choosing-alias-non-alias.html)
+to understand differences between alias and non-alias records.
+
+The TTL for all alias records is [60 seconds](https://aws.amazon.com/route53/faqs/#dns_failover_do_i_need_to_adjust);
+you cannot change this, so `ttl` must be omitted for alias records.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
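+ * The alias block below points the record at the ELB's DNS name and hosted
+ * zone; ttl is omitted because alias records use a fixed 60-second TTL.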
+ */
+import { Elb } from "./.gen/providers/aws/elb";
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new Elb(this, "main", {
+      availabilityZones: ["us-east-1c"],
+      listener: [
+        {
+          instancePort: 80,
+          instanceProtocol: "http",
+          lbPort: 80,
+          lbProtocol: "http",
+        },
+      ],
+      name: "foobar-terraform-elb",
+    });
+    new Route53Record(this, "www", {
+      alias: {
+        evaluateTargetHealth: true,
+        name: main.dnsName,
+        zoneId: main.zoneId,
+      },
+      name: "example.com",
+      type: "A",
+      zoneId: primary.zoneId,
+    });
+  }
+}
+
+```
+
+### NS and SOA Record Management
+
+When creating Route 53 zones, the `NS` and `SOA` records for the zone are automatically created. Enabling the `allowOverwrite` argument will allow managing these records in a single Terraform run without the requirement for `terraform import`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+import { Route53Zone } from "./.gen/providers/aws/route53-zone";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Route53Zone(this, "example", {
+      name: "test.example.com",
+    });
+    const awsRoute53RecordExample = new Route53Record(this, "example_1", {
+      allowOverwrite: true,
+      name: "test.example.com",
+      records: [
+        Token.asString(propertyAccess(example.nameServers, ["0"])),
+        Token.asString(propertyAccess(example.nameServers, ["1"])),
+        Token.asString(propertyAccess(example.nameServers, ["2"])),
+        Token.asString(propertyAccess(example.nameServers, ["3"])),
+      ],
+      ttl: 172800,
+      type: "NS",
+      zoneId: example.zoneId,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53RecordExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `zoneId` - (Required) The ID of the hosted zone to contain this record.
+* `name` - (Required) The name of the record.
+* `type` - (Required) The record type. Valid values are `A`, `AAAA`, `CAA`, `CNAME`, `DS`, `MX`, `NAPTR`, `NS`, `PTR`, `SOA`, `SPF`, `SRV` and `TXT`.
+* `ttl` - (Required for non-alias records) The TTL of the record.
+* `records` - (Required for non-alias records) A string list of records. To specify a single record value longer than 255 characters such as a TXT record for DKIM, add `\"\"` inside the Terraform configuration string (e.g., `"first255characters\"\"morecharacters"`).
+* `setIdentifier` - (Optional) Unique identifier to differentiate records with routing policies from one another. Required if using `cidrRoutingPolicy`, `failoverRoutingPolicy`, `geolocationRoutingPolicy`, `latencyRoutingPolicy`, `multivalueAnswerRoutingPolicy`, or `weightedRoutingPolicy`.
+* `healthCheckId` - (Optional) The health check the record should be associated with.
+* `alias` - (Optional) An alias block. Conflicts with `ttl` & `records`.
+  [Documented below](#alias).
+* `cidrRoutingPolicy` - (Optional) A block indicating a routing policy based on the IP network ranges of requestors. Conflicts with any other routing policy. [Documented below](#cidr-routing-policy).
+* `failoverRoutingPolicy` - (Optional) A block indicating the routing behavior when associated health check fails. Conflicts with any other routing policy. [Documented below](#failover-routing-policy).
+* `geolocationRoutingPolicy` - (Optional) A block indicating a routing policy based on the geolocation of the requestor. Conflicts with any other routing policy. [Documented below](#geolocation-routing-policy).
+* `latencyRoutingPolicy` - (Optional) A block indicating a routing policy based on the latency between the requestor and an AWS region. Conflicts with any other routing policy. [Documented below](#latency-routing-policy).
+* `multivalueAnswerRoutingPolicy` - (Optional) Set to `true` to indicate a multivalue answer routing policy. Conflicts with any other routing policy.
+* `weightedRoutingPolicy` - (Optional) A block indicating a weighted routing policy. Conflicts with any other routing policy. [Documented below](#weighted-routing-policy).
+* `allowOverwrite` - (Optional) Allow creation of this record in Terraform to overwrite an existing record, if any. This does not affect the ability to update the record in Terraform and does not prevent other resources within Terraform or manual Route 53 changes outside Terraform from overwriting this record. `false` by default. This configuration is not recommended for most environments.
+
+Exactly one of `records` or `alias` must be specified: this determines whether it's an alias record.
+
+### Alias
+
+Alias records support the following:
+
+* `name` - (Required) DNS domain name for a CloudFront distribution, S3 bucket, ELB, or another resource record set in this hosted zone.
+* `zoneId` - (Required) Hosted zone ID for a CloudFront distribution, S3 bucket, ELB, or Route 53 hosted zone. See the [`aws_elb` resource's `zoneId` attribute](/docs/providers/aws/r/elb.html#zone_id) for an example.
+* `evaluateTargetHealth` - (Required) Set to `true` if you want Route 53 to determine whether to respond to DNS queries using this resource record set by checking the health of the resource record set. Some resources have special requirements, see [related part of documentation](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resource-record-sets-values.html#rrsets-values-alias-evaluate-target-health).
+
+### CIDR Routing Policy
+
+CIDR routing policies support the following:
+
+* `collectionId` - (Required) The CIDR collection ID. See the [`awsRoute53CidrCollection` resource](route53_cidr_collection.html) for more details.
+* `locationName` - (Required) The CIDR collection location name. See the [`awsRoute53CidrLocation` resource](route53_cidr_location.html) for more details. A `locationName` with an asterisk `"*"` can be used to create a default CIDR record. `collectionId` is still required for the default record.
+
+### Failover Routing Policy
+
+Failover routing policies support the following:
+
+* `type` - (Required) `PRIMARY` or `SECONDARY`. A `PRIMARY` record will be served if its health check is passing, otherwise the `SECONDARY` will be served. See http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/dns-failover-configuring-options.html#dns-failover-failover-rrsets
+
+### Geolocation Routing Policy
+
+Geolocation routing policies support the following:
+
+* `continent` - A two-letter continent code.
See http://docs.aws.amazon.com/Route53/latest/APIReference/API_GetGeoLocation.html for code details. Either `continent` or `country` must be specified.
+* `country` - A two-character country code or `*` to indicate a default resource record set.
+* `subdivision` - (Optional) A subdivision code for a country.
+
+### Latency Routing Policy
+
+Latency routing policies support the following:
+
+* `region` - (Required) An AWS region from which to measure latency. See http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html#routing-policy-latency
+
+### Weighted Routing Policy
+
+Weighted routing policies support the following:
+
+* `weight` - (Required) A numeric value indicating the relative weight of the record. See http://docs.aws.amazon.com/Route53/latest/DeveloperGuide/routing-policy.html#routing-policy-weighted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `name` - The name of the record.
+* `fqdn` - [FQDN](https://en.wikipedia.org/wiki/Fully_qualified_domain_name) built using the zone domain and `name`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Records using the ID of the record, record name, record type, and set identifier. For example:
+
+Using the ID of the record, which is the zone identifier, record name, and record type, separated by underscores (`_`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the record also contains a set identifier, append it:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Records using the ID of the record, record name, record type, and set identifier. For example:
+
+Using the ID of the record, which is the zone identifier, record name, and record type, separated by underscores (`_`):
+
+```console
+% terraform import aws_route53_record.myrecord Z4KAPRWWNC7JR_dev.example.com_NS
+```
+
+If the record also contains a set identifier, append it:
+
+```console
+% terraform import aws_route53_record.myrecord Z4KAPRWWNC7JR_dev.example.com_NS_dev
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_config.html.markdown b/website/docs/cdktf/typescript/r/route53_resolver_config.html.markdown
new file mode 100644
index 00000000000..c9ce17702d5
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_config.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_config"
+description: |-
+  Provides a Route 53 Resolver config resource.
+---
+
+
+# Resource: aws_route53_resolver_config
+
+Provides a Route 53 Resolver config resource.
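+
+Use it to control whether the Resolver creates autodefined rules for reverse DNS lookups in a VPC, via the `autodefinedReverseFlag` argument described below.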
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverConfig } from "./.gen/providers/aws/route53-resolver-config";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Vpc(this, "example", {
+      cidrBlock: "10.0.0.0/16",
+      enableDnsHostnames: true,
+      enableDnsSupport: true,
+    });
+    const awsRoute53ResolverConfigExample = new Route53ResolverConfig(
+      this,
+      "example_1",
+      {
+        autodefinedReverseFlag: "DISABLE",
+        resourceId: example.id,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ResolverConfigExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceId` - (Required) The ID of the VPC that the configuration is for.
+* `autodefinedReverseFlag` - (Required) Indicates whether the Resolver will create autodefined rules for reverse DNS lookups. Valid values: `ENABLE`, `DISABLE`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the resolver configuration.
+* `ownerId` - The AWS account ID of the owner of the VPC that this resolver configuration applies to.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver configs using the Route 53 Resolver config ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver configs using the Route 53 Resolver config ID. For example:
+
+```console
+% terraform import aws_route53_resolver_config.example rslvr-rc-715aa20c73a23da7
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_dnssec_config.html.markdown b/website/docs/cdktf/typescript/r/route53_resolver_dnssec_config.html.markdown
new file mode 100644
index 00000000000..942d484ef13
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_dnssec_config.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_dnssec_config"
+description: |-
+  Provides a Route 53 Resolver DNSSEC config resource.
+---
+
+
+# Resource: aws_route53_resolver_dnssec_config
+
+Provides a Route 53 Resolver DNSSEC config resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
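+ * The example VPC enables DNS support and hostnames before the DNSSEC
+ * validation configuration is attached to it.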
+ */
+import { Route53ResolverDnssecConfig } from "./.gen/providers/aws/route53-resolver-dnssec-config";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Vpc(this, "example", {
+      cidrBlock: "10.0.0.0/16",
+      enableDnsHostnames: true,
+      enableDnsSupport: true,
+    });
+    const awsRoute53ResolverDnssecConfigExample =
+      new Route53ResolverDnssecConfig(this, "example_1", {
+        resourceId: example.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ResolverDnssecConfigExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceId` - (Required) The ID of the virtual private cloud (VPC) that you're updating the DNSSEC validation status for.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN for a configuration for DNSSEC validation.
+* `id` - The ID for a configuration for DNSSEC validation.
+* `ownerId` - The owner account ID of the virtual private cloud (VPC) for a configuration for DNSSEC validation.
+* `validationStatus` - The validation status for a DNSSEC configuration. The status can be one of the following: `ENABLING`, `ENABLED`, `DISABLING` and `DISABLED`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNSSEC configs using the Route 53 Resolver DNSSEC config ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver DNSSEC configs using the Route 53 Resolver DNSSEC config ID. For example:
+
+```console
+% terraform import aws_route53_resolver_dnssec_config.example rdsc-be1866ecc1683e95
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_endpoint.html.markdown b/website/docs/cdktf/typescript/r/route53_resolver_endpoint.html.markdown
new file mode 100644
index 00000000000..99ac4ffc1ec
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_endpoint.html.markdown
@@ -0,0 +1,108 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_endpoint"
+description: |-
+  Provides a Route 53 Resolver endpoint resource.
+---
+
+
+# Resource: aws_route53_resolver_endpoint
+
+Provides a Route 53 Resolver endpoint resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
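+ * The INBOUND endpoint below draws IP addresses from two subnets (sn1 and
+ * sn2); the second ipAddress block also pins a specific IP in its subnet.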
+ */
+import { Route53ResolverEndpoint } from "./.gen/providers/aws/route53-resolver-endpoint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53ResolverEndpoint(this, "foo", {
+      direction: "INBOUND",
+      ipAddress: [
+        {
+          subnetId: sn1.id,
+        },
+        {
+          ip: "10.0.64.4",
+          subnetId: sn2.id,
+        },
+      ],
+      name: "foo",
+      securityGroupIds: [sg1.id, sg2.id],
+      tags: {
+        Environment: "Prod",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `direction` - (Required) The direction of DNS queries to or from the Route 53 Resolver endpoint.
+Valid values are `INBOUND` (resolver forwards DNS queries to the DNS service for a VPC from your network or another VPC)
+or `OUTBOUND` (resolver forwards DNS queries from the DNS service for a VPC to your network or another VPC).
+* `ipAddress` - (Required) The subnets and IP addresses in your VPC that you want DNS queries to pass through on the way from your VPCs
+to your network (for outbound endpoints) or on the way from your network to your VPCs (for inbound endpoints). Described below.
+* `securityGroupIds` - (Required) The ID of one or more security groups that you want to use to control access to this VPC.
+* `name` - (Optional) The friendly name of the Route 53 Resolver endpoint.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `ipAddress` object supports the following:
+
+* `subnetId` - (Required) The ID of the subnet that contains the IP address.
+* `ip` - (Optional) The IP address in the subnet that you want to use for DNS queries.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Route 53 Resolver endpoint.
+* `arn` - The ARN of the Route 53 Resolver endpoint.
+* `hostVpcId` - The ID of the VPC that you want to create the resolver endpoint in.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver endpoints using the Route 53 Resolver endpoint ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver endpoints using the Route 53 Resolver endpoint ID.
For example:
+
+```console
+% terraform import aws_route53_resolver_endpoint.foo rslvr-in-abcdef01234567890
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_firewall_config.markdown b/website/docs/cdktf/typescript/r/route53_resolver_firewall_config.markdown
new file mode 100644
index 00000000000..24d4f1acf37
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_firewall_config.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_config"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall config resource.
+---
+
+
+# Resource: aws_route53_resolver_firewall_config
+
+Provides a Route 53 Resolver DNS Firewall config resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverFirewallConfig } from "./.gen/providers/aws/route53-resolver-firewall-config";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Vpc(this, "example", {
+      cidrBlock: "10.0.0.0/16",
+      enableDnsHostnames: true,
+      enableDnsSupport: true,
+    });
+    const awsRoute53ResolverFirewallConfigExample =
+      new Route53ResolverFirewallConfig(this, "example_1", {
+        firewallFailOpen: "ENABLED",
+        resourceId: example.id,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ResolverFirewallConfigExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceId` - (Required) The ID of the VPC that the configuration is for.
+* `firewallFailOpen` - (Required) Determines how Route 53 Resolver handles queries during failures, for example when all traffic that is sent to DNS Firewall fails to receive a reply. By default, fail open is disabled, which means the failure mode is closed. This approach favors security over availability: DNS Firewall blocks queries that it is unable to evaluate properly. If you enable this option, the failure mode is open, which favors availability over security: DNS Firewall allows queries to proceed if it is unable to properly evaluate them. Valid values: `ENABLED`, `DISABLED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the firewall configuration.
+* `ownerId` - The AWS account ID of the owner of the VPC that this firewall configuration applies to.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall configs using the Route 53 Resolver DNS Firewall config ID.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall configs using the Route 53 Resolver DNS Firewall config ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_config.example rdsc-be1866ecc1683e95
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_firewall_domain_list.markdown b/website/docs/cdktf/typescript/r/route53_resolver_firewall_domain_list.markdown
new file mode 100644
index 00000000000..d7cafb7282e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_firewall_domain_list.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_domain_list"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall domain list resource.
+---
+
+
+# Resource: aws_route53_resolver_firewall_domain_list
+
+Provides a Route 53 Resolver DNS Firewall domain list resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverFirewallDomainList } from "./.gen/providers/aws/route53-resolver-firewall-domain-list";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53ResolverFirewallDomainList(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that lets you identify the domain list, to manage and use it.
+* `domains` - (Optional) An array of domains for the firewall domain list.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN (Amazon Resource Name) of the domain list.
+* `id` - The ID of the domain list.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall domain lists using the Route 53 Resolver DNS Firewall domain list ID.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall domain lists using the Route 53 Resolver DNS Firewall domain list ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_domain_list.example rslvr-fdl-0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule.markdown b/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule.markdown
new file mode 100644
index 00000000000..dc630b18164
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall rule resource.
+---
+
+
+# Resource: aws_route53_resolver_firewall_rule
+
+Provides a Route 53 Resolver DNS Firewall rule resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverFirewallDomainList } from "./.gen/providers/aws/route53-resolver-firewall-domain-list";
+import { Route53ResolverFirewallRule } from "./.gen/providers/aws/route53-resolver-firewall-rule";
+import { Route53ResolverFirewallRuleGroup } from "./.gen/providers/aws/route53-resolver-firewall-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Route53ResolverFirewallDomainList(this, "example", {
+      domains: ["example.com"],
+      name: "example",
+      tags: {},
+    });
+    const awsRoute53ResolverFirewallRuleGroupExample =
+      new Route53ResolverFirewallRuleGroup(this, "example_1", {
+        name: "example",
+        tags: {},
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ResolverFirewallRuleGroupExample.overrideLogicalId("example");
+    const awsRoute53ResolverFirewallRuleExample =
+      new Route53ResolverFirewallRule(this, "example_2", {
+        action: "BLOCK",
+        blockOverrideDnsType: "CNAME",
+        blockOverrideDomain: "example.com",
+        blockOverrideTtl: 1,
+        blockResponse: "OVERRIDE",
+        firewallDomainListId: example.id,
+        firewallRuleGroupId: Token.asString(
+          awsRoute53ResolverFirewallRuleGroupExample.id
+        ),
+        name: "example",
+        priority: 100,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ResolverFirewallRuleExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that lets you identify the rule, to manage and use it.
+* `action` - (Required) The action that DNS Firewall should take on a DNS query when it matches one of the domains in the rule's domain list. Valid values: `ALLOW`, `BLOCK`, `ALERT`.
+* `blockOverrideDnsType` - (Required if `blockResponse` is `OVERRIDE`) The DNS record's type. This determines the format of the record value that you provided in `blockOverrideDomain`. Valid values: `CNAME`.
+* `blockOverrideDomain` - (Required if `blockResponse` is `OVERRIDE`) The custom DNS record to send back in response to the query.
+* `blockOverrideTtl` - (Required if `blockResponse` is `OVERRIDE`) The recommended amount of time, in seconds, for the DNS resolver or web browser to cache the provided override record. Minimum value of 0. Maximum value of 604800.
+* `blockResponse` - (Required if `action` is `BLOCK`) The way that you want DNS Firewall to block the request. Valid values: `NODATA`, `NXDOMAIN`, `OVERRIDE`.
+* `firewallDomainListId` - (Required) The ID of the domain list that you want to use in the rule.
+* `firewallRuleGroupId` - (Required) The unique identifier of the firewall rule group where you want to create the rule.
+* `priority` - (Required) The setting that determines the processing order of the rule in the rule group. DNS Firewall processes the rules in a rule group by order of priority, starting from the lowest setting.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the rule.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall rules using the Route 53 Resolver DNS Firewall rule group ID and domain list ID separated by ':'. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall rules using the Route 53 Resolver DNS Firewall rule group ID and domain list ID separated by ':'. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_rule.example rslvr-frg-0123456789abcdef:rslvr-fdl-0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule_group.markdown b/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule_group.markdown
new file mode 100644
index 00000000000..d94faf011af
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule_group.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule_group"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall rule group resource.
+---
+
+
+# Resource: aws_route53_resolver_firewall_rule_group
+
+Provides a Route 53 Resolver DNS Firewall rule group resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
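+ * A rule group is only a container: pair it with aws_route53_resolver_firewall_rule
+ * entries and a VPC association (aws_route53_resolver_firewall_rule_group_association)
+ * for it to take effect.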
+ */
+import { Route53ResolverFirewallRuleGroup } from "./.gen/providers/aws/route53-resolver-firewall-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53ResolverFirewallRuleGroup(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that lets you identify the rule group, to manage and use it.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN (Amazon Resource Name) of the rule group.
+* `id` - The ID of the rule group.
+* `ownerId` - The AWS account ID for the account that created the rule group. When a rule group is shared with your account, this is the account that has shared the rule group with you.
+* `shareStatus` - Whether the rule group is shared with other AWS accounts, or was shared with the current account by another AWS account. Sharing is configured through AWS Resource Access Manager (AWS RAM). Valid values: `NOT_SHARED`, `SHARED_BY_ME`, `SHARED_WITH_ME`
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall rule groups using the Route 53 Resolver DNS Firewall rule group ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall rule groups using the Route 53 Resolver DNS Firewall rule group ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_rule_group.example rslvr-frg-0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule_group_association.markdown b/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule_group_association.markdown
new file mode 100644
index 00000000000..f6a77ab75c8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_firewall_rule_group_association.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_firewall_rule_group_association"
+description: |-
+  Provides a Route 53 Resolver DNS Firewall rule group association resource.
+---
+
+
+# Resource: aws_route53_resolver_firewall_rule_group_association
+
+Provides a Route 53 Resolver DNS Firewall rule group association resource.
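+
+DNS Firewall filters VPC traffic starting from the associated rule group with the lowest numeric `priority` setting; see the `priority` argument below.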
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverFirewallRuleGroup } from "./.gen/providers/aws/route53-resolver-firewall-rule-group";
+import { Route53ResolverFirewallRuleGroupAssociation } from "./.gen/providers/aws/route53-resolver-firewall-rule-group-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Route53ResolverFirewallRuleGroup(this, "example", {
+      name: "example",
+    });
+    const awsRoute53ResolverFirewallRuleGroupAssociationExample =
+      new Route53ResolverFirewallRuleGroupAssociation(this, "example_1", {
+        firewallRuleGroupId: example.id,
+        name: "example",
+        priority: 100,
+        vpcId: Token.asString(awsVpcExample.id),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ResolverFirewallRuleGroupAssociationExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A name that lets you identify the rule group association, to manage and use it.
+* `firewallRuleGroupId` - (Required) The unique identifier of the firewall rule group.
+* `mutationProtection` - (Optional) If enabled, this setting disallows modification or removal of the association, to help prevent against accidentally altering DNS firewall protections. Valid values: `ENABLED`, `DISABLED`.
+* `priority` - (Required) The setting that determines the processing order of the rule group among the rule groups that you associate with the specified VPC. DNS Firewall filters VPC traffic starting from the rule group with the lowest numeric priority setting.
+* `vpcId` - (Required) The unique identifier of the VPC that you want to associate with the rule group.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN (Amazon Resource Name) of the firewall rule group association.
+* `id` - The identifier for the association.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver DNS Firewall rule group associations using the Route 53 Resolver DNS Firewall rule group association ID.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver DNS Firewall rule group associations using the Route 53 Resolver DNS Firewall rule group association ID. For example:
+
+```console
+% terraform import aws_route53_resolver_firewall_rule_group_association.example rslvr-frgassoc-0123456789abcdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_query_log_config.html.markdown b/website/docs/cdktf/typescript/r/route53_resolver_query_log_config.html.markdown
new file mode 100644
index 00000000000..a03a3c906a8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_query_log_config.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_query_log_config"
+description: |-
+  Provides a Route 53 Resolver query logging configuration resource.
+---
+
+
+# Resource: aws_route53_resolver_query_log_config
+
+Provides a Route 53 Resolver query logging configuration resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverQueryLogConfig } from "./.gen/providers/aws/route53-resolver-query-log-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53ResolverQueryLogConfig(this, "example", {
+      destinationArn: Token.asString(awsS3BucketExample.arn),
+      name: "example",
+      tags: {
+        Environment: "Prod",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `destinationArn` - (Required) The ARN of the resource that you want Route 53 Resolver to send query logs to.
+You can send query logs to an [S3 bucket](s3_bucket.html), a [CloudWatch Logs log group](cloudwatch_log_group.html), or a [Kinesis Data Firehose delivery stream](kinesis_firehose_delivery_stream.html).
+* `name` - (Required) The name of the Route 53 Resolver query logging configuration.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Route 53 Resolver query logging configuration.
+* `arn` - The ARN (Amazon Resource Name) of the Route 53 Resolver query logging configuration.
+* `ownerId` - The AWS account ID of the account that created the query logging configuration.
+* `shareStatus` - An indication of whether the query logging configuration is shared with other AWS accounts, or was shared with the current account by another AWS account.
+Sharing is configured through AWS Resource Access Manager (AWS RAM).
+Values are `NOT_SHARED`, `SHARED_BY_ME` or `SHARED_WITH_ME`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver query logging configurations using the Route 53 Resolver query logging configuration ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver query logging configurations using the Route 53 Resolver query logging configuration ID. For example:
+
+```console
+% terraform import aws_route53_resolver_query_log_config.example rqlc-92edc3b1838248bf
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_query_log_config_association.html.markdown b/website/docs/cdktf/typescript/r/route53_resolver_query_log_config_association.html.markdown
new file mode 100644
index 00000000000..be62c42bbb3
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_query_log_config_association.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_query_log_config_association"
+description: |-
+  Provides a Route 53 Resolver query logging configuration association resource.
+---
+
+
+# Resource: aws_route53_resolver_query_log_config_association
+
+Provides a Route 53 Resolver query logging configuration association resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverQueryLogConfigAssociation } from "./.gen/providers/aws/route53-resolver-query-log-config-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53ResolverQueryLogConfigAssociation(this, "example", {
+      resolverQueryLogConfigId: Token.asString(
+        awsRoute53ResolverQueryLogConfigExample.id
+      ),
+      resourceId: Token.asString(awsVpcExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resolverQueryLogConfigId` - (Required) The ID of the [Route 53 Resolver query logging configuration](route53_resolver_query_log_config.html) that you want to associate a VPC with.
+* `resourceId` - (Required) The ID of a VPC that you want this query logging configuration to log queries for.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Route 53 Resolver query logging configuration association.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Resolver query logging configuration associations using the Route 53 Resolver query logging configuration association ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Resolver query logging configuration associations using the Route 53 Resolver query logging configuration association ID. For example:
+
+```console
+% terraform import aws_route53_resolver_query_log_config_association.example rqlca-b320624fef3c4d70
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_rule.html.markdown b/website/docs/cdktf/typescript/r/route53_resolver_rule.html.markdown
new file mode 100644
index 00000000000..fca0ed95395
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_rule.html.markdown
@@ -0,0 +1,124 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_rule"
+description: |-
+  Provides a Route53 Resolver rule.
+---
+
+
+# Resource: aws_route53_resolver_rule
+
+Provides a Route53 Resolver rule.
+
+## Example Usage
+
+### System rule
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverRule } from "./.gen/providers/aws/route53-resolver-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53ResolverRule(this, "sys", {
+      domainName: "subdomain.example.com",
+      ruleType: "SYSTEM",
+    });
+  }
+}
+
+```
+
+### Forward rule
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53ResolverRule } from "./.gen/providers/aws/route53-resolver-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53ResolverRule(this, "fwd", {
+      domainName: "example.com",
+      name: "example",
+      resolverEndpointId: foo.id,
+      ruleType: "FORWARD",
+      tags: {
+        Environment: "Prod",
+      },
+      targetIp: [
+        {
+          ip: "123.45.67.89",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `domainName` - (Required) DNS queries for this domain name are forwarded to the IP addresses that are specified using `targetIp`.
+* `ruleType` - (Required) The rule type. Valid values are `FORWARD`, `SYSTEM` and `RECURSIVE`.
+* `name` - (Optional) A friendly name that lets you easily find a rule in the Resolver dashboard in the Route 53 console.
+* `resolverEndpointId` - (Optional) The ID of the outbound resolver endpoint that you want to use to route DNS queries to the IP addresses that you specify using `targetIp`.
+This argument should only be specified for `FORWARD` type rules.
+* `targetIp` - (Optional) Configuration block(s) indicating the IPs that you want Resolver to forward DNS queries to (documented below).
+This argument should only be specified for `FORWARD` type rules.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `targetIp` object supports the following:
+
+* `ip` - (Required) One IP address that you want to forward DNS queries to. You can specify only IPv4 addresses.
+* `port` - (Optional) The port at `ip` that you want to forward DNS queries to. Default value is `53`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the resolver rule.
+* `arn` - The ARN (Amazon Resource Name) for the resolver rule.
+* `ownerId` - When a rule is shared with another AWS account, the account ID of the account that the rule is shared with.
+* `shareStatus` - Whether the rule is shared and, if so, whether the current account is sharing the rule with another account, or another account is sharing the rule with the current account.
+Values are `NOT_SHARED`, `SHARED_BY_ME` or `SHARED_WITH_ME`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Resolver rules using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Resolver rules using the `id`. For example:
+
+```console
+% terraform import aws_route53_resolver_rule.sys rslvr-rr-0123456789abcdef0
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_resolver_rule_association.html.markdown b/website/docs/cdktf/typescript/r/route53_resolver_rule_association.html.markdown
new file mode 100644
index 00000000000..eb27be46d0d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_resolver_rule_association.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Route 53 Resolver"
+layout: "aws"
+page_title: "AWS: aws_route53_resolver_rule_association"
+description: |-
+  Provides a Route53 Resolver rule association.
+---
+
+
+# Resource: aws_route53_resolver_rule_association
+
+Provides a Route53 Resolver rule association.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
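+ * Associating the rule (sys) with the VPC (foo) makes Resolver use the rule
+ * for matching DNS queries that originate in that VPC.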
+ */ +import { Route53ResolverRuleAssociation } from "./.gen/providers/aws/route53-resolver-rule-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53ResolverRuleAssociation(this, "example", { + resolverRuleId: sys.id, + vpcId: foo.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `resolverRuleId` - (Required) The ID of the resolver rule that you want to associate with the VPC. +* `vpcId` - (Required) The ID of the VPC that you want to associate the resolver rule with. +* `name` - (Optional) A name for the association that you're creating between a resolver rule and a VPC. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the resolver rule association. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Resolver rule associations using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 Resolver rule associations using the `id`. For example: + +```console +% terraform import aws_route53_resolver_rule_association.example rslvr-rrassoc-97242eaf88example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53_traffic_policy.html.markdown b/website/docs/cdktf/typescript/r/route53_traffic_policy.html.markdown new file mode 100644 index 00000000000..38dd1fea34d --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53_traffic_policy.html.markdown @@ -0,0 +1,81 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_traffic_policy" +description: |- + Manages a Route53 Traffic Policy +--- + + + +# Resource: aws_route53_traffic_policy + +Manages a Route53 Traffic Policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53TrafficPolicy } from "./.gen/providers/aws/route53-traffic-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53TrafficPolicy(this, "example", { + comment: "example comment", + document: + '{\n "AWSPolicyFormatVersion": "2015-10-01",\n "RecordType": "A",\n "Endpoints": {\n "endpoint-start-NkPh": {\n "Type": "value",\n "Value": "10.0.0.2"\n }\n },\n "StartEndpoint": "endpoint-start-NkPh"\n}\n\n', + name: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the traffic policy. +* `document` - (Required) Policy document. This is a JSON formatted string. 
For more information about building Route53 traffic policy documents, see the [AWS Route53 Traffic Policy document format](https://docs.aws.amazon.com/Route53/latest/APIReference/api-policies-traffic-policy-document-format.html) + +The following arguments are optional: + +* `comment` - (Optional) Comment for the traffic policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of the traffic policy +* `type` - DNS type of the resource record sets that Amazon Route 53 creates when you use a traffic policy to create a traffic policy instance. +* `version` - Version number of the traffic policy. This value is automatically incremented by AWS after each update of this resource. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Traffic Policy using the `id` and `version`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 Traffic Policy using the `id` and `version`. For example: + +```console +% terraform import aws_route53_traffic_policy.example 01a52019-d16f-422a-ae72-c306d2b6df7e/1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53_traffic_policy_instance.html.markdown b/website/docs/cdktf/typescript/r/route53_traffic_policy_instance.html.markdown new file mode 100644 index 00000000000..fe55416d55c --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53_traffic_policy_instance.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_traffic_policy_instance" +description: |- + Provides a Route53 traffic policy instance resource. +--- + + + +# Resource: aws_route53_traffic_policy_instance + +Provides a Route53 traffic policy instance resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53TrafficPolicyInstance } from "./.gen/providers/aws/route53-traffic-policy-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53TrafficPolicyInstance(this, "test", { + hostedZoneId: "Z033120931TAQO548OGJC", + name: "test.example.com", + trafficPolicyId: "b3gb108f-ea6f-45a5-baab-9d112d8b4037", + trafficPolicyVersion: 1, + ttl: 360, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Domain name for which Amazon Route 53 responds to DNS queries by using the resource record sets that Route 53 creates for this traffic policy instance. +* `trafficPolicyId` - (Required) ID of the traffic policy that you want to use to create resource record sets in the specified hosted zone. 
+* `trafficPolicyVersion` - (Required) Version of the traffic policy +* `hostedZoneId` - (Required) ID of the hosted zone that you want Amazon Route 53 to create resource record sets in by using the configuration in a traffic policy. +* `ttl` - (Required) TTL that you want Amazon Route 53 to assign to all the resource record sets that it creates in the specified hosted zone. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - ID of traffic policy instance. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 traffic policy instance using its id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 traffic policy instance using its id. For example: + +```console +% terraform import aws_route53_traffic_policy_instance.test df579d9a-6396-410e-ac22-e7ad60cf9e7e +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53_vpc_association_authorization.html.markdown b/website/docs/cdktf/typescript/r/route53_vpc_association_authorization.html.markdown new file mode 100644 index 00000000000..2cef77517db --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53_vpc_association_authorization.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_vpc_association_authorization" +description: |- + Authorizes a VPC in a different account to be associated with a local Route53 Hosted Zone +--- + + + +# Resource: aws_route53_vpc_association_authorization + +Authorizes a VPC in a different account to be associated with a local Route53 Hosted Zone. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { Route53VpcAssociationAuthorization } from "./.gen/providers/aws/route53-vpc-association-authorization"; +import { Route53Zone } from "./.gen/providers/aws/route53-zone"; +import { Route53ZoneAssociation } from "./.gen/providers/aws/route53-zone-association"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", {}); + const alternate = new AwsProvider(this, "aws_1", { + alias: "alternate", + }); + const awsVpcAlternate = new Vpc(this, "alternate", { + cidrBlock: "10.7.0.0/16", + enableDnsHostnames: true, + enableDnsSupport: true, + provider: alternate, + }); + const example = new Vpc(this, "example", { + cidrBlock: "10.6.0.0/16", + enableDnsHostnames: true, + enableDnsSupport: true, + }); + const awsRoute53ZoneExample = new Route53Zone(this, "example_4", { + lifecycle: { + ignoreChanges: [vpc], + }, + name: "example.com", + vpc: [ + { + vpcId: example.id, + }, + ], + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsRoute53ZoneExample.overrideLogicalId("example"); + const awsRoute53VpcAssociationAuthorizationExample = + new Route53VpcAssociationAuthorization(this, "example_5", { + vpcId: Token.asString(awsVpcAlternate.id), + zoneId: Token.asString(awsRoute53ZoneExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53VpcAssociationAuthorizationExample.overrideLogicalId("example"); + const awsRoute53ZoneAssociationExample = new Route53ZoneAssociation( + this, + "example_6", + { + provider: alternate, + vpcId: Token.asString( + awsRoute53VpcAssociationAuthorizationExample.vpcId + ), + zoneId: Token.asString( + awsRoute53VpcAssociationAuthorizationExample.zoneId + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsRoute53ZoneAssociationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `zoneId` - (Required) The ID of the private hosted zone that you want to authorize associating a VPC with. +* `vpcId` - (Required) The VPC to authorize for association with the private hosted zone. +* `vpcRegion` - (Optional) The VPC's region. Defaults to the region of the AWS provider. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The calculated unique identifier for the association. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 VPC Association Authorizations using the Hosted Zone ID and VPC ID, separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route 53 VPC Association Authorizations using the Hosted Zone ID and VPC ID, separated by a colon (`:`). For example: + +```console +% terraform import aws_route53_vpc_association_authorization.example Z123456ABCDEFG:vpc-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53_zone.html.markdown b/website/docs/cdktf/typescript/r/route53_zone.html.markdown new file mode 100644 index 00000000000..b9dce88b654 --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53_zone.html.markdown @@ -0,0 +1,159 @@ +--- +subcategory: "Route 53" +layout: "aws" +page_title: "AWS: aws_route53_zone" +description: |- + Manages a Route53 Hosted Zone +--- + + + +# Resource: aws_route53_zone + +Manages a Route53 Hosted Zone. For managing Domain Name System Security Extensions (DNSSEC), see the [`awsRoute53KeySigningKey`](route53_key_signing_key.html) and [`awsRoute53HostedZoneDnssec`](route53_hosted_zone_dnssec.html) resources. + +## Example Usage + +### Public Zone + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { Route53Zone } from "./.gen/providers/aws/route53-zone";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53Zone(this, "primary", {
+      name: "example.com",
+    });
+  }
+}
+
+```
+
+### Public Subdomain Zone
+
+For use in subdomains, note that you need to create an
+`awsRoute53Record` of type `ns` as well as the subdomain
+zone.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+import { Route53Zone } from "./.gen/providers/aws/route53-zone";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const dev = new Route53Zone(this, "dev", {
+      name: "dev.example.com",
+      tags: {
+        Environment: "dev",
+      },
+    });
+    const main = new Route53Zone(this, "main", {
+      name: "example.com",
+    });
+    new Route53Record(this, "dev-ns", {
+      name: "dev.example.com",
+      records: Token.asList(dev.nameServers),
+      ttl: Token.asNumber("30"),
+      type: "NS",
+      zoneId: main.zoneId,
+    });
+  }
+}
+
+```
+
+### Private Zone
+
+~> **NOTE:** Terraform provides both exclusive VPC associations defined in-line in this resource via `vpc` configuration blocks and a separate [Zone VPC Association](/docs/providers/aws/r/route53_zone_association.html) resource. At this time, you cannot use in-line VPC associations in conjunction with any `awsRoute53ZoneAssociation` resources with the same zone ID; otherwise, it will cause a perpetual difference in plan output. You can optionally use the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignoreChanges` to manage additional associations via the `awsRoute53ZoneAssociation` resource.
+
+~> **NOTE:** Private zones require at least one VPC association at all times.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53Zone } from "./.gen/providers/aws/route53-zone";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53Zone(this, "private", {
+      name: "example.com",
+      vpc: [
+        {
+          vpcId: example.id,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) This is the name of the hosted zone.
+* `comment` - (Optional) A comment for the hosted zone. Defaults to 'Managed by Terraform'.
+* `delegationSetId` - (Optional) The ID of the reusable delegation set whose NS records you want to assign to the hosted zone. Conflicts with `vpc` as delegation sets can only be used for public zones.
+* `forceDestroy` - (Optional) Whether to destroy all records (possibly managed outside of Terraform) in the zone when destroying the zone.
+* `tags` - (Optional) A map of tags to assign to the zone.
 If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `vpc` - (Optional) Configuration block(s) specifying VPC(s) to associate with a private hosted zone. Conflicts with the `delegationSetId` argument in this resource and any [`awsRoute53ZoneAssociation` resource](/docs/providers/aws/r/route53_zone_association.html) specifying the same zone ID. Detailed below.
+
+### vpc Argument Reference
+
+* `vpcId` - (Required) ID of the VPC to associate.
+* `vpcRegion` - (Optional) Region of the VPC to associate. Defaults to AWS provider region.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Hosted Zone.
+* `zoneId` - The Hosted Zone ID. This can be referenced by zone records.
+* `nameServers` - A list of name servers in the associated (or default) delegation set.
+  Find more about delegation sets in [AWS docs](https://docs.aws.amazon.com/Route53/latest/APIReference/actions-on-reusable-delegation-sets.html).
+* `primaryNameServer` - The Route 53 name server that created the SOA record.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Zones using the zone `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Zones using the zone `id`. For example:
+
+```console
+% terraform import aws_route53_zone.myzone Z1D633PJN98FT9
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53_zone_association.html.markdown b/website/docs/cdktf/typescript/r/route53_zone_association.html.markdown
new file mode 100644
index 00000000000..8f65174f114
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53_zone_association.html.markdown
@@ -0,0 +1,132 @@
+---
+subcategory: "Route 53"
+layout: "aws"
+page_title: "AWS: aws_route53_zone_association"
+description: |-
+  Manages a Route53 Hosted Zone VPC association
+---
+
+
+
+# Resource: aws_route53_zone_association
+
+Manages a Route53 Hosted Zone VPC association. VPC associations can only be made on private zones. See the [`awsRoute53VpcAssociationAuthorization` resource](route53_vpc_association_authorization.html) for setting up cross-account associations.
+
+~> **NOTE:** Unless explicit association ordering is required (e.g., a separate cross-account association authorization), usage of this resource is not recommended. Use the `vpc` configuration blocks available within the [`awsRoute53Zone` resource](/docs/providers/aws/r/route53_zone.html) instead.
+
+~> **NOTE:** Terraform provides both this standalone Zone VPC Association resource and exclusive VPC associations defined in-line in the [`awsRoute53Zone` resource](/docs/providers/aws/r/route53_zone.html) via `vpc` configuration blocks.
 At this time, you cannot use those in-line VPC associations in conjunction with this resource and the same zone ID; otherwise, it will cause a perpetual difference in plan output. You can optionally use the generic Terraform resource [lifecycle configuration block](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html) with `ignoreChanges` in the `awsRoute53Zone` resource to manage additional associations via this resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53Zone } from "./.gen/providers/aws/route53-zone";
+import { Route53ZoneAssociation } from "./.gen/providers/aws/route53-zone-association";
+import { Vpc } from "./.gen/providers/aws/vpc";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const primary = new Vpc(this, "primary", {
+      cidrBlock: "10.6.0.0/16",
+      enableDnsHostnames: true,
+      enableDnsSupport: true,
+    });
+    const secondary = new Vpc(this, "secondary", {
+      cidrBlock: "10.7.0.0/16",
+      enableDnsHostnames: true,
+      enableDnsSupport: true,
+    });
+    const example = new Route53Zone(this, "example", {
+      lifecycle: {
+        ignoreChanges: [vpc],
+      },
+      name: "example.com",
+      vpc: [
+        {
+          vpcId: primary.id,
+        },
+      ],
+    });
+    const awsRoute53ZoneAssociationSecondary = new Route53ZoneAssociation(
+      this,
+      "secondary_3",
+      {
+        vpcId: secondary.id,
+        zoneId: example.zoneId,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsRoute53ZoneAssociationSecondary.overrideLogicalId("secondary");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `zoneId` - (Required) The private hosted zone to associate.
+* `vpcId` - (Required) The VPC to associate with the private hosted zone.
+* `vpcRegion` - (Optional) The VPC's region. Defaults to the region of the AWS provider.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The calculated unique identifier for the association.
+* `owningAccount` - The account ID of the account that created the hosted zone.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route 53 Hosted Zone Associations using the Hosted Zone ID and VPC ID, separated by a colon (`:`).
 For example:
+
+The VPC is in the same region where you have configured the Terraform AWS Provider:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The VPC is _not_ in the same region where you have configured the Terraform AWS Provider:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route 53 Hosted Zone Associations using the Hosted Zone ID and VPC ID, separated by a colon (`:`). For example:
+
+The VPC is in the same region where you have configured the Terraform AWS Provider:
+
+```console
+% terraform import aws_route53_zone_association.example Z123456ABCDEFG:vpc-12345678
+```
+
+The VPC is _not_ in the same region where you have configured the Terraform AWS Provider:
+
+```console
+% terraform import aws_route53_zone_association.example Z123456ABCDEFG:vpc-12345678:us-east-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53domains_registered_domain.html.markdown b/website/docs/cdktf/typescript/r/route53domains_registered_domain.html.markdown
new file mode 100644
index 00000000000..f56df035704
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53domains_registered_domain.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "Route 53 Domains"
+layout: "aws"
+page_title: "AWS: aws_route53domains_registered_domain"
+description: |-
+  Provides a resource to manage a domain that has been registered and associated with the current AWS account.
+---
+
+
+
+# Resource: aws_route53domains_registered_domain
+
+Provides a resource to manage a domain that has been [registered](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/registrar-tld-list.html) and associated with the current AWS account.
+
+**This is an advanced resource** and has special caveats to be aware of when using it. Please read this document in its entirety before using this resource.
+
+The `awsRoute53DomainsRegisteredDomain` resource behaves differently from normal resources in that if a domain has been registered, Terraform does not _register_ this domain, but instead "adopts" it into management. `terraform destroy` does not delete the domain but does remove the resource from Terraform state.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53DomainsRegisteredDomain } from "./.gen/providers/aws/route53-domains-registered-domain";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53DomainsRegisteredDomain(this, "example", {
+      domainName: "example.com",
+      nameServer: [
+        {
+          name: "ns-195.awsdns-24.com",
+        },
+        {
+          name: "ns-874.awsdns-45.net",
+        },
+      ],
+      tags: {
+        Environment: "test",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** You must specify the same privacy setting for `adminPrivacy`, `registrantPrivacy` and `techPrivacy`.
+
+This resource supports the following arguments:
+
+* `adminContact` - (Optional) Details about the domain administrative contact.
+* `adminPrivacy` - (Optional) Whether domain administrative contact information is concealed from WHOIS queries. Default: `true`.
+* `autoRenew` - (Optional) Whether the domain registration is set to renew automatically. Default: `true`.
+* `domainName` - (Required) The name of the registered domain.
+* `nameServer` - (Optional) The list of nameservers for the domain.
+* `registrantContact` - (Optional) Details about the domain registrant.
+* `registrantPrivacy` - (Optional) Whether domain registrant contact information is concealed from WHOIS queries. Default: `true`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `techContact` - (Optional) Details about the domain technical contact.
+* `techPrivacy` - (Optional) Whether domain technical contact information is concealed from WHOIS queries. Default: `true`.
+* `transferLock` - (Optional) Whether the domain is locked for transfer. Default: `true`.
+
+The `adminContact`, `registrantContact` and `techContact` objects support the following:
+
+* `addressLine1` - (Optional) First line of the contact's address.
+* `addressLine2` - (Optional) Second line of contact's address, if any.
+* `city` - (Optional) The city of the contact's address.
+* `contactType` - (Optional) Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
+* `countryCode` - (Optional) Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
+* `email` - (Optional) Email address of the contact.
+* `extraParams` - (Optional) A key-value map of parameters required by certain top-level domains.
+* `fax` - (Optional) Fax number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
+* `firstName` - (Optional) First name of contact.
+* `lastName` - (Optional) Last name of contact.
+* `organizationName` - (Optional) Name of the organization for contact types other than `person`.
+* `phoneNumber` - (Optional) The phone number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
+* `state` - (Optional) The state or province of the contact's address.
+* `zipCode` - (Optional) The zip or postal code of the contact's address. + +The `nameServer` object supports the following: + +* `glueIps` - (Optional) Glue IP addresses of a name server. The list can contain only one IPv4 and one IPv6 address. +* `name` - (Required) The fully qualified host name of the name server. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The domain name. +* `abuseContactEmail` - Email address to contact to report incorrect contact information for a domain, to report that the domain is being used to send spam, to report that someone is cybersquatting on a domain name, or report some other type of abuse. +* `abuseContactPhone` - Phone number for reporting abuse. +* `creationDate` - The date when the domain was created as found in the response to a WHOIS query. +* `expirationDate` - The date when the registration for the domain is set to expire. +* `registrarName` - Name of the registrar of the domain as identified in the registry. +* `registrarUrl` - Web address of the registrar. +* `reseller` - Reseller of the domain. +* `statusList` - List of [domain name status codes](https://www.icann.org/resources/pages/epp-status-codes-2014-06-16-en). +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `updatedDate` - The last updated date of the domain as found in the response to a WHOIS query. +* `whoisServer` - The fully qualified name of the WHOIS server that can answer the WHOIS query for the domain. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30M`) +- `update` - (Default `30M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_cluster.html.markdown b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_cluster.html.markdown new file mode 100644 index 00000000000..d11f90e8941 --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_cluster.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "Route 53 Recovery Control Config" +layout: "aws" +page_title: "AWS: aws_route53recoverycontrolconfig_cluster" +description: |- + Provides an AWS Route 53 Recovery Control Config Cluster +--- + + + +# Resource: aws_route53recoverycontrolconfig_cluster + +Provides an AWS Route 53 Recovery Control Config Cluster. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53RecoverycontrolconfigCluster } from "./.gen/providers/aws/route53-recoverycontrolconfig-cluster"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53RecoverycontrolconfigCluster(this, "example", { + name: "georgefitzgerald", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Unique name describing the cluster. 
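+
+Downstream Recovery Control Config resources consume the cluster by its computed ARN (see the Attribute Reference below). As a minimal hand-written sketch (not `cdktf convert` output; the stack and output names are illustrative), the ARN can be surfaced as a stack output:
+
+```typescript
+// Hand-written sketch: expose the cluster's computed ARN so other stacks
+// or tooling can consume it. Reuses the "example" cluster from Example
+// Usage above; the output name is illustrative.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { Route53RecoverycontrolconfigCluster } from "./.gen/providers/aws/route53-recoverycontrolconfig-cluster";
+class ClusterArnOutput extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const cluster = new Route53RecoverycontrolconfigCluster(this, "example", {
+      name: "georgefitzgerald",
+    });
+    // `arn` is a computed attribute, resolved at apply time.
+    new TerraformOutput(this, "cluster_arn", {
+      value: cluster.arn,
+    });
+  }
+}
+```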
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cluster.
+* `clusterEndpoints` - List of 5 endpoints in 5 regions that can be used to talk to the cluster. See below.
+* `status` - Status of the cluster. `pending` when it is being created, `pendingDeletion` when it is being deleted, and `deployed` otherwise.
+
+### cluster_endpoints
+
+* `endpoint` - Cluster endpoint.
+* `region` - Region of the endpoint.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config cluster using the cluster ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Recovery Control Config cluster using the cluster ARN. For example:
+
+```console
+% terraform import aws_route53recoverycontrolconfig_cluster.mycluster arn:aws:route53-recovery-control::313517334327:cluster/f9ae13be-a11e-4ec7-8522-94a70468e6ea
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_control_panel.html.markdown b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_control_panel.html.markdown
new file mode 100644
index 00000000000..3c9a4a2c01d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_control_panel.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Route 53 Recovery Control Config"
+layout: "aws"
+page_title: "AWS: aws_route53recoverycontrolconfig_control_panel"
+description: |-
+  Provides an AWS Route 53 Recovery Control Config Control Panel
+---
+
+
+
+# Resource: aws_route53recoverycontrolconfig_control_panel
+
+Provides an AWS Route 53 Recovery Control Config Control Panel.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53RecoverycontrolconfigControlPanel } from "./.gen/providers/aws/route53-recoverycontrolconfig-control-panel";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53RecoverycontrolconfigControlPanel(this, "example", {
+      clusterArn:
+        "arn:aws:route53-recovery-control::123456789012:cluster/8d47920e-d789-437d-803a-2dcc4b204393",
+      name: "balmorhea",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `clusterArn` - (Required) ARN of the cluster in which this control panel will reside.
+* `name` - (Required) Name describing the control panel.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the control panel.
+* `defaultControlPanel` - Whether the control panel is the default.
+* `routingControlCount` - Number of routing controls in a control panel.
+* `status` - Status of the control panel: `pending` when it is being created/updated, `pendingDeletion` when it is being deleted, and `deployed` otherwise.
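+
+The hard-coded `clusterArn` in the Example Usage above can instead be a reference when the cluster is managed in the same stack. A minimal hand-written sketch (not `cdktf convert` output; resource names are illustrative):
+
+```typescript
+// Hand-written sketch: wire a control panel to a cluster by reference
+// instead of a literal ARN string; names here are illustrative.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Route53RecoverycontrolconfigCluster } from "./.gen/providers/aws/route53-recoverycontrolconfig-cluster";
+import { Route53RecoverycontrolconfigControlPanel } from "./.gen/providers/aws/route53-recoverycontrolconfig-control-panel";
+class ControlPanelSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const cluster = new Route53RecoverycontrolconfigCluster(this, "cluster", {
+      name: "example-cluster",
+    });
+    new Route53RecoverycontrolconfigControlPanel(this, "panel", {
+      // Terraform resolves this reference to the cluster ARN at apply time.
+      clusterArn: cluster.arn,
+      name: "example-panel",
+    });
+  }
+}
+```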
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config Control Panel using the control panel arn. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 Recovery Control Config Control Panel using the control panel arn. For example: + +```console +% terraform import aws_route53recoverycontrolconfig_control_panel.mypanel arn:aws:route53-recovery-control::313517334327:controlpanel/1bfba17df8684f5dab0467b71424f7e8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_routing_control.html.markdown b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_routing_control.html.markdown new file mode 100644 index 00000000000..f5dc6e86d3f --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_routing_control.html.markdown @@ -0,0 +1,103 @@ +--- +subcategory: "Route 53 Recovery Control Config" +layout: "aws" +page_title: "AWS: aws_route53recoverycontrolconfig_routing_control" +description: |- + Provides an AWS Route 53 Recovery Control Config Routing Control +--- + + + +# Resource: aws_route53recoverycontrolconfig_routing_control + +Provides an AWS Route 53 Recovery Control Config Routing Control. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53RecoverycontrolconfigRoutingControl } from "./.gen/providers/aws/route53-recoverycontrolconfig-routing-control"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53RecoverycontrolconfigRoutingControl(this, "example", { + clusterArn: + "arn:aws:route53-recovery-control::881188118811:cluster/8d47920e-d789-437d-803a-2dcc4b204393", + name: "tinlicker", + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53RecoverycontrolconfigRoutingControl } from "./.gen/providers/aws/route53-recoverycontrolconfig-routing-control"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53RecoverycontrolconfigRoutingControl(this, "example", { + clusterArn: + "arn:aws:route53-recovery-control::881188118811:cluster/8d47920e-d789-437d-803a-2dcc4b204393", + controlPanelArn: + "arn:aws:route53-recovery-control::428113431245:controlpanel/abd5fbfc052d4844a082dbf400f61da8", + name: "thomasoliver", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `clusterArn` - (Required) ARN of the cluster in which this routing control will reside. +* `name` - (Required) The name describing the routing control. 
+ +The following arguments are optional: + +* `controlPanelArn` - (Optional) ARN of the control panel in which this routing control will reside. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the routing control. +* `status` - Status of routing control. `pending` when it is being created/updated, `pendingDeletion` when it is being deleted, and `deployed` otherwise. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config Routing Control using the routing control arn. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 Recovery Control Config Routing Control using the routing control arn. For example: + +```console +% terraform import aws_route53recoverycontrolconfig_routing_control.mycontrol arn:aws:route53-recovery-control::313517334327:controlpanel/abd5fbfc052d4844a082dbf400f61da8/routingcontrol/d5d90e587870494b +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_safety_rule.html.markdown b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_safety_rule.html.markdown new file mode 100644 index 00000000000..94410e55301 --- /dev/null +++ b/website/docs/cdktf/typescript/r/route53recoverycontrolconfig_safety_rule.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "Route 53 Recovery Control Config" +layout: "aws" +page_title: "AWS: aws_route53recoverycontrolconfig_safety_rule" +description: |- + Provides an AWS Route 53 Recovery Control Config Safety Rule +--- + + + +# Resource: aws_route53recoverycontrolconfig_safety_rule + +Provides an AWS Route 53 Recovery Control Config Safety Rule + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53RecoverycontrolconfigSafetyRule } from "./.gen/providers/aws/route53-recoverycontrolconfig-safety-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53RecoverycontrolconfigSafetyRule(this, "example", { + assertedControls: [ + Token.asString( + awsRoute53RecoverycontrolconfigRoutingControlExample.arn + ), + ], + controlPanelArn: + "arn:aws:route53-recovery-control::313517334327:controlpanel/abd5fbfc052d4844a082dbf400f61da8", + name: "daisyguttridge", + ruleConfig: { + inverted: false, + threshold: 1, + type: "ATLEAST", + }, + waitPeriodMs: 5000, + }); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Route53RecoverycontrolconfigSafetyRule } from "./.gen/providers/aws/route53-recoverycontrolconfig-safety-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Route53RecoverycontrolconfigSafetyRule(this, "example", { + controlPanelArn: + "arn:aws:route53-recovery-control::313517334327:controlpanel/abd5fbfc052d4844a082dbf400f61da8", + gatingControls: [ + Token.asString( + awsRoute53RecoverycontrolconfigRoutingControlExample.arn + ), + ], + name: "i_o", + ruleConfig: { + inverted: false, + threshold: 1, + type: "ATLEAST", + }, + targetControls: [ + Token.asString( + awsRoute53RecoverycontrolconfigRoutingControlExample.arn + ), + ], + waitPeriodMs: 5000, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `controlPanelArn` - (Required) ARN of the control panel in which this safety rule will reside. +* `name` - (Required) Name describing the safety rule. +* `ruleConfig` - (Required) Configuration block for safety rule criteria. See below. +* `waitPeriodMs` - (Required) Evaluation period, in milliseconds (ms), during which any request against the target routing controls will fail. + +The following arguments are optional: + +* `assertedControls` - (Optional) Routing controls that are part of transactions that are evaluated to determine if a request to change a routing control state is allowed. +* `gatingControls` - (Optional) Gating controls for the new gating rule. That is, routing controls that are evaluated by the rule configuration that you specify. +* `targetControls` - (Optional) Routing controls that can only be set or unset if the specified `ruleConfig` evaluates to true for the specified `gatingControls`. + +### rule_config + +* `inverted` - (Required) Logical negation of the rule. +* `threshold` - (Required) Number of controls that must be set when you specify an `atleast` type rule. +* `type` - (Required) Rule type. Valid values are `atleast`, `and`, and `or`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the safety rule. +* `status` - Status of the safety rule. `pending` when it is being created/updated, `pendingDeletion` when it is being deleted, and `deployed` otherwise. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Control Config Safety Rule using the safety rule ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Route53 Recovery Control Config Safety Rule using the safety rule ARN. 
For example:
+
+```console
+% terraform import aws_route53recoverycontrolconfig_safety_rule.myrule arn:aws:route53-recovery-control::313517334327:controlpanel/1bfba17df8684f5dab0467b71424f7e8/safetyrule/3bacc77003364c0f
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53recoveryreadiness_cell.html.markdown b/website/docs/cdktf/typescript/r/route53recoveryreadiness_cell.html.markdown
new file mode 100644
index 00000000000..aa9fd7b4bd6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53recoveryreadiness_cell.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Route 53 Recovery Readiness"
+layout: "aws"
+page_title: "AWS: aws_route53recoveryreadiness_cell"
+description: |-
+  Provides an AWS Route 53 Recovery Readiness Cell
+---
+
+
+
+# Resource: aws_route53recoveryreadiness_cell
+
+Provides an AWS Route 53 Recovery Readiness Cell.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53RecoveryreadinessCell } from "./.gen/providers/aws/route53-recoveryreadiness-cell";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53RecoveryreadinessCell(this, "example", {
+      cellName: "us-west-2-failover-cell",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `cellName` - (Required) Unique name describing the cell.
+
+The following arguments are optional:
+
+* `cells` - (Optional) List of cell ARNs to add as nested fault domains within this cell.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the cell.
+* `parentReadinessScopes` - List of readiness scopes (recovery groups or cells) that contain this cell.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `5M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness cells using the cell name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Recovery Readiness cells using the cell name.
 For example:
+
+```console
+% terraform import aws_route53recoveryreadiness_cell.us-west-2-failover-cell us-west-2-failover-cell
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53recoveryreadiness_readiness_check.html.markdown b/website/docs/cdktf/typescript/r/route53recoveryreadiness_readiness_check.html.markdown
new file mode 100644
index 00000000000..1fd5c3c6239
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53recoveryreadiness_readiness_check.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Route 53 Recovery Readiness"
+layout: "aws"
+page_title: "AWS: aws_route53recoveryreadiness_readiness_check"
+description: |-
+  Provides an AWS Route 53 Recovery Readiness Readiness Check
+---
+
+
+
+# Resource: aws_route53recoveryreadiness_readiness_check
+
+Provides an AWS Route 53 Recovery Readiness Readiness Check.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53RecoveryreadinessReadinessCheck } from "./.gen/providers/aws/route53-recoveryreadiness-readiness-check";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53RecoveryreadinessReadinessCheck(this, "example", {
+      readinessCheckName: myCwAlarmCheck,
+      resourceSetName: myCwAlarmSet,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `readinessCheckName` - (Required) Unique name describing the readiness check.
+* `resourceSetName` - (Required) Name describing the resource set that will be monitored for readiness.
+
+The following arguments are optional:
+
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the readiness check.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `5M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness readiness checks using the readiness check name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Recovery Readiness readiness checks using the readiness check name.
 For example:
+
+```console
+% terraform import aws_route53recoveryreadiness_readiness_check.my-cw-alarm-check example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53recoveryreadiness_recovery_group.html.markdown b/website/docs/cdktf/typescript/r/route53recoveryreadiness_recovery_group.html.markdown
new file mode 100644
index 00000000000..cbb82bcbe17
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53recoveryreadiness_recovery_group.html.markdown
@@ -0,0 +1,83 @@
+---
+subcategory: "Route 53 Recovery Readiness"
+layout: "aws"
+page_title: "AWS: aws_route53recoveryreadiness_recovery_group"
+description: |-
+  Provides an AWS Route 53 Recovery Readiness Recovery Group
+---
+
+
+
+# Resource: aws_route53recoveryreadiness_recovery_group
+
+Provides an AWS Route 53 Recovery Readiness Recovery Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53RecoveryreadinessRecoveryGroup } from "./.gen/providers/aws/route53-recoveryreadiness-recovery-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53RecoveryreadinessRecoveryGroup(this, "example", {
+      recoveryGroupName: "my-high-availability-app",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `recoveryGroupName` - (Required) A unique name describing the recovery group.
+
+The following arguments are optional:
+
+* `cells` - (Optional) List of cell ARNs to add as nested fault domains within this recovery group.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the recovery group.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `5M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness recovery groups using the recovery group name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Recovery Readiness recovery groups using the recovery group name.
 For example:
+
+```console
+% terraform import aws_route53recoveryreadiness_recovery_group.my-high-availability-app my-high-availability-app
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route53recoveryreadiness_resource_set.html.markdown b/website/docs/cdktf/typescript/r/route53recoveryreadiness_resource_set.html.markdown
new file mode 100644
index 00000000000..da8829233d6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route53recoveryreadiness_resource_set.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "Route 53 Recovery Readiness"
+layout: "aws"
+page_title: "AWS: aws_route53recoveryreadiness_resource_set"
+description: |-
+  Provides an AWS Route 53 Recovery Readiness Resource Set
+---
+
+
+
+# Resource: aws_route53recoveryreadiness_resource_set
+
+Provides an AWS Route 53 Recovery Readiness Resource Set.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Route53RecoveryreadinessResourceSet } from "./.gen/providers/aws/route53-recoveryreadiness-resource-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Route53RecoveryreadinessResourceSet(this, "example", {
+      resourceSetName: myCwAlarmSet,
+      resourceSetType: "AWS::CloudWatch::Alarm",
+      resources: [
+        {
+          resourceArn: Token.asString(awsCloudwatchMetricAlarmExample.arn),
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceSetName` - (Required) Unique name describing the resource set.
+* `resourceSetType` - (Required) Type of the resources in the resource set.
+* `resources` - (Required) List of resources to add to this resource set. See below.
+
+The following arguments are optional:
+
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### resources
+
+* `dnsTargetResource` - (Required if `resourceArn` is not set) Component for DNS/Routing Control Readiness Checks.
+* `readinessScopes` - (Optional) Recovery group ARN or cell ARN that contains this resource set.
+* `resourceArn` - (Required if `dnsTargetResource` is not set) ARN of the resource.
+
+### dns_target_resource
+
+* `domainName` - (Optional) DNS Name that acts as the ingress point to a portion of the application.
+* `hostedZoneArn` - (Optional) Hosted Zone ARN that contains the DNS record with the provided name of the target resource.
+* `recordSetId` - (Optional) Route53 record set ID to uniquely identify a record given a `domainName` and a `recordType`.
+* `recordType` - (Optional) Type of DNS Record of the target resource.
+* `targetResource` - (Optional) Target resource that the Route 53 record specified with the above parameters points to.
+
+### target_resource
+
+* `nlbResource` - (Optional) NLB resource that a DNS Target Resource points to. Required if `r53Resource` is not set.
+* `r53Resource` - (Optional) Route53 resource that a DNS Target Resource record points to.
+
+### nlb_resource
+
+* `arn` - (Required) NLB resource ARN.
+
+### r53_resource
+
+* `domainName` - (Optional) Domain name that is targeted.
+* `recordSetId` - (Optional) Resource record set ID that is targeted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the resource set.
+* `resources.#.componentId` - Unique identifier for DNS Target Resources; used for readiness checks.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route53 Recovery Readiness resource sets using the resource set name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Route53RecoveryreadinessResourceSet } from "./.gen/providers/aws/route53-recoveryreadiness-resource-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Sketch (assumes cdktf 0.18+): generateConfigForImport expresses the
+    // `import` block above; the import ID is the resource set name.
+    Route53RecoveryreadinessResourceSet.generateConfigForImport(
+      this,
+      "example",
+      "example"
+    );
+  }
+}
+
+```
+
+Using `terraform import`, import Route53 Recovery Readiness resource sets using the resource set name. For example:
+
+```console
+% terraform import aws_route53recoveryreadiness_resource_set.my-cw-alarm-set example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route_table.html.markdown b/website/docs/cdktf/typescript/r/route_table.html.markdown
new file mode 100644
index 00000000000..5aa01418f5f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route_table.html.markdown
@@ -0,0 +1,174 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route_table"
+description: |-
+  Provides a resource to create a VPC routing table.
+---
+
+
+
+# Resource: aws_route_table
+
+Provides a resource to create a VPC routing table.
+
+~> **NOTE on Route Tables and Routes:** Terraform currently
+provides both a standalone [Route resource](route.html) and a Route Table resource with routes
+defined in-line. At this time you cannot use a Route Table with in-line routes
+in conjunction with any Route resources. Doing so will cause
+a conflict of rule settings and will overwrite rules.
+
+~> **NOTE on `gatewayId` and `natGatewayId`:** The AWS API is very forgiving with these two
+attributes and the `awsRouteTable` resource can be created with a NAT ID specified as a Gateway ID attribute.
+This _will_ lead to a permanent diff between your configuration and statefile, as the API returns the correct
+parameters in the returned route table. If you're experiencing constant diffs in your `awsRouteTable` resources,
+the first thing to check is whether or not you're specifying a NAT ID instead of a Gateway ID, or vice-versa.
+
+~> **NOTE on `propagatingVgws` and the `awsVpnGatewayRoutePropagation` resource:**
+If the `propagatingVgws` argument is present, it's not supported to _also_
+define route propagations using `awsVpnGatewayRoutePropagation`, since
+this resource will delete any propagating gateways not explicitly listed in
+`propagatingVgws`. Omit this argument when defining route propagation using
+the separate resource.
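+
+As a minimal sketch of the in-line propagation style described in the note above (the `awsVpcExample` and `awsVpnGatewayExample` references are assumed to be defined elsewhere, following the free-identifier convention of the generated examples):
+
+```typescript
+// Sketch, not generated by 'cdktf convert': in-line route propagation.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { RouteTable } from "./.gen/providers/aws/route-table";
+class PropagationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RouteTable(this, "example", {
+      vpcId: Token.asString(awsVpcExample.id),
+      // With `propagatingVgws` set, do not also manage propagation for this
+      // table with the separate `awsVpnGatewayRoutePropagation` resource.
+      propagatingVgws: [Token.asString(awsVpnGatewayExample.id)],
+    });
+  }
+}
+```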
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RouteTable } from "./.gen/providers/aws/route-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RouteTable(this, "example", {
+      route: [
+        {
+          cidrBlock: "10.0.1.0/24",
+          gatewayId: Token.asString(awsInternetGatewayExample.id),
+        },
+        {
+          egressOnlyGatewayId: Token.asString(
+            awsEgressOnlyInternetGatewayExample.id
+          ),
+          ipv6CidrBlock: "::/0",
+        },
+      ],
+      tags: {
+        Name: "example",
+      },
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+  }
+}
+
+```
+
+To subsequently remove all managed routes:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RouteTable } from "./.gen/providers/aws/route-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RouteTable(this, "example", {
+      route: [],
+      tags: {
+        Name: "example",
+      },
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `vpcId` - (Required) The VPC ID.
+* `route` - (Optional) A list of route objects. Their keys are documented below. This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+This means that omitting this argument is interpreted as ignoring any existing routes. To remove all managed routes, an empty list should be specified. See the example above.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `propagatingVgws` - (Optional) A list of virtual gateways for propagation.
+
+### route Argument Reference
+
+This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html).
+
+One of the following destination arguments must be supplied:
+
+* `cidrBlock` - (Required) The CIDR block of the route.
+* `ipv6CidrBlock` - (Optional) The IPv6 CIDR block of the route.
+* `destinationPrefixListId` - (Optional) The ID of a [managed prefix list](ec2_managed_prefix_list.html) destination of the route.
+
+One of the following target arguments must be supplied:
+
+* `carrierGatewayId` - (Optional) Identifier of a carrier gateway. This attribute can only be used when the VPC contains a subnet which is associated with a Wavelength Zone.
+* `coreNetworkArn` - (Optional) The Amazon Resource Name (ARN) of a core network.
+* `egressOnlyGatewayId` - (Optional) Identifier of a VPC Egress Only Internet Gateway.
+* `gatewayId` - (Optional) Identifier of a VPC internet gateway or a virtual private gateway.
+* `localGatewayId` - (Optional) Identifier of an Outpost local gateway.
+* `natGatewayId` - (Optional) Identifier of a VPC NAT gateway.
+* `networkInterfaceId` - (Optional) Identifier of an EC2 network interface.
+* `transitGatewayId` - (Optional) Identifier of an EC2 Transit Gateway.
+* `vpcEndpointId` - (Optional) Identifier of a VPC Endpoint.
+* `vpcPeeringConnectionId` - (Optional) Identifier of a VPC peering connection.
+
+Note that the default route, mapping the VPC's CIDR block to "local", is created implicitly and cannot be specified.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+~> **NOTE:** Only the target that is entered is exported as a readable
+attribute once the route resource is created.
+
+* `id` - The ID of the routing table.
+* `arn` - The ARN of the route table.
+* `ownerId` - The ID of the AWS account that owns the route table.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `2m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Route Tables using the route table `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { RouteTable } from "./.gen/providers/aws/route-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Sketch (assumes cdktf 0.18+): generateConfigForImport expresses the
+    // `import` block above; the import ID is the route table ID.
+    RouteTable.generateConfigForImport(this, "publicRt", "rtb-4e616f6d69");
+  }
+}
+
+```
+
+Using `terraform import`, import Route Tables using the route table `id`. For example:
+
+```console
+% terraform import aws_route_table.public_rt rtb-4e616f6d69
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/route_table_association.html.markdown b/website/docs/cdktf/typescript/r/route_table_association.html.markdown
new file mode 100644
index 00000000000..47ee594c601
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/route_table_association.html.markdown
@@ -0,0 +1,132 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_route_table_association"
+description: |-
+  Provides a resource to create an association between a route table and a subnet or a route table and an internet gateway or virtual private gateway.
+---
+
+
+
+# Resource: aws_route_table_association
+
+Provides a resource to create an association between a route table and a subnet or a route table and an
+internet gateway or virtual private gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RouteTableAssociation } from "./.gen/providers/aws/route-table-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RouteTableAssociation(this, "a", {
+      routeTableId: bar.id,
+      subnetId: foo.id,
+    });
+  }
+}
+
+```
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RouteTableAssociation } from "./.gen/providers/aws/route-table-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RouteTableAssociation(this, "b", {
+      gatewayId: foo.id,
+      routeTableId: bar.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** One of `subnetId` or `gatewayId` is required.
+
+This resource supports the following arguments:
+
+* `subnetId` - (Optional) The subnet ID to create an association. Conflicts with `gatewayId`.
+* `gatewayId` - (Optional) The gateway ID to create an association. Conflicts with `subnetId`.
+* `routeTableId` - (Required) The ID of the routing table to associate with.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the association.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `update` - (Default `2m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EC2 Route Table Associations using the associated resource ID and Route Table ID separated by a forward slash (`/`). For example:
+
+~> **NOTE:** Attempting to associate a route table with a subnet or gateway, where either is already associated, will result in an error (e.g., `Resource.AlreadyAssociated: the specified association for route table rtb-4176657279 conflicts with an existing association`) unless you first import the original association.
+
+With EC2 Subnets:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { RouteTableAssociation } from "./.gen/providers/aws/route-table-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Sketch (assumes cdktf 0.18+): import by subnet ID and route table ID.
+    RouteTableAssociation.generateConfigForImport(
+      this,
+      "assoc",
+      "subnet-6777656e646f6c796e/rtb-656c65616e6f72"
+    );
+  }
+}
+
+```
+
+With EC2 Internet Gateways:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { RouteTableAssociation } from "./.gen/providers/aws/route-table-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Sketch (assumes cdktf 0.18+): import by gateway ID and route table ID.
+    RouteTableAssociation.generateConfigForImport(
+      this,
+      "assoc",
+      "igw-01b3a60780f8d034a/rtb-656c65616e6f72"
+    );
+  }
+}
+
+```
+
+**Using `terraform import` to import** EC2 Route Table Associations using the associated resource ID and Route Table ID separated by a forward slash (`/`).
For example:
+
+With EC2 Subnets:
+
+```console
+% terraform import aws_route_table_association.assoc subnet-6777656e646f6c796e/rtb-656c65616e6f72
+```
+
+With EC2 Internet Gateways:
+
+```console
+% terraform import aws_route_table_association.assoc igw-01b3a60780f8d034a/rtb-656c65616e6f72
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rum_app_monitor.html.markdown b/website/docs/cdktf/typescript/r/rum_app_monitor.html.markdown
new file mode 100644
index 00000000000..de94c7c3694
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rum_app_monitor.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "CloudWatch RUM"
+layout: "aws"
+page_title: "AWS: aws_rum_app_monitor"
+description: |-
+  Provides a CloudWatch RUM App Monitor resource.
+---
+
+
+
+# Resource: aws_rum_app_monitor
+
+Provides a CloudWatch RUM App Monitor resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RumAppMonitor } from "./.gen/providers/aws/rum-app-monitor";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RumAppMonitor(this, "example", {
+      domain: "localhost",
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the app monitor.
+* `domain` - (Required) The top-level internet domain name for which your application has administrative authority.
+* `appMonitorConfiguration` - (Optional) Configuration data for the app monitor. See [app_monitor_configuration](#app_monitor_configuration) below.
+* `cwLogEnabled` - (Optional) Data collected by RUM is kept by RUM for 30 days and then deleted. This parameter specifies whether RUM sends a copy of this telemetry data to Amazon CloudWatch Logs in your account. This enables you to keep the telemetry data for more than 30 days, but it does incur Amazon CloudWatch Logs charges. Default value is `false`.
+* `customEvents` - (Optional) Specifies whether this app monitor allows the web client to define and send custom events. If you omit this parameter, custom events are `DISABLED`. See [custom_events](#custom_events) below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### app_monitor_configuration
+
+* `allowCookies` - (Optional) If you set this to `true`, the RUM web client sets two cookies, a session cookie and a user cookie. The cookies allow the RUM web client to collect data relating to the number of users an application has and the behavior of the application across a sequence of events. Cookies are stored in the top-level domain of the current page.
+* `enableXray` - (Optional) If you set this to `true`, RUM enables X-Ray tracing for the user sessions that RUM samples. RUM adds an X-Ray trace header to allowed HTTP requests. It also records an X-Ray segment for allowed HTTP requests.
+* `excludedPages` - (Optional) A list of URLs in your website or application to exclude from RUM data collection.
+* `favoritePages` - (Optional) A list of pages in the CloudWatch RUM console that are to be displayed with a "favorite" icon.
+* `guestRoleArn` - (Optional) The ARN of the guest IAM role that is attached to the Amazon Cognito identity pool that is used to authorize the sending of data to RUM.
+* `identityPoolId` - (Optional) The ID of the Amazon Cognito identity pool that is used to authorize the sending of data to RUM.
+* `includedPages` - (Optional) If this app monitor is to collect data from only certain pages in your application, this structure lists those pages.
+* `sessionSampleRate` - (Optional) Specifies the percentage of user sessions to use for RUM data collection. Choosing a higher percentage gives you more data but also incurs more costs. The number you specify is the percentage of user sessions that will be used. Default value is `0.1`.
+* `telemetries` - (Optional) An array that lists the types of telemetry data that this app monitor is to collect. Valid values are `errors`, `performance`, and `http`.
+
+### custom_events
+
+* `status` - (Optional) Specifies whether this app monitor allows the web client to define and send custom events. The default is for custom events to be `DISABLED`. Valid values are `DISABLED` and `ENABLED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) specifying the app monitor.
+* `id` - The name of the app monitor, which also serves as its identifier.
+* `appMonitorId` - The unique ID of the app monitor. Useful for JS templates.
+* `cwLogGroup` - The name of the log group where the copies are stored.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch RUM App Monitor using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { RumAppMonitor } from "./.gen/providers/aws/rum-app-monitor";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Sketch (assumes cdktf 0.18+): generateConfigForImport expresses the
+    // `import` block above; the import ID is the app monitor name.
+    RumAppMonitor.generateConfigForImport(this, "example", "example");
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch RUM App Monitor using the `name`. For example:
+
+```console
+% terraform import aws_rum_app_monitor.example example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/rum_metrics_destination.html.markdown b/website/docs/cdktf/typescript/r/rum_metrics_destination.html.markdown
new file mode 100644
index 00000000000..e989b6a2372
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/rum_metrics_destination.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "CloudWatch RUM"
+layout: "aws"
+page_title: "AWS: aws_rum_metrics_destination"
+description: |-
+  Provides a CloudWatch RUM Metrics Destination resource.
+---
+
+
+
+# Resource: aws_rum_metrics_destination
+
+Provides a CloudWatch RUM Metrics Destination resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RumMetricsDestination } from "./.gen/providers/aws/rum-metrics-destination";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new RumMetricsDestination(this, "example", {
+      appMonitorName: Token.asString(awsRumAppMonitorExample.name),
+      destination: "CloudWatch",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `appMonitorName` - (Required) The name of the CloudWatch RUM app monitor that will send the metrics.
+* `destination` - (Required) Defines the destination to send the metrics to. Valid values are `CloudWatch` and `Evidently`. If you specify `Evidently`, you must also specify the ARN of the CloudWatch Evidently experiment that is to be the destination and an IAM role that has permission to write to the experiment.
+* `destinationArn` - (Optional) Use this parameter only if `destination` is `Evidently`. This parameter specifies the ARN of the Evidently experiment that will receive the extended metrics.
+* `iamRoleArn` - (Optional) This parameter is required if `destination` is `Evidently`. If `destination` is `CloudWatch`, do not use this parameter.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the CloudWatch RUM app monitor that will send the metrics.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch RUM Metrics Destination using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { RumMetricsDestination } from "./.gen/providers/aws/rum-metrics-destination";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Sketch (assumes cdktf 0.18+): generateConfigForImport expresses the
+    // `import` block above; the import ID is the app monitor name.
+    RumMetricsDestination.generateConfigForImport(this, "example", "example");
+  }
+}
+
+```
+
+Using `terraform import`, import CloudWatch RUM Metrics Destination using the `id`. For example:
+
+```console
+% terraform import aws_rum_metrics_destination.example example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_access_point.html.markdown b/website/docs/cdktf/typescript/r/s3_access_point.html.markdown
new file mode 100644
index 00000000000..c9618d34e42
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_access_point.html.markdown
@@ -0,0 +1,187 @@
+---
+subcategory: "S3 Control"
+layout: "aws"
+page_title: "AWS: aws_s3_access_point"
+description: |-
+  Manages an S3 Access Point.
+---
+
+
+
+# Resource: aws_s3_access_point
+
+Provides a resource to manage an S3 Access Point.
+
+~> **NOTE on Access Points and Access Point Policies:** Terraform provides both a standalone [Access Point Policy](s3control_access_point_policy.html) resource and an Access Point resource with a resource policy defined in-line. You cannot use an Access Point with in-line resource policy in conjunction with an Access Point Policy resource. Doing so will cause a conflict of policies and will overwrite the access point's resource policy.
+
+-> Advanced usage: To use a custom API endpoint for this Terraform resource, use the [`s3Control` endpoint provider configuration](/docs/providers/aws/index.html#s3control), not the `s3` endpoint provider configuration.
+ +## Example Usage + +### AWS Partition Bucket + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3AccessPoint } from "./.gen/providers/aws/s3-access-point"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const awsS3AccessPointExample = new S3AccessPoint(this, "example_1", { + bucket: example.id, + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3AccessPointExample.overrideLogicalId("example"); + } +} + +``` + +### S3 on Outposts Bucket + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3AccessPoint } from "./.gen/providers/aws/s3-access-point"; +import { S3ControlBucket } from "./.gen/providers/aws/s3-control-bucket"; +import { Vpc } from "./.gen/providers/aws/vpc"; +interface MyConfig { + outpostId: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const example = new S3ControlBucket(this, "example", { + bucket: "example", + outpostId: config.outpostId, + }); + const awsVpcExample = new Vpc(this, "example_1", { + cidrBlock: "10.0.0.0/16", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpcExample.overrideLogicalId("example"); + const awsS3AccessPointExample = new S3AccessPoint(this, "example_2", { + bucket: example.arn, + name: "example", + vpcConfiguration: { + vpcId: Token.asString(awsVpcExample.id), + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3AccessPointExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Name of an AWS Partition S3 Bucket or the ARN of S3 on Outposts Bucket that you want to associate this access point with. +* `name` - (Required) Name you want to assign to this access point. + +The following arguments are optional: + +* `accountId` - (Optional) AWS account ID for the owner of the bucket for which you want to create an access point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `bucketAccountId` - (Optional) AWS account ID associated with the S3 bucket associated with this access point. +* `policy` - (Optional) Valid JSON document that specifies the policy that you want to apply to this access point. Removing `policy` from your configuration or setting `policy` to null or an empty string (i.e., `policy = ""`) _will not_ delete the policy since it could have been set by `awsS3ControlAccessPointPolicy`. To remove the `policy`, set it to `"{}"` (an empty JSON document). 
+* `publicAccessBlockConfiguration` - (Optional) Configuration block to manage the `publicAccessBlock` configuration that you want to apply to this Amazon S3 bucket. You can enable the configuration options in any combination. Detailed below.
+* `vpcConfiguration` - (Optional) Configuration block to restrict access to this access point to requests from the specified Virtual Private Cloud (VPC). Required for S3 on Outposts. Detailed below.
+
+### public_access_block_configuration Configuration Block
+
+The following arguments are optional:
+
+* `blockPublicAcls` - (Optional) Whether Amazon S3 should block public ACLs for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing policies or ACLs. When set to `true` causes the following behavior:
+    * PUT Bucket acl and PUT Object acl calls fail if the specified ACL is public.
+    * PUT Object calls fail if the request includes a public ACL.
+    * PUT Bucket calls fail if the request includes a public ACL.
+* `blockPublicPolicy` - (Optional) Whether Amazon S3 should block public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing bucket policies. When set to `true` causes Amazon S3 to:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignorePublicAcls` - (Optional) Whether Amazon S3 should ignore public ACLs for buckets in this account. Defaults to `true`. Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set. When set to `true` causes Amazon S3 to:
+    * Ignore all public ACLs on buckets in this account and any objects that they contain.
+* `restrictPublicBuckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`:
+    * Only the bucket owner and AWS Services can access buckets with public policies.
+
+### vpc_configuration Configuration Block
+
+The following arguments are required:
+
+* `vpcId` - (Required) This access point will only allow connections from the specified VPC ID.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `alias` - Alias of the S3 Access Point.
+* `arn` - ARN of the S3 Access Point.
+* `domainName` - DNS domain name of the S3 Access Point in the format _`name`_-_`accountId`_.s3-accesspoint._region_.amazonaws.com.
+Note: S3 access points only support secure access by HTTPS. HTTP isn't supported.
+* `endpoints` - VPC endpoints for the S3 Access Point.
+* `hasPublicAccessPolicy` - Indicates whether this access point currently has a policy that allows public access.
+* `id` - For Access Point of an AWS Partition S3 Bucket, the AWS account ID and access point name separated by a colon (`:`). For S3 on Outposts Bucket, the ARN of the Access Point.
+* `networkOrigin` - Indicates whether this access point allows access from the public Internet. Values are `VPC` (the access point doesn't allow access from the public Internet) and `Internet` (the access point allows access from the public Internet, subject to the access point and bucket access policies).
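+
+As a short sketch of consuming these computed attributes (the bucket name is assumed; the pattern otherwise mirrors the generated examples above), the access point's `arn` and `alias` can be exported with `TerraformOutput`:
+
+```typescript
+// Sketch, not generated by 'cdktf convert': exporting access point attributes.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { S3AccessPoint } from "./.gen/providers/aws/s3-access-point";
+class AccessPointOutputs extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3AccessPoint(this, "example", {
+      bucket: "example-bucket", // assumed bucket name
+      name: "example",
+    });
+    // Computed attributes are available as typed getters on the construct.
+    new TerraformOutput(this, "access_point_arn", { value: example.arn });
+    new TerraformOutput(this, "access_point_alias", { value: example.alias });
+  }
+}
+```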
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import this resource using the `accountId` and `name` separated by a colon (`:`) for Access Points associated with an AWS Partition S3 Bucket or the ARN for Access Points associated with an S3 on Outposts Bucket. For example: + +Import using the `accountId` and `name` separated by a colon (`:`) for Access Points associated with an AWS Partition S3 Bucket: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Import using the ARN for Access Points associated with an S3 on Outposts Bucket: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import.** For example: + +Import using the `accountId` and `name` separated by a colon (`:`) for Access Points associated with an AWS Partition S3 Bucket: + +```console +% terraform import aws_s3_access_point.example 123456789012:example +``` + +Import using the ARN for Access Points associated with an S3 on Outposts Bucket: + +```console +% terraform import aws_s3_access_point.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-1234567890123456/accesspoint/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_account_public_access_block.html.markdown b/website/docs/cdktf/typescript/r/s3_account_public_access_block.html.markdown new file mode 100644 index 00000000000..63c146ad306 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_account_public_access_block.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3_account_public_access_block" +description: |- + Manages S3 account-level Public Access Block Configuration +--- + + + +# Resource: aws_s3_account_public_access_block + +Manages S3 account-level Public Access Block configuration. For more information about these settings, see the [AWS S3 Block Public Access documentation](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html). + +~> **NOTE:** Each AWS account may only have one S3 Public Access Block configuration. Multiple configurations of the resource against the same AWS account will cause a perpetual difference. + +-> Advanced usage: To use a custom API endpoint for this Terraform resource, use the [`s3Control` endpoint provider configuration](/docs/providers/aws/index.html#s3control), not the `s3` endpoint provider configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { S3AccountPublicAccessBlock } from "./.gen/providers/aws/s3-account-public-access-block"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3AccountPublicAccessBlock(this, "example", { + blockPublicAcls: true, + blockPublicPolicy: true, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) AWS account ID to configure. Defaults to automatically determined account ID of the Terraform AWS provider. +* `blockPublicAcls` - (Optional) Whether Amazon S3 should block public ACLs for buckets in this account. Defaults to `false`. Enabling this setting does not affect existing policies or ACLs. When set to `true` causes the following behavior: + * PUT Bucket acl and PUT Object acl calls will fail if the specified ACL allows public access. + * PUT Object calls fail if the request includes a public ACL. +* `blockPublicPolicy` - (Optional) Whether Amazon S3 should block public bucket policies for buckets in this account. Defaults to `false`. Enabling this setting does not affect existing bucket policies. When set to `true` causes Amazon S3 to: + * Reject calls to PUT Bucket policy if the specified bucket policy allows public access. +* `ignorePublicAcls` - (Optional) Whether Amazon S3 should ignore public ACLs for buckets in this account. Defaults to `false`. Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set. When set to `true` causes Amazon S3 to: + * Ignore all public ACLs on buckets in this account and any objects that they contain. +* `restrictPublicBuckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for buckets in this account. Defaults to `false`. Enabling this setting does not affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`: + * Only the bucket owner and AWS Services can access buckets with public policies. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AWS account ID + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsS3AccountPublicAccessBlock` using the AWS account ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsS3AccountPublicAccessBlock` using the AWS account ID. For example: + +```console +% terraform import aws_s3_account_public_access_block.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket.html.markdown new file mode 100644 index 00000000000..8991584741d --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket.html.markdown @@ -0,0 +1,401 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket" +description: |- + Provides a S3 bucket resource. +--- + + + +# Resource: aws_s3_bucket + +Provides a S3 bucket resource. 
+
+-> This functionality is for managing S3 in an AWS Partition. To manage [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html), see the [`awsS3ControlBucket`](/docs/providers/aws/r/s3control_bucket.html) resource.
+
+-> In April 2023, [AWS introduced](https://aws.amazon.com/about-aws/whats-new/2022/12/amazon-s3-automatically-enable-block-public-access-disable-access-control-lists-buckets-april-2023/) updated security defaults for new S3 buckets. See [this issue](https://github.com/hashicorp/terraform-provider-aws/issues/28353) for information on how this affects the `awsS3Bucket` resource.
+
+## Example Usage
+
+### Private Bucket With Tags
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3Bucket(this, "example", {
+      bucket: "my-tf-test-bucket",
+      tags: {
+        Environment: "Dev",
+        Name: "My bucket",
+      },
+    });
+  }
+}
+
+```
+
+### Static Website Hosting
+
+-> **NOTE:** The `website` attribute is deprecated.
+See [`awsS3BucketWebsiteConfiguration`](s3_bucket_website_configuration.html.markdown) for examples with static website hosting configured.
+
+### CORS Rules
+
+-> **NOTE:** The `corsRule` attribute is deprecated.
+See [`awsS3BucketCorsConfiguration`](s3_bucket_cors_configuration.html.markdown) for examples with CORS rules configured.
+
+### Versioning
+
+-> **NOTE:** The `versioning` attribute is deprecated.
+See [`awsS3BucketVersioning`](s3_bucket_versioning.html.markdown) for examples with versioning configured.
+
+### Logging
+
+-> **NOTE:** The `logging` attribute is deprecated.
+See [`awsS3BucketLogging`](s3_bucket_logging.html.markdown) for examples with logging enabled.
+
+### Object Lifecycle Rules
+
+-> **NOTE:** The `lifecycleRule` attribute is deprecated.
+See [`awsS3BucketLifecycleConfiguration`](s3_bucket_lifecycle_configuration.html.markdown) for examples with object lifecycle rules.
+
+### Object Lock Configuration
+
+-> **NOTE:** The `objectLockConfiguration` attribute is deprecated.
+See [`awsS3BucketObjectLockConfiguration`](s3_bucket_object_lock_configuration.html.markdown) for examples with object lock configurations on both new and existing buckets.
+
+### Replication Configuration
+
+-> **NOTE:** The `replicationConfiguration` attribute is deprecated.
+See [`awsS3BucketReplicationConfiguration`](s3_bucket_replication_configuration.html.markdown) for examples with replication configured.
+
+### Enable SSE-KMS Server Side Encryption
+
+-> **NOTE:** The `serverSideEncryptionConfiguration` attribute is deprecated.
+See [`awsS3BucketServerSideEncryptionConfiguration`](s3_bucket_server_side_encryption_configuration.html.markdown) for examples with server side encryption configured.
+
+### ACL Policy Grants
+
+-> **NOTE:** The `acl` and `grant` attributes are deprecated.
+See [`awsS3BucketAcl`](s3_bucket_acl.html.markdown) for examples with ACL grants.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Optional, Forces new resource) Name of the bucket. If omitted, Terraform will assign a random, unique name.
Must be lowercase and less than or equal to 63 characters in length. A full list of bucket naming rules [may be found here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html).
+* `bucketPrefix` - (Optional, Forces new resource) Creates a unique bucket name beginning with the specified prefix. Conflicts with `bucket`. Must be lowercase and less than or equal to 37 characters in length. A full list of bucket naming rules [may be found here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html).
+* `forceDestroy` - (Optional, Default:`false`) Boolean that indicates all objects (including any [locked objects](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html)) should be deleted from the bucket *when the bucket is destroyed* so that the bucket can be destroyed without error. These objects are *not* recoverable. This only deletes objects when the bucket is destroyed, *not* when setting this parameter to `true`. Once this parameter is set to `true`, there must be a successful `terraform apply` run before a destroy is required to update this value in the resource state. Without a successful `terraform apply` after this parameter is set, this flag will have no effect. If setting this field in the same operation that would require replacing the bucket or destroying the bucket, this flag will not work. Additionally when importing a bucket, a successful `terraform apply` is required to set this value in state before it will take effect on a destroy operation.
+* `objectLockEnabled` - (Optional, Forces new resource) Indicates whether this bucket has an Object Lock configuration enabled. Valid values are `true` or `false`. This argument is not supported in all regions or partitions.
+* `tags` - (Optional) Map of tags to assign to the bucket. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The following arguments are deprecated, and will be removed in a future major version:
+
+* `accelerationStatus` - (Optional, **Deprecated**) Sets the accelerate configuration of an existing bucket. Can be `Enabled` or `Suspended`. Cannot be used in `cn-north-1` or `us-gov-west-1`. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketAccelerateConfiguration`](s3_bucket_accelerate_configuration.html) instead.
+* `acl` - (Optional, **Deprecated**) The [canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `aws-exec-read`, `authenticated-read`, and `log-delivery-write`. Defaults to `private`. Conflicts with `grant`. Terraform will only perform drift detection if a configuration value is provided. Use the resource [`awsS3BucketAcl`](s3_bucket_acl.html.markdown) instead.
+* `grant` - (Optional, **Deprecated**) An [ACL policy grant](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#sample-acl). See [Grant](#grant) below for details. Conflicts with `acl`. Terraform will only perform drift detection if a configuration value is provided. Use the resource [`awsS3BucketAcl`](s3_bucket_acl.html.markdown) instead.
+* `corsRule` - (Optional, **Deprecated**) Rule of [Cross-Origin Resource Sharing](https://docs.aws.amazon.com/AmazonS3/latest/dev/cors.html).
See [CORS rule](#cors-rule) below for details. Terraform will only perform drift detection if a configuration value is provided. Use the resource [`awsS3BucketCorsConfiguration`](s3_bucket_cors_configuration.html.markdown) instead.
+* `lifecycleRule` - (Optional, **Deprecated**) Configuration of [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html). See [Lifecycle Rule](#lifecycle-rule) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketLifecycleConfiguration`](s3_bucket_lifecycle_configuration.html) instead.
+* `logging` - (Optional, **Deprecated**) Configuration of [S3 bucket logging](https://docs.aws.amazon.com/AmazonS3/latest/UG/ManagingBucketLogging.html) parameters. See [Logging](#logging) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketLogging`](s3_bucket_logging.html.markdown) instead.
+* `objectLockConfiguration` - (Optional, **Deprecated**) Configuration of [S3 object locking](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock.html). See [Object Lock Configuration](#object-lock-configuration) below for details.
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the `objectLockEnabled` parameter and the resource [`awsS3BucketObjectLockConfiguration`](s3_bucket_object_lock_configuration.html.markdown) instead.
+* `policy` - (Optional, **Deprecated**) Valid [bucket policy](https://docs.aws.amazon.com/AmazonS3/latest/dev/example-bucket-policies.html) JSON document. Note that if the policy document is not specific enough (but still valid), Terraform may view the policy as constantly changing in a `terraform plan`. In this case, please make sure you use the verbose/specific version of the policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketPolicy`](s3_bucket_policy.html) instead.
+* `replicationConfiguration` - (Optional, **Deprecated**) Configuration of [replication configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html). See [Replication Configuration](#replication-configuration) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketReplicationConfiguration`](s3_bucket_replication_configuration.html) instead.
+* `requestPayer` - (Optional, **Deprecated**) Specifies who should bear the cost of Amazon S3 data transfer.
+  Can be either `BucketOwner` or `Requester`. By default, the owner of the S3 bucket would incur the costs of any data transfer.
+  See [Requester Pays Buckets](http://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html) developer guide for more information.
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketRequestPaymentConfiguration`](s3_bucket_request_payment_configuration.html) instead.
+* `serverSideEncryptionConfiguration` - (Optional, **Deprecated**) Configuration of [server-side encryption configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html). See [Server Side Encryption Configuration](#server-side-encryption-configuration) below for details.
+  Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketServerSideEncryptionConfiguration`](s3_bucket_server_side_encryption_configuration.html) instead.
+* `versioning` - (Optional, **Deprecated**) Configuration of the [S3 bucket versioning state](https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html). See [Versioning](#versioning) below for details. Terraform will only perform drift detection if a configuration value is provided. Use the resource [`awsS3BucketVersioning`](s3_bucket_versioning.html.markdown) instead.
+* `website` - (Optional, **Deprecated**) Configuration of the [S3 bucket website](https://docs.aws.amazon.com/AmazonS3/latest/userguide/WebsiteHosting.html). See [Website](#website) below for details. Terraform will only perform drift detection if a configuration value is provided.
+  Use the resource [`awsS3BucketWebsiteConfiguration`](s3_bucket_website_configuration.html.markdown) instead.
+
+### CORS Rule
+
+~> **NOTE:** Currently, changes to the `corsRule` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of CORS rules to an S3 bucket, use the `awsS3BucketCorsConfiguration` resource instead. If you use `corsRule` on an `awsS3Bucket`, Terraform will assume management over the full set of CORS rules for the S3 bucket, treating additional CORS rules as drift. For this reason, `corsRule` cannot be mixed with the external `awsS3BucketCorsConfiguration` resource for a given S3 bucket.
+
+The `corsRule` configuration block supports the following arguments:
+
+* `allowedHeaders` - (Optional) List of headers allowed.
+* `allowedMethods` - (Required) One or more HTTP methods that you allow the origin to execute. Can be `GET`, `PUT`, `POST`, `DELETE` or `HEAD`.
+* `allowedOrigins` - (Required) One or more origins you want customers to be able to access the bucket from.
+* `exposeHeaders` - (Optional) One or more headers in the response that you want customers to be able to access from their applications (for example, from a JavaScript `XMLHttpRequest` object).
+* `maxAgeSeconds` - (Optional) Specifies time in seconds that browser can cache the response for a preflight request.
+
+### Grant
+
+~> **NOTE:** Currently, changes to the `grant` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of ACL grants to an S3 bucket, use the `awsS3BucketAcl` resource instead. If you use `grant` on an `awsS3Bucket`, Terraform will assume management over the full set of ACL grants for the S3 bucket, treating additional ACL grants as drift. For this reason, `grant` cannot be mixed with the external `awsS3BucketAcl` resource for a given S3 bucket.
+
+The `grant` configuration block supports the following arguments:
+
+* `id` - (Optional) Canonical user ID to grant for. Used only when `type` is `CanonicalUser`.
+* `type` - (Required) Type of grantee to apply for. Valid values are `CanonicalUser` and `Group`. `AmazonCustomerByEmail` is not supported.
+* `permissions` - (Required) List of permissions to apply for grantee. Valid values are `READ`, `WRITE`, `READ_ACP`, `WRITE_ACP`, `FULL_CONTROL`.
+* `uri` - (Optional) URI address to grant for. Used only when `type` is `Group`.
+
+### Lifecycle Rule
+
+~> **NOTE:** Currently, changes to the `lifecycleRule` configuration of *existing* resources cannot be automatically detected by Terraform.
To manage changes of Lifecycle rules to an S3 bucket, use the `awsS3BucketLifecycleConfiguration` resource instead. If you use `lifecycleRule` on an `awsS3Bucket`, Terraform will assume management over the full set of Lifecycle rules for the S3 bucket, treating additional Lifecycle rules as drift. For this reason, `lifecycleRule` cannot be mixed with the external `awsS3BucketLifecycleConfiguration` resource for a given S3 bucket.
+
+~> **NOTE:** At least one of `abortIncompleteMultipartUploadDays`, `expiration`, `transition`, `noncurrentVersionExpiration`, `noncurrentVersionTransition` must be specified.
+
+The `lifecycleRule` configuration block supports the following arguments:
+
+* `id` - (Optional) Unique identifier for the rule. Must be less than or equal to 255 characters in length.
+* `prefix` - (Optional) Object key prefix identifying one or more objects to which the rule applies.
+* `tags` - (Optional) Specifies object tags key and value.
+* `enabled` - (Required) Specifies lifecycle rule status.
+* `abortIncompleteMultipartUploadDays` (Optional) Specifies the number of days after initiating a multipart upload when the multipart upload must be completed.
+* `expiration` - (Optional) Specifies a period in the object's expiration. See [Expiration](#expiration) below for details.
+* `transition` - (Optional) Specifies a period in the object's transitions. See [Transition](#transition) below for details.
+* `noncurrentVersionExpiration` - (Optional) Specifies when noncurrent object versions expire. See [Noncurrent Version Expiration](#noncurrent-version-expiration) below for details.
+* `noncurrentVersionTransition` - (Optional) Specifies when noncurrent object versions transition. See [Noncurrent Version Transition](#noncurrent-version-transition) below for details.
+
+### Expiration
+
+The `expiration` configuration block supports the following arguments:
+
+* `date` - (Optional) Specifies the date after which you want the corresponding action to take effect.
+* `days` - (Optional) Specifies the number of days after object creation when the specific rule action takes effect.
+* `expiredObjectDeleteMarker` - (Optional) On a versioned bucket (versioning-enabled or versioning-suspended bucket), you can add this element in the lifecycle configuration to direct Amazon S3 to delete expired object delete markers. This cannot be specified with Days or Date in a Lifecycle Expiration Policy.
+
+### Transition
+
+The `transition` configuration block supports the following arguments:
+
+* `date` - (Optional) Specifies the date after which you want the corresponding action to take effect.
+* `days` - (Optional) Specifies the number of days after object creation when the specific rule action takes effect.
+* `storageClass` - (Required) Specifies the Amazon S3 [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Transition.html#AmazonS3-Type-Transition-StorageClass) to which you want the object to transition.
+
+### Noncurrent Version Expiration
+
+The `noncurrentVersionExpiration` configuration block supports the following arguments:
+
+* `days` - (Required) Specifies the number of days noncurrent object versions expire.
+
+### Noncurrent Version Transition
+
+The `noncurrentVersionTransition` configuration block supports the following arguments:
+
+* `days` - (Required) Specifies the number of days noncurrent object versions transition.
+* `storageClass` - (Required) Specifies the Amazon S3 [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Transition.html#AmazonS3-Type-Transition-StorageClass) to which you want the object to transition.
+
+### Logging
+
+~> **NOTE:** Currently, changes to the `logging` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of logging parameters to an S3 bucket, use the `awsS3BucketLogging` resource instead. If you use `logging` on an `awsS3Bucket`, Terraform will assume management over the full set of logging parameters for the S3 bucket, treating additional logging parameters as drift. For this reason, `logging` cannot be mixed with the external `awsS3BucketLogging` resource for a given S3 bucket.
+
+The `logging` configuration block supports the following arguments:
+
+* `targetBucket` - (Required) Name of the bucket that will receive the log objects.
+* `targetPrefix` - (Optional) To specify a key prefix for log objects.
+
+### Object Lock Configuration
+
+~> **NOTE:** You can only **enable** S3 Object Lock for **new** buckets. If you need to **enable** S3 Object Lock for an **existing** bucket, please contact AWS Support.
+When you create a bucket with S3 Object Lock enabled, Amazon S3 automatically enables versioning for the bucket.
+Once you create a bucket with S3 Object Lock enabled, you can't disable Object Lock or suspend versioning for the bucket.
+
+~> **NOTE:** Currently, changes to the `objectLockConfiguration` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of Object Lock settings to an S3 bucket, use the `awsS3BucketObjectLockConfiguration` resource instead. If you use `objectLockConfiguration` on an `awsS3Bucket`, Terraform will assume management over the full set of Object Lock configuration parameters for the S3 bucket, treating additional Object Lock configuration parameters as drift. For this reason, `objectLockConfiguration` cannot be mixed with the external `awsS3BucketObjectLockConfiguration` resource for a given S3 bucket.
+
+The `objectLockConfiguration` configuration block supports the following arguments:
+
+* `objectLockEnabled` - (Optional, **Deprecated**) Indicates whether this bucket has an Object Lock configuration enabled. Valid value is `Enabled`. Use the top-level argument `objectLockEnabled` instead.
+* `rule` - (Optional) Object Lock rule in place for this bucket ([documented below](#rule)).
+
+#### Rule
+
+The `rule` configuration block supports the following argument:
+
+* `defaultRetention` - (Required) Default retention period that you want to apply to new objects placed in this bucket ([documented below](#default-retention)).
+
+#### Default Retention
+
+The `defaultRetention` configuration block supports the following arguments:
+
+~> **NOTE:** Either `days` or `years` must be specified, but not both.
+
+* `mode` - (Required) Default Object Lock retention mode you want to apply to new objects placed in this bucket. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `days` - (Optional) Number of days that you want to specify for the default retention period.
+* `years` - (Optional) Number of years that you want to specify for the default retention period.
+
+### Replication Configuration
+
+~> **NOTE:** Currently, changes to the `replicationConfiguration` configuration of *existing* resources cannot be automatically detected by Terraform.
To manage replication configuration changes to an S3 bucket, use the `awsS3BucketReplicationConfiguration` resource instead. If you use `replicationConfiguration` on an `awsS3Bucket`, Terraform will assume management over the full replication configuration for the S3 bucket, treating additional replication configuration rules as drift. For this reason, `replicationConfiguration` cannot be mixed with the external `awsS3BucketReplicationConfiguration` resource for a given S3 bucket.
+
+The `replicationConfiguration` configuration block supports the following arguments:
+
+* `role` - (Required) ARN of the IAM role for Amazon S3 to assume when replicating the objects.
+* `rules` - (Required) Specifies the rules managing the replication ([documented below](#rules)).
+
+#### Rules
+
+The `rules` configuration block supports the following arguments:
+
+~> **NOTE:** Amazon S3's latest version of the replication configuration is V2, which includes the `filter` attribute for replication rules.
+With the `filter` attribute, you can specify object filters based on the object key prefix, tags, or both to scope the objects that the rule applies to.
+Replication configuration V1 supports filtering based on only the `prefix` attribute. For backwards compatibility, Amazon S3 continues to support the V1 configuration.
+
+* `deleteMarkerReplicationStatus` - (Optional) Whether delete markers are replicated. The only valid value is `Enabled`. To disable, omit this argument. This argument is only valid with V2 replication configurations (i.e., when `filter` is used).
+* `destination` - (Required) Specifies the destination for the rule ([documented below](#destination)).
+* `filter` - (Optional, Conflicts with `prefix`) Filter that identifies subset of objects to which the replication rule applies ([documented below](#filter)).
+* `id` - (Optional) Unique identifier for the rule. Must be less than or equal to 255 characters in length.
+* `prefix` - (Optional, Conflicts with `filter`) Object key name prefix identifying one or more objects to which the rule applies. Must be less than or equal to 1024 characters in length.
+* `priority` - (Optional) Priority associated with the rule. Priority should only be set if `filter` is configured. If not provided, defaults to `0`. Priority must be unique between multiple rules.
+* `sourceSelectionCriteria` - (Optional) Specifies special object selection criteria ([documented below](#source-selection-criteria)).
+* `status` - (Required) Status of the rule. Either `Enabled` or `Disabled`. The rule is ignored if `status` is not `Enabled`.
+
+#### Filter
+
+The `filter` configuration block supports the following arguments:
+
+* `prefix` - (Optional) Object key name prefix that identifies subset of objects to which the rule applies. Must be less than or equal to 1024 characters in length.
+* `tags` - (Optional) A map of tags that identifies subset of objects to which the rule applies.
+  The rule applies only to objects having all the tags in their tag set.
+
+#### Destination
+
+~> **NOTE:** Replication to multiple destination buckets requires that `priority` is specified in the `rules` object. If the corresponding rule requires no filter, an empty configuration block `filter {}` must be specified.
+
+The `destination` configuration block supports the following arguments:
+
+* `bucket` - (Required) ARN of the S3 bucket where you want Amazon S3 to store replicas of the object identified by the rule.
+* `storageClass` - (Optional) The [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Destination.html#AmazonS3-Type-Destination-StorageClass) used to store the object. By default, Amazon S3 uses the storage class of the source object to create the object replica.
+* `replicaKmsKeyId` - (Optional) Destination KMS encryption key ARN for SSE-KMS replication. Must be used in conjunction with
+  `sseKmsEncryptedObjects` source selection criteria.
+* `accessControlTranslation` - (Optional) Specifies the overrides to use for object owners on replication. Must be used in conjunction with `accountId` owner override configuration.
+* `accountId` - (Optional) Account ID to use for overriding the object owner on replication. Must be used in conjunction with `accessControlTranslation` override configuration.
+* `replicationTime` - (Optional) Enables S3 Replication Time Control (S3 RTC) ([documented below](#replication-time)).
+* `metrics` - (Optional) Enables replication metrics (required for S3 RTC) ([documented below](#metrics)).
+
+#### Replication Time
+
+The `replicationTime` configuration block supports the following arguments:
+
+* `status` - (Optional) Status of RTC. Either `Enabled` or `Disabled`.
+* `minutes` - (Optional) Threshold within which objects are to be replicated. The only valid value is `15`.
+
+#### Metrics
+
+The `metrics` configuration block supports the following arguments:
+
+* `status` - (Optional) Status of replication metrics. Either `Enabled` or `Disabled`.
+* `minutes` - (Optional) Threshold within which objects are to be replicated. The only valid value is `15`.
+
+#### Source Selection Criteria
+
+The `sourceSelectionCriteria` configuration block supports the following argument:
+
+* `sseKmsEncryptedObjects` - (Optional) Match SSE-KMS encrypted objects ([documented below](#sse-kms-encrypted-objects)). If specified, `replicaKmsKeyId`
+  in `destination` must be specified as well.
+
+#### SSE KMS Encrypted Objects
+
+The `sseKmsEncryptedObjects` configuration block supports the following argument:
+
+* `enabled` - (Required) Boolean which indicates if this criterion is enabled.
+
+### Server Side Encryption Configuration
+
+~> **NOTE:** Currently, changes to the `serverSideEncryptionConfiguration` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes in encryption of an S3 bucket, use the `awsS3BucketServerSideEncryptionConfiguration` resource instead. If you use `serverSideEncryptionConfiguration` on an `awsS3Bucket`, Terraform will assume management over the encryption configuration for the S3 bucket, treating additional encryption changes as drift. For this reason, `serverSideEncryptionConfiguration` cannot be mixed with the external `awsS3BucketServerSideEncryptionConfiguration` resource for a given S3 bucket.
+
+The `serverSideEncryptionConfiguration` configuration block supports the following argument:
+
+* `rule` - (Required) Single object for server-side encryption by default configuration. (documented below)
+
+The `rule` configuration block supports the following arguments:
+
+* `applyServerSideEncryptionByDefault` - (Required) Single object for setting server-side encryption by default. (documented below)
+* `bucketKeyEnabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS.
+
+The `applyServerSideEncryptionByDefault` configuration block supports the following arguments:
+
+* `sseAlgorithm` - (Required) Server-side encryption algorithm to use. Valid values are `AES256` and `aws:kms`.
+* `kmsMasterKeyId` - (Optional) AWS KMS master key ID used for the SSE-KMS encryption. This can only be used when you set the value of `sseAlgorithm` as `aws:kms`. The default `aws/s3` AWS KMS master key is used if this element is absent while the `sseAlgorithm` is `aws:kms`.
+
+### Versioning
+
+~> **NOTE:** Currently, changes to the `versioning` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes of versioning state to an S3 bucket, use the `awsS3BucketVersioning` resource instead. If you use `versioning` on an `awsS3Bucket`, Terraform will assume management over the versioning state of the S3 bucket, treating additional versioning state changes as drift. For this reason, `versioning` cannot be mixed with the external `awsS3BucketVersioning` resource for a given S3 bucket.
+
+The `versioning` configuration block supports the following arguments:
+
+* `enabled` - (Optional) Enable versioning. Once you version-enable a bucket, it can never return to an unversioned state. You can, however, suspend versioning on that bucket.
+* `mfaDelete` - (Optional) Enable MFA delete for either `Change the versioning state of your bucket` or `Permanently delete an object version`. Default is `false`. This cannot be used to toggle this setting, but is available to allow managed buckets to reflect the state in AWS.
+
+### Website
+
+~> **NOTE:** Currently, changes to the `website` configuration of *existing* resources cannot be automatically detected by Terraform. To manage changes to the website configuration of an S3 bucket, use the `awsS3BucketWebsiteConfiguration` resource instead. If you use `website` on an `awsS3Bucket`, Terraform will assume management over the configuration of the website of the S3 bucket, treating additional website configuration changes as drift. For this reason, `website` cannot be mixed with the external `awsS3BucketWebsiteConfiguration` resource for a given S3 bucket.
+
+The `website` configuration block supports the following arguments:
+
+* `indexDocument` - (Required, unless using `redirectAllRequestsTo`) Amazon S3 returns this index document when requests are made to the root domain or any of the subfolders.
+* `errorDocument` - (Optional) Absolute path to the document to return in case of a 4XX error.
+* `redirectAllRequestsTo` - (Optional) Hostname to redirect all website requests for this bucket to. Hostname can optionally be prefixed with a protocol (`http://` or `https://`) to use when redirecting requests. The default is the protocol that is used in the original request.
+* `routingRules` - (Optional) JSON array containing [routing rules](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules.html)
+  describing redirect behavior and when redirects are applied.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the bucket.
+* `arn` - ARN of the bucket. Will be of format `arn:aws:s3:::bucketname`.
+* `bucketDomainName` - Bucket domain name. Will be of format `bucketname.s3.amazonaws.com`.
+* `bucketRegionalDomainName` - The bucket region-specific domain name. The bucket domain name including the region name.
Please refer to the [S3 endpoints reference](https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region) for format. Note: AWS CloudFront allows specifying an S3 region-specific endpoint when creating an S3 origin. This will prevent redirect issues from CloudFront to the S3 Origin URL. For more information, see the [Virtual Hosted-Style Requests for Other Regions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#deprecated-global-endpoint) section in the AWS S3 User Guide.
+* `hostedZoneId` - [Route 53 Hosted Zone ID](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_website_region_endpoints) for this bucket's region.
+* `region` - AWS region this bucket resides in.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `websiteEndpoint` - (**Deprecated**) Website endpoint, if the bucket is configured with a website. If not, this will be an empty string. Use the resource [`awsS3BucketWebsiteConfiguration`](s3_bucket_website_configuration.html.markdown) instead.
+* `websiteDomain` - (**Deprecated**) Domain of the website endpoint, if the bucket is configured with a website. If not, this will be an empty string. This is used to create Route 53 alias records. Use the resource [`awsS3BucketWebsiteConfiguration`](s3_bucket_website_configuration.html.markdown) instead.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `20m`)
+- `read` - (Default `20m`)
+- `update` - (Default `20m`)
+- `delete` - (Default `60m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket using the `bucket`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import S3 bucket using the `bucket`. For example:
+
+```console
+% terraform import aws_s3_bucket.bucket bucket-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_accelerate_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_accelerate_configuration.html.markdown
new file mode 100644
index 00000000000..ee15d61e5cf
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_accelerate_configuration.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_accelerate_configuration"
+description: |-
+  Provides an S3 bucket accelerate configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_accelerate_configuration
+
+Provides an S3 bucket accelerate configuration resource. See the [Requirements for using Transfer Acceleration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/transfer-acceleration.html#transfer-acceleration-requirements) for more details.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAccelerateConfiguration } from "./.gen/providers/aws/s3-bucket-accelerate-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mybucket = new S3Bucket(this, "mybucket", {
+      bucket: "mybucket",
+    });
+    new S3BucketAccelerateConfiguration(this, "example", {
+      bucket: mybucket.id,
+      status: "Enabled",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required, Forces new resource) Name of the bucket.
+* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner.
+* `status` - (Required) Transfer acceleration state of the bucket. Valid values: `Enabled`, `Suspended`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket accelerate configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+**Using `terraform import` to import** S3 bucket accelerate configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_accelerate_configuration.example bucket-name
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_accelerate_configuration.example bucket-name,123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_acl.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_acl.html.markdown
new file mode 100644
index 00000000000..13f68d373ab
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_acl.html.markdown
@@ -0,0 +1,327 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_acl"
+description: |-
+  Provides an S3 bucket ACL resource.
+---
+
+
+
+# Resource: aws_s3_bucket_acl
+
+Provides an S3 bucket ACL resource.
+ +~> **Note:** `terraform destroy` does not delete the S3 Bucket ACL but does remove the resource from Terraform state. + +## Example Usage + +### With `private` ACL + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketOwnershipControls } from "./.gen/providers/aws/s3-bucket-ownership-controls"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "my-tf-example-bucket", + }); + const awsS3BucketOwnershipControlsExample = new S3BucketOwnershipControls( + this, + "example_1", + { + bucket: example.id, + rule: { + objectOwnership: "BucketOwnerPreferred", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketOwnershipControlsExample.overrideLogicalId("example"); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_2", { + acl: "private", + bucket: example.id, + dependsOn: [awsS3BucketOwnershipControlsExample], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + } +} + +``` + +### With `publicRead` ACL + +-> This example explicitly disables the default S3 bucket security settings. This +should be done with caution, as all bucket objects become publicly exposed. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketOwnershipControls } from "./.gen/providers/aws/s3-bucket-ownership-controls"; +import { S3BucketPublicAccessBlock } from "./.gen/providers/aws/s3-bucket-public-access-block"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "my-tf-example-bucket", + }); + const awsS3BucketOwnershipControlsExample = new S3BucketOwnershipControls( + this, + "example_1", + { + bucket: example.id, + rule: { + objectOwnership: "BucketOwnerPreferred", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketOwnershipControlsExample.overrideLogicalId("example"); + const awsS3BucketPublicAccessBlockExample = new S3BucketPublicAccessBlock( + this, + "example_2", + { + blockPublicAcls: false, + blockPublicPolicy: false, + bucket: example.id, + ignorePublicAcls: false, + restrictPublicBuckets: false, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsS3BucketPublicAccessBlockExample.overrideLogicalId("example"); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_3", { + acl: "public-read", + bucket: example.id, + dependsOn: [ + awsS3BucketOwnershipControlsExample, + awsS3BucketPublicAccessBlockExample, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + } +} + +``` + +### With Grants + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCanonicalUserId } from "./.gen/providers/aws/data-aws-canonical-user-id"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketOwnershipControls } from "./.gen/providers/aws/s3-bucket-ownership-controls"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "my-tf-example-bucket", + }); + const awsS3BucketOwnershipControlsExample = new S3BucketOwnershipControls( + this, + "example_1", + { + bucket: example.id, + rule: { + objectOwnership: "BucketOwnerPreferred", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketOwnershipControlsExample.overrideLogicalId("example"); + const current = new DataAwsCanonicalUserId(this, "current", {}); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_3", { + accessControlPolicy: { + grant: [ + { + grantee: { + id: Token.asString(current.id), + type: "CanonicalUser", + }, + permission: "READ", + }, + { + grantee: { + type: "Group", + uri: "http://acs.amazonaws.com/groups/s3/LogDelivery", + }, + permission: "READ_ACP", + }, + ], + owner: { + id: Token.asString(current.id), + }, + }, + bucket: example.id, + dependsOn: [awsS3BucketOwnershipControlsExample], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `acl` - (Optional, Conflicts with `accessControlPolicy`) Canned ACL to apply to the bucket. +* `accessControlPolicy` - (Optional, Conflicts with `acl`) Configuration block that sets the ACL permissions for an object per grantee. [See below](#access_control_policy). +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner. + +### access_control_policy + +The `accessControlPolicy` configuration block supports the following arguments: + +* `grant` - (Required) Set of `grant` configuration blocks. [See below](#grant). +* `owner` - (Required) Configuration block of the bucket owner's display name and ID. [See below](#owner). + +### grant + +The `grant` configuration block supports the following arguments: + +* `grantee` - (Required) Configuration block for the person being granted permissions. [See below](#grantee). 
+* `permission` - (Required) Permissions assigned to the grantee for the bucket.
+
+### owner
+
+The `owner` configuration block supports the following arguments:
+
+* `id` - (Required) ID of the owner.
+* `displayName` - (Optional) Display name of the owner.
+
+### grantee
+
+The `grantee` configuration block supports the following arguments:
+
+* `emailAddress` - (Optional) Email address of the grantee. See [Regions and Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region) for supported AWS regions where this argument can be specified.
+* `id` - (Optional) Canonical user ID of the grantee.
+* `type` - (Required) Type of grantee. Valid values: `CanonicalUser`, `AmazonCustomerByEmail`, `Group`.
+* `uri` - (Optional) URI of the grantee group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket`, `expectedBucketOwner` (if configured), and `acl` (if configured) separated by commas (`,`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket ACL using `bucket`, `expectedBucketOwner`, and/or `acl`, depending on your situation. For example:
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a
+[canned ACL][1] (i.e. predefined grant), import using the `bucket`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a
+[canned ACL][1] (i.e. predefined grant), import using the `bucket` and `acl` separated by a comma (`,`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a
+[canned ACL][1] (i.e.
predefined grant), import using the `bucket`, `expectedBucketOwner`, and `acl` separated by commas (`,`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+**Using `terraform import` to import** using `bucket`, `expectedBucketOwner`, and/or `acl`, depending on your situation. For example:
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a
+[canned ACL][1] (i.e. predefined grant), import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name
+```
+
+If the owner (account ID) of the source bucket is the _same_ account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket` and `acl` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name,private
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **not configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name,123456789012
+```
+
+If the owner (account ID) of the source bucket _differs_ from the account used to configure the Terraform AWS Provider, and the source bucket is **configured** with a [canned ACL][1] (i.e. predefined grant), import using the `bucket`, `expectedBucketOwner`, and `acl` separated by commas (`,`):
+
+```console
+% terraform import aws_s3_bucket_acl.example bucket-name,123456789012,private
+```
+
+[1]: https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_analytics_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_analytics_configuration.html.markdown
new file mode 100644
index 00000000000..b137ac85d37
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_analytics_configuration.html.markdown
@@ -0,0 +1,150 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_analytics_configuration"
+description: |-
+  Provides an S3 bucket analytics configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_analytics_configuration
+
+Provides an S3 bucket [analytics configuration](https://docs.aws.amazon.com/AmazonS3/latest/dev/analytics-storage-class.html) resource.
+
+## Example Usage
+
+### Add analytics configuration for entire S3 bucket and export results to a second S3 bucket
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAnalyticsConfiguration } from "./.gen/providers/aws/s3-bucket-analytics-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const analytics = new S3Bucket(this, "analytics", {
+      bucket: "analytics-destination",
+    });
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    new S3BucketAnalyticsConfiguration(this, "example-entire-bucket", {
+      bucket: example.id,
+      name: "EntireBucket",
+      storageClassAnalysis: {
+        dataExport: {
+          destination: {
+            s3BucketDestination: {
+              bucketArn: analytics.arn,
+            },
+          },
+        },
+      },
+    });
+  }
+}
+
+```
+
+### Add analytics configuration with S3 object filter
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAnalyticsConfiguration } from "./.gen/providers/aws/s3-bucket-analytics-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    new S3BucketAnalyticsConfiguration(this, "example-filtered", {
+      bucket: example.id,
+      filter: {
+        prefix: "documents/",
+        tags: {
+          class: "blue",
+          priority: "high",
+        },
+      },
+      name: "ImportantBlueDocuments",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket this analytics configuration is associated with.
+* `name` - (Required) Unique identifier of the analytics configuration for the bucket.
+* `filter` - (Optional) Object filtering that accepts a prefix, tags, or a logical AND of prefix and tags (documented below).
+* `storageClassAnalysis` - (Optional) Configuration for the analytics data export (documented below).
+
+The `filter` configuration supports the following:
+
+* `prefix` - (Optional) Object prefix for filtering.
+* `tags` - (Optional) Set of object tags for filtering.
+
+The `storageClassAnalysis` configuration supports the following:
+
+* `dataExport` - (Required) Data export configuration (documented below).
+
+The `dataExport` configuration supports the following:
+
+* `outputSchemaVersion` - (Optional) Schema version of exported analytics data. Allowed values: `V_1`. Default value: `V_1`.
+* `destination` - (Required) Specifies the destination for the exported analytics data (documented below).
+
+The `destination` configuration supports the following:
+
+* `s3BucketDestination` - (Required) Analytics data export currently only supports an S3 bucket destination (documented below).
+
+The `s3BucketDestination` configuration supports the following:
+
+* `bucketArn` - (Required) ARN of the destination bucket.
+* `bucketAccountId` - (Optional) Account ID that owns the destination bucket.
+* `format` - (Optional) Output format of exported analytics data. Allowed values: `CSV`. Default value: `CSV`.
+* `prefix` - (Optional) Prefix to append to exported analytics data.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
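+
+As an illustration of how the filter and export blocks above compose, the following is a minimal sketch that combines an object filter with a storage class analysis export in a single configuration. The bucket names, the `FilteredExport` identifier, and the `analytics` prefix are illustrative placeholders, not values from the examples above.
+
+```typescript
+// A hedged sketch combining `filter` with `storageClassAnalysis`;
+// all names, buckets, and prefixes are illustrative placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAnalyticsConfiguration } from "./.gen/providers/aws/s3-bucket-analytics-configuration";
+class AnalyticsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const destination = new S3Bucket(this, "destination", {
+      bucket: "analytics-destination",
+    });
+    const source = new S3Bucket(this, "source", {
+      bucket: "source-bucket",
+    });
+    new S3BucketAnalyticsConfiguration(this, "filtered-export", {
+      bucket: source.id,
+      name: "FilteredExport",
+      // Restrict the analysis to a single key prefix.
+      filter: {
+        prefix: "documents/",
+      },
+      // Export the analysis results to the destination bucket,
+      // prepending a prefix to the exported objects.
+      storageClassAnalysis: {
+        dataExport: {
+          destination: {
+            s3BucketDestination: {
+              bucketArn: destination.arn,
+              prefix: "analytics",
+            },
+          },
+        },
+      },
+    });
+  }
+}
+```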
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket analytics configurations using `bucket:analytics`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import S3 bucket analytics configurations using `bucket:analytics`. For example: + +```console +% terraform import aws_s3_bucket_analytics_configuration.my-bucket-entire-bucket my-bucket:EntireBucket +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_cors_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_cors_configuration.html.markdown new file mode 100644 index 00000000000..7e9074c1f34 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_cors_configuration.html.markdown @@ -0,0 +1,133 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_cors_configuration" +description: |- + Provides an S3 bucket CORS configuration resource. +--- + + + +# Resource: aws_s3_bucket_cors_configuration + +Provides an S3 bucket CORS configuration resource. For more information about CORS, go to [Enabling Cross-Origin Resource Sharing](https://docs.aws.amazon.com/AmazonS3/latest/userguide/cors.html) in the Amazon S3 User Guide. + +~> **NOTE:** S3 Buckets only support a single CORS configuration. Declaring multiple `awsS3BucketCorsConfiguration` resources to the same S3 Bucket will cause a perpetual difference in configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketCorsConfiguration } from "./.gen/providers/aws/s3-bucket-cors-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "mybucket", + }); + const awsS3BucketCorsConfigurationExample = new S3BucketCorsConfiguration( + this, + "example_1", + { + bucket: example.id, + corsRule: [ + { + allowedHeaders: ["*"], + allowedMethods: ["PUT", "POST"], + allowedOrigins: ["https://s3-website-test.hashicorp.com"], + exposeHeaders: ["ETag"], + maxAgeSeconds: 3000, + }, + { + allowedMethods: ["GET"], + allowedOrigins: ["*"], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketCorsConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `corsRule` - (Required) Set of origins and methods (cross-origin access that you want to allow). [See below](#cors_rule). You can configure up to 100 rules. 
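+
+Because `corsRule` accepts a set of rule objects, a rule list can also be assembled programmatically before being passed to the resource. The following is a minimal sketch that derives one read-only rule per origin; the origin URLs and the `generated` identifier are illustrative placeholders, and the per-rule arguments are documented in the section below.
+
+```typescript
+// A hedged sketch: build one CORS rule per allowed origin.
+// Origins and names are placeholders, not values from this page.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketCorsConfiguration } from "./.gen/providers/aws/s3-bucket-cors-configuration";
+class CorsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "mybucket",
+    });
+    const origins = ["https://app.example.com", "https://admin.example.com"];
+    new S3BucketCorsConfiguration(this, "generated", {
+      bucket: example.id,
+      // One GET-only rule per origin, with the preflight response
+      // cached for 3000 seconds.
+      corsRule: origins.map((origin) => ({
+        allowedMethods: ["GET"],
+        allowedOrigins: [origin],
+        maxAgeSeconds: 3000,
+      })),
+    });
+  }
+}
+```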
+
+### cors_rule
+
+The `corsRule` configuration block supports the following arguments:
+
+* `allowedHeaders` - (Optional) Set of headers that are specified in the `Access-Control-Request-Headers` header.
+* `allowedMethods` - (Required) Set of HTTP methods that you allow the origin to execute. Valid values are `GET`, `PUT`, `HEAD`, `POST`, and `DELETE`.
+* `allowedOrigins` - (Required) Set of origins you want customers to be able to access the bucket from.
+* `exposeHeaders` - (Optional) Set of headers in the response that you want customers to be able to access from their applications (for example, from a JavaScript `XMLHttpRequest` object).
+* `id` - (Optional) Unique identifier for the rule. The value cannot be longer than 255 characters.
+* `maxAgeSeconds` - (Optional) Time in seconds that your browser is to cache the preflight response for the specified resource.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket CORS configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+**Using `terraform import` to import** S3 bucket CORS configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`).
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_cors_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_cors_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_intelligent_tiering_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_intelligent_tiering_configuration.html.markdown new file mode 100644 index 00000000000..2fde7fdf362 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_intelligent_tiering_configuration.html.markdown @@ -0,0 +1,141 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_intelligent_tiering_configuration" +description: |- + Provides an S3 Intelligent-Tiering configuration resource. +--- + + + +# Resource: aws_s3_bucket_intelligent_tiering_configuration + +Provides an [S3 Intelligent-Tiering](https://docs.aws.amazon.com/AmazonS3/latest/userguide/intelligent-tiering.html) configuration resource. + +## Example Usage + +### Add intelligent tiering configuration for entire S3 bucket + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketIntelligentTieringConfiguration } from "./.gen/providers/aws/s3-bucket-intelligent-tiering-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + new S3BucketIntelligentTieringConfiguration(this, "example-entire-bucket", { + bucket: example.id, + name: "EntireBucket", + tiering: [ + { + accessTier: "DEEP_ARCHIVE_ACCESS", + days: 180, + }, + { + accessTier: "ARCHIVE_ACCESS", + days: 125, + }, + ], + }); + } +} + +``` + +### Add intelligent tiering configuration with S3 object filter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketIntelligentTieringConfiguration } from "./.gen/providers/aws/s3-bucket-intelligent-tiering-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    new S3BucketIntelligentTieringConfiguration(this, "example-filtered", {
+      bucket: example.id,
+      filter: {
+        prefix: "documents/",
+        tags: {
+          class: "blue",
+          priority: "high",
+        },
+      },
+      name: "ImportantBlueDocuments",
+      status: "Disabled",
+      tiering: [
+        {
+          accessTier: "ARCHIVE_ACCESS",
+          days: 125,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket this intelligent tiering configuration is associated with.
+* `name` - (Required) Unique name used to identify the S3 Intelligent-Tiering configuration for the bucket.
+* `status` - (Optional) Specifies the status of the configuration. Valid values: `Enabled`, `Disabled`.
+* `filter` - (Optional) Bucket filter. The configuration only includes objects that meet the filter's criteria (documented below).
+* `tiering` - (Required) S3 Intelligent-Tiering storage class tiers of the configuration (documented below).
+
+The `filter` configuration supports the following:
+
+* `prefix` - (Optional) Object key name prefix that identifies the subset of objects to which the configuration applies.
+* `tags` - (Optional) All of these tags must exist in the object's tag set in order for the configuration to apply.
+
+The `tiering` configuration supports the following:
+
+* `accessTier` - (Required) S3 Intelligent-Tiering access tier. Valid values: `ARCHIVE_ACCESS`, `DEEP_ARCHIVE_ACCESS`.
+* `days` - (Required) Number of consecutive days of no access after which an object will be eligible to be transitioned to the corresponding tier.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket intelligent tiering configurations using `bucket:name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import S3 bucket intelligent tiering configurations using `bucket:name`. For example:
+
+```console
+% terraform import aws_s3_bucket_intelligent_tiering_configuration.my-bucket-entire-bucket my-bucket:EntireBucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_inventory.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_inventory.html.markdown
new file mode 100644
index 00000000000..8edf4a3428d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_inventory.html.markdown
@@ -0,0 +1,171 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_inventory"
+description: |-
+  Provides an S3 bucket inventory configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_inventory
+
+Provides an S3 bucket [inventory configuration](https://docs.aws.amazon.com/AmazonS3/latest/dev/storage-inventory.html) resource.
+
+## Example Usage
+
+### Add inventory configuration
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketInventory } from "./.gen/providers/aws/s3-bucket-inventory";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const inventory = new S3Bucket(this, "inventory", {
+      bucket: "my-tf-inventory-bucket",
+    });
+    const test = new S3Bucket(this, "test", {
+      bucket: "my-tf-test-bucket",
+    });
+    const awsS3BucketInventoryTest = new S3BucketInventory(this, "test_2", {
+      bucket: test.id,
+      destination: {
+        bucket: {
+          bucketArn: inventory.arn,
+          format: "ORC",
+        },
+      },
+      includedObjectVersions: "All",
+      name: "EntireBucketDaily",
+      schedule: {
+        frequency: "Daily",
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketInventoryTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+### Add inventory configuration with S3 object prefix
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketInventory } from "./.gen/providers/aws/s3-bucket-inventory";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const inventory = new S3Bucket(this, "inventory", {
+      bucket: "my-tf-inventory-bucket",
+    });
+    const test = new S3Bucket(this, "test", {
+      bucket: "my-tf-test-bucket",
+    });
+    new S3BucketInventory(this, "test-prefix", {
+      bucket: test.id,
+      destination: {
+        bucket: {
+          bucketArn: inventory.arn,
+          format: "ORC",
+          prefix: "inventory",
+        },
+      },
+      filter: {
+        prefix: "documents/",
+      },
+      includedObjectVersions: "All",
+      name: "DocumentsWeekly",
+      schedule: {
+        frequency: "Daily",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the source bucket that inventory lists the objects for.
+* `name` - (Required) Unique identifier of the inventory configuration for the bucket.
+* `includedObjectVersions` - (Required) Object versions to include in the inventory list. Valid values: `All`, `Current`.
+* `schedule` - (Required) Specifies the schedule for generating inventory results (documented below).
+* `destination` - (Required) Contains information about where to publish the inventory results (documented below).
+* `enabled` - (Optional, Default: `true`) Specifies whether the inventory is enabled or disabled.
+* `filter` - (Optional) Specifies an inventory filter. The inventory only includes objects that meet the filter's criteria (documented below).
+* `optionalFields` - (Optional) List of optional fields that are included in the inventory results. Please refer to the S3 [documentation](https://docs.aws.amazon.com/AmazonS3/latest/API/API_InventoryConfiguration.html#AmazonS3-Type-InventoryConfiguration-OptionalFields) for more details.
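+
+As an illustration of how these arguments compose with the nested blocks documented below, the following is a minimal sketch of a weekly, SSE-KMS-encrypted inventory that also requests optional fields. It is a sketch only: the bucket names, the KMS key ARN, and the `encrypted-weekly` identifier are placeholders, not values from the examples above.
+
+```typescript
+// A hedged sketch of an encrypted weekly inventory; all names,
+// ARNs, and field choices are illustrative placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketInventory } from "./.gen/providers/aws/s3-bucket-inventory";
+class InventorySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const reports = new S3Bucket(this, "reports", {
+      bucket: "inventory-reports-bucket",
+    });
+    const source = new S3Bucket(this, "source", {
+      bucket: "source-bucket",
+    });
+    new S3BucketInventory(this, "encrypted-weekly", {
+      bucket: source.id,
+      name: "CurrentVersionsWeekly",
+      // List only current object versions, once a week.
+      includedObjectVersions: "Current",
+      schedule: {
+        frequency: "Weekly",
+      },
+      // Size and LastModifiedDate are among the optional fields S3 supports.
+      optionalFields: ["Size", "LastModifiedDate"],
+      destination: {
+        bucket: {
+          bucketArn: reports.arn,
+          format: "CSV",
+          // Encrypt the inventory file with a customer-managed KMS key
+          // (placeholder ARN).
+          encryption: {
+            sseKms: {
+              keyId: "arn:aws:kms:us-east-1:123456789012:key/11111111-2222-3333-4444-555555555555",
+            },
+          },
+        },
+      },
+    });
+  }
+}
+```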
+
+The `filter` configuration supports the following:
+
+* `prefix` - (Optional) Prefix that an object must have to be included in the inventory results.
+
+The `schedule` configuration supports the following:
+
+* `frequency` - (Required) Specifies how frequently inventory results are produced. Valid values: `Daily`, `Weekly`.
+
+The `destination` configuration supports the following:
+
+* `bucket` - (Required) S3 bucket configuration where inventory results are published (documented below).
+
+The `bucket` configuration supports the following:
+
+* `bucketArn` - (Required) Amazon S3 bucket ARN of the destination.
+* `format` - (Required) Specifies the output format of the inventory results. Can be `CSV`, [`ORC`](https://orc.apache.org/) or [`Parquet`](https://parquet.apache.org/).
+* `accountId` - (Optional) ID of the account that owns the destination bucket. Recommended to be set to prevent problems if the destination bucket ownership changes.
+* `prefix` - (Optional) Prefix that is prepended to all inventory results.
+* `encryption` - (Optional) Contains the type of server-side encryption to use to encrypt the inventory (documented below).
+
+The `encryption` configuration supports the following:
+
+* `sseKms` - (Optional) Specifies to use server-side encryption with AWS KMS-managed keys to encrypt the inventory file (documented below).
+* `sseS3` - (Optional) Specifies to use server-side encryption with Amazon S3-managed keys (SSE-S3) to encrypt the inventory file.
+
+The `sseKms` configuration supports the following:
+
+* `keyId` - (Required) ARN of the KMS customer master key (CMK) used to encrypt the inventory file.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket inventory configurations using `bucket:inventory`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import S3 bucket inventory configurations using `bucket:inventory`. For example:
+
+```console
+% terraform import aws_s3_bucket_inventory.my-bucket-entire-bucket my-bucket:EntireBucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_lifecycle_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_lifecycle_configuration.html.markdown
new file mode 100644
index 00000000000..b14d5c5b570
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_lifecycle_configuration.html.markdown
@@ -0,0 +1,602 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_lifecycle_configuration"
+description: |-
+  Provides an S3 bucket lifecycle configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_lifecycle_configuration
+
+Provides an independent configuration resource for S3 bucket [lifecycle configuration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lifecycle-mgmt.html).
+
+An S3 Lifecycle configuration consists of one or more Lifecycle rules.
Each rule consists of the following:
+
+* Rule metadata (`id` and `status`)
+* [Filter](#filter) identifying objects to which the rule applies
+* One or more transition or expiration actions
+
+For more information, see the Amazon S3 User Guide on [`Lifecycle Configuration Elements`](https://docs.aws.amazon.com/AmazonS3/latest/userguide/intro-lifecycle-rules.html).
+
+~> **NOTE:** S3 Buckets only support a single lifecycle configuration. Declaring multiple `awsS3BucketLifecycleConfiguration` resources to the same S3 Bucket will cause a perpetual difference in configuration.
+
+## Example Usage
+
+### With neither a filter nor prefix specified
+
+The Lifecycle rule applies to a subset of objects based on the key name prefix (`""`).
+
+This configuration is intended to replicate the default behavior of the `lifecycleRule`
+parameter in the Terraform AWS Provider `awsS3Bucket` resource prior to `v4.0`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketLifecycleConfiguration(this, "example", {
+      bucket: bucket.id,
+      rule: [
+        {
+          id: "rule-1",
+          status: "Enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Specifying an empty filter
+
+The Lifecycle rule applies to all objects in the bucket.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketLifecycleConfiguration(this, "example", {
+      bucket: bucket.id,
+      rule: [
+        {
+          filter: {},
+          id: "rule-1",
+          status: "Enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Specifying a filter using key prefixes
+
+The Lifecycle rule applies to a subset of objects based on the key name prefix (`logs/`).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketLifecycleConfiguration(this, "example", {
+      bucket: bucket.id,
+      rule: [
+        {
+          filter: {
+            prefix: "logs/",
+          },
+          id: "rule-1",
+          status: "Enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+If you want to apply a Lifecycle action to a subset of objects based on different key name prefixes, specify separate rules.
+ +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3BucketLifecycleConfiguration(this, "example", { + bucket: bucket.id, + rule: [ + { + filter: { + prefix: "logs/", + }, + id: "rule-1", + status: "Enabled", + }, + { + filter: { + prefix: "tmp/", + }, + id: "rule-2", + status: "Enabled", + }, + ], + }); + } +} + +``` + +### Specifying a filter based on an object tag + +The Lifecycle rule specifies a filter based on a tag key and value. The rule then applies only to a subset of objects with the specific tag. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3BucketLifecycleConfiguration(this, "example", { + bucket: bucket.id, + rule: [ + { + filter: { + tag: { + key: "Name", + value: "Staging", + }, + }, + id: "rule-1", + status: "Enabled", + }, + ], + }); + } +} + +``` + +### Specifying a filter based on multiple tags + +The Lifecycle rule directs Amazon S3 to perform lifecycle actions on objects with two tags (with the specific tag keys and values). Notice `tags` is wrapped in the `and` configuration block. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3BucketLifecycleConfiguration(this, "example", { + bucket: bucket.id, + rule: [ + { + filter: { + and: { + tags: { + key1: "Value1", + key2: "Value2", + }, + }, + }, + id: "rule-1", + status: "Enabled", + }, + ], + }); + } +} + +``` + +### Specifying a filter based on both prefix and one or more tags + +The Lifecycle rule directs Amazon S3 to perform lifecycle actions on objects with the specified prefix and two tags (with the specific tag keys and values). Notice both `prefix` and `tags` are wrapped in the `and` configuration block. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketLifecycleConfiguration(this, "example", {
+      bucket: bucket.id,
+      rule: [
+        {
+          filter: {
+            and: {
+              prefix: "logs/",
+              tags: {
+                key1: "Value1",
+                key2: "Value2",
+              },
+            },
+          },
+          id: "rule-1",
+          status: "Enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Specifying a filter based on object size
+
+Object size values are in bytes. Maximum filter size is 5TB. Some storage classes have minimum object size limitations; for more information, see [Comparing the Amazon S3 storage classes](https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html#sc-compare).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketLifecycleConfiguration(this, "example", {
+      bucket: bucket.id,
+      rule: [
+        {
+          filter: {
+            objectSizeGreaterThan: Token.asString(500),
+          },
+          id: "rule-1",
+          status: "Enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Specifying a filter based on object size range and prefix
+
+The value of `objectSizeGreaterThan` must be less than the value of `objectSizeLessThan`. Notice both the object size range and prefix are wrapped in the `and` configuration block.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketLifecycleConfiguration(this, "example", {
+      bucket: bucket.id,
+      rule: [
+        {
+          filter: {
+            and: {
+              objectSizeGreaterThan: 500,
+              objectSizeLessThan: 64000,
+              prefix: "logs/",
+            },
+          },
+          id: "rule-1",
+          status: "Enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Creating a Lifecycle Configuration for a bucket with versioning
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const bucket = new S3Bucket(this, "bucket", {
+      bucket: "my-bucket",
+    });
+    const versioningBucket = new S3Bucket(this, "versioning_bucket", {
+      bucket: "my-versioning-bucket",
+    });
+    new S3BucketAcl(this, "bucket_acl", {
+      acl: "private",
+      bucket: bucket.id,
+    });
+    new S3BucketAcl(this, "versioning_bucket_acl", {
+      acl: "private",
+      bucket: versioningBucket.id,
+    });
+    new S3BucketLifecycleConfiguration(this, "bucket-config", {
+      bucket: bucket.id,
+      rule: [
+        {
+          expiration: {
+            days: 90,
+          },
+          filter: {
+            and: {
+              prefix: "log/",
+              tags: {
+                autoclean: "true",
+                rule: "log",
+              },
+            },
+          },
+          id: "log",
+          status: "Enabled",
+          transition: [
+            {
+              days: 30,
+              storageClass: "STANDARD_IA",
+            },
+            {
+              days: 60,
+              storageClass: "GLACIER",
+            },
+          ],
+        },
+        {
+          expiration: {
+            date: "2023-01-13T00:00:00Z",
+          },
+          filter: {
+            prefix: "tmp/",
+          },
+          id: "tmp",
+          status: "Enabled",
+        },
+      ],
+    });
+    const versioning = new S3BucketVersioningA(this, "versioning", {
+      bucket: versioningBucket.id,
+      versioningConfiguration: {
+        status: "Enabled",
+      },
+    });
+    new S3BucketLifecycleConfiguration(this, "versioning-bucket-config", {
+      bucket: versioningBucket.id,
+      dependsOn: [versioning],
+      rule: [
+        {
+          filter: {
+            prefix: "config/",
+          },
+          id: "config",
+          noncurrentVersionExpiration: {
+            noncurrentDays: 90,
+          },
+          noncurrentVersionTransition: [
+            {
+              noncurrentDays: 30,
+              storageClass: "STANDARD_IA",
+            },
+            {
+              noncurrentDays: 60,
+              storageClass: "GLACIER",
+            },
+          ],
+          status: "Enabled",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the source S3 bucket you want Amazon S3 to monitor.
+* `expectedBucketOwner` - (Optional) Account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
+* `rule` - (Required) List of configuration blocks describing the rules managing the lifecycle. [See below](#rule).
+
+### rule
+
+~> **NOTE:** The `filter` argument, while Optional, is required if the `rule` configuration block does not contain a `prefix` **and** you intend to override the default behavior of setting the rule to filter objects with the empty string prefix (`""`).
+Since `prefix` is deprecated by Amazon S3 and will be removed in the next major version of the Terraform AWS Provider, we recommend users either specify `filter` or leave both `filter` and `prefix` unspecified.
+
+~> **NOTE:** A rule cannot be updated from having a filter (via either the `ruleFilter` parameter or when neither `ruleFilter` nor `rulePrefix` are specified) to only having a prefix via the `rulePrefix` parameter.
+
+~> **NOTE:** Terraform cannot distinguish between configurations that use `rule.filter {}` and configurations that use neither `ruleFilter` nor `rulePrefix`, so a rule cannot be updated from applying to all objects in the bucket via `rule.filter {}` to applying to a subset of objects based on the key prefix `""` and vice versa.
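+
+As a quick, hand-written illustration of how the blocks documented below fit together (this sketch is not `cdktf convert` output and, like the examples above, assumes an existing `bucket`), a single rule can pair an empty `filter` with `abortIncompleteMultipartUpload` and `expiration`:
+
+```typescript
+// A hand-written sketch, not 'cdktf convert' output; `bucket` is assumed to exist.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3BucketLifecycleConfiguration } from "./.gen/providers/aws/s3-bucket-lifecycle-configuration";
+class LifecycleSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketLifecycleConfiguration(this, "cleanup", {
+      bucket: bucket.id,
+      rule: [
+        {
+          id: "cleanup",
+          status: "Enabled",
+          // An empty filter applies the rule to all objects in the bucket.
+          filter: {},
+          // Abort multipart uploads that have not completed within 7 days.
+          abortIncompleteMultipartUpload: {
+            daysAfterInitiation: 7,
+          },
+          // Expire current object versions 90 days after creation.
+          expiration: {
+            days: 90,
+          },
+        },
+      ],
+    });
+  }
+}
+```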
+
+The `rule` configuration block supports the following arguments:
+
+* `abortIncompleteMultipartUpload` - (Optional) Configuration block that specifies the days since the initiation of an incomplete multipart upload that Amazon S3 will wait before permanently removing all parts of the upload. [See below](#abort_incomplete_multipart_upload).
+* `expiration` - (Optional) Configuration block that specifies the expiration for the lifecycle of the object in the form of date, days, and whether the object has a delete marker. [See below](#expiration).
+* `filter` - (Optional) Configuration block used to identify objects that a Lifecycle Rule applies to. [See below](#filter). If not specified, the `rule` will default to using `prefix`.
+* `id` - (Required) Unique identifier for the rule. The value cannot be longer than 255 characters.
+* `noncurrentVersionExpiration` - (Optional) Configuration block that specifies when noncurrent object versions expire. [See below](#noncurrent_version_expiration).
+* `noncurrentVersionTransition` - (Optional) Set of configuration blocks that specify the transition rule for the lifecycle rule that describes when noncurrent objects transition to a specific storage class. [See below](#noncurrent_version_transition).
+* `prefix` - (Optional) **DEPRECATED** Use `filter` instead. This has been deprecated by Amazon S3. Prefix identifying one or more objects to which the rule applies. Defaults to an empty string (`""`) if `filter` is not specified.
+* `status` - (Required) Whether the rule is currently being applied. Valid values: `Enabled` or `Disabled`.
+* `transition` - (Optional) Set of configuration blocks that specify when an Amazon S3 object transitions to a specified storage class. [See below](#transition).
+
+### abort_incomplete_multipart_upload
+
+The `abortIncompleteMultipartUpload` configuration block supports the following arguments:
+
+* `daysAfterInitiation` - Number of days after which Amazon S3 aborts an incomplete multipart upload.
+
+### expiration
+
+The `expiration` configuration block supports the following arguments:
+
+* `date` - (Optional) Date the object is to be moved or deleted. Should be in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `days` - (Optional) Lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.
+* `expiredObjectDeleteMarker` - (Optional, Conflicts with `date` and `days`) Indicates whether Amazon S3 will remove a delete marker with no noncurrent versions. If set to `true`, the delete marker will be expired; if set to `false` the policy takes no action.
+
+### filter
+
+~> **NOTE:** The `filter` configuration block must either be specified as the empty configuration block (`filter {}`) or with exactly one of `prefix`, `tag`, `and`, `objectSizeGreaterThan` or `objectSizeLessThan` specified.
+
+The `filter` configuration block supports the following arguments:
+
+* `and` - (Optional) Configuration block used to apply a logical `and` to two or more predicates. [See below](#and). The Lifecycle Rule will apply to any object matching all the predicates configured inside the `and` block.
+* `objectSizeGreaterThan` - (Optional) Minimum object size (in bytes) to which the rule applies.
+* `objectSizeLessThan` - (Optional) Maximum object size (in bytes) to which the rule applies.
+* `prefix` - (Optional) Prefix identifying one or more objects to which the rule applies. Defaults to an empty string (`""`) if not specified.
+* `tag` - (Optional) Configuration block for specifying a tag key and value. [See below](#tag).
+
+### noncurrent_version_expiration
+
+The `noncurrentVersionExpiration` configuration block supports the following arguments:
+
+* `newerNoncurrentVersions` - (Optional) Number of noncurrent versions Amazon S3 will retain. Must be a non-zero positive integer.
+* `noncurrentDays` - (Optional) Number of days an object is noncurrent before Amazon S3 can perform the associated action. Must be a positive integer.
+
+### noncurrent_version_transition
+
+The `noncurrentVersionTransition` configuration block supports the following arguments:
+
+* `newerNoncurrentVersions` - (Optional) Number of noncurrent versions Amazon S3 will retain. Must be a non-zero positive integer.
+* `noncurrentDays` - (Optional) Number of days an object is noncurrent before Amazon S3 can perform the associated action.
+* `storageClass` - (Required) Class of storage used to store the object. Valid Values: `GLACIER`, `STANDARD_IA`, `ONEZONE_IA`, `INTELLIGENT_TIERING`, `DEEP_ARCHIVE`, `GLACIER_IR`.
+
+### transition
+
+The `transition` configuration block supports the following arguments:
+
+~> **Note:** Only one of `date` or `days` should be specified. If neither is specified, the `transition` will default to 0 `days`.
+
+* `date` - (Optional, Conflicts with `days`) Date objects are transitioned to the specified storage class. The date value must be in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) and set to midnight UTC, e.g., `2023-01-13T00:00:00Z`.
+* `days` - (Optional, Conflicts with `date`) Number of days after creation when objects are transitioned to the specified storage class. The value must be a positive integer. If both `days` and `date` are not specified, defaults to `0`. Valid values depend on `storageClass`, see [Transition objects using Amazon S3 Lifecycle](https://docs.aws.amazon.com/AmazonS3/latest/userguide/lifecycle-transition-general-considerations.html) for more details.
+* `storageClass` - Class of storage used to store the object. Valid Values: `GLACIER`, `STANDARD_IA`, `ONEZONE_IA`, `INTELLIGENT_TIERING`, `DEEP_ARCHIVE`, `GLACIER_IR`.
+
+### and
+
+The `and` configuration block supports the following arguments:
+
+* `objectSizeGreaterThan` - (Optional) Minimum object size to which the rule applies. Value must be at least `0` if specified.
+* `objectSizeLessThan` - (Optional) Maximum object size to which the rule applies. Value must be at least `1` if specified.
+* `prefix` - (Optional) Prefix identifying one or more objects to which the rule applies.
+* `tags` - (Optional) Key-value map of resource tags. All of these tags must exist in the object's tag set in order for the rule to apply.
+
+### tag
+
+The `tag` configuration block supports the following arguments:
+
+* `key` - (Required) Name of the object key.
+* `value` - (Required) Value of the tag.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket lifecycle configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`).
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import** S3 bucket lifecycle configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_lifecycle_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_lifecycle_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_logging.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_logging.html.markdown new file mode 100644 index 00000000000..3aa4b474e1b --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_logging.html.markdown @@ -0,0 +1,141 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_logging" +description: |- + Provides an S3 bucket (server access) logging resource. +--- + + + +# Resource: aws_s3_bucket_logging + +Provides an S3 bucket (server access) logging resource. For more information, see [Logging requests using server access logging](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ServerLogs.html) +in the AWS S3 User Guide. + +~> **Note:** Amazon S3 supports server access logging, AWS CloudTrail, or a combination of both. Refer to the [Logging options for Amazon S3](https://docs.aws.amazon.com/AmazonS3/latest/userguide/logging-with-S3.html) +to decide which method meets your requirements. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl";
+import { S3BucketLoggingA } from "./.gen/providers/aws/s3-bucket-logging";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "my-tf-example-bucket",
+    });
+    const logBucket = new S3Bucket(this, "log_bucket", {
+      bucket: "my-tf-log-bucket",
+    });
+    const awsS3BucketAclExample = new S3BucketAcl(this, "example_2", {
+      acl: "private",
+      bucket: example.id,
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketAclExample.overrideLogicalId("example");
+    new S3BucketAcl(this, "log_bucket_acl", {
+      acl: "log-delivery-write",
+      bucket: logBucket.id,
+    });
+    const awsS3BucketLoggingExample = new S3BucketLoggingA(this, "example_4", {
+      bucket: example.id,
+      targetBucket: logBucket.id,
+      targetPrefix: "log/",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketLoggingExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required, Forces new resource) Name of the bucket.
+* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner.
+* `targetBucket` - (Required) Name of the bucket where you want Amazon S3 to store server access logs.
+* `targetPrefix` - (Required) Prefix for all log object keys.
+* `targetGrant` - (Optional) Set of configuration blocks with information for granting permissions. [See below](#target_grant).
+
+### target_grant
+
+The `targetGrant` configuration block supports the following arguments:
+
+* `grantee` - (Required) Configuration block for the person being granted permissions. [See below](#grantee).
+* `permission` - (Required) Logging permissions assigned to the grantee for the bucket. Valid values: `FULL_CONTROL`, `READ`, `WRITE`.
+
+### grantee
+
+The `grantee` configuration block supports the following arguments:
+
+* `emailAddress` - (Optional) Email address of the grantee. See [Regions and Endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region) for supported AWS regions where this argument can be specified.
+* `id` - (Optional) Canonical user ID of the grantee.
+* `type` - (Required) Type of grantee. Valid values: `CanonicalUser`, `AmazonCustomerByEmail`, `Group`.
+* `uri` - (Optional) URI of the grantee group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket logging using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+**Using `terraform import` to import** S3 bucket logging using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_logging.example bucket-name
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_logging.example bucket-name,123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_metric.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_metric.html.markdown
new file mode 100644
index 00000000000..45be8efb100
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_metric.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_metric"
+description: |-
+  Provides an S3 bucket metrics configuration resource.
+---
+
+
+
+# Resource: aws_s3_bucket_metric
+
+Provides an S3 bucket [metrics configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/metrics-configurations.html) resource.
+
+## Example Usage
+
+### Add metrics configuration for entire S3 bucket
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketMetric } from "./.gen/providers/aws/s3-bucket-metric";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    new S3BucketMetric(this, "example-entire-bucket", {
+      bucket: example.id,
+      name: "EntireBucket",
+    });
+  }
+}
+
+```
+
+### Add metrics configuration with S3 object filter
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketMetric } from "./.gen/providers/aws/s3-bucket-metric";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    new S3BucketMetric(this, "example-filtered", {
+      bucket: example.id,
+      filter: {
+        prefix: "documents/",
+        tags: {
+          class: "blue",
+          priority: "high",
+        },
+      },
+      name: "ImportantBlueDocuments",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) Name of the bucket to put metric configuration.
+* `name` - (Required) Unique identifier of the metrics configuration for the bucket. Must be less than or equal to 64 characters in length.
+* `filter` - (Optional) [Object filtering](http://docs.aws.amazon.com/AmazonS3/latest/dev/metrics-configurations.html#metrics-configurations-filter) that accepts a prefix, tags, or a logical AND of prefix and tags (documented below).
+
+The `filter` metric configuration supports the following:
+
+~> **NOTE:** At least one of `prefix` or `tags` is required when specifying a `filter`.
+
+* `prefix` - (Optional) Object prefix for filtering (singular).
+* `tags` - (Optional) Object tags for filtering (up to 10).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket metric configurations using `bucket:metric`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import S3 bucket metric configurations using `bucket:metric`. For example:
+
+```console
+% terraform import aws_s3_bucket_metric.my-bucket-entire-bucket my-bucket:EntireBucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_notification.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_notification.html.markdown
new file mode 100644
index 00000000000..d479540e5f7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_notification.html.markdown
@@ -0,0 +1,484 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_notification"
+description: |-
+  Manages an S3 Bucket Notification Configuration
+---
+
+
+
+# Resource: aws_s3_bucket_notification
+
+Manages an S3 Bucket Notification Configuration. For additional information, see the [Configuring S3 Event Notifications section in the Amazon S3 Developer Guide](https://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html).
+
+~> **NOTE:** S3 Buckets only support a single notification configuration. Declaring multiple `awsS3BucketNotification` resources to the same S3 Bucket will cause a perpetual difference in configuration. See the example "Trigger multiple Lambda functions" for an option.
+ +## Example Usage + +### Add notification configuration to SNS Topic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketNotification } from "./.gen/providers/aws/s3-bucket-notification"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "your-bucket-name", + }); + const topic = new DataAwsIamPolicyDocument(this, "topic", { + statement: [ + { + actions: ["SNS:Publish"], + condition: [ + { + test: "ArnLike", + values: [bucket.arn], + variable: "aws:SourceArn", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["s3.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:sns:*:*:s3-event-notification-topic"], + }, + ], + }); + const awsSnsTopicTopic = new SnsTopic(this, "topic_2", { + name: "s3-event-notification-topic", + policy: Token.asString(topic.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSnsTopicTopic.overrideLogicalId("topic"); + new S3BucketNotification(this, "bucket_notification", { + bucket: bucket.id, + topic: [ + { + events: ["s3:ObjectCreated:*"], + filterSuffix: ".log", + topicArn: Token.asString(awsSnsTopicTopic.arn), + }, + ], + }); + } +} + +``` + +### Add notification configuration to SQS Queue + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketNotification } from "./.gen/providers/aws/s3-bucket-notification"; +import { SqsQueue } from "./.gen/providers/aws/sqs-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "your-bucket-name", + }); + const queue = new DataAwsIamPolicyDocument(this, "queue", { + statement: [ + { + actions: ["sqs:SendMessage"], + condition: [ + { + test: "ArnEquals", + values: [bucket.arn], + variable: "aws:SourceArn", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "*", + }, + ], + resources: ["arn:aws:sqs:*:*:s3-event-notification-queue"], + }, + ], + }); + const awsSqsQueueQueue = new SqsQueue(this, "queue_2", { + name: "s3-event-notification-queue", + policy: Token.asString(queue.json), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsSqsQueueQueue.overrideLogicalId("queue"); + new S3BucketNotification(this, "bucket_notification", { + bucket: bucket.id, + queue: [ + { + events: ["s3:ObjectCreated:*"], + filterSuffix: ".log", + queueArn: Token.asString(awsSqsQueueQueue.arn), + }, + ], + }); + } +} + +``` + +### Add notification configuration to Lambda Function + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { LambdaFunction } from "./.gen/providers/aws/lambda-function"; +import { LambdaPermission } from "./.gen/providers/aws/lambda-permission"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketNotification } from "./.gen/providers/aws/s3-bucket-notification"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "your-bucket-name", + }); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["lambda.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const iamForLambda = new IamRole(this, "iam_for_lambda", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "iam_for_lambda", + }); + const func = new LambdaFunction(this, "func", { + filename: "your-function.zip", + functionName: "example_lambda_name", + handler: "exports.example", + role: iamForLambda.arn, + runtime: "go1.x", + }); + const allowBucket = new LambdaPermission(this, "allow_bucket", { + action: "lambda:InvokeFunction", + functionName: func.arn, + principal: "s3.amazonaws.com", + sourceArn: bucket.arn, + statementId: "AllowExecutionFromS3Bucket", + }); + new S3BucketNotification(this, "bucket_notification", { + bucket: bucket.id, + dependsOn: [allowBucket], + lambdaFunction: [ + { + events: ["s3:ObjectCreated:*"], + filterPrefix: "AWSLogs/", + filterSuffix: ".log", + lambdaFunctionArn: func.arn, + }, + ], + }); + } +} + +``` + +### Trigger multiple Lambda functions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { LambdaFunction } from "./.gen/providers/aws/lambda-function";
+import { LambdaPermission } from "./.gen/providers/aws/lambda-permission";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketNotification } from "./.gen/providers/aws/s3-bucket-notification";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const bucket = new S3Bucket(this, "bucket", {
+      bucket: "your-bucket-name",
+    });
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["lambda.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const iamForLambda = new IamRole(this, "iam_for_lambda", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "iam_for_lambda",
+    });
+    const func1 = new LambdaFunction(this, "func1", {
+      filename: "your-function1.zip",
+      functionName: "example_lambda_name1",
+      handler: "exports.example",
+      role: iamForLambda.arn,
+      runtime: "go1.x",
+    });
+    const func2 = new LambdaFunction(this, "func2", {
+      filename: "your-function2.zip",
+      functionName: "example_lambda_name2",
+      handler: "exports.example",
+      role: iamForLambda.arn,
+    });
+    const allowBucket1 = new LambdaPermission(this, "allow_bucket1", {
+      action: "lambda:InvokeFunction",
+      functionName: func1.arn,
+      principal: "s3.amazonaws.com",
+      sourceArn: bucket.arn,
+      statementId: "AllowExecutionFromS3Bucket1",
+    });
+    const allowBucket2 = new LambdaPermission(this, "allow_bucket2", {
+      action: "lambda:InvokeFunction",
+      functionName: func2.arn,
+      principal: "s3.amazonaws.com",
+      sourceArn: bucket.arn,
+      statementId: "AllowExecutionFromS3Bucket2",
+    });
+    new S3BucketNotification(this, "bucket_notification", {
+      bucket: bucket.id,
+      dependsOn: [allowBucket1, allowBucket2],
+      lambdaFunction: [
+        {
+          events: ["s3:ObjectCreated:*"],
+          filterPrefix: "AWSLogs/",
+          filterSuffix: ".log",
+          lambdaFunctionArn: func1.arn,
+        },
+        {
+          events: ["s3:ObjectCreated:*"],
+          filterPrefix: "OtherLogs/",
+          filterSuffix: ".log",
+          lambdaFunctionArn: func2.arn,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Add multiple notification configurations to SQS Queue
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketNotification } from "./.gen/providers/aws/s3-bucket-notification"; +import { SqsQueue } from "./.gen/providers/aws/sqs-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "your-bucket-name", + }); + const queue = new DataAwsIamPolicyDocument(this, "queue", { + statement: [ + { + actions: ["sqs:SendMessage"], + condition: [ + { + test: "ArnEquals", + values: [bucket.arn], + variable: "aws:SourceArn", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "*", + }, + ], + resources: ["arn:aws:sqs:*:*:s3-event-notification-queue"], + }, + ], + }); + const awsSqsQueueQueue = new SqsQueue(this, "queue_2", { + name: "s3-event-notification-queue", + policy: Token.asString(queue.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSqsQueueQueue.overrideLogicalId("queue"); + new S3BucketNotification(this, "bucket_notification", { + bucket: bucket.id, + queue: [ + { + events: ["s3:ObjectCreated:*"], + filterPrefix: "images/", + id: "image-upload-event", + queueArn: Token.asString(awsSqsQueueQueue.arn), + }, + { + events: ["s3:ObjectCreated:*"], + filterPrefix: "videos/", + id: "video-upload-event", + queueArn: Token.asString(awsSqsQueueQueue.arn), + }, + ], + }); + } +} + +``` + +For Terraform's [JSON syntax](https://www.terraform.io/docs/configuration/syntax.html), use an array instead of defining the `queue` key twice. + +```json +{ + "bucket": "${aws_s3_bucket.bucket.id}", + "queue": [ + { + "id": "image-upload-event", + "queue_arn": "${aws_sqs_queue.queue.arn}", + "events": ["s3:ObjectCreated:*"], + "filter_prefix": "images/" + }, + { + "id": "video-upload-event", + "queue_arn": "${aws_sqs_queue.queue.arn}", + "events": ["s3:ObjectCreated:*"], + "filter_prefix": "videos/" + } + ] +} +``` + +### Emit events to EventBridge + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketNotification } from "./.gen/providers/aws/s3-bucket-notification"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const bucket = new S3Bucket(this, "bucket", { + bucket: "your-bucket-name", + }); + new S3BucketNotification(this, "bucket_notification", { + bucket: bucket.id, + eventbridge: true, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Name of the bucket for notification configuration. + +The following arguments are optional: + +* `eventbridge` - (Optional) Whether to enable Amazon EventBridge notifications. Defaults to `false`. +* `lambdaFunction` - (Optional, Multiple) Used to configure notifications to a Lambda Function. See below. +* `queue` - (Optional) Notification configuration to SQS Queue. See below. +* `topic` - (Optional) Notification configuration to SNS Topic. See below. 
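+
+The following hand-written sketch (not `cdktf convert` output; it assumes an existing `bucket` resource and a pre-created SNS topic ARN) shows how the per-destination arguments documented below fit together, here as a single `topic` entry with an explicit `id` and both key name filters:
+
+```typescript
+// A hand-written sketch; `bucket` and the SNS topic ARN are assumed to exist.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3BucketNotification } from "./.gen/providers/aws/s3-bucket-notification";
+class NotificationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketNotification(this, "sketch", {
+      bucket: bucket.id,
+      topic: [
+        {
+          // `id` is optional; Amazon S3 generates one if omitted.
+          id: "log-upload-event",
+          topicArn: "arn:aws:sns:us-east-1:123456789012:s3-event-notification-topic",
+          events: ["s3:ObjectCreated:*"],
+          filterPrefix: "logs/",
+          filterSuffix: ".log",
+        },
+      ],
+    });
+  }
+}
+```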
+ +### `lambdaFunction` + +* `events` - (Required) [Event](http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html#notification-how-to-event-types-and-destinations) for which to send notifications. +* `filterPrefix` - (Optional) Object key name prefix. +* `filterSuffix` - (Optional) Object key name suffix. +* `id` - (Optional) Unique identifier for each of the notification configurations. +* `lambdaFunctionArn` - (Required) Lambda function ARN. + +### `queue` + +* `events` - (Required) Specifies [event](http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html#notification-how-to-event-types-and-destinations) for which to send notifications. +* `filterPrefix` - (Optional) Object key name prefix. +* `filterSuffix` - (Optional) Object key name suffix. +* `id` - (Optional) Unique identifier for each of the notification configurations. +* `queueArn` - (Required) SQS queue ARN. + +### `topic` + +* `events` - (Required) [Event](http://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html#notification-how-to-event-types-and-destinations) for which to send notifications. +* `filterPrefix` - (Optional) Object key name prefix. +* `filterSuffix` - (Optional) Object key name suffix. +* `id` - (Optional) Unique identifier for each of the notification configurations. +* `topicArn` - (Required) SNS topic ARN. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket notification using the `bucket`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import S3 bucket notification using the `bucket`. For example: + +```console +% terraform import aws_s3_bucket_notification.bucket_notification bucket-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_object.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_object.html.markdown new file mode 100644 index 00000000000..f177a384965 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_object.html.markdown @@ -0,0 +1,304 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_object" +description: |- + Provides an S3 object resource. +--- + + + +# Resource: aws_s3_bucket_object + +~> **NOTE:** The `awsS3BucketObject` resource is DEPRECATED and will be removed in a future version! Use `awsS3Object` instead, where new features and fixes will be added. When replacing `awsS3BucketObject` with `awsS3Object` in your configuration, on the next apply, Terraform will recreate the object. If you prefer to not have Terraform recreate the object, import the object using `awsS3Object`. + +Provides an S3 object resource. + +## Example Usage + +### Uploading a file to a bucket + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3BucketObject(this, "object", { + bucket: "your_bucket_name", + etag: Token.asString(Fn.filemd5("path/to/file")), + key: "new_object_key", + source: "path/to/file", + }); + } +} + +``` + +### Encrypting with KMS Key + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplekms = new KmsKey(this, "examplekms", { + deletionWindowInDays: 7, + description: "KMS key 1", + }); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3BucketObjectExample = new S3BucketObject(this, "example_3", { + bucket: examplebucket.id, + key: "someobject", + kmsKeyId: examplekms.arn, + source: "index.html", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketObjectExample.overrideLogicalId("example"); + } +} + +``` + +### Server Side Encryption with S3 Default Master Key + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3BucketObjectExample = new S3BucketObject(this, "example_2", { + bucket: examplebucket.id, + key: "someobject", + serverSideEncryption: "aws:kms", + source: "index.html", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketObjectExample.overrideLogicalId("example"); + } +} + +``` + +### Server Side Encryption with AWS-Managed Key + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3BucketObjectExample = new S3BucketObject(this, "example_2", { + bucket: examplebucket.id, + key: "someobject", + serverSideEncryption: "AES256", + source: "index.html", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketObjectExample.overrideLogicalId("example"); + } +} + +``` + +### S3 Object Lock + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object"; +import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + objectLockEnabled: true, + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3BucketVersioningExample = new S3BucketVersioningA( + this, + "example_2", + { + bucket: examplebucket.id, + versioningConfiguration: { + status: "Enabled", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketVersioningExample.overrideLogicalId("example"); + const awsS3BucketObjectExample = new S3BucketObject(this, "example_3", { + bucket: examplebucket.id, + dependsOn: [awsS3BucketVersioningExample], + forceDestroy: true, + key: "someobject", + objectLockLegalHoldStatus: "ON", + objectLockMode: "GOVERNANCE", + objectLockRetainUntilDate: "2021-12-31T23:59:60Z", + source: "important.txt", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketObjectExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +-> **Note:** If you specify `contentEncoding` you are responsible for encoding the body appropriately. `source`, `content`, and `contentBase64` all expect already encoded/compressed bytes. + +The following arguments are required: + +* `bucket` - (Required) Name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified. +* `key` - (Required) Name of the object once it is in the bucket. + +The following arguments are optional: + +* `acl` - (Optional) [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. 
Valid values are `private`, `public-read`, `public-read-write`, `aws-exec-read`, `authenticated-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Defaults to `private`.
+* `bucketKeyEnabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS.
+* `cacheControl` - (Optional) Caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
+* `contentBase64` - (Optional, conflicts with `source` and `content`) Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `base64gzip` function with small text strings. For larger objects, use `source` to stream the content from a disk file.
+* `contentDisposition` - (Optional) Presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
+* `contentEncoding` - (Optional) Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
+* `contentLanguage` - (Optional) Language the content is in, e.g., en-US or en-GB.
+* `contentType` - (Optional) Standard MIME type describing the format of the object data, e.g., application/octet-stream. All Valid MIME Types are valid for this input.
+* `content` - (Optional, conflicts with `source` and `contentBase64`) Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text.
+* `etag` - (Optional) Triggers updates when the value changes. The only meaningful value is `filemd5("path/to/file")` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier). This attribute is not compatible with KMS encryption, `kmsKeyId` or `server_side_encryption = "aws:kms"` (see `sourceHash` instead).
+* `forceDestroy` - (Optional) Whether to allow the object to be deleted by removing any legal hold on any object version. Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
+* `kmsKeyId` - (Optional) ARN of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the `awsKmsKey` resource, use the `arn` attribute. If referencing the `awsKmsAlias` data source or resource, use the `targetKeyArn` attribute. Terraform will only perform drift detection if a configuration value is provided.
+* `metadata` - (Optional) Map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`, note that only lowercase labels are currently supported by the AWS Go API).
+* `objectLockLegalHoldStatus` - (Optional) [Legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
+* `objectLockMode` - (Optional) Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `objectLockRetainUntilDate` - (Optional) Date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
+* `serverSideEncryption` - (Optional) Server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`".
+* `sourceHash` - (Optional) Triggers updates like `etag` but useful to address `etag` encryption limitations. Set using `filemd5("path/to/source")` (Terraform 0.11.12 or later). (The value is only stored in state and not saved by AWS.)
+* `source` - (Optional, conflicts with `content` and `contentBase64`) Path to a file that will be read and uploaded as raw bytes for the object content.
+* `storageClass` - (Optional) [Storage Class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#AmazonS3-PutObject-request-header-StorageClass) for the object. Defaults to "`STANDARD`".
+* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `websiteRedirect` - (Optional) Target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
+
+If no content is provided through `source`, `content` or `contentBase64`, then the object will be empty.
+
+-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/index.html` and `index.html` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html).
+* `id` - `key` of the resource supplied above
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `versionId` - Unique version ID value for the object, if bucket versioning is enabled.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import objects using the `id` or S3 URL.
For example:
+
+Import using the `id`, which is the bucket name and the key together:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+// 'cdktf convert' leaves `import` blocks empty; the body below is a hedged
+// sketch of the equivalent cdktf pattern using `importFrom`. The
+// S3BucketObject binding name and its generated path are assumptions.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3BucketObject(this, "example", {
+      bucket: "some-bucket-name",
+      key: "some/key.txt",
+    });
+    example.importFrom("some-bucket-name/some/key.txt");
+  }
+}
+
+```
+
+Import using S3 URL syntax:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+// Hedged sketch as above, importing by S3 URL instead of the bucket/key id.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3BucketObject } from "./.gen/providers/aws/s3-bucket-object";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3BucketObject(this, "example", {
+      bucket: "some-bucket-name",
+      key: "some/key.txt",
+    });
+    example.importFrom("s3://some-bucket-name/some/key.txt");
+  }
+}
+
+```
+
+**Using `terraform import` to import** objects using the `id` or S3 URL. For example:
+
+Import using the `id`, which is the bucket name and the key together:
+
+```console
+% terraform import aws_s3_bucket_object.example some-bucket-name/some/key.txt
+```
+
+Import using S3 URL syntax:
+
+```console
+% terraform import aws_s3_bucket_object.example s3://some-bucket-name/some/key.txt
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_object_lock_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_object_lock_configuration.html.markdown
new file mode 100644
index 00000000000..bc742d89316
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_object_lock_configuration.html.markdown
@@ -0,0 +1,202 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object_lock_configuration"
+description: |-
+  Provides an S3 bucket Object Lock configuration resource.
+---
+
+ 
+
+# Resource: aws_s3_bucket_object_lock_configuration
+
+Provides an S3 bucket Object Lock configuration resource. For more information about Object Locking, go to [Using S3 Object Lock](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock.html) in the Amazon S3 User Guide.
+
+~> **NOTE:** This resource **does not enable** Object Lock for **new** buckets. It configures a default retention period for objects placed in the specified bucket.
+Thus, to **enable** Object Lock for a **new** bucket, see the [Using object lock configuration](s3_bucket.html.markdown#using-object-lock-configuration) section in the `awsS3Bucket` resource or the [Object Lock configuration for a new bucket](#object-lock-configuration-for-a-new-bucket) example below.
+If you want to **enable** Object Lock for an **existing** bucket, contact AWS Support and see the [Object Lock configuration for an existing bucket](#object-lock-configuration-for-an-existing-bucket) example below.
+
+## Example Usage
+
+### Object Lock configuration for a new bucket
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketObjectLockConfigurationA } from "./.gen/providers/aws/s3-bucket-object-lock-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "mybucket",
+      objectLockEnabled: true,
+    });
+    const awsS3BucketObjectLockConfigurationExample =
+      new S3BucketObjectLockConfigurationA(this, "example_1", {
+        bucket: example.id,
+        rule: {
+          defaultRetention: {
+            days: 5,
+            mode: "COMPLIANCE",
+          },
+        },
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketObjectLockConfigurationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Object Lock configuration for an existing bucket
+
+This is a multistep process that requires AWS Support intervention.
+
+1. Enable versioning on your S3 bucket, if you have not already done so.
+Doing so will generate an "Object Lock token" in the back-end.
+
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "mybucket",
+    });
+    const awsS3BucketVersioningExample = new S3BucketVersioningA(
+      this,
+      "example_1",
+      {
+        bucket: example.id,
+        versioningConfiguration: {
+          status: "Enabled",
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketVersioningExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+
+2. Contact AWS Support to provide you with the "Object Lock token" for the specified bucket and use the token (or token ID) within your new `awsS3BucketObjectLockConfiguration` resource.
+   Notice the `objectLockEnabled` argument does not need to be specified as it defaults to `Enabled`.
+
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3BucketObjectLockConfigurationA } from "./.gen/providers/aws/s3-bucket-object-lock-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketObjectLockConfigurationA(this, "example", {
+      bucket: Token.asString(awsS3BucketExample.id),
+      rule: {
+        defaultRetention: {
+          days: 5,
+          mode: "COMPLIANCE",
+        },
+      },
+      token:
+        "NG2MKsfoLqV3A+aquXneSG4LOu/ekrlXkRXwIPFVfERT7XOPos+/k444d7RIH0E3W3p5QU6ml2exS2F/eYCFmMWHJ3hFZGk6al1sIJkmNhUMYmsv0jYVQyTTZNLM+DnfooA6SATt39mM1VW1yJh4E+XljMlWzaBwHKbss3/EjlGDjOmVhaSs4Z6427mMCaFD0RLwsYY7zX49gEc31YfOMJGxbXCXSeyNwAhhM/A8UH7gQf38RmjHjjAFbbbLtl8arsxTPW8F1IYohqwmKIr9DnotLLj8Tg44U2SPwujVaqmlKKP9s41rfgb4UbIm7khSafDBng0LGfxC4pMlT9Ny2w==",
+    });
+  }
+}
+
+```
+
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required, Forces new resource) Name of the bucket.
+* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner.
+* `objectLockEnabled` - (Optional, Forces new resource) Indicates whether this bucket has an Object Lock configuration enabled. Defaults to `Enabled`. Valid values: `Enabled`.
+* `rule` - (Optional) Configuration block for specifying the Object Lock rule for the specified object. [See below](#rule).
+* `token` - (Optional) Token to allow Object Lock to be enabled for an existing bucket. You must contact AWS support for the bucket's "Object Lock token".
+The token is generated in the back-end when [versioning](https://docs.aws.amazon.com/AmazonS3/latest/userguide/manage-versioning-examples.html) is enabled on a bucket. For more details on versioning, see the [`awsS3BucketVersioning` resource](s3_bucket_versioning.html.markdown).
+
+### rule
+
+The `rule` configuration block supports the following arguments:
+
+* `defaultRetention` - (Required) Configuration block for specifying the default Object Lock retention settings for new objects placed in the specified bucket. [See below](#default_retention).
+
+### default_retention
+
+The `defaultRetention` configuration block supports the following arguments:
+
+* `days` - (Optional, Required if `years` is not specified) Number of days that you want to specify for the default retention period.
+* `mode` - (Required) Default Object Lock retention mode you want to apply to new objects placed in the specified bucket. Valid values: `COMPLIANCE`, `GOVERNANCE`.
+* `years` - (Optional, Required if `days` is not specified) Number of years that you want to specify for the default retention period.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket Object Lock configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`).
For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```terraform
+import {
+  to = aws_s3_bucket_object_lock_configuration.example
+  id = "bucket-name,123456789012"
+}
+```
+
+**Using `terraform import` to import** S3 bucket Object Lock configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example:
+
+If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`:
+
+```console
+% terraform import aws_s3_bucket_object_lock_configuration.example bucket-name
+```
+
+If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`):
+
+```console
+% terraform import aws_s3_bucket_object_lock_configuration.example bucket-name,123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_ownership_controls.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_ownership_controls.html.markdown
new file mode 100644
index 00000000000..339fb14ca32
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_ownership_controls.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_ownership_controls"
+description: |-
+  Manages S3 Bucket Ownership Controls.
+---
+
+ 
+
+# Resource: aws_s3_bucket_ownership_controls
+
+Provides a resource to manage S3 Bucket Ownership Controls. For more information, see the [S3 Developer Guide](https://docs.aws.amazon.com/AmazonS3/latest/dev/about-object-ownership.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketOwnershipControls } from "./.gen/providers/aws/s3-bucket-ownership-controls";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    const awsS3BucketOwnershipControlsExample = new S3BucketOwnershipControls(
+      this,
+      "example_1",
+      {
+        bucket: example.id,
+        rule: {
+          objectOwnership: "BucketOwnerPreferred",
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketOwnershipControlsExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `bucket` - (Required) Name of the bucket that you want to associate these Ownership Controls with.
+* `rule` - (Required) Configuration block(s) with Ownership Controls rules. Detailed below.
+
+### rule Configuration Block
+
+The following arguments are required:
+
+* `objectOwnership` - (Required) Object ownership. Valid values: `BucketOwnerPreferred`, `ObjectWriter`, or `BucketOwnerEnforced`.
+    * `BucketOwnerPreferred` - Objects uploaded to the bucket change ownership to the bucket owner if the objects are uploaded with the `bucket-owner-full-control` canned ACL.
+    * `ObjectWriter` - Uploading account will own the object if the object is uploaded with the `bucket-owner-full-control` canned ACL.
+    * `BucketOwnerEnforced` - Bucket owner automatically owns and has full control over every object in the bucket. ACLs no longer affect permissions to data in the S3 bucket.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - S3 Bucket name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Bucket Ownership Controls using S3 Bucket name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import S3 Bucket Ownership Controls using S3 Bucket name. For example:
+
+```console
+% terraform import aws_s3_bucket_ownership_controls.example my-bucket
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_policy.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_policy.html.markdown
new file mode 100644
index 00000000000..f1df4fb9327
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_policy.html.markdown
@@ -0,0 +1,104 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_policy"
+description: |-
+  Attaches a policy to an S3 bucket resource.
+---
+
+ 
+
+# Resource: aws_s3_bucket_policy
+
+Attaches a policy to an S3 bucket resource.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "my-tf-test-bucket", + }); + const allowAccessFromAnotherAccount = new DataAwsIamPolicyDocument( + this, + "allow_access_from_another_account", + { + statement: [ + { + actions: ["s3:GetObject", "s3:ListBucket"], + principals: [ + { + identifiers: ["123456789012"], + type: "AWS", + }, + ], + resources: [example.arn, "${" + example.arn + "}/*"], + }, + ], + } + ); + const awsS3BucketPolicyAllowAccessFromAnotherAccount = new S3BucketPolicy( + this, + "allow_access_from_another_account_2", + { + bucket: example.id, + policy: Token.asString(allowAccessFromAnotherAccount.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyAllowAccessFromAnotherAccount.overrideLogicalId( + "allow_access_from_another_account" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required) Name of the bucket to which to apply the policy. +* `policy` - (Required) Text of the policy. Although this is a bucket policy rather than an IAM policy, the [`awsIamPolicyDocument`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document) data source may be used, so long as it specifies a principal. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). Note: Bucket policies are limited to 20 KB in size. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket policies using the bucket name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import S3 bucket policies using the bucket name. For example: + +```console +% terraform import aws_s3_bucket_policy.allow_access_from_another_account my-tf-test-bucket +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_public_access_block.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_public_access_block.html.markdown new file mode 100644 index 00000000000..7f9566b7029 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_public_access_block.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_public_access_block" +description: |- + Manages S3 bucket-level Public Access Block Configuration +--- + + + +# Resource: aws_s3_bucket_public_access_block + +Manages S3 bucket-level Public Access Block configuration. 
For more information about these settings, see the [AWS S3 Block Public Access documentation](https://docs.aws.amazon.com/AmazonS3/latest/dev/access-control-block-public-access.html).
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketPublicAccessBlock } from "./.gen/providers/aws/s3-bucket-public-access-block";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example",
+    });
+    const awsS3BucketPublicAccessBlockExample = new S3BucketPublicAccessBlock(
+      this,
+      "example_1",
+      {
+        blockPublicAcls: true,
+        blockPublicPolicy: true,
+        bucket: example.id,
+        ignorePublicAcls: true,
+        restrictPublicBuckets: true,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsS3BucketPublicAccessBlockExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required) S3 Bucket to which this Public Access Block configuration should be applied.
+* `blockPublicAcls` - (Optional) Whether Amazon S3 should block public ACLs for this bucket. Defaults to `false`. Enabling this setting does not affect existing policies or ACLs. When set to `true`, it causes the following behavior:
+    * PUT Bucket acl and PUT Object acl calls will fail if the specified ACL allows public access.
+    * PUT Object calls will fail if the request includes an object ACL.
+* `blockPublicPolicy` - (Optional) Whether Amazon S3 should block public bucket policies for this bucket. Defaults to `false`. Enabling this setting does not affect the existing bucket policy. When set to `true`, it causes Amazon S3 to:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignorePublicAcls` - (Optional) Whether Amazon S3 should ignore public ACLs for this bucket. Defaults to `false`. Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set. When set to `true`, it causes Amazon S3 to:
+    * Ignore public ACLs on this bucket and any objects that it contains.
+* `restrictPublicBuckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for this bucket. Defaults to `false`. Enabling this setting does not affect the previously stored bucket policy, except that public and cross-account access within the public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`:
+    * Only the bucket owner and AWS Services can access this bucket if it has a public policy.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the S3 bucket the configuration is attached to.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsS3BucketPublicAccessBlock` using the bucket name.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsS3BucketPublicAccessBlock` using the bucket name. For example: + +```console +% terraform import aws_s3_bucket_public_access_block.example my-bucket +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_replication_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_replication_configuration.html.markdown new file mode 100644 index 00000000000..081844f6f2b --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_replication_configuration.html.markdown @@ -0,0 +1,513 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_replication_configuration" +description: |- + Provides a S3 bucket replication configuration resource. +--- + + + +# Resource: aws_s3_bucket_replication_configuration + +Provides an independent configuration resource for S3 bucket [replication configuration](http://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html). + +~> **NOTE:** S3 Buckets only support a single replication configuration. Declaring multiple `awsS3BucketReplicationConfiguration` resources to the same S3 Bucket will cause a perpetual difference in configuration. + +## Example Usage + +### Using replication configuration + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketReplicationConfigurationA } from "./.gen/providers/aws/s3-bucket-replication-configuration"; +import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new AwsProvider(this, "aws", { + region: "eu-west-1", + }); + const central = new AwsProvider(this, "aws_1", { + alias: "central", + region: "eu-central-1", + }); + const destination = new S3Bucket(this, "destination", { + bucket: "tf-test-bucket-destination-12345", + }); + const source = new S3Bucket(this, "source", { + bucket: "tf-test-bucket-source-12345", + provider: central, + }); + new S3BucketAcl(this, "source_bucket_acl", { + acl: "private", + bucket: source.id, + provider: central, + }); + const awsS3BucketVersioningDestination = new S3BucketVersioningA( + this, + "destination_5", + { + bucket: destination.id, + versioningConfiguration: { + status: "Enabled", + }, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsS3BucketVersioningDestination.overrideLogicalId("destination"); + const awsS3BucketVersioningSource = new S3BucketVersioningA( + this, + "source_6", + { + bucket: source.id, + provider: central, + versioningConfiguration: { + status: "Enabled", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketVersioningSource.overrideLogicalId("source"); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["s3.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const replication = new DataAwsIamPolicyDocument(this, "replication", { + statement: [ + { + actions: ["s3:GetReplicationConfiguration", "s3:ListBucket"], + effect: "Allow", + resources: [source.arn], + }, + { + actions: [ + "s3:GetObjectVersionForReplication", + "s3:GetObjectVersionAcl", + "s3:GetObjectVersionTagging", + ], + effect: "Allow", + resources: ["${" + source.arn + "}/*"], + }, + { + actions: [ + "s3:ReplicateObject", + "s3:ReplicateDelete", + "s3:ReplicateTags", + ], + effect: "Allow", + resources: ["${" + destination.arn + "}/*"], + }, + ], + }); + const awsIamPolicyReplication = new IamPolicy(this, "replication_9", { + name: "tf-iam-role-policy-replication-12345", + policy: Token.asString(replication.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamPolicyReplication.overrideLogicalId("replication"); + const awsIamRoleReplication = new IamRole(this, "replication_10", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "tf-iam-role-replication-12345", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleReplication.overrideLogicalId("replication"); + const awsIamRolePolicyAttachmentReplication = new IamRolePolicyAttachment( + this, + "replication_11", + { + policyArn: Token.asString(awsIamPolicyReplication.arn), + role: Token.asString(awsIamRoleReplication.name), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRolePolicyAttachmentReplication.overrideLogicalId("replication"); + const awsS3BucketReplicationConfigurationReplication = + new S3BucketReplicationConfigurationA(this, "replication_12", { + bucket: source.id, + dependsOn: [awsS3BucketVersioningSource], + provider: central, + role: Token.asString(awsIamRoleReplication.arn), + rule: [ + { + destination: { + bucket: destination.arn, + storageClass: "STANDARD", + }, + filter: { + prefix: "foo", + }, + id: "foobar", + status: "Enabled", + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketReplicationConfigurationReplication.overrideLogicalId( + "replication" + ); + } +} + +``` + +### Bi-Directional Replication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketReplicationConfigurationA } from "./.gen/providers/aws/s3-bucket-replication-configuration"; +import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const east = new S3Bucket(this, "east", { + bucket: "tf-test-bucket-east-12345", + }); + const west = new S3Bucket(this, "west", { + bucket: "tf-test-bucket-west-12345", + provider: awsWest, + }); + const awsS3BucketVersioningEast = new S3BucketVersioningA(this, "east_2", { + bucket: east.id, + versioningConfiguration: { + status: "Enabled", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketVersioningEast.overrideLogicalId("east"); + const awsS3BucketVersioningWest = new S3BucketVersioningA(this, "west_3", { + bucket: west.id, + provider: awsWest, + versioningConfiguration: { + status: "Enabled", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketVersioningWest.overrideLogicalId("west"); + new S3BucketReplicationConfigurationA(this, "east_to_west", { + bucket: east.id, + dependsOn: [awsS3BucketVersioningEast], + role: eastReplication.arn, + rule: [ + { + destination: { + bucket: west.arn, + storageClass: "STANDARD", + }, + filter: { + prefix: "foo", + }, + id: "foobar", + status: "Enabled", + }, + ], + }); + new S3BucketReplicationConfigurationA(this, "west_to_east", { + bucket: west.id, + dependsOn: [awsS3BucketVersioningWest], + provider: awsWest, + role: westReplication.arn, + rule: [ + { + destination: { + bucket: east.arn, + storageClass: "STANDARD", + }, + filter: { + prefix: "foo", + }, + id: "foobar", + status: "Enabled", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required) Name of the source S3 bucket you want Amazon S3 to monitor. +* `role` - (Required) ARN of the IAM role for Amazon S3 to assume when replicating the objects. +* `rule` - (Required) List of configuration blocks describing the rules managing the replication. [See below](#rule). +* `token` - (Optional) Token to allow replication to be enabled on an Object Lock-enabled bucket. You must contact AWS support for the bucket's "Object Lock token". +For more details, see [Using S3 Object Lock with replication](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock-managing.html#object-lock-managing-replication). + +### rule + +~> **NOTE:** Replication to multiple destination buckets requires that `priority` is specified in the `rule` object. If the corresponding rule requires no filter, an empty configuration block `filter {}` must be specified. + +~> **NOTE:** Amazon S3's latest version of the replication configuration is V2, which includes the `filter` attribute for replication rules. + +~> **NOTE:** The `existingObjectReplication` parameter is not supported by Amazon S3 at this time and should not be included in your `rule` configurations. Specifying this parameter will result in `malformedXml` errors. +To replicate existing objects, please refer to the [Replicating existing objects with S3 Batch Replication](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-batch-replication-batch.html) documentation in the Amazon S3 User Guide. 
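+
+Taken together, these notes mean that a V2 configuration replicating to multiple destination buckets gives every rule a unique `priority` and an explicit (possibly empty) `filter`. The following is an illustrative sketch rather than `cdktf convert` output: the binding name follows the examples above, and all bucket names, ARNs, and the role are placeholder assumptions.
+
+```typescript
+// Hedged sketch (not generated by 'cdktf convert'): two V2 rules fanning out
+// to two destinations. Each rule carries a unique priority and an empty
+// filter, and V2 rules also state deleteMarkerReplication explicitly.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3BucketReplicationConfigurationA } from "./.gen/providers/aws/s3-bucket-replication-configuration";
+class FanOutReplicationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketReplicationConfigurationA(this, "fan_out", {
+      bucket: "tf-test-bucket-source-12345", // placeholder source bucket name
+      role: "arn:aws:iam::123456789012:role/replication", // placeholder role ARN
+      rule: [
+        {
+          id: "to-archive",
+          priority: 1,
+          status: "Enabled",
+          filter: {}, // empty filter: rule applies to all objects (V2 schema)
+          deleteMarkerReplication: { status: "Enabled" },
+          destination: {
+            bucket: "arn:aws:s3:::tf-test-bucket-archive-12345", // placeholder
+            storageClass: "GLACIER",
+          },
+        },
+        {
+          id: "to-standby",
+          priority: 2,
+          status: "Enabled",
+          filter: {},
+          deleteMarkerReplication: { status: "Disabled" },
+          destination: {
+            bucket: "arn:aws:s3:::tf-test-bucket-standby-12345", // placeholder
+            storageClass: "STANDARD",
+          },
+        },
+      ],
+    });
+  }
+}
+```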
+
+The `rule` configuration block supports the following arguments:
+
+* `deleteMarkerReplication` - (Optional) Whether delete markers are replicated. This argument is only valid with V2 replication configurations (i.e., when `filter` is used). [See below](#delete_marker_replication).
+* `destination` - (Required) Specifies the destination for the rule. [See below](#destination).
+* `existingObjectReplication` - (Optional) Replicate existing objects in the source bucket according to the rule configurations. [See below](#existing_object_replication).
+* `filter` - (Optional, Conflicts with `prefix`) Filter that identifies subset of objects to which the replication rule applies. [See below](#filter). If not specified, the `rule` will default to using `prefix`.
+* `id` - (Optional) Unique identifier for the rule. Must be less than or equal to 255 characters in length.
+* `prefix` - (Optional, Conflicts with `filter`, **Deprecated**) Object key name prefix identifying one or more objects to which the rule applies. Must be less than or equal to 1024 characters in length. Defaults to an empty string (`""`) if `filter` is not specified.
+* `priority` - (Optional) Priority associated with the rule. Priority should only be set if `filter` is configured. If not provided, defaults to `0`. Priority must be unique between multiple rules.
+* `sourceSelectionCriteria` - (Optional) Specifies special object selection criteria. [See below](#source_selection_criteria).
+* `status` - (Required) Status of the rule. Either `"Enabled"` or `"Disabled"`. The rule is ignored if status is not `"Enabled"`.
+
+### delete_marker_replication
+
+~> **NOTE:** This argument is only available with V2 replication configurations.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The `deleteMarkerReplication` configuration block supports the following arguments:
+
+* `status` - (Required) Whether delete markers should be replicated. Either `"Enabled"` or `"Disabled"`.
+
+### destination
+
+The `destination` configuration block supports the following arguments:
+
+* `accessControlTranslation` - (Optional) Configuration block that specifies the overrides to use for object owners on replication. [See below](#access_control_translation). Specify this only in a cross-account scenario (where source and destination bucket owners are not the same), and you want to change replica ownership to the AWS account that owns the destination bucket. If this is not specified in the replication configuration, the replicas are owned by same AWS account that owns the source object. Must be used in conjunction with `account` owner override configuration.
+* `account` - (Optional) Account ID to specify the replica ownership. Must be used in conjunction with `accessControlTranslation` override configuration.
+* `bucket` - (Required) ARN of the bucket where you want Amazon S3 to store the results.
+* `encryptionConfiguration` - (Optional) Configuration block that provides information about encryption. [See below](#encryption_configuration). If `sourceSelectionCriteria` is specified, you must specify this element.
+* `metrics` - (Optional) Configuration block that specifies replication metrics-related settings enabling replication metrics and events. [See below](#metrics).
+* `replicationTime` - (Optional) Configuration block that specifies S3 Replication Time Control (S3 RTC), including whether S3 RTC is enabled and the time when all objects and operations on objects must be replicated. [See below](#replication_time). Replication Time Control must be used in conjunction with `metrics`.
+* `storageClass` - (Optional) The [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_Destination.html#AmazonS3-Type-Destination-StorageClass) used to store the object. By default, Amazon S3 uses the storage class of the source object to create the object replica.
+
+### access_control_translation
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The `accessControlTranslation` configuration block supports the following arguments:
+
+* `owner` - (Required) Specifies the replica ownership. For default and valid values, see [PUT bucket replication](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketPUTreplication.html) in the Amazon S3 API Reference. Valid values: `Destination`.
+
+### encryption_configuration
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The `encryptionConfiguration` configuration block supports the following arguments:
+
+* `replicaKmsKeyId` - (Required) ID (Key ARN or Alias ARN) of the customer managed AWS KMS key stored in AWS Key Management Service (KMS) for the destination bucket.
+
+### metrics
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The `metrics` configuration block supports the following arguments:
+
+* `eventThreshold` - (Optional) Configuration block that specifies the time threshold for emitting the `s3:replication:operationMissedThreshold` event. [See below](#event_threshold).
+* `status` - (Required) Status of the Destination Metrics. Either `"Enabled"` or `"Disabled"`.
+
+### event_threshold
+
+The `eventThreshold` configuration block supports the following arguments:
+
+* `minutes` - (Required) Time in minutes. Valid values: `15`.
+
+### replication_time
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The `replicationTime` configuration block supports the following arguments:
+
+* `status` - (Required) Status of the Replication Time Control. Either `"Enabled"` or `"Disabled"`.
+* `time` - (Required) Configuration block specifying the time by which replication should be complete for all objects and operations on objects. [See below](#time).
+
+### time
+
+The `time` configuration block supports the following arguments:
+
+* `minutes` - (Required) Time in minutes.
Valid values: `15`.
+
+### existing_object_replication
+
+~> **NOTE:** Replication for existing objects requires activation by AWS Support. See [userguide/replication-what-is-isnot-replicated](https://docs.aws.amazon.com/AmazonS3/latest/userguide/replication-what-is-isnot-replicated.html#existing-object-replication)
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The `existingObjectReplication` configuration block supports the following arguments:
+
+* `status` - (Required) Whether the existing objects should be replicated. Either `"Enabled"` or `"Disabled"`.
+
+### filter
+
+~> **NOTE:** The `filter` argument must be specified as either an empty configuration block (`filter {}`) to imply the rule requires no filter or with exactly one of `prefix`, `tag`, or `and`.
+Replication configuration V1 supports filtering based on only the `prefix` attribute. For backwards compatibility, Amazon S3 continues to support the V1 configuration.
+
+The `filter` configuration block supports the following arguments:
+
+* `and` - (Optional) Configuration block for specifying rule filters. This element is required only if you specify more than one filter. See [and](#and) below for more details.
+* `prefix` - (Optional) Object key name prefix that identifies subset of objects to which the rule applies. Must be less than or equal to 1024 characters in length.
+* `tag` - (Optional) Configuration block for specifying a tag key and value. [See below](#tag).
+
+### and
+
+The `and` configuration block supports the following arguments:
+
+* `prefix` - (Optional) Object key name prefix that identifies subset of objects to which the rule applies. Must be less than or equal to 1024 characters in length.
+* `tags` - (Optional, Required if `prefix` is configured) Map of tags (key and value pairs) that identifies a subset of objects to which the rule applies. The rule applies only to objects having all the tags in its tag set.
+
+### tag
+
+The `tag` configuration block supports the following arguments:
+
+* `key` - (Required) Name of the object key.
+* `value` - (Required) Value of the tag.
+
+### source_selection_criteria
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+The `sourceSelectionCriteria` configuration block supports the following arguments:
+
+* `replicaModifications` - (Optional) Configuration block that you can specify for selections for modifications on replicas. Amazon S3 doesn't replicate replica modifications by default. In the latest version of replication configuration (when `filter` is specified), you can specify this element and set the status to `Enabled` to replicate modifications on replicas.
+
+* `sseKmsEncryptedObjects` - (Optional) Configuration block for filter information for the selection of Amazon S3 objects encrypted with AWS KMS. If specified, `replicaKmsKeyId` in `destination` `encryptionConfiguration` must be specified as well. A hedged sketch of this combination follows.
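+
+The converted snippet above is empty, so the following is an illustrative sketch rather than `cdktf convert` output. It assumes the binding name used in the earlier examples and placeholder ARNs, and pairs `sseKmsEncryptedObjects` with the destination `encryptionConfiguration` that it requires:
+
+```typescript
+// Hedged sketch (not generated by 'cdktf convert'): replicating SSE-KMS
+// objects requires both sourceSelectionCriteria.sseKmsEncryptedObjects and a
+// destination encryptionConfiguration. All names and ARNs are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3BucketReplicationConfigurationA } from "./.gen/providers/aws/s3-bucket-replication-configuration";
+class KmsReplicationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketReplicationConfigurationA(this, "kms_replication", {
+      bucket: "tf-test-bucket-source-12345", // placeholder source bucket name
+      role: "arn:aws:iam::123456789012:role/replication", // placeholder role ARN
+      rule: [
+        {
+          id: "kms-objects",
+          status: "Enabled",
+          filter: {}, // empty filter: V2 schema, rule applies to all objects
+          deleteMarkerReplication: { status: "Enabled" },
+          sourceSelectionCriteria: {
+            sseKmsEncryptedObjects: { status: "Enabled" },
+          },
+          destination: {
+            bucket: "arn:aws:s3:::tf-test-bucket-destination-12345", // placeholder
+            encryptionConfiguration: {
+              replicaKmsKeyId:
+                "arn:aws:kms:eu-west-1:123456789012:key/00000000-0000-0000-0000-000000000000", // placeholder
+            },
+          },
+        },
+      ],
+    });
+  }
+}
+```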
+
+### replica_modifications
+
+The `replicaModifications` configuration block supports the following arguments:
+
+* `status` - (Required) Whether Amazon S3 replicates modifications made to replicas. Either `"Enabled"` or `"Disabled"`.
+
+### sse_kms_encrypted_objects
+
+The `sseKmsEncryptedObjects` configuration block supports the following arguments:
+
+* `status` - (Required) Whether Amazon S3 replicates objects created with server-side encryption using an AWS KMS key. Either `"Enabled"` or `"Disabled"`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - S3 source bucket name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket replication configuration using the `bucket`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import S3 bucket replication configuration using the `bucket`. For example:
+
+```console
+% terraform import aws_s3_bucket_replication_configuration.replication bucket-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_bucket_request_payment_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_request_payment_configuration.html.markdown
new file mode 100644
index 00000000000..4cbe89bdb90
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_bucket_request_payment_configuration.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_request_payment_configuration"
+description: |-
+  Provides an S3 bucket request payment configuration resource.
+---
+
+ 
+
+# Resource: aws_s3_bucket_request_payment_configuration
+
+Provides an S3 bucket request payment configuration resource. For more information, see [Requester Pays Buckets](https://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html).
+
+~> **NOTE:** Destroying an `awsS3BucketRequestPaymentConfiguration` resource resets the bucket's `payer` to the S3 default: the bucket owner.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3BucketRequestPaymentConfiguration } from "./.gen/providers/aws/s3-bucket-request-payment-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3BucketRequestPaymentConfiguration(this, "example", {
+      bucket: Token.asString(awsS3BucketExample.id),
+      payer: "Requester",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required, Forces new resource) Name of the bucket.
+* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner.
+* `payer` - (Required) Specifies who pays for the download and request fees. Valid values: `BucketOwner`, `Requester`.
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket request payment configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import** S3 bucket request payment configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_request_payment_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_request_payment_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_server_side_encryption_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_server_side_encryption_configuration.html.markdown new file mode 100644 index 00000000000..ee9ab6b363b --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_server_side_encryption_configuration.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_server_side_encryption_configuration" +description: |- + Provides a S3 bucket server-side encryption configuration resource. +--- + + + +# Resource: aws_s3_bucket_server_side_encryption_configuration + +Provides a S3 bucket server-side encryption configuration resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3BucketServerSideEncryptionConfigurationA } from "./.gen/providers/aws/s3-bucket-server-side-encryption-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const mykey = new KmsKey(this, "mykey", {
+      deletionWindowInDays: 10,
+      description: "This key is used to encrypt bucket objects",
+    });
+    const mybucket = new S3Bucket(this, "mybucket", {
+      bucket: "mybucket",
+    });
+    new S3BucketServerSideEncryptionConfigurationA(this, "example", {
+      bucket: mybucket.id,
+      rule: [
+        {
+          applyServerSideEncryptionByDefault: {
+            kmsMasterKeyId: mykey.arn,
+            sseAlgorithm: "aws:kms",
+          },
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `bucket` - (Required, Forces new resource) ID (name) of the bucket.
+* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner.
+* `rule` - (Required) Set of server-side encryption configuration rules. [See below](#rule). Currently, only a single rule is supported.
+
+### rule
+
+The `rule` configuration block supports the following arguments:
+
+* `applyServerSideEncryptionByDefault` - (Optional) Single object for setting server-side encryption by default. [See below](#apply_server_side_encryption_by_default).
+* `bucketKeyEnabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS.
+
+### apply_server_side_encryption_by_default
+
+The `applyServerSideEncryptionByDefault` configuration block supports the following arguments:
+
+* `sseAlgorithm` - (Required) Server-side encryption algorithm to use. Valid values are `AES256` and `aws:kms`.
+* `kmsMasterKeyId` - (Optional) AWS KMS master key ID used for the SSE-KMS encryption. This can only be used when you set the value of `sseAlgorithm` as `aws:kms`. The default `aws/s3` AWS KMS master key is used if this element is absent while the `sseAlgorithm` is `aws:kms`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket server-side encryption configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`).
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import** S3 bucket server-side encryption configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_server_side_encryption_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_server_side_encryption_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_versioning.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_versioning.html.markdown new file mode 100644 index 00000000000..7a22344ff72 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_versioning.html.markdown @@ -0,0 +1,215 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_versioning" +description: |- + Provides an S3 bucket versioning resource. +--- + + + +# Resource: aws_s3_bucket_versioning + +Provides a resource for controlling versioning on an S3 bucket. +Deleting this resource will either suspend versioning on the associated S3 bucket or +simply remove the resource from Terraform state if the associated S3 bucket is unversioned. + +For more information, see [How S3 versioning works](https://docs.aws.amazon.com/AmazonS3/latest/userguide/manage-versioning-examples.html). + +~> **NOTE:** If you are enabling versioning on the bucket for the first time, AWS recommends that you wait for 15 minutes after enabling versioning before issuing write operations (PUT or DELETE) on objects in the bucket. + +## Example Usage + +### With Versioning Enabled + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example-bucket", + }); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_1", { + acl: "private", + bucket: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + new S3BucketVersioningA(this, "versioning_example", { + bucket: example.id, + versioningConfiguration: { + status: "Enabled", + }, + }); + } +} + +``` + +### With Versioning Disabled + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example-bucket", + }); + const awsS3BucketAclExample = new S3BucketAcl(this, "example_1", { + acl: "private", + bucket: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketAclExample.overrideLogicalId("example"); + new S3BucketVersioningA(this, "versioning_example", { + bucket: example.id, + versioningConfiguration: { + status: "Disabled", + }, + }); + } +} + +``` + +### Object Dependency On Versioning + +When you create an object whose `versionId` you need and an `awsS3BucketVersioning` resource in the same configuration, you are more likely to have success by ensuring the `s3Object` depends either implicitly (see below) or explicitly (i.e., using `depends_on = [aws_s3_bucket_versioning.example]`) on the `awsS3BucketVersioning` resource. + +~> **NOTE:** For critical and/or production S3 objects, do not create a bucket, enable versioning, and create an object in the bucket within the same configuration. Doing so will not allow the AWS-recommended 15 minutes between enabling versioning and writing to the bucket. + +This example shows the `awsS3ObjectExample` depending implicitly on the versioning resource through the reference to `awsS3BucketVersioningExampleBucket` to define `bucket`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "yotto", + }); + const awsS3BucketVersioningExample = new S3BucketVersioningA( + this, + "example_1", + { + bucket: example.id, + versioningConfiguration: { + status: "Enabled", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketVersioningExample.overrideLogicalId("example"); + const awsS3ObjectExample = new S3Object(this, "example_2", { + bucket: Token.asString(awsS3BucketVersioningExample.id), + key: "droeloe", + source: "example.txt", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the S3 bucket. +* `versioningConfiguration` - (Required) Configuration block for the versioning parameters. [See below](#versioning_configuration). +* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `mfa` - (Optional, Required if `versioningConfiguration` `mfaDelete` is enabled) Concatenation of the authentication device's serial number, a space, and the value that is displayed on your authentication device. + +### versioning_configuration + +~> **Note:** While the `versioningConfigurationStatus` parameter supports `disabled`, this value is only intended for _creating_ or _importing_ resources that correspond to unversioned S3 buckets. +Updating the value from `enabled` or `suspended` to `disabled` will result in errors as the AWS S3 API does not support returning buckets to an unversioned state. + +The `versioningConfiguration` configuration block supports the following arguments: + +* `status` - (Required) Versioning state of the bucket. Valid values: `enabled`, `suspended`, or `disabled`. `disabled` should only be used when creating or importing resources that correspond to unversioned S3 buckets. +* `mfaDelete` - (Optional) Specifies whether MFA delete is enabled in the bucket versioning configuration. Valid values: `enabled` or `disabled`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket versioning using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). 
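+
+`cdktf convert` does not currently translate Terraform `import` blocks, which is why the generated stacks below are empty. As a hypothetical sketch (assuming a cdktf release that exposes `importFrom` on resources), the same import can be expressed in code:
+
+```typescript
+// Hypothetical sketch (not `cdktf convert` output): define the resource
+// with its intended configuration, then mark it for import.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning";
+class ImportSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const versioning = new S3BucketVersioningA(this, "example", {
+      bucket: "bucket-name",
+      versioningConfiguration: {
+        status: "Enabled",
+      },
+    });
+    // The import ID takes the same form as the CLI commands below:
+    // "bucket-name", or "bucket-name,123456789012" when the bucket is
+    // owned by a different account.
+    versioning.importFrom("bucket-name");
+  }
+}
+```
+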
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import** S3 bucket versioning using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_versioning.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_versioning.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_bucket_website_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3_bucket_website_configuration.html.markdown new file mode 100644 index 00000000000..aa497c9061e --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_bucket_website_configuration.html.markdown @@ -0,0 +1,197 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_bucket_website_configuration" +description: |- + Provides an S3 bucket website configuration resource. +--- + + + +# Resource: aws_s3_bucket_website_configuration + +Provides an S3 bucket website configuration resource. For more information, see [Hosting Websites on S3](https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html). + +## Example Usage + +### With `routingRule` configured + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
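+ *
+ * The single routingRule below redirects requests for keys under "docs/"
+ * to the same key under "documents/" (see the condition/redirect pair in
+ * the configuration).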
+ */ +import { S3BucketWebsiteConfiguration } from "./.gen/providers/aws/s3-bucket-website-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3BucketWebsiteConfiguration(this, "example", { + bucket: Token.asString(awsS3BucketExample.id), + errorDocument: { + key: "error.html", + }, + indexDocument: { + suffix: "index.html", + }, + routingRule: [ + { + condition: { + keyPrefixEquals: "docs/", + }, + redirect: { + replaceKeyPrefixWith: "documents/", + }, + }, + ], + }); + } +} + +``` + +### With `routingRules` configured + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3BucketWebsiteConfiguration } from "./.gen/providers/aws/s3-bucket-website-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3BucketWebsiteConfiguration(this, "example", { + bucket: Token.asString(awsS3BucketExample.id), + errorDocument: { + key: "error.html", + }, + indexDocument: { + suffix: "index.html", + }, + routingRules: + '[{\n "Condition": {\n "KeyPrefixEquals": "docs/"\n },\n "Redirect": {\n "ReplaceKeyPrefixWith": ""\n }\n}]\n\n', + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `bucket` - (Required, Forces new resource) Name of the bucket. +* `errorDocument` - (Optional, Conflicts with `redirectAllRequestsTo`) Name of the error document for the website. [See below](#error_document). +* `expectedBucketOwner` - (Optional, Forces new resource) Account ID of the expected bucket owner. +* `indexDocument` - (Optional, Required if `redirectAllRequestsTo` is not specified) Name of the index document for the website. [See below](#index_document). +* `redirectAllRequestsTo` - (Optional, Required if `indexDocument` is not specified) Redirect behavior for every request to this bucket's website endpoint. [See below](#redirect_all_requests_to). Conflicts with `errorDocument`, `indexDocument`, and `routingRule`. +* `routingRule` - (Optional, Conflicts with `redirectAllRequestsTo` and `routingRules`) List of rules that define when a redirect is applied and the redirect behavior. [See below](#routing_rule). +* `routingRules` - (Optional, Conflicts with `routingRule` and `redirectAllRequestsTo`) JSON array containing [routing rules](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules.html) + describing redirect behavior and when redirects are applied. Use this parameter when your routing rules contain empty String values (`""`) as seen in the [example above](#with-routing_rules-configured). + +### error_document + +The `errorDocument` configuration block supports the following arguments: + +* `key` - (Required) Object key name to use when a 4XX class error occurs. + +### index_document + +The `indexDocument` configuration block supports the following arguments: + +* `suffix` - (Required) Suffix that is appended to a request that is for a directory on the website endpoint. +For example, if the suffix is `indexHtml` and you make a request to `samplebucket/images/`, the data that is returned will be for the object with the key name `images/indexHtml`. 
+The suffix must not be empty and must not include a slash character. + +### redirect_all_requests_to + +The `redirectAllRequestsTo` configuration block supports the following arguments: + +* `hostName` - (Required) Name of the host where requests are redirected. +* `protocol` - (Optional) Protocol to use when redirecting requests. The default is the protocol that is used in the original request. Valid values: `http`, `https`. + +### routing_rule + +The `routingRule` configuration block supports the following arguments: + +* `condition` - (Optional) Configuration block for describing a condition that must be met for the specified redirect to apply. [See below](#condition). +* `redirect` - (Required) Configuration block for redirect information. [See below](#redirect). + +### condition + +The `condition` configuration block supports the following arguments: + +* `httpErrorCodeReturnedEquals` - (Optional, Required if `keyPrefixEquals` is not specified) HTTP error code when the redirect is applied. If specified with `keyPrefixEquals`, then both must be true for the redirect to be applied. +* `keyPrefixEquals` - (Optional, Required if `httpErrorCodeReturnedEquals` is not specified) Object key name prefix when the redirect is applied. If specified with `httpErrorCodeReturnedEquals`, then both must be true for the redirect to be applied. + +### redirect + +The `redirect` configuration block supports the following arguments: + +* `hostName` - (Optional) Host name to use in the redirect request. +* `httpRedirectCode` - (Optional) HTTP redirect code to use on the response. +* `protocol` - (Optional) Protocol to use when redirecting requests. The default is the protocol that is used in the original request. Valid values: `http`, `https`. +* `replaceKeyPrefixWith` - (Optional, Conflicts with `replaceKeyWith`) Object key prefix to use in the redirect request. For example, to redirect requests for all pages with prefix `docs/` (objects in the `docs/` folder) to `documents/`, you can set a `condition` block with `keyPrefixEquals` set to `docs/` and in the `redirect` set `replaceKeyPrefixWith` to `/documents`. +* `replaceKeyWith` - (Optional, Conflicts with `replaceKeyPrefixWith`) Specific object key to use in the redirect request. For example, redirect request to `errorHtml`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The `bucket` or `bucket` and `expectedBucketOwner` separated by a comma (`,`) if the latter is provided. +* `websiteDomain` - Domain of the website endpoint. This is used to create Route 53 alias records. +* `websiteEndpoint` - Website endpoint. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 bucket website configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). 
For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import** S3 bucket website configuration using the `bucket` or using the `bucket` and `expectedBucketOwner` separated by a comma (`,`). For example: + +If the owner (account ID) of the source bucket is the same account used to configure the Terraform AWS Provider, import using the `bucket`: + +```console +% terraform import aws_s3_bucket_website_configuration.example bucket-name +``` + +If the owner (account ID) of the source bucket differs from the account used to configure the Terraform AWS Provider, import using the `bucket` and `expectedBucketOwner` separated by a comma (`,`): + +```console +% terraform import aws_s3_bucket_website_configuration.example bucket-name,123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3_object.html.markdown b/website/docs/cdktf/typescript/r/s3_object.html.markdown new file mode 100644 index 00000000000..04553d816fa --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3_object.html.markdown @@ -0,0 +1,300 @@ +--- +subcategory: "S3 (Simple Storage)" +layout: "aws" +page_title: "AWS: aws_s3_object" +description: |- + Provides an S3 object resource. +--- + + + +# Resource: aws_s3_object + +Provides an S3 object resource. + +## Example Usage + +### Uploading a file to a bucket + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3Object(this, "object", { + bucket: "your_bucket_name", + etag: Token.asString(Fn.filemd5("path/to/file")), + key: "new_object_key", + source: "path/to/file", + }); + } +} + +``` + +### Encrypting with KMS Key + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
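+ *
+ * The S3Object below passes the ARN of the customer-managed KMS key
+ * created in this stack (examplekms.arn) as kmsKeyId.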
+ */ +import { KmsKey } from "./.gen/providers/aws/kms-key"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplekms = new KmsKey(this, "examplekms", { + deletionWindowInDays: 7, + description: "KMS key 1", + }); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3ObjectExample = new S3Object(this, "example_3", { + bucket: examplebucket.id, + key: "someobject", + kmsKeyId: examplekms.arn, + source: "index.html", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + } +} + +``` + +### Server Side Encryption with S3 Default Master Key + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3ObjectExample = new S3Object(this, "example_2", { + bucket: examplebucket.id, + key: "someobject", + serverSideEncryption: "aws:kms", + source: "index.html", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + } +} + +``` + +### Server Side Encryption with AWS-Managed Key + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3ObjectExample = new S3Object(this, "example_2", { + bucket: examplebucket.id, + key: "someobject", + serverSideEncryption: "AES256", + source: "index.html", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsS3ObjectExample.overrideLogicalId("example"); + } +} + +``` + +### S3 Object Lock + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketAcl } from "./.gen/providers/aws/s3-bucket-acl"; +import { S3BucketVersioningA } from "./.gen/providers/aws/s3-bucket-versioning"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const examplebucket = new S3Bucket(this, "examplebucket", { + bucket: "examplebuckettftest", + objectLockEnabled: true, + }); + new S3BucketAcl(this, "example", { + acl: "private", + bucket: examplebucket.id, + }); + const awsS3BucketVersioningExample = new S3BucketVersioningA( + this, + "example_2", + { + bucket: examplebucket.id, + versioningConfiguration: { + status: "Enabled", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketVersioningExample.overrideLogicalId("example"); + new S3Object(this, "examplebucket_object", { + bucket: examplebucket.id, + dependsOn: [awsS3BucketVersioningExample], + forceDestroy: true, + key: "someobject", + objectLockLegalHoldStatus: "ON", + objectLockMode: "GOVERNANCE", + objectLockRetainUntilDate: "2021-12-31T23:59:60Z", + source: "important.txt", + }); + } +} + +``` + +## Argument Reference + +-> **Note:** If you specify `contentEncoding` you are responsible for encoding the body appropriately. `source`, `content`, and `contentBase64` all expect already encoded/compressed bytes. + +The following arguments are required: + +* `bucket` - (Required) Name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified. +* `key` - (Required) Name of the object once it is in the bucket. + +The following arguments are optional: + +* `acl` - (Optional) [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `publicRead`, `publicReadWrite`, `awsExecRead`, `authenticatedRead`, `bucketOwnerRead`, and `bucketOwnerFullControl`. +* `bucketKeyEnabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS. +* `cacheControl` - (Optional) Caching behavior along the request/reply chain Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details. +* `contentBase64` - (Optional, conflicts with `source` and `content`) Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file. +* `contentDisposition` - (Optional) Presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information. 
+* `contentEncoding` - (Optional) Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
+* `contentLanguage` - (Optional) Language the content is in, e.g., en-US or en-GB.
+* `contentType` - (Optional) Standard MIME type describing the format of the object data, e.g., application/octet-stream. All valid MIME types are valid for this input.
+* `content` - (Optional, conflicts with `source` and `contentBase64`) Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text.
+* `etag` - (Optional) Triggers updates when the value changes. The only meaningful value is `filemd5("path/to/file")` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier). This attribute is not compatible with KMS encryption (`kmsKeyId` or `server_side_encryption = "aws:kms"`). Also, if an object is larger than 16 MB, the AWS Management Console will upload or copy that object as a Multipart Upload, and therefore the ETag will not be an MD5 digest (see `sourceHash` instead).
+* `forceDestroy` - (Optional) Whether to allow the object to be deleted by removing any legal hold on any object version. Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
+* `kmsKeyId` - (Optional) ARN of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the `awsKmsKey` resource, use the `arn` attribute. If referencing the `awsKmsAlias` data source or resource, use the `targetKeyArn` attribute. Terraform will only perform drift detection if a configuration value is provided.
+* `metadata` - (Optional) Map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`; note that only lowercase labels are currently supported by the AWS Go API).
+* `objectLockLegalHoldStatus` - (Optional) [Legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
+* `objectLockMode` - (Optional) Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `objectLockRetainUntilDate` - (Optional) Date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
+* `serverSideEncryption` - (Optional) Server-side encryption of the object in S3. Valid values are `AES256` and `aws:kms`.
+* `sourceHash` - (Optional) Triggers updates like `etag` but useful to address `etag` encryption limitations. Set using `filemd5("path/to/source")` (Terraform 0.11.12 or later). (The value is only stored in state and not saved by AWS.)
+* `source` - (Optional, conflicts with `content` and `contentBase64`) Path to a file that will be read and uploaded as raw bytes for the object content.
+* `storageClass` - (Optional) [Storage Class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#AmazonS3-PutObject-request-header-StorageClass) for the object. Defaults to "`standard`". +* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `websiteRedirect` - (Optional) Target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html). + +If no content is provided through `source`, `content` or `contentBase64`, then the object will be empty. + +-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/indexHtml` and `indexHtml` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html). +* `id` - `key` of the resource supplied above +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `versionId` - Unique version ID value for the object, if bucket versioning is enabled. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import objects using the `id` or S3 URL. For example: + +Import using the `id`, which is the bucket name and the key together: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Import using S3 URL syntax: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +**Using `terraform import` to import** objects using the `id` or S3 URL. 
For example:
+
+Import using the `id`, which is the bucket name and the key together:
+
+```console
+% terraform import aws_s3_object.example some-bucket-name/some/key.txt
+```
+
+Import using S3 URL syntax:
+
+```console
+% terraform import aws_s3_object.example s3://some-bucket-name/some/key.txt
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/s3_object_copy.html.markdown b/website/docs/cdktf/typescript/r/s3_object_copy.html.markdown
new file mode 100644
index 00000000000..c7d90f786dd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/s3_object_copy.html.markdown
@@ -0,0 +1,121 @@
+---
+subcategory: "S3 (Simple Storage)"
+layout: "aws"
+page_title: "AWS: aws_s3_object_copy"
+description: |-
+  Provides a resource for copying an S3 object.
+---
+
+
+
+# Resource: aws_s3_object_copy
+
+Provides a resource for copying an S3 object.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3ObjectCopy } from "./.gen/providers/aws/s3-object-copy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3ObjectCopy(this, "test", {
+      bucket: "destination_bucket",
+      grant: [
+        {
+          permissions: ["READ"],
+          type: "Group",
+          uri: "http://acs.amazonaws.com/groups/global/AllUsers",
+        },
+      ],
+      key: "destination_key",
+      source: "source_bucket/source_key",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `bucket` - (Required) Name of the bucket to put the file in.
+* `key` - (Required) Name of the object once it is in the bucket.
+* `source` - (Required) Specifies the source object for the copy operation. You specify the value in one of two formats. For objects not accessed through an access point, specify the name of the source bucket and the key of the source object, separated by a slash (`/`). For example, `testbucket/test1.json`. For objects accessed through access points, specify the ARN of the object as accessed through the access point, in the format `arn:aws:s3:<Region>:<account-id>:accesspoint/<access-point-name>/object/<key>`. For example, `arn:aws:s3:us-west-2:9999912999:accesspoint/my-access-point/object/testbucket/test1.json`.
+
+The following arguments are optional:
+
+* `acl` - (Optional) [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `authenticated-read`, `aws-exec-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Conflicts with `grant`.
+* `cacheControl` - (Optional) Specifies caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
+* `contentDisposition` - (Optional) Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
+* `contentEncoding` - (Optional) Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
+* `contentLanguage` - (Optional) Language the content is in, e.g., en-US or en-GB.
+* `contentType` - (Optional) Standard MIME type describing the format of the object data, e.g., `application/octet-stream`. All valid MIME types are valid for this input.
+* `copyIfMatch` - (Optional) Copies the object if its entity tag (ETag) matches the specified tag.
+* `copyIfModifiedSince` - (Optional) Copies the object if it has been modified since the specified time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `copyIfNoneMatch` - (Optional) Copies the object if its entity tag (ETag) is different than the specified ETag.
+* `copyIfUnmodifiedSince` - (Optional) Copies the object if it hasn't been modified since the specified time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `customerAlgorithm` - (Optional) Specifies the algorithm to use when encrypting the object (for example, AES256).
+* `customerKey` - (Optional) Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon S3 does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header.
+* `customerKeyMd5` - (Optional) Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the encryption key was transmitted without error.
+* `expectedBucketOwner` - (Optional) Account ID of the expected destination bucket owner. If the destination bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
+* `expectedSourceBucketOwner` - (Optional) Account ID of the expected source bucket owner. If the source bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
+* `expires` - (Optional) Date and time at which the object is no longer cacheable, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `forceDestroy` - (Optional) Allow the object to be deleted by removing any legal hold on any object version. Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled.
+* `grant` - (Optional) Configuration block for header grants. Documented below. Conflicts with `acl`.
+* `kmsEncryptionContext` - (Optional) Specifies the AWS KMS Encryption Context to use for object encryption. The value is a base64-encoded UTF-8 string holding JSON with the encryption context key-value pairs.
+* `kmsKeyId` - (Optional) Specifies the AWS KMS Key ARN to use for object encryption. This value is a fully qualified **ARN** of the KMS Key. If using `awsKmsKey`, use the exported `arn` attribute: `kms_key_id = aws_kms_key.foo.arn`
+* `metadata` - (Optional) Map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`; note that only lowercase labels are currently supported by the AWS Go API).
+* `metadataDirective` - (Optional) Specifies whether the metadata is copied from the source object or replaced with metadata provided in the request. Valid values are `COPY` and `REPLACE`.
+* `objectLockLegalHoldStatus` - (Optional) The [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`.
+* `objectLockMode` - (Optional) Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`.
+* `objectLockRetainUntilDate` - (Optional) Date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods).
+* `requestPayer` - (Optional) Confirms that the requester knows that they will be charged for the request. Bucket owners need not specify this parameter in their requests. For information about downloading objects from Requester Pays buckets, see [Downloading Objects in Requester Pays Buckets](https://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html) in the Amazon S3 Developer Guide. If included, the only valid value is `requester`.
+* `serverSideEncryption` - (Optional) Specifies server-side encryption of the object in S3. Valid values are `AES256` and `aws:kms`.
+* `sourceCustomerAlgorithm` - (Optional) Specifies the algorithm to use when decrypting the source object (for example, AES256).
+* `sourceCustomerKey` - (Optional) Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source object. The encryption key provided in this header must be one that was used when the source object was created.
+* `sourceCustomerKeyMd5` - (Optional) Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure that the encryption key was transmitted without error.
+* `storageClass` - (Optional) Specifies the desired [storage class](https://docs.aws.amazon.com/AmazonS3/latest/API/API_CopyObject.html#AmazonS3-CopyObject-request-header-StorageClass) for the object. Defaults to `STANDARD`.
+* `taggingDirective` - (Optional) Specifies whether the object tag-set is copied from the source object or replaced with the tag-set provided in the request. Valid values are `COPY` and `REPLACE`.
+* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `websiteRedirect` - (Optional) Specifies a target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html).
+
+### grant
+
+-> For more information on header grants, see the Amazon Simple Storage Service (S3) [API Reference: PutObjectAcl](https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObjectAcl.html).
+
+This configuration block has the following required arguments:
+
+* `permissions` - (Required) List of permissions to grant to the grantee. Valid values are `READ`, `READ_ACP`, `WRITE_ACP`, `FULL_CONTROL`.
+* `type` - (Required) Type of grantee. Valid values are `CanonicalUser`, `Group`, and `AmazonCustomerByEmail`.
+
+This configuration block has the following optional arguments (exactly one of the three is required; see the sketch after this list):
+
+* `email` - (Optional) Email address of the grantee. Used only when `type` is `AmazonCustomerByEmail`.
+* `id` - (Optional) Canonical user ID of the grantee. Used only when `type` is `CanonicalUser`.
+* `uri` - (Optional) URI of the grantee group. Used only when `type` is `Group`.
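+
+As a hypothetical sketch of the grantee variants above (the canonical user ID is a placeholder, not a real grantee):
+
+```typescript
+// Sketch only: one grant block per grantee type. Exactly one of
+// `email`, `id`, or `uri` is set, matching the declared `type`.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3ObjectCopy } from "./.gen/providers/aws/s3-object-copy";
+class GrantSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3ObjectCopy(this, "copy_with_grants", {
+      bucket: "destination_bucket",
+      key: "destination_key",
+      source: "source_bucket/source_key",
+      grant: [
+        // Group grantee, identified by `uri`.
+        {
+          type: "Group",
+          uri: "http://acs.amazonaws.com/groups/global/AllUsers",
+          permissions: ["READ"],
+        },
+        // Canonical user grantee, identified by `id` (placeholder value).
+        {
+          type: "CanonicalUser",
+          id: "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
+          permissions: ["READ_ACP"],
+        },
+      ],
+    });
+  }
+}
+```
+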
+ +-> **Note:** Terraform ignores all leading `/`s in the object's `key` and treats multiple `/`s in the rest of the object's `key` as a single `/`, so values of `/indexHtml` and `indexHtml` correspond to the same S3 object as do `first//second///third//` and `first/second/third/`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html). +* `expiration` - If the object expiration is configured, this attribute will be set. +* `id` - The `key` of the resource supplied above. +* `lastModified` - Returns the date that the object was last modified, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `requestCharged` - If present, indicates that the requester was successfully charged for the request. +* `sourceVersionId` - Version of the copied object in the source bucket. +* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `versionId` - Version ID of the newly created copy. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_access_point_policy.html.markdown b/website/docs/cdktf/typescript/r/s3control_access_point_policy.html.markdown new file mode 100644 index 00000000000..d5fbdbbd488 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_access_point_policy.html.markdown @@ -0,0 +1,116 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_access_point_policy" +description: |- + Provides a resource to manage an S3 Access Point resource policy. +--- + + + +# Resource: aws_s3control_access_point_policy + +Provides a resource to manage an S3 Access Point resource policy. + +~> **NOTE on Access Points and Access Point Policies:** Terraform provides both a standalone Access Point Policy resource and an [Access Point](s3_access_point.html) resource with a resource policy defined in-line. You cannot use an Access Point with in-line resource policy in conjunction with an Access Point Policy resource. Doing so will cause a conflict of policies and will overwrite the access point's resource policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
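+ *
+ * The access point below ignores drift on its in-line `policy`
+ * (lifecycle.ignoreChanges), so the standalone
+ * S3ControlAccessPointPolicy resource is the sole owner of the
+ * policy document.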
+ */ +import { S3AccessPoint } from "./.gen/providers/aws/s3-access-point"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3ControlAccessPointPolicy } from "./.gen/providers/aws/s3-control-access-point-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const awsS3AccessPointExample = new S3AccessPoint(this, "example_1", { + bucket: example.id, + lifecycle: { + ignoreChanges: [policy], + }, + name: "example", + publicAccessBlockConfiguration: { + blockPublicAcls: true, + blockPublicPolicy: false, + ignorePublicAcls: true, + restrictPublicBuckets: false, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3AccessPointExample.overrideLogicalId("example"); + const awsS3ControlAccessPointPolicyExample = new S3ControlAccessPointPolicy( + this, + "example_2", + { + accessPointArn: Token.asString(awsS3AccessPointExample.arn), + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "s3:GetObjectTagging", + Effect: "Allow", + Principal: { + AWS: "*", + }, + Resource: "${" + awsS3AccessPointExample.arn + "}/object/*", + }, + ], + Version: "2008-10-17", + }) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ControlAccessPointPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accessPointArn` - (Required) The ARN of the access point that you want to associate with the specified policy. +* `policy` - (Required) The policy that you want to apply to the specified access point. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `hasPublicAccessPolicy` - Indicates whether this access point currently has a policy that allows public access. +* `id` - The AWS account ID and access point name separated by a colon (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Access Point policies using the `accessPointArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Access Point policies using the `accessPointArn`. For example: + +```console +% terraform import aws_s3control_access_point_policy.example arn:aws:s3:us-west-2:123456789012:accesspoint/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_bucket.html.markdown b/website/docs/cdktf/typescript/r/s3control_bucket.html.markdown new file mode 100644 index 00000000000..a2d1e6c9808 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_bucket.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_bucket" +description: |- + Manages an S3 Control Bucket. +--- + + + +# Resource: aws_s3control_bucket + +Provides a resource to manage an S3 Control Bucket. 
+ +-> This functionality is for managing [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html). To manage S3 Buckets in an AWS Partition, see the [`awsS3Bucket` resource](/docs/providers/aws/r/s3_bucket.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3ControlBucket } from "./.gen/providers/aws/s3-control-bucket"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3ControlBucket(this, "example", { + bucket: "example", + outpostId: Token.asString(dataAwsOutpostsOutpostExample.id), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Name of the bucket. +* `outpostId` - (Required) Identifier of the Outpost to contain this bucket. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the bucket. +* `creationDate` - UTC creation date in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `id` - Amazon Resource Name (ARN) of the bucket. +* `publicAccessBlockEnabled` - Boolean whether Public Access Block is enabled. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Control Buckets using Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import S3 Control Buckets using Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_s3control_bucket.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/bucket/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_bucket_lifecycle_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3control_bucket_lifecycle_configuration.html.markdown new file mode 100644 index 00000000000..a01caa3c6f6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_bucket_lifecycle_configuration.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_bucket_lifecycle_configuration" +description: |- + Manages an S3 Control Bucket Lifecycle Configuration. +--- + + + +# Resource: aws_s3control_bucket_lifecycle_configuration + +Provides a resource to manage an S3 Control Bucket Lifecycle Configuration. 
+ +~> **NOTE:** Each S3 Control Bucket can only have one Lifecycle Configuration. Using multiple of this resource against the same S3 Control Bucket will result in perpetual differences each Terraform run. + +-> This functionality is for managing [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html). To manage S3 Bucket Lifecycle Configurations in an AWS Partition, see the [`awsS3Bucket` resource](/docs/providers/aws/r/s3_bucket.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3ControlBucketLifecycleConfiguration } from "./.gen/providers/aws/s3-control-bucket-lifecycle-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3ControlBucketLifecycleConfiguration(this, "example", { + bucket: Token.asString(awsS3ControlBucketExample.arn), + rule: [ + { + expiration: { + days: 365, + }, + filter: { + prefix: "logs/", + }, + id: "logs", + }, + { + expiration: { + days: 7, + }, + filter: { + prefix: "temp/", + }, + id: "temp", + }, + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Amazon Resource Name (ARN) of the bucket. +* `rule` - (Required) Configuration block(s) containing lifecycle rules for the bucket. + * `abortIncompleteMultipartUpload` - (Optional) Configuration block containing settings for abort incomplete multipart upload. + * `daysAfterInitiation` - (Required) Number of days after which Amazon S3 aborts an incomplete multipart upload. + * `expiration` - (Optional) Configuration block containing settings for expiration of objects. + * `date` - (Optional) Date the object is to be deleted. Should be in `yyyyMmDd` date format, e.g., `20200930`. + * `days` - (Optional) Number of days before the object is to be deleted. + * `expiredObjectDeleteMarker` - (Optional) Enable to remove a delete marker with no noncurrent versions. Cannot be specified with `date` or `days`. + * `filter` - (Optional) Configuration block containing settings for filtering. + * `prefix` - (Optional) Object prefix for rule filtering. + * `tags` - (Optional) Key-value map of object tags for rule filtering. + * `id` - (Required) Unique identifier for the rule. + * `status` - (Optional) Status of the rule. Valid values: `enabled` and `disabled`. Defaults to `enabled`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the bucket. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Control Bucket Lifecycle Configurations using the Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import S3 Control Bucket Lifecycle Configurations using the Amazon Resource Name (ARN). 
For example: + +```console +% terraform import aws_s3control_bucket_lifecycle_configuration.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/bucket/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_bucket_policy.html.markdown b/website/docs/cdktf/typescript/r/s3control_bucket_policy.html.markdown new file mode 100644 index 00000000000..41056c1293b --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_bucket_policy.html.markdown @@ -0,0 +1,91 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_bucket_policy" +description: |- + Manages an S3 Control Bucket Policy. +--- + + + +# Resource: aws_s3control_bucket_policy + +Provides a resource to manage an S3 Control Bucket Policy. + +-> This functionality is for managing [S3 on Outposts](https://docs.aws.amazon.com/AmazonS3/latest/dev/S3onOutposts.html). To manage S3 Bucket Policies in an AWS Partition, see the [`awsS3BucketPolicy` resource](/docs/providers/aws/r/s3_bucket_policy.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3ControlBucketPolicy } from "./.gen/providers/aws/s3-control-bucket-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3ControlBucketPolicy(this, "example", { + bucket: Token.asString(awsS3ControlBucketExample.arn), + policy: Token.asString( + Fn.jsonencode({ + Id: "testBucketPolicy", + Statement: [ + { + Action: "s3-outposts:PutBucketLifecycleConfiguration", + Effect: "Deny", + Principal: { + AWS: "*", + }, + Resource: awsS3ControlBucketExample.arn, + Sid: "statement1", + }, + ], + Version: "2012-10-17", + }) + ), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `bucket` - (Required) Amazon Resource Name (ARN) of the bucket. +* `policy` - (Required) JSON string of the resource policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the bucket. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Control Bucket Policies using the Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import S3 Control Bucket Policies using the Amazon Resource Name (ARN). 
For example: + +```console +% terraform import aws_s3control_bucket_policy.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/bucket/example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_multi_region_access_point.html.markdown b/website/docs/cdktf/typescript/r/s3control_multi_region_access_point.html.markdown new file mode 100644 index 00000000000..debff0f99ea --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_multi_region_access_point.html.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_multi_region_access_point" +description: |- + Provides a resource to manage an S3 Multi-Region Access Point associated with specified buckets. +--- + + + +# Resource: aws_s3control_multi_region_access_point + +Provides a resource to manage an S3 Multi-Region Access Point associated with specified buckets. + +## Example Usage + +### Multiple AWS Buckets in Different Regions + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3ControlMultiRegionAccessPoint } from "./.gen/providers/aws/s3-control-multi-region-access-point"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const primaryRegion = new AwsProvider(this, "aws", { + alias: "primary_region", + region: "us-east-1", + }); + const secondaryRegion = new AwsProvider(this, "aws_1", { + alias: "secondary_region", + region: "us-west-2", + }); + const barBucket = new S3Bucket(this, "bar_bucket", { + bucket: "example-bucket-bar", + provider: secondaryRegion, + }); + const fooBucket = new S3Bucket(this, "foo_bucket", { + bucket: "example-bucket-foo", + provider: primaryRegion, + }); + new S3ControlMultiRegionAccessPoint(this, "example", { + details: { + name: "example", + region: [ + { + bucket: fooBucket.id, + }, + { + bucket: barBucket.id, + }, + ], + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) The AWS account ID for the owner of the buckets for which you want to create a Multi-Region Access Point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `details` - (Required) A configuration block containing details about the Multi-Region Access Point. See [Details Configuration Block](#details-configuration) below for more details + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `60M`) +* `delete` - (Default `15M`) + +### Details Configuration + +The `details` block supports the following: + +* `name` - (Required) The name of the Multi-Region Access Point. +* `publicAccessBlock` - (Optional) Configuration block to manage the `publicAccessBlock` configuration that you want to apply to this Multi-Region Access Point. You can enable the configuration options in any combination. See [Public Access Block Configuration](#public-access-block-configuration) below for more details. 
+* `region` - (Required) The Region configuration block to specify the bucket associated with the Multi-Region Access Point. See [Region Configuration](#region-configuration) below for more details.
+
+For more information, see the documentation on [Multi-Region Access Points](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPoints.html).
+
+### Public Access Block Configuration
+
+The `publicAccessBlock` block supports the following:
+
+* `blockPublicAcls` - (Optional) Whether Amazon S3 should block public ACLs for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing policies or ACLs. When set to `true`, it causes the following behavior:
+    * PUT Bucket acl and PUT Object acl calls fail if the specified ACL is public.
+    * PUT Object calls fail if the request includes a public ACL.
+    * PUT Bucket calls fail if the request includes a public ACL.
+* `blockPublicPolicy` - (Optional) Whether Amazon S3 should block public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect existing bucket policies. When set to `true`, it causes Amazon S3 to:
+    * Reject calls to PUT Bucket policy if the specified bucket policy allows public access.
+* `ignorePublicAcls` - (Optional) Whether Amazon S3 should ignore public ACLs for buckets in this account. Defaults to `true`. Enabling this setting does not affect the persistence of any existing ACLs and doesn't prevent new public ACLs from being set. When set to `true`, it causes Amazon S3 to:
+    * Ignore all public ACLs on buckets in this account and any objects that they contain.
+* `restrictPublicBuckets` - (Optional) Whether Amazon S3 should restrict public bucket policies for buckets in this account. Defaults to `true`. Enabling this setting does not affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked. When set to `true`:
+    * Only the bucket owner and AWS Services can access buckets with public policies.
+
+### Region Configuration
+
+The `region` block supports the following:
+
+* `bucket` - (Required) The name of the associated bucket for the Region.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `alias` - The alias for the Multi-Region Access Point.
+* `arn` - Amazon Resource Name (ARN) of the Multi-Region Access Point.
+* `domainName` - The DNS domain name of the S3 Multi-Region Access Point in the format _`alias`_.accesspoint.s3-global.amazonaws.com. For more information, see the documentation on [Multi-Region Access Point Requests](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPointRequests.html).
+* `id` - The AWS account ID and access point name separated by a colon (`:`).
+* `status` - The current status of the Multi-Region Access Point. One of: `ready`, `inconsistentAcrossRegions`, `creating`, `partiallyCreated`, `partiallyDeleted`, `deleting`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Multi-Region Access Points using the `accountId` and `name` of the Multi-Region Access Point separated by a colon (`:`).
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Multi-Region Access Points using the `accountId` and `name` of the Multi-Region Access Point separated by a colon (`:`). For example: + +```console +% terraform import aws_s3control_multi_region_access_point.example 123456789012:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_multi_region_access_point_policy.html.markdown b/website/docs/cdktf/typescript/r/s3control_multi_region_access_point_policy.html.markdown new file mode 100644 index 00000000000..47ec9e6021a --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_multi_region_access_point_policy.html.markdown @@ -0,0 +1,143 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_multi_region_access_point_policy" +description: |- + Provides a resource to manage an S3 Multi-Region Access Point access control policy. +--- + + + +# Resource: aws_s3control_multi_region_access_point_policy + +Provides a resource to manage an S3 Multi-Region Access Point access control policy. + +## Example Usage + +### Basic Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3ControlMultiRegionAccessPoint } from "./.gen/providers/aws/s3-control-multi-region-access-point"; +import { S3ControlMultiRegionAccessPointPolicy } from "./.gen/providers/aws/s3-control-multi-region-access-point-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const fooBucket = new S3Bucket(this, "foo_bucket", { + bucket: "example-bucket-foo", + }); + const example = new S3ControlMultiRegionAccessPoint(this, "example", { + details: { + name: "example", + region: [ + { + bucket: fooBucket.id, + }, + ], + }, + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsPartitionCurrent = new DataAwsPartition(this, "current_3", {}); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + dataAwsPartitionCurrent.overrideLogicalId("current"); + const awsS3ControlMultiRegionAccessPointPolicyExample = + new S3ControlMultiRegionAccessPointPolicy(this, "example_4", { + details: { + name: Token.asString(Fn.element(Fn.split(":", example.id), 1)), + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["s3:GetObject", "s3:PutObject"], + Effect: "Allow", + Principal: { + AWS: current.accountId, + }, + Resource: + "arn:${" + + dataAwsPartitionCurrent.partition + + "}:s3::${" + + current.accountId + + "}:accesspoint/${" + + example.alias + + "}/object/*", + Sid: "Example", + }, + ], + Version: "2012-10-17", + }) + ), + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ControlMultiRegionAccessPointPolicyExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) The AWS account ID for the owner of the Multi-Region Access Point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `details` - (Required) A configuration block containing details about the policy for the Multi-Region Access Point. See [Details Configuration Block](#details-configuration) below for more details + +### Details Configuration + +The `details` block supports the following: + +* `name` - (Required) The name of the Multi-Region Access Point. +* `policy` - (Required) A valid JSON document that specifies the policy that you want to associate with this Multi-Region Access Point. Once applied, the policy can be edited, but not deleted. For more information, see the documentation on [Multi-Region Access Point Permissions](https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiRegionAccessPointPermissions.html). + +-> **NOTE:** When you update the `policy`, the update is first listed as the proposed policy. After the update is finished and all Regions have been updated, the proposed policy is listed as the established policy. If both policies have the same version number, the proposed policy is the established policy. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `established` - The last established policy for the Multi-Region Access Point. +* `id` - The AWS account ID and access point name separated by a colon (`:`). +* `proposed` - The proposed policy for the Multi-Region Access Point. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `15M`) +* `update` - (Default `15M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Multi-Region Access Point Policies using the `accountId` and `name` of the Multi-Region Access Point separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Multi-Region Access Point Policies using the `accountId` and `name` of the Multi-Region Access Point separated by a colon (`:`). 
For example: + +```console +% terraform import aws_s3control_multi_region_access_point_policy.example 123456789012:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_object_lambda_access_point.html.markdown b/website/docs/cdktf/typescript/r/s3control_object_lambda_access_point.html.markdown new file mode 100644 index 00000000000..5cb19840e0e --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_object_lambda_access_point.html.markdown @@ -0,0 +1,131 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_object_lambda_access_point" +description: |- + Provides a resource to manage an S3 Object Lambda Access Point. +--- + + + +# Resource: aws_s3control_object_lambda_access_point + +Provides a resource to manage an S3 Object Lambda Access Point. +An Object Lambda access point is associated with exactly one [standard access point](s3_access_point.html) and thus one Amazon S3 bucket. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3AccessPoint } from "./.gen/providers/aws/s3-access-point"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3ControlObjectLambdaAccessPoint } from "./.gen/providers/aws/s3-control-object-lambda-access-point"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const awsS3AccessPointExample = new S3AccessPoint(this, "example_1", { + bucket: example.id, + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3AccessPointExample.overrideLogicalId("example"); + const awsS3ControlObjectLambdaAccessPointExample = + new S3ControlObjectLambdaAccessPoint(this, "example_2", { + configuration: { + supportingAccessPoint: Token.asString(awsS3AccessPointExample.arn), + transformationConfiguration: [ + { + actions: ["GetObject"], + contentTransformation: { + awsLambda: { + functionArn: Token.asString(awsLambdaFunctionExample.arn), + }, + }, + }, + ], + }, + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ControlObjectLambdaAccessPointExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) The AWS account ID for the owner of the bucket for which you want to create an Object Lambda Access Point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `configuration` - (Required) A configuration block containing details about the Object Lambda Access Point. See [Configuration](#configuration) below for more details. +* `name` - (Required) The name for this Object Lambda Access Point. + +### Configuration + +The `configuration` block supports the following: + +* `allowedFeatures` - (Optional) Allowed features. Valid values: `getObjectRange`, `getObjectPartNumber`. +* `cloudWatchMetricsEnabled` - (Optional) Whether or not the CloudWatch metrics configuration is enabled. 
+* `supportingAccessPoint` - (Required) Standard access point associated with the Object Lambda Access Point. +* `transformationConfiguration` - (Required) List of transformation configurations for the Object Lambda Access Point. See [Transformation Configuration](#transformation-configuration) below for more details. + +### Transformation Configuration + +The `transformationConfiguration` block supports the following: + +* `actions` - (Required) The actions of an Object Lambda Access Point configuration. Valid values: `getObject`. +* `contentTransformation` - (Required) The content transformation of an Object Lambda Access Point configuration. See [Content Transformation](#content-transformation) below for more details. + +### Content Transformation + +The `contentTransformation` block supports the following: + +* `awsLambda` - (Required) Configuration for an AWS Lambda function. See [AWS Lambda](#aws-lambda) below for more details. + +### AWS Lambda + +The `awsLambda` block supports the following: + +* `functionArn` - (Required) The Amazon Resource Name (ARN) of the AWS Lambda function. +* `functionPayload` - (Optional) Additional JSON that provides supplemental data to the Lambda function used to transform objects. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Object Lambda Access Point. +* `id` - The AWS account ID and access point name separated by a colon (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Object Lambda Access Points using the `accountId` and `name`, separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Object Lambda Access Points using the `accountId` and `name`, separated by a colon (`:`). For example: + +```console +% terraform import aws_s3control_object_lambda_access_point.example 123456789012:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_object_lambda_access_point_policy.html.markdown b/website/docs/cdktf/typescript/r/s3control_object_lambda_access_point_policy.html.markdown new file mode 100644 index 00000000000..29da07d234b --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_object_lambda_access_point_policy.html.markdown @@ -0,0 +1,125 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_object_lambda_access_point_policy" +description: |- + Provides a resource to manage an S3 Object Lambda Access Point resource policy. +--- + + + +# Resource: aws_s3control_object_lambda_access_point_policy + +Provides a resource to manage an S3 Object Lambda Access Point resource policy. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
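+ * Note: `current` (an aws_caller_identity data source) and
+ * `awsLambdaFunctionExample` (an aws_lambda_function) are assumed to be
+ * defined elsewhere in the stack; `cdktf convert` leaves such external
+ * references unresolved in this snippet.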
+ */ +import { S3AccessPoint } from "./.gen/providers/aws/s3-access-point"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3ControlObjectLambdaAccessPoint } from "./.gen/providers/aws/s3-control-object-lambda-access-point"; +import { S3ControlObjectLambdaAccessPointPolicy } from "./.gen/providers/aws/s3-control-object-lambda-access-point-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example", + }); + const awsS3AccessPointExample = new S3AccessPoint(this, "example_1", { + bucket: example.id, + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3AccessPointExample.overrideLogicalId("example"); + const awsS3ControlObjectLambdaAccessPointExample = + new S3ControlObjectLambdaAccessPoint(this, "example_2", { + configuration: { + supportingAccessPoint: Token.asString(awsS3AccessPointExample.arn), + transformationConfiguration: [ + { + actions: ["GetObject"], + contentTransformation: { + awsLambda: { + functionArn: Token.asString(awsLambdaFunctionExample.arn), + }, + }, + }, + ], + }, + name: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ControlObjectLambdaAccessPointExample.overrideLogicalId("example"); + const awsS3ControlObjectLambdaAccessPointPolicyExample = + new S3ControlObjectLambdaAccessPointPolicy(this, "example_3", { + name: Token.asString(awsS3ControlObjectLambdaAccessPointExample.name), + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: "s3-object-lambda:GetObject", + Effect: "Allow", + Principal: { + AWS: current.accountId, + }, + Resource: awsS3ControlObjectLambdaAccessPointExample.arn, + }, + ], + Version: "2008-10-17", + }) + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3ControlObjectLambdaAccessPointPolicyExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) The AWS account ID for the account that owns the Object Lambda Access Point. Defaults to automatically determined account ID of the Terraform AWS provider. +* `name` - (Required) The name of the Object Lambda Access Point. +* `policy` - (Required) The Object Lambda Access Point resource policy document. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `hasPublicAccessPolicy` - Indicates whether this access point currently has a policy that allows public access. +* `id` - The AWS account ID and access point name separated by a colon (`:`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Object Lambda Access Point policies using the `accountId` and `name`, separated by a colon (`:`). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Object Lambda Access Point policies using the `accountId` and `name`, separated by a colon (`:`). For example: + +```console +% terraform import aws_s3control_object_lambda_access_point_policy.example 123456789012:example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3control_storage_lens_configuration.html.markdown b/website/docs/cdktf/typescript/r/s3control_storage_lens_configuration.html.markdown new file mode 100644 index 00000000000..aa7265eeda1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3control_storage_lens_configuration.html.markdown @@ -0,0 +1,241 @@ +--- +subcategory: "S3 Control" +layout: "aws" +page_title: "AWS: aws_s3control_storage_lens_configuration" +description: |- + Provides a resource to manage an S3 Storage Lens configuration. +--- + + + +# Resource: aws_s3control_storage_lens_configuration + +Provides a resource to manage an S3 Storage Lens configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { S3ControlStorageLensConfiguration } from "./.gen/providers/aws/s3-control-storage-lens-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsCallerIdentity(this, "current", {}); + new S3ControlStorageLensConfiguration(this, "example", { + configId: "example-1", + storageLensConfiguration: { + accountLevel: { + activityMetrics: { + enabled: true, + }, + bucketLevel: { + activityMetrics: { + enabled: true, + }, + }, + }, + dataExport: { + cloudWatchMetrics: { + enabled: true, + }, + s3BucketDestination: { + accountId: Token.asString(current.accountId), + arn: target.arn, + encryption: { + sseS3: [{}], + }, + format: "CSV", + outputSchemaVersion: "V_1", + }, + }, + enabled: true, + exclude: { + buckets: [b1.arn, b2.arn], + regions: ["us-east-2"], + }, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Optional) The AWS account ID for the S3 Storage Lens configuration. Defaults to automatically determined account ID of the Terraform AWS provider. +* `configId` - (Required) The ID of the S3 Storage Lens configuration. +* `storageLensConfiguration` - (Required) The S3 Storage Lens configuration. See [Storage Lens Configuration](#storage-lens-configuration) below for more details. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+ +### Storage Lens Configuration + +The `storageLensConfiguration` block supports the following: + +* `accountLevel` (Required) The account-level configurations of the S3 Storage Lens configuration. See [Account Level](#account-level) below for more details. +* `awsOrg` (Optional) The Amazon Web Services organization for the S3 Storage Lens configuration. See [AWS Org](#aws-org) below for more details. +* `dataExport` (Optional) Properties of S3 Storage Lens metrics export including the destination, schema and format. See [Data Export](#data-export) below for more details. +* `enabled` (Required) Whether the S3 Storage Lens configuration is enabled. +* `exclude` (Optional) What is excluded in this configuration. Conflicts with `include`. See [Exclude](#exclude) below for more details. +* `include` (Optional) What is included in this configuration. Conflicts with `exclude`. See [Include](#include) below for more details. + +### Account Level + +The `accountLevel` block supports the following: + +* `activityMetrics` (Optional) S3 Storage Lens activity metrics. See [Activity Metrics](#activity-metrics) below for more details. +* `advancedCostOptimizationMetrics` (Optional) Advanced cost-optimization metrics for S3 Storage Lens. See [Advanced Cost-Optimization Metrics](#advanced-cost-optimization-metrics) below for more details. +* `advancedDataProtectionMetrics` (Optional) Advanced data-protection metrics for S3 Storage Lens. See [Advanced Data-Protection Metrics](#advanced-data-protection-metrics) below for more details. +* `bucketLevel` (Required) S3 Storage Lens bucket-level configuration. See [Bucket Level](#bucket-level) below for more details. +* `detailedStatusCodeMetrics` (Optional) Detailed status code metrics for S3 Storage Lens. See [Detailed Status Code Metrics](#detailed-status-code-metrics) below for more details. + +### Activity Metrics + +The `activityMetrics` block supports the following: + +* `enabled` (Optional) Whether the activity metrics are enabled. + +### Advanced Cost-Optimization Metrics + +The `advancedCostOptimizationMetrics` block supports the following: + +* `enabled` (Optional) Whether advanced cost-optimization metrics are enabled. + +### Advanced Data-Protection Metrics + +The `advancedDataProtectionMetrics` block supports the following: + +* `enabled` (Optional) Whether advanced data-protection metrics are enabled. + +### Detailed Status Code Metrics + +The `detailedStatusCodeMetrics` block supports the following: + +* `enabled` (Optional) Whether detailed status code metrics are enabled. + +### Bucket Level + +The `bucketLevel` block supports the following: + +* `activityMetrics` (Optional) S3 Storage Lens activity metrics. See [Activity Metrics](#activity-metrics) above for more details. +* `advancedCostOptimizationMetrics` (Optional) Advanced cost-optimization metrics for S3 Storage Lens. See [Advanced Cost-Optimization Metrics](#advanced-cost-optimization-metrics) above for more details. +* `advancedDataProtectionMetrics` (Optional) Advanced data-protection metrics for S3 Storage Lens. See [Advanced Data-Protection Metrics](#advanced-data-protection-metrics) above for more details. +* `detailedStatusCodeMetrics` (Optional) Detailed status code metrics for S3 Storage Lens. See [Detailed Status Code Metrics](#detailed-status-code-metrics) above for more details. +* `prefixLevel` (Optional) Prefix-level metrics for S3 Storage Lens. See [Prefix Level](#prefix-level) below for more details. 
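+
+The main example above does not use prefix-level metrics. As a hedged sketch (not part of the generated documentation), the `accountLevel`/`bucketLevel`/`prefixLevel` nesting described above combines with the `selectionCriteria` fields documented in the sections that follow; every identifier and value below is a placeholder:
+
+```typescript
+// Hypothetical sketch: prefix-level storage metrics with selection criteria.
+// The config ID and the criteria values are placeholders, not recommendations.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3ControlStorageLensConfiguration } from "./.gen/providers/aws/s3-control-storage-lens-configuration";
+class PrefixLevelSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3ControlStorageLensConfiguration(this, "prefix_level_sketch", {
+      configId: "example-prefix-level",
+      storageLensConfiguration: {
+        enabled: true,
+        accountLevel: {
+          bucketLevel: {
+            prefixLevel: {
+              storageMetrics: {
+                enabled: true,
+                selectionCriteria: {
+                  delimiter: "/", // treat "/" as the prefix separator
+                  maxDepth: 3, // aggregate at most three prefix levels deep
+                  minStorageBytesPercentage: 1, // skip prefixes under 1% of bucket bytes
+                },
+              },
+            },
+          },
+        },
+      },
+    });
+  }
+}
+```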
+ +### Prefix Level + +The `prefixLevel` block supports the following: + +* `storageMetrics` (Required) Prefix-level storage metrics for S3 Storage Lens. See [Prefix Level Storage Metrics](#prefix-level-storage-metrics) below for more details. + +### Prefix Level Storage Metrics + +The `storageMetrics` block supports the following: + +* `enabled` (Optional) Whether prefix-level storage metrics are enabled. +* `selectionCriteria` (Optional) Selection criteria. See [Selection Criteria](#selection-criteria) below for more details. + +### Selection Criteria + +The `selectionCriteria` block supports the following: + +* `delimiter` (Optional) The delimiter of the selection criteria being used. +* `maxDepth` (Optional) The max depth of the selection criteria. +* `minStorageBytesPercentage` (Optional) The minimum number of storage bytes percentage whose metrics will be selected. + +### AWS Org + +The `awsOrg` block supports the following: + +* `arn` (Required) The Amazon Resource Name (ARN) of the Amazon Web Services organization. + +### Data Export + +The `dataExport` block supports the following: + +* `cloudWatchMetrics` (Optional) Amazon CloudWatch publishing for S3 Storage Lens metrics. See [Cloud Watch Metrics](#cloud-watch-metrics) below for more details. +* `s3BucketDestination` (Optional) The bucket where the S3 Storage Lens metrics export will be located. See [S3 Bucket Destination](#s3-bucket-destination) below for more details. + +### Cloud Watch Metrics + +The `cloudWatchMetrics` block supports the following: + +* `enabled` (Required) Whether CloudWatch publishing for S3 Storage Lens metrics is enabled. + +### S3 Bucket Destination + +The `s3BucketDestination` block supports the following: + +* `accountId` (Required) The account ID of the owner of the S3 Storage Lens metrics export bucket. +* `arn` (Required) The Amazon Resource Name (ARN) of the bucket. +* `encryption` (Optional) Encryption of the metrics exports in this bucket. See [Encryption](#encryption) below for more details. +* `format` (Required) The export format. Valid values: `csv`, `parquet`. +* `outputSchemaVersion` (Required) The schema version of the export file. Valid values: `v1`. +* `prefix` (Optional) The prefix of the destination bucket where the metrics export will be delivered. + +### Encryption + +The `encryption` block supports the following: + +* `sseKms` (Optional) SSE-KMS encryption. See [SSE KMS](#sse-kms) below for more details. +* `sseS3` (Optional) SSE-S3 encryption. An empty configuration block `{}` should be used. + +### SSE KMS + +The `sseKms` block supports the following: + +* `keyId` (Required) KMS key ARN. + +### Exclude + +The `exclude` block supports the following: + +* `buckets` (Optional) List of S3 bucket ARNs. +* `regions` (Optional) List of AWS Regions. + +### Include + +The `include` block supports the following: + +* `buckets` (Optional) List of S3 bucket ARNs. +* `regions` (Optional) List of AWS Regions. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the S3 Storage Lens configuration. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
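+
+As a hedged sketch (not part of the generated documentation), the `sseKms` and `include` blocks documented above can be combined to produce a KMS-encrypted metrics export scoped to specific buckets; every ARN and ID below is a placeholder:
+
+```typescript
+// Hypothetical sketch: an SSE-KMS-encrypted metrics export scoped with `include`.
+// All ARNs and the account ID are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3ControlStorageLensConfiguration } from "./.gen/providers/aws/s3-control-storage-lens-configuration";
+class KmsExportSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3ControlStorageLensConfiguration(this, "kms_export_sketch", {
+      configId: "example-kms-export",
+      storageLensConfiguration: {
+        enabled: true,
+        accountLevel: {
+          bucketLevel: {},
+        },
+        dataExport: {
+          s3BucketDestination: {
+            accountId: "123456789012", // placeholder account ID
+            arn: "arn:aws:s3:::example-destination-bucket", // placeholder bucket ARN
+            format: "CSV",
+            outputSchemaVersion: "V_1",
+            encryption: {
+              sseKms: {
+                keyId: "arn:aws:kms:us-east-1:123456789012:key/11111111-2222-3333-4444-555555555555", // placeholder key ARN
+              },
+            },
+          },
+        },
+        include: {
+          buckets: ["arn:aws:s3:::example-source-bucket"], // placeholder bucket ARN
+        },
+      },
+    });
+  }
+}
+```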
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Storage Lens configurations using the `accountId` and `configId`, separated by a colon (`:`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import S3 Storage Lens configurations using the `accountId` and `configId`, separated by a colon (`:`). For example: + +```console +% terraform import aws_s3control_storage_lens_configuration.example 123456789012:example-1 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/s3outposts_endpoint.html.markdown b/website/docs/cdktf/typescript/r/s3outposts_endpoint.html.markdown new file mode 100644 index 00000000000..4abec31a2a6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/s3outposts_endpoint.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "S3 on Outposts" +layout: "aws" +page_title: "AWS: aws_s3outposts_endpoint" +description: |- + Manages an S3 Outposts Endpoint. +--- + + + +# Resource: aws_s3outposts_endpoint + +Provides a resource to manage an S3 Outposts Endpoint. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3OutpostsEndpoint } from "./.gen/providers/aws/s3-outposts-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new S3OutpostsEndpoint(this, "example", { + outpostId: Token.asString(dataAwsOutpostsOutpostExample.id), + securityGroupId: Token.asString(awsSecurityGroupExample.id), + subnetId: Token.asString(awsSubnetExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `outpostId` - (Required) Identifier of the Outpost to contain this endpoint. +* `securityGroupId` - (Required) Identifier of the EC2 Security Group. +* `subnetId` - (Required) Identifier of the EC2 Subnet. +* `accessType` - (Optional) Type of access for the network connectivity. Valid values are `private` or `customerOwnedIp`. +* `customerOwnedIpv4Pool` - (Optional) The ID of a Customer Owned IP Pool. For more on customer owned IP addresses see the [User Guide](https://docs.aws.amazon.com/outposts/latest/userguide/local-rack.html#local-gateway-subnet). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the endpoint. +* `cidrBlock` - VPC CIDR block of the endpoint. +* `creationTime` - UTC creation time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8). +* `id` - Amazon Resource Name (ARN) of the endpoint. +* `networkInterfaces` - Set of nested attributes for associated Elastic Network Interfaces (ENIs). + * `networkInterfaceId` - Identifier of the Elastic Network Interface (ENI). 
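+
+Where the Outpost uses a customer-owned IP address pool, the optional `accessType` and `customerOwnedIpv4Pool` arguments described above come into play. A hedged sketch with placeholder identifiers (the `accessType` string follows the AWS API casing):
+
+```typescript
+// Hypothetical sketch: an endpoint using customer-owned IP connectivity.
+// All identifiers are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3OutpostsEndpoint } from "./.gen/providers/aws/s3-outposts-endpoint";
+class CoipEndpointSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new S3OutpostsEndpoint(this, "coip_sketch", {
+      outpostId: "op-12345678", // placeholder Outpost ID
+      securityGroupId: "sg-12345678", // placeholder security group
+      subnetId: "subnet-12345678", // placeholder subnet
+      accessType: "CustomerOwnedIp", // AWS API casing of the value listed above
+      customerOwnedIpv4Pool: "ipv4pool-coip-0abcdef123456789a", // placeholder CoIP pool
+    });
+  }
+}
+```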
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import S3 Outposts Endpoints using Amazon Resource Name (ARN), EC2 Security Group identifier, and EC2 Subnet identifier, separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import S3 Outposts Endpoints using Amazon Resource Name (ARN), EC2 Security Group identifier, and EC2 Subnet identifier, separated by commas (`,`). For example:
+
+```console
+% terraform import aws_s3outposts_endpoint.example arn:aws:s3-outposts:us-east-1:123456789012:outpost/op-12345678/endpoint/0123456789abcdef,sg-12345678,subnet-12345678
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_app.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_app.html.markdown
new file mode 100644
index 00000000000..14c896b7f9b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_app.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_app"
+description: |-
+  Provides a SageMaker App resource.
+---
+
+
+
+# Resource: aws_sagemaker_app
+
+Provides a SageMaker App resource.
+
+## Example Usage
+
+### Basic usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerApp } from "./.gen/providers/aws/sagemaker-app";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerApp(this, "example", {
+      appName: "example",
+      appType: "JupyterServer",
+      domainId: Token.asString(awsSagemakerDomainExample.id),
+      userProfileName: Token.asString(
+        awsSagemakerUserProfileExample.userProfileName
+      ),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `appName` - (Required) The name of the app.
+* `appType` - (Required) The type of app. Valid values are `jupyterServer`, `kernelGateway`, `rStudioServerPro`, `rSessionGateway` and `tensorBoard`.
+* `domainId` - (Required) The domain ID.
+* `userProfileName` - (Optional) The user profile name. At least one of `userProfileName` or `spaceName` is required.
+* `resourceSpec` - (Optional) The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. See [Resource Spec](#resource-spec) below.
+* `spaceName` - (Optional) The name of the space. At least one of `userProfileName` or `spaceName` is required.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Resource Spec
+
+* `instanceType` - (Optional) The instance type that the image version runs on.
For valid values see [SageMaker Instance Types](https://docs.aws.amazon.com/sagemaker/latest/dg/notebooks-available-instance-types.html). +* `lifecycleConfigArn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource. +* `sagemakerImageArn` - (Optional) The ARN of the SageMaker image that the image version belongs to. +* `sagemakerImageVersionArn` - (Optional) The ARN of the image version created on the instance. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) of the app. +* `arn` - The Amazon Resource Name (ARN) of the app. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Apps using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Apps using the `id`. For example: + +```console +% terraform import aws_sagemaker_app.example arn:aws:sagemaker:us-west-2:012345678912:app/domain-id/user-profile-name/app-type/app-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_app_image_config.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_app_image_config.html.markdown new file mode 100644 index 00000000000..c34af1c1e23 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_app_image_config.html.markdown @@ -0,0 +1,128 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_app_image_config" +description: |- + Provides a SageMaker App Image Config resource. +--- + + + +# Resource: aws_sagemaker_app_image_config + +Provides a SageMaker App Image Config resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerAppImageConfig } from "./.gen/providers/aws/sagemaker-app-image-config"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerAppImageConfig(this, "test", { + appImageConfigName: "example", + kernelGatewayImageConfig: { + kernelSpec: { + name: "example", + }, + }, + }); + } +} + +``` + +### Default File System Config + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { SagemakerAppImageConfig } from "./.gen/providers/aws/sagemaker-app-image-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerAppImageConfig(this, "test", {
+      appImageConfigName: "example",
+      kernelGatewayImageConfig: {
+        fileSystemConfig: {},
+        kernelSpec: {
+          name: "example",
+        },
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `appImageConfigName` - (Required) The name of the App Image Config.
+* `kernelGatewayImageConfig` - (Optional) The configuration for the file system and kernels in a SageMaker image running as a KernelGateway app. See [Kernel Gateway Image Config](#kernel-gateway-image-config) details below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Kernel Gateway Image Config
+
+* `fileSystemConfig` - (Optional) The configuration for the file system mounted into the image, such as the default POSIX user and group IDs and the mount path. See [File System Config](#file-system-config) details below.
+* `kernelSpec` - (Required) The specification of the Jupyter kernel to make available in the image. See [Kernel Spec](#kernel-spec) details below.
+
+#### File System Config
+
+* `defaultGid` - (Optional) The default POSIX group ID (GID). If not specified, defaults to `100`. Valid values are `0` and `100`.
+* `defaultUid` - (Optional) The default POSIX user ID (UID). If not specified, defaults to `1000`. Valid values are `0` and `1000`.
+* `mountPath` - (Optional) The path within the image to mount the user's EFS home directory. The directory should be empty. If not specified, defaults to `/home/sagemakerUser`.
+
+~> **Note:** When specifying `defaultGid` and `defaultUid`, valid value pairs are [`0`, `0`] and [`100`, `1000`].
+
+#### Kernel Spec
+
+* `name` - (Required) The name of the kernel.
+* `displayName` - (Optional) The display name of the kernel.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the App Image Config.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this App Image Config.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker App Image Configs using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker App Image Configs using the `name`.
For example: + +```console +% terraform import aws_sagemaker_app_image_config.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_code_repository.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_code_repository.html.markdown new file mode 100644 index 00000000000..8771e21fad2 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_code_repository.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_code_repository" +description: |- + Provides a SageMaker Code Repository resource. +--- + + + +# Resource: aws_sagemaker_code_repository + +Provides a SageMaker Code Repository resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerCodeRepository } from "./.gen/providers/aws/sagemaker-code-repository"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerCodeRepository(this, "example", { + codeRepositoryName: "example", + gitConfig: { + repositoryUrl: + "https://github.com/hashicorp/terraform-provider-aws.git", + }, + }); + } +} + +``` + +### Example with Secret + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerCodeRepository } from "./.gen/providers/aws/sagemaker-code-repository"; +import { SecretsmanagerSecret } from "./.gen/providers/aws/secretsmanager-secret"; +import { SecretsmanagerSecretVersion } from "./.gen/providers/aws/secretsmanager-secret-version"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecretsmanagerSecret(this, "example", { + name: "example", + }); + const awsSecretsmanagerSecretVersionExample = + new SecretsmanagerSecretVersion(this, "example_1", { + secretId: example.id, + secretString: Token.asString( + Fn.jsonencode({ + password: "example", + username: "example", + }) + ), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecretsmanagerSecretVersionExample.overrideLogicalId("example"); + const awsSagemakerCodeRepositoryExample = new SagemakerCodeRepository( + this, + "example_2", + { + codeRepositoryName: "example", + dependsOn: [awsSecretsmanagerSecretVersionExample], + gitConfig: { + repositoryUrl: + "https://github.com/hashicorp/terraform-provider-aws.git", + secretArn: example.arn, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSagemakerCodeRepositoryExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `codeRepositoryName` - (Required) The name of the Code Repository (must be unique). +* `gitConfig` - (Required) Specifies details about the repository. 
See [Git Config](#git-config) details below.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Git Config
+
+* `repositoryUrl` - (Required) The URL where the Git repository is located.
+* `branch` - (Optional) The default branch for the Git repository.
+* `secretArn` - (Optional) The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the Git repository. The secret must have a staging label of `AWSCURRENT` and must be in the following format: `{"username": UserName, "password": Password}`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Code Repository.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Code Repository.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Code Repositories using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Code Repositories using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_code_repository.test_code_repository my-code-repo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_data_quality_job_definition.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_data_quality_job_definition.html.markdown
new file mode 100644
index 00000000000..612e56d15e6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_data_quality_job_definition.html.markdown
@@ -0,0 +1,202 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_data_quality_job_definition"
+description: |-
+  Provides a SageMaker Data Quality Job Definition resource.
+---
+
+
+
+# Resource: aws_sagemaker_data_quality_job_definition
+
+Provides a SageMaker data quality job definition resource.
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
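+ * Note: `monitor`, `myEndpoint`, `myBucket`, and `myRole` are assumed to
+ * be defined elsewhere in the stack; `cdktf convert` leaves such external
+ * references unresolved in this snippet.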
+ */
+import { SagemakerDataQualityJobDefinition } from "./.gen/providers/aws/sagemaker-data-quality-job-definition";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerDataQualityJobDefinition(this, "test", {
+      dataQualityAppSpecification: {
+        imageUri: Token.asString(monitor.registryPath),
+      },
+      dataQualityJobInput: {
+        endpointInput: {
+          endpointName: myEndpoint.name,
+        },
+      },
+      dataQualityJobOutputConfig: {
+        monitoringOutputs: {
+          s3Output: {
+            s3Uri:
+              "https://${" + myBucket.bucketRegionalDomainName + "}/output",
+          },
+        },
+      },
+      jobResources: {
+        clusterConfig: {
+          instanceCount: 1,
+          instanceType: "ml.t3.medium",
+          volumeSizeInGb: 20,
+        },
+      },
+      name: "my-data-quality-job-definition",
+      roleArn: myRole.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `dataQualityAppSpecification` - (Required) Specifies the container that runs the monitoring job. Fields are documented below.
+* `dataQualityBaselineConfig` - (Optional) Configures the constraints and baselines for the monitoring job. Fields are documented below.
+* `dataQualityJobInput` - (Required) A list of inputs for the monitoring job. Fields are documented below.
+* `dataQualityJobOutputConfig` - (Required) The output configuration for monitoring jobs. Fields are documented below.
+* `jobResources` - (Required) Identifies the resources to deploy for a monitoring job. Fields are documented below.
+* `name` - (Optional) The name of the data quality job definition. If omitted, Terraform will assign a random, unique name.
+* `networkConfig` - (Optional) Specifies networking configuration for the monitoring job. Fields are documented below.
+* `roleArn` - (Required) The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
+* `stoppingCondition` - (Optional) A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
+* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### data_quality_app_specification
+
+* `environment` - (Optional) Sets the environment variables in the container that the monitoring job runs. A list of key-value pairs.
+* `imageUri` - (Required) The container image that the data quality monitoring job runs.
+* `postAnalyticsProcessorSourceUri` - (Optional) An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
+* `recordPreprocessorSourceUri` - (Optional) An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64-decode the payload and convert it into flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
+
+### data_quality_baseline_config
+
+* `constraintsResource` - (Optional) The constraints resource for a monitoring job. Fields are documented below.
+* `statisticsResource` - (Optional) The statistics resource for a monitoring job. Fields are documented below.
+
+#### constraints_resource
+
+* `s3Uri` - (Optional) The Amazon S3 URI for the constraints resource.
+
+#### statistics_resource
+
+* `s3Uri` - (Optional) The Amazon S3 URI for the statistics resource.
+
+### data_quality_job_input
+
+* `batchTransformInput` - (Optional) Input object for the batch transform job. Fields are documented below.
+* `endpointInput` - (Optional) Input object for the endpoint. Fields are documented below.
+
+#### batch_transform_input
+
+* `dataCapturedDestinationS3Uri` - (Required) The Amazon S3 location being used to capture the data.
+* `datasetFormat` - (Required) The dataset format for your batch transform job. Fields are documented below.
+* `localPath` - (Optional) Path to the filesystem where the batch transform data is available to the container. Defaults to `/opt/ml/processing/input`.
+* `s3DataDistributionType` - (Optional) Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `fullyReplicated`. Valid values are `fullyReplicated` or `shardedByS3Key`.
+* `s3InputMode` - (Optional) Whether `pipe` or `file` is used as the input mode for transferring data for the monitoring job. `pipe` mode is recommended for large datasets. `file` mode is useful for small files that fit in memory. Defaults to `file`. Valid values are `pipe` or `file`.
+
+##### dataset_format
+
+* `csv` - (Optional) The CSV dataset used in the monitoring job. Fields are documented below.
+* `json` - (Optional) The JSON dataset used in the monitoring job. Fields are documented below.
+
+###### csv
+
+* `header` - (Optional) Indicates if the CSV data has a header.
+
+###### json
+
+* `line` - (Optional) Indicates if the file should be read as a JSON object per line.
+
+#### endpoint_input
+
+* `endpointName` - (Required) An endpoint in the customer's account which has `dataCaptureConfig` enabled.
+* `localPath` - (Optional) Path to the filesystem where the endpoint data is available to the container. Defaults to `/opt/ml/processing/input`.
+* `s3DataDistributionType` - (Optional) Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to `fullyReplicated`. Valid values are `fullyReplicated` or `shardedByS3Key`.
+* `s3InputMode` - (Optional) Whether `pipe` or `file` is used as the input mode for transferring data for the monitoring job. `pipe` mode is recommended for large datasets. `file` mode is useful for small files that fit in memory. Defaults to `file`. Valid values are `pipe` or `file`.
+
+### data_quality_job_output_config
+
+* `kmsKeyId` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
+* `monitoringOutputs` - (Required) Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
+
+#### monitoring_outputs
+
+* `s3Output` - (Required) The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
+
+##### s3_output
+
+* `localPath` - (Optional) The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to `/opt/ml/processing/output`.
+* `s3UploadMode` - (Optional) Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are `continuous` or `endOfJob`.
+* `s3Uri` - (Required) A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
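+
+The example at the top of this page monitors a live endpoint via `endpointInput`. For captured batch transform data, the `batchTransformInput` and `datasetFormat` blocks documented above are used instead, as in this hedged sketch; the image URI, S3 URIs, and role ARN are placeholders, and the `jobResources` block is documented next:
+
+```typescript
+// Hypothetical sketch: monitoring captured batch transform data in CSV format.
+// The image URI, S3 URIs, and role ARN below are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SagemakerDataQualityJobDefinition } from "./.gen/providers/aws/sagemaker-data-quality-job-definition";
+class BatchTransformSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerDataQualityJobDefinition(this, "batch_transform_sketch", {
+      dataQualityAppSpecification: {
+        imageUri: "111122223333.dkr.ecr.us-east-1.amazonaws.com/example-monitor", // placeholder image
+      },
+      dataQualityJobInput: {
+        batchTransformInput: {
+          dataCapturedDestinationS3Uri: "s3://example-bucket/captured-data", // placeholder
+          datasetFormat: {
+            csv: {
+              header: true, // captured CSV files carry a header row
+            },
+          },
+        },
+      },
+      dataQualityJobOutputConfig: {
+        monitoringOutputs: {
+          s3Output: {
+            s3Uri: "s3://example-bucket/monitoring-output", // placeholder
+          },
+        },
+      },
+      jobResources: {
+        clusterConfig: {
+          instanceCount: 1,
+          instanceType: "ml.t3.medium",
+          volumeSizeInGb: 20,
+        },
+      },
+      roleArn: "arn:aws:iam::111122223333:role/example-sagemaker-role", // placeholder
+    });
+  }
+}
+```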
+
+### job_resources
+
+* `clusterConfig` - (Required) The configuration for the cluster resources used to run the processing job. Fields are documented below.
+
+#### cluster_config
+
+* `instanceCount` - (Required) The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
+* `instanceType` - (Required) The ML compute instance type for the processing job.
+* `volumeKmsKeyId` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
+* `volumeSizeInGb` - (Required) The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
+
+### network_config
+
+* `enableInterContainerTrafficEncryption` - (Optional) Whether to encrypt all communications between the instances used for the monitoring jobs. Choose `true` to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
+* `enableNetworkIsolation` - (Optional) Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
+* `vpcConfig` - (Optional) Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
+
+#### vpc_config
+
+* `securityGroupIds` - (Required) The VPC security group IDs, in the form `sg-xxxxxxxx`. Specify the security groups for the VPC that is specified in the `subnets` field.
+* `subnets` - (Required) The IDs of the subnets in the VPC to which you want to connect your training job or model.
+
+### stopping_condition
+
+* `maxRuntimeInSeconds` - (Required) The maximum runtime allowed in seconds.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
+* `name` - The name of the data quality job definition.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import data quality job definitions using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import data quality job definitions using the `name`.
For example: + +```console +% terraform import aws_sagemaker_data_quality_job_definition.test_data_quality_job_definition data-quality-job-definition-foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_device.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_device.html.markdown new file mode 100644 index 00000000000..1eda06c799b --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_device.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_device" +description: |- + Provides a SageMaker Device resource. +--- + + + +# Resource: aws_sagemaker_device + +Provides a SageMaker Device resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerDevice } from "./.gen/providers/aws/sagemaker-device"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerDevice(this, "example", { + device: { + deviceName: "example", + }, + deviceFleetName: Token.asString( + awsSagemakerDeviceFleetExample.deviceFleetName + ), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `deviceFleetName` - (Required) The name of the Device Fleet. +* `device` - (Required) The device to register with SageMaker Edge Manager. See [Device](#device) details below. + +### Device + +* `description` - (Required) A description for the device. +* `deviceName` - (Optional) The name of the device. +* `iotThingName` - (Optional) Amazon Web Services Internet of Things (IoT) object name. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The id is constructed from `deviceFleetName/deviceName`. +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Device. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Devices using the `deviceFleetName/deviceName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Devices using the `deviceFleetName/deviceName`. For example: + +```console +% terraform import aws_sagemaker_device.example my-fleet/my-device +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_device_fleet.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_device_fleet.html.markdown new file mode 100644 index 00000000000..1d4563137db --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_device_fleet.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_device_fleet" +description: |- + Provides a SageMaker Device Fleet resource. +--- + + + +# Resource: aws_sagemaker_device_fleet + +Provides a SageMaker Device Fleet resource. 
+
+## Example Usage
+
+### Basic usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerDeviceFleet } from "./.gen/providers/aws/sagemaker-device-fleet";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerDeviceFleet(this, "example", {
+      deviceFleetName: "example",
+      outputConfig: {
+        s3OutputLocation: "s3://${" + awsS3BucketExample.bucket + "}/prefix/",
+      },
+      roleArn: test.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `deviceFleetName` - (Required) The name of the Device Fleet (must be unique).
+* `roleArn` - (Required) The Amazon Resource Name (ARN) of the role that has access to AWS Internet of Things (IoT).
+* `outputConfig` - (Required) Specifies details about the fleet's output storage location. See [Output Config](#output-config) details below.
+* `description` - (Optional) A description of the fleet.
+* `enableIotRoleAlias` - (Optional) Whether to create an AWS IoT Role Alias during device fleet creation. The name of the role alias generated will match this pattern: "SageMakerEdge-{DeviceFleetName}".
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Output Config
+
+* `s3OutputLocation` - (Required) The Amazon Simple Storage (S3) bucket URI.
+* `kmsKeyId` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume after the compilation job. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Device Fleet.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Device Fleet.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Device Fleets using the `name`. For example:

+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Device Fleets using the `name`.
For example: + +```console +% terraform import aws_sagemaker_device_fleet.example my-fleet +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_domain.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_domain.html.markdown new file mode 100644 index 00000000000..05eb01f7029 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_domain.html.markdown @@ -0,0 +1,296 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_domain" +description: |- + Provides a SageMaker Domain resource. +--- + + + +# Resource: aws_sagemaker_domain + +Provides a SageMaker Domain resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { SagemakerDomain } from "./.gen/providers/aws/sagemaker-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["sagemaker.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const awsIamRoleExample = new IamRole(this, "example_1", { + assumeRolePolicy: Token.asString(example.json), + name: "example", + path: "/", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsIamRoleExample.overrideLogicalId("example"); + const awsSagemakerDomainExample = new SagemakerDomain(this, "example_2", { + authMode: "IAM", + defaultUserSettings: { + executionRole: Token.asString(awsIamRoleExample.arn), + }, + domainName: "example", + subnetIds: [Token.asString(awsSubnetExample.id)], + vpcId: Token.asString(awsVpcExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSagemakerDomainExample.overrideLogicalId("example"); + } +} + +``` + +### Using Custom Images + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { SagemakerAppImageConfig } from "./.gen/providers/aws/sagemaker-app-image-config";
+import { SagemakerDomain } from "./.gen/providers/aws/sagemaker-domain";
+import { SagemakerImage } from "./.gen/providers/aws/sagemaker-image";
+import { SagemakerImageVersion } from "./.gen/providers/aws/sagemaker-image-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SagemakerAppImageConfig(this, "example", {
+      appImageConfigName: "example",
+      kernelGatewayImageConfig: {
+        kernelSpec: {
+          name: "example",
+        },
+      },
+    });
+    const awsSagemakerImageExample = new SagemakerImage(this, "example_1", {
+      imageName: "example",
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSagemakerImageExample.overrideLogicalId("example");
+    const awsSagemakerImageVersionExample = new SagemakerImageVersion(
+      this,
+      "example_2",
+      {
+        baseImage: "base-image",
+        imageName: Token.asString(awsSagemakerImageExample.id),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSagemakerImageVersionExample.overrideLogicalId("example");
+    const awsSagemakerDomainExample = new SagemakerDomain(this, "example_3", {
+      authMode: "IAM",
+      defaultUserSettings: {
+        executionRole: Token.asString(awsIamRoleExample.arn),
+        kernelGatewayAppSettings: {
+          customImage: [
+            {
+              appImageConfigName: example.appImageConfigName,
+              imageName: Token.asString(
+                awsSagemakerImageVersionExample.imageName
+              ),
+            },
+          ],
+        },
+      },
+      domainName: "example",
+      subnetIds: [Token.asString(awsSubnetExample.id)],
+      vpcId: Token.asString(awsVpcExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSagemakerDomainExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `authMode` - (Required) The mode of authentication that members use to access the domain. Valid values are `iam` and `sso`.
+* `defaultSpaceSettings` - (Required) The default space settings. See [Default Space Settings](#default_space_settings) below.
+* `defaultUserSettings` - (Required) The default user settings. See [Default User Settings](#default_user_settings) below.
+* `domainName` - (Required) The domain name.
+* `subnetIds` - (Required) The VPC subnets that Studio uses for communication.
+* `vpcId` - (Required) The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.
+
+The following arguments are optional:
+
+* `appNetworkAccessType` - (Optional) Specifies the VPC used for non-EFS traffic. The default value is `publicInternetOnly`. Valid values are `publicInternetOnly` and `vpcOnly`.
+* `appSecurityGroupManagement` - (Optional) The entity that creates and manages the required security groups for inter-app communication in `vpcOnly` mode. Valid values are `service` and `customer`.
+* `domainSettings` - (Optional) The domain settings. See [Domain Settings](#domain_settings) below.
+* `kmsKeyId` - (Optional) The AWS KMS customer managed CMK used to encrypt the EFS volume attached to the domain.
+* `retentionPolicy` - (Optional) The retention policy for this domain, which specifies whether resources will be retained after the Domain is deleted. By default, all resources are retained. See [Retention Policy](#retention_policy) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### default_space_settings + +* `executionRole` - (Required) The execution role for the space. +* `jupyterServerAppSettings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter_server_app_settings) below. +* `kernelGatewayAppSettings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel_gateway_app_settings) below. +* `securityGroups` - (Optional) The security groups for the Amazon Virtual Private Cloud that the space uses for communication. + +### default_user_settings + +* `executionRole` - (Required) The execution role ARN for the user. +* `canvasAppSettings` - (Optional) The Canvas app settings. See [Canvas App Settings](#canvas_app_settings) below. +* `jupyterServerAppSettings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter_server_app_settings) below. +* `kernelGatewayAppSettings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel_gateway_app_settings) below. +* `rSessionAppSettings` - (Optional) The RSession app settings. See [RSession App Settings](#r_session_app_settings) below. +* `rStudioServerProAppSettings` - (Optional) A collection of settings that configure user interaction with the RStudioServerPro app. See [RStudioServerProAppSettings](#r_studio_server_pro_app_settings) below. +* `securityGroups` - (Optional) A list of security group IDs that will be attached to the user. +* `sharingSettings` - (Optional) The sharing settings. See [Sharing Settings](#sharing_settings) below. +* `tensorBoardAppSettings` - (Optional) The TensorBoard app settings. See [TensorBoard App Settings](#tensor_board_app_settings) below. + +#### r_studio_server_pro_app_settings + +* `accessStatus` - (Optional) Indicates whether the current user has access to the RStudioServerPro app. Valid values are `enabled` and `disabled`. +* `userGroup` - (Optional) The level of permissions that the user has within the RStudioServerPro app. This value defaults to `rStudioUser`. The `rStudioAdmin` value allows the user access to the RStudio Administrative Dashboard. Valid values are `rStudioUser` and `rStudioAdmin`. + +#### canvas_app_settings + +* `modelRegisterSettings` - (Optional) The model registry settings for the SageMaker Canvas application. See [Model Register Settings](#model_register_settings) below. +* `timeSeriesForecastingSettings` - (Optional) Time series forecast settings for the Canvas app. See [Time Series Forecasting Settings](#time_series_forecasting_settings) below. +* `workspaceSettings` - (Optional) The workspace settings for the SageMaker Canvas application. See [Workspace Settings](#workspace_settings) below. + +##### model_register_settings + +* `crossAccountModelRegisterRoleArn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker model registry account. 
Required only to register model versions created by a different SageMaker Canvas AWS account than the AWS account in which SageMaker model registry is set up. +* `status` - (Optional) Describes whether the integration to the model registry is enabled or disabled in the Canvas application. Valid values are `enabled` and `disabled`. + +##### time_series_forecasting_settings + +* `amazonForecastRoleArn` - (Optional) The IAM role that Canvas passes to Amazon Forecast for time series forecasting. By default, Canvas uses the execution role specified in the UserProfile that launches the Canvas app. If an execution role is not specified in the UserProfile, Canvas uses the execution role specified in the Domain that owns the UserProfile. To allow time series forecasting, this IAM role should have the [AmazonSageMakerCanvasForecastAccess](https://docs.aws.amazon.com/sagemaker/latest/dg/security-iam-awsmanpol-canvas.html#security-iam-awsmanpol-AmazonSageMakerCanvasForecastAccess) policy attached and forecast.amazonaws.com added in the trust relationship as a service principal. +* `status` - (Optional) Describes whether time series forecasting is enabled or disabled in the Canvas app. Valid values are `enabled` and `disabled`. + +##### workspace_settings + +* `s3ArtifactPath` - (Optional) The Amazon S3 bucket used to store artifacts generated by Canvas. Updating the Amazon S3 location impacts existing configuration settings, and Canvas users no longer have access to their artifacts. Canvas users must log out and log back in to apply the new location. +* `s3KmsKeyId` - (Optional) The Amazon Web Services Key Management Service (KMS) encryption key ID that is used to encrypt artifacts generated by Canvas in the Amazon S3 bucket. + +#### sharing_settings + +* `notebookOutputOption` - (Optional) Whether to include the notebook cell output when sharing the notebook. The default is `disabled`. Valid values are `allowed` and `disabled`. +* `s3KmsKeyId` - (Optional) When `notebookOutputOption` is Allowed, the AWS Key Management Service (KMS) encryption key ID used to encrypt the notebook cell output in the Amazon S3 bucket. +* `s3OutputPath` - (Optional) When `notebookOutputOption` is Allowed, the Amazon S3 bucket used to save the notebook cell output. + +#### tensor_board_app_settings + +* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default_resource_spec) below. + +#### kernel_gateway_app_settings + +* `customImage` - (Optional) A list of custom SageMaker images that are configured to run as a KernelGateway app. see [Custom Image](#custom_image) below. +* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default_resource_spec) below. +* `lifecycleConfigArns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations. + +#### jupyter_server_app_settings + +* `codeRepository` - (Optional) A list of Git repositories that SageMaker automatically displays to users for cloning in the JupyterServer application. see [Code Repository](#code_repository) below. +* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default_resource_spec) below. 
+* `lifecycleConfigArns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations.
+
+##### code_repository
+
+* `repositoryUrl` - (Optional) The URL of the Git repository.
+
+##### default_resource_spec
+
+* `instanceType` - (Optional) The instance type that the image version runs on. For valid values see [SageMaker Instance Types](https://docs.aws.amazon.com/sagemaker/latest/dg/notebooks-available-instance-types.html).
+* `lifecycleConfigArn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource.
+* `sagemakerImageArn` - (Optional) The ARN of the SageMaker image that the image version belongs to.
+* `sagemakerImageVersionArn` - (Optional) The ARN of the image version created on the instance.
+
+#### r_session_app_settings
+
+* `customImage` - (Optional) A list of custom SageMaker images that are configured to run as a KernelGateway app. see [Custom Image](#custom_image) below.
+* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default_resource_spec) below.
+
+##### custom_image
+
+* `appImageConfigName` - (Required) The name of the App Image Config.
+* `imageName` - (Required) The name of the Custom Image.
+* `imageVersionNumber` - (Optional) The version number of the Custom Image.
+
+### domain_settings
+
+* `executionRoleIdentityConfig` - (Optional) The configuration for attaching a SageMaker user profile name to the execution role as an `sts:SourceIdentity` key. See the [AWS Docs](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_monitor.html). Valid values are `userProfileName` and `disabled`.
+* `rStudioServerProDomainSettings` - (Optional) A collection of settings that configure the RStudioServerPro Domain-level app. see [RStudioServerProDomainSettings](#r_studio_server_pro_domain_settings) below.
+* `securityGroupIds` - (Optional) The security groups for the Amazon Virtual Private Cloud that the Domain uses for communication between Domain-level apps and user apps.
+
+#### r_studio_server_pro_domain_settings
+
+* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default_resource_spec) below.
+* `domainExecutionRoleArn` - (Required) The ARN of the execution role for the RStudioServerPro Domain-level app.
+* `rStudioConnectUrl` - (Optional) A URL pointing to an RStudio Connect server.
+* `rStudioPackageManagerUrl` - (Optional) A URL pointing to an RStudio Package Manager server.
+
+### retention_policy
+
+* `homeEfsFileSystem` - (Optional) The retention policy for data stored on an Amazon Elastic File System (EFS) volume. Valid values are `retain` or `delete`. Default value is `retain`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Domain.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Domain.
+* `url` - The domain's URL.
+* `singleSignOnManagedApplicationInstanceId` - The SSO managed application instance ID.
+* `securityGroupIdForDomainBoundary` - The ID of the security group that authorizes traffic between the RSessionGateway apps and the RStudioServerPro app.
+* `homeEfsFileSystemId` - The ID of the Amazon Elastic File System (EFS) managed by this Domain.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Domains using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Domains using the `id`. For example: + +```console +% terraform import aws_sagemaker_domain.test_domain d-8jgsjtilstu8 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_endpoint.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_endpoint.html.markdown new file mode 100644 index 00000000000..f22a9a653e9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_endpoint.html.markdown @@ -0,0 +1,118 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_endpoint" +description: |- + Provides a SageMaker Endpoint resource. +--- + + + +# Resource: aws_sagemaker_endpoint + +Provides a SageMaker Endpoint resource. + +## Example Usage + +Basic usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerEndpoint } from "./.gen/providers/aws/sagemaker-endpoint"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerEndpoint(this, "e", { + endpointConfigName: ec.name, + name: "my-endpoint", + tags: { + Name: "foo", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `endpointConfigName` - (Required) The name of the endpoint configuration to use. +* `deploymentConfig` - (Optional) The deployment configuration for an endpoint, which contains the desired deployment strategy and rollback configurations. See [Deployment Config](#deployment-config). +* `name` - (Optional) The name of the endpoint. If omitted, Terraform will assign a random, unique name. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Deployment Config + +* `blueGreenUpdatePolicy` - (Required) Update policy for a blue/green deployment. If this update policy is specified, SageMaker creates a new fleet during the deployment while maintaining the old fleet. See [Blue Green Update Config](#blue-green-update-config). +* `autoRollbackConfiguration` - (Optional) Automatic rollback configuration for handling endpoint deployment failures and recovery. See [Auto Rollback Configuration](#auto-rollback-configuration). 
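+
+Before the field-by-field breakdown below, here is a minimal sketch of a canary blue/green deployment with automatic rollback. `ec` is the endpoint configuration from the example above; the alarm name, endpoint name, and timing values are illustrative placeholders:
+
+```typescript
+// Hypothetical sketch: shift 25% of traffic to the new fleet, wait five
+// minutes, then shift the rest; roll back if the named alarm fires.
+new SagemakerEndpoint(this, "canary", {
+  endpointConfigName: ec.name,
+  name: "my-endpoint-blue-green",
+  deploymentConfig: {
+    blueGreenUpdatePolicy: {
+      trafficRoutingConfiguration: {
+        type: "CANARY", // AWS API value for the canary strategy
+        waitIntervalInSeconds: 300,
+        canarySize: {
+          type: "CAPACITY_PERCENT",
+          value: 25,
+        },
+      },
+      terminationWaitInSeconds: 600,
+    },
+    autoRollbackConfiguration: {
+      alarms: [{ alarmName: "my-endpoint-error-alarm" }], // placeholder alarm
+    },
+  },
+});
+```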
+ +#### Blue Green Update Config + +* `trafficRoutingConfiguration` - (Required) Defines the traffic routing strategy to shift traffic from the old fleet to the new fleet during an endpoint deployment. See [Traffic Routing Configuration](#traffic-routing-configuration). +* `maximumExecutionTimeoutInSeconds` - (Optional) Maximum execution timeout for the deployment. Note that the timeout value should be larger than the total waiting time specified in `terminationWaitInSeconds` and `waitIntervalInSeconds`. Valid values are between `600` and `14400`. +* `terminationWaitInSeconds` - (Optional) Additional waiting time in seconds after the completion of an endpoint deployment before terminating the old endpoint fleet. Default is `0`. Valid values are between `0` and `3600`. + +##### Traffic Routing Configuration + +* `type` - (Required) Traffic routing strategy type. Valid values are: `allAtOnce`, `canary`, and `linear`. +* `waitIntervalInSeconds` - (Required) The waiting time (in seconds) between incremental steps to turn on traffic on the new endpoint fleet. Valid values are between `0` and `3600`. +* `canarySize` - (Optional) Batch size for the first step to turn on traffic on the new endpoint fleet. Value must be less than or equal to 50% of the variant's total instance count. See [Canary Size](#canary-size). +* `linearStepSize` - (Optional) Batch size for each step to turn on traffic on the new endpoint fleet. Value must be 10-50% of the variant's total instance count. See [Linear Step Size](#linear-step-size). + +###### Canary Size + +* `type` - (Required) Specifies the endpoint capacity type. Valid values are: `instanceCount`, or `capacityPercent`. +* `value` - (Required) Defines the capacity size, either as a number of instances or a capacity percentage. + +###### Linear Step Size + +* `type` - (Required) Specifies the endpoint capacity type. Valid values are: `instanceCount`, or `capacityPercent`. +* `value` - (Required) Defines the capacity size, either as a number of instances or a capacity percentage. + +#### Auto Rollback Configuration + +* `alarms` - (Required) List of CloudWatch alarms in your account that are configured to monitor metrics on an endpoint. If any alarms are tripped during a deployment, SageMaker rolls back the deployment. See [Alarms](#alarms). + +##### Alarms + +* `alarmName` - (Required) The name of a CloudWatch alarm in your account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this endpoint. +* `name` - The name of the endpoint. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoints using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import endpoints using the `name`. 
For example:
+
+```console
+% terraform import aws_sagemaker_endpoint.test_endpoint my-endpoint
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_endpoint_configuration.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_endpoint_configuration.html.markdown
new file mode 100644
index 00000000000..8014a74b056
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_endpoint_configuration.html.markdown
@@ -0,0 +1,159 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_endpoint_configuration"
+description: |-
+  Provides a SageMaker Endpoint Configuration resource.
+---
+
+
+
+# Resource: aws_sagemaker_endpoint_configuration
+
+Provides a SageMaker endpoint configuration resource.
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerEndpointConfiguration } from "./.gen/providers/aws/sagemaker-endpoint-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerEndpointConfiguration(this, "ec", {
+      name: "my-endpoint-config",
+      productionVariants: [
+        {
+          initialInstanceCount: 1,
+          instanceType: "ml.t2.medium",
+          modelName: m.name,
+          variantName: "variant-1",
+        },
+      ],
+      tags: {
+        Name: "foo",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `productionVariants` - (Required) A list of ProductionVariant objects, one for each model that you want to host at this endpoint. Fields are documented below.
+* `kmsKeyArn` - (Optional) Amazon Resource Name (ARN) of an AWS Key Management Service key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance that hosts the endpoint.
+* `name` - (Optional) The name of the endpoint configuration. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional) Creates a unique endpoint configuration name beginning with the specified prefix. Conflicts with `name`.
+* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `dataCaptureConfig` - (Optional) Specifies the parameters to capture input/output of SageMaker model endpoints. Fields are documented below.
+* `asyncInferenceConfig` - (Optional) Specifies configuration for how an endpoint performs asynchronous inference.
+* `shadowProductionVariants` - (Optional) Array of ProductionVariant objects. There is one for each model that you want to host at this endpoint in shadow mode with production traffic replicated from the model specified on ProductionVariants. If you use this field, you can only specify one variant for ProductionVariants and one variant for ShadowProductionVariants. Fields are documented below.
+
+### production_variants
+
+* `acceleratorType` - (Optional) The size of the Elastic Inference (EI) instance to use for the production variant.
+* `containerStartupHealthCheckTimeoutInSeconds` - (Optional) The timeout value, in seconds, for your inference container to pass the health check performed by SageMaker Hosting. For more information about health checks, see [How Your Container Should Respond to Health Check (Ping) Requests](https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-inference-code.html#your-algorithms-inference-algo-ping-requests). Valid values between `60` and `3600`.
+* `coreDumpConfig` - (Optional) Specifies configuration for a core dump from the model container when the process crashes. Fields are documented below.
+* `enableSsmAccess` - (Optional) You can use this parameter to turn on native Amazon Web Services Systems Manager (SSM) access for a production variant behind an endpoint. By default, SSM access is disabled for all production variants behind an endpoint.
+* `initialInstanceCount` - (Optional) Initial number of instances used for auto-scaling.
+* `instanceType` - (Optional) The type of instance to start.
+* `initialVariantWeight` - (Optional) Determines initial traffic distribution among all of the models that you specify in the endpoint configuration. If unspecified, it defaults to `1.0`.
+* `modelDataDownloadTimeoutInSeconds` - (Optional) The timeout value, in seconds, to download and extract the model that you want to host from Amazon S3 to the individual inference instance associated with this production variant. Valid values between `60` and `3600`.
+* `modelName` - (Required) The name of the model to use.
+* `serverlessConfig` - (Optional) Specifies configuration for how an endpoint performs serverless inference. Fields are documented below.
+* `variantName` - (Optional) The name of the variant. If omitted, Terraform will assign a random, unique name.
+* `volumeSizeInGb` - (Optional) The size, in GB, of the ML storage volume attached to the individual inference instance associated with the production variant. Valid values between `1` and `512`.
+
+#### core_dump_config
+
+* `destinationS3Uri` - (Required) The Amazon S3 bucket to send the core dump to.
+* `kmsKeyId` - (Required) The Amazon Web Services Key Management Service (Amazon Web Services KMS) key that SageMaker uses to encrypt the core dump data at rest using Amazon S3 server-side encryption.
+
+#### serverless_config
+
+* `maxConcurrency` - (Required) The maximum number of concurrent invocations your serverless endpoint can process. Valid values are between `1` and `200`.
+* `memorySizeInMb` - (Required) The memory size of your serverless endpoint. Valid values are in 1 GB increments: `1024` MB, `2048` MB, `3072` MB, `4096` MB, `5120` MB, or `6144` MB.
+* `provisionedConcurrency` - (Optional) The amount of provisioned concurrency to allocate for the serverless endpoint. Should be less than or equal to `maxConcurrency`. Valid values are between `1` and `200`.
+
+### data_capture_config
+
+* `initialSamplingPercentage` - (Required) Portion of data to capture. Should be between 0 and 100.
+* `destinationS3Uri` - (Required) The URL for the S3 location where the captured data is stored.
+* `captureOptions` - (Required) Specifies what data to capture. Fields are documented below.
+* `kmsKeyId` - (Optional) Amazon Resource Name (ARN) of an AWS Key Management Service key that Amazon SageMaker uses to encrypt the captured data on Amazon S3.
+* `enableCapture` - (Optional) Flag to enable data capture. Defaults to `false`.
+* `captureContentTypeHeader` - (Optional) The content type headers to capture. Fields are documented below.
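+
+The subsections below break these fields down further; as a starting point, here is a minimal sketch of an endpoint configuration that captures both request and response payloads. `m` is the model from the example above, while the S3 path, configuration name, and sampling rate are illustrative placeholders:
+
+```typescript
+// Hypothetical sketch: capture all traffic to and from the endpoint's variants.
+new SagemakerEndpointConfiguration(this, "captured", {
+  name: "my-endpoint-config-with-capture",
+  productionVariants: [
+    {
+      initialInstanceCount: 1,
+      instanceType: "ml.t2.medium",
+      modelName: m.name,
+      variantName: "variant-1",
+    },
+  ],
+  dataCaptureConfig: {
+    enableCapture: true,
+    initialSamplingPercentage: 100, // capture every request
+    destinationS3Uri: "s3://my-bucket/endpoint-capture", // placeholder bucket
+    // Capture both request and response payloads ("Input"/"Output" are the
+    // AWS API values for captureMode).
+    captureOptions: [{ captureMode: "Input" }, { captureMode: "Output" }],
+    captureContentTypeHeader: {
+      csvContentTypes: ["text/csv"],
+    },
+  },
+});
+```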
+ +#### capture_options + +* `captureMode` - (Required) Specifies the data to be captured. Should be one of `input` or `output`. + +#### capture_content_type_header + +* `csvContentTypes` - (Optional) The CSV content type headers to capture. +* `jsonContentTypes` - (Optional) The JSON content type headers to capture. + +### async_inference_config + +* `outputConfig` - (Required) Specifies the configuration for asynchronous inference invocation outputs. +* `clientConfig` - (Optional) Configures the behavior of the client used by Amazon SageMaker to interact with the model container during asynchronous inference. + +#### client_config + +* `maxConcurrentInvocationsPerInstance` - (Optional) The maximum number of concurrent requests sent by the SageMaker client to the model container. If no value is provided, Amazon SageMaker will choose an optimal value for you. + +#### output_config + +* `s3OutputPath` - (Required) The Amazon S3 location to upload inference responses to. +* `s3FailurePath` - (Optional) The Amazon S3 location to upload failure inference responses to. +* `kmsKeyId` - (Optional) The Amazon Web Services Key Management Service (Amazon Web Services KMS) key that Amazon SageMaker uses to encrypt the asynchronous inference output in Amazon S3. +* `notificationConfig` - (Optional) Specifies the configuration for notifications of inference results for asynchronous inference. + +##### notification_config + +* `includeInferenceResponseIn` - (Optional) The Amazon SNS topics where you want the inference response to be included. Valid values are `successNotificationTopic` and `errorNotificationTopic`. +* `errorTopic` - (Optional) Amazon SNS topic to post a notification to when inference fails. If no topic is provided, no notification is sent on failure. +* `successTopic` - (Optional) Amazon SNS topic to post a notification to when inference completes successfully. If no topic is provided, no notification is sent on success. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this endpoint configuration. +* `name` - The name of the endpoint configuration. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import endpoint configurations using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import endpoint configurations using the `name`. 
For example: + +```console +% terraform import aws_sagemaker_endpoint_configuration.test_endpoint_config endpoint-config-foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_feature_group.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_feature_group.html.markdown new file mode 100644 index 00000000000..984bdddcd27 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_feature_group.html.markdown @@ -0,0 +1,127 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_feature_group" +description: |- + Provides a SageMaker Feature Group resource. +--- + + + +# Resource: aws_sagemaker_feature_group + +Provides a SageMaker Feature Group resource. + +## Example Usage + +Basic usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerFeatureGroup } from "./.gen/providers/aws/sagemaker-feature-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerFeatureGroup(this, "example", { + eventTimeFeatureName: "example", + featureDefinition: [ + { + featureName: "example", + featureType: "String", + }, + ], + featureGroupName: "example", + onlineStoreConfig: { + enableOnlineStore: true, + }, + recordIdentifierFeatureName: "example", + roleArn: test.arn, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `featureGroupName` - (Required) The name of the Feature Group. The name must be unique within an AWS Region in an AWS account. +* `recordIdentifierFeatureName` - (Required) The name of the Feature whose value uniquely identifies a Record defined in the Feature Store. Only the latest record per identifier value will be stored in the Online Store. +* `eventTimeFeatureName` - (Required) The name of the feature that stores the EventTime of a Record in a Feature Group. +* `description` (Optional) - A free-form description of a Feature Group. +* `roleArn` (Required) - The Amazon Resource Name (ARN) of the IAM execution role used to persist data into the Offline Store if an `offlineStoreConfig` is provided. +* `featureDefinition` (Optional) - A list of Feature names and types. See [Feature Definition](#feature-definition) Below. +* `offlineStoreConfig` (Optional) - The Offline Feature Store Configuration. See [Offline Store Config](#offline-store-config) Below. +* `onlineStoreConfig` (Optional) - The Online Feature Store Configuration. See [Online Store Config](#online-store-config) Below. +* `tags` - (Optional) Map of resource tags for the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Feature Definition + +* `featureName` - (Required) The name of a feature. `featureName` cannot be any of the following: `isDeleted`, `writeTime`, `apiInvocationTime`. +* `featureType` - (Required) The value type of a feature. Valid values are `integral`, `fractional`, or `string`. 
+
+### Offline Store Config
+
+* `disableGlueTableCreation` - (Optional) Set to `true` to disable the automatic creation of an AWS Glue table when configuring an OfflineStore.
+* `s3StorageConfig` - (Required) The Amazon Simple Storage (Amazon S3) location of OfflineStore. See [S3 Storage Config](#s3-storage-config) Below.
+* `dataCatalogConfig` - (Optional) The metadata of the Glue table that is autogenerated when an OfflineStore is created. See [Data Catalog Config](#data-catalog-config) Below.
+* `tableFormat` - (Optional) Format for the offline store table. Supported formats are `glue` (Default) and Apache `iceberg` (https://iceberg.apache.org/).
+
+### Online Store Config
+
+* `enableOnlineStore` - (Optional) Set to `true` to turn the Online Store on.
+* `securityConfig` - (Required) Security config for at-rest encryption of your OnlineStore. See [Security Config](#security-config) Below.
+
+#### S3 Storage Config
+
+* `kmsKeyId` - (Optional) The AWS Key Management Service (KMS) key ID of the key used to encrypt any objects written into the OfflineStore S3 location.
+* `s3Uri` - (Required) The S3 URI, or location in Amazon S3, of OfflineStore.
+
+#### Data Catalog Config
+
+* `catalog` - (Optional) The name of the Glue table catalog.
+* `database` - (Optional) The name of the Glue table database.
+* `tableName` - (Optional) The name of the Glue table.
+
+#### Security Config
+
+* `kmsKeyId` - (Optional) The ID of the AWS Key Management Service (AWS KMS) key that SageMaker Feature Store uses to encrypt the Amazon S3 objects at rest using Amazon S3 server-side encryption.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `name` - The name of the Feature Group.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Feature Group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Feature Groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Feature Groups using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_feature_group.test_feature_group feature_group-foo
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_flow_definition.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_flow_definition.html.markdown
new file mode 100644
index 00000000000..3fece6270c7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_flow_definition.html.markdown
@@ -0,0 +1,219 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_flow_definition"
+description: |-
+  Provides a SageMaker Flow Definition resource.
+---
+
+
+
+# Resource: aws_sagemaker_flow_definition
+
+Provides a SageMaker Flow Definition resource.
+ +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerFlowDefinition } from "./.gen/providers/aws/sagemaker-flow-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerFlowDefinition(this, "example", { + flowDefinitionName: "example", + humanLoopConfig: { + humanTaskUiArn: Token.asString(awsSagemakerHumanTaskUiExample.arn), + taskAvailabilityLifetimeInSeconds: 1, + taskCount: 1, + taskDescription: "example", + taskTitle: "example", + workteamArn: Token.asString(awsSagemakerWorkteamExample.arn), + }, + outputConfig: { + s3OutputPath: "s3://${" + awsS3BucketExample.bucket + "}/", + }, + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### Public Workteam Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerFlowDefinition } from "./.gen/providers/aws/sagemaker-flow-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerFlowDefinition(this, "example", { + flowDefinitionName: "example", + humanLoopConfig: { + humanTaskUiArn: Token.asString(awsSagemakerHumanTaskUiExample.arn), + publicWorkforceTaskPrice: { + amountInUsd: { + cents: 1, + tenthFractionsOfACent: 2, + }, + }, + taskAvailabilityLifetimeInSeconds: 1, + taskCount: 1, + taskDescription: "example", + taskTitle: "example", + workteamArn: + "arn:aws:sagemaker:${" + + current.name + + "}:394669845002:workteam/public-crowd/default", + }, + outputConfig: { + s3OutputPath: "s3://${" + awsS3BucketExample.bucket + "}/", + }, + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +### Human Loop Activation Config Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { SagemakerFlowDefinition } from "./.gen/providers/aws/sagemaker-flow-definition"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerFlowDefinition(this, "example", { + flowDefinitionName: "example", + humanLoopActivationConfig: { + humanLoopActivationConditionsConfig: { + humanLoopActivationConditions: + ' {\n\t\t\t"Conditions": [\n\t\t\t {\n\t\t\t\t"ConditionType": "Sampling",\n\t\t\t\t"ConditionParameters": {\n\t\t\t\t "RandomSamplingPercentage": 5\n\t\t\t\t}\n\t\t\t }\n\t\t\t]\n\t\t}\n\n', + }, + }, + humanLoopConfig: { + humanTaskUiArn: Token.asString(awsSagemakerHumanTaskUiExample.arn), + taskAvailabilityLifetimeInSeconds: 1, + taskCount: 1, + taskDescription: "example", + taskTitle: "example", + workteamArn: Token.asString(awsSagemakerWorkteamExample.arn), + }, + humanLoopRequestSource: { + awsManagedHumanLoopRequestSource: + "AWS/Textract/AnalyzeDocument/Forms/V1", + }, + outputConfig: { + s3OutputPath: "s3://${" + awsS3BucketExample.bucket + "}/", + }, + roleArn: Token.asString(awsIamRoleExample.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `flowDefinitionName` - (Required) The name of your flow definition. +* `humanLoopConfig` - (Required) An object containing information about the tasks the human reviewers will perform. See [Human Loop Config](#human-loop-config) details below. +* `roleArn` - (Required) The Amazon Resource Name (ARN) of the role needed to call other services on your behalf. +* `outputConfig` - (Required) An object containing information about where the human review results will be uploaded. See [Output Config](#output-config) details below. +* `humanLoopActivationConfig` - (Optional) An object containing information about the events that trigger a human workflow. See [Human Loop Activation Config](#human-loop-activation-config) details below. +* `humanLoopRequestSource` - (Optional) Container for configuring the source of human task requests. Use to specify if Amazon Rekognition or Amazon Textract is used as an integration source. See [Human Loop Request Source](#human-loop-request-source) details below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Human Loop Config + +* `humanTaskUiArn` - (Required) The Amazon Resource Name (ARN) of the human task user interface. +* `publicWorkforceTaskPrice` - (Optional) Defines the amount of money paid to an Amazon Mechanical Turk worker for each task performed. See [Public Workforce Task Price](#public-workforce-task-price) details below. +* `taskAvailabilityLifetimeInSeconds` - (Required) The length of time that a task remains available for review by human workers. Valid value range between `1` and `864000`. +* `taskCount` - (Required) The number of distinct workers who will perform the same task on each object. Valid value range between `1` and `3`. +* `taskDescription` - (Required) A description for the human worker task. +* `taskKeywords` - (Optional) An array of keywords used to describe the task so that workers can discover the task. +* `taskTimeLimitInSeconds` - (Optional) The amount of time that a worker has to complete a task. The default value is `3600` seconds. 
+* `taskTitle` - (Required) A title for the human worker task.
+* `workteamArn` - (Required) The Amazon Resource Name (ARN) of a team of workers. For Public workforces see [AWS Docs](https://docs.aws.amazon.com/sagemaker/latest/dg/sms-workforce-management-public.html).
+
+#### Public Workforce Task Price
+
+* `amountInUsd` - (Optional) Defines the amount of money paid to an Amazon Mechanical Turk worker in United States dollars. See [Amount In Usd](#amount-in-usd) details below.
+
+##### Amount In Usd
+
+* `cents` - (Optional) The fractional portion, in cents, of the amount. Valid value range between `0` and `99`.
+* `dollars` - (Optional) The whole number of dollars in the amount. Valid value range between `0` and `2`.
+* `tenthFractionsOfACent` - (Optional) Fractions of a cent, in tenths. Valid value range between `0` and `9`.
+
+### Human Loop Activation Config
+
+* `humanLoopActivationConditionsConfig` - (Required) Defines under what conditions SageMaker creates a human loop. See [Human Loop Activation Conditions Config](#human-loop-activation-conditions-config) details below.
+
+#### Human Loop Activation Conditions Config
+
+* `humanLoopActivationConditions` - (Required) A JSON expressing use-case-specific conditions declaratively. If any condition is matched, atomic tasks are created against the configured work team. For more information about how to structure the JSON, see [JSON Schema for Human Loop Activation Conditions in Amazon Augmented AI](https://docs.aws.amazon.com/sagemaker/latest/dg/a2i-human-fallback-conditions-json-schema.html).
+
+### Human Loop Request Source
+
+* `awsManagedHumanLoopRequestSource` - (Required) Specifies whether Amazon Rekognition or Amazon Textract is used as the integration source. Valid values are: `aws/rekognition/detectModerationLabels/image/v3` and `aws/textract/analyzeDocument/forms/v1`.
+
+### Output Config
+
+* `s3OutputPath` - (Required) The Amazon S3 path where the object containing human output will be made available.
+* `kmsKeyId` - (Optional) The Amazon Key Management Service (KMS) key ARN for server-side encryption.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Flow Definition.
+* `id` - The name of the Flow Definition.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Flow Definitions using the `flowDefinitionName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Flow Definitions using the `flowDefinitionName`.
For example:
+
+```console
+% terraform import aws_sagemaker_flow_definition.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_human_task_ui.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_human_task_ui.html.markdown
new file mode 100644
index 00000000000..5d4b4c3d299
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_human_task_ui.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_human_task_ui"
+description: |-
+  Provides a SageMaker Human Task UI resource.
+---
+
+
+
+# Resource: aws_sagemaker_human_task_ui
+
+Provides a SageMaker Human Task UI resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerHumanTaskUi } from "./.gen/providers/aws/sagemaker-human-task-ui";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerHumanTaskUi(this, "example", {
+      humanTaskUiName: "example",
+      uiTemplate: {
+        content: Token.asString(
+          Fn.file("sagemaker-human-task-ui-template.html")
+        ),
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `humanTaskUiName` - (Required) The name of the Human Task UI.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `uiTemplate` - (Required) The Liquid template for the worker user interface. See [UI Template](#ui-template) below.
+
+### UI Template
+
+* `content` - (Required) The content of the Liquid template for the worker user interface.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Human Task UI.
+* `id` - The name of the Human Task UI.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `uiTemplate` - The Liquid template for the worker user interface. See [UI Template](#ui-template-1) below.
+
+### UI Template
+
+* `contentSha256` - The SHA-256 digest of the contents of the template.
+* `url` - The URL for the user interface template.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Human Task UIs using the `humanTaskUiName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Human Task UIs using the `humanTaskUiName`.
For example:
+
+```console
+% terraform import aws_sagemaker_human_task_ui.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_image.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_image.html.markdown
new file mode 100644
index 00000000000..9e97a993091
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_image.html.markdown
@@ -0,0 +1,80 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_image"
+description: |-
+  Provides a SageMaker Image resource.
+---
+
+
+
+# Resource: aws_sagemaker_image
+
+Provides a SageMaker Image resource.
+
+## Example Usage
+
+### Basic usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerImage } from "./.gen/providers/aws/sagemaker-image";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerImage(this, "example", {
+      imageName: "example",
+      roleArn: test.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `imageName` - (Required) The name of the image. Must be unique to your account.
+* `roleArn` - (Required) The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.
+* `displayName` - (Optional) The display name of the image. When the image is added to a domain, it must be unique to the domain.
+* `description` - (Optional) The description of the image.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Image.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Image.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Code Images using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Code Images using the `name`.
For example: + +```console +% terraform import aws_sagemaker_image.test_image my-code-repo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_image_version.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_image_version.html.markdown new file mode 100644 index 00000000000..2be5c6ace98 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_image_version.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_image_version" +description: |- + Provides a SageMaker Image Version resource. +--- + + + +# Resource: aws_sagemaker_image_version + +Provides a SageMaker Image Version resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerImageVersion } from "./.gen/providers/aws/sagemaker-image-version"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerImageVersion(this, "test", { + baseImage: "012345678912.dkr.ecr.us-west-2.amazonaws.com/image:latest", + imageName: Token.asString(awsSagemakerImageTest.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `imageName` - (Required) The name of the image. Must be unique to your account. +* `baseImage` - (Required) The registry path of the container image on which this image version is based. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Image. +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Image Version. +* `imageArn`- The Amazon Resource Name (ARN) of the image the version is based on. +* `containerImage` - The registry path of the container image that contains this image version. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Image Versions using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Image Versions using the `name`. For example: + +```console +% terraform import aws_sagemaker_image_version.test_image my-code-repo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_model.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_model.html.markdown new file mode 100644 index 00000000000..8809b56f922 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_model.html.markdown @@ -0,0 +1,134 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_model" +description: |- + Provides a SageMaker model resource. +--- + + + +# Resource: aws_sagemaker_model + +Provides a SageMaker model resource. 
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { DataAwsSagemakerPrebuiltEcrImage } from "./.gen/providers/aws/data-aws-sagemaker-prebuilt-ecr-image";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { SagemakerModel } from "./.gen/providers/aws/sagemaker-model";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          principals: [
+            {
+              identifiers: ["sagemaker.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const test = new DataAwsSagemakerPrebuiltEcrImage(this, "test", {
+      repositoryName: "kmeans",
+    });
+    const example = new IamRole(this, "example", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+    });
+    const awsSagemakerModelExample = new SagemakerModel(this, "example_3", {
+      executionRoleArn: example.arn,
+      name: "my-model",
+      primaryContainer: {
+        image: Token.asString(test.registryPath),
+      },
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSagemakerModelExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the model (must be unique). If omitted, Terraform will assign a random, unique name.
+* `primaryContainer` - (Optional) The primary docker image containing inference code that is used when the model is deployed for predictions. If not specified, the `container` argument is required. Fields are documented below.
+* `executionRoleArn` - (Required) A role that SageMaker can assume to access model artifacts and docker images for deployment.
+* `inferenceExecutionConfig` - (Optional) Specifies details of how containers in a multi-container endpoint are called. see [Inference Execution Config](#inference-execution-config).
+* `container` (Optional) - Specifies containers in the inference pipeline. If not specified, the `primaryContainer` argument is required. Fields are documented below.
+* `enableNetworkIsolation` (Optional) - Isolates the model container. No inbound or outbound network calls can be made to or from the model container.
+* `vpcConfig` (Optional) - Specifies the VPC that you want your model to connect to. VpcConfig is used in hosting services and in batch transform.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+The `primaryContainer` and `container` blocks both support:
+
+* `image` - (Optional) The registry path where the inference code image is stored in Amazon ECR.
+* `mode` - (Optional) Whether the container hosts a single model or multiple models. Valid values are `singleModel` and `multiModel`. The default value is `singleModel`.
+* `modelDataUrl` - (Optional) The URL for the S3 location where model artifacts are stored.
+* `modelPackageName` - (Optional) The Amazon Resource Name (ARN) of the model package to use to create the model.
+* `containerHostname` - (Optional) The DNS host name for the container.
+* `environment` - (Optional) Environment variables for the Docker container, specified as a map of key/value pairs.
+* `imageConfig` - (Optional) Specifies whether the model container is in Amazon ECR or a private Docker registry accessible from your Amazon Virtual Private Cloud (VPC). For more information see [Using a Private Docker Registry for Real-Time Inference Containers](https://docs.aws.amazon.com/sagemaker/latest/dg/your-algorithms-containers-inference-private.html). see [Image Config](#image-config).
+
+### Image Config
+
+* `repositoryAccessMode` - (Required) Specifies whether the model container is in Amazon ECR or a private Docker registry accessible from your Amazon Virtual Private Cloud (VPC). Allowed values are: `platform` and `vpc`.
+* `repositoryAuthConfig` - (Optional) Specifies an authentication configuration for the private docker registry where your model image is hosted. Specify a value for this property only if you specified Vpc as the value for the RepositoryAccessMode field, and the private Docker registry where the model image is hosted requires authentication. see [Repository Auth Config](#repository-auth-config).
+
+#### Repository Auth Config
+
+* `repositoryCredentialsProviderArn` - (Required) The Amazon Resource Name (ARN) of an AWS Lambda function that provides credentials to authenticate to the private Docker registry where your model image is hosted. For information about how to create an AWS Lambda function, see [Create a Lambda function with the console](https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html) in the _AWS Lambda Developer Guide_.
+
+### Inference Execution Config
+
+* `mode` - (Required) How containers in a multi-container endpoint are run. Valid values are `serial` and `direct`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `name` - The name of the model.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this model.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import models using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import models using the `name`.
For example: + +```console +% terraform import aws_sagemaker_model.test_model model-foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_model_package_group.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_model_package_group.html.markdown new file mode 100644 index 00000000000..0add8a06671 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_model_package_group.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_model_package_group" +description: |- + Provides a SageMaker Model Package Group resource. +--- + + + +# Resource: aws_sagemaker_model_package_group + +Provides a SageMaker Model Package Group resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerModelPackageGroup } from "./.gen/providers/aws/sagemaker-model-package-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerModelPackageGroup(this, "example", { + modelPackageGroupName: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `modelPackageGroupName` - (Required) The name of the model group. +* `modelPackageGroupDescription` - (Optional) A description for the model group. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Model Package Group. +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Model Package Group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Model Package Groups using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Model Package Groups using the `name`. 
For example:
+
+```console
+% terraform import aws_sagemaker_model_package_group.test_model_package_group my-code-repo
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_model_package_group_policy.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_model_package_group_policy.html.markdown
new file mode 100644
index 00000000000..a7ae3201a43
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_model_package_group_policy.html.markdown
@@ -0,0 +1,112 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_model_package_group_policy"
+description: |-
+  Provides a SageMaker Model Package Group Policy resource.
+---
+
+
+
+# Resource: aws_sagemaker_model_package_group_policy
+
+Provides a SageMaker Model Package Group Policy resource.
+
+## Example Usage
+
+### Basic usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { SagemakerModelPackageGroup } from "./.gen/providers/aws/sagemaker-model-package-group";
+import { SagemakerModelPackageGroupPolicy } from "./.gen/providers/aws/sagemaker-model-package-group-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SagemakerModelPackageGroup(this, "example", {
+      modelPackageGroupName: "example",
+    });
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_2",
+      {
+        statement: [
+          {
+            actions: [
+              "sagemaker:DescribeModelPackage",
+              "sagemaker:ListModelPackages",
+            ],
+            principals: [
+              {
+                identifiers: [Token.asString(current.accountId)],
+                type: "AWS",
+              },
+            ],
+            resources: [example.arn],
+            sid: "AddPermModelPackageGroup",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsSagemakerModelPackageGroupPolicyExample =
+      new SagemakerModelPackageGroupPolicy(this, "example_3", {
+        modelPackageGroupName: example.modelPackageGroupName,
+        resourcePolicy: Token.asString(
+          Fn.jsonencode(
+            Fn.jsondecode(Token.asString(dataAwsIamPolicyDocumentExample.json))
+          )
+        ),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSagemakerModelPackageGroupPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `modelPackageGroupName` - (Required) The name of the model package group.
+* `resourcePolicy` - (Required) Provides the resource policy for the model package group.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Model Package Group.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Model Package Groups using the `name`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Model Package Groups using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_model_package_group_policy.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_monitoring_schedule.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_monitoring_schedule.html.markdown
new file mode 100644
index 00000000000..c14a4c95a38
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_monitoring_schedule.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_monitoring_schedule"
+description: |-
+  Provides a SageMaker Monitoring Schedule resource.
+---
+
+
+
+# Resource: aws_sagemaker_monitoring_schedule
+
+Provides a SageMaker monitoring schedule resource.
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerMonitoringSchedule } from "./.gen/providers/aws/sagemaker-monitoring-schedule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerMonitoringSchedule(this, "test", {
+      monitoringScheduleConfig: {
+        monitoringJobDefinitionName: Token.asString(
+          awsSagemakerDataQualityJobDefinitionTest.name
+        ),
+        monitoringType: "DataQuality",
+      },
+      name: "my-monitoring-schedule",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `monitoringScheduleConfig` - (Required) The configuration object that specifies the monitoring schedule and defines the monitoring job. Fields are documented below.
+* `name` - (Optional) The name of the monitoring schedule. The name must be unique within an AWS Region within an AWS account. If omitted, Terraform will assign a random, unique name.
+* `tags` - (Optional) A mapping of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### monitoring_schedule_config
+
+* `monitoringJobDefinitionName` - (Required) The name of the monitoring job definition to schedule.
+* `monitoringType` - (Required) The type of the monitoring job definition to schedule. Valid values are `DataQuality`, `ModelQuality`, `ModelBias`, or `ModelExplainability`.
+* `scheduleConfig` - (Optional) Configures the monitoring schedule. Fields are documented below.
+
+#### schedule_config
+
+* `scheduleExpression` - (Required) A cron expression that describes details about the monitoring schedule. For example, an hourly schedule would be `cron(0 * ? * * *)`.
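+
+Putting `scheduleConfig` together with the rest of `monitoringScheduleConfig`, a minimal hand-written sketch of an hourly schedule follows. This is not `cdktf convert` output; it assumes the same `awsSagemakerDataQualityJobDefinitionTest` binding as the example above.
+
+```typescript
+// A hand-written sketch, assuming an existing data quality job definition
+// binding named `awsSagemakerDataQualityJobDefinitionTest`.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { SagemakerMonitoringSchedule } from "./.gen/providers/aws/sagemaker-monitoring-schedule";
+class HourlyMonitoringSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerMonitoringSchedule(this, "hourly", {
+      name: "my-hourly-monitoring-schedule",
+      monitoringScheduleConfig: {
+        monitoringJobDefinitionName: Token.asString(
+          awsSagemakerDataQualityJobDefinitionTest.name
+        ),
+        monitoringType: "DataQuality",
+        // scheduleConfig is optional; this cron expression starts the
+        // monitoring job at the top of every hour.
+        scheduleConfig: {
+          scheduleExpression: "cron(0 * ? * * *)",
+        },
+      },
+    });
+  }
+}
+```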
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this monitoring schedule. +* `name` - The name of the monitoring schedule. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import monitoring schedules using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import monitoring schedules using the `name`. For example: + +```console +% terraform import aws_sagemaker_monitoring_schedule.test_monitoring_schedule monitoring-schedule-foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_notebook_instance.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_notebook_instance.html.markdown new file mode 100644 index 00000000000..09afb3de97c --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_notebook_instance.html.markdown @@ -0,0 +1,138 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_notebook_instance" +description: |- + Provides a SageMaker Notebook Instance resource. +--- + + + +# Resource: aws_sagemaker_notebook_instance + +Provides a SageMaker Notebook Instance resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerNotebookInstance } from "./.gen/providers/aws/sagemaker-notebook-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerNotebookInstance(this, "ni", { + instanceType: "ml.t2.medium", + name: "my-notebook-instance", + roleArn: role.arn, + tags: { + Name: "foo", + }, + }); + } +} + +``` + +### Code repository usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { SagemakerCodeRepository } from "./.gen/providers/aws/sagemaker-code-repository"; +import { SagemakerNotebookInstance } from "./.gen/providers/aws/sagemaker-notebook-instance"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SagemakerCodeRepository(this, "example", { + codeRepositoryName: "my-notebook-instance-code-repo", + gitConfig: { + repositoryUrl: + "https://github.com/hashicorp/terraform-provider-aws.git", + }, + }); + new SagemakerNotebookInstance(this, "ni", { + defaultCodeRepository: example.codeRepositoryName, + instanceType: "ml.t2.medium", + name: "my-notebook-instance", + roleArn: role.arn, + tags: { + Name: "foo", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the notebook instance (must be unique). +* `roleArn` - (Required) The ARN of the IAM role to be used by the notebook instance which allows SageMaker to call other services on your behalf. +* `instanceType` - (Required) The name of ML compute instance type. +* `platformIdentifier` - (Optional) The platform identifier of the notebook instance runtime environment. This value can be either `notebookAl1V1`, `notebookAl2V1`, or `notebookAl2V2`, depending on which version of Amazon Linux you require. +* `volumeSize` - (Optional) The size, in GB, of the ML storage volume to attach to the notebook instance. The default value is 5 GB. +* `subnetId` - (Optional) The VPC subnet ID. +* `securityGroups` - (Optional) The associated security groups. +* `acceleratorTypes` - (Optional) A list of Elastic Inference (EI) instance types to associate with this notebook instance. See [Elastic Inference Accelerator](https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html) for more details. Valid values: `mlEia1Medium`, `mlEia1Large`, `mlEia1Xlarge`, `mlEia2Medium`, `mlEia2Large`, `mlEia2Xlarge`. +* `additionalCodeRepositories` - (Optional) An array of up to three Git repositories to associate with the notebook instance. + These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in [AWS CodeCommit](https://docs.aws.amazon.com/codecommit/latest/userguide/welcome.html) or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. +* `defaultCodeRepository` - (Optional) The Git repository associated with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in [AWS CodeCommit](https://docs.aws.amazon.com/codecommit/latest/userguide/welcome.html) or in any other Git repository. +* `directInternetAccess` - (Optional) Set to `disabled` to disable internet access to notebook. Requires `securityGroups` and `subnetId` to be set. Supported values: `enabled` (Default) or `disabled`. If set to `disabled`, the notebook instance will be able to access resources only in your VPC, and will not be able to connect to Amazon SageMaker training and endpoint services unless your configure a NAT Gateway in your VPC. +* `instanceMetadataServiceConfiguration` - (Optional) Information on the IMDS configuration of the notebook instance. Conflicts with `instanceMetadataServiceConfiguration`. see details below. 
+* `kmsKeyId` - (Optional) The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
+* `lifecycleConfigName` - (Optional) The name of a lifecycle configuration to associate with the notebook instance.
+* `rootAccess` - (Optional) Whether root access is `Enabled` or `Disabled` for users of the notebook instance. The default value is `Enabled`.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### instance_metadata_service_configuration
+
+* `minimumInstanceMetadataServiceVersion` - (Optional) Indicates the minimum IMDS version that the notebook instance supports. When `1` is passed, both IMDSv1 and IMDSv2 are supported. When `2` is passed, only IMDSv2 is supported. Valid values are `1` and `2`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the notebook instance.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this notebook instance.
+* `url` - The URL that you use to connect to the Jupyter notebook that is running in your notebook instance.
+* `networkInterfaceId` - The network interface ID that Amazon SageMaker created at the time of creating the instance. Only available when setting `subnetId`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Notebook Instances using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Notebook Instances using the `name`. For example:
+
+```console
+% terraform import aws_sagemaker_notebook_instance.test_notebook_instance my-notebook-instance
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_notebook_instance_lifecycle_configuration.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_notebook_instance_lifecycle_configuration.html.markdown
new file mode 100644
index 00000000000..6413413ad1e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_notebook_instance_lifecycle_configuration.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_notebook_instance_lifecycle_configuration"
+description: |-
+  Provides a lifecycle configuration for SageMaker Notebook Instances.
+---
+
+
+
+# Resource: aws_sagemaker_notebook_instance_lifecycle_configuration
+
+Provides a lifecycle configuration for SageMaker Notebook Instances.
+ +## Example Usage + +Usage: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerNotebookInstanceLifecycleConfiguration } from "./.gen/providers/aws/sagemaker-notebook-instance-lifecycle-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerNotebookInstanceLifecycleConfiguration(this, "lc", { + name: "foo", + onCreate: Token.asString(Fn.base64encode("echo foo")), + onStart: Token.asString(Fn.base64encode("echo bar")), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The name of the lifecycle configuration (must be unique). If omitted, Terraform will assign a random, unique name. +* `onCreate` - (Optional) A shell script (base64-encoded) that runs only once when the SageMaker Notebook Instance is created. +* `onStart` - (Optional) A shell script (base64-encoded) that runs every time the SageMaker Notebook Instance is started including the time it's created. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this lifecycle configuration. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import models using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import models using the `name`. For example: + +```console +% terraform import aws_sagemaker_notebook_instance_lifecycle_configuration.lc foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_project.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_project.html.markdown new file mode 100644 index 00000000000..338b5152bc6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_project.html.markdown @@ -0,0 +1,94 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_project" +description: |- + Provides a SageMaker Project resource. +--- + + + +# Resource: aws_sagemaker_project + +Provides a SageMaker Project resource. + + -> Note: If you are trying to use SageMaker projects with SageMaker studio you will need to add a tag with the key `sagemaker:studioVisibility` with value `true`. For more on requirements to use projects and permission needed see [AWS Docs](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-projects-templates-custom.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { SagemakerProject } from "./.gen/providers/aws/sagemaker-project"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerProject(this, "example", { + projectName: "example", + serviceCatalogProvisioningDetails: { + productId: Token.asString(awsServicecatalogProductExample.id), + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `projectName` - (Required) The name of the Project. +* `projectDescription` - (Optional) A description for the project. +* `serviceCatalogProvisioningDetails` - (Required) The product ID and provisioning artifact ID to provision a service catalog. See [Service Catalog Provisioning Details](#service-catalog-provisioning-details) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Service Catalog Provisioning Details + +* `pathId` - (Optional) The path identifier of the product. This value is optional if the product has a default path, and required if the product has more than one path. +* `productId` - (Required) The ID of the product to provision. +* `provisioningArtifactId` - (Optional) The ID of the provisioning artifact. +* `provisioningParameter` - (Optional) A list of key value pairs that you specify when you provision a product. See [Provisioning Parameter](#provisioning-parameter) below. + +#### Provisioning Parameter + +* `key` - (Required) The key that identifies a provisioning parameter. +* `value` - (Optional) The value of the provisioning parameter. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Project. +* `id` - The name of the Project. +* `projectId` - The ID of the project. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Projects using the `projectName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Projects using the `projectName`. 
For example:
+
+```console
+% terraform import aws_sagemaker_project.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_servicecatalog_portfolio_status.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_servicecatalog_portfolio_status.html.markdown
new file mode 100644
index 00000000000..edf439b8294
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_servicecatalog_portfolio_status.html.markdown
@@ -0,0 +1,73 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_servicecatalog_portfolio_status"
+description: |-
+  Manages status of Service Catalog in SageMaker. Service Catalog is used to create SageMaker projects.
+---
+
+
+
+# Resource: aws_sagemaker_servicecatalog_portfolio_status
+
+Manages status of Service Catalog in SageMaker. Service Catalog is used to create SageMaker projects.
+
+## Example Usage
+
+Usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerServicecatalogPortfolioStatus } from "./.gen/providers/aws/sagemaker-servicecatalog-portfolio-status";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerServicecatalogPortfolioStatus(this, "example", {
+      status: "Enabled",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `status` - (Required) Whether Service Catalog is enabled or disabled in SageMaker. Valid values are `Enabled` and `Disabled`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The AWS Region the Service Catalog portfolio status resides in.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the SageMaker Service Catalog portfolio status using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import the SageMaker Service Catalog portfolio status using the `id`. For example:
+
+```console
+% terraform import aws_sagemaker_servicecatalog_portfolio_status.example us-east-1
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_space.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_space.html.markdown
new file mode 100644
index 00000000000..1bb62068339
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_space.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_space"
+description: |-
+  Provides a SageMaker Space resource.
+---
+
+
+
+# Resource: aws_sagemaker_space
+
+Provides a SageMaker Space resource.
+
+## Example Usage
+
+### Basic usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { SagemakerSpace } from "./.gen/providers/aws/sagemaker-space"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerSpace(this, "example", { + domainId: test.id, + spaceName: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `spaceName` - (Required) The name of the space. +* `domainId` - (Required) The ID of the associated Domain. +* `spaceSettings` - (Required) A collection of space settings. See [Space Settings](#space-settings) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### Space Settings + +* `jupyterServerAppSettings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter-server-app-settings) below. +* `kernelGatewayAppSettings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel-gateway-app-settings) below. + +#### Kernel Gateway App Settings + +* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below. +* `customImage` - (Optional) A list of custom SageMaker images that are configured to run as a KernelGateway app. see [Custom Image](#custom-image) below. +* `lifecycleConfigArns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations. + +#### Jupyter Server App Settings + +* `codeRepository` - (Optional) A list of Git repositories that SageMaker automatically displays to users for cloning in the JupyterServer application. see [Code Repository](#code-repository) below. +* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below. +* `lifecycleConfigArns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations. + +##### Code Repository + +* `repositoryUrl` - (Optional) The URL of the Git repository. + +##### Default Resource Spec + +* `instanceType` - (Optional) The instance type. +* `lifecycleConfigArn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource. +* `sagemakerImageArn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker image created on the instance. +* `sagemakerImageVersionArn` - (Optional) The ARN of the image version created on the instance. + +##### Custom Image + +* `appImageConfigName` - (Required) The name of the App Image Config. +* `imageName` - (Required) The name of the Custom Image. +* `imageVersionNumber` - (Optional) The version number of the Custom Image. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The space's Amazon Resource Name (ARN). +* `arn` - The space's Amazon Resource Name (ARN). +* `homeEfsFileSystemUid` - The ID of the space's profile in the Amazon Elastic File System volume. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Spaces using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Spaces using the `id`. For example:
+
+```console
+% terraform import aws_sagemaker_space.test_space arn:aws:sagemaker:us-west-2:123456789012:space/domain-id/space-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_studio_lifecycle_config.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_studio_lifecycle_config.html.markdown
new file mode 100644
index 00000000000..8dea6997845
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_studio_lifecycle_config.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_studio_lifecycle_config"
+description: |-
+  Provides a SageMaker Studio Lifecycle Config resource.
+---
+
+
+
+# Resource: aws_sagemaker_studio_lifecycle_config
+
+Provides a SageMaker Studio Lifecycle Config resource.
+
+## Example Usage
+
+### Basic usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerStudioLifecycleConfig } from "./.gen/providers/aws/sagemaker-studio-lifecycle-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerStudioLifecycleConfig(this, "example", {
+      studioLifecycleConfigAppType: "JupyterServer",
+      studioLifecycleConfigContent: Token.asString(
+        Fn.base64encode("echo Hello")
+      ),
+      studioLifecycleConfigName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `studioLifecycleConfigName` - (Required) The name of the Studio Lifecycle Configuration to create.
+* `studioLifecycleConfigAppType` - (Required) The App type that the Lifecycle Configuration is attached to. Valid values are `JupyterServer` and `KernelGateway`.
+* `studioLifecycleConfigContent` - (Required) The content of your Studio Lifecycle Configuration script. This content must be base64 encoded.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the Studio Lifecycle Config.
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Studio Lifecycle Config.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Studio Lifecycle Configs using the `studioLifecycleConfigName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Studio Lifecycle Configs using the `studioLifecycleConfigName`. For example: + +```console +% terraform import aws_sagemaker_studio_lifecycle_config.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_user_profile.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_user_profile.html.markdown new file mode 100644 index 00000000000..33fc67e690d --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_user_profile.html.markdown @@ -0,0 +1,164 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_user_profile" +description: |- + Provides a SageMaker User Profile resource. +--- + + + +# Resource: aws_sagemaker_user_profile + +Provides a SageMaker User Profile resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SagemakerUserProfile } from "./.gen/providers/aws/sagemaker-user-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SagemakerUserProfile(this, "example", { + domainId: test.id, + userProfileName: "example", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `userProfileName` - (Required) The name for the User Profile. +* `domainId` - (Required) The ID of the associated Domain. +* `singleSignOnUserIdentifier` - (Optional) A specifier for the type of value specified in `singleSignOnUserValue`. Currently, the only supported value is `userName`. If the Domain's AuthMode is SSO, this field is required. If the Domain's AuthMode is not SSO, this field cannot be specified. +* `singleSignOnUserValue` - (Required) The username of the associated AWS Single Sign-On User for this User Profile. If the Domain's AuthMode is SSO, this field is required, and must match a valid username of a user in your directory. If the Domain's AuthMode is not SSO, this field cannot be specified. +* `userSettings` - (Required) The user settings. See [User Settings](#user-settings) below. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
+
+### User Settings
+
+* `executionRole` - (Required) The execution role ARN for the user.
+* `securityGroups` - (Optional) The security groups.
+* `sharingSettings` - (Optional) The sharing settings. See [Sharing Settings](#sharing-settings) below.
+* `tensorBoardAppSettings` - (Optional) The TensorBoard app settings. See [TensorBoard App Settings](#tensorboard-app-settings) below.
+* `jupyterServerAppSettings` - (Optional) The Jupyter server's app settings. See [Jupyter Server App Settings](#jupyter-server-app-settings) below.
+* `kernelGatewayAppSettings` - (Optional) The kernel gateway app settings. See [Kernel Gateway App Settings](#kernel-gateway-app-settings) below.
+* `rSessionAppSettings` - (Optional) The RSession app settings. See [RSession App Settings](#rsession-app-settings) below.
+* `rStudioServerProAppSettings` - (Optional) A collection of settings that configure user interaction with the RStudioServerPro app. See [RStudio Server Pro App Settings](#rstudio-server-pro-app-settings) below.
+* `canvasAppSettings` - (Optional) The Canvas app settings. See [Canvas App Settings](#canvas-app-settings) below.
+
+#### Canvas App Settings
+
+* `modelRegisterSettings` - (Optional) The model registry settings for the SageMaker Canvas application. See [Model Register Settings](#model-register-settings) below.
+* `timeSeriesForecastingSettings` - (Optional) Time series forecast settings for the Canvas app. see [Time Series Forecasting Settings](#time-series-forecasting-settings) below.
+* `workspaceSettings` - (Optional) The workspace settings for the SageMaker Canvas application. See [Workspace Settings](#workspace-settings) below.
+
+#### Sharing Settings
+
+* `notebookOutputOption` - (Optional) Whether to include the notebook cell output when sharing the notebook. The default is `Disabled`. Valid values are `Allowed` and `Disabled`.
+* `s3KmsKeyId` - (Optional) When `notebookOutputOption` is `Allowed`, the AWS Key Management Service (KMS) encryption key ID used to encrypt the notebook cell output in the Amazon S3 bucket.
+* `s3OutputPath` - (Optional) When `notebookOutputOption` is `Allowed`, the Amazon S3 bucket used to save the notebook cell output.
+
+#### TensorBoard App Settings
+
+* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below.
+
+#### Kernel Gateway App Settings
+
+* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below.
+* `customImage` - (Optional) A list of custom SageMaker images that are configured to run as a KernelGateway app. see [Custom Image](#custom-image) below.
+* `lifecycleConfigArns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations.
+
+#### Jupyter Server App Settings
+
+* `codeRepository` - (Optional) A list of Git repositories that SageMaker automatically displays to users for cloning in the JupyterServer application. see [Code Repository](#code-repository) below.
+* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. see [Default Resource Spec](#default-resource-spec) below.
+* `lifecycleConfigArns` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configurations.
+
+#### RSession App Settings
+
+* `defaultResourceSpec` - (Optional) The default instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance. See [Default Resource Spec](#default-resource-spec) below.
+* `customImage` - (Optional) A list of custom SageMaker images that are configured to run as an RSession app. See [Custom Image](#custom-image) below.
+
+#### RStudio Server Pro App Settings
+
+* `accessStatus` - (Optional) Indicates whether the current user has access to the RStudioServerPro app. Valid values are `ENABLED` and `DISABLED`.
+* `userGroup` - (Optional) The level of permissions that the user has within the RStudioServerPro app. This value defaults to `R_STUDIO_USER`. The `R_STUDIO_ADMIN` value allows the user access to the RStudio Administrative Dashboard. Valid values are `R_STUDIO_USER` and `R_STUDIO_ADMIN`.
+
+##### Code Repository
+
+* `repositoryUrl` - (Optional) The URL of the Git repository.
+
+##### Default Resource Spec
+
+* `instanceType` - (Optional) The instance type.
+* `lifecycleConfigArn` - (Optional) The Amazon Resource Name (ARN) of the Lifecycle Configuration attached to the Resource.
+* `sagemakerImageArn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker image created on the instance.
+* `sagemakerImageVersionArn` - (Optional) The ARN of the image version created on the instance.
+
+##### Custom Image
+
+* `appImageConfigName` - (Required) The name of the App Image Config.
+* `imageName` - (Required) The name of the Custom Image.
+* `imageVersionNumber` - (Optional) The version number of the Custom Image.
+
+##### Time Series Forecasting Settings
+
+* `amazonForecastRoleArn` - (Optional) The IAM role that Canvas passes to Amazon Forecast for time series forecasting. By default, Canvas uses the execution role specified in the UserProfile that launches the Canvas app. If an execution role is not specified in the UserProfile, Canvas uses the execution role specified in the Domain that owns the UserProfile. To allow time series forecasting, this IAM role should have the [AmazonSageMakerCanvasForecastAccess](https://docs.aws.amazon.com/sagemaker/latest/dg/security-iam-awsmanpol-canvas.html#security-iam-awsmanpol-AmazonSageMakerCanvasForecastAccess) policy attached and forecast.amazonaws.com added in the trust relationship as a service principal.
+* `status` - (Optional) Describes whether time series forecasting is enabled or disabled in the Canvas app. Valid values are `ENABLED` and `DISABLED`.
+
+##### Model Register Settings
+
+* `crossAccountModelRegisterRoleArn` - (Optional) The Amazon Resource Name (ARN) of the SageMaker model registry account. Required only to register model versions created by a different SageMaker Canvas AWS account than the AWS account in which SageMaker model registry is set up.
+* `status` - (Optional) Describes whether the integration to the model registry is enabled or disabled in the Canvas application. Valid values are `ENABLED` and `DISABLED`.
+
+##### Workspace Settings
+
+* `s3ArtifactPath` - (Optional) The Amazon S3 bucket used to store artifacts generated by Canvas. Updating the Amazon S3 location impacts existing configuration settings, and Canvas users no longer have access to their artifacts. Canvas users must log out and log back in to apply the new location.
+* `s3KmsKeyId` - (Optional) The Amazon Web Services Key Management Service (KMS) encryption key ID that is used to encrypt artifacts generated by Canvas in the Amazon S3 bucket.
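+
+As an illustrative aside (not part of the generated provider documentation), the nested blocks above map directly onto camelCase properties of the CDKTF bindings. A minimal hedged sketch, assuming placeholder values for the domain ID, execution role ARN, and S3 path:
+
+```typescript
+// A hedged sketch: shows `userSettings` nesting `sharingSettings` and
+// `jupyterServerAppSettings`. All IDs, ARNs, and paths below are placeholder
+// assumptions, not values from the provider documentation.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SagemakerUserProfile } from "./.gen/providers/aws/sagemaker-user-profile";
+class UserSettingsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerUserProfile(this, "example", {
+      domainId: "d-example123",
+      userProfileName: "example",
+      userSettings: {
+        executionRole: "arn:aws:iam::123456789012:role/sagemaker-execution-role",
+        sharingSettings: {
+          notebookOutputOption: "Allowed",
+          s3OutputPath: "s3://example-bucket/sharing",
+        },
+        jupyterServerAppSettings: {
+          defaultResourceSpec: {
+            instanceType: "system",
+          },
+        },
+      },
+    });
+  }
+}
+
+```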
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The user profile Amazon Resource Name (ARN). +* `arn` - The user profile Amazon Resource Name (ARN). +* `homeEfsFileSystemUid` - The ID of the user's profile in the Amazon Elastic File System (EFS) volume. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker User Profiles using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker User Profiles using the `arn`. For example: + +```console +% terraform import aws_sagemaker_user_profile.test_user_profile arn:aws:sagemaker:us-west-2:123456789012:user-profile/domain-id/profile-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sagemaker_workforce.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_workforce.html.markdown new file mode 100644 index 00000000000..4361f3ebe87 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sagemaker_workforce.html.markdown @@ -0,0 +1,175 @@ +--- +subcategory: "SageMaker" +layout: "aws" +page_title: "AWS: aws_sagemaker_workforce" +description: |- + Provides a SageMaker Workforce resource. +--- + + + +# Resource: aws_sagemaker_workforce + +Provides a SageMaker Workforce resource. + +## Example Usage + +### Cognito Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CognitoUserPool } from "./.gen/providers/aws/cognito-user-pool"; +import { CognitoUserPoolClient } from "./.gen/providers/aws/cognito-user-pool-client"; +import { CognitoUserPoolDomain } from "./.gen/providers/aws/cognito-user-pool-domain"; +import { SagemakerWorkforce } from "./.gen/providers/aws/sagemaker-workforce"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CognitoUserPool(this, "example", { + name: "example", + }); + const awsCognitoUserPoolClientExample = new CognitoUserPoolClient( + this, + "example_1", + { + generateSecret: true, + name: "example", + userPoolId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsCognitoUserPoolClientExample.overrideLogicalId("example"); + const awsCognitoUserPoolDomainExample = new CognitoUserPoolDomain( + this, + "example_2", + { + domain: "example", + userPoolId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsCognitoUserPoolDomainExample.overrideLogicalId("example");
+    const awsSagemakerWorkforceExample = new SagemakerWorkforce(
+      this,
+      "example_3",
+      {
+        cognitoConfig: {
+          clientId: Token.asString(awsCognitoUserPoolClientExample.id),
+          userPool: Token.asString(awsCognitoUserPoolDomainExample.userPoolId),
+        },
+        workforceName: "example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSagemakerWorkforceExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Oidc Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerWorkforce } from "./.gen/providers/aws/sagemaker-workforce";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerWorkforce(this, "example", {
+      oidcConfig: {
+        authorizationEndpoint: "https://example.com",
+        clientId: "example",
+        clientSecret: "example",
+        issuer: "https://example.com",
+        jwksUri: "https://example.com",
+        logoutEndpoint: "https://example.com",
+        tokenEndpoint: "https://example.com",
+        userInfoEndpoint: "https://example.com",
+      },
+      workforceName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `workforceName` - (Required) The name of the Workforce (must be unique).
+* `cognitoConfig` - (Optional) Use this parameter to configure an Amazon Cognito private workforce. A single Cognito workforce is created from, and corresponds to, a single Amazon Cognito user pool. Conflicts with `oidcConfig`. see [Cognito Config](#cognito-config) details below.
+* `oidcConfig` - (Optional) Use this parameter to configure a private workforce using your own OIDC Identity Provider. Conflicts with `cognitoConfig`. see [OIDC Config](#oidc-config) details below.
+* `sourceIpConfig` - (Optional) A list of IP address ranges (CIDRs) used to create an allow list of IP addresses for a private workforce. By default, a workforce isn't restricted to specific IP addresses. see [Source Ip Config](#source-ip-config) details below.
+* `workforceVpcConfig` - (Optional) Configuration for a workforce to use a VPC. see [Workforce VPC Config](#workforce-vpc-config) details below.
+
+### Cognito Config
+
+* `clientId` - (Required) The client ID for your Amazon Cognito user pool.
+* `userPool` - (Required) ID for your Amazon Cognito user pool.
+
+### Oidc Config
+
+* `authorizationEndpoint` - (Required) The OIDC IdP authorization endpoint used to configure your private workforce.
+* `clientId` - (Required) The OIDC IdP client ID used to configure your private workforce.
+* `clientSecret` - (Required) The OIDC IdP client secret used to configure your private workforce.
+* `issuer` - (Required) The OIDC IdP issuer used to configure your private workforce.
+* `jwksUri` - (Required) The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.
+* `logoutEndpoint` - (Required) The OIDC IdP logout endpoint used to configure your private workforce.
+* `tokenEndpoint` - (Required) The OIDC IdP token endpoint used to configure your private workforce.
+* `userInfoEndpoint` - (Required) The OIDC IdP user information endpoint used to configure your private workforce.
+
+### Source Ip Config
+
+* `cidrs` - (Required) A list of up to 10 CIDR values.
+
+### Workforce VPC Config
+
+* `securityGroupIds` - (Optional) The VPC security group IDs. The security groups must be for the same VPC as specified in the subnet.
+* `subnets` - (Optional) The IDs of the subnets in the VPC that you want to connect.
+* `vpcId` - (Optional) The ID of the VPC that the workforce uses for communication.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Workforce.
+* `id` - The name of the Workforce.
+* `subdomain` - The subdomain for your OIDC Identity Provider.
+* `workforceVpcConfig0VpcEndpointId` - The IDs for the VPC service endpoints of your VPC workforce.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Workforces using the `workforceName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SageMaker Workforces using the `workforceName`. For example:
+
+```console
+% terraform import aws_sagemaker_workforce.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sagemaker_workteam.html.markdown b/website/docs/cdktf/typescript/r/sagemaker_workteam.html.markdown
new file mode 100644
index 00000000000..041cdeefe41
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sagemaker_workteam.html.markdown
@@ -0,0 +1,144 @@
+---
+subcategory: "SageMaker"
+layout: "aws"
+page_title: "AWS: aws_sagemaker_workteam"
+description: |-
+  Provides a SageMaker Workteam resource.
+---
+
+
+
+# Resource: aws_sagemaker_workteam
+
+Provides a SageMaker Workteam resource.
+
+## Example Usage
+
+### Cognito Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerWorkteam } from "./.gen/providers/aws/sagemaker-workteam";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerWorkteam(this, "example", {
+      description: "example",
+      memberDefinition: [
+        {
+          cognitoMemberDefinition: {
+            clientId: Token.asString(awsCognitoUserPoolClientExample.id),
+            userGroup: Token.asString(awsCognitoUserGroupExample.id),
+            userPool: Token.asString(
+              awsCognitoUserPoolDomainExample.userPoolId
+            ),
+          },
+        },
+      ],
+      workforceName: Token.asString(awsSagemakerWorkforceExample.id),
+      workteamName: "example",
+    });
+  }
+}
+
+```
+
+### Oidc Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SagemakerWorkteam } from "./.gen/providers/aws/sagemaker-workteam";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerWorkteam(this, "example", {
+      description: "example",
+      memberDefinition: [
+        {
+          oidcMemberDefinition: {
+            groups: ["example"],
+          },
+        },
+      ],
+      workforceName: Token.asString(awsSagemakerWorkforceExample.id),
+      workteamName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:

+* `description` - (Required) A description of the work team.
+* `workforceName` - (Required) The name of the Workforce that the work team belongs to.
+* `workteamName` - (Required) The name of the Workteam (must be unique).
+* `memberDefinition` - (Required) A list of Member Definitions that contains objects that identify the workers that make up the work team. Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). For private workforces created using Amazon Cognito use `cognitoMemberDefinition`. For workforces created using your own OIDC identity provider (IdP) use `oidcMemberDefinition`. Do not provide input for both of these parameters in a single request. see [Member Definition](#member-definition) details below.
+* `notificationConfiguration` - (Optional) Configures notification of workers regarding available or expiring work items. see [Notification Configuration](#notification-configuration) details below; a minimal sketch follows the attribute reference.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Member Definition
+
+* `cognitoMemberDefinition` - (Optional) The Amazon Cognito user group that is part of the work team. See [Cognito Member Definition](#cognito-member-definition) details below.
+* `oidcMemberDefinition` - (Optional) A list of user groups that exist in your OIDC Identity Provider (IdP). One to ten groups can be used to create a single private work team. See [Oidc Member Definition](#oidc-member-definition) details below.
+
+#### Cognito Member Definition
+
+* `clientId` - (Required) An identifier for an application client. You must create the app client ID using Amazon Cognito.
+* `userPool` - (Required) An identifier for a user pool. The user pool must be in the same region as the service that you are calling.
+* `userGroup` - (Required) An identifier for a user group.
+
+#### Oidc Member Definition
+
+* `groups` - (Required) A list of comma-separated strings that identify user groups in your OIDC IdP. Each user group is made up of a group of private workers.
+
+### Notification Configuration
+
+* `notificationTopicArn` - (Required) The ARN for the SNS topic to which notifications should be published.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) assigned by AWS to this Workteam.
+* `id` - The name of the Workteam.
+* `subdomain` - The subdomain for your OIDC Identity Provider.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
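+
+The `notificationConfiguration` block is not covered by the examples above; a minimal hedged sketch, assuming a placeholder SNS topic ARN and a pre-existing OIDC workforce named `example`:
+
+```typescript
+// A hedged sketch: attaches a notification topic to a work team. The topic
+// ARN and workforce name are placeholder assumptions.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SagemakerWorkteam } from "./.gen/providers/aws/sagemaker-workteam";
+class NotificationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SagemakerWorkteam(this, "example", {
+      description: "example",
+      memberDefinition: [
+        {
+          oidcMemberDefinition: {
+            groups: ["example"],
+          },
+        },
+      ],
+      notificationConfiguration: {
+        notificationTopicArn: "arn:aws:sns:us-east-1:123456789012:example-topic",
+      },
+      workforceName: "example",
+      workteamName: "example",
+    });
+  }
+}
+
+```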
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SageMaker Workteams using the `workteamName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SageMaker Workteams using the `workteamName`. For example: + +```console +% terraform import aws_sagemaker_workteam.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/scheduler_schedule.html.markdown b/website/docs/cdktf/typescript/r/scheduler_schedule.html.markdown new file mode 100644 index 00000000000..87f2f19e4cf --- /dev/null +++ b/website/docs/cdktf/typescript/r/scheduler_schedule.html.markdown @@ -0,0 +1,243 @@ +--- +subcategory: "EventBridge Scheduler" +layout: "aws" +page_title: "AWS: aws_scheduler_schedule" +description: |- + Provides an EventBridge Scheduler Schedule resource. +--- + + + +# Resource: aws_scheduler_schedule + +Provides an EventBridge Scheduler Schedule resource. + +You can find out more about EventBridge Scheduler in the [User Guide](https://docs.aws.amazon.com/scheduler/latest/UserGuide/what-is-scheduler.html). + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SchedulerSchedule } from "./.gen/providers/aws/scheduler-schedule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SchedulerSchedule(this, "example", { + flexibleTimeWindow: { + mode: "OFF", + }, + groupName: "default", + name: "my-schedule", + scheduleExpression: "rate(1 hours)", + target: { + arn: Token.asString(awsSqsQueueExample.arn), + roleArn: Token.asString(awsIamRoleExample.arn), + }, + }); + } +} + +``` + +### Universal Target + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { SchedulerSchedule } from "./.gen/providers/aws/scheduler-schedule";
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SqsQueue(this, "example", {});
+    const awsSchedulerScheduleExample = new SchedulerSchedule(
+      this,
+      "example_1",
+      {
+        flexibleTimeWindow: {
+          mode: "OFF",
+        },
+        name: "my-schedule",
+        scheduleExpression: "rate(1 hours)",
+        target: {
+          arn: "arn:aws:scheduler:::aws-sdk:sqs:sendMessage",
+          input: Token.asString(
+            Fn.jsonencode({
+              MessageBody: "Greetings, programs!",
+              QueueUrl: example.url,
+            })
+          ),
+          roleArn: Token.asString(awsIamRoleExample.arn),
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSchedulerScheduleExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `flexibleTimeWindow` - (Required) Configures a time window during which EventBridge Scheduler invokes the schedule. Detailed below.
+* `scheduleExpression` - (Required) Defines when the schedule runs. Read more in [Schedule types on EventBridge Scheduler](https://docs.aws.amazon.com/scheduler/latest/UserGuide/schedule-types.html).
+* `target` - (Required) Configures the target of the schedule. Detailed below.
+
+The following arguments are optional:
+
+* `description` - (Optional) Brief description of the schedule.
+* `endDate` - (Optional) The date, in UTC, before which the schedule can invoke its target. Depending on the schedule's recurrence expression, invocations might stop on, or before, the end date you specify. EventBridge Scheduler ignores the end date for one-time schedules. Example: `2030-01-01T01:00:00Z`.
+* `groupName` - (Optional, Forces new resource) Name of the schedule group to associate with this schedule. When omitted, the `default` schedule group is used.
+* `kmsKeyArn` - (Optional) ARN for the customer managed KMS key that EventBridge Scheduler will use to encrypt and decrypt your data.
+* `name` - (Optional, Forces new resource) Name of the schedule. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `scheduleExpressionTimezone` - (Optional) Timezone in which the scheduling expression is evaluated. Defaults to `UTC`. Example: `Australia/Sydney`.
+* `startDate` - (Optional) The date, in UTC, after which the schedule can begin invoking its target. Depending on the schedule's recurrence expression, invocations might occur on, or after, the start date you specify. EventBridge Scheduler ignores the start date for one-time schedules. Example: `2030-01-01T01:00:00Z`.
+* `state` - (Optional) Specifies whether the schedule is enabled or disabled. One of: `ENABLED` (default), `DISABLED`.
+
+### flexible_time_window Configuration Block
+
+* `maximumWindowInMinutes` - (Optional) Maximum time window during which a schedule can be invoked. Ranges from `1` to `1440` minutes.
+* `mode` - (Required) Determines whether the schedule is invoked within a flexible time window. One of: `OFF`, `FLEXIBLE`.
+
+### target Configuration Block
+
+The following arguments are required:
+
+* `arn` - (Required) ARN of the target of this schedule, such as an SQS queue or ECS cluster. 
For universal targets, this is a [Service ARN specific to the target service](https://docs.aws.amazon.com/scheduler/latest/UserGuide/managing-targets-universal.html#supported-universal-targets).
+* `roleArn` - (Required) ARN of the IAM role that EventBridge Scheduler will use for this target when the schedule is invoked. Read more in [Set up the execution role](https://docs.aws.amazon.com/scheduler/latest/UserGuide/setting-up.html#setting-up-execution-role).
+
+The following arguments are optional:
+
+* `deadLetterConfig` - (Optional) Information about an Amazon SQS queue that EventBridge Scheduler uses as a dead-letter queue for your schedule. If specified, EventBridge Scheduler delivers failed events that could not be successfully delivered to a target to the queue. Detailed below.
+* `ecsParameters` - (Optional) Templated target type for the Amazon ECS [`runTask`](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html) API operation. Detailed below.
+* `eventbridgeParameters` - (Optional) Templated target type for the EventBridge [`putEvents`](https://docs.aws.amazon.com/eventbridge/latest/APIReference/API_PutEvents.html) API operation. Detailed below.
+* `input` - (Optional) Text, or well-formed JSON, passed to the target. Read more in [Universal target](https://docs.aws.amazon.com/scheduler/latest/UserGuide/managing-targets-universal.html).
+* `kinesisParameters` - (Optional) Templated target type for the Amazon Kinesis [`putRecord`](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecord.html) API operation. Detailed below.
+* `retryPolicy` - (Optional) Information about the retry policy settings. Detailed below.
+* `sagemakerPipelineParameters` - (Optional) Templated target type for the Amazon SageMaker [`startPipelineExecution`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_StartPipelineExecution.html) API operation. Detailed below.
+* `sqsParameters` - (Optional) The templated target type for the Amazon SQS [`sendMessage`](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_SendMessage.html) API operation. Detailed below.
+
+#### dead_letter_config Configuration Block
+
+* `arn` - (Required) ARN of the SQS queue specified as the destination for the dead-letter queue.
+
+#### ecs_parameters Configuration Block
+
+The following arguments are required:
+
+* `taskDefinitionArn` - (Required) ARN of the task definition to use.
+
+The following arguments are optional:
+
+* `capacityProviderStrategy` - (Optional) Up to `6` capacity provider strategies to use for the task. Detailed below.
+* `enableEcsManagedTags` - (Optional) Specifies whether to enable Amazon ECS managed tags for the task. For more information, see [Tagging Your Amazon ECS Resources](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-using-tags.html) in the Amazon ECS Developer Guide.
+* `enableExecuteCommand` - (Optional) Specifies whether to enable the execute command functionality for the containers in this task.
+* `group` - (Optional) Specifies an ECS task group for the task. At most 255 characters.
+* `launchType` - (Optional) Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. One of: `EC2`, `FARGATE`, `EXTERNAL`.
+* `networkConfiguration` - (Optional) Configures the networking associated with the task. Detailed below.
+* `placementConstraints` - (Optional) A set of up to 10 placement constraints to use for the task. 
Detailed below.
+* `placementStrategy` - (Optional) A set of up to 5 placement strategies. Detailed below.
+* `platformVersion` - (Optional) Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as `1.1.0`.
+* `propagateTags` - (Optional) Specifies whether to propagate the tags from the task definition to the task. One of: `TASK_DEFINITION`.
+* `referenceId` - (Optional) Reference ID to use for the task.
+* `tags` - (Optional) The metadata that you apply to the task. Each tag consists of a key and an optional value. For more information, see [`runTask`](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html) in the Amazon ECS API Reference.
+* `taskCount` - (Optional) The number of tasks to create. Ranges from `1` (default) to `10`.
+
+##### capacity_provider_strategy Configuration Block
+
+* `base` - (Optional) How many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. Ranges from `0` (default) to `100000`.
+* `capacityProvider` - (Required) Short name of the capacity provider.
+* `weight` - (Optional) Designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Ranges from `0` to `1000`.
+
+##### network_configuration Configuration Block
+
+* `assignPublicIp` - (Optional) Specifies whether the task's elastic network interface receives a public IP address. This attribute is a boolean type, where `true` maps to `ENABLED` and `false` to `DISABLED`. You can specify `true` only when the `launchType` is set to `FARGATE`.
+* `securityGroups` - (Optional) Set of 1 to 5 security group IDs to be associated with the task. These security groups must all be in the same VPC.
+* `subnets` - (Optional) Set of 1 to 16 subnets to be associated with the task. These subnets must all be in the same VPC.
+
+##### placement_constraints Configuration Block
+
+* `expression` - (Optional) A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is `distinctInstance`. For more information, see [Cluster query language](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-query-language.html) in the Amazon ECS Developer Guide.
+* `type` - (Required) The type of constraint. One of: `distinctInstance`, `memberOf`.
+
+##### placement_strategy Configuration Block
+
+* `field` - (Optional) The field to apply the placement strategy against.
+* `type` - (Required) The type of placement strategy. One of: `random`, `spread`, `binpack`.
+
+#### eventbridge_parameters Configuration Block
+
+* `detailType` - (Required) Free-form string used to decide what fields to expect in the event detail. Up to 128 characters.
+* `source` - (Required) Source of the event.
+
+#### kinesis_parameters Configuration Block
+
+* `partitionKey` - (Required) Specifies the shard to which EventBridge Scheduler sends the event. Up to 256 characters.
+
+#### retry_policy Configuration Block
+
+* `maximumEventAgeInSeconds` - (Optional) Maximum amount of time, in seconds, to continue to make retry attempts. Ranges from `60` to `86400` (default).
+* `maximumRetryAttempts` - (Optional) Maximum number of retry attempts to make before the request fails. Ranges from `0` to `185` (default).
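+
+A minimal hedged sketch combining the `retryPolicy` block above with a `deadLetterConfig`, so failed invocations are retried for up to an hour and then delivered to a dead-letter queue (all ARNs are placeholder assumptions):
+
+```typescript
+// A hedged sketch: `retryPolicy` and `deadLetterConfig` both nest inside
+// `target`. The queue and role ARNs are placeholder assumptions.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SchedulerSchedule } from "./.gen/providers/aws/scheduler-schedule";
+class RetrySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SchedulerSchedule(this, "example", {
+      flexibleTimeWindow: {
+        mode: "OFF",
+      },
+      name: "my-schedule",
+      scheduleExpression: "rate(5 minutes)",
+      target: {
+        arn: "arn:aws:sqs:us-east-1:123456789012:example-queue",
+        roleArn: "arn:aws:iam::123456789012:role/scheduler-role",
+        retryPolicy: {
+          maximumEventAgeInSeconds: 3600,
+          maximumRetryAttempts: 5,
+        },
+        deadLetterConfig: {
+          arn: "arn:aws:sqs:us-east-1:123456789012:example-dlq",
+        },
+      },
+    });
+  }
+}
+
+```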
+ +#### sagemaker_pipeline_parameters Configuration Block + +* `pipelineParameter` - (Optional) Set of up to 200 parameter names and values to use when executing the SageMaker Model Building Pipeline. Detailed below. + +##### pipeline_parameter Configuration Block + +* `name` - (Required) Name of parameter to start execution of a SageMaker Model Building Pipeline. +* `value` - (Required) Value of parameter to start execution of a SageMaker Model Building Pipeline. + +#### sqs_parameters Configuration Block + +* `messageGroupId` - (Optional) FIFO message group ID to use as the target. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Name of the schedule. +* `arn` - ARN of the schedule. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import schedules using the combination `groupName/name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import schedules using the combination `groupName/name`. For example: + +```console +% terraform import aws_scheduler_schedule.example my-schedule-group/my-schedule +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/scheduler_schedule_group.html.markdown b/website/docs/cdktf/typescript/r/scheduler_schedule_group.html.markdown new file mode 100644 index 00000000000..62e8ca9f736 --- /dev/null +++ b/website/docs/cdktf/typescript/r/scheduler_schedule_group.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "EventBridge Scheduler" +layout: "aws" +page_title: "AWS: aws_scheduler_schedule_group" +description: |- + Provides an EventBridge Scheduler Schedule Group resource. +--- + + + +# Resource: aws_scheduler_schedule_group + +Provides an EventBridge Scheduler Schedule Group resource. + +You can find out more about EventBridge Scheduler in the [User Guide](https://docs.aws.amazon.com/scheduler/latest/UserGuide/what-is-scheduler.html). + +~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SchedulerScheduleGroup } from "./.gen/providers/aws/scheduler-schedule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SchedulerScheduleGroup(this, "example", { + name: "my-schedule-group", + }); + } +} + +``` + +## Argument Reference + +The following arguments are optional: + +* `name` - (Optional, Forces new resource) Name of the schedule group. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`. +* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. +* `tags` - (Optional) Key-value mapping of resource tags. 
If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the schedule group.
+* `arn` - ARN of the schedule group.
+* `creationDate` - Time at which the schedule group was created.
+* `lastModificationDate` - Time at which the schedule group was last modified.
+* `state` - State of the schedule group. Can be `ACTIVE` or `DELETING`.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+- `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import schedule groups using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import schedule groups using the `name`. For example:
+
+```console
+% terraform import aws_scheduler_schedule_group.example my-schedule-group
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/schemas_discoverer.html.markdown b/website/docs/cdktf/typescript/r/schemas_discoverer.html.markdown
new file mode 100644
index 00000000000..f271d30eb61
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/schemas_discoverer.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "EventBridge Schemas"
+layout: "aws"
+page_title: "AWS: aws_schemas_discoverer"
+description: |-
+  Provides an EventBridge Schema Discoverer resource.
+---
+
+
+
+# Resource: aws_schemas_discoverer
+
+Provides an EventBridge Schema Discoverer resource.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchEventBus } from "./.gen/providers/aws/cloudwatch-event-bus";
+import { SchemasDiscoverer } from "./.gen/providers/aws/schemas-discoverer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const messenger = new CloudwatchEventBus(this, "messenger", {
+      name: "chat-messages",
+    });
+    new SchemasDiscoverer(this, "test", {
+      description: "Auto discover event schemas",
+      sourceArn: messenger.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `sourceArn` - (Required) The ARN of the event bus to discover event schemas on.
+* `description` - (Optional) The description of the discoverer. Maximum of 256 characters.
+* `tags` - (Optional) A map of tags to assign to the resource. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the discoverer.
+* `id` - The ID of the discoverer.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge discoverers using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EventBridge discoverers using the `id`. For example:
+
+```console
+% terraform import aws_schemas_discoverer.test 123
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/schemas_registry.html.markdown b/website/docs/cdktf/typescript/r/schemas_registry.html.markdown
new file mode 100644
index 00000000000..3bf6a280578
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/schemas_registry.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "EventBridge Schemas"
+layout: "aws"
+page_title: "AWS: aws_schemas_registry"
+description: |-
+  Provides an EventBridge Custom Schema Registry resource.
+---
+
+
+
+# Resource: aws_schemas_registry
+
+Provides an EventBridge Custom Schema Registry resource.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SchemasRegistry } from "./.gen/providers/aws/schemas-registry";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SchemasRegistry(this, "test", {
+      description: "A custom schema registry",
+      name: "my_own_registry",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the custom event schema registry. Maximum of 64 characters consisting of lower case letters, upper case letters, 0-9, ., -, _.
+* `description` - (Optional) The description of the registry. Maximum of 256 characters.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the registry. 
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge schema registries using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EventBridge schema registries using the `name`. For example: + +```console +% terraform import aws_schemas_registry.test my_own_registry +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/schemas_registry_policy.html.markdown b/website/docs/cdktf/typescript/r/schemas_registry_policy.html.markdown new file mode 100644 index 00000000000..cbafc4e7a45 --- /dev/null +++ b/website/docs/cdktf/typescript/r/schemas_registry_policy.html.markdown @@ -0,0 +1,107 @@ +--- +subcategory: "EventBridge Schemas" +layout: "aws" +page_title: "AWS: aws_schemas_registry_policy" +description: |- + Terraform resource for managing an AWS EventBridge Schemas Registry Policy. +--- + + + +# Resource: aws_schemas_registry_policy + +Terraform resource for managing an AWS EventBridge Schemas Registry Policy. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { SchemasRegistryPolicy } from "./.gen/providers/aws/schemas-registry-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: ["schemas:*"], + effect: "Allow", + principals: [ + { + identifiers: ["109876543210"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:schemas:us-east-1:012345678901:registry/example", + "arn:aws:schemas:us-east-1:012345678901:schema/example*", + ], + sid: "example", + }, + ], + }); + const awsSchemasRegistryPolicyExample = new SchemasRegistryPolicy( + this, + "example_1", + { + policy: Token.asString(example.json), + registryName: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSchemasRegistryPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `registryName` - (Required) Name of EventBridge Schema Registry +* `policy` - (Required) Resource Policy for EventBridge Schema Registry + +## Attribute Reference + +This resource exports no additional attributes. 
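+
+Since `policy` is ultimately just a JSON string, the policy document data source in the example above can also be swapped for an inline `Fn.jsonencode` payload. A minimal hedged sketch (the account ID and ARNs are placeholder assumptions):
+
+```typescript
+// A hedged sketch: builds the resource policy inline instead of via the
+// aws_iam_policy_document data source. Account IDs and ARNs are placeholder
+// assumptions.
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { SchemasRegistryPolicy } from "./.gen/providers/aws/schemas-registry-policy";
+class InlinePolicySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SchemasRegistryPolicy(this, "example", {
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: "schemas:*",
+              Effect: "Allow",
+              Principal: {
+                AWS: "arn:aws:iam::109876543210:root",
+              },
+              Resource: [
+                "arn:aws:schemas:us-east-1:012345678901:registry/example",
+                "arn:aws:schemas:us-east-1:012345678901:schema/example*",
              ],
+              Sid: "example",
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+      registryName: "example",
+    });
+  }
+}
+
+```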
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `5m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge Schema Registry Policy using the `registryName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EventBridge Schema Registry Policy using the `registryName`. For example:
+
+```console
+% terraform import aws_schemas_registry_policy.example example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/schemas_schema.html.markdown b/website/docs/cdktf/typescript/r/schemas_schema.html.markdown
new file mode 100644
index 00000000000..2de89883805
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/schemas_schema.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "EventBridge Schemas"
+layout: "aws"
+page_title: "AWS: aws_schemas_schema"
+description: |-
+  Provides an EventBridge Schema resource.
+---
+
+
+
+# Resource: aws_schemas_schema
+
+Provides an EventBridge Schema resource.
+
+~> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SchemasRegistry } from "./.gen/providers/aws/schemas-registry";
+import { SchemasSchema } from "./.gen/providers/aws/schemas-schema";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new SchemasRegistry(this, "test", {
+      name: "my_own_registry",
+    });
+    const awsSchemasSchemaTest = new SchemasSchema(this, "test_1", {
+      content: Token.asString(
+        Fn.jsonencode({
+          components: {
+            schemas: {
+              Event: {
+                properties: {
+                  name: {
+                    type: "string",
+                  },
+                },
+                type: "object",
+              },
+            },
+          },
+          info: {
+            title: "Event",
+            version: "1.0.0",
+          },
+          openapi: "3.0.0",
+          paths: {},
+        })
+      ),
+      description: "The schema definition for my event",
+      name: "my_schema",
+      registryName: test.name,
+      type: "OpenApi3",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSchemasSchemaTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
+* `content` - (Required) The schema specification. Must be a valid Open API 3.0 spec.
+* `registryName` - (Required) The name of the registry in which this schema belongs.
+* `type` - (Required) The type of the schema. Valid values: `OpenApi3`.
+* `description` - (Optional) The description of the schema. Maximum of 256 characters. 
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the schema.
+* `lastModified` - The last modified date of the schema.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `version` - The version of the schema.
+* `versionCreatedDate` - The created date of the version of the schema.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EventBridge schema using the `name` and `registryName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import EventBridge schema using the `name` and `registryName`. For example:
+
+```console
+% terraform import aws_schemas_schema.test name/registry
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/secretsmanager_secret.html.markdown b/website/docs/cdktf/typescript/r/secretsmanager_secret.html.markdown
new file mode 100644
index 00000000000..2c754a8d5ea
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/secretsmanager_secret.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Secrets Manager"
+layout: "aws"
+page_title: "AWS: aws_secretsmanager_secret"
+description: |-
+  Provides a resource to manage AWS Secrets Manager secret metadata
+---
+
+
+
+# Resource: aws_secretsmanager_secret
+
+Provides a resource to manage AWS Secrets Manager secret metadata. To manage secret rotation, see the [`awsSecretsmanagerSecretRotation` resource](/docs/providers/aws/r/secretsmanager_secret_rotation.html). To manage a secret value, see the [`awsSecretsmanagerSecretVersion` resource](/docs/providers/aws/r/secretsmanager_secret_version.html).
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecretsmanagerSecret } from "./.gen/providers/aws/secretsmanager-secret";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecretsmanagerSecret(this, "example", {
+      name: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) Description of the secret.
+* `kmsKeyId` - (Optional) ARN or ID of the AWS KMS key to be used to encrypt the secret values in the versions stored in this secret. If you need to reference a CMK in a different account, you can use only the key ARN. 
If you don't specify this value, then Secrets Manager defaults to using the AWS account's default KMS key (the one named `aws/secretsmanager`). If the default KMS key with that name doesn't yet exist, then AWS Secrets Manager creates it for you automatically the first time.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `name` - (Optional) Friendly name of the new secret. The secret name can consist of uppercase letters, lowercase letters, digits, and any of the following characters: `/_+=.@-` Conflicts with `namePrefix`.
+* `policy` - (Optional) Valid JSON document representing a [resource policy](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_resource-based-policies.html). For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). Removing `policy` from your configuration or setting `policy` to null or an empty string (i.e., `policy = ""`) _will not_ delete the policy since it could have been set by `awsSecretsmanagerSecretPolicy`. To delete the `policy`, set it to `"{}"` (an empty JSON document).
+* `recoveryWindowInDays` - (Optional) Number of days that AWS Secrets Manager waits before it can delete the secret. This value can be `0` to force deletion without recovery or range from `7` to `30` days. The default value is `30`.
+* `replica` - (Optional) Configuration block to support secret replication. See details below.
+* `forceOverwriteReplicaSecret` - (Optional) Accepts boolean value to specify whether to overwrite a secret with the same name in the destination Region.
+* `tags` - (Optional) Key-value map of user-defined tags that are attached to the secret. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### replica
+
+* `kmsKeyId` - (Optional) ARN, Key ID, or Alias of the AWS KMS key within the region secret is replicated to. If one is not specified, then Secrets Manager defaults to using the AWS account's default KMS key (`aws/secretsmanager`) in the region or creates one for use if non-existent.
+* `region` - (Required) Region for replicating the secret.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ARN of the secret.
+* `arn` - ARN of the secret.
+* `replica` - Attributes of a replica are described below.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### replica
+
+* `lastAccessedDate` - Date that you last accessed the secret in the Region.
+* `status` - Status can be `InProgress`, `Failed`, or `InSync`.
+* `statusMessage` - Message such as `Replication succeeded` or `Secret with this name already exists in this region`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsSecretsmanagerSecret` using the secret Amazon Resource Name (ARN). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsSecretsmanagerSecret` using the secret Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_secretsmanager_secret.example arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/secretsmanager_secret_policy.html.markdown b/website/docs/cdktf/typescript/r/secretsmanager_secret_policy.html.markdown new file mode 100644 index 00000000000..36615153c2e --- /dev/null +++ b/website/docs/cdktf/typescript/r/secretsmanager_secret_policy.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret_policy" +description: |- + Provides a resource to manage AWS Secrets Manager secret policy +--- + + + +# Resource: aws_secretsmanager_secret_policy + +Provides a resource to manage AWS Secrets Manager secret policy. + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { SecretsmanagerSecret } from "./.gen/providers/aws/secretsmanager-secret"; +import { SecretsmanagerSecretPolicy } from "./.gen/providers/aws/secretsmanager-secret-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecretsmanagerSecret(this, "example", { + name: "example", + }); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_1", + { + statement: [ + { + actions: ["secretsmanager:GetSecretValue"], + effect: "Allow", + principals: [ + { + identifiers: ["arn:aws:iam::123456789012:root"], + type: "AWS", + }, + ], + resources: ["*"], + sid: "EnableAnotherAWSAccountToReadTheSecret", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsSecretsmanagerSecretPolicyExample = new SecretsmanagerSecretPolicy( + this, + "example_2", + { + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + secretArn: example.arn, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecretsmanagerSecretPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `policy` - (Required) Valid JSON document representing a [resource policy](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_resource-based-policies.html). For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). 
Unlike `awsSecretsmanagerSecret`, where `policy` can be set to `"{}"` to delete the policy, `"{}"` is not a valid policy since `policy` is required. +* `secretArn` - (Required) Secret ARN. + +The following arguments are optional: + +* `blockPublicPolicy` - (Optional) Makes an optional API call to Zelkova to validate the Resource Policy to prevent broad access to your secret. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the secret. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsSecretsmanagerSecretPolicy` using the secret Amazon Resource Name (ARN). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsSecretsmanagerSecretPolicy` using the secret Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_secretsmanager_secret_policy.example arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/secretsmanager_secret_rotation.html.markdown b/website/docs/cdktf/typescript/r/secretsmanager_secret_rotation.html.markdown new file mode 100644 index 00000000000..7e9f14231ca --- /dev/null +++ b/website/docs/cdktf/typescript/r/secretsmanager_secret_rotation.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "Secrets Manager" +layout: "aws" +page_title: "AWS: aws_secretsmanager_secret_rotation" +description: |- + Provides a resource to manage AWS Secrets Manager secret rotation +--- + + + +# Resource: aws_secretsmanager_secret_rotation + +Provides a resource to manage AWS Secrets Manager secret rotation. To manage a secret, see the [`awsSecretsmanagerSecret` resource](/docs/providers/aws/r/secretsmanager_secret.html). To manage a secret value, see the [`awsSecretsmanagerSecretVersion` resource](/docs/providers/aws/r/secretsmanager_secret_version.html). + +## Example Usage + +### Basic + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecretsmanagerSecretRotation } from "./.gen/providers/aws/secretsmanager-secret-rotation"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SecretsmanagerSecretRotation(this, "example", { + rotationLambdaArn: Token.asString(awsLambdaFunctionExample.arn), + rotationRules: { + automaticallyAfterDays: 30, + }, + secretId: Token.asString(awsSecretsmanagerSecretExample.id), + }); + } +} + +``` + +### Rotation Configuration + +To enable automatic secret rotation, the Secrets Manager service requires usage of a Lambda function. 
The [Rotate Secrets section in the Secrets Manager User Guide](https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotating-secrets.html) provides additional information about deploying prebuilt Lambda functions for supported credential rotation (e.g., RDS) or deploying a custom Lambda function.
+
+~> **NOTE:** Configuring rotation causes the secret to rotate once as soon as you enable rotation. Before you do this, you must ensure that all of your applications that use the credentials stored in the secret are updated to retrieve the secret from AWS Secrets Manager. The old credentials might no longer be usable after the initial rotation, and any applications that you fail to update will break as soon as the old credentials are no longer valid.
+
+~> **NOTE:** If you cancel a rotation that is in progress (by removing the rotation configuration), it can leave the VersionStage labels in an unexpected state. Depending on what step of the rotation was in progress, you might need to remove the staging label AWSPENDING from the partially created version, specified by the SecretVersionId response value. You should also evaluate the partially rotated new version to see if it should be deleted, which you can do by removing all staging labels from the new version's VersionStage field.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `secretId` - (Required) Specifies the secret on which to configure rotation. You can specify either the Amazon Resource Name (ARN) or the friendly name of the secret. The secret must already exist.
+* `rotationLambdaArn` - (Required) Specifies the ARN of the Lambda function that can rotate the secret.
+* `rotationRules` - (Required) A structure that defines the rotation configuration for this secret. Defined below.
+
+### rotation_rules
+
+* `automaticallyAfterDays` - (Optional) Specifies the number of days between automatic scheduled rotations of the secret. Either `automaticallyAfterDays` or `scheduleExpression` must be specified.
+* `duration` - (Optional) The length of the rotation window in hours. For example, `3h` for a three-hour window.
+* `scheduleExpression` - (Optional) A `cron()` or `rate()` expression that defines the schedule for rotating your secret. Either `automaticallyAfterDays` or `scheduleExpression` must be specified.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the secret.
+* `arn` - Amazon Resource Name (ARN) of the secret.
+* `rotationEnabled` - Specifies whether automatic rotation is enabled for this secret.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsSecretsmanagerSecretRotation` using the secret Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsSecretsmanagerSecretRotation` using the secret Amazon Resource Name (ARN).
For example:
+
+```console
+% terraform import aws_secretsmanager_secret_rotation.example arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/secretsmanager_secret_version.html.markdown b/website/docs/cdktf/typescript/r/secretsmanager_secret_version.html.markdown
new file mode 100644
index 00000000000..e1b8e9a5fcd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/secretsmanager_secret_version.html.markdown
@@ -0,0 +1,147 @@
+---
+subcategory: "Secrets Manager"
+layout: "aws"
+page_title: "AWS: aws_secretsmanager_secret_version"
+description: |-
+  Provides a resource to manage an AWS Secrets Manager secret version, including its secret value
+---
+
+
+
+# Resource: aws_secretsmanager_secret_version
+
+Provides a resource to manage an AWS Secrets Manager secret version, including its secret value. To manage secret metadata, see the [`awsSecretsmanagerSecret` resource](/docs/providers/aws/r/secretsmanager_secret.html).
+
+~> **NOTE:** If the `AWSCURRENT` staging label is present on this version during resource deletion, that label cannot be removed and will be skipped to prevent errors when fully deleting the secret. That label will leave this secret version active even after the resource is deleted from Terraform unless the secret itself is deleted. Move the `AWSCURRENT` staging label before or after deleting this resource from Terraform to fully trigger version deprecation if necessary.
+
+## Example Usage
+
+### Simple String Value
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecretsmanagerSecretVersion } from "./.gen/providers/aws/secretsmanager-secret-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecretsmanagerSecretVersion(this, "example", {
+      secretId: Token.asString(awsSecretsmanagerSecretExample.id),
+      secretString: "example-string-to-protect",
+    });
+  }
+}
+
+```
+
+### Key-Value Pairs
+
+Secrets Manager also accepts key-value pairs in JSON.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import {
+  VariableType,
+  TerraformVariable,
+  Token,
+  Fn,
+  TerraformStack,
+} from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecretsmanagerSecretVersion } from "./.gen/providers/aws/secretsmanager-secret-version";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK.
+    You can read more about this at https://cdk.tf/variables*/
+    const example = new TerraformVariable(this, "example", {
+      default: [
+        {
+          key1: "value1",
+          key2: "value2",
+        },
+      ],
+      type: VariableType.map(VariableType.STRING),
+    });
+    const awsSecretsmanagerSecretVersionExample =
+      new SecretsmanagerSecretVersion(this, "example_1", {
+        secretId: Token.asString(awsSecretsmanagerSecretExample.id),
+        secretString: Token.asString(Fn.jsonencode(example.value)),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSecretsmanagerSecretVersionExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+-> **Note:** In Terraform 0.14 and later, use `sensitive = true` to protect the values of the variable from being printed in logs and console output (see [Protect Sensitive Input Variables](https://learn.hashicorp.com/tutorials/terraform/sensitive-variables)).
+
+Reading key-value pairs from JSON back into a native Terraform map can be accomplished in Terraform 0.12 and later with the [`jsondecode()` function](https://www.terraform.io/docs/configuration/functions/jsondecode.html):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformOutput, Fn, propertyAccess, TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TerraformOutput(this, "example", {
+      value: propertyAccess(Fn.jsondecode(example.secretString), ['"key1"']),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `secretId` - (Required) Specifies the secret to which you want to add a new version. You can specify either the Amazon Resource Name (ARN) or the friendly name of the secret. The secret must already exist.
+* `secretString` - (Optional) Specifies text data that you want to encrypt and store in this version of the secret. Required if `secretBinary` is not set.
+* `secretBinary` - (Optional) Specifies binary data that you want to encrypt and store in this version of the secret. Required if `secretString` is not set. Must be base64-encoded.
+* `versionStages` - (Optional) Specifies a list of staging labels that are attached to this version of the secret. A staging label must be unique to a single version of the secret. If you specify a staging label that's already associated with a different version of the same secret, then that staging label is automatically removed from the other version and attached to this version. If you do not specify a value, then AWS Secrets Manager automatically moves the staging label `AWSCURRENT` to this new version on creation.
+
+~> **NOTE:** If `versionStages` is configured, you must include the `AWSCURRENT` staging label if this secret version is the only version or if the label is currently present on this secret version, otherwise Terraform will show a perpetual difference.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the secret.
+* `id` - A pipe-delimited combination of secret ID and version ID.
+* `versionId` - The unique identifier of the version of the secret.
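+
+As a hedged illustration of the `versionStages` note above (the stack, resource, and label names here are hypothetical, not part of the generated documentation), a version that pins a custom staging label alongside `AWSCURRENT` might look like:
+
+```typescript
+// A minimal sketch, not generated by 'cdktf convert'.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SecretsmanagerSecret } from "./.gen/providers/aws/secretsmanager-secret";
+import { SecretsmanagerSecretVersion } from "./.gen/providers/aws/secretsmanager-secret-version";
+class MyLabeledVersionSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const secret = new SecretsmanagerSecret(this, "example", {
+      name: "example",
+    });
+    new SecretsmanagerSecretVersion(this, "labeled", {
+      secretId: secret.id,
+      secretString: "example-string-to-protect",
+      // AWSCURRENT must stay in this list while this is the only (or current)
+      // version; omitting it causes the perpetual diff described in the note above.
+      versionStages: ["AWSCURRENT", "example-custom-label"],
+    });
+  }
+}
+```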
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsSecretsmanagerSecretVersion` using the secret ID and version ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsSecretsmanagerSecretVersion` using the secret ID and version ID. For example:
+
+```console
+% terraform import aws_secretsmanager_secret_version.example 'arn:aws:secretsmanager:us-east-1:123456789012:secret:example-123456|xxxxx-xxxxxxx-xxxxxxx-xxxxx'
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/security_group.html.markdown b/website/docs/cdktf/typescript/r/security_group.html.markdown
new file mode 100644
index 00000000000..e9a06683f48
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/security_group.html.markdown
@@ -0,0 +1,421 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_security_group"
+description: |-
+  Provides a security group resource.
+---
+
+
+
+# Resource: aws_security_group
+
+Provides a security group resource.
+
+~> **NOTE on Security Groups and Security Group Rules:** Terraform currently provides a Security Group resource with `ingress` and `egress` rules defined in-line and a [Security Group Rule resource](security_group_rule.html) which manages one or more `ingress` or `egress` rules. Both of these resources were added before AWS assigned a [security group rule unique ID](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/security-group-rules.html), and they do not work well in all scenarios using the `description` and `tags` attributes, which rely on the unique ID. The [`awsVpcSecurityGroupEgressRule`](vpc_security_group_egress_rule.html) and [`awsVpcSecurityGroupIngressRule`](vpc_security_group_ingress_rule.html) resources have been added to address these limitations and should be used for all new security group rules. You should not use the `awsVpcSecurityGroupEgressRule` and `awsVpcSecurityGroupIngressRule` resources in conjunction with an `awsSecurityGroup` resource with in-line rules or with `awsSecurityGroupRule` resources defined for the same Security Group, as rule conflicts may occur and rules will be overwritten.
+
+~> **NOTE:** Referencing Security Groups across VPC peering has certain restrictions. More information is available in the [VPC Peering User Guide](https://docs.aws.amazon.com/vpc/latest/peering/vpc-peering-security-groups.html).
+
+~> **NOTE:** Due to [AWS Lambda improved VPC networking changes that began deploying in September 2019](https://aws.amazon.com/blogs/compute/announcing-improved-vpc-networking-for-aws-lambda-functions/), security groups associated with Lambda Functions can take up to 45 minutes to successfully delete. Terraform AWS Provider version 2.31.0 and later automatically handles this increased timeout; however, prior versions require setting the [customizable deletion timeout](#timeouts) to 45 minutes (`delete = "45m"`). AWS and HashiCorp are working together to reduce the amount of time required for resource deletion, and updates can be tracked in this [GitHub issue](https://github.com/hashicorp/terraform-provider-aws/issues/10329).
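+
+If you are on a provider version older than 2.31.0, a hedged sketch of widening that deletion timeout (the stack and resource names here are hypothetical; the `timeouts` shape follows the "Shorter timeout" example later on this page):
+
+```typescript
+// A minimal sketch, assuming a security group that is attached to a Lambda function.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SecurityGroup } from "./.gen/providers/aws/security-group";
+class MyLambdaSgSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecurityGroup(this, "lambda_sg", {
+      name: "lambda-sg",
+      // Lambda-attached security groups can take up to 45 minutes to delete,
+      // so widen the delete timeout accordingly (see the note above).
+      timeouts: [
+        {
+          delete: "45m",
+        },
+      ],
+    });
+  }
+}
+```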
+ +~> **NOTE:** The `cidrBlocks` and `ipv6CidrBlocks` parameters are optional in the `ingress` and `egress` blocks. If nothing is specified, traffic will be blocked as described in _NOTE on Egress rules_ later. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SecurityGroup(this, "allow_tls", { + description: "Allow TLS inbound traffic", + egress: [ + { + cidrBlocks: ["0.0.0.0/0"], + fromPort: 0, + ipv6CidrBlocks: ["::/0"], + protocol: "-1", + toPort: 0, + }, + ], + ingress: [ + { + cidrBlocks: [main.cidrBlock], + description: "TLS from VPC", + fromPort: 443, + ipv6CidrBlocks: [main.ipv6CidrBlock], + protocol: "tcp", + toPort: 443, + }, + ], + name: "allow_tls", + tags: { + Name: "allow_tls", + }, + vpcId: main.id, + }); + } +} + +``` + +~> **NOTE on Egress rules:** By default, AWS creates an `ALLOW ALL` egress rule when creating a new Security Group inside of a VPC. When creating a new Security Group inside a VPC, **Terraform will remove this default rule**, and require you specifically re-create it if you desire that rule. We feel this leads to fewer surprises in terms of controlling your egress rules. If you desire this rule to be in place, you can use this `egress` block: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SecurityGroup(this, "example", { + egress: [ + { + cidrBlocks: ["0.0.0.0/0"], + fromPort: 0, + ipv6CidrBlocks: ["::/0"], + protocol: "-1", + toPort: 0, + }, + ], + }); + } +} + +``` + +### Usage With Prefix List IDs + +Prefix Lists are either managed by AWS internally, or created by the customer using a +[Prefix List resource](ec2_managed_prefix_list.html). Prefix Lists provided by +AWS are associated with a prefix list name, or service name, that is linked to a specific region. +Prefix list IDs are exported on VPC Endpoints, so you can use this format: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +import { VpcEndpoint } from "./.gen/providers/aws/vpc-endpoint"; +interface MyConfig { + serviceName: any; + vpcId: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const myEndpoint = new VpcEndpoint(this, "my_endpoint", { + serviceName: config.serviceName, + vpcId: config.vpcId, + }); + new SecurityGroup(this, "example", { + egress: [ + { + fromPort: 0, + prefixListIds: [myEndpoint.prefixListId], + protocol: "-1", + toPort: 0, + }, + ], + }); + } +} + +``` + +You can also find a specific Prefix List using the `awsPrefixList` data source. + +### Removing All Ingress and Egress Rules + +The `ingress` and `egress` arguments are processed in [attributes-as-blocks](https://developer.hashicorp.com/terraform/language/attr-as-blocks) mode. Due to this, removing these arguments from the configuration will **not** cause Terraform to destroy the managed rules. To subsequently remove all managed ingress and egress rules: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SecurityGroup(this, "example", { + egress: [], + ingress: [], + name: "sg", + vpcId: Token.asString(awsVpcExample.id), + }); + } +} + +``` + +### Recreating a Security Group + +A simple security group `name` change "forces new" the security group--Terraform destroys the security group and creates a new one. (Likewise, `description`, `namePrefix`, or `vpcId` [cannot be changed](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/working-with-security-groups.html#creating-security-group).) Attempting to recreate the security group leads to a variety of complications depending on how it is used. + +Security groups are generally associated with other resources--**more than 100** AWS Provider resources reference security groups. Referencing a resource from another resource creates a one-way dependency. For example, if you create an EC2 `awsInstance` that has a `vpcSecurityGroupIds` argument that refers to an `awsSecurityGroup` resource, the `awsSecurityGroup` is a dependent of the `awsInstance`. Because of this, Terraform will create the security group first so that it can then be associated with the EC2 instance. + +However, the dependency relationship actually goes both directions causing the _Security Group Deletion Problem_. AWS does not allow you to delete the security group associated with another resource (_e.g._, the `awsInstance`). + +Terraform does [not model bi-directional dependencies](https://developer.hashicorp.com/terraform/internals/graph) like this, but, even if it did, simply knowing the dependency situation would not be enough to solve it. For example, some resources must always have an associated security group while others don't need to. In addition, when the `awsSecurityGroup` resource attempts to recreate, it receives a dependent object error, which does not provide information on whether the dependent object is a security group rule or, for example, an associated EC2 instance. 
Within Terraform, the associated resource (_e.g._, `awsInstance`) does not receive an error when the `awsSecurityGroup` is trying to recreate even though that is where changes to the associated resource would need to take place (_e.g._, removing the security group association). + +Despite these sticky problems, below are some ways to improve your experience when you find it necessary to recreate a security group. + +#### `createBeforeDestroy` + +(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).) + +Normally, Terraform first deletes the existing security group resource and then creates a new one. When a security group is associated with a resource, the delete won't succeed. You can invert the default behavior using the [`createBeforeDestroy` meta argument](https://www.terraform.io/language/meta-arguments/lifecycle#create_before_destroy): + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SecurityGroup(this, "example", { + lifecycle: { + createBeforeDestroy: true, + }, + name: "changeable-name", + }); + } +} + +``` + +#### `replaceTriggeredBy` + +(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).) + +To replace a resource when a security group changes, use the [`replaceTriggeredBy` meta argument](https://www.terraform.io/language/meta-arguments/lifecycle#replace_triggered_by). Note that in this example, the `awsInstance` will be destroyed and created again when the `awsSecurityGroup` changes. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Instance } from "./.gen/providers/aws/instance"; +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityGroup(this, "example", { + name: "sg", + }); + const awsInstanceExample = new Instance(this, "example_1", { + instanceType: "t3.small", + lifecycle: { + replaceTriggeredBy: [example], + }, + vpcSecurityGroupIds: [test.id], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsInstanceExample.overrideLogicalId("example"); + } +} + +``` + +#### Shorter timeout + +(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).) 
+ +If destroying a security group takes a long time, it may be because Terraform cannot distinguish between a dependent object (_e.g._, a security group rule or EC2 instance) that is _in the process of being deleted_ and one that is not. In other words, it may be waiting for a train that isn't scheduled to arrive. To fail faster, shorten the `delete` [timeout](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts) from the default timeout: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityGroup } from "./.gen/providers/aws/security-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SecurityGroup(this, "example", { + name: "izizavle", + timeouts: [ + { + delete: "2m", + }, + ], + }); + } +} + +``` + +#### Provisioners + +(This example is one approach to [recreating security groups](#recreating-a-security-group). For more information on the challenges and the _Security Group Deletion Problem_, see [the section above](#recreating-a-security-group).) + +**DISCLAIMER:** We **_HIGHLY_** recommend using one of the above approaches and _NOT_ using local provisioners. Provisioners, like the one shown below, should be considered a **last resort** since they are _not readable_, _require skills outside standard Terraform configuration_, are _error prone_ and _difficult to maintain_, are not compatible with cloud environments and upgrade tools, require AWS CLI installation, and are subject to AWS CLI and Terraform changes outside the AWS Provider. + +```terraform +data "aws_security_group" "default" { + name = "default" + # ... other configuration ... +} + +resource "aws_security_group" "example" { + name = "sg" + # ... other configuration ... + + # The downstream resource must have at least one SG attached, therefore we + # attach the default SG of the VPC temporarily and remove it later on + provisioner "local-exec" { + when = destroy + command = < **Note** Although `cidrBlocks`, `ipv6CidrBlocks`, `prefixListIds`, and `securityGroups` are all marked as optional, you _must_ provide one of them in order to configure the source of the traffic. + +* `cidrBlocks` - (Optional) List of CIDR blocks. +* `description` - (Optional) Description of this ingress rule. +* `ipv6CidrBlocks` - (Optional) List of IPv6 CIDR blocks. +* `prefixListIds` - (Optional) List of Prefix List IDs. +* `securityGroups` - (Optional) List of security groups. A group name can be used relative to the default VPC. Otherwise, group ID. +* `self` - (Optional) Whether the security group itself will be added as a source to this ingress rule. + +### egress + +This argument is processed in [attribute-as-blocks mode](https://www.terraform.io/docs/configuration/attr-as-blocks.html). + +The following arguments are required: + +* `fromPort` - (Required) Start port (or ICMP type number if protocol is `icmp`) +* `toPort` - (Required) End range port (or ICMP code if protocol is `icmp`). + +The following arguments are optional: + +~> **Note** Although `cidrBlocks`, `ipv6CidrBlocks`, `prefixListIds`, and `securityGroups` are all marked as optional, you _must_ provide one of them in order to configure the destination of the traffic. 
+
+* `cidrBlocks` - (Optional) List of CIDR blocks.
+* `description` - (Optional) Description of this egress rule.
+* `ipv6CidrBlocks` - (Optional) List of IPv6 CIDR blocks.
+* `prefixListIds` - (Optional) List of Prefix List IDs.
+* `protocol` - (Required) Protocol. If you select a protocol of `-1` (semantically equivalent to `all`, which is not a valid value here), you must specify a `fromPort` and `toPort` equal to `0`. The supported values are defined in the `ipProtocol` argument in the [IpPermission](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_IpPermission.html) API reference. This argument is normalized to a lowercase value to match the AWS API requirement when using Terraform 0.12.x and above. Please make sure that the value of the protocol is specified as lowercase when used with older versions of Terraform to avoid issues during upgrade.
+* `securityGroups` - (Optional) List of security groups. A group name can be used relative to the default VPC. Otherwise, group ID.
+* `self` - (Optional) Whether the security group itself will be added as a source to this egress rule.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the security group.
+* `id` - ID of the security group.
+* `ownerId` - Owner ID.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `15m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Groups using the security group `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Security Groups using the security group `id`. For example:
+
+```console
+% terraform import aws_security_group.elb_sg sg-903004f8
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/security_group_rule.html.markdown b/website/docs/cdktf/typescript/r/security_group_rule.html.markdown
new file mode 100644
index 00000000000..a9fd2b24fdf
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/security_group_rule.html.markdown
@@ -0,0 +1,317 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_security_group_rule"
+description: |-
+  Provides a security group rule resource.
+---
+
+
+
+# Resource: aws_security_group_rule
+
+Provides a security group rule resource. Represents a single `ingress` or
+`egress` group rule, which can be added to external Security Groups.
+
+~> **NOTE on Security Groups and Security Group Rules:** Terraform currently provides a [Security Group resource](security_group.html) with `ingress` and `egress` rules defined in-line and a Security Group Rule resource which manages one or more `ingress` or
+`egress` rules.
Both of these resources were added before AWS assigned a [security group rule unique ID](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/security-group-rules.html), and they do not work well in all scenarios using the `description` and `tags` attributes, which rely on the unique ID.
+The [`awsVpcSecurityGroupEgressRule`](vpc_security_group_egress_rule.html) and [`awsVpcSecurityGroupIngressRule`](vpc_security_group_ingress_rule.html) resources have been added to address these limitations and should be used for all new security group rules.
+You should not use the `awsVpcSecurityGroupEgressRule` and `awsVpcSecurityGroupIngressRule` resources in conjunction with an `awsSecurityGroup` resource with in-line rules or with `awsSecurityGroupRule` resources defined for the same Security Group, as rule conflicts may occur and rules will be overwritten.
+
+~> **NOTE:** Setting `protocol = "all"` or `protocol = -1` with `fromPort` and `toPort` will result in the EC2 API creating a security group rule with all ports open. This API behavior cannot be controlled by Terraform and may generate warnings in the future.
+
+~> **NOTE:** Referencing Security Groups across VPC peering has certain restrictions. More information is available in the [VPC Peering User Guide](https://docs.aws.amazon.com/vpc/latest/peering/vpc-peering-security-groups.html).
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecurityGroupRule } from "./.gen/providers/aws/security-group-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecurityGroupRule(this, "example", {
+      cidrBlocks: [Token.asString(awsVpcExample.cidrBlock)],
+      fromPort: 0,
+      ipv6CidrBlocks: [Token.asString(awsVpcExample.ipv6CidrBlock)],
+      protocol: "tcp",
+      securityGroupId: "sg-123456",
+      toPort: 65535,
+      type: "ingress",
+    });
+  }
+}
+
+```
+
+### Usage With Prefix List IDs
+
+Prefix Lists are either managed by AWS internally, or created by the customer using a
+[Managed Prefix List resource](ec2_managed_prefix_list.html). Prefix Lists provided by
+AWS are associated with a prefix list name, or service name, that is linked to a specific region.
+
+Prefix list IDs are exported on VPC Endpoints, so you can use this format:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { SecurityGroupRule } from "./.gen/providers/aws/security-group-rule"; +import { VpcEndpoint } from "./.gen/providers/aws/vpc-endpoint"; +interface MyConfig { + serviceName: any; + vpcId: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + const myEndpoint = new VpcEndpoint(this, "my_endpoint", { + serviceName: config.serviceName, + vpcId: config.vpcId, + }); + new SecurityGroupRule(this, "allow_all", { + fromPort: 0, + prefixListIds: [myEndpoint.prefixListId], + protocol: "-1", + securityGroupId: "sg-123456", + toPort: 0, + type: "egress", + }); + } +} + +``` + +You can also find a specific Prefix List using the [`awsPrefixList`](/docs/providers/aws/d/prefix_list.html) +or [`ec2ManagedPrefixList`](/docs/providers/aws/d/ec2_managed_prefix_list.html) data sources: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsPrefixList } from "./.gen/providers/aws/data-aws-prefix-list"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { SecurityGroupRule } from "./.gen/providers/aws/security-group-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const current = new DataAwsRegion(this, "current", {}); + const s3 = new DataAwsPrefixList(this, "s3", { + name: "com.amazonaws.${" + current.name + "}.s3", + }); + new SecurityGroupRule(this, "s3_gateway_egress", { + description: "S3 Gateway Egress", + fromPort: 443, + prefixListIds: [Token.asString(s3.id)], + protocol: "tcp", + securityGroupId: "sg-123456", + toPort: 443, + type: "egress", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `fromPort` - (Required) Start port (or ICMP type number if protocol is "icmp" or "icmpv6"). +* `protocol` - (Required) Protocol. If not icmp, icmpv6, tcp, udp, or all use the [protocol number](https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml) +* `securityGroupId` - (Required) Security group to apply this rule to. +* `toPort` - (Required) End port (or ICMP code if protocol is "icmp"). +* `type` - (Required) Type of rule being created. Valid options are `ingress` (inbound) +or `egress` (outbound). + +The following arguments are optional: + +~> **Note** Although `cidrBlocks`, `ipv6CidrBlocks`, `prefixListIds`, and `sourceSecurityGroupId` are all marked as optional, you _must_ provide one of them in order to configure the source of the traffic. + +* `cidrBlocks` - (Optional) List of CIDR blocks. Cannot be specified with `sourceSecurityGroupId` or `self`. +* `description` - (Optional) Description of the rule. +* `ipv6CidrBlocks` - (Optional) List of IPv6 CIDR blocks. Cannot be specified with `sourceSecurityGroupId` or `self`. +* `prefixListIds` - (Optional) List of Prefix List IDs. +* `self` - (Optional) Whether the security group itself will be added as a source to this ingress rule. Cannot be specified with `cidrBlocks`, `ipv6CidrBlocks`, or `sourceSecurityGroupId`. +* `sourceSecurityGroupId` - (Optional) Security group id to allow access to/from, depending on the `type`. Cannot be specified with `cidrBlocks`, `ipv6CidrBlocks`, or `self`. 
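+
+As a hedged illustration of the mutual-exclusivity notes above (all identifiers here are hypothetical), a rule that allows traffic from another security group rather than from CIDR blocks might look like:
+
+```typescript
+// A minimal sketch, assuming two existing security group IDs.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SecurityGroupRule } from "./.gen/providers/aws/security-group-rule";
+class MySgSourceRuleSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecurityGroupRule(this, "ingress_from_sg", {
+      fromPort: 5432,
+      protocol: "tcp",
+      securityGroupId: "sg-123456",
+      // sourceSecurityGroupId cannot be combined with cidrBlocks,
+      // ipv6CidrBlocks, or self (see the argument notes above).
+      sourceSecurityGroupId: "sg-654321",
+      toPort: 5432,
+      type: "ingress",
+    });
+  }
+}
+```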
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ID of the security group rule.
+* `securityGroupRuleId` - If the `awsSecurityGroupRule` resource has a single source or destination then this is the AWS Security Group Rule resource ID. Otherwise it is empty.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Group Rules using the `securityGroupId`, `type`, `protocol`, `fromPort`, `toPort`, and source(s)/destination(s) (such as a `cidrBlock`) separated by underscores (`_`). All parts are required. For example:
+
+**NOTE:** Not all rule permissions (e.g., not all of a rule's CIDR blocks) need to be imported for Terraform to manage rule permissions. However, importing some of a rule's permissions but not others, and then making changes to the rule will result in the creation of an additional rule to capture the updated permissions. Rule permissions that were not imported are left intact in the original rule.
+
+Import an ingress rule in security group `sg-6e616f6d69` for TCP port 8000 with an IPv4 destination CIDR of `10.0.3.0/24`:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import a rule with various IPv4 and IPv6 source CIDR blocks:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import a rule, applicable to all ports, with a protocol other than TCP/UDP/ICMP/ICMPV6/ALL, e.g., Multicast Transport Protocol (MTP), using the IANA protocol number. For example: 92.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import a default any/any egress rule to 0.0.0.0/0:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import an egress rule with a prefix list ID destination:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import a rule applicable to all protocols and ports with a security group source:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Import a rule that has itself and an IPv6 CIDR block as sources:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Security Group Rules using the `securityGroupId`, `type`, `protocol`, `fromPort`, `toPort`, and source(s)/destination(s) (such as a `cidrBlock`) separated by underscores (`_`). All parts are required. For example:
+
+**NOTE:** Not all rule permissions (e.g., not all of a rule's CIDR blocks) need to be imported for Terraform to manage rule permissions. However, importing some of a rule's permissions but not others, and then making changes to the rule will result in the creation of an additional rule to capture the updated permissions. Rule permissions that were not imported are left intact in the original rule.
+
+Import an ingress rule in security group `sg-6e616f6d69` for TCP port 8000 with an IPv4 destination CIDR of `10.0.3.0/24`:
+
+```console
+% terraform import aws_security_group_rule.ingress sg-6e616f6d69_ingress_tcp_8000_8000_10.0.3.0/24
+```
+
+Import a rule with various IPv4 and IPv6 source CIDR blocks:
+
+```console
+% terraform import aws_security_group_rule.ingress sg-4973616163_ingress_tcp_100_121_10.1.0.0/16_2001:db8::/48_10.2.0.0/16_2002:db8::/48
+```
+
+Import a rule, applicable to all ports, with a protocol other than TCP/UDP/ICMP/ICMPV6/ALL, e.g., Multicast Transport Protocol (MTP), using the IANA protocol number. For example: 92.
+
+```console
+% terraform import aws_security_group_rule.ingress sg-6777656e646f6c796e_ingress_92_0_65536_10.0.3.0/24_10.0.4.0/24
+```
+
+Import a default any/any egress rule to 0.0.0.0/0:
+
+```console
+% terraform import aws_security_group_rule.default_egress sg-6777656e646f6c796e_egress_all_0_0_0.0.0.0/0
+```
+
+Import an egress rule with a prefix list ID destination:
+
+```console
+% terraform import aws_security_group_rule.egress sg-62726f6479_egress_tcp_8000_8000_pl-6469726b
+```
+
+Import a rule applicable to all protocols and ports with a security group source:
+
+```console
+% terraform import aws_security_group_rule.ingress_rule sg-7472697374616e_ingress_all_0_65536_sg-6176657279
+```
+
+Import a rule that has itself and an IPv6 CIDR block as sources:
+
+```console
+% terraform import aws_security_group_rule.rule_name sg-656c65616e6f72_ingress_tcp_80_80_self_2001:db8::/48
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/securityhub_account.markdown b/website/docs/cdktf/typescript/r/securityhub_account.markdown
new file mode 100644
index 00000000000..0b8cca4dfcd
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/securityhub_account.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_account"
+description: |-
+  Enables Security Hub for an AWS account.
+---
+
+
+
+# Resource: aws_securityhub_account
+
+Enables Security Hub for this AWS account.
+
+~> **NOTE:** Destroying this resource will disable Security Hub for this AWS account.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecurityhubAccount(this, "example", {});
+  }
+}
+
+```
+
+## Argument Reference
+
+* `enableDefaultStandards` - (Optional) Whether to enable the security standards that Security Hub has designated as automatically enabled, including `AWS Foundational Security Best Practices v1.0.0` and `CIS AWS Foundations Benchmark v1.2.0`. Defaults to `true`.
+* `controlFindingGenerator` - (Optional) Updates whether the calling account has consolidated control findings turned on. If the value for this field is set to `SECURITY_CONTROL`, Security Hub generates a single finding for a control check even when the check applies to multiple enabled standards. If the value for this field is set to `STANDARD_CONTROL`, Security Hub generates separate findings for a control check when the check applies to multiple enabled standards. For accounts that are part of an organization, this value can only be updated in the administrator account.
+* `autoEnableControls` - (Optional) Whether to automatically enable new controls when they are added to standards that are enabled. By default, this is set to `true`, and new controls are enabled automatically. To not automatically enable new controls, set this to `false`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Account ID.
+* `arn` - ARN of the SecurityHub Hub created in the account.
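+
+As a hedged sketch of the arguments above (the stack and resource names are hypothetical, and `SECURITY_CONTROL` is assumed from the AWS Security Hub API rather than taken from this page):
+
+```typescript
+// A minimal sketch: disable the default standards and turn on consolidated
+// control findings for this account.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+class MyConfiguredHubSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecurityhubAccount(this, "configured", {
+      autoEnableControls: true,
+      controlFindingGenerator: "SECURITY_CONTROL",
+      enableDefaultStandards: false,
+    });
+  }
+}
+```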
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an existing Security Hub enabled account using the AWS account ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an existing Security Hub enabled account using the AWS account ID. For example:
+
+```console
+% terraform import aws_securityhub_account.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/securityhub_action_target.html.markdown b/website/docs/cdktf/typescript/r/securityhub_action_target.html.markdown
new file mode 100644
index 00000000000..3577531ae92
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/securityhub_action_target.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_action_target"
+description: |-
+  Creates a Security Hub custom action.
+---
+
+
+
+# Resource: aws_securityhub_action_target
+
+Creates a Security Hub custom action.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubActionTarget } from "./.gen/providers/aws/securityhub-action-target";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SecurityhubAccount(this, "example", {});
+    const awsSecurityhubActionTargetExample = new SecurityhubActionTarget(
+      this,
+      "example_1",
+      {
+        dependsOn: [example],
+        description: "This custom action sends selected findings to chat",
+        identifier: "SendToChat",
+        name: "Send notification to chat",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSecurityhubActionTargetExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the custom action target.
+* `identifier` - (Required) The ID for the custom action target.
+* `description` - (Required) The description for the custom action target.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the Security Hub custom action target.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Security Hub custom action using the action target ARN.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Security Hub custom action using the action target ARN. For example: + +```console +% terraform import aws_securityhub_action_target.example arn:aws:securityhub:eu-west-1:312940875350:action/custom/a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/securityhub_finding_aggregator.markdown b/website/docs/cdktf/typescript/r/securityhub_finding_aggregator.markdown new file mode 100644 index 00000000000..72bfbc38de4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/securityhub_finding_aggregator.markdown @@ -0,0 +1,144 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_finding_aggregator" +description: |- + Manages a Security Hub finding aggregator +--- + + + +# Resource: aws_securityhub_finding_aggregator + +Manages a Security Hub finding aggregator. Security Hub needs to be enabled in a region in order for the aggregator to pull through findings. + +## Example Usage + +### All Regions Usage + +The following example will enable the aggregator for every region. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubFindingAggregator } from "./.gen/providers/aws/securityhub-finding-aggregator"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const awsSecurityhubFindingAggregatorExample = + new SecurityhubFindingAggregator(this, "example_1", { + dependsOn: [example], + linkingMode: "ALL_REGIONS", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubFindingAggregatorExample.overrideLogicalId("example"); + } +} + +``` + +### All Regions Except Specified Regions Usage + +The following example will enable the aggregator for every region except those specified in `specifiedRegions`. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubFindingAggregator } from "./.gen/providers/aws/securityhub-finding-aggregator";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SecurityhubAccount(this, "example", {});
+    const awsSecurityhubFindingAggregatorExample =
+      new SecurityhubFindingAggregator(this, "example_1", {
+        dependsOn: [example],
+        linkingMode: "ALL_REGIONS_EXCEPT_SPECIFIED",
+        specifiedRegions: ["eu-west-1", "eu-west-2"],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSecurityhubFindingAggregatorExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+### Specified Regions Usage
+
+The following example will enable the aggregator for every region specified in `specifiedRegions`.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubFindingAggregator } from "./.gen/providers/aws/securityhub-finding-aggregator";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SecurityhubAccount(this, "example", {});
+    const awsSecurityhubFindingAggregatorExample =
+      new SecurityhubFindingAggregator(this, "example_1", {
+        dependsOn: [example],
+        linkingMode: "SPECIFIED_REGIONS",
+        specifiedRegions: ["eu-west-1", "eu-west-2"],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSecurityhubFindingAggregatorExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+- `linkingMode` - (Required) Indicates whether to aggregate findings from all of the available Regions or from a specified list. Valid values are `ALL_REGIONS`, `ALL_REGIONS_EXCEPT_SPECIFIED` and `SPECIFIED_REGIONS`. When `ALL_REGIONS` or `ALL_REGIONS_EXCEPT_SPECIFIED` is used, Security Hub will automatically aggregate findings from new Regions as Security Hub supports them and you opt into them.
+- `specifiedRegions` - (Optional) List of regions to include or exclude (required if `linkingMode` is set to `ALL_REGIONS_EXCEPT_SPECIFIED` or `SPECIFIED_REGIONS`).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `arn` - Amazon Resource Name (ARN) of the Security Hub finding aggregator.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an existing Security Hub finding aggregator using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an existing Security Hub finding aggregator using the `arn`.
For example: + +```console +% terraform import aws_securityhub_finding_aggregator.example arn:aws:securityhub:eu-west-1:123456789098:finding-aggregator/abcd1234-abcd-1234-1234-abcdef123456 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/securityhub_insight.html.markdown b/website/docs/cdktf/typescript/r/securityhub_insight.html.markdown new file mode 100644 index 00000000000..756cb0ba7a4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/securityhub_insight.html.markdown @@ -0,0 +1,413 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_insight" +description: |- + Provides a Security Hub custom insight resource. +--- + + + +# Resource: aws_securityhub_insight + +Provides a Security Hub custom insight resource. See the [Managing custom insights section](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-custom-insights.html) of the AWS User Guide for more information. + +## Example Usage + +### Filter by AWS account ID + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubInsight } from "./.gen/providers/aws/securityhub-insight"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const awsSecurityhubInsightExample = new SecurityhubInsight( + this, + "example_1", + { + dependsOn: [example], + filters: { + awsAccountId: [ + { + comparison: "EQUALS", + value: "1234567890", + }, + { + comparison: "EQUALS", + value: "09876543210", + }, + ], + }, + groupByAttribute: "AwsAccountId", + name: "example-insight", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubInsightExample.overrideLogicalId("example"); + } +} + +``` + +### Filter by date range + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubInsight } from "./.gen/providers/aws/securityhub-insight"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const awsSecurityhubInsightExample = new SecurityhubInsight( + this, + "example_1", + { + dependsOn: [example], + filters: { + createdAt: [ + { + dateRange: { + unit: "DAYS", + value: 5, + }, + }, + ], + }, + groupByAttribute: "CreatedAt", + name: "example-insight", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsSecurityhubInsightExample.overrideLogicalId("example"); + } +} + +``` + +### Filter by destination IPv4 address + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubInsight } from "./.gen/providers/aws/securityhub-insight"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const awsSecurityhubInsightExample = new SecurityhubInsight( + this, + "example_1", + { + dependsOn: [example], + filters: { + networkDestinationIpv4: [ + { + cidr: "10.0.0.0/16", + }, + ], + }, + groupByAttribute: "NetworkDestinationIpV4", + name: "example-insight", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubInsightExample.overrideLogicalId("example"); + } +} + +``` + +### Filter by finding's confidence + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubInsight } from "./.gen/providers/aws/securityhub-insight"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const awsSecurityhubInsightExample = new SecurityhubInsight( + this, + "example_1", + { + dependsOn: [example], + filters: { + confidence: [ + { + gte: "80", + }, + ], + }, + groupByAttribute: "Confidence", + name: "example-insight", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubInsightExample.overrideLogicalId("example"); + } +} + +``` + +### Filter by resource tags + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubInsight } from "./.gen/providers/aws/securityhub-insight"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const awsSecurityhubInsightExample = new SecurityhubInsight( + this, + "example_1", + { + dependsOn: [example], + filters: { + resourceTags: [ + { + comparison: "EQUALS", + key: "Environment", + value: "Production", + }, + ], + }, + groupByAttribute: "ResourceTags", + name: "example-insight", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsSecurityhubInsightExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `filters` - (Required) A configuration block including one or more (up to 10 distinct) attributes used to filter the findings included in the insight. The insight only includes findings that match criteria defined in the filters. See [filters](#filters) below for more details.
+* `groupByAttribute` - (Required) The attribute used to group the findings for the insight, e.g., if an insight is grouped by `ResourceId`, then the insight produces a list of resource identifiers.
+* `name` - (Required) The name of the custom insight.
+
+### filters
+
+The `filters` configuration block supports the following arguments:
+
+~> **NOTE:** For each argument below, up to 20 filter values can be provided.
+
+* `awsAccountId` - (Optional) AWS account ID that a finding is generated in. See [String Filter](#string-filter-argument-reference) below for more details.
+* `companyName` - (Optional) The name of the findings provider (company) that owns the solution (product) that generates findings. See [String Filter](#string-filter-argument-reference) below for more details.
+* `complianceStatus` - (Optional) Exclusive to findings that are generated as the result of a check run against a specific rule in a supported standard, such as CIS AWS Foundations. Contains security standard-related finding details. See [String Filter](#string-filter-argument-reference) below for more details.
+* `confidence` - (Optional) A finding's confidence. Confidence is defined as the likelihood that a finding accurately identifies the behavior or issue that it was intended to identify. Confidence is scored on a 0-100 basis using a ratio scale, where 0 means zero percent confidence and 100 means 100 percent confidence. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `createdAt` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider captured the potential security issue that a finding captured. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `criticality` - (Optional) The level of importance assigned to the resources associated with the finding. A score of 0 means that the underlying resources have no criticality, and a score of 100 is reserved for the most critical resources. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `description` - (Optional) A finding's description. See [String Filter](#string-filter-argument-reference) below for more details.
+* `findingProviderFieldsConfidence` - (Optional) The finding provider value for the finding confidence. Confidence is defined as the likelihood that a finding accurately identifies the behavior or issue that it was intended to identify. Confidence is scored on a 0-100 basis using a ratio scale, where 0 means zero percent confidence and 100 means 100 percent confidence. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `findingProviderFieldsCriticality` - (Optional) The finding provider value for the level of importance assigned to the resources associated with the findings. A score of 0 means that the underlying resources have no criticality, and a score of 100 is reserved for the most critical resources. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `findingProviderFieldsRelatedFindingsId` - (Optional) The finding identifier of a related finding that is identified by the finding provider. See [String Filter](#string-filter-argument-reference) below for more details.
+* `findingProviderFieldsRelatedFindingsProductArn` - (Optional) The ARN of the solution that generated a related finding that is identified by the finding provider. See [String Filter](#string-filter-argument-reference) below for more details.
+* `findingProviderFieldsSeverityLabel` - (Optional) The finding provider value for the severity label. See [String Filter](#string-filter-argument-reference) below for more details.
+* `findingProviderFieldsSeverityOriginal` - (Optional) The finding provider's original value for the severity. See [String Filter](#string-filter-argument-reference) below for more details.
+* `findingProviderFieldsTypes` - (Optional) One or more finding types that the finding provider assigned to the finding. Uses the format of `namespace/category/classifier` that classifies a finding. Valid namespace values include: `Software and Configuration Checks`, `TTPs`, `Effects`, `Unusual Behaviors`, and `Sensitive Data Identifications`. See [String Filter](#string-filter-argument-reference) below for more details.
+* `firstObservedAt` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider first observed the potential security issue that a finding captured. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `generatorId` - (Optional) The identifier for the solution-specific component (a discrete unit of logic) that generated a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `id` - (Optional) The security findings provider-specific identifier for a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `keyword` - (Optional) A keyword for a finding. See [Keyword Filter](#keyword-filter-argument-reference) below for more details.
+* `lastObservedAt` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider most recently observed the potential security issue that a finding captured. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `malwareName` - (Optional) The name of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details.
+* `malwarePath` - (Optional) The filesystem path of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details.
+* `malwareState` - (Optional) The state of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details.
+* `malwareType` - (Optional) The type of the malware that was observed. See [String Filter](#string-filter-argument-reference) below for more details.
+* `networkDestinationDomain` - (Optional) The destination domain of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `networkDestinationIpv4` - (Optional) The destination IPv4 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details.
+* `networkDestinationIpv6` - (Optional) The destination IPv6 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details.
+* `networkDestinationPort` - (Optional) The destination port of network-related information about a finding. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `networkDirection` - (Optional) Indicates the direction of network traffic associated with a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `networkProtocol` - (Optional) The protocol of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `networkSourceDomain` - (Optional) The source domain of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `networkSourceIpv4` - (Optional) The source IPv4 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details.
+* `networkSourceIpv6` - (Optional) The source IPv6 address of network-related information about a finding. See [Ip Filter](#ip-filter-argument-reference) below for more details.
+* `networkSourceMac` - (Optional) The source media access control (MAC) address of network-related information about a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `networkSourcePort` - (Optional) The source port of network-related information about a finding. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `noteText` - (Optional) The text of a note. See [String Filter](#string-filter-argument-reference) below for more details.
+* `noteUpdatedAt` - (Optional) The timestamp of when the note was updated. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `noteUpdatedBy` - (Optional) The principal that created a note. See [String Filter](#string-filter-argument-reference) below for more details.
+* `processLaunchedAt` - (Optional) The date/time that the process was launched. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `processName` - (Optional) The name of the process. See [String Filter](#string-filter-argument-reference) below for more details.
+* `processParentPid` - (Optional) The parent process ID. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `processPath` - (Optional) The path to the process executable. See [String Filter](#string-filter-argument-reference) below for more details.
+* `processPid` - (Optional) The process ID. See [Number Filter](#number-filter-argument-reference) below for more details.
+* `processTerminatedAt` - (Optional) The date/time that the process was terminated. See [Date Filter](#date-filter-argument-reference) below for more details.
+* `productArn` - (Optional) The ARN generated by Security Hub that uniquely identifies a third-party company (security findings provider) after this provider's product (solution that generates findings) is registered with Security Hub. See [String Filter](#string-filter-argument-reference) below for more details.
+* `productFields` - (Optional) A data type where security-findings providers can include additional solution-specific details that aren't part of the defined `AwsSecurityFinding` format. See [Map Filter](#map-filter-argument-reference) below for more details.
+* `productName` - (Optional) The name of the solution (product) that generates findings. See [String Filter](#string-filter-argument-reference) below for more details.
+* `recommendationText` - (Optional) The recommendation of what to do about the issue described in a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `recordState` - (Optional) The updated record state for the finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `relatedFindingsId` - (Optional) The solution-generated identifier for a related finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `relatedFindingsProductArn` - (Optional) The ARN of the solution that generated a related finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceIamInstanceProfileArn` - (Optional) The IAM profile ARN of the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceImageId` - (Optional) The Amazon Machine Image (AMI) ID of the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceIpv4Addresses` - (Optional) The IPv4 addresses associated with the instance. See [Ip Filter](#ip-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceIpv6Addresses` - (Optional) The IPv6 addresses associated with the instance. See [Ip Filter](#ip-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceKeyName` - (Optional) The key name associated with the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceLaunchedAt` - (Optional) The date and time the instance was launched. See [Date Filter](#date-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceSubnetId` - (Optional) The identifier of the subnet that the instance was launched in. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceType` - (Optional) The instance type of the instance. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsEc2InstanceVpcId` - (Optional) The identifier of the VPC that the instance was launched in. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsIamAccessKeyCreatedAt` - (Optional) The creation date/time of the IAM access key related to a finding. See [Date Filter](#date-filter-argument-reference) below for more details. +* `resourceAwsIamAccessKeyStatus` - (Optional) The status of the IAM access key related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsIamAccessKeyUserName` - (Optional) The user associated with the IAM access key related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsS3BucketOwnerId` - (Optional) The canonical user ID of the owner of the S3 bucket. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceAwsS3BucketOwnerName` - (Optional) The display name of the owner of the S3 bucket. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceContainerImageId` - (Optional) The identifier of the image related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceContainerImageName` - (Optional) The name of the image related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. 
+* `resourceContainerLaunchedAt` - (Optional) The date/time that the container was started. See [Date Filter](#date-filter-argument-reference) below for more details. +* `resourceContainerName` - (Optional) The name of the container related to a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceDetailsOther` - (Optional) The details of a resource that doesn't have a specific subfield for the resource type defined. See [Map Filter](#map-filter-argument-reference) below for more details. +* `resourceId` - (Optional) The canonical identifier for the given resource type. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourcePartition` - (Optional) The canonical AWS partition name that the Region is assigned to. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceRegion` - (Optional) The canonical AWS external Region name where this resource is located. See [String Filter](#string-filter-argument-reference) below for more details. +* `resourceTags` - (Optional) A list of AWS tags associated with a resource at the time the finding was processed. See [Map Filter](#map-filter-argument-reference) below for more details. +* `resourceType` - (Optional) Specifies the type of the resource that details are provided for. See [String Filter](#string-filter-argument-reference) below for more details. +* `severityLabel` - (Optional) The label of a finding's severity. See [String Filter](#string-filter-argument-reference) below for more details. +* `sourceUrl` - (Optional) A URL that links to a page about the current finding in the security-findings provider's solution. See [String Filter](#string-filter-argument-reference) below for more details. +* `threatIntelIndicatorCategory` - (Optional) The category of a threat intelligence indicator. See [String Filter](#string-filter-argument-reference) below for more details. +* `threatIntelIndicatorLastObservedAt` - (Optional) The date/time of the last observation of a threat intelligence indicator. See [Date Filter](#date-filter-argument-reference) below for more details. +* `threatIntelIndicatorSource` - (Optional) The source of the threat intelligence. See [String Filter](#string-filter-argument-reference) below for more details. +* `threatIntelIndicatorSourceUrl` - (Optional) The URL for more details from the source of the threat intelligence. See [String Filter](#string-filter-argument-reference) below for more details. +* `threatIntelIndicatorType` - (Optional) The type of a threat intelligence indicator. See [String Filter](#string-filter-argument-reference) below for more details. +* `threatIntelIndicatorValue` - (Optional) The value of a threat intelligence indicator. See [String Filter](#string-filter-argument-reference) below for more details. +* `title` - (Optional) A finding's title. See [String Filter](#string-filter-argument-reference) below for more details. +* `type` - (Optional) A finding type in the format of `namespace/category/classifier` that classifies a finding. See [String Filter](#string-filter-argument-reference) below for more details. +* `updatedAt` - (Optional) An ISO8601-formatted timestamp that indicates when the security-findings provider last updated the finding record. See [Date Filter](#date-filter-argument-reference) below for more details. +* `userDefinedValues` - (Optional) A list of name/value string pairs associated with the finding. These are custom, user-defined fields added to a finding. 
See [Map Filter](#map-filter-argument-reference) below for more details.
+* `verificationState` - (Optional) The veracity of a finding. See [String Filter](#string-filter-argument-reference) below for more details.
+* `workflowStatus` - (Optional) The status of the investigation into a finding. See [Workflow Status Filter](#workflow-status-filter-argument-reference) below for more details.
+
+### Date Filter Argument Reference
+
+The date filter configuration block supports the following arguments:
+
+* `dateRange` - (Optional) A configuration block of the date range for the date filter. See [date_range](#date_range-argument-reference) below for more details.
+* `end` - (Optional) An end date for the date filter. Required with `start` if `dateRange` is not specified.
+* `start` - (Optional) A start date for the date filter. Required with `end` if `dateRange` is not specified.
+
+### date_range Argument Reference
+
+The `dateRange` configuration block supports the following arguments:
+
+* `unit` - (Required) A date range unit for the date filter. Valid values: `DAYS`.
+* `value` - (Required) A date range value for the date filter, provided as an Integer.
+
+### Ip Filter Argument Reference
+
+The Ip filter configuration block supports the following arguments:
+
+* `cidr` - (Required) A finding's CIDR value.
+
+### Keyword Filter Argument Reference
+
+The keyword filter configuration block supports the following arguments:
+
+* `value` - (Required) A value for the keyword.
+
+### Map Filter Argument Reference
+
+The map filter configuration block supports the following arguments:
+
+* `comparison` - (Required) The condition to apply to a string value when querying for findings. Valid values include: `EQUALS` and `NOT_EQUALS`.
+* `key` - (Required) The key of the map filter. For example, for `resourceTags`, `key` identifies the name of the tag. For `userDefinedValues`, `key` is the name of the field.
+* `value` - (Required) The value for the key in the map filter. Filter values are case sensitive. For example, one of the values for a tag called `Department` might be `Security`. If you provide `security` as the filter value, then there is no match.
+
+### Number Filter Argument Reference
+
+The number filter configuration block supports the following arguments:
+
+~> **NOTE:** Only one of `eq`, `gte`, or `lte` must be specified.
+
+* `eq` - (Optional) The equal-to condition to be applied to a single field when querying for findings, provided as a String.
+* `gte` - (Optional) The greater-than-equal condition to be applied to a single field when querying for findings, provided as a String.
+* `lte` - (Optional) The less-than-equal condition to be applied to a single field when querying for findings, provided as a String.
+
+### String Filter Argument Reference
+
+The string filter configuration block supports the following arguments:
+
+* `comparison` - (Required) The condition to apply to a string value when querying for findings. Valid values include: `EQUALS`, `PREFIX`, `NOT_EQUALS`, and `PREFIX_NOT_EQUALS`.
+* `value` - (Required) The string filter value. Filter values are case sensitive.
+
+### Workflow Status Filter Argument Reference
+
+The workflow status filter configuration block supports the following arguments:
+
+* `comparison` - (Required) The condition to apply to a string value when querying for findings. Valid values include: `EQUALS`, `PREFIX`, `NOT_EQUALS`, and `PREFIX_NOT_EQUALS`.
+* `value` - (Required) The string filter value. Valid values include: `NEW`, `NOTIFIED`, `SUPPRESSED`, and `RESOLVED`.
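+
+As an illustration of the workflow status filter described above, the following is a hypothetical sketch in the style of the examples earlier on this page. The insight name and the `SeverityLabel` grouping attribute are illustrative assumptions, not values taken from the generated provider documentation.
+
+```typescript
+// Hypothetical sketch - not generated by 'cdktf convert'.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubInsight } from "./.gen/providers/aws/securityhub-insight";
+class WorkflowStatusSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SecurityhubAccount(this, "example", {});
+    // Collect findings that nobody has acted on yet, grouped by severity.
+    new SecurityhubInsight(this, "unworked_findings", {
+      dependsOn: [example],
+      filters: {
+        workflowStatus: [
+          {
+            comparison: "EQUALS",
+            value: "NEW",
+          },
+        ],
+      },
+      groupByAttribute: "SeverityLabel",
+      name: "unworked-findings-by-severity",
+    });
+  }
+}
+```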
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - ARN of the insight.
+* `arn` - ARN of the insight.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub insights using the ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Security Hub insights using the ARN. For example:
+
+```console
+% terraform import aws_securityhub_insight.example arn:aws:securityhub:us-west-2:1234567890:insight/1234567890/custom/91299ed7-abd0-4e44-a858-d0b15e37141a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/securityhub_invite_accepter.markdown b/website/docs/cdktf/typescript/r/securityhub_invite_accepter.markdown
new file mode 100644
index 00000000000..11fcce8bd2b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/securityhub_invite_accepter.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_invite_accepter"
+description: |-
+  Accepts a Security Hub invitation.
+---
+
+
+
+# Resource: aws_securityhub_invite_accepter
+
+-> **Note:** AWS accounts can only be associated with a single Security Hub master account. Destroying this resource will disassociate the member account from the master account.
+
+Accepts a Security Hub invitation.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubInviteAccepter } from "./.gen/providers/aws/securityhub-invite-accepter";
+import { SecurityhubMember } from "./.gen/providers/aws/securityhub-member";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SecurityhubAccount(this, "example", {});
+    const invitee = new SecurityhubAccount(this, "invitee", {
+      provider: "aws.invitee",
+    });
+    const awsSecurityhubMemberExample = new SecurityhubMember(
+      this,
+      "example_2",
+      {
+        accountId: "123456789012",
+        email: "example@example.com",
+        invite: true,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSecurityhubMemberExample.overrideLogicalId("example");
+    const awsSecurityhubInviteAccepterInvitee = new SecurityhubInviteAccepter(
+      this,
+      "invitee_3",
+      {
+        dependsOn: [invitee],
+        masterId: Token.asString(awsSecurityhubMemberExample.masterId),
+        provider: "aws.invitee",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + awsSecurityhubInviteAccepterInvitee.overrideLogicalId("invitee"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `masterId` - (Required) The account ID of the master Security Hub account whose invitation you're accepting. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `invitationId` - The ID of the invitation. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub invite acceptance using the account ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Security Hub invite acceptance using the account ID. For example: + +```console +% terraform import aws_securityhub_invite_accepter.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/securityhub_member.markdown b/website/docs/cdktf/typescript/r/securityhub_member.markdown new file mode 100644 index 00000000000..32a4792b2e3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/securityhub_member.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_member" +description: |- + Provides a Security Hub member resource. +--- + + + +# Resource: aws_securityhub_member + +Provides a Security Hub member resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubMember } from "./.gen/providers/aws/securityhub-member"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const awsSecurityhubMemberExample = new SecurityhubMember( + this, + "example_1", + { + accountId: "123456789012", + dependsOn: [example], + email: "example@example.com", + invite: true, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubMemberExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `accountId` - (Required) The ID of the member AWS account. +* `email` - (Optional) The email of the member AWS account. +* `invite` - (Optional) Boolean whether to invite the account to Security Hub as a member. Defaults to `false`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the member AWS account (matches `accountId`). +* `masterId` - The ID of the master Security Hub AWS account. +* `memberStatus` - The status of the member account relationship. 
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub members using their account ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Security Hub members using their account ID. For example: + +```console +% terraform import aws_securityhub_member.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/securityhub_organization_admin_account.html.markdown b/website/docs/cdktf/typescript/r/securityhub_organization_admin_account.html.markdown new file mode 100644 index 00000000000..b259ce007cb --- /dev/null +++ b/website/docs/cdktf/typescript/r/securityhub_organization_admin_account.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_organization_admin_account" +description: |- + Manages a Security Hub administrator account for an organization. +--- + + + +# Resource: aws_securityhub_organization_admin_account + +Manages a Security Hub administrator account for an organization. The AWS account utilizing this resource must be an Organizations primary account. More information about Organizations support in Security Hub can be found in the [Security Hub User Guide](https://docs.aws.amazon.com/securityhub/latest/userguide/designate-orgs-admin-account.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization"; +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubOrganizationAdminAccount } from "./.gen/providers/aws/securityhub-organization-admin-account"; +import { SecurityhubOrganizationConfiguration } from "./.gen/providers/aws/securityhub-organization-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new OrganizationsOrganization(this, "example", { + awsServiceAccessPrincipals: ["securityhub.amazonaws.com"], + featureSet: "ALL", + }); + const awsSecurityhubAccountExample = new SecurityhubAccount( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubAccountExample.overrideLogicalId("example"); + const awsSecurityhubOrganizationAdminAccountExample = + new SecurityhubOrganizationAdminAccount(this, "example_2", { + adminAccountId: "123456789012", + dependsOn: [example], + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsSecurityhubOrganizationAdminAccountExample.overrideLogicalId("example"); + const awsSecurityhubOrganizationConfigurationExample = + new SecurityhubOrganizationConfiguration(this, "example_3", { + autoEnable: true, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubOrganizationConfigurationExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `adminAccountId` - (Required) The AWS account identifier of the account to designate as the Security Hub administrator account. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - AWS account identifier. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub Organization Admin Accounts using the AWS account ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Security Hub Organization Admin Accounts using the AWS account ID. For example: + +```console +% terraform import aws_securityhub_organization_admin_account.example 123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/securityhub_organization_configuration.markdown b/website/docs/cdktf/typescript/r/securityhub_organization_configuration.markdown new file mode 100644 index 00000000000..46a4b892c00 --- /dev/null +++ b/website/docs/cdktf/typescript/r/securityhub_organization_configuration.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "Security Hub" +layout: "aws" +page_title: "AWS: aws_securityhub_organization_configuration" +description: |- + Manages the Security Hub Organization Configuration +--- + + + +# Resource: aws_securityhub_organization_configuration + +Manages the Security Hub Organization Configuration. + +~> **NOTE:** This resource requires an [`awsSecurityhubOrganizationAdminAccount`](/docs/providers/aws/r/securityhub_organization_admin_account.html) to be configured (not necessarily with Terraform). More information about managing Security Hub in an organization can be found in the [Managing administrator and member accounts](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-accounts.html) documentation + +~> **NOTE:** This is an advanced Terraform resource. Terraform will automatically assume management of the Security Hub Organization Configuration without import and perform no actions on removal from the Terraform configuration. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { OrganizationsOrganization } from "./.gen/providers/aws/organizations-organization";
+import { SecurityhubOrganizationAdminAccount } from "./.gen/providers/aws/securityhub-organization-admin-account";
+import { SecurityhubOrganizationConfiguration } from "./.gen/providers/aws/securityhub-organization-configuration";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new OrganizationsOrganization(this, "example", {
+      awsServiceAccessPrincipals: ["securityhub.amazonaws.com"],
+      featureSet: "ALL",
+    });
+    const awsSecurityhubOrganizationAdminAccountExample =
+      new SecurityhubOrganizationAdminAccount(this, "example_1", {
+        adminAccountId: "123456789012",
+        dependsOn: [example],
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSecurityhubOrganizationAdminAccountExample.overrideLogicalId("example");
+    const awsSecurityhubOrganizationConfigurationExample =
+      new SecurityhubOrganizationConfiguration(this, "example_2", {
+        autoEnable: true,
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSecurityhubOrganizationConfigurationExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `autoEnable` - (Required) Whether to automatically enable Security Hub for new accounts in the organization.
+* `autoEnableStandards` - (Optional) Whether to automatically enable Security Hub default standards for new member accounts in the organization. By default, this parameter is equal to `DEFAULT`, and new member accounts are automatically enabled with default Security Hub standards. To opt out of enabling default standards for new member accounts, set this parameter equal to `NONE`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - AWS Account ID.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an existing Security Hub enabled account using the AWS account ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an existing Security Hub enabled account using the AWS account ID. For example:
+
+```console
+% terraform import aws_securityhub_organization_configuration.example 123456789012
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/securityhub_product_subscription.markdown b/website/docs/cdktf/typescript/r/securityhub_product_subscription.markdown
new file mode 100644
index 00000000000..2c2c09454d9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/securityhub_product_subscription.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_product_subscription"
+description: |-
+  Subscribes to a Security Hub product.
+---
+
+
+
+# Resource: aws_securityhub_product_subscription
+
+Subscribes to a Security Hub product.
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account"; +import { SecurityhubProductSubscription } from "./.gen/providers/aws/securityhub-product-subscription"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SecurityhubAccount(this, "example", {}); + const current = new DataAwsRegion(this, "current", {}); + const awsSecurityhubProductSubscriptionExample = + new SecurityhubProductSubscription(this, "example_2", { + dependsOn: [example], + productArn: + "arn:aws:securityhub:${" + + current.name + + "}:733251395267:product/alertlogic/althreatmanagement", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSecurityhubProductSubscriptionExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `productArn` - (Required) The ARN of the product that generates findings that you want to import into Security Hub - see below. + +Amazon maintains a list of [Product integrations in AWS Security Hub](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-providers.html) that changes over time. Any of the products on the linked [Available AWS service integrations](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-internal-providers.html) or [Available third-party partner product integrations](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-partner-providers.html) can be configured using `awsSecurityhubProductSubscription`. + +Available products can also be listed by running the AWS CLI command `aws securityhub describe-products`. 
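+
+For instance, the product ARNs alone can be extracted with a client-side query. The following invocation is an illustrative sketch (the JMESPath query string is an assumption, not from the provider documentation), assuming the AWS CLI is configured for the target region:
+
+```console
+% aws securityhub describe-products --query "Products[].ProductArn" --output text
+```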
+
+A subset of currently available products (remember to replace `${varRegion}` as appropriate) includes:
+
+* `arn:aws:securityhub:${varRegion}::product/aws/guardduty`
+* `arn:aws:securityhub:${varRegion}::product/aws/inspector`
+* `arn:aws:securityhub:${varRegion}::product/aws/macie`
+* `arn:aws:securityhub:${varRegion}::product/alertlogic/althreatmanagement`
+* `arn:aws:securityhub:${varRegion}::product/armordefense/armoranywhere`
+* `arn:aws:securityhub:${varRegion}::product/barracuda/cloudsecurityguardian`
+* `arn:aws:securityhub:${varRegion}::product/checkpoint/cloudguard-iaas`
+* `arn:aws:securityhub:${varRegion}::product/checkpoint/dome9-arc`
+* `arn:aws:securityhub:${varRegion}::product/crowdstrike/crowdstrike-falcon`
+* `arn:aws:securityhub:${varRegion}::product/cyberark/cyberark-pta`
+* `arn:aws:securityhub:${varRegion}::product/f5-networks/f5-advanced-waf`
+* `arn:aws:securityhub:${varRegion}::product/fortinet/fortigate`
+* `arn:aws:securityhub:${varRegion}::product/guardicore/aws-infection-monkey`
+* `arn:aws:securityhub:${varRegion}::product/guardicore/guardicore`
+* `arn:aws:securityhub:${varRegion}::product/ibm/qradar-siem`
+* `arn:aws:securityhub:${varRegion}::product/imperva/imperva-attack-analytics`
+* `arn:aws:securityhub:${varRegion}::product/mcafee-skyhigh/mcafee-mvision-cloud-aws`
+* `arn:aws:securityhub:${varRegion}::product/paloaltonetworks/redlock`
+* `arn:aws:securityhub:${varRegion}::product/paloaltonetworks/vm-series`
+* `arn:aws:securityhub:${varRegion}::product/qualys/qualys-pc`
+* `arn:aws:securityhub:${varRegion}::product/qualys/qualys-vm`
+* `arn:aws:securityhub:${varRegion}::product/rapid7/insightvm`
+* `arn:aws:securityhub:${varRegion}::product/sophos/sophos-server-protection`
+* `arn:aws:securityhub:${varRegion}::product/splunk/splunk-enterprise`
+* `arn:aws:securityhub:${varRegion}::product/splunk/splunk-phantom`
+* `arn:aws:securityhub:${varRegion}::product/sumologicinc/sumologic-mda`
+* `arn:aws:securityhub:${varRegion}::product/symantec-corp/symantec-cwp`
+* `arn:aws:securityhub:${varRegion}::product/tenable/tenable-io`
+* `arn:aws:securityhub:${varRegion}::product/trend-micro/deep-security`
+* `arn:aws:securityhub:${varRegion}::product/turbot/turbot`
+* `arn:aws:securityhub:${varRegion}::product/twistlock/twistlock-enterprise`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of a resource that represents your subscription to the product that generates the findings that you want to import into Security Hub.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub product subscriptions using `productArn,arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Security Hub product subscriptions using `productArn,arn`.
For example:
+
+```console
+% terraform import aws_securityhub_product_subscription.example arn:aws:securityhub:eu-west-1:733251395267:product/alertlogic/althreatmanagement,arn:aws:securityhub:eu-west-1:123456789012:product-subscription/alertlogic/althreatmanagement
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/securityhub_standards_control.markdown b/website/docs/cdktf/typescript/r/securityhub_standards_control.markdown
new file mode 100644
index 00000000000..9c0afb93350
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/securityhub_standards_control.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_standards_control"
+description: |-
+  Enable/disable Security Hub standards controls.
+---
+
+
+
+# Resource: aws_securityhub_standards_control
+
+Disable/enable a Security Hub standards control in the current region.
+
+The `awsSecurityhubStandardsControl` behaves differently from normal resources, in that Terraform does not _create_ this resource, but instead "adopts" it into management. When you _delete_ this resource configuration, Terraform "abandons" the resource as-is and just removes it from the state.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubStandardsControl } from "./.gen/providers/aws/securityhub-standards-control";
+import { SecurityhubStandardsSubscription } from "./.gen/providers/aws/securityhub-standards-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SecurityhubAccount(this, "example", {});
+    const cisAwsFoundationsBenchmark = new SecurityhubStandardsSubscription(
+      this,
+      "cis_aws_foundations_benchmark",
+      {
+        dependsOn: [example],
+        standardsArn:
+          "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0",
+      }
+    );
+    new SecurityhubStandardsControl(
+      this,
+      "ensure_iam_password_policy_prevents_password_reuse",
+      {
+        controlStatus: "DISABLED",
+        dependsOn: [cisAwsFoundationsBenchmark],
+        disabledReason: "We handle password policies within Okta",
+        standardsControlArn:
+          "arn:aws:securityhub:us-east-1:111111111111:control/cis-aws-foundations-benchmark/v/1.2.0/1.10",
+      }
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `standardsControlArn` - (Required) The standards control ARN. See the AWS documentation for how to list existing controls using [`getEnabledStandards`](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/securityhub/get-enabled-standards.html) and [`describeStandardsControls`](https://awscli.amazonaws.com/v2/documentation/api/latest/reference/securityhub/describe-standards-controls.html); sample commands are shown after this list.
+* `controlStatus` – (Required) The control status can be `ENABLED` or `DISABLED`. You must specify the `disabledReason` argument for the `DISABLED` control status.
+* `disabledReason` – (Optional) A description of the reason why you are disabling a security standard control. If you specify this attribute, `controlStatus` will be set to `DISABLED` automatically.
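+
+As referenced in the list above, the subscription and control ARNs can be discovered with the AWS CLI. The following is an illustrative sketch, not part of the generated documentation; the subscription ARN shown is the CIS example used elsewhere on this page and should be replaced with one returned by the first command:
+
+```console
+% aws securityhub get-enabled-standards
+% aws securityhub describe-standards-controls --standards-subscription-arn arn:aws:securityhub:us-east-1:111111111111:subscription/cis-aws-foundations-benchmark/v/1.2.0
+```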
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The standard control ARN.
+* `controlId` – The identifier of the security standard control.
+* `controlStatusUpdatedAt` – The date and time that the status of the security standard control was most recently updated.
+* `description` – The longer description of the standard control. Provides information about what the control is checking for.
+* `relatedRequirements` – The list of requirements that are related to this control.
+* `remediationUrl` – A link to remediation information for the control in the Security Hub user documentation.
+* `severityRating` – The severity of findings generated from this security standard control.
+* `title` – The standard control title.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/securityhub_standards_subscription.markdown b/website/docs/cdktf/typescript/r/securityhub_standards_subscription.markdown
new file mode 100644
index 00000000000..5f438dc7730
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/securityhub_standards_subscription.markdown
@@ -0,0 +1,126 @@
+---
+subcategory: "Security Hub"
+layout: "aws"
+page_title: "AWS: aws_securityhub_standards_subscription"
+description: |-
+  Subscribes to a Security Hub standard.
+---
+
+
+
+# Resource: aws_securityhub_standards_subscription
+
+Subscribes to a Security Hub standard.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubStandardsSubscription } from "./.gen/providers/aws/securityhub-standards-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SecurityhubAccount(this, "example", {});
+    new SecurityhubStandardsSubscription(this, "cis", {
+      dependsOn: [example],
+      standardsArn:
+        "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0",
+    });
+    const current = new DataAwsRegion(this, "current", {});
+    new SecurityhubStandardsSubscription(this, "pci_321", {
+      dependsOn: [example],
+      standardsArn:
+        "arn:aws:securityhub:${" + current.name + "}::standards/pci-dss/v/3.2.1",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `standardsArn` - (Required) The ARN of a standard - see below.
+
+Currently available standards (remember to replace `${varRegion}` as appropriate):
+
+| Name                                      | ARN                                                                                            |
+|-------------------------------------------|------------------------------------------------------------------------------------------------|
+| AWS Foundational Security Best Practices  | `arn:aws:securityhub:${varRegion}::standards/aws-foundational-security-best-practices/v/1.0.0`  |
+| CIS AWS Foundations Benchmark v1.2.0      | `arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0`                           |
+| CIS AWS Foundations Benchmark v1.4.0      | `arn:aws:securityhub:${varRegion}::standards/cis-aws-foundations-benchmark/v/1.4.0`             |
+| NIST SP 800-53 Rev. 5                     | `arn:aws:securityhub:${varRegion}::standards/nist-800-53/v/5.0.0`                               |
+| PCI DSS                                   | `arn:aws:securityhub:${varRegion}::standards/pci-dss/v/3.2.1`                                   |
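+
+The import examples below reference a NIST SP 800-53 subscription that the example usage above does not show. The following is a hypothetical sketch in the same style, using the NIST ARN from the table above:
+
+```typescript
+// Hypothetical sketch - not generated by 'cdktf convert'.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { SecurityhubAccount } from "./.gen/providers/aws/securityhub-account";
+import { SecurityhubStandardsSubscription } from "./.gen/providers/aws/securityhub-standards-subscription";
+class NistSubscriptionSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SecurityhubAccount(this, "example", {});
+    const current = new DataAwsRegion(this, "current", {});
+    // Subscribe the current region to NIST SP 800-53 Rev. 5.
+    new SecurityhubStandardsSubscription(this, "nist_800_53_rev_5", {
+      dependsOn: [example],
+      standardsArn:
+        "arn:aws:securityhub:${" + current.name + "}::standards/nist-800-53/v/5.0.0",
+    });
+  }
+}
+```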
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of a resource that represents your subscription to a supported standard.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Security Hub standards subscriptions using the standards subscription ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Security Hub standards subscriptions using the standards subscription ARN. For example:
+
+```console
+% terraform import aws_securityhub_standards_subscription.cis arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0
+```
+
+```console
+% terraform import aws_securityhub_standards_subscription.pci_321 arn:aws:securityhub:eu-west-1:123456789012:subscription/pci-dss/v/3.2.1
+```
+
+```console
+% terraform import aws_securityhub_standards_subscription.nist_800_53_rev_5 arn:aws:securityhub:eu-west-1:123456789012:subscription/nist-800-53/v/5.0.0
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/serverlessapplicationrepository_cloudformation_stack.html.markdown b/website/docs/cdktf/typescript/r/serverlessapplicationrepository_cloudformation_stack.html.markdown
new file mode 100644
index 00000000000..82da89c42b7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/serverlessapplicationrepository_cloudformation_stack.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "Serverless Application Repository"
+layout: "aws"
+page_title: "AWS: aws_serverlessapplicationrepository_cloudformation_stack"
+description: |-
+  Deploys an Application CloudFormation Stack from the Serverless Application Repository.
+---
+
+
+
+# Resource: aws_serverlessapplicationrepository_cloudformation_stack
+
+Deploys an Application CloudFormation Stack from the Serverless Application Repository.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition";
+import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region";
+import { ServerlessapplicationrepositoryCloudformationStack } from "./.gen/providers/aws/serverlessapplicationrepository-cloudformation-stack";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsPartition(this, "current", {});
+    const dataAwsRegionCurrent = new DataAwsRegion(this, "current_1", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsRegionCurrent.overrideLogicalId("current");
+    new ServerlessapplicationrepositoryCloudformationStack(
+      this,
+      "postgres-rotator",
+      {
+        applicationId:
+          "arn:aws:serverlessrepo:us-east-1:297356227824:applications/SecretsManagerRDSPostgreSQLRotationSingleUser",
+        capabilities: ["CAPABILITY_IAM", "CAPABILITY_RESOURCE_POLICY"],
+        name: "postgres-rotator",
+        parameters: {
+          endpoint:
+            "secretsmanager.${" +
+            dataAwsRegionCurrent.name +
+            "}.${" +
+            current.dnsSuffix +
+            "}",
+          functionName: "func-postgres-rotator",
+        },
+      }
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the stack to create. The resource deployed in AWS will be prefixed with `serverlessrepo-`.
+* `applicationId` - (Required) The ARN of the application from the Serverless Application Repository.
+* `capabilities` - (Required) A list of capabilities. Valid values are `CAPABILITY_IAM`, `CAPABILITY_NAMED_IAM`, `CAPABILITY_RESOURCE_POLICY`, or `CAPABILITY_AUTO_EXPAND`.
+* `parameters` - (Optional) A map of Parameter structures that specify input parameters for the stack.
+* `semanticVersion` - (Optional) The version of the application to deploy. If not supplied, deploys the latest version.
+* `tags` - (Optional) A list of tags to associate with this stack. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A unique identifier of the stack.
+* `outputs` - A map of outputs from the stack.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Serverless Application Repository Stack using the CloudFormation Stack name (with or without the `serverlessrepo-` prefix) or the CloudFormation Stack ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Serverless Application Repository Stack using the CloudFormation Stack name (with or without the `serverlessrepo-` prefix) or the CloudFormation Stack ID.
For example: + +```console +% terraform import aws_serverlessapplicationrepository_cloudformation_stack.example serverlessrepo-postgres-rotator +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/service_discovery_http_namespace.html.markdown b/website/docs/cdktf/typescript/r/service_discovery_http_namespace.html.markdown new file mode 100644 index 00000000000..4d8b9e56073 --- /dev/null +++ b/website/docs/cdktf/typescript/r/service_discovery_http_namespace.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_http_namespace" +description: |- + Provides a Service Discovery HTTP Namespace resource. +--- + + + +# Resource: aws_service_discovery_http_namespace + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServiceDiscoveryHttpNamespace } from "./.gen/providers/aws/service-discovery-http-namespace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServiceDiscoveryHttpNamespace(this, "example", { + description: "example", + name: "development", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the http namespace. +* `description` - (Optional) The description that you specify for the namespace when you create it. +* `tags` - (Optional) A map of tags to assign to the namespace. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of a namespace. +* `arn` - The ARN that Amazon Route 53 assigns to the namespace when you create it. +* `httpName` - The name of an HTTP namespace. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery HTTP Namespace using the namespace ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Service Discovery HTTP Namespace using the namespace ID. 
For example: + +```console +% terraform import aws_service_discovery_http_namespace.example ns-1234567890 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/service_discovery_instance.html.markdown b/website/docs/cdktf/typescript/r/service_discovery_instance.html.markdown new file mode 100644 index 00000000000..2b55637e0d7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/service_discovery_instance.html.markdown @@ -0,0 +1,170 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_instance" +description: |- + Provides a Service Discovery Instance resource. +--- + + + +# Resource: aws_service_discovery_instance + +Provides a Service Discovery Instance resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServiceDiscoveryInstance } from "./.gen/providers/aws/service-discovery-instance"; +import { ServiceDiscoveryPrivateDnsNamespace } from "./.gen/providers/aws/service-discovery-private-dns-namespace"; +import { ServiceDiscoveryService } from "./.gen/providers/aws/service-discovery-service"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + enableDnsHostnames: true, + enableDnsSupport: true, + }); + const awsServiceDiscoveryPrivateDnsNamespaceExample = + new ServiceDiscoveryPrivateDnsNamespace(this, "example_1", { + description: "example", + name: "example.terraform.local", + vpc: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryPrivateDnsNamespaceExample.overrideLogicalId("example"); + const awsServiceDiscoveryServiceExample = new ServiceDiscoveryService( + this, + "example_2", + { + dnsConfig: { + dnsRecords: [ + { + ttl: 10, + type: "A", + }, + ], + namespaceId: Token.asString( + awsServiceDiscoveryPrivateDnsNamespaceExample.id + ), + routingPolicy: "MULTIVALUE", + }, + healthCheckCustomConfig: { + failureThreshold: 1, + }, + name: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryServiceExample.overrideLogicalId("example"); + const awsServiceDiscoveryInstanceExample = new ServiceDiscoveryInstance( + this, + "example_3", + { + attributes: { + AWS_INSTANCE_IPV4: "172.18.0.1", + custom_attribute: "custom", + }, + instanceId: "example-instance-id", + serviceId: Token.asString(awsServiceDiscoveryServiceExample.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryInstanceExample.overrideLogicalId("example"); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ServiceDiscoveryHttpNamespace } from "./.gen/providers/aws/service-discovery-http-namespace"; +import { ServiceDiscoveryInstance } from "./.gen/providers/aws/service-discovery-instance"; +import { ServiceDiscoveryService } from "./.gen/providers/aws/service-discovery-service"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new ServiceDiscoveryHttpNamespace(this, "example", { + description: "example", + name: "example.terraform.com", + }); + const awsServiceDiscoveryServiceExample = new ServiceDiscoveryService( + this, + "example_1", + { + name: "example", + namespaceId: example.id, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryServiceExample.overrideLogicalId("example"); + const awsServiceDiscoveryInstanceExample = new ServiceDiscoveryInstance( + this, + "example_2", + { + attributes: { + AWS_EC2_INSTANCE_ID: "i-0abdg374kd892cj6dl", + }, + instanceId: "example-instance-id", + serviceId: Token.asString(awsServiceDiscoveryServiceExample.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryInstanceExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instanceId` - (Required, ForceNew) The ID of the service instance. +* `serviceId` - (Required, ForceNew) The ID of the service that you want to use to create the instance. +* `attributes` - (Required) A map contains the attributes of the instance. Check the [doc](https://docs.aws.amazon.com/cloud-map/latest/api/API_RegisterInstance.html#API_RegisterInstance_RequestSyntax) for the supported attributes and syntax. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the instance. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Instance using the service ID and instance ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Service Discovery Instance using the service ID and instance ID. For example: + +```console +% terraform import aws_service_discovery_instance.example 0123456789/i-0123 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/service_discovery_private_dns_namespace.html.markdown b/website/docs/cdktf/typescript/r/service_discovery_private_dns_namespace.html.markdown new file mode 100644 index 00000000000..e43922c381a --- /dev/null +++ b/website/docs/cdktf/typescript/r/service_discovery_private_dns_namespace.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_private_dns_namespace" +description: |- + Provides a Service Discovery Private DNS Namespace resource. +--- + + + +# Resource: aws_service_discovery_private_dns_namespace + +Provides a Service Discovery Private DNS Namespace resource. 
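+
+~> **NOTE:** A private DNS namespace is backed by an Amazon Route 53 private hosted zone, so instances registered in it are typically only resolvable from a VPC whose `enableDnsSupport` and `enableDnsHostnames` attributes are enabled. A minimal hand-written sketch (not `cdktf convert` output; resource names are illustrative) showing a VPC configured for private DNS resolution:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ServiceDiscoveryPrivateDnsNamespace } from "./.gen/providers/aws/service-discovery-private-dns-namespace";
+import { Vpc } from "./.gen/providers/aws/vpc";
+
+class PrivateDnsNamespaceSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Route 53 private hosted zones require both DNS attributes on the VPC.
+    const vpc = new Vpc(this, "vpc", {
+      cidrBlock: "10.0.0.0/16",
+      enableDnsHostnames: true,
+      enableDnsSupport: true,
+    });
+    new ServiceDiscoveryPrivateDnsNamespace(this, "namespace", {
+      name: "example.terraform.local",
+      vpc: vpc.id,
+    });
+  }
+}
+```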
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServiceDiscoveryPrivateDnsNamespace } from "./.gen/providers/aws/service-discovery-private-dns-namespace"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + }); + const awsServiceDiscoveryPrivateDnsNamespaceExample = + new ServiceDiscoveryPrivateDnsNamespace(this, "example_1", { + description: "example", + name: "hoge.example.local", + vpc: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryPrivateDnsNamespaceExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the namespace. +* `vpc` - (Required) The ID of VPC that you want to associate the namespace with. +* `description` - (Optional) The description that you specify for the namespace when you create it. +* `tags` - (Optional) A map of tags to assign to the namespace. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of a namespace. +* `arn` - The ARN that Amazon Route 53 assigns to the namespace when you create it. +* `hostedZone` - The ID for the hosted zone that Amazon Route 53 creates when you create a namespace. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Private DNS Namespace using the namespace ID and VPC ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Service Discovery Private DNS Namespace using the namespace ID and VPC ID. 
For example: + +```console +% terraform import aws_service_discovery_private_dns_namespace.example 0123456789:vpc-123345 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/service_discovery_public_dns_namespace.html.markdown b/website/docs/cdktf/typescript/r/service_discovery_public_dns_namespace.html.markdown new file mode 100644 index 00000000000..bdbb17871ef --- /dev/null +++ b/website/docs/cdktf/typescript/r/service_discovery_public_dns_namespace.html.markdown @@ -0,0 +1,77 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_public_dns_namespace" +description: |- + Provides a Service Discovery Public DNS Namespace resource. +--- + + + +# Resource: aws_service_discovery_public_dns_namespace + +Provides a Service Discovery Public DNS Namespace resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServiceDiscoveryPublicDnsNamespace } from "./.gen/providers/aws/service-discovery-public-dns-namespace"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServiceDiscoveryPublicDnsNamespace(this, "example", { + description: "example", + name: "hoge.example.com", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the namespace. +* `description` - (Optional) The description that you specify for the namespace when you create it. +* `tags` - (Optional) A map of tags to assign to the namespace. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of a namespace. +* `arn` - The ARN that Amazon Route 53 assigns to the namespace when you create it. +* `hostedZone` - The ID for the hosted zone that Amazon Route 53 creates when you create a namespace. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Public DNS Namespace using the namespace ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Service Discovery Public DNS Namespace using the namespace ID. 
For example: + +```console +% terraform import aws_service_discovery_public_dns_namespace.example 0123456789 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/service_discovery_service.html.markdown b/website/docs/cdktf/typescript/r/service_discovery_service.html.markdown new file mode 100644 index 00000000000..1a6c438516c --- /dev/null +++ b/website/docs/cdktf/typescript/r/service_discovery_service.html.markdown @@ -0,0 +1,191 @@ +--- +subcategory: "Cloud Map" +layout: "aws" +page_title: "AWS: aws_service_discovery_service" +description: |- + Provides a Service Discovery Service resource. +--- + + + +# Resource: aws_service_discovery_service + +Provides a Service Discovery Service resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServiceDiscoveryPrivateDnsNamespace } from "./.gen/providers/aws/service-discovery-private-dns-namespace"; +import { ServiceDiscoveryService } from "./.gen/providers/aws/service-discovery-service"; +import { Vpc } from "./.gen/providers/aws/vpc"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + enableDnsHostnames: true, + enableDnsSupport: true, + }); + const awsServiceDiscoveryPrivateDnsNamespaceExample = + new ServiceDiscoveryPrivateDnsNamespace(this, "example_1", { + description: "example", + name: "example.terraform.local", + vpc: example.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryPrivateDnsNamespaceExample.overrideLogicalId("example"); + const awsServiceDiscoveryServiceExample = new ServiceDiscoveryService( + this, + "example_2", + { + dnsConfig: { + dnsRecords: [ + { + ttl: 10, + type: "A", + }, + ], + namespaceId: Token.asString( + awsServiceDiscoveryPrivateDnsNamespaceExample.id + ), + routingPolicy: "MULTIVALUE", + }, + healthCheckCustomConfig: { + failureThreshold: 1, + }, + name: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsServiceDiscoveryServiceExample.overrideLogicalId("example"); + } +} + +``` + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { ServiceDiscoveryPublicDnsNamespace } from "./.gen/providers/aws/service-discovery-public-dns-namespace";
+import { ServiceDiscoveryService } from "./.gen/providers/aws/service-discovery-service";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new ServiceDiscoveryPublicDnsNamespace(this, "example", {
+      description: "example",
+      name: "example.terraform.com",
+    });
+    const awsServiceDiscoveryServiceExample = new ServiceDiscoveryService(
+      this,
+      "example_1",
+      {
+        dnsConfig: {
+          dnsRecords: [
+            {
+              ttl: 10,
+              type: "A",
+            },
+          ],
+          namespaceId: example.id,
+        },
+        healthCheckConfig: {
+          failureThreshold: 10,
+          resourcePath: "path",
+          type: "HTTP",
+        },
+        name: "example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsServiceDiscoveryServiceExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required, ForceNew) The name of the service.
+* `description` - (Optional) The description of the service.
+* `dnsConfig` - (Optional) A complex type that contains information about the resource record sets that you want Amazon Route 53 to create when you register an instance.
+* `healthCheckConfig` - (Optional) A complex type that contains settings for an optional health check. Only for Public DNS namespaces.
+* `forceDestroy` - (Optional, Default: false) A boolean that indicates all instances should be deleted from the service so that the service can be destroyed without error. These instances are not recoverable.
+* `healthCheckCustomConfig` - (Optional, ForceNew) A complex type that contains settings for ECS managed health checks.
+* `namespaceId` - (Optional) The ID of the namespace that you want to use to create the service.
+* `type` - (Optional) If present, specifies that the service instances are only discoverable using the `DiscoverInstances` API operation. No DNS records are registered for the service instances. The only valid value is `HTTP`.
+* `tags` - (Optional) A map of tags to assign to the service. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### dns_config
+
+This argument supports the following arguments:
+
+* `namespaceId` - (Required, ForceNew) The ID of the namespace to use for DNS configuration.
+* `dnsRecords` - (Required) An array that contains one DnsRecord object for each resource record set.
+* `routingPolicy` - (Optional) The routing policy that you want to apply to all records that Route 53 creates when you register an instance and specify the service. Valid Values: MULTIVALUE, WEIGHTED
+
+#### dns_records
+
+This argument supports the following arguments:
+
+* `ttl` - (Required) The amount of time, in seconds, that you want DNS resolvers to cache the settings for this resource record set.
+* `type` - (Required, ForceNew) The type of the resource, which indicates the value that Amazon Route 53 returns in response to DNS queries. Valid Values: A, AAAA, SRV, CNAME
+
+### health_check_config
+
+This argument supports the following arguments:
+
+* `failureThreshold` - (Optional) The number of consecutive health checks. Maximum value of 10.
+* `resourcePath` - (Optional) The path that you want Route 53 to request when performing health checks. Route 53 automatically adds the DNS name for the service. If you don't specify a value, the default value is /. +* `type` - (Optional, ForceNew) The type of health check that you want to create, which indicates how Route 53 determines whether an endpoint is healthy. Valid Values: HTTP, HTTPS, TCP + +### health_check_custom_config + +This argument supports the following arguments: + +* `failureThreshold` - (Optional, ForceNew) The number of 30-second intervals that you want service discovery to wait before it changes the health status of a service instance. Maximum value of 10. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the service. +* `arn` - The ARN of the service. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Discovery Service using the service ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Service Discovery Service using the service ID. For example: + +```console +% terraform import aws_service_discovery_service.example 0123456789 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_budget_resource_association.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_budget_resource_association.html.markdown new file mode 100644 index 00000000000..4dc8cf210d8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_budget_resource_association.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_budget_resource_association" +description: |- + Manages a Service Catalog Budget Resource Association +--- + + + +# Resource: aws_servicecatalog_budget_resource_association + +Manages a Service Catalog Budget Resource Association. + +-> **Tip:** A "resource" is either a Service Catalog portfolio or product. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogBudgetResourceAssociation } from "./.gen/providers/aws/servicecatalog-budget-resource-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogBudgetResourceAssociation(this, "example", { + budgetName: "budget-pjtvyakdlyo3m", + resourceId: "prod-dnigbtea24ste", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `budgetName` - (Required) Budget name. +* `resourceId` - (Required) Resource identifier. 
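+
+Both arguments can also be wired from resources managed in the same stack. A hand-written sketch (not `cdktf convert` output) that takes `resourceId` from a portfolio; the budget name is a placeholder and the budget itself is assumed to already exist in AWS Budgets:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ServicecatalogBudgetResourceAssociation } from "./.gen/providers/aws/servicecatalog-budget-resource-association";
+import { ServicecatalogPortfolio } from "./.gen/providers/aws/servicecatalog-portfolio";
+
+class BudgetAssociationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // A portfolio managed in this stack; its ID feeds the association.
+    const portfolio = new ServicecatalogPortfolio(this, "portfolio", {
+      name: "My App Portfolio",
+      providerName: "Brett",
+      description: "List of my organizations apps",
+    });
+    new ServicecatalogBudgetResourceAssociation(this, "association", {
+      budgetName: "budget-pjtvyakdlyo3m", // assumed to exist in AWS Budgets
+      resourceId: portfolio.id,
+    });
+  }
+}
+```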
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the association.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `3M`)
+- `read` - (Default `10M`)
+- `delete` - (Default `3M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogBudgetResourceAssociation` using the budget name and resource ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsServicecatalogBudgetResourceAssociation` using the budget name and resource ID. For example:
+
+```console
+% terraform import aws_servicecatalog_budget_resource_association.example budget-pjtvyakdlyo3m:prod-dnigbtea24ste
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/servicecatalog_constraint.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_constraint.html.markdown
new file mode 100644
index 00000000000..1df190f4c17
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/servicecatalog_constraint.html.markdown
@@ -0,0 +1,135 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_constraint"
+description: |-
+  Manages a Service Catalog Constraint
+---
+
+
+# Resource: aws_servicecatalog_constraint
+
+Manages a Service Catalog Constraint.
+
+~> **NOTE:** This resource does not associate a Service Catalog product and portfolio. However, the product and portfolio must be associated (see the `awsServicecatalogProductPortfolioAssociation` resource) prior to creating a constraint or you will receive an error.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ServicecatalogConstraint } from "./.gen/providers/aws/servicecatalog-constraint";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ServicecatalogConstraint(this, "example", {
+      description: "Back off, man. I'm a scientist.",
+      parameters: Token.asString(
+        Fn.jsonencode({
+          RoleArn: "arn:aws:iam::123456789012:role/LaunchRole",
+        })
+      ),
+      portfolioId: Token.asString(awsServicecatalogPortfolioExample.id),
+      productId: Token.asString(awsServicecatalogProductExample.id),
+      type: "LAUNCH",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `parameters` - (Required) Constraint parameters in JSON format. The syntax depends on the constraint type. See details below.
+* `portfolioId` - (Required) Portfolio identifier.
+* `productId` - (Required) Product identifier.
+* `type` - (Required) Type of constraint. Valid values are `LAUNCH`, `NOTIFICATION`, `RESOURCE_UPDATE`, `STACKSET`, and `TEMPLATE`.
+
+The following arguments are optional:
+
+* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+* `description` - (Optional) Description of the constraint.
+
+### `parameters`
+
+The `type` you specify determines what must be included in the `parameters` JSON:
+
+* `LAUNCH`: You are required to specify either the `RoleArn` or the `LocalRoleName` property, but you can't use both. If you specify the `LocalRoleName` property, when an account uses the launch constraint, the IAM role with that name in the account will be used. This allows launch-role constraints to be account-agnostic so the administrator can create fewer resources per shared account. The given role name must exist in the account used to create the launch constraint and the account of the user who launches a product with this launch constraint. You cannot have both a `LAUNCH` and a `STACKSET` constraint. You also cannot have more than one `LAUNCH` constraint on an `awsServicecatalogProduct` and `awsServicecatalogPortfolio`. Specify the `RoleArn` or `LocalRoleName` property as follows:
+
+```json
+{ "RoleArn" : "arn:aws:iam::123456789012:role/LaunchRole" }
+```
+
+```json
+{ "LocalRoleName" : "SCBasicLaunchRole" }
+```
+
+* `NOTIFICATION`: Specify the `NotificationArns` property as follows:
+
+```json
+{ "NotificationArns" : ["arn:aws:sns:us-east-1:123456789012:Topic"] }
+```
+
+* `RESOURCE_UPDATE`: Specify the `TagUpdateOnProvisionedProduct` property as follows. The `TagUpdateOnProvisionedProduct` property accepts a string value of `ALLOWED` or `NOT_ALLOWED`.
+
+```json
+{ "Version" : "2.0", "Properties" : { "TagUpdateOnProvisionedProduct" : "String" } }
+```
+
+* `STACKSET`: Specify the `Parameters` property as follows. You cannot have both a `LAUNCH` and a `STACKSET` constraint. You also cannot have more than one `STACKSET` constraint on an `awsServicecatalogProduct` and `awsServicecatalogPortfolio`. Products with a `STACKSET` constraint will launch an AWS CloudFormation stack set.
+
+```json
+{ "Version" : "String", "Properties" : { "AccountList" : [ "String" ], "RegionList" : [ "String" ], "AdminRole" : "String", "ExecutionRole" : "String" } }
+```
+
+* `TEMPLATE`: Specify the `Rules` property. For more information, see [Template Constraint Rules](http://docs.aws.amazon.com/servicecatalog/latest/adminguide/reference-template_constraint_rules.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Constraint identifier.
+* `owner` - Owner of the constraint.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `3M`)
+- `read` - (Default `10M`)
+- `update` - (Default `3M`)
+- `delete` - (Default `3M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogConstraint` using the constraint ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsServicecatalogConstraint` using the constraint ID.
For example: + +```console +% terraform import aws_servicecatalog_constraint.example cons-nmdkb6cgxfcrs +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_organizations_access.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_organizations_access.html.markdown new file mode 100644 index 00000000000..c0b20dec6f3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_organizations_access.html.markdown @@ -0,0 +1,59 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_organizations_access" +description: |- + Manages Service Catalog Organizations Access +--- + + + +# Resource: aws_servicecatalog_organizations_access + +Manages Service Catalog AWS Organizations Access, a portfolio sharing feature through AWS Organizations. This allows Service Catalog to receive updates on your organization in order to sync your shares with the current structure. This resource will prompt AWS to set `organizations:enableAwsServiceAccess` on your behalf so that your shares can be in sync with any changes in your AWS Organizations structure. + +~> **NOTE:** This resource can only be used by the management account in the organization. In other words, a delegated administrator is not authorized to use the resource. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogOrganizationsAccess } from "./.gen/providers/aws/servicecatalog-organizations-access"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogOrganizationsAccess(this, "example", { + enabled: Token.asBoolean("true"), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `enabled` - (Required) Whether to enable AWS Organizations access. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Account ID for the account using the resource. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `read` - (Default `10M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_portfolio.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_portfolio.html.markdown new file mode 100644 index 00000000000..84862590792 --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_portfolio.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_portfolio" +description: |- + Provides a resource to create a Service Catalog portfolio +--- + + + +# Resource: aws_servicecatalog_portfolio + +Provides a resource to create a Service Catalog Portfolio. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { ServicecatalogPortfolio } from "./.gen/providers/aws/servicecatalog-portfolio"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogPortfolio(this, "portfolio", { + description: "List of my organizations apps", + name: "My App Portfolio", + providerName: "Brett", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the portfolio. +* `description` - (Required) Description of the portfolio +* `providerName` - (Required) Name of the person or organization who owns the portfolio. +* `tags` - (Optional) Tags to apply to the connection. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the Service Catalog Portfolio. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `30M`) +- `read` - (Default `10M`) +- `update` - (Default `30M`) +- `delete` - (Default `30M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Service Catalog Portfolios using the Service Catalog Portfolio `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Service Catalog Portfolios using the Service Catalog Portfolio `id`. For example: + +```console +% terraform import aws_servicecatalog_portfolio.testfolio port-12344321 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_portfolio_share.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_portfolio_share.html.markdown new file mode 100644 index 00000000000..f746fe5802a --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_portfolio_share.html.markdown @@ -0,0 +1,101 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_portfolio_share" +description: |- + Manages a Service Catalog Portfolio Share +--- + + + +# Resource: aws_servicecatalog_portfolio_share + +Manages a Service Catalog Portfolio Share. Shares the specified portfolio with the specified account or organization node. You can share portfolios to an organization, an organizational unit, or a specific account. + +If the portfolio share with the specified account or organization node already exists, using this resource to re-create the share will have no effect and will not return an error. You can then use this resource to update the share. 
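+
+For instance, sharing to an organizational unit rather than a single account only changes `type` and `principalId`. A hand-written sketch (not `cdktf convert` output; the portfolio ID and OU ARN are placeholders):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ServicecatalogPortfolioShare } from "./.gen/providers/aws/servicecatalog-portfolio-share";
+
+class PortfolioShareSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Share an existing portfolio with every account under one OU.
+    new ServicecatalogPortfolioShare(this, "ou_share", {
+      portfolioId: "port-12344321",
+      principalId:
+        "arn:aws:organizations::123456789012:ou/o-exampleorgid/ou-examplerootid-exampleouid",
+      type: "ORGANIZATIONAL_UNIT",
+    });
+  }
+}
+```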
+
+~> **NOTE:** Shares to an organization node can only be created by the management account of an organization or by a delegated administrator. If a delegated admin is de-registered, they can no longer create portfolio shares.
+
+~> **NOTE:** AWSOrganizationsAccess must be enabled in order to create a portfolio share to an organization node.
+
+~> **NOTE:** You can't share a shared resource, including portfolios that contain a shared product.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ServicecatalogPortfolioShare } from "./.gen/providers/aws/servicecatalog-portfolio-share";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ServicecatalogPortfolioShare(this, "example", {
+      portfolioId: Token.asString(awsServicecatalogPortfolioExample.id),
+      principalId: "012128675309",
+      type: "ACCOUNT",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `portfolioId` - (Required) Portfolio identifier.
+* `principalId` - (Required) Identifier of the principal with whom you will share the portfolio. Valid values are AWS account IDs and ARNs of AWS Organizations and organizational units.
+* `type` - (Required) Type of portfolio share. Valid values are `ACCOUNT` (an external account), `ORGANIZATION` (a share to every account in an organization), `ORGANIZATIONAL_UNIT`, and `ORGANIZATION_MEMBER_ACCOUNT` (a share to an account in an organization).
+
+The following arguments are optional:
+
+* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`.
+* `sharePrincipals` - (Optional) Enables or disables Principal sharing when creating the portfolio share. If this flag is not provided, principal sharing is disabled.
+* `shareTagOptions` - (Optional) Whether to enable sharing of `awsServicecatalogTagOption` resources when creating the portfolio share.
+* `waitForAcceptance` - (Optional) Whether to wait (up to the timeout) for the share to be accepted. Organizational shares are automatically accepted.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `accepted` - Whether the shared portfolio is imported by the recipient account. If the recipient is organizational, the share is automatically imported, and the field is always set to true.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `3M`)
+- `read` - (Default `10M`)
+- `update` - (Default `3M`)
+- `delete` - (Default `3M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogPortfolioShare` using the portfolio share ID.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsServicecatalogPortfolioShare` using the portfolio share ID. For example: + +```console +% terraform import aws_servicecatalog_portfolio_share.example port-12344321:ACCOUNT:123456789012 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_principal_portfolio_association.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_principal_portfolio_association.html.markdown new file mode 100644 index 00000000000..2dd162514ad --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_principal_portfolio_association.html.markdown @@ -0,0 +1,88 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_principal_portfolio_association" +description: |- + Manages a Service Catalog Principal Portfolio Association +--- + + + +# Resource: aws_servicecatalog_principal_portfolio_association + +Manages a Service Catalog Principal Portfolio Association. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogPrincipalPortfolioAssociation } from "./.gen/providers/aws/servicecatalog-principal-portfolio-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogPrincipalPortfolioAssociation(this, "example", { + portfolioId: "port-68656c6c6f", + principalArn: "arn:aws:iam::123456789012:user/Eleanor", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `portfolioId` - (Required) Portfolio identifier. +* `principalArn` - (Required) Principal ARN. + +The following arguments are optional: + +* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. +* `principalType` - (Optional) Principal type. Setting this argument empty (e.g., `principal_type = ""`) will result in an error. Valid value is `iam`. Default is `iam`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the association. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3M`) +- `read` - (Default `10M`) +- `delete` - (Default `3M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogPrincipalPortfolioAssociation` using the accept language, principal ARN, and portfolio ID, separated by a comma. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsServicecatalogPrincipalPortfolioAssociation` using the accept language, principal ARN, and portfolio ID, separated by a comma. For example: + +```console +% terraform import aws_servicecatalog_principal_portfolio_association.example en,arn:aws:iam::123456789012:user/Eleanor,port-68656c6c6f +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_product.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_product.html.markdown new file mode 100644 index 00000000000..96f2be1d5ba --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_product.html.markdown @@ -0,0 +1,124 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_product" +description: |- + Manages a Service Catalog Product +--- + + + +# Resource: aws_servicecatalog_product + +Manages a Service Catalog Product. + +~> **NOTE:** The user or role that uses this resources must have the `cloudformation:getTemplate` IAM policy permission. This policy permission is required when using the `templatePhysicalId` argument. + +-> A "provisioning artifact" is also referred to as a "version." A "distributor" is also referred to as a "vendor." + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogProduct } from "./.gen/providers/aws/servicecatalog-product"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogProduct(this, "example", { + name: "example", + owner: "example-owner", + provisioningArtifactParameters: { + templateUrl: + "https://s3.amazonaws.com/cf-templates-ozkq9d3hgiq2-us-east-1/temp1.json", + }, + tags: { + foo: "bar", + }, + type: "CLOUD_FORMATION_TEMPLATE", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the product. +* `owner` - (Required) Owner of the product. +* `provisioningArtifactParameters` - (Required) Configuration block for provisioning artifact (i.e., version) parameters. Detailed below. +* `type` - (Required) Type of product. See [AWS Docs](https://docs.aws.amazon.com/servicecatalog/latest/dg/API_CreateProduct.html#API_CreateProduct_RequestSyntax) for valid list of values. + +The following arguments are optional: + +* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. +* `description` - (Optional) Description of the product. +* `distributor` - (Optional) Distributor (i.e., vendor) of the product. +* `supportDescription` - (Optional) Support information about the product. +* `supportEmail` - (Optional) Contact email for product support. +* `supportUrl` - (Optional) Contact URL for product support. +* `tags` - (Optional) Tags to apply to the product. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### provisioning_artifact_parameters + +This argument supports the following arguments: + +* `description` - (Optional) Description of the provisioning artifact (i.e., version), including how it differs from the previous provisioning artifact. +* `disableTemplateValidation` - (Optional) Whether AWS Service Catalog stops validating the specified provisioning artifact template even if it is invalid. +* `name` - (Optional) Name of the provisioning artifact (for example, `v1`, `v2Beta`). No spaces are allowed. +* `templatePhysicalId` - (Required if `templateUrl` is not provided) Template source as the physical ID of the resource that contains the template. Currently only supports CloudFormation stack ARN. Specify the physical ID as `arn:[partition]:cloudformation:[region]:[account ID]:stack/[stack name]/[resource ID]`. +* `templateUrl` - (Required if `templatePhysicalId` is not provided) Template source as URL of the CloudFormation template in Amazon S3. +* `type` - (Optional) Type of provisioning artifact. See [AWS Docs](https://docs.aws.amazon.com/servicecatalog/latest/dg/API_ProvisioningArtifactProperties.html) for valid list of values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the product. +* `createdTime` - Time when the product was created. +* `hasDefaultPath` - Whether the product has a default path. If the product does not have a default path, call `listLaunchPaths` to disambiguate between paths. Otherwise, `listLaunchPaths` is not required, and the output of ProductViewSummary can be used directly with `describeProvisioningParameters`. +* `id` - Product ID. For example, `prodDnigbtea24Ste`. +* `status` - Status of the product. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `5M`) +- `read` - (Default `10M`) +- `update` - (Default `5M`) +- `delete` - (Default `5M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogProduct` using the product ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsServicecatalogProduct` using the product ID. 
For example: + +```console +% terraform import aws_servicecatalog_product.example prod-dnigbtea24ste +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_product_portfolio_association.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_product_portfolio_association.html.markdown new file mode 100644 index 00000000000..fb2a9b2b4f9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_product_portfolio_association.html.markdown @@ -0,0 +1,86 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_product_portfolio_association" +description: |- + Manages a Service Catalog Product Portfolio Association +--- + + + +# Resource: aws_servicecatalog_product_portfolio_association + +Manages a Service Catalog Product Portfolio Association. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogProductPortfolioAssociation } from "./.gen/providers/aws/servicecatalog-product-portfolio-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogProductPortfolioAssociation(this, "example", { + portfolioId: "port-68656c6c6f", + productId: "prod-dnigbtea24ste", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `portfolioId` - (Required) Portfolio identifier. +* `productId` - (Required) Product identifier. + +The following arguments are optional: + +* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. +* `sourcePortfolioId` - (Optional) Identifier of the source portfolio. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the association. + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3M`) +- `read` - (Default `10M`) +- `delete` - (Default `3M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogProductPortfolioAssociation` using the accept language, portfolio ID, and product ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsServicecatalogProductPortfolioAssociation` using the accept language, portfolio ID, and product ID. 
For example: + +```console +% terraform import aws_servicecatalog_product_portfolio_association.example en:port-68656c6c6f:prod-dnigbtea24ste +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_provisioned_product.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_provisioned_product.html.markdown new file mode 100644 index 00000000000..dce3d1933fe --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_provisioned_product.html.markdown @@ -0,0 +1,161 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_provisioned_product" +description: |- + Manages a Service Catalog Provisioned Product +--- + + + +# Resource: aws_servicecatalog_provisioned_product + +This resource provisions and manages a Service Catalog provisioned product. + +A provisioned product is a resourced instance of a product. For example, provisioning a product based on a CloudFormation template launches a CloudFormation stack and its underlying resources. + +Like this resource, the `awsServicecatalogRecord` data source also provides information about a provisioned product. Although a Service Catalog record provides some overlapping information with this resource, a record is tied to a provisioned product event, such as provisioning, termination, and updating. + +-> **Tip:** If you include conflicted keys as tags, AWS will report an error, "Parameter validation failed: Missing required parameter in Tags[N]:Value". + +-> **Tip:** A "provisioning artifact" is also referred to as a "version." A "distributor" is also referred to as a "vendor." + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogProvisionedProduct } from "./.gen/providers/aws/servicecatalog-provisioned-product"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogProvisionedProduct(this, "example", { + name: "example", + productName: "Example product", + provisioningArtifactName: "Example version", + provisioningParameters: [ + { + key: "foo", + value: "bar", + }, + ], + tags: { + foo: "bar", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) User-friendly name of the provisioned product. + +The following arguments are optional: + +* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). Default value is `en`. +* `ignoreErrors` - (Optional) _Only applies to deleting._ If set to `true`, AWS Service Catalog stops managing the specified provisioned product even if it cannot delete the underlying resources. The default value is `false`. +* `notificationArns` - (Optional) Passed to CloudFormation. The SNS topic ARNs to which to publish stack-related events. +* `pathId` - (Optional) Path identifier of the product. This value is optional if the product has a default path, and required if the product has more than one path. To list the paths for a product, use `awsServicecatalogLaunchPaths`. When required, you must provide `pathId` or `pathName`, but not both. +* `pathName` - (Optional) Name of the path. 
You must provide `pathId` or `pathName`, but not both. +* `productId` - (Optional) Product identifier. For example, `prodAbcdzk7Xy33Qa`. You must provide `productId` or `productName`, but not both. +* `productName` - (Optional) Name of the product. You must provide `productId` or `productName`, but not both. +* `provisioningArtifactId` - (Optional) Identifier of the provisioning artifact. For example, `pa4Abcdjnxjj6Ne`. You must provide the `provisioningArtifactId` or `provisioningArtifactName`, but not both. +* `provisioningArtifactName` - (Optional) Name of the provisioning artifact. You must provide the `provisioningArtifactId` or `provisioningArtifactName`, but not both. +* `provisioningParameters` - (Optional) Configuration block with parameters specified by the administrator that are required for provisioning the product. See details below. +* `retainPhysicalResources` - (Optional) _Only applies to deleting._ Whether to delete the Service Catalog provisioned product but leave the CloudFormation stack, stack set, or the underlying resources of the deleted provisioned product. The default value is `false`. +* `stackSetProvisioningPreferences` - (Optional) Configuration block with information about the provisioning preferences for a stack set. See details below. +* `tags` - (Optional) Tags to apply to the provisioned product. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### provisioning_parameters + +This argument supports the following arguments: + +* `key` - (Required) Parameter key. +* `usePreviousValue` - (Optional) Whether to ignore `value` and keep the previous parameter value. Ignored when initially provisioning a product. +* `value` - (Optional) Parameter value. + +### stack_set_provisioning_preferences + +All of the `stackSetProvisioningPreferences` are only applicable to a `cfnStackset` provisioned product type. + +This argument supports the following arguments: + +* `accounts` - (Optional) One or more AWS accounts that will have access to the provisioned product. The AWS accounts specified should be within the list of accounts in the STACKSET constraint. To get the list of accounts in the STACKSET constraint, use the `awsServicecatalogProvisioningParameters` data source. If no values are specified, the default value is all accounts from the STACKSET constraint. +* `failureToleranceCount` - (Optional) Number of accounts, per region, for which this operation can fail before AWS Service Catalog stops the operation in that region. If the operation is stopped in a region, AWS Service Catalog doesn't attempt the operation in any subsequent regions. You must specify either `failureToleranceCount` or `failureTolerancePercentage`, but not both. The default value is 0 if no value is specified. +* `failureTolerancePercentage` - (Optional) Percentage of accounts, per region, for which this stack operation can fail before AWS Service Catalog stops the operation in that region. If the operation is stopped in a region, AWS Service Catalog doesn't attempt the operation in any subsequent regions. When calculating the number of accounts based on the specified percentage, AWS Service Catalog rounds down to the next whole number. You must specify either `failureToleranceCount` or `failureTolerancePercentage`, but not both. 
+* `maxConcurrencyCount` - (Optional) Maximum number of accounts in which to perform this operation at one time. This is dependent on the value of `failureToleranceCount`. `maxConcurrencyCount` is at most one more than the `failureToleranceCount`. Note that this setting lets you specify the maximum for operations. For large deployments, under certain circumstances the actual number of accounts acted upon concurrently may be lower due to service throttling. You must specify either `maxConcurrencyCount` or `maxConcurrencyPercentage`, but not both.
+* `maxConcurrencyPercentage` - (Optional) Maximum percentage of accounts in which to perform this operation at one time. When calculating the number of accounts based on the specified percentage, AWS Service Catalog rounds down to the next whole number. This is true except in cases where rounding down would result in zero. In this case, AWS Service Catalog sets the number as 1 instead. Note that this setting lets you specify the maximum for operations. For large deployments, under certain circumstances the actual number of accounts acted upon concurrently may be lower due to service throttling. You must specify either `maxConcurrencyCount` or `maxConcurrencyPercentage`, but not both.
+* `regions` - (Optional) One or more AWS Regions where the provisioned product will be available. The specified regions should be within the list of regions from the STACKSET constraint. To get the list of regions in the STACKSET constraint, use the `awsServicecatalogProvisioningParameters` data source. If no values are specified, the default value is all regions from the STACKSET constraint.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the provisioned product.
+* `cloudwatchDashboardNames` - Set of CloudWatch dashboards that were created when provisioning the product.
+* `createdTime` - Time when the provisioned product was created.
+* `id` - Provisioned Product ID.
+* `lastProvisioningRecordId` - Record identifier of the last request performed on this provisioned product of the following types: `provisionedProduct`, `updateProvisionedProduct`, `executeProvisionedProductPlan`, `terminateProvisionedProduct`.
+* `lastRecordId` - Record identifier of the last request performed on this provisioned product.
+* `lastSuccessfulProvisioningRecordId` - Record identifier of the last successful request performed on this provisioned product of the following types: `provisionedProduct`, `updateProvisionedProduct`, `executeProvisionedProductPlan`, `terminateProvisionedProduct`.
+* `launchRoleArn` - ARN of the launch role associated with the provisioned product.
+* `outputs` - The set of outputs for the product created.
+    * `description` - The description of the output.
+    * `key` - The output key.
+    * `value` - The output value.
+* `status` - Current status of the provisioned product. See meanings below.
+* `statusMessage` - Current status message of the provisioned product.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `type` - Type of provisioned product. Valid values are `cfnStack` and `cfnStackset`.
+
+### `status` Meanings
+
+* `available` - Stable state, ready to perform any operation. The most recent operation succeeded and completed.
+* `underChange` - Transitive state.
Operations performed might not have valid results. Wait for an `available` status before performing operations.
+* `tainted` - Stable state, ready to perform any operation. The stack has completed the requested operation but is not exactly what was requested. For example, a request to update to a new version failed and the stack rolled back to the current version.
+* `error` - An unexpected error occurred. The provisioned product exists but the stack is not running. For example, CloudFormation received a parameter value that was not valid and could not launch the stack.
+* `planInProgress` - Transitive state. The plan operations were performed to provision a new product, but resources have not yet been created. After reviewing the list of resources to be created, execute the plan. Wait for an `available` status before performing operations.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30M`)
+- `read` - (Default `10M`)
+- `update` - (Default `30M`)
+- `delete` - (Default `30M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogProvisionedProduct` using the provisioned product ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsServicecatalogProvisionedProduct` using the provisioned product ID. For example:
+
+```console
+% terraform import aws_servicecatalog_provisioned_product.example pp-dnigbtea24ste
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/servicecatalog_provisioning_artifact.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_provisioning_artifact.html.markdown
new file mode 100644
index 00000000000..2ebe6870353
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/servicecatalog_provisioning_artifact.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_provisioning_artifact"
+description: |-
+  Manages a Service Catalog Provisioning Artifact
+---
+
+
+
+# Resource: aws_servicecatalog_provisioning_artifact
+
+Manages a Service Catalog Provisioning Artifact for a specified product.
+
+-> A "provisioning artifact" is also referred to as a "version."
+
+~> **NOTE:** You cannot create a provisioning artifact for a product that was shared with you.
+
+~> **NOTE:** The user or role that uses this resource must have the `cloudformation:getTemplate` IAM policy permission. This policy permission is required when using the `templatePhysicalId` argument.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { ServicecatalogProvisioningArtifact } from "./.gen/providers/aws/servicecatalog-provisioning-artifact"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogProvisioningArtifact(this, "example", { + name: "example", + productId: Token.asString(awsServicecatalogProductExample.id), + templateUrl: + "https://${" + + awsS3BucketExample.bucketRegionalDomainName + + "}/${" + + awsS3ObjectExample.key + + "}", + type: "CLOUD_FORMATION_TEMPLATE", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `productId` - (Required) Identifier of the product. +* `templatePhysicalId` - (Required if `templateUrl` is not provided) Template source as the physical ID of the resource that contains the template. Currently only supports CloudFormation stack ARN. Specify the physical ID as `arn:[partition]:cloudformation:[region]:[account ID]:stack/[stack name]/[resource ID]`. +* `templateUrl` - (Required if `templatePhysicalId` is not provided) Template source as URL of the CloudFormation template in Amazon S3. + +The following arguments are optional: + +* `acceptLanguage` - (Optional) Language code. Valid values: `en` (English), `jp` (Japanese), `zh` (Chinese). The default value is `en`. +* `active` - (Optional) Whether the product version is active. Inactive provisioning artifacts are invisible to end users. End users cannot launch or update a provisioned product from an inactive provisioning artifact. Default is `true`. +* `description` - (Optional) Description of the provisioning artifact (i.e., version), including how it differs from the previous provisioning artifact. +* `disableTemplateValidation` - (Optional) Whether AWS Service Catalog stops validating the specified provisioning artifact template even if it is invalid. +* `guidance` - (Optional) Information set by the administrator to provide guidance to end users about which provisioning artifacts to use. Valid values are `default` and `deprecated`. The default is `default`. Users are able to make updates to a provisioned product of a deprecated version but cannot launch new provisioned products using a deprecated version. +* `name` - (Optional) Name of the provisioning artifact (for example, `v1`, `v2Beta`). No spaces are allowed. +* `type` - (Optional) Type of provisioning artifact. See [AWS Docs](https://docs.aws.amazon.com/servicecatalog/latest/dg/API_ProvisioningArtifactProperties.html) for valid list of values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `createdTime` - Time when the provisioning artifact was created. +* `id` - Provisioning artifact identifier and product identifier separated by a colon. +* `provisioningArtifactId` - Provisioning artifact identifier. +* `status` - Status of the provisioning artifact. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3M`) +- `read` - (Default `10M`) +- `update` - (Default `3M`) +- `delete` - (Default `3M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogProvisioningArtifact` using the provisioning artifact ID and product ID separated by a colon. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsServicecatalogProvisioningArtifact` using the provisioning artifact ID and product ID separated by a colon. For example: + +```console +% terraform import aws_servicecatalog_provisioning_artifact.example pa-ij2b6lusy6dec:prod-el3an0rma3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_service_action.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_service_action.html.markdown new file mode 100644 index 00000000000..adbb7755e36 --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_service_action.html.markdown @@ -0,0 +1,106 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_service_action" +description: |- + Manages a Service Catalog Service Action +--- + + + +# Resource: aws_servicecatalog_service_action + +Manages a Service Catalog self-service action. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogServiceAction } from "./.gen/providers/aws/servicecatalog-service-action"; +interface MyConfig { + version: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new ServicecatalogServiceAction(this, "example", { + definition: { + name: "AWS-RestartEC2Instance", + version: config.version, + }, + description: "Motor generator unit", + name: "MGU", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `definition` - (Required) Self-service action definition configuration block. Detailed below. +* `name` - (Required) Self-service action name. + +The following arguments are optional: + +* `acceptLanguage` - (Optional) Language code. Valid values are `en` (English), `jp` (Japanese), and `zh` (Chinese). Default is `en`. +* `description` - (Optional) Self-service action description. + +### `definition` + +The `definition` configuration block supports the following attributes: + +* `assumeRole` - (Optional) ARN of the role that performs the self-service actions on your behalf. For example, `arn:aws:iam::12345678910:role/actionRole`. To reuse the provisioned product launch role, set to `launchRole`. +* `name` - (Required) Name of the SSM document. For example, `awsRestartEc2Instance`. If you are using a shared SSM document, you must provide the ARN instead of the name. +* `parameters` - (Optional) List of parameters in JSON format. For example: `[{\"name\":\"instanceId\",\"type\":\"target\"}]` or `[{\"name\":\"instanceId\",\"type\":\"textValue\"}]`. +* `type` - (Optional) Service action definition type. Valid value is `ssmAutomation`. Default is `ssmAutomation`. +* `version` - (Required) SSM document version. For example, `1`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier of the service action. 
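+
+The `definition` block documented above only sets `name` and `version` in the example. As a hedged sketch of the remaining `definition` arguments (the role ARN, action name, and parameter values here are illustrative assumptions, not taken from this page), a definition can also pass a launch role and JSON-encoded `parameters`:
+
+```typescript
+// Illustrative sketch only: shows `assumeRole` and the JSON string format
+// expected by `parameters`; the ARN and parameter names are made up.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ServicecatalogServiceAction } from "./.gen/providers/aws/servicecatalog-service-action";
+class ServiceActionSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ServicecatalogServiceAction(this, "restart_with_role", {
+      name: "RestartWithRole",
+      definition: {
+        name: "AWS-RestartEC2Instance",
+        version: "1",
+        // Assumed example ARN; replace with a role that can run the document.
+        assumeRole: "arn:aws:iam::123456789012:role/actionRole",
+        // Parameters are passed as a JSON-encoded list in a single string.
+        parameters: '[{"name":"InstanceId","type":"TARGET"}]',
+      },
+    });
+  }
+}
+```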
+ +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3M`) +- `read` - (Default `10M`) +- `update` - (Default `3M`) +- `delete` - (Default `3M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogServiceAction` using the service action ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsServicecatalogServiceAction` using the service action ID. For example: + +```console +% terraform import aws_servicecatalog_service_action.example act-f1w12eperfslh +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/servicecatalog_tag_option.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_tag_option.html.markdown new file mode 100644 index 00000000000..a0f5550fe09 --- /dev/null +++ b/website/docs/cdktf/typescript/r/servicecatalog_tag_option.html.markdown @@ -0,0 +1,89 @@ +--- +subcategory: "Service Catalog" +layout: "aws" +page_title: "AWS: aws_servicecatalog_tag_option" +description: |- + Manages a Service Catalog Tag Option +--- + + + +# Resource: aws_servicecatalog_tag_option + +Manages a Service Catalog Tag Option. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ServicecatalogTagOption } from "./.gen/providers/aws/servicecatalog-tag-option"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ServicecatalogTagOption(this, "example", { + key: "nyckel", + value: "v\xE4rde", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `key` - (Required) Tag option key. +* `value` - (Required) Tag option value. + +The following arguments are optional: + +* `active` - (Optional) Whether tag option is active. Default is `true`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Identifier (e.g., `tagPjtvagohlyo3M`). +* `ownerId` - AWS account ID of the owner account that created the tag option. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `3M`) +- `read` - (Default `10M`) +- `update` - (Default `3M`) +- `delete` - (Default `3M`) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogTagOption` using the tag option ID. 
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsServicecatalogTagOption` using the tag option ID. For example:
+
+```console
+% terraform import aws_servicecatalog_tag_option.example tag-pjtvagohlyo3m
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/servicecatalog_tag_option_resource_association.html.markdown b/website/docs/cdktf/typescript/r/servicecatalog_tag_option_resource_association.html.markdown
new file mode 100644
index 00000000000..06525c0e09f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/servicecatalog_tag_option_resource_association.html.markdown
@@ -0,0 +1,89 @@
+---
+subcategory: "Service Catalog"
+layout: "aws"
+page_title: "AWS: aws_servicecatalog_tag_option_resource_association"
+description: |-
+  Manages a Service Catalog Tag Option Resource Association
+---
+
+
+
+# Resource: aws_servicecatalog_tag_option_resource_association
+
+Manages a Service Catalog Tag Option Resource Association.
+
+-> **Tip:** A "resource" is either a Service Catalog portfolio or product.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ServicecatalogTagOptionResourceAssociation } from "./.gen/providers/aws/servicecatalog-tag-option-resource-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ServicecatalogTagOptionResourceAssociation(this, "example", {
+      resourceId: "prod-dnigbtea24ste",
+      tagOptionId: "tag-pjtvyakdlyo3m",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `resourceId` - (Required) Resource identifier.
+* `tagOptionId` - (Required) Tag Option identifier.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Identifier of the association.
+* `resourceArn` - ARN of the resource.
+* `resourceCreatedTime` - Creation time of the resource.
+* `resourceDescription` - Description of the resource.
+* `resourceName` - Name of the resource.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `3M`)
+- `read` - (Default `10M`)
+- `delete` - (Default `3M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicecatalogTagOptionResourceAssociation` using the tag option ID and resource ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsServicecatalogTagOptionResourceAssociation` using the tag option ID and resource ID.
For example:
+
+```console
+% terraform import aws_servicecatalog_tag_option_resource_association.example tag-pjtvyakdlyo3m:prod-dnigbtea24ste
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/servicequotas_service_quota.html.markdown b/website/docs/cdktf/typescript/r/servicequotas_service_quota.html.markdown
new file mode 100644
index 00000000000..4d9d0f1bc4a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/servicequotas_service_quota.html.markdown
@@ -0,0 +1,95 @@
+---
+subcategory: "Service Quotas"
+layout: "aws"
+page_title: "AWS: aws_servicequotas_service_quota"
+description: |-
+  Manages an individual Service Quota
+---
+
+
+
+# Resource: aws_servicequotas_service_quota
+
+Manages an individual Service Quota.
+
+~> **NOTE:** Global quotas apply to all AWS regions, but can only be accessed in `usEast1` in the Commercial partition or `usGovWest1` in the GovCloud partition. In other regions, the AWS API will return the error `The request failed because the specified service does not exist.`
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { ServicequotasServiceQuota } from "./.gen/providers/aws/servicequotas-service-quota";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ServicequotasServiceQuota(this, "example", {
+      quotaCode: "L-F678F1CE",
+      serviceCode: "vpc",
+      value: 75,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `quotaCode` - (Required) Code of the service quota to track. For example: `lF678F1Ce`. Available values can be found with the [AWS CLI service-quotas list-service-quotas command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-service-quotas.html).
+* `serviceCode` - (Required) Code of the service to track. For example: `vpc`. Available values can be found with the [AWS CLI service-quotas list-services command](https://docs.aws.amazon.com/cli/latest/reference/service-quotas/list-services.html).
+* `value` - (Required) Float specifying the desired value for the service quota. If the desired value is higher than the current value, a quota increase request is submitted. When a known request is submitted and pending, the value reflects the desired value of the pending request.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `adjustable` - Whether the service quota can be increased.
+* `arn` - Amazon Resource Name (ARN) of the service quota.
+* `defaultValue` - Default value of the service quota.
+* `id` - Service code and quota code, separated by a forward slash (`/`).
+* `quotaName` - Name of the quota.
+* `serviceName` - Name of the service.
+* `usageMetric` - Information about the measurement.
+    * `metricDimensions` - The metric dimensions.
+        * `class`
+        * `resource`
+        * `service`
+        * `type`
+    * `metricName` - The name of the metric.
+    * `metricNamespace` - The namespace of the metric.
+    * `metricStatisticRecommendation` - The metric statistic that AWS recommends you use when determining quota usage.
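+
+The attributes above are read-only values computed from the Service Quotas API. As a hedged illustration of consuming them (reusing the `vpc`/`L-F678F1CE` quota from the example above; the output names are arbitrary), they can be surfaced as stack outputs:
+
+```typescript
+// Illustrative sketch only: exposes computed attributes of the quota
+// resource as Terraform outputs so the default and adjustable values
+// are visible after apply.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { ServicequotasServiceQuota } from "./.gen/providers/aws/servicequotas-service-quota";
+class QuotaOutputsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const quota = new ServicequotasServiceQuota(this, "example", {
+      quotaCode: "L-F678F1CE",
+      serviceCode: "vpc",
+      value: 75,
+    });
+    // Both values are computed by the provider after the quota is read.
+    new TerraformOutput(this, "default_value", { value: quota.defaultValue });
+    new TerraformOutput(this, "adjustable", { value: quota.adjustable });
  }
+}
+```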
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsServicequotasServiceQuota` using the service code and quota code, separated by a forward slash (`/`). For example:
+
+~> **NOTE:** This resource does not require explicit import and will assume management of an existing service quota on Terraform resource creation.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsServicequotasServiceQuota` using the service code and quota code, separated by a forward slash (`/`). For example:
+
+~> **NOTE:** This resource does not require explicit import and will assume management of an existing service quota on Terraform resource creation.
+
+```console
+% terraform import aws_servicequotas_service_quota.example vpc/L-F678F1CE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ses_active_receipt_rule_set.html.markdown b/website/docs/cdktf/typescript/r/ses_active_receipt_rule_set.html.markdown
new file mode 100644
index 00000000000..7133cb02b99
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ses_active_receipt_rule_set.html.markdown
@@ -0,0 +1,72 @@
+---
+subcategory: "SES (Simple Email)"
+layout: "aws"
+page_title: "AWS: aws_ses_active_receipt_rule_set"
+description: |-
+  Provides a resource to designate the active SES receipt rule set
+---
+
+
+
+# Resource: aws_ses_active_receipt_rule_set
+
+Provides a resource to designate the active SES receipt rule set.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SesActiveReceiptRuleSet } from "./.gen/providers/aws/ses-active-receipt-rule-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SesActiveReceiptRuleSet(this, "main", {
+      ruleSetName: "primary-rules",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `ruleSetName` - (Required) The name of the rule set.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The SES receipt rule set name.
+* `arn` - The SES receipt rule set ARN.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import active SES receipt rule sets using the rule set name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import active SES receipt rule sets using the rule set name.
For example:
+
+```console
+% terraform import aws_ses_active_receipt_rule_set.my_rule_set my_rule_set_name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ses_configuration_set.html.markdown b/website/docs/cdktf/typescript/r/ses_configuration_set.html.markdown
new file mode 100644
index 00000000000..cb1634e2140
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ses_configuration_set.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "SES (Simple Email)"
+layout: "aws"
+page_title: "AWS: aws_ses_configuration_set"
+description: |-
+  Provides an SES configuration set
+---
+
+
+
+# Resource: aws_ses_configuration_set
+
+Provides an SES configuration set resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SesConfigurationSet } from "./.gen/providers/aws/ses-configuration-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SesConfigurationSet(this, "test", {
+      name: "some-configuration-set-test",
+    });
+  }
+}
+
+```
+
+### Require TLS Connections
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SesConfigurationSet } from "./.gen/providers/aws/ses-configuration-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SesConfigurationSet(this, "test", {
+      deliveryOptions: {
+        tlsPolicy: "Require",
+      },
+      name: "some-configuration-set-test",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following argument is required:
+
+* `name` - (Required) Name of the configuration set.
+
+The following arguments are optional:
+
+* `deliveryOptions` - (Optional) Whether messages that use the configuration set are required to use TLS. See below.
+* `reputationMetricsEnabled` - (Optional) Whether or not Amazon SES publishes reputation metrics for the configuration set, such as bounce and complaint rates, to Amazon CloudWatch. The default value is `false`.
+* `sendingEnabled` - (Optional) Whether email sending is enabled or disabled for the configuration set. The default value is `true`.
+* `trackingOptions` - (Optional) Domain that is used to redirect email recipients to an Amazon SES-operated domain. See below. **NOTE:** This functionality is best effort.
+
+### delivery_options
+
+* `tlsPolicy` - (Optional) Whether messages that use the configuration set are required to use Transport Layer Security (TLS). If the value is `Require`, messages are only delivered if a TLS connection can be established. If the value is `Optional`, messages can be delivered in plain text if a TLS connection can't be established. Valid values: `Require` or `Optional`. Defaults to `Optional`.
+
+### tracking_options
+
+* `customRedirectDomain` - (Optional) Custom subdomain that is used to redirect email recipients to the Amazon SES event tracking domain. See the sketch below.
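+
+As a minimal sketch of the `trackingOptions` argument (assuming `track.example.com` is a subdomain you own that has been set up to redirect to the SES tracking domain; the name and configuration set name are illustrative):
+
+```typescript
+// Illustrative sketch only: a configuration set that routes open/click
+// tracking links through an assumed custom subdomain.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SesConfigurationSet } from "./.gen/providers/aws/ses-configuration-set";
+class TrackingOptionsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SesConfigurationSet(this, "tracking", {
+      name: "tracking-configuration-set",
+      trackingOptions: {
+        customRedirectDomain: "track.example.com", // assumed subdomain
+      },
+    });
+  }
+}
+```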
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - SES configuration set ARN. +* `id` - SES configuration set name. +* `lastFreshStart` - Date and time at which the reputation metrics for the configuration set were last reset. Resetting these metrics is known as a fresh start. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES Configuration Sets using their `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES Configuration Sets using their `name`. For example: + +```console +% terraform import aws_ses_configuration_set.test some-configuration-set-test +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_domain_dkim.html.markdown b/website/docs/cdktf/typescript/r/ses_domain_dkim.html.markdown new file mode 100644 index 00000000000..bad83555c37 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_domain_dkim.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_dkim" +description: |- + Provides an SES domain DKIM generation resource +--- + + + +# Resource: aws_ses_domain_dkim + +Provides an SES domain DKIM generation resource. + +Domain ownership needs to be confirmed first using [ses_domain_identity Resource](/docs/providers/aws/r/ses_domain_identity.html) + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) Verified domain name to generate DKIM tokens for. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `dkimTokens` - DKIM tokens generated by SES. + These tokens should be used to create CNAME records used to verify SES Easy DKIM. + See below for an example of how this might be achieved + when the domain is hosted in Route 53 and managed by Terraform. + Find out more about verifying domains in Amazon SES + in the [AWS SES docs](http://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim-dns-records.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformCount, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53Record } from "./.gen/providers/aws/route53-record"; +import { SesDomainDkim } from "./.gen/providers/aws/ses-domain-dkim"; +import { SesDomainIdentity } from "./.gen/providers/aws/ses-domain-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SesDomainIdentity(this, "example", { + domain: "example.com", + }); + const awsSesDomainDkimExample = new SesDomainDkim(this, "example_1", { + domain: example.domain, + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsSesDomainDkimExample.overrideLogicalId("example"); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleAmazonsesDkimRecordCount = TerraformCount.of( + Token.asNumber("3") + ); + new Route53Record(this, "example_amazonses_dkim_record", { + name: + Token.asString( + propertyAccess(awsSesDomainDkimExample.dkimTokens, [ + exampleAmazonsesDkimRecordCount.index, + ]) + ) + "._domainkey", + records: [ + Token.asString( + propertyAccess(awsSesDomainDkimExample.dkimTokens, [ + exampleAmazonsesDkimRecordCount.index, + ]) + ) + ".dkim.amazonses.com", + ], + ttl: Token.asNumber("600"), + type: "CNAME", + zoneId: "ABCDEFGHIJ123", + count: exampleAmazonsesDkimRecordCount, + }); + } +} + +``` + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DKIM tokens using the `domain` attribute. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import DKIM tokens using the `domain` attribute. For example: + +```console +% terraform import aws_ses_domain_dkim.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_domain_identity.html.markdown b/website/docs/cdktf/typescript/r/ses_domain_identity.html.markdown new file mode 100644 index 00000000000..891e48f657d --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_domain_identity.html.markdown @@ -0,0 +1,111 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_identity" +description: |- + Provides an SES domain identity resource +--- + + + +# Resource: aws_ses_domain_identity + +Provides an SES domain identity resource + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesDomainIdentity } from "./.gen/providers/aws/ses-domain-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesDomainIdentity(this, "example", { + domain: "example.com", + }); + } +} + +``` + +### With Route53 Record + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Route53Record } from "./.gen/providers/aws/route53-record"; +import { SesDomainIdentity } from "./.gen/providers/aws/ses-domain-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SesDomainIdentity(this, "example", { + domain: "example.com", + }); + new Route53Record(this, "example_amazonses_verification_record", { + name: "_amazonses.example.com", + records: [example.verificationToken], + ttl: Token.asNumber("600"), + type: "TXT", + zoneId: "ABCDEFGHIJ123", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) The domain name to assign to SES + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the domain identity. +* `verificationToken` - A code which when added to the domain as a TXT record + will signal to SES that the owner of the domain has authorised SES to act on + their behalf. The domain identity will be in state "verification pending" + until this is done. See the [With Route53 Record](#with-route53-record) example + for how this might be achieved when the domain is hosted in Route 53 and + managed by Terraform. Find out more about verifying domains in Amazon + SES in the [AWS SES + docs](http://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-domains.html). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES domain identities using the domain name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES domain identities using the domain name. For example: + +```console +% terraform import aws_ses_domain_identity.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_domain_identity_verification.html.markdown b/website/docs/cdktf/typescript/r/ses_domain_identity_verification.html.markdown new file mode 100644 index 00000000000..1ea3f121af3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_domain_identity_verification.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_identity_verification" +description: |- + Waits for and checks successful verification of an SES domain identity. +--- + + + +# Resource: aws_ses_domain_identity_verification + +Represents a successful verification of an SES domain identity. + +Most commonly, this resource is used together with [`awsRoute53Record`](route53_record.html) and +[`awsSesDomainIdentity`](ses_domain_identity.html) to request an SES domain identity, +deploy the required DNS verification records, and wait for verification to complete. + +~> **WARNING:** This resource implements a part of the verification workflow. It does not represent a real-world entity in AWS, therefore changing or deleting this resource on its own has no immediate effect. 
+ +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Route53Record } from "./.gen/providers/aws/route53-record"; +import { SesDomainIdentity } from "./.gen/providers/aws/ses-domain-identity"; +import { SesDomainIdentityVerification } from "./.gen/providers/aws/ses-domain-identity-verification"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SesDomainIdentity(this, "example", { + domain: "example.com", + }); + const exampleAmazonsesVerificationRecord = new Route53Record( + this, + "example_amazonses_verification_record", + { + name: "_amazonses.${" + example.id + "}", + records: [example.verificationToken], + ttl: Token.asNumber("600"), + type: "TXT", + zoneId: Token.asString(awsRoute53ZoneExample.zoneId), + } + ); + new SesDomainIdentityVerification(this, "example_verification", { + dependsOn: [exampleAmazonsesVerificationRecord], + domain: example.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `domain` - (Required) The domain name of the SES domain identity to verify. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The domain name of the domain identity. +* `arn` - The ARN of the domain identity. + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +- `create` - (Default `45M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_domain_mail_from.html.markdown b/website/docs/cdktf/typescript/r/ses_domain_mail_from.html.markdown new file mode 100644 index 00000000000..eab172262e6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_domain_mail_from.html.markdown @@ -0,0 +1,139 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_domain_mail_from" +description: |- + Provides an SES domain MAIL FROM resource +--- + + + +# Resource: aws_ses_domain_mail_from + +Provides an SES domain MAIL FROM resource. + +~> **NOTE:** For the MAIL FROM domain to be fully usable, this resource should be paired with the [aws_ses_domain_identity resource](/docs/providers/aws/r/ses_domain_identity.html). To validate the MAIL FROM domain, a DNS MX record is required. To pass SPF checks, a DNS TXT record may also be required. See the [Amazon SES MAIL FROM documentation](https://docs.aws.amazon.com/ses/latest/dg/mail-from.html) for more information. + +## Example Usage + +### Domain Identity MAIL FROM + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { Route53Record } from "./.gen/providers/aws/route53-record";
+import { SesDomainIdentity } from "./.gen/providers/aws/ses-domain-identity";
+import { SesDomainMailFrom } from "./.gen/providers/aws/ses-domain-mail-from";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SesDomainIdentity(this, "example", {
+      domain: "example.com",
+    });
+    const awsSesDomainMailFromExample = new SesDomainMailFrom(
+      this,
+      "example_1",
+      {
+        domain: example.domain,
+        mailFromDomain: "bounce.${" + example.domain + "}",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSesDomainMailFromExample.overrideLogicalId("example");
+    new Route53Record(this, "example_ses_domain_mail_from_mx", {
+      name: Token.asString(awsSesDomainMailFromExample.mailFromDomain),
+      records: ["10 feedback-smtp.us-east-1.amazonses.com"],
+      ttl: Token.asNumber("600"),
+      type: "MX",
+      zoneId: Token.asString(awsRoute53ZoneExample.id),
+    });
+    new Route53Record(this, "example_ses_domain_mail_from_txt", {
+      name: Token.asString(awsSesDomainMailFromExample.mailFromDomain),
+      records: ["v=spf1 include:amazonses.com -all"],
+      ttl: Token.asNumber("600"),
+      type: "TXT",
+      zoneId: Token.asString(awsRoute53ZoneExample.id),
+    });
+  }
+}
+
+```
+
+### Email Identity MAIL FROM
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SesDomainMailFrom } from "./.gen/providers/aws/ses-domain-mail-from";
+import { SesEmailIdentity } from "./.gen/providers/aws/ses-email-identity";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SesEmailIdentity(this, "example", {
+      email: "user@example.com",
+    });
+    const awsSesDomainMailFromExample = new SesDomainMailFrom(
+      this,
+      "example_1",
+      {
+        domain: example.email,
+        mailFromDomain: "mail.example.com",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSesDomainMailFromExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `domain` - (Required) Verified domain name or email identity to configure the MAIL FROM domain for.
+* `mailFromDomain` - (Required) Subdomain (of the verified domain) to use as the MAIL FROM address. Required for DMARC validation.
+
+The following arguments are optional:
+
+* `behaviorOnMxFailure` - (Optional) The action that you want Amazon SES to take if it cannot successfully read the required MX record when you send an email. Defaults to `useDefaultValue`. See the [SES API documentation](https://docs.aws.amazon.com/ses/latest/APIReference/API_SetIdentityMailFromDomain.html) for more information.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The domain name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import MAIL FROM domain using the `domain` attribute.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import MAIL FROM domain using the `domain` attribute. For example: + +```console +% terraform import aws_ses_domain_mail_from.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_email_identity.html.markdown b/website/docs/cdktf/typescript/r/ses_email_identity.html.markdown new file mode 100644 index 00000000000..40dd167c84d --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_email_identity.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_email_identity" +description: |- + Provides an SES email identity resource +--- + + + +# Resource: aws_ses_email_identity + +Provides an SES email identity resource + +## Argument Reference + +This resource supports the following arguments: + +* `email` - (Required) The email address to assign to SES. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the email identity. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesEmailIdentity } from "./.gen/providers/aws/ses-email-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesEmailIdentity(this, "example", { + email: "email@example.com", + }); + } +} + +``` + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES email identities using the email address. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES email identities using the email address. 
For example: + +```console +% terraform import aws_ses_email_identity.example email@example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_event_destination.html.markdown b/website/docs/cdktf/typescript/r/ses_event_destination.html.markdown new file mode 100644 index 00000000000..8f7054efb30 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_event_destination.html.markdown @@ -0,0 +1,164 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_event_destination" +description: |- + Provides an SES event destination +--- + + + +# Resource: aws_ses_event_destination + +Provides an SES event destination + +## Example Usage + +### CloudWatch Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesEventDestination } from "./.gen/providers/aws/ses-event-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesEventDestination(this, "cloudwatch", { + cloudwatchDestination: [ + { + defaultValue: "default", + dimensionName: "dimension", + valueSource: "emailHeader", + }, + ], + configurationSetName: example.name, + enabled: true, + matchingTypes: ["bounce", "send"], + name: "event-destination-cloudwatch", + }); + } +} + +``` + +### Kinesis Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesEventDestination } from "./.gen/providers/aws/ses-event-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesEventDestination(this, "kinesis", { + configurationSetName: example.name, + enabled: true, + kinesisDestination: { + roleArn: Token.asString(awsIamRoleExample.arn), + streamArn: Token.asString(awsKinesisFirehoseDeliveryStreamExample.arn), + }, + matchingTypes: ["bounce", "send"], + name: "event-destination-kinesis", + }); + } +} + +``` + +### SNS Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { SesEventDestination } from "./.gen/providers/aws/ses-event-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesEventDestination(this, "sns", { + configurationSetName: example.name, + enabled: true, + matchingTypes: ["bounce", "send"], + name: "event-destination-sns", + snsDestination: { + topicArn: Token.asString(awsSnsTopicExample.arn), + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the event destination +* `configurationSetName` - (Required) The name of the configuration set +* `enabled` - (Optional) If true, the event destination will be enabled +* `matchingTypes` - (Required) A list of matching types. May be any of `"send"`, `"reject"`, `"bounce"`, `"complaint"`, `"delivery"`, `"open"`, `"click"`, or `"renderingFailure"`. +* `cloudwatchDestination` - (Optional) CloudWatch destination for the events +* `kinesisDestination` - (Optional) Send the events to a kinesis firehose destination +* `snsDestination` - (Optional) Send the events to an SNS Topic destination + +~> **NOTE:** You can specify `"cloudwatchDestination"` or `"kinesisDestination"` but not both + +### cloudwatch_destination Argument Reference + +* `defaultValue` - (Required) The default value for the event +* `dimensionName` - (Required) The name for the dimension +* `valueSource` - (Required) The source for the value. May be any of `"messageTag"`, `"emailHeader"` or `"linkTag"`. + +### kinesis_destination Argument Reference + +* `streamArn` - (Required) The ARN of the Kinesis Stream +* `roleArn` - (Required) The ARN of the role that has permissions to access the Kinesis Stream + +### sns_destination Argument Reference + +* `topicArn` - (Required) The ARN of the SNS topic + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The SES event destination name. +* `arn` - The SES event destination ARN. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES event destinations using `configurationSetName` together with the event destination's `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES event destinations using `configurationSetName` together with the event destination's `name`. 
For example:
+
+```console
+% terraform import aws_ses_event_destination.sns some-configuration-set-test/event-destination-sns
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ses_identity_notification_topic.markdown b/website/docs/cdktf/typescript/r/ses_identity_notification_topic.markdown
new file mode 100644
index 00000000000..59d2952a0ef
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ses_identity_notification_topic.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "SES (Simple Email)"
+layout: "aws"
+page_title: "AWS: aws_ses_identity_notification_topic"
+description: |-
+  Setting AWS SES Identity Notification Topic
+---
+
+
+
+# Resource: aws_ses_identity_notification_topic
+
+Resource for managing SES Identity Notification Topics.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SesIdentityNotificationTopic } from "./.gen/providers/aws/ses-identity-notification-topic";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SesIdentityNotificationTopic(this, "test", {
+      identity: example.domain,
+      includeOriginalHeaders: true,
+      notificationType: "Bounce",
+      topicArn: Token.asString(awsSnsTopicExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `topicArn` - (Optional) The Amazon Resource Name (ARN) of the Amazon SNS topic. Can be set to `""` (an empty string) to disable publishing.
+* `notificationType` - (Required) The type of notifications that will be published to the specified Amazon SNS topic. Valid values: `Bounce`, `Complaint` or `Delivery`.
+* `identity` - (Required) The identity for which the Amazon SNS topic will be set. You can specify an identity by using its name or by using its Amazon Resource Name (ARN).
+* `includeOriginalHeaders` - (Optional) Whether SES should include original email headers in SNS notifications of this type. `false` by default.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Identity Notification Topics using the ID of the record. The ID is made up as `identity|type` where `identity` is the SES Identity and `type` is the Notification Type. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Identity Notification Topics using the ID of the record. The ID is made up as `identity|type` where `identity` is the SES Identity and `type` is the Notification Type.
For example: + +```console +% terraform import aws_ses_identity_notification_topic.test 'example.com|Bounce' +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_identity_policy.html.markdown b/website/docs/cdktf/typescript/r/ses_identity_policy.html.markdown new file mode 100644 index 00000000000..78b831c2a94 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_identity_policy.html.markdown @@ -0,0 +1,104 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_identity_policy" +description: |- + Manages a SES Identity Policy +--- + + + +# Resource: aws_ses_identity_policy + +Manages a SES Identity Policy. More information about SES Sending Authorization Policies can be found in the [SES Developer Guide](https://docs.aws.amazon.com/ses/latest/DeveloperGuide/sending-authorization-policies.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { SesDomainIdentity } from "./.gen/providers/aws/ses-domain-identity"; +import { SesIdentityPolicy } from "./.gen/providers/aws/ses-identity-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SesDomainIdentity(this, "example", { + domain: "example.com", + }); + const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument( + this, + "example_1", + { + statement: [ + { + actions: ["SES:SendEmail", "SES:SendRawEmail"], + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [example.arn], + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentExample.overrideLogicalId("example"); + const awsSesIdentityPolicyExample = new SesIdentityPolicy( + this, + "example_2", + { + identity: example.arn, + name: "example", + policy: Token.asString(dataAwsIamPolicyDocumentExample.json), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSesIdentityPolicyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `identity` - (Required) Name or Amazon Resource Name (ARN) of the SES Identity. +* `name` - (Required) Name of the policy. +* `policy` - (Required) JSON string of the policy. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES Identity Policies using the identity and policy name, separated by a pipe character (`|`). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES Identity Policies using the identity and policy name, separated by a pipe character (`|`). For example: + +```console +% terraform import aws_ses_identity_policy.example 'example.com|example' +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_receipt_filter.html.markdown b/website/docs/cdktf/typescript/r/ses_receipt_filter.html.markdown new file mode 100644 index 00000000000..60c976e5c43 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_receipt_filter.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_receipt_filter" +description: |- + Provides an SES receipt filter +--- + + + +# Resource: aws_ses_receipt_filter + +Provides an SES receipt filter resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesReceiptFilter } from "./.gen/providers/aws/ses-receipt-filter"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesReceiptFilter(this, "filter", { + cidr: "10.10.10.10", + name: "block-spammer", + policy: "Block", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the filter +* `cidr` - (Required) The IP address or address range to filter, in CIDR notation +* `policy` - (Required) Block or Allow + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The SES receipt filter name. +* `arn` - The SES receipt filter ARN. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES Receipt Filter using their `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES Receipt Filter using their `name`. 
For example: + +```console +% terraform import aws_ses_receipt_filter.test some-filter +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_receipt_rule.html.markdown b/website/docs/cdktf/typescript/r/ses_receipt_rule.html.markdown new file mode 100644 index 00000000000..a14223f9f0c --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_receipt_rule.html.markdown @@ -0,0 +1,150 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_receipt_rule" +description: |- + Provides an SES receipt rule resource +--- + + + +# Resource: aws_ses_receipt_rule + +Provides an SES receipt rule resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesReceiptRule } from "./.gen/providers/aws/ses-receipt-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesReceiptRule(this, "store", { + addHeaderAction: [ + { + headerName: "Custom-Header", + headerValue: "Added by SES", + position: 1, + }, + ], + enabled: true, + name: "store", + recipients: ["karen@example.com"], + ruleSetName: "default-rule-set", + s3Action: [ + { + bucketName: "emails", + position: 2, + }, + ], + scanEnabled: true, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the rule +* `ruleSetName` - (Required) The name of the rule set +* `after` - (Optional) The name of the rule to place this rule after +* `enabled` - (Optional) If true, the rule will be enabled +* `recipients` - (Optional) A list of email addresses +* `scanEnabled` - (Optional) If true, incoming emails will be scanned for spam and viruses +* `tlsPolicy` - (Optional) `require` or `optional` +* `addHeaderAction` - (Optional) A list of Add Header Action blocks. Documented below. +* `bounceAction` - (Optional) A list of Bounce Action blocks. Documented below. +* `lambdaAction` - (Optional) A list of Lambda Action blocks. Documented below. +* `s3Action` - (Optional) A list of S3 Action blocks. Documented below. +* `snsAction` - (Optional) A list of SNS Action blocks. Documented below. +* `stopAction` - (Optional) A list of Stop Action blocks. Documented below. +* `workmailAction` - (Optional) A list of WorkMail Action blocks. Documented below. 
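+
+The `after` argument orders rules within a rule set, while each action block's `position` orders actions within a single rule. The following hand-written sketch (not `cdktf convert` output; the rule set name and Lambda function ARN are placeholders) shows a second rule that runs after the `store` rule above:
+
+```typescript
+// Hypothetical sketch: a second rule evaluated after the "store" rule above.
+// The rule set name and Lambda function ARN are placeholders.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SesReceiptRule } from "./.gen/providers/aws/ses-receipt-rule";
+class MySketchStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SesReceiptRule(this, "notify", {
+      after: "store", // evaluated after the rule named "store" in this rule set
+      enabled: true,
+      lambdaAction: [
+        {
+          functionArn: "arn:aws:lambda:us-east-1:123456789012:function:example",
+          invocationType: "Event",
+          position: 1, // first (and only) action within this rule
+        },
+      ],
+      name: "notify",
+      ruleSetName: "default-rule-set",
+    });
+  }
+}
+
+```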
+
+Add header actions support the following:
+
+* `headerName` - (Required) The name of the header to add
+* `headerValue` - (Required) The value of the header to add
+* `position` - (Required) The position of the action in the receipt rule
+
+Bounce actions support the following:
+
+* `message` - (Required) The message to send
+* `sender` - (Required) The email address of the sender
+* `smtpReplyCode` - (Required) The RFC 5321 SMTP reply code
+* `statusCode` - (Optional) The RFC 3463 SMTP enhanced status code
+* `topicArn` - (Optional) The ARN of an SNS topic to notify
+* `position` - (Required) The position of the action in the receipt rule
+
+Lambda actions support the following:
+
+* `functionArn` - (Required) The ARN of the Lambda function to invoke
+* `invocationType` - (Optional) `Event` or `RequestResponse`
+* `topicArn` - (Optional) The ARN of an SNS topic to notify
+* `position` - (Required) The position of the action in the receipt rule
+
+S3 actions support the following:
+
+* `bucketName` - (Required) The name of the S3 bucket
+* `kmsKeyArn` - (Optional) The ARN of the KMS key
+* `objectKeyPrefix` - (Optional) The key prefix of the S3 bucket
+* `topicArn` - (Optional) The ARN of an SNS topic to notify
+* `position` - (Required) The position of the action in the receipt rule
+
+SNS actions support the following:
+
+* `topicArn` - (Required) The ARN of an SNS topic to notify
+* `position` - (Required) The position of the action in the receipt rule
+* `encoding` - (Optional) The encoding to use for the email within the Amazon SNS notification. Default value is `UTF-8`.
+
+Stop actions support the following:
+
+* `scope` - (Required) The scope to apply. The only acceptable value is `RuleSet`.
+* `topicArn` - (Optional) The ARN of an SNS topic to notify
+* `position` - (Required) The position of the action in the receipt rule
+
+WorkMail actions support the following:
+
+* `organizationArn` - (Required) The ARN of the WorkMail organization
+* `topicArn` - (Optional) The ARN of an SNS topic to notify
+* `position` - (Required) The position of the action in the receipt rule
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The SES receipt rule name.
+* `arn` - The SES receipt rule ARN.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES receipt rules using the ruleset name and rule name separated by `:`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SES receipt rules using the ruleset name and rule name separated by `:`.
For example: + +```console +% terraform import aws_ses_receipt_rule.my_rule my_rule_set:my_rule +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_receipt_rule_set.html.markdown b/website/docs/cdktf/typescript/r/ses_receipt_rule_set.html.markdown new file mode 100644 index 00000000000..26f2dc80195 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_receipt_rule_set.html.markdown @@ -0,0 +1,72 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_receipt_rule_set" +description: |- + Provides an SES receipt rule set resource +--- + + + +# Resource: aws_ses_receipt_rule_set + +Provides an SES receipt rule set resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesReceiptRuleSet } from "./.gen/providers/aws/ses-receipt-rule-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesReceiptRuleSet(this, "main", { + ruleSetName: "primary-rules", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `ruleSetName` - (Required) Name of the rule set. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - SES receipt rule set ARN. +* `id` - SES receipt rule set name. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES receipt rule sets using the rule set name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES receipt rule sets using the rule set name. For example: + +```console +% terraform import aws_ses_receipt_rule_set.my_rule_set my_rule_set_name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ses_template.html.markdown b/website/docs/cdktf/typescript/r/ses_template.html.markdown new file mode 100644 index 00000000000..855539ca32c --- /dev/null +++ b/website/docs/cdktf/typescript/r/ses_template.html.markdown @@ -0,0 +1,78 @@ +--- +subcategory: "SES (Simple Email)" +layout: "aws" +page_title: "AWS: aws_ses_template" +description: |- + Provides a resource to create a SES template +--- + + + +# Resource: aws_ses_template + +Provides a resource to create a SES template. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SesTemplate } from "./.gen/providers/aws/ses-template"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SesTemplate(this, "MyTemplate", { + html: "
<h1>Hello {{name}},</h1><p>Your favorite animal is {{favoriteanimal}}.</p>
", + name: "MyTemplate", + subject: "Greetings, {{name}}!", + text: "Hello {{name}},\r\nYour favorite animal is {{favoriteanimal}}.\n", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the template. Cannot exceed 64 characters. You will refer to this name when you send email. +* `html` - (Optional) The HTML body of the email. Must be less than 500KB in size, including both the text and HTML parts. +* `subject` - (Optional) The subject line of the email. +* `text` - (Optional) The email body that will be visible to recipients whose email clients do not display HTML. Must be less than 500KB in size, including both the text and HTML parts. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the SES template +* `id` - The name of the SES template + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SES templates using the template name. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SES templates using the template name. For example: + +```console +% terraform import aws_ses_template.MyTemplate MyTemplate +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sesv2_configuration_set.html.markdown b/website/docs/cdktf/typescript/r/sesv2_configuration_set.html.markdown new file mode 100644 index 00000000000..e11266a58d4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sesv2_configuration_set.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_configuration_set" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set. +--- + + + +# Resource: aws_sesv2_configuration_set + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2ConfigurationSet } from "./.gen/providers/aws/sesv2-configuration-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Sesv2ConfigurationSet(this, "example", { + configurationSetName: "example", + deliveryOptions: { + tlsPolicy: "REQUIRE", + }, + reputationOptions: { + reputationMetricsEnabled: false, + }, + sendingOptions: { + sendingEnabled: true, + }, + suppressionOptions: { + suppressedReasons: ["BOUNCE", "COMPLAINT"], + }, + trackingOptions: { + customRedirectDomain: "example.com", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `configurationSetName` - (Required) The name of the configuration set. +* `deliveryOptions` - (Optional) An object that defines the dedicated IP pool that is used to send emails that you send using the configuration set. 
+* `reputationOptions` - (Optional) An object that defines whether or not Amazon SES collects reputation metrics for the emails that you send that use the configuration set.
+* `sendingOptions` - (Optional) An object that defines whether or not Amazon SES can send email that you send using the configuration set.
+* `suppressionOptions` - (Optional) An object that contains information about the suppression list preferences for your account.
+* `tags` - (Optional) A map of tags to assign to the service. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `trackingOptions` - (Optional) An object that defines the open and click tracking options for emails that you send using the configuration set.
+* `vdmOptions` - (Optional) An object that defines the VDM settings that apply to emails that you send using the configuration set.
+
+### delivery_options
+
+This argument supports the following arguments:
+
+* `sendingPoolName` - (Optional) The name of the dedicated IP pool to associate with the configuration set.
+* `tlsPolicy` - (Optional) Specifies whether messages that use the configuration set are required to use Transport Layer Security (TLS). Valid values: `REQUIRE`, `OPTIONAL`.
+
+### reputation_options
+
+This argument supports the following arguments:
+
+* `reputationMetricsEnabled` - (Optional) If `true`, tracking of reputation metrics is enabled for the configuration set. If `false`, tracking of reputation metrics is disabled for the configuration set.
+
+### sending_options
+
+This argument supports the following arguments:
+
+* `sendingEnabled` - (Optional) If `true`, email sending is enabled for the configuration set. If `false`, email sending is disabled for the configuration set.
+
+### suppression_options
+
+* `suppressedReasons` - (Optional) A list that contains the reasons that email addresses are automatically added to the suppression list for your account. Valid values: `BOUNCE`, `COMPLAINT`.
+
+### tracking_options
+
+* `customRedirectDomain` - (Required) The domain to use for tracking open and click events.
+
+### vdm_options
+
+* `dashboardOptions` - (Optional) Specifies additional settings for your VDM configuration as applicable to the Dashboard.
+* `guardianOptions` - (Optional) Specifies additional settings for your VDM configuration as applicable to the Guardian.
+
+### dashboard_options
+
+* `engagementMetrics` - (Optional) Specifies the status of your VDM engagement metrics collection. Valid values: `ENABLED`, `DISABLED`.
+
+### guardian_options
+
+* `optimizedSharedDelivery` - (Optional) Specifies the status of your VDM optimized shared delivery. Valid values: `ENABLED`, `DISABLED`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Configuration Set.
+* `reputationOptions` - An object that defines whether or not Amazon SES collects reputation metrics for the emails that you send that use the configuration set.
    * `lastFreshStart` - The date and time (in Unix time) when the reputation metrics were last given a fresh start. When your account is given a fresh start, your reputation metrics are calculated starting from the date of the fresh start.
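+
+The `vdmOptions` block documented above is not covered by the Basic Usage example. The following hand-written sketch (not `cdktf convert` output; the construct and configuration set names are illustrative) enables both the Dashboard and Guardian features:
+
+```typescript
+// Hypothetical sketch: a configuration set with both VDM features enabled.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Sesv2ConfigurationSet } from "./.gen/providers/aws/sesv2-configuration-set";
+class MySketchStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Sesv2ConfigurationSet(this, "vdm_example", {
+      configurationSetName: "vdm-example",
+      vdmOptions: {
+        dashboardOptions: {
+          engagementMetrics: "ENABLED", // collect VDM engagement metrics
+        },
+        guardianOptions: {
+          optimizedSharedDelivery: "ENABLED", // enable optimized shared delivery
+        },
+      },
+    });
+  }
+}
+
+```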
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Configuration Set using the `configurationSetName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Configuration Set using the `configurationSetName`. For example: + +```console +% terraform import aws_sesv2_configuration_set.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sesv2_configuration_set_event_destination.html.markdown b/website/docs/cdktf/typescript/r/sesv2_configuration_set_event_destination.html.markdown new file mode 100644 index 00000000000..16555961f7d --- /dev/null +++ b/website/docs/cdktf/typescript/r/sesv2_configuration_set_event_destination.html.markdown @@ -0,0 +1,265 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_configuration_set_event_destination" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set Event Destination. +--- + + + +# Resource: aws_sesv2_configuration_set_event_destination + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Configuration Set Event Destination. + +## Example Usage + +### Cloud Watch Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2ConfigurationSet } from "./.gen/providers/aws/sesv2-configuration-set"; +import { Sesv2ConfigurationSetEventDestination } from "./.gen/providers/aws/sesv2-configuration-set-event-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Sesv2ConfigurationSet(this, "example", { + configurationSetName: "example", + }); + const awsSesv2ConfigurationSetEventDestinationExample = + new Sesv2ConfigurationSetEventDestination(this, "example_1", { + configurationSetName: example.configurationSetName, + eventDestination: { + cloudWatchDestination: { + dimensionConfiguration: [ + { + defaultDimensionValue: "example", + dimensionName: "example", + dimensionValueSource: "MESSAGE_TAG", + }, + ], + }, + enabled: true, + matchingEventTypes: ["SEND"], + }, + eventDestinationName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSesv2ConfigurationSetEventDestinationExample.overrideLogicalId( + "example" + ); + } +} + +``` + +### Kinesis Firehose Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Sesv2ConfigurationSet } from "./.gen/providers/aws/sesv2-configuration-set"; +import { Sesv2ConfigurationSetEventDestination } from "./.gen/providers/aws/sesv2-configuration-set-event-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Sesv2ConfigurationSet(this, "example", { + configurationSetName: "example", + }); + const awsSesv2ConfigurationSetEventDestinationExample = + new Sesv2ConfigurationSetEventDestination(this, "example_1", { + configurationSetName: example.configurationSetName, + eventDestination: { + enabled: true, + kinesisFirehoseDestination: { + deliveryStreamArn: Token.asString( + awsKinesisFirehoseDeliveryStreamExample.arn + ), + iamRoleArn: Token.asString(awsIamRoleExample.arn), + }, + matchingEventTypes: ["SEND"], + }, + eventDestinationName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSesv2ConfigurationSetEventDestinationExample.overrideLogicalId( + "example" + ); + } +} + +``` + +### Pinpoint Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2ConfigurationSet } from "./.gen/providers/aws/sesv2-configuration-set"; +import { Sesv2ConfigurationSetEventDestination } from "./.gen/providers/aws/sesv2-configuration-set-event-destination"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Sesv2ConfigurationSet(this, "example", { + configurationSetName: "example", + }); + const awsSesv2ConfigurationSetEventDestinationExample = + new Sesv2ConfigurationSetEventDestination(this, "example_1", { + configurationSetName: example.configurationSetName, + eventDestination: { + enabled: true, + matchingEventTypes: ["SEND"], + pinpointDestination: { + applicationArn: Token.asString(awsPinpointAppExample.arn), + }, + }, + eventDestinationName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSesv2ConfigurationSetEventDestinationExample.overrideLogicalId( + "example" + ); + } +} + +``` + +### SNS Destination + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { Sesv2ConfigurationSet } from "./.gen/providers/aws/sesv2-configuration-set";
+import { Sesv2ConfigurationSetEventDestination } from "./.gen/providers/aws/sesv2-configuration-set-event-destination";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Sesv2ConfigurationSet(this, "example", {
+      configurationSetName: "example",
+    });
+    const awsSesv2ConfigurationSetEventDestinationExample =
+      new Sesv2ConfigurationSetEventDestination(this, "example_1", {
+        configurationSetName: example.configurationSetName,
+        eventDestination: {
+          enabled: true,
+          matchingEventTypes: ["SEND"],
+          snsDestination: {
+            topicArn: Token.asString(awsSnsTopicExample.arn),
+          },
+        },
+        eventDestinationName: "example",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSesv2ConfigurationSetEventDestinationExample.overrideLogicalId(
+      "example"
+    );
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `configurationSetName` - (Required) The name of the configuration set.
+* `eventDestination` - (Required) An object that defines the event destination. See [event_destination](#event_destination) below.
+* `eventDestinationName` - (Required) A name that identifies the event destination within the configuration set.
+
+### event_destination
+
+The following arguments are required:
+
+* `matchingEventTypes` - (Required) An array that specifies which events the Amazon SES API v2 should send to the destinations. Valid values: `SEND`, `REJECT`, `BOUNCE`, `COMPLAINT`, `DELIVERY`, `OPEN`, `CLICK`, `RENDERING_FAILURE`, `DELIVERY_DELAY`, `SUBSCRIPTION`.
+
+The following arguments are optional:
+
+* `cloudWatchDestination` - (Optional) An object that defines an Amazon CloudWatch destination for email events. See [cloud_watch_destination](#cloud_watch_destination) below.
+* `enabled` - (Optional) When the event destination is enabled, the specified event types are sent to the destinations. Default: `false`.
+* `kinesisFirehoseDestination` - (Optional) An object that defines an Amazon Kinesis Data Firehose destination for email events. See [kinesis_firehose_destination](#kinesis_firehose_destination) below.
+* `pinpointDestination` - (Optional) An object that defines an Amazon Pinpoint project destination for email events. See [pinpoint_destination](#pinpoint_destination) below.
+* `snsDestination` - (Optional) An object that defines an Amazon SNS destination for email events. See [sns_destination](#sns_destination) below.
+
+### cloud_watch_destination
+
+The following arguments are required:
+
+* `dimensionConfiguration` - (Required) An array of objects that define the dimensions to use when you send email events to Amazon CloudWatch. See [dimension_configuration](#dimension_configuration) below.
+
+### dimension_configuration
+
+The following arguments are required:
+
+* `defaultDimensionValue` - (Required) The default value of the dimension that is published to Amazon CloudWatch if you don't provide the value of the dimension when you send an email.
+* `dimensionName` - (Required) The name of an Amazon CloudWatch dimension associated with an email sending metric.
+* `dimensionValueSource` - (Required) The location where the Amazon SES API v2 finds the value of a dimension to publish to Amazon CloudWatch. Valid values: `MESSAGE_TAG`, `EMAIL_HEADER`, `LINK_TAG`.
+ +### kinesis_firehose_destination + +The following arguments are required: + +* `deliveryStreamArn` - (Required) The Amazon Resource Name (ARN) of the Amazon Kinesis Data Firehose stream that the Amazon SES API v2 sends email events to. +* `iamRoleArn` - (Required) The Amazon Resource Name (ARN) of the IAM role that the Amazon SES API v2 uses to send email events to the Amazon Kinesis Data Firehose stream. + +### pinpoint_destination + +The following arguments are required: + +* `pinpointApplicationArn` - (Required) The Amazon Resource Name (ARN) of the Amazon Pinpoint project to send email events to. + +### sns_destination + +The following arguments are required: + +* `topicArn` - (Required) The Amazon Resource Name (ARN) of the Amazon SNS topic to publish email events to. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A pipe-delimited string combining `configurationSetName` and `eventDestinationName`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Configuration Set Event Destination using the `id` (`configurationSetName|eventDestinationName`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Configuration Set Event Destination using the `id` (`configurationSetName|eventDestinationName`). For example: + +```console +% terraform import aws_sesv2_configuration_set_event_destination.example example_configuration_set|example_event_destination +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sesv2_contact_list.html.markdown b/website/docs/cdktf/typescript/r/sesv2_contact_list.html.markdown new file mode 100644 index 00000000000..2bdc804b5ee --- /dev/null +++ b/website/docs/cdktf/typescript/r/sesv2_contact_list.html.markdown @@ -0,0 +1,123 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_contact_list" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Contact List. +--- + + + +# Resource: aws_sesv2_contact_list + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Contact List. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2ContactList } from "./.gen/providers/aws/sesv2-contact-list"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Sesv2ContactList(this, "example", { + contactListName: "example", + }); + } +} + +``` + +### Extended Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { Sesv2ContactList } from "./.gen/providers/aws/sesv2-contact-list"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Sesv2ContactList(this, "example", { + contactListName: "example", + description: "description", + topic: [ + { + defaultSubscriptionStatus: "OPT_IN", + description: "topic description", + displayName: "Example Topic", + topicName: "example-topic", + }, + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `contactListName` - (Required) The name of the contact list. + +The following arguments are optional: + +* `description` - (Optional) A description of what the contact list is about. +* `tags` - (Optional) Key-value map of resource tags for the contact list. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `topic` - (Optional) Configuration block(s) with topic for the contact list. Detailed below. + +### topic + +The following arguments are required: + +* `defaultSubscriptionStatus` - (Required) The default subscription status to be applied to a contact if the contact has not noted their preference for subscribing to a topic. +* `displayName` - (Required) The name of the topic the contact will see. +* `topicName` - (Required) The name of the topic. + +The following arguments are optional: + +* `description` - (Optional) A description of what the topic is about, which the contact will see. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `createdTimestamp` - A timestamp noting when the contact list was created in ISO 8601 format. +* `lastUpdatedTimestamp` - A timestamp noting the last time the contact list was updated in ISO 8601 format. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Contact List using the `exampleIdArg`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Contact List using the `exampleIdArg`. For example: + +```console +% terraform import aws_sesv2_contact_list.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sesv2_dedicated_ip_assignment.html.markdown b/website/docs/cdktf/typescript/r/sesv2_dedicated_ip_assignment.html.markdown new file mode 100644 index 00000000000..520289aa854 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sesv2_dedicated_ip_assignment.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_dedicated_ip_assignment" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Assignment. +--- + + + +# Resource: aws_sesv2_dedicated_ip_assignment + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Assignment. + +This resource is used with "Standard" dedicated IP addresses. 
This includes addresses [requested and relinquished manually](https://docs.aws.amazon.com/ses/latest/dg/dedicated-ip-case.html) via an AWS support case, or [Bring Your Own IP](https://docs.aws.amazon.com/ses/latest/dg/dedicated-ip-byo.html) addresses. Once no longer assigned, this resource returns the IP to the [`ses-default-dedicated-pool`](https://docs.aws.amazon.com/ses/latest/dg/managing-ip-pools.html), managed by AWS.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Sesv2DedicatedIpAssignment } from "./.gen/providers/aws/sesv2-dedicated-ip-assignment";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Sesv2DedicatedIpAssignment(this, "example", {
+      destinationPoolName: "my-pool",
+      ip: "0.0.0.0",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `ip` - (Required) Dedicated IP address.
+* `destinationPoolName` - (Required) Name of the dedicated IP pool to which the IP address is assigned.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A comma-separated string made up of `ip` and `destinationPoolName`.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Dedicated IP Assignment using the `id`, which is a comma-separated string made up of `ip` and `destinationPoolName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SESv2 (Simple Email V2) Dedicated IP Assignment using the `id`, which is a comma-separated string made up of `ip` and `destinationPoolName`. For example:
+
+```console
+% terraform import aws_sesv2_dedicated_ip_assignment.example "0.0.0.0,my-pool"
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sesv2_dedicated_ip_pool.html.markdown b/website/docs/cdktf/typescript/r/sesv2_dedicated_ip_pool.html.markdown
new file mode 100644
index 00000000000..1cfe67b8f5e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sesv2_dedicated_ip_pool.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_dedicated_ip_pool"
+description: |-
+  Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+---
+
+
+
+# Resource: aws_sesv2_dedicated_ip_pool
+
+Terraform resource for managing an AWS SESv2 (Simple Email V2) Dedicated IP Pool.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Sesv2DedicatedIpPool } from "./.gen/providers/aws/sesv2-dedicated-ip-pool";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Sesv2DedicatedIpPool(this, "example", {
+      poolName: "my-pool",
+    });
+  }
+}
+
+```
+
+### Managed Pool
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Sesv2DedicatedIpPool } from "./.gen/providers/aws/sesv2-dedicated-ip-pool";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Sesv2DedicatedIpPool(this, "example", {
+      poolName: "my-managed-pool",
+      scalingMode: "MANAGED",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `poolName` - (Required) Name of the dedicated IP pool.
+
+The following arguments are optional:
+
+* `scalingMode` - (Optional) IP pool scaling mode. Valid values: `STANDARD`, `MANAGED`. If omitted, the AWS API will default to a standard pool.
+* `tags` - (Optional) A map of tags to assign to the pool. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Dedicated IP Pool.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Dedicated IP Pool using the `poolName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SESv2 (Simple Email V2) Dedicated IP Pool using the `poolName`. For example:
+
+```console
+% terraform import aws_sesv2_dedicated_ip_pool.example my-pool
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sesv2_email_identity.html.markdown b/website/docs/cdktf/typescript/r/sesv2_email_identity.html.markdown
new file mode 100644
index 00000000000..72a2fc50b01
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sesv2_email_identity.html.markdown
@@ -0,0 +1,182 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_email_identity"
+description: |-
+  Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity.
+---
+
+
+
+# Resource: aws_sesv2_email_identity
+
+Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity.
+ +## Example Usage + +### Basic Usage + +#### Email Address Identity + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2EmailIdentity } from "./.gen/providers/aws/sesv2-email-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Sesv2EmailIdentity(this, "example", { + emailIdentity: "testing@example.com", + }); + } +} + +``` + +#### Domain Identity + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2EmailIdentity } from "./.gen/providers/aws/sesv2-email-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Sesv2EmailIdentity(this, "example", { + emailIdentity: "example.com", + }); + } +} + +``` + +#### Configuration Set + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2ConfigurationSet } from "./.gen/providers/aws/sesv2-configuration-set"; +import { Sesv2EmailIdentity } from "./.gen/providers/aws/sesv2-email-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Sesv2ConfigurationSet(this, "example", { + configurationSetName: "example", + }); + const awsSesv2EmailIdentityExample = new Sesv2EmailIdentity( + this, + "example_1", + { + configurationSetName: example.configurationSetName, + emailIdentity: "example.com", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSesv2EmailIdentityExample.overrideLogicalId("example"); + } +} + +``` + +#### DKIM Signing Attributes (BYODKIM) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2EmailIdentity } from "./.gen/providers/aws/sesv2-email-identity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Sesv2EmailIdentity(this, "example", { + dkimSigningAttributes: { + domainSigningPrivateKey: + "MIIJKAIBAAKCAgEA2Se7p8zvnI4yh+Gh9j2rG5e2aRXjg03Y8saiupLnadPH9xvM...", + domainSigningSelector: "example", + }, + emailIdentity: "example.com", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `emailIdentity` - (Required) The email address or domain to verify. + +The following arguments are optional: + +* `configurationSetName` - (Optional) The configuration set to use by default when sending from this identity. 
Note that any configuration set defined in the email sending request takes precedence.
+* `dkimSigningAttributes` - (Optional) The configuration of the DKIM authentication settings for an email domain identity.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### dkim_signing_attributes
+
+* `domainSigningPrivateKey` - (Optional) [Bring Your Own DKIM] A private key that's used to generate a DKIM signature. The private key must use 1024 or 2048-bit RSA encryption and must be base64 encoded.
+
+-> **NOTE:** You have to delete the first and last lines ('-----BEGIN PRIVATE KEY-----' and '-----END PRIVATE KEY-----', respectively) of the generated private key. Additionally, you have to remove the line breaks in the generated private key. The resulting value is a string of characters with no spaces or line breaks.
+
+* `domainSigningSelector` - (Optional) [Bring Your Own DKIM] A string that's used to identify a public key in the DNS configuration for a domain.
+* `nextSigningKeyLength` - (Optional) [Easy DKIM] The key length of the future DKIM key pair to be generated. This can be changed at most once per day. Valid values: `RSA_1024_BIT`, `RSA_2048_BIT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the Email Identity.
+* `dkimSigningAttributes` - A list of objects that contains at most one element with information about the private key and selector to use for Bring Your Own DKIM (BYODKIM), or the key length to be used for Easy DKIM.
+    * `currentSigningKeyLength` - [Easy DKIM] The key length of the DKIM key pair in use.
+    * `lastKeyGenerationTimestamp` - [Easy DKIM] The last time a key pair was generated for this identity.
+    * `nextSigningKeyLength` - [Easy DKIM] The key length of the future DKIM key pair to be generated. This can be changed at most once per day.
+    * `signingAttributesOrigin` - A string that indicates how DKIM was configured for the identity. `AWS_SES` indicates that DKIM was configured for the identity by using Easy DKIM. `EXTERNAL` indicates that DKIM was configured for the identity by using Bring Your Own DKIM (BYODKIM).
+    * `status` - Describes whether or not Amazon SES has successfully located the DKIM records in the DNS records for the domain. See the [AWS SES API v2 Reference](https://docs.aws.amazon.com/ses/latest/APIReference-V2/API_DkimAttributes.html#SES-Type-DkimAttributes-Status) for supported statuses.
+    * `tokens` - If you used Easy DKIM to configure DKIM authentication for the domain, then this object contains a set of unique strings that you use to create a set of CNAME records that you add to the DNS configuration for your domain. When Amazon SES detects these records in the DNS configuration for your domain, the DKIM authentication process is complete. If you configured DKIM authentication for the domain by providing your own public-private key pair, then this object contains the selector for the public key.
+* `identityType` - The email identity type. Valid values: `EMAIL_ADDRESS`, `DOMAIN`.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). +* `verifiedForSendingStatus` - Specifies whether or not the identity is verified. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Email Identity using the `emailIdentity`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Email Identity using the `emailIdentity`. For example: + +```console +% terraform import aws_sesv2_email_identity.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sesv2_email_identity_feedback_attributes.html.markdown b/website/docs/cdktf/typescript/r/sesv2_email_identity_feedback_attributes.html.markdown new file mode 100644 index 00000000000..6f221514b6c --- /dev/null +++ b/website/docs/cdktf/typescript/r/sesv2_email_identity_feedback_attributes.html.markdown @@ -0,0 +1,80 @@ +--- +subcategory: "SESv2 (Simple Email V2)" +layout: "aws" +page_title: "AWS: aws_sesv2_email_identity_feedback_attributes" +description: |- + Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Feedback Attributes. +--- + + + +# Resource: aws_sesv2_email_identity_feedback_attributes + +Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Feedback Attributes. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Sesv2EmailIdentity } from "./.gen/providers/aws/sesv2-email-identity"; +import { Sesv2EmailIdentityFeedbackAttributes } from "./.gen/providers/aws/sesv2-email-identity-feedback-attributes"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Sesv2EmailIdentity(this, "example", { + emailIdentity: "example.com", + }); + const awsSesv2EmailIdentityFeedbackAttributesExample = + new Sesv2EmailIdentityFeedbackAttributes(this, "example_1", { + emailForwardingEnabled: true, + emailIdentity: example.emailIdentity, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSesv2EmailIdentityFeedbackAttributesExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `emailIdentity` - (Required) The email identity. +* `emailForwardingEnabled` - (Optional) Sets the feedback forwarding configuration for the identity. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Email Identity Feedback Attributes using the `emailIdentity`. 
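+
+~> *NOTE:* `cdktf convert` does not currently translate Terraform `import` blocks, which is why the generated TypeScript stub below is empty. As a rough, hedged sketch only (it assumes cdktf 0.17 or later, where every resource exposes an `importFrom` method; names are illustrative), the same import can be expressed directly in CDKTF:
+
+```typescript
+// Hedged sketch, not generated output: assumes cdktf >= 0.17 for `importFrom`.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Sesv2EmailIdentityFeedbackAttributes } from "./.gen/providers/aws/sesv2-email-identity-feedback-attributes";
+class MyImportStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const attributes = new Sesv2EmailIdentityFeedbackAttributes(this, "example", {
+      emailForwardingEnabled: true,
+      emailIdentity: "example.com",
+    });
+    // Import the existing feedback attributes on the next apply instead of creating them.
+    attributes.importFrom("example.com");
+  }
+}
+```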
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SESv2 (Simple Email V2) Email Identity Feedback Attributes using the `emailIdentity`. For example:
+
+```console
+% terraform import aws_sesv2_email_identity_feedback_attributes.example example.com
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sesv2_email_identity_mail_from_attributes.html.markdown b/website/docs/cdktf/typescript/r/sesv2_email_identity_mail_from_attributes.html.markdown
new file mode 100644
index 00000000000..1de8df58fe6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sesv2_email_identity_mail_from_attributes.html.markdown
@@ -0,0 +1,82 @@
+---
+subcategory: "SESv2 (Simple Email V2)"
+layout: "aws"
+page_title: "AWS: aws_sesv2_email_identity_mail_from_attributes"
+description: |-
+  Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes.
+---
+
+
+
+# Resource: aws_sesv2_email_identity_mail_from_attributes
+
+Terraform resource for managing an AWS SESv2 (Simple Email V2) Email Identity Mail From Attributes.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Sesv2EmailIdentity } from "./.gen/providers/aws/sesv2-email-identity";
+import { Sesv2EmailIdentityMailFromAttributes } from "./.gen/providers/aws/sesv2-email-identity-mail-from-attributes";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Sesv2EmailIdentity(this, "example", {
+      emailIdentity: "example.com",
+    });
+    const awsSesv2EmailIdentityMailFromAttributesExample =
+      new Sesv2EmailIdentityMailFromAttributes(this, "example_1", {
+        behaviorOnMxFailure: "REJECT_MESSAGE",
+        emailIdentity: example.emailIdentity,
+        mailFromDomain: "subdomain.${" + example.emailIdentity + "}",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSesv2EmailIdentityMailFromAttributesExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `emailIdentity` - (Required) The verified email identity.
+* `behaviorOnMxFailure` - (Optional) The action to take if the required MX record isn't found when you send an email. Valid values: `USE_DEFAULT_VALUE`, `REJECT_MESSAGE`.
+* `mailFromDomain` - (Optional) The custom MAIL FROM domain that you want the verified identity to use. Required if `behaviorOnMxFailure` is `REJECT_MESSAGE`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SESv2 (Simple Email V2) Email Identity Mail From Attributes using the `emailIdentity`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SESv2 (Simple Email V2) Email Identity Mail From Attributes using the `emailIdentity`. For example: + +```console +% terraform import aws_sesv2_email_identity_mail_from_attributes.example example.com +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sfn_activity.html.markdown b/website/docs/cdktf/typescript/r/sfn_activity.html.markdown new file mode 100644 index 00000000000..fc7e6f27dce --- /dev/null +++ b/website/docs/cdktf/typescript/r/sfn_activity.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "SFN (Step Functions)" +layout: "aws" +page_title: "AWS: aws_sfn_activity" +description: |- + Provides a Step Function Activity resource. +--- + + + +# Resource: aws_sfn_activity + +Provides a Step Function Activity resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SfnActivity } from "./.gen/providers/aws/sfn-activity"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SfnActivity(this, "sfn_activity", { + name: "my-activity", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the activity to create. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Name (ARN) that identifies the created activity. +* `name` - The name of the activity. +* `creationDate` - The date the activity was created. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import activities using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import activities using the `arn`. 
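+
+The activity ARN has the form `arn:aws:states:<region>:<account-id>:activity:<name>`. If the activity already exists, its ARN can also be looked up by name with the `aws_sfn_activity` data source instead of being assembled by hand; a hedged sketch (illustrative names, data source assumed present in the generated bindings):
+
+```typescript
+// Hedged sketch: read an existing activity's ARN back by name.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { DataAwsSfnActivity } from "./.gen/providers/aws/data-aws-sfn-activity";
+class MyLookupStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const activity = new DataAwsSfnActivity(this, "lookup", {
+      name: "my-activity",
+    });
+    // The data source id is the activity ARN, e.g. the value passed to `terraform import`.
+    new TerraformOutput(this, "activity_arn", { value: activity.id });
+  }
+}
+```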
For example:
+
+```console
+% terraform import aws_sfn_activity.foo arn:aws:states:eu-west-1:123456789098:activity:bar
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sfn_alias.html.markdown b/website/docs/cdktf/typescript/r/sfn_alias.html.markdown
new file mode 100644
index 00000000000..5688231c32b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sfn_alias.html.markdown
@@ -0,0 +1,106 @@
+---
+subcategory: "SFN (Step Functions)"
+layout: "aws"
+page_title: "AWS: aws_sfn_alias"
+description: |-
+  Provides a Step Function State Machine Alias.
+---
+
+
+
+# Resource: aws_sfn_alias
+
+Provides a Step Function State Machine Alias.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SfnAlias } from "./.gen/providers/aws/sfn-alias";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SfnAlias(this, "my_sfn_alias", {
+      name: "my_sfn_alias",
+      routingConfiguration: [
+        {
+          stateMachineVersionArn:
+            "arn:aws:states:us-east-1:12345:stateMachine:demo:3",
+          weight: 50,
+        },
+        {
+          stateMachineVersionArn:
+            "arn:aws:states:us-east-1:12345:stateMachine:demo:2",
+          weight: 50,
+        },
+      ],
+    });
+    // `sfnTest` refers to an aws_sfn_state_machine defined elsewhere.
+    new SfnAlias(this, "sfn_alias", {
+      name: "my_sfn_alias",
+      routingConfiguration: [
+        {
+          stateMachineVersionArn: sfnTest.stateMachineVersionArn,
+          weight: 100,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name for the alias you are creating.
+* `routingConfiguration` - (Required) The state machine alias' routing configuration settings. Fields documented below.
+
+The following arguments are optional:
+
+* `description` - (Optional) Description of the alias.
+
+`routingConfiguration` supports the following arguments:
+
+* `stateMachineVersionArn` - (Required) A version of the state machine.
+* `weight` - (Required) Percentage of traffic routed to the state machine version.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) identifying your state machine alias.
+* `creationDate` - The date the state machine alias was created.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SFN (Step Functions) Alias using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SFN (Step Functions) Alias using the `arn`.
For example: + +```console +% terraform import aws_sfn_alias.foo arn:aws:states:us-east-1:123456789098:stateMachine:myStateMachine:foo +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sfn_state_machine.html.markdown b/website/docs/cdktf/typescript/r/sfn_state_machine.html.markdown new file mode 100644 index 00000000000..6142f3c6f0b --- /dev/null +++ b/website/docs/cdktf/typescript/r/sfn_state_machine.html.markdown @@ -0,0 +1,199 @@ +--- +subcategory: "SFN (Step Functions)" +layout: "aws" +page_title: "AWS: aws_sfn_state_machine" +description: |- + Provides a Step Function State Machine resource. +--- + + + +# Resource: aws_sfn_state_machine + +Provides a Step Function State Machine resource + +## Example Usage + +### Basic (Standard Workflow) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SfnStateMachine } from "./.gen/providers/aws/sfn-state-machine"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SfnStateMachine(this, "sfn_state_machine", { + definition: + '{\n "Comment": "A Hello World example of the Amazon States Language using an AWS Lambda Function",\n "StartAt": "HelloWorld",\n "States": {\n "HelloWorld": {\n "Type": "Task",\n "Resource": "${' + + lambda.arn + + '}",\n "End": true\n }\n }\n}\n\n', + name: "my-state-machine", + roleArn: iamForSfn.arn, + }); + } +} + +``` + +### Basic (Express Workflow) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SfnStateMachine } from "./.gen/providers/aws/sfn-state-machine"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SfnStateMachine(this, "sfn_state_machine", { + definition: + '{\n "Comment": "A Hello World example of the Amazon States Language using an AWS Lambda Function",\n "StartAt": "HelloWorld",\n "States": {\n "HelloWorld": {\n "Type": "Task",\n "Resource": "${' + + lambda.arn + + '}",\n "End": true\n }\n }\n}\n\n', + name: "my-state-machine", + roleArn: iamForSfn.arn, + type: "EXPRESS", + }); + } +} + +``` + +### Publish (Publish SFN version) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { SfnStateMachine } from "./.gen/providers/aws/sfn-state-machine";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SfnStateMachine(this, "sfn_state_machine", {
+      definition:
+        '{\n  "Comment": "A Hello World example of the Amazon States Language using an AWS Lambda Function",\n  "StartAt": "HelloWorld",\n  "States": {\n    "HelloWorld": {\n      "Type": "Task",\n      "Resource": "${' +
+        lambda.arn +
+        '}",\n      "End": true\n    }\n  }\n}\n\n',
+      name: "my-state-machine",
+      publish: true,
+      roleArn: iamForSfn.arn,
+      type: "EXPRESS",
+    });
+  }
+}
+
+```
+
+### Logging
+
+~> *NOTE:* See the [AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html) for more information about enabling Step Function logging.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SfnStateMachine } from "./.gen/providers/aws/sfn-state-machine";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SfnStateMachine(this, "sfn_state_machine", {
+      definition:
+        '{\n  "Comment": "A Hello World example of the Amazon States Language using an AWS Lambda Function",\n  "StartAt": "HelloWorld",\n  "States": {\n    "HelloWorld": {\n      "Type": "Task",\n      "Resource": "${' +
+        lambda.arn +
+        '}",\n      "End": true\n    }\n  }\n}\n\n',
+      loggingConfiguration: {
+        includeExecutionData: true,
+        level: "ERROR",
+        logDestination: "${" + logGroupForSfn.arn + "}:*",
+      },
+      name: "my-state-machine",
+      roleArn: iamForSfn.arn,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `definition` - (Required) The [Amazon States Language](https://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html) definition of the state machine.
+* `loggingConfiguration` - (Optional) Defines what execution history events are logged and where they are logged. The `loggingConfiguration` parameter is only valid when `type` is set to `EXPRESS`. Defaults to `OFF`. For more information see [Logging Express Workflows](https://docs.aws.amazon.com/step-functions/latest/dg/cw-logs.html) and [Log Levels](https://docs.aws.amazon.com/step-functions/latest/dg/cloudwatch-log-level.html) in the AWS Step Functions User Guide.
+* `name` - (Optional) The name of the state machine. The name should only contain `0`-`9`, `a`-`z`, `A`-`Z`, `-` and `_`. If omitted, Terraform will assign a random, unique name.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `publish` - (Optional) Set to `true` to publish a version of the state machine during creation. Default: `false`.
+* `roleArn` - (Required) The Amazon Resource Name (ARN) of the IAM role to use for this state machine.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `tracingConfiguration` - (Optional) Selects whether AWS X-Ray tracing is enabled.
+* `type` - (Optional) Determines whether a Standard or Express state machine is created. The default is `STANDARD`. You cannot update the type of a state machine once it has been created. Valid values: `STANDARD`, `EXPRESS`.
+
+### `loggingConfiguration` Configuration Block
+
+* `includeExecutionData` - (Optional) Determines whether execution data is included in your log. When set to `false`, data is excluded.
+* `level` - (Optional) Defines which category of execution history events are logged. Valid values: `ALL`, `ERROR`, `FATAL`, `OFF`.
+* `logDestination` - (Optional) Amazon Resource Name (ARN) of a CloudWatch log group. Make sure the state machine has the correct IAM policies for logging. The ARN must end with `:*`.
+
+### `tracingConfiguration` Configuration Block
+
+* `enabled` - (Optional) When set to `true`, AWS X-Ray tracing is enabled. Make sure the state machine has the correct IAM policies for tracing. See the [AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/xray-iam.html) for details.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the state machine.
+* `arn` - The ARN of the state machine.
+* `creationDate` - The date the state machine was created.
+* `status` - The current status of the state machine. Either `ACTIVE` or `DELETING`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `5m`)
+* `update` - (Default `1m`)
+* `delete` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import State Machines using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import State Machines using the `arn`. For example:
+
+```console
+% terraform import aws_sfn_state_machine.foo arn:aws:states:eu-west-1:123456789098:stateMachine:bar
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/shield_protection.html.markdown b/website/docs/cdktf/typescript/r/shield_protection.html.markdown
new file mode 100644
index 00000000000..be310169151
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/shield_protection.html.markdown
@@ -0,0 +1,103 @@
+---
+subcategory: "Shield"
+layout: "aws"
+page_title: "AWS: aws_shield_protection"
+description: |-
+  Enables AWS Shield Advanced for a specific AWS resource.
+---
+
+
+
+# Resource: aws_shield_protection
+
+Enables AWS Shield Advanced for a specific AWS resource.
+The resource can be an Amazon CloudFront distribution, Elastic Load Balancing load balancer, AWS Global Accelerator accelerator, Elastic IP Address, or an Amazon Route 53 hosted zone.
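+
+For instance, protecting an existing CloudFront distribution only takes the distribution's ARN as `resourceArn`; a hedged sketch with an illustrative account ID and distribution ID:
+
+```typescript
+// Hedged sketch: Shield Advanced protection for an existing CloudFront distribution.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { ShieldProtection } from "./.gen/providers/aws/shield-protection";
+class MyCloudfrontProtection extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new ShieldProtection(this, "cloudfront", {
+      name: "cloudfront-example",
+      // CloudFront ARNs are global, so the region segment is empty.
+      resourceArn:
+        "arn:aws:cloudfront::123456789012:distribution/EDFDVBD6EXAMPLE",
+    });
+  }
+}
+```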
+ +## Example Usage + +### Create protection + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { Eip } from "./.gen/providers/aws/eip"; +import { ShieldProtection } from "./.gen/providers/aws/shield-protection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Eip(this, "example", { + domain: "vpc", + }); + new DataAwsAvailabilityZones(this, "available", {}); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_3", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const awsShieldProtectionExample = new ShieldProtection(this, "example_4", { + name: "example", + resourceArn: + "arn:aws:ec2:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:eip-allocation/${" + + example.id + + "}", + tags: { + Environment: "Dev", + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsShieldProtectionExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A friendly name for the Protection you are creating. +* `resourceArn` - (Required) The ARN (Amazon Resource Name) of the resource to be protected. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) for the Protection object that is created. +* `arn` - The ARN of the Protection. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Shield protection resources using specifying their ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Shield protection resources using specifying their ID. 
For example: + +```console +% terraform import aws_shield_protection.example ff9592dc-22f3-4e88-afa1-7b29fde9669a +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/shield_protection_group.html.markdown b/website/docs/cdktf/typescript/r/shield_protection_group.html.markdown new file mode 100644 index 00000000000..885d92cb38c --- /dev/null +++ b/website/docs/cdktf/typescript/r/shield_protection_group.html.markdown @@ -0,0 +1,172 @@ +--- +subcategory: "Shield" +layout: "aws" +page_title: "AWS: aws_shield_protection_group" +description: |- + Creates a grouping of protected resources so they can be handled as a collective. +--- + + + +# Resource: aws_shield_protection_group + +Creates a grouping of protected resources so they can be handled as a collective. +This resource grouping improves the accuracy of detection and reduces false positives. For more information see +[Managing AWS Shield Advanced protection groups](https://docs.aws.amazon.com/waf/latest/developerguide/manage-protection-group.html) + +## Example Usage + +### Create protection group for all resources + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ShieldProtectionGroup } from "./.gen/providers/aws/shield-protection-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ShieldProtectionGroup(this, "example", { + aggregation: "MAX", + pattern: "ALL", + protectionGroupId: "example", + }); + } +} + +``` + +### Create protection group for arbitrary number of resources + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { Eip } from "./.gen/providers/aws/eip"; +import { ShieldProtection } from "./.gen/providers/aws/shield-protection"; +import { ShieldProtectionGroup } from "./.gen/providers/aws/shield-protection-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Eip(this, "example", { + domain: "vpc", + }); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_2", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const awsShieldProtectionExample = new ShieldProtection(this, "example_3", { + name: "example", + resourceArn: + "arn:aws:ec2:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:eip-allocation/${" + + example.id + + "}", + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsShieldProtectionExample.overrideLogicalId("example"); + const awsShieldProtectionGroupExample = new ShieldProtectionGroup( + this, + "example_4", + { + aggregation: "MEAN", + dependsOn: [awsShieldProtectionExample], + members: [ + "arn:aws:ec2:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:eip-allocation/${" + + example.id + + "}", + ], + pattern: "ARBITRARY", + protectionGroupId: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsShieldProtectionGroupExample.overrideLogicalId("example"); + } +} + +``` + +### Create protection group for a type of resource + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { ShieldProtectionGroup } from "./.gen/providers/aws/shield-protection-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new ShieldProtectionGroup(this, "example", { + aggregation: "SUM", + pattern: "BY_RESOURCE_TYPE", + protectionGroupId: "example", + resourceType: "ELASTIC_IP_ALLOCATION", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `aggregation` - (Required) Defines how AWS Shield combines resource data for the group in order to detect, mitigate, and report events. +* `members` - (Optional) The Amazon Resource Names (ARNs) of the resources to include in the protection group. You must set this when you set `pattern` to ARBITRARY and you must not set it for any other `pattern` setting. +* `pattern` - (Required) The criteria to use to choose the protected resources for inclusion in the group. +* `protectionGroupId` - (Required) The name of the protection group. +* `resourceType` - (Optional) The resource type to include in the protection group. You must set this when you set `pattern` to BY_RESOURCE_TYPE and you must not set it for any other `pattern` setting. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `protectionGroupArn` - The ARN (Amazon Resource Name) of the protection group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Shield protection group resources using their protection group id. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Shield protection group resources using their protection group id. For example: + +```console +% terraform import aws_shield_protection_group.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/shield_protection_health_check_association.html.markdown b/website/docs/cdktf/typescript/r/shield_protection_health_check_association.html.markdown new file mode 100644 index 00000000000..aeb90a6ced1 --- /dev/null +++ b/website/docs/cdktf/typescript/r/shield_protection_health_check_association.html.markdown @@ -0,0 +1,134 @@ +--- +subcategory: "Shield" +layout: "aws" +page_title: "AWS: aws_shield_protection_health_check_association" +description: |- + Creates an association between a Route53 Health Check and a Shield Advanced protected resource. +--- + + + +# Resource: aws_shield_protection_health_check_association + +Creates an association between a Route53 Health Check and a Shield Advanced protected resource. +This association uses the health of your applications to improve responsiveness and accuracy in attack detection and mitigation. + +Blog post: [AWS Shield Advanced now supports Health Based Detection](https://aws.amazon.com/about-aws/whats-new/2020/02/aws-shield-advanced-now-supports-health-based-detection/) + +## Example Usage + +### Create an association between a protected EIP and a Route53 Health Check + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsPartition } from "./.gen/providers/aws/data-aws-partition"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { Eip } from "./.gen/providers/aws/eip"; +import { Route53HealthCheck } from "./.gen/providers/aws/route53-health-check"; +import { ShieldProtection } from "./.gen/providers/aws/shield-protection"; +import { ShieldProtectionHealthCheckAssociation } from "./.gen/providers/aws/shield-protection-health-check-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Eip(this, "example", { + domain: "vpc", + tags: { + Name: "example", + }, + }); + const awsRoute53HealthCheckExample = new Route53HealthCheck( + this, + "example_1", + { + failureThreshold: Token.asNumber("3"), + ipAddress: example.publicIp, + port: 80, + requestInterval: Token.asNumber("30"), + resourcePath: "/ready", + tags: { + Name: "tf-example-health-check", + }, + type: "HTTP", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsRoute53HealthCheckExample.overrideLogicalId("example"); + const current = new DataAwsCallerIdentity(this, "current", {}); + const dataAwsPartitionCurrent = new DataAwsPartition(this, "current_3", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsPartitionCurrent.overrideLogicalId("current"); + const dataAwsRegionCurrent = new DataAwsRegion(this, "current_4", {}); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsRegionCurrent.overrideLogicalId("current"); + const awsShieldProtectionExample = new ShieldProtection(this, "example_5", { + name: "example-protection", + resourceArn: + "arn:${" + + dataAwsPartitionCurrent.partition + + "}:ec2:${" + + dataAwsRegionCurrent.name + + "}:${" + + current.accountId + + "}:eip-allocation/${" + + example.id + + "}", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsShieldProtectionExample.overrideLogicalId("example"); + const awsShieldProtectionHealthCheckAssociationExample = + new ShieldProtectionHealthCheckAssociation(this, "example_6", { + healthCheckArn: Token.asString(awsRoute53HealthCheckExample.arn), + shieldProtectionId: Token.asString(awsShieldProtectionExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsShieldProtectionHealthCheckAssociationExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `healthCheckArn` - (Required) The ARN (Amazon Resource Name) of the Route53 Health Check resource which will be associated to the protected resource. +* `shieldProtectionId` - (Required) The ID of the protected resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The unique identifier (ID) for the Protection object that is created. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Shield protection health check association resources using the `shieldProtectionId` and `healthCheckArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Shield protection health check association resources using the `shieldProtectionId` and `healthCheckArn`. 
For example: + +```console +% terraform import aws_shield_protection_health_check_association.example ff9592dc-22f3-4e88-afa1-7b29fde9669a+arn:aws:route53:::healthcheck/3742b175-edb9-46bc-9359-f53e3b794b1b +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/signer_signing_job.html.markdown b/website/docs/cdktf/typescript/r/signer_signing_job.html.markdown new file mode 100644 index 00000000000..773e91fcff4 --- /dev/null +++ b/website/docs/cdktf/typescript/r/signer_signing_job.html.markdown @@ -0,0 +1,130 @@ +--- +subcategory: "Signer" +layout: "aws" +page_title: "AWS: aws_signer_signing_job" +description: |- + Creates a Signer Signing Job. +--- + + + +# Resource: aws_signer_signing_job + +Creates a Signer Signing Job. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SignerSigningJob } from "./.gen/providers/aws/signer-signing-job"; +import { SignerSigningProfile } from "./.gen/providers/aws/signer-signing-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const testSp = new SignerSigningProfile(this, "test_sp", { + platformId: "AWSLambda-SHA384-ECDSA", + }); + new SignerSigningJob(this, "build_signing_job", { + destination: { + s3: { + bucket: "s3-bucket-name", + prefix: "signed/", + }, + }, + ignoreSigningJobFailure: true, + profileName: testSp.name, + source: { + s3: { + bucket: "s3-bucket-name", + key: "object-to-be-signed.zip", + version: "jADjFYYYEXAMPLETszPjOmCMFDzd9dN1", + }, + }, + }); + } +} + +``` + +## Argument Reference + +* `profileName` - (Required) The name of the profile to initiate the signing operation. +* `source` - (Required) The S3 bucket that contains the object to sign. See [Source](#source) below for details. +* `destination` - (Required) The S3 bucket in which to save your signed object. See [Destination](#destination) below for details. +* `ignoreSigningJobFailure` - (Optional) Set this argument to `true` to ignore signing job failures and retrieve failed status and reason. Default `false`. + +### Source + +The source configuration block supports the following arguments: + +* `s3` - (Required) A configuration block describing the S3 Source object: See [S3 Source](#s3-source) below for details. + +### S3 Source + +The configuration block supports the following arguments: + +* `bucket` - (Required) Name of the S3 bucket. +* `key` - (Required) Key name of the object that contains your unsigned code. +* `version` - (Required) Version of your source image in your version enabled S3 bucket. + +### Destination + +The destination configuration block supports the following arguments: + +* `s3` - (Required) A configuration block describing the S3 Destination object: See [S3 Destination](#s3-destination) below for details. + +### S3 Destination + +The configuration block supports the following arguments: + +* `bucket` - (Required) Name of the S3 bucket. +* `prefix` - (Optional) An Amazon S3 object key prefix that you can use to limit signed objects keys to begin with the specified prefix. 
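+
+Because `version` is required in the S3 source, the object to sign must live in a bucket with versioning enabled. A hedged sketch (illustrative names; it assumes versioning is already enabled on the bucket) that wires an uploaded object's version into the signing job instead of hard-coding it:
+
+```typescript
+// Hedged sketch: feed an S3 object's version id straight into the signing job.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+import { SignerSigningJob } from "./.gen/providers/aws/signer-signing-job";
+import { SignerSigningProfile } from "./.gen/providers/aws/signer-signing-profile";
+class MyVersionedSigningJob extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const profile = new SignerSigningProfile(this, "profile", {
+      platformId: "AWSLambda-SHA384-ECDSA",
+    });
+    const unsigned = new S3Object(this, "unsigned", {
+      bucket: "s3-bucket-name", // assumed to have versioning enabled
+      key: "object-to-be-signed.zip",
+      source: "object-to-be-signed.zip",
+    });
+    new SignerSigningJob(this, "job", {
+      profileName: profile.name,
+      source: {
+        s3: {
+          bucket: unsigned.bucket,
+          key: unsigned.key,
+          version: unsigned.versionId, // resolved after the object is uploaded
+        },
+      },
+      destination: {
+        s3: {
+          bucket: "s3-bucket-name",
+          prefix: "signed/",
+        },
+      },
+    });
+  }
+}
+```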
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `completedAt` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was completed. +* `createdAt` - Date and time in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) that the signing job was created. +* `jobId` - The ID of the signing job on output. +* `jobInvoker` - The IAM entity that initiated the signing job. +* `jobOwner` - The AWS account ID of the job owner. +* `platformDisplayName` - A human-readable name for the signing platform associated with the signing job. +* `platformId` - The platform to which your signed code image will be distributed. +* `profileVersion` - The version of the signing profile used to initiate the signing job. +* `requestedBy` - The IAM principal that requested the signing job. +* `revocationRecord` - A revocation record if the signature generated by the signing job has been revoked. Contains a timestamp and the ID of the IAM entity that revoked the signature. +* `signatureExpiresAt` - The time when the signature of a signing job expires. +* `signedObject` - Name of the S3 bucket where the signed code image is saved by code signing. +* `status` - Status of the signing job. +* `statusReason` - String value that contains the status reason. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Signer signing jobs using the `jobId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Signer signing jobs using the `jobId`. For example: + +```console +% terraform import aws_signer_signing_job.test_signer_signing_job 9ed7e5c3-b8d4-4da0-8459-44e0b068f7ee +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/signer_signing_profile.html.markdown b/website/docs/cdktf/typescript/r/signer_signing_profile.html.markdown new file mode 100644 index 00000000000..fef91058fca --- /dev/null +++ b/website/docs/cdktf/typescript/r/signer_signing_profile.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "Signer" +layout: "aws" +page_title: "AWS: aws_signer_signing_profile" +description: |- + Creates a Signer Signing Profile. +--- + + + +# Resource: aws_signer_signing_profile + +Creates a Signer Signing Profile. A signing profile contains information about the code signing configuration parameters that can be used by a given code signing user. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { SignerSigningProfile } from "./.gen/providers/aws/signer-signing-profile"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SignerSigningProfile(this, "prod_sp", { + namePrefix: "prod_sp_", + platformId: "AWSLambda-SHA384-ECDSA", + signatureValidityPeriod: { + type: "YEARS", + value: 5, + }, + tags: { + tag1: "value1", + tag2: "value2", + }, + }); + new SignerSigningProfile(this, "test_sp", { + platformId: "AWSLambda-SHA384-ECDSA", + }); + } +} + +``` + +## Argument Reference + +* `platformId` - (Required) The ID of the platform that is used by the target signing profile. +* `name` - (Optional) A unique signing profile name. By default generated by Terraform. Signing profile names are immutable and cannot be reused after canceled. +* `namePrefix` - (Optional) A signing profile name prefix. Terraform will generate a unique suffix. Conflicts with `name`. +* `signatureValidityPeriod` - (Optional) The validity period for a signing job. +* `tags` - (Optional) A list of tags associated with the signing profile. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The Amazon Resource Name (ARN) for the signing profile. +* `name` - The name of the target signing profile. +* `platformDisplayName` - A human-readable name for the signing platform associated with the signing profile. +* `revocationRecord` - Revocation information for a signing profile. +* `status` - The status of the target signing profile. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `version` - The current version of the signing profile. +* `versionArn` - The signing profile ARN, including the profile version. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Signer signing profiles using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Signer signing profiles using the `name`. For example: + +```console +% terraform import aws_signer_signing_profile.test_signer_signing_profile test_sp_DdW3Mk1foYL88fajut4mTVFGpuwfd4ACO6ANL0D1uIj7lrn8adK +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/signer_signing_profile_permission.html.markdown b/website/docs/cdktf/typescript/r/signer_signing_profile_permission.html.markdown new file mode 100644 index 00000000000..d04009f6ab6 --- /dev/null +++ b/website/docs/cdktf/typescript/r/signer_signing_profile_permission.html.markdown @@ -0,0 +1,100 @@ +--- +subcategory: "Signer" +layout: "aws" +page_title: "AWS: aws_signer_signing_profile_permission" +description: |- + Creates a Signer Signing Profile Permission. 
+---
+
+
+# Resource: aws_signer_signing_profile_permission
+
+Creates a Signer Signing Profile Permission. That is, a cross-account permission for a signing profile.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SignerSigningProfile } from "./.gen/providers/aws/signer-signing-profile";
+import { SignerSigningProfilePermission } from "./.gen/providers/aws/signer-signing-profile-permission";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const prodSp = new SignerSigningProfile(this, "prod_sp", {
+      namePrefix: "prod_sp_",
+      platformId: "AWSLambda-SHA384-ECDSA",
+      signatureValidityPeriod: {
+        type: "YEARS",
+        value: 5,
+      },
+      tags: {
+        tag1: "value1",
+        tag2: "value2",
+      },
+    });
+    // `awsAccount` and `awsTeamRoleArn` are Terraform variables defined elsewhere.
+    new SignerSigningProfilePermission(this, "sp_permission_1", {
+      action: "signer:StartSigningJob",
+      principal: awsAccount.stringValue,
+      profileName: prodSp.name,
+    });
+    new SignerSigningProfilePermission(this, "sp_permission_2", {
+      action: "signer:GetSigningProfile",
+      principal: awsTeamRoleArn.stringValue,
+      profileName: prodSp.name,
+      statementId: "ProdAccountStartSigningJob_StatementId",
+    });
+    new SignerSigningProfilePermission(this, "sp_permission_3", {
+      action: "signer:RevokeSignature",
+      principal: "123456789012",
+      profileName: prodSp.name,
+      profileVersion: prodSp.version,
+      statementIdPrefix: "version-permission-",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `profileName` - (Required) Name of the signing profile to which the cross-account permissions are added.
+* `action` - (Required) An AWS Signer action permitted as part of cross-account permissions. Valid values: `signer:StartSigningJob`, `signer:GetSigningProfile`, or `signer:RevokeSignature`.
+* `principal` - (Required) The AWS principal to be granted a cross-account permission.
+* `profileVersion` - (Optional) The signing profile version that a permission applies to.
+* `statementId` - (Optional) A unique statement identifier. By default generated by Terraform.
+* `statementIdPrefix` - (Optional) A statement identifier prefix. Terraform will generate a unique suffix. Conflicts with `statementId`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Signer signing profile permission statements using `profile_name/statement_id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Signer signing profile permission statements using `profile_name/statement_id`.
For example: + +```console +% terraform import aws_signer_signing_profile_permission.test_signer_signing_profile_permission prod_profile_DdW3Mk1foYL88fajut4mTVFGpuwfd4ACO6ANL0D1uIj7lrn8adK/ProdAccountStartSigningJobStatementId +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/simpledb_domain.html.markdown b/website/docs/cdktf/typescript/r/simpledb_domain.html.markdown new file mode 100644 index 00000000000..4d7faa60c79 --- /dev/null +++ b/website/docs/cdktf/typescript/r/simpledb_domain.html.markdown @@ -0,0 +1,71 @@ +--- +subcategory: "SDB (SimpleDB)" +layout: "aws" +page_title: "AWS: aws_simpledb_domain" +description: |- + Provides a SimpleDB domain resource. +--- + + + +# Resource: aws_simpledb_domain + +Provides a SimpleDB domain resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SimpledbDomain } from "./.gen/providers/aws/simpledb-domain"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SimpledbDomain(this, "users", { + name: "users", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the SimpleDB domain + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the SimpleDB domain + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SimpleDB Domains using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SimpleDB Domains using the `name`. For example: + +```console +% terraform import aws_simpledb_domain.users users +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/snapshot_create_volume_permission.html.markdown b/website/docs/cdktf/typescript/r/snapshot_create_volume_permission.html.markdown new file mode 100644 index 00000000000..201d2a6c076 --- /dev/null +++ b/website/docs/cdktf/typescript/r/snapshot_create_volume_permission.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_snapshot_create_volume_permission" +description: |- + Adds create volume permission to an EBS Snapshot +--- + + + +# Resource: aws_snapshot_create_volume_permission + +Adds permission to create volumes off of a given EBS Snapshot. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { EbsSnapshot } from "./.gen/providers/aws/ebs-snapshot"; +import { EbsVolume } from "./.gen/providers/aws/ebs-volume"; +import { SnapshotCreateVolumePermission } from "./.gen/providers/aws/snapshot-create-volume-permission"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new EbsVolume(this, "example", { + availabilityZone: "us-west-2a", + size: 40, + }); + const exampleSnapshot = new EbsSnapshot(this, "example_snapshot", { + volumeId: example.id, + }); + new SnapshotCreateVolumePermission(this, "example_perm", { + accountId: "12345678", + snapshotId: exampleSnapshot.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `snapshotId` - (Required) A snapshot ID +* `accountId` - (Required) An AWS Account ID to add create volume permissions. The AWS Account cannot be the snapshot's owner + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A combination of "`snapshotId`-`accountId`". + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sns_platform_application.html.markdown b/website/docs/cdktf/typescript/r/sns_platform_application.html.markdown new file mode 100644 index 00000000000..1ab10f2aa70 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sns_platform_application.html.markdown @@ -0,0 +1,146 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_platform_application" +description: |- + Provides an SNS platform application resource. +--- + + + +# Resource: aws_sns_platform_application + +Provides an SNS platform application resource + +## Example Usage + +### Apple Push Notification Service (APNS) using certificate-based authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SnsPlatformApplication } from "./.gen/providers/aws/sns-platform-application"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SnsPlatformApplication(this, "apns_application", { + name: "apns_application", + platform: "APNS", + platformCredential: "", + platformPrincipal: "", + }); + } +} + +``` + +### Apple Push Notification Service (APNS) using token-based authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
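+ *
+ * For token-based APNS authentication, `platformCredential` is the signing
+ * key and `platformPrincipal` is the signing key ID; the empty strings
+ * below are placeholders for those values.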
+ */
+import { SnsPlatformApplication } from "./.gen/providers/aws/sns-platform-application";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SnsPlatformApplication(this, "apns_application", {
+      applePlatformBundleId: "",
+      applePlatformTeamId: "",
+      name: "apns_application",
+      platform: "APNS",
+      platformCredential: "",
+      platformPrincipal: "",
+    });
+  }
+}
+
+```
+
+### Google Cloud Messaging (GCM)
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SnsPlatformApplication } from "./.gen/providers/aws/sns-platform-application";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SnsPlatformApplication(this, "gcm_application", {
+      name: "gcm_application",
+      platform: "GCM",
+      platformCredential: "", // placeholder for the GCM/FCM server API key
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The friendly name for the SNS platform application
+* `platform` - (Required) The platform that the app is registered with. See [Platform][1] for supported platforms.
+* `platformCredential` - (Required) Application Platform credential. See [Credential][1] for the type of credential required for the platform. The value of this attribute when stored in the Terraform state is only a hash of the real value, so it is not practical to use this as an attribute for other resources.
+* `eventDeliveryFailureTopicArn` - (Optional) The ARN of the SNS Topic triggered when a delivery to any of the platform endpoints associated with your platform application encounters a permanent failure.
+* `eventEndpointCreatedTopicArn` - (Optional) The ARN of the SNS Topic triggered when a new platform endpoint is added to your platform application.
+* `eventEndpointDeletedTopicArn` - (Optional) The ARN of the SNS Topic triggered when an existing platform endpoint is deleted from your platform application.
+* `eventEndpointUpdatedTopicArn` - (Optional) The ARN of the SNS Topic triggered when an existing platform endpoint is changed in your platform application.
+* `failureFeedbackRoleArn` - (Optional) The IAM role ARN permitted to receive failure feedback for this application and give SNS write access to use CloudWatch logs on your behalf.
+* `platformPrincipal` - (Optional) Application Platform principal. See [Principal][2] for the type of principal required for the platform. The value of this attribute when stored in the Terraform state is only a hash of the real value, so it is not practical to use this as an attribute for other resources.
+* `successFeedbackRoleArn` - (Optional) The IAM role ARN permitted to receive success feedback for this application and give SNS write access to use CloudWatch logs on your behalf.
+* `successFeedbackSampleRate` - (Optional) The sample rate percentage (0-100) of successfully delivered messages.
+
+The following attributes are needed only when using APNS token credentials:
+
+* `applePlatformTeamId` - (Required) The identifier that's assigned to your Apple developer account team. Must be 10 alphanumeric characters.
+* `applePlatformBundleId` - (Required) The bundle identifier that's assigned to your iOS app.
May only include alphanumeric characters, hyphens (-), and periods (.). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the SNS platform application +* `arn` - The ARN of the SNS platform application + +[1]: http://docs.aws.amazon.com/sns/latest/dg/mobile-push-send-register.html +[2]: http://docs.aws.amazon.com/sns/latest/api/API_CreatePlatformApplication.html + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS platform applications using the ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SNS platform applications using the ARN. For example: + +```console +% terraform import aws_sns_platform_application.gcm_application arn:aws:sns:us-west-2:0123456789012:app/GCM/gcm_application +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sns_sms_preferences.html.markdown b/website/docs/cdktf/typescript/r/sns_sms_preferences.html.markdown new file mode 100644 index 00000000000..6293b41f29f --- /dev/null +++ b/website/docs/cdktf/typescript/r/sns_sms_preferences.html.markdown @@ -0,0 +1,50 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_sms_preferences" +description: |- + Provides a way to set SNS SMS preferences. +--- + + + +# Resource: aws_sns_sms_preferences + +Provides a way to set SNS SMS preferences. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SnsSmsPreferences } from "./.gen/providers/aws/sns-sms-preferences"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SnsSmsPreferences(this, "update_sms_prefs", {}); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `monthlySpendLimit` - (Optional) The maximum amount in USD that you are willing to spend each month to send SMS messages. +* `deliveryStatusIamRoleArn` - (Optional) The ARN of the IAM role that allows Amazon SNS to write logs about SMS deliveries in CloudWatch Logs. +* `deliveryStatusSuccessSamplingRate` - (Optional) The percentage of successful SMS deliveries for which Amazon SNS will write logs in CloudWatch Logs. The value must be between 0 and 100. +* `defaultSenderId` - (Optional) A string, such as your business brand, that is displayed as the sender on the receiving device. +* `defaultSmsType` - (Optional) The type of SMS message that you will send by default. Possible values are: Promotional, Transactional +* `usageReportS3Bucket` - (Optional) The name of the Amazon S3 bucket to receive daily SMS usage reports from Amazon SNS. + +## Attribute Reference + +This resource exports no additional attributes. 
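+
+Since the example above leaves every argument unset, here is a populated sketch for illustration (all values below are hypothetical placeholders):
+
+```typescript
+// Hypothetical sketch: every value below is an example placeholder.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SnsSmsPreferences } from "./.gen/providers/aws/sns-sms-preferences";
+class SmsPreferencesSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SnsSmsPreferences(this, "update_sms_prefs", {
+      defaultSenderId: "ExampleCo",
+      defaultSmsType: "Transactional",
+      monthlySpendLimit: 20,
+    });
+  }
+}
+```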
+ + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sns_topic.html.markdown b/website/docs/cdktf/typescript/r/sns_topic.html.markdown new file mode 100644 index 00000000000..e0da2e426bb --- /dev/null +++ b/website/docs/cdktf/typescript/r/sns_topic.html.markdown @@ -0,0 +1,174 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_topic" +description: |- + Provides an SNS topic resource. +--- + + + +# Resource: aws_sns_topic + +Provides an SNS topic resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SnsTopic(this, "user_updates", { + name: "user-updates-topic", + }); + } +} + +``` + +## Example with Delivery Policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SnsTopic(this, "user_updates", { + deliveryPolicy: + '{\n "http": {\n "defaultHealthyRetryPolicy": {\n "minDelayTarget": 20,\n "maxDelayTarget": 20,\n "numRetries": 3,\n "numMaxDelayRetries": 0,\n "numNoDelayRetries": 0,\n "numMinDelayRetries": 0,\n "backoffFunction": "linear"\n },\n "disableSubscriptionOverrides": false,\n "defaultThrottlePolicy": {\n "maxReceivesPerSecond": 1\n }\n }\n}\n\n', + name: "user-updates-topic", + }); + } +} + +``` + +## Example with Server-side encryption (SSE) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SnsTopic(this, "user_updates", { + kmsMasterKeyId: "alias/aws/sns", + name: "user-updates-topic", + }); + } +} + +``` + +## Example with First-In-First-Out (FIFO) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
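+ *
+ * Note: FIFO topic names must end with the `.fifo` suffix, as the `name`
+ * below does.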
+ */
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SnsTopic(this, "user_updates", {
+      contentBasedDeduplication: true,
+      fifoTopic: true,
+      name: "user-updates-topic.fifo",
+    });
+  }
+}
+
+```
+
+## Message Delivery Status Arguments
+
+The `<endpoint>SuccessFeedbackRoleArn` and `<endpoint>FailureFeedbackRoleArn` arguments (where `<endpoint>` is one of `application`, `http`, `lambda`, `sqs`, or `firehose`; the full set is listed below) are used to give Amazon SNS write access to use CloudWatch Logs on your behalf. The `<endpoint>SuccessFeedbackSampleRate` argument specifies the sample rate percentage (0-100) of successfully delivered messages. After you configure an `<endpoint>FailureFeedbackRoleArn` argument, all failed message deliveries generate CloudWatch Logs.
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the topic. Topic names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 256 characters long. For a FIFO (first-in-first-out) topic, the name must end with the `.fifo` suffix. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`.
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`.
+* `displayName` - (Optional) The display name for the topic.
+* `policy` - (Optional) The fully-formed AWS policy as JSON. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+* `deliveryPolicy` - (Optional) The SNS delivery policy. See the [AWS documentation](https://docs.aws.amazon.com/sns/latest/dg/DeliveryPolicies.html) for details.
+* `applicationSuccessFeedbackRoleArn` - (Optional) The IAM role permitted to receive success feedback for this topic.
+* `applicationSuccessFeedbackSampleRate` - (Optional) Percentage of success to sample.
+* `applicationFailureFeedbackRoleArn` - (Optional) IAM role for failure feedback.
+* `httpSuccessFeedbackRoleArn` - (Optional) The IAM role permitted to receive success feedback for this topic.
+* `httpSuccessFeedbackSampleRate` - (Optional) Percentage of success to sample.
+* `httpFailureFeedbackRoleArn` - (Optional) IAM role for failure feedback.
+* `kmsMasterKeyId` - (Optional) The ID of an AWS-managed customer master key (CMK) for Amazon SNS or a custom CMK. For more information, see [Key Terms](https://docs.aws.amazon.com/sns/latest/dg/sns-server-side-encryption.html#sse-key-terms).
+* `signatureVersion` - (Optional) The signature version to use, [1 (SHA1) or 2 (SHA256)](https://docs.aws.amazon.com/sns/latest/dg/sns-verify-signature-of-message.html). The signature version corresponds to the hashing algorithm used while creating the signature of the notifications, subscription confirmations, or unsubscribe confirmation messages sent by Amazon SNS.
+* `tracingConfig` - (Optional) Tracing mode of an Amazon SNS topic. Valid values: `"PassThrough"`, `"Active"`.
+* `fifoTopic` - (Optional) Boolean indicating whether or not to create a FIFO (first-in-first-out) topic (default is `false`).
+* `contentBasedDeduplication` - (Optional) Enables content-based deduplication for FIFO topics.
For more information, see the [related documentation](https://docs.aws.amazon.com/sns/latest/dg/fifo-message-dedup.html) +* `lambdaSuccessFeedbackRoleArn` - (Optional) The IAM role permitted to receive success feedback for this topic +* `lambdaSuccessFeedbackSampleRate` - (Optional) Percentage of success to sample +* `lambdaFailureFeedbackRoleArn` - (Optional) IAM role for failure feedback +* `sqsSuccessFeedbackRoleArn` - (Optional) The IAM role permitted to receive success feedback for this topic +* `sqsSuccessFeedbackSampleRate` - (Optional) Percentage of success to sample +* `sqsFailureFeedbackRoleArn` - (Optional) IAM role for failure feedback +* `firehoseSuccessFeedbackRoleArn` - (Optional) The IAM role permitted to receive success feedback for this topic +* `firehoseSuccessFeedbackSampleRate` - (Optional) Percentage of success to sample +* `firehoseFailureFeedbackRoleArn` - (Optional) IAM role for failure feedback +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ARN of the SNS topic +* `arn` - The ARN of the SNS topic, as a more obvious property (clone of id) +* `owner` - The AWS Account ID of the SNS topic owner +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Topics using the topic `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SNS Topics using the topic `arn`. For example: + +```console +% terraform import aws_sns_topic.user_updates arn:aws:sns:us-west-2:0123456789012:my-topic +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sns_topic_data_protection_policy.html.markdown b/website/docs/cdktf/typescript/r/sns_topic_data_protection_policy.html.markdown new file mode 100644 index 00000000000..fd0371ac56d --- /dev/null +++ b/website/docs/cdktf/typescript/r/sns_topic_data_protection_policy.html.markdown @@ -0,0 +1,97 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_topic_data_protection_policy" +description: |- + Provides an SNS data protection topic policy resource. +--- + + + +# Resource: aws_sns_topic_data_protection_policy + +Provides an SNS data protection topic policy resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
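+ *
+ * The policy below uses the `EmailAddress` managed data identifier to deny
+ * inbound messages that contain an email address.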
+ */
+import { SnsTopic } from "./.gen/providers/aws/sns-topic";
+import { SnsTopicDataProtectionPolicy } from "./.gen/providers/aws/sns-topic-data-protection-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SnsTopic(this, "example", {
+      name: "example",
+    });
+    const awsSnsTopicDataProtectionPolicyExample =
+      new SnsTopicDataProtectionPolicy(this, "example_1", {
+        arn: example.arn,
+        policy: Token.asString(
+          Fn.jsonencode({
+            Description: "Example data protection policy",
+            Name: "__example_data_protection_policy",
+            Statement: [
+              {
+                DataDirection: "Inbound",
+                DataIdentifier: [
+                  "arn:aws:dataprotection::aws:data-identifier/EmailAddress",
+                ],
+                Operation: {
+                  Deny: {},
+                },
+                Principal: ["*"],
+                Sid: "__deny_statement_11ba9d96",
+              },
+            ],
+            Version: "2021-06-01",
+          })
+        ),
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSnsTopicDataProtectionPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `arn` - (Required) The ARN of the SNS topic
+* `policy` - (Required) The fully-formed AWS policy as JSON. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Data Protection Topic Policy using the topic ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SNS Data Protection Topic Policy using the topic ARN. For example:
+
+```console
+% terraform import aws_sns_topic_data_protection_policy.example arn:aws:sns:us-west-2:0123456789012:example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sns_topic_policy.html.markdown b/website/docs/cdktf/typescript/r/sns_topic_policy.html.markdown
new file mode 100644
index 00000000000..31c268d66b0
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sns_topic_policy.html.markdown
@@ -0,0 +1,118 @@
+---
+subcategory: "SNS (Simple Notification)"
+layout: "aws"
+page_title: "AWS: aws_sns_topic_policy"
+description: |-
+  Provides an SNS topic policy resource.
+---
+
+
+
+# Resource: aws_sns_topic_policy
+
+Provides an SNS topic policy resource.
+
+~> **NOTE:** If a Principal is specified as just an AWS account ID rather than an ARN, AWS silently converts it to the ARN for the root user, causing future Terraform plans to differ. To avoid this problem, specify the full ARN, e.g., `arn:aws:iam::123456789012:root`
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
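+ *
+ * `accountId` below is assumed to be defined elsewhere in the stack (for
+ * example, a TerraformVariable or a caller-identity data source).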
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +import { SnsTopicPolicy } from "./.gen/providers/aws/sns-topic-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new SnsTopic(this, "test", { + name: "my-topic-with-policy", + }); + const snsTopicPolicy = new DataAwsIamPolicyDocument( + this, + "sns_topic_policy", + { + policyId: "__default_policy_ID", + statement: [ + { + actions: [ + "SNS:Subscribe", + "SNS:SetTopicAttributes", + "SNS:RemovePermission", + "SNS:Receive", + "SNS:Publish", + "SNS:ListSubscriptionsByTopic", + "SNS:GetTopicAttributes", + "SNS:DeleteTopic", + "SNS:AddPermission", + ], + condition: [ + { + test: "StringEquals", + values: [accountId.stringValue], + variable: "AWS:SourceOwner", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [test.arn], + sid: "__default_statement_ID", + }, + ], + } + ); + new SnsTopicPolicy(this, "default", { + arn: test.arn, + policy: Token.asString(snsTopicPolicy.json), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `arn` - (Required) The ARN of the SNS topic +* `policy` - (Required) The fully-formed AWS policy as JSON. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `owner` - The AWS Account ID of the SNS topic owner + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Topic Policy using the topic ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SNS Topic Policy using the topic ARN. For example: + +```console +% terraform import aws_sns_topic_policy.user_updates arn:aws:sns:us-west-2:0123456789012:my-topic +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sns_topic_subscription.html.markdown b/website/docs/cdktf/typescript/r/sns_topic_subscription.html.markdown new file mode 100644 index 00000000000..b410623628c --- /dev/null +++ b/website/docs/cdktf/typescript/r/sns_topic_subscription.html.markdown @@ -0,0 +1,408 @@ +--- +subcategory: "SNS (Simple Notification)" +layout: "aws" +page_title: "AWS: aws_sns_topic_subscription" +description: |- + Provides a resource for subscribing to SNS topics. +--- + + + +# Resource: aws_sns_topic_subscription + +Provides a resource for subscribing to SNS topics. Requires that an SNS topic exist for the subscription to attach to. This resource allows you to automatically place messages sent to SNS topics in SQS queues, send them as HTTP(S) POST requests to a given endpoint, send SMS messages, or notify devices / applications. The most likely use case for Terraform users will probably be SQS queues. 
+
+~> **NOTE:** If the SNS topic and SQS queue are in different AWS regions, the `awsSnsTopicSubscription` must use an AWS provider that is in the same region as the SNS topic. If the `awsSnsTopicSubscription` uses a provider with a different region than the SNS topic, Terraform will fail to create the subscription.
+
+~> **NOTE:** Setup of cross-account subscriptions from SNS topics to SQS queues requires Terraform to have access to BOTH accounts.
+
+~> **NOTE:** If an SNS topic and SQS queue are in different AWS accounts but the same region, the `awsSnsTopicSubscription` must use the AWS provider for the account with the SQS queue. If `awsSnsTopicSubscription` uses a provider with a different account than the SQS queue, Terraform creates the subscription but does not keep state and tries to re-create the subscription at every `apply`.
+
+~> **NOTE:** If an SNS topic and SQS queue are in different AWS accounts and different AWS regions, the subscription needs to be initiated from the account with the SQS queue but in the region of the SNS topic.
+
+~> **NOTE:** You cannot unsubscribe from a subscription that is pending confirmation. If you use `email`, `emailJson`, or `http`/`https` (without auto-confirmation enabled), AWS does not allow Terraform to delete / unsubscribe the subscription until it has been confirmed (e.g., outside of Terraform). If you `destroy` an unconfirmed subscription, Terraform removes the subscription from its state but the subscription still exists in AWS. However, if you delete an SNS topic, SNS [deletes all the subscriptions](https://docs.aws.amazon.com/sns/latest/dg/sns-delete-subscription-topic.html) associated with the topic. Also, you can import a subscription after confirmation and then delete it.
+
+## Example Usage
+
+You can supply the topic ARN by hand in the `topicArn` property along with the queue ARN:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SnsTopicSubscription } from "./.gen/providers/aws/sns-topic-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SnsTopicSubscription(this, "user_updates_sqs_target", {
+      endpoint: "arn:aws:sqs:us-west-2:432981146916:terraform-queue-too",
+      protocol: "sqs",
+      topicArn: "arn:aws:sns:us-west-2:432981146916:user-updates-topic",
+    });
+  }
+}
+
+```
+
+Alternatively you can use the ARN properties of a managed SNS topic and SQS queue:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +import { SnsTopicSubscription } from "./.gen/providers/aws/sns-topic-subscription"; +import { SqsQueue } from "./.gen/providers/aws/sqs-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const userUpdates = new SnsTopic(this, "user_updates", { + name: "user-updates-topic", + }); + const userUpdatesQueue = new SqsQueue(this, "user_updates_queue", { + name: "user-updates-queue", + }); + new SnsTopicSubscription(this, "user_updates_sqs_target", { + endpoint: userUpdatesQueue.arn, + protocol: "sqs", + topicArn: userUpdates.arn, + }); + } +} + +``` + +You can subscribe SNS topics to SQS queues in different Amazon accounts and regions: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + TerraformVariable, + propertyAccess, + Token, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { AwsProvider } from "./.gen/providers/aws/provider"; +import { SnsTopic } from "./.gen/providers/aws/sns-topic"; +import { SnsTopicSubscription } from "./.gen/providers/aws/sns-topic-subscription"; +import { SqsQueue } from "./.gen/providers/aws/sqs-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const sns = new TerraformVariable(this, "sns", { + default: [ + { + "account-id": "111111111111", + display_name: "example", + name: "example-sns-topic", + region: "us-west-1", + "role-name": "service/service-hashicorp-terraform", + }, + ], + }); + const sqs = new TerraformVariable(this, "sqs", { + default: [ + { + "account-id": "222222222222", + name: "example-sqs-queue", + region: "us-east-1", + "role-name": "service/service-hashicorp-terraform", + }, + ], + }); + const snsTopicPolicy = new DataAwsIamPolicyDocument( + this, + "sns-topic-policy", + { + policyId: "__default_policy_ID", + statement: [ + { + actions: [ + "SNS:Subscribe", + "SNS:SetTopicAttributes", + "SNS:RemovePermission", + "SNS:Publish", + "SNS:ListSubscriptionsByTopic", + "SNS:GetTopicAttributes", + "SNS:DeleteTopic", + "SNS:AddPermission", + ], + condition: [ + { + test: "StringEquals", + values: [ + Token.asString(propertyAccess(sns.value, ['"account-id"'])), + ], + variable: "AWS:SourceOwner", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:sns:" + + Token.asString(propertyAccess(sns.value, ['"region"'])) + + ":" + + Token.asString(propertyAccess(sns.value, ['"account-id"'])) + + ":" + + Token.asString(propertyAccess(sns.value, ['"name"'])), + ], + sid: "__default_statement_ID", + }, + { + actions: ["SNS:Subscribe", "SNS:Receive"], + condition: [ + { + test: "StringLike", + values: [ + "arn:aws:sqs:" + + Token.asString(propertyAccess(sqs.value, ['"region"'])) + + ":" + + Token.asString( + propertyAccess(sqs.value, ['"account-id"']) + ) + + ":" + + Token.asString(propertyAccess(sqs.value, ['"name"'])), + ], + variable: "SNS:Endpoint", + }, + ], + effect: "Allow", + principals: [ + { + 
identifiers: ["*"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:sns:" + + Token.asString(propertyAccess(sns.value, ['"region"'])) + + ":" + + Token.asString(propertyAccess(sns.value, ['"account-id"'])) + + ":" + + Token.asString(propertyAccess(sns.value, ['"name"'])), + ], + sid: "__console_sub_0", + }, + ], + } + ); + const sqsQueuePolicy = new DataAwsIamPolicyDocument( + this, + "sqs-queue-policy", + { + policyId: + "arn:aws:sqs:" + + Token.asString(propertyAccess(sqs.value, ['"region"'])) + + ":" + + Token.asString(propertyAccess(sqs.value, ['"account-id"'])) + + ":" + + Token.asString(propertyAccess(sqs.value, ['"name"'])) + + "/SQSDefaultPolicy", + statement: [ + { + actions: ["SQS:SendMessage"], + condition: [ + { + test: "ArnEquals", + values: [ + "arn:aws:sns:" + + Token.asString(propertyAccess(sns.value, ['"region"'])) + + ":" + + Token.asString( + propertyAccess(sns.value, ['"account-id"']) + ) + + ":" + + Token.asString(propertyAccess(sns.value, ['"name"'])), + ], + variable: "aws:SourceArn", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["*"], + type: "AWS", + }, + ], + resources: [ + "arn:aws:sqs:" + + Token.asString(propertyAccess(sqs.value, ['"region"'])) + + ":" + + Token.asString(propertyAccess(sqs.value, ['"account-id"'])) + + ":" + + Token.asString(propertyAccess(sqs.value, ['"name"'])), + ], + sid: "example-sns-topic", + }, + ], + } + ); + const awsSns = new AwsProvider(this, "aws", { + alias: "sns", + assumeRole: [ + { + roleArn: + "arn:aws:iam::" + + Token.asString(propertyAccess(sns.value, ['"account-id"'])) + + ":role/" + + Token.asString(propertyAccess(sns.value, ['"role-name"'])), + sessionName: + "sns-" + Token.asString(propertyAccess(sns.value, ['"region"'])), + }, + ], + region: Token.asString(propertyAccess(sns.value, ['"region"'])), + }); + const awsSqs = new AwsProvider(this, "aws_5", { + alias: "sqs", + assumeRole: [ + { + roleArn: + "arn:aws:iam::" + + Token.asString(propertyAccess(sqs.value, ['"account-id"'])) + + ":role/" + + Token.asString(propertyAccess(sqs.value, ['"role-name"'])), + sessionName: + "sqs-" + Token.asString(propertyAccess(sqs.value, ['"region"'])), + }, + ], + region: Token.asString(propertyAccess(sqs.value, ['"region"'])), + }); + const sns2Sqs = new AwsProvider(this, "aws_6", { + alias: "sns2sqs", + assumeRole: [ + { + roleArn: + "arn:aws:iam::" + + Token.asString(propertyAccess(sqs.value, ['"account-id"'])) + + ":role/" + + Token.asString(propertyAccess(sqs.value, ['"role-name"'])), + sessionName: + "sns2sqs-" + + Token.asString(propertyAccess(sns.value, ['"region"'])), + }, + ], + region: Token.asString(propertyAccess(sns.value, ['"region"'])), + }); + const snsTopic = new SnsTopic(this, "sns-topic", { + displayName: Token.asString( + propertyAccess(sns.value, ['"display_name"']) + ), + name: Token.asString(propertyAccess(sns.value, ['"name"'])), + policy: Token.asString(snsTopicPolicy.json), + provider: "${aws.sns}", + }); + const sqsQueue = new SqsQueue(this, "sqs-queue", { + name: Token.asString(propertyAccess(sqs.value, ['"name"'])), + policy: Token.asString(sqsQueuePolicy.json), + provider: "${aws.sqs}", + }); + const awsSnsTopicSubscriptionSnsTopic = new SnsTopicSubscription( + this, + "sns-topic_9", + { + endpoint: sqsQueue.arn, + protocol: "sqs", + provider: sns2Sqs, + topicArn: snsTopic.arn, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsSnsTopicSubscriptionSnsTopic.overrideLogicalId("sns-topic");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `endpoint` - (Required) Endpoint to send data to. The contents vary with the protocol. See details below.
+* `protocol` - (Required) Protocol to use. Valid values are: `sqs`, `sms`, `lambda`, `firehose`, and `application`. Protocols `email`, `emailJson`, `http`, and `https` are also valid but partially supported. See details below.
+* `subscriptionRoleArn` - (Required if `protocol` is `firehose`) ARN of the IAM role to publish to the Kinesis Data Firehose delivery stream. Refer to the [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/sns-firehose-as-subscriber.html).
+* `topicArn` - (Required) ARN of the SNS topic to subscribe to.
+
+The following arguments are optional:
+
+* `confirmationTimeoutInMinutes` - (Optional) Integer indicating the number of minutes to wait in retry mode for fetching the subscription ARN before marking it as a failure. Only applicable for the `http` and `https` protocols. Default is `1`.
+* `deliveryPolicy` - (Optional) JSON String with the delivery policy (retries, backoff, etc.) that will be used in the subscription - this only applies to HTTP/S subscriptions. Refer to the [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/DeliveryPolicies.html) for more details.
+* `endpointAutoConfirms` - (Optional) Whether the endpoint is capable of [auto confirming subscription](http://docs.aws.amazon.com/sns/latest/dg/SendMessageToHttp.html#SendMessageToHttp.prepare) (e.g., PagerDuty). Default is `false`.
+* `filterPolicy` - (Optional) JSON String with the filter policy that will be used in the subscription to filter messages seen by the target resource. Refer to the [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/message-filtering.html) for more details.
+* `filterPolicyScope` - (Optional) Whether the `filterPolicy` applies to `MessageAttributes` (default) or `MessageBody`.
+* `rawMessageDelivery` - (Optional) Whether to enable raw message delivery (the original message is directly passed, not wrapped in JSON with the original message in the message property). Default is `false`.
+* `redrivePolicy` - (Optional) JSON String with the redrive policy that will be used in the subscription. Refer to the [SNS docs](https://docs.aws.amazon.com/sns/latest/dg/sns-dead-letter-queues.html#how-messages-moved-into-dead-letter-queue) for more details.
+
+### Protocol support
+
+Supported values for `protocol` include the following (a minimal `lambda` sketch follows the list):
+
+* `application` - Delivers JSON-encoded messages. `endpoint` is the endpoint ARN of a mobile app and device.
+* `firehose` - Delivers JSON-encoded messages. `endpoint` is the ARN of an Amazon Kinesis Data Firehose delivery stream (e.g.,
+`arn:aws:firehose:us-east-1:123456789012:deliverystream/ticket-upload-stream`).
+* `lambda` - Delivers JSON-encoded messages. `endpoint` is the ARN of an AWS Lambda function.
+* `sms` - Delivers text messages via SMS. `endpoint` is the phone number of an SMS-enabled device.
+* `sqs` - Delivers JSON-encoded messages. `endpoint` is the ARN of an Amazon SQS queue (e.g., `arn:aws:sqs:us-west-2:123456789012:terraform-queue-too`).
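+
+As a minimal sketch, a `lambda` protocol subscription might look like this (both ARNs below are hypothetical placeholders):
+
+```typescript
+// Hypothetical sketch: subscribe a Lambda function to an existing SNS topic.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SnsTopicSubscription } from "./.gen/providers/aws/sns-topic-subscription";
+class LambdaSubscriptionSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SnsTopicSubscription(this, "user_updates_lambda_target", {
+      endpoint: "arn:aws:lambda:us-west-2:123456789012:function:process-user-updates",
+      protocol: "lambda",
+      topicArn: "arn:aws:sns:us-west-2:123456789012:user-updates-topic",
+    });
+  }
+}
+```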
+ +Partially supported values for `protocol` include: + +~> **NOTE:** If an `awsSnsTopicSubscription` uses a partially-supported protocol and the subscription is not confirmed, either through automatic confirmation or means outside of Terraform (e.g., clicking on a "Confirm Subscription" link in an email), Terraform cannot delete / unsubscribe the subscription. Attempting to `destroy` an unconfirmed subscription will remove the `awsSnsTopicSubscription` from Terraform's state but **_will not_** remove the subscription from AWS. The `pendingConfirmation` attribute provides confirmation status. + +* `email` - Delivers messages via SMTP. `endpoint` is an email address. +* `emailJson` - Delivers JSON-encoded messages via SMTP. `endpoint` is an email address. +* `http` -- Delivers JSON-encoded messages via HTTP POST. `endpoint` is a URL beginning with `http://`. +* `https` -- Delivers JSON-encoded messages via HTTPS POST. `endpoint` is a URL beginning with `https://`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the subscription. +* `confirmationWasAuthenticated` - Whether the subscription confirmation request was authenticated. +* `id` - ARN of the subscription. +* `ownerId` - AWS account ID of the subscription's owner. +* `pendingConfirmation` - Whether the subscription has not been confirmed. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SNS Topic Subscriptions using the subscription `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SNS Topic Subscriptions using the subscription `arn`. For example: + +```console +% terraform import aws_sns_topic_subscription.user_updates_sqs_target arn:aws:sns:us-west-2:0123456789012:my-topic:8a21d249-4329-4871-acc6-7be709c6ea7f +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/spot_datafeed_subscription.html.markdown b/website/docs/cdktf/typescript/r/spot_datafeed_subscription.html.markdown new file mode 100644 index 00000000000..90926f063a3 --- /dev/null +++ b/website/docs/cdktf/typescript/r/spot_datafeed_subscription.html.markdown @@ -0,0 +1,82 @@ +--- +subcategory: "EC2 (Elastic Compute Cloud)" +layout: "aws" +page_title: "AWS: aws_spot_datafeed_subscription" +description: |- + Provides a Spot Datafeed Subscription resource. +--- + + + +# Resource: aws_spot_datafeed_subscription + +-> **Note:** There is only a single subscription allowed per account. + +To help you understand the charges for your Spot instances, Amazon EC2 provides a data feed that describes your Spot instance usage and pricing. +This data feed is sent to an Amazon S3 bucket that you specify when you subscribe to the data feed. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { SpotDatafeedSubscription } from "./.gen/providers/aws/spot-datafeed-subscription";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const defaultVar = new S3Bucket(this, "default", {
+      bucket: "tf-spot-datafeed",
+    });
+    const awsSpotDatafeedSubscriptionDefault = new SpotDatafeedSubscription(
+      this,
+      "default_1",
+      {
+        bucket: defaultVar.id,
+        prefix: "my_subdirectory",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSpotDatafeedSubscriptionDefault.overrideLogicalId("default");
+  }
+}
+
+```
+
+## Argument Reference
+
+* `bucket` - (Required) The Amazon S3 bucket in which to store the Spot instance data feed.
+* `prefix` - (Optional) Path of a folder inside the bucket in which to place the Spot pricing data.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import a Spot Datafeed Subscription using the word `spot-datafeed-subscription`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import a Spot Datafeed Subscription using the word `spot-datafeed-subscription`. For example:
+
+```console
+% terraform import aws_spot_datafeed_subscription.mysubscription spot-datafeed-subscription
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/spot_fleet_request.html.markdown b/website/docs/cdktf/typescript/r/spot_fleet_request.html.markdown
new file mode 100644
index 00000000000..f90800f9ded
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/spot_fleet_request.html.markdown
@@ -0,0 +1,530 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_spot_fleet_request"
+description: |-
+  Provides a Spot Fleet Request resource.
+---
+
+
+
+# Resource: aws_spot_fleet_request
+
+Provides an EC2 Spot Fleet Request resource. This allows a fleet of Spot
+instances to be requested on the Spot market.
+
+~> **NOTE:** [AWS strongly discourages](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-best-practices.html#which-spot-request-method-to-use) the use of the legacy APIs called by this resource.
+We recommend using the [EC2 Fleet](ec2_fleet.html) or [Auto Scaling Group](autoscaling_group.html) resources instead.
+
+## Example Usage
+
+### Using launch specifications
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
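+ *
+ * `example` below is assumed to be an IAM instance profile defined elsewhere
+ * in the stack; its `arn` supplies `iamInstanceProfileArn`.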
+ */ +import { SpotFleetRequest } from "./.gen/providers/aws/spot-fleet-request"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SpotFleetRequest(this, "cheap_compute", { + allocationStrategy: "diversified", + iamFleetRole: "arn:aws:iam::12345678:role/spot-fleet", + launchSpecification: [ + { + ami: "ami-1234", + iamInstanceProfileArn: example.arn, + instanceType: "m4.10xlarge", + placementTenancy: "dedicated", + spotPrice: "2.793", + }, + { + ami: "ami-5678", + availabilityZone: "us-west-1a", + iamInstanceProfileArn: example.arn, + instanceType: "m4.4xlarge", + keyName: "my-key", + rootBlockDevice: [ + { + volumeSize: Token.asNumber("300"), + volumeType: "gp2", + }, + ], + spotPrice: "1.117", + subnetId: "subnet-1234", + tags: { + Name: "spot-fleet-example", + }, + weightedCapacity: Token.asString(35), + }, + ], + spotPrice: "0.03", + targetCapacity: 6, + validUntil: "2019-11-04T20:44:20Z", + }); + } +} + +``` + +### Using launch templates + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { LaunchTemplate } from "./.gen/providers/aws/launch-template"; +import { SpotFleetRequest } from "./.gen/providers/aws/spot-fleet-request"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new LaunchTemplate(this, "foo", { + imageId: "ami-516b9131", + instanceType: "m1.small", + keyName: "some-key", + name: "launch-template", + }); + const awsSpotFleetRequestFoo = new SpotFleetRequest(this, "foo_1", { + dependsOn: [testAttach], + iamFleetRole: "arn:aws:iam::12345678:role/spot-fleet", + launchTemplateConfig: [ + { + launchTemplateSpecification: { + id: foo.id, + version: Token.asString(foo.latestVersion), + }, + }, + ], + spotPrice: "0.005", + targetCapacity: 2, + validUntil: "2019-11-04T20:44:20Z", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSpotFleetRequestFoo.overrideLogicalId("foo"); + } +} + +``` + +~> **NOTE:** Terraform does not support the functionality where multiple `subnetId` or `availabilityZone` parameters can be specified in the same +launch configuration block. If you want to specify multiple values, then separate launch configuration blocks should be used or launch template overrides should be configured, one per subnet: + +### Using multiple launch specifications + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { SpotFleetRequest } from "./.gen/providers/aws/spot-fleet-request"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SpotFleetRequest(this, "foo", { + iamFleetRole: "arn:aws:iam::12345678:role/spot-fleet", + launchSpecification: [ + { + ami: "ami-d06a90b0", + availabilityZone: "us-west-2a", + instanceType: "m1.small", + keyName: "my-key", + }, + { + ami: "ami-d06a90b0", + availabilityZone: "us-west-2a", + instanceType: "m5.large", + keyName: "my-key", + }, + ], + spotPrice: "0.005", + targetCapacity: 2, + validUntil: "2019-11-04T20:44:20Z", + }); + } +} + +``` + +-> In this example, we use a [`dynamic` block](https://www.terraform.io/language/expressions/dynamic-blocks) to define zero or more `launchSpecification` blocks, producing one for each element in the list of subnet ids. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { + TerraformVariable, + Token, + TerraformIterator, + propertyAccess, + TerraformStack, +} from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SpotFleetRequest } from "./.gen/providers/aws/spot-fleet-request"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + /*Terraform Variables are not always the best fit for getting inputs in the context of Terraform CDK. + You can read more about this at https://cdk.tf/variables*/ + const subnets = new TerraformVariable(this, "subnets", {}); + /*In most cases loops should be handled in the programming language context and + not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input + you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source + you need to keep this like it is.*/ + const exampleDynamicIterator0 = TerraformIterator.fromList( + Token.asAny( + "${[ for s in ${" + + subnets.value + + "} : {\n subnet_id = s[1]\n }]}" + ) + ); + new SpotFleetRequest(this, "example", { + allocationStrategy: "lowestPrice", + fleetType: "request", + iamFleetRole: "arn:aws:iam::12345678:role/spot-fleet", + targetCapacity: 3, + terminateInstancesWithExpiration: Token.asBoolean("true"), + validUntil: "2019-11-04T20:44:20Z", + waitForFulfillment: Token.asBoolean("true"), + launchSpecification: exampleDynamicIterator0.dynamic({ + ami: "ami-1234", + instance_type: "m4.4xlarge", + root_block_device: [ + { + delete_on_termination: "true", + volume_size: "8", + volume_type: "gp2", + }, + ], + subnet_id: propertyAccess(exampleDynamicIterator0.value, ["subnet_id"]), + tags: { + Name: "Spot Node", + tag_builder: "builder", + }, + vpc_security_group_ids: "sg-123456", + }), + }); + } +} + +``` + +### Using multiple launch configurations + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
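+ *
+ * `vpcId` and `testAttach` below are assumed to be defined elsewhere in the
+ * stack; the three `overrides` create one launch template override per subnet.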
+ */
+import { DataAwsSubnets } from "./.gen/providers/aws/data-aws-subnets";
+import { LaunchTemplate } from "./.gen/providers/aws/launch-template";
+import { SpotFleetRequest } from "./.gen/providers/aws/spot-fleet-request";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const foo = new LaunchTemplate(this, "foo", {
+      imageId: "ami-516b9131",
+      instanceType: "m1.small",
+      keyName: "some-key",
+      name: "launch-template",
+    });
+    const example = new DataAwsSubnets(this, "example", {
+      filter: [
+        {
+          name: "vpc-id",
+          values: [vpcId.stringValue],
+        },
+      ],
+    });
+    const awsSpotFleetRequestFoo = new SpotFleetRequest(this, "foo_2", {
+      dependsOn: [testAttach],
+      iamFleetRole: "arn:aws:iam::12345678:role/spot-fleet",
+      launchTemplateConfig: [
+        {
+          launchTemplateSpecification: {
+            id: foo.id,
+            version: Token.asString(foo.latestVersion),
+          },
+          overrides: [
+            {
+              subnetId: Token.asString(propertyAccess(example.ids, ["0"])),
+            },
+            {
+              subnetId: Token.asString(propertyAccess(example.ids, ["1"])),
+            },
+            {
+              subnetId: Token.asString(propertyAccess(example.ids, ["2"])),
+            },
+          ],
+        },
+      ],
+      spotPrice: "0.005",
+      targetCapacity: 2,
+      validUntil: "2019-11-04T20:44:20Z",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSpotFleetRequestFoo.overrideLogicalId("foo");
+  }
+}
+
+```
+
+## Argument Reference
+
+Most of these arguments directly correspond to the
+[official API](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_SpotFleetRequestConfigData.html).
+
+* `iamFleetRole` - (Required) Grants the Spot fleet permission to terminate
+  Spot instances on your behalf when you cancel its Spot fleet request using
+`CancelSpotFleetRequests` or when the Spot fleet request expires, if you set
+`terminateInstancesWithExpiration`.
+* `context` - (Optional) Reserved.
+* `replaceUnhealthyInstances` - (Optional) Indicates whether Spot fleet should replace unhealthy instances. Default `false`.
+* `launchSpecification` - (Optional) Used to define the launch configuration of the
+  spot-fleet request. Can be specified multiple times to define different bids
+across different markets and instance types. Conflicts with `launchTemplateConfig`. At least one of `launchSpecification` or `launchTemplateConfig` is required.
+
+    **Note**: This takes in similar but not
+    identical inputs as [`awsInstance`](instance.html). There are limitations on
+    what you can specify. See the list of officially supported inputs in the
+    [reference documentation](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_SpotFleetLaunchSpecification.html). Any normal [`awsInstance`](instance.html) parameter that corresponds to those inputs may be used. There is also an additional parameter, `iamInstanceProfileArn`, which takes the `arn` attribute of an `awsIamInstanceProfile` as input.
+
+* `launchTemplateConfig` - (Optional) Launch template configuration block. See [Launch Template Configs](#launch-template-configs) below for more details. Conflicts with `launchSpecification`. At least one of `launchSpecification` or `launchTemplateConfig` is required.
+* `spotMaintenanceStrategies` - (Optional) Nested argument containing maintenance strategies for managing your Spot Instances that are at an elevated risk of being interrupted. Defined below.
+* `spotPrice` - (Optional; Default: On-demand price) The maximum bid price per unit hour.
+* `waitForFulfillment` - (Optional; Default: false) If set, Terraform will
+  wait for the Spot Request to be fulfilled, and will throw an error if the
+  timeout of 10m is reached.
+* `targetCapacity` - The number of units to request. You can choose to set the
+  target capacity in terms of instances or a performance characteristic that is
+  important to your application workload, such as vCPUs, memory, or I/O.
+* `targetCapacityUnitType` - (Optional) The unit for the target capacity. This can only be set when `instanceRequirements` is defined.
+* `allocationStrategy` - Indicates how to allocate the target capacity across
+  the Spot pools specified by the Spot fleet request. Valid values: `lowestPrice`, `diversified`, `capacityOptimized`, `capacityOptimizedPrioritized`, and `priceCapacityOptimized`. The default is
+  `lowestPrice`.
+* `instancePoolsToUseCount` - (Optional; Default: 1)
+  The number of Spot pools across which to allocate your target Spot capacity.
+  Valid only when `allocationStrategy` is set to `lowestPrice`. Spot Fleet selects
+  the cheapest Spot pools and evenly allocates your target Spot capacity across
+  the number of Spot pools that you specify.
+* `excessCapacityTerminationPolicy` - Indicates whether running Spot
+  instances should be terminated if the target capacity of the Spot fleet
+  request is decreased below the current size of the Spot fleet.
+* `terminateInstancesWithExpiration` - (Optional) Indicates whether running Spot
+  instances should be terminated when the Spot fleet request expires.
+* `terminateInstancesOnDelete` - (Optional) Indicates whether running Spot
+  instances should be terminated when the resource is deleted (and the Spot fleet request cancelled).
+  If no value is specified, the value of the `terminateInstancesWithExpiration` argument is used.
+* `instanceInterruptionBehaviour` - (Optional) Indicates whether a Spot
+  instance stops or terminates when it is interrupted. Default is
+  `terminate`.
+* `fleetType` - (Optional) The type of fleet request. Indicates whether the Spot Fleet only requests the target
+  capacity or also attempts to maintain it. Default is `maintain`.
+* `validUntil` - (Optional) The end date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). At this point, no new Spot instance requests are placed or enabled to fulfill the request.
+* `validFrom` - (Optional) The start date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). The default is to start fulfilling the request immediately.
+* `loadBalancers` - (Optional) A list of elastic load balancer names to add to the Spot fleet.
+* `targetGroupArns` - (Optional) A list of `awsAlbTargetGroup` ARNs, for use with Application Load Balancing.
+* `onDemandAllocationStrategy` - The order of the launch template overrides to use in fulfilling On-Demand capacity. The possible values are: `lowestPrice` and `prioritized`. The default is `lowestPrice`.
+* `onDemandMaxTotalPrice` - The maximum amount per hour for On-Demand Instances that you're willing to pay. When the maximum amount you're willing to pay is reached, the fleet stops launching instances even if it hasn't met the target capacity.
+* `onDemandTargetCapacity` - The number of On-Demand units to request. If the request type is `maintain`, you can specify a target capacity of 0 and add capacity later.
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Launch Template Configs
+
+The `launchTemplateConfig` block supports the following:
+
+* `launchTemplateSpecification` - (Required) Launch template specification. See [Launch Template Specification](#launch-template-specification) below for more details.
+* `overrides` - (Optional) One or more override configurations. See [Overrides](#overrides) below for more details.
+
+### Launch Template Specification
+
+* `id` - The ID of the launch template. Conflicts with `name`.
+* `name` - The name of the launch template. Conflicts with `id`.
+* `version` - (Optional) Template version. Unlike the autoscaling equivalent, does not support `$latest` or `$default`, so use the launch_template resource's attribute, e.g., `"${awsLaunchTemplateFooLatestVersion}"`. It will use the default version if omitted.
+
+    **Note:** The specified launch template can specify only a subset of the
+    inputs of [`awsLaunchTemplate`](launch_template.html). There are limitations on
+    what you can specify as spot fleet does not support all the attributes that are supported by autoscaling groups. [AWS documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-launch-templates.html#launch-templates-spot-fleet) is currently sparse, but at least `instanceInitiatedShutdownBehavior` is confirmed unsupported.
+
+### spot_maintenance_strategies
+
+* `capacityRebalance` - (Optional) Nested argument containing the capacity rebalance for your fleet request. Defined below.
+
+### capacity_rebalance
+
+* `replacementStrategy` - (Optional) The replacement strategy to use. Only available for spot fleets with `fleetType` set to `maintain`. Valid values: `launch`.
+
+### Overrides
+
+* `availabilityZone` - (Optional) The availability zone in which to place the request.
+* `instanceRequirements` - (Optional) The instance requirements. See below.
+* `instanceType` - (Optional) The type of instance to request.
+* `priority` - (Optional) The priority for the launch template override. The lower the number, the higher the priority. If no number is set, the launch template override has the lowest priority.
+* `spotPrice` - (Optional) The maximum spot bid for this override request.
+* `subnetId` - (Optional) The subnet in which to launch the requested instance.
+* `weightedCapacity` - (Optional) The capacity added to the fleet by a fulfilled request.
+
+### Instance Requirements
+
+This configuration block supports the following:
+
+* `acceleratorCount` - (Optional) Block describing the minimum and maximum number of accelerators (GPUs, FPGAs, or AWS Inferentia chips). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum. Set to `0` to exclude instance types with accelerators.
+* `acceleratorManufacturers` - (Optional) List of accelerator manufacturer names. Default is any manufacturer.
+
+    ```
+    Valid names:
+      * amazon-web-services
+      * amd
+      * nvidia
+      * xilinx
+    ```
+
+* `acceleratorNames` - (Optional) List of accelerator names. Default is any accelerator.
+
+    ```
+    Valid names:
+      * a100 - NVIDIA A100 GPUs
+      * v100 - NVIDIA V100 GPUs
+      * k80 - NVIDIA K80 GPUs
+      * t4 - NVIDIA T4 GPUs
+      * m60 - NVIDIA M60 GPUs
+      * radeon-pro-v520 - AMD Radeon Pro V520 GPUs
+      * vu9p - Xilinx VU9P FPGAs
+    ```
+
+* `acceleratorTotalMemoryMib` - (Optional) Block describing the minimum and maximum total memory of the accelerators. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `acceleratorTypes` - (Optional) List of accelerator types. Default is any accelerator type.
+
+    ```
+    Valid types:
+      * fpga
+      * gpu
+      * inference
+    ```
+
+* `allowedInstanceTypes` - (Optional) List of instance types to apply your specified attributes against. All other instance types are ignored, even if they match your specified attributes. You can use strings with one or more wild cards, represented by an asterisk (\*), to allow an instance type, size, or generation. The following are examples: `m58Xlarge`, `c5*.*`, `m5A.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are allowing the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5A.*`, you are allowing all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is all instance types.
+
+    ~> **NOTE:** If you specify `allowedInstanceTypes`, you can't specify `excludedInstanceTypes`.
+
+* `bareMetal` - (Optional) Indicate whether bare metal instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `baselineEbsBandwidthMbps` - (Optional) Block describing the minimum and maximum baseline EBS bandwidth, in Mbps. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `burstablePerformance` - (Optional) Indicate whether burstable performance instance types should be `included`, `excluded`, or `required`. Default is `excluded`.
+* `cpuManufacturers` - (Optional) List of CPU manufacturer names. Default is any manufacturer.
+
+    ~> **NOTE:** Don't confuse the CPU hardware manufacturer with the CPU hardware architecture. Instances will be launched with a compatible CPU architecture based on the Amazon Machine Image (AMI) that you specify in your launch template.
+
+    ```
+    Valid names:
+      * amazon-web-services
+      * amd
+      * intel
+    ```
+
+* `excludedInstanceTypes` - (Optional) List of instance types to exclude. You can use strings with one or more wild cards, represented by an asterisk (\*), to exclude an instance type, size, or generation. The following are examples: `m58Xlarge`, `c5*.*`, `m5A.*`, `r*`, `*3*`. For example, if you specify `c5*`, you are excluding the entire C5 instance family, which includes all C5a and C5n instance types. If you specify `m5A.*`, you are excluding all the M5a instance types, but not the M5n instance types. Maximum of 400 entries in the list; each entry is limited to 30 characters. Default is no excluded instance types.
+
+    ~> **NOTE:** If you specify `excludedInstanceTypes`, you can't specify `allowedInstanceTypes`.
+
+* `instanceGenerations` - (Optional) List of instance generation names. Default is any generation.
+
+    ```
+    Valid names:
+      * current - Recommended for best performance.
+      * previous - For existing applications optimized for older instance types.
+    ```
+
+* `localStorage` - (Optional) Indicate whether instance types with local storage volumes are `included`, `excluded`, or `required`. Default is `included`.
+* `localStorageTypes` - (Optional) List of local storage type names. Default is any storage type.
+
+    ```
+    Valid names:
+      * hdd - hard disk drive
+      * ssd - solid state drive
+    ```
+
+* `memoryGibPerVcpu` - (Optional) Block describing the minimum and maximum amount of memory (GiB) per vCPU. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+* `memoryMib` - (Optional) Block describing the minimum and maximum amount of memory (MiB). Default is no maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `networkBandwidthGbps` - (Optional) Block describing the minimum and maximum amount of network bandwidth, in gigabits per second (Gbps). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `networkInterfaceCount` - (Optional) Block describing the minimum and maximum number of network interfaces. Default is no minimum or maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+* `onDemandMaxPricePercentageOverLowestPrice` - (Optional) The price protection threshold for On-Demand Instances. This is the maximum you’ll pay for an On-Demand Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, we will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 20.
+
+    If you set DesiredCapacityType to vcpu or memory-mib, the price protection threshold is applied based on the per vCPU or per memory price instead of the per instance price.
+* `requireHibernateSupport` - (Optional) Indicate whether instance types must support On-Demand Instance Hibernation, either `true` or `false`. Default is `false`.
+* `spotMaxPricePercentageOverLowestPrice` - (Optional) The price protection threshold for Spot Instances. This is the maximum you’ll pay for a Spot Instance, expressed as a percentage higher than the cheapest M, C, or R instance type with your specified attributes. When Amazon EC2 Auto Scaling selects instance types with your attributes, we will exclude instance types whose price is higher than your threshold. The parameter accepts an integer, which Amazon EC2 Auto Scaling interprets as a percentage. To turn off price protection, specify a high value, such as 999999. Default is 100.
+
+    If you set DesiredCapacityType to vcpu or memory-mib, the price protection threshold is applied based on the per vCPU or per memory price instead of the per instance price.
+* `totalLocalStorageGb` - (Optional) Block describing the minimum and maximum total local storage (GB). Default is no minimum or maximum.
+    * `min` - (Optional) Minimum. May be a decimal number, e.g. `0.5`.
+    * `max` - (Optional) Maximum. May be a decimal number, e.g. `0.5`.
+* `vcpuCount` - (Optional) Block describing the minimum and maximum number of vCPUs. Default is no maximum.
+    * `min` - (Optional) Minimum.
+    * `max` - (Optional) Maximum.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Spot fleet request ID.
+* `spotRequestState` - The state of the Spot fleet request.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10M`)
+* `delete` - (Default `15M`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Spot Fleet Requests using `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Spot Fleet Requests using `id`. For example:
+
+```console
+% terraform import aws_spot_fleet_request.fleet sfr-005e9ec8-5546-4c31-b317-31a62325411e
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/spot_instance_request.html.markdown b/website/docs/cdktf/typescript/r/spot_instance_request.html.markdown
new file mode 100644
index 00000000000..c84b20e3ee6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/spot_instance_request.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "EC2 (Elastic Compute Cloud)"
+layout: "aws"
+page_title: "AWS: aws_spot_instance_request"
+description: |-
+  Provides a Spot Instance Request resource.
+---
+
+
+
+# Resource: aws_spot_instance_request
+
+Provides an EC2 Spot Instance Request resource. This allows instances to be
+requested on the spot market.
+
+By default, Terraform creates Spot Instance Requests with a `persistent` type,
+which means that for the duration of their lifetime, AWS will launch an
+instance with the configured details if and when the spot market accepts
+the requested price.
+
+On destruction, Terraform will make an attempt to terminate the associated Spot
+Instance if there is one present.
+
+Spot Instance requests with a `oneTime` type will close the spot request
+when the instance is terminated either by the request being below the current spot
+price availability or by a user.
+
+~> **NOTE:** Because their behavior depends on the live status of the spot
+market, Spot Instance Requests have a unique lifecycle that makes them behave
+differently than other Terraform resources. Most importantly: there is __no
+guarantee__ that a Spot Instance exists to fulfill the request at any given
+point in time. See the [AWS Spot Instance
+documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-spot-instances.html)
+for more information.
+
+~> **NOTE:** [AWS strongly discourages](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-best-practices.html#which-spot-request-method-to-use) the use of the legacy APIs called by this resource.
+We recommend using the [EC2 Instance](instance.html) resource with `instanceMarketOptions` instead.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
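+ *
+ * A minimal persistent request: AWS launches one c4.xlarge whenever the spot
+ * price is at or below the 0.03 maximum bid configured below.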
+ */
+import { SpotInstanceRequest } from "./.gen/providers/aws/spot-instance-request";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SpotInstanceRequest(this, "cheap_worker", {
+      ami: "ami-1234",
+      instanceType: "c4.xlarge",
+      spotPrice: "0.03",
+      tags: {
+        Name: "CheapWorker",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+Spot Instance Requests support all the same arguments as
+[`awsInstance`](instance.html), with the addition of:
+
+* `spotPrice` - (Optional; Default: On-demand price) The maximum price to request on the spot market.
+* `waitForFulfillment` - (Optional; Default: false) If set, Terraform will
+  wait for the Spot Request to be fulfilled, and will throw an error if the
+  timeout of 10m is reached.
+* `spotType` - (Optional; Default: `persistent`) If set to `oneTime`, after
+  the instance is terminated, the spot request will be closed.
+* `launchGroup` - (Optional) A launch group is a group of spot instances that launch together and terminate together.
+  If left empty, instances are launched and terminated individually.
+* `blockDurationMinutes` - (Optional) The required duration for the Spot instances, in minutes. This value must be a multiple of 60 (60, 120, 180, 240, 300, or 360).
+  The duration period starts as soon as your Spot instance receives its instance ID. At the end of the duration period, Amazon EC2 marks the Spot instance for termination and provides a Spot instance termination notice, which gives the instance a two-minute warning before it terminates.
+  Note that you can't specify an Availability Zone group or a launch group if you specify a duration.
+* `instanceInterruptionBehavior` - (Optional) Indicates Spot instance behavior when it is interrupted. Valid values are `terminate`, `stop`, or `hibernate`. Default value is `terminate`.
+* `validUntil` - (Optional) The end date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). At this point, no new Spot instance requests are placed or enabled to fulfill the request. The default end date is 7 days from the current date.
+* `validFrom` - (Optional) The start date and time of the request, in UTC [RFC3339](https://tools.ietf.org/html/rfc3339#section-5.8) format (for example, YYYY-MM-DDTHH:MM:SSZ). The default is to start fulfilling the request immediately.
+* `tags` - (Optional) A map of tags to assign to the Spot Instance Request. These tags are not automatically applied to the launched Instance. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Spot Instance Request ID.
+
+These attributes are exported, but they are expected to change over time and so
+should only be used for informational purposes, not for resource dependencies:
+
+* `spotBidStatus` - The current [bid
+  status](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-bid-status.html)
+  of the Spot Instance Request.
+* `spotRequestState` - The current [request
+  state](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html#creating-spot-request-status)
+  of the Spot Instance Request.
+* `spotInstanceId` - The Instance ID (if any) that is currently fulfilling + the Spot Instance request. +* `publicDns` - The public DNS name assigned to the instance. For EC2-VPC, this + is only available if you've enabled DNS hostnames for your VPC +* `publicIp` - The public IP address assigned to the instance, if applicable. +* `privateDns` - The private DNS name assigned to the instance. Can only be + used inside the Amazon EC2, and only available if you've enabled DNS hostnames + for your VPC +* `privateIp` - The private IP address assigned to the instance +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Timeouts + +[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts): + +* `create` - (Default `10M`) +* `delete` - (Default `20M`) + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sqs_queue.html.markdown b/website/docs/cdktf/typescript/r/sqs_queue.html.markdown new file mode 100644 index 00000000000..4e36aabcada --- /dev/null +++ b/website/docs/cdktf/typescript/r/sqs_queue.html.markdown @@ -0,0 +1,228 @@ +--- +subcategory: "SQS (Simple Queue)" +layout: "aws" +page_title: "AWS: aws_sqs_queue" +description: |- + Provides a SQS resource. +--- + + + +# Resource: aws_sqs_queue + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SqsQueue } from "./.gen/providers/aws/sqs-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SqsQueue(this, "terraform_queue", { + delaySeconds: 90, + maxMessageSize: 2048, + messageRetentionSeconds: 86400, + name: "terraform-example-queue", + receiveWaitTimeSeconds: 10, + redrivePolicy: Token.asString( + Fn.jsonencode({ + deadLetterTargetArn: terraformQueueDeadletter.arn, + maxReceiveCount: 4, + }) + ), + tags: { + Environment: "production", + }, + }); + } +} + +``` + +## FIFO queue + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SqsQueue } from "./.gen/providers/aws/sqs-queue"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SqsQueue(this, "terraform_queue", { + contentBasedDeduplication: true, + fifoQueue: true, + name: "terraform-example-queue.fifo", + }); + } +} + +``` + +## High-throughput FIFO queue + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
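+ *
+ * High throughput FIFO mode requires both settings below: deduplication
+ * scoped to the message group and a per-message-group throughput limit.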
+ */
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SqsQueue(this, "terraform_queue", {
+      deduplicationScope: "messageGroup",
+      fifoQueue: true,
+      fifoThroughputLimit: "perMessageGroupId",
+      name: "terraform-example-queue.fifo",
+    });
+  }
+}
+
+```
+
+## Dead-letter queue
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SqsQueue(this, "terraform_queue_deadletter", {
+      name: "terraform-example-deadletter-queue",
+      redriveAllowPolicy: Token.asString(
+        Fn.jsonencode({
+          redrivePermission: "byQueue",
+          sourceQueueArns: [terraformQueue.arn],
+        })
+      ),
+    });
+  }
+}
+
+```
+
+## Server-side encryption (SSE)
+
+Using [SSE-SQS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-configure-sqs-sse-queue.html):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SqsQueue(this, "terraform_queue", {
+      name: "terraform-example-queue",
+      sqsManagedSseEnabled: true,
+    });
+  }
+}
+
+```
+
+Using [SSE-KMS](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-configure-sse-existing-queue.html):
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SqsQueue(this, "terraform_queue", {
+      kmsDataKeyReusePeriodSeconds: 300,
+      kmsMasterKeyId: "alias/aws/sqs",
+      name: "terraform-example-queue",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional) The name of the queue. Queue names must be made up of only uppercase and lowercase ASCII letters, numbers, underscores, and hyphens, and must be between 1 and 80 characters long. For a FIFO (first-in-first-out) queue, the name must end with the `.fifo` suffix. If omitted, Terraform will assign a random, unique name. Conflicts with `namePrefix`
+* `namePrefix` - (Optional) Creates a unique name beginning with the specified prefix. Conflicts with `name`
+* `visibilityTimeoutSeconds` - (Optional) The visibility timeout for the queue. An integer from 0 to 43200 (12 hours). The default for this attribute is 30.
For more information about visibility timeout, see [AWS docs](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/AboutVT.html).
+* `messageRetentionSeconds` - (Optional) The number of seconds Amazon SQS retains a message. Integer representing seconds, from 60 (1 minute) to 1209600 (14 days). The default for this attribute is 345600 (4 days).
+* `maxMessageSize` - (Optional) The limit of how many bytes a message can contain before Amazon SQS rejects it. An integer from 1024 bytes (1 KiB) up to 262144 bytes (256 KiB). The default for this attribute is 262144 (256 KiB).
+* `delaySeconds` - (Optional) The time in seconds that the delivery of all messages in the queue will be delayed. An integer from 0 to 900 (15 minutes). The default for this attribute is 0 seconds.
+* `receiveWaitTimeSeconds` - (Optional) The time for which a ReceiveMessage call will wait for a message to arrive (long polling) before returning. An integer from 0 to 20 (seconds). The default for this attribute is 0, meaning that the call will return immediately.
+* `policy` - (Optional) The JSON policy for the SQS queue. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy).
+* `redrivePolicy` - (Optional) The JSON policy to set up the Dead Letter Queue, see [AWS docs](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/SQSDeadLetterQueue.html). **Note:** when specifying `maxReceiveCount`, you must specify it as an integer (`5`), and not a string (`"5"`).
+* `redriveAllowPolicy` - (Optional) The JSON policy to set up the Dead Letter Queue redrive permission, see [AWS docs](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/SQSDeadLetterQueue.html).
+* `fifoQueue` - (Optional) Boolean designating a FIFO queue. If not set, it defaults to `false`, making it standard.
+* `contentBasedDeduplication` - (Optional) Enables content-based deduplication for FIFO queues. For more information, see the [related documentation](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html#FIFO-queues-exactly-once-processing)
+* `sqsManagedSseEnabled` - (Optional) Boolean to enable server-side encryption (SSE) of message content with SQS-owned encryption keys. See [Encryption at rest](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-server-side-encryption.html). Terraform will only perform drift detection of its value when present in a configuration.
+* `kmsMasterKeyId` - (Optional) The ID of an AWS-managed customer master key (CMK) for Amazon SQS or a custom CMK. For more information, see [Key Terms](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-server-side-encryption.html#sqs-sse-key-terms).
+* `kmsDataKeyReusePeriodSeconds` - (Optional) The length of time, in seconds, for which Amazon SQS can reuse a data key to encrypt or decrypt messages before calling AWS KMS again. An integer representing seconds, between 60 seconds (1 minute) and 86,400 seconds (24 hours). The default is 300 (5 minutes).
+* `deduplicationScope` - (Optional) Specifies whether message deduplication occurs at the message group or queue level. Valid values are `messageGroup` and `queue` (default).
+* `fifoThroughputLimit` - (Optional) Specifies whether the FIFO queue throughput quota applies to the entire queue or per message group. Valid values are `perQueue` (default) and `perMessageGroupId`.
+* `tags` - (Optional) A map of tags to assign to the queue. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The URL for the created Amazon SQS queue.
+* `arn` - The ARN of the SQS queue.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `url` - Same as `id`: The URL for the created Amazon SQS queue.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queues using the queue `url`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SQS Queues using the queue `url`. For example:
+
+```console
+% terraform import aws_sqs_queue.public_queue https://queue.amazonaws.com/80398EXAMPLE/MyQueue
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sqs_queue_policy.html.markdown b/website/docs/cdktf/typescript/r/sqs_queue_policy.html.markdown
new file mode 100644
index 00000000000..79a085734c6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sqs_queue_policy.html.markdown
@@ -0,0 +1,102 @@
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queue_policy"
+description: |-
+  Provides a SQS Queue Policy resource.
+---
+
+
+
+# Resource: aws_sqs_queue_policy
+
+Allows you to set a policy of an SQS Queue
+while referencing the ARN of the queue within the policy.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+import { SqsQueuePolicy } from "./.gen/providers/aws/sqs-queue-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const q = new SqsQueue(this, "q", {
+      name: "examplequeue",
+    });
+    const test = new DataAwsIamPolicyDocument(this, "test", {
+      statement: [
+        {
+          actions: ["sqs:SendMessage"],
+          condition: [
+            {
+              test: "ArnEquals",
+              values: [example.arn],
+              variable: "aws:SourceArn",
+            },
+          ],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["*"],
+              type: "*",
+            },
+          ],
+          resources: [q.arn],
+          sid: "First",
+        },
+      ],
+    });
+    const awsSqsQueuePolicyTest = new SqsQueuePolicy(this, "test_2", {
+      policy: Token.asString(test.json),
+      queueUrl: q.id,
+    });
+    /*This allows the Terraform resource name to match the original name.
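+    (Without the override, cdktf would derive the logical ID from the construct ID "test_2" instead.)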
You can remove the call if you don't need them to match.*/ + awsSqsQueuePolicyTest.overrideLogicalId("test"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `queueUrl` - (Required) The URL of the SQS Queue to which to attach the policy +* `policy` - (Required) The JSON policy for the SQS queue. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queue Policies using the queue URL. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SQS Queue Policies using the queue URL. For example: + +```console +% terraform import aws_sqs_queue_policy.test https://queue.amazonaws.com/0123456789012/myqueue +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/sqs_queue_redrive_allow_policy.html.markdown b/website/docs/cdktf/typescript/r/sqs_queue_redrive_allow_policy.html.markdown new file mode 100644 index 00000000000..635b69f6998 --- /dev/null +++ b/website/docs/cdktf/typescript/r/sqs_queue_redrive_allow_policy.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "SQS (Simple Queue)" +layout: "aws" +page_title: "AWS: aws_sqs_queue_redrive_allow_policy" +description: |- + Provides a SQS Queue Redrive Allow Policy resource. +--- + + + +# Resource: aws_sqs_queue_redrive_allow_policy + +Provides a SQS Queue Redrive Allow Policy resource. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SqsQueue } from "./.gen/providers/aws/sqs-queue"; +import { SqsQueueRedriveAllowPolicy } from "./.gen/providers/aws/sqs-queue-redrive-allow-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new SqsQueue(this, "example", { + name: "examplequeue", + }); + const src = new SqsQueue(this, "src", { + name: "srcqueue", + redrivePolicy: Token.asString( + Fn.jsonencode({ + deadLetterTargetArn: example.arn, + maxReceiveCount: 4, + }) + ), + }); + const awsSqsQueueRedriveAllowPolicyExample = new SqsQueueRedriveAllowPolicy( + this, + "example_2", + { + queueUrl: example.id, + redriveAllowPolicy: Token.asString( + Fn.jsonencode({ + redrivePermission: "byQueue", + sourceQueueArns: [src.arn], + }) + ), + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsSqsQueueRedriveAllowPolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `queueUrl` - (Required) The URL of the SQS Queue to which to attach the policy
+* `redriveAllowPolicy` - (Required) The JSON redrive allow policy for the SQS queue. Learn more in the [Amazon SQS dead-letter queues documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queue Redrive Allow Policies using the queue URL. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SQS Queue Redrive Allow Policies using the queue URL. For example:
+
+```console
+% terraform import aws_sqs_queue_redrive_allow_policy.test https://queue.amazonaws.com/0123456789012/myqueue
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/sqs_queue_redrive_policy.html.markdown b/website/docs/cdktf/typescript/r/sqs_queue_redrive_policy.html.markdown
new file mode 100644
index 00000000000..b3762dddf5f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/sqs_queue_redrive_policy.html.markdown
@@ -0,0 +1,96 @@
+---
+subcategory: "SQS (Simple Queue)"
+layout: "aws"
+page_title: "AWS: aws_sqs_queue_redrive_policy"
+description: |-
+  Provides a SQS Queue Redrive Policy resource.
+---
+
+
+
+# Resource: aws_sqs_queue_redrive_policy
+
+Allows you to set a redrive policy of an SQS Queue
+while referencing the ARN of the dead letter queue inside the redrive policy.
+
+This is useful when you want to set a dedicated
+dead letter queue for a standard or FIFO queue, but need
+the dead letter queue to exist before setting the redrive policy.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SqsQueue } from "./.gen/providers/aws/sqs-queue";
+import { SqsQueueRedrivePolicy } from "./.gen/providers/aws/sqs-queue-redrive-policy";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const q = new SqsQueue(this, "q", {
+      name: "examplequeue",
+    });
+    const ddl = new SqsQueue(this, "ddl", {
+      name: "examplequeue-ddl",
+      redriveAllowPolicy: Token.asString(
+        Fn.jsonencode({
+          redrivePermission: "byQueue",
+          sourceQueueArns: [q.arn],
+        })
+      ),
+    });
+    const awsSqsQueueRedrivePolicyQ = new SqsQueueRedrivePolicy(this, "q_2", {
+      queueUrl: q.id,
+      redrivePolicy: Token.asString(
+        Fn.jsonencode({
+          deadLetterTargetArn: ddl.arn,
+          maxReceiveCount: 4,
+        })
+      ),
+    });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsSqsQueueRedrivePolicyQ.overrideLogicalId("q");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `queueUrl` - (Required) The URL of the SQS Queue to which to attach the policy
+* `redrivePolicy` - (Required) The JSON redrive policy for the SQS queue. Accepts two key/value pairs: `deadLetterTargetArn` and `maxReceiveCount`. Learn more in the [Amazon SQS dead-letter queues documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html).
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SQS Queue Redrive Policies using the queue URL. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SQS Queue Redrive Policies using the queue URL. For example:
+
+```console
+% terraform import aws_sqs_queue_redrive_policy.test https://queue.amazonaws.com/0123456789012/myqueue
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssm_activation.html.markdown b/website/docs/cdktf/typescript/r/ssm_activation.html.markdown
new file mode 100644
index 00000000000..015b38df4a8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssm_activation.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_activation"
+description: |-
+  Registers an on-premises server or virtual machine with Amazon EC2 so that it can be managed using Run Command.
+---
+
+
+
+# Resource: aws_ssm_activation
+
+Registers an on-premises server or virtual machine with Amazon EC2 so that it can be managed using Run Command.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
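+ *
+ * The activation needs an IAM role that ssm.amazonaws.com can assume; the
+ * example below creates that role, attaches the AmazonSSMManagedInstanceCore
+ * policy, and makes the activation depend on the attachment.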
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { SsmActivation } from "./.gen/providers/aws/ssm-activation"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", { + statement: [ + { + actions: ["sts:AssumeRole"], + effect: "Allow", + principals: [ + { + identifiers: ["ssm.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const testRole = new IamRole(this, "test_role", { + assumeRolePolicy: Token.asString(assumeRole.json), + name: "test_role", + }); + const testAttach = new IamRolePolicyAttachment(this, "test_attach", { + policyArn: "arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore", + role: testRole.name, + }); + new SsmActivation(this, "foo", { + dependsOn: [testAttach], + description: "Test", + iamRole: testRole.id, + name: "test_ssm_activation", + registrationLimit: Token.asNumber("5"), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Optional) The default name of the registered managed instance. +* `description` - (Optional) The description of the resource that you want to register. +* `expirationDate` - (Optional) UTC timestamp in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8) by which this activation request should expire. The default value is 24 hours from resource creation time. Terraform will only perform drift detection of its value when present in a configuration. +* `iamRole` - (Required) The IAM Role to attach to the managed instance. +* `registrationLimit` - (Optional) The maximum number of managed instances you want to register. The default value is 1 instance. +* `tags` - (Optional) A map of tags to assign to the object. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The activation ID. +* `activationCode` - The code the system generates when it processes the activation. +* `name` - The default name of the registered managed instance. +* `description` - The description of the resource that was registered. +* `expired` - If the current activation has expired. +* `expirationDate` - The date by which this activation request should expire. The default value is 24 hours. +* `iamRole` - The IAM Role attached to the managed instance. +* `registrationLimit` - The maximum number of managed instances you want to be registered. The default value is 1 instance. +* `registrationCount` - The number of managed instances that are currently registered using this activation. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS SSM Activation using the `id`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS SSM Activation using the `id`. For example: + +```console +% terraform import aws_ssm_activation.example e488f2f6-e686-4afb-8a04-ef6dfEXAMPLE +``` + +-> **Note:** The `activationCode` attribute cannot be imported. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssm_association.html.markdown b/website/docs/cdktf/typescript/r/ssm_association.html.markdown new file mode 100644 index 00000000000..3ddc8e9fa98 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssm_association.html.markdown @@ -0,0 +1,198 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_association" +description: |- + Associates an SSM Document to an instance or EC2 tag. +--- + + + +# Resource: aws_ssm_association + +Associates an SSM Document to an instance or EC2 tag. + +## Example Usage + +### Create an association for a specific instance + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmAssociation } from "./.gen/providers/aws/ssm-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmAssociation(this, "example", { + name: Token.asString(awsSsmDocumentExample.name), + targets: [ + { + key: "InstanceIds", + values: [Token.asString(awsInstanceExample.id)], + }, + ], + }); + } +} + +``` + +### Create an association for all managed instances in an AWS account + +To target all managed instances in an AWS account, set the `key` as `"instanceIds"` with `values` set as `["*"]`. This example also illustrates how to use an Amazon owned SSM document named `amazonCloudWatchManageAgent`. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmAssociation } from "./.gen/providers/aws/ssm-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmAssociation(this, "example", { + name: "AmazonCloudWatch-ManageAgent", + targets: [ + { + key: "InstanceIds", + values: ["*"], + }, + ], + }); + } +} + +``` + +### Create an association for a specific tag + +This example shows how to target all managed instances that are assigned a tag key of `environment` and value of `development`. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
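+ *
+ * To target by tag, set the target key to "tag:<tag name>" (here
+ * "tag:Environment") and list the tag values to match in "values".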
+ */
+import { SsmAssociation } from "./.gen/providers/aws/ssm-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmAssociation(this, "example", {
+      name: "AmazonCloudWatch-ManageAgent",
+      targets: [
+        {
+          key: "tag:Environment",
+          values: ["Development"],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Create an association with a specific schedule
+
+This example shows how to schedule an association in various ways.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmAssociation } from "./.gen/providers/aws/ssm-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmAssociation(this, "example", {
+      name: Token.asString(awsSsmDocumentExample.name),
+      scheduleExpression: "cron(0 2 ? * SUN *)",
+      targets: [
+        {
+          key: "InstanceIds",
+          values: [Token.asString(awsInstanceExample.id)],
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the SSM document to apply.
+* `applyOnlyAtCronInterval` - (Optional) By default, when you create a new association or update an existing one, the system runs it immediately and then according to the schedule you specified. Enable this option if you do not want an association to run immediately after you create or update it. This parameter is not supported for rate expressions. Default: `false`.
+* `associationName` - (Optional) The descriptive name for the association.
+* `documentVersion` - (Optional) The document version you want to associate with the target(s). Can be a specific version or the default version.
+* `instanceId` - (Optional, **Deprecated**) The instance ID to apply an SSM document to. Use `targets` with key `instanceIds` for document schema versions 2.0 and above. Use the `targets` attribute instead.
+* `outputLocation` - (Optional) An output location block. Output Location is documented below.
+* `parameters` - (Optional) A block of arbitrary string parameters to pass to the SSM document.
+* `scheduleExpression` - (Optional) A [cron or rate expression](https://docs.aws.amazon.com/systems-manager/latest/userguide/reference-cron-and-rate-expressions.html) that specifies when the association runs.
+* `targets` - (Optional) A block containing the targets of the SSM association. Targets are documented below. AWS currently supports a maximum of 5 targets.
+* `complianceSeverity` - (Optional) The compliance severity for the association. Can be one of the following: `unspecified`, `low`, `medium`, `high`, or `critical`.
+* `maxConcurrency` - (Optional) The maximum number of targets allowed to run the association at the same time. You can specify a number, for example 10, or a percentage of the target set, for example 10%.
+* `maxErrors` - (Optional) The number of errors that are allowed before the system stops sending requests to run the association on additional targets. You can specify a number, for example 10, or a percentage of the target set, for example 10%.
+* `automationTargetParameterName` - (Optional) Specify the target for the association.
This target is required for associations that use an `automation` document and target resources by using rate controls. This should be set to the SSM document `parameter` that will define how your automation will branch out.
+* `waitForSuccessTimeoutSeconds` - (Optional) The number of seconds to wait for the association status to be `success`. If `success` status is not reached within the given time, the create operation will fail.
+
+Output Location (`outputLocation`) is an S3 bucket where you want to store the results of this association:
+
+* `s3BucketName` - (Required) The S3 bucket name.
+* `s3KeyPrefix` - (Optional) The S3 bucket prefix. Results stored in the root if not configured.
+* `s3Region` - (Optional) The S3 bucket region.
+
+Targets specify what instance IDs or tags to apply the document to and have these keys:
+
+* `key` - (Required) Either `instanceIds` or `tag:Tag Name` to specify an EC2 tag.
+* `values` - (Required) A list of instance IDs or tag values. AWS currently limits this list size to one value.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the SSM association
+* `associationId` - The ID of the SSM association.
+* `instanceId` - The instance id that the SSM document was applied to.
+* `name` - The name of the SSM document to apply.
+* `parameters` - Additional parameters passed to the SSM document.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM associations using the `associationId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSM associations using the `associationId`. For example:
+
+```console
+% terraform import aws_ssm_association.test-association 10abcdef-0abc-1234-5678-90abcdef123456
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssm_default_patch_baseline.html.markdown b/website/docs/cdktf/typescript/r/ssm_default_patch_baseline.html.markdown
new file mode 100644
index 00000000000..cd7620895de
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssm_default_patch_baseline.html.markdown
@@ -0,0 +1,144 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_default_patch_baseline"
+description: |-
+  Terraform resource for managing an AWS Systems Manager Default Patch Baseline.
+---
+
+
+
+# Resource: aws_ssm_default_patch_baseline
+
+Terraform resource for registering an AWS Systems Manager Default Patch Baseline.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
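+ *
+ * A default patch baseline is registered per operating system, so the
+ * registration below reuses the baseline's operatingSystem attribute.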
+ */
+import { SsmDefaultPatchBaseline } from "./.gen/providers/aws/ssm-default-patch-baseline";
+import { SsmPatchBaseline } from "./.gen/providers/aws/ssm-patch-baseline";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new SsmPatchBaseline(this, "example", {
+      approvedPatches: ["KB123456"],
+      name: "example",
+    });
+    const awsSsmDefaultPatchBaselineExample = new SsmDefaultPatchBaseline(
+      this,
+      "example_1",
+      {
+        baselineId: example.id,
+        operatingSystem: example.operatingSystem,
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSsmDefaultPatchBaselineExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `baselineId` - (Required) ID of the patch baseline.
+  Can be an ID or an ARN.
+  When specifying an AWS-provided patch baseline, must be the ARN.
+* `operatingSystem` - (Required) The operating system the patch baseline applies to.
+  Valid values are
+  `amazonLinux`,
+  `amazonLinux2`,
+  `amazonLinux2022`,
+  `centos`,
+  `debian`,
+  `macos`,
+  `oracleLinux`,
+  `raspbian`,
+  `redhatEnterpriseLinux`,
+  `rockyLinux`,
+  `suse`,
+  `ubuntu`, and
+  `windows`.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import the Systems Manager Default Patch Baseline using the patch baseline ID, patch baseline ARN, or the operating system value. For example:
+
+Using the patch baseline ID:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using the patch baseline ARN:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using the operating system value:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import the Systems Manager Default Patch Baseline using the patch baseline ID, patch baseline ARN, or the operating system value.
For example: + +Using the patch baseline ID: + +```console +% terraform import aws_ssm_default_patch_baseline.example pb-1234567890abcdef1 +``` + +Using the patch baseline ARN: + +```console +% terraform import aws_ssm_default_patch_baseline.example arn:aws:ssm:us-west-2:123456789012:patchbaseline/pb-1234567890abcdef1 +``` + +Using the operating system value: + +```console +% terraform import aws_ssm_default_patch_baseline.example CENTOS +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssm_document.html.markdown b/website/docs/cdktf/typescript/r/ssm_document.html.markdown new file mode 100644 index 00000000000..3f3c44bcdb7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssm_document.html.markdown @@ -0,0 +1,183 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_document" +description: |- + Provides an SSM Document resource +--- + + + +# Resource: aws_ssm_document + +Provides an SSM Document resource + +~> **NOTE on updating SSM documents:** Only documents with a schema version of 2.0 +or greater can update their content once created, see [SSM Schema Features][1]. To update a document with an older schema version you must recreate the resource. Not all document types support a schema version of 2.0 or greater. Refer to [SSM document schema features and examples][2] for information about which schema versions are supported for the respective `documentType`. + +## Example Usage + +### Create an ssm document in JSON format + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmDocument } from "./.gen/providers/aws/ssm-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmDocument(this, "foo", { + content: + ' {\n "schemaVersion": "1.2",\n "description": "Check ip configuration of a Linux instance.",\n "parameters": {\n\n },\n "runtimeConfig": {\n "aws:runShellScript": {\n "properties": [\n {\n "id": "0.aws:runShellScript",\n "runCommand": ["ifconfig"]\n }\n ]\n }\n }\n }\n\n', + documentType: "Command", + name: "test_document", + }); + } +} + +``` + +### Create an ssm document in YAML format + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmDocument } from "./.gen/providers/aws/ssm-document"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmDocument(this, "foo", { + content: + "schemaVersion: '1.2'\ndescription: Check ip configuration of a Linux instance.\nparameters: {}\nruntimeConfig:\n 'aws:runShellScript':\n properties:\n - id: '0.aws:runShellScript'\n runCommand:\n - ifconfig\n\n", + documentFormat: "YAML", + documentType: "Command", + name: "test_document", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the document. 
+
+* `attachmentsSource` - (Optional) One or more configuration blocks describing attachments sources to a version of a document. Defined below.
+* `content` - (Required) The JSON or YAML content of the document.
+* `documentFormat` - (Optional, defaults to JSON) The format of the document. Valid document formats are `json` and `yaml`.
+* `documentType` - (Required) The type of the document. Valid document types include: `automation`, `command`, `package`, `policy`, and `session`.
+* `permissions` - (Optional) Additional Permissions to attach to the document. See [Permissions](#permissions) below for details.
+* `targetType` - (Optional) The target type which defines the kinds of resources the document can run on. For example, `/AWS::EC2::Instance`. For a list of valid resource types, see the [AWS Resource Types Reference](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html).
+* `tags` - (Optional) A map of tags to assign to the object. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `versionName` - (Optional) A field specifying the version of the artifact you are creating with the document. For example, "Release 12, Update 6". This value is unique across all versions of a document and cannot be changed for an existing document version.
+
+## attachments_source
+
+The `attachmentsSource` block supports the following:
+
+* `key` - (Required) The key describing the location of an attachment to a document. Valid key types include: `sourceUrl` and `s3FileUrl`.
+* `values` - (Required) The value describing the location of an attachment to a document.
+* `name` - (Optional) The name of the document attachment file.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `createdDate` - The date the document was created.
+* `description` - The description of the document.
+* `schemaVersion` - The schema version of the document.
+* `defaultVersion` - The default version of the document.
+* `documentVersion` - The document version.
+* `hash` - The SHA-1 or SHA-256 hash of the document content.
+* `hashType` - The hashing algorithm used when hashing the content, either `Sha1` or `Sha256`.
+* `latestVersion` - The latest version of the document.
+* `owner` - The AWS user account of the person who created the document.
+* `status` - The current status of the document: `Creating`, `Active`, or `Deleting`.
+* `parameter` - The parameters that are available to this document.
+* `platformTypes` - A list of OS platforms compatible with this SSM document, either "Windows" or "Linux".
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+[1]: http://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-ssm-docs.html#document-schemas-features
+[2]: https://docs.aws.amazon.com/systems-manager/latest/userguide/document-schemas-features.html
+
+## Permissions
+
+The permissions attribute specifies how you want to share the document. If you share a document privately,
+you must specify the AWS account IDs of the users who can use the document. If you share a document
+publicly, you must specify `all` as the account ID.
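+
+As a sketch only (this assumes the generated `SsmDocument` binding exposes `permissions` as a plain string map; the account ID below is hypothetical), privately sharing a document might look like the following. The mapping keys are described below.
+
+```typescript
+// A sketch, not 'cdktf convert' output: shares a simple Command document
+// with a single (hypothetical) AWS account.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SsmDocument } from "./.gen/providers/aws/ssm-document";
+class SharedDocumentStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmDocument(this, "shared", {
+      name: "shared_document",
+      documentType: "Command",
+      documentFormat: "YAML",
+      content:
+        "schemaVersion: '2.2'\ndescription: Example shared document.\nmainSteps:\n  - action: aws:runShellScript\n    name: example\n    inputs:\n      runCommand:\n        - echo hello\n",
+      // Keys follow the provider's map shape; the account ID is hypothetical.
+      permissions: {
+        type: "Share",
+        account_ids: "123456789012",
+      },
+    });
+  }
+}
+```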
+
+The permissions mapping supports the following:
+
+* `type` - The permission type for the document. The permission type can be `share`.
+* `accountIds` - The AWS user accounts that should have access to the document. The account IDs can either be a group of account IDs or `all`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Documents using the name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSM Documents using the name. For example:
+
+```console
+% terraform import aws_ssm_document.example example
+```
+
+The `attachmentsSource` argument does not have an SSM API method for reading the attachment information detail after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmDocument } from "./.gen/providers/aws/ssm-document";
+interface MyConfig {
+  content: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new SsmDocument(this, "test", {
+      attachmentsSource: [
+        {
+          key: "SourceUrl",
+          // `objectBucket` (an S3 bucket defined elsewhere in the stack)
+          // supplies the attachment location.
+          values: ["s3://${" + objectBucket.bucket + "}/test.zip"],
+        },
+      ],
+      documentType: "Package",
+      lifecycle: {
+        // ignoreChanges takes Terraform attribute names as strings.
+        ignoreChanges: ["attachments_source"],
+      },
+      name: "test_document",
+      content: config.content,
+    });
+  }
+}
+
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssm_maintenance_window.html.markdown b/website/docs/cdktf/typescript/r/ssm_maintenance_window.html.markdown
new file mode 100644
index 00000000000..d17fd12e1f2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssm_maintenance_window.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_maintenance_window"
+description: |-
+  Provides an SSM Maintenance Window resource
+---
+
+
+
+# Resource: aws_ssm_maintenance_window
+
+Provides an SSM Maintenance Window resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
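+ *
+ * The window below opens every Tuesday at 16:00 (UTC unless scheduleTimezone
+ * is set), stays open for 3 hours, and stops scheduling new tasks 1 hour
+ * before it closes.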
+ */
+import { SsmMaintenanceWindow } from "./.gen/providers/aws/ssm-maintenance-window";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmMaintenanceWindow(this, "production", {
+      cutoff: 1,
+      duration: 3,
+      name: "maintenance-window-application",
+      schedule: "cron(0 16 ? * TUE *)",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the maintenance window.
+* `schedule` - (Required) The schedule of the Maintenance Window in the form of a [cron or rate expression](https://docs.aws.amazon.com/systems-manager/latest/userguide/reference-cron-and-rate-expressions.html).
+* `cutoff` - (Required) The number of hours before the end of the Maintenance Window that Systems Manager stops scheduling new tasks for execution.
+* `duration` - (Required) The duration of the Maintenance Window in hours.
+* `description` - (Optional) A description for the maintenance window.
+* `allowUnassociatedTargets` - (Optional) Whether targets must be registered with the Maintenance Window before tasks can be defined for those targets.
+* `enabled` - (Optional) Whether the maintenance window is enabled. Default: `true`.
+* `endDate` - (Optional) Timestamp in [ISO-8601 extended format](https://www.iso.org/iso-8601-date-and-time-format.html) when to no longer run the maintenance window.
+* `scheduleTimezone` - (Optional) Timezone for schedule in [Internet Assigned Numbers Authority (IANA) Time Zone Database format](https://www.iana.org/time-zones). For example: `America/Los_Angeles`, `Etc/UTC`, or `Asia/Seoul`.
+* `scheduleOffset` - (Optional) The number of days to wait after the date and time specified by a CRON expression before running the maintenance window.
+* `startDate` - (Optional) Timestamp in [ISO-8601 extended format](https://www.iso.org/iso-8601-date-and-time-format.html) when to begin the maintenance window.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the maintenance window.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Maintenance Windows using the maintenance window `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSM Maintenance Windows using the maintenance window `id`.
For example: + +```console +% terraform import aws_ssm_maintenance_window.imported-window mw-0123456789 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssm_maintenance_window_target.html.markdown b/website/docs/cdktf/typescript/r/ssm_maintenance_window_target.html.markdown new file mode 100644 index 00000000000..56bef821a5d --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssm_maintenance_window_target.html.markdown @@ -0,0 +1,133 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_maintenance_window_target" +description: |- + Provides an SSM Maintenance Window Target resource +--- + + + +# Resource: aws_ssm_maintenance_window_target + +Provides an SSM Maintenance Window Target resource + +## Example Usage + +### Instance Target + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmMaintenanceWindow } from "./.gen/providers/aws/ssm-maintenance-window"; +import { SsmMaintenanceWindowTarget } from "./.gen/providers/aws/ssm-maintenance-window-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const window = new SsmMaintenanceWindow(this, "window", { + cutoff: 1, + duration: 3, + name: "maintenance-window-webapp", + schedule: "cron(0 16 ? * TUE *)", + }); + new SsmMaintenanceWindowTarget(this, "target1", { + description: "This is a maintenance window target", + name: "maintenance-window-target", + resourceType: "INSTANCE", + targets: [ + { + key: "tag:Name", + values: ["acceptance_test"], + }, + ], + windowId: window.id, + }); + } +} + +``` + +### Resource Group Target + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmMaintenanceWindow } from "./.gen/providers/aws/ssm-maintenance-window"; +import { SsmMaintenanceWindowTarget } from "./.gen/providers/aws/ssm-maintenance-window-target"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const window = new SsmMaintenanceWindow(this, "window", { + cutoff: 1, + duration: 3, + name: "maintenance-window-webapp", + schedule: "cron(0 16 ? * TUE *)", + }); + new SsmMaintenanceWindowTarget(this, "target1", { + description: "This is a maintenance window target", + name: "maintenance-window-target", + resourceType: "RESOURCE_GROUP", + targets: [ + { + key: "resource-groups:ResourceTypeFilters", + values: ["AWS::EC2::Instance"], + }, + ], + windowId: window.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `windowId` - (Required) The Id of the maintenance window to register the target with. +* `name` - (Optional) The name of the maintenance window target. +* `description` - (Optional) The description of the maintenance window target. +* `resourceType` - (Required) The type of target being registered with the Maintenance Window. Possible values are `instance` and `resourceGroup`. +* `targets` - (Required) The targets to register with the maintenance window. 
In other words, the instances to run commands on when the maintenance window runs. You can specify targets using instance IDs, resource group names, or tags that have been applied to instances. For more information about these target formats, see the [maintenance window targets examples](https://docs.aws.amazon.com/systems-manager/latest/userguide/mw-cli-tutorial-targets-examples.html) in the AWS Systems Manager User Guide.
+* `ownerInformation` - (Optional) User-provided value that will be included in any CloudWatch events raised while running tasks for these targets in this Maintenance Window.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the maintenance window target.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Maintenance Window targets using `windowId/windowTargetId`. For example:

+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSM Maintenance Window targets using `windowId/windowTargetId`. For example:
+
+```console
+% terraform import aws_ssm_maintenance_window_target.example mw-0c50858d01EXAMPLE/23639a0b-ddbc-4bca-9e72-78d96EXAMPLE
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssm_maintenance_window_task.html.markdown b/website/docs/cdktf/typescript/r/ssm_maintenance_window_task.html.markdown
new file mode 100644
index 00000000000..7bf77c394ba
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssm_maintenance_window_task.html.markdown
@@ -0,0 +1,292 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_maintenance_window_task"
+description: |-
+  Provides an SSM Maintenance Window Task resource
+---
+
+
+
+# Resource: aws_ssm_maintenance_window_task
+
+Provides an SSM Maintenance Window Task resource.
+
+## Example Usage
+
+### Automation Tasks
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmMaintenanceWindowTask } from "./.gen/providers/aws/ssm-maintenance-window-task";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmMaintenanceWindowTask(this, "example", {
+      maxConcurrency: Token.asString(2),
+      maxErrors: Token.asString(1),
+      priority: 1,
+      targets: [
+        {
+          key: "InstanceIds",
+          values: [Token.asString(awsInstanceExample.id)],
+        },
+      ],
+      taskArn: "AWS-RestartEC2Instance",
+      taskInvocationParameters: {
+        automationParameters: {
+          documentVersion: "$LATEST",
+          parameter: [
+            {
+              name: "InstanceId",
+              values: [Token.asString(awsInstanceExample.id)],
+            },
+          ],
+        },
+      },
+      taskType: "AUTOMATION",
+      windowId: Token.asString(awsSsmMaintenanceWindowExample.id),
+    });
+  }
+}
+
+```
+
+### Lambda Tasks
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { SsmMaintenanceWindowTask } from "./.gen/providers/aws/ssm-maintenance-window-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmMaintenanceWindowTask(this, "example", { + maxConcurrency: Token.asString(2), + maxErrors: Token.asString(1), + priority: 1, + targets: [ + { + key: "InstanceIds", + values: [Token.asString(awsInstanceExample.id)], + }, + ], + taskArn: Token.asString(awsLambdaFunctionExample.arn), + taskInvocationParameters: { + lambdaParameters: { + clientContext: Token.asString( + Fn.base64encode('{\\"key1\\":\\"value1\\"}') + ), + payload: '{\\"key1\\":\\"value1\\"}', + }, + }, + taskType: "LAMBDA", + windowId: Token.asString(awsSsmMaintenanceWindowExample.id), + }); + } +} + +``` + +### Run Command Tasks + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmMaintenanceWindowTask } from "./.gen/providers/aws/ssm-maintenance-window-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmMaintenanceWindowTask(this, "example", { + maxConcurrency: Token.asString(2), + maxErrors: Token.asString(1), + priority: 1, + targets: [ + { + key: "InstanceIds", + values: [Token.asString(awsInstanceExample.id)], + }, + ], + taskArn: "AWS-RunShellScript", + taskInvocationParameters: { + runCommandParameters: { + notificationConfig: { + notificationArn: Token.asString(awsSnsTopicExample.arn), + notificationEvents: ["All"], + notificationType: "Command", + }, + outputS3Bucket: Token.asString(awsS3BucketExample.id), + outputS3KeyPrefix: "output", + parameter: [ + { + name: "commands", + values: ["date"], + }, + ], + serviceRoleArn: Token.asString(awsIamRoleExample.arn), + timeoutSeconds: 600, + }, + }, + taskType: "RUN_COMMAND", + windowId: Token.asString(awsSsmMaintenanceWindowExample.id), + }); + } +} + +``` + +### Step Function Tasks + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmMaintenanceWindowTask } from "./.gen/providers/aws/ssm-maintenance-window-task"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmMaintenanceWindowTask(this, "example", { + maxConcurrency: Token.asString(2), + maxErrors: Token.asString(1), + priority: 1, + targets: [ + { + key: "InstanceIds", + values: [Token.asString(awsInstanceExample.id)], + }, + ], + taskArn: Token.asString(awsSfnActivityExample.id), + taskInvocationParameters: { + stepFunctionsParameters: { + input: '{\\"key1\\":\\"value1\\"}', + name: "example", + }, + }, + taskType: "STEP_FUNCTIONS", + windowId: Token.asString(awsSsmMaintenanceWindowExample.id), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `windowId` - (Required) The Id of the maintenance window to register the task with. 
+
+* `maxConcurrency` - (Optional) The maximum number of targets this task can be run for in parallel.
+* `maxErrors` - (Optional) The maximum number of errors allowed before this task stops being scheduled.
+* `cutoffBehavior` - (Optional) Indicates whether tasks should continue to run after the cutoff time specified in the maintenance window is reached. Valid values are `continueTask` and `cancelTask`.
+* `taskType` - (Required) The type of task being registered. Valid values: `automation`, `lambda`, `runCommand` or `stepFunctions`.
+* `taskArn` - (Required) The ARN of the task to execute.
+* `serviceRoleArn` - (Optional) The role that should be assumed when executing the task. If a role is not provided, Systems Manager uses your account's service-linked role. If no service-linked role for Systems Manager exists in your account, it is created for you.
+* `name` - (Optional) The name of the maintenance window task.
+* `description` - (Optional) The description of the maintenance window task.
+* `targets` - (Optional) The targets (either instances or window target IDs). Instances are specified using `Key=InstanceIds,Values=instanceid1,instanceid2`. Window target IDs are specified using `Key=WindowTargetIds,Values=windowtargetid1,windowtargetid2`.
+* `priority` - (Optional) The priority of the task in the Maintenance Window; the lower the number, the higher the priority. Tasks in a Maintenance Window are scheduled in priority order, with tasks that have the same priority scheduled in parallel.
+* `taskInvocationParameters` - (Optional) Configuration block with parameters for task execution.
+
+`taskInvocationParameters` supports the following:
+
+* `automationParameters` - (Optional) The parameters for an AUTOMATION task type. Documented below.
+* `lambdaParameters` - (Optional) The parameters for a LAMBDA task type. Documented below.
+* `runCommandParameters` - (Optional) The parameters for a RUN_COMMAND task type. Documented below.
+* `stepFunctionsParameters` - (Optional) The parameters for a STEP_FUNCTIONS task type. Documented below.
+
+`automationParameters` supports the following:
+
+* `documentVersion` - (Optional) The version of an Automation document to use during task execution.
+* `parameter` - (Optional) The parameters for the AUTOMATION task execution. Documented below.
+
+`lambdaParameters` supports the following:
+
+* `clientContext` - (Optional) Pass client-specific information to the Lambda function that you are invoking.
+* `payload` - (Optional) JSON to provide to your Lambda function as input.
+* `qualifier` - (Optional) Specify a Lambda function version or alias name.
+
+`runCommandParameters` supports the following:

+* `comment` - (Optional) Information about the command(s) to execute.
+* `documentHash` - (Optional) The SHA-256 or SHA-1 hash created by the system when the document was created. SHA-1 hashes have been deprecated.
+* `documentHashType` - (Optional) The hash type of the document. SHA-1 hashes have been deprecated. Valid values: `sha256` and `sha1`.
+* `notificationConfig` - (Optional) Configurations for sending notifications about command status changes on a per-instance basis. Documented below.
+* `outputS3Bucket` - (Optional) The name of the Amazon S3 bucket.
+* `outputS3KeyPrefix` - (Optional) The Amazon S3 bucket subfolder.
+* `parameter` - (Optional) The parameters for the RUN_COMMAND task execution. Documented below.
+* `serviceRoleArn` - (Optional) The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) service role to use to publish Amazon Simple Notification Service (Amazon SNS) notifications for maintenance window Run Command tasks. +* `timeoutSeconds` - (Optional) If this time is reached and the command has not already started executing, it doesn't run. +* `cloudwatchConfig` - (Optional) Configuration options for sending command output to CloudWatch Logs. Documented below. + +`stepFunctionsParameters` supports the following: + +* `input` - (Optional) The inputs for the STEP_FUNCTION task. +* `name` - (Optional) The name of the STEP_FUNCTION task. + +`notificationConfig` supports the following: + +* `notificationArn` - (Optional) An Amazon Resource Name (ARN) for a Simple Notification Service (SNS) topic. Run Command pushes notifications about command status changes to this topic. +* `notificationEvents` - (Optional) The different events for which you can receive notifications. Valid values: `all`, `inProgress`, `success`, `timedOut`, `cancelled`, and `failed` +* `notificationType` - (Optional) When specified with `command`, receive notification when the status of a command changes. When specified with `invocation`, for commands sent to multiple instances, receive notification on a per-instance basis when the status of a command changes. Valid values: `command` and `invocation` + +`cloudwatchConfig` supports the following: + +* `cloudwatchLogGroupName` - (Optional) The name of the CloudWatch log group where you want to send command output. If you don't specify a group name, Systems Manager automatically creates a log group for you. The log group uses the following naming format: aws/ssm/SystemsManagerDocumentName. +* `cloudwatchOutputEnabled` - (Optional) Enables Systems Manager to send command output to CloudWatch Logs. + +`parameter` supports the following: + +* `name` - (Required) The parameter name. +* `values` - (Required) The array of strings. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - The ARN of the maintenance window task. +* `id` - The ID of the maintenance window task. +* `windowTaskId` - The ID of the maintenance window task. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS Maintenance Window Task using the `windowId` and `windowTaskId` separated by `/`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS Maintenance Window Task using the `windowId` and `windowTaskId` separated by `/`. 
For example:
+
+```console
+% terraform import aws_ssm_maintenance_window_task.task <window-id>/<window-task-id>
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssm_parameter.html.markdown b/website/docs/cdktf/typescript/r/ssm_parameter.html.markdown
new file mode 100644
index 00000000000..8d9a8ce27e8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssm_parameter.html.markdown
@@ -0,0 +1,138 @@
+---
+subcategory: "SSM (Systems Manager)"
+layout: "aws"
+page_title: "AWS: aws_ssm_parameter"
+description: |-
+  Provides an SSM Parameter resource
+---
+
+
+
+# Resource: aws_ssm_parameter
+
+Provides an SSM Parameter resource.
+
+~> **Note:** `overwrite` also makes it possible to overwrite an existing SSM Parameter that was not originally created by Terraform.
+
+## Example Usage
+
+### Basic example
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmParameter } from "./.gen/providers/aws/ssm-parameter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmParameter(this, "foo", {
+      name: "foo",
+      type: "String",
+      value: "bar",
+    });
+  }
+}
+
+```
+
+### Encrypted string using default SSM KMS key
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DbInstance } from "./.gen/providers/aws/db-instance";
+import { SsmParameter } from "./.gen/providers/aws/ssm-parameter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // `databaseMasterPassword` is assumed to be defined elsewhere in the
+    // stack (for example, a Terraform variable or secret data source).
+    new DbInstance(this, "default", {
+      allocatedStorage: 10,
+      dbName: "mydb",
+      dbSubnetGroupName: "my_database_subnet_group",
+      engine: "mysql",
+      engineVersion: "5.7.16",
+      instanceClass: "db.t2.micro",
+      parameterGroupName: "default.mysql5.7",
+      password: databaseMasterPassword.stringValue,
+      storageType: "gp2",
+      username: "foo",
+    });
+    new SsmParameter(this, "secret", {
+      description: "The parameter description",
+      name: "/production/database/password/master",
+      tags: {
+        environment: "production",
+      },
+      type: "SecureString",
+      value: databaseMasterPassword.stringValue,
+    });
+  }
+}
+
+```
+
+~> **Note:** The unencrypted value of a SecureString will be stored in the raw state as plain-text.
+[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - (Required) Name of the parameter. If the name contains a path (e.g., any forward slashes (`/`)), it must be fully qualified with a leading forward slash (`/`). For additional requirements and constraints, see the [AWS SSM User Guide](https://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-parameter-name-constraints.html).
+* `type` - (Required) Type of the parameter. Valid types are `string`, `stringList` and `secureString`.
+
+The following arguments are optional:
+
+* `allowedPattern` - (Optional) Regular expression used to validate the parameter value.
+* `dataType` - (Optional) Data type of the parameter.
Valid values: `text`, `aws:ssm:integration`, and `aws:ec2:image` (for the AMI format; see [Native parameter support for Amazon Machine Image IDs](https://docs.aws.amazon.com/systems-manager/latest/userguide/parameter-store-ec2-aliases.html)).
+* `description` - (Optional) Description of the parameter.
+* `insecureValue` - (Optional, exactly one of `value` or `insecureValue` is required) Value of the parameter. **Use caution:** This value is _never_ marked as sensitive in the Terraform plan output. This argument is not valid with a `type` of `secureString`.
+* `keyId` - (Optional) KMS key ID or ARN for encrypting a SecureString.
+* `overwrite` - (Optional, **Deprecated**) Overwrite an existing parameter. If not specified, defaults to `false` if the resource was not created by Terraform (to avoid overwriting an existing resource) and to `true` otherwise (Terraform lifecycle rules should then be used to manage the update behavior).
+* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `tier` - (Optional) Parameter tier to assign to the parameter. If not specified, will use the default parameter tier for the region. Valid tiers are `standard`, `advanced`, and `intelligentTiering`. Downgrading an `advanced` tier parameter to `standard` will recreate the resource. For more information on parameter tiers, see the [AWS SSM Parameter tier comparison and guide](https://docs.aws.amazon.com/systems-manager/latest/userguide/parameter-store-advanced-parameters.html).
+* `value` - (Optional, exactly one of `value` or `insecureValue` is required) Value of the parameter. This value is always marked as sensitive in the Terraform plan output, regardless of `type`. In Terraform CLI version 0.15 and later, this may require additional configuration handling for certain scenarios. For more information, see the [Terraform v0.15 Upgrade Guide](https://www.terraform.io/upgrade-guides/0-15.html#sensitive-output-values).
+
+~> **NOTE:** Parameters with the `aws:ssm:integration` data type must be of type `secureString`, and the name must start with the prefix `/d9d01087-4a3f-49e0-b0b4-d568d7826553/ssm/integrations/webhook/`. See [here](https://docs.aws.amazon.com/systems-manager/latest/userguide/creating-integrations.html) for information on the usage of `aws:ssm:integration` parameters.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - ARN of the parameter.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `version` - Version of the parameter.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Parameters using the parameter store `name`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SSM Parameters using the parameter store `name`. For example: + +```console +% terraform import aws_ssm_parameter.my_param /my_path/my_paramname +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssm_patch_baseline.html.markdown b/website/docs/cdktf/typescript/r/ssm_patch_baseline.html.markdown new file mode 100644 index 00000000000..40a92fbc199 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssm_patch_baseline.html.markdown @@ -0,0 +1,315 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_patch_baseline" +description: |- + Provides an SSM Patch Baseline resource +--- + + + +# Resource: aws_ssm_patch_baseline + +Provides an SSM Patch Baseline resource. + +~> **NOTE on Patch Baselines:** The `approvedPatches` and `approvalRule` are +both marked as optional fields, but the Patch Baseline requires that at least one +of them is specified. + +## Example Usage + +### Basic Usage + +Using `approvedPatches` only. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmPatchBaseline } from "./.gen/providers/aws/ssm-patch-baseline"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmPatchBaseline(this, "production", { + approvedPatches: ["KB123456"], + name: "patch-baseline", + }); + } +} + +``` + +### Advanced Usage, specifying patch filters + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
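+ *
+ * The first approval rule below auto-approves critical, security, and general
+ * updates for Windows Server 2016 seven days after release; the second
+ * auto-approves all Windows Server 2012 patches on the same schedule.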
+ */ +import { SsmPatchBaseline } from "./.gen/providers/aws/ssm-patch-baseline"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmPatchBaseline(this, "production", { + approvalRule: [ + { + approveAfterDays: 7, + complianceLevel: "HIGH", + patchFilter: [ + { + key: "PRODUCT", + values: ["WindowsServer2016"], + }, + { + key: "CLASSIFICATION", + values: ["CriticalUpdates", "SecurityUpdates", "Updates"], + }, + { + key: "MSRC_SEVERITY", + values: ["Critical", "Important", "Moderate"], + }, + ], + }, + { + approveAfterDays: 7, + patchFilter: [ + { + key: "PRODUCT", + values: ["WindowsServer2012"], + }, + ], + }, + ], + approvedPatches: ["KB123456", "KB456789"], + description: "Patch Baseline Description", + globalFilter: [ + { + key: "PRODUCT", + values: ["WindowsServer2008"], + }, + { + key: "CLASSIFICATION", + values: ["ServicePacks"], + }, + { + key: "MSRC_SEVERITY", + values: ["Low"], + }, + ], + name: "patch-baseline", + rejectedPatches: ["KB987654"], + }); + } +} + +``` + +### Advanced usage, specifying Microsoft application and Windows patch rules + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmPatchBaseline } from "./.gen/providers/aws/ssm-patch-baseline"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmPatchBaseline(this, "windows_os_apps", { + approvalRule: [ + { + approveAfterDays: 7, + patchFilter: [ + { + key: "CLASSIFICATION", + values: ["CriticalUpdates", "SecurityUpdates"], + }, + { + key: "MSRC_SEVERITY", + values: ["Critical", "Important"], + }, + ], + }, + { + approveAfterDays: 7, + patchFilter: [ + { + key: "PATCH_SET", + values: ["APPLICATION"], + }, + { + key: "PRODUCT", + values: ["Office 2013", "Office 2016"], + }, + ], + }, + ], + description: "Patch both Windows and Microsoft apps", + name: "WindowsOSAndMicrosoftApps", + operatingSystem: "WINDOWS", + }); + } +} + +``` + +### Advanced usage, specifying alternate patch source repository + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
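+ *
+ * The source block below points Patch Manager at a custom yum repository;
+ * the configuration string is a standard yum/dnf repo definition.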
+ */ +import { SsmPatchBaseline } from "./.gen/providers/aws/ssm-patch-baseline"; +interface MyConfig { + patchFilter: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new SsmPatchBaseline(this, "al_2017_09", { + approvalRule: [ + { + patchFilter: config.patchFilter, + }, + ], + description: "My patch repository for Amazon Linux 2017.09", + name: "Amazon-Linux-2017.09", + operatingSystem: "AMAZON_LINUX", + source: [ + { + configuration: + "[amzn-main]\nname=amzn-main-Base\nmirrorlist=http://repo./$awsregion./$awsdomain//$releasever/main/mirror.list\nmirrorlist_expire=300\nmetadata_expire=300\npriority=10\nfailovermethod=priority\nfastestmirror_enabled=0\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-amazon-ga\nenabled=1\nretries=3\ntimeout=5\nreport_instanceid=yes\n\n", + name: "My-AL2017.09", + products: ["AmazonLinux2017.09"], + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the patch baseline. +* `description` - (Optional) The description of the patch baseline. +* `operatingSystem` - (Optional) The operating system the patch baseline applies to. + Valid values are + `almaLinux`, + `amazonLinux`, + `amazonLinux2`, + `amazonLinux2022`, + `amazonLinux2023`, + `centos`, + `debian`, + `macos`, + `oracleLinux`, + `raspbian`, + `redhatEnterpriseLinux`, + `rockyLinux`, + `suse`, + `ubuntu`, and + `windows`. + The default value is `windows`. +* `approvedPatchesComplianceLevel` - (Optional) The compliance level for approved patches. + This means that if an approved patch is reported as missing, this is the severity of the compliance violation. + Valid values are `critical`, `high`, `medium`, `low`, `informational`, `unspecified`. + The default value is `unspecified`. +* `approvedPatches` - (Optional) A list of explicitly approved patches for the baseline. + Cannot be specified with `approvalRule`. +* `rejectedPatches` - (Optional) A list of rejected patches. +* `globalFilter` - (Optional) A set of global filters used to exclude patches from the baseline. + Up to 4 global filters can be specified using Key/Value pairs. + Valid Keys are `product`, `classification`, `msrcSeverity`, and `patchId`. +* `approvalRule` - (Optional) A set of rules used to include patches in the baseline. + Up to 10 approval rules can be specified. + See [`approvalRule`](#approval_rule-block) below. +* `source` - (Optional) Configuration block with alternate sources for patches. + Applies to Linux instances only. + See [`source`](#source-block) below. +* `rejectedPatchesAction` - (Optional) The action for Patch Manager to take on patches included in the `rejectedPatches` list. + Valid values are `allowAsDependency` and `block`. +* `approvedPatchesEnableNonSecurity` - (Optional) Indicates whether the list of approved patches includes non-security updates that should be applied to the instances. + Applies to Linux instances only. +* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
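+
+As a sketch only (the KB numbers are hypothetical), a baseline that explicitly approves one patch and hard-blocks another might look like:
+
+```typescript
+// A sketch, not 'cdktf convert' output: BLOCK keeps rejected patches out
+// even when a rule or the approved list would otherwise install them.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SsmPatchBaseline } from "./.gen/providers/aws/ssm-patch-baseline";
+class StrictBaselineStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmPatchBaseline(this, "strict", {
+      name: "strict-baseline",
+      operatingSystem: "WINDOWS",
+      approvedPatches: ["KB123456"],
+      approvedPatchesComplianceLevel: "CRITICAL",
+      rejectedPatches: ["KB987654"],
+      rejectedPatchesAction: "BLOCK",
+    });
+  }
+}
+```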
+
+### `approvalRule` Block
+
+The `approvalRule` block supports:
+
+* `approveAfterDays` - (Optional) The number of days after the release date of each patch matched by the rule that the patch is marked as approved in the patch baseline.
+  Valid Range: 0 to 100.
+  Conflicts with `approveUntilDate`.
+* `approveUntilDate` - (Optional) The cutoff date for auto approval of released patches.
+  Any patches released on or before this date are installed automatically.
+  Date is formatted as `YYYY-MM-DD`.
+  Conflicts with `approveAfterDays`.
+* `patchFilter` - (Required) The patch filter group that defines the criteria for the rule.
+  Up to 5 patch filters can be specified per approval rule using Key/Value pairs.
+  Valid combinations of these Keys and the `operatingSystem` value can be found in the [SSM DescribePatchProperties API Reference](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_DescribePatchProperties.html).
+  Valid Values are exact values for the patch property given as the key, or a wildcard `*`, which matches all values.
+  * `patchSet` defaults to `os` if unspecified
+* `complianceLevel` - (Optional) The compliance level for patches approved by this rule.
+  Valid values are `critical`, `high`, `medium`, `low`, `informational`, and `unspecified`.
+  The default value is `unspecified`.
+* `enableNonSecurity` - (Optional) Boolean enabling the application of non-security updates.
+  The default value is `false`.
+  Valid for Linux instances only.
+
+### `source` Block
+
+The `source` block supports:
+
+* `name` - (Required) The name specified to identify the patch source.
+* `configuration` - (Required) The value of the yum repo configuration.
+  For information about other options available for your yum repository configuration, see the [`dnf.conf` documentation](https://man7.org/linux/man-pages/man5/dnf.conf.5.html).
+* `products` - (Required) The specific operating system versions a patch repository applies to, such as `"Ubuntu16.04"`, `"AmazonLinux2016.09"`, `"RedhatEnterpriseLinux7.2"` or `"Suse12.7"`.
+  For lists of supported product values, see [PatchFilter](https://docs.aws.amazon.com/systems-manager/latest/APIReference/API_PatchFilter.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the patch baseline.
+* `arn` - The ARN of the patch baseline.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Patch Baselines using their baseline ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSM Patch Baselines using their baseline ID.
For example: + +```console +% terraform import aws_ssm_patch_baseline.example pb-12345678 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssm_patch_group.html.markdown b/website/docs/cdktf/typescript/r/ssm_patch_group.html.markdown new file mode 100644 index 00000000000..004bd35a362 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssm_patch_group.html.markdown @@ -0,0 +1,56 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_patch_group" +description: |- + Provides an SSM Patch Group resource +--- + + + +# Resource: aws_ssm_patch_group + +Provides an SSM Patch Group resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmPatchBaseline } from "./.gen/providers/aws/ssm-patch-baseline"; +import { SsmPatchGroup } from "./.gen/providers/aws/ssm-patch-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const production = new SsmPatchBaseline(this, "production", { + approvedPatches: ["KB123456"], + name: "patch-baseline", + }); + new SsmPatchGroup(this, "patchgroup", { + baselineId: production.id, + patchGroup: "patch-group-name", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `baselineId` - (Required) The ID of the patch baseline to register the patch group with. +* `patchGroup` - (Required) The name of the patch group that should be registered with the patch baseline. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the patch group and ID of the patch baseline separated by a comma (`,`). + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssm_resource_data_sync.html.markdown b/website/docs/cdktf/typescript/r/ssm_resource_data_sync.html.markdown new file mode 100644 index 00000000000..e6b8f159872 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssm_resource_data_sync.html.markdown @@ -0,0 +1,137 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_resource_data_sync" +description: |- + Provides a SSM resource data sync. +--- + + + +# Resource: aws_ssm_resource_data_sync + +Provides a SSM resource data sync. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
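+ *
+ * The bucket policy below grants ssm.amazonaws.com the access it needs to
+ * verify the bucket ACL and deliver inventory objects with
+ * bucket-owner-full-control.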
+ */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3BucketPolicy } from "./.gen/providers/aws/s3-bucket-policy"; +import { SsmResourceDataSync } from "./.gen/providers/aws/ssm-resource-data-sync"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const hoge = new S3Bucket(this, "hoge", { + bucket: "tf-test-bucket-1234", + }); + new SsmResourceDataSync(this, "foo", { + name: "foo", + s3Destination: { + bucketName: hoge.bucket, + region: hoge.region, + }, + }); + const dataAwsIamPolicyDocumentHoge = new DataAwsIamPolicyDocument( + this, + "hoge_2", + { + statement: [ + { + actions: ["s3:GetBucketAcl"], + effect: "Allow", + principals: [ + { + identifiers: ["ssm.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:s3:::tf-test-bucket-1234"], + sid: "SSMBucketPermissionsCheck", + }, + { + actions: ["s3:PutObject"], + condition: [ + { + test: "StringEquals", + values: ["bucket-owner-full-control"], + variable: "s3:x-amz-acl", + }, + ], + effect: "Allow", + principals: [ + { + identifiers: ["ssm.amazonaws.com"], + type: "Service", + }, + ], + resources: ["arn:aws:s3:::tf-test-bucket-1234/*"], + sid: "SSMBucketDelivery", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIamPolicyDocumentHoge.overrideLogicalId("hoge"); + const awsS3BucketPolicyHoge = new S3BucketPolicy(this, "hoge_3", { + bucket: hoge.id, + policy: Token.asString(dataAwsIamPolicyDocumentHoge.json), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsS3BucketPolicyHoge.overrideLogicalId("hoge"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) Name for the configuration. +* `s3Destination` - (Required) Amazon S3 configuration details for the sync. + +## s3_destination + +`s3Destination` supports the following: + +* `bucketName` - (Required) Name of S3 bucket where the aggregated data is stored. +* `region` - (Required) Region with the bucket targeted by the Resource Data Sync. +* `kmsKeyArn` - (Optional) ARN of an encryption key for a destination in Amazon S3. +* `prefix` - (Optional) Prefix for the bucket. +* `syncFormat` - (Optional) A supported sync format. Only JsonSerDe is currently supported. Defaults to JsonSerDe. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM resource data sync using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SSM resource data sync using the `name`. 
For example: + +```console +% terraform import aws_ssm_resource_data_sync.example example-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssm_service_setting.html.markdown b/website/docs/cdktf/typescript/r/ssm_service_setting.html.markdown new file mode 100644 index 00000000000..7b617eedc6f --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssm_service_setting.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "SSM (Systems Manager)" +layout: "aws" +page_title: "AWS: aws_ssm_service_setting" +description: |- + Defines how a user interacts with or uses a service or a feature of a service. +--- + + + +# Resource: aws_ssm_service_setting + +This setting defines how a user interacts with or uses a service or a feature of a service. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmServiceSetting } from "./.gen/providers/aws/ssm-service-setting"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmServiceSetting(this, "test_setting", { + settingId: + "arn:aws:ssm:us-east-1:123456789012:servicesetting/ssm/parameter-store/high-throughput-enabled", + settingValue: "true", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `settingId` - (Required) ID of the service setting. +* `settingValue` - (Required) Value of the service setting. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the service setting. +* `status` - Status of the service setting. Value can be `default`, `customized` or `pendingUpdate`. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS SSM Service Setting using the `settingId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS SSM Service Setting using the `settingId`. For example: + +```console +% terraform import aws_ssm_service_setting.example arn:aws:ssm:us-east-1:123456789012:servicesetting/ssm/parameter-store/high-throughput-enabled +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssmcontacts_contact.html.markdown b/website/docs/cdktf/typescript/r/ssmcontacts_contact.html.markdown new file mode 100644 index 00000000000..55829f19ca5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssmcontacts_contact.html.markdown @@ -0,0 +1,116 @@ +--- +subcategory: "SSM Contacts" +layout: "aws" +page_title: "AWS: aws_ssmcontacts_contact" +description: |- + Terraform resource for managing an AWS SSM Contact. +--- + + + +# Resource: aws_ssmcontacts_contact + +Terraform resource for managing an AWS SSM Contact. 
+ +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmcontactsContact } from "./.gen/providers/aws/ssmcontacts-contact"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmcontactsContact(this, "example", { + alias: "alias", + dependsOn: [awsSsmincidentsReplicationSetExample], + type: "PERSONAL", + }); + } +} + +``` + +### Usage With All Fields + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmcontactsContact } from "./.gen/providers/aws/ssmcontacts-contact"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsmcontactsContact(this, "example", { + alias: "alias", + dependsOn: [awsSsmincidentsReplicationSetExample], + displayName: "displayName", + tags: { + key: "value", + }, + type: "ESCALATION", + }); + } +} + +``` + +## Argument Reference + +~> **NOTE:** A contact implicitly depends on a replication set. If you configured your replication set in Terraform, we recommend you add it to the `dependsOn` argument for the Terraform Contact Resource. + +The following arguments are required: + +- `alias` - (Required) A unique and identifiable alias for the contact or escalation plan. + +- `type` - (Required) The type of contact engaged. A single contact is type PERSONAL and an escalation + plan is type ESCALATION. + +The following arguments are optional: + +- `displayName` - (Optional) Full friendly name of the contact or escalation plan. + +- `tags` - (Optional) Map of tags to assign to the resource. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +- `arn` - The Amazon Resource Name (ARN) of the contact or escalation plan. + +- `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Contact using the `arn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SSM Contact using the `arn`. 
For example:
+
+```console
+% terraform import aws_ssmcontacts_contact.example {ARNValue}
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssmcontacts_contact_channel.html.markdown b/website/docs/cdktf/typescript/r/ssmcontacts_contact_channel.html.markdown
new file mode 100644
index 00000000000..f044ee9c154
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssmcontacts_contact_channel.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "SSM Contacts"
+layout: "aws"
+page_title: "AWS: aws_ssmcontacts_contact_channel"
+description: |-
+  Terraform resource for managing an AWS SSM Contacts Contact Channel.
+---
+
+
+
+# Resource: aws_ssmcontacts_contact_channel
+
+Terraform resource for managing an AWS SSM Contacts Contact Channel.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmcontactsContactChannel } from "./.gen/providers/aws/ssmcontacts-contact-channel";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmcontactsContactChannel(this, "example", {
+      contactId:
+        "arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias",
+      deliveryAddress: {
+        simpleAddress: "email@example.com",
+      },
+      name: "Example contact channel",
+      type: "EMAIL",
+    });
+  }
+}
+
+```
+
+### Usage with SSM Contact
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmcontactsContact } from "./.gen/providers/aws/ssmcontacts-contact";
+import { SsmcontactsContactChannel } from "./.gen/providers/aws/ssmcontacts-contact-channel";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const exampleContact = new SsmcontactsContact(this, "example_contact", {
+      alias: "example_contact",
+      type: "PERSONAL",
+    });
+    new SsmcontactsContactChannel(this, "example", {
+      contactId: exampleContact.arn,
+      deliveryAddress: {
+        simpleAddress: "email@example.com",
+      },
+      name: "Example contact channel",
+      type: "EMAIL",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** The contact channel needs to be activated in the AWS Systems Manager console, otherwise it can't be used to engage the contact. See the [Contacts section of the Incident Manager User Guide](https://docs.aws.amazon.com/incident-manager/latest/userguide/contacts.html) for more information.
+
+The following arguments are required:
+
+- `contactId` - (Required) Amazon Resource Name (ARN) of the AWS SSM Contact that the contact channel belongs to.
+
+- `deliveryAddress` - (Required) Block that contains contact engagement details. See details below.
+
+- `name` - (Required) Name of the contact channel.
+
+- `type` - (Required) Type of the contact channel. One of `SMS`, `VOICE` or `EMAIL`.
+
+### delivery_address
+
+- `simpleAddress` - (Required) Details to engage this contact channel.
The expected format depends on the contact channel type and is described in the [`ContactChannelAddress` section of the SSM Contacts API Reference](https://docs.aws.amazon.com/incident-manager/latest/APIReference/API_SSMContacts_ContactChannelAddress.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+- `activationStatus` - Whether the contact channel is activated. The contact channel must be activated to use it to engage the contact. One of `ACTIVATED` or `NOT_ACTIVATED`.
+
+- `arn` - Amazon Resource Name (ARN) of the contact channel.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Contact Channel using the `arn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSM Contact Channel using the `arn`. For example:
+
+```console
+% terraform import aws_ssmcontacts_contact_channel.example arn:aws:ssm-contacts:us-west-2:123456789012:contact-channel/example
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssmcontacts_plan.html.markdown b/website/docs/cdktf/typescript/r/ssmcontacts_plan.html.markdown
new file mode 100644
index 00000000000..0c2dc30b7c6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssmcontacts_plan.html.markdown
@@ -0,0 +1,164 @@
+---
+subcategory: "SSM Contacts"
+layout: "aws"
+page_title: "AWS: aws_ssmcontacts_plan"
+description: |-
+  Terraform resource for managing an AWS SSM Contact Plan.
+---
+
+
+
+# Resource: aws_ssmcontacts_plan
+
+Terraform resource for managing an AWS SSM Contact Plan.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmcontactsPlan } from "./.gen/providers/aws/ssmcontacts-plan";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmcontactsPlan(this, "example", {
+      contactId:
+        "arn:aws:ssm-contacts:us-west-2:123456789012:contact/contactalias",
+      stage: [
+        {
+          durationInMinutes: 1,
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Usage with SSM Contact
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
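+ *
+ * Note: the stages in a plan are engaged in order; each stage's
+ * durationInMinutes sets how long it runs before the next stage begins.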
+ */ +import { SsmcontactsContact } from "./.gen/providers/aws/ssmcontacts-contact"; +import { SsmcontactsPlan } from "./.gen/providers/aws/ssmcontacts-plan"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const contact = new SsmcontactsContact(this, "contact", { + alias: "alias", + type: "PERSONAL", + }); + new SsmcontactsPlan(this, "plan", { + contactId: contact.arn, + stage: [ + { + durationInMinutes: 1, + }, + ], + }); + } +} + +``` + +### Usage With All Fields + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsmcontactsContact } from "./.gen/providers/aws/ssmcontacts-contact"; +import { SsmcontactsPlan } from "./.gen/providers/aws/ssmcontacts-plan"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const contactOne = new SsmcontactsContact(this, "contact_one", { + alias: "alias", + type: "PERSONAL", + }); + const contactTwo = new SsmcontactsContact(this, "contact_two", { + alias: "alias", + type: "PERSONAL", + }); + const escalationPlan = new SsmcontactsContact(this, "escalation_plan", { + alias: "escalation-plan-alias", + type: "ESCALATION", + }); + new SsmcontactsPlan(this, "test", { + contactId: escalationPlan.arn, + stage: [ + { + durationInMinutes: 0, + target: [ + { + contactTargetInfo: { + contactId: contactOne.arn, + isEssential: false, + }, + }, + { + contactTargetInfo: { + contactId: contactTwo.arn, + isEssential: true, + }, + }, + ], + }, + ], + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `contactId` - (Required) The Amazon Resource Name (ARN) of the contact or escalation plan. +* `stage` - (Required) List of stages. A contact has an engagement plan with stages that contact specified contact channels. An escalation plan uses stages that contact specified contacts. + +## Attribute Reference + +This resource exports no additional attributes. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSM Contact Plan using the Contact ARN. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SSM Contact Plan using the Contact ARN. For example: + +```console +% terraform import aws_ssmcontacts_plan.example {ARNValue} +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssmincidents_replication_set.html.markdown b/website/docs/cdktf/typescript/r/ssmincidents_replication_set.html.markdown new file mode 100644 index 00000000000..8395da986ae --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssmincidents_replication_set.html.markdown @@ -0,0 +1,217 @@ +--- +subcategory: "SSM Incident Manager Incidents" +layout: "aws" +page_title: "AWS: aws_ssmincidents_replication_set" +description: |- + Terraform resource for managing an incident replication set for AWS Systems Manager Incident Manager. 
+---
+
+
+# Resource: aws_ssmincidents_replication_set
+
+Provides a resource for managing a replication set in AWS Systems Manager Incident Manager.
+
+~> **NOTE:** Deleting a replication set also deletes all Incident Manager related data including response plans, incident records, contacts and escalation plans.
+
+## Example Usage
+
+### Basic Usage
+
+Create a replication set.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmincidentsReplicationSet } from "./.gen/providers/aws/ssmincidents-replication-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmincidentsReplicationSet(this, "replicationSetName", {
+      region: [
+        {
+          name: "us-west-2",
+        },
+      ],
+      tags: {
+        exampleTag: "exampleValue",
+      },
+    });
+  }
+}
+
+```
+
+Add a Region to a replication set. (You can add only one Region at a time.)
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmincidentsReplicationSet } from "./.gen/providers/aws/ssmincidents-replication-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmincidentsReplicationSet(this, "replicationSetName", {
+      region: [
+        {
+          name: "us-west-2",
+        },
+        {
+          name: "ap-southeast-2",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+Delete a Region from a replication set. (You can delete only one Region at a time.)
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmincidentsReplicationSet } from "./.gen/providers/aws/ssmincidents-replication-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmincidentsReplicationSet(this, "replicationSetName", {
+      region: [
+        {
+          name: "us-west-2",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Basic Usage with an AWS Customer Managed Key
+
+Create a replication set with an AWS Key Management Service (AWS KMS) customer managed key:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
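+ *
+ * Note: the KMS key referenced below must exist before the replication
+ * set is created; see the key-related NOTEs under Argument Reference.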
+ */
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { SsmincidentsReplicationSet } from "./.gen/providers/aws/ssmincidents-replication-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const exampleKey = new KmsKey(this, "example_key", {});
+    new SsmincidentsReplicationSet(this, "replicationSetName", {
+      region: [
+        {
+          kmsKeyArn: exampleKey.arn,
+          name: "us-west-2",
+        },
+      ],
+      tags: {
+        exampleTag: "exampleValue",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** The Region specified by a Terraform provider must always be one of the Regions specified for the replication set. This is especially important when you perform complex update operations.
+
+~> **NOTE:** After a replication set is created, you can add or delete only one Region at a time.
+
+~> **NOTE:** Incident Manager does not support updating the customer managed key associated with a replication set. Instead, for a replication set with multiple Regions, you must first delete a Region from the replication set, then re-add it with a different customer managed key in separate `terraform apply` operations. For a replication set with only one Region, the entire replication set must be deleted and recreated. To do this, comment out the replication set and all response plans, and then run the `terraform apply` command to recreate the replication set with the new customer managed key.
+
+~> **NOTE:** You must use either AWS owned keys in all Regions of a replication set or customer managed keys. To change between an AWS owned key and a customer managed key, the replication set and its associated data must be deleted and recreated.
+
+~> **NOTE:** If possible, create all the customer managed keys you need (using the `terraform apply` command) before you create the replication set, or create the keys and replication set in the same `terraform apply` command. Otherwise, to delete a replication set, you must run one `terraform apply` command to delete the replication set and another to delete the AWS KMS keys used by the replication set. Deleting the AWS KMS keys before deleting the replication set results in an error. In that case, you must manually reenable the deleted key using the AWS Management Console before you can delete the replication set.
+
+The `region` configuration block is required and supports the following arguments:
+
+* `name` - (Required) The name of the Region, such as `ap-southeast-2`.
+* `kmsKeyArn` - (Optional) The Amazon Resource Name (ARN) of the customer managed key. If omitted, AWS manages the AWS KMS keys for you, using an AWS owned key, as indicated by a default value of `DefaultKey`.
+
+The following arguments are optional:
+
+* `tags` - Tags applied to the replication set.
+
+For information about the maximum allowed number of Regions and tag value constraints, see [CreateReplicationSet in the *AWS Systems Manager Incident Manager API Reference*](https://docs.aws.amazon.com/incident-manager/latest/APIReference/API_CreateReplicationSet.html).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the replication set.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `createdBy` - The ARN of the user who created the replication set.
+* `createdTime` - A timestamp showing when the replication set was created.
+* `deletionProtected` - If `true`, the last Region in a replication set cannot be deleted.
+* `lastModifiedBy` - The ARN of the user who last modified the replication set.
+* `lastModifiedTime` - A timestamp showing when the replication set was last modified.
+* `status` - The overall status of a replication set.
+  * Valid Values: `ACTIVE` | `CREATING` | `UPDATING` | `DELETING` | `FAILED`
+
+In addition to the preceding arguments, the `region` configuration block exports the following attributes for each Region:
+
+* `status` - The current status of the Region.
+  * Valid Values: `ACTIVE` | `CREATING` | `UPDATING` | `DELETING` | `FAILED`
+* `statusUpdateTime` - A timestamp showing when the Region status was last updated.
+* `statusMessage` - More information about the status of a Region.
+
+## Timeouts
+
+~> **NOTE:** `update` and `delete` operations applied to replication sets with large numbers of response plans and data take longer to complete. We recommend that you configure custom timeouts for this situation.
+
+~> **NOTE:** Each additional Region included when you create a replication set increases the amount of time required to complete the `create` operation.
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `120m`)
+* `update` - (Default `120m`)
+* `delete` - (Default `120m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Incident Manager replication set. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an Incident Manager replication set. For example:
+
+```console
+% terraform import aws_ssmincidents_replication_set.replicationSetName import
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssmincidents_response_plan.html.markdown b/website/docs/cdktf/typescript/r/ssmincidents_response_plan.html.markdown
new file mode 100644
index 00000000000..786f4443edf
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssmincidents_response_plan.html.markdown
@@ -0,0 +1,205 @@
+---
+subcategory: "SSM Incident Manager Incidents"
+layout: "aws"
+page_title: "AWS: aws_ssmincidents_response_plan"
+description: |-
+  Terraform resource for managing an incident response plan in AWS Systems Manager Incident Manager.
+---
+
+
+
+# Resource: aws_ssmincidents_response_plan
+
+Provides a Terraform resource to manage response plans in AWS Systems Manager Incident Manager.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
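+ *
+ * Note: a response plan implicitly depends on a replication set, which
+ * is why the example adds one to dependsOn below.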
+ */
+import { SsmincidentsResponsePlan } from "./.gen/providers/aws/ssmincidents-response-plan";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmincidentsResponsePlan(this, "example", {
+      dependsOn: [awsSsmincidentsReplicationSetExample],
+      incidentTemplate: {
+        impact: Token.asNumber("3"),
+        title: "title",
+      },
+      name: "name",
+      tags: {
+        key: "value",
+      },
+    });
+  }
+}
+
+```
+
+### Usage With All Fields
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SsmincidentsResponsePlan } from "./.gen/providers/aws/ssmincidents-response-plan";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SsmincidentsResponsePlan(this, "example", {
+      action: {
+        ssmAutomation: [
+          {
+            documentName: document1.name,
+            documentVersion: "version1",
+            dynamicParameters: {
+              anotherKey: "INCIDENT_RECORD_ARN",
+              someKey: "INVOLVED_RESOURCES",
+            },
+            parameter: [
+              {
+                name: "key",
+                values: ["value1", "value2"],
+              },
+              {
+                name: "foo",
+                values: ["bar"],
+              },
+            ],
+            roleArn: role1.arn,
+            targetAccount: "RESPONSE_PLAN_OWNER_ACCOUNT",
+          },
+        ],
+      },
+      chatChannel: [topic.arn],
+      dependsOn: [awsSsmincidentsReplicationSetExample],
+      displayName: "display name",
+      engagements: [
+        "arn:aws:ssm-contacts:us-east-2:111122223333:contact/test1",
+      ],
+      incidentTemplate: {
+        dedupeString: "dedupe",
+        impact: Token.asNumber("3"),
+        incidentTags: {
+          key: "value",
+        },
+        notificationTarget: [
+          {
+            snsTopicArn: example1.arn,
+          },
+          {
+            snsTopicArn: example2.arn,
+          },
+        ],
+        summary: "summary",
+        title: "title",
+      },
+      integration: {
+        pagerduty: [
+          {
+            name: "pagerdutyIntegration",
+            secretId: "example",
+            serviceId: "example",
+          },
+        ],
+      },
+      name: "name",
+      tags: {
+        key: "value",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** A response plan implicitly depends on a replication set. If you configured your replication set in Terraform,
+we recommend you add it to the `dependsOn` argument for the Terraform ResponsePlan Resource.
+
+The following arguments are required:
+
+* `name` - (Required) The name of the response plan.
+
+The `incidentTemplate` configuration block is required and supports the following arguments:
+
+* `title` - (Required) The title of a generated incident.
+* `impact` - (Required) The impact value of a generated incident. The following values are supported:
+  * `1` - Severe Impact
+  * `2` - High Impact
+  * `3` - Medium Impact
+  * `4` - Low Impact
+  * `5` - No Impact
+* `dedupeString` - (Optional) A string used to stop Incident Manager from creating multiple incident records for the same incident.
+* `incidentTags` - (Optional) The tags assigned to an incident template. When an incident starts, Incident Manager assigns the tags specified in the template to the incident.
+* `summary` - (Optional) The summary of an incident.
+* `notificationTarget` - (Optional) The Amazon Simple Notification Service (Amazon SNS) targets that this incident notifies when it is updated. The `notificationTarget` configuration block supports the following argument:
+  * `snsTopicArn` - (Required) The ARN of the Amazon SNS topic.
+
+The following arguments are optional:
+
+* `tags` - (Optional) The tags applied to the response plan.
+* `displayName` - (Optional) The long format of the response plan name. This field can contain spaces.
+* `chatChannel` - (Optional) The AWS Chatbot chat channel used for collaboration during an incident.
+* `engagements` - (Optional) The Amazon Resource Name (ARN) for the contacts and escalation plans that the response plan engages during an incident.
+* `action` - (Optional) The actions that the response plan starts at the beginning of an incident.
+  * `ssmAutomation` - (Optional) The Systems Manager automation document to start as the runbook at the beginning of the incident. The following values are supported:
+    * `documentName` - (Required) The automation document's name.
+    * `roleArn` - (Required) The Amazon Resource Name (ARN) of the role that the automation document assumes when it runs commands.
+    * `documentVersion` - (Optional) The version of the automation document to use at runtime.
+    * `targetAccount` - (Optional) The account that the automation document runs in. This can be in either the management account or an application account.
+    * `parameter` - (Optional) The key-value pair parameters to use when the automation document runs. The following values are supported:
+      * `name` - The name of the parameter.
+      * `values` - The values for the associated parameter name.
+    * `dynamicParameters` - (Optional) The key-value pairs to resolve dynamic parameter values when processing a Systems Manager Automation runbook.
+* `integration` - (Optional) Information about third-party services integrated into the response plan. The following values are supported:
+  * `pagerduty` - (Optional) Details about the PagerDuty configuration for a response plan. The following values are supported:
+    * `name` - (Required) The name of the PagerDuty configuration.
+    * `serviceId` - (Required) The ID of the PagerDuty service that the response plan associates with the incident at launch.
+    * `secretId` - (Required) The ID of the AWS Secrets Manager secret that stores your PagerDuty key (either a General Access REST API Key or User Token REST API Key) and other user credentials.
+
+For more information about the constraints for each field, see [CreateResponsePlan](https://docs.aws.amazon.com/incident-manager/latest/APIReference/API_CreateResponsePlan.html) in the *AWS Systems Manager Incident Manager API Reference*.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the response plan.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import an Incident Manager response plan using the response plan ARN. You can find the response plan ARN in the AWS Management Console. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import an Incident Manager response plan using the response plan ARN.
You can find the response plan ARN in the AWS Management Console. For example: + +```console +% terraform import aws_ssmincidents_response_plan.responsePlanName ARNValue +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssoadmin_account_assignment.html.markdown b/website/docs/cdktf/typescript/r/ssoadmin_account_assignment.html.markdown new file mode 100644 index 00000000000..6e0830161c0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssoadmin_account_assignment.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "SSO Admin" +layout: "aws" +page_title: "AWS: aws_ssoadmin_account_assignment" +description: |- + Manages a Single Sign-On (SSO) Account Assignment +--- + + + +# Resource: aws_ssoadmin_account_assignment + +Provides a Single Sign-On (SSO) Account Assignment resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIdentitystoreGroup } from "./.gen/providers/aws/data-aws-identitystore-group"; +import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances"; +import { DataAwsSsoadminPermissionSet } from "./.gen/providers/aws/data-aws-ssoadmin-permission-set"; +import { SsoadminAccountAssignment } from "./.gen/providers/aws/ssoadmin-account-assignment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsSsoadminInstances(this, "example", {}); + const dataAwsSsoadminPermissionSetExample = + new DataAwsSsoadminPermissionSet(this, "example_1", { + instanceArn: Token.asString( + propertyAccess(Fn.tolist(example.arns), ["0"]) + ), + name: "AWSReadOnlyAccess", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsSsoadminPermissionSetExample.overrideLogicalId("example"); + const dataAwsIdentitystoreGroupExample = new DataAwsIdentitystoreGroup( + this, + "example_2", + { + alternateIdentifier: { + uniqueAttribute: { + attributePath: "DisplayName", + attributeValue: "ExampleGroup", + }, + }, + identityStoreId: Token.asString( + propertyAccess(Fn.tolist(example.identityStoreIds), ["0"]) + ), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsIdentitystoreGroupExample.overrideLogicalId("example"); + const awsSsoadminAccountAssignmentExample = new SsoadminAccountAssignment( + this, + "example_3", + { + instanceArn: Token.asString( + propertyAccess(Fn.tolist(example.arns), ["0"]) + ), + permissionSetArn: Token.asString( + dataAwsSsoadminPermissionSetExample.arn + ), + principalId: Token.asString(dataAwsIdentitystoreGroupExample.groupId), + principalType: "GROUP", + targetId: "012347678910", + targetType: "AWS_ACCOUNT", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSsoadminAccountAssignmentExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance. 
+* `permissionSetArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set that the admin wants to grant the principal access to.
+* `principalId` - (Required, Forces new resource) An identifier for an object in SSO, such as a user or group. PrincipalIds are GUIDs (for example, `f81d4fae-7dec-11d0-a765-00a0c91e6bf6`).
+* `principalType` - (Required, Forces new resource) The entity type for which the assignment will be created. Valid values: `USER`, `GROUP`.
+* `targetId` - (Required, Forces new resource) An AWS account identifier, typically a 10-12 digit string.
+* `targetType` - (Optional, Forces new resource) The entity type for which the assignment will be created. Valid values: `AWS_ACCOUNT`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the Account Assignment, i.e., `principalId`, `principalType`, `targetId`, `targetType`, `permissionSetArn`, `instanceArn` separated by commas (`,`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Account Assignments using the `principalId`, `principalType`, `targetId`, `targetType`, `permissionSetArn`, `instanceArn` separated by commas (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSO Account Assignments using the `principalId`, `principalType`, `targetId`, `targetType`, `permissionSetArn`, `instanceArn` separated by commas (`,`). For example:
+
+```console
+% terraform import aws_ssoadmin_account_assignment.example f81d4fae-7dec-11d0-a765-00a0c91e6bf6,GROUP,1234567890,AWS_ACCOUNT,arn:aws:sso:::permissionSet/ssoins-0123456789abcdef/ps-0123456789abcdef,arn:aws:sso:::instance/ssoins-0123456789abcdef
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssoadmin_customer_managed_policy_attachment.html.markdown b/website/docs/cdktf/typescript/r/ssoadmin_customer_managed_policy_attachment.html.markdown
new file mode 100644
index 00000000000..c4ff5847206
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssoadmin_customer_managed_policy_attachment.html.markdown
@@ -0,0 +1,132 @@
+---
+subcategory: "SSO Admin"
+layout: "aws"
+page_title: "AWS: aws_ssoadmin_customer_managed_policy_attachment"
+description: |-
+  Manages a customer managed policy for a Single Sign-On (SSO) Permission Set
+---
+
+
+
+# Resource: aws_ssoadmin_customer_managed_policy_attachment
+
+Provides a customer managed policy attachment for a Single Sign-On (SSO) Permission Set resource
+
+~> **NOTE:** Creating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
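+ *
+ * Note: the attachment references the IAM policy by name and path, so a
+ * policy with the same name and path must exist in every account where
+ * the permission set is deployed.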
+ */ +import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { SsoadminCustomerManagedPolicyAttachment } from "./.gen/providers/aws/ssoadmin-customer-managed-policy-attachment"; +import { SsoadminPermissionSet } from "./.gen/providers/aws/ssoadmin-permission-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new IamPolicy(this, "example", { + description: "My test policy", + name: "TestPolicy", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ec2:Describe*"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + }); + const dataAwsSsoadminInstancesExample = new DataAwsSsoadminInstances( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsSsoadminInstancesExample.overrideLogicalId("example"); + const awsSsoadminPermissionSetExample = new SsoadminPermissionSet( + this, + "example_2", + { + instanceArn: Token.asString( + propertyAccess(Fn.tolist(dataAwsSsoadminInstancesExample.arns), ["0"]) + ), + name: "Example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSsoadminPermissionSetExample.overrideLogicalId("example"); + const awsSsoadminCustomerManagedPolicyAttachmentExample = + new SsoadminCustomerManagedPolicyAttachment(this, "example_3", { + customerManagedPolicyReference: { + name: example.name, + path: "/", + }, + instanceArn: Token.asString( + awsSsoadminPermissionSetExample.instanceArn + ), + permissionSetArn: Token.asString(awsSsoadminPermissionSetExample.arn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSsoadminCustomerManagedPolicyAttachmentExample.overrideLogicalId( + "example" + ); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed. +* `permissionSetArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set. +* `customerManagedPolicyReference` - (Required, Forces new resource) Specifies the name and path of a customer managed policy. See below. + +### Customer Managed Policy Reference + +The `customerManagedPolicyReference` config block describes a customer managed IAM policy. You must have an IAM policy that matches the name and path in each AWS account where you want to deploy your specified permission set. + +* `name` - (Required, Forces new resource) Name of the customer managed IAM Policy to be attached. +* `path` - (Optional, Forces new resource) The path to the IAM policy to be attached. The default is `/`. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-friendly-names) for more information. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Policy Name, Policy Path, Permission Set Amazon Resource Name (ARN), and SSO Instance ARN, each separated by a comma (`,`). 
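+
+Because the attachment references the policy by name and path rather than by ARN, each target account needs its own copy of the policy. The following is a minimal sketch of one way to create that copy with an aliased provider; the alias, region, and policy body here are assumptions for illustration, not part of the resource's API:
+
+```typescript
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+import { IamPolicy } from "./.gen/providers/aws/iam-policy";
+class MemberAccountPolicyStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Hypothetical aliased provider targeting a member account that the
+    // permission set is provisioned to.
+    const member = new AwsProvider(this, "member", {
+      alias: "member",
+      region: "us-east-1",
+    });
+    // Same name ("TestPolicy") and path ("/") as the policy referenced by
+    // customerManagedPolicyReference in the attachment above.
+    new IamPolicy(this, "example_member", {
+      provider: member,
+      name: "TestPolicy",
+      path: "/",
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: ["ec2:Describe*"],
+              Effect: "Allow",
+              Resource: "*",
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+    });
+  }
+}
+```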
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Managed Policy Attachments using the `name`, `path`, `permissionSetArn`, and `instanceArn` separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSO Managed Policy Attachments using the `name`, `path`, `permissionSetArn`, and `instanceArn` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_ssoadmin_customer_managed_policy_attachment.example TestPolicy,/,arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssoadmin_instance_access_control_attributes.html.markdown b/website/docs/cdktf/typescript/r/ssoadmin_instance_access_control_attributes.html.markdown
new file mode 100644
index 00000000000..d3821c4c9a6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssoadmin_instance_access_control_attributes.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "SSO Admin"
+layout: "aws"
+page_title: "AWS: aws_ssoadmin_instance_access_control_attributes"
+description: |-
+  Provides a Single Sign-On (SSO) ABAC Resource: https://docs.aws.amazon.com/singlesignon/latest/userguide/abac.html
+---
+
+
+
+# Resource: aws_ssoadmin_instance_access_control_attributes
+
+Provides a Single Sign-On (SSO) ABAC Resource: https://docs.aws.amazon.com/singlesignon/latest/userguide/abac.html
+
+## Example Usage
+
+```terraform
+data "aws_ssoadmin_instances" "example" {}
+
+resource "aws_ssoadmin_instance_access_control_attributes" "example" {
+  instance_arn = tolist(data.aws_ssoadmin_instances.example.arns)[0]
+  attribute {
+    key = "name"
+    value {
+      source = ["$${path:name.givenName}"]
+    }
+  }
+  attribute {
+    key = "last"
+    value {
+      source = ["$${path:name.familyName}"]
+    }
+  }
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `instanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance.
+* `attribute` - (Required) See [AccessControlAttribute](#accesscontrolattribute) for more details.
+
+### AccessControlAttribute
+
+* `key` - (Required) The name of the attribute associated with your identities in your identity source. This is used to map a specified attribute in your identity source with an attribute in AWS SSO.
+* `value` - (Required) The value used for mapping a specified attribute to an identity source. See [AccessControlAttributeValue](#accesscontrolattributevalue)
+
+### AccessControlAttributeValue
+
+* `source` - (Required) The identity source to use when mapping a specified attribute to AWS SSO.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the Instance Access Control Attribute, which is the `instanceArn`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Instance Access Control Attributes using the `instanceArn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSO Instance Access Control Attributes using the `instanceArn`. For example:
+
+```console
+% terraform import aws_ssoadmin_instance_access_control_attributes.example arn:aws:sso:::instance/ssoins-0123456789abcdef
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssoadmin_managed_policy_attachment.html.markdown b/website/docs/cdktf/typescript/r/ssoadmin_managed_policy_attachment.html.markdown
new file mode 100644
index 00000000000..6194f184a2f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssoadmin_managed_policy_attachment.html.markdown
@@ -0,0 +1,98 @@
+---
+subcategory: "SSO Admin"
+layout: "aws"
+page_title: "AWS: aws_ssoadmin_managed_policy_attachment"
+description: |-
+  Manages an IAM managed policy for a Single Sign-On (SSO) Permission Set
+---
+
+
+
+# Resource: aws_ssoadmin_managed_policy_attachment
+
+Provides an IAM managed policy for a Single Sign-On (SSO) Permission Set resource
+
+~> **NOTE:** Creating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances";
+import { SsoadminManagedPolicyAttachment } from "./.gen/providers/aws/ssoadmin-managed-policy-attachment";
+import { SsoadminPermissionSet } from "./.gen/providers/aws/ssoadmin-permission-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsSsoadminInstances(this, "example", {});
+    const awsSsoadminPermissionSetExample = new SsoadminPermissionSet(
+      this,
+      "example_1",
+      {
+        instanceArn: Token.asString(
+          propertyAccess(Fn.tolist(example.arns), ["0"])
+        ),
+        name: "Example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSsoadminPermissionSetExample.overrideLogicalId("example");
+    const awsSsoadminManagedPolicyAttachmentExample =
+      new SsoadminManagedPolicyAttachment(this, "example_2", {
+        instanceArn: Token.asString(
+          propertyAccess(Fn.tolist(example.arns), ["0"])
+        ),
+        managedPolicyArn: "arn:aws:iam::aws:policy/AlexaForBusinessDeviceSetup",
+        permissionSetArn: Token.asString(awsSsoadminPermissionSetExample.arn),
+      });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/ + awsSsoadminManagedPolicyAttachmentExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `instanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed. +* `managedPolicyArn` - (Required, Forces new resource) The IAM managed policy Amazon Resource Name (ARN) to be attached to the Permission Set. +* `permissionSetArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The Amazon Resource Names (ARNs) of the Managed Policy, Permission Set, and SSO Instance, separated by a comma (`,`). +* `managedPolicyName` - The name of the IAM Managed Policy. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Managed Policy Attachments using the `managedPolicyArn`, `permissionSetArn`, and `instanceArn` separated by a comma (`,`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SSO Managed Policy Attachments using the `managedPolicyArn`, `permissionSetArn`, and `instanceArn` separated by a comma (`,`). For example: + +```console +% terraform import aws_ssoadmin_managed_policy_attachment.example arn:aws:iam::aws:policy/AlexaForBusinessDeviceSetup,arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssoadmin_permission_set.html.markdown b/website/docs/cdktf/typescript/r/ssoadmin_permission_set.html.markdown new file mode 100644 index 00000000000..8da755adce7 --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssoadmin_permission_set.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "SSO Admin" +layout: "aws" +page_title: "AWS: aws_ssoadmin_permission_set" +description: |- + Manages a Single Sign-On (SSO) Permission Set +--- + + + +# Resource: aws_ssoadmin_permission_set + +Provides a Single Sign-On (SSO) Permission Set resource + +~> **NOTE:** Updating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
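+ *
+ * Note: sessionDuration below is an ISO-8601 duration; "PT2H" means two
+ * hours.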
+ */
+import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances";
+import { SsoadminPermissionSet } from "./.gen/providers/aws/ssoadmin-permission-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new DataAwsSsoadminInstances(this, "example", {});
+    const awsSsoadminPermissionSetExample = new SsoadminPermissionSet(
+      this,
+      "example_1",
+      {
+        description: "An example",
+        instanceArn: Token.asString(
+          propertyAccess(Fn.tolist(example.arns), ["0"])
+        ),
+        name: "Example",
+        relayState:
+          "https://s3.console.aws.amazon.com/s3/home?region=us-east-1#",
+        sessionDuration: "PT2H",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsSsoadminPermissionSetExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `description` - (Optional) The description of the Permission Set.
+* `instanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed.
+* `name` - (Required, Forces new resource) The name of the Permission Set.
+* `relayState` - (Optional) The relay state URL used to redirect users within the application during the federation authentication process.
+* `sessionDuration` - (Optional) The length of time that the application user sessions are valid in the ISO-8601 standard. Default: `PT1H`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The Amazon Resource Name (ARN) of the Permission Set.
+* `id` - The Amazon Resource Names (ARNs) of the Permission Set and SSO Instance, separated by a comma (`,`).
+* `createdDate` - The date the Permission Set was created in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Permission Sets using the `arn` and `instanceArn` separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSO Permission Sets using the `arn` and `instanceArn` separated by a comma (`,`).
For example: + +```console +% terraform import aws_ssoadmin_permission_set.example arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/ssoadmin_permission_set_inline_policy.html.markdown b/website/docs/cdktf/typescript/r/ssoadmin_permission_set_inline_policy.html.markdown new file mode 100644 index 00000000000..8ff69d3f3ba --- /dev/null +++ b/website/docs/cdktf/typescript/r/ssoadmin_permission_set_inline_policy.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "SSO Admin" +layout: "aws" +page_title: "AWS: aws_ssoadmin_permission_set_inline_policy" +description: |- + Manages an IAM inline policy for a Single Sign-On (SSO) Permission Set +--- + + + +# Resource: aws_ssoadmin_permission_set_inline_policy + +Provides an IAM inline policy for a Single Sign-On (SSO) Permission Set resource + +~> **NOTE:** AWS Single Sign-On (SSO) only supports one IAM inline policy per [`awsSsoadminPermissionSet`](ssoadmin_permission_set.html) resource. +Creating or updating this resource will automatically [Provision the Permission Set](https://docs.aws.amazon.com/singlesignon/latest/APIReference/API_ProvisionPermissionSet.html) to apply the corresponding updates to all assigned accounts. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, propertyAccess, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances"; +import { SsoadminPermissionSet } from "./.gen/providers/aws/ssoadmin-permission-set"; +import { SsoadminPermissionSetInlinePolicy } from "./.gen/providers/aws/ssoadmin-permission-set-inline-policy"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new DataAwsIamPolicyDocument(this, "example", { + statement: [ + { + actions: ["s3:ListAllMyBuckets", "s3:GetBucketLocation"], + resources: ["arn:aws:s3:::*"], + sid: "1", + }, + ], + }); + const dataAwsSsoadminInstancesExample = new DataAwsSsoadminInstances( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsSsoadminInstancesExample.overrideLogicalId("example"); + const awsSsoadminPermissionSetExample = new SsoadminPermissionSet( + this, + "example_2", + { + instanceArn: Token.asString( + propertyAccess(Fn.tolist(dataAwsSsoadminInstancesExample.arns), ["0"]) + ), + name: "Example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSsoadminPermissionSetExample.overrideLogicalId("example"); + const awsSsoadminPermissionSetInlinePolicyExample = + new SsoadminPermissionSetInlinePolicy(this, "example_3", { + inlinePolicy: Token.asString(example.json), + instanceArn: Token.asString( + propertyAccess(Fn.tolist(dataAwsSsoadminInstancesExample.arns), ["0"]) + ), + permissionSetArn: Token.asString(awsSsoadminPermissionSetExample.arn), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsSsoadminPermissionSetInlinePolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `inlinePolicy` - (Required) The IAM inline policy to attach to a Permission Set.
+* `instanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed.
+* `permissionSetArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The Amazon Resource Names (ARNs) of the Permission Set and SSO Instance, separated by a comma (`,`).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Permission Set Inline Policies using the `permissionSetArn` and `instanceArn` separated by a comma (`,`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SSO Permission Set Inline Policies using the `permissionSetArn` and `instanceArn` separated by a comma (`,`). For example:
+
+```console
+% terraform import aws_ssoadmin_permission_set_inline_policy.example arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72
+```
+
+ \ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/ssoadmin_permissions_boundary_attachment.html.markdown b/website/docs/cdktf/typescript/r/ssoadmin_permissions_boundary_attachment.html.markdown
new file mode 100644
index 00000000000..892cdd8735b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/ssoadmin_permissions_boundary_attachment.html.markdown
@@ -0,0 +1,169 @@
+---
+subcategory: "SSO Admin"
+layout: "aws"
+page_title: "AWS: aws_ssoadmin_permissions_boundary_attachment"
+description: |-
+  Attaches a permissions boundary policy to a Single Sign-On (SSO) Permission Set resource.
+---
+
+
+
+# Resource: aws_ssoadmin_permissions_boundary_attachment
+
+Attaches a permissions boundary policy to a Single Sign-On (SSO) Permission Set resource.
+
+~> **NOTE:** A permission set can have at most one permissions boundary attached; attaching more than one `awsSsoadminPermissionsBoundaryAttachment` to the same permission set will show a permanent difference.
+
+## Example Usage
+
+### Attaching a customer-managed policy
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, propertyAccess, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
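+ *
+ * Note: like a customer managed policy attachment, the boundary below is
+ * referenced by name and path, so the matching IAM policy must exist in
+ * every account where the permission set is deployed.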
+ */ +import { DataAwsSsoadminInstances } from "./.gen/providers/aws/data-aws-ssoadmin-instances"; +import { IamPolicy } from "./.gen/providers/aws/iam-policy"; +import { SsoadminPermissionSet } from "./.gen/providers/aws/ssoadmin-permission-set"; +import { SsoadminPermissionsBoundaryAttachment } from "./.gen/providers/aws/ssoadmin-permissions-boundary-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new IamPolicy(this, "example", { + description: "My test policy", + name: "TestPolicy", + policy: Token.asString( + Fn.jsonencode({ + Statement: [ + { + Action: ["ec2:Describe*"], + Effect: "Allow", + Resource: "*", + }, + ], + Version: "2012-10-17", + }) + ), + }); + const dataAwsSsoadminInstancesExample = new DataAwsSsoadminInstances( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsSsoadminInstancesExample.overrideLogicalId("example"); + const awsSsoadminPermissionSetExample = new SsoadminPermissionSet( + this, + "example_2", + { + instanceArn: Token.asString( + propertyAccess(Fn.tolist(dataAwsSsoadminInstancesExample.arns), ["0"]) + ), + name: "Example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSsoadminPermissionSetExample.overrideLogicalId("example"); + const awsSsoadminPermissionsBoundaryAttachmentExample = + new SsoadminPermissionsBoundaryAttachment(this, "example_3", { + instanceArn: Token.asString( + awsSsoadminPermissionSetExample.instanceArn + ), + permissionSetArn: Token.asString(awsSsoadminPermissionSetExample.arn), + permissionsBoundary: { + customerManagedPolicyReference: { + name: example.name, + path: "/", + }, + }, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSsoadminPermissionsBoundaryAttachmentExample.overrideLogicalId( + "example" + ); + } +} + +``` + +### Attaching an AWS-managed policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SsoadminPermissionsBoundaryAttachment } from "./.gen/providers/aws/ssoadmin-permissions-boundary-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SsoadminPermissionsBoundaryAttachment(this, "example", { + instanceArn: Token.asString(awsSsoadminPermissionSetExample.instanceArn), + permissionSetArn: Token.asString(awsSsoadminPermissionSetExample.arn), + permissionsBoundary: { + managedPolicyArn: "arn:aws:iam::aws:policy/ReadOnlyAccess", + }, + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `instanceArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the SSO Instance under which the operation will be executed. +* `permissionSetArn` - (Required, Forces new resource) The Amazon Resource Name (ARN) of the Permission Set. +* `permissionsBoundary` - (Required, Forces new resource) The permissions boundary policy. See below. 
+ +### Permissions Boundary + +The `permissionsBoundary` config block describes the permissions boundary policy to attach. You can reference either an AWS-managed policy, or a customer managed policy, but only one may be set. + +* `managedPolicyArn` - (Optional) AWS-managed IAM policy ARN to use as the permissions boundary. +* `customerManagedPolicyReference` - (Optional) Specifies the name and path of a customer managed policy. See below. + +### Customer Managed Policy Reference + +The `customerManagedPolicyReference` config block describes a customer managed IAM policy. You must have an IAM policy that matches the name and path in each AWS account where you want to deploy your specified permission set. + +* `name` - (Required, Forces new resource) Name of the customer managed IAM Policy to be attached. +* `path` - (Optional, Forces new resource) The path to the IAM policy to be attached. The default is `/`. See [IAM Identifiers](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-friendly-names) for more information. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Permission Set Amazon Resource Name (ARN) and SSO Instance ARN, separated by a comma (`,`). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SSO Admin Permissions Boundary Attachments using the `permissionSetArn` and `instanceArn`, separated by a comma (`,`). For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import SSO Admin Permissions Boundary Attachments using the `permissionSetArn` and `instanceArn`, separated by a comma (`,`). For example: + +```console +% terraform import aws_ssoadmin_permissions_boundary_attachment.example arn:aws:sso:::permissionSet/ssoins-2938j0x8920sbj72/ps-80383020jr9302rk,arn:aws:sso:::instance/ssoins-2938j0x8920sbj72 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/storagegateway_cache.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_cache.html.markdown new file mode 100644 index 00000000000..1f40cc4ed5e --- /dev/null +++ b/website/docs/cdktf/typescript/r/storagegateway_cache.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "Storage Gateway" +layout: "aws" +page_title: "AWS: aws_storagegateway_cache" +description: |- + Manages an AWS Storage Gateway cache +--- + + + +# Resource: aws_storagegateway_cache + +Manages an AWS Storage Gateway cache. + +~> **NOTE:** The Storage Gateway API provides no method to remove a cache disk. Destroying this Terraform resource does not perform any Storage Gateway actions. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { StoragegatewayCache } from "./.gen/providers/aws/storagegateway-cache";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayCache(this, "example", {
+      diskId: Token.asString(dataAwsStoragegatewayLocalDiskExample.id),
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `diskId` - (Required) Local disk identifier. For example, `pci-0000:03:00.0-scsi-0:0:0:0`.
+* `gatewayArn` - (Required) The Amazon Resource Name (ARN) of the gateway.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Combined gateway Amazon Resource Name (ARN) and local disk identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayCache` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayCache` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). For example:
+
+```console
+% terraform import aws_storagegateway_cache.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678:pci-0000:03:00.0-scsi-0:0:0:0
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_cached_iscsi_volume.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_cached_iscsi_volume.html.markdown
new file mode 100644
index 00000000000..9aa41c63f9a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_cached_iscsi_volume.html.markdown
@@ -0,0 +1,155 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_cached_iscsi_volume"
+description: |-
+  Manages an AWS Storage Gateway cached iSCSI volume
+---
+
+
+
+# Resource: aws_storagegateway_cached_iscsi_volume
+
+Manages an AWS Storage Gateway cached iSCSI volume.
+
+~> **NOTE:** The gateway must have cache added (e.g., via the [`awsStoragegatewayCache`](/docs/providers/aws/r/storagegateway_cache.html) resource) before creating volumes; otherwise, the Storage Gateway API will return an error.
+
+~> **NOTE:** The gateway must have an upload buffer added (e.g., via the [`awsStoragegatewayUploadBuffer`](/docs/providers/aws/r/storagegateway_upload_buffer.html) resource) before the volume is operational to clients; however, the Storage Gateway API will allow volume creation without error in that case and return volume status as `UPLOAD BUFFER NOT CONFIGURED`.
+
+## Example Usage
+
+~> **NOTE:** These examples are referencing the [`awsStoragegatewayCache`](/docs/providers/aws/r/storagegateway_cache.html) resource `gatewayArn` attribute to ensure Terraform properly adds cache before creating the volume. If you are not using this method, you may need to declare an explicit dependency (e.g., via `depends_on = [aws_storagegateway_cache.example]`) to ensure proper ordering, as shown in the sketch below.
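+
+For instance, here is a minimal, hypothetical sketch of that explicit dependency using CDKTF's `dependsOn` meta-argument; the `dataAwsStoragegatewayLocalDiskExample`, `awsStoragegatewayGatewayExample`, and `awsInstanceExample` references are placeholders in the same style as the examples below:
+
+```typescript
+// Sketch only: force the cache disk to be added before the volume when the
+// volume configuration does not reference the cache resource's attributes.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { StoragegatewayCache } from "./.gen/providers/aws/storagegateway-cache";
+import { StoragegatewayCachedIscsiVolume } from "./.gen/providers/aws/storagegateway-cached-iscsi-volume";
+class MyDependencySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const cache = new StoragegatewayCache(this, "example_cache", {
+      diskId: Token.asString(dataAwsStoragegatewayLocalDiskExample.id),
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+    });
+    new StoragegatewayCachedIscsiVolume(this, "example_volume", {
+      dependsOn: [cache], // explicit ordering instead of an attribute reference
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+      networkInterfaceId: Token.asString(awsInstanceExample.privateIp),
+      targetName: "example",
+      volumeSizeInBytes: 5368709120,
+    });
+  }
+}
+```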
+
+### Create Empty Cached iSCSI Volume
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayCachedIscsiVolume } from "./.gen/providers/aws/storagegateway-cached-iscsi-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayCachedIscsiVolume(this, "example", {
+      gatewayArn: Token.asString(awsStoragegatewayCacheExample.gatewayArn),
+      networkInterfaceId: Token.asString(awsInstanceExample.privateIp),
+      targetName: "example",
+      volumeSizeInBytes: 5368709120,
+    });
+  }
+}
+
+```
+
+### Create Cached iSCSI Volume From Snapshot
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Op, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayCachedIscsiVolume } from "./.gen/providers/aws/storagegateway-cached-iscsi-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayCachedIscsiVolume(this, "example", {
+      gatewayArn: Token.asString(awsStoragegatewayCacheExample.gatewayArn),
+      networkInterfaceId: Token.asString(awsInstanceExample.privateIp),
+      snapshotId: Token.asString(awsEbsSnapshotExample.id),
+      targetName: "example",
+      volumeSizeInBytes: Token.asNumber(
+        Op.mul(
+          Op.mul(Op.mul(awsEbsSnapshotExample.volumeSize, 1024), 1024),
+          1024
+        )
+      ),
+    });
+  }
+}
+
+```
+
+### Create Cached iSCSI Volume From Source Volume
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayCachedIscsiVolume } from "./.gen/providers/aws/storagegateway-cached-iscsi-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayCachedIscsiVolume(this, "example", {
+      gatewayArn: Token.asString(awsStoragegatewayCacheExample.gatewayArn),
+      networkInterfaceId: Token.asString(awsInstanceExample.privateIp),
+      sourceVolumeArn: existing.arn,
+      targetName: "example",
+      volumeSizeInBytes: existing.volumeSizeInBytes,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `gatewayArn` - (Required) The Amazon Resource Name (ARN) of the gateway.
+* `networkInterfaceId` - (Required) The network interface of the gateway on which to expose the iSCSI target. Only IPv4 addresses are accepted.
+* `targetName` - (Required) The name of the iSCSI target used by initiators to connect to the target and as a suffix for the target ARN. The target name must be unique across all volumes of a gateway.
+* `volumeSizeInBytes` - (Required) The size of the volume in bytes.
+* `snapshotId` - (Optional) The snapshot ID of the snapshot to restore as the new cached volume. For example, `snap-1122aabb`.
+* `sourceVolumeArn` - (Optional) The ARN for an existing volume. Specifying this ARN makes the new volume into an exact copy of the specified existing volume's latest recovery point. The `volumeSizeInBytes` value for this new volume must be equal to or larger than the size of the existing volume, in bytes.
+* `kmsEncrypted` - (Optional) Set to `true` to use Amazon S3 server-side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3.
+* `kmsKey` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS key used for Amazon S3 server-side encryption. Required when `kmsEncrypted` is set to `true`. See the sketch after this list.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
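+
+As a short, hypothetical illustration of the `kmsEncrypted`/`kmsKey` pairing described above (the KMS key and other placeholder references are assumptions, not part of the original examples):
+
+```typescript
+// Sketch only: a cached iSCSI volume encrypted with a customer-managed KMS key.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { StoragegatewayCachedIscsiVolume } from "./.gen/providers/aws/storagegateway-cached-iscsi-volume";
+class MyKmsVolumeSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayCachedIscsiVolume(this, "example", {
+      gatewayArn: Token.asString(awsStoragegatewayCacheExample.gatewayArn),
+      kmsEncrypted: true,
+      kmsKey: Token.asString(awsKmsKeyExample.arn), // required because kmsEncrypted is true
+      networkInterfaceId: Token.asString(awsInstanceExample.privateIp),
+      targetName: "example",
+      volumeSizeInBytes: 5368709120,
+    });
+  }
+}
+```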
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `chapEnabled` - Whether mutual CHAP is enabled for the iSCSI target.
+* `id` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `lunNumber` - Logical disk number.
+* `networkInterfacePort` - The port used to communicate with iSCSI targets.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `targetArn` - Target Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/target/iqn.1997-05.com.amazon:TargetName`.
+* `volumeArn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `volumeId` - Volume ID, e.g., `vol-12345678`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayCachedIscsiVolume` using the volume Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayCachedIscsiVolume` using the volume Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_cached_iscsi_volume.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_file_system_association.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_file_system_association.html.markdown
new file mode 100644
index 00000000000..6a86a9474b8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_file_system_association.html.markdown
@@ -0,0 +1,166 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_file_system_association"
+description: |-
+  Manages an association between an Amazon FSx file system and an Amazon FSx File Gateway.
+--- + + + +# Resource: aws_storagegateway_file_system_association + +Associate an Amazon FSx file system with the FSx File Gateway. After the association process is complete, the file shares on the Amazon FSx file system are available for access through the gateway. This operation only supports the FSx File Gateway type. + +[FSx File Gateway requirements](https://docs.aws.amazon.com/filegateway/latest/filefsxw/Requirements.html). + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { StoragegatewayFileSystemAssociation } from "./.gen/providers/aws/storagegateway-file-system-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new StoragegatewayFileSystemAssociation(this, "example", { + auditDestinationArn: Token.asString(awsS3BucketExample.arn), + gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn), + locationArn: Token.asString(awsFsxWindowsFileSystemExample.arn), + password: "avoid-plaintext-passwords", + username: "Admin", + }); + } +} + +``` + +## Required Services Example + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsSsmParameter } from "./.gen/providers/aws/data-aws-ssm-parameter"; +import { FsxWindowsFileSystem } from "./.gen/providers/aws/fsx-windows-file-system"; +import { Instance } from "./.gen/providers/aws/instance"; +import { StoragegatewayFileSystemAssociation } from "./.gen/providers/aws/storagegateway-file-system-association"; +import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new FsxWindowsFileSystem(this, "test", { + activeDirectoryId: Token.asString(awsDirectoryServiceDirectoryTest.id), + securityGroupIds: [Token.asString(awsSecurityGroupTest.id)], + skipFinalBackup: true, + storageCapacity: 32, + subnetIds: [Token.asString(propertyAccess(awsSubnetTest, ["0", "id"]))], + throughputCapacity: 8, + }); + const awsServiceStoragegatewayAmiFileS3Latest = new DataAwsSsmParameter( + this, + "aws_service_storagegateway_ami_FILE_S3_latest", + { + name: "/aws/service/storagegateway/ami/FILE_S3/latest", + } + ); + const awsInstanceTest = new Instance(this, "test_2", { + ami: Token.asString(awsServiceStoragegatewayAmiFileS3Latest.value), + associatePublicIpAddress: true, + dependsOn: [awsRouteTest, awsVpcDhcpOptionsAssociationTest], + instanceType: Token.asString(available.instanceType), + subnetId: Token.asString(propertyAccess(awsSubnetTest, ["0", "id"])), + vpcSecurityGroupIds: [Token.asString(awsSecurityGroupTest.id)], + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsInstanceTest.overrideLogicalId("test"); + const awsStoragegatewayGatewayTest = new StoragegatewayGateway( + this, + "test_3", + { + gatewayIpAddress: Token.asString(awsInstanceTest.publicIp), + gatewayName: "test-sgw", + gatewayTimezone: "GMT", + gatewayType: "FILE_FSX_SMB", + smbActiveDirectorySettings: { + domainName: Token.asString(awsDirectoryServiceDirectoryTest.name), + password: Token.asString(awsDirectoryServiceDirectoryTest.password), + username: "Admin", + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsStoragegatewayGatewayTest.overrideLogicalId("test"); + new StoragegatewayFileSystemAssociation(this, "fsx", { + auditDestinationArn: Token.asString(awsCloudwatchLogGroupTest.arn), + cacheAttributes: { + cacheStaleTimeoutInSeconds: 400, + }, + gatewayArn: Token.asString(awsStoragegatewayGatewayTest.arn), + locationArn: test.arn, + password: Token.asString(awsDirectoryServiceDirectoryTest.password), + username: "Admin", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `gatewayArn` - (Required) The Amazon Resource Name (ARN) of the gateway. +* `locationArn` - (Required) The Amazon Resource Name (ARN) of the Amazon FSx file system to associate with the FSx File Gateway. +* `username` - (Required) The user name of the user credential that has permission to access the root share of the Amazon FSx file system. The user account must belong to the Amazon FSx delegated admin user group. +* `password` - (Required, sensitive) The password of the user credential. +* `auditDestinationArn` - (Optional) The Amazon Resource Name (ARN) of the storage used for the audit logs. +* `cacheAttributes` - (Optional) Refresh cache information. see [Cache Attributes](#cache_attributes) for more details. +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +### cache_attributes + +* `cacheStaleTimeoutInSeconds` - (Optional) Refreshes a file share's cache by using Time To Live (TTL). + TTL is the length of time since the last refresh after which access to the directory would cause the file gateway + to first refresh that directory's contents from the Amazon S3 bucket. Valid Values: `0` or `300` to `2592000` seconds (5 minutes to 30 days). Defaults to `0` + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Amazon Resource Name (ARN) of the FSx file system association +* `arn` - Amazon Resource Name (ARN) of the newly created file system association. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayFileSystemAssociation` using the FSx file system association Amazon Resource Name (ARN). 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import `awsStoragegatewayFileSystemAssociation` using the FSx file system association Amazon Resource Name (ARN). For example: + +```console +% terraform import aws_storagegateway_file_system_association.example arn:aws:storagegateway:us-east-1:123456789012:fs-association/fsa-0DA347732FDB40125 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/storagegateway_gateway.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_gateway.html.markdown new file mode 100644 index 00000000000..4b1beab9b6e --- /dev/null +++ b/website/docs/cdktf/typescript/r/storagegateway_gateway.html.markdown @@ -0,0 +1,314 @@ +--- +subcategory: "Storage Gateway" +layout: "aws" +page_title: "AWS: aws_storagegateway_gateway" +description: |- + Manages an AWS Storage Gateway file, tape, or volume gateway in the provider region +--- + + + +# Resource: aws_storagegateway_gateway + +Manages an AWS Storage Gateway file, tape, or volume gateway in the provider region. + +~> **NOTE:** The Storage Gateway API requires the gateway to be connected to properly return information after activation. If you are receiving `The specified gateway is not connected` errors during resource creation (gateway activation), ensure your gateway instance meets the [Storage Gateway requirements](https://docs.aws.amazon.com/storagegateway/latest/userguide/Requirements.html). + +## Example Usage + +### Local Cache + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsStoragegatewayLocalDisk } from "./.gen/providers/aws/data-aws-storagegateway-local-disk"; +import { StoragegatewayCache } from "./.gen/providers/aws/storagegateway-cache"; +import { VolumeAttachment } from "./.gen/providers/aws/volume-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new VolumeAttachment(this, "test", { + deviceName: "/dev/xvdb", + instanceId: Token.asString(awsInstanceTest.id), + volumeId: Token.asString(awsEbsVolumeTest.id), + }); + const dataAwsStoragegatewayLocalDiskTest = + new DataAwsStoragegatewayLocalDisk(this, "test_1", { + diskNode: Token.asString(dataAwsVolumeAttachmentTest.deviceName), + gatewayArn: Token.asString(awsStoragegatewayGatewayTest.arn), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsStoragegatewayLocalDiskTest.overrideLogicalId("test"); + const awsStoragegatewayCacheTest = new StoragegatewayCache(this, "test_2", { + diskId: Token.asString(dataAwsStoragegatewayLocalDiskTest.diskId), + gatewayArn: Token.asString(awsStoragegatewayGatewayTest.arn), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsStoragegatewayCacheTest.overrideLogicalId("test"); + } +} + +``` + +### FSx File Gateway + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new StoragegatewayGateway(this, "example", { + gatewayIpAddress: "1.2.3.4", + gatewayName: "example", + gatewayTimezone: "GMT", + gatewayType: "FILE_FSX_SMB", + smbActiveDirectorySettings: { + domainName: "corp.example.com", + password: "avoid-plaintext-passwords", + username: "Admin", + }, + }); + } +} + +``` + +### S3 File Gateway + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new StoragegatewayGateway(this, "example", { + gatewayIpAddress: "1.2.3.4", + gatewayName: "example", + gatewayTimezone: "GMT", + gatewayType: "FILE_S3", + }); + } +} + +``` + +### Tape Gateway + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new StoragegatewayGateway(this, "example", { + gatewayIpAddress: "1.2.3.4", + gatewayName: "example", + gatewayTimezone: "GMT", + gatewayType: "VTL", + mediumChangerType: "AWS-Gateway-VTL", + tapeDriveType: "IBM-ULT3580-TD5", + }); + } +} + +``` + +### Volume Gateway (Cached) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new StoragegatewayGateway(this, "example", { + gatewayIpAddress: "1.2.3.4", + gatewayName: "example", + gatewayTimezone: "GMT", + gatewayType: "CACHED", + }); + } +} + +``` + +### Volume Gateway (Stored) + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayGateway(this, "example", {
+      gatewayIpAddress: "1.2.3.4",
+      gatewayName: "example",
+      gatewayTimezone: "GMT",
+      gatewayType: "STORED",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+~> **NOTE:** One of `activationKey` or `gatewayIpAddress` must be provided for resource creation (gateway activation). Neither is required for resource import. If using `gatewayIpAddress`, Terraform must be able to make an HTTP (port 80) GET request to the specified IP address from where it is running.
+
+This resource supports the following arguments:
+
+* `gatewayName` - (Required) Name of the gateway.
+* `gatewayTimezone` - (Required) Time zone for the gateway. The time zone is of the format "GMT", "GMT-hr:mm", or "GMT+hr:mm". For example, `GMT-4:00` indicates the time is 4 hours behind GMT. The time zone is used, for example, for scheduling snapshots and your gateway's maintenance schedule.
+* `activationKey` - (Optional) Gateway activation key during resource creation. Conflicts with `gatewayIpAddress`. Additional information is available in the [Storage Gateway User Guide](https://docs.aws.amazon.com/storagegateway/latest/userguide/get-activation-key.html).
+* `averageDownloadRateLimitInBitsPerSec` - (Optional) The average download bandwidth rate limit in bits per second. This is supported for the `CACHED`, `STORED`, and `VTL` gateway types.
+* `averageUploadRateLimitInBitsPerSec` - (Optional) The average upload bandwidth rate limit in bits per second. This is supported for the `CACHED`, `STORED`, and `VTL` gateway types.
+* `gatewayIpAddress` - (Optional) Gateway IP address to retrieve activation key during resource creation. Conflicts with `activationKey`. Gateway must be accessible on port 80 from where Terraform is running. Additional information is available in the [Storage Gateway User Guide](https://docs.aws.amazon.com/storagegateway/latest/userguide/get-activation-key.html).
+* `gatewayType` - (Optional) Type of the gateway. The default value is `STORED`. Valid values: `CACHED`, `FILE_FSX_SMB`, `FILE_S3`, `STORED`, `VTL`.
+* `gatewayVpcEndpoint` - (Optional) VPC endpoint address to be used when activating your gateway. This should be used when your instance is in a private subnet. Requires HTTP access from the client computer running Terraform. More information on the ports required by your VPC endpoint security group is available in [Activating a Gateway in a Virtual Private Cloud](https://docs.aws.amazon.com/storagegateway/latest/userguide/gateway-private-link.html).
+* `cloudwatchLogGroupArn` - (Optional) The Amazon Resource Name (ARN) of the Amazon CloudWatch log group to use to monitor and log events in the gateway.
+* `maintenanceStartTime` - (Optional) The gateway's weekly maintenance start time information, including day and time of the week. The maintenance time is the time in your gateway's time zone. More details below; see also the sketch after this list.
+* `mediumChangerType` - (Optional) Type of medium changer to use for tape gateway. Terraform cannot detect drift of this argument. Valid values: `STK-L700`, `AWS-Gateway-VTL`, `IBM-03584L32-0402`.
+* `smbActiveDirectorySettings` - (Optional) Nested argument with Active Directory domain join information for Server Message Block (SMB) file shares. Only valid for `FILE_S3` and `FILE_FSX_SMB` gateway types. Must be set before creating `ActiveDirectory` authentication SMB file shares. More details below.
+* `smbGuestPassword` - (Optional) Guest password for Server Message Block (SMB) file shares. Only valid for `FILE_S3` and `FILE_FSX_SMB` gateway types. Must be set before creating `GuestAccess` authentication SMB file shares. Terraform can only detect drift of the existence of a guest password, not its actual value from the gateway. Terraform can, however, update the password by changing the argument.
+* `smbSecurityStrategy` - (Optional) Specifies the type of security strategy. Valid values are: `ClientSpecified`, `MandatorySigning`, and `MandatoryEncryption`. See [Setting a Security Level for Your Gateway](https://docs.aws.amazon.com/storagegateway/latest/userguide/managing-gateway-file.html#security-strategy) for more information.
+* `smbFileShareVisibility` - (Optional) Specifies whether the shares on this gateway appear when listing shares.
+* `tapeDriveType` - (Optional) Type of tape drive to use for tape gateway. Terraform cannot detect drift of this argument. Valid values: `IBM-ULT3580-TD5`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
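+
+A minimal, hypothetical sketch of the `maintenanceStartTime` nested block described above (field names follow the generated provider bindings; the exact value types are an assumption):
+
+```typescript
+// Sketch only: an S3 File Gateway with a weekly maintenance window
+// of Sunday 03:30 in the gateway's own time zone.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway";
+class MyMaintenanceWindowSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayGateway(this, "example", {
+      gatewayIpAddress: "1.2.3.4",
+      gatewayName: "example",
+      gatewayTimezone: "GMT",
+      gatewayType: "FILE_S3",
+      maintenanceStartTime: {
+        dayOfWeek: "0", // Sunday
+        hourOfDay: 3,
+        minuteOfHour: 30,
+      },
+    });
+  }
+}
+```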
+
+### maintenance_start_time
+
+* `dayOfMonth` - (Optional) The day of the month component of the maintenance start time represented as an ordinal number from 1 to 28, where 1 represents the first day of the month and 28 represents the last day of the month.
+* `dayOfWeek` - (Optional) The day of the week component of the maintenance start time week represented as an ordinal number from 0 to 6, where 0 represents Sunday and 6 represents Saturday.
+* `hourOfDay` - (Required) The hour component of the maintenance start time represented as _hh_, where _hh_ is the hour (00 to 23). The hour of the day is in the time zone of the gateway.
+* `minuteOfHour` - (Required) The minute component of the maintenance start time represented as _mm_, where _mm_ is the minute (00 to 59). The minute of the hour is in the time zone of the gateway.
+
+### smb_active_directory_settings
+
+Information to join the gateway to an Active Directory domain for Server Message Block (SMB) file shares.
+
+~> **NOTE:** It is not possible to unconfigure this setting without recreating the gateway. Also, Terraform can only detect drift of the `domainName` argument from the gateway.
+
+~> **NOTE:** The Storage Gateway needs to be able to resolve the name of your Active Directory Domain Controller. If the gateway is hosted on EC2, ensure that DNS/DHCP is configured prior to creating the EC2 instance. If you are receiving `NETWORK_ERROR` errors during resource creation (gateway joining the domain), ensure your gateway instance meets the [FSx File Gateway requirements](https://docs.aws.amazon.com/filegateway/latest/filefsxw/Requirements.html).
+
+* `domainName` - (Required) The name of the domain that you want the gateway to join.
+* `password` - (Required) The password of the user who has permission to add the gateway to the Active Directory domain.
+* `username` - (Required) The user name of the user who has permission to add the gateway to the Active Directory domain.
+* `timeoutInSeconds` - (Optional) Specifies the time, in seconds, within which the JoinDomain operation must complete. Defaults to `20` seconds.
+* `organizationalUnit` - (Optional) The organizational unit (OU) is a container in an Active Directory that can hold users, groups, computers, and other OUs; this parameter specifies the OU that the gateway will join within the AD domain.
+* `domainControllers` - (Optional) List of IPv4 addresses, NetBIOS names, or host names of your domain server. If you need to specify the port number, include it after the colon (":"). For example, `mydc.mydomain.com:389`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the gateway.
+* `arn` - Amazon Resource Name (ARN) of the gateway.
+* `gatewayId` - Identifier of the gateway.
+* `ec2InstanceId` - The ID of the Amazon EC2 instance that was used to launch the gateway.
+* `endpointType` - The type of endpoint for your gateway.
+* `hostEnvironment` - The type of hypervisor environment used by the host.
+* `gatewayNetworkInterface` - An array that contains descriptions of the gateway network interfaces. See [Gateway Network Interface](#gateway-network-interface).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+### Gateway Network Interface
+
+* `ipv4Address` - The Internet Protocol version 4 (IPv4) address of the interface.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayGateway` using the gateway Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayGateway` using the gateway Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_gateway.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678
+```
+
+Certain resource arguments, like `gatewayIpAddress`, do not have a Storage Gateway API method for reading the information after creation; either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayGateway } from "./.gen/providers/aws/storagegateway-gateway";
+interface MyConfig {
+  gatewayName: any;
+  gatewayTimezone: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new StoragegatewayGateway(this, "example", {
+      gatewayIpAddress: sgw.privateIp,
+      lifecycle: {
+        ignoreChanges: ["gateway_ip_address"],
+      },
+      gatewayName: config.gatewayName,
+      gatewayTimezone: config.gatewayTimezone,
+    });
+  }
+}
+
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_nfs_file_share.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_nfs_file_share.html.markdown
new file mode 100644
index 00000000000..83b6e28d19a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_nfs_file_share.html.markdown
@@ -0,0 +1,120 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_nfs_file_share"
+description: |-
+  Manages an AWS Storage Gateway NFS File Share
+---
+
+
+
+# Resource: aws_storagegateway_nfs_file_share
+
+Manages an AWS Storage Gateway NFS File Share.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayNfsFileShare } from "./.gen/providers/aws/storagegateway-nfs-file-share";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayNfsFileShare(this, "example", {
+      clientList: ["0.0.0.0/0"],
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+      locationArn: Token.asString(awsS3BucketExample.arn),
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `clientList` - (Required) The list of clients that are allowed to access the file gateway. The list must contain either valid IP addresses or valid CIDR blocks. Set to `["0.0.0.0/0"]` to not limit access. Minimum 1 item. Maximum 100 items.
+* `gatewayArn` - (Required) Amazon Resource Name (ARN) of the file gateway.
+* `locationArn` - (Required) The ARN of the backed storage used for storing file data.
+* `vpcEndpointDnsName` - (Optional) The DNS name of the VPC endpoint for S3 PrivateLink.
+* `bucketRegion` - (Optional) The region of the S3 bucket used by the file share. Required when specifying `vpcEndpointDnsName`.
+* `roleArn` - (Required) The ARN of the AWS Identity and Access Management (IAM) role that a file gateway assumes when it accesses the underlying storage.
+* `auditDestinationArn` - (Optional) The Amazon Resource Name (ARN) of the storage used for audit logs.
+* `defaultStorageClass` - (Optional) The default [storage class](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-DefaultStorageClass) for objects put into an Amazon S3 bucket by the file gateway. Defaults to `S3_STANDARD`.
+* `guessMimeTypeEnabled` - (Optional) Boolean value that enables guessing of the MIME type for uploaded objects based on file extensions. Defaults to `true`.
+* `kmsEncrypted` - (Optional) Set to `true` to use Amazon S3 server-side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3. Defaults to `false`.
+* `kmsKeyArn` - (Optional) Amazon Resource Name (ARN) for the KMS key used for Amazon S3 server-side encryption. This value can only be set when `kmsEncrypted` is `true`.
+* `nfsFileShareDefaults` - (Optional) Nested argument with file share default values. See [NFS File Share Defaults](#nfs_file_share_defaults) below for more details, and the sketch after this list.
+* `cacheAttributes` - (Optional) Refresh cache information. See [Cache Attributes](#cache_attributes) for more details.
+* `objectAcl` - (Optional) Access Control List permission for S3 objects. Defaults to `private`.
+* `readOnly` - (Optional) Boolean to indicate write status of file share. File share does not accept writes if `true`. Defaults to `false`.
+* `requesterPays` - (Optional) Boolean indicating who pays the cost of the request and the data download from the Amazon S3 bucket: set this value to `true` if you want the requester to pay instead of the bucket owner. Defaults to `false`.
+* `squash` - (Optional) Maps a user to the anonymous user. Defaults to `RootSquash`. Valid values: `RootSquash` (only root is mapped to the anonymous user), `NoSquash` (no one is mapped to the anonymous user), `AllSquash` (everyone is mapped to the anonymous user).
+* `fileShareName` - (Optional) The name of the file share. Must be set if an S3 prefix name is set in `locationArn`.
+* `notificationPolicy` - (Optional) The notification policy of the file share. For more information see the [AWS Documentation](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-NotificationPolicy). Default value is `{}`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
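+
+A brief, hypothetical sketch of overriding the nested defaults described above (placeholder references as in the example usage; field value types follow the generated provider bindings and are an assumption):
+
+```typescript
+// Sketch only: an NFS file share with explicit POSIX defaults and a cache TTL.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { StoragegatewayNfsFileShare } from "./.gen/providers/aws/storagegateway-nfs-file-share";
+class MyNfsDefaultsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayNfsFileShare(this, "example", {
+      cacheAttributes: {
+        cacheStaleTimeoutInSeconds: 300, // refresh from S3 after 5 minutes
+      },
+      clientList: ["10.0.0.0/16"], // restrict access to one CIDR block
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+      locationArn: Token.asString(awsS3BucketExample.arn),
+      nfsFileShareDefaults: {
+        directoryMode: "0755",
+        fileMode: "0644",
+        groupId: "65534",
+        ownerId: "65534",
+      },
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+```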
+
+### nfs_file_share_defaults
+
+Files and folders stored as Amazon S3 objects in S3 buckets don't, by default, have Unix file permissions assigned to them. Upon discovery in an S3 bucket by Storage Gateway, the S3 objects that represent files and folders are assigned these default Unix permissions.
+
+* `directoryMode` - (Optional) The Unix directory mode in the string form "nnnn". Defaults to `"0777"`.
+* `fileMode` - (Optional) The Unix file mode in the string form "nnnn". Defaults to `"0666"`.
+* `groupId` - (Optional) The default group ID for the file share (unless the files have another group ID specified). Defaults to `65534` (`nfsnobody`). Valid values: `0` through `4294967294`.
+* `ownerId` - (Optional) The default owner ID for the file share (unless the files have another owner ID specified). Defaults to `65534` (`nfsnobody`). Valid values: `0` through `4294967294`.
+
+### cache_attributes
+
+* `cacheStaleTimeoutInSeconds` - (Optional) Refreshes a file share's cache by using Time To Live (TTL). TTL is the length of time since the last refresh after which access to the directory would cause the file gateway to first refresh that directory's contents from the Amazon S3 bucket. Valid Values: 300 to 2,592,000 seconds (5 minutes to 30 days).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the NFS File Share.
+* `arn` - Amazon Resource Name (ARN) of the NFS File Share.
+* `fileshareId` - ID of the NFS File Share.
+* `path` - File share path used by the NFS client to identify the mount point.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `update` - (Default `10m`)
+* `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayNfsFileShare` using the NFS File Share Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayNfsFileShare` using the NFS File Share Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_nfs_file_share.example arn:aws:storagegateway:us-east-1:123456789012:share/share-12345678
+```
+
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_smb_file_share.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_smb_file_share.html.markdown
new file mode 100644
index 00000000000..eb39ae4f08a
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_smb_file_share.html.markdown
@@ -0,0 +1,147 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_smb_file_share"
+description: |-
+  Manages an AWS Storage Gateway SMB File Share
+---
+
+
+
+# Resource: aws_storagegateway_smb_file_share
+
+Manages an AWS Storage Gateway SMB File Share.
+
+## Example Usage
+
+### Active Directory Authentication
+
+~> **NOTE:** The gateway must have already joined the Active Directory domain prior to SMB file share creation, e.g., via "SMB Settings" in the AWS Storage Gateway console or `smbActiveDirectorySettings` in the [`awsStoragegatewayGateway` resource](/docs/providers/aws/r/storagegateway_gateway.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewaySmbFileShare } from "./.gen/providers/aws/storagegateway-smb-file-share";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewaySmbFileShare(this, "example", {
+      authentication: "ActiveDirectory",
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+      locationArn: Token.asString(awsS3BucketExample.arn),
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+
+```
+
+### Guest Authentication
+
+~> **NOTE:** The gateway must have already had the SMB guest password set prior to SMB file share creation, e.g., via "SMB Settings" in the AWS Storage Gateway console or `smbGuestPassword` in the [`awsStoragegatewayGateway` resource](/docs/providers/aws/r/storagegateway_gateway.html).
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewaySmbFileShare } from "./.gen/providers/aws/storagegateway-smb-file-share";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewaySmbFileShare(this, "example", {
+      authentication: "GuestAccess",
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+      locationArn: Token.asString(awsS3BucketExample.arn),
+      roleArn: Token.asString(awsIamRoleExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `gatewayArn` - (Required) Amazon Resource Name (ARN) of the file gateway.
+* `locationArn` - (Required) The ARN of the backed storage used for storing file data.
+* `vpcEndpointDnsName` - (Optional) The DNS name of the VPC endpoint for S3 PrivateLink.
+* `bucketRegion` - (Optional) The region of the S3 bucket used by the file share. Required when specifying a `vpcEndpointDnsName`.
+* `roleArn` - (Required) The ARN of the AWS Identity and Access Management (IAM) role that a file gateway assumes when it accesses the underlying storage.
+* `adminUserList` - (Optional) A list of users in the Active Directory that have admin access to the file share. Only valid if `authentication` is set to `ActiveDirectory`.
+* `authentication` - (Optional) The authentication method that users use to access the file share. Defaults to `ActiveDirectory`. Valid values: `ActiveDirectory`, `GuestAccess`.
+* `auditDestinationArn` - (Optional) The Amazon Resource Name (ARN) of the CloudWatch Log Group used for the audit logs.
+* `defaultStorageClass` - (Optional) The default [storage class](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-DefaultStorageClass) for objects put into an Amazon S3 bucket by the file gateway. Defaults to `S3_STANDARD`.
+* `fileShareName` - (Optional) The name of the file share. Must be set if an S3 prefix name is set in `locationArn`.
+* `guessMimeTypeEnabled` - (Optional) Boolean value that enables guessing of the MIME type for uploaded objects based on file extensions. Defaults to `true`.
+* `invalidUserList` - (Optional) A list of users in the Active Directory that are not allowed to access the file share. Only valid if `authentication` is set to `ActiveDirectory`.
+* `kmsEncrypted` - (Optional) Set to `true` to use Amazon S3 server-side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3. Defaults to `false`.
+* `kmsKeyArn` - (Optional) Amazon Resource Name (ARN) for the KMS key used for Amazon S3 server-side encryption. This value can only be set when `kmsEncrypted` is `true`.
+* `objectAcl` - (Optional) Access Control List permission for S3 objects. Defaults to `private`.
+* `oplocksEnabled` - (Optional) Boolean to indicate Opportunistic lock (oplock) status. Defaults to `true`.
+* `cacheAttributes` - (Optional) Refresh cache information. See [Cache Attributes](#cache_attributes) for more details.
+* `readOnly` - (Optional) Boolean to indicate write status of file share. File share does not accept writes if `true`. Defaults to `false`.
+* `requesterPays` - (Optional) Boolean indicating who pays the cost of the request and the data download from the Amazon S3 bucket: set this value to `true` if you want the requester to pay instead of the bucket owner. Defaults to `false`.
+* `smbAclEnabled` - (Optional) Set this value to `true` to enable ACL (access control list) on the SMB file share. Set it to `false` to map file and directory permissions to the POSIX permissions. This setting applies only to the `ActiveDirectory` authentication type.
+* `caseSensitivity` - (Optional) The case of an object name in an Amazon S3 bucket. For `ClientSpecified`, the client determines the case sensitivity. For `CaseSensitive`, the gateway determines the case sensitivity. The default value is `ClientSpecified`.
+* `validUserList` - (Optional) A list of users in the Active Directory that are allowed to access the file share. If you need to specify an Active Directory group, add '@' before the name of the group; it will appear as an allowed group in the AWS console. Only valid if `authentication` is set to `ActiveDirectory`. See the sketch after this list.
+* `accessBasedEnumeration` - (Optional) Set to `true` so that the files and folders on this share are only visible to users with read access. Default value is `false`.
+* `notificationPolicy` - (Optional) The notification policy of the file share. For more information see the [AWS Documentation](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_CreateNFSFileShare.html#StorageGateway-CreateNFSFileShare-request-NotificationPolicy). Default value is `{}`.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
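+
+A short, hypothetical sketch combining several of the Active Directory-related arguments above (group and user names are invented placeholders):
+
+```typescript
+// Sketch only: an Active Directory share restricted to specific users and groups.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { StoragegatewaySmbFileShare } from "./.gen/providers/aws/storagegateway-smb-file-share";
+class MySmbAccessSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewaySmbFileShare(this, "example", {
+      adminUserList: ["@storage-admins"], // '@' prefixes an Active Directory group
+      authentication: "ActiveDirectory",
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+      locationArn: Token.asString(awsS3BucketExample.arn),
+      roleArn: Token.asString(awsIamRoleExample.arn),
+      smbAclEnabled: true, // enforce Windows ACLs instead of POSIX mapping
+      validUserList: ["@storage-users", "jane"],
+    });
+  }
+}
+```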
+
+### cache_attributes
+
+* `cacheStaleTimeoutInSeconds` - (Optional) Refreshes a file share's cache by using Time To Live (TTL). TTL is the length of time since the last refresh after which access to the directory would cause the file gateway to first refresh that directory's contents from the Amazon S3 bucket. Valid Values: 300 to 2,592,000 seconds (5 minutes to 30 days).
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the SMB File Share.
+* `arn` - Amazon Resource Name (ARN) of the SMB File Share.
+* `fileshareId` - ID of the SMB File Share.
+* `path` - File share path used by the SMB client to identify the mount point.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `10m`)
+* `update` - (Default `10m`)
+* `delete` - (Default `15m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewaySmbFileShare` using the SMB File Share Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewaySmbFileShare` using the SMB File Share Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_smb_file_share.example arn:aws:storagegateway:us-east-1:123456789012:share/share-12345678
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_stored_iscsi_volume.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_stored_iscsi_volume.html.markdown
new file mode 100644
index 00000000000..7a36d11f74f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_stored_iscsi_volume.html.markdown
@@ -0,0 +1,126 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_stored_iscsi_volume"
+description: |-
+  Manages an AWS Storage Gateway stored iSCSI volume
+---
+
+
+
+# Resource: aws_storagegateway_stored_iscsi_volume
+
+Manages an AWS Storage Gateway stored iSCSI volume.
+
+~> **NOTE:** The gateway must have working storage added (e.g., via the [`awsStoragegatewayWorkingStorage`](/docs/providers/aws/r/storagegateway_working_storage.html) resource) before the volume is operational for clients; however, the Storage Gateway API will allow volume creation without error in that case and return the volume status as `WORKING STORAGE NOT CONFIGURED`.
+
+## Example Usage
+
+### Create Empty Stored iSCSI Volume
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayStoredIscsiVolume } from "./.gen/providers/aws/storagegateway-stored-iscsi-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayStoredIscsiVolume(this, "example", {
+      diskId: Token.asString(test.id),
+      gatewayArn: Token.asString(awsStoragegatewayCacheExample.gatewayArn),
+      networkInterfaceId: Token.asString(awsInstanceExample.privateIp),
+      preserveExistingData: false,
+      targetName: "example",
+    });
+  }
+}
+
+```
+
+### Create Stored iSCSI Volume From Snapshot
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayStoredIscsiVolume } from "./.gen/providers/aws/storagegateway-stored-iscsi-volume";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayStoredIscsiVolume(this, "example", {
+      diskId: Token.asString(test.id),
+      gatewayArn: Token.asString(awsStoragegatewayCacheExample.gatewayArn),
+      networkInterfaceId: Token.asString(awsInstanceExample.privateIp),
+      preserveExistingData: false,
+      snapshotId: Token.asString(awsEbsSnapshotExample.id),
+      targetName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `gatewayArn` - (Required) The Amazon Resource Name (ARN) of the gateway.
+* `networkInterfaceId` - (Required) The network interface of the gateway on which to expose the iSCSI target. Only IPv4 addresses are accepted.
+* `targetName` - (Required) The name of the iSCSI target used by initiators to connect to the target and as a suffix for the target ARN. The target name must be unique across all volumes of a gateway.
+* `diskId` - (Required) The unique identifier for the gateway local disk that is configured as a stored volume.
+* `preserveExistingData` - (Required) Specify this field as `true` if you want to preserve the data on the local disk. Otherwise, specifying this field as `false` creates an empty volume.
+* `snapshotId` - (Optional) The snapshot ID of the snapshot to restore as the new stored volume, e.g., `snap-1122aabb`.
+* `kmsEncrypted` - (Optional) `true` to use Amazon S3 server-side encryption with your own AWS KMS key, or `false` to use a key managed by Amazon S3.
+* `kmsKey` - (Optional) The Amazon Resource Name (ARN) of the AWS KMS key used for Amazon S3 server-side encryption. This value can only be set when `kmsEncrypted` is `true`.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `chapEnabled` - Whether mutual CHAP is enabled for the iSCSI target.
+* `id` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `lunNumber` - Logical disk number.
+* `networkInterfacePort` - The port used to communicate with iSCSI targets.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `targetArn` - Target Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/target/iqn.1997-05.com.amazon:TargetName`.
+* `volumeArn` - Volume Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678`.
+* `volumeId` - Volume ID, e.g., `vol-12345678`.
+* `volumeStatus` - Indicates the state of the storage volume.
+* `volumeType` - Indicates the type of the volume.
+* `volumeSizeInBytes` - The size of the data stored on the volume in bytes.
+* `volumeAttachmentStatus` - A value that indicates whether a storage volume is attached to, detached from, or is in the process of detaching from a gateway.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayStoredIscsiVolume` using the volume Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayStoredIscsiVolume` using the volume Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_stored_iscsi_volume.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678/volume/vol-12345678
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_tape_pool.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_tape_pool.html.markdown
new file mode 100644
index 00000000000..1bc5cfcc76e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_tape_pool.html.markdown
@@ -0,0 +1,77 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_tape_pool"
+description: |-
+  Manages an AWS Storage Gateway Tape Pool
+---
+
+
+
+# Resource: aws_storagegateway_tape_pool
+
+Manages an AWS Storage Gateway Tape Pool.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayTapePool } from "./.gen/providers/aws/storagegateway-tape-pool";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayTapePool(this, "example", {
+      poolName: "example",
+      storageClass: "GLACIER",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `poolName` - (Required) The name of the new custom tape pool.
+* `storageClass` - (Required) The storage class that is associated with the new custom pool. When you use your backup application to eject the tape, the tape is archived directly into the storage class that corresponds to the pool. Possible values are `DEEP_ARCHIVE` or `GLACIER`.
+* `retentionLockType` - (Optional) Tape retention lock can be configured in two modes. When configured in governance mode, AWS accounts with specific IAM permissions are authorized to remove the tape retention lock from archived virtual tapes. When configured in compliance mode, the tape retention lock cannot be removed by any user, including the root AWS account. Possible values are `COMPLIANCE`, `GOVERNANCE`, and `NONE`. Default value is `NONE`.
+* `retentionLockTimeInDays` - (Optional) The tape retention lock duration in days. Tape retention lock can be enabled for up to 100 years (36,500 days). Default value is 0.
+* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Tape Pool Amazon Resource Name (ARN), e.g., `arn:aws:storagegateway:us-east-1:123456789012:tapepool/pool-12345678`.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayTapePool` using the tape pool Amazon Resource Name (ARN). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayTapePool` using the tape pool Amazon Resource Name (ARN). For example:
+
+```console
+% terraform import aws_storagegateway_tape_pool.example arn:aws:storagegateway:us-east-1:123456789012:tapepool/pool-12345678
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_upload_buffer.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_upload_buffer.html.markdown
new file mode 100644
index 00000000000..feaf4015f15
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_upload_buffer.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_upload_buffer"
+description: |-
+  Manages an AWS Storage Gateway upload buffer
+---
+
+
+
+# Resource: aws_storagegateway_upload_buffer
+
+Manages an AWS Storage Gateway upload buffer.
+
+~> **NOTE:** The Storage Gateway API provides no method to remove an upload buffer disk. Destroying this Terraform resource does not perform any Storage Gateway actions.
+
+## Example Usage
+
+### Cached and VTL Gateway Type
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsStoragegatewayLocalDisk } from "./.gen/providers/aws/data-aws-storagegateway-local-disk";
+import { StoragegatewayUploadBuffer } from "./.gen/providers/aws/storagegateway-upload-buffer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const test = new DataAwsStoragegatewayLocalDisk(this, "test", {
+      diskNode: Token.asString(awsVolumeAttachmentTest.deviceName),
+      gatewayArn: Token.asString(awsStoragegatewayGatewayTest.arn),
+    });
+    const awsStoragegatewayUploadBufferTest = new StoragegatewayUploadBuffer(
+      this,
+      "test_1",
+      {
+        diskPath: Token.asString(test.diskPath),
+        gatewayArn: Token.asString(awsStoragegatewayGatewayTest.arn),
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsStoragegatewayUploadBufferTest.overrideLogicalId("test");
+  }
+}
+
+```
+
+### Stored Gateway Type
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsStoragegatewayLocalDisk } from "./.gen/providers/aws/data-aws-storagegateway-local-disk";
+import { StoragegatewayUploadBuffer } from "./.gen/providers/aws/storagegateway-upload-buffer";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayUploadBuffer(this, "example", {
+      diskId: Token.asString(dataAwsStoragegatewayLocalDiskExample.id),
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+    });
+    new DataAwsStoragegatewayLocalDisk(this, "test", {
+      diskNode: Token.asString(awsVolumeAttachmentTest.deviceName),
+      gatewayArn: Token.asString(awsStoragegatewayGatewayTest.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `diskId` - (Optional) Local disk identifier. For example, `pci-0000:03:00.0-scsi-0:0:0:0`.
+* `diskPath` - (Optional) Local disk path. For example, `/dev/nvme1n1`.
+* `gatewayArn` - (Required) The Amazon Resource Name (ARN) of the gateway.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Combined gateway Amazon Resource Name (ARN) and local disk identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayUploadBuffer` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayUploadBuffer` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`).
For example:
+
+```console
+% terraform import aws_storagegateway_upload_buffer.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678:pci-0000:03:00.0-scsi-0:0:0:0
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/storagegateway_working_storage.html.markdown b/website/docs/cdktf/typescript/r/storagegateway_working_storage.html.markdown
new file mode 100644
index 00000000000..5b3eb40d660
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/storagegateway_working_storage.html.markdown
@@ -0,0 +1,75 @@
+---
+subcategory: "Storage Gateway"
+layout: "aws"
+page_title: "AWS: aws_storagegateway_working_storage"
+description: |-
+  Manages an AWS Storage Gateway working storage
+---
+
+
+
+# Resource: aws_storagegateway_working_storage
+
+Manages an AWS Storage Gateway working storage.
+
+~> **NOTE:** The Storage Gateway API provides no method to remove a working storage disk. Destroying this Terraform resource does not perform any Storage Gateway actions.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { StoragegatewayWorkingStorage } from "./.gen/providers/aws/storagegateway-working-storage";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new StoragegatewayWorkingStorage(this, "example", {
+      diskId: Token.asString(dataAwsStoragegatewayLocalDiskExample.id),
+      gatewayArn: Token.asString(awsStoragegatewayGatewayExample.arn),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `diskId` - (Required) Local disk identifier. For example, `pci-0000:03:00.0-scsi-0:0:0:0`.
+* `gatewayArn` - (Required) The Amazon Resource Name (ARN) of the gateway.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Combined gateway Amazon Resource Name (ARN) and local disk identifier.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsStoragegatewayWorkingStorage` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import `awsStoragegatewayWorkingStorage` using the gateway Amazon Resource Name (ARN) and local disk identifier separated with a colon (`:`).
For example:
+
+```console
+% terraform import aws_storagegateway_working_storage.example arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12345678:pci-0000:03:00.0-scsi-0:0:0:0
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/subnet.html.markdown b/website/docs/cdktf/typescript/r/subnet.html.markdown
new file mode 100644
index 00000000000..f2b4ea469e7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/subnet.html.markdown
@@ -0,0 +1,146 @@
+---
+subcategory: "VPC (Virtual Private Cloud)"
+layout: "aws"
+page_title: "AWS: aws_subnet"
+description: |-
+  Provides a VPC subnet resource.
+---
+
+
+
+# Resource: aws_subnet
+
+Provides a VPC subnet resource.
+
+~> **NOTE:** Due to [AWS Lambda improved VPC networking changes that began deploying in September 2019](https://aws.amazon.com/blogs/compute/announcing-improved-vpc-networking-for-aws-lambda-functions/), subnets associated with Lambda Functions can take up to 45 minutes to successfully delete. Terraform AWS Provider version 2.31.0 and later automatically handles this increased timeout; however, prior versions require setting the [customizable deletion timeout](#timeouts) to 45 minutes (`delete = "45m"`; a sketch of setting these timeouts appears before the Timeouts section below). AWS and HashiCorp are working together to reduce the amount of time required for resource deletion, and updates can be tracked in this [GitHub issue](https://github.com/hashicorp/terraform-provider-aws/issues/10329).
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Subnet } from "./.gen/providers/aws/subnet";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Subnet(this, "main", {
+      cidrBlock: "10.0.1.0/24",
+      tags: {
+        Name: "Main",
+      },
+      vpcId: Token.asString(awsVpcMain.id),
+    });
+  }
+}
+
+```
+
+### Subnets In Secondary VPC CIDR Blocks
+
+When managing subnets in one of a VPC's secondary CIDR blocks created using an [`awsVpcIpv4CidrBlockAssociation`](vpc_ipv4_cidr_block_association.html)
+resource, it is recommended to reference that resource's `vpcId` attribute to ensure correct dependency ordering.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Subnet } from "./.gen/providers/aws/subnet";
+import { VpcIpv4CidrBlockAssociation } from "./.gen/providers/aws/vpc-ipv4-cidr-block-association";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const secondaryCidr = new VpcIpv4CidrBlockAssociation(
+      this,
+      "secondary_cidr",
+      {
+        cidrBlock: "172.2.0.0/16",
+        vpcId: main.id,
+      }
+    );
+    new Subnet(this, "in_secondary_cidr", {
+      cidrBlock: "172.2.0.0/24",
+      vpcId: secondaryCidr.vpcId,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `assignIpv6AddressOnCreation` - (Optional) Specify true to indicate
+  that network interfaces created in the specified subnet should be
+  assigned an IPv6 address.
Default is `false`.
+* `availabilityZone` - (Optional) AZ for the subnet.
+* `availabilityZoneId` - (Optional) AZ ID of the subnet. This argument is not supported in all regions or partitions. If necessary, use `availabilityZone` instead.
+* `cidrBlock` - (Optional) The IPv4 CIDR block for the subnet.
+* `customerOwnedIpv4Pool` - (Optional) The customer owned IPv4 address pool. Typically used with the `mapCustomerOwnedIpOnLaunch` argument. The `outpostArn` argument must be specified when configured.
+* `enableDns64` - (Optional) Indicates whether DNS queries made to the Amazon-provided DNS Resolver in this subnet should return synthetic IPv6 addresses for IPv4-only destinations. Default: `false`.
+* `enableLniAtDeviceIndex` - (Optional) Indicates the device position for local network interfaces in this subnet. For example, `1` indicates local network interfaces in this subnet are the secondary network interface (eth1). A local network interface cannot be the primary network interface (eth0).
+* `enableResourceNameDnsAaaaRecordOnLaunch` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS AAAA records. Default: `false`.
+* `enableResourceNameDnsARecordOnLaunch` - (Optional) Indicates whether to respond to DNS queries for instance hostnames with DNS A records. Default: `false`.
+* `ipv6CidrBlock` - (Optional) The IPv6 network range for the subnet,
+  in CIDR notation. The subnet size must use a /64 prefix length.
+* `ipv6Native` - (Optional) Indicates whether to create an IPv6-only subnet. Default: `false`.
+* `mapCustomerOwnedIpOnLaunch` - (Optional) Specify `true` to indicate that network interfaces created in the subnet should be assigned a customer owned IP address. The `customerOwnedIpv4Pool` and `outpostArn` arguments must be specified when set to `true`. Default is `false`.
+* `mapPublicIpOnLaunch` - (Optional) Specify true to indicate
+  that instances launched into the subnet should be assigned
+  a public IP address. Default is `false`.
+* `outpostArn` - (Optional) The Amazon Resource Name (ARN) of the Outpost.
+* `privateDnsHostnameTypeOnLaunch` - (Optional) The type of hostnames to assign to instances in the subnet at launch. For IPv6-only subnets, an instance DNS name must be based on the instance ID. For dual-stack and IPv4-only subnets, you can specify whether DNS names use the instance IPv4 address or the instance ID. Valid values: `ip-name`, `resource-name`.
+* `vpcId` - (Required) The VPC ID.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the subnet.
+* `arn` - The ARN of the subnet.
+* `ipv6CidrBlockAssociationId` - The association ID for the IPv6 CIDR block.
+* `ownerId` - The ID of the AWS account that owns the subnet.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
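+
+As the Lambda networking note at the top of this page mentions, older provider versions may need a longer deletion timeout. The following is a minimal hand-written sketch (not produced by `cdktf convert`) of overriding the operation timeouts whose defaults are listed in the Timeouts section below; it assumes the generated `Subnet` binding exposes the Terraform `timeouts` block as a `timeouts` property, per the usual cdktf convention, and reuses the `awsVpcMain` reference from the basic example:
+
+```typescript
+// Sketch: subnet with a 45-minute deletion timeout for Lambda-attached subnets.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { Subnet } from "./.gen/providers/aws/subnet";
+class SubnetWithCustomTimeouts extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Subnet(this, "lambda_subnet", {
+      cidrBlock: "10.0.2.0/24",
+      vpcId: Token.asString(awsVpcMain.id),
+      // Assumed property mirroring the Terraform `timeouts` block; allow up
+      // to 45 minutes for deletion per the Lambda networking note above.
+      timeouts: {
+        create: "10m",
+        delete: "45m",
+      },
+    });
+  }
+}
+
+```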
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `10m`)
+- `delete` - (Default `20m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import subnets using the subnet `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import subnets using the subnet `id`. For example:
+
+```console
+% terraform import aws_subnet.public_subnet subnet-9d4a7b6c
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/swf_domain.html.markdown b/website/docs/cdktf/typescript/r/swf_domain.html.markdown
new file mode 100644
index 00000000000..55cfc4dd876
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/swf_domain.html.markdown
@@ -0,0 +1,81 @@
+---
+subcategory: "SWF (Simple Workflow)"
+layout: "aws"
+page_title: "AWS: aws_swf_domain"
+description: |-
+  Provides an SWF Domain resource
+---
+
+
+
+# Resource: aws_swf_domain
+
+Provides an SWF Domain resource.
+
+## Example Usage
+
+To register a basic SWF domain:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SwfDomain } from "./.gen/providers/aws/swf-domain";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SwfDomain(this, "foo", {
+      description: "Terraform SWF Domain",
+      name: "foo",
+      workflowExecutionRetentionPeriodInDays: Token.asString(30),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Optional, Forces new resource) The name of the domain. If omitted, Terraform will assign a random, unique name.
+* `namePrefix` - (Optional, Forces new resource) Creates a unique name beginning with the specified prefix. Conflicts with `name`. See the sketch after the attribute list below.
+* `description` - (Optional, Forces new resource) The domain description.
+* `workflowExecutionRetentionPeriodInDays` - (Required, Forces new resource) Length of time that SWF will continue to retain information about the workflow execution after the workflow execution is complete. Must be between 0 and 90 days.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The name of the domain.
+* `arn` - Amazon Resource Name (ARN).
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
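+
+Because `name` and `namePrefix` conflict, a domain can instead be given a generated unique name. A minimal hand-written sketch (not produced by `cdktf convert`; the construct id and prefix value are illustrative):
+
+```typescript
+// Sketch: register a domain whose name is generated from a prefix.
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+import { SwfDomain } from "./.gen/providers/aws/swf-domain";
+class MyPrefixedDomain extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SwfDomain(this, "bar", {
+      // `namePrefix` replaces `name`; Terraform appends a unique suffix.
+      namePrefix: "bar-",
+      workflowExecutionRetentionPeriodInDays: Token.asString(30),
+    });
+  }
+}
+
+```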
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SWF Domains using the `name`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import SWF Domains using the `name`. For example:
+
+```console
+% terraform import aws_swf_domain.foo test-domain
+```
+
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/synthetics_canary.html.markdown b/website/docs/cdktf/typescript/r/synthetics_canary.html.markdown
new file mode 100644
index 00000000000..6631539239b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/synthetics_canary.html.markdown
@@ -0,0 +1,146 @@
+---
+subcategory: "CloudWatch Synthetics"
+layout: "aws"
+page_title: "AWS: aws_synthetics_canary"
+description: |-
+  Provides a Synthetics Canary resource
+---
+
+
+
+# Resource: aws_synthetics_canary
+
+Provides a Synthetics Canary resource.
+
+~> **NOTE:** When you create a canary, AWS creates supporting implicit resources. See the Amazon CloudWatch Synthetics documentation on [DeleteCanary](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_DeleteCanary.html) for a full list. Neither AWS nor Terraform deletes these implicit resources automatically when the canary is deleted. Before deleting a canary, ensure you have all the information about the canary that you need to delete the implicit resources using Terraform shell commands, the AWS Console, or the AWS CLI.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { SyntheticsCanary } from "./.gen/providers/aws/synthetics-canary";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SyntheticsCanary(this, "some", {
+      artifactS3Location: "s3://some-bucket/",
+      executionRoleArn: "some-role",
+      handler: "exports.handler",
+      name: "some-canary",
+      runtimeVersion: "syn-1.0",
+      schedule: {
+        expression: "rate(0 minute)",
+      },
+      zipFile: "test-fixtures/lambdatest.zip",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `artifactS3Location` - (Required) Location in Amazon S3 where Synthetics stores artifacts from the test runs of this canary.
+* `executionRoleArn` - (Required) ARN of the IAM role to be used to run the canary. See [AWS Docs](https://docs.aws.amazon.com/AmazonSynthetics/latest/APIReference/API_CreateCanary.html#API_CreateCanary_RequestSyntax) for the permissions needed by the IAM role.
+* `handler` - (Required) Entry point to use for the source code when running the canary. This value must end with the string `.handler`.
+* `name` - (Required) Name for this canary. Has a maximum length of 21 characters. Valid characters are lowercase alphanumeric, hyphen, or underscore.
+* `runtimeVersion` - (Required) Runtime version to use for the canary.
Versions change often, so consult the [Amazon CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_Library.html) for the latest valid versions. Values include `syn-python-selenium-1.0`, `syn-nodejs-puppeteer-3.0`, `syn-nodejs-2.2`, `syn-nodejs-2.1`, `syn-nodejs-2.0`, and `syn-1.0`.
+* `schedule` - (Required) Configuration block providing how often the canary is to run and when these test runs are to stop. Detailed below.
+
+The following arguments are optional:
+
+* `deleteLambda` - (Optional) Specifies whether to also delete the Lambda functions and layers used by this canary. The default is `false`.
+* `vpcConfig` - (Optional) Configuration block. Detailed below.
+* `failureRetentionPeriod` - (Optional) Number of days to retain data about failed runs of this canary. If you omit this field, the default of 31 days is used. The valid range is 1 to 455 days.
+* `runConfig` - (Optional) Configuration block for individual canary runs. Detailed below.
+* `s3Bucket` - (Optional) Full bucket name which is used if your canary script is located in S3. The bucket must already exist. **Conflicts with `zipFile`.**
+* `s3Key` - (Optional) S3 key of your script. **Conflicts with `zipFile`.**
+* `s3Version` - (Optional) S3 version ID of your script. **Conflicts with `zipFile`.**
+* `startCanary` - (Optional) Whether to run or stop the canary.
+* `successRetentionPeriod` - (Optional) Number of days to retain data about successful runs of this canary. If you omit this field, the default of 31 days is used. The valid range is 1 to 455 days.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `artifactConfig` - (Optional) Configuration for canary artifacts, including the encryption-at-rest settings for artifacts that the canary uploads to Amazon S3. See [Artifact Config](#artifact_config).
+* `zipFile` - (Optional) ZIP file that contains the script, if you input your canary script directly into the canary instead of referring to an S3 location. It can be up to 225KB. **Conflicts with `s3Bucket`, `s3Key`, and `s3Version`.**
+
+### artifact_config
+
+* `s3Encryption` - (Optional) Configuration of the encryption-at-rest settings for artifacts that the canary uploads to Amazon S3. See [S3 Encryption](#s3_encryption).
+
+### s3_encryption
+
+* `encryptionMode` - (Optional) The encryption method to use for artifacts created by this canary. Valid values are: `SSE_S3` and `SSE_KMS`.
+* `kmsKeyArn` - (Optional) The ARN of the customer-managed KMS key to use, if you specify `SSE_KMS` for `encryptionMode`.
+
+### schedule
+
+* `expression` - (Required) Rate expression or cron expression that defines how often the canary is to run. For a rate expression, the syntax is `rate(number unit)`. _unit_ can be `minute`, `minutes`, or `hour`. For a cron expression, the syntax is `cron(expression)`. For more information about the syntax for cron expressions, see [Scheduling canary runs using cron](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_cron.html). A cron-based sketch follows this list.
+* `durationInSeconds` - (Optional) Duration in seconds for the canary to continue making regular runs according to the schedule in the `expression` value.
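+
+For instance, a cron-scheduled canary could be configured as in the following hand-written sketch (not produced by `cdktf convert`; adapted from the basic example above, with an illustrative cron expression, runtime version, and duration):
+
+```typescript
+// Sketch: run the canary every weekday at 02:00 UTC.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { SyntheticsCanary } from "./.gen/providers/aws/synthetics-canary";
+class MyScheduledCanary extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new SyntheticsCanary(this, "nightly", {
+      artifactS3Location: "s3://some-bucket/",
+      executionRoleArn: "some-role",
+      handler: "exports.handler",
+      name: "nightly-canary",
+      runtimeVersion: "syn-nodejs-puppeteer-3.0",
+      schedule: {
+        // Cron syntax: cron(minute hour day-of-month month day-of-week year).
+        expression: "cron(0 2 ? * MON-FRI *)",
+        // Illustrative: keep making runs for one hour after each start.
+        durationInSeconds: 3600,
+      },
+      zipFile: "test-fixtures/lambdatest.zip",
+    });
+  }
+}
+
+```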
+ +### run_config + +* `timeoutInSeconds` - (Optional) Number of seconds the canary is allowed to run before it must stop. If you omit this field, the frequency of the canary is used, up to a maximum of 840 (14 minutes). +* `memoryInMb` - (Optional) Maximum amount of memory available to the canary while it is running, in MB. The value you specify must be a multiple of 64. +* `activeTracing` - (Optional) Whether this canary is to use active AWS X-Ray tracing when it runs. You can enable active tracing only for canaries that use version syn-nodejs-2.0 or later for their canary runtime. +* `environmentVariables` - (Optional) Map of environment variables that are accessible from the canary during execution. Please see [AWS Docs](https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html#configuration-envvars-runtime) for variables reserved for Lambda. + +### vpc_config + +If this canary tests an endpoint in a VPC, this structure contains information about the subnet and security groups of the VPC endpoint. For more information, see [Running a Canary in a VPC](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Synthetics_Canaries_VPC.html). + +* `subnetIds` - (Required) IDs of the subnets where this canary is to run. +* `securityGroupIds` - (Required) IDs of the security groups for this canary. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the Canary. +* `engineArn` - ARN of the Lambda function that is used as your canary's engine. +* `id` - Name for this canary. +* `sourceLocationArn` - ARN of the Lambda layer where Synthetics stores the canary script code. +* `status` - Canary status. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). +* `timeline` - Structure that contains information about when the canary was created, modified, and most recently run. see [Timeline](#timeline). + +### vpc_config + +* `vpcId` - ID of the VPC where this canary is to run. + +### timeline + +* `created` - Date and time the canary was created. +* `lastModified` - Date and time the canary was most recently modified. +* `lastStarted` - Date and time that the canary's most recent run started. +* `lastStopped` - Date and time that the canary's most recent run ended. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Synthetics Canaries using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Synthetics Canaries using the `name`. 
For example: + +```console +% terraform import aws_synthetics_canary.some some-canary +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/synthetics_group.html.markdown b/website/docs/cdktf/typescript/r/synthetics_group.html.markdown new file mode 100644 index 00000000000..960604ef61e --- /dev/null +++ b/website/docs/cdktf/typescript/r/synthetics_group.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "CloudWatch Synthetics" +layout: "aws" +page_title: "AWS: aws_synthetics_group" +description: |- + Provides a Synthetics Group resource +--- + + + +# Resource: aws_synthetics_group + +Provides a Synthetics Group resource. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SyntheticsGroup } from "./.gen/providers/aws/synthetics-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SyntheticsGroup(this, "example", { + name: "example", + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `name` - (Required) Name of the group. + +The following arguments are optional: + +* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - ARN of the Group. +* `groupId` - ID of the Group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Synthetics Group using the `name`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch Synthetics Group using the `name`. For example: + +```console +% terraform import aws_synthetics_group.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/synthetics_group_association.html.markdown b/website/docs/cdktf/typescript/r/synthetics_group_association.html.markdown new file mode 100644 index 00000000000..67aaeda0acc --- /dev/null +++ b/website/docs/cdktf/typescript/r/synthetics_group_association.html.markdown @@ -0,0 +1,76 @@ +--- +subcategory: "CloudWatch Synthetics" +layout: "aws" +page_title: "AWS: aws_synthetics_group_association" +description: |- + Provides a Synthetics Group Association resource +--- + + + +# Resource: aws_synthetics_group_association + +Provides a Synthetics Group Association resource. 
+ +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { SyntheticsGroupAssociation } from "./.gen/providers/aws/synthetics-group-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new SyntheticsGroupAssociation(this, "example", { + canaryArn: Token.asString(awsSyntheticsCanaryExample.arn), + groupName: Token.asString(awsSyntheticsGroupExample.name), + }); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `groupName` - (Required) Name of the group that the canary will be associated with. +* `canaryArn` - (Required) ARN of the canary. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `groupName` - Name of the Group. +* `groupId` - ID of the Group. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import CloudWatch Synthetics Group Association using the `canaryArn,groupName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import CloudWatch Synthetics Group Association using the `canaryArn,groupName`. For example: + +```console +% terraform import aws_synthetics_group_association.example arn:aws:synthetics:us-west-2:123456789012:canary:tf-acc-test-abcd1234,examplename +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/timestreamwrite_database.html.markdown b/website/docs/cdktf/typescript/r/timestreamwrite_database.html.markdown new file mode 100644 index 00000000000..5bc6a326b03 --- /dev/null +++ b/website/docs/cdktf/typescript/r/timestreamwrite_database.html.markdown @@ -0,0 +1,105 @@ +--- +subcategory: "Timestream Write" +layout: "aws" +page_title: "AWS: aws_timestreamwrite_database" +description: |- + Provides a Timestream database resource. +--- + + + +# Resource: aws_timestreamwrite_database + +Provides a Timestream database resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { TimestreamwriteDatabase } from "./.gen/providers/aws/timestreamwrite-database"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new TimestreamwriteDatabase(this, "example", { + databaseName: "database-example", + }); + } +} + +``` + +### Full usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details. + */ +import { TimestreamwriteDatabase } from "./.gen/providers/aws/timestreamwrite-database"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new TimestreamwriteDatabase(this, "example", { + databaseName: "database-example", + kmsKeyId: Token.asString(awsKmsKeyExample.arn), + tags: { + Name: "value", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `databaseName` – (Required) The name of the Timestream database. Minimum length of 3. Maximum length of 64. +* `kmsKeyId` - (Optional) The ARN (not Alias ARN) of the KMS key to be used to encrypt the data stored in the database. If the KMS key is not specified, the database will be encrypted with a Timestream managed KMS key located in your account. Refer to [AWS managed KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk) for more info. +* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The name of the Timestream database. +* `arn` - The ARN that uniquely identifies this database. +* `kmsKeyId` - The ARN of the KMS key used to encrypt the data stored in the database. +* `tableCount` - The total number of tables found within the Timestream database. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Timestream databases using the `databaseName`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import Timestream databases using the `databaseName`. For example: + +```console +% terraform import aws_timestreamwrite_database.example example +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/timestreamwrite_table.html.markdown b/website/docs/cdktf/typescript/r/timestreamwrite_table.html.markdown new file mode 100644 index 00000000000..769d7124dd0 --- /dev/null +++ b/website/docs/cdktf/typescript/r/timestreamwrite_table.html.markdown @@ -0,0 +1,190 @@ +--- +subcategory: "Timestream Write" +layout: "aws" +page_title: "AWS: aws_timestreamwrite_table" +description: |- + Provides a Timestream table resource. +--- + + + +# Resource: aws_timestreamwrite_table + +Provides a Timestream table resource. + +## Example Usage + +### Basic usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. 
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TimestreamwriteTable } from "./.gen/providers/aws/timestreamwrite-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TimestreamwriteTable(this, "example", {
+      databaseName: Token.asString(
+        awsTimestreamwriteDatabaseExample.databaseName
+      ),
+      tableName: "example",
+    });
+  }
+}
+
+```
+
+### Full usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TimestreamwriteTable } from "./.gen/providers/aws/timestreamwrite-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TimestreamwriteTable(this, "example", {
+      databaseName: Token.asString(
+        awsTimestreamwriteDatabaseExample.databaseName
+      ),
+      retentionProperties: {
+        magneticStoreRetentionPeriodInDays: 30,
+        memoryStoreRetentionPeriodInHours: 8,
+      },
+      tableName: "example",
+      tags: {
+        Name: "example-timestream-table",
+      },
+    });
+  }
+}
+
+```
+
+### Customer-defined Partition Key
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TimestreamwriteTable } from "./.gen/providers/aws/timestreamwrite-table";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TimestreamwriteTable(this, "example", {
+      databaseName: Token.asString(
+        awsTimestreamwriteDatabaseExample.databaseName
+      ),
+      schema: {
+        compositePartitionKey: {
+          enforcementInRecord: "REQUIRED",
+          name: "attr1",
+          type: "DIMENSION",
+        },
+      },
+      tableName: "example",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `databaseName` – (Required) The name of the Timestream database.
+* `magneticStoreWriteProperties` - (Optional) Contains properties to set on the table when enabling magnetic store writes. See [Magnetic Store Write Properties](#magnetic-store-write-properties) below for more details.
+* `retentionProperties` - (Optional) The retention duration for the memory store and magnetic store. See [Retention Properties](#retention-properties) below for more details. If not provided, `magneticStoreRetentionPeriodInDays` defaults to 73000 and `memoryStoreRetentionPeriodInHours` defaults to 6.
+* `schema` - (Optional) The schema of the table. See [Schema](#schema) below for more details.
+* `tableName` - (Required) The name of the Timestream table.
+* `tags` - (Optional) Map of tags to assign to this resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Magnetic Store Write Properties
+
+The `magneticStoreWriteProperties` block supports the following arguments:
+
+* `enableMagneticStoreWrites` - (Required) A flag to enable magnetic store writes.
+* `magneticStoreRejectedDataLocation` - (Optional) The location to write error reports for records rejected asynchronously during magnetic store writes. See [Magnetic Store Rejected Data Location](#magnetic-store-rejected-data-location) below for more details.
+
+#### Magnetic Store Rejected Data Location
+
+The `magneticStoreRejectedDataLocation` block supports the following arguments:
+
+* `s3Configuration` - (Optional) Configuration of an S3 location to write error reports for records rejected, asynchronously, during magnetic store writes. See [S3 Configuration](#s3-configuration) below for more details.
+
+##### S3 Configuration
+
+The `s3Configuration` block supports the following arguments:
+
+* `bucketName` - (Optional) Bucket name of the customer S3 bucket.
+* `encryptionOption` - (Optional) Encryption option for the customer S3 location. Options are S3 server-side encryption with an S3-managed key or with a KMS managed key. Valid values are `SSE_KMS` and `SSE_S3`.
+* `kmsKeyId` - (Optional) KMS key ARN for the customer S3 location when encrypting with a KMS managed key.
+* `objectKeyPrefix` - (Optional) Object key prefix for the customer S3 location.
+
+### Retention Properties
+
+The `retentionProperties` block supports the following arguments:
+
+* `magneticStoreRetentionPeriodInDays` - (Required) The duration for which data must be stored in the magnetic store. Minimum value of 1. Maximum value of 73000.
+* `memoryStoreRetentionPeriodInHours` - (Required) The duration for which data must be stored in the memory store. Minimum value of 1. Maximum value of 8766.
+
+### Schema
+
+The `schema` block supports the following arguments:
+
+* `compositePartitionKey` - (Required) A non-empty list of partition keys defining the attributes used to partition the table data. The order of the list determines the partition hierarchy. The name and type of each partition key as well as the partition key order cannot be changed after the table is created. However, the enforcement level of each partition key can be changed. See [Composite Partition Key](#composite-partition-key) below for more details.
+
+### Composite Partition Key
+
+The `compositePartitionKey` block supports the following arguments:
+
+* `enforcementInRecord` - (Optional) The level of enforcement for the specification of a dimension key in ingested records. Valid values: `REQUIRED`, `OPTIONAL`.
+* `name` - (Optional) The name of the attribute used for a dimension key.
+* `type` - (Required) The type of the partition key. Valid values: `DIMENSION`, `MEASURE`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The `tableName` and `databaseName` separated by a colon (`:`).
+* `arn` - The ARN that uniquely identifies this table.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Timestream tables using the `tableName` and `databaseName` separated by a colon (`:`).
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transcribe_language_model.html.markdown b/website/docs/cdktf/typescript/r/transcribe_language_model.html.markdown
new file mode 100644
index 00000000000..3dd086bd93d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transcribe_language_model.html.markdown
@@ -0,0 +1,165 @@
+---
+subcategory: "Transcribe"
+layout: "aws"
+page_title: "AWS: aws_transcribe_language_model"
+description: |-
+  Terraform resource for managing an AWS Transcribe LanguageModel.
+---
+
+
+
+# Resource: aws_transcribe_language_model
+
+Terraform resource for managing an AWS Transcribe LanguageModel.
+
+-> This resource can take a significant amount of time to provision. See Language Model [FAQ](https://aws.amazon.com/transcribe/faqs/) for more details.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, Fn, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+import { TranscribeLanguageModel } from "./.gen/providers/aws/transcribe-language-model";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example-transcribe",
+      forceDestroy: true,
+    });
+    new S3Object(this, "object", {
+      bucket: example.id,
+      key: "transcribe/test1.txt",
+      source: "test1.txt",
+    });
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_2",
+      {
+        statement: [
+          {
+            actions: ["sts:AssumeRole"],
+            principals: [
+              {
+                identifiers: ["transcribe.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsIamRoleExample = new IamRole(this, "example_3", {
+      assumeRolePolicy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+      name: "example",
+    });
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    new IamRolePolicy(this, "test_policy", {
+      name: "example",
+      policy: Token.asString(
+        Fn.jsonencode({
+          Statement: [
+            {
+              Action: ["s3:GetObject", "s3:ListBucket"],
+              Effect: "Allow",
+              Resource: ["*"],
+            },
+          ],
+          Version: "2012-10-17",
+        })
+      ),
+      role: Token.asString(awsIamRoleExample.id),
+    });
+    const awsTranscribeLanguageModelExample = new TranscribeLanguageModel(
+      this,
+      "example_5",
+      {
+        baseModelName: "NarrowBand",
+        inputDataConfig: {
+          dataAccessRoleArn: Token.asString(awsIamRoleExample.arn),
+          s3Uri: "s3://${" + example.id + "}/transcribe/",
+        },
+        languageCode: "en-US",
+        modelName: "example",
+        tags: {
+          ENVIRONMENT: "development",
+        },
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsTranscribeLanguageModelExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `baseModelName` - (Required) Name of reference base model.
+* `inputDataConfig` - (Required) The input data config for the LanguageModel. See [Input Data Config](#input-data-config) for more details.
+* `languageCode` - (Required) The language code you selected for your language model. Refer to the [supported languages](https://docs.aws.amazon.com/transcribe/latest/dg/supported-languages.html) page for accepted codes.
+* `modelName` - (Required) The model name.
+
+### Input Data Config
+
+* `dataAccessRoleArn` - (Required) IAM role with access to S3 bucket.
+* `s3Uri` - (Required) S3 URI where training data is located.
+* `tuningDataS3Uri` - (Optional) S3 URI where tuning data is located.
+
+The following arguments are optional:
+
+* `tags` - (Optional) A map of tags to assign to the LanguageModel. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - LanguageModel name.
+* `arn` - ARN of the LanguageModel.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `600m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe LanguageModel using the `modelName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transcribe LanguageModel using the `modelName`.
For example: + +```console +% terraform import aws_transcribe_language_model.example example-name +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/transcribe_medical_vocabulary.html.markdown b/website/docs/cdktf/typescript/r/transcribe_medical_vocabulary.html.markdown new file mode 100644 index 00000000000..427b6947abf --- /dev/null +++ b/website/docs/cdktf/typescript/r/transcribe_medical_vocabulary.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "Transcribe" +layout: "aws" +page_title: "AWS: aws_transcribe_medical_vocabulary" +description: |- + Terraform resource for managing an AWS Transcribe MedicalVocabulary. +--- + + + +# Resource: aws_transcribe_medical_vocabulary + +Terraform resource for managing an AWS Transcribe MedicalVocabulary. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { S3Bucket } from "./.gen/providers/aws/s3-bucket"; +import { S3Object } from "./.gen/providers/aws/s3-object"; +import { TranscribeMedicalVocabulary } from "./.gen/providers/aws/transcribe-medical-vocabulary"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new S3Bucket(this, "example", { + bucket: "example-medical-vocab-123", + forceDestroy: true, + }); + const object = new S3Object(this, "object", { + bucket: example.id, + key: "transcribe/test1.txt", + source: "test.txt", + }); + const awsTranscribeMedicalVocabularyExample = + new TranscribeMedicalVocabulary(this, "example_2", { + dependsOn: [object], + languageCode: "en-US", + tags: { + tag1: "value1", + tag2: "value3", + }, + vocabularyFileUri: "s3://${" + example.id + "}/${" + object.key + "}", + vocabularyName: "example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsTranscribeMedicalVocabularyExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +The following arguments are required: + +* `languageCode` - (Required) The language code you selected for your medical vocabulary. US English (en-US) is the only language supported with Amazon Transcribe Medical. +* `vocabularyFileUri` - (Required) The Amazon S3 location (URI) of the text file that contains your custom medical vocabulary. +* `vocabularyName` - (Required) The name of the Medical Vocabulary. + +The following arguments are optional: + +* `tags` - (Optional) A map of tags to assign to the MedicalVocabulary. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - Name of the MedicalVocabulary. +* `arn` - ARN of the MedicalVocabulary. +* `downloadUri` - Generated download URI. 
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe MedicalVocabulary using the `vocabularyName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transcribe MedicalVocabulary using the `vocabularyName`. For example:
+
+```console
+% terraform import aws_transcribe_medical_vocabulary.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transcribe_vocabulary.html.markdown b/website/docs/cdktf/typescript/r/transcribe_vocabulary.html.markdown
new file mode 100644
index 00000000000..8cca3aea41e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transcribe_vocabulary.html.markdown
@@ -0,0 +1,115 @@
+---
+subcategory: "Transcribe"
+layout: "aws"
+page_title: "AWS: aws_transcribe_vocabulary"
+description: |-
+  Terraform resource for managing an AWS Transcribe Vocabulary.
+---
+
+
+
+# Resource: aws_transcribe_vocabulary
+
+Terraform resource for managing an AWS Transcribe Vocabulary.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+import { TranscribeVocabulary } from "./.gen/providers/aws/transcribe-vocabulary";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new S3Bucket(this, "example", {
+      bucket: "example-vocab-123",
+      forceDestroy: true,
+    });
+    const object = new S3Object(this, "object", {
+      bucket: example.id,
+      key: "transcribe/test1.txt",
+      source: "test.txt",
+    });
+    const awsTranscribeVocabularyExample = new TranscribeVocabulary(
+      this,
+      "example_2",
+      {
+        dependsOn: [object],
+        languageCode: "en-US",
+        tags: {
+          tag1: "value1",
+          tag2: "value3",
+        },
+        vocabularyFileUri: "s3://${" + example.id + "}/${" + object.key + "}",
+        vocabularyName: "example",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsTranscribeVocabularyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `languageCode` - (Required) The language code you selected for your vocabulary.
+* `vocabularyName` - (Required) The name of the Vocabulary.
+
+The following arguments are optional, but exactly one of `phrases` or `vocabularyFileUri` must be specified:
+
+* `phrases` - (Optional) A list of terms to include in the vocabulary. Conflicts with `vocabularyFileUri`.
+* `vocabularyFileUri` - (Optional) The Amazon S3 location (URI) of the text file that contains your custom vocabulary. Conflicts with `phrases`.
+* `tags` - (Optional) A map of tags to assign to the Vocabulary. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
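+
+For the inline alternative, a minimal hand-written sketch (not `cdktf convert` output) that uses `phrases` instead of an S3 file; the vocabulary name and phrase list are hypothetical:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { TranscribeVocabulary } from "./.gen/providers/aws/transcribe-vocabulary";
+class PhrasesVocabularyStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Inline phrases; `phrases` and `vocabularyFileUri` are mutually exclusive.
+    new TranscribeVocabulary(this, "example", {
+      languageCode: "en-US",
+      phrases: ["Los-Angeles", "CLI", "Eva-Maria"], // hypothetical terms
+      vocabularyName: "example-phrases",
+    });
+  }
+}
+```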
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Name of the Vocabulary.
+* `arn` - ARN of the Vocabulary.
+* `downloadUri` - Generated download URI.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `30m`)
+* `update` - (Default `30m`)
+* `delete` - (Default `30m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe Vocabulary using the `vocabularyName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transcribe Vocabulary using the `vocabularyName`. For example:
+
+```console
+% terraform import aws_transcribe_vocabulary.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transcribe_vocabulary_filter.html.markdown b/website/docs/cdktf/typescript/r/transcribe_vocabulary_filter.html.markdown
new file mode 100644
index 00000000000..0963b969fab
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transcribe_vocabulary_filter.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "Transcribe"
+layout: "aws"
+page_title: "AWS: aws_transcribe_vocabulary_filter"
+description: |-
+  Terraform resource for managing an AWS Transcribe VocabularyFilter.
+---
+
+
+
+# Resource: aws_transcribe_vocabulary_filter
+
+Terraform resource for managing an AWS Transcribe VocabularyFilter.
+
+## Example Usage
+
+### Basic Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TranscribeVocabularyFilter } from "./.gen/providers/aws/transcribe-vocabulary-filter";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TranscribeVocabularyFilter(this, "example", {
+      languageCode: "en-US",
+      tags: {
+        tag1: "value1",
+        tag2: "value3",
+      },
+      vocabularyFilterName: "example",
+      words: ["cars", "bucket"],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `languageCode` - (Required) The language code you selected for your vocabulary filter. Refer to the [supported languages](https://docs.aws.amazon.com/transcribe/latest/dg/supported-languages.html) page for accepted codes.
+* `vocabularyFilterName` - (Required) The name of the VocabularyFilter.
+
+The following arguments are optional, but exactly one of `words` or `vocabularyFilterFileUri` must be specified:
+
+* `vocabularyFilterFileUri` - (Optional) The Amazon S3 location (URI) of the text file that contains your custom VocabularyFilter. Conflicts with `words` argument.
+* `tags` - (Optional) A map of tags to assign to the VocabularyFilter. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `words` - (Optional) A list of terms to include in the vocabulary filter. Conflicts with `vocabularyFilterFileUri` argument.
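+
+As a counterpart to the inline `words` example above, a hand-written sketch (not `cdktf convert` output) that loads the filter from S3 via `vocabularyFilterFileUri`; the bucket, key, and file names are hypothetical:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { S3Bucket } from "./.gen/providers/aws/s3-bucket";
+import { S3Object } from "./.gen/providers/aws/s3-object";
+import { TranscribeVocabularyFilter } from "./.gen/providers/aws/transcribe-vocabulary-filter";
+class FileBasedFilterStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const bucket = new S3Bucket(this, "example", {
+      bucket: "example-filter-123", // hypothetical bucket name
+      forceDestroy: true,
+    });
+    const object = new S3Object(this, "object", {
+      bucket: bucket.id,
+      key: "transcribe/filter.txt", // hypothetical key
+      source: "filter.txt",
+    });
+    // `vocabularyFilterFileUri` and `words` are mutually exclusive.
+    new TranscribeVocabularyFilter(this, "filter", {
+      dependsOn: [object],
+      languageCode: "en-US",
+      vocabularyFilterFileUri: "s3://${" + bucket.id + "}/${" + object.key + "}",
+      vocabularyFilterName: "example-file-based",
+    });
+  }
+}
+```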
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - VocabularyFilter name.
+* `arn` - ARN of the VocabularyFilter.
+* `downloadUri` - Generated download URI.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transcribe VocabularyFilter using the `vocabularyFilterName`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transcribe VocabularyFilter using the `vocabularyFilterName`. For example:
+
+```console
+% terraform import aws_transcribe_vocabulary_filter.example example-name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_access.html.markdown b/website/docs/cdktf/typescript/r/transfer_access.html.markdown
new file mode 100644
index 00000000000..0aafae69078
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_access.html.markdown
@@ -0,0 +1,123 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_access"
+description: |-
+  Provides an AWS Transfer Access resource.
+---
+
+
+
+# Resource: aws_transfer_access
+
+Provides an AWS Transfer Access resource.
+
+## Example Usage
+
+### Basic S3
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TransferAccess } from "./.gen/providers/aws/transfer-access";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferAccess(this, "example", {
+      externalId: "S-1-1-12-1234567890-123456789-1234567890-1234",
+      homeDirectory: "/${" + awsS3BucketExample.id + "}/",
+      role: Token.asString(awsIamRoleExample.arn),
+      serverId: Token.asString(awsTransferServerExample.id),
+    });
+  }
+}
+
+```
+
+### Basic EFS
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TransferAccess } from "./.gen/providers/aws/transfer-access";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferAccess(this, "test", {
+      externalId: "S-1-1-12-1234567890-123456789-1234567890-1234",
+      homeDirectory: "/${" + awsEfsFileSystemTest.id + "}/",
+      posixProfile: {
+        gid: 1000,
+        uid: 1000,
+      },
+      role: Token.asString(awsIamRoleTest.arn),
+      serverId: Token.asString(awsTransferServerTest.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `externalId` - (Required) The SID of a group in the directory connected to the Transfer Server (e.g., `S-1-1-12-1234567890-123456789-1234567890-1234`)
+* `serverId` - (Required) The Server ID of the Transfer Server (e.g., `s-12345678`)
+* `homeDirectory` - (Optional) The landing directory (folder) for a user when they log in to the server using their SFTP client. It should begin with a `/`. The first item in the path is the name of the home bucket (accessible as `${transfer:HomeBucket}` in the policy) and the rest is the home directory (accessible as `${transfer:HomeDirectory}` in the policy). For example, `/example-bucket-1234/username` would set the home bucket to `example-bucket-1234` and the home directory to `username`.
+* `homeDirectoryMappings` - (Optional) Logical directory mappings that specify what S3 paths and keys should be visible to your user and how you want to make them visible. See [Home Directory Mappings](#home-directory-mappings) below; a combined sketch follows the [Posix Profile](#posix-profile) section.
+* `homeDirectoryType` - (Optional) The type of landing directory (folder) you mapped for your users' home directory. Valid values are `PATH` and `LOGICAL`.
+* `policy` - (Optional) An IAM JSON policy document that scopes down user access to portions of their Amazon S3 bucket. IAM variables you can use inside this policy include `${transfer:UserName}`, `${transfer:HomeDirectory}`, and `${transfer:HomeBucket}`. Since the IAM variable syntax matches Terraform's interpolation syntax, they must be escaped inside Terraform configuration strings (`$${transfer:UserName}`). These are evaluated on-the-fly when navigating the bucket.
+* `posixProfile` - (Optional) Specifies the full POSIX identity, including user ID (Uid), group ID (Gid), and any secondary group IDs (SecondaryGids), that controls your users' access to your Amazon EFS file systems. See [Posix Profile](#posix-profile) below.
+* `role` - (Required) Amazon Resource Name (ARN) of an IAM role that allows the service to control your users' access to your Amazon S3 bucket.
+
+### Home Directory Mappings
+
+* `entry` - (Required) Represents an entry and a target.
+* `target` - (Required) Represents the map target.
+
+### Posix Profile
+
+* `gid` - (Required) The POSIX group ID used for all EFS operations by this user.
+* `uid` - (Required) The POSIX user ID used for all EFS operations by this user.
+* `secondaryGids` - (Optional) The secondary POSIX group IDs used for all EFS operations by this user.
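+
+A minimal hand-written sketch (not `cdktf convert` output) tying these blocks together: a `LOGICAL` home directory that maps the user's root to a fixed S3 path. The server ID, role ARN, and S3 target path are hypothetical placeholders:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { TransferAccess } from "./.gen/providers/aws/transfer-access";
+class LogicalHomeDirectoryStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferAccess(this, "example", {
+      externalId: "S-1-1-12-1234567890-123456789-1234567890-1234",
+      serverId: "s-12345678", // hypothetical server ID
+      role: "arn:aws:iam::123456789012:role/example-transfer-role", // hypothetical role
+      homeDirectoryType: "LOGICAL",
+      homeDirectoryMappings: [
+        {
+          entry: "/", // path the user sees after logging in
+          target: "/example-bucket/home", // actual S3 path backing it
+        },
+      ],
+    });
+  }
+}
+```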
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the resource.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Accesses using the `serverId` and `externalId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transfer Accesses using the `serverId` and `externalId`. For example:
+
+```console
+% terraform import aws_transfer_access.example s-12345678/S-1-1-12-1234567890-123456789-1234567890-1234
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_agreement.html.markdown b/website/docs/cdktf/typescript/r/transfer_agreement.html.markdown
new file mode 100644
index 00000000000..5687b5df8eb
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_agreement.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_agreement"
+description: |-
+  Provides an AWS Transfer AS2 Agreement Resource
+---
+
+
+
+# Resource: aws_transfer_agreement
+
+Provides an AWS Transfer AS2 Agreement resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TransferAgreement } from "./.gen/providers/aws/transfer-agreement";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferAgreement(this, "example", {
+      accessRole: test.arn,
+      baseDirectory: "/DOC-EXAMPLE-BUCKET/home/mydirectory",
+      description: "example",
+      localProfileId: local.profileId,
+      partnerProfileId: partner.profileId,
+      serverId: Token.asString(awsTransferServerTest.id),
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:

+* `accessRole` - (Required) The IAM Role which provides read and write access to the parent directory of the file location mentioned in the StartFileTransfer request.
+* `baseDirectory` - (Required) The landing directory for the files transferred by using the AS2 protocol.
+* `description` - (Optional) An optional description of the agreement.
+* `localProfileId` - (Required) The unique identifier for the AS2 local profile.
+* `partnerProfileId` - (Required) The unique identifier for the AS2 partner profile.
+* `serverId` - (Required) The unique server identifier for the server instance. This is the specific server the agreement uses.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `agreementId` - The unique identifier for the AS2 agreement.
+* `status` - The status of the agreement, either `ACTIVE` or `INACTIVE`.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Agreement using the `serverId/agreementId`.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transfer AS2 Agreement using the `serverId/agreementId`. For example:
+
+```console
+% terraform import aws_transfer_agreement.example s-4221a88afd5f4362a/a-4221a88afd5f4362a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_certificate.html.markdown b/website/docs/cdktf/typescript/r/transfer_certificate.html.markdown
new file mode 100644
index 00000000000..cb460ebe8f2
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_certificate.html.markdown
@@ -0,0 +1,90 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_certificate"
+description: |-
+  Provides an AWS Transfer AS2 Certificate Resource
+---
+
+
+
+# Resource: aws_transfer_certificate
+
+Provides an AWS Transfer AS2 Certificate resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TransferCertificate } from "./.gen/providers/aws/transfer-certificate";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferCertificate(this, "example", {
+      certificate: Token.asString(
+        Fn.file("${path.module}/example.com/example.crt")
+      ),
+      certificateChain: Token.asString(
+        Fn.file("${path.module}/example.com/ca.crt")
+      ),
+      description: "example",
+      privateKey: Token.asString(
+        Fn.file("${path.module}/example.com/example.key")
+      ),
+      usage: "SIGNING",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate` - (Required) The valid certificate file required for the transfer.
+* `certificateChain` - (Optional) An optional list of certificates that make up the chain for the certificate that is being imported.
+* `description` - (Optional) A short description that helps identify the certificate.
+* `privateKey` - (Optional) The private key associated with the certificate being imported.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `usage` - (Required) Specifies if a certificate is being used for signing or encryption. The valid values are SIGNING and ENCRYPTION.
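+
+The Basic example above covers `SIGNING` with a private key. As a hedged sketch of the `ENCRYPTION` case (hand-written, not `cdktf convert` output; the file path and description are hypothetical, and the assumption is that only the partner's public certificate is imported, so no `privateKey` is set):
+
+```typescript
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+import { TransferCertificate } from "./.gen/providers/aws/transfer-certificate";
+class EncryptionCertificateStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferCertificate(this, "partner", {
+      // Partner's public certificate only; `privateKey` is optional
+      // and omitted here (hypothetical file path).
+      certificate: Token.asString(Fn.file("${path.module}/partner.crt")),
+      description: "partner encryption certificate",
+      usage: "ENCRYPTION",
+    });
+  }
+}
+```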
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `certificateId` - The unique identifier for the AS2 certificate.
+* `activeDate` - The date when the certificate becomes active.
+* `inactiveDate` - The date when the certificate becomes inactive.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Certificate using the `certificateId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transfer AS2 Certificate using the `certificateId`. For example:
+
+```console
+% terraform import aws_transfer_certificate.example c-4221a88afd5f4362a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_connector.html.markdown b/website/docs/cdktf/typescript/r/transfer_connector.html.markdown
new file mode 100644
index 00000000000..05c4286a38d
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_connector.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_connector"
+description: |-
+  Provides an AWS Transfer AS2 Connector Resource
+---
+
+
+
+# Resource: aws_transfer_connector
+
+Provides an AWS Transfer AS2 Connector resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TransferConnector } from "./.gen/providers/aws/transfer-connector";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferConnector(this, "example", {
+      accessRole: test.arn,
+      as2Config: {
+        compression: "DISABLED",
+        encryptionAlgorithm: "AES128_CBC",
+        localProfileId: local.profileId,
+        mdnResponse: "NONE",
+        mdnSigningAlgorithm: "NONE",
+        messageSubject: "For Connector",
+        partnerProfileId: partner.profileId,
+        signingAlgorithm: "NONE",
+      },
+      url: "http://www.test.com",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `accessRole` - (Required) The IAM Role which provides read and write access to the parent directory of the file location mentioned in the StartFileTransfer request.
+* `as2Config` - (Required) The parameters to configure for the connector object. Fields documented below.
+* `loggingRole` - (Optional) The IAM Role which is required for allowing the connector to turn on CloudWatch logging for Amazon S3 events.
+* `url` - (Required) The URL of the partner's AS2 endpoint.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### As2Config Details
+
+* `compression` - (Required) Specifies whether the AS2 file is compressed. The valid values are ZLIB and DISABLED.
+* `encryptionAlgorithm` - (Required) The algorithm that is used to encrypt the file. The valid values are AES128_CBC | AES192_CBC | AES256_CBC | NONE.
+* `localProfileId` - (Required) The unique identifier for the AS2 local profile.
+* `mdnResponse` - (Required) Used for outbound requests to determine if a partner response for transfers is synchronous or asynchronous. The valid values are SYNC and NONE.
+* `mdnSigningAlgorithm` - (Optional) The signing algorithm for the MDN response.
The valid values are SHA256 | SHA384 | SHA512 | SHA1 | NONE | DEFAULT.
+* `messageSubject` - (Optional) Used as the subject HTTP header attribute in AS2 messages that are being sent with the connector.
+* `partnerProfileId` - (Required) The unique identifier for the AS2 partner profile.
+* `signingAlgorithm` - (Required) The algorithm that is used to sign AS2 messages sent with the connector. The valid values are SHA256 | SHA384 | SHA512 | SHA1 | NONE.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `connectorId` - The unique identifier for the AS2 connector.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Connector using the `connectorId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transfer AS2 Connector using the `connectorId`. For example:
+
+```console
+% terraform import aws_transfer_connector.example c-4221a88afd5f4362a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_profile.html.markdown b/website/docs/cdktf/typescript/r/transfer_profile.html.markdown
new file mode 100644
index 00000000000..cbba9628c78
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_profile.html.markdown
@@ -0,0 +1,84 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_profile"
+description: |-
+  Provides an AWS Transfer AS2 Profile Resource
+---
+
+
+
+# Resource: aws_transfer_profile
+
+Provides an AWS Transfer AS2 Profile resource.
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TransferProfile } from "./.gen/providers/aws/transfer-profile";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferProfile(this, "example", {
+      as2Id: "example",
+      certificateIds: [
+        Token.asString(awsTransferCertificateExample.certificateId),
+      ],
+      profileType: "LOCAL",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `as2Id` - (Required) The As2Id is the AS2 name as defined in RFC 4130. For inbound transfers this is the AS2 From header for the AS2 messages sent from the partner. For outbound messages this is the AS2 To header for the AS2 messages sent to the partner. This ID cannot include spaces.
+* `certificateIds` - (Optional) The list of certificate IDs from the imported certificate operation.
+* `profileType` - (Required) The profile type should be LOCAL or PARTNER.
+* `tags` - (Optional) A map of tags to assign to the resource.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `profileId` - The unique identifier for the AS2 profile.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer AS2 Profile using the `profileId`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transfer AS2 Profile using the `profileId`. For example:
+
+```console
+% terraform import aws_transfer_profile.example p-4221a88afd5f4362a
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_server.html.markdown b/website/docs/cdktf/typescript/r/transfer_server.html.markdown
new file mode 100644
index 00000000000..b27f5f9a94c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_server.html.markdown
@@ -0,0 +1,321 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_server"
+description: |-
+  Provides an AWS Transfer Server resource.
+---
+
+
+
+# Resource: aws_transfer_server
+
+Provides an AWS Transfer Server resource.
+
+~> **NOTE on AWS IAM permissions:** If the `endpointType` is set to `VPC`, the `ec2:DescribeVpcEndpoints` and `ec2:ModifyVpcEndpoint` [actions](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonec2.html#amazonec2-actions-as-permissions) are used.
+
+~> **NOTE:** Use the [`awsTransferTag`](transfer_tag.html) resource to manage the system tags used for [custom hostnames](https://docs.aws.amazon.com/transfer/latest/userguide/requirements-dns.html#tag-custom-hostname-cdk).
+
+## Example Usage
+
+### Basic
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { TransferServer } from "./.gen/providers/aws/transfer-server";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferServer(this, "example", {
+      tags: {
+        Name: "Example",
+      },
+    });
+  }
+}
+
+```
+
+### Security Policy Name
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { TransferServer } from "./.gen/providers/aws/transfer-server"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new TransferServer(this, "example", { + securityPolicyName: "TransferSecurityPolicy-2020-06", + }); + } +} + +``` + +### VPC Endpoint + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { TransferServer } from "./.gen/providers/aws/transfer-server"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new TransferServer(this, "example", { + endpointDetails: { + addressAllocationIds: [Token.asString(awsEipExample.id)], + subnetIds: [Token.asString(awsSubnetExample.id)], + vpcId: Token.asString(awsVpcExample.id), + }, + endpointType: "VPC", + }); + } +} + +``` + +### AWS Directory authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { TransferServer } from "./.gen/providers/aws/transfer-server"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new TransferServer(this, "example", { + directoryId: Token.asString(awsDirectoryServiceDirectoryExample.id), + identityProviderType: "AWS_DIRECTORY_SERVICE", + }); + } +} + +``` + +### AWS Lambda authentication + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { TransferServer } from "./.gen/providers/aws/transfer-server"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new TransferServer(this, "example", { + function: Token.asString(awsLambdaIdentityProviderExample.arn), + identityProviderType: "AWS_LAMBDA", + }); + } +} + +``` + +### Protocols + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { TransferServer } from "./.gen/providers/aws/transfer-server";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferServer(this, "example", {
+      certificate: Token.asString(awsAcmCertificateExample.arn),
+      endpointDetails: {
+        subnetIds: [Token.asString(awsSubnetExample.id)],
+        vpcId: Token.asString(awsVpcExample.id),
+      },
+      endpointType: "VPC",
+      identityProviderType: "API_GATEWAY",
+      protocols: ["FTP", "FTPS"],
+      url:
+        "${" +
+        awsApiGatewayDeploymentExample.invokeUrl +
+        "}${" +
+        awsApiGatewayResourceExample.path +
+        "}",
+    });
+  }
+}
+
+```
+
+### Using Structured Logging Destinations
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { TransferServer } from "./.gen/providers/aws/transfer-server";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const transfer = new CloudwatchLogGroup(this, "transfer", {
+      namePrefix: "transfer_test_",
+    });
+    const transferAssumeRole = new DataAwsIamPolicyDocument(
+      this,
+      "transfer_assume_role",
+      {
+        statement: [
+          {
+            actions: ["sts:AssumeRole"],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["transfer.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+          },
+        ],
+      }
+    );
+    const iamForTransfer = new IamRole(this, "iam_for_transfer", {
+      assumeRolePolicy: Token.asString(transferAssumeRole.json),
+      managedPolicyArns: [
+        "arn:aws:iam::aws:policy/service-role/AWSTransferLoggingAccess",
+      ],
+      namePrefix: "iam_for_transfer_",
+    });
+    const awsTransferServerTransfer = new TransferServer(this, "transfer_3", {
+      endpointType: "PUBLIC",
+      loggingRole: iamForTransfer.arn,
+      protocols: ["SFTP"],
+      structuredLogDestinations: ["${" + transfer.arn + "}:*"],
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsTransferServerTransfer.overrideLogicalId("transfer");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `certificate` - (Optional) The Amazon Resource Name (ARN) of the AWS Certificate Manager (ACM) certificate. This is required when `protocols` is set to `FTPS`.
+* `domain` - (Optional) The domain of the storage system that is used for file transfers. Valid values are: `S3` and `EFS`. The default value is `S3`.
+* `protocols` - (Optional) Specifies the file transfer protocol or protocols over which your file transfer protocol client can connect to your server's endpoint. This defaults to `SFTP`. The available protocols are:
+    * `AS2`: File transfer over Applicability Statement 2
+    * `SFTP`: File transfer over SSH
+    * `FTPS`: File transfer with TLS encryption
+    * `FTP`: Unencrypted file transfer
+* `endpointDetails` - (Optional) The virtual private cloud (VPC) endpoint settings that you want to configure for your SFTP server. Fields documented below.
+* `endpointType` - (Optional) The type of endpoint that you want your SFTP server to connect to.
If you connect to a `VPC` (or `VPC_ENDPOINT`), your SFTP server isn't accessible over the public internet. If you want to connect your SFTP server via public internet, set `PUBLIC`. Defaults to `PUBLIC`.
+* `invocationRole` - (Optional) Amazon Resource Name (ARN) of the IAM role used to authenticate the user account with an `identityProviderType` of `API_GATEWAY`.
+* `hostKey` - (Optional) RSA, ECDSA, or ED25519 private key (e.g., as generated by the `ssh-keygen -t rsa -b 2048 -N "" -m PEM -f my-new-server-key`, `ssh-keygen -t ecdsa -b 256 -N "" -m PEM -f my-new-server-key` or `ssh-keygen -t ed25519 -N "" -f my-new-server-key` commands).
+* `url` - (Optional) - URL of the service endpoint used to authenticate users with an `identityProviderType` of `API_GATEWAY`.
+* `identityProviderType` - (Optional) The mode of authentication enabled for this service. The default value is `SERVICE_MANAGED`, which allows you to store and access SFTP user credentials within the service. `API_GATEWAY` indicates that user authentication requires a call to an API Gateway endpoint URL provided by you to integrate an identity provider of your choice. Using `AWS_DIRECTORY_SERVICE` will allow for authentication against AWS Managed Active Directory or Microsoft Active Directory in your on-premises environment, or in AWS using AD Connectors. Use the `AWS_LAMBDA` value to directly use a Lambda function as your identity provider. If you choose this value, you must specify the ARN for the Lambda function in the `function` argument.
+* `directoryId` - (Optional) The directory service ID of the directory service you want to connect to with an `identityProviderType` of `AWS_DIRECTORY_SERVICE`.
+* `function` - (Optional) The ARN for a Lambda function to use for the identity provider.
+* `loggingRole` - (Optional) Amazon Resource Name (ARN) of an IAM role that allows the service to write your SFTP users' activity to your Amazon CloudWatch logs for monitoring and auditing purposes.
+* `forceDestroy` - (Optional) A boolean that indicates all users associated with the server should be deleted so that the Server can be destroyed without error. The default value is `false`. This option only applies to servers configured with a `SERVICE_MANAGED` `identityProviderType`.
+* `postAuthenticationLoginBanner` - (Optional) Specify a string to display when users connect to a server. This string is displayed after the user authenticates. The SFTP protocol does not support post-authentication display banners.
+* `preAuthenticationLoginBanner` - (Optional) Specify a string to display when users connect to a server. This string is displayed before the user authenticates. A sketch of both banner arguments follows this list.
+* `protocolDetails` - (Optional) The protocol settings that are configured for your server.
+* `securityPolicyName` - (Optional) Specifies the name of the security policy that is attached to the server. Possible values are `TransferSecurityPolicy-2018-11`, `TransferSecurityPolicy-2020-06`, `TransferSecurityPolicy-FIPS-2020-06`, `TransferSecurityPolicy-2022-03` and `TransferSecurityPolicy-2023-05`. Default value is `TransferSecurityPolicy-2018-11`.
+* `structuredLogDestinations` - (Optional) A set of ARNs of destinations that will receive structured logs from the transfer server such as CloudWatch Log Group ARNs. If provided, this enables the transfer server to emit structured logs to the specified locations.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `workflowDetails` - (Optional) Specifies the workflow details. See Workflow Details below.
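+
+A minimal hand-written sketch (not `cdktf convert` output) of the login banner and lifecycle arguments above; the banner strings are hypothetical:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { TransferServer } from "./.gen/providers/aws/transfer-server";
+class LoginBannerStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferServer(this, "example", {
+      // Shown before the user authenticates (hypothetical wording).
+      preAuthenticationLoginBanner: "Unauthorized access to this system is prohibited.",
+      // Shown after authentication; not displayed for SFTP clients.
+      postAuthenticationLoginBanner: "Welcome to the example transfer server.",
+      // Allow the server to be destroyed even if service-managed users exist.
+      forceDestroy: true,
+    });
+  }
+}
+```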
+
+### Endpoint Details
+
+* `addressAllocationIds` - (Optional) A list of address allocation IDs that are required to attach an Elastic IP address to your SFTP server's endpoint. This property can only be used when `endpointType` is set to `VPC`.
+* `securityGroupIds` - (Optional) A list of security group IDs that are available to attach to your server's endpoint. If no security groups are specified, the VPC's default security groups are automatically assigned to your endpoint. This property can only be used when `endpointType` is set to `VPC`.
+* `subnetIds` - (Optional) A list of subnet IDs that are required to host your SFTP server endpoint in your VPC. This property can only be used when `endpointType` is set to `VPC`.
+* `vpcEndpointId` - (Optional) The ID of the VPC endpoint. This property can only be used when `endpointType` is set to `VPC_ENDPOINT`.
+* `vpcId` - (Optional) The VPC ID of the virtual private cloud in which the SFTP server's endpoint will be hosted. This property can only be used when `endpointType` is set to `VPC`.
+
+### Protocol Details
+
+* `as2Transports` - (Optional) Indicates the transport method for the AS2 messages. Currently, only `HTTP` is supported.
+* `passiveIp` - (Optional) Indicates passive mode, for FTP and FTPS protocols. Enter a single IPv4 address, such as the public IP address of a firewall, router, or load balancer.
+* `setStatOption` - (Optional) Use to ignore the error that is generated when the client attempts to use `setstat` on a file you are uploading to an S3 bucket. Valid values: `DEFAULT`, `ENABLE_NO_OP`.
+* `tlsSessionResumptionMode` - (Optional) A property used with Transfer Family servers that use the FTPS protocol. Provides a mechanism to resume or share a negotiated secret key between the control and data connection for an FTPS session. Valid values: `DISABLED`, `ENABLED`, `ENFORCED`.
+
+### Workflow Details
+
+* `onUpload` - (Optional) A trigger that starts a workflow: the workflow begins to execute after a file is uploaded. See Workflow Detail below.
+* `onPartialUpload` - (Optional) A trigger that starts a workflow if a file is only partially uploaded. See Workflow Detail below.
+
+#### Workflow Detail
+
+* `executionRole` - (Required) Includes the necessary permissions for S3, EFS, and Lambda operations that Transfer can assume, so that all workflow steps can operate on the required resources.
+* `workflowId` - (Required) A unique identifier for the workflow.
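+
+A minimal hand-written sketch (not `cdktf convert` output) wiring a workflow to uploads; the role ARN and workflow ID are hypothetical placeholders for an existing IAM role and `aws_transfer_workflow`:
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { TransferServer } from "./.gen/providers/aws/transfer-server";
+class WorkflowDetailsStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new TransferServer(this, "example", {
+      workflowDetails: {
+        onUpload: {
+          // Hypothetical role able to run all workflow steps.
+          executionRole: "arn:aws:iam::123456789012:role/example-workflow-role",
+          // Hypothetical ID of an existing aws_transfer_workflow.
+          workflowId: "w-1234567890abcdef0",
+        },
+      },
+    });
+  }
+}
+```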
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of Transfer Server
+* `id` - The Server ID of the Transfer Server (e.g., `s-12345678`)
+* `endpoint` - The endpoint of the Transfer Server (e.g., `s-12345678.server.transfer.REGION.amazonaws.com`)
+* `hostKeyFingerprint` - This value contains the message-digest algorithm (MD5) hash of the server's host key. This value is equivalent to the output of the `ssh-keygen -l -E md5 -f my-new-server-key` command.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Servers using the server `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transfer Servers using the server `id`. For example:
+
+```console
+% terraform import aws_transfer_server.example s-12345678
+```
+
+Certain resource arguments, such as `hostKey`, cannot be read via the API and imported into Terraform. Terraform will display a difference for these arguments the first run after import if declared in the Terraform configuration for an imported resource.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_ssh_key.html.markdown b/website/docs/cdktf/typescript/r/transfer_ssh_key.html.markdown
new file mode 100644
index 00000000000..672bc852f06
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_ssh_key.html.markdown
@@ -0,0 +1,139 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_ssh_key"
+description: |-
+  Provides an AWS Transfer SSH Public Key resource.
+---
+
+
+
+# Resource: aws_transfer_ssh_key
+
+Provides an AWS Transfer User SSH Key resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { IamRole } from "./.gen/providers/aws/iam-role";
+import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
+import { TransferServer } from "./.gen/providers/aws/transfer-server";
+import { TransferSshKey } from "./.gen/providers/aws/transfer-ssh-key";
+import { TransferUser } from "./.gen/providers/aws/transfer-user";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new TransferServer(this, "example", {
+      identityProviderType: "SERVICE_MANAGED",
+      tags: {
+        NAME: "tf-acc-test-transfer-server",
+      },
+    });
+    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
+      statement: [
+        {
+          actions: ["sts:AssumeRole"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["transfer.amazonaws.com"],
+              type: "Service",
+            },
+          ],
+        },
+      ],
+    });
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_2",
+      {
+        statement: [
+          {
+            actions: ["s3:*"],
+            effect: "Allow",
+            resources: ["*"],
+            sid: "AllowFullAccesstoS3",
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name.
You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsIamRoleExample = new IamRole(this, "example_3", {
+      assumeRolePolicy: Token.asString(assumeRole.json),
+      name: "tf-test-transfer-user-iam-role",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRoleExample.overrideLogicalId("example");
+    const awsIamRolePolicyExample = new IamRolePolicy(this, "example_4", {
+      name: "tf-test-transfer-user-iam-policy",
+      policy: Token.asString(dataAwsIamPolicyDocumentExample.json),
+      role: Token.asString(awsIamRoleExample.id),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsIamRolePolicyExample.overrideLogicalId("example");
+    const awsTransferUserExample = new TransferUser(this, "example_5", {
+      role: Token.asString(awsIamRoleExample.arn),
+      serverId: example.id,
+      tags: {
+        NAME: "tftestuser",
+      },
+      userName: "tftestuser",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsTransferUserExample.overrideLogicalId("example");
+    const awsTransferSshKeyExample = new TransferSshKey(this, "example_6", {
+      body: "... SSH key ...",
+      serverId: example.id,
+      userName: Token.asString(awsTransferUserExample.userName),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsTransferSshKeyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `serverId` - (Required) The Server ID of the Transfer Server (e.g., `s-12345678`)
+* `userName` - (Required) The name of the user account that is assigned to one or more servers.
+* `body` - (Required) The public key portion of an SSH key pair.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer SSH Public Key using the `serverId`, `userName`, and `sshPublicKeyId`, separated by `/`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Transfer SSH Public Key using the `serverId`, `userName`, and `sshPublicKeyId`, separated by `/`. For example:
+
+```console
+% terraform import aws_transfer_ssh_key.bar s-12345678/test-username/key-12345
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/transfer_tag.html.markdown b/website/docs/cdktf/typescript/r/transfer_tag.html.markdown
new file mode 100644
index 00000000000..2121fdf6cf8
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/transfer_tag.html.markdown
@@ -0,0 +1,88 @@
+---
+subcategory: "Transfer Family"
+layout: "aws"
+page_title: "AWS: aws_transfer_tag"
+description: |-
+  Manages an individual Transfer Family resource tag
+---
+
+
+
+# Resource: aws_transfer_tag
+
+Manages an individual Transfer Family resource tag.
This resource should only be used in cases where Transfer Family resources are created outside Terraform (e.g., servers created via the AWS Management Console) or where the tag key has the `aws:` prefix.

~> **NOTE:** This tagging resource should not be combined with the Terraform resource for managing the parent resource. For example, using `awsTransferServer` and `awsTransferTag` to manage tags of the same server will cause a perpetual difference where the `awsTransferServer` resource will try to remove the tag being added by the `awsTransferTag` resource.

~> **NOTE:** This tagging resource does not use the [provider `ignoreTags` configuration](/docs/providers/aws/index.html#ignore_tags).

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { TransferServer } from "./.gen/providers/aws/transfer-server";
import { TransferTag } from "./.gen/providers/aws/transfer-tag";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const example = new TransferServer(this, "example", {
      identityProviderType: "SERVICE_MANAGED",
    });
    new TransferTag(this, "hostname", {
      key: "aws:transfer:customHostname",
      resourceArn: example.arn,
      value: "example.com",
    });
    new TransferTag(this, "zone_id", {
      key: "aws:transfer:route53HostedZoneId",
      resourceArn: example.arn,
      value: "/hostedzone/MyHostedZoneId",
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `resourceArn` - (Required) Amazon Resource Name (ARN) of the Transfer Family resource to tag.
* `key` - (Required) Tag name.
* `value` - (Required) Tag value.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - Transfer Family resource identifier and key, separated by a comma (`,`)

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsTransferTag` using the Transfer Family resource identifier and key, separated by a comma (`,`). For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import `awsTransferTag` using the Transfer Family resource identifier and key, separated by a comma (`,`). For example:

```console
% terraform import aws_transfer_tag.example arn:aws:transfer:us-east-1:123456789012:server/s-1234567890abcdef0,Name
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/transfer_user.html.markdown b/website/docs/cdktf/typescript/r/transfer_user.html.markdown new file mode 100644 index 00000000000..68c87eca78d --- /dev/null +++ b/website/docs/cdktf/typescript/r/transfer_user.html.markdown @@ -0,0 +1,175 @@ +--- +subcategory: "Transfer Family" +layout: "aws" +page_title: "AWS: aws_transfer_user" +description: |- + Provides an AWS Transfer User resource. +--- + + + +# Resource: aws_transfer_user + +Provides an AWS Transfer User resource.
Managing SSH keys can be accomplished with the [`awsTransferSshKey` resource](/docs/providers/aws/r/transfer_ssh_key.html).

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
import { IamRole } from "./.gen/providers/aws/iam-role";
import { IamRolePolicy } from "./.gen/providers/aws/iam-role-policy";
import { TransferServer } from "./.gen/providers/aws/transfer-server";
import { TransferUser } from "./.gen/providers/aws/transfer-user";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const foo = new TransferServer(this, "foo", {
      identityProviderType: "SERVICE_MANAGED",
      tags: {
        NAME: "tf-acc-test-transfer-server",
      },
    });
    const assumeRole = new DataAwsIamPolicyDocument(this, "assume_role", {
      statement: [
        {
          actions: ["sts:AssumeRole"],
          effect: "Allow",
          principals: [
            {
              identifiers: ["transfer.amazonaws.com"],
              type: "Service",
            },
          ],
        },
      ],
    });
    const dataAwsIamPolicyDocumentFoo = new DataAwsIamPolicyDocument(
      this,
      "foo_2",
      {
        statement: [
          {
            actions: ["s3:*"],
            effect: "Allow",
            resources: ["*"],
            sid: "AllowFullAccesstoS3",
          },
        ],
      }
    );
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    dataAwsIamPolicyDocumentFoo.overrideLogicalId("foo");
    const awsIamRoleFoo = new IamRole(this, "foo_3", {
      assumeRolePolicy: Token.asString(assumeRole.json),
      name: "tf-test-transfer-user-iam-role",
    });
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsIamRoleFoo.overrideLogicalId("foo");
    const awsIamRolePolicyFoo = new IamRolePolicy(this, "foo_4", {
      name: "tf-test-transfer-user-iam-policy",
      policy: Token.asString(dataAwsIamPolicyDocumentFoo.json),
      role: Token.asString(awsIamRoleFoo.id),
    });
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsIamRolePolicyFoo.overrideLogicalId("foo");
    const awsTransferUserFoo = new TransferUser(this, "foo_5", {
      homeDirectoryMappings: [
        {
          entry: "/test.pdf",
          target: "/bucket3/test-path/tftestuser.pdf",
        },
      ],
      homeDirectoryType: "LOGICAL",
      role: Token.asString(awsIamRoleFoo.arn),
      serverId: foo.id,
      userName: "tftestuser",
    });
    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
    awsTransferUserFoo.overrideLogicalId("foo");
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `serverId` - (Required) The Server ID of the Transfer Server (e.g., `s-12345678`)
* `userName` - (Required) The name used to log in to your SFTP server.
* `homeDirectory` - (Optional) The landing directory (folder) for a user when they log in to the server using their SFTP client. It should begin with a `/`.
The first item in the path is the name of the home bucket (accessible as `${transfer:homeBucket}` in the policy) and the rest is the home directory (accessible as `${transfer:homeDirectory}` in the policy). For example, `/exampleBucket1234/username` would set the home bucket to `exampleBucket1234` and the home directory to `username`.
* `homeDirectoryMappings` - (Optional) Logical directory mappings that specify what S3 paths and keys should be visible to your user and how you want to make them visible. See [Home Directory Mappings](#home-directory-mappings) below.
* `homeDirectoryType` - (Optional) The type of landing directory (folder) you mapped for your users' home directory. Valid values are `PATH` and `LOGICAL`.
* `policy` - (Optional) An IAM JSON policy document that scopes down user access to portions of their Amazon S3 bucket. IAM variables you can use inside this policy include `${transfer:userName}`, `${transfer:homeDirectory}`, and `${transfer:homeBucket}`. Since the IAM variable syntax matches Terraform's interpolation syntax, they must be escaped inside Terraform configuration strings (`$${transfer:userName}`). These are evaluated on-the-fly when navigating the bucket.
* `posixProfile` - (Optional) Specifies the full POSIX identity, including user ID (Uid), group ID (Gid), and any secondary group IDs (SecondaryGids), that controls your users' access to your Amazon EFS file systems. See [Posix Profile](#posix-profile) below.
* `role` - (Required) Amazon Resource Name (ARN) of an IAM role that allows the service to control your user’s access to your Amazon S3 bucket.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.

### Home Directory Mappings

* `entry` - (Required) Represents an entry and a target.
* `target` - (Required) Represents the map target.

The `restricted` option is achieved with a `LOGICAL` home directory type and a mapping along the following lines (the original snippet was not converted, so the block below is an illustrative sketch; the role, server, and bucket values are placeholders):

```typescript
// Illustrative sketch (not 'cdktf convert' output): a restricted user whose
// home directory maps "/" to a per-user prefix. Placeholder values throughout.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { TransferUser } from "./.gen/providers/aws/transfer-user";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new TransferUser(this, "restricted", {
      homeDirectoryType: "LOGICAL",
      // "$$" escapes the interpolation so the literal IAM-style variable is rendered.
      homeDirectoryMappings: [{ entry: "/", target: "/example-bucket/$${transfer:UserName}" }],
      role: "arn:aws:iam::123456789012:role/example",
      serverId: "s-12345678",
      userName: "tftestuser",
    });
  }
}

```

### Posix Profile

* `gid` - (Required) The POSIX group ID used for all EFS operations by this user.
* `uid` - (Required) The POSIX user ID used for all EFS operations by this user.
* `secondaryGids` - (Optional) The secondary POSIX group IDs used for all EFS operations by this user.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - Amazon Resource Name (ARN) of Transfer User
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Timeouts

[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):

* `delete` - (Default `10m`)
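As a hand-written sketch (not `cdktf convert` output), this timeout can be overridden through the `timeouts` property that cdktf bindings generate for resources with operation timeouts; the role, server, and user values below are placeholders:

```typescript
// Illustrative sketch: raising the delete timeout via the generated `timeouts`
// property (assumed to exist on the TransferUser binding, as cdktf typically
// generates for resources that define operation timeouts).
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { TransferUser } from "./.gen/providers/aws/transfer-user";
class UserWithTimeout extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new TransferUser(this, "example", {
      role: "arn:aws:iam::123456789012:role/example", // placeholder
      serverId: "s-12345678", // placeholder
      userName: "tftestuser",
      timeouts: {
        delete: "15m",
      },
    });
  }
}

```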
## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Users using the `serverId` and `userName` separated by `/`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Transfer Users using the `serverId` and `userName` separated by `/`. For example:

```console
% terraform import aws_transfer_user.bar s-12345678/test-username
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/transfer_workflow.html.markdown b/website/docs/cdktf/typescript/r/transfer_workflow.html.markdown new file mode 100644 index 00000000000..cfe243dc1d5 --- /dev/null +++ b/website/docs/cdktf/typescript/r/transfer_workflow.html.markdown @@ -0,0 +1,193 @@ +--- +subcategory: "Transfer Family" +layout: "aws" +page_title: "AWS: aws_transfer_workflow" +description: |- + Provides an AWS Transfer Workflow resource. +--- + + + +# Resource: aws_transfer_workflow + +Provides an AWS Transfer Workflow resource. + +## Example Usage + +### Basic single step example + +```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { TransferWorkflow } from "./.gen/providers/aws/transfer-workflow";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new TransferWorkflow(this, "example", {
      steps: [
        {
          deleteStepDetails: {
            name: "example",
            // `file` is an unresolved reference emitted by 'cdktf convert';
            // in practice this is a string such as "$${original.file}".
            sourceFileLocation: file,
          },
          type: "DELETE",
        },
      ],
    });
  }
}

```

### Multistep example

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { Token, TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { TransferWorkflow } from "./.gen/providers/aws/transfer-workflow";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new TransferWorkflow(this, "example", {
      steps: [
        {
          customStepDetails: {
            name: "example",
            // `file` and `awsLambdaFunctionExample` are unresolved references
            // emitted by 'cdktf convert' to values defined elsewhere in the
            // original configuration.
            sourceFileLocation: file,
            target: Token.asString(awsLambdaFunctionExample.arn),
            timeoutSeconds: 60,
          },
          type: "CUSTOM",
        },
        {
          tagStepDetails: {
            name: "example",
            sourceFileLocation: file,
            tags: [
              {
                key: "Name",
                value: "Hello World",
              },
            ],
          },
          type: "TAG",
        },
      ],
    });
  }
}

```

## Argument Reference

This resource supports the following arguments:

* `description` - (Optional) A textual description for the workflow.
* `onExceptionSteps` - (Optional) Specifies the steps (actions) to take if errors are encountered during execution of the workflow (see the sketch after this list). See Workflow Steps below.
* `steps` - (Required) Specifies the details for the steps that are in the specified workflow. See Workflow Steps below.
* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
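The examples above only populate `steps`. The following hand-written sketch (not `cdktf convert` output) shows how `onExceptionSteps` can attach a cleanup action that runs only when a step fails; the step names and file locations are illustrative:

```typescript
// Illustrative sketch: a workflow whose on-exception step deletes the
// partially processed upload if the tagging step fails.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { TransferWorkflow } from "./.gen/providers/aws/transfer-workflow";
class WorkflowWithCleanup extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new TransferWorkflow(this, "example", {
      steps: [
        {
          tagStepDetails: {
            name: "tag",
            // "$$" renders the literal ${original.file} token for the service.
            sourceFileLocation: "$${original.file}",
            tags: [
              {
                key: "Status",
                value: "Processed",
              },
            ],
          },
          type: "TAG",
        },
      ],
      // Runs only when a step above fails, e.g. to remove the partial upload.
      onExceptionSteps: [
        {
          deleteStepDetails: {
            name: "cleanup",
            sourceFileLocation: "$${original.file}",
          },
          type: "DELETE",
        },
      ],
    });
  }
}

```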
### Workflow Steps

* `copyStepDetails` - (Optional) Details for a step that performs a file copy. See Copy Step Details below.
* `customStepDetails` - (Optional) Details for a step that invokes a Lambda function.
* `decryptStepDetails` - (Optional) Details for a step that decrypts the file.
* `deleteStepDetails` - (Optional) Details for a step that deletes the file.
* `tagStepDetails` - (Optional) Details for a step that creates one or more tags.
* `type` - (Required) The type of the step. One of `COPY`, `CUSTOM`, `DECRYPT`, `DELETE`, or `TAG`.

#### Copy Step Details

* `destinationFileLocation` - (Optional) Specifies the location for the file being copied. Use `${Transfer:username}` in this field to parametrize the destination prefix by username.
* `name` - (Optional) The name of the step, used as an identifier.
* `overwriteExisting` - (Optional) A flag that indicates whether or not to overwrite an existing file of the same name. The default is `false`. Valid values are `true` and `false`.
* `sourceFileLocation` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter `${previous.file}` to use the previous file as the input; in this case, the step uses the output file from the previous workflow step as input (this is the default). Enter `${original.file}` to use the originally uploaded file location as input for this step.

#### Custom Step Details

* `name` - (Optional) The name of the step, used as an identifier.
* `sourceFileLocation` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter `${previous.file}` to use the previous file as the input; in this case, the step uses the output file from the previous workflow step as input (this is the default). Enter `${original.file}` to use the originally uploaded file location as input for this step.
* `target` - (Optional) The ARN of the Lambda function that is being called.
* `timeoutSeconds` - (Optional) Timeout, in seconds, for the step.

#### Decrypt Step Details

* `destinationFileLocation` - (Optional) Specifies the location for the decrypted file. Use `${Transfer:username}` in this field to parametrize the destination prefix by username.
* `name` - (Optional) The name of the step, used as an identifier.
* `overwriteExisting` - (Optional) A flag that indicates whether or not to overwrite an existing file of the same name. The default is `false`. Valid values are `true` and `false`.
* `sourceFileLocation` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter `${previous.file}` to use the previous file as the input; in this case, the step uses the output file from the previous workflow step as input (this is the default). Enter `${original.file}` to use the originally uploaded file location as input for this step.
* `type` - (Required) The type of encryption used. Currently, this value must be `"PGP"`.

#### Delete Step Details

* `name` - (Optional) The name of the step, used as an identifier.
* `sourceFileLocation` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow.
Enter `${previous.file}` to use the previous file as the input; in this case, the step uses the output file from the previous workflow step as input (this is the default). Enter `${original.file}` to use the originally uploaded file location as input for this step.

#### Tag Step Details

* `name` - (Optional) The name of the step, used as an identifier.
* `sourceFileLocation` - (Optional) Specifies which file to use as input to the workflow step: either the output from the previous step, or the originally uploaded file for the workflow. Enter `${previous.file}` to use the previous file as the input; in this case, the step uses the output file from the previous workflow step as input (this is the default). Enter `${original.file}` to use the originally uploaded file location as input for this step.
* `tags` - (Optional) Array that contains from 1 to 10 key/value pairs. See S3 Tags below.

##### Destination File Location

* `efsFileLocation` - (Optional) Specifies the details for the EFS file being copied.
* `s3FileLocation` - (Optional) Specifies the details for the S3 file being copied.

###### EFS File Location

* `fileSystemId` - (Optional) The ID of the file system, assigned by Amazon EFS.
* `path` - (Optional) The pathname for the folder being used by a workflow.

###### S3 File Location

* `bucket` - (Optional) Specifies the S3 bucket for the customer input file.
* `key` - (Optional) The name assigned to the file when it was created in S3. You use the object key to retrieve the object.

##### S3 Tag

* `key` - (Required) The name assigned to the tag that you create.
* `value` - (Required) The value that corresponds to the key.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `arn` - The Workflow ARN.
* `id` - The Workflow id.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Transfer Workflows using the `workflowId`. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import Transfer Workflows using the `workflowId`. For example:

```console
% terraform import aws_transfer_workflow.example example
```

 \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/volume_attachment.html.markdown b/website/docs/cdktf/typescript/r/volume_attachment.html.markdown new file mode 100644 index 00000000000..48a23b22b11 --- /dev/null +++ b/website/docs/cdktf/typescript/r/volume_attachment.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "EBS (EC2)" +layout: "aws" +page_title: "AWS: aws_volume_attachment" +description: |- + Provides an AWS EBS Volume Attachment +--- + + + +# Resource: aws_volume_attachment + +Provides an AWS EBS Volume Attachment as a top-level resource, to attach and detach volumes from AWS Instances.
+ +~> **NOTE on EBS block devices:** If you use `ebsBlockDevice` on an `awsInstance`, Terraform will assume management over the full set of non-root EBS block devices for the instance, and treats additional block devices as drift. For this reason, `ebsBlockDevice` cannot be mixed with external `awsEbsVolume` + `awsVolumeAttachment` resources for a given instance. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { EbsVolume } from "./.gen/providers/aws/ebs-volume"; +import { Instance } from "./.gen/providers/aws/instance"; +import { VolumeAttachment } from "./.gen/providers/aws/volume-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new EbsVolume(this, "example", { + availabilityZone: "us-west-2a", + size: 1, + }); + const web = new Instance(this, "web", { + ami: "ami-21f78e11", + availabilityZone: "us-west-2a", + instanceType: "t2.micro", + tags: { + Name: "HelloWorld", + }, + }); + new VolumeAttachment(this, "ebs_att", { + deviceName: "/dev/sdh", + instanceId: web.id, + volumeId: example.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `deviceName` - (Required) The device name to expose to the instance (for +example, `/dev/sdh` or `xvdh`). See [Device Naming on Linux Instances][1] and [Device Naming on Windows Instances][2] for more information. +* `instanceId` - (Required) ID of the Instance to attach to +* `volumeId` - (Required) ID of the Volume to be attached +* `forceDetach` - (Optional, Boolean) Set to `true` if you want to force the +volume to detach. Useful if previous attempts failed, but use this option only +as a last resort, as this can result in **data loss**. See +[Detaching an Amazon EBS Volume from an Instance][3] for more information. +* `skipDestroy` - (Optional, Boolean) Set this to true if you do not wish +to detach the volume from the instance to which it is attached at destroy +time, and instead just remove the attachment from Terraform state. This is +useful when destroying an instance which has volumes created by some other +means attached. +* `stopInstanceBeforeDetaching` - (Optional, Boolean) Set this to true to ensure that the target instance is stopped +before trying to detach the volume. Stops the instance, if it is not already stopped. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `deviceName` - The device name exposed to the instance +* `instanceId` - ID of the Instance +* `volumeId` - ID of the Volume + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import EBS Volume Attachments using `deviceName:volumeId:instanceId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import EBS Volume Attachments using `deviceName:volumeId:instanceId`. 
For example: + +```console +% terraform import aws_volume_attachment.example /dev/sdh:vol-049df61146c4d7901:i-12345678 +``` + +[1]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html#available-ec2-device-names +[2]: https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html#available-ec2-device-names +[3]: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-detaching-volume.html + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc.html.markdown b/website/docs/cdktf/typescript/r/vpc.html.markdown index 6151353849b..768ddd2cbd6 100644 --- a/website/docs/cdktf/typescript/r/vpc.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc.html.markdown @@ -115,7 +115,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidrBlock` - (Optional) The IPv4 CIDR block for the VPC. CIDR can be explicitly set or it can be derived from IPAM using `ipv4NetmaskLength`. * `instanceTenancy` - (Optional) A tenancy option for instances launched into the VPC. Default is `default`, which ensures that EC2 instances launched in this VPC use the EC2 instance tenancy attribute specified when the EC2 instance is launched. The only other option is `dedicated`, which ensures that EC2 instances launched in this VPC are run on dedicated tenancy instances regardless of the tenancy attribute specified at launch. This has a dedicated per region fee of $2 per hour, plus an hourly per instance usage fee. @@ -131,9 +131,9 @@ The following arguments are supported: * `assignGeneratedIpv6CidrBlock` - (Optional) Requests an Amazon-provided IPv6 CIDR block with a /56 prefix length for the VPC. You cannot specify the range of IP addresses, or the size of the CIDR block. Default is `false`. Conflicts with `ipv6IpamPoolId` * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of VPC * `id` - The ID of the VPC @@ -154,10 +154,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPCs can be imported using the `vpc id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPCs using the VPC `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc.test_vpc vpc-a01106c2 + +Using `terraform import`, import VPCs using the VPC `id`. 
For example: + +```console +% terraform import aws_vpc.test_vpc vpc-a01106c2 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_dhcp_options.html.markdown b/website/docs/cdktf/typescript/r/vpc_dhcp_options.html.markdown index 43804994a86..60ccc46c8ae 100644 --- a/website/docs/cdktf/typescript/r/vpc_dhcp_options.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_dhcp_options.html.markdown @@ -67,7 +67,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `domainName` - (Optional) the suffix domain name to use by default when resolving non Fully Qualified Domain Names. In other words, this is what ends up being the `search` value in the `/etc/resolvConf` file. * `domainNameServers` - (Optional) List of name servers to configure in `/etc/resolvConf`. If you want to use the default AWS nameservers you should set this to `amazonProvidedDns`. @@ -84,9 +84,9 @@ The following arguments are supported: * If you delete a DHCP Options Set, all VPCs using it will be associated to AWS's `default` DHCP Option Set. * In most cases unless you're configuring your own DNS you'll want to set `domainNameServers` to `amazonProvidedDns`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the DHCP Options Set. * `arn` - The ARN of the DHCP Options Set. @@ -98,10 +98,24 @@ official [AWS User Guide](https://docs.aws.amazon.com/AmazonVPC/latest/UserGuide ## Import -VPC DHCP Options can be imported using the `dhcp options id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC DHCP Options using the DHCP Options `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_dhcp_options.my_options dopt-d9070ebb + +Using `terraform import`, import VPC DHCP Options using the DHCP Options `id`. For example: + +```console +% terraform import aws_vpc_dhcp_options.my_options dopt-d9070ebb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_dhcp_options_association.html.markdown b/website/docs/cdktf/typescript/r/vpc_dhcp_options_association.html.markdown index defd0a78288..664c1fd88cd 100644 --- a/website/docs/cdktf/typescript/r/vpc_dhcp_options_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_dhcp_options_association.html.markdown @@ -37,7 +37,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpcId` - (Required) The ID of the VPC to which we would like to associate a DHCP Options Set. * `dhcpOptionsId` - (Required) The ID of the DHCP Options Set to associate to the VPC. @@ -47,18 +47,32 @@ The following arguments are supported: * You can only associate one DHCP Options Set to a given VPC ID. * Removing the DHCP Options Association automatically sets AWS's `default` DHCP Options Set to the VPC. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the DHCP Options Set Association. ## Import -DHCP associations can be imported by providing the VPC ID associated with the options: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import DHCP associations using the VPC ID associated with the options. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_dhcp_options_association.imported vpc-0f001273ec18911b1 + +Using `terraform import`, import DHCP associations using the VPC ID associated with the options. For example: + +```console +% terraform import aws_vpc_dhcp_options_association.imported vpc-0f001273ec18911b1 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint.html.markdown index 707696a4378..819f402f9d7 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint.html.markdown @@ -187,7 +187,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `serviceName` - (Required) The service name. For AWS services the service name is usually in the form `comAmazonaws..` (the SageMaker Notebook service is an exception to this rule, the service name is in the form `awsSagemaker.Notebook`). * `vpcId` - (Required) The ID of the VPC in which the endpoint will be used. @@ -217,9 +217,9 @@ If no security groups are specified, the VPC's [default security group](https:// - `update` - (Default `10M`) - `delete` - (Default `10M`) -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC endpoint. * `arn` - The Amazon Resource Name (ARN) of the VPC endpoint. @@ -239,10 +239,24 @@ DNS blocks (for `dnsEntry`) support the following attributes: ## Import -VPC Endpoints can be imported using the `vpc endpoint id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoints using the VPC endpoint `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_endpoint.endpoint1 vpce-3ecf2a57 + +Using `terraform import`, import VPC Endpoints using the VPC endpoint `id`. 
For example:

```console
% terraform import aws_vpc_endpoint.endpoint1 vpce-3ecf2a57
```

 \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_connection_accepter.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_connection_accepter.html.markdown index 279583eeeb2..f5ef659ffba 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_connection_accepter.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_connection_accepter.html.markdown @@ -62,24 +62,38 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference

-The following arguments are supported: +This resource supports the following arguments:

* `vpcEndpointId` - (Required) AWS VPC Endpoint ID.
* `vpcEndpointServiceId` - (Required) AWS VPC Endpoint Service ID.

-## Attributes Reference +## Attribute Reference

-In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above:

* `id` - The ID of the VPC Endpoint Connection.
* `vpcEndpointState` - State of the VPC Endpoint.

## Import

-VPC Endpoint Services can be imported using ID of the connection, which is the `VPC Endpoint Service ID` and `VPC Endpoint ID` separated by underscore (`_`). e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Services using the ID of the connection, which is the `VPC Endpoint Service ID` and `VPC Endpoint ID` separated by an underscore (`_`). For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
```
-$ terraform import aws_vpc_endpoint_connection_accepter.foo vpce-svc-0f97a19d3fa8220bc_vpce-010601a6db371e263
+
+Using `terraform import`, import VPC Endpoint Services using the ID of the connection, which is the `VPC Endpoint Service ID` and `VPC Endpoint ID` separated by an underscore (`_`). For example:
+
+```console
+% terraform import aws_vpc_endpoint_connection_accepter.foo vpce-svc-0f97a19d3fa8220bc_vpce-010601a6db371e263
```
 \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_connection_notification.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_connection_notification.html.markdown index 14c549d4c13..8887b29a1ae 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_connection_notification.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_connection_notification.html.markdown @@ -70,7 +70,7 @@ class MyConvertedCode extends TerraformStack {

## Argument Reference

-The following arguments are supported: +This resource supports the following arguments:

* `vpcEndpointServiceId` - (Optional) The ID of the VPC Endpoint Service to receive notifications for.
* `vpcEndpointId` - (Optional) The ID of the VPC Endpoint to receive notifications for.
@@ -79,9 +79,9 @@ The following arguments are supported:

~> **NOTE:** One of `vpcEndpointServiceId` or `vpcEndpointId` must be specified.
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC connection notification. * `state` - The state of the notification. @@ -89,10 +89,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Endpoint connection notifications can be imported using the `VPC endpoint connection notification id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint connection notifications using the VPC endpoint connection notification `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_endpoint_connection_notification.foo vpce-nfn-09e6ed3b4efba2263 + +Using `terraform import`, import VPC Endpoint connection notifications using the VPC endpoint connection notification `id`. For example: + +```console +% terraform import aws_vpc_endpoint_connection_notification.foo vpce-nfn-09e6ed3b4efba2263 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_policy.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_policy.html.markdown index 177057718f4..918cde90516 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_policy.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_policy.html.markdown @@ -79,23 +79,37 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpcEndpointId` - (Required) The VPC Endpoint ID. * `policy` - (Optional) A policy to attach to the endpoint that controls access to the service. Defaults to full access. All `gateway` and some `interface` endpoints support policies - see the [relevant AWS documentation](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-endpoints-access.html) for more details. For more information about building AWS IAM policy documents with Terraform, see the [AWS IAM Policy Document Guide](https://learn.hashicorp.com/terraform/aws/iam-policy). -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC endpoint. ## Import -VPC Endpoint Policies can be imported using the `id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Policies using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_endpoint_policy.example vpce-3ecf2a57 + +Using `terraform import`, import VPC Endpoint Policies using the `id`. 
For example: + +```console +% terraform import aws_vpc_endpoint_policy.example vpce-3ecf2a57 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_route_table_association.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_route_table_association.html.markdown index 76944cb042e..e70334e1348 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_route_table_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_route_table_association.html.markdown @@ -37,24 +37,37 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `routeTableId` - (Required) Identifier of the EC2 Route Table to be associated with the VPC Endpoint. * `vpcEndpointId` - (Required) Identifier of the VPC Endpoint with which the EC2 Route Table will be associated. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - A hash of the EC2 Route Table and VPC Endpoint identifiers. ## Import -VPC Endpoint Route Table Associations can be imported using `vpcEndpointId` together with `routeTableId`, -e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Route Table Associations using `vpcEndpointId` together with `routeTableId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_endpoint_route_table_association.example vpce-aaaaaaaa/rtb-bbbbbbbb + +Using `terraform import`, import VPC Endpoint Route Table Associations using `vpcEndpointId` together with `routeTableId`. For example: + +```console +% terraform import aws_vpc_endpoint_route_table_association.example vpce-aaaaaaaa/rtb-bbbbbbbb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_security_group_association.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_security_group_association.html.markdown index defb3504a6b..c66e44119ae 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_security_group_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_security_group_association.html.markdown @@ -45,16 +45,16 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `securityGroupId` - (Required) The ID of the security group to be associated with the VPC endpoint. * `vpcEndpointId` - (Required) The ID of the VPC endpoint with which the security group will be associated. * `replaceDefaultAssociation` - (Optional) Whether this association should replace the association with the VPC's default security group that is created when no security groups are specified during VPC endpoint creation. At most 1 association per-VPC endpoint should be configured with `replace_default_association = true`. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the association. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_service.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_service.html.markdown index 00c751fc778..2b3fccc0247 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_service.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_service.html.markdown @@ -69,7 +69,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `acceptanceRequired` - (Required) Whether or not VPC endpoint connection requests to the service must be accepted by the service owner - `true` or `false`. * `allowedPrincipals` - (Optional) The ARNs of one or more principals allowed to discover the endpoint service. @@ -79,9 +79,9 @@ The following arguments are supported: * `privateDnsName` - (Optional) The private DNS name for the service. * `supportedIpAddressTypes` - (Optional) The supported IP address types. The possible values are `ipv4` and `ipv6`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC endpoint service. * `availabilityZones` - A set of Availability Zones in which the service is available. @@ -100,10 +100,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Endpoint Services can be imported using the `VPC endpoint service id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Services using the VPC endpoint service `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_endpoint_service.foo vpce-svc-0f97a19d3fa8220bc + +Using `terraform import`, import VPC Endpoint Services using the VPC endpoint service `id`. For example: + +```console +% terraform import aws_vpc_endpoint_service.foo vpce-svc-0f97a19d3fa8220bc ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_service_allowed_principal.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_service_allowed_principal.html.markdown index 273a826159f..5abbfa5a8a9 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_service_allowed_principal.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_service_allowed_principal.html.markdown @@ -47,15 +47,15 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpcEndpointServiceId` - (Required) The ID of the VPC endpoint service to allow permission. * `principalArn` - (Required) The ARN of the principal to allow permissions. 
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the association. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_endpoint_subnet_association.html.markdown b/website/docs/cdktf/typescript/r/vpc_endpoint_subnet_association.html.markdown index 0e257718823..947b7995063 100644 --- a/website/docs/cdktf/typescript/r/vpc_endpoint_subnet_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_endpoint_subnet_association.html.markdown @@ -45,14 +45,14 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpcEndpointId` - (Required) The ID of the VPC endpoint with which the subnet will be associated. * `subnetId` - (Required) The ID of the subnet to be associated with the VPC endpoint. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the association. @@ -65,11 +65,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Endpoint Subnet Associations can be imported using `vpcEndpointId` together with `subnetId`, -e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Endpoint Subnet Associations using `vpcEndpointId` together with `subnetId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_endpoint_subnet_association.example vpce-aaaaaaaa/subnet-bbbbbbbbbbbbbbbbb + +Using `terraform import`, import VPC Endpoint Subnet Associations using `vpcEndpointId` together with `subnetId`. For example: + +```console +% terraform import aws_vpc_endpoint_subnet_association.example vpce-aaaaaaaa/subnet-bbbbbbbbbbbbbbbbb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam.html.markdown index eb4047baccb..64b64150c40 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam.html.markdown @@ -98,7 +98,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `description` - (Optional) A description for the IPAM. * `operatingRegions` - (Required) Determines which locales can be chosen when you create pools. Locale is the Region where you want to make an IPAM pool available for allocations. You can only create pools with locales that match the operating Regions of the IPAM. You can only create VPCs from a pool whose locale matches the VPC's Region. You specify a region using the [region_name](#operating_regions) parameter. You **must** set your provider block region as an operating_region. 
@@ -109,9 +109,9 @@ The following arguments are supported: * `regionName` - (Required) The name of the Region you want to add to the IPAM. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of IPAM * `id` - The ID of the IPAM @@ -125,10 +125,24 @@ IP space. The public scope is intended for all internet-routable IP space. ## Import -IPAMs can be imported using the `ipam id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam.example ipam-0178368ad2146a492 + +Using `terraform import`, import IPAMs using the IPAM `id`. For example: + +```console +% terraform import aws_vpc_ipam.example ipam-0178368ad2146a492 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_organization_admin_account.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_organization_admin_account.html.markdown index 15c33e50490..a0add07f34c 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_organization_admin_account.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_organization_admin_account.html.markdown @@ -46,13 +46,13 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `delegatedAdminAccountId` - (Required) -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Organizations ARN for the delegate account. * `id` - The Organizations member account ID that you want to enable as the IPAM account. @@ -62,10 +62,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `delegate account id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the delegate account `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam_organization_admin_account.example 12345678901 + +Using `terraform import`, import IPAMs using the delegate account `id`. 
For example: + +```console +% terraform import aws_vpc_ipam_organization_admin_account.example 12345678901 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_pool.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_pool.html.markdown index 95ff2e4690a..ecab49b62fc 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_pool.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_pool.html.markdown @@ -100,7 +100,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `addressFamily` - (Optional) The IP protocol assigned to this pool. You must choose either IPv4 or IPv6 protocol for a pool. * `allocationDefaultNetmaskLength` - (Optional) A default netmask length for allocations added to this pool. If, for example, the CIDR assigned to this pool is 10.0.0.0/8 and you enter 16 here, new allocations will default to 10.0.0.0/16 (unless you provide a different netmask value when you create the new allocation). @@ -118,9 +118,9 @@ within the CIDR range in the pool. * `sourceIpamPoolId` - (Optional) The ID of the source IPAM pool. Use this argument to create a child pool within an existing pool. * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of IPAM * `id` - The ID of the IPAM @@ -129,10 +129,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `ipam pool id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM pool `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam_pool.example ipam-pool-0958f95207d978e1e + +Using `terraform import`, import IPAMs using the IPAM pool `id`. For example: + +```console +% terraform import aws_vpc_ipam_pool.example ipam-pool-0958f95207d978e1e ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr.html.markdown index 07f4d9758ba..175fbae22b8 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr.html.markdown @@ -116,7 +116,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidr` - (Optional) The CIDR you want to assign to the pool. Conflicts with `netmaskLength`. 
* `cidrAuthorizationContext` - (Optional) A signed document that proves that you are authorized to bring the specified IP address range to Amazon using BYOIP. This is not stored in the state file. See [cidr_authorization_context](#cidr_authorization_context) for more information. @@ -128,19 +128,37 @@ The following arguments are supported: * `message` - (Optional) The plain-text authorization message for the prefix and account. * `signature` - (Optional) The signed authorization message for the prefix and account. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the IPAM Pool CIDR concatenated with the IPAM Pool ID. * `ipamPoolCidrId` - The unique ID generated by AWS for the pool CIDR. Typically this is the resource `id`, but this attribute was added to the API calls after the fact and is therefore not used as the Terraform resource ID. ## Import -IPAMs can be imported using the `<cidr>_<ipam-pool-id>`. Please note we **DO NOT** use the ipam pool cidr id as this was introduced after the resource already existed. An import example: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the `<cidr>_<ipam-pool-id>`. For example: + +**NOTE:** Do not use the IPAM pool CIDR ID, as this was introduced after the resource already existed. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam_pool_cidr.example 172.2.0.0/24_ipam-pool-0e634f5a1517cccdc + +Using `terraform import`, import IPAMs using the `<cidr>_<ipam-pool-id>`. For example: + +**NOTE:** Do not use the IPAM pool CIDR ID, as this was introduced after the resource already existed. + +```console +% terraform import aws_vpc_ipam_pool_cidr.example 172.2.0.0/24_ipam-pool-0e634f5a1517cccdc ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr_allocation.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr_allocation.html.markdown index c9be78b54bf..0f84b40bb6f 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr_allocation.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_pool_cidr_allocation.html.markdown @@ -127,7 +127,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidr` - (Optional) The CIDR you want to assign to the pool. * `description` - (Optional) The description for the allocation. @@ -135,9 +135,9 @@ The following arguments are supported: * `ipamPoolId` - (Required) The ID of the pool to which you want to assign a CIDR. * `netmaskLength` - (Optional) The netmask length of the CIDR you would like to allocate to the IPAM pool. Valid Values: `0-128`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the allocation. * `resourceId` - The ID of the resource.
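To make the netmask-based allocation described above concrete, here is a small hand-written sketch (not `cdktf convert` output; the pool ID is a placeholder for an existing `aws_vpc_ipam_pool`) that asks IPAM for the next free `/24` from a pool:

```typescript
// Hand-written sketch, not generated by 'cdktf convert'. The pool ID is a
// placeholder for an IPAM pool defined elsewhere in your configuration.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { VpcIpamPoolCidrAllocation } from "./.gen/providers/aws/vpc-ipam-pool-cidr-allocation";

class AllocationSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new VpcIpamPoolCidrAllocation(this, "example", {
      ipamPoolId: "ipam-pool-0e634f5a1517cccdc", // placeholder pool ID
      // Ask IPAM for the next available /24 instead of naming a CIDR explicitly.
      netmaskLength: 24,
    });
  }
}
```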
@@ -146,10 +146,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAM allocations can be imported using the `allocation id` and `pool id`, separated by `_`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAM allocations using the allocation `id` and `pool id`, separated by `_`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam_pool_cidr_allocation.example ipam-pool-alloc-0dc6d196509c049ba8b549ff99f639736_ipam-pool-07cfb559e0921fcbe + +Using `terraform import`, import IPAM allocations using the allocation `id` and `pool id`, separated by `_`. For example: + +```console +% terraform import aws_vpc_ipam_pool_cidr_allocation.example ipam-pool-alloc-0dc6d196509c049ba8b549ff99f639736_ipam-pool-07cfb559e0921fcbe ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_preview_next_cidr.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_preview_next_cidr.html.markdown index c2ff0b4b672..23557ad29e4 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_preview_next_cidr.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_preview_next_cidr.html.markdown @@ -72,17 +72,17 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `disallowedCidrs` - (Optional) Exclude a particular CIDR range from being returned by the pool. * `ipamPoolId` - (Required) The ID of the pool to which you want to assign a CIDR. * `netmaskLength` - (Optional) The netmask length of the CIDR you would like to preview from the IPAM pool. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `cidr` - The previewed CIDR from the pool. * `id` - The ID of the preview. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery.html.markdown index 6d5bd12d312..9f4c78ce07c 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery.html.markdown @@ -48,7 +48,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `description` - (Optional) A description for the IPAM Resource Discovery. * `operatingRegions` - (Required) Determines which regions the Resource Discovery will enable IPAM features for usage and monitoring. Locale is the Region where you want to make an IPAM pool available for allocations. You can only create pools with locales that match the operating Regions of the IPAM Resource Discovery. You can only create VPCs from a pool whose locale matches the VPC's Region. You specify a region using the [region_name](#operating_regions) parameter. 
**You must set your provider block region as an operating_region.** @@ -58,9 +58,9 @@ The following arguments are supported: * `regionName` - (Required) The name of the Region you want to add to the IPAM. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - Amazon Resource Name (ARN) of IPAM Resource Discovery * `id` - The ID of the IPAM Resource Discovery @@ -71,10 +71,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `ipam resource discovery id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM resource discovery `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam_resource_discovery.example ipam-res-disco-0178368ad2146a492 + +Using `terraform import`, import IPAMs using the IPAM resource discovery `id`. For example: + +```console +% terraform import aws_vpc_ipam_resource_discovery.example ipam-res-disco-0178368ad2146a492 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery_association.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery_association.html.markdown index 95153c8d64e..681722b3de8 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_resource_discovery_association.html.markdown @@ -46,15 +46,15 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ipamId` - (Required) The ID of the IPAM to associate. * `ipamResourceDiscoveryId` - (Required) The ID of the Resource Discovery to associate. * `tags` - (Optional) A map of tags to add to the IPAM resource discovery association resource. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of IPAM Resource Discovery Association. * `id` - The ID of the IPAM Resource Discovery Association. @@ -67,10 +67,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `ipam resource discovery association id`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the IPAM resource discovery association `id`. 
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam_resource_discovery_association.example ipam-res-disco-assoc-0178368ad2146a492 + +Using `terraform import`, import IPAMs using the IPAM resource discovery association `id`. For example: + +```console +% terraform import aws_vpc_ipam_resource_discovery_association.example ipam-res-disco-assoc-0178368ad2146a492 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipam_scope.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipam_scope.html.markdown index 144d525551c..d75ee8461d9 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipam_scope.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipam_scope.html.markdown @@ -51,15 +51,15 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ipamId` - The ID of the IPAM for which you're creating this scope. * `description` - (Optional) A description for the scope you're creating. * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the scope. * `id` - The ID of the IPAM Scope. @@ -70,10 +70,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -IPAMs can be imported using the `scopeId`, e.g. +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import IPAMs using the `scopeId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipam_scope.example ipam-scope-0513c69f283d11dfb + +Using `terraform import`, import IPAMs using the `scopeId`. For example: + +```console +% terraform import aws_vpc_ipam_scope.example ipam-scope-0513c69f283d11dfb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipv4_cidr_block_association.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipv4_cidr_block_association.html.markdown index 35607af0813..bd674d1def9 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipv4_cidr_block_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipv4_cidr_block_association.html.markdown @@ -44,16 +44,16 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `cidrBlock` - (Optional) The IPv4 CIDR block for the VPC. 
CIDR can be explicitly set or it can be derived from IPAM using `ipv4NetmaskLength`. * `ipv4IpamPoolId` - (Optional) The ID of an IPv4 IPAM pool you want to use for allocating this VPC's CIDR. IPAM is a VPC feature that you can use to automate your IP address management workflows including assigning, tracking, troubleshooting, and auditing IP addresses across AWS Regions and accounts. Using IPAM you can monitor IP address usage throughout your AWS Organization. * `ipv4NetmaskLength` - (Optional) The netmask length of the IPv4 CIDR you want to allocate to this VPC. Requires specifying an `ipv4IpamPoolId`. * `vpcId` - (Required) The ID of the VPC to make the association with. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC CIDR association @@ -66,10 +66,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -`awsVpcIpv4CidrBlockAssociation` can be imported by using the VPC CIDR Association ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsVpcIpv4CidrBlockAssociation` using the VPC CIDR Association ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipv4_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx + +Using `terraform import`, import `awsVpcIpv4CidrBlockAssociation` using the VPC CIDR Association ID. For example: + +```console +% terraform import aws_vpc_ipv4_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_ipv6_cidr_block_association.html.markdown b/website/docs/cdktf/typescript/r/vpc_ipv6_cidr_block_association.html.markdown index 32ee8c004c0..75ae8e0f869 100644 --- a/website/docs/cdktf/typescript/r/vpc_ipv6_cidr_block_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_ipv6_cidr_block_association.html.markdown @@ -49,7 +49,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `ipv6CidrBlock` - (Optional) The IPv6 CIDR block for the VPC. CIDR can be explicitly set or it can be derived from IPAM using `ipv6NetmaskLength`. This parameter is required if `ipv6NetmaskLength` is not set and the IPAM pool does not have `allocationDefaultNetmask` set. * `ipv6IpamPoolId` - (Required) The ID of an IPv6 IPAM pool you want to use for allocating this VPC's CIDR. IPAM is a VPC feature that you can use to automate your IP address management workflows including assigning, tracking, troubleshooting, and auditing IP addresses across AWS Regions and accounts.
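Because the CIDR can come from IPAM instead of being set explicitly, a minimal hand-written sketch of the IPAM-derived pattern might look like this (not `cdktf convert` output; the pool and VPC IDs are placeholders):

```typescript
// Hand-written sketch, not generated by 'cdktf convert'. Both IDs are
// placeholders for resources that would exist elsewhere in your stack.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { VpcIpv6CidrBlockAssociation } from "./.gen/providers/aws/vpc-ipv6-cidr-block-association";

class Ipv6FromIpamSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new VpcIpv6CidrBlockAssociation(this, "example", {
      ipv6IpamPoolId: "ipam-pool-0123456789abcdef0", // placeholder IPAM pool ID
      ipv6NetmaskLength: 56, // derive a /56 from the pool instead of setting ipv6CidrBlock
      vpcId: "vpc-0123456789abcdef0", // placeholder VPC ID
    });
  }
}
```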
@@ -63,18 +63,32 @@ The following arguments are supported: - `create` - (Default `10m`) - `delete` - (Default `10m`) -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC CIDR association ## Import -`awsVpcIpv6CidrBlockAssociation` can be imported by using the VPC CIDR Association ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import `awsVpcIpv6CidrBlockAssociation` using the VPC CIDR Association ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_ipv6_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx + +Using `terraform import`, import `awsVpcIpv6CidrBlockAssociation` using the VPC CIDR Association ID. For example: + +```console +% terraform import aws_vpc_ipv6_cidr_block_association.example vpc-cidr-assoc-xxxxxxxx ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_network_performance_metric_subscription.html.markdown b/website/docs/cdktf/typescript/r/vpc_network_performance_metric_subscription.html.markdown index f59d7507bc1..6d5d392c8ea 100644 --- a/website/docs/cdktf/typescript/r/vpc_network_performance_metric_subscription.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_network_performance_metric_subscription.html.markdown @@ -37,17 +37,17 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `destination` - (Required) The target Region or Availability Zone that the metric subscription is enabled for. For example, `euWest1`. * `metric` - (Optional) The metric used for the enabled subscription. Valid values: `aggregateLatency`. Default: `aggregateLatency`. * `source` - (Required) The source Region or Availability Zone that the metric subscription is enabled for. For example, `usEast1`. * `statistic` - (Optional) The statistic used for the enabled subscription. Valid values: `p50`. Default: `p50`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `period` - The data aggregation time for the subscription. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_peering_connection.html.markdown b/website/docs/cdktf/typescript/r/vpc_peering_connection.html.markdown index d9da4e7b773..9bf95a1f913 100644 --- a/website/docs/cdktf/typescript/r/vpc_peering_connection.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_peering_connection.html.markdown @@ -160,7 +160,7 @@ can be done using the [`autoAccept`](vpc_peering_connection.html#auto_accept) at Connection has to be made active manually using other means. See [notes](vpc_peering_connection.html#notes) below for more information. -The following arguments are supported: +This resource supports the following arguments: * `peerOwnerId` - (Optional) The AWS account ID of the owner of the peer VPC.
Defaults to the account ID the [AWS provider][1] is currently connected to. @@ -183,9 +183,9 @@ must have support for the DNS hostnames enabled. This can be done using the [`en * `allowRemoteVpcDnsResolution` - (Optional) Allow a local VPC to resolve public DNS hostnames to private IP addresses when queried from instances in the peer VPC. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC Peering Connection. * `acceptStatus` - The status of the VPC Peering Connection request. @@ -207,12 +207,26 @@ or accept the connection manually using the AWS Management Console, AWS CLI, thr ## Import -VPC Peering resources can be imported using the `vpc peering id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Peering resources using the VPC peering `id`. For example: -```sh -$ terraform import aws_vpc_peering_connection.test_connection pcx-111aaa111 +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import VPC Peering resources using the VPC peering `id`. For example: + +```console +% terraform import aws_vpc_peering_connection.test_connection pcx-111aaa111 ``` [1]: /docs/providers/aws/index.html - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_peering_connection_accepter.html.markdown b/website/docs/cdktf/typescript/r/vpc_peering_connection_accepter.html.markdown index 6b07d55a966..362cd46ce1f 100644 --- a/website/docs/cdktf/typescript/r/vpc_peering_connection_accepter.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_peering_connection_accepter.html.markdown @@ -94,7 +94,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpcPeeringConnectionId` - (Required) The VPC Peering Connection ID to manage. * `autoAccept` - (Optional) Whether or not to accept the peering request. Defaults to `false`. @@ -108,9 +108,9 @@ by removing the corresponding `awsVpcPeeringConnection` resource from your confi Removing a `awsVpcPeeringConnectionAccepter` resource from your configuration will remove it from your statefile and management, **but will not destroy the VPC Peering Connection.** -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC Peering Connection. * `acceptStatus` - The status of the VPC Peering Connection request. @@ -124,20 +124,34 @@ In addition to all arguments above, the following attributes are exported: (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC. * `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). 
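For the cross-account flow this resource supports, a hedged, hand-written sketch of the accepter's side (not `cdktf convert` output; the peering connection ID is a placeholder supplied by the requester account) can be as small as:

```typescript
// Hand-written sketch, not generated by 'cdktf convert'. The peering
// connection ID is a placeholder created by the requester account.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { VpcPeeringConnectionAccepter } from "./.gen/providers/aws/vpc-peering-connection-accepter";

class AccepterSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new VpcPeeringConnectionAccepter(this, "peer", {
      vpcPeeringConnectionId: "pcx-12345678", // placeholder; ID of the pending request
      autoAccept: true, // accept the peering request as soon as it appears
    });
  }
}
```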
-#### Accepter and Requester Attributes Reference +#### Accepter and Requester Attribute Reference * `allowRemoteVpcDnsResolution` - Indicates whether a local VPC can resolve public DNS hostnames to private IP addresses when queried from instances in a peer VPC. ## Import -VPC Peering Connection Accepters can be imported by using the Peering Connection ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Peering Connection Accepters using the Peering Connection ID. For example: -```sh -$ terraform import aws_vpc_peering_connection_accepter.example pcx-12345678 +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import VPC Peering Connection Accepters using the Peering Connection ID. For example: + +```console +% terraform import aws_vpc_peering_connection_accepter.example pcx-12345678 ``` -Certain resource arguments, like `autoAccept`, do not have an EC2 API method for reading the information after peering connection creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To workaround this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference, e.g., +Certain resource arguments, like `autoAccept`, do not have an EC2 API method for reading the information after peering connection creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignoreChanges`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference. For example: ```typescript // Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug @@ -165,4 +179,4 @@ class MyConvertedCode extends TerraformStack { ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_peering_connection_options.html.markdown b/website/docs/cdktf/typescript/r/vpc_peering_connection_options.html.markdown index 04047ffadd3..b20d3952e69 100644 --- a/website/docs/cdktf/typescript/r/vpc_peering_connection_options.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_peering_connection_options.html.markdown @@ -167,7 +167,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `vpcPeeringConnectionId` - (Required) The ID of the requester VPC peering connection. * `accepter` (Optional) - An optional configuration block that allows for [VPC Peering Connection](https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options to be set for the VPC that accepts the peering connection (a maximum of one). * `requester` (Optional) - A configuration block that allows for [VPC Peering Connection](https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options to be set for the VPC that requests the peering connection (a maximum of one). @@ -179,18 +179,32 @@ The following arguments are supported: * `allowRemoteVpcDnsResolution` - (Optional) Allow a local VPC to resolve public DNS hostnames to private IP addresses when queried from instances in the peer VPC.
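For a concrete picture of these configuration blocks, here is a short hand-written sketch (not `cdktf convert` output; the peering connection ID is a placeholder) that enables DNS resolution on the requester side:

```typescript
// Hand-written sketch, not generated by 'cdktf convert'. The peering
// connection ID is a placeholder.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { VpcPeeringConnectionOptions } from "./.gen/providers/aws/vpc-peering-connection-options";

class PeeringOptionsSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new VpcPeeringConnectionOptions(this, "example", {
      vpcPeeringConnectionId: "pcx-111aaa111", // placeholder peering connection ID
      requester: {
        // Let instances in the peer VPC resolve this VPC's public DNS
        // hostnames to private IP addresses.
        allowRemoteVpcDnsResolution: true,
      },
    });
  }
}
```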
-## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - The ID of the VPC Peering Connection Options. ## Import -VPC Peering Connection Options can be imported using the `vpc peering id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Peering Connection Options using the VPC peering `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_peering_connection_options.foo pcx-111aaa111 + +Using `terraform import`, import VPC Peering Connection Options using the VPC peering `id`. For example: + +```console +% terraform import aws_vpc_peering_connection_options.foo pcx-111aaa111 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_security_group_egress_rule.html.markdown b/website/docs/cdktf/typescript/r/vpc_security_group_egress_rule.html.markdown index 691bf571a24..f114b1017d8 100644 --- a/website/docs/cdktf/typescript/r/vpc_security_group_egress_rule.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_security_group_egress_rule.html.markdown @@ -38,7 +38,7 @@ class MyConvertedCode extends TerraformStack { fromPort: 80, ipProtocol: "tcp", securityGroupId: Token.asString(awsSecurityGroupExample.id), - toPort: 8080, + toPort: 80, }); } } @@ -47,7 +47,9 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +~> **Note** Although `cidrIpv4`, `cidrIpv6`, `prefixListId`, and `referencedSecurityGroupId` are all marked as optional, you *must* provide one of them in order to configure the destination of the traffic. The `fromPort` and `toPort` arguments are required unless `ipProtocol` is set to `-1` or `icmpv6`. + +This resource supports the following arguments: * `cidrIpv4` - (Optional) The destination IPv4 CIDR range. * `cidrIpv6` - (Optional) The destination IPv6 CIDR range. @@ -60,9 +62,9 @@ The following arguments are supported: * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `toPort` - (Optional) The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the security group rule. * `securityGroupRuleId` - The ID of the security group rule. @@ -70,10 +72,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Security group egress rules can be imported using the `securityGroupRuleId`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import security group egress rules using the `securityGroupRuleId`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_security_group_egress_rule.example sgr-02108b27edd666983 + +Using `terraform import`, import security group egress rules using the `securityGroupRuleId`. For example: + +```console +% terraform import aws_vpc_security_group_egress_rule.example sgr-02108b27edd666983 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpc_security_group_ingress_rule.html.markdown b/website/docs/cdktf/typescript/r/vpc_security_group_ingress_rule.html.markdown index ed4f3fbc583..06da1b4e977 100644 --- a/website/docs/cdktf/typescript/r/vpc_security_group_ingress_rule.html.markdown +++ b/website/docs/cdktf/typescript/r/vpc_security_group_ingress_rule.html.markdown @@ -38,7 +38,7 @@ class MyConvertedCode extends TerraformStack { fromPort: 80, ipProtocol: "tcp", securityGroupId: Token.asString(awsSecurityGroupExample.id), - toPort: 8080, + toPort: 80, }); } } @@ -47,22 +47,24 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: + +~> **Note** Although `cidrIpv4`, `cidrIpv6`, `prefixListId`, and `referencedSecurityGroupId` are all marked as optional, you *must* provide one of them in order to configure the source of the traffic. The `fromPort` and `toPort` arguments are required unless `ipProtocol` is set to `-1` or `icmpv6`. * `cidrIpv4` - (Optional) The source IPv4 CIDR range. * `cidrIpv6` - (Optional) The source IPv6 CIDR range. * `description` - (Optional) The security group rule description. * `fromPort` - (Optional) The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type. -* `ipProtocol` - (Optional) The IP protocol name or number. Use `-1` to specify all protocols. Note that if `ipProtocol` is set to `-1`, it translates to all protocols, all port ranges, and `fromPort` and `toPort` values should not be defined. +* `ipProtocol` - (Required) The IP protocol name or number. Use `-1` to specify all protocols. Note that if `ipProtocol` is set to `-1`, it translates to all protocols, all port ranges, and `fromPort` and `toPort` values should not be defined. * `prefixListId` - (Optional) The ID of the source prefix list. * `referencedSecurityGroupId` - (Optional) The source security group that is referenced in the rule. * `securityGroupId` - (Required) The ID of the security group. * `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `toPort` - (Optional) The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The Amazon Resource Name (ARN) of the security group rule. * `securityGroupRuleId` - The ID of the security group rule.
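To illustrate the `-1` case from the note above, a hand-written sketch (not `cdktf convert` output; the CIDR and security group ID are placeholders) omits both port arguments:

```typescript
// Hand-written sketch, not generated by 'cdktf convert'. Because ipProtocol
// is "-1" (all protocols, all ports), fromPort and toPort are omitted.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { VpcSecurityGroupIngressRule } from "./.gen/providers/aws/vpc-security-group-ingress-rule";

class AllProtocolsSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new VpcSecurityGroupIngressRule(this, "all_from_trusted_range", {
      cidrIpv4: "10.8.0.0/16", // placeholder trusted source range
      ipProtocol: "-1",
      securityGroupId: "sg-0123456789abcdef0", // placeholder security group ID
    });
  }
}
```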
@@ -70,10 +72,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -Security group ingress rules can be imported using the `securityGroupRuleId`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import security group ingress rules using the `securityGroupRuleId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpc_security_group_ingress_rule.example sgr-02108b27edd666983 + +Using `terraform import`, import security group ingress rules using the `securityGroupRuleId`. For example: + +```console +% terraform import aws_vpc_security_group_ingress_rule.example sgr-02108b27edd666983 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_access_log_subscription.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_access_log_subscription.html.markdown index 079252ad2ff..8299b25b94c 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_access_log_subscription.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_access_log_subscription.html.markdown @@ -44,9 +44,9 @@ The following arguments are required: * `destinationArn` - (Required) Amazon Resource Name (ARN) of the log destination. * `resourceIdentifier` - (Required) The ID or Amazon Resource Name (ARN) of the service network or service. You must use the ARN if the resources specified in the operation are in different accounts. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `id` - ID of the access log subscription. * `arn` - Amazon Resource Name (ARN) of the access log subscription. @@ -56,10 +56,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Access Log Subscription can be imported using the access log subscription ID, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Access Log Subscription using the access log subscription ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_access_log_subscription.example rft-8012925589 + +Using `terraform import`, import VPC Lattice Access Log Subscription using the access log subscription ID.
For example: + +```console +% terraform import aws_vpclattice_access_log_subscription.example rft-8012925589 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_auth_policy.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_auth_policy.html.markdown index e14642a2185..eea7f3021ac 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_auth_policy.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_auth_policy.html.markdown @@ -73,9 +73,9 @@ The following arguments are required: * `resourceIdentifier` - (Required) The ID or Amazon Resource Name (ARN) of the service network or service for which the policy is created. * `policy` - (Required) The auth policy. The policy string in JSON must not contain newlines or blank lines. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `policy` - The auth policy. The policy string in JSON must not contain newlines or blank lines. * `state` - The state of the auth policy. The auth policy is only active when the auth type is set to AWS_IAM. If you provide a policy, then authentication and authorization decisions are made based on this policy and the client's IAM policy. If the auth type is NONE, then any auth policy you provide will remain inactive. @@ -90,10 +90,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Auth Policy can be imported using the `exampleIdArg`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Auth Policy using the `exampleIdArg`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_auth_policy.example rft-8012925589 + +Using `terraform import`, import VPC Lattice Auth Policy using the `exampleIdArg`. For example: + +```console +% terraform import aws_vpclattice_auth_policy.example rft-8012925589 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_listener.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_listener.html.markdown index e7531cd15d7..46e74ed1d6a 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_listener.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_listener.html.markdown @@ -153,7 +153,7 @@ class MyConvertedCode extends TerraformStack { ## Argument Reference -The following arguments are supported: +This resource supports the following arguments: * `defaultAction` - (Required) Default action block for the default listener rule. Default action blocks are defined below. * `name` - (Required, Forces new resource) Name of the listener. A listener name must be unique within a service. Valid characters are a-z, 0-9, and hyphens (-). You can't use a hyphen as the first or last character, or immediately after another hyphen. @@ -193,9 +193,9 @@ Target group blocks (for `targetGroup`) must include the following arguments: * `weight` - (Optional) Determines how requests are distributed to the target group.
Only required if you specify multiple target groups for a forward action. For example, if you specify two target groups, one with a weight of 10 and the other with a weight of 20, the target group with a weight of 20 receives twice as many requests as the other target group. See [Listener rules](https://docs.aws.amazon.com/vpc-lattice/latest/ug/listeners.html#listener-rules) in the AWS documentation for additional examples. Default: `100`. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the listener. * `createdAt` - Date and time that the listener was created, specified in ISO-8601 format. @@ -204,10 +204,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Listener can be imported by using the `listenerId` of the listener and the `id` of the VPC Lattice service combined with a `/` character, e.g.: +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Listener using the `listenerId` of the listener and the `id` of the VPC Lattice service combined with a `/` character. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_listener.example svc-1a2b3c4d/listener-987654321 + +Using `terraform import`, import VPC Lattice Listener using the `listenerId` of the listener and the `id` of the VPC Lattice service combined with a `/` character. For example: + +```console +% terraform import aws_vpclattice_listener.example svc-1a2b3c4d/listener-987654321 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_listener_rule.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_listener_rule.html.markdown index 20846948c6e..f18d63a7914 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_listener_rule.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_listener_rule.html.markdown @@ -174,9 +174,9 @@ path match (`match`) supports the following: * `exact` - (Optional) Specifies an exact type match. * `prefix` - (Optional) Specifies a prefix type match. Matches the value with the prefix. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the listener rule. * `ruleId` - Unique identifier for the listener rule. @@ -192,10 +192,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Listener Rule can be imported using the `exampleIdArg`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Listener Rule using the `exampleIdArg`.
For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_listener_rule.example rft-8012925589 + +Using `terraform import`, import VPC Lattice Listener Rule using the `exampleIdArg`. For example: + +```console +% terraform import aws_vpclattice_listener_rule.example rft-8012925589 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_resource_policy.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_resource_policy.html.markdown index 45178fe207b..30e7109bffe 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_resource_policy.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_resource_policy.html.markdown @@ -84,16 +84,30 @@ The following arguments are required: * `resourceArn` - (Required) The ID or Amazon Resource Name (ARN) of the service network or service for which the policy is created. * `policy` - (Required) An IAM policy. The policy string in JSON must not contain newlines or blank lines. -## Attributes Reference +## Attribute Reference -No additional attributes are exported. +This resource exports no additional attributes. ## Import -VPC Lattice Resource Policy can be imported using the `resourceArn`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Resource Policy using the `resourceArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_resource_policy.example rft-8012925589 + +Using `terraform import`, import VPC Lattice Resource Policy using the `resourceArn`. For example: + +```console +% terraform import aws_vpclattice_resource_policy.example rft-8012925589 ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_service.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_service.html.markdown index 821394b3c95..9d155d2a57b 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_service.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_service.html.markdown @@ -51,9 +51,9 @@ The following arguments are optional: * `customDomainName` - (Optional) Custom domain name of the service. * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the service.
* `dnsEntry` - DNS entry of the service. @@ -70,10 +70,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Service can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_service.example svc-06728e2357ea55f8a + +Using `terraform import`, import VPC Lattice Service using the `id`. For example: + +```console +% terraform import aws_vpclattice_service.example svc-06728e2357ea55f8a ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_service_network.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_service_network.html.markdown index d886ba6b479..60aa7fcb288 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_service_network.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_service_network.html.markdown @@ -48,19 +48,33 @@ The following arguments are optional: * `authType` - (Optional) Type of IAM policy. Either `none` or `awsIam`. * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the Service Network. * `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). ## Import -VPC Lattice Service Network can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service Network using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_service_network.example sn-0158f91c1e3358dba + +Using `terraform import`, import VPC Lattice Service Network using the `id`.
For example: + +```console +% terraform import aws_vpclattice_service_network.example sn-0158f91c1e3358dba ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_service_network_service_association.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_service_network_service_association.html.markdown index d13723b688e..68b7dcc10d8 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_service_network_service_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_service_network_service_association.html.markdown @@ -49,9 +49,9 @@ The following arguments are optional: * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Association. * `createdBy` - The account that created the association. @@ -72,10 +72,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Service Network Service Association can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service Network Service Association using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_service_network_service_association.example snsa-05e2474658a88f6ba + +Using `terraform import`, import VPC Lattice Service Network Service Association using the `id`. For example: + +```console +% terraform import aws_vpclattice_service_network_service_association.example snsa-05e2474658a88f6ba ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_service_network_vpc_association.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_service_network_vpc_association.html.markdown index 270df221fe4..af96c2316dc 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_service_network_vpc_association.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_service_network_vpc_association.html.markdown @@ -51,9 +51,9 @@ The following arguments are optional: * `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. * `securityGroupIds` - (Optional) The IDs of the security groups. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - The ARN of the Association. * `createdBy` - The account that created the association. 
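Only the optional arguments of the VPC association appear in this hunk, so here is a hedged, hand-written sketch (not `cdktf convert` output) that assumes the resource's required `serviceNetworkIdentifier` and `vpcIdentifier` arguments from the provider schema and uses placeholder IDs throughout:

```typescript
// Hand-written sketch, not generated by 'cdktf convert'. All IDs are
// placeholders; serviceNetworkIdentifier and vpcIdentifier are assumed from
// the provider schema rather than shown in this hunk.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { VpclatticeServiceNetworkVpcAssociation } from "./.gen/providers/aws/vpclattice-service-network-vpc-association";

class VpcAssociationSketch extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new VpclatticeServiceNetworkVpcAssociation(this, "example", {
      serviceNetworkIdentifier: "sn-0158f91c1e3358dba", // placeholder service network ID
      vpcIdentifier: "vpc-0123456789abcdef0", // placeholder VPC ID
      securityGroupIds: ["sg-0123456789abcdef0"], // placeholder security group
    });
  }
}
```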
@@ -70,10 +70,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Service Network VPC Association can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Service Network VPC Association using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_service_network_vpc_association.example snsa-05e2474658a88f6ba + +Using `terraform import`, import VPC Lattice Service Network VPC Association using the `id`. For example: + +```console +% terraform import aws_vpclattice_service_network_vpc_association.example snsa-05e2474658a88f6ba ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_target_group.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_target_group.html.markdown index 8de2a16340e..fe5877694fe 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_target_group.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_target_group.html.markdown @@ -145,9 +145,9 @@ Health Check (`healthCheck`) supports the following: * `protocolVersion` - (Optional) The protocol version used when performing health checks on targets. The possible protocol versions are `http1` and `http2`. The default is `http1`. * `unhealthyThresholdCount` - (Optional) The number of consecutive failed health checks required before considering a target unhealthy. The range is 2–10. The default is 2. -## Attributes Reference +## Attribute Reference -In addition to all arguments above, the following attributes are exported: +This resource exports the following attributes in addition to the arguments above: * `arn` - ARN of the target group. * `id` - Unique identifier for the target group. @@ -163,10 +163,24 @@ In addition to all arguments above, the following attributes are exported: ## Import -VPC Lattice Target Group can be imported using the `id`, e.g., +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPC Lattice Target Group using the `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} ``` -$ terraform import aws_vpclattice_target_group.example tg-0c11d4dc16ed96bdb + +Using `terraform import`, import VPC Lattice Target Group using the `id`. 
For example: + +```console +% terraform import aws_vpclattice_target_group.example tg-0c11d4dc16ed96bdb ``` - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpclattice_target_group_attachment.html.markdown b/website/docs/cdktf/typescript/r/vpclattice_target_group_attachment.html.markdown index a5fa7a9768e..09bf4c43a9e 100644 --- a/website/docs/cdktf/typescript/r/vpclattice_target_group_attachment.html.markdown +++ b/website/docs/cdktf/typescript/r/vpclattice_target_group_attachment.html.markdown @@ -52,8 +52,8 @@ The following arguments are required: - `id` - (Required) The ID of the target. If the target type of the target group is INSTANCE, this is an instance ID. If the target type is IP , this is an IP address. If the target type is LAMBDA, this is the ARN of the Lambda function. If the target type is ALB, this is the ARN of the Application Load Balancer. - `port` - (Optional) The port on which the target is listening. For HTTP, the default is 80. For HTTPS, the default is 443. -## Attributes Reference +## Attribute Reference -No additional attributes are exported. +This resource exports no additional attributes. - \ No newline at end of file + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpn_connection.html.markdown b/website/docs/cdktf/typescript/r/vpn_connection.html.markdown new file mode 100644 index 00000000000..1b3c254ec35 --- /dev/null +++ b/website/docs/cdktf/typescript/r/vpn_connection.html.markdown @@ -0,0 +1,326 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_connection" +description: |- + Manages a Site-to-Site VPN connection. A Site-to-Site VPN connection is an Internet Protocol security (IPsec) VPN connection between a VPC and an on-premises network. +--- + + + +# Resource: aws_vpn_connection + +Manages a Site-to-Site VPN connection. A Site-to-Site VPN connection is an Internet Protocol security (IPsec) VPN connection between a VPC and an on-premises network. +Any new Site-to-Site VPN connection that you create is an [AWS VPN connection](https://docs.aws.amazon.com/vpn/latest/s2svpn/vpn-categories.html). + +~> **Note:** All arguments including `tunnel1PresharedKey` and `tunnel2PresharedKey` will be stored in the raw state as plain-text. +[Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html). + +~> **Note:** The CIDR blocks in the arguments `tunnel1InsideCidr` and `tunnel2InsideCidr` must have a prefix of /30 and be a part of a specific range. +[Read more about this in the AWS documentation](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_VpnTunnelOptionsSpecification.html). + +## Example Usage + +### EC2 Transit Gateway + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
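+ *
+ * Note: the provider models `bgpAsn` as a string, which is why the numeric
+ * ASN is wrapped in `Token.asString(...)` below.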
+ */ +import { CustomerGateway } from "./.gen/providers/aws/customer-gateway"; +import { Ec2TransitGateway } from "./.gen/providers/aws/ec2-transit-gateway"; +import { VpnConnection } from "./.gen/providers/aws/vpn-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CustomerGateway(this, "example", { + bgpAsn: Token.asString(65000), + ipAddress: "172.0.0.1", + type: "ipsec.1", + }); + const awsEc2TransitGatewayExample = new Ec2TransitGateway( + this, + "example_1", + {} + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEc2TransitGatewayExample.overrideLogicalId("example"); + const awsVpnConnectionExample = new VpnConnection(this, "example_2", { + customerGatewayId: example.id, + transitGatewayId: Token.asString(awsEc2TransitGatewayExample.id), + type: example.type, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpnConnectionExample.overrideLogicalId("example"); + } +} + +``` + +### Virtual Private Gateway + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CustomerGateway } from "./.gen/providers/aws/customer-gateway"; +import { Vpc } from "./.gen/providers/aws/vpc"; +import { VpnConnection } from "./.gen/providers/aws/vpn-connection"; +import { VpnGateway } from "./.gen/providers/aws/vpn-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const customerGateway = new CustomerGateway(this, "customer_gateway", { + bgpAsn: Token.asString(65000), + ipAddress: "172.0.0.1", + type: "ipsec.1", + }); + const vpc = new Vpc(this, "vpc", { + cidrBlock: "10.0.0.0/16", + }); + const vpnGateway = new VpnGateway(this, "vpn_gateway", { + vpcId: vpc.id, + }); + new VpnConnection(this, "main", { + customerGatewayId: customerGateway.id, + staticRoutesOnly: true, + type: "ipsec.1", + vpnGatewayId: vpnGateway.id, + }); + } +} + +``` + +### AWS Site to Site Private VPN + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
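+ *
+ * Topology sketch: a Direct Connect gateway is associated with a transit
+ * gateway; the data source then looks up the resulting attachment, and the
+ * VPN runs over it with a private outside IP address ("PrivateIpv4").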
+ */ +import { CustomerGateway } from "./.gen/providers/aws/customer-gateway"; +import { DataAwsEc2TransitGatewayDxGatewayAttachment } from "./.gen/providers/aws/data-aws-ec2-transit-gateway-dx-gateway-attachment"; +import { DxGateway } from "./.gen/providers/aws/dx-gateway"; +import { DxGatewayAssociation } from "./.gen/providers/aws/dx-gateway-association"; +import { Ec2TransitGateway } from "./.gen/providers/aws/ec2-transit-gateway"; +import { VpnConnection } from "./.gen/providers/aws/vpn-connection"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CustomerGateway(this, "example", { + bgpAsn: Token.asString(64514), + ipAddress: "10.0.0.1", + tags: { + Name: "terraform_ipsec_vpn_example", + }, + type: "ipsec.1", + }); + const awsDxGatewayExample = new DxGateway(this, "example_1", { + amazonSideAsn: "64512", + name: "terraform_ipsec_vpn_example", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxGatewayExample.overrideLogicalId("example"); + const awsEc2TransitGatewayExample = new Ec2TransitGateway( + this, + "example_2", + { + amazonSideAsn: Token.asNumber("64513"), + description: "terraform_ipsec_vpn_example", + transitGatewayCidrBlocks: ["10.0.0.0/24"], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsEc2TransitGatewayExample.overrideLogicalId("example"); + const awsDxGatewayAssociationExample = new DxGatewayAssociation( + this, + "example_3", + { + allowedPrefixes: ["10.0.0.0/8"], + associatedGatewayId: Token.asString(awsEc2TransitGatewayExample.id), + dxGatewayId: Token.asString(awsDxGatewayExample.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsDxGatewayAssociationExample.overrideLogicalId("example"); + const dataAwsEc2TransitGatewayDxGatewayAttachmentExample = + new DataAwsEc2TransitGatewayDxGatewayAttachment(this, "example_4", { + dependsOn: [awsDxGatewayAssociationExample], + dxGatewayId: Token.asString(awsDxGatewayExample.id), + transitGatewayId: Token.asString(awsEc2TransitGatewayExample.id), + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + dataAwsEc2TransitGatewayDxGatewayAttachmentExample.overrideLogicalId( + "example" + ); + const awsVpnConnectionExample = new VpnConnection(this, "example_5", { + customerGatewayId: example.id, + outsideIpAddressType: "PrivateIpv4", + tags: { + Name: "terraform_ipsec_vpn_example", + }, + transitGatewayId: Token.asString(awsEc2TransitGatewayExample.id), + transportTransitGatewayAttachmentId: Token.asString( + dataAwsEc2TransitGatewayDxGatewayAttachmentExample.id + ), + type: "ipsec.1", + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsVpnConnectionExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `customerGatewayId` - (Required) The ID of the customer gateway. +* `type` - (Required) The type of VPN connection. The only type AWS supports at this time is "ipsec.1". +* `transitGatewayId` - (Optional) The ID of the EC2 Transit Gateway. +* `vpnGatewayId` - (Optional) The ID of the Virtual Private Gateway. 
+* `staticRoutesOnly` - (Optional, Default `false`) Whether the VPN connection uses static routes exclusively. Static routes must be used for devices that don't support BGP.
+* `enableAcceleration` - (Optional, Default `false`) Indicates whether to enable acceleration for the VPN connection. Supports only EC2 Transit Gateway.
+* `tags` - (Optional) Tags to apply to the connection. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `localIpv4NetworkCidr` - (Optional, Default `0.0.0.0/0`) The IPv4 CIDR on the customer gateway (on-premises) side of the VPN connection.
+* `localIpv6NetworkCidr` - (Optional, Default `::/0`) The IPv6 CIDR on the customer gateway (on-premises) side of the VPN connection.
+* `outsideIpAddressType` - (Optional, Default `PublicIpv4`) Indicates whether this is a public Site-to-Site VPN connection or a private one over AWS Direct Connect. Valid values are `PublicIpv4 | PrivateIpv4`.
+* `remoteIpv4NetworkCidr` - (Optional, Default `0.0.0.0/0`) The IPv4 CIDR on the AWS side of the VPN connection.
+* `remoteIpv6NetworkCidr` - (Optional, Default `::/0`) The IPv6 CIDR on the AWS side of the VPN connection.
+* `transportTransitGatewayAttachmentId` - (Required when `outsideIpAddressType` is set to `PrivateIpv4`) The attachment ID of the Transit Gateway attachment to the Direct Connect Gateway. The ID is obtained through a data source only.
+* `tunnelInsideIpVersion` - (Optional, Default `ipv4`) Indicates whether the VPN tunnels process IPv4 or IPv6 traffic. Valid values are `ipv4 | ipv6`. `ipv6` supports only EC2 Transit Gateway.
+* `tunnel1InsideCidr` - (Optional) The CIDR block of the inside IP addresses for the first VPN tunnel. Valid value is a size /30 CIDR block from the 169.254.0.0/16 range.
+* `tunnel2InsideCidr` - (Optional) The CIDR block of the inside IP addresses for the second VPN tunnel. Valid value is a size /30 CIDR block from the 169.254.0.0/16 range.
+* `tunnel1InsideIpv6Cidr` - (Optional) The range of inside IPv6 addresses for the first VPN tunnel. Supports only EC2 Transit Gateway. Valid value is a size /126 CIDR block from the local fd00::/8 range.
+* `tunnel2InsideIpv6Cidr` - (Optional) The range of inside IPv6 addresses for the second VPN tunnel. Supports only EC2 Transit Gateway. Valid value is a size /126 CIDR block from the local fd00::/8 range.
+* `tunnel1PresharedKey` - (Optional) The preshared key of the first VPN tunnel. The preshared key must be between 8 and 64 characters in length and cannot start with zero (0). Allowed characters are alphanumeric characters, periods (.) and underscores (_).
+* `tunnel2PresharedKey` - (Optional) The preshared key of the second VPN tunnel. The preshared key must be between 8 and 64 characters in length and cannot start with zero (0). Allowed characters are alphanumeric characters, periods (.) and underscores (_).
+* `tunnel1DpdTimeoutAction` - (Optional, Default `clear`) The action to take after DPD timeout occurs for the first VPN tunnel. Specify `restart` to restart the IKE initiation. Specify `clear` to end the IKE session. Valid values are `clear | none | restart`.
+* `tunnel2DpdTimeoutAction` - (Optional, Default `clear`) The action to take after DPD timeout occurs for the second VPN tunnel. Specify `restart` to restart the IKE initiation. Specify `clear` to end the IKE session. Valid values are `clear | none | restart`.
+* `tunnel1DpdTimeoutSeconds` - (Optional, Default `30`) The number of seconds after which a DPD timeout occurs for the first VPN tunnel. Valid value is equal to or higher than `30`.
+* `tunnel2DpdTimeoutSeconds` - (Optional, Default `30`) The number of seconds after which a DPD timeout occurs for the second VPN tunnel. Valid value is equal to or higher than `30`.
+* `tunnel1EnableTunnelLifecycleControl` - (Optional, Default `false`) Turn on or off the tunnel endpoint lifecycle control feature for the first VPN tunnel. Valid values are `true | false`.
+* `tunnel2EnableTunnelLifecycleControl` - (Optional, Default `false`) Turn on or off the tunnel endpoint lifecycle control feature for the second VPN tunnel. Valid values are `true | false`.
+* `tunnel1IkeVersions` - (Optional) The IKE versions that are permitted for the first VPN tunnel. Valid values are `ikev1 | ikev2`.
+* `tunnel2IkeVersions` - (Optional) The IKE versions that are permitted for the second VPN tunnel. Valid values are `ikev1 | ikev2`.
+* `tunnel1LogOptions` - (Optional) Options for logging VPN tunnel activity. See [Log Options](#log-options) below for more details.
+* `tunnel2LogOptions` - (Optional) Options for logging VPN tunnel activity. See [Log Options](#log-options) below for more details.
+* `tunnel1Phase1DhGroupNumbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the first VPN tunnel for phase 1 IKE negotiations. Valid values are `2 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`.
+* `tunnel2Phase1DhGroupNumbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the second VPN tunnel for phase 1 IKE negotiations. Valid values are `2 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`.
+* `tunnel1Phase1EncryptionAlgorithms` - (Optional) List of one or more encryption algorithms that are permitted for the first VPN tunnel for phase 1 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`.
+* `tunnel2Phase1EncryptionAlgorithms` - (Optional) List of one or more encryption algorithms that are permitted for the second VPN tunnel for phase 1 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`.
+* `tunnel1Phase1IntegrityAlgorithms` - (Optional) One or more integrity algorithms that are permitted for the first VPN tunnel for phase 1 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`.
+* `tunnel2Phase1IntegrityAlgorithms` - (Optional) One or more integrity algorithms that are permitted for the second VPN tunnel for phase 1 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`.
+* `tunnel1Phase1LifetimeSeconds` - (Optional, Default `28800`) The lifetime for phase 1 of the IKE negotiation for the first VPN tunnel, in seconds. Valid value is between `900` and `28800`.
+* `tunnel2Phase1LifetimeSeconds` - (Optional, Default `28800`) The lifetime for phase 1 of the IKE negotiation for the second VPN tunnel, in seconds. Valid value is between `900` and `28800`.
+* `tunnel1Phase2DhGroupNumbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the first VPN tunnel for phase 2 IKE negotiations. Valid values are `2 | 5 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`.
+* `tunnel2Phase2DhGroupNumbers` - (Optional) List of one or more Diffie-Hellman group numbers that are permitted for the second VPN tunnel for phase 2 IKE negotiations.
Valid values are `2 | 5 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24`.
+* `tunnel1Phase2EncryptionAlgorithms` - (Optional) List of one or more encryption algorithms that are permitted for the first VPN tunnel for phase 2 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`.
+* `tunnel2Phase2EncryptionAlgorithms` - (Optional) List of one or more encryption algorithms that are permitted for the second VPN tunnel for phase 2 IKE negotiations. Valid values are `AES128 | AES256 | AES128-GCM-16 | AES256-GCM-16`.
+* `tunnel1Phase2IntegrityAlgorithms` - (Optional) List of one or more integrity algorithms that are permitted for the first VPN tunnel for phase 2 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`.
+* `tunnel2Phase2IntegrityAlgorithms` - (Optional) List of one or more integrity algorithms that are permitted for the second VPN tunnel for phase 2 IKE negotiations. Valid values are `SHA1 | SHA2-256 | SHA2-384 | SHA2-512`.
+* `tunnel1Phase2LifetimeSeconds` - (Optional, Default `3600`) The lifetime for phase 2 of the IKE negotiation for the first VPN tunnel, in seconds. Valid value is between `900` and `3600`.
+* `tunnel2Phase2LifetimeSeconds` - (Optional, Default `3600`) The lifetime for phase 2 of the IKE negotiation for the second VPN tunnel, in seconds. Valid value is between `900` and `3600`.
+* `tunnel1RekeyFuzzPercentage` - (Optional, Default `100`) The percentage of the rekey window for the first VPN tunnel (determined by `tunnel1RekeyMarginTimeSeconds`) during which the rekey time is randomly selected. Valid value is between `0` and `100`.
+* `tunnel2RekeyFuzzPercentage` - (Optional, Default `100`) The percentage of the rekey window for the second VPN tunnel (determined by `tunnel2RekeyMarginTimeSeconds`) during which the rekey time is randomly selected. Valid value is between `0` and `100`.
+* `tunnel1RekeyMarginTimeSeconds` - (Optional, Default `540`) The margin time, in seconds, before the phase 2 lifetime expires, during which the AWS side of the first VPN connection performs an IKE rekey. The exact time of the rekey is randomly selected based on the value for `tunnel1RekeyFuzzPercentage`. Valid value is between `60` and half of `tunnel1Phase2LifetimeSeconds`.
+* `tunnel2RekeyMarginTimeSeconds` - (Optional, Default `540`) The margin time, in seconds, before the phase 2 lifetime expires, during which the AWS side of the second VPN connection performs an IKE rekey. The exact time of the rekey is randomly selected based on the value for `tunnel2RekeyFuzzPercentage`. Valid value is between `60` and half of `tunnel2Phase2LifetimeSeconds`.
+* `tunnel1ReplayWindowSize` - (Optional, Default `1024`) The number of packets in an IKE replay window for the first VPN tunnel. Valid value is between `64` and `2048`.
+* `tunnel2ReplayWindowSize` - (Optional, Default `1024`) The number of packets in an IKE replay window for the second VPN tunnel. Valid value is between `64` and `2048`.
+* `tunnel1StartupAction` - (Optional, Default `add`) The action to take when establishing the tunnel for the first VPN connection. By default, your customer gateway device must initiate the IKE negotiation and bring up the tunnel. Specify `start` for AWS to initiate the IKE negotiation. Valid values are `add | start`.
+* `tunnel2StartupAction` - (Optional, Default `add`) The action to take when establishing the tunnel for the second VPN connection.
By default, your customer gateway device must initiate the IKE negotiation and bring up the tunnel. Specify `start` for AWS to initiate the IKE negotiation. Valid values are `add | start`.
+
+### Log Options
+
+The `tunnel1LogOptions` and `tunnel2LogOptions` blocks support the following arguments:
+
+* `cloudwatchLogOptions` - (Optional) Options for sending VPN tunnel logs to CloudWatch. See [CloudWatch Log Options](#cloudwatch-log-options) below for more details.
+
+### CloudWatch Log Options
+
+The `cloudwatchLogOptions` block supports the following arguments:
+
+* `logEnabled` - (Optional) Enable or disable the VPN tunnel logging feature. The default is `false`.
+* `logGroupArn` - (Optional) The Amazon Resource Name (ARN) of the CloudWatch log group to send logs to.
+* `logOutputFormat` - (Optional) Sets the log output format. Possible values are `json` and `text`. The default is `json`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the VPN Connection.
+* `id` - The Amazon-assigned ID of the VPN connection.
+* `coreNetworkArn` - The ARN of the core network.
+* `coreNetworkAttachmentArn` - The ARN of the core network attachment.
+* `customerGatewayConfiguration` - The configuration information for the VPN connection's customer gateway (in the native XML format).
+* `customerGatewayId` - The ID of the customer gateway to which the connection is attached.
+* `routes` - The static routes associated with the VPN connection. Detailed below.
+* `staticRoutesOnly` - Whether the VPN connection uses static routes exclusively.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `transitGatewayAttachmentId` - When associated with an EC2 Transit Gateway (`transitGatewayId` argument), the attachment ID. See also the [`awsEc2Tag` resource](/docs/providers/aws/r/ec2_tag.html) for tagging the EC2 Transit Gateway VPN Attachment.
+* `tunnel1Address` - The public IP address of the first VPN tunnel.
+* `tunnel1CgwInsideAddress` - The RFC 6890 link-local address of the first VPN tunnel (Customer Gateway Side).
+* `tunnel1VgwInsideAddress` - The RFC 6890 link-local address of the first VPN tunnel (VPN Gateway Side).
+* `tunnel1PresharedKey` - The preshared key of the first VPN tunnel.
+* `tunnel1BgpAsn` - The BGP ASN of the first VPN tunnel.
+* `tunnel1BgpHoldtime` - The BGP holdtime of the first VPN tunnel.
+* `tunnel2Address` - The public IP address of the second VPN tunnel.
+* `tunnel2CgwInsideAddress` - The RFC 6890 link-local address of the second VPN tunnel (Customer Gateway Side).
+* `tunnel2VgwInsideAddress` - The RFC 6890 link-local address of the second VPN tunnel (VPN Gateway Side).
+* `tunnel2PresharedKey` - The preshared key of the second VPN tunnel.
+* `tunnel2BgpAsn` - The BGP ASN of the second VPN tunnel.
+* `tunnel2BgpHoldtime` - The BGP holdtime of the second VPN tunnel.
+* `vgwTelemetry` - Telemetry for the VPN tunnels. Detailed below.
+* `vpnGatewayId` - The ID of the virtual private gateway to which the connection is attached.
+
+### routes
+
+* `destinationCidrBlock` - The CIDR block associated with the local subnet of the customer data center.
+* `source` - Indicates how the routes were provided.
+* `state` - The current state of the static route.
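+
+As an illustrative sketch (not part of the generated example), the exported tunnel attributes can be surfaced as stack outputs; the gateway IDs below are hypothetical placeholders:
+
+```typescript
+// A minimal sketch, not produced by 'cdktf convert': it surfaces the
+// tunnel addresses of a VPN connection. The gateway IDs are placeholders.
+import { Construct } from "constructs";
+import { TerraformOutput, TerraformStack } from "cdktf";
+import { VpnConnection } from "./.gen/providers/aws/vpn-connection";
+class VpnOutputsSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const main = new VpnConnection(this, "main", {
+      customerGatewayId: "cgw-0123456789abcdef0", // hypothetical
+      type: "ipsec.1",
+      vpnGatewayId: "vgw-0123456789abcdef0", // hypothetical
+    });
+    // Outside (public) address of the first tunnel.
+    new TerraformOutput(this, "tunnel1_address", {
+      value: main.tunnel1Address,
+    });
+    // Link-local inside address on the customer gateway side of tunnel 1.
+    new TerraformOutput(this, "tunnel1_cgw_inside_address", {
+      value: main.tunnel1CgwInsideAddress,
+    });
+  }
+}
+```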
+ +### vgw_telemetry + +* `acceptedRouteCount` - The number of accepted routes. +* `certificateArn` - The Amazon Resource Name (ARN) of the VPN tunnel endpoint certificate. +* `lastStatusChange` - The date and time of the last change in status. +* `outsideIpAddress` - The Internet-routable IP address of the virtual private gateway's outside interface. +* `status` - The status of the VPN tunnel. +* `statusMessage` - If an error occurs, a description of the error. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPN Connections using the VPN connection `id`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import VPN Connections using the VPN connection `id`. For example: + +```console +% terraform import aws_vpn_connection.testvpnconnection vpn-40f41529 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpn_connection_route.html.markdown b/website/docs/cdktf/typescript/r/vpn_connection_route.html.markdown new file mode 100644 index 00000000000..e06e3b27dab --- /dev/null +++ b/website/docs/cdktf/typescript/r/vpn_connection_route.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_connection_route" +description: |- + Provides a static route between a VPN connection and a customer gateway. +--- + + + +# Resource: aws_vpn_connection_route + +Provides a static route between a VPN connection and a customer gateway. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CustomerGateway } from "./.gen/providers/aws/customer-gateway"; +import { Vpc } from "./.gen/providers/aws/vpc"; +import { VpnConnection } from "./.gen/providers/aws/vpn-connection"; +import { VpnConnectionRoute } from "./.gen/providers/aws/vpn-connection-route"; +import { VpnGateway } from "./.gen/providers/aws/vpn-gateway"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const customerGateway = new CustomerGateway(this, "customer_gateway", { + bgpAsn: Token.asString(65000), + ipAddress: "172.0.0.1", + type: "ipsec.1", + }); + const vpc = new Vpc(this, "vpc", { + cidrBlock: "10.0.0.0/16", + }); + const vpnGateway = new VpnGateway(this, "vpn_gateway", { + vpcId: vpc.id, + }); + const main = new VpnConnection(this, "main", { + customerGatewayId: customerGateway.id, + staticRoutesOnly: true, + type: "ipsec.1", + vpnGatewayId: vpnGateway.id, + }); + new VpnConnectionRoute(this, "office", { + destinationCidrBlock: "192.168.10.0/24", + vpnConnectionId: main.id, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `destinationCidrBlock` - (Required) The CIDR block associated with the local subnet of the customer network. +* `vpnConnectionId` - (Required) The ID of the VPN connection. 
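+
+When the on-premises network advertises several prefixes, one route resource is needed per CIDR block. A minimal sketch (assuming the `main` VPN connection and the imports from the example above; the prefixes are hypothetical):
+
+```typescript
+// A sketch, not generated by 'cdktf convert': declares one static route
+// per on-premises prefix against the `main` VPN connection from above.
+const officeCidrs = ["192.168.10.0/24", "192.168.20.0/24"];
+officeCidrs.forEach((cidr, i) => {
+  new VpnConnectionRoute(this, `office_${i}`, {
+    destinationCidrBlock: cidr,
+    vpnConnectionId: main.id,
+  });
+});
+```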
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `destinationCidrBlock` - The CIDR block associated with the local subnet of the customer network.
+* `vpnConnectionId` - The ID of the VPN connection.
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/vpn_gateway.html.markdown b/website/docs/cdktf/typescript/r/vpn_gateway.html.markdown
new file mode 100644
index 00000000000..d77e32b75ab
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/vpn_gateway.html.markdown
@@ -0,0 +1,79 @@
+---
+subcategory: "VPN (Site-to-Site)"
+layout: "aws"
+page_title: "AWS: aws_vpn_gateway"
+description: |-
+  Provides a resource to create a VPC VPN Gateway.
+---
+
+
+# Resource: aws_vpn_gateway
+
+Provides a resource to create a VPC VPN Gateway.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Vpc } from "./.gen/providers/aws/vpc";
+import { VpnGateway } from "./.gen/providers/aws/vpn-gateway";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // The VPC the gateway attaches to; defined here so the example is
+    // self-contained (the original snippet referenced an existing "main" VPC).
+    const main = new Vpc(this, "main", {
+      cidrBlock: "10.0.0.0/16",
+    });
+    new VpnGateway(this, "vpn_gw", {
+      tags: {
+        Name: "main",
+      },
+      vpcId: main.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `vpcId` - (Optional) The ID of the VPC in which to create the VPN Gateway.
+* `availabilityZone` - (Optional) The Availability Zone for the virtual private gateway.
+* `tags` - (Optional) A map of tags to assign to the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `amazonSideAsn` - (Optional) The Autonomous System Number (ASN) for the Amazon side of the gateway. If you don't specify an ASN, the virtual private gateway is created with the default ASN.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - Amazon Resource Name (ARN) of the VPN Gateway.
+* `id` - The ID of the VPN Gateway.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import VPN Gateways using the VPN gateway `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import VPN Gateways using the VPN gateway `id`.
For example: + +```console +% terraform import aws_vpn_gateway.testvpngateway vgw-9a4cacf3 +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpn_gateway_attachment.html.markdown b/website/docs/cdktf/typescript/r/vpn_gateway_attachment.html.markdown new file mode 100644 index 00000000000..af4e855e6db --- /dev/null +++ b/website/docs/cdktf/typescript/r/vpn_gateway_attachment.html.markdown @@ -0,0 +1,75 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_gateway_attachment" +description: |- + Provides a Virtual Private Gateway attachment resource. +--- + + + +# Resource: aws_vpn_gateway_attachment + +Provides a Virtual Private Gateway attachment resource, allowing for an existing +hardware VPN gateway to be attached and/or detached from a VPC. + +-> **Note:** The [`awsVpnGateway`](vpn_gateway.html) +resource can also automatically attach the Virtual Private Gateway it creates +to an existing VPC by setting the [`vpcId`](vpn_gateway.html#vpc_id) attribute accordingly. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Vpc } from "./.gen/providers/aws/vpc"; +import { VpnGateway } from "./.gen/providers/aws/vpn-gateway"; +import { VpnGatewayAttachment } from "./.gen/providers/aws/vpn-gateway-attachment"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const network = new Vpc(this, "network", { + cidrBlock: "10.0.0.0/16", + }); + const vpn = new VpnGateway(this, "vpn", { + tags: { + Name: "example-vpn-gateway", + }, + }); + new VpnGatewayAttachment(this, "vpn_attachment", { + vpcId: network.id, + vpnGatewayId: vpn.id, + }); + } +} + +``` + +See [Virtual Private Cloud](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Introduction.html) +and [Virtual Private Gateway](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_VPN.html) user +guides for more information. + +## Argument Reference + +This resource supports the following arguments: + +* `vpcId` - (Required) The ID of the VPC. +* `vpnGatewayId` - (Required) The ID of the Virtual Private Gateway. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `vpcId` - The ID of the VPC that Virtual Private Gateway is attached to. +* `vpnGatewayId` - The ID of the Virtual Private Gateway. + +## Import + +You cannot import this resource. + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/vpn_gateway_route_propagation.html.markdown b/website/docs/cdktf/typescript/r/vpn_gateway_route_propagation.html.markdown new file mode 100644 index 00000000000..ba8a7ecbe45 --- /dev/null +++ b/website/docs/cdktf/typescript/r/vpn_gateway_route_propagation.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "VPN (Site-to-Site)" +layout: "aws" +page_title: "AWS: aws_vpn_gateway_route_propagation" +description: |- + Requests automatic route propagation between a VPN gateway and a route table. +--- + + + +# Resource: aws_vpn_gateway_route_propagation + +Requests automatic route propagation between a VPN gateway and a route table. + +~> **Note:** This resource should not be used with a route table that has +the `propagatingVgws` argument set. 
If that argument is set, any route
+propagation not explicitly listed in its value will be removed.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { RouteTable } from "./.gen/providers/aws/route-table";
+import { Vpc } from "./.gen/providers/aws/vpc";
+import { VpnGateway } from "./.gen/providers/aws/vpn-gateway";
+import { VpnGatewayRoutePropagation } from "./.gen/providers/aws/vpn-gateway-route-propagation";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // The route table and VPN gateway are defined here so the example is
+    // self-contained; the original snippet referenced existing resources.
+    const example = new Vpc(this, "example", {
+      cidrBlock: "10.0.0.0/16",
+    });
+    const awsRouteTableExample = new RouteTable(this, "example_1", {
+      vpcId: example.id,
+    });
+    const awsVpnGatewayExample = new VpnGateway(this, "example_2", {
+      vpcId: example.id,
+    });
+    new VpnGatewayRoutePropagation(this, "example_3", {
+      routeTableId: awsRouteTableExample.id,
+      vpnGatewayId: awsVpnGatewayExample.id,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `vpnGatewayId` - The id of the `awsVpnGateway` to propagate routes from.
+* `routeTableId` - The id of the `awsRouteTable` to propagate routes into.
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `2m`)
+- `delete` - (Default `2m`)
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/waf_byte_match_set.html.markdown b/website/docs/cdktf/typescript/r/waf_byte_match_set.html.markdown
new file mode 100644
index 00000000000..64f289b9f07
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/waf_byte_match_set.html.markdown
@@ -0,0 +1,119 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_byte_match_set"
+description: |-
+  Provides an AWS WAF Byte Match Set resource.
+---
+
+
+# Resource: aws_waf_byte_match_set
+
+Provides a WAF Byte Match Set Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafByteMatchSet } from "./.gen/providers/aws/waf-byte-match-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WafByteMatchSet(this, "byte_set", {
+      byteMatchTuples: [
+        {
+          fieldToMatch: {
+            data: "referer",
+            type: "HEADER",
+          },
+          positionalConstraint: "CONTAINS",
+          targetString: "badrefer1",
+          textTransformation: "NONE",
+        },
+      ],
+      name: "tf_waf_byte_match_set",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name or description of the Byte Match Set.
+* `byteMatchTuples` - Specifies the bytes (typically a string that corresponds
+ with ASCII characters) that you want to search for in web requests,
+ the location in requests that you want to search, and other settings.
+
+## Nested blocks
+
+### `byteMatchTuples`
+
+#### Arguments
+
+* `fieldToMatch` - (Required) The part of a web request that you want to search, such as a specified header or a query string.
+* `positionalConstraint` - (Required) Within the portion of a web request that you want to search
+ (for example, in the query string, if any), specify where you want to search.
+ e.g., `contains`, `containsWord` or `exactly`.
+ See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-PositionalConstraint) + for all supported values. +* `targetString` - (Optional) The value that you want to search for within the field specified by `fieldToMatch`, e.g., `badrefer1`. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_waf_ByteMatchTuple.html) + for all supported values. +* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `targetString` before inspecting a request for a match. + e.g., `cmdLine`, `htmlEntityDecode` or `none`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-TextTransformation) + for all supported values. + +### `fieldToMatch` + +#### Arguments + +* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `header`, `method` or `body`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Byte Match Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Byte Match Set using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Byte Match Set using the id. For example: + +```console +% terraform import aws_waf_byte_match_set.byte_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/waf_geo_match_set.html.markdown b/website/docs/cdktf/typescript/r/waf_geo_match_set.html.markdown new file mode 100644 index 00000000000..c4d4dfecb77 --- /dev/null +++ b/website/docs/cdktf/typescript/r/waf_geo_match_set.html.markdown @@ -0,0 +1,96 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_geo_match_set" +description: |- + Provides a AWS WAF GeoMatchSet resource. +--- + + + +# Resource: aws_waf_geo_match_set + +Provides a WAF Geo Match Set Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { WafGeoMatchSet } from "./.gen/providers/aws/waf-geo-match-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafGeoMatchSet(this, "geo_match_set", { + geoMatchConstraint: [ + { + type: "Country", + value: "US", + }, + { + type: "Country", + value: "CA", + }, + ], + name: "geo_match_set", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the GeoMatchSet. +* `geoMatchConstraint` - (Optional) The GeoMatchConstraint objects which contain the country that you want AWS WAF to search for. + +## Nested Blocks + +### `geoMatchConstraint` + +#### Arguments + +* `type` - (Required) The type of geographical area you want AWS WAF to search for. Currently Country is the only valid value. +* `value` - (Required) The country that you want AWS WAF to search for. + This is the two-letter country code, e.g., `us`, `ca`, `ru`, `cn`, etc. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchConstraint.html) for all supported values. + +## Remarks + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF GeoMatchSet. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Geo Match Set using their ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Geo Match Set using their ID. For example: + +```console +% terraform import aws_waf_geo_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/waf_ipset.html.markdown b/website/docs/cdktf/typescript/r/waf_ipset.html.markdown new file mode 100644 index 00000000000..70992e65d78 --- /dev/null +++ b/website/docs/cdktf/typescript/r/waf_ipset.html.markdown @@ -0,0 +1,92 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_ipset" +description: |- + Provides a AWS WAF IPSet resource. +--- + + + +# Resource: aws_waf_ipset + +Provides a WAF IPSet Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafIpset } from "./.gen/providers/aws/waf-ipset"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafIpset(this, "ipset", { + ipSetDescriptors: [ + { + type: "IPV4", + value: "192.0.7.0/24", + }, + { + type: "IPV4", + value: "10.16.16.0/16", + }, + ], + name: "tfIPSet", + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the IPSet. 
+* `ipSetDescriptors` - (Optional) One or more pairs specifying the IP address type (IPV4 or IPV6) and the IP address range (in CIDR format) from which web requests originate.
+
+## Nested Blocks
+
+### `ipSetDescriptors`
+
+#### Arguments
+
+* `type` - (Required) Type of the IP address - `ipv4` or `ipv6`.
+* `value` - (Required) An IPv4 or IPv6 address specified via CIDR notation. E.g., `192.0.2.44/32` or `1111:0000:0000:0000:0000:0000:0000:0000/64`
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF IPSet.
+* `arn` - The ARN of the WAF IPSet.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF IPSets using their ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF IPSets using their ID. For example:
+
+```console
+% terraform import aws_waf_ipset.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/waf_rate_based_rule.html.markdown b/website/docs/cdktf/typescript/r/waf_rate_based_rule.html.markdown
new file mode 100644
index 00000000000..1b53e9ca9b9
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/waf_rate_based_rule.html.markdown
@@ -0,0 +1,116 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_rate_based_rule"
+description: |-
+  Provides an AWS WAF rule resource.
+---
+
+
+# Resource: aws_waf_rate_based_rule
+
+Provides a WAF Rate Based Rule Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafIpset } from "./.gen/providers/aws/waf-ipset";
+import { WafRateBasedRule } from "./.gen/providers/aws/waf-rate-based-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ipset = new WafIpset(this, "ipset", {
+      ipSetDescriptors: [
+        {
+          type: "IPV4",
+          value: "192.0.7.0/24",
+        },
+      ],
+      name: "tfIPSet",
+    });
+    new WafRateBasedRule(this, "wafrule", {
+      dependsOn: [ipset],
+      metricName: "tfWAFRule",
+      name: "tfWAFRule",
+      predicates: [
+        {
+          dataId: ipset.id,
+          negated: false,
+          type: "IPMatch",
+        },
+      ],
+      rateKey: "IP",
+      rateLimit: 100,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `metricName` - (Required) The name or description for the Amazon CloudWatch metric of this rule.
+* `name` - (Required) The name or description of the rule.
+* `rateKey` - (Required) Valid value is IP.
+* `rateLimit` - (Required) The maximum number of requests, which have an identical value in the field specified by the RateKey, allowed in a five-minute period. Minimum value is 100.
+* `predicates` - (Optional) The objects to include in a rule (documented below).
+* `tags` - (Optional) Key-value map of resource tags.
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Blocks
+
+### `predicates`
+
+See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information.
+
+#### Arguments
+
+* `negated` - (Required) Set this to `false` if you want to allow, block, or count requests
+ based on the settings in the specified `byteMatchSet`, `ipSet`, `sqlInjectionMatchSet`, `xssMatchSet`, or `sizeConstraintSet`.
+ For example, if an IPSet includes the IP address `192.0.2.44`, AWS WAF will allow or block requests based on that IP address.
+ If set to `true`, AWS WAF will allow, block, or count requests based on all IP addresses _except_ `192.0.2.44`.
+* `dataId` - (Required) A unique identifier for a predicate in the rule, such as Byte Match Set ID or IPSet ID.
+* `type` - (Required) The type of predicate in a rule. Valid values: `byteMatch`, `geoMatch`, `ipMatch`, `regexMatch`, `sizeConstraint`, `sqlInjectionMatch`, or `xssMatch`.
+
+## Remarks
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF rule.
+* `arn` - Amazon Resource Name (ARN)
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Rate Based Rule using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Rate Based Rule using the id. For example:
+
+```console
+% terraform import aws_waf_rate_based_rule.wafrule a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/waf_regex_match_set.html.markdown b/website/docs/cdktf/typescript/r/waf_regex_match_set.html.markdown
new file mode 100644
index 00000000000..47614765a22
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/waf_regex_match_set.html.markdown
@@ -0,0 +1,110 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_regex_match_set"
+description: |-
+  Provides an AWS WAF Regex Match Set resource.
+---
+
+
+# Resource: aws_waf_regex_match_set
+
+Provides a WAF Regex Match Set Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
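+ *
+ * The match tuple below references the pattern set created first, via its
+ * computed `id` (the `regexPatternSetId` argument).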
+ */ +import { WafRegexMatchSet } from "./.gen/providers/aws/waf-regex-match-set"; +import { WafRegexPatternSet } from "./.gen/providers/aws/waf-regex-pattern-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new WafRegexPatternSet(this, "example", { + name: "example", + regexPatternStrings: ["one", "two"], + }); + const awsWafRegexMatchSetExample = new WafRegexMatchSet(this, "example_1", { + name: "example", + regexMatchTuple: [ + { + fieldToMatch: { + data: "User-Agent", + type: "HEADER", + }, + regexPatternSetId: example.id, + textTransformation: "NONE", + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWafRegexMatchSetExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Match Set. +* `regexMatchTuple` - (Required) The regular expression pattern that you want AWS WAF to search for in web requests, the location in requests that you want AWS WAF to search, and other settings. See below. + +### Nested Arguments + +#### `regexMatchTuple` + +* `fieldToMatch` - (Required) The part of a web request that you want to search, such as a specified header or a query string. +* `regexPatternSetId` - (Required) The ID of a [Regex Pattern Set](/docs/providers/aws/r/waf_regex_pattern_set.html). +* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + e.g., `cmdLine`, `htmlEntityDecode` or `none`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-TextTransformation) + for all supported values. + +#### `fieldToMatch` + +* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `header`, `method` or `body`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regex Match Set. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regex Match Set using their ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Regex Match Set using their ID. 
For example: + +```console +% terraform import aws_waf_regex_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/waf_regex_pattern_set.html.markdown b/website/docs/cdktf/typescript/r/waf_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..2cbcb5e4181 --- /dev/null +++ b/website/docs/cdktf/typescript/r/waf_regex_pattern_set.html.markdown @@ -0,0 +1,74 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_regex_pattern_set" +description: |- + Provides a AWS WAF Regex Pattern Set resource. +--- + + + +# Resource: aws_waf_regex_pattern_set + +Provides a WAF Regex Pattern Set Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafRegexPatternSet } from "./.gen/providers/aws/waf-regex-pattern-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafRegexPatternSet(this, "example", { + name: "tf_waf_regex_pattern_set", + regexPatternStrings: ["one", "two"], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Pattern Set. +* `regexPatternStrings` - (Optional) A list of regular expression (regex) patterns that you want AWS WAF to search for, such as `b[a@]dB[o0]t`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regex Pattern Set. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF Regex Pattern Set using their ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS WAF Regex Pattern Set using their ID. For example: + +```console +% terraform import aws_waf_regex_pattern_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/waf_rule.html.markdown b/website/docs/cdktf/typescript/r/waf_rule.html.markdown new file mode 100644 index 00000000000..eadab796f9d --- /dev/null +++ b/website/docs/cdktf/typescript/r/waf_rule.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_rule" +description: |- + Provides a AWS WAF rule resource. +--- + + + +# Resource: aws_waf_rule + +Provides a WAF Rule Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
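+ *
+ * The rule below references the IPSet through a predicate: `dataId` points
+ * at the set's ID and `type: "IPMatch"` matches on source IP addresses.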
+ */
+import { WafIpset } from "./.gen/providers/aws/waf-ipset";
+import { WafRule } from "./.gen/providers/aws/waf-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ipset = new WafIpset(this, "ipset", {
+      ipSetDescriptors: [
+        {
+          type: "IPV4",
+          value: "192.0.7.0/24",
+        },
+      ],
+      name: "tfIPSet",
+    });
+    new WafRule(this, "wafrule", {
+      dependsOn: [ipset],
+      metricName: "tfWAFRule",
+      name: "tfWAFRule",
+      predicates: [
+        {
+          dataId: ipset.id,
+          negated: false,
+          type: "IPMatch",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `metricName` - (Required) The name or description for the Amazon CloudWatch metric of this rule. The name can contain only alphanumeric characters (A-Z, a-z, 0-9); the name can't contain whitespace.
+* `name` - (Required) The name or description of the rule.
+* `predicates` - (Optional) The objects to include in a rule (documented below).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Blocks
+
+### `predicates`
+
+See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information.
+
+#### Arguments
+
+* `negated` - (Required) Set this to `false` if you want to allow, block, or count requests
+  based on the settings in the specified [waf_byte_match_set](/docs/providers/aws/r/waf_byte_match_set.html), [waf_ipset](/docs/providers/aws/r/waf_ipset.html), [aws_waf_size_constraint_set](/docs/providers/aws/r/waf_size_constraint_set.html), [aws_waf_sql_injection_match_set](/docs/providers/aws/r/waf_sql_injection_match_set.html) or [aws_waf_xss_match_set](/docs/providers/aws/r/waf_xss_match_set.html).
+  For example, if an IPSet includes the IP address `192.0.2.44`, AWS WAF will allow or block requests based on that IP address.
+  If set to `true`, AWS WAF will allow, block, or count requests based on all IP addresses except `192.0.2.44`.
+* `dataId` - (Required) A unique identifier for a predicate in the rule, such as a Byte Match Set ID or IPSet ID.
+* `type` - (Required) The type of predicate in a rule. Valid values: `byteMatch`, `geoMatch`, `ipMatch`, `regexMatch`, `sizeConstraint`, `sqlInjectionMatch`, or `xssMatch`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF rule.
+* `arn` - The ARN of the WAF rule.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF rules using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
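+The converted import block above is empty. As a minimal sketch of how the same import can be expressed directly in CDKTF (assuming cdktf 0.17 or later, where resources expose an `importFrom` method; the stack name and rule ID below are placeholders, not generated by `cdktf convert`):
+
+```typescript
+// Sketch only; assumes cdktf >= 0.17, where TerraformResource
+// provides `importFrom`. The rule ID is a placeholder.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { WafRule } from "./.gen/providers/aws/waf-rule";
+
+class MyImportStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const rule = new WafRule(this, "example", {
+      metricName: "tfWAFRule",
+      name: "tfWAFRule",
+    });
+    // Emits a Terraform `import` block binding the existing rule
+    // (by its WAF rule ID) to this resource on the next plan/apply.
+    rule.importFrom("a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc");
+  }
+}
+```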
+Using `terraform import`, import WAF rules using the id. For example:
+
+```console
+% terraform import aws_waf_rule.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/waf_rule_group.html.markdown b/website/docs/cdktf/typescript/r/waf_rule_group.html.markdown
new file mode 100644
index 00000000000..b03bc40364f
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/waf_rule_group.html.markdown
@@ -0,0 +1,105 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_rule_group"
+description: |-
+  Provides an AWS WAF rule group resource.
+---
+
+
+# Resource: aws_waf_rule_group
+
+Provides a WAF Rule Group Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafRule } from "./.gen/providers/aws/waf-rule";
+import { WafRuleGroup } from "./.gen/providers/aws/waf-rule-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new WafRule(this, "example", {
+      metricName: "example",
+      name: "example",
+    });
+    const awsWafRuleGroupExample = new WafRuleGroup(this, "example_1", {
+      activatedRule: [
+        {
+          action: {
+            type: "COUNT",
+          },
+          priority: 50,
+          ruleId: example.id,
+        },
+      ],
+      metricName: "example",
+      name: "example",
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsWafRuleGroupExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A friendly name of the rule group.
+* `metricName` - (Required) A friendly name for the metrics from the rule group.
+* `activatedRule` - (Optional) A list of activated rules (see below).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Blocks
+
+### `activatedRule`
+
+#### Arguments
+
+* `action` - (Required) Specifies the action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule.
+    * `type` - (Required) e.g., `block`, `allow`, or `count`
+* `priority` - (Required) Specifies the order in which the rules are evaluated. Rules with a lower value are evaluated before rules with a higher value.
+* `ruleId` - (Required) The ID of a [rule](/docs/providers/aws/r/waf_rule.html)
+* `type` - (Optional) The rule type, either [`regular`](/docs/providers/aws/r/waf_rule.html), [`rateBased`](/docs/providers/aws/r/waf_rate_based_rule.html), or `group`. Defaults to `regular`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF rule group.
+* `arn` - The ARN of the WAF rule group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
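+
+A rule group only takes effect once it is attached to a web ACL. In [`aws_waf_web_acl`](/docs/providers/aws/r/waf_web_acl.html) this is done with a `rules` entry of type `GROUP`, which takes an `overrideAction` instead of an `action`. A minimal sketch (resource and metric names here are illustrative, not generated by `cdktf convert`):
+
+```typescript
+// Sketch: attach a rule group to a WAF (global) web ACL.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { WafRuleGroup } from "./.gen/providers/aws/waf-rule-group";
+import { WafWebAcl } from "./.gen/providers/aws/waf-web-acl";
+
+class RuleGroupAclStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const group = new WafRuleGroup(this, "example", {
+      metricName: "example",
+      name: "example",
+    });
+    new WafWebAcl(this, "acl", {
+      defaultAction: { type: "ALLOW" },
+      metricName: "exampleWebAcl",
+      name: "example-web-acl",
+      rules: [
+        {
+          // GROUP rules use overrideAction; "NONE" keeps the actions
+          // configured inside the rule group.
+          overrideAction: { type: "NONE" },
+          priority: 1,
+          ruleId: group.id,
+          type: "GROUP",
+        },
+      ],
+    });
+  }
+}
+```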
+ +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Rule Group using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Rule Group using the id. For example: + +```console +% terraform import aws_waf_rule_group.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/waf_size_constraint_set.html.markdown b/website/docs/cdktf/typescript/r/waf_size_constraint_set.html.markdown new file mode 100644 index 00000000000..05d298d900e --- /dev/null +++ b/website/docs/cdktf/typescript/r/waf_size_constraint_set.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_size_constraint_set" +description: |- + Provides a AWS WAF Size Constraint Set resource. +--- + + + +# Resource: aws_waf_size_constraint_set + +Provides a WAF Size Constraint Set Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafSizeConstraintSet } from "./.gen/providers/aws/waf-size-constraint-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafSizeConstraintSet(this, "size_constraint_set", { + name: "tfsize_constraints", + sizeConstraints: [ + { + comparisonOperator: "EQ", + fieldToMatch: { + type: "BODY", + }, + size: Token.asNumber("4096"), + textTransformation: "NONE", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Size Constraint Set. +* `sizeConstraints` - (Optional) Specifies the parts of web requests that you want to inspect the size of. + +## Nested Blocks + +### `sizeConstraints` + +#### Arguments + +* `fieldToMatch` - (Required) Specifies where in a web request to look for the size constraint. +* `comparisonOperator` - (Required) The type of comparison you want to perform. + e.g., `eq`, `ne`, `lt`, `gt`. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_wafRegional_SizeConstraint.html) for all supported values. +* `size` - (Required) The size in bytes that you want to compare against the size of the specified `fieldToMatch`. + Valid values are between 0 - 21474836480 bytes (0 - 20 GB). +* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `fieldToMatch` before inspecting a request for a match. + e.g., `cmdLine`, `htmlEntityDecode` or `none`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_SizeConstraint.html#WAF-Type-SizeConstraint-TextTransformation) + for all supported values. 
+ **Note:** if you choose `body` as `type`, you must choose `none` because CloudFront forwards only the first 8192 bytes for inspection. + +### `fieldToMatch` + +#### Arguments + +* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `header`, `method` or `body`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Size Constraint Set. +* `arn` - Amazon Resource Name (ARN) + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF Size Constraint Set using their ID. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import AWS WAF Size Constraint Set using their ID. For example: + +```console +% terraform import aws_waf_size_constraint_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/waf_sql_injection_match_set.html.markdown b/website/docs/cdktf/typescript/r/waf_sql_injection_match_set.html.markdown new file mode 100644 index 00000000000..939f2569e79 --- /dev/null +++ b/website/docs/cdktf/typescript/r/waf_sql_injection_match_set.html.markdown @@ -0,0 +1,104 @@ +--- +subcategory: "WAF Classic" +layout: "aws" +page_title: "AWS: aws_waf_sql_injection_match_set" +description: |- + Provides a AWS WAF SQL Injection Match Set resource. +--- + + + +# Resource: aws_waf_sql_injection_match_set + +Provides a WAF SQL Injection Match Set Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafSqlInjectionMatchSet } from "./.gen/providers/aws/waf-sql-injection-match-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafSqlInjectionMatchSet(this, "sql_injection_match_set", { + name: "tf-sql_injection_match_set", + sqlInjectionMatchTuples: [ + { + fieldToMatch: { + type: "QUERY_STRING", + }, + textTransformation: "URL_DECODE", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the SQL Injection Match Set. +* `sqlInjectionMatchTuples` - (Optional) The parts of web requests that you want AWS WAF to inspect for malicious SQL code and, if you want AWS WAF to inspect a header, the name of the header. + +## Nested Blocks + +### `sqlInjectionMatchTuples` + +* `fieldToMatch` - (Required) Specifies where in a web request to look for snippets of malicious SQL code. 
+* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF.
+  If you specify a transformation, AWS WAF performs the transformation on `fieldToMatch` before inspecting a request for a match.
+  e.g., `cmdLine`, `htmlEntityDecode` or `none`.
+  See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_SqlInjectionMatchTuple.html#WAF-Type-SqlInjectionMatchTuple-TextTransformation)
+  for all supported values.
+
+### `fieldToMatch`
+
+#### Arguments
+
+* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`.
+  If `type` is any other value, omit this field.
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string.
+  e.g., `header`, `method` or `body`.
+  See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html)
+  for all supported values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF SQL Injection Match Set.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF SQL Injection Match Set using their ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AWS WAF SQL Injection Match Set using their ID. For example:
+
+```console
+% terraform import aws_waf_sql_injection_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/waf_web_acl.html.markdown b/website/docs/cdktf/typescript/r/waf_web_acl.html.markdown
new file mode 100644
index 00000000000..dccdb017836
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/waf_web_acl.html.markdown
@@ -0,0 +1,199 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_web_acl"
+description: |-
+  Provides an AWS WAF web access control group (ACL) resource.
+---
+
+
+# Resource: aws_waf_web_acl
+
+Provides a WAF Web ACL Resource
+
+## Example Usage
+
+This example blocks requests coming from `192.0.7.0/24` and allows everything else.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafIpset } from "./.gen/providers/aws/waf-ipset";
+import { WafRule } from "./.gen/providers/aws/waf-rule";
+import { WafWebAcl } from "./.gen/providers/aws/waf-web-acl";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ipset = new WafIpset(this, "ipset", {
+      ipSetDescriptors: [
+        {
+          type: "IPV4",
+          value: "192.0.7.0/24",
+        },
+      ],
+      name: "tfIPSet",
+    });
+    const wafrule = new WafRule(this, "wafrule", {
+      dependsOn: [ipset],
+      metricName: "tfWAFRule",
+      name: "tfWAFRule",
+      predicates: [
+        {
+          dataId: ipset.id,
+          negated: false,
+          type: "IPMatch",
+        },
+      ],
+    });
+    new WafWebAcl(this, "waf_acl", {
+      defaultAction: {
+        type: "ALLOW",
+      },
+      dependsOn: [ipset, wafrule],
+      metricName: "tfWebACL",
+      name: "tfWebACL",
+      rules: [
+        {
+          action: {
+            type: "BLOCK",
+          },
+          priority: 1,
+          ruleId: wafrule.id,
+          type: "REGULAR",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+### Logging
+
+~> *NOTE:* The Kinesis Firehose Delivery Stream name must begin with `aws-waf-logs-`, and the stream must be located in the `us-east-1` region. See the [AWS WAF Developer Guide](https://docs.aws.amazon.com/waf/latest/developerguide/logging.html) for more information about enabling WAF logging.
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafWebAcl } from "./.gen/providers/aws/waf-web-acl";
+interface MyConfig {
+  defaultAction: any;
+  metricName: any;
+  name: any;
+}
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string, config: MyConfig) {
+    super(scope, name);
+    new WafWebAcl(this, "example", {
+      loggingConfiguration: {
+        // `awsKinesisFirehoseDeliveryStreamExample` refers to an
+        // `aws_kinesis_firehose_delivery_stream` defined elsewhere in the
+        // configuration; `cdktf convert` does not emit its declaration.
+        logDestination: Token.asString(
+          awsKinesisFirehoseDeliveryStreamExample.arn
+        ),
+        redactedFields: {
+          fieldToMatch: [
+            {
+              type: "URI",
+            },
+            {
+              data: "referer",
+              type: "HEADER",
+            },
+          ],
+        },
+      },
+      defaultAction: config.defaultAction,
+      metricName: config.metricName,
+      name: config.name,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `defaultAction` - (Required) Configuration block with the action that you want AWS WAF to take when a request doesn't match the criteria in any of the rules that are associated with the web ACL. Detailed below.
+* `metricName` - (Required) The name or description for the Amazon CloudWatch metric of this web ACL.
+* `name` - (Required) The name or description of the web ACL.
+* `rules` - (Optional) Configuration blocks containing rules to associate with the web ACL and the settings for each rule. Detailed below.
+* `loggingConfiguration` - (Optional) Configuration block to enable WAF logging. Detailed below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `defaultAction` Configuration Block
+
+* `type` - (Required) Specifies how you want AWS WAF to respond to requests that don't match the criteria in any of the `rules`.
+  e.g., `allow` or `block`
+
+### `loggingConfiguration` Configuration Block
+
+* `logDestination` - (Required) Amazon Resource Name (ARN) of the Kinesis Firehose Delivery Stream
+* `redactedFields` - (Optional) Configuration block containing parts of the request that you want redacted from the logs. Detailed below.
+
+#### `redactedFields` Configuration Block
+
+* `fieldToMatch` - (Required) Set of configuration blocks for fields to redact. Detailed below.
+
+##### `fieldToMatch` Configuration Block
+
+-> Additional information about this configuration can be found in the [AWS WAF Regional API Reference](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_FieldToMatch.html).
+
+* `data` - (Optional) When the value of `type` is `header`, enter the name of the header that you want the WAF to search, for example, `userAgent` or `referer`. If the value of `type` is any other value, omit `data`.
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string, e.g., `header` or `method`.
+
+### `rules` Configuration Block
+
+See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ActivatedRule.html) for all details and supported values.
+
+* `action` - (Optional) The action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule. Not used if `type` is `group`.
+    * `type` - (Required) valid values are: `block`, `allow`, or `count`
+* `overrideAction` - (Optional) Override the action that a group requests CloudFront or AWS WAF takes when a web request matches the conditions in the rule. Only used if `type` is `group`.
+    * `type` - (Required) valid values are: `none` or `count`
+* `priority` - (Required) Specifies the order in which the rules in a WebACL are evaluated.
+  Rules with a lower value are evaluated before rules with a higher value.
+* `ruleId` - (Required) ID of the associated WAF (Global) rule (e.g., [`awsWafRule`](/docs/providers/aws/r/waf_rule.html)). WAF (Regional) rules cannot be used.
+* `type` - (Optional) The rule type, either `regular`, as defined by [Rule](http://docs.aws.amazon.com/waf/latest/APIReference/API_Rule.html), `rateBased`, as defined by [RateBasedRule](http://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedRule.html), or `group`, as defined by [RuleGroup](https://docs.aws.amazon.com/waf/latest/APIReference/API_RuleGroup.html). Defaults to `regular`. If you add a rate-based rule, set `type` to `rateBased`; if you add a rule group, set `type` to `group`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF WebACL.
+* `arn` - The ARN of the WAF WebACL.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Web ACL using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Web ACL using the `id`.
+For example:
+
+```console
+% terraform import aws_waf_web_acl.main 0c8e583e-18f3-4c13-9e2a-67c4805d2f94
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/waf_xss_match_set.html.markdown b/website/docs/cdktf/typescript/r/waf_xss_match_set.html.markdown
new file mode 100644
index 00000000000..06fd5ab7233
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/waf_xss_match_set.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "WAF Classic"
+layout: "aws"
+page_title: "AWS: aws_waf_xss_match_set"
+description: |-
+  Provides an AWS WAF XssMatchSet resource.
+---
+
+
+# Resource: aws_waf_xss_match_set
+
+Provides a WAF XSS Match Set Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafXssMatchSet } from "./.gen/providers/aws/waf-xss-match-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WafXssMatchSet(this, "xss_match_set", {
+      name: "xss_match_set",
+      xssMatchTuples: [
+        {
+          fieldToMatch: {
+            type: "URI",
+          },
+          textTransformation: "NONE",
+        },
+        {
+          fieldToMatch: {
+            type: "QUERY_STRING",
+          },
+          textTransformation: "NONE",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name or description of the XssMatchSet.
+* `xssMatchTuples` - (Optional) The parts of web requests that you want to inspect for cross-site scripting attacks.
+
+## Nested Blocks
+
+### `xssMatchTuples`
+
+* `fieldToMatch` - (Required) Specifies where in a web request to look for cross-site scripting attacks.
+* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF.
+  If you specify a transformation, AWS WAF performs the transformation on `fieldToMatch` before inspecting a request for a match.
+  e.g., `cmdLine`, `htmlEntityDecode` or `none`.
+  See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_XssMatchTuple.html#WAF-Type-XssMatchTuple-TextTransformation)
+  for all supported values.
+
+### `fieldToMatch`
+
+#### Arguments
+
+* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`.
+  If `type` is any other value, omit this field.
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string.
+  e.g., `header`, `method` or `body`.
+  See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html)
+  for all supported values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF XssMatchSet.
+* `arn` - Amazon Resource Name (ARN)
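+
+To act on matches, reference the set from a [WAF rule](/docs/providers/aws/r/waf_rule.html) predicate of type `XssMatch`. A minimal sketch (resource and metric names are illustrative, not generated by `cdktf convert`):
+
+```typescript
+// Sketch: wire the XSS match set into a WAF rule predicate.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { WafRule } from "./.gen/providers/aws/waf-rule";
+import { WafXssMatchSet } from "./.gen/providers/aws/waf-xss-match-set";
+
+class XssRuleStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const xssMatchSet = new WafXssMatchSet(this, "xss_match_set", {
+      name: "xss_match_set",
+      xssMatchTuples: [
+        {
+          fieldToMatch: { type: "QUERY_STRING" },
+          textTransformation: "URL_DECODE",
+        },
+      ],
+    });
+    new WafRule(this, "xss_rule", {
+      metricName: "xssRule",
+      name: "xssRule",
+      predicates: [
+        {
+          // The match set is referenced by ID, like any other predicate.
+          dataId: xssMatchSet.id,
+          negated: false,
+          type: "XssMatch",
+        },
+      ],
+    });
+  }
+}
+```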
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF XSS Match Set using their ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF XSS Match Set using their ID. For example:
+
+```console
+% terraform import aws_waf_xss_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafregional_byte_match_set.html.markdown b/website/docs/cdktf/typescript/r/wafregional_byte_match_set.html.markdown
new file mode 100644
index 00000000000..8679cc40352
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafregional_byte_match_set.html.markdown
@@ -0,0 +1,97 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_byte_match_set"
+description: |-
+  Provides an AWS WAF Regional ByteMatchSet resource for use with ALB.
+---
+
+
+# Resource: aws_wafregional_byte_match_set
+
+Provides a WAF Regional Byte Match Set Resource for use with Application Load Balancer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafregionalByteMatchSet } from "./.gen/providers/aws/wafregional-byte-match-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WafregionalByteMatchSet(this, "byte_set", {
+      byteMatchTuples: [
+        {
+          fieldToMatch: {
+            data: "referer",
+            type: "HEADER",
+          },
+          positionalConstraint: "CONTAINS",
+          targetString: "badrefer1",
+          textTransformation: "NONE",
+        },
+      ],
+      name: "tf_waf_byte_match_set",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name or description of the ByteMatchSet.
+* `byteMatchTuples` - (Optional) Settings for the ByteMatchSet, such as the bytes (typically a string that corresponds with ASCII characters) that you want AWS WAF to search for in web requests. ByteMatchTuple documented below.
+
+Each `byteMatchTuples` object supports the following:
+
+* `fieldToMatch` - (Required) Settings for the ByteMatchTuple. FieldToMatch documented below.
+* `positionalConstraint` - (Required) Where within the part of the web request specified by `fieldToMatch` you want AWS WAF to search, e.g., `EXACTLY`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, or `CONTAINS_WORD`.
+* `targetString` - (Required) The value that you want AWS WAF to search for. The maximum length of the value is 50 bytes.
+* `textTransformation` - (Required) Text transformation to apply to the web request before inspecting it, used to eliminate unusual formatting that attackers use in an effort to bypass AWS WAF, e.g., `NONE`, `CMD_LINE` or `HTML_ENTITY_DECODE`.
+
+Each `fieldToMatch` object supports the following:
+
+* `data` - (Optional) When the value of `type` is `HEADER`, enter the name of the header that you want AWS WAF to search, for example, `User-Agent` or `Referer`. If the value of `type` is any other value, omit `data`.
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF ByteMatchSet.
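+
+Like the other match sets, a byte match set is referenced from a [WAF Regional rule](/docs/providers/aws/r/wafregional_rule.html) predicate, here with type `ByteMatch`. A minimal sketch (resource and metric names are illustrative, not generated by `cdktf convert`):
+
+```typescript
+// Sketch: reference a byte match set from a regional rule predicate.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { WafregionalByteMatchSet } from "./.gen/providers/aws/wafregional-byte-match-set";
+import { WafregionalRule } from "./.gen/providers/aws/wafregional-rule";
+
+class ByteMatchRuleStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const byteSet = new WafregionalByteMatchSet(this, "byte_set", {
+      byteMatchTuples: [
+        {
+          fieldToMatch: { data: "referer", type: "HEADER" },
+          positionalConstraint: "CONTAINS",
+          targetString: "badrefer1",
+          textTransformation: "NONE",
+        },
+      ],
+      name: "tf_waf_byte_match_set",
+    });
+    new WafregionalRule(this, "byte_rule", {
+      metricName: "byteMatchRule",
+      name: "byteMatchRule",
+      predicate: [
+        {
+          dataId: byteSet.id,
+          negated: false,
+          type: "ByteMatch",
+        },
+      ],
+    });
+  }
+}
+```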
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Byte Match Set using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Regional Byte Match Set using the id. For example:
+
+```console
+% terraform import aws_wafregional_byte_match_set.byte_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafregional_geo_match_set.html.markdown b/website/docs/cdktf/typescript/r/wafregional_geo_match_set.html.markdown
new file mode 100644
index 00000000000..e800eed1c7e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafregional_geo_match_set.html.markdown
@@ -0,0 +1,93 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_geo_match_set"
+description: |-
+  Provides an AWS WAF Regional Geo Match Set resource.
+---
+
+
+# Resource: aws_wafregional_geo_match_set
+
+Provides a WAF Regional Geo Match Set Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafregionalGeoMatchSet } from "./.gen/providers/aws/wafregional-geo-match-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WafregionalGeoMatchSet(this, "geo_match_set", {
+      geoMatchConstraint: [
+        {
+          type: "Country",
+          value: "US",
+        },
+        {
+          type: "Country",
+          value: "CA",
+        },
+      ],
+      name: "geo_match_set",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name or description of the Geo Match Set.
+* `geoMatchConstraint` - (Optional) The Geo Match Constraint objects which contain the country that you want AWS WAF to search for.
+
+## Nested Blocks
+
+### `geoMatchConstraint`
+
+#### Arguments
+
+* `type` - (Required) The type of geographical area you want AWS WAF to search for. Currently `Country` is the only valid value.
+* `value` - (Required) The country that you want AWS WAF to search for.
+  This is the two-letter country code, e.g., `US`, `CA`, `RU`, `CN`, etc.
+  See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchConstraint.html) for all supported values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF Regional Geo Match Set.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Geo Match Set using the id.
+For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Regional Geo Match Set using the id. For example:
+
+```console
+% terraform import aws_wafregional_geo_match_set.geo_match_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafregional_ipset.html.markdown b/website/docs/cdktf/typescript/r/wafregional_ipset.html.markdown
new file mode 100644
index 00000000000..bfafdf7b788
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafregional_ipset.html.markdown
@@ -0,0 +1,94 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_ipset"
+description: |-
+  Provides an AWS WAF Regional IPSet resource for use with ALB.
+---
+
+
+# Resource: aws_wafregional_ipset
+
+Provides a WAF Regional IPSet Resource for use with Application Load Balancer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafregionalIpset } from "./.gen/providers/aws/wafregional-ipset";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WafregionalIpset(this, "ipset", {
+      ipSetDescriptor: [
+        {
+          type: "IPV4",
+          value: "192.0.7.0/24",
+        },
+        {
+          type: "IPV4",
+          value: "10.16.16.0/16",
+        },
+      ],
+      name: "tfIPSet",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name or description of the IPSet.
+* `ipSetDescriptor` - (Optional) One or more pairs specifying the IP address type (IPV4 or IPV6) and the IP address range (in CIDR notation) from which web requests originate.
+
+## Nested Blocks
+
+### `ipSetDescriptor`
+
+#### Arguments
+
+* `type` - (Required) The IP address type. Valid values: `IPV4` or `IPV6`.
+* `value` - (Required) The IP address or range of IP addresses, in CIDR notation, e.g., `192.0.7.0/24`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF IPSet.
+* `arn` - The ARN of the WAF IPSet.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional IPSets using their ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Regional IPSets using their ID.
+For example:
+
+```console
+% terraform import aws_wafregional_ipset.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafregional_rate_based_rule.html.markdown b/website/docs/cdktf/typescript/r/wafregional_rate_based_rule.html.markdown
new file mode 100644
index 00000000000..6ee793a509c
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafregional_rate_based_rule.html.markdown
@@ -0,0 +1,114 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_rate_based_rule"
+description: |-
+  Provides an AWS WAF Regional rate based rule resource.
+---
+
+
+# Resource: aws_wafregional_rate_based_rule
+
+Provides a WAF Regional Rate Based Rule Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafregionalIpset } from "./.gen/providers/aws/wafregional-ipset";
+import { WafregionalRateBasedRule } from "./.gen/providers/aws/wafregional-rate-based-rule";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const ipset = new WafregionalIpset(this, "ipset", {
+      ipSetDescriptor: [
+        {
+          type: "IPV4",
+          value: "192.0.7.0/24",
+        },
+      ],
+      name: "tfIPSet",
+    });
+    new WafregionalRateBasedRule(this, "wafrule", {
+      dependsOn: [ipset],
+      metricName: "tfWAFRule",
+      name: "tfWAFRule",
+      predicate: [
+        {
+          dataId: ipset.id,
+          negated: false,
+          type: "IPMatch",
+        },
+      ],
+      rateKey: "IP",
+      rateLimit: 100,
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `metricName` - (Required) The name or description for the Amazon CloudWatch metric of this rule.
+* `name` - (Required) The name or description of the rule.
+* `rateKey` - (Required) Valid value is `IP`.
+* `rateLimit` - (Required) The maximum number of requests, which have an identical value in the field specified by the `rateKey`, allowed in a five-minute period. Minimum value is 100.
+* `predicate` - (Optional) The objects to include in a rule (documented below).
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Blocks
+
+### `predicate`
+
+See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information.
+
+#### Arguments
+
+* `negated` - (Required) Set this to `false` if you want to allow, block, or count requests
+  based on the settings in the specified `byteMatchSet`, `ipSet`, `sqlInjectionMatchSet`, `xssMatchSet`, or `sizeConstraintSet`.
+  For example, if an IPSet includes the IP address `192.0.2.44`, AWS WAF will allow or block requests based on that IP address.
+  If set to `true`, AWS WAF will allow, block, or count requests based on all IP addresses _except_ `192.0.2.44`.
+* `dataId` - (Required) A unique identifier for a predicate in the rule, such as a Byte Match Set ID or IPSet ID.
+* `type` - (Required) The type of predicate in a rule.
Valid values: `byteMatch`, `geoMatch`, `ipMatch`, `regexMatch`, `sizeConstraint`, `sqlInjectionMatch`, or `xssMatch`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Rate Based Rule. +* `arn` - The ARN of the WAF Regional Rate Based Rule. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Rate Based Rule using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Regional Rate Based Rule using the id. For example: + +```console +% terraform import aws_wafregional_rate_based_rule.wafrule a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/wafregional_regex_match_set.html.markdown b/website/docs/cdktf/typescript/r/wafregional_regex_match_set.html.markdown new file mode 100644 index 00000000000..713030f4dc9 --- /dev/null +++ b/website/docs/cdktf/typescript/r/wafregional_regex_match_set.html.markdown @@ -0,0 +1,113 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_regex_match_set" +description: |- + Provides a AWS WAF Regional Regex Match Set resource. +--- + + + +# Resource: aws_wafregional_regex_match_set + +Provides a WAF Regional Regex Match Set Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafregionalRegexMatchSet } from "./.gen/providers/aws/wafregional-regex-match-set"; +import { WafregionalRegexPatternSet } from "./.gen/providers/aws/wafregional-regex-pattern-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new WafregionalRegexPatternSet(this, "example", { + name: "example", + regexPatternStrings: ["one", "two"], + }); + const awsWafregionalRegexMatchSetExample = new WafregionalRegexMatchSet( + this, + "example_1", + { + name: "example", + regexMatchTuple: [ + { + fieldToMatch: { + data: "User-Agent", + type: "HEADER", + }, + regexPatternSetId: example.id, + textTransformation: "NONE", + }, + ], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWafregionalRegexMatchSetExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Match Set. 
+* `regexMatchTuple` - (Required) The regular expression pattern that you want AWS WAF to search for in web requests, the location in requests that you want AWS WAF to search, and other settings. See below. + +### Nested Arguments + +#### `regexMatchTuple` + +* `fieldToMatch` - (Required) The part of a web request that you want to search, such as a specified header or a query string. +* `regexPatternSetId` - (Required) The ID of a [Regex Pattern Set](/docs/providers/aws/r/waf_regex_pattern_set.html). +* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + e.g., `cmdLine`, `htmlEntityDecode` or `none`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchTuple.html#WAF-Type-ByteMatchTuple-TextTransformation) + for all supported values. + +#### `fieldToMatch` + +* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `header`, `method` or `body`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) + for all supported values. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Regex Match Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Regex Match Set using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Regional Regex Match Set using the id. For example: + +```console +% terraform import aws_wafregional_regex_match_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/wafregional_regex_pattern_set.html.markdown b/website/docs/cdktf/typescript/r/wafregional_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..678fd7db5ec --- /dev/null +++ b/website/docs/cdktf/typescript/r/wafregional_regex_pattern_set.html.markdown @@ -0,0 +1,73 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_regex_pattern_set" +description: |- + Provides a AWS WAF Regional Regex Pattern Set resource. +--- + + + +# Resource: aws_wafregional_regex_pattern_set + +Provides a WAF Regional Regex Pattern Set Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */ +import { WafregionalRegexPatternSet } from "./.gen/providers/aws/wafregional-regex-pattern-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafregionalRegexPatternSet(this, "example", { + name: "example", + regexPatternStrings: ["one", "two"], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Regex Pattern Set. +* `regexPatternStrings` - (Optional) A list of regular expression (regex) patterns that you want AWS WAF to search for, such as `b[a@]dB[o0]t`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Regex Pattern Set. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Regex Pattern Set using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Regional Regex Pattern Set using the id. For example: + +```console +% terraform import aws_wafregional_regex_pattern_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/wafregional_rule.html.markdown b/website/docs/cdktf/typescript/r/wafregional_rule.html.markdown new file mode 100644 index 00000000000..f134322531a --- /dev/null +++ b/website/docs/cdktf/typescript/r/wafregional_rule.html.markdown @@ -0,0 +1,108 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_rule" +description: |- + Provides an AWS WAF Regional rule resource for use with ALB. +--- + + + +# Resource: aws_wafregional_rule + +Provides an WAF Regional Rule Resource for use with Application Load Balancer. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafregionalIpset } from "./.gen/providers/aws/wafregional-ipset"; +import { WafregionalRule } from "./.gen/providers/aws/wafregional-rule"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const ipset = new WafregionalIpset(this, "ipset", { + ipSetDescriptor: [ + { + type: "IPV4", + value: "192.0.7.0/24", + }, + ], + name: "tfIPSet", + }); + new WafregionalRule(this, "wafrule", { + metricName: "tfWAFRule", + name: "tfWAFRule", + predicate: [ + { + dataId: ipset.id, + negated: false, + type: "IPMatch", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the rule. +* `metricName` - (Required) The name or description for the Amazon CloudWatch metric of this rule. +* `predicate` - (Optional) The objects to include in a rule (documented below). +* `tags` - (Optional) Key-value map of resource tags. 
+  If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Fields
+
+### `predicate`
+
+See the [WAF Documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_Predicate.html) for more information.
+
+#### Arguments
+
+* `type` - (Required) The type of predicate in a rule. Valid values: `byteMatch`, `geoMatch`, `ipMatch`, `regexMatch`, `sizeConstraint`, `sqlInjectionMatch`, or `xssMatch`.
+* `dataId` - (Required) The unique identifier of a predicate, such as the ID of a `byteMatchSet` or `ipSet`.
+* `negated` - (Required) Whether to use the settings or the negated settings that you specified in the objects.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF Regional Rule.
+* `arn` - The ARN of the WAF Regional Rule.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Rule using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Regional Rule using the id. For example:
+
+```console
+% terraform import aws_wafregional_rule.wafrule a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafregional_rule_group.html.markdown b/website/docs/cdktf/typescript/r/wafregional_rule_group.html.markdown
new file mode 100644
index 00000000000..d025a7d55b6
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafregional_rule_group.html.markdown
@@ -0,0 +1,109 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_rule_group"
+description: |-
+  Provides an AWS WAF Regional Rule Group resource.
+---
+
+
+# Resource: aws_wafregional_rule_group
+
+Provides a WAF Regional Rule Group Resource
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { WafregionalRule } from "./.gen/providers/aws/wafregional-rule"; +import { WafregionalRuleGroup } from "./.gen/providers/aws/wafregional-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new WafregionalRule(this, "example", { + metricName: "example", + name: "example", + }); + const awsWafregionalRuleGroupExample = new WafregionalRuleGroup( + this, + "example_1", + { + activatedRule: [ + { + action: { + type: "COUNT", + }, + priority: 50, + ruleId: example.id, + }, + ], + metricName: "example", + name: "example", + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWafregionalRuleGroupExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A friendly name of the rule group +* `metricName` - (Required) A friendly name for the metrics from the rule group +* `activatedRule` - (Optional) A list of activated rules, see below +* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. + +## Nested Blocks + +### `activatedRule` + +#### Arguments + +* `action` - (Required) Specifies the action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule. + * `type` - (Required) e.g., `block`, `allow`, or `count` +* `priority` - (Required) Specifies the order in which the rules are evaluated. Rules with a lower value are evaluated before rules with a higher value. +* `ruleId` - (Required) The ID of a [rule](/docs/providers/aws/r/wafregional_rule.html) +* `type` - (Optional) The rule type, either [`regular`](/docs/providers/aws/r/wafregional_rule.html), [`rateBased`](/docs/providers/aws/r/wafregional_rate_based_rule.html), or `group`. Defaults to `regular`. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the WAF Regional Rule Group. +* `arn` - The ARN of the WAF Regional Rule Group. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Rule Group using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Regional Rule Group using the id. 
For example: + +```console +% terraform import aws_wafregional_rule_group.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/wafregional_size_constraint_set.html.markdown b/website/docs/cdktf/typescript/r/wafregional_size_constraint_set.html.markdown new file mode 100644 index 00000000000..d85a2262153 --- /dev/null +++ b/website/docs/cdktf/typescript/r/wafregional_size_constraint_set.html.markdown @@ -0,0 +1,112 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_size_constraint_set" +description: |- + Provides an AWS WAF Regional Size Constraint Set resource for use with ALB. +--- + + + +# Resource: aws_wafregional_size_constraint_set + +Provides a WAF Regional Size Constraint Set Resource for use with Application Load Balancer. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafregionalSizeConstraintSet } from "./.gen/providers/aws/wafregional-size-constraint-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafregionalSizeConstraintSet(this, "size_constraint_set", { + name: "tfsize_constraints", + sizeConstraints: [ + { + comparisonOperator: "EQ", + fieldToMatch: { + type: "BODY", + }, + size: Token.asNumber("4096"), + textTransformation: "NONE", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name or description of the Size Constraint Set. +* `sizeConstraints` - (Optional) Specifies the parts of web requests that you want to inspect the size of. + +## Nested Blocks + +### `sizeConstraints` + +#### Arguments + +* `fieldToMatch` - (Required) Specifies where in a web request to look for the size constraint. +* `comparisonOperator` - (Required) The type of comparison you want to perform. + e.g., `eq`, `ne`, `lt`, `gt`. + See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_wafRegional_SizeConstraint.html) for all supported values. +* `size` - (Required) The size in bytes that you want to compare against the size of the specified `fieldToMatch`. + Valid values are between 0 - 21474836480 bytes (0 - 20 GB). +* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF. + If you specify a transformation, AWS WAF performs the transformation on `fieldToMatch` before inspecting a request for a match. + e.g., `cmdLine`, `htmlEntityDecode` or `none`. + See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_SizeConstraint.html#WAF-Type-SizeConstraint-TextTransformation) + for all supported values. + **Note:** if you choose `body` as `type`, you must choose `none` because CloudFront forwards only the first 8192 bytes for inspection. + +### `fieldToMatch` + +#### Arguments + +* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`. + If `type` is any other value, omit this field. +* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. + e.g., `header`, `method` or `body`. 
+  See [docs](http://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html)
+  for all supported values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF Size Constraint Set.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Size Constraint Set using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Size Constraint Set using the id. For example:
+
+```console
+% terraform import aws_wafregional_size_constraint_set.size_constraint_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafregional_sql_injection_match_set.html.markdown b/website/docs/cdktf/typescript/r/wafregional_sql_injection_match_set.html.markdown
new file mode 100644
index 00000000000..5ac27c6a4c1
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafregional_sql_injection_match_set.html.markdown
@@ -0,0 +1,100 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_sql_injection_match_set"
+description: |-
+  Provides an AWS WAF Regional SqlInjectionMatchSet resource for use with ALB.
+---
+
+
+
+# Resource: aws_wafregional_sql_injection_match_set
+
+Provides a WAF Regional SQL Injection Match Set Resource for use with Application Load Balancer.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WafregionalSqlInjectionMatchSet } from "./.gen/providers/aws/wafregional-sql-injection-match-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WafregionalSqlInjectionMatchSet(this, "sql_injection_match_set", {
+      name: "tf-sql_injection_match_set",
+      sqlInjectionMatchTuple: [
+        {
+          fieldToMatch: {
+            type: "QUERY_STRING",
+          },
+          textTransformation: "URL_DECODE",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name or description of the SQL Injection Match Set.
+* `sqlInjectionMatchTuple` - (Optional) The parts of web requests that you want AWS WAF to inspect for malicious SQL code and, if you want AWS WAF to inspect a header, the name of the header.
+
+### Nested fields
+
+#### `sqlInjectionMatchTuple`
+
+* `fieldToMatch` - (Required) Specifies where in a web request to look for snippets of malicious SQL code.
+* `textTransformation` - (Required) Text transformations used to eliminate unusual formatting that attackers use in web requests in an effort to bypass AWS WAF.
+  If you specify a transformation, AWS WAF performs the transformation on `fieldToMatch` before inspecting a request for a match.
+  e.g., `cmdLine`, `htmlEntityDecode` or `none`.
+  See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_SqlInjectionMatchTuple.html#WAF-Type-regional_SqlInjectionMatchTuple-TextTransformation)
+  for all supported values.
+
+#### `fieldToMatch`
+
+* `data` - (Optional) When `type` is `header`, enter the name of the header that you want to search, e.g., `userAgent` or `referer`.
+  If `type` is any other value, omit this field.
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string.
+  e.g., `header`, `method` or `body`.
+  See [docs](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_FieldToMatch.html)
+  for all supported values.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF SqlInjectionMatchSet.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Sql Injection Match Set using the id. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAF Regional Sql Injection Match Set using the id. For example:
+
+```console
+% terraform import aws_wafregional_sql_injection_match_set.sql_injection_match_set a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafregional_web_acl.html.markdown b/website/docs/cdktf/typescript/r/wafregional_web_acl.html.markdown
new file mode 100644
index 00000000000..10122326522
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafregional_web_acl.html.markdown
@@ -0,0 +1,234 @@
+---
+subcategory: "WAF Classic Regional"
+layout: "aws"
+page_title: "AWS: aws_wafregional_web_acl"
+description: |-
+  Provides an AWS WAF Regional web access control list (ACL) resource for use with ALB.
+---
+
+
+
+# Resource: aws_wafregional_web_acl
+
+Provides a WAF Regional Web ACL Resource for use with Application Load Balancer.
+
+## Example Usage
+
+### Regular Rule
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { WafregionalIpset } from "./.gen/providers/aws/wafregional-ipset"; +import { WafregionalRule } from "./.gen/providers/aws/wafregional-rule"; +import { WafregionalWebAcl } from "./.gen/providers/aws/wafregional-web-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const ipset = new WafregionalIpset(this, "ipset", { + ipSetDescriptor: [ + { + type: "IPV4", + value: "192.0.7.0/24", + }, + ], + name: "tfIPSet", + }); + const wafrule = new WafregionalRule(this, "wafrule", { + metricName: "tfWAFRule", + name: "tfWAFRule", + predicate: [ + { + dataId: ipset.id, + negated: false, + type: "IPMatch", + }, + ], + }); + new WafregionalWebAcl(this, "wafacl", { + defaultAction: { + type: "ALLOW", + }, + metricName: "tfWebACL", + name: "tfWebACL", + rule: [ + { + action: { + type: "BLOCK", + }, + priority: 1, + ruleId: wafrule.id, + type: "REGULAR", + }, + ], + }); + } +} + +``` + +### Group Rule + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafregionalWebAcl } from "./.gen/providers/aws/wafregional-web-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafregionalWebAcl(this, "example", { + defaultAction: { + type: "ALLOW", + }, + metricName: "example", + name: "example", + rule: [ + { + overrideAction: { + type: "NONE", + }, + priority: 1, + ruleId: Token.asString(awsWafregionalRuleGroupExample.id), + type: "GROUP", + }, + ], + }); + } +} + +``` + +### Logging + +~> *NOTE:* The Kinesis Firehose Delivery Stream name must begin with `awsWafLogs`. See the [AWS WAF Developer Guide](https://docs.aws.amazon.com/waf/latest/developerguide/logging.html) for more information about enabling WAF logging. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafregionalWebAcl } from "./.gen/providers/aws/wafregional-web-acl"; +interface MyConfig { + defaultAction: any; + metricName: any; + name: any; +} +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string, config: MyConfig) { + super(scope, name); + new WafregionalWebAcl(this, "example", { + loggingConfiguration: { + logDestination: Token.asString( + awsKinesisFirehoseDeliveryStreamExample.arn + ), + redactedFields: { + fieldToMatch: [ + { + type: "URI", + }, + { + data: "referer", + type: "HEADER", + }, + ], + }, + }, + defaultAction: config.defaultAction, + metricName: config.metricName, + name: config.name, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `defaultAction` - (Required) The action that you want AWS WAF Regional to take when a request doesn't match the criteria in any of the rules that are associated with the web ACL. +* `metricName` - (Required) The name or description for the Amazon CloudWatch metric of this web ACL. +* `name` - (Required) The name or description of the web ACL. 
+* `loggingConfiguration` - (Optional) Configuration block to enable WAF logging. Detailed below.
+* `rule` - (Optional) Set of configuration blocks containing rules for the web ACL. Detailed below.
+* `tags` - (Optional) Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### `defaultAction` Configuration Block
+
+* `type` - (Required) Specifies how you want AWS WAF Regional to respond to requests that match the settings in a rule. E.g., `allow`, `block` or `count`.
+
+### `loggingConfiguration` Configuration Block
+
+* `logDestination` - (Required) Amazon Resource Name (ARN) of the Kinesis Firehose Delivery Stream.
+* `redactedFields` - (Optional) Configuration block containing parts of the request that you want redacted from the logs. Detailed below.
+
+#### `redactedFields` Configuration Block
+
+* `fieldToMatch` - (Required) Set of configuration blocks for fields to redact. Detailed below.
+
+##### `fieldToMatch` Configuration Block
+
+-> Additional information about this configuration can be found in the [AWS WAF Regional API Reference](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_FieldToMatch.html).
+
+* `data` - (Optional) When the value of `type` is `header`, enter the name of the header that you want the WAF to search, for example, `userAgent` or `referer`. If the value of `type` is any other value, omit `data`.
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. E.g., `header` or `method`.
+
+### `rule` Configuration Block
+
+-> Additional information about this configuration can be found in the [AWS WAF Regional API Reference](https://docs.aws.amazon.com/waf/latest/APIReference/API_regional_ActivatedRule.html).
+
+* `priority` - (Required) Specifies the order in which the rules in a WebACL are evaluated.
+  Rules with a lower value are evaluated before rules with a higher value.
+* `ruleId` - (Required) ID of the associated WAF (Regional) rule (e.g., [`awsWafregionalRule`](/docs/providers/aws/r/wafregional_rule.html)). WAF (Global) rules cannot be used.
+* `action` - (Optional) Configuration block of the action that CloudFront or AWS WAF takes when a web request matches the conditions in the rule. Not used if `type` is `group`. Detailed below.
+* `overrideAction` - (Optional) Configuration block of the override action that CloudFront or AWS WAF takes when a web request matches the conditions in a rule within the rule group. Only used if `type` is `group`. Detailed below.
+* `type` - (Optional) The rule type, either `regular`, as defined by [Rule](http://docs.aws.amazon.com/waf/latest/APIReference/API_Rule.html), `rateBased`, as defined by [RateBasedRule](http://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedRule.html), or `group`, as defined by [RuleGroup](https://docs.aws.amazon.com/waf/latest/APIReference/API_RuleGroup.html). The default is `regular`. If you add a RATE_BASED rule, you need to set `type` as `rateBased` (a sketch of wiring in a rate-based rule appears at the end of this section). If you add a GROUP rule, you need to set `type` as `group`.
+
+#### `action` / `overrideAction` Configuration Block
+
+* `type` - (Required) Specifies how you want AWS WAF Regional to respond to requests that match the settings in a rule. Valid values for `action` are `allow`, `block` or `count`. Valid values for `overrideAction` are `count` and `none`.
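+
+As a rough sketch of how a rate-based rule is wired in (assuming the generated `WafregionalRateBasedRule` binding exposes `rateKey` and `rateLimit`, mirroring the HCL `aws_wafregional_rate_based_rule` schema; the names and limit below are placeholders):
+
+```typescript
+// Hypothetical sketch: referencing a rate-based rule from a regional web ACL.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { WafregionalRateBasedRule } from "./.gen/providers/aws/wafregional-rate-based-rule";
+import { WafregionalWebAcl } from "./.gen/providers/aws/wafregional-web-acl";
+class RateBasedAclSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const rateRule = new WafregionalRateBasedRule(this, "rate_rule", {
+      metricName: "tfRateRule",
+      name: "tfRateRule",
+      rateKey: "IP", // requests are counted per originating IP address
+      rateLimit: 2000, // maximum requests allowed in any 5-minute span
+    });
+    new WafregionalWebAcl(this, "acl", {
+      defaultAction: {
+        type: "ALLOW",
+      },
+      metricName: "tfWebACL",
+      name: "tfWebACL",
+      rule: [
+        {
+          action: {
+            type: "BLOCK",
+          },
+          priority: 1,
+          ruleId: rateRule.id,
+          type: "RATE_BASED", // must match the referenced rule's kind
+        },
+      ],
+    });
+  }
+}
+```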
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `arn` - Amazon Resource Name (ARN) of the WAF Regional WebACL. +* `id` - The ID of the WAF Regional WebACL. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Web ACL using the id. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Regional Web ACL using the id. For example: + +```console +% terraform import aws_wafregional_web_acl.wafacl a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/wafregional_web_acl_association.html.markdown b/website/docs/cdktf/typescript/r/wafregional_web_acl_association.html.markdown new file mode 100644 index 00000000000..a395d475564 --- /dev/null +++ b/website/docs/cdktf/typescript/r/wafregional_web_acl_association.html.markdown @@ -0,0 +1,241 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_web_acl_association" +description: |- + Manages an association with WAF Regional Web ACL +--- + + + +# Resource: aws_wafregional_web_acl_association + +Manages an association with WAF Regional Web ACL. + +-> **Note:** An Application Load Balancer can only be associated with one WAF Regional WebACL. + +## Example Usage + +### Application Load Balancer Association + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, propertyAccess, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Alb } from "./.gen/providers/aws/alb"; +import { DataAwsAvailabilityZones } from "./.gen/providers/aws/data-aws-availability-zones"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +import { WafregionalIpset } from "./.gen/providers/aws/wafregional-ipset"; +import { WafregionalRule } from "./.gen/providers/aws/wafregional-rule"; +import { WafregionalWebAcl } from "./.gen/providers/aws/wafregional-web-acl"; +import { WafregionalWebAclAssociation } from "./.gen/providers/aws/wafregional-web-acl-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const foo = new Vpc(this, "foo", { + cidrBlock: "10.1.0.0/16", + }); + const ipset = new WafregionalIpset(this, "ipset", { + ipSetDescriptor: [ + { + type: "IPV4", + value: "192.0.7.0/24", + }, + ], + name: "tfIPSet", + }); + const awsWafregionalRuleFoo = new WafregionalRule(this, "foo_2", { + metricName: "tfWAFRule", + name: "tfWAFRule", + predicate: [ + { + dataId: ipset.id, + negated: false, + type: "IPMatch", + }, + ], + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsWafregionalRuleFoo.overrideLogicalId("foo"); + const awsWafregionalWebAclFoo = new WafregionalWebAcl(this, "foo_3", { + defaultAction: { + type: "ALLOW", + }, + metricName: "foo", + name: "foo", + rule: [ + { + action: { + type: "BLOCK", + }, + priority: 1, + ruleId: Token.asString(awsWafregionalRuleFoo.id), + }, + ], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWafregionalWebAclFoo.overrideLogicalId("foo"); + const available = new DataAwsAvailabilityZones(this, "available", {}); + const bar = new Subnet(this, "bar", { + availabilityZone: Token.asString(propertyAccess(available.names, ["1"])), + cidrBlock: "10.1.2.0/24", + vpcId: foo.id, + }); + const awsSubnetFoo = new Subnet(this, "foo_6", { + availabilityZone: Token.asString(propertyAccess(available.names, ["0"])), + cidrBlock: "10.1.1.0/24", + vpcId: foo.id, + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsSubnetFoo.overrideLogicalId("foo"); + const awsAlbFoo = new Alb(this, "foo_7", { + internal: true, + subnets: [Token.asString(awsSubnetFoo.id), bar.id], + }); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsAlbFoo.overrideLogicalId("foo"); + const awsWafregionalWebAclAssociationFoo = new WafregionalWebAclAssociation( + this, + "foo_8", + { + resourceArn: Token.asString(awsAlbFoo.arn), + webAclId: Token.asString(awsWafregionalWebAclFoo.id), + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWafregionalWebAclAssociationFoo.overrideLogicalId("foo"); + } +} + +``` + +### API Gateway Association + +```terraform +resource "aws_wafregional_ipset" "ipset" { + name = "tfIPSet" + + ip_set_descriptor { + type = "IPV4" + value = "192.0.7.0/24" + } +} + +resource "aws_wafregional_rule" "foo" { + name = "tfWAFRule" + metric_name = "tfWAFRule" + + predicate { + data_id = aws_wafregional_ipset.ipset.id + negated = false + type = "IPMatch" + } +} + +resource "aws_wafregional_web_acl" "foo" { + name = "foo" + metric_name = "foo" + + default_action { + type = "ALLOW" + } + + rule { + action { + type = "BLOCK" + } + + priority = 1 + rule_id = aws_wafregional_rule.foo.id + } +} + +resource "aws_api_gateway_rest_api" "example" { + body = jsonencode({ + openapi = "3.0.1" + info = { + title = "example" + version = "1.0" + } + paths = { + "/path1" = { + get = { + x-amazon-apigateway-integration = { + httpMethod = "GET" + payloadFormatVersion = "1.0" + type = "HTTP_PROXY" + uri = "https://ip-ranges.amazonaws.com/ip-ranges.json" + } + } + } + } + }) + + name = "example" +} + +resource "aws_api_gateway_deployment" "example" { + rest_api_id = aws_api_gateway_rest_api.example.id + + triggers = { + redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body)) + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_api_gateway_stage" "example" { + deployment_id = aws_api_gateway_deployment.example.id + rest_api_id = aws_api_gateway_rest_api.example.id + stage_name = "example" +} + +resource "aws_wafregional_web_acl_association" "association" { + resource_arn = aws_api_gateway_stage.example.arn + web_acl_id = aws_wafregional_web_acl.foo.id +} +``` + +## Argument Reference + +This resource supports the following 
arguments: + +* `webAclId` - (Required) The ID of the WAF Regional WebACL to create an association. +* `resourceArn` - (Required) ARN of the resource to associate with. For example, an Application Load Balancer or API Gateway Stage. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - The ID of the association + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAF Regional Web ACL Association using their `webAclId:resourceArn`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAF Regional Web ACL Association using their `webAclId:resourceArn`. For example: + +```console +% terraform import aws_wafregional_web_acl_association.foo web_acl_id:resource_arn +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/wafregional_xss_match_set.html.markdown b/website/docs/cdktf/typescript/r/wafregional_xss_match_set.html.markdown new file mode 100644 index 00000000000..2af07b9550d --- /dev/null +++ b/website/docs/cdktf/typescript/r/wafregional_xss_match_set.html.markdown @@ -0,0 +1,98 @@ +--- +subcategory: "WAF Classic Regional" +layout: "aws" +page_title: "AWS: aws_wafregional_xss_match_set" +description: |- + Provides an AWS WAF Regional XSS Match Set resource for use with ALB. +--- + + + +# Resource: aws_wafregional_xss_match_set + +Provides a WAF Regional XSS Match Set Resource for use with Application Load Balancer. + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WafregionalXssMatchSet } from "./.gen/providers/aws/wafregional-xss-match-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WafregionalXssMatchSet(this, "xss_match_set", { + name: "xss_match_set", + xssMatchTuple: [ + { + fieldToMatch: { + type: "URI", + }, + textTransformation: "NONE", + }, + { + fieldToMatch: { + type: "QUERY_STRING", + }, + textTransformation: "NONE", + }, + ], + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) The name of the set +* `xssMatchTuple` - (Optional) The parts of web requests that you want to inspect for cross-site scripting attacks. + +### Nested fields + +#### `xssMatchTuple` + +* `fieldToMatch` - (Required) Specifies where in a web request to look for cross-site scripting attacks. +* `textTransformation` - (Required) Which text transformation, if any, to perform on the web request before inspecting the request for cross-site scripting attacks. + +#### `fieldToMatch` + +* `data` - (Optional) When the value of `type` is `header`, enter the name of the header that you want the WAF to search, for example, `userAgent` or `referer`. If the value of `type` is any other value, omit `data`. 
+* `type` - (Required) The part of the web request that you want AWS WAF to search for a specified string. E.g., `header` or `method`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the Regional WAF XSS Match Set.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import AWS WAF Regional XSS Match using the `id`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import AWS WAF Regional XSS Match using the `id`. For example:
+
+```console
+% terraform import aws_wafregional_xss_match_set.example 12345abcde
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafv2_ip_set.html.markdown b/website/docs/cdktf/typescript/r/wafv2_ip_set.html.markdown
new file mode 100644
index 00000000000..8da07cede24
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafv2_ip_set.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_ip_set"
+description: |-
+  Provides an AWS WAFv2 IP Set resource.
+---
+
+
+
+# Resource: aws_wafv2_ip_set
+
+Provides a WAFv2 IP Set Resource.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Wafv2IpSet } from "./.gen/providers/aws/wafv2-ip-set";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Wafv2IpSet(this, "example", {
+      addresses: ["1.2.3.4/32", "5.6.7.8/32"],
+      description: "Example IP set",
+      ipAddressVersion: "IPV4",
+      name: "example",
+      scope: "REGIONAL",
+      tags: {
+        Tag1: "Value1",
+        Tag2: "Value2",
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A friendly name of the IP set.
+* `description` - (Optional) A friendly description of the IP set.
+* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `cloudfront` or `regional`. To work with CloudFront, you must also specify the Region US East (N. Virginia); a minimal sketch follows this list.
+* `ipAddressVersion` - (Required) Specify IPV4 or IPV6. Valid values are `ipv4` or `ipv6`.
+* `addresses` - (Required) Contains an array of strings that specifies zero or more IP addresses or blocks of IP addresses. All addresses must be specified using Classless Inter-Domain Routing (CIDR) notation. WAF supports all IPv4 and IPv6 CIDR ranges except for `/0`.
+* `tags` - (Optional) An array of key:value pairs to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
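+
+Because `cloudfront`-scoped sets must live in US East (N. Virginia), a minimal sketch (assuming the generated `AwsProvider` binding from `./.gen/providers/aws/provider`) pins the provider region before creating the set:
+
+```typescript
+// Sketch: a CloudFront-scoped IP set; CLOUDFRONT scope requires us-east-1.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { AwsProvider } from "./.gen/providers/aws/provider";
+import { Wafv2IpSet } from "./.gen/providers/aws/wafv2-ip-set";
+class CloudfrontIpSetSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new AwsProvider(this, "aws", {
+      region: "us-east-1", // required for scope: "CLOUDFRONT"
+    });
+    new Wafv2IpSet(this, "cloudfront_example", {
+      addresses: ["1.2.3.4/32"],
+      ipAddressVersion: "IPV4",
+      name: "cloudfront-example",
+      scope: "CLOUDFRONT",
+    });
+  }
+}
+```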
+ +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `id` - A unique identifier for the IP set. +* `arn` - The Amazon Resource Name (ARN) of the IP set. +* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block). + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 IP Sets using `id/name/scope`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WAFv2 IP Sets using `id/name/scope`. For example: + +```console +% terraform import aws_wafv2_ip_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/wafv2_regex_pattern_set.html.markdown b/website/docs/cdktf/typescript/r/wafv2_regex_pattern_set.html.markdown new file mode 100644 index 00000000000..4a6c268e8f8 --- /dev/null +++ b/website/docs/cdktf/typescript/r/wafv2_regex_pattern_set.html.markdown @@ -0,0 +1,95 @@ +--- +subcategory: "WAF" +layout: "aws" +page_title: "AWS: aws_wafv2_regex_pattern_set" +description: |- + Provides an AWS WAFv2 Regex Pattern Set resource. +--- + + + +# Resource: aws_wafv2_regex_pattern_set + +Provides an AWS WAFv2 Regex Pattern Set Resource + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Wafv2RegexPatternSet } from "./.gen/providers/aws/wafv2-regex-pattern-set"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Wafv2RegexPatternSet(this, "example", { + description: "Example regex pattern set", + name: "example", + regularExpression: [ + { + regexString: "one", + }, + { + regexString: "two", + }, + ], + scope: "REGIONAL", + tags: { + Tag1: "Value1", + Tag2: "Value2", + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `name` - (Required) A friendly name of the regular expression pattern set. +* `description` - (Optional) A friendly description of the regular expression pattern set. +* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `cloudfront` or `regional`. To work with CloudFront, you must also specify the region `usEast1` (N. Virginia) on the AWS provider. +* `regularExpression` - (Optional) One or more blocks of regular expression patterns that you want AWS WAF to search for, such as `b[a@]dB[o0]t`. See [Regular Expression](#regular-expression) below for details. A maximum of 10 `regularExpression` blocks may be specified. +* `tags` - (Optional) An array of key:value pairs to associate with the resource. 
If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Regular Expression
+
+* `regexString` - (Required) The string representing the regular expression, see the AWS WAF [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-regex-pattern-set-creating.html) for more information.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - A unique identifier for the set.
+* `arn` - The Amazon Resource Name (ARN) that identifies the regex pattern set.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Regex Pattern Sets using `id/name/scope`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAFv2 Regex Pattern Sets using `id/name/scope`. For example:
+
+```console
+% terraform import aws_wafv2_regex_pattern_set.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafv2_rule_group.html.markdown b/website/docs/cdktf/typescript/r/wafv2_rule_group.html.markdown
new file mode 100644
index 00000000000..c9f0bb9a86e
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafv2_rule_group.html.markdown
@@ -0,0 +1,726 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_rule_group"
+description: |-
+  Creates a WAFv2 rule group resource.
+---
+
+
+
+# Resource: aws_wafv2_rule_group
+
+Creates a WAFv2 Rule Group resource.
+
+## Example Usage
+
+### Simple
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { Wafv2RuleGroup } from "./.gen/providers/aws/wafv2-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Wafv2RuleGroup(this, "example", { + capacity: 2, + name: "example-rule", + rule: [ + { + action: { + allow: {}, + }, + name: "rule-1", + priority: 1, + statement: { + geoMatchStatement: { + countryCodes: ["US", "NL"], + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-rule-metric-name", + sampledRequestsEnabled: false, + }, + }, + ], + scope: "REGIONAL", + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-metric-name", + sampledRequestsEnabled: false, + }, + }); + } +} + +``` + +### Complex + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Wafv2IpSet } from "./.gen/providers/aws/wafv2-ip-set"; +import { Wafv2RegexPatternSet } from "./.gen/providers/aws/wafv2-regex-pattern-set"; +import { Wafv2RuleGroup } from "./.gen/providers/aws/wafv2-rule-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const test = new Wafv2IpSet(this, "test", { + addresses: ["1.1.1.1/32", "2.2.2.2/32"], + ipAddressVersion: "IPV4", + name: "test", + scope: "REGIONAL", + }); + const awsWafv2RegexPatternSetTest = new Wafv2RegexPatternSet( + this, + "test_1", + { + name: "test", + regularExpression: [ + { + regexString: "one", + }, + ], + scope: "REGIONAL", + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsWafv2RegexPatternSetTest.overrideLogicalId("test"); + new Wafv2RuleGroup(this, "example", { + capacity: 500, + captcha_config: [ + { + immunity_time_property: [ + { + immunity_time: 120, + }, + ], + }, + ], + description: "An rule group containing all statements", + name: "complex-example", + rule: [ + { + action: { + block: {}, + }, + name: "rule-1", + priority: 1, + statement: { + notStatement: { + statement: [ + { + andStatement: { + statement: [ + { + geoMatchStatement: { + countryCodes: ["US"], + }, + }, + { + byteMatchStatement: { + fieldToMatch: { + allQueryArguments: {}, + }, + positionalConstraint: "CONTAINS", + searchString: "word", + textTransformation: [ + { + priority: 5, + type: "CMD_LINE", + }, + { + priority: 2, + type: "LOWERCASE", + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "rule-1", + sampledRequestsEnabled: false, + }, + }, + { + action: { + count: {}, + }, + captchaConfig: { + immunityTimeProperty: { + immunityTime: 240, + }, + }, + name: "rule-2", + priority: 2, + statement: { + orStatement: { + statement: [ + { + regexMatchStatement: { + fieldToMatch: { + singleHeader: { + name: "user-agent", + }, + }, + regexString: "[a-z]([a-z0-9_-]*[a-z0-9])?", + textTransformation: [ + { + priority: 6, + type: "NONE", + }, + ], + }, + }, + { + sqliMatchStatement: { + fieldToMatch: { + body: {}, + }, + textTransformation: [ + { + priority: 5, + type: "URL_DECODE", + }, + { + priority: 4, + type: "HTML_ENTITY_DECODE", + }, + { + priority: 3, + type: "COMPRESS_WHITE_SPACE", + }, + ], + }, + }, + { + xssMatchStatement: { + fieldToMatch: { + method: {}, + }, + textTransformation: [ + { + priority: 2, + type: "NONE", + }, + ], + }, + }, + ], + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "rule-2", + sampledRequestsEnabled: false, + }, + }, + { + action: { + block: {}, + }, + name: "rule-3", + priority: 3, + statement: { + sizeConstraintStatement: { + comparisonOperator: "GT", + fieldToMatch: { + singleQueryArgument: { + name: "username", + }, + }, + size: 100, + textTransformation: [ + { + priority: 5, + type: "NONE", + }, + ], + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "rule-3", + sampledRequestsEnabled: false, + }, + }, + { + action: { + block: {}, + }, + name: "rule-4", + priority: 4, + statement: { + orStatement: { + statement: [ + { + ipSetReferenceStatement: { + arn: test.arn, + }, + }, + { + regexPatternSetReferenceStatement: { + arn: Token.asString(awsWafv2RegexPatternSetTest.arn), + fieldToMatch: { + singleHeader: { + name: "referer", + }, + }, + textTransformation: [ + { + priority: 2, + type: "NONE", + }, + ], + }, + }, + ], + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "rule-4", + sampledRequestsEnabled: false, + }, + }, + ], + scope: "REGIONAL", + tags: { + Code: "123456", + Name: "example-and-statement", + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-metric-name", + sampledRequestsEnabled: false, + }, + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `capacity` - (Required, Forces new resource) The web ACL capacity units (WCUs) required for this rule group. 
See [here](https://docs.aws.amazon.com/waf/latest/APIReference/API_CreateRuleGroup.html#API_CreateRuleGroup_RequestSyntax) for general information and [here](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statements-list.html) for capacity specific information. +* `customResponseBody` - (Optional) Defines custom response bodies that can be referenced by `customResponse` actions. See [Custom Response Body](#custom-response-body) below for details. +* `description` - (Optional) A friendly description of the rule group. +* `name` - (Required, Forces new resource) A friendly name of the rule group. +* `rule` - (Optional) The rule blocks used to identify the web requests that you want to `allow`, `block`, or `count`. See [Rules](#rules) below for details. +* `scope` - (Required, Forces new resource) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `cloudfront` or `regional`. To work with CloudFront, you must also specify the region `usEast1` (N. Virginia) on the AWS provider. +* `tags` - (Optional) An array of key:value pairs to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `visibilityConfig` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [Visibility Configuration](#visibility-configuration) below for details. + +### Custom Response Body + +Each `customResponseBody` block supports the following arguments: + +* `key` - (Required) A unique key identifying the custom response body. This is referenced by the `customResponseBodyKey` argument in the [Custom Response](#custom-response) block. +* `content` - (Required) The payload of the custom response. +* `contentType` - (Required) The type of content in the payload that you are defining in the `content` argument. Valid values are `textPlain`, `textHtml`, or `applicationJson`. + +### Rules + +Each `rule` supports the following arguments: + +* `action` - (Required) The action that AWS WAF should take on a web request when it matches the rule's statement. Settings at the `awsWafv2WebAcl` level can override the rule action setting. See [Action](#action) below for details. +* `captchaConfig` - (Optional) Specifies how AWS WAF should handle CAPTCHA evaluations. See [Captcha Configuration](#captcha-configuration) below for details. +* `name` - (Required, Forces new resource) A friendly name of the rule. +* `priority` - (Required) If you define more than one Rule in a WebACL, AWS WAF evaluates each request against the `rules` in order based on the value of `priority`. AWS WAF processes rules with lower priority first. +* `ruleLabel` - (Optional) Labels to apply to web requests that match the rule match statement. See [Rule Label](#rule-label) below for details. +* `statement` - (Required) The AWS WAF processing statement for the rule, for example `byteMatchStatement` or `geoMatchStatement`. See [Statement](#statement) below for details. +* `visibilityConfig` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [Visibility Configuration](#visibility-configuration) below for details. + +### Action + +The `action` block supports the following arguments: + +~> **NOTE:** One of `allow`, `block`, or `count`, is required when specifying an `action`. 
+
+* `allow` - (Optional) Instructs AWS WAF to allow the web request. See [Allow](#allow) below for details.
+* `block` - (Optional) Instructs AWS WAF to block the web request. See [Block](#block) below for details.
+* `captcha` - (Optional) Instructs AWS WAF to run a `captcha` check against the web request. See [Captcha](#captcha) below for details.
+* `challenge` - (Optional) Instructs AWS WAF to run a check against the request to verify that the request is coming from a legitimate client session. See [Challenge](#challenge) below for details.
+* `count` - (Optional) Instructs AWS WAF to count the web request and allow it. See [Count](#count) below for details.
+
+### Allow
+
+The `allow` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Block
+
+The `block` block supports the following arguments:
+
+* `customResponse` - (Optional) Defines a custom response for the web request. See [Custom Response](#custom-response) below for details.
+
+### Captcha
+
+The `captcha` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Challenge
+
+The `challenge` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Count
+
+The `count` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [Custom Request Handling](#custom-request-handling) below for details.
+
+### Custom Request Handling
+
+The `customRequestHandling` block supports the following arguments:
+
+* `insertHeader` - (Required) The `insertHeader` blocks used to define HTTP headers added to the request. See [Custom HTTP Header](#custom-http-header) below for details.
+
+### Custom Response
+
+The `customResponse` block supports the following arguments:
+
+* `customResponseBodyKey` - (Optional) References the response body that you want AWS WAF to return to the web request client. This must reference a `key` defined in a `customResponseBody` block of this resource.
+* `responseCode` - (Required) The HTTP status code to return to the client.
+* `responseHeader` - (Optional) The `responseHeader` blocks used to define the HTTP response headers added to the response. See [Custom HTTP Header](#custom-http-header) below for details.
+
+### Custom HTTP Header
+
+Each block supports the following arguments. Duplicate header names are not allowed:
+
+* `name` - The name of the custom header. For custom request header insertion, when AWS WAF inserts the header into the request, it prefixes this name with `xAmznWaf` to avoid confusion with the headers that are already in the request. For example, for the header name `sample`, AWS WAF inserts the header `xAmznWafSample`.
+* `value` - The value of the custom header.
+
+### Rule Label
+
+Each block supports the following arguments:
+
+* `name` - The label string.
+
+### Statement
+
+The processing guidance for a Rule, used by AWS WAF to determine whether a web request matches the rule. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statements-list.html) for more information.
+
+-> **NOTE:** Although the `statement` block is recursive, currently only 3 levels are supported.
+
+The `statement` block supports the following arguments:
+
+* `andStatement` - (Optional) A logical rule statement used to combine other rule statements with AND logic. See [AND Statement](#and-statement) below for details.
+* `byteMatchStatement` - (Optional) A rule statement that defines a string match search for AWS WAF to apply to web requests. See [Byte Match Statement](#byte-match-statement) below for details.
+* `geoMatchStatement` - (Optional) A rule statement used to identify web requests based on country of origin. See [GEO Match Statement](#geo-match-statement) below for details.
+* `labelMatchStatement` - (Optional) A rule statement that defines a string match search against labels that have been added to the web request by rules that have already run in the web ACL. See [Label Match Statement](#label-match-statement) below for details.
+* `ipSetReferenceStatement` - (Optional) A rule statement used to detect web requests coming from particular IP addresses or address ranges. See [IP Set Reference Statement](#ip-set-reference-statement) below for details.
+* `notStatement` - (Optional) A logical rule statement used to negate the results of another rule statement. See [NOT Statement](#not-statement) below for details.
+* `orStatement` - (Optional) A logical rule statement used to combine other rule statements with OR logic. See [OR Statement](#or-statement) below for details.
+* `rateBasedStatement` - (Optional) A rate-based rule tracks the rate of requests for each originating IP address, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any 5-minute time span. This statement cannot be nested. See [Rate Based Statement](#rate-based-statement) below for details.
+* `regexMatchStatement` - (Optional) A rule statement used to search web request components for a match against a single regular expression. See [Regex Match Statement](#regex-match-statement) below for details.
+* `regexPatternSetReferenceStatement` - (Optional) A rule statement used to search web request components for matches with regular expressions. See [Regex Pattern Set Reference Statement](#regex-pattern-set-reference-statement) below for details.
+* `sizeConstraintStatement` - (Optional) A rule statement that compares a number of bytes against the size of a request component, using a comparison operator, such as greater than (>) or less than (<). See [Size Constraint Statement](#size-constraint-statement) below for more details.
+* `sqliMatchStatement` - (Optional) An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. See [SQL Injection Match Statement](#sql-injection-match-statement) below for details.
+* `xssMatchStatement` - (Optional) A rule statement that defines a cross-site scripting (XSS) match search for AWS WAF to apply to web requests. See [XSS Match Statement](#xss-match-statement) below for details.
+
+### AND Statement
+
+A logical rule statement used to combine other rule statements with `and` logic. You provide more than one `statement` within the `andStatement`.
+
+The `andStatement` block supports the following arguments:
+
+* `statement` - (Required) The statements to combine with `and` logic. You can use any statements that can be nested. See [Statement](#statement) above for details.
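+
+As an illustration of the nesting shape only (property names are taken from the Complex example above; the search string is a placeholder), a `statement` that ANDs a geo match with a byte match might look like:
+
+```typescript
+// Shape sketch of an andStatement combining two nestable statements.
+export const exampleStatement = {
+  andStatement: {
+    statement: [
+      {
+        geoMatchStatement: {
+          countryCodes: ["US", "NL"],
+        },
+      },
+      {
+        byteMatchStatement: {
+          fieldToMatch: {
+            uriPath: {},
+          },
+          positionalConstraint: "STARTS_WITH",
+          searchString: "/admin", // placeholder path
+          textTransformation: [
+            {
+              priority: 0,
+              type: "NONE",
+            },
+          ],
+        },
+      },
+    ],
+  },
+};
+```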
+ +### Byte Match Statement + +The byte match statement provides the bytes to search for, the location in requests that you want AWS WAF to search, and other settings. The bytes to search for are typically a string that corresponds with ASCII characters. + +The `byteMatchStatement` block supports the following arguments: + +* `fieldToMatch` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details. +* `positionalConstraint` - (Required) The area within the portion of a web request that you want AWS WAF to search for `searchString`. Valid values include the following: `exactly`, `startsWith`, `endsWith`, `contains`, `containsWord`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchStatement.html) for more information. +* `searchString` - (Required) A string value that you want AWS WAF to search for. AWS WAF searches only in the part of web requests that you designate for inspection in `fieldToMatch`. The maximum length of the value is 50 bytes. +* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. + At least one required. + See [Text Transformation](#text-transformation) below for details. + +### GEO Match Statement + +The `geoMatchStatement` block supports the following arguments: + +* `countryCodes` - (Required) An array of two-character country codes, for example, [ "US", "CN" ], from the alpha-2 country ISO codes of the `ISO 3166` international standard. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchStatement.html) for valid values. +* `forwardedIpConfig` - (Optional) The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [Forwarded IP Config](#forwarded-ip-config) below for details. + +### Label Match Statement + +The `labelMatchStatement` block supports the following arguments: + +* `scope` - (Required) Specify whether you want to match using the label name or just the namespace. Valid values are `label` or `namespace`. +* `key` - (Required) The string to match against. + +### IP Set Reference Statement + +A rule statement used to detect web requests coming from particular IP addresses or address ranges. To use this, create an `awsWafv2IpSet` that specifies the addresses you want to detect, then use the `arn` of that set in this statement. + +The `ipSetReferenceStatement` block supports the following arguments: + +* `arn` - (Required) The Amazon Resource Name (ARN) of the IP Set that this statement references. +* `ipSetForwardedIpConfig` - (Optional) The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [IPSet Forwarded IP Config](#ipset-forwarded-ip-config) below for more details. + +### NOT Statement + +A logical rule statement used to negate the results of another rule statement. You provide one `statement` within the `notStatement`. + +The `notStatement` block supports the following arguments: + +* `statement` - (Required) The statement to negate. You can use any statement that can be nested. See [Statement](#statement) above for details. + +### OR Statement + +A logical rule statement used to combine other rule statements with `or` logic. You provide more than one `statement` within the `orStatement`. 
+
+The `orStatement` block supports the following arguments:
+
+* `statement` - (Required) The statements to combine with `or` logic. You can use any statements that can be nested. See [Statement](#statement) above for details.
+
+### Rate Based Statement
+
+A rate-based rule tracks the rate of requests for each originating IP address, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any 5-minute time span. You can use this to put a temporary block on requests from an IP address that is sending excessive requests. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedStatement.html) for more information.
+
+You can't nest a `rateBasedStatement`, for example for use inside a `notStatement` or `orStatement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `rateBasedStatement` block supports the following arguments:
+
+* `aggregateKeyType` - (Optional) Setting that indicates how to aggregate the request counts. Valid values include: `constant`, `forwardedIp` or `ip`. Default: `ip`.
+* `forwardedIpConfig` - (Optional) The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. If `aggregateKeyType` is set to `forwardedIp`, this block is required. See [Forwarded IP Config](#forwarded-ip-config) below for details.
+* `limit` - (Required) The limit on requests per 5-minute period for a single originating IP address.
+* `scopeDownStatement` - (Optional) An optional nested statement that narrows the scope of the rate-based statement to matching web requests. This can be any nestable statement, and you can nest statements at any level below this scope-down statement. See [Statement](#statement) above for details. If `aggregateKeyType` is set to `constant`, this block is required.
+
+### Regex Match Statement
+
+A rule statement used to search web request components for a match against a single regular expression.
+
+The `regexMatchStatement` block supports the following arguments:
+
+* `regexString` - (Required) The string representing the regular expression. Minimum of `1` and maximum of `512` characters.
+* `fieldToMatch` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection.
+  At least one required.
+  See [Text Transformation](#text-transformation) below for details.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. + At least one required. + See [Text Transformation](#text-transformation) below for details. + +### Size Constraint Statement + +A rule statement that uses a comparison operator to compare a number of bytes against the size of a request component. AWS WAFv2 inspects up to the first 8192 bytes (8 KB) of a request body, and when inspecting the request URI Path, the slash `/` in +the URI counts as one character. + +The `sizeConstraintStatement` block supports the following arguments: + +* `comparisonOperator` - (Required) The operator to use to compare the request part to the size setting. Valid values include: `eq`, `ne`, `le`, `lt`, `ge`, or `gt`. +* `fieldToMatch` - (Optional) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details. +* `size` - (Required) The size, in bytes, to compare to the request part, after any transformations. Valid values are integers between 0 and 21474836480, inclusive. +* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. + At least one required. + See [Text Transformation](#text-transformation) below for details. + +### SQL Injection Match Statement + +An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. Later in the process, when you create a web ACL, you specify whether to allow or block requests that appear to contain malicious SQL code. + +The `sqliMatchStatement` block supports the following arguments: + +* `fieldToMatch` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details. +* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. + At least one required. + See [Text Transformation](#text-transformation) below for details. + +### XSS Match Statement + +The XSS match statement provides the location in requests that you want AWS WAF to search and text transformations to use on the search area before AWS WAF searches for character sequences that are likely to be malicious strings. + +The `xssMatchStatement` block supports the following arguments: + +* `fieldToMatch` - (Required) The part of a web request that you want AWS WAF to inspect. See [Field to Match](#field-to-match) below for details. +* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. + At least one required. + See [Text Transformation](#text-transformation) below for details. + +### Field to Match + +The part of a web request that you want AWS WAF to inspect. Include the single `fieldToMatch` type that you want to inspect, with additional specifications as needed, according to the type. You specify a single request component in `fieldToMatch` for each rule statement that requires it. To inspect more than one component of a web request, create a separate rule statement for each component. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-fields.html#waf-rule-statement-request-component) for more details. 
+
+The `fieldToMatch` block supports the following arguments:
+
+~> **NOTE:** Only one of `allQueryArguments`, `body`, `cookies`, `headers`, `jsonBody`, `method`, `queryString`, `singleHeader`, `singleQueryArgument`, or `uriPath` can be specified. An empty configuration block `{}` should be used when specifying `allQueryArguments`, `body`, `method`, or `queryString` attributes.
+
+* `allQueryArguments` - (Optional) Inspect all query arguments.
+* `body` - (Optional) Inspect the request body, which immediately follows the request headers.
+* `cookies` - (Optional) Inspect the cookies in the web request. See [Cookies](#cookies) below for details.
+* `headers` - (Optional) Inspect the request headers. See [Headers](#headers) below for details.
+* `jsonBody` - (Optional) Inspect the request body as JSON. See [JSON Body](#json-body) for details.
+* `method` - (Optional) Inspect the HTTP method. The method indicates the type of operation that the request is asking the origin to perform.
+* `queryString` - (Optional) Inspect the query string. This is the part of a URL that appears after a `?` character, if any.
+* `singleHeader` - (Optional) Inspect a single header. See [Single Header](#single-header) below for details.
+* `singleQueryArgument` - (Optional) Inspect a single query argument. See [Single Query Argument](#single-query-argument) below for details.
+* `uriPath` - (Optional) Inspect the request URI path. This is the part of a web request that identifies a resource, for example, `/images/daily-ad.jpg`.
+
+### Forwarded IP Config
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name. If the specified header isn't present in the request, AWS WAFv2 doesn't apply the rule to the web request at all. AWS WAFv2 only evaluates the first IP address found in the specified HTTP header.
+
+The `forwardedIpConfig` block supports the following arguments:
+
+* `fallbackBehavior` - (Required) - The match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `headerName` - (Required) - The name of the HTTP header to use for the IP address.
+
+### IPSet Forwarded IP Config
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name.
+
+The `ipSetForwardedIpConfig` block supports the following arguments:
+
+* `fallbackBehavior` - (Required) - The match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `headerName` - (Required) - The name of the HTTP header to use for the IP address.
+* `position` - (Required) - The position in the header to search for the IP address. Valid values include: `FIRST`, `LAST`, or `ANY`. If `ANY` is specified and the header contains more than 10 IP addresses, AWS WAFv2 inspects the last 10.
+
+### Headers
+
+Inspect the request headers.
+
+The `headers` block supports the following arguments:
+
+* `matchPattern` - (Required) The filter to use to identify the subset of headers to inspect in a web request. The `matchPattern` block supports only one of the following arguments:
+    * `all` - An empty configuration block that is used for inspecting all headers.
+    * `includedHeaders` - An array of strings that will be used for inspecting headers that have a key that matches one of the provided values.
+    * `excludedHeaders` - An array of strings that will be used for inspecting headers that do not have a key that matches one of the provided values.
+* `matchScope` - (Required) The parts of the headers to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values include the following: `ALL`, `KEY`, `VALUE`.
+* `oversizeHandling` - (Required) Oversize handling tells AWS WAF what to do with a web request when the request component that the rule inspects is over the limits. Valid values include the following: `CONTINUE`, `MATCH`, `NO_MATCH`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-oversize-handling.html) for more information.
+
+### JSON Body
+
+The `jsonBody` block supports the following arguments:
+
+* `invalidFallbackBehavior` - (Optional) What to do when JSON parsing fails. Defaults to evaluating up to the first parsing failure. Valid values are `EVALUATE_AS_STRING`, `MATCH` and `NO_MATCH`.
+* `matchPattern` - (Required) The patterns to look for in the JSON body. You must specify exactly one setting: either `all` or `includedPaths`. See [JsonMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_JsonMatchPattern.html) for details.
+* `matchScope` - (Required) The parts of the JSON to match against using the `matchPattern`. Valid values are `ALL`, `KEY` and `VALUE`.
+* `oversizeHandling` - (Optional) What to do if the body is larger than can be inspected. Valid values are `CONTINUE` (default), `MATCH` and `NO_MATCH`.
+
+### Single Header
+
+Inspect a single header. Provide the name of the header to inspect, for example, `User-Agent` or `Referer` (provided as lowercase strings).
+
+The `singleHeader` block supports the following arguments:
+
+* `name` - (Optional) The name of the header to inspect. This setting must be provided in lowercase characters.
+
+### Single Query Argument
+
+Inspect a single query argument. Provide the name of the query argument to inspect, such as `UserName` or `SalesRegion` (provided as lowercase strings).
+
+The `singleQueryArgument` block supports the following arguments:
+
+* `name` - (Optional) The name of the query argument to inspect. This setting must be provided in lowercase characters.
+
+### Cookies
+
+Inspect the cookies in the web request. You can specify the parts of the cookies to inspect and you can narrow the set of cookies to inspect by including or excluding specific keys. This is used to indicate the web request component to inspect, in the [FieldToMatch](https://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) specification.
+
+The `cookies` block supports the following arguments:
+
+* `matchPattern` - (Required) The filter to use to identify the subset of cookies to inspect in a web request. You must specify exactly one setting: either `all`, `includedCookies` or `excludedCookies`. More details: [CookieMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_CookieMatchPattern.html)
+* `matchScope` - (Required) The parts of the cookies to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values: `ALL`, `KEY`, `VALUE`.
+* `oversizeHandling` - (Required) What AWS WAF should do if the cookies of the request are larger than AWS WAF can inspect. AWS WAF does not support inspecting the entire contents of request cookies when they exceed 8 KB (8192 bytes) or 200 total cookies. The underlying host service forwards a maximum of 200 cookies and at most 8 KB of cookie contents to AWS WAF. Valid values: `CONTINUE`, `MATCH`, `NO_MATCH`.
+
+### Text Transformation
+
+The `textTransformation` block supports the following arguments:
+
+* `priority` - (Required) The relative processing order for multiple transformations that are defined for a rule statement. AWS WAF processes all transformations, from lowest priority to highest, before inspecting the transformed content.
+* `type` - (Required) The transformation to apply; please refer to the Text Transformation [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_TextTransformation.html) for more details.
+
+### Visibility Configuration
+
+The `visibilityConfig` block supports the following arguments:
+
+* `cloudwatchMetricsEnabled` - (Required) A boolean indicating whether the associated resource sends metrics to CloudWatch. For the list of available metrics, see [AWS WAF Metrics](https://docs.aws.amazon.com/waf/latest/developerguide/monitoring-cloudwatch.html#waf-metrics).
+* `metricName` - (Required, Forces new resource) A friendly name of the CloudWatch metric. The name can contain only alphanumeric characters (A-Z, a-z, 0-9), hyphen (-), and underscore (_), with length from one to 128 characters. It can't contain whitespace or metric names reserved for AWS WAF, for example `All` and `Default_Action`.
+* `sampledRequestsEnabled` - (Required) A boolean indicating whether AWS WAF should store a sampling of the web requests that match the rules. You can view the sampled requests through the AWS WAF console.
+
+### Captcha Configuration
+
+The `captchaConfig` block supports the following arguments:
+
+* `immunityTimeProperty` - (Optional) Defines custom immunity time. See [Immunity Time Property](#immunity-time-property) below for details.
+
+### Immunity Time Property
+
+The `immunityTimeProperty` block supports the following arguments:
+
+* `immunityTime` - (Optional) The amount of time, in seconds, that a CAPTCHA or challenge timestamp is considered valid by AWS WAF. The default setting is 300.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ID of the WAF rule group.
+* `arn` - The ARN of the WAF rule group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Rule Group using `id/name/scope`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAFv2 Rule Group using `id/name/scope`. For example:
+
+```console
+% terraform import aws_wafv2_rule_group.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafv2_web_acl.html.markdown b/website/docs/cdktf/typescript/r/wafv2_web_acl.html.markdown
new file mode 100644
index 00000000000..b4d100d9388
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafv2_web_acl.html.markdown
@@ -0,0 +1,908 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_web_acl"
+description: |-
+  Creates a WAFv2 Web ACL resource.
+---
+
+
+
+# Resource: aws_wafv2_web_acl
+
+Creates a WAFv2 Web ACL resource.
+
+~> **Note** In `fieldToMatch` blocks, _e.g._, in `byteMatchStatement`, the `body` block includes an optional argument `oversizeHandling`. AWS indicates this argument will be required starting February 2023. To avoid configurations breaking when that change happens, treat the `oversizeHandling` argument as **required** as soon as possible.
+
+## Example Usage
+
+This resource is based on `awsWafv2RuleGroup`; see the documentation of the `awsWafv2RuleGroup` resource for examples of the various available statements.
+
+### Managed Rule
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Wafv2WebAcl(this, "example", {
+      defaultAction: {
+        allow: {},
+      },
+      description: "Example of a managed rule.",
+      name: "managed-rule-example",
+      rule: [
+        {
+          name: "rule-1",
+          overrideAction: {
+            count: {},
+          },
+          priority: 1,
+          statement: {
+            managedRuleGroupStatement: {
+              name: "AWSManagedRulesCommonRuleSet",
+              ruleActionOverride: [
+                {
+                  actionToUse: {
+                    count: {},
+                  },
+                  name: "SizeRestrictions_QUERYSTRING",
+                },
+                {
+                  actionToUse: {
+                    count: {},
+                  },
+                  name: "NoUserAgent_HEADER",
+                },
+              ],
+              scopeDownStatement: {
+                geoMatchStatement: {
+                  countryCodes: ["US", "NL"],
+                },
+              },
+              vendorName: "AWS",
+            },
+          },
+          visibilityConfig: {
+            cloudwatchMetricsEnabled: false,
+            metricName: "friendly-rule-metric-name",
+            sampledRequestsEnabled: false,
+          },
+        },
+      ],
+      scope: "REGIONAL",
+      tags: {
+        Tag1: "Value1",
+        Tag2: "Value2",
+      },
+      tokenDomains: ["mywebsite.com", "myotherwebsite.com"],
+      visibilityConfig: {
+        cloudwatchMetricsEnabled: false,
+        metricName: "friendly-metric-name",
+        sampledRequestsEnabled: false,
+      },
+    });
+  }
+}
+
+```
+
+### Account Takeover Protection
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Wafv2WebAcl(this, "atp-example", { + defaultAction: { + allow: {}, + }, + description: "Example of a managed ATP rule.", + name: "managed-atp-example", + rule: [ + { + name: "atp-rule-1", + overrideAction: { + count: {}, + }, + priority: 1, + statement: { + managedRuleGroupStatement: { + managedRuleGroupConfigs: [ + { + awsManagedRulesAtpRuleSet: { + loginPath: "/api/1/signin", + requestInspection: { + passwordField: { + identifier: "/password", + }, + payloadType: "JSON", + usernameField: { + identifier: "/email", + }, + }, + responseInspection: { + statusCode: { + failureCodes: [Token.asNumber("403")], + successCodes: [Token.asNumber("200")], + }, + }, + }, + }, + ], + name: "AWSManagedRulesATPRuleSet", + vendorName: "AWS", + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-rule-metric-name", + sampledRequestsEnabled: false, + }, + }, + ], + scope: "CLOUDFRONT", + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-metric-name", + sampledRequestsEnabled: false, + }, + }); + } +} + +``` + +### Rate Based + +Rate-limit US and NL-based clients to 10,000 requests for every 5 minutes. + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Wafv2WebAcl(this, "example", { + defaultAction: { + allow: {}, + }, + description: "Example of a Cloudfront rate based statement.", + name: "rate-based-example", + rule: [ + { + action: { + block: {}, + }, + name: "rule-1", + priority: 1, + statement: { + rateBasedStatement: { + aggregateKeyType: "IP", + limit: 10000, + scopeDownStatement: { + geoMatchStatement: { + countryCodes: ["US", "NL"], + }, + }, + }, + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-rule-metric-name", + sampledRequestsEnabled: false, + }, + }, + ], + scope: "CLOUDFRONT", + tags: { + Tag1: "Value1", + Tag2: "Value2", + }, + visibilityConfig: { + cloudwatchMetricsEnabled: false, + metricName: "friendly-metric-name", + sampledRequestsEnabled: false, + }, + }); + } +} + +``` + +### Rule Group Reference + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { Wafv2RuleGroup } from "./.gen/providers/aws/wafv2-rule-group";
+import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const example = new Wafv2RuleGroup(this, "example", {
+      capacity: 10,
+      name: "example-rule-group",
+      rule: [
+        {
+          action: {
+            count: {},
+          },
+          name: "rule-1",
+          priority: 1,
+          statement: {
+            geoMatchStatement: {
+              countryCodes: ["NL"],
+            },
+          },
+          visibilityConfig: {
+            cloudwatchMetricsEnabled: false,
+            metricName: "friendly-rule-metric-name",
+            sampledRequestsEnabled: false,
+          },
+        },
+        {
+          action: {
+            allow: {},
+          },
+          name: "rule-to-exclude-a",
+          priority: 10,
+          statement: {
+            geoMatchStatement: {
+              countryCodes: ["US"],
+            },
+          },
+          visibilityConfig: {
+            cloudwatchMetricsEnabled: false,
+            metricName: "friendly-rule-metric-name",
+            sampledRequestsEnabled: false,
+          },
+        },
+        {
+          action: {
+            allow: {},
+          },
+          name: "rule-to-exclude-b",
+          priority: 15,
+          statement: {
+            geoMatchStatement: {
+              countryCodes: ["GB"],
+            },
+          },
+          visibilityConfig: {
+            cloudwatchMetricsEnabled: false,
+            metricName: "friendly-rule-metric-name",
+            sampledRequestsEnabled: false,
+          },
+        },
+      ],
+      scope: "REGIONAL",
+      visibilityConfig: {
+        cloudwatchMetricsEnabled: false,
+        metricName: "friendly-metric-name",
+        sampledRequestsEnabled: false,
+      },
+    });
+    new Wafv2WebAcl(this, "test", {
+      defaultAction: {
+        block: {},
+      },
+      name: "rule-group-example",
+      rule: [
+        {
+          name: "rule-1",
+          overrideAction: {
+            count: {},
+          },
+          priority: 1,
+          statement: {
+            ruleGroupReferenceStatement: {
+              arn: example.arn,
+              ruleActionOverride: [
+                {
+                  actionToUse: {
+                    count: {},
+                  },
+                  name: "rule-to-exclude-b",
+                },
+                {
+                  actionToUse: {
+                    count: {},
+                  },
+                  name: "rule-to-exclude-a",
+                },
+              ],
+            },
+          },
+          visibilityConfig: {
+            cloudwatchMetricsEnabled: false,
+            metricName: "friendly-rule-metric-name",
+            sampledRequestsEnabled: false,
+          },
+        },
+      ],
+      scope: "REGIONAL",
+      tags: {
+        Tag1: "Value1",
+        Tag2: "Value2",
+      },
+      visibilityConfig: {
+        cloudwatchMetricsEnabled: false,
+        metricName: "friendly-metric-name",
+        sampledRequestsEnabled: false,
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `associationConfig` - (Optional) Specifies custom configurations for the associations between the web ACL and protected resources. See [`associationConfig`](#association_config-block) below for details.
+* `customResponseBody` - (Optional) Defines custom response bodies that can be referenced by `customResponse` actions. See [`customResponseBody`](#custom_response_body-block) below for details.
+* `defaultAction` - (Required) Action to perform if none of the `rules` contained in the WebACL match. See [`defaultAction`](#default_action-block) below for details.
+* `description` - (Optional) Friendly description of the WebACL.
+* `name` - (Required) Friendly name of the WebACL.
+* `rule` - (Optional) Rule blocks used to identify the web requests that you want to `allow`, `block`, or `count`. See [`rule`](#rule-block) below for details.
+* `scope` - (Required) Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider.
+* `tags` - (Optional) Map of key-value pairs to associate with the resource. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `tokenDomains` - (Optional) Specifies the domains that AWS WAF should accept in a web request token. This enables the use of tokens across multiple protected websites. When AWS WAF provides a token, it uses the domain of the AWS resource that the web ACL is protecting. If you don't specify a list of token domains, AWS WAF accepts tokens only for the domain of the protected resource. With a token domain list, AWS WAF accepts the resource's host domain plus all domains in the token domain list, including their prefixed subdomains.
+* `visibilityConfig` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [`visibilityConfig`](#visibility_config-block) below for details.
+
+### `associationConfig` Block
+
+The `associationConfig` block supports the following arguments:
+
+* `requestBody` - (Optional) Customizes the request body that your protected resource forwards to AWS WAF for inspection. See [`requestBody`](#request_body-block) below for details.
+
+### `customResponseBody` Block
+
+Each `customResponseBody` block supports the following arguments:
+
+* `key` - (Required) Unique key identifying the custom response body. This is referenced by the `customResponseBodyKey` argument in the [`customResponse`](#custom_response-block) block.
+* `content` - (Required) Payload of the custom response.
+* `contentType` - (Required) Type of content in the payload that you are defining in the `content` argument. Valid values are `TEXT_PLAIN`, `TEXT_HTML`, or `APPLICATION_JSON`.
+
+### `defaultAction` Block
+
+The `defaultAction` block supports the following arguments:
+
+~> **Note** One of `allow` or `block`, expressed as an empty configuration block `{}`, is required when specifying a `defaultAction`.
+
+* `allow` - (Optional) Specifies that AWS WAF should allow requests by default. See [`allow`](#allow-block) below for details.
+* `block` - (Optional) Specifies that AWS WAF should block requests by default. See [`block`](#block-block) below for details.
+
+### `rule` Block
+
+~> **Note** One of `action` or `overrideAction` is required when specifying a rule.
+
+Each `rule` supports the following arguments:
+
+* `action` - (Optional) Action that AWS WAF should take on a web request when it matches the rule's statement. This is used only for rules whose **statements do not reference a rule group**. See [`action`](#action-block) for details.
+* `captchaConfig` - (Optional) Specifies how AWS WAF should handle CAPTCHA evaluations. See [`captchaConfig`](#captcha_config-block) below for details.
+* `name` - (Required) Friendly name of the rule. Note that the provider assumes that rules with names matching this pattern, `^shieldMitigationRuleGroup___.*`, are AWS-added for [automatic application layer DDoS mitigation activities](https://docs.aws.amazon.com/waf/latest/developerguide/ddos-automatic-app-layer-response-rg.html). Such rules will be ignored by the provider unless you explicitly include them in your configuration (for example, by using the AWS CLI to discover their properties and creating matching configuration). However, since these rules are owned and managed by AWS, you may get permission errors.
+* `overrideAction` - (Optional) Override action to apply to the rules in a rule group. Used only for rule **statements that reference a rule group**, like `ruleGroupReferenceStatement` and `managedRuleGroupStatement`. See [`overrideAction`](#override_action-block) below for details.
+* `priority` - (Required) If you define more than one Rule in a WebACL, AWS WAF evaluates each request against the `rules` in order based on the value of `priority`. AWS WAF processes rules with lower priority first.
+* `ruleLabel` - (Optional) Labels to apply to web requests that match the rule match statement. See [`ruleLabel`](#rule_label-block) below for details.
+* `statement` - (Required) The AWS WAF processing statement for the rule, for example `byteMatchStatement` or `geoMatchStatement`. See [`statement`](#statement-block) below for details.
+* `visibilityConfig` - (Required) Defines and enables Amazon CloudWatch metrics and web request sample collection. See [`visibilityConfig`](#visibility_config-block) below for details.
+
+### `action` Block
+
+The `action` block supports the following arguments:
+
+~> **Note** One of `allow`, `block`, or `count` is required when specifying an `action`.
+
+* `allow` - (Optional) Instructs AWS WAF to allow the web request. See [`allow`](#allow-block) below for details.
+* `block` - (Optional) Instructs AWS WAF to block the web request. See [`block`](#block-block) below for details.
+* `captcha` - (Optional) Instructs AWS WAF to run a CAPTCHA check against the web request. See [`captcha`](#captcha-block) below for details.
+* `challenge` - (Optional) Instructs AWS WAF to run a check against the request to verify that the request is coming from a legitimate client session. See [`challenge`](#challenge-block) below for details.
+* `count` - (Optional) Instructs AWS WAF to count the web request and allow it. See [`count`](#count-block) below for details.
+
+### `overrideAction` Block
+
+The `overrideAction` block supports the following arguments:
+
+~> **Note** One of `count` or `none`, expressed as an empty configuration block `{}`, is required when specifying an `overrideAction`.
+
+* `count` - (Optional) Override the rule action setting to count (i.e., only count matches). Configured as an empty block `{}`.
+* `none` - (Optional) Don't override the rule action setting. Configured as an empty block `{}`.
+
+### `allow` Block
+
+The `allow` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [`customRequestHandling`](#custom_request_handling-block) below for details.
+
+### `block` Block
+
+The `block` block supports the following arguments:
+
+* `customResponse` - (Optional) Defines a custom response for the web request. See [`customResponse`](#custom_response-block) below for details.
+
+### `captcha` Block
+
+The `captcha` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [`customRequestHandling`](#custom_request_handling-block) below for details.
+
+### `challenge` Block
+
+The `challenge` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [`customRequestHandling`](#custom_request_handling-block) below for details.
+
+### `count` Block
+
+The `count` block supports the following arguments:
+
+* `customRequestHandling` - (Optional) Defines custom handling for the web request. See [`customRequestHandling`](#custom_request_handling-block) below for details.
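+
+To make the relationship between `action`, `count`, and `customRequestHandling` (documented below) concrete, here is a minimal, hypothetical sketch of a rule that counts matching requests and tags them with a custom header. All names are illustrative, the exact shape of the `customRequestHandling` object is an assumption, and provider bindings are assumed to have been generated with `cdktf get`; treat this as a sketch rather than generated output.
+
+```typescript
+// A hedged sketch, not 'cdktf convert' output: a rule whose `count`
+// action also inserts a custom header into matched requests.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl";
+
+class CountWithHeaderStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Wafv2WebAcl(this, "count_example", {
+      defaultAction: { allow: {} },
+      name: "count-example", // illustrative name
+      rule: [
+        {
+          name: "count-us-traffic",
+          priority: 1,
+          action: {
+            count: {
+              customRequestHandling: {
+                // AWS WAF prefixes the header name with `x-amzn-waf-`.
+                insertHeader: [{ name: "rule-hit", value: "count-us-traffic" }],
+              },
+            },
+          },
+          statement: { geoMatchStatement: { countryCodes: ["US"] } },
+          visibilityConfig: {
+            cloudwatchMetricsEnabled: false,
+            metricName: "count-us-traffic",
+            sampledRequestsEnabled: false,
+          },
+        },
+      ],
+      scope: "REGIONAL",
+      visibilityConfig: {
+        cloudwatchMetricsEnabled: false,
+        metricName: "count-example",
+        sampledRequestsEnabled: false,
+      },
+    });
+  }
+}
+```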
+
+### `customRequestHandling` Block
+
+The `customRequestHandling` block supports the following arguments:
+
+* `insertHeader` - (Required) The `insertHeader` blocks used to define HTTP headers added to the request. See [`insertHeader`](#insert_header-block) below for details.
+
+### `insertHeader` Block
+
+Each `insertHeader` block supports the following arguments. Duplicate header names are not allowed:
+
+* `name` - Name of the custom header. For custom request header insertion, when AWS WAF inserts the header into the request, it prefixes this name with `x-amzn-waf-`, to avoid confusion with the headers that are already in the request. For example, for the header name `sample`, AWS WAF inserts the header `x-amzn-waf-sample`.
+* `value` - Value of the custom header.
+
+### `customResponse` Block
+
+The `customResponse` block supports the following arguments:
+
+* `customResponseBodyKey` - (Optional) References the response body that you want AWS WAF to return to the web request client. This must reference a `key` defined in a `customResponseBody` block of this resource.
+* `responseCode` - (Required) The HTTP status code to return to the client.
+* `responseHeader` - (Optional) The `responseHeader` blocks used to define the HTTP response headers added to the response. See [`responseHeader`](#response_header-block) below for details.
+
+### `responseHeader` Block
+
+Each `responseHeader` block supports the following arguments. Duplicate header names are not allowed:
+
+* `name` - Name of the custom response header.
+* `value` - Value of the custom response header.
+
+### `ruleLabel` Block
+
+Each block supports the following arguments:
+
+* `name` - Label string.
+
+### `statement` Block
+
+The processing guidance for a Rule, used by AWS WAF to determine whether a web request matches the rule. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statements-list.html) for more information.
+
+-> **Note** Although the `statement` block is recursive, currently only 3 levels are supported.
+
+The `statement` block supports the following arguments:
+
+* `andStatement` - (Optional) Logical rule statement used to combine other rule statements with AND logic. See [`andStatement`](#and_statement-block) below for details.
+* `byteMatchStatement` - (Optional) Rule statement that defines a string match search for AWS WAF to apply to web requests. See [`byteMatchStatement`](#byte_match_statement-block) below for details.
+* `geoMatchStatement` - (Optional) Rule statement used to identify web requests based on country of origin. See [`geoMatchStatement`](#geo_match_statement-block) below for details.
+* `ipSetReferenceStatement` - (Optional) Rule statement used to detect web requests coming from particular IP addresses or address ranges. See [`ipSetReferenceStatement`](#ip_set_reference_statement-block) below for details.
+* `labelMatchStatement` - (Optional) Rule statement that defines a string match search against labels that have been added to the web request by rules that have already run in the web ACL. See [`labelMatchStatement`](#label_match_statement-block) below for details.
+* `managedRuleGroupStatement` - (Optional) Rule statement used to run the rules that are defined in a managed rule group. This statement cannot be nested. See [`managedRuleGroupStatement`](#managed_rule_group_statement-block) below for details.
+* `notStatement` - (Optional) Logical rule statement used to negate the results of another rule statement. See [`notStatement`](#not_statement-block) below for details.
+* `orStatement` - (Optional) Logical rule statement used to combine other rule statements with OR logic. See [`orStatement`](#or_statement-block) below for details.
+* `rateBasedStatement` - (Optional) Rate-based rule that tracks the rate of requests for each originating IP address, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any 5-minute time span. This statement cannot be nested. See [`rateBasedStatement`](#rate_based_statement-block) below for details.
+* `regexMatchStatement` - (Optional) Rule statement used to search web request components for a match against a single regular expression. See [`regexMatchStatement`](#regex_match_statement-block) below for details.
+* `regexPatternSetReferenceStatement` - (Optional) Rule statement used to search web request components for matches with regular expressions. See [`regexPatternSetReferenceStatement`](#regex_pattern_set_reference_statement-block) below for details.
+* `ruleGroupReferenceStatement` - (Optional) Rule statement used to run the rules that are defined in a WAFv2 Rule Group. See [`ruleGroupReferenceStatement`](#rule_group_reference_statement-block) below for details.
+* `sizeConstraintStatement` - (Optional) Rule statement that compares a number of bytes against the size of a request component, using a comparison operator, such as greater than (>) or less than (<). See [`sizeConstraintStatement`](#size_constraint_statement-block) below for more details.
+* `sqliMatchStatement` - (Optional) An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. See [`sqliMatchStatement`](#sqli_match_statement-block) below for details.
+* `xssMatchStatement` - (Optional) Rule statement that defines a cross-site scripting (XSS) match search for AWS WAF to apply to web requests. See [`xssMatchStatement`](#xss_match_statement-block) below for details.
+
+### `andStatement` Block
+
+A logical rule statement used to combine other rule statements with `and` logic. You provide more than one `statement` within the `andStatement`.
+
+The `andStatement` block supports the following arguments:
+
+* `statement` - (Required) Statements to combine with `and` logic. You can use any statements that can be nested. See [`statement`](#statement-block) above for details.
+
+### `byteMatchStatement` Block
+
+The byte match statement provides the bytes to search for, the location in requests that you want AWS WAF to search, and other settings. The bytes to search for are typically a string that corresponds with ASCII characters.
+
+The `byteMatchStatement` block supports the following arguments:
+
+* `fieldToMatch` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`fieldToMatch`](#field_to_match-block) below for details.
+* `positionalConstraint` - (Required) Area within the portion of a web request that you want AWS WAF to search for `searchString`. Valid values include the following: `EXACTLY`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`, `CONTAINS_WORD`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_ByteMatchStatement.html) for more information.
+* `searchString` - (Required) String value that you want AWS WAF to search for. AWS WAF searches only in the part of web requests that you designate for inspection in `fieldToMatch`. The maximum length of the value is 50 bytes.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`textTransformation`](#text_transformation-block) below for details.
+
+### `geoMatchStatement` Block
+
+The `geoMatchStatement` block supports the following arguments:
+
+* `countryCodes` - (Required) Array of two-character country codes, for example, [ "US", "CN" ], from the alpha-2 country ISO codes of the ISO 3166 international standard. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_GeoMatchStatement.html) for valid values.
+* `forwardedIpConfig` - (Optional) Configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [`forwardedIpConfig`](#forwarded_ip_config-block) below for details.
+
+### `ipSetReferenceStatement` Block
+
+A rule statement used to detect web requests coming from particular IP addresses or address ranges. To use this, create an `awsWafv2IpSet` that specifies the addresses you want to detect, then use the `arn` of that set in this statement.
+
+The `ipSetReferenceStatement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the IP Set that this statement references.
+* `ipSetForwardedIpConfig` - (Optional) Configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. See [`ipSetForwardedIpConfig`](#ip_set_forwarded_ip_config-block) below for more details.
+
+### `labelMatchStatement` Block
+
+The `labelMatchStatement` block supports the following arguments:
+
+* `scope` - (Required) Specify whether you want to match using the label name or just the namespace. Valid values are `LABEL` or `NAMESPACE`.
+* `key` - (Required) String to match against.
+
+### `managedRuleGroupStatement` Block
+
+A rule statement used to run the rules that are defined in a managed rule group.
+
+You can't nest a `managedRuleGroupStatement`, for example for use inside a `notStatement` or `orStatement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `managedRuleGroupStatement` block supports the following arguments:
+
+* `name` - (Required) Name of the managed rule group.
+* `ruleActionOverride` - (Optional) Action settings to use in the place of the rule actions that are configured inside the rule group. You specify one override for each rule whose action you want to change. See [`ruleActionOverride`](#rule_action_override-block) below for details.
+* `managedRuleGroupConfigs` - (Optional) Additional information that's used by a managed rule group. Only one rule attribute is allowed in each config. See [`managedRuleGroupConfigs`](#managed_rule_group_configs-block) for more details.
+* `scopeDownStatement` - (Optional) Narrows the scope of the statement to matching web requests. This can be any nestable statement, and you can nest statements at any level below this scope-down statement. See [`statement`](#statement-block) above for details.
+* `vendorName` - (Required) Name of the managed rule group vendor.
+* `version` - (Optional) Version of the managed rule group. You can set `Version_1.0` or `Version_1.1` etc. If you want to use the default version, do not set anything.
+
+### `notStatement` Block
+
+A logical rule statement used to negate the results of another rule statement. You provide one `statement` within the `notStatement`.
+
+The `notStatement` block supports the following arguments:
+
+* `statement` - (Required) Statement to negate. You can use any statement that can be nested. See [`statement`](#statement-block) above for details.
+
+### `orStatement` Block
+
+A logical rule statement used to combine other rule statements with `or` logic. You provide more than one `statement` within the `orStatement`.
+
+The `orStatement` block supports the following arguments:
+
+* `statement` - (Required) Statements to combine with `or` logic. You can use any statements that can be nested. See [`statement`](#statement-block) above for details.
+
+### `rateBasedStatement` Block
+
+A rate-based rule tracks the rate of requests for each originating IP address, and triggers the rule action when the rate exceeds a limit that you specify on the number of requests in any 5-minute time span. You can use this to put a temporary block on requests from an IP address that is sending excessive requests. See the [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_RateBasedStatement.html) for more information.
+
+You can't nest a `rateBasedStatement`, for example for use inside a `notStatement` or `orStatement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `rateBasedStatement` block supports the following arguments:
+
+* `aggregateKeyType` - (Optional) Setting that indicates how to aggregate the request counts. Valid values include: `CONSTANT`, `FORWARDED_IP`, or `IP`. Default: `IP`.
+* `forwardedIpConfig` - (Optional) Configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. If `aggregateKeyType` is set to `FORWARDED_IP`, this block is required. See [`forwardedIpConfig`](#forwarded_ip_config-block) below for details.
+* `limit` - (Required) Limit on requests per 5-minute period for a single originating IP address.
+* `scopeDownStatement` - (Optional) Nested statement that narrows the scope of the rate-based statement to matching web requests. This can be any nestable statement, and you can nest statements at any level below this scope-down statement. See [`statement`](#statement-block) above for details. If `aggregateKeyType` is set to `CONSTANT`, this block is required.
+
+### `regexMatchStatement` Block
+
+A rule statement used to search web request components for a match against a single regular expression.
+
+The `regexMatchStatement` block supports the following arguments:
+
+* `regexString` - (Required) String representing the regular expression. Minimum of `1` and maximum of `512` characters.
+* `fieldToMatch` - (Required) The part of a web request that you want AWS WAF to inspect. See [`fieldToMatch`](#field_to_match-block) below for details.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`textTransformation`](#text_transformation-block) below for details.
+
+### `regexPatternSetReferenceStatement` Block
+
+A rule statement used to search web request components for matches with regular expressions. To use this, create an `awsWafv2RegexPatternSet` that specifies the expressions that you want to detect, then use the `arn` of that set in this statement. A web request matches the pattern set rule statement if the request component matches any of the patterns in the set.
+
+The `regexPatternSetReferenceStatement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the Regex Pattern Set that this statement references.
+* `fieldToMatch` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`fieldToMatch`](#field_to_match-block) below for details.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`textTransformation`](#text_transformation-block) below for details.
+
+### `ruleGroupReferenceStatement` Block
+
+A rule statement used to run the rules that are defined in a WAFv2 Rule Group or `awsWafv2RuleGroup` resource.
+
+You can't nest a `ruleGroupReferenceStatement`, for example for use inside a `notStatement` or `orStatement`. It can only be referenced as a top-level statement within a `rule`.
+
+The `ruleGroupReferenceStatement` block supports the following arguments:
+
+* `arn` - (Required) The Amazon Resource Name (ARN) of the `awsWafv2RuleGroup` resource.
+* `ruleActionOverride` - (Optional) Action settings to use in the place of the rule actions that are configured inside the rule group. You specify one override for each rule whose action you want to change. See [`ruleActionOverride`](#rule_action_override-block) below for details.
+
+### `sizeConstraintStatement` Block
+
+A rule statement that uses a comparison operator to compare a number of bytes against the size of a request component. AWS WAFv2 inspects up to the first 8192 bytes (8 KB) of a request body, and when inspecting the request URI Path, the slash `/` in the URI counts as one character.
+
+The `sizeConstraintStatement` block supports the following arguments:
+
+* `comparisonOperator` - (Required) Operator to use to compare the request part to the size setting. Valid values include: `EQ`, `NE`, `LE`, `LT`, `GE`, or `GT`.
+* `fieldToMatch` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`fieldToMatch`](#field_to_match-block) below for details.
+* `size` - (Required) Size, in bytes, to compare to the request part, after any transformations. Valid values are integers between 0 and 21474836480, inclusive.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`textTransformation`](#text_transformation-block) below for details.
+
+### `sqliMatchStatement` Block
+
+An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. Later in the process, when you create a web ACL, you specify whether to allow or block requests that appear to contain malicious SQL code.
+
+The `sqliMatchStatement` block supports the following arguments:
+
+* `fieldToMatch` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`fieldToMatch`](#field_to_match-block) below for details.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`textTransformation`](#text_transformation-block) below for details.
+
+### `xssMatchStatement` Block
+
+The XSS match statement provides the location in requests that you want AWS WAF to search and text transformations to use on the search area before AWS WAF searches for character sequences that are likely to be malicious strings.
+
+The `xssMatchStatement` block supports the following arguments:
+
+* `fieldToMatch` - (Optional) Part of a web request that you want AWS WAF to inspect. See [`fieldToMatch`](#field_to_match-block) below for details.
+* `textTransformation` - (Required) Text transformations eliminate some of the unusual formatting that attackers use in web requests in an effort to bypass detection. At least one transformation is required. See [`textTransformation`](#text_transformation-block) below for details.
+
+### `ruleActionOverride` Block
+
+The `ruleActionOverride` block supports the following arguments:
+
+* `actionToUse` - (Required) Override action to use, in place of the configured action of the rule in the rule group. See [`action`](#action-block) for details.
+* `name` - (Required) Name of the rule to override. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/aws-managed-rule-groups-list.html) for a list of names in the appropriate rule group in use.
+
+### `managedRuleGroupConfigs` Block
+
+The `managedRuleGroupConfigs` block supports the following arguments:
+
+* `awsManagedRulesBotControlRuleSet` - (Optional) Additional configuration for using the Bot Control managed rule group. Use this to specify the inspection level that you want to use. See [`awsManagedRulesBotControlRuleSet`](#aws_managed_rules_bot_control_rule_set-block) for more details.
+* `awsManagedRulesAtpRuleSet` - (Optional) Additional configuration for using the Account Takeover Protection managed rule group. Use this to specify information such as the sign-in page of your application and the type of content to accept or reject from the client.
+* `loginPath` - (Optional, **Deprecated**) The path of the login endpoint for your application.
+* `passwordField` - (Optional, **Deprecated**) Details about your login page password field. See [`passwordField`](#password_field-block) for more details.
+* `payloadType` - (Optional, **Deprecated**) The payload type for your login endpoint, either JSON or form encoded.
+* `usernameField` - (Optional, **Deprecated**) Details about your login page username field. See [`usernameField`](#username_field-block) for more details.
+
+### `awsManagedRulesBotControlRuleSet` Block
+
+* `inspectionLevel` - (Optional) The inspection level to use for the Bot Control rule group.
+
+### `awsManagedRulesAtpRuleSet` Block
+
+* `loginPath` - (Required) The path of the login endpoint for your application.
+* `requestInspection` - (Optional) The criteria for inspecting login requests, used by the ATP rule group to validate credentials usage. See [`requestInspection`](#request_inspection-block) for more details.
+* `responseInspection` - (Optional) The criteria for inspecting responses to login requests, used by the ATP rule group to track login failure rates. Note that response inspection is available only on web ACLs that protect CloudFront distributions. See [`responseInspection`](#response_inspection-block) for more details.
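+
+The Account Takeover Protection case is covered by the full example earlier in this page. For the Bot Control case, a minimal, hypothetical sketch of wiring `awsManagedRulesBotControlRuleSet` into a `managedRuleGroupStatement` might look like the following; all resource and metric names are illustrative, the `TARGETED` inspection level is an assumption, and provider bindings are assumed to come from `cdktf get`.
+
+```typescript
+// A hedged sketch, not 'cdktf convert' output: enabling the Bot Control
+// managed rule group with an explicit inspection level.
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl";
+
+class BotControlStack extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Wafv2WebAcl(this, "bot_control_example", {
+      defaultAction: { allow: {} },
+      name: "bot-control-example", // illustrative name
+      rule: [
+        {
+          name: "bot-control",
+          priority: 1,
+          overrideAction: { none: {} },
+          statement: {
+            managedRuleGroupStatement: {
+              name: "AWSManagedRulesBotControlRuleSet",
+              vendorName: "AWS",
+              managedRuleGroupConfigs: [
+                {
+                  awsManagedRulesBotControlRuleSet: {
+                    inspectionLevel: "TARGETED", // assumed value; "COMMON" is the lighter option
+                  },
+                },
+              ],
+            },
+          },
+          visibilityConfig: {
+            cloudwatchMetricsEnabled: false,
+            metricName: "bot-control",
+            sampledRequestsEnabled: false,
+          },
+        },
+      ],
+      scope: "REGIONAL",
+      visibilityConfig: {
+        cloudwatchMetricsEnabled: false,
+        metricName: "bot-control-example",
+        sampledRequestsEnabled: false,
+      },
+    });
+  }
+}
+```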
+
+### `requestInspection` Block
+
+* `payloadType` - (Required) The payload type for your login endpoint, either JSON or form encoded.
+* `usernameField` - (Required) Details about your login page username field. See [`usernameField`](#username_field-block) for more details.
+* `passwordField` - (Required) Details about your login page password field. See [`passwordField`](#password_field-block) for more details.
+
+### `passwordField` Block
+
+* `identifier` - (Optional) The name of the password field.
+
+### `usernameField` Block
+
+* `identifier` - (Optional) The name of the username field.
+
+### `responseInspection` Block
+
+* `bodyContains` - (Optional) Configures inspection of the response body. See [`bodyContains`](#body_contains-block) for more details.
+* `header` - (Optional) Configures inspection of the response header. See [`header`](#header-block) for more details.
+* `json` - (Optional) Configures inspection of the response JSON. See [`json`](#json-block) for more details.
+* `statusCode` - (Optional) Configures inspection of the response status code. See [`statusCode`](#status_code-block) for more details.
+
+### `bodyContains` Block
+
+* `successStrings` - (Required) Strings in the body of the response that indicate a successful login attempt.
+* `failureStrings` - (Required) Strings in the body of the response that indicate a failed login attempt.
+
+### `header` Block
+
+* `name` - (Required) The name of the header to match against. The name must be an exact match, including case.
+* `successValues` - (Required) Values in the response header with the specified name that indicate a successful login attempt.
+* `failureValues` - (Required) Values in the response header with the specified name that indicate a failed login attempt.
+
+### `json` Block
+
+* `identifier` - (Required) The identifier for the value to match against in the JSON.
+* `successStrings` - (Required) Strings in the body of the response that indicate a successful login attempt.
+* `failureStrings` - (Required) Strings in the body of the response that indicate a failed login attempt.
+
+### `statusCode` Block
+
+* `successCodes` - (Required) Status codes in the response that indicate a successful login attempt.
+* `failureCodes` - (Required) Status codes in the response that indicate a failed login attempt.
+
+### `fieldToMatch` Block
+
+The part of a web request that you want AWS WAF to inspect. Include the single `fieldToMatch` type that you want to inspect, with additional specifications as needed, according to the type. You specify a single request component in `fieldToMatch` for each rule statement that requires it. To inspect more than one component of a web request, create a separate rule statement for each component. See the [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-fields.html#waf-rule-statement-request-component) for more details.
+
+The `fieldToMatch` block supports the following arguments:
+
+~> **Note** Only one of `allQueryArguments`, `body`, `cookies`, `headers`, `jsonBody`, `method`, `queryString`, `singleHeader`, `singleQueryArgument`, or `uriPath` can be specified. An empty configuration block `{}` should be used when specifying `allQueryArguments`, `method`, or `queryString` attributes.
+
+* `allQueryArguments` - (Optional) Inspect all query arguments.
+* `body` - (Optional) Inspect the request body, which immediately follows the request headers. See [`body`](#body-block) below for details.
+* `cookies` - (Optional) Inspect the cookies in the web request. See [`cookies`](#cookies-block) below for details.
+* `headers` - (Optional) Inspect the request headers. See [`headers`](#headers-block) below for details.
+* `jsonBody` - (Optional) Inspect the request body as JSON. See [`jsonBody`](#json_body-block) for details.
+* `method` - (Optional) Inspect the HTTP method. The method indicates the type of operation that the request is asking the origin to perform.
+* `queryString` - (Optional) Inspect the query string. This is the part of a URL that appears after a `?` character, if any.
+* `singleHeader` - (Optional) Inspect a single header. See [`singleHeader`](#single_header-block) below for details.
+* `singleQueryArgument` - (Optional) Inspect a single query argument. See [`singleQueryArgument`](#single_query_argument-block) below for details.
+* `uriPath` - (Optional) Inspect the request URI path. This is the part of a web request that identifies a resource, for example, `/images/daily-ad.jpg`.
+
+### `forwardedIpConfig` Block
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name. If the specified header isn't present in the request, AWS WAFv2 doesn't apply the rule to the web request at all. AWS WAFv2 only evaluates the first IP address found in the specified HTTP header.
+
+The `forwardedIpConfig` block supports the following arguments:
+
+* `fallbackBehavior` - (Required) - Match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `headerName` - (Required) - Name of the HTTP header to use for the IP address.
+
+### `ipSetForwardedIpConfig` Block
+
+The configuration for inspecting IP addresses in an HTTP header that you specify, instead of using the IP address that's reported by the web request origin. Commonly, this is the X-Forwarded-For (XFF) header, but you can specify any header name.
+
+The `ipSetForwardedIpConfig` block supports the following arguments:
+
+* `fallbackBehavior` - (Required) - Match status to assign to the web request if the request doesn't have a valid IP address in the specified position. Valid values include: `MATCH` or `NO_MATCH`.
+* `headerName` - (Required) - Name of the HTTP header to use for the IP address.
+* `position` - (Required) - Position in the header to search for the IP address. Valid values include: `FIRST`, `LAST`, or `ANY`. If `ANY` is specified and the header contains more than 10 IP addresses, AWS WAFv2 inspects the last 10.
+
+### `headers` Block
+
+Inspect the request headers.
+
+The `headers` block supports the following arguments:
+
+* `matchPattern` - (Required) The filter to use to identify the subset of headers to inspect in a web request. The `matchPattern` block supports only one of the following arguments:
+    * `all` - An empty configuration block that is used for inspecting all headers.
+    * `includedHeaders` - An array of strings that will be used for inspecting headers that have a key that matches one of the provided values.
+    * `excludedHeaders` - An array of strings that will be used for inspecting headers that do not have a key that matches one of the provided values.
+* `matchScope` - (Required) The parts of the headers to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values include the following: `ALL`, `KEY`, `VALUE`.
+* `oversizeHandling` - (Required) Oversize handling tells AWS WAF what to do with a web request when the request component that the rule inspects is over the limits. Valid values include the following: `CONTINUE`, `MATCH`, `NO_MATCH`. See the AWS [documentation](https://docs.aws.amazon.com/waf/latest/developerguide/waf-rule-statement-oversize-handling.html) for more information.
+
+### `jsonBody` Block
+
+The `jsonBody` block supports the following arguments:
+
+* `invalidFallbackBehavior` - (Optional) What to do when JSON parsing fails. Defaults to evaluating up to the first parsing failure. Valid values are `EVALUATE_AS_STRING`, `MATCH` and `NO_MATCH`.
+* `matchPattern` - (Required) The patterns to look for in the JSON body. You must specify exactly one setting: either `all` or `includedPaths`. See [JsonMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_JsonMatchPattern.html) for details.
+* `matchScope` - (Required) The parts of the JSON to match against using the `matchPattern`. Valid values are `ALL`, `KEY` and `VALUE`.
+* `oversizeHandling` - (Optional) What to do if the body is larger than can be inspected. Valid values are `CONTINUE` (default), `MATCH` and `NO_MATCH`.
+
+### `singleHeader` Block
+
+Inspect a single header. Provide the name of the header to inspect, for example, `User-Agent` or `Referer` (provided as lowercase strings).
+
+The `singleHeader` block supports the following arguments:
+
+* `name` - (Optional) Name of the header to inspect. This setting must be provided in lowercase characters.
+
+### `singleQueryArgument` Block
+
+Inspect a single query argument. Provide the name of the query argument to inspect, such as `UserName` or `SalesRegion` (provided as lowercase strings).
+
+The `singleQueryArgument` block supports the following arguments:
+
+* `name` - (Optional) Name of the query argument to inspect. This setting must be provided in lowercase characters.
+
+### `body` Block
+
+The `body` block supports the following arguments:
+
+* `oversizeHandling` - (Optional) What WAF should do if the body is larger than WAF can inspect. WAF does not support inspecting the entire contents of the body of a web request when the body exceeds 8 KB (8192 bytes). Only the first 8 KB of the request body are forwarded to WAF by the underlying host service. Valid values: `CONTINUE`, `MATCH`, `NO_MATCH`.
+
+### `cookies` Block
+
+Inspect the cookies in the web request. You can specify the parts of the cookies to inspect and you can narrow the set of cookies to inspect by including or excluding specific keys. This is used to indicate the web request component to inspect, in the [FieldToMatch](https://docs.aws.amazon.com/waf/latest/APIReference/API_FieldToMatch.html) specification.
+
+The `cookies` block supports the following arguments:
+
+* `matchPattern` - (Required) The filter to use to identify the subset of cookies to inspect in a web request. You must specify exactly one setting: either `all`, `includedCookies` or `excludedCookies`. More details: [CookieMatchPattern](https://docs.aws.amazon.com/waf/latest/APIReference/API_CookieMatchPattern.html)
+* `matchScope` - (Required) The parts of the cookies to inspect with the rule inspection criteria. If you specify `ALL`, AWS WAF inspects both keys and values. Valid values: `ALL`, `KEY`, `VALUE`.
+* `oversizeHandling` - (Required) What AWS WAF should do if the cookies of the request are larger than AWS WAF can inspect. AWS WAF does not support inspecting the entire contents of request cookies when they exceed 8 KB (8192 bytes) or 200 total cookies. The underlying host service forwards a maximum of 200 cookies and at most 8 KB of cookie contents to AWS WAF. Valid values: `CONTINUE`, `MATCH`, `NO_MATCH`.
+
+### `textTransformation` Block
+
+The `textTransformation` block supports the following arguments:
+
+* `priority` - (Required) Relative processing order for multiple transformations that are defined for a rule statement. AWS WAF processes all transformations, from lowest priority to highest, before inspecting the transformed content.
+* `type` - (Required) Transformation to apply. Refer to the Text Transformation [documentation](https://docs.aws.amazon.com/waf/latest/APIReference/API_TextTransformation.html) for more details.
+
+### `visibilityConfig` Block
+
+The `visibilityConfig` block supports the following arguments:
+
+* `cloudwatchMetricsEnabled` - (Required) Whether the associated resource sends metrics to CloudWatch. For the list of available metrics, see [AWS WAF Metrics](https://docs.aws.amazon.com/waf/latest/developerguide/monitoring-cloudwatch.html#waf-metrics).
+* `metricName` - (Required) A friendly name of the CloudWatch metric. The name can contain only alphanumeric characters (A-Z, a-z, 0-9), hyphen (-), and underscore (\_), with a length from one to 128 characters. It can't contain whitespace or metric names reserved for AWS WAF, for example `All` and `Default_Action`.
+* `sampledRequestsEnabled` - (Required) Whether AWS WAF should store a sampling of the web requests that match the rules. You can view the sampled requests through the AWS WAF console.
+
+### `captchaConfig` Block
+
+The `captchaConfig` block supports the following arguments:
+
+* `immunityTimeProperty` - (Optional) Defines custom immunity time. See [`immunityTimeProperty`](#immunitytimeproperty-block) below for details.
+
+### `immunityTimeProperty` Block
+
+The `immunityTimeProperty` block supports the following arguments:
+
+* `immunityTime` - (Optional) The amount of time, in seconds, that a CAPTCHA or challenge timestamp is considered valid by AWS WAF. The default setting is 300.
+
+### `requestBody` Block
+
+The `requestBody` block supports the following arguments:
+
+* `cloudfront` - (Optional) Customizes the request body that your protected CloudFront distributions forward to AWS WAF for inspection. See [`cloudfront`](#cloudfront-block) below for details.
+
+### `cloudfront` Block
+
+The `cloudfront` block supports the following arguments:
+
+* `defaultSizeInspectionLimit` - (Required) Specifies the maximum size of the web request body component that an associated CloudFront distribution should send to AWS WAF for inspection. This applies to statements in the web ACL that inspect the body or JSON body. Valid values are `KB_16`, `KB_32`, `KB_48` and `KB_64`.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `arn` - The ARN of the WAF WebACL.
+* `capacity` - Web ACL capacity units (WCUs) currently being used by this web ACL.
+* `id` - The ID of the WAF WebACL.
+* `tagsAll` - Map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
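+
+Tying together the `captchaConfig`, `requestBody`, and `cloudfront` blocks documented above, the following hedged CDKTF (TypeScript) sketch raises the CAPTCHA immunity time and the CloudFront body inspection limit on a web ACL. It is an illustration only; in particular, `associationConfig` as the parent of `requestBody` is an assumption about the generated bindings.
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Wafv2WebAcl } from "./.gen/providers/aws/wafv2-web-acl";
+class CaptchaAndBodyLimitSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // CLOUDFRONT-scoped web ACLs must be managed in the us-east-1 region.
+    new Wafv2WebAcl(this, "example", {
+      name: "captcha-example",
+      scope: "CLOUDFRONT",
+      defaultAction: { allow: {} },
+      // CAPTCHA tokens stay valid for 10 minutes instead of the 300s default.
+      captchaConfig: {
+        immunityTimeProperty: { immunityTime: 600 },
+      },
+      // Forward up to 32 KB of the request body for inspection.
+      associationConfig: {
+        requestBody: {
+          cloudfront: { defaultSizeInspectionLimit: "KB_32" },
+        },
+      },
+      visibilityConfig: {
+        cloudwatchMetricsEnabled: false,
+        metricName: "captcha-example",
+        sampledRequestsEnabled: false,
+      },
+    });
+  }
+}
+```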
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Web ACLs using `id/name/scope`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAFv2 Web ACLs using `id/name/scope`. For example:
+
+```console
+% terraform import aws_wafv2_web_acl.example a1b2c3d4-d5f6-7777-8888-9999aaaabbbbcccc/example/REGIONAL
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafv2_web_acl_association.html.markdown b/website/docs/cdktf/typescript/r/wafv2_web_acl_association.html.markdown
new file mode 100644
index 00000000000..07bd7581950
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafv2_web_acl_association.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_web_acl_association"
+description: |-
+  Creates a WAFv2 Web ACL Association.
+---
+
+
+# Resource: aws_wafv2_web_acl_association
+
+Creates a WAFv2 Web ACL Association.
+
+~> **NOTE on associating a WAFv2 Web ACL with a CloudFront distribution:** Do not use this resource to associate a WAFv2 Web ACL with a CloudFront distribution. The [AWS API call backing this resource][1] notes that you should use the [`webAclId`][2] property on the [`cloudfrontDistribution`][2] instead.
+
+[1]: https://docs.aws.amazon.com/waf/latest/APIReference/API_AssociateWebACL.html
+[2]: /docs/providers/aws/r/cloudfront_distribution.html#web_acl_id
+
+## Example Usage
+
+```terraform
+resource "aws_api_gateway_rest_api" "example" {
+  body = jsonencode({
+    openapi = "3.0.1"
+    info = {
+      title   = "example"
+      version = "1.0"
+    }
+    paths = {
+      "/path1" = {
+        get = {
+          x-amazon-apigateway-integration = {
+            httpMethod           = "GET"
+            payloadFormatVersion = "1.0"
+            type                 = "HTTP_PROXY"
+            uri                  = "https://ip-ranges.amazonaws.com/ip-ranges.json"
+          }
+        }
+      }
+    }
+  })
+
+  name = "example"
+}
+
+resource "aws_api_gateway_deployment" "example" {
+  rest_api_id = aws_api_gateway_rest_api.example.id
+
+  triggers = {
+    redeployment = sha1(jsonencode(aws_api_gateway_rest_api.example.body))
+  }
+
+  lifecycle {
+    create_before_destroy = true
+  }
+}
+
+resource "aws_api_gateway_stage" "example" {
+  deployment_id = aws_api_gateway_deployment.example.id
+  rest_api_id   = aws_api_gateway_rest_api.example.id
+  stage_name    = "example"
+}
+
+resource "aws_wafv2_web_acl" "example" {
+  name  = "web-acl-association-example"
+  scope = "REGIONAL"
+
+  default_action {
+    allow {}
+  }
+
+  visibility_config {
+    cloudwatch_metrics_enabled = false
+    metric_name                = "friendly-metric-name"
+    sampled_requests_enabled   = false
+  }
+}
+
+resource "aws_wafv2_web_acl_association" "example" {
+  resource_arn = aws_api_gateway_stage.example.arn
+  web_acl_arn  = aws_wafv2_web_acl.example.arn
+}
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `resourceArn` - (Required) The Amazon Resource Name (ARN) of the resource to associate with the web ACL. This must be an ARN of an Application Load Balancer, an Amazon API Gateway stage, or an Amazon Cognito User Pool.
+* `webAclArn` - (Required) The Amazon Resource Name (ARN) of the Web ACL that you want to associate with the resource.
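+
+Because this page documents the CDKTF (TypeScript) variant of the resource while the example above is plain Terraform, here is a hedged TypeScript sketch of the association itself. The two ARNs are placeholders; in a real stack they would come from the stage and web ACL resource objects.
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Wafv2WebAclAssociation } from "./.gen/providers/aws/wafv2-web-acl-association";
+class WebAclAssociationSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Associates an existing API Gateway stage with a REGIONAL web ACL.
+    new Wafv2WebAclAssociation(this, "example", {
+      resourceArn:
+        "arn:aws:apigateway:us-east-1::/restapis/abc123/stages/example",
+      webAclArn:
+        "arn:aws:wafv2:us-east-1:123456789012:regional/webacl/example/a1b2c3d4-5678-90ab-cdef-EXAMPLE11111",
+    });
+  }
+}
+```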
+
+## Attribute Reference
+
+This resource exports no additional attributes.
+
+## Timeouts
+
+[Configuration options](https://www.terraform.io/docs/configuration/blocks/resources/syntax.html#operation-timeouts):
+
+* `create` - (Default `5m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Web ACL Association using `webAclArn,resourceArn`. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAFv2 Web ACL Association using `webAclArn,resourceArn`. For example:
+
+```console
+% terraform import aws_wafv2_web_acl_association.example arn:aws:wafv2:...7ce849ea,arn:aws:apigateway:...ages/name
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/wafv2_web_acl_logging_configuration.html.markdown b/website/docs/cdktf/typescript/r/wafv2_web_acl_logging_configuration.html.markdown
new file mode 100644
index 00000000000..df02b2c3c85
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/wafv2_web_acl_logging_configuration.html.markdown
@@ -0,0 +1,286 @@
+---
+subcategory: "WAF"
+layout: "aws"
+page_title: "AWS: aws_wafv2_web_acl_logging_configuration"
+description: |-
+  Creates a WAFv2 Web ACL Logging Configuration resource.
+---
+
+
+# Resource: aws_wafv2_web_acl_logging_configuration
+
+This resource creates a WAFv2 Web ACL Logging Configuration.
+
+!> **WARNING:** When logging from a WAFv2 Web ACL to a CloudWatch Log Group, the WAFv2 service tries to create or update a generic Log Resource Policy named `awswafLogs`. However, if there are a large number of Web ACLs, or if the account frequently creates and deletes Web ACLs, this policy may exceed the maximum policy size; creating this resource will then fail. More details can be found in [this GitHub issue](https://github.com/hashicorp/terraform-provider-aws/issues/25296). To prevent this, you can manage a specific resource policy yourself; refer to the [example](#with-cloudwatch-log-group-and-managed-cloudwatch-log-resource-policy) below for managing a CloudWatch Log Group with a managed CloudWatch Log Resource Policy.
+
+## Example Usage
+
+### With Redacted Fields
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */ +import { Wafv2WebAclLoggingConfiguration } from "./.gen/providers/aws/wafv2-web-acl-logging-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Wafv2WebAclLoggingConfiguration(this, "example", { + logDestinationConfigs: [ + Token.asString(awsKinesisFirehoseDeliveryStreamExample.arn), + ], + redactedFields: [ + { + singleHeader: { + name: "user-agent", + }, + }, + ], + resourceArn: Token.asString(awsWafv2WebAclExample.arn), + }); + } +} + +``` + +### With Logging Filter + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { Wafv2WebAclLoggingConfiguration } from "./.gen/providers/aws/wafv2-web-acl-logging-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new Wafv2WebAclLoggingConfiguration(this, "example", { + logDestinationConfigs: [ + Token.asString(awsKinesisFirehoseDeliveryStreamExample.arn), + ], + loggingFilter: { + defaultBehavior: "KEEP", + filter: [ + { + behavior: "DROP", + condition: [ + { + actionCondition: { + action: "COUNT", + }, + }, + { + labelNameCondition: { + labelName: + "awswaf:111122223333:rulegroup:testRules:LabelNameZ", + }, + }, + ], + requirement: "MEETS_ALL", + }, + { + behavior: "KEEP", + condition: [ + { + actionCondition: { + action: "ALLOW", + }, + }, + ], + requirement: "MEETS_ANY", + }, + ], + }, + resourceArn: Token.asString(awsWafv2WebAclExample.arn), + }); + } +} + +``` + +### With CloudWatch Log Group and managed CloudWatch Log Resource Policy + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, Fn, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { CloudwatchLogGroup } from "./.gen/providers/aws/cloudwatch-log-group"; +import { CloudwatchLogResourcePolicy } from "./.gen/providers/aws/cloudwatch-log-resource-policy"; +import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity"; +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DataAwsRegion } from "./.gen/providers/aws/data-aws-region"; +import { Wafv2WebAclLoggingConfiguration } from "./.gen/providers/aws/wafv2-web-acl-logging-configuration"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new CloudwatchLogGroup(this, "example", { + name: "aws-waf-logs-some-uniq-suffix", + }); + const awsWafv2WebAclLoggingConfigurationExample = + new Wafv2WebAclLoggingConfiguration(this, "example_1", { + logDestinationConfigs: [example.arn], + resourceArn: Token.asString(awsWafv2WebAclExample.arn), + }); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/
+    awsWafv2WebAclLoggingConfigurationExample.overrideLogicalId("example");
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const dataAwsRegionCurrent = new DataAwsRegion(this, "current_3", {});
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsRegionCurrent.overrideLogicalId("current");
+    const dataAwsIamPolicyDocumentExample = new DataAwsIamPolicyDocument(
+      this,
+      "example_4",
+      {
+        statement: [
+          {
+            actions: ["logs:CreateLogStream", "logs:PutLogEvents"],
+            condition: [
+              {
+                test: "ArnLike",
+                values: [
+                  "arn:aws:logs:${" +
+                    dataAwsRegionCurrent.name +
+                    "}:${" +
+                    current.accountId +
+                    "}:*",
+                ],
+                variable: "aws:SourceArn",
+              },
+              {
+                test: "StringEquals",
+                values: [Token.asString(Fn.tostring(current.accountId))],
+                variable: "aws:SourceAccount",
+              },
+            ],
+            effect: "Allow",
+            principals: [
+              {
+                identifiers: ["delivery.logs.amazonaws.com"],
+                type: "Service",
+              },
+            ],
+            resources: ["${" + example.arn + "}:*"],
+          },
+        ],
+        version: "2012-10-17",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    dataAwsIamPolicyDocumentExample.overrideLogicalId("example");
+    const awsCloudwatchLogResourcePolicyExample =
+      new CloudwatchLogResourcePolicy(this, "example_5", {
+        policyDocument: Token.asString(dataAwsIamPolicyDocumentExample.json),
+        policyName: "webacl-policy-uniq-name",
+      });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsCloudwatchLogResourcePolicyExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `logDestinationConfigs` - (Required) Configuration block that allows you to associate Amazon Kinesis Data Firehose, CloudWatch Logs log group, or S3 bucket Amazon Resource Names (ARNs) with the web ACL.
+* `loggingFilter` - (Optional) Configuration block that specifies which web requests are kept in the logs and which are dropped. It allows filtering based on the rule action and the web request labels applied by matching rules during web ACL evaluation. For more details, refer to the [Logging Filter](#logging-filter) section below.
+* `redactedFields` - (Optional) Configuration for parts of the request that you want to keep out of the logs. Up to 100 `redactedFields` blocks are supported. See [Redacted Fields](#redacted-fields) below for more details.
+* `resourceArn` - (Required) Amazon Resource Name (ARN) of the web ACL that you want to associate with `logDestinationConfigs`.
+
+### Logging Filter
+
+The `loggingFilter` block supports the following arguments:
+
+* `defaultBehavior` - (Required) Default handling for logs that don't match any of the specified filtering conditions. Valid values for `defaultBehavior` are `KEEP` or `DROP`.
+* `filter` - (Required) Filter(s) that you want to apply to the logs. See [Filter](#filter) below for more details.
+
+### Filter
+
+The `filter` block supports the following arguments:
+
+* `behavior` - (Required) Parameter that determines how to handle logs that meet the conditions and requirements of the filter. The valid values for `behavior` are `KEEP` or `DROP`.
+* `condition` - (Required) Match condition(s) for the filter. See [Condition](#condition) below for more details.
+* `requirement` - (Required) Logic to apply to the filtering conditions. You can specify that a log must match all conditions or at least one condition in order to satisfy the filter. Valid values for `requirement` are `MEETS_ALL` or `MEETS_ANY`.
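+
+As a hedged end-to-end illustration of the filter arguments above, the following sketch keeps log records only for requests that a rule blocked and drops everything else. The property shapes mirror the logging filter example earlier on this page; the destination and web ACL ARNs are placeholders.
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { Wafv2WebAclLoggingConfiguration } from "./.gen/providers/aws/wafv2-web-acl-logging-configuration";
+class BlockedOnlyLoggingSketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new Wafv2WebAclLoggingConfiguration(this, "example", {
+      // Placeholder ARNs; a real stack would reference resource objects.
+      logDestinationConfigs: [
+        "arn:aws:logs:us-west-2:123456789012:log-group:aws-waf-logs-example",
+      ],
+      resourceArn:
+        "arn:aws:wafv2:us-west-2:123456789012:regional/webacl/example/a1b2c3d4-5678-90ab-cdef-EXAMPLE11111",
+      loggingFilter: {
+        // Drop everything that no filter below keeps.
+        defaultBehavior: "DROP",
+        filter: [
+          {
+            behavior: "KEEP",
+            condition: [{ actionCondition: { action: "BLOCK" } }],
+            requirement: "MEETS_ANY",
+          },
+        ],
+      },
+    });
+  }
+}
+```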
+
+### Condition
+
+The `condition` block supports the following arguments:
+
+~> **NOTE:** Either the `actionCondition` or `labelNameCondition` must be specified.
+
+* `actionCondition` - (Optional) Configuration for a single action condition. See [Action Condition](#action-condition) below for more details.
+* `labelNameCondition` - (Optional) Condition for a single label name. See [Label Name Condition](#label-name-condition) below for more details.
+
+### Action Condition
+
+The `actionCondition` block supports the following argument:
+
+* `action` - (Required) Action setting that a log record must contain in order to meet the condition. Valid values for `action` are `ALLOW`, `BLOCK`, and `COUNT`.
+
+### Label Name Condition
+
+The `labelNameCondition` block supports the following argument:
+
+* `labelName` - (Required) Name of the label that a log record must contain in order to meet the condition. It must be a fully qualified label name, which includes a prefix, optional namespaces, and the label name itself. The prefix identifies the rule group or web ACL context of the rule that added the label.
+
+### Redacted Fields
+
+The `redactedFields` block supports the following arguments:
+
+~> **NOTE:** You can only specify one of the following: `method`, `queryString`, `singleHeader`, or `uriPath`.
+
+* `method` - (Optional) HTTP method to be redacted. It must be specified as an empty configuration block `{}`. The method indicates the type of operation that the request is asking the origin to perform.
+* `queryString` - (Optional) Whether to redact the query string. It must be specified as an empty configuration block `{}`. The query string is the part of a URL that appears after a `?` character, if any.
+* `singleHeader` - (Optional) Redacts a single header. See [Single Header](#single-header) below for details.
+* `uriPath` - (Optional) Configuration block that redacts the request URI path. It should be specified as an empty configuration block `{}`. The URI path is the part of a web request that identifies a resource, such as `/images/daily-ad.jpg`.
+
+### Single Header
+
+To redact a single header, provide the name of the header to be redacted. For example, use `user-agent` or `referer` (provided as lowercase strings).
+
+The `singleHeader` block supports the following arguments:
+
+* `name` - (Optional) Name of the header to redact. This setting must be provided in lowercase characters.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Amazon Resource Name (ARN) of the WAFv2 Web ACL.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WAFv2 Web ACL Logging Configurations using the ARN of the WAFv2 Web ACL.
For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WAFv2 Web ACL Logging Configurations using the ARN of the WAFv2 Web ACL. For example:
+
+```console
+% terraform import aws_wafv2_web_acl_logging_configuration.example arn:aws:wafv2:us-west-2:123456789012:regional/webacl/test-logs/a1b2c3d4-5678-90ab-cdef
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/worklink_fleet.html.markdown b/website/docs/cdktf/typescript/r/worklink_fleet.html.markdown
new file mode 100644
index 00000000000..6ceb319a6af
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/worklink_fleet.html.markdown
@@ -0,0 +1,149 @@
+---
+subcategory: "WorkLink"
+layout: "aws"
+page_title: "AWS: aws_worklink_fleet"
+description: |-
+  Provides an AWS WorkLink Fleet resource.
+---
+
+
+# Resource: aws_worklink_fleet
+
+Provides an AWS WorkLink Fleet resource.
+
+## Example Usage
+
+Basic usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WorklinkFleet } from "./.gen/providers/aws/worklink-fleet";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WorklinkFleet(this, "example", {
+      name: "terraform-example",
+    });
+  }
+}
+
+```
+
+Network Configuration Usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { propertyAccess, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WorklinkFleet } from "./.gen/providers/aws/worklink-fleet";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WorklinkFleet(this, "example", {
+      name: "terraform-example",
+      network: {
+        securityGroupIds: [test.id],
+        subnetIds: [Token.asString(propertyAccess(awsSubnetTest, ["*", "id"]))],
+        vpcId: Token.asString(awsVpcTest.id),
+      },
+    });
+  }
+}
+
+```
+
+Identity Provider Configuration Usage:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Fn, Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WorklinkFleet } from "./.gen/providers/aws/worklink-fleet";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WorklinkFleet(this, "test", {
+      identityProvider: {
+        samlMetadata: Token.asString(Fn.file("saml-metadata.xml")),
+        type: "SAML",
+      },
+      name: "tf-worklink-fleet",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) A region-unique name for the fleet.
+* `auditStreamArn` - (Optional) The ARN of the Amazon Kinesis data stream that receives the audit events.
The Kinesis data stream name must begin with `"AmazonWorkLink-"`.
+* `deviceCaCertificate` - (Optional) The certificate chain, including intermediate certificates and the root certificate authority certificate used to issue device certificates.
+* `identityProvider` - (Optional) Provide this to manage the identity provider configuration for the fleet. Fields documented below.
+* `displayName` - (Optional) The name of the fleet to display.
+* `network` - (Optional) Provide this to manage the company network configuration for the fleet. Fields documented below.
+* `optimizeForEndUserLocation` - (Optional) The option to optimize for better performance by routing traffic through the closest AWS Region to users, which may be outside of your home Region. Defaults to `true`.
+
+**network** requires the following:
+
+~> **NOTE:** `network` cannot be removed without forcing recreation of the resource (for example, via `terraform taint`).
+
+* `vpcId` - (Required) The VPC ID with connectivity to associated websites.
+* `subnetIds` - (Required) A list of subnet IDs used for X-ENI connections from Amazon WorkLink rendering containers.
+* `securityGroupIds` - (Required) A list of security group IDs associated with access to the provided subnets.
+
+**identity_provider** requires the following:
+
+~> **NOTE:** `identityProvider` cannot be removed without forcing recreation of the resource (for example, via `terraform taint`).
+
+* `type` - (Required) The type of identity provider.
+* `samlMetadata` - (Required) The SAML metadata document provided by the customer’s identity provider.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the created WorkLink Fleet.
+* `arn` - The ARN of the created WorkLink Fleet.
+* `companyCode` - The identifier used by users to sign in to the Amazon WorkLink app.
+* `createdTime` - The time that the fleet was created.
+* `lastUpdatedTime` - The time that the fleet was last updated.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkLink using the ARN. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WorkLink using the ARN. For example:
+
+```console
+% terraform import aws_worklink_fleet.test arn:aws:worklink::123456789012:fleet/example
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/worklink_website_certificate_authority_association.html.markdown b/website/docs/cdktf/typescript/r/worklink_website_certificate_authority_association.html.markdown
new file mode 100644
index 00000000000..abdf8dbad31
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/worklink_website_certificate_authority_association.html.markdown
@@ -0,0 +1,76 @@
+---
+subcategory: "WorkLink"
+layout: "aws"
+page_title: "AWS: aws_worklink_website_certificate_authority_association"
+description: |-
+  Provides an AWS WorkLink Website Certificate Authority Association resource.
+--- + + + +# Resource: aws_worklink_website_certificate_authority_association + +## Example Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Fn, Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WorklinkFleet } from "./.gen/providers/aws/worklink-fleet"; +import { WorklinkWebsiteCertificateAuthorityAssociation } from "./.gen/providers/aws/worklink-website-certificate-authority-association"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + new WorklinkFleet(this, "example", { + name: "terraform-example", + }); + new WorklinkWebsiteCertificateAuthorityAssociation(this, "test", { + certificate: Token.asString(Fn.file("certificate.pem")), + fleetArn: Token.asString(awsWorklinkFleetTest.arn), + }); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `fleetArn` - (Required, ForceNew) The ARN of the fleet. +* `certificate` - (Required, ForceNew) The root certificate of the Certificate Authority. +* `displayName` - (Optional, ForceNew) The certificate name to display. + +## Attribute Reference + +This resource exports the following attributes in addition to the arguments above: + +* `websiteCaId` - A unique identifier for the Certificate Authority. + +## Import + +In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkLink Website Certificate Authority using `fleetArn,websiteCaId`. For example: + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + } +} + +``` + +Using `terraform import`, import WorkLink Website Certificate Authority using `fleetArn,websiteCaId`. For example: + +```console +% terraform import aws_worklink_website_certificate_authority_association.example arn:aws:worklink::123456789012:fleet/example,abcdefghijk +``` + + \ No newline at end of file diff --git a/website/docs/cdktf/typescript/r/workspaces_connection_alias.html.markdown b/website/docs/cdktf/typescript/r/workspaces_connection_alias.html.markdown new file mode 100644 index 00000000000..0031e25f08b --- /dev/null +++ b/website/docs/cdktf/typescript/r/workspaces_connection_alias.html.markdown @@ -0,0 +1,85 @@ +--- +subcategory: "WorkSpaces" +layout: "aws" +page_title: "AWS: aws_workspaces_connection_alias" +description: |- + Terraform resource for managing an AWS WorkSpaces Connection Alias. +--- + + + +# Resource: aws_workspaces_connection_alias + +Terraform resource for managing an AWS WorkSpaces Connection Alias. + +## Example Usage + +### Basic Usage + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. 
+ */
+import { WorkspacesConnectionAlias } from "./.gen/providers/aws/workspaces-connection-alias";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WorkspacesConnectionAlias(this, "example", {
+      connectionString: "testdomain.test",
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `connectionString` - (Required) The connection string specified for the connection alias. The connection string must be in the form of a fully qualified domain name (FQDN), such as `www.example.com`.
+* `tags` – (Optional) A map of tags assigned to the WorkSpaces Connection Alias. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The identifier of the connection alias.
+* `ownerAccountId` - The identifier of the Amazon Web Services account that owns the connection alias.
+* `state` - The current state of the connection alias.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+* `create` - (Default `60m`)
+* `update` - (Default `180m`)
+* `delete` - (Default `90m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkSpaces Connection Alias using the connection alias ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WorkSpaces Connection Alias using the connection alias ID. For example:
+
+```console
+% terraform import aws_workspaces_connection_alias.example rft-8012925589
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/workspaces_directory.html.markdown b/website/docs/cdktf/typescript/r/workspaces_directory.html.markdown
new file mode 100644
index 00000000000..c7ff10f4492
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/workspaces_directory.html.markdown
@@ -0,0 +1,266 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_directory"
+description: |-
+  Provides a WorkSpaces directory in AWS WorkSpaces Service.
+---
+
+
+# Resource: aws_workspaces_directory
+
+Provides a WorkSpaces directory in AWS WorkSpaces Service.
+
+~> **NOTE:** AWS WorkSpaces service requires the [`workspaces_DefaultRole`](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspaces-access-control.html#create-default-role) IAM role to operate normally.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details. + */ +import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document"; +import { DirectoryServiceDirectory } from "./.gen/providers/aws/directory-service-directory"; +import { IamRole } from "./.gen/providers/aws/iam-role"; +import { IamRolePolicyAttachment } from "./.gen/providers/aws/iam-role-policy-attachment"; +import { Subnet } from "./.gen/providers/aws/subnet"; +import { Vpc } from "./.gen/providers/aws/vpc"; +import { WorkspacesDirectory } from "./.gen/providers/aws/workspaces-directory"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new Vpc(this, "example", { + cidrBlock: "10.0.0.0/16", + }); + const workspaces = new DataAwsIamPolicyDocument(this, "workspaces", { + statement: [ + { + actions: ["sts:AssumeRole"], + principals: [ + { + identifiers: ["workspaces.amazonaws.com"], + type: "Service", + }, + ], + }, + ], + }); + const workspacesDefault = new IamRole(this, "workspaces_default", { + assumeRolePolicy: Token.asString(workspaces.json), + name: "workspaces_DefaultRole", + }); + const workspacesDefaultSelfServiceAccess = new IamRolePolicyAttachment( + this, + "workspaces_default_self_service_access", + { + policyArn: "arn:aws:iam::aws:policy/AmazonWorkSpacesSelfServiceAccess", + role: workspacesDefault.name, + } + ); + const workspacesDefaultServiceAccess = new IamRolePolicyAttachment( + this, + "workspaces_default_service_access", + { + policyArn: "arn:aws:iam::aws:policy/AmazonWorkSpacesServiceAccess", + role: workspacesDefault.name, + } + ); + const exampleA = new Subnet(this, "example_a", { + availabilityZone: "us-east-1a", + cidrBlock: "10.0.0.0/24", + vpcId: example.id, + }); + const exampleB = new Subnet(this, "example_b", { + availabilityZone: "us-east-1b", + cidrBlock: "10.0.1.0/24", + vpcId: example.id, + }); + const exampleC = new Subnet(this, "example_c", { + availabilityZone: "us-east-1c", + cidrBlock: "10.0.2.0/24", + vpcId: example.id, + }); + const exampleD = new Subnet(this, "example_d", { + availabilityZone: "us-east-1d", + cidrBlock: "10.0.3.0/24", + vpcId: example.id, + }); + const awsDirectoryServiceDirectoryExample = new DirectoryServiceDirectory( + this, + "example_9", + { + name: "corp.example.com", + password: "#S1ncerely", + size: "Small", + vpcSettings: { + subnetIds: [exampleA.id, exampleB.id], + vpcId: example.id, + }, + } + ); + /*This allows the Terraform resource name to match the original name. 
You can remove the call if you don't need them to match.*/ + awsDirectoryServiceDirectoryExample.overrideLogicalId("example"); + const awsWorkspacesDirectoryExample = new WorkspacesDirectory( + this, + "example_10", + { + dependsOn: [ + workspacesDefaultServiceAccess, + workspacesDefaultSelfServiceAccess, + ], + directoryId: Token.asString(awsDirectoryServiceDirectoryExample.id), + selfServicePermissions: { + changeComputeType: true, + increaseVolumeSize: true, + rebuildWorkspace: true, + restartWorkspace: true, + switchRunningMode: true, + }, + subnetIds: [exampleC.id, exampleD.id], + tags: { + Example: Token.asString(true), + }, + workspaceAccessProperties: { + deviceTypeAndroid: "ALLOW", + deviceTypeChromeos: "ALLOW", + deviceTypeIos: "ALLOW", + deviceTypeLinux: "DENY", + deviceTypeOsx: "ALLOW", + deviceTypeWeb: "DENY", + deviceTypeWindows: "DENY", + deviceTypeZeroclient: "DENY", + }, + workspaceCreationProperties: { + customSecurityGroupId: Token.asString(awsSecurityGroupExample.id), + defaultOu: "OU=AWS,DC=Workgroup,DC=Example,DC=com", + enableInternetAccess: true, + enableMaintenanceMode: true, + userEnabledAsLocalAdministrator: true, + }, + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWorkspacesDirectoryExample.overrideLogicalId("example"); + } +} + +``` + +### IP Groups + +```typescript +// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug +import { Construct } from "constructs"; +import { Token, TerraformStack } from "cdktf"; +/* + * Provider bindings are generated by running `cdktf get`. + * See https://cdk.tf/provider-generation for more details. + */ +import { WorkspacesDirectory } from "./.gen/providers/aws/workspaces-directory"; +import { WorkspacesIpGroup } from "./.gen/providers/aws/workspaces-ip-group"; +class MyConvertedCode extends TerraformStack { + constructor(scope: Construct, name: string) { + super(scope, name); + const example = new WorkspacesIpGroup(this, "example", { + name: "example", + }); + const awsWorkspacesDirectoryExample = new WorkspacesDirectory( + this, + "example_1", + { + directoryId: Token.asString(awsDirectoryServiceDirectoryExample.id), + ipGroupIds: [example.id], + } + ); + /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/ + awsWorkspacesDirectoryExample.overrideLogicalId("example"); + } +} + +``` + +## Argument Reference + +This resource supports the following arguments: + +* `directoryId` - (Required) The directory identifier for registration in WorkSpaces service. +* `subnetIds` - (Optional) The identifiers of the subnets where the directory resides. +* `ipGroupIds` - The identifiers of the IP access control groups associated with the directory. +* `tags` – (Optional) A map of tags assigned to the WorkSpaces directory. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `selfServicePermissions` – (Optional) Permissions to enable or disable self-service capabilities. Defined below. +* `workspaceAccessProperties` – (Optional) Specifies which devices and operating systems users can use to access their WorkSpaces. Defined below. +* `workspaceCreationProperties` – (Optional) Default properties that are used for creating WorkSpaces. Defined below. 
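+
+The nested blocks referenced above are documented in the sections that follow. For contrast with the full examples earlier on this page, the following hedged sketch registers an existing directory with nothing but the required `directoryId` (the ID shown is a placeholder):
+
+```typescript
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+import { WorkspacesDirectory } from "./.gen/providers/aws/workspaces-directory";
+class MinimalDirectorySketch extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    // Register an existing AWS Directory Service directory with WorkSpaces.
+    // All optional arguments (subnetIds, tags, the nested blocks below)
+    // keep their defaults.
+    new WorkspacesDirectory(this, "example", {
+      directoryId: "d-1234567890",
+    });
+  }
+}
+```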
+
+### self_service_permissions
+
+* `changeComputeType` – (Optional) Whether WorkSpaces directory users can change the compute type (bundle) for their workspace. Default `false`.
+* `increaseVolumeSize` – (Optional) Whether WorkSpaces directory users can increase the volume size of the drives on their workspace. Default `false`.
+* `rebuildWorkspace` – (Optional) Whether WorkSpaces directory users can rebuild the operating system of a workspace to its original state. Default `false`.
+* `restartWorkspace` – (Optional) Whether WorkSpaces directory users can restart their workspace. Default `true`.
+* `switchRunningMode` – (Optional) Whether WorkSpaces directory users can switch the running mode of their workspace. Default `false`.
+
+### workspace_access_properties
+
+* `deviceTypeAndroid` – (Optional) Indicates whether users can use Android devices to access their WorkSpaces.
+* `deviceTypeChromeos` – (Optional) Indicates whether users can use Chromebooks to access their WorkSpaces.
+* `deviceTypeIos` – (Optional) Indicates whether users can use iOS devices to access their WorkSpaces.
+* `deviceTypeLinux` – (Optional) Indicates whether users can use Linux clients to access their WorkSpaces.
+* `deviceTypeOsx` – (Optional) Indicates whether users can use macOS clients to access their WorkSpaces.
+* `deviceTypeWeb` – (Optional) Indicates whether users can access their WorkSpaces through a web browser.
+* `deviceTypeWindows` – (Optional) Indicates whether users can use Windows clients to access their WorkSpaces.
+* `deviceTypeZeroclient` – (Optional) Indicates whether users can use zero client devices to access their WorkSpaces.
+
+### workspace_creation_properties
+
+-> **Note:** Once you have specified `customSecurityGroupId` or `defaultOu`, there is no way to delete these attributes. If you remove them from the configuration, they will still be present in state.
+
+* `customSecurityGroupId` – (Optional) The identifier of your custom security group. It must belong to the same VPC in which the WorkSpaces reside.
+* `defaultOu` – (Optional) The default organizational unit (OU) for your WorkSpace directories. Should conform to the pattern `"OU=<value>,DC=<value>,...,DC=<value>"`.
+* `enableInternetAccess` – (Optional) Indicates whether internet access is enabled for your WorkSpaces.
+* `enableMaintenanceMode` – (Optional) Indicates whether maintenance mode is enabled for your WorkSpaces. For more information, see [WorkSpace Maintenance](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspace-maintenance.html).
+* `userEnabledAsLocalAdministrator` – (Optional) Indicates whether users are local administrators of their WorkSpaces.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The WorkSpaces directory identifier.
+* `alias` - The directory alias.
+* `customerUserName` - The user name for the service account.
+* `directoryName` - The name of the directory.
+* `directoryType` - The directory type.
+* `dnsIpAddresses` - The IP addresses of the DNS servers for the directory.
+* `iamRoleId` - The identifier of the IAM role. This is the role that allows Amazon WorkSpaces to make calls to other services, such as Amazon EC2, on your behalf.
+* `ipGroupIds` - The identifiers of the IP access control groups associated with the directory.
+* `registrationCode` - The registration code for the directory. This is the code that users enter in their Amazon WorkSpaces client application to connect to the directory.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+* `workspaceSecurityGroupId` - The identifier of the security group that is assigned to new WorkSpaces.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Workspaces directory using the directory ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Workspaces directory using the directory ID. For example:
+
+```console
+% terraform import aws_workspaces_directory.main d-4444444444
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/workspaces_ip_group.html.markdown b/website/docs/cdktf/typescript/r/workspaces_ip_group.html.markdown
new file mode 100644
index 00000000000..981867d3e51
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/workspaces_ip_group.html.markdown
@@ -0,0 +1,99 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_ip_group"
+description: |-
+  Provides an IP access control group in AWS WorkSpaces Service.
+---
+
+
+# Resource: aws_workspaces_ip_group
+
+Provides an IP access control group in AWS WorkSpaces Service.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { WorkspacesIpGroup } from "./.gen/providers/aws/workspaces-ip-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new WorkspacesIpGroup(this, "contractors", {
+      description: "Contractors IP access control group",
+      name: "Contractors",
+      rules: [
+        {
+          description: "NY",
+          source: "150.24.14.0/24",
+        },
+        {
+          description: "LA",
+          source: "125.191.14.85/32",
+        },
+        {
+          description: "STL",
+          source: "44.98.100.0/24",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `name` - (Required) The name of the IP group.
+* `description` - (Optional) The description of the IP group.
+* `rules` - (Optional) One or more pairs specifying the IP group rule (in CIDR format) from which web requests originate.
+* `tags` – (Optional) A map of tags assigned to the WorkSpaces IP group. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+## Nested Blocks
+
+### `rules`
+
+#### Arguments
+
+* `source` - (Required) The IP address range, in CIDR notation, e.g., `10.0.0.0/16`.
+* `description` - (Optional) The description.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The IP group identifier.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import WorkSpaces IP groups using their GroupID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import WorkSpaces IP groups using their GroupID. For example:
+
+```console
+% terraform import aws_workspaces_ip_group.example wsipg-488lrtl3k
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/workspaces_workspace.html.markdown b/website/docs/cdktf/typescript/r/workspaces_workspace.html.markdown
new file mode 100644
index 00000000000..9d176150527
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/workspaces_workspace.html.markdown
@@ -0,0 +1,123 @@
+---
+subcategory: "WorkSpaces"
+layout: "aws"
+page_title: "AWS: aws_workspaces_workspace"
+description: |-
+  Provides a WorkSpace in AWS WorkSpaces Service.
+---
+
+
+# Resource: aws_workspaces_workspace
+
+Provides a WorkSpace in [AWS WorkSpaces](https://docs.aws.amazon.com/workspaces/latest/adminguide/amazon-workspaces.html) Service.
+
+~> **NOTE:** AWS WorkSpaces service requires the [`workspaces_DefaultRole`](https://docs.aws.amazon.com/workspaces/latest/adminguide/workspaces-access-control.html#create-default-role) IAM role to operate normally.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsWorkspacesBundle } from "./.gen/providers/aws/data-aws-workspaces-bundle";
+import { WorkspacesWorkspace } from "./.gen/providers/aws/workspaces-workspace";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const valueWindows10 = new DataAwsWorkspacesBundle(
+      this,
+      "value_windows_10",
+      {
+        bundleId: "wsb-bh8rsxt14",
+      }
+    );
+    new WorkspacesWorkspace(this, "example", {
+      bundleId: Token.asString(valueWindows10.id),
+      directoryId: Token.asString(awsWorkspacesDirectoryExample.id),
+      rootVolumeEncryptionEnabled: true,
+      tags: {
+        Department: "IT",
+      },
+      userName: "john.doe",
+      userVolumeEncryptionEnabled: true,
+      volumeEncryptionKey: "alias/aws/workspaces",
+      workspaceProperties: {
+        computeTypeName: "VALUE",
+        rootVolumeSizeGib: 80,
+        runningMode: "AUTO_STOP",
+        runningModeAutoStopTimeoutInMinutes: 60,
+        userVolumeSizeGib: 10,
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+This resource supports the following arguments:
+
+* `directoryId` - (Required) The ID of the directory for the WorkSpace.
+* `bundleId` - (Required) The ID of the bundle for the WorkSpace.
+* `userName` – (Required) The user name of the user for the WorkSpace. This user name must exist in the directory for the WorkSpace.
+* `rootVolumeEncryptionEnabled` - (Optional) Indicates whether the data stored on the root volume is encrypted.
+* `userVolumeEncryptionEnabled` – (Optional) Indicates whether the data stored on the user volume is encrypted.
+* `volumeEncryptionKey` – (Optional) The symmetric AWS KMS customer master key (CMK) used to encrypt data stored on your WorkSpace. Amazon WorkSpaces does not support asymmetric CMKs.
+* `tags` - (Optional) The tags for the WorkSpace. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+* `workspaceProperties` – (Optional) The WorkSpace properties.
+
+`workspaceProperties` supports the following:
+
+* `computeTypeName` – (Optional) The compute type. For more information, see [Amazon WorkSpaces Bundles](http://aws.amazon.com/workspaces/details/#Amazon_WorkSpaces_Bundles). Valid values are `VALUE`, `STANDARD`, `PERFORMANCE`, `POWER`, `GRAPHICS`, `POWERPRO`, `GRAPHICSPRO`, `GRAPHICS_G4DN`, and `GRAPHICSPRO_G4DN`.
+* `rootVolumeSizeGib` – (Optional) The size of the root volume.
+* `runningMode` – (Optional) The running mode. For more information, see [Manage the WorkSpace Running Mode](https://docs.aws.amazon.com/workspaces/latest/adminguide/running-mode.html). Valid values are `AUTO_STOP` and `ALWAYS_ON`.
+* `runningModeAutoStopTimeoutInMinutes` – (Optional) The time after a user logs off when WorkSpaces are automatically stopped. Configured in 60-minute intervals.
+* `userVolumeSizeGib` – (Optional) The size of the user storage.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The WorkSpace ID.
+* `ipAddress` - The IP address of the WorkSpace.
+* `computerName` - The name of the WorkSpace, as seen by the operating system.
+* `state` - The operational state of the WorkSpace.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
+
+## Timeouts
+
+[Configuration options](https://developer.hashicorp.com/terraform/language/resources/syntax#operation-timeouts):
+
+- `create` - (Default `30m`)
+- `update` - (Default `10m`)
+- `delete` - (Default `10m`)
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import Workspaces using their ID. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import Workspaces using their ID.
For example:
+
+```console
+% terraform import aws_workspaces_workspace.example ws-9z9zmbkhv
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/xray_encryption_config.html.markdown b/website/docs/cdktf/typescript/r/xray_encryption_config.html.markdown
new file mode 100644
index 00000000000..fe68a03ec21
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/xray_encryption_config.html.markdown
@@ -0,0 +1,128 @@
+---
+subcategory: "X-Ray"
+layout: "aws"
+page_title: "AWS: aws_xray_encryption_config"
+description: |-
+  Creates and manages an AWS XRay Encryption Config.
+---
+
+
+# Resource: aws_xray_encryption_config
+
+Creates and manages an AWS XRay Encryption Config.
+
+~> **NOTE:** Removing this resource from Terraform has no effect on the encryption configuration within X-Ray.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { XrayEncryptionConfig } from "./.gen/providers/aws/xray-encryption-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new XrayEncryptionConfig(this, "example", {
+      type: "NONE",
+    });
+  }
+}
+
+```
+
+## Example Usage with KMS Key
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { Token, TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { DataAwsCallerIdentity } from "./.gen/providers/aws/data-aws-caller-identity";
+import { DataAwsIamPolicyDocument } from "./.gen/providers/aws/data-aws-iam-policy-document";
+import { KmsKey } from "./.gen/providers/aws/kms-key";
+import { XrayEncryptionConfig } from "./.gen/providers/aws/xray-encryption-config";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    const current = new DataAwsCallerIdentity(this, "current", {});
+    const example = new DataAwsIamPolicyDocument(this, "example", {
+      statement: [
+        {
+          actions: ["kms:*"],
+          effect: "Allow",
+          principals: [
+            {
+              identifiers: ["arn:aws:iam::${" + current.accountId + "}:root"],
+              type: "AWS",
+            },
+          ],
+          resources: ["*"],
+          sid: "Enable IAM User Permissions",
+        },
+      ],
+    });
+    const awsKmsKeyExample = new KmsKey(this, "example_2", {
+      deletionWindowInDays: 7,
+      description: "Some Key",
+      policy: Token.asString(example.json),
+    });
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsKmsKeyExample.overrideLogicalId("example");
+    const awsXrayEncryptionConfigExample = new XrayEncryptionConfig(
+      this,
+      "example_3",
+      {
+        keyId: Token.asString(awsKmsKeyExample.arn),
+        type: "KMS",
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    awsXrayEncryptionConfigExample.overrideLogicalId("example");
+  }
+}
+
+```
+
+## Argument Reference
+
+* `type` - (Required) The type of encryption. Set to `KMS` to use your own key for encryption. Set to `NONE` for default encryption.
+* `keyId` - (Optional) An AWS KMS customer master key (CMK) ARN.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - Region name.
+
+## Import
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import XRay Encryption Config using the region name. For example:
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+  }
+}
+
+```
+
+Using `terraform import`, import XRay Encryption Config using the region name. For example:
+
+```console
+% terraform import aws_xray_encryption_config.example us-west-2
+```
+
+ 
\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/xray_group.html.markdown b/website/docs/cdktf/typescript/r/xray_group.html.markdown
new file mode 100644
index 00000000000..23931ccd30b
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/xray_group.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "X-Ray"
+layout: "aws"
+page_title: "AWS: aws_xray_group"
+description: |-
+  Creates and manages an AWS XRay Group.
+---
+
+
+# Resource: aws_xray_group
+
+Creates and manages an AWS XRay Group.
+
+## Example Usage
+
+```typescript
+// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
+import { Construct } from "constructs";
+import { TerraformStack } from "cdktf";
+/*
+ * Provider bindings are generated by running `cdktf get`.
+ * See https://cdk.tf/provider-generation for more details.
+ */
+import { XrayGroup } from "./.gen/providers/aws/xray-group";
+class MyConvertedCode extends TerraformStack {
+  constructor(scope: Construct, name: string) {
+    super(scope, name);
+    new XrayGroup(this, "example", {
+      filterExpression: "responsetime > 5",
+      groupName: "example",
+      insightsConfiguration: {
+        insightsEnabled: true,
+        notificationsEnabled: true,
+      },
+    });
+  }
+}
+
+```
+
+## Argument Reference
+
+* `groupName` - (Required) The name of the group.
+* `filterExpression` - (Required) The filter expression defining criteria by which to group traces. More information can be found in the official [documentation](https://docs.aws.amazon.com/xray/latest/devguide/xray-console-filters.html).
+* `insightsConfiguration` - (Optional) Configuration options for enabling insights.
+* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
+
+### Nested fields
+
+#### `insightsConfiguration`
+
+* `insightsEnabled` - (Required) Specifies whether insights are enabled.
+* `notificationsEnabled` - (Optional) Specifies whether insight notifications are enabled.
+
+## Attribute Reference
+
+This resource exports the following attributes in addition to the arguments above:
+
+* `id` - The ARN of the Group.
+* `arn` - The ARN of the Group.
+* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).
## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import XRay Groups using the ARN. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import XRay Groups using the ARN. For example:

```console
% terraform import aws_xray_group.example arn:aws:xray:us-west-2:1234567890:group/example-group/TNGX7SW5U6QY36T4ZMOUA3HVLBYCZTWDIOOXY3CJAXTHSS3YCWUA
```

\ No newline at end of file
diff --git a/website/docs/cdktf/typescript/r/xray_sampling_rule.html.markdown b/website/docs/cdktf/typescript/r/xray_sampling_rule.html.markdown
new file mode 100644
index 00000000000..503ddf8e4e7
--- /dev/null
+++ b/website/docs/cdktf/typescript/r/xray_sampling_rule.html.markdown
@@ -0,0 +1,96 @@
---
subcategory: "X-Ray"
layout: "aws"
page_title: "AWS: aws_xray_sampling_rule"
description: |-
  Creates and manages an AWS XRay Sampling Rule.
---

# Resource: aws_xray_sampling_rule

Creates and manages an AWS XRay Sampling Rule.

## Example Usage

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
/*
 * Provider bindings are generated by running `cdktf get`.
 * See https://cdk.tf/provider-generation for more details.
 */
import { XraySamplingRule } from "./.gen/providers/aws/xray-sampling-rule";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new XraySamplingRule(this, "example", {
      attributes: {
        Hello: "Tris",
      },
      fixedRate: 0.05,
      host: "*",
      httpMethod: "*",
      priority: 9999,
      reservoirSize: 1,
      resourceArn: "*",
      ruleName: "example",
      serviceName: "*",
      serviceType: "*",
      urlPath: "*",
      version: 1,
    });
  }
}

```

## Argument Reference

* `ruleName` - (Required) The name of the sampling rule.
* `resourceArn` - (Required) Matches the ARN of the AWS resource on which the service runs.
* `priority` - (Required) The priority of the sampling rule.
* `fixedRate` - (Required) The percentage of matching requests to instrument, after the reservoir is exhausted.
* `reservoirSize` - (Required) A fixed number of matching requests to instrument per second, prior to applying the fixed rate. The reservoir is not used directly by services, but applies to all services using the rule collectively.
* `serviceName` - (Required) Matches the `name` that the service uses to identify itself in segments.
* `serviceType` - (Required) Matches the `origin` that the service uses to identify its type in segments.
* `host` - (Required) Matches the hostname from a request URL.
* `httpMethod` - (Required) Matches the HTTP method of a request.
* `urlPath` - (Required) Matches the path from a request URL.
* `version` - (Required) The version of the sampling rule format (`1`).
* `attributes` - (Optional) Matches attributes derived from the request.
* `tags` - (Optional) Key-value mapping of resource tags. If configured with a provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
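To make the reservoir and fixed-rate interaction concrete: with `reservoirSize: 5` and `fixedRate: 0.1`, the first five matching requests each second are always recorded, then roughly 10% of any further matches. The sketch below (service and rule names are illustrative assumptions) pairs such a rule with the catch-all defaults shown above:

```typescript
// Hedged sketch: a narrower, higher-priority rule that samples one
// service more aggressively; X-Ray evaluates lower `priority` first.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { XraySamplingRule } from "./.gen/providers/aws/xray-sampling-rule";

class CheckoutSamplingExample extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    new XraySamplingRule(this, "checkout", {
      fixedRate: 0.1, // 10% of matches beyond the reservoir
      host: "*",
      httpMethod: "*",
      priority: 100, // evaluated before a 9999 catch-all rule
      reservoirSize: 5, // always record the first 5 matches per second
      resourceArn: "*",
      ruleName: "checkout",
      serviceName: "checkout-service", // illustrative name
      serviceType: "*",
      urlPath: "*",
      version: 1,
    });
  }
}
```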
## Attribute Reference

This resource exports the following attributes in addition to the arguments above:

* `id` - The name of the sampling rule.
* `arn` - The ARN of the sampling rule.
* `tagsAll` - A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#default_tags-configuration-block).

## Import

In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import XRay Sampling Rules using the name. For example:

```typescript
// Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
class MyConvertedCode extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
  }
}

```

Using `terraform import`, import XRay Sampling Rules using the name. For example:

```console
% terraform import aws_xray_sampling_rule.example example
```

\ No newline at end of file
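Since `cdktf convert` leaves the generated import blocks above empty, one hedged alternative is to emit the `import` block from code. This assumes cdktf 0.17 or later, where `TerraformResource` gained an `importFrom` method; the configuration must mirror the existing rule being imported:

```typescript
// Hedged sketch: generate a Terraform `import` block from CDKTF itself.
// Assumes cdktf >= 0.17; values must match the existing rule.
import { Construct } from "constructs";
import { TerraformStack } from "cdktf";
import { XraySamplingRule } from "./.gen/providers/aws/xray-sampling-rule";

class ImportedSamplingRule extends TerraformStack {
  constructor(scope: Construct, name: string) {
    super(scope, name);
    const rule = new XraySamplingRule(this, "example", {
      fixedRate: 0.05,
      host: "*",
      httpMethod: "*",
      priority: 9999,
      reservoirSize: 1,
      resourceArn: "*",
      ruleName: "example",
      serviceName: "*",
      serviceType: "*",
      urlPath: "*",
      version: 1,
    });
    // Imports the existing rule by name on the next plan/apply.
    rule.importFrom("example");
  }
}
```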